use std::borrow::Cow;
use std::fmt;
use std::hash::Hash;

use rustc_data_structures::base_n::{BaseNString, CASE_INSENSITIVE, ToBaseN};
use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::fx::FxIndexMap;
use rustc_data_structures::stable_hasher::{
    StableHash, StableHashCtxt, StableHasher, ToStableHashKey,
};
use rustc_data_structures::unord::UnordMap;
use rustc_hashes::Hash128;
use rustc_hir::ItemId;
use rustc_hir::attrs::{InlineAttr, Linkage};
use rustc_hir::def_id::{CrateNum, DefId, DefIdSet, LOCAL_CRATE};
use rustc_macros::{StableHash, TyDecodable, TyEncodable};
use rustc_session::config::OptLevel;
use rustc_span::{Span, Symbol};
use rustc_target::spec::SymbolVisibility;
use tracing::debug;

use crate::dep_graph::dep_node::{make_compile_codegen_unit, make_compile_mono_item};
use crate::dep_graph::{DepNode, WorkProduct, WorkProductId};
use crate::middle::codegen_fn_attrs::CodegenFnAttrFlags;
use crate::ty::{self, GenericArgs, Instance, InstanceKind, SymbolName, Ty, TyCtxt};
/// Describes how a monomorphization will be instantiated in object files.
#[derive(PartialEq)]
pub enum InstantiationMode {
    /// There will be exactly one instance of the given MonoItem. It will have
    /// external linkage so that it can be linked to from other codegen units.
    GloballyShared {
        /// In some compilation scenarios we may decide to take functions that
        /// are typically `LocalCopy` and instead move them to `GloballyShared`
        /// to avoid codegenning them a bunch of times. In this situation,
        /// however, our local copy may conflict with other crates also
        /// inlining the same function.
        ///
        /// This flag indicates that this situation is occurring, and informs
        /// symbol name calculation that some extra mangling is needed to
        /// avoid conflicts. Note that this may eventually go away entirely if
        /// ThinLTO enables us to *always* have a globally shared instance of a
        /// function within one crate's compilation.
        may_conflict: bool,
    },

    /// Each codegen unit containing a reference to the given MonoItem will
    /// have its own private copy of the function (with internal linkage).
    LocalCopy,
}
5152#[derive(#[automatically_derived]
impl ::core::cmp::PartialEq for NormalizationErrorInMono {
#[inline]
fn eq(&self, other: &NormalizationErrorInMono) -> bool { true }
}PartialEq, #[automatically_derived]
impl ::core::cmp::Eq for NormalizationErrorInMono {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_fields_are_eq(&self) {}
}Eq, #[automatically_derived]
impl ::core::clone::Clone for NormalizationErrorInMono {
#[inline]
fn clone(&self) -> NormalizationErrorInMono { *self }
}Clone, #[automatically_derived]
impl ::core::marker::Copy for NormalizationErrorInMono { }Copy, #[automatically_derived]
impl ::core::fmt::Debug for NormalizationErrorInMono {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::write_str(f, "NormalizationErrorInMono")
}
}Debug, #[automatically_derived]
impl ::core::hash::Hash for NormalizationErrorInMono {
#[inline]
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) {}
}Hash, const _: () =
{
impl ::rustc_data_structures::stable_hasher::StableHash for
NormalizationErrorInMono {
#[inline]
fn stable_hash<__Hcx: ::rustc_data_structures::stable_hasher::StableHashCtxt>(&self,
__hcx: &mut __Hcx,
__hasher:
&mut ::rustc_data_structures::stable_hasher::StableHasher) {
match *self { NormalizationErrorInMono => {} }
}
}
};StableHash, const _: () =
{
impl<'tcx, __E: ::rustc_middle::ty::codec::TyEncoder<'tcx>>
::rustc_serialize::Encodable<__E> for NormalizationErrorInMono {
fn encode(&self, __encoder: &mut __E) {
match *self { NormalizationErrorInMono => {} }
}
}
};TyEncodable, const _: () =
{
impl<'tcx, __D: ::rustc_middle::ty::codec::TyDecoder<'tcx>>
::rustc_serialize::Decodable<__D> for NormalizationErrorInMono {
fn decode(__decoder: &mut __D) -> Self {
NormalizationErrorInMono
}
}
};TyDecodable)]
53pub struct NormalizationErrorInMono;
5455#[derive(#[automatically_derived]
impl<'tcx> ::core::cmp::PartialEq for MonoItem<'tcx> {
#[inline]
fn eq(&self, other: &MonoItem<'tcx>) -> bool {
let __self_discr = ::core::intrinsics::discriminant_value(self);
let __arg1_discr = ::core::intrinsics::discriminant_value(other);
__self_discr == __arg1_discr &&
match (self, other) {
(MonoItem::Fn(__self_0), MonoItem::Fn(__arg1_0)) =>
__self_0 == __arg1_0,
(MonoItem::Static(__self_0), MonoItem::Static(__arg1_0)) =>
__self_0 == __arg1_0,
(MonoItem::GlobalAsm(__self_0), MonoItem::GlobalAsm(__arg1_0))
=> __self_0 == __arg1_0,
_ => unsafe { ::core::intrinsics::unreachable() }
}
}
}PartialEq, #[automatically_derived]
impl<'tcx> ::core::cmp::Eq for MonoItem<'tcx> {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_fields_are_eq(&self) {
let _: ::core::cmp::AssertParamIsEq<Instance<'tcx>>;
let _: ::core::cmp::AssertParamIsEq<DefId>;
let _: ::core::cmp::AssertParamIsEq<ItemId>;
}
}Eq, #[automatically_derived]
impl<'tcx> ::core::clone::Clone for MonoItem<'tcx> {
#[inline]
fn clone(&self) -> MonoItem<'tcx> {
let _: ::core::clone::AssertParamIsClone<Instance<'tcx>>;
let _: ::core::clone::AssertParamIsClone<DefId>;
let _: ::core::clone::AssertParamIsClone<ItemId>;
*self
}
}Clone, #[automatically_derived]
impl<'tcx> ::core::marker::Copy for MonoItem<'tcx> { }Copy, #[automatically_derived]
impl<'tcx> ::core::fmt::Debug for MonoItem<'tcx> {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
match self {
MonoItem::Fn(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f, "Fn",
&__self_0),
MonoItem::Static(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f, "Static",
&__self_0),
MonoItem::GlobalAsm(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f,
"GlobalAsm", &__self_0),
}
}
}Debug, #[automatically_derived]
impl<'tcx> ::core::hash::Hash for MonoItem<'tcx> {
#[inline]
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) {
let __self_discr = ::core::intrinsics::discriminant_value(self);
::core::hash::Hash::hash(&__self_discr, state);
match self {
MonoItem::Fn(__self_0) =>
::core::hash::Hash::hash(__self_0, state),
MonoItem::Static(__self_0) =>
::core::hash::Hash::hash(__self_0, state),
MonoItem::GlobalAsm(__self_0) =>
::core::hash::Hash::hash(__self_0, state),
}
}
}Hash, const _: () =
{
impl<'tcx> ::rustc_data_structures::stable_hasher::StableHash for
MonoItem<'tcx> {
#[inline]
fn stable_hash<__Hcx: ::rustc_data_structures::stable_hasher::StableHashCtxt>(&self,
__hcx: &mut __Hcx,
__hasher:
&mut ::rustc_data_structures::stable_hasher::StableHasher) {
::std::mem::discriminant(self).stable_hash(__hcx, __hasher);
match *self {
MonoItem::Fn(ref __binding_0) => {
{ __binding_0.stable_hash(__hcx, __hasher); }
}
MonoItem::Static(ref __binding_0) => {
{ __binding_0.stable_hash(__hcx, __hasher); }
}
MonoItem::GlobalAsm(ref __binding_0) => {
{ __binding_0.stable_hash(__hcx, __hasher); }
}
}
}
}
};StableHash, const _: () =
{
impl<'tcx, __E: ::rustc_middle::ty::codec::TyEncoder<'tcx>>
::rustc_serialize::Encodable<__E> for MonoItem<'tcx> {
fn encode(&self, __encoder: &mut __E) {
let disc =
match *self {
MonoItem::Fn(ref __binding_0) => { 0usize }
MonoItem::Static(ref __binding_0) => { 1usize }
MonoItem::GlobalAsm(ref __binding_0) => { 2usize }
};
::rustc_serialize::Encoder::emit_u8(__encoder, disc as u8);
match *self {
MonoItem::Fn(ref __binding_0) => {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
}
MonoItem::Static(ref __binding_0) => {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
}
MonoItem::GlobalAsm(ref __binding_0) => {
::rustc_serialize::Encodable::<__E>::encode(__binding_0,
__encoder);
}
}
}
}
};TyEncodable, const _: () =
{
impl<'tcx, __D: ::rustc_middle::ty::codec::TyDecoder<'tcx>>
::rustc_serialize::Decodable<__D> for MonoItem<'tcx> {
fn decode(__decoder: &mut __D) -> Self {
match ::rustc_serialize::Decoder::read_u8(__decoder) as usize
{
0usize => {
MonoItem::Fn(::rustc_serialize::Decodable::decode(__decoder))
}
1usize => {
MonoItem::Static(::rustc_serialize::Decodable::decode(__decoder))
}
2usize => {
MonoItem::GlobalAsm(::rustc_serialize::Decodable::decode(__decoder))
}
n => {
::core::panicking::panic_fmt(format_args!("invalid enum variant tag while decoding `MonoItem`, expected 0..3, actual {0}",
n));
}
}
}
}
};TyDecodable)]
56pub enum MonoItem<'tcx> {
57 Fn(Instance<'tcx>),
58 Static(DefId),
59 GlobalAsm(ItemId),
60}
6162fn opt_incr_drop_glue_mode<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> InstantiationMode {
63// Non-ADTs can't have a Drop impl. This case is mostly hit by closures whose captures require
64 // dropping.
65let ty::Adt(adt_def, _) = ty.kind() else {
66return InstantiationMode::LocalCopy;
67 };
6869// Types that don't have a direct Drop impl, but have fields that require dropping.
70let Some(dtor) = adt_def.destructor(tcx) else {
71// We use LocalCopy for drops of enums only; this code is inherited from
72 // https://github.com/rust-lang/rust/pull/67332 and the theory is that we get to optimize
73 // out code like drop_in_place(Option::None) before crate-local ThinLTO, which improves
74 // compile time. At the time of writing, simply removing this entire check does seem to
75 // regress incr-opt compile times. But it sure seems like a more sophisticated check could
76 // do better here.
77if adt_def.is_enum() {
78return InstantiationMode::LocalCopy;
79 } else {
80return InstantiationMode::GloballyShared { may_conflict: true };
81 }
82 };
8384// We've gotten to a drop_in_place for a type that directly implements Drop.
85 // The drop glue is a wrapper for the Drop::drop impl, and we are an optimized build, so in an
86 // effort to coordinate with the mode that the actual impl will get, we make the glue also
87 // LocalCopy.
88if tcx.cross_crate_inlinable(dtor.did) {
89 InstantiationMode::LocalCopy90 } else {
91 InstantiationMode::GloballyShared { may_conflict: true }
92 }
93}
9495impl<'tcx> MonoItem<'tcx> {
96/// Returns `true` if the mono item is user-defined (i.e. not compiler-generated, like shims).
97pub fn is_user_defined(&self) -> bool {
98match *self {
99 MonoItem::Fn(instance) => #[allow(non_exhaustive_omitted_patterns)] match instance.def {
InstanceKind::Item(..) => true,
_ => false,
}matches!(instance.def, InstanceKind::Item(..)),
100 MonoItem::Static(..) | MonoItem::GlobalAsm(..) => true,
101 }
102 }
103104// Note: if you change how item size estimates work, you might need to
105 // change NON_INCR_MIN_CGU_SIZE as well.
106pub fn size_estimate(&self, tcx: TyCtxt<'tcx>) -> usize {
107match *self {
108 MonoItem::Fn(instance) => tcx.size_estimate(instance),
109// Conservatively estimate the size of a static declaration or
110 // assembly item to be 1.
111MonoItem::Static(_) | MonoItem::GlobalAsm(_) => 1,
112 }
113 }
114115pub fn is_generic_fn(&self) -> bool {
116match self {
117 MonoItem::Fn(instance) => instance.args.non_erasable_generics().next().is_some(),
118 MonoItem::Static(..) | MonoItem::GlobalAsm(..) => false,
119 }
120 }
121122pub fn symbol_name(&self, tcx: TyCtxt<'tcx>) -> SymbolName<'tcx> {
123match *self {
124 MonoItem::Fn(instance) => tcx.symbol_name(instance),
125 MonoItem::Static(def_id) => tcx.symbol_name(Instance::mono(tcx, def_id)),
126 MonoItem::GlobalAsm(item_id) => {
127SymbolName::new(tcx, &::alloc::__export::must_use({
::alloc::fmt::format(format_args!("global_asm_{0:?}",
item_id.owner_id))
})format!("global_asm_{:?}", item_id.owner_id))
128 }
129 }
130 }
131132pub fn instantiation_mode(&self, tcx: TyCtxt<'tcx>) -> InstantiationMode {
133// The case handling here is written in the same style as cross_crate_inlinable, we first
134 // handle the cases where we must use a particular instantiation mode, then cascade down
135 // through a sequence of heuristics.
136137 // The first thing we do is detect MonoItems which we must instantiate exactly once in the
138 // whole program.
139140 // Statics and global_asm! must be instantiated exactly once.
141let instance = match *self {
142 MonoItem::Fn(instance) => instance,
143 MonoItem::Static(..) | MonoItem::GlobalAsm(..) => {
144return InstantiationMode::GloballyShared { may_conflict: false };
145 }
146 };
147148// Similarly, the executable entrypoint must be instantiated exactly once.
149if tcx.is_entrypoint(instance.def_id()) {
150return InstantiationMode::GloballyShared { may_conflict: false };
151 }
152153// If the function is #[naked] or contains any other attribute that requires exactly-once
154 // instantiation:
155 // We emit an unused_attributes lint for this case, which should be kept in sync if possible.
156let codegen_fn_attrs = tcx.codegen_instance_attrs(instance.def);
157if codegen_fn_attrs.contains_extern_indicator()
158 || codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::NAKED)
159 {
160return InstantiationMode::GloballyShared { may_conflict: false };
161 }
162163// This is technically a heuristic even though it's in the "not a heuristic" part of
164 // instantiation mode selection.
165 // It is surely possible to untangle this; the root problem is that the way we instantiate
166 // InstanceKind other than Item is very complicated.
167 //
168 // The fallback case is to give everything else GloballyShared at OptLevel::No and
169 // LocalCopy at all other opt levels. This is a good default, except for one specific build
170 // configuration: Optimized incremental builds.
171 // In the current compiler architecture there is a fundamental tension between
172 // optimizations (which want big CGUs with as many things LocalCopy as possible) and
173 // incrementality (which wants small CGUs with as many things GloballyShared as possible).
174 // The heuristics implemented here do better than a completely naive approach in the
175 // compiler benchmark suite, but there is no reason to believe they are optimal.
176if let InstanceKind::DropGlue(_, Some(ty)) = instance.def {
177if tcx.sess.opts.optimize == OptLevel::No {
178return InstantiationMode::GloballyShared { may_conflict: false };
179 }
180if tcx.sess.opts.incremental.is_none() {
181return InstantiationMode::LocalCopy;
182 }
183return opt_incr_drop_glue_mode(tcx, ty);
184 }
185186// We need to ensure that we do not decide the InstantiationMode of an exported symbol is
187 // LocalCopy. Since exported symbols are computed based on the output of
188 // cross_crate_inlinable, we are beholden to our previous decisions.
189 //
190 // Note that just like above, this check for requires_inline is technically a heuristic
191 // even though it's in the "not a heuristic" part of instantiation mode selection.
192if !tcx.cross_crate_inlinable(instance.def_id()) && !instance.def.requires_inline(tcx) {
193return InstantiationMode::GloballyShared { may_conflict: false };
194 }
195196// Beginning of heuristics. The handling of link-dead-code and inline(always) are QoL only,
197 // the compiler should not crash and linkage should work, but codegen may be undesirable.
198199 // -Clink-dead-code was given an unfortunate name; the point of the flag is to assist
200 // coverage tools which rely on having every function in the program appear in the
201 // generated code. If we select LocalCopy, functions which are not used because they are
202 // missing test coverage will disappear from such coverage reports, defeating the point.
203 // Note that -Cinstrument-coverage does not require such assistance from us, only coverage
204 // tools implemented without compiler support ironically require a special compiler flag.
205if tcx.sess.link_dead_code() {
206return InstantiationMode::GloballyShared { may_conflict: true };
207 }
208209// To ensure that #[inline(always)] can be inlined as much as possible, especially in unoptimized
210 // builds, we always select LocalCopy.
211if codegen_fn_attrs.inline.always() {
212return InstantiationMode::LocalCopy;
213 }
214215// #[inline(never)] functions in general are poor candidates for inlining and thus since
216 // LocalCopy generally increases code size for the benefit of optimizations from inlining,
217 // we want to give them GloballyShared codegen.
218 // The slight problem is that generic functions need to always support cross-crate
219 // compilation, so all previous stages of the compiler are obligated to treat generic
220 // functions the same as those that unconditionally get LocalCopy codegen. It's only when
221 // we get here that we can at least not codegen a #[inline(never)] generic function in all
222 // of our CGUs.
223if let InlineAttr::Never = codegen_fn_attrs.inline
224 && self.is_generic_fn()
225 {
226return InstantiationMode::GloballyShared { may_conflict: true };
227 }
228229// The fallthrough case is to generate LocalCopy for all optimized builds, and
230 // GloballyShared with conflict prevention when optimizations are disabled.
231match tcx.sess.opts.optimize {
232 OptLevel::No => InstantiationMode::GloballyShared { may_conflict: true },
233_ => InstantiationMode::LocalCopy,
234 }
235 }
236237pub fn explicit_linkage(&self, tcx: TyCtxt<'tcx>) -> Option<Linkage> {
238let instance_kind = match *self {
239 MonoItem::Fn(ref instance) => instance.def,
240 MonoItem::Static(def_id) => InstanceKind::Item(def_id),
241 MonoItem::GlobalAsm(..) => return None,
242 };
243244tcx.codegen_instance_attrs(instance_kind).linkage
245 }
246247/// Returns `true` if this instance is instantiable - whether it has no unsatisfied
248 /// predicates.
249 ///
250 /// In order to codegen an item, all of its predicates must hold, because
251 /// otherwise the item does not make sense. Type-checking ensures that
252 /// the predicates of every item that is *used by* a valid item *do*
253 /// hold, so we can rely on that.
254 ///
255 /// However, we codegen collector roots (reachable items) and functions
256 /// in vtables when they are seen, even if they are not used, and so they
257 /// might not be instantiable. For example, a programmer can define this
258 /// public function:
259 ///
260 /// pub fn foo<'a>(s: &'a mut ()) where &'a mut (): Clone {
261 /// <&mut () as Clone>::clone(&s);
262 /// }
263 ///
264 /// That function can't be codegened, because the method `<&mut () as Clone>::clone`
265 /// does not exist. Luckily for us, that function can't ever be used,
266 /// because that would require for `&'a mut (): Clone` to hold, so we
267 /// can just not emit any code, or even a linker reference for it.
268 ///
269 /// Similarly, if a vtable method has such a signature, and therefore can't
270 /// be used, we can just not emit it and have a placeholder (a null pointer,
271 /// which will never be accessed) in its place.
272pub fn is_instantiable(&self, tcx: TyCtxt<'tcx>) -> bool {
273{
use ::tracing::__macro_support::Callsite as _;
static __CALLSITE: ::tracing::callsite::DefaultCallsite =
{
static META: ::tracing::Metadata<'static> =
{
::tracing_core::metadata::Metadata::new("event compiler/rustc_middle/src/mono.rs:273",
"rustc_middle::mono", ::tracing::Level::DEBUG,
::tracing_core::__macro_support::Option::Some("compiler/rustc_middle/src/mono.rs"),
::tracing_core::__macro_support::Option::Some(273u32),
::tracing_core::__macro_support::Option::Some("rustc_middle::mono"),
::tracing_core::field::FieldSet::new(&["message"],
::tracing_core::callsite::Identifier(&__CALLSITE)),
::tracing::metadata::Kind::EVENT)
};
::tracing::callsite::DefaultCallsite::new(&META)
};
let enabled =
::tracing::Level::DEBUG <= ::tracing::level_filters::STATIC_MAX_LEVEL
&&
::tracing::Level::DEBUG <=
::tracing::level_filters::LevelFilter::current() &&
{
let interest = __CALLSITE.interest();
!interest.is_never() &&
::tracing::__macro_support::__is_enabled(__CALLSITE.metadata(),
interest)
};
if enabled {
(|value_set: ::tracing::field::ValueSet|
{
let meta = __CALLSITE.metadata();
::tracing::Event::dispatch(meta, &value_set);
;
})({
#[allow(unused_imports)]
use ::tracing::field::{debug, display, Value};
let mut iter = __CALLSITE.metadata().fields().iter();
__CALLSITE.metadata().fields().value_set(&[(&::tracing::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
::tracing::__macro_support::Option::Some(&format_args!("is_instantiable({0:?})",
self) as &dyn Value))])
});
} else { ; }
};debug!("is_instantiable({:?})", self);
274let (def_id, args) = match *self {
275 MonoItem::Fn(ref instance) => (instance.def_id(), instance.args),
276 MonoItem::Static(def_id) => (def_id, GenericArgs::empty()),
277// global asm never has predicates
278MonoItem::GlobalAsm(..) => return true,
279 };
280281 !tcx.instantiate_and_check_impossible_predicates((def_id, &args))
282 }
283284pub fn local_span(&self, tcx: TyCtxt<'tcx>) -> Option<Span> {
285match *self {
286 MonoItem::Fn(Instance { def, .. }) => def.def_id().as_local(),
287 MonoItem::Static(def_id) => def_id.as_local(),
288 MonoItem::GlobalAsm(item_id) => Some(item_id.owner_id.def_id),
289 }
290 .map(|def_id| tcx.def_span(def_id))
291 }
292293// Only used by rustc_codegen_cranelift
294pub fn codegen_dep_node(&self, tcx: TyCtxt<'tcx>) -> DepNode {
295make_compile_mono_item(tcx, self)
296 }
297298/// Returns the item's `CrateNum`
299pub fn krate(&self) -> CrateNum {
300match self {
301 MonoItem::Fn(instance) => instance.def_id().krate,
302 MonoItem::Static(def_id) => def_id.krate,
303 MonoItem::GlobalAsm(..) => LOCAL_CRATE,
304 }
305 }
306307/// Returns the item's `DefId`
308pub fn def_id(&self) -> DefId {
309match *self {
310 MonoItem::Fn(Instance { def, .. }) => def.def_id(),
311 MonoItem::Static(def_id) => def_id,
312 MonoItem::GlobalAsm(item_id) => item_id.owner_id.to_def_id(),
313 }
314 }
315}
316317impl<'tcx> fmt::Displayfor MonoItem<'tcx> {
318fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
319match *self {
320 MonoItem::Fn(instance) => f.write_fmt(format_args!("fn {0}", instance))write!(f, "fn {instance}"),
321 MonoItem::Static(def_id) => {
322f.write_fmt(format_args!("static {0}",
Instance::new_raw(def_id, GenericArgs::empty())))write!(f, "static {}", Instance::new_raw(def_id, GenericArgs::empty()))323 }
324 MonoItem::GlobalAsm(..) => f.write_fmt(format_args!("global_asm"))write!(f, "global_asm"),
325 }
326 }
327}
328329impl ToStableHashKeyfor MonoItem<'_> {
330type KeyType = Fingerprint;
331332fn to_stable_hash_key<Hcx: StableHashCtxt>(&self, hcx: &mut Hcx) -> Self::KeyType {
333let mut hasher = StableHasher::new();
334self.stable_hash(hcx, &mut hasher);
335hasher.finish()
336 }
337}
338339#[derive(#[automatically_derived]
impl<'tcx> ::core::fmt::Debug for MonoItemPartitions<'tcx> {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_struct_field2_finish(f,
"MonoItemPartitions", "codegen_units", &self.codegen_units,
"all_mono_items", &&self.all_mono_items)
}
}Debug, const _: () =
{
impl<'tcx> ::rustc_data_structures::stable_hasher::StableHash for
MonoItemPartitions<'tcx> {
#[inline]
fn stable_hash<__Hcx: ::rustc_data_structures::stable_hasher::StableHashCtxt>(&self,
__hcx: &mut __Hcx,
__hasher:
&mut ::rustc_data_structures::stable_hasher::StableHasher) {
match *self {
MonoItemPartitions {
codegen_units: ref __binding_0,
all_mono_items: ref __binding_1 } => {
{ __binding_0.stable_hash(__hcx, __hasher); }
{ __binding_1.stable_hash(__hcx, __hasher); }
}
}
}
}
};StableHash, #[automatically_derived]
impl<'tcx> ::core::marker::Copy for MonoItemPartitions<'tcx> { }Copy, #[automatically_derived]
impl<'tcx> ::core::clone::Clone for MonoItemPartitions<'tcx> {
#[inline]
fn clone(&self) -> MonoItemPartitions<'tcx> {
let _: ::core::clone::AssertParamIsClone<&'tcx [CodegenUnit<'tcx>]>;
let _: ::core::clone::AssertParamIsClone<&'tcx DefIdSet>;
*self
}
}Clone)]
340pub struct MonoItemPartitions<'tcx> {
341pub codegen_units: &'tcx [CodegenUnit<'tcx>],
342pub all_mono_items: &'tcx DefIdSet,
343}
344345#[derive(#[automatically_derived]
impl<'tcx> ::core::fmt::Debug for CodegenUnit<'tcx> {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_struct_field5_finish(f, "CodegenUnit",
"name", &self.name, "items", &self.items, "size_estimate",
&self.size_estimate, "primary", &self.primary,
"is_code_coverage_dead_code_cgu",
&&self.is_code_coverage_dead_code_cgu)
}
}Debug, const _: () =
{
impl<'tcx> ::rustc_data_structures::stable_hasher::StableHash for
CodegenUnit<'tcx> {
#[inline]
fn stable_hash<__Hcx: ::rustc_data_structures::stable_hasher::StableHashCtxt>(&self,
__hcx: &mut __Hcx,
__hasher:
&mut ::rustc_data_structures::stable_hasher::StableHasher) {
match *self {
CodegenUnit {
name: ref __binding_0,
items: ref __binding_1,
size_estimate: ref __binding_2,
primary: ref __binding_3,
is_code_coverage_dead_code_cgu: ref __binding_4 } => {
{ __binding_0.stable_hash(__hcx, __hasher); }
{ __binding_1.stable_hash(__hcx, __hasher); }
{ __binding_2.stable_hash(__hcx, __hasher); }
{ __binding_3.stable_hash(__hcx, __hasher); }
{ __binding_4.stable_hash(__hcx, __hasher); }
}
}
}
}
};StableHash)]
346pub struct CodegenUnit<'tcx> {
347/// A name for this CGU. Incremental compilation requires that
348 /// name be unique amongst **all** crates. Therefore, it should
349 /// contain something unique to this crate (e.g., a module path)
350 /// as well as the crate name and disambiguator.
351name: Symbol,
352 items: FxIndexMap<MonoItem<'tcx>, MonoItemData>,
353 size_estimate: usize,
354 primary: bool,
355/// True if this is CGU is used to hold code coverage information for dead code,
356 /// false otherwise.
357is_code_coverage_dead_code_cgu: bool,
358}
359360/// Auxiliary info about a `MonoItem`.
361#[derive(#[automatically_derived]
impl ::core::marker::Copy for MonoItemData { }Copy, #[automatically_derived]
impl ::core::clone::Clone for MonoItemData {
#[inline]
fn clone(&self) -> MonoItemData {
let _: ::core::clone::AssertParamIsClone<bool>;
let _: ::core::clone::AssertParamIsClone<Linkage>;
let _: ::core::clone::AssertParamIsClone<Visibility>;
let _: ::core::clone::AssertParamIsClone<usize>;
*self
}
}Clone, #[automatically_derived]
impl ::core::cmp::PartialEq for MonoItemData {
#[inline]
fn eq(&self, other: &MonoItemData) -> bool {
self.inlined == other.inlined && self.linkage == other.linkage &&
self.visibility == other.visibility &&
self.size_estimate == other.size_estimate
}
}PartialEq, #[automatically_derived]
impl ::core::fmt::Debug for MonoItemData {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::debug_struct_field4_finish(f, "MonoItemData",
"inlined", &self.inlined, "linkage", &self.linkage, "visibility",
&self.visibility, "size_estimate", &&self.size_estimate)
}
}Debug, const _: () =
{
impl ::rustc_data_structures::stable_hasher::StableHash for
MonoItemData {
#[inline]
fn stable_hash<__Hcx: ::rustc_data_structures::stable_hasher::StableHashCtxt>(&self,
__hcx: &mut __Hcx,
__hasher:
&mut ::rustc_data_structures::stable_hasher::StableHasher) {
match *self {
MonoItemData {
inlined: ref __binding_0,
linkage: ref __binding_1,
visibility: ref __binding_2,
size_estimate: ref __binding_3 } => {
{ __binding_0.stable_hash(__hcx, __hasher); }
{ __binding_1.stable_hash(__hcx, __hasher); }
{ __binding_2.stable_hash(__hcx, __hasher); }
{ __binding_3.stable_hash(__hcx, __hasher); }
}
}
}
}
};StableHash)]
362pub struct MonoItemData {
363/// A cached copy of the result of `MonoItem::instantiation_mode`, where
364 /// `GloballyShared` maps to `false` and `LocalCopy` maps to `true`.
365pub inlined: bool,
366367pub linkage: Linkage,
368pub visibility: Visibility,
369370/// A cached copy of the result of `MonoItem::size_estimate`.
371pub size_estimate: usize,
372}
373374/// Specifies the symbol visibility with regards to dynamic linking.
375///
376/// Visibility doesn't have any effect when linkage is internal.
377///
378/// DSO means dynamic shared object, that is a dynamically linked executable or dylib.
379#[derive(#[automatically_derived]
impl ::core::marker::Copy for Visibility { }Copy, #[automatically_derived]
impl ::core::clone::Clone for Visibility {
#[inline]
fn clone(&self) -> Visibility { *self }
}Clone, #[automatically_derived]
impl ::core::cmp::PartialEq for Visibility {
#[inline]
fn eq(&self, other: &Visibility) -> bool {
let __self_discr = ::core::intrinsics::discriminant_value(self);
let __arg1_discr = ::core::intrinsics::discriminant_value(other);
__self_discr == __arg1_discr
}
}PartialEq, #[automatically_derived]
impl ::core::fmt::Debug for Visibility {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::write_str(f,
match self {
Visibility::Default => "Default",
Visibility::Hidden => "Hidden",
Visibility::Protected => "Protected",
})
}
}Debug, const _: () =
{
impl ::rustc_data_structures::stable_hasher::StableHash for Visibility
{
#[inline]
fn stable_hash<__Hcx: ::rustc_data_structures::stable_hasher::StableHashCtxt>(&self,
__hcx: &mut __Hcx,
__hasher:
&mut ::rustc_data_structures::stable_hasher::StableHasher) {
::std::mem::discriminant(self).stable_hash(__hcx, __hasher);
match *self {
Visibility::Default => {}
Visibility::Hidden => {}
Visibility::Protected => {}
}
}
}
};StableHash, const _: () =
{
impl<'tcx, __E: ::rustc_middle::ty::codec::TyEncoder<'tcx>>
::rustc_serialize::Encodable<__E> for Visibility {
fn encode(&self, __encoder: &mut __E) {
let disc =
match *self {
Visibility::Default => { 0usize }
Visibility::Hidden => { 1usize }
Visibility::Protected => { 2usize }
};
::rustc_serialize::Encoder::emit_u8(__encoder, disc as u8);
match *self {
Visibility::Default => {}
Visibility::Hidden => {}
Visibility::Protected => {}
}
}
}
};TyEncodable, const _: () =
{
impl<'tcx, __D: ::rustc_middle::ty::codec::TyDecoder<'tcx>>
::rustc_serialize::Decodable<__D> for Visibility {
fn decode(__decoder: &mut __D) -> Self {
match ::rustc_serialize::Decoder::read_u8(__decoder) as usize
{
0usize => { Visibility::Default }
1usize => { Visibility::Hidden }
2usize => { Visibility::Protected }
n => {
::core::panicking::panic_fmt(format_args!("invalid enum variant tag while decoding `Visibility`, expected 0..3, actual {0}",
n));
}
}
}
}
};TyDecodable)]
380pub enum Visibility {
381/// Export the symbol from the DSO and apply overrides of the symbol by outside DSOs to within
382 /// the DSO if the object file format supports this.
383Default,
384/// Hide the symbol outside of the defining DSO even when external linkage is used to export it
385 /// from the object file.
386Hidden,
387/// Export the symbol from the DSO, but don't apply overrides of the symbol by outside DSOs to
388 /// within the DSO. Equivalent to default visibility with object file formats that don't support
389 /// overriding exported symbols by another DSO.
390Protected,
391}
impl From<SymbolVisibility> for Visibility {
    /// Maps the target-spec notion of symbol visibility onto the codegen one.
    /// `Interposable` maps to `Default`: the symbol is exported and may be
    /// overridden (interposed) by other DSOs.
    fn from(value: SymbolVisibility) -> Self {
        match value {
            SymbolVisibility::Hidden => Visibility::Hidden,
            SymbolVisibility::Protected => Visibility::Protected,
            SymbolVisibility::Interposable => Visibility::Default,
        }
    }
}
402403impl<'tcx> CodegenUnit<'tcx> {
404#[inline]
405pub fn new(name: Symbol) -> CodegenUnit<'tcx> {
406CodegenUnit {
407name,
408 items: Default::default(),
409 size_estimate: 0,
410 primary: false,
411 is_code_coverage_dead_code_cgu: false,
412 }
413 }
414415pub fn name(&self) -> Symbol {
416self.name
417 }
418419pub fn set_name(&mut self, name: Symbol) {
420self.name = name;
421 }
422423pub fn is_primary(&self) -> bool {
424self.primary
425 }
426427pub fn make_primary(&mut self) {
428self.primary = true;
429 }
430431pub fn items(&self) -> &FxIndexMap<MonoItem<'tcx>, MonoItemData> {
432&self.items
433 }
434435pub fn items_mut(&mut self) -> &mut FxIndexMap<MonoItem<'tcx>, MonoItemData> {
436&mut self.items
437 }
438439pub fn is_code_coverage_dead_code_cgu(&self) -> bool {
440self.is_code_coverage_dead_code_cgu
441 }
442443/// Marks this CGU as the one used to contain code coverage information for dead code.
444pub fn make_code_coverage_dead_code_cgu(&mut self) {
445self.is_code_coverage_dead_code_cgu = true;
446 }
447448pub fn mangle_name(human_readable_name: &str) -> BaseNString {
449let mut hasher = StableHasher::new();
450human_readable_name.hash(&mut hasher);
451let hash: Hash128 = hasher.finish();
452hash.as_u128().to_base_fixed_len(CASE_INSENSITIVE)
453 }
454455pub fn shorten_name(human_readable_name: &str) -> Cow<'_, str> {
456// Set a limit a somewhat below the common platform limits for file names.
457const MAX_CGU_NAME_LENGTH: usize = 200;
458const TRUNCATED_NAME_PREFIX: &str = "-trunc-";
459if human_readable_name.len() > MAX_CGU_NAME_LENGTH {
460let mangled_name = Self::mangle_name(human_readable_name);
461// Determine a safe byte offset to truncate the name to
462let truncate_to = human_readable_name.floor_char_boundary(
463MAX_CGU_NAME_LENGTH - TRUNCATED_NAME_PREFIX.len() - mangled_name.len(),
464 );
465::alloc::__export::must_use({
::alloc::fmt::format(format_args!("{0}{1}{2}",
&human_readable_name[..truncate_to], TRUNCATED_NAME_PREFIX,
mangled_name))
})format!(
466"{}{}{}",
467&human_readable_name[..truncate_to],
468 TRUNCATED_NAME_PREFIX,
469 mangled_name
470 )471 .into()
472 } else {
473// If the name is short enough, we can just return it as is.
474human_readable_name.into()
475 }
476 }
477478pub fn compute_size_estimate(&mut self) {
479// The size of a codegen unit as the sum of the sizes of the items
480 // within it.
481self.size_estimate = self.items.values().map(|data| data.size_estimate).sum();
482 }
483484/// Should only be called if [`compute_size_estimate`] has previously been called.
485 ///
486 /// [`compute_size_estimate`]: Self::compute_size_estimate
487#[inline]
488pub fn size_estimate(&self) -> usize {
489// Items are never zero-sized, so if we have items the estimate must be
490 // non-zero, unless we forgot to call `compute_size_estimate` first.
491if !(self.items.is_empty() || self.size_estimate != 0) {
::core::panicking::panic("assertion failed: self.items.is_empty() || self.size_estimate != 0")
};assert!(self.items.is_empty() || self.size_estimate != 0);
492self.size_estimate
493 }
494495pub fn contains_item(&self, item: &MonoItem<'tcx>) -> bool {
496self.items().contains_key(item)
497 }
498499pub fn work_product_id(&self) -> WorkProductId {
500WorkProductId::from_cgu_name(self.name().as_str())
501 }
502503pub fn previous_work_product(&self, tcx: TyCtxt<'_>) -> WorkProduct {
504let work_product_id = self.work_product_id();
505tcx.dep_graph
506 .previous_work_product(&work_product_id)
507 .unwrap_or_else(|| {
::core::panicking::panic_fmt(format_args!("Could not find work-product for CGU `{0}`",
self.name()));
}panic!("Could not find work-product for CGU `{}`", self.name()))
508 }
509510pub fn items_in_deterministic_order(
511&self,
512 tcx: TyCtxt<'tcx>,
513 ) -> Vec<(MonoItem<'tcx>, MonoItemData)> {
514// The codegen tests rely on items being process in the same order as
515 // they appear in the file, so for local items, we sort by span first
516#[derive(#[automatically_derived]
impl<'tcx> ::core::cmp::PartialEq for ItemSortKey<'tcx> {
#[inline]
fn eq(&self, other: &ItemSortKey<'tcx>) -> bool {
self.0 == other.0 && self.1 == other.1
}
}PartialEq, #[automatically_derived]
impl<'tcx> ::core::cmp::Eq for ItemSortKey<'tcx> {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_fields_are_eq(&self) {
let _: ::core::cmp::AssertParamIsEq<Option<Span>>;
let _: ::core::cmp::AssertParamIsEq<SymbolName<'tcx>>;
}
}Eq, #[automatically_derived]
impl<'tcx> ::core::cmp::PartialOrd for ItemSortKey<'tcx> {
#[inline]
fn partial_cmp(&self, other: &ItemSortKey<'tcx>)
-> ::core::option::Option<::core::cmp::Ordering> {
match ::core::cmp::PartialOrd::partial_cmp(&self.0, &other.0) {
::core::option::Option::Some(::core::cmp::Ordering::Equal) =>
::core::cmp::PartialOrd::partial_cmp(&self.1, &other.1),
cmp => cmp,
}
}
}PartialOrd, #[automatically_derived]
impl<'tcx> ::core::cmp::Ord for ItemSortKey<'tcx> {
#[inline]
fn cmp(&self, other: &ItemSortKey<'tcx>) -> ::core::cmp::Ordering {
match ::core::cmp::Ord::cmp(&self.0, &other.0) {
::core::cmp::Ordering::Equal =>
::core::cmp::Ord::cmp(&self.1, &other.1),
cmp => cmp,
}
}
}Ord)]
517struct ItemSortKey<'tcx>(Option<Span>, SymbolName<'tcx>);
518519// We only want to take HirIds of user-defines instances into account.
520 // The others don't matter for the codegen tests and can even make item
521 // order unstable.
522fn local_item_id<'tcx>(item: MonoItem<'tcx>) -> Option<DefId> {
523match item {
524 MonoItem::Fn(ref instance) => match instance.def {
525 InstanceKind::Item(def) => def.as_local().map(|_| def),
526 InstanceKind::VTableShim(..)
527 | InstanceKind::ReifyShim(..)
528 | InstanceKind::Intrinsic(..)
529 | InstanceKind::FnPtrShim(..)
530 | InstanceKind::Virtual(..)
531 | InstanceKind::ClosureOnceShim { .. }
532 | InstanceKind::ConstructCoroutineInClosureShim { .. }
533 | InstanceKind::DropGlue(..)
534 | InstanceKind::CloneShim(..)
535 | InstanceKind::ThreadLocalShim(..)
536 | InstanceKind::FnPtrAddrShim(..)
537 | InstanceKind::AsyncDropGlue(..)
538 | InstanceKind::FutureDropPollShim(..)
539 | InstanceKind::AsyncDropGlueCtorShim(..) => None,
540 },
541 MonoItem::Static(def_id) => def_id.as_local().map(|_| def_id),
542 MonoItem::GlobalAsm(item_id) => Some(item_id.owner_id.def_id.to_def_id()),
543 }
544 }
545fn item_sort_key<'tcx>(tcx: TyCtxt<'tcx>, item: MonoItem<'tcx>) -> ItemSortKey<'tcx> {
546ItemSortKey(
547local_item_id(item)
548 .map(|def_id| tcx.def_span(def_id).find_ancestor_not_from_macro())
549 .flatten(),
550item.symbol_name(tcx),
551 )
552 }
553554let mut items: Vec<_> = self.items().iter().map(|(&i, &data)| (i, data)).collect();
555if !tcx.sess.opts.unstable_opts.codegen_source_order {
556// In this case, we do not need to keep the items in any specific order, as the input
557 // is already deterministic.
558 //
559 // However, it seems that moving related things (such as different
560 // monomorphizations of the same function) close to one another is actually beneficial
561 // for LLVM performance.
562 // LLVM will codegen the items in the order we pass them to it, and when it handles
563 // similar things in succession, it seems that it leads to better cache utilization,
564 // less branch mispredictions and in general to better performance.
565 // For example, if we have functions `a`, `c::<u32>`, `b`, `c::<i16>`, `d` and
566 // `c::<bool>`, it seems that it helps LLVM's performance to codegen the three `c`
567 // instantiations right after one another, as they will likely reference similar types,
568 // call similar functions, etc.
569 //
570 // See https://github.com/rust-lang/rust/pull/145358 for more details.
571 //
572 // Sorting by symbol name should not incur any new non-determinism.
573items.sort_by_cached_key(|&(i, _)| i.symbol_name(tcx));
574 } else {
575items.sort_by_cached_key(|&(i, _)| item_sort_key(tcx, i));
576 }
577items578 }
579580pub fn codegen_dep_node(&self, tcx: TyCtxt<'tcx>) -> DepNode {
581make_compile_codegen_unit(tcx, self.name())
582 }
583}
584585impl ToStableHashKeyfor CodegenUnit<'_> {
586type KeyType = String;
587588fn to_stable_hash_key<Hcx>(&self, _: &mut Hcx) -> Self::KeyType {
589// Codegen unit names are conceptually required to be stable across
590 // compilation session so that object file names match up.
591self.name.to_string()
592 }
593}
/// Builds stable, collision-free codegen unit names (see `build_cgu_name`).
pub struct CodegenUnitNameBuilder<'tcx> {
    tcx: TyCtxt<'tcx>,
    // Memoizes the per-crate name prefix, keyed by crate number, so the
    // crate-name/disambiguator part is only formatted once per crate.
    cache: UnordMap<CrateNum, String>,
}
599600impl<'tcx> CodegenUnitNameBuilder<'tcx> {
601pub fn new(tcx: TyCtxt<'tcx>) -> Self {
602CodegenUnitNameBuilder { tcx, cache: Default::default() }
603 }
604605/// CGU names should fulfill the following requirements:
606 /// - They should be able to act as a file name on any kind of file system
607 /// - They should not collide with other CGU names, even for different versions
608 /// of the same crate.
609 ///
610 /// Consequently, we don't use special characters except for '.' and '-' and we
611 /// prefix each name with the crate-name and crate-disambiguator.
612 ///
613 /// This function will build CGU names of the form:
614 ///
615 /// ```text
616 /// <crate-name>.<crate-disambiguator>[-in-<local-crate-id>](-<component>)*[.<special-suffix>]
617 /// <local-crate-id> = <local-crate-name>.<local-crate-disambiguator>
618 /// ```
619 ///
620 /// The '.' before `<special-suffix>` makes sure that names with a special
621 /// suffix can never collide with a name built out of regular Rust
622 /// identifiers (e.g., module paths).
623pub fn build_cgu_name<I, C, S>(
624&mut self,
625 cnum: CrateNum,
626 components: I,
627 special_suffix: Option<S>,
628 ) -> Symbol629where
630I: IntoIterator<Item = C>,
631 C: fmt::Display,
632 S: fmt::Display,
633 {
634let cgu_name = self.build_cgu_name_no_mangle(cnum, components, special_suffix);
635636if self.tcx.sess.opts.unstable_opts.human_readable_cgu_names {
637Symbol::intern(&CodegenUnit::shorten_name(cgu_name.as_str()))
638 } else {
639Symbol::intern(&CodegenUnit::mangle_name(cgu_name.as_str()))
640 }
641 }
642643/// Same as `CodegenUnit::build_cgu_name()` but will never mangle the
644 /// resulting name.
645pub fn build_cgu_name_no_mangle<I, C, S>(
646&mut self,
647 cnum: CrateNum,
648 components: I,
649 special_suffix: Option<S>,
650 ) -> Symbol651where
652I: IntoIterator<Item = C>,
653 C: fmt::Display,
654 S: fmt::Display,
655 {
656use std::fmt::Write;
657658let mut cgu_name = String::with_capacity(64);
659660// Start out with the crate name and disambiguator
661let tcx = self.tcx;
662let crate_prefix = self.cache.entry(cnum).or_insert_with(|| {
663// Whenever the cnum is not LOCAL_CRATE we also mix in the
664 // local crate's ID. Otherwise there can be collisions between CGUs
665 // instantiating stuff for upstream crates.
666let local_crate_id = if cnum != LOCAL_CRATE {
667let local_stable_crate_id = tcx.stable_crate_id(LOCAL_CRATE);
668::alloc::__export::must_use({
::alloc::fmt::format(format_args!("-in-{0}.{1:08x}",
tcx.crate_name(LOCAL_CRATE), local_stable_crate_id))
})format!("-in-{}.{:08x}", tcx.crate_name(LOCAL_CRATE), local_stable_crate_id)669 } else {
670String::new()
671 };
672673let stable_crate_id = tcx.stable_crate_id(LOCAL_CRATE);
674::alloc::__export::must_use({
::alloc::fmt::format(format_args!("{0}.{1:08x}{2}",
tcx.crate_name(cnum), stable_crate_id, local_crate_id))
})format!("{}.{:08x}{}", tcx.crate_name(cnum), stable_crate_id, local_crate_id)675 });
676677cgu_name.write_fmt(format_args!("{0}", crate_prefix))write!(cgu_name, "{crate_prefix}").unwrap();
678679// Add the components
680for component in components {
681cgu_name.write_fmt(format_args!("-{0}", component))write!(cgu_name, "-{component}").unwrap();
682 }
683684if let Some(special_suffix) = special_suffix {
685// We add a dot in here so it cannot clash with anything in a regular
686 // Rust identifier
687cgu_name.write_fmt(format_args!(".{0}", special_suffix))write!(cgu_name, ".{special_suffix}").unwrap();
688 }
689690Symbol::intern(&cgu_name)
691 }
692}
693694/// See module-level docs of `rustc_monomorphize::collector` on some context for "mentioned" items.
695#[derive(#[automatically_derived]
impl ::core::marker::Copy for CollectionMode { }Copy, #[automatically_derived]
impl ::core::clone::Clone for CollectionMode {
#[inline]
fn clone(&self) -> CollectionMode { *self }
}Clone, #[automatically_derived]
impl ::core::fmt::Debug for CollectionMode {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
::core::fmt::Formatter::write_str(f,
match self {
CollectionMode::UsedItems => "UsedItems",
CollectionMode::MentionedItems => "MentionedItems",
})
}
}Debug, #[automatically_derived]
impl ::core::cmp::PartialEq for CollectionMode {
#[inline]
fn eq(&self, other: &CollectionMode) -> bool {
let __self_discr = ::core::intrinsics::discriminant_value(self);
let __arg1_discr = ::core::intrinsics::discriminant_value(other);
__self_discr == __arg1_discr
}
}PartialEq, #[automatically_derived]
impl ::core::cmp::Eq for CollectionMode {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_fields_are_eq(&self) {}
}Eq, #[automatically_derived]
impl ::core::hash::Hash for CollectionMode {
#[inline]
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) {
let __self_discr = ::core::intrinsics::discriminant_value(self);
::core::hash::Hash::hash(&__self_discr, state)
}
}Hash, const _: () =
{
impl ::rustc_data_structures::stable_hasher::StableHash for
CollectionMode {
#[inline]
fn stable_hash<__Hcx: ::rustc_data_structures::stable_hasher::StableHashCtxt>(&self,
__hcx: &mut __Hcx,
__hasher:
&mut ::rustc_data_structures::stable_hasher::StableHasher) {
::std::mem::discriminant(self).stable_hash(__hcx, __hasher);
match *self {
CollectionMode::UsedItems => {}
CollectionMode::MentionedItems => {}
}
}
}
};StableHash)]
696pub enum CollectionMode {
697/// Collect items that are used, i.e., actually needed for codegen.
698 ///
699 /// Which items are used can depend on optimization levels, as MIR optimizations can remove
700 /// uses.
701UsedItems,
702/// Collect items that are mentioned. The goal of this mode is that it is independent of
703 /// optimizations: the set of "mentioned" items is computed before optimizations are run.
704 ///
705 /// The exact contents of this set are *not* a stable guarantee. (For instance, it is currently
706 /// computed after drop-elaboration. If we ever do some optimizations even in debug builds, we
707 /// might decide to run them before computing mentioned items.) The key property of this set is
708 /// that it is optimization-independent.
709MentionedItems,
710}