1use std::iter;
40
41use ast::visit::Visitor;
42use hir::def::{DefKind, Res};
43use hir::{BodyId, HirId};
44use rustc_abi::ExternAbi;
45use rustc_ast as ast;
46use rustc_ast::*;
47use rustc_data_structures::fx::FxHashSet;
48use rustc_errors::ErrorGuaranteed;
49use rustc_hir::attrs::{AttributeKind, InlineAttr};
50use rustc_hir::def_id::DefId;
51use rustc_hir::{self as hir, FnDeclFlags};
52use rustc_middle::span_bug;
53use rustc_middle::ty::Asyncness;
54use rustc_span::symbol::kw;
55use rustc_span::{Ident, Span, Symbol};
56use smallvec::SmallVec;
57
58use crate::delegation::generics::{GenericsGenerationResult, GenericsGenerationResults};
59use crate::errors::{CycleInDelegationSignatureResolution, UnresolvedDelegationCallee};
60use crate::{
61 AllowReturnTypeNotation, GenericArgsMode, ImplTraitContext, ImplTraitPosition, LoweringContext,
62 ParamMode, ResolverAstLoweringExt,
63};
64
65mod generics;
66
/// The lowered pieces of a delegation item, ready to be assembled into a HIR
/// function item by the caller.
pub(crate) struct DelegationResults<'hir> {
    // Body of the generated function (the forwarding call).
    pub body_id: hir::BodyId,
    // Signature derived from the delegation callee.
    pub sig: hir::FnSig<'hir>,
    // Name of the delegation item.
    pub ident: Ident,
    // Generics produced for the generated function.
    pub generics: &'hir hir::Generics<'hir>,
}
73
/// Describes one attribute that may be added to a delegation item
/// (see `ATTRS_ADDITIONS`).
struct AttrAdditionInfo {
    // Returns `true` when the given attribute is of the same kind as the one
    // this entry would add — used to skip additions the user wrote manually.
    pub equals: fn(&hir::Attribute) -> bool,
    // How the attribute is produced (fixed default or inherited from callee).
    pub kind: AttrAdditionKind,
}
78
/// How a missing attribute is constructed for a delegation item.
enum AttrAdditionKind {
    /// Added with a fixed default value built from the item's span.
    Default { factory: fn(Span) -> hir::Attribute },
    /// Built from the callee's matching attribute, when the callee has one.
    Inherit { factory: fn(Span, &hir::Attribute) -> hir::Attribute },
}
83
// Item-local id under which the delegation item's own attributes are stored.
const PARENT_ID: hir::ItemLocalId = hir::ItemLocalId::ZERO;
85
86static ATTRS_ADDITIONS: &[AttrAdditionInfo] = &[
87 AttrAdditionInfo {
88 equals: |a| #[allow(non_exhaustive_omitted_patterns)] match a {
hir::Attribute::Parsed(AttributeKind::MustUse { .. }) => true,
_ => false,
}matches!(a, hir::Attribute::Parsed(AttributeKind::MustUse { .. })),
89 kind: AttrAdditionKind::Inherit {
90 factory: |span, original_attr| {
91 let reason = match original_attr {
92 hir::Attribute::Parsed(AttributeKind::MustUse { reason, .. }) => *reason,
93 _ => None,
94 };
95
96 hir::Attribute::Parsed(AttributeKind::MustUse { span, reason })
97 },
98 },
99 },
100 AttrAdditionInfo {
101 equals: |a| #[allow(non_exhaustive_omitted_patterns)] match a {
hir::Attribute::Parsed(AttributeKind::Inline(..)) => true,
_ => false,
}matches!(a, hir::Attribute::Parsed(AttributeKind::Inline(..))),
102 kind: AttrAdditionKind::Default {
103 factory: |span| hir::Attribute::Parsed(AttributeKind::Inline(InlineAttr::Hint, span)),
104 },
105 },
106];
107
108impl<'hir> LoweringContext<'_, 'hir> {
109 fn is_method(&self, def_id: DefId, span: Span) -> bool {
110 match self.tcx.def_kind(def_id) {
111 DefKind::Fn => false,
112 DefKind::AssocFn => self.tcx.associated_item(def_id).is_method(),
113 _ => ::rustc_middle::util::bug::span_bug_fmt(span,
format_args!("unexpected DefKind for delegation item"))span_bug!(span, "unexpected DefKind for delegation item"),
114 }
115 }
116
117 pub(crate) fn lower_delegation(
118 &mut self,
119 delegation: &Delegation,
120 item_id: NodeId,
121 ) -> DelegationResults<'hir> {
122 let span = self.lower_span(delegation.path.segments.last().unwrap().ident.span);
123
124 let sig_id = if let Some(delegation_info) =
126 self.resolver.delegation_info(self.local_def_id(item_id))
127 {
128 self.get_sig_id(delegation_info.resolution_node, span)
129 } else {
130 self.dcx().span_delayed_bug(
131 span,
132 ::alloc::__export::must_use({
::alloc::fmt::format(format_args!("LoweringContext: the delegation {0:?} is unresolved",
item_id))
})format!("LoweringContext: the delegation {:?} is unresolved", item_id),
133 );
134
135 return self.generate_delegation_error(span, delegation);
136 };
137
138 match sig_id {
139 Ok(sig_id) => {
140 self.add_attrs_if_needed(span, sig_id);
141
142 let is_method = self.is_method(sig_id, span);
143
144 let (param_count, c_variadic) = self.param_count(sig_id);
145
146 let mut generics =
147 self.uplift_delegation_generics(delegation, sig_id, item_id, is_method);
148
149 let body_id = self.lower_delegation_body(
150 delegation,
151 is_method,
152 param_count,
153 &mut generics,
154 span,
155 );
156
157 let decl =
158 self.lower_delegation_decl(sig_id, param_count, c_variadic, span, &generics);
159
160 let sig = self.lower_delegation_sig(sig_id, decl, span);
161 let ident = self.lower_ident(delegation.ident);
162
163 let generics = self.arena.alloc(hir::Generics {
164 has_where_clause_predicates: false,
165 params: self.arena.alloc_from_iter(generics.all_params(span, self)),
166 predicates: self.arena.alloc_from_iter(generics.all_predicates(span, self)),
167 span,
168 where_clause_span: span,
169 });
170
171 DelegationResults { body_id, sig, ident, generics }
172 }
173 Err(_) => self.generate_delegation_error(span, delegation),
174 }
175 }
176
177 fn add_attrs_if_needed(&mut self, span: Span, sig_id: DefId) {
178 let new_attrs =
179 self.create_new_attrs(ATTRS_ADDITIONS, span, sig_id, self.attrs.get(&PARENT_ID));
180
181 if new_attrs.is_empty() {
182 return;
183 }
184
185 let new_arena_allocated_attrs = match self.attrs.get(&PARENT_ID) {
186 Some(existing_attrs) => self.arena.alloc_from_iter(
187 existing_attrs.iter().map(|a| a.clone()).chain(new_attrs.into_iter()),
188 ),
189 None => self.arena.alloc_from_iter(new_attrs.into_iter()),
190 };
191
192 self.attrs.insert(PARENT_ID, new_arena_allocated_attrs);
193 }
194
195 fn create_new_attrs(
196 &self,
197 candidate_additions: &[AttrAdditionInfo],
198 span: Span,
199 sig_id: DefId,
200 existing_attrs: Option<&&[hir::Attribute]>,
201 ) -> Vec<hir::Attribute> {
202 candidate_additions
203 .iter()
204 .filter_map(|addition_info| {
205 if let Some(existing_attrs) = existing_attrs
206 && existing_attrs
207 .iter()
208 .any(|existing_attr| (addition_info.equals)(existing_attr))
209 {
210 return None;
211 }
212
213 match addition_info.kind {
214 AttrAdditionKind::Default { factory } => Some(factory(span)),
215 AttrAdditionKind::Inherit { factory, .. } =>
216 {
217 #[allow(deprecated)]
218 self.tcx
219 .get_all_attrs(sig_id)
220 .iter()
221 .find_map(|a| (addition_info.equals)(a).then(|| factory(span, a)))
222 }
223 }
224 })
225 .collect::<Vec<_>>()
226 }
227
228 fn get_sig_id(&self, mut node_id: NodeId, span: Span) -> Result<DefId, ErrorGuaranteed> {
229 let mut visited: FxHashSet<NodeId> = Default::default();
230 let mut path: SmallVec<[DefId; 1]> = Default::default();
231
232 loop {
233 visited.insert(node_id);
234
235 let Some(def_id) = self.get_resolution_id(node_id) else {
236 return Err(self.tcx.dcx().span_delayed_bug(
237 span,
238 ::alloc::__export::must_use({
::alloc::fmt::format(format_args!("LoweringContext: couldn\'t resolve node {0:?} in delegation item",
node_id))
})format!(
239 "LoweringContext: couldn't resolve node {:?} in delegation item",
240 node_id
241 ),
242 ));
243 };
244
245 path.push(def_id);
246
247 if let Some(local_id) = def_id.as_local()
251 && let Some(delegation_info) = self.resolver.delegation_info(local_id)
252 {
253 node_id = delegation_info.resolution_node;
254 if visited.contains(&node_id) {
255 return Err(match visited.len() {
258 1 => self.dcx().emit_err(UnresolvedDelegationCallee { span }),
259 _ => self.dcx().emit_err(CycleInDelegationSignatureResolution { span }),
260 });
261 }
262 } else {
263 return Ok(path[0]);
264 }
265 }
266 }
267
268 fn get_resolution_id(&self, node_id: NodeId) -> Option<DefId> {
269 self.get_partial_res(node_id).and_then(|r| r.expect_full_res().opt_def_id())
270 }
271
272 fn param_count(&self, def_id: DefId) -> (usize, bool ) {
274 let sig = self.tcx.fn_sig(def_id).skip_binder().skip_binder();
275 (sig.inputs().len() + usize::from(sig.c_variadic()), sig.c_variadic())
276 }
277
    /// Builds the `FnDecl` of the delegation item.
    ///
    /// Parameter and return types cannot be written down at this point — they
    /// come from the callee — so each one is lowered to
    /// `TyKind::InferDelegation`, to be inferred from `sig_id` later.
    fn lower_delegation_decl(
        &mut self,
        sig_id: DefId,
        param_count: usize,
        c_variadic: bool,
        span: Span,
        generics: &GenericsGenerationResults<'hir>,
    ) -> &'hir hir::FnDecl<'hir> {
        // The variadic `...` is counted in `param_count` but gets no declared type.
        let decl_param_count = param_count - c_variadic as usize;
        let inputs = self.arena.alloc_from_iter((0..decl_param_count).map(|arg| hir::Ty {
            hir_id: self.next_id(),
            kind: hir::TyKind::InferDelegation(hir::InferDelegation::Sig(
                sig_id,
                hir::InferDelegationSig::Input(arg),
            )),
            span,
        }));

        // The output type additionally records the generic-argument segment
        // ids and self type collected while lowering the body
        // (see `finalize_body_lowering` / `process_segment`).
        let output = self.arena.alloc(hir::Ty {
            hir_id: self.next_id(),
            kind: hir::TyKind::InferDelegation(hir::InferDelegation::Sig(
                sig_id,
                hir::InferDelegationSig::Output(self.arena.alloc(hir::DelegationGenerics {
                    child_args_segment_id: generics.child.args_segment_id,
                    parent_args_segment_id: generics.parent.args_segment_id,
                    self_ty_id: generics.self_ty_id,
                    propagate_self_ty: generics.propagate_self_ty,
                })),
            )),
            span,
        });

        self.arena.alloc(hir::FnDecl {
            inputs,
            output: hir::FnRetTy::Return(output),
            fn_decl_kind: FnDeclFlags::default()
                .set_lifetime_elision_allowed(true)
                .set_c_variadic(c_variadic),
        })
    }
320
321 fn lower_delegation_sig(
322 &mut self,
323 sig_id: DefId,
324 decl: &'hir hir::FnDecl<'hir>,
325 span: Span,
326 ) -> hir::FnSig<'hir> {
327 let sig = self.tcx.fn_sig(sig_id).skip_binder().skip_binder();
328 let asyncness = match self.tcx.asyncness(sig_id) {
329 Asyncness::Yes => hir::IsAsync::Async(span),
330 Asyncness::No => hir::IsAsync::NotAsync,
331 };
332
333 let header = hir::FnHeader {
334 safety: if self.tcx.codegen_fn_attrs(sig_id).safe_target_features {
335 hir::HeaderSafety::SafeTargetFeatures
336 } else {
337 hir::HeaderSafety::Normal(sig.safety())
338 },
339 constness: self.tcx.constness(sig_id),
340 asyncness,
341 abi: sig.abi(),
342 };
343
344 hir::FnSig { decl, header, span }
345 }
346
347 fn generate_param(
348 &mut self,
349 is_method: bool,
350 idx: usize,
351 span: Span,
352 ) -> (hir::Param<'hir>, NodeId) {
353 let pat_node_id = self.next_node_id();
354 let pat_id = self.lower_node_id(pat_node_id);
355 let name = if is_method && idx == 0 {
357 kw::SelfLower
358 } else {
359 Symbol::intern(&::alloc::__export::must_use({
::alloc::fmt::format(format_args!("arg{0}", idx))
})format!("arg{idx}"))
360 };
361 let ident = Ident::with_dummy_span(name);
362 let pat = self.arena.alloc(hir::Pat {
363 hir_id: pat_id,
364 kind: hir::PatKind::Binding(hir::BindingMode::NONE, pat_id, ident, None),
365 span,
366 default_binding_modes: false,
367 });
368
369 (hir::Param { hir_id: self.next_id(), pat, ty_span: span, span }, pat_node_id)
370 }
371
372 fn generate_arg(
373 &mut self,
374 is_method: bool,
375 idx: usize,
376 param_id: HirId,
377 span: Span,
378 ) -> hir::Expr<'hir> {
379 let name = if is_method && idx == 0 {
381 kw::SelfLower
382 } else {
383 Symbol::intern(&::alloc::__export::must_use({
::alloc::fmt::format(format_args!("arg{0}", idx))
})format!("arg{idx}"))
384 };
385
386 let segments = self.arena.alloc_from_iter(iter::once(hir::PathSegment {
387 ident: Ident::with_dummy_span(name),
388 hir_id: self.next_id(),
389 res: Res::Local(param_id),
390 args: None,
391 infer_args: false,
392 }));
393
394 let path = self.arena.alloc(hir::Path { span, res: Res::Local(param_id), segments });
395 self.mk_expr(hir::ExprKind::Path(hir::QPath::Resolved(None, path)), span)
396 }
397
    /// Lowers the body of the delegation item: generates the parameters and
    /// the final forwarding expression.
    ///
    /// When the delegation has an explicit body block, that block is lowered
    /// in place of the first argument (or as the sole argument when there are
    /// no parameters), and resolutions of the delegation's own id inside it
    /// are redirected to the generated first parameter via `SelfResolver`.
    fn lower_delegation_body(
        &mut self,
        delegation: &Delegation,
        is_method: bool,
        param_count: usize,
        generics: &mut GenericsGenerationResults<'hir>,
        span: Span,
    ) -> BodyId {
        let block = delegation.body.as_deref();

        self.lower_body(|this| {
            let mut parameters: Vec<hir::Param<'_>> = Vec::with_capacity(param_count);
            let mut args: Vec<hir::Expr<'_>> = Vec::with_capacity(param_count);

            for idx in 0..param_count {
                let (param, pat_node_id) = this.generate_param(is_method, idx, span);
                parameters.push(param);

                let arg = if let Some(block) = block
                    && idx == 0
                {
                    // The explicit body supplies the first argument; rebind
                    // references to the delegation id onto the new parameter
                    // before lowering the block.
                    let mut self_resolver = SelfResolver {
                        ctxt: this,
                        path_id: delegation.id,
                        self_param_id: pat_node_id,
                    };
                    self_resolver.visit_block(block);
                    this.ident_and_label_to_local_id.insert(pat_node_id, param.pat.hir_id.local_id);
                    this.lower_target_expr(&block)
                } else {
                    // Remaining arguments simply forward the parameters.
                    this.generate_arg(is_method, idx, param.pat.hir_id, span)
                };
                args.push(arg);
            }

            // With zero parameters, an explicit body still becomes the single
            // call argument.
            if param_count == 0
                && let Some(block) = block
            {
                args.push(this.lower_target_expr(&block));
            }

            let final_expr = this.finalize_body_lowering(delegation, args, generics, span);

            (this.arena.alloc_from_iter(parameters), final_expr)
        })
    }
450
451 fn lower_target_expr(&mut self, block: &Block) -> hir::Expr<'hir> {
454 if let [stmt] = block.stmts.as_slice()
455 && let StmtKind::Expr(expr) = &stmt.kind
456 {
457 return self.lower_expr_mut(expr);
458 }
459
460 let block = self.lower_block(block, false);
461 self.mk_expr(hir::ExprKind::Block(block, None), block.span)
462 }
463
    /// Builds the final forwarding call of the delegation body from the
    /// already-lowered arguments and wraps it into a block expression.
    ///
    /// A callee that is a method, reached without a qualified self, without
    /// generic arguments on any prefix segment, and with at least one
    /// argument, is emitted as a method call; everything else becomes a plain
    /// call through a (re-processed) path.
    fn finalize_body_lowering(
        &mut self,
        delegation: &Delegation,
        args: Vec<hir::Expr<'hir>>,
        generics: &mut GenericsGenerationResults<'hir>,
        span: Span,
    ) -> hir::Expr<'hir> {
        let args = self.arena.alloc_from_iter(args);

        // Generic args on any segment but the last force the path-call form.
        let has_generic_args =
            delegation.path.segments.iter().rev().skip(1).any(|segment| segment.args.is_some());

        let call = if self
            .get_resolution_id(delegation.id)
            .map(|def_id| self.is_method(def_id, span))
            .unwrap_or_default()
            && delegation.qself.is_none()
            && !has_generic_args
            && !args.is_empty()
        {
            // Method-call form: only the last path segment is lowered, with
            // the generated child generic args propagated onto it.
            let ast_segment = delegation.path.segments.last().unwrap();
            let segment = self.lower_path_segment(
                delegation.path.span,
                ast_segment,
                ParamMode::Optional,
                GenericArgsMode::Err,
                ImplTraitContext::Disallowed(ImplTraitPosition::Path),
                None,
            );

            let segment = self.process_segment(span, &segment, &mut generics.child);
            let segment = self.arena.alloc(segment);

            self.arena.alloc(hir::Expr {
                hir_id: self.next_id(),
                kind: hir::ExprKind::MethodCall(segment, &args[0], &args[1..], span),
                span,
            })
        } else {
            let path = self.lower_qpath(
                delegation.id,
                &delegation.qself,
                &delegation.path,
                ParamMode::Optional,
                AllowReturnTypeNotation::No,
                ImplTraitContext::Disallowed(ImplTraitPosition::Path),
                None,
            );

            // Re-process the path: the second-to-last segment receives the
            // parent generics, the last segment the child generics; all other
            // segments are kept as-is.
            let new_path = match path {
                hir::QPath::Resolved(ty, path) => {
                    let mut new_path = path.clone();
                    let len = new_path.segments.len();

                    new_path.segments = self.arena.alloc_from_iter(
                        new_path.segments.iter().enumerate().map(|(idx, segment)| {
                            if idx + 2 == len {
                                self.process_segment(span, segment, &mut generics.parent)
                            } else if idx + 1 == len {
                                self.process_segment(span, segment, &mut generics.child)
                            } else {
                                segment.clone()
                            }
                        }),
                    );

                    hir::QPath::Resolved(ty, self.arena.alloc(new_path))
                }
                hir::QPath::TypeRelative(ty, segment) => {
                    let segment = self.process_segment(span, segment, &mut generics.child);

                    hir::QPath::TypeRelative(ty, self.arena.alloc(segment))
                }
            };

            // Record the path's self type (if present) for later signature
            // inference (consumed in `lower_delegation_decl`).
            generics.self_ty_id = match new_path {
                hir::QPath::Resolved(ty, _) => ty,
                hir::QPath::TypeRelative(ty, _) => Some(ty),
            }
            .map(|ty| ty.hir_id);

            let callee_path = self.arena.alloc(self.mk_expr(hir::ExprKind::Path(new_path), span));
            self.arena.alloc(self.mk_expr(hir::ExprKind::Call(callee_path, args), span))
        };

        // Wrap the call in a block so the body has a single block expression.
        let block = self.arena.alloc(hir::Block {
            stmts: &[],
            expr: Some(call),
            hir_id: self.next_id(),
            rules: hir::BlockCheckMode::DefaultBlock,
            span,
            targeted_by_break: false,
        });

        self.mk_expr(hir::ExprKind::Block(block, None), span)
    }
577
578 fn process_segment(
579 &mut self,
580 span: Span,
581 segment: &hir::PathSegment<'hir>,
582 result: &mut GenericsGenerationResult<'hir>,
583 ) -> hir::PathSegment<'hir> {
584 let details = result.generics.args_propagation_details();
585
586 let segment = if details.should_propagate {
587 let generics = result.generics.into_hir_generics(self, span);
588 let args = generics.into_generic_args(self, span);
589
590 let args = if args.is_empty() { None } else { Some(args) };
592
593 hir::PathSegment { args, ..segment.clone() }
594 } else {
595 segment.clone()
596 };
597
598 if details.use_args_in_sig_inheritance {
599 result.args_segment_id = Some(segment.hir_id);
600 }
601
602 segment
603 }
604
    /// Produces fallback `DelegationResults` when the delegation callee could
    /// not be resolved: a dummy declaration, a plain header, empty generics,
    /// and a body that still lowers the written path (and the explicit body
    /// block, if any).
    fn generate_delegation_error(
        &mut self,
        span: Span,
        delegation: &Delegation,
    ) -> DelegationResults<'hir> {
        let decl = self.arena.alloc(hir::FnDecl::dummy(span));

        let header = self.generate_header_error();
        let sig = hir::FnSig { decl, header, span };

        let ident = self.lower_ident(delegation.ident);

        let body_id = self.lower_body(|this| {
            let path = this.lower_qpath(
                delegation.id,
                &delegation.qself,
                &delegation.path,
                ParamMode::Optional,
                AllowReturnTypeNotation::No,
                ImplTraitContext::Disallowed(ImplTraitPosition::Path),
                None,
            );

            let callee_path = this.arena.alloc(this.mk_expr(hir::ExprKind::Path(path), span));
            // An explicit body, when present, is lowered as the sole argument.
            let args = if let Some(box block) = delegation.body.as_ref() {
                this.arena.alloc_slice(&[this.lower_target_expr(block)])
            } else {
                &mut []
            };

            let call = this.arena.alloc(this.mk_expr(hir::ExprKind::Call(callee_path, args), span));

            let block = this.arena.alloc(hir::Block {
                stmts: &[],
                expr: Some(call),
                hir_id: this.next_id(),
                rules: hir::BlockCheckMode::DefaultBlock,
                span,
                targeted_by_break: false,
            });

            (&[], this.mk_expr(hir::ExprKind::Block(block, None), span))
        });

        let generics = hir::Generics::empty();
        DelegationResults { ident, generics, body_id, sig }
    }
652
653 fn generate_header_error(&self) -> hir::FnHeader {
654 hir::FnHeader {
655 safety: hir::Safety::Safe.into(),
656 constness: hir::Constness::NotConst,
657 asyncness: hir::IsAsync::NotAsync,
658 abi: ExternAbi::Rust,
659 }
660 }
661
662 #[inline]
663 fn mk_expr(&mut self, kind: hir::ExprKind<'hir>, span: Span) -> hir::Expr<'hir> {
664 hir::Expr { hir_id: self.next_id(), kind, span }
665 }
666}
667
/// AST visitor that redirects local resolutions pointing at the delegation
/// item (`path_id`) to the generated `self` parameter (`self_param_id`), so
/// `self` uses in an explicit delegation body bind to the new parameter.
struct SelfResolver<'a, 'b, 'hir> {
    ctxt: &'a mut LoweringContext<'b, 'hir>,
    // `NodeId` of the delegation item whose local resolutions are redirected.
    path_id: NodeId,
    // `NodeId` of the generated `self` parameter's binding pattern.
    self_param_id: NodeId,
}
673
674impl SelfResolver<'_, '_, '_> {
675 fn try_replace_id(&mut self, id: NodeId) {
676 if let Some(res) = self.ctxt.get_partial_res(id)
677 && let Some(Res::Local(sig_id)) = res.full_res()
678 && sig_id == self.path_id
679 {
680 self.ctxt.partial_res_overrides.insert(id, self.self_param_id);
681 }
682 }
683}
684
// Visit every id in the explicit body and attempt the redirection on each.
impl<'ast> Visitor<'ast> for SelfResolver<'_, '_, '_> {
    fn visit_id(&mut self, id: NodeId) {
        self.try_replace_id(id);
    }
}