use std::ops::{ControlFlow, RangeInclusive};

use super::{Byte, Def, Reference, Region, Type};

#[cfg(test)]
mod tests;

8#[derive(#[automatically_derived]
impl<D: ::core::clone::Clone, R: ::core::clone::Clone,
T: ::core::clone::Clone> ::core::clone::Clone for Tree<D, R, T> where
D: Def, R: Region, T: Type {
#[inline]
fn clone(&self) -> Tree<D, R, T> {
match self {
Tree::Seq(__self_0) =>
Tree::Seq(::core::clone::Clone::clone(__self_0)),
Tree::Alt(__self_0) =>
Tree::Alt(::core::clone::Clone::clone(__self_0)),
Tree::Def(__self_0) =>
Tree::Def(::core::clone::Clone::clone(__self_0)),
Tree::Ref(__self_0) =>
Tree::Ref(::core::clone::Clone::clone(__self_0)),
Tree::Byte(__self_0) =>
Tree::Byte(::core::clone::Clone::clone(__self_0)),
}
}
}Clone, #[automatically_derived]
impl<D: ::core::fmt::Debug, R: ::core::fmt::Debug, T: ::core::fmt::Debug>
::core::fmt::Debug for Tree<D, R, T> where D: Def, R: Region, T: Type {
#[inline]
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
match self {
Tree::Seq(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f, "Seq",
&__self_0),
Tree::Alt(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f, "Alt",
&__self_0),
Tree::Def(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f, "Def",
&__self_0),
Tree::Ref(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f, "Ref",
&__self_0),
Tree::Byte(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f, "Byte",
&__self_0),
}
}
}Debug, #[automatically_derived]
impl<D: ::core::hash::Hash, R: ::core::hash::Hash, T: ::core::hash::Hash>
::core::hash::Hash for Tree<D, R, T> where D: Def, R: Region, T: Type {
#[inline]
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) {
let __self_discr = ::core::intrinsics::discriminant_value(self);
::core::hash::Hash::hash(&__self_discr, state);
match self {
Tree::Seq(__self_0) => ::core::hash::Hash::hash(__self_0, state),
Tree::Alt(__self_0) => ::core::hash::Hash::hash(__self_0, state),
Tree::Def(__self_0) => ::core::hash::Hash::hash(__self_0, state),
Tree::Ref(__self_0) => ::core::hash::Hash::hash(__self_0, state),
Tree::Byte(__self_0) => ::core::hash::Hash::hash(__self_0, state),
}
}
}Hash, #[automatically_derived]
impl<D: ::core::cmp::PartialEq, R: ::core::cmp::PartialEq,
T: ::core::cmp::PartialEq> ::core::cmp::PartialEq for Tree<D, R, T> where
D: Def, R: Region, T: Type {
#[inline]
fn eq(&self, other: &Tree<D, R, T>) -> bool {
let __self_discr = ::core::intrinsics::discriminant_value(self);
let __arg1_discr = ::core::intrinsics::discriminant_value(other);
__self_discr == __arg1_discr &&
match (self, other) {
(Tree::Seq(__self_0), Tree::Seq(__arg1_0)) =>
__self_0 == __arg1_0,
(Tree::Alt(__self_0), Tree::Alt(__arg1_0)) =>
__self_0 == __arg1_0,
(Tree::Def(__self_0), Tree::Def(__arg1_0)) =>
__self_0 == __arg1_0,
(Tree::Ref(__self_0), Tree::Ref(__arg1_0)) =>
__self_0 == __arg1_0,
(Tree::Byte(__self_0), Tree::Byte(__arg1_0)) =>
__self_0 == __arg1_0,
_ => unsafe { ::core::intrinsics::unreachable() }
}
}
}PartialEq, #[automatically_derived]
impl<D: ::core::cmp::Eq, R: ::core::cmp::Eq, T: ::core::cmp::Eq>
::core::cmp::Eq for Tree<D, R, T> where D: Def, R: Region, T: Type {
#[inline]
#[doc(hidden)]
#[coverage(off)]
fn assert_fields_are_eq(&self) {
let _: ::core::cmp::AssertParamIsEq<Vec<Self>>;
let _: ::core::cmp::AssertParamIsEq<Vec<Self>>;
let _: ::core::cmp::AssertParamIsEq<D>;
let _: ::core::cmp::AssertParamIsEq<Reference<R, T>>;
let _: ::core::cmp::AssertParamIsEq<Byte>;
}
}Eq)]
18pub(crate) enum Tree<D, R, T>
19where
20 D: Def,
21 R: Region,
22 T: Type,
23{
24 Seq(Vec<Self>),
26 Alt(Vec<Self>),
28 Def(D),
30 Ref(Reference<R, T>),
32 Byte(Byte),
34}
35
/// The byte order in which multi-byte values are laid out in memory.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub(crate) enum Endian {
    /// Least-significant byte first.
    Little,
    /// Most-significant byte first.
    Big,
}

#[cfg(feature = "rustc")]
impl From<rustc_abi::Endian> for Endian {
    /// Translates rustc's byte-order descriptor into the local `Endian`.
    fn from(order: rustc_abi::Endian) -> Endian {
        // The two enums correspond variant-for-variant.
        match order {
            rustc_abi::Endian::Big => Endian::Big,
            rustc_abi::Endian::Little => Endian::Little,
        }
    }
}

52impl<D, R, T> Tree<D, R, T>
53where
54 D: Def,
55 R: Region,
56 T: Type,
57{
58 pub(crate) fn def(def: D) -> Self {
60 Self::Def(def)
61 }
62
63 pub(crate) fn uninhabited() -> Self {
65 Self::Alt(::alloc::vec::Vec::new()vec![])
66 }
67
68 pub(crate) fn unit() -> Self {
70 Self::Seq(Vec::new())
71 }
72
73 pub(crate) fn uninit() -> Self {
75 Self::Byte(Byte::uninit())
76 }
77
78 pub(crate) fn bool() -> Self {
80 Self::byte(0x00..=0x01)
81 }
82
83 pub(crate) fn u8() -> Self {
85 Self::byte(0x00..=0xFF)
86 }
87
88 pub(crate) fn char(order: Endian) -> Self {
90 const _0: RangeInclusive<u8> = 0..=0;
101 const BYTE: RangeInclusive<u8> = 0x00..=0xFF;
102 let x = Self::from_big_endian(order, [_0, _0, 0x00..=0xD7, BYTE]);
103 let y = Self::from_big_endian(order, [_0, _0, 0xE0..=0xFF, BYTE]);
104 let z = Self::from_big_endian(order, [_0, 0x01..=0x10, BYTE, BYTE]);
105 Self::alt([x, y, z])
106 }
107
108 #[allow(dead_code)]
110 pub(crate) fn nonzero(width_in_bytes: u64) -> Self {
111 const BYTE: RangeInclusive<u8> = 0x00..=0xFF;
112 const NONZERO: RangeInclusive<u8> = 0x01..=0xFF;
113
114 (0..width_in_bytes)
115 .map(|nz_idx| {
116 (0..width_in_bytes)
117 .map(|pos| Self::byte(if pos == nz_idx { NONZERO } else { BYTE }))
118 .fold(Self::unit(), Self::then)
119 })
120 .fold(Self::uninhabited(), Self::or)
121 }
122
123 pub(crate) fn bytes<const N: usize, B: Into<Byte>>(bytes: [B; N]) -> Self {
124 Self::seq(bytes.map(B::into).map(Self::Byte))
125 }
126
127 pub(crate) fn byte(byte: impl Into<Byte>) -> Self {
128 Self::Byte(byte.into())
129 }
130
131 pub(crate) fn number(width_in_bytes: u64) -> Self {
133 Self::Seq(::alloc::vec::from_elem(Self::u8(), width_in_bytes.try_into().unwrap())vec![Self::u8(); width_in_bytes.try_into().unwrap()])
134 }
135
136 pub(crate) fn padding(width_in_bytes: usize) -> Self {
138 Self::Seq(::alloc::vec::from_elem(Self::uninit(), width_in_bytes)vec![Self::uninit(); width_in_bytes])
139 }
140
141 pub(crate) fn prune<F>(self, f: &F) -> Tree<!, R, T>
144 where
145 F: Fn(D) -> bool,
146 {
147 match self {
148 Self::Seq(elts) => match elts.into_iter().map(|elt| elt.prune(f)).try_fold(
149 Tree::unit(),
150 |elts, elt| {
151 if elt == Tree::uninhabited() {
152 ControlFlow::Break(Tree::uninhabited())
153 } else {
154 ControlFlow::Continue(elts.then(elt))
155 }
156 },
157 ) {
158 ControlFlow::Break(node) | ControlFlow::Continue(node) => node,
159 },
160 Self::Alt(alts) => alts
161 .into_iter()
162 .map(|alt| alt.prune(f))
163 .fold(Tree::uninhabited(), |alts, alt| alts.or(alt)),
164 Self::Byte(b) => Tree::Byte(b),
165 Self::Ref(r) => Tree::Ref(r),
166 Self::Def(d) => {
167 if f(d) {
168 Tree::uninhabited()
169 } else {
170 Tree::unit()
171 }
172 }
173 }
174 }
175
176 pub(crate) fn is_inhabited(&self) -> bool {
178 match self {
179 Self::Seq(elts) => elts.into_iter().all(|elt| elt.is_inhabited()),
180 Self::Alt(alts) => alts.into_iter().any(|alt| alt.is_inhabited()),
181 Self::Byte(..) | Self::Ref(..) | Self::Def(..) => true,
182 }
183 }
184
185 pub(crate) fn from_big_endian<const N: usize, B: Into<Byte>>(
191 order: Endian,
192 mut bytes: [B; N],
193 ) -> Self {
194 if order == Endian::Little {
195 (&mut bytes[..]).reverse();
196 }
197
198 Self::bytes(bytes)
199 }
200
201 pub(crate) fn seq<const N: usize>(trees: [Tree<D, R, T>; N]) -> Self {
204 trees.into_iter().fold(Tree::unit(), Self::then)
205 }
206
207 pub(crate) fn alt<const N: usize>(trees: [Tree<D, R, T>; N]) -> Self {
210 trees.into_iter().fold(Tree::uninhabited(), Self::or)
211 }
212
213 pub(crate) fn then(self, other: Self) -> Self {
215 match (self, other) {
216 (Self::Seq(elts), other) | (other, Self::Seq(elts)) if elts.len() == 0 => other,
217 (Self::Seq(mut lhs), Self::Seq(mut rhs)) => {
218 lhs.append(&mut rhs);
219 Self::Seq(lhs)
220 }
221 (Self::Seq(mut lhs), rhs) => {
222 lhs.push(rhs);
223 Self::Seq(lhs)
224 }
225 (lhs, Self::Seq(mut rhs)) => {
226 rhs.insert(0, lhs);
227 Self::Seq(rhs)
228 }
229 (lhs, rhs) => Self::Seq(::alloc::boxed::box_assume_init_into_vec_unsafe(::alloc::intrinsics::write_box_via_move(::alloc::boxed::Box::new_uninit(),
[lhs, rhs]))vec![lhs, rhs]),
230 }
231 }
232
233 pub(crate) fn or(self, other: Self) -> Self {
235 match (self, other) {
236 (Self::Alt(alts), other) | (other, Self::Alt(alts)) if alts.len() == 0 => other,
237 (Self::Alt(mut lhs), Self::Alt(rhs)) => {
238 lhs.extend(rhs);
239 Self::Alt(lhs)
240 }
241 (Self::Alt(mut alts), alt) | (alt, Self::Alt(mut alts)) => {
242 alts.push(alt);
243 Self::Alt(alts)
244 }
245 (lhs, rhs) => Self::Alt(::alloc::boxed::box_assume_init_into_vec_unsafe(::alloc::intrinsics::write_box_via_move(::alloc::boxed::Box::new_uninit(),
[lhs, rhs]))vec![lhs, rhs]),
246 }
247 }
248}
249
#[cfg(feature = "rustc")]
pub(crate) mod rustc {
    use rustc_abi::{
        FieldIdx, FieldsShape, Layout, Size, TagEncoding, TyAndLayout, VariantIdx, Variants,
    };
    use rustc_middle::ty::layout::{HasTyCtxt, LayoutCx, LayoutError};
    use rustc_middle::ty::{
        self, AdtDef, AdtKind, List, Region, ScalarInt, Ty, TyCtxt, TypeVisitableExt,
    };
    use rustc_span::ErrorGuaranteed;

    use super::Tree;
    use crate::layout::Reference;
    use crate::layout::rustc::{Def, layout_of};

    /// The reasons for which constructing a layout `Tree` for a type may fail.
    #[derive(Debug, Copy, Clone)]
    pub(crate) enum Err {
        /// The type's layout is not (yet) expressible as a `Tree`.
        NotYetSupported,
        /// The type's layout could not be computed.
        UnknownLayout,
        /// The type's size overflowed.
        SizeOverflow,
        /// A type error was already reported for this type.
        TypeError(ErrorGuaranteed),
    }

    impl<'tcx> From<&LayoutError<'tcx>> for Err {
        fn from(err: &LayoutError<'tcx>) -> Self {
            match err {
                LayoutError::Unknown(..)
                | LayoutError::ReferencesError(..)
                | LayoutError::TooGeneric(..)
                | LayoutError::InvalidSimd { .. }
                | LayoutError::NormalizationFailure(..) => Self::UnknownLayout,
                LayoutError::SizeOverflow(..) => Self::SizeOverflow,
            }
        }
    }

    impl<'tcx> Tree<Def<'tcx>, Region<'tcx>, Ty<'tcx>> {
        /// Produces a layout `Tree` for `ty`, or an `Err` if its layout
        /// cannot be computed or is not yet supported.
        pub(crate) fn from_ty(ty: Ty<'tcx>, cx: LayoutCx<'tcx>) -> Result<Self, Err> {
            use rustc_abi::HasDataLayout;
            let layout = layout_of(cx, ty)?;

            if let Err(e) = ty.error_reported() {
                return Err(Err::TypeError(e));
            }

            let target = cx.data_layout();
            let pointer_size = target.pointer_size();

            match ty.kind() {
                ty::Bool => Ok(Self::bool()),

                ty::Float(nty) => {
                    let width = nty.bit_width() / 8;
                    Ok(Self::number(width.try_into().unwrap()))
                }

                ty::Int(nty) => {
                    // `isize` is normalized to the target's pointer width.
                    let width = nty.normalize(pointer_size.bits() as _).bit_width().unwrap() / 8;
                    Ok(Self::number(width.try_into().unwrap()))
                }

                ty::Uint(nty) => {
                    // `usize` is normalized to the target's pointer width.
                    let width = nty.normalize(pointer_size.bits() as _).bit_width().unwrap() / 8;
                    Ok(Self::number(width.try_into().unwrap()))
                }

                ty::Tuple(members) => Self::from_tuple((ty, layout), members, cx),

                ty::Array(inner_ty, _len) => {
                    let FieldsShape::Array { stride, count } = &layout.fields else {
                        return Err(Err::NotYetSupported);
                    };
                    let inner_layout = layout_of(cx, *inner_ty)?;
                    assert_eq!(*stride, inner_layout.size);
                    let elt = Tree::from_ty(*inner_ty, cx)?;
                    Ok(std::iter::repeat_n(elt, *count as usize)
                        .fold(Tree::unit(), |tree, elt| tree.then(elt)))
                }

                ty::Adt(adt_def, _args_ref) if !ty.is_box() => match adt_def.adt_kind() {
                    AdtKind::Struct => Self::from_struct((ty, layout), *adt_def, cx),
                    AdtKind::Enum => Self::from_enum((ty, layout), *adt_def, cx),
                    AdtKind::Union => Self::from_union((ty, layout), *adt_def, cx),
                },

                ty::Ref(region, ty, mutability) => {
                    let layout = layout_of(cx, *ty)?;
                    let referent_align = layout.align.bytes_usize();
                    let referent_size = layout.size.bytes_usize();

                    Ok(Tree::Ref(Reference {
                        region: *region,
                        is_mut: mutability.is_mut(),
                        referent: *ty,
                        referent_align,
                        referent_size,
                    }))
                }

                ty::Char => Ok(Self::char(cx.tcx().data_layout.endian.into())),

                _ => Err(Err::NotYetSupported),
            }
        }

        /// Produces a layout `Tree` for a tuple type.
        fn from_tuple(
            (ty, layout): (Ty<'tcx>, Layout<'tcx>),
            members: &'tcx List<Ty<'tcx>>,
            cx: LayoutCx<'tcx>,
        ) -> Result<Self, Err> {
            match &layout.fields {
                FieldsShape::Primitive => {
                    // A tuple with a primitive layout must have exactly one
                    // member; delegate to that member's layout.
                    assert_eq!(members.len(), 1);
                    let inner_ty = members[0];
                    Self::from_ty(inner_ty, cx)
                }
                FieldsShape::Arbitrary { offsets, .. } => {
                    assert_eq!(offsets.len(), members.len());
                    Self::from_variant(Def::Primitive, None, (ty, layout), layout.size, cx)
                }
                FieldsShape::Array { .. } | FieldsShape::Union(_) => Err(Err::NotYetSupported),
            }
        }

        /// Produces a layout `Tree` for a struct type.
        ///
        /// # Panics
        ///
        /// Panics if `def` is not a struct definition.
        fn from_struct(
            (ty, layout): (Ty<'tcx>, Layout<'tcx>),
            def: AdtDef<'tcx>,
            cx: LayoutCx<'tcx>,
        ) -> Result<Self, Err> {
            assert!(def.is_struct());
            let def = Def::Adt(def);
            Self::from_variant(def, None, (ty, layout), layout.size, cx)
        }

        /// Produces a layout `Tree` for an enum type: the union of the
        /// layouts of its variants.
        ///
        /// # Panics
        ///
        /// Panics if `def` is not an enum definition.
        fn from_enum(
            (ty, layout): (Ty<'tcx>, Layout<'tcx>),
            def: AdtDef<'tcx>,
            cx: LayoutCx<'tcx>,
        ) -> Result<Self, Err> {
            assert!(def.is_enum());

            // Computes the layout `Tree` of a single variant, including its
            // tag bytes (if any).
            let layout_of_variant = |index, encoding: Option<_>| -> Result<Self, Err> {
                let variant_layout = ty_variant(cx, (ty, layout), index);
                if variant_layout.is_uninhabited() {
                    return Ok(Self::uninhabited());
                }
                let tag = cx.tcx().tag_for_variant(
                    cx.typing_env.as_query_input((cx.tcx().erase_and_anonymize_regions(ty), index)),
                );
                let variant_def = Def::Variant(def.variant(index));
                Self::from_variant(
                    variant_def,
                    tag.map(|tag| (tag, index, encoding.unwrap())),
                    (ty, variant_layout),
                    layout.size,
                    cx,
                )
            };

            match layout.variants() {
                Variants::Empty => Ok(Self::uninhabited()),
                Variants::Single { index } => {
                    // A single-variant layout carries no tag encoding.
                    layout_of_variant(*index, None)
                }
                Variants::Multiple { tag: _, tag_encoding, tag_field, .. } => {
                    // This implementation assumes the tag occupies the first
                    // field slot.
                    assert_eq!(*tag_field, FieldIdx::ZERO);

                    let variants = def.discriminants(cx.tcx()).try_fold(
                        Self::uninhabited(),
                        |variants, (idx, _discriminant)| {
                            let variant = layout_of_variant(idx, Some(tag_encoding.clone()))?;
                            Result::<Self, Err>::Ok(variants.or(variant))
                        },
                    )?;

                    Ok(Self::def(Def::Adt(def)).then(variants))
                }
            }
        }

        /// Produces a layout `Tree` for a single variant, padded out to
        /// `total_size`.
        ///
        /// When `tag` is `Some`, the tag's bytes are emitted at the start of
        /// the variant. For `TagEncoding::Direct` the tag always occupies
        /// space; for a niche encoding it only does so when the variant lies
        /// outside the niche.
        fn from_variant(
            def: Def<'tcx>,
            tag: Option<(ScalarInt, VariantIdx, TagEncoding<VariantIdx>)>,
            (ty, layout): (Ty<'tcx>, Layout<'tcx>),
            total_size: Size,
            cx: LayoutCx<'tcx>,
        ) -> Result<Self, Err> {
            // Only the `Arbitrary` shape exposes per-field offsets.
            let FieldsShape::Arbitrary { offsets, in_memory_order } = layout.fields() else {
                return Err(Err::NotYetSupported);
            };

            assert!(layout.size <= total_size);

            let mut size = Size::ZERO;
            let mut struct_tree = Self::def(def);

            // If a tag is present, emit its bytes and account for the space
            // they occupy.
            if let Some((tag, index, encoding)) = &tag {
                match encoding {
                    TagEncoding::Direct => {
                        size += tag.size();
                    }
                    TagEncoding::Niche { niche_variants, .. } => {
                        // A niched tag only consumes space for variants that
                        // lie outside the niche.
                        if !niche_variants.contains(index) {
                            size += tag.size();
                        }
                    }
                }
                struct_tree = struct_tree.then(Self::from_tag(*tag, cx.tcx()));
            }

            // Append each field in memory order, inserting padding as needed
            // to reach the field's offset.
            for &field_idx in in_memory_order.iter() {
                let padding_needed = offsets[field_idx] - size;
                let padding = Self::padding(padding_needed.bytes_usize());

                let field_ty = ty_field(cx, (ty, layout), field_idx);
                let field_layout = layout_of(cx, field_ty)?;
                let field_tree = Self::from_ty(field_ty, cx)?;

                struct_tree = struct_tree.then(padding).then(field_tree);

                size += padding_needed + field_layout.size;
            }

            // Pad the variant out to the total size of the enclosing type.
            let padding_needed = total_size - size;
            let trailing_padding = Self::padding(padding_needed.bytes_usize());

            Ok(struct_tree.then(trailing_padding))
        }

        /// Produces a `Tree` encoding the exact bytes of `tag` in the
        /// target's byte order.
        fn from_tag(tag: ScalarInt, tcx: TyCtxt<'tcx>) -> Self {
            use rustc_abi::Endian;
            let size = tag.size();
            let bits = tag.to_bits(size);
            let bytes: [u8; 16];
            let bytes = match tcx.data_layout.endian {
                Endian::Little => {
                    // Keep the low `size` bytes, which lead in LE order.
                    bytes = bits.to_le_bytes();
                    &bytes[..size.bytes_usize()]
                }
                Endian::Big => {
                    // Keep the low `size` bytes, which trail in BE order.
                    bytes = bits.to_be_bytes();
                    &bytes[bytes.len() - size.bytes_usize()..]
                }
            };
            Self::Seq(bytes.iter().map(|&b| Self::byte(b)).collect())
        }

        /// Produces a layout `Tree` for a union type: the union of the
        /// layouts of its fields, each padded out to the union's total size.
        ///
        /// # Panics
        ///
        /// Panics if `def` is not a union definition.
        fn from_union(
            (ty, layout): (Ty<'tcx>, Layout<'tcx>),
            def: AdtDef<'tcx>,
            cx: LayoutCx<'tcx>,
        ) -> Result<Self, Err> {
            assert!(def.is_union());

            let FieldsShape::Union(_fields) = layout.fields() else {
                return Err(Err::NotYetSupported);
            };

            let fields = &def.non_enum_variant().fields;
            let fields = fields.iter_enumerated().try_fold(
                Self::uninhabited(),
                |fields, (idx, _field_def)| {
                    let field_ty = ty_field(cx, (ty, layout), idx);
                    let field_layout = layout_of(cx, field_ty)?;
                    let field = Self::from_ty(field_ty, cx)?;
                    let trailing_padding_needed = layout.size - field_layout.size;
                    let trailing_padding = Self::padding(trailing_padding_needed.bytes_usize());
                    let field_and_padding = field.then(trailing_padding);
                    Result::<Self, Err>::Ok(fields.or(field_and_padding))
                },
            )?;

            Ok(Self::def(Def::Adt(def)).then(fields))
        }
    }

    /// Returns the type of field `i` of `ty`, given `ty`'s layout.
    ///
    /// Only ADTs (including the synthesized tag field of multi-variant
    /// layouts) and tuples are supported; other kinds are unimplemented.
    fn ty_field<'tcx>(
        cx: LayoutCx<'tcx>,
        (ty, layout): (Ty<'tcx>, Layout<'tcx>),
        i: FieldIdx,
    ) -> Ty<'tcx> {
        match ty.kind() {
            ty::Adt(def, args) => {
                match layout.variants {
                    Variants::Single { index } => {
                        let field = &def.variant(index).fields[i];
                        field.ty(cx.tcx(), args)
                    }
                    Variants::Empty => panic!("there is no field in Variants::Empty types"),
                    Variants::Multiple { tag, .. } => {
                        // The only field of a multi-variant layout reachable
                        // here is the tag, at index 0.
                        assert_eq!(i.as_usize(), 0);
                        ty::layout::PrimitiveExt::to_ty(&tag.primitive(), cx.tcx())
                    }
                }
            }
            ty::Tuple(fields) => fields[i.as_usize()],
            kind => unimplemented!(
                "only a subset of `Ty::ty_and_layout_field`'s functionality is implemented. implementation needed for {:?}",
                kind
            ),
        }
    }

    /// Returns the layout of variant `i` of `ty`.
    fn ty_variant<'tcx>(
        cx: LayoutCx<'tcx>,
        (ty, layout): (Ty<'tcx>, Layout<'tcx>),
        i: VariantIdx,
    ) -> Layout<'tcx> {
        let ty = cx.tcx().erase_and_anonymize_regions(ty);
        TyAndLayout { ty, layout }.for_variant(&cx, i).layout
    }
}