1#![cfg_attr(feature = "nightly", allow(internal_features))]
3#![cfg_attr(feature = "nightly", doc(rust_logo))]
4#![cfg_attr(feature = "nightly", feature(assert_matches))]
5#![cfg_attr(feature = "nightly", feature(rustc_attrs))]
6#![cfg_attr(feature = "nightly", feature(rustdoc_internals))]
7#![cfg_attr(feature = "nightly", feature(step_trait))]
8use std::fmt;
43#[cfg(feature = "nightly")]
44use std::iter::Step;
45use std::num::{NonZeroUsize, ParseIntError};
46use std::ops::{Add, AddAssign, Deref, Mul, RangeFull, RangeInclusive, Sub};
47use std::str::FromStr;
48
49use bitflags::bitflags;
50#[cfg(feature = "nightly")]
51use rustc_data_structures::stable_hasher::StableOrd;
52use rustc_hashes::Hash64;
53use rustc_index::{Idx, IndexSlice, IndexVec};
54#[cfg(feature = "nightly")]
55use rustc_macros::{Decodable_NoContext, Encodable_NoContext, HashStable_Generic};
56
57mod callconv;
58mod canon_abi;
59mod extern_abi;
60mod layout;
61#[cfg(test)]
62mod tests;
63
64pub use callconv::{Heterogeneous, HomogeneousAggregate, Reg, RegKind};
65pub use canon_abi::{ArmCall, CanonAbi, InterruptKind, X86Call};
66pub use extern_abi::{ExternAbi, all_names};
67#[cfg(feature = "nightly")]
68pub use layout::{FIRST_VARIANT, FieldIdx, Layout, TyAbiInterface, TyAndLayout, VariantIdx};
69pub use layout::{LayoutCalculator, LayoutCalculatorError};
70
/// Marker trait for contexts that can stably hash the types in this crate.
/// Requirements are imposed by the `HashStable_Generic` derives used below.
#[cfg(feature = "nightly")]
pub trait HashStableContext {}
76
/// Bit flags recording which `#[repr(..)]` attributes (and layout-relevant
/// compiler decisions) apply to a type. Flag values are defined in the
/// `bitflags!` invocation below.
#[derive(Clone, Copy, PartialEq, Eq, Default)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub struct ReprFlags(u8);
83
bitflags! {
    impl ReprFlags: u8 {
        const IS_C = 1 << 0;
        const IS_SIMD = 1 << 1;
        const IS_TRANSPARENT = 1 << 2;
        // Internal only for now. If true, don't reorder fields.
        const IS_LINEAR = 1 << 3;
        // If true, the type's crate has opted into layout randomization.
        const RANDOMIZE_LAYOUT = 1 << 4;
        // Any of these flags being set prevents field reordering optimisation.
        const FIELD_ORDER_UNOPTIMIZABLE = ReprFlags::IS_C.bits()
            | ReprFlags::IS_SIMD.bits()
            | ReprFlags::IS_LINEAR.bits();
        // Any of these flags being set prevents ABI optimisations (e.g. scalar-pair).
        const ABI_UNOPTIMIZABLE = ReprFlags::IS_C.bits() | ReprFlags::IS_SIMD.bits();
    }
}
103
impl std::fmt::Debug for ReprFlags {
    // Delegates to bitflags' text writer so flags print by name (e.g. `IS_C | IS_SIMD`)
    // rather than as a raw integer.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        bitflags::parser::to_writer(self, f)
    }
}
111
/// The integer type selected by a `#[repr(..)]` attribute on an enum.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub enum IntegerType {
    /// Pointer-sized integer (`isize`/`usize`); the `bool` is `true` when signed.
    Pointer(bool),
    /// A fixed-width integer of the given size; the `bool` is `true` when signed
    /// (e.g. `i8` is `Fixed(I8, true)`).
    Fixed(Integer, bool),
}
125
126impl IntegerType {
127 pub fn is_signed(&self) -> bool {
128 match self {
129 IntegerType::Pointer(b) => *b,
130 IntegerType::Fixed(_, b) => *b,
131 }
132 }
133}
134
/// Parsed `#[repr(..)]` options for a type, plus layout-randomization state.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Default)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub struct ReprOptions {
    // Discriminant integer type requested via e.g. `#[repr(u8)]`, if any.
    pub int: Option<IntegerType>,
    // Minimum alignment from `#[repr(align(N))]`, if any.
    pub align: Option<Align>,
    // Maximum alignment from `#[repr(packed(N))]`, if any.
    pub pack: Option<Align>,
    pub flags: ReprFlags,
    // Seed used when field order is randomized (see `can_randomize_type_layout`).
    pub field_shuffle_seed: Hash64,
}
155
impl ReprOptions {
    /// Whether `#[repr(simd)]` was specified.
    #[inline]
    pub fn simd(&self) -> bool {
        self.flags.contains(ReprFlags::IS_SIMD)
    }

    /// Whether `#[repr(C)]` was specified.
    #[inline]
    pub fn c(&self) -> bool {
        self.flags.contains(ReprFlags::IS_C)
    }

    /// Whether `#[repr(packed(..))]` was specified.
    #[inline]
    pub fn packed(&self) -> bool {
        self.pack.is_some()
    }

    /// Whether `#[repr(transparent)]` was specified.
    #[inline]
    pub fn transparent(&self) -> bool {
        self.flags.contains(ReprFlags::IS_TRANSPARENT)
    }

    /// Whether the (internal) linear flag is set; fields keep source order.
    #[inline]
    pub fn linear(&self) -> bool {
        self.flags.contains(ReprFlags::IS_LINEAR)
    }

    /// The discriminant type requested by `repr`, defaulting to `isize`
    /// (i.e. a signed pointer-sized integer) when none was given.
    pub fn discr_type(&self) -> IntegerType {
        self.int.unwrap_or(IntegerType::Pointer(true))
    }

    /// Whether this repr forbids enum layout optimisations (`#[repr(C)]` or an
    /// explicit discriminant type both pin the layout).
    pub fn inhibit_enum_layout_opt(&self) -> bool {
        self.c() || self.int.is_some()
    }

    /// Whether this repr forbids the newtype ABI optimisation.
    pub fn inhibit_newtype_abi_optimization(&self) -> bool {
        self.flags.intersects(ReprFlags::ABI_UNOPTIMIZABLE)
    }

    /// Whether struct fields must keep their declared order (any
    /// order-pinning flag, or an explicit discriminant type).
    pub fn inhibit_struct_field_reordering(&self) -> bool {
        self.flags.intersects(ReprFlags::FIELD_ORDER_UNOPTIMIZABLE) || self.int.is_some()
    }

    /// Whether the type is eligible for layout randomization: reordering must be
    /// allowed and the crate must have opted in via `RANDOMIZE_LAYOUT`.
    pub fn can_randomize_type_layout(&self) -> bool {
        !self.inhibit_struct_field_reordering() && self.flags.contains(ReprFlags::RANDOMIZE_LAYOUT)
    }

    /// Whether this repr forbids union ABI optimisations.
    pub fn inhibits_union_abi_opt(&self) -> bool {
        self.c()
    }
}
216
217pub const MAX_SIMD_LANES: u64 = 1 << 0xF;
223
/// The size, alignment and offset (index) width of pointers in one address space,
/// as parsed from an LLVM data-layout `p` specification.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct PointerSpec {
    // Width of a pointer value itself.
    pointer_size: Size,
    // ABI alignment of pointers.
    pointer_align: AbiAlign,
    // Width of the index/offset component; equals `pointer_size` unless the
    // data layout gives an explicit index width.
    pointer_offset: Size,
    // Set when the spec carried an `f` ("fat pointer") marker, e.g. on CHERI-style
    // targets; currently recorded but otherwise unused here.
    _is_fat: bool,
}
237
/// Parsed [Data layout](https://llvm.org/docs/LangRef.html#data-layout)
/// for a target: endianness plus size/alignment info for primitive types
/// and pointers in each address space.
#[derive(Debug, PartialEq, Eq)]
pub struct TargetDataLayout {
    pub endian: Endian,
    pub i1_align: AbiAlign,
    pub i8_align: AbiAlign,
    pub i16_align: AbiAlign,
    pub i32_align: AbiAlign,
    pub i64_align: AbiAlign,
    pub i128_align: AbiAlign,
    pub f16_align: AbiAlign,
    pub f32_align: AbiAlign,
    pub f64_align: AbiAlign,
    pub f128_align: AbiAlign,
    pub aggregate_align: AbiAlign,

    /// Alignments for vector types, keyed by vector size.
    pub vector_align: Vec<(Size, AbiAlign)>,

    /// The address space that pointers default to (usually 0).
    pub default_address_space: AddressSpace,
    pub default_address_space_pointer_spec: PointerSpec,

    // Pointer specs for every non-default address space mentioned by the
    // data layout string; looked up by the `*_in(AddressSpace)` accessors.
    address_space_info: Vec<(AddressSpace, PointerSpec)>,

    pub instruction_address_space: AddressSpace,

    /// Minimum size of a C-style enum on this target; not derivable from the
    /// LLVM data layout string, so it keeps its default after parsing.
    pub c_enum_min_size: Integer,
}
276
impl Default for TargetDataLayout {
    /// Baseline values that LLVM-style data layout strings are parsed on top of;
    /// each `-`-separated spec overrides one of these defaults.
    fn default() -> TargetDataLayout {
        let align = |bits| Align::from_bits(bits).unwrap();
        TargetDataLayout {
            // Big-endian unless the string contains an `e` spec.
            endian: Endian::Big,
            i1_align: AbiAlign::new(align(8)),
            i8_align: AbiAlign::new(align(8)),
            i16_align: AbiAlign::new(align(16)),
            i32_align: AbiAlign::new(align(32)),
            i64_align: AbiAlign::new(align(32)),
            i128_align: AbiAlign::new(align(32)),
            f16_align: AbiAlign::new(align(16)),
            f32_align: AbiAlign::new(align(32)),
            f64_align: AbiAlign::new(align(64)),
            f128_align: AbiAlign::new(align(128)),
            aggregate_align: AbiAlign { abi: align(8) },
            vector_align: vec![
                (Size::from_bits(64), AbiAlign::new(align(64))),
                (Size::from_bits(128), AbiAlign::new(align(128))),
            ],
            default_address_space: AddressSpace::ZERO,
            // 64-bit pointers with the offset width equal to the pointer width.
            default_address_space_pointer_spec: PointerSpec {
                pointer_size: Size::from_bits(64),
                pointer_align: AbiAlign::new(align(64)),
                pointer_offset: Size::from_bits(64),
                _is_fat: false,
            },
            address_space_info: vec![],
            instruction_address_space: AddressSpace::ZERO,
            c_enum_min_size: Integer::I32,
        }
    }
}
311
/// Errors that can arise while parsing or validating a target data layout.
/// Borrows the offending substrings from the input data-layout string.
pub enum TargetDataLayoutErrors<'a> {
    InvalidAddressSpace { addr_space: &'a str, cause: &'a str, err: ParseIntError },
    InvalidBits { kind: &'a str, bit: &'a str, cause: &'a str, err: ParseIntError },
    MissingAlignment { cause: &'a str },
    InvalidAlignment { cause: &'a str, err: AlignFromBytesError },
    InconsistentTargetArchitecture { dl: &'a str, target: &'a str },
    InconsistentTargetPointerWidth { pointer_size: u64, target: u32 },
    InvalidBitsSize { err: String },
    UnknownPointerSpecification { err: String },
}
322
323impl TargetDataLayout {
324 pub fn parse_from_llvm_datalayout_string<'a>(
330 input: &'a str,
331 default_address_space: AddressSpace,
332 ) -> Result<TargetDataLayout, TargetDataLayoutErrors<'a>> {
333 let parse_address_space = |s: &'a str, cause: &'a str| {
335 s.parse::<u32>().map(AddressSpace).map_err(|err| {
336 TargetDataLayoutErrors::InvalidAddressSpace { addr_space: s, cause, err }
337 })
338 };
339
340 let parse_bits = |s: &'a str, kind: &'a str, cause: &'a str| {
342 s.parse::<u64>().map_err(|err| TargetDataLayoutErrors::InvalidBits {
343 kind,
344 bit: s,
345 cause,
346 err,
347 })
348 };
349
350 let parse_size =
352 |s: &'a str, cause: &'a str| parse_bits(s, "size", cause).map(Size::from_bits);
353
354 let parse_align_str = |s: &'a str, cause: &'a str| {
356 let align_from_bits = |bits| {
357 Align::from_bits(bits)
358 .map_err(|err| TargetDataLayoutErrors::InvalidAlignment { cause, err })
359 };
360 let abi = parse_bits(s, "alignment", cause)?;
361 Ok(AbiAlign::new(align_from_bits(abi)?))
362 };
363
364 let parse_align_seq = |s: &[&'a str], cause: &'a str| {
367 if s.is_empty() {
368 return Err(TargetDataLayoutErrors::MissingAlignment { cause });
369 }
370 parse_align_str(s[0], cause)
371 };
372
373 let mut dl = TargetDataLayout::default();
374 dl.default_address_space = default_address_space;
375
376 let mut i128_align_src = 64;
377 for spec in input.split('-') {
378 let spec_parts = spec.split(':').collect::<Vec<_>>();
379
380 match &*spec_parts {
381 ["e"] => dl.endian = Endian::Little,
382 ["E"] => dl.endian = Endian::Big,
383 [p] if p.starts_with('P') => {
384 dl.instruction_address_space = parse_address_space(&p[1..], "P")?
385 }
386 ["a", a @ ..] => dl.aggregate_align = parse_align_seq(a, "a")?,
387 ["f16", a @ ..] => dl.f16_align = parse_align_seq(a, "f16")?,
388 ["f32", a @ ..] => dl.f32_align = parse_align_seq(a, "f32")?,
389 ["f64", a @ ..] => dl.f64_align = parse_align_seq(a, "f64")?,
390 ["f128", a @ ..] => dl.f128_align = parse_align_seq(a, "f128")?,
391 [p, s, a @ ..] if p.starts_with("p") => {
392 let mut p = p.strip_prefix('p').unwrap();
393 let mut _is_fat = false;
394
395 if p.starts_with('f') {
399 p = p.strip_prefix('f').unwrap();
400 _is_fat = true;
401 }
402
403 if p.starts_with(char::is_alphabetic) {
406 return Err(TargetDataLayoutErrors::UnknownPointerSpecification {
407 err: p.to_string(),
408 });
409 }
410
411 let addr_space = if !p.is_empty() {
412 parse_address_space(p, "p-")?
413 } else {
414 AddressSpace::ZERO
415 };
416
417 let pointer_size = parse_size(s, "p-")?;
418 let pointer_align = parse_align_seq(a, "p-")?;
419 let info = PointerSpec {
420 pointer_offset: pointer_size,
421 pointer_size,
422 pointer_align,
423 _is_fat,
424 };
425 if addr_space == default_address_space {
426 dl.default_address_space_pointer_spec = info;
427 } else {
428 match dl.address_space_info.iter_mut().find(|(a, _)| *a == addr_space) {
429 Some(e) => e.1 = info,
430 None => {
431 dl.address_space_info.push((addr_space, info));
432 }
433 }
434 }
435 }
436 [p, s, a, _pr, i] if p.starts_with("p") => {
437 let mut p = p.strip_prefix('p').unwrap();
438 let mut _is_fat = false;
439
440 if p.starts_with('f') {
444 p = p.strip_prefix('f').unwrap();
445 _is_fat = true;
446 }
447
448 if p.starts_with(char::is_alphabetic) {
451 return Err(TargetDataLayoutErrors::UnknownPointerSpecification {
452 err: p.to_string(),
453 });
454 }
455
456 let addr_space = if !p.is_empty() {
457 parse_address_space(p, "p")?
458 } else {
459 AddressSpace::ZERO
460 };
461
462 let info = PointerSpec {
463 pointer_size: parse_size(s, "p-")?,
464 pointer_align: parse_align_str(a, "p-")?,
465 pointer_offset: parse_size(i, "p-")?,
466 _is_fat,
467 };
468
469 if addr_space == default_address_space {
470 dl.default_address_space_pointer_spec = info;
471 } else {
472 match dl.address_space_info.iter_mut().find(|(a, _)| *a == addr_space) {
473 Some(e) => e.1 = info,
474 None => {
475 dl.address_space_info.push((addr_space, info));
476 }
477 }
478 }
479 }
480
481 [s, a @ ..] if s.starts_with('i') => {
482 let Ok(bits) = s[1..].parse::<u64>() else {
483 parse_size(&s[1..], "i")?; continue;
485 };
486 let a = parse_align_seq(a, s)?;
487 match bits {
488 1 => dl.i1_align = a,
489 8 => dl.i8_align = a,
490 16 => dl.i16_align = a,
491 32 => dl.i32_align = a,
492 64 => dl.i64_align = a,
493 _ => {}
494 }
495 if bits >= i128_align_src && bits <= 128 {
496 i128_align_src = bits;
499 dl.i128_align = a;
500 }
501 }
502 [s, a @ ..] if s.starts_with('v') => {
503 let v_size = parse_size(&s[1..], "v")?;
504 let a = parse_align_seq(a, s)?;
505 if let Some(v) = dl.vector_align.iter_mut().find(|v| v.0 == v_size) {
506 v.1 = a;
507 continue;
508 }
509 dl.vector_align.push((v_size, a));
511 }
512 _ => {} }
514 }
515
516 if (dl.instruction_address_space != dl.default_address_space)
519 && dl
520 .address_space_info
521 .iter()
522 .find(|(a, _)| *a == dl.instruction_address_space)
523 .is_none()
524 {
525 dl.address_space_info.push((
526 dl.instruction_address_space,
527 dl.default_address_space_pointer_spec.clone(),
528 ));
529 }
530
531 Ok(dl)
532 }
533
534 #[inline]
545 pub fn obj_size_bound(&self) -> u64 {
546 match self.pointer_size().bits() {
547 16 => 1 << 15,
548 32 => 1 << 31,
549 64 => 1 << 61,
550 bits => panic!("obj_size_bound: unknown pointer bit size {bits}"),
551 }
552 }
553
554 #[inline]
564 pub fn obj_size_bound_in(&self, address_space: AddressSpace) -> u64 {
565 match self.pointer_size_in(address_space).bits() {
566 16 => 1 << 15,
567 32 => 1 << 31,
568 64 => 1 << 61,
569 bits => panic!("obj_size_bound: unknown pointer bit size {bits}"),
570 }
571 }
572
573 #[inline]
574 pub fn ptr_sized_integer(&self) -> Integer {
575 use Integer::*;
576 match self.pointer_offset().bits() {
577 16 => I16,
578 32 => I32,
579 64 => I64,
580 bits => panic!("ptr_sized_integer: unknown pointer bit size {bits}"),
581 }
582 }
583
584 #[inline]
585 pub fn ptr_sized_integer_in(&self, address_space: AddressSpace) -> Integer {
586 use Integer::*;
587 match self.pointer_offset_in(address_space).bits() {
588 16 => I16,
589 32 => I32,
590 64 => I64,
591 bits => panic!("ptr_sized_integer: unknown pointer bit size {bits}"),
592 }
593 }
594
595 #[inline]
597 fn cabi_vector_align(&self, vec_size: Size) -> Option<AbiAlign> {
598 self.vector_align
599 .iter()
600 .find(|(size, _align)| *size == vec_size)
601 .map(|(_size, align)| *align)
602 }
603
604 #[inline]
606 pub fn llvmlike_vector_align(&self, vec_size: Size) -> AbiAlign {
607 self.cabi_vector_align(vec_size).unwrap_or(AbiAlign::new(
608 Align::from_bytes(vec_size.bytes().next_power_of_two()).unwrap(),
609 ))
610 }
611
612 #[inline]
614 pub fn pointer_size(&self) -> Size {
615 self.default_address_space_pointer_spec.pointer_size
616 }
617
618 #[inline]
620 pub fn pointer_size_in(&self, c: AddressSpace) -> Size {
621 if c == self.default_address_space {
622 return self.default_address_space_pointer_spec.pointer_size;
623 }
624
625 if let Some(e) = self.address_space_info.iter().find(|(a, _)| a == &c) {
626 e.1.pointer_size
627 } else {
628 panic!("Use of unknown address space {c:?}");
629 }
630 }
631
632 #[inline]
634 pub fn pointer_offset(&self) -> Size {
635 self.default_address_space_pointer_spec.pointer_offset
636 }
637
638 #[inline]
640 pub fn pointer_offset_in(&self, c: AddressSpace) -> Size {
641 if c == self.default_address_space {
642 return self.default_address_space_pointer_spec.pointer_offset;
643 }
644
645 if let Some(e) = self.address_space_info.iter().find(|(a, _)| a == &c) {
646 e.1.pointer_offset
647 } else {
648 panic!("Use of unknown address space {c:?}");
649 }
650 }
651
652 #[inline]
654 pub fn pointer_align(&self) -> AbiAlign {
655 self.default_address_space_pointer_spec.pointer_align
656 }
657
658 #[inline]
660 pub fn pointer_align_in(&self, c: AddressSpace) -> AbiAlign {
661 if c == self.default_address_space {
662 return self.default_address_space_pointer_spec.pointer_align;
663 }
664
665 if let Some(e) = self.address_space_info.iter().find(|(a, _)| a == &c) {
666 e.1.pointer_align
667 } else {
668 panic!("Use of unknown address space {c:?}");
669 }
670 }
671}
672
/// Anything that can supply a [`TargetDataLayout`] (e.g. a compilation context).
pub trait HasDataLayout {
    fn data_layout(&self) -> &TargetDataLayout;
}
676
// A data layout trivially provides itself.
impl HasDataLayout for TargetDataLayout {
    #[inline]
    fn data_layout(&self) -> &TargetDataLayout {
        self
    }
}
683
// Forwarding impl so `&TargetDataLayout` can be used where `impl HasDataLayout`
// is expected.
impl HasDataLayout for &TargetDataLayout {
    #[inline]
    fn data_layout(&self) -> &TargetDataLayout {
        (**self).data_layout()
    }
}
691
/// Endianness of the target.
#[derive(Copy, Clone, PartialEq, Eq)]
pub enum Endian {
    Little,
    Big,
}
698
699impl Endian {
700 pub fn as_str(&self) -> &'static str {
701 match self {
702 Self::Little => "little",
703 Self::Big => "big",
704 }
705 }
706}
707
impl fmt::Debug for Endian {
    // Debug output matches `as_str` ("little"/"big") for readable diagnostics.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.as_str())
    }
}
713
714impl FromStr for Endian {
715 type Err = String;
716
717 fn from_str(s: &str) -> Result<Self, Self::Err> {
718 match s {
719 "little" => Ok(Self::Little),
720 "big" => Ok(Self::Big),
721 _ => Err(format!(r#"unknown endian: "{s}""#)),
722 }
723 }
724}
725
/// Size of a type in bytes.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub struct Size {
    // Stored as a byte count; use `from_bits`/`bits` for bit-based views.
    raw: u64,
}
735
// `Size` orders by its raw `u64` byte count, which is stable across sessions,
// so unstable sorting is safe.
#[cfg(feature = "nightly")]
impl StableOrd for Size {
    const CAN_USE_UNSTABLE_SORT: bool = true;

    const THIS_IMPLEMENTATION_HAS_BEEN_TRIPLE_CHECKED: () = ();
}
744
impl fmt::Debug for Size {
    // e.g. `Size(8 bytes)`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "Size({} bytes)", self.bytes())
    }
}
751
impl Size {
    pub const ZERO: Size = Size { raw: 0 };

    /// Rounds `bits` up to the next-higher byte boundary, if `bits` is
    /// not a multiple of 8.
    pub fn from_bits(bits: impl TryInto<u64>) -> Size {
        let bits = bits.try_into().ok().unwrap();
        Size { raw: bits.div_ceil(8) }
    }

    #[inline]
    pub fn from_bytes(bytes: impl TryInto<u64>) -> Size {
        let bytes: u64 = bytes.try_into().ok().unwrap();
        Size { raw: bytes }
    }

    #[inline]
    pub fn bytes(self) -> u64 {
        self.raw
    }

    #[inline]
    pub fn bytes_usize(self) -> usize {
        self.bytes().try_into().unwrap()
    }

    /// The size in bits; panics (via the cold path) if the byte count times 8
    /// overflows `u64`.
    #[inline]
    pub fn bits(self) -> u64 {
        #[cold]
        fn overflow(bytes: u64) -> ! {
            panic!("Size::bits: {bytes} bytes in bits doesn't fit in u64")
        }

        self.bytes().checked_mul(8).unwrap_or_else(|| overflow(self.bytes()))
    }

    #[inline]
    pub fn bits_usize(self) -> usize {
        self.bits().try_into().unwrap()
    }

    /// Rounds up to the nearest multiple of `align` (a power of two).
    #[inline]
    pub fn align_to(self, align: Align) -> Size {
        let mask = align.bytes() - 1;
        Size::from_bytes((self.bytes() + mask) & !mask)
    }

    /// Whether this size is a multiple of `align` (a power of two).
    #[inline]
    pub fn is_aligned(self, align: Align) -> bool {
        let mask = align.bytes() - 1;
        self.bytes() & mask == 0
    }

    /// Addition that returns `None` on `u64` overflow *or* when the result
    /// reaches the target's maximum object size.
    #[inline]
    pub fn checked_add<C: HasDataLayout>(self, offset: Size, cx: &C) -> Option<Size> {
        let dl = cx.data_layout();

        let bytes = self.bytes().checked_add(offset.bytes())?;

        if bytes < dl.obj_size_bound() { Some(Size::from_bytes(bytes)) } else { None }
    }

    /// Multiplication that returns `None` on `u64` overflow *or* when the
    /// result reaches the target's maximum object size.
    #[inline]
    pub fn checked_mul<C: HasDataLayout>(self, count: u64, cx: &C) -> Option<Size> {
        let dl = cx.data_layout();

        let bytes = self.bytes().checked_mul(count)?;
        if bytes < dl.obj_size_bound() { Some(Size::from_bytes(bytes)) } else { None }
    }

    /// Interprets the low `self.bits()` bits of `value` as a signed integer and
    /// sign-extends it to `i128`. A zero-sized value is 0.
    #[inline]
    pub fn sign_extend(self, value: u128) -> i128 {
        let size = self.bits();
        if size == 0 {
            return 0;
        }
        // Shift the relevant bits to the top, then arithmetic-shift back down,
        // replicating the sign bit.
        let shift = 128 - size;
        ((value << shift) as i128) >> shift
    }

    /// Keeps only the low `self.bits()` bits of `value` (zero-extension).
    /// A zero-sized value is 0.
    #[inline]
    pub fn truncate(self, value: u128) -> u128 {
        let size = self.bits();
        if size == 0 {
            return 0;
        }
        let shift = 128 - size;
        (value << shift) >> shift
    }

    /// Minimum value of a signed integer of this size.
    /// NOTE(review): shifts by `bits() - 1`, so this assumes a non-zero size.
    #[inline]
    pub fn signed_int_min(&self) -> i128 {
        self.sign_extend(1_u128 << (self.bits() - 1))
    }

    /// Maximum value of a signed integer of this size (assumes non-zero size).
    #[inline]
    pub fn signed_int_max(&self) -> i128 {
        i128::MAX >> (128 - self.bits())
    }

    /// Maximum value of an unsigned integer of this size (assumes non-zero size).
    #[inline]
    pub fn unsigned_int_max(&self) -> u128 {
        u128::MAX >> (128 - self.bits())
    }
}
866
867impl Add for Size {
871 type Output = Size;
872 #[inline]
873 fn add(self, other: Size) -> Size {
874 Size::from_bytes(self.bytes().checked_add(other.bytes()).unwrap_or_else(|| {
875 panic!("Size::add: {} + {} doesn't fit in u64", self.bytes(), other.bytes())
876 }))
877 }
878}
879
880impl Sub for Size {
881 type Output = Size;
882 #[inline]
883 fn sub(self, other: Size) -> Size {
884 Size::from_bytes(self.bytes().checked_sub(other.bytes()).unwrap_or_else(|| {
885 panic!("Size::sub: {} - {} would result in negative size", self.bytes(), other.bytes())
886 }))
887 }
888}
889
// `count * size` delegates to `size * count` below.
impl Mul<Size> for u64 {
    type Output = Size;
    #[inline]
    fn mul(self, size: Size) -> Size {
        size * self
    }
}
897
898impl Mul<u64> for Size {
899 type Output = Size;
900 #[inline]
901 fn mul(self, count: u64) -> Size {
902 match self.bytes().checked_mul(count) {
903 Some(bytes) => Size::from_bytes(bytes),
904 None => panic!("Size::mul: {} * {} doesn't fit in u64", self.bytes(), count),
905 }
906 }
907}
908
// `+=` reuses `Add` and therefore shares its overflow panic.
impl AddAssign for Size {
    #[inline]
    fn add_assign(&mut self, other: Size) {
        *self = *self + other;
    }
}
915
// Allows `Size` to be used in ranges (`start..end`) on nightly by delegating
// every `Step` operation to the underlying byte count.
#[cfg(feature = "nightly")]
impl Step for Size {
    #[inline]
    fn steps_between(start: &Self, end: &Self) -> (usize, Option<usize>) {
        u64::steps_between(&start.bytes(), &end.bytes())
    }

    #[inline]
    fn forward_checked(start: Self, count: usize) -> Option<Self> {
        u64::forward_checked(start.bytes(), count).map(Self::from_bytes)
    }

    #[inline]
    fn forward(start: Self, count: usize) -> Self {
        Self::from_bytes(u64::forward(start.bytes(), count))
    }

    #[inline]
    unsafe fn forward_unchecked(start: Self, count: usize) -> Self {
        // SAFETY: caller upholds `u64::forward_unchecked`'s no-overflow contract.
        Self::from_bytes(unsafe { u64::forward_unchecked(start.bytes(), count) })
    }

    #[inline]
    fn backward_checked(start: Self, count: usize) -> Option<Self> {
        u64::backward_checked(start.bytes(), count).map(Self::from_bytes)
    }

    #[inline]
    fn backward(start: Self, count: usize) -> Self {
        Self::from_bytes(u64::backward(start.bytes(), count))
    }

    #[inline]
    unsafe fn backward_unchecked(start: Self, count: usize) -> Self {
        // SAFETY: caller upholds `u64::backward_unchecked`'s no-underflow contract.
        Self::from_bytes(unsafe { u64::backward_unchecked(start.bytes(), count) })
    }
}
953
/// Alignment of a type in bytes (always a power of two).
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub struct Align {
    // Stored as the exponent: alignment in bytes is `1 << pow2`.
    pow2: u8,
}
963
impl fmt::Debug for Align {
    // e.g. `Align(4 bytes)`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "Align({} bytes)", self.bytes())
    }
}
970
/// Reasons an `Align` could not be built from a byte count; each variant
/// carries the offending value.
#[derive(Clone, Copy)]
pub enum AlignFromBytesError {
    NotPowerOfTwo(u64),
    TooLarge(u64),
}
976
977impl AlignFromBytesError {
978 pub fn diag_ident(self) -> &'static str {
979 match self {
980 Self::NotPowerOfTwo(_) => "not_power_of_two",
981 Self::TooLarge(_) => "too_large",
982 }
983 }
984
985 pub fn align(self) -> u64 {
986 let (Self::NotPowerOfTwo(align) | Self::TooLarge(align)) = self;
987 align
988 }
989}
990
impl fmt::Debug for AlignFromBytesError {
    // Debug intentionally mirrors Display so diagnostics read the same either way.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(self, f)
    }
}
996
impl fmt::Display for AlignFromBytesError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            AlignFromBytesError::NotPowerOfTwo(align) => write!(f, "`{align}` is not a power of 2"),
            AlignFromBytesError::TooLarge(align) => write!(f, "`{align}` is too large"),
        }
    }
}
1005
impl Align {
    pub const ONE: Align = Align { pow2: 0 };
    pub const EIGHT: Align = Align { pow2: 3 };
    // Largest supported alignment: 2^29 bytes.
    pub const MAX: Align = Align { pow2: 29 };

    /// Builds an alignment from a bit count (rounded up to whole bytes).
    #[inline]
    pub fn from_bits(bits: u64) -> Result<Align, AlignFromBytesError> {
        Align::from_bytes(Size::from_bits(bits).bytes())
    }

    /// Builds an alignment from a byte count, which must be a power of two
    /// no greater than [`Align::MAX`]; `0` is treated as alignment 1.
    #[inline]
    pub const fn from_bytes(align: u64) -> Result<Align, AlignFromBytesError> {
        // Treat an alignment of zero as one (no alignment constraint).
        if align == 0 {
            return Ok(Align::ONE);
        }

        #[cold]
        const fn not_power_of_2(align: u64) -> AlignFromBytesError {
            AlignFromBytesError::NotPowerOfTwo(align)
        }

        #[cold]
        const fn too_large(align: u64) -> AlignFromBytesError {
            AlignFromBytesError::TooLarge(align)
        }

        // A power of two has exactly its trailing-zero bit set.
        let tz = align.trailing_zeros();
        if align != (1 << tz) {
            return Err(not_power_of_2(align));
        }

        let pow2 = tz as u8;
        if pow2 > Self::MAX.pow2 {
            return Err(too_large(align));
        }

        Ok(Align { pow2 })
    }

    #[inline]
    pub const fn bytes(self) -> u64 {
        1 << self.pow2
    }

    #[inline]
    pub fn bytes_usize(self) -> usize {
        self.bytes().try_into().unwrap()
    }

    #[inline]
    pub const fn bits(self) -> u64 {
        self.bytes() * 8
    }

    #[inline]
    pub fn bits_usize(self) -> usize {
        self.bits().try_into().unwrap()
    }

    /// The largest power-of-two alignment that evenly divides `size`
    /// (i.e. 2^(number of trailing zero bits of the byte count)).
    #[inline]
    pub fn max_aligned_factor(size: Size) -> Align {
        Align { pow2: size.bytes().trailing_zeros() as u8 }
    }

    /// Caps this alignment to one guaranteed at any multiple of `size` bytes.
    #[inline]
    pub fn restrict_for_offset(self, size: Size) -> Align {
        self.min(Align::max_aligned_factor(size))
    }
}
1082
/// An ABI-mandated alignment. Newtype over [`Align`] (historically this also
/// carried a separate "preferred" alignment).
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct AbiAlign {
    pub abi: Align,
}
1097
1098impl AbiAlign {
1099 #[inline]
1100 pub fn new(align: Align) -> AbiAlign {
1101 AbiAlign { abi: align }
1102 }
1103
1104 #[inline]
1105 pub fn min(self, other: AbiAlign) -> AbiAlign {
1106 AbiAlign { abi: self.abi.min(other.abi) }
1107 }
1108
1109 #[inline]
1110 pub fn max(self, other: AbiAlign) -> AbiAlign {
1111 AbiAlign { abi: self.abi.max(other.abi) }
1112 }
1113}
1114
// Deref to the wrapped `Align` so callers can use `Align` methods directly.
impl Deref for AbiAlign {
    type Target = Align;

    fn deref(&self) -> &Self::Target {
        &self.abi
    }
}
1122
/// Integer sizes used by layout computation. Signedness is tracked separately
/// (see [`Primitive::Int`] and [`IntegerType`]), so `I8` covers both `i8` and `u8`.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub enum Integer {
    I8,
    I16,
    I32,
    I64,
    I128,
}
1136
impl Integer {
    /// Rust type name of the signed integer of this size (e.g. `"i32"`).
    pub fn int_ty_str(self) -> &'static str {
        use Integer::*;
        match self {
            I8 => "i8",
            I16 => "i16",
            I32 => "i32",
            I64 => "i64",
            I128 => "i128",
        }
    }

    /// Rust type name of the unsigned integer of this size (e.g. `"u32"`).
    pub fn uint_ty_str(self) -> &'static str {
        use Integer::*;
        match self {
            I8 => "u8",
            I16 => "u16",
            I32 => "u32",
            I64 => "u64",
            I128 => "u128",
        }
    }

    /// Size of this integer in bytes.
    #[inline]
    pub fn size(self) -> Size {
        use Integer::*;
        match self {
            I8 => Size::from_bytes(1),
            I16 => Size::from_bytes(2),
            I32 => Size::from_bytes(4),
            I64 => Size::from_bytes(8),
            I128 => Size::from_bytes(16),
        }
    }

    /// Resolves a `#[repr(..)]` integer type to a concrete `Integer`;
    /// pointer-sized requests use the target's pointer offset width.
    pub fn from_attr<C: HasDataLayout>(cx: &C, ity: IntegerType) -> Integer {
        let dl = cx.data_layout();

        match ity {
            IntegerType::Pointer(_) => dl.ptr_sized_integer(),
            IntegerType::Fixed(x, _) => x,
        }
    }

    /// ABI alignment of this integer on the given target.
    pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAlign {
        use Integer::*;
        let dl = cx.data_layout();

        match self {
            I8 => dl.i8_align,
            I16 => dl.i16_align,
            I32 => dl.i32_align,
            I64 => dl.i64_align,
            I128 => dl.i128_align,
        }
    }

    /// Maximum value of this integer when interpreted as signed.
    #[inline]
    pub fn signed_max(self) -> i128 {
        use Integer::*;
        match self {
            I8 => i8::MAX as i128,
            I16 => i16::MAX as i128,
            I32 => i32::MAX as i128,
            I64 => i64::MAX as i128,
            I128 => i128::MAX,
        }
    }

    /// Minimum value of this integer when interpreted as signed.
    #[inline]
    pub fn signed_min(self) -> i128 {
        use Integer::*;
        match self {
            I8 => i8::MIN as i128,
            I16 => i16::MIN as i128,
            I32 => i32::MIN as i128,
            I64 => i64::MIN as i128,
            I128 => i128::MIN,
        }
    }

    /// Smallest integer that can represent the signed value `x`.
    #[inline]
    pub fn fit_signed(x: i128) -> Integer {
        use Integer::*;
        match x {
            -0x0000_0000_0000_0080..=0x0000_0000_0000_007f => I8,
            -0x0000_0000_0000_8000..=0x0000_0000_0000_7fff => I16,
            -0x0000_0000_8000_0000..=0x0000_0000_7fff_ffff => I32,
            -0x8000_0000_0000_0000..=0x7fff_ffff_ffff_ffff => I64,
            _ => I128,
        }
    }

    /// Smallest integer that can represent the unsigned value `x`.
    /// (Arms overlap; the first — smallest — match wins.)
    #[inline]
    pub fn fit_unsigned(x: u128) -> Integer {
        use Integer::*;
        match x {
            0..=0x0000_0000_0000_00ff => I8,
            0..=0x0000_0000_0000_ffff => I16,
            0..=0x0000_0000_ffff_ffff => I32,
            0..=0xffff_ffff_ffff_ffff => I64,
            _ => I128,
        }
    }

    /// Finds the smallest integer whose ABI alignment *and* size exactly match
    /// `wanted`, if any.
    pub fn for_align<C: HasDataLayout>(cx: &C, wanted: Align) -> Option<Integer> {
        use Integer::*;
        let dl = cx.data_layout();

        [I8, I16, I32, I64, I128].into_iter().find(|&candidate| {
            wanted == candidate.align(dl).abi && wanted.bytes() == candidate.size().bytes()
        })
    }

    /// Finds the largest integer (up to I64) whose alignment and size do not
    /// exceed `wanted`; falls back to I8.
    pub fn approximate_align<C: HasDataLayout>(cx: &C, wanted: Align) -> Integer {
        use Integer::*;
        let dl = cx.data_layout();

        for candidate in [I64, I32, I16] {
            if wanted >= candidate.align(dl).abi && wanted.bytes() >= candidate.size().bytes() {
                return candidate;
            }
        }
        I8
    }

    /// Converts an exact bit size to an `Integer`; errors for sizes Rust has
    /// no integer type for.
    #[inline]
    pub fn from_size(size: Size) -> Result<Self, String> {
        match size.bits() {
            8 => Ok(Integer::I8),
            16 => Ok(Integer::I16),
            32 => Ok(Integer::I32),
            64 => Ok(Integer::I64),
            128 => Ok(Integer::I128),
            _ => Err(format!("rust does not support integers with {} bits", size.bits())),
        }
    }
}
1285
/// Floating-point types used by layout computation.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum Float {
    F16,
    F32,
    F64,
    F128,
}
1295
1296impl Float {
1297 pub fn size(self) -> Size {
1298 use Float::*;
1299
1300 match self {
1301 F16 => Size::from_bits(16),
1302 F32 => Size::from_bits(32),
1303 F64 => Size::from_bits(64),
1304 F128 => Size::from_bits(128),
1305 }
1306 }
1307
1308 pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAlign {
1309 use Float::*;
1310 let dl = cx.data_layout();
1311
1312 match self {
1313 F16 => dl.f16_align,
1314 F32 => dl.f32_align,
1315 F64 => dl.f64_align,
1316 F128 => dl.f128_align,
1317 }
1318 }
1319}
1320
/// Fundamental unit of memory access and layout.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum Primitive {
    /// An integer; the `bool` is `true` when signed.
    Int(Integer, bool),
    Float(Float),
    /// A pointer into the given address space.
    Pointer(AddressSpace),
}
1336
impl Primitive {
    /// Size of this primitive; pointers take their size from the target's
    /// spec for their address space.
    pub fn size<C: HasDataLayout>(self, cx: &C) -> Size {
        use Primitive::*;
        let dl = cx.data_layout();

        match self {
            Int(i, _) => i.size(),
            Float(f) => f.size(),
            Pointer(a) => dl.pointer_size_in(a),
        }
    }

    /// ABI alignment of this primitive on the given target.
    pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAlign {
        use Primitive::*;
        let dl = cx.data_layout();

        match self {
            Int(i, _) => i.align(dl),
            Float(f) => f.align(dl),
            Pointer(a) => dl.pointer_align_in(a),
        }
    }
}
1360
/// Inclusive wrap-around range of valid values, i.e. `start..=end`.
/// When `start > end` the range wraps around the integer's maximum
/// (e.g. `254..=2` means 254, 255, 0, 1, 2).
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct WrappingRange {
    pub start: u128,
    pub end: u128,
}
1376
impl WrappingRange {
    /// The full range (every bit pattern is valid) for a value of `size` bits.
    pub fn full(size: Size) -> Self {
        Self { start: 0, end: size.unsigned_int_max() }
    }

    /// Returns `true` if `v` is contained in the range, accounting for wrap-around.
    #[inline(always)]
    pub fn contains(&self, v: u128) -> bool {
        if self.start <= self.end {
            self.start <= v && v <= self.end
        } else {
            // Wrapping: valid values are `start..=MAX` and `0..=end`.
            self.start <= v || v <= self.end
        }
    }

    /// Returns `true` if every value in `other` (at `size` bits) is also in `self`.
    #[inline(always)]
    pub fn contains_range(&self, other: Self, size: Size) -> bool {
        if self.is_full_for(size) {
            true
        } else {
            let trunc = |x| size.truncate(x);

            // Rotate both ranges so that `self` starts at 0; `self` then covers
            // `0..=max`, and `other` is contained iff, after the same rotation,
            // it is a non-wrapping subrange of that.
            let delta = self.start;
            let max = trunc(self.end.wrapping_sub(delta));

            let other_start = trunc(other.start.wrapping_sub(delta));
            let other_end = trunc(other.end.wrapping_sub(delta));

            (other_start <= other_end) && (other_end <= max)
        }
    }

    /// Returns `self` with its start replaced by `start`.
    #[inline(always)]
    fn with_start(mut self, start: u128) -> Self {
        self.start = start;
        self
    }

    /// Returns `self` with its end replaced by `end`.
    #[inline(always)]
    fn with_end(mut self, end: u128) -> Self {
        self.end = end;
        self
    }

    /// Returns `true` if the range covers every value of `size` bits
    /// (i.e. start is exactly one past end, modulo 2^bits).
    #[inline]
    fn is_full_for(&self, size: Size) -> bool {
        let max_value = size.unsigned_int_max();
        debug_assert!(self.start <= max_value && self.end <= max_value);
        self.start == (self.end.wrapping_add(1) & max_value)
    }

    /// `Ok(true)` if the range never wraps when read as unsigned,
    /// `Ok(false)` if it wraps; `Err` (a `RangeFull`) when the range is full
    /// and the question is ambiguous.
    #[inline]
    pub fn no_unsigned_wraparound(&self, size: Size) -> Result<bool, RangeFull> {
        if self.is_full_for(size) { Err(..) } else { Ok(self.start <= self.end) }
    }

    /// As `no_unsigned_wraparound`, but interpreting the bounds as signed
    /// values of `size` bits.
    #[inline]
    pub fn no_signed_wraparound(&self, size: Size) -> Result<bool, RangeFull> {
        if self.is_full_for(size) {
            Err(..)
        } else {
            let start: i128 = size.sign_extend(self.start);
            let end: i128 = size.sign_extend(self.end);
            Ok(start <= end)
        }
    }
}
1469
1470impl fmt::Debug for WrappingRange {
1471 fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
1472 if self.start > self.end {
1473 write!(fmt, "(..={}) | ({}..)", self.end, self.start)?;
1474 } else {
1475 write!(fmt, "{}..={}", self.start, self.end)?;
1476 }
1477 Ok(())
1478 }
1479}
1480
/// One scalar component of a value, as seen by the backend.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum Scalar {
    /// A scalar that must always hold an initialized, in-range value.
    Initialized {
        value: Primitive,

        /// The range the bit pattern must lie in.
        valid_range: WrappingRange,
    },
    /// A scalar that may be uninitialized; it carries no range and
    /// `Scalar::valid_range` treats every bit pattern as valid.
    Union {
        value: Primitive,
    },
}
1502
1503impl Scalar {
1504 #[inline]
1505 pub fn is_bool(&self) -> bool {
1506 use Integer::*;
1507 matches!(
1508 self,
1509 Scalar::Initialized {
1510 value: Primitive::Int(I8, false),
1511 valid_range: WrappingRange { start: 0, end: 1 }
1512 }
1513 )
1514 }
1515
1516 pub fn primitive(&self) -> Primitive {
1519 match *self {
1520 Scalar::Initialized { value, .. } | Scalar::Union { value } => value,
1521 }
1522 }
1523
1524 pub fn align(self, cx: &impl HasDataLayout) -> AbiAlign {
1525 self.primitive().align(cx)
1526 }
1527
1528 pub fn size(self, cx: &impl HasDataLayout) -> Size {
1529 self.primitive().size(cx)
1530 }
1531
1532 #[inline]
1533 pub fn to_union(&self) -> Self {
1534 Self::Union { value: self.primitive() }
1535 }
1536
1537 #[inline]
1538 pub fn valid_range(&self, cx: &impl HasDataLayout) -> WrappingRange {
1539 match *self {
1540 Scalar::Initialized { valid_range, .. } => valid_range,
1541 Scalar::Union { value } => WrappingRange::full(value.size(cx)),
1542 }
1543 }
1544
1545 #[inline]
1546 pub fn valid_range_mut(&mut self) -> &mut WrappingRange {
1549 match self {
1550 Scalar::Initialized { valid_range, .. } => valid_range,
1551 Scalar::Union { .. } => panic!("cannot change the valid range of a union"),
1552 }
1553 }
1554
1555 #[inline]
1558 pub fn is_always_valid<C: HasDataLayout>(&self, cx: &C) -> bool {
1559 match *self {
1560 Scalar::Initialized { valid_range, .. } => valid_range.is_full_for(self.size(cx)),
1561 Scalar::Union { .. } => true,
1562 }
1563 }
1564
1565 #[inline]
1567 pub fn is_uninit_valid(&self) -> bool {
1568 match *self {
1569 Scalar::Initialized { .. } => false,
1570 Scalar::Union { .. } => true,
1571 }
1572 }
1573
1574 #[inline]
1576 pub fn is_signed(&self) -> bool {
1577 match self.primitive() {
1578 Primitive::Int(_, signed) => signed,
1579 _ => false,
1580 }
1581 }
1582}
1583
/// Describes how the fields of a layout are placed in memory.
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum FieldsShape<FieldIdx: Idx> {
    /// A primitive value with no fields (`count()` is 0).
    Primitive,

    /// Union-like: every field starts at offset 0; payload is the field count.
    Union(NonZeroUsize),

    /// `count` equally-spaced elements, each `stride` bytes apart.
    Array { stride: Size, count: u64 },

    /// Struct-like placement with explicit per-field data.
    Arbitrary {
        /// Byte offset of each field, indexed in source order.
        offsets: IndexVec<FieldIdx, Size>,

        /// For each source-order field, its rank in memory order
        /// (inverted by `index_by_increasing_offset`).
        memory_index: IndexVec<FieldIdx, u32>,
    },
}
1627
impl<FieldIdx: Idx> FieldsShape<FieldIdx> {
    /// Number of fields in this shape.
    #[inline]
    pub fn count(&self) -> usize {
        match *self {
            FieldsShape::Primitive => 0,
            FieldsShape::Union(count) => count.get(),
            FieldsShape::Array { count, .. } => count.try_into().unwrap(),
            FieldsShape::Arbitrary { ref offsets, .. } => offsets.len(),
        }
    }

    /// Byte offset of source-order field `i`.
    ///
    /// Panics if `i` is out of bounds; unreachable for `Primitive`,
    /// which has no fields.
    #[inline]
    pub fn offset(&self, i: usize) -> Size {
        match *self {
            FieldsShape::Primitive => {
                unreachable!("FieldsShape::offset: `Primitive`s have no fields")
            }
            FieldsShape::Union(count) => {
                // All union fields start at offset zero.
                assert!(i < count.get(), "tried to access field {i} of union with {count} fields");
                Size::ZERO
            }
            FieldsShape::Array { stride, count } => {
                let i = u64::try_from(i).unwrap();
                assert!(i < count, "tried to access field {i} of array with {count} fields");
                stride * i
            }
            FieldsShape::Arbitrary { ref offsets, .. } => offsets[FieldIdx::new(i)],
        }
    }

    /// Memory-order rank of source-order field `i`; the identity except
    /// for `Arbitrary`, which stores an explicit permutation.
    #[inline]
    pub fn memory_index(&self, i: usize) -> usize {
        match *self {
            FieldsShape::Primitive => {
                unreachable!("FieldsShape::memory_index: `Primitive`s have no fields")
            }
            FieldsShape::Union(_) | FieldsShape::Array { .. } => i,
            FieldsShape::Arbitrary { ref memory_index, .. } => {
                memory_index[FieldIdx::new(i)].try_into().unwrap()
            }
        }
    }

    /// Iterates over source-order field indices sorted by increasing
    /// memory offset — i.e. the inverse of the `memory_index` permutation.
    #[inline]
    pub fn index_by_increasing_offset(&self) -> impl ExactSizeIterator<Item = usize> {
        // Precompute the inverse permutation; the fixed 64-entry array
        // avoids a heap allocation for the common small-field-count case.
        let mut inverse_small = [0u8; 64];
        let mut inverse_big = IndexVec::new();
        let use_small = self.count() <= inverse_small.len();

        if let FieldsShape::Arbitrary { ref memory_index, .. } = *self {
            if use_small {
                for (field_idx, &mem_idx) in memory_index.iter_enumerated() {
                    inverse_small[mem_idx as usize] = field_idx.index() as u8;
                }
            } else {
                inverse_big = memory_index.invert_bijective_mapping();
            }
        }

        // `Primitive` yields one pseudo-field here even though its
        // `count()` is 0.
        let pseudofield_count = if let FieldsShape::Primitive = self { 1 } else { self.count() };

        (0..pseudofield_count).map(move |i| match *self {
            // Identity permutation for trivially-ordered shapes.
            FieldsShape::Primitive | FieldsShape::Union(_) | FieldsShape::Array { .. } => i,
            FieldsShape::Arbitrary { .. } => {
                if use_small {
                    inverse_small[i] as usize
                } else {
                    inverse_big[i as u32].index()
                }
            }
        })
    }
}
1706
/// A numeric address-space identifier (newtype over `u32`).
/// NOTE(review): appears to parameterize pointer properties (cf.
/// `Primitive::Pointer` and `pointer_align_in`) — confirm at the data layout.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct AddressSpace(pub u32);
1713
impl AddressSpace {
    /// Address space 0 — presumably the target's default data address
    /// space; confirm against the target's data layout.
    pub const ZERO: Self = AddressSpace(0);
}
1718
/// How a value is represented when passed through the backend:
/// a single scalar, a pair of scalars, a SIMD vector, or in memory.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum BackendRepr {
    Scalar(Scalar),
    ScalarPair(Scalar, Scalar),
    SimdVector {
        element: Scalar,
        count: u64,
    },
    /// Everything else; `sized: false` marks unsized types
    /// (see `BackendRepr::is_unsized`).
    Memory {
        sized: bool,
    },
}
1744
1745impl BackendRepr {
1746 #[inline]
1748 pub fn is_unsized(&self) -> bool {
1749 match *self {
1750 BackendRepr::Scalar(_)
1751 | BackendRepr::ScalarPair(..)
1752 | BackendRepr::SimdVector { .. } => false,
1753 BackendRepr::Memory { sized } => !sized,
1754 }
1755 }
1756
1757 #[inline]
1758 pub fn is_sized(&self) -> bool {
1759 !self.is_unsized()
1760 }
1761
1762 #[inline]
1765 pub fn is_signed(&self) -> bool {
1766 match self {
1767 BackendRepr::Scalar(scal) => scal.is_signed(),
1768 _ => panic!("`is_signed` on non-scalar ABI {self:?}"),
1769 }
1770 }
1771
1772 #[inline]
1774 pub fn is_scalar(&self) -> bool {
1775 matches!(*self, BackendRepr::Scalar(_))
1776 }
1777
1778 #[inline]
1780 pub fn is_bool(&self) -> bool {
1781 matches!(*self, BackendRepr::Scalar(s) if s.is_bool())
1782 }
1783
1784 pub fn scalar_align<C: HasDataLayout>(&self, cx: &C) -> Option<Align> {
1788 match *self {
1789 BackendRepr::Scalar(s) => Some(s.align(cx).abi),
1790 BackendRepr::ScalarPair(s1, s2) => Some(s1.align(cx).max(s2.align(cx)).abi),
1791 BackendRepr::SimdVector { .. } | BackendRepr::Memory { .. } => None,
1793 }
1794 }
1795
1796 pub fn scalar_size<C: HasDataLayout>(&self, cx: &C) -> Option<Size> {
1800 match *self {
1801 BackendRepr::Scalar(s) => Some(s.size(cx)),
1803 BackendRepr::ScalarPair(s1, s2) => {
1805 let field2_offset = s1.size(cx).align_to(s2.align(cx).abi);
1806 let size = (field2_offset + s2.size(cx)).align_to(
1807 self.scalar_align(cx)
1808 .unwrap(),
1810 );
1811 Some(size)
1812 }
1813 BackendRepr::SimdVector { .. } | BackendRepr::Memory { .. } => None,
1815 }
1816 }
1817
1818 pub fn to_union(&self) -> Self {
1820 match *self {
1821 BackendRepr::Scalar(s) => BackendRepr::Scalar(s.to_union()),
1822 BackendRepr::ScalarPair(s1, s2) => {
1823 BackendRepr::ScalarPair(s1.to_union(), s2.to_union())
1824 }
1825 BackendRepr::SimdVector { element, count } => {
1826 BackendRepr::SimdVector { element: element.to_union(), count }
1827 }
1828 BackendRepr::Memory { .. } => BackendRepr::Memory { sized: true },
1829 }
1830 }
1831
1832 pub fn eq_up_to_validity(&self, other: &Self) -> bool {
1833 match (self, other) {
1834 (BackendRepr::Scalar(l), BackendRepr::Scalar(r)) => l.primitive() == r.primitive(),
1837 (
1838 BackendRepr::SimdVector { element: element_l, count: count_l },
1839 BackendRepr::SimdVector { element: element_r, count: count_r },
1840 ) => element_l.primitive() == element_r.primitive() && count_l == count_r,
1841 (BackendRepr::ScalarPair(l1, l2), BackendRepr::ScalarPair(r1, r2)) => {
1842 l1.primitive() == r1.primitive() && l2.primitive() == r2.primitive()
1843 }
1844 _ => self == other,
1846 }
1847 }
1848}
1849
/// The variant structure of a layout.
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum Variants<FieldIdx: Idx, VariantIdx: Idx> {
    /// No variants at all.
    Empty,

    /// Exactly one statically-known variant.
    Single {
        /// Which variant this layout describes.
        index: VariantIdx,
    },

    /// Several variants, distinguished at runtime by a tag.
    Multiple {
        /// The scalar that holds the tag value.
        tag: Scalar,
        /// How tag values map to variant indices.
        tag_encoding: TagEncoding<VariantIdx>,
        /// Which field of the enclosing layout stores the tag.
        tag_field: FieldIdx,
        /// The sub-layout of each variant.
        variants: IndexVec<VariantIdx, LayoutData<FieldIdx, VariantIdx>>,
    },
}
1876
/// How the tag of `Variants::Multiple` encodes the variant index.
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum TagEncoding<VariantIdx: Idx> {
    /// The tag stores the variant's discriminant value directly.
    Direct,

    /// Niche encoding: only the variants in `niche_variants` get tag
    /// values, carved out of otherwise-invalid values of a field (see
    /// `Niche`); the `untagged_variant` is recognized by the tag holding
    /// one of its own valid values instead.
    Niche {
        /// The single variant not encoded via the niche.
        untagged_variant: VariantIdx,
        /// The variants that are encoded via the niche.
        niche_variants: RangeInclusive<VariantIdx>,
        /// Tag value of the first niche variant — NOTE(review):
        /// presumably later variants map linearly from here; confirm
        /// against the layout-calculation and codegen sites.
        niche_start: u128,
    },
}
1918
/// A "niche": a scalar location plus the set of values *invalid* for it,
/// which can be reused to store extra data (cf. `TagEncoding::Niche`).
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct Niche {
    /// Byte offset of the scalar within the enclosing layout.
    pub offset: Size,
    /// The primitive stored at that offset.
    pub value: Primitive,
    /// The values that are valid; everything outside this range is
    /// spare niche space (see `Niche::available`).
    pub valid_range: WrappingRange,
}
1926
impl Niche {
    /// Builds a niche from a scalar at `offset`, if the scalar is
    /// `Initialized` and has at least one invalid value to spare.
    pub fn from_scalar<C: HasDataLayout>(cx: &C, offset: Size, scalar: Scalar) -> Option<Self> {
        let Scalar::Initialized { value, valid_range } = scalar else { return None };
        let niche = Niche { offset, value, valid_range };
        if niche.available(cx) > 0 { Some(niche) } else { None }
    }

    /// Number of invalid values available in this niche: the length of
    /// the wrapping gap `valid_range.end + 1 .. valid_range.start`,
    /// computed modulo 2^(bit width).
    pub fn available<C: HasDataLayout>(&self, cx: &C) -> u128 {
        let Self { value, valid_range: v, .. } = *self;
        let size = value.size(cx);
        assert!(size.bits() <= 128);
        let max_value = size.unsigned_int_max();

        // The niche is the gap of invalid values between `end` and `start`.
        let niche = v.end.wrapping_add(1)..v.start;
        niche.end.wrapping_sub(niche.start) & max_value
    }

    /// Tries to reserve `count` consecutive invalid values for external
    /// use. On success returns the first reserved value together with the
    /// widened `Scalar`, whose valid range now covers the reserved values
    /// too. Returns `None` if fewer than `count` invalid values exist.
    pub fn reserve<C: HasDataLayout>(&self, cx: &C, count: u128) -> Option<(u128, Scalar)> {
        assert!(count > 0);

        let Self { value, valid_range: v, .. } = *self;
        let size = value.size(cx);
        assert!(size.bits() <= 128);
        let max_value = size.unsigned_int_max();

        // Same computation as `available`, inlined to reuse the pieces.
        let niche = v.end.wrapping_add(1)..v.start;
        let available = niche.end.wrapping_sub(niche.start) & max_value;
        if count > available {
            return None;
        }

        // Reserve by lowering `start`: values `start - count .. start`.
        let move_start = |v: WrappingRange| {
            let start = v.start.wrapping_sub(count) & max_value;
            Some((start, Scalar::Initialized { value, valid_range: v.with_start(start) }))
        };
        // Reserve by raising `end`: values `end + 1 ..= end + count`.
        let move_end = |v: WrappingRange| {
            let start = v.end.wrapping_add(1) & max_value;
            let end = v.end.wrapping_add(count) & max_value;
            Some((start, Scalar::Initialized { value, valid_range: v.with_end(end) }))
        };
        let distance_end_zero = max_value - v.end;
        if v.start > v.end {
            // Already a wrapping range; grow it at the end.
            move_end(v)
        } else if v.start <= distance_end_zero {
            if count <= v.start {
                // Room below `start` without wrapping past zero.
                move_start(v)
            } else {
                move_end(v)
            }
        } else {
            // Raising `end` by `count` might wrap past zero back into the
            // valid range; if so, take from below `start` instead.
            let end = v.end.wrapping_add(count) & max_value;
            let overshot_zero = (1..=v.end).contains(&end);
            if overshot_zero {
                move_start(v)
            } else {
                move_end(v)
            }
        }
    }
}
2004
/// Fully-computed layout of a type: field placement, variant structure,
/// backend representation, size and alignment.
#[derive(PartialEq, Eq, Hash, Clone)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct LayoutData<FieldIdx: Idx, VariantIdx: Idx> {
    /// Where the fields are located within the value.
    pub fields: FieldsShape<FieldIdx>,

    /// Variant structure: none, single, or tag-discriminated multiple.
    pub variants: Variants<FieldIdx, VariantIdx>,

    /// How the value is represented for the backend:
    /// scalar, scalar pair, SIMD vector, or in memory.
    pub backend_repr: BackendRepr,

    /// The largest gap of invalid values found anywhere in this layout,
    /// if any (see `Niche`).
    pub largest_niche: Option<Niche>,
    /// `true` if the type has no valid inhabitant.
    pub uninhabited: bool,

    /// Alignment of a value of this type.
    pub align: AbiAlign,
    /// Total size of a value of this type.
    pub size: Size,

    /// NOTE(review): presumably the strictest alignment demanded by a
    /// `repr` attribute, if any — confirm in the layout calculator.
    pub max_repr_align: Option<Align>,

    /// NOTE(review): presumably the ABI alignment before `repr`-based
    /// adjustment — confirm in the layout calculator.
    pub unadjusted_abi_align: Align,

    /// Seed used for layout randomization — NOTE(review): cf.
    /// `ReprFlags::RANDOMIZE_LAYOUT`; confirm how it is derived.
    pub randomization_seed: Hash64,
}
2064
2065impl<FieldIdx: Idx, VariantIdx: Idx> LayoutData<FieldIdx, VariantIdx> {
2066 pub fn is_aggregate(&self) -> bool {
2068 match self.backend_repr {
2069 BackendRepr::Scalar(_) | BackendRepr::SimdVector { .. } => false,
2070 BackendRepr::ScalarPair(..) | BackendRepr::Memory { .. } => true,
2071 }
2072 }
2073
2074 pub fn is_uninhabited(&self) -> bool {
2076 self.uninhabited
2077 }
2078}
2079
impl<FieldIdx: Idx, VariantIdx: Idx> fmt::Debug for LayoutData<FieldIdx, VariantIdx>
where
    FieldsShape<FieldIdx>: fmt::Debug,
    Variants<FieldIdx, VariantIdx>: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Exhaustively destructure (rather than use field access) so that
        // adding a field to `LayoutData` is a compile error here —
        // presumably deliberate, to keep this impl in sync; confirm.
        let LayoutData {
            size,
            align,
            backend_repr,
            fields,
            largest_niche,
            uninhabited,
            variants,
            max_repr_align,
            unadjusted_abi_align,
            randomization_seed,
        } = self;
        f.debug_struct("Layout")
            .field("size", size)
            .field("align", align)
            .field("backend_repr", backend_repr)
            .field("fields", fields)
            .field("largest_niche", largest_niche)
            .field("uninhabited", uninhabited)
            .field("variants", variants)
            .field("max_repr_align", max_repr_align)
            .field("unadjusted_abi_align", unadjusted_abi_align)
            .field("randomization_seed", randomization_seed)
            .finish()
    }
}
2115
/// Categorizes safe pointer types for pointee analysis.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum PointerKind {
    /// A shared reference. NOTE(review): `frozen` presumably means the
    /// pointee has no interior mutability — confirm at the producer.
    SharedRef { frozen: bool },
    /// A mutable reference. NOTE(review): `unpin` presumably tracks
    /// whether the pointee is `Unpin` — confirm at the producer.
    MutableRef { unpin: bool },
    /// An owning box-like pointer. NOTE(review): `global` presumably
    /// distinguishes the global allocator — confirm at the producer.
    Box { unpin: bool, global: bool },
}
2126
/// Information known about the value a pointer points to.
#[derive(Copy, Clone, Debug)]
pub struct PointeeInfo {
    /// The kind of safe pointer this is, if any (`None` otherwise).
    pub safe: Option<PointerKind>,
    /// Size of the pointee.
    pub size: Size,
    /// Alignment of the pointee.
    pub align: Align,
}
2145
2146impl<FieldIdx: Idx, VariantIdx: Idx> LayoutData<FieldIdx, VariantIdx> {
2147 #[inline]
2149 pub fn is_unsized(&self) -> bool {
2150 self.backend_repr.is_unsized()
2151 }
2152
2153 #[inline]
2154 pub fn is_sized(&self) -> bool {
2155 self.backend_repr.is_sized()
2156 }
2157
2158 pub fn is_1zst(&self) -> bool {
2160 self.is_sized() && self.size.bytes() == 0 && self.align.abi.bytes() == 1
2161 }
2162
2163 pub fn is_zst(&self) -> bool {
2168 match self.backend_repr {
2169 BackendRepr::Scalar(_)
2170 | BackendRepr::ScalarPair(..)
2171 | BackendRepr::SimdVector { .. } => false,
2172 BackendRepr::Memory { sized } => sized && self.size.bytes() == 0,
2173 }
2174 }
2175
2176 pub fn eq_abi(&self, other: &Self) -> bool {
2182 self.size == other.size
2186 && self.is_sized() == other.is_sized()
2187 && self.backend_repr.eq_up_to_validity(&other.backend_repr)
2188 && self.backend_repr.is_bool() == other.backend_repr.is_bool()
2189 && self.align.abi == other.align.abi
2190 && self.max_repr_align == other.max_repr_align
2191 && self.unadjusted_abi_align == other.unadjusted_abi_align
2192 }
2193}
2194
/// How a struct-like layout should be computed.
#[derive(Copy, Clone, Debug)]
pub enum StructKind {
    /// Every field is sized.
    AlwaysSized,
    /// NOTE(review): presumably the trailing field may be unsized —
    /// confirm at the layout-calculator call sites.
    MaybeUnsized,
    /// The layout begins with a prefix of the given size and alignment
    /// before the fields proper.
    Prefixed(Size, Align),
}
2204
/// Error returned when parsing an ABI name from a string fails.
#[derive(Clone, Debug)]
pub enum AbiFromStrErr {
    /// The string does not name any known ABI.
    Unknown,
    /// NOTE(review): presumably the name required an explicit `-unwind`
    /// form — confirm at the `FromStr` implementation.
    NoExplicitUnwind,
}