Diffstat (limited to 'compiler/rustc_abi/src/lib.rs')
-rw-r--r--   compiler/rustc_abi/src/lib.rs | 80
1 file changed, 64 insertions(+), 16 deletions(-)
diff --git a/compiler/rustc_abi/src/lib.rs b/compiler/rustc_abi/src/lib.rs
index 39574ca55..b0c0ee942 100644
--- a/compiler/rustc_abi/src/lib.rs
+++ b/compiler/rustc_abi/src/lib.rs
@@ -11,7 +11,7 @@ use bitflags::bitflags;
 use rustc_data_structures::intern::Interned;
 #[cfg(feature = "nightly")]
 use rustc_data_structures::stable_hasher::StableOrd;
-use rustc_index::vec::{Idx, IndexVec};
+use rustc_index::vec::{Idx, IndexSlice, IndexVec};
 #[cfg(feature = "nightly")]
 use rustc_macros::HashStable_Generic;
 #[cfg(feature = "nightly")]
@@ -1057,6 +1057,32 @@ impl Scalar {
     }
 }
 
+rustc_index::newtype_index! {
+    /// The *source-order* index of a field in a variant.
+    ///
+    /// This is how most code after type checking refers to fields, rather than
+    /// using names (as names have hygiene complications and more complex lookup).
+    ///
+    /// Particularly for `repr(Rust)` types, this may not be the same as *layout* order.
+    /// (It is for `repr(C)` `struct`s, however.)
+    ///
+    /// For example, in the following types,
+    /// ```rust
+    /// # enum Never {}
+    /// # #[repr(u16)]
+    /// enum Demo1 {
+    ///     Variant0 { a: Never, b: i32 } = 100,
+    ///     Variant1 { c: u8, d: u64 } = 10,
+    /// }
+    /// struct Demo2 { e: u8, f: u16, g: u8 }
+    /// ```
+    /// `b` is `FieldIdx(1)` in `VariantIdx(0)`,
+    /// `d` is `FieldIdx(1)` in `VariantIdx(1)`, and
+    /// `f` is `FieldIdx(1)` in `VariantIdx(0)`.
+    #[derive(HashStable_Generic)]
+    pub struct FieldIdx {}
+}
+
 /// Describes how the fields of a type are located in memory.
 #[derive(PartialEq, Eq, Hash, Clone, Debug)]
 #[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
@@ -1082,7 +1108,7 @@ pub enum FieldsShape {
         /// ordered to match the source definition order.
         /// This vector does not go in increasing order.
         // FIXME(eddyb) use small vector optimization for the common case.
-        offsets: Vec<Size>,
+        offsets: IndexVec<FieldIdx, Size>,
 
         /// Maps source order field indices to memory order indices,
         /// depending on how the fields were reordered (if at all).
@@ -1096,7 +1122,7 @@ pub enum FieldsShape {
         ///
         // FIXME(eddyb) build a better abstraction for permutations, if possible.
         // FIXME(camlorn) also consider small vector optimization here.
-        memory_index: Vec<u32>,
+        memory_index: IndexVec<FieldIdx, u32>,
     },
 }
 
@@ -1131,7 +1157,7 @@ impl FieldsShape {
                 assert!(i < count);
                 stride * i
             }
-            FieldsShape::Arbitrary { ref offsets, .. } => offsets[i],
+            FieldsShape::Arbitrary { ref offsets, .. } => offsets[FieldIdx::from_usize(i)],
         }
     }
 
@@ -1142,28 +1168,27 @@ impl FieldsShape {
                 unreachable!("FieldsShape::memory_index: `Primitive`s have no fields")
             }
             FieldsShape::Union(_) | FieldsShape::Array { .. } => i,
-            FieldsShape::Arbitrary { ref memory_index, .. } => memory_index[i].try_into().unwrap(),
+            FieldsShape::Arbitrary { ref memory_index, .. } => {
+                memory_index[FieldIdx::from_usize(i)].try_into().unwrap()
+            }
         }
     }
 
     /// Gets source indices of the fields by increasing offsets.
     #[inline]
-    pub fn index_by_increasing_offset<'a>(&'a self) -> impl Iterator<Item = usize> + 'a {
+    pub fn index_by_increasing_offset(&self) -> impl Iterator<Item = usize> + '_ {
         let mut inverse_small = [0u8; 64];
-        let mut inverse_big = vec![];
+        let mut inverse_big = IndexVec::new();
        let use_small = self.count() <= inverse_small.len();
 
         // We have to write this logic twice in order to keep the array small.
         if let FieldsShape::Arbitrary { ref memory_index, .. } = *self {
             if use_small {
-                for i in 0..self.count() {
-                    inverse_small[memory_index[i] as usize] = i as u8;
+                for (field_idx, &mem_idx) in memory_index.iter_enumerated() {
+                    inverse_small[mem_idx as usize] = field_idx.as_u32() as u8;
                 }
             } else {
-                inverse_big = vec![0; self.count()];
-                for i in 0..self.count() {
-                    inverse_big[memory_index[i] as usize] = i as u32;
-                }
+                inverse_big = memory_index.invert_bijective_mapping();
             }
         }
 
@@ -1173,7 +1198,7 @@ impl FieldsShape {
                 if use_small {
                     inverse_small[i] as usize
                 } else {
-                    inverse_big[i] as usize
+                    inverse_big[i as u32].as_usize()
                 }
             }
         })
@@ -1380,8 +1405,21 @@ impl Niche {
 }
 
 rustc_index::newtype_index! {
+    /// The *source-order* index of a variant in a type.
+    ///
+    /// For enums, these are always `0..variant_count`, regardless of any
+    /// custom discriminants that may have been defined, and including any
+    /// variants that may end up uninhabited due to field types. (Some of the
+    /// variants may not be present in a monomorphized ABI [`Variants`], but
+    /// those skipped variants are always counted when determining the *index*.)
+    ///
+    /// `struct`s, `tuples`, and `unions`s are considered to have a single variant
+    /// with variant index zero, aka [`FIRST_VARIANT`].
     #[derive(HashStable_Generic)]
-    pub struct VariantIdx {}
+    pub struct VariantIdx {
+        /// Equivalent to `VariantIdx(0)`.
+        const FIRST_VARIANT = 0;
+    }
 }
 
 #[derive(PartialEq, Eq, Hash, Clone)]
@@ -1422,7 +1460,7 @@ impl LayoutS {
         let size = scalar.size(cx);
         let align = scalar.align(cx);
         LayoutS {
-            variants: Variants::Single { index: VariantIdx::new(0) },
+            variants: Variants::Single { index: FIRST_VARIANT },
             fields: FieldsShape::Primitive,
             abi: Abi::Scalar(scalar),
             largest_niche,
@@ -1484,6 +1522,16 @@ impl<'a> Layout<'a> {
     pub fn size(self) -> Size {
         self.0.0.size
     }
+
+    /// Whether the layout is from a type that implements [`std::marker::PointerLike`].
+    ///
+    /// Currently, that means that the type is pointer-sized, pointer-aligned,
+    /// and has a scalar ABI.
+    pub fn is_pointer_like(self, data_layout: &TargetDataLayout) -> bool {
+        self.size() == data_layout.pointer_size
+            && self.align().abi == data_layout.pointer_align.abi
+            && matches!(self.abi(), Abi::Scalar(..))
+    }
 }
 
 #[derive(Copy, Clone, PartialEq, Eq, Debug)]
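The new `FieldIdx` doc comment above distinguishes *source-order* field indices from *layout* (memory) order. A minimal standalone sketch of that distinction, written as ordinary user code with the stable `std::mem::offset_of!` macro on the same `Demo2` shape; note that the exact `repr(Rust)` layout is unspecified, so whether the printed offsets come out reordered may vary by compiler version:

use std::mem::offset_of;

// Same field shape as the `Demo2` example in the new `FieldIdx` docs;
// illustrative user code, not compiler-internal code.
#[allow(dead_code)]
struct Demo2 {
    e: u8,  // FieldIdx(0) in source order
    f: u16, // FieldIdx(1) in source order
    g: u8,  // FieldIdx(2) in source order
}

fn main() {
    // For a `repr(Rust)` struct the compiler may reorder fields in memory, so
    // these offsets need not increase in source order; the exact values are
    // unspecified and may differ between compiler versions.
    println!("offset of e (FieldIdx(0)): {}", offset_of!(Demo2, e));
    println!("offset of f (FieldIdx(1)): {}", offset_of!(Demo2, f));
    println!("offset of g (FieldIdx(2)): {}", offset_of!(Demo2, g));
}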
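The change to `index_by_increasing_offset` replaces the hand-rolled inversion loop with a single `invert_bijective_mapping()` call on `memory_index`, which maps source-order field indices to memory-order positions. A plain-`Vec` sketch of that inversion follows; this is an assumption about the helper's behavior, simplified to `usize` indices rather than the compiler's `IndexVec<FieldIdx, u32>`:

// Invert a permutation: `invert(map)[y] == x` exactly when `map[x] == y`.
fn invert(map: &[usize]) -> Vec<usize> {
    let mut inverse = vec![0; map.len()];
    for (source_idx, &memory_idx) in map.iter().enumerate() {
        inverse[memory_idx] = source_idx;
    }
    inverse
}

fn main() {
    // `memory_index`: source-order field index -> memory-order position.
    // Here source field 0 is laid out last, field 1 first, field 2 second.
    let memory_index = vec![2, 0, 1];
    // The inverse permutation lists source indices by increasing offset,
    // which is what `index_by_increasing_offset` iterates over.
    let by_increasing_offset = invert(&memory_index);
    assert_eq!(by_increasing_offset, vec![1, 2, 0]);
}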