Diffstat (limited to 'compiler/stable_mir/src/abi.rs')
-rw-r--r--  compiler/stable_mir/src/abi.rs  283
1 files changed, 283 insertions, 0 deletions
diff --git a/compiler/stable_mir/src/abi.rs b/compiler/stable_mir/src/abi.rs
new file mode 100644
index 000000000..53dac6abe
--- /dev/null
+++ b/compiler/stable_mir/src/abi.rs
@@ -0,0 +1,283 @@
+use crate::compiler_interface::with;
+use crate::mir::FieldIdx;
+use crate::ty::{Align, IndexedVal, Size, Ty, VariantIdx};
+use crate::Opaque;
+use std::num::NonZeroUsize;
+use std::ops::RangeInclusive;
+
+/// A function ABI definition.
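+///
+/// Illustrative only: assuming some way of obtaining a `FnAbi` for a
+/// monomorphic function instance (a hypothetical `fn_abi()` accessor, which
+/// this module does not define), a non-variadic function satisfies:
+///
+/// ```ignore
+/// let abi: FnAbi = instance.fn_abi();
+/// if !abi.c_variadic {
+///     // Every argument of a non-variadic function is "fixed".
+///     assert_eq!(abi.fixed_count as usize, abi.args.len());
+/// }
+/// ```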
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+pub struct FnAbi {
+ /// The types of each argument.
+ pub args: Vec<ArgAbi>,
+
+ /// The expected return type.
+ pub ret: ArgAbi,
+
+ /// The count of non-variadic arguments.
+ ///
+ /// Should only be different from `args.len()` when the function is a C variadic function.
+ pub fixed_count: u32,
+
+ /// The ABI convention.
+ pub conv: CallConvention,
+
+ /// Whether this is a variadic C function.
+ pub c_variadic: bool,
+}
+
+/// Information about the ABI of a function's argument or return value.
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+pub struct ArgAbi {
+ pub ty: Ty,
+ pub layout: Layout,
+ pub mode: PassMode,
+}
+
+/// How a function argument should be passed in to the target function.
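+///
+/// A sketch of how a backend might branch on the mode (illustrative only;
+/// the `lower_*` helpers are hypothetical):
+///
+/// ```ignore
+/// match &arg.mode {
+///     PassMode::Ignore => {}                              // nothing to pass
+///     PassMode::Direct(_) => lower_direct(&arg),          // one scalar or vector
+///     PassMode::Pair(_, _) => lower_pair(&arg),           // two scalars
+///     PassMode::Cast { .. } => lower_cast(&arg),          // pass after casting
+///     PassMode::Indirect { .. } => lower_indirect(&arg),  // behind a hidden pointer
+/// }
+/// ```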
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+pub enum PassMode {
+ /// Ignore the argument.
+ ///
+ /// The argument is either uninhabited or a ZST.
+ Ignore,
+ /// Pass the argument directly.
+ ///
+ /// The argument has a layout abi of `Scalar` or `Vector`.
+ Direct(Opaque),
+ /// Pass a pair's elements directly in two arguments.
+ ///
+ /// The argument has a layout abi of `ScalarPair`.
+ Pair(Opaque, Opaque),
+ /// Pass the argument after casting it.
+ Cast { pad_i32: bool, cast: Opaque },
+ /// Pass the argument indirectly via a hidden pointer.
+ Indirect { attrs: Opaque, meta_attrs: Opaque, on_stack: bool },
+}
+
+/// The layout of a type, alongside the type itself.
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub struct TyAndLayout {
+ pub ty: Ty,
+ pub layout: Layout,
+}
+
+/// The layout of a type in memory.
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+pub struct LayoutShape {
+ /// The location of the fields within the layout.
+ pub fields: FieldsShape,
+
+ /// Encodes information about multi-variant layouts.
+ /// Even with `Multiple` variants, a layout still has its own fields! Those are then
+ /// shared between all variants.
+ ///
+ /// To access all fields of this layout, both `fields` and the fields of the active variant
+ /// must be taken into account.
+ pub variants: VariantsShape,
+
+ /// The `abi` defines how this data is passed between functions.
+ pub abi: ValueAbi,
+
+ /// The ABI-mandated alignment in bytes.
+ pub abi_align: Align,
+
+ /// The size of this layout in bytes.
+ pub size: Size,
+}
+
+impl LayoutShape {
+ /// Returns `true` if the layout corresponds to an unsized type.
+ #[inline]
+ pub fn is_unsized(&self) -> bool {
+ self.abi.is_unsized()
+ }
+
+ #[inline]
+ pub fn is_sized(&self) -> bool {
+ !self.abi.is_unsized()
+ }
+
+ /// Returns `true` if the type is sized and a 1-ZST (meaning it has size 0 and alignment 1).
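+ ///
+ /// For example, `()` and `struct Empty;` are 1-ZSTs, while `[u16; 0]` has
+ /// size 0 but alignment 2 and is therefore not a 1-ZST.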
+ pub fn is_1zst(&self) -> bool {
+ self.is_sized() && self.size == 0 && self.abi_align == 1
+ }
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub struct Layout(usize);
+
+impl Layout {
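+ /// Retrieve the shape behind this layout handle (fields, variants, value
+ /// ABI, size and alignment) by querying the compiler context via [`with`].
+ ///
+ /// Illustrative only, assuming a `Layout` handle is already at hand:
+ ///
+ /// ```ignore
+ /// let shape: LayoutShape = layout.shape();
+ /// if shape.is_sized() {
+ ///     // The precomputed size (in bytes) and the ABI-mandated alignment.
+ ///     println!("size = {}, align = {}", shape.size, shape.abi_align);
+ /// }
+ /// ```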
+ pub fn shape(self) -> LayoutShape {
+ with(|cx| cx.layout_shape(self))
+ }
+}
+
+impl IndexedVal for Layout {
+ fn to_val(index: usize) -> Self {
+ Layout(index)
+ }
+ fn to_index(&self) -> usize {
+ self.0
+ }
+}
+
+/// Describes how the fields of a type are shaped in memory.
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+pub enum FieldsShape {
+ /// Scalar primitives and `!`, which never have fields.
+ Primitive,
+
+ /// All fields start at no offset. The `usize` is the field count.
+ Union(NonZeroUsize),
+
+ /// Array/vector-like placement, with all fields of identical types.
+ Array { stride: Size, count: u64 },
+
+ /// Struct-like placement, with precomputed offsets.
+ ///
+ /// Fields are guaranteed to not overlap, but note that gaps
+ /// before, between and after all the fields are NOT always
+ /// padding, and as such their contents may not be discarded.
+ /// For example, enum variants leave a gap at the start,
+ /// where the discriminant field in the enum layout goes.
+ Arbitrary {
+ /// Offsets for the first byte of each field,
+ /// ordered to match the source definition order.
+ /// I.e.: It follows the same order as [crate::ty::VariantDef::fields()].
+ /// Note that this vector is not necessarily sorted by increasing offset.
+ offsets: Vec<Size>,
+ },
+}
+
+impl FieldsShape {
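+ /// Return the field indices sorted by increasing offset, i.e. the order in
+ /// which the fields are laid out in memory. For `Arbitrary` layouts this
+ /// may differ from the source definition order.
+ ///
+ /// A small illustration (hypothetical offsets, treating `Size` and
+ /// `FieldIdx` as plain byte counts and indices):
+ ///
+ /// ```ignore
+ /// // The second source field happens to be laid out first:
+ /// let shape = FieldsShape::Arbitrary { offsets: vec![4, 0] };
+ /// assert_eq!(shape.fields_by_offset_order(), vec![1, 0]);
+ /// ```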
+ pub fn fields_by_offset_order(&self) -> Vec<FieldIdx> {
+ match self {
+ FieldsShape::Primitive => vec![],
+ FieldsShape::Union(_) | FieldsShape::Array { .. } => (0..self.count()).collect(),
+ FieldsShape::Arbitrary { offsets, .. } => {
+ let mut indices = (0..offsets.len()).collect::<Vec<_>>();
+ indices.sort_by_key(|idx| offsets[*idx]);
+ indices
+ }
+ }
+ }
+
+ pub fn count(&self) -> usize {
+ match self {
+ FieldsShape::Primitive => 0,
+ FieldsShape::Union(count) => count.get(),
+ FieldsShape::Array { count, .. } => *count as usize,
+ FieldsShape::Arbitrary { offsets, .. } => offsets.len(),
+ }
+ }
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+pub enum VariantsShape {
+ /// Single enum variants, structs/tuples, unions, and all non-ADTs.
+ Single { index: VariantIdx },
+
+ /// Enum-likes with more than one inhabited variant: each variant comes with
+ /// a *discriminant* (usually the same as the variant index but the user can
+ /// assign explicit discriminant values). That discriminant is encoded
+ /// as a *tag* on the machine. The layout of each variant is
+ /// a struct, and they all have space reserved for the tag.
+ /// For enums, the tag is the sole field of the layout.
+ Multiple {
+ tag: Scalar,
+ tag_encoding: TagEncoding,
+ tag_field: usize,
+ variants: Vec<LayoutShape>,
+ },
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+pub enum TagEncoding {
+ /// The tag directly stores the discriminant, but possibly with a smaller layout
+ /// (so converting the tag to the discriminant can require sign extension).
+ Direct,
+
+ /// Niche (values invalid for a type) encoding the discriminant:
+ /// Discriminant and variant index coincide.
+ /// The variant `untagged_variant` contains a niche at an arbitrary
+ /// offset (field `tag_field` of the enum), which for a variant with
+ /// discriminant `d` is set to
+ /// `(d - niche_variants.start).wrapping_add(niche_start)`.
+ ///
+ /// For example, `Option<(usize, &T)>` is represented such that
+ /// `None` has a null pointer for the second tuple field, and
+ /// `Some` is the identity function (with a non-null reference).
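+ ///
+ /// A worked instance of the formula (with illustrative values
+ /// `niche_variants = 0..=0` and `niche_start = 0`): the niched variant with
+ /// discriminant `d = 0` stores `(0 - 0).wrapping_add(0) == 0` in the niche,
+ /// which in the `Option<(usize, &T)>` case is exactly the null pointer used
+ /// for `None`.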
+ Niche {
+ untagged_variant: VariantIdx,
+ niche_variants: RangeInclusive<VariantIdx>,
+ niche_start: u128,
+ },
+}
+
+/// Describes how values of the type are passed by target ABIs,
+/// in terms of categories of C types there are ABI rules for.
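+///
+/// For example, under the current layout rules `u32` has a `Scalar` ABI,
+/// `(u32, u32)` a `ScalarPair` ABI, and a struct ending in a `[u8]` tail an
+/// unsized `Aggregate` ABI.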
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+pub enum ValueAbi {
+ Uninhabited,
+ Scalar(Scalar),
+ ScalarPair(Scalar, Scalar),
+ Vector {
+ element: Scalar,
+ count: u64,
+ },
+ Aggregate {
+ /// If true, the size is exact, otherwise it's only a lower bound.
+ sized: bool,
+ },
+}
+
+impl ValueAbi {
+ /// Returns `true` if this ABI corresponds to an unsized type.
+ pub fn is_unsized(&self) -> bool {
+ match *self {
+ ValueAbi::Uninhabited
+ | ValueAbi::Scalar(_)
+ | ValueAbi::ScalarPair(..)
+ | ValueAbi::Vector { .. } => false,
+ ValueAbi::Aggregate { sized } => !sized,
+ }
+ }
+}
+
+/// We currently do not support `Scalar`, and use `Opaque` instead.
+type Scalar = Opaque;
+
+/// General language calling conventions.
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub enum CallConvention {
+ C,
+ Rust,
+
+ Cold,
+ PreserveMost,
+ PreserveAll,
+
+ // Target-specific calling conventions.
+ ArmAapcs,
+ CCmseNonSecureCall,
+
+ Msp430Intr,
+
+ PtxKernel,
+
+ X86Fastcall,
+ X86Intr,
+ X86Stdcall,
+ X86ThisCall,
+ X86VectorCall,
+
+ X86_64SysV,
+ X86_64Win64,
+
+ AmdGpuKernel,
+ AvrInterrupt,
+ AvrNonBlockingInterrupt,
+
+ RiscvInterrupt,
+}