Diffstat (limited to 'compiler/rustc_metadata/src/rmeta/table.rs'):
 compiler/rustc_metadata/src/rmeta/table.rs | 231 +++++++++++++++++++++----------
 1 file changed, 159 insertions(+), 72 deletions(-)
diff --git a/compiler/rustc_metadata/src/rmeta/table.rs b/compiler/rustc_metadata/src/rmeta/table.rs
index 716655c7f..b89d48ec1 100644
--- a/compiler/rustc_metadata/src/rmeta/table.rs
+++ b/compiler/rustc_metadata/src/rmeta/table.rs
@@ -10,18 +10,61 @@ use rustc_span::hygiene::MacroKind;
use std::marker::PhantomData;
use std::num::NonZeroUsize;
+pub(super) trait IsDefault: Default {
+ fn is_default(&self) -> bool;
+}
+
+impl<T> IsDefault for Option<T> {
+ fn is_default(&self) -> bool {
+ self.is_none()
+ }
+}
+
+impl IsDefault for AttrFlags {
+ fn is_default(&self) -> bool {
+ self.is_empty()
+ }
+}
+
+impl IsDefault for bool {
+ fn is_default(&self) -> bool {
+ !self
+ }
+}
+
+impl IsDefault for u32 {
+ fn is_default(&self) -> bool {
+ *self == 0
+ }
+}
+
+impl<T> IsDefault for LazyArray<T> {
+ fn is_default(&self) -> bool {
+ self.num_elems == 0
+ }
+}
+
+impl IsDefault for DefPathHash {
+ fn is_default(&self) -> bool {
+ self.0 == Fingerprint::ZERO
+ }
+}
+
/// Helper trait, for encoding to, and decoding from, a fixed number of bytes.
/// Used mainly for Lazy positions and lengths.
/// Unchecked invariant: `Self::default()` should encode as `[0; BYTE_LEN]`,
/// but this has no impact on safety.
-pub(super) trait FixedSizeEncoding: Default {
+pub(super) trait FixedSizeEncoding: IsDefault {
/// This should be `[u8; BYTE_LEN]`;
+ /// Cannot use an associated `const BYTE_LEN: usize` instead due to const eval limitations.
type ByteArray;
fn from_bytes(b: &Self::ByteArray) -> Self;
fn write_to_bytes(self, b: &mut Self::ByteArray);
}
+/// This implementation is not used generically, but for reading/writing
+/// concrete `u32` fields in `Lazy*` structures, which may be zero.
impl FixedSizeEncoding for u32 {
type ByteArray = [u8; 4];
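Taken together, the new `IsDefault` bound on `FixedSizeEncoding` means every table entry type reserves its all-zero encoding for the default value, so default entries never need to be written at all. A standalone sketch of the interplay (toy impls mirroring the diff, not the rustc definitions):

trait IsDefault: Default {
    fn is_default(&self) -> bool;
}

impl IsDefault for u32 {
    fn is_default(&self) -> bool {
        *self == 0
    }
}

trait FixedSizeEncoding: IsDefault {
    type ByteArray;
    fn from_bytes(b: &Self::ByteArray) -> Self;
    fn write_to_bytes(self, b: &mut Self::ByteArray);
}

// `u32` encodes as 4 little-endian bytes; zero encodes as [0; 4],
// which is exactly the "entry was never written" representation.
impl FixedSizeEncoding for u32 {
    type ByteArray = [u8; 4];
    fn from_bytes(b: &[u8; 4]) -> Self {
        u32::from_le_bytes(*b)
    }
    fn write_to_bytes(self, b: &mut [u8; 4]) {
        *b = self.to_le_bytes();
    }
}

fn main() {
    let mut block = [0u8; 4]; // an untouched table slot
    assert!(u32::from_bytes(&block).is_default()); // reads back as default
    42u32.write_to_bytes(&mut block);
    assert_eq!(u32::from_bytes(&block), 42);
}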
@@ -57,7 +100,7 @@ macro_rules! fixed_size_enum {
fn write_to_bytes(self, b: &mut [u8;1]) {
use $ty::*;
b[0] = match self {
- None => 0,
+ None => unreachable!(),
$(Some($($pat)*) => 1 + ${index()},)*
}
}
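Hand-expanding what `fixed_size_enum!` generates for a hypothetical two-variant enum shows why `None => unreachable!()` is now sound: the builder filters out defaults before calling `write_to_bytes`, while byte 0 remains the on-disk marker for an absent entry on the read side:

#[derive(Debug, PartialEq)]
enum Toy { A, B }

// Read side: 0 still decodes to None, so zeroed slots read back as
// "no entry".
fn from_byte(b: u8) -> Option<Toy> {
    match b {
        0 => None,
        1 => Some(Toy::A),
        2 => Some(Toy::B),
        _ => panic!("invalid discriminant"),
    }
}

// Write side: the table builder never passes a default value down,
// so `None` cannot reach this function.
fn to_byte(v: Option<Toy>) -> u8 {
    match v {
        None => unreachable!(),
        Some(Toy::A) => 1,
        Some(Toy::B) => 2,
    }
}

fn main() {
    assert_eq!(from_byte(to_byte(Some(Toy::B))), Some(Toy::B));
    assert_eq!(from_byte(0), None);
}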
@@ -93,7 +136,8 @@ fixed_size_enum! {
( Field )
( LifetimeParam )
( GlobalAsm )
- ( Impl )
+ ( Impl { of_trait: false } )
+ ( Impl { of_trait: true } )
( Closure )
( Generator )
( Static(ast::Mutability::Not) )
@@ -154,20 +198,18 @@ fixed_size_enum! {
}
// We directly encode `DefPathHash` because a `LazyValue` would incur a 25% cost.
-impl FixedSizeEncoding for Option<DefPathHash> {
+impl FixedSizeEncoding for DefPathHash {
type ByteArray = [u8; 16];
#[inline]
fn from_bytes(b: &[u8; 16]) -> Self {
- Some(DefPathHash(Fingerprint::from_le_bytes(*b)))
+ DefPathHash(Fingerprint::from_le_bytes(*b))
}
#[inline]
fn write_to_bytes(self, b: &mut [u8; 16]) {
- let Some(DefPathHash(fingerprint)) = self else {
- panic!("Trying to encode absent DefPathHash.")
- };
- *b = fingerprint.to_le_bytes();
+ debug_assert!(!self.is_default());
+ *b = self.0.to_le_bytes();
}
}
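The 25% figure follows from the layouts: a `LazyValue` would store a 4-byte position in the table plus the 16-byte hash out of line (20 bytes total), whereas inline encoding needs only the 16 bytes. A toy round-trip of the inline layout, with `u128` standing in for `Fingerprint` (an assumption for illustration only):

// Toy stand-in for DefPathHash: a 128-bit hash stored inline as
// 16 little-endian bytes.
#[derive(Clone, Copy, Debug, PartialEq, Default)]
struct Hash128(u128);

fn from_bytes(b: &[u8; 16]) -> Hash128 {
    Hash128(u128::from_le_bytes(*b))
}

fn write_to_bytes(h: Hash128, b: &mut [u8; 16]) {
    // The all-zero hash is the reserved "default slot" value, so a
    // real hash must never be zero; mirrors the diff's debug_assert.
    debug_assert!(h != Hash128::default());
    *b = h.0.to_le_bytes();
}

fn main() {
    let mut slot = [0u8; 16];
    write_to_bytes(Hash128(0xdead_beef), &mut slot);
    assert_eq!(from_bytes(&slot), Hash128(0xdead_beef));
}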
@@ -178,17 +220,17 @@ impl FixedSizeEncoding for Option<RawDefId> {
#[inline]
fn from_bytes(b: &[u8; 8]) -> Self {
let krate = u32::from_le_bytes(b[0..4].try_into().unwrap());
- let index = u32::from_le_bytes(b[4..8].try_into().unwrap());
if krate == 0 {
return None;
}
+ let index = u32::from_le_bytes(b[4..8].try_into().unwrap());
Some(RawDefId { krate: krate - 1, index })
}
#[inline]
fn write_to_bytes(self, b: &mut [u8; 8]) {
match self {
- None => *b = [0; 8],
+ None => unreachable!(),
Some(RawDefId { krate, index }) => {
// CrateNum is less than `CrateNum::MAX_AS_U32`.
debug_assert!(krate < u32::MAX);
@@ -199,17 +241,33 @@ impl FixedSizeEncoding for Option<RawDefId> {
}
}
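The `Option<RawDefId>` impl keeps its dedicated niche: the crate number is stored shifted up by one, so an all-zero block still reads back as `None`. A self-contained sketch of that trick (hypothetical `RawId` type, not the rustc definition):

#[derive(Debug, PartialEq, Clone, Copy)]
struct RawId { krate: u32, index: u32 }

fn from_bytes(b: &[u8; 8]) -> Option<RawId> {
    let krate = u32::from_le_bytes(b[0..4].try_into().unwrap());
    if krate == 0 {
        return None; // zeroed slot: no entry was ever written
    }
    let index = u32::from_le_bytes(b[4..8].try_into().unwrap());
    Some(RawId { krate: krate - 1, index })
}

fn write_to_bytes(id: RawId, b: &mut [u8; 8]) {
    // Shift the crate number up by one to keep zero free for "absent".
    b[0..4].copy_from_slice(&(id.krate + 1).to_le_bytes());
    b[4..8].copy_from_slice(&id.index.to_le_bytes());
}

fn main() {
    let mut slot = [0u8; 8];
    assert_eq!(from_bytes(&slot), None);
    write_to_bytes(RawId { krate: 0, index: 7 }, &mut slot);
    assert_eq!(from_bytes(&slot), Some(RawId { krate: 0, index: 7 }));
}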
-impl FixedSizeEncoding for Option<()> {
+impl FixedSizeEncoding for AttrFlags {
+ type ByteArray = [u8; 1];
+
+ #[inline]
+ fn from_bytes(b: &[u8; 1]) -> Self {
+ AttrFlags::from_bits_truncate(b[0])
+ }
+
+ #[inline]
+ fn write_to_bytes(self, b: &mut [u8; 1]) {
+ debug_assert!(!self.is_default());
+ b[0] = self.bits();
+ }
+}
+
+impl FixedSizeEncoding for bool {
type ByteArray = [u8; 1];
#[inline]
fn from_bytes(b: &[u8; 1]) -> Self {
- (b[0] != 0).then(|| ())
+ b[0] != 0
}
#[inline]
fn write_to_bytes(self, b: &mut [u8; 1]) {
- b[0] = self.is_some() as u8
+ debug_assert!(!self.is_default());
+ b[0] = self as u8
}
}
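Both one-byte impls follow the same convention: the zero byte is the default (`empty()` flags, `false`), so it never reaches `write_to_bytes`. A sketch of the flags round-trip, assuming the bitflags 1.x crate (which provides `from_bits_truncate` and `bits`); the flag names are made up for illustration:

use bitflags::bitflags;

bitflags! {
    struct Flags: u8 {
        const IS_DOC_HIDDEN = 1 << 0;
        const MAY_HAVE_DOC_LINKS = 1 << 1;
    }
}

fn from_byte(b: u8) -> Flags {
    Flags::from_bits_truncate(b) // unknown bits are silently dropped
}

fn to_byte(f: Flags) -> u8 {
    debug_assert!(!f.is_empty()); // empty flags are the default: never written
    f.bits()
}

fn main() {
    let f = Flags::IS_DOC_HIDDEN | Flags::MAY_HAVE_DOC_LINKS;
    assert_eq!(from_byte(to_byte(f)), f);
    assert_eq!(from_byte(0), Flags::empty()); // a zeroed slot decodes to default
}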
@@ -227,76 +285,115 @@ impl<T> FixedSizeEncoding for Option<LazyValue<T>> {
#[inline]
fn write_to_bytes(self, b: &mut [u8; 4]) {
- let position = self.map_or(0, |lazy| lazy.position.get());
+ match self {
+ None => unreachable!(),
+ Some(lazy) => {
+ let position = lazy.position.get();
+ let position: u32 = position.try_into().unwrap();
+ position.write_to_bytes(b)
+ }
+ }
+ }
+}
+
+impl<T> LazyArray<T> {
+ #[inline]
+ fn write_to_bytes_impl(self, b: &mut [u8; 8]) {
+ let ([position_bytes, meta_bytes], []) = b.as_chunks_mut::<4>() else { panic!() };
+
+ let position = self.position.get();
let position: u32 = position.try_into().unwrap();
- position.write_to_bytes(b)
+ position.write_to_bytes(position_bytes);
+
+ let len = self.num_elems;
+ let len: u32 = len.try_into().unwrap();
+ len.write_to_bytes(meta_bytes);
+ }
+
+ fn from_bytes_impl(position_bytes: &[u8; 4], meta_bytes: &[u8; 4]) -> Option<LazyArray<T>> {
+ let position = NonZeroUsize::new(u32::from_bytes(position_bytes) as usize)?;
+ let len = u32::from_bytes(meta_bytes) as usize;
+ Some(LazyArray::from_position_and_num_elems(position, len))
}
}
-impl<T> FixedSizeEncoding for Option<LazyArray<T>> {
+impl<T> FixedSizeEncoding for LazyArray<T> {
type ByteArray = [u8; 8];
#[inline]
fn from_bytes(b: &[u8; 8]) -> Self {
- let ([ref position_bytes, ref meta_bytes], []) = b.as_chunks::<4>() else { panic!() };
- let position = NonZeroUsize::new(u32::from_bytes(position_bytes) as usize)?;
- let len = u32::from_bytes(meta_bytes) as usize;
- Some(LazyArray::from_position_and_num_elems(position, len))
+ let ([position_bytes, meta_bytes], []) = b.as_chunks::<4>() else { panic!() };
+ if *meta_bytes == [0; 4] {
+ return Default::default();
+ }
+ LazyArray::from_bytes_impl(position_bytes, meta_bytes).unwrap()
}
#[inline]
fn write_to_bytes(self, b: &mut [u8; 8]) {
- let ([ref mut position_bytes, ref mut meta_bytes], []) = b.as_chunks_mut::<4>() else { panic!() };
+ assert!(!self.is_default());
+ self.write_to_bytes_impl(b)
+ }
+}
- let position = self.map_or(0, |lazy| lazy.position.get());
- let position: u32 = position.try_into().unwrap();
- position.write_to_bytes(position_bytes);
+impl<T> FixedSizeEncoding for Option<LazyArray<T>> {
+ type ByteArray = [u8; 8];
- let len = self.map_or(0, |lazy| lazy.num_elems);
- let len: u32 = len.try_into().unwrap();
- len.write_to_bytes(meta_bytes);
+ #[inline]
+ fn from_bytes(b: &[u8; 8]) -> Self {
+ let ([position_bytes, meta_bytes], []) = b.as_chunks::<4>() else { panic!() };
+ LazyArray::from_bytes_impl(position_bytes, meta_bytes)
+ }
+
+ #[inline]
+ fn write_to_bytes(self, b: &mut [u8; 8]) {
+ match self {
+ None => unreachable!(),
+ Some(lazy) => lazy.write_to_bytes_impl(b),
+ }
}
}
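The 8-byte `LazyArray` block is a 4-byte position followed by a 4-byte length; the non-`Option` impl treats a zero length as the default empty array, while the `Option` impl treats a zero position as `None`. A plain sketch of the layout without the `as_chunks` machinery (toy `Lazy` type for illustration):

#[derive(Debug, PartialEq, Clone, Copy, Default)]
struct Lazy { position: u32, num_elems: u32 }

fn write(lazy: Lazy, b: &mut [u8; 8]) {
    b[0..4].copy_from_slice(&lazy.position.to_le_bytes());
    b[4..8].copy_from_slice(&lazy.num_elems.to_le_bytes());
}

// Option-style read: a zero position means the slot was never written.
fn read_opt(b: &[u8; 8]) -> Option<Lazy> {
    let position = u32::from_le_bytes(b[0..4].try_into().unwrap());
    if position == 0 {
        return None;
    }
    let num_elems = u32::from_le_bytes(b[4..8].try_into().unwrap());
    Some(Lazy { position, num_elems })
}

// Non-Option read: a zero length short-circuits to the default array.
fn read(b: &[u8; 8]) -> Lazy {
    let num_elems = u32::from_le_bytes(b[4..8].try_into().unwrap());
    if num_elems == 0 {
        return Lazy::default();
    }
    let position = u32::from_le_bytes(b[0..4].try_into().unwrap());
    Lazy { position, num_elems }
}

fn main() {
    let mut slot = [0u8; 8];
    assert_eq!(read_opt(&slot), None);
    assert_eq!(read(&slot), Lazy::default());
    write(Lazy { position: 100, num_elems: 3 }, &mut slot);
    assert_eq!(read(&slot), Lazy { position: 100, num_elems: 3 });
}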
/// Helper for constructing a table's serialization (also see `Table`).
-pub(super) struct TableBuilder<I: Idx, T>
-where
- Option<T>: FixedSizeEncoding,
-{
- blocks: IndexVec<I, <Option<T> as FixedSizeEncoding>::ByteArray>,
+pub(super) struct TableBuilder<I: Idx, T: FixedSizeEncoding> {
+ blocks: IndexVec<I, T::ByteArray>,
_marker: PhantomData<T>,
}
-impl<I: Idx, T> Default for TableBuilder<I, T>
-where
- Option<T>: FixedSizeEncoding,
-{
+impl<I: Idx, T: FixedSizeEncoding> Default for TableBuilder<I, T> {
fn default() -> Self {
TableBuilder { blocks: Default::default(), _marker: PhantomData }
}
}
-impl<I: Idx, T> TableBuilder<I, T>
+impl<I: Idx, const N: usize, T> TableBuilder<I, Option<T>>
where
- Option<T>: FixedSizeEncoding,
+ Option<T>: FixedSizeEncoding<ByteArray = [u8; N]>,
{
- pub(crate) fn set<const N: usize>(&mut self, i: I, value: T)
- where
- Option<T>: FixedSizeEncoding<ByteArray = [u8; N]>,
- {
- // FIXME(eddyb) investigate more compact encodings for sparse tables.
- // On the PR @michaelwoerister mentioned:
- // > Space requirements could perhaps be optimized by using the HAMT `popcnt`
- // > trick (i.e. divide things into buckets of 32 or 64 items and then
- // > store bit-masks of which item in each bucket is actually serialized).
- self.blocks.ensure_contains_elem(i, || [0; N]);
- Some(value).write_to_bytes(&mut self.blocks[i]);
- }
-
- pub(crate) fn encode<const N: usize>(&self, buf: &mut FileEncoder) -> LazyTable<I, T>
- where
- Option<T>: FixedSizeEncoding<ByteArray = [u8; N]>,
- {
+ pub(crate) fn set_some(&mut self, i: I, value: T) {
+ self.set(i, Some(value))
+ }
+}
+
+impl<I: Idx, const N: usize, T: FixedSizeEncoding<ByteArray = [u8; N]>> TableBuilder<I, T> {
+ /// Sets the table value if it is not default.
+ /// ATTENTION: For optimization, default values are simply ignored by this function, because
+ /// right now metadata tables never need to reset non-default values to default. If such need
+ /// arises in the future then a new method (e.g. `clear` or `reset`) will need to be introduced
+ /// for doing that explicitly.
+ pub(crate) fn set(&mut self, i: I, value: T) {
+ if !value.is_default() {
+ // FIXME(eddyb) investigate more compact encodings for sparse tables.
+ // On the PR @michaelwoerister mentioned:
+ // > Space requirements could perhaps be optimized by using the HAMT `popcnt`
+ // > trick (i.e. divide things into buckets of 32 or 64 items and then
+ // > store bit-masks of which item in each bucket is actually serialized).
+ self.blocks.ensure_contains_elem(i, || [0; N]);
+ value.write_to_bytes(&mut self.blocks[i]);
+ }
+ }
+
+ pub(crate) fn encode(&self, buf: &mut FileEncoder) -> LazyTable<I, T> {
let pos = buf.position();
for block in &self.blocks {
buf.emit_raw_bytes(block);
@@ -309,34 +406,24 @@ where
}
}
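Restating the builder's contract as a runnable sketch: writing a default is a no-op (so a slot can never be reset), untouched slots stay zeroed, and `encode` just concatenates the raw blocks, gaps included (toy builder over `u32` entries, not the rustc type):

struct Builder { blocks: Vec<[u8; 4]> }

impl Builder {
    fn new() -> Self {
        Builder { blocks: Vec::new() }
    }

    // Mirrors the diff's contract: defaults are silently ignored, so
    // there is no way to reset a previously written slot to default.
    fn set(&mut self, i: usize, value: u32) {
        if value != 0 {
            if self.blocks.len() <= i {
                self.blocks.resize(i + 1, [0; 4]);
            }
            self.blocks[i] = value.to_le_bytes();
        }
    }

    // Serialization is just the raw blocks, gaps included.
    fn encode(&self) -> Vec<u8> {
        self.blocks.iter().flatten().copied().collect()
    }
}

fn main() {
    let mut b = Builder::new();
    b.set(2, 7);
    b.set(0, 0); // default: ignored, slot 0 stays zeroed
    assert_eq!(b.encode(), vec![0, 0, 0, 0, 0, 0, 0, 0, 7, 0, 0, 0]);
}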
-impl<I: Idx, T: ParameterizedOverTcx> LazyTable<I, T>
+impl<I: Idx, const N: usize, T: FixedSizeEncoding<ByteArray = [u8; N]> + ParameterizedOverTcx>
+ LazyTable<I, T>
where
- Option<T>: FixedSizeEncoding,
+ for<'tcx> T::Value<'tcx>: FixedSizeEncoding<ByteArray = [u8; N]>,
{
/// Given the metadata, extract out the value at a particular index (if any).
#[inline(never)]
- pub(super) fn get<'a, 'tcx, M: Metadata<'a, 'tcx>, const N: usize>(
- &self,
- metadata: M,
- i: I,
- ) -> Option<T::Value<'tcx>>
- where
- Option<T::Value<'tcx>>: FixedSizeEncoding<ByteArray = [u8; N]>,
- {
+ pub(super) fn get<'a, 'tcx, M: Metadata<'a, 'tcx>>(&self, metadata: M, i: I) -> T::Value<'tcx> {
debug!("LazyTable::lookup: index={:?} len={:?}", i, self.encoded_size);
let start = self.position.get();
let bytes = &metadata.blob()[start..start + self.encoded_size];
let (bytes, []) = bytes.as_chunks::<N>() else { panic!() };
- let bytes = bytes.get(i.index())?;
- FixedSizeEncoding::from_bytes(bytes)
+ bytes.get(i.index()).map_or_else(Default::default, FixedSizeEncoding::from_bytes)
}
/// Size of the table in entries, including possible gaps.
- pub(super) fn size<const N: usize>(&self) -> usize
- where
- for<'tcx> Option<T::Value<'tcx>>: FixedSizeEncoding<ByteArray = [u8; N]>,
- {
+ pub(super) fn size(&self) -> usize {
self.encoded_size / N
}
}
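With defaults never serialized, `get` no longer needs to return an `Option`: an index past the encoded blocks and a zeroed block inside them both decode to the default value. A sketch of that lookup over the 4-byte `u32` encoding used in the earlier sketches (hypothetical helpers, not the rustc API):

// An index beyond the encoded data falls back to the default value,
// exactly like a zeroed block inside it.
fn get(encoded: &[u8], i: usize) -> u32 {
    encoded
        .chunks_exact(4)
        .nth(i)
        .map_or(0, |b| u32::from_le_bytes(b.try_into().unwrap()))
}

// Size of the table in entries, including possible gaps.
fn size(encoded: &[u8]) -> usize {
    encoded.len() / 4
}

fn main() {
    let encoded = [0, 0, 0, 0, 7, 0, 0, 0]; // slot 0 is a gap, slot 1 holds 7
    assert_eq!(get(&encoded, 0), 0); // gap decodes to default
    assert_eq!(get(&encoded, 1), 7);
    assert_eq!(get(&encoded, 5), 0); // out of range: also default
    assert_eq!(size(&encoded), 2);
}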