path: root/vendor/windows-metadata/src/file
Diffstat (limited to 'vendor/windows-metadata/src/file')
-rw-r--r--  vendor/windows-metadata/src/file/mod.rs     338
-rw-r--r--  vendor/windows-metadata/src/file/reader.rs  359
-rw-r--r--  vendor/windows-metadata/src/file/table.rs    57
-rw-r--r--  vendor/windows-metadata/src/file/view.rs     55
4 files changed, 809 insertions, 0 deletions
diff --git a/vendor/windows-metadata/src/file/mod.rs b/vendor/windows-metadata/src/file/mod.rs
new file mode 100644
index 000000000..41a6db920
--- /dev/null
+++ b/vendor/windows-metadata/src/file/mod.rs
@@ -0,0 +1,338 @@
+mod reader;
+mod table;
+mod view;
+use super::*;
+pub use reader::RowReader;
+use std::cmp::Ordering;
+use table::Table;
+use view::View;
+type Result<T> = std::result::Result<T, ()>;
+
+#[derive(Default)]
+pub struct File {
+ bytes: Vec<u8>,
+ strings: usize,
+ blobs: usize,
+ tables: [Table; 17],
+}
+
+impl File {
+ pub fn new(bytes: Vec<u8>) -> Option<Self> {
+ Self::ok(bytes).ok()
+ }
+
+ fn ok(bytes: Vec<u8>) -> Result<Self> {
+ let mut result = File { bytes, ..Default::default() };
+
+ let dos = result.bytes.view_as::<IMAGE_DOS_HEADER>(0)?;
+
+ if dos.e_magic != IMAGE_DOS_SIGNATURE || result.bytes.copy_as::<u32>(dos.e_lfanew as usize)? != IMAGE_NT_SIGNATURE {
+ return Err(());
+ }
+
+ let file_offset = dos.e_lfanew as usize + std::mem::size_of::<u32>();
+ let file = result.bytes.view_as::<IMAGE_FILE_HEADER>(file_offset)?;
+
+ let optional_offset = file_offset + std::mem::size_of::<IMAGE_FILE_HEADER>();
+
+ let (com_virtual_address, sections) = match result.bytes.copy_as::<u16>(optional_offset)? {
+ IMAGE_NT_OPTIONAL_HDR32_MAGIC => {
+ let optional = result.bytes.view_as::<IMAGE_OPTIONAL_HEADER32>(optional_offset)?;
+ (optional.DataDirectory[IMAGE_DIRECTORY_ENTRY_COM_DESCRIPTOR as usize].VirtualAddress, result.bytes.view_as_slice_of::<IMAGE_SECTION_HEADER>(optional_offset + std::mem::size_of::<IMAGE_OPTIONAL_HEADER32>(), file.NumberOfSections as usize)?)
+ }
+ IMAGE_NT_OPTIONAL_HDR64_MAGIC => {
+ let optional = result.bytes.view_as::<IMAGE_OPTIONAL_HEADER64>(optional_offset)?;
+ (optional.DataDirectory[IMAGE_DIRECTORY_ENTRY_COM_DESCRIPTOR as usize].VirtualAddress, result.bytes.view_as_slice_of::<IMAGE_SECTION_HEADER>(optional_offset + std::mem::size_of::<IMAGE_OPTIONAL_HEADER64>(), file.NumberOfSections as usize)?)
+ }
+ _ => return Err(()),
+ };
+
+ let clr = result.bytes.view_as::<IMAGE_COR20_HEADER>(offset_from_rva(section_from_rva(sections, com_virtual_address)?, com_virtual_address))?;
+
+ if clr.cb != std::mem::size_of::<IMAGE_COR20_HEADER>() as u32 {
+ return Err(());
+ }
+
+ let metadata_offset = offset_from_rva(section_from_rva(sections, clr.MetaData.VirtualAddress)?, clr.MetaData.VirtualAddress);
+ let metadata = result.bytes.view_as::<METADATA_HEADER>(metadata_offset)?;
+
+ if metadata.signature != METADATA_SIGNATURE {
+ return Err(());
+ }
+
+        // The METADATA_HEADER struct is not a fixed size, so we have to compute the offset a little more carefully.
+ let mut view = metadata_offset + metadata.length as usize + 20;
+ let mut tables_data: (usize, usize) = (0, 0);
+
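+        // Each stream header is a u32 offset and a u32 size followed by a null-terminated ASCII name,
+        // padded so the next header starts on a 4-byte boundary (the padding below also covers the terminator).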
+ for _ in 0..result.bytes.copy_as::<u16>(metadata_offset + metadata.length as usize + 18)? {
+ let stream_offset = result.bytes.copy_as::<u32>(view)? as usize;
+ let stream_len = result.bytes.copy_as::<u32>(view + 4)? as usize;
+ let stream_name = result.bytes.view_as_str(view + 8)?;
+ match stream_name {
+ b"#Strings" => result.strings = metadata_offset + stream_offset,
+ b"#Blob" => result.blobs = metadata_offset + stream_offset,
+ b"#~" => tables_data = (metadata_offset + stream_offset, stream_len),
+ b"#GUID" => {}
+ b"#US" => {}
+ rest => unimplemented!("{rest:?}"),
+ }
+ let mut padding = 4 - stream_name.len() % 4;
+ if padding == 0 {
+ padding = 4;
+ }
+ view += 8 + stream_name.len() + padding;
+ }
+
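+        // The HeapSizes byte selects 2- or 4-byte heap indexes: bit 0 for #Strings, bit 1 for #GUID, bit 2 for #Blob.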
+ let heap_sizes = result.bytes.copy_as::<u8>(tables_data.0 + 6)?;
+ let string_index_size = if (heap_sizes & 1) == 1 { 4 } else { 2 };
+ let guid_index_size = if (heap_sizes >> 1 & 1) == 1 { 4 } else { 2 };
+ let blob_index_size = if (heap_sizes >> 2 & 1) == 1 { 4 } else { 2 };
+ let valid_bits = result.bytes.copy_as::<u64>(tables_data.0 + 8)?;
+ view = tables_data.0 + 24;
+
+ // These tables are unused by the reader, but needed temporarily to calculate sizes and offsets for subsequent tables.
+ let unused_empty = Table::default();
+ let mut unused_assembly = Table::default();
+ let mut unused_assembly_os = Table::default();
+ let mut unused_assembly_processor = Table::default();
+ let mut unused_assembly_ref_os = Table::default();
+ let mut unused_assembly_ref_processor = Table::default();
+ let mut unused_decl_security = Table::default();
+ let mut unused_event = Table::default();
+ let mut unused_event_map = Table::default();
+ let mut unused_exported_type = Table::default();
+ let mut unused_field_layout = Table::default();
+ let mut unused_field_marshal = Table::default();
+ let mut unused_field_rva = Table::default();
+ let mut unused_file = Table::default();
+ let mut unused_generic_param_constraint = Table::default();
+ let mut unused_manifest_resource = Table::default();
+ let mut unused_method_impl = Table::default();
+ let mut unused_method_semantics = Table::default();
+ let mut unused_method_spec = Table::default();
+ let mut unused_property = Table::default();
+ let mut unused_property_map = Table::default();
+ let mut unused_standalone_sig = Table::default();
+
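+        // The Valid bitvector has one bit per table; for each set bit, the table's row count follows as a u32.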
+ for i in 0..64 {
+ if (valid_bits >> i & 1) == 0 {
+ continue;
+ }
+
+ let len = result.bytes.copy_as::<u32>(view)? as usize;
+ view += 4;
+
+ match i {
+ 0x00 => result.tables[Module::TABLE].len = len,
+ 0x01 => result.tables[TypeRef::TABLE].len = len,
+ 0x02 => result.tables[TypeDef::TABLE].len = len,
+ 0x04 => result.tables[Field::TABLE].len = len,
+ 0x06 => result.tables[MethodDef::TABLE].len = len,
+ 0x08 => result.tables[Param::TABLE].len = len,
+ 0x09 => result.tables[InterfaceImpl::TABLE].len = len,
+ 0x0a => result.tables[MemberRef::TABLE].len = len,
+ 0x0b => result.tables[Constant::TABLE].len = len,
+ 0x0c => result.tables[Attribute::TABLE].len = len,
+ 0x0d => unused_field_marshal.len = len,
+ 0x0e => unused_decl_security.len = len,
+ 0x0f => result.tables[ClassLayout::TABLE].len = len,
+ 0x10 => unused_field_layout.len = len,
+ 0x11 => unused_standalone_sig.len = len,
+ 0x12 => unused_event_map.len = len,
+ 0x14 => unused_event.len = len,
+ 0x15 => unused_property_map.len = len,
+ 0x17 => unused_property.len = len,
+ 0x18 => unused_method_semantics.len = len,
+ 0x19 => unused_method_impl.len = len,
+ 0x1a => result.tables[ModuleRef::TABLE].len = len,
+ 0x1b => result.tables[TypeSpec::TABLE].len = len,
+ 0x1c => result.tables[ImplMap::TABLE].len = len,
+ 0x1d => unused_field_rva.len = len,
+ 0x20 => unused_assembly.len = len,
+ 0x21 => unused_assembly_processor.len = len,
+ 0x22 => unused_assembly_os.len = len,
+ 0x23 => result.tables[AssemblyRef::TABLE].len = len,
+ 0x24 => unused_assembly_ref_processor.len = len,
+ 0x25 => unused_assembly_ref_os.len = len,
+ 0x26 => unused_file.len = len,
+ 0x27 => unused_exported_type.len = len,
+ 0x28 => unused_manifest_resource.len = len,
+ 0x29 => result.tables[NestedClass::TABLE].len = len,
+ 0x2a => result.tables[GenericParam::TABLE].len = len,
+ 0x2b => unused_method_spec.len = len,
+ 0x2c => unused_generic_param_constraint.len = len,
+ _ => unreachable!(),
+ };
+ }
+
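+        // A coded index packs a table tag into its low bits; it is 2 bytes wide unless the largest
+        // referenced table has too many rows to fit in the remaining bits, in which case it is 4 bytes.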
+ let tables = &result.tables;
+ let type_def_or_ref = coded_index_size(&[tables[TypeDef::TABLE].len, tables[TypeRef::TABLE].len, tables[TypeSpec::TABLE].len]);
+ let has_constant = coded_index_size(&[tables[Field::TABLE].len, tables[Param::TABLE].len, unused_property.len]);
+ let has_field_marshal = coded_index_size(&[tables[Field::TABLE].len, tables[Param::TABLE].len]);
+ let has_decl_security = coded_index_size(&[tables[TypeDef::TABLE].len, tables[MethodDef::TABLE].len, unused_assembly.len]);
+ let member_ref_parent = coded_index_size(&[tables[TypeDef::TABLE].len, tables[TypeRef::TABLE].len, tables[ModuleRef::TABLE].len, tables[MethodDef::TABLE].len, tables[TypeSpec::TABLE].len]);
+ let has_semantics = coded_index_size(&[unused_event.len, unused_property.len]);
+ let method_def_or_ref = coded_index_size(&[tables[MethodDef::TABLE].len, tables[MemberRef::TABLE].len]);
+ let member_forwarded = coded_index_size(&[tables[Field::TABLE].len, tables[MethodDef::TABLE].len]);
+ let implementation = coded_index_size(&[unused_file.len, tables[AssemblyRef::TABLE].len, unused_exported_type.len]);
+ let custom_attribute_type = coded_index_size(&[tables[MethodDef::TABLE].len, tables[MemberRef::TABLE].len, unused_empty.len, unused_empty.len, unused_empty.len]);
+ let resolution_scope = coded_index_size(&[tables[Module::TABLE].len, tables[ModuleRef::TABLE].len, tables[AssemblyRef::TABLE].len, tables[TypeRef::TABLE].len]);
+ let type_or_method_def = coded_index_size(&[tables[TypeDef::TABLE].len, tables[MethodDef::TABLE].len]);
+
+ let has_custom_attribute = coded_index_size(&[
+ tables[MethodDef::TABLE].len,
+ tables[Field::TABLE].len,
+ tables[TypeRef::TABLE].len,
+ tables[TypeDef::TABLE].len,
+ tables[Param::TABLE].len,
+ tables[InterfaceImpl::TABLE].len,
+ tables[MemberRef::TABLE].len,
+ tables[Module::TABLE].len,
+ unused_property.len,
+ unused_event.len,
+ unused_standalone_sig.len,
+ tables[ModuleRef::TABLE].len,
+ tables[TypeSpec::TABLE].len,
+ unused_assembly.len,
+ tables[AssemblyRef::TABLE].len,
+ unused_file.len,
+ unused_exported_type.len,
+ unused_manifest_resource.len,
+ tables[GenericParam::TABLE].len,
+ unused_generic_param_constraint.len,
+ unused_method_spec.len,
+ ]);
+
+ unused_assembly.set_columns(4, 8, 4, blob_index_size, string_index_size, string_index_size);
+ unused_assembly_os.set_columns(4, 4, 4, 0, 0, 0);
+ unused_assembly_processor.set_columns(4, 0, 0, 0, 0, 0);
+ result.tables[AssemblyRef::TABLE].set_columns(8, 4, blob_index_size, string_index_size, string_index_size, blob_index_size);
+ unused_assembly_ref_os.set_columns(4, 4, 4, result.tables[AssemblyRef::TABLE].index_width(), 0, 0);
+ unused_assembly_ref_processor.set_columns(4, result.tables[AssemblyRef::TABLE].index_width(), 0, 0, 0, 0);
+ result.tables[ClassLayout::TABLE].set_columns(2, 4, result.tables[TypeDef::TABLE].index_width(), 0, 0, 0);
+ result.tables[Constant::TABLE].set_columns(2, has_constant, blob_index_size, 0, 0, 0);
+ result.tables[Attribute::TABLE].set_columns(has_custom_attribute, custom_attribute_type, blob_index_size, 0, 0, 0);
+ unused_decl_security.set_columns(2, has_decl_security, blob_index_size, 0, 0, 0);
+ unused_event_map.set_columns(result.tables[TypeDef::TABLE].index_width(), unused_event.index_width(), 0, 0, 0, 0);
+ unused_event.set_columns(2, string_index_size, type_def_or_ref, 0, 0, 0);
+ unused_exported_type.set_columns(4, 4, string_index_size, string_index_size, implementation, 0);
+ result.tables[Field::TABLE].set_columns(2, string_index_size, blob_index_size, 0, 0, 0);
+ unused_field_layout.set_columns(4, result.tables[Field::TABLE].index_width(), 0, 0, 0, 0);
+ unused_field_marshal.set_columns(has_field_marshal, blob_index_size, 0, 0, 0, 0);
+ unused_field_rva.set_columns(4, result.tables[Field::TABLE].index_width(), 0, 0, 0, 0);
+ unused_file.set_columns(4, string_index_size, blob_index_size, 0, 0, 0);
+ result.tables[GenericParam::TABLE].set_columns(2, 2, type_or_method_def, string_index_size, 0, 0);
+ unused_generic_param_constraint.set_columns(result.tables[GenericParam::TABLE].index_width(), type_def_or_ref, 0, 0, 0, 0);
+ result.tables[ImplMap::TABLE].set_columns(2, member_forwarded, string_index_size, result.tables[ModuleRef::TABLE].index_width(), 0, 0);
+ result.tables[InterfaceImpl::TABLE].set_columns(result.tables[TypeDef::TABLE].index_width(), type_def_or_ref, 0, 0, 0, 0);
+ unused_manifest_resource.set_columns(4, 4, string_index_size, implementation, 0, 0);
+ result.tables[MemberRef::TABLE].set_columns(member_ref_parent, string_index_size, blob_index_size, 0, 0, 0);
+ result.tables[MethodDef::TABLE].set_columns(4, 2, 2, string_index_size, blob_index_size, result.tables[Param::TABLE].index_width());
+ unused_method_impl.set_columns(result.tables[TypeDef::TABLE].index_width(), method_def_or_ref, method_def_or_ref, 0, 0, 0);
+ unused_method_semantics.set_columns(2, result.tables[MethodDef::TABLE].index_width(), has_semantics, 0, 0, 0);
+ unused_method_spec.set_columns(method_def_or_ref, blob_index_size, 0, 0, 0, 0);
+ result.tables[Module::TABLE].set_columns(2, string_index_size, guid_index_size, guid_index_size, guid_index_size, 0);
+ result.tables[ModuleRef::TABLE].set_columns(string_index_size, 0, 0, 0, 0, 0);
+ result.tables[NestedClass::TABLE].set_columns(result.tables[TypeDef::TABLE].index_width(), result.tables[TypeDef::TABLE].index_width(), 0, 0, 0, 0);
+ result.tables[Param::TABLE].set_columns(2, 2, string_index_size, 0, 0, 0);
+ unused_property.set_columns(2, string_index_size, blob_index_size, 0, 0, 0);
+ unused_property_map.set_columns(result.tables[TypeDef::TABLE].index_width(), unused_property.index_width(), 0, 0, 0, 0);
+ unused_standalone_sig.set_columns(blob_index_size, 0, 0, 0, 0, 0);
+ result.tables[TypeDef::TABLE].set_columns(4, string_index_size, string_index_size, type_def_or_ref, result.tables[Field::TABLE].index_width(), result.tables[MethodDef::TABLE].index_width());
+ result.tables[TypeRef::TABLE].set_columns(resolution_scope, string_index_size, string_index_size, 0, 0, 0);
+ result.tables[TypeSpec::TABLE].set_columns(blob_index_size, 0, 0, 0, 0, 0);
+
+ result.tables[Module::TABLE].set_data(&mut view);
+ result.tables[TypeRef::TABLE].set_data(&mut view);
+ result.tables[TypeDef::TABLE].set_data(&mut view);
+ result.tables[Field::TABLE].set_data(&mut view);
+ result.tables[MethodDef::TABLE].set_data(&mut view);
+ result.tables[Param::TABLE].set_data(&mut view);
+ result.tables[InterfaceImpl::TABLE].set_data(&mut view);
+ result.tables[MemberRef::TABLE].set_data(&mut view);
+ result.tables[Constant::TABLE].set_data(&mut view);
+ result.tables[Attribute::TABLE].set_data(&mut view);
+ unused_field_marshal.set_data(&mut view);
+ unused_decl_security.set_data(&mut view);
+ result.tables[ClassLayout::TABLE].set_data(&mut view);
+ unused_field_layout.set_data(&mut view);
+ unused_standalone_sig.set_data(&mut view);
+ unused_event_map.set_data(&mut view);
+ unused_event.set_data(&mut view);
+ unused_property_map.set_data(&mut view);
+ unused_property.set_data(&mut view);
+ unused_method_semantics.set_data(&mut view);
+ unused_method_impl.set_data(&mut view);
+ result.tables[ModuleRef::TABLE].set_data(&mut view);
+ result.tables[TypeSpec::TABLE].set_data(&mut view);
+ result.tables[ImplMap::TABLE].set_data(&mut view);
+ unused_field_rva.set_data(&mut view);
+ unused_assembly.set_data(&mut view);
+ unused_assembly_processor.set_data(&mut view);
+ unused_assembly_os.set_data(&mut view);
+ result.tables[AssemblyRef::TABLE].set_data(&mut view);
+ unused_assembly_ref_processor.set_data(&mut view);
+ unused_assembly_ref_os.set_data(&mut view);
+ unused_file.set_data(&mut view);
+ unused_exported_type.set_data(&mut view);
+ unused_manifest_resource.set_data(&mut view);
+ result.tables[NestedClass::TABLE].set_data(&mut view);
+ result.tables[GenericParam::TABLE].set_data(&mut view);
+
+ Ok(result)
+ }
+
+ fn usize(&self, row: usize, table: usize, column: usize) -> usize {
+ let table = &self.tables[table];
+ let column = &table.columns[column];
+ let offset = table.offset + row * table.width + column.offset;
+ match column.width {
+ 1 => self.bytes.copy_as::<u8>(offset).map_or(0, |v| v as usize),
+ 2 => self.bytes.copy_as::<u16>(offset).map_or(0, |v| v as usize),
+ 4 => self.bytes.copy_as::<u32>(offset).map_or(0, |v| v as usize),
+ _ => self.bytes.copy_as::<u64>(offset).map_or(0, |v| v as usize),
+ }
+ }
+
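+    // Binary search over a sorted column: returns the index of the first row whose value is not less than value.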
+ fn lower_bound_of(&self, table: usize, mut first: usize, last: usize, column: usize, value: usize) -> usize {
+ let mut count = last - first;
+ while count > 0 {
+ let count2 = count / 2;
+ let middle = first + count2;
+ if self.usize(middle, table, column) < value {
+ first = middle + 1;
+ count -= count2 + 1;
+ } else {
+ count = count2;
+ }
+ }
+ first
+ }
+
+ fn upper_bound_of(&self, table: usize, mut first: usize, last: usize, column: usize, value: usize) -> usize {
+ let mut count = last - first;
+ while count > 0 {
+ let count2 = count / 2;
+ let middle = first + count2;
+ if value < self.usize(middle, table, column) {
+ count = count2
+ } else {
+ first = middle + 1;
+ count -= count2 + 1;
+ }
+ }
+ first
+ }
+
+ pub fn table<R: AsRow>(&self, file: usize) -> RowIterator<R> {
+ RowIterator::new(file, 0..self.tables[R::TABLE].len)
+ }
+}
+
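+// Locate the PE section whose virtual address range contains the given RVA.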
+fn section_from_rva(sections: &[IMAGE_SECTION_HEADER], rva: u32) -> Result<&IMAGE_SECTION_HEADER> {
+ sections.iter().find(|&s| rva >= s.VirtualAddress && rva < s.VirtualAddress + unsafe { s.Misc.VirtualSize }).ok_or(())
+}
+
+fn offset_from_rva(section: &IMAGE_SECTION_HEADER, rva: u32) -> usize {
+ (rva - section.VirtualAddress + section.PointerToRawData) as usize
+}
diff --git a/vendor/windows-metadata/src/file/reader.rs b/vendor/windows-metadata/src/file/reader.rs
new file mode 100644
index 000000000..571a06d5e
--- /dev/null
+++ b/vendor/windows-metadata/src/file/reader.rs
@@ -0,0 +1,359 @@
+use super::*;
+
+pub trait RowReader<'a> {
+ fn row_file<R: AsRow>(&self, row: R) -> &'a File;
+
+ fn row_usize<R: AsRow>(&self, row: R, column: usize) -> usize {
+ let file = self.row_file(row);
+ let row = row.to_row();
+ file.usize(row.row, R::TABLE, column)
+ }
+
+ fn row_str<R: AsRow>(&self, row: R, column: usize) -> &'a str {
+ let file = self.row_file(row);
+ let offset = file.strings + self.row_usize(row, column);
+ let bytes = &file.bytes[offset..];
+ let nul_pos = bytes.iter().position(|&c| c == 0).expect("expected null-terminated C-string");
+ std::str::from_utf8(&bytes[..nul_pos]).expect("expected valid utf-8 C-string")
+ }
+
+ fn row_blob<R: AsRow>(&self, row: R, column: usize) -> Blob<'a> {
+ let file = self.row_file(row);
+ let offset = file.blobs + self.row_usize(row, column);
+ let initial_byte = file.bytes[offset];
+
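+        // Blob lengths use the compressed-integer encoding: a leading 0 bit means a 1-byte length,
+        // 10 means 2 bytes and 110 means 4 bytes, with the remaining bits holding the high-order length bits.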
+ let (blob_size, blob_size_bytes) = match initial_byte >> 5 {
+ 0..=3 => (initial_byte & 0x7f, 1),
+ 4..=5 => (initial_byte & 0x3f, 2),
+ 6 => (initial_byte & 0x1f, 4),
+ rest => unimplemented!("{rest:?}"),
+ };
+
+ let mut blob_size = blob_size as usize;
+
+ for byte in &file.bytes[offset + 1..offset + blob_size_bytes] {
+ blob_size = blob_size.checked_shl(8).unwrap_or(0) + (*byte as usize);
+ }
+
+ let offset = offset + blob_size_bytes;
+ Blob::new(row.file(), &file.bytes[offset..offset + blob_size])
+ }
+
+ fn row_list<R: AsRow, L: AsRow>(&self, row: R, column: usize) -> RowIterator<L> {
+ let file = self.row_file(row);
+ let first = self.row_usize(row, column) - 1;
+ let next = row.next();
+ let last = if next.index() < file.tables[R::TABLE].len { self.row_usize(next, column) - 1 } else { file.tables[L::TABLE].len };
+ RowIterator::new(row.file(), first..last)
+ }
+
+ fn row_equal_range<R: AsRow, L: AsRow>(&self, row: R, column: usize, value: usize) -> RowIterator<L> {
+ let file = self.row_file(row);
+ let mut first = 0;
+ let mut last = file.tables[L::TABLE].len;
+ let mut count = last;
+
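+        // Binary search for any row matching value, then widen to the full half-open range of equal rows.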
+ loop {
+ if count == 0 {
+ last = first;
+ break;
+ }
+
+ let count2 = count / 2;
+ let middle = first + count2;
+ let middle_value = file.usize(middle, L::TABLE, column);
+
+ match middle_value.cmp(&value) {
+ Ordering::Less => {
+ first = middle + 1;
+ count -= count2 + 1;
+ }
+ Ordering::Greater => count = count2,
+ Ordering::Equal => {
+ let first2 = file.lower_bound_of(L::TABLE, first, middle, column, value);
+ first += count;
+ last = file.upper_bound_of(L::TABLE, middle + 1, first, column, value);
+ first = first2;
+ break;
+ }
+ }
+ }
+
+ RowIterator::new(row.file(), first..last)
+ }
+
+ fn row_decode<R: AsRow, T: Decode>(&self, row: R, column: usize) -> T {
+ T::decode(row.file(), self.row_usize(row, column))
+ }
+
+ //
+ // Attribute
+ //
+
+ fn attribute_name(&self, row: Attribute) -> &'a str {
+ let AttributeType::MemberRef(row) = self.row_decode(row, 1);
+ let MemberRefParent::TypeRef(row) = self.row_decode(row, 0);
+ self.type_ref_name(row)
+ }
+
+ fn attributes<R: AsRow + Into<HasAttribute>>(&self, row: R) -> RowIterator<Attribute> {
+ self.row_equal_range(row, 0, row.into().encode())
+ }
+
+ fn find_attribute<R: AsRow + Into<HasAttribute>>(&self, row: R, name: &str) -> Option<Attribute> {
+ self.attributes(row).find(|attribute| self.attribute_name(*attribute) == name)
+ }
+
+ fn has_attribute<R: AsRow + Into<HasAttribute>>(&self, row: R, name: &str) -> bool {
+ self.find_attribute(row, name).is_some()
+ }
+
+ //
+ // Other
+ //
+
+ fn type_def_or_ref(&self, code: TypeDefOrRef) -> TypeName<'a> {
+ match code {
+ TypeDefOrRef::TypeDef(row) => TypeName::new(self.type_def_namespace(row), self.type_def_name(row)),
+ TypeDefOrRef::TypeRef(row) => TypeName::new(self.type_ref_namespace(row), self.type_ref_name(row)),
+ rest => unimplemented!("{rest:?}"),
+ }
+ }
+
+ //
+ // ClassLayout
+ //
+
+ fn class_layout_packing_size(&self, row: ClassLayout) -> usize {
+ self.row_usize(row, 0)
+ }
+
+ //
+ // Constant
+ //
+
+ fn constant_type(&self, row: Constant) -> Type {
+ let code = self.row_usize(row, 0);
+ Type::from_code(code).expect("Constant type not found")
+ }
+
+ fn constant_value(&self, row: Constant) -> Value {
+ let mut blob = self.row_blob(row, 2);
+ match self.constant_type(row) {
+ Type::I8 => Value::I8(blob.read_i8()),
+ Type::U8 => Value::U8(blob.read_u8()),
+ Type::I16 => Value::I16(blob.read_i16()),
+ Type::U16 => Value::U16(blob.read_u16()),
+ Type::I32 => Value::I32(blob.read_i32()),
+ Type::U32 => Value::U32(blob.read_u32()),
+ Type::I64 => Value::I64(blob.read_i64()),
+ Type::U64 => Value::U64(blob.read_u64()),
+ Type::F32 => Value::F32(blob.read_f32()),
+ Type::F64 => Value::F64(blob.read_f64()),
+ Type::String => Value::String(blob.read_string()),
+ rest => unimplemented!("{rest:?}"),
+ }
+ }
+
+ //
+ // Field
+ //
+
+ fn field_flags(&self, row: Field) -> FieldAttributes {
+ FieldAttributes(self.row_usize(row, 0) as u16)
+ }
+
+ fn field_name(&self, row: Field) -> &'a str {
+ self.row_str(row, 1)
+ }
+
+ fn field_constant(&self, row: Field) -> Option<Constant> {
+ self.row_equal_range(row, 1, HasConstant::Field(row).encode()).next()
+ }
+
+ //
+ // GenericParam
+ //
+
+ fn generic_param_number(&self, row: GenericParam) -> u16 {
+ self.row_usize(row, 0) as u16
+ }
+
+ fn generic_param_name(&self, row: GenericParam) -> &'a str {
+ self.row_str(row, 3)
+ }
+
+ //
+ // ImplMap
+ //
+
+ fn impl_map_flags(&self, row: ImplMap) -> PInvokeAttributes {
+ PInvokeAttributes(self.row_usize(row, 0))
+ }
+
+ fn impl_map_scope(&self, row: ImplMap) -> ModuleRef {
+ ModuleRef(Row::new(self.row_usize(row, 3) - 1, row.file()))
+ }
+
+ fn impl_map_import_name(&self, row: ImplMap) -> &'a str {
+ self.row_str(row, 2)
+ }
+
+ //
+ // MemberRef
+ //
+
+ fn member_ref_parent(&self, row: MemberRef) -> MemberRefParent {
+ self.row_decode(row, 0)
+ }
+
+ fn member_ref_signature(&self, row: MemberRef) -> Blob<'a> {
+ self.row_blob(row, 2)
+ }
+
+ //
+ // MethodDef
+ //
+
+ fn method_def_impl_flags(&self, row: MethodDef) -> MethodImplAttributes {
+ MethodImplAttributes(self.row_usize(row, 1) as u16)
+ }
+
+ fn method_def_flags(&self, row: MethodDef) -> MethodAttributes {
+ MethodAttributes(self.row_usize(row, 2) as u16)
+ }
+
+ fn method_def_name(&self, row: MethodDef) -> &'a str {
+ self.row_str(row, 3)
+ }
+
+ fn method_def_params(&self, row: MethodDef) -> RowIterator<Param> {
+ self.row_list(row, 5)
+ }
+
+ fn method_def_impl_map(&self, row: MethodDef) -> Option<ImplMap> {
+ self.row_equal_range(row, 1, MemberForwarded::MethodDef(row).encode()).next()
+ }
+
+ fn method_def_module_name(&self, row: MethodDef) -> String {
+ // TODO: riddle should always lower case the module name to avoid allocating here
+ let Some(impl_map) = self.method_def_impl_map(row) else {
+ return String::new();
+ };
+
+ self.module_ref_name(self.impl_map_scope(impl_map)).to_lowercase()
+ }
+
+ //
+ // ModuleRef
+ //
+
+ fn module_ref_name(&self, row: ModuleRef) -> &'a str {
+ self.row_str(row, 0)
+ }
+
+ //
+ // NestedClass
+ //
+
+ fn nested_class_inner(&self, row: NestedClass) -> TypeDef {
+ TypeDef(Row::new(self.row_usize(row, 0) - 1, row.file()))
+ }
+
+ fn nested_class_outer(&self, row: NestedClass) -> TypeDef {
+ TypeDef(Row::new(self.row_usize(row, 1) - 1, row.file()))
+ }
+
+ //
+ // Param
+ //
+
+ fn param_flags(&self, row: Param) -> ParamAttributes {
+ ParamAttributes(self.row_usize(row, 0) as u16)
+ }
+
+ fn param_sequence(&self, row: Param) -> u16 {
+ self.row_usize(row, 1) as u16
+ }
+
+ fn param_name(&self, row: Param) -> &'a str {
+ self.row_str(row, 2)
+ }
+
+ //
+ // TypeDef
+ //
+
+ fn type_def_flags(&self, row: TypeDef) -> TypeAttributes {
+ TypeAttributes(self.row_usize(row, 0) as u32)
+ }
+
+ fn type_def_name(&self, row: TypeDef) -> &'a str {
+ self.row_str(row, 1)
+ }
+
+ fn type_def_namespace(&self, row: TypeDef) -> &'a str {
+ self.row_str(row, 2)
+ }
+
+ fn type_def_extends(&self, row: TypeDef) -> Option<TypeName<'a>> {
+ match self.row_usize(row, 3) {
+ 0 => None,
+ code => Some(self.type_def_or_ref(TypeDefOrRef::decode(row.file(), code))),
+ }
+ }
+
+ fn type_def_methods(&self, row: TypeDef) -> RowIterator<MethodDef> {
+ self.row_list(row, 5)
+ }
+
+ fn type_def_fields(&self, row: TypeDef) -> RowIterator<Field> {
+ self.row_list(row, 4)
+ }
+
+ fn type_def_generics(&self, row: TypeDef) -> RowIterator<GenericParam> {
+ self.row_equal_range(row, 2, TypeOrMethodDef::TypeDef(row).encode())
+ }
+
+ fn type_def_interface_impls(&self, row: TypeDef) -> RowIterator<InterfaceImpl> {
+ self.row_equal_range(row, 0, row.0.row + 1)
+ }
+
+ fn type_def_enclosing_type(&self, row: TypeDef) -> Option<TypeDef> {
+ self.row_equal_range::<TypeDef, NestedClass>(row, 0, row.0.row + 1).next().map(|row| TypeDef(Row::new(self.row_usize(row, 1) - 1, row.file())))
+ }
+
+ fn type_def_class_layout(&self, row: TypeDef) -> Option<ClassLayout> {
+ self.row_equal_range(row, 2, row.0.row + 1).next()
+ }
+
+ //
+ // TypeRef
+ //
+
+ fn type_ref_name(&self, row: TypeRef) -> &'a str {
+ self.row_str(row, 1)
+ }
+
+ fn type_ref_namespace(&self, row: TypeRef) -> &'a str {
+ self.row_str(row, 2)
+ }
+
+ fn type_ref_resolution_scope(&self, row: TypeRef) -> ResolutionScope {
+ self.row_decode(row, 0)
+ }
+
+ //
+ // TypeSpec
+ //
+
+ fn type_spec_signature(&self, row: TypeSpec) -> Blob<'a> {
+ self.row_blob(row, 0)
+ }
+}
+
+impl<'a> RowReader<'a> for &'a [File] {
+ fn row_file<R: AsRow>(&self, row: R) -> &'a File {
+ &self[row.to_row().file]
+ }
+}
diff --git a/vendor/windows-metadata/src/file/table.rs b/vendor/windows-metadata/src/file/table.rs
new file mode 100644
index 000000000..af9599c31
--- /dev/null
+++ b/vendor/windows-metadata/src/file/table.rs
@@ -0,0 +1,57 @@
+#[derive(Default)]
+pub struct Table {
+ pub offset: usize,
+ pub len: usize,
+ pub width: usize,
+ pub columns: [Column; 6],
+}
+
+#[derive(Default)]
+pub struct Column {
+ pub offset: usize,
+ pub width: usize,
+}
+
+impl Table {
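+    // A row index into this table is 2 bytes wide when the table holds fewer than 2^16 rows, otherwise 4 bytes.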
+ pub fn index_width(&self) -> usize {
+ if self.len < (1 << 16) {
+ 2
+ } else {
+ 4
+ }
+ }
+
+ pub fn set_columns(&mut self, a: usize, b: usize, c: usize, d: usize, e: usize, f: usize) {
+ self.width = a + b + c + d + e + f;
+ self.columns[0] = Column::new(0, a);
+ if b != 0 {
+ self.columns[1] = Column::new(a, b);
+ }
+ if c != 0 {
+ self.columns[2] = Column::new(a + b, c);
+ }
+ if d != 0 {
+ self.columns[3] = Column::new(a + b + c, d);
+ }
+ if e != 0 {
+ self.columns[4] = Column::new(a + b + c + d, e);
+ }
+ if f != 0 {
+ self.columns[5] = Column::new(a + b + c + d + e, f);
+ }
+ }
+
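+    // Records this table's byte offset within the #~ stream and advances the running offset past its rows.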
+ pub fn set_data(&mut self, offset: &mut usize) {
+ if self.len != 0 {
+ let next = *offset + self.len * self.width;
+ self.offset = *offset;
+ *offset = next;
+ }
+ }
+}
+
+impl Column {
+ fn new(offset: usize, width: usize) -> Self {
+ Self { offset, width }
+ }
+}
diff --git a/vendor/windows-metadata/src/file/view.rs b/vendor/windows-metadata/src/file/view.rs
new file mode 100644
index 000000000..31eb1541b
--- /dev/null
+++ b/vendor/windows-metadata/src/file/view.rs
@@ -0,0 +1,55 @@
+type Result<T> = std::result::Result<T, ()>;
+
+pub trait View {
+ fn view_as<T>(&self, offset: usize) -> Result<&T>;
+ fn view_as_slice_of<T>(&self, offset: usize, len: usize) -> Result<&[T]>;
+ fn copy_as<T: Copy>(&self, offset: usize) -> Result<T>;
+ fn view_as_str(&self, offset: usize) -> Result<&[u8]>;
+ fn is_proper_length<T>(&self, offset: usize) -> Result<()>;
+ fn is_proper_length_and_alignment<T>(&self, offset: usize, count: usize) -> Result<*const T>;
+}
+
+impl View for [u8] {
+ fn view_as<T>(&self, offset: usize) -> Result<&T> {
+ unsafe { Ok(&*self.is_proper_length_and_alignment(offset, 1)?) }
+ }
+
+ fn view_as_slice_of<T>(&self, offset: usize, len: usize) -> Result<&[T]> {
+ unsafe { Ok(std::slice::from_raw_parts(self.is_proper_length_and_alignment(offset, len)?, len)) }
+ }
+
+ fn copy_as<T>(&self, offset: usize) -> Result<T> {
+ self.is_proper_length::<T>(offset)?;
+
+ unsafe {
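+            // Zero-initialize a T, then overwrite it byte-for-byte from the buffer.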
+ let mut data = std::mem::MaybeUninit::zeroed().assume_init();
+ std::ptr::copy_nonoverlapping(self[offset..].as_ptr(), &mut data as *mut T as *mut u8, std::mem::size_of::<T>());
+ Ok(data)
+ }
+ }
+
+ fn view_as_str(&self, offset: usize) -> Result<&[u8]> {
+ let buffer = &self[offset..];
+ let index = buffer.iter().position(|c| *c == b'\0').ok_or(())?;
+ Ok(&self[offset..offset + index])
+ }
+
+ fn is_proper_length<T>(&self, offset: usize) -> Result<()> {
+ if offset + std::mem::size_of::<T>() <= self.len() {
+ Ok(())
+ } else {
+ Err(())
+ }
+ }
+
+ fn is_proper_length_and_alignment<T>(&self, offset: usize, count: usize) -> Result<*const T> {
+ self.is_proper_length::<T>(offset * count)?;
+ let ptr = &self[offset] as *const u8 as *const T;
+
+ if ptr.align_offset(std::mem::align_of::<T>()) == 0 {
+ Ok(ptr)
+ } else {
+ Err(())
+ }
+ }
+}