summaryrefslogtreecommitdiffstats
path: root/third_party/rust/wasmparser/src/validator
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-07 19:33:14 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-07 19:33:14 +0000
commit36d22d82aa202bb199967e9512281e9a53db42c9 (patch)
tree105e8c98ddea1c1e4784a60a5a6410fa416be2de /third_party/rust/wasmparser/src/validator
parentInitial commit. (diff)
downloadfirefox-esr-36d22d82aa202bb199967e9512281e9a53db42c9.tar.xz
firefox-esr-36d22d82aa202bb199967e9512281e9a53db42c9.zip
Adding upstream version 115.7.0esr.upstream/115.7.0esr
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'third_party/rust/wasmparser/src/validator')
-rw-r--r--third_party/rust/wasmparser/src/validator/component.rs2101
-rw-r--r--third_party/rust/wasmparser/src/validator/core.rs1278
-rw-r--r--third_party/rust/wasmparser/src/validator/func.rs348
-rw-r--r--third_party/rust/wasmparser/src/validator/operators.rs3474
-rw-r--r--third_party/rust/wasmparser/src/validator/types.rs2166
5 files changed, 9367 insertions, 0 deletions
diff --git a/third_party/rust/wasmparser/src/validator/component.rs b/third_party/rust/wasmparser/src/validator/component.rs
new file mode 100644
index 0000000000..641b18a2cc
--- /dev/null
+++ b/third_party/rust/wasmparser/src/validator/component.rs
@@ -0,0 +1,2101 @@
+//! State relating to validating a WebAssembly component.
+
+use super::{
+ check_max, combine_type_sizes,
+ core::Module,
+ types::{
+ ComponentFuncType, ComponentInstanceType, ComponentInstanceTypeKind, ComponentType,
+ ComponentValType, EntityType, InstanceType, KebabString, ModuleType, RecordType, Type,
+ TypeAlloc, TypeId, TypeList, VariantCase,
+ },
+};
+use crate::{
+ limits::*,
+ types::{
+ ComponentDefinedType, ComponentEntityType, InstanceTypeKind, KebabStr, LoweringInfo,
+ TupleType, UnionType, VariantType,
+ },
+ BinaryReaderError, CanonicalOption, ComponentExternalKind, ComponentOuterAliasKind,
+ ComponentTypeRef, ExternalKind, FuncType, GlobalType, InstantiationArgKind, MemoryType, Result,
+ TableType, TypeBounds, ValType, WasmFeatures,
+};
+use indexmap::{map::Entry, IndexMap, IndexSet};
+use std::{collections::HashSet, mem};
+use url::Url;
+
+fn to_kebab_str<'a>(s: &'a str, desc: &str, offset: usize) -> Result<&'a KebabStr> {
+ match KebabStr::new(s) {
+ Some(s) => Ok(s),
+ None => {
+ if s.is_empty() {
+ bail!(offset, "{desc} name cannot be empty");
+ }
+
+ bail!(offset, "{desc} name `{s}` is not in kebab case");
+ }
+ }
+}
+
+fn parse_url(url: &str, offset: usize) -> Result<Option<Url>> {
+ if url.is_empty() {
+ return Ok(None);
+ }
+
+ Url::parse(url)
+ .map(Some)
+ .map_err(|e| BinaryReaderError::new(e.to_string(), offset))
+}
+
+pub(crate) struct ComponentState {
+ // Core index spaces
+ pub core_types: Vec<TypeId>,
+ pub core_modules: Vec<TypeId>,
+ pub core_instances: Vec<TypeId>,
+ pub core_funcs: Vec<TypeId>,
+ pub core_memories: Vec<MemoryType>,
+ pub core_tables: Vec<TableType>,
+ pub core_globals: Vec<GlobalType>,
+ pub core_tags: Vec<TypeId>,
+
+ // Component index spaces
+ pub types: Vec<TypeId>,
+ pub funcs: Vec<TypeId>,
+ pub values: Vec<(ComponentValType, bool)>,
+ pub instances: Vec<TypeId>,
+ pub components: Vec<TypeId>,
+
+ /// A set of all imports and exports since they share the same namespace.
+ pub externs: IndexMap<KebabString, (Option<Url>, ComponentEntityType, ExternKind)>,
+
+ // Note: URL validation requires unique URLs by byte comparison, so
+ // strings are used here and the URLs are not normalized.
+ import_urls: HashSet<String>,
+ export_urls: HashSet<String>,
+
+ has_start: bool,
+ type_size: u32,
+}
+
+pub enum ExternKind {
+ Import,
+ Export,
+}
+
+impl ExternKind {
+ fn desc(&self) -> &'static str {
+ match self {
+ ExternKind::Import => "import",
+ ExternKind::Export => "export",
+ }
+ }
+}
+
+impl ComponentState {
+ pub fn type_count(&self) -> usize {
+ self.core_types.len() + self.types.len()
+ }
+
+ pub fn instance_count(&self) -> usize {
+ self.core_instances.len() + self.instances.len()
+ }
+
+ pub fn function_count(&self) -> usize {
+ self.core_funcs.len() + self.funcs.len()
+ }
+
+ pub fn add_core_type(
+ components: &mut [Self],
+ ty: crate::CoreType,
+ features: &WasmFeatures,
+ types: &mut TypeAlloc,
+ offset: usize,
+ check_limit: bool,
+ ) -> Result<()> {
+ let ty = match ty {
+ crate::CoreType::Func(ty) => Type::Func(ty),
+ crate::CoreType::Module(decls) => Type::Module(Self::create_module_type(
+ components,
+ decls.into_vec(),
+ features,
+ types,
+ offset,
+ )?),
+ };
+
+ let current = components.last_mut().unwrap();
+
+ if check_limit {
+ check_max(current.type_count(), 1, MAX_WASM_TYPES, "types", offset)?;
+ }
+
+ let id = types.push_defined(ty);
+ current.core_types.push(id);
+
+ Ok(())
+ }
+
+ pub fn add_core_module(
+ &mut self,
+ module: &Module,
+ types: &mut TypeAlloc,
+ offset: usize,
+ ) -> Result<()> {
+ let imports = module.imports_for_module_type(offset)?;
+
+ // We have to clone the module's imports and exports here
+ // because we cannot take the data out of the `MaybeOwned`
+ // as it might be shared with a function validator.
+ let ty = Type::Module(ModuleType {
+ type_size: module.type_size,
+ imports,
+ exports: module.exports.clone(),
+ });
+
+ let id = types.push_anon(ty);
+ self.core_modules.push(id);
+
+ Ok(())
+ }
+
+ pub fn add_core_instance(
+ &mut self,
+ instance: crate::Instance,
+ types: &mut TypeAlloc,
+ offset: usize,
+ ) -> Result<()> {
+ let instance = match instance {
+ crate::Instance::Instantiate { module_index, args } => {
+ self.instantiate_module(module_index, args.into_vec(), types, offset)?
+ }
+ crate::Instance::FromExports(exports) => {
+ self.instantiate_core_exports(exports.into_vec(), types, offset)?
+ }
+ };
+
+ self.core_instances.push(instance);
+
+ Ok(())
+ }
+
+ pub fn add_type(
+ components: &mut Vec<Self>,
+ ty: crate::ComponentType,
+ features: &WasmFeatures,
+ types: &mut TypeAlloc,
+ offset: usize,
+ check_limit: bool,
+ ) -> Result<()> {
+ assert!(!components.is_empty());
+ let ty = match ty {
+ crate::ComponentType::Defined(ty) => Type::Defined(
+ components
+ .last_mut()
+ .unwrap()
+ .create_defined_type(ty, types, offset)?,
+ ),
+ crate::ComponentType::Func(ty) => Type::ComponentFunc(
+ components
+ .last_mut()
+ .unwrap()
+ .create_function_type(ty, types, offset)?,
+ ),
+ crate::ComponentType::Component(decls) => Type::Component(Self::create_component_type(
+ components,
+ decls.into_vec(),
+ features,
+ types,
+ offset,
+ )?),
+ crate::ComponentType::Instance(decls) => Type::ComponentInstance(
+ Self::create_instance_type(components, decls.into_vec(), features, types, offset)?,
+ ),
+ };
+
+ let current = components.last_mut().unwrap();
+ if check_limit {
+ check_max(current.type_count(), 1, MAX_WASM_TYPES, "types", offset)?;
+ }
+
+ let id = types.push_defined(ty);
+ current.types.push(id);
+
+ Ok(())
+ }
+
+ pub fn add_import(
+ &mut self,
+ import: crate::ComponentImport,
+ types: &mut TypeAlloc,
+ offset: usize,
+ ) -> Result<()> {
+ let entity = self.check_type_ref(&import.ty, types, offset)?;
+ self.add_entity(entity, false, offset)?;
+ let name = to_kebab_str(import.name, "import", offset)?;
+
+ match self.externs.entry(name.to_owned()) {
+ Entry::Occupied(e) => {
+ bail!(
+ offset,
+ "import name `{name}` conflicts with previous {desc} name `{prev}`",
+ name = import.name,
+ prev = e.key(),
+ desc = e.get().2.desc(),
+ );
+ }
+ Entry::Vacant(e) => {
+ let url = parse_url(import.url, offset)?;
+ if let Some(url) = url.as_ref() {
+ if !self.import_urls.insert(url.to_string()) {
+ bail!(offset, "duplicate import URL `{url}`");
+ }
+ }
+
+ self.type_size = combine_type_sizes(self.type_size, entity.type_size(), offset)?;
+ e.insert((url, entity, ExternKind::Import));
+ }
+ }
+
+ Ok(())
+ }
+
+ fn add_entity(
+ &mut self,
+ ty: ComponentEntityType,
+ value_used: bool,
+ offset: usize,
+ ) -> Result<()> {
+ let (len, max, desc) = match ty {
+ ComponentEntityType::Module(id) => {
+ self.core_modules.push(id);
+ (self.core_modules.len(), MAX_WASM_MODULES, "modules")
+ }
+ ComponentEntityType::Component(id) => {
+ self.components.push(id);
+ (self.components.len(), MAX_WASM_COMPONENTS, "components")
+ }
+ ComponentEntityType::Instance(id) => {
+ self.instances.push(id);
+ (self.instance_count(), MAX_WASM_INSTANCES, "instances")
+ }
+ ComponentEntityType::Func(id) => {
+ self.funcs.push(id);
+ (self.function_count(), MAX_WASM_FUNCTIONS, "functions")
+ }
+ ComponentEntityType::Value(ty) => {
+ self.values.push((ty, value_used));
+ (self.values.len(), MAX_WASM_VALUES, "values")
+ }
+ ComponentEntityType::Type { created, .. } => {
+ self.types.push(created);
+ (self.types.len(), MAX_WASM_TYPES, "types")
+ }
+ };
+
+ check_max(len, 0, max, desc, offset)?;
+ Ok(())
+ }
+
+ pub fn add_export(
+ &mut self,
+ name: &str,
+ url: &str,
+ ty: ComponentEntityType,
+ offset: usize,
+ check_limit: bool,
+ ) -> Result<()> {
+ if check_limit {
+ check_max(
+ self.externs.len(),
+ 1,
+ MAX_WASM_EXPORTS,
+ "imports and exports",
+ offset,
+ )?;
+ }
+ self.add_entity(ty, true, offset)?;
+
+ let name = to_kebab_str(name, "export", offset)?;
+
+ match self.externs.entry(name.to_owned()) {
+ Entry::Occupied(e) => {
+ bail!(
+ offset,
+ "export name `{name}` conflicts with previous {desc} name `{prev}`",
+ prev = e.key(),
+ desc = e.get().2.desc(),
+ );
+ }
+ Entry::Vacant(e) => {
+ let url = parse_url(url, offset)?;
+ if let Some(url) = url.as_ref() {
+ if !self.export_urls.insert(url.to_string()) {
+ bail!(offset, "duplicate export URL `{url}`");
+ }
+ }
+
+ self.type_size = combine_type_sizes(self.type_size, ty.type_size(), offset)?;
+ e.insert((url, ty, ExternKind::Export));
+ }
+ }
+
+ Ok(())
+ }
+
+ pub fn lift_function(
+ &mut self,
+ core_func_index: u32,
+ type_index: u32,
+ options: Vec<CanonicalOption>,
+ types: &TypeList,
+ offset: usize,
+ ) -> Result<()> {
+ let ty = self.function_type_at(type_index, types, offset)?;
+ let core_ty = types[self.core_function_at(core_func_index, offset)?]
+ .as_func_type()
+ .unwrap();
+
+ // Lifting a function is for an export, so match the expected canonical ABI
+ // export signature
+ let info = ty.lower(types, false);
+ self.check_options(Some(core_ty), &info, &options, types, offset)?;
+
+ if core_ty.params() != info.params.as_slice() {
+ bail!(
+ offset,
+ "lowered parameter types `{:?}` do not match parameter types \
+ `{:?}` of core function {core_func_index}",
+ info.params.as_slice(),
+ core_ty.params(),
+ );
+ }
+
+ if core_ty.results() != info.results.as_slice() {
+ bail!(
+ offset,
+ "lowered result types `{:?}` do not match result types \
+ `{:?}` of core function {core_func_index}",
+ info.results.as_slice(),
+ core_ty.results()
+ );
+ }
+
+ self.funcs.push(self.types[type_index as usize]);
+
+ Ok(())
+ }
+
+ pub fn lower_function(
+ &mut self,
+ func_index: u32,
+ options: Vec<CanonicalOption>,
+ types: &mut TypeAlloc,
+ offset: usize,
+ ) -> Result<()> {
+ let ty = types[self.function_at(func_index, offset)?]
+ .as_component_func_type()
+ .unwrap();
+
+ // Lowering a function is for an import, so use a function type that matches
+ // the expected canonical ABI import signature.
+ let info = ty.lower(types, true);
+
+ self.check_options(None, &info, &options, types, offset)?;
+
+ let lowered_ty = Type::Func(info.into_func_type());
+
+ let id = types.push_anon(lowered_ty);
+ self.core_funcs.push(id);
+
+ Ok(())
+ }
+
+ pub fn add_component(&mut self, component: &mut Self, types: &mut TypeAlloc) {
+ let ty = Type::Component(component.take_component_type());
+ let id = types.push_anon(ty);
+ self.components.push(id);
+ }
+
+ pub fn add_instance(
+ &mut self,
+ instance: crate::ComponentInstance,
+ types: &mut TypeAlloc,
+ offset: usize,
+ ) -> Result<()> {
+ let instance = match instance {
+ crate::ComponentInstance::Instantiate {
+ component_index,
+ args,
+ } => self.instantiate_component(component_index, args.into_vec(), types, offset)?,
+ crate::ComponentInstance::FromExports(exports) => {
+ self.instantiate_exports(exports.into_vec(), types, offset)?
+ }
+ };
+
+ self.instances.push(instance);
+
+ Ok(())
+ }
+
+ pub fn add_alias(
+ components: &mut [Self],
+ alias: crate::ComponentAlias,
+ types: &mut TypeAlloc,
+ offset: usize,
+ ) -> Result<()> {
+ match alias {
+ crate::ComponentAlias::InstanceExport {
+ instance_index,
+ kind,
+ name,
+ } => components.last_mut().unwrap().alias_instance_export(
+ instance_index,
+ kind,
+ name,
+ types,
+ offset,
+ ),
+ crate::ComponentAlias::CoreInstanceExport {
+ instance_index,
+ kind,
+ name,
+ } => components.last_mut().unwrap().alias_core_instance_export(
+ instance_index,
+ kind,
+ name,
+ types,
+ offset,
+ ),
+ crate::ComponentAlias::Outer { kind, count, index } => match kind {
+ ComponentOuterAliasKind::CoreModule => {
+ Self::alias_module(components, count, index, offset)
+ }
+ ComponentOuterAliasKind::CoreType => {
+ Self::alias_core_type(components, count, index, types, offset)
+ }
+ ComponentOuterAliasKind::Type => {
+ Self::alias_type(components, count, index, types, offset)
+ }
+ ComponentOuterAliasKind::Component => {
+ Self::alias_component(components, count, index, offset)
+ }
+ },
+ }
+ }
+
+ pub fn add_start(
+ &mut self,
+ func_index: u32,
+ args: &[u32],
+ results: u32,
+ types: &TypeList,
+ offset: usize,
+ ) -> Result<()> {
+ if self.has_start {
+ return Err(BinaryReaderError::new(
+ "component cannot have more than one start function",
+ offset,
+ ));
+ }
+
+ let ft = types[self.function_at(func_index, offset)?]
+ .as_component_func_type()
+ .unwrap();
+
+ if ft.params.len() != args.len() {
+ bail!(
+ offset,
+ "component start function requires {} arguments but was given {}",
+ ft.params.len(),
+ args.len()
+ );
+ }
+
+ if ft.results.len() as u32 != results {
+ bail!(
+ offset,
+ "component start function has a result count of {results} \
+ but the function type has a result count of {type_results}",
+ type_results = ft.results.len(),
+ );
+ }
+
+ for (i, ((_, ty), arg)) in ft.params.iter().zip(args).enumerate() {
+ // Ensure the value's type is a subtype of the parameter type
+ if !ComponentValType::internal_is_subtype_of(
+ self.value_at(*arg, offset)?,
+ types,
+ ty,
+ types,
+ ) {
+ bail!(
+ offset,
+ "value type mismatch for component start function argument {i}"
+ );
+ }
+ }
+
+ for (_, ty) in ft.results.iter() {
+ self.values.push((*ty, false));
+ }
+
+ self.has_start = true;
+
+ Ok(())
+ }
+
+ fn check_options(
+ &self,
+ core_ty: Option<&FuncType>,
+ info: &LoweringInfo,
+ options: &[CanonicalOption],
+ types: &TypeList,
+ offset: usize,
+ ) -> Result<()> {
+ fn display(option: CanonicalOption) -> &'static str {
+ match option {
+ CanonicalOption::UTF8 => "utf8",
+ CanonicalOption::UTF16 => "utf16",
+ CanonicalOption::CompactUTF16 => "latin1-utf16",
+ CanonicalOption::Memory(_) => "memory",
+ CanonicalOption::Realloc(_) => "realloc",
+ CanonicalOption::PostReturn(_) => "post-return",
+ }
+ }
+
+ let mut encoding = None;
+ let mut memory = None;
+ let mut realloc = None;
+ let mut post_return = None;
+
+ for option in options {
+ match option {
+ CanonicalOption::UTF8 | CanonicalOption::UTF16 | CanonicalOption::CompactUTF16 => {
+ match encoding {
+ Some(existing) => {
+ bail!(
+ offset,
+ "canonical encoding option `{}` conflicts with option `{}`",
+ display(existing),
+ display(*option),
+ )
+ }
+ None => encoding = Some(*option),
+ }
+ }
+ CanonicalOption::Memory(idx) => {
+ memory = match memory {
+ None => {
+ self.memory_at(*idx, offset)?;
+ Some(*idx)
+ }
+ Some(_) => {
+ return Err(BinaryReaderError::new(
+ "canonical option `memory` is specified more than once",
+ offset,
+ ))
+ }
+ }
+ }
+ CanonicalOption::Realloc(idx) => {
+ realloc = match realloc {
+ None => {
+ let ty = types[self.core_function_at(*idx, offset)?]
+ .as_func_type()
+ .unwrap();
+ if ty.params()
+ != [ValType::I32, ValType::I32, ValType::I32, ValType::I32]
+ || ty.results() != [ValType::I32]
+ {
+ return Err(BinaryReaderError::new(
+ "canonical option `realloc` uses a core function with an incorrect signature",
+ offset,
+ ));
+ }
+ Some(*idx)
+ }
+ Some(_) => {
+ return Err(BinaryReaderError::new(
+ "canonical option `realloc` is specified more than once",
+ offset,
+ ))
+ }
+ }
+ }
+ CanonicalOption::PostReturn(idx) => {
+ post_return = match post_return {
+ None => {
+ let core_ty = core_ty.ok_or_else(|| {
+ BinaryReaderError::new(
+ "canonical option `post-return` cannot be specified for lowerings",
+ offset,
+ )
+ })?;
+
+ let ty = types[self.core_function_at(*idx, offset)?]
+ .as_func_type()
+ .unwrap();
+
+ if ty.params() != core_ty.results() || !ty.results().is_empty() {
+ return Err(BinaryReaderError::new(
+ "canonical option `post-return` uses a core function with an incorrect signature",
+ offset,
+ ));
+ }
+ Some(*idx)
+ }
+ Some(_) => {
+ return Err(BinaryReaderError::new(
+ "canonical option `post-return` is specified more than once",
+ offset,
+ ))
+ }
+ }
+ }
+ }
+ }
+
+ if info.requires_memory && memory.is_none() {
+ return Err(BinaryReaderError::new(
+ "canonical option `memory` is required",
+ offset,
+ ));
+ }
+
+ if info.requires_realloc && realloc.is_none() {
+ return Err(BinaryReaderError::new(
+ "canonical option `realloc` is required",
+ offset,
+ ));
+ }
+
+ Ok(())
+ }
+
+ fn check_type_ref(
+ &self,
+ ty: &ComponentTypeRef,
+ types: &mut TypeAlloc,
+ offset: usize,
+ ) -> Result<ComponentEntityType> {
+ Ok(match ty {
+ ComponentTypeRef::Module(index) => {
+ let id = self.type_at(*index, true, offset)?;
+ types[id].as_module_type().ok_or_else(|| {
+ format_err!(offset, "core type index {index} is not a module type")
+ })?;
+ ComponentEntityType::Module(id)
+ }
+ ComponentTypeRef::Func(index) => {
+ let id = self.type_at(*index, false, offset)?;
+ types[id].as_component_func_type().ok_or_else(|| {
+ format_err!(offset, "type index {index} is not a function type")
+ })?;
+ ComponentEntityType::Func(id)
+ }
+ ComponentTypeRef::Value(ty) => {
+ let ty = match ty {
+ crate::ComponentValType::Primitive(ty) => ComponentValType::Primitive(*ty),
+ crate::ComponentValType::Type(index) => {
+ ComponentValType::Type(self.defined_type_at(*index, types, offset)?)
+ }
+ };
+ ComponentEntityType::Value(ty)
+ }
+ ComponentTypeRef::Type(TypeBounds::Eq, index) => {
+ let referenced = self.type_at(*index, false, offset)?;
+ let created = types.with_unique(referenced);
+ ComponentEntityType::Type {
+ referenced,
+ created,
+ }
+ }
+ ComponentTypeRef::Instance(index) => {
+ let id = self.type_at(*index, false, offset)?;
+ types[id].as_component_instance_type().ok_or_else(|| {
+ format_err!(offset, "type index {index} is not an instance type")
+ })?;
+ ComponentEntityType::Instance(id)
+ }
+ ComponentTypeRef::Component(index) => {
+ let id = self.type_at(*index, false, offset)?;
+ types[id].as_component_type().ok_or_else(|| {
+ format_err!(offset, "type index {index} is not a component type")
+ })?;
+ ComponentEntityType::Component(id)
+ }
+ })
+ }
+
+ pub fn export_to_entity_type(
+ &mut self,
+ export: &crate::ComponentExport,
+ types: &mut TypeAlloc,
+ offset: usize,
+ ) -> Result<ComponentEntityType> {
+ let actual = match export.kind {
+ ComponentExternalKind::Module => {
+ ComponentEntityType::Module(self.module_at(export.index, offset)?)
+ }
+ ComponentExternalKind::Func => {
+ ComponentEntityType::Func(self.function_at(export.index, offset)?)
+ }
+ ComponentExternalKind::Value => {
+ ComponentEntityType::Value(*self.value_at(export.index, offset)?)
+ }
+ ComponentExternalKind::Type => {
+ let referenced = self.type_at(export.index, false, offset)?;
+ let created = types.with_unique(referenced);
+ ComponentEntityType::Type {
+ referenced,
+ created,
+ }
+ }
+ ComponentExternalKind::Instance => {
+ ComponentEntityType::Instance(self.instance_at(export.index, offset)?)
+ }
+ ComponentExternalKind::Component => {
+ ComponentEntityType::Component(self.component_at(export.index, offset)?)
+ }
+ };
+
+ let ascribed = match &export.ty {
+ Some(ty) => self.check_type_ref(ty, types, offset)?,
+ None => return Ok(actual),
+ };
+
+ if !ComponentEntityType::internal_is_subtype_of(&actual, types, &ascribed, types) {
+ bail!(
+ offset,
+ "ascribed type of export is not compatible with item's type"
+ );
+ }
+
+ Ok(ascribed)
+ }
+
+ fn create_module_type(
+ components: &[Self],
+ decls: Vec<crate::ModuleTypeDeclaration>,
+ features: &WasmFeatures,
+ types: &mut TypeAlloc,
+ offset: usize,
+ ) -> Result<ModuleType> {
+ let mut state = Module::default();
+
+ for decl in decls {
+ match decl {
+ crate::ModuleTypeDeclaration::Type(ty) => {
+ state.add_type(ty, features, types, offset, true)?;
+ }
+ crate::ModuleTypeDeclaration::Export { name, ty } => {
+ let ty = state.check_type_ref(&ty, features, types, offset)?;
+ state.add_export(name, ty, features, offset, true)?;
+ }
+ crate::ModuleTypeDeclaration::OuterAlias { kind, count, index } => {
+ if count > 1 {
+ return Err(BinaryReaderError::new(
+ "outer type aliases in module type declarations are limited to a maximum count of 1",
+ offset,
+ ));
+ }
+ match kind {
+ crate::OuterAliasKind::Type => {
+ let ty = if count == 0 {
+ // Local alias, check the local module state
+ state.type_at(index, offset)?
+ } else {
+ // Otherwise, check the enclosing component state
+ let component =
+ Self::check_alias_count(components, count - 1, offset)?;
+ component.type_at(index, true, offset)?
+ };
+
+ check_max(state.types.len(), 1, MAX_WASM_TYPES, "types", offset)?;
+
+ state.types.push(ty);
+ }
+ }
+ }
+ crate::ModuleTypeDeclaration::Import(import) => {
+ state.add_import(import, features, types, offset)?;
+ }
+ }
+ }
+
+ let imports = state.imports_for_module_type(offset)?;
+
+ Ok(ModuleType {
+ type_size: state.type_size,
+ imports,
+ exports: state.exports,
+ })
+ }
+
+ fn create_component_type(
+ components: &mut Vec<Self>,
+ decls: Vec<crate::ComponentTypeDeclaration>,
+ features: &WasmFeatures,
+ types: &mut TypeAlloc,
+ offset: usize,
+ ) -> Result<ComponentType> {
+ components.push(ComponentState::default());
+
+ for decl in decls {
+ match decl {
+ crate::ComponentTypeDeclaration::CoreType(ty) => {
+ Self::add_core_type(components, ty, features, types, offset, true)?;
+ }
+ crate::ComponentTypeDeclaration::Type(ty) => {
+ Self::add_type(components, ty, features, types, offset, true)?;
+ }
+ crate::ComponentTypeDeclaration::Export { name, url, ty } => {
+ let current = components.last_mut().unwrap();
+ let ty = current.check_type_ref(&ty, types, offset)?;
+ current.add_export(name, url, ty, offset, true)?;
+ }
+ crate::ComponentTypeDeclaration::Import(import) => {
+ components
+ .last_mut()
+ .unwrap()
+ .add_import(import, types, offset)?;
+ }
+ crate::ComponentTypeDeclaration::Alias(alias) => {
+ Self::add_alias(components, alias, types, offset)?;
+ }
+ };
+ }
+
+ let mut state = components.pop().unwrap();
+
+ Ok(state.take_component_type())
+ }
+
+ fn create_instance_type(
+ components: &mut Vec<Self>,
+ decls: Vec<crate::InstanceTypeDeclaration>,
+ features: &WasmFeatures,
+ types: &mut TypeAlloc,
+ offset: usize,
+ ) -> Result<ComponentInstanceType> {
+ components.push(ComponentState::default());
+
+ for decl in decls {
+ match decl {
+ crate::InstanceTypeDeclaration::CoreType(ty) => {
+ Self::add_core_type(components, ty, features, types, offset, true)?;
+ }
+ crate::InstanceTypeDeclaration::Type(ty) => {
+ Self::add_type(components, ty, features, types, offset, true)?;
+ }
+ crate::InstanceTypeDeclaration::Export { name, url, ty } => {
+ let current = components.last_mut().unwrap();
+ let ty = current.check_type_ref(&ty, types, offset)?;
+ current.add_export(name, url, ty, offset, true)?;
+ }
+ crate::InstanceTypeDeclaration::Alias(alias) => {
+ Self::add_alias(components, alias, types, offset)?;
+ }
+ };
+ }
+
+ let state = components.pop().unwrap();
+
+ Ok(ComponentInstanceType {
+ type_size: state.type_size,
+ kind: ComponentInstanceTypeKind::Defined(
+ state
+ .externs
+ .into_iter()
+ .filter_map(|(name, (url, ty, kind))| match kind {
+ ExternKind::Export => Some((name, (url, ty))),
+ ExternKind::Import => None,
+ })
+ .collect(),
+ ),
+ })
+ }
+
+ fn create_function_type(
+ &self,
+ ty: crate::ComponentFuncType,
+ types: &TypeList,
+ offset: usize,
+ ) -> Result<ComponentFuncType> {
+ let mut type_size = 1;
+
+ let mut set =
+ HashSet::with_capacity(std::cmp::max(ty.params.len(), ty.results.type_count()));
+
+ let params = ty
+ .params
+ .iter()
+ .map(|(name, ty)| {
+ let name = to_kebab_str(name, "function parameter", offset)?;
+ if !set.insert(name) {
+ bail!(
+ offset,
+ "function parameter name `{name}` conflicts with previous parameter name `{prev}`",
+ prev = set.get(&name).unwrap(),
+ );
+ }
+
+ let ty = self.create_component_val_type(*ty, types, offset)?;
+ type_size = combine_type_sizes(type_size, ty.type_size(), offset)?;
+ Ok((name.to_owned(), ty))
+ })
+ .collect::<Result<_>>()?;
+
+ set.clear();
+
+ let results = ty
+ .results
+ .iter()
+ .map(|(name, ty)| {
+ let name = name
+ .map(|name| {
+ let name = to_kebab_str(name, "function result", offset)?;
+ if !set.insert(name) {
+ bail!(
+ offset,
+ "function result name `{name}` conflicts with previous result name `{prev}`",
+ prev = set.get(name).unwrap(),
+ );
+ }
+
+ Ok(name.to_owned())
+ })
+ .transpose()?;
+
+ let ty = self.create_component_val_type(*ty, types, offset)?;
+ type_size = combine_type_sizes(type_size, ty.type_size(), offset)?;
+ Ok((name, ty))
+ })
+ .collect::<Result<_>>()?;
+
+ Ok(ComponentFuncType {
+ type_size,
+ params,
+ results,
+ })
+ }
+
+ fn instantiate_module(
+ &self,
+ module_index: u32,
+ module_args: Vec<crate::InstantiationArg>,
+ types: &mut TypeAlloc,
+ offset: usize,
+ ) -> Result<TypeId> {
+ fn insert_arg<'a>(
+ name: &'a str,
+ arg: &'a InstanceType,
+ args: &mut IndexMap<&'a str, &'a InstanceType>,
+ offset: usize,
+ ) -> Result<()> {
+ if args.insert(name, arg).is_some() {
+ bail!(
+ offset,
+ "duplicate module instantiation argument named `{name}`"
+ );
+ }
+
+ Ok(())
+ }
+
+ let module_type_id = self.module_at(module_index, offset)?;
+ let mut args = IndexMap::new();
+
+ // Populate the arguments
+ for module_arg in module_args {
+ match module_arg.kind {
+ InstantiationArgKind::Instance => {
+ let instance_type = types[self.core_instance_at(module_arg.index, offset)?]
+ .as_instance_type()
+ .unwrap();
+ insert_arg(module_arg.name, instance_type, &mut args, offset)?;
+ }
+ }
+ }
+
+ // Validate the arguments
+ let module_type = types[module_type_id].as_module_type().unwrap();
+ for ((module, name), expected) in module_type.imports.iter() {
+ let instance = args.get(module.as_str()).ok_or_else(|| {
+ format_err!(
+ offset,
+ "missing module instantiation argument named `{module}`"
+ )
+ })?;
+
+ let arg = instance
+ .internal_exports(types)
+ .get(name.as_str())
+ .ok_or_else(|| {
+ format_err!(
+ offset,
+ "module instantiation argument `{module}` does not \
+ export an item named `{name}`",
+ )
+ })?;
+
+ match (arg, expected) {
+ (EntityType::Func(_), EntityType::Func(_))
+ | (EntityType::Table(_), EntityType::Table(_))
+ | (EntityType::Memory(_), EntityType::Memory(_))
+ | (EntityType::Global(_), EntityType::Global(_))
+ | (EntityType::Tag(_), EntityType::Tag(_)) => {}
+ _ => {
+ bail!(
+ offset,
+ "module instantiation argument `{module}` exports \
+ an item named `{name}` but it is not a {}",
+ expected.desc()
+ )
+ }
+ }
+
+ if !EntityType::internal_is_subtype_of(arg, types, expected, types) {
+ bail!(
+ offset,
+ "{} type mismatch for export `{name}` of module \
+ instantiation argument `{module}`",
+ expected.desc(),
+ );
+ }
+ }
+
+ let ty = Type::Instance(InstanceType {
+ type_size: module_type
+ .exports
+ .iter()
+ .fold(1, |acc, (_, ty)| acc + ty.type_size()),
+ kind: InstanceTypeKind::Instantiated(module_type_id),
+ });
+
+ Ok(types.push_anon(ty))
+ }
+
+ fn instantiate_component(
+ &mut self,
+ component_index: u32,
+ component_args: Vec<crate::ComponentInstantiationArg>,
+ types: &mut TypeAlloc,
+ offset: usize,
+ ) -> Result<TypeId> {
+ fn insert_arg<'a>(
+ name: &'a str,
+ arg: ComponentEntityType,
+ args: &mut IndexMap<&'a KebabStr, ComponentEntityType>,
+ offset: usize,
+ ) -> Result<()> {
+ let name = to_kebab_str(name, "instantiation argument", offset)?;
+ match args.entry(name) {
+ Entry::Occupied(e) => {
+ bail!(
+ offset,
+ "instantiation argument `{name}` conflicts with previous argument `{prev}`",
+ prev = e.key()
+ );
+ }
+ Entry::Vacant(e) => {
+ e.insert(arg);
+ }
+ }
+
+ Ok(())
+ }
+
+ let component_type_id = self.component_at(component_index, offset)?;
+ let mut args = IndexMap::new();
+
+ // Populate the arguments
+ for component_arg in component_args {
+ match component_arg.kind {
+ ComponentExternalKind::Module => {
+ insert_arg(
+ component_arg.name,
+ ComponentEntityType::Module(self.module_at(component_arg.index, offset)?),
+ &mut args,
+ offset,
+ )?;
+ }
+ ComponentExternalKind::Component => {
+ insert_arg(
+ component_arg.name,
+ ComponentEntityType::Component(
+ self.component_at(component_arg.index, offset)?,
+ ),
+ &mut args,
+ offset,
+ )?;
+ }
+ ComponentExternalKind::Instance => {
+ insert_arg(
+ component_arg.name,
+ ComponentEntityType::Instance(
+ self.instance_at(component_arg.index, offset)?,
+ ),
+ &mut args,
+ offset,
+ )?;
+ }
+ ComponentExternalKind::Func => {
+ insert_arg(
+ component_arg.name,
+ ComponentEntityType::Func(self.function_at(component_arg.index, offset)?),
+ &mut args,
+ offset,
+ )?;
+ }
+ ComponentExternalKind::Value => {
+ insert_arg(
+ component_arg.name,
+ ComponentEntityType::Value(*self.value_at(component_arg.index, offset)?),
+ &mut args,
+ offset,
+ )?;
+ }
+ ComponentExternalKind::Type => {
+ let ty = self.type_at(component_arg.index, false, offset)?;
+ insert_arg(
+ component_arg.name,
+ ComponentEntityType::Type {
+ referenced: ty,
+ created: ty,
+ },
+ &mut args,
+ offset,
+ )?;
+ }
+ }
+ }
+
+ // Validate the arguments
+ let component_type = types[component_type_id].as_component_type().unwrap();
+ for (name, (_, expected)) in component_type.imports.iter() {
+ match args.get(&name.as_kebab_str()) {
+ Some(arg) => {
+ match (arg, expected) {
+ (ComponentEntityType::Module(_), ComponentEntityType::Module(_))
+ | (ComponentEntityType::Component(_), ComponentEntityType::Component(_))
+ | (ComponentEntityType::Instance(_), ComponentEntityType::Instance(_))
+ | (ComponentEntityType::Func(_), ComponentEntityType::Func(_))
+ | (ComponentEntityType::Value(_), ComponentEntityType::Value(_))
+ | (ComponentEntityType::Type { .. }, ComponentEntityType::Type { .. }) => {}
+ _ => {
+ bail!(
+ offset,
+ "expected component instantiation argument `{name}` to be a {desc}",
+ desc = expected.desc()
+ )
+ }
+ };
+
+ if !ComponentEntityType::internal_is_subtype_of(arg, types, expected, types) {
+ bail!(
+ offset,
+ "type mismatch for component instantiation argument `{name}`"
+ );
+ }
+ }
+ None => {
+ bail!(
+ offset,
+ "missing component instantiation argument named `{name}`"
+ );
+ }
+ }
+ }
+
+ let ty = Type::ComponentInstance(ComponentInstanceType {
+ type_size: component_type
+ .exports
+ .iter()
+ .fold(1, |acc, (_, (_, ty))| acc + ty.type_size()),
+ kind: ComponentInstanceTypeKind::Instantiated(component_type_id),
+ });
+
+ Ok(types.push_anon(ty))
+ }
+
+ fn instantiate_exports(
+ &mut self,
+ exports: Vec<crate::ComponentExport>,
+ types: &mut TypeAlloc,
+ offset: usize,
+ ) -> Result<TypeId> {
+ fn insert_export(
+ name: &str,
+ export: ComponentEntityType,
+ exports: &mut IndexMap<KebabString, (Option<Url>, ComponentEntityType)>,
+ type_size: &mut u32,
+ offset: usize,
+ ) -> Result<()> {
+ let name = to_kebab_str(name, "instance export", offset)?;
+ match exports.entry(name.to_owned()) {
+ Entry::Occupied(e) => bail!(
+ offset,
+ "instance export name `{name}` conflicts with previous export name `{prev}`",
+ prev = e.key()
+ ),
+ Entry::Vacant(e) => {
+ *type_size = combine_type_sizes(*type_size, export.type_size(), offset)?;
+ e.insert((None, export));
+ }
+ }
+
+ Ok(())
+ }
+
+ let mut type_size = 1;
+ let mut inst_exports = IndexMap::new();
+ for export in exports {
+ assert!(export.ty.is_none());
+ match export.kind {
+ ComponentExternalKind::Module => {
+ insert_export(
+ export.name,
+ ComponentEntityType::Module(self.module_at(export.index, offset)?),
+ &mut inst_exports,
+ &mut type_size,
+ offset,
+ )?;
+ }
+ ComponentExternalKind::Component => {
+ insert_export(
+ export.name,
+ ComponentEntityType::Component(self.component_at(export.index, offset)?),
+ &mut inst_exports,
+ &mut type_size,
+ offset,
+ )?;
+ }
+ ComponentExternalKind::Instance => {
+ insert_export(
+ export.name,
+ ComponentEntityType::Instance(self.instance_at(export.index, offset)?),
+ &mut inst_exports,
+ &mut type_size,
+ offset,
+ )?;
+ }
+ ComponentExternalKind::Func => {
+ insert_export(
+ export.name,
+ ComponentEntityType::Func(self.function_at(export.index, offset)?),
+ &mut inst_exports,
+ &mut type_size,
+ offset,
+ )?;
+ }
+ ComponentExternalKind::Value => {
+ insert_export(
+ export.name,
+ ComponentEntityType::Value(*self.value_at(export.index, offset)?),
+ &mut inst_exports,
+ &mut type_size,
+ offset,
+ )?;
+ }
+ ComponentExternalKind::Type => {
+ let ty = self.type_at(export.index, false, offset)?;
+ insert_export(
+ export.name,
+ ComponentEntityType::Type {
+ referenced: ty,
+ // The created type index here isn't used anywhere
+ // in index spaces because a "bag of exports"
+ // doesn't build up its own index spaces. Just fill
+ // in the same index here in this case as what's
+ // referenced.
+ created: ty,
+ },
+ &mut inst_exports,
+ &mut type_size,
+ offset,
+ )?;
+ }
+ }
+ }
+
+ let ty = Type::ComponentInstance(ComponentInstanceType {
+ type_size,
+ kind: ComponentInstanceTypeKind::Exports(inst_exports),
+ });
+
+ Ok(types.push_anon(ty))
+ }
+
+ fn instantiate_core_exports(
+ &mut self,
+ exports: Vec<crate::Export>,
+ types: &mut TypeAlloc,
+ offset: usize,
+ ) -> Result<TypeId> {
+ fn insert_export(
+ name: &str,
+ export: EntityType,
+ exports: &mut IndexMap<String, EntityType>,
+ type_size: &mut u32,
+ offset: usize,
+ ) -> Result<()> {
+ *type_size = combine_type_sizes(*type_size, export.type_size(), offset)?;
+
+ if exports.insert(name.to_string(), export).is_some() {
+ bail!(
+ offset,
+ "duplicate instantiation export name `{name}` already defined",
+ )
+ }
+
+ Ok(())
+ }
+
+ let mut type_size = 1;
+ let mut inst_exports = IndexMap::new();
+ for export in exports {
+ match export.kind {
+ ExternalKind::Func => {
+ insert_export(
+ export.name,
+ EntityType::Func(self.core_function_at(export.index, offset)?),
+ &mut inst_exports,
+ &mut type_size,
+ offset,
+ )?;
+ }
+ ExternalKind::Table => insert_export(
+ export.name,
+ EntityType::Table(*self.table_at(export.index, offset)?),
+ &mut inst_exports,
+ &mut type_size,
+ offset,
+ )?,
+ ExternalKind::Memory => insert_export(
+ export.name,
+ EntityType::Memory(*self.memory_at(export.index, offset)?),
+ &mut inst_exports,
+ &mut type_size,
+ offset,
+ )?,
+ ExternalKind::Global => {
+ insert_export(
+ export.name,
+ EntityType::Global(*self.global_at(export.index, offset)?),
+ &mut inst_exports,
+ &mut type_size,
+ offset,
+ )?;
+ }
+ ExternalKind::Tag => insert_export(
+ export.name,
+ EntityType::Tag(self.core_function_at(export.index, offset)?),
+ &mut inst_exports,
+ &mut type_size,
+ offset,
+ )?,
+ }
+ }
+
+ let ty = Type::Instance(InstanceType {
+ type_size,
+ kind: InstanceTypeKind::Exports(inst_exports),
+ });
+
+ Ok(types.push_anon(ty))
+ }
+
+ fn alias_core_instance_export(
+ &mut self,
+ instance_index: u32,
+ kind: ExternalKind,
+ name: &str,
+ types: &TypeList,
+ offset: usize,
+ ) -> Result<()> {
+ macro_rules! push_module_export {
+ ($expected:path, $collection:ident, $ty:literal) => {{
+ match self.core_instance_export(instance_index, name, types, offset)? {
+ $expected(ty) => {
+ self.$collection.push(*ty);
+ Ok(())
+ }
+ _ => {
+ bail!(
+ offset,
+ "export `{name}` for core instance {instance_index} is not a {}",
+ $ty
+ )
+ }
+ }
+ }};
+ }
+
+ match kind {
+ ExternalKind::Func => {
+ check_max(
+ self.function_count(),
+ 1,
+ MAX_WASM_FUNCTIONS,
+ "functions",
+ offset,
+ )?;
+ push_module_export!(EntityType::Func, core_funcs, "function")
+ }
+ ExternalKind::Table => {
+ check_max(self.core_tables.len(), 1, MAX_WASM_TABLES, "tables", offset)?;
+ push_module_export!(EntityType::Table, core_tables, "table")
+ }
+ ExternalKind::Memory => {
+ check_max(
+ self.core_memories.len(),
+ 1,
+ MAX_WASM_MEMORIES,
+ "memories",
+ offset,
+ )?;
+ push_module_export!(EntityType::Memory, core_memories, "memory")
+ }
+ ExternalKind::Global => {
+ check_max(
+ self.core_globals.len(),
+ 1,
+ MAX_WASM_GLOBALS,
+ "globals",
+ offset,
+ )?;
+ push_module_export!(EntityType::Global, core_globals, "global")
+ }
+ ExternalKind::Tag => {
+ check_max(self.core_tags.len(), 1, MAX_WASM_TAGS, "tags", offset)?;
+ push_module_export!(EntityType::Tag, core_tags, "tag")
+ }
+ }
+ }
+
+ fn alias_instance_export(
+ &mut self,
+ instance_index: u32,
+ kind: ComponentExternalKind,
+ name: &str,
+ types: &mut TypeAlloc,
+ offset: usize,
+ ) -> Result<()> {
+ let name = to_kebab_str(name, "alias export", offset)?;
+
+ macro_rules! push_component_export {
+ ($expected:path, $collection:ident, $ty:literal) => {{
+ match self.instance_export(instance_index, name, types, offset)? {
+ $expected(ty) => {
+ self.$collection.push(*ty);
+ Ok(())
+ }
+ _ => {
+ bail!(
+ offset,
+ "export `{name}` for instance {instance_index} is not a {}",
+ $ty
+ )
+ }
+ }
+ }};
+ }
+
+ match kind {
+ ComponentExternalKind::Module => {
+ check_max(
+ self.core_modules.len(),
+ 1,
+ MAX_WASM_MODULES,
+ "modules",
+ offset,
+ )?;
+ push_component_export!(ComponentEntityType::Module, core_modules, "module")
+ }
+ ComponentExternalKind::Component => {
+ check_max(
+ self.components.len(),
+ 1,
+ MAX_WASM_COMPONENTS,
+ "components",
+ offset,
+ )?;
+ push_component_export!(ComponentEntityType::Component, components, "component")
+ }
+ ComponentExternalKind::Instance => {
+ check_max(
+ self.instance_count(),
+ 1,
+ MAX_WASM_INSTANCES,
+ "instances",
+ offset,
+ )?;
+ push_component_export!(ComponentEntityType::Instance, instances, "instance")
+ }
+ ComponentExternalKind::Func => {
+ check_max(
+ self.function_count(),
+ 1,
+ MAX_WASM_FUNCTIONS,
+ "functions",
+ offset,
+ )?;
+ push_component_export!(ComponentEntityType::Func, funcs, "function")
+ }
+ ComponentExternalKind::Value => {
+ check_max(self.values.len(), 1, MAX_WASM_VALUES, "values", offset)?;
+ match self.instance_export(instance_index, name, types, offset)? {
+ ComponentEntityType::Value(ty) => {
+ self.values.push((*ty, false));
+ Ok(())
+ }
+ _ => bail!(
+ offset,
+ "export `{name}` for instance {instance_index} is not a value",
+ ),
+ }
+ }
+ ComponentExternalKind::Type => {
+ check_max(self.type_count(), 1, MAX_WASM_TYPES, "types", offset)?;
+ match *self.instance_export(instance_index, name, types, offset)? {
+ ComponentEntityType::Type { created, .. } => {
+ let id = types.with_unique(created);
+ self.types.push(id);
+ Ok(())
+ }
+ _ => {
+ bail!(
+ offset,
+ "export `{name}` for instance {instance_index} is not a type",
+ )
+ }
+ }
+ }
+ }
+ }
+
+ fn alias_module(components: &mut [Self], count: u32, index: u32, offset: usize) -> Result<()> {
+ let component = Self::check_alias_count(components, count, offset)?;
+ let ty = component.module_at(index, offset)?;
+
+ let current = components.last_mut().unwrap();
+ check_max(
+ current.core_modules.len(),
+ 1,
+ MAX_WASM_MODULES,
+ "modules",
+ offset,
+ )?;
+
+ current.core_modules.push(ty);
+ Ok(())
+ }
+
+ fn alias_component(
+ components: &mut [Self],
+ count: u32,
+ index: u32,
+ offset: usize,
+ ) -> Result<()> {
+ let component = Self::check_alias_count(components, count, offset)?;
+ let ty = component.component_at(index, offset)?;
+
+ let current = components.last_mut().unwrap();
+ check_max(
+ current.components.len(),
+ 1,
+ MAX_WASM_COMPONENTS,
+ "components",
+ offset,
+ )?;
+
+ current.components.push(ty);
+ Ok(())
+ }
+
+ fn alias_core_type(
+ components: &mut [Self],
+ count: u32,
+ index: u32,
+ types: &mut TypeAlloc,
+ offset: usize,
+ ) -> Result<()> {
+ let component = Self::check_alias_count(components, count, offset)?;
+ let ty = component.type_at(index, true, offset)?;
+
+ let current = components.last_mut().unwrap();
+ check_max(current.type_count(), 1, MAX_WASM_TYPES, "types", offset)?;
+
+ let id = types.with_unique(ty);
+ current.core_types.push(id);
+
+ Ok(())
+ }
+
+ fn alias_type(
+ components: &mut [Self],
+ count: u32,
+ index: u32,
+ types: &mut TypeAlloc,
+ offset: usize,
+ ) -> Result<()> {
+ let component = Self::check_alias_count(components, count, offset)?;
+ let ty = component.type_at(index, false, offset)?;
+
+ let current = components.last_mut().unwrap();
+ check_max(current.type_count(), 1, MAX_WASM_TYPES, "types", offset)?;
+
+ let id = types.with_unique(ty);
+ current.types.push(id);
+
+ Ok(())
+ }
+
+ fn check_alias_count(components: &[Self], count: u32, offset: usize) -> Result<&Self> {
+ let count = count as usize;
+ if count >= components.len() {
+ bail!(offset, "invalid outer alias count of {count}");
+ }
+
+ Ok(&components[components.len() - count - 1])
+ }
+
+ fn create_defined_type(
+ &self,
+ ty: crate::ComponentDefinedType,
+ types: &TypeList,
+ offset: usize,
+ ) -> Result<ComponentDefinedType> {
+ match ty {
+ crate::ComponentDefinedType::Primitive(ty) => Ok(ComponentDefinedType::Primitive(ty)),
+ crate::ComponentDefinedType::Record(fields) => {
+ self.create_record_type(fields.as_ref(), types, offset)
+ }
+ crate::ComponentDefinedType::Variant(cases) => {
+ self.create_variant_type(cases.as_ref(), types, offset)
+ }
+ crate::ComponentDefinedType::List(ty) => Ok(ComponentDefinedType::List(
+ self.create_component_val_type(ty, types, offset)?,
+ )),
+ crate::ComponentDefinedType::Tuple(tys) => {
+ self.create_tuple_type(tys.as_ref(), types, offset)
+ }
+ crate::ComponentDefinedType::Flags(names) => {
+ self.create_flags_type(names.as_ref(), offset)
+ }
+ crate::ComponentDefinedType::Enum(cases) => {
+ self.create_enum_type(cases.as_ref(), offset)
+ }
+ crate::ComponentDefinedType::Union(tys) => {
+ self.create_union_type(tys.as_ref(), types, offset)
+ }
+ crate::ComponentDefinedType::Option(ty) => Ok(ComponentDefinedType::Option(
+ self.create_component_val_type(ty, types, offset)?,
+ )),
+ crate::ComponentDefinedType::Result { ok, err } => Ok(ComponentDefinedType::Result {
+ ok: ok
+ .map(|ty| self.create_component_val_type(ty, types, offset))
+ .transpose()?,
+ err: err
+ .map(|ty| self.create_component_val_type(ty, types, offset))
+ .transpose()?,
+ }),
+ }
+ }
+
+ fn create_record_type(
+ &self,
+ fields: &[(&str, crate::ComponentValType)],
+ types: &TypeList,
+ offset: usize,
+ ) -> Result<ComponentDefinedType> {
+ let mut type_size = 1;
+ let mut field_map = IndexMap::with_capacity(fields.len());
+
+ for (name, ty) in fields {
+ let name = to_kebab_str(name, "record field", offset)?;
+ let ty = self.create_component_val_type(*ty, types, offset)?;
+
+ match field_map.entry(name.to_owned()) {
+ Entry::Occupied(e) => bail!(
+ offset,
+ "record field name `{name}` conflicts with previous field name `{prev}`",
+ prev = e.key()
+ ),
+ Entry::Vacant(e) => {
+ type_size = combine_type_sizes(type_size, ty.type_size(), offset)?;
+ e.insert(ty);
+ }
+ }
+ }
+
+ Ok(ComponentDefinedType::Record(RecordType {
+ type_size,
+ fields: field_map,
+ }))
+ }
+
+ fn create_variant_type(
+ &self,
+ cases: &[crate::VariantCase],
+ types: &TypeList,
+ offset: usize,
+ ) -> Result<ComponentDefinedType> {
+ let mut type_size = 1;
+ let mut case_map: IndexMap<KebabString, VariantCase> = IndexMap::with_capacity(cases.len());
+
+ if cases.is_empty() {
+ return Err(BinaryReaderError::new(
+ "variant type must have at least one case",
+ offset,
+ ));
+ }
+
+ if cases.len() > u32::MAX as usize {
+ return Err(BinaryReaderError::new(
+ "variant type cannot be represented with a 32-bit discriminant value",
+ offset,
+ ));
+ }
+
+ for (i, case) in cases.iter().enumerate() {
+ if let Some(refines) = case.refines {
+ if refines >= i as u32 {
+ return Err(BinaryReaderError::new(
+ "variant case can only refine a previously defined case",
+ offset,
+ ));
+ }
+ }
+
+ let name = to_kebab_str(case.name, "variant case", offset)?;
+
+ let ty = case
+ .ty
+ .map(|ty| self.create_component_val_type(ty, types, offset))
+ .transpose()?;
+
+ match case_map.entry(name.to_owned()) {
+ Entry::Occupied(e) => bail!(
+ offset,
+ "variant case name `{name}` conflicts with previous case name `{prev}`",
+ name = case.name,
+ prev = e.key()
+ ),
+ Entry::Vacant(e) => {
+ type_size = combine_type_sizes(
+ type_size,
+ ty.map(|ty| ty.type_size()).unwrap_or(1),
+ offset,
+ )?;
+
+ // Safety: the use of `KebabStr::new_unchecked` here is safe because the string
+ // was already verified to be kebab case.
+ e.insert(VariantCase {
+ ty,
+ refines: case
+ .refines
+ .map(|i| KebabStr::new_unchecked(cases[i as usize].name).to_owned()),
+ });
+ }
+ }
+ }
+
+ Ok(ComponentDefinedType::Variant(VariantType {
+ type_size,
+ cases: case_map,
+ }))
+ }
+
+ fn create_tuple_type(
+ &self,
+ tys: &[crate::ComponentValType],
+ types: &TypeList,
+ offset: usize,
+ ) -> Result<ComponentDefinedType> {
+ let mut type_size = 1;
+ let types = tys
+ .iter()
+ .map(|ty| {
+ let ty = self.create_component_val_type(*ty, types, offset)?;
+ type_size = combine_type_sizes(type_size, ty.type_size(), offset)?;
+ Ok(ty)
+ })
+ .collect::<Result<_>>()?;
+
+ Ok(ComponentDefinedType::Tuple(TupleType { type_size, types }))
+ }
+
+ fn create_flags_type(&self, names: &[&str], offset: usize) -> Result<ComponentDefinedType> {
+ let mut names_set = IndexSet::with_capacity(names.len());
+
+ for name in names {
+ let name = to_kebab_str(name, "flag", offset)?;
+ if !names_set.insert(name.to_owned()) {
+ bail!(
+ offset,
+ "flag name `{name}` conflicts with previous flag name `{prev}`",
+ prev = names_set.get(name).unwrap()
+ );
+ }
+ }
+
+ Ok(ComponentDefinedType::Flags(names_set))
+ }
+
+ fn create_enum_type(&self, cases: &[&str], offset: usize) -> Result<ComponentDefinedType> {
+ if cases.len() > u32::MAX as usize {
+ return Err(BinaryReaderError::new(
+ "enumeration type cannot be represented with a 32-bit discriminant value",
+ offset,
+ ));
+ }
+
+ let mut tags = IndexSet::with_capacity(cases.len());
+
+ for tag in cases {
+ let tag = to_kebab_str(tag, "enum tag", offset)?;
+ if !tags.insert(tag.to_owned()) {
+ bail!(
+ offset,
+ "enum tag name `{tag}` conflicts with previous tag name `{prev}`",
+ prev = tags.get(tag).unwrap()
+ );
+ }
+ }
+
+ Ok(ComponentDefinedType::Enum(tags))
+ }
+
+ fn create_union_type(
+ &self,
+ tys: &[crate::ComponentValType],
+ types: &TypeList,
+ offset: usize,
+ ) -> Result<ComponentDefinedType> {
+ let mut type_size = 1;
+ let types = tys
+ .iter()
+ .map(|ty| {
+ let ty = self.create_component_val_type(*ty, types, offset)?;
+ type_size = combine_type_sizes(type_size, ty.type_size(), offset)?;
+ Ok(ty)
+ })
+ .collect::<Result<_>>()?;
+
+ Ok(ComponentDefinedType::Union(UnionType { type_size, types }))
+ }
+
+ fn create_component_val_type(
+ &self,
+ ty: crate::ComponentValType,
+ types: &TypeList,
+ offset: usize,
+ ) -> Result<ComponentValType> {
+ Ok(match ty {
+ crate::ComponentValType::Primitive(pt) => ComponentValType::Primitive(pt),
+ crate::ComponentValType::Type(idx) => {
+ ComponentValType::Type(self.defined_type_at(idx, types, offset)?)
+ }
+ })
+ }
+
+ pub fn type_at(&self, idx: u32, core: bool, offset: usize) -> Result<TypeId> {
+ let types = if core { &self.core_types } else { &self.types };
+ types
+ .get(idx as usize)
+ .copied()
+ .ok_or_else(|| format_err!(offset, "unknown type {idx}: type index out of bounds"))
+ }
+
+ fn function_type_at<'a>(
+ &self,
+ idx: u32,
+ types: &'a TypeList,
+ offset: usize,
+ ) -> Result<&'a ComponentFuncType> {
+ types[self.type_at(idx, false, offset)?]
+ .as_component_func_type()
+ .ok_or_else(|| format_err!(offset, "type index {idx} is not a function type"))
+ }
+
+ fn function_at(&self, idx: u32, offset: usize) -> Result<TypeId> {
+ self.funcs.get(idx as usize).copied().ok_or_else(|| {
+ format_err!(
+ offset,
+ "unknown function {idx}: function index out of bounds"
+ )
+ })
+ }
+
+ fn component_at(&self, idx: u32, offset: usize) -> Result<TypeId> {
+ self.components.get(idx as usize).copied().ok_or_else(|| {
+ format_err!(
+ offset,
+ "unknown component {idx}: component index out of bounds"
+ )
+ })
+ }
+
+ fn instance_at(&self, idx: u32, offset: usize) -> Result<TypeId> {
+ self.instances.get(idx as usize).copied().ok_or_else(|| {
+ format_err!(
+ offset,
+ "unknown instance {idx}: instance index out of bounds"
+ )
+ })
+ }
+
+ fn instance_export<'a>(
+ &self,
+ instance_index: u32,
+ name: &KebabStr,
+ types: &'a TypeList,
+ offset: usize,
+ ) -> Result<&'a ComponentEntityType> {
+ match types[self.instance_at(instance_index, offset)?]
+ .as_component_instance_type()
+ .unwrap()
+ .internal_exports(types)
+ .get(name)
+ {
+ Some((_, ty)) => Ok(ty),
+ None => bail!(
+ offset,
+ "instance {instance_index} has no export named `{name}`"
+ ),
+ }
+ }
+
+ fn value_at(&mut self, idx: u32, offset: usize) -> Result<&ComponentValType> {
+ match self.values.get_mut(idx as usize) {
+ Some((ty, used)) if !*used => {
+ *used = true;
+ Ok(ty)
+ }
+ Some(_) => bail!(offset, "value {idx} cannot be used more than once"),
+ None => bail!(offset, "unknown value {idx}: value index out of bounds"),
+ }
+ }
+
+ fn defined_type_at(&self, idx: u32, types: &TypeList, offset: usize) -> Result<TypeId> {
+ let id = self.type_at(idx, false, offset)?;
+ match &types[id] {
+ Type::Defined(_) => Ok(id),
+ _ => bail!(offset, "type index {} is not a defined type", idx),
+ }
+ }
+
+ fn core_function_at(&self, idx: u32, offset: usize) -> Result<TypeId> {
+ match self.core_funcs.get(idx as usize) {
+ Some(id) => Ok(*id),
+ None => bail!(
+ offset,
+ "unknown core function {idx}: function index out of bounds"
+ ),
+ }
+ }
+
+ fn module_at(&self, idx: u32, offset: usize) -> Result<TypeId> {
+ match self.core_modules.get(idx as usize) {
+ Some(id) => Ok(*id),
+ None => bail!(offset, "unknown module {idx}: module index out of bounds"),
+ }
+ }
+
+ fn core_instance_at(&self, idx: u32, offset: usize) -> Result<TypeId> {
+ match self.core_instances.get(idx as usize) {
+ Some(id) => Ok(*id),
+ None => bail!(
+ offset,
+ "unknown core instance {idx}: instance index out of bounds"
+ ),
+ }
+ }
+
+ fn core_instance_export<'a>(
+ &self,
+ instance_index: u32,
+ name: &str,
+ types: &'a TypeList,
+ offset: usize,
+ ) -> Result<&'a EntityType> {
+ match types[self.core_instance_at(instance_index, offset)?]
+ .as_instance_type()
+ .unwrap()
+ .internal_exports(types)
+ .get(name)
+ {
+ Some(export) => Ok(export),
+ None => bail!(
+ offset,
+ "core instance {instance_index} has no export named `{name}`"
+ ),
+ }
+ }
+
+ fn global_at(&self, idx: u32, offset: usize) -> Result<&GlobalType> {
+ match self.core_globals.get(idx as usize) {
+ Some(t) => Ok(t),
+ None => bail!(offset, "unknown global {idx}: global index out of bounds"),
+ }
+ }
+
+ fn table_at(&self, idx: u32, offset: usize) -> Result<&TableType> {
+ match self.core_tables.get(idx as usize) {
+ Some(t) => Ok(t),
+ None => bail!(offset, "unknown table {idx}: table index out of bounds"),
+ }
+ }
+
+ fn memory_at(&self, idx: u32, offset: usize) -> Result<&MemoryType> {
+ match self.core_memories.get(idx as usize) {
+ Some(t) => Ok(t),
+ None => bail!(offset, "unknown memory {idx}: memory index out of bounds"),
+ }
+ }
+
+ fn take_component_type(&mut self) -> ComponentType {
+ let mut ty = ComponentType {
+ type_size: self.type_size,
+ imports: Default::default(),
+ exports: Default::default(),
+ };
+
+ for (name, (url, t, kind)) in mem::take(&mut self.externs) {
+ let map = match kind {
+ ExternKind::Import => &mut ty.imports,
+ ExternKind::Export => &mut ty.exports,
+ };
+ let prev = map.insert(name, (url, t));
+ assert!(prev.is_none());
+ }
+
+ ty
+ }
+}
+
+impl Default for ComponentState {
+ fn default() -> Self {
+ Self {
+ core_types: Default::default(),
+ core_modules: Default::default(),
+ core_instances: Default::default(),
+ core_funcs: Default::default(),
+ core_memories: Default::default(),
+ core_tables: Default::default(),
+ core_globals: Default::default(),
+ core_tags: Default::default(),
+ types: Default::default(),
+ funcs: Default::default(),
+ values: Default::default(),
+ instances: Default::default(),
+ components: Default::default(),
+ externs: Default::default(),
+ export_urls: Default::default(),
+ import_urls: Default::default(),
+ has_start: Default::default(),
+ type_size: 1,
+ }
+ }
+}
diff --git a/third_party/rust/wasmparser/src/validator/core.rs b/third_party/rust/wasmparser/src/validator/core.rs
new file mode 100644
index 0000000000..5707e1e73b
--- /dev/null
+++ b/third_party/rust/wasmparser/src/validator/core.rs
@@ -0,0 +1,1278 @@
+//! State relating to validating a WebAssembly module.
+//!
+use super::{
+ check_max, combine_type_sizes,
+ operators::{ty_to_str, OperatorValidator, OperatorValidatorAllocations},
+ types::{EntityType, Type, TypeAlloc, TypeId, TypeList},
+};
+use crate::limits::*;
+use crate::validator::core::arc::MaybeOwned;
+use crate::{
+ BinaryReaderError, ConstExpr, Data, DataKind, Element, ElementKind, ExternalKind, FuncType,
+ Global, GlobalType, HeapType, MemoryType, RefType, Result, Table, TableInit, TableType,
+ TagType, TypeRef, ValType, VisitOperator, WasmFeatures, WasmFuncType, WasmModuleResources,
+};
+use indexmap::IndexMap;
+use std::mem;
+use std::{collections::HashSet, sync::Arc};
+
+// Section order for WebAssembly modules.
+//
+// Component sections are unordered and allow for duplicates,
+// so this isn't used for components.
+#[derive(Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Debug)]
+pub enum Order {
+ Initial,
+ Type,
+ Import,
+ Function,
+ Table,
+ Memory,
+ Tag,
+ Global,
+ Export,
+ Start,
+ Element,
+ DataCount,
+ Code,
+ Data,
+}
+
+impl Default for Order {
+ fn default() -> Order {
+ Order::Initial
+ }
+}
+
+#[derive(Default)]
+pub(crate) struct ModuleState {
+ /// Internal state that is incrementally built-up for the module being
+ /// validated. This houses type information for all wasm items, like
+ /// functions. Note that this starts out as a solely owned `Arc<T>` so we can
+ /// get mutable access, but after we get to the code section this is never
+ /// mutated so we can clone it cheaply and hand it to sub-validators.
+ pub module: arc::MaybeOwned<Module>,
+
+ /// Where we are, order-wise, in the wasm binary.
+ order: Order,
+
+ /// The number of data segments in the data section (if present).
+ pub data_segment_count: u32,
+
+ /// The number of functions we expect to be defined in the code section, or
+ /// basically the length of the function section if it was found. The next
+ /// index is where we are, in the code section index space, for the next
+ /// entry in the code section (used to figure out what type is next for the
+ /// function being validated).
+ pub expected_code_bodies: Option<u32>,
+
+ const_expr_allocs: OperatorValidatorAllocations,
+
+ /// When parsing the code section, represents the current index in the section.
+ code_section_index: Option<usize>,
+}
+
+impl ModuleState {
+ pub fn update_order(&mut self, order: Order, offset: usize) -> Result<()> {
+ if self.order >= order {
+ return Err(BinaryReaderError::new("section out of order", offset));
+ }
+
+ self.order = order;
+
+ Ok(())
+ }
+
+ pub fn validate_end(&self, offset: usize) -> Result<()> {
+ // Ensure that the data count section, if any, was correct.
+ if let Some(data_count) = self.module.data_count {
+ if data_count != self.data_segment_count {
+ return Err(BinaryReaderError::new(
+ "data count and data section have inconsistent lengths",
+ offset,
+ ));
+ }
+ }
+ // Ensure that the function section, if nonzero, was paired with a code
+ // section with the appropriate length.
+ if let Some(n) = self.expected_code_bodies {
+ if n > 0 {
+ return Err(BinaryReaderError::new(
+ "function and code section have inconsistent lengths",
+ offset,
+ ));
+ }
+ }
+
+ Ok(())
+ }
+
+ pub fn next_code_index_and_type(&mut self, offset: usize) -> Result<(u32, u32)> {
+ let index = self
+ .code_section_index
+ .get_or_insert(self.module.num_imported_functions as usize);
+
+ if *index >= self.module.functions.len() {
+ return Err(BinaryReaderError::new(
+ "code section entry exceeds number of functions",
+ offset,
+ ));
+ }
+
+ let ty = self.module.functions[*index];
+ *index += 1;
+
+ Ok(((*index - 1) as u32, ty))
+ }
+
+ pub fn add_global(
+ &mut self,
+ global: Global,
+ features: &WasmFeatures,
+ types: &TypeList,
+ offset: usize,
+ ) -> Result<()> {
+ self.module
+ .check_global_type(&global.ty, features, types, offset)?;
+ self.check_const_expr(&global.init_expr, global.ty.content_type, features, types)?;
+ self.module.assert_mut().globals.push(global.ty);
+ Ok(())
+ }
+
+ pub fn add_table(
+ &mut self,
+ table: Table<'_>,
+ features: &WasmFeatures,
+ types: &TypeList,
+ offset: usize,
+ ) -> Result<()> {
+ self.module
+ .check_table_type(&table.ty, features, types, offset)?;
+
+ match &table.init {
+ TableInit::RefNull => {
+ if !table.ty.element_type.nullable {
+ bail!(offset, "type mismatch: non-defaultable element type");
+ }
+ }
+ TableInit::Expr(expr) => {
+ if !features.function_references {
+ bail!(
+ offset,
+ "tables with expression initializers require \
+ the function-references proposal"
+ );
+ }
+ self.check_const_expr(expr, table.ty.element_type.into(), features, types)?;
+ }
+ }
+ self.module.assert_mut().tables.push(table.ty);
+ Ok(())
+ }
+
+ pub fn add_data_segment(
+ &mut self,
+ data: Data,
+ features: &WasmFeatures,
+ types: &TypeList,
+ offset: usize,
+ ) -> Result<()> {
+ match data.kind {
+ DataKind::Passive => Ok(()),
+ DataKind::Active {
+ memory_index,
+ offset_expr,
+ } => {
+ let ty = self.module.memory_at(memory_index, offset)?.index_type();
+ self.check_const_expr(&offset_expr, ty, features, types)
+ }
+ }
+ }
+
+ pub fn add_element_segment(
+ &mut self,
+ e: Element,
+ features: &WasmFeatures,
+ types: &TypeList,
+ offset: usize,
+ ) -> Result<()> {
+ // the `funcref` value type is allowed all the way back to the MVP, so
+ // don't check it here
+ if e.ty != RefType::FUNCREF {
+ self.module
+ .check_value_type(ValType::Ref(e.ty), features, types, offset)?;
+ }
+ match e.kind {
+ ElementKind::Active {
+ table_index,
+ offset_expr,
+ } => {
+ let table = self.module.table_at(table_index, offset)?;
+ if !self
+ .module
+ .matches(ValType::Ref(e.ty), ValType::Ref(table.element_type), types)
+ {
+ return Err(BinaryReaderError::new(
+ format!(
+ "type mismatch: invalid element type `{}` for table type `{}`",
+ ty_to_str(e.ty.into()),
+ ty_to_str(table.element_type.into()),
+ ),
+ offset,
+ ));
+ }
+
+ self.check_const_expr(&offset_expr, ValType::I32, features, types)?;
+ }
+ ElementKind::Passive | ElementKind::Declared => {
+ if !features.bulk_memory {
+ return Err(BinaryReaderError::new(
+ "bulk memory must be enabled",
+ offset,
+ ));
+ }
+ }
+ }
+
+ let validate_count = |count: u32| -> Result<(), BinaryReaderError> {
+ if count > MAX_WASM_TABLE_ENTRIES as u32 {
+ Err(BinaryReaderError::new(
+ "number of elements is out of bounds",
+ offset,
+ ))
+ } else {
+ Ok(())
+ }
+ };
+ match e.items {
+ crate::ElementItems::Functions(reader) => {
+ let count = reader.count();
+ if !e.ty.nullable && count <= 0 {
+ return Err(BinaryReaderError::new(
+ "a non-nullable element must come with an initialization expression",
+ offset,
+ ));
+ }
+ validate_count(count)?;
+ for f in reader.into_iter_with_offsets() {
+ let (offset, f) = f?;
+ self.module.get_func_type(f, types, offset)?;
+ self.module.assert_mut().function_references.insert(f);
+ }
+ }
+ crate::ElementItems::Expressions(reader) => {
+ validate_count(reader.count())?;
+ for expr in reader {
+ self.check_const_expr(&expr?, ValType::Ref(e.ty), features, types)?;
+ }
+ }
+ }
+ self.module.assert_mut().element_types.push(e.ty);
+ Ok(())
+ }
+
+ fn check_const_expr(
+ &mut self,
+ expr: &ConstExpr<'_>,
+ expected_ty: ValType,
+ features: &WasmFeatures,
+ types: &TypeList,
+ ) -> Result<()> {
+ let mut validator = VisitConstOperator {
+ offset: 0,
+ order: self.order,
+ uninserted_funcref: false,
+ ops: OperatorValidator::new_const_expr(
+ features,
+ expected_ty,
+ mem::take(&mut self.const_expr_allocs),
+ ),
+ resources: OperatorValidatorResources {
+ types,
+ module: &mut self.module,
+ },
+ };
+
+ let mut ops = expr.get_operators_reader();
+ while !ops.eof() {
+ validator.offset = ops.original_position();
+ ops.visit_operator(&mut validator)??;
+ }
+ validator.ops.finish(ops.original_position())?;
+
+ // See comment in `RefFunc` below for why this is an assert.
+ assert!(!validator.uninserted_funcref);
+
+ self.const_expr_allocs = validator.ops.into_allocations();
+
+ return Ok(());
+
+ struct VisitConstOperator<'a> {
+ offset: usize,
+ uninserted_funcref: bool,
+ ops: OperatorValidator,
+ resources: OperatorValidatorResources<'a>,
+ order: Order,
+ }
+
+ impl VisitConstOperator<'_> {
+ fn validator(&mut self) -> impl VisitOperator<'_, Output = Result<()>> {
+ self.ops.with_resources(&self.resources, self.offset)
+ }
+
+ fn validate_extended_const(&mut self) -> Result<()> {
+ if self.ops.features.extended_const {
+ Ok(())
+ } else {
+ Err(BinaryReaderError::new(
+ "constant expression required: non-constant operator",
+ self.offset,
+ ))
+ }
+ }
+
+ fn validate_global(&mut self, index: u32) -> Result<()> {
+ let module = &self.resources.module;
+ let global = module.global_at(index, self.offset)?;
+ if index >= module.num_imported_globals {
+ return Err(BinaryReaderError::new(
+ "constant expression required: global.get of locally defined global",
+ self.offset,
+ ));
+ }
+ if global.mutable {
+ return Err(BinaryReaderError::new(
+ "constant expression required: global.get of mutable global",
+ self.offset,
+ ));
+ }
+ Ok(())
+ }
+
+ // Functions in initialization expressions are only valid in
+ // element segment initialization expressions and globals. In
+ // these contexts we want to record all function references.
+ //
+ // Initialization expressions can also be found in the data
+ // section, however. A `RefFunc` instruction in those situations
+ // is always invalid and needs to produce a validation error. In
+ // this situation, though, we can no longer modify
+ // the state since it's been "snapshot" already for
+ // parallel validation of functions.
+ //
+ // If we cannot modify the function references then this function
+ // *should* result in a validation error, but we defer that
+ // validation error to happen later. The `uninserted_funcref`
+ // boolean here is used to track this and will cause a panic
+ // (aka a fuzz bug) if we somehow forget to emit an error somewhere
+ // else.
+ fn insert_ref_func(&mut self, index: u32) {
+ if self.order == Order::Data {
+ self.uninserted_funcref = true;
+ } else {
+ self.resources
+ .module
+ .assert_mut()
+ .function_references
+ .insert(index);
+ }
+ }
+ }
+
+        // Generates the `VisitOperator` implementation used for validating
+        // constant expressions: a small set of operators is explicitly
+        // allowed (const values, `end`, extended-const arithmetic,
+        // `global.get`, `ref.func`) and every other operator falls through to
+        // the catch-all arm which produces a "non-constant operator" error.
+        macro_rules! define_visit_operator {
+            ($(@$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident)*) => {
+                $(
+                    #[allow(unused_variables)]
+                    fn $visit(&mut self $($(,$arg: $argty)*)?) -> Self::Output {
+                        define_visit_operator!(@visit self $visit $($($arg)*)?)
+                    }
+                )*
+            };
+
+            // These are always valid in const expressions
+            (@visit $self:ident visit_i32_const $val:ident) => {{
+                $self.validator().visit_i32_const($val)
+            }};
+            (@visit $self:ident visit_i64_const $val:ident) => {{
+                $self.validator().visit_i64_const($val)
+            }};
+            (@visit $self:ident visit_f32_const $val:ident) => {{
+                $self.validator().visit_f32_const($val)
+            }};
+            (@visit $self:ident visit_f64_const $val:ident) => {{
+                $self.validator().visit_f64_const($val)
+            }};
+            (@visit $self:ident visit_v128_const $val:ident) => {{
+                $self.validator().visit_v128_const($val)
+            }};
+            (@visit $self:ident visit_ref_null $val:ident) => {{
+                $self.validator().visit_ref_null($val)
+            }};
+            (@visit $self:ident visit_end) => {{
+                $self.validator().visit_end()
+            }};
+
+
+            // These are valid const expressions when the extended-const proposal is enabled.
+            (@visit $self:ident visit_i32_add) => {{
+                $self.validate_extended_const()?;
+                $self.validator().visit_i32_add()
+            }};
+            (@visit $self:ident visit_i32_sub) => {{
+                $self.validate_extended_const()?;
+                $self.validator().visit_i32_sub()
+            }};
+            (@visit $self:ident visit_i32_mul) => {{
+                $self.validate_extended_const()?;
+                $self.validator().visit_i32_mul()
+            }};
+            (@visit $self:ident visit_i64_add) => {{
+                $self.validate_extended_const()?;
+                $self.validator().visit_i64_add()
+            }};
+            (@visit $self:ident visit_i64_sub) => {{
+                $self.validate_extended_const()?;
+                $self.validator().visit_i64_sub()
+            }};
+            (@visit $self:ident visit_i64_mul) => {{
+                $self.validate_extended_const()?;
+                $self.validator().visit_i64_mul()
+            }};
+
+            // `global.get` is a valid const expression for imported, immutable
+            // globals.
+            (@visit $self:ident visit_global_get $idx:ident) => {{
+                $self.validate_global($idx)?;
+                $self.validator().visit_global_get($idx)
+            }};
+            // `ref.func`, if it's in a `global` initializer, will insert into
+            // the set of referenced functions so it's processed here.
+            (@visit $self:ident visit_ref_func $idx:ident) => {{
+                $self.insert_ref_func($idx);
+                $self.validator().visit_ref_func($idx)
+            }};
+
+            // Catch-all: any operator not listed above is not constant.
+            (@visit $self:ident $op:ident $($args:tt)*) => {{
+                Err(BinaryReaderError::new(
+                    "constant expression required: non-constant operator",
+                    $self.offset,
+                ))
+            }}
+        }
+
+        // Expand the macro above over every operator known to the crate so
+        // unlisted operators hit the catch-all error arm.
+        impl<'a> VisitOperator<'a> for VisitConstOperator<'a> {
+            type Output = Result<()>;
+
+            for_each_operator!(define_visit_operator);
+        }
+ }
+}
+
+// Validation state for a single core WebAssembly module: one `Vec` per index
+// space, plus bookkeeping for imports/exports and type-size accounting.
+pub(crate) struct Module {
+    // This is set once the code section starts.
+    // `WasmModuleResources` implementations use the snapshot to
+    // enable parallel validation of functions.
+    pub snapshot: Option<Arc<TypeList>>,
+    // Stores indexes into the validator's types list.
+    pub types: Vec<TypeId>,
+    pub tables: Vec<TableType>,
+    pub memories: Vec<MemoryType>,
+    pub globals: Vec<GlobalType>,
+    // Element type of each element segment, indexed by segment.
+    pub element_types: Vec<RefType>,
+    // Count declared by the data-count section, if present.
+    pub data_count: Option<u32>,
+    // Stores indexes into `types`.
+    pub functions: Vec<u32>,
+    // Tags, stored as the id of their underlying function type.
+    pub tags: Vec<TypeId>,
+    // Functions referenced by exports or by `ref.func` in const expressions;
+    // see `insert_ref_func` and `export_to_entity_type`.
+    pub function_references: HashSet<u32>,
+    // Imports keyed by (module, name); core wasm allows duplicates, which
+    // are rejected later by `imports_for_module_type` for components.
+    pub imports: IndexMap<(String, String), Vec<EntityType>>,
+    pub exports: IndexMap<String, EntityType>,
+    // Running combined size of imported/exported types, bounded via
+    // `combine_type_sizes`.
+    pub type_size: u32,
+    num_imported_globals: u32,
+    num_imported_functions: u32,
+}
+
+impl Module {
+    /// Validates a type-section entry and pushes it into this module's type
+    /// index space (and the shared `TypeAlloc`).
+    pub fn add_type(
+        &mut self,
+        ty: crate::Type,
+        features: &WasmFeatures,
+        types: &mut TypeAlloc,
+        offset: usize,
+        check_limit: bool,
+    ) -> Result<()> {
+        let ty = match ty {
+            crate::Type::Func(t) => {
+                // Each param/result must itself be valid under the enabled
+                // feature set.
+                for ty in t.params().iter().chain(t.results()) {
+                    self.check_value_type(*ty, features, types, offset)?;
+                }
+                if t.results().len() > 1 && !features.multi_value {
+                    return Err(BinaryReaderError::new(
+                        "func type returns multiple values but the multi-value feature is not enabled",
+                        offset,
+                    ));
+                }
+                Type::Func(t)
+            }
+        };
+
+        if check_limit {
+            check_max(self.types.len(), 1, MAX_WASM_TYPES, "types", offset)?;
+        }
+
+        let id = types.push_defined(ty);
+        self.types.push(id);
+        Ok(())
+    }
+
+    /// Validates an import and records its entity into the matching index
+    /// space, enforcing per-kind count limits.
+    pub fn add_import(
+        &mut self,
+        import: crate::Import,
+        features: &WasmFeatures,
+        types: &TypeList,
+        offset: usize,
+    ) -> Result<()> {
+        let entity = self.check_type_ref(&import.ty, features, types, offset)?;
+
+        let (len, max, desc) = match import.ty {
+            TypeRef::Func(type_index) => {
+                self.functions.push(type_index);
+                self.num_imported_functions += 1;
+                (self.functions.len(), MAX_WASM_FUNCTIONS, "functions")
+            }
+            TypeRef::Table(ty) => {
+                self.tables.push(ty);
+                (self.tables.len(), self.max_tables(features), "tables")
+            }
+            TypeRef::Memory(ty) => {
+                self.memories.push(ty);
+                (self.memories.len(), self.max_memories(features), "memories")
+            }
+            TypeRef::Tag(ty) => {
+                self.tags.push(self.types[ty.func_type_idx as usize]);
+                (self.tags.len(), MAX_WASM_TAGS, "tags")
+            }
+            TypeRef::Global(ty) => {
+                if !features.mutable_global && ty.mutable {
+                    return Err(BinaryReaderError::new(
+                        "mutable global support is not enabled",
+                        offset,
+                    ));
+                }
+                self.globals.push(ty);
+                self.num_imported_globals += 1;
+                (self.globals.len(), MAX_WASM_GLOBALS, "globals")
+            }
+        };
+
+        // `len` already counts the entry pushed above, so no extra count is
+        // added here (unlike `add_type`, which checks before pushing).
+        check_max(len, 0, max, desc, offset)?;
+
+        self.type_size = combine_type_sizes(self.type_size, entity.type_size(), offset)?;
+
+        // Duplicate (module, name) pairs are legal in core wasm, hence the
+        // `Vec` value.
+        self.imports
+            .entry((import.module.to_string(), import.name.to_string()))
+            .or_default()
+            .push(entity);
+
+        Ok(())
+    }
+
+    /// Records an export, rejecting duplicate export names and (when the
+    /// feature is disabled) mutable-global exports.
+    pub fn add_export(
+        &mut self,
+        name: &str,
+        ty: EntityType,
+        features: &WasmFeatures,
+        offset: usize,
+        check_limit: bool,
+    ) -> Result<()> {
+        if !features.mutable_global {
+            if let EntityType::Global(global_type) = ty {
+                if global_type.mutable {
+                    return Err(BinaryReaderError::new(
+                        "mutable global support is not enabled",
+                        offset,
+                    ));
+                }
+            }
+        }
+
+        if check_limit {
+            check_max(self.exports.len(), 1, MAX_WASM_EXPORTS, "exports", offset)?;
+        }
+
+        self.type_size = combine_type_sizes(self.type_size, ty.type_size(), offset)?;
+
+        // `IndexMap::insert` returns the previous value on duplicate keys,
+        // which is an error for exports.
+        match self.exports.insert(name.to_string(), ty) {
+            Some(_) => Err(format_err!(
+                offset,
+                "duplicate export name `{name}` already defined"
+            )),
+            None => Ok(()),
+        }
+    }
+
+    /// Records a function-section entry after checking that `type_index`
+    /// refers to a function type.
+    pub fn add_function(&mut self, type_index: u32, types: &TypeList, offset: usize) -> Result<()> {
+        self.func_type_at(type_index, types, offset)?;
+        self.functions.push(type_index);
+        Ok(())
+    }
+
+    /// Validates and records a defined (non-imported) memory.
+    pub fn add_memory(
+        &mut self,
+        ty: MemoryType,
+        features: &WasmFeatures,
+        offset: usize,
+    ) -> Result<()> {
+        self.check_memory_type(&ty, features, offset)?;
+        self.memories.push(ty);
+        Ok(())
+    }
+
+    /// Validates and records a defined tag; tags are stored as the id of
+    /// their underlying function type.
+    pub fn add_tag(
+        &mut self,
+        ty: TagType,
+        features: &WasmFeatures,
+        types: &TypeList,
+        offset: usize,
+    ) -> Result<()> {
+        self.check_tag_type(&ty, features, types, offset)?;
+        self.tags.push(self.types[ty.func_type_idx as usize]);
+        Ok(())
+    }
+
+    /// Bounds-checked lookup of the `TypeId` for local type index `idx`.
+    pub fn type_at(&self, idx: u32, offset: usize) -> Result<TypeId> {
+        self.types
+            .get(idx as usize)
+            .copied()
+            .ok_or_else(|| format_err!(offset, "unknown type {idx}: type index out of bounds"))
+    }
+
+    /// Resolves `type_index` to a `FuncType`, erroring if the index is out
+    /// of bounds or refers to a non-function type.
+    fn func_type_at<'a>(
+        &self,
+        type_index: u32,
+        types: &'a TypeList,
+        offset: usize,
+    ) -> Result<&'a FuncType> {
+        types[self.type_at(type_index, offset)?]
+            .as_func_type()
+            .ok_or_else(|| format_err!(offset, "type index {type_index} is not a function type"))
+    }
+
+    /// Validates an import's `TypeRef` and converts it into the
+    /// corresponding `EntityType` without mutating any index space.
+    pub fn check_type_ref(
+        &self,
+        type_ref: &TypeRef,
+        features: &WasmFeatures,
+        types: &TypeList,
+        offset: usize,
+    ) -> Result<EntityType> {
+        Ok(match type_ref {
+            TypeRef::Func(type_index) => {
+                // Validate the index first; the `self.types` access below is
+                // then in-bounds.
+                self.func_type_at(*type_index, types, offset)?;
+                EntityType::Func(self.types[*type_index as usize])
+            }
+            TypeRef::Table(t) => {
+                self.check_table_type(t, features, types, offset)?;
+                EntityType::Table(*t)
+            }
+            TypeRef::Memory(t) => {
+                self.check_memory_type(t, features, offset)?;
+                EntityType::Memory(*t)
+            }
+            TypeRef::Tag(t) => {
+                self.check_tag_type(t, features, types, offset)?;
+                EntityType::Tag(self.types[t.func_type_idx as usize])
+            }
+            TypeRef::Global(t) => {
+                self.check_global_type(t, features, types, offset)?;
+                EntityType::Global(*t)
+            }
+        })
+    }
+
+    /// Validates a table's element type and limits.
+    fn check_table_type(
+        &self,
+        ty: &TableType,
+        features: &WasmFeatures,
+        types: &TypeList,
+        offset: usize,
+    ) -> Result<()> {
+        // the `funcref` value type is allowed all the way back to the MVP, so
+        // don't check it here
+        if ty.element_type != RefType::FUNCREF {
+            self.check_value_type(ValType::Ref(ty.element_type), features, types, offset)?
+        }
+
+        self.check_limits(ty.initial, ty.maximum, offset)?;
+        if ty.initial > MAX_WASM_TABLE_ENTRIES as u32 {
+            return Err(BinaryReaderError::new(
+                "minimum table size is out of bounds",
+                offset,
+            ));
+        }
+        Ok(())
+    }
+
+    /// Validates a memory's limits against the 32/64-bit page caps and the
+    /// `memory64`/`threads` feature gates.
+    fn check_memory_type(
+        &self,
+        ty: &MemoryType,
+        features: &WasmFeatures,
+        offset: usize,
+    ) -> Result<()> {
+        self.check_limits(ty.initial, ty.maximum, offset)?;
+        // Pick the page-count cap (and its error message) based on the
+        // memory's index type.
+        let (true_maximum, err) = if ty.memory64 {
+            if !features.memory64 {
+                return Err(BinaryReaderError::new(
+                    "memory64 must be enabled for 64-bit memories",
+                    offset,
+                ));
+            }
+            (
+                MAX_WASM_MEMORY64_PAGES,
+                "memory size must be at most 2**48 pages",
+            )
+        } else {
+            (
+                MAX_WASM_MEMORY32_PAGES,
+                "memory size must be at most 65536 pages (4GiB)",
+            )
+        };
+        if ty.initial > true_maximum {
+            return Err(BinaryReaderError::new(err, offset));
+        }
+        if let Some(maximum) = ty.maximum {
+            if maximum > true_maximum {
+                return Err(BinaryReaderError::new(err, offset));
+            }
+        }
+        if ty.shared {
+            if !features.threads {
+                return Err(BinaryReaderError::new(
+                    "threads must be enabled for shared memories",
+                    offset,
+                ));
+            }
+            if ty.maximum.is_none() {
+                return Err(BinaryReaderError::new(
+                    "shared memory must have maximum size",
+                    offset,
+                ));
+            }
+        }
+        Ok(())
+    }
+
+    /// Flattens this module's imports into a one-entity-per-name map, as
+    /// required when this module is used as a component's module type.
+    pub(crate) fn imports_for_module_type(
+        &self,
+        offset: usize,
+    ) -> Result<IndexMap<(String, String), EntityType>> {
+        // Ensure imports are unique, which is a requirement of the component model
+        self.imports
+            .iter()
+            .map(|((module, name), types)| {
+                if types.len() != 1 {
+                    bail!(
+                        offset,
+                        "module has a duplicate import name `{module}:{name}` \
+                         that is not allowed in components",
+                    );
+                }
+                Ok(((module.clone(), name.clone()), types[0]))
+            })
+            .collect::<Result<_>>()
+    }
+
+    /// Checks a value type against enabled features, then (for references)
+    /// validates the referenced heap type as well.
+    fn check_value_type(
+        &self,
+        ty: ValType,
+        features: &WasmFeatures,
+        types: &TypeList,
+        offset: usize,
+    ) -> Result<()> {
+        match features.check_value_type(ty) {
+            Ok(()) => Ok(()),
+            Err(e) => Err(BinaryReaderError::new(e, offset)),
+        }?;
+        // The above only checks the value type for features.
+        // We must check it if it's a reference.
+        match ty {
+            ValType::Ref(rt) => {
+                self.check_ref_type(rt, types, offset)?;
+            }
+            _ => (),
+        }
+        Ok(())
+    }
+
+    /// Validates a reference type's heap type; concrete (`TypedFunc`)
+    /// indexes must refer to an existing function type.
+    fn check_ref_type(&self, ty: RefType, types: &TypeList, offset: usize) -> Result<()> {
+        // Check that the heap type is valid
+        match ty.heap_type {
+            HeapType::Func | HeapType::Extern => (),
+            HeapType::TypedFunc(type_index) => {
+                // Just check that the index is valid
+                self.func_type_at(type_index.into(), types, offset)?;
+            }
+        }
+        Ok(())
+    }
+
+    /// Structural equality of two value types; typed function references are
+    /// compared by the structure of their function types, not by index.
+    fn eq_valtypes(&self, ty1: ValType, ty2: ValType, types: &TypeList) -> bool {
+        match (ty1, ty2) {
+            (ValType::Ref(rt1), ValType::Ref(rt2)) => {
+                rt1.nullable == rt2.nullable
+                    && match (rt1.heap_type, rt2.heap_type) {
+                        (HeapType::Func, HeapType::Func) => true,
+                        (HeapType::Extern, HeapType::Extern) => true,
+                        (HeapType::TypedFunc(n1), HeapType::TypedFunc(n2)) => {
+                            // Indexes were validated earlier, so `unwrap` here
+                            // is an internal invariant (offset 0 is unused).
+                            let n1 = self.func_type_at(n1.into(), types, 0).unwrap();
+                            let n2 = self.func_type_at(n2.into(), types, 0).unwrap();
+                            self.eq_fns(n1, n2, types)
+                        }
+                        (_, _) => false,
+                    }
+            }
+            _ => ty1 == ty2,
+        }
+    }
+    /// Structural equality of two function types: equal input and output
+    /// arity, and pairwise structurally-equal value types.
+    fn eq_fns(&self, f1: &impl WasmFuncType, f2: &impl WasmFuncType, types: &TypeList) -> bool {
+        // Bug fix: the output-arity check previously compared `f2` against
+        // itself (`f2.len_outputs() == f2.len_outputs()`), which is always
+        // true. Since `zip` truncates to the shorter iterator, function types
+        // differing only in result arity would wrongly compare equal.
+        f1.len_inputs() == f2.len_inputs()
+            && f1.len_outputs() == f2.len_outputs()
+            && f1
+                .inputs()
+                .zip(f2.inputs())
+                .all(|(t1, t2)| self.eq_valtypes(t1, t2, types))
+            && f1
+                .outputs()
+                .zip(f2.outputs())
+                .all(|(t1, t2)| self.eq_valtypes(t1, t2, types))
+    }
+
+    /// Subtyping check: returns whether `ty1` is a subtype of (matches)
+    /// `ty2` under the function-references rules.
+    pub(crate) fn matches(&self, ty1: ValType, ty2: ValType, types: &TypeList) -> bool {
+        // Non-nullable refs are subtypes of nullable refs, not vice versa.
+        fn matches_null(null1: bool, null2: bool) -> bool {
+            (null1 == null2) || null2
+        }
+
+        let matches_heap = |ty1: HeapType, ty2: HeapType, types: &TypeList| -> bool {
+            match (ty1, ty2) {
+                (HeapType::TypedFunc(n1), HeapType::TypedFunc(n2)) => {
+                    // Check whether the defined types are (structurally) equivalent.
+                    let n1 = self.func_type_at(n1.into(), types, 0).unwrap();
+                    let n2 = self.func_type_at(n2.into(), types, 0).unwrap();
+                    self.eq_fns(n1, n2, types)
+                }
+                // Any typed function reference is a subtype of `funcref`.
+                (HeapType::TypedFunc(_), HeapType::Func) => true,
+                (_, _) => ty1 == ty2,
+            }
+        };
+
+        let matches_ref = |ty1: RefType, ty2: RefType, types: &TypeList| -> bool {
+            matches_heap(ty1.heap_type, ty2.heap_type, types)
+                && matches_null(ty1.nullable, ty2.nullable)
+        };
+
+        match (ty1, ty2) {
+            (ValType::Ref(rt1), ValType::Ref(rt2)) => matches_ref(rt1, rt2, types),
+            (_, _) => ty1 == ty2,
+        }
+    }
+
+    /// Validates a tag: requires the exceptions proposal and a function type
+    /// with no results.
+    fn check_tag_type(
+        &self,
+        ty: &TagType,
+        features: &WasmFeatures,
+        types: &TypeList,
+        offset: usize,
+    ) -> Result<()> {
+        if !features.exceptions {
+            return Err(BinaryReaderError::new(
+                "exceptions proposal not enabled",
+                offset,
+            ));
+        }
+        let ty = self.func_type_at(ty.func_type_idx, types, offset)?;
+        if !ty.results().is_empty() {
+            return Err(BinaryReaderError::new(
+                "invalid exception type: non-empty tag result type",
+                offset,
+            ));
+        }
+        Ok(())
+    }
+
+    /// Validates a global's content type; mutability is checked at the
+    /// import/export sites instead.
+    fn check_global_type(
+        &self,
+        ty: &GlobalType,
+        features: &WasmFeatures,
+        types: &TypeList,
+        offset: usize,
+    ) -> Result<()> {
+        self.check_value_type(ty.content_type, features, types, offset)
+    }
+
+    /// Generic limits check shared by tables and memories: `initial` must
+    /// not exceed `maximum` when a maximum is declared.
+    fn check_limits<T>(&self, initial: T, maximum: Option<T>, offset: usize) -> Result<()>
+    where
+        T: Into<u64>,
+    {
+        if let Some(max) = maximum {
+            if initial.into() > max.into() {
+                return Err(BinaryReaderError::new(
+                    "size minimum must not be greater than maximum",
+                    offset,
+                ));
+            }
+        }
+        Ok(())
+    }
+
+    /// Maximum table count: one without the reference-types proposal.
+    pub fn max_tables(&self, features: &WasmFeatures) -> usize {
+        if features.reference_types {
+            MAX_WASM_TABLES
+        } else {
+            1
+        }
+    }
+
+    /// Maximum memory count: one without the multi-memory proposal.
+    pub fn max_memories(&self, features: &WasmFeatures) -> usize {
+        if features.multi_memory {
+            MAX_WASM_MEMORIES
+        } else {
+            1
+        }
+    }
+
+    /// Resolves an export's kind/index to its `EntityType`, bounds-checking
+    /// the index. Exported functions are also marked as referenceable via
+    /// `function_references`.
+    pub fn export_to_entity_type(
+        &mut self,
+        export: &crate::Export,
+        offset: usize,
+    ) -> Result<EntityType> {
+        let check = |ty: &str, index: u32, total: usize| {
+            if index as usize >= total {
+                Err(format_err!(
+                    offset,
+                    "unknown {ty} {index}: exported {ty} index out of bounds",
+                ))
+            } else {
+                Ok(())
+            }
+        };
+
+        Ok(match export.kind {
+            ExternalKind::Func => {
+                check("function", export.index, self.functions.len())?;
+                // Exported functions may be targeted by `ref.func`.
+                self.function_references.insert(export.index);
+                EntityType::Func(self.types[self.functions[export.index as usize] as usize])
+            }
+            ExternalKind::Table => {
+                check("table", export.index, self.tables.len())?;
+                EntityType::Table(self.tables[export.index as usize])
+            }
+            ExternalKind::Memory => {
+                check("memory", export.index, self.memories.len())?;
+                EntityType::Memory(self.memories[export.index as usize])
+            }
+            ExternalKind::Global => {
+                check("global", export.index, self.globals.len())?;
+                EntityType::Global(self.globals[export.index as usize])
+            }
+            ExternalKind::Tag => {
+                check("tag", export.index, self.tags.len())?;
+                EntityType::Tag(self.tags[export.index as usize])
+            }
+        })
+    }
+
+    /// Looks up the function type of function index `func_idx` (imports and
+    /// defined functions share the same index space).
+    pub fn get_func_type<'a>(
+        &self,
+        func_idx: u32,
+        types: &'a TypeList,
+        offset: usize,
+    ) -> Result<&'a FuncType> {
+        match self.functions.get(func_idx as usize) {
+            Some(idx) => self.func_type_at(*idx, types, offset),
+            None => Err(format_err!(
+                offset,
+                "unknown function {func_idx}: func index out of bounds",
+            )),
+        }
+    }
+
+    /// Bounds-checked lookup of a global's type.
+    fn global_at(&self, idx: u32, offset: usize) -> Result<&GlobalType> {
+        match self.globals.get(idx as usize) {
+            Some(t) => Ok(t),
+            None => Err(format_err!(
+                offset,
+                "unknown global {idx}: global index out of bounds"
+            )),
+        }
+    }
+
+    /// Bounds-checked lookup of a table's type.
+    fn table_at(&self, idx: u32, offset: usize) -> Result<&TableType> {
+        match self.tables.get(idx as usize) {
+            Some(t) => Ok(t),
+            None => Err(format_err!(
+                offset,
+                "unknown table {idx}: table index out of bounds"
+            )),
+        }
+    }
+
+    /// Bounds-checked lookup of a memory's type.
+    fn memory_at(&self, idx: u32, offset: usize) -> Result<&MemoryType> {
+        match self.memories.get(idx as usize) {
+            Some(t) => Ok(t),
+            None => Err(format_err!(
+                offset,
+                "unknown memory {idx}: memory index out of bounds"
+            )),
+        }
+    }
+}
+
+impl Default for Module {
+    fn default() -> Self {
+        Self {
+            snapshot: Default::default(),
+            types: Default::default(),
+            tables: Default::default(),
+            memories: Default::default(),
+            globals: Default::default(),
+            element_types: Default::default(),
+            data_count: Default::default(),
+            functions: Default::default(),
+            tags: Default::default(),
+            function_references: Default::default(),
+            imports: Default::default(),
+            exports: Default::default(),
+            // Hand-written impl solely so `type_size` can start at 1 rather
+            // than the derived 0.
+            type_size: 1,
+            num_imported_globals: Default::default(),
+            num_imported_functions: Default::default(),
+        }
+    }
+}
+
+// Resources handed to the operator validator while the module is still
+// being built (mutable, pre-snapshot); contrast with `ValidatorResources`.
+struct OperatorValidatorResources<'a> {
+    module: &'a mut MaybeOwned<Module>,
+    types: &'a TypeList,
+}
+
+// Straightforward delegation of every `WasmModuleResources` query to the
+// in-progress `Module` and the shared `TypeList`.
+impl WasmModuleResources for OperatorValidatorResources<'_> {
+    type FuncType = crate::FuncType;
+
+    fn table_at(&self, at: u32) -> Option<TableType> {
+        self.module.tables.get(at as usize).cloned()
+    }
+
+    fn memory_at(&self, at: u32) -> Option<MemoryType> {
+        self.module.memories.get(at as usize).cloned()
+    }
+
+    fn tag_at(&self, at: u32) -> Option<&Self::FuncType> {
+        // Tags always hold function-type ids, so `as_func_type` can't fail.
+        Some(
+            self.types[*self.module.tags.get(at as usize)?]
+                .as_func_type()
+                .unwrap(),
+        )
+    }
+
+    fn global_at(&self, at: u32) -> Option<GlobalType> {
+        self.module.globals.get(at as usize).cloned()
+    }
+
+    fn func_type_at(&self, at: u32) -> Option<&Self::FuncType> {
+        Some(
+            self.types[*self.module.types.get(at as usize)?]
+                .as_func_type()
+                .unwrap(),
+        )
+    }
+
+    fn type_index_of_function(&self, at: u32) -> Option<u32> {
+        self.module.functions.get(at as usize).cloned()
+    }
+
+    fn type_of_function(&self, at: u32) -> Option<&Self::FuncType> {
+        self.func_type_at(self.type_index_of_function(at)?)
+    }
+
+    fn check_value_type(&self, t: ValType, features: &WasmFeatures, offset: usize) -> Result<()> {
+        self.module
+            .check_value_type(t, features, self.types, offset)
+    }
+
+    fn element_type_at(&self, at: u32) -> Option<RefType> {
+        self.module.element_types.get(at as usize).cloned()
+    }
+
+    fn matches(&self, t1: ValType, t2: ValType) -> bool {
+        self.module.matches(t1, t2, self.types)
+    }
+
+    fn element_count(&self) -> u32 {
+        self.module.element_types.len() as u32
+    }
+
+    fn data_count(&self) -> Option<u32> {
+        self.module.data_count
+    }
+
+    fn is_function_referenced(&self, idx: u32) -> bool {
+        self.module.function_references.contains(&idx)
+    }
+}
+
+/// The implementation of [`WasmModuleResources`] used by
+/// [`Validator`](crate::Validator).
+///
+/// Wraps the finished, shared (`Arc`) module so function bodies can be
+/// validated on other threads.
+pub struct ValidatorResources(pub(crate) Arc<Module>);
+
+// Same delegation as `OperatorValidatorResources`, but type lookups go
+// through the `snapshot` taken when the code section started; the `unwrap`s
+// on `snapshot` rely on that invariant.
+impl WasmModuleResources for ValidatorResources {
+    type FuncType = crate::FuncType;
+
+    fn table_at(&self, at: u32) -> Option<TableType> {
+        self.0.tables.get(at as usize).cloned()
+    }
+
+    fn memory_at(&self, at: u32) -> Option<MemoryType> {
+        self.0.memories.get(at as usize).cloned()
+    }
+
+    fn tag_at(&self, at: u32) -> Option<&Self::FuncType> {
+        Some(
+            self.0.snapshot.as_ref().unwrap()[*self.0.tags.get(at as usize)?]
+                .as_func_type()
+                .unwrap(),
+        )
+    }
+
+    fn global_at(&self, at: u32) -> Option<GlobalType> {
+        self.0.globals.get(at as usize).cloned()
+    }
+
+    fn func_type_at(&self, at: u32) -> Option<&Self::FuncType> {
+        Some(
+            self.0.snapshot.as_ref().unwrap()[*self.0.types.get(at as usize)?]
+                .as_func_type()
+                .unwrap(),
+        )
+    }
+
+    fn type_index_of_function(&self, at: u32) -> Option<u32> {
+        self.0.functions.get(at as usize).cloned()
+    }
+
+    fn type_of_function(&self, at: u32) -> Option<&Self::FuncType> {
+        self.func_type_at(self.type_index_of_function(at)?)
+    }
+
+    fn check_value_type(&self, t: ValType, features: &WasmFeatures, offset: usize) -> Result<()> {
+        self.0
+            .check_value_type(t, features, self.0.snapshot.as_ref().unwrap(), offset)
+    }
+
+    fn element_type_at(&self, at: u32) -> Option<RefType> {
+        self.0.element_types.get(at as usize).cloned()
+    }
+
+    fn matches(&self, t1: ValType, t2: ValType) -> bool {
+        self.0.matches(t1, t2, self.0.snapshot.as_ref().unwrap())
+    }
+
+    fn element_count(&self) -> u32 {
+        self.0.element_types.len() as u32
+    }
+
+    fn data_count(&self) -> Option<u32> {
+        self.0.data_count
+    }
+
+    fn is_function_referenced(&self, idx: u32) -> bool {
+        self.0.function_references.contains(&idx)
+    }
+}
+
+// Compile-time-only check: the functions below are never called.
+const _: () = {
+    fn assert_send<T: Send>() {}
+
+    // Assert that `ValidatorResources` is Send so function validation
+    // can be parallelizable
+    fn assert() {
+        assert_send::<ValidatorResources>();
+    }
+};
+
+mod arc {
+    //! `MaybeOwned<T>`: starts out uniquely owning a `T` (mutable via
+    //! `assert_mut`) and can be converted in place into a shared `Arc<T>`
+    //! via `arc()`, after which mutation is no longer possible.
+
+    use std::ops::Deref;
+    use std::sync::Arc;
+
+    enum Inner<T> {
+        Owned(T),
+        Shared(Arc<T>),
+
+        Empty, // Only used for swapping from owned to shared.
+    }
+
+    pub struct MaybeOwned<T> {
+        inner: Inner<T>,
+    }
+
+    impl<T> MaybeOwned<T> {
+        // `None` once the value has been shared.
+        #[inline]
+        fn as_mut(&mut self) -> Option<&mut T> {
+            match &mut self.inner {
+                Inner::Owned(x) => Some(x),
+                Inner::Shared(_) => None,
+                Inner::Empty => Self::unreachable(),
+            }
+        }
+
+        // Panics if the value has already been shared; callers assert the
+        // still-owned invariant.
+        #[inline]
+        pub fn assert_mut(&mut self) -> &mut T {
+            self.as_mut().unwrap()
+        }
+
+        // Converts to shared (if not already) and returns the `Arc`.
+        pub fn arc(&mut self) -> &Arc<T> {
+            self.make_shared();
+            match &self.inner {
+                Inner::Shared(x) => x,
+                _ => Self::unreachable(),
+            }
+        }
+
+        // One-way owned -> shared transition; `Inner::Empty` is only a
+        // transient state during the swap.
+        #[inline]
+        fn make_shared(&mut self) {
+            if let Inner::Shared(_) = self.inner {
+                return;
+            }
+
+            let inner = std::mem::replace(&mut self.inner, Inner::Empty);
+            let x = match inner {
+                Inner::Owned(x) => x,
+                _ => Self::unreachable(),
+            };
+            let x = Arc::new(x);
+            self.inner = Inner::Shared(x);
+        }
+
+        // Out-of-line panic path to keep the happy paths small.
+        #[cold]
+        #[inline(never)]
+        fn unreachable() -> ! {
+            unreachable!()
+        }
+    }
+
+    impl<T: Default> Default for MaybeOwned<T> {
+        fn default() -> MaybeOwned<T> {
+            MaybeOwned {
+                inner: Inner::Owned(T::default()),
+            }
+        }
+    }
+
+    impl<T> Deref for MaybeOwned<T> {
+        type Target = T;
+
+        fn deref(&self) -> &T {
+            match &self.inner {
+                Inner::Owned(x) => x,
+                Inner::Shared(x) => x,
+                Inner::Empty => Self::unreachable(),
+            }
+        }
+    }
+}
diff --git a/third_party/rust/wasmparser/src/validator/func.rs b/third_party/rust/wasmparser/src/validator/func.rs
new file mode 100644
index 0000000000..4d405f9615
--- /dev/null
+++ b/third_party/rust/wasmparser/src/validator/func.rs
@@ -0,0 +1,348 @@
+use super::operators::{Frame, OperatorValidator, OperatorValidatorAllocations};
+use crate::{BinaryReader, Result, ValType, VisitOperator};
+use crate::{FunctionBody, Operator, WasmFeatures, WasmModuleResources};
+
+/// Resources necessary to perform validation of a function.
+///
+/// This structure is created by
+/// [`Validator::code_section_entry`](crate::Validator::code_section_entry) and
+/// is created per-function in a WebAssembly module. This structure is suitable
+/// for sending to other threads while the original
+/// [`Validator`](crate::Validator) continues processing other functions.
+pub struct FuncToValidate<T> {
+    // Module-level type information used to resolve indices.
+    resources: T,
+    // Index of this function in the module's function index space.
+    index: u32,
+    // Type index of this function's signature.
+    ty: u32,
+    features: WasmFeatures,
+}
+
+impl<T: WasmModuleResources> FuncToValidate<T> {
+    /// Creates a new function to validate which will have the specified
+    /// configuration parameters:
+    ///
+    /// * `index` - the core wasm function index being validated
+    /// * `ty` - the core wasm type index of the function being validated,
+    ///   defining the results and parameters to the function.
+    /// * `resources` - metadata and type information about the module that
+    ///   this function is validated within.
+    /// * `features` - enabled WebAssembly features.
+    pub fn new(index: u32, ty: u32, resources: T, features: &WasmFeatures) -> FuncToValidate<T> {
+        FuncToValidate {
+            resources,
+            index,
+            ty,
+            features: *features,
+        }
+    }
+
+    /// Converts this [`FuncToValidate`] into a [`FuncValidator`] using the
+    /// `allocs` provided.
+    ///
+    /// This method, in conjunction with [`FuncValidator::into_allocations`],
+    /// provides a means to reuse allocations across validation of each
+    /// individual function. Note that it is also sufficient to call this
+    /// method with `Default::default()` if no prior allocations are
+    /// available.
+    ///
+    /// # Panics
+    ///
+    /// If a `FuncToValidate` was created with an invalid `ty` index then this
+    /// function will panic.
+    pub fn into_validator(self, allocs: FuncValidatorAllocations) -> FuncValidator<T> {
+        let FuncToValidate {
+            resources,
+            index,
+            ty,
+            features,
+        } = self;
+        // The `unwrap` here is the documented panic on an invalid `ty`.
+        let validator =
+            OperatorValidator::new_func(ty, 0, &features, &resources, allocs.0).unwrap();
+        FuncValidator {
+            validator,
+            resources,
+            index,
+        }
+    }
+}
+
+/// Validation context for a WebAssembly function.
+///
+/// This is a finalized validator which is ready to process a [`FunctionBody`].
+/// This is created from the [`FuncToValidate::into_validator`] method.
+pub struct FuncValidator<T> {
+    // Operator/stack validation state for this function body.
+    validator: OperatorValidator,
+    resources: T,
+    index: u32,
+}
+
+/// External handle to the internal allocations used during function validation.
+///
+/// This is created with either the `Default` implementation or with
+/// [`FuncValidator::into_allocations`]. It is then passed as an argument to
+/// [`FuncToValidate::into_validator`] to provide a means of reusing allocations
+/// between each function.
+#[derive(Default)]
+pub struct FuncValidatorAllocations(OperatorValidatorAllocations);
+
+impl<T: WasmModuleResources> FuncValidator<T> {
+    /// Convenience function to validate an entire function's body.
+    ///
+    /// You may not end up using this in final implementations because you'll
+    /// often want to interleave validation with parsing.
+    pub fn validate(&mut self, body: &FunctionBody<'_>) -> Result<()> {
+        let mut reader = body.get_binary_reader();
+        self.read_locals(&mut reader)?;
+        // 64-bit memarg offsets are only legal when memory64 is enabled.
+        reader.allow_memarg64(self.validator.features.memory64);
+        while !reader.eof() {
+            reader.visit_operator(&mut self.visitor(reader.original_position()))??;
+        }
+        self.finish(reader.original_position())
+    }
+
+    /// Reads the local definitions from the given `BinaryReader`, often sourced
+    /// from a `FunctionBody`.
+    ///
+    /// This function will automatically advance the `BinaryReader` forward,
+    /// leaving reading operators up to the caller afterwards.
+    pub fn read_locals(&mut self, reader: &mut BinaryReader<'_>) -> Result<()> {
+        // Locals are encoded as `count` run-length (cnt, type) pairs.
+        for _ in 0..reader.read_var_u32()? {
+            let offset = reader.original_position();
+            let cnt = reader.read()?;
+            let ty = reader.read()?;
+            self.define_locals(offset, cnt, ty)?;
+        }
+        Ok(())
+    }
+
+    /// Defines locals into this validator.
+    ///
+    /// This should be used if the application is already reading local
+    /// definitions and there's no need to re-parse the function again.
+    pub fn define_locals(&mut self, offset: usize, count: u32, ty: ValType) -> Result<()> {
+        self.validator
+            .define_locals(offset, count, ty, &self.resources)
+    }
+
+    /// Validates the next operator in a function.
+    ///
+    /// This functions is expected to be called once-per-operator in a
+    /// WebAssembly function. Each operator's offset in the original binary and
+    /// the operator itself are passed to this function to provide more useful
+    /// error messages.
+    pub fn op(&mut self, offset: usize, operator: &Operator<'_>) -> Result<()> {
+        self.visitor(offset).visit_operator(operator)
+    }
+
+    /// Get the operator visitor for the next operator in the function.
+    ///
+    /// The returned visitor is intended to visit just one instruction at the `offset`.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use wasmparser::{WasmModuleResources, FuncValidator, FunctionBody, Result};
+    /// pub fn validate<R>(validator: &mut FuncValidator<R>, body: &FunctionBody<'_>) -> Result<()>
+    /// where R: WasmModuleResources
+    /// {
+    ///     let mut operator_reader = body.get_binary_reader();
+    ///     while !operator_reader.eof() {
+    ///         let mut visitor = validator.visitor(operator_reader.original_position());
+    ///         operator_reader.visit_operator(&mut visitor)??;
+    ///     }
+    ///     validator.finish(operator_reader.original_position())
+    /// }
+    /// ```
+    pub fn visitor<'this, 'a: 'this>(
+        &'this mut self,
+        offset: usize,
+    ) -> impl VisitOperator<'a, Output = Result<()>> + 'this {
+        self.validator.with_resources(&self.resources, offset)
+    }
+
+    /// Function that must be called after the last opcode has been processed.
+    ///
+    /// This will validate that the function was properly terminated with the
+    /// `end` opcode. If this function is not called then the function will not
+    /// be properly validated.
+    ///
+    /// The `offset` provided to this function will be used as a position for an
+    /// error if validation fails.
+    pub fn finish(&mut self, offset: usize) -> Result<()> {
+        self.validator.finish(offset)
+    }
+
+    /// Returns the underlying module resources that this validator is using.
+    pub fn resources(&self) -> &T {
+        &self.resources
+    }
+
+    /// The index of the function within the module's function index space that
+    /// is being validated.
+    pub fn index(&self) -> u32 {
+        self.index
+    }
+
+    /// Returns the number of defined local variables in the function.
+    pub fn len_locals(&self) -> u32 {
+        self.validator.locals.len_locals()
+    }
+
+    /// Returns the type of the local variable at the given `index` if any.
+    pub fn get_local_type(&self, index: u32) -> Option<ValType> {
+        self.validator.locals.get(index)
+    }
+
+    /// Get the current height of the operand stack.
+    ///
+    /// This returns the height of the whole operand stack for this function,
+    /// not just for the current control frame.
+    pub fn operand_stack_height(&self) -> u32 {
+        self.validator.operand_stack_height() as u32
+    }
+
+    /// Returns the optional value type of the value operand at the given
+    /// `depth` from the top of the operand stack.
+    ///
+    /// - Returns `None` if the `depth` is out of bounds.
+    /// - Returns `Some(None)` if there is a value with unknown type
+    ///   at the given `depth`.
+    ///
+    /// # Note
+    ///
+    /// A `depth` of 0 will refer to the last operand on the stack.
+    pub fn get_operand_type(&self, depth: usize) -> Option<Option<ValType>> {
+        self.validator.peek_operand_at(depth)
+    }
+
+    /// Returns the number of frames on the control flow stack.
+    ///
+    /// This returns the height of the whole control stack for this function,
+    /// not just for the current control frame.
+    pub fn control_stack_height(&self) -> u32 {
+        self.validator.control_stack_height() as u32
+    }
+
+    /// Returns a shared reference to the control flow [`Frame`] of the
+    /// control flow stack at the given `depth` if any.
+    ///
+    /// Returns `None` if the `depth` is out of bounds.
+    ///
+    /// # Note
+    ///
+    /// A `depth` of 0 will refer to the last frame on the stack.
+    pub fn get_control_frame(&self, depth: usize) -> Option<&Frame> {
+        self.validator.get_frame(depth)
+    }
+
+    /// Consumes this validator and returns the underlying allocations that
+    /// were used during the validation process.
+    ///
+    /// The returned value here can be paired with
+    /// [`FuncToValidate::into_validator`] to reuse the allocations already
+    /// created by this validator.
+    pub fn into_allocations(self) -> FuncValidatorAllocations {
+        FuncValidatorAllocations(self.validator.into_allocations())
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::WasmFuncType;
+
+    // Minimal `WasmModuleResources` stub: only `func_type_at` and
+    // `check_value_type` are exercised by the test below; everything else
+    // intentionally panics via `todo!()`.
+    struct EmptyResources;
+
+    impl WasmModuleResources for EmptyResources {
+        type FuncType = EmptyFuncType;
+
+        fn table_at(&self, _at: u32) -> Option<crate::TableType> {
+            todo!()
+        }
+        fn memory_at(&self, _at: u32) -> Option<crate::MemoryType> {
+            todo!()
+        }
+        fn tag_at(&self, _at: u32) -> Option<&Self::FuncType> {
+            todo!()
+        }
+        fn global_at(&self, _at: u32) -> Option<crate::GlobalType> {
+            todo!()
+        }
+        fn func_type_at(&self, _type_idx: u32) -> Option<&Self::FuncType> {
+            Some(&EmptyFuncType)
+        }
+        fn type_index_of_function(&self, _at: u32) -> Option<u32> {
+            todo!()
+        }
+        fn type_of_function(&self, _func_idx: u32) -> Option<&Self::FuncType> {
+            todo!()
+        }
+        fn check_value_type(
+            &self,
+            _t: ValType,
+            _features: &WasmFeatures,
+            _offset: usize,
+        ) -> Result<()> {
+            Ok(())
+        }
+        fn element_type_at(&self, _at: u32) -> Option<crate::RefType> {
+            todo!()
+        }
+        fn matches(&self, _t1: ValType, _t2: ValType) -> bool {
+            todo!()
+        }
+        fn element_count(&self) -> u32 {
+            todo!()
+        }
+        fn data_count(&self) -> Option<u32> {
+            todo!()
+        }
+        fn is_function_referenced(&self, _idx: u32) -> bool {
+            todo!()
+        }
+    }
+
+    // A `() -> ()` function type.
+    struct EmptyFuncType;
+
+    impl WasmFuncType for EmptyFuncType {
+        fn len_inputs(&self) -> usize {
+            0
+        }
+        fn len_outputs(&self) -> usize {
+            0
+        }
+        fn input_at(&self, _at: u32) -> Option<ValType> {
+            todo!()
+        }
+        fn output_at(&self, _at: u32) -> Option<ValType> {
+            todo!()
+        }
+    }
+
+    #[test]
+    fn operand_stack_height() {
+        let mut v = FuncToValidate::new(0, 0, EmptyResources, &Default::default())
+            .into_validator(Default::default());
+
+        // Initially zero values on the stack.
+        assert_eq!(v.operand_stack_height(), 0);
+
+        // Pushing a constant value makes us have one value on the stack.
+        assert!(v.op(0, &Operator::I32Const { value: 0 }).is_ok());
+        assert_eq!(v.operand_stack_height(), 1);
+
+        // Entering a new control block does not affect the stack height.
+        assert!(v
+            .op(
+                1,
+                &Operator::Block {
+                    blockty: crate::BlockType::Empty
+                }
+            )
+            .is_ok());
+        assert_eq!(v.operand_stack_height(), 1);
+
+        // Pushing another constant value makes us have two values on the stack.
+        assert!(v.op(2, &Operator::I32Const { value: 99 }).is_ok());
+        assert_eq!(v.operand_stack_height(), 2);
+    }
+}
diff --git a/third_party/rust/wasmparser/src/validator/operators.rs b/third_party/rust/wasmparser/src/validator/operators.rs
new file mode 100644
index 0000000000..54fee8acc6
--- /dev/null
+++ b/third_party/rust/wasmparser/src/validator/operators.rs
@@ -0,0 +1,3474 @@
+/* Copyright 2019 Mozilla Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// The basic validation algorithm here is copied from the "Validation
+// Algorithm" section of the WebAssembly specification -
+// https://webassembly.github.io/spec/core/appendix/algorithm.html.
+//
+// That algorithm is followed pretty closely here, namely `push_operand`,
+// `pop_operand`, `push_ctrl`, and `pop_ctrl`. If anything here is a bit
+// confusing it's recommended to read over that section to see how it maps to
+// the various methods here.
+
+use crate::{
+ limits::MAX_WASM_FUNCTION_LOCALS, BinaryReaderError, BlockType, BrTable, HeapType, Ieee32,
+ Ieee64, MemArg, RefType, Result, ValType, VisitOperator, WasmFeatures, WasmFuncType,
+ WasmModuleResources, V128,
+};
+use std::ops::{Deref, DerefMut};
+
+pub(crate) struct OperatorValidator {
+ // Types of all locals in the function being validated (parameters first).
+ pub(super) locals: Locals,
+ // Per-local "is initialized" flag, indexed by local index; locals with
+ // non-defaultable types start out uninitialized (see `define_locals`).
+ pub(super) local_inits: Vec<bool>,
+
+ // This is a list of flags for wasm features which are used to gate various
+ // instructions.
+ pub(crate) features: WasmFeatures,
+
+ // Temporary storage used during the validation of `br_table`.
+ br_table_tmp: Vec<MaybeType>,
+
+ /// The `control` list is the list of blocks that we're currently in.
+ control: Vec<Frame>,
+ /// The `operands` is the current type stack.
+ operands: Vec<MaybeType>,
+ /// When local_inits is modified, the relevant index is recorded here to be
+ /// undone when control pops
+ inits: Vec<u32>,
+
+ /// Offset of the `end` instruction which emptied the `control` stack, which
+ /// must be the end of the function.
+ end_which_emptied_control: Option<usize>,
+}
+
+// Bound on how many locals are mirrored into the fast-path `Locals::first`
+// vector (see `Locals` below).
+//
+// No science was performed in the creation of this number, feel free to change
+// it if you so like.
+const MAX_LOCALS_TO_TRACK: usize = 50;
+
+// Maps a local's index to its type, with a fast path for low indices.
+pub(super) struct Locals {
+ // Total number of locals in the function.
+ num_locals: u32,
+
+ // The first MAX_LOCALS_TO_TRACK locals in a function. This is used to
+ // optimize the theoretically common case where most functions don't have
+ // many locals and don't need a full binary search in the entire local space
+ // below.
+ first: Vec<ValType>,
+
+ // This is a "compressed" list of locals for this function. The list of
+ // locals are represented as a list of tuples. The second element is the
+ // type of the local, and the first element is monotonically increasing as
+ // you visit elements of this list. The first element is the maximum index
+ // of the local, after the previous index, of the type specified.
+ //
+ // This allows us to do a binary search on the list for a local's index for
+ // `local.{get,set,tee}`. We do a binary search for the index desired, and
+ // it either lies in a "hole" where the maximum index is specified later,
+ // or it's at the end of the list meaning it's out of bounds.
+ all: Vec<(u32, ValType)>,
+}
+
+/// A Wasm control flow block on the control flow stack during Wasm validation.
+//
+// # Dev. Note
+//
+// This structure corresponds to `ctrl_frame` as specified at in the validation
+// appendix of the wasm spec
+#[derive(Debug, Copy, Clone)]
+pub struct Frame {
+ /// Indicator for what kind of instruction pushed this frame.
+ pub kind: FrameKind,
+ /// The type signature of this frame, represented as a singular return type
+ /// or a type index pointing into the module's types.
+ pub block_type: BlockType,
+ /// The index, below which, this frame cannot modify the operand stack.
+ pub height: usize,
+ /// Whether this frame is unreachable so far.
+ pub unreachable: bool,
+ /// The number of initializations in the stack at the time of its creation
+ /// (the length of the validator's `inits` list, used to undo
+ /// `local_inits` updates when this frame is popped).
+ pub init_height: usize,
+}
+
+/// The kind of a control flow [`Frame`].
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum FrameKind {
+ /// A Wasm `block` control block.
+ Block,
+ /// A Wasm `if` control block.
+ If,
+ /// A Wasm `else` control block (the second arm of an `if`).
+ Else,
+ /// A Wasm `loop` control block.
+ Loop,
+ /// A Wasm `try` control block.
+ ///
+ /// # Note
+ ///
+ /// This belongs to the Wasm exception handling proposal.
+ Try,
+ /// A Wasm `catch` control block.
+ ///
+ /// # Note
+ ///
+ /// This belongs to the Wasm exception handling proposal.
+ Catch,
+ /// A Wasm `catch_all` control block.
+ ///
+ /// # Note
+ ///
+ /// This belongs to the Wasm exception handling proposal.
+ CatchAll,
+}
+
+/// A short-lived pairing of the validator state with the module's resources
+/// and the offset of the instruction currently being validated. All per-opcode
+/// validation happens through this type.
+struct OperatorValidatorTemp<'validator, 'resources, T> {
+ // Byte offset of the current instruction, used in error messages.
+ offset: usize,
+ inner: &'validator mut OperatorValidator,
+ resources: &'resources T,
+}
+
+/// Heap allocations recycled across validations of multiple functions; see
+/// `OperatorValidator::into_allocations`, which returns the buffers (cleared,
+/// capacity retained) once a function is done.
+#[derive(Default)]
+pub struct OperatorValidatorAllocations {
+ br_table_tmp: Vec<MaybeType>,
+ control: Vec<Frame>,
+ operands: Vec<MaybeType>,
+ local_inits: Vec<bool>,
+ inits: Vec<u32>,
+ locals_first: Vec<ValType>,
+ locals_all: Vec<(u32, ValType)>,
+}
+
+/// Type storage within the validator.
+///
+/// This is used to manage the operand stack and notably isn't just `ValType` to
+/// handle unreachable code and the "bottom" type.
+#[derive(Debug, Copy, Clone)]
+enum MaybeType {
+ /// The bottom type: produced by unreachable code, matches any expectation.
+ Bot,
+ /// Bottom for references only: matches any reference type but no numeric
+ /// or vector type (see `_pop_operand`).
+ HeapBot,
+ /// A concrete value type.
+ Type(ValType),
+}
+
+// The validator is pretty performance-sensitive and `MaybeType` is the main
+// unit of storage, so assert that it doesn't exceed 4 bytes which is the
+// current expected size. (Compile-time check: fails the build if the enum
+// layout ever grows.)
+const _: () = {
+    assert!(std::mem::size_of::<MaybeType>() == 4);
+};
+
+impl From<ValType> for MaybeType {
+ /// Wraps a concrete value type in the operand-stack representation.
+ fn from(ty: ValType) -> MaybeType {
+ MaybeType::Type(ty)
+ }
+}
+
+impl OperatorValidator {
+ // Base constructor: adopts the reusable buffers from `allocs`, all of
+ // which are required to be empty.
+ fn new(features: &WasmFeatures, allocs: OperatorValidatorAllocations) -> Self {
+ let OperatorValidatorAllocations {
+ br_table_tmp,
+ control,
+ operands,
+ local_inits,
+ inits,
+ locals_first,
+ locals_all,
+ } = allocs;
+ debug_assert!(br_table_tmp.is_empty());
+ debug_assert!(control.is_empty());
+ debug_assert!(operands.is_empty());
+ debug_assert!(local_inits.is_empty());
+ debug_assert!(inits.is_empty());
+ debug_assert!(locals_first.is_empty());
+ debug_assert!(locals_all.is_empty());
+ OperatorValidator {
+ locals: Locals {
+ num_locals: 0,
+ first: locals_first,
+ all: locals_all,
+ },
+ local_inits,
+ inits,
+ features: *features,
+ br_table_tmp,
+ operands,
+ control,
+ end_which_emptied_control: None,
+ }
+ }
+
+ /// Creates a new operator validator which will be used to validate a
+ /// function whose type is the `ty` index specified.
+ ///
+ /// The `resources` are used to learn about the function type underlying
+ /// `ty`.
+ pub fn new_func<T>(
+ ty: u32,
+ offset: usize,
+ features: &WasmFeatures,
+ resources: &T,
+ allocs: OperatorValidatorAllocations,
+ ) -> Result<Self>
+ where
+ T: WasmModuleResources,
+ {
+ let mut ret = OperatorValidator::new(features, allocs);
+ // The implicit outermost frame spans the whole function body and
+ // carries the function's own signature as its block type.
+ ret.control.push(Frame {
+ kind: FrameKind::Block,
+ block_type: BlockType::FuncType(ty),
+ height: 0,
+ unreachable: false,
+ init_height: 0,
+ });
+ let params = OperatorValidatorTemp {
+ // This offset is used by the `func_type_at` and `inputs`.
+ offset,
+ inner: &mut ret,
+ resources,
+ }
+ .func_type_at(ty)?
+ .inputs();
+ // Function parameters are locals which always start out initialized.
+ for ty in params {
+ ret.locals.define(1, ty);
+ ret.local_inits.push(true);
+ }
+ Ok(ret)
+ }
+
+ /// Creates a new operator validator which will be used to validate an
+ /// `init_expr` constant expression which should result in the `ty`
+ /// specified.
+ pub fn new_const_expr(
+ features: &WasmFeatures,
+ ty: ValType,
+ allocs: OperatorValidatorAllocations,
+ ) -> Self {
+ let mut ret = OperatorValidator::new(features, allocs);
+ ret.control.push(Frame {
+ kind: FrameKind::Block,
+ block_type: BlockType::Type(ty),
+ height: 0,
+ unreachable: false,
+ init_height: 0,
+ });
+ ret
+ }
+
+ /// Declares `count` locals of type `ty`, validating the type and the
+ /// total-local limit.
+ pub fn define_locals(
+ &mut self,
+ offset: usize,
+ count: u32,
+ ty: ValType,
+ resources: &impl WasmModuleResources,
+ ) -> Result<()> {
+ resources.check_value_type(ty, &self.features, offset)?;
+ if count == 0 {
+ return Ok(());
+ }
+ if !self.locals.define(count, ty) {
+ return Err(BinaryReaderError::new(
+ "too many locals: locals exceed maximum",
+ offset,
+ ));
+ }
+ // Declared locals start initialized only if their type has a default
+ // value (relevant for the function-references proposal).
+ self.local_inits
+ .resize(self.local_inits.len() + count as usize, ty.is_defaultable());
+ Ok(())
+ }
+
+ /// Returns the current operands stack height.
+ pub fn operand_stack_height(&self) -> usize {
+ self.operands.len()
+ }
+
+ /// Returns the optional value type of the value operand at the given
+ /// `depth` from the top of the operand stack.
+ ///
+ /// - Returns `None` if the `depth` is out of bounds.
+ /// - Returns `Some(None)` if there is a value with unknown type
+ /// at the given `depth`.
+ ///
+ /// # Note
+ ///
+ /// A `depth` of 0 will refer to the last operand on the stack.
+ pub fn peek_operand_at(&self, depth: usize) -> Option<Option<ValType>> {
+ Some(match self.operands.iter().rev().nth(depth)? {
+ MaybeType::Type(t) => Some(*t),
+ MaybeType::Bot | MaybeType::HeapBot => None,
+ })
+ }
+
+ /// Returns the number of frames on the control flow stack.
+ pub fn control_stack_height(&self) -> usize {
+ self.control.len()
+ }
+
+ /// Returns the control frame at `depth` from the innermost frame, if any.
+ pub fn get_frame(&self, depth: usize) -> Option<&Frame> {
+ self.control.iter().rev().nth(depth)
+ }
+
+ /// Create a temporary [`OperatorValidatorTemp`] for validation.
+ pub fn with_resources<'a, 'validator, 'resources, T>(
+ &'validator mut self,
+ resources: &'resources T,
+ offset: usize,
+ ) -> impl VisitOperator<'a, Output = Result<()>> + 'validator
+ where
+ T: WasmModuleResources,
+ 'resources: 'validator,
+ {
+ // Wrapped in `WasmProposalValidator` so every opcode is feature-gated
+ // before the real validation runs.
+ WasmProposalValidator(OperatorValidatorTemp {
+ offset,
+ inner: self,
+ resources,
+ })
+ }
+
+ /// Called after the last operator; checks the function body ended exactly
+ /// at the `end` which emptied the control stack.
+ pub fn finish(&mut self, offset: usize) -> Result<()> {
+ if self.control.last().is_some() {
+ bail!(
+ offset,
+ "control frames remain at end of function: END opcode expected"
+ );
+ }
+
+ // The `end` opcode is one byte which means that the `offset` here
+ // should point just beyond the `end` opcode which emptied the control
+ // stack. If not that means more instructions were present after the
+ // control stack was emptied.
+ if offset != self.end_which_emptied_control.unwrap() + 1 {
+ return Err(self.err_beyond_end(offset));
+ }
+ Ok(())
+ }
+
+ fn err_beyond_end(&self, offset: usize) -> BinaryReaderError {
+ format_err!(offset, "operators remaining after end of function")
+ }
+
+ /// Clears and returns all internal buffers so their capacity can be
+ /// reused for the next function.
+ pub fn into_allocations(self) -> OperatorValidatorAllocations {
+ // `truncate(0)` drops the contents but keeps the allocation.
+ fn truncate<T>(mut tmp: Vec<T>) -> Vec<T> {
+ tmp.truncate(0);
+ tmp
+ }
+ OperatorValidatorAllocations {
+ br_table_tmp: truncate(self.br_table_tmp),
+ control: truncate(self.control),
+ operands: truncate(self.operands),
+ local_inits: truncate(self.local_inits),
+ inits: truncate(self.inits),
+ locals_first: truncate(self.locals.first),
+ locals_all: truncate(self.locals.all),
+ }
+ }
+}
+
+// Let the temporary wrapper transparently read the underlying validator state.
+impl<R> Deref for OperatorValidatorTemp<'_, '_, R> {
+ type Target = OperatorValidator;
+ fn deref(&self) -> &OperatorValidator {
+ self.inner
+ }
+}
+
+// Let the temporary wrapper transparently mutate the underlying validator.
+impl<R> DerefMut for OperatorValidatorTemp<'_, '_, R> {
+ fn deref_mut(&mut self) -> &mut OperatorValidator {
+ self.inner
+ }
+}
+
+impl<'resources, R: WasmModuleResources> OperatorValidatorTemp<'_, 'resources, R> {
+ /// Pushes a type onto the operand stack.
+ ///
+ /// This is used by instructions to represent a value that is pushed to the
+ /// operand stack. This can fail, but only if `Type` is feature gated.
+ /// Otherwise the push operation always succeeds.
+ fn push_operand<T>(&mut self, ty: T) -> Result<()>
+ where
+ T: Into<MaybeType>,
+ {
+ let maybe_ty = ty.into();
+ self.operands.push(maybe_ty);
+ Ok(())
+ }
+
+ /// Attempts to pop a type from the operand stack.
+ ///
+ /// This function is used to remove types from the operand stack. The
+ /// `expected` argument can be used to indicate that a type is required, or
+ /// simply that something is needed to be popped.
+ ///
+ /// If `expected` is `Some(T)` then this will be guaranteed to return
+ /// `T`, and it will only return success if the current block is
+ /// unreachable or if `T` was found at the top of the operand stack.
+ ///
+ /// If `expected` is `None` then it indicates that something must be on the
+ /// operand stack, but it doesn't matter what's on the operand stack. This
+ /// is useful for polymorphic instructions like `select`.
+ ///
+ /// If `Some(T)` is returned then `T` was popped from the operand stack and
+ /// matches `expected`. If `None` is returned then it means that `None` was
+ /// expected and a type was successfully popped, but its exact type is
+ /// indeterminate because the current block is unreachable.
+ fn pop_operand(&mut self, expected: Option<ValType>) -> Result<MaybeType> {
+ // This method is one of the hottest methods in the validator so to
+ // improve codegen this method contains a fast-path success case where
+ // if the top operand on the stack is as expected it's returned
+ // immediately. This is the most common case where the stack will indeed
+ // have the expected type and all we need to do is pop it off.
+ //
+ // Note that this still has to be careful to be correct, though. For
+ // efficiency an operand is unconditionally popped and on success it is
+ // matched against the state of the world to see if we could actually
+ // pop it. If we shouldn't have popped it then it's passed to the slow
+ // path to get pushed back onto the stack.
+ let popped = match self.operands.pop() {
+ Some(MaybeType::Type(actual_ty)) => {
+ if Some(actual_ty) == expected {
+ if let Some(control) = self.control.last() {
+ // The pop must not dip below the current frame's base
+ // height.
+ if self.operands.len() >= control.height {
+ return Ok(MaybeType::Type(actual_ty));
+ }
+ }
+ }
+ Some(MaybeType::Type(actual_ty))
+ }
+ other => other,
+ };
+
+ self._pop_operand(expected, popped)
+ }
+
+ // This is the "real" implementation of `pop_operand` which is 100%
+ // spec-compliant with little attention paid to efficiency since this is the
+ // slow-path from the actual `pop_operand` function above.
+ #[cold]
+ fn _pop_operand(
+ &mut self,
+ expected: Option<ValType>,
+ popped: Option<MaybeType>,
+ ) -> Result<MaybeType> {
+ // Undo the speculative pop performed by the fast path, if any.
+ self.operands.extend(popped);
+ let control = match self.control.last() {
+ Some(c) => c,
+ None => return Err(self.err_beyond_end(self.offset)),
+ };
+ let actual = if self.operands.len() == control.height && control.unreachable {
+ MaybeType::Bot
+ } else {
+ if self.operands.len() == control.height {
+ let desc = match expected {
+ Some(ty) => ty_to_str(ty),
+ None => "a type",
+ };
+ bail!(
+ self.offset,
+ "type mismatch: expected {desc} but nothing on stack"
+ )
+ } else {
+ self.operands.pop().unwrap()
+ }
+ };
+ if let Some(expected) = expected {
+ match (actual, expected) {
+ // The bottom type matches all expectations
+ (MaybeType::Bot, _)
+ // The "heap bottom" type only matches other references types,
+ // but not any integer types.
+ | (MaybeType::HeapBot, ValType::Ref(_)) => {}
+
+ // Use the `matches` predicate to test if a found type matches
+ // the expectation.
+ (MaybeType::Type(actual), expected) => {
+ if !self.resources.matches(actual, expected) {
+ bail!(
+ self.offset,
+ "type mismatch: expected {}, found {}",
+ ty_to_str(expected),
+ ty_to_str(actual)
+ );
+ }
+ }
+
+ // A "heap bottom" type cannot match any numeric types.
+ (
+ MaybeType::HeapBot,
+ ValType::I32 | ValType::I64 | ValType::F32 | ValType::F64 | ValType::V128,
+ ) => {
+ bail!(
+ self.offset,
+ "type mismatch: expected {}, found heap type",
+ ty_to_str(expected)
+ )
+ }
+ }
+ }
+ Ok(actual)
+ }
+
+ /// Pops a reference type from the operand stack; `None` means a bottom
+ /// type was popped (unreachable code).
+ fn pop_ref(&mut self) -> Result<Option<RefType>> {
+ match self.pop_operand(None)? {
+ MaybeType::Bot | MaybeType::HeapBot => Ok(None),
+ MaybeType::Type(ValType::Ref(rt)) => Ok(Some(rt)),
+ MaybeType::Type(ty) => bail!(
+ self.offset,
+ "type mismatch: expected ref but found {}",
+ ty_to_str(ty)
+ ),
+ }
+ }
+
+ /// Fetches the type for the local at `idx`, returning an error if it's out
+ /// of bounds.
+ fn local(&self, idx: u32) -> Result<ValType> {
+ match self.locals.get(idx) {
+ Some(ty) => Ok(ty),
+ None => bail!(
+ self.offset,
+ "unknown local {}: local index out of bounds",
+ idx
+ ),
+ }
+ }
+
+ /// Flags the current control frame as unreachable, additionally truncating
+ /// the currently active operand stack.
+ fn unreachable(&mut self) -> Result<()> {
+ let control = match self.control.last_mut() {
+ Some(frame) => frame,
+ None => return Err(self.err_beyond_end(self.offset)),
+ };
+ control.unreachable = true;
+ let new_height = control.height;
+ self.operands.truncate(new_height);
+ Ok(())
+ }
+
+ /// Pushes a new frame onto the control stack.
+ ///
+ /// This operation is used when entering a new block such as an if, loop,
+ /// or block itself. The `kind` of block is specified which indicates how
+ /// breaks interact with this block's type. Additionally the type signature
+ /// of the block is specified by `ty`.
+ fn push_ctrl(&mut self, kind: FrameKind, ty: BlockType) -> Result<()> {
+ // Push a new frame which has a snapshot of the height of the current
+ // operand stack.
+ let height = self.operands.len();
+ let init_height = self.inits.len();
+ self.control.push(Frame {
+ kind,
+ block_type: ty,
+ height,
+ unreachable: false,
+ init_height,
+ });
+ // All of the parameters are now also available in this control frame,
+ // so we push them here in order.
+ for ty in self.params(ty)? {
+ self.push_operand(ty)?;
+ }
+ Ok(())
+ }
+
+ /// Pops a frame from the control stack.
+ ///
+ /// This function is used when exiting a block and leaves a block scope.
+ /// Internally this will validate that blocks have the correct result type.
+ fn pop_ctrl(&mut self) -> Result<Frame> {
+ // Read the expected type and expected height of the operand stack the
+ // end of the frame.
+ let frame = match self.control.last() {
+ Some(f) => f,
+ None => return Err(self.err_beyond_end(self.offset)),
+ };
+ let ty = frame.block_type;
+ let height = frame.height;
+ let init_height = frame.init_height;
+
+ // reset_locals in the spec
+ for init in self.inits.split_off(init_height) {
+ self.local_inits[init as usize] = false;
+ }
+
+ // Pop all the result types, in reverse order, from the operand stack.
+ // These types will, possibly, be transferred to the next frame.
+ for ty in self.results(ty)?.rev() {
+ self.pop_operand(Some(ty))?;
+ }
+
+ // Make sure that the operand stack has returned to is original
+ // height...
+ if self.operands.len() != height {
+ bail!(
+ self.offset,
+ "type mismatch: values remaining on stack at end of block"
+ );
+ }
+
+ // And then we can remove it!
+ Ok(self.control.pop().unwrap())
+ }
+
+ /// Validates a relative jump to the `depth` specified.
+ ///
+ /// Returns the type signature of the block that we're jumping to as well
+ /// as the kind of block if the jump is valid. Otherwise returns an error.
+ fn jump(&self, depth: u32) -> Result<(BlockType, FrameKind)> {
+ if self.control.is_empty() {
+ return Err(self.err_beyond_end(self.offset));
+ }
+ match (self.control.len() - 1).checked_sub(depth as usize) {
+ Some(i) => {
+ let frame = &self.control[i];
+ Ok((frame.block_type, frame.kind))
+ }
+ None => bail!(self.offset, "unknown label: branch depth too large"),
+ }
+ }
+
+ /// Validates that `memory_index` is valid in this module, and returns the
+ /// type of address used to index the memory specified.
+ fn check_memory_index(&self, memory_index: u32) -> Result<ValType> {
+ match self.resources.memory_at(memory_index) {
+ Some(mem) => Ok(mem.index_type()),
+ None => bail!(self.offset, "unknown memory {}", memory_index),
+ }
+ }
+
+ /// Validates a `memarg` for alignment and such (also the memory it
+ /// references), and returns the type of index used to address the memory.
+ fn check_memarg(&self, memarg: MemArg) -> Result<ValType> {
+ let index_ty = self.check_memory_index(memarg.memory)?;
+ if memarg.align > memarg.max_align {
+ bail!(self.offset, "alignment must not be larger than natural");
+ }
+ if index_ty == ValType::I32 && memarg.offset > u64::from(u32::MAX) {
+ bail!(self.offset, "offset out of range: must be <= 2**32");
+ }
+ Ok(index_ty)
+ }
+
+ fn check_floats_enabled(&self) -> Result<()> {
+ if !self.features.floats {
+ bail!(self.offset, "floating-point instruction disallowed");
+ }
+ Ok(())
+ }
+
+ fn check_shared_memarg(&self, memarg: MemArg) -> Result<ValType> {
+ if memarg.align != memarg.max_align {
+ bail!(
+ self.offset,
+ "atomic instructions must always specify maximum alignment"
+ );
+ }
+ self.check_memory_index(memarg.memory)
+ }
+
+ fn check_simd_lane_index(&self, index: u8, max: u8) -> Result<()> {
+ if index >= max {
+ bail!(self.offset, "SIMD index out of bounds");
+ }
+ Ok(())
+ }
+
+ /// Validates a block type, primarily with various in-flight proposals.
+ fn check_block_type(&self, ty: BlockType) -> Result<()> {
+ match ty {
+ BlockType::Empty => Ok(()),
+ BlockType::Type(t) => self
+ .resources
+ .check_value_type(t, &self.features, self.offset),
+ BlockType::FuncType(idx) => {
+ if !self.features.multi_value {
+ bail!(
+ self.offset,
+ "blocks, loops, and ifs may only produce a resulttype \
+ when multi-value is not enabled",
+ );
+ }
+ self.func_type_at(idx)?;
+ Ok(())
+ }
+ }
+ }
+
+ /// Validates a `call` instruction, ensuring that the function index is
+ /// in-bounds and the right types are on the stack to call the function.
+ fn check_call(&mut self, function_index: u32) -> Result<()> {
+ let ty = match self.resources.type_index_of_function(function_index) {
+ Some(i) => i,
+ None => {
+ bail!(
+ self.offset,
+ "unknown function {function_index}: function index out of bounds",
+ );
+ }
+ };
+ self.check_call_ty(ty)
+ }
+
+ /// Pops the inputs and pushes the outputs of the function type at
+ /// `type_index`.
+ fn check_call_ty(&mut self, type_index: u32) -> Result<()> {
+ let ty = match self.resources.func_type_at(type_index) {
+ Some(i) => i,
+ None => {
+ bail!(
+ self.offset,
+ "unknown type {type_index}: type index out of bounds",
+ );
+ }
+ };
+ for ty in ty.inputs().rev() {
+ self.pop_operand(Some(ty))?;
+ }
+ for ty in ty.outputs() {
+ self.push_operand(ty)?;
+ }
+ Ok(())
+ }
+
+ /// Validates a call to an indirect function, very similar to `check_call`.
+ fn check_call_indirect(&mut self, index: u32, table_index: u32) -> Result<()> {
+ match self.resources.table_at(table_index) {
+ None => {
+ bail!(self.offset, "unknown table: table index out of bounds");
+ }
+ Some(tab) => {
+ // The table's element type must be a subtype of `funcref`.
+ if !self
+ .resources
+ .matches(ValType::Ref(tab.element_type), ValType::FUNCREF)
+ {
+ bail!(
+ self.offset,
+ "indirect calls must go through a table with type <= funcref",
+ );
+ }
+ }
+ }
+ let ty = self.func_type_at(index)?;
+ self.pop_operand(Some(ValType::I32))?;
+ for ty in ty.inputs().rev() {
+ self.pop_operand(Some(ty))?;
+ }
+ for ty in ty.outputs() {
+ self.push_operand(ty)?;
+ }
+ Ok(())
+ }
+
+ /// Validates a `return` instruction, popping types from the operand
+ /// stack that the function needs.
+ fn check_return(&mut self) -> Result<()> {
+ if self.control.is_empty() {
+ return Err(self.err_beyond_end(self.offset));
+ }
+ // `control[0]` is the function's own frame, so its results are the
+ // function's results.
+ for ty in self.results(self.control[0].block_type)?.rev() {
+ self.pop_operand(Some(ty))?;
+ }
+ self.unreachable()?;
+ Ok(())
+ }
+
+ /// Checks the validity of a common comparison operator.
+ fn check_cmp_op(&mut self, ty: ValType) -> Result<()> {
+ self.pop_operand(Some(ty))?;
+ self.pop_operand(Some(ty))?;
+ self.push_operand(ValType::I32)?;
+ Ok(())
+ }
+
+ /// Checks the validity of a common float comparison operator.
+ fn check_fcmp_op(&mut self, ty: ValType) -> Result<()> {
+ debug_assert!(matches!(ty, ValType::F32 | ValType::F64));
+ self.check_floats_enabled()?;
+ self.check_cmp_op(ty)
+ }
+
+ /// Checks the validity of a common unary operator.
+ fn check_unary_op(&mut self, ty: ValType) -> Result<()> {
+ self.pop_operand(Some(ty))?;
+ self.push_operand(ty)?;
+ Ok(())
+ }
+
+ /// Checks the validity of a common unary float operator.
+ fn check_funary_op(&mut self, ty: ValType) -> Result<()> {
+ debug_assert!(matches!(ty, ValType::F32 | ValType::F64));
+ self.check_floats_enabled()?;
+ self.check_unary_op(ty)
+ }
+
+ /// Checks the validity of a common conversion operator.
+ fn check_conversion_op(&mut self, into: ValType, from: ValType) -> Result<()> {
+ self.pop_operand(Some(from))?;
+ self.push_operand(into)?;
+ Ok(())
+ }
+
+ /// Checks the validity of a common float conversion operator.
+ fn check_fconversion_op(&mut self, into: ValType, from: ValType) -> Result<()> {
+ debug_assert!(matches!(into, ValType::F32 | ValType::F64));
+ self.check_floats_enabled()?;
+ self.check_conversion_op(into, from)
+ }
+
+ /// Checks the validity of a common binary operator.
+ fn check_binary_op(&mut self, ty: ValType) -> Result<()> {
+ self.pop_operand(Some(ty))?;
+ self.pop_operand(Some(ty))?;
+ self.push_operand(ty)?;
+ Ok(())
+ }
+
+ /// Checks the validity of a common binary float operator.
+ fn check_fbinary_op(&mut self, ty: ValType) -> Result<()> {
+ debug_assert!(matches!(ty, ValType::F32 | ValType::F64));
+ self.check_floats_enabled()?;
+ self.check_binary_op(ty)
+ }
+
+ /// Checks the validity of an atomic load operator.
+ fn check_atomic_load(&mut self, memarg: MemArg, load_ty: ValType) -> Result<()> {
+ let ty = self.check_shared_memarg(memarg)?;
+ self.pop_operand(Some(ty))?;
+ self.push_operand(load_ty)?;
+ Ok(())
+ }
+
+ /// Checks the validity of an atomic store operator.
+ fn check_atomic_store(&mut self, memarg: MemArg, store_ty: ValType) -> Result<()> {
+ let ty = self.check_shared_memarg(memarg)?;
+ self.pop_operand(Some(store_ty))?;
+ self.pop_operand(Some(ty))?;
+ Ok(())
+ }
+
+ /// Checks the validity of a common atomic binary operator.
+ fn check_atomic_binary_op(&mut self, memarg: MemArg, op_ty: ValType) -> Result<()> {
+ let ty = self.check_shared_memarg(memarg)?;
+ self.pop_operand(Some(op_ty))?;
+ self.pop_operand(Some(ty))?;
+ self.push_operand(op_ty)?;
+ Ok(())
+ }
+
+ /// Checks the validity of an atomic compare exchange operator.
+ fn check_atomic_binary_cmpxchg(&mut self, memarg: MemArg, op_ty: ValType) -> Result<()> {
+ let ty = self.check_shared_memarg(memarg)?;
+ self.pop_operand(Some(op_ty))?;
+ self.pop_operand(Some(op_ty))?;
+ self.pop_operand(Some(ty))?;
+ self.push_operand(op_ty)?;
+ Ok(())
+ }
+
+ /// Checks a [`V128`] splat operator.
+ fn check_v128_splat(&mut self, src_ty: ValType) -> Result<()> {
+ self.pop_operand(Some(src_ty))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+
+ /// Checks a [`V128`] binary operator.
+ fn check_v128_binary_op(&mut self) -> Result<()> {
+ self.pop_operand(Some(ValType::V128))?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+
+ /// Checks a [`V128`] binary float operator.
+ fn check_v128_fbinary_op(&mut self) -> Result<()> {
+ self.check_floats_enabled()?;
+ self.check_v128_binary_op()
+ }
+
+ /// Checks a [`V128`] unary operator.
+ fn check_v128_unary_op(&mut self) -> Result<()> {
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+
+ /// Checks a [`V128`] unary float operator.
+ fn check_v128_funary_op(&mut self) -> Result<()> {
+ self.check_floats_enabled()?;
+ self.check_v128_unary_op()
+ }
+
+ /// Checks a [`V128`] relaxed ternary operator.
+ fn check_v128_ternary_op(&mut self) -> Result<()> {
+ self.pop_operand(Some(ValType::V128))?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+
+ /// Checks a [`V128`] bitmask operator (`v128 -> i32`).
+ fn check_v128_bitmask_op(&mut self) -> Result<()> {
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::I32)?;
+ Ok(())
+ }
+
+ /// Checks a [`V128`] shift operator (`v128, i32 -> v128`).
+ fn check_v128_shift_op(&mut self) -> Result<()> {
+ self.pop_operand(Some(ValType::I32))?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+
+ /// Checks a [`V128`] common load operator.
+ fn check_v128_load_op(&mut self, memarg: MemArg) -> Result<()> {
+ let idx = self.check_memarg(memarg)?;
+ self.pop_operand(Some(idx))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+
+ fn func_type_at(&self, at: u32) -> Result<&'resources R::FuncType> {
+ self.resources
+ .func_type_at(at)
+ .ok_or_else(|| format_err!(self.offset, "unknown type: type index out of bounds"))
+ }
+
+ fn tag_at(&self, at: u32) -> Result<&'resources R::FuncType> {
+ self.resources
+ .tag_at(at)
+ .ok_or_else(|| format_err!(self.offset, "unknown tag {}: tag index out of bounds", at))
+ }
+
+ /// Returns the parameter types of the block type `ty`.
+ fn params(&self, ty: BlockType) -> Result<impl PreciseIterator<Item = ValType> + 'resources> {
+ Ok(match ty {
+ BlockType::Empty | BlockType::Type(_) => Either::B(None.into_iter()),
+ BlockType::FuncType(t) => Either::A(self.func_type_at(t)?.inputs()),
+ })
+ }
+
+ /// Returns the result types of the block type `ty`.
+ fn results(&self, ty: BlockType) -> Result<impl PreciseIterator<Item = ValType> + 'resources> {
+ Ok(match ty {
+ BlockType::Empty => Either::B(None.into_iter()),
+ BlockType::Type(t) => Either::B(Some(t).into_iter()),
+ BlockType::FuncType(t) => Either::A(self.func_type_at(t)?.outputs()),
+ })
+ }
+
+ /// Returns the types a branch to a frame of kind `kind` expects: a loop's
+ /// branch target takes its parameters, everything else its results.
+ fn label_types(
+ &self,
+ ty: BlockType,
+ kind: FrameKind,
+ ) -> Result<impl PreciseIterator<Item = ValType> + 'resources> {
+ Ok(match kind {
+ FrameKind::Loop => Either::A(self.params(ty)?),
+ _ => Either::B(self.results(ty)?),
+ })
+ }
+}
+
+/// Renders a value type as the string form used in validation error messages.
+pub fn ty_to_str(ty: ValType) -> &'static str {
+ match ty {
+ ValType::I32 => "i32",
+ ValType::I64 => "i64",
+ ValType::F32 => "f32",
+ ValType::F64 => "f64",
+ ValType::V128 => "v128",
+ ValType::FUNCREF => "funcref",
+ ValType::EXTERNREF => "externref",
+ ValType::Ref(RefType {
+ nullable: false,
+ heap_type: HeapType::Func,
+ }) => "(ref func)",
+ ValType::Ref(RefType {
+ nullable: false,
+ heap_type: HeapType::Extern,
+ }) => "(ref extern)",
+ // Typed function references print a generic `$type` placeholder since
+ // the concrete type index isn't available here.
+ ValType::Ref(RefType {
+ nullable: false,
+ heap_type: HeapType::TypedFunc(_),
+ }) => "(ref $type)",
+ ValType::Ref(RefType {
+ nullable: true,
+ heap_type: HeapType::TypedFunc(_),
+ }) => "(ref null $type)",
+ }
+}
+
+/// A wrapper "visitor" around the real operator validator internally which
+/// exists to check that the required wasm feature is enabled to proceed with
+/// validation.
+///
+/// This validator is macro-generated to ensure that the proposal listed in this
+/// crate's macro matches the one that's validated here. Each instruction's
+/// visit method validates the specified proposal is enabled and then delegates
+/// to `OperatorValidatorTemp` to perform the actual opcode validation.
+///
+/// The single tuple field is the inner validator that does the real work.
+struct WasmProposalValidator<'validator, 'resources, T>(
+ OperatorValidatorTemp<'validator, 'resources, T>,
+);
+
+impl<T> WasmProposalValidator<'_, '_, T> {
+ /// Errors with "`desc` support is not enabled" unless `flag` (the
+ /// feature gate for the proposal) is set.
+ fn check_enabled(&self, flag: bool, desc: &str) -> Result<()> {
+ if !flag {
+ bail!(self.0.offset, "{desc} support is not enabled");
+ }
+ Ok(())
+ }
+}
+
+macro_rules! validate_proposal {
+ // Entry arm, invoked via `for_each_operator!`: generates one visit
+ // method per operator that first gates on the operator's proposal and
+ // then delegates to the inner `OperatorValidatorTemp`.
+ ($( @$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident)*) => {
+ $(
+ fn $visit(&mut self $($(,$arg: $argty)*)?) -> Result<()> {
+ validate_proposal!(validate self $proposal);
+ self.0.$visit($( $($arg),* )?)
+ }
+ )*
+ };
+
+ // MVP instructions are always permitted; every other proposal checks
+ // its corresponding `WasmFeatures` flag.
+ (validate self mvp) => {};
+ (validate $self:ident $proposal:ident) => {
+ $self.check_enabled($self.0.features.$proposal, validate_proposal!(desc $proposal))?
+ };
+
+ // Human-readable proposal names used in "... support is not enabled"
+ // error messages.
+ (desc simd) => ("SIMD");
+ (desc relaxed_simd) => ("relaxed SIMD");
+ (desc threads) => ("threads");
+ (desc saturating_float_to_int) => ("saturating float to int conversions");
+ (desc reference_types) => ("reference types");
+ (desc bulk_memory) => ("bulk memory");
+ (desc sign_extension) => ("sign extension operations");
+ (desc exceptions) => ("exceptions");
+ (desc tail_call) => ("tail calls");
+ (desc function_references) => ("function references");
+ (desc memory_control) => ("memory control");
+}
+
+// Expands the macro above over every operator, producing the feature-gated
+// visit methods for the wrapper validator.
+impl<'a, T> VisitOperator<'a> for WasmProposalValidator<'_, '_, T>
+where
+ T: WasmModuleResources,
+{
+ type Output = Result<()>;
+
+ for_each_operator!(validate_proposal);
+}
+
+impl<'a, T> VisitOperator<'a> for OperatorValidatorTemp<'_, '_, T>
+where
+ T: WasmModuleResources,
+{
+ type Output = Result<()>;
+
    // ---- Structured control instructions ----

    fn visit_nop(&mut self) -> Self::Output {
        Ok(())
    }
    fn visit_unreachable(&mut self) -> Self::Output {
        // Marks the rest of the current frame as unreachable.
        self.unreachable()?;
        Ok(())
    }
    fn visit_block(&mut self, ty: BlockType) -> Self::Output {
        self.check_block_type(ty)?;
        // Block parameters are popped (in reverse) from the stack...
        for ty in self.params(ty)?.rev() {
            self.pop_operand(Some(ty))?;
        }
        // ...and become the initial operands of the new control frame.
        self.push_ctrl(FrameKind::Block, ty)?;
        Ok(())
    }
    fn visit_loop(&mut self, ty: BlockType) -> Self::Output {
        self.check_block_type(ty)?;
        for ty in self.params(ty)?.rev() {
            self.pop_operand(Some(ty))?;
        }
        self.push_ctrl(FrameKind::Loop, ty)?;
        Ok(())
    }
    fn visit_if(&mut self, ty: BlockType) -> Self::Output {
        self.check_block_type(ty)?;
        // The i32 condition sits on top of the block's parameters.
        self.pop_operand(Some(ValType::I32))?;
        for ty in self.params(ty)?.rev() {
            self.pop_operand(Some(ty))?;
        }
        self.push_ctrl(FrameKind::If, ty)?;
        Ok(())
    }
    fn visit_else(&mut self) -> Self::Output {
        // `else` closes the `if` arm and opens an `Else` frame with the same
        // block type, so both arms are checked against identical signatures.
        let frame = self.pop_ctrl()?;
        if frame.kind != FrameKind::If {
            bail!(self.offset, "else found outside of an `if` block");
        }
        self.push_ctrl(FrameKind::Else, frame.block_type)?;
        Ok(())
    }
    fn visit_try(&mut self, ty: BlockType) -> Self::Output {
        // Same stack discipline as `block`, but with a `Try` frame so that
        // `catch`/`delegate` can validate their placement.
        self.check_block_type(ty)?;
        for ty in self.params(ty)?.rev() {
            self.pop_operand(Some(ty))?;
        }
        self.push_ctrl(FrameKind::Try, ty)?;
        Ok(())
    }
+ fn visit_catch(&mut self, index: u32) -> Self::Output {
+ let frame = self.pop_ctrl()?;
+ if frame.kind != FrameKind::Try && frame.kind != FrameKind::Catch {
+ bail!(self.offset, "catch found outside of an `try` block");
+ }
+ // Start a new frame and push `exnref` value.
+ let height = self.operands.len();
+ let init_height = self.inits.len();
+ self.control.push(Frame {
+ kind: FrameKind::Catch,
+ block_type: frame.block_type,
+ height,
+ unreachable: false,
+ init_height,
+ });
+ // Push exception argument types.
+ let ty = self.tag_at(index)?;
+ for ty in ty.inputs() {
+ self.push_operand(ty)?;
+ }
+ Ok(())
+ }
    fn visit_throw(&mut self, index: u32) -> Self::Output {
        // Check values associated with the exception.
        let ty = self.tag_at(index)?;
        for ty in ty.inputs().rev() {
            self.pop_operand(Some(ty))?;
        }
        // Tags used for exceptions must have an empty result type.
        if ty.outputs().len() > 0 {
            bail!(
                self.offset,
                "result type expected to be empty for exception"
            );
        }
        // Control does not fall through a `throw`.
        self.unreachable()?;
        Ok(())
    }
    fn visit_rethrow(&mut self, relative_depth: u32) -> Self::Output {
        // This is not a jump, but we need to check that the `rethrow`
        // targets an actual `catch` to get the exception.
        let (_, kind) = self.jump(relative_depth)?;
        if kind != FrameKind::Catch && kind != FrameKind::CatchAll {
            bail!(
                self.offset,
                "invalid rethrow label: target was not a `catch` block"
            );
        }
        self.unreachable()?;
        Ok(())
    }
+ fn visit_delegate(&mut self, relative_depth: u32) -> Self::Output {
+ let frame = self.pop_ctrl()?;
+ if frame.kind != FrameKind::Try {
+ bail!(self.offset, "delegate found outside of an `try` block");
+ }
+ // This operation is not a jump, but we need to check the
+ // depth for validity
+ let _ = self.jump(relative_depth)?;
+ for ty in self.results(frame.block_type)? {
+ self.push_operand(ty)?;
+ }
+ Ok(())
+ }
    fn visit_catch_all(&mut self) -> Self::Output {
        let frame = self.pop_ctrl()?;
        // `catch_all` must come last: at most one per `try`, and only after
        // the `try` body or a preceding `catch`.
        if frame.kind == FrameKind::CatchAll {
            bail!(self.offset, "only one catch_all allowed per `try` block");
        } else if frame.kind != FrameKind::Try && frame.kind != FrameKind::Catch {
            bail!(self.offset, "catch_all found outside of a `try` block");
        }
        let height = self.operands.len();
        let init_height = self.inits.len();
        self.control.push(Frame {
            kind: FrameKind::CatchAll,
            block_type: frame.block_type,
            height,
            unreachable: false,
            init_height,
        });
        Ok(())
    }
    fn visit_end(&mut self) -> Self::Output {
        let mut frame = self.pop_ctrl()?;

        // Note that this `if` isn't included in the appendix right
        // now, but it's used to allow for `if` statements that are
        // missing an `else` block which have the same parameter/return
        // types on the block (since that's valid).
        if frame.kind == FrameKind::If {
            self.push_ctrl(FrameKind::Else, frame.block_type)?;
            frame = self.pop_ctrl()?;
        }
        // The block's results become operands of the enclosing frame.
        for ty in self.results(frame.block_type)? {
            self.push_operand(ty)?;
        }

        // Record the offset of the `end` that closed the outermost frame so
        // trailing instructions can be diagnosed.
        if self.control.is_empty() && self.end_which_emptied_control.is_none() {
            assert_ne!(self.offset, 0);
            self.end_which_emptied_control = Some(self.offset);
        }
        Ok(())
    }
    fn visit_br(&mut self, relative_depth: u32) -> Self::Output {
        // An unconditional branch consumes the target label's types and makes
        // the rest of the frame unreachable.
        let (ty, kind) = self.jump(relative_depth)?;
        for ty in self.label_types(ty, kind)?.rev() {
            self.pop_operand(Some(ty))?;
        }
        self.unreachable()?;
        Ok(())
    }
+ fn visit_br_if(&mut self, relative_depth: u32) -> Self::Output {
+ self.pop_operand(Some(ValType::I32))?;
+ let (ty, kind) = self.jump(relative_depth)?;
+ let types = self.label_types(ty, kind)?;
+ for ty in types.clone().rev() {
+ self.pop_operand(Some(ty))?;
+ }
+ for ty in types {
+ self.push_operand(ty)?;
+ }
+ Ok(())
+ }
    fn visit_br_table(&mut self, table: BrTable) -> Self::Output {
        self.pop_operand(Some(ValType::I32))?;
        let default = self.jump(table.default())?;
        let default_types = self.label_types(default.0, default.1)?;
        for element in table.targets() {
            let relative_depth = element?;
            let block = self.jump(relative_depth)?;
            let tys = self.label_types(block.0, block.1)?;
            // All targets must agree on arity with the default label.
            if tys.len() != default_types.len() {
                bail!(
                    self.offset,
                    "type mismatch: br_table target labels have different number of types"
                );
            }
            // Pop this target's types, recording what was actually popped,
            // then push the recorded types back so the next target is checked
            // against the same (possibly refined) stack.
            debug_assert!(self.br_table_tmp.is_empty());
            for ty in tys.rev() {
                let ty = self.pop_operand(Some(ty))?;
                self.br_table_tmp.push(ty);
            }
            // NOTE(review): accessed through `inner` here — presumably the
            // `Deref` target of this temp validator — to split the borrow
            // between the two fields; confirm against the type definition.
            for ty in self.inner.br_table_tmp.drain(..).rev() {
                self.inner.operands.push(ty);
            }
        }
        // Finally consume the default label's types; br_table never falls
        // through.
        for ty in default_types.rev() {
            self.pop_operand(Some(ty))?;
        }
        self.unreachable()?;
        Ok(())
    }
    // ---- Calls and returns ----

    fn visit_return(&mut self) -> Self::Output {
        self.check_return()?;
        Ok(())
    }
    fn visit_call(&mut self, function_index: u32) -> Self::Output {
        self.check_call(function_index)?;
        Ok(())
    }
    fn visit_return_call(&mut self, function_index: u32) -> Self::Output {
        // A tail call is validated as a call followed by a return.
        self.check_call(function_index)?;
        self.check_return()?;
        Ok(())
    }
    fn visit_call_ref(&mut self, hty: HeapType) -> Self::Output {
        self.resources
            .check_heap_type(hty, &self.features, self.offset)?;
        // If `None` is popped then that means a "bottom" type was popped which
        // is always considered equivalent to the `hty` tag.
        if let Some(rt) = self.pop_ref()? {
            let expected = RefType {
                nullable: true,
                heap_type: hty,
            };
            if !self
                .resources
                .matches(ValType::Ref(rt), ValType::Ref(expected))
            {
                bail!(
                    self.offset,
                    "type mismatch: funcref on stack does not match specified type",
                );
            }
        }
        // Only concrete (typed) function references can be called.
        match hty {
            HeapType::TypedFunc(type_index) => self.check_call_ty(type_index.into())?,
            _ => bail!(
                self.offset,
                "type mismatch: instruction requires function reference type",
            ),
        }
        Ok(())
    }
    fn visit_return_call_ref(&mut self, hty: HeapType) -> Self::Output {
        self.visit_call_ref(hty)?;
        self.check_return()
    }
    fn visit_call_indirect(
        &mut self,
        index: u32,
        table_index: u32,
        table_byte: u8,
    ) -> Self::Output {
        // Pre-reference-types encodings require the table byte to be zero.
        if table_byte != 0 && !self.features.reference_types {
            bail!(
                self.offset,
                "reference-types not enabled: zero byte expected"
            );
        }
        self.check_call_indirect(index, table_index)?;
        Ok(())
    }
    fn visit_return_call_indirect(&mut self, index: u32, table_index: u32) -> Self::Output {
        self.check_call_indirect(index, table_index)?;
        self.check_return()?;
        Ok(())
    }
    // ---- Parametric instructions ----

    fn visit_drop(&mut self) -> Self::Output {
        self.pop_operand(None)?;
        Ok(())
    }
    fn visit_select(&mut self) -> Self::Output {
        self.pop_operand(Some(ValType::I32))?;
        let ty1 = self.pop_operand(None)?;
        let ty2 = self.pop_operand(None)?;

        let ty = match (ty1, ty2) {
            // All heap-related types aren't allowed with the `select`
            // instruction
            (MaybeType::HeapBot, _)
            | (_, MaybeType::HeapBot)
            | (MaybeType::Type(ValType::Ref(_)), _)
            | (_, MaybeType::Type(ValType::Ref(_))) => {
                bail!(
                    self.offset,
                    "type mismatch: select only takes integral types"
                )
            }

            // If one operand is the "bottom" type then whatever the other
            // operand is is the result of the `select`
            (MaybeType::Bot, t) | (t, MaybeType::Bot) => t,

            // Otherwise these are two integral types and they must match for
            // `select` to typecheck.
            (t @ MaybeType::Type(t1), MaybeType::Type(t2)) => {
                if t1 != t2 {
                    bail!(
                        self.offset,
                        "type mismatch: select operands have different types"
                    );
                }
                t
            }
        };
        self.push_operand(ty)?;
        Ok(())
    }
    fn visit_typed_select(&mut self, ty: ValType) -> Self::Output {
        // Typed select carries an explicit result type, which may be a
        // reference type (unlike untyped `select`).
        self.resources
            .check_value_type(ty, &self.features, self.offset)?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ty))?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ty)?;
        Ok(())
    }
    // ---- Local variable instructions ----
    //
    // `local_inits`/`inits` track which locals have been assigned, so reads
    // of not-yet-initialized locals can be rejected (uninitialized-local
    // tracking; presumably for non-defaultable locals — confirm against the
    // function-references validation rules in this crate).

    fn visit_local_get(&mut self, local_index: u32) -> Self::Output {
        let ty = self.local(local_index)?;
        if !self.local_inits[local_index as usize] {
            bail!(self.offset, "uninitialized local: {}", local_index);
        }
        self.push_operand(ty)?;
        Ok(())
    }
    fn visit_local_set(&mut self, local_index: u32) -> Self::Output {
        let ty = self.local(local_index)?;
        self.pop_operand(Some(ty))?;
        // First assignment marks the local initialized and records it for
        // rollback when the enclosing frame is popped.
        if !self.local_inits[local_index as usize] {
            self.local_inits[local_index as usize] = true;
            self.inits.push(local_index);
        }
        Ok(())
    }
    fn visit_local_tee(&mut self, local_index: u32) -> Self::Output {
        let ty = self.local(local_index)?;
        self.pop_operand(Some(ty))?;
        if !self.local_inits[local_index as usize] {
            self.local_inits[local_index as usize] = true;
            self.inits.push(local_index);
        }

        // Unlike `local.set`, the value remains on the stack.
        self.push_operand(ty)?;
        Ok(())
    }
+ fn visit_global_get(&mut self, global_index: u32) -> Self::Output {
+ if let Some(ty) = self.resources.global_at(global_index) {
+ self.push_operand(ty.content_type)?;
+ } else {
+ bail!(self.offset, "unknown global: global index out of bounds");
+ };
+ Ok(())
+ }
+ fn visit_global_set(&mut self, global_index: u32) -> Self::Output {
+ if let Some(ty) = self.resources.global_at(global_index) {
+ if !ty.mutable {
+ bail!(
+ self.offset,
+ "global is immutable: cannot modify it with `global.set`"
+ );
+ }
+ self.pop_operand(Some(ty.content_type))?;
+ } else {
+ bail!(self.offset, "unknown global: global index out of bounds");
+ };
+ Ok(())
+ }
    // ---- Memory load/store instructions ----
    //
    // `check_memarg` validates alignment and the memory index and returns the
    // memory's index (address) type, which is popped as the address operand.
    // The `_u`/`_s` narrow-load pairs share one implementation since they
    // type-check identically.

    fn visit_i32_load(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    fn visit_i64_load(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::I64)?;
        Ok(())
    }
    fn visit_f32_load(&mut self, memarg: MemArg) -> Self::Output {
        self.check_floats_enabled()?;
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::F32)?;
        Ok(())
    }
    fn visit_f64_load(&mut self, memarg: MemArg) -> Self::Output {
        self.check_floats_enabled()?;
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::F64)?;
        Ok(())
    }
    fn visit_i32_load8_s(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    fn visit_i32_load8_u(&mut self, memarg: MemArg) -> Self::Output {
        self.visit_i32_load8_s(memarg)
    }
    fn visit_i32_load16_s(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    fn visit_i32_load16_u(&mut self, memarg: MemArg) -> Self::Output {
        self.visit_i32_load16_s(memarg)
    }
    fn visit_i64_load8_s(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::I64)?;
        Ok(())
    }
    fn visit_i64_load8_u(&mut self, memarg: MemArg) -> Self::Output {
        self.visit_i64_load8_s(memarg)
    }
    fn visit_i64_load16_s(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::I64)?;
        Ok(())
    }
    fn visit_i64_load16_u(&mut self, memarg: MemArg) -> Self::Output {
        self.visit_i64_load16_s(memarg)
    }
    fn visit_i64_load32_s(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ty))?;
        self.push_operand(ValType::I64)?;
        Ok(())
    }
    fn visit_i64_load32_u(&mut self, memarg: MemArg) -> Self::Output {
        self.visit_i64_load32_s(memarg)
    }
    // Stores pop the value first (top of stack), then the address.
    fn visit_i32_store(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_i64_store(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::I64))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_f32_store(&mut self, memarg: MemArg) -> Self::Output {
        self.check_floats_enabled()?;
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::F32))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_f64_store(&mut self, memarg: MemArg) -> Self::Output {
        self.check_floats_enabled()?;
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::F64))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_i32_store8(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_i32_store16(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::I32))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_i64_store8(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::I64))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_i64_store16(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::I64))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_i64_store32(&mut self, memarg: MemArg) -> Self::Output {
        let ty = self.check_memarg(memarg)?;
        self.pop_operand(Some(ValType::I64))?;
        self.pop_operand(Some(ty))?;
        Ok(())
    }
    fn visit_memory_size(&mut self, mem: u32, mem_byte: u8) -> Self::Output {
        // Pre-multi-memory encodings require the reserved byte to be zero.
        if mem_byte != 0 && !self.features.multi_memory {
            bail!(self.offset, "multi-memory not enabled: zero byte expected");
        }
        // The result has the memory's index type (i32, or i64 for memory64).
        let index_ty = self.check_memory_index(mem)?;
        self.push_operand(index_ty)?;
        Ok(())
    }
    fn visit_memory_grow(&mut self, mem: u32, mem_byte: u8) -> Self::Output {
        if mem_byte != 0 && !self.features.multi_memory {
            bail!(self.offset, "multi-memory not enabled: zero byte expected");
        }
        let index_ty = self.check_memory_index(mem)?;
        self.pop_operand(Some(index_ty))?;
        self.push_operand(index_ty)?;
        Ok(())
    }

    // ---- Constants ----

    fn visit_i32_const(&mut self, _value: i32) -> Self::Output {
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    fn visit_i64_const(&mut self, _value: i64) -> Self::Output {
        self.push_operand(ValType::I64)?;
        Ok(())
    }
    fn visit_f32_const(&mut self, _value: Ieee32) -> Self::Output {
        self.check_floats_enabled()?;
        self.push_operand(ValType::F32)?;
        Ok(())
    }
    fn visit_f64_const(&mut self, _value: Ieee64) -> Self::Output {
        self.check_floats_enabled()?;
        self.push_operand(ValType::F64)?;
        Ok(())
    }
    // ---- Comparison instructions ----
    //
    // `check_cmp_op(t)` pops two operands of `t` and pushes an i32 result;
    // `check_fcmp_op` additionally requires floats to be enabled.

    fn visit_i32_eqz(&mut self) -> Self::Output {
        self.pop_operand(Some(ValType::I32))?;
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    fn visit_i32_eq(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_ne(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_lt_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_lt_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_gt_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_gt_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_le_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_le_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_ge_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i32_ge_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I32)
    }
    fn visit_i64_eqz(&mut self) -> Self::Output {
        self.pop_operand(Some(ValType::I64))?;
        self.push_operand(ValType::I32)?;
        Ok(())
    }
    fn visit_i64_eq(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_ne(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_lt_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_lt_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_gt_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_gt_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_le_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_le_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_ge_s(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_i64_ge_u(&mut self) -> Self::Output {
        self.check_cmp_op(ValType::I64)
    }
    fn visit_f32_eq(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F32)
    }
    fn visit_f32_ne(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F32)
    }
    fn visit_f32_lt(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F32)
    }
    fn visit_f32_gt(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F32)
    }
    fn visit_f32_le(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F32)
    }
    fn visit_f32_ge(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F32)
    }
    fn visit_f64_eq(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F64)
    }
    fn visit_f64_ne(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F64)
    }
    fn visit_f64_lt(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F64)
    }
    fn visit_f64_gt(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F64)
    }
    fn visit_f64_le(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F64)
    }
    fn visit_f64_ge(&mut self) -> Self::Output {
        self.check_fcmp_op(ValType::F64)
    }
    // ---- Numeric instructions ----
    //
    // `check_unary_op(t)` pops one `t` and pushes a `t`; `check_binary_op(t)`
    // pops two and pushes one. The `f`-prefixed variants additionally require
    // floats to be enabled.

    fn visit_i32_clz(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I32)
    }
    fn visit_i32_ctz(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I32)
    }
    fn visit_i32_popcnt(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I32)
    }
    fn visit_i32_add(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_sub(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_mul(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_div_s(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_div_u(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_rem_s(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_rem_u(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_and(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_or(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_xor(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_shl(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_shr_s(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_shr_u(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_rotl(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i32_rotr(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I32)
    }
    fn visit_i64_clz(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I64)
    }
    fn visit_i64_ctz(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I64)
    }
    fn visit_i64_popcnt(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I64)
    }
    fn visit_i64_add(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_sub(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_mul(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_div_s(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_div_u(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_rem_s(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_rem_u(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_and(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_or(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_xor(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_shl(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_shr_s(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_shr_u(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_rotl(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_i64_rotr(&mut self) -> Self::Output {
        self.check_binary_op(ValType::I64)
    }
    fn visit_f32_abs(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F32)
    }
    fn visit_f32_neg(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F32)
    }
    fn visit_f32_ceil(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F32)
    }
    fn visit_f32_floor(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F32)
    }
    fn visit_f32_trunc(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F32)
    }
    fn visit_f32_nearest(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F32)
    }
    fn visit_f32_sqrt(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F32)
    }
    fn visit_f32_add(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F32)
    }
    fn visit_f32_sub(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F32)
    }
    fn visit_f32_mul(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F32)
    }
    fn visit_f32_div(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F32)
    }
    fn visit_f32_min(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F32)
    }
    fn visit_f32_max(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F32)
    }
    fn visit_f32_copysign(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F32)
    }
    fn visit_f64_abs(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F64)
    }
    fn visit_f64_neg(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F64)
    }
    fn visit_f64_ceil(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F64)
    }
    fn visit_f64_floor(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F64)
    }
    fn visit_f64_trunc(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F64)
    }
    fn visit_f64_nearest(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F64)
    }
    fn visit_f64_sqrt(&mut self) -> Self::Output {
        self.check_funary_op(ValType::F64)
    }
    fn visit_f64_add(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F64)
    }
    fn visit_f64_sub(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F64)
    }
    fn visit_f64_mul(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F64)
    }
    fn visit_f64_div(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F64)
    }
    fn visit_f64_min(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F64)
    }
    fn visit_f64_max(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F64)
    }
    fn visit_f64_copysign(&mut self) -> Self::Output {
        self.check_fbinary_op(ValType::F64)
    }
    // ---- Conversion instructions ----
    //
    // `check_conversion_op(result, input)` pops `input` and pushes `result`;
    // the `f`-prefixed variant also requires floats to be enabled.

    fn visit_i32_wrap_i64(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::I64)
    }
    fn visit_i32_trunc_f32_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F32)
    }
    fn visit_i32_trunc_f32_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F32)
    }
    fn visit_i32_trunc_f64_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F64)
    }
    fn visit_i32_trunc_f64_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F64)
    }
    fn visit_i64_extend_i32_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::I32)
    }
    fn visit_i64_extend_i32_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::I32)
    }
    fn visit_i64_trunc_f32_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F32)
    }
    fn visit_i64_trunc_f32_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F32)
    }
    fn visit_i64_trunc_f64_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F64)
    }
    fn visit_i64_trunc_f64_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F64)
    }
    fn visit_f32_convert_i32_s(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F32, ValType::I32)
    }
    fn visit_f32_convert_i32_u(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F32, ValType::I32)
    }
    fn visit_f32_convert_i64_s(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F32, ValType::I64)
    }
    fn visit_f32_convert_i64_u(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F32, ValType::I64)
    }
    fn visit_f32_demote_f64(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F32, ValType::F64)
    }
    fn visit_f64_convert_i32_s(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F64, ValType::I32)
    }
    fn visit_f64_convert_i32_u(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F64, ValType::I32)
    }
    fn visit_f64_convert_i64_s(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F64, ValType::I64)
    }
    fn visit_f64_convert_i64_u(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F64, ValType::I64)
    }
    fn visit_f64_promote_f32(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F64, ValType::F32)
    }
    fn visit_i32_reinterpret_f32(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F32)
    }
    fn visit_i64_reinterpret_f64(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F64)
    }
    fn visit_f32_reinterpret_i32(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F32, ValType::I32)
    }
    fn visit_f64_reinterpret_i64(&mut self) -> Self::Output {
        self.check_fconversion_op(ValType::F64, ValType::I64)
    }
    // Saturating truncations type-check identically to the trapping ones;
    // the feature gate lives in `WasmProposalValidator`.
    fn visit_i32_trunc_sat_f32_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F32)
    }
    fn visit_i32_trunc_sat_f32_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F32)
    }
    fn visit_i32_trunc_sat_f64_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F64)
    }
    fn visit_i32_trunc_sat_f64_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I32, ValType::F64)
    }
    fn visit_i64_trunc_sat_f32_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F32)
    }
    fn visit_i64_trunc_sat_f32_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F32)
    }
    fn visit_i64_trunc_sat_f64_s(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F64)
    }
    fn visit_i64_trunc_sat_f64_u(&mut self) -> Self::Output {
        self.check_conversion_op(ValType::I64, ValType::F64)
    }
    // Sign-extension operators are unary (same input/output type).
    fn visit_i32_extend8_s(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I32)
    }
    fn visit_i32_extend16_s(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I32)
    }
    fn visit_i64_extend8_s(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I64)
    }
    fn visit_i64_extend16_s(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I64)
    }
    fn visit_i64_extend32_s(&mut self) -> Self::Output {
        self.check_unary_op(ValType::I64)
    }
+ fn visit_i32_atomic_load(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_load(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_load16_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_load(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_load8_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_load(memarg, ValType::I32)
+ }
+ fn visit_i64_atomic_load(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_load(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_load32_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_load(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_load16_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_load(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_load8_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_load(memarg, ValType::I64)
+ }
+ fn visit_i32_atomic_store(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_store(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_store16(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_store(memarg, ValType::I32)
+ }
+ // --- Atomic memory instructions (thin delegations) ---
+ // Every visitor below forwards its `MemArg` plus the operand `ValType`
+ // implied by the opcode name (i32.* -> I32, i64.* -> I64) to a shared
+ // checker on `self`; the opcode's access width (8/16/32/full) is encoded
+ // only in the visitor's name, not passed explicitly here.
+ fn visit_i32_atomic_store8(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_store(memarg, ValType::I32)
+ }
+ fn visit_i64_atomic_store(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_store(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_store32(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_store(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_store16(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_store(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_store8(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_store(memarg, ValType::I64)
+ }
+ // Read-modify-write ops (add/sub/and/or/xor, all widths): all validate
+ // identically via `check_atomic_binary_op`.
+ fn visit_i32_atomic_rmw_add(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw_sub(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw_and(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw_or(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw_xor(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw16_add_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw16_sub_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw16_and_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw16_or_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw16_xor_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw8_add_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw8_sub_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw8_and_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw8_or_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw8_xor_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ fn visit_i64_atomic_rmw_add(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw_sub(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw_and(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw_or(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw_xor(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw32_add_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw32_sub_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw32_and_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw32_or_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw32_xor_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw16_add_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw16_sub_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw16_and_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw16_or_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw16_xor_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw8_add_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw8_sub_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw8_and_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw8_or_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw8_xor_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ // Exchange (xchg) validates like the binary RMW ops; compare-exchange
+ // (cmpxchg) takes an extra operand and so has its own checker,
+ // `check_atomic_binary_cmpxchg`.
+ fn visit_i32_atomic_rmw_xchg(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw16_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw8_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw_cmpxchg(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_cmpxchg(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw16_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_cmpxchg(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw8_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_cmpxchg(memarg, ValType::I32)
+ }
+ fn visit_i64_atomic_rmw_xchg(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw32_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw16_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw8_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw_cmpxchg(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_cmpxchg(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw32_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_cmpxchg(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw16_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_cmpxchg(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw8_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_cmpxchg(memarg, ValType::I64)
+ }
+ // memory.atomic.notify has the same stack shape as an i32 binary atomic
+ // op (address + i32 count in, i32 out), so it reuses that checker.
+ fn visit_memory_atomic_notify(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ // memory.atomic.wait32: `check_shared_memarg` yields the memory's index
+ // type `ty`; the instruction pops an i64, an i32, and the address (of
+ // type `ty`), then pushes an i32 result. Per the opcode these operands
+ // are presumably timeout / expected-value / address — NOTE(review):
+ // confirm against the threads-proposal spec.
+ fn visit_memory_atomic_wait32(&mut self, memarg: MemArg) -> Self::Output {
+ let ty = self.check_shared_memarg(memarg)?;
+ self.pop_operand(Some(ValType::I64))?;
+ self.pop_operand(Some(ValType::I32))?;
+ self.pop_operand(Some(ty))?;
+ self.push_operand(ValType::I32)?;
+ Ok(())
+ }
+ // memory.atomic.wait64: identical stack shape to wait32 except the
+ // second popped operand (the expected value) is an i64 to match the
+ // 64-bit access width.
+ fn visit_memory_atomic_wait64(&mut self, memarg: MemArg) -> Self::Output {
+ let ty = self.check_shared_memarg(memarg)?;
+ self.pop_operand(Some(ValType::I64))?;
+ self.pop_operand(Some(ValType::I64))?;
+ self.pop_operand(Some(ty))?;
+ self.push_operand(ValType::I32)?;
+ Ok(())
+ }
+ // atomic.fence takes no operands and produces none, so there is nothing
+ // to validate beyond having parsed the opcode.
+ fn visit_atomic_fence(&mut self) -> Self::Output {
+ Ok(())
+ }
+ // ref.null: validates the immediate heap type against the enabled
+ // feature set, then pushes a *nullable* reference of that heap type.
+ fn visit_ref_null(&mut self, heap_type: HeapType) -> Self::Output {
+ self.resources
+ .check_heap_type(heap_type, &self.features, self.offset)?;
+ self.push_operand(ValType::Ref(RefType {
+ nullable: true,
+ heap_type,
+ }))?;
+ Ok(())
+ }
+
+ // ref.as_non_null: pops a reference and re-pushes it with the same heap
+ // type but `nullable: false`. A `None` from `pop_ref` (the bottom/
+ // unreachable reference type) is propagated as `MaybeType::HeapBot`.
+ fn visit_ref_as_non_null(&mut self) -> Self::Output {
+ let ty = match self.pop_ref()? {
+ Some(ty) => MaybeType::Type(ValType::Ref(RefType {
+ nullable: false,
+ heap_type: ty.heap_type,
+ })),
+ None => MaybeType::HeapBot,
+ };
+ self.push_operand(ty)?;
+ Ok(())
+ }
+ // br_on_null: pops a reference, checks that the current stack matches
+ // the branch target's label types (pop in reverse, then push back), and
+ // finally pushes the popped reference coerced to non-nullable — on the
+ + // fall-through path the value is known not to be null.
+ fn visit_br_on_null(&mut self, relative_depth: u32) -> Self::Output {
+ let ty = match self.pop_ref()? {
+ None => MaybeType::HeapBot,
+ Some(ty) => MaybeType::Type(ValType::Ref(RefType {
+ nullable: false,
+ heap_type: ty.heap_type,
+ })),
+ };
+ let (ft, kind) = self.jump(relative_depth)?;
+ for ty in self.label_types(ft, kind)?.rev() {
+ self.pop_operand(Some(ty))?;
+ }
+ for ty in self.label_types(ft, kind)? {
+ self.push_operand(ty)?;
+ }
+ self.push_operand(ty)?;
+ Ok(())
+ }
+ // br_on_non_null: the branch target's *last* label type must be a
+ // reference type, and the popped reference — coerced to non-nullable,
+ // since the branch is only taken for non-null values — must match it.
+ // `lts.next_back()` consumes that last label type, so the remaining
+ // label types are checked by popping (skipping the already-popped ref)
+ // and then re-pushed from `lts`, which no longer includes the ref.
+ fn visit_br_on_non_null(&mut self, relative_depth: u32) -> Self::Output {
+ let ty = self.pop_ref()?;
+ let (ft, kind) = self.jump(relative_depth)?;
+ let mut lts = self.label_types(ft, kind)?;
+ match (lts.next_back(), ty) {
+ (None, _) => bail!(
+ self.offset,
+ "type mismatch: br_on_non_null target has no label types",
+ ),
+ // Bottom reference (unreachable) matches any expected ref type.
+ (Some(ValType::Ref(_)), None) => {}
+ (Some(rt1 @ ValType::Ref(_)), Some(rt0)) => {
+ // Switch rt0, our popped type, to a non-nullable type and
+ // perform the match because if the branch is taken it's a
+ // non-null value.
+ let ty = RefType {
+ nullable: false,
+ heap_type: rt0.heap_type,
+ };
+ if !self.resources.matches(ty.into(), rt1) {
+ // NOTE(review): the message prints `rt0` (the type as
+ // popped, possibly nullable) rather than the coerced
+ // non-nullable `ty` used in the match above — presumably
+ // intentional so the user sees their actual operand type.
+ bail!(
+ self.offset,
+ "type mismatch: expected {} but found {}",
+ ty_to_str(rt0.into()),
+ ty_to_str(rt1)
+ )
+ }
+ }
+ (Some(_), _) => bail!(
+ self.offset,
+ "type mismatch: br_on_non_null target does not end with heap type",
+ ),
+ }
+ for ty in self.label_types(ft, kind)?.rev().skip(1) {
+ self.pop_operand(Some(ty))?;
+ }
+ for ty in lts {
+ self.push_operand(ty)?;
+ }
+ Ok(())
+ }
+ // ref.is_null: pops any reference type and pushes the i32 test result.
+ fn visit_ref_is_null(&mut self) -> Self::Output {
+ self.pop_ref()?;
+ self.push_operand(ValType::I32)?;
+ Ok(())
+ }
+ // ref.func: the function index must be in bounds and the function must
+ // have been declared referenceable (e.g. via an element segment or
+ // export — the exact declaration rules live in `is_function_referenced`).
+ // With the function-references feature enabled the pushed value carries
+ // the precise typed-function heap type; otherwise it is a plain funcref.
+ fn visit_ref_func(&mut self, function_index: u32) -> Self::Output {
+ let type_index = match self.resources.type_index_of_function(function_index) {
+ Some(idx) => idx,
+ None => bail!(
+ self.offset,
+ "unknown function {}: function index out of bounds",
+ function_index,
+ ),
+ };
+ if !self.resources.is_function_referenced(function_index) {
+ bail!(self.offset, "undeclared function reference");
+ }
+
+ // FIXME(#924) this should not be conditional based on enabled
+ // proposals.
+ if self.features.function_references {
+ // The heap type stores a packed index, so an oversized type
+ // index is rejected here rather than silently truncated.
+ let heap_type = HeapType::TypedFunc(match type_index.try_into() {
+ Ok(packed) => packed,
+ Err(_) => {
+ bail!(self.offset, "type index of `ref.func` target too large")
+ }
+ });
+ self.push_operand(ValType::Ref(RefType {
+ nullable: false,
+ heap_type,
+ }))?;
+ } else {
+ self.push_operand(ValType::FUNCREF)?;
+ }
+ Ok(())
+ }
+ // --- v128 memory access, constants, splats, and lane ops ---
+ // `check_memarg` returns the memory's index (address) type, which is
+ // what gets popped for the address operand. `check_simd_lane_index`
+ // takes the lane immediate and the lane count of the shape in the
+ // opcode name (16 for i8x16, 8 for i16x8, 4 for 32-bit lanes, 2 for
+ // 64-bit lanes). Float lane ops additionally require floats to be
+ // enabled via `check_floats_enabled`.
+ fn visit_v128_load(&mut self, memarg: MemArg) -> Self::Output {
+ let ty = self.check_memarg(memarg)?;
+ self.pop_operand(Some(ty))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ fn visit_v128_store(&mut self, memarg: MemArg) -> Self::Output {
+ let ty = self.check_memarg(memarg)?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.pop_operand(Some(ty))?;
+ Ok(())
+ }
+ fn visit_v128_const(&mut self, _value: V128) -> Self::Output {
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ fn visit_i8x16_splat(&mut self) -> Self::Output {
+ self.check_v128_splat(ValType::I32)
+ }
+ fn visit_i16x8_splat(&mut self) -> Self::Output {
+ self.check_v128_splat(ValType::I32)
+ }
+ fn visit_i32x4_splat(&mut self) -> Self::Output {
+ self.check_v128_splat(ValType::I32)
+ }
+ fn visit_i64x2_splat(&mut self) -> Self::Output {
+ self.check_v128_splat(ValType::I64)
+ }
+ fn visit_f32x4_splat(&mut self) -> Self::Output {
+ self.check_floats_enabled()?;
+ self.check_v128_splat(ValType::F32)
+ }
+ fn visit_f64x2_splat(&mut self) -> Self::Output {
+ self.check_floats_enabled()?;
+ self.check_v128_splat(ValType::F64)
+ }
+ fn visit_i8x16_extract_lane_s(&mut self, lane: u8) -> Self::Output {
+ self.check_simd_lane_index(lane, 16)?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::I32)?;
+ Ok(())
+ }
+ // The unsigned extract variants validate identically to the signed
+ // ones, so they delegate.
+ fn visit_i8x16_extract_lane_u(&mut self, lane: u8) -> Self::Output {
+ self.visit_i8x16_extract_lane_s(lane)
+ }
+ fn visit_i16x8_extract_lane_s(&mut self, lane: u8) -> Self::Output {
+ self.check_simd_lane_index(lane, 8)?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::I32)?;
+ Ok(())
+ }
+ fn visit_i16x8_extract_lane_u(&mut self, lane: u8) -> Self::Output {
+ self.visit_i16x8_extract_lane_s(lane)
+ }
+ fn visit_i32x4_extract_lane(&mut self, lane: u8) -> Self::Output {
+ self.check_simd_lane_index(lane, 4)?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::I32)?;
+ Ok(())
+ }
+ fn visit_i8x16_replace_lane(&mut self, lane: u8) -> Self::Output {
+ self.check_simd_lane_index(lane, 16)?;
+ self.pop_operand(Some(ValType::I32))?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ fn visit_i16x8_replace_lane(&mut self, lane: u8) -> Self::Output {
+ self.check_simd_lane_index(lane, 8)?;
+ self.pop_operand(Some(ValType::I32))?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ fn visit_i32x4_replace_lane(&mut self, lane: u8) -> Self::Output {
+ self.check_simd_lane_index(lane, 4)?;
+ self.pop_operand(Some(ValType::I32))?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ fn visit_i64x2_extract_lane(&mut self, lane: u8) -> Self::Output {
+ self.check_simd_lane_index(lane, 2)?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::I64)?;
+ Ok(())
+ }
+ fn visit_i64x2_replace_lane(&mut self, lane: u8) -> Self::Output {
+ self.check_simd_lane_index(lane, 2)?;
+ self.pop_operand(Some(ValType::I64))?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ fn visit_f32x4_extract_lane(&mut self, lane: u8) -> Self::Output {
+ self.check_floats_enabled()?;
+ self.check_simd_lane_index(lane, 4)?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::F32)?;
+ Ok(())
+ }
+ fn visit_f32x4_replace_lane(&mut self, lane: u8) -> Self::Output {
+ self.check_floats_enabled()?;
+ self.check_simd_lane_index(lane, 4)?;
+ self.pop_operand(Some(ValType::F32))?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ fn visit_f64x2_extract_lane(&mut self, lane: u8) -> Self::Output {
+ self.check_floats_enabled()?;
+ self.check_simd_lane_index(lane, 2)?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::F64)?;
+ Ok(())
+ }
+ fn visit_f64x2_replace_lane(&mut self, lane: u8) -> Self::Output {
+ self.check_floats_enabled()?;
+ self.check_simd_lane_index(lane, 2)?;
+ self.pop_operand(Some(ValType::F64))?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ // --- SIMD arithmetic/comparison delegations ---
+ // Four shared checkers cover all of these: `check_v128_fbinary_op`
+ // (float binary, includes a floats-enabled check per its `f` prefix —
+ // NOTE(review): checker body not visible here), `check_v128_binary_op`
+ // (integer/bitwise binary), `check_v128_funary_op` (float-involved
+ // unary), and `check_v128_unary_op` (integer unary). The opcode itself
+ // never affects validation within a group — only the operand/result
+ // shape matters, which is identical across a group.
+ fn visit_f32x4_eq(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f32x4_ne(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f32x4_lt(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f32x4_gt(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f32x4_le(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f32x4_ge(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f64x2_eq(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f64x2_ne(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f64x2_lt(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f64x2_gt(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f64x2_le(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f64x2_ge(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f32x4_add(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f32x4_sub(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f32x4_mul(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f32x4_div(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f32x4_min(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f32x4_max(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f32x4_pmin(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f32x4_pmax(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f64x2_add(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f64x2_sub(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f64x2_mul(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f64x2_div(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f64x2_min(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f64x2_max(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f64x2_pmin(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f64x2_pmax(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ // Integer/bitwise binary ops: pop two v128, push one v128.
+ fn visit_i8x16_eq(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_ne(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_lt_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_lt_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_gt_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_gt_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_le_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_le_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_ge_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_ge_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_eq(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_ne(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_lt_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_lt_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_gt_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_gt_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_le_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_le_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_ge_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_ge_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_eq(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_ne(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_lt_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_lt_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_gt_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_gt_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_le_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_le_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_ge_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_ge_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ // i64x2 comparisons only exist in signed forms.
+ fn visit_i64x2_eq(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i64x2_ne(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i64x2_lt_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i64x2_gt_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i64x2_le_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i64x2_ge_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_v128_and(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_v128_andnot(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_v128_or(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_v128_xor(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_add(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_add_sat_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_add_sat_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_sub(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_sub_sat_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_sub_sat_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_min_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_min_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_max_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_max_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_add(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_add_sat_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_add_sat_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_sub(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_sub_sat_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_sub_sat_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_mul(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_min_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_min_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_max_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_max_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_add(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_sub(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_mul(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_min_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_min_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_max_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_max_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_dot_i16x8_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i64x2_add(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i64x2_sub(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i64x2_mul(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_avgr_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_avgr_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_narrow_i16x8_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_narrow_i16x8_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_narrow_i32x4_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_narrow_i32x4_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_extmul_low_i8x16_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_extmul_high_i8x16_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_extmul_low_i8x16_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_extmul_high_i8x16_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_extmul_low_i16x8_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_extmul_high_i16x8_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_extmul_low_i16x8_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_extmul_high_i16x8_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i64x2_extmul_low_i32x4_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i64x2_extmul_high_i32x4_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i64x2_extmul_low_i32x4_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i64x2_extmul_high_i32x4_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_q15mulr_sat_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ // Float-involved unary ops (rounding, abs/neg/sqrt, conversions): pop
+ // one v128, push one v128, via `check_v128_funary_op`.
+ fn visit_f32x4_ceil(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f32x4_floor(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f32x4_trunc(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f32x4_nearest(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f64x2_ceil(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f64x2_floor(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f64x2_trunc(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f64x2_nearest(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f32x4_abs(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f32x4_neg(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f32x4_sqrt(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f64x2_abs(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f64x2_neg(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f64x2_sqrt(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f32x4_demote_f64x2_zero(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f64x2_promote_low_f32x4(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f64x2_convert_low_i32x4_s(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f64x2_convert_low_i32x4_u(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_i32x4_trunc_sat_f32x4_s(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_i32x4_trunc_sat_f32x4_u(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_i32x4_trunc_sat_f64x2_s_zero(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_i32x4_trunc_sat_f64x2_u_zero(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f32x4_convert_i32x4_s(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f32x4_convert_i32x4_u(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ // Integer unary ops: pop one v128, push one v128.
+ fn visit_v128_not(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i8x16_abs(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i8x16_neg(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i8x16_popcnt(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i16x8_abs(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i16x8_neg(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i32x4_abs(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i32x4_neg(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i64x2_abs(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i64x2_neg(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i16x8_extend_low_i8x16_s(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i16x8_extend_high_i8x16_s(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i16x8_extend_low_i8x16_u(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i16x8_extend_high_i8x16_u(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i32x4_extend_low_i16x8_s(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i32x4_extend_high_i16x8_s(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i32x4_extend_low_i16x8_u(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i32x4_extend_high_i16x8_u(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i64x2_extend_low_i32x4_s(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i64x2_extend_high_i32x4_s(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i64x2_extend_low_i32x4_u(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i64x2_extend_high_i32x4_u(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i16x8_extadd_pairwise_i8x16_s(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i16x8_extadd_pairwise_i8x16_u(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i32x4_extadd_pairwise_i16x8_s(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i32x4_extadd_pairwise_i16x8_u(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ // v128.bitselect: three v128 operands (two data vectors plus the mask),
+ // one v128 result — written out explicitly rather than via a checker.
+ fn visit_v128_bitselect(&mut self) -> Self::Output {
+ self.pop_operand(Some(ValType::V128))?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ // --- Relaxed-SIMD instructions ---
+ // Validation-wise these are plain v128 unary/binary/ternary shapes;
+ // any relaxed-simd feature gating would live inside the shared
+ // checkers or elsewhere — not visible in this chunk.
+ fn visit_i8x16_relaxed_swizzle(&mut self) -> Self::Output {
+ self.pop_operand(Some(ValType::V128))?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ fn visit_i32x4_relaxed_trunc_f32x4_s(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i32x4_relaxed_trunc_f32x4_u(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i32x4_relaxed_trunc_f64x2_s_zero(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i32x4_relaxed_trunc_f64x2_u_zero(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_f32x4_relaxed_madd(&mut self) -> Self::Output {
+ self.check_v128_ternary_op()
+ }
+ fn visit_f32x4_relaxed_nmadd(&mut self) -> Self::Output {
+ self.check_v128_ternary_op()
+ }
+ fn visit_f64x2_relaxed_madd(&mut self) -> Self::Output {
+ self.check_v128_ternary_op()
+ }
+ fn visit_f64x2_relaxed_nmadd(&mut self) -> Self::Output {
+ self.check_v128_ternary_op()
+ }
+ fn visit_i8x16_relaxed_laneselect(&mut self) -> Self::Output {
+ self.check_v128_ternary_op()
+ }
+ fn visit_i16x8_relaxed_laneselect(&mut self) -> Self::Output {
+ self.check_v128_ternary_op()
+ }
+ fn visit_i32x4_relaxed_laneselect(&mut self) -> Self::Output {
+ self.check_v128_ternary_op()
+ }
+ fn visit_i64x2_relaxed_laneselect(&mut self) -> Self::Output {
+ self.check_v128_ternary_op()
+ }
+ fn visit_f32x4_relaxed_min(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_f32x4_relaxed_max(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_f64x2_relaxed_min(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_f64x2_relaxed_max(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_relaxed_q15mulr_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_relaxed_dot_i8x16_i7x16_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_relaxed_dot_i8x16_i7x16_add_s(&mut self) -> Self::Output {
+ self.check_v128_ternary_op()
+ }
+ // Boolean reductions (any_true/all_true) and bitmask extraction share
+ // `check_v128_bitmask_op` (v128 in, scalar out).
+ fn visit_v128_any_true(&mut self) -> Self::Output {
+ self.check_v128_bitmask_op()
+ }
+ fn visit_i8x16_all_true(&mut self) -> Self::Output {
+ self.check_v128_bitmask_op()
+ }
+ fn visit_i8x16_bitmask(&mut self) -> Self::Output {
+ self.check_v128_bitmask_op()
+ }
+ fn visit_i16x8_all_true(&mut self) -> Self::Output {
+ self.check_v128_bitmask_op()
+ }
+ fn visit_i16x8_bitmask(&mut self) -> Self::Output {
+ self.check_v128_bitmask_op()
+ }
+ fn visit_i32x4_all_true(&mut self) -> Self::Output {
+ self.check_v128_bitmask_op()
+ }
+ fn visit_i32x4_bitmask(&mut self) -> Self::Output {
+ self.check_v128_bitmask_op()
+ }
+ fn visit_i64x2_all_true(&mut self) -> Self::Output {
+ self.check_v128_bitmask_op()
+ }
+ fn visit_i64x2_bitmask(&mut self) -> Self::Output {
+ self.check_v128_bitmask_op()
+ }
+ // Shifts take a v128 plus an i32 shift amount; all shapes share
+ // `check_v128_shift_op`.
+ fn visit_i8x16_shl(&mut self) -> Self::Output {
+ self.check_v128_shift_op()
+ }
+ fn visit_i8x16_shr_s(&mut self) -> Self::Output {
+ self.check_v128_shift_op()
+ }
+ fn visit_i8x16_shr_u(&mut self) -> Self::Output {
+ self.check_v128_shift_op()
+ }
+ fn visit_i16x8_shl(&mut self) -> Self::Output {
+ self.check_v128_shift_op()
+ }
+ fn visit_i16x8_shr_s(&mut self) -> Self::Output {
+ self.check_v128_shift_op()
+ }
+ fn visit_i16x8_shr_u(&mut self) -> Self::Output {
+ self.check_v128_shift_op()
+ }
+ fn visit_i32x4_shl(&mut self) -> Self::Output {
+ self.check_v128_shift_op()
+ }
+ fn visit_i32x4_shr_s(&mut self) -> Self::Output {
+ self.check_v128_shift_op()
+ }
+ fn visit_i32x4_shr_u(&mut self) -> Self::Output {
+ self.check_v128_shift_op()
+ }
+ fn visit_i64x2_shl(&mut self) -> Self::Output {
+ self.check_v128_shift_op()
+ }
+ fn visit_i64x2_shr_s(&mut self) -> Self::Output {
+ self.check_v128_shift_op()
+ }
+ fn visit_i64x2_shr_u(&mut self) -> Self::Output {
+ self.check_v128_shift_op()
+ }
+ fn visit_i8x16_swizzle(&mut self) -> Self::Output {
+ self.pop_operand(Some(ValType::V128))?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ // i8x16.shuffle: each of the 16 lane immediates selects from the two
+ // concatenated input vectors, hence the bound of 32 (not 16).
+ fn visit_i8x16_shuffle(&mut self, lanes: [u8; 16]) -> Self::Output {
+ self.pop_operand(Some(ValType::V128))?;
+ self.pop_operand(Some(ValType::V128))?;
+ for i in lanes {
+ self.check_simd_lane_index(i, 32)?;
+ }
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ fn visit_v128_load8_splat(&mut self, memarg: MemArg) -> Self::Output {
+ let ty = self.check_memarg(memarg)?;
+ self.pop_operand(Some(ty))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ fn visit_v128_load16_splat(&mut self, memarg: MemArg) -> Self::Output {
+ let ty = self.check_memarg(memarg)?;
+ self.pop_operand(Some(ty))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ fn visit_v128_load32_splat(&mut self, memarg: MemArg) -> Self::Output {
+ let ty = self.check_memarg(memarg)?;
+ self.pop_operand(Some(ty))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ fn visit_v128_load32_zero(&mut self, memarg: MemArg) -> Self::Output {
+ self.visit_v128_load32_splat(memarg)
+ }
+ fn visit_v128_load64_splat(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_v128_load_op(memarg)
+ }
+ fn visit_v128_load64_zero(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_v128_load_op(memarg)
+ }
+ fn visit_v128_load8x8_s(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_v128_load_op(memarg)
+ }
+ fn visit_v128_load8x8_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_v128_load_op(memarg)
+ }
+ fn visit_v128_load16x4_s(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_v128_load_op(memarg)
+ }
+ fn visit_v128_load16x4_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_v128_load_op(memarg)
+ }
+ fn visit_v128_load32x2_s(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_v128_load_op(memarg)
+ }
+ fn visit_v128_load32x2_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_v128_load_op(memarg)
+ }
+ fn visit_v128_load8_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output {
+ let idx = self.check_memarg(memarg)?;
+ self.check_simd_lane_index(lane, 16)?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.pop_operand(Some(idx))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ fn visit_v128_load16_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output {
+ let idx = self.check_memarg(memarg)?;
+ self.check_simd_lane_index(lane, 8)?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.pop_operand(Some(idx))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ fn visit_v128_load32_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output {
+ let idx = self.check_memarg(memarg)?;
+ self.check_simd_lane_index(lane, 4)?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.pop_operand(Some(idx))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ fn visit_v128_load64_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output {
+ let idx = self.check_memarg(memarg)?;
+ self.check_simd_lane_index(lane, 2)?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.pop_operand(Some(idx))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ fn visit_v128_store8_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output {
+ let idx = self.check_memarg(memarg)?;
+ self.check_simd_lane_index(lane, 16)?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.pop_operand(Some(idx))?;
+ Ok(())
+ }
+ fn visit_v128_store16_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output {
+ let idx = self.check_memarg(memarg)?;
+ self.check_simd_lane_index(lane, 8)?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.pop_operand(Some(idx))?;
+ Ok(())
+ }
+ fn visit_v128_store32_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output {
+ let idx = self.check_memarg(memarg)?;
+ self.check_simd_lane_index(lane, 4)?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.pop_operand(Some(idx))?;
+ Ok(())
+ }
+ fn visit_v128_store64_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output {
+ let idx = self.check_memarg(memarg)?;
+ self.check_simd_lane_index(lane, 2)?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.pop_operand(Some(idx))?;
+ Ok(())
+ }
+ fn visit_memory_init(&mut self, segment: u32, mem: u32) -> Self::Output {
+ let ty = self.check_memory_index(mem)?;
+ match self.resources.data_count() {
+ None => bail!(self.offset, "data count section required"),
+ Some(count) if segment < count => {}
+ Some(_) => bail!(self.offset, "unknown data segment {}", segment),
+ }
+ self.pop_operand(Some(ValType::I32))?;
+ self.pop_operand(Some(ValType::I32))?;
+ self.pop_operand(Some(ty))?;
+ Ok(())
+ }
+ fn visit_data_drop(&mut self, segment: u32) -> Self::Output {
+ match self.resources.data_count() {
+ None => bail!(self.offset, "data count section required"),
+ Some(count) if segment < count => {}
+ Some(_) => bail!(self.offset, "unknown data segment {}", segment),
+ }
+ Ok(())
+ }
+ fn visit_memory_copy(&mut self, dst: u32, src: u32) -> Self::Output {
+ let dst_ty = self.check_memory_index(dst)?;
+ let src_ty = self.check_memory_index(src)?;
+
+ // The length operand here is the smaller of src/dst, which is
+ // i32 if one is i32
+ self.pop_operand(Some(match src_ty {
+ ValType::I32 => ValType::I32,
+ _ => dst_ty,
+ }))?;
+
+ // ... and the offset into each memory is required to be
+ // whatever the indexing type is for that memory
+ self.pop_operand(Some(src_ty))?;
+ self.pop_operand(Some(dst_ty))?;
+ Ok(())
+ }
+ fn visit_memory_fill(&mut self, mem: u32) -> Self::Output {
+ let ty = self.check_memory_index(mem)?;
+ self.pop_operand(Some(ty))?;
+ self.pop_operand(Some(ValType::I32))?;
+ self.pop_operand(Some(ty))?;
+ Ok(())
+ }
+ fn visit_memory_discard(&mut self, mem: u32) -> Self::Output {
+ let ty = self.check_memory_index(mem)?;
+ self.pop_operand(Some(ty))?;
+ self.pop_operand(Some(ty))?;
+ Ok(())
+ }
+ fn visit_table_init(&mut self, segment: u32, table: u32) -> Self::Output {
+ if table > 0 {}
+ let table = match self.resources.table_at(table) {
+ Some(table) => table,
+ None => bail!(
+ self.offset,
+ "unknown table {}: table index out of bounds",
+ table
+ ),
+ };
+ let segment_ty = match self.resources.element_type_at(segment) {
+ Some(ty) => ty,
+ None => bail!(
+ self.offset,
+ "unknown elem segment {}: segment index out of bounds",
+ segment
+ ),
+ };
+ if segment_ty != table.element_type {
+ bail!(self.offset, "type mismatch");
+ }
+ self.pop_operand(Some(ValType::I32))?;
+ self.pop_operand(Some(ValType::I32))?;
+ self.pop_operand(Some(ValType::I32))?;
+ Ok(())
+ }
+ fn visit_elem_drop(&mut self, segment: u32) -> Self::Output {
+ if segment >= self.resources.element_count() {
+ bail!(
+ self.offset,
+ "unknown elem segment {}: segment index out of bounds",
+ segment
+ );
+ }
+ Ok(())
+ }
+ fn visit_table_copy(&mut self, dst_table: u32, src_table: u32) -> Self::Output {
+ if src_table > 0 || dst_table > 0 {}
+ let (src, dst) = match (
+ self.resources.table_at(src_table),
+ self.resources.table_at(dst_table),
+ ) {
+ (Some(a), Some(b)) => (a, b),
+ _ => bail!(self.offset, "table index out of bounds"),
+ };
+ if !self.resources.matches(
+ ValType::Ref(src.element_type),
+ ValType::Ref(dst.element_type),
+ ) {
+ bail!(self.offset, "type mismatch");
+ }
+ self.pop_operand(Some(ValType::I32))?;
+ self.pop_operand(Some(ValType::I32))?;
+ self.pop_operand(Some(ValType::I32))?;
+ Ok(())
+ }
+ fn visit_table_get(&mut self, table: u32) -> Self::Output {
+ let ty = match self.resources.table_at(table) {
+ Some(ty) => ty.element_type,
+ None => bail!(self.offset, "table index out of bounds"),
+ };
+ self.pop_operand(Some(ValType::I32))?;
+ self.push_operand(ValType::Ref(ty))?;
+ Ok(())
+ }
+ fn visit_table_set(&mut self, table: u32) -> Self::Output {
+ let ty = match self.resources.table_at(table) {
+ Some(ty) => ty.element_type,
+ None => bail!(self.offset, "table index out of bounds"),
+ };
+ self.pop_operand(Some(ValType::Ref(ty)))?;
+ self.pop_operand(Some(ValType::I32))?;
+ Ok(())
+ }
+ fn visit_table_grow(&mut self, table: u32) -> Self::Output {
+ let ty = match self.resources.table_at(table) {
+ Some(ty) => ty.element_type,
+ None => bail!(self.offset, "table index out of bounds"),
+ };
+ self.pop_operand(Some(ValType::I32))?;
+ self.pop_operand(Some(ValType::Ref(ty)))?;
+ self.push_operand(ValType::I32)?;
+ Ok(())
+ }
+ fn visit_table_size(&mut self, table: u32) -> Self::Output {
+ if self.resources.table_at(table).is_none() {
+ bail!(self.offset, "table index out of bounds");
+ }
+ self.push_operand(ValType::I32)?;
+ Ok(())
+ }
+ fn visit_table_fill(&mut self, table: u32) -> Self::Output {
+ let ty = match self.resources.table_at(table) {
+ Some(ty) => ty.element_type,
+ None => bail!(self.offset, "table index out of bounds"),
+ };
+ self.pop_operand(Some(ValType::I32))?;
+ self.pop_operand(Some(ValType::Ref(ty)))?;
+ self.pop_operand(Some(ValType::I32))?;
+ Ok(())
+ }
+}
+
+#[derive(Clone)]
+enum Either<A, B> {
+ A(A),
+ B(B),
+}
+
+impl<A, B> Iterator for Either<A, B>
+where
+ A: Iterator,
+ B: Iterator<Item = A::Item>,
+{
+ type Item = A::Item;
+ fn next(&mut self) -> Option<A::Item> {
+ match self {
+ Either::A(a) => a.next(),
+ Either::B(b) => b.next(),
+ }
+ }
+}
+
+impl<A, B> DoubleEndedIterator for Either<A, B>
+where
+ A: DoubleEndedIterator,
+ B: DoubleEndedIterator<Item = A::Item>,
+{
+ fn next_back(&mut self) -> Option<A::Item> {
+ match self {
+ Either::A(a) => a.next_back(),
+ Either::B(b) => b.next_back(),
+ }
+ }
+}
+
+impl<A, B> ExactSizeIterator for Either<A, B>
+where
+ A: ExactSizeIterator,
+ B: ExactSizeIterator<Item = A::Item>,
+{
+ fn len(&self) -> usize {
+ match self {
+ Either::A(a) => a.len(),
+ Either::B(b) => b.len(),
+ }
+ }
+}
+
+trait PreciseIterator: ExactSizeIterator + DoubleEndedIterator + Clone {}
+impl<T: ExactSizeIterator + DoubleEndedIterator + Clone> PreciseIterator for T {}
+
+impl Locals {
+ /// Defines another group of `count` local variables of type `ty`.
+ ///
+ /// Returns `true` if the definition was successful. Local variable
+ /// definition is unsuccessful in case the amount of total variables
+ /// after definition exceeds the allowed maximum number.
+ fn define(&mut self, count: u32, ty: ValType) -> bool {
+ match self.num_locals.checked_add(count) {
+ Some(n) => self.num_locals = n,
+ None => return false,
+ }
+ if self.num_locals > (MAX_WASM_FUNCTION_LOCALS as u32) {
+ return false;
+ }
+ for _ in 0..count {
+ if self.first.len() >= MAX_LOCALS_TO_TRACK {
+ break;
+ }
+ self.first.push(ty);
+ }
+ self.all.push((self.num_locals - 1, ty));
+ true
+ }
+
+ /// Returns the number of defined local variables.
+ pub(super) fn len_locals(&self) -> u32 {
+ self.num_locals
+ }
+
+ /// Returns the type of the local variable at the given index if any.
+ #[inline]
+ pub(super) fn get(&self, idx: u32) -> Option<ValType> {
+ match self.first.get(idx as usize) {
+ Some(ty) => Some(*ty),
+ None => self.get_bsearch(idx),
+ }
+ }
+
+ fn get_bsearch(&self, idx: u32) -> Option<ValType> {
+ match self.all.binary_search_by_key(&idx, |(idx, _)| *idx) {
+ // If this index would be inserted at the end of the list, then the
+ // index is out of bounds and we return an error.
+ Err(i) if i == self.all.len() => None,
+
+ // If `Ok` is returned we found the index exactly, or if `Err` is
+ // returned the position is the one which is the least index
+ // greater that `idx`, which is still the type of `idx` according
+ // to our "compressed" representation. In both cases we access the
+ // list at index `i`.
+ Ok(i) | Err(i) => Some(self.all[i].1),
+ }
+ }
+}
diff --git a/third_party/rust/wasmparser/src/validator/types.rs b/third_party/rust/wasmparser/src/validator/types.rs
new file mode 100644
index 0000000000..ce0559d34c
--- /dev/null
+++ b/third_party/rust/wasmparser/src/validator/types.rs
@@ -0,0 +1,2166 @@
+//! Types relating to type information provided by validation.
+
+use super::{component::ComponentState, core::Module};
+use crate::{
+ Export, ExternalKind, FuncType, GlobalType, Import, MemoryType, PrimitiveValType, RefType,
+ TableType, TypeRef, ValType,
+};
+use indexmap::{IndexMap, IndexSet};
+use std::collections::HashMap;
+use std::{
+ borrow::Borrow,
+ fmt,
+ hash::{Hash, Hasher},
+ mem,
+ ops::{Deref, DerefMut},
+ sync::Arc,
+};
+use url::Url;
+
+/// The maximum number of parameters in the canonical ABI that can be passed by value.
+///
+/// Functions that exceed this limit will instead pass parameters indirectly from
+/// linear memory via a single pointer parameter.
+const MAX_FLAT_FUNC_PARAMS: usize = 16;
+/// The maximum number of results in the canonical ABI that can be returned by a function.
+///
+/// Functions that exceed this limit have their results written to linear memory via an
+/// additional pointer parameter (imports) or return a single pointer value (exports).
+const MAX_FLAT_FUNC_RESULTS: usize = 1;
+
+/// The maximum lowered types, including a possible type for a return pointer parameter.
+const MAX_LOWERED_TYPES: usize = MAX_FLAT_FUNC_PARAMS + 1;
+
+/// Represents a kebab string slice used in validation.
+///
+/// This is a wrapper around `str` that ensures the slice is
+/// a valid kebab case string according to the component model
+/// specification.
+///
+/// It also provides an equality and hashing implementation
+/// that ignores ASCII case.
+#[derive(Debug, Eq)]
+#[repr(transparent)]
+pub struct KebabStr(str);
+
+impl KebabStr {
+ /// Creates a new kebab string slice.
+ ///
+ /// Returns `None` if the given string is not a valid kebab string.
+ pub fn new<'a>(s: impl AsRef<str> + 'a) -> Option<&'a Self> {
+ let s = Self::new_unchecked(s);
+ if s.is_kebab_case() {
+ Some(s)
+ } else {
+ None
+ }
+ }
+
+ pub(crate) fn new_unchecked<'a>(s: impl AsRef<str> + 'a) -> &'a Self {
+ // Safety: `KebabStr` is a transparent wrapper around `str`
+ // Therefore transmuting `&str` to `&KebabStr` is safe.
+ unsafe { std::mem::transmute::<_, &Self>(s.as_ref()) }
+ }
+
+ /// Gets the underlying string slice.
+ pub fn as_str(&self) -> &str {
+ &self.0
+ }
+
+ /// Converts the slice to an owned string.
+ pub fn to_kebab_string(&self) -> KebabString {
+ KebabString(self.to_string())
+ }
+
+ fn is_kebab_case(&self) -> bool {
+ let mut lower = false;
+ let mut upper = false;
+ for c in self.chars() {
+ match c {
+ 'a'..='z' if !lower && !upper => lower = true,
+ 'A'..='Z' if !lower && !upper => upper = true,
+ 'a'..='z' if lower => {}
+ 'A'..='Z' if upper => {}
+ '0'..='9' if lower || upper => {}
+ '-' if lower || upper => {
+ lower = false;
+ upper = false;
+ }
+ _ => return false,
+ }
+ }
+
+ !self.is_empty() && !self.ends_with('-')
+ }
+}
+
+impl Deref for KebabStr {
+ type Target = str;
+
+ fn deref(&self) -> &str {
+ self.as_str()
+ }
+}
+
+impl PartialEq for KebabStr {
+ fn eq(&self, other: &Self) -> bool {
+ if self.len() != other.len() {
+ return false;
+ }
+
+ self.chars()
+ .zip(other.chars())
+ .all(|(a, b)| a.to_ascii_lowercase() == b.to_ascii_lowercase())
+ }
+}
+
+impl PartialEq<KebabString> for KebabStr {
+ fn eq(&self, other: &KebabString) -> bool {
+ self.eq(other.as_kebab_str())
+ }
+}
+
+impl Hash for KebabStr {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.len().hash(state);
+
+ for b in self.chars() {
+ b.to_ascii_lowercase().hash(state);
+ }
+ }
+}
+
+impl fmt::Display for KebabStr {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ (self as &str).fmt(f)
+ }
+}
+
+impl ToOwned for KebabStr {
+ type Owned = KebabString;
+
+ fn to_owned(&self) -> Self::Owned {
+ self.to_kebab_string()
+ }
+}
+
+/// Represents an owned kebab string for validation.
+///
+/// This is a wrapper around `String` that ensures the string is
+/// a valid kebab case string according to the component model
+/// specification.
+///
+/// It also provides an equality and hashing implementation
+/// that ignores ASCII case.
+#[derive(Debug, Clone, Eq)]
+pub struct KebabString(String);
+
+impl KebabString {
+ /// Creates a new kebab string.
+ ///
+ /// Returns `None` if the given string is not a valid kebab string.
+ pub fn new(s: impl Into<String>) -> Option<Self> {
+ let s = s.into();
+ if KebabStr::new(&s).is_some() {
+ Some(Self(s))
+ } else {
+ None
+ }
+ }
+
+ /// Gets the underlying string.
+ pub fn as_str(&self) -> &str {
+ self.0.as_str()
+ }
+
+ /// Converts the kebab string to a kebab string slice.
+ pub fn as_kebab_str(&self) -> &KebabStr {
+ // Safety: internal string is always valid kebab-case
+ KebabStr::new_unchecked(self.as_str())
+ }
+}
+
+impl Deref for KebabString {
+ type Target = KebabStr;
+
+ fn deref(&self) -> &Self::Target {
+ self.as_kebab_str()
+ }
+}
+
+impl Borrow<KebabStr> for KebabString {
+ fn borrow(&self) -> &KebabStr {
+ self.as_kebab_str()
+ }
+}
+
+impl PartialEq for KebabString {
+ fn eq(&self, other: &Self) -> bool {
+ self.as_kebab_str().eq(other.as_kebab_str())
+ }
+}
+
+impl PartialEq<KebabStr> for KebabString {
+ fn eq(&self, other: &KebabStr) -> bool {
+ self.as_kebab_str().eq(other)
+ }
+}
+
+impl Hash for KebabString {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.as_kebab_str().hash(state)
+ }
+}
+
+impl fmt::Display for KebabString {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.as_kebab_str().fmt(f)
+ }
+}
+
+impl From<KebabString> for String {
+ fn from(s: KebabString) -> String {
+ s.0
+ }
+}
+
+/// A simple alloc-free list of types used for calculating lowered function signatures.
+pub(crate) struct LoweredTypes {
+ types: [ValType; MAX_LOWERED_TYPES],
+ len: usize,
+ max: usize,
+}
+
+impl LoweredTypes {
+ fn new(max: usize) -> Self {
+ assert!(max <= MAX_LOWERED_TYPES);
+ Self {
+ types: [ValType::I32; MAX_LOWERED_TYPES],
+ len: 0,
+ max,
+ }
+ }
+
+ fn len(&self) -> usize {
+ self.len
+ }
+
+ fn maxed(&self) -> bool {
+ self.len == self.max
+ }
+
+ fn get_mut(&mut self, index: usize) -> Option<&mut ValType> {
+ if index < self.len {
+ Some(&mut self.types[index])
+ } else {
+ None
+ }
+ }
+
+ fn push(&mut self, ty: ValType) -> bool {
+ if self.maxed() {
+ return false;
+ }
+
+ self.types[self.len] = ty;
+ self.len += 1;
+ true
+ }
+
+ fn clear(&mut self) {
+ self.len = 0;
+ }
+
+ pub fn as_slice(&self) -> &[ValType] {
+ &self.types[..self.len]
+ }
+
+ pub fn iter(&self) -> impl Iterator<Item = ValType> + '_ {
+ self.as_slice().iter().copied()
+ }
+}
+
+/// Represents information about a component function type lowering.
+pub(crate) struct LoweringInfo {
+ pub(crate) params: LoweredTypes,
+ pub(crate) results: LoweredTypes,
+ pub(crate) requires_memory: bool,
+ pub(crate) requires_realloc: bool,
+}
+
+impl LoweringInfo {
+ pub(crate) fn into_func_type(self) -> FuncType {
+ FuncType::new(
+ self.params.as_slice().iter().copied(),
+ self.results.as_slice().iter().copied(),
+ )
+ }
+}
+
+impl Default for LoweringInfo {
+ fn default() -> Self {
+ Self {
+ params: LoweredTypes::new(MAX_FLAT_FUNC_PARAMS),
+ results: LoweredTypes::new(MAX_FLAT_FUNC_RESULTS),
+ requires_memory: false,
+ requires_realloc: false,
+ }
+ }
+}
+
+fn push_primitive_wasm_types(ty: &PrimitiveValType, lowered_types: &mut LoweredTypes) -> bool {
+ match ty {
+ PrimitiveValType::Bool
+ | PrimitiveValType::S8
+ | PrimitiveValType::U8
+ | PrimitiveValType::S16
+ | PrimitiveValType::U16
+ | PrimitiveValType::S32
+ | PrimitiveValType::U32
+ | PrimitiveValType::Char => lowered_types.push(ValType::I32),
+ PrimitiveValType::S64 | PrimitiveValType::U64 => lowered_types.push(ValType::I64),
+ PrimitiveValType::Float32 => lowered_types.push(ValType::F32),
+ PrimitiveValType::Float64 => lowered_types.push(ValType::F64),
+ PrimitiveValType::String => {
+ lowered_types.push(ValType::I32) && lowered_types.push(ValType::I32)
+ }
+ }
+}
+
+/// Represents a unique identifier for a type known to a [`crate::Validator`].
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct TypeId {
+ /// The index into the global list of types.
+ pub(crate) index: usize,
+ /// The effective type size for the type.
+ ///
+ /// This is stored as part of the ID to avoid having to recurse through
+ /// the global type list when calculating type sizes.
+ pub(crate) type_size: u32,
+ /// A unique integer assigned to this type.
+ ///
+ /// The purpose of this field is to ensure that two different `TypeId`
+ /// representations can be handed out for two different aliased types within
+ /// a component that actually point to the same underlying type (as pointed
+ /// to by the `index` field).
+ unique_id: u32,
+}
+
+// The size of `TypeId` was seen to have a large-ish impact in #844, so this
+// assert ensures that it stays relatively small.
+const _: () = {
+ assert!(std::mem::size_of::<TypeId>() <= 16);
+};
+
+/// A unified type definition for validating WebAssembly modules and components.
+#[derive(Debug)]
+pub enum Type {
+ /// The definition is for a core function type.
+ Func(FuncType),
+ /// The definition is for a core module type.
+ ///
+ /// This variant is only supported when parsing a component.
+ Module(ModuleType),
+ /// The definition is for a core module instance type.
+ ///
+ /// This variant is only supported when parsing a component.
+ Instance(InstanceType),
+ /// The definition is for a component type.
+ ///
+ /// This variant is only supported when parsing a component.
+ Component(ComponentType),
+ /// The definition is for a component instance type.
+ ///
+ /// This variant is only supported when parsing a component.
+ ComponentInstance(ComponentInstanceType),
+ /// The definition is for a component function type.
+ ///
+ /// This variant is only supported when parsing a component.
+ ComponentFunc(ComponentFuncType),
+ /// The definition is for a component defined type.
+ ///
+ /// This variant is only supported when parsing a component.
+ Defined(ComponentDefinedType),
+}
+
+impl Type {
+ /// Converts the type to a core function type.
+ pub fn as_func_type(&self) -> Option<&FuncType> {
+ match self {
+ Self::Func(ty) => Some(ty),
+ _ => None,
+ }
+ }
+
+ /// Converts the type to a core module type.
+ pub fn as_module_type(&self) -> Option<&ModuleType> {
+ match self {
+ Self::Module(ty) => Some(ty),
+ _ => None,
+ }
+ }
+
+ /// Converts the type to a core module instance type.
+ pub fn as_instance_type(&self) -> Option<&InstanceType> {
+ match self {
+ Self::Instance(ty) => Some(ty),
+ _ => None,
+ }
+ }
+
+ /// Converts the type to a component type.
+ pub fn as_component_type(&self) -> Option<&ComponentType> {
+ match self {
+ Self::Component(ty) => Some(ty),
+ _ => None,
+ }
+ }
+
+ /// Converts the type to a component instance type.
+ pub fn as_component_instance_type(&self) -> Option<&ComponentInstanceType> {
+ match self {
+ Self::ComponentInstance(ty) => Some(ty),
+ _ => None,
+ }
+ }
+
+ /// Converts the type to a component function type.
+ pub fn as_component_func_type(&self) -> Option<&ComponentFuncType> {
+ match self {
+ Self::ComponentFunc(ty) => Some(ty),
+ _ => None,
+ }
+ }
+
+ /// Converts the type to a component defined type.
+ pub fn as_defined_type(&self) -> Option<&ComponentDefinedType> {
+ match self {
+ Self::Defined(ty) => Some(ty),
+ _ => None,
+ }
+ }
+
+ pub(crate) fn type_size(&self) -> u32 {
+ match self {
+ Self::Func(ty) => 1 + (ty.params().len() + ty.results().len()) as u32,
+ Self::Module(ty) => ty.type_size,
+ Self::Instance(ty) => ty.type_size,
+ Self::Component(ty) => ty.type_size,
+ Self::ComponentInstance(ty) => ty.type_size,
+ Self::ComponentFunc(ty) => ty.type_size,
+ Self::Defined(ty) => ty.type_size(),
+ }
+ }
+}
+
+/// A component value type.
+#[derive(Debug, Clone, Copy)]
+pub enum ComponentValType {
+ /// The value type is one of the primitive types.
+ Primitive(PrimitiveValType),
+ /// The type is represented with the given type identifier.
+ Type(TypeId),
+}
+
+impl ComponentValType {
+ pub(crate) fn requires_realloc(&self, types: &TypeList) -> bool {
+ match self {
+ ComponentValType::Primitive(ty) => ty.requires_realloc(),
+ ComponentValType::Type(ty) => types[*ty]
+ .as_defined_type()
+ .unwrap()
+ .requires_realloc(types),
+ }
+ }
+
+ /// Determines if component value type `a` is a subtype of `b`.
+ pub fn is_subtype_of(a: &Self, at: TypesRef, b: &Self, bt: TypesRef) -> bool {
+ Self::internal_is_subtype_of(a, at.list, b, bt.list)
+ }
+
+ pub(crate) fn internal_is_subtype_of(a: &Self, at: &TypeList, b: &Self, bt: &TypeList) -> bool {
+ match (a, b) {
+ (ComponentValType::Primitive(a), ComponentValType::Primitive(b)) => {
+ PrimitiveValType::is_subtype_of(*a, *b)
+ }
+ (ComponentValType::Type(a), ComponentValType::Type(b)) => {
+ ComponentDefinedType::internal_is_subtype_of(
+ at[*a].as_defined_type().unwrap(),
+ at,
+ bt[*b].as_defined_type().unwrap(),
+ bt,
+ )
+ }
+ (ComponentValType::Primitive(a), ComponentValType::Type(b)) => {
+ match bt[*b].as_defined_type().unwrap() {
+ ComponentDefinedType::Primitive(b) => PrimitiveValType::is_subtype_of(*a, *b),
+ _ => false,
+ }
+ }
+ (ComponentValType::Type(a), ComponentValType::Primitive(b)) => {
+ match at[*a].as_defined_type().unwrap() {
+ ComponentDefinedType::Primitive(a) => PrimitiveValType::is_subtype_of(*a, *b),
+ _ => false,
+ }
+ }
+ }
+ }
+
+ fn push_wasm_types(&self, types: &TypeList, lowered_types: &mut LoweredTypes) -> bool {
+ match self {
+ Self::Primitive(ty) => push_primitive_wasm_types(ty, lowered_types),
+ Self::Type(id) => types[*id]
+ .as_defined_type()
+ .unwrap()
+ .push_wasm_types(types, lowered_types),
+ }
+ }
+
+ pub(crate) fn type_size(&self) -> u32 {
+ match self {
+ Self::Primitive(_) => 1,
+ Self::Type(id) => id.type_size,
+ }
+ }
+}
+
+/// The entity type for imports and exports of a module.
+#[derive(Debug, Clone, Copy)]
+pub enum EntityType {
+ /// The entity is a function.
+ Func(TypeId),
+ /// The entity is a table.
+ Table(TableType),
+ /// The entity is a memory.
+ Memory(MemoryType),
+ /// The entity is a global.
+ Global(GlobalType),
+ /// The entity is a tag.
+ Tag(TypeId),
+}
+
+impl EntityType {
+ /// Determines if entity type `a` is a subtype of `b`.
+ pub fn is_subtype_of(a: &Self, at: TypesRef, b: &Self, bt: TypesRef) -> bool {
+ Self::internal_is_subtype_of(a, at.list, b, bt.list)
+ }
+
+ pub(crate) fn internal_is_subtype_of(a: &Self, at: &TypeList, b: &Self, bt: &TypeList) -> bool {
+ macro_rules! limits_match {
+ ($a:expr, $b:expr) => {{
+ let a = $a;
+ let b = $b;
+ a.initial >= b.initial
+ && match b.maximum {
+ Some(b_max) => match a.maximum {
+ Some(a_max) => a_max <= b_max,
+ None => false,
+ },
+ None => true,
+ }
+ }};
+ }
+
+ match (a, b) {
+ (EntityType::Func(a), EntityType::Func(b)) => {
+ at[*a].as_func_type().unwrap() == bt[*b].as_func_type().unwrap()
+ }
+ (EntityType::Table(a), EntityType::Table(b)) => {
+ a.element_type == b.element_type && limits_match!(a, b)
+ }
+ (EntityType::Memory(a), EntityType::Memory(b)) => {
+ a.shared == b.shared && a.memory64 == b.memory64 && limits_match!(a, b)
+ }
+ (EntityType::Global(a), EntityType::Global(b)) => a == b,
+ (EntityType::Tag(a), EntityType::Tag(b)) => {
+ at[*a].as_func_type().unwrap() == bt[*b].as_func_type().unwrap()
+ }
+ _ => false,
+ }
+ }
+
+ pub(crate) fn desc(&self) -> &'static str {
+ match self {
+ Self::Func(_) => "function",
+ Self::Table(_) => "table",
+ Self::Memory(_) => "memory",
+ Self::Global(_) => "global",
+ Self::Tag(_) => "tag",
+ }
+ }
+
+ pub(crate) fn type_size(&self) -> u32 {
+ match self {
+ Self::Func(id) | Self::Tag(id) => id.type_size,
+ Self::Table(_) | Self::Memory(_) | Self::Global(_) => 1,
+ }
+ }
+}
+
+trait ModuleImportKey {
+ fn module(&self) -> &str;
+ fn name(&self) -> &str;
+}
+
+impl<'a> Borrow<dyn ModuleImportKey + 'a> for (String, String) {
+ fn borrow(&self) -> &(dyn ModuleImportKey + 'a) {
+ self
+ }
+}
+
+impl Hash for (dyn ModuleImportKey + '_) {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.module().hash(state);
+ self.name().hash(state);
+ }
+}
+
+impl PartialEq for (dyn ModuleImportKey + '_) {
+ fn eq(&self, other: &Self) -> bool {
+ self.module() == other.module() && self.name() == other.name()
+ }
+}
+
+impl Eq for (dyn ModuleImportKey + '_) {}
+
+impl ModuleImportKey for (String, String) {
+ fn module(&self) -> &str {
+ &self.0
+ }
+
+ fn name(&self) -> &str {
+ &self.1
+ }
+}
+
+impl ModuleImportKey for (&str, &str) {
+ fn module(&self) -> &str {
+ self.0
+ }
+
+ fn name(&self) -> &str {
+ self.1
+ }
+}
+
+/// Represents a core module type.
+#[derive(Debug, Clone)]
+pub struct ModuleType {
+ /// The effective type size for the module type.
+ pub(crate) type_size: u32,
+ /// The imports of the module type.
+ pub imports: IndexMap<(String, String), EntityType>,
+ /// The exports of the module type.
+ pub exports: IndexMap<String, EntityType>,
+}
+
+impl ModuleType {
+ /// Looks up an import by its module and name.
+ ///
+ /// Returns `None` if the import was not found.
+ pub fn lookup_import(&self, module: &str, name: &str) -> Option<&EntityType> {
+ self.imports.get(&(module, name) as &dyn ModuleImportKey)
+ }
+
+ /// Determines if module type `a` is a subtype of `b`.
+ pub fn is_subtype_of(a: &Self, at: TypesRef, b: &Self, bt: TypesRef) -> bool {
+ Self::internal_is_subtype_of(a, at.list, b, bt.list)
+ }
+
+ pub(crate) fn internal_is_subtype_of(a: &Self, at: &TypeList, b: &Self, bt: &TypeList) -> bool {
+ // For module type subtyping, all exports in the other module type
+ // must be present in this module type's exports (i.e. it can export
+ // *more* than what this module type needs).
+ // However, for imports, the check is reversed (i.e. it is okay
+ // to import *less* than what this module type needs).
+ a.imports.iter().all(|(k, a)| match b.imports.get(k) {
+ Some(b) => EntityType::internal_is_subtype_of(b, bt, a, at),
+ None => false,
+ }) && b.exports.iter().all(|(k, b)| match a.exports.get(k) {
+ Some(a) => EntityType::internal_is_subtype_of(a, at, b, bt),
+ None => false,
+ })
+ }
+}
+
+/// Represents the kind of module instance type.
+#[derive(Debug, Clone)]
+pub enum InstanceTypeKind {
+ /// The instance type is the result of instantiating a module type.
+ Instantiated(TypeId),
+ /// The instance type is the result of instantiating from exported items.
+ Exports(IndexMap<String, EntityType>),
+}
+
+/// Represents a module instance type.
+#[derive(Debug, Clone)]
+pub struct InstanceType {
+ /// The effective type size for the module instance type.
+ pub(crate) type_size: u32,
+ /// The kind of module instance type.
+ pub kind: InstanceTypeKind,
+}
+
+impl InstanceType {
+ /// Gets the exports of the instance type.
+ pub fn exports<'a>(&'a self, types: TypesRef<'a>) -> &'a IndexMap<String, EntityType> {
+ self.internal_exports(types.list)
+ }
+
+ pub(crate) fn internal_exports<'a>(
+ &'a self,
+ types: &'a TypeList,
+ ) -> &'a IndexMap<String, EntityType> {
+ match &self.kind {
+ InstanceTypeKind::Instantiated(id) => &types[*id].as_module_type().unwrap().exports,
+ InstanceTypeKind::Exports(exports) => exports,
+ }
+ }
+}
+
/// The entity type for imports and exports of a component.
#[derive(Debug, Clone, Copy)]
pub enum ComponentEntityType {
    /// The entity is a core module.
    Module(TypeId),
    /// The entity is a function.
    Func(TypeId),
    /// The entity is a value.
    Value(ComponentValType),
    /// The entity is a type.
    Type {
        /// This is the identifier of the type that was referenced when this
        /// entity was created.
        referenced: TypeId,
        /// This is the identifier of the type that was created when this type
        /// was imported or exported from the component.
        ///
        /// Note that the underlying type information for the `referenced`
        /// field and for this `created` field is the same, but these two types
        /// will hash to different values.
        created: TypeId,
    },
    /// The entity is a component instance.
    Instance(TypeId),
    /// The entity is a component.
    Component(TypeId),
}
+
impl ComponentEntityType {
    /// Determines if component entity type `a` is a subtype of `b`.
    pub fn is_subtype_of(a: &Self, at: TypesRef, b: &Self, bt: TypesRef) -> bool {
        Self::internal_is_subtype_of(a, at.list, b, bt.list)
    }

    /// Internal subtype check operating directly on type lists.
    ///
    /// Subtyping is only defined between entities of the same kind; every
    /// cross-kind pairing falls through to `false`.
    pub(crate) fn internal_is_subtype_of(a: &Self, at: &TypeList, b: &Self, bt: &TypeList) -> bool {
        match (a, b) {
            (Self::Module(a), Self::Module(b)) => ModuleType::internal_is_subtype_of(
                at[*a].as_module_type().unwrap(),
                at,
                bt[*b].as_module_type().unwrap(),
                bt,
            ),
            (Self::Func(a), Self::Func(b)) => ComponentFuncType::internal_is_subtype_of(
                at[*a].as_component_func_type().unwrap(),
                at,
                bt[*b].as_component_func_type().unwrap(),
                bt,
            ),
            (Self::Value(a), Self::Value(b)) => {
                ComponentValType::internal_is_subtype_of(a, at, b, bt)
            }
            // Only the `referenced` ids are compared; per the docs on this
            // enum, `created` shares the same underlying type information.
            (Self::Type { referenced: a, .. }, Self::Type { referenced: b, .. }) => {
                ComponentDefinedType::internal_is_subtype_of(
                    at[*a].as_defined_type().unwrap(),
                    at,
                    bt[*b].as_defined_type().unwrap(),
                    bt,
                )
            }
            (Self::Instance(a), Self::Instance(b)) => {
                ComponentInstanceType::internal_is_subtype_of(
                    at[*a].as_component_instance_type().unwrap(),
                    at,
                    bt[*b].as_component_instance_type().unwrap(),
                    bt,
                )
            }
            (Self::Component(a), Self::Component(b)) => ComponentType::internal_is_subtype_of(
                at[*a].as_component_type().unwrap(),
                at,
                bt[*b].as_component_type().unwrap(),
                bt,
            ),
            _ => false,
        }
    }

    /// Returns a short human-readable name for this entity kind, used in
    /// error messages.
    pub(crate) fn desc(&self) -> &'static str {
        match self {
            Self::Module(_) => "module",
            Self::Func(_) => "function",
            Self::Value(_) => "value",
            Self::Type { .. } => "type",
            Self::Instance(_) => "instance",
            Self::Component(_) => "component",
        }
    }

    /// Returns the effective type size of the referenced type.
    pub(crate) fn type_size(&self) -> u32 {
        match self {
            Self::Module(ty)
            | Self::Func(ty)
            | Self::Type { referenced: ty, .. }
            | Self::Instance(ty)
            | Self::Component(ty) => ty.type_size,
            Self::Value(ty) => ty.type_size(),
        }
    }
}
+
/// Represents a type of a component.
#[derive(Debug, Clone)]
pub struct ComponentType {
    /// The effective type size for the component type.
    pub(crate) type_size: u32,
    /// The imports of the component type.
    ///
    /// Keyed by kebab-case import name; the optional `Url` is the import's
    /// declared URL, if any.
    pub imports: IndexMap<KebabString, (Option<Url>, ComponentEntityType)>,
    /// The exports of the component type.
    pub exports: IndexMap<KebabString, (Option<Url>, ComponentEntityType)>,
}
+
impl ComponentType {
    /// Determines if component type `a` is a subtype of `b`.
    pub fn is_subtype_of(a: &Self, at: TypesRef, b: &Self, bt: TypesRef) -> bool {
        Self::internal_is_subtype_of(a, at.list, b, bt.list)
    }

    pub(crate) fn internal_is_subtype_of(a: &Self, at: &TypeList, b: &Self, bt: &TypeList) -> bool {
        // For component type subtyping, all exports in the other component type
        // must be present in this component type's exports (i.e. it can export
        // *more* than what this component type needs).
        // However, for imports, the check is reversed (i.e. it is okay
        // to import *less* than what this component type needs).
        //
        // Note the contravariance below: for imports the roles of `a` and
        // `b` are swapped in the recursive entity-type check, while for
        // exports they are passed through covariantly.
        a.imports.iter().all(|(k, (_, a))| match b.imports.get(k) {
            Some((_, b)) => ComponentEntityType::internal_is_subtype_of(b, bt, a, at),
            None => false,
        }) && b.exports.iter().all(|(k, (_, b))| match a.exports.get(k) {
            Some((_, a)) => ComponentEntityType::internal_is_subtype_of(a, at, b, bt),
            None => false,
        })
    }
}
+
/// Represents the kind of a component instance.
#[derive(Debug, Clone)]
pub enum ComponentInstanceTypeKind {
    /// The instance type is from a definition.
    Defined(IndexMap<KebabString, (Option<Url>, ComponentEntityType)>),
    /// The instance type is the result of instantiating a component type.
    ///
    /// The `TypeId` refers to a `ComponentType`.
    Instantiated(TypeId),
    /// The instance type is the result of instantiating from exported items.
    Exports(IndexMap<KebabString, (Option<Url>, ComponentEntityType)>),
}
+
/// Represents a type of a component instance.
#[derive(Debug, Clone)]
pub struct ComponentInstanceType {
    /// The effective type size for the instance type.
    pub(crate) type_size: u32,
    /// The kind of instance type.
    pub kind: ComponentInstanceTypeKind,
}
+
impl ComponentInstanceType {
    /// Gets the exports of the instance type.
    ///
    /// Yields `(name, url, entity)` triples in definition order.
    pub fn exports<'a>(
        &'a self,
        types: TypesRef<'a>,
    ) -> impl ExactSizeIterator<Item = (&'a KebabStr, &'a Option<Url>, ComponentEntityType)> + Clone
    {
        self.internal_exports(types.list)
            .iter()
            .map(|(n, (u, t))| (n.as_kebab_str(), u, *t))
    }

    /// Resolves the export map, chasing the instantiated component type when
    /// necessary.
    pub(crate) fn internal_exports<'a>(
        &'a self,
        types: &'a TypeList,
    ) -> &'a IndexMap<KebabString, (Option<Url>, ComponentEntityType)> {
        match &self.kind {
            ComponentInstanceTypeKind::Defined(exports)
            | ComponentInstanceTypeKind::Exports(exports) => exports,
            ComponentInstanceTypeKind::Instantiated(id) => {
                &types[*id].as_component_type().unwrap().exports
            }
        }
    }

    /// Determines if component instance type `a` is a subtype of `b`.
    pub fn is_subtype_of(a: &Self, at: TypesRef, b: &Self, bt: TypesRef) -> bool {
        Self::internal_is_subtype_of(a, at.list, b, bt.list)
    }

    pub(crate) fn internal_is_subtype_of(a: &Self, at: &TypeList, b: &Self, bt: &TypeList) -> bool {
        let exports = a.internal_exports(at);

        // For instance type subtyping, all exports in the other instance type
        // must be present in this instance type's exports (i.e. it can export
        // *more* than what this instance type needs).
        b.internal_exports(bt)
            .iter()
            .all(|(k, (_, b))| match exports.get(k) {
                Some((_, a)) => ComponentEntityType::internal_is_subtype_of(a, at, b, bt),
                None => false,
            })
    }
}
+
/// Represents a type of a component function.
#[derive(Debug, Clone)]
pub struct ComponentFuncType {
    /// The effective type size for the component function type.
    pub(crate) type_size: u32,
    /// The function parameters, as `(name, type)` pairs in declaration order.
    pub params: Box<[(KebabString, ComponentValType)]>,
    /// The function's results; a result's name is `None` for a single
    /// unnamed result.
    pub results: Box<[(Option<KebabString>, ComponentValType)]>,
}
+
impl ComponentFuncType {
    /// Determines if component function type `a` is a subtype of `b`.
    pub fn is_subtype_of(a: &Self, at: TypesRef, b: &Self, bt: TypesRef) -> bool {
        Self::internal_is_subtype_of(a, at.list, b, bt.list)
    }

    pub(crate) fn internal_is_subtype_of(a: &Self, at: &TypeList, b: &Self, bt: &TypeList) -> bool {
        // Note that this intentionally diverges from the upstream specification
        // in terms of subtyping. This is a full type-equality check which
        // ensures that the structure of `a` exactly matches the structure of
        // `b`. The rationale for this is:
        //
        // * Primarily in Wasmtime subtyping based on function types is not
        //   implemented. This includes both subtyping a host import and
        //   additionally handling subtyping as functions cross component
        //   boundaries. The host import subtyping (or component export
        //   subtyping) is not clear how to handle at all at this time. The
        //   subtyping of functions between components can more easily be
        //   handled by extending the `fact` compiler, but that hasn't been done
        //   yet.
        //
        // * The upstream specification is currently pretty intentionally vague
        //   precisely what subtyping is allowed. Implementing a strict check
        //   here is intended to be a conservative starting point for the
        //   component model which can be extended in the future if necessary.
        //
        // * The interaction with subtyping on bindings generation, for example,
        //   is a tricky problem that doesn't have a clear answer at this time.
        //   Effectively this is more rationale for being conservative in the
        //   first pass of the component model.
        //
        // So, in conclusion, the test here (and other places that reference
        // this comment) is for exact type equality with no differences.
        a.params.len() == b.params.len()
            && a.results.len() == b.results.len()
            && a.params
                .iter()
                .zip(b.params.iter())
                .all(|((an, a), (bn, b))| {
                    an == bn && ComponentValType::internal_is_subtype_of(a, at, b, bt)
                })
            && a.results
                .iter()
                .zip(b.results.iter())
                .all(|((an, a), (bn, b))| {
                    an == bn && ComponentValType::internal_is_subtype_of(a, at, b, bt)
                })
    }

    /// Lowers the component function type to core parameter and result types for the
    /// canonical ABI.
    ///
    /// `import` is true when lowering a component function for a core caller,
    /// and false when lifting a core function to a component function; the
    /// direction decides which side needs `realloc` for indirect passing.
    pub(crate) fn lower(&self, types: &TypeList, import: bool) -> LoweringInfo {
        let mut info = LoweringInfo::default();

        for (_, ty) in self.params.iter() {
            // When `import` is false, it means we're lifting a core function,
            // check if the parameters needs realloc
            if !import && !info.requires_realloc {
                info.requires_realloc = ty.requires_realloc(types);
            }

            if !ty.push_wasm_types(types, &mut info.params) {
                // Too many parameters to pass directly
                // Function will have a single pointer parameter to pass the arguments
                // via linear memory
                info.params.clear();
                assert!(info.params.push(ValType::I32));
                info.requires_memory = true;

                // We need realloc as well when lifting a function
                if !import {
                    info.requires_realloc = true;
                }
                break;
            }
        }

        for (_, ty) in self.results.iter() {
            // When `import` is true, it means we're lowering a component function,
            // check if the result needs realloc
            if import && !info.requires_realloc {
                info.requires_realloc = ty.requires_realloc(types);
            }

            if !ty.push_wasm_types(types, &mut info.results) {
                // Too many results to return directly, either a retptr parameter will be used (import)
                // or a single pointer will be returned (export)
                info.results.clear();
                if import {
                    // Raise the parameter limit so the retptr always fits even
                    // when the params were already at capacity.
                    info.params.max = MAX_LOWERED_TYPES;
                    assert!(info.params.push(ValType::I32));
                } else {
                    assert!(info.results.push(ValType::I32));
                }
                info.requires_memory = true;
                break;
            }
        }

        // Memory is always required when realloc is required
        info.requires_memory |= info.requires_realloc;

        info
    }
}
+
/// Represents a variant case.
#[derive(Debug, Clone)]
pub struct VariantCase {
    /// The variant case type, or `None` for a payload-less case.
    pub ty: Option<ComponentValType>,
    /// The name of the variant case refined by this one.
    pub refines: Option<KebabString>,
}
+
/// Represents a record type.
#[derive(Debug, Clone)]
pub struct RecordType {
    /// The effective type size for the record type.
    pub(crate) type_size: u32,
    /// The map of record fields, keyed by field name in declaration order.
    pub fields: IndexMap<KebabString, ComponentValType>,
}
+
/// Represents a variant type.
#[derive(Debug, Clone)]
pub struct VariantType {
    /// The effective type size for the variant type.
    pub(crate) type_size: u32,
    /// The map of variant cases, keyed by case name in declaration order.
    pub cases: IndexMap<KebabString, VariantCase>,
}
+
/// Represents a tuple type.
#[derive(Debug, Clone)]
pub struct TupleType {
    /// The effective type size for the tuple type.
    pub(crate) type_size: u32,
    /// The types of the tuple.
    pub types: Box<[ComponentValType]>,
}
+
/// Represents a union type.
#[derive(Debug, Clone)]
pub struct UnionType {
    /// The effective type size for the union type.
    // (Comment fixed: previously said "inclusive type count", which did not
    // match the identically-used field on the sibling types.)
    pub(crate) type_size: u32,
    /// The types of the union.
    pub types: Box<[ComponentValType]>,
}
+
/// Represents a component defined type.
#[derive(Debug, Clone)]
pub enum ComponentDefinedType {
    /// The type is a primitive value type.
    Primitive(PrimitiveValType),
    /// The type is a record.
    Record(RecordType),
    /// The type is a variant.
    Variant(VariantType),
    /// The type is a list.
    List(ComponentValType),
    /// The type is a tuple.
    Tuple(TupleType),
    /// The type is a set of flags.
    Flags(IndexSet<KebabString>),
    /// The type is an enumeration.
    Enum(IndexSet<KebabString>),
    /// The type is a union.
    Union(UnionType),
    /// The type is an `option`.
    Option(ComponentValType),
    /// The type is a `result`.
    Result {
        /// The `ok` type, or `None` for a unit `ok`.
        ok: Option<ComponentValType>,
        /// The `error` type, or `None` for a unit `error`.
        err: Option<ComponentValType>,
    },
}
+
impl ComponentDefinedType {
    /// Returns whether passing a value of this type across the canonical ABI
    /// requires a `realloc` function (i.e. the type contains dynamically
    /// sized data such as lists or strings).
    pub(crate) fn requires_realloc(&self, types: &TypeList) -> bool {
        match self {
            Self::Primitive(ty) => ty.requires_realloc(),
            Self::Record(r) => r.fields.values().any(|ty| ty.requires_realloc(types)),
            Self::Variant(v) => v.cases.values().any(|case| {
                case.ty
                    .map(|ty| ty.requires_realloc(types))
                    .unwrap_or(false)
            }),
            // Lists always carry heap-allocated payloads.
            Self::List(_) => true,
            Self::Tuple(t) => t.types.iter().any(|ty| ty.requires_realloc(types)),
            Self::Union(u) => u.types.iter().any(|ty| ty.requires_realloc(types)),
            Self::Flags(_) | Self::Enum(_) => false,
            Self::Option(ty) => ty.requires_realloc(types),
            Self::Result { ok, err } => {
                ok.map(|ty| ty.requires_realloc(types)).unwrap_or(false)
                    || err.map(|ty| ty.requires_realloc(types)).unwrap_or(false)
            }
        }
    }

    /// Determines if component defined type `a` is a subtype of `b`.
    pub fn is_subtype_of(a: &Self, at: TypesRef, b: &Self, bt: TypesRef) -> bool {
        Self::internal_is_subtype_of(a, at.list, b, bt.list)
    }

    pub(crate) fn internal_is_subtype_of(a: &Self, at: &TypeList, b: &Self, bt: &TypeList) -> bool {
        // Note that the implementation of subtyping here diverges from the
        // upstream specification intentionally, see the documentation on
        // function subtyping for more information.
        match (a, b) {
            (Self::Primitive(a), Self::Primitive(b)) => PrimitiveValType::is_subtype_of(*a, *b),
            (Self::Record(a), Self::Record(b)) => {
                // Positional, name-sensitive equality: same field count, same
                // names in the same order, pairwise-compatible types.
                a.fields.len() == b.fields.len()
                    && a.fields
                        .iter()
                        .zip(b.fields.iter())
                        .all(|((aname, a), (bname, b))| {
                            aname == bname && ComponentValType::internal_is_subtype_of(a, at, b, bt)
                        })
            }
            (Self::Variant(a), Self::Variant(b)) => {
                a.cases.len() == b.cases.len()
                    && a.cases
                        .iter()
                        .zip(b.cases.iter())
                        .all(|((aname, a), (bname, b))| {
                            aname == bname
                                && match (&a.ty, &b.ty) {
                                    (Some(a), Some(b)) => {
                                        ComponentValType::internal_is_subtype_of(a, at, b, bt)
                                    }
                                    (None, None) => true,
                                    _ => false,
                                }
                        })
            }
            (Self::List(a), Self::List(b)) | (Self::Option(a), Self::Option(b)) => {
                ComponentValType::internal_is_subtype_of(a, at, b, bt)
            }
            (Self::Tuple(a), Self::Tuple(b)) => {
                if a.types.len() != b.types.len() {
                    return false;
                }
                a.types
                    .iter()
                    .zip(b.types.iter())
                    .all(|(a, b)| ComponentValType::internal_is_subtype_of(a, at, b, bt))
            }
            (Self::Union(a), Self::Union(b)) => {
                if a.types.len() != b.types.len() {
                    return false;
                }
                a.types
                    .iter()
                    .zip(b.types.iter())
                    .all(|(a, b)| ComponentValType::internal_is_subtype_of(a, at, b, bt))
            }
            (Self::Flags(a), Self::Flags(b)) | (Self::Enum(a), Self::Enum(b)) => {
                a.len() == b.len() && a.iter().eq(b.iter())
            }
            (Self::Result { ok: ao, err: ae }, Self::Result { ok: bo, err: be }) => {
                Self::is_optional_subtype_of(*ao, at, *bo, bt)
                    && Self::is_optional_subtype_of(*ae, at, *be, bt)
            }
            _ => false,
        }
    }

    /// Returns the effective type size of this defined type.
    pub(crate) fn type_size(&self) -> u32 {
        match self {
            Self::Primitive(_) => 1,
            Self::Flags(_) | Self::Enum(_) => 1,
            Self::Record(r) => r.type_size,
            Self::Variant(v) => v.type_size,
            Self::Tuple(t) => t.type_size,
            Self::Union(u) => u.type_size,
            Self::List(ty) | Self::Option(ty) => ty.type_size(),
            Self::Result { ok, err } => {
                ok.map(|ty| ty.type_size()).unwrap_or(1) + err.map(|ty| ty.type_size()).unwrap_or(1)
            }
        }
    }

    /// Subtype check for the optional payload slots of `result` types:
    /// both absent, or both present and compatible.
    fn is_optional_subtype_of(
        a: Option<ComponentValType>,
        at: &TypeList,
        b: Option<ComponentValType>,
        bt: &TypeList,
    ) -> bool {
        match (a, b) {
            (None, None) => true,
            (Some(a), Some(b)) => ComponentValType::internal_is_subtype_of(&a, at, &b, bt),
            _ => false,
        }
    }

    /// Appends this type's flattened core wasm representation to
    /// `lowered_types`, returning `false` if the capacity limit was hit
    /// (which signals the caller to fall back to indirect passing).
    fn push_wasm_types(&self, types: &TypeList, lowered_types: &mut LoweredTypes) -> bool {
        match self {
            Self::Primitive(ty) => push_primitive_wasm_types(ty, lowered_types),
            Self::Record(r) => r
                .fields
                .iter()
                .all(|(_, ty)| ty.push_wasm_types(types, lowered_types)),
            Self::Variant(v) => Self::push_variant_wasm_types(
                v.cases.iter().filter_map(|(_, case)| case.ty.as_ref()),
                types,
                lowered_types,
            ),
            // A list lowers to a (pointer, length) pair.
            Self::List(_) => lowered_types.push(ValType::I32) && lowered_types.push(ValType::I32),
            Self::Tuple(t) => t
                .types
                .iter()
                .all(|ty| ty.push_wasm_types(types, lowered_types)),
            Self::Flags(names) => {
                // One i32 bitset per 32 flags, rounded up.
                (0..(names.len() + 31) / 32).all(|_| lowered_types.push(ValType::I32))
            }
            Self::Enum(_) => lowered_types.push(ValType::I32),
            Self::Union(u) => Self::push_variant_wasm_types(u.types.iter(), types, lowered_types),
            Self::Option(ty) => {
                Self::push_variant_wasm_types([ty].into_iter(), types, lowered_types)
            }
            Self::Result { ok, err } => {
                Self::push_variant_wasm_types(ok.iter().chain(err.iter()), types, lowered_types)
            }
        }
    }

    /// Flattens a variant-shaped type (variant/union/option/result): pushes
    /// an i32 discriminant, then joins the payload lowerings of all cases
    /// slot-by-slot so every case can be represented in the same core types.
    fn push_variant_wasm_types<'a>(
        cases: impl Iterator<Item = &'a ComponentValType>,
        types: &TypeList,
        lowered_types: &mut LoweredTypes,
    ) -> bool {
        // Push the discriminant
        if !lowered_types.push(ValType::I32) {
            return false;
        }

        let start = lowered_types.len();

        for ty in cases {
            let mut temp = LoweredTypes::new(lowered_types.max);

            if !ty.push_wasm_types(types, &mut temp) {
                return false;
            }

            for (i, ty) in temp.iter().enumerate() {
                match lowered_types.get_mut(start + i) {
                    // Slot already used by an earlier case: widen to a common
                    // representation.
                    Some(prev) => *prev = Self::join_types(*prev, ty),
                    None => {
                        if !lowered_types.push(ty) {
                            return false;
                        }
                    }
                }
            }
        }

        true
    }

    /// Joins two flattened core types into a single type capable of holding
    /// either: equal types stay as-is, i32/f32 join to i32, and anything
    /// involving a 64-bit type joins to i64.
    fn join_types(a: ValType, b: ValType) -> ValType {
        use ValType::*;

        match (a, b) {
            (I32, I32) | (I64, I64) | (F32, F32) | (F64, F64) => a,
            (I32, F32) | (F32, I32) => I32,
            (_, I64 | F64) | (I64 | F64, _) => I64,
            _ => panic!("unexpected wasm type for canonical ABI"),
        }
    }
}
+
// Internal owned counterpart to `TypesRefKind`: whether the validated
// entity was a core module or a component.
#[allow(clippy::large_enum_variant)]
enum TypesKind {
    Module(Arc<Module>),
    Component(ComponentState),
}
+
/// Represents the types known to a [`crate::Validator`] once validation has completed.
///
/// The type information is returned via the [`crate::Validator::end`] method.
pub struct Types {
    // The full list of types allocated during validation.
    list: TypeList,
    // Whether the validated entity was a module or a component.
    kind: TypesKind,
}
+
// Borrowed counterpart to `TypesKind`, used by `TypesRef`.
#[derive(Clone, Copy)]
enum TypesRefKind<'a> {
    Module(&'a Module),
    Component(&'a ComponentState),
}
+
/// Represents the types known to a [`crate::Validator`] during validation.
///
/// Retrieved via the [`crate::Validator::types`] method.
#[derive(Clone, Copy)]
pub struct TypesRef<'a> {
    // The full list of types allocated so far.
    list: &'a TypeList,
    // Whether the entity being validated is a module or a component.
    kind: TypesRefKind<'a>,
}
+
impl<'a> TypesRef<'a> {
    pub(crate) fn from_module(types: &'a TypeList, module: &'a Module) -> Self {
        Self {
            list: types,
            kind: TypesRefKind::Module(module),
        }
    }

    pub(crate) fn from_component(types: &'a TypeList, component: &'a ComponentState) -> Self {
        Self {
            list: types,
            kind: TypesRefKind::Component(component),
        }
    }

    /// Returns the type-index space to look in: core types when `core` is
    /// true, component types otherwise. Modules have no component type space,
    /// so `core == false` yields `None` for a module.
    fn types(&self, core: bool) -> Option<&'a [TypeId]> {
        Some(match &self.kind {
            TypesRefKind::Module(module) => {
                if core {
                    &module.types
                } else {
                    return None;
                }
            }
            TypesRefKind::Component(component) => {
                if core {
                    &component.core_types
                } else {
                    &component.types
                }
            }
        })
    }

    /// Gets a type based on its type id.
    ///
    /// Returns `None` if the type id is unknown.
    pub fn type_from_id(&self, id: TypeId) -> Option<&'a Type> {
        self.list.get(id.index)
    }

    /// Gets a type id from a type index.
    ///
    /// Returns `None` if the type index is out of bounds or the type has not
    /// been parsed yet.
    pub fn id_from_type_index(&self, index: u32, core: bool) -> Option<TypeId> {
        self.types(core)?.get(index as usize).copied()
    }

    /// Gets a type at the given type index.
    ///
    /// Returns `None` if the type index is out of bounds or the type has not
    /// been parsed yet.
    pub fn type_at(&self, index: u32, core: bool) -> Option<&'a Type> {
        self.type_from_id(*self.types(core)?.get(index as usize)?)
    }

    /// Gets a defined core function type at the given type index.
    ///
    /// Returns `None` if the type index is out of bounds or the type has not
    /// been parsed yet.
    pub fn func_type_at(&self, index: u32) -> Option<&'a FuncType> {
        match self.type_at(index, true)? {
            Type::Func(ty) => Some(ty),
            _ => None,
        }
    }

    /// Gets the type of a table at the given table index.
    ///
    /// Returns `None` if the type index is out of bounds or the type has not
    /// been parsed yet.
    pub fn table_at(&self, index: u32) -> Option<TableType> {
        let tables = match &self.kind {
            TypesRefKind::Module(module) => &module.tables,
            TypesRefKind::Component(component) => &component.core_tables,
        };

        tables.get(index as usize).copied()
    }

    /// Gets the type of a memory at the given memory index.
    ///
    /// Returns `None` if the type index is out of bounds or the type has not
    /// been parsed yet.
    pub fn memory_at(&self, index: u32) -> Option<MemoryType> {
        let memories = match &self.kind {
            TypesRefKind::Module(module) => &module.memories,
            TypesRefKind::Component(component) => &component.core_memories,
        };

        memories.get(index as usize).copied()
    }

    /// Gets the type of a global at the given global index.
    ///
    /// Returns `None` if the type index is out of bounds or the type has not
    /// been parsed yet.
    pub fn global_at(&self, index: u32) -> Option<GlobalType> {
        let globals = match &self.kind {
            TypesRefKind::Module(module) => &module.globals,
            TypesRefKind::Component(component) => &component.core_globals,
        };

        globals.get(index as usize).copied()
    }

    /// Gets the type of a tag at the given tag index.
    ///
    /// Returns `None` if the type index is out of bounds or the type has not
    /// been parsed yet.
    pub fn tag_at(&self, index: u32) -> Option<&'a FuncType> {
        let tags = match &self.kind {
            TypesRefKind::Module(module) => &module.tags,
            TypesRefKind::Component(component) => &component.core_tags,
        };

        Some(
            self.list[*tags.get(index as usize)?]
                .as_func_type()
                .unwrap(),
        )
    }

    /// Gets the type of a core function at the given function index.
    ///
    /// Returns `None` if the type index is out of bounds or the type has not
    /// been parsed yet.
    pub fn function_at(&self, index: u32) -> Option<&'a FuncType> {
        let id = match &self.kind {
            TypesRefKind::Module(module) => {
                // `module.functions` holds type indices; resolve through
                // `module.types` to get the `TypeId`.
                &module.types[*module.functions.get(index as usize)? as usize]
            }
            TypesRefKind::Component(component) => component.core_funcs.get(index as usize)?,
        };

        match &self.list[*id] {
            Type::Func(ty) => Some(ty),
            _ => None,
        }
    }

    /// Gets the type of an element segment at the given element segment index.
    ///
    /// Returns `None` if the type index is out of bounds or the type has not
    /// been parsed yet.
    pub fn element_at(&self, index: u32) -> Option<RefType> {
        match &self.kind {
            TypesRefKind::Module(module) => module.element_types.get(index as usize).copied(),
            TypesRefKind::Component(_) => None,
        }
    }

    /// Gets the type of a component function at the given function index.
    ///
    /// Returns `None` if the type index is out of bounds or the type has not
    /// been parsed yet.
    pub fn component_function_at(&self, index: u32) -> Option<&'a ComponentFuncType> {
        match &self.kind {
            TypesRefKind::Module(_) => None,
            TypesRefKind::Component(component) => Some(
                self.list[*component.funcs.get(index as usize)?]
                    .as_component_func_type()
                    .unwrap(),
            ),
        }
    }

    /// Gets the type of a module at the given module index.
    ///
    /// Returns `None` if the type index is out of bounds or the type has not
    /// been parsed yet.
    pub fn module_at(&self, index: u32) -> Option<&'a ModuleType> {
        match &self.kind {
            TypesRefKind::Module(_) => None,
            TypesRefKind::Component(component) => Some(
                self.list[*component.core_modules.get(index as usize)?]
                    .as_module_type()
                    .unwrap(),
            ),
        }
    }

    /// Gets the type of a module instance at the given module instance index.
    ///
    /// Returns `None` if the type index is out of bounds or the type has not
    /// been parsed yet.
    pub fn instance_at(&self, index: u32) -> Option<&'a InstanceType> {
        match &self.kind {
            TypesRefKind::Module(_) => None,
            TypesRefKind::Component(component) => {
                let id = component.core_instances.get(index as usize)?;
                match &self.list[*id] {
                    Type::Instance(ty) => Some(ty),
                    _ => None,
                }
            }
        }
    }

    /// Gets the type of a component at the given component index.
    ///
    /// Returns `None` if the type index is out of bounds or the type has not
    /// been parsed yet.
    pub fn component_at(&self, index: u32) -> Option<&'a ComponentType> {
        match &self.kind {
            TypesRefKind::Module(_) => None,
            TypesRefKind::Component(component) => Some(
                self.list[*component.components.get(index as usize)?]
                    .as_component_type()
                    .unwrap(),
            ),
        }
    }

    /// Gets the type of a component instance at the given component instance index.
    ///
    /// Returns `None` if the type index is out of bounds or the type has not
    /// been parsed yet.
    pub fn component_instance_at(&self, index: u32) -> Option<&'a ComponentInstanceType> {
        match &self.kind {
            TypesRefKind::Module(_) => None,
            TypesRefKind::Component(component) => {
                let id = component.instances.get(index as usize)?;
                match &self.list[*id] {
                    Type::ComponentInstance(ty) => Some(ty),
                    _ => None,
                }
            }
        }
    }

    /// Gets the type of a value at the given value index.
    ///
    /// Returns `None` if the type index is out of bounds or the type has not
    /// been parsed yet.
    pub fn value_at(&self, index: u32) -> Option<ComponentValType> {
        match &self.kind {
            TypesRefKind::Module(_) => None,
            TypesRefKind::Component(component) => {
                component.values.get(index as usize).map(|(r, _)| *r)
            }
        }
    }

    /// Gets the entity type for the given import.
    pub fn entity_type_from_import(&self, import: &Import) -> Option<EntityType> {
        match &self.kind {
            TypesRefKind::Module(module) => Some(match import.ty {
                TypeRef::Func(idx) => EntityType::Func(*module.types.get(idx as usize)?),
                TypeRef::Table(ty) => EntityType::Table(ty),
                TypeRef::Memory(ty) => EntityType::Memory(ty),
                TypeRef::Global(ty) => EntityType::Global(ty),
                TypeRef::Tag(ty) => EntityType::Tag(*module.types.get(ty.func_type_idx as usize)?),
            }),
            TypesRefKind::Component(_) => None,
        }
    }

    /// Gets the entity type from the given export.
    pub fn entity_type_from_export(&self, export: &Export) -> Option<EntityType> {
        match &self.kind {
            TypesRefKind::Module(module) => Some(match export.kind {
                ExternalKind::Func => EntityType::Func(
                    module.types[*module.functions.get(export.index as usize)? as usize],
                ),
                ExternalKind::Table => {
                    EntityType::Table(*module.tables.get(export.index as usize)?)
                }
                ExternalKind::Memory => {
                    EntityType::Memory(*module.memories.get(export.index as usize)?)
                }
                ExternalKind::Global => {
                    EntityType::Global(*module.globals.get(export.index as usize)?)
                }
                // NOTE(review): this arm indexes `module.functions` with a tag
                // export index; tags live in their own index space
                // (`module.tags`, see `tag_at` above), so this looks like it
                // should read `module.tags` — confirm against upstream
                // wasmparser before changing.
                ExternalKind::Tag => EntityType::Tag(
                    module.types[*module.functions.get(export.index as usize)? as usize],
                ),
            }),
            TypesRefKind::Component(_) => None,
        }
    }

    /// Gets the component entity type for the given component import.
    pub fn component_entity_type_of_extern(&self, name: &str) -> Option<ComponentEntityType> {
        match &self.kind {
            TypesRefKind::Module(_) => None,
            TypesRefKind::Component(component) => {
                // Extern names must be valid kebab-case to resolve.
                let key = KebabStr::new(name)?;
                Some(component.externs.get(key)?.1)
            }
        }
    }
}
+
+impl Types {
+ pub(crate) fn from_module(types: TypeList, module: Arc<Module>) -> Self {
+ Self {
+ list: types,
+ kind: TypesKind::Module(module),
+ }
+ }
+
+ pub(crate) fn from_component(types: TypeList, component: ComponentState) -> Self {
+ Self {
+ list: types,
+ kind: TypesKind::Component(component),
+ }
+ }
+
+ /// Gets a reference to this validation type information.
+ pub fn as_ref(&self) -> TypesRef {
+ TypesRef {
+ list: &self.list,
+ kind: match &self.kind {
+ TypesKind::Module(module) => TypesRefKind::Module(module),
+ TypesKind::Component(component) => TypesRefKind::Component(component),
+ },
+ }
+ }
+
+ /// Gets a type based on its type id.
+ ///
+ /// Returns `None` if the type id is unknown.
+ pub fn type_from_id(&self, id: TypeId) -> Option<&Type> {
+ self.as_ref().type_from_id(id)
+ }
+
+ /// Gets a type id from a type index.
+ ///
+ /// Returns `None` if the type index is out of bounds.
+ pub fn id_from_type_index(&self, index: u32, core: bool) -> Option<TypeId> {
+ self.as_ref().id_from_type_index(index, core)
+ }
+
+ /// Gets a type at the given type index.
+ ///
+ /// Returns `None` if the index is out of bounds.
+ pub fn type_at(&self, index: u32, core: bool) -> Option<&Type> {
+ self.as_ref().type_at(index, core)
+ }
+
+ /// Gets a defined core function type at the given type index.
+ ///
+ /// Returns `None` if the index is out of bounds.
+ pub fn func_type_at(&self, index: u32) -> Option<&FuncType> {
+ self.as_ref().func_type_at(index)
+ }
+
+ /// Gets the count of core types.
+ pub fn type_count(&self) -> usize {
+ match &self.kind {
+ TypesKind::Module(module) => module.types.len(),
+ TypesKind::Component(component) => component.core_types.len(),
+ }
+ }
+
+ /// Gets the type of a table at the given table index.
+ ///
+ /// Returns `None` if the index is out of bounds.
+ pub fn table_at(&self, index: u32) -> Option<TableType> {
+ self.as_ref().table_at(index)
+ }
+
+ /// Gets the count of imported and defined tables.
+ pub fn table_count(&self) -> usize {
+ match &self.kind {
+ TypesKind::Module(module) => module.tables.len(),
+ TypesKind::Component(component) => component.core_tables.len(),
+ }
+ }
+
+ /// Gets the type of a memory at the given memory index.
+ ///
+ /// Returns `None` if the index is out of bounds.
+ pub fn memory_at(&self, index: u32) -> Option<MemoryType> {
+ self.as_ref().memory_at(index)
+ }
+
+ /// Gets the count of imported and defined memories.
+ pub fn memory_count(&self) -> usize {
+ match &self.kind {
+ TypesKind::Module(module) => module.memories.len(),
+ TypesKind::Component(component) => component.core_memories.len(),
+ }
+ }
+
+ /// Gets the type of a global at the given global index.
+ ///
+ /// Returns `None` if the index is out of bounds.
+ pub fn global_at(&self, index: u32) -> Option<GlobalType> {
+ self.as_ref().global_at(index)
+ }
+
+ /// Gets the count of imported and defined globals.
+ pub fn global_count(&self) -> usize {
+ match &self.kind {
+ TypesKind::Module(module) => module.globals.len(),
+ TypesKind::Component(component) => component.core_globals.len(),
+ }
+ }
+
+ /// Gets the type of a tag at the given tag index.
+ ///
+ /// Returns `None` if the index is out of bounds.
+ pub fn tag_at(&self, index: u32) -> Option<&FuncType> {
+ self.as_ref().tag_at(index)
+ }
+
+ /// Gets the count of imported and defined tags.
+ pub fn tag_count(&self) -> usize {
+ match &self.kind {
+ TypesKind::Module(module) => module.tags.len(),
+ TypesKind::Component(component) => component.core_tags.len(),
+ }
+ }
+
+ /// Gets the type of a core function at the given function index.
+ ///
+ /// Returns `None` if the index is out of bounds.
+ pub fn function_at(&self, index: u32) -> Option<&FuncType> {
+ self.as_ref().function_at(index)
+ }
+
+ /// Gets the count of imported and defined core functions.
+ ///
+ /// The count also includes aliased core functions in components.
+ pub fn function_count(&self) -> usize {
+ match &self.kind {
+ TypesKind::Module(module) => module.functions.len(),
+ TypesKind::Component(component) => component.core_funcs.len(),
+ }
+ }
+
+ /// Gets the type of an element segment at the given element segment index.
+ ///
+ /// Returns `None` if the index is out of bounds.
+ pub fn element_at(&self, index: u32) -> Option<RefType> {
+ match &self.kind {
+ TypesKind::Module(module) => module.element_types.get(index as usize).copied(),
+ TypesKind::Component(_) => None,
+ }
+ }
+
+ /// Gets the count of element segments.
+ pub fn element_count(&self) -> usize {
+ match &self.kind {
+ TypesKind::Module(module) => module.element_types.len(),
+ TypesKind::Component(_) => 0,
+ }
+ }
+
+ /// Gets the type of a component function at the given function index.
+ ///
+ /// Returns `None` if the index is out of bounds.
+ pub fn component_function_at(&self, index: u32) -> Option<&ComponentFuncType> {
+ self.as_ref().component_function_at(index)
+ }
+
+ /// Gets the count of imported, exported, or aliased component functions.
+ pub fn component_function_count(&self) -> usize {
+ match &self.kind {
+ TypesKind::Module(_) => 0,
+ TypesKind::Component(component) => component.funcs.len(),
+ }
+ }
+
+ /// Gets the type of a module at the given module index.
+ ///
+ /// Returns `None` if the index is out of bounds.
+ pub fn module_at(&self, index: u32) -> Option<&ModuleType> {
+ self.as_ref().module_at(index)
+ }
+
+ /// Gets the count of imported, exported, or aliased modules.
+ pub fn module_count(&self) -> usize {
+ match &self.kind {
+ TypesKind::Module(_) => 0,
+ TypesKind::Component(component) => component.core_modules.len(),
+ }
+ }
+
+ /// Gets the type of a module instance at the given module instance index.
+ ///
+ /// Returns `None` if the index is out of bounds.
+ pub fn instance_at(&self, index: u32) -> Option<&InstanceType> {
+ self.as_ref().instance_at(index)
+ }
+
+ /// Gets the count of imported, exported, or aliased core module instances.
+ pub fn instance_count(&self) -> usize {
+ match &self.kind {
+ TypesKind::Module(_) => 0,
+ TypesKind::Component(component) => component.core_instances.len(),
+ }
+ }
+
+ /// Gets the type of a component at the given component index.
+ ///
+ /// Returns `None` if the index is out of bounds.
+ pub fn component_at(&self, index: u32) -> Option<&ComponentType> {
+ self.as_ref().component_at(index)
+ }
+
+ /// Gets the count of imported, exported, or aliased components.
+ pub fn component_count(&self) -> usize {
+ match &self.kind {
+ TypesKind::Module(_) => 0,
+ TypesKind::Component(component) => component.components.len(),
+ }
+ }
+
+ /// Gets the type of an component instance at the given component instance index.
+ ///
+ /// Returns `None` if the index is out of bounds.
+ pub fn component_instance_at(&self, index: u32) -> Option<&ComponentInstanceType> {
+ self.as_ref().component_instance_at(index)
+ }
+
+ /// Gets the count of imported, exported, or aliased component instances.
+ pub fn component_instance_count(&self) -> usize {
+ match &self.kind {
+ TypesKind::Module(_) => 0,
+ TypesKind::Component(component) => component.instances.len(),
+ }
+ }
+
+ /// Gets the type of a value at the given value index.
+ ///
+ /// Returns `None` if the index is out of bounds.
+ pub fn value_at(&self, index: u32) -> Option<ComponentValType> {
+ self.as_ref().value_at(index)
+ }
+
+ /// Gets the count of imported, exported, or aliased values.
+ pub fn value_count(&self) -> usize {
+ match &self.kind {
+ TypesKind::Module(_) => 0,
+ TypesKind::Component(component) => component.values.len(),
+ }
+ }
+
+ /// Gets the entity type from the given import.
+ pub fn entity_type_from_import(&self, import: &Import) -> Option<EntityType> {
+ self.as_ref().entity_type_from_import(import)
+ }
+
+ /// Gets the entity type from the given export.
+ pub fn entity_type_from_export(&self, export: &Export) -> Option<EntityType> {
+ self.as_ref().entity_type_from_export(export)
+ }
+
+ /// Gets the component entity type for the given component import or export
+ /// name.
+ pub fn component_entity_type_of_extern(&self, name: &str) -> Option<ComponentEntityType> {
+ self.as_ref().component_entity_type_of_extern(name)
+ }
+
+ /// Attempts to lookup the type id that `ty` is an alias of.
+ ///
+ /// Returns `None` if `ty` wasn't listed as aliasing a prior type.
+ pub fn peel_alias(&self, ty: TypeId) -> Option<TypeId> {
+ self.list.peel_alias(ty)
+ }
+}
+
/// This is a type which mirrors a subset of the `Vec<T>` API, but is intended
/// to be able to be cheaply snapshotted and cloned.
///
/// When each module's code sections start we "commit" the current list of types
/// in the global list of types. This means that the temporary `cur` vec here is
/// pushed onto `snapshots` and wrapped up in an `Arc`. At that point we clone
/// this entire list (which is then O(modules), not O(types in all modules)) and
/// pass out as a context to each function validator.
///
/// Otherwise, though, this type behaves as if it were a large `Vec<T>`, but
/// it's represented by lists of contiguous chunks.
pub(crate) struct SnapshotList<T> {
    // All previous snapshots, the "head" of the list that this type represents.
    // The first entry in this pair is the starting index for all elements
    // contained in the list, and the second element is the list itself. Note
    // the `Arc` wrapper around sub-lists, which makes cloning time for this
    // `SnapshotList` O(snapshots) rather than O(snapshots_total), which for
    // us in this context means the number of modules, not types.
    //
    // Note that this list is sorted least-to-greatest in order of the index for
    // binary searching.
    snapshots: Vec<Arc<Snapshot<T>>>,

    // This is the total length of all lists in the `snapshots` array.
    snapshots_total: usize,

    // The current list of types for the current snapshot that are being built.
    cur: Vec<T>,

    // Maps a unique id handed out by `with_unique` back to the unique id of
    // the type it aliases. Entries accumulated here are moved into the next
    // `Snapshot` when `commit` is called (see `peel_alias` for the lookup).
    unique_mappings: HashMap<u32, u32>,
    // The next unique id to hand out via `with_unique`; monotonically
    // increasing across commits.
    unique_counter: u32,
}
+
// One committed, immutable chunk of a `SnapshotList`.
struct Snapshot<T> {
    // Number of elements contained in all snapshots prior to this one, i.e.
    // the global index of `items[0]`. Used as the binary-search key in
    // `SnapshotList::get`.
    prior_types: usize,
    // Value of the list's `unique_counter` at the time this snapshot was
    // committed. `commit` bumps the counter first so this value is never used
    // as an actual type's unique id, making it a safe binary-search key for
    // `peel_alias`.
    unique_counter: u32,
    // Alias mappings (`with_unique` id -> aliased id) accumulated while this
    // chunk was the list's current chunk.
    unique_mappings: HashMap<u32, u32>,
    // The elements committed in this chunk.
    items: Vec<T>,
}
+
impl<T> SnapshotList<T> {
    /// Same as `<&[T]>::get`
    ///
    /// Returns `None` when `index` is past the end of the entire logical list
    /// (all snapshots plus the current chunk).
    pub(crate) fn get(&self, index: usize) -> Option<&T> {
        // Check to see if this index falls on our local list
        if index >= self.snapshots_total {
            return self.cur.get(index - self.snapshots_total);
        }
        // ... and failing that we do a binary search to figure out which bucket
        // it's in. Note the `i-1` in the `Err` case because if we don't find an
        // exact match the type is located in the previous bucket.
        let i = match self
            .snapshots
            .binary_search_by_key(&index, |snapshot| snapshot.prior_types)
        {
            Ok(i) => i,
            Err(i) => i - 1,
        };
        let snapshot = &self.snapshots[i];
        // `index < self.snapshots_total` here, so the subtraction and the
        // indexing below are both in bounds for the selected snapshot.
        Some(&snapshot.items[index - snapshot.prior_types])
    }

    /// Same as `<&mut [T]>::get_mut`, except only works for indexes into the
    /// current snapshot being built.
    ///
    /// # Panics
    ///
    /// Panics if an index is passed in which falls within the
    /// previously-snapshotted list of types. This should never happen in our
    /// context and the panic is intended to weed out possible bugs in
    /// wasmparser.
    pub(crate) fn get_mut(&mut self, index: usize) -> Option<&mut T> {
        if index >= self.snapshots_total {
            return self.cur.get_mut(index - self.snapshots_total);
        }
        // Snapshotted chunks are shared via `Arc` and must stay immutable.
        panic!("cannot get a mutable reference in snapshotted part of list")
    }

    /// Same as `Vec::push`
    pub(crate) fn push(&mut self, val: T) {
        self.cur.push(val);
    }

    /// Same as `<[T]>::len`
    pub(crate) fn len(&self) -> usize {
        self.cur.len() + self.snapshots_total
    }

    /// Reserve space for an additional count of items.
    pub(crate) fn reserve(&mut self, additional: usize) {
        self.cur.reserve(additional);
    }

    /// Commits previously pushed types into this snapshot vector, and returns a
    /// clone of this list.
    ///
    /// The returned `SnapshotList` can be used to access all the same types as
    /// this list itself. This list also is not changed (from an external
    /// perspective) and can continue to access all the same types.
    pub(crate) fn commit(&mut self) -> SnapshotList<T> {
        // If the current chunk has new elements, commit them in to an
        // `Arc`-wrapped vector in the snapshots list. Note the `shrink_to_fit`
        // ahead of time to hopefully keep memory usage lower than it would
        // otherwise be. Additionally note that the `unique_counter` is bumped
        // here to ensure that the previous value of the unique counter is
        // never used for an actual type so it's suitable for lookup via a
        // binary search.
        let len = self.cur.len();
        if len > 0 {
            self.unique_counter += 1;
            self.cur.shrink_to_fit();
            self.snapshots.push(Arc::new(Snapshot {
                prior_types: self.snapshots_total,
                unique_counter: self.unique_counter - 1,
                unique_mappings: mem::take(&mut self.unique_mappings),
                items: mem::take(&mut self.cur),
            }));
            self.snapshots_total += len;
        }
        // The clone shares all committed snapshots (cheap `Arc` clones) and
        // starts with fresh, empty mutable state.
        SnapshotList {
            snapshots: self.snapshots.clone(),
            snapshots_total: self.snapshots_total,
            unique_mappings: HashMap::new(),
            unique_counter: self.unique_counter,
            cur: Vec::new(),
        }
    }

    /// Modifies a `TypeId` to have the same contents but a fresh new unique id.
    ///
    /// This is used during aliasing with components to assign types a unique
    /// identifier that isn't equivalent to anything else but still
    /// points to the same underlying type.
    pub fn with_unique(&mut self, mut ty: TypeId) -> TypeId {
        // Record the alias (new id -> old id) so `peel_alias` can undo it.
        self.unique_mappings
            .insert(self.unique_counter, ty.unique_id);
        ty.unique_id = self.unique_counter;
        self.unique_counter += 1;
        ty
    }

    /// Attempts to lookup the type id that `ty` is an alias of.
    ///
    /// Returns `None` if `ty` wasn't listed as aliasing a prior type.
    pub fn peel_alias(&self, ty: TypeId) -> Option<TypeId> {
        // The unique counter in each snapshot is the unique counter at the
        // time of the snapshot so it's guaranteed to never be used, meaning
        // that `Ok` should never show up here. With an `Err` it's where the
        // index would be placed meaning that the index in question is the
        // smallest value over the unique id's value, meaning that slot has the
        // mapping we're interested in.
        let i = match self
            .snapshots
            .binary_search_by_key(&ty.unique_id, |snapshot| snapshot.unique_counter)
        {
            Ok(_) => unreachable!(),
            Err(i) => i,
        };

        // If the `i` index is beyond the snapshot array then lookup in the
        // current mappings instead since it may refer to a type not snapshot
        // yet.
        let unique_id = match self.snapshots.get(i) {
            Some(snapshot) => *snapshot.unique_mappings.get(&ty.unique_id)?,
            None => *self.unique_mappings.get(&ty.unique_id)?,
        };
        Some(TypeId { unique_id, ..ty })
    }
}
+
+impl<T> std::ops::Index<usize> for SnapshotList<T> {
+ type Output = T;
+
+ #[inline]
+ fn index(&self, index: usize) -> &T {
+ self.get(index).unwrap()
+ }
+}
+
+impl<T> std::ops::IndexMut<usize> for SnapshotList<T> {
+ #[inline]
+ fn index_mut(&mut self, index: usize) -> &mut T {
+ self.get_mut(index).unwrap()
+ }
+}
+
+impl<T> std::ops::Index<TypeId> for SnapshotList<T> {
+ type Output = T;
+
+ #[inline]
+ fn index(&self, id: TypeId) -> &T {
+ self.get(id.index).unwrap()
+ }
+}
+
+impl<T> std::ops::IndexMut<TypeId> for SnapshotList<T> {
+ #[inline]
+ fn index_mut(&mut self, id: TypeId) -> &mut T {
+ self.get_mut(id.index).unwrap()
+ }
+}
+
+impl<T> Default for SnapshotList<T> {
+ fn default() -> SnapshotList<T> {
+ SnapshotList {
+ snapshots: Vec::new(),
+ snapshots_total: 0,
+ cur: Vec::new(),
+ unique_counter: 1,
+ unique_mappings: HashMap::new(),
+ }
+ }
+}
+
/// A snapshot list of types.
///
/// This is the concrete `SnapshotList` instantiation used throughout the
/// validator.
pub(crate) type TypeList = SnapshotList<Type>;
+
/// Thin wrapper around `TypeList` which provides an allocator of unique ids for
/// types contained within this list.
pub(crate) struct TypeAlloc {
    // The wrapped list; `Deref`/`DerefMut` impls below forward to it.
    list: TypeList,
}
+
+impl Deref for TypeAlloc {
+ type Target = TypeList;
+ fn deref(&self) -> &TypeList {
+ &self.list
+ }
+}
+
+impl DerefMut for TypeAlloc {
+ fn deref_mut(&mut self) -> &mut TypeList {
+ &mut self.list
+ }
+}
+
+impl TypeAlloc {
+ /// Pushes a new anonymous type into this list which will have its
+ /// `unique_id` field cleared.
+ pub fn push_anon(&mut self, ty: Type) -> TypeId {
+ let index = self.list.len();
+ let type_size = ty.type_size();
+ self.list.push(ty);
+ TypeId {
+ index,
+ type_size,
+ unique_id: 0,
+ }
+ }
+
+ /// Pushes a new defined type which has an index in core wasm onto this
+ /// list.
+ ///
+ /// The returned `TypeId` is guaranteed to be unique and not hash-equivalent
+ /// to any other prior ID in this list.
+ pub fn push_defined(&mut self, ty: Type) -> TypeId {
+ let id = self.push_anon(ty);
+ self.with_unique(id)
+ }
+}
+
+impl Default for TypeAlloc {
+ fn default() -> TypeAlloc {
+ TypeAlloc {
+ list: Default::default(),
+ }
+ }
+}