author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-28 14:29:10 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-28 14:29:10 +0000
commit     2aa4a82499d4becd2284cdb482213d541b8804dd (patch)
tree       b80bf8bf13c3766139fbacc530efd0dd9d54394c /third_party/rust/parity-wasm/src
parent     Initial commit. (diff)
Adding upstream version 86.0.1. (upstream/86.0.1, upstream)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'third_party/rust/parity-wasm/src')
-rw-r--r--  third_party/rust/parity-wasm/src/builder/code.rs | 414
-rw-r--r--  third_party/rust/parity-wasm/src/builder/data.rs | 56
-rw-r--r--  third_party/rust/parity-wasm/src/builder/export.rs | 116
-rw-r--r--  third_party/rust/parity-wasm/src/builder/global.rs | 93
-rw-r--r--  third_party/rust/parity-wasm/src/builder/import.rs | 130
-rw-r--r--  third_party/rust/parity-wasm/src/builder/invoke.rs | 17
-rw-r--r--  third_party/rust/parity-wasm/src/builder/memory.rs | 85
-rw-r--r--  third_party/rust/parity-wasm/src/builder/misc.rs | 93
-rw-r--r--  third_party/rust/parity-wasm/src/builder/mod.rs | 25
-rw-r--r--  third_party/rust/parity-wasm/src/builder/module.rs | 598
-rw-r--r--  third_party/rust/parity-wasm/src/builder/table.rs | 85
-rw-r--r--  third_party/rust/parity-wasm/src/elements/export_entry.rs | 102
-rw-r--r--  third_party/rust/parity-wasm/src/elements/func.rs | 156
-rw-r--r--  third_party/rust/parity-wasm/src/elements/global_entry.rs | 50
-rw-r--r--  third_party/rust/parity-wasm/src/elements/import_entry.rs | 356
-rw-r--r--  third_party/rust/parity-wasm/src/elements/index_map.rs | 595
-rw-r--r--  third_party/rust/parity-wasm/src/elements/mod.rs | 332
-rw-r--r--  third_party/rust/parity-wasm/src/elements/module.rs | 971
-rw-r--r--  third_party/rust/parity-wasm/src/elements/name_section.rs | 373
-rw-r--r--  third_party/rust/parity-wasm/src/elements/ops.rs | 2951
-rw-r--r--  third_party/rust/parity-wasm/src/elements/primitives.rs | 902
-rw-r--r--  third_party/rust/parity-wasm/src/elements/reloc_section.rs | 347
-rw-r--r--  third_party/rust/parity-wasm/src/elements/section.rs | 1153
-rw-r--r--  third_party/rust/parity-wasm/src/elements/segment.rs | 283
-rw-r--r--  third_party/rust/parity-wasm/src/elements/types.rs | 267
-rw-r--r--  third_party/rust/parity-wasm/src/io.rs | 122
-rw-r--r--  third_party/rust/parity-wasm/src/lib.rs | 24
27 files changed, 10696 insertions, 0 deletions
diff --git a/third_party/rust/parity-wasm/src/builder/code.rs b/third_party/rust/parity-wasm/src/builder/code.rs
new file mode 100644
index 0000000000..305a7e7fad
--- /dev/null
+++ b/third_party/rust/parity-wasm/src/builder/code.rs
@@ -0,0 +1,414 @@
+use alloc::vec::Vec;
+use crate::elements;
+use super::{
+ invoke::{Invoke, Identity},
+ misc::{ValueTypeBuilder, ValueTypesBuilder, OptionalValueTypeBuilder},
+};
+
+/// Signature template description
+pub enum Signature {
+ TypeReference(u32),
+ Inline(elements::FunctionType),
+}
+
+/// Signature builder
+pub struct SignatureBuilder<F=Identity> {
+ callback: F,
+ signature: elements::FunctionType,
+}
+
+impl SignatureBuilder {
+ /// New signature builder
+ pub fn new() -> Self {
+ SignatureBuilder::with_callback(Identity)
+ }
+}
+
+impl<F> SignatureBuilder<F> where F: Invoke<elements::FunctionType> {
+ /// New builder with callback function specified
+ pub fn with_callback(callback: F) -> Self {
+ SignatureBuilder {
+ callback: callback,
+ signature: elements::FunctionType::default(),
+ }
+ }
+
+ /// Add argument to signature builder
+ pub fn with_param(mut self, value_type: elements::ValueType) -> Self {
+ self.signature.params_mut().push(value_type);
+ self
+ }
+
+ /// Add multiple arguments to signature builder
+ pub fn with_params(mut self, value_types: Vec<elements::ValueType>) -> Self {
+ self.signature.params_mut().extend(value_types);
+ self
+ }
+
+ /// Override signature return type
+ pub fn with_return_type(mut self, return_type: Option<elements::ValueType>) -> Self {
+ *self.signature.return_type_mut() = return_type;
+ self
+ }
+
+ /// Start building a new argument
+ pub fn param(self) -> ValueTypeBuilder<Self> {
+ ValueTypeBuilder::with_callback(self)
+ }
+
+ /// Start building multiple arguments
+ pub fn params(self) -> ValueTypesBuilder<Self> {
+ ValueTypesBuilder::with_callback(self)
+ }
+
+ /// Start building return type
+ pub fn return_type(self) -> OptionalValueTypeBuilder<Self> {
+ OptionalValueTypeBuilder::with_callback(self)
+ }
+
+ /// Finish current builder
+ pub fn build(self) -> F::Result {
+ self.callback.invoke(self.signature)
+ }
+
+ /// Finish current builder returning intermediate `Signature` struct
+ pub fn build_sig(self) -> Signature {
+ Signature::Inline(self.signature)
+ }
+}
+
+impl<F> Invoke<Vec<elements::ValueType>> for SignatureBuilder<F>
+ where F: Invoke<elements::FunctionType>
+{
+ type Result = Self;
+
+ fn invoke(self, args: Vec<elements::ValueType>) -> Self {
+ self.with_params(args)
+ }
+}
+
+impl<F> Invoke<Option<elements::ValueType>> for SignatureBuilder<F>
+ where F: Invoke<elements::FunctionType>
+{
+ type Result = Self;
+
+ fn invoke(self, arg: Option<elements::ValueType>) -> Self {
+ self.with_return_type(arg)
+ }
+}
+
+impl<F> Invoke<elements::ValueType> for SignatureBuilder<F>
+ where F: Invoke<elements::FunctionType>
+{
+ type Result = Self;
+
+ fn invoke(self, arg: elements::ValueType) -> Self {
+ self.with_param(arg)
+ }
+}
+
+/// Type (signature) reference builder (for function/import/indirect call)
+pub struct TypeRefBuilder<F=Identity> {
+ callback: F,
+ type_ref: u32,
+}
+
+impl<F> TypeRefBuilder<F> where F: Invoke<u32> {
+ /// New builder chained with specified callback
+ pub fn with_callback(callback: F) -> Self {
+ TypeRefBuilder {
+ callback: callback,
+ type_ref: 0
+ }
+ }
+
+ /// Set/override the type reference
+ pub fn val(mut self, val: u32) -> Self {
+ self.type_ref = val;
+ self
+ }
+
+ /// Finish current builder
+ pub fn build(self) -> F::Result { self.callback.invoke(self.type_ref) }
+}
+
+/// Multiple signatures builder
+pub struct SignaturesBuilder<F=Identity> {
+ callback: F,
+ section: Vec<Signature>,
+}
+
+impl SignaturesBuilder {
+ /// New empty functions section builder
+ pub fn new() -> Self {
+ SignaturesBuilder::with_callback(Identity)
+ }
+}
+
+impl<F> SignaturesBuilder<F> {
+ /// New builder chained with specified callback
+ pub fn with_callback(callback: F) -> Self {
+ SignaturesBuilder {
+ callback: callback,
+ section: Vec::new(),
+ }
+ }
+
+ /// Push new signature into the builder output
+ pub fn with_signature(mut self, signature: Signature) -> Self {
+ self.section.push(signature);
+ self
+ }
+
+ /// Start building new signature with `TypeRefBuilder`
+ pub fn type_ref(self) -> TypeRefBuilder<Self> {
+ TypeRefBuilder::with_callback(self)
+ }
+}
+
+impl<F> SignaturesBuilder<F> where F: Invoke<SignatureBindings> {
+ /// Start building new signature with dedicated builder
+ pub fn signature(self) -> SignatureBuilder<Self> {
+ SignatureBuilder::with_callback(self)
+ }
+}
+
+impl<F> Invoke<elements::FunctionType> for SignaturesBuilder<F> {
+ type Result = Self;
+
+ fn invoke(self, signature: elements::FunctionType) -> Self {
+ self.with_signature(Signature::Inline(signature))
+ }
+}
+
+impl<F> Invoke<u32> for SignaturesBuilder<F> {
+ type Result = Self;
+
+ fn invoke(self, type_ref: u32) -> Self {
+ self.with_signature(Signature::TypeReference(type_ref))
+ }
+}
+
+impl<F> SignaturesBuilder<F> where F: Invoke<elements::FunctionSection> {
+
+ /// Finalize builder spawning element
+ pub fn build(self) -> F::Result {
+ let mut result = elements::FunctionSection::default();
+ for f in self.section.into_iter() {
+ if let Signature::TypeReference(type_ref) = f {
+ result.entries_mut().push(elements::Func::new(type_ref));
+ } else {
+ unreachable!(); // never possible with the current generics impls
+ }
+ }
+ self.callback.invoke(result)
+ }
+}
+
+/// Signature bindings
+pub type SignatureBindings = Vec<Signature>;
+
+impl<F> SignaturesBuilder<F> where F: Invoke<SignatureBindings> {
+ /// Bind signature list
+ pub fn bind(self) -> F::Result {
+ self.callback.invoke(self.section)
+ }
+}
+
+/// Function body (code) builder
+pub struct FuncBodyBuilder<F=Identity> {
+ callback: F,
+ body: elements::FuncBody,
+}
+
+impl<F> FuncBodyBuilder<F> {
+ /// New body (code) builder given the chain callback
+ pub fn with_callback(callback: F) -> Self {
+ FuncBodyBuilder {
+ callback: callback,
+ body: elements::FuncBody::new(Vec::new(), elements::Instructions::empty()),
+ }
+ }
+}
+
+impl<F> FuncBodyBuilder<F> where F: Invoke<elements::FuncBody> {
+ /// Set/override entirely with FuncBody struct
+ pub fn with_func(mut self, func: elements::FuncBody) -> Self {
+ self.body = func;
+ self
+ }
+
+ /// Extend function local list with new entries
+ pub fn with_locals(mut self, locals: Vec<elements::Local>) -> Self {
+ self.body.locals_mut().extend(locals);
+ self
+ }
+
+ /// Set code of the function
+ pub fn with_instructions(mut self, instructions: elements::Instructions) -> Self {
+ *self.body.code_mut() = instructions;
+ self
+ }
+
+ /// Finish current builder spawning resulting struct
+ pub fn build(self) -> F::Result {
+ self.callback.invoke(self.body)
+ }
+}
+
+/// Function definition (an extended structure that specifies a function entirely: signature, start-function flag, and code)
+pub struct FunctionDefinition {
+ /// Whether this function is the module's start (main) function
+ pub is_main: bool,
+ /// Signature description
+ pub signature: Signature,
+ /// Body (code) of the function
+ pub code: elements::FuncBody,
+}
+
+impl Default for FunctionDefinition {
+ fn default() -> Self {
+ FunctionDefinition {
+ is_main: false,
+ signature: Signature::TypeReference(0),
+ code: elements::FuncBody::empty(),
+ }
+ }
+}
+
+/// Function definition builder
+pub struct FunctionBuilder<F=Identity> {
+ callback: F,
+ func: FunctionDefinition,
+}
+
+impl FunctionBuilder {
+ /// New function builder
+ pub fn new() -> Self {
+ FunctionBuilder::with_callback(Identity)
+ }
+}
+
+impl<F> FunctionBuilder<F> where F: Invoke<FunctionDefinition> {
+ /// New function builder with chained callback
+ pub fn with_callback(callback: F) -> Self {
+ FunctionBuilder {
+ callback: callback,
+ func: Default::default(),
+ }
+ }
+
+ /// Mark this function as the main entry point (module start function)
+ pub fn main(mut self) -> Self {
+ self.func.is_main = true;
+ self
+ }
+
+ /// Start signature builder of the function
+ pub fn signature(self) -> SignatureBuilder<Self> {
+ SignatureBuilder::with_callback(self)
+ }
+
+ /// Override current signature entirely with new one from known struct
+ pub fn with_signature(mut self, signature: Signature) -> Self {
+ self.func.signature = signature;
+ self
+ }
+
+ /// Start code (body) builder
+ pub fn body(self) -> FuncBodyBuilder<Self> {
+ FuncBodyBuilder::with_callback(self)
+ }
+
+ /// Set body (code) for this function
+ pub fn with_body(mut self, body: elements::FuncBody) -> Self {
+ self.func.code = body;
+ self
+ }
+
+ /// Finalize current builder spawning resulting struct in the callback
+ pub fn build(self) -> F::Result {
+ self.callback.invoke(self.func)
+ }
+}
+
+impl<F> Invoke<elements::FunctionType> for FunctionBuilder<F> where F: Invoke<FunctionDefinition> {
+ type Result = Self;
+
+ fn invoke(self, signature: elements::FunctionType) -> Self {
+ self.with_signature(Signature::Inline(signature))
+ }
+}
+
+impl<F> Invoke<u32> for FunctionBuilder<F> where F: Invoke<FunctionDefinition> {
+ type Result = Self;
+
+ fn invoke(self, type_ref: u32) -> Self {
+ self.with_signature(Signature::TypeReference(type_ref))
+ }
+}
+
+impl<F> Invoke<elements::FuncBody> for FunctionBuilder<F> where F: Invoke<FunctionDefinition> {
+ type Result = Self;
+
+ fn invoke(self, body: elements::FuncBody) -> Self::Result {
+ self.with_body(body)
+ }
+}
+
+/// New builder of signature list
+pub fn signatures() -> SignaturesBuilder {
+ SignaturesBuilder::new()
+}
+
+/// New signature builder
+pub fn signature() -> SignatureBuilder {
+ SignatureBuilder::new()
+}
+
+/// New builder of function (signature & body)
+pub fn function() -> FunctionBuilder {
+ FunctionBuilder::new()
+}
+
+#[cfg(test)]
+mod tests {
+
+ use super::{signatures, function};
+ use crate::elements;
+
+ #[test]
+ fn example() {
+ let result = signatures()
+ .type_ref().val(1).build()
+ .build();
+
+ assert_eq!(result.entries().len(), 1);
+
+ let result = signatures()
+ .signature()
+ .param().i32()
+ .param().i32()
+ .return_type().i64()
+ .build()
+ .bind();
+
+ assert_eq!(result.len(), 1);
+ }
+
+ #[test]
+ fn func_example() {
+ let func = function()
+ .signature()
+ .param().i32()
+ .return_type().i32()
+ .build()
+ .body()
+ .with_instructions(elements::Instructions::empty())
+ .build()
+ .build();
+
+ assert_eq!(func.code.locals().len(), 0);
+ assert_eq!(func.code.code().elements().len(), 1);
+ }
+}
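For illustration, a minimal sketch of driving the standalone builders above from outside the crate; the published crate name `parity_wasm` is assumed here:

use parity_wasm::builder;
use parity_wasm::elements::{Instructions, ValueType};

fn main() {
    // Build an inline signature: (i32, i32) -> i64.
    let sig = builder::signature()
        .with_param(ValueType::I32)
        .with_param(ValueType::I32)
        .with_return_type(Some(ValueType::I64))
        .build_sig();

    // Build a complete function definition that reuses the signature
    // and carries an (empty) body.
    let func = builder::function()
        .with_signature(sig)
        .body()
            .with_instructions(Instructions::empty())
            .build()
        .build();

    assert!(!func.is_main);
}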
diff --git a/third_party/rust/parity-wasm/src/builder/data.rs b/third_party/rust/parity-wasm/src/builder/data.rs
new file mode 100644
index 0000000000..bca5e34f1a
--- /dev/null
+++ b/third_party/rust/parity-wasm/src/builder/data.rs
@@ -0,0 +1,56 @@
+use alloc::vec::Vec;
+use super::invoke::{Identity, Invoke};
+use crate::elements;
+
+/// Data segment builder
+pub struct DataSegmentBuilder<F=Identity> {
+ callback: F,
+ // todo: add mapper once multiple memory refs possible
+ mem_index: u32,
+ offset: elements::InitExpr,
+ value: Vec<u8>,
+}
+
+impl DataSegmentBuilder {
+ /// New data segment builder
+ pub fn new() -> Self {
+ DataSegmentBuilder::with_callback(Identity)
+ }
+}
+
+impl<F> DataSegmentBuilder<F> {
+ /// New data segment builder inside the chain context
+ pub fn with_callback(callback: F) -> Self {
+ DataSegmentBuilder {
+ callback: callback,
+ mem_index: 0,
+ offset: elements::InitExpr::empty(),
+ value: Vec::new(),
+ }
+ }
+
+ /// Set offset initialization instruction. `End` instruction will be added automatically.
+ pub fn offset(mut self, instruction: elements::Instruction) -> Self {
+ self.offset = elements::InitExpr::new(vec![instruction, elements::Instruction::End]);
+ self
+ }
+
+ /// Set the bytes value of the segment
+ pub fn value(mut self, value: Vec<u8>) -> Self {
+ self.value = value;
+ self
+ }
+}
+
+impl<F> DataSegmentBuilder<F> where F: Invoke<elements::DataSegment> {
+ /// Finish current builder, spawning resulting struct
+ pub fn build(self) -> F::Result {
+ self.callback.invoke(
+ elements::DataSegment::new(
+ self.mem_index,
+ Some(self.offset),
+ self.value,
+ )
+ )
+ }
+}
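A small sketch of the data segment builder used on its own; the crate name `parity_wasm` and the `value()` accessor on `elements::DataSegment` are assumed here:

use parity_wasm::builder::DataSegmentBuilder;
use parity_wasm::elements::Instruction;

fn main() {
    // Segment for memory 0 at offset 16; `offset()` appends the `End`
    // instruction to the init expression automatically.
    let segment = DataSegmentBuilder::new()
        .offset(Instruction::I32Const(16))
        .value(vec![1u8, 2, 3, 4])
        .build();

    assert_eq!(segment.value().len(), 4);
}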
diff --git a/third_party/rust/parity-wasm/src/builder/export.rs b/third_party/rust/parity-wasm/src/builder/export.rs
new file mode 100644
index 0000000000..a2de79c24e
--- /dev/null
+++ b/third_party/rust/parity-wasm/src/builder/export.rs
@@ -0,0 +1,116 @@
+use alloc::{borrow::ToOwned, string::String};
+use super::invoke::{Invoke, Identity};
+use crate::elements;
+
+/// Export entry builder
+pub struct ExportBuilder<F=Identity> {
+ callback: F,
+ field: String,
+ binding: elements::Internal,
+}
+
+impl ExportBuilder {
+ /// New export builder
+ pub fn new() -> Self {
+ ExportBuilder::with_callback(Identity)
+ }
+}
+
+impl<F> ExportBuilder<F> {
+
+ /// New export entry builder in the specified chained context
+ pub fn with_callback(callback: F) -> Self {
+ ExportBuilder {
+ callback: callback,
+ field: String::new(),
+ binding: elements::Internal::Function(0),
+ }
+ }
+
+ /// Set the field name of the export entry
+ pub fn field(mut self, field: &str) -> Self {
+ self.field = field.to_owned();
+ self
+ }
+
+ /// Specify the internal module mapping for this entry
+ pub fn with_internal(mut self, external: elements::Internal) -> Self {
+ self.binding = external;
+ self
+ }
+
+ /// Start the internal builder for this export entry
+ pub fn internal(self) -> ExportInternalBuilder<Self> {
+ ExportInternalBuilder::with_callback(self)
+ }
+}
+
+impl<F> ExportBuilder<F> where F: Invoke<elements::ExportEntry> {
+ /// Finalize export entry builder spawning the resulting struct
+ pub fn build(self) -> F::Result {
+ self.callback.invoke(elements::ExportEntry::new(self.field, self.binding))
+ }
+}
+
+impl<F> Invoke<elements::Internal> for ExportBuilder<F> {
+ type Result = Self;
+ fn invoke(self, val: elements::Internal) -> Self {
+ self.with_internal(val)
+ }
+}
+
+/// Internal mapping builder for export entry
+pub struct ExportInternalBuilder<F=Identity> {
+ callback: F,
+ binding: elements::Internal,
+}
+
+impl<F> ExportInternalBuilder<F> where F: Invoke<elements::Internal> {
+ /// New export entry internal mapping for the chained context
+ pub fn with_callback(callback: F) -> Self {
+ ExportInternalBuilder{
+ callback: callback,
+ binding: elements::Internal::Function(0),
+ }
+ }
+
+ /// Map to function by index
+ pub fn func(mut self, index: u32) -> F::Result {
+ self.binding = elements::Internal::Function(index);
+ self.callback.invoke(self.binding)
+ }
+
+ /// Map to memory
+ pub fn memory(mut self, index: u32) -> F::Result {
+ self.binding = elements::Internal::Memory(index);
+ self.callback.invoke(self.binding)
+ }
+
+ /// Map to table
+ pub fn table(mut self, index: u32) -> F::Result {
+ self.binding = elements::Internal::Table(index);
+ self.callback.invoke(self.binding)
+ }
+
+ /// Map to global
+ pub fn global(mut self, index: u32) -> F::Result {
+ self.binding = elements::Internal::Global(index);
+ self.callback.invoke(self.binding)
+ }
+}
+
+/// New builder for export entry
+pub fn export() -> ExportBuilder {
+ ExportBuilder::new()
+}
+
+#[cfg(test)]
+mod tests {
+ use super::export;
+
+ #[test]
+ fn example() {
+ let entry = export().field("memory").internal().memory(0).build();
+ assert_eq!(entry.field(), "memory");
+ }
+}
diff --git a/third_party/rust/parity-wasm/src/builder/global.rs b/third_party/rust/parity-wasm/src/builder/global.rs
new file mode 100644
index 0000000000..0b3acb8560
--- /dev/null
+++ b/third_party/rust/parity-wasm/src/builder/global.rs
@@ -0,0 +1,93 @@
+use super::{
+ invoke::{Invoke, Identity},
+ misc::ValueTypeBuilder,
+};
+
+use crate::elements;
+
+/// Global builder
+pub struct GlobalBuilder<F=Identity> {
+ callback: F,
+ value_type: elements::ValueType,
+ is_mutable: bool,
+ init_expr: elements::InitExpr,
+}
+
+impl GlobalBuilder {
+ /// New global builder
+ pub fn new() -> Self {
+ GlobalBuilder::with_callback(Identity)
+ }
+}
+
+impl<F> GlobalBuilder<F> {
+ /// New global builder with callback (in chained context)
+ pub fn with_callback(callback: F) -> Self {
+ GlobalBuilder {
+ callback: callback,
+ value_type: elements::ValueType::I32,
+ init_expr: elements::InitExpr::empty(),
+ is_mutable: false,
+ }
+ }
+
+ /// Set/override resulting global type
+ pub fn with_type(mut self, value_type: elements::ValueType) -> Self {
+ self.value_type = value_type;
+ self
+ }
+
+ /// Set mutability to true
+ pub fn mutable(mut self) -> Self {
+ self.is_mutable = true;
+ self
+ }
+
+ /// Set initialization expression instruction for this global (an `End` instruction will be added automatically)
+ pub fn init_expr(mut self, instruction: elements::Instruction) -> Self {
+ self.init_expr = elements::InitExpr::new(vec![instruction, elements::Instruction::End]);
+ self
+ }
+
+ /// Start value type builder
+ pub fn value_type(self) -> ValueTypeBuilder<Self> {
+ ValueTypeBuilder::with_callback(self)
+ }
+}
+
+impl<F> GlobalBuilder<F> where F: Invoke<elements::GlobalEntry> {
+ /// Finalize current builder spawning resulting struct
+ pub fn build(self) -> F::Result {
+ self.callback.invoke(
+ elements::GlobalEntry::new(
+ elements::GlobalType::new(self.value_type, self.is_mutable),
+ self.init_expr,
+ )
+ )
+ }
+}
+
+impl<F> Invoke<elements::ValueType> for GlobalBuilder<F> {
+ type Result = Self;
+ fn invoke(self, the_type: elements::ValueType) -> Self {
+ self.with_type(the_type)
+ }
+}
+
+/// New builder for global entry
+pub fn global() -> GlobalBuilder {
+ GlobalBuilder::new()
+}
+
+#[cfg(test)]
+mod tests {
+ use super::global;
+ use crate::elements;
+
+ #[test]
+ fn example() {
+ let entry = global().value_type().i32().build();
+ assert_eq!(entry.global_type().content_type(), elements::ValueType::I32);
+ assert_eq!(entry.global_type().is_mutable(), false);
+ }
+}
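A brief sketch combining the mutable flag with an init expression, which the test above does not cover; the crate name `parity_wasm` is assumed:

use parity_wasm::builder::global;
use parity_wasm::elements::Instruction;

fn main() {
    // Mutable i64 global initialized to 42; `End` is appended automatically.
    let entry = global()
        .value_type().i64()
        .mutable()
        .init_expr(Instruction::I64Const(42))
        .build();

    assert!(entry.global_type().is_mutable());
}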
diff --git a/third_party/rust/parity-wasm/src/builder/import.rs b/third_party/rust/parity-wasm/src/builder/import.rs
new file mode 100644
index 0000000000..4d9d7665fe
--- /dev/null
+++ b/third_party/rust/parity-wasm/src/builder/import.rs
@@ -0,0 +1,130 @@
+use alloc::{borrow::ToOwned, string::String};
+use super::invoke::{Identity, Invoke};
+use crate::elements;
+
+/// Import builder
+pub struct ImportBuilder<F=Identity> {
+ callback: F,
+ module: String,
+ field: String,
+ binding: elements::External,
+}
+
+impl ImportBuilder {
+ /// New import builder
+ pub fn new() -> Self {
+ ImportBuilder::with_callback(Identity)
+ }
+}
+
+impl<F> ImportBuilder<F> {
+ /// New import builder with callback (in chained context)
+ pub fn with_callback(callback: F) -> Self {
+ ImportBuilder {
+ callback: callback,
+ module: String::new(),
+ field: String::new(),
+ binding: elements::External::Function(0),
+ }
+ }
+
+ /// Set/override module name
+ pub fn module(mut self, name: &str) -> Self {
+ self.module = name.to_owned();
+ self
+ }
+
+ /// Set/override field name
+ pub fn field(mut self, name: &str) -> Self {
+ self.field = name.to_owned();
+ self
+ }
+
+ /// Set/override both module name and field name
+ pub fn path(self, module: &str, field: &str) -> Self {
+ self.module(module).field(field)
+ }
+
+ /// Set/override external mapping for this import
+ pub fn with_external(mut self, external: elements::External) -> Self {
+ self.binding = external;
+ self
+ }
+
+ /// Start new external mapping builder
+ pub fn external(self) -> ImportExternalBuilder<Self> {
+ ImportExternalBuilder::with_callback(self)
+ }
+}
+
+impl<F> ImportBuilder<F> where F: Invoke<elements::ImportEntry> {
+ /// Finalize current builder spawning the resulting struct
+ pub fn build(self) -> F::Result {
+ self.callback.invoke(elements::ImportEntry::new(self.module, self.field, self.binding))
+ }
+}
+
+impl<F> Invoke<elements::External> for ImportBuilder<F> {
+ type Result = Self;
+ fn invoke(self, val: elements::External) -> Self {
+ self.with_external(val)
+ }
+}
+
+/// Import to external mapping builder
+pub struct ImportExternalBuilder<F=Identity> {
+ callback: F,
+ binding: elements::External,
+}
+
+impl<F> ImportExternalBuilder<F> where F: Invoke<elements::External> {
+ /// New import to external mapping builder with callback (in chained context)
+ pub fn with_callback(callback: F) -> Self {
+ ImportExternalBuilder{
+ callback: callback,
+ binding: elements::External::Function(0),
+ }
+ }
+
+ /// Function mapping with type reference
+ pub fn func(mut self, index: u32) -> F::Result {
+ self.binding = elements::External::Function(index);
+ self.callback.invoke(self.binding)
+ }
+
+ /// Memory mapping with specified limits
+ pub fn memory(mut self, min: u32, max: Option<u32>) -> F::Result {
+ self.binding = elements::External::Memory(elements::MemoryType::new(min, max));
+ self.callback.invoke(self.binding)
+ }
+
+ /// Table mapping with specified limits
+ pub fn table(mut self, min: u32, max: Option<u32>) -> F::Result {
+ self.binding = elements::External::Table(elements::TableType::new(min, max));
+ self.callback.invoke(self.binding)
+ }
+
+ /// Global mapping with specified type and mutability
+ pub fn global(mut self, value_type: elements::ValueType, is_mut: bool) -> F::Result {
+ self.binding = elements::External::Global(elements::GlobalType::new(value_type, is_mut));
+ self.callback.invoke(self.binding)
+ }
+}
+
+/// New builder for import entry
+pub fn import() -> ImportBuilder {
+ ImportBuilder::new()
+}
+
+#[cfg(test)]
+mod tests {
+ use super::import;
+
+ #[test]
+ fn example() {
+ let entry = import().module("env").field("memory").external().memory(256, Some(256)).build();
+
+ assert_eq!(entry.module(), "env");
+ assert_eq!(entry.field(), "memory");
+ }
+}
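A companion sketch for a function import bound to type (signature) index 0; the crate name `parity_wasm` is assumed:

use parity_wasm::builder::import;

fn main() {
    // Import `env.log` as a function referencing type index 0.
    let entry = import()
        .path("env", "log")
        .external().func(0)
        .build();

    assert_eq!(entry.module(), "env");
    assert_eq!(entry.field(), "log");
}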
diff --git a/third_party/rust/parity-wasm/src/builder/invoke.rs b/third_party/rust/parity-wasm/src/builder/invoke.rs
new file mode 100644
index 0000000000..1dd6598d0d
--- /dev/null
+++ b/third_party/rust/parity-wasm/src/builder/invoke.rs
@@ -0,0 +1,17 @@
+//! invoke helper
+
+/// Helper trait to allow chaining
+pub trait Invoke<A> {
+ type Result;
+
+ fn invoke(self, arg: A) -> Self::Result;
+}
+
+/// Identity chain element
+pub struct Identity;
+
+impl<A> Invoke<A> for Identity {
+ type Result = A;
+
+ fn invoke(self, arg: A) -> A { arg }
+}
\ No newline at end of file
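To illustrate how the chaining works, a sketch of a custom sink, written as it might appear in a sibling module inside `builder` (the `Invoke` trait itself is not re-exported from the crate); `CountSink` is purely hypothetical:

use super::invoke::Invoke;
use super::code::SignatureBuilder;
use crate::elements::FunctionType;

/// Hypothetical sink that counts how many signatures were built into it.
struct CountSink(u32);

impl Invoke<FunctionType> for CountSink {
    type Result = CountSink;

    fn invoke(self, _sig: FunctionType) -> CountSink {
        CountSink(self.0 + 1)
    }
}

#[test]
fn chain_terminates_in_custom_sink() {
    // `build()` hands the finished FunctionType to the sink and returns its Result.
    let sink = SignatureBuilder::with_callback(CountSink(0))
        .param().i32()
        .build();
    assert_eq!(sink.0, 1);
}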
diff --git a/third_party/rust/parity-wasm/src/builder/memory.rs b/third_party/rust/parity-wasm/src/builder/memory.rs
new file mode 100644
index 0000000000..9f7a6887ef
--- /dev/null
+++ b/third_party/rust/parity-wasm/src/builder/memory.rs
@@ -0,0 +1,85 @@
+use alloc::vec::Vec;
+use crate::elements;
+use super::invoke::{Invoke, Identity};
+
+/// Memory definition struct
+#[derive(Debug, PartialEq)]
+pub struct MemoryDefinition {
+ /// Minimum memory size
+ pub min: u32,
+ /// Maximum memory size
+ pub max: Option<u32>,
+ /// Memory data segments (static regions)
+ pub data: Vec<MemoryDataDefinition>,
+}
+
+/// Memory static region entry definition
+#[derive(Debug, PartialEq)]
+pub struct MemoryDataDefinition {
+ /// Segment initialization expression for offset
+ pub offset: elements::InitExpr,
+ /// Raw bytes of static region
+ pub values: Vec<u8>,
+}
+
+/// Memory and static regions builder
+pub struct MemoryBuilder<F=Identity> {
+ callback: F,
+ memory: MemoryDefinition,
+}
+
+impl MemoryBuilder {
+ /// New memory builder
+ pub fn new() -> Self {
+ MemoryBuilder::with_callback(Identity)
+ }
+}
+
+impl<F> MemoryBuilder<F> where F: Invoke<MemoryDefinition> {
+ /// New memory builder with callback (in chained context)
+ pub fn with_callback(callback: F) -> Self {
+ MemoryBuilder {
+ callback: callback,
+ memory: Default::default(),
+ }
+ }
+
+ /// Set/override minimum size
+ pub fn with_min(mut self, min: u32) -> Self {
+ self.memory.min = min;
+ self
+ }
+
+ /// Set/override maximum size
+ pub fn with_max(mut self, max: Option<u32>) -> Self {
+ self.memory.max = max;
+ self
+ }
+
+ /// Push a new static region with an offset init expression at `index` and the given raw bytes
+ pub fn with_data(mut self, index: u32, values: Vec<u8>) -> Self {
+ self.memory.data.push(MemoryDataDefinition {
+ offset: elements::InitExpr::new(vec![
+ elements::Instruction::I32Const(index as i32),
+ elements::Instruction::End,
+ ]),
+ values: values,
+ });
+ self
+ }
+
+ /// Finalize current builder, spawning resulting struct
+ pub fn build(self) -> F::Result {
+ self.callback.invoke(self.memory)
+ }
+}
+
+impl Default for MemoryDefinition {
+ fn default() -> Self {
+ MemoryDefinition {
+ min: 1,
+ max: None,
+ data: Vec::new(),
+ }
+ }
+}
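Since this file ships no test, a sketch of declaring a memory with a data region through the module builder; the crate name `parity_wasm` and the `memory_section()` accessor on `elements::Module` are assumed:

use parity_wasm::builder::module;

fn main() {
    // One linear memory: min 1 page, max 16 pages, plus a data region at offset 0.
    let module = module()
        .memory()
            .with_min(1)
            .with_max(Some(16))
            .with_data(0, vec![0xde, 0xad, 0xbe, 0xef])
            .build()
        .build();

    assert_eq!(module.memory_section().expect("memory section").entries().len(), 1);
    assert_eq!(module.data_section().expect("data section").entries().len(), 1);
}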
diff --git a/third_party/rust/parity-wasm/src/builder/misc.rs b/third_party/rust/parity-wasm/src/builder/misc.rs
new file mode 100644
index 0000000000..2b6cd2dcc5
--- /dev/null
+++ b/third_party/rust/parity-wasm/src/builder/misc.rs
@@ -0,0 +1,93 @@
+use alloc::vec::Vec;
+use super::invoke::{Invoke, Identity};
+use crate::elements;
+
+pub struct ValueTypeBuilder<F=Identity> {
+ callback: F,
+}
+
+impl<F> ValueTypeBuilder<F> where F: Invoke<elements::ValueType> {
+ pub fn with_callback(callback: F) -> Self {
+ ValueTypeBuilder { callback: callback }
+ }
+
+ pub fn i32(self) -> F::Result {
+ self.callback.invoke(elements::ValueType::I32)
+ }
+
+ pub fn i64(self) -> F::Result {
+ self.callback.invoke(elements::ValueType::I64)
+ }
+
+ pub fn f32(self) -> F::Result {
+ self.callback.invoke(elements::ValueType::F32)
+ }
+
+ pub fn f64(self) -> F::Result {
+ self.callback.invoke(elements::ValueType::F64)
+ }
+}
+
+pub struct OptionalValueTypeBuilder<F=Identity> {
+ callback: F,
+}
+
+impl<F> OptionalValueTypeBuilder<F> where F: Invoke<Option<elements::ValueType>> {
+ pub fn with_callback(callback: F) -> Self {
+ OptionalValueTypeBuilder { callback: callback }
+ }
+
+ pub fn i32(self) -> F::Result {
+ self.callback.invoke(Some(elements::ValueType::I32))
+ }
+
+ pub fn i64(self) -> F::Result {
+ self.callback.invoke(Some(elements::ValueType::I64))
+ }
+
+ pub fn f32(self) -> F::Result {
+ self.callback.invoke(Some(elements::ValueType::F32))
+ }
+
+ pub fn f64(self) -> F::Result {
+ self.callback.invoke(Some(elements::ValueType::F64))
+ }
+}
+
+pub struct ValueTypesBuilder<F=Identity> {
+ callback: F,
+ value_types: Vec<elements::ValueType>,
+}
+
+impl<F> ValueTypesBuilder<F> where F: Invoke<Vec<elements::ValueType>> {
+ pub fn with_callback(callback: F) -> Self {
+ ValueTypesBuilder {
+ callback: callback,
+ value_types: Vec::new(),
+ }
+ }
+
+ pub fn i32(mut self) -> Self {
+ self.value_types.push(elements::ValueType::I32);
+ self
+ }
+
+ pub fn i64(mut self) -> Self {
+ self.value_types.push(elements::ValueType::I64);
+ self
+ }
+
+ pub fn f32(mut self) -> Self {
+ self.value_types.push(elements::ValueType::F32);
+ self
+ }
+
+ pub fn f64(mut self) -> Self {
+ self.value_types.push(elements::ValueType::F64);
+ self
+ }
+
+ pub fn build(self) -> F::Result {
+ self.callback.invoke(self.value_types)
+ }
+}
diff --git a/third_party/rust/parity-wasm/src/builder/mod.rs b/third_party/rust/parity-wasm/src/builder/mod.rs
new file mode 100644
index 0000000000..b40b05efcd
--- /dev/null
+++ b/third_party/rust/parity-wasm/src/builder/mod.rs
@@ -0,0 +1,25 @@
+//! Various builders to generate/alter wasm components
+
+mod invoke;
+mod module;
+mod code;
+mod misc;
+mod import;
+mod memory;
+mod table;
+mod export;
+mod global;
+mod data;
+
+pub use self::code::{
+ signatures, signature, function, SignatureBuilder, SignaturesBuilder,
+ FunctionBuilder, TypeRefBuilder, FuncBodyBuilder, FunctionDefinition,
+};
+pub use self::data::DataSegmentBuilder;
+pub use self::export::{export, ExportBuilder, ExportInternalBuilder};
+pub use self::global::{global, GlobalBuilder};
+pub use self::import::{import, ImportBuilder};
+pub use self::invoke::Identity;
+pub use self::memory::MemoryBuilder;
+pub use self::module::{module, from_module, ModuleBuilder};
+pub use self::table::{TableBuilder, TableDefinition, TableEntryDefinition};
diff --git a/third_party/rust/parity-wasm/src/builder/module.rs b/third_party/rust/parity-wasm/src/builder/module.rs
new file mode 100644
index 0000000000..49806e126c
--- /dev/null
+++ b/third_party/rust/parity-wasm/src/builder/module.rs
@@ -0,0 +1,598 @@
+use alloc::vec::Vec;
+use crate::elements;
+use super::{
+ import,
+ export,
+ global,
+ data,
+ invoke::{Invoke, Identity},
+ code::{self, SignaturesBuilder, FunctionBuilder},
+ memory::{self, MemoryBuilder},
+ table::{self, TableBuilder},
+};
+
+/// Module builder
+pub struct ModuleBuilder<F=Identity> {
+ callback: F,
+ module: ModuleScaffold,
+}
+
+/// Location of the internal module function
+pub struct CodeLocation {
+ /// Location (index in 'functions' section) of the signature
+ pub signature: u32,
+ /// Location (index in the 'code' section) of the body
+ pub body: u32,
+}
+
+#[derive(Default, PartialEq)]
+struct ModuleScaffold {
+ pub types: elements::TypeSection,
+ pub import: elements::ImportSection,
+ pub functions: elements::FunctionSection,
+ pub table: elements::TableSection,
+ pub memory: elements::MemorySection,
+ pub global: elements::GlobalSection,
+ pub export: elements::ExportSection,
+ pub start: Option<u32>,
+ pub element: elements::ElementSection,
+ pub code: elements::CodeSection,
+ pub data: elements::DataSection,
+ pub other: Vec<elements::Section>,
+}
+
+impl From<elements::Module> for ModuleScaffold {
+ fn from(module: elements::Module) -> Self {
+ let mut types: Option<elements::TypeSection> = None;
+ let mut import: Option<elements::ImportSection> = None;
+ let mut funcs: Option<elements::FunctionSection> = None;
+ let mut table: Option<elements::TableSection> = None;
+ let mut memory: Option<elements::MemorySection> = None;
+ let mut global: Option<elements::GlobalSection> = None;
+ let mut export: Option<elements::ExportSection> = None;
+ let mut start: Option<u32> = None;
+ let mut element: Option<elements::ElementSection> = None;
+ let mut code: Option<elements::CodeSection> = None;
+ let mut data: Option<elements::DataSection> = None;
+
+ let mut sections = module.into_sections();
+ while let Some(section) = sections.pop() {
+ match section {
+ elements::Section::Type(sect) => { types = Some(sect); }
+ elements::Section::Import(sect) => { import = Some(sect); }
+ elements::Section::Function(sect) => { funcs = Some(sect); }
+ elements::Section::Table(sect) => { table = Some(sect); }
+ elements::Section::Memory(sect) => { memory = Some(sect); }
+ elements::Section::Global(sect) => { global = Some(sect); }
+ elements::Section::Export(sect) => { export = Some(sect); }
+ elements::Section::Start(index) => { start = Some(index); }
+ elements::Section::Element(sect) => { element = Some(sect); }
+ elements::Section::Code(sect) => { code = Some(sect); }
+ elements::Section::Data(sect) => { data = Some(sect); }
+ _ => {}
+ }
+ }
+
+ ModuleScaffold {
+ types: types.unwrap_or_default(),
+ import: import.unwrap_or_default(),
+ functions: funcs.unwrap_or_default(),
+ table: table.unwrap_or_default(),
+ memory: memory.unwrap_or_default(),
+ global: global.unwrap_or_default(),
+ export: export.unwrap_or_default(),
+ start: start,
+ element: element.unwrap_or_default(),
+ code: code.unwrap_or_default(),
+ data: data.unwrap_or_default(),
+ other: sections,
+ }
+ }
+}
+
+impl From<ModuleScaffold> for elements::Module {
+ fn from(module: ModuleScaffold) -> Self {
+ let mut sections = Vec::new();
+
+ let types = module.types;
+ if types.types().len() > 0 {
+ sections.push(elements::Section::Type(types));
+ }
+ let import = module.import;
+ if import.entries().len() > 0 {
+ sections.push(elements::Section::Import(import));
+ }
+ let functions = module.functions;
+ if functions.entries().len() > 0 {
+ sections.push(elements::Section::Function(functions));
+ }
+ let table = module.table;
+ if table.entries().len() > 0 {
+ sections.push(elements::Section::Table(table));
+ }
+ let memory = module.memory;
+ if memory.entries().len() > 0 {
+ sections.push(elements::Section::Memory(memory));
+ }
+ let global = module.global;
+ if global.entries().len() > 0 {
+ sections.push(elements::Section::Global(global));
+ }
+ let export = module.export;
+ if export.entries().len() > 0 {
+ sections.push(elements::Section::Export(export));
+ }
+ if let Some(start) = module.start {
+ sections.push(elements::Section::Start(start));
+ }
+ let element = module.element;
+ if element.entries().len() > 0 {
+ sections.push(elements::Section::Element(element));
+ }
+ let code = module.code;
+ if code.bodies().len() > 0 {
+ sections.push(elements::Section::Code(code));
+ }
+ let data = module.data;
+ if data.entries().len() > 0 {
+ sections.push(elements::Section::Data(data));
+ }
+ sections.extend(module.other);
+ elements::Module::new(sections)
+ }
+}
+
+impl ModuleBuilder {
+ /// New empty module builder
+ pub fn new() -> Self {
+ ModuleBuilder::with_callback(Identity)
+ }
+}
+
+impl<F> ModuleBuilder<F> where F: Invoke<elements::Module> {
+ /// New module builder with bound callback
+ pub fn with_callback(callback: F) -> Self {
+ ModuleBuilder {
+ callback: callback,
+ module: Default::default(),
+ }
+ }
+
+ /// Builder from raw module
+ pub fn with_module(mut self, module: elements::Module) -> Self {
+ self.module = module.into();
+ self
+ }
+
+ /// Fill module with sections from iterator
+ pub fn with_sections<I>(mut self, sections: I) -> Self
+ where I: IntoIterator<Item=elements::Section>
+ {
+ self.module.other.extend(sections);
+ self
+ }
+
+ /// Add additional section
+ pub fn with_section(mut self, section: elements::Section) -> Self {
+ self.module.other.push(section);
+ self
+ }
+
+ /// Binds to the type section, creates additional types when required
+ pub fn with_signatures(mut self, bindings: code::SignatureBindings) -> Self {
+ self.push_signatures(bindings);
+ self
+ }
+
+ /// Push a stand-alone function definition, creating signature and code entries
+ /// in the corresponding sections.
+ /// A `FunctionDefinition` can be built using the `builder::function` builder
+ pub fn push_function(&mut self, func: code::FunctionDefinition) -> CodeLocation {
+ let signature = func.signature;
+ let body = func.code;
+
+ let type_ref = self.resolve_type_ref(signature);
+
+ self.module.functions.entries_mut().push(elements::Func::new(type_ref));
+ let signature_index = self.module.functions.entries_mut().len() as u32 - 1;
+ self.module.code.bodies_mut().push(body);
+ let body_index = self.module.code.bodies_mut().len() as u32 - 1;
+
+ if func.is_main {
+ self.module.start = Some(body_index);
+ }
+
+ CodeLocation {
+ signature: signature_index,
+ body: body_index,
+ }
+ }
+
+ /// Push linear memory region
+ pub fn push_memory(&mut self, mut memory: memory::MemoryDefinition) -> u32 {
+ let entries = self.module.memory.entries_mut();
+ entries.push(elements::MemoryType::new(memory.min, memory.max));
+ let memory_index = (entries.len() - 1) as u32;
+ for data in memory.data.drain(..) {
+ self.module.data.entries_mut()
+ .push(elements::DataSegment::new(memory_index, Some(data.offset), data.values))
+ }
+ memory_index
+ }
+
+ /// Push table
+ pub fn push_table(&mut self, mut table: table::TableDefinition) -> u32 {
+ let entries = self.module.table.entries_mut();
+ entries.push(elements::TableType::new(table.min, table.max));
+ let table_index = (entries.len() - 1) as u32;
+ for entry in table.elements.drain(..) {
+ self.module.element.entries_mut()
+ .push(elements::ElementSegment::new(table_index, Some(entry.offset), entry.values))
+ }
+ table_index
+ }
+
+ fn resolve_type_ref(&mut self, signature: code::Signature) -> u32 {
+ match signature {
+ code::Signature::Inline(func_type) => {
+ if let Some(existing_entry) = self.module.types.types().iter().enumerate().find(|(_idx, t)| {
+ let elements::Type::Function(ref existing) = t;
+ *existing == func_type
+ }) {
+ return existing_entry.0 as u32
+ }
+ self.module.types.types_mut().push(elements::Type::Function(func_type));
+ self.module.types.types().len() as u32 - 1
+ }
+ code::Signature::TypeReference(type_ref) => {
+ type_ref
+ }
+ }
+ }
+
+ /// Push one function signature, returning its calling index.
+ /// Can create corresponding type in type section.
+ pub fn push_signature(&mut self, signature: code::Signature) -> u32 {
+ self.resolve_type_ref(signature)
+ }
+
+ /// Push signatures in the module, returning corresponding indices of pushed signatures
+ pub fn push_signatures(&mut self, signatures: code::SignatureBindings) -> Vec<u32> {
+ signatures.into_iter().map(|binding|
+ self.resolve_type_ref(binding)
+ ).collect()
+ }
+
+ /// Push import entry to module. Note that this does not update calling indices in
+ /// function bodies.
+ pub fn push_import(&mut self, import: elements::ImportEntry) -> u32 {
+ self.module.import.entries_mut().push(import);
+ // todo: actually update calling addresses in function bodies
+ // todo: also batch push
+
+ self.module.import.entries_mut().len() as u32 - 1
+ }
+
+ /// Push export entry to module.
+ pub fn push_export(&mut self, export: elements::ExportEntry) -> u32 {
+ self.module.export.entries_mut().push(export);
+ self.module.export.entries_mut().len() as u32 - 1
+ }
+
+ /// Add new function using dedicated builder
+ pub fn function(self) -> FunctionBuilder<Self> {
+ FunctionBuilder::with_callback(self)
+ }
+
+ /// Add new linear memory using dedicated builder
+ pub fn memory(self) -> MemoryBuilder<Self> {
+ MemoryBuilder::with_callback(self)
+ }
+
+ /// Add new table using dedicated builder
+ pub fn table(self) -> TableBuilder<Self> {
+ TableBuilder::with_callback(self)
+ }
+
+ /// Define functions section
+ pub fn functions(self) -> SignaturesBuilder<Self> {
+ SignaturesBuilder::with_callback(self)
+ }
+
+ /// With inserted export entry
+ pub fn with_export(mut self, entry: elements::ExportEntry) -> Self {
+ self.module.export.entries_mut().push(entry);
+ self
+ }
+
+ /// With inserted import entry
+ pub fn with_import(mut self, entry: elements::ImportEntry) -> Self {
+ self.module.import.entries_mut().push(entry);
+ self
+ }
+
+ /// Import entry builder
+ /// # Examples
+ /// ```
+ /// use parity_wasm::builder::module;
+ ///
+ /// let module = module()
+ /// .import()
+ /// .module("env")
+ /// .field("memory")
+ /// .external().memory(256, Some(256))
+ /// .build()
+ /// .build();
+ ///
+ /// assert_eq!(module.import_section().expect("import section to exist").entries().len(), 1);
+ /// ```
+ pub fn import(self) -> import::ImportBuilder<Self> {
+ import::ImportBuilder::with_callback(self)
+ }
+
+ /// With global variable
+ pub fn with_global(mut self, global: elements::GlobalEntry) -> Self {
+ self.module.global.entries_mut().push(global);
+ self
+ }
+
+ /// With table
+ pub fn with_table(mut self, table: elements::TableType) -> Self {
+ self.module.table.entries_mut().push(table);
+ self
+ }
+
+ /// Export entry builder
+ /// # Examples
+ /// ```
+ /// use parity_wasm::builder::module;
+ /// use parity_wasm::elements::Instruction::*;
+ ///
+ /// let module = module()
+ /// .global()
+ /// .value_type().i32()
+ /// .init_expr(I32Const(0))
+ /// .build()
+ /// .export()
+ /// .field("_zero")
+ /// .internal().global(0)
+ /// .build()
+ /// .build();
+ ///
+ /// assert_eq!(module.export_section().expect("export section to exist").entries().len(), 1);
+ /// ```
+ pub fn export(self) -> export::ExportBuilder<Self> {
+ export::ExportBuilder::with_callback(self)
+ }
+
+ /// Global entry builder
+ /// # Examples
+ /// ```
+ /// use parity_wasm::builder::module;
+ /// use parity_wasm::elements::Instruction::*;
+ ///
+ /// let module = module()
+ /// .global()
+ /// .value_type().i32()
+ /// .init_expr(I32Const(0))
+ /// .build()
+ /// .build();
+ ///
+ /// assert_eq!(module.global_section().expect("global section to exist").entries().len(), 1);
+ /// ```
+ pub fn global(self) -> global::GlobalBuilder<Self> {
+ global::GlobalBuilder::with_callback(self)
+ }
+
+ /// Add data segment to the builder
+ pub fn with_data_segment(mut self, segment: elements::DataSegment) -> Self {
+ self.module.data.entries_mut().push(segment);
+ self
+ }
+
+ /// Data entry builder
+ pub fn data(self) -> data::DataSegmentBuilder<Self> {
+ data::DataSegmentBuilder::with_callback(self)
+ }
+
+ /// Build module (final step)
+ pub fn build(self) -> F::Result {
+ self.callback.invoke(self.module.into())
+ }
+}
+
+impl<F> Invoke<elements::FunctionSection> for ModuleBuilder<F>
+ where F: Invoke<elements::Module>
+{
+ type Result = Self;
+
+ fn invoke(self, section: elements::FunctionSection) -> Self {
+ self.with_section(elements::Section::Function(section))
+ }
+}
+
+impl<F> Invoke<code::SignatureBindings> for ModuleBuilder<F>
+ where F: Invoke<elements::Module>
+{
+ type Result = Self;
+
+ fn invoke(self, bindings: code::SignatureBindings) -> Self {
+ self.with_signatures(bindings)
+ }
+}
+
+
+impl<F> Invoke<code::FunctionDefinition> for ModuleBuilder<F>
+ where F: Invoke<elements::Module>
+{
+ type Result = Self;
+
+ fn invoke(self, def: code::FunctionDefinition) -> Self {
+ let mut b = self;
+ b.push_function(def);
+ b
+ }
+}
+
+impl<F> Invoke<memory::MemoryDefinition> for ModuleBuilder<F>
+ where F: Invoke<elements::Module>
+{
+ type Result = Self;
+
+ fn invoke(self, def: memory::MemoryDefinition) -> Self {
+ let mut b = self;
+ b.push_memory(def);
+ b
+ }
+}
+
+impl<F> Invoke<table::TableDefinition> for ModuleBuilder<F>
+ where F: Invoke<elements::Module>
+{
+ type Result = Self;
+
+ fn invoke(self, def: table::TableDefinition) -> Self {
+ let mut b = self;
+ b.push_table(def);
+ b
+ }
+}
+
+impl<F> Invoke<elements::ImportEntry> for ModuleBuilder<F>
+ where F: Invoke<elements::Module>
+{
+ type Result = Self;
+
+ fn invoke(self, entry: elements::ImportEntry) -> Self::Result {
+ self.with_import(entry)
+ }
+}
+
+impl<F> Invoke<elements::ExportEntry> for ModuleBuilder<F>
+ where F: Invoke<elements::Module>
+{
+ type Result = Self;
+
+ fn invoke(self, entry: elements::ExportEntry) -> Self::Result {
+ self.with_export(entry)
+ }
+}
+
+impl<F> Invoke<elements::GlobalEntry> for ModuleBuilder<F>
+ where F: Invoke<elements::Module>
+{
+ type Result = Self;
+
+ fn invoke(self, entry: elements::GlobalEntry) -> Self::Result {
+ self.with_global(entry)
+ }
+}
+
+impl<F> Invoke<elements::DataSegment> for ModuleBuilder<F>
+ where F: Invoke<elements::Module>
+{
+ type Result = Self;
+
+ fn invoke(self, segment: elements::DataSegment) -> Self {
+ self.with_data_segment(segment)
+ }
+}
+
+/// Start new module builder
+/// # Examples
+///
+/// ```
+/// use parity_wasm::builder;
+///
+/// let module = builder::module()
+/// .function()
+/// .signature().param().i32().build()
+/// .body().build()
+/// .build()
+/// .build();
+///
+/// assert_eq!(module.type_section().expect("type section to exist").types().len(), 1);
+/// assert_eq!(module.function_section().expect("function section to exist").entries().len(), 1);
+/// assert_eq!(module.code_section().expect("code section to exist").bodies().len(), 1);
+/// ```
+pub fn module() -> ModuleBuilder {
+ ModuleBuilder::new()
+}
+
+/// Start builder to extend existing module
+pub fn from_module(module: elements::Module) -> ModuleBuilder {
+ ModuleBuilder::new().with_module(module)
+}
+
+#[cfg(test)]
+mod tests {
+
+ use crate::elements;
+ use super::module;
+
+ #[test]
+ fn smoky() {
+ let module = module().build();
+ assert_eq!(module.sections().len(), 0);
+ }
+
+ #[test]
+ fn functions() {
+ let module = module()
+ .function()
+ .signature().param().i32().build()
+ .body().build()
+ .build()
+ .build();
+
+ assert_eq!(module.type_section().expect("type section to exist").types().len(), 1);
+ assert_eq!(module.function_section().expect("function section to exist").entries().len(), 1);
+ assert_eq!(module.code_section().expect("code section to exist").bodies().len(), 1);
+ }
+
+ #[test]
+ fn export() {
+ let module = module()
+ .export().field("call").internal().func(0).build()
+ .build();
+
+ assert_eq!(module.export_section().expect("export section to exist").entries().len(), 1);
+ }
+
+ #[test]
+ fn global() {
+ let module = module()
+ .global().value_type().i64().mutable().init_expr(elements::Instruction::I64Const(5)).build()
+ .build();
+
+ assert_eq!(module.global_section().expect("global section to exist").entries().len(), 1);
+ }
+
+ #[test]
+ fn data() {
+ let module = module()
+ .data()
+ .offset(elements::Instruction::I32Const(16))
+ .value(vec![0u8, 15, 10, 5, 25])
+ .build()
+ .build();
+
+ assert_eq!(module.data_section().expect("data section to exist").entries().len(), 1);
+ }
+
+ #[test]
+ fn reuse_types() {
+ let module = module()
+ .function()
+ .signature().param().i32().build()
+ .body().build()
+ .build()
+ .function()
+ .signature().param().i32().build()
+ .body().build()
+ .build()
+ .build();
+
+ assert_eq!(module.type_section().expect("type section failed").types().len(), 1);
+ }
+ }
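`from_module` has no example of its own; a minimal sketch of extending an existing module, with the crate name `parity_wasm` assumed:

use parity_wasm::builder::{from_module, module};

fn main() {
    // Build a base module, then extend it with an export of its global.
    let base = module()
        .global().value_type().i32().build()
        .build();

    let extended = from_module(base)
        .export().field("g").internal().global(0).build()
        .build();

    assert_eq!(extended.global_section().expect("global section").entries().len(), 1);
    assert_eq!(extended.export_section().expect("export section").entries().len(), 1);
}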
diff --git a/third_party/rust/parity-wasm/src/builder/table.rs b/third_party/rust/parity-wasm/src/builder/table.rs
new file mode 100644
index 0000000000..29bbe2d2dc
--- /dev/null
+++ b/third_party/rust/parity-wasm/src/builder/table.rs
@@ -0,0 +1,85 @@
+use alloc::vec::Vec;
+use crate::elements;
+use super::invoke::{Invoke, Identity};
+
+/// Table definition
+#[derive(Debug, PartialEq)]
+pub struct TableDefinition {
+ /// Minimum length
+ pub min: u32,
+ /// Maximum length, if any
+ pub max: Option<u32>,
+ /// Element segments, if any
+ pub elements: Vec<TableEntryDefinition>,
+}
+
+/// Table elements entry definition
+#[derive(Debug, PartialEq)]
+pub struct TableEntryDefinition {
+ /// Offset initialization expression
+ pub offset: elements::InitExpr,
+ /// Values of initialization
+ pub values: Vec<u32>,
+}
+
+/// Table builder
+pub struct TableBuilder<F=Identity> {
+ callback: F,
+ table: TableDefinition,
+}
+
+impl TableBuilder {
+ /// New table builder
+ pub fn new() -> Self {
+ TableBuilder::with_callback(Identity)
+ }
+}
+
+impl<F> TableBuilder<F> where F: Invoke<TableDefinition> {
+ /// New table builder with callback in chained context
+ pub fn with_callback(callback: F) -> Self {
+ TableBuilder {
+ callback: callback,
+ table: Default::default(),
+ }
+ }
+
+ /// Set/override minimum length
+ pub fn with_min(mut self, min: u32) -> Self {
+ self.table.min = min;
+ self
+ }
+
+ /// Set/override maximum length
+ pub fn with_max(mut self, max: Option<u32>) -> Self {
+ self.table.max = max;
+ self
+ }
+
+ /// Push an element segment with an offset init expression at the specified index and the given values
+ pub fn with_element(mut self, index: u32, values: Vec<u32>) -> Self {
+ self.table.elements.push(TableEntryDefinition {
+ offset: elements::InitExpr::new(vec![
+ elements::Instruction::I32Const(index as i32),
+ elements::Instruction::End,
+ ]),
+ values: values,
+ });
+ self
+ }
+
+ /// Finalize current builder spawning resulting struct
+ pub fn build(self) -> F::Result {
+ self.callback.invoke(self.table)
+ }
+}
+
+impl Default for TableDefinition {
+ fn default() -> Self {
+ TableDefinition {
+ min: 0,
+ max: None,
+ elements: Vec::new(),
+ }
+ }
+}
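Since this file ships no test, a sketch of adding a table with one element segment through the module builder; the crate name `parity_wasm` is assumed:

use parity_wasm::builder::module;

fn main() {
    // Table with room for 4 anyfunc entries and an element segment that
    // places function indices 0 and 1 starting at offset 0.
    let module = module()
        .table()
            .with_min(4)
            .with_element(0, vec![0, 1])
            .build()
        .build();

    // Exactly two sections result: the table section and the element section.
    assert_eq!(module.sections().len(), 2);
}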
diff --git a/third_party/rust/parity-wasm/src/elements/export_entry.rs b/third_party/rust/parity-wasm/src/elements/export_entry.rs
new file mode 100644
index 0000000000..5fbef79aae
--- /dev/null
+++ b/third_party/rust/parity-wasm/src/elements/export_entry.rs
@@ -0,0 +1,102 @@
+use alloc::string::String;
+use super::{Deserialize, Serialize, Error, VarUint7, VarUint32};
+use crate::io;
+
+/// Internal reference of the exported entry.
+#[derive(Debug, Clone, Copy, PartialEq)]
+pub enum Internal {
+ /// Function reference.
+ Function(u32),
+ /// Table reference.
+ Table(u32),
+ /// Memory reference.
+ Memory(u32),
+ /// Global reference.
+ Global(u32),
+}
+
+impl Deserialize for Internal {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ let kind = VarUint7::deserialize(reader)?;
+ match kind.into() {
+ 0x00 => Ok(Internal::Function(VarUint32::deserialize(reader)?.into())),
+ 0x01 => Ok(Internal::Table(VarUint32::deserialize(reader)?.into())),
+ 0x02 => Ok(Internal::Memory(VarUint32::deserialize(reader)?.into())),
+ 0x03 => Ok(Internal::Global(VarUint32::deserialize(reader)?.into())),
+ _ => Err(Error::UnknownInternalKind(kind.into())),
+ }
+ }
+}
+
+impl Serialize for Internal {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ let (bt, arg) = match self {
+ Internal::Function(arg) => (0x00, arg),
+ Internal::Table(arg) => (0x01, arg),
+ Internal::Memory(arg) => (0x02, arg),
+ Internal::Global(arg) => (0x03, arg),
+ };
+
+ VarUint7::from(bt).serialize(writer)?;
+ VarUint32::from(arg).serialize(writer)?;
+
+ Ok(())
+ }
+}
+
+/// Export entry.
+#[derive(Debug, Clone, PartialEq)]
+pub struct ExportEntry {
+ field_str: String,
+ internal: Internal,
+}
+
+impl ExportEntry {
+ /// New export entry.
+ pub fn new(field: String, internal: Internal) -> Self {
+ ExportEntry {
+ field_str: field,
+ internal: internal
+ }
+ }
+
+ /// Public name.
+ pub fn field(&self) -> &str { &self.field_str }
+
+ /// Public name (mutable).
+ pub fn field_mut(&mut self) -> &mut String { &mut self.field_str }
+
+ /// Internal reference of the export entry.
+ pub fn internal(&self) -> &Internal { &self.internal }
+
+ /// Internal reference of the export entry (mutable).
+ pub fn internal_mut(&mut self) -> &mut Internal { &mut self.internal }
+}
+
+impl Deserialize for ExportEntry {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ let field_str = String::deserialize(reader)?;
+ let internal = Internal::deserialize(reader)?;
+
+ Ok(ExportEntry {
+ field_str: field_str,
+ internal: internal,
+ })
+ }
+}
+
+impl Serialize for ExportEntry {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ self.field_str.serialize(writer)?;
+ self.internal.serialize(writer)?;
+ Ok(())
+ }
+}
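A short sketch of constructing and mutating an export entry directly, with the crate name `parity_wasm` assumed:

use parity_wasm::elements::{ExportEntry, Internal};

fn main() {
    // Export the first table under the public name "indirect".
    let mut entry = ExportEntry::new("indirect".into(), Internal::Table(0));
    assert_eq!(entry.field(), "indirect");

    // Retarget the same entry at global 2 through the mutable accessor.
    *entry.internal_mut() = Internal::Global(2);
    assert_eq!(*entry.internal(), Internal::Global(2));
}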
diff --git a/third_party/rust/parity-wasm/src/elements/func.rs b/third_party/rust/parity-wasm/src/elements/func.rs
new file mode 100644
index 0000000000..66a047f81f
--- /dev/null
+++ b/third_party/rust/parity-wasm/src/elements/func.rs
@@ -0,0 +1,156 @@
+use alloc::vec::Vec;
+use super::{
+ Deserialize, Error, ValueType, VarUint32, CountedList, Instructions,
+ Serialize, CountedWriter, CountedListWriter,
+};
+use crate::{io, elements::section::SectionReader};
+
+/// Function signature (type reference)
+#[derive(Debug, Copy, Clone, PartialEq)]
+pub struct Func(u32);
+
+impl Func {
+ /// New function signature
+ pub fn new(type_ref: u32) -> Self { Func(type_ref) }
+
+ /// Function signature type reference.
+ pub fn type_ref(&self) -> u32 {
+ self.0
+ }
+
+ /// Function signature type reference (mutable).
+ pub fn type_ref_mut(&mut self) -> &mut u32 {
+ &mut self.0
+ }
+}
+
+impl Serialize for Func {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ VarUint32::from(self.0).serialize(writer)
+ }
+}
+
+impl Deserialize for Func {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ Ok(Func(VarUint32::deserialize(reader)?.into()))
+ }
+}
+
+/// Local definition inside the function body.
+#[derive(Debug, Copy, Clone, PartialEq)]
+pub struct Local {
+ count: u32,
+ value_type: ValueType,
+}
+
+impl Local {
+ /// New local with `count` and `value_type`.
+ pub fn new(count: u32, value_type: ValueType) -> Self {
+ Local { count: count, value_type: value_type }
+ }
+
+ /// Number of locals with the shared type.
+ pub fn count(&self) -> u32 { self.count }
+
+ /// Type of the locals.
+ pub fn value_type(&self) -> ValueType { self.value_type }
+}
+
+impl Deserialize for Local {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ let count = VarUint32::deserialize(reader)?;
+ let value_type = ValueType::deserialize(reader)?;
+ Ok(Local { count: count.into(), value_type: value_type })
+ }
+}
+
+impl Serialize for Local {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ VarUint32::from(self.count).serialize(writer)?;
+ self.value_type.serialize(writer)?;
+ Ok(())
+ }
+}
+
+/// Function body definition.
+#[derive(Debug, Clone, PartialEq)]
+pub struct FuncBody {
+ locals: Vec<Local>,
+ instructions: Instructions,
+}
+
+impl FuncBody {
+ /// New function body with given `locals` and `instructions`.
+ pub fn new(locals: Vec<Local>, instructions: Instructions) -> Self {
+ FuncBody { locals: locals, instructions: instructions }
+ }
+
+ /// New function body with no locals and an empty instruction list.
+ pub fn empty() -> Self {
+ FuncBody { locals: Vec::new(), instructions: Instructions::empty() }
+ }
+
+ /// Locals declared in function body.
+ pub fn locals(&self) -> &[Local] { &self.locals }
+
+ /// Instruction list of the function body.
+ ///
+ /// The minimal instruction list is just `&[Instruction::End]`.
+ pub fn code(&self) -> &Instructions { &self.instructions }
+
+ /// Locals declared in function body (mutable).
+ pub fn locals_mut(&mut self) -> &mut Vec<Local> { &mut self.locals }
+
+ /// Instruction list of the function body (mutable).
+ pub fn code_mut(&mut self) -> &mut Instructions { &mut self.instructions }
+}
+
+impl Deserialize for FuncBody {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ let mut body_reader = SectionReader::new(reader)?;
+ let locals: Vec<Local> = CountedList::<Local>::deserialize(&mut body_reader)?.into_inner();
+
+ // The specification obliges us to count the total number of local variables while
+ // decoding the binary format.
+ locals
+ .iter()
+ .try_fold(0u32, |acc, &Local { count, .. }| acc.checked_add(count))
+ .ok_or_else(|| Error::TooManyLocals)?;
+
+ let instructions = Instructions::deserialize(&mut body_reader)?;
+ body_reader.close()?;
+ Ok(FuncBody { locals: locals, instructions: instructions })
+ }
+}
+
+impl Serialize for FuncBody {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ let mut counted_writer = CountedWriter::new(writer);
+
+ let data = self.locals;
+ let counted_list = CountedListWriter::<Local, _>(
+ data.len(),
+ data.into_iter().map(Into::into),
+ );
+ counted_list.serialize(&mut counted_writer)?;
+
+ let code = self.instructions;
+ code.serialize(&mut counted_writer)?;
+
+ counted_writer.done()?;
+
+ Ok(())
+ }
+}
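A short sketch of assembling a function body by hand, with the crate name `parity_wasm` assumed:

use parity_wasm::elements::{FuncBody, Instructions, Local, ValueType};

fn main() {
    // Two i32 locals sharing one `Local` entry, and the minimal body
    // consisting of just the trailing `End` instruction.
    let body = FuncBody::new(
        vec![Local::new(2, ValueType::I32)],
        Instructions::empty(),
    );

    assert_eq!(body.locals().len(), 1);
    assert_eq!(body.locals()[0].count(), 2);
}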
diff --git a/third_party/rust/parity-wasm/src/elements/global_entry.rs b/third_party/rust/parity-wasm/src/elements/global_entry.rs
new file mode 100644
index 0000000000..d2ef398b53
--- /dev/null
+++ b/third_party/rust/parity-wasm/src/elements/global_entry.rs
@@ -0,0 +1,50 @@
+use crate::io;
+use super::{Deserialize, Serialize, Error, GlobalType, InitExpr};
+
+/// Global entry in the module.
+#[derive(Clone, Debug, PartialEq)]
+pub struct GlobalEntry {
+ global_type: GlobalType,
+ init_expr: InitExpr,
+}
+
+impl GlobalEntry {
+ /// New global entry.
+ pub fn new(global_type: GlobalType, init_expr: InitExpr) -> Self {
+ GlobalEntry {
+ global_type: global_type,
+ init_expr: init_expr,
+ }
+ }
+ /// Global type.
+ pub fn global_type(&self) -> &GlobalType { &self.global_type }
+ /// Initialization expression (instructions) for global.
+ pub fn init_expr(&self) -> &InitExpr { &self.init_expr }
+ /// Global type (mutable).
+ pub fn global_type_mut(&mut self) -> &mut GlobalType { &mut self.global_type }
+ /// Initialization expression (instructions) for global (mutable).
+ pub fn init_expr_mut(&mut self) -> &mut InitExpr { &mut self.init_expr }
+}
+
+impl Deserialize for GlobalEntry {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ let global_type = GlobalType::deserialize(reader)?;
+ let init_expr = InitExpr::deserialize(reader)?;
+
+ Ok(GlobalEntry {
+ global_type: global_type,
+ init_expr: init_expr,
+ })
+ }
+}
+
+impl Serialize for GlobalEntry {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ self.global_type.serialize(writer)?;
+ self.init_expr.serialize(writer)
+ }
+}
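+
+// Illustrative sketch (not part of the upstream crate): building and round-tripping a
+// global entry. It assumes `InitExpr::new`, `Instruction::I32Const` and `Instruction::End`
+// from the `ops` module plus the `serialize`/`deserialize_buffer` helpers of `elements`.
+#[cfg(test)]
+mod global_entry_examples {
+	use super::*;
+	use crate::elements::{deserialize_buffer, serialize, Instruction, ValueType};
+
+	#[test]
+	fn immutable_i32_global_round_trips() {
+		// An immutable i32 global initialized to 42; the init expression is terminated by `end`.
+		let entry = GlobalEntry::new(
+			GlobalType::new(ValueType::I32, false),
+			InitExpr::new(vec![Instruction::I32Const(42), Instruction::End]),
+		);
+		let bytes = serialize(entry.clone()).expect("encoding should succeed");
+		let decoded: GlobalEntry = deserialize_buffer(&bytes).expect("decoding should succeed");
+		assert_eq!(decoded, entry);
+	}
+}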
diff --git a/third_party/rust/parity-wasm/src/elements/import_entry.rs b/third_party/rust/parity-wasm/src/elements/import_entry.rs
new file mode 100644
index 0000000000..9aa6327a6a
--- /dev/null
+++ b/third_party/rust/parity-wasm/src/elements/import_entry.rs
@@ -0,0 +1,356 @@
+use alloc::string::String;
+use crate::io;
+use super::{
+ Deserialize, Serialize, Error, VarUint7, VarInt7, VarUint32, VarUint1, Uint8,
+ ValueType, TableElementType
+};
+
+const FLAG_HAS_MAX: u8 = 0x01;
+#[cfg(feature="atomics")]
+const FLAG_SHARED: u8 = 0x02;
+
+/// Global definition struct
+#[derive(Debug, Copy, Clone, PartialEq)]
+pub struct GlobalType {
+ content_type: ValueType,
+ is_mutable: bool,
+}
+
+impl GlobalType {
+ /// New global type
+ pub fn new(content_type: ValueType, is_mutable: bool) -> Self {
+ GlobalType {
+ content_type: content_type,
+ is_mutable: is_mutable,
+ }
+ }
+
+ /// Type of the global entry
+ pub fn content_type(&self) -> ValueType { self.content_type }
+
+	/// Whether the global entry is declared as mutable.
+ pub fn is_mutable(&self) -> bool { self.is_mutable }
+}
+
+impl Deserialize for GlobalType {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ let content_type = ValueType::deserialize(reader)?;
+ let is_mutable = VarUint1::deserialize(reader)?;
+ Ok(GlobalType {
+ content_type: content_type,
+ is_mutable: is_mutable.into(),
+ })
+ }
+}
+
+impl Serialize for GlobalType {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ self.content_type.serialize(writer)?;
+ VarUint1::from(self.is_mutable).serialize(writer)?;
+ Ok(())
+ }
+}
+
+/// Table entry
+#[derive(Debug, Copy, Clone, PartialEq)]
+pub struct TableType {
+ elem_type: TableElementType,
+ limits: ResizableLimits,
+}
+
+impl TableType {
+ /// New table definition
+ pub fn new(min: u32, max: Option<u32>) -> Self {
+ TableType {
+ elem_type: TableElementType::AnyFunc,
+ limits: ResizableLimits::new(min, max),
+ }
+ }
+
+ /// Table memory specification
+ pub fn limits(&self) -> &ResizableLimits { &self.limits }
+
+ /// Table element type
+ pub fn elem_type(&self) -> TableElementType { self.elem_type }
+}
+
+impl Deserialize for TableType {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ let elem_type = TableElementType::deserialize(reader)?;
+ let limits = ResizableLimits::deserialize(reader)?;
+ Ok(TableType {
+ elem_type: elem_type,
+ limits: limits,
+ })
+ }
+}
+
+impl Serialize for TableType {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ self.elem_type.serialize(writer)?;
+ self.limits.serialize(writer)
+ }
+}
+
+/// Memory and table limits.
+#[derive(Debug, Copy, Clone, PartialEq)]
+pub struct ResizableLimits {
+ initial: u32,
+ maximum: Option<u32>,
+ #[cfg(feature = "atomics")]
+ shared: bool,
+}
+
+impl ResizableLimits {
+ /// New memory limits definition.
+ pub fn new(min: u32, max: Option<u32>) -> Self {
+ ResizableLimits {
+ initial: min,
+ maximum: max,
+ #[cfg(feature = "atomics")]
+ shared: false,
+ }
+ }
+ /// Initial size.
+ pub fn initial(&self) -> u32 { self.initial }
+ /// Maximum size.
+ pub fn maximum(&self) -> Option<u32> { self.maximum }
+
+ #[cfg(feature = "atomics")]
+ /// Whether or not this is a shared array buffer.
+ pub fn shared(&self) -> bool { self.shared }
+}
+
+impl Deserialize for ResizableLimits {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ let flags: u8 = Uint8::deserialize(reader)?.into();
+ match flags {
+ // Default flags are always supported. This is simply: FLAG_HAS_MAX={true, false}.
+ 0x00 | 0x01 => {},
+
+			// The atomics proposal introduces FLAG_SHARED (0x02). Shared memories can be used only
+ // together with FLAG_HAS_MAX (0x01), hence 0x03.
+ #[cfg(feature="atomics")]
+ 0x03 => {},
+
+ _ => return Err(Error::InvalidLimitsFlags(flags)),
+ }
+
+ let initial = VarUint32::deserialize(reader)?;
+ let maximum = if flags & FLAG_HAS_MAX != 0 {
+ Some(VarUint32::deserialize(reader)?.into())
+ } else {
+ None
+ };
+
+ Ok(ResizableLimits {
+ initial: initial.into(),
+ maximum: maximum,
+
+ #[cfg(feature="atomics")]
+ shared: flags & FLAG_SHARED != 0,
+ })
+ }
+}
+
+impl Serialize for ResizableLimits {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ let mut flags: u8 = 0;
+ if self.maximum.is_some() {
+ flags |= FLAG_HAS_MAX;
+ }
+
+ #[cfg(feature="atomics")]
+ {
+			// If the atomics feature is enabled and the shared flag is set,
+			// OR it into the flags byte.
+ if self.shared {
+ flags |= FLAG_SHARED;
+ }
+ }
+ Uint8::from(flags).serialize(writer)?;
+ VarUint32::from(self.initial).serialize(writer)?;
+ if let Some(max) = self.maximum {
+ VarUint32::from(max).serialize(writer)?;
+ }
+ Ok(())
+ }
+}
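+
+// Illustrative sketch (not part of the upstream crate): the flags byte produced by the
+// `Serialize` impl above, checked through the `serialize` helper of the parent module.
+#[cfg(test)]
+mod limits_flag_examples {
+	use super::*;
+	use crate::elements::serialize;
+
+	#[test]
+	fn flags_byte_reflects_presence_of_maximum() {
+		// With a maximum, the leading flags byte is FLAG_HAS_MAX (0x01), followed by the
+		// LEB128-encoded initial and maximum sizes; without one, the flags byte is 0x00.
+		assert_eq!(serialize(ResizableLimits::new(1, Some(2))).unwrap(), vec![0x01u8, 0x01, 0x02]);
+		assert_eq!(serialize(ResizableLimits::new(1, None)).unwrap(), vec![0x00u8, 0x01]);
+	}
+}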
+
+/// Memory entry.
+#[derive(Debug, Copy, Clone, PartialEq)]
+pub struct MemoryType(ResizableLimits);
+
+impl MemoryType {
+ /// New memory definition
+ pub fn new(min: u32, max: Option<u32>) -> Self {
+ let r = ResizableLimits::new(min, max);
+ MemoryType(r)
+ }
+
+ /// Set the `shared` flag that denotes a memory that can be shared between threads.
+ ///
+ /// `false` by default. This is only available if the `atomics` feature is enabled.
+ #[cfg(feature = "atomics")]
+ pub fn set_shared(&mut self, shared: bool) {
+ self.0.shared = shared;
+ }
+
+ /// Limits of the memory entry.
+ pub fn limits(&self) -> &ResizableLimits {
+ &self.0
+ }
+}
+
+impl Deserialize for MemoryType {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ Ok(MemoryType(ResizableLimits::deserialize(reader)?))
+ }
+}
+
+impl Serialize for MemoryType {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ self.0.serialize(writer)
+ }
+}
+
+/// External to local binding.
+#[derive(Debug, Copy, Clone, PartialEq)]
+pub enum External {
+ /// Binds to a function whose type is associated with the given index in the
+ /// type section.
+ Function(u32),
+	/// Describes the local table definition that the import binds to.
+	Table(TableType),
+	/// Describes the local memory definition that the import binds to.
+	Memory(MemoryType),
+	/// Describes the local global entry that the import binds to.
+	Global(GlobalType),
+}
+
+impl Deserialize for External {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ let kind = VarUint7::deserialize(reader)?;
+ match kind.into() {
+ 0x00 => Ok(External::Function(VarUint32::deserialize(reader)?.into())),
+ 0x01 => Ok(External::Table(TableType::deserialize(reader)?)),
+ 0x02 => Ok(External::Memory(MemoryType::deserialize(reader)?)),
+ 0x03 => Ok(External::Global(GlobalType::deserialize(reader)?)),
+ _ => Err(Error::UnknownExternalKind(kind.into())),
+ }
+ }
+}
+
+impl Serialize for External {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ use self::External::*;
+
+ match self {
+ Function(index) => {
+ VarUint7::from(0x00).serialize(writer)?;
+ VarUint32::from(index).serialize(writer)?;
+ },
+ Table(tt) => {
+ VarInt7::from(0x01).serialize(writer)?;
+ tt.serialize(writer)?;
+ },
+ Memory(mt) => {
+ VarInt7::from(0x02).serialize(writer)?;
+ mt.serialize(writer)?;
+ },
+ Global(gt) => {
+ VarInt7::from(0x03).serialize(writer)?;
+ gt.serialize(writer)?;
+ },
+ }
+
+ Ok(())
+ }
+}
+
+/// Import entry.
+#[derive(Debug, Clone, PartialEq)]
+pub struct ImportEntry {
+ module_str: String,
+ field_str: String,
+ external: External,
+}
+
+impl ImportEntry {
+ /// New import entry.
+ pub fn new(module_str: String, field_str: String, external: External) -> Self {
+ ImportEntry {
+ module_str: module_str,
+ field_str: field_str,
+ external: external,
+ }
+ }
+
+ /// Module reference of the import entry.
+ pub fn module(&self) -> &str { &self.module_str }
+
+ /// Module reference of the import entry (mutable).
+ pub fn module_mut(&mut self) -> &mut String {
+ &mut self.module_str
+ }
+
+ /// Field reference of the import entry.
+ pub fn field(&self) -> &str { &self.field_str }
+
+ /// Field reference of the import entry (mutable)
+ pub fn field_mut(&mut self) -> &mut String {
+ &mut self.field_str
+ }
+
+	/// Local binding of the import entry.
+ pub fn external(&self) -> &External { &self.external }
+
+	/// Local binding of the import entry (mutable).
+ pub fn external_mut(&mut self) -> &mut External { &mut self.external }
+}
+
+impl Deserialize for ImportEntry {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ let module_str = String::deserialize(reader)?;
+ let field_str = String::deserialize(reader)?;
+ let external = External::deserialize(reader)?;
+
+ Ok(ImportEntry {
+ module_str: module_str,
+ field_str: field_str,
+ external: external,
+ })
+ }
+}
+
+impl Serialize for ImportEntry {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ self.module_str.serialize(writer)?;
+ self.field_str.serialize(writer)?;
+ self.external.serialize(writer)
+ }
+}
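+
+// Illustrative sketch (not part of the upstream crate): the `External` kind encoding
+// exercised through the `serialize`/`deserialize_buffer` helpers of the parent module.
+#[cfg(test)]
+mod import_entry_examples {
+	use super::*;
+	use crate::elements::{deserialize_buffer, serialize};
+
+	#[test]
+	fn function_and_memory_imports_round_trip() {
+		// "env.add" binds to the function signature at type-section index 0 (kind byte 0x00);
+		// "env.memory" binds to a memory with one initial page and a maximum of 16 (kind byte 0x02).
+		let imports = vec![
+			ImportEntry::new("env".into(), "add".into(), External::Function(0)),
+			ImportEntry::new("env".into(), "memory".into(), External::Memory(MemoryType::new(1, Some(16)))),
+		];
+		for entry in imports {
+			let bytes = serialize(entry.clone()).expect("encoding should succeed");
+			assert_eq!(deserialize_buffer::<ImportEntry>(&bytes).expect("decoding should succeed"), entry);
+		}
+	}
+}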
diff --git a/third_party/rust/parity-wasm/src/elements/index_map.rs b/third_party/rust/parity-wasm/src/elements/index_map.rs
new file mode 100644
index 0000000000..151f5250e3
--- /dev/null
+++ b/third_party/rust/parity-wasm/src/elements/index_map.rs
@@ -0,0 +1,595 @@
+use alloc::vec::Vec;
+use crate::io;
+
+use super::{Deserialize, Error, Serialize, VarUint32};
+
+use alloc::vec;
+use core::{
+ cmp::min,
+ iter::{FromIterator, IntoIterator},
+ mem, slice
+};
+
+/// A map from non-contiguous `u32` keys to values of type `T`, which is
+/// serialized and deserialized in ascending order of the keys. Normally used for
+/// relatively dense maps with occasional "holes", and stored as an array.
+///
+/// **SECURITY WARNING:** This code is currently subject to a denial of service
+/// attack if you create a map containing the key `u32::MAX`, which should never
+/// happen in normal data. It would be pretty easy to provide a safe
+/// deserializing mechanism which addressed this problem.
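+///
+/// Illustrative example (not part of the upstream documentation); holes below the largest
+/// inserted key are stored as `None` and are skipped by iteration and by `len`:
+///
+/// ```
+/// use parity_wasm::elements::IndexMap;
+///
+/// let mut map = IndexMap::default();
+/// assert_eq!(map.insert(2, "two"), None);          // indices 0 and 1 become holes
+/// assert_eq!(map.insert(2, "TWO"), Some("two"));   // replaces the existing entry
+/// assert_eq!(map.get(2), Some(&"TWO"));
+/// assert_eq!(map.len(), 1);                        // only non-`None` entries are counted
+/// ```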
+#[derive(Debug, Default)]
+pub struct IndexMap<T> {
+ /// The number of non-`None` entries in this map.
+ len: usize,
+
+ /// A vector of entries. Missing entries are represented as `None`.
+ entries: Vec<Option<T>>,
+}
+
+impl<T> IndexMap<T> {
+ /// Create an empty `IndexMap`, preallocating enough space to store
+ /// `capacity` entries without needing to reallocate the underlying memory.
+ pub fn with_capacity(capacity: usize) -> IndexMap<T> {
+ IndexMap {
+ len: 0,
+ entries: Vec::with_capacity(capacity),
+ }
+ }
+
+ /// Clear the map.
+ pub fn clear(&mut self) {
+ self.entries.clear();
+ self.len = 0;
+ }
+
+	/// Return the value for the specified index, if it exists.
+ pub fn get(&self, idx: u32) -> Option<&T> {
+ match self.entries.get(idx as usize) {
+ Some(&Some(ref value)) => Some(value),
+ Some(&None) | None => None,
+ }
+ }
+
+ /// Does the map contain an entry for the specified index?
+ pub fn contains_key(&self, idx: u32) -> bool {
+ match self.entries.get(idx as usize) {
+ Some(&Some(_)) => true,
+ Some(&None) | None => false,
+ }
+ }
+
+	/// Insert a value into our map, returning the existing value if present.
+ ///
+ /// Note: This API is designed for reasonably dense indices based on valid
+ /// data. Inserting a huge `idx` will use up a lot of RAM, and this function
+ /// will not try to protect you against that.
+ pub fn insert(&mut self, idx: u32, value: T) -> Option<T> {
+ let idx = idx as usize;
+ let result = if idx >= self.entries.len() {
+ // We need to grow the array, and add the new element at the end.
+ for _ in 0..(idx - self.entries.len()) {
+ // We can't use `extend(repeat(None)).take(n)`, because that
+ // would require `T` to implement `Clone`.
+ self.entries.push(None);
+ }
+ self.entries.push(Some(value));
+ debug_assert_eq!(idx + 1, self.entries.len());
+ self.len += 1;
+ None
+ } else {
+ // We're either replacing an existing element, or filling in a
+ // missing one.
+ let existing = self.entries[idx].take();
+ if existing.is_none() {
+ self.len += 1;
+ }
+ self.entries[idx] = Some(value);
+ existing
+ };
+ if mem::size_of::<usize>() > 4 {
+ debug_assert!(self.entries.len() <= (u32::max_value() as usize) + 1);
+ }
+ #[cfg(slow_assertions)]
+ debug_assert_eq!(self.len, self.slow_len());
+ result
+ }
+
+ /// Remove an item if present and return it.
+ pub fn remove(&mut self, idx: u32) -> Option<T> {
+ let result = match self.entries.get_mut(idx as usize) {
+ Some(value @ &mut Some(_)) => {
+ self.len -= 1;
+ value.take()
+ }
+ Some(&mut None) | None => None,
+ };
+ #[cfg(slow_assertions)]
+ debug_assert_eq!(self.len, self.slow_len());
+ result
+ }
+
+ /// The number of items in this map.
+ pub fn len(&self) -> usize {
+ #[cfg(slow_assertions)]
+ debug_assert_eq!(self.len, self.slow_len());
+ self.len
+ }
+
+ /// Is this map empty?
+ pub fn is_empty(&self) -> bool {
+ self.len == 0
+ }
+
+ /// This function is only compiled when `--cfg slow_assertions` is enabled.
+ /// It computes the `len` value using a slow algorithm.
+ ///
+ /// WARNING: This turns a bunch of O(n) operations into O(n^2) operations.
+ /// We may want to remove it once the code is tested, or to put it behind
+ /// a feature flag named `slow_debug_checks`, or something like that.
+ #[cfg(slow_assertions)]
+ fn slow_len(&self) -> usize {
+ self.entries.iter().filter(|entry| entry.is_some()).count()
+ }
+
+ /// Create a non-consuming iterator over this `IndexMap`'s keys and values.
+ pub fn iter(&self) -> Iter<T> {
+ // Note that this does the right thing because we use `&self`.
+ self.into_iter()
+ }
+
+ /// Custom deserialization routine.
+ ///
+ /// We will allocate an underlying array no larger than `max_entry_space` to
+ /// hold the data, so the maximum index must be less than `max_entry_space`.
+	/// This prevents malicious *.wasm files from having a single entry with
+ /// the index `u32::MAX`, which would consume far too much memory.
+ ///
+ /// The `deserialize_value` function will be passed the index of the value
+ /// being deserialized, and must deserialize the value.
+ pub fn deserialize_with<R, F>(
+ max_entry_space: usize,
+ deserialize_value: &F,
+ rdr: &mut R,
+ ) -> Result<IndexMap<T>, Error>
+ where
+ R: io::Read,
+ F: Fn(u32, &mut R) -> Result<T, Error>,
+ {
+ let len: u32 = VarUint32::deserialize(rdr)?.into();
+ let mut map = IndexMap::with_capacity(len as usize);
+ let mut prev_idx = None;
+ for _ in 0..len {
+ let idx: u32 = VarUint32::deserialize(rdr)?.into();
+ if idx as usize >= max_entry_space {
+ return Err(Error::Other("index is larger than expected"));
+ }
+ match prev_idx {
+ Some(prev) if prev >= idx => {
+					// Entries must be listed in strictly increasing index order,
+					// so enforce that while decoding.
+ return Err(Error::Other("indices are out of order"));
+ }
+ _ => {
+ prev_idx = Some(idx);
+ }
+ }
+ let val = deserialize_value(idx, rdr)?;
+ map.insert(idx, val);
+ }
+ Ok(map)
+ }
+
+}
+
+impl<T: Clone> Clone for IndexMap<T> {
+ fn clone(&self) -> IndexMap<T> {
+ IndexMap {
+ len: self.len,
+ entries: self.entries.clone(),
+ }
+ }
+}
+
+impl<T: PartialEq> PartialEq<IndexMap<T>> for IndexMap<T> {
+ fn eq(&self, other: &IndexMap<T>) -> bool {
+ if self.len() != other.len() {
+ // If the number of non-`None` entries is different, we can't match.
+ false
+ } else {
+ // This is tricky, because one `Vec` might have a bunch of empty
+ // entries at the end which we want to ignore.
+ let smallest_len = min(self.entries.len(), other.entries.len());
+ self.entries[0..smallest_len].eq(&other.entries[0..smallest_len])
+ }
+ }
+}
+
+impl<T: Eq> Eq for IndexMap<T> {}
+
+impl<T> FromIterator<(u32, T)> for IndexMap<T> {
+ /// Create an `IndexMap` from an iterator.
+ ///
+ /// Note: This API is designed for reasonably dense indices based on valid
+ /// data. Inserting a huge `idx` will use up a lot of RAM, and this function
+ /// will not try to protect you against that.
+ fn from_iter<I>(iter: I) -> Self
+ where
+ I: IntoIterator<Item = (u32, T)>,
+ {
+ let iter = iter.into_iter();
+ let (lower, upper_opt) = iter.size_hint();
+ let mut map = IndexMap::with_capacity(upper_opt.unwrap_or(lower));
+ for (idx, value) in iter {
+ map.insert(idx, value);
+ }
+ map
+ }
+}
+
+/// An iterator over an `IndexMap` which takes ownership of it.
+pub struct IntoIter<T> {
+ next_idx: u32,
+ remaining_len: usize,
+ iter: vec::IntoIter<Option<T>>,
+}
+
+impl<T> Iterator for IntoIter<T> {
+ type Item = (u32, T);
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.remaining_len, Some(self.remaining_len))
+ }
+
+ fn next(&mut self) -> Option<Self::Item> {
+ // Bail early if we know there are no more items. This also keeps us
+ // from repeatedly calling `self.iter.next()` once it has been
+ // exhausted, which is not guaranteed to keep returning `None`.
+ if self.remaining_len == 0 {
+ return None;
+ }
+ while let Some(value_opt) = self.iter.next() {
+ let idx = self.next_idx;
+ self.next_idx += 1;
+ if let Some(value) = value_opt {
+ self.remaining_len -= 1;
+ return Some((idx, value));
+ }
+ }
+ debug_assert_eq!(self.remaining_len, 0);
+ None
+ }
+}
+
+impl<T> IntoIterator for IndexMap<T> {
+ type Item = (u32, T);
+ type IntoIter = IntoIter<T>;
+
+ fn into_iter(self) -> IntoIter<T> {
+ IntoIter {
+ next_idx: 0,
+ remaining_len: self.len,
+ iter: self.entries.into_iter(),
+ }
+ }
+}
+
+/// An iterator over a borrowed `IndexMap`.
+pub struct Iter<'a, T: 'static> {
+ next_idx: u32,
+ remaining_len: usize,
+ iter: slice::Iter<'a, Option<T>>,
+}
+
+impl<'a, T: 'static> Iterator for Iter<'a, T> {
+ type Item = (u32, &'a T);
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.remaining_len, Some(self.remaining_len))
+ }
+
+ fn next(&mut self) -> Option<Self::Item> {
+ // Bail early if we know there are no more items. This also keeps us
+ // from repeatedly calling `self.iter.next()` once it has been
+ // exhausted, which is not guaranteed to keep returning `None`.
+ if self.remaining_len == 0 {
+ return None;
+ }
+ while let Some(value_opt) = self.iter.next() {
+ let idx = self.next_idx;
+ self.next_idx += 1;
+ if let &Some(ref value) = value_opt {
+ self.remaining_len -= 1;
+ return Some((idx, value));
+ }
+ }
+ debug_assert_eq!(self.remaining_len, 0);
+ None
+ }
+}
+
+impl<'a, T: 'static> IntoIterator for &'a IndexMap<T> {
+ type Item = (u32, &'a T);
+ type IntoIter = Iter<'a, T>;
+
+ fn into_iter(self) -> Iter<'a, T> {
+ Iter {
+ next_idx: 0,
+ remaining_len: self.len,
+ iter: self.entries.iter(),
+ }
+ }
+}
+
+impl<T> Serialize for IndexMap<T>
+where
+ T: Serialize,
+ Error: From<<T as Serialize>::Error>,
+{
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, wtr: &mut W) -> Result<(), Self::Error> {
+ VarUint32::from(self.len()).serialize(wtr)?;
+ for (idx, value) in self {
+ VarUint32::from(idx).serialize(wtr)?;
+ value.serialize(wtr)?;
+ }
+ Ok(())
+ }
+}
+
+impl<T: Deserialize> IndexMap<T>
+where
+ T: Deserialize,
+ Error: From<<T as Deserialize>::Error>,
+{
+ /// Deserialize a map containing simple values that support `Deserialize`.
+ /// We will allocate an underlying array no larger than `max_entry_space` to
+ /// hold the data, so the maximum index must be less than `max_entry_space`.
+ pub fn deserialize<R: io::Read>(
+ max_entry_space: usize,
+ rdr: &mut R,
+ ) -> Result<Self, Error> {
+ let deserialize_value: fn(u32, &mut R) -> Result<T, Error> = |_idx, rdr| {
+ T::deserialize(rdr).map_err(Error::from)
+ };
+ Self::deserialize_with(max_entry_space, &deserialize_value, rdr)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::io;
+ use super::*;
+
+ #[test]
+ fn default_is_empty_no_matter_how_we_look_at_it() {
+ let map = IndexMap::<String>::default();
+ assert_eq!(map.len(), 0);
+ assert!(map.is_empty());
+ assert_eq!(map.iter().collect::<Vec<_>>().len(), 0);
+ assert_eq!(map.into_iter().collect::<Vec<_>>().len(), 0);
+ }
+
+ #[test]
+ fn with_capacity_creates_empty_map() {
+ let map = IndexMap::<String>::with_capacity(10);
+ assert!(map.is_empty());
+ }
+
+ #[test]
+ fn clear_removes_all_values() {
+ let mut map = IndexMap::<String>::default();
+ map.insert(0, "sample value".to_string());
+ assert_eq!(map.len(), 1);
+ map.clear();
+ assert_eq!(map.len(), 0);
+ }
+
+ #[test]
+ fn get_returns_elements_that_are_there_but_nothing_else() {
+ let mut map = IndexMap::<String>::default();
+ map.insert(1, "sample value".to_string());
+ assert_eq!(map.len(), 1);
+ assert_eq!(map.get(0), None);
+ assert_eq!(map.get(1), Some(&"sample value".to_string()));
+ assert_eq!(map.get(2), None);
+ }
+
+ #[test]
+ fn contains_key_returns_true_when_a_key_is_present() {
+ let mut map = IndexMap::<String>::default();
+ map.insert(1, "sample value".to_string());
+ assert!(!map.contains_key(0));
+ assert!(map.contains_key(1));
+ assert!(!map.contains_key(2));
+ }
+
+ #[test]
+ fn insert_behaves_like_other_maps() {
+ let mut map = IndexMap::<String>::default();
+
+ // Insert a key which requires extending our storage.
+ assert_eq!(map.insert(1, "val 1".to_string()), None);
+ assert_eq!(map.len(), 1);
+ assert!(map.contains_key(1));
+
+ // Insert a key which requires filling in a hole.
+ assert_eq!(map.insert(0, "val 0".to_string()), None);
+ assert_eq!(map.len(), 2);
+ assert!(map.contains_key(0));
+
+ // Insert a key which replaces an existing key.
+ assert_eq!(
+ map.insert(1, "val 1.1".to_string()),
+ Some("val 1".to_string())
+ );
+ assert_eq!(map.len(), 2);
+ assert!(map.contains_key(1));
+ assert_eq!(map.get(1), Some(&"val 1.1".to_string()));
+ }
+
+ #[test]
+ fn remove_behaves_like_other_maps() {
+ let mut map = IndexMap::<String>::default();
+ assert_eq!(map.insert(1, "val 1".to_string()), None);
+
+ // Remove an out-of-bounds element.
+ assert_eq!(map.remove(2), None);
+ assert_eq!(map.len(), 1);
+
+ // Remove an in-bounds but missing element.
+ assert_eq!(map.remove(0), None);
+ assert_eq!(map.len(), 1);
+
+ // Remove an existing element.
+ assert_eq!(map.remove(1), Some("val 1".to_string()));
+ assert_eq!(map.len(), 0);
+ }
+
+ #[test]
+ fn partial_eq_works_as_expected_in_simple_cases() {
+ let mut map1 = IndexMap::<String>::default();
+ let mut map2 = IndexMap::<String>::default();
+ assert_eq!(map1, map2);
+
+ map1.insert(1, "a".to_string());
+ map2.insert(1, "a".to_string());
+ assert_eq!(map1, map2);
+
+ map1.insert(0, "b".to_string());
+ assert_ne!(map1, map2);
+ map1.remove(0);
+ assert_eq!(map1, map2);
+
+ map1.insert(1, "not a".to_string());
+ assert_ne!(map1, map2);
+ }
+
+ #[test]
+ fn partial_eq_is_smart_about_none_values_at_the_end() {
+ let mut map1 = IndexMap::<String>::default();
+ let mut map2 = IndexMap::<String>::default();
+
+ map1.insert(1, "a".to_string());
+ map2.insert(1, "a".to_string());
+
+ // Both maps have the same (idx, value) pairs, but map2 has extra space.
+ map2.insert(10, "b".to_string());
+ map2.remove(10);
+ assert_eq!(map1, map2);
+
+ // Both maps have the same (idx, value) pairs, but map1 has extra space.
+ map1.insert(100, "b".to_string());
+ map1.remove(100);
+ assert_eq!(map1, map2);
+
+ // Let's be paranoid.
+ map2.insert(1, "b".to_string());
+ assert_ne!(map1, map2);
+ }
+
+ #[test]
+ fn from_iterator_builds_a_map() {
+ let data = &[
+ // We support out-of-order values here!
+ (3, "val 3"),
+ (2, "val 2"),
+ (5, "val 5"),
+ ];
+ let iter = data.iter().map(|&(idx, val)| (idx, val.to_string()));
+ let map = IndexMap::from_iter(iter);
+ assert_eq!(map.len(), 3);
+ assert_eq!(map.get(2), Some(&"val 2".to_string()));
+ assert_eq!(map.get(3), Some(&"val 3".to_string()));
+ assert_eq!(map.get(5), Some(&"val 5".to_string()));
+ }
+
+ #[test]
+ fn iterators_are_well_behaved() {
+ // Create a map with reasonably complex internal structure, making
+ // sure that we have both internal missing elements, and a bunch of
+ // missing elements at the end.
+ let data = &[(3, "val 3"), (2, "val 2"), (5, "val 5")];
+ let src_iter = data.iter().map(|&(idx, val)| (idx, val.to_string()));
+ let mut map = IndexMap::from_iter(src_iter);
+ map.remove(5);
+
+ // Make sure `size_hint` and `next` behave as we expect at each step.
+ {
+ let mut iter1 = map.iter();
+ assert_eq!(iter1.size_hint(), (2, Some(2)));
+ assert_eq!(iter1.next(), Some((2, &"val 2".to_string())));
+ assert_eq!(iter1.size_hint(), (1, Some(1)));
+ assert_eq!(iter1.next(), Some((3, &"val 3".to_string())));
+ assert_eq!(iter1.size_hint(), (0, Some(0)));
+ assert_eq!(iter1.next(), None);
+ assert_eq!(iter1.size_hint(), (0, Some(0)));
+ assert_eq!(iter1.next(), None);
+ assert_eq!(iter1.size_hint(), (0, Some(0)));
+ }
+
+ // Now do the same for a consuming iterator.
+ let mut iter2 = map.into_iter();
+ assert_eq!(iter2.size_hint(), (2, Some(2)));
+ assert_eq!(iter2.next(), Some((2, "val 2".to_string())));
+ assert_eq!(iter2.size_hint(), (1, Some(1)));
+ assert_eq!(iter2.next(), Some((3, "val 3".to_string())));
+ assert_eq!(iter2.size_hint(), (0, Some(0)));
+ assert_eq!(iter2.next(), None);
+ assert_eq!(iter2.size_hint(), (0, Some(0)));
+ assert_eq!(iter2.next(), None);
+ assert_eq!(iter2.size_hint(), (0, Some(0)));
+ }
+
+ #[test]
+ fn serialize_and_deserialize() {
+ let mut map = IndexMap::<String>::default();
+ map.insert(1, "val 1".to_string());
+
+ let mut output = vec![];
+ map.clone()
+ .serialize(&mut output)
+ .expect("serialize failed");
+
+ let mut input = io::Cursor::new(&output);
+ let deserialized = IndexMap::deserialize(2, &mut input).expect("deserialize failed");
+
+ assert_eq!(deserialized, map);
+ }
+
+ #[test]
+ fn deserialize_requires_elements_to_be_in_order() {
+		// Build an in-order example by hand.
+ let mut valid = vec![];
+ VarUint32::from(2u32).serialize(&mut valid).unwrap();
+ VarUint32::from(0u32).serialize(&mut valid).unwrap();
+ "val 0".to_string().serialize(&mut valid).unwrap();
+ VarUint32::from(1u32).serialize(&mut valid).unwrap();
+ "val 1".to_string().serialize(&mut valid).unwrap();
+ let map = IndexMap::<String>::deserialize(2, &mut io::Cursor::new(valid))
+ .expect("unexpected error deserializing");
+ assert_eq!(map.len(), 2);
+
+ // Build an out-of-order example by hand.
+ let mut invalid = vec![];
+ VarUint32::from(2u32).serialize(&mut invalid).unwrap();
+ VarUint32::from(1u32).serialize(&mut invalid).unwrap();
+ "val 1".to_string().serialize(&mut invalid).unwrap();
+ VarUint32::from(0u32).serialize(&mut invalid).unwrap();
+ "val 0".to_string().serialize(&mut invalid).unwrap();
+ let res = IndexMap::<String>::deserialize(2, &mut io::Cursor::new(invalid));
+ assert!(res.is_err());
+ }
+
+ #[test]
+ fn deserialize_enforces_max_idx() {
+ // Build an example with an out-of-bounds index by hand.
+ let mut invalid = vec![];
+ VarUint32::from(1u32).serialize(&mut invalid).unwrap();
+ VarUint32::from(5u32).serialize(&mut invalid).unwrap();
+ "val 5".to_string().serialize(&mut invalid).unwrap();
+ let res = IndexMap::<String>::deserialize(1, &mut io::Cursor::new(invalid));
+ assert!(res.is_err());
+ }
+}
diff --git a/third_party/rust/parity-wasm/src/elements/mod.rs b/third_party/rust/parity-wasm/src/elements/mod.rs
new file mode 100644
index 0000000000..3c0e8ac02b
--- /dev/null
+++ b/third_party/rust/parity-wasm/src/elements/mod.rs
@@ -0,0 +1,332 @@
+//! Elements of the WebAssembly binary format.
+
+use alloc::{string::String, vec::Vec};
+use crate::io;
+
+use core::fmt;
+
+macro_rules! buffered_read {
+ ($buffer_size: expr, $length: expr, $reader: expr) => {
+ {
+ let mut vec_buf = Vec::new();
+ let mut total_read = 0;
+ let mut buf = [0u8; $buffer_size];
+ while total_read < $length {
+ let next_to_read = if $length - total_read > $buffer_size { $buffer_size } else { $length - total_read };
+ $reader.read(&mut buf[0..next_to_read])?;
+ vec_buf.extend_from_slice(&buf[0..next_to_read]);
+ total_read += next_to_read;
+ }
+ vec_buf
+ }
+ }
+}
+
+mod primitives;
+mod module;
+mod section;
+mod types;
+mod import_entry;
+mod export_entry;
+mod global_entry;
+mod ops;
+mod func;
+mod segment;
+mod index_map;
+mod name_section;
+mod reloc_section;
+
+pub use self::module::{Module, peek_size, ImportCountType};
+pub use self::section::{
+ Section, FunctionSection, CodeSection, MemorySection, DataSection,
+ ImportSection, ExportSection, GlobalSection, TypeSection, ElementSection,
+ TableSection, CustomSection,
+};
+pub use self::import_entry::{ImportEntry, ResizableLimits, MemoryType, TableType, GlobalType, External};
+pub use self::export_entry::{ExportEntry, Internal};
+pub use self::global_entry::GlobalEntry;
+pub use self::primitives::{
+ VarUint32, VarUint7, Uint8, VarUint1, VarInt7, Uint32, VarInt32, VarInt64,
+ Uint64, VarUint64, CountedList, CountedWriter, CountedListWriter,
+};
+pub use self::types::{Type, ValueType, BlockType, FunctionType, TableElementType};
+pub use self::ops::{Instruction, Instructions, InitExpr, opcodes, BrTableData};
+
+#[cfg(feature="atomics")]
+pub use self::ops::AtomicsInstruction;
+
+#[cfg(feature="simd")]
+pub use self::ops::SimdInstruction;
+
+#[cfg(feature="sign_ext")]
+pub use self::ops::SignExtInstruction;
+
+#[cfg(feature="bulk")]
+pub use self::ops::BulkInstruction;
+
+#[cfg(any(feature="simd", feature="atomics"))]
+pub use self::ops::MemArg;
+
+pub use self::func::{Func, FuncBody, Local};
+pub use self::segment::{ElementSegment, DataSegment};
+pub use self::index_map::IndexMap;
+pub use self::name_section::{
+ NameMap, NameSection, ModuleNameSubsection, FunctionNameSubsection,
+ LocalNameSubsection,
+};
+pub use self::reloc_section::{
+ RelocSection, RelocationEntry,
+};
+
+/// Deserialization from serial i/o.
+pub trait Deserialize : Sized {
+	/// Deserialization error produced by the deserialization routine.
+ type Error: From<io::Error>;
+ /// Deserialize type from serial i/o
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error>;
+}
+
+/// Serialization to serial i/o. Takes self by value to consume less memory
+/// (parity-wasm IR is being partially freed by filling the result buffer).
+pub trait Serialize {
+ /// Serialization error produced by serialization routine.
+ type Error: From<io::Error>;
+ /// Serialize type to serial i/o
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error>;
+}
+
+/// Deserialization/serialization error
+#[derive(Debug, Clone)]
+pub enum Error {
+ /// Unexpected end of input.
+ UnexpectedEof,
+ /// Invalid magic.
+ InvalidMagic,
+ /// Unsupported version.
+ UnsupportedVersion(u32),
+	/// Inconsistency between declared and actual length.
+ InconsistentLength {
+ /// Expected length of the definition.
+ expected: usize,
+ /// Actual length of the definition.
+ actual: usize
+ },
+ /// Other static error.
+ Other(&'static str),
+ /// Other allocated error.
+ HeapOther(String),
+ /// Invalid/unknown value type declaration.
+ UnknownValueType(i8),
+ /// Invalid/unknown table element type declaration.
+ UnknownTableElementType(i8),
+ /// Non-utf8 string.
+ NonUtf8String,
+ /// Unknown external kind code.
+ UnknownExternalKind(u8),
+ /// Unknown internal kind code.
+ UnknownInternalKind(u8),
+ /// Unknown opcode encountered.
+ UnknownOpcode(u8),
+ #[cfg(feature="simd")]
+ /// Unknown SIMD opcode encountered.
+ UnknownSimdOpcode(u32),
+ /// Invalid VarUint1 value.
+ InvalidVarUint1(u8),
+ /// Invalid VarInt32 value.
+ InvalidVarInt32,
+ /// Invalid VarInt64 value.
+ InvalidVarInt64,
+ /// Invalid VarUint32 value.
+ InvalidVarUint32,
+ /// Invalid VarUint64 value.
+ InvalidVarUint64,
+ /// Inconsistent metadata.
+ InconsistentMetadata,
+ /// Invalid section id.
+ InvalidSectionId(u8),
+ /// Sections are out of order.
+ SectionsOutOfOrder,
+ /// Duplicated sections.
+ DuplicatedSections(u8),
+ /// Invalid memory reference (should be 0).
+ InvalidMemoryReference(u8),
+ /// Invalid table reference (should be 0).
+ InvalidTableReference(u8),
+ /// Invalid value used for flags in limits type.
+ InvalidLimitsFlags(u8),
+ /// Unknown function form (should be 0x60).
+ UnknownFunctionForm(u8),
+ /// Invalid varint7 (should be in -64..63 range).
+ InvalidVarInt7(u8),
+ /// Number of function body entries and signatures does not match.
+ InconsistentCode,
+ /// Only flags 0, 1, and 2 are accepted on segments.
+ InvalidSegmentFlags(u32),
+ /// Sum of counts of locals is greater than 2^32.
+ TooManyLocals,
+ /// Duplicated name subsections.
+ DuplicatedNameSubsections(u8),
+ /// Unknown name subsection type.
+ UnknownNameSubsectionType(u8),
+}
+
+impl fmt::Display for Error {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match *self {
+ Error::UnexpectedEof => write!(f, "Unexpected end of input"),
+ Error::InvalidMagic => write!(f, "Invalid magic number at start of file"),
+ Error::UnsupportedVersion(v) => write!(f, "Unsupported wasm version {}", v),
+ Error::InconsistentLength { expected, actual } => {
+ write!(f, "Expected length {}, found {}", expected, actual)
+ }
+ Error::Other(msg) => write!(f, "{}", msg),
+ Error::HeapOther(ref msg) => write!(f, "{}", msg),
+ Error::UnknownValueType(ty) => write!(f, "Invalid or unknown value type {}", ty),
+ Error::UnknownTableElementType(ty) => write!(f, "Unknown table element type {}", ty),
+ Error::NonUtf8String => write!(f, "Non-UTF-8 string"),
+ Error::UnknownExternalKind(kind) => write!(f, "Unknown external kind {}", kind),
+ Error::UnknownInternalKind(kind) => write!(f, "Unknown internal kind {}", kind),
+ Error::UnknownOpcode(opcode) => write!(f, "Unknown opcode {}", opcode),
+ #[cfg(feature="simd")]
+ Error::UnknownSimdOpcode(opcode) => write!(f, "Unknown SIMD opcode {}", opcode),
+ Error::InvalidVarUint1(val) => write!(f, "Not an unsigned 1-bit integer: {}", val),
+ Error::InvalidVarInt7(val) => write!(f, "Not a signed 7-bit integer: {}", val),
+ Error::InvalidVarInt32 => write!(f, "Not a signed 32-bit integer"),
+ Error::InvalidVarUint32 => write!(f, "Not an unsigned 32-bit integer"),
+ Error::InvalidVarInt64 => write!(f, "Not a signed 64-bit integer"),
+ Error::InvalidVarUint64 => write!(f, "Not an unsigned 64-bit integer"),
+ Error::InconsistentMetadata => write!(f, "Inconsistent metadata"),
+ Error::InvalidSectionId(ref id) => write!(f, "Invalid section id: {}", id),
+ Error::SectionsOutOfOrder => write!(f, "Sections out of order"),
+ Error::DuplicatedSections(ref id) => write!(f, "Duplicated sections ({})", id),
+ Error::InvalidMemoryReference(ref mem_ref) => write!(f, "Invalid memory reference ({})", mem_ref),
+ Error::InvalidTableReference(ref table_ref) => write!(f, "Invalid table reference ({})", table_ref),
+ Error::InvalidLimitsFlags(ref flags) => write!(f, "Invalid limits flags ({})", flags),
+ Error::UnknownFunctionForm(ref form) => write!(f, "Unknown function form ({})", form),
+ Error::InconsistentCode => write!(f, "Number of function body entries and signatures does not match"),
+ Error::InvalidSegmentFlags(n) => write!(f, "Invalid segment flags: {}", n),
+ Error::TooManyLocals => write!(f, "Too many locals"),
+ Error::DuplicatedNameSubsections(n) => write!(f, "Duplicated name subsections: {}", n),
+ Error::UnknownNameSubsectionType(n) => write!(f, "Unknown subsection type: {}", n),
+ }
+ }
+}
+
+#[cfg(feature = "std")]
+impl ::std::error::Error for Error {
+ fn description(&self) -> &str {
+ match *self {
+ Error::UnexpectedEof => "Unexpected end of input",
+ Error::InvalidMagic => "Invalid magic number at start of file",
+ Error::UnsupportedVersion(_) => "Unsupported wasm version",
+ Error::InconsistentLength { .. } => "Inconsistent length",
+ Error::Other(msg) => msg,
+ Error::HeapOther(ref msg) => &msg[..],
+ Error::UnknownValueType(_) => "Invalid or unknown value type",
+ Error::UnknownTableElementType(_) => "Unknown table element type",
+ Error::NonUtf8String => "Non-UTF-8 string",
+ Error::UnknownExternalKind(_) => "Unknown external kind",
+ Error::UnknownInternalKind(_) => "Unknown internal kind",
+ Error::UnknownOpcode(_) => "Unknown opcode",
+ #[cfg(feature="simd")]
+ Error::UnknownSimdOpcode(_) => "Unknown SIMD opcode",
+ Error::InvalidVarUint1(_) => "Not an unsigned 1-bit integer",
+ Error::InvalidVarInt32 => "Not a signed 32-bit integer",
+ Error::InvalidVarInt7(_) => "Not a signed 7-bit integer",
+ Error::InvalidVarUint32 => "Not an unsigned 32-bit integer",
+ Error::InvalidVarInt64 => "Not a signed 64-bit integer",
+ Error::InvalidVarUint64 => "Not an unsigned 64-bit integer",
+ Error::InconsistentMetadata => "Inconsistent metadata",
+ Error::InvalidSectionId(_) => "Invalid section id",
+ Error::SectionsOutOfOrder => "Sections out of order",
+ Error::DuplicatedSections(_) => "Duplicated section",
+ Error::InvalidMemoryReference(_) => "Invalid memory reference",
+ Error::InvalidTableReference(_) => "Invalid table reference",
+ Error::InvalidLimitsFlags(_) => "Invalid limits flags",
+ Error::UnknownFunctionForm(_) => "Unknown function form",
+ Error::InconsistentCode => "Number of function body entries and signatures does not match",
+ Error::InvalidSegmentFlags(_) => "Invalid segment flags",
+ Error::TooManyLocals => "Too many locals",
+ Error::DuplicatedNameSubsections(_) => "Duplicated name subsections",
+			Error::UnknownNameSubsectionType(_) => "Unknown name subsection type",
+ }
+ }
+}
+
+impl From<io::Error> for Error {
+ fn from(err: io::Error) -> Self {
+ Error::HeapOther(format!("I/O Error: {:?}", err))
+ }
+}
+
+// These are emitted by section parsers, such as `parse_names` and `parse_reloc`.
+impl From<(Vec<(usize, Error)>, Module)> for Error {
+ fn from(err: (Vec<(usize, Error)>, Module)) -> Self {
+ let ret = err.0.iter()
+ .fold(
+ String::new(),
+ |mut acc, item| { acc.push_str(&format!("In section {}: {}\n", item.0, item.1)); acc }
+ );
+ Error::HeapOther(ret)
+ }
+}
+
+/// Unparsed part of the module/section.
+pub struct Unparsed(pub Vec<u8>);
+
+impl Deserialize for Unparsed {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ let len = VarUint32::deserialize(reader)?.into();
+ let mut vec = vec![0u8; len];
+ reader.read(&mut vec[..])?;
+ Ok(Unparsed(vec))
+ }
+}
+
+impl From<Unparsed> for Vec<u8> {
+ fn from(u: Unparsed) -> Vec<u8> {
+ u.0
+ }
+}
+
+/// Deserialize deserializable type from buffer.
+pub fn deserialize_buffer<T: Deserialize>(contents: &[u8]) -> Result<T, T::Error> {
+ let mut reader = io::Cursor::new(contents);
+ let result = T::deserialize(&mut reader)?;
+ if reader.position() != contents.len() {
+		// This is TrailingData: if there had been too little data,
+		// T::deserialize would already have returned UnexpectedEof.
+ return Err(io::Error::TrailingData.into())
+ }
+ Ok(result)
+}
+
+/// Create buffer with serialized value.
+pub fn serialize<T: Serialize>(val: T) -> Result<Vec<u8>, T::Error> {
+ let mut buf = Vec::new();
+ val.serialize(&mut buf)?;
+ Ok(buf)
+}
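+
+// Illustrative sketch (not part of the upstream crate): the whole-buffer check performed
+// by `deserialize_buffer` above, demonstrated with a `VarUint32` payload.
+#[cfg(test)]
+mod buffer_helper_examples {
+	use super::*;
+
+	#[test]
+	fn deserialize_buffer_rejects_trailing_data() {
+		// A single LEB128 byte decodes to 5; an extra unread byte makes the call fail with
+		// the `TrailingData` i/o error converted into `Error::HeapOther`.
+		assert_eq!(u32::from(deserialize_buffer::<VarUint32>(&[0x05]).unwrap()), 5u32);
+		assert!(deserialize_buffer::<VarUint32>(&[0x05, 0xff]).is_err());
+	}
+}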
+
+/// Deserialize module from the file.
+#[cfg(feature = "std")]
+pub fn deserialize_file<P: AsRef<::std::path::Path>>(p: P) -> Result<Module, Error> {
+ let mut f = ::std::fs::File::open(p)
+ .map_err(|e| Error::HeapOther(format!("Can't read from the file: {:?}", e)))?;
+
+ Module::deserialize(&mut f)
+}
+
+/// Serialize module to the file
+#[cfg(feature = "std")]
+pub fn serialize_to_file<P: AsRef<::std::path::Path>>(p: P, module: Module) -> Result<(), Error> {
+ let mut io = ::std::fs::File::create(p)
+ .map_err(|e|
+ Error::HeapOther(format!("Can't create the file: {:?}", e))
+ )?;
+
+ module.serialize(&mut io)?;
+ Ok(())
+}
diff --git a/third_party/rust/parity-wasm/src/elements/module.rs b/third_party/rust/parity-wasm/src/elements/module.rs
new file mode 100644
index 0000000000..4ad1c6d60b
--- /dev/null
+++ b/third_party/rust/parity-wasm/src/elements/module.rs
@@ -0,0 +1,971 @@
+use alloc::{borrow::ToOwned, vec::Vec, string::String};
+use crate::io;
+
+use super::{deserialize_buffer, serialize, Deserialize, Serialize, Error, Uint32, External};
+use super::section::{
+ Section, CodeSection, TypeSection, ImportSection, ExportSection, FunctionSection,
+ GlobalSection, TableSection, ElementSection, DataSection, MemorySection,
+ CustomSection,
+};
+use super::name_section::NameSection;
+use super::reloc_section::RelocSection;
+
+use core::cmp;
+
+const WASM_MAGIC_NUMBER: [u8; 4] = [0x00, 0x61, 0x73, 0x6d];
+
+/// WebAssembly module
+#[derive(Debug, Clone, PartialEq)]
+pub struct Module {
+ magic: u32,
+ version: u32,
+ sections: Vec<Section>,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq)]
+/// Type of the import entry to count
+pub enum ImportCountType {
+ /// Count functions
+ Function,
+ /// Count globals
+ Global,
+ /// Count tables
+ Table,
+ /// Count memories
+ Memory,
+}
+
+impl Default for Module {
+ fn default() -> Self {
+ Module {
+ magic: u32::from_le_bytes(WASM_MAGIC_NUMBER),
+ version: 1,
+ sections: Vec::with_capacity(16),
+ }
+ }
+}
+
+impl Module {
+ /// New module with sections
+ pub fn new(sections: Vec<Section>) -> Self {
+ Module {
+ sections: sections, ..Default::default()
+ }
+ }
+
+ /// Construct a module from a slice.
+ pub fn from_bytes<T: AsRef<[u8]>>(input: T) -> Result<Self, Error> {
+ Ok(deserialize_buffer::<Module>(input.as_ref())?)
+ }
+
+ /// Serialize a module to a vector.
+ pub fn to_bytes(self) -> Result<Vec<u8>, Error> {
+ Ok(serialize::<Module>(self)?)
+ }
+
+ /// Destructure the module, yielding sections
+ pub fn into_sections(self) -> Vec<Section> {
+ self.sections
+ }
+
+ /// Version of module.
+ pub fn version(&self) -> u32 { self.version }
+
+ /// Sections list.
+ ///
+ /// Each known section is optional and may appear at most once.
+ pub fn sections(&self) -> &[Section] {
+ &self.sections
+ }
+
+ /// Sections list (mutable).
+ ///
+ /// Each known section is optional and may appear at most once.
+ pub fn sections_mut(&mut self) -> &mut Vec<Section> {
+ &mut self.sections
+ }
+
+	/// Insert a section, in the correct section ordering. This will fail with an error
+	/// if a non-custom section with the same id is already present.
+ pub fn insert_section(&mut self, section: Section) -> Result<(), Error> {
+ let sections = self.sections_mut();
+
+		// Custom sections can be inserted anywhere. Let's always insert them last here.
+ if section.order() == 0 {
+ sections.push(section);
+ return Ok(());
+ }
+
+ // Check if the section already exists.
+ if sections.iter().position(|s| s.order() == section.order()).is_some() {
+ return Err(Error::DuplicatedSections(section.order()));
+ }
+
+ // Assume that the module is already well-ordered.
+ if let Some(pos) = sections.iter().position(|s| section.order() < s.order()) {
+ sections.insert(pos, section);
+ } else {
+ sections.push(section);
+ }
+
+ Ok(())
+ }
+
+ /// Code section reference, if any.
+ pub fn code_section(&self) -> Option<&CodeSection> {
+ for section in self.sections() {
+ if let &Section::Code(ref code_section) = section { return Some(code_section); }
+ }
+ None
+ }
+
+ /// Code section mutable reference, if any.
+ pub fn code_section_mut(&mut self) -> Option<&mut CodeSection> {
+ for section in self.sections_mut() {
+ if let Section::Code(ref mut code_section) = *section { return Some(code_section); }
+ }
+ None
+ }
+
+ /// Types section reference, if any.
+ pub fn type_section(&self) -> Option<&TypeSection> {
+ for section in self.sections() {
+ if let &Section::Type(ref type_section) = section { return Some(type_section); }
+ }
+ None
+ }
+
+ /// Types section mutable reference, if any.
+ pub fn type_section_mut(&mut self) -> Option<&mut TypeSection> {
+ for section in self.sections_mut() {
+ if let Section::Type(ref mut type_section) = *section { return Some(type_section); }
+ }
+ None
+ }
+
+ /// Imports section reference, if any.
+ pub fn import_section(&self) -> Option<&ImportSection> {
+ for section in self.sections() {
+ if let &Section::Import(ref import_section) = section { return Some(import_section); }
+ }
+ None
+ }
+
+ /// Imports section mutable reference, if any.
+ pub fn import_section_mut(&mut self) -> Option<&mut ImportSection> {
+ for section in self.sections_mut() {
+ if let Section::Import(ref mut import_section) = *section { return Some(import_section); }
+ }
+ None
+ }
+
+ /// Globals section reference, if any.
+ pub fn global_section(&self) -> Option<&GlobalSection> {
+ for section in self.sections() {
+ if let &Section::Global(ref section) = section { return Some(section); }
+ }
+ None
+ }
+
+ /// Globals section mutable reference, if any.
+ pub fn global_section_mut(&mut self) -> Option<&mut GlobalSection> {
+ for section in self.sections_mut() {
+ if let Section::Global(ref mut section) = *section { return Some(section); }
+ }
+ None
+ }
+
+
+ /// Exports section reference, if any.
+ pub fn export_section(&self) -> Option<&ExportSection> {
+ for section in self.sections() {
+ if let &Section::Export(ref export_section) = section { return Some(export_section); }
+ }
+ None
+ }
+
+ /// Exports section mutable reference, if any.
+ pub fn export_section_mut(&mut self) -> Option<&mut ExportSection> {
+ for section in self.sections_mut() {
+ if let Section::Export(ref mut export_section) = *section { return Some(export_section); }
+ }
+ None
+ }
+
+ /// Table section reference, if any.
+ pub fn table_section(&self) -> Option<&TableSection> {
+ for section in self.sections() {
+ if let &Section::Table(ref section) = section { return Some(section); }
+ }
+ None
+ }
+
+ /// Table section mutable reference, if any.
+ pub fn table_section_mut(&mut self) -> Option<&mut TableSection> {
+ for section in self.sections_mut() {
+ if let Section::Table(ref mut section) = *section { return Some(section); }
+ }
+ None
+ }
+
+ /// Data section reference, if any.
+ pub fn data_section(&self) -> Option<&DataSection> {
+ for section in self.sections() {
+ if let &Section::Data(ref section) = section { return Some(section); }
+ }
+ None
+ }
+
+ /// Data section mutable reference, if any.
+ pub fn data_section_mut(&mut self) -> Option<&mut DataSection> {
+ for section in self.sections_mut() {
+ if let Section::Data(ref mut section) = *section { return Some(section); }
+ }
+ None
+ }
+
+ /// Element section reference, if any.
+ pub fn elements_section(&self) -> Option<&ElementSection> {
+ for section in self.sections() {
+ if let &Section::Element(ref section) = section { return Some(section); }
+ }
+ None
+ }
+
+ /// Element section mutable reference, if any.
+ pub fn elements_section_mut(&mut self) -> Option<&mut ElementSection> {
+ for section in self.sections_mut() {
+ if let Section::Element(ref mut section) = *section { return Some(section); }
+ }
+ None
+ }
+
+ /// Memory section reference, if any.
+ pub fn memory_section(&self) -> Option<&MemorySection> {
+ for section in self.sections() {
+ if let &Section::Memory(ref section) = section { return Some(section); }
+ }
+ None
+ }
+
+ /// Memory section mutable reference, if any.
+ pub fn memory_section_mut(&mut self) -> Option<&mut MemorySection> {
+ for section in self.sections_mut() {
+ if let Section::Memory(ref mut section) = *section { return Some(section); }
+ }
+ None
+ }
+
+ /// Functions signatures section reference, if any.
+ pub fn function_section(&self) -> Option<&FunctionSection> {
+ for section in self.sections() {
+ if let &Section::Function(ref sect) = section { return Some(sect); }
+ }
+ None
+ }
+
+ /// Functions signatures section mutable reference, if any.
+ pub fn function_section_mut(&mut self) -> Option<&mut FunctionSection> {
+ for section in self.sections_mut() {
+ if let Section::Function(ref mut sect) = *section { return Some(sect); }
+ }
+ None
+ }
+
+ /// Start section, if any.
+ pub fn start_section(&self) -> Option<u32> {
+ for section in self.sections() {
+ if let &Section::Start(sect) = section { return Some(sect); }
+ }
+ None
+ }
+
+ /// Changes the module's start section.
+ pub fn set_start_section(&mut self, new_start: u32) {
+ for section in self.sections_mut().iter_mut() {
+ if let &mut Section::Start(_sect) = section {
+ *section = Section::Start(new_start);
+ return
+ }
+ }
+ // This should not fail, because we update the existing section above.
+ self.insert_section(Section::Start(new_start)).expect("insert_section should not fail");
+ }
+
+ /// Removes the module's start section.
+ pub fn clear_start_section(&mut self) {
+ let sections = self.sections_mut();
+ let mut rmidx = sections.len();
+ for (index, section) in sections.iter_mut().enumerate() {
+ if let Section::Start(_sect) = section {
+ rmidx = index;
+ break;
+ }
+ }
+ if rmidx < sections.len() {
+ sections.remove(rmidx);
+ }
+ }
+
+ /// Returns an iterator over the module's custom sections
+ pub fn custom_sections(&self) -> impl Iterator<Item=&CustomSection> {
+ self.sections().iter().filter_map(|s| {
+ if let Section::Custom(s) = s {
+ Some(s)
+ } else {
+ None
+ }
+ })
+ }
+
+ /// Sets the payload associated with the given custom section, or adds a new custom section,
+ /// as appropriate.
+ pub fn set_custom_section(&mut self, name: impl Into<String>, payload: Vec<u8>) {
+ let name: String = name.into();
+ for section in self.sections_mut() {
+ if let &mut Section::Custom(ref mut sect) = section {
+ if sect.name() == name {
+ *sect = CustomSection::new(name, payload);
+ return
+ }
+ }
+ }
+ self.sections_mut().push(Section::Custom(CustomSection::new(name, payload)));
+ }
+
+ /// Removes the given custom section, if it exists.
+ /// Returns the removed section if it existed, or None otherwise.
+ pub fn clear_custom_section(&mut self, name: impl AsRef<str>) -> Option<CustomSection> {
+ let name: &str = name.as_ref();
+
+ let sections = self.sections_mut();
+
+ for i in 0..sections.len() {
+ let mut remove = false;
+ if let Section::Custom(ref sect) = sections[i] {
+ if sect.name() == name {
+ remove = true;
+ }
+ }
+
+ if remove {
+ let removed = sections.remove(i);
+ match removed {
+ Section::Custom(sect) => return Some(sect),
+ _ => unreachable!(), // This is the section we just matched on, so...
+ }
+ }
+ }
+ None
+ }
+
+ /// True if a name section is present.
+ ///
+ /// NOTE: this can return true even if the section was not parsed, hence `names_section()` may return `None`
+ /// even if this returns `true`
+ pub fn has_names_section(&self) -> bool {
+ self.sections().iter().any(|e| {
+ match e {
+ // The default case, when the section was not parsed
+ Section::Custom(custom) => custom.name() == "name",
+ // This is the case, when the section was parsed
+ Section::Name(_) => true,
+ _ => false,
+ }
+ })
+ }
+
+	/// Names section reference, if any.
+	///
+	/// NOTE: the name section is not parsed by default, so `names_section` can return `None`
+	/// even if a name section exists. Call `parse_names` to parse it.
+ pub fn names_section(&self) -> Option<&NameSection> {
+ for section in self.sections() {
+ if let Section::Name(ref sect) = *section { return Some(sect); }
+ }
+ None
+ }
+
+	/// Names section mutable reference, if any.
+	///
+	/// NOTE: the name section is not parsed by default, so `names_section_mut` can return `None`
+	/// even if a name section exists. Call `parse_names` to parse it.
+ pub fn names_section_mut(&mut self) -> Option<&mut NameSection> {
+ for section in self.sections_mut() {
+ if let Section::Name(ref mut sect) = *section { return Some(sect); }
+ }
+ None
+ }
+
+ /// Try to parse name section in place.
+ ///
+	/// Custom sections with the "name" header are converted into name sections in place.
+	/// If any of them fail to decode, the `Err` variant is returned with the list of
+	/// (index, Error) tuples of the failed sections.
+ pub fn parse_names(mut self) -> Result<Self, (Vec<(usize, Error)>, Self)> {
+ let mut parse_errors = Vec::new();
+
+ for i in 0..self.sections.len() {
+ if let Some(name_section) = {
+ let section = self.sections.get(i).expect("cannot fail because i in range 0..len; qed");
+ if let Section::Custom(ref custom) = *section {
+ if custom.name() == "name" {
+ let mut rdr = io::Cursor::new(custom.payload());
+ let name_section = match NameSection::deserialize(&self, &mut rdr) {
+ Ok(ns) => ns,
+ Err(e) => { parse_errors.push((i, e)); continue; }
+ };
+ Some(name_section)
+ } else {
+ None
+ }
+ } else { None }
+ } {
+ // todo: according to the spec a Wasm binary can contain only one name section
+ *self.sections.get_mut(i).expect("cannot fail because i in range 0..len; qed") = Section::Name(name_section);
+ }
+ }
+
+ if parse_errors.len() > 0 {
+ Err((parse_errors, self))
+ } else {
+ Ok(self)
+ }
+ }
+
+ /// Try to parse reloc section in place.
+ ///
+	/// Custom sections whose name starts with "reloc." are converted into reloc sections in place.
+	/// If any of them fail to decode, the `Err` variant is returned with the list of
+	/// (index, Error) tuples of the failed sections.
+ pub fn parse_reloc(mut self) -> Result<Self, (Vec<(usize, Error)>, Self)> {
+ let mut parse_errors = Vec::new();
+
+ for (i, section) in self.sections.iter_mut().enumerate() {
+ if let Some(relocation_section) = {
+ if let Section::Custom(ref custom) = *section {
+ if custom.name().starts_with("reloc.") {
+ let mut rdr = io::Cursor::new(custom.payload());
+ let reloc_section = match RelocSection::deserialize(custom.name().to_owned(), &mut rdr) {
+ Ok(reloc_section) => reloc_section,
+ Err(e) => { parse_errors.push((i, e)); continue; }
+ };
+ if rdr.position() != custom.payload().len() {
+ parse_errors.push((i, io::Error::InvalidData.into()));
+ continue;
+ }
+ Some(Section::Reloc(reloc_section))
+ }
+ else {
+ None
+ }
+ }
+ else {
+ None
+ }
+ } {
+ *section = relocation_section;
+ }
+ }
+
+ if parse_errors.len() > 0 {
+ Err((parse_errors, self))
+ } else {
+ Ok(self)
+ }
+ }
+
+ /// Count imports by provided type.
+ pub fn import_count(&self, count_type: ImportCountType) -> usize {
+ self.import_section()
+ .map(|is|
+ is.entries().iter().filter(|import| match (count_type, *import.external()) {
+ (ImportCountType::Function, External::Function(_)) => true,
+ (ImportCountType::Global, External::Global(_)) => true,
+ (ImportCountType::Table, External::Table(_)) => true,
+ (ImportCountType::Memory, External::Memory(_)) => true,
+ _ => false
+ }).count())
+ .unwrap_or(0)
+ }
+
+ /// Query functions space.
+ pub fn functions_space(&self) -> usize {
+ self.import_count(ImportCountType::Function) +
+ self.function_section().map(|fs| fs.entries().len()).unwrap_or(0)
+ }
+
+ /// Query globals space.
+ pub fn globals_space(&self) -> usize {
+ self.import_count(ImportCountType::Global) +
+ self.global_section().map(|gs| gs.entries().len()).unwrap_or(0)
+ }
+
+ /// Query table space.
+ pub fn table_space(&self) -> usize {
+ self.import_count(ImportCountType::Table) +
+ self.table_section().map(|ts| ts.entries().len()).unwrap_or(0)
+ }
+
+ /// Query memory space.
+ pub fn memory_space(&self) -> usize {
+ self.import_count(ImportCountType::Memory) +
+ self.memory_section().map(|ms| ms.entries().len()).unwrap_or(0)
+ }
+}
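+
+// Illustrative sketches (not part of the upstream crate) exercising the section
+// bookkeeping helpers of `Module` defined above. They assume the `Section::Start`
+// variant and `CustomSection::payload()` from the `section` module.
+#[cfg(test)]
+mod section_bookkeeping_examples {
+	use super::*;
+
+	#[test]
+	fn insert_section_keeps_order_and_rejects_duplicates() {
+		// A start section may appear at most once; a second insert is reported as
+		// `DuplicatedSections` instead of silently replacing the first one.
+		let mut module = Module::default();
+		assert!(module.insert_section(Section::Start(0)).is_ok());
+		assert!(module.insert_section(Section::Start(1)).is_err());
+		assert_eq!(module.start_section(), Some(0));
+	}
+
+	#[test]
+	fn custom_sections_can_be_set_and_cleared() {
+		// Custom sections are addressed by name; clearing returns the removed section.
+		let mut module = Module::default();
+		module.set_custom_section("metadata", vec![1, 2, 3]);
+		assert_eq!(module.custom_sections().count(), 1);
+		let removed = module.clear_custom_section("metadata").expect("the section was just added");
+		assert_eq!(removed.payload(), &[1u8, 2, 3]);
+		assert!(module.custom_sections().next().is_none());
+	}
+
+	#[test]
+	fn empty_module_is_just_magic_and_version() {
+		// A default module serializes to the 4-byte magic "\0asm" followed by version 1.
+		let bytes = Module::default().to_bytes().expect("encoding should succeed");
+		assert_eq!(bytes, vec![0x00u8, 0x61, 0x73, 0x6d, 0x01, 0x00, 0x00, 0x00]);
+		assert_eq!(Module::from_bytes(&bytes).expect("decoding should succeed").version(), 1);
+	}
+}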
+
+impl Deserialize for Module {
+ type Error = super::Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ let mut sections = Vec::new();
+
+ let mut magic = [0u8; 4];
+ reader.read(&mut magic)?;
+ if magic != WASM_MAGIC_NUMBER {
+ return Err(Error::InvalidMagic);
+ }
+
+ let version: u32 = Uint32::deserialize(reader)?.into();
+
+ if version != 1 {
+ return Err(Error::UnsupportedVersion(version));
+ }
+
+ let mut last_section_order = 0;
+
+ loop {
+ match Section::deserialize(reader) {
+ Err(Error::UnexpectedEof) => { break; },
+ Err(e) => { return Err(e) },
+ Ok(section) => {
+ if section.order() != 0 {
+ if last_section_order > section.order() {
+ return Err(Error::SectionsOutOfOrder);
+ } else if last_section_order == section.order() {
+ return Err(Error::DuplicatedSections(last_section_order));
+ }
+ last_section_order = section.order();
+ }
+ sections.push(section);
+ }
+ }
+ }
+
+ let module = Module {
+ magic: u32::from_le_bytes(magic),
+ version: version,
+ sections: sections,
+ };
+
+ if module.code_section().map(|cs| cs.bodies().len()).unwrap_or(0) !=
+ module.function_section().map(|fs| fs.entries().len()).unwrap_or(0)
+ {
+ return Err(Error::InconsistentCode);
+ }
+
+ Ok(module)
+ }
+}
+
+impl Serialize for Module {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, w: &mut W) -> Result<(), Self::Error> {
+ Uint32::from(self.magic).serialize(w)?;
+ Uint32::from(self.version).serialize(w)?;
+ for section in self.sections.into_iter() {
+ // todo: according to the spec the name section should appear after the data section
+ section.serialize(w)?;
+ }
+ Ok(())
+ }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq)]
+struct PeekSection<'a> {
+ cursor: usize,
+ region: &'a [u8],
+}
+
+impl<'a> io::Read for PeekSection<'a> {
+ fn read(&mut self, buf: &mut [u8]) -> io::Result<()> {
+ let available = cmp::min(buf.len(), self.region.len() - self.cursor);
+ if available < buf.len() {
+ return Err(io::Error::UnexpectedEof);
+ }
+
+ let range = self.cursor..self.cursor + buf.len();
+ buf.copy_from_slice(&self.region[range]);
+
+ self.cursor += available;
+ Ok(())
+ }
+}
+
+/// Returns the size of the module in the provided buffer.
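+///
+/// A short sketch (assuming `buf` holds a serialized module, possibly with trailing bytes):
+///
+/// ```ignore
+/// let module_len = peek_size(&buf);
+/// let module_bytes = &buf[..module_len];
+/// ```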
+pub fn peek_size(source: &[u8]) -> usize {
+ if source.len() < 9 {
+ return 0;
+ }
+
+ let mut cursor = 8;
+ loop {
+ let (new_cursor, section_id, section_len) = {
+ let mut peek_section = PeekSection { cursor: 0, region: &source[cursor..] };
+ let section_id: u8 = match super::VarUint7::deserialize(&mut peek_section) {
+ Ok(res) => res.into(),
+ Err(_) => { break; },
+ };
+ let section_len: u32 = match super::VarUint32::deserialize(&mut peek_section) {
+ Ok(res) => res.into(),
+ Err(_) => { break; },
+ };
+
+ (peek_section.cursor, section_id, section_len)
+ };
+
+ if section_id <= 11 && section_len > 0 {
+ let next_cursor = cursor + new_cursor + section_len as usize;
+ if next_cursor > source.len() {
+ break;
+ } else if next_cursor == source.len() {
+ cursor = next_cursor;
+ break;
+ }
+ cursor = next_cursor;
+ } else {
+ break;
+ }
+ }
+
+ cursor
+}
+
+#[cfg(test)]
+mod integration_tests {
+ use super::super::{deserialize_file, serialize, deserialize_buffer, Section, TypeSection, FunctionSection, ExportSection, CodeSection};
+ use super::Module;
+
+ #[test]
+ fn hello() {
+ let module = deserialize_file("./res/cases/v1/hello.wasm").expect("Should be deserialized");
+
+ assert_eq!(module.version(), 1);
+ assert_eq!(module.sections().len(), 8);
+ }
+
+ #[test]
+ fn serde() {
+ let module = deserialize_file("./res/cases/v1/test5.wasm").expect("Should be deserialized");
+ let buf = serialize(module).expect("serialization to succeed");
+
+ let module_new: Module = deserialize_buffer(&buf).expect("deserialization to succeed");
+ let module_old = deserialize_file("./res/cases/v1/test5.wasm").expect("Should be deserialized");
+
+ assert_eq!(module_old.sections().len(), module_new.sections().len());
+ }
+
+ #[test]
+ fn serde_type() {
+ let mut module = deserialize_file("./res/cases/v1/test5.wasm").expect("Should be deserialized");
+ module.sections_mut().retain(|x| {
+ if let &Section::Type(_) = x { true } else { false }
+ });
+
+ let buf = serialize(module).expect("serialization to succeed");
+
+ let module_new: Module = deserialize_buffer(&buf).expect("deserialization to succeed");
+ let module_old = deserialize_file("./res/cases/v1/test5.wasm").expect("Should be deserialized");
+ assert_eq!(
+ module_old.type_section().expect("type section exists").types().len(),
+ module_new.type_section().expect("type section exists").types().len(),
+ "There should be equal amount of types before and after serialization"
+ );
+ }
+
+ #[test]
+ fn serde_import() {
+ let mut module = deserialize_file("./res/cases/v1/test5.wasm").expect("Should be deserialized");
+ module.sections_mut().retain(|x| {
+ if let &Section::Import(_) = x { true } else { false }
+ });
+
+ let buf = serialize(module).expect("serialization to succeed");
+
+ let module_new: Module = deserialize_buffer(&buf).expect("deserialization to succeed");
+ let module_old = deserialize_file("./res/cases/v1/test5.wasm").expect("Should be deserialized");
+ assert_eq!(
+ module_old.import_section().expect("import section exists").entries().len(),
+ module_new.import_section().expect("import section exists").entries().len(),
+ "There should be equal amount of import entries before and after serialization"
+ );
+ }
+
+ #[test]
+ fn serde_code() {
+ let mut module = deserialize_file("./res/cases/v1/test5.wasm").expect("Should be deserialized");
+ module.sections_mut().retain(|x| {
+ if let &Section::Code(_) = x { return true }
+ if let &Section::Function(_) = x { true } else { false }
+ });
+
+ let buf = serialize(module).expect("serialization to succeed");
+
+ let module_new: Module = deserialize_buffer(&buf).expect("deserialization to succeed");
+ let module_old = deserialize_file("./res/cases/v1/test5.wasm").expect("Should be deserialized");
+ assert_eq!(
+ module_old.code_section().expect("code section exists").bodies().len(),
+ module_new.code_section().expect("code section exists").bodies().len(),
+ "There should be equal amount of function bodies before and after serialization"
+ );
+ }
+
+ #[test]
+ fn const_() {
+ use super::super::Instruction::*;
+
+ let module = deserialize_file("./res/cases/v1/const.wasm").expect("Should be deserialized");
+ let func = &module.code_section().expect("Code section to exist").bodies()[0];
+ assert_eq!(func.code().elements().len(), 20);
+
+ assert_eq!(I64Const(9223372036854775807), func.code().elements()[0]);
+ assert_eq!(I64Const(-9223372036854775808), func.code().elements()[1]);
+ assert_eq!(I64Const(-1152894205662152753), func.code().elements()[2]);
+ assert_eq!(I64Const(-8192), func.code().elements()[3]);
+ assert_eq!(I32Const(1024), func.code().elements()[4]);
+ assert_eq!(I32Const(2048), func.code().elements()[5]);
+ assert_eq!(I32Const(4096), func.code().elements()[6]);
+ assert_eq!(I32Const(8192), func.code().elements()[7]);
+ assert_eq!(I32Const(16384), func.code().elements()[8]);
+ assert_eq!(I32Const(32767), func.code().elements()[9]);
+ assert_eq!(I32Const(-1024), func.code().elements()[10]);
+ assert_eq!(I32Const(-2048), func.code().elements()[11]);
+ assert_eq!(I32Const(-4096), func.code().elements()[12]);
+ assert_eq!(I32Const(-8192), func.code().elements()[13]);
+ assert_eq!(I32Const(-16384), func.code().elements()[14]);
+ assert_eq!(I32Const(-32768), func.code().elements()[15]);
+ assert_eq!(I32Const(-2147483648), func.code().elements()[16]);
+ assert_eq!(I32Const(2147483647), func.code().elements()[17]);
+ }
+
+ #[test]
+ fn store() {
+ use super::super::Instruction::*;
+
+ let module = deserialize_file("./res/cases/v1/offset.wasm").expect("Should be deserialized");
+ let func = &module.code_section().expect("Code section to exist").bodies()[0];
+
+ assert_eq!(func.code().elements().len(), 5);
+ assert_eq!(I64Store(0, 32), func.code().elements()[2]);
+ }
+
+ #[test]
+ fn peek() {
+ use super::peek_size;
+
+ let module = deserialize_file("./res/cases/v1/test5.wasm").expect("Should be deserialized");
+ let mut buf = serialize(module).expect("serialization to succeed");
+
+ buf.extend_from_slice(&[1, 5, 12, 17]);
+
+ assert_eq!(peek_size(&buf), buf.len() - 4);
+ }
+
+
+ #[test]
+ fn peek_2() {
+ use super::peek_size;
+
+ let module = deserialize_file("./res/cases/v1/offset.wasm").expect("Should be deserialized");
+ let mut buf = serialize(module).expect("serialization to succeed");
+
+ buf.extend_from_slice(&[0, 0, 0, 0, 0, 1, 5, 12, 17]);
+
+ assert_eq!(peek_size(&buf), buf.len() - 9);
+ }
+
+ #[test]
+ fn peek_3() {
+ use super::peek_size;
+
+ let module = deserialize_file("./res/cases/v1/peek_sample.wasm").expect("Should be deserialized");
+ let buf = serialize(module).expect("serialization to succeed");
+
+ assert_eq!(peek_size(&buf), buf.len());
+ }
+
+ #[test]
+ fn module_default_round_trip() {
+ let module1 = Module::default();
+ let buf = serialize(module1).expect("Serialization should succeed");
+
+ let module2: Module = deserialize_buffer(&buf).expect("Deserialization should succeed");
+ assert_eq!(Module::default().magic, module2.magic);
+ }
+
+ #[test]
+ fn names() {
+ let module = deserialize_file("./res/cases/v1/with_names.wasm")
+ .expect("Should be deserialized")
+ .parse_names()
+ .expect("Names to be parsed");
+
+ let mut found_section = false;
+ for section in module.sections() {
+ match *section {
+ Section::Name(ref name_section) => {
+ let function_name_subsection = name_section
+ .functions()
+ .expect("function_name_subsection should be present");
+ assert_eq!(
+ function_name_subsection.names().get(0).expect("Should be entry #0"),
+ "elog"
+ );
+ assert_eq!(
+ function_name_subsection.names().get(11).expect("Should be entry #11"),
+ "_ZN48_$LT$pwasm_token_contract..Endpoint$LT$T$GT$$GT$3new17hc3ace6dea0978cd9E"
+ );
+
+ found_section = true;
+ },
+ _ => {},
+ }
+ }
+
+ assert!(found_section, "Name section should be present in dedicated example");
+ }
+
+ // This test fixture has FLAG_SHARED so it depends on atomics feature.
+ #[test]
+ fn shared_memory_flag() {
+ let module = deserialize_file("./res/cases/v1/varuint1_1.wasm");
+ assert_eq!(module.is_ok(), cfg!(feature="atomics"));
+ }
+
+
+ #[test]
+ fn memory_space() {
+ let module = deserialize_file("./res/cases/v1/two-mems.wasm").expect("failed to deserialize");
+ assert_eq!(module.memory_space(), 2);
+ }
+
+ #[test]
+ fn add_custom_section() {
+ let mut module = deserialize_file("./res/cases/v1/start_mut.wasm").expect("failed to deserialize");
+ assert!(module.custom_sections().next().is_none());
+ module.set_custom_section("mycustomsection".to_string(), vec![1, 2, 3, 4]);
+ {
+ let sections = module.custom_sections().collect::<Vec<_>>();
+ assert_eq!(sections.len(), 1);
+ assert_eq!(sections[0].name(), "mycustomsection");
+ assert_eq!(sections[0].payload(), &[1, 2, 3, 4]);
+ }
+
+ let old_section = module.clear_custom_section("mycustomsection");
+ assert_eq!(old_section.expect("Did not find custom section").payload(), &[1, 2, 3, 4]);
+
+ assert!(module.custom_sections().next().is_none());
+ }
+
+ #[test]
+ fn mut_start() {
+ let mut module = deserialize_file("./res/cases/v1/start_mut.wasm").expect("failed to deserialize");
+ assert_eq!(module.start_section().expect("Did not find any start section"), 1);
+ module.set_start_section(0);
+ assert_eq!(module.start_section().expect("Did not find any start section"), 0);
+ module.clear_start_section();
+ assert_eq!(None, module.start_section());
+ }
+
+ #[test]
+ fn add_start() {
+ let mut module = deserialize_file("./res/cases/v1/start_add.wasm").expect("failed to deserialize");
+ assert!(module.start_section().is_none());
+ module.set_start_section(0);
+ assert_eq!(module.start_section().expect("Did not find any start section"), 0);
+
+ let sections = module.sections().iter().map(|s| s.order()).collect::<Vec<_>>();
+ assert_eq!(sections, vec![1, 2, 3, 6, 7, 8, 9, 11, 12]);
+ }
+
+ #[test]
+ fn add_start_custom() {
+ let mut module = deserialize_file("./res/cases/v1/start_add_custom.wasm").expect("failed to deserialize");
+
+ let sections = module.sections().iter().map(|s| s.order()).collect::<Vec<_>>();
+ assert_eq!(sections, vec![1, 2, 3, 6, 7, 9, 11, 12, 0]);
+
+ assert!(module.start_section().is_none());
+ module.set_start_section(0);
+ assert_eq!(module.start_section().expect("Dorder not find any start section"), 0);
+
+ let sections = module.sections().iter().map(|s| s.order()).collect::<Vec<_>>();
+ assert_eq!(sections, vec![1, 2, 3, 6, 7, 8, 9, 11, 12, 0]);
+ }
+
+ #[test]
+ fn names_section_present() {
+ let mut module = deserialize_file("./res/cases/v1/names.wasm").expect("failed to deserialize");
+
+ // Before parsing
+ assert!(module.names_section().is_none());
+ assert!(module.names_section_mut().is_none());
+ assert!(module.has_names_section());
+
+ // After parsing
+ let mut module = module.parse_names().expect("failed to parse names section");
+ assert!(module.names_section().is_some());
+ assert!(module.names_section_mut().is_some());
+ assert!(module.has_names_section());
+ }
+
+ #[test]
+ fn names_section_not_present() {
+ let mut module = deserialize_file("./res/cases/v1/test.wasm").expect("failed to deserialize");
+
+ // Before parsing
+ assert!(module.names_section().is_none());
+ assert!(module.names_section_mut().is_none());
+ assert!(!module.has_names_section());
+
+ // After parsing
+ let mut module = module.parse_names().expect("failed to parse names section");
+ assert!(module.names_section().is_none());
+ assert!(module.names_section_mut().is_none());
+ assert!(!module.has_names_section());
+ }
+
+ #[test]
+ fn insert_sections() {
+ let mut module = Module::default();
+
+ assert!(module.insert_section(Section::Function(FunctionSection::with_entries(vec![]))).is_ok());
+ // Duplicate.
+ assert!(module.insert_section(Section::Function(FunctionSection::with_entries(vec![]))).is_err());
+
+ assert!(module.insert_section(Section::Type(TypeSection::with_types(vec![]))).is_ok());
+ // Duplicate.
+ assert!(module.insert_section(Section::Type(TypeSection::with_types(vec![]))).is_err());
+
+ assert!(module.insert_section(Section::Export(ExportSection::with_entries(vec![]))).is_ok());
+ // Duplicate.
+ assert!(module.insert_section(Section::Export(ExportSection::with_entries(vec![]))).is_err());
+
+ assert!(module.insert_section(Section::Code(CodeSection::with_bodies(vec![]))).is_ok());
+ // Duplicate.
+ assert!(module.insert_section(Section::Code(CodeSection::with_bodies(vec![]))).is_err());
+
+ // Try serialisation roundtrip to check well-orderedness.
+ let serialized = serialize(module).expect("serialization to succeed");
+ assert!(deserialize_buffer::<Module>(&serialized).is_ok());
+ }
+
+ #[test]
+ fn serialization_roundtrip() {
+ let module = deserialize_file("./res/cases/v1/test.wasm").expect("failed to deserialize");
+ let module_copy = module.clone().to_bytes().expect("failed to serialize");
+ let module_copy = Module::from_bytes(&module_copy).expect("failed to deserialize");
+ assert_eq!(module, module_copy);
+ }
+}
diff --git a/third_party/rust/parity-wasm/src/elements/name_section.rs b/third_party/rust/parity-wasm/src/elements/name_section.rs
new file mode 100644
index 0000000000..700108566d
--- /dev/null
+++ b/third_party/rust/parity-wasm/src/elements/name_section.rs
@@ -0,0 +1,373 @@
+use alloc::{string::String, vec::Vec};
+use crate::io;
+
+use super::{Deserialize, Error, Module, Serialize, VarUint32, VarUint7, Type};
+use super::index_map::IndexMap;
+
+const NAME_TYPE_MODULE: u8 = 0;
+const NAME_TYPE_FUNCTION: u8 = 1;
+const NAME_TYPE_LOCAL: u8 = 2;
+
+/// Debug name information.
+#[derive(Clone, Debug, PartialEq)]
+pub struct NameSection {
+ /// Module name subsection.
+ module: Option<ModuleNameSubsection>,
+
+ /// Function name subsection.
+ functions: Option<FunctionNameSubsection>,
+
+ /// Local name subsection.
+ locals: Option<LocalNameSubsection>,
+}
+
+impl NameSection {
+ /// Creates a new name section.
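+ ///
+ /// For example, a section carrying only a module name (the name is illustrative):
+ ///
+ /// ```ignore
+ /// let names = NameSection::new(Some(ModuleNameSubsection::new("my_module")), None, None);
+ /// ```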
+ pub fn new(module: Option<ModuleNameSubsection>,
+ functions: Option<FunctionNameSubsection>,
+ locals: Option<LocalNameSubsection>) -> Self {
+ Self {
+ module,
+ functions,
+ locals,
+ }
+ }
+
+ /// Module name subsection of this section.
+ pub fn module(&self) -> Option<&ModuleNameSubsection> {
+ self.module.as_ref()
+ }
+
+ /// Module name subsection of this section (mutable).
+ pub fn module_mut(&mut self) -> &mut Option<ModuleNameSubsection> {
+ &mut self.module
+ }
+
+ /// Functions name subsection of this section.
+ pub fn functions(&self) -> Option<&FunctionNameSubsection> {
+ self.functions.as_ref()
+ }
+
+ /// Functions name subsection of this section (mutable).
+ pub fn functions_mut(&mut self) -> &mut Option<FunctionNameSubsection> {
+ &mut self.functions
+ }
+
+ /// Local name subsection of this section.
+ pub fn locals(&self) -> Option<&LocalNameSubsection> {
+ self.locals.as_ref()
+ }
+
+ /// Local name subsection of this section (mutable).
+ pub fn locals_mut(&mut self) -> &mut Option<LocalNameSubsection> {
+ &mut self.locals
+ }
+}
+
+impl NameSection {
+ /// Deserialize a name section.
+ pub fn deserialize<R: io::Read>(
+ module: &Module,
+ rdr: &mut R,
+ ) -> Result<Self, Error> {
+ let mut module_name: Option<ModuleNameSubsection> = None;
+ let mut function_names: Option<FunctionNameSubsection> = None;
+ let mut local_names: Option<LocalNameSubsection> = None;
+
+ loop {
+ let subsection_type: u8 = match VarUint7::deserialize(rdr) {
+ Ok(raw_subsection_type) => raw_subsection_type.into(),
+ // todo: be more selective when detecting that there are no more subsections
+ Err(_) => { break; },
+ };
+
+ // deserialize the section size
+ VarUint32::deserialize(rdr)?;
+
+ match subsection_type {
+ NAME_TYPE_MODULE => {
+ if let Some(_) = module_name {
+ return Err(Error::DuplicatedNameSubsections(NAME_TYPE_MODULE));
+ }
+ module_name = Some(ModuleNameSubsection::deserialize(rdr)?);
+ },
+
+ NAME_TYPE_FUNCTION => {
+ if let Some(_) = function_names {
+ return Err(Error::DuplicatedNameSubsections(NAME_TYPE_FUNCTION));
+ }
+ function_names = Some(FunctionNameSubsection::deserialize(module, rdr)?);
+ },
+
+ NAME_TYPE_LOCAL => {
+ if let Some(_) = local_names {
+ return Err(Error::DuplicatedNameSubsections(NAME_TYPE_LOCAL));
+ }
+ local_names = Some(LocalNameSubsection::deserialize(module, rdr)?);
+ },
+
+ _ => return Err(Error::UnknownNameSubsectionType(subsection_type))
+ };
+ }
+
+ Ok(Self {
+ module: module_name,
+ functions: function_names,
+ locals: local_names,
+ })
+ }
+}
+
+impl Serialize for NameSection {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, wtr: &mut W) -> Result<(), Error> {
+ fn serialize_subsection<W: io::Write>(wtr: &mut W, name_type: u8, name_payload: &Vec<u8>) -> Result<(), Error> {
+ VarUint7::from(name_type).serialize(wtr)?;
+ VarUint32::from(name_payload.len()).serialize(wtr)?;
+ wtr.write(name_payload).map_err(Into::into)
+ }
+
+ if let Some(module_name_subsection) = self.module {
+ let mut buffer = vec![];
+ module_name_subsection.serialize(&mut buffer)?;
+ serialize_subsection(wtr, NAME_TYPE_MODULE, &buffer)?;
+ }
+
+ if let Some(function_name_subsection) = self.functions {
+ let mut buffer = vec![];
+ function_name_subsection.serialize(&mut buffer)?;
+ serialize_subsection(wtr, NAME_TYPE_FUNCTION, &buffer)?;
+ }
+
+ if let Some(local_name_subsection) = self.locals {
+ let mut buffer = vec![];
+ local_name_subsection.serialize(&mut buffer)?;
+ serialize_subsection(wtr, NAME_TYPE_LOCAL, &buffer)?;
+ }
+
+ Ok(())
+ }
+}
+
+/// The name of this module.
+#[derive(Clone, Debug, PartialEq)]
+pub struct ModuleNameSubsection {
+ name: String,
+}
+
+impl ModuleNameSubsection {
+ /// Create a new module name section with the specified name.
+ pub fn new<S: Into<String>>(name: S) -> ModuleNameSubsection {
+ ModuleNameSubsection { name: name.into() }
+ }
+
+ /// The name of this module.
+ pub fn name(&self) -> &str {
+ &self.name
+ }
+
+ /// The name of this module (mutable).
+ pub fn name_mut(&mut self) -> &mut String {
+ &mut self.name
+ }
+}
+
+impl Serialize for ModuleNameSubsection {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, wtr: &mut W) -> Result<(), Error> {
+ self.name.serialize(wtr)
+ }
+}
+
+impl Deserialize for ModuleNameSubsection {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(rdr: &mut R) -> Result<ModuleNameSubsection, Error> {
+ let name = String::deserialize(rdr)?;
+ Ok(ModuleNameSubsection { name })
+ }
+}
+
+/// The names of the functions in this module.
+#[derive(Clone, Debug, Default, PartialEq)]
+pub struct FunctionNameSubsection {
+ names: NameMap,
+}
+
+impl FunctionNameSubsection {
+ /// A map from function indices to names.
+ pub fn names(&self) -> &NameMap {
+ &self.names
+ }
+
+ /// A map from function indices to names (mutable).
+ pub fn names_mut(&mut self) -> &mut NameMap {
+ &mut self.names
+ }
+
+ /// Deserialize names, making sure that all names correspond to functions.
+ pub fn deserialize<R: io::Read>(
+ module: &Module,
+ rdr: &mut R,
+ ) -> Result<FunctionNameSubsection, Error> {
+ let names = IndexMap::deserialize(module.functions_space(), rdr)?;
+ Ok(FunctionNameSubsection { names })
+ }
+}
+
+impl Serialize for FunctionNameSubsection {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, wtr: &mut W) -> Result<(), Error> {
+ self.names.serialize(wtr)
+ }
+}
+
+/// The names of the local variables in this module's functions.
+#[derive(Clone, Debug, Default, PartialEq)]
+pub struct LocalNameSubsection {
+ local_names: IndexMap<NameMap>,
+}
+
+impl LocalNameSubsection {
+ /// A map from function indices to a map from variables indices to names.
+ pub fn local_names(&self) -> &IndexMap<NameMap> {
+ &self.local_names
+ }
+
+ /// A map from function indices to a map from variables indices to names
+ /// (mutable).
+ pub fn local_names_mut(&mut self) -> &mut IndexMap<NameMap> {
+ &mut self.local_names
+ }
+
+ /// Deserialize names, making sure that all names correspond to local
+ /// variables.
+ pub fn deserialize<R: io::Read>(
+ module: &Module,
+ rdr: &mut R,
+ ) -> Result<LocalNameSubsection, Error> {
+ let max_entry_space = module.functions_space();
+
+ let max_signature_args = module
+ .type_section()
+ .map(|ts|
+ ts.types()
+ .iter()
+ .map(|x| { let Type::Function(ref func) = *x; func.params().len() })
+ .max()
+ .unwrap_or(0))
+ .unwrap_or(0);
+
+ let max_locals = module
+ .code_section()
+ .map(|cs| cs.bodies().iter().map(|f| f.locals().len()).max().unwrap_or(0))
+ .unwrap_or(0);
+
+ let max_space = max_signature_args + max_locals;
+
+ let deserialize_locals = |_: u32, rdr: &mut R| IndexMap::deserialize(max_space, rdr);
+
+ let local_names = IndexMap::deserialize_with(
+ max_entry_space,
+ &deserialize_locals,
+ rdr,
+ )?;
+ Ok(LocalNameSubsection { local_names })
+ }
+}
+
+impl Serialize for LocalNameSubsection {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, wtr: &mut W) -> Result<(), Error> {
+ self.local_names.serialize(wtr)
+ }
+}
+
+/// A map from indices to names.
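+///
+/// A small sketch (the index and name below are purely illustrative):
+///
+/// ```ignore
+/// let mut names = NameMap::default();
+/// names.insert(0, "main".to_string());
+/// ```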
+pub type NameMap = IndexMap<String>;
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ // A helper function for the tests. Serialize a name section and return the
+ // resulting bytes (the round-trip deserialization check is still a todo below).
+ fn serialize_test(original: NameSection) -> Vec<u8> {
+ let mut buffer = vec![];
+ original
+ .serialize(&mut buffer)
+ .expect("serialize error");
+ buffer
+ // todo: add deserialization to this test
+ }
+
+ #[test]
+ fn serialize_module_name() {
+ let module_name_subsection = ModuleNameSubsection::new("my_mod");
+ let original = NameSection::new(Some(module_name_subsection), None, None);
+ serialize_test(original.clone());
+ }
+
+ #[test]
+ fn serialize_function_names() {
+ let mut function_name_subsection = FunctionNameSubsection::default();
+ function_name_subsection.names_mut().insert(0, "hello_world".to_string());
+ let name_section = NameSection::new(None, Some(function_name_subsection), None);
+ serialize_test(name_section);
+ }
+
+ #[test]
+ fn serialize_local_names() {
+ let mut local_name_subsection = LocalNameSubsection::default();
+ let mut locals = NameMap::default();
+ locals.insert(0, "msg".to_string());
+ local_name_subsection.local_names_mut().insert(0, locals);
+
+ let name_section = NameSection::new(None, None, Some(local_name_subsection));
+ serialize_test(name_section);
+ }
+
+ #[test]
+ fn serialize_all_subsections() {
+ let module_name_subsection = ModuleNameSubsection::new("ModuleNameSubsection");
+
+ let mut function_name_subsection = FunctionNameSubsection::default();
+ function_name_subsection.names_mut().insert(0, "foo".to_string());
+ function_name_subsection.names_mut().insert(1, "bar".to_string());
+
+ let mut local_name_subsection = LocalNameSubsection::default();
+ let mut locals = NameMap::default();
+ locals.insert(0, "msg1".to_string());
+ locals.insert(1, "msg2".to_string());
+ local_name_subsection.local_names_mut().insert(0, locals);
+
+ let name_section = NameSection::new(Some(module_name_subsection), Some(function_name_subsection), Some(local_name_subsection));
+ serialize_test(name_section);
+ }
+
+ #[test]
+ fn deserialize_local_names() {
+ let module = super::super::deserialize_file("./res/cases/v1/names_with_imports.wasm")
+ .expect("Should be deserialized")
+ .parse_names()
+ .expect("Names to be parsed");
+
+ let name_section = module.names_section().expect("name_section should be present");
+ let local_names = name_section.locals().expect("local_name_section should be present");
+
+ let locals = local_names.local_names().get(0).expect("entry #0 should be present");
+ assert_eq!(
+ locals.get(0).expect("entry #0 should be present"),
+ "abc"
+ );
+
+ let locals = local_names.local_names().get(1).expect("entry #1 should be present");
+ assert_eq!(
+ locals.get(0).expect("entry #0 should be present"),
+ "def"
+ );
+ }
+}
diff --git a/third_party/rust/parity-wasm/src/elements/ops.rs b/third_party/rust/parity-wasm/src/elements/ops.rs
new file mode 100644
index 0000000000..2059338e6e
--- /dev/null
+++ b/third_party/rust/parity-wasm/src/elements/ops.rs
@@ -0,0 +1,2951 @@
+use alloc::{boxed::Box, vec::Vec};
+use crate::io;
+use super::{
+ Serialize, Deserialize, Error,
+ Uint8, VarUint32, CountedList, BlockType,
+ Uint32, Uint64, CountedListWriter,
+ VarInt32, VarInt64,
+};
+use core::fmt;
+
+/// List of instructions (usually inside a block section).
+#[derive(Debug, Clone, PartialEq)]
+pub struct Instructions(Vec<Instruction>);
+
+impl Instructions {
+ /// New list of instructions from vector of instructions.
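+ ///
+ /// A minimal sketch (a well-formed instruction body ends with `Instruction::End`):
+ ///
+ /// ```ignore
+ /// let body = Instructions::new(vec![Instruction::Nop, Instruction::End]);
+ /// ```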
+ pub fn new(elements: Vec<Instruction>) -> Self {
+ Instructions(elements)
+ }
+
+ /// Empty instruction list containing only the `Instruction::End` instruction.
+ pub fn empty() -> Self {
+ Instructions(vec![Instruction::End])
+ }
+
+ /// List of individual instructions.
+ pub fn elements(&self) -> &[Instruction] { &self.0 }
+
+ /// Individual instructions, mutable.
+ pub fn elements_mut(&mut self) -> &mut Vec<Instruction> { &mut self.0 }
+}
+
+impl Deserialize for Instructions {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ let mut instructions = Vec::new();
+ let mut block_count = 1usize;
+
+ loop {
+ let instruction = Instruction::deserialize(reader)?;
+ if instruction.is_terminal() {
+ block_count -= 1;
+ } else if instruction.is_block() {
+ block_count = block_count.checked_add(1).ok_or(Error::Other("too many instructions"))?;
+ }
+
+ instructions.push(instruction);
+ if block_count == 0 {
+ break;
+ }
+ }
+
+ Ok(Instructions(instructions))
+ }
+}
+
+/// Initialization expression.
+#[derive(Debug, Clone, PartialEq)]
+pub struct InitExpr(Vec<Instruction>);
+
+impl InitExpr {
+ /// New initialization expression from instruction list.
+ ///
+ /// `code` must end with the `Instruction::End` instruction!
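+ ///
+ /// A minimal sketch of a constant initializer:
+ ///
+ /// ```ignore
+ /// let init = InitExpr::new(vec![Instruction::I32Const(0), Instruction::End]);
+ /// ```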
+ pub fn new(code: Vec<Instruction>) -> Self {
+ InitExpr(code)
+ }
+
+ /// Empty expression with only `Instruction::End` instruction.
+ pub fn empty() -> Self {
+ InitExpr(vec![Instruction::End])
+ }
+
+ /// List of instructions used in the expression.
+ pub fn code(&self) -> &[Instruction] {
+ &self.0
+ }
+
+ /// List of instructions used in the expression (mutable).
+ pub fn code_mut(&mut self) -> &mut Vec<Instruction> {
+ &mut self.0
+ }
+}
+
+impl Deserialize for InitExpr {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ let mut instructions = Vec::new();
+
+ loop {
+ let instruction = Instruction::deserialize(reader)?;
+ let is_terminal = instruction.is_terminal();
+ instructions.push(instruction);
+ if is_terminal {
+ break;
+ }
+ }
+
+ Ok(InitExpr(instructions))
+ }
+}
+
+/// Instruction.
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+#[allow(missing_docs)]
+pub enum Instruction {
+ Unreachable,
+ Nop,
+ Block(BlockType),
+ Loop(BlockType),
+ If(BlockType),
+ Else,
+ End,
+ Br(u32),
+ BrIf(u32),
+ BrTable(Box<BrTableData>),
+ Return,
+
+ Call(u32),
+ CallIndirect(u32, u8),
+
+ Drop,
+ Select,
+
+ GetLocal(u32),
+ SetLocal(u32),
+ TeeLocal(u32),
+ GetGlobal(u32),
+ SetGlobal(u32),
+
+ // All store/load instructions operate with 'memory immediates',
+ // which are represented here as a (flag, offset) tuple.
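+ // For example, `I32Load(2, 16)` would carry an alignment flag of 2 (i.e. 4-byte
+ // alignment, as the flag is the base-2 logarithm of the alignment) and a constant
+ // offset of 16; the concrete values here are purely illustrative.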
+ I32Load(u32, u32),
+ I64Load(u32, u32),
+ F32Load(u32, u32),
+ F64Load(u32, u32),
+ I32Load8S(u32, u32),
+ I32Load8U(u32, u32),
+ I32Load16S(u32, u32),
+ I32Load16U(u32, u32),
+ I64Load8S(u32, u32),
+ I64Load8U(u32, u32),
+ I64Load16S(u32, u32),
+ I64Load16U(u32, u32),
+ I64Load32S(u32, u32),
+ I64Load32U(u32, u32),
+ I32Store(u32, u32),
+ I64Store(u32, u32),
+ F32Store(u32, u32),
+ F64Store(u32, u32),
+ I32Store8(u32, u32),
+ I32Store16(u32, u32),
+ I64Store8(u32, u32),
+ I64Store16(u32, u32),
+ I64Store32(u32, u32),
+
+ CurrentMemory(u8),
+ GrowMemory(u8),
+
+ I32Const(i32),
+ I64Const(i64),
+ F32Const(u32),
+ F64Const(u64),
+
+ I32Eqz,
+ I32Eq,
+ I32Ne,
+ I32LtS,
+ I32LtU,
+ I32GtS,
+ I32GtU,
+ I32LeS,
+ I32LeU,
+ I32GeS,
+ I32GeU,
+
+ I64Eqz,
+ I64Eq,
+ I64Ne,
+ I64LtS,
+ I64LtU,
+ I64GtS,
+ I64GtU,
+ I64LeS,
+ I64LeU,
+ I64GeS,
+ I64GeU,
+
+ F32Eq,
+ F32Ne,
+ F32Lt,
+ F32Gt,
+ F32Le,
+ F32Ge,
+
+ F64Eq,
+ F64Ne,
+ F64Lt,
+ F64Gt,
+ F64Le,
+ F64Ge,
+
+ I32Clz,
+ I32Ctz,
+ I32Popcnt,
+ I32Add,
+ I32Sub,
+ I32Mul,
+ I32DivS,
+ I32DivU,
+ I32RemS,
+ I32RemU,
+ I32And,
+ I32Or,
+ I32Xor,
+ I32Shl,
+ I32ShrS,
+ I32ShrU,
+ I32Rotl,
+ I32Rotr,
+
+ I64Clz,
+ I64Ctz,
+ I64Popcnt,
+ I64Add,
+ I64Sub,
+ I64Mul,
+ I64DivS,
+ I64DivU,
+ I64RemS,
+ I64RemU,
+ I64And,
+ I64Or,
+ I64Xor,
+ I64Shl,
+ I64ShrS,
+ I64ShrU,
+ I64Rotl,
+ I64Rotr,
+ F32Abs,
+ F32Neg,
+ F32Ceil,
+ F32Floor,
+ F32Trunc,
+ F32Nearest,
+ F32Sqrt,
+ F32Add,
+ F32Sub,
+ F32Mul,
+ F32Div,
+ F32Min,
+ F32Max,
+ F32Copysign,
+ F64Abs,
+ F64Neg,
+ F64Ceil,
+ F64Floor,
+ F64Trunc,
+ F64Nearest,
+ F64Sqrt,
+ F64Add,
+ F64Sub,
+ F64Mul,
+ F64Div,
+ F64Min,
+ F64Max,
+ F64Copysign,
+
+ I32WrapI64,
+ I32TruncSF32,
+ I32TruncUF32,
+ I32TruncSF64,
+ I32TruncUF64,
+ I64ExtendSI32,
+ I64ExtendUI32,
+ I64TruncSF32,
+ I64TruncUF32,
+ I64TruncSF64,
+ I64TruncUF64,
+ F32ConvertSI32,
+ F32ConvertUI32,
+ F32ConvertSI64,
+ F32ConvertUI64,
+ F32DemoteF64,
+ F64ConvertSI32,
+ F64ConvertUI32,
+ F64ConvertSI64,
+ F64ConvertUI64,
+ F64PromoteF32,
+
+ I32ReinterpretF32,
+ I64ReinterpretF64,
+ F32ReinterpretI32,
+ F64ReinterpretI64,
+
+ #[cfg(feature="atomics")]
+ Atomics(AtomicsInstruction),
+
+ #[cfg(feature="simd")]
+ Simd(SimdInstruction),
+
+ #[cfg(feature="sign_ext")]
+ SignExt(SignExtInstruction),
+
+ #[cfg(feature="bulk")]
+ Bulk(BulkInstruction),
+}
+
+#[allow(missing_docs)]
+#[cfg(feature="atomics")]
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+pub enum AtomicsInstruction {
+ AtomicWake(MemArg),
+ I32AtomicWait(MemArg),
+ I64AtomicWait(MemArg),
+
+ I32AtomicLoad(MemArg),
+ I64AtomicLoad(MemArg),
+ I32AtomicLoad8u(MemArg),
+ I32AtomicLoad16u(MemArg),
+ I64AtomicLoad8u(MemArg),
+ I64AtomicLoad16u(MemArg),
+ I64AtomicLoad32u(MemArg),
+ I32AtomicStore(MemArg),
+ I64AtomicStore(MemArg),
+ I32AtomicStore8u(MemArg),
+ I32AtomicStore16u(MemArg),
+ I64AtomicStore8u(MemArg),
+ I64AtomicStore16u(MemArg),
+ I64AtomicStore32u(MemArg),
+
+ I32AtomicRmwAdd(MemArg),
+ I64AtomicRmwAdd(MemArg),
+ I32AtomicRmwAdd8u(MemArg),
+ I32AtomicRmwAdd16u(MemArg),
+ I64AtomicRmwAdd8u(MemArg),
+ I64AtomicRmwAdd16u(MemArg),
+ I64AtomicRmwAdd32u(MemArg),
+
+ I32AtomicRmwSub(MemArg),
+ I64AtomicRmwSub(MemArg),
+ I32AtomicRmwSub8u(MemArg),
+ I32AtomicRmwSub16u(MemArg),
+ I64AtomicRmwSub8u(MemArg),
+ I64AtomicRmwSub16u(MemArg),
+ I64AtomicRmwSub32u(MemArg),
+
+ I32AtomicRmwAnd(MemArg),
+ I64AtomicRmwAnd(MemArg),
+ I32AtomicRmwAnd8u(MemArg),
+ I32AtomicRmwAnd16u(MemArg),
+ I64AtomicRmwAnd8u(MemArg),
+ I64AtomicRmwAnd16u(MemArg),
+ I64AtomicRmwAnd32u(MemArg),
+
+ I32AtomicRmwOr(MemArg),
+ I64AtomicRmwOr(MemArg),
+ I32AtomicRmwOr8u(MemArg),
+ I32AtomicRmwOr16u(MemArg),
+ I64AtomicRmwOr8u(MemArg),
+ I64AtomicRmwOr16u(MemArg),
+ I64AtomicRmwOr32u(MemArg),
+
+ I32AtomicRmwXor(MemArg),
+ I64AtomicRmwXor(MemArg),
+ I32AtomicRmwXor8u(MemArg),
+ I32AtomicRmwXor16u(MemArg),
+ I64AtomicRmwXor8u(MemArg),
+ I64AtomicRmwXor16u(MemArg),
+ I64AtomicRmwXor32u(MemArg),
+
+ I32AtomicRmwXchg(MemArg),
+ I64AtomicRmwXchg(MemArg),
+ I32AtomicRmwXchg8u(MemArg),
+ I32AtomicRmwXchg16u(MemArg),
+ I64AtomicRmwXchg8u(MemArg),
+ I64AtomicRmwXchg16u(MemArg),
+ I64AtomicRmwXchg32u(MemArg),
+
+ I32AtomicRmwCmpxchg(MemArg),
+ I64AtomicRmwCmpxchg(MemArg),
+ I32AtomicRmwCmpxchg8u(MemArg),
+ I32AtomicRmwCmpxchg16u(MemArg),
+ I64AtomicRmwCmpxchg8u(MemArg),
+ I64AtomicRmwCmpxchg16u(MemArg),
+ I64AtomicRmwCmpxchg32u(MemArg),
+}
+
+#[allow(missing_docs)]
+#[cfg(feature="simd")]
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+pub enum SimdInstruction {
+ V128Const(Box<[u8; 16]>),
+ V128Load(MemArg),
+ V128Store(MemArg),
+ I8x16Splat,
+ I16x8Splat,
+ I32x4Splat,
+ I64x2Splat,
+ F32x4Splat,
+ F64x2Splat,
+ I8x16ExtractLaneS(u8),
+ I8x16ExtractLaneU(u8),
+ I16x8ExtractLaneS(u8),
+ I16x8ExtractLaneU(u8),
+ I32x4ExtractLane(u8),
+ I64x2ExtractLane(u8),
+ F32x4ExtractLane(u8),
+ F64x2ExtractLane(u8),
+ I8x16ReplaceLane(u8),
+ I16x8ReplaceLane(u8),
+ I32x4ReplaceLane(u8),
+ I64x2ReplaceLane(u8),
+ F32x4ReplaceLane(u8),
+ F64x2ReplaceLane(u8),
+ V8x16Shuffle(Box<[u8; 16]>),
+ I8x16Add,
+ I16x8Add,
+ I32x4Add,
+ I64x2Add,
+ I8x16Sub,
+ I16x8Sub,
+ I32x4Sub,
+ I64x2Sub,
+ I8x16Mul,
+ I16x8Mul,
+ I32x4Mul,
+ // I64x2Mul,
+ I8x16Neg,
+ I16x8Neg,
+ I32x4Neg,
+ I64x2Neg,
+ I8x16AddSaturateS,
+ I8x16AddSaturateU,
+ I16x8AddSaturateS,
+ I16x8AddSaturateU,
+ I8x16SubSaturateS,
+ I8x16SubSaturateU,
+ I16x8SubSaturateS,
+ I16x8SubSaturateU,
+ I8x16Shl,
+ I16x8Shl,
+ I32x4Shl,
+ I64x2Shl,
+ I8x16ShrS,
+ I8x16ShrU,
+ I16x8ShrS,
+ I16x8ShrU,
+ I32x4ShrS,
+ I32x4ShrU,
+ I64x2ShrS,
+ I64x2ShrU,
+ V128And,
+ V128Or,
+ V128Xor,
+ V128Not,
+ V128Bitselect,
+ I8x16AnyTrue,
+ I16x8AnyTrue,
+ I32x4AnyTrue,
+ I64x2AnyTrue,
+ I8x16AllTrue,
+ I16x8AllTrue,
+ I32x4AllTrue,
+ I64x2AllTrue,
+ I8x16Eq,
+ I16x8Eq,
+ I32x4Eq,
+ // I64x2Eq,
+ F32x4Eq,
+ F64x2Eq,
+ I8x16Ne,
+ I16x8Ne,
+ I32x4Ne,
+ // I64x2Ne,
+ F32x4Ne,
+ F64x2Ne,
+ I8x16LtS,
+ I8x16LtU,
+ I16x8LtS,
+ I16x8LtU,
+ I32x4LtS,
+ I32x4LtU,
+ // I64x2LtS,
+ // I64x2LtU,
+ F32x4Lt,
+ F64x2Lt,
+ I8x16LeS,
+ I8x16LeU,
+ I16x8LeS,
+ I16x8LeU,
+ I32x4LeS,
+ I32x4LeU,
+ // I64x2LeS,
+ // I64x2LeU,
+ F32x4Le,
+ F64x2Le,
+ I8x16GtS,
+ I8x16GtU,
+ I16x8GtS,
+ I16x8GtU,
+ I32x4GtS,
+ I32x4GtU,
+ // I64x2GtS,
+ // I64x2GtU,
+ F32x4Gt,
+ F64x2Gt,
+ I8x16GeS,
+ I8x16GeU,
+ I16x8GeS,
+ I16x8GeU,
+ I32x4GeS,
+ I32x4GeU,
+ // I64x2GeS,
+ // I64x2GeU,
+ F32x4Ge,
+ F64x2Ge,
+ F32x4Neg,
+ F64x2Neg,
+ F32x4Abs,
+ F64x2Abs,
+ F32x4Min,
+ F64x2Min,
+ F32x4Max,
+ F64x2Max,
+ F32x4Add,
+ F64x2Add,
+ F32x4Sub,
+ F64x2Sub,
+ F32x4Div,
+ F64x2Div,
+ F32x4Mul,
+ F64x2Mul,
+ F32x4Sqrt,
+ F64x2Sqrt,
+ F32x4ConvertSI32x4,
+ F32x4ConvertUI32x4,
+ F64x2ConvertSI64x2,
+ F64x2ConvertUI64x2,
+ I32x4TruncSF32x4Sat,
+ I32x4TruncUF32x4Sat,
+ I64x2TruncSF64x2Sat,
+ I64x2TruncUF64x2Sat,
+}
+
+#[allow(missing_docs)]
+#[cfg(feature="sign_ext")]
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+pub enum SignExtInstruction {
+ I32Extend8S,
+ I32Extend16S,
+ I64Extend8S,
+ I64Extend16S,
+ I64Extend32S,
+}
+
+#[allow(missing_docs)]
+#[cfg(feature="bulk")]
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+pub enum BulkInstruction {
+ MemoryInit(u32),
+ MemoryDrop(u32),
+ MemoryCopy,
+ MemoryFill,
+ TableInit(u32),
+ TableDrop(u32),
+ TableCopy,
+}
+
+#[cfg(any(feature="simd", feature="atomics"))]
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+#[allow(missing_docs)]
+pub struct MemArg {
+ pub align: u8,
+ pub offset: u32,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+#[allow(missing_docs)]
+pub struct BrTableData {
+ pub table: Box<[u32]>,
+ pub default: u32,
+}
+
+impl Instruction {
+ /// Returns `true` if this instruction starts a new block (which should end with a terminal instruction).
+ pub fn is_block(&self) -> bool {
+ match self {
+ &Instruction::Block(_) | &Instruction::Loop(_) | &Instruction::If(_) => true,
+ _ => false,
+ }
+ }
+
+ /// Returns `true` if this instruction terminates the current instruction sequence.
+ ///
+ /// `true` for `Instruction::End`
+ pub fn is_terminal(&self) -> bool {
+ match self {
+ &Instruction::End => true,
+ _ => false,
+ }
+ }
+}
+
+#[allow(missing_docs)]
+pub mod opcodes {
+ pub const UNREACHABLE: u8 = 0x00;
+ pub const NOP: u8 = 0x01;
+ pub const BLOCK: u8 = 0x02;
+ pub const LOOP: u8 = 0x03;
+ pub const IF: u8 = 0x04;
+ pub const ELSE: u8 = 0x05;
+ pub const END: u8 = 0x0b;
+ pub const BR: u8 = 0x0c;
+ pub const BRIF: u8 = 0x0d;
+ pub const BRTABLE: u8 = 0x0e;
+ pub const RETURN: u8 = 0x0f;
+ pub const CALL: u8 = 0x10;
+ pub const CALLINDIRECT: u8 = 0x11;
+ pub const DROP: u8 = 0x1a;
+ pub const SELECT: u8 = 0x1b;
+ pub const GETLOCAL: u8 = 0x20;
+ pub const SETLOCAL: u8 = 0x21;
+ pub const TEELOCAL: u8 = 0x22;
+ pub const GETGLOBAL: u8 = 0x23;
+ pub const SETGLOBAL: u8 = 0x24;
+ pub const I32LOAD: u8 = 0x28;
+ pub const I64LOAD: u8 = 0x29;
+ pub const F32LOAD: u8 = 0x2a;
+ pub const F64LOAD: u8 = 0x2b;
+ pub const I32LOAD8S: u8 = 0x2c;
+ pub const I32LOAD8U: u8 = 0x2d;
+ pub const I32LOAD16S: u8 = 0x2e;
+ pub const I32LOAD16U: u8 = 0x2f;
+ pub const I64LOAD8S: u8 = 0x30;
+ pub const I64LOAD8U: u8 = 0x31;
+ pub const I64LOAD16S: u8 = 0x32;
+ pub const I64LOAD16U: u8 = 0x33;
+ pub const I64LOAD32S: u8 = 0x34;
+ pub const I64LOAD32U: u8 = 0x35;
+ pub const I32STORE: u8 = 0x36;
+ pub const I64STORE: u8 = 0x37;
+ pub const F32STORE: u8 = 0x38;
+ pub const F64STORE: u8 = 0x39;
+ pub const I32STORE8: u8 = 0x3a;
+ pub const I32STORE16: u8 = 0x3b;
+ pub const I64STORE8: u8 = 0x3c;
+ pub const I64STORE16: u8 = 0x3d;
+ pub const I64STORE32: u8 = 0x3e;
+ pub const CURRENTMEMORY: u8 = 0x3f;
+ pub const GROWMEMORY: u8 = 0x40;
+ pub const I32CONST: u8 = 0x41;
+ pub const I64CONST: u8 = 0x42;
+ pub const F32CONST: u8 = 0x43;
+ pub const F64CONST: u8 = 0x44;
+ pub const I32EQZ: u8 = 0x45;
+ pub const I32EQ: u8 = 0x46;
+ pub const I32NE: u8 = 0x47;
+ pub const I32LTS: u8 = 0x48;
+ pub const I32LTU: u8 = 0x49;
+ pub const I32GTS: u8 = 0x4a;
+ pub const I32GTU: u8 = 0x4b;
+ pub const I32LES: u8 = 0x4c;
+ pub const I32LEU: u8 = 0x4d;
+ pub const I32GES: u8 = 0x4e;
+ pub const I32GEU: u8 = 0x4f;
+ pub const I64EQZ: u8 = 0x50;
+ pub const I64EQ: u8 = 0x51;
+ pub const I64NE: u8 = 0x52;
+ pub const I64LTS: u8 = 0x53;
+ pub const I64LTU: u8 = 0x54;
+ pub const I64GTS: u8 = 0x55;
+ pub const I64GTU: u8 = 0x56;
+ pub const I64LES: u8 = 0x57;
+ pub const I64LEU: u8 = 0x58;
+ pub const I64GES: u8 = 0x59;
+ pub const I64GEU: u8 = 0x5a;
+
+ pub const F32EQ: u8 = 0x5b;
+ pub const F32NE: u8 = 0x5c;
+ pub const F32LT: u8 = 0x5d;
+ pub const F32GT: u8 = 0x5e;
+ pub const F32LE: u8 = 0x5f;
+ pub const F32GE: u8 = 0x60;
+
+ pub const F64EQ: u8 = 0x61;
+ pub const F64NE: u8 = 0x62;
+ pub const F64LT: u8 = 0x63;
+ pub const F64GT: u8 = 0x64;
+ pub const F64LE: u8 = 0x65;
+ pub const F64GE: u8 = 0x66;
+
+ pub const I32CLZ: u8 = 0x67;
+ pub const I32CTZ: u8 = 0x68;
+ pub const I32POPCNT: u8 = 0x69;
+ pub const I32ADD: u8 = 0x6a;
+ pub const I32SUB: u8 = 0x6b;
+ pub const I32MUL: u8 = 0x6c;
+ pub const I32DIVS: u8 = 0x6d;
+ pub const I32DIVU: u8 = 0x6e;
+ pub const I32REMS: u8 = 0x6f;
+ pub const I32REMU: u8 = 0x70;
+ pub const I32AND: u8 = 0x71;
+ pub const I32OR: u8 = 0x72;
+ pub const I32XOR: u8 = 0x73;
+ pub const I32SHL: u8 = 0x74;
+ pub const I32SHRS: u8 = 0x75;
+ pub const I32SHRU: u8 = 0x76;
+ pub const I32ROTL: u8 = 0x77;
+ pub const I32ROTR: u8 = 0x78;
+
+ pub const I64CLZ: u8 = 0x79;
+ pub const I64CTZ: u8 = 0x7a;
+ pub const I64POPCNT: u8 = 0x7b;
+ pub const I64ADD: u8 = 0x7c;
+ pub const I64SUB: u8 = 0x7d;
+ pub const I64MUL: u8 = 0x7e;
+ pub const I64DIVS: u8 = 0x7f;
+ pub const I64DIVU: u8 = 0x80;
+ pub const I64REMS: u8 = 0x81;
+ pub const I64REMU: u8 = 0x82;
+ pub const I64AND: u8 = 0x83;
+ pub const I64OR: u8 = 0x84;
+ pub const I64XOR: u8 = 0x85;
+ pub const I64SHL: u8 = 0x86;
+ pub const I64SHRS: u8 = 0x87;
+ pub const I64SHRU: u8 = 0x88;
+ pub const I64ROTL: u8 = 0x89;
+ pub const I64ROTR: u8 = 0x8a;
+ pub const F32ABS: u8 = 0x8b;
+ pub const F32NEG: u8 = 0x8c;
+ pub const F32CEIL: u8 = 0x8d;
+ pub const F32FLOOR: u8 = 0x8e;
+ pub const F32TRUNC: u8 = 0x8f;
+ pub const F32NEAREST: u8 = 0x90;
+ pub const F32SQRT: u8 = 0x91;
+ pub const F32ADD: u8 = 0x92;
+ pub const F32SUB: u8 = 0x93;
+ pub const F32MUL: u8 = 0x94;
+ pub const F32DIV: u8 = 0x95;
+ pub const F32MIN: u8 = 0x96;
+ pub const F32MAX: u8 = 0x97;
+ pub const F32COPYSIGN: u8 = 0x98;
+ pub const F64ABS: u8 = 0x99;
+ pub const F64NEG: u8 = 0x9a;
+ pub const F64CEIL: u8 = 0x9b;
+ pub const F64FLOOR: u8 = 0x9c;
+ pub const F64TRUNC: u8 = 0x9d;
+ pub const F64NEAREST: u8 = 0x9e;
+ pub const F64SQRT: u8 = 0x9f;
+ pub const F64ADD: u8 = 0xa0;
+ pub const F64SUB: u8 = 0xa1;
+ pub const F64MUL: u8 = 0xa2;
+ pub const F64DIV: u8 = 0xa3;
+ pub const F64MIN: u8 = 0xa4;
+ pub const F64MAX: u8 = 0xa5;
+ pub const F64COPYSIGN: u8 = 0xa6;
+
+ pub const I32WRAPI64: u8 = 0xa7;
+ pub const I32TRUNCSF32: u8 = 0xa8;
+ pub const I32TRUNCUF32: u8 = 0xa9;
+ pub const I32TRUNCSF64: u8 = 0xaa;
+ pub const I32TRUNCUF64: u8 = 0xab;
+ pub const I64EXTENDSI32: u8 = 0xac;
+ pub const I64EXTENDUI32: u8 = 0xad;
+ pub const I64TRUNCSF32: u8 = 0xae;
+ pub const I64TRUNCUF32: u8 = 0xaf;
+ pub const I64TRUNCSF64: u8 = 0xb0;
+ pub const I64TRUNCUF64: u8 = 0xb1;
+ pub const F32CONVERTSI32: u8 = 0xb2;
+ pub const F32CONVERTUI32: u8 = 0xb3;
+ pub const F32CONVERTSI64: u8 = 0xb4;
+ pub const F32CONVERTUI64: u8 = 0xb5;
+ pub const F32DEMOTEF64: u8 = 0xb6;
+ pub const F64CONVERTSI32: u8 = 0xb7;
+ pub const F64CONVERTUI32: u8 = 0xb8;
+ pub const F64CONVERTSI64: u8 = 0xb9;
+ pub const F64CONVERTUI64: u8 = 0xba;
+ pub const F64PROMOTEF32: u8 = 0xbb;
+
+ pub const I32REINTERPRETF32: u8 = 0xbc;
+ pub const I64REINTERPRETF64: u8 = 0xbd;
+ pub const F32REINTERPRETI32: u8 = 0xbe;
+ pub const F64REINTERPRETI64: u8 = 0xbf;
+
+ #[cfg(feature="sign_ext")]
+ pub mod sign_ext {
+ pub const I32_EXTEND8_S: u8 = 0xc0;
+ pub const I32_EXTEND16_S: u8 = 0xc1;
+ pub const I64_EXTEND8_S: u8 = 0xc2;
+ pub const I64_EXTEND16_S: u8 = 0xc3;
+ pub const I64_EXTEND32_S: u8 = 0xc4;
+ }
+
+ #[cfg(feature="atomics")]
+ pub mod atomics {
+ pub const ATOMIC_PREFIX: u8 = 0xfe;
+ pub const ATOMIC_WAKE: u8 = 0x00;
+ pub const I32_ATOMIC_WAIT: u8 = 0x01;
+ pub const I64_ATOMIC_WAIT: u8 = 0x02;
+
+ pub const I32_ATOMIC_LOAD: u8 = 0x10;
+ pub const I64_ATOMIC_LOAD: u8 = 0x11;
+ pub const I32_ATOMIC_LOAD8U: u8 = 0x12;
+ pub const I32_ATOMIC_LOAD16U: u8 = 0x13;
+ pub const I64_ATOMIC_LOAD8U: u8 = 0x14;
+ pub const I64_ATOMIC_LOAD16U: u8 = 0x15;
+ pub const I64_ATOMIC_LOAD32U: u8 = 0x16;
+ pub const I32_ATOMIC_STORE: u8 = 0x17;
+ pub const I64_ATOMIC_STORE: u8 = 0x18;
+ pub const I32_ATOMIC_STORE8U: u8 = 0x19;
+ pub const I32_ATOMIC_STORE16U: u8 = 0x1a;
+ pub const I64_ATOMIC_STORE8U: u8 = 0x1b;
+ pub const I64_ATOMIC_STORE16U: u8 = 0x1c;
+ pub const I64_ATOMIC_STORE32U: u8 = 0x1d;
+
+ pub const I32_ATOMIC_RMW_ADD: u8 = 0x1e;
+ pub const I64_ATOMIC_RMW_ADD: u8 = 0x1f;
+ pub const I32_ATOMIC_RMW_ADD8U: u8 = 0x20;
+ pub const I32_ATOMIC_RMW_ADD16U: u8 = 0x21;
+ pub const I64_ATOMIC_RMW_ADD8U: u8 = 0x22;
+ pub const I64_ATOMIC_RMW_ADD16U: u8 = 0x23;
+ pub const I64_ATOMIC_RMW_ADD32U: u8 = 0x24;
+
+ pub const I32_ATOMIC_RMW_SUB: u8 = 0x25;
+ pub const I64_ATOMIC_RMW_SUB: u8 = 0x26;
+ pub const I32_ATOMIC_RMW_SUB8U: u8 = 0x27;
+ pub const I32_ATOMIC_RMW_SUB16U: u8 = 0x28;
+ pub const I64_ATOMIC_RMW_SUB8U: u8 = 0x29;
+ pub const I64_ATOMIC_RMW_SUB16U: u8 = 0x2a;
+ pub const I64_ATOMIC_RMW_SUB32U: u8 = 0x2b;
+
+ pub const I32_ATOMIC_RMW_AND: u8 = 0x2c;
+ pub const I64_ATOMIC_RMW_AND: u8 = 0x2d;
+ pub const I32_ATOMIC_RMW_AND8U: u8 = 0x2e;
+ pub const I32_ATOMIC_RMW_AND16U: u8 = 0x2f;
+ pub const I64_ATOMIC_RMW_AND8U: u8 = 0x30;
+ pub const I64_ATOMIC_RMW_AND16U: u8 = 0x31;
+ pub const I64_ATOMIC_RMW_AND32U: u8 = 0x32;
+
+ pub const I32_ATOMIC_RMW_OR: u8 = 0x33;
+ pub const I64_ATOMIC_RMW_OR: u8 = 0x34;
+ pub const I32_ATOMIC_RMW_OR8U: u8 = 0x35;
+ pub const I32_ATOMIC_RMW_OR16U: u8 = 0x36;
+ pub const I64_ATOMIC_RMW_OR8U: u8 = 0x37;
+ pub const I64_ATOMIC_RMW_OR16U: u8 = 0x38;
+ pub const I64_ATOMIC_RMW_OR32U: u8 = 0x39;
+
+ pub const I32_ATOMIC_RMW_XOR: u8 = 0x3a;
+ pub const I64_ATOMIC_RMW_XOR: u8 = 0x3b;
+ pub const I32_ATOMIC_RMW_XOR8U: u8 = 0x3c;
+ pub const I32_ATOMIC_RMW_XOR16U: u8 = 0x3d;
+ pub const I64_ATOMIC_RMW_XOR8U: u8 = 0x3e;
+ pub const I64_ATOMIC_RMW_XOR16U: u8 = 0x3f;
+ pub const I64_ATOMIC_RMW_XOR32U: u8 = 0x40;
+
+ pub const I32_ATOMIC_RMW_XCHG: u8 = 0x41;
+ pub const I64_ATOMIC_RMW_XCHG: u8 = 0x42;
+ pub const I32_ATOMIC_RMW_XCHG8U: u8 = 0x43;
+ pub const I32_ATOMIC_RMW_XCHG16U: u8 = 0x44;
+ pub const I64_ATOMIC_RMW_XCHG8U: u8 = 0x45;
+ pub const I64_ATOMIC_RMW_XCHG16U: u8 = 0x46;
+ pub const I64_ATOMIC_RMW_XCHG32U: u8 = 0x47;
+
+ pub const I32_ATOMIC_RMW_CMPXCHG: u8 = 0x48;
+ pub const I64_ATOMIC_RMW_CMPXCHG: u8 = 0x49;
+ pub const I32_ATOMIC_RMW_CMPXCHG8U: u8 = 0x4a;
+ pub const I32_ATOMIC_RMW_CMPXCHG16U: u8 = 0x4b;
+ pub const I64_ATOMIC_RMW_CMPXCHG8U: u8 = 0x4c;
+ pub const I64_ATOMIC_RMW_CMPXCHG16U: u8 = 0x4d;
+ pub const I64_ATOMIC_RMW_CMPXCHG32U: u8 = 0x4e;
+ }
+
+ #[cfg(feature="simd")]
+ pub mod simd {
+ // https://github.com/WebAssembly/simd/blob/master/proposals/simd/BinarySIMD.md
+ pub const SIMD_PREFIX: u8 = 0xfd;
+
+ pub const V128_LOAD: u32 = 0x00;
+ pub const V128_STORE: u32 = 0x01;
+ pub const V128_CONST: u32 = 0x02;
+ pub const V8X16_SHUFFLE: u32 = 0x03;
+
+ pub const I8X16_SPLAT: u32 = 0x04;
+ pub const I8X16_EXTRACT_LANE_S: u32 = 0x05;
+ pub const I8X16_EXTRACT_LANE_U: u32 = 0x06;
+ pub const I8X16_REPLACE_LANE: u32 = 0x07;
+ pub const I16X8_SPLAT: u32 = 0x08;
+ pub const I16X8_EXTRACT_LANE_S: u32 = 0x09;
+ pub const I16X8_EXTRACT_LANE_U: u32 = 0xa;
+ pub const I16X8_REPLACE_LANE: u32 = 0x0b;
+ pub const I32X4_SPLAT: u32 = 0x0c;
+ pub const I32X4_EXTRACT_LANE: u32 = 0x0d;
+ pub const I32X4_REPLACE_LANE: u32 = 0x0e;
+ pub const I64X2_SPLAT: u32 = 0x0f;
+ pub const I64X2_EXTRACT_LANE: u32 = 0x10;
+ pub const I64X2_REPLACE_LANE: u32 = 0x11;
+ pub const F32X4_SPLAT: u32 = 0x12;
+ pub const F32X4_EXTRACT_LANE: u32 = 0x13;
+ pub const F32X4_REPLACE_LANE: u32 = 0x14;
+ pub const F64X2_SPLAT: u32 = 0x15;
+ pub const F64X2_EXTRACT_LANE: u32 = 0x16;
+ pub const F64X2_REPLACE_LANE: u32 = 0x17;
+
+ pub const I8X16_EQ: u32 = 0x18;
+ pub const I8X16_NE: u32 = 0x19;
+ pub const I8X16_LT_S: u32 = 0x1a;
+ pub const I8X16_LT_U: u32 = 0x1b;
+ pub const I8X16_GT_S: u32 = 0x1c;
+ pub const I8X16_GT_U: u32 = 0x1d;
+ pub const I8X16_LE_S: u32 = 0x1e;
+ pub const I8X16_LE_U: u32 = 0x1f;
+ pub const I8X16_GE_S: u32 = 0x20;
+ pub const I8X16_GE_U: u32 = 0x21;
+
+ pub const I16X8_EQ: u32 = 0x22;
+ pub const I16X8_NE: u32 = 0x23;
+ pub const I16X8_LT_S: u32 = 0x24;
+ pub const I16X8_LT_U: u32 = 0x25;
+ pub const I16X8_GT_S: u32 = 0x26;
+ pub const I16X8_GT_U: u32 = 0x27;
+ pub const I16X8_LE_S: u32 = 0x28;
+ pub const I16X8_LE_U: u32 = 0x29;
+ pub const I16X8_GE_S: u32 = 0x2a;
+ pub const I16X8_GE_U: u32 = 0x2b;
+
+ pub const I32X4_EQ: u32 = 0x2c;
+ pub const I32X4_NE: u32 = 0x2d;
+ pub const I32X4_LT_S: u32 = 0x2e;
+ pub const I32X4_LT_U: u32 = 0x2f;
+ pub const I32X4_GT_S: u32 = 0x30;
+ pub const I32X4_GT_U: u32 = 0x31;
+ pub const I32X4_LE_S: u32 = 0x32;
+ pub const I32X4_LE_U: u32 = 0x33;
+ pub const I32X4_GE_S: u32 = 0x34;
+ pub const I32X4_GE_U: u32 = 0x35;
+
+ pub const F32X4_EQ: u32 = 0x40;
+ pub const F32X4_NE: u32 = 0x41;
+ pub const F32X4_LT: u32 = 0x42;
+ pub const F32X4_GT: u32 = 0x43;
+ pub const F32X4_LE: u32 = 0x44;
+ pub const F32X4_GE: u32 = 0x45;
+
+ pub const F64X2_EQ: u32 = 0x46;
+ pub const F64X2_NE: u32 = 0x47;
+ pub const F64X2_LT: u32 = 0x48;
+ pub const F64X2_GT: u32 = 0x49;
+ pub const F64X2_LE: u32 = 0x4a;
+ pub const F64X2_GE: u32 = 0x4b;
+
+ pub const V128_NOT: u32 = 0x4c;
+ pub const V128_AND: u32 = 0x4d;
+ pub const V128_OR: u32 = 0x4e;
+ pub const V128_XOR: u32 = 0x4f;
+ pub const V128_BITSELECT: u32 = 0x50;
+
+ pub const I8X16_NEG: u32 = 0x51;
+ pub const I8X16_ANY_TRUE: u32 = 0x52;
+ pub const I8X16_ALL_TRUE: u32 = 0x53;
+ pub const I8X16_SHL: u32 = 0x54;
+ pub const I8X16_SHR_S: u32 = 0x55;
+ pub const I8X16_SHR_U: u32 = 0x56;
+ pub const I8X16_ADD: u32 = 0x57;
+ pub const I8X16_ADD_SATURATE_S: u32 = 0x58;
+ pub const I8X16_ADD_SATURATE_U: u32 = 0x59;
+ pub const I8X16_SUB: u32 = 0x5a;
+ pub const I8X16_SUB_SATURATE_S: u32 = 0x5b;
+ pub const I8X16_SUB_SATURATE_U: u32 = 0x5c;
+ pub const I8X16_MUL: u32 = 0x5d;
+
+ pub const I16X8_NEG: u32 = 0x62;
+ pub const I16X8_ANY_TRUE: u32 = 0x63;
+ pub const I16X8_ALL_TRUE: u32 = 0x64;
+ pub const I16X8_SHL: u32 = 0x65;
+ pub const I16X8_SHR_S: u32 = 0x66;
+ pub const I16X8_SHR_U: u32 = 0x67;
+ pub const I16X8_ADD: u32 = 0x68;
+ pub const I16X8_ADD_SATURATE_S: u32 = 0x69;
+ pub const I16X8_ADD_SATURATE_U: u32 = 0x6a;
+ pub const I16X8_SUB: u32 = 0x6b;
+ pub const I16X8_SUB_SATURATE_S: u32 = 0x6c;
+ pub const I16X8_SUB_SATURATE_U: u32 = 0x6d;
+ pub const I16X8_MUL: u32 = 0x6e;
+
+ pub const I32X4_NEG: u32 = 0x73;
+ pub const I32X4_ANY_TRUE: u32 = 0x74;
+ pub const I32X4_ALL_TRUE: u32 = 0x75;
+ pub const I32X4_SHL: u32 = 0x76;
+ pub const I32X4_SHR_S: u32 = 0x77;
+ pub const I32X4_SHR_U: u32 = 0x78;
+ pub const I32X4_ADD: u32 = 0x79;
+ pub const I32X4_ADD_SATURATE_S: u32 = 0x7a;
+ pub const I32X4_ADD_SATURATE_U: u32 = 0x7b;
+ pub const I32X4_SUB: u32 = 0x7c;
+ pub const I32X4_SUB_SATURATE_S: u32 = 0x7d;
+ pub const I32X4_SUB_SATURATE_U: u32 = 0x7e;
+ pub const I32X4_MUL: u32 = 0x7f;
+
+ pub const I64X2_NEG: u32 = 0x84;
+ pub const I64X2_ANY_TRUE: u32 = 0x85;
+ pub const I64X2_ALL_TRUE: u32 = 0x86;
+ pub const I64X2_SHL: u32 = 0x87;
+ pub const I64X2_SHR_S: u32 = 0x88;
+ pub const I64X2_SHR_U: u32 = 0x89;
+ pub const I64X2_ADD: u32 = 0x8a;
+ pub const I64X2_SUB: u32 = 0x8d;
+
+ pub const F32X4_ABS: u32 = 0x95;
+ pub const F32X4_NEG: u32 = 0x96;
+ pub const F32X4_SQRT: u32 = 0x97;
+ pub const F32X4_ADD: u32 = 0x9a;
+ pub const F32X4_SUB: u32 = 0x9b;
+ pub const F32X4_MUL: u32 = 0x9c;
+ pub const F32X4_DIV: u32 = 0x9d;
+ pub const F32X4_MIN: u32 = 0x9e;
+ pub const F32X4_MAX: u32 = 0x9f;
+
+ pub const F64X2_ABS: u32 = 0xa0;
+ pub const F64X2_NEG: u32 = 0xa1;
+ pub const F64X2_SQRT: u32 = 0xa2;
+ pub const F64X2_ADD: u32 = 0xa5;
+ pub const F64X2_SUB: u32 = 0xa6;
+ pub const F64X2_MUL: u32 = 0xa7;
+ pub const F64X2_DIV: u32 = 0xa8;
+ pub const F64X2_MIN: u32 = 0xa9;
+ pub const F64X2_MAX: u32 = 0xaa;
+
+ pub const I32X4_TRUNC_S_F32X4_SAT: u32 = 0xab;
+ pub const I32X4_TRUNC_U_F32X4_SAT: u32 = 0xac;
+ pub const I64X2_TRUNC_S_F64X2_SAT: u32 = 0xad;
+ pub const I64X2_TRUNC_U_F64X2_SAT: u32 = 0xae;
+
+ pub const F32X4_CONVERT_S_I32X4: u32 = 0xaf;
+ pub const F32X4_CONVERT_U_I32X4: u32 = 0xb0;
+ pub const F64X2_CONVERT_S_I64X2: u32 = 0xb1;
+ pub const F64X2_CONVERT_U_I64X2: u32 = 0xb2;
+ }
+
+ #[cfg(feature="bulk")]
+ pub mod bulk {
+ pub const BULK_PREFIX: u8 = 0xfc;
+ pub const MEMORY_INIT: u8 = 0x08;
+ pub const MEMORY_DROP: u8 = 0x09;
+ pub const MEMORY_COPY: u8 = 0x0a;
+ pub const MEMORY_FILL: u8 = 0x0b;
+ pub const TABLE_INIT: u8 = 0x0c;
+ pub const TABLE_DROP: u8 = 0x0d;
+ pub const TABLE_COPY: u8 = 0x0e;
+ }
+}
+
+impl Deserialize for Instruction {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ use self::Instruction::*;
+ use self::opcodes::*;
+
+ #[cfg(feature="sign_ext")]
+ use self::opcodes::sign_ext::*;
+
+ let val: u8 = Uint8::deserialize(reader)?.into();
+
+ Ok(
+ match val {
+ UNREACHABLE => Unreachable,
+ NOP => Nop,
+ BLOCK => Block(BlockType::deserialize(reader)?),
+ LOOP => Loop(BlockType::deserialize(reader)?),
+ IF => If(BlockType::deserialize(reader)?),
+ ELSE => Else,
+ END => End,
+
+ BR => Br(VarUint32::deserialize(reader)?.into()),
+ BRIF => BrIf(VarUint32::deserialize(reader)?.into()),
+ BRTABLE => {
+ let t1: Vec<u32> = CountedList::<VarUint32>::deserialize(reader)?
+ .into_inner()
+ .into_iter()
+ .map(Into::into)
+ .collect();
+
+ BrTable(Box::new(BrTableData {
+ table: t1.into_boxed_slice(),
+ default: VarUint32::deserialize(reader)?.into(),
+ }))
+ },
+ RETURN => Return,
+ CALL => Call(VarUint32::deserialize(reader)?.into()),
+ CALLINDIRECT => {
+ let signature: u32 = VarUint32::deserialize(reader)?.into();
+ let table_ref: u8 = Uint8::deserialize(reader)?.into();
+ if table_ref != 0 { return Err(Error::InvalidTableReference(table_ref)); }
+
+ CallIndirect(
+ signature,
+ table_ref,
+ )
+ },
+ DROP => Drop,
+ SELECT => Select,
+
+ GETLOCAL => GetLocal(VarUint32::deserialize(reader)?.into()),
+ SETLOCAL => SetLocal(VarUint32::deserialize(reader)?.into()),
+ TEELOCAL => TeeLocal(VarUint32::deserialize(reader)?.into()),
+ GETGLOBAL => GetGlobal(VarUint32::deserialize(reader)?.into()),
+ SETGLOBAL => SetGlobal(VarUint32::deserialize(reader)?.into()),
+
+ I32LOAD => I32Load(
+ VarUint32::deserialize(reader)?.into(),
+ VarUint32::deserialize(reader)?.into()),
+
+ I64LOAD => I64Load(
+ VarUint32::deserialize(reader)?.into(),
+ VarUint32::deserialize(reader)?.into()),
+
+ F32LOAD => F32Load(
+ VarUint32::deserialize(reader)?.into(),
+ VarUint32::deserialize(reader)?.into()),
+
+ F64LOAD => F64Load(
+ VarUint32::deserialize(reader)?.into(),
+ VarUint32::deserialize(reader)?.into()),
+
+ I32LOAD8S => I32Load8S(
+ VarUint32::deserialize(reader)?.into(),
+ VarUint32::deserialize(reader)?.into()),
+
+ I32LOAD8U => I32Load8U(
+ VarUint32::deserialize(reader)?.into(),
+ VarUint32::deserialize(reader)?.into()),
+
+ I32LOAD16S => I32Load16S(
+ VarUint32::deserialize(reader)?.into(),
+ VarUint32::deserialize(reader)?.into()),
+
+ I32LOAD16U => I32Load16U(
+ VarUint32::deserialize(reader)?.into(),
+ VarUint32::deserialize(reader)?.into()),
+
+ I64LOAD8S => I64Load8S(
+ VarUint32::deserialize(reader)?.into(),
+ VarUint32::deserialize(reader)?.into()),
+
+ I64LOAD8U => I64Load8U(
+ VarUint32::deserialize(reader)?.into(),
+ VarUint32::deserialize(reader)?.into()),
+
+ I64LOAD16S => I64Load16S(
+ VarUint32::deserialize(reader)?.into(),
+ VarUint32::deserialize(reader)?.into()),
+
+ I64LOAD16U => I64Load16U(
+ VarUint32::deserialize(reader)?.into(),
+ VarUint32::deserialize(reader)?.into()),
+
+ I64LOAD32S => I64Load32S(
+ VarUint32::deserialize(reader)?.into(),
+ VarUint32::deserialize(reader)?.into()),
+
+ I64LOAD32U => I64Load32U(
+ VarUint32::deserialize(reader)?.into(),
+ VarUint32::deserialize(reader)?.into()),
+
+ I32STORE => I32Store(
+ VarUint32::deserialize(reader)?.into(),
+ VarUint32::deserialize(reader)?.into()),
+
+ I64STORE => I64Store(
+ VarUint32::deserialize(reader)?.into(),
+ VarUint32::deserialize(reader)?.into()),
+
+ F32STORE => F32Store(
+ VarUint32::deserialize(reader)?.into(),
+ VarUint32::deserialize(reader)?.into()),
+
+ F64STORE => F64Store(
+ VarUint32::deserialize(reader)?.into(),
+ VarUint32::deserialize(reader)?.into()),
+
+ I32STORE8 => I32Store8(
+ VarUint32::deserialize(reader)?.into(),
+ VarUint32::deserialize(reader)?.into()),
+
+ I32STORE16 => I32Store16(
+ VarUint32::deserialize(reader)?.into(),
+ VarUint32::deserialize(reader)?.into()),
+
+ I64STORE8 => I64Store8(
+ VarUint32::deserialize(reader)?.into(),
+ VarUint32::deserialize(reader)?.into()),
+
+ I64STORE16 => I64Store16(
+ VarUint32::deserialize(reader)?.into(),
+ VarUint32::deserialize(reader)?.into()),
+
+ I64STORE32 => I64Store32(
+ VarUint32::deserialize(reader)?.into(),
+ VarUint32::deserialize(reader)?.into()),
+
+
+ CURRENTMEMORY => {
+ let mem_ref: u8 = Uint8::deserialize(reader)?.into();
+ if mem_ref != 0 { return Err(Error::InvalidMemoryReference(mem_ref)); }
+ CurrentMemory(mem_ref)
+ },
+ GROWMEMORY => {
+ let mem_ref: u8 = Uint8::deserialize(reader)?.into();
+ if mem_ref != 0 { return Err(Error::InvalidMemoryReference(mem_ref)); }
+ GrowMemory(mem_ref)
+ }
+
+ I32CONST => I32Const(VarInt32::deserialize(reader)?.into()),
+ I64CONST => I64Const(VarInt64::deserialize(reader)?.into()),
+ F32CONST => F32Const(Uint32::deserialize(reader)?.into()),
+ F64CONST => F64Const(Uint64::deserialize(reader)?.into()),
+ I32EQZ => I32Eqz,
+ I32EQ => I32Eq,
+ I32NE => I32Ne,
+ I32LTS => I32LtS,
+ I32LTU => I32LtU,
+ I32GTS => I32GtS,
+ I32GTU => I32GtU,
+ I32LES => I32LeS,
+ I32LEU => I32LeU,
+ I32GES => I32GeS,
+ I32GEU => I32GeU,
+
+ I64EQZ => I64Eqz,
+ I64EQ => I64Eq,
+ I64NE => I64Ne,
+ I64LTS => I64LtS,
+ I64LTU => I64LtU,
+ I64GTS => I64GtS,
+ I64GTU => I64GtU,
+ I64LES => I64LeS,
+ I64LEU => I64LeU,
+ I64GES => I64GeS,
+ I64GEU => I64GeU,
+
+ F32EQ => F32Eq,
+ F32NE => F32Ne,
+ F32LT => F32Lt,
+ F32GT => F32Gt,
+ F32LE => F32Le,
+ F32GE => F32Ge,
+
+ F64EQ => F64Eq,
+ F64NE => F64Ne,
+ F64LT => F64Lt,
+ F64GT => F64Gt,
+ F64LE => F64Le,
+ F64GE => F64Ge,
+
+ I32CLZ => I32Clz,
+ I32CTZ => I32Ctz,
+ I32POPCNT => I32Popcnt,
+ I32ADD => I32Add,
+ I32SUB => I32Sub,
+ I32MUL => I32Mul,
+ I32DIVS => I32DivS,
+ I32DIVU => I32DivU,
+ I32REMS => I32RemS,
+ I32REMU => I32RemU,
+ I32AND => I32And,
+ I32OR => I32Or,
+ I32XOR => I32Xor,
+ I32SHL => I32Shl,
+ I32SHRS => I32ShrS,
+ I32SHRU => I32ShrU,
+ I32ROTL => I32Rotl,
+ I32ROTR => I32Rotr,
+
+ I64CLZ => I64Clz,
+ I64CTZ => I64Ctz,
+ I64POPCNT => I64Popcnt,
+ I64ADD => I64Add,
+ I64SUB => I64Sub,
+ I64MUL => I64Mul,
+ I64DIVS => I64DivS,
+ I64DIVU => I64DivU,
+ I64REMS => I64RemS,
+ I64REMU => I64RemU,
+ I64AND => I64And,
+ I64OR => I64Or,
+ I64XOR => I64Xor,
+ I64SHL => I64Shl,
+ I64SHRS => I64ShrS,
+ I64SHRU => I64ShrU,
+ I64ROTL => I64Rotl,
+ I64ROTR => I64Rotr,
+ F32ABS => F32Abs,
+ F32NEG => F32Neg,
+ F32CEIL => F32Ceil,
+ F32FLOOR => F32Floor,
+ F32TRUNC => F32Trunc,
+ F32NEAREST => F32Nearest,
+ F32SQRT => F32Sqrt,
+ F32ADD => F32Add,
+ F32SUB => F32Sub,
+ F32MUL => F32Mul,
+ F32DIV => F32Div,
+ F32MIN => F32Min,
+ F32MAX => F32Max,
+ F32COPYSIGN => F32Copysign,
+ F64ABS => F64Abs,
+ F64NEG => F64Neg,
+ F64CEIL => F64Ceil,
+ F64FLOOR => F64Floor,
+ F64TRUNC => F64Trunc,
+ F64NEAREST => F64Nearest,
+ F64SQRT => F64Sqrt,
+ F64ADD => F64Add,
+ F64SUB => F64Sub,
+ F64MUL => F64Mul,
+ F64DIV => F64Div,
+ F64MIN => F64Min,
+ F64MAX => F64Max,
+ F64COPYSIGN => F64Copysign,
+
+ I32WRAPI64 => I32WrapI64,
+ I32TRUNCSF32 => I32TruncSF32,
+ I32TRUNCUF32 => I32TruncUF32,
+ I32TRUNCSF64 => I32TruncSF64,
+ I32TRUNCUF64 => I32TruncUF64,
+ I64EXTENDSI32 => I64ExtendSI32,
+ I64EXTENDUI32 => I64ExtendUI32,
+ I64TRUNCSF32 => I64TruncSF32,
+ I64TRUNCUF32 => I64TruncUF32,
+ I64TRUNCSF64 => I64TruncSF64,
+ I64TRUNCUF64 => I64TruncUF64,
+ F32CONVERTSI32 => F32ConvertSI32,
+ F32CONVERTUI32 => F32ConvertUI32,
+ F32CONVERTSI64 => F32ConvertSI64,
+ F32CONVERTUI64 => F32ConvertUI64,
+ F32DEMOTEF64 => F32DemoteF64,
+ F64CONVERTSI32 => F64ConvertSI32,
+ F64CONVERTUI32 => F64ConvertUI32,
+ F64CONVERTSI64 => F64ConvertSI64,
+ F64CONVERTUI64 => F64ConvertUI64,
+ F64PROMOTEF32 => F64PromoteF32,
+
+ I32REINTERPRETF32 => I32ReinterpretF32,
+ I64REINTERPRETF64 => I64ReinterpretF64,
+ F32REINTERPRETI32 => F32ReinterpretI32,
+ F64REINTERPRETI64 => F64ReinterpretI64,
+
+ #[cfg(feature="sign_ext")]
+ I32_EXTEND8_S |
+ I32_EXTEND16_S |
+ I64_EXTEND8_S |
+ I64_EXTEND16_S |
+ I64_EXTEND32_S => match val {
+ I32_EXTEND8_S => SignExt(SignExtInstruction::I32Extend8S),
+ I32_EXTEND16_S => SignExt(SignExtInstruction::I32Extend16S),
+ I64_EXTEND8_S => SignExt(SignExtInstruction::I64Extend8S),
+ I64_EXTEND16_S => SignExt(SignExtInstruction::I64Extend16S),
+ I64_EXTEND32_S => SignExt(SignExtInstruction::I64Extend32S),
+ _ => return Err(Error::UnknownOpcode(val)),
+ }
+
+ #[cfg(feature="atomics")]
+ atomics::ATOMIC_PREFIX => return deserialize_atomic(reader),
+
+ #[cfg(feature="simd")]
+ simd::SIMD_PREFIX => return deserialize_simd(reader),
+
+ #[cfg(feature="bulk")]
+ bulk::BULK_PREFIX => return deserialize_bulk(reader),
+
+ _ => { return Err(Error::UnknownOpcode(val)); }
+ }
+ )
+ }
+}
+
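+// Instructions from the threads/atomics proposal share a single prefix byte
+// (`ATOMIC_PREFIX`); the actual operation is a one-byte sub-opcode followed by
+// a `MemArg` (alignment + offset) immediate.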
+#[cfg(feature="atomics")]
+fn deserialize_atomic<R: io::Read>(reader: &mut R) -> Result<Instruction, Error> {
+ use self::AtomicsInstruction::*;
+ use self::opcodes::atomics::*;
+
+ let val: u8 = Uint8::deserialize(reader)?.into();
+ let mem = MemArg::deserialize(reader)?;
+ Ok(Instruction::Atomics(match val {
+ ATOMIC_WAKE => AtomicWake(mem),
+ I32_ATOMIC_WAIT => I32AtomicWait(mem),
+ I64_ATOMIC_WAIT => I64AtomicWait(mem),
+
+ I32_ATOMIC_LOAD => I32AtomicLoad(mem),
+ I64_ATOMIC_LOAD => I64AtomicLoad(mem),
+ I32_ATOMIC_LOAD8U => I32AtomicLoad8u(mem),
+ I32_ATOMIC_LOAD16U => I32AtomicLoad16u(mem),
+ I64_ATOMIC_LOAD8U => I64AtomicLoad8u(mem),
+ I64_ATOMIC_LOAD16U => I64AtomicLoad16u(mem),
+ I64_ATOMIC_LOAD32U => I64AtomicLoad32u(mem),
+ I32_ATOMIC_STORE => I32AtomicStore(mem),
+ I64_ATOMIC_STORE => I64AtomicStore(mem),
+ I32_ATOMIC_STORE8U => I32AtomicStore8u(mem),
+ I32_ATOMIC_STORE16U => I32AtomicStore16u(mem),
+ I64_ATOMIC_STORE8U => I64AtomicStore8u(mem),
+ I64_ATOMIC_STORE16U => I64AtomicStore16u(mem),
+ I64_ATOMIC_STORE32U => I64AtomicStore32u(mem),
+
+ I32_ATOMIC_RMW_ADD => I32AtomicRmwAdd(mem),
+ I64_ATOMIC_RMW_ADD => I64AtomicRmwAdd(mem),
+ I32_ATOMIC_RMW_ADD8U => I32AtomicRmwAdd8u(mem),
+ I32_ATOMIC_RMW_ADD16U => I32AtomicRmwAdd16u(mem),
+ I64_ATOMIC_RMW_ADD8U => I64AtomicRmwAdd8u(mem),
+ I64_ATOMIC_RMW_ADD16U => I64AtomicRmwAdd16u(mem),
+ I64_ATOMIC_RMW_ADD32U => I64AtomicRmwAdd32u(mem),
+
+ I32_ATOMIC_RMW_SUB => I32AtomicRmwSub(mem),
+ I64_ATOMIC_RMW_SUB => I64AtomicRmwSub(mem),
+ I32_ATOMIC_RMW_SUB8U => I32AtomicRmwSub8u(mem),
+ I32_ATOMIC_RMW_SUB16U => I32AtomicRmwSub16u(mem),
+ I64_ATOMIC_RMW_SUB8U => I64AtomicRmwSub8u(mem),
+ I64_ATOMIC_RMW_SUB16U => I64AtomicRmwSub16u(mem),
+ I64_ATOMIC_RMW_SUB32U => I64AtomicRmwSub32u(mem),
+
+ I32_ATOMIC_RMW_OR => I32AtomicRmwOr(mem),
+ I64_ATOMIC_RMW_OR => I64AtomicRmwOr(mem),
+ I32_ATOMIC_RMW_OR8U => I32AtomicRmwOr8u(mem),
+ I32_ATOMIC_RMW_OR16U => I32AtomicRmwOr16u(mem),
+ I64_ATOMIC_RMW_OR8U => I64AtomicRmwOr8u(mem),
+ I64_ATOMIC_RMW_OR16U => I64AtomicRmwOr16u(mem),
+ I64_ATOMIC_RMW_OR32U => I64AtomicRmwOr32u(mem),
+
+ I32_ATOMIC_RMW_XOR => I32AtomicRmwXor(mem),
+ I64_ATOMIC_RMW_XOR => I64AtomicRmwXor(mem),
+ I32_ATOMIC_RMW_XOR8U => I32AtomicRmwXor8u(mem),
+ I32_ATOMIC_RMW_XOR16U => I32AtomicRmwXor16u(mem),
+ I64_ATOMIC_RMW_XOR8U => I64AtomicRmwXor8u(mem),
+ I64_ATOMIC_RMW_XOR16U => I64AtomicRmwXor16u(mem),
+ I64_ATOMIC_RMW_XOR32U => I64AtomicRmwXor32u(mem),
+
+ I32_ATOMIC_RMW_XCHG => I32AtomicRmwXchg(mem),
+ I64_ATOMIC_RMW_XCHG => I64AtomicRmwXchg(mem),
+ I32_ATOMIC_RMW_XCHG8U => I32AtomicRmwXchg8u(mem),
+ I32_ATOMIC_RMW_XCHG16U => I32AtomicRmwXchg16u(mem),
+ I64_ATOMIC_RMW_XCHG8U => I64AtomicRmwXchg8u(mem),
+ I64_ATOMIC_RMW_XCHG16U => I64AtomicRmwXchg16u(mem),
+ I64_ATOMIC_RMW_XCHG32U => I64AtomicRmwXchg32u(mem),
+
+ I32_ATOMIC_RMW_CMPXCHG => I32AtomicRmwCmpxchg(mem),
+ I64_ATOMIC_RMW_CMPXCHG => I64AtomicRmwCmpxchg(mem),
+ I32_ATOMIC_RMW_CMPXCHG8U => I32AtomicRmwCmpxchg8u(mem),
+ I32_ATOMIC_RMW_CMPXCHG16U => I32AtomicRmwCmpxchg16u(mem),
+ I64_ATOMIC_RMW_CMPXCHG8U => I64AtomicRmwCmpxchg8u(mem),
+ I64_ATOMIC_RMW_CMPXCHG16U => I64AtomicRmwCmpxchg16u(mem),
+ I64_ATOMIC_RMW_CMPXCHG32U => I64AtomicRmwCmpxchg32u(mem),
+
+ _ => return Err(Error::UnknownOpcode(val)),
+ }))
+}
+
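+// SIMD instructions are encoded as the `SIMD_PREFIX` byte followed by a LEB128
+// (`VarUint32`) sub-opcode; depending on the operation this is followed by a
+// 16-byte immediate (const/shuffle), a `MemArg` (load/store) or a lane index.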
+#[cfg(feature="simd")]
+fn deserialize_simd<R: io::Read>(reader: &mut R) -> Result<Instruction, Error> {
+ use self::SimdInstruction::*;
+ use self::opcodes::simd::*;
+
+ let val = VarUint32::deserialize(reader)?.into();
+ Ok(Instruction::Simd(match val {
+ V128_CONST => {
+ let mut buf = [0; 16];
+ reader.read(&mut buf)?;
+ V128Const(Box::new(buf))
+ }
+ V128_LOAD => V128Load(MemArg::deserialize(reader)?),
+ V128_STORE => V128Store(MemArg::deserialize(reader)?),
+ I8X16_SPLAT => I8x16Splat,
+ I16X8_SPLAT => I16x8Splat,
+ I32X4_SPLAT => I32x4Splat,
+ I64X2_SPLAT => I64x2Splat,
+ F32X4_SPLAT => F32x4Splat,
+ F64X2_SPLAT => F64x2Splat,
+ I8X16_EXTRACT_LANE_S => I8x16ExtractLaneS(Uint8::deserialize(reader)?.into()),
+ I8X16_EXTRACT_LANE_U => I8x16ExtractLaneU(Uint8::deserialize(reader)?.into()),
+ I16X8_EXTRACT_LANE_S => I16x8ExtractLaneS(Uint8::deserialize(reader)?.into()),
+ I16X8_EXTRACT_LANE_U => I16x8ExtractLaneU(Uint8::deserialize(reader)?.into()),
+ I32X4_EXTRACT_LANE => I32x4ExtractLane(Uint8::deserialize(reader)?.into()),
+ I64X2_EXTRACT_LANE => I64x2ExtractLane(Uint8::deserialize(reader)?.into()),
+ F32X4_EXTRACT_LANE => F32x4ExtractLane(Uint8::deserialize(reader)?.into()),
+ F64X2_EXTRACT_LANE => F64x2ExtractLane(Uint8::deserialize(reader)?.into()),
+ I8X16_REPLACE_LANE => I8x16ReplaceLane(Uint8::deserialize(reader)?.into()),
+ I16X8_REPLACE_LANE => I16x8ReplaceLane(Uint8::deserialize(reader)?.into()),
+ I32X4_REPLACE_LANE => I32x4ReplaceLane(Uint8::deserialize(reader)?.into()),
+ I64X2_REPLACE_LANE => I64x2ReplaceLane(Uint8::deserialize(reader)?.into()),
+ F32X4_REPLACE_LANE => F32x4ReplaceLane(Uint8::deserialize(reader)?.into()),
+ F64X2_REPLACE_LANE => F64x2ReplaceLane(Uint8::deserialize(reader)?.into()),
+ V8X16_SHUFFLE => {
+ let mut buf = [0; 16];
+ reader.read(&mut buf)?;
+ V8x16Shuffle(Box::new(buf))
+ }
+ I8X16_ADD => I8x16Add,
+ I16X8_ADD => I16x8Add,
+ I32X4_ADD => I32x4Add,
+ I64X2_ADD => I64x2Add,
+ I8X16_SUB => I8x16Sub,
+ I16X8_SUB => I16x8Sub,
+ I32X4_SUB => I32x4Sub,
+ I64X2_SUB => I64x2Sub,
+ I8X16_MUL => I8x16Mul,
+ I16X8_MUL => I16x8Mul,
+ I32X4_MUL => I32x4Mul,
+ // I64X2_MUL => I64x2Mul,
+ I8X16_NEG => I8x16Neg,
+ I16X8_NEG => I16x8Neg,
+ I32X4_NEG => I32x4Neg,
+ I64X2_NEG => I64x2Neg,
+
+ I8X16_ADD_SATURATE_S => I8x16AddSaturateS,
+ I8X16_ADD_SATURATE_U => I8x16AddSaturateU,
+ I16X8_ADD_SATURATE_S => I16x8AddSaturateS,
+ I16X8_ADD_SATURATE_U => I16x8AddSaturateU,
+ I8X16_SUB_SATURATE_S => I8x16SubSaturateS,
+ I8X16_SUB_SATURATE_U => I8x16SubSaturateU,
+ I16X8_SUB_SATURATE_S => I16x8SubSaturateS,
+ I16X8_SUB_SATURATE_U => I16x8SubSaturateU,
+ I8X16_SHL => I8x16Shl,
+ I16X8_SHL => I16x8Shl,
+ I32X4_SHL => I32x4Shl,
+ I64X2_SHL => I64x2Shl,
+ I8X16_SHR_S => I8x16ShrS,
+ I8X16_SHR_U => I8x16ShrU,
+ I16X8_SHR_S => I16x8ShrS,
+ I16X8_SHR_U => I16x8ShrU,
+ I32X4_SHR_S => I32x4ShrS,
+ I32X4_SHR_U => I32x4ShrU,
+ I64X2_SHR_S => I64x2ShrS,
+ I64X2_SHR_U => I64x2ShrU,
+ V128_AND => V128And,
+ V128_OR => V128Or,
+ V128_XOR => V128Xor,
+ V128_NOT => V128Not,
+ V128_BITSELECT => V128Bitselect,
+ I8X16_ANY_TRUE => I8x16AnyTrue,
+ I16X8_ANY_TRUE => I16x8AnyTrue,
+ I32X4_ANY_TRUE => I32x4AnyTrue,
+ I64X2_ANY_TRUE => I64x2AnyTrue,
+ I8X16_ALL_TRUE => I8x16AllTrue,
+ I16X8_ALL_TRUE => I16x8AllTrue,
+ I32X4_ALL_TRUE => I32x4AllTrue,
+ I64X2_ALL_TRUE => I64x2AllTrue,
+ I8X16_EQ => I8x16Eq,
+ I16X8_EQ => I16x8Eq,
+ I32X4_EQ => I32x4Eq,
+ // I64X2_EQ => I64x2Eq,
+ F32X4_EQ => F32x4Eq,
+ F64X2_EQ => F64x2Eq,
+ I8X16_NE => I8x16Ne,
+ I16X8_NE => I16x8Ne,
+ I32X4_NE => I32x4Ne,
+ // I64X2_NE => I64x2Ne,
+ F32X4_NE => F32x4Ne,
+ F64X2_NE => F64x2Ne,
+ I8X16_LT_S => I8x16LtS,
+ I8X16_LT_U => I8x16LtU,
+ I16X8_LT_S => I16x8LtS,
+ I16X8_LT_U => I16x8LtU,
+ I32X4_LT_S => I32x4LtS,
+ I32X4_LT_U => I32x4LtU,
+ // I64X2_LT_S => I64x2LtS,
+ // I64X2_LT_U => I64x2LtU,
+ F32X4_LT => F32x4Lt,
+ F64X2_LT => F64x2Lt,
+ I8X16_LE_S => I8x16LeS,
+ I8X16_LE_U => I8x16LeU,
+ I16X8_LE_S => I16x8LeS,
+ I16X8_LE_U => I16x8LeU,
+ I32X4_LE_S => I32x4LeS,
+ I32X4_LE_U => I32x4LeU,
+ // I64X2_LE_S => I64x2LeS,
+ // I64X2_LE_U => I64x2LeU,
+ F32X4_LE => F32x4Le,
+ F64X2_LE => F64x2Le,
+ I8X16_GT_S => I8x16GtS,
+ I8X16_GT_U => I8x16GtU,
+ I16X8_GT_S => I16x8GtS,
+ I16X8_GT_U => I16x8GtU,
+ I32X4_GT_S => I32x4GtS,
+ I32X4_GT_U => I32x4GtU,
+ // I64X2_GT_S => I64x2GtS,
+ // I64X2_GT_U => I64x2GtU,
+ F32X4_GT => F32x4Gt,
+ F64X2_GT => F64x2Gt,
+ I8X16_GE_S => I8x16GeS,
+ I8X16_GE_U => I8x16GeU,
+ I16X8_GE_S => I16x8GeS,
+ I16X8_GE_U => I16x8GeU,
+ I32X4_GE_S => I32x4GeS,
+ I32X4_GE_U => I32x4GeU,
+ // I64X2_GE_S => I64x2GeS,
+ // I64X2_GE_U => I64x2GeU,
+ F32X4_GE => F32x4Ge,
+ F64X2_GE => F64x2Ge,
+ F32X4_NEG => F32x4Neg,
+ F64X2_NEG => F64x2Neg,
+ F32X4_ABS => F32x4Abs,
+ F64X2_ABS => F64x2Abs,
+ F32X4_MIN => F32x4Min,
+ F64X2_MIN => F64x2Min,
+ F32X4_MAX => F32x4Max,
+ F64X2_MAX => F64x2Max,
+ F32X4_ADD => F32x4Add,
+ F64X2_ADD => F64x2Add,
+ F32X4_SUB => F32x4Sub,
+ F64X2_SUB => F64x2Sub,
+ F32X4_DIV => F32x4Div,
+ F64X2_DIV => F64x2Div,
+ F32X4_MUL => F32x4Mul,
+ F64X2_MUL => F64x2Mul,
+ F32X4_SQRT => F32x4Sqrt,
+ F64X2_SQRT => F64x2Sqrt,
+ F32X4_CONVERT_S_I32X4 => F32x4ConvertSI32x4,
+ F32X4_CONVERT_U_I32X4 => F32x4ConvertUI32x4,
+ F64X2_CONVERT_S_I64X2 => F64x2ConvertSI64x2,
+ F64X2_CONVERT_U_I64X2 => F64x2ConvertUI64x2,
+ I32X4_TRUNC_S_F32X4_SAT => I32x4TruncSF32x4Sat,
+ I32X4_TRUNC_U_F32X4_SAT => I32x4TruncUF32x4Sat,
+ I64X2_TRUNC_S_F64X2_SAT => I64x2TruncSF64x2Sat,
+ I64X2_TRUNC_U_F64X2_SAT => I64x2TruncUF64x2Sat,
+
+ _ => return Err(Error::UnknownSimdOpcode(val)),
+ }))
+}
+
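+// Bulk-memory instructions use the `BULK_PREFIX` byte followed by a one-byte
+// sub-opcode. The init/fill/copy forms carry a reserved byte that must be zero,
+// and the init/drop forms carry a segment index.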
+#[cfg(feature="bulk")]
+fn deserialize_bulk<R: io::Read>(reader: &mut R) -> Result<Instruction, Error> {
+ use self::BulkInstruction::*;
+ use self::opcodes::bulk::*;
+
+ let val: u8 = Uint8::deserialize(reader)?.into();
+ Ok(Instruction::Bulk(match val {
+ MEMORY_INIT => {
+ if u8::from(Uint8::deserialize(reader)?) != 0 {
+ return Err(Error::UnknownOpcode(val))
+ }
+ MemoryInit(VarUint32::deserialize(reader)?.into())
+ }
+ MEMORY_DROP => MemoryDrop(VarUint32::deserialize(reader)?.into()),
+ MEMORY_FILL => {
+ if u8::from(Uint8::deserialize(reader)?) != 0 {
+ return Err(Error::UnknownOpcode(val))
+ }
+ MemoryFill
+ }
+ MEMORY_COPY => {
+ if u8::from(Uint8::deserialize(reader)?) != 0 {
+ return Err(Error::UnknownOpcode(val))
+ }
+ MemoryCopy
+ }
+
+ TABLE_INIT => {
+ if u8::from(Uint8::deserialize(reader)?) != 0 {
+ return Err(Error::UnknownOpcode(val))
+ }
+ TableInit(VarUint32::deserialize(reader)?.into())
+ }
+ TABLE_DROP => TableDrop(VarUint32::deserialize(reader)?.into()),
+ TABLE_COPY => {
+ if u8::from(Uint8::deserialize(reader)?) != 0 {
+ return Err(Error::UnknownOpcode(val))
+ }
+ TableCopy
+ }
+
+ _ => return Err(Error::UnknownOpcode(val)),
+ }))
+}
+
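+// As read and written here, a `MemArg` immediate is a one-byte alignment
+// followed by a LEB128-encoded offset.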
+#[cfg(any(feature="simd", feature="atomics"))]
+impl Deserialize for MemArg {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ let align = Uint8::deserialize(reader)?;
+ let offset = VarUint32::deserialize(reader)?;
+ Ok(MemArg { align: align.into(), offset: offset.into() })
+ }
+}
+
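+// Serialization helper: `op!` writes a single opcode byte and optionally runs a
+// block that serializes the instruction's immediates. The feature-gated
+// `atomic!`, `simd!` and `bulk!` macros below do the same for prefixed opcodes.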
+macro_rules! op {
+ ($writer: expr, $byte: expr) => ({
+ let b: u8 = $byte;
+ $writer.write(&[b])?;
+ });
+ ($writer: expr, $byte: expr, $s: block) => ({
+ op!($writer, $byte);
+ $s;
+ });
+}
+
+#[cfg(feature="atomics")]
+macro_rules! atomic {
+ ($writer: expr, $byte: expr, $mem:expr) => ({
+ $writer.write(&[ATOMIC_PREFIX, $byte])?;
+ MemArg::serialize($mem, $writer)?;
+ });
+}
+
+#[cfg(feature="simd")]
+macro_rules! simd {
+ ($writer: expr, $byte: expr, $other:expr) => ({
+ $writer.write(&[SIMD_PREFIX])?;
+ VarUint32::from($byte).serialize($writer)?;
+ $other;
+ })
+}
+
+#[cfg(feature="bulk")]
+macro_rules! bulk {
+ ($writer: expr, $byte: expr) => ({
+ $writer.write(&[BULK_PREFIX, $byte])?;
+ });
+ ($writer: expr, $byte: expr, $remaining:expr) => ({
+ bulk!($writer, $byte);
+ $remaining;
+ });
+}
+
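+// Writes an instruction in the binary format: one opcode byte (or a prefix byte
+// for the feature-gated instruction sets) followed by its immediates.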
+impl Serialize for Instruction {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ use self::Instruction::*;
+ use self::opcodes::*;
+
+ match self {
+ Unreachable => op!(writer, UNREACHABLE),
+ Nop => op!(writer, NOP),
+ Block(block_type) => op!(writer, BLOCK, {
+ block_type.serialize(writer)?;
+ }),
+ Loop(block_type) => op!(writer, LOOP, {
+ block_type.serialize(writer)?;
+ }),
+ If(block_type) => op!(writer, IF, {
+ block_type.serialize(writer)?;
+ }),
+ Else => op!(writer, ELSE),
+ End => op!(writer, END),
+ Br(idx) => op!(writer, BR, {
+ VarUint32::from(idx).serialize(writer)?;
+ }),
+ BrIf(idx) => op!(writer, BRIF, {
+ VarUint32::from(idx).serialize(writer)?;
+ }),
+ BrTable(ref table) => op!(writer, BRTABLE, {
+ let list_writer = CountedListWriter::<VarUint32, _>(
+ table.table.len(),
+ table.table.into_iter().map(|x| VarUint32::from(*x)),
+ );
+ list_writer.serialize(writer)?;
+ VarUint32::from(table.default).serialize(writer)?;
+ }),
+ Return => op!(writer, RETURN),
+ Call(index) => op!(writer, CALL, {
+ VarUint32::from(index).serialize(writer)?;
+ }),
+ CallIndirect(index, reserved) => op!(writer, CALLINDIRECT, {
+ VarUint32::from(index).serialize(writer)?;
+ Uint8::from(reserved).serialize(writer)?;
+ }),
+ Drop => op!(writer, DROP),
+ Select => op!(writer, SELECT),
+ GetLocal(index) => op!(writer, GETLOCAL, {
+ VarUint32::from(index).serialize(writer)?;
+ }),
+ SetLocal(index) => op!(writer, SETLOCAL, {
+ VarUint32::from(index).serialize(writer)?;
+ }),
+ TeeLocal(index) => op!(writer, TEELOCAL, {
+ VarUint32::from(index).serialize(writer)?;
+ }),
+ GetGlobal(index) => op!(writer, GETGLOBAL, {
+ VarUint32::from(index).serialize(writer)?;
+ }),
+ SetGlobal(index) => op!(writer, SETGLOBAL, {
+ VarUint32::from(index).serialize(writer)?;
+ }),
+ I32Load(flags, offset) => op!(writer, I32LOAD, {
+ VarUint32::from(flags).serialize(writer)?;
+ VarUint32::from(offset).serialize(writer)?;
+ }),
+ I64Load(flags, offset) => op!(writer, I64LOAD, {
+ VarUint32::from(flags).serialize(writer)?;
+ VarUint32::from(offset).serialize(writer)?;
+ }),
+ F32Load(flags, offset) => op!(writer, F32LOAD, {
+ VarUint32::from(flags).serialize(writer)?;
+ VarUint32::from(offset).serialize(writer)?;
+ }),
+ F64Load(flags, offset) => op!(writer, F64LOAD, {
+ VarUint32::from(flags).serialize(writer)?;
+ VarUint32::from(offset).serialize(writer)?;
+ }),
+ I32Load8S(flags, offset) => op!(writer, I32LOAD8S, {
+ VarUint32::from(flags).serialize(writer)?;
+ VarUint32::from(offset).serialize(writer)?;
+ }),
+ I32Load8U(flags, offset) => op!(writer, I32LOAD8U, {
+ VarUint32::from(flags).serialize(writer)?;
+ VarUint32::from(offset).serialize(writer)?;
+ }),
+ I32Load16S(flags, offset) => op!(writer, I32LOAD16S, {
+ VarUint32::from(flags).serialize(writer)?;
+ VarUint32::from(offset).serialize(writer)?;
+ }),
+ I32Load16U(flags, offset) => op!(writer, I32LOAD16U, {
+ VarUint32::from(flags).serialize(writer)?;
+ VarUint32::from(offset).serialize(writer)?;
+ }),
+ I64Load8S(flags, offset) => op!(writer, I64LOAD8S, {
+ VarUint32::from(flags).serialize(writer)?;
+ VarUint32::from(offset).serialize(writer)?;
+ }),
+ I64Load8U(flags, offset) => op!(writer, I64LOAD8U, {
+ VarUint32::from(flags).serialize(writer)?;
+ VarUint32::from(offset).serialize(writer)?;
+ }),
+ I64Load16S(flags, offset) => op!(writer, I64LOAD16S, {
+ VarUint32::from(flags).serialize(writer)?;
+ VarUint32::from(offset).serialize(writer)?;
+ }),
+ I64Load16U(flags, offset) => op!(writer, I64LOAD16U, {
+ VarUint32::from(flags).serialize(writer)?;
+ VarUint32::from(offset).serialize(writer)?;
+ }),
+ I64Load32S(flags, offset) => op!(writer, I64LOAD32S, {
+ VarUint32::from(flags).serialize(writer)?;
+ VarUint32::from(offset).serialize(writer)?;
+ }),
+ I64Load32U(flags, offset) => op!(writer, I64LOAD32U, {
+ VarUint32::from(flags).serialize(writer)?;
+ VarUint32::from(offset).serialize(writer)?;
+ }),
+ I32Store(flags, offset) => op!(writer, I32STORE, {
+ VarUint32::from(flags).serialize(writer)?;
+ VarUint32::from(offset).serialize(writer)?;
+ }),
+ I64Store(flags, offset) => op!(writer, I64STORE, {
+ VarUint32::from(flags).serialize(writer)?;
+ VarUint32::from(offset).serialize(writer)?;
+ }),
+ F32Store(flags, offset) => op!(writer, F32STORE, {
+ VarUint32::from(flags).serialize(writer)?;
+ VarUint32::from(offset).serialize(writer)?;
+ }),
+ F64Store(flags, offset) => op!(writer, F64STORE, {
+ VarUint32::from(flags).serialize(writer)?;
+ VarUint32::from(offset).serialize(writer)?;
+ }),
+ I32Store8(flags, offset) => op!(writer, I32STORE8, {
+ VarUint32::from(flags).serialize(writer)?;
+ VarUint32::from(offset).serialize(writer)?;
+ }),
+ I32Store16(flags, offset) => op!(writer, I32STORE16, {
+ VarUint32::from(flags).serialize(writer)?;
+ VarUint32::from(offset).serialize(writer)?;
+ }),
+ I64Store8(flags, offset) => op!(writer, I64STORE8, {
+ VarUint32::from(flags).serialize(writer)?;
+ VarUint32::from(offset).serialize(writer)?;
+ }),
+ I64Store16(flags, offset) => op!(writer, I64STORE16, {
+ VarUint32::from(flags).serialize(writer)?;
+ VarUint32::from(offset).serialize(writer)?;
+ }),
+ I64Store32(flags, offset) => op!(writer, I64STORE32, {
+ VarUint32::from(flags).serialize(writer)?;
+ VarUint32::from(offset).serialize(writer)?;
+ }),
+ CurrentMemory(flag) => op!(writer, CURRENTMEMORY, {
+ Uint8::from(flag).serialize(writer)?;
+ }),
+ GrowMemory(flag) => op!(writer, GROWMEMORY, {
+ Uint8::from(flag).serialize(writer)?;
+ }),
+ I32Const(def) => op!(writer, I32CONST, {
+ VarInt32::from(def).serialize(writer)?;
+ }),
+ I64Const(def) => op!(writer, I64CONST, {
+ VarInt64::from(def).serialize(writer)?;
+ }),
+ F32Const(def) => op!(writer, F32CONST, {
+ Uint32::from(def).serialize(writer)?;
+ }),
+ F64Const(def) => op!(writer, F64CONST, {
+ Uint64::from(def).serialize(writer)?;
+ }),
+ I32Eqz => op!(writer, I32EQZ),
+ I32Eq => op!(writer, I32EQ),
+ I32Ne => op!(writer, I32NE),
+ I32LtS => op!(writer, I32LTS),
+ I32LtU => op!(writer, I32LTU),
+ I32GtS => op!(writer, I32GTS),
+ I32GtU => op!(writer, I32GTU),
+ I32LeS => op!(writer, I32LES),
+ I32LeU => op!(writer, I32LEU),
+ I32GeS => op!(writer, I32GES),
+ I32GeU => op!(writer, I32GEU),
+
+ I64Eqz => op!(writer, I64EQZ),
+ I64Eq => op!(writer, I64EQ),
+ I64Ne => op!(writer, I64NE),
+ I64LtS => op!(writer, I64LTS),
+ I64LtU => op!(writer, I64LTU),
+ I64GtS => op!(writer, I64GTS),
+ I64GtU => op!(writer, I64GTU),
+ I64LeS => op!(writer, I64LES),
+ I64LeU => op!(writer, I64LEU),
+ I64GeS => op!(writer, I64GES),
+ I64GeU => op!(writer, I64GEU),
+
+ F32Eq => op!(writer, F32EQ),
+ F32Ne => op!(writer, F32NE),
+ F32Lt => op!(writer, F32LT),
+ F32Gt => op!(writer, F32GT),
+ F32Le => op!(writer, F32LE),
+ F32Ge => op!(writer, F32GE),
+
+ F64Eq => op!(writer, F64EQ),
+ F64Ne => op!(writer, F64NE),
+ F64Lt => op!(writer, F64LT),
+ F64Gt => op!(writer, F64GT),
+ F64Le => op!(writer, F64LE),
+ F64Ge => op!(writer, F64GE),
+
+ I32Clz => op!(writer, I32CLZ),
+ I32Ctz => op!(writer, I32CTZ),
+ I32Popcnt => op!(writer, I32POPCNT),
+ I32Add => op!(writer, I32ADD),
+ I32Sub => op!(writer, I32SUB),
+ I32Mul => op!(writer, I32MUL),
+ I32DivS => op!(writer, I32DIVS),
+ I32DivU => op!(writer, I32DIVU),
+ I32RemS => op!(writer, I32REMS),
+ I32RemU => op!(writer, I32REMU),
+ I32And => op!(writer, I32AND),
+ I32Or => op!(writer, I32OR),
+ I32Xor => op!(writer, I32XOR),
+ I32Shl => op!(writer, I32SHL),
+ I32ShrS => op!(writer, I32SHRS),
+ I32ShrU => op!(writer, I32SHRU),
+ I32Rotl => op!(writer, I32ROTL),
+ I32Rotr => op!(writer, I32ROTR),
+
+ I64Clz => op!(writer, I64CLZ),
+ I64Ctz => op!(writer, I64CTZ),
+ I64Popcnt => op!(writer, I64POPCNT),
+ I64Add => op!(writer, I64ADD),
+ I64Sub => op!(writer, I64SUB),
+ I64Mul => op!(writer, I64MUL),
+ I64DivS => op!(writer, I64DIVS),
+ I64DivU => op!(writer, I64DIVU),
+ I64RemS => op!(writer, I64REMS),
+ I64RemU => op!(writer, I64REMU),
+ I64And => op!(writer, I64AND),
+ I64Or => op!(writer, I64OR),
+ I64Xor => op!(writer, I64XOR),
+ I64Shl => op!(writer, I64SHL),
+ I64ShrS => op!(writer, I64SHRS),
+ I64ShrU => op!(writer, I64SHRU),
+ I64Rotl => op!(writer, I64ROTL),
+ I64Rotr => op!(writer, I64ROTR),
+ F32Abs => op!(writer, F32ABS),
+ F32Neg => op!(writer, F32NEG),
+ F32Ceil => op!(writer, F32CEIL),
+ F32Floor => op!(writer, F32FLOOR),
+ F32Trunc => op!(writer, F32TRUNC),
+ F32Nearest => op!(writer, F32NEAREST),
+ F32Sqrt => op!(writer, F32SQRT),
+ F32Add => op!(writer, F32ADD),
+ F32Sub => op!(writer, F32SUB),
+ F32Mul => op!(writer, F32MUL),
+ F32Div => op!(writer, F32DIV),
+ F32Min => op!(writer, F32MIN),
+ F32Max => op!(writer, F32MAX),
+ F32Copysign => op!(writer, F32COPYSIGN),
+ F64Abs => op!(writer, F64ABS),
+ F64Neg => op!(writer, F64NEG),
+ F64Ceil => op!(writer, F64CEIL),
+ F64Floor => op!(writer, F64FLOOR),
+ F64Trunc => op!(writer, F64TRUNC),
+ F64Nearest => op!(writer, F64NEAREST),
+ F64Sqrt => op!(writer, F64SQRT),
+ F64Add => op!(writer, F64ADD),
+ F64Sub => op!(writer, F64SUB),
+ F64Mul => op!(writer, F64MUL),
+ F64Div => op!(writer, F64DIV),
+ F64Min => op!(writer, F64MIN),
+ F64Max => op!(writer, F64MAX),
+ F64Copysign => op!(writer, F64COPYSIGN),
+
+ I32WrapI64 => op!(writer, I32WRAPI64),
+ I32TruncSF32 => op!(writer, I32TRUNCSF32),
+ I32TruncUF32 => op!(writer, I32TRUNCUF32),
+ I32TruncSF64 => op!(writer, I32TRUNCSF64),
+ I32TruncUF64 => op!(writer, I32TRUNCUF64),
+ I64ExtendSI32 => op!(writer, I64EXTENDSI32),
+ I64ExtendUI32 => op!(writer, I64EXTENDUI32),
+ I64TruncSF32 => op!(writer, I64TRUNCSF32),
+ I64TruncUF32 => op!(writer, I64TRUNCUF32),
+ I64TruncSF64 => op!(writer, I64TRUNCSF64),
+ I64TruncUF64 => op!(writer, I64TRUNCUF64),
+ F32ConvertSI32 => op!(writer, F32CONVERTSI32),
+ F32ConvertUI32 => op!(writer, F32CONVERTUI32),
+ F32ConvertSI64 => op!(writer, F32CONVERTSI64),
+ F32ConvertUI64 => op!(writer, F32CONVERTUI64),
+ F32DemoteF64 => op!(writer, F32DEMOTEF64),
+ F64ConvertSI32 => op!(writer, F64CONVERTSI32),
+ F64ConvertUI32 => op!(writer, F64CONVERTUI32),
+ F64ConvertSI64 => op!(writer, F64CONVERTSI64),
+ F64ConvertUI64 => op!(writer, F64CONVERTUI64),
+ F64PromoteF32 => op!(writer, F64PROMOTEF32),
+
+ I32ReinterpretF32 => op!(writer, I32REINTERPRETF32),
+ I64ReinterpretF64 => op!(writer, I64REINTERPRETF64),
+ F32ReinterpretI32 => op!(writer, F32REINTERPRETI32),
+ F64ReinterpretI64 => op!(writer, F64REINTERPRETI64),
+
+ #[cfg(feature="sign_ext")]
+ SignExt(ref a) => match *a {
+ SignExtInstruction::I32Extend8S => op!(writer, sign_ext::I32_EXTEND8_S),
+ SignExtInstruction::I32Extend16S => op!(writer, sign_ext::I32_EXTEND16_S),
+ SignExtInstruction::I64Extend8S => op!(writer, sign_ext::I64_EXTEND8_S),
+ SignExtInstruction::I64Extend16S => op!(writer, sign_ext::I64_EXTEND16_S),
+ SignExtInstruction::I64Extend32S => op!(writer, sign_ext::I64_EXTEND32_S),
+ }
+
+ #[cfg(feature="atomics")]
+ Atomics(a) => return a.serialize(writer),
+
+ #[cfg(feature="simd")]
+ Simd(a) => return a.serialize(writer),
+
+ #[cfg(feature="bulk")]
+ Bulk(a) => return a.serialize(writer),
+ }
+
+ Ok(())
+ }
+}
+
+#[cfg(feature="atomics")]
+impl Serialize for AtomicsInstruction {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ use self::AtomicsInstruction::*;
+ use self::opcodes::atomics::*;
+
+ match self {
+ AtomicWake(m) => atomic!(writer, ATOMIC_WAKE, m),
+ I32AtomicWait(m) => atomic!(writer, I32_ATOMIC_WAIT, m),
+ I64AtomicWait(m) => atomic!(writer, I64_ATOMIC_WAIT, m),
+
+ I32AtomicLoad(m) => atomic!(writer, I32_ATOMIC_LOAD, m),
+ I64AtomicLoad(m) => atomic!(writer, I64_ATOMIC_LOAD, m),
+ I32AtomicLoad8u(m) => atomic!(writer, I32_ATOMIC_LOAD8U, m),
+ I32AtomicLoad16u(m) => atomic!(writer, I32_ATOMIC_LOAD16U, m),
+ I64AtomicLoad8u(m) => atomic!(writer, I64_ATOMIC_LOAD8U, m),
+ I64AtomicLoad16u(m) => atomic!(writer, I64_ATOMIC_LOAD16U, m),
+ I64AtomicLoad32u(m) => atomic!(writer, I64_ATOMIC_LOAD32U, m),
+ I32AtomicStore(m) => atomic!(writer, I32_ATOMIC_STORE, m),
+ I64AtomicStore(m) => atomic!(writer, I64_ATOMIC_STORE, m),
+ I32AtomicStore8u(m) => atomic!(writer, I32_ATOMIC_STORE8U, m),
+ I32AtomicStore16u(m) => atomic!(writer, I32_ATOMIC_STORE16U, m),
+ I64AtomicStore8u(m) => atomic!(writer, I64_ATOMIC_STORE8U, m),
+ I64AtomicStore16u(m) => atomic!(writer, I64_ATOMIC_STORE16U, m),
+ I64AtomicStore32u(m) => atomic!(writer, I64_ATOMIC_STORE32U, m),
+
+ I32AtomicRmwAdd(m) => atomic!(writer, I32_ATOMIC_RMW_ADD, m),
+ I64AtomicRmwAdd(m) => atomic!(writer, I64_ATOMIC_RMW_ADD, m),
+ I32AtomicRmwAdd8u(m) => atomic!(writer, I32_ATOMIC_RMW_ADD8U, m),
+ I32AtomicRmwAdd16u(m) => atomic!(writer, I32_ATOMIC_RMW_ADD16U, m),
+ I64AtomicRmwAdd8u(m) => atomic!(writer, I64_ATOMIC_RMW_ADD8U, m),
+ I64AtomicRmwAdd16u(m) => atomic!(writer, I64_ATOMIC_RMW_ADD16U, m),
+ I64AtomicRmwAdd32u(m) => atomic!(writer, I64_ATOMIC_RMW_ADD32U, m),
+
+ I32AtomicRmwSub(m) => atomic!(writer, I32_ATOMIC_RMW_SUB, m),
+ I64AtomicRmwSub(m) => atomic!(writer, I64_ATOMIC_RMW_SUB, m),
+ I32AtomicRmwSub8u(m) => atomic!(writer, I32_ATOMIC_RMW_SUB8U, m),
+ I32AtomicRmwSub16u(m) => atomic!(writer, I32_ATOMIC_RMW_SUB16U, m),
+ I64AtomicRmwSub8u(m) => atomic!(writer, I64_ATOMIC_RMW_SUB8U, m),
+ I64AtomicRmwSub16u(m) => atomic!(writer, I64_ATOMIC_RMW_SUB16U, m),
+ I64AtomicRmwSub32u(m) => atomic!(writer, I64_ATOMIC_RMW_SUB32U, m),
+
+ I32AtomicRmwAnd(m) => atomic!(writer, I32_ATOMIC_RMW_AND, m),
+ I64AtomicRmwAnd(m) => atomic!(writer, I64_ATOMIC_RMW_AND, m),
+ I32AtomicRmwAnd8u(m) => atomic!(writer, I32_ATOMIC_RMW_AND8U, m),
+ I32AtomicRmwAnd16u(m) => atomic!(writer, I32_ATOMIC_RMW_AND16U, m),
+ I64AtomicRmwAnd8u(m) => atomic!(writer, I64_ATOMIC_RMW_AND8U, m),
+ I64AtomicRmwAnd16u(m) => atomic!(writer, I64_ATOMIC_RMW_AND16U, m),
+ I64AtomicRmwAnd32u(m) => atomic!(writer, I64_ATOMIC_RMW_AND32U, m),
+
+ I32AtomicRmwOr(m) => atomic!(writer, I32_ATOMIC_RMW_OR, m),
+ I64AtomicRmwOr(m) => atomic!(writer, I64_ATOMIC_RMW_OR, m),
+ I32AtomicRmwOr8u(m) => atomic!(writer, I32_ATOMIC_RMW_OR8U, m),
+ I32AtomicRmwOr16u(m) => atomic!(writer, I32_ATOMIC_RMW_OR16U, m),
+ I64AtomicRmwOr8u(m) => atomic!(writer, I64_ATOMIC_RMW_OR8U, m),
+ I64AtomicRmwOr16u(m) => atomic!(writer, I64_ATOMIC_RMW_OR16U, m),
+ I64AtomicRmwOr32u(m) => atomic!(writer, I64_ATOMIC_RMW_OR32U, m),
+
+ I32AtomicRmwXor(m) => atomic!(writer, I32_ATOMIC_RMW_XOR, m),
+ I64AtomicRmwXor(m) => atomic!(writer, I64_ATOMIC_RMW_XOR, m),
+ I32AtomicRmwXor8u(m) => atomic!(writer, I32_ATOMIC_RMW_XOR8U, m),
+ I32AtomicRmwXor16u(m) => atomic!(writer, I32_ATOMIC_RMW_XOR16U, m),
+ I64AtomicRmwXor8u(m) => atomic!(writer, I64_ATOMIC_RMW_XOR8U, m),
+ I64AtomicRmwXor16u(m) => atomic!(writer, I64_ATOMIC_RMW_XOR16U, m),
+ I64AtomicRmwXor32u(m) => atomic!(writer, I64_ATOMIC_RMW_XOR32U, m),
+
+ I32AtomicRmwXchg(m) => atomic!(writer, I32_ATOMIC_RMW_XCHG, m),
+ I64AtomicRmwXchg(m) => atomic!(writer, I64_ATOMIC_RMW_XCHG, m),
+ I32AtomicRmwXchg8u(m) => atomic!(writer, I32_ATOMIC_RMW_XCHG8U, m),
+ I32AtomicRmwXchg16u(m) => atomic!(writer, I32_ATOMIC_RMW_XCHG16U, m),
+ I64AtomicRmwXchg8u(m) => atomic!(writer, I64_ATOMIC_RMW_XCHG8U, m),
+ I64AtomicRmwXchg16u(m) => atomic!(writer, I64_ATOMIC_RMW_XCHG16U, m),
+ I64AtomicRmwXchg32u(m) => atomic!(writer, I64_ATOMIC_RMW_XCHG32U, m),
+
+ I32AtomicRmwCmpxchg(m) => atomic!(writer, I32_ATOMIC_RMW_CMPXCHG, m),
+ I64AtomicRmwCmpxchg(m) => atomic!(writer, I64_ATOMIC_RMW_CMPXCHG, m),
+ I32AtomicRmwCmpxchg8u(m) => atomic!(writer, I32_ATOMIC_RMW_CMPXCHG8U, m),
+ I32AtomicRmwCmpxchg16u(m) => atomic!(writer, I32_ATOMIC_RMW_CMPXCHG16U, m),
+ I64AtomicRmwCmpxchg8u(m) => atomic!(writer, I64_ATOMIC_RMW_CMPXCHG8U, m),
+ I64AtomicRmwCmpxchg16u(m) => atomic!(writer, I64_ATOMIC_RMW_CMPXCHG16U, m),
+ I64AtomicRmwCmpxchg32u(m) => atomic!(writer, I64_ATOMIC_RMW_CMPXCHG32U, m),
+ }
+
+ Ok(())
+ }
+}
+
+#[cfg(feature="simd")]
+impl Serialize for SimdInstruction {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ use self::SimdInstruction::*;
+ use self::opcodes::simd::*;
+
+ match self {
+ V128Const(ref c) => simd!(writer, V128_CONST, writer.write(&c[..])?),
+ V128Load(m) => simd!(writer, V128_LOAD, MemArg::serialize(m, writer)?),
+ V128Store(m) => simd!(writer, V128_STORE, MemArg::serialize(m, writer)?),
+ I8x16Splat => simd!(writer, I8X16_SPLAT, ()),
+ I16x8Splat => simd!(writer, I16X8_SPLAT, ()),
+ I32x4Splat => simd!(writer, I32X4_SPLAT, ()),
+ I64x2Splat => simd!(writer, I64X2_SPLAT, ()),
+ F32x4Splat => simd!(writer, F32X4_SPLAT, ()),
+ F64x2Splat => simd!(writer, F64X2_SPLAT, ()),
+ I8x16ExtractLaneS(i) => simd!(writer, I8X16_EXTRACT_LANE_S, writer.write(&[i])?),
+ I8x16ExtractLaneU(i) => simd!(writer, I8X16_EXTRACT_LANE_U, writer.write(&[i])?),
+ I16x8ExtractLaneS(i) => simd!(writer, I16X8_EXTRACT_LANE_S, writer.write(&[i])?),
+ I16x8ExtractLaneU(i) => simd!(writer, I16X8_EXTRACT_LANE_U, writer.write(&[i])?),
+ I32x4ExtractLane(i) => simd!(writer, I32X4_EXTRACT_LANE, writer.write(&[i])?),
+ I64x2ExtractLane(i) => simd!(writer, I64X2_EXTRACT_LANE, writer.write(&[i])?),
+ F32x4ExtractLane(i) => simd!(writer, F32X4_EXTRACT_LANE, writer.write(&[i])?),
+ F64x2ExtractLane(i) => simd!(writer, F64X2_EXTRACT_LANE, writer.write(&[i])?),
+ I8x16ReplaceLane(i) => simd!(writer, I8X16_REPLACE_LANE, writer.write(&[i])?),
+ I16x8ReplaceLane(i) => simd!(writer, I16X8_REPLACE_LANE, writer.write(&[i])?),
+ I32x4ReplaceLane(i) => simd!(writer, I32X4_REPLACE_LANE, writer.write(&[i])?),
+ I64x2ReplaceLane(i) => simd!(writer, I64X2_REPLACE_LANE, writer.write(&[i])?),
+ F32x4ReplaceLane(i) => simd!(writer, F32X4_REPLACE_LANE, writer.write(&[i])?),
+ F64x2ReplaceLane(i) => simd!(writer, F64X2_REPLACE_LANE, writer.write(&[i])?),
+ V8x16Shuffle(ref i) => simd!(writer, V8X16_SHUFFLE, writer.write(&i[..])?),
+ I8x16Add => simd!(writer, I8X16_ADD, ()),
+ I16x8Add => simd!(writer, I16X8_ADD, ()),
+ I32x4Add => simd!(writer, I32X4_ADD, ()),
+ I64x2Add => simd!(writer, I64X2_ADD, ()),
+ I8x16Sub => simd!(writer, I8X16_SUB, ()),
+ I16x8Sub => simd!(writer, I16X8_SUB, ()),
+ I32x4Sub => simd!(writer, I32X4_SUB, ()),
+ I64x2Sub => simd!(writer, I64X2_SUB, ()),
+ I8x16Mul => simd!(writer, I8X16_MUL, ()),
+ I16x8Mul => simd!(writer, I16X8_MUL, ()),
+ I32x4Mul => simd!(writer, I32X4_MUL, ()),
+ // I64x2Mul => simd!(writer, I64X2_MUL, ()),
+ I8x16Neg => simd!(writer, I8X16_NEG, ()),
+ I16x8Neg => simd!(writer, I16X8_NEG, ()),
+ I32x4Neg => simd!(writer, I32X4_NEG, ()),
+ I64x2Neg => simd!(writer, I64X2_NEG, ()),
+ I8x16AddSaturateS => simd!(writer, I8X16_ADD_SATURATE_S, ()),
+ I8x16AddSaturateU => simd!(writer, I8X16_ADD_SATURATE_U, ()),
+ I16x8AddSaturateS => simd!(writer, I16X8_ADD_SATURATE_S, ()),
+ I16x8AddSaturateU => simd!(writer, I16X8_ADD_SATURATE_U, ()),
+ I8x16SubSaturateS => simd!(writer, I8X16_SUB_SATURATE_S, ()),
+ I8x16SubSaturateU => simd!(writer, I8X16_SUB_SATURATE_U, ()),
+ I16x8SubSaturateS => simd!(writer, I16X8_SUB_SATURATE_S, ()),
+ I16x8SubSaturateU => simd!(writer, I16X8_SUB_SATURATE_U, ()),
+ I8x16Shl => simd!(writer, I8X16_SHL, ()),
+ I16x8Shl => simd!(writer, I16X8_SHL, ()),
+ I32x4Shl => simd!(writer, I32X4_SHL, ()),
+ I64x2Shl => simd!(writer, I64X2_SHL, ()),
+ I8x16ShrS => simd!(writer, I8X16_SHR_S, ()),
+ I8x16ShrU => simd!(writer, I8X16_SHR_U, ()),
+ I16x8ShrS => simd!(writer, I16X8_SHR_S, ()),
+ I16x8ShrU => simd!(writer, I16X8_SHR_U, ()),
+ I32x4ShrU => simd!(writer, I32X4_SHR_U, ()),
+ I32x4ShrS => simd!(writer, I32X4_SHR_S, ()),
+ I64x2ShrU => simd!(writer, I64X2_SHR_U, ()),
+ I64x2ShrS => simd!(writer, I64X2_SHR_S, ()),
+ V128And => simd!(writer, V128_AND, ()),
+ V128Or => simd!(writer, V128_OR, ()),
+ V128Xor => simd!(writer, V128_XOR, ()),
+ V128Not => simd!(writer, V128_NOT, ()),
+ V128Bitselect => simd!(writer, V128_BITSELECT, ()),
+ I8x16AnyTrue => simd!(writer, I8X16_ANY_TRUE, ()),
+ I16x8AnyTrue => simd!(writer, I16X8_ANY_TRUE, ()),
+ I32x4AnyTrue => simd!(writer, I32X4_ANY_TRUE, ()),
+ I64x2AnyTrue => simd!(writer, I64X2_ANY_TRUE, ()),
+ I8x16AllTrue => simd!(writer, I8X16_ALL_TRUE, ()),
+ I16x8AllTrue => simd!(writer, I16X8_ALL_TRUE, ()),
+ I32x4AllTrue => simd!(writer, I32X4_ALL_TRUE, ()),
+ I64x2AllTrue => simd!(writer, I64X2_ALL_TRUE, ()),
+ I8x16Eq => simd!(writer, I8X16_EQ, ()),
+ I16x8Eq => simd!(writer, I16X8_EQ, ()),
+ I32x4Eq => simd!(writer, I32X4_EQ, ()),
+ // I64x2Eq => simd!(writer, I64X2_EQ, ()),
+ F32x4Eq => simd!(writer, F32X4_EQ, ()),
+ F64x2Eq => simd!(writer, F64X2_EQ, ()),
+ I8x16Ne => simd!(writer, I8X16_NE, ()),
+ I16x8Ne => simd!(writer, I16X8_NE, ()),
+ I32x4Ne => simd!(writer, I32X4_NE, ()),
+ // I64x2Ne => simd!(writer, I64X2_NE, ()),
+ F32x4Ne => simd!(writer, F32X4_NE, ()),
+ F64x2Ne => simd!(writer, F64X2_NE, ()),
+ I8x16LtS => simd!(writer, I8X16_LT_S, ()),
+ I8x16LtU => simd!(writer, I8X16_LT_U, ()),
+ I16x8LtS => simd!(writer, I16X8_LT_S, ()),
+ I16x8LtU => simd!(writer, I16X8_LT_U, ()),
+ I32x4LtS => simd!(writer, I32X4_LT_S, ()),
+ I32x4LtU => simd!(writer, I32X4_LT_U, ()),
+ // I64x2LtS => simd!(writer, I64X2_LT_S, ()),
+ // I64x2LtU => simd!(writer, I64X2_LT_U, ()),
+ F32x4Lt => simd!(writer, F32X4_LT, ()),
+ F64x2Lt => simd!(writer, F64X2_LT, ()),
+ I8x16LeS => simd!(writer, I8X16_LE_S, ()),
+ I8x16LeU => simd!(writer, I8X16_LE_U, ()),
+ I16x8LeS => simd!(writer, I16X8_LE_S, ()),
+ I16x8LeU => simd!(writer, I16X8_LE_U, ()),
+ I32x4LeS => simd!(writer, I32X4_LE_S, ()),
+ I32x4LeU => simd!(writer, I32X4_LE_U, ()),
+ // I64x2LeS => simd!(writer, I64X2_LE_S, ()),
+ // I64x2LeU => simd!(writer, I64X2_LE_U, ()),
+ F32x4Le => simd!(writer, F32X4_LE, ()),
+ F64x2Le => simd!(writer, F64X2_LE, ()),
+ I8x16GtS => simd!(writer, I8X16_GT_S, ()),
+ I8x16GtU => simd!(writer, I8X16_GT_U, ()),
+ I16x8GtS => simd!(writer, I16X8_GT_S, ()),
+ I16x8GtU => simd!(writer, I16X8_GT_U, ()),
+ I32x4GtS => simd!(writer, I32X4_GT_S, ()),
+ I32x4GtU => simd!(writer, I32X4_GT_U, ()),
+ // I64x2GtS => simd!(writer, I64X2_GT_S, ()),
+ // I64x2GtU => simd!(writer, I64X2_GT_U, ()),
+ F32x4Gt => simd!(writer, F32X4_GT, ()),
+ F64x2Gt => simd!(writer, F64X2_GT, ()),
+ I8x16GeS => simd!(writer, I8X16_GE_S, ()),
+ I8x16GeU => simd!(writer, I8X16_GE_U, ()),
+ I16x8GeS => simd!(writer, I16X8_GE_S, ()),
+ I16x8GeU => simd!(writer, I16X8_GE_U, ()),
+ I32x4GeS => simd!(writer, I32X4_GE_S, ()),
+ I32x4GeU => simd!(writer, I32X4_GE_U, ()),
+ // I64x2GeS => simd!(writer, I64X2_GE_S, ()),
+ // I64x2GeU => simd!(writer, I64X2_GE_U, ()),
+ F32x4Ge => simd!(writer, F32X4_GE, ()),
+ F64x2Ge => simd!(writer, F64X2_GE, ()),
+ F32x4Neg => simd!(writer, F32X4_NEG, ()),
+ F64x2Neg => simd!(writer, F64X2_NEG, ()),
+ F32x4Abs => simd!(writer, F32X4_ABS, ()),
+ F64x2Abs => simd!(writer, F64X2_ABS, ()),
+ F32x4Min => simd!(writer, F32X4_MIN, ()),
+ F64x2Min => simd!(writer, F64X2_MIN, ()),
+ F32x4Max => simd!(writer, F32X4_MAX, ()),
+ F64x2Max => simd!(writer, F64X2_MAX, ()),
+ F32x4Add => simd!(writer, F32X4_ADD, ()),
+ F64x2Add => simd!(writer, F64X2_ADD, ()),
+ F32x4Sub => simd!(writer, F32X4_SUB, ()),
+ F64x2Sub => simd!(writer, F64X2_SUB, ()),
+ F32x4Div => simd!(writer, F32X4_DIV, ()),
+ F64x2Div => simd!(writer, F64X2_DIV, ()),
+ F32x4Mul => simd!(writer, F32X4_MUL, ()),
+ F64x2Mul => simd!(writer, F64X2_MUL, ()),
+ F32x4Sqrt => simd!(writer, F32X4_SQRT, ()),
+ F64x2Sqrt => simd!(writer, F64X2_SQRT, ()),
+ F32x4ConvertSI32x4 => simd!(writer, F32X4_CONVERT_S_I32X4, ()),
+ F32x4ConvertUI32x4 => simd!(writer, F32X4_CONVERT_U_I32X4, ()),
+ F64x2ConvertSI64x2 => simd!(writer, F64X2_CONVERT_S_I64X2, ()),
+ F64x2ConvertUI64x2 => simd!(writer, F64X2_CONVERT_U_I64X2, ()),
+ I32x4TruncSF32x4Sat => simd!(writer, I32X4_TRUNC_S_F32X4_SAT, ()),
+ I32x4TruncUF32x4Sat => simd!(writer, I32X4_TRUNC_U_F32X4_SAT, ()),
+ I64x2TruncSF64x2Sat => simd!(writer, I64X2_TRUNC_S_F64X2_SAT, ()),
+ I64x2TruncUF64x2Sat => simd!(writer, I64X2_TRUNC_U_F64X2_SAT, ()),
+ }
+
+ Ok(())
+ }
+}
+
+#[cfg(feature="bulk")]
+impl Serialize for BulkInstruction {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ use self::BulkInstruction::*;
+ use self::opcodes::bulk::*;
+
+ match self {
+ MemoryInit(seg) => bulk!(writer, MEMORY_INIT, {
+ Uint8::from(0).serialize(writer)?;
+ VarUint32::from(seg).serialize(writer)?;
+ }),
+ MemoryDrop(seg) => bulk!(writer, MEMORY_DROP, VarUint32::from(seg).serialize(writer)?),
+ MemoryFill => bulk!(writer, MEMORY_FILL, Uint8::from(0).serialize(writer)?),
+ MemoryCopy => bulk!(writer, MEMORY_COPY, Uint8::from(0).serialize(writer)?),
+ TableInit(seg) => bulk!(writer, TABLE_INIT, {
+ Uint8::from(0).serialize(writer)?;
+ VarUint32::from(seg).serialize(writer)?;
+ }),
+ TableDrop(seg) => bulk!(writer, TABLE_DROP, VarUint32::from(seg).serialize(writer)?),
+ TableCopy => bulk!(writer, TABLE_COPY, Uint8::from(0).serialize(writer)?),
+ }
+
+ Ok(())
+ }
+}
+
+#[cfg(any(feature="simd", feature="atomics"))]
+impl Serialize for MemArg {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ Uint8::from(self.align).serialize(writer)?;
+ VarUint32::from(self.offset).serialize(writer)?;
+ Ok(())
+ }
+}
+
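+// Text rendering: `fmt_op!` prints a mnemonic with up to two immediates, and the
+// `Display` impls below emit textual mnemonics such as `get_local` and
+// `i32.wrap/i64`.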
+macro_rules! fmt_op {
+ ($f: expr, $mnemonic: expr) => ({
+ write!($f, "{}", $mnemonic)
+ });
+ ($f: expr, $mnemonic: expr, $immediate: expr) => ({
+ write!($f, "{} {}", $mnemonic, $immediate)
+ });
+ ($f: expr, $mnemonic: expr, $immediate1: expr, $immediate2: expr) => ({
+ write!($f, "{} {} {}", $mnemonic, $immediate1, $immediate2)
+ });
+}
+
+impl fmt::Display for Instruction {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ use self::Instruction::*;
+
+ match *self {
+ Unreachable => fmt_op!(f, "unreachable"),
+ Nop => fmt_op!(f, "nop"),
+ Block(BlockType::NoResult) => fmt_op!(f, "block"),
+ Block(BlockType::Value(value_type)) => fmt_op!(f, "block", value_type),
+ Loop(BlockType::NoResult) => fmt_op!(f, "loop"),
+ Loop(BlockType::Value(value_type)) => fmt_op!(f, "loop", value_type),
+ If(BlockType::NoResult) => fmt_op!(f, "if"),
+ If(BlockType::Value(value_type)) => fmt_op!(f, "if", value_type),
+ Else => fmt_op!(f, "else"),
+ End => fmt_op!(f, "end"),
+ Br(idx) => fmt_op!(f, "br", idx),
+ BrIf(idx) => fmt_op!(f, "br_if", idx),
+ BrTable(ref table) => fmt_op!(f, "br_table", table.default),
+ Return => fmt_op!(f, "return"),
+ Call(index) => fmt_op!(f, "call", index),
+ CallIndirect(index, _) => fmt_op!(f, "call_indirect", index),
+ Drop => fmt_op!(f, "drop"),
+ Select => fmt_op!(f, "select"),
+ GetLocal(index) => fmt_op!(f, "get_local", index),
+ SetLocal(index) => fmt_op!(f, "set_local", index),
+ TeeLocal(index) => fmt_op!(f, "tee_local", index),
+ GetGlobal(index) => fmt_op!(f, "get_global", index),
+ SetGlobal(index) => fmt_op!(f, "set_global", index),
+
+ I32Load(_, 0) => write!(f, "i32.load"),
+ I32Load(_, offset) => write!(f, "i32.load offset={}", offset),
+
+ I64Load(_, 0) => write!(f, "i64.load"),
+ I64Load(_, offset) => write!(f, "i64.load offset={}", offset),
+
+ F32Load(_, 0) => write!(f, "f32.load"),
+ F32Load(_, offset) => write!(f, "f32.load offset={}", offset),
+
+ F64Load(_, 0) => write!(f, "f64.load"),
+ F64Load(_, offset) => write!(f, "f64.load offset={}", offset),
+
+ I32Load8S(_, 0) => write!(f, "i32.load8_s"),
+ I32Load8S(_, offset) => write!(f, "i32.load8_s offset={}", offset),
+
+ I32Load8U(_, 0) => write!(f, "i32.load8_u"),
+ I32Load8U(_, offset) => write!(f, "i32.load8_u offset={}", offset),
+
+ I32Load16S(_, 0) => write!(f, "i32.load16_s"),
+ I32Load16S(_, offset) => write!(f, "i32.load16_s offset={}", offset),
+
+ I32Load16U(_, 0) => write!(f, "i32.load16_u"),
+ I32Load16U(_, offset) => write!(f, "i32.load16_u offset={}", offset),
+
+ I64Load8S(_, 0) => write!(f, "i64.load8_s"),
+ I64Load8S(_, offset) => write!(f, "i64.load8_s offset={}", offset),
+
+ I64Load8U(_, 0) => write!(f, "i64.load8_u"),
+ I64Load8U(_, offset) => write!(f, "i64.load8_u offset={}", offset),
+
+ I64Load16S(_, 0) => write!(f, "i64.load16_s"),
+ I64Load16S(_, offset) => write!(f, "i64.load16_s offset={}", offset),
+
+ I64Load16U(_, 0) => write!(f, "i64.load16_u"),
+ I64Load16U(_, offset) => write!(f, "i64.load16_u offset={}", offset),
+
+ I64Load32S(_, 0) => write!(f, "i64.load32_s"),
+ I64Load32S(_, offset) => write!(f, "i64.load32_s offset={}", offset),
+
+ I64Load32U(_, 0) => write!(f, "i64.load32_u"),
+ I64Load32U(_, offset) => write!(f, "i64.load32_u offset={}", offset),
+
+ I32Store(_, 0) => write!(f, "i32.store"),
+ I32Store(_, offset) => write!(f, "i32.store offset={}", offset),
+
+ I64Store(_, 0) => write!(f, "i64.store"),
+ I64Store(_, offset) => write!(f, "i64.store offset={}", offset),
+
+ F32Store(_, 0) => write!(f, "f32.store"),
+ F32Store(_, offset) => write!(f, "f32.store offset={}", offset),
+
+ F64Store(_, 0) => write!(f, "f64.store"),
+ F64Store(_, offset) => write!(f, "f64.store offset={}", offset),
+
+ I32Store8(_, 0) => write!(f, "i32.store8"),
+ I32Store8(_, offset) => write!(f, "i32.store8 offset={}", offset),
+
+ I32Store16(_, 0) => write!(f, "i32.store16"),
+ I32Store16(_, offset) => write!(f, "i32.store16 offset={}", offset),
+
+ I64Store8(_, 0) => write!(f, "i64.store8"),
+ I64Store8(_, offset) => write!(f, "i64.store8 offset={}", offset),
+
+ I64Store16(_, 0) => write!(f, "i64.store16"),
+ I64Store16(_, offset) => write!(f, "i64.store16 offset={}", offset),
+
+ I64Store32(_, 0) => write!(f, "i64.store32"),
+ I64Store32(_, offset) => write!(f, "i64.store32 offset={}", offset),
+
+ CurrentMemory(_) => fmt_op!(f, "current_memory"),
+ GrowMemory(_) => fmt_op!(f, "grow_memory"),
+
+ I32Const(def) => fmt_op!(f, "i32.const", def),
+ I64Const(def) => fmt_op!(f, "i64.const", def),
+ F32Const(def) => fmt_op!(f, "f32.const", def),
+ F64Const(def) => fmt_op!(f, "f64.const", def),
+
+ I32Eq => write!(f, "i32.eq"),
+ I32Eqz => write!(f, "i32.eqz"),
+ I32Ne => write!(f, "i32.ne"),
+ I32LtS => write!(f, "i32.lt_s"),
+ I32LtU => write!(f, "i32.lt_u"),
+ I32GtS => write!(f, "i32.gt_s"),
+ I32GtU => write!(f, "i32.gt_u"),
+ I32LeS => write!(f, "i32.le_s"),
+ I32LeU => write!(f, "i32.le_u"),
+ I32GeS => write!(f, "i32.ge_s"),
+ I32GeU => write!(f, "i32.ge_u"),
+
+ I64Eq => write!(f, "i64.eq"),
+ I64Eqz => write!(f, "i64.eqz"),
+ I64Ne => write!(f, "i64.ne"),
+ I64LtS => write!(f, "i64.lt_s"),
+ I64LtU => write!(f, "i64.lt_u"),
+ I64GtS => write!(f, "i64.gt_s"),
+ I64GtU => write!(f, "i64.gt_u"),
+ I64LeS => write!(f, "i64.le_s"),
+ I64LeU => write!(f, "i64.le_u"),
+ I64GeS => write!(f, "i64.ge_s"),
+ I64GeU => write!(f, "i64.ge_u"),
+
+ F32Eq => write!(f, "f32.eq"),
+ F32Ne => write!(f, "f32.ne"),
+ F32Lt => write!(f, "f32.lt"),
+ F32Gt => write!(f, "f32.gt"),
+ F32Le => write!(f, "f32.le"),
+ F32Ge => write!(f, "f32.ge"),
+
+ F64Eq => write!(f, "f64.eq"),
+ F64Ne => write!(f, "f64.ne"),
+ F64Lt => write!(f, "f64.lt"),
+ F64Gt => write!(f, "f64.gt"),
+ F64Le => write!(f, "f64.le"),
+ F64Ge => write!(f, "f64.ge"),
+
+ I32Clz => write!(f, "i32.clz"),
+ I32Ctz => write!(f, "i32.ctz"),
+ I32Popcnt => write!(f, "i32.popcnt"),
+ I32Add => write!(f, "i32.add"),
+ I32Sub => write!(f, "i32.sub"),
+ I32Mul => write!(f, "i32.mul"),
+ I32DivS => write!(f, "i32.div_s"),
+ I32DivU => write!(f, "i32.div_u"),
+ I32RemS => write!(f, "i32.rem_s"),
+ I32RemU => write!(f, "i32.rem_u"),
+ I32And => write!(f, "i32.and"),
+ I32Or => write!(f, "i32.or"),
+ I32Xor => write!(f, "i32.xor"),
+ I32Shl => write!(f, "i32.shl"),
+ I32ShrS => write!(f, "i32.shr_s"),
+ I32ShrU => write!(f, "i32.shr_u"),
+ I32Rotl => write!(f, "i32.rotl"),
+ I32Rotr => write!(f, "i32.rotr"),
+
+ I64Clz => write!(f, "i64.clz"),
+ I64Ctz => write!(f, "i64.ctz"),
+ I64Popcnt => write!(f, "i64.popcnt"),
+ I64Add => write!(f, "i64.add"),
+ I64Sub => write!(f, "i64.sub"),
+ I64Mul => write!(f, "i64.mul"),
+ I64DivS => write!(f, "i64.div_s"),
+ I64DivU => write!(f, "i64.div_u"),
+ I64RemS => write!(f, "i64.rem_s"),
+ I64RemU => write!(f, "i64.rem_u"),
+ I64And => write!(f, "i64.and"),
+ I64Or => write!(f, "i64.or"),
+ I64Xor => write!(f, "i64.xor"),
+ I64Shl => write!(f, "i64.shl"),
+ I64ShrS => write!(f, "i64.shr_s"),
+ I64ShrU => write!(f, "i64.shr_u"),
+ I64Rotl => write!(f, "i64.rotl"),
+ I64Rotr => write!(f, "i64.rotr"),
+
+ F32Abs => write!(f, "f32.abs"),
+ F32Neg => write!(f, "f32.neg"),
+ F32Ceil => write!(f, "f32.ceil"),
+ F32Floor => write!(f, "f32.floor"),
+ F32Trunc => write!(f, "f32.trunc"),
+ F32Nearest => write!(f, "f32.nearest"),
+ F32Sqrt => write!(f, "f32.sqrt"),
+ F32Add => write!(f, "f32.add"),
+ F32Sub => write!(f, "f32.sub"),
+ F32Mul => write!(f, "f32.mul"),
+ F32Div => write!(f, "f32.div"),
+ F32Min => write!(f, "f32.min"),
+ F32Max => write!(f, "f32.max"),
+ F32Copysign => write!(f, "f32.copysign"),
+
+ F64Abs => write!(f, "f64.abs"),
+ F64Neg => write!(f, "f64.neg"),
+ F64Ceil => write!(f, "f64.ceil"),
+ F64Floor => write!(f, "f64.floor"),
+ F64Trunc => write!(f, "f64.trunc"),
+ F64Nearest => write!(f, "f64.nearest"),
+ F64Sqrt => write!(f, "f64.sqrt"),
+ F64Add => write!(f, "f64.add"),
+ F64Sub => write!(f, "f64.sub"),
+ F64Mul => write!(f, "f64.mul"),
+ F64Div => write!(f, "f64.div"),
+ F64Min => write!(f, "f64.min"),
+ F64Max => write!(f, "f64.max"),
+ F64Copysign => write!(f, "f64.copysign"),
+
+ I32WrapI64 => write!(f, "i32.wrap/i64"),
+ I32TruncSF32 => write!(f, "i32.trunc_s/f32"),
+ I32TruncUF32 => write!(f, "i32.trunc_u/f32"),
+ I32TruncSF64 => write!(f, "i32.trunc_s/f64"),
+ I32TruncUF64 => write!(f, "i32.trunc_u/f64"),
+
+ I64ExtendSI32 => write!(f, "i64.extend_s/i32"),
+ I64ExtendUI32 => write!(f, "i64.extend_u/i32"),
+
+ I64TruncSF32 => write!(f, "i64.trunc_s/f32"),
+ I64TruncUF32 => write!(f, "i64.trunc_u/f32"),
+ I64TruncSF64 => write!(f, "i64.trunc_s/f64"),
+ I64TruncUF64 => write!(f, "i64.trunc_u/f64"),
+
+ F32ConvertSI32 => write!(f, "f32.convert_s/i32"),
+ F32ConvertUI32 => write!(f, "f32.convert_u/i32"),
+ F32ConvertSI64 => write!(f, "f32.convert_s/i64"),
+ F32ConvertUI64 => write!(f, "f32.convert_u/i64"),
+ F32DemoteF64 => write!(f, "f32.demote/f64"),
+
+ F64ConvertSI32 => write!(f, "f64.convert_s/i32"),
+ F64ConvertUI32 => write!(f, "f64.convert_u/i32"),
+ F64ConvertSI64 => write!(f, "f64.convert_s/i64"),
+ F64ConvertUI64 => write!(f, "f64.convert_u/i64"),
+ F64PromoteF32 => write!(f, "f64.promote/f32"),
+
+ I32ReinterpretF32 => write!(f, "i32.reinterpret/f32"),
+ I64ReinterpretF64 => write!(f, "i64.reinterpret/f64"),
+ F32ReinterpretI32 => write!(f, "f32.reinterpret/i32"),
+ F64ReinterpretI64 => write!(f, "f64.reinterpret/i64"),
+
+ #[cfg(feature="sign_ext")]
+ SignExt(ref i) => match i {
+ SignExtInstruction::I32Extend8S => write!(f, "i32.extend8_s"),
+ SignExtInstruction::I32Extend16S => write!(f, "i32.extend16_s"),
+ SignExtInstruction::I64Extend8S => write!(f, "i64.extend8_s"),
+ SignExtInstruction::I64Extend16S => write!(f, "i64.extend16_s"),
+ SignExtInstruction::I64Extend32S => write!(f, "i64.extend32_s"),
+ }
+
+ #[cfg(feature="atomics")]
+ Atomics(ref i) => i.fmt(f),
+
+ #[cfg(feature="simd")]
+ Simd(ref i) => i.fmt(f),
+
+ #[cfg(feature="bulk")]
+ Bulk(ref i) => i.fmt(f),
+ }
+ }
+}
+
+#[cfg(feature="atomics")]
+impl fmt::Display for AtomicsInstruction {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ use self::AtomicsInstruction::*;
+
+ match *self {
+ AtomicWake(_) => write!(f, "atomic.wake"),
+ I32AtomicWait(_) => write!(f, "i32.atomic.wait"),
+ I64AtomicWait(_) => write!(f, "i64.atomic.wait"),
+
+ I32AtomicLoad(_) => write!(f, "i32.atomic.load"),
+ I64AtomicLoad(_) => write!(f, "i64.atomic.load"),
+ I32AtomicLoad8u(_) => write!(f, "i32.atomic.load8_u"),
+ I32AtomicLoad16u(_) => write!(f, "i32.atomic.load16_u"),
+ I64AtomicLoad8u(_) => write!(f, "i64.atomic.load8_u"),
+ I64AtomicLoad16u(_) => write!(f, "i64.atomic.load16_u"),
+ I64AtomicLoad32u(_) => write!(f, "i64.atomic.load32_u"),
+ I32AtomicStore(_) => write!(f, "i32.atomic.store"),
+ I64AtomicStore(_) => write!(f, "i64.atomic.store"),
+ I32AtomicStore8u(_) => write!(f, "i32.atomic.store8_u"),
+ I32AtomicStore16u(_) => write!(f, "i32.atomic.store16_u"),
+ I64AtomicStore8u(_) => write!(f, "i64.atomic.store8_u"),
+ I64AtomicStore16u(_) => write!(f, "i64.atomic.store16_u"),
+ I64AtomicStore32u(_) => write!(f, "i64.atomic.store32_u"),
+
+ I32AtomicRmwAdd(_) => write!(f, "i32.atomic.rmw.add"),
+ I64AtomicRmwAdd(_) => write!(f, "i64.atomic.rmw.add"),
+ I32AtomicRmwAdd8u(_) => write!(f, "i32.atomic.rmw8_u.add"),
+ I32AtomicRmwAdd16u(_) => write!(f, "i32.atomic.rmw16_u.add"),
+ I64AtomicRmwAdd8u(_) => write!(f, "i64.atomic.rmw8_u.add"),
+ I64AtomicRmwAdd16u(_) => write!(f, "i64.atomic.rmw16_u.add"),
+ I64AtomicRmwAdd32u(_) => write!(f, "i64.atomic.rmw32_u.add"),
+
+ I32AtomicRmwSub(_) => write!(f, "i32.atomic.rmw.sub"),
+ I64AtomicRmwSub(_) => write!(f, "i64.atomic.rmw.sub"),
+ I32AtomicRmwSub8u(_) => write!(f, "i32.atomic.rmw8_u.sub"),
+ I32AtomicRmwSub16u(_) => write!(f, "i32.atomic.rmw16_u.sub"),
+ I64AtomicRmwSub8u(_) => write!(f, "i64.atomic.rmw8_u.sub"),
+ I64AtomicRmwSub16u(_) => write!(f, "i64.atomic.rmw16_u.sub"),
+ I64AtomicRmwSub32u(_) => write!(f, "i64.atomic.rmw32_u.sub"),
+
+ I32AtomicRmwAnd(_) => write!(f, "i32.atomic.rmw.and"),
+ I64AtomicRmwAnd(_) => write!(f, "i64.atomic.rmw.and"),
+ I32AtomicRmwAnd8u(_) => write!(f, "i32.atomic.rmw8_u.and"),
+ I32AtomicRmwAnd16u(_) => write!(f, "i32.atomic.rmw16_u.and"),
+ I64AtomicRmwAnd8u(_) => write!(f, "i64.atomic.rmw8_u.and"),
+ I64AtomicRmwAnd16u(_) => write!(f, "i64.atomic.rmw16_u.and"),
+ I64AtomicRmwAnd32u(_) => write!(f, "i64.atomic.rmw32_u.and"),
+
+ I32AtomicRmwOr(_) => write!(f, "i32.atomic.rmw.or"),
+ I64AtomicRmwOr(_) => write!(f, "i64.atomic.rmw.or"),
+ I32AtomicRmwOr8u(_) => write!(f, "i32.atomic.rmw8_u.or"),
+ I32AtomicRmwOr16u(_) => write!(f, "i32.atomic.rmw16_u.or"),
+ I64AtomicRmwOr8u(_) => write!(f, "i64.atomic.rmw8_u.or"),
+ I64AtomicRmwOr16u(_) => write!(f, "i64.atomic.rmw16_u.or"),
+ I64AtomicRmwOr32u(_) => write!(f, "i64.atomic.rmw32_u.or"),
+
+ I32AtomicRmwXor(_) => write!(f, "i32.atomic.rmw.xor"),
+ I64AtomicRmwXor(_) => write!(f, "i64.atomic.rmw.xor"),
+ I32AtomicRmwXor8u(_) => write!(f, "i32.atomic.rmw8_u.xor"),
+ I32AtomicRmwXor16u(_) => write!(f, "i32.atomic.rmw16_u.xor"),
+ I64AtomicRmwXor8u(_) => write!(f, "i64.atomic.rmw8_u.xor"),
+ I64AtomicRmwXor16u(_) => write!(f, "i64.atomic.rmw16_u.xor"),
+ I64AtomicRmwXor32u(_) => write!(f, "i64.atomic.rmw32_u.xor"),
+
+ I32AtomicRmwXchg(_) => write!(f, "i32.atomic.rmw.xchg"),
+ I64AtomicRmwXchg(_) => write!(f, "i64.atomic.rmw.xchg"),
+ I32AtomicRmwXchg8u(_) => write!(f, "i32.atomic.rmw8_u.xchg"),
+ I32AtomicRmwXchg16u(_) => write!(f, "i32.atomic.rmw16_u.xchg"),
+ I64AtomicRmwXchg8u(_) => write!(f, "i64.atomic.rmw8_u.xchg"),
+ I64AtomicRmwXchg16u(_) => write!(f, "i64.atomic.rmw16_u.xchg"),
+ I64AtomicRmwXchg32u(_) => write!(f, "i64.atomic.rmw32_u.xchg"),
+
+ I32AtomicRmwCmpxchg(_) => write!(f, "i32.atomic.rmw.cmpxchg"),
+ I64AtomicRmwCmpxchg(_) => write!(f, "i64.atomic.rmw.cmpxchg"),
+ I32AtomicRmwCmpxchg8u(_) => write!(f, "i32.atomic.rmw8_u.cmpxchg"),
+ I32AtomicRmwCmpxchg16u(_) => write!(f, "i32.atomic.rmw16_u.cmpxchg"),
+ I64AtomicRmwCmpxchg8u(_) => write!(f, "i64.atomic.rmw8_u.cmpxchg"),
+ I64AtomicRmwCmpxchg16u(_) => write!(f, "i64.atomic.rmw16_u.cmpxchg"),
+ I64AtomicRmwCmpxchg32u(_) => write!(f, "i64.atomic.rmw32_u.cmpxchg"),
+ }
+ }
+}
+
+#[cfg(feature="simd")]
+impl fmt::Display for SimdInstruction {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ use self::SimdInstruction::*;
+
+ match *self {
+ V128Const(_) => write!(f, "v128.const"),
+ V128Load(_) => write!(f, "v128.load"),
+ V128Store(_) => write!(f, "v128.store"),
+ I8x16Splat => write!(f, "i8x16.splat"),
+ I16x8Splat => write!(f, "i16x8.splat"),
+ I32x4Splat => write!(f, "i32x4.splat"),
+ I64x2Splat => write!(f, "i64x2.splat"),
+ F32x4Splat => write!(f, "f32x4.splat"),
+ F64x2Splat => write!(f, "f64x2.splat"),
+ I8x16ExtractLaneS(_) => write!(f, "i8x16.extract_lane_s"),
+ I8x16ExtractLaneU(_) => write!(f, "i8x16.extract_lane_u"),
+ I16x8ExtractLaneS(_) => write!(f, "i16x8.extract_lane_s"),
+ I16x8ExtractLaneU(_) => write!(f, "i16x8.extract_lane_u"),
+ I32x4ExtractLane(_) => write!(f, "i32x4.extract_lane"),
+ I64x2ExtractLane(_) => write!(f, "i64x2.extract_lane"),
+ F32x4ExtractLane(_) => write!(f, "f32x4.extract_lane"),
+ F64x2ExtractLane(_) => write!(f, "f64x2.extract_lane"),
+ I8x16ReplaceLane(_) => write!(f, "i8x16.replace_lane"),
+ I16x8ReplaceLane(_) => write!(f, "i16x8.replace_lane"),
+ I32x4ReplaceLane(_) => write!(f, "i32x4.replace_lane"),
+ I64x2ReplaceLane(_) => write!(f, "i64x2.replace_lane"),
+ F32x4ReplaceLane(_) => write!(f, "f32x4.replace_lane"),
+ F64x2ReplaceLane(_) => write!(f, "f64x2.replace_lane"),
+ V8x16Shuffle(_) => write!(f, "v8x16.shuffle"),
+ I8x16Add => write!(f, "i8x16.add"),
+ I16x8Add => write!(f, "i16x8.add"),
+ I32x4Add => write!(f, "i32x4.add"),
+ I64x2Add => write!(f, "i64x2.add"),
+ I8x16Sub => write!(f, "i8x16.sub"),
+ I16x8Sub => write!(f, "i16x8.sub"),
+ I32x4Sub => write!(f, "i32x4.sub"),
+ I64x2Sub => write!(f, "i64x2.sub"),
+ I8x16Mul => write!(f, "i8x16.mul"),
+ I16x8Mul => write!(f, "i16x8.mul"),
+ I32x4Mul => write!(f, "i32x4.mul"),
+ // I64x2Mul => write!(f, "i64x2.mul"),
+ I8x16Neg => write!(f, "i8x16.neg"),
+ I16x8Neg => write!(f, "i16x8.neg"),
+ I32x4Neg => write!(f, "i32x4.neg"),
+ I64x2Neg => write!(f, "i64x2.neg"),
+ I8x16AddSaturateS => write!(f, "i8x16.add_saturate_s"),
+ I8x16AddSaturateU => write!(f, "i8x16.add_saturate_u"),
+ I16x8AddSaturateS => write!(f, "i16x8.add_saturate_s"),
+ I16x8AddSaturateU => write!(f, "i16x8.add_saturate_u"),
+ I8x16SubSaturateS => write!(f, "i8x16.sub_saturate_s"),
+ I8x16SubSaturateU => write!(f, "i8x16.sub_saturate_u"),
+ I16x8SubSaturateS => write!(f, "i16x8.sub_saturate_s"),
+ I16x8SubSaturateU => write!(f, "i16x8.sub_saturate_u"),
+ I8x16Shl => write!(f, "i8x16.shl"),
+ I16x8Shl => write!(f, "i16x8.shl"),
+ I32x4Shl => write!(f, "i32x4.shl"),
+ I64x2Shl => write!(f, "i64x2.shl"),
+ I8x16ShrS => write!(f, "i8x16.shr_s"),
+ I8x16ShrU => write!(f, "i8x16.shr_u"),
+ I16x8ShrS => write!(f, "i16x8.shr_s"),
+ I16x8ShrU => write!(f, "i16x8.shr_u"),
+ I32x4ShrS => write!(f, "i32x4.shr_s"),
+ I32x4ShrU => write!(f, "i32x4.shr_u"),
+ I64x2ShrS => write!(f, "i64x2.shr_s"),
+ I64x2ShrU => write!(f, "i64x2.shr_u"),
+ V128And => write!(f, "v128.and"),
+ V128Or => write!(f, "v128.or"),
+ V128Xor => write!(f, "v128.xor"),
+ V128Not => write!(f, "v128.not"),
+ V128Bitselect => write!(f, "v128.bitselect"),
+ I8x16AnyTrue => write!(f, "i8x16.any_true"),
+ I16x8AnyTrue => write!(f, "i16x8.any_true"),
+ I32x4AnyTrue => write!(f, "i32x4.any_true"),
+ I64x2AnyTrue => write!(f, "i64x2.any_true"),
+ I8x16AllTrue => write!(f, "i8x16.all_true"),
+ I16x8AllTrue => write!(f, "i16x8.all_true"),
+ I32x4AllTrue => write!(f, "i32x4.all_true"),
+ I64x2AllTrue => write!(f, "i64x2.all_true"),
+ I8x16Eq => write!(f, "i8x16.eq"),
+ I16x8Eq => write!(f, "i16x8.eq"),
+ I32x4Eq => write!(f, "i32x4.eq"),
+ // I64x2Eq => write!(f, "i64x2.eq"),
+ F32x4Eq => write!(f, "f32x4.eq"),
+ F64x2Eq => write!(f, "f64x2.eq"),
+ I8x16Ne => write!(f, "i8x16.ne"),
+ I16x8Ne => write!(f, "i16x8.ne"),
+ I32x4Ne => write!(f, "i32x4.ne"),
+ // I64x2Ne => write!(f, "i64x2.ne"),
+ F32x4Ne => write!(f, "f32x4.ne"),
+ F64x2Ne => write!(f, "f64x2.ne"),
+ I8x16LtS => write!(f, "i8x16.lt_s"),
+ I8x16LtU => write!(f, "i8x16.lt_u"),
+ I16x8LtS => write!(f, "i16x8.lt_s"),
+ I16x8LtU => write!(f, "i16x8.lt_u"),
+ I32x4LtS => write!(f, "i32x4.lt_s"),
+ I32x4LtU => write!(f, "i32x4.lt_u"),
+ // I64x2LtS => write!(f, "// I64x2.lt_s"),
+ // I64x2LtU => write!(f, "// I64x2.lt_u"),
+ F32x4Lt => write!(f, "f32x4.lt"),
+ F64x2Lt => write!(f, "f64x2.lt"),
+ I8x16LeS => write!(f, "i8x16.le_s"),
+ I8x16LeU => write!(f, "i8x16.le_u"),
+ I16x8LeS => write!(f, "i16x8.le_s"),
+ I16x8LeU => write!(f, "i16x8.le_u"),
+ I32x4LeS => write!(f, "i32x4.le_s"),
+ I32x4LeU => write!(f, "i32x4.le_u"),
+ // I64x2LeS => write!(f, "// I64x2.le_s"),
+ // I64x2LeU => write!(f, "// I64x2.le_u"),
+ F32x4Le => write!(f, "f32x4.le"),
+ F64x2Le => write!(f, "f64x2.le"),
+ I8x16GtS => write!(f, "i8x16.gt_s"),
+ I8x16GtU => write!(f, "i8x16.gt_u"),
+ I16x8GtS => write!(f, "i16x8.gt_s"),
+ I16x8GtU => write!(f, "i16x8.gt_u"),
+ I32x4GtS => write!(f, "i32x4.gt_s"),
+ I32x4GtU => write!(f, "i32x4.gt_u"),
+ // I64x2GtS => write!(f, "// I64x2.gt_s"),
+ // I64x2GtU => write!(f, "// I64x2.gt_u"),
+ F32x4Gt => write!(f, "f32x4.gt"),
+ F64x2Gt => write!(f, "f64x2.gt"),
+ I8x16GeS => write!(f, "i8x16.ge_s"),
+ I8x16GeU => write!(f, "i8x16.ge_u"),
+ I16x8GeS => write!(f, "i16x8.ge_s"),
+ I16x8GeU => write!(f, "i16x8.ge_u"),
+ I32x4GeS => write!(f, "i32x4.ge_s"),
+ I32x4GeU => write!(f, "i32x4.ge_u"),
+ // I64x2GeS => write!(f, "// I64x2.ge_s"),
+ // I64x2GeU => write!(f, "// I64x2.ge_u"),
+ F32x4Ge => write!(f, "f32x4.ge"),
+ F64x2Ge => write!(f, "f64x2.ge"),
+ F32x4Neg => write!(f, "f32x4.neg"),
+ F64x2Neg => write!(f, "f64x2.neg"),
+ F32x4Abs => write!(f, "f32x4.abs"),
+ F64x2Abs => write!(f, "f64x2.abs"),
+ F32x4Min => write!(f, "f32x4.min"),
+ F64x2Min => write!(f, "f64x2.min"),
+ F32x4Max => write!(f, "f32x4.max"),
+ F64x2Max => write!(f, "f64x2.max"),
+ F32x4Add => write!(f, "f32x4.add"),
+ F64x2Add => write!(f, "f64x2.add"),
+ F32x4Sub => write!(f, "f32x4.sub"),
+ F64x2Sub => write!(f, "f64x2.sub"),
+ F32x4Div => write!(f, "f32x4.div"),
+ F64x2Div => write!(f, "f64x2.div"),
+ F32x4Mul => write!(f, "f32x4.mul"),
+ F64x2Mul => write!(f, "f64x2.mul"),
+ F32x4Sqrt => write!(f, "f32x4.sqrt"),
+ F64x2Sqrt => write!(f, "f64x2.sqrt"),
+ F32x4ConvertSI32x4 => write!(f, "f32x4.convert_s/i32x4"),
+ F32x4ConvertUI32x4 => write!(f, "f32x4.convert_u/i32x4"),
+ F64x2ConvertSI64x2 => write!(f, "f64x2.convert_s/i64x2"),
+ F64x2ConvertUI64x2 => write!(f, "f64x2.convert_u/i64x2"),
+ I32x4TruncSF32x4Sat => write!(f, "i32x4.trunc_s/f32x4:sat"),
+ I32x4TruncUF32x4Sat => write!(f, "i32x4.trunc_u/f32x4:sat"),
+ I64x2TruncSF64x2Sat => write!(f, "i64x2.trunc_s/f64x2:sat"),
+ I64x2TruncUF64x2Sat => write!(f, "i64x2.trunc_u/f64x2:sat"),
+ }
+ }
+}
+
+#[cfg(feature="bulk")]
+impl fmt::Display for BulkInstruction {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ use self::BulkInstruction::*;
+
+ match *self {
+ MemoryInit(_) => write!(f, "memory.init"),
+ MemoryDrop(_) => write!(f, "memory.drop"),
+ MemoryFill => write!(f, "memory.fill"),
+ MemoryCopy => write!(f, "memory.copy"),
+ TableInit(_) => write!(f, "table.init"),
+ TableDrop(_) => write!(f, "table.drop"),
+ TableCopy => write!(f, "table.copy"),
+ }
+ }
+}
+
+impl Serialize for Instructions {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ for op in self.0.into_iter() {
+ op.serialize(writer)?;
+ }
+
+ Ok(())
+ }
+}
+
+impl Serialize for InitExpr {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ for op in self.0.into_iter() {
+ op.serialize(writer)?;
+ }
+
+ Ok(())
+ }
+}
+
+#[test]
+fn ifelse() {
+ // see if-else.wast/if-else.wasm
+ let instruction_list = super::deserialize_buffer::<Instructions>(&[0x04, 0x7F, 0x41, 0x05, 0x05, 0x41, 0x07, 0x0B, 0x0B])
+ .expect("valid hex of if instruction");
+ let instructions = instruction_list.elements();
+ match &instructions[0] {
+ &Instruction::If(_) => (),
+ _ => panic!("Should be deserialized as if instruction"),
+ }
+ let before_else = instructions.iter().skip(1)
+ .take_while(|op| match **op { Instruction::Else => false, _ => true }).count();
+ let after_else = instructions.iter().skip(1)
+ .skip_while(|op| match **op { Instruction::Else => false, _ => true })
+ .take_while(|op| match **op { Instruction::End => false, _ => true })
+ .count()
+ - 1; // minus Instruction::Else itself
+ assert_eq!(before_else, after_else);
+}
+
+#[test]
+fn display() {
+ let instruction = Instruction::GetLocal(0);
+ assert_eq!("get_local 0", format!("{}", instruction));
+
+ let instruction = Instruction::F64Store(0, 24);
+ assert_eq!("f64.store offset=24", format!("{}", instruction));
+
+ let instruction = Instruction::I64Store(0, 0);
+ assert_eq!("i64.store", format!("{}", instruction));
+}
+
+#[test]
+fn size_of() {
+ assert!(::std::mem::size_of::<Instruction>() <= 24);
+}
+
+#[test]
+fn instructions_hashset() {
+ use self::Instruction::{Call, Block, Drop};
+ use super::types::{BlockType::Value, ValueType};
+
+ let set: std::collections::HashSet<Instruction> =
+ vec![Call(1), Block(Value(ValueType::I32)), Drop].into_iter().collect();
+ assert_eq!(set.contains(&Drop), true)
+}
diff --git a/third_party/rust/parity-wasm/src/elements/primitives.rs b/third_party/rust/parity-wasm/src/elements/primitives.rs
new file mode 100644
index 0000000000..149419f20b
--- /dev/null
+++ b/third_party/rust/parity-wasm/src/elements/primitives.rs
@@ -0,0 +1,902 @@
+use alloc::{string::String, vec::Vec};
+use crate::{io, elements};
+use super::{Error, Deserialize, Serialize};
+
+/// Unsigned variable-length integer, limited to 32 bits,
+/// represented by at most 5 bytes that may contain padding 0x80 bytes.
+#[derive(Debug, Copy, Clone, PartialEq)]
+pub struct VarUint32(u32);
+
+impl From<VarUint32> for usize {
+ fn from(var: VarUint32) -> usize {
+ var.0 as usize
+ }
+}
+
+impl From<VarUint32> for u32 {
+ fn from(var: VarUint32) -> u32 {
+ var.0
+ }
+}
+
+impl From<u32> for VarUint32 {
+ fn from(i: u32) -> VarUint32 {
+ VarUint32(i)
+ }
+}
+
+impl From<usize> for VarUint32 {
+ fn from(i: usize) -> VarUint32 {
+ assert!(i <= u32::max_value() as usize);
+ VarUint32(i as u32)
+ }
+}
+
+impl Deserialize for VarUint32 {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ let mut res = 0;
+ let mut shift = 0;
+ let mut u8buf = [0u8; 1];
+ loop {
+ if shift > 31 { return Err(Error::InvalidVarUint32); }
+
+ reader.read(&mut u8buf)?;
+ let b = u8buf[0] as u32;
+ res |= (b & 0x7f).checked_shl(shift).ok_or(Error::InvalidVarUint32)?;
+ shift += 7;
+ if (b >> 7) == 0 {
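+ // Continuation bit is clear, so this is the final byte. At this
+ // point any payload bits at position 32 or above cannot fit into
+ // a u32, so over-long encodings that set them are rejected.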
+ if shift >= 32 && (b as u8).leading_zeros() < 4 {
+ return Err(Error::InvalidVarInt32);
+ }
+ break;
+ }
+ }
+ Ok(VarUint32(res))
+ }
+}
+
+impl Serialize for VarUint32 {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ let mut buf = [0u8; 1];
+ let mut v = self.0;
+ loop {
+ buf[0] = (v & 0b0111_1111) as u8;
+ v >>= 7;
+ if v > 0 {
+ buf[0] |= 0b1000_0000;
+ }
+ writer.write(&buf[..])?;
+ if v == 0 { break; }
+ }
+
+ Ok(())
+ }
+}
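+
+// Note: serialization always emits the shortest (canonical) LEB128 form,
+// e.g. 5 -> [0x05] and 8192 -> [0x80, 0x40], while deserialization also
+// accepts padded forms such as [0x85, 0x80, 0x80, 0x80, 0x00], which
+// decodes to 5 as well (see the tests at the bottom of this file).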
+
+/// Unsigned variable-length integer, limited to 64 bits,
+/// represented by at most 9 bytes that may contain padding 0x80 bytes.
+#[derive(Debug, Copy, Clone, PartialEq)]
+pub struct VarUint64(u64);
+
+impl From<VarUint64> for u64 {
+ fn from(var: VarUint64) -> u64 {
+ var.0
+ }
+}
+
+impl Deserialize for VarUint64 {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ let mut res = 0;
+ let mut shift = 0;
+ let mut u8buf = [0u8; 1];
+ loop {
+ if shift > 63 { return Err(Error::InvalidVarUint64); }
+
+ reader.read(&mut u8buf)?;
+ let b = u8buf[0] as u64;
+ res |= (b & 0x7f).checked_shl(shift).ok_or(Error::InvalidVarUint64)?;
+ shift += 7;
+ if (b >> 7) == 0 {
+ if shift >= 64 && (b as u8).leading_zeros() < 7 {
+ return Err(Error::InvalidVarInt64);
+ }
+ break;
+ }
+ }
+ Ok(VarUint64(res))
+ }
+}
+
+impl Serialize for VarUint64 {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ let mut buf = [0u8; 1];
+ let mut v = self.0;
+ loop {
+ buf[0] = (v & 0b0111_1111) as u8;
+ v >>= 7;
+ if v > 0 {
+ buf[0] |= 0b1000_0000;
+ }
+ writer.write(&buf[..])?;
+ if v == 0 { break; }
+ }
+
+ Ok(())
+ }
+}
+
+impl From<u64> for VarUint64 {
+ fn from(u: u64) -> VarUint64 {
+ VarUint64(u)
+ }
+}
+
+/// 7-bit unsigned integer, encoded in LEB128 (always 1 byte length).
+#[derive(Debug, Copy, Clone, PartialEq)]
+pub struct VarUint7(u8);
+
+impl From<VarUint7> for u8 {
+ fn from(v: VarUint7) -> u8 {
+ v.0
+ }
+}
+
+impl From<u8> for VarUint7 {
+ fn from(v: u8) -> Self {
+ VarUint7(v)
+ }
+}
+
+impl Deserialize for VarUint7 {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ let mut u8buf = [0u8; 1];
+ reader.read(&mut u8buf)?;
+ Ok(VarUint7(u8buf[0]))
+ }
+}
+
+impl Serialize for VarUint7 {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ // todo check range?
+ writer.write(&[self.0])?;
+ Ok(())
+ }
+}
+
+/// 7-bit signed integer, encoded in LEB128 (always 1 byte length)
+#[derive(Debug, Copy, Clone, PartialEq)]
+pub struct VarInt7(i8);
+
+impl From<VarInt7> for i8 {
+ fn from(v: VarInt7) -> i8 {
+ v.0
+ }
+}
+
+impl From<i8> for VarInt7 {
+ fn from(v: i8) -> VarInt7 {
+ VarInt7(v)
+ }
+}
+
+impl Deserialize for VarInt7 {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ let mut u8buf = [0u8; 1];
+ reader.read(&mut u8buf)?;
+
+ // check if number is not continued!
+ if u8buf[0] & 0b1000_0000 != 0 {
+ return Err(Error::InvalidVarInt7(u8buf[0]));
+ }
+
+ // expand sign
+ if u8buf[0] & 0b0100_0000 == 0b0100_0000 { u8buf[0] |= 0b1000_0000 }
+
+ Ok(VarInt7(u8buf[0] as i8))
+ }
+}
+
+impl Serialize for VarInt7 {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ // todo check range?
+ let mut b: u8 = self.0 as u8;
+ if self.0 < 0 { b |= 0b0100_0000; b &= 0b0111_1111; }
+ writer.write(&[b])?;
+ Ok(())
+ }
+}
+
+/// 8-bit unsigned integer, NOT encoded in LEB128;
+/// it's just a single byte.
+#[derive(Debug, Copy, Clone, PartialEq)]
+pub struct Uint8(u8);
+
+impl From<Uint8> for u8 {
+ fn from(v: Uint8) -> u8 {
+ v.0
+ }
+}
+
+impl From<u8> for Uint8 {
+ fn from(v: u8) -> Self {
+ Uint8(v)
+ }
+}
+
+impl Deserialize for Uint8 {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ let mut u8buf = [0u8; 1];
+ reader.read(&mut u8buf)?;
+ Ok(Uint8(u8buf[0]))
+ }
+}
+
+impl Serialize for Uint8 {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ writer.write(&[self.0])?;
+ Ok(())
+ }
+}
+
+
+/// 32-bit signed integer, encoded in LEB128 (can be 1-5 bytes length).
+#[derive(Debug, Copy, Clone, PartialEq)]
+pub struct VarInt32(i32);
+
+impl From<VarInt32> for i32 {
+ fn from(v: VarInt32) -> i32 {
+ v.0
+ }
+}
+
+impl From<i32> for VarInt32 {
+ fn from(v: i32) -> VarInt32 {
+ VarInt32(v)
+ }
+}
+
+impl Deserialize for VarInt32 {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ let mut res = 0;
+ let mut shift = 0;
+ let mut u8buf = [0u8; 1];
+ loop {
+ if shift > 31 { return Err(Error::InvalidVarInt32); }
+ reader.read(&mut u8buf)?;
+ let b = u8buf[0];
+
+ res |= ((b & 0x7f) as i32).checked_shl(shift).ok_or(Error::InvalidVarInt32)?;
+
+ shift += 7;
+ if (b >> 7) == 0 {
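+ // Final byte: bit 6 carries the sign. For encodings shorter than
+ // 5 bytes, sign-extend the result; for a full 5-byte encoding,
+ // require the remaining high bits to be a valid sign extension.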
+ if shift < 32 && b & 0b0100_0000 == 0b0100_0000 {
+ res |= (1i32 << shift).wrapping_neg();
+ } else if shift >= 32 && b & 0b0100_0000 == 0b0100_0000 {
+ if (!(b | 0b1000_0000)).leading_zeros() < 5 {
+ return Err(Error::InvalidVarInt32);
+ }
+ } else if shift >= 32 && b & 0b0100_0000 == 0 {
+ if b.leading_zeros() < 5 {
+ return Err(Error::InvalidVarInt32);
+ }
+ }
+ break;
+ }
+ }
+ Ok(VarInt32(res))
+ }
+}
+
+impl Serialize for VarInt32 {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ let mut buf = [0u8; 1];
+ let mut v = self.0;
+ let mut more = true;
+ while more {
+ buf[0] = (v & 0b0111_1111) as u8;
+ v >>= 7;
+ if (v == 0 && buf[0] & 0b0100_0000 == 0) || (v == -1 && buf[0] & 0b0100_0000 == 0b0100_0000) {
+ more = false
+ } else {
+ buf[0] |= 0b1000_0000
+ }
+
+ writer.write(&buf[..])?;
+ }
+
+ Ok(())
+ }
+}
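+
+// Signed LEB128 examples: -1 encodes as the single byte [0x7f], -8192 as
+// [0x80, 0x40] and i32::MIN as [0x80, 0x80, 0x80, 0x80, 0x78] (the latter
+// two round-trips are exercised by the tests at the bottom of this file).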
+
+/// 64-bit signed integer, encoded in LEB128 (can be 1-9 bytes length).
+#[derive(Debug, Copy, Clone, PartialEq)]
+pub struct VarInt64(i64);
+
+impl From<VarInt64> for i64 {
+ fn from(v: VarInt64) -> i64 {
+ v.0
+ }
+}
+
+impl From<i64> for VarInt64 {
+ fn from(v: i64) -> VarInt64 {
+ VarInt64(v)
+ }
+}
+
+impl Deserialize for VarInt64 {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ let mut res = 0i64;
+ let mut shift = 0;
+ let mut u8buf = [0u8; 1];
+
+ loop {
+ if shift > 63 { return Err(Error::InvalidVarInt64); }
+ reader.read(&mut u8buf)?;
+ let b = u8buf[0];
+
+ res |= ((b & 0x7f) as i64).checked_shl(shift).ok_or(Error::InvalidVarInt64)?;
+
+ shift += 7;
+ if (b >> 7) == 0 {
+ if shift < 64 && b & 0b0100_0000 == 0b0100_0000 {
+ res |= (1i64 << shift).wrapping_neg();
+ } else if shift >= 64 && b & 0b0100_0000 == 0b0100_0000 {
+ if (b | 0b1000_0000) as i8 != -1 {
+ return Err(Error::InvalidVarInt64);
+ }
+ } else if shift >= 64 && b != 0 {
+ return Err(Error::InvalidVarInt64);
+ }
+ break;
+ }
+ }
+ Ok(VarInt64(res))
+ }
+}
+
+impl Serialize for VarInt64 {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ let mut buf = [0u8; 1];
+ let mut v = self.0;
+ let mut more = true;
+ while more {
+ buf[0] = (v & 0b0111_1111) as u8;
+ v >>= 7;
+ if (v == 0 && buf[0] & 0b0100_0000 == 0) || (v == -1 && buf[0] & 0b0100_0000 == 0b0100_0000) {
+ more = false
+ } else {
+ buf[0] |= 0b1000_0000
+ }
+
+ writer.write(&buf[..])?;
+ }
+
+ Ok(())
+ }
+}
+
+/// 32-bit unsigned integer, encoded in little endian.
+#[derive(Debug, Copy, Clone, PartialEq)]
+pub struct Uint32(u32);
+
+impl Deserialize for Uint32 {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ let mut buf = [0u8; 4];
+ reader.read(&mut buf)?;
+ // todo check range
+ Ok(u32::from_le_bytes(buf).into())
+ }
+}
+
+impl From<Uint32> for u32 {
+ fn from(var: Uint32) -> u32 {
+ var.0
+ }
+}
+
+impl Serialize for Uint32 {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ writer.write(&self.0.to_le_bytes())?;
+ Ok(())
+ }
+}
+
+impl From<u32> for Uint32 {
+ fn from(u: u32) -> Self { Uint32(u) }
+}
+
+/// 64-bit unsigned integer, encoded in little endian.
+#[derive(Debug, Copy, Clone, PartialEq)]
+pub struct Uint64(u64);
+
+impl Deserialize for Uint64 {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ let mut buf = [0u8; 8];
+ reader.read(&mut buf)?;
+ // todo check range
+ Ok(u64::from_le_bytes(buf).into())
+ }
+}
+
+impl Serialize for Uint64 {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ writer.write(&self.0.to_le_bytes())?;
+ Ok(())
+ }
+}
+
+impl From<u64> for Uint64 {
+ fn from(u: u64) -> Self { Uint64(u) }
+}
+
+impl From<Uint64> for u64 {
+ fn from(var: Uint64) -> u64 {
+ var.0
+ }
+}
+
+
+/// VarUint1, 1-bit value (0/1).
+#[derive(Debug, Copy, Clone, PartialEq)]
+pub struct VarUint1(bool);
+
+impl From<VarUint1> for bool {
+ fn from(v: VarUint1) -> bool {
+ v.0
+ }
+}
+
+impl From<bool> for VarUint1 {
+ fn from(b: bool) -> Self {
+ VarUint1(b)
+ }
+}
+
+impl Deserialize for VarUint1 {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ let mut u8buf = [0u8; 1];
+ reader.read(&mut u8buf)?;
+ match u8buf[0] {
+ 0 => Ok(VarUint1(false)),
+ 1 => Ok(VarUint1(true)),
+ v @ _ => Err(Error::InvalidVarUint1(v)),
+ }
+ }
+}
+
+impl Serialize for VarUint1 {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ writer.write(&[
+ if self.0 { 1u8 } else { 0u8 }
+ ])?;
+ Ok(())
+ }
+}
+
+impl Deserialize for String {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ let length = u32::from(VarUint32::deserialize(reader)?) as usize;
+ if length > 0 {
+ String::from_utf8(buffered_read!(1024, length, reader)).map_err(|_| Error::NonUtf8String)
+ }
+ else {
+ Ok(String::new())
+ }
+ }
+}
+
+impl Serialize for String {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Error> {
+ VarUint32::from(self.len()).serialize(writer)?;
+ writer.write(&self.into_bytes()[..])?;
+ Ok(())
+ }
+}
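+
+// A string is encoded as its byte length (VarUint32) followed by the raw
+// UTF-8 bytes; e.g. "name" becomes [0x04, 0x6e, 0x61, 0x6d, 0x65].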
+
+/// List for reading a sequence of elements of type `T`, given
+/// they are preceded by their count (serialized as VarUint32).
+#[derive(Debug, Clone)]
+pub struct CountedList<T: Deserialize>(Vec<T>);
+
+impl<T: Deserialize> CountedList<T> {
+ /// Destroy the counted list, returning the inner vector.
+ pub fn into_inner(self) -> Vec<T> { self.0 }
+}
+
+impl<T: Deserialize> Deserialize for CountedList<T> where T::Error: From<Error> {
+ type Error = T::Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ let count: usize = VarUint32::deserialize(reader)?.into();
+ let mut result = Vec::new();
+ for _ in 0..count { result.push(T::deserialize(reader)?); }
+ Ok(CountedList(result))
+ }
+}
+
+/// Helper struct to write a payload which is preceded by
+/// its own length in bytes.
+#[derive(Debug)]
+pub struct CountedWriter<'a, W: 'a + io::Write> {
+ writer: &'a mut W,
+ data: Vec<u8>,
+}
+
+impl<'a, W: 'a + io::Write> CountedWriter<'a, W> {
+ /// New counted writer on top of the given serial writer.
+ pub fn new(writer: &'a mut W) -> Self {
+ CountedWriter {
+ writer: writer,
+ data: Vec::new(),
+ }
+ }
+
+ /// Finish the counted writer, which writes the accumulated length
+ /// and the actual payload.
+ pub fn done(self) -> io::Result<()> {
+ let writer = self.writer;
+ let data = self.data;
+ VarUint32::from(data.len())
+ .serialize(writer)
+ .map_err(|_| io::Error::InvalidData)?;
+ writer.write(&data[..])?;
+ Ok(())
+ }
+}
+
+impl<'a, W: 'a + io::Write> io::Write for CountedWriter<'a, W> {
+ fn write(&mut self, buf: &[u8]) -> io::Result<()> {
+ self.data.extend_from_slice(buf);
+ Ok(())
+ }
+}
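+
+// Illustrative usage sketch (names are placeholders): a section body is
+// serialized into a `CountedWriter`, and `done()` then emits the
+// accumulated byte length as a VarUint32 followed by the buffered payload.
+//
+//     let mut counted_writer = CountedWriter::new(writer);
+//     name.serialize(&mut counted_writer)?;   // buffered in memory
+//     counted_writer.done()?;                 // writes length, then payload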
+
+/// Helper struct to write a series of `T` preceded by the length of the
+/// sequence, serialized as VarUint32.
+#[derive(Debug, Clone)]
+pub struct CountedListWriter<I: Serialize<Error=elements::Error>, T: IntoIterator<Item=I>>(pub usize, pub T);
+
+impl<I: Serialize<Error=elements::Error>, T: IntoIterator<Item=I>> Serialize for CountedListWriter<I, T> {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ let len_us = self.0;
+ let data = self.1;
+ let len: VarUint32 = len_us.into();
+ len.serialize(writer)?;
+ for data_element in data { data_element.serialize(writer)? }
+
+ Ok(())
+ }
+}
+
+
+#[cfg(test)]
+mod tests {
+
+ use super::super::{deserialize_buffer, Serialize};
+ use super::{CountedList, VarInt7, VarUint32, VarInt32, VarInt64, VarUint64};
+ use crate::elements::Error;
+
+ fn varuint32_ser_test(val: u32, expected: Vec<u8>) {
+ let mut buf = Vec::new();
+ let v1: VarUint32 = val.into();
+ v1.serialize(&mut buf).expect("to be serialized ok");
+ assert_eq!(expected, buf);
+ }
+
+ fn varuint32_de_test(dt: Vec<u8>, expected: u32) {
+ let val: VarUint32 = super::super::deserialize_buffer(&dt).expect("buf to be serialized");
+ assert_eq!(expected, val.into());
+ }
+
+ fn varuint32_serde_test(dt: Vec<u8>, val: u32) {
+ varuint32_de_test(dt.clone(), val);
+ varuint32_ser_test(val, dt);
+ }
+
+ fn varint32_ser_test(val: i32, expected: Vec<u8>) {
+ let mut buf = Vec::new();
+ let v1: VarInt32 = val.into();
+ v1.serialize(&mut buf).expect("to be serialized ok");
+ assert_eq!(expected, buf);
+ }
+
+ fn varint32_de_test(dt: Vec<u8>, expected: i32) {
+ let val: VarInt32 = super::super::deserialize_buffer(&dt).expect("buf to be serialized");
+ assert_eq!(expected, val.into());
+ }
+
+ fn varint32_serde_test(dt: Vec<u8>, val: i32) {
+ varint32_de_test(dt.clone(), val);
+ varint32_ser_test(val, dt);
+ }
+
+ fn varuint64_ser_test(val: u64, expected: Vec<u8>) {
+ let mut buf = Vec::new();
+ let v1: VarUint64 = val.into();
+ v1.serialize(&mut buf).expect("to be serialized ok");
+ assert_eq!(expected, buf);
+ }
+
+ fn varuint64_de_test(dt: Vec<u8>, expected: u64) {
+ let val: VarUint64 = super::super::deserialize_buffer(&dt).expect("buf to be serialized");
+ assert_eq!(expected, val.into());
+ }
+
+ fn varuint64_serde_test(dt: Vec<u8>, val: u64) {
+ varuint64_de_test(dt.clone(), val);
+ varuint64_ser_test(val, dt);
+ }
+
+ fn varint64_ser_test(val: i64, expected: Vec<u8>) {
+ let mut buf = Vec::new();
+ let v1: VarInt64 = val.into();
+ v1.serialize(&mut buf).expect("to be serialized ok");
+ assert_eq!(expected, buf);
+ }
+
+ fn varint64_de_test(dt: Vec<u8>, expected: i64) {
+ let val: VarInt64 = super::super::deserialize_buffer(&dt).expect("buf to be serialized");
+ assert_eq!(expected, val.into());
+ }
+
+ fn varint64_serde_test(dt: Vec<u8>, val: i64) {
+ varint64_de_test(dt.clone(), val);
+ varint64_ser_test(val, dt);
+ }
+
+ #[test]
+ fn varuint32_0() {
+ varuint32_serde_test(vec![0u8; 1], 0);
+ }
+
+ #[test]
+ fn varuint32_1() {
+ varuint32_serde_test(vec![1u8; 1], 1);
+ }
+
+ #[test]
+ fn varuint32_135() {
+ varuint32_serde_test(vec![135u8, 0x01], 135);
+ }
+
+ #[test]
+ fn varuint32_8192() {
+ varuint32_serde_test(vec![0x80, 0x40], 8192);
+ }
+
+ #[test]
+ fn varint32_8192() {
+ varint32_serde_test(vec![0x80, 0xc0, 0x00], 8192);
+ }
+
+ #[test]
+ fn varint32_neg_8192() {
+ varint32_serde_test(vec![0x80, 0x40], -8192);
+ }
+
+ #[test]
+ fn varuint64_0() {
+ varuint64_serde_test(vec![0u8; 1], 0);
+ }
+
+ #[test]
+ fn varuint64_1() {
+ varuint64_serde_test(vec![1u8; 1], 1);
+ }
+
+ #[test]
+ fn varuint64_135() {
+ varuint64_serde_test(vec![135u8, 0x01], 135);
+ }
+
+ #[test]
+ fn varuint64_8192() {
+ varuint64_serde_test(vec![0x80, 0x40], 8192);
+ }
+
+ #[test]
+ fn varint64_8192() {
+ varint64_serde_test(vec![0x80, 0xc0, 0x00], 8192);
+ }
+
+ #[test]
+ fn varint64_neg_8192() {
+ varint64_serde_test(vec![0x80, 0x40], -8192);
+ }
+
+ #[test]
+ fn varint64_min() {
+ varint64_serde_test(
+ vec![0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x7f],
+ -9223372036854775808,
+ );
+ }
+
+ #[test]
+ fn varint64_bad_extended() {
+ let res = deserialize_buffer::<VarInt64>(&[0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x6f][..]);
+ assert!(res.is_err());
+ }
+
+ #[test]
+ fn varint32_bad_extended() {
+ let res = deserialize_buffer::<VarInt32>(&[0x80, 0x80, 0x80, 0x80, 0x6f][..]);
+ assert!(res.is_err());
+ }
+
+ #[test]
+ fn varint32_bad_extended2() {
+ let res = deserialize_buffer::<VarInt32>(&[0x80, 0x80, 0x80, 0x80, 0x41][..]);
+ assert!(res.is_err());
+ }
+
+ #[test]
+ fn varint64_max() {
+ varint64_serde_test(
+ vec![0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00],
+ 9223372036854775807,
+ );
+ }
+
+ #[test]
+ fn varint64_too_long() {
+ assert!(
+ deserialize_buffer::<VarInt64>(
+ &[0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00][..],
+ ).is_err()
+ );
+ }
+
+ #[test]
+ fn varint32_too_long() {
+ assert!(
+ deserialize_buffer::<VarInt32>(
+ &[0xff, 0xff, 0xff, 0xff, 0xff, 0x00][..],
+ ).is_err()
+ );
+ }
+
+ #[test]
+ fn varuint64_too_long() {
+ assert!(
+ deserialize_buffer::<VarUint64>(
+ &[0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00][..],
+ ).is_err()
+ );
+ }
+
+ #[test]
+ fn varuint32_too_long() {
+ assert!(
+ deserialize_buffer::<VarUint32>(
+ &[0xff, 0xff, 0xff, 0xff, 0xff, 0x00][..],
+ ).is_err()
+ );
+ }
+
+ #[test]
+ fn varuint32_too_long_trailing() {
+ assert!(
+ deserialize_buffer::<VarUint32>(
+ &[0xff, 0xff, 0xff, 0xff, 0x7f][..],
+ ).is_err()
+ );
+ }
+
+ #[test]
+ fn varuint64_too_long_trailing() {
+ assert!(
+ deserialize_buffer::<VarUint64>(
+ &[0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x04][..],
+ ).is_err()
+ );
+ }
+
+ #[test]
+ fn varint32_min() {
+ varint32_serde_test(
+ vec![0x80, 0x80, 0x80, 0x80, 0x78],
+ -2147483648,
+ );
+ }
+
+ #[test]
+ fn varint7_invalid() {
+ match deserialize_buffer::<VarInt7>(&[240]) {
+ Err(Error::InvalidVarInt7(_)) => {},
+ _ => panic!("Should be invalid varint7 error!")
+ }
+ }
+
+ #[test]
+ fn varint7_neg() {
+ assert_eq!(-0x10i8, deserialize_buffer::<VarInt7>(&[0x70]).expect("fail").into());
+ }
+
+ #[test]
+ fn varuint32_too_long_nulled() {
+ match deserialize_buffer::<VarUint32>(
+ &[0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x78]
+ ) {
+ Err(Error::InvalidVarUint32) => {},
+ _ => panic!("Should be invalid varuint32"),
+ }
+ }
+
+ #[test]
+ fn varint32_max() {
+ varint32_serde_test(
+ vec![0xff, 0xff, 0xff, 0xff, 0x07],
+ 2147483647,
+ );
+ }
+
+
+ #[test]
+ fn counted_list() {
+ let payload = [
+ 133u8, //(128+5), length is 5
+ 0x80, 0x80, 0x80, 0x0, // padding
+ 0x01,
+ 0x7d,
+ 0x05,
+ 0x07,
+ 0x09,
+ ];
+
+ let list: CountedList<VarInt7> =
+ deserialize_buffer(&payload).expect("type_section be deserialized");
+
+ let vars = list.into_inner();
+ assert_eq!(5, vars.len());
+ let v3: i8 = (*vars.get(1).unwrap()).into();
+ assert_eq!(-0x03i8, v3);
+ }
+}
diff --git a/third_party/rust/parity-wasm/src/elements/reloc_section.rs b/third_party/rust/parity-wasm/src/elements/reloc_section.rs
new file mode 100644
index 0000000000..50c867f7ca
--- /dev/null
+++ b/third_party/rust/parity-wasm/src/elements/reloc_section.rs
@@ -0,0 +1,347 @@
+use alloc::{string::String, vec::Vec};
+use crate::io;
+
+use super::{CountedList, CountedListWriter, CountedWriter, Deserialize, Error, Serialize, VarInt32, VarUint32, VarUint7};
+
+const FUNCTION_INDEX_LEB: u8 = 0;
+const TABLE_INDEX_SLEB: u8 = 1;
+const TABLE_INDEX_I32: u8 = 2;
+const MEMORY_ADDR_LEB: u8 = 3;
+const MEMORY_ADDR_SLEB: u8 = 4;
+const MEMORY_ADDR_I32: u8 = 5;
+const TYPE_INDEX_LEB: u8 = 6;
+const GLOBAL_INDEX_LEB: u8 = 7;
+
+/// Relocation information.
+#[derive(Clone, Debug, PartialEq)]
+pub struct RelocSection {
+ /// Name of this section.
+ name: String,
+
+ /// ID of the section to which the relocations in this section apply.
+ section_id: u32,
+
+ /// Name of the target section, set only if the relocations apply to a custom section (section_id is 0).
+ relocation_section_name: Option<String>,
+
+ /// Relocation entries.
+ entries: Vec<RelocationEntry>,
+}
+
+impl RelocSection {
+ /// Name of this section.
+ pub fn name(&self) -> &str {
+ &self.name
+ }
+
+ /// Name of this section (mutable).
+ pub fn name_mut(&mut self) -> &mut String {
+ &mut self.name
+ }
+
+ /// ID of the section to which the relocations in this section apply.
+ pub fn section_id(&self) -> u32 {
+ self.section_id
+ }
+
+ /// ID of the section to which the relocations in this section apply (mutable).
+ pub fn section_id_mut(&mut self) -> &mut u32 {
+ &mut self.section_id
+ }
+
+ /// Name of the custom section to which the relocations apply.
+ pub fn relocation_section_name(&self) -> Option<&str> {
+ self.relocation_section_name.as_ref().map(String::as_str)
+ }
+
+ /// Name of the custom section to which the relocations apply (mutable).
+ pub fn relocation_section_name_mut(&mut self) -> &mut Option<String> {
+ &mut self.relocation_section_name
+ }
+
+ /// List of relocation entries.
+ pub fn entries(&self) -> &[RelocationEntry] {
+ &self.entries
+ }
+
+ /// List of relocation entries (mutable).
+ pub fn entries_mut(&mut self) -> &mut Vec<RelocationEntry> {
+ &mut self.entries
+ }
+}
+
+impl RelocSection {
+ /// Deserialize a reloc section.
+ pub fn deserialize<R: io::Read>(
+ name: String,
+ rdr: &mut R,
+ ) -> Result<Self, Error> {
+ let section_id = VarUint32::deserialize(rdr)?.into();
+
+ let relocation_section_name =
+ if section_id == 0 {
+ Some(String::deserialize(rdr)?)
+ }
+ else {
+ None
+ };
+
+ let entries = CountedList::deserialize(rdr)?.into_inner();
+
+ Ok(RelocSection {
+ name,
+ section_id,
+ relocation_section_name,
+ entries,
+ })
+ }
+}
+
+impl Serialize for RelocSection {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, wtr: &mut W) -> Result<(), Error> {
+ let mut counted_writer = CountedWriter::new(wtr);
+
+ self.name.serialize(&mut counted_writer)?;
+
+ VarUint32::from(self.section_id).serialize(&mut counted_writer)?;
+
+ if let Some(relocation_section_name) = self.relocation_section_name {
+ relocation_section_name.serialize(&mut counted_writer)?;
+ }
+
+ let counted_list = CountedListWriter(self.entries.len(), self.entries.into_iter());
+ counted_list.serialize(&mut counted_writer)?;
+
+ counted_writer.done()?;
+
+ Ok(())
+ }
+}
+
+/// Relocation entry.
+#[derive(Clone, Copy, Debug, PartialEq)]
+pub enum RelocationEntry {
+ /// Function index.
+ FunctionIndexLeb {
+ /// Offset of the value to rewrite.
+ offset: u32,
+
+ /// Index of the function symbol in the symbol table.
+ index: u32,
+ },
+
+ /// Function table index.
+ TableIndexSleb {
+ /// Offset of the value to rewrite.
+ offset: u32,
+
+ /// Index of the function symbol in the symbol table.
+ index: u32,
+ },
+
+ /// Function table index.
+ TableIndexI32 {
+ /// Offset of the value to rewrite.
+ offset: u32,
+
+ /// Index of the function symbol in the symbol table.
+ index: u32,
+ },
+
+ /// Linear memory index.
+ MemoryAddressLeb {
+ /// Offset of the value to rewrite.
+ offset: u32,
+
+ /// Index of the data symbol in the symbol table.
+ index: u32,
+
+ /// Addend to add to the address.
+ addend: i32,
+ },
+
+ /// Linear memory index.
+ MemoryAddressSleb {
+ /// Offset of the value to rewrite.
+ offset: u32,
+
+ /// Index of the data symbol in the symbol table.
+ index: u32,
+
+ /// Addend to add to the address.
+ addend: i32,
+ },
+
+ /// Linear memory index.
+ MemoryAddressI32 {
+ /// Offset of the value to rewrite.
+ offset: u32,
+
+ /// Index of the data symbol in the symbol table.
+ index: u32,
+
+ /// Addend to add to the address.
+ addend: i32,
+ },
+
+ /// Type table index.
+ TypeIndexLeb {
+ /// Offset of the value to rewrite.
+ offset: u32,
+
+ /// Index of the type used.
+ index: u32,
+ },
+
+ /// Global index.
+ GlobalIndexLeb {
+ /// Offset of the value to rewrite.
+ offset: u32,
+
+ /// Index of the global symbol in the symbol table.
+ index: u32,
+ },
+}
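+
+// Wire layout: every entry starts with a one-byte relocation type (see the
+// constants above), followed by `offset` and `index` as VarUint32; the
+// MemoryAddress* variants additionally carry an `addend` as VarInt32.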
+
+impl Deserialize for RelocationEntry {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(rdr: &mut R) -> Result<Self, Self::Error> {
+ match VarUint7::deserialize(rdr)?.into() {
+ FUNCTION_INDEX_LEB => Ok(RelocationEntry::FunctionIndexLeb {
+ offset: VarUint32::deserialize(rdr)?.into(),
+ index: VarUint32::deserialize(rdr)?.into(),
+ }),
+
+ TABLE_INDEX_SLEB => Ok(RelocationEntry::TableIndexSleb {
+ offset: VarUint32::deserialize(rdr)?.into(),
+ index: VarUint32::deserialize(rdr)?.into(),
+ }),
+
+ TABLE_INDEX_I32 => Ok(RelocationEntry::TableIndexI32 {
+ offset: VarUint32::deserialize(rdr)?.into(),
+ index: VarUint32::deserialize(rdr)?.into(),
+ }),
+
+ MEMORY_ADDR_LEB => Ok(RelocationEntry::MemoryAddressLeb {
+ offset: VarUint32::deserialize(rdr)?.into(),
+ index: VarUint32::deserialize(rdr)?.into(),
+ addend: VarInt32::deserialize(rdr)?.into(),
+ }),
+
+ MEMORY_ADDR_SLEB => Ok(RelocationEntry::MemoryAddressSleb {
+ offset: VarUint32::deserialize(rdr)?.into(),
+ index: VarUint32::deserialize(rdr)?.into(),
+ addend: VarInt32::deserialize(rdr)?.into(),
+ }),
+
+ MEMORY_ADDR_I32 => Ok(RelocationEntry::MemoryAddressI32 {
+ offset: VarUint32::deserialize(rdr)?.into(),
+ index: VarUint32::deserialize(rdr)?.into(),
+ addend: VarInt32::deserialize(rdr)?.into(),
+ }),
+
+ TYPE_INDEX_LEB => Ok(RelocationEntry::TypeIndexLeb {
+ offset: VarUint32::deserialize(rdr)?.into(),
+ index: VarUint32::deserialize(rdr)?.into(),
+ }),
+
+ GLOBAL_INDEX_LEB => Ok(RelocationEntry::GlobalIndexLeb {
+ offset: VarUint32::deserialize(rdr)?.into(),
+ index: VarUint32::deserialize(rdr)?.into(),
+ }),
+
+ entry_type => Err(Error::UnknownValueType(entry_type as i8)),
+ }
+ }
+}
+
+impl Serialize for RelocationEntry {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, wtr: &mut W) -> Result<(), Error> {
+ match self {
+ RelocationEntry::FunctionIndexLeb { offset, index } => {
+ VarUint7::from(FUNCTION_INDEX_LEB).serialize(wtr)?;
+ VarUint32::from(offset).serialize(wtr)?;
+ VarUint32::from(index).serialize(wtr)?;
+ },
+
+ RelocationEntry::TableIndexSleb { offset, index } => {
+ VarUint7::from(TABLE_INDEX_SLEB).serialize(wtr)?;
+ VarUint32::from(offset).serialize(wtr)?;
+ VarUint32::from(index).serialize(wtr)?;
+ },
+
+ RelocationEntry::TableIndexI32 { offset, index } => {
+ VarUint7::from(TABLE_INDEX_I32).serialize(wtr)?;
+ VarUint32::from(offset).serialize(wtr)?;
+ VarUint32::from(index).serialize(wtr)?;
+ },
+
+ RelocationEntry::MemoryAddressLeb { offset, index, addend } => {
+ VarUint7::from(MEMORY_ADDR_LEB).serialize(wtr)?;
+ VarUint32::from(offset).serialize(wtr)?;
+ VarUint32::from(index).serialize(wtr)?;
+ VarInt32::from(addend).serialize(wtr)?;
+ },
+
+ RelocationEntry::MemoryAddressSleb { offset, index, addend } => {
+ VarUint7::from(MEMORY_ADDR_SLEB).serialize(wtr)?;
+ VarUint32::from(offset).serialize(wtr)?;
+ VarUint32::from(index).serialize(wtr)?;
+ VarInt32::from(addend).serialize(wtr)?;
+ },
+
+ RelocationEntry::MemoryAddressI32 { offset, index, addend } => {
+ VarUint7::from(MEMORY_ADDR_I32).serialize(wtr)?;
+ VarUint32::from(offset).serialize(wtr)?;
+ VarUint32::from(index).serialize(wtr)?;
+ VarInt32::from(addend).serialize(wtr)?;
+ },
+
+ RelocationEntry::TypeIndexLeb { offset, index } => {
+ VarUint7::from(TYPE_INDEX_LEB).serialize(wtr)?;
+ VarUint32::from(offset).serialize(wtr)?;
+ VarUint32::from(index).serialize(wtr)?;
+ },
+
+ RelocationEntry::GlobalIndexLeb { offset, index } => {
+ VarUint7::from(GLOBAL_INDEX_LEB).serialize(wtr)?;
+ VarUint32::from(offset).serialize(wtr)?;
+ VarUint32::from(index).serialize(wtr)?;
+ },
+ }
+
+ Ok(())
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::super::{Section, deserialize_file};
+ use super::RelocationEntry;
+
+ #[test]
+ fn reloc_section() {
+ let module =
+ deserialize_file("./res/cases/v1/relocatable.wasm").expect("Module should be deserialized")
+ .parse_reloc().expect("Reloc section should be deserialized");
+ let mut found = false;
+ for section in module.sections() {
+ match *section {
+ Section::Reloc(ref reloc_section) => {
+ assert_eq!(vec![
+ RelocationEntry::MemoryAddressSleb { offset: 4, index: 0, addend: 0 },
+ RelocationEntry::FunctionIndexLeb { offset: 12, index: 0 },
+ ], reloc_section.entries());
+ found = true
+ },
+ _ => { }
+ }
+ }
+ assert!(found, "There should be a reloc section in relocatable.wasm");
+ }
+}
diff --git a/third_party/rust/parity-wasm/src/elements/section.rs b/third_party/rust/parity-wasm/src/elements/section.rs
new file mode 100644
index 0000000000..6668e82185
--- /dev/null
+++ b/third_party/rust/parity-wasm/src/elements/section.rs
@@ -0,0 +1,1153 @@
+use alloc::{vec::Vec, borrow::ToOwned, string::String};
+use crate::{io, elements};
+use super::{
+ Serialize,
+ Deserialize,
+ Error,
+ VarUint7,
+ VarUint32,
+ CountedList,
+ ImportEntry,
+ MemoryType,
+ TableType,
+ ExportEntry,
+ GlobalEntry,
+ Func,
+ FuncBody,
+ ElementSegment,
+ DataSegment,
+ CountedWriter,
+ CountedListWriter,
+ External,
+ serialize,
+};
+
+use super::types::Type;
+use super::name_section::NameSection;
+use super::reloc_section::RelocSection;
+
+const ENTRIES_BUFFER_LENGTH: usize = 16384;
+
+/// Section in the WebAssembly module.
+#[derive(Debug, Clone, PartialEq)]
+pub enum Section {
+ /// Section is unparsed.
+ Unparsed {
+ /// id of the unparsed section.
+ id: u8,
+ /// raw bytes of the unparsed section.
+ payload: Vec<u8>,
+ },
+ /// Custom section (`id=0`).
+ Custom(CustomSection),
+ /// Types section.
+ Type(TypeSection),
+ /// Import section.
+ Import(ImportSection),
+ /// Function signatures section.
+ Function(FunctionSection),
+ /// Table definition section.
+ Table(TableSection),
+ /// Memory definition section.
+ Memory(MemorySection),
+ /// Global entries section.
+ Global(GlobalSection),
+ /// Export definitions.
+ Export(ExportSection),
+ /// Entry reference of the module.
+ Start(u32),
+ /// Elements section.
+ Element(ElementSection),
+ /// Number of passive data entries in the data section
+ DataCount(u32),
+ /// Function bodies section.
+ Code(CodeSection),
+ /// Data definition section.
+ Data(DataSection),
+ /// Name section.
+ ///
+ /// Note that initially it is not parsed until `parse_names` is called explicitly.
+ Name(NameSection),
+ /// Relocation section.
+ ///
+ /// Note that initially it is not parsed until `parse_reloc` is called explicitly.
+ /// Also note that currently there are serialization (but not de-serialization)
+ /// issues with this section (#198).
+ Reloc(RelocSection),
+}
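+
+// Name and Reloc are not core sections: on the wire both are encoded as
+// custom sections (id 0). `Serialize` below writes the name section as a
+// custom section called "name", and a reloc section under its own name.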
+
+impl Deserialize for Section {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ let id = match VarUint7::deserialize(reader) {
+ // todo: be more selective in detecting that there are no more sections
+ Err(_) => { return Err(Error::UnexpectedEof); },
+ Ok(id) => id,
+ };
+
+ Ok(
+ match id.into() {
+ 0 => {
+ Section::Custom(CustomSection::deserialize(reader)?.into())
+ },
+ 1 => {
+ Section::Type(TypeSection::deserialize(reader)?)
+ },
+ 2 => {
+ Section::Import(ImportSection::deserialize(reader)?)
+ },
+ 3 => {
+ Section::Function(FunctionSection::deserialize(reader)?)
+ },
+ 4 => {
+ Section::Table(TableSection::deserialize(reader)?)
+ },
+ 5 => {
+ Section::Memory(MemorySection::deserialize(reader)?)
+ },
+ 6 => {
+ Section::Global(GlobalSection::deserialize(reader)?)
+ },
+ 7 => {
+ Section::Export(ExportSection::deserialize(reader)?)
+ },
+ 8 => {
+ let mut section_reader = SectionReader::new(reader)?;
+ let start_idx = VarUint32::deserialize(&mut section_reader)?;
+ section_reader.close()?;
+ Section::Start(start_idx.into())
+ },
+ 9 => {
+ Section::Element(ElementSection::deserialize(reader)?)
+ },
+ 10 => {
+ Section::Code(CodeSection::deserialize(reader)?)
+ },
+ 11 => {
+ Section::Data(DataSection::deserialize(reader)?)
+ },
+ 12 => {
+ let mut section_reader = SectionReader::new(reader)?;
+ let count = VarUint32::deserialize(&mut section_reader)?;
+ section_reader.close()?;
+ Section::DataCount(count.into())
+ },
+ invalid_id => {
+ return Err(Error::InvalidSectionId(invalid_id))
+ },
+ }
+ )
+ }
+}
+
+impl Serialize for Section {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ match self {
+ Section::Custom(custom_section) => {
+ VarUint7::from(0x00).serialize(writer)?;
+ custom_section.serialize(writer)?;
+ },
+ Section::Unparsed { id, payload } => {
+ VarUint7::from(id).serialize(writer)?;
+ writer.write(&payload[..])?;
+ },
+ Section::Type(type_section) => {
+ VarUint7::from(0x01).serialize(writer)?;
+ type_section.serialize(writer)?;
+ },
+ Section::Import(import_section) => {
+ VarUint7::from(0x02).serialize(writer)?;
+ import_section.serialize(writer)?;
+ },
+ Section::Function(function_section) => {
+ VarUint7::from(0x03).serialize(writer)?;
+ function_section.serialize(writer)?;
+ },
+ Section::Table(table_section) => {
+ VarUint7::from(0x04).serialize(writer)?;
+ table_section.serialize(writer)?;
+ },
+ Section::Memory(memory_section) => {
+ VarUint7::from(0x05).serialize(writer)?;
+ memory_section.serialize(writer)?;
+ },
+ Section::Global(global_section) => {
+ VarUint7::from(0x06).serialize(writer)?;
+ global_section.serialize(writer)?;
+ },
+ Section::Export(export_section) => {
+ VarUint7::from(0x07).serialize(writer)?;
+ export_section.serialize(writer)?;
+ },
+ Section::Start(index) => {
+ VarUint7::from(0x08).serialize(writer)?;
+ let mut counted_writer = CountedWriter::new(writer);
+ VarUint32::from(index).serialize(&mut counted_writer)?;
+ counted_writer.done()?;
+ },
+ Section::DataCount(count) => {
+ VarUint7::from(0x0c).serialize(writer)?;
+ let mut counted_writer = CountedWriter::new(writer);
+ VarUint32::from(count).serialize(&mut counted_writer)?;
+ counted_writer.done()?;
+ },
+ Section::Element(element_section) => {
+ VarUint7::from(0x09).serialize(writer)?;
+ element_section.serialize(writer)?;
+ },
+ Section::Code(code_section) => {
+ VarUint7::from(0x0a).serialize(writer)?;
+ code_section.serialize(writer)?;
+ },
+ Section::Data(data_section) => {
+ VarUint7::from(0x0b).serialize(writer)?;
+ data_section.serialize(writer)?;
+ },
+ Section::Name(name_section) => {
+ VarUint7::from(0x00).serialize(writer)?;
+ let custom = CustomSection {
+ name: "name".to_owned(),
+ payload: serialize(name_section)?,
+ };
+ custom.serialize(writer)?;
+ },
+ Section::Reloc(reloc_section) => {
+ VarUint7::from(0x00).serialize(writer)?;
+ reloc_section.serialize(writer)?;
+ },
+ }
+ Ok(())
+ }
+}
+
+impl Section {
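+ // Ordering rank of the section within a module. Note that this is not
+ // always the binary section id: e.g. DataCount is written with id 0x0c,
+ // but must appear before the Code section, hence its 0x0a rank here.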
+ pub(crate) fn order(&self) -> u8 {
+ match *self {
+ Section::Custom(_) => 0x00,
+ Section::Unparsed { .. } => 0x00,
+ Section::Type(_) => 0x1,
+ Section::Import(_) => 0x2,
+ Section::Function(_) => 0x3,
+ Section::Table(_) => 0x4,
+ Section::Memory(_) => 0x5,
+ Section::Global(_) => 0x6,
+ Section::Export(_) => 0x7,
+ Section::Start(_) => 0x8,
+ Section::Element(_) => 0x9,
+ Section::DataCount(_) => 0x0a,
+ Section::Code(_) => 0x0b,
+ Section::Data(_) => 0x0c,
+ Section::Name(_) => 0x00,
+ Section::Reloc(_) => 0x00,
+ }
+ }
+}
+
+pub(crate) struct SectionReader {
+ cursor: io::Cursor<Vec<u8>>,
+ declared_length: usize,
+}
+
+impl SectionReader {
+ pub fn new<R: io::Read>(reader: &mut R) -> Result<Self, elements::Error> {
+ let length = u32::from(VarUint32::deserialize(reader)?) as usize;
+ let inner_buffer = buffered_read!(ENTRIES_BUFFER_LENGTH, length, reader);
+ let buf_length = inner_buffer.len();
+ let cursor = io::Cursor::new(inner_buffer);
+
+ Ok(SectionReader {
+ cursor: cursor,
+ declared_length: buf_length,
+ })
+ }
+
+ pub fn close(self) -> Result<(), io::Error> {
+ let cursor = self.cursor;
+ let buf_length = self.declared_length;
+
+ if cursor.position() != buf_length {
+ Err(io::Error::InvalidData)
+ } else {
+ Ok(())
+ }
+ }
+}
+
+impl io::Read for SectionReader {
+ fn read(&mut self, buf: &mut [u8]) -> io::Result<()> {
+ self.cursor.read(buf)?;
+ Ok(())
+ }
+}
+
+fn read_entries<R: io::Read, T: Deserialize<Error=elements::Error>>(reader: &mut R)
+ -> Result<Vec<T>, elements::Error>
+{
+ let mut section_reader = SectionReader::new(reader)?;
+ let result = CountedList::<T>::deserialize(&mut section_reader)?.into_inner();
+ section_reader.close()?;
+ Ok(result)
+}
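+
+// Sections that hold a list of entries are read through `read_entries`: the
+// declared payload is buffered by `SectionReader`, decoded as a
+// length-prefixed list of entries, and `close()` verifies that the whole
+// buffered payload was consumed.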
+
+/// Custom section.
+#[derive(Debug, Default, Clone, PartialEq)]
+pub struct CustomSection {
+ name: String,
+ payload: Vec<u8>,
+}
+
+impl CustomSection {
+ /// Creates a new custom section with the given name and payload.
+ pub fn new(name: String, payload: Vec<u8>) -> CustomSection {
+ CustomSection { name, payload }
+ }
+
+ /// Name of the custom section.
+ pub fn name(&self) -> &str {
+ &self.name
+ }
+
+ /// Payload of the custom section.
+ pub fn payload(&self) -> &[u8] {
+ &self.payload
+ }
+
+ /// Name of the custom section (mutable).
+ pub fn name_mut(&mut self) -> &mut String {
+ &mut self.name
+ }
+
+ /// Payload of the custom section (mutable).
+ pub fn payload_mut(&mut self) -> &mut Vec<u8> {
+ &mut self.payload
+ }
+}
+
+impl Deserialize for CustomSection {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ let section_length: usize = u32::from(VarUint32::deserialize(reader)?) as usize;
+ let buf = buffered_read!(16384, section_length, reader);
+ let mut cursor = io::Cursor::new(&buf[..]);
+ let name = String::deserialize(&mut cursor)?;
+ let payload = buf[cursor.position() as usize..].to_vec();
+ Ok(CustomSection { name: name, payload: payload })
+ }
+}
+
+impl Serialize for CustomSection {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ use io::Write;
+
+ let mut counted_writer = CountedWriter::new(writer);
+ self.name.serialize(&mut counted_writer)?;
+ counted_writer.write(&self.payload[..])?;
+ counted_writer.done()?;
+ Ok(())
+ }
+}
+
+/// Section with type declarations.
+#[derive(Debug, Default, Clone, PartialEq)]
+pub struct TypeSection(Vec<Type>);
+
+impl TypeSection {
+ /// New type section with provided types.
+ pub fn with_types(types: Vec<Type>) -> Self {
+ TypeSection(types)
+ }
+
+ /// List of type declarations.
+ pub fn types(&self) -> &[Type] {
+ &self.0
+ }
+
+ /// List of type declarations (mutable).
+ pub fn types_mut(&mut self) -> &mut Vec<Type> {
+ &mut self.0
+ }
+}
+
+impl Deserialize for TypeSection {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ Ok(TypeSection(read_entries(reader)?))
+ }
+}
+
+impl Serialize for TypeSection {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ let mut counted_writer = CountedWriter::new(writer);
+ let data = self.0;
+ let counted_list = CountedListWriter::<Type, _>(
+ data.len(),
+ data.into_iter().map(Into::into),
+ );
+ counted_list.serialize(&mut counted_writer)?;
+ counted_writer.done()?;
+ Ok(())
+ }
+}
+
+/// Section of the imports definition.
+#[derive(Debug, Default, Clone, PartialEq)]
+pub struct ImportSection(Vec<ImportEntry>);
+
+impl ImportSection {
+ /// New import section with provided types.
+ pub fn with_entries(entries: Vec<ImportEntry>) -> Self {
+ ImportSection(entries)
+ }
+
+ /// List of import entries.
+ pub fn entries(&self) -> &[ImportEntry] {
+ &self.0
+ }
+
+ /// List of import entries (mutable).
+ pub fn entries_mut(&mut self) -> &mut Vec<ImportEntry> {
+ &mut self.0
+ }
+
+ /// Returns number of functions.
+ pub fn functions(&self) -> usize {
+ self.0.iter()
+ .filter(|entry| match entry.external() { &External::Function(_) => true, _ => false })
+ .count()
+ }
+
+ /// Returns number of globals
+ pub fn globals(&self) -> usize {
+ self.0.iter()
+ .filter(|entry| match entry.external() { &External::Global(_) => true, _ => false })
+ .count()
+ }
+}
+
+impl Deserialize for ImportSection {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ Ok(ImportSection(read_entries(reader)?))
+ }
+}
+
+impl Serialize for ImportSection {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ let mut counted_writer = CountedWriter::new(writer);
+ let data = self.0;
+ let counted_list = CountedListWriter::<ImportEntry, _>(
+ data.len(),
+ data.into_iter().map(Into::into),
+ );
+ counted_list.serialize(&mut counted_writer)?;
+ counted_writer.done()?;
+ Ok(())
+ }
+}
+
+/// Section with function signatures definition.
+#[derive(Default, Debug, Clone, PartialEq)]
+pub struct FunctionSection(Vec<Func>);
+
+impl FunctionSection {
+ /// New function signatures section with provided entries.
+ pub fn with_entries(entries: Vec<Func>) -> Self {
+ FunctionSection(entries)
+ }
+
+ /// List of all functions in the section, mutable.
+ pub fn entries_mut(&mut self) -> &mut Vec<Func> {
+ &mut self.0
+ }
+
+ /// List of all functions in the section.
+ pub fn entries(&self) -> &[Func] {
+ &self.0
+ }
+}
+
+impl Deserialize for FunctionSection {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ Ok(FunctionSection(read_entries(reader)?))
+ }
+}
+
+impl Serialize for FunctionSection {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ let mut counted_writer = CountedWriter::new(writer);
+ let data = self.0;
+ let counted_list = CountedListWriter::<VarUint32, _>(
+ data.len(),
+ data.into_iter().map(|func| func.type_ref().into())
+ );
+ counted_list.serialize(&mut counted_writer)?;
+ counted_writer.done()?;
+ Ok(())
+ }
+}
+
+/// Section with table definition (currently only one is allowed).
+#[derive(Default, Debug, Clone, PartialEq)]
+pub struct TableSection(Vec<TableType>);
+
+impl TableSection {
+ /// Table entries.
+ pub fn entries(&self) -> &[TableType] {
+ &self.0
+ }
+
+ /// New table section with provided table entries.
+ pub fn with_entries(entries: Vec<TableType>) -> Self {
+ TableSection(entries)
+ }
+
+ /// Mutable table entries.
+ pub fn entries_mut(&mut self) -> &mut Vec<TableType> {
+ &mut self.0
+ }
+}
+
+impl Deserialize for TableSection {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ Ok(TableSection(read_entries(reader)?))
+ }
+}
+
+impl Serialize for TableSection {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ let mut counted_writer = CountedWriter::new(writer);
+ let data = self.0;
+ let counted_list = CountedListWriter::<TableType, _>(
+ data.len(),
+ data.into_iter().map(Into::into),
+ );
+ counted_list.serialize(&mut counted_writer)?;
+ counted_writer.done()?;
+ Ok(())
+ }
+}
+
+/// Section with memory definitions (currently only one entry is allowed).
+#[derive(Default, Debug, Clone, PartialEq)]
+pub struct MemorySection(Vec<MemoryType>);
+
+impl MemorySection {
+ /// List of all memory entries in the section
+ pub fn entries(&self) -> &[MemoryType] {
+ &self.0
+ }
+
+ /// New memory section with memory types.
+ pub fn with_entries(entries: Vec<MemoryType>) -> Self {
+ MemorySection(entries)
+ }
+
+ /// Mutable list of all memory entries in the section.
+ pub fn entries_mut(&mut self) -> &mut Vec<MemoryType> {
+ &mut self.0
+ }
+}
+
+impl Deserialize for MemorySection {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ Ok(MemorySection(read_entries(reader)?))
+ }
+}
+
+impl Serialize for MemorySection {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ let mut counted_writer = CountedWriter::new(writer);
+ let data = self.0;
+ let counted_list = CountedListWriter::<MemoryType, _>(
+ data.len(),
+ data.into_iter().map(Into::into),
+ );
+ counted_list.serialize(&mut counted_writer)?;
+ counted_writer.done()?;
+ Ok(())
+ }
+}
+
+/// Globals definition section.
+#[derive(Default, Debug, Clone, PartialEq)]
+pub struct GlobalSection(Vec<GlobalEntry>);
+
+impl GlobalSection {
+ /// List of all global entries in the section.
+ pub fn entries(&self) -> &[GlobalEntry] {
+ &self.0
+ }
+
+ /// New global section from list of global entries.
+ pub fn with_entries(entries: Vec<GlobalEntry>) -> Self {
+ GlobalSection(entries)
+ }
+
+ /// List of all global entries in the section (mutable).
+ pub fn entries_mut(&mut self) -> &mut Vec<GlobalEntry> {
+ &mut self.0
+ }
+}
+
+impl Deserialize for GlobalSection {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ Ok(GlobalSection(read_entries(reader)?))
+ }
+}
+
+impl Serialize for GlobalSection {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ let mut counted_writer = CountedWriter::new(writer);
+ let data = self.0;
+ let counted_list = CountedListWriter::<GlobalEntry, _>(
+ data.len(),
+ data.into_iter().map(Into::into),
+ );
+ counted_list.serialize(&mut counted_writer)?;
+ counted_writer.done()?;
+ Ok(())
+ }
+}
+
+/// List of exports definition.
+#[derive(Debug, Default, Clone, PartialEq)]
+pub struct ExportSection(Vec<ExportEntry>);
+
+impl ExportSection {
+ /// List of all export entries in the section.
+ pub fn entries(&self) -> &[ExportEntry] {
+ &self.0
+ }
+
+ /// New export section from list of export entries.
+ pub fn with_entries(entries: Vec<ExportEntry>) -> Self {
+ ExportSection(entries)
+ }
+
+ /// List of all export entries in the section (mutable).
+ pub fn entries_mut(&mut self) -> &mut Vec<ExportEntry> {
+ &mut self.0
+ }
+}
+
+impl Deserialize for ExportSection {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ Ok(ExportSection(read_entries(reader)?))
+ }
+}
+
+impl Serialize for ExportSection {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ let mut counted_writer = CountedWriter::new(writer);
+ let data = self.0;
+ let counted_list = CountedListWriter::<ExportEntry, _>(
+ data.len(),
+ data.into_iter().map(Into::into),
+ );
+ counted_list.serialize(&mut counted_writer)?;
+ counted_writer.done()?;
+ Ok(())
+ }
+}
+
+/// Section with function bodies of the module.
+#[derive(Default, Debug, Clone, PartialEq)]
+pub struct CodeSection(Vec<FuncBody>);
+
+impl CodeSection {
+ /// New code section with specified function bodies.
+ pub fn with_bodies(bodies: Vec<FuncBody>) -> Self {
+ CodeSection(bodies)
+ }
+
+ /// All function bodies in the section.
+ pub fn bodies(&self) -> &[FuncBody] {
+ &self.0
+ }
+
+ /// All function bodies in the section, mutable.
+ pub fn bodies_mut(&mut self) -> &mut Vec<FuncBody> {
+ &mut self.0
+ }
+}
+
+impl Deserialize for CodeSection {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ Ok(CodeSection(read_entries(reader)?))
+ }
+}
+
+impl Serialize for CodeSection {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ let mut counted_writer = CountedWriter::new(writer);
+ let data = self.0;
+ let counted_list = CountedListWriter::<FuncBody, _>(
+ data.len(),
+ data.into_iter().map(Into::into),
+ );
+ counted_list.serialize(&mut counted_writer)?;
+ counted_writer.done()?;
+ Ok(())
+ }
+}
+
+/// Element entries section.
+#[derive(Default, Debug, Clone, PartialEq)]
+pub struct ElementSection(Vec<ElementSegment>);
+
+impl ElementSection {
+ /// New elements section.
+ pub fn with_entries(entries: Vec<ElementSegment>) -> Self {
+ ElementSection(entries)
+ }
+
+ /// List of all element entries in the section.
+ pub fn entries(&self) -> &[ElementSegment] {
+ &self.0
+ }
+
+ /// List of all data entries in the section (mutable).
+ pub fn entries_mut(&mut self) -> &mut Vec<ElementSegment> {
+ &mut self.0
+ }
+}
+
+impl Deserialize for ElementSection {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ Ok(ElementSection(read_entries(reader)?))
+ }
+}
+
+impl Serialize for ElementSection {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ let mut counted_writer = CountedWriter::new(writer);
+ let data = self.0;
+ let counted_list = CountedListWriter::<ElementSegment, _>(
+ data.len(),
+ data.into_iter().map(Into::into),
+ );
+ counted_list.serialize(&mut counted_writer)?;
+ counted_writer.done()?;
+ Ok(())
+ }
+}
+
+/// Data entries definitions.
+#[derive(Default, Debug, Clone, PartialEq)]
+pub struct DataSection(Vec<DataSegment>);
+
+impl DataSection {
+ /// New data section.
+ pub fn with_entries(entries: Vec<DataSegment>) -> Self {
+ DataSection(entries)
+ }
+
+ /// List of all data entries in the section.
+ pub fn entries(&self) -> &[DataSegment] {
+ &self.0
+ }
+
+ /// List of all data entries in the section (mutable).
+ pub fn entries_mut(&mut self) -> &mut Vec<DataSegment> {
+ &mut self.0
+ }
+}
+
+impl Deserialize for DataSection {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ Ok(DataSection(read_entries(reader)?))
+ }
+}
+
+impl Serialize for DataSection {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ let mut counted_writer = CountedWriter::new(writer);
+ let data = self.0;
+ let counted_list = CountedListWriter::<DataSegment, _>(
+ data.len(),
+ data.into_iter().map(Into::into),
+ );
+ counted_list.serialize(&mut counted_writer)?;
+ counted_writer.done()?;
+ Ok(())
+ }
+}
+
+#[cfg(test)]
+mod tests {
+
+ use super::super::{
+ deserialize_buffer, deserialize_file, ValueType, InitExpr, DataSegment,
+ serialize, ElementSegment, Instructions, BlockType, Local, FuncBody,
+ };
+ use super::{Section, TypeSection, Type, DataSection, ElementSection, CodeSection};
+
+ #[test]
+ fn import_section() {
+ let module = deserialize_file("./res/cases/v1/test5.wasm").expect("Should be deserialized");
+ let mut found = false;
+ for section in module.sections() {
+ match section {
+ &Section::Import(ref import_section) => {
+ assert_eq!(25, import_section.entries().len());
+ found = true
+ },
+ _ => { }
+ }
+ }
+ assert!(found, "There should be an import section in test5.wasm");
+ }
+
+ fn functions_test_payload() -> &'static [u8] {
+ &[
+ // functions section id
+ 0x03u8,
+ // functions section length
+ 0x87, 0x80, 0x80, 0x80, 0x0,
+ // number of functions
+ 0x04,
+ // type reference 1
+ 0x01,
+ // type reference 2
+ 0x86, 0x80, 0x00,
+ // type reference 3
+ 0x09,
+ // type reference 4
+ 0x33
+ ]
+ }
+
+ #[test]
+ fn fn_section_detect() {
+ let section: Section =
+ deserialize_buffer(functions_test_payload()).expect("section to be deserialized");
+
+ match section {
+ Section::Function(_) => {},
+ _ => {
+ panic!("Payload should be recognized as functions section")
+ }
+ }
+ }
+
+ #[test]
+ fn fn_section_number() {
+ let section: Section =
+ deserialize_buffer(functions_test_payload()).expect("section to be deserialized");
+
+ match section {
+ Section::Function(fn_section) => {
+ assert_eq!(4, fn_section.entries().len(), "There should be 4 functions total");
+ },
+ _ => {
+ // will be caught by the dedicated test
+ }
+ }
+ }
+
+ #[test]
+ fn fn_section_ref() {
+ let section: Section =
+ deserialize_buffer(functions_test_payload()).expect("section to be deserialized");
+
+ match section {
+ Section::Function(fn_section) => {
+ assert_eq!(6, fn_section.entries()[1].type_ref());
+ },
+ _ => {
+ // will be caught by the dedicated test
+ }
+ }
+ }
+
+ fn types_test_payload() -> &'static [u8] {
+ &[
+ // section length
+ 11,
+
+ // 2 functions
+ 2,
+ // func 1, form=0x60 (func)
+ 0x60,
+ // param_count=1
+ 1,
+ // first param
+ 0x7e, // i64
+ // no return params
+ 0x00,
+
+ // func 2, form=0x60 (func)
+ 0x60,
+ // param_count=2
+ 2,
+ // first param
+ 0x7e,
+ // second param
+ 0x7d,
+ // return param (is_present, param_type)
+ 0x01, 0x7e
+ ]
+ }
+
+ #[test]
+ fn type_section_len() {
+ let type_section: TypeSection =
+ deserialize_buffer(types_test_payload()).expect("type_section be deserialized");
+
+ assert_eq!(type_section.types().len(), 2);
+ }
+
+ #[test]
+ fn type_section_infer() {
+ let type_section: TypeSection =
+ deserialize_buffer(types_test_payload()).expect("type_section be deserialized");
+
+ let t1 = match &type_section.types()[1] {
+ &Type::Function(ref func_type) => func_type
+ };
+
+ assert_eq!(Some(ValueType::I64), t1.return_type());
+ assert_eq!(2, t1.params().len());
+ }
+
+ fn export_payload() -> &'static [u8] {
+ &[
+ // section id
+ 0x07,
+ // section length
+ 28,
+ // 6 entries
+ 6,
+ // func "A", index 6
+ // [name_len(1-5 bytes), name_bytes(name_len), internal_kind(1 byte), internal_index(1-5 bytes)]
+ 0x01, 0x41, 0x01, 0x86, 0x80, 0x00,
+ // func "B", index 8
+ 0x01, 0x42, 0x01, 0x86, 0x00,
+ // func "C", index 7
+ 0x01, 0x43, 0x01, 0x07,
+ // memory "D", index 0
+ 0x01, 0x44, 0x02, 0x00,
+ // func "E", index 1
+ 0x01, 0x45, 0x01, 0x01,
+ // func "F", index 2
+ 0x01, 0x46, 0x01, 0x02
+ ]
+ }
+
+
+ #[test]
+ fn export_detect() {
+ let section: Section =
+ deserialize_buffer(export_payload()).expect("section to be deserialized");
+
+ match section {
+ Section::Export(_) => {},
+ _ => {
+ panic!("Payload should be recognized as export section")
+ }
+ }
+ }
+
+ fn code_payload() -> &'static [u8] {
+ &[
+ // section id
+ 0x0Au8,
+ // section length, 32
+ 0x20,
+ // body count
+ 0x01,
+ // body 1, length 30
+ 0x1E,
+ 0x01, 0x01, 0x7F, // local i32 (one collection of length one of type i32)
+ 0x02, 0x7F, // block i32
+ 0x23, 0x00, // get_global 0
+ 0x21, 0x01, // set_local 1
+ 0x23, 0x00, // get_global 0
+ 0x20, 0x00, // get_local 0
+ 0x6A, // i32.add
+ 0x24, 0x00, // set_global 0
+ 0x23, 0x00, // get_global 0
+ 0x41, 0x0F, // i32.const 15
+ 0x6A, // i32.add
+ 0x41, 0x70, // i32.const -16
+ 0x71, // i32.and
+ 0x24, 0x00, // set_global 0
+ 0x20, 0x01, // get_local 1
+ 0x0B,
+ 0x0B,
+ ]
+ }
+
+ #[test]
+ fn code_detect() {
+
+ let section: Section =
+ deserialize_buffer(code_payload()).expect("section to be deserialized");
+
+ match section {
+ Section::Code(_) => {},
+ _ => {
+ panic!("Payload should be recognized as a code section")
+ }
+ }
+ }
+
+ fn data_payload() -> &'static [u8] {
+ &[
+ 0x0bu8, // section id
+ 20, // 20 bytes overall
+ 0x01, // number of segments
+ 0x00, // index
+ 0x0b, // just `end` op
+ 0x10, // value length, 16 bytes
+ // 16x 0x00
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00
+ ]
+ }
+
+ #[test]
+ fn data_section_ser() {
+ let data_section = DataSection::with_entries(
+ vec![DataSegment::new(0u32, Some(InitExpr::empty()), vec![0u8; 16])]
+ );
+
+ let buf = serialize(data_section).expect("Data section to be serialized");
+
+ assert_eq!(buf, vec![
+ 20u8, // 20 bytes overall
+ 0x01, // number of segments
+ 0x00, // index
+ 0x0b, // just `end` op
+ 16, // value of length 16
+ 0x00, 0x00, 0x00, 0x00, // 16x 0x00 as in initialization
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00
+ ]);
+ }
+
+ #[test]
+ fn data_section_detect() {
+ let section: Section =
+ deserialize_buffer(data_payload()).expect("section to be deserialized");
+
+ match section {
+ Section::Data(_) => {},
+ _ => {
+ panic!("Payload should be recognized as a data section")
+ }
+ }
+ }
+
+ #[test]
+ fn element_section_ser() {
+ let element_section = ElementSection::with_entries(
+ vec![ElementSegment::new(0u32, Some(InitExpr::empty()), vec![0u32; 4])]
+ );
+
+ let buf = serialize(element_section).expect("Element section to be serialized");
+
+ assert_eq!(buf, vec![
+ 08u8, // 8 bytes overall
+ 0x01, // number of segments
+ 0x00, // index
+ 0x0b, // just `end` op
+ 0x04, // 4 elements
+ 0x00, 0x00, 0x00, 0x00 // 4x 0x00 as in initialization
+ ]);
+ }
+
+ #[test]
+ fn code_section_ser() {
+ use super::super::Instruction::*;
+
+ let code_section = CodeSection::with_bodies(
+ vec![
+ FuncBody::new(
+ vec![Local::new(1, ValueType::I32)],
+ Instructions::new(vec![
+ Block(BlockType::Value(ValueType::I32)),
+ GetGlobal(0),
+ End,
+ End,
+ ])
+ )
+ ]);
+
+ let buf = serialize(code_section).expect("Code section to be serialized");
+
+ assert_eq!(buf, vec![
+ 11u8, // 11 bytes total section size
+ 0x01, // 1 function
+ 9, // function #1 total code size
+ 1, // 1 local variable declaration
+ 1, // amount of variables
+ 0x7f, // type of variable (7-bit, -0x01), negative
+ 0x02, // block
+ 0x7f, // block return type (7-bit, -0x01), negative
+ 0x23, 0x00, // get_global(0)
+ 0x0b, // block end
+ 0x0b, // function end
+ ]);
+ }
+
+ #[test]
+ fn start_section() {
+ let section: Section = deserialize_buffer(&[08u8, 01u8, 00u8]).expect("Start section to deserialize");
+ if let Section::Start(_) = section {
+ } else {
+ panic!("Payload should be a start section");
+ }
+
+ let serialized = serialize(section).expect("Start section to serialize successfully");
+
+ assert_eq!(serialized, vec![08u8, 01u8, 00u8]);
+ }
+}
diff --git a/third_party/rust/parity-wasm/src/elements/segment.rs b/third_party/rust/parity-wasm/src/elements/segment.rs
new file mode 100644
index 0000000000..98bc3097b9
--- /dev/null
+++ b/third_party/rust/parity-wasm/src/elements/segment.rs
@@ -0,0 +1,283 @@
+use alloc::vec::Vec;
+use crate::io;
+use super::{Deserialize, Serialize, Error, VarUint32, CountedList, InitExpr, CountedListWriter};
+
+#[cfg(feature="bulk")]
+const FLAG_MEMZERO: u32 = 0;
+#[cfg(feature="bulk")]
+const FLAG_PASSIVE: u32 = 1;
+#[cfg(feature="bulk")]
+const FLAG_MEM_NONZERO: u32 = 2;
+
+/// Entry in the element section.
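+///
+/// Construction sketch for the plain (active, non-"passive") form, where an offset
+/// expression is required (illustrative only):
+///
+/// ```ignore
+/// use parity_wasm::elements::{ElementSegment, InitExpr};
+///
+/// // Table 0, offset given by an (empty) init expression, three function indices.
+/// let segment = ElementSegment::new(0, Some(InitExpr::empty()), vec![4, 5, 6]);
+/// assert_eq!(segment.members(), &[4, 5, 6][..]);
+/// ```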
+#[derive(Debug, Clone, PartialEq)]
+pub struct ElementSegment {
+ index: u32,
+ offset: Option<InitExpr>,
+ members: Vec<u32>,
+
+ #[cfg(feature="bulk")]
+ passive: bool,
+}
+
+impl ElementSegment {
+ /// New element segment.
+ pub fn new(index: u32, offset: Option<InitExpr>, members: Vec<u32>) -> Self {
+ ElementSegment {
+ index: index,
+ offset: offset,
+ members: members,
+
+ #[cfg(feature="bulk")]
+ passive: false,
+ }
+ }
+
+ /// Sequence of function indices.
+ pub fn members(&self) -> &[u32] { &self.members }
+
+ /// Sequence of function indices (mutable)
+ pub fn members_mut(&mut self) -> &mut Vec<u32> { &mut self.members }
+
+ /// Table index (currently the only valid value is `0`).
+ pub fn index(&self) -> u32 { self.index }
+
+ /// An i32 initializer expression that computes the offset at which to place the elements.
+ ///
+ /// Note that this returns `None` if the segment is `passive`.
+ pub fn offset(&self) -> &Option<InitExpr> { &self.offset }
+
+ /// An i32 initializer expression that computes the offset at which to place the elements (mutable)
+ ///
+ /// Note that this returns `None` if the segment is `passive`.
+ pub fn offset_mut(&mut self) -> &mut Option<InitExpr> { &mut self.offset }
+}
+
+#[cfg(feature="bulk")]
+impl ElementSegment {
+ /// Whether or not this table segment is "passive"
+ pub fn passive(&self) -> bool { self.passive }
+
+ /// Whether or not this table segment is "passive" (mutable).
+ pub fn passive_mut(&mut self) -> &mut bool { &mut self.passive }
+
+ /// Set whether or not this table segment is "passive"
+ pub fn set_passive(&mut self, passive: bool) {
+ self.passive = passive;
+ }
+}
+
+impl Deserialize for ElementSegment {
+ type Error = Error;
+
+ #[cfg(not(feature="bulk"))]
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ let index: u32 = VarUint32::deserialize(reader)?.into();
+ let offset = InitExpr::deserialize(reader)?;
+ let members: Vec<u32> = CountedList::<VarUint32>::deserialize(reader)?
+ .into_inner()
+ .into_iter()
+ .map(Into::into)
+ .collect();
+
+ Ok(ElementSegment {
+ index,
+ offset: Some(offset),
+ members,
+ })
+ }
+
+ #[cfg(feature="bulk")]
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ // This piece of data was treated as `index` [of the table], but was repurposed
+ // for flags in the bulk-memory operations proposal.
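+ // The accepted values (see the constants at the top of this file):
+ // * `FLAG_MEMZERO` (0) - active segment for table 0, an offset expression follows;
+ // * `FLAG_PASSIVE` (1) - passive segment, no table index and no offset expression;
+ // * `FLAG_MEM_NONZERO` (2) - active segment with an explicit table index.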
+ let flags: u32 = VarUint32::deserialize(reader)?.into();
+ let index = if flags == FLAG_MEMZERO || flags == FLAG_PASSIVE {
+ 0u32
+ } else if flags == FLAG_MEM_NONZERO {
+ VarUint32::deserialize(reader)?.into()
+ } else {
+ return Err(Error::InvalidSegmentFlags(flags))
+ };
+ let offset = if flags == FLAG_PASSIVE {
+ None
+ } else {
+ Some(InitExpr::deserialize(reader)?)
+ };
+
+ let funcs: Vec<u32> = CountedList::<VarUint32>::deserialize(reader)?
+ .into_inner()
+ .into_iter()
+ .map(Into::into)
+ .collect();
+
+ Ok(ElementSegment {
+ index: index,
+ offset: offset,
+ members: funcs,
+ passive: flags == FLAG_PASSIVE,
+ })
+ }
+}
+
+impl Serialize for ElementSegment {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ #[cfg(feature="bulk")]
+ {
+ if self.passive {
+ VarUint32::from(FLAG_PASSIVE).serialize(writer)?;
+ } else if self.index != 0 {
+ VarUint32::from(FLAG_MEM_NONZERO).serialize(writer)?;
+ VarUint32::from(self.index).serialize(writer)?;
+ } else {
+ VarUint32::from(FLAG_MEMZERO).serialize(writer)?;
+ }
+ }
+ #[cfg(not(feature="bulk"))]
+ VarUint32::from(self.index).serialize(writer)?;
+
+ if let Some(offset) = self.offset {
+ offset.serialize(writer)?;
+ }
+ let data = self.members;
+ let counted_list = CountedListWriter::<VarUint32, _>(
+ data.len(),
+ data.into_iter().map(Into::into),
+ );
+ counted_list.serialize(writer)?;
+ Ok(())
+ }
+}
+
+/// Data segment definition.
+#[derive(Clone, Debug, PartialEq)]
+pub struct DataSegment {
+ index: u32,
+ offset: Option<InitExpr>,
+ value: Vec<u8>,
+
+ #[cfg(feature="bulk")]
+ passive: bool,
+}
+
+impl DataSegment {
+ /// New data segment.
+ pub fn new(index: u32, offset: Option<InitExpr>, value: Vec<u8>) -> Self {
+ DataSegment {
+ index: index,
+ offset: offset,
+ value: value,
+
+ #[cfg(feature="bulk")]
+ passive: false,
+ }
+ }
+
+ /// Linear memory index (currently the only valid value is `0`).
+ pub fn index(&self) -> u32 { self.index }
+
+ /// An i32 initializer expression that computes the offset at which to place the data.
+ ///
+ /// Note that this returns `None` if the segment is `passive`.
+ pub fn offset(&self) -> &Option<InitExpr> { &self.offset }
+
+ /// An i32 initializer expression that computes the offset at which to place the data (mutable)
+ ///
+ /// Note that this returns `None` if the segment is `passive`.
+ pub fn offset_mut(&mut self) -> &mut Option<InitExpr> { &mut self.offset }
+
+ /// Initial value of the data segment.
+ pub fn value(&self) -> &[u8] { &self.value }
+
+ /// Initial value of the data segment (mutable).
+ pub fn value_mut(&mut self) -> &mut Vec<u8> { &mut self.value }
+}
+
+#[cfg(feature="bulk")]
+impl DataSegment {
+ /// Whether or not this data segment is "passive".
+ pub fn passive(&self) -> bool { self.passive }
+
+ /// Whether or not this data segment is "passive" (mutable).
+ pub fn passive_mut(&mut self) -> &mut bool { &mut self.passive }
+
+ /// Set whether or not this data segment is "passive".
+ pub fn set_passive(&mut self, passive: bool) {
+ self.passive = passive;
+ }
+}
+
+impl Deserialize for DataSegment {
+ type Error = Error;
+
+ #[cfg(not(feature="bulk"))]
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ let index = VarUint32::deserialize(reader)?;
+ let offset = InitExpr::deserialize(reader)?;
+ let value_len = u32::from(VarUint32::deserialize(reader)?) as usize;
+ let value_buf = buffered_read!(65536, value_len, reader);
+
+ Ok(DataSegment {
+ index: index.into(),
+ offset: Some(offset),
+ value: value_buf,
+ })
+ }
+
+ #[cfg(feature="bulk")]
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ let flags: u32 = VarUint32::deserialize(reader)?.into();
+ let index = if flags == FLAG_MEMZERO || flags == FLAG_PASSIVE {
+ 0u32
+ } else if flags == FLAG_MEM_NONZERO {
+ VarUint32::deserialize(reader)?.into()
+ } else {
+ return Err(Error::InvalidSegmentFlags(flags))
+ };
+ let offset = if flags == FLAG_PASSIVE {
+ None
+ } else {
+ Some(InitExpr::deserialize(reader)?)
+ };
+ let value_len = u32::from(VarUint32::deserialize(reader)?) as usize;
+ let value_buf = buffered_read!(65536, value_len, reader);
+
+ Ok(DataSegment {
+ index: index,
+ offset: offset,
+ value: value_buf,
+ passive: flags == FLAG_PASSIVE,
+ })
+ }
+}
+
+impl Serialize for DataSegment {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ #[cfg(feature="bulk")]
+ {
+ if self.passive {
+ VarUint32::from(FLAG_PASSIVE).serialize(writer)?;
+ } else if self.index != 0 {
+ VarUint32::from(FLAG_MEM_NONZERO).serialize(writer)?;
+ VarUint32::from(self.index).serialize(writer)?;
+ } else {
+ VarUint32::from(FLAG_MEMZERO).serialize(writer)?;
+ }
+ }
+ #[cfg(not(feature="bulk"))]
+ VarUint32::from(self.index).serialize(writer)?;
+
+ if let Some(offset) = self.offset {
+ offset.serialize(writer)?;
+ }
+
+ let value = self.value;
+ VarUint32::from(value.len()).serialize(writer)?;
+ writer.write(&value[..])?;
+ Ok(())
+ }
+}
diff --git a/third_party/rust/parity-wasm/src/elements/types.rs b/third_party/rust/parity-wasm/src/elements/types.rs
new file mode 100644
index 0000000000..462d954c46
--- /dev/null
+++ b/third_party/rust/parity-wasm/src/elements/types.rs
@@ -0,0 +1,267 @@
+use alloc::vec::Vec;
+use crate::io;
+use super::{
+ Deserialize, Serialize, Error, VarUint7, VarInt7, VarUint1, CountedList,
+ CountedListWriter, VarUint32,
+};
+use core::fmt;
+
+/// Type definition in the types section. Currently it can only be a function type.
+#[derive(Debug, Clone, PartialEq, Hash, Eq)]
+pub enum Type {
+ /// Function type.
+ Function(FunctionType),
+}
+
+impl Deserialize for Type {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ Ok(Type::Function(FunctionType::deserialize(reader)?))
+ }
+}
+
+impl Serialize for Type {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ match self {
+ Type::Function(fn_type) => fn_type.serialize(writer)
+ }
+ }
+}
+
+/// Value type.
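+///
+/// The `Display` implementation further below prints the text-format names, e.g.
+/// (illustrative only):
+///
+/// ```ignore
+/// use parity_wasm::elements::ValueType;
+///
+/// assert_eq!(ValueType::I64.to_string(), "i64");
+/// ```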
+#[derive(Clone, Copy, Debug, PartialEq, Hash, Eq)]
+pub enum ValueType {
+ /// 32-bit signed integer
+ I32,
+ /// 64-bit signed integer
+ I64,
+ /// 32-bit float
+ F32,
+ /// 64-bit float
+ F64,
+ #[cfg(feature="simd")]
+ /// 128-bit SIMD register
+ V128,
+}
+
+impl Deserialize for ValueType {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ let val = VarInt7::deserialize(reader)?;
+
+ match val.into() {
+ -0x01 => Ok(ValueType::I32),
+ -0x02 => Ok(ValueType::I64),
+ -0x03 => Ok(ValueType::F32),
+ -0x04 => Ok(ValueType::F64),
+ #[cfg(feature="simd")]
+ -0x05 => Ok(ValueType::V128),
+ _ => Err(Error::UnknownValueType(val.into())),
+ }
+ }
+}
+
+impl Serialize for ValueType {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ let val: VarInt7 = match self {
+ ValueType::I32 => -0x01,
+ ValueType::I64 => -0x02,
+ ValueType::F32 => -0x03,
+ ValueType::F64 => -0x04,
+ #[cfg(feature="simd")]
+ ValueType::V128 => -0x05,
+ }.into();
+ val.serialize(writer)?;
+ Ok(())
+ }
+}
+
+impl fmt::Display for ValueType {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match *self {
+ ValueType::I32 => write!(f, "i32"),
+ ValueType::I64 => write!(f, "i64"),
+ ValueType::F32 => write!(f, "f32"),
+ ValueType::F64 => write!(f, "f64"),
+ #[cfg(feature="simd")]
+ ValueType::V128 => write!(f, "v128"),
+ }
+ }
+}
+
+/// Block type which is basically `ValueType` + NoResult (to define blocks that have no return type)
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub enum BlockType {
+ /// Value-type specified block type
+ Value(ValueType),
+ /// No specified block type
+ NoResult,
+}
+
+impl Deserialize for BlockType {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ let val = VarInt7::deserialize(reader)?;
+
+ match val.into() {
+ -0x01 => Ok(BlockType::Value(ValueType::I32)),
+ -0x02 => Ok(BlockType::Value(ValueType::I64)),
+ -0x03 => Ok(BlockType::Value(ValueType::F32)),
+ -0x04 => Ok(BlockType::Value(ValueType::F64)),
+ #[cfg(feature="simd")]
+ 0x7b => Ok(BlockType::Value(ValueType::V128)),
+ -0x40 => Ok(BlockType::NoResult),
+ _ => Err(Error::UnknownValueType(val.into())),
+ }
+ }
+}
+
+impl Serialize for BlockType {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ let val: VarInt7 = match self {
+ BlockType::NoResult => -0x40i8,
+ BlockType::Value(ValueType::I32) => -0x01,
+ BlockType::Value(ValueType::I64) => -0x02,
+ BlockType::Value(ValueType::F32) => -0x03,
+ BlockType::Value(ValueType::F64) => -0x04,
+ #[cfg(feature="simd")]
+ BlockType::Value(ValueType::V128) => 0x7b,
+ }.into();
+ val.serialize(writer)?;
+ Ok(())
+ }
+}
+
+/// Function signature type.
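+///
+/// Sketch of building a `(i32, i32) -> i64` signature (illustrative only):
+///
+/// ```ignore
+/// use parity_wasm::elements::{FunctionType, ValueType};
+///
+/// let sig = FunctionType::new(vec![ValueType::I32, ValueType::I32], Some(ValueType::I64));
+/// assert_eq!(sig.params(), &[ValueType::I32, ValueType::I32][..]);
+/// assert_eq!(sig.return_type(), Some(ValueType::I64));
+/// ```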
+#[derive(Debug, Clone, PartialEq, Hash, Eq)]
+pub struct FunctionType {
+ form: u8,
+ params: Vec<ValueType>,
+ return_type: Option<ValueType>,
+}
+
+impl Default for FunctionType {
+ fn default() -> Self {
+ FunctionType {
+ form: 0x60,
+ params: Vec::new(),
+ return_type: None,
+ }
+ }
+}
+
+impl FunctionType {
+ /// New function type, given its parameter types (`params`) and return type (`return_type`).
+ pub fn new(params: Vec<ValueType>, return_type: Option<ValueType>) -> Self {
+ FunctionType {
+ params: params,
+ return_type: return_type,
+ ..Default::default()
+ }
+ }
+ /// Function form (currently the only valid value is `0x60`).
+ pub fn form(&self) -> u8 { self.form }
+ /// Parameters in the function signature.
+ pub fn params(&self) -> &[ValueType] { &self.params }
+ /// Mutable parameters in the function signature.
+ pub fn params_mut(&mut self) -> &mut Vec<ValueType> { &mut self.params }
+ /// Return type in the function signature, if any.
+ pub fn return_type(&self) -> Option<ValueType> { self.return_type }
+ /// Mutable return type in the function signature, if any.
+ pub fn return_type_mut(&mut self) -> &mut Option<ValueType> { &mut self.return_type }
+}
+
+impl Deserialize for FunctionType {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ let form: u8 = VarUint7::deserialize(reader)?.into();
+
+ if form != 0x60 {
+ return Err(Error::UnknownFunctionForm(form));
+ }
+
+ let params: Vec<ValueType> = CountedList::deserialize(reader)?.into_inner();
+
+ let return_types: u32 = VarUint32::deserialize(reader)?.into();
+
+ let return_type = if return_types == 1 {
+ Some(ValueType::deserialize(reader)?)
+ } else if return_types == 0 {
+ None
+ } else {
+ return Err(Error::Other("Return types length should be 0 or 1"));
+ };
+
+ Ok(FunctionType {
+ form: form,
+ params: params,
+ return_type: return_type,
+ })
+ }
+}
+
+impl Serialize for FunctionType {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ VarUint7::from(self.form).serialize(writer)?;
+
+ let data = self.params;
+ let counted_list = CountedListWriter::<ValueType, _>(
+ data.len(),
+ data.into_iter().map(Into::into),
+ );
+ counted_list.serialize(writer)?;
+
+ if let Some(return_type) = self.return_type {
+ VarUint1::from(true).serialize(writer)?;
+ return_type.serialize(writer)?;
+ } else {
+ VarUint1::from(false).serialize(writer)?;
+ }
+
+ Ok(())
+ }
+}
+
+/// Table element type.
+#[derive(Clone, Copy, Debug, PartialEq)]
+pub enum TableElementType {
+ /// A reference to a function with any signature.
+ AnyFunc,
+}
+
+impl Deserialize for TableElementType {
+ type Error = Error;
+
+ fn deserialize<R: io::Read>(reader: &mut R) -> Result<Self, Self::Error> {
+ let val = VarInt7::deserialize(reader)?;
+
+ match val.into() {
+ -0x10 => Ok(TableElementType::AnyFunc),
+ _ => Err(Error::UnknownTableElementType(val.into())),
+ }
+ }
+}
+
+impl Serialize for TableElementType {
+ type Error = Error;
+
+ fn serialize<W: io::Write>(self, writer: &mut W) -> Result<(), Self::Error> {
+ let val: VarInt7 = match self {
+ TableElementType::AnyFunc => -0x10,
+ }.into();
+ val.serialize(writer)?;
+ Ok(())
+ }
+}
diff --git a/third_party/rust/parity-wasm/src/io.rs b/third_party/rust/parity-wasm/src/io.rs
new file mode 100644
index 0000000000..c027b7102a
--- /dev/null
+++ b/third_party/rust/parity-wasm/src/io.rs
@@ -0,0 +1,122 @@
+//! Simple abstractions for the IO operations.
+//!
+//! Basically it is just a replacement for `std::io` that is usable from
+//! the `no_std` environment.
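+//!
+//! A small sketch of the `Read` side (the module is crate-private, so the paths
+//! below are crate-internal; illustrative only):
+//!
+//! ```ignore
+//! use crate::io::{Cursor, Read};
+//!
+//! let mut cursor = Cursor::new(vec![0xDEu8, 0xAD]);
+//! let mut buf = [0u8; 2];
+//! cursor.read(&mut buf).unwrap();
+//! assert_eq!(buf, [0xDE, 0xAD]);
+//! ```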
+
+#[cfg(feature="std")]
+use std::io;
+
+/// IO specific error.
+#[derive(Debug)]
+pub enum Error {
+ /// Some unexpected data left in the buffer after reading all data.
+ TrailingData,
+
+ /// Unexpected End-Of-File
+ UnexpectedEof,
+
+ /// Invalid data is encountered.
+ InvalidData,
+
+ #[cfg(feature = "std")]
+ IoError(std::io::Error),
+}
+
+/// IO specific Result.
+pub type Result<T> = core::result::Result<T, Error>;
+
+/// Trait for a sink that serialized data is written into.
+pub trait Write {
+ /// Write a buffer of data into this writer.
+ ///
+ /// All data is written at once.
+ fn write(&mut self, buf: &[u8]) -> Result<()>;
+}
+
+/// Trait for a source that data is read from.
+pub trait Read {
+ /// Read data from this reader into a buffer.
+ ///
+ /// If there is not enough data in this reader, `UnexpectedEof` will be returned.
+ fn read(&mut self, buf: &mut [u8]) -> Result<()>;
+}
+
+/// Reader that keeps track of the current position.
+pub struct Cursor<T> {
+ inner: T,
+ pos: usize,
+}
+
+impl<T> Cursor<T> {
+ pub fn new(inner: T) -> Cursor<T> {
+ Cursor {
+ inner,
+ pos: 0,
+ }
+ }
+
+ pub fn position(&self) -> usize {
+ self.pos
+ }
+}
+
+impl<T: AsRef<[u8]>> Read for Cursor<T> {
+ fn read(&mut self, buf: &mut [u8]) -> Result<()> {
+ let slice = self.inner.as_ref();
+ let remainder = slice.len() - self.pos;
+ let requested = buf.len();
+ if requested > remainder {
+ return Err(Error::UnexpectedEof);
+ }
+ buf.copy_from_slice(&slice[self.pos..(self.pos + requested)]);
+ self.pos += requested;
+ Ok(())
+ }
+}
+
+#[cfg(not(feature = "std"))]
+impl Write for alloc::vec::Vec<u8> {
+ fn write(&mut self, buf: &[u8]) -> Result<()> {
+ self.extend(buf);
+ Ok(())
+ }
+}
+
+#[cfg(feature = "std")]
+impl<T: io::Read> Read for T {
+ fn read(&mut self, buf: &mut [u8]) -> Result<()> {
+ self.read_exact(buf)
+ .map_err(Error::IoError)
+ }
+}
+
+#[cfg(feature = "std")]
+impl<T: io::Write> Write for T {
+ fn write(&mut self, buf: &[u8]) -> Result<()> {
+ self.write_all(buf).map_err(Error::IoError)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn cursor() {
+ let mut cursor = Cursor::new(vec![0xFFu8, 0x7Fu8]);
+ assert_eq!(cursor.position(), 0);
+
+ let mut buf = [0u8];
+ assert!(cursor.read(&mut buf[..]).is_ok());
+ assert_eq!(cursor.position(), 1);
+ assert_eq!(buf[0], 0xFFu8);
+ assert!(cursor.read(&mut buf[..]).is_ok());
+ assert_eq!(buf[0], 0x7Fu8);
+ assert_eq!(cursor.position(), 2);
+ }
+
+ #[test]
+ fn overflow_in_cursor() {
+ let mut cursor = Cursor::new(vec![0u8]);
+ let mut buf = [0, 1, 2];
+ assert!(cursor.read(&mut buf[..]).is_err());
+ }
+}
diff --git a/third_party/rust/parity-wasm/src/lib.rs b/third_party/rust/parity-wasm/src/lib.rs
new file mode 100644
index 0000000000..c49a3c5a17
--- /dev/null
+++ b/third_party/rust/parity-wasm/src/lib.rs
@@ -0,0 +1,24 @@
+//! WebAssembly format library
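+//!
+//! A round-trip sketch (illustrative only; the byte string below is the smallest
+//! valid module: the `\0asm` magic followed by version 1):
+//!
+//! ```ignore
+//! let wasm: Vec<u8> = vec![0x00, 0x61, 0x73, 0x6D, 0x01, 0x00, 0x00, 0x00];
+//!
+//! let module: parity_wasm::elements::Module =
+//!     parity_wasm::deserialize_buffer(&wasm).expect("valid empty module");
+//! let bytes = parity_wasm::serialize(module).expect("module to serialize");
+//! assert_eq!(bytes, wasm);
+//! ```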
+#![warn(missing_docs)]
+
+#![cfg_attr(not(feature = "std"), no_std)]
+
+#[macro_use]
+extern crate alloc;
+
+pub mod elements;
+pub mod builder;
+mod io;
+
+pub use elements::{
+ Error as SerializationError,
+ deserialize_buffer,
+ serialize,
+ peek_size,
+};
+
+#[cfg(feature = "std")]
+pub use elements::{
+ deserialize_file,
+ serialize_to_file,
+};