Diffstat (limited to 'third_party/rust/wasmparser')
-rw-r--r--  third_party/rust/wasmparser/.cargo-checksum.json  1
-rw-r--r--  third_party/rust/wasmparser/Cargo.lock  707
-rw-r--r--  third_party/rust/wasmparser/Cargo.toml  54
-rw-r--r--  third_party/rust/wasmparser/LICENSE  220
-rw-r--r--  third_party/rust/wasmparser/README.md  36
-rw-r--r--  third_party/rust/wasmparser/benches/benchmark.rs  350
-rw-r--r--  third_party/rust/wasmparser/examples/simple.rs  37
-rw-r--r--  third_party/rust/wasmparser/src/binary_reader.rs  1682
-rw-r--r--  third_party/rust/wasmparser/src/lib.rs  712
-rw-r--r--  third_party/rust/wasmparser/src/limits.rs  57
-rw-r--r--  third_party/rust/wasmparser/src/parser.rs  1496
-rw-r--r--  third_party/rust/wasmparser/src/readers.rs  316
-rw-r--r--  third_party/rust/wasmparser/src/readers/component.rs  17
-rw-r--r--  third_party/rust/wasmparser/src/readers/component/aliases.rs  119
-rw-r--r--  third_party/rust/wasmparser/src/readers/component/canonicals.rs  95
-rw-r--r--  third_party/rust/wasmparser/src/readers/component/exports.rs  105
-rw-r--r--  third_party/rust/wasmparser/src/readers/component/imports.rs  109
-rw-r--r--  third_party/rust/wasmparser/src/readers/component/instances.rs  164
-rw-r--r--  third_party/rust/wasmparser/src/readers/component/names.rs  102
-rw-r--r--  third_party/rust/wasmparser/src/readers/component/start.rs  30
-rw-r--r--  third_party/rust/wasmparser/src/readers/component/types.rs  508
-rw-r--r--  third_party/rust/wasmparser/src/readers/core.rs  33
-rw-r--r--  third_party/rust/wasmparser/src/readers/core/code.rs  146
-rw-r--r--  third_party/rust/wasmparser/src/readers/core/custom.rs  63
-rw-r--r--  third_party/rust/wasmparser/src/readers/core/data.rs  96
-rw-r--r--  third_party/rust/wasmparser/src/readers/core/elements.rs  158
-rw-r--r--  third_party/rust/wasmparser/src/readers/core/exports.rs  65
-rw-r--r--  third_party/rust/wasmparser/src/readers/core/functions.rs  17
-rw-r--r--  third_party/rust/wasmparser/src/readers/core/globals.rs  49
-rw-r--r--  third_party/rust/wasmparser/src/readers/core/imports.rs  76
-rw-r--r--  third_party/rust/wasmparser/src/readers/core/init.rs  51
-rw-r--r--  third_party/rust/wasmparser/src/readers/core/memories.rs  56
-rw-r--r--  third_party/rust/wasmparser/src/readers/core/names.rs  153
-rw-r--r--  third_party/rust/wasmparser/src/readers/core/operators.rs  354
-rw-r--r--  third_party/rust/wasmparser/src/readers/core/producers.rs  78
-rw-r--r--  third_party/rust/wasmparser/src/readers/core/tables.rs  87
-rw-r--r--  third_party/rust/wasmparser/src/readers/core/tags.rs  32
-rw-r--r--  third_party/rust/wasmparser/src/readers/core/types.rs  380
-rw-r--r--  third_party/rust/wasmparser/src/resources.rs  395
-rw-r--r--  third_party/rust/wasmparser/src/validator.rs  1514
-rw-r--r--  third_party/rust/wasmparser/src/validator/component.rs  2101
-rw-r--r--  third_party/rust/wasmparser/src/validator/core.rs  1278
-rw-r--r--  third_party/rust/wasmparser/src/validator/func.rs  348
-rw-r--r--  third_party/rust/wasmparser/src/validator/operators.rs  3474
-rw-r--r--  third_party/rust/wasmparser/src/validator/types.rs  2166
-rw-r--r--  third_party/rust/wasmparser/tests/big-module.rs  32
46 files changed, 20119 insertions, 0 deletions
diff --git a/third_party/rust/wasmparser/.cargo-checksum.json b/third_party/rust/wasmparser/.cargo-checksum.json
new file mode 100644
index 0000000000..7e081694a9
--- /dev/null
+++ b/third_party/rust/wasmparser/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"Cargo.lock":"ef76d24da3e4c9b0e0034844c4d3fa21f84ddb56af9f3134222046aaf26a48d3","Cargo.toml":"f1f05638e3da3d7304ccb0a41ef0a9889224bc5dc92352c634a553c6fa27d4bd","LICENSE":"268872b9816f90fd8e85db5a28d33f8150ebb8dd016653fb39ef1f94f2686bc5","README.md":"1c3b4f8db61a673ee2f7dc66118b1008ce8d1a7d0f5197fbe87f1271b23de3bd","benches/benchmark.rs":"b8417fbd86058b76f3ff15426075f9afee25b9f90155bbd9a2d774f4bad0af04","examples/simple.rs":"e9eb076367cc0932e2a32651372defa4a27ef800f47fad28c0ef840ba8ea7e08","src/binary_reader.rs":"1b1f0e1d831a716875ed578380f96b4b60245f0c6a4f43e2407c50fc94e50e6d","src/lib.rs":"d64774abb8d193f6a177130ffca94ce9fd199535fb6177dcb9910c4af7602082","src/limits.rs":"0c4dc6b96b35a320a8cff8269c0b55acd56f1f7b8a093c4885e04b9835441458","src/parser.rs":"b65b995296ed03a5b4d31c6459f2f3430ff685e50fec397c44f91b1364de5aaf","src/readers.rs":"406bf0cf694ed364c9c53cd25eb27a8e3fa97d099994f3e2a640747c49abf74b","src/readers/component.rs":"c259628ca3f09ff904f7e920aeec261b657f7a197c99b520a6a7d3e918b9fb3b","src/readers/component/aliases.rs":"72f3339cb452c1b055bd080281fe4923d132c280da112fcd1182594811b87494","src/readers/component/canonicals.rs":"c36e9bffc2e1ed752586189903eec77ea9897ae265e020096a9b92c467be26d9","src/readers/component/exports.rs":"f2c76e4be5045f1d099fd89dedf20e2f4817f046ea1e64bde7ca8f3ea9b2cdc6","src/readers/component/imports.rs":"725a688f2ab4306dbb421fe92ffff9373e3c88632557e352529f297bb2b96e0e","src/readers/component/instances.rs":"bca614178da3337e8c71a6ec431f12157a8cccc15411a0222bbc0106926a3ad5","src/readers/component/names.rs":"3f5dac9db8668b18cd6721cd780e6aba9b223b404c9d1173d09efe4e6b4d3a8a","src/readers/component/start.rs":"8e1e5d8aa5ece26d42251449cadcce0546c4d995f1641941b8a31ed4bc6ac761","src/readers/component/types.rs":"878fdf8f3d6a1c1387fd92b34838d8e79cee039710366bd5ea6739448d468a7a","src/readers/core.rs":"b1dbe0ffe61af1e6da4104de687009bcaa71fd3137300896159abbcfd903b800","src/readers/core/code.rs":"53f49986febb27f4cb67f4495e7b369fc80e2f70908e1e831750698dd15fe37f","src/readers/core/custom.rs":"f80d3a994e8778a912319834228cbb772c65a4b6b1f25b41fe00d220d388831f","src/readers/core/data.rs":"c1fcda7b548b15be40b3dd1f667d97c30c543867f2dc4734b590846beefe3ae3","src/readers/core/elements.rs":"197d3427fcf0fa8a3eb654e62dfa53159e6bc5cb160e75a06cf394f821bddef5","src/readers/core/exports.rs":"50dc1ee51b73f03f24077f7c290bca756966563cedbad9e49d735d60f60c91db","src/readers/core/functions.rs":"b5bbc7f7b7a429187376f63c695a9f1cbf92e515dba840ac876fa60f7290da34","src/readers/core/globals.rs":"d23f99a3adc9593652a5efd4dc81e8f014f57e776b49717dabbdcd0a811a96b1","src/readers/core/imports.rs":"4d247e8cac0b3cef7425d93f7a7f2412b2ae8979da944080d565415499e27e87","src/readers/core/init.rs":"ec6717063b0b7f2e9aa17ae52083019cee52594bf5a8b6858f2d77b10d7a0639","src/readers/core/memories.rs":"351f816d663b7d17546dc3b19ce0e43f406ce743289219a3758f7c837903fa6d","src/readers/core/names.rs":"408ebf052170bf0dc874b3158bb31089a891c3735cb35df2976e0b1e9791febb","src/readers/core/operators.rs":"46e927f6db9db9097217c5485da3a7b89e638730def809d177897f39356f5f52","src/readers/core/producers.rs":"4b42b7e1c9e22e7e2913d9da2291b491dc2d4fea417503d7ce99ad33c7beb439","src/readers/core/tables.rs":"cbe5b35893bd3929354b4487b344062ce6b540e6a70bde6eddf05a07a68574e9","src/readers/core/tags.rs":"c1dcfeb973195da9708549bcc2df4dd8ff282fe802c15a603bebc8a856f70949","src/readers/core/types.rs":"266a6c37ecdea656c235001b56b15564ad7777c23849a3f6e08ce63272347c7e","src/resources.rs":"b38b564ee425a392a30fb7440b50afa9e91be5ee933e4933b843e18c212a5871","src
/validator.rs":"0dc406f1a61cf646719fa2d88675eae4d8c52c23cc3a4a222b6671cd25f84465","src/validator/component.rs":"4ca4d2d3c800e212a68e30577eead6bb69b2e7604f6d774d4f51f2aaa2f87084","src/validator/core.rs":"143f1a531485d411eb459fe608a64ca21f772740cbebe51e9f336e30ce3d1424","src/validator/func.rs":"63b66b7bb274be4115d8a26bce9b73128de0c079bd3318423288fadc1361fdb4","src/validator/operators.rs":"39b3bd04898b02b9dbd1d42c5727c79439593f5ef8fbf3c33fcc789d0b5be91a","src/validator/types.rs":"9ee9fb8a8afa0580c3e71bf9b39bd28c14b2a21f2d9acbb92e7d8ec14615ade2","tests/big-module.rs":"1f9241a4504d0421f6a5c759e3c688c87994fe04668b1d4e8912cedd98f2bd17"},"package":"48134de3d7598219ab9eaf6b91b15d8e50d31da76b8519fe4ecfcec2cf35104b"} \ No newline at end of file
diff --git a/third_party/rust/wasmparser/Cargo.lock b/third_party/rust/wasmparser/Cargo.lock
new file mode 100644
index 0000000000..d65810ae17
--- /dev/null
+++ b/third_party/rust/wasmparser/Cargo.lock
@@ -0,0 +1,707 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "anyhow"
+version = "1.0.69"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "224afbd727c3d6e4b90103ece64b8d1b67fbb1973b1046c2281eed3f3803f800"
+
+[[package]]
+name = "atty"
+version = "0.2.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
+dependencies = [
+ "hermit-abi 0.1.19",
+ "libc",
+ "winapi",
+]
+
+[[package]]
+name = "autocfg"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
+
+[[package]]
+name = "bitflags"
+version = "1.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
+
+[[package]]
+name = "bstr"
+version = "0.2.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ba3569f383e8f1598449f1a423e72e99569137b47740b1da11ef19af3d5c3223"
+dependencies = [
+ "lazy_static",
+ "memchr",
+ "regex-automata",
+ "serde",
+]
+
+[[package]]
+name = "bumpalo"
+version = "3.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535"
+
+[[package]]
+name = "cast"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5"
+
+[[package]]
+name = "cfg-if"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+
+[[package]]
+name = "clap"
+version = "2.34.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c"
+dependencies = [
+ "bitflags",
+ "textwrap",
+ "unicode-width",
+]
+
+[[package]]
+name = "criterion"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b01d6de93b2b6c65e17c634a26653a29d107b3c98c607c765bf38d041531cd8f"
+dependencies = [
+ "atty",
+ "cast",
+ "clap",
+ "criterion-plot",
+ "csv",
+ "itertools",
+ "lazy_static",
+ "num-traits",
+ "oorandom",
+ "plotters",
+ "rayon",
+ "regex",
+ "serde",
+ "serde_cbor",
+ "serde_derive",
+ "serde_json",
+ "tinytemplate",
+ "walkdir",
+]
+
+[[package]]
+name = "criterion-plot"
+version = "0.4.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2673cc8207403546f45f5fd319a974b1e6983ad1a3ee7e6041650013be041876"
+dependencies = [
+ "cast",
+ "itertools",
+]
+
+[[package]]
+name = "crossbeam-channel"
+version = "0.5.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c2dd04ddaf88237dc3b8d8f9a3c1004b506b54b3313403944054d23c0870c521"
+dependencies = [
+ "cfg-if",
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-deque"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "715e8152b692bba2d374b53d4875445368fdf21a94751410af607a5ac677d1fc"
+dependencies = [
+ "cfg-if",
+ "crossbeam-epoch",
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-epoch"
+version = "0.9.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "01a9af1f4c2ef74bb8aa1f7e19706bc72d03598c8a570bb5de72243c7a9d9d5a"
+dependencies = [
+ "autocfg",
+ "cfg-if",
+ "crossbeam-utils",
+ "memoffset",
+ "scopeguard",
+]
+
+[[package]]
+name = "crossbeam-utils"
+version = "0.8.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4fb766fa798726286dbbb842f174001dab8abc7b627a1dd86e0b7222a95d929f"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "csv"
+version = "1.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "22813a6dc45b335f9bade10bf7271dc477e81113e89eb251a0bc2a8a81c536e1"
+dependencies = [
+ "bstr",
+ "csv-core",
+ "itoa 0.4.8",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "csv-core"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2b2466559f260f48ad25fe6317b3c8dac77b5bdb5763ac7d9d6103530663bc90"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "either"
+version = "1.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91"
+
+[[package]]
+name = "form_urlencoded"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a9c384f161156f5260c24a097c56119f9be8c798586aecc13afbcbe7b7e26bf8"
+dependencies = [
+ "percent-encoding",
+]
+
+[[package]]
+name = "half"
+version = "1.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7"
+
+[[package]]
+name = "hashbrown"
+version = "0.12.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
+
+[[package]]
+name = "hermit-abi"
+version = "0.1.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "hermit-abi"
+version = "0.2.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "idna"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e14ddfc70884202db2244c223200c204c2bda1bc6e0998d11b5e024d657209e6"
+dependencies = [
+ "unicode-bidi",
+ "unicode-normalization",
+]
+
+[[package]]
+name = "indexmap"
+version = "1.9.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1885e79c1fc4b10f0e172c475f458b7f7b93061064d98c3293e98c5ba0c8b399"
+dependencies = [
+ "autocfg",
+ "hashbrown",
+]
+
+[[package]]
+name = "itertools"
+version = "0.10.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473"
+dependencies = [
+ "either",
+]
+
+[[package]]
+name = "itoa"
+version = "0.4.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4"
+
+[[package]]
+name = "itoa"
+version = "1.0.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440"
+
+[[package]]
+name = "js-sys"
+version = "0.3.61"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "445dde2150c55e483f3d8416706b97ec8e8237c307e5b7b4b8dd15e6af2a0730"
+dependencies = [
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "lazy_static"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
+
+[[package]]
+name = "leb128"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67"
+
+[[package]]
+name = "libc"
+version = "0.2.139"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79"
+
+[[package]]
+name = "log"
+version = "0.4.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "memchr"
+version = "2.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
+
+[[package]]
+name = "memoffset"
+version = "0.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4"
+dependencies = [
+ "autocfg",
+]
+
+[[package]]
+name = "num-traits"
+version = "0.2.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd"
+dependencies = [
+ "autocfg",
+]
+
+[[package]]
+name = "num_cpus"
+version = "1.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b"
+dependencies = [
+ "hermit-abi 0.2.6",
+ "libc",
+]
+
+[[package]]
+name = "once_cell"
+version = "1.17.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6f61fba1741ea2b3d6a1e3178721804bb716a68a6aeba1149b5d52e3d464ea66"
+
+[[package]]
+name = "oorandom"
+version = "11.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575"
+
+[[package]]
+name = "percent-encoding"
+version = "2.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e"
+
+[[package]]
+name = "plotters"
+version = "0.3.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2538b639e642295546c50fcd545198c9d64ee2a38620a628724a3b266d5fbf97"
+dependencies = [
+ "num-traits",
+ "plotters-backend",
+ "plotters-svg",
+ "wasm-bindgen",
+ "web-sys",
+]
+
+[[package]]
+name = "plotters-backend"
+version = "0.3.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "193228616381fecdc1224c62e96946dfbc73ff4384fba576e052ff8c1bea8142"
+
+[[package]]
+name = "plotters-svg"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f9a81d2759aae1dae668f783c308bc5c8ebd191ff4184aaa1b37f65a6ae5a56f"
+dependencies = [
+ "plotters-backend",
+]
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.51"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5d727cae5b39d21da60fa540906919ad737832fe0b1c165da3a34d6548c849d6"
+dependencies = [
+ "unicode-ident",
+]
+
+[[package]]
+name = "quote"
+version = "1.0.23"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "rayon"
+version = "1.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6db3a213adf02b3bcfd2d3846bb41cb22857d131789e01df434fb7e7bc0759b7"
+dependencies = [
+ "either",
+ "rayon-core",
+]
+
+[[package]]
+name = "rayon-core"
+version = "1.10.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "356a0625f1954f730c0201cdab48611198dc6ce21f4acff55089b5a78e6e835b"
+dependencies = [
+ "crossbeam-channel",
+ "crossbeam-deque",
+ "crossbeam-utils",
+ "num_cpus",
+]
+
+[[package]]
+name = "regex"
+version = "1.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "48aaa5748ba571fb95cd2c85c09f629215d3a6ece942baa100950af03a34f733"
+dependencies = [
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-automata"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
+
+[[package]]
+name = "regex-syntax"
+version = "0.6.28"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848"
+
+[[package]]
+name = "ryu"
+version = "1.0.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde"
+
+[[package]]
+name = "same-file"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
+dependencies = [
+ "winapi-util",
+]
+
+[[package]]
+name = "scopeguard"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
+
+[[package]]
+name = "serde"
+version = "1.0.152"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb"
+
+[[package]]
+name = "serde_cbor"
+version = "0.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2bef2ebfde456fb76bbcf9f59315333decc4fda0b2b44b420243c11e0f5ec1f5"
+dependencies = [
+ "half",
+ "serde",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.152"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "serde_json"
+version = "1.0.93"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cad406b69c91885b5107daf2c29572f6c8cdb3c66826821e286c533490c0bc76"
+dependencies = [
+ "itoa 1.0.5",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "syn"
+version = "1.0.107"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]
+
+[[package]]
+name = "textwrap"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060"
+dependencies = [
+ "unicode-width",
+]
+
+[[package]]
+name = "tinytemplate"
+version = "1.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc"
+dependencies = [
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "tinyvec"
+version = "1.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50"
+dependencies = [
+ "tinyvec_macros",
+]
+
+[[package]]
+name = "tinyvec_macros"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
+
+[[package]]
+name = "unicode-bidi"
+version = "0.3.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d54675592c1dbefd78cbd98db9bacd89886e1ca50692a0692baefffdeb92dd58"
+
+[[package]]
+name = "unicode-ident"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc"
+
+[[package]]
+name = "unicode-normalization"
+version = "0.1.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921"
+dependencies = [
+ "tinyvec",
+]
+
+[[package]]
+name = "unicode-width"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b"
+
+[[package]]
+name = "url"
+version = "2.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0d68c799ae75762b8c3fe375feb6600ef5602c883c5d21eb51c09f22b83c4643"
+dependencies = [
+ "form_urlencoded",
+ "idna",
+ "percent-encoding",
+]
+
+[[package]]
+name = "walkdir"
+version = "2.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56"
+dependencies = [
+ "same-file",
+ "winapi",
+ "winapi-util",
+]
+
+[[package]]
+name = "wasm-bindgen"
+version = "0.2.84"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "31f8dcbc21f30d9b8f2ea926ecb58f6b91192c17e9d33594b3df58b2007ca53b"
+dependencies = [
+ "cfg-if",
+ "wasm-bindgen-macro",
+]
+
+[[package]]
+name = "wasm-bindgen-backend"
+version = "0.2.84"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "95ce90fd5bcc06af55a641a86428ee4229e44e07033963a2290a8e241607ccb9"
+dependencies = [
+ "bumpalo",
+ "log",
+ "once_cell",
+ "proc-macro2",
+ "quote",
+ "syn",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-macro"
+version = "0.2.84"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4c21f77c0bedc37fd5dc21f897894a5ca01e7bb159884559461862ae90c0b4c5"
+dependencies = [
+ "quote",
+ "wasm-bindgen-macro-support",
+]
+
+[[package]]
+name = "wasm-bindgen-macro-support"
+version = "0.2.84"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2aff81306fcac3c7515ad4e177f521b5c9a15f2b08f4e32d823066102f35a5f6"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "wasm-bindgen-backend",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-shared"
+version = "0.2.84"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0046fef7e28c3804e5e38bfa31ea2a0f73905319b677e57ebe37e49358989b5d"
+
+[[package]]
+name = "wasm-encoder"
+version = "0.25.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4eff853c4f09eec94d76af527eddad4e9de13b11d6286a1ef7134bc30135a2b7"
+dependencies = [
+ "leb128",
+]
+
+[[package]]
+name = "wasmparser"
+version = "0.102.0"
+dependencies = [
+ "anyhow",
+ "criterion",
+ "indexmap",
+ "once_cell",
+ "rayon",
+ "url",
+ "wasm-encoder",
+]
+
+[[package]]
+name = "web-sys"
+version = "0.3.61"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e33b99f4b23ba3eec1a53ac264e35a755f00e966e0065077d6027c0f575b0b97"
+dependencies = [
+ "js-sys",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "winapi"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
+dependencies = [
+ "winapi-i686-pc-windows-gnu",
+ "winapi-x86_64-pc-windows-gnu",
+]
+
+[[package]]
+name = "winapi-i686-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+
+[[package]]
+name = "winapi-util"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
+dependencies = [
+ "winapi",
+]
+
+[[package]]
+name = "winapi-x86_64-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
diff --git a/third_party/rust/wasmparser/Cargo.toml b/third_party/rust/wasmparser/Cargo.toml
new file mode 100644
index 0000000000..9717ba19f8
--- /dev/null
+++ b/third_party/rust/wasmparser/Cargo.toml
@@ -0,0 +1,54 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2021"
+name = "wasmparser"
+version = "0.102.0"
+authors = ["Yury Delendik <ydelendik@mozilla.com>"]
+exclude = ["benches/*.wasm"]
+description = """
+A simple event-driven library for parsing WebAssembly binary files.
+"""
+homepage = "https://github.com/bytecodealliance/wasm-tools/tree/main/crates/wasmparser"
+readme = "README.md"
+keywords = [
+ "parser",
+ "WebAssembly",
+ "wasm",
+]
+license = "Apache-2.0 WITH LLVM-exception"
+repository = "https://github.com/bytecodealliance/wasm-tools/tree/main/crates/wasmparser"
+
+[[bench]]
+name = "benchmark"
+harness = false
+
+[dependencies.indexmap]
+version = "1.9.1"
+
+[dependencies.url]
+version = "2.0.0"
+
+[dev-dependencies.anyhow]
+version = "1.0.58"
+
+[dev-dependencies.criterion]
+version = "0.3.3"
+
+[dev-dependencies.once_cell]
+version = "1.13.0"
+
+[dev-dependencies.rayon]
+version = "1.3"
+
+[dev-dependencies.wasm-encoder]
+version = "0.25.0"
diff --git a/third_party/rust/wasmparser/LICENSE b/third_party/rust/wasmparser/LICENSE
new file mode 100644
index 0000000000..f9d81955f4
--- /dev/null
+++ b/third_party/rust/wasmparser/LICENSE
@@ -0,0 +1,220 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+
+--- LLVM Exceptions to the Apache 2.0 License ----
+
+As an exception, if, as a result of your compiling your source code, portions
+of this Software are embedded into an Object form of such source code, you
+may redistribute such embedded portions in such Object form without complying
+with the conditions of Sections 4(a), 4(b) and 4(d) of the License.
+
+In addition, if you combine or link compiled forms of this Software with
+software that is licensed under the GPLv2 ("Combined Software") and if a
+court of competent jurisdiction determines that the patent provision (Section
+3), the indemnity provision (Section 9) or other Section of the License
+conflicts with the conditions of the GPLv2, you may retroactively and
+prospectively choose to deem waived or otherwise exclude such Section(s) of
+the License, but only in their entirety and only with respect to the Combined
+Software.
+
diff --git a/third_party/rust/wasmparser/README.md b/third_party/rust/wasmparser/README.md
new file mode 100644
index 0000000000..d40b38b550
--- /dev/null
+++ b/third_party/rust/wasmparser/README.md
@@ -0,0 +1,36 @@
+# The WebAssembly binary file decoder in Rust
+
+**A [Bytecode Alliance](https://bytecodealliance.org/) project**
+
+[![crates.io link](https://img.shields.io/crates/v/wasmparser.svg)](https://crates.io/crates/wasmparser)
+[![docs.rs docs](https://img.shields.io/static/v1?label=docs&message=wasmparser&color=blue&style=flat-square)](https://docs.rs/wasmparser/)
+
+The decoder library provides lightweight and fast decoding/parsing of WebAssembly binary files.
+
+Another goal is a minimal memory footprint: for this reason, the parser builds no AST or IR of the WebAssembly data.
+
+See also its sibling at https://github.com/wasdk/wasmparser
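+
+As a quick illustration of the event-driven style, here is a minimal sketch adapted from
+`examples/simple.rs` in this crate (the `list_exports` helper name is just for illustration),
+trimmed to a single payload kind:
+
+```rust
+use wasmparser::{Parser, Payload};
+
+fn list_exports(wasm: &[u8]) -> wasmparser::Result<()> {
+    // `parse_all` yields one `Payload` per section or item; no AST is built.
+    for payload in Parser::new(0).parse_all(wasm) {
+        if let Payload::ExportSection(s) = payload? {
+            for export in s {
+                let export = export?;
+                println!("Export {} {:?}", export.name, export.kind);
+            }
+        }
+    }
+    Ok(())
+}
+```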
+
+
+## Documentation
+
+The documentation and examples can be found at https://docs.rs/wasmparser/.
+
+
+## Fuzzing
+
+To fuzz test wasmparser.rs, switch to a nightly Rust compiler and install [cargo-fuzz]:
+
+```
+cargo install cargo-fuzz
+```
+
+Then, from the root of the repository, run:
+
+```
+cargo fuzz run parse
+```
+
+If you want to use files as seeds for the fuzzer, add them to `fuzz/corpus/parse/` and restart cargo-fuzz.
+
+[cargo-fuzz]: https://github.com/rust-fuzz/cargo-fuzz
diff --git a/third_party/rust/wasmparser/benches/benchmark.rs b/third_party/rust/wasmparser/benches/benchmark.rs
new file mode 100644
index 0000000000..5fb7b83905
--- /dev/null
+++ b/third_party/rust/wasmparser/benches/benchmark.rs
@@ -0,0 +1,350 @@
+use anyhow::Result;
+use criterion::{criterion_group, criterion_main, Criterion};
+use once_cell::unsync::Lazy;
+use std::fs;
+use std::path::Path;
+use std::path::PathBuf;
+use wasmparser::{
+ DataKind, ElementKind, HeapType, Parser, Payload, ValType, Validator, VisitOperator,
+ WasmFeatures,
+};
+
+/// A benchmark input.
+pub struct BenchmarkInput {
+ /// The path to the benchmark file, used to report better errors.
+ pub path: PathBuf,
+ /// The encoded Wasm module that is run by the benchmark.
+ pub wasm: Vec<u8>,
+}
+
+impl BenchmarkInput {
+ /// Creates a new benchmark input.
+ pub fn new(test_path: PathBuf, encoded_wasm: Vec<u8>) -> Self {
+ Self {
+ path: test_path,
+ wasm: encoded_wasm,
+ }
+ }
+}
+
+/// Returns a vector of all found benchmark input files under the given directory.
+///
+/// Benchmark input files can be `.wat` or `.wast` formatted files.
+/// For `.wast` files we pull out all the module directives and run them in the benchmarks.
+fn collect_test_files(path: &Path, list: &mut Vec<BenchmarkInput>) -> Result<()> {
+ for entry in path.read_dir()? {
+ let entry = entry?;
+ let path = entry.path();
+ if path.is_dir() {
+ collect_test_files(&path, list)?;
+ continue;
+ }
+ match path.extension().and_then(|ext| ext.to_str()) {
+ Some("wasm") => {
+ let wasm = fs::read(&path)?;
+ list.push(BenchmarkInput::new(path, wasm));
+ }
+ Some("wat") | Some("txt") => {
+ if let Ok(wasm) = wat::parse_file(&path) {
+ list.push(BenchmarkInput::new(path, wasm));
+ }
+ }
+ Some("wast") => {
+ let contents = fs::read_to_string(&path)?;
+ let buf = match wast::parser::ParseBuffer::new(&contents) {
+ Ok(buf) => buf,
+ Err(_) => continue,
+ };
+ let wast: wast::Wast<'_> = match wast::parser::parse(&buf) {
+ Ok(wast) => wast,
+ Err(_) => continue,
+ };
+ for directive in wast.directives {
+ match directive {
+ wast::WastDirective::Wat(mut module) => {
+ let wasm = module.encode()?;
+ list.push(BenchmarkInput::new(path.clone(), wasm));
+ }
+ _ => continue,
+ }
+ }
+ }
+ _ => (),
+ }
+ }
+ Ok(())
+}
+
+/// Reads the given Wasm input in full, iterating every section and every item
+/// within each section.
+///
+/// Used both to filter out unparsable benchmark inputs and as the body of the parsing benchmarks.
+fn read_all_wasm(wasm: &[u8]) -> Result<()> {
+ use Payload::*;
+ for item in Parser::new(0).parse_all(wasm) {
+ match item? {
+ TypeSection(s) => {
+ for item in s {
+ item?;
+ }
+ }
+ ImportSection(s) => {
+ for item in s {
+ item?;
+ }
+ }
+ FunctionSection(s) => {
+ for item in s {
+ item?;
+ }
+ }
+ TableSection(s) => {
+ for item in s {
+ item?;
+ }
+ }
+ MemorySection(s) => {
+ for item in s {
+ item?;
+ }
+ }
+ TagSection(s) => {
+ for item in s {
+ item?;
+ }
+ }
+ GlobalSection(s) => {
+ for item in s {
+ for op in item?.init_expr.get_operators_reader() {
+ op?;
+ }
+ }
+ }
+ ExportSection(s) => {
+ for item in s {
+ item?;
+ }
+ }
+ ElementSection(s) => {
+ for item in s {
+ let item = item?;
+ if let ElementKind::Active { offset_expr, .. } = item.kind {
+ for op in offset_expr.get_operators_reader() {
+ op?;
+ }
+ }
+ match item.items {
+ wasmparser::ElementItems::Functions(r) => {
+ for op in r {
+ op?;
+ }
+ }
+ wasmparser::ElementItems::Expressions(r) => {
+ for op in r {
+ op?;
+ }
+ }
+ }
+ }
+ }
+ DataSection(s) => {
+ for item in s {
+ let item = item?;
+ if let DataKind::Active { offset_expr, .. } = item.kind {
+ for op in offset_expr.get_operators_reader() {
+ op?;
+ }
+ }
+ }
+ }
+ CodeSectionEntry(body) => {
+ let mut reader = body.get_binary_reader();
+ for _ in 0..reader.read_var_u32()? {
+ reader.read_var_u32()?;
+ reader.read::<wasmparser::ValType>()?;
+ }
+ while !reader.eof() {
+ reader.visit_operator(&mut NopVisit)?;
+ }
+ }
+
+ // Component sections
+ ModuleSection { .. } => {}
+ InstanceSection(s) => {
+ for item in s {
+ item?;
+ }
+ }
+ CoreTypeSection(s) => {
+ for item in s {
+ item?;
+ }
+ }
+ ComponentSection { .. } => {}
+ ComponentInstanceSection(s) => {
+ for item in s {
+ item?;
+ }
+ }
+ ComponentAliasSection(s) => {
+ for item in s {
+ item?;
+ }
+ }
+ ComponentTypeSection(s) => {
+ for item in s {
+ item?;
+ }
+ }
+ ComponentCanonicalSection(s) => {
+ for item in s {
+ item?;
+ }
+ }
+ ComponentStartSection { .. } => {}
+ ComponentImportSection(s) => {
+ for item in s {
+ item?;
+ }
+ }
+ ComponentExportSection(s) => {
+ for item in s {
+ item?;
+ }
+ }
+
+ Version { .. }
+ | StartSection { .. }
+ | DataCountSection { .. }
+ | UnknownSection { .. }
+ | CustomSection { .. }
+ | CodeSectionStart { .. }
+ | End(_) => {}
+ }
+ }
+ Ok(())
+}
+
+/// Returns the default benchmark inputs that are proper `wasmparser` benchmark
+/// test inputs.
+fn collect_benchmark_inputs() -> Vec<BenchmarkInput> {
+ let mut ret = Vec::new();
+ collect_test_files("../../tests".as_ref(), &mut ret).unwrap();
+ // Sort so results are more deterministic and independent of filesystem iteration order
+ ret.sort_by_key(|p| p.path.clone());
+ ret
+}
+
+fn define_benchmarks(c: &mut Criterion) {
+ fn validator() -> Validator {
+ Validator::new_with_features(WasmFeatures {
+ reference_types: true,
+ multi_value: true,
+ simd: true,
+ relaxed_simd: true,
+ exceptions: true,
+ component_model: true,
+ bulk_memory: true,
+ threads: true,
+ tail_call: true,
+ multi_memory: true,
+ memory64: true,
+ extended_const: true,
+ floats: true,
+ mutable_global: true,
+ saturating_float_to_int: true,
+ sign_extension: true,
+ function_references: true,
+ memory_control: true,
+ })
+ }
+
+ let test_inputs = once_cell::unsync::Lazy::new(collect_benchmark_inputs);
+
+ let parse_inputs = once_cell::unsync::Lazy::new(|| {
+ let mut list = Vec::new();
+ for input in test_inputs.iter() {
+ if read_all_wasm(&input.wasm).is_ok() {
+ list.push(&input.wasm);
+ }
+ }
+ list
+ });
+ c.bench_function("parse/tests", |b| {
+ Lazy::force(&parse_inputs);
+ b.iter(|| {
+ for wasm in parse_inputs.iter() {
+ read_all_wasm(wasm).unwrap();
+ }
+ })
+ });
+
+ let validate_inputs = once_cell::unsync::Lazy::new(|| {
+ let mut list = Vec::new();
+ for input in test_inputs.iter() {
+ if validator().validate_all(&input.wasm).is_ok() {
+ list.push(&input.wasm);
+ }
+ }
+ list
+ });
+ c.bench_function("validate/tests", |b| {
+ Lazy::force(&validate_inputs);
+ b.iter(|| {
+ for wasm in validate_inputs.iter() {
+ validator().validate_all(wasm).unwrap();
+ }
+ })
+ });
+
+ for file in std::fs::read_dir("benches").unwrap() {
+ let file = file.unwrap();
+ let path = file.path();
+ if path.extension().and_then(|s| s.to_str()) != Some("wasm") {
+ continue;
+ }
+ let name = path.file_stem().unwrap().to_str().unwrap();
+ let wasm = Lazy::new(|| std::fs::read(&path).unwrap());
+ c.bench_function(&format!("validate/{name}"), |b| {
+ Lazy::force(&wasm);
+ b.iter(|| {
+ validator().validate_all(&wasm).unwrap();
+ })
+ });
+ c.bench_function(&format!("parse/{name}"), |b| {
+ Lazy::force(&wasm);
+ b.iter(|| {
+ read_all_wasm(&wasm).unwrap();
+ })
+ });
+ }
+}
+
+criterion_group!(benchmark, define_benchmarks);
+criterion_main!(benchmark);
+
+struct NopVisit;
+
+macro_rules! define_visit_operator {
+ ($(@$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident)*) => {
+ $(
+ fn $visit(&mut self $($(,$arg: $argty)*)?) {
+ define_visit_operator!(@visit $op $( $($arg)* )?);
+ }
+ )*
+ };
+
+ (@visit BrTable $table:ident) => {
+ for target in $table.targets() {
+ target.unwrap();
+ }
+ };
+ (@visit $($rest:tt)*) => {}
+}
+
+#[allow(unused_variables)]
+impl<'a> VisitOperator<'a> for NopVisit {
+ type Output = ();
+
+ wasmparser::for_each_operator!(define_visit_operator);
+}
diff --git a/third_party/rust/wasmparser/examples/simple.rs b/third_party/rust/wasmparser/examples/simple.rs
new file mode 100644
index 0000000000..2aca2f2b5e
--- /dev/null
+++ b/third_party/rust/wasmparser/examples/simple.rs
@@ -0,0 +1,37 @@
+use anyhow::Result;
+use std::env;
+use wasmparser::{Parser, Payload};
+
+fn main() -> Result<()> {
+ let args = env::args().collect::<Vec<_>>();
+ if args.len() != 2 {
+ println!("Usage: {} in.wasm", args[0]);
+ return Ok(());
+ }
+
+ let buf: Vec<u8> = std::fs::read(&args[1])?;
+ for payload in Parser::new(0).parse_all(&buf) {
+ match payload? {
+ Payload::Version { .. } => {
+ println!("====== Module");
+ }
+ Payload::ExportSection(s) => {
+ for export in s {
+ let export = export?;
+ println!(" Export {} {:?}", export.name, export.kind);
+ }
+ }
+ Payload::ImportSection(s) => {
+ for import in s {
+ let import = import?;
+ println!(" Import {}::{}", import.module, import.name);
+ }
+ }
+ _other => {
+ // println!("found payload {:?}", _other);
+ }
+ }
+ }
+
+ Ok(())
+}
diff --git a/third_party/rust/wasmparser/src/binary_reader.rs b/third_party/rust/wasmparser/src/binary_reader.rs
new file mode 100644
index 0000000000..43fef14cdd
--- /dev/null
+++ b/third_party/rust/wasmparser/src/binary_reader.rs
@@ -0,0 +1,1682 @@
+/* Copyright 2018 Mozilla Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use crate::{limits::*, *};
+use std::convert::TryInto;
+use std::error::Error;
+use std::fmt;
+use std::marker;
+use std::ops::Range;
+use std::str;
+
+const WASM_MAGIC_NUMBER: &[u8; 4] = b"\0asm";
+
+/// An error that occurred while reading or parsing a WebAssembly binary.
+#[derive(Debug, Clone)]
+pub struct BinaryReaderError {
+ // Wrap the actual error data in a `Box` so that the error is just one
+ // word. This means that we can continue returning small `Result`s in
+ // registers.
+ pub(crate) inner: Box<BinaryReaderErrorInner>,
+}
+
+#[derive(Debug, Clone)]
+pub(crate) struct BinaryReaderErrorInner {
+ pub(crate) message: String,
+ pub(crate) offset: usize,
+ pub(crate) needed_hint: Option<usize>,
+}
+
+/// The result for `BinaryReader` operations.
+pub type Result<T, E = BinaryReaderError> = std::result::Result<T, E>;
+
+impl Error for BinaryReaderError {}
+
+impl fmt::Display for BinaryReaderError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(
+ f,
+ "{} (at offset 0x{:x})",
+ self.inner.message, self.inner.offset
+ )
+ }
+}
+
+impl BinaryReaderError {
+ #[cold]
+ pub(crate) fn new(message: impl Into<String>, offset: usize) -> Self {
+ let message = message.into();
+ BinaryReaderError {
+ inner: Box::new(BinaryReaderErrorInner {
+ message,
+ offset,
+ needed_hint: None,
+ }),
+ }
+ }
+
+ #[cold]
+ pub(crate) fn fmt(args: fmt::Arguments<'_>, offset: usize) -> Self {
+ BinaryReaderError::new(args.to_string(), offset)
+ }
+
+ #[cold]
+ pub(crate) fn eof(offset: usize, needed_hint: usize) -> Self {
+ BinaryReaderError {
+ inner: Box::new(BinaryReaderErrorInner {
+ message: "unexpected end-of-file".to_string(),
+ offset,
+ needed_hint: Some(needed_hint),
+ }),
+ }
+ }
+
+ /// Get this error's message.
+ pub fn message(&self) -> &str {
+ &self.inner.message
+ }
+
+ /// Get the offset within the Wasm binary where the error occurred.
+ pub fn offset(&self) -> usize {
+ self.inner.offset
+ }
+}
+
+/// A binary reader of the WebAssembly structures and types.
+#[derive(Clone, Debug, Hash)]
+pub struct BinaryReader<'a> {
+ pub(crate) buffer: &'a [u8],
+ pub(crate) position: usize,
+ original_offset: usize,
+ allow_memarg64: bool,
+}
+
+impl<'a> BinaryReader<'a> {
+ /// Constructs `BinaryReader` type.
+ ///
+ /// # Examples
+ /// ```
+ /// let fn_body = &vec![0x41, 0x00, 0x10, 0x00, 0x0B];
+ /// let mut reader = wasmparser::BinaryReader::new(fn_body);
+ /// while !reader.eof() {
+ /// let op = reader.read_operator();
+ /// println!("{:?}", op)
+ /// }
+ /// ```
+ pub fn new(data: &[u8]) -> BinaryReader {
+ BinaryReader {
+ buffer: data,
+ position: 0,
+ original_offset: 0,
+ allow_memarg64: false,
+ }
+ }
+
+ /// Constructs a `BinaryReader` with an explicit starting offset.
+ pub fn new_with_offset(data: &[u8], original_offset: usize) -> BinaryReader {
+ BinaryReader {
+ buffer: data,
+ position: 0,
+ original_offset,
+ allow_memarg64: false,
+ }
+ }
+
+ /// Gets the original position of the binary reader.
+ #[inline]
+ pub fn original_position(&self) -> usize {
+ self.original_offset + self.position
+ }
+
+ /// Whether or not to allow 64-bit memory arguments in functions.
+ ///
+ /// This is intended to be `true` when support for the memory64
+ /// WebAssembly proposal is also enabled.
+ pub fn allow_memarg64(&mut self, allow: bool) {
+ self.allow_memarg64 = allow;
+ }
+
+ /// Returns a range from the starting offset to the end of the buffer.
+ pub fn range(&self) -> Range<usize> {
+ self.original_offset..self.original_offset + self.buffer.len()
+ }
+
+ pub(crate) fn remaining_buffer(&self) -> &'a [u8] {
+ &self.buffer[self.position..]
+ }
+
+ fn ensure_has_byte(&self) -> Result<()> {
+ if self.position < self.buffer.len() {
+ Ok(())
+ } else {
+ Err(BinaryReaderError::eof(self.original_position(), 1))
+ }
+ }
+
+ pub(crate) fn ensure_has_bytes(&self, len: usize) -> Result<()> {
+ if self.position + len <= self.buffer.len() {
+ Ok(())
+ } else {
+ let hint = self.position + len - self.buffer.len();
+ Err(BinaryReaderError::eof(self.original_position(), hint))
+ }
+ }
+
+ /// Reads a value of type `T` from this binary reader, advancing the
+ /// internal position in this reader forward as data is read.
+ #[inline]
+ pub fn read<T>(&mut self) -> Result<T>
+ where
+ T: FromReader<'a>,
+ {
+ T::from_reader(self)
+ }
+
+ pub(crate) fn read_u7(&mut self) -> Result<u8> {
+ let b = self.read_u8()?;
+ if (b & 0x80) != 0 {
+ return Err(BinaryReaderError::new(
+ "invalid u7",
+ self.original_position() - 1,
+ ));
+ }
+ Ok(b)
+ }
+
+ pub(crate) fn external_kind_from_byte(byte: u8, offset: usize) -> Result<ExternalKind> {
+ match byte {
+ 0x00 => Ok(ExternalKind::Func),
+ 0x01 => Ok(ExternalKind::Table),
+ 0x02 => Ok(ExternalKind::Memory),
+ 0x03 => Ok(ExternalKind::Global),
+ 0x04 => Ok(ExternalKind::Tag),
+ x => Err(Self::invalid_leading_byte_error(x, "external kind", offset)),
+ }
+ }
+
+ /// Reads a variable-length 32-bit size from the byte stream while checking
+ /// against a limit.
+ pub fn read_size(&mut self, limit: usize, desc: &str) -> Result<usize> {
+ let pos = self.original_position();
+ let size = self.read_var_u32()? as usize;
+ if size > limit {
+ bail!(pos, "{desc} size is out of bounds");
+ }
+ Ok(size)
+ }
+
+ /// Reads a variable-length 32-bit size from the byte stream while checking
+ /// against a limit.
+ ///
+ /// Then reads that many values of type `T` and returns them as an iterator.
+ ///
+ /// Note that regardless of how many items are read from the returned
+ /// iterator the items will still be parsed from this reader.
+ pub fn read_iter<'me, T>(
+ &'me mut self,
+ limit: usize,
+ desc: &str,
+ ) -> Result<BinaryReaderIter<'a, 'me, T>>
+ where
+ T: FromReader<'a>,
+ {
+ let size = self.read_size(limit, desc)?;
+ Ok(BinaryReaderIter {
+ remaining: size,
+ reader: self,
+ _marker: marker::PhantomData,
+ })
+ }
+
+ fn read_first_byte_and_var_u32(&mut self) -> Result<(u8, u32)> {
+ let pos = self.position;
+ let val = self.read_var_u32()?;
+ Ok((self.buffer[pos], val))
+ }
+
+ fn read_memarg(&mut self, max_align: u8) -> Result<MemArg> {
+ let flags_pos = self.original_position();
+ let mut flags = self.read_var_u32()?;
+ let memory = if flags & (1 << 6) != 0 {
+ flags ^= 1 << 6;
+ self.read_var_u32()?
+ } else {
+ 0
+ };
+ let align = if flags >= (1 << 6) {
+ return Err(BinaryReaderError::new("alignment too large", flags_pos));
+ } else {
+ flags as u8
+ };
+ let offset = if self.allow_memarg64 {
+ self.read_var_u64()?
+ } else {
+ u64::from(self.read_var_u32()?)
+ };
+ Ok(MemArg {
+ align,
+ max_align,
+ offset,
+ memory,
+ })
+ }
+
+ fn read_br_table(&mut self) -> Result<BrTable<'a>> {
+ let cnt = self.read_size(MAX_WASM_BR_TABLE_SIZE, "br_table")?;
+ let start = self.position;
+ for _ in 0..cnt {
+ self.read_var_u32()?;
+ }
+ let end = self.position;
+ let default = self.read_var_u32()?;
+ Ok(BrTable {
+ reader: BinaryReader::new_with_offset(&self.buffer[start..end], start),
+ cnt: cnt as u32,
+ default,
+ })
+ }
+
+ /// Returns whether the `BinaryReader` has reached the end of the file.
+ #[inline]
+ pub fn eof(&self) -> bool {
+ self.position >= self.buffer.len()
+ }
+
+ /// Returns the `BinaryReader`'s current position.
+ #[inline]
+ pub fn current_position(&self) -> usize {
+ self.position
+ }
+
+ /// Returns the number of bytes remaining in the `BinaryReader`.
+ #[inline]
+ pub fn bytes_remaining(&self) -> usize {
+ self.buffer.len() - self.position
+ }
+
+ /// Advances the `BinaryReader` `size` bytes, and returns a slice from the
+ /// current position of `size` length.
+ ///
+ /// # Errors
+ /// If `size` exceeds the remaining length in `BinaryReader`.
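+ ///
+ /// # Examples
+ ///
+ /// A minimal sketch of slicing two bytes out of the buffer:
+ ///
+ /// ```
+ /// let mut reader = wasmparser::BinaryReader::new(&[0x01, 0x02, 0x03, 0x04]);
+ /// assert_eq!(reader.read_bytes(2).unwrap(), &[0x01, 0x02][..]);
+ /// assert_eq!(reader.bytes_remaining(), 2);
+ /// ```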
+ pub fn read_bytes(&mut self, size: usize) -> Result<&'a [u8]> {
+ self.ensure_has_bytes(size)?;
+ let start = self.position;
+ self.position += size;
+ Ok(&self.buffer[start..self.position])
+ }
+
+ /// Reads a length-prefixed list of bytes from this reader and returns a
+ /// new `BinaryReader` to read that list of bytes.
+ ///
+ /// Advances the position of this reader by the number of bytes read.
+ pub fn read_reader(&mut self, err: &str) -> Result<BinaryReader<'a>> {
+ let size = self.read_var_u32()? as usize;
+ let body_start = self.position;
+ let buffer = match self.buffer.get(self.position..).and_then(|s| s.get(..size)) {
+ Some(buf) => buf,
+ None => {
+ return Err(BinaryReaderError::new(
+ err,
+ self.original_offset + self.buffer.len(),
+ ))
+ }
+ };
+ self.position += size;
+ Ok(BinaryReader::new_with_offset(
+ buffer,
+ self.original_offset + body_start,
+ ))
+ }
+
+ /// Advances the `BinaryReader` four bytes and returns a `u32`.
+ /// # Errors
+ /// If `BinaryReader` has less than four bytes remaining.
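+ ///
+ /// # Examples
+ ///
+ /// The bytes are interpreted in little-endian order:
+ ///
+ /// ```
+ /// let mut reader = wasmparser::BinaryReader::new(&[0x78, 0x56, 0x34, 0x12]);
+ /// assert_eq!(reader.read_u32().unwrap(), 0x12345678);
+ /// ```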
+ pub fn read_u32(&mut self) -> Result<u32> {
+ self.ensure_has_bytes(4)?;
+ let word = u32::from_le_bytes(
+ self.buffer[self.position..self.position + 4]
+ .try_into()
+ .unwrap(),
+ );
+ self.position += 4;
+ Ok(word)
+ }
+
+ /// Advances the `BinaryReader` eight bytes and returns a `u64`.
+ /// # Errors
+ /// If `BinaryReader` has less than eight bytes remaining.
+ pub fn read_u64(&mut self) -> Result<u64> {
+ self.ensure_has_bytes(8)?;
+ let word = u64::from_le_bytes(
+ self.buffer[self.position..self.position + 8]
+ .try_into()
+ .unwrap(),
+ );
+ self.position += 8;
+ Ok(word)
+ }
+
+ /// Advances the `BinaryReader` a single byte and returns it as a `u8`.
+ ///
+ /// # Errors
+ ///
+ /// If `BinaryReader` has no bytes remaining.
+ #[inline]
+ pub fn read_u8(&mut self) -> Result<u8> {
+ let b = match self.buffer.get(self.position) {
+ Some(b) => *b,
+ None => return Err(self.eof_err()),
+ };
+ self.position += 1;
+ Ok(b)
+ }
+
+ #[cold]
+ fn eof_err(&self) -> BinaryReaderError {
+ BinaryReaderError::eof(self.original_position(), 1)
+ }
+
+ /// Advances the `BinaryReader` up to five bytes to parse a variable
+ /// length integer as a `u32`.
+ ///
+ /// # Errors
+ ///
+ /// If `BinaryReader` runs out of bytes before the integer is complete, or
+ /// the integer does not fit in 32 bits.
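+ ///
+ /// # Examples
+ ///
+ /// A sketch of decoding a multi-byte LEB128 value (`0xe5 0x8e 0x26` is the
+ /// encoding of 624485):
+ ///
+ /// ```
+ /// let mut reader = wasmparser::BinaryReader::new(&[0xe5, 0x8e, 0x26]);
+ /// assert_eq!(reader.read_var_u32().unwrap(), 624485);
+ /// ```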
+ #[inline]
+ pub fn read_var_u32(&mut self) -> Result<u32> {
+ // Optimization for single byte u32.
+ let byte = self.read_u8()?;
+ if (byte & 0x80) == 0 {
+ Ok(u32::from(byte))
+ } else {
+ self.read_var_u32_big(byte)
+ }
+ }
+
+ fn read_var_u32_big(&mut self, byte: u8) -> Result<u32> {
+ let mut result = (byte & 0x7F) as u32;
+ let mut shift = 7;
+ loop {
+ let byte = self.read_u8()?;
+ result |= ((byte & 0x7F) as u32) << shift;
+ if shift >= 25 && (byte >> (32 - shift)) != 0 {
+ let msg = if byte & 0x80 != 0 {
+ "invalid var_u32: integer representation too long"
+ } else {
+ "invalid var_u32: integer too large"
+ };
+ // The continuation bit or unused bits are set.
+ return Err(BinaryReaderError::new(msg, self.original_position() - 1));
+ }
+ shift += 7;
+ if (byte & 0x80) == 0 {
+ break;
+ }
+ }
+ Ok(result)
+ }
+
+ /// Advances the `BinaryReader` up to ten bytes to parse a variable
+ /// length integer as a `u64`.
+ ///
+ /// # Errors
+ ///
+ /// If `BinaryReader` runs out of bytes before the integer is complete, or
+ /// the integer does not fit in 64 bits.
+ #[inline]
+ pub fn read_var_u64(&mut self) -> Result<u64> {
+ // Optimization for single byte u64.
+ let byte = u64::from(self.read_u8()?);
+ if (byte & 0x80) == 0 {
+ Ok(byte)
+ } else {
+ self.read_var_u64_big(byte)
+ }
+ }
+
+ fn read_var_u64_big(&mut self, byte: u64) -> Result<u64> {
+ let mut result = byte & 0x7F;
+ let mut shift = 7;
+ loop {
+ let byte = u64::from(self.read_u8()?);
+ result |= (byte & 0x7F) << shift;
+ if shift >= 57 && (byte >> (64 - shift)) != 0 {
+ let msg = if byte & 0x80 != 0 {
+ "invalid var_u64: integer representation too long"
+ } else {
+ "invalid var_u64: integer too large"
+ };
+ // The continuation bit or unused bits are set.
+ return Err(BinaryReaderError::new(msg, self.original_position() - 1));
+ }
+ shift += 7;
+ if (byte & 0x80) == 0 {
+ break;
+ }
+ }
+ Ok(result)
+ }
+
+ /// Executes `f` to skip some data in this binary reader and then returns a
+ /// reader which will read the skipped data.
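+ ///
+ /// # Examples
+ ///
+ /// A sketch that skips one operator and keeps a sub-reader over the bytes
+ /// that were skipped:
+ ///
+ /// ```
+ /// let mut reader = wasmparser::BinaryReader::new(&[0x41, 0x00, 0x0b]);
+ /// // Skip the leading `i32.const 0` instruction (two bytes).
+ /// let skipped = reader.skip(|r| {
+ /// r.read_operator()?;
+ /// Ok(())
+ /// }).unwrap();
+ /// assert_eq!(skipped.range(), 0..2);
+ /// assert_eq!(reader.current_position(), 2);
+ /// ```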
+ pub fn skip(&mut self, f: impl FnOnce(&mut Self) -> Result<()>) -> Result<Self> {
+ let start = self.position;
+ f(self)?;
+ Ok(BinaryReader::new_with_offset(
+ &self.buffer[start..self.position],
+ self.original_offset + start,
+ ))
+ }
+
+ /// Advances the `BinaryReader` past a WebAssembly string. This method does
+ /// not perform any utf-8 validation.
+ /// # Errors
+ /// If the string's length prefix cannot be read, the string's length
+ /// exceeds the remaining bytes, or the string's length exceeds
+ /// `limits::MAX_WASM_STRING_SIZE`.
+ pub fn skip_string(&mut self) -> Result<()> {
+ let len = self.read_var_u32()? as usize;
+ if len > MAX_WASM_STRING_SIZE {
+ return Err(BinaryReaderError::new(
+ "string size out of bounds",
+ self.original_position() - 1,
+ ));
+ }
+ self.ensure_has_bytes(len)?;
+ self.position += len;
+ Ok(())
+ }
+
+ /// Advances the `BinaryReader` up to five bytes to parse a variable
+ /// length integer as an `i32`.
+ /// # Errors
+ /// If `BinaryReader` runs out of bytes before the integer is complete, or
+ /// the integer does not fit in 32 bits.
+ #[inline]
+ pub fn read_var_i32(&mut self) -> Result<i32> {
+ // Optimization for single byte i32.
+ let byte = self.read_u8()?;
+ if (byte & 0x80) == 0 {
+ Ok(((byte as i32) << 25) >> 25)
+ } else {
+ self.read_var_i32_big(byte)
+ }
+ }
+
+ fn read_var_i32_big(&mut self, byte: u8) -> Result<i32> {
+ let mut result = (byte & 0x7F) as i32;
+ let mut shift = 7;
+ loop {
+ let byte = self.read_u8()?;
+ result |= ((byte & 0x7F) as i32) << shift;
+ if shift >= 25 {
+ let continuation_bit = (byte & 0x80) != 0;
+ let sign_and_unused_bit = (byte << 1) as i8 >> (32 - shift);
+ if continuation_bit || (sign_and_unused_bit != 0 && sign_and_unused_bit != -1) {
+ let msg = if continuation_bit {
+ "invalid var_i32: integer representation too long"
+ } else {
+ "invalid var_i32: integer too large"
+ };
+ return Err(BinaryReaderError::new(msg, self.original_position() - 1));
+ }
+ return Ok(result);
+ }
+ shift += 7;
+ if (byte & 0x80) == 0 {
+ break;
+ }
+ }
+ let ashift = 32 - shift;
+ Ok((result << ashift) >> ashift)
+ }
+
+ /// Advances the `BinaryReader` up to five bytes to parse a variable
+ /// length integer as a signed 33 bit integer, returned as an `i64`.
+ /// # Errors
+ /// If `BinaryReader` runs out of bytes before the integer is complete, or
+ /// the integer does not fit in 33 bits.
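+ ///
+ /// # Examples
+ ///
+ /// A sketch with the single-byte signed LEB128 encoding of -1:
+ ///
+ /// ```
+ /// let mut reader = wasmparser::BinaryReader::new(&[0x7f]);
+ /// assert_eq!(reader.read_var_s33().unwrap(), -1);
+ /// ```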
+ pub fn read_var_s33(&mut self) -> Result<i64> {
+ // Optimization for single byte.
+ let byte = self.read_u8()?;
+ if (byte & 0x80) == 0 {
+ return Ok(((byte as i8) << 1) as i64 >> 1);
+ }
+
+ let mut result = (byte & 0x7F) as i64;
+ let mut shift = 7;
+ loop {
+ let byte = self.read_u8()?;
+ result |= ((byte & 0x7F) as i64) << shift;
+ if shift >= 25 {
+ let continuation_bit = (byte & 0x80) != 0;
+ let sign_and_unused_bit = (byte << 1) as i8 >> (33 - shift);
+ if continuation_bit || (sign_and_unused_bit != 0 && sign_and_unused_bit != -1) {
+ return Err(BinaryReaderError::new(
+ "invalid var_s33: integer representation too long",
+ self.original_position() - 1,
+ ));
+ }
+ return Ok(result);
+ }
+ shift += 7;
+ if (byte & 0x80) == 0 {
+ break;
+ }
+ }
+ let ashift = 64 - shift;
+ Ok((result << ashift) >> ashift)
+ }
+
+ /// Advances the `BinaryReader` up to ten bytes to parse a variable
+ /// length integer as a 64 bit integer, returned as an `i64`.
+ /// # Errors
+ /// If `BinaryReader` runs out of bytes before the integer is complete, or
+ /// the integer does not fit in 64 bits.
+ pub fn read_var_i64(&mut self) -> Result<i64> {
+ let mut result: i64 = 0;
+ let mut shift = 0;
+ loop {
+ let byte = self.read_u8()?;
+ result |= i64::from(byte & 0x7F) << shift;
+ if shift >= 57 {
+ let continuation_bit = (byte & 0x80) != 0;
+ let sign_and_unused_bit = ((byte << 1) as i8) >> (64 - shift);
+ if continuation_bit || (sign_and_unused_bit != 0 && sign_and_unused_bit != -1) {
+ let msg = if continuation_bit {
+ "invalid var_i64: integer representation too long"
+ } else {
+ "invalid var_i64: integer too large"
+ };
+ return Err(BinaryReaderError::new(msg, self.original_position() - 1));
+ }
+ return Ok(result);
+ }
+ shift += 7;
+ if (byte & 0x80) == 0 {
+ break;
+ }
+ }
+ let ashift = 64 - shift;
+ Ok((result << ashift) >> ashift)
+ }
+
+ /// Advances the `BinaryReader` four bytes to parse the raw bits of a
+ /// 32 bit floating point number, returned as `Ieee32`.
+ /// # Errors
+ /// If `BinaryReader` has less than four bytes remaining.
+ pub fn read_f32(&mut self) -> Result<Ieee32> {
+ let value = self.read_u32()?;
+ Ok(Ieee32(value))
+ }
+
+ /// Advances the `BinaryReader` eight bytes to parse the raw bits of a
+ /// 64 bit floating point number, returned as `Ieee64`.
+ /// # Errors
+ /// If `BinaryReader` has less than eight bytes remaining.
+ pub fn read_f64(&mut self) -> Result<Ieee64> {
+ let value = self.read_u64()?;
+ Ok(Ieee64(value))
+ }
+
+ /// Reads a WebAssembly string from the module.
+ /// # Errors
+ /// If the string's length prefix cannot be read, the string's length
+ /// exceeds the remaining bytes, the string's length exceeds
+ /// `limits::MAX_WASM_STRING_SIZE`, or the string contains invalid utf-8.
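+ ///
+ /// # Examples
+ ///
+ /// A sketch reading the name `"hi"` (a length prefix of 2 followed by two
+ /// UTF-8 bytes):
+ ///
+ /// ```
+ /// let mut reader = wasmparser::BinaryReader::new(&[0x02, b'h', b'i']);
+ /// assert_eq!(reader.read_string().unwrap(), "hi");
+ /// ```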
+ pub fn read_string(&mut self) -> Result<&'a str> {
+ let len = self.read_var_u32()? as usize;
+ if len > MAX_WASM_STRING_SIZE {
+ return Err(BinaryReaderError::new(
+ "string size out of bounds",
+ self.original_position() - 1,
+ ));
+ }
+ let bytes = self.read_bytes(len)?;
+ str::from_utf8(bytes).map_err(|_| {
+ BinaryReaderError::new("invalid UTF-8 encoding", self.original_position() - 1)
+ })
+ }
+
+ #[cold]
+ pub(crate) fn invalid_leading_byte<T>(&self, byte: u8, desc: &str) -> Result<T> {
+ Err(Self::invalid_leading_byte_error(
+ byte,
+ desc,
+ self.original_position() - 1,
+ ))
+ }
+
+ pub(crate) fn invalid_leading_byte_error(
+ byte: u8,
+ desc: &str,
+ offset: usize,
+ ) -> BinaryReaderError {
+ format_err!(offset, "invalid leading byte (0x{byte:x}) for {desc}")
+ }
+
+ pub(crate) fn peek(&self) -> Result<u8> {
+ self.ensure_has_byte()?;
+ Ok(self.buffer[self.position])
+ }
+
+ fn read_block_type(&mut self) -> Result<BlockType> {
+ let b = self.peek()?;
+
+ // Check for empty block
+ if b == 0x40 {
+ self.position += 1;
+ return Ok(BlockType::Empty);
+ }
+
+ // Check for a block type of form [] -> [t].
+ if ValType::is_valtype_byte(b) {
+ return Ok(BlockType::Type(self.read()?));
+ }
+
+ // Not empty or a singular type, so read the function type index
+ let idx = self.read_var_s33()?;
+ match u32::try_from(idx) {
+ Ok(idx) => Ok(BlockType::FuncType(idx)),
+ Err(_) => {
+ return Err(BinaryReaderError::new(
+ "invalid function type",
+ self.original_position(),
+ ));
+ }
+ }
+ }
+
+ /// Visit the next available operator with the specified [`VisitOperator`] instance.
+ ///
+ /// Note that this does not implicitly propagate any additional information such as instruction
+ /// offsets. In order to do so, consider storing such data within the visitor before visiting.
+ ///
+ /// # Errors
+ ///
+ /// If `BinaryReader` has fewer bytes remaining than required to parse the `Operator`.
+ ///
+ /// # Examples
+ ///
+ /// Store an offset for use in diagnostics or any other purposes:
+ ///
+ /// ```
+ /// # use wasmparser::{BinaryReader, VisitOperator, Result, for_each_operator};
+ ///
+ /// pub fn dump(mut reader: BinaryReader) -> Result<()> {
+ /// let mut visitor = Dumper { offset: 0 };
+ /// while !reader.eof() {
+ /// visitor.offset = reader.original_position();
+ /// reader.visit_operator(&mut visitor)?;
+ /// }
+ /// Ok(())
+ /// }
+ ///
+ /// struct Dumper {
+ /// offset: usize
+ /// }
+ ///
+ /// macro_rules! define_visit_operator {
+ /// ($(@$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident)*) => {
+ /// $(
+ /// fn $visit(&mut self $($(,$arg: $argty)*)?) -> Self::Output {
+ /// println!("{}: {}", self.offset, stringify!($visit));
+ /// }
+ /// )*
+ /// }
+ /// }
+ ///
+ /// impl<'a> VisitOperator<'a> for Dumper {
+ /// type Output = ();
+ /// for_each_operator!(define_visit_operator);
+ /// }
+ ///
+ /// ```
+ pub fn visit_operator<T>(&mut self, visitor: &mut T) -> Result<<T as VisitOperator<'a>>::Output>
+ where
+ T: VisitOperator<'a>,
+ {
+ let pos = self.original_position();
+ let code = self.read_u8()?;
+ Ok(match code {
+ 0x00 => visitor.visit_unreachable(),
+ 0x01 => visitor.visit_nop(),
+ 0x02 => visitor.visit_block(self.read_block_type()?),
+ 0x03 => visitor.visit_loop(self.read_block_type()?),
+ 0x04 => visitor.visit_if(self.read_block_type()?),
+ 0x05 => visitor.visit_else(),
+ 0x06 => visitor.visit_try(self.read_block_type()?),
+ 0x07 => visitor.visit_catch(self.read_var_u32()?),
+ 0x08 => visitor.visit_throw(self.read_var_u32()?),
+ 0x09 => visitor.visit_rethrow(self.read_var_u32()?),
+ 0x0b => visitor.visit_end(),
+ 0x0c => visitor.visit_br(self.read_var_u32()?),
+ 0x0d => visitor.visit_br_if(self.read_var_u32()?),
+ 0x0e => visitor.visit_br_table(self.read_br_table()?),
+ 0x0f => visitor.visit_return(),
+ 0x10 => visitor.visit_call(self.read_var_u32()?),
+ 0x11 => {
+ let index = self.read_var_u32()?;
+ let (table_byte, table_index) = self.read_first_byte_and_var_u32()?;
+ visitor.visit_call_indirect(index, table_index, table_byte)
+ }
+ 0x12 => visitor.visit_return_call(self.read_var_u32()?),
+ 0x13 => visitor.visit_return_call_indirect(self.read_var_u32()?, self.read_var_u32()?),
+ 0x14 => visitor.visit_call_ref(self.read()?),
+ 0x15 => visitor.visit_return_call_ref(self.read()?),
+ 0x18 => visitor.visit_delegate(self.read_var_u32()?),
+ 0x19 => visitor.visit_catch_all(),
+ 0x1a => visitor.visit_drop(),
+ 0x1b => visitor.visit_select(),
+ 0x1c => {
+ let results = self.read_var_u32()?;
+ if results != 1 {
+ return Err(BinaryReaderError::new(
+ "invalid result arity",
+ self.original_position(),
+ ));
+ }
+ visitor.visit_typed_select(self.read()?)
+ }
+
+ 0x20 => visitor.visit_local_get(self.read_var_u32()?),
+ 0x21 => visitor.visit_local_set(self.read_var_u32()?),
+ 0x22 => visitor.visit_local_tee(self.read_var_u32()?),
+ 0x23 => visitor.visit_global_get(self.read_var_u32()?),
+ 0x24 => visitor.visit_global_set(self.read_var_u32()?),
+ 0x25 => visitor.visit_table_get(self.read_var_u32()?),
+ 0x26 => visitor.visit_table_set(self.read_var_u32()?),
+
+ 0x28 => visitor.visit_i32_load(self.read_memarg(2)?),
+ 0x29 => visitor.visit_i64_load(self.read_memarg(3)?),
+ 0x2a => visitor.visit_f32_load(self.read_memarg(2)?),
+ 0x2b => visitor.visit_f64_load(self.read_memarg(3)?),
+ 0x2c => visitor.visit_i32_load8_s(self.read_memarg(0)?),
+ 0x2d => visitor.visit_i32_load8_u(self.read_memarg(0)?),
+ 0x2e => visitor.visit_i32_load16_s(self.read_memarg(1)?),
+ 0x2f => visitor.visit_i32_load16_u(self.read_memarg(1)?),
+ 0x30 => visitor.visit_i64_load8_s(self.read_memarg(0)?),
+ 0x31 => visitor.visit_i64_load8_u(self.read_memarg(0)?),
+ 0x32 => visitor.visit_i64_load16_s(self.read_memarg(1)?),
+ 0x33 => visitor.visit_i64_load16_u(self.read_memarg(1)?),
+ 0x34 => visitor.visit_i64_load32_s(self.read_memarg(2)?),
+ 0x35 => visitor.visit_i64_load32_u(self.read_memarg(2)?),
+ 0x36 => visitor.visit_i32_store(self.read_memarg(2)?),
+ 0x37 => visitor.visit_i64_store(self.read_memarg(3)?),
+ 0x38 => visitor.visit_f32_store(self.read_memarg(2)?),
+ 0x39 => visitor.visit_f64_store(self.read_memarg(3)?),
+ 0x3a => visitor.visit_i32_store8(self.read_memarg(0)?),
+ 0x3b => visitor.visit_i32_store16(self.read_memarg(1)?),
+ 0x3c => visitor.visit_i64_store8(self.read_memarg(0)?),
+ 0x3d => visitor.visit_i64_store16(self.read_memarg(1)?),
+ 0x3e => visitor.visit_i64_store32(self.read_memarg(2)?),
+ 0x3f => {
+ let (mem_byte, mem) = self.read_first_byte_and_var_u32()?;
+ visitor.visit_memory_size(mem, mem_byte)
+ }
+ 0x40 => {
+ let (mem_byte, mem) = self.read_first_byte_and_var_u32()?;
+ visitor.visit_memory_grow(mem, mem_byte)
+ }
+
+ 0x41 => visitor.visit_i32_const(self.read_var_i32()?),
+ 0x42 => visitor.visit_i64_const(self.read_var_i64()?),
+ 0x43 => visitor.visit_f32_const(self.read_f32()?),
+ 0x44 => visitor.visit_f64_const(self.read_f64()?),
+
+ 0x45 => visitor.visit_i32_eqz(),
+ 0x46 => visitor.visit_i32_eq(),
+ 0x47 => visitor.visit_i32_ne(),
+ 0x48 => visitor.visit_i32_lt_s(),
+ 0x49 => visitor.visit_i32_lt_u(),
+ 0x4a => visitor.visit_i32_gt_s(),
+ 0x4b => visitor.visit_i32_gt_u(),
+ 0x4c => visitor.visit_i32_le_s(),
+ 0x4d => visitor.visit_i32_le_u(),
+ 0x4e => visitor.visit_i32_ge_s(),
+ 0x4f => visitor.visit_i32_ge_u(),
+ 0x50 => visitor.visit_i64_eqz(),
+ 0x51 => visitor.visit_i64_eq(),
+ 0x52 => visitor.visit_i64_ne(),
+ 0x53 => visitor.visit_i64_lt_s(),
+ 0x54 => visitor.visit_i64_lt_u(),
+ 0x55 => visitor.visit_i64_gt_s(),
+ 0x56 => visitor.visit_i64_gt_u(),
+ 0x57 => visitor.visit_i64_le_s(),
+ 0x58 => visitor.visit_i64_le_u(),
+ 0x59 => visitor.visit_i64_ge_s(),
+ 0x5a => visitor.visit_i64_ge_u(),
+ 0x5b => visitor.visit_f32_eq(),
+ 0x5c => visitor.visit_f32_ne(),
+ 0x5d => visitor.visit_f32_lt(),
+ 0x5e => visitor.visit_f32_gt(),
+ 0x5f => visitor.visit_f32_le(),
+ 0x60 => visitor.visit_f32_ge(),
+ 0x61 => visitor.visit_f64_eq(),
+ 0x62 => visitor.visit_f64_ne(),
+ 0x63 => visitor.visit_f64_lt(),
+ 0x64 => visitor.visit_f64_gt(),
+ 0x65 => visitor.visit_f64_le(),
+ 0x66 => visitor.visit_f64_ge(),
+ 0x67 => visitor.visit_i32_clz(),
+ 0x68 => visitor.visit_i32_ctz(),
+ 0x69 => visitor.visit_i32_popcnt(),
+ 0x6a => visitor.visit_i32_add(),
+ 0x6b => visitor.visit_i32_sub(),
+ 0x6c => visitor.visit_i32_mul(),
+ 0x6d => visitor.visit_i32_div_s(),
+ 0x6e => visitor.visit_i32_div_u(),
+ 0x6f => visitor.visit_i32_rem_s(),
+ 0x70 => visitor.visit_i32_rem_u(),
+ 0x71 => visitor.visit_i32_and(),
+ 0x72 => visitor.visit_i32_or(),
+ 0x73 => visitor.visit_i32_xor(),
+ 0x74 => visitor.visit_i32_shl(),
+ 0x75 => visitor.visit_i32_shr_s(),
+ 0x76 => visitor.visit_i32_shr_u(),
+ 0x77 => visitor.visit_i32_rotl(),
+ 0x78 => visitor.visit_i32_rotr(),
+ 0x79 => visitor.visit_i64_clz(),
+ 0x7a => visitor.visit_i64_ctz(),
+ 0x7b => visitor.visit_i64_popcnt(),
+ 0x7c => visitor.visit_i64_add(),
+ 0x7d => visitor.visit_i64_sub(),
+ 0x7e => visitor.visit_i64_mul(),
+ 0x7f => visitor.visit_i64_div_s(),
+ 0x80 => visitor.visit_i64_div_u(),
+ 0x81 => visitor.visit_i64_rem_s(),
+ 0x82 => visitor.visit_i64_rem_u(),
+ 0x83 => visitor.visit_i64_and(),
+ 0x84 => visitor.visit_i64_or(),
+ 0x85 => visitor.visit_i64_xor(),
+ 0x86 => visitor.visit_i64_shl(),
+ 0x87 => visitor.visit_i64_shr_s(),
+ 0x88 => visitor.visit_i64_shr_u(),
+ 0x89 => visitor.visit_i64_rotl(),
+ 0x8a => visitor.visit_i64_rotr(),
+ 0x8b => visitor.visit_f32_abs(),
+ 0x8c => visitor.visit_f32_neg(),
+ 0x8d => visitor.visit_f32_ceil(),
+ 0x8e => visitor.visit_f32_floor(),
+ 0x8f => visitor.visit_f32_trunc(),
+ 0x90 => visitor.visit_f32_nearest(),
+ 0x91 => visitor.visit_f32_sqrt(),
+ 0x92 => visitor.visit_f32_add(),
+ 0x93 => visitor.visit_f32_sub(),
+ 0x94 => visitor.visit_f32_mul(),
+ 0x95 => visitor.visit_f32_div(),
+ 0x96 => visitor.visit_f32_min(),
+ 0x97 => visitor.visit_f32_max(),
+ 0x98 => visitor.visit_f32_copysign(),
+ 0x99 => visitor.visit_f64_abs(),
+ 0x9a => visitor.visit_f64_neg(),
+ 0x9b => visitor.visit_f64_ceil(),
+ 0x9c => visitor.visit_f64_floor(),
+ 0x9d => visitor.visit_f64_trunc(),
+ 0x9e => visitor.visit_f64_nearest(),
+ 0x9f => visitor.visit_f64_sqrt(),
+ 0xa0 => visitor.visit_f64_add(),
+ 0xa1 => visitor.visit_f64_sub(),
+ 0xa2 => visitor.visit_f64_mul(),
+ 0xa3 => visitor.visit_f64_div(),
+ 0xa4 => visitor.visit_f64_min(),
+ 0xa5 => visitor.visit_f64_max(),
+ 0xa6 => visitor.visit_f64_copysign(),
+ 0xa7 => visitor.visit_i32_wrap_i64(),
+ 0xa8 => visitor.visit_i32_trunc_f32_s(),
+ 0xa9 => visitor.visit_i32_trunc_f32_u(),
+ 0xaa => visitor.visit_i32_trunc_f64_s(),
+ 0xab => visitor.visit_i32_trunc_f64_u(),
+ 0xac => visitor.visit_i64_extend_i32_s(),
+ 0xad => visitor.visit_i64_extend_i32_u(),
+ 0xae => visitor.visit_i64_trunc_f32_s(),
+ 0xaf => visitor.visit_i64_trunc_f32_u(),
+ 0xb0 => visitor.visit_i64_trunc_f64_s(),
+ 0xb1 => visitor.visit_i64_trunc_f64_u(),
+ 0xb2 => visitor.visit_f32_convert_i32_s(),
+ 0xb3 => visitor.visit_f32_convert_i32_u(),
+ 0xb4 => visitor.visit_f32_convert_i64_s(),
+ 0xb5 => visitor.visit_f32_convert_i64_u(),
+ 0xb6 => visitor.visit_f32_demote_f64(),
+ 0xb7 => visitor.visit_f64_convert_i32_s(),
+ 0xb8 => visitor.visit_f64_convert_i32_u(),
+ 0xb9 => visitor.visit_f64_convert_i64_s(),
+ 0xba => visitor.visit_f64_convert_i64_u(),
+ 0xbb => visitor.visit_f64_promote_f32(),
+ 0xbc => visitor.visit_i32_reinterpret_f32(),
+ 0xbd => visitor.visit_i64_reinterpret_f64(),
+ 0xbe => visitor.visit_f32_reinterpret_i32(),
+ 0xbf => visitor.visit_f64_reinterpret_i64(),
+
+ 0xc0 => visitor.visit_i32_extend8_s(),
+ 0xc1 => visitor.visit_i32_extend16_s(),
+ 0xc2 => visitor.visit_i64_extend8_s(),
+ 0xc3 => visitor.visit_i64_extend16_s(),
+ 0xc4 => visitor.visit_i64_extend32_s(),
+
+ 0xd0 => visitor.visit_ref_null(self.read()?),
+ 0xd1 => visitor.visit_ref_is_null(),
+ 0xd2 => visitor.visit_ref_func(self.read_var_u32()?),
+ 0xd3 => visitor.visit_ref_as_non_null(),
+ 0xd4 => visitor.visit_br_on_null(self.read_var_u32()?),
+ 0xd6 => visitor.visit_br_on_non_null(self.read_var_u32()?),
+
+ 0xfc => self.visit_0xfc_operator(pos, visitor)?,
+ 0xfd => self.visit_0xfd_operator(pos, visitor)?,
+ 0xfe => self.visit_0xfe_operator(pos, visitor)?,
+
+ _ => bail!(pos, "illegal opcode: 0x{code:x}"),
+ })
+ }
+
+ fn visit_0xfc_operator<T>(
+ &mut self,
+ pos: usize,
+ visitor: &mut T,
+ ) -> Result<<T as VisitOperator<'a>>::Output>
+ where
+ T: VisitOperator<'a>,
+ {
+ let code = self.read_var_u32()?;
+ Ok(match code {
+ 0x00 => visitor.visit_i32_trunc_sat_f32_s(),
+ 0x01 => visitor.visit_i32_trunc_sat_f32_u(),
+ 0x02 => visitor.visit_i32_trunc_sat_f64_s(),
+ 0x03 => visitor.visit_i32_trunc_sat_f64_u(),
+ 0x04 => visitor.visit_i64_trunc_sat_f32_s(),
+ 0x05 => visitor.visit_i64_trunc_sat_f32_u(),
+ 0x06 => visitor.visit_i64_trunc_sat_f64_s(),
+ 0x07 => visitor.visit_i64_trunc_sat_f64_u(),
+
+ 0x08 => {
+ let segment = self.read_var_u32()?;
+ let mem = self.read_var_u32()?;
+ visitor.visit_memory_init(segment, mem)
+ }
+ 0x09 => {
+ let segment = self.read_var_u32()?;
+ visitor.visit_data_drop(segment)
+ }
+ 0x0a => {
+ let dst = self.read_var_u32()?;
+ let src = self.read_var_u32()?;
+ visitor.visit_memory_copy(dst, src)
+ }
+ 0x0b => {
+ let mem = self.read_var_u32()?;
+ visitor.visit_memory_fill(mem)
+ }
+ 0x0c => {
+ let segment = self.read_var_u32()?;
+ let table = self.read_var_u32()?;
+ visitor.visit_table_init(segment, table)
+ }
+ 0x0d => {
+ let segment = self.read_var_u32()?;
+ visitor.visit_elem_drop(segment)
+ }
+ 0x0e => {
+ let dst_table = self.read_var_u32()?;
+ let src_table = self.read_var_u32()?;
+ visitor.visit_table_copy(dst_table, src_table)
+ }
+
+ 0x0f => {
+ let table = self.read_var_u32()?;
+ visitor.visit_table_grow(table)
+ }
+ 0x10 => {
+ let table = self.read_var_u32()?;
+ visitor.visit_table_size(table)
+ }
+
+ 0x11 => {
+ let table = self.read_var_u32()?;
+ visitor.visit_table_fill(table)
+ }
+
+ 0x12 => {
+ let mem = self.read_var_u32()?;
+ visitor.visit_memory_discard(mem)
+ }
+
+ _ => bail!(pos, "unknown 0xfc subopcode: 0x{code:x}"),
+ })
+ }
+
+ fn visit_0xfd_operator<T>(
+ &mut self,
+ pos: usize,
+ visitor: &mut T,
+ ) -> Result<<T as VisitOperator<'a>>::Output>
+ where
+ T: VisitOperator<'a>,
+ {
+ let code = self.read_var_u32()?;
+ Ok(match code {
+ 0x00 => visitor.visit_v128_load(self.read_memarg(4)?),
+ 0x01 => visitor.visit_v128_load8x8_s(self.read_memarg(3)?),
+ 0x02 => visitor.visit_v128_load8x8_u(self.read_memarg(3)?),
+ 0x03 => visitor.visit_v128_load16x4_s(self.read_memarg(3)?),
+ 0x04 => visitor.visit_v128_load16x4_u(self.read_memarg(3)?),
+ 0x05 => visitor.visit_v128_load32x2_s(self.read_memarg(3)?),
+ 0x06 => visitor.visit_v128_load32x2_u(self.read_memarg(3)?),
+ 0x07 => visitor.visit_v128_load8_splat(self.read_memarg(0)?),
+ 0x08 => visitor.visit_v128_load16_splat(self.read_memarg(1)?),
+ 0x09 => visitor.visit_v128_load32_splat(self.read_memarg(2)?),
+ 0x0a => visitor.visit_v128_load64_splat(self.read_memarg(3)?),
+
+ 0x0b => visitor.visit_v128_store(self.read_memarg(4)?),
+ 0x0c => visitor.visit_v128_const(self.read_v128()?),
+ 0x0d => {
+ let mut lanes: [u8; 16] = [0; 16];
+ for lane in &mut lanes {
+ *lane = self.read_lane_index(32)?
+ }
+ visitor.visit_i8x16_shuffle(lanes)
+ }
+
+ 0x0e => visitor.visit_i8x16_swizzle(),
+ 0x0f => visitor.visit_i8x16_splat(),
+ 0x10 => visitor.visit_i16x8_splat(),
+ 0x11 => visitor.visit_i32x4_splat(),
+ 0x12 => visitor.visit_i64x2_splat(),
+ 0x13 => visitor.visit_f32x4_splat(),
+ 0x14 => visitor.visit_f64x2_splat(),
+
+ 0x15 => visitor.visit_i8x16_extract_lane_s(self.read_lane_index(16)?),
+ 0x16 => visitor.visit_i8x16_extract_lane_u(self.read_lane_index(16)?),
+ 0x17 => visitor.visit_i8x16_replace_lane(self.read_lane_index(16)?),
+ 0x18 => visitor.visit_i16x8_extract_lane_s(self.read_lane_index(8)?),
+ 0x19 => visitor.visit_i16x8_extract_lane_u(self.read_lane_index(8)?),
+ 0x1a => visitor.visit_i16x8_replace_lane(self.read_lane_index(8)?),
+ 0x1b => visitor.visit_i32x4_extract_lane(self.read_lane_index(4)?),
+
+ 0x1c => visitor.visit_i32x4_replace_lane(self.read_lane_index(4)?),
+ 0x1d => visitor.visit_i64x2_extract_lane(self.read_lane_index(2)?),
+ 0x1e => visitor.visit_i64x2_replace_lane(self.read_lane_index(2)?),
+ 0x1f => visitor.visit_f32x4_extract_lane(self.read_lane_index(4)?),
+ 0x20 => visitor.visit_f32x4_replace_lane(self.read_lane_index(4)?),
+ 0x21 => visitor.visit_f64x2_extract_lane(self.read_lane_index(2)?),
+ 0x22 => visitor.visit_f64x2_replace_lane(self.read_lane_index(2)?),
+
+ 0x23 => visitor.visit_i8x16_eq(),
+ 0x24 => visitor.visit_i8x16_ne(),
+ 0x25 => visitor.visit_i8x16_lt_s(),
+ 0x26 => visitor.visit_i8x16_lt_u(),
+ 0x27 => visitor.visit_i8x16_gt_s(),
+ 0x28 => visitor.visit_i8x16_gt_u(),
+ 0x29 => visitor.visit_i8x16_le_s(),
+ 0x2a => visitor.visit_i8x16_le_u(),
+ 0x2b => visitor.visit_i8x16_ge_s(),
+ 0x2c => visitor.visit_i8x16_ge_u(),
+ 0x2d => visitor.visit_i16x8_eq(),
+ 0x2e => visitor.visit_i16x8_ne(),
+ 0x2f => visitor.visit_i16x8_lt_s(),
+ 0x30 => visitor.visit_i16x8_lt_u(),
+ 0x31 => visitor.visit_i16x8_gt_s(),
+ 0x32 => visitor.visit_i16x8_gt_u(),
+ 0x33 => visitor.visit_i16x8_le_s(),
+ 0x34 => visitor.visit_i16x8_le_u(),
+ 0x35 => visitor.visit_i16x8_ge_s(),
+ 0x36 => visitor.visit_i16x8_ge_u(),
+ 0x37 => visitor.visit_i32x4_eq(),
+ 0x38 => visitor.visit_i32x4_ne(),
+ 0x39 => visitor.visit_i32x4_lt_s(),
+ 0x3a => visitor.visit_i32x4_lt_u(),
+ 0x3b => visitor.visit_i32x4_gt_s(),
+ 0x3c => visitor.visit_i32x4_gt_u(),
+ 0x3d => visitor.visit_i32x4_le_s(),
+ 0x3e => visitor.visit_i32x4_le_u(),
+ 0x3f => visitor.visit_i32x4_ge_s(),
+ 0x40 => visitor.visit_i32x4_ge_u(),
+ 0x41 => visitor.visit_f32x4_eq(),
+ 0x42 => visitor.visit_f32x4_ne(),
+ 0x43 => visitor.visit_f32x4_lt(),
+ 0x44 => visitor.visit_f32x4_gt(),
+ 0x45 => visitor.visit_f32x4_le(),
+ 0x46 => visitor.visit_f32x4_ge(),
+ 0x47 => visitor.visit_f64x2_eq(),
+ 0x48 => visitor.visit_f64x2_ne(),
+ 0x49 => visitor.visit_f64x2_lt(),
+ 0x4a => visitor.visit_f64x2_gt(),
+ 0x4b => visitor.visit_f64x2_le(),
+ 0x4c => visitor.visit_f64x2_ge(),
+ 0x4d => visitor.visit_v128_not(),
+ 0x4e => visitor.visit_v128_and(),
+ 0x4f => visitor.visit_v128_andnot(),
+ 0x50 => visitor.visit_v128_or(),
+ 0x51 => visitor.visit_v128_xor(),
+ 0x52 => visitor.visit_v128_bitselect(),
+ 0x53 => visitor.visit_v128_any_true(),
+
+ 0x54 => {
+ let memarg = self.read_memarg(0)?;
+ let lane = self.read_lane_index(16)?;
+ visitor.visit_v128_load8_lane(memarg, lane)
+ }
+ 0x55 => {
+ let memarg = self.read_memarg(1)?;
+ let lane = self.read_lane_index(8)?;
+ visitor.visit_v128_load16_lane(memarg, lane)
+ }
+ 0x56 => {
+ let memarg = self.read_memarg(2)?;
+ let lane = self.read_lane_index(4)?;
+ visitor.visit_v128_load32_lane(memarg, lane)
+ }
+ 0x57 => {
+ let memarg = self.read_memarg(3)?;
+ let lane = self.read_lane_index(2)?;
+ visitor.visit_v128_load64_lane(memarg, lane)
+ }
+ 0x58 => {
+ let memarg = self.read_memarg(0)?;
+ let lane = self.read_lane_index(16)?;
+ visitor.visit_v128_store8_lane(memarg, lane)
+ }
+ 0x59 => {
+ let memarg = self.read_memarg(1)?;
+ let lane = self.read_lane_index(8)?;
+ visitor.visit_v128_store16_lane(memarg, lane)
+ }
+ 0x5a => {
+ let memarg = self.read_memarg(2)?;
+ let lane = self.read_lane_index(4)?;
+ visitor.visit_v128_store32_lane(memarg, lane)
+ }
+ 0x5b => {
+ let memarg = self.read_memarg(3)?;
+ let lane = self.read_lane_index(2)?;
+ visitor.visit_v128_store64_lane(memarg, lane)
+ }
+
+ 0x5c => visitor.visit_v128_load32_zero(self.read_memarg(2)?),
+ 0x5d => visitor.visit_v128_load64_zero(self.read_memarg(3)?),
+ 0x5e => visitor.visit_f32x4_demote_f64x2_zero(),
+ 0x5f => visitor.visit_f64x2_promote_low_f32x4(),
+ 0x60 => visitor.visit_i8x16_abs(),
+ 0x61 => visitor.visit_i8x16_neg(),
+ 0x62 => visitor.visit_i8x16_popcnt(),
+ 0x63 => visitor.visit_i8x16_all_true(),
+ 0x64 => visitor.visit_i8x16_bitmask(),
+ 0x65 => visitor.visit_i8x16_narrow_i16x8_s(),
+ 0x66 => visitor.visit_i8x16_narrow_i16x8_u(),
+ 0x67 => visitor.visit_f32x4_ceil(),
+ 0x68 => visitor.visit_f32x4_floor(),
+ 0x69 => visitor.visit_f32x4_trunc(),
+ 0x6a => visitor.visit_f32x4_nearest(),
+ 0x6b => visitor.visit_i8x16_shl(),
+ 0x6c => visitor.visit_i8x16_shr_s(),
+ 0x6d => visitor.visit_i8x16_shr_u(),
+ 0x6e => visitor.visit_i8x16_add(),
+ 0x6f => visitor.visit_i8x16_add_sat_s(),
+ 0x70 => visitor.visit_i8x16_add_sat_u(),
+ 0x71 => visitor.visit_i8x16_sub(),
+ 0x72 => visitor.visit_i8x16_sub_sat_s(),
+ 0x73 => visitor.visit_i8x16_sub_sat_u(),
+ 0x74 => visitor.visit_f64x2_ceil(),
+ 0x75 => visitor.visit_f64x2_floor(),
+ 0x76 => visitor.visit_i8x16_min_s(),
+ 0x77 => visitor.visit_i8x16_min_u(),
+ 0x78 => visitor.visit_i8x16_max_s(),
+ 0x79 => visitor.visit_i8x16_max_u(),
+ 0x7a => visitor.visit_f64x2_trunc(),
+ 0x7b => visitor.visit_i8x16_avgr_u(),
+ 0x7c => visitor.visit_i16x8_extadd_pairwise_i8x16_s(),
+ 0x7d => visitor.visit_i16x8_extadd_pairwise_i8x16_u(),
+ 0x7e => visitor.visit_i32x4_extadd_pairwise_i16x8_s(),
+ 0x7f => visitor.visit_i32x4_extadd_pairwise_i16x8_u(),
+ 0x80 => visitor.visit_i16x8_abs(),
+ 0x81 => visitor.visit_i16x8_neg(),
+ 0x82 => visitor.visit_i16x8_q15mulr_sat_s(),
+ 0x83 => visitor.visit_i16x8_all_true(),
+ 0x84 => visitor.visit_i16x8_bitmask(),
+ 0x85 => visitor.visit_i16x8_narrow_i32x4_s(),
+ 0x86 => visitor.visit_i16x8_narrow_i32x4_u(),
+ 0x87 => visitor.visit_i16x8_extend_low_i8x16_s(),
+ 0x88 => visitor.visit_i16x8_extend_high_i8x16_s(),
+ 0x89 => visitor.visit_i16x8_extend_low_i8x16_u(),
+ 0x8a => visitor.visit_i16x8_extend_high_i8x16_u(),
+ 0x8b => visitor.visit_i16x8_shl(),
+ 0x8c => visitor.visit_i16x8_shr_s(),
+ 0x8d => visitor.visit_i16x8_shr_u(),
+ 0x8e => visitor.visit_i16x8_add(),
+ 0x8f => visitor.visit_i16x8_add_sat_s(),
+ 0x90 => visitor.visit_i16x8_add_sat_u(),
+ 0x91 => visitor.visit_i16x8_sub(),
+ 0x92 => visitor.visit_i16x8_sub_sat_s(),
+ 0x93 => visitor.visit_i16x8_sub_sat_u(),
+ 0x94 => visitor.visit_f64x2_nearest(),
+ 0x95 => visitor.visit_i16x8_mul(),
+ 0x96 => visitor.visit_i16x8_min_s(),
+ 0x97 => visitor.visit_i16x8_min_u(),
+ 0x98 => visitor.visit_i16x8_max_s(),
+ 0x99 => visitor.visit_i16x8_max_u(),
+ 0x9b => visitor.visit_i16x8_avgr_u(),
+ 0x9c => visitor.visit_i16x8_extmul_low_i8x16_s(),
+ 0x9d => visitor.visit_i16x8_extmul_high_i8x16_s(),
+ 0x9e => visitor.visit_i16x8_extmul_low_i8x16_u(),
+ 0x9f => visitor.visit_i16x8_extmul_high_i8x16_u(),
+ 0xa0 => visitor.visit_i32x4_abs(),
+ 0xa1 => visitor.visit_i32x4_neg(),
+ 0xa3 => visitor.visit_i32x4_all_true(),
+ 0xa4 => visitor.visit_i32x4_bitmask(),
+ 0xa7 => visitor.visit_i32x4_extend_low_i16x8_s(),
+ 0xa8 => visitor.visit_i32x4_extend_high_i16x8_s(),
+ 0xa9 => visitor.visit_i32x4_extend_low_i16x8_u(),
+ 0xaa => visitor.visit_i32x4_extend_high_i16x8_u(),
+ 0xab => visitor.visit_i32x4_shl(),
+ 0xac => visitor.visit_i32x4_shr_s(),
+ 0xad => visitor.visit_i32x4_shr_u(),
+ 0xae => visitor.visit_i32x4_add(),
+ 0xb1 => visitor.visit_i32x4_sub(),
+ 0xb5 => visitor.visit_i32x4_mul(),
+ 0xb6 => visitor.visit_i32x4_min_s(),
+ 0xb7 => visitor.visit_i32x4_min_u(),
+ 0xb8 => visitor.visit_i32x4_max_s(),
+ 0xb9 => visitor.visit_i32x4_max_u(),
+ 0xba => visitor.visit_i32x4_dot_i16x8_s(),
+ 0xbc => visitor.visit_i32x4_extmul_low_i16x8_s(),
+ 0xbd => visitor.visit_i32x4_extmul_high_i16x8_s(),
+ 0xbe => visitor.visit_i32x4_extmul_low_i16x8_u(),
+ 0xbf => visitor.visit_i32x4_extmul_high_i16x8_u(),
+ 0xc0 => visitor.visit_i64x2_abs(),
+ 0xc1 => visitor.visit_i64x2_neg(),
+ 0xc3 => visitor.visit_i64x2_all_true(),
+ 0xc4 => visitor.visit_i64x2_bitmask(),
+ 0xc7 => visitor.visit_i64x2_extend_low_i32x4_s(),
+ 0xc8 => visitor.visit_i64x2_extend_high_i32x4_s(),
+ 0xc9 => visitor.visit_i64x2_extend_low_i32x4_u(),
+ 0xca => visitor.visit_i64x2_extend_high_i32x4_u(),
+ 0xcb => visitor.visit_i64x2_shl(),
+ 0xcc => visitor.visit_i64x2_shr_s(),
+ 0xcd => visitor.visit_i64x2_shr_u(),
+ 0xce => visitor.visit_i64x2_add(),
+ 0xd1 => visitor.visit_i64x2_sub(),
+ 0xd5 => visitor.visit_i64x2_mul(),
+ 0xd6 => visitor.visit_i64x2_eq(),
+ 0xd7 => visitor.visit_i64x2_ne(),
+ 0xd8 => visitor.visit_i64x2_lt_s(),
+ 0xd9 => visitor.visit_i64x2_gt_s(),
+ 0xda => visitor.visit_i64x2_le_s(),
+ 0xdb => visitor.visit_i64x2_ge_s(),
+ 0xdc => visitor.visit_i64x2_extmul_low_i32x4_s(),
+ 0xdd => visitor.visit_i64x2_extmul_high_i32x4_s(),
+ 0xde => visitor.visit_i64x2_extmul_low_i32x4_u(),
+ 0xdf => visitor.visit_i64x2_extmul_high_i32x4_u(),
+ 0xe0 => visitor.visit_f32x4_abs(),
+ 0xe1 => visitor.visit_f32x4_neg(),
+ 0xe3 => visitor.visit_f32x4_sqrt(),
+ 0xe4 => visitor.visit_f32x4_add(),
+ 0xe5 => visitor.visit_f32x4_sub(),
+ 0xe6 => visitor.visit_f32x4_mul(),
+ 0xe7 => visitor.visit_f32x4_div(),
+ 0xe8 => visitor.visit_f32x4_min(),
+ 0xe9 => visitor.visit_f32x4_max(),
+ 0xea => visitor.visit_f32x4_pmin(),
+ 0xeb => visitor.visit_f32x4_pmax(),
+ 0xec => visitor.visit_f64x2_abs(),
+ 0xed => visitor.visit_f64x2_neg(),
+ 0xef => visitor.visit_f64x2_sqrt(),
+ 0xf0 => visitor.visit_f64x2_add(),
+ 0xf1 => visitor.visit_f64x2_sub(),
+ 0xf2 => visitor.visit_f64x2_mul(),
+ 0xf3 => visitor.visit_f64x2_div(),
+ 0xf4 => visitor.visit_f64x2_min(),
+ 0xf5 => visitor.visit_f64x2_max(),
+ 0xf6 => visitor.visit_f64x2_pmin(),
+ 0xf7 => visitor.visit_f64x2_pmax(),
+ 0xf8 => visitor.visit_i32x4_trunc_sat_f32x4_s(),
+ 0xf9 => visitor.visit_i32x4_trunc_sat_f32x4_u(),
+ 0xfa => visitor.visit_f32x4_convert_i32x4_s(),
+ 0xfb => visitor.visit_f32x4_convert_i32x4_u(),
+ 0xfc => visitor.visit_i32x4_trunc_sat_f64x2_s_zero(),
+ 0xfd => visitor.visit_i32x4_trunc_sat_f64x2_u_zero(),
+ 0xfe => visitor.visit_f64x2_convert_low_i32x4_s(),
+ 0xff => visitor.visit_f64x2_convert_low_i32x4_u(),
+ 0x100 => visitor.visit_i8x16_relaxed_swizzle(),
+ 0x101 => visitor.visit_i32x4_relaxed_trunc_f32x4_s(),
+ 0x102 => visitor.visit_i32x4_relaxed_trunc_f32x4_u(),
+ 0x103 => visitor.visit_i32x4_relaxed_trunc_f64x2_s_zero(),
+ 0x104 => visitor.visit_i32x4_relaxed_trunc_f64x2_u_zero(),
+ 0x105 => visitor.visit_f32x4_relaxed_madd(),
+ 0x106 => visitor.visit_f32x4_relaxed_nmadd(),
+ 0x107 => visitor.visit_f64x2_relaxed_madd(),
+ 0x108 => visitor.visit_f64x2_relaxed_nmadd(),
+ 0x109 => visitor.visit_i8x16_relaxed_laneselect(),
+ 0x10a => visitor.visit_i16x8_relaxed_laneselect(),
+ 0x10b => visitor.visit_i32x4_relaxed_laneselect(),
+ 0x10c => visitor.visit_i64x2_relaxed_laneselect(),
+ 0x10d => visitor.visit_f32x4_relaxed_min(),
+ 0x10e => visitor.visit_f32x4_relaxed_max(),
+ 0x10f => visitor.visit_f64x2_relaxed_min(),
+ 0x110 => visitor.visit_f64x2_relaxed_max(),
+ 0x111 => visitor.visit_i16x8_relaxed_q15mulr_s(),
+ 0x112 => visitor.visit_i16x8_relaxed_dot_i8x16_i7x16_s(),
+ 0x113 => visitor.visit_i32x4_relaxed_dot_i8x16_i7x16_add_s(),
+
+ _ => bail!(pos, "unknown 0xfd subopcode: 0x{code:x}"),
+ })
+ }
+
+ fn visit_0xfe_operator<T>(
+ &mut self,
+ pos: usize,
+ visitor: &mut T,
+ ) -> Result<<T as VisitOperator<'a>>::Output>
+ where
+ T: VisitOperator<'a>,
+ {
+ let code = self.read_var_u32()?;
+ Ok(match code {
+ 0x00 => visitor.visit_memory_atomic_notify(self.read_memarg(2)?),
+ 0x01 => visitor.visit_memory_atomic_wait32(self.read_memarg(2)?),
+ 0x02 => visitor.visit_memory_atomic_wait64(self.read_memarg(3)?),
+ 0x03 => {
+ if self.read_u8()? != 0 {
+ bail!(pos, "nonzero byte after `atomic.fence`");
+ }
+ visitor.visit_atomic_fence()
+ }
+ 0x10 => visitor.visit_i32_atomic_load(self.read_memarg(2)?),
+ 0x11 => visitor.visit_i64_atomic_load(self.read_memarg(3)?),
+ 0x12 => visitor.visit_i32_atomic_load8_u(self.read_memarg(0)?),
+ 0x13 => visitor.visit_i32_atomic_load16_u(self.read_memarg(1)?),
+ 0x14 => visitor.visit_i64_atomic_load8_u(self.read_memarg(0)?),
+ 0x15 => visitor.visit_i64_atomic_load16_u(self.read_memarg(1)?),
+ 0x16 => visitor.visit_i64_atomic_load32_u(self.read_memarg(2)?),
+ 0x17 => visitor.visit_i32_atomic_store(self.read_memarg(2)?),
+ 0x18 => visitor.visit_i64_atomic_store(self.read_memarg(3)?),
+ 0x19 => visitor.visit_i32_atomic_store8(self.read_memarg(0)?),
+ 0x1a => visitor.visit_i32_atomic_store16(self.read_memarg(1)?),
+ 0x1b => visitor.visit_i64_atomic_store8(self.read_memarg(0)?),
+ 0x1c => visitor.visit_i64_atomic_store16(self.read_memarg(1)?),
+ 0x1d => visitor.visit_i64_atomic_store32(self.read_memarg(2)?),
+ 0x1e => visitor.visit_i32_atomic_rmw_add(self.read_memarg(2)?),
+ 0x1f => visitor.visit_i64_atomic_rmw_add(self.read_memarg(3)?),
+ 0x20 => visitor.visit_i32_atomic_rmw8_add_u(self.read_memarg(0)?),
+ 0x21 => visitor.visit_i32_atomic_rmw16_add_u(self.read_memarg(1)?),
+ 0x22 => visitor.visit_i64_atomic_rmw8_add_u(self.read_memarg(0)?),
+ 0x23 => visitor.visit_i64_atomic_rmw16_add_u(self.read_memarg(1)?),
+ 0x24 => visitor.visit_i64_atomic_rmw32_add_u(self.read_memarg(2)?),
+ 0x25 => visitor.visit_i32_atomic_rmw_sub(self.read_memarg(2)?),
+ 0x26 => visitor.visit_i64_atomic_rmw_sub(self.read_memarg(3)?),
+ 0x27 => visitor.visit_i32_atomic_rmw8_sub_u(self.read_memarg(0)?),
+ 0x28 => visitor.visit_i32_atomic_rmw16_sub_u(self.read_memarg(1)?),
+ 0x29 => visitor.visit_i64_atomic_rmw8_sub_u(self.read_memarg(0)?),
+ 0x2a => visitor.visit_i64_atomic_rmw16_sub_u(self.read_memarg(1)?),
+ 0x2b => visitor.visit_i64_atomic_rmw32_sub_u(self.read_memarg(2)?),
+ 0x2c => visitor.visit_i32_atomic_rmw_and(self.read_memarg(2)?),
+ 0x2d => visitor.visit_i64_atomic_rmw_and(self.read_memarg(3)?),
+ 0x2e => visitor.visit_i32_atomic_rmw8_and_u(self.read_memarg(0)?),
+ 0x2f => visitor.visit_i32_atomic_rmw16_and_u(self.read_memarg(1)?),
+ 0x30 => visitor.visit_i64_atomic_rmw8_and_u(self.read_memarg(0)?),
+ 0x31 => visitor.visit_i64_atomic_rmw16_and_u(self.read_memarg(1)?),
+ 0x32 => visitor.visit_i64_atomic_rmw32_and_u(self.read_memarg(2)?),
+ 0x33 => visitor.visit_i32_atomic_rmw_or(self.read_memarg(2)?),
+ 0x34 => visitor.visit_i64_atomic_rmw_or(self.read_memarg(3)?),
+ 0x35 => visitor.visit_i32_atomic_rmw8_or_u(self.read_memarg(0)?),
+ 0x36 => visitor.visit_i32_atomic_rmw16_or_u(self.read_memarg(1)?),
+ 0x37 => visitor.visit_i64_atomic_rmw8_or_u(self.read_memarg(0)?),
+ 0x38 => visitor.visit_i64_atomic_rmw16_or_u(self.read_memarg(1)?),
+ 0x39 => visitor.visit_i64_atomic_rmw32_or_u(self.read_memarg(2)?),
+ 0x3a => visitor.visit_i32_atomic_rmw_xor(self.read_memarg(2)?),
+ 0x3b => visitor.visit_i64_atomic_rmw_xor(self.read_memarg(3)?),
+ 0x3c => visitor.visit_i32_atomic_rmw8_xor_u(self.read_memarg(0)?),
+ 0x3d => visitor.visit_i32_atomic_rmw16_xor_u(self.read_memarg(1)?),
+ 0x3e => visitor.visit_i64_atomic_rmw8_xor_u(self.read_memarg(0)?),
+ 0x3f => visitor.visit_i64_atomic_rmw16_xor_u(self.read_memarg(1)?),
+ 0x40 => visitor.visit_i64_atomic_rmw32_xor_u(self.read_memarg(2)?),
+ 0x41 => visitor.visit_i32_atomic_rmw_xchg(self.read_memarg(2)?),
+ 0x42 => visitor.visit_i64_atomic_rmw_xchg(self.read_memarg(3)?),
+ 0x43 => visitor.visit_i32_atomic_rmw8_xchg_u(self.read_memarg(0)?),
+ 0x44 => visitor.visit_i32_atomic_rmw16_xchg_u(self.read_memarg(1)?),
+ 0x45 => visitor.visit_i64_atomic_rmw8_xchg_u(self.read_memarg(0)?),
+ 0x46 => visitor.visit_i64_atomic_rmw16_xchg_u(self.read_memarg(1)?),
+ 0x47 => visitor.visit_i64_atomic_rmw32_xchg_u(self.read_memarg(2)?),
+ 0x48 => visitor.visit_i32_atomic_rmw_cmpxchg(self.read_memarg(2)?),
+ 0x49 => visitor.visit_i64_atomic_rmw_cmpxchg(self.read_memarg(3)?),
+ 0x4a => visitor.visit_i32_atomic_rmw8_cmpxchg_u(self.read_memarg(0)?),
+ 0x4b => visitor.visit_i32_atomic_rmw16_cmpxchg_u(self.read_memarg(1)?),
+ 0x4c => visitor.visit_i64_atomic_rmw8_cmpxchg_u(self.read_memarg(0)?),
+ 0x4d => visitor.visit_i64_atomic_rmw16_cmpxchg_u(self.read_memarg(1)?),
+ 0x4e => visitor.visit_i64_atomic_rmw32_cmpxchg_u(self.read_memarg(2)?),
+
+ _ => bail!(pos, "unknown 0xfe subopcode: 0x{code:x}"),
+ })
+ }
+
+ /// Reads the next available `Operator`.
+ ///
+ /// # Errors
+ ///
+ /// If `BinaryReader` has fewer bytes remaining than required to parse
+ /// the `Operator`.
+ pub fn read_operator(&mut self) -> Result<Operator<'a>> {
+ self.visit_operator(&mut OperatorFactory::new())
+ }
+
+ fn read_lane_index(&mut self, max: u8) -> Result<u8> {
+ let index = self.read_u8()?;
+ if index >= max {
+ return Err(BinaryReaderError::new(
+ "invalid lane index",
+ self.original_position() - 1,
+ ));
+ }
+ Ok(index)
+ }
+
+ fn read_v128(&mut self) -> Result<V128> {
+ let mut bytes = [0; 16];
+ bytes.clone_from_slice(self.read_bytes(16)?);
+ Ok(V128(bytes))
+ }
+
+ pub(crate) fn read_header_version(&mut self) -> Result<u32> {
+ let magic_number = self.read_bytes(4)?;
+ if magic_number != WASM_MAGIC_NUMBER {
+ return Err(BinaryReaderError::new(
+ "magic header not detected: bad magic number",
+ self.original_position() - 4,
+ ));
+ }
+ self.read_u32()
+ }
+
+ pub(crate) fn skip_const_expr(&mut self) -> Result<()> {
+ // TODO add skip_operator() method and/or validate ConstExpr operators.
+ loop {
+ if let Operator::End = self.read_operator()? {
+ return Ok(());
+ }
+ }
+ }
+}
+
+impl<'a> BrTable<'a> {
+ /// Returns the number of `br_table` entries, not including the default
+ /// label.
+ pub fn len(&self) -> u32 {
+ self.cnt
+ }
+
+ /// Returns whether this `br_table` has no targets apart from the default one.
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+
+ /// Returns the default target of this `br_table` instruction.
+ pub fn default(&self) -> u32 {
+ self.default
+ }
+
+ /// Returns the list of targets that this `br_table` instruction will be
+ /// jumping to.
+ ///
+ /// This method will return an iterator which parses each target of this
+ /// `br_table` except the default target. The returned iterator will
+ /// yield `self.len()` elements.
+ ///
+ /// # Examples
+ ///
+ /// ```rust
+ /// let buf = [0x0e, 0x02, 0x01, 0x02, 0x00];
+ /// let mut reader = wasmparser::BinaryReader::new(&buf);
+ /// let op = reader.read_operator().unwrap();
+ /// if let wasmparser::Operator::BrTable { targets } = op {
+ /// let targets = targets.targets().collect::<Result<Vec<_>, _>>().unwrap();
+ /// assert_eq!(targets, [1, 2]);
+ /// }
+ /// ```
+ pub fn targets(&self) -> BrTableTargets {
+ BrTableTargets {
+ reader: self.reader.clone(),
+ remaining: self.cnt,
+ }
+ }
+}
+
+/// An iterator over the targets of a [`BrTable`].
+///
+/// # Note
+///
+/// This iterator parses each target of the underlying `br_table`
+/// except for the default target.
+/// The iterator will yield exactly as many targets as the `br_table` has.
+pub struct BrTableTargets<'a> {
+ reader: crate::BinaryReader<'a>,
+ remaining: u32,
+}
+
+impl<'a> Iterator for BrTableTargets<'a> {
+ type Item = Result<u32>;
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let remaining = usize::try_from(self.remaining).unwrap_or_else(|error| {
+ panic!("could not convert remaining `u32` into `usize`: {}", error)
+ });
+ (remaining, Some(remaining))
+ }
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if self.remaining == 0 {
+ if !self.reader.eof() {
+ return Some(Err(BinaryReaderError::new(
+ "trailing data in br_table",
+ self.reader.original_position(),
+ )));
+ }
+ return None;
+ }
+ self.remaining -= 1;
+ Some(self.reader.read_var_u32())
+ }
+}
+
+impl fmt::Debug for BrTable<'_> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let mut f = f.debug_struct("BrTable");
+ f.field("count", &self.cnt);
+ f.field("default", &self.default);
+ match self.targets().collect::<Result<Vec<_>>>() {
+ Ok(targets) => {
+ f.field("targets", &targets);
+ }
+ Err(_) => {
+ f.field("reader", &self.reader);
+ }
+ }
+ f.finish()
+ }
+}
+
+/// A factory to construct [`Operator`] instances via the [`VisitOperator`] trait.
+struct OperatorFactory<'a> {
+ marker: core::marker::PhantomData<fn() -> &'a ()>,
+}
+
+impl<'a> OperatorFactory<'a> {
+ /// Creates a new [`OperatorFactory`].
+ fn new() -> Self {
+ Self {
+ marker: core::marker::PhantomData,
+ }
+ }
+}
+
+macro_rules! define_visit_operator {
+ ($(@$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident)*) => {
+ $(
+ fn $visit(&mut self $($(,$arg: $argty)*)?) -> Operator<'a> {
+ Operator::$op $({ $($arg),* })?
+ }
+ )*
+ }
+}
+
+impl<'a> VisitOperator<'a> for OperatorFactory<'a> {
+ type Output = Operator<'a>;
+
+ for_each_operator!(define_visit_operator);
+}
+
+/// Iterator returned from [`BinaryReader::read_iter`].
+pub struct BinaryReaderIter<'a, 'me, T: FromReader<'a>> {
+ remaining: usize,
+ reader: &'me mut BinaryReader<'a>,
+ _marker: marker::PhantomData<T>,
+}
+
+impl<'a, T> Iterator for BinaryReaderIter<'a, '_, T>
+where
+ T: FromReader<'a>,
+{
+ type Item = Result<T>;
+
+ fn next(&mut self) -> Option<Result<T>> {
+ if self.remaining == 0 {
+ None
+ } else {
+ let ret = self.reader.read::<T>();
+ if ret.is_err() {
+ self.remaining = 0;
+ } else {
+ self.remaining -= 1;
+ }
+ Some(ret)
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.remaining, Some(self.remaining))
+ }
+}
+
+impl<'a, T> Drop for BinaryReaderIter<'a, '_, T>
+where
+ T: FromReader<'a>,
+{
+ fn drop(&mut self) {
+ while self.next().is_some() {
+ // ...
+ }
+ }
+}
diff --git a/third_party/rust/wasmparser/src/lib.rs b/third_party/rust/wasmparser/src/lib.rs
new file mode 100644
index 0000000000..708809118d
--- /dev/null
+++ b/third_party/rust/wasmparser/src/lib.rs
@@ -0,0 +1,712 @@
+/* Copyright 2017 Mozilla Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//! A simple event-driven library for parsing WebAssembly binary files
+//! (or streams).
+//!
+//! The parser library reports events as they happen and only stores
+//! parsing information for a brief period of time, making it very fast
+//! and memory-efficient. The event-driven model, however, has some drawbacks.
+//! If you need random access to the entire WebAssembly data-structure,
+//! this is not the right library for you. You could, however, build such
+//! a data-structure using this library.
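+//!
+//! A minimal sketch of the event-driven flow, assuming the `Parser` and
+//! `Payload` items exported from the crate root:
+//!
+//! ```
+//! use wasmparser::{Parser, Payload};
+//!
+//! // The 8-byte preamble of an empty core module.
+//! let wasm = [0x00, 0x61, 0x73, 0x6d, 0x01, 0x00, 0x00, 0x00];
+//! for payload in Parser::new(0).parse_all(&wasm) {
+//! match payload.unwrap() {
+//! Payload::Version { .. } => { /* the module header */ }
+//! Payload::End(_) => { /* no more sections */ }
+//! _ => { /* each section is reported as it is parsed */ }
+//! }
+//! }
+//! ```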
+
+#![deny(missing_docs)]
+
+/// A helper macro to conveniently iterate over all opcodes recognized by this
+/// crate. This can be used to work with either the [`Operator`] enumeration or
+/// the [`VisitOperator`] trait if your use case uniformly handles all operators
+/// the same way.
+///
+/// It is also possible to specialize handling of operators depending on the
+/// Wasm proposal from which they are originating.
+///
+/// This is an "iterator macro" where this macro is invoked with the name of
+/// another macro, and then that macro is invoked with the list of all
+/// operators. An example invocation of this is shown in the code block below.
+///
+/// The list of specializable Wasm proposals is as follows:
+///
+/// - `@mvp`: Denoting a Wasm operator from the initial Wasm MVP version.
+/// - `@exceptions`: [Wasm `exception-handling` proposal]
+/// - `@tail_call`: [Wasm `tail-calls` proposal]
+/// - `@reference_types`: [Wasm `reference-types` proposal]
+/// - `@sign_extension`: [Wasm `sign-extension-ops` proposal]
+/// - `@saturating_float_to_int`: [Wasm `non_trapping_float-to-int-conversions` proposal]
+/// - `@bulk_memory`: [Wasm `bulk-memory` proposal]
+/// - `@threads`: [Wasm `threads` proposal]
+/// - `@simd`: [Wasm `simd` proposal]
+/// - `@relaxed_simd`: [Wasm `relaxed-simd` proposal]
+///
+/// [Wasm `exception-handling` proposal]:
+/// https://github.com/WebAssembly/exception-handling
+///
+/// [Wasm `tail-calls` proposal]:
+/// https://github.com/WebAssembly/tail-call
+///
+/// [Wasm `reference-types` proposal]:
+/// https://github.com/WebAssembly/reference-types
+///
+/// [Wasm `sign-extension-ops` proposal]:
+/// https://github.com/WebAssembly/sign-extension-ops
+///
+/// [Wasm `non_trapping_float-to-int-conversions` proposal]:
+/// https://github.com/WebAssembly/nontrapping-float-to-int-conversions
+///
+/// [Wasm `bulk-memory` proposal]:
+/// https://github.com/WebAssembly/bulk-memory-operations
+///
+/// [Wasm `threads` proposal]:
+/// https://github.com/webassembly/threads
+///
+/// [Wasm `simd` proposal]:
+/// https://github.com/webassembly/simd
+///
+/// [Wasm `relaxed-simd` proposal]:
+/// https://github.com/WebAssembly/relaxed-simd
+///
+/// ```
+/// macro_rules! define_visit_operator {
+/// // The outer layer of repetition represents how all operators are
+/// // provided to the macro at the same time.
+/// //
+/// // The `$proposal` identifier indicates the Wasm proposals from which
+/// // the Wasm operator is originating.
+/// // For example to specialize the macro match arm for Wasm SIMD proposal
+/// // operators you could write `@simd` instead of `@$proposal:ident` to
+/// // only catch those operators.
+/// //
+/// // The `$op` name is bound to the `Operator` variant name. The
+/// // payload of the operator is optionally specified (the `$(...)?`
+/// // clause) since not all instructions have payloads. Within the payload
+/// // each argument is named and has its type specified.
+/// //
+/// // The `$visit` name is bound to the corresponding name in the
+/// // `VisitOperator` trait that this corresponds to.
+/// ($( @$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident)*) => {
+/// $(
+/// fn $visit(&mut self $($(,$arg: $argty)*)?) {
+/// // do nothing for this example
+/// }
+/// )*
+/// }
+/// }
+///
+/// pub struct VisitAndDoNothing;
+///
+/// impl<'a> wasmparser::VisitOperator<'a> for VisitAndDoNothing {
+/// type Output = ();
+///
+/// wasmparser::for_each_operator!(define_visit_operator);
+/// }
+/// ```
+#[macro_export]
+macro_rules! for_each_operator {
+ ($mac:ident) => {
+ $mac! {
+ @mvp Unreachable => visit_unreachable
+ @mvp Nop => visit_nop
+ @mvp Block { blockty: $crate::BlockType } => visit_block
+ @mvp Loop { blockty: $crate::BlockType } => visit_loop
+ @mvp If { blockty: $crate::BlockType } => visit_if
+ @mvp Else => visit_else
+ @exceptions Try { blockty: $crate::BlockType } => visit_try
+ @exceptions Catch { tag_index: u32 } => visit_catch
+ @exceptions Throw { tag_index: u32 } => visit_throw
+ @exceptions Rethrow { relative_depth: u32 } => visit_rethrow
+ @mvp End => visit_end
+ @mvp Br { relative_depth: u32 } => visit_br
+ @mvp BrIf { relative_depth: u32 } => visit_br_if
+ @mvp BrTable { targets: $crate::BrTable<'a> } => visit_br_table
+ @mvp Return => visit_return
+ @mvp Call { function_index: u32 } => visit_call
+ @mvp CallIndirect { type_index: u32, table_index: u32, table_byte: u8 } => visit_call_indirect
+ @tail_call ReturnCall { function_index: u32 } => visit_return_call
+ @tail_call ReturnCallIndirect { type_index: u32, table_index: u32 } => visit_return_call_indirect
+ @exceptions Delegate { relative_depth: u32 } => visit_delegate
+ @exceptions CatchAll => visit_catch_all
+ @mvp Drop => visit_drop
+ @mvp Select => visit_select
+ @reference_types TypedSelect { ty: $crate::ValType } => visit_typed_select
+ @mvp LocalGet { local_index: u32 } => visit_local_get
+ @mvp LocalSet { local_index: u32 } => visit_local_set
+ @mvp LocalTee { local_index: u32 } => visit_local_tee
+ @mvp GlobalGet { global_index: u32 } => visit_global_get
+ @mvp GlobalSet { global_index: u32 } => visit_global_set
+ @mvp I32Load { memarg: $crate::MemArg } => visit_i32_load
+ @mvp I64Load { memarg: $crate::MemArg } => visit_i64_load
+ @mvp F32Load { memarg: $crate::MemArg } => visit_f32_load
+ @mvp F64Load { memarg: $crate::MemArg } => visit_f64_load
+ @mvp I32Load8S { memarg: $crate::MemArg } => visit_i32_load8_s
+ @mvp I32Load8U { memarg: $crate::MemArg } => visit_i32_load8_u
+ @mvp I32Load16S { memarg: $crate::MemArg } => visit_i32_load16_s
+ @mvp I32Load16U { memarg: $crate::MemArg } => visit_i32_load16_u
+ @mvp I64Load8S { memarg: $crate::MemArg } => visit_i64_load8_s
+ @mvp I64Load8U { memarg: $crate::MemArg } => visit_i64_load8_u
+ @mvp I64Load16S { memarg: $crate::MemArg } => visit_i64_load16_s
+ @mvp I64Load16U { memarg: $crate::MemArg } => visit_i64_load16_u
+ @mvp I64Load32S { memarg: $crate::MemArg } => visit_i64_load32_s
+ @mvp I64Load32U { memarg: $crate::MemArg } => visit_i64_load32_u
+ @mvp I32Store { memarg: $crate::MemArg } => visit_i32_store
+ @mvp I64Store { memarg: $crate::MemArg } => visit_i64_store
+ @mvp F32Store { memarg: $crate::MemArg } => visit_f32_store
+ @mvp F64Store { memarg: $crate::MemArg } => visit_f64_store
+ @mvp I32Store8 { memarg: $crate::MemArg } => visit_i32_store8
+ @mvp I32Store16 { memarg: $crate::MemArg } => visit_i32_store16
+ @mvp I64Store8 { memarg: $crate::MemArg } => visit_i64_store8
+ @mvp I64Store16 { memarg: $crate::MemArg } => visit_i64_store16
+ @mvp I64Store32 { memarg: $crate::MemArg } => visit_i64_store32
+ @mvp MemorySize { mem: u32, mem_byte: u8 } => visit_memory_size
+ @mvp MemoryGrow { mem: u32, mem_byte: u8 } => visit_memory_grow
+ @mvp I32Const { value: i32 } => visit_i32_const
+ @mvp I64Const { value: i64 } => visit_i64_const
+ @mvp F32Const { value: $crate::Ieee32 } => visit_f32_const
+ @mvp F64Const { value: $crate::Ieee64 } => visit_f64_const
+ @reference_types RefNull { hty: $crate::HeapType } => visit_ref_null
+ @reference_types RefIsNull => visit_ref_is_null
+ @reference_types RefFunc { function_index: u32 } => visit_ref_func
+ @mvp I32Eqz => visit_i32_eqz
+ @mvp I32Eq => visit_i32_eq
+ @mvp I32Ne => visit_i32_ne
+ @mvp I32LtS => visit_i32_lt_s
+ @mvp I32LtU => visit_i32_lt_u
+ @mvp I32GtS => visit_i32_gt_s
+ @mvp I32GtU => visit_i32_gt_u
+ @mvp I32LeS => visit_i32_le_s
+ @mvp I32LeU => visit_i32_le_u
+ @mvp I32GeS => visit_i32_ge_s
+ @mvp I32GeU => visit_i32_ge_u
+ @mvp I64Eqz => visit_i64_eqz
+ @mvp I64Eq => visit_i64_eq
+ @mvp I64Ne => visit_i64_ne
+ @mvp I64LtS => visit_i64_lt_s
+ @mvp I64LtU => visit_i64_lt_u
+ @mvp I64GtS => visit_i64_gt_s
+ @mvp I64GtU => visit_i64_gt_u
+ @mvp I64LeS => visit_i64_le_s
+ @mvp I64LeU => visit_i64_le_u
+ @mvp I64GeS => visit_i64_ge_s
+ @mvp I64GeU => visit_i64_ge_u
+ @mvp F32Eq => visit_f32_eq
+ @mvp F32Ne => visit_f32_ne
+ @mvp F32Lt => visit_f32_lt
+ @mvp F32Gt => visit_f32_gt
+ @mvp F32Le => visit_f32_le
+ @mvp F32Ge => visit_f32_ge
+ @mvp F64Eq => visit_f64_eq
+ @mvp F64Ne => visit_f64_ne
+ @mvp F64Lt => visit_f64_lt
+ @mvp F64Gt => visit_f64_gt
+ @mvp F64Le => visit_f64_le
+ @mvp F64Ge => visit_f64_ge
+ @mvp I32Clz => visit_i32_clz
+ @mvp I32Ctz => visit_i32_ctz
+ @mvp I32Popcnt => visit_i32_popcnt
+ @mvp I32Add => visit_i32_add
+ @mvp I32Sub => visit_i32_sub
+ @mvp I32Mul => visit_i32_mul
+ @mvp I32DivS => visit_i32_div_s
+ @mvp I32DivU => visit_i32_div_u
+ @mvp I32RemS => visit_i32_rem_s
+ @mvp I32RemU => visit_i32_rem_u
+ @mvp I32And => visit_i32_and
+ @mvp I32Or => visit_i32_or
+ @mvp I32Xor => visit_i32_xor
+ @mvp I32Shl => visit_i32_shl
+ @mvp I32ShrS => visit_i32_shr_s
+ @mvp I32ShrU => visit_i32_shr_u
+ @mvp I32Rotl => visit_i32_rotl
+ @mvp I32Rotr => visit_i32_rotr
+ @mvp I64Clz => visit_i64_clz
+ @mvp I64Ctz => visit_i64_ctz
+ @mvp I64Popcnt => visit_i64_popcnt
+ @mvp I64Add => visit_i64_add
+ @mvp I64Sub => visit_i64_sub
+ @mvp I64Mul => visit_i64_mul
+ @mvp I64DivS => visit_i64_div_s
+ @mvp I64DivU => visit_i64_div_u
+ @mvp I64RemS => visit_i64_rem_s
+ @mvp I64RemU => visit_i64_rem_u
+ @mvp I64And => visit_i64_and
+ @mvp I64Or => visit_i64_or
+ @mvp I64Xor => visit_i64_xor
+ @mvp I64Shl => visit_i64_shl
+ @mvp I64ShrS => visit_i64_shr_s
+ @mvp I64ShrU => visit_i64_shr_u
+ @mvp I64Rotl => visit_i64_rotl
+ @mvp I64Rotr => visit_i64_rotr
+ @mvp F32Abs => visit_f32_abs
+ @mvp F32Neg => visit_f32_neg
+ @mvp F32Ceil => visit_f32_ceil
+ @mvp F32Floor => visit_f32_floor
+ @mvp F32Trunc => visit_f32_trunc
+ @mvp F32Nearest => visit_f32_nearest
+ @mvp F32Sqrt => visit_f32_sqrt
+ @mvp F32Add => visit_f32_add
+ @mvp F32Sub => visit_f32_sub
+ @mvp F32Mul => visit_f32_mul
+ @mvp F32Div => visit_f32_div
+ @mvp F32Min => visit_f32_min
+ @mvp F32Max => visit_f32_max
+ @mvp F32Copysign => visit_f32_copysign
+ @mvp F64Abs => visit_f64_abs
+ @mvp F64Neg => visit_f64_neg
+ @mvp F64Ceil => visit_f64_ceil
+ @mvp F64Floor => visit_f64_floor
+ @mvp F64Trunc => visit_f64_trunc
+ @mvp F64Nearest => visit_f64_nearest
+ @mvp F64Sqrt => visit_f64_sqrt
+ @mvp F64Add => visit_f64_add
+ @mvp F64Sub => visit_f64_sub
+ @mvp F64Mul => visit_f64_mul
+ @mvp F64Div => visit_f64_div
+ @mvp F64Min => visit_f64_min
+ @mvp F64Max => visit_f64_max
+ @mvp F64Copysign => visit_f64_copysign
+ @mvp I32WrapI64 => visit_i32_wrap_i64
+ @mvp I32TruncF32S => visit_i32_trunc_f32_s
+ @mvp I32TruncF32U => visit_i32_trunc_f32_u
+ @mvp I32TruncF64S => visit_i32_trunc_f64_s
+ @mvp I32TruncF64U => visit_i32_trunc_f64_u
+ @mvp I64ExtendI32S => visit_i64_extend_i32_s
+ @mvp I64ExtendI32U => visit_i64_extend_i32_u
+ @mvp I64TruncF32S => visit_i64_trunc_f32_s
+ @mvp I64TruncF32U => visit_i64_trunc_f32_u
+ @mvp I64TruncF64S => visit_i64_trunc_f64_s
+ @mvp I64TruncF64U => visit_i64_trunc_f64_u
+ @mvp F32ConvertI32S => visit_f32_convert_i32_s
+ @mvp F32ConvertI32U => visit_f32_convert_i32_u
+ @mvp F32ConvertI64S => visit_f32_convert_i64_s
+ @mvp F32ConvertI64U => visit_f32_convert_i64_u
+ @mvp F32DemoteF64 => visit_f32_demote_f64
+ @mvp F64ConvertI32S => visit_f64_convert_i32_s
+ @mvp F64ConvertI32U => visit_f64_convert_i32_u
+ @mvp F64ConvertI64S => visit_f64_convert_i64_s
+ @mvp F64ConvertI64U => visit_f64_convert_i64_u
+ @mvp F64PromoteF32 => visit_f64_promote_f32
+ @mvp I32ReinterpretF32 => visit_i32_reinterpret_f32
+ @mvp I64ReinterpretF64 => visit_i64_reinterpret_f64
+ @mvp F32ReinterpretI32 => visit_f32_reinterpret_i32
+ @mvp F64ReinterpretI64 => visit_f64_reinterpret_i64
+ @sign_extension I32Extend8S => visit_i32_extend8_s
+ @sign_extension I32Extend16S => visit_i32_extend16_s
+ @sign_extension I64Extend8S => visit_i64_extend8_s
+ @sign_extension I64Extend16S => visit_i64_extend16_s
+ @sign_extension I64Extend32S => visit_i64_extend32_s
+
+ // 0xFC operators
+ // Non-trapping Float-to-int Conversions
+ // https://github.com/WebAssembly/nontrapping-float-to-int-conversions
+ @saturating_float_to_int I32TruncSatF32S => visit_i32_trunc_sat_f32_s
+ @saturating_float_to_int I32TruncSatF32U => visit_i32_trunc_sat_f32_u
+ @saturating_float_to_int I32TruncSatF64S => visit_i32_trunc_sat_f64_s
+ @saturating_float_to_int I32TruncSatF64U => visit_i32_trunc_sat_f64_u
+ @saturating_float_to_int I64TruncSatF32S => visit_i64_trunc_sat_f32_s
+ @saturating_float_to_int I64TruncSatF32U => visit_i64_trunc_sat_f32_u
+ @saturating_float_to_int I64TruncSatF64S => visit_i64_trunc_sat_f64_s
+ @saturating_float_to_int I64TruncSatF64U => visit_i64_trunc_sat_f64_u
+
+ // 0xFC prefixed operators
+ // bulk memory operations
+ // https://github.com/WebAssembly/bulk-memory-operations
+ @bulk_memory MemoryInit { data_index: u32, mem: u32 } => visit_memory_init
+ @bulk_memory DataDrop { data_index: u32 } => visit_data_drop
+ @bulk_memory MemoryCopy { dst_mem: u32, src_mem: u32 } => visit_memory_copy
+ @bulk_memory MemoryFill { mem: u32 } => visit_memory_fill
+ @bulk_memory TableInit { elem_index: u32, table: u32 } => visit_table_init
+ @bulk_memory ElemDrop { elem_index: u32 } => visit_elem_drop
+ @bulk_memory TableCopy { dst_table: u32, src_table: u32 } => visit_table_copy
+
+ // 0xFC prefixed operators
+ // reference-types
+ // https://github.com/WebAssembly/reference-types
+ @reference_types TableFill { table: u32 } => visit_table_fill
+ @reference_types TableGet { table: u32 } => visit_table_get
+ @reference_types TableSet { table: u32 } => visit_table_set
+ @reference_types TableGrow { table: u32 } => visit_table_grow
+ @reference_types TableSize { table: u32 } => visit_table_size
+
+ // 0xFC prefixed operators
+ // memory control (experimental)
+ // https://github.com/WebAssembly/design/issues/1439
+ @memory_control MemoryDiscard { mem: u32 } => visit_memory_discard
+
+ // 0xFE prefixed operators
+ // threads
+ // https://github.com/WebAssembly/threads
+ @threads MemoryAtomicNotify { memarg: $crate::MemArg } => visit_memory_atomic_notify
+ @threads MemoryAtomicWait32 { memarg: $crate::MemArg } => visit_memory_atomic_wait32
+ @threads MemoryAtomicWait64 { memarg: $crate::MemArg } => visit_memory_atomic_wait64
+ @threads AtomicFence => visit_atomic_fence
+ @threads I32AtomicLoad { memarg: $crate::MemArg } => visit_i32_atomic_load
+ @threads I64AtomicLoad { memarg: $crate::MemArg } => visit_i64_atomic_load
+ @threads I32AtomicLoad8U { memarg: $crate::MemArg } => visit_i32_atomic_load8_u
+ @threads I32AtomicLoad16U { memarg: $crate::MemArg } => visit_i32_atomic_load16_u
+ @threads I64AtomicLoad8U { memarg: $crate::MemArg } => visit_i64_atomic_load8_u
+ @threads I64AtomicLoad16U { memarg: $crate::MemArg } => visit_i64_atomic_load16_u
+ @threads I64AtomicLoad32U { memarg: $crate::MemArg } => visit_i64_atomic_load32_u
+ @threads I32AtomicStore { memarg: $crate::MemArg } => visit_i32_atomic_store
+ @threads I64AtomicStore { memarg: $crate::MemArg } => visit_i64_atomic_store
+ @threads I32AtomicStore8 { memarg: $crate::MemArg } => visit_i32_atomic_store8
+ @threads I32AtomicStore16 { memarg: $crate::MemArg } => visit_i32_atomic_store16
+ @threads I64AtomicStore8 { memarg: $crate::MemArg } => visit_i64_atomic_store8
+ @threads I64AtomicStore16 { memarg: $crate::MemArg } => visit_i64_atomic_store16
+ @threads I64AtomicStore32 { memarg: $crate::MemArg } => visit_i64_atomic_store32
+ @threads I32AtomicRmwAdd { memarg: $crate::MemArg } => visit_i32_atomic_rmw_add
+ @threads I64AtomicRmwAdd { memarg: $crate::MemArg } => visit_i64_atomic_rmw_add
+ @threads I32AtomicRmw8AddU { memarg: $crate::MemArg } => visit_i32_atomic_rmw8_add_u
+ @threads I32AtomicRmw16AddU { memarg: $crate::MemArg } => visit_i32_atomic_rmw16_add_u
+ @threads I64AtomicRmw8AddU { memarg: $crate::MemArg } => visit_i64_atomic_rmw8_add_u
+ @threads I64AtomicRmw16AddU { memarg: $crate::MemArg } => visit_i64_atomic_rmw16_add_u
+ @threads I64AtomicRmw32AddU { memarg: $crate::MemArg } => visit_i64_atomic_rmw32_add_u
+ @threads I32AtomicRmwSub { memarg: $crate::MemArg } => visit_i32_atomic_rmw_sub
+ @threads I64AtomicRmwSub { memarg: $crate::MemArg } => visit_i64_atomic_rmw_sub
+ @threads I32AtomicRmw8SubU { memarg: $crate::MemArg } => visit_i32_atomic_rmw8_sub_u
+ @threads I32AtomicRmw16SubU { memarg: $crate::MemArg } => visit_i32_atomic_rmw16_sub_u
+ @threads I64AtomicRmw8SubU { memarg: $crate::MemArg } => visit_i64_atomic_rmw8_sub_u
+ @threads I64AtomicRmw16SubU { memarg: $crate::MemArg } => visit_i64_atomic_rmw16_sub_u
+ @threads I64AtomicRmw32SubU { memarg: $crate::MemArg } => visit_i64_atomic_rmw32_sub_u
+ @threads I32AtomicRmwAnd { memarg: $crate::MemArg } => visit_i32_atomic_rmw_and
+ @threads I64AtomicRmwAnd { memarg: $crate::MemArg } => visit_i64_atomic_rmw_and
+ @threads I32AtomicRmw8AndU { memarg: $crate::MemArg } => visit_i32_atomic_rmw8_and_u
+ @threads I32AtomicRmw16AndU { memarg: $crate::MemArg } => visit_i32_atomic_rmw16_and_u
+ @threads I64AtomicRmw8AndU { memarg: $crate::MemArg } => visit_i64_atomic_rmw8_and_u
+ @threads I64AtomicRmw16AndU { memarg: $crate::MemArg } => visit_i64_atomic_rmw16_and_u
+ @threads I64AtomicRmw32AndU { memarg: $crate::MemArg } => visit_i64_atomic_rmw32_and_u
+ @threads I32AtomicRmwOr { memarg: $crate::MemArg } => visit_i32_atomic_rmw_or
+ @threads I64AtomicRmwOr { memarg: $crate::MemArg } => visit_i64_atomic_rmw_or
+ @threads I32AtomicRmw8OrU { memarg: $crate::MemArg } => visit_i32_atomic_rmw8_or_u
+ @threads I32AtomicRmw16OrU { memarg: $crate::MemArg } => visit_i32_atomic_rmw16_or_u
+ @threads I64AtomicRmw8OrU { memarg: $crate::MemArg } => visit_i64_atomic_rmw8_or_u
+ @threads I64AtomicRmw16OrU { memarg: $crate::MemArg } => visit_i64_atomic_rmw16_or_u
+ @threads I64AtomicRmw32OrU { memarg: $crate::MemArg } => visit_i64_atomic_rmw32_or_u
+ @threads I32AtomicRmwXor { memarg: $crate::MemArg } => visit_i32_atomic_rmw_xor
+ @threads I64AtomicRmwXor { memarg: $crate::MemArg } => visit_i64_atomic_rmw_xor
+ @threads I32AtomicRmw8XorU { memarg: $crate::MemArg } => visit_i32_atomic_rmw8_xor_u
+ @threads I32AtomicRmw16XorU { memarg: $crate::MemArg } => visit_i32_atomic_rmw16_xor_u
+ @threads I64AtomicRmw8XorU { memarg: $crate::MemArg } => visit_i64_atomic_rmw8_xor_u
+ @threads I64AtomicRmw16XorU { memarg: $crate::MemArg } => visit_i64_atomic_rmw16_xor_u
+ @threads I64AtomicRmw32XorU { memarg: $crate::MemArg } => visit_i64_atomic_rmw32_xor_u
+ @threads I32AtomicRmwXchg { memarg: $crate::MemArg } => visit_i32_atomic_rmw_xchg
+ @threads I64AtomicRmwXchg { memarg: $crate::MemArg } => visit_i64_atomic_rmw_xchg
+ @threads I32AtomicRmw8XchgU { memarg: $crate::MemArg } => visit_i32_atomic_rmw8_xchg_u
+ @threads I32AtomicRmw16XchgU { memarg: $crate::MemArg } => visit_i32_atomic_rmw16_xchg_u
+ @threads I64AtomicRmw8XchgU { memarg: $crate::MemArg } => visit_i64_atomic_rmw8_xchg_u
+ @threads I64AtomicRmw16XchgU { memarg: $crate::MemArg } => visit_i64_atomic_rmw16_xchg_u
+ @threads I64AtomicRmw32XchgU { memarg: $crate::MemArg } => visit_i64_atomic_rmw32_xchg_u
+ @threads I32AtomicRmwCmpxchg { memarg: $crate::MemArg } => visit_i32_atomic_rmw_cmpxchg
+ @threads I64AtomicRmwCmpxchg { memarg: $crate::MemArg } => visit_i64_atomic_rmw_cmpxchg
+ @threads I32AtomicRmw8CmpxchgU { memarg: $crate::MemArg } => visit_i32_atomic_rmw8_cmpxchg_u
+ @threads I32AtomicRmw16CmpxchgU { memarg: $crate::MemArg } => visit_i32_atomic_rmw16_cmpxchg_u
+ @threads I64AtomicRmw8CmpxchgU { memarg: $crate::MemArg } => visit_i64_atomic_rmw8_cmpxchg_u
+ @threads I64AtomicRmw16CmpxchgU { memarg: $crate::MemArg } => visit_i64_atomic_rmw16_cmpxchg_u
+ @threads I64AtomicRmw32CmpxchgU { memarg: $crate::MemArg } => visit_i64_atomic_rmw32_cmpxchg_u
+
+ // 0xFD operators
+ // 128-bit SIMD
+ // - https://github.com/webassembly/simd
+ // - https://webassembly.github.io/simd/core/binary/instructions.html
+ @simd V128Load { memarg: $crate::MemArg } => visit_v128_load
+ @simd V128Load8x8S { memarg: $crate::MemArg } => visit_v128_load8x8_s
+ @simd V128Load8x8U { memarg: $crate::MemArg } => visit_v128_load8x8_u
+ @simd V128Load16x4S { memarg: $crate::MemArg } => visit_v128_load16x4_s
+ @simd V128Load16x4U { memarg: $crate::MemArg } => visit_v128_load16x4_u
+ @simd V128Load32x2S { memarg: $crate::MemArg } => visit_v128_load32x2_s
+ @simd V128Load32x2U { memarg: $crate::MemArg } => visit_v128_load32x2_u
+ @simd V128Load8Splat { memarg: $crate::MemArg } => visit_v128_load8_splat
+ @simd V128Load16Splat { memarg: $crate::MemArg } => visit_v128_load16_splat
+ @simd V128Load32Splat { memarg: $crate::MemArg } => visit_v128_load32_splat
+ @simd V128Load64Splat { memarg: $crate::MemArg } => visit_v128_load64_splat
+ @simd V128Load32Zero { memarg: $crate::MemArg } => visit_v128_load32_zero
+ @simd V128Load64Zero { memarg: $crate::MemArg } => visit_v128_load64_zero
+ @simd V128Store { memarg: $crate::MemArg } => visit_v128_store
+ @simd V128Load8Lane { memarg: $crate::MemArg, lane: u8 } => visit_v128_load8_lane
+ @simd V128Load16Lane { memarg: $crate::MemArg, lane: u8 } => visit_v128_load16_lane
+ @simd V128Load32Lane { memarg: $crate::MemArg, lane: u8 } => visit_v128_load32_lane
+ @simd V128Load64Lane { memarg: $crate::MemArg, lane: u8 } => visit_v128_load64_lane
+ @simd V128Store8Lane { memarg: $crate::MemArg, lane: u8 } => visit_v128_store8_lane
+ @simd V128Store16Lane { memarg: $crate::MemArg, lane: u8 } => visit_v128_store16_lane
+ @simd V128Store32Lane { memarg: $crate::MemArg, lane: u8 } => visit_v128_store32_lane
+ @simd V128Store64Lane { memarg: $crate::MemArg, lane: u8 } => visit_v128_store64_lane
+ @simd V128Const { value: $crate::V128 } => visit_v128_const
+ @simd I8x16Shuffle { lanes: [u8; 16] } => visit_i8x16_shuffle
+ @simd I8x16ExtractLaneS { lane: u8 } => visit_i8x16_extract_lane_s
+ @simd I8x16ExtractLaneU { lane: u8 } => visit_i8x16_extract_lane_u
+ @simd I8x16ReplaceLane { lane: u8 } => visit_i8x16_replace_lane
+ @simd I16x8ExtractLaneS { lane: u8 } => visit_i16x8_extract_lane_s
+ @simd I16x8ExtractLaneU { lane: u8 } => visit_i16x8_extract_lane_u
+ @simd I16x8ReplaceLane { lane: u8 } => visit_i16x8_replace_lane
+ @simd I32x4ExtractLane { lane: u8 } => visit_i32x4_extract_lane
+ @simd I32x4ReplaceLane { lane: u8 } => visit_i32x4_replace_lane
+ @simd I64x2ExtractLane { lane: u8 } => visit_i64x2_extract_lane
+ @simd I64x2ReplaceLane { lane: u8 } => visit_i64x2_replace_lane
+ @simd F32x4ExtractLane { lane: u8 } => visit_f32x4_extract_lane
+ @simd F32x4ReplaceLane { lane: u8 } => visit_f32x4_replace_lane
+ @simd F64x2ExtractLane { lane: u8 } => visit_f64x2_extract_lane
+ @simd F64x2ReplaceLane { lane: u8 } => visit_f64x2_replace_lane
+ @simd I8x16Swizzle => visit_i8x16_swizzle
+ @simd I8x16Splat => visit_i8x16_splat
+ @simd I16x8Splat => visit_i16x8_splat
+ @simd I32x4Splat => visit_i32x4_splat
+ @simd I64x2Splat => visit_i64x2_splat
+ @simd F32x4Splat => visit_f32x4_splat
+ @simd F64x2Splat => visit_f64x2_splat
+ @simd I8x16Eq => visit_i8x16_eq
+ @simd I8x16Ne => visit_i8x16_ne
+ @simd I8x16LtS => visit_i8x16_lt_s
+ @simd I8x16LtU => visit_i8x16_lt_u
+ @simd I8x16GtS => visit_i8x16_gt_s
+ @simd I8x16GtU => visit_i8x16_gt_u
+ @simd I8x16LeS => visit_i8x16_le_s
+ @simd I8x16LeU => visit_i8x16_le_u
+ @simd I8x16GeS => visit_i8x16_ge_s
+ @simd I8x16GeU => visit_i8x16_ge_u
+ @simd I16x8Eq => visit_i16x8_eq
+ @simd I16x8Ne => visit_i16x8_ne
+ @simd I16x8LtS => visit_i16x8_lt_s
+ @simd I16x8LtU => visit_i16x8_lt_u
+ @simd I16x8GtS => visit_i16x8_gt_s
+ @simd I16x8GtU => visit_i16x8_gt_u
+ @simd I16x8LeS => visit_i16x8_le_s
+ @simd I16x8LeU => visit_i16x8_le_u
+ @simd I16x8GeS => visit_i16x8_ge_s
+ @simd I16x8GeU => visit_i16x8_ge_u
+ @simd I32x4Eq => visit_i32x4_eq
+ @simd I32x4Ne => visit_i32x4_ne
+ @simd I32x4LtS => visit_i32x4_lt_s
+ @simd I32x4LtU => visit_i32x4_lt_u
+ @simd I32x4GtS => visit_i32x4_gt_s
+ @simd I32x4GtU => visit_i32x4_gt_u
+ @simd I32x4LeS => visit_i32x4_le_s
+ @simd I32x4LeU => visit_i32x4_le_u
+ @simd I32x4GeS => visit_i32x4_ge_s
+ @simd I32x4GeU => visit_i32x4_ge_u
+ @simd I64x2Eq => visit_i64x2_eq
+ @simd I64x2Ne => visit_i64x2_ne
+ @simd I64x2LtS => visit_i64x2_lt_s
+ @simd I64x2GtS => visit_i64x2_gt_s
+ @simd I64x2LeS => visit_i64x2_le_s
+ @simd I64x2GeS => visit_i64x2_ge_s
+ @simd F32x4Eq => visit_f32x4_eq
+ @simd F32x4Ne => visit_f32x4_ne
+ @simd F32x4Lt => visit_f32x4_lt
+ @simd F32x4Gt => visit_f32x4_gt
+ @simd F32x4Le => visit_f32x4_le
+ @simd F32x4Ge => visit_f32x4_ge
+ @simd F64x2Eq => visit_f64x2_eq
+ @simd F64x2Ne => visit_f64x2_ne
+ @simd F64x2Lt => visit_f64x2_lt
+ @simd F64x2Gt => visit_f64x2_gt
+ @simd F64x2Le => visit_f64x2_le
+ @simd F64x2Ge => visit_f64x2_ge
+ @simd V128Not => visit_v128_not
+ @simd V128And => visit_v128_and
+ @simd V128AndNot => visit_v128_andnot
+ @simd V128Or => visit_v128_or
+ @simd V128Xor => visit_v128_xor
+ @simd V128Bitselect => visit_v128_bitselect
+ @simd V128AnyTrue => visit_v128_any_true
+ @simd I8x16Abs => visit_i8x16_abs
+ @simd I8x16Neg => visit_i8x16_neg
+ @simd I8x16Popcnt => visit_i8x16_popcnt
+ @simd I8x16AllTrue => visit_i8x16_all_true
+ @simd I8x16Bitmask => visit_i8x16_bitmask
+ @simd I8x16NarrowI16x8S => visit_i8x16_narrow_i16x8_s
+ @simd I8x16NarrowI16x8U => visit_i8x16_narrow_i16x8_u
+ @simd I8x16Shl => visit_i8x16_shl
+ @simd I8x16ShrS => visit_i8x16_shr_s
+ @simd I8x16ShrU => visit_i8x16_shr_u
+ @simd I8x16Add => visit_i8x16_add
+ @simd I8x16AddSatS => visit_i8x16_add_sat_s
+ @simd I8x16AddSatU => visit_i8x16_add_sat_u
+ @simd I8x16Sub => visit_i8x16_sub
+ @simd I8x16SubSatS => visit_i8x16_sub_sat_s
+ @simd I8x16SubSatU => visit_i8x16_sub_sat_u
+ @simd I8x16MinS => visit_i8x16_min_s
+ @simd I8x16MinU => visit_i8x16_min_u
+ @simd I8x16MaxS => visit_i8x16_max_s
+ @simd I8x16MaxU => visit_i8x16_max_u
+ @simd I8x16AvgrU => visit_i8x16_avgr_u
+ @simd I16x8ExtAddPairwiseI8x16S => visit_i16x8_extadd_pairwise_i8x16_s
+ @simd I16x8ExtAddPairwiseI8x16U => visit_i16x8_extadd_pairwise_i8x16_u
+ @simd I16x8Abs => visit_i16x8_abs
+ @simd I16x8Neg => visit_i16x8_neg
+ @simd I16x8Q15MulrSatS => visit_i16x8_q15mulr_sat_s
+ @simd I16x8AllTrue => visit_i16x8_all_true
+ @simd I16x8Bitmask => visit_i16x8_bitmask
+ @simd I16x8NarrowI32x4S => visit_i16x8_narrow_i32x4_s
+ @simd I16x8NarrowI32x4U => visit_i16x8_narrow_i32x4_u
+ @simd I16x8ExtendLowI8x16S => visit_i16x8_extend_low_i8x16_s
+ @simd I16x8ExtendHighI8x16S => visit_i16x8_extend_high_i8x16_s
+ @simd I16x8ExtendLowI8x16U => visit_i16x8_extend_low_i8x16_u
+ @simd I16x8ExtendHighI8x16U => visit_i16x8_extend_high_i8x16_u
+ @simd I16x8Shl => visit_i16x8_shl
+ @simd I16x8ShrS => visit_i16x8_shr_s
+ @simd I16x8ShrU => visit_i16x8_shr_u
+ @simd I16x8Add => visit_i16x8_add
+ @simd I16x8AddSatS => visit_i16x8_add_sat_s
+ @simd I16x8AddSatU => visit_i16x8_add_sat_u
+ @simd I16x8Sub => visit_i16x8_sub
+ @simd I16x8SubSatS => visit_i16x8_sub_sat_s
+ @simd I16x8SubSatU => visit_i16x8_sub_sat_u
+ @simd I16x8Mul => visit_i16x8_mul
+ @simd I16x8MinS => visit_i16x8_min_s
+ @simd I16x8MinU => visit_i16x8_min_u
+ @simd I16x8MaxS => visit_i16x8_max_s
+ @simd I16x8MaxU => visit_i16x8_max_u
+ @simd I16x8AvgrU => visit_i16x8_avgr_u
+ @simd I16x8ExtMulLowI8x16S => visit_i16x8_extmul_low_i8x16_s
+ @simd I16x8ExtMulHighI8x16S => visit_i16x8_extmul_high_i8x16_s
+ @simd I16x8ExtMulLowI8x16U => visit_i16x8_extmul_low_i8x16_u
+ @simd I16x8ExtMulHighI8x16U => visit_i16x8_extmul_high_i8x16_u
+ @simd I32x4ExtAddPairwiseI16x8S => visit_i32x4_extadd_pairwise_i16x8_s
+ @simd I32x4ExtAddPairwiseI16x8U => visit_i32x4_extadd_pairwise_i16x8_u
+ @simd I32x4Abs => visit_i32x4_abs
+ @simd I32x4Neg => visit_i32x4_neg
+ @simd I32x4AllTrue => visit_i32x4_all_true
+ @simd I32x4Bitmask => visit_i32x4_bitmask
+ @simd I32x4ExtendLowI16x8S => visit_i32x4_extend_low_i16x8_s
+ @simd I32x4ExtendHighI16x8S => visit_i32x4_extend_high_i16x8_s
+ @simd I32x4ExtendLowI16x8U => visit_i32x4_extend_low_i16x8_u
+ @simd I32x4ExtendHighI16x8U => visit_i32x4_extend_high_i16x8_u
+ @simd I32x4Shl => visit_i32x4_shl
+ @simd I32x4ShrS => visit_i32x4_shr_s
+ @simd I32x4ShrU => visit_i32x4_shr_u
+ @simd I32x4Add => visit_i32x4_add
+ @simd I32x4Sub => visit_i32x4_sub
+ @simd I32x4Mul => visit_i32x4_mul
+ @simd I32x4MinS => visit_i32x4_min_s
+ @simd I32x4MinU => visit_i32x4_min_u
+ @simd I32x4MaxS => visit_i32x4_max_s
+ @simd I32x4MaxU => visit_i32x4_max_u
+ @simd I32x4DotI16x8S => visit_i32x4_dot_i16x8_s
+ @simd I32x4ExtMulLowI16x8S => visit_i32x4_extmul_low_i16x8_s
+ @simd I32x4ExtMulHighI16x8S => visit_i32x4_extmul_high_i16x8_s
+ @simd I32x4ExtMulLowI16x8U => visit_i32x4_extmul_low_i16x8_u
+ @simd I32x4ExtMulHighI16x8U => visit_i32x4_extmul_high_i16x8_u
+ @simd I64x2Abs => visit_i64x2_abs
+ @simd I64x2Neg => visit_i64x2_neg
+ @simd I64x2AllTrue => visit_i64x2_all_true
+ @simd I64x2Bitmask => visit_i64x2_bitmask
+ @simd I64x2ExtendLowI32x4S => visit_i64x2_extend_low_i32x4_s
+ @simd I64x2ExtendHighI32x4S => visit_i64x2_extend_high_i32x4_s
+ @simd I64x2ExtendLowI32x4U => visit_i64x2_extend_low_i32x4_u
+ @simd I64x2ExtendHighI32x4U => visit_i64x2_extend_high_i32x4_u
+ @simd I64x2Shl => visit_i64x2_shl
+ @simd I64x2ShrS => visit_i64x2_shr_s
+ @simd I64x2ShrU => visit_i64x2_shr_u
+ @simd I64x2Add => visit_i64x2_add
+ @simd I64x2Sub => visit_i64x2_sub
+ @simd I64x2Mul => visit_i64x2_mul
+ @simd I64x2ExtMulLowI32x4S => visit_i64x2_extmul_low_i32x4_s
+ @simd I64x2ExtMulHighI32x4S => visit_i64x2_extmul_high_i32x4_s
+ @simd I64x2ExtMulLowI32x4U => visit_i64x2_extmul_low_i32x4_u
+ @simd I64x2ExtMulHighI32x4U => visit_i64x2_extmul_high_i32x4_u
+ @simd F32x4Ceil => visit_f32x4_ceil
+ @simd F32x4Floor => visit_f32x4_floor
+ @simd F32x4Trunc => visit_f32x4_trunc
+ @simd F32x4Nearest => visit_f32x4_nearest
+ @simd F32x4Abs => visit_f32x4_abs
+ @simd F32x4Neg => visit_f32x4_neg
+ @simd F32x4Sqrt => visit_f32x4_sqrt
+ @simd F32x4Add => visit_f32x4_add
+ @simd F32x4Sub => visit_f32x4_sub
+ @simd F32x4Mul => visit_f32x4_mul
+ @simd F32x4Div => visit_f32x4_div
+ @simd F32x4Min => visit_f32x4_min
+ @simd F32x4Max => visit_f32x4_max
+ @simd F32x4PMin => visit_f32x4_pmin
+ @simd F32x4PMax => visit_f32x4_pmax
+ @simd F64x2Ceil => visit_f64x2_ceil
+ @simd F64x2Floor => visit_f64x2_floor
+ @simd F64x2Trunc => visit_f64x2_trunc
+ @simd F64x2Nearest => visit_f64x2_nearest
+ @simd F64x2Abs => visit_f64x2_abs
+ @simd F64x2Neg => visit_f64x2_neg
+ @simd F64x2Sqrt => visit_f64x2_sqrt
+ @simd F64x2Add => visit_f64x2_add
+ @simd F64x2Sub => visit_f64x2_sub
+ @simd F64x2Mul => visit_f64x2_mul
+ @simd F64x2Div => visit_f64x2_div
+ @simd F64x2Min => visit_f64x2_min
+ @simd F64x2Max => visit_f64x2_max
+ @simd F64x2PMin => visit_f64x2_pmin
+ @simd F64x2PMax => visit_f64x2_pmax
+ @simd I32x4TruncSatF32x4S => visit_i32x4_trunc_sat_f32x4_s
+ @simd I32x4TruncSatF32x4U => visit_i32x4_trunc_sat_f32x4_u
+ @simd F32x4ConvertI32x4S => visit_f32x4_convert_i32x4_s
+ @simd F32x4ConvertI32x4U => visit_f32x4_convert_i32x4_u
+ @simd I32x4TruncSatF64x2SZero => visit_i32x4_trunc_sat_f64x2_s_zero
+ @simd I32x4TruncSatF64x2UZero => visit_i32x4_trunc_sat_f64x2_u_zero
+ @simd F64x2ConvertLowI32x4S => visit_f64x2_convert_low_i32x4_s
+ @simd F64x2ConvertLowI32x4U => visit_f64x2_convert_low_i32x4_u
+ @simd F32x4DemoteF64x2Zero => visit_f32x4_demote_f64x2_zero
+ @simd F64x2PromoteLowF32x4 => visit_f64x2_promote_low_f32x4
+
+ // Relaxed SIMD operators
+ // https://github.com/WebAssembly/relaxed-simd
+ @relaxed_simd I8x16RelaxedSwizzle => visit_i8x16_relaxed_swizzle
+ @relaxed_simd I32x4RelaxedTruncF32x4S => visit_i32x4_relaxed_trunc_f32x4_s
+ @relaxed_simd I32x4RelaxedTruncF32x4U => visit_i32x4_relaxed_trunc_f32x4_u
+ @relaxed_simd I32x4RelaxedTruncF64x2SZero => visit_i32x4_relaxed_trunc_f64x2_s_zero
+ @relaxed_simd I32x4RelaxedTruncF64x2UZero => visit_i32x4_relaxed_trunc_f64x2_u_zero
+ @relaxed_simd F32x4RelaxedMadd => visit_f32x4_relaxed_madd
+ @relaxed_simd F32x4RelaxedNmadd => visit_f32x4_relaxed_nmadd
+ @relaxed_simd F64x2RelaxedMadd => visit_f64x2_relaxed_madd
+ @relaxed_simd F64x2RelaxedNmadd => visit_f64x2_relaxed_nmadd
+ @relaxed_simd I8x16RelaxedLaneselect => visit_i8x16_relaxed_laneselect
+ @relaxed_simd I16x8RelaxedLaneselect => visit_i16x8_relaxed_laneselect
+ @relaxed_simd I32x4RelaxedLaneselect => visit_i32x4_relaxed_laneselect
+ @relaxed_simd I64x2RelaxedLaneselect => visit_i64x2_relaxed_laneselect
+ @relaxed_simd F32x4RelaxedMin => visit_f32x4_relaxed_min
+ @relaxed_simd F32x4RelaxedMax => visit_f32x4_relaxed_max
+ @relaxed_simd F64x2RelaxedMin => visit_f64x2_relaxed_min
+ @relaxed_simd F64x2RelaxedMax => visit_f64x2_relaxed_max
+ @relaxed_simd I16x8RelaxedQ15mulrS => visit_i16x8_relaxed_q15mulr_s
+ @relaxed_simd I16x8RelaxedDotI8x16I7x16S => visit_i16x8_relaxed_dot_i8x16_i7x16_s
+ @relaxed_simd I32x4RelaxedDotI8x16I7x16AddS => visit_i32x4_relaxed_dot_i8x16_i7x16_add_s
+
+ // Typed Function references
+ @function_references CallRef { hty: $crate::HeapType } => visit_call_ref
+ @function_references ReturnCallRef { hty: $crate::HeapType } => visit_return_call_ref
+ @function_references RefAsNonNull => visit_ref_as_non_null
+ @function_references BrOnNull { relative_depth: u32 } => visit_br_on_null
+ @function_references BrOnNonNull { relative_depth: u32 } => visit_br_on_non_null
+ }
+ };
+}
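+
+// An illustrative consumer of `for_each_operator!` (a hypothetical sketch, not
+// part of this crate): the callback macro sees every arm, including its
+// `@proposal` marker and optional immediate fields, so it can, for example,
+// simply count the operators it was handed:
+//
+//     macro_rules! count_operators {
+//         ($( @$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident)*) => {
+//             pub fn operator_count() -> usize {
+//                 let mut n = 0;
+//                 $( let _ = stringify!($op); n += 1; )*
+//                 n
+//             }
+//         };
+//     }
+//     for_each_operator!(count_operators);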
+
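+// Internal helpers: `format_err!` builds a `BinaryReaderError` from a formatted
+// message and a binary offset, and `bail!` returns early with such an error.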
+macro_rules! format_err {
+ ($offset:expr, $($arg:tt)*) => {
+ crate::BinaryReaderError::fmt(format_args!($($arg)*), $offset)
+ }
+}
+
+macro_rules! bail {
+ ($($arg:tt)*) => {return Err(format_err!($($arg)*))}
+}
+
+pub use crate::binary_reader::{BinaryReader, BinaryReaderError, Result};
+pub use crate::parser::*;
+pub use crate::readers::*;
+pub use crate::resources::*;
+pub use crate::validator::*;
+
+mod binary_reader;
+mod limits;
+mod parser;
+mod readers;
+mod resources;
+mod validator;
diff --git a/third_party/rust/wasmparser/src/limits.rs b/third_party/rust/wasmparser/src/limits.rs
new file mode 100644
index 0000000000..e9ab7d06a6
--- /dev/null
+++ b/third_party/rust/wasmparser/src/limits.rs
@@ -0,0 +1,57 @@
+/* Copyright 2017 Mozilla Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// The following limits are imposed by wasmparser on WebAssembly modules.
+// The limits are agreed upon with other engines for consistency.
+pub const MAX_WASM_TYPES: usize = 1_000_000;
+pub const MAX_WASM_FUNCTIONS: usize = 1_000_000;
+pub const MAX_WASM_EXPORTS: usize = 100_000;
+pub const MAX_WASM_GLOBALS: usize = 1_000_000;
+pub const MAX_WASM_ELEMENT_SEGMENTS: usize = 100_000;
+pub const MAX_WASM_DATA_SEGMENTS: usize = 100_000;
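+// Wasm memory pages are 64 KiB, so 65_536 pages is the full 4 GiB 32-bit
+// address space and `1 << 48` pages is the full 64-bit address space.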
+pub const MAX_WASM_MEMORY32_PAGES: u64 = 65536;
+pub const MAX_WASM_MEMORY64_PAGES: u64 = 1 << 48;
+pub const MAX_WASM_STRING_SIZE: usize = 100_000;
+pub const MAX_WASM_FUNCTION_SIZE: usize = 128 * 1024;
+pub const MAX_WASM_FUNCTION_LOCALS: usize = 50000;
+pub const MAX_WASM_FUNCTION_PARAMS: usize = 1000;
+pub const MAX_WASM_FUNCTION_RETURNS: usize = 1000;
+pub const _MAX_WASM_TABLE_SIZE: usize = 10_000_000;
+pub const MAX_WASM_TABLE_ENTRIES: usize = 10_000_000;
+pub const MAX_WASM_TABLES: usize = 100;
+pub const MAX_WASM_MEMORIES: usize = 100;
+pub const MAX_WASM_TAGS: usize = 1_000_000;
+pub const MAX_WASM_BR_TABLE_SIZE: usize = MAX_WASM_FUNCTION_SIZE;
+
+// Component-related limits
+pub const MAX_WASM_MODULE_SIZE: usize = 1024 * 1024 * 1024; // = 1 GiB
+pub const MAX_WASM_MODULE_TYPE_DECLS: usize = 100_000;
+pub const MAX_WASM_COMPONENT_TYPE_DECLS: usize = 100_000;
+pub const MAX_WASM_INSTANCE_TYPE_DECLS: usize = 100_000;
+pub const MAX_WASM_RECORD_FIELDS: usize = 1000;
+pub const MAX_WASM_VARIANT_CASES: usize = 1000;
+pub const MAX_WASM_TUPLE_TYPES: usize = 1000;
+pub const MAX_WASM_FLAG_NAMES: usize = 1000;
+pub const MAX_WASM_ENUM_CASES: usize = 1000;
+pub const MAX_WASM_UNION_TYPES: usize = 1000;
+pub const MAX_WASM_INSTANTIATION_EXPORTS: usize = 1000;
+pub const MAX_WASM_CANONICAL_OPTIONS: usize = 10;
+pub const MAX_WASM_INSTANTIATION_ARGS: usize = 1000;
+pub const MAX_WASM_START_ARGS: usize = 1000;
+pub const MAX_WASM_TYPE_SIZE: u32 = 1_000_000;
+pub const MAX_WASM_MODULES: usize = 1_000;
+pub const MAX_WASM_COMPONENTS: usize = 1_000;
+pub const MAX_WASM_INSTANCES: usize = 1_000;
+pub const MAX_WASM_VALUES: usize = 1_000;
diff --git a/third_party/rust/wasmparser/src/parser.rs b/third_party/rust/wasmparser/src/parser.rs
new file mode 100644
index 0000000000..227530abba
--- /dev/null
+++ b/third_party/rust/wasmparser/src/parser.rs
@@ -0,0 +1,1496 @@
+use crate::CoreTypeSectionReader;
+use crate::{
+ limits::MAX_WASM_MODULE_SIZE, BinaryReader, BinaryReaderError, ComponentCanonicalSectionReader,
+ ComponentExportSectionReader, ComponentImportSectionReader, ComponentInstanceSectionReader,
+ ComponentStartFunction, ComponentTypeSectionReader, CustomSectionReader, DataSectionReader,
+ ElementSectionReader, ExportSectionReader, FromReader, FunctionBody, FunctionSectionReader,
+ GlobalSectionReader, ImportSectionReader, InstanceSectionReader, MemorySectionReader, Result,
+ SectionLimited, TableSectionReader, TagSectionReader, TypeSectionReader,
+};
+use std::convert::TryInto;
+use std::fmt;
+use std::iter;
+use std::ops::Range;
+
+pub(crate) const WASM_MODULE_VERSION: u16 = 0x1;
+
+// Note that this started at `0xa` and we're incrementing up from there. When
+// the component model is stabilized this will become 0x1. The changes here are:
+//
+// * [????-??-??] 0xa - original version
+// * [2022-01-05] 0xb - `export` introduces an alias
+// * [2022-02-06] 0xc - `export` has an optional type ascribed to it
+pub(crate) const WASM_COMPONENT_VERSION: u16 = 0xc;
+
+/// The supported encoding formats for the parser.
+#[derive(Debug, Clone, Copy, Eq, PartialEq)]
+pub enum Encoding {
+ /// The encoding format is a WebAssembly module.
+ Module,
+ /// The encoding format is a WebAssembly component.
+ Component,
+}
+
+/// An incremental parser of a binary WebAssembly module or component.
+///
+/// This type is intended to be used to incrementally parse a WebAssembly module
+/// or component as bytes become available for the module. This can also be used
+/// to parse modules or components that are already entirely resident within memory.
+///
+/// The primary function for a parser is the [`Parser::parse`] function, which
+/// will incrementally consume input. You can also use the [`Parser::parse_all`]
+/// function to parse a module or component that is entirely resident in memory.
+#[derive(Debug, Clone)]
+pub struct Parser {
+ state: State,
+ offset: u64,
+ max_size: u64,
+ encoding: Encoding,
+}
+
+#[derive(Debug, Clone)]
+enum State {
+ Header,
+ SectionStart,
+ FunctionBody { remaining: u32, len: u32 },
+}
+
+/// A successful return payload from [`Parser::parse`].
+///
+/// On success one of two values is returned: either an indication that more
+/// data is needed to continue parsing, or a parsed chunk of the input along
+/// with how many bytes of it were consumed.
+#[derive(Debug)]
+pub enum Chunk<'a> {
+ /// This can be returned at any time and indicates that more data is needed
+ /// to proceed with parsing. Zero bytes were consumed from the input to
+/// [`Parser::parse`]. The `u64` value here is a hint as to how many more
+ /// bytes are needed to continue parsing.
+ NeedMoreData(u64),
+
+ /// A chunk was successfully parsed.
+ Parsed {
+ /// This many bytes of the `data` input to [`Parser::parse`] were
+ /// consumed to produce `payload`.
+ consumed: usize,
+ /// The value that we actually parsed.
+ payload: Payload<'a>,
+ },
+}
+
+/// Values that can be parsed from a WebAssembly module or component.
+///
+/// This enumeration covers all of the pieces that can be parsed by a
+/// [`Parser`] from a binary WebAssembly module or component. Note that for many
+/// sections the entire section is parsed all at once, whereas other sections,
+/// like the code section, are parsed incrementally. This is a distinction where some
+/// sections, like the type section, are required to be fully resident in memory
+/// (fully downloaded) before proceeding. Other sections, like the code section,
+/// can be processed in a streaming fashion where each function is extracted
+/// individually so it can possibly be shipped to another thread while you wait
+/// for more functions to get downloaded.
+///
+/// Note that payloads, when returned, do not indicate that the module or component
+/// is valid. For example when you receive a `Payload::TypeSection` the type
+/// section itself has not yet actually been parsed. The reader returned will be
+/// able to parse it, but you'll have to actually iterate the reader to do the
+/// full parse. Each payload returned is intended to be a *window* into the
+/// original `data` passed to [`Parser::parse`] which can be further processed
+/// if necessary.
+pub enum Payload<'a> {
+ /// Indicates the header of a WebAssembly module or component.
+ Version {
+ /// The version number found in the header.
+ num: u16,
+ /// The encoding format being parsed.
+ encoding: Encoding,
+ /// The range of bytes that were parsed to consume the header of the
+ /// module or component. Note that this range is relative to the start
+ /// of the byte stream.
+ range: Range<usize>,
+ },
+
+ /// A module type section was received and the provided reader can be
+ /// used to parse the contents of the type section.
+ TypeSection(TypeSectionReader<'a>),
+ /// A module import section was received and the provided reader can be
+ /// used to parse the contents of the import section.
+ ImportSection(ImportSectionReader<'a>),
+ /// A module function section was received and the provided reader can be
+ /// used to parse the contents of the function section.
+ FunctionSection(FunctionSectionReader<'a>),
+ /// A module table section was received and the provided reader can be
+ /// used to parse the contents of the table section.
+ TableSection(TableSectionReader<'a>),
+ /// A module memory section was received and the provided reader can be
+ /// used to parse the contents of the memory section.
+ MemorySection(MemorySectionReader<'a>),
+ /// A module tag section was received, and the provided reader can be
+ /// used to parse the contents of the tag section.
+ TagSection(TagSectionReader<'a>),
+ /// A module global section was received and the provided reader can be
+ /// used to parse the contents of the global section.
+ GlobalSection(GlobalSectionReader<'a>),
+ /// A module export section was received, and the provided reader can be
+ /// used to parse the contents of the export section.
+ ExportSection(ExportSectionReader<'a>),
+ /// A module start section was received.
+ StartSection {
+ /// The start function index
+ func: u32,
+ /// The range of bytes that specify the `func` field, specified in
+ /// offsets relative to the start of the byte stream.
+ range: Range<usize>,
+ },
+ /// A module element section was received and the provided reader can be
+ /// used to parse the contents of the element section.
+ ElementSection(ElementSectionReader<'a>),
+ /// A module data count section was received.
+ DataCountSection {
+ /// The number of data segments.
+ count: u32,
+ /// The range of bytes that specify the `count` field, specified in
+ /// offsets relative to the start of the byte stream.
+ range: Range<usize>,
+ },
+ /// A module data section was received and the provided reader can be
+ /// used to parse the contents of the data section.
+ DataSection(DataSectionReader<'a>),
+ /// Indicator of the start of the code section of a WebAssembly module.
+ ///
+ /// This entry is returned whenever the code section starts. The `count`
+ /// field indicates how many entries are in this code section. After
+ /// receiving this start marker you're guaranteed that the next `count`
+ /// payloads will be `CodeSectionEntry` items, or an error will be returned.
+ ///
+ /// This, unlike other sections, is intended to be used for streaming the
+ /// contents of the code section. The code section is not required to be
+ /// fully resident in memory when we parse it. Instead a [`Parser`] is
+ /// capable of parsing piece-by-piece of a code section.
+ CodeSectionStart {
+ /// The number of functions in this section.
+ count: u32,
+ /// The range of bytes that represent this section, specified in
+ /// offsets relative to the start of the byte stream.
+ range: Range<usize>,
+ /// The size, in bytes, of the remaining contents of this section.
+ ///
+ /// This can be used in combination with [`Parser::skip_section`]
+ /// where the caller will know how many bytes to skip before feeding
+ /// bytes into `Parser` again.
+ size: u32,
+ },
+ /// An entry of the code section, a function, was parsed from a WebAssembly
+ /// module.
+ ///
+ /// This entry indicates that a function was successfully received from the
+ /// code section, and the payload here is the window into the original input
+ /// where the function resides. Note that the function itself has not been
+ /// parsed; it has only been outlined. You'll need to process the
+ /// `FunctionBody` provided to test whether it parses and/or is valid.
+ CodeSectionEntry(FunctionBody<'a>),
+
+ /// A core module section was received and the provided parser can be
+ /// used to parse the nested module.
+ ///
+ /// This variant is special in that it returns a sub-`Parser`. Upon
+ /// receiving a `ModuleSection` it is expected that the returned
+ /// `Parser` will be used instead of the parent `Parser` until the parse has
+ /// finished. You'll need to feed data into the `Parser` returned until it
+ /// returns `Payload::End`. After that you'll switch back to the parent
+ /// parser to resume parsing the rest of the current component.
+ ///
+ /// Note that binaries will not be parsed correctly if you feed the data for
+ /// a nested module into the parent [`Parser`].
+ ModuleSection {
+ /// The parser for the nested module.
+ parser: Parser,
+ /// The range of bytes that represent the nested module in the
+ /// original byte stream.
+ range: Range<usize>,
+ },
+ /// A core instance section was received and the provided parser can be
+ /// used to parse the contents of the core instance section.
+ ///
+ /// Currently this section is only parsed in a component.
+ InstanceSection(InstanceSectionReader<'a>),
+ /// A core type section was received and the provided parser can be
+ /// used to parse the contents of the core type section.
+ ///
+ /// Currently this section is only parsed in a component.
+ CoreTypeSection(CoreTypeSectionReader<'a>),
+ /// A component section from a WebAssembly component was received and the
+ /// provided parser can be used to parse the nested component.
+ ///
+ /// This variant is special in that it returns a sub-`Parser`. Upon
+ /// receiving a `ComponentSection` it is expected that the returned
+ /// `Parser` will be used instead of the parent `Parser` until the parse has
+ /// finished. You'll need to feed data into the `Parser` returned until it
+ /// returns `Payload::End`. After that you'll switch back to the parent
+ /// parser to resume parsing the rest of the current component.
+ ///
+ /// Note that binaries will not be parsed correctly if you feed the data for
+ /// a nested component into the parent [`Parser`].
+ ComponentSection {
+ /// The parser for the nested component.
+ parser: Parser,
+ /// The range of bytes that represent the nested component in the
+ /// original byte stream.
+ range: Range<usize>,
+ },
+ /// A component instance section was received and the provided reader can be
+ /// used to parse the contents of the component instance section.
+ ComponentInstanceSection(ComponentInstanceSectionReader<'a>),
+ /// A component alias section was received and the provided reader can be
+ /// used to parse the contents of the component alias section.
+ ComponentAliasSection(SectionLimited<'a, crate::ComponentAlias<'a>>),
+ /// A component type section was received and the provided reader can be
+ /// used to parse the contents of the component type section.
+ ComponentTypeSection(ComponentTypeSectionReader<'a>),
+ /// A component canonical section was received and the provided reader can be
+ /// used to parse the contents of the component canonical section.
+ ComponentCanonicalSection(ComponentCanonicalSectionReader<'a>),
+ /// A component start section was received.
+ ComponentStartSection {
+ /// The start function description.
+ start: ComponentStartFunction,
+ /// The range of bytes that specify the `start` field.
+ range: Range<usize>,
+ },
+ /// A component import section was received and the provided reader can be
+ /// used to parse the contents of the component import section.
+ ComponentImportSection(ComponentImportSectionReader<'a>),
+ /// A component export section was received, and the provided reader can be
+ /// used to parse the contents of the component export section.
+ ComponentExportSection(ComponentExportSectionReader<'a>),
+
+ /// A module or component custom section was received.
+ CustomSection(CustomSectionReader<'a>),
+
+ /// An unknown section was found.
+ ///
+ /// This variant is returned for all unknown sections encountered. Consumers
+ /// of the parser will most likely want to treat this as an error, but it can
+ /// also be used to handle sections that the parser does not currently
+ /// support.
+ UnknownSection {
+ /// The 8-bit identifier for this section.
+ id: u8,
+ /// The contents of this section.
+ contents: &'a [u8],
+ /// The range of bytes, relative to the start of the original data
+ /// stream, that the contents of this section reside in.
+ range: Range<usize>,
+ },
+
+ /// The end of the WebAssembly module or component was reached.
+ ///
+ /// The value is the offset in the input byte stream where the end
+ /// was reached.
+ End(usize),
+}
+
+const CUSTOM_SECTION: u8 = 0;
+const TYPE_SECTION: u8 = 1;
+const IMPORT_SECTION: u8 = 2;
+const FUNCTION_SECTION: u8 = 3;
+const TABLE_SECTION: u8 = 4;
+const MEMORY_SECTION: u8 = 5;
+const GLOBAL_SECTION: u8 = 6;
+const EXPORT_SECTION: u8 = 7;
+const START_SECTION: u8 = 8;
+const ELEMENT_SECTION: u8 = 9;
+const CODE_SECTION: u8 = 10;
+const DATA_SECTION: u8 = 11;
+const DATA_COUNT_SECTION: u8 = 12;
+const TAG_SECTION: u8 = 13;
+
+const COMPONENT_MODULE_SECTION: u8 = 1;
+const COMPONENT_CORE_INSTANCE_SECTION: u8 = 2;
+const COMPONENT_CORE_TYPE_SECTION: u8 = 3;
+const COMPONENT_SECTION: u8 = 4;
+const COMPONENT_INSTANCE_SECTION: u8 = 5;
+const COMPONENT_ALIAS_SECTION: u8 = 6;
+const COMPONENT_TYPE_SECTION: u8 = 7;
+const COMPONENT_CANONICAL_SECTION: u8 = 8;
+const COMPONENT_START_SECTION: u8 = 9;
+const COMPONENT_IMPORT_SECTION: u8 = 10;
+const COMPONENT_EXPORT_SECTION: u8 = 11;
+
+impl Parser {
+ /// Creates a new parser.
+ ///
+ /// Reports errors and ranges relative to `offset` provided, where `offset`
+ /// is some logical offset within the input stream that we're parsing.
+ pub fn new(offset: u64) -> Parser {
+ Parser {
+ state: State::Header,
+ offset,
+ max_size: u64::MAX,
+ // Assume the encoding is a module until we know otherwise
+ encoding: Encoding::Module,
+ }
+ }
+
+ /// Attempts to parse a chunk of data.
+ ///
+ /// This method will attempt to parse the next incremental portion of a
+ /// WebAssembly binary. Data available for the module or component is
+ /// provided as `data`, and the data can be incomplete if more data has yet
+ /// to arrive. The `eof` flag indicates whether more data will ever be received.
+ ///
+ /// There are two ways parsing can succeed with this method:
+ ///
+ /// * `Chunk::NeedMoreData` - this indicates that there are not enough bytes
+ /// in `data` to parse a payload. The caller needs to wait for more data to
+ /// be available in this situation before calling this method again. It is
+ /// guaranteed that this is only returned if `eof` is `false`.
+ ///
+ /// * `Chunk::Parsed` - this indicates that a chunk of the input was
+ /// successfully parsed. This variant carries the payload that was parsed
+ /// along with how many bytes of `data` were consumed. It's expected that
+ /// the caller will not provide these bytes
+ /// back to the [`Parser`] again.
+ ///
+ /// Note that all `Chunk` return values are tied, via a lifetime, to the
+ /// input buffer: each successfully parsed chunk borrows the input and is a
+ /// view into it.
+ ///
+ /// It is expected that you'll call this method until `Payload::End` is
+ /// reached, at which point you're guaranteed that the parse has completed.
+ /// Note that complete parsing, for the top-level module or component,
+ /// implies that `data` is empty and `eof` is `true`.
+ ///
+ /// # Errors
+ ///
+ /// Parse errors are returned as an `Err`. Errors can happen when the
+ /// structure of the data is unexpected or if sections are too large for
+ /// example. Note that errors are not returned for malformed *contents* of
+ /// sections here. Sections are generally not individually parsed and each
+ /// returned [`Payload`] needs to be iterated over further to detect all
+ /// errors.
+ ///
+ /// # Examples
+ ///
+ /// An example of reading a wasm file from a stream (`std::io::Read`) and
+ /// incrementally parsing it.
+ ///
+ /// ```
+ /// use std::io::Read;
+ /// use anyhow::Result;
+ /// use wasmparser::{Parser, Chunk, Payload::*};
+ ///
+ /// fn parse(mut reader: impl Read) -> Result<()> {
+ /// let mut buf = Vec::new();
+ /// let mut parser = Parser::new(0);
+ /// let mut eof = false;
+ /// let mut stack = Vec::new();
+ ///
+ /// loop {
+ /// let (payload, consumed) = match parser.parse(&buf, eof)? {
+ /// Chunk::NeedMoreData(hint) => {
+ /// assert!(!eof); // otherwise an error would be returned
+ ///
+ /// // Use the hint to preallocate more space, then read
+ /// // some more data into our buffer.
+ /// //
+ /// // Note that the buffer management here is not ideal,
+ /// // but it's compact enough to fit in an example!
+ /// let len = buf.len();
+ /// buf.extend((0..hint).map(|_| 0u8));
+ /// let n = reader.read(&mut buf[len..])?;
+ /// buf.truncate(len + n);
+ /// eof = n == 0;
+ /// continue;
+ /// }
+ ///
+ /// Chunk::Parsed { consumed, payload } => (payload, consumed),
+ /// };
+ ///
+ /// match payload {
+ /// // Sections for WebAssembly modules
+ /// Version { .. } => { /* ... */ }
+ /// TypeSection(_) => { /* ... */ }
+ /// ImportSection(_) => { /* ... */ }
+ /// FunctionSection(_) => { /* ... */ }
+ /// TableSection(_) => { /* ... */ }
+ /// MemorySection(_) => { /* ... */ }
+ /// TagSection(_) => { /* ... */ }
+ /// GlobalSection(_) => { /* ... */ }
+ /// ExportSection(_) => { /* ... */ }
+ /// StartSection { .. } => { /* ... */ }
+ /// ElementSection(_) => { /* ... */ }
+ /// DataCountSection { .. } => { /* ... */ }
+ /// DataSection(_) => { /* ... */ }
+ ///
+ /// // Here we know how many functions we'll be receiving as
+ /// // `CodeSectionEntry`, so we can prepare for that, and
+ /// // afterwards we can parse and handle each function
+ /// // individually.
+ /// CodeSectionStart { .. } => { /* ... */ }
+ /// CodeSectionEntry(body) => {
+ /// // here we can iterate over `body` to parse the function
+ /// // and its locals
+ /// }
+ ///
+ /// // Sections for WebAssembly components
+ /// ModuleSection { .. } => { /* ... */ }
+ /// InstanceSection(_) => { /* ... */ }
+ /// CoreTypeSection(_) => { /* ... */ }
+ /// ComponentSection { .. } => { /* ... */ }
+ /// ComponentInstanceSection(_) => { /* ... */ }
+ /// ComponentAliasSection(_) => { /* ... */ }
+ /// ComponentTypeSection(_) => { /* ... */ }
+ /// ComponentCanonicalSection(_) => { /* ... */ }
+ /// ComponentStartSection { .. } => { /* ... */ }
+ /// ComponentImportSection(_) => { /* ... */ }
+ /// ComponentExportSection(_) => { /* ... */ }
+ ///
+ /// CustomSection(_) => { /* ... */ }
+ ///
+ /// // most likely you'd return an error here
+ /// UnknownSection { id, .. } => { /* ... */ }
+ ///
+ /// // Once we've reached the end of a parser we either resume
+ /// // at the parent parser or we break out of the loop because
+ /// // we're done.
+ /// End(_) => {
+ /// if let Some(parent_parser) = stack.pop() {
+ /// parser = parent_parser;
+ /// } else {
+ /// break;
+ /// }
+ /// }
+ /// }
+ ///
+ /// // once we're done processing the payload we can forget the
+ /// // original.
+ /// buf.drain(..consumed);
+ /// }
+ ///
+ /// Ok(())
+ /// }
+ ///
+ /// # parse(&b"\0asm\x01\0\0\0"[..]).unwrap();
+ /// ```
+ pub fn parse<'a>(&mut self, data: &'a [u8], eof: bool) -> Result<Chunk<'a>> {
+ let (data, eof) = if usize_to_u64(data.len()) > self.max_size {
+ (&data[..(self.max_size as usize)], true)
+ } else {
+ (data, eof)
+ };
+ // TODO: thread through `offset: u64` to `BinaryReader`, remove
+ // the cast here.
+ let mut reader = BinaryReader::new_with_offset(data, self.offset as usize);
+ match self.parse_reader(&mut reader, eof) {
+ Ok(payload) => {
+ // Be sure to update our offset with how far we got in the
+ // reader
+ self.offset += usize_to_u64(reader.position);
+ self.max_size -= usize_to_u64(reader.position);
+ Ok(Chunk::Parsed {
+ consumed: reader.position,
+ payload,
+ })
+ }
+ Err(e) => {
+ // If we're at EOF then there's no way we can recover from any
+ // error, so continue to propagate it.
+ if eof {
+ return Err(e);
+ }
+
+ // If our error doesn't look like it can be resolved with more
+ // data being pulled down, then propagate it, otherwise switch
+ // the error to "feed me please"
+ match e.inner.needed_hint {
+ Some(hint) => Ok(Chunk::NeedMoreData(usize_to_u64(hint))),
+ None => Err(e),
+ }
+ }
+ }
+ }
+
+ fn parse_reader<'a>(
+ &mut self,
+ reader: &mut BinaryReader<'a>,
+ eof: bool,
+ ) -> Result<Payload<'a>> {
+ use Payload::*;
+
+ match self.state {
+ State::Header => {
+ const KIND_MODULE: u16 = 0x00;
+ const KIND_COMPONENT: u16 = 0x01;
+
+ let start = reader.original_position();
+ let header_version = reader.read_header_version()?;
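+ // The 4-byte field following the `\0asm` magic encodes the version
+ // number in its low 16 bits and the layer in its high 16 bits
+ // (0 = core module, 1 = component).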
+ self.encoding = match (header_version >> 16) as u16 {
+ KIND_MODULE => Encoding::Module,
+ KIND_COMPONENT => Encoding::Component,
+ _ => bail!(start + 4, "unknown binary version: {header_version:#10x}"),
+ };
+ let num = header_version as u16;
+ self.state = State::SectionStart;
+ Ok(Version {
+ num,
+ encoding: self.encoding,
+ range: start..reader.original_position(),
+ })
+ }
+ State::SectionStart => {
+ // If we're at eof and there are no bytes in our buffer, then
+ // that means we reached the end of the data since it's
+ // just a bunch of sections concatenated after the header.
+ if eof && reader.bytes_remaining() == 0 {
+ return Ok(Payload::End(reader.original_position()));
+ }
+
+ let id_pos = reader.position;
+ let id = reader.read_u8()?;
+ if id & 0x80 != 0 {
+ return Err(BinaryReaderError::new("malformed section id", id_pos));
+ }
+ let len_pos = reader.original_position();
+ let mut len = reader.read_var_u32()?;
+
+ // Test to make sure that this section actually fits within
+ // `Parser::max_size`. This doesn't matter for top-level modules
+ // but it is required for nested modules/components to correctly ensure
+ // that all sections live entirely within their section of the
+ // file.
+ let section_overflow = self
+ .max_size
+ .checked_sub(usize_to_u64(reader.position))
+ .and_then(|s| s.checked_sub(len.into()))
+ .is_none();
+ if section_overflow {
+ return Err(BinaryReaderError::new("section too large", len_pos));
+ }
+
+ match (self.encoding, id) {
+ // Sections for both modules and components.
+ (_, 0) => section(reader, len, CustomSectionReader::new, CustomSection),
+
+ // Module sections
+ (Encoding::Module, TYPE_SECTION) => {
+ section(reader, len, TypeSectionReader::new, TypeSection)
+ }
+ (Encoding::Module, IMPORT_SECTION) => {
+ section(reader, len, ImportSectionReader::new, ImportSection)
+ }
+ (Encoding::Module, FUNCTION_SECTION) => {
+ section(reader, len, FunctionSectionReader::new, FunctionSection)
+ }
+ (Encoding::Module, TABLE_SECTION) => {
+ section(reader, len, TableSectionReader::new, TableSection)
+ }
+ (Encoding::Module, MEMORY_SECTION) => {
+ section(reader, len, MemorySectionReader::new, MemorySection)
+ }
+ (Encoding::Module, GLOBAL_SECTION) => {
+ section(reader, len, GlobalSectionReader::new, GlobalSection)
+ }
+ (Encoding::Module, EXPORT_SECTION) => {
+ section(reader, len, ExportSectionReader::new, ExportSection)
+ }
+ (Encoding::Module, START_SECTION) => {
+ let (func, range) = single_item(reader, len, "start")?;
+ Ok(StartSection { func, range })
+ }
+ (Encoding::Module, ELEMENT_SECTION) => {
+ section(reader, len, ElementSectionReader::new, ElementSection)
+ }
+ (Encoding::Module, CODE_SECTION) => {
+ let start = reader.original_position();
+ let count = delimited(reader, &mut len, |r| r.read_var_u32())?;
+ let range = start..reader.original_position() + len as usize;
+ self.state = State::FunctionBody {
+ remaining: count,
+ len,
+ };
+ Ok(CodeSectionStart {
+ count,
+ range,
+ size: len,
+ })
+ }
+ (Encoding::Module, DATA_SECTION) => {
+ section(reader, len, DataSectionReader::new, DataSection)
+ }
+ (Encoding::Module, DATA_COUNT_SECTION) => {
+ let (count, range) = single_item(reader, len, "data count")?;
+ Ok(DataCountSection { count, range })
+ }
+ (Encoding::Module, TAG_SECTION) => {
+ section(reader, len, TagSectionReader::new, TagSection)
+ }
+
+ // Component sections
+ (Encoding::Component, COMPONENT_MODULE_SECTION)
+ | (Encoding::Component, COMPONENT_SECTION) => {
+ if len as usize > MAX_WASM_MODULE_SIZE {
+ bail!(
+ len_pos,
+ "{} section is too large",
+ if id == 1 { "module" } else { "component" }
+ );
+ }
+
+ let range =
+ reader.original_position()..reader.original_position() + len as usize;
+ self.max_size -= u64::from(len);
+ self.offset += u64::from(len);
+ let mut parser = Parser::new(usize_to_u64(reader.original_position()));
+ parser.max_size = len.into();
+
+ Ok(match id {
+ 1 => ModuleSection { parser, range },
+ 4 => ComponentSection { parser, range },
+ _ => unreachable!(),
+ })
+ }
+ (Encoding::Component, COMPONENT_CORE_INSTANCE_SECTION) => {
+ section(reader, len, InstanceSectionReader::new, InstanceSection)
+ }
+ (Encoding::Component, COMPONENT_CORE_TYPE_SECTION) => {
+ section(reader, len, CoreTypeSectionReader::new, CoreTypeSection)
+ }
+ (Encoding::Component, COMPONENT_INSTANCE_SECTION) => section(
+ reader,
+ len,
+ ComponentInstanceSectionReader::new,
+ ComponentInstanceSection,
+ ),
+ (Encoding::Component, COMPONENT_ALIAS_SECTION) => {
+ section(reader, len, SectionLimited::new, ComponentAliasSection)
+ }
+ (Encoding::Component, COMPONENT_TYPE_SECTION) => section(
+ reader,
+ len,
+ ComponentTypeSectionReader::new,
+ ComponentTypeSection,
+ ),
+ (Encoding::Component, COMPONENT_CANONICAL_SECTION) => section(
+ reader,
+ len,
+ ComponentCanonicalSectionReader::new,
+ ComponentCanonicalSection,
+ ),
+ (Encoding::Component, COMPONENT_START_SECTION) => {
+ let (start, range) = single_item(reader, len, "component start")?;
+ Ok(ComponentStartSection { start, range })
+ }
+ (Encoding::Component, COMPONENT_IMPORT_SECTION) => section(
+ reader,
+ len,
+ ComponentImportSectionReader::new,
+ ComponentImportSection,
+ ),
+ (Encoding::Component, COMPONENT_EXPORT_SECTION) => section(
+ reader,
+ len,
+ ComponentExportSectionReader::new,
+ ComponentExportSection,
+ ),
+ (_, id) => {
+ let offset = reader.original_position();
+ let contents = reader.read_bytes(len as usize)?;
+ let range = offset..offset + len as usize;
+ Ok(UnknownSection {
+ id,
+ contents,
+ range,
+ })
+ }
+ }
+ }
+
+ // Once we hit 0 remaining incrementally parsed items, with 0
+ // remaining bytes in each section, we're done and can switch back
+ // to parsing sections.
+ State::FunctionBody {
+ remaining: 0,
+ len: 0,
+ } => {
+ self.state = State::SectionStart;
+ self.parse_reader(reader, eof)
+ }
+
+ // ... otherwise trailing bytes with no remaining entries in these
+ // sections indicate an error.
+ State::FunctionBody { remaining: 0, len } => {
+ debug_assert!(len > 0);
+ let offset = reader.original_position();
+ Err(BinaryReaderError::new(
+ "trailing bytes at end of section",
+ offset,
+ ))
+ }
+
+ // Functions are relatively easy to parse when we know there's at
+ // least one remaining and at least one byte available to read
+ // things.
+ //
+ // We use the remaining length to try to read a u32 size of the
+ // function, and using that size we require the entire function to be
+ // resident in memory. This means that we're reading whole chunks of
+ // functions at a time.
+ //
+ // Limiting via `Parser::max_size` (nested parsing) happens above in
+ // `fn parse`, and limiting by our section size happens via
+ // `delimited`. Actual parsing of the function body is delegated to
+ // the caller to iterate over the `FunctionBody` structure.
+ State::FunctionBody { remaining, mut len } => {
+ let body = delimited(reader, &mut len, |r| {
+ let size = r.read_var_u32()?;
+ let offset = r.original_position();
+ Ok(FunctionBody::new(offset, r.read_bytes(size as usize)?))
+ })?;
+ self.state = State::FunctionBody {
+ remaining: remaining - 1,
+ len,
+ };
+ Ok(CodeSectionEntry(body))
+ }
+ }
+ }
+
+ /// Convenience function that can be used to parse a module or component
+ /// that is entirely resident in memory.
+ ///
+ /// This function will parse the `data` provided as a WebAssembly module
+ /// or component.
+ ///
+ /// Note that when this function yields sections that provide parsers,
+ /// no further action is required for those sections as payloads from
+ /// those parsers will be automatically returned.
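+ ///
+ /// # Examples
+ ///
+ /// An illustrative sketch (not part of the upstream documentation) that
+ /// parses just the 8-byte module header, which yields `Version` followed
+ /// by `End`:
+ ///
+ /// ```
+ /// use wasmparser::{Parser, Payload, Result};
+ ///
+ /// let payloads = Parser::new(0)
+ ///     .parse_all(b"\0asm\x01\0\0\0")
+ ///     .collect::<Result<Vec<_>>>()
+ ///     .unwrap();
+ /// // A header-only module produces exactly two payloads.
+ /// assert_eq!(payloads.len(), 2);
+ /// assert!(matches!(payloads[0], Payload::Version { .. }));
+ /// assert!(matches!(payloads[1], Payload::End(_)));
+ /// ```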
+ pub fn parse_all(self, mut data: &[u8]) -> impl Iterator<Item = Result<Payload>> + '_ {
+ let mut stack = Vec::new();
+ let mut cur = self;
+ let mut done = false;
+ iter::from_fn(move || {
+ if done {
+ return None;
+ }
+ let payload = match cur.parse(data, true) {
+ // Propagate all errors
+ Err(e) => {
+ done = true;
+ return Some(Err(e));
+ }
+
+ // This isn't possible because `eof` is always true.
+ Ok(Chunk::NeedMoreData(_)) => unreachable!(),
+
+ Ok(Chunk::Parsed { payload, consumed }) => {
+ data = &data[consumed..];
+ payload
+ }
+ };
+
+ match &payload {
+ Payload::ModuleSection { parser, .. }
+ | Payload::ComponentSection { parser, .. } => {
+ stack.push(cur.clone());
+ cur = parser.clone();
+ }
+ Payload::End(_) => match stack.pop() {
+ Some(p) => cur = p,
+ None => done = true,
+ },
+
+ _ => {}
+ }
+
+ Some(Ok(payload))
+ })
+ }
+
+ /// Skip parsing the code section entirely.
+ ///
+ /// This function can be used to indicate, after receiving
+ /// `CodeSectionStart`, that the section will not be parsed.
+ ///
+ /// The caller will be responsible for skipping `size` bytes (found in the
+ /// `CodeSectionStart` payload). Bytes should only be fed into `parse`
+ /// after the `size` bytes have been skipped.
+ ///
+ /// # Panics
+ ///
+ /// This function will panic if the parser is not in a state where it's
+ /// parsing the code section.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use wasmparser::{Result, Parser, Chunk, Payload::*};
+ /// use std::ops::Range;
+ ///
+ /// fn objdump_headers(mut wasm: &[u8]) -> Result<()> {
+ /// let mut parser = Parser::new(0);
+ /// loop {
+ /// let payload = match parser.parse(wasm, true)? {
+ /// Chunk::Parsed { consumed, payload } => {
+ /// wasm = &wasm[consumed..];
+ /// payload
+ /// }
+ /// // this state isn't possible with `eof = true`
+ /// Chunk::NeedMoreData(_) => unreachable!(),
+ /// };
+ /// match payload {
+ /// TypeSection(s) => print_range("type section", &s.range()),
+ /// ImportSection(s) => print_range("import section", &s.range()),
+ /// // .. other sections
+ ///
+ /// // Print the range of the code section we see, but don't
+ /// // actually iterate over each individual function.
+ /// CodeSectionStart { range, size, .. } => {
+ /// print_range("code section", &range);
+ /// parser.skip_section();
+ /// wasm = &wasm[size as usize..];
+ /// }
+ /// End(_) => break,
+ /// _ => {}
+ /// }
+ /// }
+ /// Ok(())
+ /// }
+ ///
+ /// fn print_range(section: &str, range: &Range<usize>) {
+ /// println!("{:>40}: {:#010x} - {:#010x}", section, range.start, range.end);
+ /// }
+ /// ```
+ pub fn skip_section(&mut self) {
+ let skip = match self.state {
+ State::FunctionBody { remaining: _, len } => len,
+ _ => panic!("wrong state to call `skip_section`"),
+ };
+ self.offset += u64::from(skip);
+ self.max_size -= u64::from(skip);
+ self.state = State::SectionStart;
+ }
+}
+
+fn usize_to_u64(a: usize) -> u64 {
+ a.try_into().unwrap()
+}
+
+/// Parses an entire section resident in memory into a `Payload`.
+///
+/// Requires that `len` bytes are resident in `reader` and uses `ctor`/`variant`
+/// to construct the section to return.
+fn section<'a, T>(
+ reader: &mut BinaryReader<'a>,
+ len: u32,
+ ctor: fn(&'a [u8], usize) -> Result<T>,
+ variant: fn(T) -> Payload<'a>,
+) -> Result<Payload<'a>> {
+ let offset = reader.original_position();
+ let payload = reader.read_bytes(len as usize)?;
+ // clear the hint for "need this many more bytes" here because we already
+ // read all the bytes, so it's not possible to read more bytes if this
+ // fails.
+ let reader = ctor(payload, offset).map_err(clear_hint)?;
+ Ok(variant(reader))
+}
+
+/// Reads a section that is expected to contain exactly one item, such as the
+/// start, data count, or component start sections.
+fn single_item<'a, T>(
+ reader: &mut BinaryReader<'a>,
+ len: u32,
+ desc: &str,
+) -> Result<(T, Range<usize>)>
+where
+ T: FromReader<'a>,
+{
+ let range = reader.original_position()..reader.original_position() + len as usize;
+ let mut content = BinaryReader::new_with_offset(reader.read_bytes(len as usize)?, range.start);
+ // We can't recover from "unexpected eof" here because our entire section is
+ // already resident in memory, so clear the hint for how many more bytes are
+ // expected.
+ let ret = content.read().map_err(clear_hint)?;
+ if !content.eof() {
+ bail!(
+ content.original_position(),
+ "unexpected content in the {desc} section",
+ );
+ }
+ Ok((ret, range))
+}
+
+/// Attempts to parse using `f`.
+///
+/// This will update `*len` with the number of bytes consumed, and it will
+/// return a failure if the number of bytes consumed exceeds what `*len`
+/// currently is.
+fn delimited<'a, T>(
+ reader: &mut BinaryReader<'a>,
+ len: &mut u32,
+ f: impl FnOnce(&mut BinaryReader<'a>) -> Result<T>,
+) -> Result<T> {
+ let start = reader.position;
+ let ret = f(reader)?;
+ *len = match (reader.position - start)
+ .try_into()
+ .ok()
+ .and_then(|i| len.checked_sub(i))
+ {
+ Some(i) => i,
+ None => return Err(BinaryReaderError::new("unexpected end-of-file", start)),
+ };
+ Ok(ret)
+}
+
+impl Default for Parser {
+ fn default() -> Parser {
+ Parser::new(0)
+ }
+}
+
+impl Payload<'_> {
+ /// If this `Payload` represents a section in the original wasm module then
+ /// the section's id and range within the original wasm binary are returned.
+ ///
+ /// Not all payloads refer to entire sections, such as the `Version` and
+ /// `CodeSectionEntry` variants. These variants will return `None` from this
+ /// function.
+ ///
+ /// Otherwise this function will return `Some` where the first element is
+ /// the byte identifier for the section and the second element is the range
+ /// of the contents of the section within the original wasm binary.
+ ///
+ /// The purpose of this method is to enable tools to easily iterate over
+ /// entire sections if necessary and handle sections uniformly, for example
+ /// dropping custom sections while preserving all other sections.
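+ ///
+ /// # Examples
+ ///
+ /// A small illustration (not from the upstream docs) that collects the id
+ /// of every section-level payload, skipping payloads such as `Version`
+ /// that return `None`:
+ ///
+ /// ```
+ /// use wasmparser::{Parser, Result};
+ ///
+ /// fn section_ids(wasm: &[u8]) -> Result<Vec<u8>> {
+ ///     Parser::new(0)
+ ///         .parse_all(wasm)
+ ///         .filter_map(|payload| match payload {
+ ///             Ok(payload) => payload.as_section().map(|(id, _range)| Ok(id)),
+ ///             Err(e) => Some(Err(e)),
+ ///         })
+ ///         .collect()
+ /// }
+ ///
+ /// // A header-only module contains no sections at all.
+ /// assert!(section_ids(b"\0asm\x01\0\0\0").unwrap().is_empty());
+ /// ```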
+ pub fn as_section(&self) -> Option<(u8, Range<usize>)> {
+ use Payload::*;
+
+ match self {
+ Version { .. } => None,
+ TypeSection(s) => Some((TYPE_SECTION, s.range())),
+ ImportSection(s) => Some((IMPORT_SECTION, s.range())),
+ FunctionSection(s) => Some((FUNCTION_SECTION, s.range())),
+ TableSection(s) => Some((TABLE_SECTION, s.range())),
+ MemorySection(s) => Some((MEMORY_SECTION, s.range())),
+ TagSection(s) => Some((TAG_SECTION, s.range())),
+ GlobalSection(s) => Some((GLOBAL_SECTION, s.range())),
+ ExportSection(s) => Some((EXPORT_SECTION, s.range())),
+ ElementSection(s) => Some((ELEMENT_SECTION, s.range())),
+ DataSection(s) => Some((DATA_SECTION, s.range())),
+ StartSection { range, .. } => Some((START_SECTION, range.clone())),
+ DataCountSection { range, .. } => Some((DATA_COUNT_SECTION, range.clone())),
+ CodeSectionStart { range, .. } => Some((CODE_SECTION, range.clone())),
+ CodeSectionEntry(_) => None,
+
+ ModuleSection { range, .. } => Some((COMPONENT_MODULE_SECTION, range.clone())),
+ InstanceSection(s) => Some((COMPONENT_CORE_INSTANCE_SECTION, s.range())),
+ CoreTypeSection(s) => Some((COMPONENT_CORE_TYPE_SECTION, s.range())),
+ ComponentSection { range, .. } => Some((COMPONENT_SECTION, range.clone())),
+ ComponentInstanceSection(s) => Some((COMPONENT_INSTANCE_SECTION, s.range())),
+ ComponentAliasSection(s) => Some((COMPONENT_ALIAS_SECTION, s.range())),
+ ComponentTypeSection(s) => Some((COMPONENT_TYPE_SECTION, s.range())),
+ ComponentCanonicalSection(s) => Some((COMPONENT_CANONICAL_SECTION, s.range())),
+ ComponentStartSection { range, .. } => Some((COMPONENT_START_SECTION, range.clone())),
+ ComponentImportSection(s) => Some((COMPONENT_IMPORT_SECTION, s.range())),
+ ComponentExportSection(s) => Some((COMPONENT_EXPORT_SECTION, s.range())),
+
+ CustomSection(c) => Some((CUSTOM_SECTION, c.range())),
+
+ UnknownSection { id, range, .. } => Some((*id, range.clone())),
+
+ End(_) => None,
+ }
+ }
+}
+
+impl fmt::Debug for Payload<'_> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ use Payload::*;
+ match self {
+ Version {
+ num,
+ encoding,
+ range,
+ } => f
+ .debug_struct("Version")
+ .field("num", num)
+ .field("encoding", encoding)
+ .field("range", range)
+ .finish(),
+
+ // Module sections
+ TypeSection(_) => f.debug_tuple("TypeSection").field(&"...").finish(),
+ ImportSection(_) => f.debug_tuple("ImportSection").field(&"...").finish(),
+ FunctionSection(_) => f.debug_tuple("FunctionSection").field(&"...").finish(),
+ TableSection(_) => f.debug_tuple("TableSection").field(&"...").finish(),
+ MemorySection(_) => f.debug_tuple("MemorySection").field(&"...").finish(),
+ TagSection(_) => f.debug_tuple("TagSection").field(&"...").finish(),
+ GlobalSection(_) => f.debug_tuple("GlobalSection").field(&"...").finish(),
+ ExportSection(_) => f.debug_tuple("ExportSection").field(&"...").finish(),
+ ElementSection(_) => f.debug_tuple("ElementSection").field(&"...").finish(),
+ DataSection(_) => f.debug_tuple("DataSection").field(&"...").finish(),
+ StartSection { func, range } => f
+ .debug_struct("StartSection")
+ .field("func", func)
+ .field("range", range)
+ .finish(),
+ DataCountSection { count, range } => f
+ .debug_struct("DataCountSection")
+ .field("count", count)
+ .field("range", range)
+ .finish(),
+ CodeSectionStart { count, range, size } => f
+ .debug_struct("CodeSectionStart")
+ .field("count", count)
+ .field("range", range)
+ .field("size", size)
+ .finish(),
+ CodeSectionEntry(_) => f.debug_tuple("CodeSectionEntry").field(&"...").finish(),
+
+ // Component sections
+ ModuleSection { parser: _, range } => f
+ .debug_struct("ModuleSection")
+ .field("range", range)
+ .finish(),
+ InstanceSection(_) => f.debug_tuple("InstanceSection").field(&"...").finish(),
+ CoreTypeSection(_) => f.debug_tuple("CoreTypeSection").field(&"...").finish(),
+ ComponentSection { parser: _, range } => f
+ .debug_struct("ComponentSection")
+ .field("range", range)
+ .finish(),
+ ComponentInstanceSection(_) => f
+ .debug_tuple("ComponentInstanceSection")
+ .field(&"...")
+ .finish(),
+ ComponentAliasSection(_) => f
+ .debug_tuple("ComponentAliasSection")
+ .field(&"...")
+ .finish(),
+ ComponentTypeSection(_) => f.debug_tuple("ComponentTypeSection").field(&"...").finish(),
+ ComponentCanonicalSection(_) => f
+ .debug_tuple("ComponentCanonicalSection")
+ .field(&"...")
+ .finish(),
+ ComponentStartSection { .. } => f
+ .debug_tuple("ComponentStartSection")
+ .field(&"...")
+ .finish(),
+ ComponentImportSection(_) => f
+ .debug_tuple("ComponentImportSection")
+ .field(&"...")
+ .finish(),
+ ComponentExportSection(_) => f
+ .debug_tuple("ComponentExportSection")
+ .field(&"...")
+ .finish(),
+
+ CustomSection(c) => f.debug_tuple("CustomSection").field(c).finish(),
+
+ UnknownSection { id, range, .. } => f
+ .debug_struct("UnknownSection")
+ .field("id", id)
+ .field("range", range)
+ .finish(),
+
+ End(offset) => f.debug_tuple("End").field(offset).finish(),
+ }
+ }
+}
+
+fn clear_hint(mut err: BinaryReaderError) -> BinaryReaderError {
+ err.inner.needed_hint = None;
+ err
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ macro_rules! assert_matches {
+ ($a:expr, $b:pat $(,)?) => {
+ match $a {
+ $b => {}
+ a => panic!("`{:?}` doesn't match `{}`", a, stringify!($b)),
+ }
+ };
+ }
+
+ #[test]
+ fn header() {
+ assert!(Parser::default().parse(&[], true).is_err());
+ assert_matches!(
+ Parser::default().parse(&[], false),
+ Ok(Chunk::NeedMoreData(4)),
+ );
+ assert_matches!(
+ Parser::default().parse(b"\0", false),
+ Ok(Chunk::NeedMoreData(3)),
+ );
+ assert_matches!(
+ Parser::default().parse(b"\0asm", false),
+ Ok(Chunk::NeedMoreData(4)),
+ );
+ assert_matches!(
+ Parser::default().parse(b"\0asm\x01\0\0\0", false),
+ Ok(Chunk::Parsed {
+ consumed: 8,
+ payload: Payload::Version { num: 1, .. },
+ }),
+ );
+ }
+
+ #[test]
+ fn header_iter() {
+ for _ in Parser::default().parse_all(&[]) {}
+ for _ in Parser::default().parse_all(b"\0") {}
+ for _ in Parser::default().parse_all(b"\0asm") {}
+ for _ in Parser::default().parse_all(b"\0asm\x01\x01\x01\x01") {}
+ }
+
+ fn parser_after_header() -> Parser {
+ let mut p = Parser::default();
+ assert_matches!(
+ p.parse(b"\0asm\x01\0\0\0", false),
+ Ok(Chunk::Parsed {
+ consumed: 8,
+ payload: Payload::Version {
+ num: WASM_MODULE_VERSION,
+ encoding: Encoding::Module,
+ ..
+ },
+ }),
+ );
+ p
+ }
+
+ fn parser_after_component_header() -> Parser {
+ let mut p = Parser::default();
+ assert_matches!(
+ p.parse(b"\0asm\x0c\0\x01\0", false),
+ Ok(Chunk::Parsed {
+ consumed: 8,
+ payload: Payload::Version {
+ num: WASM_COMPONENT_VERSION,
+ encoding: Encoding::Component,
+ ..
+ },
+ }),
+ );
+ p
+ }
+
+ #[test]
+ fn start_section() {
+ assert_matches!(
+ parser_after_header().parse(&[], false),
+ Ok(Chunk::NeedMoreData(1)),
+ );
+ assert!(parser_after_header().parse(&[8], true).is_err());
+ assert!(parser_after_header().parse(&[8, 1], true).is_err());
+ assert!(parser_after_header().parse(&[8, 2], true).is_err());
+ assert_matches!(
+ parser_after_header().parse(&[8], false),
+ Ok(Chunk::NeedMoreData(1)),
+ );
+ assert_matches!(
+ parser_after_header().parse(&[8, 1], false),
+ Ok(Chunk::NeedMoreData(1)),
+ );
+ assert_matches!(
+ parser_after_header().parse(&[8, 2], false),
+ Ok(Chunk::NeedMoreData(2)),
+ );
+ assert_matches!(
+ parser_after_header().parse(&[8, 1, 1], false),
+ Ok(Chunk::Parsed {
+ consumed: 3,
+ payload: Payload::StartSection { func: 1, .. },
+ }),
+ );
+ assert!(parser_after_header().parse(&[8, 2, 1, 1], false).is_err());
+ assert!(parser_after_header().parse(&[8, 0], false).is_err());
+ }
+
+ #[test]
+ fn end_works() {
+ assert_matches!(
+ parser_after_header().parse(&[], true),
+ Ok(Chunk::Parsed {
+ consumed: 0,
+ payload: Payload::End(8),
+ }),
+ );
+ }
+
+ #[test]
+ fn type_section() {
+ assert!(parser_after_header().parse(&[1], true).is_err());
+ assert!(parser_after_header().parse(&[1, 0], false).is_err());
+ assert!(parser_after_header().parse(&[8, 2], true).is_err());
+ assert_matches!(
+ parser_after_header().parse(&[1], false),
+ Ok(Chunk::NeedMoreData(1)),
+ );
+ assert_matches!(
+ parser_after_header().parse(&[1, 1], false),
+ Ok(Chunk::NeedMoreData(1)),
+ );
+ assert_matches!(
+ parser_after_header().parse(&[1, 1, 1], false),
+ Ok(Chunk::Parsed {
+ consumed: 3,
+ payload: Payload::TypeSection(_),
+ }),
+ );
+ assert_matches!(
+ parser_after_header().parse(&[1, 1, 1, 2, 3, 4], false),
+ Ok(Chunk::Parsed {
+ consumed: 3,
+ payload: Payload::TypeSection(_),
+ }),
+ );
+ }
+
+ #[test]
+ fn custom_section() {
+ assert!(parser_after_header().parse(&[0], true).is_err());
+ assert!(parser_after_header().parse(&[0, 0], false).is_err());
+ assert!(parser_after_header().parse(&[0, 1, 1], false).is_err());
+ assert_matches!(
+ parser_after_header().parse(&[0, 2, 1], false),
+ Ok(Chunk::NeedMoreData(1)),
+ );
+ assert_matches!(
+ parser_after_header().parse(&[0, 1, 0], false),
+ Ok(Chunk::Parsed {
+ consumed: 3,
+ payload: Payload::CustomSection(CustomSectionReader {
+ name: "",
+ data_offset: 11,
+ data: b"",
+ range: Range { start: 10, end: 11 },
+ }),
+ }),
+ );
+ assert_matches!(
+ parser_after_header().parse(&[0, 2, 1, b'a'], false),
+ Ok(Chunk::Parsed {
+ consumed: 4,
+ payload: Payload::CustomSection(CustomSectionReader {
+ name: "a",
+ data_offset: 12,
+ data: b"",
+ range: Range { start: 10, end: 12 },
+ }),
+ }),
+ );
+ assert_matches!(
+ parser_after_header().parse(&[0, 2, 0, b'a'], false),
+ Ok(Chunk::Parsed {
+ consumed: 4,
+ payload: Payload::CustomSection(CustomSectionReader {
+ name: "",
+ data_offset: 11,
+ data: b"a",
+ range: Range { start: 10, end: 12 },
+ }),
+ }),
+ );
+ }
+
+ #[test]
+ fn function_section() {
+ assert!(parser_after_header().parse(&[10], true).is_err());
+ assert!(parser_after_header().parse(&[10, 0], true).is_err());
+ assert!(parser_after_header().parse(&[10, 1], true).is_err());
+ assert_matches!(
+ parser_after_header().parse(&[10], false),
+ Ok(Chunk::NeedMoreData(1))
+ );
+ assert_matches!(
+ parser_after_header().parse(&[10, 1], false),
+ Ok(Chunk::NeedMoreData(1))
+ );
+ let mut p = parser_after_header();
+ assert_matches!(
+ p.parse(&[10, 1, 0], false),
+ Ok(Chunk::Parsed {
+ consumed: 3,
+ payload: Payload::CodeSectionStart { count: 0, .. },
+ }),
+ );
+ assert_matches!(
+ p.parse(&[], true),
+ Ok(Chunk::Parsed {
+ consumed: 0,
+ payload: Payload::End(11),
+ }),
+ );
+ let mut p = parser_after_header();
+ assert_matches!(
+ p.parse(&[10, 2, 1, 0], false),
+ Ok(Chunk::Parsed {
+ consumed: 3,
+ payload: Payload::CodeSectionStart { count: 1, .. },
+ }),
+ );
+ assert_matches!(
+ p.parse(&[0], false),
+ Ok(Chunk::Parsed {
+ consumed: 1,
+ payload: Payload::CodeSectionEntry(_),
+ }),
+ );
+ assert_matches!(
+ p.parse(&[], true),
+ Ok(Chunk::Parsed {
+ consumed: 0,
+ payload: Payload::End(12),
+ }),
+ );
+
+ // 1 byte section with 1 function can't read the function body because
+ // the section is too small
+ let mut p = parser_after_header();
+ assert_matches!(
+ p.parse(&[10, 1, 1], false),
+ Ok(Chunk::Parsed {
+ consumed: 3,
+ payload: Payload::CodeSectionStart { count: 1, .. },
+ }),
+ );
+ assert_eq!(
+ p.parse(&[0], false).unwrap_err().message(),
+ "unexpected end-of-file"
+ );
+
+ // section with 2 functions but section is cut off
+ let mut p = parser_after_header();
+ assert_matches!(
+ p.parse(&[10, 2, 2], false),
+ Ok(Chunk::Parsed {
+ consumed: 3,
+ payload: Payload::CodeSectionStart { count: 2, .. },
+ }),
+ );
+ assert_matches!(
+ p.parse(&[0], false),
+ Ok(Chunk::Parsed {
+ consumed: 1,
+ payload: Payload::CodeSectionEntry(_),
+ }),
+ );
+ assert_matches!(p.parse(&[], false), Ok(Chunk::NeedMoreData(1)));
+ assert_eq!(
+ p.parse(&[0], false).unwrap_err().message(),
+ "unexpected end-of-file",
+ );
+
+ // trailing data is bad
+ let mut p = parser_after_header();
+ assert_matches!(
+ p.parse(&[10, 3, 1], false),
+ Ok(Chunk::Parsed {
+ consumed: 3,
+ payload: Payload::CodeSectionStart { count: 1, .. },
+ }),
+ );
+ assert_matches!(
+ p.parse(&[0], false),
+ Ok(Chunk::Parsed {
+ consumed: 1,
+ payload: Payload::CodeSectionEntry(_),
+ }),
+ );
+ assert_eq!(
+ p.parse(&[0], false).unwrap_err().message(),
+ "trailing bytes at end of section",
+ );
+ }
+
+ #[test]
+ fn single_module() {
+ let mut p = parser_after_component_header();
+ assert_matches!(p.parse(&[4], false), Ok(Chunk::NeedMoreData(1)));
+
+ // A module that's 8 bytes in length
+ let mut sub = match p.parse(&[1, 8], false) {
+ Ok(Chunk::Parsed {
+ consumed: 2,
+ payload: Payload::ModuleSection { parser, .. },
+ }) => parser,
+ other => panic!("bad parse {:?}", other),
+ };
+
+ // Parse the header of the submodule with the sub-parser.
+ assert_matches!(sub.parse(&[], false), Ok(Chunk::NeedMoreData(4)));
+ assert_matches!(sub.parse(b"\0asm", false), Ok(Chunk::NeedMoreData(4)));
+ assert_matches!(
+ sub.parse(b"\0asm\x01\0\0\0", false),
+ Ok(Chunk::Parsed {
+ consumed: 8,
+ payload: Payload::Version {
+ num: 1,
+ encoding: Encoding::Module,
+ ..
+ },
+ }),
+ );
+
+ // The sub-parser should be byte-limited so the next byte shouldn't get
+ // consumed; it's intended for the parent parser.
+ assert_matches!(
+ sub.parse(&[10], false),
+ Ok(Chunk::Parsed {
+ consumed: 0,
+ payload: Payload::End(18),
+ }),
+ );
+
+ // The parent parser should now be back to resuming, and we simulate it
+ // being done with bytes to ensure that it's safely at the end,
+ // completing the module code section.
+ assert_matches!(p.parse(&[], false), Ok(Chunk::NeedMoreData(1)));
+ assert_matches!(
+ p.parse(&[], true),
+ Ok(Chunk::Parsed {
+ consumed: 0,
+ payload: Payload::End(18),
+ }),
+ );
+ }
+
+ #[test]
+ fn nested_section_too_big() {
+ let mut p = parser_after_component_header();
+
+ // A module that's 10 bytes in length
+ let mut sub = match p.parse(&[1, 10], false) {
+ Ok(Chunk::Parsed {
+ consumed: 2,
+ payload: Payload::ModuleSection { parser, .. },
+ }) => parser,
+ other => panic!("bad parse {:?}", other),
+ };
+
+ // use 8 bytes to parse the header, leaving 2 remaining bytes in our
+ // module.
+ assert_matches!(
+ sub.parse(b"\0asm\x01\0\0\0", false),
+ Ok(Chunk::Parsed {
+ consumed: 8,
+ payload: Payload::Version { num: 1, .. },
+ }),
+ );
+
+ // We can't parse a section which declares it's bigger than the outer
+ // module. This is a custom section, one byte big, with one content byte. The
+ // content byte, however, lives outside of the parent's module code
+ // section.
+ assert_eq!(
+ sub.parse(&[0, 1, 0], false).unwrap_err().message(),
+ "section too large",
+ );
+ }
+}
diff --git a/third_party/rust/wasmparser/src/readers.rs b/third_party/rust/wasmparser/src/readers.rs
new file mode 100644
index 0000000000..e2b25da7cf
--- /dev/null
+++ b/third_party/rust/wasmparser/src/readers.rs
@@ -0,0 +1,316 @@
+/* Copyright 2018 Mozilla Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use crate::{BinaryReader, BinaryReaderError, Result};
+use std::fmt;
+use std::marker;
+use std::ops::Range;
+
+mod component;
+mod core;
+
+pub use self::component::*;
+pub use self::core::*;
+
+/// A trait implemented for items that can be decoded directly from a
+/// `BinaryReader`, or in other words items that can be parsed from the
+/// WebAssembly binary format.
+///
+/// Note that this is also accessible as a [`BinaryReader::read`] method.
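+///
+/// # Examples
+///
+/// An illustrative implementation for a type that is not part of this
+/// crate, composed entirely of existing `FromReader` impls:
+///
+/// ```
+/// use wasmparser::{BinaryReader, FromReader, Result};
+///
+/// // Hypothetical pair of indices, decoded as two LEB128-encoded `u32`s.
+/// struct IndexPair {
+///     table: u32,
+///     func: u32,
+/// }
+///
+/// impl<'a> FromReader<'a> for IndexPair {
+///     fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+///         Ok(IndexPair {
+///             table: reader.read()?,
+///             func: reader.read()?,
+///         })
+///     }
+/// }
+///
+/// let data: &[u8] = &[0x01, 0x02];
+/// let mut reader = BinaryReader::new(data);
+/// let pair: IndexPair = reader.read().unwrap();
+/// assert_eq!((pair.table, pair.func), (1, 2));
+/// ```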
+pub trait FromReader<'a>: Sized {
+ /// Attempts to read `Self` from the provided binary reader, returning an
+ /// error if it is unable to do so.
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self>;
+}
+
+impl<'a> FromReader<'a> for u32 {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ reader.read_var_u32()
+ }
+}
+
+impl<'a> FromReader<'a> for &'a str {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ reader.read_string()
+ }
+}
+
+impl<'a, T, U> FromReader<'a> for (T, U)
+where
+ T: FromReader<'a>,
+ U: FromReader<'a>,
+{
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ Ok((reader.read()?, reader.read()?))
+ }
+}
+
+/// A generic structure for reading a section of a WebAssembly binary which has
+/// a limited number of items within it.
+///
+/// Many WebAssembly sections are a count of items followed by that many items.
+/// This helper structure can be used to parse these sections and provides
+/// an iteration-based API for reading the contents.
+///
+/// Note that this always implements the [`Clone`] trait to represent the
+/// ability to parse the section multiple times.
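+///
+/// # Examples
+///
+/// A hand-assembled sketch (not from the upstream docs) of decoding a tiny
+/// section that contains two LEB128-encoded `u32` items:
+///
+/// ```
+/// use wasmparser::{Result, SectionLimited};
+///
+/// // A count of 2 followed by the items 5 and 7.
+/// let data: &[u8] = &[0x02, 0x05, 0x07];
+/// let section = SectionLimited::<u32>::new(data, 0).unwrap();
+/// assert_eq!(section.count(), 2);
+/// let items = section.into_iter().collect::<Result<Vec<_>>>().unwrap();
+/// assert_eq!(items, [5u32, 7]);
+/// ```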
+pub struct SectionLimited<'a, T> {
+ reader: BinaryReader<'a>,
+ count: u32,
+ _marker: marker::PhantomData<T>,
+}
+
+impl<'a, T> SectionLimited<'a, T> {
+ /// Creates a new section reader from the provided contents.
+ ///
+ /// The `data` provided here is the data of the section itself that will be
+ /// parsed. The `offset` argument is the byte offset, in the original wasm
+ /// binary, at which the section was found; it is used for error reporting.
+ ///
+ /// # Errors
+ ///
+ /// Returns an error if a 32-bit count couldn't be read from the `data`.
+ pub fn new(data: &'a [u8], offset: usize) -> Result<Self> {
+ let mut reader = BinaryReader::new_with_offset(data, offset);
+ let count = reader.read_var_u32()?;
+ Ok(SectionLimited {
+ reader,
+ count,
+ _marker: marker::PhantomData,
+ })
+ }
+
+ /// Returns the count of total items within this section.
+ pub fn count(&self) -> u32 {
+ self.count
+ }
+
+ /// Returns the original byte offset of this section.
+ pub fn original_position(&self) -> usize {
+ self.reader.original_position()
+ }
+
+ /// Returns the range, as byte offsets, of this section within the original
+ /// wasm binary.
+ pub fn range(&self) -> Range<usize> {
+ self.reader.range()
+ }
+
+ /// Returns an iterator which yields not only each item in this section but
+ /// additionally the offset of each item within the section.
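+ ///
+ /// # Examples
+ ///
+ /// An illustrative sketch (not from the upstream docs) pairing each
+ /// decoded item with the byte offset at which it starts:
+ ///
+ /// ```
+ /// use wasmparser::{Result, SectionLimited};
+ ///
+ /// // A count of 2 followed by the items 5 and 7; the count byte is at
+ /// // offset 0, so the items themselves start at offsets 1 and 2.
+ /// let data: &[u8] = &[0x02, 0x05, 0x07];
+ /// let section = SectionLimited::<u32>::new(data, 0).unwrap();
+ /// let items = section
+ ///     .into_iter_with_offsets()
+ ///     .collect::<Result<Vec<_>>>()
+ ///     .unwrap();
+ /// assert_eq!(items, [(1usize, 5u32), (2, 7)]);
+ /// ```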
+ pub fn into_iter_with_offsets(self) -> SectionLimitedIntoIterWithOffsets<'a, T>
+ where
+ T: FromReader<'a>,
+ {
+ SectionLimitedIntoIterWithOffsets {
+ iter: self.into_iter(),
+ }
+ }
+}
+
+impl<T> Clone for SectionLimited<'_, T> {
+ fn clone(&self) -> Self {
+ SectionLimited {
+ reader: self.reader.clone(),
+ count: self.count,
+ _marker: self._marker,
+ }
+ }
+}
+
+impl<T> fmt::Debug for SectionLimited<'_, T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("SectionLimited")
+ .field("count", &self.count)
+ .field("range", &self.range())
+ .finish()
+ }
+}
+
+impl<'a, T> IntoIterator for SectionLimited<'a, T>
+where
+ T: FromReader<'a>,
+{
+ type Item = Result<T>;
+ type IntoIter = SectionLimitedIntoIter<'a, T>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ SectionLimitedIntoIter {
+ remaining: self.count,
+ section: self,
+ end: false,
+ }
+ }
+}
+
+/// A consuming iterator of a [`SectionLimited`].
+///
+/// This is created via the [`IntoIterator`] `impl` for the [`SectionLimited`]
+/// type.
+pub struct SectionLimitedIntoIter<'a, T> {
+ section: SectionLimited<'a, T>,
+ remaining: u32,
+ end: bool,
+}
+
+impl<T> SectionLimitedIntoIter<'_, T> {
+ /// Returns the current byte offset of the section within this iterator.
+ pub fn original_position(&self) -> usize {
+ self.section.reader.original_position()
+ }
+}
+
+impl<'a, T> Iterator for SectionLimitedIntoIter<'a, T>
+where
+ T: FromReader<'a>,
+{
+ type Item = Result<T>;
+
+ fn next(&mut self) -> Option<Result<T>> {
+ if self.end {
+ return None;
+ }
+ if self.remaining == 0 {
+ self.end = true;
+ if self.section.reader.eof() {
+ return None;
+ }
+ return Some(Err(BinaryReaderError::new(
+ "section size mismatch: unexpected data at the end of the section",
+ self.section.reader.original_position(),
+ )));
+ }
+ let result = self.section.reader.read();
+ self.end = result.is_err();
+ self.remaining -= 1;
+ Some(result)
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let remaining = self.remaining as usize;
+ (remaining, Some(remaining))
+ }
+}
+
+impl<'a, T> ExactSizeIterator for SectionLimitedIntoIter<'a, T> where T: FromReader<'a> {}
+
+/// A version of [`SectionLimitedIntoIter`] which additionally yields the byte
+/// offset of each item within the section.
+pub struct SectionLimitedIntoIterWithOffsets<'a, T> {
+ iter: SectionLimitedIntoIter<'a, T>,
+}
+
+impl<'a, T> Iterator for SectionLimitedIntoIterWithOffsets<'a, T>
+where
+ T: FromReader<'a>,
+{
+ type Item = Result<(usize, T)>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ let pos = self.iter.section.reader.original_position();
+ Some(self.iter.next()?.map(|item| (pos, item)))
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.iter.size_hint()
+ }
+}
+
+impl<'a, T> ExactSizeIterator for SectionLimitedIntoIterWithOffsets<'a, T> where T: FromReader<'a> {}
+
+/// A trait implemented for subsections of another outer section.
+///
+/// This is currently only used for subsections within custom sections, such as
+/// the `name` section of core wasm.
+///
+/// This is used in conjunction with [`Subsections`].
+pub trait Subsection<'a>: Sized {
+ /// Converts the section identifier provided with the section contents into
+ /// a typed section.
+ fn from_reader(id: u8, reader: BinaryReader<'a>) -> Result<Self>;
+}
+
+/// Iterator/reader over the contents of a section which is composed of
+/// subsections.
+///
+/// This reader is used for the core `name` section, for example. This type
+/// primarily implements [`Iterator`] for advancing through the sections.
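+///
+/// # Examples
+///
+/// An illustrative sketch (not from the upstream docs) of a minimal
+/// [`Subsection`] implementation which only records each subsection's id;
+/// the payload bytes are hand-assembled for the example:
+///
+/// ```
+/// use wasmparser::{BinaryReader, Result, Subsection, Subsections};
+///
+/// // Hypothetical subsection type, not part of wasmparser itself.
+/// struct Raw {
+///     id: u8,
+/// }
+///
+/// impl<'a> Subsection<'a> for Raw {
+///     fn from_reader(id: u8, _reader: BinaryReader<'a>) -> Result<Self> {
+///         Ok(Raw { id })
+///     }
+/// }
+///
+/// // Two subsections: id 0 with a 1-byte payload, id 7 with 2 bytes.
+/// let data: &[u8] = &[0x00, 0x01, 0xaa, 0x07, 0x02, 0xbb, 0xcc];
+/// let ids = Subsections::<Raw>::new(data, 0)
+///     .map(|s| s.map(|raw| raw.id))
+///     .collect::<Result<Vec<_>>>()
+///     .unwrap();
+/// assert_eq!(ids, [0u8, 7]);
+/// ```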
+pub struct Subsections<'a, T> {
+ reader: BinaryReader<'a>,
+ _marker: marker::PhantomData<T>,
+}
+
+impl<'a, T> Subsections<'a, T> {
+ /// Creates a new reader for the specified section contents starting at
+ /// `offset` within the original wasm file.
+ pub fn new(data: &'a [u8], offset: usize) -> Self {
+ Subsections {
+ reader: BinaryReader::new_with_offset(data, offset),
+ _marker: marker::PhantomData,
+ }
+ }
+
+ /// Returns the original byte offset of this section.
+ pub fn original_position(&self) -> usize {
+ self.reader.original_position()
+ }
+
+ /// Returns the range, as byte offsets, of this section within the original
+ /// wasm binary.
+ pub fn range(&self) -> Range<usize> {
+ self.reader.range()
+ }
+
+ fn read(&mut self) -> Result<T>
+ where
+ T: Subsection<'a>,
+ {
+ let subsection_id = self.reader.read_u7()?;
+ let reader = self.reader.read_reader("unexpected end of section")?;
+ T::from_reader(subsection_id, reader)
+ }
+}
+
+impl<T> Clone for Subsections<'_, T> {
+ fn clone(&self) -> Self {
+ Subsections {
+ reader: self.reader.clone(),
+ _marker: self._marker,
+ }
+ }
+}
+
+impl<T> fmt::Debug for Subsections<'_, T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Subsections")
+ .field("range", &self.range())
+ .finish()
+ }
+}
+
+impl<'a, T> Iterator for Subsections<'a, T>
+where
+ T: Subsection<'a>,
+{
+ type Item = Result<T>;
+
+ fn next(&mut self) -> Option<Result<T>> {
+ if self.reader.eof() {
+ None
+ } else {
+ Some(self.read())
+ }
+ }
+}
diff --git a/third_party/rust/wasmparser/src/readers/component.rs b/third_party/rust/wasmparser/src/readers/component.rs
new file mode 100644
index 0000000000..24b490d0c3
--- /dev/null
+++ b/third_party/rust/wasmparser/src/readers/component.rs
@@ -0,0 +1,17 @@
+mod aliases;
+mod canonicals;
+mod exports;
+mod imports;
+mod instances;
+mod names;
+mod start;
+mod types;
+
+pub use self::aliases::*;
+pub use self::canonicals::*;
+pub use self::exports::*;
+pub use self::imports::*;
+pub use self::instances::*;
+pub use self::names::*;
+pub use self::start::*;
+pub use self::types::*;
diff --git a/third_party/rust/wasmparser/src/readers/component/aliases.rs b/third_party/rust/wasmparser/src/readers/component/aliases.rs
new file mode 100644
index 0000000000..fb71d579b4
--- /dev/null
+++ b/third_party/rust/wasmparser/src/readers/component/aliases.rs
@@ -0,0 +1,119 @@
+use crate::{BinaryReader, ComponentExternalKind, ExternalKind, FromReader, Result};
+
+/// Represents the kind of an outer alias in a WebAssembly component.
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+pub enum ComponentOuterAliasKind {
+ /// The alias is to a core module.
+ CoreModule,
+ /// The alias is to a core type.
+ CoreType,
+ /// The alias is to a component type.
+ Type,
+ /// The alias is to a component.
+ Component,
+}
+
+/// Represents an alias in a WebAssembly component.
+#[derive(Debug, Clone)]
+pub enum ComponentAlias<'a> {
+ /// The alias is to an export of a component instance.
+ InstanceExport {
+ /// The alias kind.
+ kind: ComponentExternalKind,
+ /// The instance index.
+ instance_index: u32,
+ /// The export name.
+ name: &'a str,
+ },
+ /// The alias is to an export of a module instance.
+ CoreInstanceExport {
+ /// The alias kind.
+ kind: ExternalKind,
+ /// The instance index.
+ instance_index: u32,
+ /// The export name.
+ name: &'a str,
+ },
+ /// The alias is to an outer item.
+ Outer {
+ /// The alias kind.
+ kind: ComponentOuterAliasKind,
+ /// The outward count, starting at zero for the current component.
+ count: u32,
+ /// The index of the item within the outer component.
+ index: u32,
+ },
+}
+
+/// Section reader for the component alias section
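+///
+/// # Examples
+///
+/// A hand-assembled example (not taken from the upstream docs) of a section
+/// containing a single outer alias to a component type:
+///
+/// ```
+/// use wasmparser::{ComponentAlias, ComponentAliasSectionReader, ComponentOuterAliasKind};
+///
+/// // Count of 1, sort byte 0x03 (type), target byte 0x02 (outer),
+/// // then an outward count of 0 and a type index of 0.
+/// let data: &[u8] = &[0x01, 0x03, 0x02, 0x00, 0x00];
+/// let reader = ComponentAliasSectionReader::new(data, 0).unwrap();
+/// for alias in reader {
+///     match alias.expect("alias") {
+///         ComponentAlias::Outer { kind, count, index } => {
+///             assert_eq!(kind, ComponentOuterAliasKind::Type);
+///             assert_eq!((count, index), (0, 0));
+///         }
+///         other => panic!("unexpected alias {:?}", other),
+///     }
+/// }
+/// ```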
+pub type ComponentAliasSectionReader<'a> = crate::SectionLimited<'a, ComponentAlias<'a>>;
+
+impl<'a> FromReader<'a> for ComponentAlias<'a> {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ // We don't know what type of alias it is yet, so just read the sort bytes
+ let offset = reader.original_position();
+ let byte1 = reader.read_u8()?;
+ let byte2 = if byte1 == 0x00 {
+ Some(reader.read_u8()?)
+ } else {
+ None
+ };
+
+ Ok(match reader.read_u8()? {
+ 0x00 => ComponentAlias::InstanceExport {
+ kind: ComponentExternalKind::from_bytes(byte1, byte2, offset)?,
+ instance_index: reader.read_var_u32()?,
+ name: reader.read_string()?,
+ },
+ 0x01 => ComponentAlias::CoreInstanceExport {
+ kind: BinaryReader::external_kind_from_byte(
+ byte2.ok_or_else(|| {
+ BinaryReader::invalid_leading_byte_error(
+ byte1,
+ "core instance export kind",
+ offset,
+ )
+ })?,
+ offset,
+ )?,
+ instance_index: reader.read_var_u32()?,
+ name: reader.read_string()?,
+ },
+ 0x02 => ComponentAlias::Outer {
+ kind: component_outer_alias_kind_from_bytes(byte1, byte2, offset)?,
+ count: reader.read_var_u32()?,
+ index: reader.read_var_u32()?,
+ },
+ x => reader.invalid_leading_byte(x, "alias")?,
+ })
+ }
+}
+
+fn component_outer_alias_kind_from_bytes(
+ byte1: u8,
+ byte2: Option<u8>,
+ offset: usize,
+) -> Result<ComponentOuterAliasKind> {
+ Ok(match byte1 {
+ 0x00 => match byte2.unwrap() {
+ 0x10 => ComponentOuterAliasKind::CoreType,
+ 0x11 => ComponentOuterAliasKind::CoreModule,
+ x => {
+ return Err(BinaryReader::invalid_leading_byte_error(
+ x,
+ "component outer alias kind",
+ offset + 1,
+ ))
+ }
+ },
+ 0x03 => ComponentOuterAliasKind::Type,
+ 0x04 => ComponentOuterAliasKind::Component,
+ x => {
+ return Err(BinaryReader::invalid_leading_byte_error(
+ x,
+ "component outer alias kind",
+ offset,
+ ))
+ }
+ })
+}
diff --git a/third_party/rust/wasmparser/src/readers/component/canonicals.rs b/third_party/rust/wasmparser/src/readers/component/canonicals.rs
new file mode 100644
index 0000000000..e360d029c4
--- /dev/null
+++ b/third_party/rust/wasmparser/src/readers/component/canonicals.rs
@@ -0,0 +1,95 @@
+use crate::limits::MAX_WASM_CANONICAL_OPTIONS;
+use crate::{BinaryReader, FromReader, Result, SectionLimited};
+
+/// Represents options for component functions.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum CanonicalOption {
+ /// The string types in the function signature are UTF-8 encoded.
+ UTF8,
+ /// The string types in the function signature are UTF-16 encoded.
+ UTF16,
+ /// The string types in the function signature are compact UTF-16 encoded.
+ CompactUTF16,
+ /// The memory to use if the lifting or lowering of a function requires memory access.
+ ///
+ /// The value is an index to a core memory.
+ Memory(u32),
+ /// The realloc function to use if the lifting or lowering of a function requires memory
+ /// allocation.
+ ///
+ /// The value is an index to a core function of type `(func (param i32 i32 i32 i32) (result i32))`.
+ Realloc(u32),
+ /// The post-return function to use if the lifting of a function requires
+ /// cleanup after the function returns.
+ PostReturn(u32),
+}
+
+/// Represents a canonical function in a WebAssembly component.
+#[derive(Debug, Clone)]
+pub enum CanonicalFunction {
+ /// The function lifts a core WebAssembly function to the canonical ABI.
+ Lift {
+ /// The index of the core WebAssembly function to lift.
+ core_func_index: u32,
+ /// The index of the lifted function's type.
+ type_index: u32,
+ /// The canonical options for the function.
+ options: Box<[CanonicalOption]>,
+ },
+ /// The function lowers a canonical ABI function to a core WebAssembly function.
+ Lower {
+ /// The index of the function to lower.
+ func_index: u32,
+ /// The canonical options for the function.
+ options: Box<[CanonicalOption]>,
+ },
+}
+
+/// A reader for the canonical section of a WebAssembly component.
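+///
+/// # Examples
+///
+/// A hand-assembled example (not from the upstream docs) containing a
+/// single `canon lift` of core function 0 with no options and type 0:
+///
+/// ```
+/// use wasmparser::{CanonicalFunction, ComponentCanonicalSectionReader};
+///
+/// // Count of 1, then 0x00 0x00 (lift), core func index 0,
+/// // an options count of 0, and type index 0.
+/// let data: &[u8] = &[0x01, 0x00, 0x00, 0x00, 0x00, 0x00];
+/// let reader = ComponentCanonicalSectionReader::new(data, 0).unwrap();
+/// for func in reader {
+///     match func.expect("canonical function") {
+///         CanonicalFunction::Lift { core_func_index, type_index, options } => {
+///             assert_eq!((core_func_index, type_index), (0, 0));
+///             assert!(options.is_empty());
+///         }
+///         other => panic!("unexpected function {:?}", other),
+///     }
+/// }
+/// ```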
+pub type ComponentCanonicalSectionReader<'a> = SectionLimited<'a, CanonicalFunction>;
+
+impl<'a> FromReader<'a> for CanonicalFunction {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<CanonicalFunction> {
+ Ok(match reader.read_u8()? {
+ 0x00 => match reader.read_u8()? {
+ 0x00 => {
+ let core_func_index = reader.read_var_u32()?;
+ let options = reader
+ .read_iter(MAX_WASM_CANONICAL_OPTIONS, "canonical options")?
+ .collect::<Result<_>>()?;
+ let type_index = reader.read_var_u32()?;
+ CanonicalFunction::Lift {
+ core_func_index,
+ options,
+ type_index,
+ }
+ }
+ x => return reader.invalid_leading_byte(x, "canonical function lift"),
+ },
+ 0x01 => match reader.read_u8()? {
+ 0x00 => CanonicalFunction::Lower {
+ func_index: reader.read_var_u32()?,
+ options: reader
+ .read_iter(MAX_WASM_CANONICAL_OPTIONS, "canonical options")?
+ .collect::<Result<_>>()?,
+ },
+ x => return reader.invalid_leading_byte(x, "canonical function lower"),
+ },
+ x => return reader.invalid_leading_byte(x, "canonical function"),
+ })
+ }
+}
+
+impl<'a> FromReader<'a> for CanonicalOption {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ Ok(match reader.read_u8()? {
+ 0x00 => CanonicalOption::UTF8,
+ 0x01 => CanonicalOption::UTF16,
+ 0x02 => CanonicalOption::CompactUTF16,
+ 0x03 => CanonicalOption::Memory(reader.read_var_u32()?),
+ 0x04 => CanonicalOption::Realloc(reader.read_var_u32()?),
+ 0x05 => CanonicalOption::PostReturn(reader.read_var_u32()?),
+ x => return reader.invalid_leading_byte(x, "canonical option"),
+ })
+ }
+}
diff --git a/third_party/rust/wasmparser/src/readers/component/exports.rs b/third_party/rust/wasmparser/src/readers/component/exports.rs
new file mode 100644
index 0000000000..8ce5f43a00
--- /dev/null
+++ b/third_party/rust/wasmparser/src/readers/component/exports.rs
@@ -0,0 +1,105 @@
+use crate::{BinaryReader, ComponentTypeRef, FromReader, Result, SectionLimited};
+
+/// Represents the kind of an external item of a WebAssembly component.
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+pub enum ComponentExternalKind {
+ /// The external kind is a core module.
+ Module,
+ /// The external kind is a function.
+ Func,
+ /// The external kind is a value.
+ Value,
+ /// The external kind is a type.
+ Type,
+ /// The external kind is an instance.
+ Instance,
+ /// The external kind is a component.
+ Component,
+}
+
+impl ComponentExternalKind {
+ pub(crate) fn from_bytes(
+ byte1: u8,
+ byte2: Option<u8>,
+ offset: usize,
+ ) -> Result<ComponentExternalKind> {
+ Ok(match byte1 {
+ 0x00 => match byte2.unwrap() {
+ 0x11 => ComponentExternalKind::Module,
+ x => {
+ return Err(BinaryReader::invalid_leading_byte_error(
+ x,
+ "component external kind",
+ offset + 1,
+ ))
+ }
+ },
+ 0x01 => ComponentExternalKind::Func,
+ 0x02 => ComponentExternalKind::Value,
+ 0x03 => ComponentExternalKind::Type,
+ 0x04 => ComponentExternalKind::Component,
+ 0x05 => ComponentExternalKind::Instance,
+ x => {
+ return Err(BinaryReader::invalid_leading_byte_error(
+ x,
+ "component external kind",
+ offset,
+ ))
+ }
+ })
+ }
+}
+
+/// Represents an export in a WebAssembly component.
+#[derive(Debug, Clone)]
+pub struct ComponentExport<'a> {
+ /// The name of the exported item.
+ pub name: &'a str,
+ /// The optional URL of the exported item.
+ pub url: &'a str,
+ /// The kind of the export.
+ pub kind: ComponentExternalKind,
+ /// The index of the exported item.
+ pub index: u32,
+ /// An optionally specified type ascribed to this export.
+ pub ty: Option<ComponentTypeRef>,
+}
+
+/// A reader for the export section of a WebAssembly component.
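+///
+/// # Examples
+///
+/// A hand-assembled example (not from the upstream docs) of a section with
+/// a single function export named `f` and no ascribed type:
+///
+/// ```
+/// use wasmparser::{ComponentExportSectionReader, ComponentExternalKind};
+///
+/// // Count of 1, name "f", empty URL, kind func (0x01), index 0,
+/// // and a trailing 0x00 for "no type ascription".
+/// let data: &[u8] = &[0x01, 0x01, 0x66, 0x00, 0x01, 0x00, 0x00];
+/// let reader = ComponentExportSectionReader::new(data, 0).unwrap();
+/// for export in reader {
+///     let export = export.expect("export");
+///     assert_eq!(export.name, "f");
+///     assert_eq!(export.kind, ComponentExternalKind::Func);
+///     assert!(export.ty.is_none());
+/// }
+/// ```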
+pub type ComponentExportSectionReader<'a> = SectionLimited<'a, ComponentExport<'a>>;
+
+impl<'a> FromReader<'a> for ComponentExport<'a> {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ Ok(ComponentExport {
+ name: reader.read()?,
+ url: reader.read()?,
+ kind: reader.read()?,
+ index: reader.read()?,
+ ty: match reader.read_u8()? {
+ 0x00 => None,
+ 0x01 => Some(reader.read()?),
+ other => {
+ return Err(BinaryReader::invalid_leading_byte_error(
+ other,
+ "optional component export type",
+ reader.original_position() - 1,
+ ))
+ }
+ },
+ })
+ }
+}
+
+impl<'a> FromReader<'a> for ComponentExternalKind {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ let offset = reader.original_position();
+ let byte1 = reader.read_u8()?;
+ let byte2 = if byte1 == 0x00 {
+ Some(reader.read_u8()?)
+ } else {
+ None
+ };
+
+ ComponentExternalKind::from_bytes(byte1, byte2, offset)
+ }
+}
diff --git a/third_party/rust/wasmparser/src/readers/component/imports.rs b/third_party/rust/wasmparser/src/readers/component/imports.rs
new file mode 100644
index 0000000000..c1313c11e2
--- /dev/null
+++ b/third_party/rust/wasmparser/src/readers/component/imports.rs
@@ -0,0 +1,109 @@
+use crate::{
+ BinaryReader, ComponentExternalKind, ComponentValType, FromReader, Result, SectionLimited,
+};
+
+/// Represents the type bounds for imports and exports.
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+pub enum TypeBounds {
+ /// The type is bounded by equality.
+ Eq,
+}
+
+impl<'a> FromReader<'a> for TypeBounds {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ Ok(match reader.read_u8()? {
+ 0x00 => TypeBounds::Eq,
+ x => return reader.invalid_leading_byte(x, "type bound"),
+ })
+ }
+}
+
+/// Represents a reference to a component type.
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+pub enum ComponentTypeRef {
+ /// The reference is to a core module type.
+ ///
+ /// The index is expected to be a core type index to a core module type.
+ Module(u32),
+ /// The reference is to a function type.
+ ///
+ /// The index is expected to be a type index to a function type.
+ Func(u32),
+ /// The reference is to a value type.
+ Value(ComponentValType),
+ /// The reference is to a bounded type.
+ ///
+ /// The index is expected to be a type index.
+ Type(TypeBounds, u32),
+ /// The reference is to an instance type.
+ ///
+ /// The index is a type index to an instance type.
+ Instance(u32),
+ /// The reference is to a component type.
+ ///
+ /// The index is a type index to a component type.
+ Component(u32),
+}
+
+impl ComponentTypeRef {
+ /// Returns the corresponding [`ComponentExternalKind`] for this reference.
+ pub fn kind(&self) -> ComponentExternalKind {
+ match self {
+ ComponentTypeRef::Module(_) => ComponentExternalKind::Module,
+ ComponentTypeRef::Func(_) => ComponentExternalKind::Func,
+ ComponentTypeRef::Value(_) => ComponentExternalKind::Value,
+ ComponentTypeRef::Type(..) => ComponentExternalKind::Type,
+ ComponentTypeRef::Instance(_) => ComponentExternalKind::Instance,
+ ComponentTypeRef::Component(_) => ComponentExternalKind::Component,
+ }
+ }
+}
+
+impl<'a> FromReader<'a> for ComponentTypeRef {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ Ok(match reader.read()? {
+ ComponentExternalKind::Module => ComponentTypeRef::Module(reader.read()?),
+ ComponentExternalKind::Func => ComponentTypeRef::Func(reader.read()?),
+ ComponentExternalKind::Value => ComponentTypeRef::Value(reader.read()?),
+ ComponentExternalKind::Type => ComponentTypeRef::Type(reader.read()?, reader.read()?),
+ ComponentExternalKind::Instance => ComponentTypeRef::Instance(reader.read()?),
+ ComponentExternalKind::Component => ComponentTypeRef::Component(reader.read()?),
+ })
+ }
+}
+
+/// Represents an import in a WebAssembly component.
+#[derive(Debug, Copy, Clone)]
+pub struct ComponentImport<'a> {
+ /// The name of the imported item.
+ pub name: &'a str,
+ /// The optional URL of the imported item.
+ pub url: &'a str,
+ /// The type reference for the import.
+ pub ty: ComponentTypeRef,
+}
+
+impl<'a> FromReader<'a> for ComponentImport<'a> {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ Ok(ComponentImport {
+ name: reader.read()?,
+ url: reader.read()?,
+ ty: reader.read()?,
+ })
+ }
+}
+
+/// A reader for the import section of a WebAssembly component.
+///
+/// # Examples
+///
+/// ```
+/// use wasmparser::ComponentImportSectionReader;
+/// let data: &[u8] = &[0x01, 0x01, 0x41, 0x00, 0x01, 0x66];
+/// let reader = ComponentImportSectionReader::new(data, 0).unwrap();
+/// for import in reader {
+/// let import = import.expect("import");
+/// println!("Import: {:?}", import);
+/// }
+/// ```
+pub type ComponentImportSectionReader<'a> = SectionLimited<'a, ComponentImport<'a>>;
diff --git a/third_party/rust/wasmparser/src/readers/component/instances.rs b/third_party/rust/wasmparser/src/readers/component/instances.rs
new file mode 100644
index 0000000000..8166395edc
--- /dev/null
+++ b/third_party/rust/wasmparser/src/readers/component/instances.rs
@@ -0,0 +1,164 @@
+use crate::limits::{MAX_WASM_INSTANTIATION_ARGS, MAX_WASM_INSTANTIATION_EXPORTS};
+use crate::{
+ BinaryReader, ComponentExport, ComponentExternalKind, Export, FromReader, Result,
+ SectionLimited,
+};
+
+/// Represents the kind of an instantiation argument for a core instance.
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+pub enum InstantiationArgKind {
+ /// The instantiation argument is a core instance.
+ Instance,
+}
+
+/// Represents an argument to instantiating a WebAssembly module.
+#[derive(Debug, Clone)]
+pub struct InstantiationArg<'a> {
+ /// The name of the module argument.
+ pub name: &'a str,
+ /// The kind of the module argument.
+ pub kind: InstantiationArgKind,
+ /// The index of the argument item.
+ pub index: u32,
+}
+
+/// Represents an instance of a WebAssembly module.
+#[derive(Debug, Clone)]
+pub enum Instance<'a> {
+ /// The instance is from instantiating a WebAssembly module.
+ Instantiate {
+ /// The module index.
+ module_index: u32,
+ /// The module's instantiation arguments.
+ args: Box<[InstantiationArg<'a>]>,
+ },
+ /// The instance is from exporting local items.
+ FromExports(Box<[Export<'a>]>),
+}
+
+/// A reader for the core instance section of a WebAssembly component.
+///
+/// # Examples
+///
+/// ```
+/// use wasmparser::InstanceSectionReader;
+/// # let data: &[u8] = &[0x01, 0x00, 0x00, 0x01, 0x03, b'f', b'o', b'o', 0x12, 0x00];
+/// let mut reader = InstanceSectionReader::new(data, 0).unwrap();
+/// for inst in reader {
+/// println!("Instance {:?}", inst.expect("instance"));
+/// }
+/// ```
+pub type InstanceSectionReader<'a> = SectionLimited<'a, Instance<'a>>;
+
+impl<'a> FromReader<'a> for Instance<'a> {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ Ok(match reader.read_u8()? {
+ 0x00 => Instance::Instantiate {
+ module_index: reader.read_var_u32()?,
+ args: reader
+ .read_iter(MAX_WASM_INSTANTIATION_ARGS, "core instantiation arguments")?
+ .collect::<Result<_>>()?,
+ },
+ 0x01 => Instance::FromExports(
+ reader
+ .read_iter(MAX_WASM_INSTANTIATION_ARGS, "core instantiation arguments")?
+ .collect::<Result<_>>()?,
+ ),
+ x => return reader.invalid_leading_byte(x, "core instance"),
+ })
+ }
+}
+
+impl<'a> FromReader<'a> for InstantiationArg<'a> {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ Ok(InstantiationArg {
+ name: reader.read()?,
+ kind: reader.read()?,
+ index: reader.read()?,
+ })
+ }
+}
+
+impl<'a> FromReader<'a> for InstantiationArgKind {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ Ok(match reader.read_u8()? {
+ 0x12 => InstantiationArgKind::Instance,
+ x => return reader.invalid_leading_byte(x, "instantiation arg kind"),
+ })
+ }
+}
+
+/// Represents an argument to instantiating a WebAssembly component.
+#[derive(Debug, Clone)]
+pub struct ComponentInstantiationArg<'a> {
+ /// The name of the component argument.
+ pub name: &'a str,
+ /// The kind of the component argument.
+ pub kind: ComponentExternalKind,
+ /// The index of the argument item.
+ pub index: u32,
+}
+
+/// Represents an instance in a WebAssembly component.
+#[derive(Debug, Clone)]
+pub enum ComponentInstance<'a> {
+ /// The instance is from instantiating a WebAssembly component.
+ Instantiate {
+ /// The component index.
+ component_index: u32,
+ /// The component's instantiation arguments.
+ args: Box<[ComponentInstantiationArg<'a>]>,
+ },
+ /// The instance is from exporting local items.
+ FromExports(Box<[ComponentExport<'a>]>),
+}
+
+/// A reader for the component instance section of a WebAssembly component.
+///
+/// # Examples
+///
+/// ```
+/// use wasmparser::ComponentInstanceSectionReader;
+/// # let data: &[u8] = &[0x01, 0x00, 0x00, 0x01, 0x03, b'f', b'o', b'o', 0x01, 0x00];
+/// let mut reader = ComponentInstanceSectionReader::new(data, 0).unwrap();
+/// for inst in reader {
+/// println!("Instance {:?}", inst.expect("instance"));
+/// }
+/// ```
+pub type ComponentInstanceSectionReader<'a> = SectionLimited<'a, ComponentInstance<'a>>;
+
+impl<'a> FromReader<'a> for ComponentInstance<'a> {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ Ok(match reader.read_u8()? {
+ 0x00 => ComponentInstance::Instantiate {
+ component_index: reader.read_var_u32()?,
+ args: reader
+ .read_iter(MAX_WASM_INSTANTIATION_ARGS, "instantiation arguments")?
+ .collect::<Result<_>>()?,
+ },
+ 0x01 => ComponentInstance::FromExports(
+ (0..reader.read_size(MAX_WASM_INSTANTIATION_EXPORTS, "instantiation exports")?)
+ .map(|_| {
+ Ok(ComponentExport {
+ name: reader.read()?,
+ url: "",
+ kind: reader.read()?,
+ index: reader.read()?,
+ ty: None,
+ })
+ })
+ .collect::<Result<_>>()?,
+ ),
+ x => return reader.invalid_leading_byte(x, "instance"),
+ })
+ }
+}
+impl<'a> FromReader<'a> for ComponentInstantiationArg<'a> {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ Ok(ComponentInstantiationArg {
+ name: reader.read()?,
+ kind: reader.read()?,
+ index: reader.read()?,
+ })
+ }
+}
diff --git a/third_party/rust/wasmparser/src/readers/component/names.rs b/third_party/rust/wasmparser/src/readers/component/names.rs
new file mode 100644
index 0000000000..19de2752d0
--- /dev/null
+++ b/third_party/rust/wasmparser/src/readers/component/names.rs
@@ -0,0 +1,102 @@
+use crate::{BinaryReader, BinaryReaderError, NameMap, Result, Subsection, Subsections};
+use std::ops::Range;
+
+/// Type used to iterate and parse the contents of the `component-name` custom
+/// section in components, similar to the `name` section of core modules.
+pub type ComponentNameSectionReader<'a> = Subsections<'a, ComponentName<'a>>;
+
+/// Represents a name read from the names custom section.
+#[derive(Clone)]
+#[allow(missing_docs)]
+pub enum ComponentName<'a> {
+ Component {
+ name: &'a str,
+ name_range: Range<usize>,
+ },
+ CoreFuncs(NameMap<'a>),
+ CoreGlobals(NameMap<'a>),
+ CoreMemories(NameMap<'a>),
+ CoreTables(NameMap<'a>),
+ CoreModules(NameMap<'a>),
+ CoreInstances(NameMap<'a>),
+ CoreTypes(NameMap<'a>),
+ Types(NameMap<'a>),
+ Instances(NameMap<'a>),
+ Components(NameMap<'a>),
+ Funcs(NameMap<'a>),
+ Values(NameMap<'a>),
+
+ /// An unknown [name subsection](https://webassembly.github.io/spec/core/appendix/custom.html#subsections).
+ Unknown {
+ /// The identifier for this subsection.
+ ty: u8,
+ /// The contents of this subsection.
+ data: &'a [u8],
+ /// The range of bytes, relative to the start of the original data
+ /// stream, that the contents of this subsection reside in.
+ range: Range<usize>,
+ },
+}
+
+impl<'a> Subsection<'a> for ComponentName<'a> {
+ fn from_reader(id: u8, mut reader: BinaryReader<'a>) -> Result<Self> {
+ let data = reader.remaining_buffer();
+ let offset = reader.original_position();
+ Ok(match id {
+ 0 => {
+ let name = reader.read_string()?;
+ if !reader.eof() {
+ return Err(BinaryReaderError::new(
+ "trailing data at the end of a name",
+ reader.original_position(),
+ ));
+ }
+ ComponentName::Component {
+ name,
+ name_range: offset..offset + reader.position,
+ }
+ }
+ 1 => {
+ let ctor: fn(NameMap<'a>) -> ComponentName<'a> = match reader.read_u8()? {
+ 0x00 => match reader.read_u8()? {
+ 0x00 => ComponentName::CoreFuncs,
+ 0x01 => ComponentName::CoreTables,
+ 0x02 => ComponentName::CoreMemories,
+ 0x03 => ComponentName::CoreGlobals,
+ 0x10 => ComponentName::CoreTypes,
+ 0x11 => ComponentName::CoreModules,
+ 0x12 => ComponentName::CoreInstances,
+ _ => {
+ return Ok(ComponentName::Unknown {
+ ty: 1,
+ data,
+ range: offset..offset + data.len(),
+ });
+ }
+ },
+ 0x01 => ComponentName::Funcs,
+ 0x02 => ComponentName::Values,
+ 0x03 => ComponentName::Types,
+ 0x04 => ComponentName::Components,
+ 0x05 => ComponentName::Instances,
+ _ => {
+ return Ok(ComponentName::Unknown {
+ ty: 1,
+ data,
+ range: offset..offset + data.len(),
+ });
+ }
+ };
+ ctor(NameMap::new(
+ reader.remaining_buffer(),
+ reader.original_position(),
+ )?)
+ }
+ ty => ComponentName::Unknown {
+ ty,
+ data,
+ range: offset..offset + data.len(),
+ },
+ })
+ }
+}
diff --git a/third_party/rust/wasmparser/src/readers/component/start.rs b/third_party/rust/wasmparser/src/readers/component/start.rs
new file mode 100644
index 0000000000..dc01fa4340
--- /dev/null
+++ b/third_party/rust/wasmparser/src/readers/component/start.rs
@@ -0,0 +1,30 @@
+use crate::limits::{MAX_WASM_FUNCTION_RETURNS, MAX_WASM_START_ARGS};
+use crate::{BinaryReader, FromReader, Result};
+
+/// Represents the start function in a WebAssembly component.
+#[derive(Debug, Clone)]
+pub struct ComponentStartFunction {
+ /// The index to the start function.
+ pub func_index: u32,
+ /// The start function arguments.
+ ///
+ /// The arguments are specified by value index.
+ pub arguments: Box<[u32]>,
+ /// The number of expected results for the start function.
+ pub results: u32,
+}
+
+impl<'a> FromReader<'a> for ComponentStartFunction {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ let func_index = reader.read_var_u32()?;
+ let arguments = reader
+ .read_iter(MAX_WASM_START_ARGS, "start function arguments")?
+ .collect::<Result<_>>()?;
+ let results = reader.read_size(MAX_WASM_FUNCTION_RETURNS, "start function results")? as u32;
+ Ok(ComponentStartFunction {
+ func_index,
+ arguments,
+ results,
+ })
+ }
+}
diff --git a/third_party/rust/wasmparser/src/readers/component/types.rs b/third_party/rust/wasmparser/src/readers/component/types.rs
new file mode 100644
index 0000000000..b0e9687a4d
--- /dev/null
+++ b/third_party/rust/wasmparser/src/readers/component/types.rs
@@ -0,0 +1,508 @@
+use crate::limits::*;
+use crate::{
+ BinaryReader, ComponentAlias, ComponentImport, ComponentTypeRef, FromReader, FuncType, Import,
+ Result, SectionLimited, Type, TypeRef,
+};
+
+/// Represents the kind of an outer core alias in a WebAssembly component.
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+pub enum OuterAliasKind {
+ /// The alias is to a core type.
+ Type,
+}
+
+/// Represents a core type in a WebAssembly component.
+#[derive(Debug, Clone)]
+pub enum CoreType<'a> {
+ /// The type is for a core function.
+ Func(FuncType),
+ /// The type is for a core module.
+ Module(Box<[ModuleTypeDeclaration<'a>]>),
+}
+
+impl<'a> FromReader<'a> for CoreType<'a> {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ Ok(match reader.read_u8()? {
+ 0x60 => CoreType::Func(reader.read()?),
+ 0x50 => CoreType::Module(
+ reader
+ .read_iter(MAX_WASM_MODULE_TYPE_DECLS, "module type declaration")?
+ .collect::<Result<_>>()?,
+ ),
+ x => return reader.invalid_leading_byte(x, "core type"),
+ })
+ }
+}
+
+/// Represents a module type declaration in a WebAssembly component.
+#[derive(Debug, Clone)]
+pub enum ModuleTypeDeclaration<'a> {
+ /// The module type definition is for a type.
+ Type(Type),
+ /// The module type definition is for an export.
+ Export {
+ /// The name of the exported item.
+ name: &'a str,
+ /// The type reference of the export.
+ ty: TypeRef,
+ },
+ /// The module type declaration is for an outer alias.
+ OuterAlias {
+ /// The alias kind.
+ kind: OuterAliasKind,
+ /// The outward count, starting at zero for the current type.
+ count: u32,
+ /// The index of the item within the outer type.
+ index: u32,
+ },
+ /// The module type declaration is for an import.
+ Import(Import<'a>),
+}
+
+impl<'a> FromReader<'a> for ModuleTypeDeclaration<'a> {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ Ok(match reader.read_u8()? {
+ 0x00 => ModuleTypeDeclaration::Import(reader.read()?),
+ 0x01 => ModuleTypeDeclaration::Type(reader.read()?),
+ 0x02 => {
+ let kind = match reader.read_u8()? {
+ 0x10 => OuterAliasKind::Type,
+ x => {
+ return reader.invalid_leading_byte(x, "outer alias kind");
+ }
+ };
+ match reader.read_u8()? {
+ 0x01 => ModuleTypeDeclaration::OuterAlias {
+ kind,
+ count: reader.read()?,
+ index: reader.read()?,
+ },
+ x => {
+ return reader.invalid_leading_byte(x, "outer alias target");
+ }
+ }
+ }
+ 0x03 => ModuleTypeDeclaration::Export {
+ name: reader.read()?,
+ ty: reader.read()?,
+ },
+ x => return reader.invalid_leading_byte(x, "type definition"),
+ })
+ }
+}
+
+/// A reader for the core type section of a WebAssembly component.
+///
+/// # Examples
+/// ```
+/// use wasmparser::CoreTypeSectionReader;
+/// # let data: &[u8] = &[0x01, 0x60, 0x00, 0x00];
+/// let mut reader = CoreTypeSectionReader::new(data, 0).unwrap();
+/// for ty in reader {
+/// println!("Type {:?}", ty.expect("type"));
+/// }
+/// ```
+pub type CoreTypeSectionReader<'a> = SectionLimited<'a, CoreType<'a>>;
+
+/// Represents a value type in a WebAssembly component.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum ComponentValType {
+ /// The value type is a primitive type.
+ Primitive(PrimitiveValType),
+ /// The value type is a reference to a defined type.
+ Type(u32),
+}
+
+impl<'a> FromReader<'a> for ComponentValType {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ if let Some(ty) = PrimitiveValType::from_byte(reader.peek()?) {
+ reader.position += 1;
+ return Ok(ComponentValType::Primitive(ty));
+ }
+
+ Ok(ComponentValType::Type(reader.read_var_s33()? as u32))
+ }
+}
+
+impl<'a> FromReader<'a> for Option<ComponentValType> {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ match reader.read_u8()? {
+ 0x0 => Ok(None),
+ 0x1 => Ok(Some(reader.read()?)),
+ x => reader.invalid_leading_byte(x, "optional component value type"),
+ }
+ }
+}
+
+/// Represents a primitive value type.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum PrimitiveValType {
+ /// The type is a boolean.
+ Bool,
+ /// The type is a signed 8-bit integer.
+ S8,
+ /// The type is an unsigned 8-bit integer.
+ U8,
+ /// The type is a signed 16-bit integer.
+ S16,
+ /// The type is an unsigned 16-bit integer.
+ U16,
+ /// The type is a signed 32-bit integer.
+ S32,
+ /// The type is an unsigned 32-bit integer.
+ U32,
+ /// The type is a signed 64-bit integer.
+ S64,
+ /// The type is an unsigned 64-bit integer.
+ U64,
+ /// The type is a 32-bit floating point number.
+ Float32,
+ /// The type is a 64-bit floating point number.
+ Float64,
+ /// The type is a Unicode character.
+ Char,
+ /// The type is a string.
+ String,
+}
+
+impl PrimitiveValType {
+ fn from_byte(byte: u8) -> Option<PrimitiveValType> {
+ Some(match byte {
+ 0x7f => PrimitiveValType::Bool,
+ 0x7e => PrimitiveValType::S8,
+ 0x7d => PrimitiveValType::U8,
+ 0x7c => PrimitiveValType::S16,
+ 0x7b => PrimitiveValType::U16,
+ 0x7a => PrimitiveValType::S32,
+ 0x79 => PrimitiveValType::U32,
+ 0x78 => PrimitiveValType::S64,
+ 0x77 => PrimitiveValType::U64,
+ 0x76 => PrimitiveValType::Float32,
+ 0x75 => PrimitiveValType::Float64,
+ 0x74 => PrimitiveValType::Char,
+ 0x73 => PrimitiveValType::String,
+ _ => return None,
+ })
+ }
+
+ pub(crate) fn requires_realloc(&self) -> bool {
+ matches!(self, Self::String)
+ }
+
+ /// Determines if primitive value type `a` is a subtype of `b`.
+ pub fn is_subtype_of(a: Self, b: Self) -> bool {
+ // Note that this intentionally diverges from the upstream specification
+ // at this time and only considers exact equality for subtyping
+ // relationships.
+ //
+ // More information can be found in the subtyping implementation for
+ // component functions.
+ a == b
+ }
+}
+
+/// Represents a type in a WebAssembly component.
+#[derive(Debug, Clone)]
+pub enum ComponentType<'a> {
+ /// The type is a component defined type.
+ Defined(ComponentDefinedType<'a>),
+ /// The type is a function type.
+ Func(ComponentFuncType<'a>),
+ /// The type is a component type.
+ Component(Box<[ComponentTypeDeclaration<'a>]>),
+ /// The type is an instance type.
+ Instance(Box<[InstanceTypeDeclaration<'a>]>),
+}
+
+impl<'a> FromReader<'a> for ComponentType<'a> {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ Ok(match reader.read_u8()? {
+ 0x40 => {
+ let params = reader
+ .read_iter(MAX_WASM_FUNCTION_PARAMS, "component function parameters")?
+ .collect::<Result<_>>()?;
+ let results = reader.read()?;
+ ComponentType::Func(ComponentFuncType { params, results })
+ }
+ 0x41 => ComponentType::Component(
+ reader
+ .read_iter(MAX_WASM_COMPONENT_TYPE_DECLS, "component type declaration")?
+ .collect::<Result<_>>()?,
+ ),
+ 0x42 => ComponentType::Instance(
+ reader
+ .read_iter(MAX_WASM_INSTANCE_TYPE_DECLS, "instance type declaration")?
+ .collect::<Result<_>>()?,
+ ),
+ x => {
+ if let Some(ty) = PrimitiveValType::from_byte(x) {
+ ComponentType::Defined(ComponentDefinedType::Primitive(ty))
+ } else {
+ ComponentType::Defined(ComponentDefinedType::read(reader, x)?)
+ }
+ }
+ })
+ }
+}
+
+/// Represents part of a component type declaration in a WebAssembly component.
+#[derive(Debug, Clone)]
+pub enum ComponentTypeDeclaration<'a> {
+ /// The component type declaration is for a core type.
+ CoreType(CoreType<'a>),
+ /// The component type declaration is for a type.
+ Type(ComponentType<'a>),
+ /// The component type declaration is for an alias.
+ Alias(ComponentAlias<'a>),
+ /// The component type declaration is for an export.
+ Export {
+ /// The name of the export.
+ name: &'a str,
+ /// The optional URL of the export.
+ url: &'a str,
+ /// The type reference for the export.
+ ty: ComponentTypeRef,
+ },
+ /// The component type declaration is for an import.
+ Import(ComponentImport<'a>),
+}
+
+impl<'a> FromReader<'a> for ComponentTypeDeclaration<'a> {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ // Component types are effectively instance types with the additional
+ // variant of imports; check for imports here or delegate to
+ // `InstanceTypeDeclaration` with the appropriate conversions.
+ if reader.peek()? == 0x03 {
+ reader.position += 1;
+ return Ok(ComponentTypeDeclaration::Import(reader.read()?));
+ }
+
+ Ok(match reader.read()? {
+ InstanceTypeDeclaration::CoreType(t) => ComponentTypeDeclaration::CoreType(t),
+ InstanceTypeDeclaration::Type(t) => ComponentTypeDeclaration::Type(t),
+ InstanceTypeDeclaration::Alias(a) => ComponentTypeDeclaration::Alias(a),
+ InstanceTypeDeclaration::Export { name, url, ty } => {
+ ComponentTypeDeclaration::Export { name, url, ty }
+ }
+ })
+ }
+}
+
+/// Represents an instance type declaration in a WebAssembly component.
+#[derive(Debug, Clone)]
+pub enum InstanceTypeDeclaration<'a> {
+ /// The instance type declaration is for a core type.
+ CoreType(CoreType<'a>),
+ /// The instance type declaration is for a type.
+ Type(ComponentType<'a>),
+ /// The instance type declaration is for an alias.
+ Alias(ComponentAlias<'a>),
+ /// The instance type declaration is for an export.
+ Export {
+ /// The name of the export.
+ name: &'a str,
+ /// The URL for the export.
+ url: &'a str,
+ /// The type reference for the export.
+ ty: ComponentTypeRef,
+ },
+}
+
+impl<'a> FromReader<'a> for InstanceTypeDeclaration<'a> {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ Ok(match reader.read_u8()? {
+ 0x00 => InstanceTypeDeclaration::CoreType(reader.read()?),
+ 0x01 => InstanceTypeDeclaration::Type(reader.read()?),
+ 0x02 => InstanceTypeDeclaration::Alias(reader.read()?),
+ 0x04 => InstanceTypeDeclaration::Export {
+ name: reader.read()?,
+ url: reader.read()?,
+ ty: reader.read()?,
+ },
+ x => return reader.invalid_leading_byte(x, "component or instance type declaration"),
+ })
+ }
+}
+
+/// Represents the result type of a component function.
+#[derive(Debug, Clone)]
+pub enum ComponentFuncResult<'a> {
+ /// The function returns a singular, unnamed type.
+ Unnamed(ComponentValType),
+ /// The function returns zero or more named types.
+ Named(Box<[(&'a str, ComponentValType)]>),
+}
+
+impl<'a> FromReader<'a> for ComponentFuncResult<'a> {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ Ok(match reader.read_u8()? {
+ 0x00 => ComponentFuncResult::Unnamed(reader.read()?),
+ 0x01 => ComponentFuncResult::Named(
+ reader
+ .read_iter(MAX_WASM_FUNCTION_RETURNS, "component function results")?
+ .collect::<Result<_>>()?,
+ ),
+ x => return reader.invalid_leading_byte(x, "component function results"),
+ })
+ }
+}
+
+impl ComponentFuncResult<'_> {
+ /// Gets the count of types returned by the function.
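+ ///
+ /// # Examples
+ ///
+ /// A small illustrative sketch; the value below is hand-constructed rather
+ /// than read from a binary:
+ ///
+ /// ```
+ /// use wasmparser::{ComponentFuncResult, ComponentValType, PrimitiveValType};
+ /// let result = ComponentFuncResult::Unnamed(ComponentValType::Primitive(PrimitiveValType::U32));
+ /// assert_eq!(result.type_count(), 1);
+ /// ```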
+ pub fn type_count(&self) -> usize {
+ match self {
+ Self::Unnamed(_) => 1,
+ Self::Named(vec) => vec.len(),
+ }
+ }
+
+ /// Iterates over the types returned by the function.
+ pub fn iter(&self) -> impl Iterator<Item = (Option<&str>, &ComponentValType)> {
+ enum Either<L, R> {
+ Left(L),
+ Right(R),
+ }
+
+ impl<L, R> Iterator for Either<L, R>
+ where
+ L: Iterator,
+ R: Iterator<Item = L::Item>,
+ {
+ type Item = L::Item;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ match self {
+ Either::Left(l) => l.next(),
+ Either::Right(r) => r.next(),
+ }
+ }
+ }
+
+ match self {
+ Self::Unnamed(ty) => Either::Left(std::iter::once(ty).map(|ty| (None, ty))),
+ Self::Named(vec) => Either::Right(vec.iter().map(|(n, ty)| (Some(*n), ty))),
+ }
+ }
+}
+
+/// Represents a type of a function in a WebAssembly component.
+#[derive(Debug, Clone)]
+pub struct ComponentFuncType<'a> {
+ /// The function parameters.
+ pub params: Box<[(&'a str, ComponentValType)]>,
+ /// The function result.
+ pub results: ComponentFuncResult<'a>,
+}
+
+/// Represents a case in a variant type.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct VariantCase<'a> {
+ /// The name of the variant case.
+ pub name: &'a str,
+ /// The value type of the variant case.
+ pub ty: Option<ComponentValType>,
+ /// The index of the variant case that is refined by this one.
+ pub refines: Option<u32>,
+}
+
+impl<'a> FromReader<'a> for VariantCase<'a> {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ Ok(VariantCase {
+ name: reader.read()?,
+ ty: reader.read()?,
+ refines: match reader.read_u8()? {
+ 0x0 => None,
+ 0x1 => Some(reader.read_var_u32()?),
+ x => return reader.invalid_leading_byte(x, "variant case refines"),
+ },
+ })
+ }
+}
+
+/// Represents a defined type in a WebAssembly component.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum ComponentDefinedType<'a> {
+ /// The type is one of the primitive value types.
+ Primitive(PrimitiveValType),
+ /// The type is a record with the given fields.
+ Record(Box<[(&'a str, ComponentValType)]>),
+ /// The type is a variant with the given cases.
+ Variant(Box<[VariantCase<'a>]>),
+ /// The type is a list of the given value type.
+ List(ComponentValType),
+ /// The type is a tuple of the given value types.
+ Tuple(Box<[ComponentValType]>),
+ /// The type is flags with the given names.
+ Flags(Box<[&'a str]>),
+ /// The type is an enum with the given tags.
+ Enum(Box<[&'a str]>),
+ /// The type is a union of the given value types.
+ Union(Box<[ComponentValType]>),
+ /// The type is an option of the given value type.
+ Option(ComponentValType),
+ /// The type is a result type.
+ Result {
+ /// The type returned for success.
+ ok: Option<ComponentValType>,
+ /// The type returned for failure.
+ err: Option<ComponentValType>,
+ },
+}
+
+impl<'a> ComponentDefinedType<'a> {
+ fn read(reader: &mut BinaryReader<'a>, byte: u8) -> Result<ComponentDefinedType<'a>> {
+ Ok(match byte {
+ 0x72 => ComponentDefinedType::Record(
+ reader
+ .read_iter(MAX_WASM_RECORD_FIELDS, "record field")?
+ .collect::<Result<_>>()?,
+ ),
+ 0x71 => ComponentDefinedType::Variant(
+ reader
+ .read_iter(MAX_WASM_VARIANT_CASES, "variant cases")?
+ .collect::<Result<_>>()?,
+ ),
+ 0x70 => ComponentDefinedType::List(reader.read()?),
+ 0x6f => ComponentDefinedType::Tuple(
+ reader
+ .read_iter(MAX_WASM_TUPLE_TYPES, "tuple types")?
+ .collect::<Result<_>>()?,
+ ),
+ 0x6e => ComponentDefinedType::Flags(
+ reader
+ .read_iter(MAX_WASM_FLAG_NAMES, "flag names")?
+ .collect::<Result<_>>()?,
+ ),
+ 0x6d => ComponentDefinedType::Enum(
+ reader
+ .read_iter(MAX_WASM_ENUM_CASES, "enum cases")?
+ .collect::<Result<_>>()?,
+ ),
+ 0x6c => ComponentDefinedType::Union(
+ reader
+ .read_iter(MAX_WASM_UNION_TYPES, "union types")?
+ .collect::<Result<_>>()?,
+ ),
+ 0x6b => ComponentDefinedType::Option(reader.read()?),
+ 0x6a => ComponentDefinedType::Result {
+ ok: reader.read()?,
+ err: reader.read()?,
+ },
+ x => return reader.invalid_leading_byte(x, "component defined type"),
+ })
+ }
+}
+
+/// A reader for the type section of a WebAssembly component.
+///
+/// # Examples
+///
+/// ```
+/// use wasmparser::ComponentTypeSectionReader;
+/// let data: &[u8] = &[0x01, 0x40, 0x01, 0x03, b'f', b'o', b'o', 0x73, 0x00, 0x73];
+/// let mut reader = ComponentTypeSectionReader::new(data, 0).unwrap();
+/// for ty in reader {
+/// println!("Type {:?}", ty.expect("type"));
+/// }
+/// ```
+pub type ComponentTypeSectionReader<'a> = SectionLimited<'a, ComponentType<'a>>;
diff --git a/third_party/rust/wasmparser/src/readers/core.rs b/third_party/rust/wasmparser/src/readers/core.rs
new file mode 100644
index 0000000000..c42bbf8d9e
--- /dev/null
+++ b/third_party/rust/wasmparser/src/readers/core.rs
@@ -0,0 +1,33 @@
+mod code;
+mod custom;
+mod data;
+mod elements;
+mod exports;
+mod functions;
+mod globals;
+mod imports;
+mod init;
+mod memories;
+mod names;
+mod operators;
+mod producers;
+mod tables;
+mod tags;
+mod types;
+
+pub use self::code::*;
+pub use self::custom::*;
+pub use self::data::*;
+pub use self::elements::*;
+pub use self::exports::*;
+pub use self::functions::*;
+pub use self::globals::*;
+pub use self::imports::*;
+pub use self::init::*;
+pub use self::memories::*;
+pub use self::names::*;
+pub use self::operators::*;
+pub use self::producers::*;
+pub use self::tables::*;
+pub use self::tags::*;
+pub use self::types::*;
diff --git a/third_party/rust/wasmparser/src/readers/core/code.rs b/third_party/rust/wasmparser/src/readers/core/code.rs
new file mode 100644
index 0000000000..2a463727e8
--- /dev/null
+++ b/third_party/rust/wasmparser/src/readers/core/code.rs
@@ -0,0 +1,146 @@
+/* Copyright 2018 Mozilla Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use crate::{BinaryReader, FromReader, OperatorsReader, Result, SectionLimited, ValType};
+use std::ops::Range;
+
+/// A reader for the code section of a WebAssembly module.
+pub type CodeSectionReader<'a> = SectionLimited<'a, FunctionBody<'a>>;
+
+/// Represents a WebAssembly function body.
+#[derive(Debug, Clone)]
+pub struct FunctionBody<'a> {
+ reader: BinaryReader<'a>,
+}
+
+impl<'a> FunctionBody<'a> {
+ /// Constructs a new `FunctionBody` for the given data and offset.
+ pub fn new(offset: usize, data: &'a [u8]) -> Self {
+ Self {
+ reader: BinaryReader::new_with_offset(data, offset),
+ }
+ }
+
+ /// Whether or not to allow 64-bit memory arguments in the
+ /// function body.
+ ///
+ /// This is intended to be `true` when support for the memory64
+ /// WebAssembly proposal is also enabled.
+ pub fn allow_memarg64(&mut self, allow: bool) {
+ self.reader.allow_memarg64(allow);
+ }
+
+ /// Gets a binary reader for this function body.
+ pub fn get_binary_reader(&self) -> BinaryReader<'a> {
+ self.reader.clone()
+ }
+
+ fn skip_locals(reader: &mut BinaryReader) -> Result<()> {
+ let count = reader.read_var_u32()?;
+ for _ in 0..count {
+ reader.read_var_u32()?;
+ reader.read::<ValType>()?;
+ }
+ Ok(())
+ }
+
+ /// Gets the locals reader for this function body.
+ pub fn get_locals_reader(&self) -> Result<LocalsReader<'a>> {
+ let mut reader = self.reader.clone();
+ let count = reader.read_var_u32()?;
+ Ok(LocalsReader { reader, count })
+ }
+
+ /// Gets the operators reader for this function body.
+ pub fn get_operators_reader(&self) -> Result<OperatorsReader<'a>> {
+ let mut reader = self.reader.clone();
+ Self::skip_locals(&mut reader)?;
+ Ok(OperatorsReader::new(reader))
+ }
+
+ /// Gets the range of the function body.
+ pub fn range(&self) -> Range<usize> {
+ self.reader.range()
+ }
+}
+
+impl<'a> FromReader<'a> for FunctionBody<'a> {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ let reader = reader.read_reader("function body extends past end of the code section")?;
+ Ok(FunctionBody { reader })
+ }
+}
+
+/// A reader for a function body's locals.
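+///
+/// # Examples
+///
+/// A small sketch, assuming the standard encoding of a function body: one
+/// locals entry declaring two `i32` locals, followed by `nop` and `end`.
+///
+/// ```
+/// use wasmparser::{FunctionBody, ValType};
+/// let data: &[u8] = &[0x01, 0x02, 0x7f, 0x01, 0x0b];
+/// let body = FunctionBody::new(0, data);
+/// let locals = body.get_locals_reader().expect("locals reader");
+/// for local in locals {
+/// let (count, ty) = local.expect("local");
+/// assert_eq!(count, 2);
+/// assert_eq!(ty, ValType::I32);
+/// }
+/// ```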
+pub struct LocalsReader<'a> {
+ reader: BinaryReader<'a>,
+ count: u32,
+}
+
+impl<'a> LocalsReader<'a> {
+ /// Gets the count of locals in the function body.
+ pub fn get_count(&self) -> u32 {
+ self.count
+ }
+
+ /// Gets the original position of the reader.
+ pub fn original_position(&self) -> usize {
+ self.reader.original_position()
+ }
+
+ /// Reads an item from the reader.
+ pub fn read(&mut self) -> Result<(u32, ValType)> {
+ let count = self.reader.read()?;
+ let value_type = self.reader.read()?;
+ Ok((count, value_type))
+ }
+}
+
+impl<'a> IntoIterator for LocalsReader<'a> {
+ type Item = Result<(u32, ValType)>;
+ type IntoIter = LocalsIterator<'a>;
+ fn into_iter(self) -> Self::IntoIter {
+ let count = self.count;
+ LocalsIterator {
+ reader: self,
+ left: count,
+ err: false,
+ }
+ }
+}
+
+/// An iterator over locals in a function body.
+pub struct LocalsIterator<'a> {
+ reader: LocalsReader<'a>,
+ left: u32,
+ err: bool,
+}
+
+impl<'a> Iterator for LocalsIterator<'a> {
+ type Item = Result<(u32, ValType)>;
+ fn next(&mut self) -> Option<Self::Item> {
+ if self.err || self.left == 0 {
+ return None;
+ }
+ let result = self.reader.read();
+ self.err = result.is_err();
+ self.left -= 1;
+ Some(result)
+ }
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let count = self.reader.get_count() as usize;
+ (count, Some(count))
+ }
+}
diff --git a/third_party/rust/wasmparser/src/readers/core/custom.rs b/third_party/rust/wasmparser/src/readers/core/custom.rs
new file mode 100644
index 0000000000..a04fe5a1ac
--- /dev/null
+++ b/third_party/rust/wasmparser/src/readers/core/custom.rs
@@ -0,0 +1,63 @@
+use crate::{BinaryReader, Result};
+use std::ops::Range;
+
+/// A reader for custom sections of a WebAssembly module.
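+///
+/// # Examples
+///
+/// A small sketch over a hand-assembled custom section body: the name
+/// `"name"` followed by two payload bytes.
+///
+/// ```
+/// use wasmparser::CustomSectionReader;
+/// let data: &[u8] = &[0x04, b'n', b'a', b'm', b'e', 0x01, 0x02];
+/// let reader = CustomSectionReader::new(data, 0).unwrap();
+/// assert_eq!(reader.name(), "name");
+/// assert_eq!(reader.data_offset(), 5);
+/// assert_eq!(reader.data(), &[0x01, 0x02]);
+/// ```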
+#[derive(Clone)]
+pub struct CustomSectionReader<'a> {
+ // NB: these fields are public to the crate to make testing easier.
+ pub(crate) name: &'a str,
+ pub(crate) data_offset: usize,
+ pub(crate) data: &'a [u8],
+ pub(crate) range: Range<usize>,
+}
+
+impl<'a> CustomSectionReader<'a> {
+ /// Constructs a new `CustomSectionReader` for the given data and offset.
+ pub fn new(data: &'a [u8], offset: usize) -> Result<CustomSectionReader<'a>> {
+ let mut reader = BinaryReader::new_with_offset(data, offset);
+ let name = reader.read_string()?;
+ let data_offset = reader.original_position();
+ let data = reader.remaining_buffer();
+ let range = reader.range();
+ Ok(CustomSectionReader {
+ name,
+ data_offset,
+ data,
+ range,
+ })
+ }
+
+ /// The name of the custom section.
+ pub fn name(&self) -> &'a str {
+ self.name
+ }
+
+ /// The offset, relative to the start of the original module or component,
+ /// that the `data` payload for this custom section starts at.
+ pub fn data_offset(&self) -> usize {
+ self.data_offset
+ }
+
+ /// The actual contents of the custom section.
+ pub fn data(&self) -> &'a [u8] {
+ self.data
+ }
+
+ /// The range of bytes that specify this whole custom section (including
+ /// both the name of this custom section and its data) specified in
+ /// offsets relative to the start of the byte stream.
+ pub fn range(&self) -> Range<usize> {
+ self.range.clone()
+ }
+}
+
+impl<'a> std::fmt::Debug for CustomSectionReader<'a> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.debug_struct("CustomSectionReader")
+ .field("name", &self.name)
+ .field("data_offset", &self.data_offset)
+ .field("data", &"...")
+ .field("range", &self.range)
+ .finish()
+ }
+}
diff --git a/third_party/rust/wasmparser/src/readers/core/data.rs b/third_party/rust/wasmparser/src/readers/core/data.rs
new file mode 100644
index 0000000000..5ea5f99457
--- /dev/null
+++ b/third_party/rust/wasmparser/src/readers/core/data.rs
@@ -0,0 +1,96 @@
+/* Copyright 2018 Mozilla Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use crate::{BinaryReader, BinaryReaderError, ConstExpr, FromReader, Result, SectionLimited};
+use std::ops::Range;
+
+/// Represents a data segment in a core WebAssembly module.
+#[derive(Debug, Clone)]
+pub struct Data<'a> {
+ /// The kind of data segment.
+ pub kind: DataKind<'a>,
+ /// The data of the data segment.
+ pub data: &'a [u8],
+ /// The range of the data segment.
+ pub range: Range<usize>,
+}
+
+/// The kind of data segment.
+#[derive(Debug, Copy, Clone)]
+pub enum DataKind<'a> {
+ /// The data segment is passive.
+ Passive,
+ /// The data segment is active.
+ Active {
+ /// The memory index for the data segment.
+ memory_index: u32,
+ /// The initialization expression for the data segment.
+ offset_expr: ConstExpr<'a>,
+ },
+}
+
+/// A reader for the data section of a WebAssembly module.
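+///
+/// # Examples
+///
+/// A small sketch, assuming the standard encoding of a data section with one
+/// active segment: flags `0x00`, offset expression `i32.const 0; end`, and
+/// three payload bytes.
+///
+/// ```
+/// use wasmparser::{DataKind, DataSectionReader};
+/// let data: &[u8] = &[0x01, 0x00, 0x41, 0x00, 0x0b, 0x03, 0x01, 0x02, 0x03];
+/// let reader = DataSectionReader::new(data, 0).unwrap();
+/// for segment in reader {
+/// let segment = segment.expect("data segment");
+/// assert!(matches!(segment.kind, DataKind::Active { memory_index: 0, .. }));
+/// assert_eq!(segment.data, &[0x01, 0x02, 0x03]);
+/// }
+/// ```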
+pub type DataSectionReader<'a> = SectionLimited<'a, Data<'a>>;
+
+impl<'a> FromReader<'a> for Data<'a> {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ let segment_start = reader.original_position();
+
+ // The current handling of the flags is largely specified in the `bulk-memory` proposal,
+ // which at the time this comment is written has been merged to the main specification
+ // draft.
+ //
+ // Notably, this proposal allows multiple different encodings of the memory index 0. `00`
+ // and `02 00` are both valid ways to specify the 0-th memory. However, it also makes
+ // another encoding of the 0-th memory, `80 00`, no longer valid.
+ //
+ // We, however, maintain this support by parsing `flags` as a LEB128 integer. In that case, `80 00`
+ // encoding is parsed out as `0` and is therefore assigned a `memidx` 0, even though the
+ // current specification draft does not allow for this.
+ //
+ // See also https://github.com/WebAssembly/spec/issues/1439
+ let flags = reader.read_var_u32()?;
+ let kind = match flags {
+ 1 => DataKind::Passive,
+ 0 | 2 => {
+ let memory_index = if flags == 0 {
+ 0
+ } else {
+ reader.read_var_u32()?
+ };
+ let offset_expr = reader.read()?;
+ DataKind::Active {
+ memory_index,
+ offset_expr,
+ }
+ }
+ _ => {
+ return Err(BinaryReaderError::new(
+ "invalid flags byte in data segment",
+ segment_start,
+ ));
+ }
+ };
+
+ let data = reader.read_reader(
+ "unexpected end of section or function: data segment extends past end of the section",
+ )?;
+ Ok(Data {
+ kind,
+ data: data.remaining_buffer(),
+ range: segment_start..data.range().end,
+ })
+ }
+}
diff --git a/third_party/rust/wasmparser/src/readers/core/elements.rs b/third_party/rust/wasmparser/src/readers/core/elements.rs
new file mode 100644
index 0000000000..7e37e7d7b6
--- /dev/null
+++ b/third_party/rust/wasmparser/src/readers/core/elements.rs
@@ -0,0 +1,158 @@
+/* Copyright 2018 Mozilla Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use crate::{
+ BinaryReader, BinaryReaderError, ConstExpr, ExternalKind, FromReader, RefType, Result,
+ SectionLimited,
+};
+use std::ops::Range;
+
+/// Represents a core WebAssembly element segment.
+#[derive(Clone)]
+pub struct Element<'a> {
+ /// The kind of the element segment.
+ pub kind: ElementKind<'a>,
+ /// The initial elements of the element segment.
+ pub items: ElementItems<'a>,
+ /// The type of the elements.
+ pub ty: RefType,
+ /// The range of the element segment.
+ pub range: Range<usize>,
+}
+
+/// The kind of element segment.
+#[derive(Clone)]
+pub enum ElementKind<'a> {
+ /// The element segment is passive.
+ Passive,
+ /// The element segment is active.
+ Active {
+ /// The index of the table being initialized.
+ table_index: u32,
+ /// The initial expression of the element segment.
+ offset_expr: ConstExpr<'a>,
+ },
+ /// The element segment is declared.
+ Declared,
+}
+
+/// Represents the items of an element segment.
+#[derive(Clone)]
+pub enum ElementItems<'a> {
+ /// This element contains function indices.
+ Functions(SectionLimited<'a, u32>),
+ /// This element contains constant expressions used to initialize the table.
+ Expressions(SectionLimited<'a, ConstExpr<'a>>),
+}
+
+/// A reader for the element section of a WebAssembly module.
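+///
+/// # Examples
+///
+/// A small sketch, assuming the standard encoding of an element section with
+/// one active segment: flags `0x00`, offset expression `i32.const 0; end`,
+/// and a single function index.
+///
+/// ```
+/// use wasmparser::{ElementKind, ElementSectionReader};
+/// let data: &[u8] = &[0x01, 0x00, 0x41, 0x00, 0x0b, 0x01, 0x00];
+/// let reader = ElementSectionReader::new(data, 0).unwrap();
+/// for element in reader {
+/// let element = element.expect("element segment");
+/// assert!(matches!(element.kind, ElementKind::Active { table_index: 0, .. }));
+/// }
+/// ```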
+pub type ElementSectionReader<'a> = SectionLimited<'a, Element<'a>>;
+
+impl<'a> FromReader<'a> for Element<'a> {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ let elem_start = reader.original_position();
+ // The current handling of the flags is largely specified in the `bulk-memory` proposal,
+ // which at the time this comment is written has been merged to the main specification
+ // draft.
+ //
+ // Notably, this proposal allows multiple different encodings of the table index 0. `00`
+ // and `02 00` are both valid ways to specify the 0-th table. However, it also makes
+ // another encoding of the 0-th table, `80 00`, no longer valid.
+ //
+ // We, however, maintain this support by parsing `flags` as a LEB128 integer. In that case,
+ // `80 00` encoding is parsed out as `0` and is therefore assigned a `tableidx` 0, even
+ // though the current specification draft does not allow for this.
+ //
+ // See also https://github.com/WebAssembly/spec/issues/1439
+ let flags = reader.read_var_u32()?;
+ if (flags & !0b111) != 0 {
+ return Err(BinaryReaderError::new(
+ "invalid flags byte in element segment",
+ reader.original_position() - 1,
+ ));
+ }
+ let kind = if flags & 0b001 != 0 {
+ if flags & 0b010 != 0 {
+ ElementKind::Declared
+ } else {
+ ElementKind::Passive
+ }
+ } else {
+ let table_index = if flags & 0b010 == 0 {
+ 0
+ } else {
+ reader.read_var_u32()?
+ };
+ let offset_expr = reader.read()?;
+ ElementKind::Active {
+ table_index,
+ offset_expr,
+ }
+ };
+ let exprs = flags & 0b100 != 0;
+ let ty = if flags & 0b011 != 0 {
+ if exprs {
+ reader.read()?
+ } else {
+ match reader.read()? {
+ ExternalKind::Func => RefType::FUNCREF,
+ _ => {
+ return Err(BinaryReaderError::new(
+ "only the function external type is supported in elem segment",
+ reader.original_position() - 1,
+ ));
+ }
+ }
+ }
+ } else {
+ RefType::FUNCREF
+ };
+ // FIXME(#188) ideally wouldn't have to do skips here
+ let data = reader.skip(|reader| {
+ let items_count = reader.read_var_u32()?;
+ if exprs {
+ for _ in 0..items_count {
+ reader.skip_const_expr()?;
+ }
+ } else {
+ for _ in 0..items_count {
+ reader.read_var_u32()?;
+ }
+ }
+ Ok(())
+ })?;
+ let items = if exprs {
+ ElementItems::Expressions(SectionLimited::new(
+ data.remaining_buffer(),
+ data.original_position(),
+ )?)
+ } else {
+ ElementItems::Functions(SectionLimited::new(
+ data.remaining_buffer(),
+ data.original_position(),
+ )?)
+ };
+
+ let elem_end = reader.original_position();
+ let range = elem_start..elem_end;
+
+ Ok(Element {
+ kind,
+ items,
+ ty,
+ range,
+ })
+ }
+}
diff --git a/third_party/rust/wasmparser/src/readers/core/exports.rs b/third_party/rust/wasmparser/src/readers/core/exports.rs
new file mode 100644
index 0000000000..c1bd62626b
--- /dev/null
+++ b/third_party/rust/wasmparser/src/readers/core/exports.rs
@@ -0,0 +1,65 @@
+/* Copyright 2018 Mozilla Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use crate::{BinaryReader, FromReader, Result, SectionLimited};
+
+/// A reader for the export section of a WebAssembly module.
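+///
+/// # Examples
+///
+/// A small sketch, assuming the standard encoding of an export entry (name,
+/// kind byte, index); the bytes export function 0 under the name `"f"`.
+///
+/// ```
+/// use wasmparser::{ExportSectionReader, ExternalKind};
+/// let data: &[u8] = &[0x01, 0x01, b'f', 0x00, 0x00];
+/// let reader = ExportSectionReader::new(data, 0).unwrap();
+/// for export in reader {
+/// let export = export.expect("export");
+/// assert_eq!(export.name, "f");
+/// assert_eq!(export.kind, ExternalKind::Func);
+/// assert_eq!(export.index, 0);
+/// }
+/// ```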
+pub type ExportSectionReader<'a> = SectionLimited<'a, Export<'a>>;
+
+/// External types as defined [here].
+///
+/// [here]: https://webassembly.github.io/spec/core/syntax/types.html#external-types
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+pub enum ExternalKind {
+ /// The external kind is a function.
+ Func,
+ /// The external kind is a table.
+ Table,
+ /// The external kind is a memory.
+ Memory,
+ /// The external kind is a global.
+ Global,
+ /// The external kind is a tag.
+ Tag,
+}
+
+/// Represents an export in a WebAssembly module.
+#[derive(Debug, Copy, Clone)]
+pub struct Export<'a> {
+ /// The name of the exported item.
+ pub name: &'a str,
+ /// The kind of the export.
+ pub kind: ExternalKind,
+ /// The index of the exported item.
+ pub index: u32,
+}
+
+impl<'a> FromReader<'a> for Export<'a> {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ Ok(Export {
+ name: reader.read_string()?,
+ kind: reader.read()?,
+ index: reader.read_var_u32()?,
+ })
+ }
+}
+
+impl<'a> FromReader<'a> for ExternalKind {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ let offset = reader.original_position();
+ let byte = reader.read_u8()?;
+ BinaryReader::external_kind_from_byte(byte, offset)
+ }
+}
diff --git a/third_party/rust/wasmparser/src/readers/core/functions.rs b/third_party/rust/wasmparser/src/readers/core/functions.rs
new file mode 100644
index 0000000000..ebddce05a3
--- /dev/null
+++ b/third_party/rust/wasmparser/src/readers/core/functions.rs
@@ -0,0 +1,17 @@
+/* Copyright 2018 Mozilla Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/// A reader for the function section of a WebAssembly module.
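+///
+/// # Examples
+///
+/// A small sketch over a hand-assembled function section declaring two
+/// functions with type indices 0 and 1:
+///
+/// ```
+/// use wasmparser::{FunctionSectionReader, Result};
+/// let data: &[u8] = &[0x02, 0x00, 0x01];
+/// let reader = FunctionSectionReader::new(data, 0).unwrap();
+/// let indices = reader.into_iter().collect::<Result<Vec<u32>>>().unwrap();
+/// assert_eq!(indices, [0, 1]);
+/// ```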
+pub type FunctionSectionReader<'a> = crate::SectionLimited<'a, u32>;
diff --git a/third_party/rust/wasmparser/src/readers/core/globals.rs b/third_party/rust/wasmparser/src/readers/core/globals.rs
new file mode 100644
index 0000000000..6fd99bc0b8
--- /dev/null
+++ b/third_party/rust/wasmparser/src/readers/core/globals.rs
@@ -0,0 +1,49 @@
+/* Copyright 2018 Mozilla Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use crate::{BinaryReader, ConstExpr, FromReader, GlobalType, Result, SectionLimited};
+
+/// Represents a core WebAssembly global.
+#[derive(Debug, Copy, Clone)]
+pub struct Global<'a> {
+ /// The global's type.
+ pub ty: GlobalType,
+ /// The global's initialization expression.
+ pub init_expr: ConstExpr<'a>,
+}
+
+/// A reader for the global section of a WebAssembly module.
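+///
+/// # Examples
+///
+/// A small sketch, assuming the standard encoding of a global section with
+/// one immutable `i32` global initialized to `i32.const 42`:
+///
+/// ```
+/// use wasmparser::{GlobalSectionReader, ValType};
+/// let data: &[u8] = &[0x01, 0x7f, 0x00, 0x41, 0x2a, 0x0b];
+/// let reader = GlobalSectionReader::new(data, 0).unwrap();
+/// for global in reader {
+/// let global = global.expect("global");
+/// assert_eq!(global.ty.content_type, ValType::I32);
+/// assert!(!global.ty.mutable);
+/// }
+/// ```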
+pub type GlobalSectionReader<'a> = SectionLimited<'a, Global<'a>>;
+
+impl<'a> FromReader<'a> for Global<'a> {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ let ty = reader.read()?;
+ let init_expr = reader.read()?;
+ Ok(Global { ty, init_expr })
+ }
+}
+
+impl<'a> FromReader<'a> for GlobalType {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ Ok(GlobalType {
+ content_type: reader.read()?,
+ mutable: match reader.read_u8()? {
+ 0x00 => false,
+ 0x01 => true,
+ _ => bail!(reader.original_position() - 1, "malformed mutability",),
+ },
+ })
+ }
+}
diff --git a/third_party/rust/wasmparser/src/readers/core/imports.rs b/third_party/rust/wasmparser/src/readers/core/imports.rs
new file mode 100644
index 0000000000..d2a33c89e3
--- /dev/null
+++ b/third_party/rust/wasmparser/src/readers/core/imports.rs
@@ -0,0 +1,76 @@
+/* Copyright 2018 Mozilla Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use crate::{
+ BinaryReader, ExternalKind, FromReader, GlobalType, MemoryType, Result, SectionLimited,
+ TableType, TagType,
+};
+
+/// Represents a reference to a type definition in a WebAssembly module.
+#[derive(Debug, Clone, Copy)]
+pub enum TypeRef {
+ /// The type is a function.
+ ///
+ /// The value is an index into the type section.
+ Func(u32),
+ /// The type is a table.
+ Table(TableType),
+ /// The type is a memory.
+ Memory(MemoryType),
+ /// The type is a global.
+ Global(GlobalType),
+ /// The type is a tag.
+ ///
+ /// This variant is only used for the exception handling proposal.
+ ///
+ /// The value is an index in the types index space.
+ Tag(TagType),
+}
+
+/// Represents an import in a WebAssembly module.
+#[derive(Debug, Copy, Clone)]
+pub struct Import<'a> {
+ /// The module being imported from.
+ pub module: &'a str,
+ /// The name of the imported item.
+ pub name: &'a str,
+ /// The type of the imported item.
+ pub ty: TypeRef,
+}
+
+/// A reader for the import section of a WebAssembly module.
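+///
+/// # Examples
+///
+/// A small sketch, assuming the standard encoding of an import entry; the
+/// bytes import function `"m"."f"` with type index 0.
+///
+/// ```
+/// use wasmparser::{ImportSectionReader, TypeRef};
+/// let data: &[u8] = &[0x01, 0x01, b'm', 0x01, b'f', 0x00, 0x00];
+/// let reader = ImportSectionReader::new(data, 0).unwrap();
+/// for import in reader {
+/// let import = import.expect("import");
+/// assert_eq!(import.module, "m");
+/// assert_eq!(import.name, "f");
+/// assert!(matches!(import.ty, TypeRef::Func(0)));
+/// }
+/// ```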
+pub type ImportSectionReader<'a> = SectionLimited<'a, Import<'a>>;
+
+impl<'a> FromReader<'a> for Import<'a> {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ Ok(Import {
+ module: reader.read()?,
+ name: reader.read()?,
+ ty: reader.read()?,
+ })
+ }
+}
+
+impl<'a> FromReader<'a> for TypeRef {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ Ok(match reader.read()? {
+ ExternalKind::Func => TypeRef::Func(reader.read_var_u32()?),
+ ExternalKind::Table => TypeRef::Table(reader.read()?),
+ ExternalKind::Memory => TypeRef::Memory(reader.read()?),
+ ExternalKind::Global => TypeRef::Global(reader.read()?),
+ ExternalKind::Tag => TypeRef::Tag(reader.read()?),
+ })
+ }
+}
diff --git a/third_party/rust/wasmparser/src/readers/core/init.rs b/third_party/rust/wasmparser/src/readers/core/init.rs
new file mode 100644
index 0000000000..fcd3bd73c9
--- /dev/null
+++ b/third_party/rust/wasmparser/src/readers/core/init.rs
@@ -0,0 +1,51 @@
+/* Copyright 2018 Mozilla Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use crate::{BinaryReader, FromReader, OperatorsReader, Result};
+
+/// Represents an initialization expression.
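+///
+/// # Examples
+///
+/// A small sketch over a hand-assembled expression (`i32.const 42; end`):
+///
+/// ```
+/// use wasmparser::{ConstExpr, Operator};
+/// let data: &[u8] = &[0x41, 0x2a, 0x0b];
+/// let expr = ConstExpr::new(data, 0);
+/// let mut ops = expr.get_operators_reader();
+/// assert!(matches!(ops.read().unwrap(), Operator::I32Const { value: 42 }));
+/// assert!(matches!(ops.read().unwrap(), Operator::End));
+/// ```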
+#[derive(Debug, Copy, Clone)]
+pub struct ConstExpr<'a> {
+ offset: usize,
+ data: &'a [u8],
+}
+
+impl<'a> ConstExpr<'a> {
+ /// Constructs a new `ConstExpr` from the given data and offset.
+ pub fn new(data: &[u8], offset: usize) -> ConstExpr {
+ ConstExpr { offset, data }
+ }
+
+ /// Gets a binary reader for the initialization expression.
+ pub fn get_binary_reader(&self) -> BinaryReader<'a> {
+ BinaryReader::new_with_offset(self.data, self.offset)
+ }
+
+ /// Gets an operators reader for the initialization expression.
+ pub fn get_operators_reader(&self) -> OperatorsReader<'a> {
+ OperatorsReader::new(self.get_binary_reader())
+ }
+}
+
+impl<'a> FromReader<'a> for ConstExpr<'a> {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ // FIXME(#188) ideally shouldn't need to skip here
+ let reader = reader.skip(|r| r.skip_const_expr())?;
+ Ok(ConstExpr::new(
+ reader.remaining_buffer(),
+ reader.original_position(),
+ ))
+ }
+}
diff --git a/third_party/rust/wasmparser/src/readers/core/memories.rs b/third_party/rust/wasmparser/src/readers/core/memories.rs
new file mode 100644
index 0000000000..d1941b1cdc
--- /dev/null
+++ b/third_party/rust/wasmparser/src/readers/core/memories.rs
@@ -0,0 +1,56 @@
+/* Copyright 2018 Mozilla Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use crate::{BinaryReader, FromReader, MemoryType, Result, SectionLimited};
+
+/// A reader for the memory section of a WebAssembly module.
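+///
+/// # Examples
+///
+/// A small sketch, assuming the standard limits encoding; the bytes declare
+/// one 32-bit memory with `min = 1` and `max = 2`.
+///
+/// ```
+/// use wasmparser::MemorySectionReader;
+/// let data: &[u8] = &[0x01, 0x01, 0x01, 0x02];
+/// let reader = MemorySectionReader::new(data, 0).unwrap();
+/// for memory in reader {
+/// let memory = memory.expect("memory type");
+/// assert_eq!(memory.initial, 1);
+/// assert_eq!(memory.maximum, Some(2));
+/// assert!(!memory.memory64 && !memory.shared);
+/// }
+/// ```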
+pub type MemorySectionReader<'a> = SectionLimited<'a, MemoryType>;
+
+impl<'a> FromReader<'a> for MemoryType {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ let pos = reader.original_position();
+ let flags = reader.read_u8()?;
+ if (flags & !0b111) != 0 {
+ bail!(pos, "invalid memory limits flags");
+ }
+
+ let memory64 = flags & 0b100 != 0;
+ let shared = flags & 0b010 != 0;
+ let has_max = flags & 0b001 != 0;
+ Ok(MemoryType {
+ memory64,
+ shared,
+ // FIXME(WebAssembly/memory64#21) as currently specified if the
+ // `shared` flag is set we should be reading a 32-bit limits field
+ // here. That seems a bit odd to me at the time of this writing so
+ // I've taken the liberty of reading a 64-bit limits field in those
+ // situations. I suspect that this is a typo in the spec, but if not
+ // we'll need to update this to read a 32-bit limits field when the
+ // shared flag is set.
+ initial: if memory64 {
+ reader.read_var_u64()?
+ } else {
+ reader.read_var_u32()?.into()
+ },
+ maximum: if !has_max {
+ None
+ } else if memory64 {
+ Some(reader.read_var_u64()?)
+ } else {
+ Some(reader.read_var_u32()?.into())
+ },
+ })
+ }
+}
diff --git a/third_party/rust/wasmparser/src/readers/core/names.rs b/third_party/rust/wasmparser/src/readers/core/names.rs
new file mode 100644
index 0000000000..aa8a11dde2
--- /dev/null
+++ b/third_party/rust/wasmparser/src/readers/core/names.rs
@@ -0,0 +1,153 @@
+/* Copyright 2018 Mozilla Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use crate::{
+ BinaryReader, BinaryReaderError, FromReader, Result, SectionLimited, Subsection, Subsections,
+};
+use std::ops::Range;
+
+/// Represents a name map from the names custom section.
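+///
+/// # Examples
+///
+/// A small sketch over a hand-assembled name map assigning the name `"f"` to
+/// index 0:
+///
+/// ```
+/// use wasmparser::NameMap;
+/// let data: &[u8] = &[0x01, 0x00, 0x01, b'f'];
+/// let names = NameMap::new(data, 0).unwrap();
+/// for naming in names {
+/// let naming = naming.expect("naming");
+/// assert_eq!(naming.index, 0);
+/// assert_eq!(naming.name, "f");
+/// }
+/// ```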
+pub type NameMap<'a> = SectionLimited<'a, Naming<'a>>;
+
+/// Represents a name for an index from the names section.
+#[derive(Debug, Copy, Clone)]
+pub struct Naming<'a> {
+ /// The index being named.
+ pub index: u32,
+ /// The name for the index.
+ pub name: &'a str,
+}
+
+impl<'a> FromReader<'a> for Naming<'a> {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ let index = reader.read_var_u32()?;
+ let name = reader.read_string()?;
+ Ok(Naming { index, name })
+ }
+}
+
+/// Represents a reader for indirect names from the names custom section.
+pub type IndirectNameMap<'a> = SectionLimited<'a, IndirectNaming<'a>>;
+
+/// Represents an indirect name in the names custom section.
+#[derive(Debug, Clone)]
+pub struct IndirectNaming<'a> {
+ /// The indirect index of the name.
+ pub index: u32,
+ /// The map of names within the item identified by the prior `index` field.
+ pub names: NameMap<'a>,
+}
+
+impl<'a> FromReader<'a> for IndirectNaming<'a> {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ let index = reader.read_var_u32()?;
+
+ // Skip the `NameMap` manually here.
+ //
+ // FIXME(#188) shouldn't need to skip here
+ let names = reader.skip(|reader| {
+ let count = reader.read_var_u32()?;
+ for _ in 0..count {
+ reader.read_var_u32()?;
+ reader.skip_string()?;
+ }
+ Ok(())
+ })?;
+
+ Ok(IndirectNaming {
+ index,
+ names: NameMap::new(names.remaining_buffer(), names.original_position())?,
+ })
+ }
+}
+
+/// Represents a name read from the names custom section.
+#[derive(Clone)]
+pub enum Name<'a> {
+ /// The name is for the module.
+ Module {
+ /// The specified name.
+ name: &'a str,
+ /// The byte range that `name` occupies in the original binary.
+ name_range: Range<usize>,
+ },
+ /// The name is for the functions.
+ Function(NameMap<'a>),
+ /// The name is for the function locals.
+ Local(IndirectNameMap<'a>),
+ /// The name is for the function labels.
+ Label(IndirectNameMap<'a>),
+ /// The name is for the types.
+ Type(NameMap<'a>),
+ /// The name is for the tables.
+ Table(NameMap<'a>),
+ /// The name is for the memories.
+ Memory(NameMap<'a>),
+ /// The name is for the globals.
+ Global(NameMap<'a>),
+ /// The name is for the element segments.
+ Element(NameMap<'a>),
+ /// The name is for the data segments.
+ Data(NameMap<'a>),
+ /// An unknown [name subsection](https://webassembly.github.io/spec/core/appendix/custom.html#subsections).
+ Unknown {
+ /// The identifier for this subsection.
+ ty: u8,
+ /// The contents of this subsection.
+ data: &'a [u8],
+ /// The range of bytes, relative to the start of the original data
+ /// stream, that the contents of this subsection reside in.
+ range: Range<usize>,
+ },
+}
+
+/// A reader for the name custom section of a WebAssembly module.
+pub type NameSectionReader<'a> = Subsections<'a, Name<'a>>;
+
+impl<'a> Subsection<'a> for Name<'a> {
+ fn from_reader(id: u8, mut reader: BinaryReader<'a>) -> Result<Self> {
+ let data = reader.remaining_buffer();
+ let offset = reader.original_position();
+ Ok(match id {
+ 0 => {
+ let name = reader.read_string()?;
+ if !reader.eof() {
+ return Err(BinaryReaderError::new(
+ "trailing data at the end of a name",
+ reader.original_position(),
+ ));
+ }
+ Name::Module {
+ name,
+ name_range: offset..offset + reader.position,
+ }
+ }
+ 1 => Name::Function(NameMap::new(data, offset)?),
+ 2 => Name::Local(IndirectNameMap::new(data, offset)?),
+ 3 => Name::Label(IndirectNameMap::new(data, offset)?),
+ 4 => Name::Type(NameMap::new(data, offset)?),
+ 5 => Name::Table(NameMap::new(data, offset)?),
+ 6 => Name::Memory(NameMap::new(data, offset)?),
+ 7 => Name::Global(NameMap::new(data, offset)?),
+ 8 => Name::Element(NameMap::new(data, offset)?),
+ 9 => Name::Data(NameMap::new(data, offset)?),
+ ty => Name::Unknown {
+ ty,
+ data,
+ range: offset..offset + data.len(),
+ },
+ })
+ }
+}
diff --git a/third_party/rust/wasmparser/src/readers/core/operators.rs b/third_party/rust/wasmparser/src/readers/core/operators.rs
new file mode 100644
index 0000000000..d1312c259f
--- /dev/null
+++ b/third_party/rust/wasmparser/src/readers/core/operators.rs
@@ -0,0 +1,354 @@
+/* Copyright 2018 Mozilla Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use crate::{BinaryReader, BinaryReaderError, Result, ValType};
+
+/// Represents a block type.
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+pub enum BlockType {
+ /// The block neither consumes nor produces any values.
+ Empty,
+ /// The block produces a singular value of the given type ([] -> \[t]).
+ Type(ValType),
+ /// The block is described by a function type.
+ ///
+ /// The index is to a function type in the types section.
+ FuncType(u32),
+}
+
+/// Represents a memory immediate in a WebAssembly memory instruction.
+#[derive(Debug, Copy, Clone)]
+pub struct MemArg {
+ /// Alignment, stored as `n` where the actual alignment is `2^n`.
+ pub align: u8,
+ /// Maximum alignment, stored as `n` where the actual alignment is `2^n`.
+ ///
+ /// Note that this field is not actually read from the binary format, it
+ /// will be a constant depending on which instruction this `MemArg` is a
+ /// payload for.
+ pub max_align: u8,
+ /// A fixed byte-offset that this memory immediate specifies.
+ ///
+ /// Note that the memory64 proposal can specify a full 64-bit byte offset
+ /// while otherwise only 32-bit offsets are allowed. Once validated
+ /// memory immediates for 32-bit memories are guaranteed to be at most
+ /// `u32::MAX` whereas 64-bit memories can use the full 64-bits.
+ pub offset: u64,
+ /// The index of the memory this immediate points to.
+ ///
+ /// Note that this points within the module's own memory index space, and
+ /// is always zero unless the multi-memory proposal of WebAssembly is
+ /// enabled.
+ pub memory: u32,
+}
+
+/// A representation of the entries of a `br_table` instruction.
+#[derive(Clone)]
+pub struct BrTable<'a> {
+ pub(crate) reader: crate::BinaryReader<'a>,
+ pub(crate) cnt: u32,
+ pub(crate) default: u32,
+}
+
+/// An IEEE binary32 immediate floating point value, represented as a u32
+/// containing the bit pattern.
+///
+/// All bit patterns are allowed.
+#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
+pub struct Ieee32(pub(crate) u32);
+
+impl Ieee32 {
+ /// Gets the underlying bits of the 32-bit float.
+ pub fn bits(self) -> u32 {
+ self.0
+ }
+}
+
+/// An IEEE binary64 immediate floating point value, represented as a u64
+/// containing the bit pattern.
+///
+/// All bit patterns are allowed.
+#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
+pub struct Ieee64(pub(crate) u64);
+
+impl Ieee64 {
+ /// Gets the underlying bits of the 64-bit float.
+ pub fn bits(self) -> u64 {
+ self.0
+ }
+}
+
+/// Represents a 128-bit vector value.
+#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
+pub struct V128(pub(crate) [u8; 16]);
+
+impl V128 {
+ /// Gets the bytes of the vector value.
+ pub fn bytes(&self) -> &[u8; 16] {
+ &self.0
+ }
+
+ /// Gets a signed 128-bit integer value from the vector's bytes.
+ pub fn i128(&self) -> i128 {
+ i128::from_le_bytes(self.0)
+ }
+}
+
+macro_rules! define_operator {
+ ($(@$proposal:ident $op:ident $({ $($payload:tt)* })? => $visit:ident)*) => {
+ /// Instructions as defined [here].
+ ///
+ /// [here]: https://webassembly.github.io/spec/core/binary/instructions.html
+ #[derive(Debug, Clone)]
+ #[allow(missing_docs)]
+ pub enum Operator<'a> {
+ $(
+ $op $({ $($payload)* })?,
+ )*
+ }
+ }
+}
+for_each_operator!(define_operator);
+
+/// A reader for a core WebAssembly function's operators.
+#[derive(Clone)]
+pub struct OperatorsReader<'a> {
+ pub(crate) reader: BinaryReader<'a>,
+}
+
+impl<'a> OperatorsReader<'a> {
+ pub(crate) fn new(reader: BinaryReader<'a>) -> OperatorsReader<'a> {
+ OperatorsReader { reader }
+ }
+
+ /// Determines if the reader is at the end of the operators.
+ pub fn eof(&self) -> bool {
+ self.reader.eof()
+ }
+
+ /// Gets the original position of the reader.
+ pub fn original_position(&self) -> usize {
+ self.reader.original_position()
+ }
+
+ /// Whether or not to allow 64-bit memory arguments in
+ /// the operators being read.
+ ///
+ /// This is intended to be `true` when support for the memory64
+ /// WebAssembly proposal is also enabled.
+ pub fn allow_memarg64(&mut self, allow: bool) {
+ self.reader.allow_memarg64(allow);
+ }
+
+ /// Ensures the reader is at the end.
+ ///
+ /// This function returns an error if there is extra data after the operators.
+ pub fn ensure_end(&self) -> Result<()> {
+ if self.eof() {
+ return Ok(());
+ }
+ Err(BinaryReaderError::new(
+ "unexpected data at the end of operators",
+ self.reader.original_position(),
+ ))
+ }
+
+ /// Reads an operator from the reader.
+ pub fn read(&mut self) -> Result<Operator<'a>> {
+ self.reader.read_operator()
+ }
+
+ /// Converts to an iterator of operators paired with offsets.
+ pub fn into_iter_with_offsets(self) -> OperatorsIteratorWithOffsets<'a> {
+ OperatorsIteratorWithOffsets {
+ reader: self,
+ err: false,
+ }
+ }
+
+ /// Reads an operator with its offset.
+ pub fn read_with_offset(&mut self) -> Result<(Operator<'a>, usize)> {
+ let pos = self.reader.original_position();
+ Ok((self.read()?, pos))
+ }
+
+ /// Visit a single operator with the specified [`VisitOperator`] instance.
+ ///
+ /// See [`BinaryReader::visit_operator`] for more information.
+ pub fn visit_operator<T>(&mut self, visitor: &mut T) -> Result<<T as VisitOperator<'a>>::Output>
+ where
+ T: VisitOperator<'a>,
+ {
+ self.reader.visit_operator(visitor)
+ }
+
+ /// Gets a binary reader from this operators reader.
+ pub fn get_binary_reader(&self) -> BinaryReader<'a> {
+ self.reader.clone()
+ }
+}
+
+impl<'a> IntoIterator for OperatorsReader<'a> {
+ type Item = Result<Operator<'a>>;
+ type IntoIter = OperatorsIterator<'a>;
+
+ /// Reads content of the code section.
+ ///
+ /// # Examples
+ /// ```
+ /// use wasmparser::{Operator, CodeSectionReader, Result};
+ /// # let data: &[u8] = &[
+ /// # 0x01, 0x03, 0x00, 0x01, 0x0b];
+ /// let code_reader = CodeSectionReader::new(data, 0).unwrap();
+ /// for body in code_reader {
+ /// let body = body.expect("function body");
+ /// let mut op_reader = body.get_operators_reader().expect("op reader");
+ /// let ops = op_reader.into_iter().collect::<Result<Vec<Operator>>>().expect("ops");
+ /// assert!(
+ /// if let [Operator::Nop, Operator::End] = ops.as_slice() { true } else { false },
+ /// "found {:?}",
+ /// ops
+ /// );
+ /// }
+ /// ```
+ fn into_iter(self) -> Self::IntoIter {
+ OperatorsIterator {
+ reader: self,
+ err: false,
+ }
+ }
+}
+
+/// An iterator over a function's operators.
+pub struct OperatorsIterator<'a> {
+ reader: OperatorsReader<'a>,
+ err: bool,
+}
+
+impl<'a> Iterator for OperatorsIterator<'a> {
+ type Item = Result<Operator<'a>>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if self.err || self.reader.eof() {
+ return None;
+ }
+ let result = self.reader.read();
+ self.err = result.is_err();
+ Some(result)
+ }
+}
+
+/// An iterator over a function's operators with offsets.
+pub struct OperatorsIteratorWithOffsets<'a> {
+ reader: OperatorsReader<'a>,
+ err: bool,
+}
+
+impl<'a> Iterator for OperatorsIteratorWithOffsets<'a> {
+ type Item = Result<(Operator<'a>, usize)>;
+
+ /// Reads content of the code section with offsets.
+ ///
+ /// # Examples
+ /// ```
+ /// use wasmparser::{Operator, CodeSectionReader, Result};
+ /// # let data: &[u8] = &[
+ /// # 0x01, 0x03, 0x00, /* offset = 23 */ 0x01, 0x0b];
+ /// let code_reader = CodeSectionReader::new(data, 20).unwrap();
+ /// for body in code_reader {
+ /// let body = body.expect("function body");
+ /// let mut op_reader = body.get_operators_reader().expect("op reader");
+ /// let ops = op_reader.into_iter_with_offsets().collect::<Result<Vec<(Operator, usize)>>>().expect("ops");
+ /// assert!(
+ /// if let [(Operator::Nop, 23), (Operator::End, 24)] = ops.as_slice() { true } else { false },
+ /// "found {:?}",
+ /// ops
+ /// );
+ /// }
+ /// ```
+ fn next(&mut self) -> Option<Self::Item> {
+ if self.err || self.reader.eof() {
+ return None;
+ }
+ let result = self.reader.read_with_offset();
+ self.err = result.is_err();
+ Some(result)
+ }
+}
+
+macro_rules! define_visit_operator {
+ ($(@$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident)*) => {
+ $(
+ fn $visit(&mut self $($(,$arg: $argty)*)?) -> Self::Output;
+ )*
+ }
+}
+
+/// Trait implemented by types that can visit all [`Operator`] variants.
+#[allow(missing_docs)]
+pub trait VisitOperator<'a> {
+ /// The result type of the visitor.
+ type Output: 'a;
+
+ /// Visits the [`Operator`] `op` using the given `offset`.
+ ///
+ /// # Note
+ ///
+ /// This is a convenience method that is intended for non-performance
+ /// critical use cases. For performance critical implementations users
+ /// are recommended to directly use the respective `visit` methods or
+ /// implement [`VisitOperator`] on their own.
+ fn visit_operator(&mut self, op: &Operator<'a>) -> Self::Output {
+ macro_rules! visit_operator {
+ ($(@$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident)*) => {
+ match op {
+ $(
+ Operator::$op $({ $($arg),* })? => self.$visit($($($arg.clone()),*)?),
+ )*
+ }
+ }
+
+ }
+ for_each_operator!(visit_operator)
+ }
+
+ for_each_operator!(define_visit_operator);
+}
+
+macro_rules! define_visit_operator_delegate {
+ ($(@$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident)*) => {
+ $(
+ fn $visit(&mut self $($(,$arg: $argty)*)?) -> Self::Output {
+ V::$visit(&mut *self, $($($arg),*)?)
+ }
+ )*
+ }
+}
+
+impl<'a, 'b, V: VisitOperator<'a> + ?Sized> VisitOperator<'a> for &'b mut V {
+ type Output = V::Output;
+ fn visit_operator(&mut self, op: &Operator<'a>) -> Self::Output {
+ V::visit_operator(*self, op)
+ }
+ for_each_operator!(define_visit_operator_delegate);
+}
+
+impl<'a, V: VisitOperator<'a> + ?Sized> VisitOperator<'a> for Box<V> {
+ type Output = V::Output;
+ fn visit_operator(&mut self, op: &Operator<'a>) -> Self::Output {
+ V::visit_operator(&mut *self, op)
+ }
+ for_each_operator!(define_visit_operator_delegate);
+}
diff --git a/third_party/rust/wasmparser/src/readers/core/producers.rs b/third_party/rust/wasmparser/src/readers/core/producers.rs
new file mode 100644
index 0000000000..07785ed75a
--- /dev/null
+++ b/third_party/rust/wasmparser/src/readers/core/producers.rs
@@ -0,0 +1,78 @@
+/* Copyright 2019 Mozilla Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use crate::{BinaryReader, FromReader, Result, SectionLimited};
+
+/// A reader for the producers custom section of a WebAssembly module.
+///
+/// # Examples
+///
+/// ```
+/// # let data: &[u8] = &[0x01, 0x08, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65,
+/// # 0x02, 0x03, 0x77, 0x61, 0x74, 0x01, 0x31, 0x01, 0x43, 0x03, 0x39, 0x2e, 0x30];
+/// use wasmparser::{ProducersSectionReader, ProducersFieldValue, Result};
+/// let reader = ProducersSectionReader::new(data, 0).expect("producers reader");
+/// let field = reader.into_iter().next().unwrap().expect("producers field");
+/// assert!(field.name == "language");
+/// let value = field.values.into_iter().collect::<Result<Vec<_>>>().expect("values");
+/// assert!(value.len() == 2);
+/// assert!(value[0].name == "wat" && value[0].version == "1");
+/// assert!(value[1].name == "C" && value[1].version == "9.0");
+/// ```
+pub type ProducersSectionReader<'a> = SectionLimited<'a, ProducersField<'a>>;
+
+/// A field from the producers custom section.
+#[derive(Debug, Clone)]
+pub struct ProducersField<'a> {
+ /// The name of the field.
+ pub name: &'a str,
+ /// The values specified for this field
+ pub values: SectionLimited<'a, ProducersFieldValue<'a>>,
+}
+
+impl<'a> FromReader<'a> for ProducersField<'a> {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ let name = reader.read_string()?;
+ let values = reader.skip(|reader| {
+ // FIXME(#188) ideally shouldn't need to skip here
+ for _ in 0..reader.read_var_u32()? {
+ reader.skip_string()?;
+ reader.skip_string()?;
+ }
+ Ok(())
+ })?;
+ Ok(ProducersField {
+ name,
+ values: SectionLimited::new(values.remaining_buffer(), values.original_position())?,
+ })
+ }
+}
+
+/// Represents a field value in the producers custom section.
+#[derive(Debug, Copy, Clone)]
+pub struct ProducersFieldValue<'a> {
+ /// The field name.
+ pub name: &'a str,
+ /// The field version.
+ pub version: &'a str,
+}
+
+impl<'a> FromReader<'a> for ProducersFieldValue<'a> {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ let name = reader.read_string()?;
+ let version = reader.read_string()?;
+ Ok(ProducersFieldValue { name, version })
+ }
+}
diff --git a/third_party/rust/wasmparser/src/readers/core/tables.rs b/third_party/rust/wasmparser/src/readers/core/tables.rs
new file mode 100644
index 0000000000..211e415efd
--- /dev/null
+++ b/third_party/rust/wasmparser/src/readers/core/tables.rs
@@ -0,0 +1,87 @@
+/* Copyright 2018 Mozilla Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use crate::{BinaryReader, ConstExpr, FromReader, Result, SectionLimited, TableType};
+
+/// A reader for the table section of a WebAssembly module.
+pub type TableSectionReader<'a> = SectionLimited<'a, Table<'a>>;
+
+/// Type information about a table defined in the table section of a WebAssembly
+/// module.
+#[derive(Debug)]
+pub struct Table<'a> {
+ /// The type of this table, including its element type and its limits.
+ pub ty: TableType,
+ /// The initialization expression for the table.
+ pub init: TableInit<'a>,
+}
+
+/// Different modes of initializing a table.
+#[derive(Debug)]
+pub enum TableInit<'a> {
+ /// The table is initialized to all null elements.
+ RefNull,
+ /// Each element in the table is initialized with the specified constant
+ /// expression.
+ Expr(ConstExpr<'a>),
+}
+
+impl<'a> FromReader<'a> for Table<'a> {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ let has_init_expr = if reader.peek()? == 0x40 {
+ reader.read_u8()?;
+ true
+ } else {
+ false
+ };
+
+ if has_init_expr {
+ if reader.read_u8()? != 0x00 {
+ bail!(reader.original_position() - 1, "invalid table encoding");
+ }
+ }
+
+ let ty = reader.read::<TableType>()?;
+ let init = if has_init_expr {
+ TableInit::Expr(reader.read()?)
+ } else {
+ TableInit::RefNull
+ };
+ Ok(Table { ty, init })
+ }
+}
+
+impl<'a> FromReader<'a> for TableType {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ let element_type = reader.read()?;
+ let has_max = match reader.read_u8()? {
+ 0x00 => false,
+ 0x01 => true,
+ _ => {
+ bail!(
+ reader.original_position() - 1,
+ "invalid table resizable limits flags",
+ )
+ }
+ };
+ let initial = reader.read()?;
+ let maximum = if has_max { Some(reader.read()?) } else { None };
+ Ok(TableType {
+ element_type,
+ initial,
+ maximum,
+ })
+ }
+}
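For orientation, a small usage sketch (not part of the vendored diff) showing how the two `TableInit` modes come out of a table section; `summarize_tables` is a hypothetical helper, and `data`/`offset` are assumed to point at the raw contents of a table section.

    use wasmparser::{Result, TableInit, TableSectionReader};

    fn summarize_tables(data: &[u8], offset: usize) -> Result<()> {
        for table in TableSectionReader::new(data, offset)? {
            let table = table?;
            match table.init {
                // Ordinary tables: every element starts out as a null reference.
                TableInit::RefNull => {
                    println!("table of {:?}, null-initialized", table.ty.element_type);
                }
                // Function-references extension: an explicit init expression.
                TableInit::Expr(expr) => {
                    println!("table of {:?}, initialized by {:?}", table.ty.element_type, expr);
                }
            }
        }
        Ok(())
    }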
diff --git a/third_party/rust/wasmparser/src/readers/core/tags.rs b/third_party/rust/wasmparser/src/readers/core/tags.rs
new file mode 100644
index 0000000000..746b3ea7ac
--- /dev/null
+++ b/third_party/rust/wasmparser/src/readers/core/tags.rs
@@ -0,0 +1,32 @@
+/* Copyright 2020 Mozilla Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use crate::{BinaryReader, FromReader, Result, SectionLimited, TagKind, TagType};
+
+/// A reader for the tags section of a WebAssembly module.
+pub type TagSectionReader<'a> = SectionLimited<'a, TagType>;
+
+impl<'a> FromReader<'a> for TagType {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ let attribute = reader.read_u8()?;
+ if attribute != 0 {
+ bail!(reader.original_position() - 1, "invalid tag attributes");
+ }
+ Ok(TagType {
+ kind: TagKind::Exception,
+ func_type_idx: reader.read_var_u32()?,
+ })
+ }
+}
diff --git a/third_party/rust/wasmparser/src/readers/core/types.rs b/third_party/rust/wasmparser/src/readers/core/types.rs
new file mode 100644
index 0000000000..4358e2670c
--- /dev/null
+++ b/third_party/rust/wasmparser/src/readers/core/types.rs
@@ -0,0 +1,380 @@
+/* Copyright 2018 Mozilla Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use crate::limits::{MAX_WASM_FUNCTION_PARAMS, MAX_WASM_FUNCTION_RETURNS};
+use crate::{BinaryReader, FromReader, Result, SectionLimited};
+use std::fmt::Debug;
+
+/// Represents the types of values in a WebAssembly module.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum ValType {
+ /// The value type is i32.
+ I32,
+ /// The value type is i64.
+ I64,
+ /// The value type is f32.
+ F32,
+ /// The value type is f64.
+ F64,
+ /// The value type is v128.
+ V128,
+ /// The value type is a reference; the specific kind of reference is given
+ /// by the [`RefType`]. This is a change in syntax from the function
+ /// references proposal, which now provides `funcref` and `externref` as sugar
+ /// for the generic `ref` construct.
+ Ref(RefType),
+}
+
+/// A reference type. When the function references feature is disabled, this
+/// only represents `funcref` and `externref`, encoded as
+/// `RefType { nullable: true, heap_type: Func | Extern }`.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+#[repr(packed)]
+pub struct RefType {
+ /// Whether it's nullable
+ pub nullable: bool,
+ /// The relevant heap type
+ pub heap_type: HeapType,
+}
+
+impl RefType {
+ /// Alias for the wasm `funcref` type.
+ pub const FUNCREF: RefType = RefType {
+ nullable: true,
+ heap_type: HeapType::Func,
+ };
+ /// Alias for the wasm `externref` type.
+ pub const EXTERNREF: RefType = RefType {
+ nullable: true,
+ heap_type: HeapType::Extern,
+ };
+}
+
+impl From<RefType> for ValType {
+ fn from(ty: RefType) -> ValType {
+ ValType::Ref(ty)
+ }
+}
+
+/// Used as a performance optimization in [`HeapType`]. Call `.into()` to get
+/// the `u32` index value.
+// A u16 forces 2-byte alignment, which forces HeapType to be 4 bytes, which in
+// turn forces ValType to 5 bytes. This newtype is `#[repr(packed)]` so the
+// necessary bits are stored compactly.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+#[repr(packed)]
+pub struct PackedIndex(u16);
+
+impl TryFrom<u32> for PackedIndex {
+ type Error = ();
+
+ fn try_from(idx: u32) -> Result<PackedIndex, ()> {
+ idx.try_into().map(PackedIndex).map_err(|_| ())
+ }
+}
+
+impl From<PackedIndex> for u32 {
+ fn from(x: PackedIndex) -> u32 {
+ x.0 as u32
+ }
+}
+
+/// A heap type from the function references proposal. When the proposal is
+/// disabled, the indexed (`TypedFunc`) form is an invalid type.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum HeapType {
+ /// Function type index
+ /// Note: [PackedIndex] may need to be unpacked
+ TypedFunc(PackedIndex),
+ /// From reference types
+ Func,
+ /// From reference types
+ Extern,
+}
+
+impl ValType {
+ /// Alias for the wasm `funcref` type.
+ pub const FUNCREF: ValType = ValType::Ref(RefType::FUNCREF);
+ /// Alias for the wasm `externref` type.
+ pub const EXTERNREF: ValType = ValType::Ref(RefType::EXTERNREF);
+
+ /// Returns whether this value type is a "reference type".
+ ///
+ /// Only reference types are allowed in tables, for example, and with some
+ /// instructions. Current reference types include `funcref` and `externref`.
+ pub fn is_reference_type(&self) -> bool {
+ matches!(self, ValType::Ref(_))
+ }
+ /// Whether the type is defaultable according to the function references
+ /// spec, i.e. whether it is *not* a non-nullable reference.
+ pub fn is_defaultable(&self) -> bool {
+ !matches!(
+ self,
+ ValType::Ref(RefType {
+ nullable: false,
+ ..
+ })
+ )
+ }
+
+ pub(crate) fn is_valtype_byte(byte: u8) -> bool {
+ match byte {
+ 0x7F | 0x7E | 0x7D | 0x7C | 0x7B | 0x70 | 0x6F | 0x6B | 0x6C => true,
+ _ => false,
+ }
+ }
+}
+
+impl<'a> FromReader<'a> for ValType {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ match reader.peek()? {
+ 0x7F => {
+ reader.position += 1;
+ Ok(ValType::I32)
+ }
+ 0x7E => {
+ reader.position += 1;
+ Ok(ValType::I64)
+ }
+ 0x7D => {
+ reader.position += 1;
+ Ok(ValType::F32)
+ }
+ 0x7C => {
+ reader.position += 1;
+ Ok(ValType::F64)
+ }
+ 0x7B => {
+ reader.position += 1;
+ Ok(ValType::V128)
+ }
+ 0x70 | 0x6F | 0x6B | 0x6C => Ok(ValType::Ref(reader.read()?)),
+ _ => bail!(reader.original_position(), "invalid value type"),
+ }
+ }
+}
+
+impl<'a> FromReader<'a> for RefType {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ match reader.read()? {
+ 0x70 => Ok(RefType::FUNCREF),
+ 0x6F => Ok(RefType::EXTERNREF),
+ byte @ (0x6B | 0x6C) => Ok(RefType {
+ nullable: byte == 0x6C,
+ heap_type: reader.read()?,
+ }),
+ _ => bail!(reader.original_position(), "malformed reference type"),
+ }
+ }
+}
+
+impl<'a> FromReader<'a> for HeapType {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ match reader.peek()? {
+ 0x70 => {
+ reader.position += 1;
+ Ok(HeapType::Func)
+ }
+ 0x6F => {
+ reader.position += 1;
+ Ok(HeapType::Extern)
+ }
+ _ => {
+ let idx = match u32::try_from(reader.read_var_s33()?) {
+ Ok(idx) => idx,
+ Err(_) => {
+ bail!(reader.original_position(), "invalid function heap type",);
+ }
+ };
+ match idx.try_into() {
+ Ok(packed) => Ok(HeapType::TypedFunc(packed)),
+ Err(_) => {
+ bail!(reader.original_position(), "function index too large");
+ }
+ }
+ }
+ }
+ }
+}
+
+/// Represents a type in a WebAssembly module.
+#[derive(Debug, Clone)]
+pub enum Type {
+ /// The type is for a function.
+ Func(FuncType),
+}
+
+/// Represents a type of a function in a WebAssembly module.
+#[derive(Clone, Eq, PartialEq, Hash)]
+pub struct FuncType {
+ /// The combined parameters and result types.
+ params_results: Box<[ValType]>,
+ /// The number of parameter types.
+ len_params: usize,
+}
+
+impl Debug for FuncType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.debug_struct("FuncType")
+ .field("params", &self.params())
+ .field("returns", &self.results())
+ .finish()
+ }
+}
+
+impl FuncType {
+ /// Creates a new [`FuncType`] from the given `params` and `results`.
+ pub fn new<P, R>(params: P, results: R) -> Self
+ where
+ P: IntoIterator<Item = ValType>,
+ R: IntoIterator<Item = ValType>,
+ {
+ let mut buffer = params.into_iter().collect::<Vec<_>>();
+ let len_params = buffer.len();
+ buffer.extend(results);
+ Self {
+ params_results: buffer.into(),
+ len_params,
+ }
+ }
+
+ /// Creates a new [`FuncType`] from its raw parts.
+ ///
+ /// # Panics
+ ///
+ /// If `len_params` is greater than the length of `params_results`.
+ pub(crate) fn from_raw_parts(params_results: Box<[ValType]>, len_params: usize) -> Self {
+ assert!(len_params <= params_results.len());
+ Self {
+ params_results,
+ len_params,
+ }
+ }
+
+ /// Returns a shared slice to the parameter types of the [`FuncType`].
+ #[inline]
+ pub fn params(&self) -> &[ValType] {
+ &self.params_results[..self.len_params]
+ }
+
+ /// Returns a shared slice to the result types of the [`FuncType`].
+ #[inline]
+ pub fn results(&self) -> &[ValType] {
+ &self.params_results[self.len_params..]
+ }
+}
+
+/// Represents a table's type.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct TableType {
+ /// The table's element type.
+ pub element_type: RefType,
+ /// Initial size of this table, in elements.
+ pub initial: u32,
+ /// Optional maximum size of the table, in elements.
+ pub maximum: Option<u32>,
+}
+
+/// Represents a memory's type.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct MemoryType {
+ /// Whether or not this is a 64-bit memory, using i64 as an index. If this
+ /// is false it's a 32-bit memory using i32 as an index.
+ ///
+ /// This is part of the memory64 proposal in WebAssembly.
+ pub memory64: bool,
+
+ /// Whether or not this is a "shared" memory, indicating that it may be
+ /// shared across threads and that the `maximum` field is always present for
+ /// valid types.
+ ///
+ /// This is part of the threads proposal in WebAssembly.
+ pub shared: bool,
+
+ /// Initial size of this memory, in wasm pages.
+ ///
+ /// For 32-bit memories (when `memory64` is `false`) this is guaranteed to
+ /// be at most `u32::MAX` for valid types.
+ pub initial: u64,
+
+ /// Optional maximum size of this memory, in wasm pages.
+ ///
+ /// For 32-bit memories (when `memory64` is `false`) this is guaranteed to
+ /// be at most `u32::MAX` for valid types. This field is always present for
+ /// valid wasm memories when `shared` is `true`.
+ pub maximum: Option<u64>,
+}
+
+impl MemoryType {
+ /// Gets the index type for the memory.
+ pub fn index_type(&self) -> ValType {
+ if self.memory64 {
+ ValType::I64
+ } else {
+ ValType::I32
+ }
+ }
+}
+
+/// Represents a global's type.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct GlobalType {
+ /// The global's type.
+ pub content_type: ValType,
+ /// Whether or not the global is mutable.
+ pub mutable: bool,
+}
+
+/// Represents a tag kind.
+#[derive(Clone, Copy, Debug)]
+pub enum TagKind {
+ /// The tag is an exception type.
+ Exception,
+}
+
+/// A tag's type.
+#[derive(Clone, Copy, Debug)]
+pub struct TagType {
+ /// The kind of tag
+ pub kind: TagKind,
+ /// The function type this tag uses.
+ pub func_type_idx: u32,
+}
+
+/// A reader for the type section of a WebAssembly module.
+pub type TypeSectionReader<'a> = SectionLimited<'a, Type>;
+
+impl<'a> FromReader<'a> for Type {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ Ok(match reader.read_u8()? {
+ 0x60 => Type::Func(reader.read()?),
+ x => return reader.invalid_leading_byte(x, "type"),
+ })
+ }
+}
+
+impl<'a> FromReader<'a> for FuncType {
+ fn from_reader(reader: &mut BinaryReader<'a>) -> Result<Self> {
+ let mut params_results = reader
+ .read_iter(MAX_WASM_FUNCTION_PARAMS, "function params")?
+ .collect::<Result<Vec<_>>>()?;
+ let len_params = params_results.len();
+ let results = reader.read_iter(MAX_WASM_FUNCTION_RETURNS, "function returns")?;
+ params_results.reserve(results.size_hint().0);
+ for result in results {
+ params_results.push(result?);
+ }
+ Ok(FuncType::from_raw_parts(params_results.into(), len_params))
+ }
+}
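A quick illustration (not part of the vendored diff) of the `FuncType` accessors and the `funcref` shorthand defined above; `demo_func_type` is a hypothetical name used only for illustration.

    use wasmparser::{FuncType, ValType};

    fn demo_func_type() {
        // Build the type (i32, i32) -> i64 and inspect it through the
        // params()/results() accessors.
        let ty = FuncType::new([ValType::I32, ValType::I32], [ValType::I64]);
        assert_eq!(ty.params(), &[ValType::I32, ValType::I32][..]);
        assert_eq!(ty.results(), &[ValType::I64][..]);

        // `funcref` is sugar for a nullable reference to any function, so it is
        // both a reference type and defaultable.
        assert!(ValType::FUNCREF.is_reference_type());
        assert!(ValType::FUNCREF.is_defaultable());
    }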
diff --git a/third_party/rust/wasmparser/src/resources.rs b/third_party/rust/wasmparser/src/resources.rs
new file mode 100644
index 0000000000..2f0cb5309d
--- /dev/null
+++ b/third_party/rust/wasmparser/src/resources.rs
@@ -0,0 +1,395 @@
+/* Copyright 2019 Mozilla Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use crate::{
+ BinaryReaderError, FuncType, GlobalType, HeapType, MemoryType, RefType, TableType, ValType,
+ WasmFeatures,
+};
+use std::ops::Range;
+
+/// Types that qualify as Wasm function types for validation purposes.
+pub trait WasmFuncType {
+ /// Returns the number of input types.
+ fn len_inputs(&self) -> usize;
+ /// Returns the number of output types.
+ fn len_outputs(&self) -> usize;
+ /// Returns the type at given index if any.
+ ///
+ /// # Note
+ ///
+ /// The returned type may be wrapped by the user crate and thus
+ /// the actually returned type only has to be comparable to a Wasm type.
+ fn input_at(&self, at: u32) -> Option<ValType>;
+ /// Returns the type at given index if any.
+ ///
+ /// # Note
+ ///
+ /// The returned type may be wrapped by the user crate and thus
+ /// the actually returned type only has to be comparable to a Wasm type.
+ fn output_at(&self, at: u32) -> Option<ValType>;
+
+ /// Returns the list of inputs as an iterator.
+ fn inputs(&self) -> WasmFuncTypeInputs<'_, Self>
+ where
+ Self: Sized,
+ {
+ WasmFuncTypeInputs {
+ func_type: self,
+ range: 0..self.len_inputs() as u32,
+ }
+ }
+
+ /// Returns the list of outputs as an iterator.
+ fn outputs(&self) -> WasmFuncTypeOutputs<'_, Self>
+ where
+ Self: Sized,
+ {
+ WasmFuncTypeOutputs {
+ func_type: self,
+ range: 0..self.len_outputs() as u32,
+ }
+ }
+}
+
+impl<T> WasmFuncType for &'_ T
+where
+ T: ?Sized + WasmFuncType,
+{
+ fn len_inputs(&self) -> usize {
+ T::len_inputs(self)
+ }
+ fn len_outputs(&self) -> usize {
+ T::len_outputs(self)
+ }
+ fn input_at(&self, at: u32) -> Option<ValType> {
+ T::input_at(self, at)
+ }
+ fn output_at(&self, at: u32) -> Option<ValType> {
+ T::output_at(self, at)
+ }
+}
+
+/// Iterator over the inputs of a Wasm function type.
+pub struct WasmFuncTypeInputs<'a, T> {
+ /// The iterated-over function type.
+ func_type: &'a T,
+ /// The range we're iterating over.
+ range: Range<u32>,
+}
+
+impl<T> Iterator for WasmFuncTypeInputs<'_, T>
+where
+ T: WasmFuncType,
+{
+ type Item = crate::ValType;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.range
+ .next()
+ .map(|i| self.func_type.input_at(i).unwrap())
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.range.size_hint()
+ }
+}
+
+impl<T> DoubleEndedIterator for WasmFuncTypeInputs<'_, T>
+where
+ T: WasmFuncType,
+{
+ fn next_back(&mut self) -> Option<Self::Item> {
+ self.range
+ .next_back()
+ .map(|i| self.func_type.input_at(i).unwrap())
+ }
+}
+
+impl<T> ExactSizeIterator for WasmFuncTypeInputs<'_, T>
+where
+ T: WasmFuncType,
+{
+ fn len(&self) -> usize {
+ self.range.len()
+ }
+}
+
+impl<'a, T> Clone for WasmFuncTypeInputs<'a, T> {
+ fn clone(&self) -> WasmFuncTypeInputs<'a, T> {
+ WasmFuncTypeInputs {
+ func_type: self.func_type,
+ range: self.range.clone(),
+ }
+ }
+}
+
+/// Iterator over the outputs of a Wasm function type.
+pub struct WasmFuncTypeOutputs<'a, T> {
+ /// The iterated-over function type.
+ func_type: &'a T,
+ /// The range we're iterating over.
+ range: Range<u32>,
+}
+
+impl<T> Iterator for WasmFuncTypeOutputs<'_, T>
+where
+ T: WasmFuncType,
+{
+ type Item = crate::ValType;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.range
+ .next()
+ .map(|i| self.func_type.output_at(i).unwrap())
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.range.size_hint()
+ }
+}
+
+impl<T> DoubleEndedIterator for WasmFuncTypeOutputs<'_, T>
+where
+ T: WasmFuncType,
+{
+ fn next_back(&mut self) -> Option<Self::Item> {
+ self.range
+ .next_back()
+ .map(|i| self.func_type.output_at(i).unwrap())
+ }
+}
+
+impl<T> ExactSizeIterator for WasmFuncTypeOutputs<'_, T>
+where
+ T: WasmFuncType,
+{
+ fn len(&self) -> usize {
+ self.range.len()
+ }
+}
+
+impl<'a, T> Clone for WasmFuncTypeOutputs<'a, T> {
+ fn clone(&self) -> WasmFuncTypeOutputs<'a, T> {
+ WasmFuncTypeOutputs {
+ func_type: self.func_type,
+ range: self.range.clone(),
+ }
+ }
+}
+
+/// Types that qualify as a Wasm validation database.
+///
+/// # Note
+///
+/// The `wasmparser` crate provides a built-in validation framework, but it
+/// also allows users of this crate to feed the parsed Wasm into their own data
+/// structures while validating at the same time, without an additional parsing
+/// or validation pass and without copying data around.
+pub trait WasmModuleResources {
+ /// The function type used for validation.
+ type FuncType: WasmFuncType;
+
+ /// Returns the table at given index if any.
+ fn table_at(&self, at: u32) -> Option<TableType>;
+ /// Returns the linear memory at given index.
+ fn memory_at(&self, at: u32) -> Option<MemoryType>;
+ /// Returns the tag at given index.
+ fn tag_at(&self, at: u32) -> Option<&Self::FuncType>;
+ /// Returns the global variable at given index.
+ fn global_at(&self, at: u32) -> Option<GlobalType>;
+ /// Returns the `FuncType` associated with the given type index.
+ fn func_type_at(&self, type_idx: u32) -> Option<&Self::FuncType>;
+ /// Returns the type index associated with the given function index
+ /// (i.e. `type_of_function = func_type_at(type_index_of_function)`).
+ fn type_index_of_function(&self, func_idx: u32) -> Option<u32>;
+ /// Returns the `FuncType` associated with the given function index.
+ fn type_of_function(&self, func_idx: u32) -> Option<&Self::FuncType>;
+ /// Returns the element type at the given index.
+ fn element_type_at(&self, at: u32) -> Option<RefType>;
+ /// Under the function references proposal, returns whether `t1 <= t2`
+ /// (`t1` is a subtype of `t2`). Otherwise, returns whether `t1 == t2`.
+ fn matches(&self, t1: ValType, t2: ValType) -> bool;
+ /// Checks a value type. This requires using `func_type_at` to check
+ /// reference types.
+ fn check_value_type(
+ &self,
+ t: ValType,
+ features: &WasmFeatures,
+ offset: usize,
+ ) -> Result<(), BinaryReaderError>;
+
+ /// Checks that a `HeapType` is valid, notably its function index if one is
+ /// used.
+ fn check_heap_type(
+ &self,
+ heap_type: HeapType,
+ features: &WasmFeatures,
+ offset: usize,
+ ) -> Result<(), BinaryReaderError> {
+ // Delegate to the generic value type validation which will have the
+ // same validity checks.
+ self.check_value_type(
+ RefType {
+ nullable: true,
+ heap_type,
+ }
+ .into(),
+ features,
+ offset,
+ )
+ }
+
+ /// Returns the number of elements.
+ fn element_count(&self) -> u32;
+ /// Returns the number of bytes in the Wasm data section.
+ fn data_count(&self) -> Option<u32>;
+ /// Returns whether the function index is referenced in the module anywhere
+ /// outside of the start/function sections.
+ fn is_function_referenced(&self, idx: u32) -> bool;
+}
+
+impl<T> WasmModuleResources for &'_ T
+where
+ T: ?Sized + WasmModuleResources,
+{
+ type FuncType = T::FuncType;
+
+ fn table_at(&self, at: u32) -> Option<TableType> {
+ T::table_at(self, at)
+ }
+ fn memory_at(&self, at: u32) -> Option<MemoryType> {
+ T::memory_at(self, at)
+ }
+ fn tag_at(&self, at: u32) -> Option<&Self::FuncType> {
+ T::tag_at(self, at)
+ }
+ fn global_at(&self, at: u32) -> Option<GlobalType> {
+ T::global_at(self, at)
+ }
+ fn func_type_at(&self, at: u32) -> Option<&Self::FuncType> {
+ T::func_type_at(self, at)
+ }
+ fn type_index_of_function(&self, func_idx: u32) -> Option<u32> {
+ T::type_index_of_function(self, func_idx)
+ }
+ fn type_of_function(&self, func_idx: u32) -> Option<&Self::FuncType> {
+ T::type_of_function(self, func_idx)
+ }
+ fn check_value_type(
+ &self,
+ t: ValType,
+ features: &WasmFeatures,
+ offset: usize,
+ ) -> Result<(), BinaryReaderError> {
+ T::check_value_type(self, t, features, offset)
+ }
+ fn element_type_at(&self, at: u32) -> Option<RefType> {
+ T::element_type_at(self, at)
+ }
+ fn matches(&self, t1: ValType, t2: ValType) -> bool {
+ T::matches(self, t1, t2)
+ }
+
+ fn element_count(&self) -> u32 {
+ T::element_count(self)
+ }
+ fn data_count(&self) -> Option<u32> {
+ T::data_count(self)
+ }
+ fn is_function_referenced(&self, idx: u32) -> bool {
+ T::is_function_referenced(self, idx)
+ }
+}
+
+impl<T> WasmModuleResources for std::sync::Arc<T>
+where
+ T: WasmModuleResources,
+{
+ type FuncType = T::FuncType;
+
+ fn table_at(&self, at: u32) -> Option<TableType> {
+ T::table_at(self, at)
+ }
+
+ fn memory_at(&self, at: u32) -> Option<MemoryType> {
+ T::memory_at(self, at)
+ }
+
+ fn tag_at(&self, at: u32) -> Option<&Self::FuncType> {
+ T::tag_at(self, at)
+ }
+
+ fn global_at(&self, at: u32) -> Option<GlobalType> {
+ T::global_at(self, at)
+ }
+
+ fn func_type_at(&self, type_idx: u32) -> Option<&Self::FuncType> {
+ T::func_type_at(self, type_idx)
+ }
+
+ fn type_index_of_function(&self, func_idx: u32) -> Option<u32> {
+ T::type_index_of_function(self, func_idx)
+ }
+
+ fn type_of_function(&self, func_idx: u32) -> Option<&Self::FuncType> {
+ T::type_of_function(self, func_idx)
+ }
+
+ fn check_value_type(
+ &self,
+ t: ValType,
+ features: &WasmFeatures,
+ offset: usize,
+ ) -> Result<(), BinaryReaderError> {
+ T::check_value_type(self, t, features, offset)
+ }
+
+ fn element_type_at(&self, at: u32) -> Option<RefType> {
+ T::element_type_at(self, at)
+ }
+
+ fn matches(&self, t1: ValType, t2: ValType) -> bool {
+ T::matches(self, t1, t2)
+ }
+
+ fn element_count(&self) -> u32 {
+ T::element_count(self)
+ }
+
+ fn data_count(&self) -> Option<u32> {
+ T::data_count(self)
+ }
+
+ fn is_function_referenced(&self, idx: u32) -> bool {
+ T::is_function_referenced(self, idx)
+ }
+}
+
+impl WasmFuncType for FuncType {
+ fn len_inputs(&self) -> usize {
+ self.params().len()
+ }
+
+ fn len_outputs(&self) -> usize {
+ self.results().len()
+ }
+
+ fn input_at(&self, at: u32) -> Option<ValType> {
+ self.params().get(at as usize).copied()
+ }
+
+ fn output_at(&self, at: u32) -> Option<ValType> {
+ self.results().get(at as usize).copied()
+ }
+}
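A small sketch (not part of the vendored diff) of the `WasmFuncType` iterator helpers, going through the `FuncType` implementation just above; `signature_string` and `demo` are hypothetical names used only for illustration.

    use wasmparser::{FuncType, ValType, WasmFuncType};

    // Renders a signature like "[I32 I32] -> [I64]" using the generic
    // inputs()/outputs() iterators provided by the WasmFuncType trait.
    fn signature_string(ty: &FuncType) -> String {
        let inputs: Vec<String> = ty.inputs().map(|t| format!("{t:?}")).collect();
        let outputs: Vec<String> = ty.outputs().map(|t| format!("{t:?}")).collect();
        format!("[{}] -> [{}]", inputs.join(" "), outputs.join(" "))
    }

    fn demo() {
        let ty = FuncType::new([ValType::I32, ValType::I32], [ValType::I64]);
        assert_eq!(signature_string(&ty), "[I32 I32] -> [I64]");
    }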
diff --git a/third_party/rust/wasmparser/src/validator.rs b/third_party/rust/wasmparser/src/validator.rs
new file mode 100644
index 0000000000..04c207130b
--- /dev/null
+++ b/third_party/rust/wasmparser/src/validator.rs
@@ -0,0 +1,1514 @@
+/* Copyright 2018 Mozilla Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use crate::{
+ limits::*, BinaryReaderError, Encoding, FromReader, FunctionBody, HeapType, Parser, Payload,
+ Result, SectionLimited, ValType, WASM_COMPONENT_VERSION, WASM_MODULE_VERSION,
+};
+use std::mem;
+use std::ops::Range;
+use std::sync::Arc;
+
+/// Test whether the given buffer contains a valid WebAssembly module or component,
+/// analogous to [`WebAssembly.validate`][js] in the JS API.
+///
+/// This function requires that the bytes to validate are entirely resident in memory.
+/// Additionally this validates the given bytes with the default set of WebAssembly
+/// features implemented by `wasmparser`.
+///
+/// For more fine-tuned control over validation it's recommended to review the
+/// documentation of [`Validator`].
+///
+/// Upon success, the type information for the top-level module or component will
+/// be returned.
+///
+/// [js]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WebAssembly/validate
+pub fn validate(bytes: &[u8]) -> Result<Types> {
+ Validator::new().validate_all(bytes)
+}
+
+#[test]
+fn test_validate() {
+ assert!(validate(&[0x0, 0x61, 0x73, 0x6d, 0x1, 0x0, 0x0, 0x0]).is_ok());
+ assert!(validate(&[0x0, 0x61, 0x73, 0x6d, 0x2, 0x0, 0x0, 0x0]).is_err());
+}
+
+mod component;
+mod core;
+mod func;
+mod operators;
+pub mod types;
+
+use self::component::*;
+pub use self::core::ValidatorResources;
+use self::core::*;
+use self::types::{TypeAlloc, Types, TypesRef};
+pub use func::{FuncToValidate, FuncValidator, FuncValidatorAllocations};
+pub use operators::{Frame, FrameKind};
+
+fn check_max(cur_len: usize, amt_added: u32, max: usize, desc: &str, offset: usize) -> Result<()> {
+ if max
+ .checked_sub(cur_len)
+ .and_then(|amt| amt.checked_sub(amt_added as usize))
+ .is_none()
+ {
+ if max == 1 {
+ bail!(offset, "multiple {desc}");
+ }
+
+ bail!(offset, "{desc} count exceeds limit of {max}");
+ }
+
+ Ok(())
+}
+
+fn combine_type_sizes(a: u32, b: u32, offset: usize) -> Result<u32> {
+ match a.checked_add(b) {
+ Some(sum) if sum < MAX_WASM_TYPE_SIZE => Ok(sum),
+ _ => Err(format_err!(
+ offset,
+ "effective type size exceeds the limit of {MAX_WASM_TYPE_SIZE}",
+ )),
+ }
+}
+
+/// Validator for a WebAssembly binary module or component.
+///
+/// This structure encapsulates state necessary to validate a WebAssembly
+/// binary. This implements validation as defined by the [core
+/// specification][core]. A `Validator` is designed, like
+/// [`Parser`], to accept incremental input over time.
+/// Additionally, a `Validator` is designed for parallel validation of
+/// functions as they are received.
+///
+/// It's expected that you'll be using a [`Parser`] in tandem with a
+/// `Validator`. As each [`Payload`](crate::Payload) is received from a
+/// [`Parser`] you'll pass it into a `Validator` to test the validity of the
+/// payload. Note that all payloads received from a [`Parser`] are expected to
+/// be passed to a [`Validator`]. For example if you receive
+/// [`Payload::TypeSection`](crate::Payload) you'll call
+/// [`Validator::type_section`] to validate this.
+///
+/// The design of [`Validator`] intends that you'll interleave calls to
+/// validation with your own application's processing. Each variant, after it's
+/// received, will be validated and then your application would proceed as
+/// usual. At all times, however, you'll have access to the [`Validator`] and
+/// the validation context up to that point. This enables applications to check
+/// the types of functions and learn how many globals there are, for example.
+///
+/// [core]: https://webassembly.github.io/spec/core/valid/index.html
+#[derive(Default)]
+pub struct Validator {
+ /// The current state of the validator.
+ state: State,
+
+ /// The global type space used by the validator and any sub-validators.
+ types: TypeAlloc,
+
+ /// The module state when parsing a WebAssembly module.
+ module: Option<ModuleState>,
+
+ /// With the component model enabled, this stores the pushed component states.
+ /// The top of the stack is the current component state.
+ components: Vec<ComponentState>,
+
+ /// Enabled WebAssembly feature flags, dictating what's valid and what
+ /// isn't.
+ features: WasmFeatures,
+}
+
+#[derive(Debug, Clone, Copy, Eq, PartialEq)]
+enum State {
+ /// A header has not yet been parsed.
+ ///
+ /// The value is the expected encoding for the header.
+ Unparsed(Option<Encoding>),
+ /// A module header has been parsed.
+ ///
+ /// The associated module state is available via [`Validator::module`].
+ Module,
+ /// A component header has been parsed.
+ ///
+ /// The associated component state exists at the top of the
+ /// validator's [`Validator::components`] stack.
+ Component,
+ /// The parse has completed and no more data is expected.
+ End,
+}
+
+impl State {
+ fn ensure_parsable(&self, offset: usize) -> Result<()> {
+ match self {
+ Self::Module | Self::Component => Ok(()),
+ Self::Unparsed(_) => Err(BinaryReaderError::new(
+ "unexpected section before header was parsed",
+ offset,
+ )),
+ Self::End => Err(BinaryReaderError::new(
+ "unexpected section after parsing has completed",
+ offset,
+ )),
+ }
+ }
+
+ fn ensure_module(&self, section: &str, offset: usize) -> Result<()> {
+ self.ensure_parsable(offset)?;
+
+ match self {
+ Self::Module => Ok(()),
+ Self::Component => Err(format_err!(
+ offset,
+ "unexpected module {section} section while parsing a component",
+ )),
+ _ => unreachable!(),
+ }
+ }
+
+ fn ensure_component(&self, section: &str, offset: usize) -> Result<()> {
+ self.ensure_parsable(offset)?;
+
+ match self {
+ Self::Component => Ok(()),
+ Self::Module => Err(format_err!(
+ offset,
+ "unexpected component {section} section while parsing a module",
+ )),
+ _ => unreachable!(),
+ }
+ }
+}
+
+impl Default for State {
+ fn default() -> Self {
+ Self::Unparsed(None)
+ }
+}
+
+/// Flags for features that are enabled for validation.
+#[derive(Hash, Debug, Copy, Clone)]
+pub struct WasmFeatures {
+ /// The WebAssembly `mutable-global` proposal (enabled by default)
+ pub mutable_global: bool,
+ /// The WebAssembly `nontrapping-float-to-int-conversions` proposal (enabled by default)
+ pub saturating_float_to_int: bool,
+ /// The WebAssembly `sign-extension-ops` proposal (enabled by default)
+ pub sign_extension: bool,
+ /// The WebAssembly reference types proposal (enabled by default)
+ pub reference_types: bool,
+ /// The WebAssembly multi-value proposal (enabled by default)
+ pub multi_value: bool,
+ /// The WebAssembly bulk memory operations proposal (enabled by default)
+ pub bulk_memory: bool,
+ /// The WebAssembly SIMD proposal (enabled by default)
+ pub simd: bool,
+ /// The WebAssembly Relaxed SIMD proposal
+ pub relaxed_simd: bool,
+ /// The WebAssembly threads proposal
+ pub threads: bool,
+ /// The WebAssembly tail-call proposal
+ pub tail_call: bool,
+ /// Whether or not floating-point instructions are enabled.
+ ///
+ /// This is enabled by default and can be used to disallow floating-point
+ /// operators and types.
+ ///
+ /// This does not correspond to a WebAssembly proposal but is instead
+ /// intended for embeddings which have stricter-than-usual requirements
+ /// about execution. Floats in WebAssembly can have different NaN patterns
+ /// across hosts, which can lead to host-dependent execution that some
+ /// runtimes may not desire.
+ pub floats: bool,
+ /// The WebAssembly multi memory proposal
+ pub multi_memory: bool,
+ /// The WebAssembly exception handling proposal
+ pub exceptions: bool,
+ /// The WebAssembly memory64 proposal
+ pub memory64: bool,
+ /// The WebAssembly extended_const proposal
+ pub extended_const: bool,
+ /// The WebAssembly component model proposal.
+ pub component_model: bool,
+ /// The WebAssembly typed function references proposal
+ pub function_references: bool,
+ /// The WebAssembly memory control proposal
+ pub memory_control: bool,
+}
+
+impl WasmFeatures {
+ /// NOTE: this only checks that the value type corresponds to the feature set.
+ ///
+ /// To check that reference types are valid, we need access to the module
+ /// types; use `module.check_value_type` for that.
+ pub(crate) fn check_value_type(&self, ty: ValType) -> Result<(), &'static str> {
+ match ty {
+ ValType::I32 | ValType::I64 => Ok(()),
+ ValType::F32 | ValType::F64 => {
+ if self.floats {
+ Ok(())
+ } else {
+ Err("floating-point support is disabled")
+ }
+ }
+ ValType::Ref(r) => {
+ if self.reference_types {
+ if !self.function_references {
+ match (r.heap_type, r.nullable) {
+ (_, false) => {
+ Err("function references required for non-nullable types")
+ }
+ (HeapType::TypedFunc(_), _) => {
+ Err("function references required for index reference types")
+ }
+ _ => Ok(()),
+ }
+ } else {
+ Ok(())
+ }
+ } else {
+ Err("reference types support is not enabled")
+ }
+ }
+ ValType::V128 => {
+ if self.simd {
+ Ok(())
+ } else {
+ Err("SIMD support is not enabled")
+ }
+ }
+ }
+ }
+}
+
+impl Default for WasmFeatures {
+ fn default() -> WasmFeatures {
+ WasmFeatures {
+ // Off-by-default features.
+ relaxed_simd: false,
+ threads: false,
+ multi_memory: false,
+ exceptions: false,
+ memory64: false,
+ extended_const: false,
+ component_model: false,
+ function_references: false,
+ memory_control: false,
+
+ // On-by-default features (phase 4 or greater).
+ mutable_global: true,
+ saturating_float_to_int: true,
+ sign_extension: true,
+ bulk_memory: true,
+ multi_value: true,
+ reference_types: true,
+ tail_call: true,
+ simd: true,
+ floats: true,
+ }
+ }
+}
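As a sketch (not part of the vendored diff), enabling one of the off-by-default proposals typically looks like the following, using the `new_with_features` constructor and `validate_all` defined later in this file; `validate_with_exceptions` is a hypothetical helper.

    use wasmparser::{Result, Validator, WasmFeatures};

    fn validate_with_exceptions(bytes: &[u8]) -> Result<()> {
        // Start from the defaults and switch on a single off-by-default proposal.
        let features = WasmFeatures {
            exceptions: true,
            ..WasmFeatures::default()
        };
        Validator::new_with_features(features).validate_all(bytes)?;
        Ok(())
    }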
+
+/// Possible return values from [`Validator::payload`].
+#[allow(clippy::large_enum_variant)]
+pub enum ValidPayload<'a> {
+ /// The payload validated, no further action need be taken.
+ Ok,
+ /// The payload validated, but it started a nested module or component.
+ ///
+ /// This result indicates that the specified parser should be used instead
+ /// of the currently-used parser until this returned one ends.
+ Parser(Parser),
+ /// A function was found to be validated.
+ Func(FuncToValidate<ValidatorResources>, FunctionBody<'a>),
+ /// The end payload was validated and the types known to the validator
+ /// are provided.
+ End(Types),
+}
+
+impl Validator {
+ /// Creates a new [`Validator`] ready to validate a WebAssembly module
+ /// or component.
+ ///
+ /// The new validator will receive payloads parsed from
+ /// [`Parser`], and expects the first payload received to be
+ /// the version header from the parser.
+ pub fn new() -> Validator {
+ Validator::default()
+ }
+
+ /// Creates a new [`Validator`] which has the specified set of wasm
+ /// features activated for validation.
+ ///
+ /// This function is the same as [`Validator::new`] except it also allows
+ /// you to customize the active wasm features in use for validation. This
+ /// can allow enabling experimental proposals or also turning off
+ /// on-by-default wasm proposals.
+ pub fn new_with_features(features: WasmFeatures) -> Validator {
+ let mut ret = Validator::new();
+ ret.features = features;
+ ret
+ }
+
+ /// Returns the wasm features used for this validator.
+ pub fn features(&self) -> &WasmFeatures {
+ &self.features
+ }
+
+ /// Validates an entire in-memory module or component with this validator.
+ ///
+ /// This function will internally create a [`Parser`] to parse the `bytes`
+ /// provided. The entire module or component specified by `bytes` will be
+ /// parsed and validated.
+ ///
+ /// Upon success, the type information for the top-level module or component
+ /// will be returned.
+ pub fn validate_all(&mut self, bytes: &[u8]) -> Result<Types> {
+ let mut functions_to_validate = Vec::new();
+ let mut last_types = None;
+ for payload in Parser::new(0).parse_all(bytes) {
+ match self.payload(&payload?)? {
+ ValidPayload::Func(a, b) => {
+ functions_to_validate.push((a, b));
+ }
+ ValidPayload::End(types) => {
+ // Only the last (top-level) type information will be returned
+ last_types = Some(types);
+ }
+ _ => {}
+ }
+ }
+
+ let mut allocs = FuncValidatorAllocations::default();
+ for (func, body) in functions_to_validate {
+ let mut validator = func.into_validator(allocs);
+ validator.validate(&body)?;
+ allocs = validator.into_allocations();
+ }
+
+ Ok(last_types.unwrap())
+ }
+
+ /// Gets the types known by the validator so far within the
+ /// module/component `level` modules/components up from the
+ /// module/component currently being parsed.
+ ///
+ /// For instance, calling `validator.types(0)` will get the types of the
+ /// module/component currently being parsed, and `validator.types(1)` will
+ /// get the types of the component containing that module/component.
+ ///
+ /// Returns `None` if there is no module/component that many levels up.
+ pub fn types(&self, mut level: usize) -> Option<TypesRef> {
+ if let Some(module) = &self.module {
+ if level == 0 {
+ return Some(TypesRef::from_module(&self.types, &module.module));
+ } else {
+ level -= 1;
+ }
+ }
+
+ self.components
+ .iter()
+ .nth_back(level)
+ .map(|component| TypesRef::from_component(&self.types, component))
+ }
+
+ /// Convenience function to validate a single [`Payload`].
+ ///
+ /// This function is intended to be used as a convenience. It will
+ /// internally perform any validation necessary to validate the [`Payload`]
+ /// provided. If you're already matching on [`Payload`] in your application,
+ /// it may be more appropriate to call the individual per-variant methods on
+ /// [`Validator`] instead, such as [`Validator::type_section`].
+ ///
+ /// This function returns a [`ValidPayload`] variant on success, indicating
+ /// one of a few possible actions that need to be taken after a payload is
+ /// validated. For example, function contents are not validated here; they're
+ /// returned through [`ValidPayload`] for validation by the caller.
+ pub fn payload<'a>(&mut self, payload: &Payload<'a>) -> Result<ValidPayload<'a>> {
+ use crate::Payload::*;
+ match payload {
+ Version {
+ num,
+ encoding,
+ range,
+ } => self.version(*num, *encoding, range)?,
+
+ // Module sections
+ TypeSection(s) => self.type_section(s)?,
+ ImportSection(s) => self.import_section(s)?,
+ FunctionSection(s) => self.function_section(s)?,
+ TableSection(s) => self.table_section(s)?,
+ MemorySection(s) => self.memory_section(s)?,
+ TagSection(s) => self.tag_section(s)?,
+ GlobalSection(s) => self.global_section(s)?,
+ ExportSection(s) => self.export_section(s)?,
+ StartSection { func, range } => self.start_section(*func, range)?,
+ ElementSection(s) => self.element_section(s)?,
+ DataCountSection { count, range } => self.data_count_section(*count, range)?,
+ CodeSectionStart {
+ count,
+ range,
+ size: _,
+ } => self.code_section_start(*count, range)?,
+ CodeSectionEntry(body) => {
+ let func_validator = self.code_section_entry(body)?;
+ return Ok(ValidPayload::Func(func_validator, body.clone()));
+ }
+ DataSection(s) => self.data_section(s)?,
+
+ // Component sections
+ ModuleSection { parser, range, .. } => {
+ self.module_section(range)?;
+ return Ok(ValidPayload::Parser(parser.clone()));
+ }
+ InstanceSection(s) => self.instance_section(s)?,
+ CoreTypeSection(s) => self.core_type_section(s)?,
+ ComponentSection { parser, range, .. } => {
+ self.component_section(range)?;
+ return Ok(ValidPayload::Parser(parser.clone()));
+ }
+ ComponentInstanceSection(s) => self.component_instance_section(s)?,
+ ComponentAliasSection(s) => self.component_alias_section(s)?,
+ ComponentTypeSection(s) => self.component_type_section(s)?,
+ ComponentCanonicalSection(s) => self.component_canonical_section(s)?,
+ ComponentStartSection { start, range } => self.component_start_section(start, range)?,
+ ComponentImportSection(s) => self.component_import_section(s)?,
+ ComponentExportSection(s) => self.component_export_section(s)?,
+
+ End(offset) => return Ok(ValidPayload::End(self.end(*offset)?)),
+
+ CustomSection { .. } => {} // no validation for custom sections
+ UnknownSection { id, range, .. } => self.unknown_section(*id, range)?,
+ }
+ Ok(ValidPayload::Ok)
+ }
+
+ /// Validates [`Payload::Version`](crate::Payload).
+ pub fn version(&mut self, num: u16, encoding: Encoding, range: &Range<usize>) -> Result<()> {
+ match &self.state {
+ State::Unparsed(expected) => {
+ if let Some(expected) = expected {
+ if *expected != encoding {
+ bail!(
+ range.start,
+ "expected a version header for a {}",
+ match expected {
+ Encoding::Module => "module",
+ Encoding::Component => "component",
+ }
+ );
+ }
+ }
+ }
+ _ => {
+ return Err(BinaryReaderError::new(
+ "wasm version header out of order",
+ range.start,
+ ))
+ }
+ }
+
+ self.state = match encoding {
+ Encoding::Module => {
+ if num == WASM_MODULE_VERSION {
+ assert!(self.module.is_none());
+ self.module = Some(ModuleState::default());
+ State::Module
+ } else {
+ bail!(range.start, "unknown binary version: {num:#x}");
+ }
+ }
+ Encoding::Component => {
+ if !self.features.component_model {
+ bail!(
+ range.start,
+ "unknown binary version and encoding combination: {num:#x} and 0x1, \
+ note: encoded as a component but the WebAssembly component model feature \
+ is not enabled - enable the feature to allow component validation",
+ );
+ }
+ if num == WASM_COMPONENT_VERSION {
+ self.components.push(ComponentState::default());
+ State::Component
+ } else if num < WASM_COMPONENT_VERSION {
+ bail!(range.start, "unsupported component version: {num:#x}");
+ } else {
+ bail!(range.start, "unknown component version: {num:#x}");
+ }
+ }
+ };
+
+ Ok(())
+ }
+
+ /// Validates [`Payload::TypeSection`](crate::Payload).
+ pub fn type_section(&mut self, section: &crate::TypeSectionReader<'_>) -> Result<()> {
+ self.process_module_section(
+ Order::Type,
+ section,
+ "type",
+ |state, _, types, count, offset| {
+ check_max(
+ state.module.types.len(),
+ count,
+ MAX_WASM_TYPES,
+ "types",
+ offset,
+ )?;
+ types.reserve(count as usize);
+ state.module.assert_mut().types.reserve(count as usize);
+ Ok(())
+ },
+ |state, features, types, def, offset| {
+ state
+ .module
+ .assert_mut()
+ .add_type(def, features, types, offset, false /* checked above */)
+ },
+ )
+ }
+
+ /// Validates [`Payload::ImportSection`](crate::Payload).
+ ///
+ /// This method should only be called when parsing a module.
+ pub fn import_section(&mut self, section: &crate::ImportSectionReader<'_>) -> Result<()> {
+ self.process_module_section(
+ Order::Import,
+ section,
+ "import",
+ |_, _, _, _, _| Ok(()), // add_import will check limits
+ |state, features, types, import, offset| {
+ state
+ .module
+ .assert_mut()
+ .add_import(import, features, types, offset)
+ },
+ )
+ }
+
+ /// Validates [`Payload::FunctionSection`](crate::Payload).
+ ///
+ /// This method should only be called when parsing a module.
+ pub fn function_section(&mut self, section: &crate::FunctionSectionReader<'_>) -> Result<()> {
+ self.process_module_section(
+ Order::Function,
+ section,
+ "function",
+ |state, _, _, count, offset| {
+ check_max(
+ state.module.functions.len(),
+ count,
+ MAX_WASM_FUNCTIONS,
+ "functions",
+ offset,
+ )?;
+ state.module.assert_mut().functions.reserve(count as usize);
+ debug_assert!(state.expected_code_bodies.is_none());
+ state.expected_code_bodies = Some(count);
+ Ok(())
+ },
+ |state, _, types, ty, offset| state.module.assert_mut().add_function(ty, types, offset),
+ )
+ }
+
+ /// Validates [`Payload::TableSection`](crate::Payload).
+ ///
+ /// This method should only be called when parsing a module.
+ pub fn table_section(&mut self, section: &crate::TableSectionReader<'_>) -> Result<()> {
+ let features = self.features;
+ self.process_module_section(
+ Order::Table,
+ section,
+ "table",
+ |state, _, _, count, offset| {
+ check_max(
+ state.module.tables.len(),
+ count,
+ state.module.max_tables(&features),
+ "tables",
+ offset,
+ )?;
+ state.module.assert_mut().tables.reserve(count as usize);
+ Ok(())
+ },
+ |state, features, types, table, offset| state.add_table(table, features, types, offset),
+ )
+ }
+
+ /// Validates [`Payload::MemorySection`](crate::Payload).
+ ///
+ /// This method should only be called when parsing a module.
+ pub fn memory_section(&mut self, section: &crate::MemorySectionReader<'_>) -> Result<()> {
+ self.process_module_section(
+ Order::Memory,
+ section,
+ "memory",
+ |state, features, _, count, offset| {
+ check_max(
+ state.module.memories.len(),
+ count,
+ state.module.max_memories(features),
+ "memories",
+ offset,
+ )?;
+ state.module.assert_mut().memories.reserve(count as usize);
+ Ok(())
+ },
+ |state, features, _, ty, offset| {
+ state.module.assert_mut().add_memory(ty, features, offset)
+ },
+ )
+ }
+
+ /// Validates [`Payload::TagSection`](crate::Payload).
+ ///
+ /// This method should only be called when parsing a module.
+ pub fn tag_section(&mut self, section: &crate::TagSectionReader<'_>) -> Result<()> {
+ if !self.features.exceptions {
+ return Err(BinaryReaderError::new(
+ "exceptions proposal not enabled",
+ section.range().start,
+ ));
+ }
+
+ self.process_module_section(
+ Order::Tag,
+ section,
+ "tag",
+ |state, _, _, count, offset| {
+ check_max(
+ state.module.tags.len(),
+ count,
+ MAX_WASM_TAGS,
+ "tags",
+ offset,
+ )?;
+ state.module.assert_mut().tags.reserve(count as usize);
+ Ok(())
+ },
+ |state, features, types, ty, offset| {
+ state
+ .module
+ .assert_mut()
+ .add_tag(ty, features, types, offset)
+ },
+ )
+ }
+
+ /// Validates [`Payload::GlobalSection`](crate::Payload).
+ ///
+ /// This method should only be called when parsing a module.
+ pub fn global_section(&mut self, section: &crate::GlobalSectionReader<'_>) -> Result<()> {
+ self.process_module_section(
+ Order::Global,
+ section,
+ "global",
+ |state, _, _, count, offset| {
+ check_max(
+ state.module.globals.len(),
+ count,
+ MAX_WASM_GLOBALS,
+ "globals",
+ offset,
+ )?;
+ state.module.assert_mut().globals.reserve(count as usize);
+ Ok(())
+ },
+ |state, features, types, global, offset| {
+ state.add_global(global, features, types, offset)
+ },
+ )
+ }
+
+ /// Validates [`Payload::ExportSection`](crate::Payload).
+ ///
+ /// This method should only be called when parsing a module.
+ pub fn export_section(&mut self, section: &crate::ExportSectionReader<'_>) -> Result<()> {
+ self.process_module_section(
+ Order::Export,
+ section,
+ "export",
+ |state, _, _, count, offset| {
+ check_max(
+ state.module.exports.len(),
+ count,
+ MAX_WASM_EXPORTS,
+ "exports",
+ offset,
+ )?;
+ state.module.assert_mut().exports.reserve(count as usize);
+ Ok(())
+ },
+ |state, features, _, e, offset| {
+ let state = state.module.assert_mut();
+ let ty = state.export_to_entity_type(&e, offset)?;
+ state.add_export(e.name, ty, features, offset, false /* checked above */)
+ },
+ )
+ }
+
+ /// Validates [`Payload::StartSection`](crate::Payload).
+ ///
+ /// This method should only be called when parsing a module.
+ pub fn start_section(&mut self, func: u32, range: &Range<usize>) -> Result<()> {
+ let offset = range.start;
+ self.state.ensure_module("start", offset)?;
+ let state = self.module.as_mut().unwrap();
+ state.update_order(Order::Start, offset)?;
+
+ let ty = state.module.get_func_type(func, &self.types, offset)?;
+ if !ty.params().is_empty() || !ty.results().is_empty() {
+ return Err(BinaryReaderError::new(
+ "invalid start function type",
+ offset,
+ ));
+ }
+
+ Ok(())
+ }
+
+ /// Validates [`Payload::ElementSection`](crate::Payload).
+ ///
+ /// This method should only be called when parsing a module.
+ pub fn element_section(&mut self, section: &crate::ElementSectionReader<'_>) -> Result<()> {
+ self.process_module_section(
+ Order::Element,
+ section,
+ "element",
+ |state, _, _, count, offset| {
+ check_max(
+ state.module.element_types.len(),
+ count,
+ MAX_WASM_ELEMENT_SEGMENTS,
+ "element segments",
+ offset,
+ )?;
+ state
+ .module
+ .assert_mut()
+ .element_types
+ .reserve(count as usize);
+ Ok(())
+ },
+ |state, features, types, e, offset| {
+ state.add_element_segment(e, features, types, offset)
+ },
+ )
+ }
+
+ /// Validates [`Payload::DataCountSection`](crate::Payload).
+ ///
+ /// This method should only be called when parsing a module.
+ pub fn data_count_section(&mut self, count: u32, range: &Range<usize>) -> Result<()> {
+ let offset = range.start;
+ self.state.ensure_module("data count", offset)?;
+
+ let state = self.module.as_mut().unwrap();
+ state.update_order(Order::DataCount, offset)?;
+
+ if count > MAX_WASM_DATA_SEGMENTS as u32 {
+ return Err(BinaryReaderError::new(
+ "data count section specifies too many data segments",
+ offset,
+ ));
+ }
+
+ state.module.assert_mut().data_count = Some(count);
+ Ok(())
+ }
+
+ /// Validates [`Payload::CodeSectionStart`](crate::Payload).
+ ///
+ /// This method should only be called when parsing a module.
+ pub fn code_section_start(&mut self, count: u32, range: &Range<usize>) -> Result<()> {
+ let offset = range.start;
+ self.state.ensure_module("code", offset)?;
+
+ let state = self.module.as_mut().unwrap();
+ state.update_order(Order::Code, offset)?;
+
+ match state.expected_code_bodies.take() {
+ Some(n) if n == count => {}
+ Some(_) => {
+ return Err(BinaryReaderError::new(
+ "function and code section have inconsistent lengths",
+ offset,
+ ));
+ }
+ // empty code sections are allowed even if the function section is
+ // missing
+ None if count == 0 => {}
+ None => {
+ return Err(BinaryReaderError::new(
+ "code section without function section",
+ offset,
+ ))
+ }
+ }
+
+ // Take a snapshot of the types when we start the code section.
+ state.module.assert_mut().snapshot = Some(Arc::new(self.types.commit()));
+
+ Ok(())
+ }
+
+ /// Validates [`Payload::CodeSectionEntry`](crate::Payload).
+ ///
+ /// This function will prepare a [`FuncToValidate`] which can be used to
+ /// create a [`FuncValidator`] to validate the function. The function body
+ /// provided will not be parsed or validated by this function.
+ ///
+ /// Note that the returned [`FuncToValidate`] is "connected" to this
+ /// [`Validator`] in that it uses the internal context of this validator for
+ /// validating the function. The [`FuncToValidate`] can be sent to another
+ /// thread, for example, to offload actual processing of functions
+ /// elsewhere.
+ ///
+ /// This method should only be called when parsing a module.
+ pub fn code_section_entry(
+ &mut self,
+ body: &crate::FunctionBody,
+ ) -> Result<FuncToValidate<ValidatorResources>> {
+ let offset = body.range().start;
+ self.state.ensure_module("code", offset)?;
+
+ let state = self.module.as_mut().unwrap();
+
+ let (index, ty) = state.next_code_index_and_type(offset)?;
+ Ok(FuncToValidate::new(
+ index,
+ ty,
+ ValidatorResources(state.module.arc().clone()),
+ &self.features,
+ ))
+ }
+
+ /// Validates [`Payload::DataSection`](crate::Payload).
+ ///
+ /// This method should only be called when parsing a module.
+ pub fn data_section(&mut self, section: &crate::DataSectionReader<'_>) -> Result<()> {
+ self.process_module_section(
+ Order::Data,
+ section,
+ "data",
+ |state, _, _, count, offset| {
+ state.data_segment_count = count;
+ check_max(0, count, MAX_WASM_DATA_SEGMENTS, "data segments", offset)
+ },
+ |state, features, types, d, offset| state.add_data_segment(d, features, types, offset),
+ )
+ }
+
+ /// Validates [`Payload::ModuleSection`](crate::Payload).
+ ///
+ /// This method should only be called when parsing a component.
+ pub fn module_section(&mut self, range: &Range<usize>) -> Result<()> {
+ self.state.ensure_component("module", range.start)?;
+
+ let current = self.components.last_mut().unwrap();
+ check_max(
+ current.core_modules.len(),
+ 1,
+ MAX_WASM_MODULES,
+ "modules",
+ range.start,
+ )?;
+
+ match mem::replace(&mut self.state, State::Unparsed(Some(Encoding::Module))) {
+ State::Component => {}
+ _ => unreachable!(),
+ }
+
+ Ok(())
+ }
+
+ /// Validates [`Payload::InstanceSection`](crate::Payload).
+ ///
+ /// This method should only be called when parsing a component.
+ pub fn instance_section(&mut self, section: &crate::InstanceSectionReader) -> Result<()> {
+ self.process_component_section(
+ section,
+ "core instance",
+ |components, _, count, offset| {
+ let current = components.last_mut().unwrap();
+ check_max(
+ current.instance_count(),
+ count,
+ MAX_WASM_INSTANCES,
+ "instances",
+ offset,
+ )?;
+ current.core_instances.reserve(count as usize);
+ Ok(())
+ },
+ |components, types, _, instance, offset| {
+ components
+ .last_mut()
+ .unwrap()
+ .add_core_instance(instance, types, offset)
+ },
+ )
+ }
+
+ /// Validates [`Payload::CoreTypeSection`](crate::Payload).
+ ///
+ /// This method should only be called when parsing a component.
+ pub fn core_type_section(&mut self, section: &crate::CoreTypeSectionReader<'_>) -> Result<()> {
+ self.process_component_section(
+ section,
+ "core type",
+ |components, types, count, offset| {
+ let current = components.last_mut().unwrap();
+ check_max(current.type_count(), count, MAX_WASM_TYPES, "types", offset)?;
+ types.reserve(count as usize);
+ current.core_types.reserve(count as usize);
+ Ok(())
+ },
+ |components, types, features, ty, offset| {
+ ComponentState::add_core_type(
+ components, ty, features, types, offset, false, /* checked above */
+ )
+ },
+ )
+ }
+
+ /// Validates [`Payload::ComponentSection`](crate::Payload).
+ ///
+ /// This method should only be called when parsing a component.
+ pub fn component_section(&mut self, range: &Range<usize>) -> Result<()> {
+ self.state.ensure_component("component", range.start)?;
+
+ let current = self.components.last_mut().unwrap();
+ check_max(
+ current.components.len(),
+ 1,
+ MAX_WASM_COMPONENTS,
+ "components",
+ range.start,
+ )?;
+
+ match mem::replace(&mut self.state, State::Unparsed(Some(Encoding::Component))) {
+ State::Component => {}
+ _ => unreachable!(),
+ }
+
+ Ok(())
+ }
+
+ /// Validates [`Payload::ComponentInstanceSection`](crate::Payload).
+ ///
+ /// This method should only be called when parsing a component.
+ pub fn component_instance_section(
+ &mut self,
+ section: &crate::ComponentInstanceSectionReader,
+ ) -> Result<()> {
+ self.process_component_section(
+ section,
+ "instance",
+ |components, _, count, offset| {
+ let current = components.last_mut().unwrap();
+ check_max(
+ current.instance_count(),
+ count,
+ MAX_WASM_INSTANCES,
+ "instances",
+ offset,
+ )?;
+ current.instances.reserve(count as usize);
+ Ok(())
+ },
+ |components, types, _, instance, offset| {
+ components
+ .last_mut()
+ .unwrap()
+ .add_instance(instance, types, offset)
+ },
+ )
+ }
+
+ /// Validates [`Payload::ComponentAliasSection`](crate::Payload).
+ ///
+ /// This method should only be called when parsing a component.
+ pub fn component_alias_section(
+ &mut self,
+ section: &crate::ComponentAliasSectionReader<'_>,
+ ) -> Result<()> {
+ self.process_component_section(
+ section,
+ "alias",
+ |_, _, _, _| Ok(()), // maximums checked via `add_alias`
+ |components, types, _, alias, offset| -> Result<(), BinaryReaderError> {
+ ComponentState::add_alias(components, alias, types, offset)
+ },
+ )
+ }
+
+ /// Validates [`Payload::ComponentTypeSection`](crate::Payload).
+ ///
+ /// This method should only be called when parsing a component.
+ pub fn component_type_section(
+ &mut self,
+ section: &crate::ComponentTypeSectionReader,
+ ) -> Result<()> {
+ self.process_component_section(
+ section,
+ "type",
+ |components, types, count, offset| {
+ let current = components.last_mut().unwrap();
+ check_max(current.type_count(), count, MAX_WASM_TYPES, "types", offset)?;
+ types.reserve(count as usize);
+ current.types.reserve(count as usize);
+ Ok(())
+ },
+ |components, types, features, ty, offset| {
+ ComponentState::add_type(
+ components, ty, features, types, offset, false, /* checked above */
+ )
+ },
+ )
+ }
+
+ /// Validates [`Payload::ComponentCanonicalSection`](crate::Payload).
+ ///
+ /// This method should only be called when parsing a component.
+ pub fn component_canonical_section(
+ &mut self,
+ section: &crate::ComponentCanonicalSectionReader,
+ ) -> Result<()> {
+ self.process_component_section(
+ section,
+ "function",
+ |components, _, count, offset| {
+ let current = components.last_mut().unwrap();
+ check_max(
+ current.function_count(),
+ count,
+ MAX_WASM_FUNCTIONS,
+ "functions",
+ offset,
+ )?;
+ current.funcs.reserve(count as usize);
+ Ok(())
+ },
+ |components, types, _, func, offset| {
+ let current = components.last_mut().unwrap();
+ match func {
+ crate::CanonicalFunction::Lift {
+ core_func_index,
+ type_index,
+ options,
+ } => current.lift_function(
+ core_func_index,
+ type_index,
+ options.into_vec(),
+ types,
+ offset,
+ ),
+ crate::CanonicalFunction::Lower {
+ func_index,
+ options,
+ } => current.lower_function(func_index, options.into_vec(), types, offset),
+ }
+ },
+ )
+ }
+
+ /// Validates [`Payload::ComponentStartSection`](crate::Payload).
+ ///
+ /// This method should only be called when parsing a component.
+ pub fn component_start_section(
+ &mut self,
+ f: &crate::ComponentStartFunction,
+ range: &Range<usize>,
+ ) -> Result<()> {
+ self.state.ensure_component("start", range.start)?;
+
+ self.components.last_mut().unwrap().add_start(
+ f.func_index,
+ &f.arguments,
+ f.results,
+ &self.types,
+ range.start,
+ )
+ }
+
+ /// Validates [`Payload::ComponentImportSection`](crate::Payload).
+ ///
+ /// This method should only be called when parsing a component.
+ pub fn component_import_section(
+ &mut self,
+ section: &crate::ComponentImportSectionReader,
+ ) -> Result<()> {
+ self.process_component_section(
+ section,
+ "import",
+ |_, _, _, _| Ok(()), // add_import will check limits
+ |components, types, _, import, offset| {
+ components
+ .last_mut()
+ .unwrap()
+ .add_import(import, types, offset)
+ },
+ )
+ }
+
+ /// Validates [`Payload::ComponentExportSection`](crate::Payload).
+ ///
+ /// This method should only be called when parsing a component.
+ pub fn component_export_section(
+ &mut self,
+ section: &crate::ComponentExportSectionReader,
+ ) -> Result<()> {
+ self.process_component_section(
+ section,
+ "export",
+ |components, _, count, offset| {
+ let current = components.last_mut().unwrap();
+ check_max(
+ current.externs.len(),
+ count,
+ MAX_WASM_EXPORTS,
+ "imports and exports",
+ offset,
+ )?;
+ current.externs.reserve(count as usize);
+ Ok(())
+ },
+ |components, types, _, export, offset| {
+ let current = components.last_mut().unwrap();
+ let ty = current.export_to_entity_type(&export, types, offset)?;
+ current.add_export(
+ export.name,
+ export.url,
+ ty,
+ offset,
+ false, /* checked above */
+ )
+ },
+ )
+ }
+
+ /// Validates [`Payload::UnknownSection`](crate::Payload).
+ ///
+ /// Currently always returns an error.
+ pub fn unknown_section(&mut self, id: u8, range: &Range<usize>) -> Result<()> {
+ Err(format_err!(range.start, "malformed section id: {id}"))
+ }
+
+ /// Validates [`Payload::End`](crate::Payload).
+ ///
+ /// Returns the types known to the validator for the module or component.
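+    ///
+    /// # Example
+    ///
+    /// A hedged sketch (not taken from the upstream docs), assuming every
+    /// prior payload has already been fed to this validator and `end_offset`
+    /// is the offset reported by the parser's final `End` payload:
+    ///
+    /// ```ignore
+    /// let types = validator.end(end_offset)?;
+    /// println!("validated {} functions", types.function_count());
+    /// ```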
+ pub fn end(&mut self, offset: usize) -> Result<Types> {
+ match std::mem::replace(&mut self.state, State::End) {
+ State::Unparsed(_) => Err(BinaryReaderError::new(
+ "cannot call `end` before a header has been parsed",
+ offset,
+ )),
+ State::End => Err(BinaryReaderError::new(
+ "cannot call `end` after parsing has completed",
+ offset,
+ )),
+ State::Module => {
+ let mut state = self.module.take().unwrap();
+ state.validate_end(offset)?;
+
+ // If there's a parent component, we'll add a module to the parent state
+ // and continue to validate the component
+ if let Some(parent) = self.components.last_mut() {
+ parent.add_core_module(&state.module, &mut self.types, offset)?;
+ self.state = State::Component;
+ }
+
+ Ok(Types::from_module(
+ self.types.commit(),
+ state.module.arc().clone(),
+ ))
+ }
+ State::Component => {
+ let mut component = self.components.pop().unwrap();
+
+ // Validate that all values were used for the component
+ if let Some(index) = component.values.iter().position(|(_, used)| !*used) {
+                    return Err(format_err!(
+                        offset,
+                        "value index {index} was not used as part of an instantiation, start function, or export"
+                    ));
+ }
+
+                // If there's a parent component, add this finished component to the
+                // parent and continue validating the parent component
+ if let Some(parent) = self.components.last_mut() {
+ parent.add_component(&mut component, &mut self.types);
+ self.state = State::Component;
+ }
+
+ Ok(Types::from_component(self.types.commit(), component))
+ }
+ }
+ }
+
+ fn process_module_section<'a, T>(
+ &mut self,
+ order: Order,
+ section: &SectionLimited<'a, T>,
+ name: &str,
+ validate_section: impl FnOnce(
+ &mut ModuleState,
+ &WasmFeatures,
+ &mut TypeAlloc,
+ u32,
+ usize,
+ ) -> Result<()>,
+ mut validate_item: impl FnMut(
+ &mut ModuleState,
+ &WasmFeatures,
+ &mut TypeAlloc,
+ T,
+ usize,
+ ) -> Result<()>,
+ ) -> Result<()>
+ where
+ T: FromReader<'a>,
+ {
+ let offset = section.range().start;
+ self.state.ensure_module(name, offset)?;
+
+ let state = self.module.as_mut().unwrap();
+ state.update_order(order, offset)?;
+
+ validate_section(
+ state,
+ &self.features,
+ &mut self.types,
+ section.count(),
+ offset,
+ )?;
+
+ for item in section.clone().into_iter_with_offsets() {
+ let (offset, item) = item?;
+ validate_item(state, &self.features, &mut self.types, item, offset)?;
+ }
+
+ Ok(())
+ }
+
+ fn process_component_section<'a, T>(
+ &mut self,
+ section: &SectionLimited<'a, T>,
+ name: &str,
+ validate_section: impl FnOnce(
+ &mut Vec<ComponentState>,
+ &mut TypeAlloc,
+ u32,
+ usize,
+ ) -> Result<()>,
+ mut validate_item: impl FnMut(
+ &mut Vec<ComponentState>,
+ &mut TypeAlloc,
+ &WasmFeatures,
+ T,
+ usize,
+ ) -> Result<()>,
+ ) -> Result<()>
+ where
+ T: FromReader<'a>,
+ {
+ let offset = section.range().start;
+
+ if !self.features.component_model {
+ return Err(BinaryReaderError::new(
+ "component model feature is not enabled",
+ offset,
+ ));
+ }
+
+ self.state.ensure_component(name, offset)?;
+ validate_section(
+ &mut self.components,
+ &mut self.types,
+ section.count(),
+ offset,
+ )?;
+
+ for item in section.clone().into_iter_with_offsets() {
+ let (offset, item) = item?;
+ validate_item(
+ &mut self.components,
+ &mut self.types,
+ &self.features,
+ item,
+ offset,
+ )?;
+ }
+
+ Ok(())
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::{GlobalType, MemoryType, RefType, TableType, ValType, Validator, WasmFeatures};
+ use anyhow::Result;
+
+ #[test]
+ fn test_module_type_information() -> Result<()> {
+ let bytes = wat::parse_str(
+ r#"
+ (module
+ (type (func (param i32 i64) (result i32)))
+ (memory 1 5)
+ (table 10 funcref)
+ (global (mut i32) (i32.const 0))
+ (func (type 0) (i32.const 0))
+ (tag (param i64 i32))
+ (elem funcref (ref.func 0))
+ )
+ "#,
+ )?;
+
+ let mut validator = Validator::new_with_features(WasmFeatures {
+ exceptions: true,
+ ..Default::default()
+ });
+
+ let types = validator.validate_all(&bytes)?;
+
+ assert_eq!(types.type_count(), 2);
+ assert_eq!(types.memory_count(), 1);
+ assert_eq!(types.table_count(), 1);
+ assert_eq!(types.global_count(), 1);
+ assert_eq!(types.function_count(), 1);
+ assert_eq!(types.tag_count(), 1);
+ assert_eq!(types.element_count(), 1);
+ assert_eq!(types.module_count(), 0);
+ assert_eq!(types.component_count(), 0);
+ assert_eq!(types.instance_count(), 0);
+ assert_eq!(types.value_count(), 0);
+
+ match types.func_type_at(0) {
+ Some(ty) => {
+ assert_eq!(ty.params(), [ValType::I32, ValType::I64]);
+ assert_eq!(ty.results(), [ValType::I32]);
+ }
+ _ => unreachable!(),
+ }
+
+ match types.func_type_at(1) {
+ Some(ty) => {
+ assert_eq!(ty.params(), [ValType::I64, ValType::I32]);
+ assert_eq!(ty.results(), []);
+ }
+ _ => unreachable!(),
+ }
+
+ assert_eq!(
+ types.memory_at(0),
+ Some(MemoryType {
+ memory64: false,
+ shared: false,
+ initial: 1,
+ maximum: Some(5)
+ })
+ );
+
+ assert_eq!(
+ types.table_at(0),
+ Some(TableType {
+ initial: 10,
+ maximum: None,
+ element_type: RefType::FUNCREF,
+ })
+ );
+
+ assert_eq!(
+ types.global_at(0),
+ Some(GlobalType {
+ content_type: ValType::I32,
+ mutable: true
+ })
+ );
+
+ match types.function_at(0) {
+ Some(ty) => {
+ assert_eq!(ty.params(), [ValType::I32, ValType::I64]);
+ assert_eq!(ty.results(), [ValType::I32]);
+ }
+ _ => unreachable!(),
+ }
+
+ match types.tag_at(0) {
+ Some(ty) => {
+ assert_eq!(ty.params(), [ValType::I64, ValType::I32]);
+ assert_eq!(ty.results(), []);
+ }
+ _ => unreachable!(),
+ }
+
+ assert_eq!(types.element_at(0), Some(RefType::FUNCREF));
+
+ Ok(())
+ }
+
+ #[test]
+ fn test_type_id_aliasing() -> Result<()> {
+ let bytes = wat::parse_str(
+ r#"
+ (component
+ (type $T (list string))
+ (alias outer 0 $T (type $A1))
+ (alias outer 0 $T (type $A2))
+ )
+ "#,
+ )?;
+
+ let mut validator = Validator::new_with_features(WasmFeatures {
+ component_model: true,
+ ..Default::default()
+ });
+
+ let types = validator.validate_all(&bytes)?;
+
+ let t_id = types.id_from_type_index(0, false).unwrap();
+ let a1_id = types.id_from_type_index(1, false).unwrap();
+ let a2_id = types.id_from_type_index(2, false).unwrap();
+
+ // The ids should all be different
+ assert!(t_id != a1_id);
+ assert!(t_id != a2_id);
+ assert!(a1_id != a2_id);
+
+ // However, they should all point to the same type
+ assert!(std::ptr::eq(
+ types.type_from_id(t_id).unwrap(),
+ types.type_from_id(a1_id).unwrap()
+ ));
+ assert!(std::ptr::eq(
+ types.type_from_id(t_id).unwrap(),
+ types.type_from_id(a2_id).unwrap()
+ ));
+
+ Ok(())
+ }
+}
diff --git a/third_party/rust/wasmparser/src/validator/component.rs b/third_party/rust/wasmparser/src/validator/component.rs
new file mode 100644
index 0000000000..641b18a2cc
--- /dev/null
+++ b/third_party/rust/wasmparser/src/validator/component.rs
@@ -0,0 +1,2101 @@
+//! State relating to validating a WebAssembly component.
+
+use super::{
+ check_max, combine_type_sizes,
+ core::Module,
+ types::{
+ ComponentFuncType, ComponentInstanceType, ComponentInstanceTypeKind, ComponentType,
+ ComponentValType, EntityType, InstanceType, KebabString, ModuleType, RecordType, Type,
+ TypeAlloc, TypeId, TypeList, VariantCase,
+ },
+};
+use crate::{
+ limits::*,
+ types::{
+ ComponentDefinedType, ComponentEntityType, InstanceTypeKind, KebabStr, LoweringInfo,
+ TupleType, UnionType, VariantType,
+ },
+ BinaryReaderError, CanonicalOption, ComponentExternalKind, ComponentOuterAliasKind,
+ ComponentTypeRef, ExternalKind, FuncType, GlobalType, InstantiationArgKind, MemoryType, Result,
+ TableType, TypeBounds, ValType, WasmFeatures,
+};
+use indexmap::{map::Entry, IndexMap, IndexSet};
+use std::{collections::HashSet, mem};
+use url::Url;
+
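+// Editorial note (not part of the upstream source): as an illustration of the
+// helper below, `to_kebab_str("my-export", "export", 0)` succeeds, while
+// `to_kebab_str("MyExport", "export", 0)` and `to_kebab_str("", "export", 0)`
+// hit the "not in kebab case" and "cannot be empty" errors respectively.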
+fn to_kebab_str<'a>(s: &'a str, desc: &str, offset: usize) -> Result<&'a KebabStr> {
+ match KebabStr::new(s) {
+ Some(s) => Ok(s),
+ None => {
+ if s.is_empty() {
+ bail!(offset, "{desc} name cannot be empty");
+ }
+
+ bail!(offset, "{desc} name `{s}` is not in kebab case");
+ }
+ }
+}
+
+fn parse_url(url: &str, offset: usize) -> Result<Option<Url>> {
+ if url.is_empty() {
+ return Ok(None);
+ }
+
+ Url::parse(url)
+ .map(Some)
+ .map_err(|e| BinaryReaderError::new(e.to_string(), offset))
+}
+
+pub(crate) struct ComponentState {
+ // Core index spaces
+ pub core_types: Vec<TypeId>,
+ pub core_modules: Vec<TypeId>,
+ pub core_instances: Vec<TypeId>,
+ pub core_funcs: Vec<TypeId>,
+ pub core_memories: Vec<MemoryType>,
+ pub core_tables: Vec<TableType>,
+ pub core_globals: Vec<GlobalType>,
+ pub core_tags: Vec<TypeId>,
+
+ // Component index spaces
+ pub types: Vec<TypeId>,
+ pub funcs: Vec<TypeId>,
+ pub values: Vec<(ComponentValType, bool)>,
+ pub instances: Vec<TypeId>,
+ pub components: Vec<TypeId>,
+
+    /// All imports and exports of this component, stored together because
+    /// imports and exports share a single namespace.
+ pub externs: IndexMap<KebabString, (Option<Url>, ComponentEntityType, ExternKind)>,
+
+ // Note: URL validation requires unique URLs by byte comparison, so
+ // strings are used here and the URLs are not normalized.
+ import_urls: HashSet<String>,
+ export_urls: HashSet<String>,
+
+ has_start: bool,
+ type_size: u32,
+}
+
+pub enum ExternKind {
+ Import,
+ Export,
+}
+
+impl ExternKind {
+ fn desc(&self) -> &'static str {
+ match self {
+ ExternKind::Import => "import",
+ ExternKind::Export => "export",
+ }
+ }
+}
+
+impl ComponentState {
+ pub fn type_count(&self) -> usize {
+ self.core_types.len() + self.types.len()
+ }
+
+ pub fn instance_count(&self) -> usize {
+ self.core_instances.len() + self.instances.len()
+ }
+
+ pub fn function_count(&self) -> usize {
+ self.core_funcs.len() + self.funcs.len()
+ }
+
+ pub fn add_core_type(
+ components: &mut [Self],
+ ty: crate::CoreType,
+ features: &WasmFeatures,
+ types: &mut TypeAlloc,
+ offset: usize,
+ check_limit: bool,
+ ) -> Result<()> {
+ let ty = match ty {
+ crate::CoreType::Func(ty) => Type::Func(ty),
+ crate::CoreType::Module(decls) => Type::Module(Self::create_module_type(
+ components,
+ decls.into_vec(),
+ features,
+ types,
+ offset,
+ )?),
+ };
+
+ let current = components.last_mut().unwrap();
+
+ if check_limit {
+ check_max(current.type_count(), 1, MAX_WASM_TYPES, "types", offset)?;
+ }
+
+ let id = types.push_defined(ty);
+ current.core_types.push(id);
+
+ Ok(())
+ }
+
+ pub fn add_core_module(
+ &mut self,
+ module: &Module,
+ types: &mut TypeAlloc,
+ offset: usize,
+ ) -> Result<()> {
+ let imports = module.imports_for_module_type(offset)?;
+
+ // We have to clone the module's imports and exports here
+ // because we cannot take the data out of the `MaybeOwned`
+ // as it might be shared with a function validator.
+ let ty = Type::Module(ModuleType {
+ type_size: module.type_size,
+ imports,
+ exports: module.exports.clone(),
+ });
+
+ let id = types.push_anon(ty);
+ self.core_modules.push(id);
+
+ Ok(())
+ }
+
+ pub fn add_core_instance(
+ &mut self,
+ instance: crate::Instance,
+ types: &mut TypeAlloc,
+ offset: usize,
+ ) -> Result<()> {
+ let instance = match instance {
+ crate::Instance::Instantiate { module_index, args } => {
+ self.instantiate_module(module_index, args.into_vec(), types, offset)?
+ }
+ crate::Instance::FromExports(exports) => {
+ self.instantiate_core_exports(exports.into_vec(), types, offset)?
+ }
+ };
+
+ self.core_instances.push(instance);
+
+ Ok(())
+ }
+
+ pub fn add_type(
+ components: &mut Vec<Self>,
+ ty: crate::ComponentType,
+ features: &WasmFeatures,
+ types: &mut TypeAlloc,
+ offset: usize,
+ check_limit: bool,
+ ) -> Result<()> {
+ assert!(!components.is_empty());
+ let ty = match ty {
+ crate::ComponentType::Defined(ty) => Type::Defined(
+ components
+ .last_mut()
+ .unwrap()
+ .create_defined_type(ty, types, offset)?,
+ ),
+ crate::ComponentType::Func(ty) => Type::ComponentFunc(
+ components
+ .last_mut()
+ .unwrap()
+ .create_function_type(ty, types, offset)?,
+ ),
+ crate::ComponentType::Component(decls) => Type::Component(Self::create_component_type(
+ components,
+ decls.into_vec(),
+ features,
+ types,
+ offset,
+ )?),
+ crate::ComponentType::Instance(decls) => Type::ComponentInstance(
+ Self::create_instance_type(components, decls.into_vec(), features, types, offset)?,
+ ),
+ };
+
+ let current = components.last_mut().unwrap();
+ if check_limit {
+ check_max(current.type_count(), 1, MAX_WASM_TYPES, "types", offset)?;
+ }
+
+ let id = types.push_defined(ty);
+ current.types.push(id);
+
+ Ok(())
+ }
+
+ pub fn add_import(
+ &mut self,
+ import: crate::ComponentImport,
+ types: &mut TypeAlloc,
+ offset: usize,
+ ) -> Result<()> {
+ let entity = self.check_type_ref(&import.ty, types, offset)?;
+ self.add_entity(entity, false, offset)?;
+ let name = to_kebab_str(import.name, "import", offset)?;
+
+ match self.externs.entry(name.to_owned()) {
+ Entry::Occupied(e) => {
+ bail!(
+ offset,
+ "import name `{name}` conflicts with previous {desc} name `{prev}`",
+ name = import.name,
+ prev = e.key(),
+ desc = e.get().2.desc(),
+ );
+ }
+ Entry::Vacant(e) => {
+ let url = parse_url(import.url, offset)?;
+ if let Some(url) = url.as_ref() {
+ if !self.import_urls.insert(url.to_string()) {
+ bail!(offset, "duplicate import URL `{url}`");
+ }
+ }
+
+ self.type_size = combine_type_sizes(self.type_size, entity.type_size(), offset)?;
+ e.insert((url, entity, ExternKind::Import));
+ }
+ }
+
+ Ok(())
+ }
+
+ fn add_entity(
+ &mut self,
+ ty: ComponentEntityType,
+ value_used: bool,
+ offset: usize,
+ ) -> Result<()> {
+ let (len, max, desc) = match ty {
+ ComponentEntityType::Module(id) => {
+ self.core_modules.push(id);
+ (self.core_modules.len(), MAX_WASM_MODULES, "modules")
+ }
+ ComponentEntityType::Component(id) => {
+ self.components.push(id);
+ (self.components.len(), MAX_WASM_COMPONENTS, "components")
+ }
+ ComponentEntityType::Instance(id) => {
+ self.instances.push(id);
+ (self.instance_count(), MAX_WASM_INSTANCES, "instances")
+ }
+ ComponentEntityType::Func(id) => {
+ self.funcs.push(id);
+ (self.function_count(), MAX_WASM_FUNCTIONS, "functions")
+ }
+ ComponentEntityType::Value(ty) => {
+ self.values.push((ty, value_used));
+ (self.values.len(), MAX_WASM_VALUES, "values")
+ }
+ ComponentEntityType::Type { created, .. } => {
+ self.types.push(created);
+ (self.types.len(), MAX_WASM_TYPES, "types")
+ }
+ };
+
+ check_max(len, 0, max, desc, offset)?;
+ Ok(())
+ }
+
+ pub fn add_export(
+ &mut self,
+ name: &str,
+ url: &str,
+ ty: ComponentEntityType,
+ offset: usize,
+ check_limit: bool,
+ ) -> Result<()> {
+ if check_limit {
+ check_max(
+ self.externs.len(),
+ 1,
+ MAX_WASM_EXPORTS,
+ "imports and exports",
+ offset,
+ )?;
+ }
+ self.add_entity(ty, true, offset)?;
+
+ let name = to_kebab_str(name, "export", offset)?;
+
+ match self.externs.entry(name.to_owned()) {
+ Entry::Occupied(e) => {
+ bail!(
+ offset,
+ "export name `{name}` conflicts with previous {desc} name `{prev}`",
+ prev = e.key(),
+ desc = e.get().2.desc(),
+ );
+ }
+ Entry::Vacant(e) => {
+ let url = parse_url(url, offset)?;
+ if let Some(url) = url.as_ref() {
+ if !self.export_urls.insert(url.to_string()) {
+ bail!(offset, "duplicate export URL `{url}`");
+ }
+ }
+
+ self.type_size = combine_type_sizes(self.type_size, ty.type_size(), offset)?;
+ e.insert((url, ty, ExternKind::Export));
+ }
+ }
+
+ Ok(())
+ }
+
+ pub fn lift_function(
+ &mut self,
+ core_func_index: u32,
+ type_index: u32,
+ options: Vec<CanonicalOption>,
+ types: &TypeList,
+ offset: usize,
+ ) -> Result<()> {
+ let ty = self.function_type_at(type_index, types, offset)?;
+ let core_ty = types[self.core_function_at(core_func_index, offset)?]
+ .as_func_type()
+ .unwrap();
+
+ // Lifting a function is for an export, so match the expected canonical ABI
+ // export signature
+ let info = ty.lower(types, false);
+ self.check_options(Some(core_ty), &info, &options, types, offset)?;
+
+ if core_ty.params() != info.params.as_slice() {
+ bail!(
+ offset,
+ "lowered parameter types `{:?}` do not match parameter types \
+ `{:?}` of core function {core_func_index}",
+ info.params.as_slice(),
+ core_ty.params(),
+ );
+ }
+
+ if core_ty.results() != info.results.as_slice() {
+ bail!(
+ offset,
+ "lowered result types `{:?}` do not match result types \
+ `{:?}` of core function {core_func_index}",
+ info.results.as_slice(),
+ core_ty.results()
+ );
+ }
+
+ self.funcs.push(self.types[type_index as usize]);
+
+ Ok(())
+ }
+
+ pub fn lower_function(
+ &mut self,
+ func_index: u32,
+ options: Vec<CanonicalOption>,
+ types: &mut TypeAlloc,
+ offset: usize,
+ ) -> Result<()> {
+ let ty = types[self.function_at(func_index, offset)?]
+ .as_component_func_type()
+ .unwrap();
+
+ // Lowering a function is for an import, so use a function type that matches
+ // the expected canonical ABI import signature.
+ let info = ty.lower(types, true);
+
+ self.check_options(None, &info, &options, types, offset)?;
+
+ let lowered_ty = Type::Func(info.into_func_type());
+
+ let id = types.push_anon(lowered_ty);
+ self.core_funcs.push(id);
+
+ Ok(())
+ }
+
+ pub fn add_component(&mut self, component: &mut Self, types: &mut TypeAlloc) {
+ let ty = Type::Component(component.take_component_type());
+ let id = types.push_anon(ty);
+ self.components.push(id);
+ }
+
+ pub fn add_instance(
+ &mut self,
+ instance: crate::ComponentInstance,
+ types: &mut TypeAlloc,
+ offset: usize,
+ ) -> Result<()> {
+ let instance = match instance {
+ crate::ComponentInstance::Instantiate {
+ component_index,
+ args,
+ } => self.instantiate_component(component_index, args.into_vec(), types, offset)?,
+ crate::ComponentInstance::FromExports(exports) => {
+ self.instantiate_exports(exports.into_vec(), types, offset)?
+ }
+ };
+
+ self.instances.push(instance);
+
+ Ok(())
+ }
+
+ pub fn add_alias(
+ components: &mut [Self],
+ alias: crate::ComponentAlias,
+ types: &mut TypeAlloc,
+ offset: usize,
+ ) -> Result<()> {
+ match alias {
+ crate::ComponentAlias::InstanceExport {
+ instance_index,
+ kind,
+ name,
+ } => components.last_mut().unwrap().alias_instance_export(
+ instance_index,
+ kind,
+ name,
+ types,
+ offset,
+ ),
+ crate::ComponentAlias::CoreInstanceExport {
+ instance_index,
+ kind,
+ name,
+ } => components.last_mut().unwrap().alias_core_instance_export(
+ instance_index,
+ kind,
+ name,
+ types,
+ offset,
+ ),
+ crate::ComponentAlias::Outer { kind, count, index } => match kind {
+ ComponentOuterAliasKind::CoreModule => {
+ Self::alias_module(components, count, index, offset)
+ }
+ ComponentOuterAliasKind::CoreType => {
+ Self::alias_core_type(components, count, index, types, offset)
+ }
+ ComponentOuterAliasKind::Type => {
+ Self::alias_type(components, count, index, types, offset)
+ }
+ ComponentOuterAliasKind::Component => {
+ Self::alias_component(components, count, index, offset)
+ }
+ },
+ }
+ }
+
+ pub fn add_start(
+ &mut self,
+ func_index: u32,
+ args: &[u32],
+ results: u32,
+ types: &TypeList,
+ offset: usize,
+ ) -> Result<()> {
+ if self.has_start {
+ return Err(BinaryReaderError::new(
+ "component cannot have more than one start function",
+ offset,
+ ));
+ }
+
+ let ft = types[self.function_at(func_index, offset)?]
+ .as_component_func_type()
+ .unwrap();
+
+ if ft.params.len() != args.len() {
+ bail!(
+ offset,
+ "component start function requires {} arguments but was given {}",
+ ft.params.len(),
+ args.len()
+ );
+ }
+
+ if ft.results.len() as u32 != results {
+ bail!(
+ offset,
+ "component start function has a result count of {results} \
+ but the function type has a result count of {type_results}",
+ type_results = ft.results.len(),
+ );
+ }
+
+ for (i, ((_, ty), arg)) in ft.params.iter().zip(args).enumerate() {
+ // Ensure the value's type is a subtype of the parameter type
+ if !ComponentValType::internal_is_subtype_of(
+ self.value_at(*arg, offset)?,
+ types,
+ ty,
+ types,
+ ) {
+ bail!(
+ offset,
+ "value type mismatch for component start function argument {i}"
+ );
+ }
+ }
+
+ for (_, ty) in ft.results.iter() {
+ self.values.push((*ty, false));
+ }
+
+ self.has_start = true;
+
+ Ok(())
+ }
+
+ fn check_options(
+ &self,
+ core_ty: Option<&FuncType>,
+ info: &LoweringInfo,
+ options: &[CanonicalOption],
+ types: &TypeList,
+ offset: usize,
+ ) -> Result<()> {
+ fn display(option: CanonicalOption) -> &'static str {
+ match option {
+ CanonicalOption::UTF8 => "utf8",
+ CanonicalOption::UTF16 => "utf16",
+ CanonicalOption::CompactUTF16 => "latin1-utf16",
+ CanonicalOption::Memory(_) => "memory",
+ CanonicalOption::Realloc(_) => "realloc",
+ CanonicalOption::PostReturn(_) => "post-return",
+ }
+ }
+
+ let mut encoding = None;
+ let mut memory = None;
+ let mut realloc = None;
+ let mut post_return = None;
+
+ for option in options {
+ match option {
+ CanonicalOption::UTF8 | CanonicalOption::UTF16 | CanonicalOption::CompactUTF16 => {
+ match encoding {
+ Some(existing) => {
+ bail!(
+ offset,
+ "canonical encoding option `{}` conflicts with option `{}`",
+ display(existing),
+ display(*option),
+ )
+ }
+ None => encoding = Some(*option),
+ }
+ }
+ CanonicalOption::Memory(idx) => {
+ memory = match memory {
+ None => {
+ self.memory_at(*idx, offset)?;
+ Some(*idx)
+ }
+ Some(_) => {
+ return Err(BinaryReaderError::new(
+ "canonical option `memory` is specified more than once",
+ offset,
+ ))
+ }
+ }
+ }
+ CanonicalOption::Realloc(idx) => {
+ realloc = match realloc {
+ None => {
+ let ty = types[self.core_function_at(*idx, offset)?]
+ .as_func_type()
+ .unwrap();
+ if ty.params()
+ != [ValType::I32, ValType::I32, ValType::I32, ValType::I32]
+ || ty.results() != [ValType::I32]
+ {
+ return Err(BinaryReaderError::new(
+ "canonical option `realloc` uses a core function with an incorrect signature",
+ offset,
+ ));
+ }
+ Some(*idx)
+ }
+ Some(_) => {
+ return Err(BinaryReaderError::new(
+ "canonical option `realloc` is specified more than once",
+ offset,
+ ))
+ }
+ }
+ }
+ CanonicalOption::PostReturn(idx) => {
+ post_return = match post_return {
+ None => {
+ let core_ty = core_ty.ok_or_else(|| {
+ BinaryReaderError::new(
+ "canonical option `post-return` cannot be specified for lowerings",
+ offset,
+ )
+ })?;
+
+ let ty = types[self.core_function_at(*idx, offset)?]
+ .as_func_type()
+ .unwrap();
+
+ if ty.params() != core_ty.results() || !ty.results().is_empty() {
+ return Err(BinaryReaderError::new(
+ "canonical option `post-return` uses a core function with an incorrect signature",
+ offset,
+ ));
+ }
+ Some(*idx)
+ }
+ Some(_) => {
+ return Err(BinaryReaderError::new(
+ "canonical option `post-return` is specified more than once",
+ offset,
+ ))
+ }
+ }
+ }
+ }
+ }
+
+ if info.requires_memory && memory.is_none() {
+ return Err(BinaryReaderError::new(
+ "canonical option `memory` is required",
+ offset,
+ ));
+ }
+
+ if info.requires_realloc && realloc.is_none() {
+ return Err(BinaryReaderError::new(
+ "canonical option `realloc` is required",
+ offset,
+ ));
+ }
+
+ Ok(())
+ }
+
+ fn check_type_ref(
+ &self,
+ ty: &ComponentTypeRef,
+ types: &mut TypeAlloc,
+ offset: usize,
+ ) -> Result<ComponentEntityType> {
+ Ok(match ty {
+ ComponentTypeRef::Module(index) => {
+ let id = self.type_at(*index, true, offset)?;
+ types[id].as_module_type().ok_or_else(|| {
+ format_err!(offset, "core type index {index} is not a module type")
+ })?;
+ ComponentEntityType::Module(id)
+ }
+ ComponentTypeRef::Func(index) => {
+ let id = self.type_at(*index, false, offset)?;
+ types[id].as_component_func_type().ok_or_else(|| {
+ format_err!(offset, "type index {index} is not a function type")
+ })?;
+ ComponentEntityType::Func(id)
+ }
+ ComponentTypeRef::Value(ty) => {
+ let ty = match ty {
+ crate::ComponentValType::Primitive(ty) => ComponentValType::Primitive(*ty),
+ crate::ComponentValType::Type(index) => {
+ ComponentValType::Type(self.defined_type_at(*index, types, offset)?)
+ }
+ };
+ ComponentEntityType::Value(ty)
+ }
+ ComponentTypeRef::Type(TypeBounds::Eq, index) => {
+ let referenced = self.type_at(*index, false, offset)?;
+ let created = types.with_unique(referenced);
+ ComponentEntityType::Type {
+ referenced,
+ created,
+ }
+ }
+ ComponentTypeRef::Instance(index) => {
+ let id = self.type_at(*index, false, offset)?;
+ types[id].as_component_instance_type().ok_or_else(|| {
+ format_err!(offset, "type index {index} is not an instance type")
+ })?;
+ ComponentEntityType::Instance(id)
+ }
+ ComponentTypeRef::Component(index) => {
+ let id = self.type_at(*index, false, offset)?;
+ types[id].as_component_type().ok_or_else(|| {
+ format_err!(offset, "type index {index} is not a component type")
+ })?;
+ ComponentEntityType::Component(id)
+ }
+ })
+ }
+
+ pub fn export_to_entity_type(
+ &mut self,
+ export: &crate::ComponentExport,
+ types: &mut TypeAlloc,
+ offset: usize,
+ ) -> Result<ComponentEntityType> {
+ let actual = match export.kind {
+ ComponentExternalKind::Module => {
+ ComponentEntityType::Module(self.module_at(export.index, offset)?)
+ }
+ ComponentExternalKind::Func => {
+ ComponentEntityType::Func(self.function_at(export.index, offset)?)
+ }
+ ComponentExternalKind::Value => {
+ ComponentEntityType::Value(*self.value_at(export.index, offset)?)
+ }
+ ComponentExternalKind::Type => {
+ let referenced = self.type_at(export.index, false, offset)?;
+ let created = types.with_unique(referenced);
+ ComponentEntityType::Type {
+ referenced,
+ created,
+ }
+ }
+ ComponentExternalKind::Instance => {
+ ComponentEntityType::Instance(self.instance_at(export.index, offset)?)
+ }
+ ComponentExternalKind::Component => {
+ ComponentEntityType::Component(self.component_at(export.index, offset)?)
+ }
+ };
+
+ let ascribed = match &export.ty {
+ Some(ty) => self.check_type_ref(ty, types, offset)?,
+ None => return Ok(actual),
+ };
+
+ if !ComponentEntityType::internal_is_subtype_of(&actual, types, &ascribed, types) {
+ bail!(
+ offset,
+ "ascribed type of export is not compatible with item's type"
+ );
+ }
+
+ Ok(ascribed)
+ }
+
+ fn create_module_type(
+ components: &[Self],
+ decls: Vec<crate::ModuleTypeDeclaration>,
+ features: &WasmFeatures,
+ types: &mut TypeAlloc,
+ offset: usize,
+ ) -> Result<ModuleType> {
+ let mut state = Module::default();
+
+ for decl in decls {
+ match decl {
+ crate::ModuleTypeDeclaration::Type(ty) => {
+ state.add_type(ty, features, types, offset, true)?;
+ }
+ crate::ModuleTypeDeclaration::Export { name, ty } => {
+ let ty = state.check_type_ref(&ty, features, types, offset)?;
+ state.add_export(name, ty, features, offset, true)?;
+ }
+ crate::ModuleTypeDeclaration::OuterAlias { kind, count, index } => {
+ if count > 1 {
+ return Err(BinaryReaderError::new(
+ "outer type aliases in module type declarations are limited to a maximum count of 1",
+ offset,
+ ));
+ }
+ match kind {
+ crate::OuterAliasKind::Type => {
+ let ty = if count == 0 {
+ // Local alias, check the local module state
+ state.type_at(index, offset)?
+ } else {
+ // Otherwise, check the enclosing component state
+ let component =
+ Self::check_alias_count(components, count - 1, offset)?;
+ component.type_at(index, true, offset)?
+ };
+
+ check_max(state.types.len(), 1, MAX_WASM_TYPES, "types", offset)?;
+
+ state.types.push(ty);
+ }
+ }
+ }
+ crate::ModuleTypeDeclaration::Import(import) => {
+ state.add_import(import, features, types, offset)?;
+ }
+ }
+ }
+
+ let imports = state.imports_for_module_type(offset)?;
+
+ Ok(ModuleType {
+ type_size: state.type_size,
+ imports,
+ exports: state.exports,
+ })
+ }
+
+ fn create_component_type(
+ components: &mut Vec<Self>,
+ decls: Vec<crate::ComponentTypeDeclaration>,
+ features: &WasmFeatures,
+ types: &mut TypeAlloc,
+ offset: usize,
+ ) -> Result<ComponentType> {
+ components.push(ComponentState::default());
+
+ for decl in decls {
+ match decl {
+ crate::ComponentTypeDeclaration::CoreType(ty) => {
+ Self::add_core_type(components, ty, features, types, offset, true)?;
+ }
+ crate::ComponentTypeDeclaration::Type(ty) => {
+ Self::add_type(components, ty, features, types, offset, true)?;
+ }
+ crate::ComponentTypeDeclaration::Export { name, url, ty } => {
+ let current = components.last_mut().unwrap();
+ let ty = current.check_type_ref(&ty, types, offset)?;
+ current.add_export(name, url, ty, offset, true)?;
+ }
+ crate::ComponentTypeDeclaration::Import(import) => {
+ components
+ .last_mut()
+ .unwrap()
+ .add_import(import, types, offset)?;
+ }
+ crate::ComponentTypeDeclaration::Alias(alias) => {
+ Self::add_alias(components, alias, types, offset)?;
+ }
+ };
+ }
+
+ let mut state = components.pop().unwrap();
+
+ Ok(state.take_component_type())
+ }
+
+ fn create_instance_type(
+ components: &mut Vec<Self>,
+ decls: Vec<crate::InstanceTypeDeclaration>,
+ features: &WasmFeatures,
+ types: &mut TypeAlloc,
+ offset: usize,
+ ) -> Result<ComponentInstanceType> {
+ components.push(ComponentState::default());
+
+ for decl in decls {
+ match decl {
+ crate::InstanceTypeDeclaration::CoreType(ty) => {
+ Self::add_core_type(components, ty, features, types, offset, true)?;
+ }
+ crate::InstanceTypeDeclaration::Type(ty) => {
+ Self::add_type(components, ty, features, types, offset, true)?;
+ }
+ crate::InstanceTypeDeclaration::Export { name, url, ty } => {
+ let current = components.last_mut().unwrap();
+ let ty = current.check_type_ref(&ty, types, offset)?;
+ current.add_export(name, url, ty, offset, true)?;
+ }
+ crate::InstanceTypeDeclaration::Alias(alias) => {
+ Self::add_alias(components, alias, types, offset)?;
+ }
+ };
+ }
+
+ let state = components.pop().unwrap();
+
+ Ok(ComponentInstanceType {
+ type_size: state.type_size,
+ kind: ComponentInstanceTypeKind::Defined(
+ state
+ .externs
+ .into_iter()
+ .filter_map(|(name, (url, ty, kind))| match kind {
+ ExternKind::Export => Some((name, (url, ty))),
+ ExternKind::Import => None,
+ })
+ .collect(),
+ ),
+ })
+ }
+
+ fn create_function_type(
+ &self,
+ ty: crate::ComponentFuncType,
+ types: &TypeList,
+ offset: usize,
+ ) -> Result<ComponentFuncType> {
+ let mut type_size = 1;
+
+ let mut set =
+ HashSet::with_capacity(std::cmp::max(ty.params.len(), ty.results.type_count()));
+
+ let params = ty
+ .params
+ .iter()
+ .map(|(name, ty)| {
+ let name = to_kebab_str(name, "function parameter", offset)?;
+ if !set.insert(name) {
+ bail!(
+ offset,
+ "function parameter name `{name}` conflicts with previous parameter name `{prev}`",
+ prev = set.get(&name).unwrap(),
+ );
+ }
+
+ let ty = self.create_component_val_type(*ty, types, offset)?;
+ type_size = combine_type_sizes(type_size, ty.type_size(), offset)?;
+ Ok((name.to_owned(), ty))
+ })
+ .collect::<Result<_>>()?;
+
+ set.clear();
+
+ let results = ty
+ .results
+ .iter()
+ .map(|(name, ty)| {
+ let name = name
+ .map(|name| {
+ let name = to_kebab_str(name, "function result", offset)?;
+ if !set.insert(name) {
+ bail!(
+ offset,
+ "function result name `{name}` conflicts with previous result name `{prev}`",
+ prev = set.get(name).unwrap(),
+ );
+ }
+
+ Ok(name.to_owned())
+ })
+ .transpose()?;
+
+ let ty = self.create_component_val_type(*ty, types, offset)?;
+ type_size = combine_type_sizes(type_size, ty.type_size(), offset)?;
+ Ok((name, ty))
+ })
+ .collect::<Result<_>>()?;
+
+ Ok(ComponentFuncType {
+ type_size,
+ params,
+ results,
+ })
+ }
+
+ fn instantiate_module(
+ &self,
+ module_index: u32,
+ module_args: Vec<crate::InstantiationArg>,
+ types: &mut TypeAlloc,
+ offset: usize,
+ ) -> Result<TypeId> {
+ fn insert_arg<'a>(
+ name: &'a str,
+ arg: &'a InstanceType,
+ args: &mut IndexMap<&'a str, &'a InstanceType>,
+ offset: usize,
+ ) -> Result<()> {
+ if args.insert(name, arg).is_some() {
+ bail!(
+ offset,
+ "duplicate module instantiation argument named `{name}`"
+ );
+ }
+
+ Ok(())
+ }
+
+ let module_type_id = self.module_at(module_index, offset)?;
+ let mut args = IndexMap::new();
+
+ // Populate the arguments
+ for module_arg in module_args {
+ match module_arg.kind {
+ InstantiationArgKind::Instance => {
+ let instance_type = types[self.core_instance_at(module_arg.index, offset)?]
+ .as_instance_type()
+ .unwrap();
+ insert_arg(module_arg.name, instance_type, &mut args, offset)?;
+ }
+ }
+ }
+
+ // Validate the arguments
+ let module_type = types[module_type_id].as_module_type().unwrap();
+ for ((module, name), expected) in module_type.imports.iter() {
+ let instance = args.get(module.as_str()).ok_or_else(|| {
+ format_err!(
+ offset,
+ "missing module instantiation argument named `{module}`"
+ )
+ })?;
+
+ let arg = instance
+ .internal_exports(types)
+ .get(name.as_str())
+ .ok_or_else(|| {
+ format_err!(
+ offset,
+ "module instantiation argument `{module}` does not \
+ export an item named `{name}`",
+ )
+ })?;
+
+ match (arg, expected) {
+ (EntityType::Func(_), EntityType::Func(_))
+ | (EntityType::Table(_), EntityType::Table(_))
+ | (EntityType::Memory(_), EntityType::Memory(_))
+ | (EntityType::Global(_), EntityType::Global(_))
+ | (EntityType::Tag(_), EntityType::Tag(_)) => {}
+ _ => {
+ bail!(
+ offset,
+ "module instantiation argument `{module}` exports \
+ an item named `{name}` but it is not a {}",
+ expected.desc()
+ )
+ }
+ }
+
+ if !EntityType::internal_is_subtype_of(arg, types, expected, types) {
+ bail!(
+ offset,
+ "{} type mismatch for export `{name}` of module \
+ instantiation argument `{module}`",
+ expected.desc(),
+ );
+ }
+ }
+
+ let ty = Type::Instance(InstanceType {
+ type_size: module_type
+ .exports
+ .iter()
+ .fold(1, |acc, (_, ty)| acc + ty.type_size()),
+ kind: InstanceTypeKind::Instantiated(module_type_id),
+ });
+
+ Ok(types.push_anon(ty))
+ }
+
+ fn instantiate_component(
+ &mut self,
+ component_index: u32,
+ component_args: Vec<crate::ComponentInstantiationArg>,
+ types: &mut TypeAlloc,
+ offset: usize,
+ ) -> Result<TypeId> {
+ fn insert_arg<'a>(
+ name: &'a str,
+ arg: ComponentEntityType,
+ args: &mut IndexMap<&'a KebabStr, ComponentEntityType>,
+ offset: usize,
+ ) -> Result<()> {
+ let name = to_kebab_str(name, "instantiation argument", offset)?;
+ match args.entry(name) {
+ Entry::Occupied(e) => {
+ bail!(
+ offset,
+ "instantiation argument `{name}` conflicts with previous argument `{prev}`",
+ prev = e.key()
+ );
+ }
+ Entry::Vacant(e) => {
+ e.insert(arg);
+ }
+ }
+
+ Ok(())
+ }
+
+ let component_type_id = self.component_at(component_index, offset)?;
+ let mut args = IndexMap::new();
+
+ // Populate the arguments
+ for component_arg in component_args {
+ match component_arg.kind {
+ ComponentExternalKind::Module => {
+ insert_arg(
+ component_arg.name,
+ ComponentEntityType::Module(self.module_at(component_arg.index, offset)?),
+ &mut args,
+ offset,
+ )?;
+ }
+ ComponentExternalKind::Component => {
+ insert_arg(
+ component_arg.name,
+ ComponentEntityType::Component(
+ self.component_at(component_arg.index, offset)?,
+ ),
+ &mut args,
+ offset,
+ )?;
+ }
+ ComponentExternalKind::Instance => {
+ insert_arg(
+ component_arg.name,
+ ComponentEntityType::Instance(
+ self.instance_at(component_arg.index, offset)?,
+ ),
+ &mut args,
+ offset,
+ )?;
+ }
+ ComponentExternalKind::Func => {
+ insert_arg(
+ component_arg.name,
+ ComponentEntityType::Func(self.function_at(component_arg.index, offset)?),
+ &mut args,
+ offset,
+ )?;
+ }
+ ComponentExternalKind::Value => {
+ insert_arg(
+ component_arg.name,
+ ComponentEntityType::Value(*self.value_at(component_arg.index, offset)?),
+ &mut args,
+ offset,
+ )?;
+ }
+ ComponentExternalKind::Type => {
+ let ty = self.type_at(component_arg.index, false, offset)?;
+ insert_arg(
+ component_arg.name,
+ ComponentEntityType::Type {
+ referenced: ty,
+ created: ty,
+ },
+ &mut args,
+ offset,
+ )?;
+ }
+ }
+ }
+
+ // Validate the arguments
+ let component_type = types[component_type_id].as_component_type().unwrap();
+ for (name, (_, expected)) in component_type.imports.iter() {
+ match args.get(&name.as_kebab_str()) {
+ Some(arg) => {
+ match (arg, expected) {
+ (ComponentEntityType::Module(_), ComponentEntityType::Module(_))
+ | (ComponentEntityType::Component(_), ComponentEntityType::Component(_))
+ | (ComponentEntityType::Instance(_), ComponentEntityType::Instance(_))
+ | (ComponentEntityType::Func(_), ComponentEntityType::Func(_))
+ | (ComponentEntityType::Value(_), ComponentEntityType::Value(_))
+ | (ComponentEntityType::Type { .. }, ComponentEntityType::Type { .. }) => {}
+ _ => {
+ bail!(
+ offset,
+ "expected component instantiation argument `{name}` to be a {desc}",
+ desc = expected.desc()
+ )
+ }
+ };
+
+ if !ComponentEntityType::internal_is_subtype_of(arg, types, expected, types) {
+ bail!(
+ offset,
+ "type mismatch for component instantiation argument `{name}`"
+ );
+ }
+ }
+ None => {
+ bail!(
+ offset,
+ "missing component instantiation argument named `{name}`"
+ );
+ }
+ }
+ }
+
+ let ty = Type::ComponentInstance(ComponentInstanceType {
+ type_size: component_type
+ .exports
+ .iter()
+ .fold(1, |acc, (_, (_, ty))| acc + ty.type_size()),
+ kind: ComponentInstanceTypeKind::Instantiated(component_type_id),
+ });
+
+ Ok(types.push_anon(ty))
+ }
+
+ fn instantiate_exports(
+ &mut self,
+ exports: Vec<crate::ComponentExport>,
+ types: &mut TypeAlloc,
+ offset: usize,
+ ) -> Result<TypeId> {
+ fn insert_export(
+ name: &str,
+ export: ComponentEntityType,
+ exports: &mut IndexMap<KebabString, (Option<Url>, ComponentEntityType)>,
+ type_size: &mut u32,
+ offset: usize,
+ ) -> Result<()> {
+ let name = to_kebab_str(name, "instance export", offset)?;
+ match exports.entry(name.to_owned()) {
+ Entry::Occupied(e) => bail!(
+ offset,
+ "instance export name `{name}` conflicts with previous export name `{prev}`",
+ prev = e.key()
+ ),
+ Entry::Vacant(e) => {
+ *type_size = combine_type_sizes(*type_size, export.type_size(), offset)?;
+ e.insert((None, export));
+ }
+ }
+
+ Ok(())
+ }
+
+ let mut type_size = 1;
+ let mut inst_exports = IndexMap::new();
+ for export in exports {
+ assert!(export.ty.is_none());
+ match export.kind {
+ ComponentExternalKind::Module => {
+ insert_export(
+ export.name,
+ ComponentEntityType::Module(self.module_at(export.index, offset)?),
+ &mut inst_exports,
+ &mut type_size,
+ offset,
+ )?;
+ }
+ ComponentExternalKind::Component => {
+ insert_export(
+ export.name,
+ ComponentEntityType::Component(self.component_at(export.index, offset)?),
+ &mut inst_exports,
+ &mut type_size,
+ offset,
+ )?;
+ }
+ ComponentExternalKind::Instance => {
+ insert_export(
+ export.name,
+ ComponentEntityType::Instance(self.instance_at(export.index, offset)?),
+ &mut inst_exports,
+ &mut type_size,
+ offset,
+ )?;
+ }
+ ComponentExternalKind::Func => {
+ insert_export(
+ export.name,
+ ComponentEntityType::Func(self.function_at(export.index, offset)?),
+ &mut inst_exports,
+ &mut type_size,
+ offset,
+ )?;
+ }
+ ComponentExternalKind::Value => {
+ insert_export(
+ export.name,
+ ComponentEntityType::Value(*self.value_at(export.index, offset)?),
+ &mut inst_exports,
+ &mut type_size,
+ offset,
+ )?;
+ }
+ ComponentExternalKind::Type => {
+ let ty = self.type_at(export.index, false, offset)?;
+ insert_export(
+ export.name,
+ ComponentEntityType::Type {
+ referenced: ty,
+                            // The created type index isn't used anywhere in
+                            // index spaces because a "bag of exports" doesn't
+                            // build up its own index spaces, so just reuse
+                            // the referenced index here.
+ created: ty,
+ },
+ &mut inst_exports,
+ &mut type_size,
+ offset,
+ )?;
+ }
+ }
+ }
+
+ let ty = Type::ComponentInstance(ComponentInstanceType {
+ type_size,
+ kind: ComponentInstanceTypeKind::Exports(inst_exports),
+ });
+
+ Ok(types.push_anon(ty))
+ }
+
+ fn instantiate_core_exports(
+ &mut self,
+ exports: Vec<crate::Export>,
+ types: &mut TypeAlloc,
+ offset: usize,
+ ) -> Result<TypeId> {
+ fn insert_export(
+ name: &str,
+ export: EntityType,
+ exports: &mut IndexMap<String, EntityType>,
+ type_size: &mut u32,
+ offset: usize,
+ ) -> Result<()> {
+ *type_size = combine_type_sizes(*type_size, export.type_size(), offset)?;
+
+ if exports.insert(name.to_string(), export).is_some() {
+ bail!(
+ offset,
+ "duplicate instantiation export name `{name}` already defined",
+ )
+ }
+
+ Ok(())
+ }
+
+ let mut type_size = 1;
+ let mut inst_exports = IndexMap::new();
+ for export in exports {
+ match export.kind {
+ ExternalKind::Func => {
+ insert_export(
+ export.name,
+ EntityType::Func(self.core_function_at(export.index, offset)?),
+ &mut inst_exports,
+ &mut type_size,
+ offset,
+ )?;
+ }
+ ExternalKind::Table => insert_export(
+ export.name,
+ EntityType::Table(*self.table_at(export.index, offset)?),
+ &mut inst_exports,
+ &mut type_size,
+ offset,
+ )?,
+ ExternalKind::Memory => insert_export(
+ export.name,
+ EntityType::Memory(*self.memory_at(export.index, offset)?),
+ &mut inst_exports,
+ &mut type_size,
+ offset,
+ )?,
+ ExternalKind::Global => {
+ insert_export(
+ export.name,
+ EntityType::Global(*self.global_at(export.index, offset)?),
+ &mut inst_exports,
+ &mut type_size,
+ offset,
+ )?;
+ }
+ ExternalKind::Tag => insert_export(
+ export.name,
+ EntityType::Tag(self.core_function_at(export.index, offset)?),
+ &mut inst_exports,
+ &mut type_size,
+ offset,
+ )?,
+ }
+ }
+
+ let ty = Type::Instance(InstanceType {
+ type_size,
+ kind: InstanceTypeKind::Exports(inst_exports),
+ });
+
+ Ok(types.push_anon(ty))
+ }
+
+ fn alias_core_instance_export(
+ &mut self,
+ instance_index: u32,
+ kind: ExternalKind,
+ name: &str,
+ types: &TypeList,
+ offset: usize,
+ ) -> Result<()> {
+ macro_rules! push_module_export {
+ ($expected:path, $collection:ident, $ty:literal) => {{
+ match self.core_instance_export(instance_index, name, types, offset)? {
+ $expected(ty) => {
+ self.$collection.push(*ty);
+ Ok(())
+ }
+ _ => {
+ bail!(
+ offset,
+ "export `{name}` for core instance {instance_index} is not a {}",
+ $ty
+ )
+ }
+ }
+ }};
+ }
+
+ match kind {
+ ExternalKind::Func => {
+ check_max(
+ self.function_count(),
+ 1,
+ MAX_WASM_FUNCTIONS,
+ "functions",
+ offset,
+ )?;
+ push_module_export!(EntityType::Func, core_funcs, "function")
+ }
+ ExternalKind::Table => {
+ check_max(self.core_tables.len(), 1, MAX_WASM_TABLES, "tables", offset)?;
+ push_module_export!(EntityType::Table, core_tables, "table")
+ }
+ ExternalKind::Memory => {
+ check_max(
+ self.core_memories.len(),
+ 1,
+ MAX_WASM_MEMORIES,
+ "memories",
+ offset,
+ )?;
+ push_module_export!(EntityType::Memory, core_memories, "memory")
+ }
+ ExternalKind::Global => {
+ check_max(
+ self.core_globals.len(),
+ 1,
+ MAX_WASM_GLOBALS,
+ "globals",
+ offset,
+ )?;
+ push_module_export!(EntityType::Global, core_globals, "global")
+ }
+ ExternalKind::Tag => {
+ check_max(self.core_tags.len(), 1, MAX_WASM_TAGS, "tags", offset)?;
+ push_module_export!(EntityType::Tag, core_tags, "tag")
+ }
+ }
+ }
+
+ fn alias_instance_export(
+ &mut self,
+ instance_index: u32,
+ kind: ComponentExternalKind,
+ name: &str,
+ types: &mut TypeAlloc,
+ offset: usize,
+ ) -> Result<()> {
+ let name = to_kebab_str(name, "alias export", offset)?;
+
+ macro_rules! push_component_export {
+ ($expected:path, $collection:ident, $ty:literal) => {{
+ match self.instance_export(instance_index, name, types, offset)? {
+ $expected(ty) => {
+ self.$collection.push(*ty);
+ Ok(())
+ }
+ _ => {
+ bail!(
+ offset,
+ "export `{name}` for instance {instance_index} is not a {}",
+ $ty
+ )
+ }
+ }
+ }};
+ }
+
+ match kind {
+ ComponentExternalKind::Module => {
+ check_max(
+ self.core_modules.len(),
+ 1,
+ MAX_WASM_MODULES,
+ "modules",
+ offset,
+ )?;
+ push_component_export!(ComponentEntityType::Module, core_modules, "module")
+ }
+ ComponentExternalKind::Component => {
+ check_max(
+ self.components.len(),
+ 1,
+ MAX_WASM_COMPONENTS,
+ "components",
+ offset,
+ )?;
+ push_component_export!(ComponentEntityType::Component, components, "component")
+ }
+ ComponentExternalKind::Instance => {
+ check_max(
+ self.instance_count(),
+ 1,
+ MAX_WASM_INSTANCES,
+ "instances",
+ offset,
+ )?;
+ push_component_export!(ComponentEntityType::Instance, instances, "instance")
+ }
+ ComponentExternalKind::Func => {
+ check_max(
+ self.function_count(),
+ 1,
+ MAX_WASM_FUNCTIONS,
+ "functions",
+ offset,
+ )?;
+ push_component_export!(ComponentEntityType::Func, funcs, "function")
+ }
+ ComponentExternalKind::Value => {
+ check_max(self.values.len(), 1, MAX_WASM_VALUES, "values", offset)?;
+ match self.instance_export(instance_index, name, types, offset)? {
+ ComponentEntityType::Value(ty) => {
+ self.values.push((*ty, false));
+ Ok(())
+ }
+ _ => bail!(
+ offset,
+ "export `{name}` for instance {instance_index} is not a value",
+ ),
+ }
+ }
+ ComponentExternalKind::Type => {
+ check_max(self.type_count(), 1, MAX_WASM_TYPES, "types", offset)?;
+ match *self.instance_export(instance_index, name, types, offset)? {
+ ComponentEntityType::Type { created, .. } => {
+ let id = types.with_unique(created);
+ self.types.push(id);
+ Ok(())
+ }
+ _ => {
+ bail!(
+ offset,
+ "export `{name}` for instance {instance_index} is not a type",
+ )
+ }
+ }
+ }
+ }
+ }
+
+ fn alias_module(components: &mut [Self], count: u32, index: u32, offset: usize) -> Result<()> {
+ let component = Self::check_alias_count(components, count, offset)?;
+ let ty = component.module_at(index, offset)?;
+
+ let current = components.last_mut().unwrap();
+ check_max(
+ current.core_modules.len(),
+ 1,
+ MAX_WASM_MODULES,
+ "modules",
+ offset,
+ )?;
+
+ current.core_modules.push(ty);
+ Ok(())
+ }
+
+ fn alias_component(
+ components: &mut [Self],
+ count: u32,
+ index: u32,
+ offset: usize,
+ ) -> Result<()> {
+ let component = Self::check_alias_count(components, count, offset)?;
+ let ty = component.component_at(index, offset)?;
+
+ let current = components.last_mut().unwrap();
+ check_max(
+ current.components.len(),
+ 1,
+ MAX_WASM_COMPONENTS,
+ "components",
+ offset,
+ )?;
+
+ current.components.push(ty);
+ Ok(())
+ }
+
+ fn alias_core_type(
+ components: &mut [Self],
+ count: u32,
+ index: u32,
+ types: &mut TypeAlloc,
+ offset: usize,
+ ) -> Result<()> {
+ let component = Self::check_alias_count(components, count, offset)?;
+ let ty = component.type_at(index, true, offset)?;
+
+ let current = components.last_mut().unwrap();
+ check_max(current.type_count(), 1, MAX_WASM_TYPES, "types", offset)?;
+
+ let id = types.with_unique(ty);
+ current.core_types.push(id);
+
+ Ok(())
+ }
+
+ fn alias_type(
+ components: &mut [Self],
+ count: u32,
+ index: u32,
+ types: &mut TypeAlloc,
+ offset: usize,
+ ) -> Result<()> {
+ let component = Self::check_alias_count(components, count, offset)?;
+ let ty = component.type_at(index, false, offset)?;
+
+ let current = components.last_mut().unwrap();
+ check_max(current.type_count(), 1, MAX_WASM_TYPES, "types", offset)?;
+
+ let id = types.with_unique(ty);
+ current.types.push(id);
+
+ Ok(())
+ }
+
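+    // Editorial note (not part of the upstream source): with a component
+    // stack `[root, inner]`, a `count` of 0 resolves to `inner` (the current
+    // component), a `count` of 1 resolves to `root`, and a `count` of 2 is
+    // rejected below as an invalid outer alias count.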
+ fn check_alias_count(components: &[Self], count: u32, offset: usize) -> Result<&Self> {
+ let count = count as usize;
+ if count >= components.len() {
+ bail!(offset, "invalid outer alias count of {count}");
+ }
+
+ Ok(&components[components.len() - count - 1])
+ }
+
+ fn create_defined_type(
+ &self,
+ ty: crate::ComponentDefinedType,
+ types: &TypeList,
+ offset: usize,
+ ) -> Result<ComponentDefinedType> {
+ match ty {
+ crate::ComponentDefinedType::Primitive(ty) => Ok(ComponentDefinedType::Primitive(ty)),
+ crate::ComponentDefinedType::Record(fields) => {
+ self.create_record_type(fields.as_ref(), types, offset)
+ }
+ crate::ComponentDefinedType::Variant(cases) => {
+ self.create_variant_type(cases.as_ref(), types, offset)
+ }
+ crate::ComponentDefinedType::List(ty) => Ok(ComponentDefinedType::List(
+ self.create_component_val_type(ty, types, offset)?,
+ )),
+ crate::ComponentDefinedType::Tuple(tys) => {
+ self.create_tuple_type(tys.as_ref(), types, offset)
+ }
+ crate::ComponentDefinedType::Flags(names) => {
+ self.create_flags_type(names.as_ref(), offset)
+ }
+ crate::ComponentDefinedType::Enum(cases) => {
+ self.create_enum_type(cases.as_ref(), offset)
+ }
+ crate::ComponentDefinedType::Union(tys) => {
+ self.create_union_type(tys.as_ref(), types, offset)
+ }
+ crate::ComponentDefinedType::Option(ty) => Ok(ComponentDefinedType::Option(
+ self.create_component_val_type(ty, types, offset)?,
+ )),
+ crate::ComponentDefinedType::Result { ok, err } => Ok(ComponentDefinedType::Result {
+ ok: ok
+ .map(|ty| self.create_component_val_type(ty, types, offset))
+ .transpose()?,
+ err: err
+ .map(|ty| self.create_component_val_type(ty, types, offset))
+ .transpose()?,
+ }),
+ }
+ }
+
+ fn create_record_type(
+ &self,
+ fields: &[(&str, crate::ComponentValType)],
+ types: &TypeList,
+ offset: usize,
+ ) -> Result<ComponentDefinedType> {
+ let mut type_size = 1;
+ let mut field_map = IndexMap::with_capacity(fields.len());
+
+ for (name, ty) in fields {
+ let name = to_kebab_str(name, "record field", offset)?;
+ let ty = self.create_component_val_type(*ty, types, offset)?;
+
+ match field_map.entry(name.to_owned()) {
+ Entry::Occupied(e) => bail!(
+ offset,
+ "record field name `{name}` conflicts with previous field name `{prev}`",
+ prev = e.key()
+ ),
+ Entry::Vacant(e) => {
+ type_size = combine_type_sizes(type_size, ty.type_size(), offset)?;
+ e.insert(ty);
+ }
+ }
+ }
+
+ Ok(ComponentDefinedType::Record(RecordType {
+ type_size,
+ fields: field_map,
+ }))
+ }
+
+ fn create_variant_type(
+ &self,
+ cases: &[crate::VariantCase],
+ types: &TypeList,
+ offset: usize,
+ ) -> Result<ComponentDefinedType> {
+ let mut type_size = 1;
+ let mut case_map: IndexMap<KebabString, VariantCase> = IndexMap::with_capacity(cases.len());
+
+ if cases.is_empty() {
+ return Err(BinaryReaderError::new(
+ "variant type must have at least one case",
+ offset,
+ ));
+ }
+
+ if cases.len() > u32::MAX as usize {
+ return Err(BinaryReaderError::new(
+ "variant type cannot be represented with a 32-bit discriminant value",
+ offset,
+ ));
+ }
+
+ for (i, case) in cases.iter().enumerate() {
+ if let Some(refines) = case.refines {
+ if refines >= i as u32 {
+ return Err(BinaryReaderError::new(
+ "variant case can only refine a previously defined case",
+ offset,
+ ));
+ }
+ }
+
+ let name = to_kebab_str(case.name, "variant case", offset)?;
+
+ let ty = case
+ .ty
+ .map(|ty| self.create_component_val_type(ty, types, offset))
+ .transpose()?;
+
+ match case_map.entry(name.to_owned()) {
+ Entry::Occupied(e) => bail!(
+ offset,
+ "variant case name `{name}` conflicts with previous case name `{prev}`",
+ name = case.name,
+ prev = e.key()
+ ),
+ Entry::Vacant(e) => {
+ type_size = combine_type_sizes(
+ type_size,
+ ty.map(|ty| ty.type_size()).unwrap_or(1),
+ offset,
+ )?;
+
+ // Safety: the use of `KebabStr::new_unchecked` here is safe because the string
+ // was already verified to be kebab case.
+ e.insert(VariantCase {
+ ty,
+ refines: case
+ .refines
+ .map(|i| KebabStr::new_unchecked(cases[i as usize].name).to_owned()),
+ });
+ }
+ }
+ }
+
+ Ok(ComponentDefinedType::Variant(VariantType {
+ type_size,
+ cases: case_map,
+ }))
+ }
+
+ fn create_tuple_type(
+ &self,
+ tys: &[crate::ComponentValType],
+ types: &TypeList,
+ offset: usize,
+ ) -> Result<ComponentDefinedType> {
+ let mut type_size = 1;
+ let types = tys
+ .iter()
+ .map(|ty| {
+ let ty = self.create_component_val_type(*ty, types, offset)?;
+ type_size = combine_type_sizes(type_size, ty.type_size(), offset)?;
+ Ok(ty)
+ })
+ .collect::<Result<_>>()?;
+
+ Ok(ComponentDefinedType::Tuple(TupleType { type_size, types }))
+ }
+
+ fn create_flags_type(&self, names: &[&str], offset: usize) -> Result<ComponentDefinedType> {
+ let mut names_set = IndexSet::with_capacity(names.len());
+
+ for name in names {
+ let name = to_kebab_str(name, "flag", offset)?;
+ if !names_set.insert(name.to_owned()) {
+ bail!(
+ offset,
+ "flag name `{name}` conflicts with previous flag name `{prev}`",
+ prev = names_set.get(name).unwrap()
+ );
+ }
+ }
+
+ Ok(ComponentDefinedType::Flags(names_set))
+ }
+
+ fn create_enum_type(&self, cases: &[&str], offset: usize) -> Result<ComponentDefinedType> {
+ if cases.len() > u32::MAX as usize {
+ return Err(BinaryReaderError::new(
+ "enumeration type cannot be represented with a 32-bit discriminant value",
+ offset,
+ ));
+ }
+
+ let mut tags = IndexSet::with_capacity(cases.len());
+
+ for tag in cases {
+ let tag = to_kebab_str(tag, "enum tag", offset)?;
+ if !tags.insert(tag.to_owned()) {
+ bail!(
+ offset,
+ "enum tag name `{tag}` conflicts with previous tag name `{prev}`",
+ prev = tags.get(tag).unwrap()
+ );
+ }
+ }
+
+ Ok(ComponentDefinedType::Enum(tags))
+ }
+
+ fn create_union_type(
+ &self,
+ tys: &[crate::ComponentValType],
+ types: &TypeList,
+ offset: usize,
+ ) -> Result<ComponentDefinedType> {
+ let mut type_size = 1;
+ let types = tys
+ .iter()
+ .map(|ty| {
+ let ty = self.create_component_val_type(*ty, types, offset)?;
+ type_size = combine_type_sizes(type_size, ty.type_size(), offset)?;
+ Ok(ty)
+ })
+ .collect::<Result<_>>()?;
+
+ Ok(ComponentDefinedType::Union(UnionType { type_size, types }))
+ }
+
+ fn create_component_val_type(
+ &self,
+ ty: crate::ComponentValType,
+ types: &TypeList,
+ offset: usize,
+ ) -> Result<ComponentValType> {
+ Ok(match ty {
+ crate::ComponentValType::Primitive(pt) => ComponentValType::Primitive(pt),
+ crate::ComponentValType::Type(idx) => {
+ ComponentValType::Type(self.defined_type_at(idx, types, offset)?)
+ }
+ })
+ }
+
+ pub fn type_at(&self, idx: u32, core: bool, offset: usize) -> Result<TypeId> {
+ let types = if core { &self.core_types } else { &self.types };
+ types
+ .get(idx as usize)
+ .copied()
+ .ok_or_else(|| format_err!(offset, "unknown type {idx}: type index out of bounds"))
+ }
+
+ fn function_type_at<'a>(
+ &self,
+ idx: u32,
+ types: &'a TypeList,
+ offset: usize,
+ ) -> Result<&'a ComponentFuncType> {
+ types[self.type_at(idx, false, offset)?]
+ .as_component_func_type()
+ .ok_or_else(|| format_err!(offset, "type index {idx} is not a function type"))
+ }
+
+ fn function_at(&self, idx: u32, offset: usize) -> Result<TypeId> {
+ self.funcs.get(idx as usize).copied().ok_or_else(|| {
+ format_err!(
+ offset,
+ "unknown function {idx}: function index out of bounds"
+ )
+ })
+ }
+
+ fn component_at(&self, idx: u32, offset: usize) -> Result<TypeId> {
+ self.components.get(idx as usize).copied().ok_or_else(|| {
+ format_err!(
+ offset,
+ "unknown component {idx}: component index out of bounds"
+ )
+ })
+ }
+
+ fn instance_at(&self, idx: u32, offset: usize) -> Result<TypeId> {
+ self.instances.get(idx as usize).copied().ok_or_else(|| {
+ format_err!(
+ offset,
+ "unknown instance {idx}: instance index out of bounds"
+ )
+ })
+ }
+
+ fn instance_export<'a>(
+ &self,
+ instance_index: u32,
+ name: &KebabStr,
+ types: &'a TypeList,
+ offset: usize,
+ ) -> Result<&'a ComponentEntityType> {
+ match types[self.instance_at(instance_index, offset)?]
+ .as_component_instance_type()
+ .unwrap()
+ .internal_exports(types)
+ .get(name)
+ {
+ Some((_, ty)) => Ok(ty),
+ None => bail!(
+ offset,
+ "instance {instance_index} has no export named `{name}`"
+ ),
+ }
+ }
+
+ fn value_at(&mut self, idx: u32, offset: usize) -> Result<&ComponentValType> {
+ match self.values.get_mut(idx as usize) {
+ Some((ty, used)) if !*used => {
+ *used = true;
+ Ok(ty)
+ }
+ Some(_) => bail!(offset, "value {idx} cannot be used more than once"),
+ None => bail!(offset, "unknown value {idx}: value index out of bounds"),
+ }
+ }
+
+ fn defined_type_at(&self, idx: u32, types: &TypeList, offset: usize) -> Result<TypeId> {
+ let id = self.type_at(idx, false, offset)?;
+ match &types[id] {
+ Type::Defined(_) => Ok(id),
+ _ => bail!(offset, "type index {} is not a defined type", idx),
+ }
+ }
+
+ fn core_function_at(&self, idx: u32, offset: usize) -> Result<TypeId> {
+ match self.core_funcs.get(idx as usize) {
+ Some(id) => Ok(*id),
+ None => bail!(
+ offset,
+ "unknown core function {idx}: function index out of bounds"
+ ),
+ }
+ }
+
+ fn module_at(&self, idx: u32, offset: usize) -> Result<TypeId> {
+ match self.core_modules.get(idx as usize) {
+ Some(id) => Ok(*id),
+ None => bail!(offset, "unknown module {idx}: module index out of bounds"),
+ }
+ }
+
+ fn core_instance_at(&self, idx: u32, offset: usize) -> Result<TypeId> {
+ match self.core_instances.get(idx as usize) {
+ Some(id) => Ok(*id),
+ None => bail!(
+ offset,
+ "unknown core instance {idx}: instance index out of bounds"
+ ),
+ }
+ }
+
+ fn core_instance_export<'a>(
+ &self,
+ instance_index: u32,
+ name: &str,
+ types: &'a TypeList,
+ offset: usize,
+ ) -> Result<&'a EntityType> {
+ match types[self.core_instance_at(instance_index, offset)?]
+ .as_instance_type()
+ .unwrap()
+ .internal_exports(types)
+ .get(name)
+ {
+ Some(export) => Ok(export),
+ None => bail!(
+ offset,
+ "core instance {instance_index} has no export named `{name}`"
+ ),
+ }
+ }
+
+ fn global_at(&self, idx: u32, offset: usize) -> Result<&GlobalType> {
+ match self.core_globals.get(idx as usize) {
+ Some(t) => Ok(t),
+ None => bail!(offset, "unknown global {idx}: global index out of bounds"),
+ }
+ }
+
+ fn table_at(&self, idx: u32, offset: usize) -> Result<&TableType> {
+ match self.core_tables.get(idx as usize) {
+ Some(t) => Ok(t),
+ None => bail!(offset, "unknown table {idx}: table index out of bounds"),
+ }
+ }
+
+ fn memory_at(&self, idx: u32, offset: usize) -> Result<&MemoryType> {
+ match self.core_memories.get(idx as usize) {
+ Some(t) => Ok(t),
+ None => bail!(offset, "unknown memory {idx}: memory index out of bounds"),
+ }
+ }
+
+ fn take_component_type(&mut self) -> ComponentType {
+ let mut ty = ComponentType {
+ type_size: self.type_size,
+ imports: Default::default(),
+ exports: Default::default(),
+ };
+
+ for (name, (url, t, kind)) in mem::take(&mut self.externs) {
+ let map = match kind {
+ ExternKind::Import => &mut ty.imports,
+ ExternKind::Export => &mut ty.exports,
+ };
+ let prev = map.insert(name, (url, t));
+ assert!(prev.is_none());
+ }
+
+ ty
+ }
+}
+
+impl Default for ComponentState {
+ fn default() -> Self {
+ Self {
+ core_types: Default::default(),
+ core_modules: Default::default(),
+ core_instances: Default::default(),
+ core_funcs: Default::default(),
+ core_memories: Default::default(),
+ core_tables: Default::default(),
+ core_globals: Default::default(),
+ core_tags: Default::default(),
+ types: Default::default(),
+ funcs: Default::default(),
+ values: Default::default(),
+ instances: Default::default(),
+ components: Default::default(),
+ externs: Default::default(),
+ export_urls: Default::default(),
+ import_urls: Default::default(),
+ has_start: Default::default(),
+ type_size: 1,
+ }
+ }
+}
diff --git a/third_party/rust/wasmparser/src/validator/core.rs b/third_party/rust/wasmparser/src/validator/core.rs
new file mode 100644
index 0000000000..5707e1e73b
--- /dev/null
+++ b/third_party/rust/wasmparser/src/validator/core.rs
@@ -0,0 +1,1278 @@
+//! State relating to validating a WebAssembly module.
+//!
+use super::{
+ check_max, combine_type_sizes,
+ operators::{ty_to_str, OperatorValidator, OperatorValidatorAllocations},
+ types::{EntityType, Type, TypeAlloc, TypeId, TypeList},
+};
+use crate::limits::*;
+use crate::validator::core::arc::MaybeOwned;
+use crate::{
+ BinaryReaderError, ConstExpr, Data, DataKind, Element, ElementKind, ExternalKind, FuncType,
+ Global, GlobalType, HeapType, MemoryType, RefType, Result, Table, TableInit, TableType,
+ TagType, TypeRef, ValType, VisitOperator, WasmFeatures, WasmFuncType, WasmModuleResources,
+};
+use indexmap::IndexMap;
+use std::mem;
+use std::{collections::HashSet, sync::Arc};
+
+// Section order for WebAssembly modules.
+//
+// Component sections are unordered and allow for duplicates,
+// so this isn't used for components.
+#[derive(Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Debug)]
+pub enum Order {
+ Initial,
+ Type,
+ Import,
+ Function,
+ Table,
+ Memory,
+ Tag,
+ Global,
+ Export,
+ Start,
+ Element,
+ DataCount,
+ Code,
+ Data,
+}
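+
+// Note (editorial): `ModuleState::update_order` below relies on the declared
+// variant order via the derived `PartialOrd`/`Ord` impls; for example, a Function
+// section encountered after the Code section is rejected with "section out of
+// order" because `Order::Code > Order::Function`.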
+
+impl Default for Order {
+ fn default() -> Order {
+ Order::Initial
+ }
+}
+
+#[derive(Default)]
+pub(crate) struct ModuleState {
+ /// Internal state that is incrementally built-up for the module being
+ /// validated. This houses type information for all wasm items, like
+ /// functions. Note that this starts out as a solely owned `Arc<T>` so we can
+ /// get mutable access, but after we get to the code section this is never
+    /// mutated, so we can clone it cheaply and hand it to sub-validators.
+ pub module: arc::MaybeOwned<Module>,
+
+ /// Where we are, order-wise, in the wasm binary.
+ order: Order,
+
+ /// The number of data segments in the data section (if present).
+ pub data_segment_count: u32,
+
+    /// The number of functions we expect to be defined in the code section, which
+    /// is effectively the length of the function section if one was present. The
+    /// current position within the code section's index space is tracked separately
+    /// in `code_section_index` below and determines the type of the next function
+    /// body being validated.
+ pub expected_code_bodies: Option<u32>,
+
+ const_expr_allocs: OperatorValidatorAllocations,
+
+ /// When parsing the code section, represents the current index in the section.
+ code_section_index: Option<usize>,
+}
+
+impl ModuleState {
+ pub fn update_order(&mut self, order: Order, offset: usize) -> Result<()> {
+ if self.order >= order {
+ return Err(BinaryReaderError::new("section out of order", offset));
+ }
+
+ self.order = order;
+
+ Ok(())
+ }
+
+ pub fn validate_end(&self, offset: usize) -> Result<()> {
+ // Ensure that the data count section, if any, was correct.
+ if let Some(data_count) = self.module.data_count {
+ if data_count != self.data_segment_count {
+ return Err(BinaryReaderError::new(
+ "data count and data section have inconsistent lengths",
+ offset,
+ ));
+ }
+ }
+ // Ensure that the function section, if nonzero, was paired with a code
+ // section with the appropriate length.
+ if let Some(n) = self.expected_code_bodies {
+ if n > 0 {
+ return Err(BinaryReaderError::new(
+ "function and code section have inconsistent lengths",
+ offset,
+ ));
+ }
+ }
+
+ Ok(())
+ }
+
+ pub fn next_code_index_and_type(&mut self, offset: usize) -> Result<(u32, u32)> {
+ let index = self
+ .code_section_index
+ .get_or_insert(self.module.num_imported_functions as usize);
+
+ if *index >= self.module.functions.len() {
+ return Err(BinaryReaderError::new(
+ "code section entry exceeds number of functions",
+ offset,
+ ));
+ }
+
+ let ty = self.module.functions[*index];
+ *index += 1;
+
+ Ok(((*index - 1) as u32, ty))
+ }
+
+ pub fn add_global(
+ &mut self,
+ global: Global,
+ features: &WasmFeatures,
+ types: &TypeList,
+ offset: usize,
+ ) -> Result<()> {
+ self.module
+ .check_global_type(&global.ty, features, types, offset)?;
+ self.check_const_expr(&global.init_expr, global.ty.content_type, features, types)?;
+ self.module.assert_mut().globals.push(global.ty);
+ Ok(())
+ }
+
+ pub fn add_table(
+ &mut self,
+ table: Table<'_>,
+ features: &WasmFeatures,
+ types: &TypeList,
+ offset: usize,
+ ) -> Result<()> {
+ self.module
+ .check_table_type(&table.ty, features, types, offset)?;
+
+ match &table.init {
+ TableInit::RefNull => {
+ if !table.ty.element_type.nullable {
+ bail!(offset, "type mismatch: non-defaultable element type");
+ }
+ }
+ TableInit::Expr(expr) => {
+ if !features.function_references {
+ bail!(
+ offset,
+ "tables with expression initializers require \
+ the function-references proposal"
+ );
+ }
+ self.check_const_expr(expr, table.ty.element_type.into(), features, types)?;
+ }
+ }
+ self.module.assert_mut().tables.push(table.ty);
+ Ok(())
+ }
+
+ pub fn add_data_segment(
+ &mut self,
+ data: Data,
+ features: &WasmFeatures,
+ types: &TypeList,
+ offset: usize,
+ ) -> Result<()> {
+ match data.kind {
+ DataKind::Passive => Ok(()),
+ DataKind::Active {
+ memory_index,
+ offset_expr,
+ } => {
+ let ty = self.module.memory_at(memory_index, offset)?.index_type();
+ self.check_const_expr(&offset_expr, ty, features, types)
+ }
+ }
+ }
+
+ pub fn add_element_segment(
+ &mut self,
+ e: Element,
+ features: &WasmFeatures,
+ types: &TypeList,
+ offset: usize,
+ ) -> Result<()> {
+ // the `funcref` value type is allowed all the way back to the MVP, so
+ // don't check it here
+ if e.ty != RefType::FUNCREF {
+ self.module
+ .check_value_type(ValType::Ref(e.ty), features, types, offset)?;
+ }
+ match e.kind {
+ ElementKind::Active {
+ table_index,
+ offset_expr,
+ } => {
+ let table = self.module.table_at(table_index, offset)?;
+ if !self
+ .module
+ .matches(ValType::Ref(e.ty), ValType::Ref(table.element_type), types)
+ {
+ return Err(BinaryReaderError::new(
+ format!(
+ "type mismatch: invalid element type `{}` for table type `{}`",
+ ty_to_str(e.ty.into()),
+ ty_to_str(table.element_type.into()),
+ ),
+ offset,
+ ));
+ }
+
+ self.check_const_expr(&offset_expr, ValType::I32, features, types)?;
+ }
+ ElementKind::Passive | ElementKind::Declared => {
+ if !features.bulk_memory {
+ return Err(BinaryReaderError::new(
+ "bulk memory must be enabled",
+ offset,
+ ));
+ }
+ }
+ }
+
+ let validate_count = |count: u32| -> Result<(), BinaryReaderError> {
+ if count > MAX_WASM_TABLE_ENTRIES as u32 {
+ Err(BinaryReaderError::new(
+ "number of elements is out of bounds",
+ offset,
+ ))
+ } else {
+ Ok(())
+ }
+ };
+ match e.items {
+ crate::ElementItems::Functions(reader) => {
+ let count = reader.count();
+                if !e.ty.nullable && count == 0 {
+ return Err(BinaryReaderError::new(
+ "a non-nullable element must come with an initialization expression",
+ offset,
+ ));
+ }
+ validate_count(count)?;
+ for f in reader.into_iter_with_offsets() {
+ let (offset, f) = f?;
+ self.module.get_func_type(f, types, offset)?;
+ self.module.assert_mut().function_references.insert(f);
+ }
+ }
+ crate::ElementItems::Expressions(reader) => {
+ validate_count(reader.count())?;
+ for expr in reader {
+ self.check_const_expr(&expr?, ValType::Ref(e.ty), features, types)?;
+ }
+ }
+ }
+ self.module.assert_mut().element_types.push(e.ty);
+ Ok(())
+ }
+
+ fn check_const_expr(
+ &mut self,
+ expr: &ConstExpr<'_>,
+ expected_ty: ValType,
+ features: &WasmFeatures,
+ types: &TypeList,
+ ) -> Result<()> {
+ let mut validator = VisitConstOperator {
+ offset: 0,
+ order: self.order,
+ uninserted_funcref: false,
+ ops: OperatorValidator::new_const_expr(
+ features,
+ expected_ty,
+ mem::take(&mut self.const_expr_allocs),
+ ),
+ resources: OperatorValidatorResources {
+ types,
+ module: &mut self.module,
+ },
+ };
+
+ let mut ops = expr.get_operators_reader();
+ while !ops.eof() {
+ validator.offset = ops.original_position();
+ ops.visit_operator(&mut validator)??;
+ }
+ validator.ops.finish(ops.original_position())?;
+
+ // See comment in `RefFunc` below for why this is an assert.
+ assert!(!validator.uninserted_funcref);
+
+ self.const_expr_allocs = validator.ops.into_allocations();
+
+ return Ok(());
+
+ struct VisitConstOperator<'a> {
+ offset: usize,
+ uninserted_funcref: bool,
+ ops: OperatorValidator,
+ resources: OperatorValidatorResources<'a>,
+ order: Order,
+ }
+
+ impl VisitConstOperator<'_> {
+ fn validator(&mut self) -> impl VisitOperator<'_, Output = Result<()>> {
+ self.ops.with_resources(&self.resources, self.offset)
+ }
+
+ fn validate_extended_const(&mut self) -> Result<()> {
+ if self.ops.features.extended_const {
+ Ok(())
+ } else {
+ Err(BinaryReaderError::new(
+ "constant expression required: non-constant operator",
+ self.offset,
+ ))
+ }
+ }
+
+ fn validate_global(&mut self, index: u32) -> Result<()> {
+ let module = &self.resources.module;
+ let global = module.global_at(index, self.offset)?;
+ if index >= module.num_imported_globals {
+ return Err(BinaryReaderError::new(
+ "constant expression required: global.get of locally defined global",
+ self.offset,
+ ));
+ }
+ if global.mutable {
+ return Err(BinaryReaderError::new(
+ "constant expression required: global.get of mutable global",
+ self.offset,
+ ));
+ }
+ Ok(())
+ }
+
+ // Functions in initialization expressions are only valid in
+ // element segment initialization expressions and globals. In
+ // these contexts we want to record all function references.
+ //
+ // Initialization expressions can also be found in the data
+ // section, however. A `RefFunc` instruction in those situations
+ // is always invalid and needs to produce a validation error. In
+ // this situation, though, we can no longer modify
+ // the state since it's been "snapshot" already for
+ // parallel validation of functions.
+ //
+ // If we cannot modify the function references then this function
+ // *should* result in a validation error, but we defer that
+ // validation error to happen later. The `uninserted_funcref`
+ // boolean here is used to track this and will cause a panic
+ // (aka a fuzz bug) if we somehow forget to emit an error somewhere
+ // else.
+ fn insert_ref_func(&mut self, index: u32) {
+ if self.order == Order::Data {
+ self.uninserted_funcref = true;
+ } else {
+ self.resources
+ .module
+ .assert_mut()
+ .function_references
+ .insert(index);
+ }
+ }
+ }
+
+ macro_rules! define_visit_operator {
+ ($(@$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident)*) => {
+ $(
+ #[allow(unused_variables)]
+ fn $visit(&mut self $($(,$arg: $argty)*)?) -> Self::Output {
+ define_visit_operator!(@visit self $visit $($($arg)*)?)
+ }
+ )*
+ };
+
+ // These are always valid in const expressions
+ (@visit $self:ident visit_i32_const $val:ident) => {{
+ $self.validator().visit_i32_const($val)
+ }};
+ (@visit $self:ident visit_i64_const $val:ident) => {{
+ $self.validator().visit_i64_const($val)
+ }};
+ (@visit $self:ident visit_f32_const $val:ident) => {{
+ $self.validator().visit_f32_const($val)
+ }};
+ (@visit $self:ident visit_f64_const $val:ident) => {{
+ $self.validator().visit_f64_const($val)
+ }};
+ (@visit $self:ident visit_v128_const $val:ident) => {{
+ $self.validator().visit_v128_const($val)
+ }};
+ (@visit $self:ident visit_ref_null $val:ident) => {{
+ $self.validator().visit_ref_null($val)
+ }};
+ (@visit $self:ident visit_end) => {{
+ $self.validator().visit_end()
+ }};
+
+
+ // These are valid const expressions when the extended-const proposal is enabled.
+ (@visit $self:ident visit_i32_add) => {{
+ $self.validate_extended_const()?;
+ $self.validator().visit_i32_add()
+ }};
+ (@visit $self:ident visit_i32_sub) => {{
+ $self.validate_extended_const()?;
+ $self.validator().visit_i32_sub()
+ }};
+ (@visit $self:ident visit_i32_mul) => {{
+ $self.validate_extended_const()?;
+ $self.validator().visit_i32_mul()
+ }};
+ (@visit $self:ident visit_i64_add) => {{
+ $self.validate_extended_const()?;
+ $self.validator().visit_i64_add()
+ }};
+ (@visit $self:ident visit_i64_sub) => {{
+ $self.validate_extended_const()?;
+ $self.validator().visit_i64_sub()
+ }};
+ (@visit $self:ident visit_i64_mul) => {{
+ $self.validate_extended_const()?;
+ $self.validator().visit_i64_mul()
+ }};
+
+ // `global.get` is a valid const expression for imported, immutable
+ // globals.
+ (@visit $self:ident visit_global_get $idx:ident) => {{
+ $self.validate_global($idx)?;
+ $self.validator().visit_global_get($idx)
+ }};
+ // `ref.func`, if it's in a `global` initializer, will insert into
+ // the set of referenced functions so it's processed here.
+ (@visit $self:ident visit_ref_func $idx:ident) => {{
+ $self.insert_ref_func($idx);
+ $self.validator().visit_ref_func($idx)
+ }};
+
+ (@visit $self:ident $op:ident $($args:tt)*) => {{
+ Err(BinaryReaderError::new(
+ "constant expression required: non-constant operator",
+ $self.offset,
+ ))
+ }}
+ }
+
+ impl<'a> VisitOperator<'a> for VisitConstOperator<'a> {
+ type Output = Result<()>;
+
+ for_each_operator!(define_visit_operator);
+ }
+ }
+}
+
+pub(crate) struct Module {
+ // This is set once the code section starts.
+ // `WasmModuleResources` implementations use the snapshot to
+ // enable parallel validation of functions.
+ pub snapshot: Option<Arc<TypeList>>,
+ // Stores indexes into the validator's types list.
+ pub types: Vec<TypeId>,
+ pub tables: Vec<TableType>,
+ pub memories: Vec<MemoryType>,
+ pub globals: Vec<GlobalType>,
+ pub element_types: Vec<RefType>,
+ pub data_count: Option<u32>,
+ // Stores indexes into `types`.
+ pub functions: Vec<u32>,
+ pub tags: Vec<TypeId>,
+ pub function_references: HashSet<u32>,
+ pub imports: IndexMap<(String, String), Vec<EntityType>>,
+ pub exports: IndexMap<String, EntityType>,
+ pub type_size: u32,
+ num_imported_globals: u32,
+ num_imported_functions: u32,
+}
+
+impl Module {
+ pub fn add_type(
+ &mut self,
+ ty: crate::Type,
+ features: &WasmFeatures,
+ types: &mut TypeAlloc,
+ offset: usize,
+ check_limit: bool,
+ ) -> Result<()> {
+ let ty = match ty {
+ crate::Type::Func(t) => {
+ for ty in t.params().iter().chain(t.results()) {
+ self.check_value_type(*ty, features, types, offset)?;
+ }
+ if t.results().len() > 1 && !features.multi_value {
+ return Err(BinaryReaderError::new(
+ "func type returns multiple values but the multi-value feature is not enabled",
+ offset,
+ ));
+ }
+ Type::Func(t)
+ }
+ };
+
+ if check_limit {
+ check_max(self.types.len(), 1, MAX_WASM_TYPES, "types", offset)?;
+ }
+
+ let id = types.push_defined(ty);
+ self.types.push(id);
+ Ok(())
+ }
+
+ pub fn add_import(
+ &mut self,
+ import: crate::Import,
+ features: &WasmFeatures,
+ types: &TypeList,
+ offset: usize,
+ ) -> Result<()> {
+ let entity = self.check_type_ref(&import.ty, features, types, offset)?;
+
+ let (len, max, desc) = match import.ty {
+ TypeRef::Func(type_index) => {
+ self.functions.push(type_index);
+ self.num_imported_functions += 1;
+ (self.functions.len(), MAX_WASM_FUNCTIONS, "functions")
+ }
+ TypeRef::Table(ty) => {
+ self.tables.push(ty);
+ (self.tables.len(), self.max_tables(features), "tables")
+ }
+ TypeRef::Memory(ty) => {
+ self.memories.push(ty);
+ (self.memories.len(), self.max_memories(features), "memories")
+ }
+ TypeRef::Tag(ty) => {
+ self.tags.push(self.types[ty.func_type_idx as usize]);
+ (self.tags.len(), MAX_WASM_TAGS, "tags")
+ }
+ TypeRef::Global(ty) => {
+ if !features.mutable_global && ty.mutable {
+ return Err(BinaryReaderError::new(
+ "mutable global support is not enabled",
+ offset,
+ ));
+ }
+ self.globals.push(ty);
+ self.num_imported_globals += 1;
+ (self.globals.len(), MAX_WASM_GLOBALS, "globals")
+ }
+ };
+
+ check_max(len, 0, max, desc, offset)?;
+
+ self.type_size = combine_type_sizes(self.type_size, entity.type_size(), offset)?;
+
+ self.imports
+ .entry((import.module.to_string(), import.name.to_string()))
+ .or_default()
+ .push(entity);
+
+ Ok(())
+ }
+
+ pub fn add_export(
+ &mut self,
+ name: &str,
+ ty: EntityType,
+ features: &WasmFeatures,
+ offset: usize,
+ check_limit: bool,
+ ) -> Result<()> {
+ if !features.mutable_global {
+ if let EntityType::Global(global_type) = ty {
+ if global_type.mutable {
+ return Err(BinaryReaderError::new(
+ "mutable global support is not enabled",
+ offset,
+ ));
+ }
+ }
+ }
+
+ if check_limit {
+ check_max(self.exports.len(), 1, MAX_WASM_EXPORTS, "exports", offset)?;
+ }
+
+ self.type_size = combine_type_sizes(self.type_size, ty.type_size(), offset)?;
+
+ match self.exports.insert(name.to_string(), ty) {
+ Some(_) => Err(format_err!(
+ offset,
+ "duplicate export name `{name}` already defined"
+ )),
+ None => Ok(()),
+ }
+ }
+
+ pub fn add_function(&mut self, type_index: u32, types: &TypeList, offset: usize) -> Result<()> {
+ self.func_type_at(type_index, types, offset)?;
+ self.functions.push(type_index);
+ Ok(())
+ }
+
+ pub fn add_memory(
+ &mut self,
+ ty: MemoryType,
+ features: &WasmFeatures,
+ offset: usize,
+ ) -> Result<()> {
+ self.check_memory_type(&ty, features, offset)?;
+ self.memories.push(ty);
+ Ok(())
+ }
+
+ pub fn add_tag(
+ &mut self,
+ ty: TagType,
+ features: &WasmFeatures,
+ types: &TypeList,
+ offset: usize,
+ ) -> Result<()> {
+ self.check_tag_type(&ty, features, types, offset)?;
+ self.tags.push(self.types[ty.func_type_idx as usize]);
+ Ok(())
+ }
+
+ pub fn type_at(&self, idx: u32, offset: usize) -> Result<TypeId> {
+ self.types
+ .get(idx as usize)
+ .copied()
+ .ok_or_else(|| format_err!(offset, "unknown type {idx}: type index out of bounds"))
+ }
+
+ fn func_type_at<'a>(
+ &self,
+ type_index: u32,
+ types: &'a TypeList,
+ offset: usize,
+ ) -> Result<&'a FuncType> {
+ types[self.type_at(type_index, offset)?]
+ .as_func_type()
+ .ok_or_else(|| format_err!(offset, "type index {type_index} is not a function type"))
+ }
+
+ pub fn check_type_ref(
+ &self,
+ type_ref: &TypeRef,
+ features: &WasmFeatures,
+ types: &TypeList,
+ offset: usize,
+ ) -> Result<EntityType> {
+ Ok(match type_ref {
+ TypeRef::Func(type_index) => {
+ self.func_type_at(*type_index, types, offset)?;
+ EntityType::Func(self.types[*type_index as usize])
+ }
+ TypeRef::Table(t) => {
+ self.check_table_type(t, features, types, offset)?;
+ EntityType::Table(*t)
+ }
+ TypeRef::Memory(t) => {
+ self.check_memory_type(t, features, offset)?;
+ EntityType::Memory(*t)
+ }
+ TypeRef::Tag(t) => {
+ self.check_tag_type(t, features, types, offset)?;
+ EntityType::Tag(self.types[t.func_type_idx as usize])
+ }
+ TypeRef::Global(t) => {
+ self.check_global_type(t, features, types, offset)?;
+ EntityType::Global(*t)
+ }
+ })
+ }
+
+ fn check_table_type(
+ &self,
+ ty: &TableType,
+ features: &WasmFeatures,
+ types: &TypeList,
+ offset: usize,
+ ) -> Result<()> {
+ // the `funcref` value type is allowed all the way back to the MVP, so
+ // don't check it here
+ if ty.element_type != RefType::FUNCREF {
+ self.check_value_type(ValType::Ref(ty.element_type), features, types, offset)?
+ }
+
+ self.check_limits(ty.initial, ty.maximum, offset)?;
+ if ty.initial > MAX_WASM_TABLE_ENTRIES as u32 {
+ return Err(BinaryReaderError::new(
+ "minimum table size is out of bounds",
+ offset,
+ ));
+ }
+ Ok(())
+ }
+
+ fn check_memory_type(
+ &self,
+ ty: &MemoryType,
+ features: &WasmFeatures,
+ offset: usize,
+ ) -> Result<()> {
+ self.check_limits(ty.initial, ty.maximum, offset)?;
+ let (true_maximum, err) = if ty.memory64 {
+ if !features.memory64 {
+ return Err(BinaryReaderError::new(
+ "memory64 must be enabled for 64-bit memories",
+ offset,
+ ));
+ }
+ (
+ MAX_WASM_MEMORY64_PAGES,
+ "memory size must be at most 2**48 pages",
+ )
+ } else {
+ (
+ MAX_WASM_MEMORY32_PAGES,
+ "memory size must be at most 65536 pages (4GiB)",
+ )
+ };
+ if ty.initial > true_maximum {
+ return Err(BinaryReaderError::new(err, offset));
+ }
+ if let Some(maximum) = ty.maximum {
+ if maximum > true_maximum {
+ return Err(BinaryReaderError::new(err, offset));
+ }
+ }
+ if ty.shared {
+ if !features.threads {
+ return Err(BinaryReaderError::new(
+ "threads must be enabled for shared memories",
+ offset,
+ ));
+ }
+ if ty.maximum.is_none() {
+ return Err(BinaryReaderError::new(
+ "shared memory must have maximum size",
+ offset,
+ ));
+ }
+ }
+ Ok(())
+ }
+
+ pub(crate) fn imports_for_module_type(
+ &self,
+ offset: usize,
+ ) -> Result<IndexMap<(String, String), EntityType>> {
+ // Ensure imports are unique, which is a requirement of the component model
+ self.imports
+ .iter()
+ .map(|((module, name), types)| {
+ if types.len() != 1 {
+ bail!(
+ offset,
+ "module has a duplicate import name `{module}:{name}` \
+ that is not allowed in components",
+ );
+ }
+ Ok(((module.clone(), name.clone()), types[0]))
+ })
+ .collect::<Result<_>>()
+ }
+
+ fn check_value_type(
+ &self,
+ ty: ValType,
+ features: &WasmFeatures,
+ types: &TypeList,
+ offset: usize,
+ ) -> Result<()> {
+ match features.check_value_type(ty) {
+ Ok(()) => Ok(()),
+ Err(e) => Err(BinaryReaderError::new(e, offset)),
+ }?;
+        // The above only checks the value type against the enabled features;
+        // reference types additionally need their heap type validated below.
+ match ty {
+ ValType::Ref(rt) => {
+ self.check_ref_type(rt, types, offset)?;
+ }
+ _ => (),
+ }
+ Ok(())
+ }
+
+ fn check_ref_type(&self, ty: RefType, types: &TypeList, offset: usize) -> Result<()> {
+ // Check that the heap type is valid
+ match ty.heap_type {
+ HeapType::Func | HeapType::Extern => (),
+ HeapType::TypedFunc(type_index) => {
+ // Just check that the index is valid
+ self.func_type_at(type_index.into(), types, offset)?;
+ }
+ }
+ Ok(())
+ }
+
+ fn eq_valtypes(&self, ty1: ValType, ty2: ValType, types: &TypeList) -> bool {
+ match (ty1, ty2) {
+ (ValType::Ref(rt1), ValType::Ref(rt2)) => {
+ rt1.nullable == rt2.nullable
+ && match (rt1.heap_type, rt2.heap_type) {
+ (HeapType::Func, HeapType::Func) => true,
+ (HeapType::Extern, HeapType::Extern) => true,
+ (HeapType::TypedFunc(n1), HeapType::TypedFunc(n2)) => {
+ let n1 = self.func_type_at(n1.into(), types, 0).unwrap();
+ let n2 = self.func_type_at(n2.into(), types, 0).unwrap();
+ self.eq_fns(n1, n2, types)
+ }
+ (_, _) => false,
+ }
+ }
+ _ => ty1 == ty2,
+ }
+ }
+ fn eq_fns(&self, f1: &impl WasmFuncType, f2: &impl WasmFuncType, types: &TypeList) -> bool {
+ f1.len_inputs() == f2.len_inputs()
+            && f1.len_outputs() == f2.len_outputs()
+ && f1
+ .inputs()
+ .zip(f2.inputs())
+ .all(|(t1, t2)| self.eq_valtypes(t1, t2, types))
+ && f1
+ .outputs()
+ .zip(f2.outputs())
+ .all(|(t1, t2)| self.eq_valtypes(t1, t2, types))
+ }
+
+ pub(crate) fn matches(&self, ty1: ValType, ty2: ValType, types: &TypeList) -> bool {
+ fn matches_null(null1: bool, null2: bool) -> bool {
+ (null1 == null2) || null2
+ }
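+        // Editorial reading of the check above: the types match when both sides
+        // have the same nullability or when `null2` (the target) is nullable,
+        // i.e. a non-nullable reference may flow where a nullable one is
+        // expected, but not the reverse.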
+
+ let matches_heap = |ty1: HeapType, ty2: HeapType, types: &TypeList| -> bool {
+ match (ty1, ty2) {
+ (HeapType::TypedFunc(n1), HeapType::TypedFunc(n2)) => {
+ // Check whether the defined types are (structurally) equivalent.
+ let n1 = self.func_type_at(n1.into(), types, 0).unwrap();
+ let n2 = self.func_type_at(n2.into(), types, 0).unwrap();
+ self.eq_fns(n1, n2, types)
+ }
+ (HeapType::TypedFunc(_), HeapType::Func) => true,
+ (_, _) => ty1 == ty2,
+ }
+ };
+
+ let matches_ref = |ty1: RefType, ty2: RefType, types: &TypeList| -> bool {
+ matches_heap(ty1.heap_type, ty2.heap_type, types)
+ && matches_null(ty1.nullable, ty2.nullable)
+ };
+
+ match (ty1, ty2) {
+ (ValType::Ref(rt1), ValType::Ref(rt2)) => matches_ref(rt1, rt2, types),
+ (_, _) => ty1 == ty2,
+ }
+ }
+
+ fn check_tag_type(
+ &self,
+ ty: &TagType,
+ features: &WasmFeatures,
+ types: &TypeList,
+ offset: usize,
+ ) -> Result<()> {
+ if !features.exceptions {
+ return Err(BinaryReaderError::new(
+ "exceptions proposal not enabled",
+ offset,
+ ));
+ }
+ let ty = self.func_type_at(ty.func_type_idx, types, offset)?;
+ if !ty.results().is_empty() {
+ return Err(BinaryReaderError::new(
+ "invalid exception type: non-empty tag result type",
+ offset,
+ ));
+ }
+ Ok(())
+ }
+
+ fn check_global_type(
+ &self,
+ ty: &GlobalType,
+ features: &WasmFeatures,
+ types: &TypeList,
+ offset: usize,
+ ) -> Result<()> {
+ self.check_value_type(ty.content_type, features, types, offset)
+ }
+
+ fn check_limits<T>(&self, initial: T, maximum: Option<T>, offset: usize) -> Result<()>
+ where
+ T: Into<u64>,
+ {
+ if let Some(max) = maximum {
+ if initial.into() > max.into() {
+ return Err(BinaryReaderError::new(
+ "size minimum must not be greater than maximum",
+ offset,
+ ));
+ }
+ }
+ Ok(())
+ }
+
+ pub fn max_tables(&self, features: &WasmFeatures) -> usize {
+ if features.reference_types {
+ MAX_WASM_TABLES
+ } else {
+ 1
+ }
+ }
+
+ pub fn max_memories(&self, features: &WasmFeatures) -> usize {
+ if features.multi_memory {
+ MAX_WASM_MEMORIES
+ } else {
+ 1
+ }
+ }
+
+ pub fn export_to_entity_type(
+ &mut self,
+ export: &crate::Export,
+ offset: usize,
+ ) -> Result<EntityType> {
+ let check = |ty: &str, index: u32, total: usize| {
+ if index as usize >= total {
+ Err(format_err!(
+ offset,
+ "unknown {ty} {index}: exported {ty} index out of bounds",
+ ))
+ } else {
+ Ok(())
+ }
+ };
+
+ Ok(match export.kind {
+ ExternalKind::Func => {
+ check("function", export.index, self.functions.len())?;
+ self.function_references.insert(export.index);
+ EntityType::Func(self.types[self.functions[export.index as usize] as usize])
+ }
+ ExternalKind::Table => {
+ check("table", export.index, self.tables.len())?;
+ EntityType::Table(self.tables[export.index as usize])
+ }
+ ExternalKind::Memory => {
+ check("memory", export.index, self.memories.len())?;
+ EntityType::Memory(self.memories[export.index as usize])
+ }
+ ExternalKind::Global => {
+ check("global", export.index, self.globals.len())?;
+ EntityType::Global(self.globals[export.index as usize])
+ }
+ ExternalKind::Tag => {
+ check("tag", export.index, self.tags.len())?;
+ EntityType::Tag(self.tags[export.index as usize])
+ }
+ })
+ }
+
+ pub fn get_func_type<'a>(
+ &self,
+ func_idx: u32,
+ types: &'a TypeList,
+ offset: usize,
+ ) -> Result<&'a FuncType> {
+ match self.functions.get(func_idx as usize) {
+ Some(idx) => self.func_type_at(*idx, types, offset),
+ None => Err(format_err!(
+ offset,
+ "unknown function {func_idx}: func index out of bounds",
+ )),
+ }
+ }
+
+ fn global_at(&self, idx: u32, offset: usize) -> Result<&GlobalType> {
+ match self.globals.get(idx as usize) {
+ Some(t) => Ok(t),
+ None => Err(format_err!(
+ offset,
+ "unknown global {idx}: global index out of bounds"
+ )),
+ }
+ }
+
+ fn table_at(&self, idx: u32, offset: usize) -> Result<&TableType> {
+ match self.tables.get(idx as usize) {
+ Some(t) => Ok(t),
+ None => Err(format_err!(
+ offset,
+ "unknown table {idx}: table index out of bounds"
+ )),
+ }
+ }
+
+ fn memory_at(&self, idx: u32, offset: usize) -> Result<&MemoryType> {
+ match self.memories.get(idx as usize) {
+ Some(t) => Ok(t),
+ None => Err(format_err!(
+ offset,
+ "unknown memory {idx}: memory index out of bounds"
+ )),
+ }
+ }
+}
+
+impl Default for Module {
+ fn default() -> Self {
+ Self {
+ snapshot: Default::default(),
+ types: Default::default(),
+ tables: Default::default(),
+ memories: Default::default(),
+ globals: Default::default(),
+ element_types: Default::default(),
+ data_count: Default::default(),
+ functions: Default::default(),
+ tags: Default::default(),
+ function_references: Default::default(),
+ imports: Default::default(),
+ exports: Default::default(),
+ type_size: 1,
+ num_imported_globals: Default::default(),
+ num_imported_functions: Default::default(),
+ }
+ }
+}
+
+struct OperatorValidatorResources<'a> {
+ module: &'a mut MaybeOwned<Module>,
+ types: &'a TypeList,
+}
+
+impl WasmModuleResources for OperatorValidatorResources<'_> {
+ type FuncType = crate::FuncType;
+
+ fn table_at(&self, at: u32) -> Option<TableType> {
+ self.module.tables.get(at as usize).cloned()
+ }
+
+ fn memory_at(&self, at: u32) -> Option<MemoryType> {
+ self.module.memories.get(at as usize).cloned()
+ }
+
+ fn tag_at(&self, at: u32) -> Option<&Self::FuncType> {
+ Some(
+ self.types[*self.module.tags.get(at as usize)?]
+ .as_func_type()
+ .unwrap(),
+ )
+ }
+
+ fn global_at(&self, at: u32) -> Option<GlobalType> {
+ self.module.globals.get(at as usize).cloned()
+ }
+
+ fn func_type_at(&self, at: u32) -> Option<&Self::FuncType> {
+ Some(
+ self.types[*self.module.types.get(at as usize)?]
+ .as_func_type()
+ .unwrap(),
+ )
+ }
+
+ fn type_index_of_function(&self, at: u32) -> Option<u32> {
+ self.module.functions.get(at as usize).cloned()
+ }
+
+ fn type_of_function(&self, at: u32) -> Option<&Self::FuncType> {
+ self.func_type_at(self.type_index_of_function(at)?)
+ }
+
+ fn check_value_type(&self, t: ValType, features: &WasmFeatures, offset: usize) -> Result<()> {
+ self.module
+ .check_value_type(t, features, self.types, offset)
+ }
+
+ fn element_type_at(&self, at: u32) -> Option<RefType> {
+ self.module.element_types.get(at as usize).cloned()
+ }
+
+ fn matches(&self, t1: ValType, t2: ValType) -> bool {
+ self.module.matches(t1, t2, self.types)
+ }
+
+ fn element_count(&self) -> u32 {
+ self.module.element_types.len() as u32
+ }
+
+ fn data_count(&self) -> Option<u32> {
+ self.module.data_count
+ }
+
+ fn is_function_referenced(&self, idx: u32) -> bool {
+ self.module.function_references.contains(&idx)
+ }
+}
+
+/// The implementation of [`WasmModuleResources`] used by
+/// [`Validator`](crate::Validator).
+pub struct ValidatorResources(pub(crate) Arc<Module>);
+
+impl WasmModuleResources for ValidatorResources {
+ type FuncType = crate::FuncType;
+
+ fn table_at(&self, at: u32) -> Option<TableType> {
+ self.0.tables.get(at as usize).cloned()
+ }
+
+ fn memory_at(&self, at: u32) -> Option<MemoryType> {
+ self.0.memories.get(at as usize).cloned()
+ }
+
+ fn tag_at(&self, at: u32) -> Option<&Self::FuncType> {
+ Some(
+ self.0.snapshot.as_ref().unwrap()[*self.0.tags.get(at as usize)?]
+ .as_func_type()
+ .unwrap(),
+ )
+ }
+
+ fn global_at(&self, at: u32) -> Option<GlobalType> {
+ self.0.globals.get(at as usize).cloned()
+ }
+
+ fn func_type_at(&self, at: u32) -> Option<&Self::FuncType> {
+ Some(
+ self.0.snapshot.as_ref().unwrap()[*self.0.types.get(at as usize)?]
+ .as_func_type()
+ .unwrap(),
+ )
+ }
+
+ fn type_index_of_function(&self, at: u32) -> Option<u32> {
+ self.0.functions.get(at as usize).cloned()
+ }
+
+ fn type_of_function(&self, at: u32) -> Option<&Self::FuncType> {
+ self.func_type_at(self.type_index_of_function(at)?)
+ }
+
+ fn check_value_type(&self, t: ValType, features: &WasmFeatures, offset: usize) -> Result<()> {
+ self.0
+ .check_value_type(t, features, self.0.snapshot.as_ref().unwrap(), offset)
+ }
+
+ fn element_type_at(&self, at: u32) -> Option<RefType> {
+ self.0.element_types.get(at as usize).cloned()
+ }
+
+ fn matches(&self, t1: ValType, t2: ValType) -> bool {
+ self.0.matches(t1, t2, self.0.snapshot.as_ref().unwrap())
+ }
+
+ fn element_count(&self) -> u32 {
+ self.0.element_types.len() as u32
+ }
+
+ fn data_count(&self) -> Option<u32> {
+ self.0.data_count
+ }
+
+ fn is_function_referenced(&self, idx: u32) -> bool {
+ self.0.function_references.contains(&idx)
+ }
+}
+
+const _: () = {
+ fn assert_send<T: Send>() {}
+
+ // Assert that `ValidatorResources` is Send so function validation
+ // can be parallelizable
+ fn assert() {
+ assert_send::<ValidatorResources>();
+ }
+};
+
+mod arc {
+ use std::ops::Deref;
+ use std::sync::Arc;
+
+ enum Inner<T> {
+ Owned(T),
+ Shared(Arc<T>),
+
+ Empty, // Only used for swapping from owned to shared.
+ }
+
+ pub struct MaybeOwned<T> {
+ inner: Inner<T>,
+ }
+
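+    // Editorial sketch of the intended lifecycle (the method names below are the
+    // real ones in this impl): while the module is still being built the validator
+    // mutates the owned value through `assert_mut()`; once the code section begins,
+    // `arc()` converts the value into a shared `Arc` so cheap clones can be handed
+    // to per-function validators, after which `assert_mut()` would panic.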
+ impl<T> MaybeOwned<T> {
+ #[inline]
+ fn as_mut(&mut self) -> Option<&mut T> {
+ match &mut self.inner {
+ Inner::Owned(x) => Some(x),
+ Inner::Shared(_) => None,
+ Inner::Empty => Self::unreachable(),
+ }
+ }
+
+ #[inline]
+ pub fn assert_mut(&mut self) -> &mut T {
+ self.as_mut().unwrap()
+ }
+
+ pub fn arc(&mut self) -> &Arc<T> {
+ self.make_shared();
+ match &self.inner {
+ Inner::Shared(x) => x,
+ _ => Self::unreachable(),
+ }
+ }
+
+ #[inline]
+ fn make_shared(&mut self) {
+ if let Inner::Shared(_) = self.inner {
+ return;
+ }
+
+ let inner = std::mem::replace(&mut self.inner, Inner::Empty);
+ let x = match inner {
+ Inner::Owned(x) => x,
+ _ => Self::unreachable(),
+ };
+ let x = Arc::new(x);
+ self.inner = Inner::Shared(x);
+ }
+
+ #[cold]
+ #[inline(never)]
+ fn unreachable() -> ! {
+ unreachable!()
+ }
+ }
+
+ impl<T: Default> Default for MaybeOwned<T> {
+ fn default() -> MaybeOwned<T> {
+ MaybeOwned {
+ inner: Inner::Owned(T::default()),
+ }
+ }
+ }
+
+ impl<T> Deref for MaybeOwned<T> {
+ type Target = T;
+
+ fn deref(&self) -> &T {
+ match &self.inner {
+ Inner::Owned(x) => x,
+ Inner::Shared(x) => x,
+ Inner::Empty => Self::unreachable(),
+ }
+ }
+ }
+}
diff --git a/third_party/rust/wasmparser/src/validator/func.rs b/third_party/rust/wasmparser/src/validator/func.rs
new file mode 100644
index 0000000000..4d405f9615
--- /dev/null
+++ b/third_party/rust/wasmparser/src/validator/func.rs
@@ -0,0 +1,348 @@
+use super::operators::{Frame, OperatorValidator, OperatorValidatorAllocations};
+use crate::{BinaryReader, Result, ValType, VisitOperator};
+use crate::{FunctionBody, Operator, WasmFeatures, WasmModuleResources};
+
+/// Resources necessary to perform validation of a function.
+///
+/// This structure is created by
+/// [`Validator::code_section_entry`](crate::Validator::code_section_entry) and
+/// is created per-function in a WebAssembly module. This structure is suitable
+/// for sending to other threads while the original
+/// [`Validator`](crate::Validator) continues processing other functions.
+pub struct FuncToValidate<T> {
+ resources: T,
+ index: u32,
+ ty: u32,
+ features: WasmFeatures,
+}
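+
+// Editorial sketch (not part of the upstream docs) of a typical use of the types in
+// this file, assuming `func` is a `FuncToValidate` obtained from
+// `Validator::code_section_entry` and `body` is the matching `FunctionBody`:
+//
+//     let mut validator = func.into_validator(FuncValidatorAllocations::default());
+//     validator.validate(&body)?;
+//     let allocs = validator.into_allocations(); // reuse for the next function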
+
+impl<T: WasmModuleResources> FuncToValidate<T> {
+ /// Creates a new function to validate which will have the specified
+ /// configuration parameters:
+ ///
+ /// * `index` - the core wasm function index being validated
+ /// * `ty` - the core wasm type index of the function being validated,
+ /// defining the results and parameters to the function.
+ /// * `resources` - metadata and type information about the module that
+ /// this function is validated within.
+ /// * `features` - enabled WebAssembly features.
+ pub fn new(index: u32, ty: u32, resources: T, features: &WasmFeatures) -> FuncToValidate<T> {
+ FuncToValidate {
+ resources,
+ index,
+ ty,
+ features: *features,
+ }
+ }
+
+ /// Converts this [`FuncToValidate`] into a [`FuncValidator`] using the
+ /// `allocs` provided.
+ ///
+ /// This method, in conjunction with [`FuncValidator::into_allocations`],
+ /// provides a means to reuse allocations across validation of each
+ /// individual function. Note that it is also sufficient to call this
+ /// method with `Default::default()` if no prior allocations are
+ /// available.
+ ///
+ /// # Panics
+ ///
+ /// If a `FuncToValidate` was created with an invalid `ty` index then this
+ /// function will panic.
+ pub fn into_validator(self, allocs: FuncValidatorAllocations) -> FuncValidator<T> {
+ let FuncToValidate {
+ resources,
+ index,
+ ty,
+ features,
+ } = self;
+ let validator =
+ OperatorValidator::new_func(ty, 0, &features, &resources, allocs.0).unwrap();
+ FuncValidator {
+ validator,
+ resources,
+ index,
+ }
+ }
+}
+
+/// Validation context for a WebAssembly function.
+///
+/// This is a finalized validator which is ready to process a [`FunctionBody`].
+/// This is created from the [`FuncToValidate::into_validator`] method.
+pub struct FuncValidator<T> {
+ validator: OperatorValidator,
+ resources: T,
+ index: u32,
+}
+
+/// External handle to the internal allocations used during function validation.
+///
+/// This is created with either the `Default` implementation or with
+/// [`FuncValidator::into_allocations`]. It is then passed as an argument to
+/// [`FuncToValidate::into_validator`] to provide a means of reusing allocations
+/// between each function.
+#[derive(Default)]
+pub struct FuncValidatorAllocations(OperatorValidatorAllocations);
+
+impl<T: WasmModuleResources> FuncValidator<T> {
+ /// Convenience function to validate an entire function's body.
+ ///
+ /// You may not end up using this in final implementations because you'll
+ /// often want to interleave validation with parsing.
+ pub fn validate(&mut self, body: &FunctionBody<'_>) -> Result<()> {
+ let mut reader = body.get_binary_reader();
+ self.read_locals(&mut reader)?;
+ reader.allow_memarg64(self.validator.features.memory64);
+ while !reader.eof() {
+ reader.visit_operator(&mut self.visitor(reader.original_position()))??;
+ }
+ self.finish(reader.original_position())
+ }
+
+ /// Reads the local definitions from the given `BinaryReader`, often sourced
+ /// from a `FunctionBody`.
+ ///
+ /// This function will automatically advance the `BinaryReader` forward,
+ /// leaving reading operators up to the caller afterwards.
+ pub fn read_locals(&mut self, reader: &mut BinaryReader<'_>) -> Result<()> {
+ for _ in 0..reader.read_var_u32()? {
+ let offset = reader.original_position();
+ let cnt = reader.read()?;
+ let ty = reader.read()?;
+ self.define_locals(offset, cnt, ty)?;
+ }
+ Ok(())
+ }
+
+ /// Defines locals into this validator.
+ ///
+ /// This should be used if the application is already reading local
+ /// definitions and there's no need to re-parse the function again.
+ pub fn define_locals(&mut self, offset: usize, count: u32, ty: ValType) -> Result<()> {
+ self.validator
+ .define_locals(offset, count, ty, &self.resources)
+ }
+
+ /// Validates the next operator in a function.
+ ///
+    /// This function is expected to be called once per operator in a
+ /// WebAssembly function. Each operator's offset in the original binary and
+ /// the operator itself are passed to this function to provide more useful
+ /// error messages.
+ pub fn op(&mut self, offset: usize, operator: &Operator<'_>) -> Result<()> {
+ self.visitor(offset).visit_operator(operator)
+ }
+
+ /// Get the operator visitor for the next operator in the function.
+ ///
+ /// The returned visitor is intended to visit just one instruction at the `offset`.
+ ///
+ /// # Example
+ ///
+ /// ```
+ /// # use wasmparser::{WasmModuleResources, FuncValidator, FunctionBody, Result};
+ /// pub fn validate<R>(validator: &mut FuncValidator<R>, body: &FunctionBody<'_>) -> Result<()>
+ /// where R: WasmModuleResources
+ /// {
+ /// let mut operator_reader = body.get_binary_reader();
+ /// while !operator_reader.eof() {
+ /// let mut visitor = validator.visitor(operator_reader.original_position());
+ /// operator_reader.visit_operator(&mut visitor)??;
+ /// }
+ /// validator.finish(operator_reader.original_position())
+ /// }
+ /// ```
+ pub fn visitor<'this, 'a: 'this>(
+ &'this mut self,
+ offset: usize,
+ ) -> impl VisitOperator<'a, Output = Result<()>> + 'this {
+ self.validator.with_resources(&self.resources, offset)
+ }
+
+ /// Function that must be called after the last opcode has been processed.
+ ///
+ /// This will validate that the function was properly terminated with the
+ /// `end` opcode. If this function is not called then the function will not
+ /// be properly validated.
+ ///
+ /// The `offset` provided to this function will be used as a position for an
+ /// error if validation fails.
+ pub fn finish(&mut self, offset: usize) -> Result<()> {
+ self.validator.finish(offset)
+ }
+
+ /// Returns the underlying module resources that this validator is using.
+ pub fn resources(&self) -> &T {
+ &self.resources
+ }
+
+ /// The index of the function within the module's function index space that
+ /// is being validated.
+ pub fn index(&self) -> u32 {
+ self.index
+ }
+
+ /// Returns the number of defined local variables in the function.
+ pub fn len_locals(&self) -> u32 {
+ self.validator.locals.len_locals()
+ }
+
+ /// Returns the type of the local variable at the given `index` if any.
+ pub fn get_local_type(&self, index: u32) -> Option<ValType> {
+ self.validator.locals.get(index)
+ }
+
+ /// Get the current height of the operand stack.
+ ///
+ /// This returns the height of the whole operand stack for this function,
+ /// not just for the current control frame.
+ pub fn operand_stack_height(&self) -> u32 {
+ self.validator.operand_stack_height() as u32
+ }
+
+ /// Returns the optional value type of the value operand at the given
+ /// `depth` from the top of the operand stack.
+ ///
+ /// - Returns `None` if the `depth` is out of bounds.
+ /// - Returns `Some(None)` if there is a value with unknown type
+ /// at the given `depth`.
+ ///
+ /// # Note
+ ///
+ /// A `depth` of 0 will refer to the last operand on the stack.
+ pub fn get_operand_type(&self, depth: usize) -> Option<Option<ValType>> {
+ self.validator.peek_operand_at(depth)
+ }
+
+ /// Returns the number of frames on the control flow stack.
+ ///
+ /// This returns the height of the whole control stack for this function,
+ /// not just for the current control frame.
+ pub fn control_stack_height(&self) -> u32 {
+ self.validator.control_stack_height() as u32
+ }
+
+ /// Returns a shared reference to the control flow [`Frame`] of the
+ /// control flow stack at the given `depth` if any.
+ ///
+ /// Returns `None` if the `depth` is out of bounds.
+ ///
+ /// # Note
+ ///
+ /// A `depth` of 0 will refer to the last frame on the stack.
+ pub fn get_control_frame(&self, depth: usize) -> Option<&Frame> {
+ self.validator.get_frame(depth)
+ }
+
+ /// Consumes this validator and returns the underlying allocations that
+ /// were used during the validation process.
+ ///
+ /// The returned value here can be paired with
+ /// [`FuncToValidate::into_validator`] to reuse the allocations already
+ /// created by this validator.
+ pub fn into_allocations(self) -> FuncValidatorAllocations {
+ FuncValidatorAllocations(self.validator.into_allocations())
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::WasmFuncType;
+
+ struct EmptyResources;
+
+ impl WasmModuleResources for EmptyResources {
+ type FuncType = EmptyFuncType;
+
+ fn table_at(&self, _at: u32) -> Option<crate::TableType> {
+ todo!()
+ }
+ fn memory_at(&self, _at: u32) -> Option<crate::MemoryType> {
+ todo!()
+ }
+ fn tag_at(&self, _at: u32) -> Option<&Self::FuncType> {
+ todo!()
+ }
+ fn global_at(&self, _at: u32) -> Option<crate::GlobalType> {
+ todo!()
+ }
+ fn func_type_at(&self, _type_idx: u32) -> Option<&Self::FuncType> {
+ Some(&EmptyFuncType)
+ }
+ fn type_index_of_function(&self, _at: u32) -> Option<u32> {
+ todo!()
+ }
+ fn type_of_function(&self, _func_idx: u32) -> Option<&Self::FuncType> {
+ todo!()
+ }
+ fn check_value_type(
+ &self,
+ _t: ValType,
+ _features: &WasmFeatures,
+ _offset: usize,
+ ) -> Result<()> {
+ Ok(())
+ }
+ fn element_type_at(&self, _at: u32) -> Option<crate::RefType> {
+ todo!()
+ }
+ fn matches(&self, _t1: ValType, _t2: ValType) -> bool {
+ todo!()
+ }
+ fn element_count(&self) -> u32 {
+ todo!()
+ }
+ fn data_count(&self) -> Option<u32> {
+ todo!()
+ }
+ fn is_function_referenced(&self, _idx: u32) -> bool {
+ todo!()
+ }
+ }
+
+ struct EmptyFuncType;
+
+ impl WasmFuncType for EmptyFuncType {
+ fn len_inputs(&self) -> usize {
+ 0
+ }
+ fn len_outputs(&self) -> usize {
+ 0
+ }
+ fn input_at(&self, _at: u32) -> Option<ValType> {
+ todo!()
+ }
+ fn output_at(&self, _at: u32) -> Option<ValType> {
+ todo!()
+ }
+ }
+
+ #[test]
+ fn operand_stack_height() {
+ let mut v = FuncToValidate::new(0, 0, EmptyResources, &Default::default())
+ .into_validator(Default::default());
+
+ // Initially zero values on the stack.
+ assert_eq!(v.operand_stack_height(), 0);
+
+        // Pushing a constant value means we have one value on the stack.
+ assert!(v.op(0, &Operator::I32Const { value: 0 }).is_ok());
+ assert_eq!(v.operand_stack_height(), 1);
+
+ // Entering a new control block does not affect the stack height.
+ assert!(v
+ .op(
+ 1,
+ &Operator::Block {
+ blockty: crate::BlockType::Empty
+ }
+ )
+ .is_ok());
+ assert_eq!(v.operand_stack_height(), 1);
+
+        // Pushing another constant value means we have two values on the stack.
+ assert!(v.op(2, &Operator::I32Const { value: 99 }).is_ok());
+ assert_eq!(v.operand_stack_height(), 2);
+ }
+}
diff --git a/third_party/rust/wasmparser/src/validator/operators.rs b/third_party/rust/wasmparser/src/validator/operators.rs
new file mode 100644
index 0000000000..54fee8acc6
--- /dev/null
+++ b/third_party/rust/wasmparser/src/validator/operators.rs
@@ -0,0 +1,3474 @@
+/* Copyright 2019 Mozilla Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// The basic validation algorithm here is copied from the "Validation
+// Algorithm" section of the WebAssembly specification -
+// https://webassembly.github.io/spec/core/appendix/algorithm.html.
+//
+// That algorithm is followed pretty closely here, namely `push_operand`,
+// `pop_operand`, `push_ctrl`, and `pop_ctrl`. If anything here is a bit
+// confusing it's recommended to read over that section to see how it maps to
+// the various methods here.
+
+use crate::{
+ limits::MAX_WASM_FUNCTION_LOCALS, BinaryReaderError, BlockType, BrTable, HeapType, Ieee32,
+ Ieee64, MemArg, RefType, Result, ValType, VisitOperator, WasmFeatures, WasmFuncType,
+ WasmModuleResources, V128,
+};
+use std::ops::{Deref, DerefMut};
+
+pub(crate) struct OperatorValidator {
+ pub(super) locals: Locals,
+ pub(super) local_inits: Vec<bool>,
+
+ // This is a list of flags for wasm features which are used to gate various
+ // instructions.
+ pub(crate) features: WasmFeatures,
+
+ // Temporary storage used during the validation of `br_table`.
+ br_table_tmp: Vec<MaybeType>,
+
+ /// The `control` list is the list of blocks that we're currently in.
+ control: Vec<Frame>,
+ /// The `operands` is the current type stack.
+ operands: Vec<MaybeType>,
+    /// When `local_inits` is modified, the relevant index is recorded here so the
+    /// modification can be undone when the enclosing control frame is popped.
+ inits: Vec<u32>,
+
+ /// Offset of the `end` instruction which emptied the `control` stack, which
+ /// must be the end of the function.
+ end_which_emptied_control: Option<usize>,
+}
+
+// No science was performed in the creation of this number, feel free to change
+// it if you so like.
+const MAX_LOCALS_TO_TRACK: usize = 50;
+
+pub(super) struct Locals {
+ // Total number of locals in the function.
+ num_locals: u32,
+
+ // The first MAX_LOCALS_TO_TRACK locals in a function. This is used to
+ // optimize the theoretically common case where most functions don't have
+ // many locals and don't need a full binary search in the entire local space
+ // below.
+ first: Vec<ValType>,
+
+ // This is a "compressed" list of locals for this function. The list of
+ // locals are represented as a list of tuples. The second element is the
+ // type of the local, and the first element is monotonically increasing as
+ // you visit elements of this list. The first element is the maximum index
+ // of the local, after the previous index, of the type specified.
+ //
+ // This allows us to do a binary search on the list for a local's index for
+ // `local.{get,set,tee}`. We do a binary search for the index desired, and
+ // it either lies in a "hole" where the maximum index is specified later,
+ // or it's at the end of the list meaning it's out of bounds.
+ all: Vec<(u32, ValType)>,
+}
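+
+// Editorial example of the compressed representation described above: locals
+// declared as two `i32`s followed by three `i64`s would be stored as
+// `all = [(1, ValType::I32), (4, ValType::I64)]`, with each entry recording the
+// largest local index covered by that run. A lookup of index 3 binary-searches to
+// the `(4, I64)` entry, while index 5 falls past the end and is out of bounds.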
+
+/// A Wasm control flow block on the control flow stack during Wasm validation.
+//
+// # Dev. Note
+//
+// This structure corresponds to `ctrl_frame` as specified in the validation
+// appendix of the wasm spec.
+#[derive(Debug, Copy, Clone)]
+pub struct Frame {
+ /// Indicator for what kind of instruction pushed this frame.
+ pub kind: FrameKind,
+ /// The type signature of this frame, represented as a singular return type
+ /// or a type index pointing into the module's types.
+ pub block_type: BlockType,
+ /// The index, below which, this frame cannot modify the operand stack.
+ pub height: usize,
+ /// Whether this frame is unreachable so far.
+ pub unreachable: bool,
+ /// The number of initializations in the stack at the time of its creation
+ pub init_height: usize,
+}
+
+/// The kind of a control flow [`Frame`].
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum FrameKind {
+ /// A Wasm `block` control block.
+ Block,
+ /// A Wasm `if` control block.
+ If,
+ /// A Wasm `else` control block.
+ Else,
+ /// A Wasm `loop` control block.
+ Loop,
+ /// A Wasm `try` control block.
+ ///
+ /// # Note
+ ///
+ /// This belongs to the Wasm exception handling proposal.
+ Try,
+ /// A Wasm `catch` control block.
+ ///
+ /// # Note
+ ///
+ /// This belongs to the Wasm exception handling proposal.
+ Catch,
+ /// A Wasm `catch_all` control block.
+ ///
+ /// # Note
+ ///
+ /// This belongs to the Wasm exception handling proposal.
+ CatchAll,
+}
+
+struct OperatorValidatorTemp<'validator, 'resources, T> {
+ offset: usize,
+ inner: &'validator mut OperatorValidator,
+ resources: &'resources T,
+}
+
+#[derive(Default)]
+pub struct OperatorValidatorAllocations {
+ br_table_tmp: Vec<MaybeType>,
+ control: Vec<Frame>,
+ operands: Vec<MaybeType>,
+ local_inits: Vec<bool>,
+ inits: Vec<u32>,
+ locals_first: Vec<ValType>,
+ locals_all: Vec<(u32, ValType)>,
+}
+
+/// Type storage within the validator.
+///
+/// This is used to manage the operand stack and notably isn't just `ValType` to
+/// handle unreachable code and the "bottom" type.
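+///
+/// For example, a pop performed in unreachable code may yield `Bot`, which is
+/// treated as matching any expected type, while `HeapBot` only matches
+/// reference types (see `_pop_operand` below).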
+#[derive(Debug, Copy, Clone)]
+enum MaybeType {
+ Bot,
+ HeapBot,
+ Type(ValType),
+}
+
+// The validator is pretty performance-sensitive and `MaybeType` is the main
+// unit of storage, so assert that it doesn't exceed 4 bytes which is the
+// current expected size.
+const _: () = {
+ assert!(std::mem::size_of::<MaybeType>() == 4);
+};
+
+impl From<ValType> for MaybeType {
+ fn from(ty: ValType) -> MaybeType {
+ MaybeType::Type(ty)
+ }
+}
+
+impl OperatorValidator {
+ fn new(features: &WasmFeatures, allocs: OperatorValidatorAllocations) -> Self {
+ let OperatorValidatorAllocations {
+ br_table_tmp,
+ control,
+ operands,
+ local_inits,
+ inits,
+ locals_first,
+ locals_all,
+ } = allocs;
+ debug_assert!(br_table_tmp.is_empty());
+ debug_assert!(control.is_empty());
+ debug_assert!(operands.is_empty());
+ debug_assert!(local_inits.is_empty());
+ debug_assert!(inits.is_empty());
+ debug_assert!(locals_first.is_empty());
+ debug_assert!(locals_all.is_empty());
+ OperatorValidator {
+ locals: Locals {
+ num_locals: 0,
+ first: locals_first,
+ all: locals_all,
+ },
+ local_inits,
+ inits,
+ features: *features,
+ br_table_tmp,
+ operands,
+ control,
+ end_which_emptied_control: None,
+ }
+ }
+
+ /// Creates a new operator validator which will be used to validate a
+ /// function whose type is the `ty` index specified.
+ ///
+ /// The `resources` are used to learn about the function type underlying
+ /// `ty`.
+ pub fn new_func<T>(
+ ty: u32,
+ offset: usize,
+ features: &WasmFeatures,
+ resources: &T,
+ allocs: OperatorValidatorAllocations,
+ ) -> Result<Self>
+ where
+ T: WasmModuleResources,
+ {
+ let mut ret = OperatorValidator::new(features, allocs);
+ ret.control.push(Frame {
+ kind: FrameKind::Block,
+ block_type: BlockType::FuncType(ty),
+ height: 0,
+ unreachable: false,
+ init_height: 0,
+ });
+ let params = OperatorValidatorTemp {
+ // This offset is used by the `func_type_at` and `inputs` calls below.
+ offset,
+ inner: &mut ret,
+ resources,
+ }
+ .func_type_at(ty)?
+ .inputs();
+ for ty in params {
+ ret.locals.define(1, ty);
+ ret.local_inits.push(true);
+ }
+ Ok(ret)
+ }
+
+ /// Creates a new operator validator which will be used to validate an
+ /// `init_expr` constant expression which should result in the `ty`
+ /// specified.
+ pub fn new_const_expr(
+ features: &WasmFeatures,
+ ty: ValType,
+ allocs: OperatorValidatorAllocations,
+ ) -> Self {
+ let mut ret = OperatorValidator::new(features, allocs);
+ ret.control.push(Frame {
+ kind: FrameKind::Block,
+ block_type: BlockType::Type(ty),
+ height: 0,
+ unreachable: false,
+ init_height: 0,
+ });
+ ret
+ }
+
+ pub fn define_locals(
+ &mut self,
+ offset: usize,
+ count: u32,
+ ty: ValType,
+ resources: &impl WasmModuleResources,
+ ) -> Result<()> {
+ resources.check_value_type(ty, &self.features, offset)?;
+ if count == 0 {
+ return Ok(());
+ }
+ if !self.locals.define(count, ty) {
+ return Err(BinaryReaderError::new(
+ "too many locals: locals exceed maximum",
+ offset,
+ ));
+ }
+ self.local_inits
+ .resize(self.local_inits.len() + count as usize, ty.is_defaultable());
+ Ok(())
+ }
+
+ /// Returns the current height of the operand stack.
+ pub fn operand_stack_height(&self) -> usize {
+ self.operands.len()
+ }
+
+ /// Returns the optional value type of the value operand at the given
+ /// `depth` from the top of the operand stack.
+ ///
+ /// - Returns `None` if the `depth` is out of bounds.
+ /// - Returns `Some(None)` if there is a value with unknown type
+ /// at the given `depth`.
+ ///
+ /// # Note
+ ///
+ /// A `depth` of 0 will refer to the last operand on the stack.
+ pub fn peek_operand_at(&self, depth: usize) -> Option<Option<ValType>> {
+ Some(match self.operands.iter().rev().nth(depth)? {
+ MaybeType::Type(t) => Some(*t),
+ MaybeType::Bot | MaybeType::HeapBot => None,
+ })
+ }
+
+ /// Returns the number of frames on the control flow stack.
+ pub fn control_stack_height(&self) -> usize {
+ self.control.len()
+ }
+
+ pub fn get_frame(&self, depth: usize) -> Option<&Frame> {
+ self.control.iter().rev().nth(depth)
+ }
+
+ /// Create a temporary [`OperatorValidatorTemp`] for validation.
+ pub fn with_resources<'a, 'validator, 'resources, T>(
+ &'validator mut self,
+ resources: &'resources T,
+ offset: usize,
+ ) -> impl VisitOperator<'a, Output = Result<()>> + 'validator
+ where
+ T: WasmModuleResources,
+ 'resources: 'validator,
+ {
+ WasmProposalValidator(OperatorValidatorTemp {
+ offset,
+ inner: self,
+ resources,
+ })
+ }
+
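+ /// Called after all operators have been visited to check that the function
+ /// body was terminated by exactly one final `end` opcode with nothing
+ /// following it.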
+ pub fn finish(&mut self, offset: usize) -> Result<()> {
+ if self.control.last().is_some() {
+ bail!(
+ offset,
+ "control frames remain at end of function: END opcode expected"
+ );
+ }
+
+ // The `end` opcode is one byte which means that the `offset` here
+ // should point just beyond the `end` opcode which emptied the control
+ // stack. If not that means more instructions were present after the
+ // control stack was emptied.
+ if offset != self.end_which_emptied_control.unwrap() + 1 {
+ return Err(self.err_beyond_end(offset));
+ }
+ Ok(())
+ }
+
+ fn err_beyond_end(&self, offset: usize) -> BinaryReaderError {
+ format_err!(offset, "operators remaining after end of function")
+ }
+
+ pub fn into_allocations(self) -> OperatorValidatorAllocations {
+ fn truncate<T>(mut tmp: Vec<T>) -> Vec<T> {
+ tmp.truncate(0);
+ tmp
+ }
+ OperatorValidatorAllocations {
+ br_table_tmp: truncate(self.br_table_tmp),
+ control: truncate(self.control),
+ operands: truncate(self.operands),
+ local_inits: truncate(self.local_inits),
+ inits: truncate(self.inits),
+ locals_first: truncate(self.locals.first),
+ locals_all: truncate(self.locals.all),
+ }
+ }
+}
+
+impl<R> Deref for OperatorValidatorTemp<'_, '_, R> {
+ type Target = OperatorValidator;
+ fn deref(&self) -> &OperatorValidator {
+ self.inner
+ }
+}
+
+impl<R> DerefMut for OperatorValidatorTemp<'_, '_, R> {
+ fn deref_mut(&mut self) -> &mut OperatorValidator {
+ self.inner
+ }
+}
+
+impl<'resources, R: WasmModuleResources> OperatorValidatorTemp<'_, 'resources, R> {
+ /// Pushes a type onto the operand stack.
+ ///
+ /// This is used by instructions to represent a value that is pushed to the
+ /// operand stack. As written this always succeeds, but it returns a
+ /// `Result` to match the other stack operations.
+ fn push_operand<T>(&mut self, ty: T) -> Result<()>
+ where
+ T: Into<MaybeType>,
+ {
+ let maybe_ty = ty.into();
+ self.operands.push(maybe_ty);
+ Ok(())
+ }
+
+ /// Attempts to pop a type from the operand stack.
+ ///
+ /// This function is used to remove types from the operand stack. The
+ /// `expected` argument can be used to indicate that a type is required, or
+ /// simply that something is needed to be popped.
+ ///
+ /// If `expected` is `Some(T)` then this will be guaranteed to return
+ /// `T`, and it will only return success if the current block is
+ /// unreachable or if `T` was found at the top of the operand stack.
+ ///
+ /// If `expected` is `None` then it indicates that something must be on the
+ /// operand stack, but it doesn't matter what's on the operand stack. This
+ /// is useful for polymorphic instructions like `select`.
+ ///
+ /// If `Some(T)` is returned then `T` was popped from the operand stack and
+ /// matches `expected`. If `None` is returned then it means that `None` was
+ /// expected and a type was successfully popped, but its exact type is
+ /// indeterminate because the current block is unreachable.
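+ ///
+ /// For example, validating `i32.add` pops two operands with
+ /// `expected = Some(ValType::I32)` and then pushes a single `i32` back
+ /// (see `check_binary_op` below).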
+ fn pop_operand(&mut self, expected: Option<ValType>) -> Result<MaybeType> {
+ // This method is one of the hottest methods in the validator so to
+ // improve codegen this method contains a fast-path success case where
+ // if the top operand on the stack is as expected it's returned
+ // immediately. This is the most common case where the stack will indeed
+ // have the expected type and all we need to do is pop it off.
+ //
+ // Note that this still has to be careful to be correct, though. For
+ // efficiency an operand is unconditionally popped and on success it is
+ // matched against the state of the world to see if we could actually
+ // pop it. If we shouldn't have popped it then it's passed to the slow
+ // path to get pushed back onto the stack.
+ let popped = match self.operands.pop() {
+ Some(MaybeType::Type(actual_ty)) => {
+ if Some(actual_ty) == expected {
+ if let Some(control) = self.control.last() {
+ if self.operands.len() >= control.height {
+ return Ok(MaybeType::Type(actual_ty));
+ }
+ }
+ }
+ Some(MaybeType::Type(actual_ty))
+ }
+ other => other,
+ };
+
+ self._pop_operand(expected, popped)
+ }
+
+ // This is the "real" implementation of `pop_operand` which is 100%
+ // spec-compliant with little attention paid to efficiency since this is the
+ // slow-path from the actual `pop_operand` function above.
+ #[cold]
+ fn _pop_operand(
+ &mut self,
+ expected: Option<ValType>,
+ popped: Option<MaybeType>,
+ ) -> Result<MaybeType> {
+ self.operands.extend(popped);
+ let control = match self.control.last() {
+ Some(c) => c,
+ None => return Err(self.err_beyond_end(self.offset)),
+ };
+ let actual = if self.operands.len() == control.height && control.unreachable {
+ MaybeType::Bot
+ } else {
+ if self.operands.len() == control.height {
+ let desc = match expected {
+ Some(ty) => ty_to_str(ty),
+ None => "a type",
+ };
+ bail!(
+ self.offset,
+ "type mismatch: expected {desc} but nothing on stack"
+ )
+ } else {
+ self.operands.pop().unwrap()
+ }
+ };
+ if let Some(expected) = expected {
+ match (actual, expected) {
+ // The bottom type matches all expectations
+ (MaybeType::Bot, _)
+ // The "heap bottom" type only matches other references types,
+ // but not any integer types.
+ | (MaybeType::HeapBot, ValType::Ref(_)) => {}
+
+ // Use the `matches` predicate to test if a found type matches
+ // the expectation.
+ (MaybeType::Type(actual), expected) => {
+ if !self.resources.matches(actual, expected) {
+ bail!(
+ self.offset,
+ "type mismatch: expected {}, found {}",
+ ty_to_str(expected),
+ ty_to_str(actual)
+ );
+ }
+ }
+
+ // A "heap bottom" type cannot match any numeric types.
+ (
+ MaybeType::HeapBot,
+ ValType::I32 | ValType::I64 | ValType::F32 | ValType::F64 | ValType::V128,
+ ) => {
+ bail!(
+ self.offset,
+ "type mismatche: expected {}, found heap type",
+ ty_to_str(expected)
+ )
+ }
+ }
+ }
+ Ok(actual)
+ }
+
+ fn pop_ref(&mut self) -> Result<Option<RefType>> {
+ match self.pop_operand(None)? {
+ MaybeType::Bot | MaybeType::HeapBot => Ok(None),
+ MaybeType::Type(ValType::Ref(rt)) => Ok(Some(rt)),
+ MaybeType::Type(ty) => bail!(
+ self.offset,
+ "type mismatch: expected ref but found {}",
+ ty_to_str(ty)
+ ),
+ }
+ }
+
+ /// Fetches the type for the local at `idx`, returning an error if it's out
+ /// of bounds.
+ fn local(&self, idx: u32) -> Result<ValType> {
+ match self.locals.get(idx) {
+ Some(ty) => Ok(ty),
+ None => bail!(
+ self.offset,
+ "unknown local {}: local index out of bounds",
+ idx
+ ),
+ }
+ }
+
+ /// Flags the current control frame as unreachable, additionally truncating
+ /// the currently active operand stack.
+ fn unreachable(&mut self) -> Result<()> {
+ let control = match self.control.last_mut() {
+ Some(frame) => frame,
+ None => return Err(self.err_beyond_end(self.offset)),
+ };
+ control.unreachable = true;
+ let new_height = control.height;
+ self.operands.truncate(new_height);
+ Ok(())
+ }
+
+ /// Pushes a new frame onto the control stack.
+ ///
+ /// This operation is used when entering a new block such as an if, loop,
+ /// or block itself. The `kind` of block is specified which indicates how
+ /// breaks interact with this block's type. Additionally the type signature
+ /// of the block is specified by `ty`.
+ fn push_ctrl(&mut self, kind: FrameKind, ty: BlockType) -> Result<()> {
+ // Push a new frame which has a snapshot of the height of the current
+ // operand stack.
+ let height = self.operands.len();
+ let init_height = self.inits.len();
+ self.control.push(Frame {
+ kind,
+ block_type: ty,
+ height,
+ unreachable: false,
+ init_height,
+ });
+ // All of the parameters are now also available in this control frame,
+ // so we push them here in order.
+ for ty in self.params(ty)? {
+ self.push_operand(ty)?;
+ }
+ Ok(())
+ }
+
+ /// Pops a frame from the control stack.
+ ///
+ /// This function is used when exiting a block and leaves a block scope.
+ /// Internally this will validate that blocks have the correct result type.
+ fn pop_ctrl(&mut self) -> Result<Frame> {
+ // Read the expected type and expected height of the operand stack at
+ // the end of the frame.
+ let frame = match self.control.last() {
+ Some(f) => f,
+ None => return Err(self.err_beyond_end(self.offset)),
+ };
+ let ty = frame.block_type;
+ let height = frame.height;
+ let init_height = frame.init_height;
+
+ // reset_locals in the spec
+ for init in self.inits.split_off(init_height) {
+ self.local_inits[init as usize] = false;
+ }
+
+ // Pop all the result types, in reverse order, from the operand stack.
+ // These types will, possibly, be transferred to the next frame.
+ for ty in self.results(ty)?.rev() {
+ self.pop_operand(Some(ty))?;
+ }
+
+ // Make sure that the operand stack has returned to its original
+ // height...
+ if self.operands.len() != height {
+ bail!(
+ self.offset,
+ "type mismatch: values remaining on stack at end of block"
+ );
+ }
+
+ // And then we can remove it!
+ Ok(self.control.pop().unwrap())
+ }
+
+ /// Validates a relative jump to the `depth` specified.
+ ///
+ /// Returns the type signature of the block that we're jumping to as well
+ /// as the kind of block if the jump is valid. Otherwise returns an error.
+ fn jump(&self, depth: u32) -> Result<(BlockType, FrameKind)> {
+ if self.control.is_empty() {
+ return Err(self.err_beyond_end(self.offset));
+ }
+ match (self.control.len() - 1).checked_sub(depth as usize) {
+ Some(i) => {
+ let frame = &self.control[i];
+ Ok((frame.block_type, frame.kind))
+ }
+ None => bail!(self.offset, "unknown label: branch depth too large"),
+ }
+ }
+
+ /// Validates that `memory_index` is valid in this module, and returns the
+ /// type of address used to index the memory specified.
+ fn check_memory_index(&self, memory_index: u32) -> Result<ValType> {
+ match self.resources.memory_at(memory_index) {
+ Some(mem) => Ok(mem.index_type()),
+ None => bail!(self.offset, "unknown memory {}", memory_index),
+ }
+ }
+
+ /// Validates a `memarg` for alignment and such (also the memory it
+ /// references), and returns the type of index used to address the memory.
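+ ///
+ /// For example, an 8-byte access such as `i64.load` has a `max_align` of 3
+ /// (alignments are encoded as powers of two), so an encoded `align` of 4
+ /// would be rejected here.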
+ fn check_memarg(&self, memarg: MemArg) -> Result<ValType> {
+ let index_ty = self.check_memory_index(memarg.memory)?;
+ if memarg.align > memarg.max_align {
+ bail!(self.offset, "alignment must not be larger than natural");
+ }
+ if index_ty == ValType::I32 && memarg.offset > u64::from(u32::MAX) {
+ bail!(self.offset, "offset out of range: must be <= 2**32");
+ }
+ Ok(index_ty)
+ }
+
+ fn check_floats_enabled(&self) -> Result<()> {
+ if !self.features.floats {
+ bail!(self.offset, "floating-point instruction disallowed");
+ }
+ Ok(())
+ }
+
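+ /// Checks the memory argument of an atomic instruction which, unlike plain
+ /// loads and stores, must specify exactly its natural alignment (e.g.
+ /// `align == 2` for a 4-byte `i32` atomic access).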
+ fn check_shared_memarg(&self, memarg: MemArg) -> Result<ValType> {
+ if memarg.align != memarg.max_align {
+ bail!(
+ self.offset,
+ "atomic instructions must always specify maximum alignment"
+ );
+ }
+ self.check_memory_index(memarg.memory)
+ }
+
+ fn check_simd_lane_index(&self, index: u8, max: u8) -> Result<()> {
+ if index >= max {
+ bail!(self.offset, "SIMD index out of bounds");
+ }
+ Ok(())
+ }
+
+ /// Validates a block type, primarily with various in-flight proposals.
+ fn check_block_type(&self, ty: BlockType) -> Result<()> {
+ match ty {
+ BlockType::Empty => Ok(()),
+ BlockType::Type(t) => self
+ .resources
+ .check_value_type(t, &self.features, self.offset),
+ BlockType::FuncType(idx) => {
+ if !self.features.multi_value {
+ bail!(
+ self.offset,
+ "blocks, loops, and ifs may only produce a resulttype \
+ when multi-value is not enabled",
+ );
+ }
+ self.func_type_at(idx)?;
+ Ok(())
+ }
+ }
+ }
+
+ /// Validates a `call` instruction, ensuring that the function index is
+ /// in-bounds and the right types are on the stack to call the function.
+ fn check_call(&mut self, function_index: u32) -> Result<()> {
+ let ty = match self.resources.type_index_of_function(function_index) {
+ Some(i) => i,
+ None => {
+ bail!(
+ self.offset,
+ "unknown function {function_index}: function index out of bounds",
+ );
+ }
+ };
+ self.check_call_ty(ty)
+ }
+
+ fn check_call_ty(&mut self, type_index: u32) -> Result<()> {
+ let ty = match self.resources.func_type_at(type_index) {
+ Some(i) => i,
+ None => {
+ bail!(
+ self.offset,
+ "unknown type {type_index}: type index out of bounds",
+ );
+ }
+ };
+ for ty in ty.inputs().rev() {
+ self.pop_operand(Some(ty))?;
+ }
+ for ty in ty.outputs() {
+ self.push_operand(ty)?;
+ }
+ Ok(())
+ }
+
+ /// Validates a call to an indirect function, very similar to `check_call`.
+ fn check_call_indirect(&mut self, index: u32, table_index: u32) -> Result<()> {
+ match self.resources.table_at(table_index) {
+ None => {
+ bail!(self.offset, "unknown table: table index out of bounds");
+ }
+ Some(tab) => {
+ if !self
+ .resources
+ .matches(ValType::Ref(tab.element_type), ValType::FUNCREF)
+ {
+ bail!(
+ self.offset,
+ "indirect calls must go through a table with type <= funcref",
+ );
+ }
+ }
+ }
+ let ty = self.func_type_at(index)?;
+ self.pop_operand(Some(ValType::I32))?;
+ for ty in ty.inputs().rev() {
+ self.pop_operand(Some(ty))?;
+ }
+ for ty in ty.outputs() {
+ self.push_operand(ty)?;
+ }
+ Ok(())
+ }
+
+ /// Validates a `return` instruction, popping types from the operand
+ /// stack that the function needs.
+ fn check_return(&mut self) -> Result<()> {
+ if self.control.is_empty() {
+ return Err(self.err_beyond_end(self.offset));
+ }
+ for ty in self.results(self.control[0].block_type)?.rev() {
+ self.pop_operand(Some(ty))?;
+ }
+ self.unreachable()?;
+ Ok(())
+ }
+
+ /// Checks the validity of a common comparison operator.
+ fn check_cmp_op(&mut self, ty: ValType) -> Result<()> {
+ self.pop_operand(Some(ty))?;
+ self.pop_operand(Some(ty))?;
+ self.push_operand(ValType::I32)?;
+ Ok(())
+ }
+
+ /// Checks the validity of a common float comparison operator.
+ fn check_fcmp_op(&mut self, ty: ValType) -> Result<()> {
+ debug_assert!(matches!(ty, ValType::F32 | ValType::F64));
+ self.check_floats_enabled()?;
+ self.check_cmp_op(ty)
+ }
+
+ /// Checks the validity of a common unary operator.
+ fn check_unary_op(&mut self, ty: ValType) -> Result<()> {
+ self.pop_operand(Some(ty))?;
+ self.push_operand(ty)?;
+ Ok(())
+ }
+
+ /// Checks the validity of a common unary float operator.
+ fn check_funary_op(&mut self, ty: ValType) -> Result<()> {
+ debug_assert!(matches!(ty, ValType::F32 | ValType::F64));
+ self.check_floats_enabled()?;
+ self.check_unary_op(ty)
+ }
+
+ /// Checks the validity of a common conversion operator.
+ fn check_conversion_op(&mut self, into: ValType, from: ValType) -> Result<()> {
+ self.pop_operand(Some(from))?;
+ self.push_operand(into)?;
+ Ok(())
+ }
+
+ /// Checks the validity of a common float conversion operator.
+ fn check_fconversion_op(&mut self, into: ValType, from: ValType) -> Result<()> {
+ debug_assert!(matches!(into, ValType::F32 | ValType::F64));
+ self.check_floats_enabled()?;
+ self.check_conversion_op(into, from)
+ }
+
+ /// Checks the validity of a common binary operator.
+ fn check_binary_op(&mut self, ty: ValType) -> Result<()> {
+ self.pop_operand(Some(ty))?;
+ self.pop_operand(Some(ty))?;
+ self.push_operand(ty)?;
+ Ok(())
+ }
+
+ /// Checks the validity of a common binary float operator.
+ fn check_fbinary_op(&mut self, ty: ValType) -> Result<()> {
+ debug_assert!(matches!(ty, ValType::F32 | ValType::F64));
+ self.check_floats_enabled()?;
+ self.check_binary_op(ty)
+ }
+
+ /// Checks the validity of an atomic load operator.
+ fn check_atomic_load(&mut self, memarg: MemArg, load_ty: ValType) -> Result<()> {
+ let ty = self.check_shared_memarg(memarg)?;
+ self.pop_operand(Some(ty))?;
+ self.push_operand(load_ty)?;
+ Ok(())
+ }
+
+ /// Checks the validity of an atomic store operator.
+ fn check_atomic_store(&mut self, memarg: MemArg, store_ty: ValType) -> Result<()> {
+ let ty = self.check_shared_memarg(memarg)?;
+ self.pop_operand(Some(store_ty))?;
+ self.pop_operand(Some(ty))?;
+ Ok(())
+ }
+
+ /// Checks the validity of a common atomic binary operator.
+ fn check_atomic_binary_op(&mut self, memarg: MemArg, op_ty: ValType) -> Result<()> {
+ let ty = self.check_shared_memarg(memarg)?;
+ self.pop_operand(Some(op_ty))?;
+ self.pop_operand(Some(ty))?;
+ self.push_operand(op_ty)?;
+ Ok(())
+ }
+
+ /// Checks the validity of an atomic compare exchange operator.
+ fn check_atomic_binary_cmpxchg(&mut self, memarg: MemArg, op_ty: ValType) -> Result<()> {
+ let ty = self.check_shared_memarg(memarg)?;
+ self.pop_operand(Some(op_ty))?;
+ self.pop_operand(Some(op_ty))?;
+ self.pop_operand(Some(ty))?;
+ self.push_operand(op_ty)?;
+ Ok(())
+ }
+
+ /// Checks a [`V128`] splat operator.
+ fn check_v128_splat(&mut self, src_ty: ValType) -> Result<()> {
+ self.pop_operand(Some(src_ty))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+
+ /// Checks a [`V128`] binary operator.
+ fn check_v128_binary_op(&mut self) -> Result<()> {
+ self.pop_operand(Some(ValType::V128))?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+
+ /// Checks a [`V128`] binary float operator.
+ fn check_v128_fbinary_op(&mut self) -> Result<()> {
+ self.check_floats_enabled()?;
+ self.check_v128_binary_op()
+ }
+
+ /// Checks a [`V128`] unary operator.
+ fn check_v128_unary_op(&mut self) -> Result<()> {
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+
+ /// Checks a [`V128`] unary float operator.
+ fn check_v128_funary_op(&mut self) -> Result<()> {
+ self.check_floats_enabled()?;
+ self.check_v128_unary_op()
+ }
+
+ /// Checks a [`V128`] relaxed ternary operator.
+ fn check_v128_ternary_op(&mut self) -> Result<()> {
+ self.pop_operand(Some(ValType::V128))?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+
+ /// Checks a [`V128`] bitmask operator.
+ fn check_v128_bitmask_op(&mut self) -> Result<()> {
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::I32)?;
+ Ok(())
+ }
+
+ /// Checks a [`V128`] shift operator.
+ fn check_v128_shift_op(&mut self) -> Result<()> {
+ self.pop_operand(Some(ValType::I32))?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+
+ /// Checks a [`V128`] common load operator.
+ fn check_v128_load_op(&mut self, memarg: MemArg) -> Result<()> {
+ let idx = self.check_memarg(memarg)?;
+ self.pop_operand(Some(idx))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+
+ fn func_type_at(&self, at: u32) -> Result<&'resources R::FuncType> {
+ self.resources
+ .func_type_at(at)
+ .ok_or_else(|| format_err!(self.offset, "unknown type: type index out of bounds"))
+ }
+
+ fn tag_at(&self, at: u32) -> Result<&'resources R::FuncType> {
+ self.resources
+ .tag_at(at)
+ .ok_or_else(|| format_err!(self.offset, "unknown tag {}: tag index out of bounds", at))
+ }
+
+ fn params(&self, ty: BlockType) -> Result<impl PreciseIterator<Item = ValType> + 'resources> {
+ Ok(match ty {
+ BlockType::Empty | BlockType::Type(_) => Either::B(None.into_iter()),
+ BlockType::FuncType(t) => Either::A(self.func_type_at(t)?.inputs()),
+ })
+ }
+
+ fn results(&self, ty: BlockType) -> Result<impl PreciseIterator<Item = ValType> + 'resources> {
+ Ok(match ty {
+ BlockType::Empty => Either::B(None.into_iter()),
+ BlockType::Type(t) => Either::B(Some(t).into_iter()),
+ BlockType::FuncType(t) => Either::A(self.func_type_at(t)?.outputs()),
+ })
+ }
+
+ fn label_types(
+ &self,
+ ty: BlockType,
+ kind: FrameKind,
+ ) -> Result<impl PreciseIterator<Item = ValType> + 'resources> {
+ Ok(match kind {
+ FrameKind::Loop => Either::A(self.params(ty)?),
+ _ => Either::B(self.results(ty)?),
+ })
+ }
+}
+
+pub fn ty_to_str(ty: ValType) -> &'static str {
+ match ty {
+ ValType::I32 => "i32",
+ ValType::I64 => "i64",
+ ValType::F32 => "f32",
+ ValType::F64 => "f64",
+ ValType::V128 => "v128",
+ ValType::FUNCREF => "funcref",
+ ValType::EXTERNREF => "externref",
+ ValType::Ref(RefType {
+ nullable: false,
+ heap_type: HeapType::Func,
+ }) => "(ref func)",
+ ValType::Ref(RefType {
+ nullable: false,
+ heap_type: HeapType::Extern,
+ }) => "(ref extern)",
+ ValType::Ref(RefType {
+ nullable: false,
+ heap_type: HeapType::TypedFunc(_),
+ }) => "(ref $type)",
+ ValType::Ref(RefType {
+ nullable: true,
+ heap_type: HeapType::TypedFunc(_),
+ }) => "(ref null $type)",
+ }
+}
+
+/// A wrapper "visitor" around the real operator validator internally which
+/// exists to check that the required wasm feature is enabled to proceed with
+/// validation.
+///
+/// This validator is macro-generated to ensure that the proposal listed in this
+/// crate's macro matches the one that's validated here. Each instruction's
+/// visit method validates the specified proposal is enabled and then delegates
+/// to `OperatorValidatorTemp` to perform the actual opcode validation.
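+///
+/// Roughly speaking, an operator entry along the lines of
+/// `@simd V128Load { memarg: MemArg } => visit_v128_load` expands here to
+/// something like:
+///
+/// ```text
+/// fn visit_v128_load(&mut self, memarg: MemArg) -> Result<()> {
+///     self.check_enabled(self.0.features.simd, "SIMD")?;
+///     self.0.visit_v128_load(memarg)
+/// }
+/// ```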
+struct WasmProposalValidator<'validator, 'resources, T>(
+ OperatorValidatorTemp<'validator, 'resources, T>,
+);
+
+impl<T> WasmProposalValidator<'_, '_, T> {
+ fn check_enabled(&self, flag: bool, desc: &str) -> Result<()> {
+ if flag {
+ return Ok(());
+ }
+ bail!(self.0.offset, "{desc} support is not enabled");
+ }
+}
+
+macro_rules! validate_proposal {
+ ($( @$proposal:ident $op:ident $({ $($arg:ident: $argty:ty),* })? => $visit:ident)*) => {
+ $(
+ fn $visit(&mut self $($(,$arg: $argty)*)?) -> Result<()> {
+ validate_proposal!(validate self $proposal);
+ self.0.$visit($( $($arg),* )?)
+ }
+ )*
+ };
+
+ (validate self mvp) => {};
+ (validate $self:ident $proposal:ident) => {
+ $self.check_enabled($self.0.features.$proposal, validate_proposal!(desc $proposal))?
+ };
+
+ (desc simd) => ("SIMD");
+ (desc relaxed_simd) => ("relaxed SIMD");
+ (desc threads) => ("threads");
+ (desc saturating_float_to_int) => ("saturating float to int conversions");
+ (desc reference_types) => ("reference types");
+ (desc bulk_memory) => ("bulk memory");
+ (desc sign_extension) => ("sign extension operations");
+ (desc exceptions) => ("exceptions");
+ (desc tail_call) => ("tail calls");
+ (desc function_references) => ("function references");
+ (desc memory_control) => ("memory control");
+}
+
+impl<'a, T> VisitOperator<'a> for WasmProposalValidator<'_, '_, T>
+where
+ T: WasmModuleResources,
+{
+ type Output = Result<()>;
+
+ for_each_operator!(validate_proposal);
+}
+
+impl<'a, T> VisitOperator<'a> for OperatorValidatorTemp<'_, '_, T>
+where
+ T: WasmModuleResources,
+{
+ type Output = Result<()>;
+
+ fn visit_nop(&mut self) -> Self::Output {
+ Ok(())
+ }
+ fn visit_unreachable(&mut self) -> Self::Output {
+ self.unreachable()?;
+ Ok(())
+ }
+ fn visit_block(&mut self, ty: BlockType) -> Self::Output {
+ self.check_block_type(ty)?;
+ for ty in self.params(ty)?.rev() {
+ self.pop_operand(Some(ty))?;
+ }
+ self.push_ctrl(FrameKind::Block, ty)?;
+ Ok(())
+ }
+ fn visit_loop(&mut self, ty: BlockType) -> Self::Output {
+ self.check_block_type(ty)?;
+ for ty in self.params(ty)?.rev() {
+ self.pop_operand(Some(ty))?;
+ }
+ self.push_ctrl(FrameKind::Loop, ty)?;
+ Ok(())
+ }
+ fn visit_if(&mut self, ty: BlockType) -> Self::Output {
+ self.check_block_type(ty)?;
+ self.pop_operand(Some(ValType::I32))?;
+ for ty in self.params(ty)?.rev() {
+ self.pop_operand(Some(ty))?;
+ }
+ self.push_ctrl(FrameKind::If, ty)?;
+ Ok(())
+ }
+ fn visit_else(&mut self) -> Self::Output {
+ let frame = self.pop_ctrl()?;
+ if frame.kind != FrameKind::If {
+ bail!(self.offset, "else found outside of an `if` block");
+ }
+ self.push_ctrl(FrameKind::Else, frame.block_type)?;
+ Ok(())
+ }
+ fn visit_try(&mut self, ty: BlockType) -> Self::Output {
+ self.check_block_type(ty)?;
+ for ty in self.params(ty)?.rev() {
+ self.pop_operand(Some(ty))?;
+ }
+ self.push_ctrl(FrameKind::Try, ty)?;
+ Ok(())
+ }
+ fn visit_catch(&mut self, index: u32) -> Self::Output {
+ let frame = self.pop_ctrl()?;
+ if frame.kind != FrameKind::Try && frame.kind != FrameKind::Catch {
+ bail!(self.offset, "catch found outside of an `try` block");
+ }
+ // Start a new frame for the `catch` block.
+ let height = self.operands.len();
+ let init_height = self.inits.len();
+ self.control.push(Frame {
+ kind: FrameKind::Catch,
+ block_type: frame.block_type,
+ height,
+ unreachable: false,
+ init_height,
+ });
+ // Push exception argument types.
+ let ty = self.tag_at(index)?;
+ for ty in ty.inputs() {
+ self.push_operand(ty)?;
+ }
+ Ok(())
+ }
+ fn visit_throw(&mut self, index: u32) -> Self::Output {
+ // Check values associated with the exception.
+ let ty = self.tag_at(index)?;
+ for ty in ty.inputs().rev() {
+ self.pop_operand(Some(ty))?;
+ }
+ if ty.outputs().len() > 0 {
+ bail!(
+ self.offset,
+ "result type expected to be empty for exception"
+ );
+ }
+ self.unreachable()?;
+ Ok(())
+ }
+ fn visit_rethrow(&mut self, relative_depth: u32) -> Self::Output {
+ // This is not a jump, but we need to check that the `rethrow`
+ // targets an actual `catch` to get the exception.
+ let (_, kind) = self.jump(relative_depth)?;
+ if kind != FrameKind::Catch && kind != FrameKind::CatchAll {
+ bail!(
+ self.offset,
+ "invalid rethrow label: target was not a `catch` block"
+ );
+ }
+ self.unreachable()?;
+ Ok(())
+ }
+ fn visit_delegate(&mut self, relative_depth: u32) -> Self::Output {
+ let frame = self.pop_ctrl()?;
+ if frame.kind != FrameKind::Try {
+ bail!(self.offset, "delegate found outside of an `try` block");
+ }
+ // This operation is not a jump, but we need to check the
+ // depth for validity
+ let _ = self.jump(relative_depth)?;
+ for ty in self.results(frame.block_type)? {
+ self.push_operand(ty)?;
+ }
+ Ok(())
+ }
+ fn visit_catch_all(&mut self) -> Self::Output {
+ let frame = self.pop_ctrl()?;
+ if frame.kind == FrameKind::CatchAll {
+ bail!(self.offset, "only one catch_all allowed per `try` block");
+ } else if frame.kind != FrameKind::Try && frame.kind != FrameKind::Catch {
+ bail!(self.offset, "catch_all found outside of a `try` block");
+ }
+ let height = self.operands.len();
+ let init_height = self.inits.len();
+ self.control.push(Frame {
+ kind: FrameKind::CatchAll,
+ block_type: frame.block_type,
+ height,
+ unreachable: false,
+ init_height,
+ });
+ Ok(())
+ }
+ fn visit_end(&mut self) -> Self::Output {
+ let mut frame = self.pop_ctrl()?;
+
+ // Note that this `if` isn't included in the spec's appendix
+ // algorithm, but it's used to allow `if` blocks that are missing
+ // an `else` arm, which are only valid when the block's parameter
+ // and result types are the same.
+ if frame.kind == FrameKind::If {
+ self.push_ctrl(FrameKind::Else, frame.block_type)?;
+ frame = self.pop_ctrl()?;
+ }
+ for ty in self.results(frame.block_type)? {
+ self.push_operand(ty)?;
+ }
+
+ if self.control.is_empty() && self.end_which_emptied_control.is_none() {
+ assert_ne!(self.offset, 0);
+ self.end_which_emptied_control = Some(self.offset);
+ }
+ Ok(())
+ }
+ fn visit_br(&mut self, relative_depth: u32) -> Self::Output {
+ let (ty, kind) = self.jump(relative_depth)?;
+ for ty in self.label_types(ty, kind)?.rev() {
+ self.pop_operand(Some(ty))?;
+ }
+ self.unreachable()?;
+ Ok(())
+ }
+ fn visit_br_if(&mut self, relative_depth: u32) -> Self::Output {
+ self.pop_operand(Some(ValType::I32))?;
+ let (ty, kind) = self.jump(relative_depth)?;
+ let types = self.label_types(ty, kind)?;
+ for ty in types.clone().rev() {
+ self.pop_operand(Some(ty))?;
+ }
+ for ty in types {
+ self.push_operand(ty)?;
+ }
+ Ok(())
+ }
+ fn visit_br_table(&mut self, table: BrTable) -> Self::Output {
+ self.pop_operand(Some(ValType::I32))?;
+ let default = self.jump(table.default())?;
+ let default_types = self.label_types(default.0, default.1)?;
+ for element in table.targets() {
+ let relative_depth = element?;
+ let block = self.jump(relative_depth)?;
+ let tys = self.label_types(block.0, block.1)?;
+ if tys.len() != default_types.len() {
+ bail!(
+ self.offset,
+ "type mismatch: br_table target labels have different number of types"
+ );
+ }
+ debug_assert!(self.br_table_tmp.is_empty());
+ for ty in tys.rev() {
+ let ty = self.pop_operand(Some(ty))?;
+ self.br_table_tmp.push(ty);
+ }
+ for ty in self.inner.br_table_tmp.drain(..).rev() {
+ self.inner.operands.push(ty);
+ }
+ }
+ for ty in default_types.rev() {
+ self.pop_operand(Some(ty))?;
+ }
+ self.unreachable()?;
+ Ok(())
+ }
+ fn visit_return(&mut self) -> Self::Output {
+ self.check_return()?;
+ Ok(())
+ }
+ fn visit_call(&mut self, function_index: u32) -> Self::Output {
+ self.check_call(function_index)?;
+ Ok(())
+ }
+ fn visit_return_call(&mut self, function_index: u32) -> Self::Output {
+ self.check_call(function_index)?;
+ self.check_return()?;
+ Ok(())
+ }
+ fn visit_call_ref(&mut self, hty: HeapType) -> Self::Output {
+ self.resources
+ .check_heap_type(hty, &self.features, self.offset)?;
+ // If `None` is popped then that means a "bottom" type was popped which
+ // is always considered compatible with the specified `hty`.
+ if let Some(rt) = self.pop_ref()? {
+ let expected = RefType {
+ nullable: true,
+ heap_type: hty,
+ };
+ if !self
+ .resources
+ .matches(ValType::Ref(rt), ValType::Ref(expected))
+ {
+ bail!(
+ self.offset,
+ "type mismatch: funcref on stack does not match specified type",
+ );
+ }
+ }
+ match hty {
+ HeapType::TypedFunc(type_index) => self.check_call_ty(type_index.into())?,
+ _ => bail!(
+ self.offset,
+ "type mismatch: instruction requires function reference type",
+ ),
+ }
+ Ok(())
+ }
+ fn visit_return_call_ref(&mut self, hty: HeapType) -> Self::Output {
+ self.visit_call_ref(hty)?;
+ self.check_return()
+ }
+ fn visit_call_indirect(
+ &mut self,
+ index: u32,
+ table_index: u32,
+ table_byte: u8,
+ ) -> Self::Output {
+ if table_byte != 0 && !self.features.reference_types {
+ bail!(
+ self.offset,
+ "reference-types not enabled: zero byte expected"
+ );
+ }
+ self.check_call_indirect(index, table_index)?;
+ Ok(())
+ }
+ fn visit_return_call_indirect(&mut self, index: u32, table_index: u32) -> Self::Output {
+ self.check_call_indirect(index, table_index)?;
+ self.check_return()?;
+ Ok(())
+ }
+ fn visit_drop(&mut self) -> Self::Output {
+ self.pop_operand(None)?;
+ Ok(())
+ }
+ fn visit_select(&mut self) -> Self::Output {
+ self.pop_operand(Some(ValType::I32))?;
+ let ty1 = self.pop_operand(None)?;
+ let ty2 = self.pop_operand(None)?;
+
+ let ty = match (ty1, ty2) {
+ // Heap-related (reference) types aren't allowed with the untyped
+ // `select` instruction.
+ (MaybeType::HeapBot, _)
+ | (_, MaybeType::HeapBot)
+ | (MaybeType::Type(ValType::Ref(_)), _)
+ | (_, MaybeType::Type(ValType::Ref(_))) => {
+ bail!(
+ self.offset,
+ "type mismatch: select only takes integral types"
+ )
+ }
+
+ // If one operand is the "bottom" type then the result of the
+ // `select` is whatever the other operand is.
+ (MaybeType::Bot, t) | (t, MaybeType::Bot) => t,
+
+ // Otherwise these are two integral types and they must match for
+ // `select` to typecheck.
+ (t @ MaybeType::Type(t1), MaybeType::Type(t2)) => {
+ if t1 != t2 {
+ bail!(
+ self.offset,
+ "type mismatch: select operands have different types"
+ );
+ }
+ t
+ }
+ };
+ self.push_operand(ty)?;
+ Ok(())
+ }
+ fn visit_typed_select(&mut self, ty: ValType) -> Self::Output {
+ self.resources
+ .check_value_type(ty, &self.features, self.offset)?;
+ self.pop_operand(Some(ValType::I32))?;
+ self.pop_operand(Some(ty))?;
+ self.pop_operand(Some(ty))?;
+ self.push_operand(ty)?;
+ Ok(())
+ }
+ fn visit_local_get(&mut self, local_index: u32) -> Self::Output {
+ let ty = self.local(local_index)?;
+ if !self.local_inits[local_index as usize] {
+ bail!(self.offset, "uninitialized local: {}", local_index);
+ }
+ self.push_operand(ty)?;
+ Ok(())
+ }
+ fn visit_local_set(&mut self, local_index: u32) -> Self::Output {
+ let ty = self.local(local_index)?;
+ self.pop_operand(Some(ty))?;
+ if !self.local_inits[local_index as usize] {
+ self.local_inits[local_index as usize] = true;
+ self.inits.push(local_index);
+ }
+ Ok(())
+ }
+ fn visit_local_tee(&mut self, local_index: u32) -> Self::Output {
+ let ty = self.local(local_index)?;
+ self.pop_operand(Some(ty))?;
+ if !self.local_inits[local_index as usize] {
+ self.local_inits[local_index as usize] = true;
+ self.inits.push(local_index);
+ }
+
+ self.push_operand(ty)?;
+ Ok(())
+ }
+ fn visit_global_get(&mut self, global_index: u32) -> Self::Output {
+ if let Some(ty) = self.resources.global_at(global_index) {
+ self.push_operand(ty.content_type)?;
+ } else {
+ bail!(self.offset, "unknown global: global index out of bounds");
+ };
+ Ok(())
+ }
+ fn visit_global_set(&mut self, global_index: u32) -> Self::Output {
+ if let Some(ty) = self.resources.global_at(global_index) {
+ if !ty.mutable {
+ bail!(
+ self.offset,
+ "global is immutable: cannot modify it with `global.set`"
+ );
+ }
+ self.pop_operand(Some(ty.content_type))?;
+ } else {
+ bail!(self.offset, "unknown global: global index out of bounds");
+ };
+ Ok(())
+ }
+ fn visit_i32_load(&mut self, memarg: MemArg) -> Self::Output {
+ let ty = self.check_memarg(memarg)?;
+ self.pop_operand(Some(ty))?;
+ self.push_operand(ValType::I32)?;
+ Ok(())
+ }
+ fn visit_i64_load(&mut self, memarg: MemArg) -> Self::Output {
+ let ty = self.check_memarg(memarg)?;
+ self.pop_operand(Some(ty))?;
+ self.push_operand(ValType::I64)?;
+ Ok(())
+ }
+ fn visit_f32_load(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_floats_enabled()?;
+ let ty = self.check_memarg(memarg)?;
+ self.pop_operand(Some(ty))?;
+ self.push_operand(ValType::F32)?;
+ Ok(())
+ }
+ fn visit_f64_load(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_floats_enabled()?;
+ let ty = self.check_memarg(memarg)?;
+ self.pop_operand(Some(ty))?;
+ self.push_operand(ValType::F64)?;
+ Ok(())
+ }
+ fn visit_i32_load8_s(&mut self, memarg: MemArg) -> Self::Output {
+ let ty = self.check_memarg(memarg)?;
+ self.pop_operand(Some(ty))?;
+ self.push_operand(ValType::I32)?;
+ Ok(())
+ }
+ fn visit_i32_load8_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.visit_i32_load8_s(memarg)
+ }
+ fn visit_i32_load16_s(&mut self, memarg: MemArg) -> Self::Output {
+ let ty = self.check_memarg(memarg)?;
+ self.pop_operand(Some(ty))?;
+ self.push_operand(ValType::I32)?;
+ Ok(())
+ }
+ fn visit_i32_load16_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.visit_i32_load16_s(memarg)
+ }
+ fn visit_i64_load8_s(&mut self, memarg: MemArg) -> Self::Output {
+ let ty = self.check_memarg(memarg)?;
+ self.pop_operand(Some(ty))?;
+ self.push_operand(ValType::I64)?;
+ Ok(())
+ }
+ fn visit_i64_load8_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.visit_i64_load8_s(memarg)
+ }
+ fn visit_i64_load16_s(&mut self, memarg: MemArg) -> Self::Output {
+ let ty = self.check_memarg(memarg)?;
+ self.pop_operand(Some(ty))?;
+ self.push_operand(ValType::I64)?;
+ Ok(())
+ }
+ fn visit_i64_load16_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.visit_i64_load16_s(memarg)
+ }
+ fn visit_i64_load32_s(&mut self, memarg: MemArg) -> Self::Output {
+ let ty = self.check_memarg(memarg)?;
+ self.pop_operand(Some(ty))?;
+ self.push_operand(ValType::I64)?;
+ Ok(())
+ }
+ fn visit_i64_load32_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.visit_i64_load32_s(memarg)
+ }
+ fn visit_i32_store(&mut self, memarg: MemArg) -> Self::Output {
+ let ty = self.check_memarg(memarg)?;
+ self.pop_operand(Some(ValType::I32))?;
+ self.pop_operand(Some(ty))?;
+ Ok(())
+ }
+ fn visit_i64_store(&mut self, memarg: MemArg) -> Self::Output {
+ let ty = self.check_memarg(memarg)?;
+ self.pop_operand(Some(ValType::I64))?;
+ self.pop_operand(Some(ty))?;
+ Ok(())
+ }
+ fn visit_f32_store(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_floats_enabled()?;
+ let ty = self.check_memarg(memarg)?;
+ self.pop_operand(Some(ValType::F32))?;
+ self.pop_operand(Some(ty))?;
+ Ok(())
+ }
+ fn visit_f64_store(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_floats_enabled()?;
+ let ty = self.check_memarg(memarg)?;
+ self.pop_operand(Some(ValType::F64))?;
+ self.pop_operand(Some(ty))?;
+ Ok(())
+ }
+ fn visit_i32_store8(&mut self, memarg: MemArg) -> Self::Output {
+ let ty = self.check_memarg(memarg)?;
+ self.pop_operand(Some(ValType::I32))?;
+ self.pop_operand(Some(ty))?;
+ Ok(())
+ }
+ fn visit_i32_store16(&mut self, memarg: MemArg) -> Self::Output {
+ let ty = self.check_memarg(memarg)?;
+ self.pop_operand(Some(ValType::I32))?;
+ self.pop_operand(Some(ty))?;
+ Ok(())
+ }
+ fn visit_i64_store8(&mut self, memarg: MemArg) -> Self::Output {
+ let ty = self.check_memarg(memarg)?;
+ self.pop_operand(Some(ValType::I64))?;
+ self.pop_operand(Some(ty))?;
+ Ok(())
+ }
+ fn visit_i64_store16(&mut self, memarg: MemArg) -> Self::Output {
+ let ty = self.check_memarg(memarg)?;
+ self.pop_operand(Some(ValType::I64))?;
+ self.pop_operand(Some(ty))?;
+ Ok(())
+ }
+ fn visit_i64_store32(&mut self, memarg: MemArg) -> Self::Output {
+ let ty = self.check_memarg(memarg)?;
+ self.pop_operand(Some(ValType::I64))?;
+ self.pop_operand(Some(ty))?;
+ Ok(())
+ }
+ fn visit_memory_size(&mut self, mem: u32, mem_byte: u8) -> Self::Output {
+ if mem_byte != 0 && !self.features.multi_memory {
+ bail!(self.offset, "multi-memory not enabled: zero byte expected");
+ }
+ let index_ty = self.check_memory_index(mem)?;
+ self.push_operand(index_ty)?;
+ Ok(())
+ }
+ fn visit_memory_grow(&mut self, mem: u32, mem_byte: u8) -> Self::Output {
+ if mem_byte != 0 && !self.features.multi_memory {
+ bail!(self.offset, "multi-memory not enabled: zero byte expected");
+ }
+ let index_ty = self.check_memory_index(mem)?;
+ self.pop_operand(Some(index_ty))?;
+ self.push_operand(index_ty)?;
+ Ok(())
+ }
+ fn visit_i32_const(&mut self, _value: i32) -> Self::Output {
+ self.push_operand(ValType::I32)?;
+ Ok(())
+ }
+ fn visit_i64_const(&mut self, _value: i64) -> Self::Output {
+ self.push_operand(ValType::I64)?;
+ Ok(())
+ }
+ fn visit_f32_const(&mut self, _value: Ieee32) -> Self::Output {
+ self.check_floats_enabled()?;
+ self.push_operand(ValType::F32)?;
+ Ok(())
+ }
+ fn visit_f64_const(&mut self, _value: Ieee64) -> Self::Output {
+ self.check_floats_enabled()?;
+ self.push_operand(ValType::F64)?;
+ Ok(())
+ }
+ fn visit_i32_eqz(&mut self) -> Self::Output {
+ self.pop_operand(Some(ValType::I32))?;
+ self.push_operand(ValType::I32)?;
+ Ok(())
+ }
+ fn visit_i32_eq(&mut self) -> Self::Output {
+ self.check_cmp_op(ValType::I32)
+ }
+ fn visit_i32_ne(&mut self) -> Self::Output {
+ self.check_cmp_op(ValType::I32)
+ }
+ fn visit_i32_lt_s(&mut self) -> Self::Output {
+ self.check_cmp_op(ValType::I32)
+ }
+ fn visit_i32_lt_u(&mut self) -> Self::Output {
+ self.check_cmp_op(ValType::I32)
+ }
+ fn visit_i32_gt_s(&mut self) -> Self::Output {
+ self.check_cmp_op(ValType::I32)
+ }
+ fn visit_i32_gt_u(&mut self) -> Self::Output {
+ self.check_cmp_op(ValType::I32)
+ }
+ fn visit_i32_le_s(&mut self) -> Self::Output {
+ self.check_cmp_op(ValType::I32)
+ }
+ fn visit_i32_le_u(&mut self) -> Self::Output {
+ self.check_cmp_op(ValType::I32)
+ }
+ fn visit_i32_ge_s(&mut self) -> Self::Output {
+ self.check_cmp_op(ValType::I32)
+ }
+ fn visit_i32_ge_u(&mut self) -> Self::Output {
+ self.check_cmp_op(ValType::I32)
+ }
+ fn visit_i64_eqz(&mut self) -> Self::Output {
+ self.pop_operand(Some(ValType::I64))?;
+ self.push_operand(ValType::I32)?;
+ Ok(())
+ }
+ fn visit_i64_eq(&mut self) -> Self::Output {
+ self.check_cmp_op(ValType::I64)
+ }
+ fn visit_i64_ne(&mut self) -> Self::Output {
+ self.check_cmp_op(ValType::I64)
+ }
+ fn visit_i64_lt_s(&mut self) -> Self::Output {
+ self.check_cmp_op(ValType::I64)
+ }
+ fn visit_i64_lt_u(&mut self) -> Self::Output {
+ self.check_cmp_op(ValType::I64)
+ }
+ fn visit_i64_gt_s(&mut self) -> Self::Output {
+ self.check_cmp_op(ValType::I64)
+ }
+ fn visit_i64_gt_u(&mut self) -> Self::Output {
+ self.check_cmp_op(ValType::I64)
+ }
+ fn visit_i64_le_s(&mut self) -> Self::Output {
+ self.check_cmp_op(ValType::I64)
+ }
+ fn visit_i64_le_u(&mut self) -> Self::Output {
+ self.check_cmp_op(ValType::I64)
+ }
+ fn visit_i64_ge_s(&mut self) -> Self::Output {
+ self.check_cmp_op(ValType::I64)
+ }
+ fn visit_i64_ge_u(&mut self) -> Self::Output {
+ self.check_cmp_op(ValType::I64)
+ }
+ fn visit_f32_eq(&mut self) -> Self::Output {
+ self.check_fcmp_op(ValType::F32)
+ }
+ fn visit_f32_ne(&mut self) -> Self::Output {
+ self.check_fcmp_op(ValType::F32)
+ }
+ fn visit_f32_lt(&mut self) -> Self::Output {
+ self.check_fcmp_op(ValType::F32)
+ }
+ fn visit_f32_gt(&mut self) -> Self::Output {
+ self.check_fcmp_op(ValType::F32)
+ }
+ fn visit_f32_le(&mut self) -> Self::Output {
+ self.check_fcmp_op(ValType::F32)
+ }
+ fn visit_f32_ge(&mut self) -> Self::Output {
+ self.check_fcmp_op(ValType::F32)
+ }
+ fn visit_f64_eq(&mut self) -> Self::Output {
+ self.check_fcmp_op(ValType::F64)
+ }
+ fn visit_f64_ne(&mut self) -> Self::Output {
+ self.check_fcmp_op(ValType::F64)
+ }
+ fn visit_f64_lt(&mut self) -> Self::Output {
+ self.check_fcmp_op(ValType::F64)
+ }
+ fn visit_f64_gt(&mut self) -> Self::Output {
+ self.check_fcmp_op(ValType::F64)
+ }
+ fn visit_f64_le(&mut self) -> Self::Output {
+ self.check_fcmp_op(ValType::F64)
+ }
+ fn visit_f64_ge(&mut self) -> Self::Output {
+ self.check_fcmp_op(ValType::F64)
+ }
+ fn visit_i32_clz(&mut self) -> Self::Output {
+ self.check_unary_op(ValType::I32)
+ }
+ fn visit_i32_ctz(&mut self) -> Self::Output {
+ self.check_unary_op(ValType::I32)
+ }
+ fn visit_i32_popcnt(&mut self) -> Self::Output {
+ self.check_unary_op(ValType::I32)
+ }
+ fn visit_i32_add(&mut self) -> Self::Output {
+ self.check_binary_op(ValType::I32)
+ }
+ fn visit_i32_sub(&mut self) -> Self::Output {
+ self.check_binary_op(ValType::I32)
+ }
+ fn visit_i32_mul(&mut self) -> Self::Output {
+ self.check_binary_op(ValType::I32)
+ }
+ fn visit_i32_div_s(&mut self) -> Self::Output {
+ self.check_binary_op(ValType::I32)
+ }
+ fn visit_i32_div_u(&mut self) -> Self::Output {
+ self.check_binary_op(ValType::I32)
+ }
+ fn visit_i32_rem_s(&mut self) -> Self::Output {
+ self.check_binary_op(ValType::I32)
+ }
+ fn visit_i32_rem_u(&mut self) -> Self::Output {
+ self.check_binary_op(ValType::I32)
+ }
+ fn visit_i32_and(&mut self) -> Self::Output {
+ self.check_binary_op(ValType::I32)
+ }
+ fn visit_i32_or(&mut self) -> Self::Output {
+ self.check_binary_op(ValType::I32)
+ }
+ fn visit_i32_xor(&mut self) -> Self::Output {
+ self.check_binary_op(ValType::I32)
+ }
+ fn visit_i32_shl(&mut self) -> Self::Output {
+ self.check_binary_op(ValType::I32)
+ }
+ fn visit_i32_shr_s(&mut self) -> Self::Output {
+ self.check_binary_op(ValType::I32)
+ }
+ fn visit_i32_shr_u(&mut self) -> Self::Output {
+ self.check_binary_op(ValType::I32)
+ }
+ fn visit_i32_rotl(&mut self) -> Self::Output {
+ self.check_binary_op(ValType::I32)
+ }
+ fn visit_i32_rotr(&mut self) -> Self::Output {
+ self.check_binary_op(ValType::I32)
+ }
+ fn visit_i64_clz(&mut self) -> Self::Output {
+ self.check_unary_op(ValType::I64)
+ }
+ fn visit_i64_ctz(&mut self) -> Self::Output {
+ self.check_unary_op(ValType::I64)
+ }
+ fn visit_i64_popcnt(&mut self) -> Self::Output {
+ self.check_unary_op(ValType::I64)
+ }
+ fn visit_i64_add(&mut self) -> Self::Output {
+ self.check_binary_op(ValType::I64)
+ }
+ fn visit_i64_sub(&mut self) -> Self::Output {
+ self.check_binary_op(ValType::I64)
+ }
+ fn visit_i64_mul(&mut self) -> Self::Output {
+ self.check_binary_op(ValType::I64)
+ }
+ fn visit_i64_div_s(&mut self) -> Self::Output {
+ self.check_binary_op(ValType::I64)
+ }
+ fn visit_i64_div_u(&mut self) -> Self::Output {
+ self.check_binary_op(ValType::I64)
+ }
+ fn visit_i64_rem_s(&mut self) -> Self::Output {
+ self.check_binary_op(ValType::I64)
+ }
+ fn visit_i64_rem_u(&mut self) -> Self::Output {
+ self.check_binary_op(ValType::I64)
+ }
+ fn visit_i64_and(&mut self) -> Self::Output {
+ self.check_binary_op(ValType::I64)
+ }
+ fn visit_i64_or(&mut self) -> Self::Output {
+ self.check_binary_op(ValType::I64)
+ }
+ fn visit_i64_xor(&mut self) -> Self::Output {
+ self.check_binary_op(ValType::I64)
+ }
+ fn visit_i64_shl(&mut self) -> Self::Output {
+ self.check_binary_op(ValType::I64)
+ }
+ fn visit_i64_shr_s(&mut self) -> Self::Output {
+ self.check_binary_op(ValType::I64)
+ }
+ fn visit_i64_shr_u(&mut self) -> Self::Output {
+ self.check_binary_op(ValType::I64)
+ }
+ fn visit_i64_rotl(&mut self) -> Self::Output {
+ self.check_binary_op(ValType::I64)
+ }
+ fn visit_i64_rotr(&mut self) -> Self::Output {
+ self.check_binary_op(ValType::I64)
+ }
+ fn visit_f32_abs(&mut self) -> Self::Output {
+ self.check_funary_op(ValType::F32)
+ }
+ fn visit_f32_neg(&mut self) -> Self::Output {
+ self.check_funary_op(ValType::F32)
+ }
+ fn visit_f32_ceil(&mut self) -> Self::Output {
+ self.check_funary_op(ValType::F32)
+ }
+ fn visit_f32_floor(&mut self) -> Self::Output {
+ self.check_funary_op(ValType::F32)
+ }
+ fn visit_f32_trunc(&mut self) -> Self::Output {
+ self.check_funary_op(ValType::F32)
+ }
+ fn visit_f32_nearest(&mut self) -> Self::Output {
+ self.check_funary_op(ValType::F32)
+ }
+ fn visit_f32_sqrt(&mut self) -> Self::Output {
+ self.check_funary_op(ValType::F32)
+ }
+ fn visit_f32_add(&mut self) -> Self::Output {
+ self.check_fbinary_op(ValType::F32)
+ }
+ fn visit_f32_sub(&mut self) -> Self::Output {
+ self.check_fbinary_op(ValType::F32)
+ }
+ fn visit_f32_mul(&mut self) -> Self::Output {
+ self.check_fbinary_op(ValType::F32)
+ }
+ fn visit_f32_div(&mut self) -> Self::Output {
+ self.check_fbinary_op(ValType::F32)
+ }
+ fn visit_f32_min(&mut self) -> Self::Output {
+ self.check_fbinary_op(ValType::F32)
+ }
+ fn visit_f32_max(&mut self) -> Self::Output {
+ self.check_fbinary_op(ValType::F32)
+ }
+ fn visit_f32_copysign(&mut self) -> Self::Output {
+ self.check_fbinary_op(ValType::F32)
+ }
+ fn visit_f64_abs(&mut self) -> Self::Output {
+ self.check_funary_op(ValType::F64)
+ }
+ fn visit_f64_neg(&mut self) -> Self::Output {
+ self.check_funary_op(ValType::F64)
+ }
+ fn visit_f64_ceil(&mut self) -> Self::Output {
+ self.check_funary_op(ValType::F64)
+ }
+ fn visit_f64_floor(&mut self) -> Self::Output {
+ self.check_funary_op(ValType::F64)
+ }
+ fn visit_f64_trunc(&mut self) -> Self::Output {
+ self.check_funary_op(ValType::F64)
+ }
+ fn visit_f64_nearest(&mut self) -> Self::Output {
+ self.check_funary_op(ValType::F64)
+ }
+ fn visit_f64_sqrt(&mut self) -> Self::Output {
+ self.check_funary_op(ValType::F64)
+ }
+ fn visit_f64_add(&mut self) -> Self::Output {
+ self.check_fbinary_op(ValType::F64)
+ }
+ fn visit_f64_sub(&mut self) -> Self::Output {
+ self.check_fbinary_op(ValType::F64)
+ }
+ fn visit_f64_mul(&mut self) -> Self::Output {
+ self.check_fbinary_op(ValType::F64)
+ }
+ fn visit_f64_div(&mut self) -> Self::Output {
+ self.check_fbinary_op(ValType::F64)
+ }
+ fn visit_f64_min(&mut self) -> Self::Output {
+ self.check_fbinary_op(ValType::F64)
+ }
+ fn visit_f64_max(&mut self) -> Self::Output {
+ self.check_fbinary_op(ValType::F64)
+ }
+ fn visit_f64_copysign(&mut self) -> Self::Output {
+ self.check_fbinary_op(ValType::F64)
+ }
+ fn visit_i32_wrap_i64(&mut self) -> Self::Output {
+ self.check_conversion_op(ValType::I32, ValType::I64)
+ }
+ fn visit_i32_trunc_f32_s(&mut self) -> Self::Output {
+ self.check_conversion_op(ValType::I32, ValType::F32)
+ }
+ fn visit_i32_trunc_f32_u(&mut self) -> Self::Output {
+ self.check_conversion_op(ValType::I32, ValType::F32)
+ }
+ fn visit_i32_trunc_f64_s(&mut self) -> Self::Output {
+ self.check_conversion_op(ValType::I32, ValType::F64)
+ }
+ fn visit_i32_trunc_f64_u(&mut self) -> Self::Output {
+ self.check_conversion_op(ValType::I32, ValType::F64)
+ }
+ fn visit_i64_extend_i32_s(&mut self) -> Self::Output {
+ self.check_conversion_op(ValType::I64, ValType::I32)
+ }
+ fn visit_i64_extend_i32_u(&mut self) -> Self::Output {
+ self.check_conversion_op(ValType::I64, ValType::I32)
+ }
+ fn visit_i64_trunc_f32_s(&mut self) -> Self::Output {
+ self.check_conversion_op(ValType::I64, ValType::F32)
+ }
+ fn visit_i64_trunc_f32_u(&mut self) -> Self::Output {
+ self.check_conversion_op(ValType::I64, ValType::F32)
+ }
+ fn visit_i64_trunc_f64_s(&mut self) -> Self::Output {
+ self.check_conversion_op(ValType::I64, ValType::F64)
+ }
+ fn visit_i64_trunc_f64_u(&mut self) -> Self::Output {
+ self.check_conversion_op(ValType::I64, ValType::F64)
+ }
+ fn visit_f32_convert_i32_s(&mut self) -> Self::Output {
+ self.check_fconversion_op(ValType::F32, ValType::I32)
+ }
+ fn visit_f32_convert_i32_u(&mut self) -> Self::Output {
+ self.check_fconversion_op(ValType::F32, ValType::I32)
+ }
+ fn visit_f32_convert_i64_s(&mut self) -> Self::Output {
+ self.check_fconversion_op(ValType::F32, ValType::I64)
+ }
+ fn visit_f32_convert_i64_u(&mut self) -> Self::Output {
+ self.check_fconversion_op(ValType::F32, ValType::I64)
+ }
+ fn visit_f32_demote_f64(&mut self) -> Self::Output {
+ self.check_fconversion_op(ValType::F32, ValType::F64)
+ }
+ fn visit_f64_convert_i32_s(&mut self) -> Self::Output {
+ self.check_fconversion_op(ValType::F64, ValType::I32)
+ }
+ fn visit_f64_convert_i32_u(&mut self) -> Self::Output {
+ self.check_fconversion_op(ValType::F64, ValType::I32)
+ }
+ fn visit_f64_convert_i64_s(&mut self) -> Self::Output {
+ self.check_fconversion_op(ValType::F64, ValType::I64)
+ }
+ fn visit_f64_convert_i64_u(&mut self) -> Self::Output {
+ self.check_fconversion_op(ValType::F64, ValType::I64)
+ }
+ fn visit_f64_promote_f32(&mut self) -> Self::Output {
+ self.check_fconversion_op(ValType::F64, ValType::F32)
+ }
+ fn visit_i32_reinterpret_f32(&mut self) -> Self::Output {
+ self.check_conversion_op(ValType::I32, ValType::F32)
+ }
+ fn visit_i64_reinterpret_f64(&mut self) -> Self::Output {
+ self.check_conversion_op(ValType::I64, ValType::F64)
+ }
+ fn visit_f32_reinterpret_i32(&mut self) -> Self::Output {
+ self.check_fconversion_op(ValType::F32, ValType::I32)
+ }
+ fn visit_f64_reinterpret_i64(&mut self) -> Self::Output {
+ self.check_fconversion_op(ValType::F64, ValType::I64)
+ }
+ fn visit_i32_trunc_sat_f32_s(&mut self) -> Self::Output {
+ self.check_conversion_op(ValType::I32, ValType::F32)
+ }
+ fn visit_i32_trunc_sat_f32_u(&mut self) -> Self::Output {
+ self.check_conversion_op(ValType::I32, ValType::F32)
+ }
+ fn visit_i32_trunc_sat_f64_s(&mut self) -> Self::Output {
+ self.check_conversion_op(ValType::I32, ValType::F64)
+ }
+ fn visit_i32_trunc_sat_f64_u(&mut self) -> Self::Output {
+ self.check_conversion_op(ValType::I32, ValType::F64)
+ }
+ fn visit_i64_trunc_sat_f32_s(&mut self) -> Self::Output {
+ self.check_conversion_op(ValType::I64, ValType::F32)
+ }
+ fn visit_i64_trunc_sat_f32_u(&mut self) -> Self::Output {
+ self.check_conversion_op(ValType::I64, ValType::F32)
+ }
+ fn visit_i64_trunc_sat_f64_s(&mut self) -> Self::Output {
+ self.check_conversion_op(ValType::I64, ValType::F64)
+ }
+ fn visit_i64_trunc_sat_f64_u(&mut self) -> Self::Output {
+ self.check_conversion_op(ValType::I64, ValType::F64)
+ }
+ fn visit_i32_extend8_s(&mut self) -> Self::Output {
+ self.check_unary_op(ValType::I32)
+ }
+ fn visit_i32_extend16_s(&mut self) -> Self::Output {
+ self.check_unary_op(ValType::I32)
+ }
+ fn visit_i64_extend8_s(&mut self) -> Self::Output {
+ self.check_unary_op(ValType::I64)
+ }
+ fn visit_i64_extend16_s(&mut self) -> Self::Output {
+ self.check_unary_op(ValType::I64)
+ }
+ fn visit_i64_extend32_s(&mut self) -> Self::Output {
+ self.check_unary_op(ValType::I64)
+ }
+ fn visit_i32_atomic_load(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_load(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_load16_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_load(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_load8_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_load(memarg, ValType::I32)
+ }
+ fn visit_i64_atomic_load(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_load(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_load32_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_load(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_load16_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_load(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_load8_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_load(memarg, ValType::I64)
+ }
+ fn visit_i32_atomic_store(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_store(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_store16(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_store(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_store8(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_store(memarg, ValType::I32)
+ }
+ fn visit_i64_atomic_store(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_store(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_store32(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_store(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_store16(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_store(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_store8(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_store(memarg, ValType::I64)
+ }
+ fn visit_i32_atomic_rmw_add(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw_sub(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw_and(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw_or(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw_xor(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw16_add_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw16_sub_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw16_and_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw16_or_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw16_xor_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw8_add_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw8_sub_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw8_and_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw8_or_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw8_xor_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ fn visit_i64_atomic_rmw_add(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw_sub(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw_and(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw_or(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw_xor(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw32_add_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw32_sub_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw32_and_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw32_or_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw32_xor_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw16_add_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw16_sub_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw16_and_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw16_or_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw16_xor_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw8_add_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw8_sub_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw8_and_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw8_or_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw8_xor_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i32_atomic_rmw_xchg(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw16_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw8_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw_cmpxchg(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_cmpxchg(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw16_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_cmpxchg(memarg, ValType::I32)
+ }
+ fn visit_i32_atomic_rmw8_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_cmpxchg(memarg, ValType::I32)
+ }
+ fn visit_i64_atomic_rmw_xchg(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw32_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw16_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw8_xchg_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw_cmpxchg(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_cmpxchg(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw32_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_cmpxchg(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw16_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_cmpxchg(memarg, ValType::I64)
+ }
+ fn visit_i64_atomic_rmw8_cmpxchg_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_cmpxchg(memarg, ValType::I64)
+ }
+ fn visit_memory_atomic_notify(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_atomic_binary_op(memarg, ValType::I32)
+ }
+ fn visit_memory_atomic_wait32(&mut self, memarg: MemArg) -> Self::Output {
+ let ty = self.check_shared_memarg(memarg)?;
+ self.pop_operand(Some(ValType::I64))?;
+ self.pop_operand(Some(ValType::I32))?;
+ self.pop_operand(Some(ty))?;
+ self.push_operand(ValType::I32)?;
+ Ok(())
+ }
+ fn visit_memory_atomic_wait64(&mut self, memarg: MemArg) -> Self::Output {
+ let ty = self.check_shared_memarg(memarg)?;
+ self.pop_operand(Some(ValType::I64))?;
+ self.pop_operand(Some(ValType::I64))?;
+ self.pop_operand(Some(ty))?;
+ self.push_operand(ValType::I32)?;
+ Ok(())
+ }
+ fn visit_atomic_fence(&mut self) -> Self::Output {
+ Ok(())
+ }
+ fn visit_ref_null(&mut self, heap_type: HeapType) -> Self::Output {
+ self.resources
+ .check_heap_type(heap_type, &self.features, self.offset)?;
+ self.push_operand(ValType::Ref(RefType {
+ nullable: true,
+ heap_type,
+ }))?;
+ Ok(())
+ }
+
+ fn visit_ref_as_non_null(&mut self) -> Self::Output {
+ let ty = match self.pop_ref()? {
+ Some(ty) => MaybeType::Type(ValType::Ref(RefType {
+ nullable: false,
+ heap_type: ty.heap_type,
+ })),
+ None => MaybeType::HeapBot,
+ };
+ self.push_operand(ty)?;
+ Ok(())
+ }
+ fn visit_br_on_null(&mut self, relative_depth: u32) -> Self::Output {
+ let ty = match self.pop_ref()? {
+ None => MaybeType::HeapBot,
+ Some(ty) => MaybeType::Type(ValType::Ref(RefType {
+ nullable: false,
+ heap_type: ty.heap_type,
+ })),
+ };
+ let (ft, kind) = self.jump(relative_depth)?;
+ for ty in self.label_types(ft, kind)?.rev() {
+ self.pop_operand(Some(ty))?;
+ }
+ for ty in self.label_types(ft, kind)? {
+ self.push_operand(ty)?;
+ }
+ self.push_operand(ty)?;
+ Ok(())
+ }
+ fn visit_br_on_non_null(&mut self, relative_depth: u32) -> Self::Output {
+ let ty = self.pop_ref()?;
+ let (ft, kind) = self.jump(relative_depth)?;
+ let mut lts = self.label_types(ft, kind)?;
+ match (lts.next_back(), ty) {
+ (None, _) => bail!(
+ self.offset,
+ "type mismatch: br_on_non_null target has no label types",
+ ),
+ (Some(ValType::Ref(_)), None) => {}
+ (Some(rt1 @ ValType::Ref(_)), Some(rt0)) => {
+ // Switch rt0, our popped type, to a non-nullable type and
+ // perform the match because if the branch is taken it's a
+ // non-null value.
+ let ty = RefType {
+ nullable: false,
+ heap_type: rt0.heap_type,
+ };
+ if !self.resources.matches(ty.into(), rt1) {
+ bail!(
+ self.offset,
+ "type mismatch: expected {} but found {}",
+ ty_to_str(rt0.into()),
+ ty_to_str(rt1)
+ )
+ }
+ }
+ (Some(_), _) => bail!(
+ self.offset,
+ "type mismatch: br_on_non_null target does not end with heap type",
+ ),
+ }
+ for ty in self.label_types(ft, kind)?.rev().skip(1) {
+ self.pop_operand(Some(ty))?;
+ }
+ for ty in lts {
+ self.push_operand(ty)?;
+ }
+ Ok(())
+ }
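+ // Illustrative note (not from the upstream sources): `br_on_non_null`
+ // pops a possibly-null reference; if it is non-null the branch is
+ // taken with the non-nullable reference as the label's final value,
+ // otherwise execution falls through with the reference consumed, as
+ // implemented above. A hypothetical snippet:
+ //
+ //   (block (result (ref $f))
+ //     local.get $maybe-func      ;; (ref null $f) on the stack
+ //     br_on_non_null 0           ;; branches with (ref $f) if non-null
+ //     ...)                       ;; fallthrough: the reference was null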
+ fn visit_ref_is_null(&mut self) -> Self::Output {
+ self.pop_ref()?;
+ self.push_operand(ValType::I32)?;
+ Ok(())
+ }
+ fn visit_ref_func(&mut self, function_index: u32) -> Self::Output {
+ let type_index = match self.resources.type_index_of_function(function_index) {
+ Some(idx) => idx,
+ None => bail!(
+ self.offset,
+ "unknown function {}: function index out of bounds",
+ function_index,
+ ),
+ };
+ if !self.resources.is_function_referenced(function_index) {
+ bail!(self.offset, "undeclared function reference");
+ }
+
+ // FIXME(#924) this should not be conditional based on enabled
+ // proposals.
+ if self.features.function_references {
+ let heap_type = HeapType::TypedFunc(match type_index.try_into() {
+ Ok(packed) => packed,
+ Err(_) => {
+ bail!(self.offset, "type index of `ref.func` target too large")
+ }
+ });
+ self.push_operand(ValType::Ref(RefType {
+ nullable: false,
+ heap_type,
+ }))?;
+ } else {
+ self.push_operand(ValType::FUNCREF)?;
+ }
+ Ok(())
+ }
+ fn visit_v128_load(&mut self, memarg: MemArg) -> Self::Output {
+ let ty = self.check_memarg(memarg)?;
+ self.pop_operand(Some(ty))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ fn visit_v128_store(&mut self, memarg: MemArg) -> Self::Output {
+ let ty = self.check_memarg(memarg)?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.pop_operand(Some(ty))?;
+ Ok(())
+ }
+ fn visit_v128_const(&mut self, _value: V128) -> Self::Output {
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ fn visit_i8x16_splat(&mut self) -> Self::Output {
+ self.check_v128_splat(ValType::I32)
+ }
+ fn visit_i16x8_splat(&mut self) -> Self::Output {
+ self.check_v128_splat(ValType::I32)
+ }
+ fn visit_i32x4_splat(&mut self) -> Self::Output {
+ self.check_v128_splat(ValType::I32)
+ }
+ fn visit_i64x2_splat(&mut self) -> Self::Output {
+ self.check_v128_splat(ValType::I64)
+ }
+ fn visit_f32x4_splat(&mut self) -> Self::Output {
+ self.check_floats_enabled()?;
+ self.check_v128_splat(ValType::F32)
+ }
+ fn visit_f64x2_splat(&mut self) -> Self::Output {
+ self.check_floats_enabled()?;
+ self.check_v128_splat(ValType::F64)
+ }
+ fn visit_i8x16_extract_lane_s(&mut self, lane: u8) -> Self::Output {
+ self.check_simd_lane_index(lane, 16)?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::I32)?;
+ Ok(())
+ }
+ fn visit_i8x16_extract_lane_u(&mut self, lane: u8) -> Self::Output {
+ self.visit_i8x16_extract_lane_s(lane)
+ }
+ fn visit_i16x8_extract_lane_s(&mut self, lane: u8) -> Self::Output {
+ self.check_simd_lane_index(lane, 8)?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::I32)?;
+ Ok(())
+ }
+ fn visit_i16x8_extract_lane_u(&mut self, lane: u8) -> Self::Output {
+ self.visit_i16x8_extract_lane_s(lane)
+ }
+ fn visit_i32x4_extract_lane(&mut self, lane: u8) -> Self::Output {
+ self.check_simd_lane_index(lane, 4)?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::I32)?;
+ Ok(())
+ }
+ fn visit_i8x16_replace_lane(&mut self, lane: u8) -> Self::Output {
+ self.check_simd_lane_index(lane, 16)?;
+ self.pop_operand(Some(ValType::I32))?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ fn visit_i16x8_replace_lane(&mut self, lane: u8) -> Self::Output {
+ self.check_simd_lane_index(lane, 8)?;
+ self.pop_operand(Some(ValType::I32))?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ fn visit_i32x4_replace_lane(&mut self, lane: u8) -> Self::Output {
+ self.check_simd_lane_index(lane, 4)?;
+ self.pop_operand(Some(ValType::I32))?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ fn visit_i64x2_extract_lane(&mut self, lane: u8) -> Self::Output {
+ self.check_simd_lane_index(lane, 2)?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::I64)?;
+ Ok(())
+ }
+ fn visit_i64x2_replace_lane(&mut self, lane: u8) -> Self::Output {
+ self.check_simd_lane_index(lane, 2)?;
+ self.pop_operand(Some(ValType::I64))?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ fn visit_f32x4_extract_lane(&mut self, lane: u8) -> Self::Output {
+ self.check_floats_enabled()?;
+ self.check_simd_lane_index(lane, 4)?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::F32)?;
+ Ok(())
+ }
+ fn visit_f32x4_replace_lane(&mut self, lane: u8) -> Self::Output {
+ self.check_floats_enabled()?;
+ self.check_simd_lane_index(lane, 4)?;
+ self.pop_operand(Some(ValType::F32))?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ fn visit_f64x2_extract_lane(&mut self, lane: u8) -> Self::Output {
+ self.check_floats_enabled()?;
+ self.check_simd_lane_index(lane, 2)?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::F64)?;
+ Ok(())
+ }
+ fn visit_f64x2_replace_lane(&mut self, lane: u8) -> Self::Output {
+ self.check_floats_enabled()?;
+ self.check_simd_lane_index(lane, 2)?;
+ self.pop_operand(Some(ValType::F64))?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ fn visit_f32x4_eq(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f32x4_ne(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f32x4_lt(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f32x4_gt(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f32x4_le(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f32x4_ge(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f64x2_eq(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f64x2_ne(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f64x2_lt(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f64x2_gt(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f64x2_le(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f64x2_ge(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f32x4_add(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f32x4_sub(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f32x4_mul(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f32x4_div(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f32x4_min(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f32x4_max(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f32x4_pmin(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f32x4_pmax(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f64x2_add(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f64x2_sub(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f64x2_mul(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f64x2_div(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f64x2_min(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f64x2_max(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f64x2_pmin(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_f64x2_pmax(&mut self) -> Self::Output {
+ self.check_v128_fbinary_op()
+ }
+ fn visit_i8x16_eq(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_ne(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_lt_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_lt_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_gt_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_gt_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_le_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_le_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_ge_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_ge_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_eq(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_ne(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_lt_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_lt_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_gt_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_gt_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_le_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_le_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_ge_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_ge_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_eq(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_ne(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_lt_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_lt_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_gt_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_gt_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_le_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_le_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_ge_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_ge_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i64x2_eq(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i64x2_ne(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i64x2_lt_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i64x2_gt_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i64x2_le_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i64x2_ge_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_v128_and(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_v128_andnot(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_v128_or(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_v128_xor(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_add(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_add_sat_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_add_sat_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_sub(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_sub_sat_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_sub_sat_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_min_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_min_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_max_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_max_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_add(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_add_sat_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_add_sat_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_sub(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_sub_sat_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_sub_sat_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_mul(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_min_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_min_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_max_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_max_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_add(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_sub(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_mul(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_min_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_min_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_max_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_max_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_dot_i16x8_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i64x2_add(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i64x2_sub(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i64x2_mul(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_avgr_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_avgr_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_narrow_i16x8_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i8x16_narrow_i16x8_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_narrow_i32x4_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_narrow_i32x4_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_extmul_low_i8x16_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_extmul_high_i8x16_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_extmul_low_i8x16_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_extmul_high_i8x16_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_extmul_low_i16x8_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_extmul_high_i16x8_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_extmul_low_i16x8_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_extmul_high_i16x8_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i64x2_extmul_low_i32x4_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i64x2_extmul_high_i32x4_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i64x2_extmul_low_i32x4_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i64x2_extmul_high_i32x4_u(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_q15mulr_sat_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_f32x4_ceil(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f32x4_floor(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f32x4_trunc(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f32x4_nearest(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f64x2_ceil(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f64x2_floor(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f64x2_trunc(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f64x2_nearest(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f32x4_abs(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f32x4_neg(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f32x4_sqrt(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f64x2_abs(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f64x2_neg(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f64x2_sqrt(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f32x4_demote_f64x2_zero(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f64x2_promote_low_f32x4(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f64x2_convert_low_i32x4_s(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f64x2_convert_low_i32x4_u(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_i32x4_trunc_sat_f32x4_s(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_i32x4_trunc_sat_f32x4_u(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_i32x4_trunc_sat_f64x2_s_zero(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_i32x4_trunc_sat_f64x2_u_zero(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f32x4_convert_i32x4_s(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_f32x4_convert_i32x4_u(&mut self) -> Self::Output {
+ self.check_v128_funary_op()
+ }
+ fn visit_v128_not(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i8x16_abs(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i8x16_neg(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i8x16_popcnt(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i16x8_abs(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i16x8_neg(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i32x4_abs(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i32x4_neg(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i64x2_abs(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i64x2_neg(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i16x8_extend_low_i8x16_s(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i16x8_extend_high_i8x16_s(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i16x8_extend_low_i8x16_u(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i16x8_extend_high_i8x16_u(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i32x4_extend_low_i16x8_s(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i32x4_extend_high_i16x8_s(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i32x4_extend_low_i16x8_u(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i32x4_extend_high_i16x8_u(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i64x2_extend_low_i32x4_s(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i64x2_extend_high_i32x4_s(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i64x2_extend_low_i32x4_u(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i64x2_extend_high_i32x4_u(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i16x8_extadd_pairwise_i8x16_s(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i16x8_extadd_pairwise_i8x16_u(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i32x4_extadd_pairwise_i16x8_s(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i32x4_extadd_pairwise_i16x8_u(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_v128_bitselect(&mut self) -> Self::Output {
+ self.pop_operand(Some(ValType::V128))?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ fn visit_i8x16_relaxed_swizzle(&mut self) -> Self::Output {
+ self.pop_operand(Some(ValType::V128))?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ fn visit_i32x4_relaxed_trunc_f32x4_s(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i32x4_relaxed_trunc_f32x4_u(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i32x4_relaxed_trunc_f64x2_s_zero(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_i32x4_relaxed_trunc_f64x2_u_zero(&mut self) -> Self::Output {
+ self.check_v128_unary_op()
+ }
+ fn visit_f32x4_relaxed_madd(&mut self) -> Self::Output {
+ self.check_v128_ternary_op()
+ }
+ fn visit_f32x4_relaxed_nmadd(&mut self) -> Self::Output {
+ self.check_v128_ternary_op()
+ }
+ fn visit_f64x2_relaxed_madd(&mut self) -> Self::Output {
+ self.check_v128_ternary_op()
+ }
+ fn visit_f64x2_relaxed_nmadd(&mut self) -> Self::Output {
+ self.check_v128_ternary_op()
+ }
+ fn visit_i8x16_relaxed_laneselect(&mut self) -> Self::Output {
+ self.check_v128_ternary_op()
+ }
+ fn visit_i16x8_relaxed_laneselect(&mut self) -> Self::Output {
+ self.check_v128_ternary_op()
+ }
+ fn visit_i32x4_relaxed_laneselect(&mut self) -> Self::Output {
+ self.check_v128_ternary_op()
+ }
+ fn visit_i64x2_relaxed_laneselect(&mut self) -> Self::Output {
+ self.check_v128_ternary_op()
+ }
+ fn visit_f32x4_relaxed_min(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_f32x4_relaxed_max(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_f64x2_relaxed_min(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_f64x2_relaxed_max(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_relaxed_q15mulr_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i16x8_relaxed_dot_i8x16_i7x16_s(&mut self) -> Self::Output {
+ self.check_v128_binary_op()
+ }
+ fn visit_i32x4_relaxed_dot_i8x16_i7x16_add_s(&mut self) -> Self::Output {
+ self.check_v128_ternary_op()
+ }
+ fn visit_v128_any_true(&mut self) -> Self::Output {
+ self.check_v128_bitmask_op()
+ }
+ fn visit_i8x16_all_true(&mut self) -> Self::Output {
+ self.check_v128_bitmask_op()
+ }
+ fn visit_i8x16_bitmask(&mut self) -> Self::Output {
+ self.check_v128_bitmask_op()
+ }
+ fn visit_i16x8_all_true(&mut self) -> Self::Output {
+ self.check_v128_bitmask_op()
+ }
+ fn visit_i16x8_bitmask(&mut self) -> Self::Output {
+ self.check_v128_bitmask_op()
+ }
+ fn visit_i32x4_all_true(&mut self) -> Self::Output {
+ self.check_v128_bitmask_op()
+ }
+ fn visit_i32x4_bitmask(&mut self) -> Self::Output {
+ self.check_v128_bitmask_op()
+ }
+ fn visit_i64x2_all_true(&mut self) -> Self::Output {
+ self.check_v128_bitmask_op()
+ }
+ fn visit_i64x2_bitmask(&mut self) -> Self::Output {
+ self.check_v128_bitmask_op()
+ }
+ fn visit_i8x16_shl(&mut self) -> Self::Output {
+ self.check_v128_shift_op()
+ }
+ fn visit_i8x16_shr_s(&mut self) -> Self::Output {
+ self.check_v128_shift_op()
+ }
+ fn visit_i8x16_shr_u(&mut self) -> Self::Output {
+ self.check_v128_shift_op()
+ }
+ fn visit_i16x8_shl(&mut self) -> Self::Output {
+ self.check_v128_shift_op()
+ }
+ fn visit_i16x8_shr_s(&mut self) -> Self::Output {
+ self.check_v128_shift_op()
+ }
+ fn visit_i16x8_shr_u(&mut self) -> Self::Output {
+ self.check_v128_shift_op()
+ }
+ fn visit_i32x4_shl(&mut self) -> Self::Output {
+ self.check_v128_shift_op()
+ }
+ fn visit_i32x4_shr_s(&mut self) -> Self::Output {
+ self.check_v128_shift_op()
+ }
+ fn visit_i32x4_shr_u(&mut self) -> Self::Output {
+ self.check_v128_shift_op()
+ }
+ fn visit_i64x2_shl(&mut self) -> Self::Output {
+ self.check_v128_shift_op()
+ }
+ fn visit_i64x2_shr_s(&mut self) -> Self::Output {
+ self.check_v128_shift_op()
+ }
+ fn visit_i64x2_shr_u(&mut self) -> Self::Output {
+ self.check_v128_shift_op()
+ }
+ fn visit_i8x16_swizzle(&mut self) -> Self::Output {
+ self.pop_operand(Some(ValType::V128))?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ fn visit_i8x16_shuffle(&mut self, lanes: [u8; 16]) -> Self::Output {
+ self.pop_operand(Some(ValType::V128))?;
+ self.pop_operand(Some(ValType::V128))?;
+ for i in lanes {
+ self.check_simd_lane_index(i, 32)?;
+ }
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ fn visit_v128_load8_splat(&mut self, memarg: MemArg) -> Self::Output {
+ let ty = self.check_memarg(memarg)?;
+ self.pop_operand(Some(ty))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ fn visit_v128_load16_splat(&mut self, memarg: MemArg) -> Self::Output {
+ let ty = self.check_memarg(memarg)?;
+ self.pop_operand(Some(ty))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ fn visit_v128_load32_splat(&mut self, memarg: MemArg) -> Self::Output {
+ let ty = self.check_memarg(memarg)?;
+ self.pop_operand(Some(ty))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ fn visit_v128_load32_zero(&mut self, memarg: MemArg) -> Self::Output {
+ self.visit_v128_load32_splat(memarg)
+ }
+ fn visit_v128_load64_splat(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_v128_load_op(memarg)
+ }
+ fn visit_v128_load64_zero(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_v128_load_op(memarg)
+ }
+ fn visit_v128_load8x8_s(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_v128_load_op(memarg)
+ }
+ fn visit_v128_load8x8_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_v128_load_op(memarg)
+ }
+ fn visit_v128_load16x4_s(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_v128_load_op(memarg)
+ }
+ fn visit_v128_load16x4_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_v128_load_op(memarg)
+ }
+ fn visit_v128_load32x2_s(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_v128_load_op(memarg)
+ }
+ fn visit_v128_load32x2_u(&mut self, memarg: MemArg) -> Self::Output {
+ self.check_v128_load_op(memarg)
+ }
+ fn visit_v128_load8_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output {
+ let idx = self.check_memarg(memarg)?;
+ self.check_simd_lane_index(lane, 16)?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.pop_operand(Some(idx))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ fn visit_v128_load16_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output {
+ let idx = self.check_memarg(memarg)?;
+ self.check_simd_lane_index(lane, 8)?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.pop_operand(Some(idx))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ fn visit_v128_load32_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output {
+ let idx = self.check_memarg(memarg)?;
+ self.check_simd_lane_index(lane, 4)?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.pop_operand(Some(idx))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ fn visit_v128_load64_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output {
+ let idx = self.check_memarg(memarg)?;
+ self.check_simd_lane_index(lane, 2)?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.pop_operand(Some(idx))?;
+ self.push_operand(ValType::V128)?;
+ Ok(())
+ }
+ fn visit_v128_store8_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output {
+ let idx = self.check_memarg(memarg)?;
+ self.check_simd_lane_index(lane, 16)?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.pop_operand(Some(idx))?;
+ Ok(())
+ }
+ fn visit_v128_store16_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output {
+ let idx = self.check_memarg(memarg)?;
+ self.check_simd_lane_index(lane, 8)?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.pop_operand(Some(idx))?;
+ Ok(())
+ }
+ fn visit_v128_store32_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output {
+ let idx = self.check_memarg(memarg)?;
+ self.check_simd_lane_index(lane, 4)?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.pop_operand(Some(idx))?;
+ Ok(())
+ }
+ fn visit_v128_store64_lane(&mut self, memarg: MemArg, lane: u8) -> Self::Output {
+ let idx = self.check_memarg(memarg)?;
+ self.check_simd_lane_index(lane, 2)?;
+ self.pop_operand(Some(ValType::V128))?;
+ self.pop_operand(Some(idx))?;
+ Ok(())
+ }
+ fn visit_memory_init(&mut self, segment: u32, mem: u32) -> Self::Output {
+ let ty = self.check_memory_index(mem)?;
+ match self.resources.data_count() {
+ None => bail!(self.offset, "data count section required"),
+ Some(count) if segment < count => {}
+ Some(_) => bail!(self.offset, "unknown data segment {}", segment),
+ }
+ self.pop_operand(Some(ValType::I32))?;
+ self.pop_operand(Some(ValType::I32))?;
+ self.pop_operand(Some(ty))?;
+ Ok(())
+ }
+ fn visit_data_drop(&mut self, segment: u32) -> Self::Output {
+ match self.resources.data_count() {
+ None => bail!(self.offset, "data count section required"),
+ Some(count) if segment < count => {}
+ Some(_) => bail!(self.offset, "unknown data segment {}", segment),
+ }
+ Ok(())
+ }
+ fn visit_memory_copy(&mut self, dst: u32, src: u32) -> Self::Output {
+ let dst_ty = self.check_memory_index(dst)?;
+ let src_ty = self.check_memory_index(src)?;
+
+ // The length operand here uses the smaller of the src/dst index
+ // types, which is i32 if either memory is 32-bit indexed
+ self.pop_operand(Some(match src_ty {
+ ValType::I32 => ValType::I32,
+ _ => dst_ty,
+ }))?;
+
+ // ... and the offset into each memory is required to be
+ // whatever the indexing type is for that memory
+ self.pop_operand(Some(src_ty))?;
+ self.pop_operand(Some(dst_ty))?;
+ Ok(())
+ }
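+ // Illustrative sketch (not part of the upstream sources): with the
+ // memory64 proposal the two memories may use different index types.
+ // Copying from a 32-bit-indexed source memory into a 64-bit-indexed
+ // destination therefore expects the stack `[i64 (dst offset),
+ // i32 (src offset), i32 (length)]`, the length taking the smaller of
+ // the two index types as checked above.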
+ fn visit_memory_fill(&mut self, mem: u32) -> Self::Output {
+ let ty = self.check_memory_index(mem)?;
+ self.pop_operand(Some(ty))?;
+ self.pop_operand(Some(ValType::I32))?;
+ self.pop_operand(Some(ty))?;
+ Ok(())
+ }
+ fn visit_memory_discard(&mut self, mem: u32) -> Self::Output {
+ let ty = self.check_memory_index(mem)?;
+ self.pop_operand(Some(ty))?;
+ self.pop_operand(Some(ty))?;
+ Ok(())
+ }
+ fn visit_table_init(&mut self, segment: u32, table: u32) -> Self::Output {
+ let table = match self.resources.table_at(table) {
+ Some(table) => table,
+ None => bail!(
+ self.offset,
+ "unknown table {}: table index out of bounds",
+ table
+ ),
+ };
+ let segment_ty = match self.resources.element_type_at(segment) {
+ Some(ty) => ty,
+ None => bail!(
+ self.offset,
+ "unknown elem segment {}: segment index out of bounds",
+ segment
+ ),
+ };
+ if segment_ty != table.element_type {
+ bail!(self.offset, "type mismatch");
+ }
+ self.pop_operand(Some(ValType::I32))?;
+ self.pop_operand(Some(ValType::I32))?;
+ self.pop_operand(Some(ValType::I32))?;
+ Ok(())
+ }
+ fn visit_elem_drop(&mut self, segment: u32) -> Self::Output {
+ if segment >= self.resources.element_count() {
+ bail!(
+ self.offset,
+ "unknown elem segment {}: segment index out of bounds",
+ segment
+ );
+ }
+ Ok(())
+ }
+ fn visit_table_copy(&mut self, dst_table: u32, src_table: u32) -> Self::Output {
+ let (src, dst) = match (
+ self.resources.table_at(src_table),
+ self.resources.table_at(dst_table),
+ ) {
+ (Some(a), Some(b)) => (a, b),
+ _ => bail!(self.offset, "table index out of bounds"),
+ };
+ if !self.resources.matches(
+ ValType::Ref(src.element_type),
+ ValType::Ref(dst.element_type),
+ ) {
+ bail!(self.offset, "type mismatch");
+ }
+ self.pop_operand(Some(ValType::I32))?;
+ self.pop_operand(Some(ValType::I32))?;
+ self.pop_operand(Some(ValType::I32))?;
+ Ok(())
+ }
+ fn visit_table_get(&mut self, table: u32) -> Self::Output {
+ let ty = match self.resources.table_at(table) {
+ Some(ty) => ty.element_type,
+ None => bail!(self.offset, "table index out of bounds"),
+ };
+ self.pop_operand(Some(ValType::I32))?;
+ self.push_operand(ValType::Ref(ty))?;
+ Ok(())
+ }
+ fn visit_table_set(&mut self, table: u32) -> Self::Output {
+ let ty = match self.resources.table_at(table) {
+ Some(ty) => ty.element_type,
+ None => bail!(self.offset, "table index out of bounds"),
+ };
+ self.pop_operand(Some(ValType::Ref(ty)))?;
+ self.pop_operand(Some(ValType::I32))?;
+ Ok(())
+ }
+ fn visit_table_grow(&mut self, table: u32) -> Self::Output {
+ let ty = match self.resources.table_at(table) {
+ Some(ty) => ty.element_type,
+ None => bail!(self.offset, "table index out of bounds"),
+ };
+ self.pop_operand(Some(ValType::I32))?;
+ self.pop_operand(Some(ValType::Ref(ty)))?;
+ self.push_operand(ValType::I32)?;
+ Ok(())
+ }
+ fn visit_table_size(&mut self, table: u32) -> Self::Output {
+ if self.resources.table_at(table).is_none() {
+ bail!(self.offset, "table index out of bounds");
+ }
+ self.push_operand(ValType::I32)?;
+ Ok(())
+ }
+ fn visit_table_fill(&mut self, table: u32) -> Self::Output {
+ let ty = match self.resources.table_at(table) {
+ Some(ty) => ty.element_type,
+ None => bail!(self.offset, "table index out of bounds"),
+ };
+ self.pop_operand(Some(ValType::I32))?;
+ self.pop_operand(Some(ValType::Ref(ty)))?;
+ self.pop_operand(Some(ValType::I32))?;
+ Ok(())
+ }
+}
+
+#[derive(Clone)]
+enum Either<A, B> {
+ A(A),
+ B(B),
+}
+
+impl<A, B> Iterator for Either<A, B>
+where
+ A: Iterator,
+ B: Iterator<Item = A::Item>,
+{
+ type Item = A::Item;
+ fn next(&mut self) -> Option<A::Item> {
+ match self {
+ Either::A(a) => a.next(),
+ Either::B(b) => b.next(),
+ }
+ }
+}
+
+impl<A, B> DoubleEndedIterator for Either<A, B>
+where
+ A: DoubleEndedIterator,
+ B: DoubleEndedIterator<Item = A::Item>,
+{
+ fn next_back(&mut self) -> Option<A::Item> {
+ match self {
+ Either::A(a) => a.next_back(),
+ Either::B(b) => b.next_back(),
+ }
+ }
+}
+
+impl<A, B> ExactSizeIterator for Either<A, B>
+where
+ A: ExactSizeIterator,
+ B: ExactSizeIterator<Item = A::Item>,
+{
+ fn len(&self) -> usize {
+ match self {
+ Either::A(a) => a.len(),
+ Either::B(b) => b.len(),
+ }
+ }
+}
+
+trait PreciseIterator: ExactSizeIterator + DoubleEndedIterator + Clone {}
+impl<T: ExactSizeIterator + DoubleEndedIterator + Clone> PreciseIterator for T {}
+
+impl Locals {
+ /// Defines another group of `count` local variables of type `ty`.
+ ///
+ /// Returns `true` if the definition was successful, or `false` if the
+ /// total number of local variables after this definition would exceed
+ /// the allowed maximum.
+ fn define(&mut self, count: u32, ty: ValType) -> bool {
+ match self.num_locals.checked_add(count) {
+ Some(n) => self.num_locals = n,
+ None => return false,
+ }
+ if self.num_locals > (MAX_WASM_FUNCTION_LOCALS as u32) {
+ return false;
+ }
+ for _ in 0..count {
+ if self.first.len() >= MAX_LOCALS_TO_TRACK {
+ break;
+ }
+ self.first.push(ty);
+ }
+ self.all.push((self.num_locals - 1, ty));
+ true
+ }
+
+ /// Returns the number of defined local variables.
+ pub(super) fn len_locals(&self) -> u32 {
+ self.num_locals
+ }
+
+ /// Returns the type of the local variable at the given index if any.
+ #[inline]
+ pub(super) fn get(&self, idx: u32) -> Option<ValType> {
+ match self.first.get(idx as usize) {
+ Some(ty) => Some(*ty),
+ None => self.get_bsearch(idx),
+ }
+ }
+
+ fn get_bsearch(&self, idx: u32) -> Option<ValType> {
+ match self.all.binary_search_by_key(&idx, |(idx, _)| *idx) {
+ // If this index would be inserted at the end of the list, then the
+ // index is out of bounds and we return `None`.
+ Err(i) if i == self.all.len() => None,
+
+ // If `Ok` is returned we found the index exactly, or if `Err` is
+ // returned, the position is that of the least index
+ // greater than `idx`, which is still the type of `idx` according
+ // to our "compressed" representation. In both cases we access the
+ // list at index `i`.
+ Ok(i) | Err(i) => Some(self.all[i].1),
+ }
+ }
+}
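+
+// Illustrative sketch of the "compressed" locals representation above
+// (not part of the upstream sources). After `define(3, I32)` followed
+// by `define(2, I64)` we have `num_locals == 5` and
+// `all == [(2, I32), (4, I64)]`. For an index past the `first`
+// fast-path, `get_bsearch(3)` binary-searches the keys `[2, 4]`, lands
+// on `Err(1)`, and returns `Some(I64)` from `all[1]`; an index of `5`
+// yields `Err(2) == all.len()` and therefore `None`.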
diff --git a/third_party/rust/wasmparser/src/validator/types.rs b/third_party/rust/wasmparser/src/validator/types.rs
new file mode 100644
index 0000000000..ce0559d34c
--- /dev/null
+++ b/third_party/rust/wasmparser/src/validator/types.rs
@@ -0,0 +1,2166 @@
+//! Types relating to type information provided by validation.
+
+use super::{component::ComponentState, core::Module};
+use crate::{
+ Export, ExternalKind, FuncType, GlobalType, Import, MemoryType, PrimitiveValType, RefType,
+ TableType, TypeRef, ValType,
+};
+use indexmap::{IndexMap, IndexSet};
+use std::collections::HashMap;
+use std::{
+ borrow::Borrow,
+ fmt,
+ hash::{Hash, Hasher},
+ mem,
+ ops::{Deref, DerefMut},
+ sync::Arc,
+};
+use url::Url;
+
+/// The maximum number of parameters in the canonical ABI that can be passed by value.
+///
+/// Functions that exceed this limit will instead pass parameters indirectly from
+/// linear memory via a single pointer parameter.
+const MAX_FLAT_FUNC_PARAMS: usize = 16;
+/// The maximum number of results in the canonical ABI that can be returned by a function.
+///
+/// Functions that exceed this limit have their results written to linear memory via an
+/// additional pointer parameter (imports) or return a single pointer value (exports).
+const MAX_FLAT_FUNC_RESULTS: usize = 1;
+
+/// The maximum lowered types, including a possible type for a return pointer parameter.
+const MAX_LOWERED_TYPES: usize = MAX_FLAT_FUNC_PARAMS + 1;
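+
+// Illustrative note (not from the upstream sources): under these limits
+// a component function whose flattened parameters exceed 16 core types
+// is lowered to take a single `i32` pointer into linear memory, and a
+// function with more than one flattened result communicates its results
+// through an extra pointer as well, which is why one additional slot is
+// reserved in `MAX_LOWERED_TYPES`.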
+
+/// Represents a kebab string slice used in validation.
+///
+/// This is a wrapper around `str` that ensures the slice is
+/// a valid kebab case string according to the component model
+/// specification.
+///
+/// It also provides an equality and hashing implementation
+/// that ignores ASCII case.
+#[derive(Debug, Eq)]
+#[repr(transparent)]
+pub struct KebabStr(str);
+
+impl KebabStr {
+ /// Creates a new kebab string slice.
+ ///
+ /// Returns `None` if the given string is not a valid kebab string.
+ pub fn new<'a>(s: impl AsRef<str> + 'a) -> Option<&'a Self> {
+ let s = Self::new_unchecked(s);
+ if s.is_kebab_case() {
+ Some(s)
+ } else {
+ None
+ }
+ }
+
+ pub(crate) fn new_unchecked<'a>(s: impl AsRef<str> + 'a) -> &'a Self {
+ // Safety: `KebabStr` is a transparent wrapper around `str`
+ // Therefore transmuting `&str` to `&KebabStr` is safe.
+ unsafe { std::mem::transmute::<_, &Self>(s.as_ref()) }
+ }
+
+ /// Gets the underlying string slice.
+ pub fn as_str(&self) -> &str {
+ &self.0
+ }
+
+ /// Converts the slice to an owned string.
+ pub fn to_kebab_string(&self) -> KebabString {
+ KebabString(self.to_string())
+ }
+
+ fn is_kebab_case(&self) -> bool {
+ let mut lower = false;
+ let mut upper = false;
+ for c in self.chars() {
+ match c {
+ 'a'..='z' if !lower && !upper => lower = true,
+ 'A'..='Z' if !lower && !upper => upper = true,
+ 'a'..='z' if lower => {}
+ 'A'..='Z' if upper => {}
+ '0'..='9' if lower || upper => {}
+ '-' if lower || upper => {
+ lower = false;
+ upper = false;
+ }
+ _ => return false,
+ }
+ }
+
+ !self.is_empty() && !self.ends_with('-')
+ }
+}
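+
+// Illustrative examples (not part of the upstream sources): per the
+// rules above, `KebabStr::new("foo-bar")` and `KebabStr::new("FOO-bar")`
+// both succeed, while `KebabStr::new("Foo-bar")` (mixed case within a
+// word), `KebabStr::new("7up")`, `KebabStr::new("foo-")`, and
+// `KebabStr::new("")` all return `None`. Comparison and hashing ignore
+// ASCII case, so `KebabStr::new("foo-bar") == KebabStr::new("FOO-BAR")`.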
+
+impl Deref for KebabStr {
+ type Target = str;
+
+ fn deref(&self) -> &str {
+ self.as_str()
+ }
+}
+
+impl PartialEq for KebabStr {
+ fn eq(&self, other: &Self) -> bool {
+ if self.len() != other.len() {
+ return false;
+ }
+
+ self.chars()
+ .zip(other.chars())
+ .all(|(a, b)| a.to_ascii_lowercase() == b.to_ascii_lowercase())
+ }
+}
+
+impl PartialEq<KebabString> for KebabStr {
+ fn eq(&self, other: &KebabString) -> bool {
+ self.eq(other.as_kebab_str())
+ }
+}
+
+impl Hash for KebabStr {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.len().hash(state);
+
+ for b in self.chars() {
+ b.to_ascii_lowercase().hash(state);
+ }
+ }
+}
+
+impl fmt::Display for KebabStr {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ (self as &str).fmt(f)
+ }
+}
+
+impl ToOwned for KebabStr {
+ type Owned = KebabString;
+
+ fn to_owned(&self) -> Self::Owned {
+ self.to_kebab_string()
+ }
+}
+
+/// Represents an owned kebab string for validation.
+///
+/// This is a wrapper around `String` that ensures the string is
+/// a valid kebab case string according to the component model
+/// specification.
+///
+/// It also provides an equality and hashing implementation
+/// that ignores ASCII case.
+#[derive(Debug, Clone, Eq)]
+pub struct KebabString(String);
+
+impl KebabString {
+ /// Creates a new kebab string.
+ ///
+ /// Returns `None` if the given string is not a valid kebab string.
+ pub fn new(s: impl Into<String>) -> Option<Self> {
+ let s = s.into();
+ if KebabStr::new(&s).is_some() {
+ Some(Self(s))
+ } else {
+ None
+ }
+ }
+
+ /// Gets the underlying string.
+ pub fn as_str(&self) -> &str {
+ self.0.as_str()
+ }
+
+ /// Converts the kebab string to a kebab string slice.
+ pub fn as_kebab_str(&self) -> &KebabStr {
+ // Safety: internal string is always valid kebab-case
+ KebabStr::new_unchecked(self.as_str())
+ }
+}
+
+impl Deref for KebabString {
+ type Target = KebabStr;
+
+ fn deref(&self) -> &Self::Target {
+ self.as_kebab_str()
+ }
+}
+
+impl Borrow<KebabStr> for KebabString {
+ fn borrow(&self) -> &KebabStr {
+ self.as_kebab_str()
+ }
+}
+
+impl PartialEq for KebabString {
+ fn eq(&self, other: &Self) -> bool {
+ self.as_kebab_str().eq(other.as_kebab_str())
+ }
+}
+
+impl PartialEq<KebabStr> for KebabString {
+ fn eq(&self, other: &KebabStr) -> bool {
+ self.as_kebab_str().eq(other)
+ }
+}
+
+impl Hash for KebabString {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.as_kebab_str().hash(state)
+ }
+}
+
+impl fmt::Display for KebabString {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.as_kebab_str().fmt(f)
+ }
+}
+
+impl From<KebabString> for String {
+ fn from(s: KebabString) -> String {
+ s.0
+ }
+}
+
+/// A simple alloc-free list of types used for calculating lowered function signatures.
+pub(crate) struct LoweredTypes {
+ types: [ValType; MAX_LOWERED_TYPES],
+ len: usize,
+ max: usize,
+}
+
+impl LoweredTypes {
+ fn new(max: usize) -> Self {
+ assert!(max <= MAX_LOWERED_TYPES);
+ Self {
+ types: [ValType::I32; MAX_LOWERED_TYPES],
+ len: 0,
+ max,
+ }
+ }
+
+ fn len(&self) -> usize {
+ self.len
+ }
+
+ fn maxed(&self) -> bool {
+ self.len == self.max
+ }
+
+ fn get_mut(&mut self, index: usize) -> Option<&mut ValType> {
+ if index < self.len {
+ Some(&mut self.types[index])
+ } else {
+ None
+ }
+ }
+
+ fn push(&mut self, ty: ValType) -> bool {
+ if self.maxed() {
+ return false;
+ }
+
+ self.types[self.len] = ty;
+ self.len += 1;
+ true
+ }
+
+ fn clear(&mut self) {
+ self.len = 0;
+ }
+
+ pub fn as_slice(&self) -> &[ValType] {
+ &self.types[..self.len]
+ }
+
+ pub fn iter(&self) -> impl Iterator<Item = ValType> + '_ {
+ self.as_slice().iter().copied()
+ }
+}
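+
+// Illustrative sketch (not part of the upstream sources): a
+// `LoweredTypes::new(2)` accepts `push(ValType::I32)` and
+// `push(ValType::I64)` but rejects a third push by returning `false`,
+// leaving `as_slice()` equal to `[I32, I64]`; callers can use a failed
+// push to detect that a signature does not flatten within the limit.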
+
+/// Represents information about a component function type lowering.
+pub(crate) struct LoweringInfo {
+ pub(crate) params: LoweredTypes,
+ pub(crate) results: LoweredTypes,
+ pub(crate) requires_memory: bool,
+ pub(crate) requires_realloc: bool,
+}
+
+impl LoweringInfo {
+ pub(crate) fn into_func_type(self) -> FuncType {
+ FuncType::new(
+ self.params.as_slice().iter().copied(),
+ self.results.as_slice().iter().copied(),
+ )
+ }
+}
+
+impl Default for LoweringInfo {
+ fn default() -> Self {
+ Self {
+ params: LoweredTypes::new(MAX_FLAT_FUNC_PARAMS),
+ results: LoweredTypes::new(MAX_FLAT_FUNC_RESULTS),
+ requires_memory: false,
+ requires_realloc: false,
+ }
+ }
+}
+
+fn push_primitive_wasm_types(ty: &PrimitiveValType, lowered_types: &mut LoweredTypes) -> bool {
+ match ty {
+ PrimitiveValType::Bool
+ | PrimitiveValType::S8
+ | PrimitiveValType::U8
+ | PrimitiveValType::S16
+ | PrimitiveValType::U16
+ | PrimitiveValType::S32
+ | PrimitiveValType::U32
+ | PrimitiveValType::Char => lowered_types.push(ValType::I32),
+ PrimitiveValType::S64 | PrimitiveValType::U64 => lowered_types.push(ValType::I64),
+ PrimitiveValType::Float32 => lowered_types.push(ValType::F32),
+ PrimitiveValType::Float64 => lowered_types.push(ValType::F64),
+ PrimitiveValType::String => {
+ lowered_types.push(ValType::I32) && lowered_types.push(ValType::I32)
+ }
+ }
+}
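+
+// Illustrative note (not from the upstream sources): every primitive
+// except `string` flattens to a single core type, while `string`
+// flattens to two `i32`s (its pointer and length in the canonical ABI).
+// A parameter list of `(string, u32)` therefore lowers to the core
+// types `[i32, i32, i32]`, provided the `LoweredTypes` budget is not
+// exhausted along the way.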
+
+/// Represents a unique identifier for a type known to a [`crate::Validator`].
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct TypeId {
+ /// The index into the global list of types.
+ pub(crate) index: usize,
+ /// The effective type size for the type.
+ ///
+ /// This is stored as part of the ID to avoid having to recurse through
+ /// the global type list when calculating type sizes.
+ pub(crate) type_size: u32,
+ /// A unique integer assigned to this type.
+ ///
+ /// The purpose of this field is to ensure that two different `TypeId`
+ /// representations can be handed out for two different aliased types within
+ /// a component that actually point to the same underlying type (as pointed
+ /// to by the `index` field).
+ unique_id: u32,
+}
+
+// The size of `TypeId` was seen to have a large-ish impact in #844, so this
+// assert ensures that it stays relatively small.
+const _: () = {
+ assert!(std::mem::size_of::<TypeId>() <= 16);
+};
+
+/// A unified type definition for validating WebAssembly modules and components.
+#[derive(Debug)]
+pub enum Type {
+ /// The definition is for a core function type.
+ Func(FuncType),
+ /// The definition is for a core module type.
+ ///
+ /// This variant is only supported when parsing a component.
+ Module(ModuleType),
+ /// The definition is for a core module instance type.
+ ///
+ /// This variant is only supported when parsing a component.
+ Instance(InstanceType),
+ /// The definition is for a component type.
+ ///
+ /// This variant is only supported when parsing a component.
+ Component(ComponentType),
+ /// The definition is for a component instance type.
+ ///
+ /// This variant is only supported when parsing a component.
+ ComponentInstance(ComponentInstanceType),
+ /// The definition is for a component function type.
+ ///
+ /// This variant is only supported when parsing a component.
+ ComponentFunc(ComponentFuncType),
+ /// The definition is for a component defined type.
+ ///
+ /// This variant is only supported when parsing a component.
+ Defined(ComponentDefinedType),
+}
+
+impl Type {
+ /// Converts the type to a core function type.
+ pub fn as_func_type(&self) -> Option<&FuncType> {
+ match self {
+ Self::Func(ty) => Some(ty),
+ _ => None,
+ }
+ }
+
+ /// Converts the type to a core module type.
+ pub fn as_module_type(&self) -> Option<&ModuleType> {
+ match self {
+ Self::Module(ty) => Some(ty),
+ _ => None,
+ }
+ }
+
+ /// Converts the type to a core module instance type.
+ pub fn as_instance_type(&self) -> Option<&InstanceType> {
+ match self {
+ Self::Instance(ty) => Some(ty),
+ _ => None,
+ }
+ }
+
+ /// Converts the type to a component type.
+ pub fn as_component_type(&self) -> Option<&ComponentType> {
+ match self {
+ Self::Component(ty) => Some(ty),
+ _ => None,
+ }
+ }
+
+ /// Converts the type to a component instance type.
+ pub fn as_component_instance_type(&self) -> Option<&ComponentInstanceType> {
+ match self {
+ Self::ComponentInstance(ty) => Some(ty),
+ _ => None,
+ }
+ }
+
+ /// Converts the type to a component function type.
+ pub fn as_component_func_type(&self) -> Option<&ComponentFuncType> {
+ match self {
+ Self::ComponentFunc(ty) => Some(ty),
+ _ => None,
+ }
+ }
+
+ /// Converts the type to a component defined type.
+ pub fn as_defined_type(&self) -> Option<&ComponentDefinedType> {
+ match self {
+ Self::Defined(ty) => Some(ty),
+ _ => None,
+ }
+ }
+
+ pub(crate) fn type_size(&self) -> u32 {
+ match self {
+ Self::Func(ty) => 1 + (ty.params().len() + ty.results().len()) as u32,
+ Self::Module(ty) => ty.type_size,
+ Self::Instance(ty) => ty.type_size,
+ Self::Component(ty) => ty.type_size,
+ Self::ComponentInstance(ty) => ty.type_size,
+ Self::ComponentFunc(ty) => ty.type_size,
+ Self::Defined(ty) => ty.type_size(),
+ }
+ }
+}
+
+/// A component value type.
+#[derive(Debug, Clone, Copy)]
+pub enum ComponentValType {
+ /// The value type is one of the primitive types.
+ Primitive(PrimitiveValType),
+ /// The type is represented with the given type identifier.
+ Type(TypeId),
+}
+
+impl ComponentValType {
+ pub(crate) fn requires_realloc(&self, types: &TypeList) -> bool {
+ match self {
+ ComponentValType::Primitive(ty) => ty.requires_realloc(),
+ ComponentValType::Type(ty) => types[*ty]
+ .as_defined_type()
+ .unwrap()
+ .requires_realloc(types),
+ }
+ }
+
+ /// Determines if component value type `a` is a subtype of `b`.
+ pub fn is_subtype_of(a: &Self, at: TypesRef, b: &Self, bt: TypesRef) -> bool {
+ Self::internal_is_subtype_of(a, at.list, b, bt.list)
+ }
+
+ pub(crate) fn internal_is_subtype_of(a: &Self, at: &TypeList, b: &Self, bt: &TypeList) -> bool {
+ match (a, b) {
+ (ComponentValType::Primitive(a), ComponentValType::Primitive(b)) => {
+ PrimitiveValType::is_subtype_of(*a, *b)
+ }
+ (ComponentValType::Type(a), ComponentValType::Type(b)) => {
+ ComponentDefinedType::internal_is_subtype_of(
+ at[*a].as_defined_type().unwrap(),
+ at,
+ bt[*b].as_defined_type().unwrap(),
+ bt,
+ )
+ }
+ (ComponentValType::Primitive(a), ComponentValType::Type(b)) => {
+ match bt[*b].as_defined_type().unwrap() {
+ ComponentDefinedType::Primitive(b) => PrimitiveValType::is_subtype_of(*a, *b),
+ _ => false,
+ }
+ }
+ (ComponentValType::Type(a), ComponentValType::Primitive(b)) => {
+ match at[*a].as_defined_type().unwrap() {
+ ComponentDefinedType::Primitive(a) => PrimitiveValType::is_subtype_of(*a, *b),
+ _ => false,
+ }
+ }
+ }
+ }
+
+ fn push_wasm_types(&self, types: &TypeList, lowered_types: &mut LoweredTypes) -> bool {
+ match self {
+ Self::Primitive(ty) => push_primitive_wasm_types(ty, lowered_types),
+ Self::Type(id) => types[*id]
+ .as_defined_type()
+ .unwrap()
+ .push_wasm_types(types, lowered_types),
+ }
+ }
+
+ pub(crate) fn type_size(&self) -> u32 {
+ match self {
+ Self::Primitive(_) => 1,
+ Self::Type(id) => id.type_size,
+ }
+ }
+}
+
+/// The entity type for imports and exports of a module.
+#[derive(Debug, Clone, Copy)]
+pub enum EntityType {
+ /// The entity is a function.
+ Func(TypeId),
+ /// The entity is a table.
+ Table(TableType),
+ /// The entity is a memory.
+ Memory(MemoryType),
+ /// The entity is a global.
+ Global(GlobalType),
+ /// The entity is a tag.
+ Tag(TypeId),
+}
+
+impl EntityType {
+ /// Determines if entity type `a` is a subtype of `b`.
+ pub fn is_subtype_of(a: &Self, at: TypesRef, b: &Self, bt: TypesRef) -> bool {
+ Self::internal_is_subtype_of(a, at.list, b, bt.list)
+ }
+
+ pub(crate) fn internal_is_subtype_of(a: &Self, at: &TypeList, b: &Self, bt: &TypeList) -> bool {
+ macro_rules! limits_match {
+ ($a:expr, $b:expr) => {{
+ let a = $a;
+ let b = $b;
+ a.initial >= b.initial
+ && match b.maximum {
+ Some(b_max) => match a.maximum {
+ Some(a_max) => a_max <= b_max,
+ None => false,
+ },
+ None => true,
+ }
+ }};
+ }
+
+ match (a, b) {
+ (EntityType::Func(a), EntityType::Func(b)) => {
+ at[*a].as_func_type().unwrap() == bt[*b].as_func_type().unwrap()
+ }
+ (EntityType::Table(a), EntityType::Table(b)) => {
+ a.element_type == b.element_type && limits_match!(a, b)
+ }
+ (EntityType::Memory(a), EntityType::Memory(b)) => {
+ a.shared == b.shared && a.memory64 == b.memory64 && limits_match!(a, b)
+ }
+ (EntityType::Global(a), EntityType::Global(b)) => a == b,
+ (EntityType::Tag(a), EntityType::Tag(b)) => {
+ at[*a].as_func_type().unwrap() == bt[*b].as_func_type().unwrap()
+ }
+ _ => false,
+ }
+ }
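+
+ // Comment-only example of the limits rule above: a memory declared as
+ // `(memory 2 4)` (initial 2, max 4) is a valid subtype of an expected
+ // `(memory 1 5)`, since a larger minimum and a smaller maximum are both
+ // allowed, while `(memory 1)` with no maximum is not a subtype of
+ // `(memory 1 5)`, because the expected maximum would not be guaranteed.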
+
+ pub(crate) fn desc(&self) -> &'static str {
+ match self {
+ Self::Func(_) => "function",
+ Self::Table(_) => "table",
+ Self::Memory(_) => "memory",
+ Self::Global(_) => "global",
+ Self::Tag(_) => "tag",
+ }
+ }
+
+ pub(crate) fn type_size(&self) -> u32 {
+ match self {
+ Self::Func(id) | Self::Tag(id) => id.type_size,
+ Self::Table(_) | Self::Memory(_) | Self::Global(_) => 1,
+ }
+ }
+}
+
+trait ModuleImportKey {
+ fn module(&self) -> &str;
+ fn name(&self) -> &str;
+}
+
+impl<'a> Borrow<dyn ModuleImportKey + 'a> for (String, String) {
+ fn borrow(&self) -> &(dyn ModuleImportKey + 'a) {
+ self
+ }
+}
+
+impl Hash for (dyn ModuleImportKey + '_) {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.module().hash(state);
+ self.name().hash(state);
+ }
+}
+
+impl PartialEq for (dyn ModuleImportKey + '_) {
+ fn eq(&self, other: &Self) -> bool {
+ self.module() == other.module() && self.name() == other.name()
+ }
+}
+
+impl Eq for (dyn ModuleImportKey + '_) {}
+
+impl ModuleImportKey for (String, String) {
+ fn module(&self) -> &str {
+ &self.0
+ }
+
+ fn name(&self) -> &str {
+ &self.1
+ }
+}
+
+impl ModuleImportKey for (&str, &str) {
+ fn module(&self) -> &str {
+ self.0
+ }
+
+ fn name(&self) -> &str {
+ self.1
+ }
+}
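+
+// Comment-only note on the pattern above: implementing `Borrow<dyn
+// ModuleImportKey>` for `(String, String)` (plus `Hash`/`Eq` on the trait
+// object) lets the owned-key `IndexMap` in `ModuleType` be queried with a
+// borrowed `(&str, &str)` and no allocation, as `lookup_import` below does:
+//
+//     // hypothetical lookup, assuming `module_ty: &ModuleType`
+//     let entity = module_ty.imports.get(&("env", "memory") as &dyn ModuleImportKey);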
+
+/// Represents a core module type.
+#[derive(Debug, Clone)]
+pub struct ModuleType {
+ /// The effective type size for the module type.
+ pub(crate) type_size: u32,
+ /// The imports of the module type.
+ pub imports: IndexMap<(String, String), EntityType>,
+ /// The exports of the module type.
+ pub exports: IndexMap<String, EntityType>,
+}
+
+impl ModuleType {
+ /// Looks up an import by its module and name.
+ ///
+ /// Returns `None` if the import was not found.
+ pub fn lookup_import(&self, module: &str, name: &str) -> Option<&EntityType> {
+ self.imports.get(&(module, name) as &dyn ModuleImportKey)
+ }
+
+ /// Determines if module type `a` is a subtype of `b`.
+ pub fn is_subtype_of(a: &Self, at: TypesRef, b: &Self, bt: TypesRef) -> bool {
+ Self::internal_is_subtype_of(a, at.list, b, bt.list)
+ }
+
+ pub(crate) fn internal_is_subtype_of(a: &Self, at: &TypeList, b: &Self, bt: &TypeList) -> bool {
+ // For module type subtyping, all exports in the other module type
+ // (`b`) must be present in this module type's (`a`'s) exports (i.e.
+ // `a` may export *more* than `b` requires).
+ // However, for imports, the check is reversed: `a` may import *less*
+ // than `b` allows, but every import `a` declares must be present in `b`.
+ a.imports.iter().all(|(k, a)| match b.imports.get(k) {
+ Some(b) => EntityType::internal_is_subtype_of(b, bt, a, at),
+ None => false,
+ }) && b.exports.iter().all(|(k, b)| match a.exports.get(k) {
+ Some(a) => EntityType::internal_is_subtype_of(a, at, b, bt),
+ None => false,
+ })
+ }
+}
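+
+// Comment-only example of the direction of the checks above (`a` <: `b`),
+// with illustrative names: a module type exporting `f: func` and `g: func`
+// with no imports is a subtype of one exporting only `f: func` and importing
+// `env.mem: memory`, because the subtype may export more and import less than
+// the supertype describes.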
+
+/// Represents the kind of module instance type.
+#[derive(Debug, Clone)]
+pub enum InstanceTypeKind {
+ /// The instance type is the result of instantiating a module type.
+ Instantiated(TypeId),
+ /// The instance type is the result of instantiating from exported items.
+ Exports(IndexMap<String, EntityType>),
+}
+
+/// Represents a module instance type.
+#[derive(Debug, Clone)]
+pub struct InstanceType {
+ /// The effective type size for the module instance type.
+ pub(crate) type_size: u32,
+ /// The kind of module instance type.
+ pub kind: InstanceTypeKind,
+}
+
+impl InstanceType {
+ /// Gets the exports of the instance type.
+ pub fn exports<'a>(&'a self, types: TypesRef<'a>) -> &'a IndexMap<String, EntityType> {
+ self.internal_exports(types.list)
+ }
+
+ pub(crate) fn internal_exports<'a>(
+ &'a self,
+ types: &'a TypeList,
+ ) -> &'a IndexMap<String, EntityType> {
+ match &self.kind {
+ InstanceTypeKind::Instantiated(id) => &types[*id].as_module_type().unwrap().exports,
+ InstanceTypeKind::Exports(exports) => exports,
+ }
+ }
+}
+
+/// The entity type for imports and exports of a component.
+#[derive(Debug, Clone, Copy)]
+pub enum ComponentEntityType {
+ /// The entity is a core module.
+ Module(TypeId),
+ /// The entity is a function.
+ Func(TypeId),
+ /// The entity is a value.
+ Value(ComponentValType),
+ /// The entity is a type.
+ Type {
+ /// This is the identifier of the type that was referenced when this
+ /// entity was created.
+ referenced: TypeId,
+ /// This is the identifier of the type that was created when this type
+ /// was imported or exported from the component.
+ ///
+ /// Note that the underlying type information for the `referenced`
+ /// field and for this `created` field is the same, but these two types
+ /// will hash to different values.
+ created: TypeId,
+ },
+ /// The entity is a component instance.
+ Instance(TypeId),
+ /// The entity is a component.
+ Component(TypeId),
+}
+
+impl ComponentEntityType {
+ /// Determines if component entity type `a` is a subtype of `b`.
+ pub fn is_subtype_of(a: &Self, at: TypesRef, b: &Self, bt: TypesRef) -> bool {
+ Self::internal_is_subtype_of(a, at.list, b, bt.list)
+ }
+
+ pub(crate) fn internal_is_subtype_of(a: &Self, at: &TypeList, b: &Self, bt: &TypeList) -> bool {
+ match (a, b) {
+ (Self::Module(a), Self::Module(b)) => ModuleType::internal_is_subtype_of(
+ at[*a].as_module_type().unwrap(),
+ at,
+ bt[*b].as_module_type().unwrap(),
+ bt,
+ ),
+ (Self::Func(a), Self::Func(b)) => ComponentFuncType::internal_is_subtype_of(
+ at[*a].as_component_func_type().unwrap(),
+ at,
+ bt[*b].as_component_func_type().unwrap(),
+ bt,
+ ),
+ (Self::Value(a), Self::Value(b)) => {
+ ComponentValType::internal_is_subtype_of(a, at, b, bt)
+ }
+ (Self::Type { referenced: a, .. }, Self::Type { referenced: b, .. }) => {
+ ComponentDefinedType::internal_is_subtype_of(
+ at[*a].as_defined_type().unwrap(),
+ at,
+ bt[*b].as_defined_type().unwrap(),
+ bt,
+ )
+ }
+ (Self::Instance(a), Self::Instance(b)) => {
+ ComponentInstanceType::internal_is_subtype_of(
+ at[*a].as_component_instance_type().unwrap(),
+ at,
+ bt[*b].as_component_instance_type().unwrap(),
+ bt,
+ )
+ }
+ (Self::Component(a), Self::Component(b)) => ComponentType::internal_is_subtype_of(
+ at[*a].as_component_type().unwrap(),
+ at,
+ bt[*b].as_component_type().unwrap(),
+ bt,
+ ),
+ _ => false,
+ }
+ }
+
+ pub(crate) fn desc(&self) -> &'static str {
+ match self {
+ Self::Module(_) => "module",
+ Self::Func(_) => "function",
+ Self::Value(_) => "value",
+ Self::Type { .. } => "type",
+ Self::Instance(_) => "instance",
+ Self::Component(_) => "component",
+ }
+ }
+
+ pub(crate) fn type_size(&self) -> u32 {
+ match self {
+ Self::Module(ty)
+ | Self::Func(ty)
+ | Self::Type { referenced: ty, .. }
+ | Self::Instance(ty)
+ | Self::Component(ty) => ty.type_size,
+ Self::Value(ty) => ty.type_size(),
+ }
+ }
+}
+
+/// Represents a type of a component.
+#[derive(Debug, Clone)]
+pub struct ComponentType {
+ /// The effective type size for the component type.
+ pub(crate) type_size: u32,
+ /// The imports of the component type.
+ pub imports: IndexMap<KebabString, (Option<Url>, ComponentEntityType)>,
+ /// The exports of the component type.
+ pub exports: IndexMap<KebabString, (Option<Url>, ComponentEntityType)>,
+}
+
+impl ComponentType {
+ /// Determines if component type `a` is a subtype of `b`.
+ pub fn is_subtype_of(a: &Self, at: TypesRef, b: &Self, bt: TypesRef) -> bool {
+ Self::internal_is_subtype_of(a, at.list, b, bt.list)
+ }
+
+ pub(crate) fn internal_is_subtype_of(a: &Self, at: &TypeList, b: &Self, bt: &TypeList) -> bool {
+ // For component type subtyping, all exports in the other component
+ // type (`b`) must be present in this component type's (`a`'s) exports
+ // (i.e. `a` may export *more* than `b` requires).
+ // However, for imports, the check is reversed: `a` may import *less*
+ // than `b` allows, but every import `a` declares must be present in `b`.
+ a.imports.iter().all(|(k, (_, a))| match b.imports.get(k) {
+ Some((_, b)) => ComponentEntityType::internal_is_subtype_of(b, bt, a, at),
+ None => false,
+ }) && b.exports.iter().all(|(k, (_, b))| match a.exports.get(k) {
+ Some((_, a)) => ComponentEntityType::internal_is_subtype_of(a, at, b, bt),
+ None => false,
+ })
+ }
+}
+
+/// Represents the kind of a component instance.
+#[derive(Debug, Clone)]
+pub enum ComponentInstanceTypeKind {
+ /// The instance type is from a definition.
+ Defined(IndexMap<KebabString, (Option<Url>, ComponentEntityType)>),
+ /// The instance type is the result of instantiating a component type.
+ Instantiated(TypeId),
+ /// The instance type is the result of instantiating from exported items.
+ Exports(IndexMap<KebabString, (Option<Url>, ComponentEntityType)>),
+}
+
+/// Represents a type of a component instance.
+#[derive(Debug, Clone)]
+pub struct ComponentInstanceType {
+ /// The effective type size for the instance type.
+ pub(crate) type_size: u32,
+ /// The kind of instance type.
+ pub kind: ComponentInstanceTypeKind,
+}
+
+impl ComponentInstanceType {
+ /// Gets the exports of the instance type.
+ pub fn exports<'a>(
+ &'a self,
+ types: TypesRef<'a>,
+ ) -> impl ExactSizeIterator<Item = (&'a KebabStr, &'a Option<Url>, ComponentEntityType)> + Clone
+ {
+ self.internal_exports(types.list)
+ .iter()
+ .map(|(n, (u, t))| (n.as_kebab_str(), u, *t))
+ }
+
+ pub(crate) fn internal_exports<'a>(
+ &'a self,
+ types: &'a TypeList,
+ ) -> &'a IndexMap<KebabString, (Option<Url>, ComponentEntityType)> {
+ match &self.kind {
+ ComponentInstanceTypeKind::Defined(exports)
+ | ComponentInstanceTypeKind::Exports(exports) => exports,
+ ComponentInstanceTypeKind::Instantiated(id) => {
+ &types[*id].as_component_type().unwrap().exports
+ }
+ }
+ }
+
+ /// Determines if component instance type `a` is a subtype of `b`.
+ pub fn is_subtype_of(a: &Self, at: TypesRef, b: &Self, bt: TypesRef) -> bool {
+ Self::internal_is_subtype_of(a, at.list, b, bt.list)
+ }
+
+ pub(crate) fn internal_is_subtype_of(a: &Self, at: &TypeList, b: &Self, bt: &TypeList) -> bool {
+ let exports = a.internal_exports(at);
+
+ // For instance type subtyping, all exports in the other instance type
+ // (`b`) must be present in this instance type's (`a`'s) exports (i.e.
+ // `a` may export *more* than `b` requires).
+ b.internal_exports(bt)
+ .iter()
+ .all(|(k, (_, b))| match exports.get(k) {
+ Some((_, a)) => ComponentEntityType::internal_is_subtype_of(a, at, b, bt),
+ None => false,
+ })
+ }
+}
+
+/// Represents a type of a component function.
+#[derive(Debug, Clone)]
+pub struct ComponentFuncType {
+ /// The effective type size for the component function type.
+ pub(crate) type_size: u32,
+ /// The function parameters.
+ pub params: Box<[(KebabString, ComponentValType)]>,
+ /// The function's results.
+ pub results: Box<[(Option<KebabString>, ComponentValType)]>,
+}
+
+impl ComponentFuncType {
+ /// Determines if component function type `a` is a subtype of `b`.
+ pub fn is_subtype_of(a: &Self, at: TypesRef, b: &Self, bt: TypesRef) -> bool {
+ Self::internal_is_subtype_of(a, at.list, b, bt.list)
+ }
+
+ pub(crate) fn internal_is_subtype_of(a: &Self, at: &TypeList, b: &Self, bt: &TypeList) -> bool {
+ // Note that this intentionally diverges from the upstream specification
+ // in terms of subtyping. This is a full type-equality check which
+ // ensures that the structure of `a` exactly matches the structure of
+ // `b`. The rationale for this is:
+ //
+ // * Primarily, subtyping based on function types is not implemented in
+ // Wasmtime. This includes both subtyping a host import and
+ // additionally handling subtyping as functions cross component
+ // boundaries. It is not yet clear how to handle host import subtyping
+ // (or component export subtyping) at all. The subtyping of functions
+ // between components could more easily be handled by extending the
+ // `fact` compiler, but that hasn't been done yet.
+ //
+ // * The upstream specification is currently pretty intentionally vague
+ // precisely what subtyping is allowed. Implementing a strict check
+ // here is intended to be a conservative starting point for the
+ // component model which can be extended in the future if necessary.
+ //
+ // * The interaction with subtyping on bindings generation, for example,
+ // is a tricky problem that doesn't have a clear answer at this time.
+ // Effectively this is more rationale for being conservative in the
+ // first pass of the component model.
+ //
+ // So, in conclusion, the test here (and other places that reference
+ // this comment) is for exact type equality with no differences.
+ a.params.len() == b.params.len()
+ && a.results.len() == b.results.len()
+ && a.params
+ .iter()
+ .zip(b.params.iter())
+ .all(|((an, a), (bn, b))| {
+ an == bn && ComponentValType::internal_is_subtype_of(a, at, b, bt)
+ })
+ && a.results
+ .iter()
+ .zip(b.results.iter())
+ .all(|((an, a), (bn, b))| {
+ an == bn && ComponentValType::internal_is_subtype_of(a, at, b, bt)
+ })
+ }
+
+ /// Lowers the component function type to core parameter and result types for the
+ /// canonical ABI.
+ pub(crate) fn lower(&self, types: &TypeList, import: bool) -> LoweringInfo {
+ let mut info = LoweringInfo::default();
+
+ for (_, ty) in self.params.iter() {
+ // When `import` is false, it means we're lifting a core function;
+ // check if the parameters need realloc.
+ if !import && !info.requires_realloc {
+ info.requires_realloc = ty.requires_realloc(types);
+ }
+
+ if !ty.push_wasm_types(types, &mut info.params) {
+ // Too many parameters to pass directly: the function will instead
+ // take a single `i32` pointer parameter and pass the arguments via
+ // linear memory.
+ info.params.clear();
+ assert!(info.params.push(ValType::I32));
+ info.requires_memory = true;
+
+ // We need realloc as well when lifting a function
+ if !import {
+ info.requires_realloc = true;
+ }
+ break;
+ }
+ }
+
+ for (_, ty) in self.results.iter() {
+ // When `import` is true, it means we're lowering a component function;
+ // check if the results need realloc.
+ if import && !info.requires_realloc {
+ info.requires_realloc = ty.requires_realloc(types);
+ }
+
+ if !ty.push_wasm_types(types, &mut info.results) {
+ // Too many results to return directly: either a retptr parameter
+ // will be used (import) or a single pointer will be returned
+ // (export).
+ info.results.clear();
+ if import {
+ info.params.max = MAX_LOWERED_TYPES;
+ assert!(info.params.push(ValType::I32));
+ } else {
+ assert!(info.results.push(ValType::I32));
+ }
+ info.requires_memory = true;
+ break;
+ }
+ }
+
+ // Memory is always required when realloc is required
+ info.requires_memory |= info.requires_realloc;
+
+ info
+ }
+}
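+
+// Comment-only sketch of the lowering rules above, assuming the flat-result
+// limit `MAX_FLAT_FUNC_RESULTS` is 1 as in the canonical ABI: lowering an
+// *imported* component function that returns a `string` cannot return the
+// (ptr, len) pair directly, so the flat results are cleared, a trailing `i32`
+// retptr parameter is appended, `requires_memory` is set, and
+// `requires_realloc` is set because `string` requires reallocation when
+// lowering.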
+
+/// Represents a variant case.
+#[derive(Debug, Clone)]
+pub struct VariantCase {
+ /// The variant case type.
+ pub ty: Option<ComponentValType>,
+ /// The name of the variant case refined by this one.
+ pub refines: Option<KebabString>,
+}
+
+/// Represents a record type.
+#[derive(Debug, Clone)]
+pub struct RecordType {
+ /// The effective type size for the record type.
+ pub(crate) type_size: u32,
+ /// The map of record fields.
+ pub fields: IndexMap<KebabString, ComponentValType>,
+}
+
+/// Represents a variant type.
+#[derive(Debug, Clone)]
+pub struct VariantType {
+ /// The effective type size for the variant type.
+ pub(crate) type_size: u32,
+ /// The map of variant cases.
+ pub cases: IndexMap<KebabString, VariantCase>,
+}
+
+/// Represents a tuple type.
+#[derive(Debug, Clone)]
+pub struct TupleType {
+ /// The effective type size for the tuple type.
+ pub(crate) type_size: u32,
+ /// The types of the tuple.
+ pub types: Box<[ComponentValType]>,
+}
+
+/// Represents a union type.
+#[derive(Debug, Clone)]
+pub struct UnionType {
+ /// The effective type size for the union type.
+ pub(crate) type_size: u32,
+ /// The types of the union.
+ pub types: Box<[ComponentValType]>,
+}
+
+/// Represents a component defined type.
+#[derive(Debug, Clone)]
+pub enum ComponentDefinedType {
+ /// The type is a primitive value type.
+ Primitive(PrimitiveValType),
+ /// The type is a record.
+ Record(RecordType),
+ /// The type is a variant.
+ Variant(VariantType),
+ /// The type is a list.
+ List(ComponentValType),
+ /// The type is a tuple.
+ Tuple(TupleType),
+ /// The type is a set of flags.
+ Flags(IndexSet<KebabString>),
+ /// The type is an enumeration.
+ Enum(IndexSet<KebabString>),
+ /// The type is a union.
+ Union(UnionType),
+ /// The type is an `option`.
+ Option(ComponentValType),
+ /// The type is a `result`.
+ Result {
+ /// The `ok` type.
+ ok: Option<ComponentValType>,
+ /// The `error` type.
+ err: Option<ComponentValType>,
+ },
+}
+
+impl ComponentDefinedType {
+ pub(crate) fn requires_realloc(&self, types: &TypeList) -> bool {
+ match self {
+ Self::Primitive(ty) => ty.requires_realloc(),
+ Self::Record(r) => r.fields.values().any(|ty| ty.requires_realloc(types)),
+ Self::Variant(v) => v.cases.values().any(|case| {
+ case.ty
+ .map(|ty| ty.requires_realloc(types))
+ .unwrap_or(false)
+ }),
+ Self::List(_) => true,
+ Self::Tuple(t) => t.types.iter().any(|ty| ty.requires_realloc(types)),
+ Self::Union(u) => u.types.iter().any(|ty| ty.requires_realloc(types)),
+ Self::Flags(_) | Self::Enum(_) => false,
+ Self::Option(ty) => ty.requires_realloc(types),
+ Self::Result { ok, err } => {
+ ok.map(|ty| ty.requires_realloc(types)).unwrap_or(false)
+ || err.map(|ty| ty.requires_realloc(types)).unwrap_or(false)
+ }
+ }
+ }
+
+ /// Determines if component defined type `a` is a subtype of `b`.
+ pub fn is_subtype_of(a: &Self, at: TypesRef, b: &Self, bt: TypesRef) -> bool {
+ Self::internal_is_subtype_of(a, at.list, b, bt.list)
+ }
+
+ pub(crate) fn internal_is_subtype_of(a: &Self, at: &TypeList, b: &Self, bt: &TypeList) -> bool {
+ // Note that the implementation of subtyping here diverges from the
+ // upstream specification intentionally, see the documentation on
+ // function subtyping for more information.
+ match (a, b) {
+ (Self::Primitive(a), Self::Primitive(b)) => PrimitiveValType::is_subtype_of(*a, *b),
+ (Self::Record(a), Self::Record(b)) => {
+ a.fields.len() == b.fields.len()
+ && a.fields
+ .iter()
+ .zip(b.fields.iter())
+ .all(|((aname, a), (bname, b))| {
+ aname == bname && ComponentValType::internal_is_subtype_of(a, at, b, bt)
+ })
+ }
+ (Self::Variant(a), Self::Variant(b)) => {
+ a.cases.len() == b.cases.len()
+ && a.cases
+ .iter()
+ .zip(b.cases.iter())
+ .all(|((aname, a), (bname, b))| {
+ aname == bname
+ && match (&a.ty, &b.ty) {
+ (Some(a), Some(b)) => {
+ ComponentValType::internal_is_subtype_of(a, at, b, bt)
+ }
+ (None, None) => true,
+ _ => false,
+ }
+ })
+ }
+ (Self::List(a), Self::List(b)) | (Self::Option(a), Self::Option(b)) => {
+ ComponentValType::internal_is_subtype_of(a, at, b, bt)
+ }
+ (Self::Tuple(a), Self::Tuple(b)) => {
+ if a.types.len() != b.types.len() {
+ return false;
+ }
+ a.types
+ .iter()
+ .zip(b.types.iter())
+ .all(|(a, b)| ComponentValType::internal_is_subtype_of(a, at, b, bt))
+ }
+ (Self::Union(a), Self::Union(b)) => {
+ if a.types.len() != b.types.len() {
+ return false;
+ }
+ a.types
+ .iter()
+ .zip(b.types.iter())
+ .all(|(a, b)| ComponentValType::internal_is_subtype_of(a, at, b, bt))
+ }
+ (Self::Flags(a), Self::Flags(b)) | (Self::Enum(a), Self::Enum(b)) => {
+ a.len() == b.len() && a.iter().eq(b.iter())
+ }
+ (Self::Result { ok: ao, err: ae }, Self::Result { ok: bo, err: be }) => {
+ Self::is_optional_subtype_of(*ao, at, *bo, bt)
+ && Self::is_optional_subtype_of(*ae, at, *be, bt)
+ }
+ _ => false,
+ }
+ }
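+
+ // Comment-only example: under the exact-match rules above, a
+ // `record { a: u32, b: string }` is a subtype of itself but not of
+ // `record { a: u32 }` or of a record with the same fields in a different
+ // order, since field count, names, order, and types must all match.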
+
+ pub(crate) fn type_size(&self) -> u32 {
+ match self {
+ Self::Primitive(_) => 1,
+ Self::Flags(_) | Self::Enum(_) => 1,
+ Self::Record(r) => r.type_size,
+ Self::Variant(v) => v.type_size,
+ Self::Tuple(t) => t.type_size,
+ Self::Union(u) => u.type_size,
+ Self::List(ty) | Self::Option(ty) => ty.type_size(),
+ Self::Result { ok, err } => {
+ ok.map(|ty| ty.type_size()).unwrap_or(1) + err.map(|ty| ty.type_size()).unwrap_or(1)
+ }
+ }
+ }
+
+ fn is_optional_subtype_of(
+ a: Option<ComponentValType>,
+ at: &TypeList,
+ b: Option<ComponentValType>,
+ bt: &TypeList,
+ ) -> bool {
+ match (a, b) {
+ (None, None) => true,
+ (Some(a), Some(b)) => ComponentValType::internal_is_subtype_of(&a, at, &b, bt),
+ _ => false,
+ }
+ }
+ fn push_wasm_types(&self, types: &TypeList, lowered_types: &mut LoweredTypes) -> bool {
+ match self {
+ Self::Primitive(ty) => push_primitive_wasm_types(ty, lowered_types),
+ Self::Record(r) => r
+ .fields
+ .iter()
+ .all(|(_, ty)| ty.push_wasm_types(types, lowered_types)),
+ Self::Variant(v) => Self::push_variant_wasm_types(
+ v.cases.iter().filter_map(|(_, case)| case.ty.as_ref()),
+ types,
+ lowered_types,
+ ),
+ Self::List(_) => lowered_types.push(ValType::I32) && lowered_types.push(ValType::I32),
+ Self::Tuple(t) => t
+ .types
+ .iter()
+ .all(|ty| ty.push_wasm_types(types, lowered_types)),
+ Self::Flags(names) => {
+ (0..(names.len() + 31) / 32).all(|_| lowered_types.push(ValType::I32))
+ }
+ Self::Enum(_) => lowered_types.push(ValType::I32),
+ Self::Union(u) => Self::push_variant_wasm_types(u.types.iter(), types, lowered_types),
+ Self::Option(ty) => {
+ Self::push_variant_wasm_types([ty].into_iter(), types, lowered_types)
+ }
+ Self::Result { ok, err } => {
+ Self::push_variant_wasm_types(ok.iter().chain(err.iter()), types, lowered_types)
+ }
+ }
+ }
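+
+ // Comment-only note on the `Flags` arm above: flags pack 32 names per
+ // `i32`, so up to 32 names flatten to one `i32` and, for example, 33
+ // names flatten to two ((33 + 31) / 32 == 2).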
+
+ fn push_variant_wasm_types<'a>(
+ cases: impl Iterator<Item = &'a ComponentValType>,
+ types: &TypeList,
+ lowered_types: &mut LoweredTypes,
+ ) -> bool {
+ // Push the discriminant
+ if !lowered_types.push(ValType::I32) {
+ return false;
+ }
+
+ let start = lowered_types.len();
+
+ for ty in cases {
+ let mut temp = LoweredTypes::new(lowered_types.max);
+
+ if !ty.push_wasm_types(types, &mut temp) {
+ return false;
+ }
+
+ for (i, ty) in temp.iter().enumerate() {
+ match lowered_types.get_mut(start + i) {
+ Some(prev) => *prev = Self::join_types(*prev, ty),
+ None => {
+ if !lowered_types.push(ty) {
+ return false;
+ }
+ }
+ }
+ }
+ }
+
+ true
+ }
+
+ fn join_types(a: ValType, b: ValType) -> ValType {
+ use ValType::*;
+
+ match (a, b) {
+ (I32, I32) | (I64, I64) | (F32, F32) | (F64, F64) => a,
+ (I32, F32) | (F32, I32) => I32,
+ (_, I64 | F64) | (I64 | F64, _) => I64,
+ _ => panic!("unexpected wasm type for canonical ABI"),
+ }
+ }
+}
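+
+// Comment-only sketch of the variant flattening above: each case is flattened
+// separately and the payload slots are merged with `join_types`, after an
+// `i32` discriminant. For example, a variant with a `u32` case and a
+// `float64` case flattens to `[i32, i64]`: the discriminant, then
+// `join_types(I32, F64) == I64` for the shared payload slot.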
+
+#[allow(clippy::large_enum_variant)]
+enum TypesKind {
+ Module(Arc<Module>),
+ Component(ComponentState),
+}
+
+/// Represents the types known to a [`crate::Validator`] once validation has completed.
+///
+/// The type information is returned via the [`crate::Validator::end`] method.
+pub struct Types {
+ list: TypeList,
+ kind: TypesKind,
+}
+
+#[derive(Clone, Copy)]
+enum TypesRefKind<'a> {
+ Module(&'a Module),
+ Component(&'a ComponentState),
+}
+
+/// Represents the types known to a [`crate::Validator`] during validation.
+///
+/// Retrieved via the [`crate::Validator::types`] method.
+#[derive(Clone, Copy)]
+pub struct TypesRef<'a> {
+ list: &'a TypeList,
+ kind: TypesRefKind<'a>,
+}
+
+impl<'a> TypesRef<'a> {
+ pub(crate) fn from_module(types: &'a TypeList, module: &'a Module) -> Self {
+ Self {
+ list: types,
+ kind: TypesRefKind::Module(module),
+ }
+ }
+
+ pub(crate) fn from_component(types: &'a TypeList, component: &'a ComponentState) -> Self {
+ Self {
+ list: types,
+ kind: TypesRefKind::Component(component),
+ }
+ }
+
+ fn types(&self, core: bool) -> Option<&'a [TypeId]> {
+ Some(match &self.kind {
+ TypesRefKind::Module(module) => {
+ if core {
+ &module.types
+ } else {
+ return None;
+ }
+ }
+ TypesRefKind::Component(component) => {
+ if core {
+ &component.core_types
+ } else {
+ &component.types
+ }
+ }
+ })
+ }
+
+ /// Gets a type based on its type id.
+ ///
+ /// Returns `None` if the type id is unknown.
+ pub fn type_from_id(&self, id: TypeId) -> Option<&'a Type> {
+ self.list.get(id.index)
+ }
+
+ /// Gets a type id from a type index.
+ ///
+ /// Returns `None` if the type index is out of bounds or the type has not
+ /// been parsed yet.
+ pub fn id_from_type_index(&self, index: u32, core: bool) -> Option<TypeId> {
+ self.types(core)?.get(index as usize).copied()
+ }
+
+ /// Gets a type at the given type index.
+ ///
+ /// Returns `None` if the type index is out of bounds or the type has not
+ /// been parsed yet.
+ pub fn type_at(&self, index: u32, core: bool) -> Option<&'a Type> {
+ self.type_from_id(*self.types(core)?.get(index as usize)?)
+ }
+
+ /// Gets a defined core function type at the given type index.
+ ///
+ /// Returns `None` if the type index is out of bounds or the type has not
+ /// been parsed yet.
+ pub fn func_type_at(&self, index: u32) -> Option<&'a FuncType> {
+ match self.type_at(index, true)? {
+ Type::Func(ty) => Some(ty),
+ _ => None,
+ }
+ }
+
+ /// Gets the type of a table at the given table index.
+ ///
+ /// Returns `None` if the index is out of bounds or the type has not
+ /// been parsed yet.
+ pub fn table_at(&self, index: u32) -> Option<TableType> {
+ let tables = match &self.kind {
+ TypesRefKind::Module(module) => &module.tables,
+ TypesRefKind::Component(component) => &component.core_tables,
+ };
+
+ tables.get(index as usize).copied()
+ }
+
+ /// Gets the type of a memory at the given memory index.
+ ///
+ /// Returns `None` if the index is out of bounds or the type has not
+ /// been parsed yet.
+ pub fn memory_at(&self, index: u32) -> Option<MemoryType> {
+ let memories = match &self.kind {
+ TypesRefKind::Module(module) => &module.memories,
+ TypesRefKind::Component(component) => &component.core_memories,
+ };
+
+ memories.get(index as usize).copied()
+ }
+
+ /// Gets the type of a global at the given global index.
+ ///
+ /// Returns `None` if the index is out of bounds or the type has not
+ /// been parsed yet.
+ pub fn global_at(&self, index: u32) -> Option<GlobalType> {
+ let globals = match &self.kind {
+ TypesRefKind::Module(module) => &module.globals,
+ TypesRefKind::Component(component) => &component.core_globals,
+ };
+
+ globals.get(index as usize).copied()
+ }
+
+ /// Gets the type of a tag at the given tag index.
+ ///
+ /// Returns `None` if the index is out of bounds or the type has not
+ /// been parsed yet.
+ pub fn tag_at(&self, index: u32) -> Option<&'a FuncType> {
+ let tags = match &self.kind {
+ TypesRefKind::Module(module) => &module.tags,
+ TypesRefKind::Component(component) => &component.core_tags,
+ };
+
+ Some(
+ self.list[*tags.get(index as usize)?]
+ .as_func_type()
+ .unwrap(),
+ )
+ }
+
+ /// Gets the type of a core function at the given function index.
+ ///
+ /// Returns `None` if the index is out of bounds or the type has not
+ /// been parsed yet.
+ pub fn function_at(&self, index: u32) -> Option<&'a FuncType> {
+ let id = match &self.kind {
+ TypesRefKind::Module(module) => {
+ &module.types[*module.functions.get(index as usize)? as usize]
+ }
+ TypesRefKind::Component(component) => component.core_funcs.get(index as usize)?,
+ };
+
+ match &self.list[*id] {
+ Type::Func(ty) => Some(ty),
+ _ => None,
+ }
+ }
+
+ /// Gets the type of an element segment at the given element segment index.
+ ///
+ /// Returns `None` if the index is out of bounds or the type has not
+ /// been parsed yet.
+ pub fn element_at(&self, index: u32) -> Option<RefType> {
+ match &self.kind {
+ TypesRefKind::Module(module) => module.element_types.get(index as usize).copied(),
+ TypesRefKind::Component(_) => None,
+ }
+ }
+
+ /// Gets the type of a component function at the given function index.
+ ///
+ /// Returns `None` if the index is out of bounds or the type has not
+ /// been parsed yet.
+ pub fn component_function_at(&self, index: u32) -> Option<&'a ComponentFuncType> {
+ match &self.kind {
+ TypesRefKind::Module(_) => None,
+ TypesRefKind::Component(component) => Some(
+ self.list[*component.funcs.get(index as usize)?]
+ .as_component_func_type()
+ .unwrap(),
+ ),
+ }
+ }
+
+ /// Gets the type of a module at the given module index.
+ ///
+ /// Returns `None` if the index is out of bounds or the type has not
+ /// been parsed yet.
+ pub fn module_at(&self, index: u32) -> Option<&'a ModuleType> {
+ match &self.kind {
+ TypesRefKind::Module(_) => None,
+ TypesRefKind::Component(component) => Some(
+ self.list[*component.core_modules.get(index as usize)?]
+ .as_module_type()
+ .unwrap(),
+ ),
+ }
+ }
+
+ /// Gets the type of a module instance at the given module instance index.
+ ///
+ /// Returns `None` if the index is out of bounds or the type has not
+ /// been parsed yet.
+ pub fn instance_at(&self, index: u32) -> Option<&'a InstanceType> {
+ match &self.kind {
+ TypesRefKind::Module(_) => None,
+ TypesRefKind::Component(component) => {
+ let id = component.core_instances.get(index as usize)?;
+ match &self.list[*id] {
+ Type::Instance(ty) => Some(ty),
+ _ => None,
+ }
+ }
+ }
+ }
+
+ /// Gets the type of a component at the given component index.
+ ///
+ /// Returns `None` if the index is out of bounds or the type has not
+ /// been parsed yet.
+ pub fn component_at(&self, index: u32) -> Option<&'a ComponentType> {
+ match &self.kind {
+ TypesRefKind::Module(_) => None,
+ TypesRefKind::Component(component) => Some(
+ self.list[*component.components.get(index as usize)?]
+ .as_component_type()
+ .unwrap(),
+ ),
+ }
+ }
+
+ /// Gets the type of a component instance at the given component instance index.
+ ///
+ /// Returns `None` if the index is out of bounds or the type has not
+ /// been parsed yet.
+ pub fn component_instance_at(&self, index: u32) -> Option<&'a ComponentInstanceType> {
+ match &self.kind {
+ TypesRefKind::Module(_) => None,
+ TypesRefKind::Component(component) => {
+ let id = component.instances.get(index as usize)?;
+ match &self.list[*id] {
+ Type::ComponentInstance(ty) => Some(ty),
+ _ => None,
+ }
+ }
+ }
+ }
+
+ /// Gets the type of a value at the given value index.
+ ///
+ /// Returns `None` if the index is out of bounds or the type has not
+ /// been parsed yet.
+ pub fn value_at(&self, index: u32) -> Option<ComponentValType> {
+ match &self.kind {
+ TypesRefKind::Module(_) => None,
+ TypesRefKind::Component(component) => {
+ component.values.get(index as usize).map(|(r, _)| *r)
+ }
+ }
+ }
+
+ /// Gets the entity type for the given import.
+ pub fn entity_type_from_import(&self, import: &Import) -> Option<EntityType> {
+ match &self.kind {
+ TypesRefKind::Module(module) => Some(match import.ty {
+ TypeRef::Func(idx) => EntityType::Func(*module.types.get(idx as usize)?),
+ TypeRef::Table(ty) => EntityType::Table(ty),
+ TypeRef::Memory(ty) => EntityType::Memory(ty),
+ TypeRef::Global(ty) => EntityType::Global(ty),
+ TypeRef::Tag(ty) => EntityType::Tag(*module.types.get(ty.func_type_idx as usize)?),
+ }),
+ TypesRefKind::Component(_) => None,
+ }
+ }
+
+ /// Gets the entity type from the given export.
+ pub fn entity_type_from_export(&self, export: &Export) -> Option<EntityType> {
+ match &self.kind {
+ TypesRefKind::Module(module) => Some(match export.kind {
+ ExternalKind::Func => EntityType::Func(
+ module.types[*module.functions.get(export.index as usize)? as usize],
+ ),
+ ExternalKind::Table => {
+ EntityType::Table(*module.tables.get(export.index as usize)?)
+ }
+ ExternalKind::Memory => {
+ EntityType::Memory(*module.memories.get(export.index as usize)?)
+ }
+ ExternalKind::Global => {
+ EntityType::Global(*module.globals.get(export.index as usize)?)
+ }
+ ExternalKind::Tag => EntityType::Tag(
+ module.types[*module.functions.get(export.index as usize)? as usize],
+ ),
+ }),
+ TypesRefKind::Component(_) => None,
+ }
+ }
+
+ /// Gets the component entity type for the given component import or export name.
+ pub fn component_entity_type_of_extern(&self, name: &str) -> Option<ComponentEntityType> {
+ match &self.kind {
+ TypesRefKind::Module(_) => None,
+ TypesRefKind::Component(component) => {
+ let key = KebabStr::new(name)?;
+ Some(component.externs.get(key)?.1)
+ }
+ }
+ }
+}
+
+impl Types {
+ pub(crate) fn from_module(types: TypeList, module: Arc<Module>) -> Self {
+ Self {
+ list: types,
+ kind: TypesKind::Module(module),
+ }
+ }
+
+ pub(crate) fn from_component(types: TypeList, component: ComponentState) -> Self {
+ Self {
+ list: types,
+ kind: TypesKind::Component(component),
+ }
+ }
+
+ /// Gets a reference to this validation type information.
+ pub fn as_ref(&self) -> TypesRef {
+ TypesRef {
+ list: &self.list,
+ kind: match &self.kind {
+ TypesKind::Module(module) => TypesRefKind::Module(module),
+ TypesKind::Component(component) => TypesRefKind::Component(component),
+ },
+ }
+ }
+
+ /// Gets a type based on its type id.
+ ///
+ /// Returns `None` if the type id is unknown.
+ pub fn type_from_id(&self, id: TypeId) -> Option<&Type> {
+ self.as_ref().type_from_id(id)
+ }
+
+ /// Gets a type id from a type index.
+ ///
+ /// Returns `None` if the type index is out of bounds.
+ pub fn id_from_type_index(&self, index: u32, core: bool) -> Option<TypeId> {
+ self.as_ref().id_from_type_index(index, core)
+ }
+
+ /// Gets a type at the given type index.
+ ///
+ /// Returns `None` if the index is out of bounds.
+ pub fn type_at(&self, index: u32, core: bool) -> Option<&Type> {
+ self.as_ref().type_at(index, core)
+ }
+
+ /// Gets a defined core function type at the given type index.
+ ///
+ /// Returns `None` if the index is out of bounds.
+ pub fn func_type_at(&self, index: u32) -> Option<&FuncType> {
+ self.as_ref().func_type_at(index)
+ }
+
+ /// Gets the count of core types.
+ pub fn type_count(&self) -> usize {
+ match &self.kind {
+ TypesKind::Module(module) => module.types.len(),
+ TypesKind::Component(component) => component.core_types.len(),
+ }
+ }
+
+ /// Gets the type of a table at the given table index.
+ ///
+ /// Returns `None` if the index is out of bounds.
+ pub fn table_at(&self, index: u32) -> Option<TableType> {
+ self.as_ref().table_at(index)
+ }
+
+ /// Gets the count of imported and defined tables.
+ pub fn table_count(&self) -> usize {
+ match &self.kind {
+ TypesKind::Module(module) => module.tables.len(),
+ TypesKind::Component(component) => component.core_tables.len(),
+ }
+ }
+
+ /// Gets the type of a memory at the given memory index.
+ ///
+ /// Returns `None` if the index is out of bounds.
+ pub fn memory_at(&self, index: u32) -> Option<MemoryType> {
+ self.as_ref().memory_at(index)
+ }
+
+ /// Gets the count of imported and defined memories.
+ pub fn memory_count(&self) -> usize {
+ match &self.kind {
+ TypesKind::Module(module) => module.memories.len(),
+ TypesKind::Component(component) => component.core_memories.len(),
+ }
+ }
+
+ /// Gets the type of a global at the given global index.
+ ///
+ /// Returns `None` if the index is out of bounds.
+ pub fn global_at(&self, index: u32) -> Option<GlobalType> {
+ self.as_ref().global_at(index)
+ }
+
+ /// Gets the count of imported and defined globals.
+ pub fn global_count(&self) -> usize {
+ match &self.kind {
+ TypesKind::Module(module) => module.globals.len(),
+ TypesKind::Component(component) => component.core_globals.len(),
+ }
+ }
+
+ /// Gets the type of a tag at the given tag index.
+ ///
+ /// Returns `None` if the index is out of bounds.
+ pub fn tag_at(&self, index: u32) -> Option<&FuncType> {
+ self.as_ref().tag_at(index)
+ }
+
+ /// Gets the count of imported and defined tags.
+ pub fn tag_count(&self) -> usize {
+ match &self.kind {
+ TypesKind::Module(module) => module.tags.len(),
+ TypesKind::Component(component) => component.core_tags.len(),
+ }
+ }
+
+ /// Gets the type of a core function at the given function index.
+ ///
+ /// Returns `None` if the index is out of bounds.
+ pub fn function_at(&self, index: u32) -> Option<&FuncType> {
+ self.as_ref().function_at(index)
+ }
+
+ /// Gets the count of imported and defined core functions.
+ ///
+ /// The count also includes aliased core functions in components.
+ pub fn function_count(&self) -> usize {
+ match &self.kind {
+ TypesKind::Module(module) => module.functions.len(),
+ TypesKind::Component(component) => component.core_funcs.len(),
+ }
+ }
+
+ /// Gets the type of an element segment at the given element segment index.
+ ///
+ /// Returns `None` if the index is out of bounds.
+ pub fn element_at(&self, index: u32) -> Option<RefType> {
+ match &self.kind {
+ TypesKind::Module(module) => module.element_types.get(index as usize).copied(),
+ TypesKind::Component(_) => None,
+ }
+ }
+
+ /// Gets the count of element segments.
+ pub fn element_count(&self) -> usize {
+ match &self.kind {
+ TypesKind::Module(module) => module.element_types.len(),
+ TypesKind::Component(_) => 0,
+ }
+ }
+
+ /// Gets the type of a component function at the given function index.
+ ///
+ /// Returns `None` if the index is out of bounds.
+ pub fn component_function_at(&self, index: u32) -> Option<&ComponentFuncType> {
+ self.as_ref().component_function_at(index)
+ }
+
+ /// Gets the count of imported, exported, or aliased component functions.
+ pub fn component_function_count(&self) -> usize {
+ match &self.kind {
+ TypesKind::Module(_) => 0,
+ TypesKind::Component(component) => component.funcs.len(),
+ }
+ }
+
+ /// Gets the type of a module at the given module index.
+ ///
+ /// Returns `None` if the index is out of bounds.
+ pub fn module_at(&self, index: u32) -> Option<&ModuleType> {
+ self.as_ref().module_at(index)
+ }
+
+ /// Gets the count of imported, exported, or aliased modules.
+ pub fn module_count(&self) -> usize {
+ match &self.kind {
+ TypesKind::Module(_) => 0,
+ TypesKind::Component(component) => component.core_modules.len(),
+ }
+ }
+
+ /// Gets the type of a module instance at the given module instance index.
+ ///
+ /// Returns `None` if the index is out of bounds.
+ pub fn instance_at(&self, index: u32) -> Option<&InstanceType> {
+ self.as_ref().instance_at(index)
+ }
+
+ /// Gets the count of imported, exported, or aliased core module instances.
+ pub fn instance_count(&self) -> usize {
+ match &self.kind {
+ TypesKind::Module(_) => 0,
+ TypesKind::Component(component) => component.core_instances.len(),
+ }
+ }
+
+ /// Gets the type of a component at the given component index.
+ ///
+ /// Returns `None` if the index is out of bounds.
+ pub fn component_at(&self, index: u32) -> Option<&ComponentType> {
+ self.as_ref().component_at(index)
+ }
+
+ /// Gets the count of imported, exported, or aliased components.
+ pub fn component_count(&self) -> usize {
+ match &self.kind {
+ TypesKind::Module(_) => 0,
+ TypesKind::Component(component) => component.components.len(),
+ }
+ }
+
+ /// Gets the type of a component instance at the given component instance index.
+ ///
+ /// Returns `None` if the index is out of bounds.
+ pub fn component_instance_at(&self, index: u32) -> Option<&ComponentInstanceType> {
+ self.as_ref().component_instance_at(index)
+ }
+
+ /// Gets the count of imported, exported, or aliased component instances.
+ pub fn component_instance_count(&self) -> usize {
+ match &self.kind {
+ TypesKind::Module(_) => 0,
+ TypesKind::Component(component) => component.instances.len(),
+ }
+ }
+
+ /// Gets the type of a value at the given value index.
+ ///
+ /// Returns `None` if the index is out of bounds.
+ pub fn value_at(&self, index: u32) -> Option<ComponentValType> {
+ self.as_ref().value_at(index)
+ }
+
+ /// Gets the count of imported, exported, or aliased values.
+ pub fn value_count(&self) -> usize {
+ match &self.kind {
+ TypesKind::Module(_) => 0,
+ TypesKind::Component(component) => component.values.len(),
+ }
+ }
+
+ /// Gets the entity type from the given import.
+ pub fn entity_type_from_import(&self, import: &Import) -> Option<EntityType> {
+ self.as_ref().entity_type_from_import(import)
+ }
+
+ /// Gets the entity type from the given export.
+ pub fn entity_type_from_export(&self, export: &Export) -> Option<EntityType> {
+ self.as_ref().entity_type_from_export(export)
+ }
+
+ /// Gets the component entity type for the given component import or export
+ /// name.
+ pub fn component_entity_type_of_extern(&self, name: &str) -> Option<ComponentEntityType> {
+ self.as_ref().component_entity_type_of_extern(name)
+ }
+
+ /// Attempts to lookup the type id that `ty` is an alias of.
+ ///
+ /// Returns `None` if `ty` wasn't listed as aliasing a prior type.
+ pub fn peel_alias(&self, ty: TypeId) -> Option<TypeId> {
+ self.list.peel_alias(ty)
+ }
+}
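+
+// Comment-only usage sketch, assuming `Validator::validate_all` forwards the
+// `Types` that `Validator::end` produces (`wasm_bytes` is a placeholder):
+//
+//     let types = crate::Validator::new().validate_all(&wasm_bytes)?;
+//     let first_core_type = types.func_type_at(0); // `None` if out of bounds
+//     let num_funcs = types.function_count();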
+
+/// This is a type which mirrors a subset of the `Vec<T>` API, but is intended
+/// to be able to be cheaply snapshotted and cloned.
+///
+/// When each module's code section starts we "commit" the current list of types
+/// in the global list of types. This means that the temporary `cur` vec here is
+/// pushed onto `snapshots` and wrapped up in an `Arc`. At that point we clone
+/// this entire list (which is then O(modules), not O(types in all modules)) and
+/// pass it out as a context to each function validator.
+///
+/// Otherwise, though, this type behaves as if it were a large `Vec<T>`, but
+/// it's represented by lists of contiguous chunks.
+pub(crate) struct SnapshotList<T> {
+ // All previous snapshots, the "head" of the list that this type represents.
+ // The first entry in this pair is the starting index for all elements
+ // contained in the list, and the second element is the list itself. Note
+ // the `Arc` wrapper around sub-lists, which makes cloning time for this
+ // `SnapshotList` O(snapshots) rather than O(snapshots_total), which for
+ // us in this context means the number of modules, not types.
+ //
+ // Note that this list is sorted least-to-greatest in order of the index for
+ // binary searching.
+ snapshots: Vec<Arc<Snapshot<T>>>,
+
+ // This is the total length of all lists in the `snapshots` array.
+ snapshots_total: usize,
+
+ // The current list of types for the current snapshot that are being built.
+ cur: Vec<T>,
+
+ unique_mappings: HashMap<u32, u32>,
+ unique_counter: u32,
+}
+
+struct Snapshot<T> {
+ prior_types: usize,
+ unique_counter: u32,
+ unique_mappings: HashMap<u32, u32>,
+ items: Vec<T>,
+}
+
+impl<T> SnapshotList<T> {
+ /// Same as `<&[T]>::get`
+ pub(crate) fn get(&self, index: usize) -> Option<&T> {
+ // Check to see if this index falls on our local list
+ if index >= self.snapshots_total {
+ return self.cur.get(index - self.snapshots_total);
+ }
+ // ... and failing that we do a binary search to figure out which bucket
+ // it's in. Note the `i-1` in the `Err` case because if we don't find an
+ // exact match the type is located in the previous bucket.
+ let i = match self
+ .snapshots
+ .binary_search_by_key(&index, |snapshot| snapshot.prior_types)
+ {
+ Ok(i) => i,
+ Err(i) => i - 1,
+ };
+ let snapshot = &self.snapshots[i];
+ Some(&snapshot.items[index - snapshot.prior_types])
+ }
+
+ /// Same as `<&mut [T]>::get_mut`, except only works for indexes into the
+ /// current snapshot being built.
+ ///
+ /// # Panics
+ ///
+ /// Panics if an index is passed in which falls within the
+ /// previously-snapshotted list of types. This should never happen in our
+ /// context and the panic is intended to weed out possible bugs in
+ /// wasmparser.
+ pub(crate) fn get_mut(&mut self, index: usize) -> Option<&mut T> {
+ if index >= self.snapshots_total {
+ return self.cur.get_mut(index - self.snapshots_total);
+ }
+ panic!("cannot get a mutable reference in snapshotted part of list")
+ }
+
+ /// Same as `Vec::push`
+ pub(crate) fn push(&mut self, val: T) {
+ self.cur.push(val);
+ }
+
+ /// Same as `<[T]>::len`
+ pub(crate) fn len(&self) -> usize {
+ self.cur.len() + self.snapshots_total
+ }
+
+ /// Reserve space for an additional count of items.
+ pub(crate) fn reserve(&mut self, additional: usize) {
+ self.cur.reserve(additional);
+ }
+
+ /// Commits previously pushed types into this snapshot vector, and returns a
+ /// clone of this list.
+ ///
+ /// The returned `SnapshotList` can be used to access all the same types as
+ /// this list itself. This list is also left unchanged (from an external
+ /// perspective) and can continue to access all the same types.
+ pub(crate) fn commit(&mut self) -> SnapshotList<T> {
+ // If the current chunk has new elements, commit them in to an
+ // `Arc`-wrapped vector in the snapshots list. Note the `shrink_to_fit`
+ // ahead of time to hopefully keep memory usage lower than it would
+ // otherwise be. Additionally note that the `unique_counter` is bumped
+ // here to ensure that the previous value of the unique counter is
+ // never used for an actual type so it's suitable for lookup via a
+ // binary search.
+ let len = self.cur.len();
+ if len > 0 {
+ self.unique_counter += 1;
+ self.cur.shrink_to_fit();
+ self.snapshots.push(Arc::new(Snapshot {
+ prior_types: self.snapshots_total,
+ unique_counter: self.unique_counter - 1,
+ unique_mappings: mem::take(&mut self.unique_mappings),
+ items: mem::take(&mut self.cur),
+ }));
+ self.snapshots_total += len;
+ }
+ SnapshotList {
+ snapshots: self.snapshots.clone(),
+ snapshots_total: self.snapshots_total,
+ unique_mappings: HashMap::new(),
+ unique_counter: self.unique_counter,
+ cur: Vec::new(),
+ }
+ }
+
+ /// Modifies a `TypeId` to have the same contents but a fresh new unique id.
+ ///
+ /// This is used during aliasing with components to assign types a unique
+ /// identifier that isn't equivalent to anything else but still
+ /// points to the same underlying type.
+ pub fn with_unique(&mut self, mut ty: TypeId) -> TypeId {
+ self.unique_mappings
+ .insert(self.unique_counter, ty.unique_id);
+ ty.unique_id = self.unique_counter;
+ self.unique_counter += 1;
+ ty
+ }
+
+ /// Attempts to lookup the type id that `ty` is an alias of.
+ ///
+ /// Returns `None` if `ty` wasn't listed as aliasing a prior type.
+ pub fn peel_alias(&self, ty: TypeId) -> Option<TypeId> {
+ // The unique counter in each snapshot is the value of the counter at
+ // the time of the snapshot, so it's guaranteed never to be used for an
+ // actual type; that's why `Ok` should never show up here. An `Err`
+ // gives the insertion point, i.e. the index of the first snapshot whose
+ // unique counter is greater than this unique id, and that slot holds
+ // the mapping we're interested in.
+ let i = match self
+ .snapshots
+ .binary_search_by_key(&ty.unique_id, |snapshot| snapshot.unique_counter)
+ {
+ Ok(_) => unreachable!(),
+ Err(i) => i,
+ };
+
+ // If the `i` index is beyond the snapshot array then lookup in the
+ // current mappings instead since it may refer to a type not snapshot
+ // yet.
+ let unique_id = match self.snapshots.get(i) {
+ Some(snapshot) => *snapshot.unique_mappings.get(&ty.unique_id)?,
+ None => *self.unique_mappings.get(&ty.unique_id)?,
+ };
+ Some(TypeId { unique_id, ..ty })
+ }
+}
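+
+// Comment-only sketch of the snapshotting behaviour described above: pushes
+// land in the mutable `cur` tail, `commit` freezes them into an `Arc`'d chunk,
+// and both the original list and the returned clone index the same elements:
+//
+//     let mut list: SnapshotList<u32> = SnapshotList::default();
+//     list.push(7);
+//     let snapshot = list.commit(); // cloning is O(number of snapshots)
+//     assert_eq!(snapshot[0], 7);
+//     list.push(8);                 // only visible through `list`
+//     assert_eq!(list[1], 8);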
+
+impl<T> std::ops::Index<usize> for SnapshotList<T> {
+ type Output = T;
+
+ #[inline]
+ fn index(&self, index: usize) -> &T {
+ self.get(index).unwrap()
+ }
+}
+
+impl<T> std::ops::IndexMut<usize> for SnapshotList<T> {
+ #[inline]
+ fn index_mut(&mut self, index: usize) -> &mut T {
+ self.get_mut(index).unwrap()
+ }
+}
+
+impl<T> std::ops::Index<TypeId> for SnapshotList<T> {
+ type Output = T;
+
+ #[inline]
+ fn index(&self, id: TypeId) -> &T {
+ self.get(id.index).unwrap()
+ }
+}
+
+impl<T> std::ops::IndexMut<TypeId> for SnapshotList<T> {
+ #[inline]
+ fn index_mut(&mut self, id: TypeId) -> &mut T {
+ self.get_mut(id.index).unwrap()
+ }
+}
+
+impl<T> Default for SnapshotList<T> {
+ fn default() -> SnapshotList<T> {
+ SnapshotList {
+ snapshots: Vec::new(),
+ snapshots_total: 0,
+ cur: Vec::new(),
+ unique_counter: 1,
+ unique_mappings: HashMap::new(),
+ }
+ }
+}
+
+/// A snapshot list of types.
+pub(crate) type TypeList = SnapshotList<Type>;
+
+/// Thin wrapper around `TypeList` which provides an allocator of unique ids for
+/// types contained within this list.
+pub(crate) struct TypeAlloc {
+ list: TypeList,
+}
+
+impl Deref for TypeAlloc {
+ type Target = TypeList;
+ fn deref(&self) -> &TypeList {
+ &self.list
+ }
+}
+
+impl DerefMut for TypeAlloc {
+ fn deref_mut(&mut self) -> &mut TypeList {
+ &mut self.list
+ }
+}
+
+impl TypeAlloc {
+ /// Pushes a new anonymous type into this list which will have its
+ /// `unique_id` field cleared.
+ pub fn push_anon(&mut self, ty: Type) -> TypeId {
+ let index = self.list.len();
+ let type_size = ty.type_size();
+ self.list.push(ty);
+ TypeId {
+ index,
+ type_size,
+ unique_id: 0,
+ }
+ }
+
+ /// Pushes a new defined type which has an index in core wasm onto this
+ /// list.
+ ///
+ /// The returned `TypeId` is guaranteed to be unique and not hash-equivalent
+ /// to any other prior ID in this list.
+ pub fn push_defined(&mut self, ty: Type) -> TypeId {
+ let id = self.push_anon(ty);
+ self.with_unique(id)
+ }
+}
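+
+// Comment-only sketch of the id allocation above: `push_defined` hands out an
+// id with a fresh `unique_id`, `with_unique` on an existing id records an
+// alias edge, and `peel_alias` walks that edge back:
+//
+//     let mut alloc = TypeAlloc::default();
+//     let defined = alloc.push_defined(Type::Func(FuncType::new([], [])));
+//     let aliased = alloc.with_unique(defined);
+//     assert_eq!(alloc.peel_alias(aliased), Some(defined));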
+
+impl Default for TypeAlloc {
+ fn default() -> TypeAlloc {
+ TypeAlloc {
+ list: Default::default(),
+ }
+ }
+}
diff --git a/third_party/rust/wasmparser/tests/big-module.rs b/third_party/rust/wasmparser/tests/big-module.rs
new file mode 100644
index 0000000000..d5cf37f4a4
--- /dev/null
+++ b/third_party/rust/wasmparser/tests/big-module.rs
@@ -0,0 +1,32 @@
+use wasm_encoder::*;
+#[test]
+fn big_type_indices() {
+ const N: u32 = 100_000;
+ let mut module = Module::new();
+ let mut types = TypeSection::new();
+ for _ in 0..N {
+ types.function([], []);
+ }
+ module.section(&types);
+ let mut funcs = FunctionSection::new();
+ funcs.function(N - 1);
+ module.section(&funcs);
+
+ let mut elems = ElementSection::new();
+ elems.declared(RefType::FUNCREF, Elements::Functions(&[0]));
+ module.section(&elems);
+
+ let mut code = CodeSection::new();
+ let mut body = Function::new([]);
+ body.instruction(&Instruction::RefFunc(0));
+ body.instruction(&Instruction::Drop);
+ body.instruction(&Instruction::End);
+ code.function(&body);
+ module.section(&code);
+
+ let wasm = module.finish();
+
+ wasmparser::Validator::default()
+ .validate_all(&wasm)
+ .unwrap();
+}