Mirror of https://github.com/mozilla/gecko-dev.git
Backed out changeset 4c839456c3ff (bug 1620284) for build bustages on a CLOSED TREE
--HG-- extra : amend_source : b632dccf4524a77201507b45b5025d72446e6ef7
This commit is contained in:
Parent 49f3d93a8e
Commit c0adfbbf21
@@ -47,10 +47,10 @@ git = "https://github.com/djg/cubeb-pulse-rs"
replace-with = "vendored-sources"
rev = "8069f8f4189982e0b38fa6dc8993dd4fab41f728"

[source."https://github.com/bytecodealliance/wasmtime"]
git = "https://github.com/bytecodealliance/wasmtime"
[source."https://github.com/bytecodealliance/cranelift"]
git = "https://github.com/bytecodealliance/cranelift"
replace-with = "vendored-sources"
rev = "135a48ca7e9a45d7d31911753e602e6de8b14e2a"
rev = "98c818c129979e98a3db150f8f9698f6451b7ef7"

[source."https://github.com/badboy/failure"]
git = "https://github.com/badboy/failure"
@@ -711,22 +711,22 @@ dependencies = [

[[package]]
name = "cranelift-bforest"
version = "0.59.0"
source = "git+https://github.com/bytecodealliance/wasmtime?rev=135a48ca7e9a45d7d31911753e602e6de8b14e2a#135a48ca7e9a45d7d31911753e602e6de8b14e2a"
version = "0.58.0"
source = "git+https://github.com/bytecodealliance/cranelift?rev=98c818c129979e98a3db150f8f9698f6451b7ef7#98c818c129979e98a3db150f8f9698f6451b7ef7"
dependencies = [
"cranelift-entity 0.59.0",
"cranelift-entity 0.58.0",
]

[[package]]
name = "cranelift-codegen"
version = "0.59.0"
source = "git+https://github.com/bytecodealliance/wasmtime?rev=135a48ca7e9a45d7d31911753e602e6de8b14e2a#135a48ca7e9a45d7d31911753e602e6de8b14e2a"
version = "0.58.0"
source = "git+https://github.com/bytecodealliance/cranelift?rev=98c818c129979e98a3db150f8f9698f6451b7ef7#98c818c129979e98a3db150f8f9698f6451b7ef7"
dependencies = [
"byteorder",
"cranelift-bforest",
"cranelift-codegen-meta",
"cranelift-codegen-shared",
"cranelift-entity 0.59.0",
"cranelift-entity 0.58.0",
"log",
"smallvec 1.2.0",
"target-lexicon 0.10.0",
@@ -735,17 +735,17 @@ dependencies = [

[[package]]
name = "cranelift-codegen-meta"
version = "0.59.0"
source = "git+https://github.com/bytecodealliance/wasmtime?rev=135a48ca7e9a45d7d31911753e602e6de8b14e2a#135a48ca7e9a45d7d31911753e602e6de8b14e2a"
version = "0.58.0"
source = "git+https://github.com/bytecodealliance/cranelift?rev=98c818c129979e98a3db150f8f9698f6451b7ef7#98c818c129979e98a3db150f8f9698f6451b7ef7"
dependencies = [
"cranelift-codegen-shared",
"cranelift-entity 0.59.0",
"cranelift-entity 0.58.0",
]

[[package]]
name = "cranelift-codegen-shared"
version = "0.59.0"
source = "git+https://github.com/bytecodealliance/wasmtime?rev=135a48ca7e9a45d7d31911753e602e6de8b14e2a#135a48ca7e9a45d7d31911753e602e6de8b14e2a"
version = "0.58.0"
source = "git+https://github.com/bytecodealliance/cranelift?rev=98c818c129979e98a3db150f8f9698f6451b7ef7#98c818c129979e98a3db150f8f9698f6451b7ef7"

[[package]]
name = "cranelift-entity"
@@ -754,13 +754,13 @@ source = "git+https://github.com/PLSysSec/lucet_sandbox_compiler?rev=5e870faf6f9

[[package]]
name = "cranelift-entity"
version = "0.59.0"
source = "git+https://github.com/bytecodealliance/wasmtime?rev=135a48ca7e9a45d7d31911753e602e6de8b14e2a#135a48ca7e9a45d7d31911753e602e6de8b14e2a"
version = "0.58.0"
source = "git+https://github.com/bytecodealliance/cranelift?rev=98c818c129979e98a3db150f8f9698f6451b7ef7#98c818c129979e98a3db150f8f9698f6451b7ef7"

[[package]]
name = "cranelift-frontend"
version = "0.59.0"
source = "git+https://github.com/bytecodealliance/wasmtime?rev=135a48ca7e9a45d7d31911753e602e6de8b14e2a#135a48ca7e9a45d7d31911753e602e6de8b14e2a"
version = "0.58.0"
source = "git+https://github.com/bytecodealliance/cranelift?rev=98c818c129979e98a3db150f8f9698f6451b7ef7#98c818c129979e98a3db150f8f9698f6451b7ef7"
dependencies = [
"cranelift-codegen",
"log",
@@ -770,11 +770,11 @@ dependencies = [

[[package]]
name = "cranelift-wasm"
version = "0.59.0"
source = "git+https://github.com/bytecodealliance/wasmtime?rev=135a48ca7e9a45d7d31911753e602e6de8b14e2a#135a48ca7e9a45d7d31911753e602e6de8b14e2a"
version = "0.58.0"
source = "git+https://github.com/bytecodealliance/cranelift?rev=98c818c129979e98a3db150f8f9698f6451b7ef7#98c818c129979e98a3db150f8f9698f6451b7ef7"
dependencies = [
"cranelift-codegen",
"cranelift-entity 0.59.0",
"cranelift-entity 0.58.0",
"cranelift-frontend",
"log",
"thiserror",
@@ -4679,9 +4679,9 @@ dependencies = [

[[package]]
name = "wasmparser"
version = "0.51.4"
version = "0.48.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aeb1956b19469d1c5e63e459d29e7b5aa0f558d9f16fcef09736f8a265e6c10a"
checksum = "073da89bf1c84db000dd68ce660c1b4a08e3a2d28fd1e3394ab9e7abdde4a0f8"

[[package]]
name = "webdriver"
@@ -75,9 +75,9 @@ failure = { git = "https://github.com/badboy/failure", rev = "64af847bc5fdcb6d24
failure_derive = { git = "https://github.com/badboy/failure", rev = "64af847bc5fdcb6d2438bec8a6030812a80519a5" }

[patch.crates-io.cranelift-codegen]
git = "https://github.com/bytecodealliance/wasmtime"
rev = "135a48ca7e9a45d7d31911753e602e6de8b14e2a"
git = "https://github.com/bytecodealliance/cranelift"
rev = "98c818c129979e98a3db150f8f9698f6451b7ef7"

[patch.crates-io.cranelift-wasm]
git = "https://github.com/bytecodealliance/wasmtime"
rev = "135a48ca7e9a45d7d31911753e602e6de8b14e2a"
git = "https://github.com/bytecodealliance/cranelift"
rev = "98c818c129979e98a3db150f8f9698f6451b7ef7"
@@ -13,8 +13,8 @@ name = "baldrdash"
# cranelift-wasm to pinned commits. If you want to update Cranelift in Gecko,
# you should update the following $TOP_LEVEL/Cargo.toml file: look for the
# revision (rev) hashes of both cranelift dependencies (codegen and wasm).
cranelift-codegen = { version = "0.59.0", default-features = false }
cranelift-wasm = "0.59.0"
cranelift-codegen = { version = "0.58.0", default-features = false }
cranelift-wasm = "0.58.0"
log = { version = "0.4.6", default-features = false, features = ["release_max_level_info"] }
env_logger = "0.6"
smallvec = "1.0"
@@ -1 +1 @@
{"files":{"Cargo.toml":"29c2ba915de1d7c6270c9a82f105214f640df321013cddfa3da91afd49c252ff","LICENSE":"268872b9816f90fd8e85db5a28d33f8150ebb8dd016653fb39ef1f94f2686bc5","README.md":"af367c67340fa7f6fb9a35b0aa637dcf303957f7ae7427a5f4f6356801c8bb04","src/lib.rs":"23a5c42d477197a947122e662068e681bb9ed31041c0b668c3267c3fce15d39e","src/map.rs":"a3b7f64cae7ec9c2a8038def315bcf90e8751552b1bc1c20b62fbb8c763866c4","src/node.rs":"28f7edd979f7b9712bc4ab30b0d2a1b8ad5485a4b1e8c09f3dcaf501b9b5ccd1","src/path.rs":"a86ee1c882c173e8af96fd53a416a0fb485dd3f045ac590ef313a9d9ecf90f56","src/pool.rs":"f6337b5417f7772e6878a160c1a40629199ff09997bdff18eb2a0ba770158600","src/set.rs":"281eb8b5ead1ffd395946464d881f9bb0e7fb61092aed701d72d2314b5f80994"},"package":null}
{"files":{"Cargo.toml":"535a760fb2660c657313c8161a52e76cf5f5eb2b3c2beda5803ae16bb7ad1166","LICENSE":"268872b9816f90fd8e85db5a28d33f8150ebb8dd016653fb39ef1f94f2686bc5","README.md":"af367c67340fa7f6fb9a35b0aa637dcf303957f7ae7427a5f4f6356801c8bb04","src/lib.rs":"23a5c42d477197a947122e662068e681bb9ed31041c0b668c3267c3fce15d39e","src/map.rs":"a3b7f64cae7ec9c2a8038def315bcf90e8751552b1bc1c20b62fbb8c763866c4","src/node.rs":"28f7edd979f7b9712bc4ab30b0d2a1b8ad5485a4b1e8c09f3dcaf501b9b5ccd1","src/path.rs":"a86ee1c882c173e8af96fd53a416a0fb485dd3f045ac590ef313a9d9ecf90f56","src/pool.rs":"f6337b5417f7772e6878a160c1a40629199ff09997bdff18eb2a0ba770158600","src/set.rs":"281eb8b5ead1ffd395946464d881f9bb0e7fb61092aed701d72d2314b5f80994"},"package":null}
@@ -1,7 +1,7 @@
[package]
authors = ["The Cranelift Project Developers"]
name = "cranelift-bforest"
version = "0.59.0"
version = "0.58.0"
description = "A forest of B+-trees"
license = "Apache-2.0 WITH LLVM-exception"
documentation = "https://cranelift.readthedocs.io/"
@@ -12,7 +12,7 @@ keywords = ["btree", "forest", "set", "map"]
edition = "2018"

[dependencies]
cranelift-entity = { path = "../entity", version = "0.59.0", default-features = false }
cranelift-entity = { path = "../cranelift-entity", version = "0.58.0", default-features = false }

[badges]
maintenance = { status = "experimental" }
@@ -1 +1 @@
{"files":{"Cargo.toml":"af6977c1e0458678e7be127ba72615085227af2b08ae202e437c3d17589095a1","LICENSE":"268872b9816f90fd8e85db5a28d33f8150ebb8dd016653fb39ef1f94f2686bc5","README.md":"b123f056d0d458396679c5f7f2a16d2762af0258fcda4ac14b6655a95e5a0022","src/cdsl/ast.rs":"84a4b7e3301e3249716958a7aa4ea5ba8c6172e3c02f57ee3880504c4433ff19","src/cdsl/cpu_modes.rs":"996e45b374cfe85ac47c8c86c4459fe4c04b3158102b4c63b6ee434d5eed6a9e","src/cdsl/encodings.rs":"d884a564815a03c23369bcf31d13b122ae5ba84d0c80eda9312f0c0a829bf794","src/cdsl/formats.rs":"63e638305aa3ca6dd409ddf0e5e9605eeac1cc2631103e42fc6cbc87703d9b63","src/cdsl/instructions.rs":"1284b65d53e6d42caeb0c9bfa0e69fb80cb76e14513f2ec5b73f98feb926753e","src/cdsl/isa.rs":"ccabd6848b69eb069c10db61c7e7f86080777495714bb53d03e663c40541be94","src/cdsl/mod.rs":"0aa827923bf4c45e5ee2359573bd863e00f474acd532739f49dcd74a27553882","src/cdsl/operands.rs":"1c3411504de9c83112ff48e0ff1cfbb2e4ba5a9a15c1716f411ef31a4df59899","src/cdsl/recipes.rs":"80b7cd87332229b569e38086ceee8d557e679b9a32ad2e50bdb15c33337c3418","src/cdsl/regs.rs":"05f93ab8504ee82d7cc9811a5b40e5cd004c327577b4c0b3dd957fc422f3c013","src/cdsl/settings.rs":"e6fd9a31925743b93b11f09c9c8271bab6aa2430aa053a2601957b4487df7d77","src/cdsl/type_inference.rs":"1efca8a095ffc899b7527bda6b9d9378c73d7283f8dceaa4819e8af599f8be21","src/cdsl/types.rs":"ff764c9e9c29a05677bff6164e7bc25a0c32655052d77ae580536abba8b1713b","src/cdsl/typevar.rs":"c7e80a3c52755f2d91fb5c3d18413b7c97777bd54d1aece8a17d1bbd9944c46a","src/cdsl/xform.rs":"55da0c3f2403147b535ab6ae5d69c623fbe839edecf2a3af1de84420cd58402d","src/default_map.rs":"101bb0282a124f9c921f6bd095f529e8753621450d783c3273b0b0394c2c5c03","src/error.rs":"e9b11b2feb2d867b94c8810fdc5a6c4e0d9131604a0bfa5340ff2639a55100b4","src/gen_binemit.rs":"515e243420b30d1e01f8ea630282d9b6d78a715e1951f3f20392e19a48164442","src/gen_encodings.rs":"f00cded6b68a9b48c9e3cd39a8b6f0ba136f4062c8f8666109158a72c62c3ed1","src/gen_inst.rs":"2e103077d5c3233b9b2d5e93ef595e1259c5e95886afca1be4ba2a9820ff2f23","src/gen_legalizer.rs":"ea229ab9393cc5ba2242f626e74c624ea59314535e74b26602dafb8e96481a72","src/gen_registers.rs":"3628949229c1cb5179ec5f655b9a1ddd0504ba74ffb9fb23741c85c9154b696f","src/gen_settings.rs":"f3cc3d31f6cc898f30606caf084f0de220db2d3b1b5e5e4145fa7c9a9a1597e2","src/gen_types.rs":"f6c090e1646a43bf2fe81ae0a7029cc6f7dc6d43285368f56d86c35a21c469a6","src/isa/arm32/mod.rs":"8e09ec1b3caf2d22dce8517b37c356047bfce9a6dea712467d867ed05c4bedaf","src/isa/arm64/mod.rs":"b01f030925d3f2af37d7df1b4a800eb7f0d24f74a46e9154fd8b6752643eb2d5","src/isa/mod.rs":"136141f99f217ba42b9e3f7f47238ab19cc974bb3bef2e2df7f7b5a683989d46","src/isa/riscv/encodings.rs":"8abb1968d917588bc5fc5f5be6dd66bdec23ac456ba65f8138237c8e891e843c","src/isa/riscv/mod.rs":"a7b461a30bbfbc1e3b33645422ff40d5b1761c30cb5d4a8aa12e9a3b7f7aee51","src/isa/riscv/recipes.rs":"c9424cffed54cc4d328879a4613b9f6a2c2b7cde7e6e17b4fccd5f661aaf92f2","src/isa/x86/encodings.rs":"5987f60e6fbed41eb56ce318b42aeda483e3616144ef8b92c3f537c945745e4e","src/isa/x86/instructions.rs":"4166cc5979f5106837cc37fa1642790f9dbc7f1472533d48fdfddf22ba987c36","src/isa/x86/legalize.rs":"2d0df6a6db969eb12e4155dc4aed76bd9097790306697625245bc05727069ceb","src/isa/x86/mod.rs":"65953f998ff3fc3b333167e9979fc0f15f976b51ad75272ac19dcaad0981b371","src/isa/x86/opcodes.rs":"0459fd5c06fb232dd928673e763b401e471c84ee48d47a5ee1a267a846f2345f","src/isa/x86/recipes.rs":"d438c8d3c7b3912e00df37c3d5cd98182eef2fc29d4134ffc39ffdbac26e285a","src/isa/x86/registers.rs":"e24c9487f4c257b1089dac6bca0106cc673db77292cd000ca8e0c7e0d01
03f63","src/isa/x86/settings.rs":"9087cd57da2852b689bddc296639f9b9e1caa30e8185798891079da8d9340b53","src/lib.rs":"2491b0e74078914cb89d1778fa8174daf723fe76aaf7fed18741237d68f6df32","src/shared/entities.rs":"90f774a70e1c2a2e9a553c07a5e80e0fe54cf127434bd83e67274bba4e1a19ba","src/shared/formats.rs":"89ed4074f748637adf56b93ba952e398c45d43e6326d01676885939e3fe8bc4a","src/shared/immediates.rs":"e4a57657f6af9853794804eb41c01204a2c13a632f44f55d90e156a4b98c5f65","src/shared/instructions.rs":"3570968c3670dc087eac89f211205bbcfa3c976bdf7f111e944a76f367f4466c","src/shared/legalize.rs":"bc9c3292446c1d338df1c4ce19f3ac5482cfe582a04a5a1e82fc9aaa6aef25ea","src/shared/mod.rs":"c219625990bf15507ac1077b349ce20e5312d4e4707426183676d469e78792b7","src/shared/settings.rs":"2e791624b4e85f9e8adcee7169fe445ca8bcdc97d1da92c92ae9576988ab0470","src/shared/types.rs":"4702df132f4b5d70cc9411ec5221ba0b1bd4479252274e0223ae57b6d0331247","src/srcgen.rs":"dcfc159c8599270f17e6a978c4be255abca51556b5ef0da497faec4a4a1e62ce","src/unique_table.rs":"31aa54330ca4786af772d32e8cb6158b6504b88fa93fe177bf0c6cbe545a8d35"},"package":null}
{"files":{"Cargo.toml":"3179d424e3926ec5951277b2325013fcde7286f992c9a0dba6ff4da1fa694e5a","LICENSE":"268872b9816f90fd8e85db5a28d33f8150ebb8dd016653fb39ef1f94f2686bc5","README.md":"b123f056d0d458396679c5f7f2a16d2762af0258fcda4ac14b6655a95e5a0022","src/cdsl/ast.rs":"84a4b7e3301e3249716958a7aa4ea5ba8c6172e3c02f57ee3880504c4433ff19","src/cdsl/cpu_modes.rs":"996e45b374cfe85ac47c8c86c4459fe4c04b3158102b4c63b6ee434d5eed6a9e","src/cdsl/encodings.rs":"d884a564815a03c23369bcf31d13b122ae5ba84d0c80eda9312f0c0a829bf794","src/cdsl/formats.rs":"63e638305aa3ca6dd409ddf0e5e9605eeac1cc2631103e42fc6cbc87703d9b63","src/cdsl/instructions.rs":"034fe7dae4fc9b90cf876a3d966d4bf1e4b5db11888704a7adc7a522dcd32947","src/cdsl/isa.rs":"ccabd6848b69eb069c10db61c7e7f86080777495714bb53d03e663c40541be94","src/cdsl/mod.rs":"0aa827923bf4c45e5ee2359573bd863e00f474acd532739f49dcd74a27553882","src/cdsl/operands.rs":"1c3411504de9c83112ff48e0ff1cfbb2e4ba5a9a15c1716f411ef31a4df59899","src/cdsl/recipes.rs":"80b7cd87332229b569e38086ceee8d557e679b9a32ad2e50bdb15c33337c3418","src/cdsl/regs.rs":"05f93ab8504ee82d7cc9811a5b40e5cd004c327577b4c0b3dd957fc422f3c013","src/cdsl/settings.rs":"e6fd9a31925743b93b11f09c9c8271bab6aa2430aa053a2601957b4487df7d77","src/cdsl/type_inference.rs":"1efca8a095ffc899b7527bda6b9d9378c73d7283f8dceaa4819e8af599f8be21","src/cdsl/types.rs":"ff764c9e9c29a05677bff6164e7bc25a0c32655052d77ae580536abba8b1713b","src/cdsl/typevar.rs":"c7e80a3c52755f2d91fb5c3d18413b7c97777bd54d1aece8a17d1bbd9944c46a","src/cdsl/xform.rs":"55da0c3f2403147b535ab6ae5d69c623fbe839edecf2a3af1de84420cd58402d","src/default_map.rs":"101bb0282a124f9c921f6bd095f529e8753621450d783c3273b0b0394c2c5c03","src/error.rs":"e9b11b2feb2d867b94c8810fdc5a6c4e0d9131604a0bfa5340ff2639a55100b4","src/gen_binemit.rs":"515e243420b30d1e01f8ea630282d9b6d78a715e1951f3f20392e19a48164442","src/gen_encodings.rs":"f00cded6b68a9b48c9e3cd39a8b6f0ba136f4062c8f8666109158a72c62c3ed1","src/gen_inst.rs":"2e103077d5c3233b9b2d5e93ef595e1259c5e95886afca1be4ba2a9820ff2f23","src/gen_legalizer.rs":"de4b41b78a1cefa0c458b699058405c14ebccd5219b51e0be329af9d96f19dc9","src/gen_registers.rs":"3628949229c1cb5179ec5f655b9a1ddd0504ba74ffb9fb23741c85c9154b696f","src/gen_settings.rs":"f3cc3d31f6cc898f30606caf084f0de220db2d3b1b5e5e4145fa7c9a9a1597e2","src/gen_types.rs":"f6c090e1646a43bf2fe81ae0a7029cc6f7dc6d43285368f56d86c35a21c469a6","src/isa/arm32/mod.rs":"8e09ec1b3caf2d22dce8517b37c356047bfce9a6dea712467d867ed05c4bedaf","src/isa/arm64/mod.rs":"b01f030925d3f2af37d7df1b4a800eb7f0d24f74a46e9154fd8b6752643eb2d5","src/isa/mod.rs":"136141f99f217ba42b9e3f7f47238ab19cc974bb3bef2e2df7f7b5a683989d46","src/isa/riscv/encodings.rs":"8abb1968d917588bc5fc5f5be6dd66bdec23ac456ba65f8138237c8e891e843c","src/isa/riscv/mod.rs":"a7b461a30bbfbc1e3b33645422ff40d5b1761c30cb5d4a8aa12e9a3b7f7aee51","src/isa/riscv/recipes.rs":"c9424cffed54cc4d328879a4613b9f6a2c2b7cde7e6e17b4fccd5f661aaf92f2","src/isa/x86/encodings.rs":"f151bfb4da35c57bb614856ac84f5ac7fd0593a77360623338e6c1c7e5c5ba5f","src/isa/x86/instructions.rs":"e4a92f2b707e0a9af0317041eb9a8bc58a8bedcdbbe35f54dcfaf05699a50675","src/isa/x86/legalize.rs":"fca4a2729fbefded71effb7517c63a3da3648f8ab68968ef014cfc5d5f631cc7","src/isa/x86/mod.rs":"d6e363531df896ab542af664a5f2514f993cfb8ea485d39d8ea1b7a40747267e","src/isa/x86/opcodes.rs":"643bae64cd4050814adfb856046cf650979bec5d251a9d9a6e11bafb5a603c43","src/isa/x86/recipes.rs":"c80a15a90d521eb4752595601087d8797a43f17a5e1b7ed5379e85f7c926cd62","src/isa/x86/registers.rs":"e24c9487f4c257b1089dac6bca0106cc673db77292cd000ca8e0c7e0d01
03f63","src/isa/x86/settings.rs":"9087cd57da2852b689bddc296639f9b9e1caa30e8185798891079da8d9340b53","src/lib.rs":"2491b0e74078914cb89d1778fa8174daf723fe76aaf7fed18741237d68f6df32","src/shared/entities.rs":"90f774a70e1c2a2e9a553c07a5e80e0fe54cf127434bd83e67274bba4e1a19ba","src/shared/formats.rs":"89ed4074f748637adf56b93ba952e398c45d43e6326d01676885939e3fe8bc4a","src/shared/immediates.rs":"e4a57657f6af9853794804eb41c01204a2c13a632f44f55d90e156a4b98c5f65","src/shared/instructions.rs":"d5c3d3c59eb157f047bb2afe71f0a065c28b06627c6ab4e0104a44205cf89004","src/shared/legalize.rs":"bc9c3292446c1d338df1c4ce19f3ac5482cfe582a04a5a1e82fc9aaa6aef25ea","src/shared/mod.rs":"bc497c14d083c29eefe4935cff9cd1bd138c071bc50f787248727a3858dc69f3","src/shared/settings.rs":"cda96c9599d49b82e5cd43c7d0fd53a197fa8dadcc56dc33566ac47fdd6da607","src/shared/types.rs":"4702df132f4b5d70cc9411ec5221ba0b1bd4479252274e0223ae57b6d0331247","src/srcgen.rs":"dcfc159c8599270f17e6a978c4be255abca51556b5ef0da497faec4a4a1e62ce","src/unique_table.rs":"31aa54330ca4786af772d32e8cb6158b6504b88fa93fe177bf0c6cbe545a8d35"},"package":null}
@@ -1,7 +1,7 @@
[package]
name = "cranelift-codegen-meta"
authors = ["The Cranelift Project Developers"]
version = "0.59.0"
version = "0.58.0"
description = "Metaprogram for cranelift-codegen code generator library"
license = "Apache-2.0 WITH LLVM-exception"
repository = "https://github.com/bytecodealliance/cranelift"
@@ -9,8 +9,8 @@ readme = "README.md"
edition = "2018"

[dependencies]
cranelift-codegen-shared = { path = "../shared", version = "0.59.0" }
cranelift-entity = { path = "../../entity", version = "0.59.0" }
cranelift-codegen-shared = { path = "../shared", version = "0.58.0" }
cranelift-entity = { path = "../../cranelift-entity", version = "0.58.0" }

[badges]
maintenance = { status = "experimental" }
@ -340,7 +340,7 @@ impl InstructionBuilder {
|
|||
let polymorphic_info =
|
||||
verify_polymorphic(&operands_in, &operands_out, &self.format, &value_opnums);
|
||||
|
||||
// Infer from output operands whether an instruction clobbers CPU flags or not.
|
||||
// Infer from output operands whether an instruciton clobbers CPU flags or not.
|
||||
let writes_cpu_flags = operands_out.iter().any(|op| op.is_cpu_flags());
|
||||
|
||||
let camel_name = camel_case(&self.name);
|
||||
|
|
|
@ -560,7 +560,7 @@ fn gen_transform_group<'a>(
|
|||
fmt: &mut Formatter,
|
||||
) {
|
||||
fmt.doc_comment(group.doc);
|
||||
fmt.line("#[allow(unused_variables,unused_assignments,unused_imports,non_snake_case)]");
|
||||
fmt.line("#[allow(unused_variables,unused_assignments,non_snake_case)]");
|
||||
|
||||
// Function arguments.
|
||||
fmtln!(fmt, "pub fn {}(", group.name);
|
||||
|
|
|
@ -1560,7 +1560,6 @@ fn define_simd(
|
|||
let formats = &shared_defs.formats;
|
||||
|
||||
// Shorthands for instructions.
|
||||
let avg_round = shared.by_name("avg_round");
|
||||
let bitcast = shared.by_name("bitcast");
|
||||
let bor = shared.by_name("bor");
|
||||
let bxor = shared.by_name("bxor");
|
||||
|
@ -1796,14 +1795,14 @@ fn define_simd(
|
|||
|
||||
let is_zero_128bit =
|
||||
InstructionPredicate::new_is_all_zeroes(&*formats.unary_const, "constant_handle");
|
||||
let template = rec_vconst_optimized.opcodes(&PXOR).infer_rex();
|
||||
let template = rec_vconst_optimized.nonrex().opcodes(&PXOR);
|
||||
e.enc_32_64_func(instruction.clone(), template, |builder| {
|
||||
builder.inst_predicate(is_zero_128bit)
|
||||
});
|
||||
|
||||
let is_ones_128bit =
|
||||
InstructionPredicate::new_is_all_ones(&*formats.unary_const, "constant_handle");
|
||||
let template = rec_vconst_optimized.opcodes(&PCMPEQB).infer_rex();
|
||||
let template = rec_vconst_optimized.nonrex().opcodes(&PCMPEQB);
|
||||
e.enc_32_64_func(instruction, template, |builder| {
|
||||
builder.inst_predicate(is_ones_128bit)
|
||||
});
|
||||
|
@ -1817,7 +1816,7 @@ fn define_simd(
|
|||
// in memory) but some performance measurements are needed.
|
||||
for ty in ValueType::all_lane_types().filter(allowed_simd_type) {
|
||||
let instruction = vconst.bind(vector(ty, sse_vector_size));
|
||||
let template = rec_vconst.opcodes(&MOVUPS_LOAD).infer_rex();
|
||||
let template = rec_vconst.nonrex().opcodes(&MOVUPS_LOAD);
|
||||
e.enc_32_64_maybe_isap(instruction, template, None); // from SSE
|
||||
}
|
||||
|
||||
|
@ -1827,19 +1826,13 @@ fn define_simd(
|
|||
for ty in ValueType::all_lane_types().filter(allowed_simd_type) {
|
||||
// Store
|
||||
let bound_store = store.bind(vector(ty, sse_vector_size)).bind(Any);
|
||||
e.enc_32_64(
|
||||
bound_store.clone(),
|
||||
rec_fst.opcodes(&MOVUPS_STORE).infer_rex(),
|
||||
);
|
||||
e.enc_32_64(bound_store.clone(), rec_fst.opcodes(&MOVUPS_STORE));
|
||||
e.enc_32_64(bound_store.clone(), rec_fstDisp8.opcodes(&MOVUPS_STORE));
|
||||
e.enc_32_64(bound_store, rec_fstDisp32.opcodes(&MOVUPS_STORE));
|
||||
|
||||
// Load
|
||||
let bound_load = load.bind(vector(ty, sse_vector_size)).bind(Any);
|
||||
e.enc_32_64(
|
||||
bound_load.clone(),
|
||||
rec_fld.opcodes(&MOVUPS_LOAD).infer_rex(),
|
||||
);
|
||||
e.enc_32_64(bound_load.clone(), rec_fld.opcodes(&MOVUPS_LOAD));
|
||||
e.enc_32_64(bound_load.clone(), rec_fldDisp8.opcodes(&MOVUPS_LOAD));
|
||||
e.enc_32_64(bound_load, rec_fldDisp32.opcodes(&MOVUPS_LOAD));
|
||||
|
||||
|
@ -1927,12 +1920,6 @@ fn define_simd(
|
|||
e.enc_32_64_maybe_isap(imul, rec_fa.opcodes(opcodes), *isap);
|
||||
}
|
||||
|
||||
// SIMD integer average with rounding.
|
||||
for (ty, opcodes) in &[(I8, &PAVGB[..]), (I16, &PAVGW[..])] {
|
||||
let avgr = avg_round.bind(vector(*ty, sse_vector_size));
|
||||
e.enc_32_64(avgr, rec_fa.opcodes(opcodes));
|
||||
}
|
||||
|
||||
// SIMD logical operations
|
||||
let band = shared.by_name("band");
|
||||
let band_not = shared.by_name("band_not");
|
||||
|
@ -2407,14 +2394,5 @@ pub(crate) fn define(
|
|||
define_control_flow(&mut e, shared_defs, settings, r);
|
||||
define_reftypes(&mut e, shared_defs, r);
|
||||
|
||||
let x86_elf_tls_get_addr = x86.by_name("x86_elf_tls_get_addr");
|
||||
let x86_macho_tls_get_addr = x86.by_name("x86_macho_tls_get_addr");
|
||||
|
||||
let rec_elf_tls_get_addr = r.recipe("elf_tls_get_addr");
|
||||
let rec_macho_tls_get_addr = r.recipe("macho_tls_get_addr");
|
||||
|
||||
e.enc64_rec(x86_elf_tls_get_addr, rec_elf_tls_get_addr, 0);
|
||||
e.enc64_rec(x86_macho_tls_get_addr, rec_macho_tls_get_addr, 0);
|
||||
|
||||
e
|
||||
}
|
||||
|
|
|
@ -7,7 +7,6 @@ use crate::cdsl::operands::Operand;
|
|||
use crate::cdsl::types::ValueType;
|
||||
use crate::cdsl::typevar::{Interval, TypeSetBuilder, TypeVar};
|
||||
|
||||
use crate::shared::entities::EntityRefs;
|
||||
use crate::shared::formats::Formats;
|
||||
use crate::shared::immediates::Immediates;
|
||||
use crate::shared::types;
|
||||
|
@ -17,7 +16,6 @@ pub(crate) fn define(
|
|||
mut all_instructions: &mut AllInstructions,
|
||||
formats: &Formats,
|
||||
immediates: &Immediates,
|
||||
entities: &EntityRefs,
|
||||
) -> InstructionGroup {
|
||||
let mut ig = InstructionGroupBuilder::new(&mut all_instructions);
|
||||
|
||||
|
@ -544,39 +542,5 @@ pub(crate) fn define(
|
|||
.operands_out(vec![a]),
|
||||
);
|
||||
|
||||
let i64_t = &TypeVar::new(
|
||||
"i64_t",
|
||||
"A scalar 64bit integer",
|
||||
TypeSetBuilder::new().ints(64..64).build(),
|
||||
);
|
||||
|
||||
let GV = &Operand::new("GV", &entities.global_value);
|
||||
let addr = &Operand::new("addr", i64_t);
|
||||
|
||||
ig.push(
|
||||
Inst::new(
|
||||
"x86_elf_tls_get_addr",
|
||||
r#"
|
||||
Elf tls get addr -- This implements the GD TLS model for ELF. The clobber output should
|
||||
not be used.
|
||||
"#,
|
||||
&formats.unary_global_value,
|
||||
)
|
||||
.operands_in(vec![GV])
|
||||
.operands_out(vec![addr]),
|
||||
);
|
||||
ig.push(
|
||||
Inst::new(
|
||||
"x86_macho_tls_get_addr",
|
||||
r#"
|
||||
Mach-O tls get addr -- This implements TLS access for Mach-O. The clobber output should
|
||||
not be used.
|
||||
"#,
|
||||
&formats.unary_global_value,
|
||||
)
|
||||
.operands_in(vec![GV])
|
||||
.operands_out(vec![addr]),
|
||||
);
|
||||
|
||||
ig.build()
|
||||
}
|
||||
|
|
|
@ -43,8 +43,6 @@ pub(crate) fn define(shared: &mut SharedDefinitions, x86_instructions: &Instruct
|
|||
let iadd = insts.by_name("iadd");
|
||||
let icmp = insts.by_name("icmp");
|
||||
let iconst = insts.by_name("iconst");
|
||||
let imax = insts.by_name("imax");
|
||||
let imin = insts.by_name("imin");
|
||||
let imul = insts.by_name("imul");
|
||||
let ineg = insts.by_name("ineg");
|
||||
let insertlane = insts.by_name("insertlane");
|
||||
|
@ -61,11 +59,8 @@ pub(crate) fn define(shared: &mut SharedDefinitions, x86_instructions: &Instruct
|
|||
let shuffle = insts.by_name("shuffle");
|
||||
let srem = insts.by_name("srem");
|
||||
let sshr = insts.by_name("sshr");
|
||||
let tls_value = insts.by_name("tls_value");
|
||||
let trueif = insts.by_name("trueif");
|
||||
let udiv = insts.by_name("udiv");
|
||||
let umax = insts.by_name("umax");
|
||||
let umin = insts.by_name("umin");
|
||||
let umulhi = insts.by_name("umulhi");
|
||||
let ushr_imm = insts.by_name("ushr_imm");
|
||||
let urem = insts.by_name("urem");
|
||||
|
@ -76,7 +71,6 @@ pub(crate) fn define(shared: &mut SharedDefinitions, x86_instructions: &Instruct
|
|||
|
||||
let x86_bsf = x86_instructions.by_name("x86_bsf");
|
||||
let x86_bsr = x86_instructions.by_name("x86_bsr");
|
||||
let x86_pmaxs = x86_instructions.by_name("x86_pmaxs");
|
||||
let x86_pmaxu = x86_instructions.by_name("x86_pmaxu");
|
||||
let x86_pmins = x86_instructions.by_name("x86_pmins");
|
||||
let x86_pminu = x86_instructions.by_name("x86_pminu");
|
||||
|
@ -325,10 +319,6 @@ pub(crate) fn define(shared: &mut SharedDefinitions, x86_instructions: &Instruct
|
|||
],
|
||||
);
|
||||
|
||||
group.custom_legalize(ineg, "convert_ineg");
|
||||
|
||||
group.custom_legalize(tls_value, "expand_tls_value");
|
||||
|
||||
group.build_and_add_to(&mut shared.transform_groups);
|
||||
|
||||
let mut narrow = TransformGroupBuilder::new(
|
||||
|
@ -562,18 +552,6 @@ pub(crate) fn define(shared: &mut SharedDefinitions, x86_instructions: &Instruct
|
|||
narrow.legalize(def!(c = icmp_(ule, a, b)), vec![def!(c = icmp(uge, b, a))]);
|
||||
}
|
||||
|
||||
// SIMD integer min/max
|
||||
for ty in &[I8, I16, I32] {
|
||||
let imin = imin.bind(vector(*ty, sse_vector_size));
|
||||
narrow.legalize(def!(c = imin(a, b)), vec![def!(c = x86_pmins(a, b))]);
|
||||
let umin = umin.bind(vector(*ty, sse_vector_size));
|
||||
narrow.legalize(def!(c = umin(a, b)), vec![def!(c = x86_pminu(a, b))]);
|
||||
let imax = imax.bind(vector(*ty, sse_vector_size));
|
||||
narrow.legalize(def!(c = imax(a, b)), vec![def!(c = x86_pmaxs(a, b))]);
|
||||
let umax = umax.bind(vector(*ty, sse_vector_size));
|
||||
narrow.legalize(def!(c = umax(a, b)), vec![def!(c = x86_pmaxu(a, b))]);
|
||||
}
|
||||
|
||||
// SIMD fcmp greater-/less-than
|
||||
let gt = Literal::enumerator_for(&imm.floatcc, "gt");
|
||||
let lt = Literal::enumerator_for(&imm.floatcc, "lt");
|
||||
|
@ -634,17 +612,4 @@ pub(crate) fn define(shared: &mut SharedDefinitions, x86_instructions: &Instruct
|
|||
narrow.custom_legalize(ineg, "convert_ineg");
|
||||
|
||||
narrow.build_and_add_to(&mut shared.transform_groups);
|
||||
|
||||
let mut widen = TransformGroupBuilder::new(
|
||||
"x86_widen",
|
||||
r#"
|
||||
Legalize instructions by widening.
|
||||
|
||||
Use x86-specific instructions if needed."#,
|
||||
)
|
||||
.isa("x86")
|
||||
.chain_with(shared.transform_groups.by_name("widen").id);
|
||||
|
||||
widen.custom_legalize(ineg, "convert_ineg");
|
||||
widen.build_and_add_to(&mut shared.transform_groups);
|
||||
}
|
||||
|
|
|
@ -24,7 +24,6 @@ pub(crate) fn define(shared_defs: &mut SharedDefinitions) -> TargetIsa {
|
|||
&mut shared_defs.all_instructions,
|
||||
&shared_defs.formats,
|
||||
&shared_defs.imm,
|
||||
&shared_defs.entities,
|
||||
);
|
||||
legalize::define(shared_defs, &inst_group);
|
||||
|
||||
|
@ -33,15 +32,16 @@ pub(crate) fn define(shared_defs: &mut SharedDefinitions) -> TargetIsa {
|
|||
let mut x86_32 = CpuMode::new("I32");
|
||||
|
||||
let expand_flags = shared_defs.transform_groups.by_name("expand_flags");
|
||||
let x86_widen = shared_defs.transform_groups.by_name("x86_widen");
|
||||
let narrow_flags = shared_defs.transform_groups.by_name("narrow_flags");
|
||||
let widen = shared_defs.transform_groups.by_name("widen");
|
||||
let x86_narrow = shared_defs.transform_groups.by_name("x86_narrow");
|
||||
let x86_expand = shared_defs.transform_groups.by_name("x86_expand");
|
||||
|
||||
x86_32.legalize_monomorphic(expand_flags);
|
||||
x86_32.legalize_default(x86_narrow);
|
||||
x86_32.legalize_default(narrow_flags);
|
||||
x86_32.legalize_type(B1, expand_flags);
|
||||
x86_32.legalize_type(I8, x86_widen);
|
||||
x86_32.legalize_type(I16, x86_widen);
|
||||
x86_32.legalize_type(I8, widen);
|
||||
x86_32.legalize_type(I16, widen);
|
||||
x86_32.legalize_type(I32, x86_expand);
|
||||
x86_32.legalize_value_type(ReferenceType(R32), x86_expand);
|
||||
x86_32.legalize_type(F32, x86_expand);
|
||||
|
@ -50,8 +50,8 @@ pub(crate) fn define(shared_defs: &mut SharedDefinitions) -> TargetIsa {
|
|||
x86_64.legalize_monomorphic(expand_flags);
|
||||
x86_64.legalize_default(x86_narrow);
|
||||
x86_64.legalize_type(B1, expand_flags);
|
||||
x86_64.legalize_type(I8, x86_widen);
|
||||
x86_64.legalize_type(I16, x86_widen);
|
||||
x86_64.legalize_type(I8, widen);
|
||||
x86_64.legalize_type(I16, widen);
|
||||
x86_64.legalize_type(I32, x86_expand);
|
||||
x86_64.legalize_type(I64, x86_expand);
|
||||
x86_64.legalize_value_type(ReferenceType(R64), x86_expand);
|
||||
|
|
|
@ -317,12 +317,6 @@ pub static PAND: [u8; 3] = [0x66, 0x0f, 0xdb];
|
|||
/// Bitwise AND NOT of xmm2/m128 and xmm1 (SSE2).
|
||||
pub static PANDN: [u8; 3] = [0x66, 0x0f, 0xdf];
|
||||
|
||||
/// Average packed unsigned byte integers from xmm2/m128 and xmm1 with rounding (SSE2).
|
||||
pub static PAVGB: [u8; 3] = [0x66, 0x0f, 0xE0];
|
||||
|
||||
/// Average packed unsigned word integers from xmm2/m128 and xmm1 with rounding (SSE2).
|
||||
pub static PAVGW: [u8; 3] = [0x66, 0x0f, 0xE3];
|
||||
|
||||
/// Compare packed data for equal (SSE2).
|
||||
pub static PCMPEQB: [u8; 3] = [0x66, 0x0f, 0x74];
|
||||
|
||||
|
|
|
@ -46,16 +46,6 @@ impl<'builder> RecipeGroup<'builder> {
|
|||
self.templates.push(template.clone());
|
||||
template
|
||||
}
|
||||
fn add_template_inferred(
|
||||
&mut self,
|
||||
recipe: EncodingRecipeBuilder,
|
||||
infer_function: &'static str,
|
||||
) -> Rc<Template<'builder>> {
|
||||
let template =
|
||||
Rc::new(Template::new(recipe, self.regs).inferred_rex_compute_size(infer_function));
|
||||
self.templates.push(template.clone());
|
||||
template
|
||||
}
|
||||
fn add_template(&mut self, template: Template<'builder>) -> Rc<Template<'builder>> {
|
||||
let template = Rc::new(template);
|
||||
self.templates.push(template.clone());
|
||||
|
@ -1491,7 +1481,7 @@ pub(crate) fn define<'shared>(
|
|||
);
|
||||
|
||||
// XX /r register-indirect store of FPR with no offset.
|
||||
recipes.add_template_inferred(
|
||||
recipes.add_template_recipe(
|
||||
EncodingRecipeBuilder::new("fst", &formats.store, 1)
|
||||
.operands_in(vec![fpr, gpr])
|
||||
.inst_predicate(has_no_offset)
|
||||
|
@ -1514,7 +1504,6 @@ pub(crate) fn define<'shared>(
|
|||
}
|
||||
"#,
|
||||
),
|
||||
"size_plus_maybe_sib_or_offset_inreg1_plus_rex_prefix_for_inreg0_inreg1",
|
||||
);
|
||||
|
||||
let has_small_offset =
|
||||
|
@ -2002,7 +1991,7 @@ pub(crate) fn define<'shared>(
|
|||
);
|
||||
|
||||
// XX /r float load with no offset.
|
||||
recipes.add_template_inferred(
|
||||
recipes.add_template_recipe(
|
||||
EncodingRecipeBuilder::new("fld", &formats.load, 1)
|
||||
.operands_in(vec![gpr])
|
||||
.operands_out(vec![fpr])
|
||||
|
@ -2026,7 +2015,6 @@ pub(crate) fn define<'shared>(
|
|||
}
|
||||
"#,
|
||||
),
|
||||
"size_plus_maybe_sib_or_offset_for_inreg_0_plus_rex_prefix_for_inreg0_outreg0",
|
||||
);
|
||||
|
||||
let has_small_offset =
|
||||
|
@ -2527,7 +2515,7 @@ pub(crate) fn define<'shared>(
|
|||
),
|
||||
);
|
||||
|
||||
recipes.add_template_inferred(
|
||||
recipes.add_template_recipe(
|
||||
EncodingRecipeBuilder::new("vconst", &formats.unary_const, 5)
|
||||
.operands_out(vec![fpr])
|
||||
.clobbers_flags(false)
|
||||
|
@ -2538,10 +2526,9 @@ pub(crate) fn define<'shared>(
|
|||
const_disp4(constant_handle, func, sink);
|
||||
"#,
|
||||
),
|
||||
"size_with_inferred_rex_for_outreg0",
|
||||
);
|
||||
|
||||
recipes.add_template_inferred(
|
||||
recipes.add_template_recipe(
|
||||
EncodingRecipeBuilder::new("vconst_optimized", &formats.unary_const, 1)
|
||||
.operands_out(vec![fpr])
|
||||
.clobbers_flags(false)
|
||||
|
@ -2551,7 +2538,6 @@ pub(crate) fn define<'shared>(
|
|||
modrm_rr(out_reg0, out_reg0, sink);
|
||||
"#,
|
||||
),
|
||||
"size_with_inferred_rex_for_outreg0",
|
||||
);
|
||||
|
||||
recipes.add_template_recipe(
|
||||
|
@ -3259,73 +3245,10 @@ pub(crate) fn define<'shared>(
|
|||
recipes.add_recipe(
|
||||
EncodingRecipeBuilder::new("safepoint", &formats.multiary, 0).emit(
|
||||
r#"
|
||||
sink.add_stackmap(args, func, isa);
|
||||
"#,
|
||||
sink.add_stackmap(args, func, isa);
|
||||
"#,
|
||||
),
|
||||
);
|
||||
|
||||
// Both `elf_tls_get_addr` and `macho_tls_get_addr` require all caller-saved registers to be spilled.
|
||||
// This is currently special cased in `regalloc/spilling.rs` in the `visit_inst` function.
|
||||
|
||||
recipes.add_recipe(
|
||||
EncodingRecipeBuilder::new("elf_tls_get_addr", &formats.unary_global_value, 16)
|
||||
// FIXME Correct encoding for non rax registers
|
||||
.operands_out(vec![reg_rax])
|
||||
.emit(
|
||||
r#"
|
||||
// output %rax
|
||||
// clobbers %rdi
|
||||
|
||||
// Those data16 prefixes are necessary to pad to 16 bytes.
|
||||
|
||||
// data16 lea gv@tlsgd(%rip),%rdi
|
||||
sink.put1(0x66); // data16
|
||||
sink.put1(0b01001000); // rex.w
|
||||
const LEA: u8 = 0x8d;
|
||||
sink.put1(LEA); // lea
|
||||
modrm_riprel(0b111/*out_reg0*/, sink); // 0x3d
|
||||
sink.reloc_external(Reloc::ElfX86_64TlsGd,
|
||||
&func.global_values[global_value].symbol_name(),
|
||||
-4);
|
||||
sink.put4(0);
|
||||
|
||||
// data16 data16 callq __tls_get_addr-4
|
||||
sink.put1(0x66); // data16
|
||||
sink.put1(0x66); // data16
|
||||
sink.put1(0b01001000); // rex.w
|
||||
sink.put1(0xe8); // call
|
||||
sink.reloc_external(Reloc::X86CallPLTRel4,
|
||||
&ExternalName::LibCall(LibCall::ElfTlsGetAddr),
|
||||
-4);
|
||||
sink.put4(0);
|
||||
"#,
|
||||
),
|
||||
);
|
||||
|
||||
recipes.add_recipe(
|
||||
EncodingRecipeBuilder::new("macho_tls_get_addr", &formats.unary_global_value, 9)
|
||||
// FIXME Correct encoding for non rax registers
|
||||
.operands_out(vec![reg_rax])
|
||||
.emit(
|
||||
r#"
|
||||
// output %rax
|
||||
// clobbers %rdi
|
||||
|
||||
// movq gv@tlv(%rip), %rdi
|
||||
sink.put1(0x48); // rex
|
||||
sink.put1(0x8b); // mov
|
||||
modrm_riprel(0b111/*out_reg0*/, sink); // 0x3d
|
||||
sink.reloc_external(Reloc::MachOX86_64Tlv,
|
||||
&func.global_values[global_value].symbol_name(),
|
||||
-4);
|
||||
sink.put4(0);
|
||||
|
||||
// callq *(%rdi)
|
||||
sink.put1(0xff);
|
||||
sink.put1(0x17);
|
||||
"#,
|
||||
),
|
||||
);
|
||||
|
||||
recipes
|
||||
}
|
||||
|
|
|
@ -481,101 +481,6 @@ fn define_control_flow(
|
|||
);
|
||||
}
|
||||
|
||||
#[inline(never)]
|
||||
fn define_simd_arithmetic(
|
||||
ig: &mut InstructionGroupBuilder,
|
||||
formats: &Formats,
|
||||
_: &Immediates,
|
||||
_: &EntityRefs,
|
||||
) {
|
||||
let Int = &TypeVar::new(
|
||||
"Int",
|
||||
"A scalar or vector integer type",
|
||||
TypeSetBuilder::new()
|
||||
.ints(Interval::All)
|
||||
.simd_lanes(Interval::All)
|
||||
.build(),
|
||||
);
|
||||
|
||||
let a = &Operand::new("a", Int);
|
||||
let x = &Operand::new("x", Int);
|
||||
let y = &Operand::new("y", Int);
|
||||
|
||||
ig.push(
|
||||
Inst::new(
|
||||
"imin",
|
||||
r#"
|
||||
Signed integer minimum.
|
||||
"#,
|
||||
&formats.binary,
|
||||
)
|
||||
.operands_in(vec![x, y])
|
||||
.operands_out(vec![a]),
|
||||
);
|
||||
|
||||
ig.push(
|
||||
Inst::new(
|
||||
"umin",
|
||||
r#"
|
||||
Unsigned integer minimum.
|
||||
"#,
|
||||
&formats.binary,
|
||||
)
|
||||
.operands_in(vec![x, y])
|
||||
.operands_out(vec![a]),
|
||||
);
|
||||
|
||||
ig.push(
|
||||
Inst::new(
|
||||
"imax",
|
||||
r#"
|
||||
Signed integer maximum.
|
||||
"#,
|
||||
&formats.binary,
|
||||
)
|
||||
.operands_in(vec![x, y])
|
||||
.operands_out(vec![a]),
|
||||
);
|
||||
|
||||
ig.push(
|
||||
Inst::new(
|
||||
"umax",
|
||||
r#"
|
||||
Unsigned integer maximum.
|
||||
"#,
|
||||
&formats.binary,
|
||||
)
|
||||
.operands_in(vec![x, y])
|
||||
.operands_out(vec![a]),
|
||||
);
|
||||
|
||||
let IxN = &TypeVar::new(
|
||||
"IxN",
|
||||
"A SIMD vector type containing integers",
|
||||
TypeSetBuilder::new()
|
||||
.ints(Interval::All)
|
||||
.simd_lanes(Interval::All)
|
||||
.includes_scalars(false)
|
||||
.build(),
|
||||
);
|
||||
|
||||
let a = &Operand::new("a", IxN);
|
||||
let x = &Operand::new("x", IxN);
|
||||
let y = &Operand::new("y", IxN);
|
||||
|
||||
ig.push(
|
||||
Inst::new(
|
||||
"avg_round",
|
||||
r#"
|
||||
Unsigned average with rounding: `a := (x + y + 1) // 2`
|
||||
"#,
|
||||
&formats.binary,
|
||||
)
|
||||
.operands_in(vec![x, y])
|
||||
.operands_out(vec![a]),
|
||||
);
|
||||
}
|
||||
|
||||
#[allow(clippy::many_single_char_names)]
|
||||
pub(crate) fn define(
|
||||
all_instructions: &mut AllInstructions,
|
||||
|
@ -586,7 +491,6 @@ pub(crate) fn define(
|
|||
let mut ig = InstructionGroupBuilder::new(all_instructions);
|
||||
|
||||
define_control_flow(&mut ig, formats, imm, entities);
|
||||
define_simd_arithmetic(&mut ig, formats, imm, entities);
|
||||
|
||||
// Operand kind shorthands.
|
||||
let iflags: &TypeVar = &ValueType::Special(types::Flag::IFlags.into()).into();
|
||||
|
@ -653,6 +557,7 @@ pub(crate) fn define(
|
|||
.includes_scalars(false)
|
||||
.build(),
|
||||
);
|
||||
|
||||
let Any = &TypeVar::new(
|
||||
"Any",
|
||||
"Any integer, float, boolean, or reference scalar or vector type",
|
||||
|
@ -1127,18 +1032,6 @@ pub(crate) fn define(
|
|||
.operands_out(vec![a]),
|
||||
);
|
||||
|
||||
ig.push(
|
||||
Inst::new(
|
||||
"tls_value",
|
||||
r#"
|
||||
Compute the value of global GV, which is a TLS (thread local storage) value.
|
||||
"#,
|
||||
&formats.unary_global_value,
|
||||
)
|
||||
.operands_in(vec![GV])
|
||||
.operands_out(vec![a]),
|
||||
);
|
||||
|
||||
let HeapOffset = &TypeVar::new(
|
||||
"HeapOffset",
|
||||
"An unsigned heap offset",
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
//! Shared definitions for the Cranelift intermediate language.
|
||||
|
||||
pub mod entities;
|
||||
mod entities;
|
||||
pub mod formats;
|
||||
pub mod immediates;
|
||||
pub mod instructions;
|
||||
|
@ -28,7 +28,6 @@ pub(crate) struct Definitions {
|
|||
pub imm: Immediates,
|
||||
pub formats: Formats,
|
||||
pub transform_groups: TransformGroups,
|
||||
pub entities: EntityRefs,
|
||||
}
|
||||
|
||||
pub(crate) fn define() -> Definitions {
|
||||
|
@ -48,7 +47,6 @@ pub(crate) fn define() -> Definitions {
|
|||
imm: immediates,
|
||||
formats,
|
||||
transform_groups,
|
||||
entities,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -131,14 +131,6 @@ pub(crate) fn define() -> SettingGroup {
|
|||
false,
|
||||
);
|
||||
|
||||
settings.add_enum(
|
||||
"tls_model",
|
||||
r#"
|
||||
Defines the model used to perform TLS accesses.
|
||||
"#,
|
||||
vec!["none", "elf_gd", "macho", "coff"],
|
||||
);
|
||||
|
||||
// Settings specific to the `baldrdash` calling convention.
|
||||
|
||||
settings.add_enum(
|
||||
|
|
|
@@ -1 +1 @@
{"files":{"Cargo.toml":"32dba47d85f9bb5108788de0bbdde6da5f22e6294b63c56d1232049a929b09a0","LICENSE":"268872b9816f90fd8e85db5a28d33f8150ebb8dd016653fb39ef1f94f2686bc5","README.md":"a410bc2f5dcbde499c0cd299c2620bc8111e3c5b3fccdd9e2d85caf3c24fdab3","src/condcodes.rs":"b8d433b2217b86e172d25b6c65a3ce0cc8ca221062cad1b28b0c78d2159fbda9","src/constant_hash.rs":"ffc619f45aad62c6fdcb83553a05879691a72e9a0103375b2d6cc12d52cf72d0","src/constants.rs":"fed03a10a6316e06aa174091db6e7d1fbb5f73c82c31193012ec5ab52f1c603a","src/isa/mod.rs":"428a950eca14acbe783899ccb1aecf15027f8cbe205578308ebde203d10535f3","src/isa/x86/encoding_bits.rs":"7e013fb804b13f9f83a0d517c6f5105856938d08ad378cc44a6fe6a59adef270","src/isa/x86/mod.rs":"01ef4e4d7437f938badbe2137892183c1ac684da0f68a5bec7e06aad34f43b9b","src/lib.rs":"91f26f998f11fb9cb74d2ec171424e29badd417beef023674850ace57149c656"},"package":null}
{"files":{"Cargo.toml":"4b84070f17601421b41646efa8761533de5867778772e126c5349d623b30d278","LICENSE":"268872b9816f90fd8e85db5a28d33f8150ebb8dd016653fb39ef1f94f2686bc5","README.md":"a410bc2f5dcbde499c0cd299c2620bc8111e3c5b3fccdd9e2d85caf3c24fdab3","src/condcodes.rs":"b8d433b2217b86e172d25b6c65a3ce0cc8ca221062cad1b28b0c78d2159fbda9","src/constant_hash.rs":"ffc619f45aad62c6fdcb83553a05879691a72e9a0103375b2d6cc12d52cf72d0","src/constants.rs":"fed03a10a6316e06aa174091db6e7d1fbb5f73c82c31193012ec5ab52f1c603a","src/isa/mod.rs":"428a950eca14acbe783899ccb1aecf15027f8cbe205578308ebde203d10535f3","src/isa/x86/encoding_bits.rs":"7e013fb804b13f9f83a0d517c6f5105856938d08ad378cc44a6fe6a59adef270","src/isa/x86/mod.rs":"01ef4e4d7437f938badbe2137892183c1ac684da0f68a5bec7e06aad34f43b9b","src/lib.rs":"91f26f998f11fb9cb74d2ec171424e29badd417beef023674850ace57149c656"},"package":null}
@@ -1,7 +1,7 @@
[package]
authors = ["The Cranelift Project Developers"]
name = "cranelift-codegen-shared"
version = "0.59.0"
version = "0.58.0"
description = "For code shared between cranelift-codegen-meta and cranelift-codegen"
license = "Apache-2.0 WITH LLVM-exception"
repository = "https://github.com/bytecodealliance/cranelift"
File diff suppressed because one or more lines are too long
@@ -1,7 +1,7 @@
[package]
authors = ["The Cranelift Project Developers"]
name = "cranelift-codegen"
version = "0.59.0"
version = "0.58.0"
description = "Low-level code generator library"
license = "Apache-2.0 WITH LLVM-exception"
documentation = "https://cranelift.readthedocs.io/"
@@ -13,9 +13,9 @@ build = "build.rs"
edition = "2018"

[dependencies]
cranelift-codegen-shared = { path = "./shared", version = "0.59.0" }
cranelift-entity = { path = "../entity", version = "0.59.0" }
cranelift-bforest = { path = "../bforest", version = "0.59.0" }
cranelift-codegen-shared = { path = "./shared", version = "0.58.0" }
cranelift-entity = { path = "../cranelift-entity", version = "0.58.0" }
cranelift-bforest = { path = "../cranelift-bforest", version = "0.58.0" }
hashbrown = { version = "0.6", optional = true }
target-lexicon = "0.10"
log = { version = "0.4.6", default-features = false }
@@ -30,7 +30,7 @@ byteorder = { version = "1.3.2", default-features = false }
# accomodated in `tests`.

[build-dependencies]
cranelift-codegen-meta = { path = "meta", version = "0.59.0" }
cranelift-codegen-meta = { path = "meta", version = "0.58.0" }

[features]
default = ["std", "unwind"]
@ -74,7 +74,7 @@ impl<'a> MemoryCodeSink<'a> {
|
|||
|
||||
/// A trait for receiving relocations for code that is emitted directly into memory.
|
||||
pub trait RelocSink {
|
||||
/// Add a relocation referencing a block at the current offset.
|
||||
/// Add a relocation referencing an block at the current offset.
|
||||
fn reloc_block(&mut self, _: CodeOffset, _: Reloc, _: CodeOffset);
|
||||
|
||||
/// Add a relocation referencing an external symbol at the current offset.
|
||||
|
|
|
@ -56,12 +56,6 @@ pub enum Reloc {
|
|||
Arm64Call,
|
||||
/// RISC-V call target
|
||||
RiscvCall,
|
||||
|
||||
/// Elf x86_64 32 bit signed PC relative offset to two GOT entries for GD symbol.
|
||||
ElfX86_64TlsGd,
|
||||
|
||||
/// Mach-O x86_64 32 bit signed PC relative offset to a `__thread_vars` entry.
|
||||
MachOX86_64Tlv,
|
||||
}
|
||||
|
||||
impl fmt::Display for Reloc {
|
||||
|
@ -77,9 +71,6 @@ impl fmt::Display for Reloc {
|
|||
Self::X86CallPLTRel4 => write!(f, "CallPLTRel4"),
|
||||
Self::X86GOTPCRel4 => write!(f, "GOTPCRel4"),
|
||||
Self::Arm32Call | Self::Arm64Call | Self::RiscvCall => write!(f, "Call"),
|
||||
|
||||
Self::ElfX86_64TlsGd => write!(f, "ElfX86_64TlsGd"),
|
||||
Self::MachOX86_64Tlv => write!(f, "MachOX86_64Tlv"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -136,7 +127,7 @@ pub trait CodeSink {
|
|||
/// Add 8 bytes to the code section.
|
||||
fn put8(&mut self, _: u64);
|
||||
|
||||
/// Add a relocation referencing a block at the current offset.
|
||||
/// Add a relocation referencing an block at the current offset.
|
||||
fn reloc_block(&mut self, _: Reloc, _: CodeOffset);
|
||||
|
||||
/// Add a relocation referencing an external symbol plus the addend at the current offset.
|
||||
|
|
|
@ -163,7 +163,7 @@ fn try_fold_redundant_jump(
|
|||
}
|
||||
};
|
||||
|
||||
// For the moment, only attempt to fold a branch to a block that is parameterless.
|
||||
// For the moment, only attempt to fold a branch to an block that is parameterless.
|
||||
// These blocks are mainly produced by critical edge splitting.
|
||||
//
|
||||
// TODO: Allow folding blocks that define SSA values and function as phi nodes.
|
||||
|
|
|
@ -13,10 +13,10 @@ pub enum CursorPosition {
|
|||
/// Cursor is pointing at an existing instruction.
|
||||
/// New instructions will be inserted *before* the current instruction.
|
||||
At(ir::Inst),
|
||||
/// Cursor is before the beginning of a block. No instructions can be inserted. Calling
|
||||
/// Cursor is before the beginning of an block. No instructions can be inserted. Calling
|
||||
/// `next_inst()` will move to the first instruction in the block.
|
||||
Before(ir::Block),
|
||||
/// Cursor is pointing after the end of a block.
|
||||
/// Cursor is pointing after the end of an block.
|
||||
/// New instructions will be appended to the block.
|
||||
After(ir::Block),
|
||||
}
|
||||
|
@ -368,7 +368,7 @@ pub trait Cursor {
|
|||
|
||||
/// Move to the next instruction in the same block and return it.
|
||||
///
|
||||
/// - If the cursor was positioned before a block, go to the first instruction in that block.
|
||||
/// - If the cursor was positioned before an block, go to the first instruction in that block.
|
||||
/// - If there are no more instructions in the block, go to the `After(block)` position and return
|
||||
/// `None`.
|
||||
/// - If the cursor wasn't pointing anywhere, keep doing that.
|
||||
|
@ -377,7 +377,7 @@ pub trait Cursor {
|
|||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// The `next_inst()` method is intended for iterating over the instructions in a block like
|
||||
/// The `next_inst()` method is intended for iterating over the instructions in an block like
|
||||
/// this:
|
||||
///
|
||||
/// ```
|
||||
|
@ -438,7 +438,7 @@ pub trait Cursor {
|
|||
|
||||
/// Move to the previous instruction in the same block and return it.
|
||||
///
|
||||
/// - If the cursor was positioned after a block, go to the last instruction in that block.
|
||||
/// - If the cursor was positioned after an block, go to the last instruction in that block.
|
||||
/// - If there are no more instructions in the block, go to the `Before(block)` position and return
|
||||
/// `None`.
|
||||
/// - If the cursor wasn't pointing anywhere, keep doing that.
|
||||
|
@ -494,7 +494,7 @@ pub trait Cursor {
|
|||
///
|
||||
/// - If pointing at an instruction, the new instruction is inserted before the current
|
||||
/// instruction.
|
||||
/// - If pointing at the bottom of a block, the new instruction is appended to the block.
|
||||
/// - If pointing at the bottom of an block, the new instruction is appended to the block.
|
||||
/// - Otherwise panic.
|
||||
///
|
||||
/// In either case, the cursor is not moved, such that repeated calls to `insert_inst()` causes
|
||||
|
@ -532,16 +532,16 @@ pub trait Cursor {
|
|||
inst
|
||||
}
|
||||
|
||||
/// Insert a block at the current position and switch to it.
|
||||
/// Insert an block at the current position and switch to it.
|
||||
///
|
||||
/// As far as possible, this method behaves as if the block header were an instruction inserted
|
||||
/// at the current position.
|
||||
///
|
||||
/// - If the cursor is pointing at an existing instruction, *the current block is split in two*
|
||||
/// and the current instruction becomes the first instruction in the inserted block.
|
||||
/// - If the cursor points at the bottom of a block, the new block is inserted after the current
|
||||
/// - If the cursor points at the bottom of an block, the new block is inserted after the current
|
||||
/// one, and moved to the bottom of the new block where instructions can be appended.
|
||||
/// - If the cursor points to the top of a block, the new block is inserted above the current one.
|
||||
/// - If the cursor points to the top of an block, the new block is inserted above the current one.
|
||||
/// - If the cursor is not pointing at anything, the new block is placed last in the layout.
|
||||
///
|
||||
/// This means that it is always valid to call this method, and it always leaves the cursor in
|
||||
|
|
|
@ -284,9 +284,9 @@ impl DominatorTree {
|
|||
//
|
||||
// 1. Each block is a node, with outgoing edges for all the branches in the block.
|
||||
// 2. Each basic block is a node, with outgoing edges for the single branch at the end of
|
||||
// the BB. (A block is a linear sequence of basic blocks).
|
||||
// the BB. (An block is a linear sequence of basic blocks).
|
||||
//
|
||||
// The first graph is a contraction of the second one. We want to compute a block post-order
|
||||
// The first graph is a contraction of the second one. We want to compute an block post-order
|
||||
// that is compatible both graph interpretations. That is, if you compute a BB post-order
|
||||
// and then remove those BBs that do not correspond to block headers, you get a post-order of
|
||||
// the block graph.
|
||||
|
@ -302,15 +302,15 @@ impl DominatorTree {
|
|||
//
|
||||
// Edge pruning:
|
||||
//
|
||||
// In the BB graph, we keep an edge to a block the first time we visit the *source* side
|
||||
// In the BB graph, we keep an edge to an block the first time we visit the *source* side
|
||||
// of the edge. Any subsequent edges to the same block are pruned.
|
||||
//
|
||||
// The equivalent tree is reached in the block graph by keeping the first edge to a block
|
||||
// The equivalent tree is reached in the block graph by keeping the first edge to an block
|
||||
// in a top-down traversal of the successors. (And then visiting edges in a bottom-up
|
||||
// order).
|
||||
//
|
||||
// This pruning method makes it possible to compute the DFT without storing lots of
|
||||
// information about the progress through a block.
|
||||
// information about the progress through an block.
|
||||
|
||||
// During this algorithm only, use `rpo_number` to hold the following state:
|
||||
//
|
||||
|
@ -348,7 +348,7 @@ impl DominatorTree {
|
|||
/// Push `block` successors onto `self.stack`, filtering out those that have already been seen.
|
||||
///
|
||||
/// The successors are pushed in program order which is important to get a split-invariant
|
||||
/// post-order. Split-invariant means that if a block is split in two, we get the same
|
||||
/// post-order. Split-invariant means that if an block is split in two, we get the same
|
||||
/// post-order except for the insertion of the new block header at the split point.
|
||||
fn push_successors(&mut self, func: &Function, block: Block) {
|
||||
for inst in func.layout.block_insts(block) {
|
||||
|
@ -543,7 +543,7 @@ impl DominatorTreePreorder {
|
|||
}
|
||||
}
|
||||
|
||||
/// An iterator that enumerates the direct children of a block in the dominator tree.
|
||||
/// An iterator that enumerates the direct children of an block in the dominator tree.
|
||||
pub struct ChildIter<'a> {
|
||||
dtpo: &'a DominatorTreePreorder,
|
||||
next: PackedOption<Block>,
|
||||
|
@ -580,7 +580,7 @@ impl DominatorTreePreorder {
|
|||
/// time. This is less general than the `DominatorTree` method because it only works with block
|
||||
/// program points.
|
||||
///
|
||||
/// A block is considered to dominate itself.
|
||||
/// An block is considered to dominate itself.
|
||||
pub fn dominates(&self, a: Block, b: Block) -> bool {
|
||||
let na = &self.nodes[a];
|
||||
let nb = &self.nodes[b];
|
||||
|
|
|
@ -376,7 +376,7 @@ impl DataFlowGraph {
|
|||
pub enum ValueDef {
|
||||
/// Value is the n'th result of an instruction.
|
||||
Result(Inst, usize),
|
||||
/// Value is the n'th parameter to a block.
|
||||
/// Value is the n'th parameter to an block.
|
||||
Param(Block, usize),
|
||||
}
|
||||
|
||||
|
@ -393,7 +393,7 @@ impl ValueDef {
|
|||
pub fn unwrap_block(&self) -> Block {
|
||||
match *self {
|
||||
Self::Param(block, _) => block,
|
||||
_ => panic!("Value is not a block parameter"),
|
||||
_ => panic!("Value is not an block parameter"),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -419,7 +419,7 @@ enum ValueData {
|
|||
/// Value is defined by an instruction.
|
||||
Inst { ty: Type, num: u16, inst: Inst },
|
||||
|
||||
/// Value is a block parameter.
|
||||
/// Value is an block parameter.
|
||||
Param { ty: Type, num: u16, block: Block },
|
||||
|
||||
/// Value is an alias of another value.
|
||||
|
@ -804,12 +804,12 @@ impl DataFlowGraph {
|
|||
/// last `block` parameter. This can disrupt all the branch instructions jumping to this
|
||||
/// `block` for which you have to change the branch argument order if necessary.
|
||||
///
|
||||
/// Panics if `val` is not a block parameter.
|
||||
/// Panics if `val` is not an block parameter.
|
||||
pub fn swap_remove_block_param(&mut self, val: Value) -> usize {
|
||||
let (block, num) = if let ValueData::Param { num, block, .. } = self.values[val] {
|
||||
(block, num)
|
||||
} else {
|
||||
panic!("{} must be a block parameter", val);
|
||||
panic!("{} must be an block parameter", val);
|
||||
};
|
||||
self.blocks[block]
|
||||
.params
|
||||
|
@ -826,7 +826,7 @@ impl DataFlowGraph {
|
|||
{
|
||||
*old_num = num;
|
||||
} else {
|
||||
panic!("{} should be a Block parameter", last_arg_val);
|
||||
panic!("{} should be an Block parameter", last_arg_val);
|
||||
}
|
||||
}
|
||||
num as usize
|
||||
|
@ -838,7 +838,7 @@ impl DataFlowGraph {
|
|||
let (block, num) = if let ValueData::Param { num, block, .. } = self.values[val] {
|
||||
(block, num)
|
||||
} else {
|
||||
panic!("{} must be a block parameter", val);
|
||||
panic!("{} must be an block parameter", val);
|
||||
};
|
||||
self.blocks[block]
|
||||
.params
|
||||
|
@ -853,7 +853,7 @@ impl DataFlowGraph {
|
|||
*num -= 1;
|
||||
}
|
||||
_ => panic!(
|
||||
"{} must be a block parameter",
|
||||
"{} must be an block parameter",
|
||||
self.blocks[block]
|
||||
.params
|
||||
.get(index as usize, &self.value_lists)
|
||||
|
@ -880,7 +880,7 @@ impl DataFlowGraph {
|
|||
};
|
||||
}
|
||||
|
||||
/// Replace a block parameter with a new value of type `ty`.
|
||||
/// Replace an block parameter with a new value of type `ty`.
|
||||
///
|
||||
/// The `old_value` must be an attached block parameter. It is removed from its place in the list
|
||||
/// of parameters and replaced by a new value of type `new_type`. The new value gets the same
|
||||
|
@ -894,7 +894,7 @@ impl DataFlowGraph {
|
|||
let (block, num) = if let ValueData::Param { num, block, .. } = self.values[old_value] {
|
||||
(block, num)
|
||||
} else {
|
||||
panic!("{} must be a block parameter", old_value);
|
||||
panic!("{} must be an block parameter", old_value);
|
||||
};
|
||||
let new_arg = self.make_value(ValueData::Param {
|
||||
ty: new_type,
|
||||
@ -295,8 +295,8 @@ impl Function {
/// to be confused with a "leaf function" in Windows terminology.
pub fn is_leaf(&self) -> bool {
// Conservative result: if there's at least one function signature referenced in this
// function, assume it is not a leaf.
self.dfg.signatures.is_empty()
// function, assume it may call.
!self.dfg.signatures.is_empty()
}
}
@ -63,9 +63,6 @@ pub enum GlobalValueData {
|
|||
/// away, after linking? If so, references to it can avoid going through a GOT. Note that
|
||||
/// symbols meant to be preemptible cannot be colocated.
|
||||
colocated: bool,
|
||||
|
||||
/// Does this symbol refer to a thread local storage value?
|
||||
tls: bool,
|
||||
},
|
||||
}
|
||||
|
||||
|
@ -113,13 +110,11 @@ impl fmt::Display for GlobalValueData {
|
|||
ref name,
|
||||
offset,
|
||||
colocated,
|
||||
tls,
|
||||
} => {
|
||||
write!(
|
||||
f,
|
||||
"symbol {}{}{}",
|
||||
"symbol {}{}",
|
||||
if colocated { "colocated " } else { "" },
|
||||
if tls { "tls " } else { "" },
|
||||
name
|
||||
)?;
|
||||
let offset_val: i64 = offset.into();
|
||||
|
|
|
@ -1,6 +1,6 @@
//! Function layout.
//!
//! The order of basic blocks in a function and the order of instructions in a block is
//! The order of basic blocks in a function and the order of instructions in an block is
//! determined by the `Layout` data structure defined in this module.
use crate::entity::SecondaryMap;
@ -21,7 +21,7 @@ use log::debug;
///
/// - The order of blocks in the function.
/// - Which block contains a given instruction.
/// - The order of instructions with a block.
/// - The order of instructions with an block.
///
/// While data dependencies are not recorded, instruction ordering does affect control
/// dependencies, so part of the semantics of the program are determined by the layout.
@ -75,7 +75,7 @@ impl Layout {
/// like line numbers in BASIC: 10, 20, 30, ...
///
/// The block sequence numbers are strictly increasing, and so are the instruction sequence numbers
/// within a block. The instruction sequence numbers are all between the sequence number of their
/// within an block. The instruction sequence numbers are all between the sequence number of their
/// containing block and the following block.
///
/// The result is that sequence numbers work like BASIC line numbers for the textual form of the IR.
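// A minimal standalone sketch of the sequencing idea described above, under the
// assumption that numbers are handed out with a fixed gap (here 10, like BASIC
// line numbers) so an instruction can usually be inserted between two existing
// ones without renumbering; the actual constants and renumbering policy in
// `Layout` are not shown in this diff.
type Seq = u32;

const SEQ_GAP: Seq = 10;

// Try to find a sequence number strictly between two neighbours.
fn seq_between(prev: Seq, next: Seq) -> Option<Seq> {
    let mid = prev + (next - prev) / 2;
    if mid > prev && mid < next { Some(mid) } else { None }
}

fn main() {
    let block_seq = 0;
    let insts: Vec<Seq> = (1..=3).map(|i| block_seq + i * SEQ_GAP).collect();
    assert_eq!(insts, vec![10, 20, 30]);
    assert_eq!(seq_between(10, 20), Some(15)); // room to insert a new instruction
    assert_eq!(seq_between(10, 11), None);     // gap exhausted; would renumber
}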
@ -335,7 +335,7 @@ impl Layout {
|
|||
/// Methods for laying out blocks.
|
||||
///
|
||||
/// An unknown block starts out as *not inserted* in the block layout. The layout is a linear order of
|
||||
/// inserted blocks. Once a block has been inserted in the layout, instructions can be added. A block
|
||||
/// inserted blocks. Once an block has been inserted in the layout, instructions can be added. An block
|
||||
/// can only be removed from the layout when it is empty.
|
||||
///
|
||||
/// Since every block must end with a terminator instruction which cannot fall through, the layout of
|
||||
|
@ -514,7 +514,7 @@ impl<'f> IntoIterator for &'f Layout {
|
|||
/// Methods for arranging instructions.
|
||||
///
|
||||
/// An instruction starts out as *not inserted* in the layout. An instruction can be inserted into
|
||||
/// a block at a given position.
|
||||
/// an block at a given position.
|
||||
impl Layout {
|
||||
/// Get the block containing `inst`, or `None` if `inst` is not inserted in the layout.
|
||||
pub fn inst_block(&self, inst: Inst) -> Option<Block> {
|
||||
|
@ -559,12 +559,12 @@ impl Layout {
|
|||
self.assign_inst_seq(inst);
|
||||
}
|
||||
|
||||
/// Fetch a block's first instruction.
|
||||
/// Fetch an block's first instruction.
|
||||
pub fn first_inst(&self, block: Block) -> Option<Inst> {
|
||||
self.blocks[block].first_inst.into()
|
||||
}
|
||||
|
||||
/// Fetch a block's last instruction.
|
||||
/// Fetch an block's last instruction.
|
||||
pub fn last_inst(&self, block: Block) -> Option<Inst> {
|
||||
self.blocks[block].last_inst.into()
|
||||
}
|
||||
|
@ -579,7 +579,7 @@ impl Layout {
|
|||
self.insts[inst].prev.expand()
|
||||
}
|
||||
|
||||
/// Fetch the first instruction in a block's terminal branch group.
|
||||
/// Fetch the first instruction in an block's terminal branch group.
|
||||
pub fn canonical_branch_inst(&self, dfg: &DataFlowGraph, block: Block) -> Option<Inst> {
|
||||
// Basic blocks permit at most two terminal branch instructions.
|
||||
// If two, the former is conditional and the latter is unconditional.
|
||||
|
@ -724,7 +724,7 @@ struct InstNode {
|
|||
seq: SequenceNumber,
|
||||
}
|
||||
|
||||
/// Iterate over instructions in a block in layout order. See `Layout::block_insts()`.
|
||||
/// Iterate over instructions in an block in layout order. See `Layout::block_insts()`.
|
||||
pub struct Insts<'f> {
|
||||
layout: &'f Layout,
|
||||
head: Option<Inst>,
|
||||
|
|
|
@ -46,9 +46,6 @@ pub enum LibCall {
|
|||
Memset,
|
||||
/// libc.memmove
|
||||
Memmove,
|
||||
|
||||
/// Elf __tls_get_addr
|
||||
ElfTlsGetAddr,
|
||||
}
|
||||
|
||||
impl fmt::Display for LibCall {
|
||||
|
@ -74,8 +71,6 @@ impl FromStr for LibCall {
|
|||
"Memcpy" => Ok(Self::Memcpy),
|
||||
"Memset" => Ok(Self::Memset),
|
||||
"Memmove" => Ok(Self::Memmove),
|
||||
|
||||
"ElfTlsGetAddr" => Ok(Self::ElfTlsGetAddr),
|
||||
_ => Err(()),
|
||||
}
|
||||
}
|
||||
|
|
|
@ -10,7 +10,7 @@ use core::u32;
|
|||
/// begin or end. It can be either:
|
||||
///
|
||||
/// 1. An instruction or
|
||||
/// 2. A block header.
|
||||
/// 2. An block header.
|
||||
///
|
||||
/// This corresponds more or less to the lines in the textual form of Cranelift IR.
|
||||
#[derive(PartialEq, Eq, Clone, Copy)]
|
||||
|
@ -47,7 +47,7 @@ impl From<ValueDef> for ProgramPoint {
|
|||
pub enum ExpandedProgramPoint {
|
||||
/// An instruction in the function.
|
||||
Inst(Inst),
|
||||
/// A block header.
|
||||
/// An block header.
|
||||
Block(Block),
|
||||
}
|
||||
|
||||
|
|
|
@ -4,10 +4,7 @@ use super::enc_tables::{needs_offset, needs_sib_byte};
|
|||
use super::registers::RU;
|
||||
use crate::binemit::{bad_encoding, CodeSink, Reloc};
|
||||
use crate::ir::condcodes::{CondCode, FloatCC, IntCC};
|
||||
use crate::ir::{
|
||||
Block, Constant, ExternalName, Function, Inst, InstructionData, JumpTable, LibCall, Opcode,
|
||||
TrapCode,
|
||||
};
|
||||
use crate::ir::{Block, Constant, Function, Inst, InstructionData, JumpTable, Opcode, TrapCode};
|
||||
use crate::isa::{RegUnit, StackBase, StackBaseMask, StackRef, TargetIsa};
|
||||
use crate::regalloc::RegDiversions;
|
||||
|
||||
|
|
|
@ -123,39 +123,6 @@ fn size_plus_maybe_sib_or_offset_for_inreg_1(
|
|||
sizing.base_size + if needs_sib_or_offset { 1 } else { 0 }
|
||||
}
|
||||
|
||||
/// Calculates the size while inferring if the first and second input registers (inreg0, inreg1)
|
||||
/// require a dynamic REX prefix and if the second input register (inreg1) requires a SIB or offset.
|
||||
fn size_plus_maybe_sib_or_offset_inreg1_plus_rex_prefix_for_inreg0_inreg1(
|
||||
sizing: &RecipeSizing,
|
||||
enc: Encoding,
|
||||
inst: Inst,
|
||||
divert: &RegDiversions,
|
||||
func: &Function,
|
||||
) -> u8 {
|
||||
let needs_rex = (EncodingBits::from(enc.bits()).rex_w() != 0)
|
||||
|| test_input(0, inst, divert, func, is_extended_reg)
|
||||
|| test_input(1, inst, divert, func, is_extended_reg);
|
||||
size_plus_maybe_sib_or_offset_for_inreg_1(sizing, enc, inst, divert, func)
|
||||
+ if needs_rex { 1 } else { 0 }
|
||||
}
|
||||
|
||||
/// Calculates the size while inferring if the first input register (inreg0) and first output
|
||||
/// register (outreg0) require a dynamic REX and if the first input register (inreg0) requires a
|
||||
/// SIB or offset.
|
||||
fn size_plus_maybe_sib_or_offset_for_inreg_0_plus_rex_prefix_for_inreg0_outreg0(
|
||||
sizing: &RecipeSizing,
|
||||
enc: Encoding,
|
||||
inst: Inst,
|
||||
divert: &RegDiversions,
|
||||
func: &Function,
|
||||
) -> u8 {
|
||||
let needs_rex = (EncodingBits::from(enc.bits()).rex_w() != 0)
|
||||
|| test_input(0, inst, divert, func, is_extended_reg)
|
||||
|| test_result(0, inst, divert, func, is_extended_reg);
|
||||
size_plus_maybe_sib_or_offset_for_inreg_0(sizing, enc, inst, divert, func)
|
||||
+ if needs_rex { 1 } else { 0 }
|
||||
}
|
||||
|
||||
/// Infers whether a dynamic REX prefix will be emitted, for use with one input reg.
|
||||
///
|
||||
/// A REX prefix is known to be emitted if either:
|
||||
|
@ -232,19 +199,6 @@ fn size_with_inferred_rex_for_inreg0_outreg0(
|
|||
sizing.base_size + if needs_rex { 1 } else { 0 }
|
||||
}
|
||||
|
||||
/// Infers whether a dynamic REX prefix will be emitted, based on a single output register.
|
||||
fn size_with_inferred_rex_for_outreg0(
|
||||
sizing: &RecipeSizing,
|
||||
enc: Encoding,
|
||||
inst: Inst,
|
||||
divert: &RegDiversions,
|
||||
func: &Function,
|
||||
) -> u8 {
|
||||
let needs_rex = (EncodingBits::from(enc.bits()).rex_w() != 0)
|
||||
|| test_result(0, inst, divert, func, is_extended_reg);
|
||||
sizing.base_size + if needs_rex { 1 } else { 0 }
|
||||
}
|
||||
|
||||
/// Infers whether a dynamic REX prefix will be emitted, for use with CMOV.
|
||||
///
|
||||
/// CMOV uses 3 inputs, with the REX is inferred from reg1 and reg2.
|
||||
|
@ -1259,7 +1213,7 @@ fn convert_insertlane(
|
|||
}
|
||||
}
|
||||
|
||||
/// For SIMD or scalar integer negation, convert `ineg` to `vconst + isub` or `iconst + isub`.
|
||||
/// For SIMD negation, convert an `ineg` to a `vconst + isub`.
|
||||
fn convert_ineg(
|
||||
inst: ir::Inst,
|
||||
func: &mut ir::Function,
|
||||
|
@ -1275,53 +1229,10 @@ fn convert_ineg(
|
|||
} = pos.func.dfg[inst]
|
||||
{
|
||||
let value_type = pos.func.dfg.value_type(arg);
|
||||
let zero_value = if value_type.is_vector() && value_type.lane_type().is_int() {
|
||||
if value_type.is_vector() && value_type.lane_type().is_int() {
|
||||
let zero_immediate = pos.func.dfg.constants.insert(vec![0; 16].into());
|
||||
pos.ins().vconst(value_type, zero_immediate) // this should be legalized to a PXOR
|
||||
} else if value_type.is_int() {
|
||||
pos.ins().iconst(value_type, 0)
|
||||
} else {
|
||||
panic!("Can't convert ineg of type {}", value_type)
|
||||
};
|
||||
pos.func.dfg.replace(inst).isub(zero_value, arg);
|
||||
} else {
|
||||
unreachable!()
|
||||
}
|
||||
}
|
||||
|
||||
fn expand_tls_value(
|
||||
inst: ir::Inst,
|
||||
func: &mut ir::Function,
|
||||
_cfg: &mut ControlFlowGraph,
|
||||
isa: &dyn TargetIsa,
|
||||
) {
|
||||
use crate::settings::TlsModel;
|
||||
|
||||
assert!(
|
||||
isa.triple().architecture == target_lexicon::Architecture::X86_64,
|
||||
"Not yet implemented for {:?}",
|
||||
isa.triple(),
|
||||
);
|
||||
|
||||
if let ir::InstructionData::UnaryGlobalValue {
|
||||
opcode: ir::Opcode::TlsValue,
|
||||
global_value,
|
||||
} = func.dfg[inst]
|
||||
{
|
||||
let ctrl_typevar = func.dfg.ctrl_typevar(inst);
|
||||
assert_eq!(ctrl_typevar, ir::types::I64);
|
||||
|
||||
match isa.flags().tls_model() {
|
||||
TlsModel::None => panic!("tls_model flag is not set."),
|
||||
TlsModel::ElfGd => {
|
||||
func.dfg.replace(inst).x86_elf_tls_get_addr(global_value);
|
||||
}
|
||||
TlsModel::Macho => {
|
||||
func.dfg.replace(inst).x86_macho_tls_get_addr(global_value);
|
||||
}
|
||||
model => unimplemented!("tls_value for tls model {:?}", model),
|
||||
let zero_value = pos.ins().vconst(value_type, zero_immediate); // this should be legalized to a PXOR
|
||||
pos.func.dfg.replace(inst).isub(zero_value, arg);
|
||||
}
|
||||
} else {
|
||||
unreachable!();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -40,7 +40,7 @@ pub fn expand_global_value(
|
|||
global_type,
|
||||
readonly,
|
||||
} => load_addr(inst, func, base, offset, global_type, readonly, isa),
|
||||
ir::GlobalValueData::Symbol { tls, .. } => symbol(inst, func, gv, isa, tls),
|
||||
ir::GlobalValueData::Symbol { .. } => symbol(inst, func, gv, isa),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -123,18 +123,7 @@ fn load_addr(
|
|||
}
|
||||
|
||||
/// Expand a `global_value` instruction for a symbolic name global.
|
||||
fn symbol(
|
||||
inst: ir::Inst,
|
||||
func: &mut ir::Function,
|
||||
gv: ir::GlobalValue,
|
||||
isa: &dyn TargetIsa,
|
||||
tls: bool,
|
||||
) {
|
||||
fn symbol(inst: ir::Inst, func: &mut ir::Function, gv: ir::GlobalValue, isa: &dyn TargetIsa) {
|
||||
let ptr_ty = isa.pointer_type();
|
||||
|
||||
if tls {
|
||||
func.dfg.replace(inst).tls_value(ptr_ty, gv);
|
||||
} else {
|
||||
func.dfg.replace(inst).symbol_value(ptr_ty, gv);
|
||||
}
|
||||
func.dfg.replace(inst).symbol_value(ptr_ty, gv);
|
||||
}
|
||||
|
|
|
@ -121,7 +121,7 @@ fn static_addr(
|
|||
pos.func.dfg.replace(inst).iconst(addr_ty, 0);
|
||||
|
||||
// Split Block, as the trap is a terminator instruction.
|
||||
let curr_block = pos.current_block().expect("Cursor is not in a block");
|
||||
let curr_block = pos.current_block().expect("Cursor is not in an block");
|
||||
let new_block = pos.func.dfg.make_block();
|
||||
pos.insert_block(new_block);
|
||||
cfg.recompute_block(pos.func, curr_block);
|
||||
|
|
|
@ -95,7 +95,7 @@ pub fn vsplit(
|
|||
split_any(func, cfg, pos, srcloc, value, Opcode::Vconcat)
|
||||
}
|
||||
|
||||
/// After splitting a block argument, we need to go back and fix up all of the predecessor
|
||||
/// After splitting an block argument, we need to go back and fix up all of the predecessor
|
||||
/// instructions. This is potentially a recursive operation, but we don't implement it recursively
|
||||
/// since that could use up too muck stack.
|
||||
///
|
||||
|
@ -260,7 +260,7 @@ fn split_value(
|
|||
}
|
||||
}
|
||||
ValueDef::Param(block, num) => {
|
||||
// This is a block parameter.
|
||||
// This is an block parameter.
|
||||
// We can split the parameter value unless this is the entry block.
|
||||
if pos.func.layout.entry_block() != Some(block) {
|
||||
reuse = Some(split_block_param(pos, block, num, value, concat, repairs));
|
||||
|
|
|
@ -71,7 +71,7 @@ impl LoopAnalysis {
|
|||
self.loops[lp].parent.expand()
|
||||
}
|
||||
|
||||
/// Determine if a Block belongs to a loop by running a finger along the loop tree.
|
||||
/// Determine if an Block belongs to a loop by running a finger along the loop tree.
|
||||
///
|
||||
/// Returns `true` if `block` is in loop `lp`.
|
||||
pub fn is_in_loop(&self, block: Block, lp: Loop) -> bool {
|
||||
|
|
|
@ -122,7 +122,7 @@ use cranelift_entity::{PrimaryMap, SecondaryMap};
|
|||
// =============================================================================================
|
||||
// Data structures used for discovery of trees
|
||||
|
||||
// `ZeroOneOrMany` is used to record the number of predecessors a Block block has. The `Zero` case
|
||||
// `ZeroOneOrMany` is used to record the number of predecessors an Block block has. The `Zero` case
|
||||
// is included so as to cleanly handle the case where the incoming graph has unreachable Blocks.
|
||||
|
||||
#[derive(Clone, PartialEq)]
|
||||
|
@ -184,7 +184,7 @@ struct AvailEnv {
|
|||
}
|
||||
|
||||
// `ProcessingStackElem` combines AvailEnv with contextual information needed to "navigate" within
|
||||
// a Block.
|
||||
// an Block.
|
||||
//
|
||||
// A ProcessingStackElem conceptually has the lifetime of exactly one Block: once the current Block is
|
||||
// completed, the ProcessingStackElem will be abandoned. In practice the top level state,
|
||||
|
@ -192,7 +192,7 @@ struct AvailEnv {
|
|||
//
|
||||
// Note that ProcessingStackElem must contain a CursorPosition. The CursorPosition, which
|
||||
// indicates where we are in the current Block, cannot be implicitly maintained by looping over all
|
||||
// the instructions in a Block in turn, because we may choose to suspend processing the current Block
|
||||
// the instructions in an Block in turn, because we may choose to suspend processing the current Block
|
||||
// at a side exit, continue by processing the subtree reached via the side exit, and only later
|
||||
// resume the current Block.
|
||||
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
//!
|
||||
//! Conventional SSA (CSSA) form is a subset of SSA form where any (transitively) phi-related
|
||||
//! values do not interfere. We construct CSSA by building virtual registers that are as large as
|
||||
//! possible and inserting copies where necessary such that all argument values passed to a block
|
||||
//! possible and inserting copies where necessary such that all argument values passed to an block
|
||||
//! parameter will belong to the same virtual register as the block parameter value itself.
|
||||
|
||||
use crate::cursor::{Cursor, EncCursor};
|
||||
|
@ -233,7 +233,7 @@ impl<'a> Context<'a> {
|
|||
}
|
||||
|
||||
// Check for basic interference: If `arg` overlaps a value defined at the entry to
|
||||
// `block`, it can never be used as a block argument.
|
||||
// `block`, it can never be used as an block argument.
|
||||
let interference = {
|
||||
let lr = &self.liveness[arg];
|
||||
|
||||
|
@ -874,7 +874,7 @@ struct VirtualCopies {
|
|||
|
||||
// Filter for the currently active node iterator.
|
||||
//
|
||||
// A block => (set_id, num) entry means that branches to `block` are active in `set_id` with
|
||||
// An block => (set_id, num) entry means that branches to `block` are active in `set_id` with
|
||||
// branch argument number `num`.
|
||||
filter: FxHashMap<Block, (u8, usize)>,
|
||||
}
|
||||
|
@ -953,7 +953,7 @@ impl VirtualCopies {
|
|||
debug_assert_eq!(popped, Some(param));
|
||||
|
||||
// The domtree pre-order in `self.params` guarantees that all parameters defined at the
|
||||
// same block will be adjacent. This means we can see when all parameters at a block have been
|
||||
// same block will be adjacent. This means we can see when all parameters at an block have been
|
||||
// merged.
|
||||
//
|
||||
// We don't care about the last parameter - when that is merged we are done.
|
||||
|
|
|
@ -24,7 +24,7 @@
|
|||
//! a register.
|
||||
//!
|
||||
//! 5. The code must be in Conventional SSA form. Among other things, this means that values passed
|
||||
//! as arguments when branching to a block must belong to the same virtual register as the
|
||||
//! as arguments when branching to an block must belong to the same virtual register as the
|
||||
//! corresponding block argument value.
|
||||
//!
|
||||
//! # Iteration order
|
||||
|
@ -35,7 +35,7 @@
|
|||
//! defined by the instruction and only consider the colors of other values that are live at the
|
||||
//! instruction.
|
||||
//!
|
||||
//! The first time we see a branch to a block, the block's argument values are colored to match the
|
||||
//! The first time we see a branch to an block, the block's argument values are colored to match the
|
||||
//! registers currently holding branch argument values passed to the predecessor branch. By
|
||||
//! visiting blocks in a CFG topological order, we guarantee that at least one predecessor branch has
|
||||
//! been visited before the destination block. Therefore, the block's arguments are already colored.
|
||||
|
@ -224,7 +224,7 @@ impl<'a> Context<'a> {
|
|||
SingleDest(block, _) => block,
|
||||
};
|
||||
|
||||
// We have a single branch with a single target, and a block with a single
|
||||
// We have a single branch with a single target, and an block with a single
|
||||
// predecessor. Thus we can forward the diversion set to the next block.
|
||||
if self.cfg.pred_iter(target).count() == 1 {
|
||||
// Transfer the diversion to the next block.
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
//! Sometimes, it is necessary to move register values to a different register in order to satisfy
|
||||
//! instruction constraints.
|
||||
//!
|
||||
//! These register diversions are local to a block. No values can be diverted when entering a new
|
||||
//! These register diversions are local to an block. No values can be diverted when entering a new
|
||||
//! block.
|
||||
|
||||
use crate::fx::FxHashMap;
|
||||
|
@ -38,7 +38,7 @@ impl Diversion {
|
|||
}
|
||||
}
|
||||
|
||||
/// Keep track of diversions in a block.
|
||||
/// Keep track of diversions in an block.
|
||||
#[derive(Clone)]
|
||||
pub struct RegDiversions {
|
||||
current: FxHashMap<Value, Diversion>,
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
//! Track which values are live in a block with instruction granularity.
|
||||
//! Track which values are live in an block with instruction granularity.
|
||||
//!
|
||||
//! The `LiveValueTracker` keeps track of the set of live SSA values at each instruction in a block.
|
||||
//! The `LiveValueTracker` keeps track of the set of live SSA values at each instruction in an block.
|
||||
//! The sets of live values are computed on the fly as the tracker is moved from instruction to
|
||||
//! instruction, starting at the block header.
|
||||
|
||||
|
@ -16,13 +16,13 @@ use alloc::vec::Vec;
|
|||
|
||||
type ValueList = EntityList<Value>;
|
||||
|
||||
/// Compute and track live values throughout a block.
|
||||
/// Compute and track live values throughout an block.
|
||||
pub struct LiveValueTracker {
|
||||
/// The set of values that are live at the current program point.
|
||||
live: LiveValueVec,
|
||||
|
||||
/// Saved set of live values for every jump and branch that can potentially be an immediate
|
||||
/// dominator of a block.
|
||||
/// dominator of an block.
|
||||
///
|
||||
/// This is the set of values that are live *before* the branch.
|
||||
idom_sets: FxHashMap<Inst, ValueList>,
|
||||
|
|
|
@ -18,7 +18,7 @@
|
|||
//!
|
||||
//! The set of `LiveRange` instances can answer these questions through their `def_local_end` and
|
||||
//! `livein_local_end` queries. The coloring algorithm visits blocks in a topological order of the
|
||||
//! dominator tree, so it can compute the set of live values at the beginning of a block by starting
|
||||
//! dominator tree, so it can compute the set of live values at the beginning of an block by starting
|
||||
//! from the set of live values at the dominating branch instruction and filtering it with
|
||||
//! `livein_local_end`. These sets do not need to be stored in the liveness analysis.
|
||||
//!
|
||||
|
|
|
@ -131,7 +131,7 @@ use smallvec::SmallVec;
|
|||
/// 2. The *live-in intervals* are the local intervals in the remaining blocks.
|
||||
///
|
||||
/// A live-in interval always begins at the block header, while the def interval can begin at the
|
||||
/// defining instruction, or at the block header for a block argument value.
|
||||
/// defining instruction, or at the block header for an block argument value.
|
||||
///
|
||||
/// All values have a def interval, but a large proportion of values don't have any live-in
|
||||
/// intervals. These are called *local live ranges*.
|
||||
|
@ -139,7 +139,7 @@ use smallvec::SmallVec;
|
|||
/// # Program order requirements
|
||||
///
|
||||
/// The internal representation of a `LiveRange` depends on a consistent `ProgramOrder` both for
|
||||
/// ordering instructions inside a block *and* for ordering blocks. The methods that depend on the
|
||||
/// ordering instructions inside an block *and* for ordering blocks. The methods that depend on the
|
||||
/// ordering take an explicit `ProgramOrder` object, and it is the caller's responsibility to
|
||||
/// ensure that the provided ordering is consistent between calls.
|
||||
///
|
||||
|
@ -363,7 +363,7 @@ impl<PO: ProgramOrder> GenericLiveRange<PO> {
|
|||
|
||||
/// Get the program point where this live range is defined.
|
||||
///
|
||||
/// This will be a block header when the value is a block argument, otherwise it is the defining
|
||||
/// This will be an block header when the value is an block argument, otherwise it is the defining
|
||||
/// instruction.
|
||||
pub fn def(&self) -> ProgramPoint {
|
||||
self.def_begin
|
||||
|
@ -385,7 +385,7 @@ impl<PO: ProgramOrder> GenericLiveRange<PO> {
self.def_end
}
/// Get the local end-point of this live range in a block where it is live-in.
/// Get the local end-point of this live range in an block where it is live-in.
///
/// If this live range is not live-in to `block`, return `None`. Otherwise, return the end-point
/// of this live range's local interval in `block`.
@ -409,7 +409,7 @@ impl<PO: ProgramOrder> GenericLiveRange<PO> {
/// Is this value live-in to `block`?
///
/// A block argument is not considered to be live in.
/// An block argument is not considered to be live in.
pub fn is_livein(&self, block: Block, order: &PO) -> bool {
self.livein_local_end(block, order).is_some()
}
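// Hedged sketch of the live-range shape described above: one def interval plus an
// optional local live-in interval per block, so the live-in query is a simple map
// lookup and the defining block never appears as live-in. The struct fields here
// are simplified assumptions; the method names mirror the ones shown above.
use std::collections::HashMap;

struct LiveRangeSketch {
    def_begin: u32,                // program point where the value is defined
    def_end: u32,                  // local end point in the defining block
    livein_end: HashMap<u32, u32>, // block id -> local end point where live-in
}

impl LiveRangeSketch {
    fn livein_local_end(&self, block: u32) -> Option<u32> {
        self.livein_end.get(&block).copied()
    }

    fn is_livein(&self, block: u32) -> bool {
        self.livein_local_end(block).is_some()
    }
}

fn main() {
    let mut livein_end = HashMap::new();
    livein_end.insert(2, 17); // live-in to block 2, ending at local point 17
    let lr = LiveRangeSketch { def_begin: 5, def_end: 9, livein_end };
    assert_eq!(lr.livein_local_end(2), Some(17));
    assert!(lr.is_livein(2));
    assert!(!lr.is_livein(1)); // no live-in interval recorded for block 1
    let _ = (lr.def_begin, lr.def_end); // def interval in the defining block
}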
@ -594,7 +594,7 @@ mod tests {
|
|||
assert!(lr.is_local());
|
||||
assert_eq!(lr.def(), e2.into());
|
||||
assert_eq!(lr.def_local_end(), e2.into());
|
||||
// The def interval of a block argument does not count as live-in.
|
||||
// The def interval of an block argument does not count as live-in.
|
||||
assert_eq!(lr.livein_local_end(e2, PO), None);
|
||||
PO.validate(&lr);
|
||||
}
|
||||
|
|
|
@ -199,7 +199,7 @@ impl<'a> Context<'a> {
|
|||
self.pressure.reset();
|
||||
self.take_live_regs(liveins);
|
||||
|
||||
// A block can have an arbitrary (up to 2^16...) number of parameters, so they are not
|
||||
// An block can have an arbitrary (up to 2^16...) number of parameters, so they are not
|
||||
// guaranteed to fit in registers.
|
||||
for lv in params {
|
||||
if let Affinity::Reg(rci) = lv.affinity {
|
||||
|
@ -267,11 +267,7 @@ impl<'a> Context<'a> {
|
|||
// If inst is a call, spill all register values that are live across the call.
|
||||
// This means that we don't currently take advantage of callee-saved registers.
|
||||
// TODO: Be more sophisticated.
|
||||
let opcode = self.cur.func.dfg[inst].opcode();
|
||||
if call_sig.is_some()
|
||||
|| opcode == crate::ir::Opcode::X86ElfTlsGetAddr
|
||||
|| opcode == crate::ir::Opcode::X86MachoTlsGetAddr
|
||||
{
|
||||
if call_sig.is_some() {
|
||||
for lv in throughs {
|
||||
if lv.affinity.is_reg() && !self.spills.contains(&lv.value) {
|
||||
self.spill_reg(lv.value);
|
||||
|
|
|
@ -5,11 +5,11 @@
|
|||
//! output.
|
||||
//!
|
||||
//! A virtual register is typically built by merging together SSA values that are "phi-related" -
|
||||
//! that is, one value is passed as a block argument to a branch and the other is the block parameter
|
||||
//! that is, one value is passed as an block argument to a branch and the other is the block parameter
|
||||
//! value itself.
|
||||
//!
|
||||
//! If any values in a virtual register are spilled, they will use the same stack slot. This avoids
|
||||
//! memory-to-memory copies when a spilled value is passed as a block argument.
|
||||
//! memory-to-memory copies when a spilled value is passed as an block argument.
|
||||
|
||||
use crate::dbg::DisplayList;
|
||||
use crate::dominator_tree::DominatorTreePreorder;
|
||||
|
|
|
@ -379,7 +379,6 @@ mod tests {
|
|||
f.to_string(),
|
||||
"[shared]\n\
|
||||
opt_level = \"none\"\n\
|
||||
tls_model = \"none\"\n\
|
||||
libcall_call_conv = \"isa_default\"\n\
|
||||
baldrdash_prologue_words = 0\n\
|
||||
probestack_size_log2 = 12\n\
|
||||
|
|
|
@ -810,7 +810,7 @@ enum BranchOrderKind {
|
|||
|
||||
/// Reorder branches to encourage fallthroughs.
|
||||
///
|
||||
/// When a block ends with a conditional branch followed by an unconditional
|
||||
/// When an block ends with a conditional branch followed by an unconditional
|
||||
/// branch, this will reorder them if one of them is branching to the next Block
|
||||
/// layout-wise. The unconditional jump can then become a fallthrough.
|
||||
fn branch_order(pos: &mut FuncCursor, cfg: &mut ControlFlowGraph, block: Block, inst: Inst) {
|
||||
|
|
|
@ -47,7 +47,7 @@ pub fn layout_stack(
|
|||
let mut incoming_max = 0;
|
||||
let mut outgoing_max = 0;
|
||||
let mut min_align = alignment;
|
||||
let mut must_align = !is_leaf;
|
||||
let mut must_align = is_leaf;
|
||||
|
||||
for slot in frame.values() {
|
||||
if slot.size > max_size {
|
||||
|
@ -145,7 +145,7 @@ mod tests {
|
|||
let sss = &mut StackSlots::new();
|
||||
|
||||
// For all these test cases, assume it will call.
|
||||
let is_leaf = false;
|
||||
let is_leaf = true;
|
||||
|
||||
// An empty layout should have 0-sized stack frame.
|
||||
assert_eq!(layout_stack(sss, is_leaf, 1), Ok(0));
|
||||
|
|
|
@ -13,7 +13,7 @@ use crate::verifier::{VerifierErrors, VerifierStepResult};
|
|||
///
|
||||
/// Conventional SSA form is represented in Cranelift with the help of virtual registers:
|
||||
///
|
||||
/// - Two values are said to be *PHI-related* if one is a block argument and the other is passed as
|
||||
/// - Two values are said to be *PHI-related* if one is an block argument and the other is passed as
|
||||
/// a branch argument in a location that matches the first value.
|
||||
/// - PHI-related values must belong to the same virtual register.
|
||||
/// - Two values in the same virtual register must not have overlapping live ranges.
|
||||
|
|
|
@ -16,7 +16,7 @@ use crate::verifier::{VerifierErrors, VerifierStepResult};
|
|||
/// - All values in the program must have a live range.
|
||||
/// - The live range def point must match where the value is defined.
|
||||
/// - The live range must reach all uses.
|
||||
/// - When a live range is live-in to a block, it must be live at all the predecessors.
|
||||
/// - When a live range is live-in to an block, it must be live at all the predecessors.
|
||||
/// - The live range affinity must be compatible with encoding constraints.
|
||||
///
|
||||
/// We don't verify that live ranges are minimal. This would require recomputing live ranges for
|
||||
|
|
|
@ -15,7 +15,7 @@ use crate::verifier::{VerifierErrors, VerifierStepResult};
|
|||
/// instruction encoding recipes.
|
||||
///
|
||||
/// Values can be temporarily diverted to a different location by using the `regmove`, `regspill`,
|
||||
/// and `regfill` instructions, but only inside a block.
|
||||
/// and `regfill` instructions, but only inside an block.
|
||||
///
|
||||
/// If a liveness analysis is provided, it is used to verify that there are no active register
|
||||
/// diversions across control flow edges.
|
||||
|
|
|
@ -19,13 +19,13 @@
|
|||
//! SSA form
|
||||
//!
|
||||
//! - Values must be defined by an instruction that exists and that is inserted in
|
||||
//! a block, or be an argument of an existing block.
|
||||
//! an block, or be an argument of an existing block.
|
||||
//! - Values used by an instruction must dominate the instruction.
|
||||
//!
|
||||
//! Control flow graph and dominator tree integrity:
|
||||
//!
|
||||
//! - All predecessors in the CFG must be branches to the block.
|
||||
//! - All branches to a block must be present in the CFG.
|
||||
//! - All branches to an block must be present in the CFG.
|
||||
//! - A recomputed dominator tree is identical to the existing one.
|
||||
//!
|
||||
//! Type checking
|
||||
|
@ -961,7 +961,7 @@ impl<'a> Verifier<'a> {
|
|||
format!("{} is defined by invalid instruction {}", v, def_inst),
|
||||
));
|
||||
}
|
||||
// Defining instruction is inserted in a block.
|
||||
// Defining instruction is inserted in an block.
|
||||
if self.func.layout.inst_block(def_inst) == None {
|
||||
return errors.fatal((
|
||||
loc_inst,
|
||||
|
|
|
@ -1 +1 @@
|
|||
{"files":{"Cargo.toml":"07a93208dff603dbca23451e3248d23d1ddde36c305868f2815bb7c8ae5f3b5d","LICENSE":"268872b9816f90fd8e85db5a28d33f8150ebb8dd016653fb39ef1f94f2686bc5","README.md":"96ceffbfd88fb06e3b41aa4d3087cffbbf8441d04eba7ab09662a72ab600a321","src/boxed_slice.rs":"69d539b72460c0aba1d30e0b72efb0c29d61558574d751c784794e14abf41352","src/iter.rs":"4a4d3309fe9aad14fd7702f02459f4277b4ddb50dba700e58dcc75665ffebfb3","src/keys.rs":"b8c2fba26dee15bf3d1880bb2b41e8d66fe1428d242ee6d9fd30ee94bbd0407d","src/lib.rs":"f6d738a46f1dca8b0c82a5910d86cd572a3585ab7ef9f73dac96962529069190","src/list.rs":"4bf609eb7cc7c000c18da746596d5fcc67eece3f919ee2d76e19f6ac371640d1","src/map.rs":"546b36be4cbbd2423bacbed69cbe114c63538c3f635e15284ab8e4223e717705","src/packed_option.rs":"dccb3dd6fc87eba0101de56417f21cab67a4394831df9fa41e3bbddb70cdf694","src/primary.rs":"30d5e2ab8427fd2b2c29da395812766049e3c40845cc887af3ee233dba91a063","src/set.rs":"b040054b8baa0599e64df9ee841640688e2a73b6eabbdc5a4f15334412db052a","src/sparse.rs":"536e31fdcf64450526f5e5b85e97406c26b998bc7e0d8161b6b449c24265449f"},"package":null}
|
||||
{"files":{"Cargo.toml":"36961af3ce6f450e3ace7ccc85dd4d4c1b82db1ecc4cd24c55a7416f43e81142","LICENSE":"268872b9816f90fd8e85db5a28d33f8150ebb8dd016653fb39ef1f94f2686bc5","README.md":"96ceffbfd88fb06e3b41aa4d3087cffbbf8441d04eba7ab09662a72ab600a321","src/boxed_slice.rs":"69d539b72460c0aba1d30e0b72efb0c29d61558574d751c784794e14abf41352","src/iter.rs":"4a4d3309fe9aad14fd7702f02459f4277b4ddb50dba700e58dcc75665ffebfb3","src/keys.rs":"b8c2fba26dee15bf3d1880bb2b41e8d66fe1428d242ee6d9fd30ee94bbd0407d","src/lib.rs":"f6d738a46f1dca8b0c82a5910d86cd572a3585ab7ef9f73dac96962529069190","src/list.rs":"4bf609eb7cc7c000c18da746596d5fcc67eece3f919ee2d76e19f6ac371640d1","src/map.rs":"546b36be4cbbd2423bacbed69cbe114c63538c3f635e15284ab8e4223e717705","src/packed_option.rs":"dccb3dd6fc87eba0101de56417f21cab67a4394831df9fa41e3bbddb70cdf694","src/primary.rs":"30d5e2ab8427fd2b2c29da395812766049e3c40845cc887af3ee233dba91a063","src/set.rs":"b040054b8baa0599e64df9ee841640688e2a73b6eabbdc5a4f15334412db052a","src/sparse.rs":"536e31fdcf64450526f5e5b85e97406c26b998bc7e0d8161b6b449c24265449f"},"package":null}
|
|
@ -1,7 +1,7 @@
|
|||
[package]
|
||||
authors = ["The Cranelift Project Developers"]
|
||||
name = "cranelift-entity"
|
||||
version = "0.59.0"
|
||||
version = "0.58.0"
|
||||
description = "Data structures using entity references as mapping keys"
|
||||
license = "Apache-2.0 WITH LLVM-exception"
|
||||
documentation = "https://cranelift.readthedocs.io/"
|
||||
|
|
|
@ -1 +1 @@
|
|||
{"files":{"Cargo.toml":"2ff3fc11d0d57fe027d53ee86bd00c3b2f577d7146aa256ff82527846d455a4d","LICENSE":"268872b9816f90fd8e85db5a28d33f8150ebb8dd016653fb39ef1f94f2686bc5","README.md":"dea43e8044284df50f8b8772e9b48ba8b109b45c74111ff73619775d57ad8d67","src/frontend.rs":"6f195596a11403d4a5371a60582f751880cdd0fe46e9f61da6ff5dbb9d719adb","src/lib.rs":"5197f467d1625ee2b117a168f4b1886b4b69d4250faea6618360a5adc70b4e0c","src/ssa.rs":"89ae17181c9440d5840870bb995ce346f4b416b1c9ebafb9ede7b60a00f2a23c","src/switch.rs":"6b7f97799e251f2b4ae6a9892fb911375e2dc9faa5d53ff93ba08988141f1f5b","src/variable.rs":"399437bd7d2ac11a7a748bad7dd1f6dac58824d374ec318f36367a9d077cc225"},"package":null}
|
||||
{"files":{"Cargo.toml":"b8c673e0b184ab7168579a06894247f9bcb55687c0c470e9d4df20e75b5b39ab","LICENSE":"268872b9816f90fd8e85db5a28d33f8150ebb8dd016653fb39ef1f94f2686bc5","README.md":"dea43e8044284df50f8b8772e9b48ba8b109b45c74111ff73619775d57ad8d67","src/frontend.rs":"9c4bd81d92594c29cd801e7ca830c2f8b55e5aebeeb2a9598a307b14365eaa8d","src/lib.rs":"5197f467d1625ee2b117a168f4b1886b4b69d4250faea6618360a5adc70b4e0c","src/ssa.rs":"a4c4996a58e86b7ac5680d145bee670f50c5f180a9d6919007ec32f629efe7cf","src/switch.rs":"6b7f97799e251f2b4ae6a9892fb911375e2dc9faa5d53ff93ba08988141f1f5b","src/variable.rs":"399437bd7d2ac11a7a748bad7dd1f6dac58824d374ec318f36367a9d077cc225"},"package":null}
|
|
@ -1,7 +1,7 @@
|
|||
[package]
|
||||
authors = ["The Cranelift Project Developers"]
|
||||
name = "cranelift-frontend"
|
||||
version = "0.59.0"
|
||||
version = "0.58.0"
|
||||
description = "Cranelift IR builder helper"
|
||||
license = "Apache-2.0 WITH LLVM-exception"
|
||||
documentation = "https://cranelift.readthedocs.io/"
|
||||
|
@ -11,7 +11,7 @@ readme = "README.md"
|
|||
edition = "2018"
|
||||
|
||||
[dependencies]
|
||||
cranelift-codegen = { path = "../codegen", version = "0.59.0", default-features = false }
|
||||
cranelift-codegen = { path = "../cranelift-codegen", version = "0.58.0", default-features = false }
|
||||
target-lexicon = "0.10"
|
||||
log = { version = "0.4.6", default-features = false }
|
||||
hashbrown = { version = "0.6", optional = true }
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
//! A frontend for building Cranelift IR from other languages.
|
||||
use crate::ssa::{SSABlock, SSABuilder, SideEffects};
|
||||
use crate::variable::Variable;
|
||||
use alloc::vec::Vec;
|
||||
use cranelift_codegen::cursor::{Cursor, FuncCursor};
|
||||
use cranelift_codegen::entity::{EntitySet, SecondaryMap};
|
||||
use cranelift_codegen::ir;
|
||||
|
@ -40,11 +41,11 @@ pub struct FunctionBuilder<'a> {
|
|||
|
||||
#[derive(Clone, Default)]
|
||||
struct BlockData {
|
||||
/// A Block is "pristine" iff no instructions have been added since the last
|
||||
/// An Block is "pristine" iff no instructions have been added since the last
|
||||
/// call to `switch_to_block()`.
|
||||
pristine: bool,
|
||||
|
||||
/// A Block is "filled" iff a terminator instruction has been inserted since
|
||||
/// An Block is "filled" iff a terminator instruction has been inserted since
|
||||
/// the last call to `switch_to_block()`.
|
||||
///
|
||||
/// A filled block cannot be pristine.
|
||||
|
@ -625,15 +626,8 @@ impl<'a> FunctionBuilder<'a> {
self.ins().call(libc_memcpy, &[dest, src, size]);
}
/// Optimised memcpy or memmove for small copies.
///
/// # Codegen safety
///
/// The following properties must hold to prevent UB:
///
/// * `src_align` and `dest_align` are an upper-bound on the alignment of `src` respectively `dest`.
/// * If `non_overlapping` is true, then this must be correct.
pub fn emit_small_memory_copy(
/// Optimised memcpy for small copies.
pub fn emit_small_memcpy(
&mut self,
config: TargetFrontendConfig,
dest: Value,
@ -641,7 +635,6 @@ impl<'a> FunctionBuilder<'a> {
|
|||
size: u64,
|
||||
dest_align: u8,
|
||||
src_align: u8,
|
||||
non_overlapping: bool,
|
||||
) {
|
||||
// Currently the result of guess work, not actual profiling.
|
||||
const THRESHOLD: u64 = 4;
|
||||
|
@ -670,27 +663,16 @@ impl<'a> FunctionBuilder<'a> {
|
|||
|
||||
if load_and_store_amount > THRESHOLD {
|
||||
let size_value = self.ins().iconst(config.pointer_type(), size as i64);
|
||||
if non_overlapping {
|
||||
self.call_memcpy(config, dest, src, size_value);
|
||||
} else {
|
||||
self.call_memmove(config, dest, src, size_value);
|
||||
}
|
||||
self.call_memcpy(config, dest, src, size_value);
|
||||
return;
|
||||
}
|
||||
|
||||
let mut flags = MemFlags::new();
|
||||
flags.set_aligned();
|
||||
|
||||
// Load all of the memory first. This is necessary in case `dest` overlaps.
|
||||
// It can also improve performance a bit.
|
||||
let registers: smallvec::SmallVec<[_; THRESHOLD as usize]> = (0..load_and_store_amount)
|
||||
.map(|i| {
|
||||
let offset = (access_size * i) as i32;
|
||||
(self.ins().load(int_type, flags, src, offset), offset)
|
||||
})
|
||||
.collect();
|
||||
|
||||
for (value, offset) in registers {
|
||||
for i in 0..load_and_store_amount {
|
||||
let offset = (access_size * i) as i32;
|
||||
let value = self.ins().load(int_type, flags, src, offset);
|
||||
self.ins().store(flags, value, dest, offset);
|
||||
}
|
||||
}
|
||||
|
@ -816,6 +798,55 @@ impl<'a> FunctionBuilder<'a> {
|
|||
|
||||
self.ins().call(libc_memmove, &[dest, source, size]);
|
||||
}
|
||||
|
||||
/// Optimised memmove for small moves.
|
||||
pub fn emit_small_memmove(
|
||||
&mut self,
|
||||
config: TargetFrontendConfig,
|
||||
dest: Value,
|
||||
src: Value,
|
||||
size: u64,
|
||||
dest_align: u8,
|
||||
src_align: u8,
|
||||
) {
|
||||
// Currently the result of guess work, not actual profiling.
|
||||
const THRESHOLD: u64 = 4;
|
||||
|
||||
let access_size = greatest_divisible_power_of_two(size);
|
||||
assert!(
|
||||
access_size.is_power_of_two(),
|
||||
"`size` is not a power of two"
|
||||
);
|
||||
assert!(
|
||||
access_size >= u64::from(::core::cmp::min(src_align, dest_align)),
|
||||
"`size` is smaller than `dest` and `src`'s alignment value."
|
||||
);
|
||||
let load_and_store_amount = size / access_size;
|
||||
|
||||
if load_and_store_amount > THRESHOLD {
|
||||
let size_value = self.ins().iconst(config.pointer_type(), size as i64);
|
||||
self.call_memmove(config, dest, src, size_value);
|
||||
return;
|
||||
}
|
||||
|
||||
let mut flags = MemFlags::new();
|
||||
flags.set_aligned();
|
||||
|
||||
// Load all of the memory first in case `dest` overlaps.
|
||||
let registers: Vec<_> = (0..load_and_store_amount)
|
||||
.map(|i| {
|
||||
let offset = (access_size * i) as i32;
|
||||
(
|
||||
self.ins().load(config.pointer_type(), flags, src, offset),
|
||||
offset,
|
||||
)
|
||||
})
|
||||
.collect();
for (value, offset) in registers {
self.ins().store(flags, value, dest, offset);
}
}
}
fn greatest_divisible_power_of_two(size: u64) -> u64 {
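// Standalone sketch of the size split used by the small copy/move helpers above:
// the access width is the greatest power of two dividing `size`, and the copy is
// expanded into `size / access_size` aligned load/store pairs. The helper body
// below is an assumption for illustration; the diff cuts off before the real
// `greatest_divisible_power_of_two` body is shown.
fn greatest_divisible_power_of_two_sketch(size: u64) -> u64 {
    if size == 0 {
        1 // degenerate case; the real helper is only called with non-zero sizes
    } else {
        1u64 << size.trailing_zeros()
    }
}

fn main() {
    let size = 24u64; // 24 bytes = 8 * 3
    let access_size = greatest_divisible_power_of_two_sketch(size);
    assert_eq!(access_size, 8);
    assert_eq!(size / access_size, 3); // three 8-byte load/store pairs
}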
@ -832,7 +863,7 @@ impl<'a> FunctionBuilder<'a> {
|
|||
);
|
||||
}
|
||||
|
||||
/// A Block is 'filled' when a terminator instruction is present.
|
||||
/// An Block is 'filled' when a terminator instruction is present.
|
||||
fn fill_current_block(&mut self) {
|
||||
self.func_ctx.blocks[self.position.block.unwrap()].filled = true;
|
||||
}
|
||||
|
@ -1073,7 +1104,7 @@ block0:
|
|||
let src = builder.use_var(x);
|
||||
let dest = builder.use_var(y);
|
||||
let size = 8;
|
||||
builder.emit_small_memory_copy(target.frontend_config(), dest, src, size, 8, 8, true);
|
||||
builder.emit_small_memcpy(target.frontend_config(), dest, src, size, 8, 8);
|
||||
builder.ins().return_(&[dest]);
|
||||
|
||||
builder.seal_all_blocks();
|
||||
|
@ -1130,7 +1161,7 @@ block0:
|
|||
let src = builder.use_var(x);
|
||||
let dest = builder.use_var(y);
|
||||
let size = 8192;
|
||||
builder.emit_small_memory_copy(target.frontend_config(), dest, src, size, 8, 8, true);
|
||||
builder.emit_small_memcpy(target.frontend_config(), dest, src, size, 8, 8);
|
||||
builder.ins().return_(&[dest]);
|
||||
|
||||
builder.seal_all_blocks();
|
||||
|
|
|
@ -333,7 +333,7 @@ impl SSABuilder {
|
|||
// Part 1: With a mutable borrow of self, update the DataFlowGraph if necessary.
|
||||
let case = match self.ssa_blocks[ssa_block] {
|
||||
SSABlockData::BlockHeader(ref mut data) => {
|
||||
// The block has multiple predecessors so we append a Block parameter that
|
||||
// The block has multiple predecessors so we append an Block parameter that
|
||||
// will serve as a value.
|
||||
if data.sealed {
|
||||
if data.predecessors.len() == 1 {
|
||||
|
@ -408,7 +408,7 @@ impl SSABuilder {
|
|||
self.block_headers[block] = ssa_block.into();
|
||||
ssa_block
|
||||
}
|
||||
/// Gets the header block corresponding to a Block, panics if the Block or the header block
|
||||
/// Gets the header block corresponding to an Block, panics if the Block or the header block
|
||||
/// isn't declared.
|
||||
pub fn header_block(&self, block: Block) -> SSABlock {
|
||||
self.block_headers
|
||||
|
@ -491,7 +491,7 @@ impl SSABuilder {
|
|||
}
|
||||
};
|
||||
|
||||
// For each undef var we look up values in the predecessors and create a block parameter
|
||||
// For each undef var we look up values in the predecessors and create an block parameter
|
||||
// only if necessary.
|
||||
for (var, val) in undef_vars {
|
||||
let ty = func.dfg.value_type(val);
|
||||
|
@ -516,13 +516,13 @@ impl SSABuilder {
|
|||
}
|
||||
}
|
||||
|
||||
/// Given the local SSA Value of a Variable in a Block, perform a recursive lookup on
|
||||
/// Given the local SSA Value of a Variable in an Block, perform a recursive lookup on
|
||||
/// predecessors to determine if it is redundant with another Value earlier in the CFG.
|
||||
///
|
||||
/// If such a Value exists and is redundant, the local Value is replaced by the
|
||||
/// corresponding non-local Value. If the original Value was a Block parameter,
|
||||
/// corresponding non-local Value. If the original Value was an Block parameter,
|
||||
/// the parameter may be removed if redundant. Parameters are placed eagerly by callers
|
||||
/// to avoid infinite loops when looking up a Value for a Block that is in a CFG loop.
|
||||
/// to avoid infinite loops when looking up a Value for an Block that is in a CFG loop.
|
||||
///
|
||||
/// Doing this lookup for each Value in each Block preserves SSA form during construction.
|
||||
///
|
||||
|
@ -623,7 +623,7 @@ impl SSABuilder {
|
|||
}
|
||||
ZeroOneOrMore::One(pred_val) => {
|
||||
// Here all the predecessors use a single value to represent our variable
|
||||
// so we don't need to have it as a block argument.
|
||||
// so we don't need to have it as an block argument.
|
||||
// We need to replace all the occurrences of val with pred_val but since
|
||||
// we can't afford a re-writing pass right now we just declare an alias.
|
||||
// Resolve aliases eagerly so that we can check for cyclic aliasing,
|
||||
|
@ -692,7 +692,7 @@ impl SSABuilder {
|
|||
) -> Option<(Block, SSABlock, Inst)> {
|
||||
match func.dfg.analyze_branch(jump_inst) {
|
||||
BranchInfo::NotABranch => {
|
||||
panic!("you have declared a non-branch instruction as a predecessor to a block");
|
||||
panic!("you have declared a non-branch instruction as a predecessor to an block");
|
||||
}
|
||||
// For a single destination appending a jump argument to the instruction
|
||||
// is sufficient.
|
||||
|
|
|
@ -1 +1 @@
|
|||
{"files":{"Cargo.toml":"fa0bc0bfbc7b22f46006b9aee8b543359220c40123b78cade845acb0e426cbf0","LICENSE":"268872b9816f90fd8e85db5a28d33f8150ebb8dd016653fb39ef1f94f2686bc5","README.md":"f46f9c5df1b10bad0e87d9c2ad9f5e65bbb6749ac8843cd80ec357daa3b22c3e","src/code_translator.rs":"989e37a31db521686a306c55660dd31541123c8eff06c6b7015c198cecf5c9b7","src/environ/dummy.rs":"edeed25701c0ae0dd00e153e3e27584da2b9096a2019a2629ff706e83e6b8d07","src/environ/mod.rs":"b6f33f619090ff497b4e22150d77a290f259716374ac2e377b73c47cd1dafe85","src/environ/spec.rs":"0ab62cbb945e39841a5cc7b3143545c36ec14ff34c906601861d2692acd0057d","src/func_translator.rs":"a165063eafedbb8e6b632996f747eeb49a3d6f8a70cab6d741abfc4fd9af892d","src/lib.rs":"05b9994c062faf2065046d1e4d7caffb26823816f367d77ede6918e24fcfa6b0","src/module_translator.rs":"bcdf5a84226b726a73f4be0acb0318ca89c82584460101378e73021d85bd4485","src/sections_translator.rs":"f04364d43051f77ec7c1120213f292fa3cb632f506b5ae6d153a4a08f9e4c919","src/state/func_state.rs":"b114522784984a7cc26a3549c7c17f842885e1232254de81d938f9d155f95aa6","src/state/mod.rs":"20014cb93615467b4d20321b52f67f66040417efcaa739a4804093bb559eed19","src/state/module_state.rs":"2f299b043deb806b48583fe54bbb46708f7d8a1454b7be0eb285568064e5a7f9","src/translation_utils.rs":"cd3ab5f994e02d49baa47148b66599d37f8156cd657b61ae68aefefa32a9d806","tests/wasm_testsuite.rs":"730304f139371e5ef3fd913ec271fc4db181869b447c6ed26c54313b5c31495c"},"package":null}
|
||||
{"files":{"Cargo.toml":"d59f80d8a52c80ec3363c4c4cb97a68ca92faa8a5acf0d2d3890a06ad0aa23f6","LICENSE":"268872b9816f90fd8e85db5a28d33f8150ebb8dd016653fb39ef1f94f2686bc5","README.md":"f46f9c5df1b10bad0e87d9c2ad9f5e65bbb6749ac8843cd80ec357daa3b22c3e","src/code_translator.rs":"e0ba18925bc1a9a62dbdc1a259fa137728851e5e5af8348aa42b8b6640354c73","src/environ/dummy.rs":"32e4dde7732aa386d250ce509e6c35d68e67e6943c07565a137147c9718668da","src/environ/mod.rs":"b6f33f619090ff497b4e22150d77a290f259716374ac2e377b73c47cd1dafe85","src/environ/spec.rs":"ed55da70abd30fd1c39cd303976013e9ef20c5af9736305a22929165c0d32d65","src/func_translator.rs":"a165063eafedbb8e6b632996f747eeb49a3d6f8a70cab6d741abfc4fd9af892d","src/lib.rs":"0dbbb3d5088799c3aaa94b083ca0c2f09906bd8fb36e9c0dd200b8122c50a8b6","src/module_translator.rs":"5e1bf9471d6f4f317bb2fb9b8697b5b08f7950520017c2869e69133e7f17a2b7","src/sections_translator.rs":"20f6b46f7079296bf4611eb16ef29ea38535eb68237be54e89051530aba729c4","src/state/func_state.rs":"b114522784984a7cc26a3549c7c17f842885e1232254de81d938f9d155f95aa6","src/state/mod.rs":"20014cb93615467b4d20321b52f67f66040417efcaa739a4804093bb559eed19","src/state/module_state.rs":"2f299b043deb806b48583fe54bbb46708f7d8a1454b7be0eb285568064e5a7f9","src/translation_utils.rs":"33a3fa4da31c9b14c115b9036bc7c41c1bcba6259940e9babc3f2b7c22ededec","tests/wasm_testsuite.rs":"730304f139371e5ef3fd913ec271fc4db181869b447c6ed26c54313b5c31495c"},"package":null}
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "cranelift-wasm"
|
||||
version = "0.59.0"
|
||||
version = "0.58.0"
|
||||
authors = ["The Cranelift Project Developers"]
|
||||
description = "Translator from WebAssembly to Cranelift IR"
|
||||
repository = "https://github.com/bytecodealliance/cranelift"
|
||||
|
@ -11,17 +11,17 @@ keywords = ["webassembly", "wasm"]
|
|||
edition = "2018"
|
||||
|
||||
[dependencies]
|
||||
wasmparser = { version = "0.51.0", default-features = false }
|
||||
cranelift-codegen = { path = "../codegen", version = "0.59.0", default-features = false }
|
||||
cranelift-entity = { path = "../entity", version = "0.59.0" }
|
||||
cranelift-frontend = { path = "../frontend", version = "0.59.0", default-features = false }
|
||||
wasmparser = { version = "0.48.2", default-features = false }
|
||||
cranelift-codegen = { path = "../cranelift-codegen", version = "0.58.0", default-features = false }
|
||||
cranelift-entity = { path = "../cranelift-entity", version = "0.58.0" }
|
||||
cranelift-frontend = { path = "../cranelift-frontend", version = "0.58.0", default-features = false }
|
||||
hashbrown = { version = "0.6", optional = true }
|
||||
log = { version = "0.4.6", default-features = false }
|
||||
serde = { version = "1.0.94", features = ["derive"], optional = true }
|
||||
thiserror = "1.0.4"
|
||||
|
||||
[dev-dependencies]
|
||||
wat = "1.0.9"
|
||||
wat = "1.0.7"
|
||||
target-lexicon = "0.10"
|
||||
|
||||
[features]
|
||||
|
|
|
@ -185,10 +185,10 @@ pub fn translate_operator<FE: FuncEnvironment + ?Sized>(
|
|||
let (params, results) = blocktype_params_results(module_translation_state, *ty)?;
|
||||
let (destination, else_data) = if params == results {
|
||||
// It is possible there is no `else` block, so we will only
|
||||
// allocate a block for it if/when we find the `else`. For now,
|
||||
// allocate an block for it if/when we find the `else`. For now,
|
||||
// we if the condition isn't true, then we jump directly to the
|
||||
// destination block following the whole `if...end`. If we do end
|
||||
// up discovering an `else`, then we will allocate a block for it
|
||||
// up discovering an `else`, then we will allocate an block for it
|
||||
// and go back and patch the jump.
|
||||
let destination = block_with_params(builder, results, environ)?;
|
||||
let branch_inst = builder
|
||||
|
@ -212,7 +212,7 @@ pub fn translate_operator<FE: FuncEnvironment + ?Sized>(
|
|||
builder.seal_block(next_block); // Only predecessor is the current block.
|
||||
builder.switch_to_block(next_block);
|
||||
|
||||
// Here we append an argument to a Block targeted by an argumentless jump instruction
|
||||
// Here we append an argument to an Block targeted by an argumentless jump instruction
|
||||
// But in fact there are two cases:
|
||||
// - either the If does not have a Else clause, in that case ty = EmptyBlock
|
||||
// and we add nothing;
|
||||
|
@ -241,7 +241,7 @@ pub fn translate_operator<FE: FuncEnvironment + ?Sized>(
|
|||
// We have a branch from the head of the `if` to the `else`.
|
||||
state.reachable = true;
|
||||
|
||||
// Ensure we have a block for the `else` block (it may have
|
||||
// Ensure we have an block for the `else` block (it may have
|
||||
// already been pre-allocated, see `ElseData` for details).
|
||||
let else_block = match *else_data {
|
||||
ElseData::NoElse { branch_inst } => {
|
||||
|
@ -1288,26 +1288,6 @@ pub fn translate_operator<FE: FuncEnvironment + ?Sized>(
|
|||
let (a, b) = pop2_with_bitcast(state, type_of(op), builder);
|
||||
state.push1(builder.ins().usub_sat(a, b))
|
||||
}
|
||||
Operator::I8x16MinS | Operator::I16x8MinS | Operator::I32x4MinS => {
|
||||
let (a, b) = pop2_with_bitcast(state, type_of(op), builder);
|
||||
state.push1(builder.ins().imin(a, b))
|
||||
}
|
||||
Operator::I8x16MinU | Operator::I16x8MinU | Operator::I32x4MinU => {
|
||||
let (a, b) = pop2_with_bitcast(state, type_of(op), builder);
|
||||
state.push1(builder.ins().umin(a, b))
|
||||
}
|
||||
Operator::I8x16MaxS | Operator::I16x8MaxS | Operator::I32x4MaxS => {
|
||||
let (a, b) = pop2_with_bitcast(state, type_of(op), builder);
|
||||
state.push1(builder.ins().imax(a, b))
|
||||
}
|
||||
Operator::I8x16MaxU | Operator::I16x8MaxU | Operator::I32x4MaxU => {
|
||||
let (a, b) = pop2_with_bitcast(state, type_of(op), builder);
|
||||
state.push1(builder.ins().umax(a, b))
|
||||
}
|
||||
Operator::I8x16RoundingAverageU | Operator::I16x8RoundingAverageU => {
|
||||
let (a, b) = pop2_with_bitcast(state, type_of(op), builder);
|
||||
state.push1(builder.ins().avg_round(a, b))
|
||||
}
|
||||
Operator::I8x16Neg | Operator::I16x8Neg | Operator::I32x4Neg | Operator::I64x2Neg => {
|
||||
let a = pop1_with_bitcast(state, type_of(op), builder);
|
||||
state.push1(builder.ins().ineg(a))
|
||||
|
@ -1495,7 +1475,9 @@ pub fn translate_operator<FE: FuncEnvironment + ?Sized>(
|
|||
| Operator::I32x4Load16x4S { .. }
|
||||
| Operator::I32x4Load16x4U { .. }
|
||||
| Operator::I64x2Load32x2S { .. }
|
||||
| Operator::I64x2Load32x2U { .. } => {
|
||||
| Operator::I64x2Load32x2U { .. }
|
||||
| Operator::I8x16RoundingAverageU { .. }
|
||||
| Operator::I16x8RoundingAverageU { .. } => {
|
||||
return Err(wasm_unsupported!("proposed SIMD operator {:?}", op));
|
||||
}
|
||||
};
|
||||
|
@ -1831,11 +1813,6 @@ fn type_of(operator: &Operator) -> Type {
|
|||
| Operator::I8x16Sub
|
||||
| Operator::I8x16SubSaturateS
|
||||
| Operator::I8x16SubSaturateU
|
||||
| Operator::I8x16MinS
|
||||
| Operator::I8x16MinU
|
||||
| Operator::I8x16MaxS
|
||||
| Operator::I8x16MaxU
|
||||
| Operator::I8x16RoundingAverageU
|
||||
| Operator::I8x16Mul => I8X16,
|
||||
|
||||
Operator::I16x8Splat
|
||||
|
@ -1865,11 +1842,6 @@ fn type_of(operator: &Operator) -> Type {
|
|||
| Operator::I16x8Sub
|
||||
| Operator::I16x8SubSaturateS
|
||||
| Operator::I16x8SubSaturateU
|
||||
| Operator::I16x8MinS
|
||||
| Operator::I16x8MinU
|
||||
| Operator::I16x8MaxS
|
||||
| Operator::I16x8MaxU
|
||||
| Operator::I16x8RoundingAverageU
|
||||
| Operator::I16x8Mul => I16X8,
|
||||
|
||||
Operator::I32x4Splat
|
||||
|
@ -1895,10 +1867,6 @@ fn type_of(operator: &Operator) -> Type {
|
|||
| Operator::I32x4Add
|
||||
| Operator::I32x4Sub
|
||||
| Operator::I32x4Mul
|
||||
| Operator::I32x4MinS
|
||||
| Operator::I32x4MinU
|
||||
| Operator::I32x4MaxS
|
||||
| Operator::I32x4MaxU
|
||||
| Operator::F32x4ConvertI32x4S
|
||||
| Operator::F32x4ConvertI32x4U => I32X4,
|
||||
|
||||
|
@ -2041,5 +2009,5 @@ pub fn wasm_param_types(params: &[ir::AbiParam], is_wasm: impl Fn(usize) -> bool
|
|||
ret.push(param.value_type);
|
||||
}
|
||||
}
|
||||
ret
|
||||
return ret;
|
||||
}
|
||||
|
|
|
@ -11,8 +11,8 @@ use crate::environ::{
|
|||
use crate::func_translator::FuncTranslator;
|
||||
use crate::state::ModuleTranslationState;
|
||||
use crate::translation_utils::{
|
||||
DataIndex, DefinedFuncIndex, ElemIndex, FuncIndex, Global, GlobalIndex, Memory, MemoryIndex,
|
||||
SignatureIndex, Table, TableIndex,
|
||||
DefinedFuncIndex, FuncIndex, Global, GlobalIndex, Memory, MemoryIndex, SignatureIndex, Table,
|
||||
TableIndex,
|
||||
};
|
||||
use core::convert::TryFrom;
|
||||
use cranelift_codegen::cursor::FuncCursor;
|
||||
|
@ -605,22 +605,6 @@ impl<'data> ModuleEnvironment<'data> for DummyEnvironment {
        Ok(())
    }

    fn declare_passive_element(
        &mut self,
        _elem_index: ElemIndex,
        _segments: Box<[FuncIndex]>,
    ) -> WasmResult<()> {
        Ok(())
    }

    fn declare_passive_data(
        &mut self,
        _elem_index: DataIndex,
        _segments: &'data [u8],
    ) -> WasmResult<()> {
        Ok(())
    }

    fn declare_memory(&mut self, memory: Memory) -> WasmResult<()> {
        self.info.memories.push(Exportable::new(memory));
        Ok(())
@ -8,8 +8,7 @@
|
|||
|
||||
use crate::state::{FuncTranslationState, ModuleTranslationState};
|
||||
use crate::translation_utils::{
|
||||
DataIndex, ElemIndex, FuncIndex, Global, GlobalIndex, Memory, MemoryIndex, SignatureIndex,
|
||||
Table, TableIndex,
|
||||
FuncIndex, Global, GlobalIndex, Memory, MemoryIndex, SignatureIndex, Table, TableIndex,
|
||||
};
|
||||
use core::convert::From;
|
||||
use cranelift_codegen::cursor::FuncCursor;
|
||||
|
@ -55,7 +54,7 @@ pub enum WasmError {
    #[error("Invalid input WebAssembly code at offset {offset}: {message}")]
    InvalidWebAssembly {
        /// A string describing the validation error.
        message: std::string::String,
        message: &'static str,
        /// The bytecode offset where the error occurred.
        offset: usize,
    },
@ -90,10 +89,8 @@ macro_rules! wasm_unsupported {
impl From<BinaryReaderError> for WasmError {
    /// Convert from a `BinaryReaderError` to a `WasmError`.
    fn from(e: BinaryReaderError) -> Self {
        Self::InvalidWebAssembly {
            message: e.message().into(),
            offset: e.offset(),
        }
        let BinaryReaderError { message, offset } = e;
        Self::InvalidWebAssembly { message, offset }
    }
}
@ -603,25 +600,6 @@ pub trait ModuleEnvironment<'data>: TargetEnvironment {
|
|||
elements: Box<[FuncIndex]>,
|
||||
) -> WasmResult<()>;
|
||||
|
||||
/// Declare a passive element segment.
|
||||
fn declare_passive_element(
|
||||
&mut self,
|
||||
index: ElemIndex,
|
||||
elements: Box<[FuncIndex]>,
|
||||
) -> WasmResult<()>;
|
||||
|
||||
/// Provides the number of passive data segments up front.
|
||||
///
|
||||
/// By default this does nothing, but implementations may use this to
|
||||
/// pre-allocate memory if desired.
|
||||
fn reserve_passive_data(&mut self, count: u32) -> WasmResult<()> {
|
||||
let _ = count;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Declare a passive data segment.
|
||||
fn declare_passive_data(&mut self, data_index: DataIndex, data: &'data [u8]) -> WasmResult<()>;
|
||||
|
||||
/// Provides the contents of a function body.
|
||||
///
|
||||
/// Note there's no `reserve_function_bodies` function because the number of
|
||||
|
|
|
@ -66,8 +66,8 @@ pub use crate::module_translator::translate_module;
|
|||
pub use crate::state::func_state::FuncTranslationState;
|
||||
pub use crate::state::module_state::ModuleTranslationState;
|
||||
pub use crate::translation_utils::{
|
||||
get_vmctx_value_label, DataIndex, DefinedFuncIndex, DefinedGlobalIndex, DefinedMemoryIndex,
|
||||
DefinedTableIndex, ElemIndex, FuncIndex, Global, GlobalIndex, GlobalInit, Memory, MemoryIndex,
|
||||
get_vmctx_value_label, DefinedFuncIndex, DefinedGlobalIndex, DefinedMemoryIndex,
|
||||
DefinedTableIndex, FuncIndex, Global, GlobalIndex, GlobalInit, Memory, MemoryIndex,
|
||||
SignatureIndex, Table, TableElementType, TableIndex,
|
||||
};
|
||||
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
//! Translation skeleton that traverses the whole WebAssembly module and call helper functions
|
||||
//! to deal with each part of it.
|
||||
use crate::environ::{ModuleEnvironment, WasmResult};
|
||||
use crate::environ::{ModuleEnvironment, WasmError, WasmResult};
|
||||
use crate::sections_translator::{
|
||||
parse_code_section, parse_data_section, parse_element_section, parse_export_section,
|
||||
parse_function_section, parse_global_section, parse_import_section, parse_memory_section,
|
||||
|
@ -67,8 +67,11 @@ pub fn translate_module<'data>(
                parse_data_section(data, environ)?;
            }

            SectionContent::DataCount(count) => {
                environ.reserve_passive_data(count)?;
            SectionContent::DataCount(_) => {
                return Err(WasmError::InvalidWebAssembly {
                    message: "don't know how to handle the data count section yet",
                    offset: reader.current_position(),
                });
            }

            SectionContent::Custom {
@ -10,8 +10,8 @@
|
|||
use crate::environ::{ModuleEnvironment, WasmError, WasmResult};
|
||||
use crate::state::ModuleTranslationState;
|
||||
use crate::translation_utils::{
|
||||
tabletype_to_type, type_to_type, DataIndex, ElemIndex, FuncIndex, Global, GlobalIndex,
|
||||
GlobalInit, Memory, MemoryIndex, SignatureIndex, Table, TableElementType, TableIndex,
|
||||
tabletype_to_type, type_to_type, FuncIndex, Global, GlobalIndex, GlobalInit, Memory,
|
||||
MemoryIndex, SignatureIndex, Table, TableElementType, TableIndex,
|
||||
};
|
||||
use crate::{wasm_unsupported, HashMap};
|
||||
use core::convert::TryFrom;
|
||||
|
@ -19,11 +19,10 @@ use cranelift_codegen::ir::immediates::V128Imm;
|
|||
use cranelift_codegen::ir::{self, AbiParam, Signature};
|
||||
use cranelift_entity::packed_option::ReservedValue;
|
||||
use cranelift_entity::EntityRef;
|
||||
use std::boxed::Box;
|
||||
use std::vec::Vec;
|
||||
use wasmparser::{
|
||||
self, CodeSectionReader, Data, DataKind, DataSectionReader, Element, ElementItem, ElementItems,
|
||||
ElementKind, ElementSectionReader, Export, ExportSectionReader, ExternalKind, FuncType,
|
||||
self, CodeSectionReader, Data, DataKind, DataSectionReader, Element, ElementItem, ElementKind,
|
||||
ElementSectionReader, Export, ExportSectionReader, ExternalKind, FuncType,
|
||||
FunctionSectionReader, GlobalSectionReader, GlobalType, ImportSectionEntryType,
|
||||
ImportSectionReader, MemorySectionReader, MemoryType, NameSectionReader, Naming, NamingReader,
|
||||
Operator, TableSectionReader, Type, TypeSectionReader,
|
||||
|
@ -289,19 +288,6 @@ pub fn parse_start_section(index: u32, environ: &mut dyn ModuleEnvironment) -> W
|
|||
Ok(())
|
||||
}
|
||||
|
||||
fn read_elems(items: &ElementItems) -> WasmResult<Box<[FuncIndex]>> {
|
||||
let items_reader = items.get_items_reader()?;
|
||||
let mut elems = Vec::with_capacity(usize::try_from(items_reader.get_count()).unwrap());
|
||||
for item in items_reader {
|
||||
let elem = match item? {
|
||||
ElementItem::Null => FuncIndex::reserved_value(),
|
||||
ElementItem::Func(index) => FuncIndex::from_u32(index),
|
||||
};
|
||||
elems.push(elem);
|
||||
}
|
||||
Ok(elems.into_boxed_slice())
|
||||
}
|
||||
|
||||
/// Parses the Element section of the wasm module.
|
||||
pub fn parse_element_section<'data>(
|
||||
elements: ElementSectionReader<'data>,
|
||||
|
@ -309,7 +295,7 @@ pub fn parse_element_section<'data>(
|
|||
) -> WasmResult<()> {
|
||||
environ.reserve_table_elements(elements.get_count())?;
|
||||
|
||||
for (index, entry) in elements.into_iter().enumerate() {
|
||||
for entry in elements {
|
||||
let Element { kind, items, ty } = entry?;
|
||||
if ty != Type::AnyFunc {
|
||||
return Err(wasm_unsupported!(
|
||||
|
@ -317,37 +303,41 @@ pub fn parse_element_section<'data>(
|
|||
ty
|
||||
));
|
||||
}
|
||||
let segments = read_elems(&items)?;
|
||||
match kind {
|
||||
ElementKind::Active {
|
||||
table_index,
|
||||
init_expr,
|
||||
} => {
|
||||
let mut init_expr_reader = init_expr.get_binary_reader();
|
||||
let (base, offset) = match init_expr_reader.read_operator()? {
|
||||
Operator::I32Const { value } => (None, value as u32 as usize),
|
||||
Operator::GlobalGet { global_index } => {
|
||||
(Some(GlobalIndex::from_u32(global_index)), 0)
|
||||
}
|
||||
ref s => {
|
||||
return Err(wasm_unsupported!(
|
||||
"unsupported init expr in element section: {:?}",
|
||||
s
|
||||
));
|
||||
}
|
||||
if let ElementKind::Active {
|
||||
table_index,
|
||||
init_expr,
|
||||
} = kind
|
||||
{
|
||||
let mut init_expr_reader = init_expr.get_binary_reader();
|
||||
let (base, offset) = match init_expr_reader.read_operator()? {
|
||||
Operator::I32Const { value } => (None, value as u32 as usize),
|
||||
Operator::GlobalGet { global_index } => {
|
||||
(Some(GlobalIndex::from_u32(global_index)), 0)
|
||||
}
|
||||
ref s => {
|
||||
return Err(wasm_unsupported!(
|
||||
"unsupported init expr in element section: {:?}",
|
||||
s
|
||||
));
|
||||
}
|
||||
};
|
||||
let items_reader = items.get_items_reader()?;
|
||||
let mut elems = Vec::with_capacity(usize::try_from(items_reader.get_count()).unwrap());
|
||||
for item in items_reader {
|
||||
let elem = match item? {
|
||||
ElementItem::Null => FuncIndex::reserved_value(),
|
||||
ElementItem::Func(index) => FuncIndex::from_u32(index),
|
||||
};
|
||||
environ.declare_table_elements(
|
||||
TableIndex::from_u32(table_index),
|
||||
base,
|
||||
offset,
|
||||
segments,
|
||||
)?
|
||||
elems.push(elem);
|
||||
}
|
||||
ElementKind::Passive => {
|
||||
let index = ElemIndex::from_u32(index as u32);
|
||||
environ.declare_passive_element(index, segments)?;
|
||||
}
|
||||
ElementKind::Declared => return Err(wasm_unsupported!("element kind declared")),
|
||||
environ.declare_table_elements(
|
||||
TableIndex::from_u32(table_index),
|
||||
base,
|
||||
offset,
|
||||
elems.into_boxed_slice(),
|
||||
)?
|
||||
} else {
|
||||
return Err(wasm_unsupported!("unsupported passive elements section",));
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
|
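Editor's note: both the old and the restored version of parse_element_section and parse_data_section reduce a segment's init expression to a (base, offset) pair: an I32Const operator yields a constant offset, a GlobalGet yields a base global with offset 0, and anything else is rejected as unsupported. A minimal self-contained sketch of that reduction follows; InitOp and init_expr_base_offset are illustrative stand-ins, not wasmparser's or cranelift-wasm's actual API.

    // Illustrative stand-in for the subset of operators accepted in an init expression.
    enum InitOp {
        I32Const { value: i32 },
        GlobalGet { global_index: u32 },
    }

    /// Reduce the first operator of an init expression to (base global, constant
    /// offset), mirroring the match in parse_element_section / parse_data_section.
    fn init_expr_base_offset(op: &InitOp) -> (Option<u32>, usize) {
        match op {
            // Constant offset relative to the start of the table or memory.
            InitOp::I32Const { value } => (None, *value as u32 as usize),
            // Offset read from a global when the segment is applied.
            InitOp::GlobalGet { global_index } => (Some(*global_index), 0),
        }
    }

    fn main() {
        // Example: an active segment whose offset is the constant 8.
        let (base, offset) = init_expr_base_offset(&InitOp::I32Const { value: 8 });
        assert_eq!((base, offset), (None, 8));
    }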
@ -375,37 +365,37 @@ pub fn parse_data_section<'data>(
|
|||
) -> WasmResult<()> {
|
||||
environ.reserve_data_initializers(data.get_count())?;
|
||||
|
||||
for (index, entry) in data.into_iter().enumerate() {
|
||||
for entry in data {
|
||||
let Data { kind, data } = entry?;
|
||||
match kind {
|
||||
DataKind::Active {
|
||||
memory_index,
|
||||
init_expr,
|
||||
} => {
|
||||
let mut init_expr_reader = init_expr.get_binary_reader();
|
||||
let (base, offset) = match init_expr_reader.read_operator()? {
|
||||
Operator::I32Const { value } => (None, value as u32 as usize),
|
||||
Operator::GlobalGet { global_index } => {
|
||||
(Some(GlobalIndex::from_u32(global_index)), 0)
|
||||
}
|
||||
ref s => {
|
||||
return Err(wasm_unsupported!(
|
||||
"unsupported init expr in data section: {:?}",
|
||||
s
|
||||
))
|
||||
}
|
||||
};
|
||||
environ.declare_data_initialization(
|
||||
MemoryIndex::from_u32(memory_index),
|
||||
base,
|
||||
offset,
|
||||
data,
|
||||
)?;
|
||||
}
|
||||
DataKind::Passive => {
|
||||
let index = DataIndex::from_u32(index as u32);
|
||||
environ.declare_passive_data(index, data)?;
|
||||
}
|
||||
if let DataKind::Active {
|
||||
memory_index,
|
||||
init_expr,
|
||||
} = kind
|
||||
{
|
||||
let mut init_expr_reader = init_expr.get_binary_reader();
|
||||
let (base, offset) = match init_expr_reader.read_operator()? {
|
||||
Operator::I32Const { value } => (None, value as u32 as usize),
|
||||
Operator::GlobalGet { global_index } => {
|
||||
(Some(GlobalIndex::from_u32(global_index)), 0)
|
||||
}
|
||||
ref s => {
|
||||
return Err(wasm_unsupported!(
|
||||
"unsupported init expr in data section: {:?}",
|
||||
s
|
||||
))
|
||||
}
|
||||
};
|
||||
environ.declare_data_initialization(
|
||||
MemoryIndex::from_u32(memory_index),
|
||||
base,
|
||||
offset,
|
||||
data,
|
||||
)?;
|
||||
} else {
|
||||
return Err(wasm_unsupported!(
|
||||
"unsupported passive data section: {:?}",
|
||||
kind
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -57,16 +57,6 @@ entity_impl!(MemoryIndex);
|
|||
pub struct SignatureIndex(u32);
|
||||
entity_impl!(SignatureIndex);
|
||||
|
||||
/// Index type of a passive data segment inside the WebAssembly module.
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)]
|
||||
pub struct DataIndex(u32);
|
||||
entity_impl!(DataIndex);
|
||||
|
||||
/// Index type of a passive element segment inside the WebAssembly module.
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)]
|
||||
pub struct ElemIndex(u32);
|
||||
entity_impl!(ElemIndex);
|
||||
|
||||
/// WebAssembly global.
|
||||
#[derive(Debug, Clone, Copy, Hash)]
|
||||
pub struct Global {
|
||||
|
|
|
@ -1 +1 @@
|
|||
{"files":{"CODE_OF_CONDUCT.md":"a13aaaf393818bd91207c618724d3fb74944ca5161201822a84af951bcf655ef","Cargo.lock":"e11f6e0b7e1ac67f97b645ec18649c730b3e0b08e2079519b68b490fe60b0b6a","Cargo.toml":"4aa94dce47d25c24eb6e928c07500326b0cd9af253cf75fe225d1437b6f5fb87","LICENSE":"a6c48161a09acc75a0e25503bab66a731eb5fba5392ed4bb4743e4ba5085327a","ORG_CODE_OF_CONDUCT.md":"59c4c4435538496193cd73e89b2cd971d1011dba573158cf108abe6af6603e6b","README.md":"e34595e5f1d8e9cb50a5cf42e987e976d4361522fb3325fa030a52c15da6077f","SECURITY.md":"4d75afb09dd28eb5982e3a1f768ee398d90204669ceef3240a16b31dcf04148a","benches/benchmark.rs":"5ed64bf1497769a8df2a6518d06c7ca8dfaef328869ffa4fef43414035a07255","compare-master.sh":"165490eab36ef4eceb2913a6c5cdeff479a05e1e0119a7f4551b03dbcda51ad4","examples/dump.rs":"40cf9492d58e196e462c37d610fd372a648df780143d1804ae4c536f03ac5254","examples/simple.rs":"d2d46f1a232e9b23fd56982a84379b741423916983e0fd1f2a1009d456f7f851","format-all.sh":"6b02a40629ef3d2c0b9671222582a6217d526317a41262ae06c7a95de53bcbeb","src/binary_reader.rs":"e9ee1a4117277f5f08b582a276b7949df7fddc4409d2bee637753c32d8eaeb6c","src/lib.rs":"8bb5301d5d66746160466e38b0e956e14e4997bf2385431ec9070cea13fc632e","src/limits.rs":"34e5cda95fb67669011ba95ca60f48fc777f3e3fa279ff68a1f2a072032a4abd","src/module_resources.rs":"940d0d6a7972f512488cea953a15d128247cfc6bd7945be063219e281d65eb9b","src/operators_validator.rs":"97c6e7109d784efd5d419c4036d4b5b1839977b082e15e75dc95e08d1ff5b842","src/parser.rs":"4b18ae92d0c6f85581bf77896f457b34f47c75cbbf422cdd4edac48c5f48c0cb","src/primitives.rs":"be40d0d5ae3df08905d08b4ca8d646832d96bd6b79e07cc2136f0e187b9d5155","src/readers/code_section.rs":"bfdd8d5f08ef357679d7bfe6f9735ff4f08925361e0771a6b1b5112a12c62f30","src/readers/data_count_section.rs":"e711720f8205a906794dc7020a656a2ae74e1d9c3823fcdcdbd9d2f3b206c7d7","src/readers/data_section.rs":"f572e7d2589f0bccf5e97d43c1ca3aac103cbd47d139ead6b84b39b5c9d47c0b","src/readers/element_section.rs":"554e934d9f6826dcd13ed8596a5abfce75a663fca430d7c15ee604f0b00d6c26","src/readers/export_section.rs":"7c74f7a11406a95c162f6ad4f77aafd0b1eee309f33b69f06bea12b23925e143","src/readers/function_section.rs":"57c0479ba8d7f61908ed74e86cbc26553fdd6d2d952f032ce29385a39f82efd3","src/readers/global_section.rs":"5fa18bed0fffadcc2dbdcbaedbe4e4398992fd1ce9e611b0319333a7681082ac","src/readers/import_section.rs":"1db4bf7290d04783d5cf526050d025b15a1daaf2bd97fca1a92ecb873d48f641","src/readers/init_expr.rs":"7020c80013dad4518a5f969c3ab4d624b46d778f03e632871cf343964f63441c","src/readers/linking_section.rs":"9df71f3ee5356f0d273c099212213353080001e261ca697caddf6b847fb5af09","src/readers/memory_section.rs":"83212f86cfc40d18fb392e9234c880afdf443f4af38a727ba346f9c740ef8718","src/readers/mod.rs":"b9f835365b9b04411d7b141a3c9b52695e9bf8ef1f07094a10a18b901d0ac420","src/readers/module.rs":"db292e3cebe55e5f2e9de8aff0a2074fa874d42058c6bc2a798c5b7e3c1ca81e","src/readers/name_section.rs":"4ff460766bbcd67f658086c8fa525cf2bbceea67b393c65edfddbb714de722fd","src/readers/operators.rs":"1defc15f364775018ffe8c7f010ff83342c46659f780be4ba88c58fad7606e03","src/readers/producers_section.rs":"674f402fc4545c94487f827153871b37adab44ed5eff4070a436eb18e514023a","src/readers/reloc_section.rs":"0ef818a8b83a4542c4c29c23642436a92d3e7c37bc0248e817ed5a9d65ec38ce","src/readers/section_reader.rs":"f27f017938bb8602954298d053cd3b79d8876f9fcbbe0e1a3380051b6aa4584a","src/readers/sourcemappingurl_section.rs":"eff317f6f2b728a98a5eb68eec7e6cf222d27158d0d5597fd1c84f09b1092a50","src/readers/start_section.rs":"012fe574a5b94ea34c9d6896
29fb0df2f5ba4c11c835147b39155f5a8c715e34","src/readers/table_section.rs":"e564876825a7b31df2b5dc850279b523e26dc50a08da935cc8d635a49e809951","src/readers/type_section.rs":"2fa33a7b793f3bfa01c259b5dbc38633b7343931886ab41f0cb96dd78db3bf6e","src/tests.rs":"9a205cd5d740f2f6e8c1bb34bd295bc6e18e69511a6eb0ae5deb465a0a8f898a","src/validator.rs":"a83082cb0553dc95df06fcc7b1f7a4b32ca028c0fb4c685b9f3a6cf202d09bc5","test-all.sh":"f36e3e9bf9c39456bab3ac170d3a09911becf2b7e0d0e2a58854ce1750da0d1f"},"package":"aeb1956b19469d1c5e63e459d29e7b5aa0f558d9f16fcef09736f8a265e6c10a"}
|
||||
{"files":{"CODE_OF_CONDUCT.md":"a13aaaf393818bd91207c618724d3fb74944ca5161201822a84af951bcf655ef","Cargo.lock":"dff00d1ea3a556fcd8d06289744bbbebc74d5190f40fbe4d73e4cfdb2ded27eb","Cargo.toml":"370cbad7937cff335bc9ebe3e73ec96cfe62c76f2f205bdb4dc7c7970879fdeb","LICENSE":"a6c48161a09acc75a0e25503bab66a731eb5fba5392ed4bb4743e4ba5085327a","ORG_CODE_OF_CONDUCT.md":"59c4c4435538496193cd73e89b2cd971d1011dba573158cf108abe6af6603e6b","README.md":"90c9b0e3dd91a63b6a8088b72200b3118fa0bbdf3320cd98609bd4cc4ef09902","SECURITY.md":"4d75afb09dd28eb5982e3a1f768ee398d90204669ceef3240a16b31dcf04148a","benches/benchmark.rs":"951abd671bd1a5a33d09379b023de000e89fc5f99a5950f0b3b2f571021aa402","compare-master.sh":"165490eab36ef4eceb2913a6c5cdeff479a05e1e0119a7f4551b03dbcda51ad4","examples/dump.rs":"de2bbdba75e21b9ff92b32697b3d9941f8695b8f7e3a8dee8fc5d7f4c3a0649c","examples/simple.rs":"c79ae542913e72cfcd03711543d173b2e8f62783e6c206459953bdb94dbb8c0c","format-all.sh":"6b02a40629ef3d2c0b9671222582a6217d526317a41262ae06c7a95de53bcbeb","src/binary_reader.rs":"2be55559e5336617dacdfd64be9170a6f81a08d9642f562174f4d821b89f0da4","src/lib.rs":"8bb5301d5d66746160466e38b0e956e14e4997bf2385431ec9070cea13fc632e","src/limits.rs":"34e5cda95fb67669011ba95ca60f48fc777f3e3fa279ff68a1f2a072032a4abd","src/module_resources.rs":"940d0d6a7972f512488cea953a15d128247cfc6bd7945be063219e281d65eb9b","src/operators_validator.rs":"b6631a0bdafc115dea886557fc60de9a6e9f6b5d51b4ed800f23645192a250b1","src/parser.rs":"2680fb91b237e273bc5c06af6ad44605843ad53d59edb6af66e3b133b4096167","src/primitives.rs":"ea2e1b35418aab82554688d2f15efd3ac6b90268d655bec536b20f73fc05de91","src/readers/code_section.rs":"2034c399b76428ac993c22f551f3c541b132d8b4ccc74e34f0043e25534d107b","src/readers/data_count_section.rs":"27ef37517b6beac21245008b14b5416b851c52d0af8e2ae85c1456674e1c9a9e","src/readers/data_section.rs":"e7e2a539d2d3049d4a8f68df9ea2f21d97e7061657bbd91845e1df3e9c1f2ebc","src/readers/element_section.rs":"e685af8a189f0cfa9f250c3fd221f9f14d20886f609c4c86a75c7408a106b8e0","src/readers/export_section.rs":"7c74f7a11406a95c162f6ad4f77aafd0b1eee309f33b69f06bea12b23925e143","src/readers/function_section.rs":"57c0479ba8d7f61908ed74e86cbc26553fdd6d2d952f032ce29385a39f82efd3","src/readers/global_section.rs":"5fa18bed0fffadcc2dbdcbaedbe4e4398992fd1ce9e611b0319333a7681082ac","src/readers/import_section.rs":"1db4bf7290d04783d5cf526050d025b15a1daaf2bd97fca1a92ecb873d48f641","src/readers/init_expr.rs":"7020c80013dad4518a5f969c3ab4d624b46d778f03e632871cf343964f63441c","src/readers/linking_section.rs":"9df71f3ee5356f0d273c099212213353080001e261ca697caddf6b847fb5af09","src/readers/memory_section.rs":"83212f86cfc40d18fb392e9234c880afdf443f4af38a727ba346f9c740ef8718","src/readers/mod.rs":"b9f835365b9b04411d7b141a3c9b52695e9bf8ef1f07094a10a18b901d0ac420","src/readers/module.rs":"2d272e6b0119361c99ea12f3288433c14b55a8e6336aedc961fe5cdbffe4b5e3","src/readers/name_section.rs":"297f57393d5fef745ec265438108aa6eb7ed2762c03c3beb539493612442f3da","src/readers/operators.rs":"53bf7ea87529980a3a3e1afd92ddf488ae4dc04a40cda39dbf548e4d20c8a55c","src/readers/producers_section.rs":"674f402fc4545c94487f827153871b37adab44ed5eff4070a436eb18e514023a","src/readers/reloc_section.rs":"0ef818a8b83a4542c4c29c23642436a92d3e7c37bc0248e817ed5a9d65ec38ce","src/readers/section_reader.rs":"3d2260449fa0455d710ba6d97810372ec36cba70722c10dd236c3a18ca0eb56f","src/readers/sourcemappingurl_section.rs":"ababe84d51e4817ad19f827aa2b5239578e7f202e5ec06dd688b618885138434","src/readers/start_section.rs":"3eeae00e1aa0fcb2e0d93b7b
0eaac30a60d3f1431c71c589cd3f73adb363d532","src/readers/table_section.rs":"e564876825a7b31df2b5dc850279b523e26dc50a08da935cc8d635a49e809951","src/readers/type_section.rs":"2fa33a7b793f3bfa01c259b5dbc38633b7343931886ab41f0cb96dd78db3bf6e","src/tests.rs":"0f8b8aa5434a5a0a0ebe159d13c655a0fba532292f988704b4afee3f09e013e0","src/validator.rs":"e8b8f019891e1138ebf8dcc6331b193226db1f790bfd274de0fe56de92b3fae5","test-all.sh":"f36e3e9bf9c39456bab3ac170d3a09911becf2b7e0d0e2a58854ce1750da0d1f"},"package":"073da89bf1c84db000dd68ce660c1b4a08e3a2d28fd1e3394ab9e7abdde4a0f8"}
|
|
@ -430,15 +430,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
|
||||
[[package]]
|
||||
name = "wasmparser"
|
||||
version = "0.51.4"
|
||||
version = "0.48.2"
|
||||
dependencies = [
|
||||
"criterion 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"wast 7.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"wast 6.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wast"
|
||||
version = "7.0.0"
|
||||
version = "6.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"leb128 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
|
@ -525,7 +525,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
"checksum unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c"
|
||||
"checksum walkdir 2.2.9 (registry+https://github.com/rust-lang/crates.io-index)" = "9658c94fa8b940eab2250bd5a457f9c48b748420d71293b165c8cdbe2f55f71e"
|
||||
"checksum wasi 0.9.0+wasi-snapshot-preview1 (registry+https://github.com/rust-lang/crates.io-index)" = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519"
|
||||
"checksum wast 7.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "12a729d076deb29c8509fa71f2d427729f9394f9496844ed8fcab152f35d163d"
|
||||
"checksum wast 6.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3ed3db7029d1d31a15c10126e78b58e51781faefafbc8afb20fb01291b779984"
|
||||
"checksum winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "8093091eeb260906a183e6ae1abdba2ef5ef2257a21801128899c3fc699229c6"
|
||||
"checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
|
||||
"checksum winapi-util 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7168bab6e1daee33b4557efd0e95d5ca70a03706d39fa5f3fe7a236f584b03c9"
|
||||
|
|
|
@ -13,7 +13,7 @@
|
|||
[package]
|
||||
edition = "2018"
|
||||
name = "wasmparser"
|
||||
version = "0.51.4"
|
||||
version = "0.48.2"
|
||||
authors = ["Yury Delendik <ydelendik@mozilla.com>"]
|
||||
exclude = ["fuzz/**/*", "tests/**/*", "testsuite/**/*"]
|
||||
description = "A simple event-driven library for parsing WebAssembly binary files.\n"
|
||||
|
@ -28,7 +28,7 @@ harness = false
|
|||
version = "0.3"
|
||||
|
||||
[dev-dependencies.wast]
|
||||
version = "7.0.0"
|
||||
version = "6.0.0"
|
||||
|
||||
[features]
|
||||
deterministic = []
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
**A [Bytecode Alliance](https://bytecodealliance.org/) project**
|
||||
|
||||
![CI](https://github.com/bytecodealliance/wasmparser/workflows/CI/badge.svg)
|
||||
[![Build Status](https://travis-ci.org/yurydelendik/wasmparser.rs.svg?branch=master)](https://travis-ci.org/yurydelendik/wasmparser.rs)
|
||||
[![crates.io link](https://img.shields.io/crates/v/wasmparser.svg)](https://crates.io/crates/wasmparser)
|
||||
|
||||
The decoder library provides lightweight and fast decoding/parsing of WebAssembly binary files.
|
||||
|
|
|
@ -35,7 +35,7 @@ where
|
|||
{
|
||||
loop {
|
||||
match *d.read() {
|
||||
ParserState::Error(ref e) => panic!("unexpected error {}", e),
|
||||
ParserState::Error(e) => panic!("unexpected error {:?}", e),
|
||||
ParserState::EndWasm => return,
|
||||
_ => (),
|
||||
}
|
||||
|
|
|
@ -43,7 +43,7 @@ fn main() {
|
|||
);
|
||||
}
|
||||
ParserState::EndWasm => break,
|
||||
ParserState::Error(ref err) => panic!("Error: {:?}", err),
|
||||
ParserState::Error(err) => panic!("Error: {:?}", err),
|
||||
_ => println!("{:?}", state),
|
||||
}
|
||||
}
|
||||
|
|
|
@ -33,7 +33,7 @@ fn main() {
|
|||
println!(" Import {}::{}", module, field)
|
||||
}
|
||||
ParserState::EndWasm => break,
|
||||
ParserState::Error(ref err) => panic!("Error: {:?}", err),
|
||||
ParserState::Error(err) => panic!("Error: {:?}", err),
|
||||
_ => ( /* println!(" Other {:?}", state); */ ),
|
||||
}
|
||||
}
|
||||
|
|
|
@ -127,10 +127,10 @@ impl<'a> BinaryReader<'a> {
|
|||
if self.position < self.buffer.len() {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(BinaryReaderError::new(
|
||||
"Unexpected EOF",
|
||||
self.original_position(),
|
||||
))
|
||||
Err(BinaryReaderError {
|
||||
message: "Unexpected EOF",
|
||||
offset: self.original_position(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
|
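Editor's note: the remaining hunks in this file repeat one mechanical change. The backed-out wasmparser 0.51.4 builds errors through a BinaryReaderError::new(message, offset) constructor that accepts any Into<String> message, while the restored 0.48.2 code constructs the struct literal BinaryReaderError { message, offset } with a &'static str message. A sketch of the two shapes side by side; ReaderErrorV1 and ReaderErrorV2 are hypothetical stand-ins, not the crate's definitions.

    // Shape restored by the backout: a plain struct with a static message,
    // built directly as a struct literal at each error site.
    struct ReaderErrorV1 {
        message: &'static str,
        offset: usize,
    }

    // Shape in the backed-out revision: an owned message behind a constructor,
    // so call sites can also format dynamic strings.
    struct ReaderErrorV2 {
        message: String,
        offset: usize,
    }

    impl ReaderErrorV2 {
        fn new(message: impl Into<String>, offset: usize) -> Self {
            Self { message: message.into(), offset }
        }
    }

    fn main() {
        let pos = 42;
        let old_style = ReaderErrorV1 { message: "Unexpected EOF", offset: pos };
        let new_style = ReaderErrorV2::new(format!("Unexpected EOF at {}", pos), pos);
        println!("{} @ {}", old_style.message, old_style.offset);
        println!("{} @ {}", new_style.message, new_style.offset);
    }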
@ -138,20 +138,20 @@ impl<'a> BinaryReader<'a> {
|
|||
if self.position + len <= self.buffer.len() {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(BinaryReaderError::new(
|
||||
"Unexpected EOF",
|
||||
self.original_position(),
|
||||
))
|
||||
Err(BinaryReaderError {
|
||||
message: "Unexpected EOF",
|
||||
offset: self.original_position(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn read_var_u1(&mut self) -> Result<u32> {
|
||||
let b = self.read_u8()?;
|
||||
if (b & 0xFE) != 0 {
|
||||
return Err(BinaryReaderError::new(
|
||||
"Invalid var_u1",
|
||||
self.original_position() - 1,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "Invalid var_u1",
|
||||
offset: self.original_position() - 1,
|
||||
});
|
||||
}
|
||||
Ok(b)
|
||||
}
|
||||
|
@ -159,10 +159,10 @@ impl<'a> BinaryReader<'a> {
|
|||
fn read_var_i7(&mut self) -> Result<i32> {
|
||||
let b = self.read_u8()?;
|
||||
if (b & 0x80) != 0 {
|
||||
return Err(BinaryReaderError::new(
|
||||
"Invalid var_i7",
|
||||
self.original_position() - 1,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "Invalid var_i7",
|
||||
offset: self.original_position() - 1,
|
||||
});
|
||||
}
|
||||
Ok((b << 25) as i32 >> 25)
|
||||
}
|
||||
|
@ -170,10 +170,10 @@ impl<'a> BinaryReader<'a> {
|
|||
pub(crate) fn read_var_u7(&mut self) -> Result<u32> {
|
||||
let b = self.read_u8()?;
|
||||
if (b & 0x80) != 0 {
|
||||
return Err(BinaryReaderError::new(
|
||||
"Invalid var_u7",
|
||||
self.original_position() - 1,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "Invalid var_u7",
|
||||
offset: self.original_position() - 1,
|
||||
});
|
||||
}
|
||||
Ok(b)
|
||||
}
|
||||
|
@ -191,10 +191,10 @@ impl<'a> BinaryReader<'a> {
|
|||
-0x12 => Ok(Type::NullRef),
|
||||
-0x20 => Ok(Type::Func),
|
||||
-0x40 => Ok(Type::EmptyBlockType),
|
||||
_ => Err(BinaryReaderError::new(
|
||||
"Invalid type",
|
||||
self.original_position() - 1,
|
||||
)),
|
||||
_ => Err(BinaryReaderError {
|
||||
message: "Invalid type",
|
||||
offset: self.original_position() - 1,
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -202,10 +202,10 @@ impl<'a> BinaryReader<'a> {
|
|||
pub fn read_local_count(&mut self) -> Result<usize> {
|
||||
let local_count = self.read_var_u32()? as usize;
|
||||
if local_count > MAX_WASM_FUNCTION_LOCALS {
|
||||
return Err(BinaryReaderError::new(
|
||||
"local_count is out of bounds",
|
||||
self.original_position() - 1,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "local_count is out of bounds",
|
||||
offset: self.original_position() - 1,
|
||||
});
|
||||
}
|
||||
Ok(local_count)
|
||||
}
|
||||
|
@ -214,17 +214,18 @@ impl<'a> BinaryReader<'a> {
|
|||
pub fn read_local_decl(&mut self, locals_total: &mut usize) -> Result<(u32, Type)> {
|
||||
let count = self.read_var_u32()?;
|
||||
let value_type = self.read_type()?;
|
||||
*locals_total = locals_total.checked_add(count as usize).ok_or_else(|| {
|
||||
BinaryReaderError::new(
|
||||
"locals_total is out of bounds",
|
||||
self.original_position() - 1,
|
||||
)
|
||||
})?;
|
||||
*locals_total =
|
||||
locals_total
|
||||
.checked_add(count as usize)
|
||||
.ok_or_else(|| BinaryReaderError {
|
||||
message: "locals_total is out of bounds",
|
||||
offset: self.original_position() - 1,
|
||||
})?;
|
||||
if *locals_total > MAX_WASM_FUNCTION_LOCALS {
|
||||
return Err(BinaryReaderError::new(
|
||||
"locals_total is out of bounds",
|
||||
self.original_position() - 1,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "locals_total is out of bounds",
|
||||
offset: self.original_position() - 1,
|
||||
});
|
||||
}
|
||||
Ok((count, value_type))
|
||||
}
|
||||
|
@ -236,10 +237,10 @@ impl<'a> BinaryReader<'a> {
|
|||
1 => Ok(ExternalKind::Table),
|
||||
2 => Ok(ExternalKind::Memory),
|
||||
3 => Ok(ExternalKind::Global),
|
||||
_ => Err(BinaryReaderError::new(
|
||||
"Invalid external kind",
|
||||
self.original_position() - 1,
|
||||
)),
|
||||
_ => Err(BinaryReaderError {
|
||||
message: "Invalid external kind",
|
||||
offset: self.original_position() - 1,
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -247,10 +248,10 @@ impl<'a> BinaryReader<'a> {
|
|||
let form = self.read_type()?;
|
||||
let params_len = self.read_var_u32()? as usize;
|
||||
if params_len > MAX_WASM_FUNCTION_PARAMS {
|
||||
return Err(BinaryReaderError::new(
|
||||
"function params size is out of bound",
|
||||
self.original_position() - 1,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "function params size is out of bound",
|
||||
offset: self.original_position() - 1,
|
||||
});
|
||||
}
|
||||
let mut params: Vec<Type> = Vec::with_capacity(params_len);
|
||||
for _ in 0..params_len {
|
||||
|
@ -258,10 +259,10 @@ impl<'a> BinaryReader<'a> {
|
|||
}
|
||||
let returns_len = self.read_var_u32()? as usize;
|
||||
if returns_len > MAX_WASM_FUNCTION_RETURNS {
|
||||
return Err(BinaryReaderError::new(
|
||||
"function returns size is out of bound",
|
||||
self.original_position() - 1,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "function returns size is out of bound",
|
||||
offset: self.original_position() - 1,
|
||||
});
|
||||
}
|
||||
let mut returns: Vec<Type> = Vec::with_capacity(returns_len);
|
||||
for _ in 0..returns_len {
|
||||
|
@ -288,10 +289,10 @@ impl<'a> BinaryReader<'a> {
|
|||
let element_type = self.read_type()?;
|
||||
let flags = self.read_var_u32()?;
|
||||
if (flags & !0x1) != 0 {
|
||||
return Err(BinaryReaderError::new(
|
||||
"invalid table resizable limits flags",
|
||||
self.original_position() - 1,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "invalid table resizable limits flags",
|
||||
offset: self.original_position() - 1,
|
||||
});
|
||||
}
|
||||
let limits = self.read_resizable_limits((flags & 0x1) != 0)?;
|
||||
Ok(TableType {
|
||||
|
@ -303,10 +304,10 @@ impl<'a> BinaryReader<'a> {
|
|||
pub(crate) fn read_memory_type(&mut self) -> Result<MemoryType> {
|
||||
let flags = self.read_var_u32()?;
|
||||
if (flags & !0x3) != 0 {
|
||||
return Err(BinaryReaderError::new(
|
||||
"invalid table resizable limits flags",
|
||||
self.original_position() - 1,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "invalid table resizable limits flags",
|
||||
offset: self.original_position() - 1,
|
||||
});
|
||||
}
|
||||
let limits = self.read_resizable_limits((flags & 0x1) != 0)?;
|
||||
let shared = (flags & 0x2) != 0;
|
||||
|
@ -358,17 +359,20 @@ impl<'a> BinaryReader<'a> {
|
|||
10 => Ok(SectionCode::Code),
|
||||
11 => Ok(SectionCode::Data),
|
||||
12 => Ok(SectionCode::DataCount),
|
||||
_ => Err(BinaryReaderError::new("Invalid section code", offset)),
|
||||
_ => Err(BinaryReaderError {
|
||||
message: "Invalid section code",
|
||||
offset,
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
fn read_br_table(&mut self) -> Result<BrTable<'a>> {
|
||||
let targets_len = self.read_var_u32()? as usize;
|
||||
if targets_len > MAX_WASM_BR_TABLE_SIZE {
|
||||
return Err(BinaryReaderError::new(
|
||||
"br_table size is out of bound",
|
||||
self.original_position() - 1,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "br_table size is out of bound",
|
||||
offset: self.original_position() - 1,
|
||||
});
|
||||
}
|
||||
let start = self.position;
|
||||
for _ in 0..targets_len {
|
||||
|
@ -461,10 +465,10 @@ impl<'a> BinaryReader<'a> {
|
|||
|
||||
let result = (self.read_u8()? << 7) | (byte & 0x7F);
|
||||
if result >= 0x100 {
|
||||
return Err(BinaryReaderError::new(
|
||||
"Invalid var_u8",
|
||||
self.original_position() - 1,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "Invalid var_u8",
|
||||
offset: self.original_position() - 1,
|
||||
});
|
||||
}
|
||||
Ok(result)
|
||||
}
|
||||
|
@ -488,10 +492,10 @@ impl<'a> BinaryReader<'a> {
|
|||
result |= ((byte & 0x7F) as u32) << shift;
|
||||
if shift >= 25 && (byte >> (32 - shift)) != 0 {
|
||||
// The continuation bit or unused bits are set.
|
||||
return Err(BinaryReaderError::new(
|
||||
"Invalid var_u32",
|
||||
self.original_position() - 1,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "Invalid var_u32",
|
||||
offset: self.original_position() - 1,
|
||||
});
|
||||
}
|
||||
shift += 7;
|
||||
if (byte & 0x80) == 0 {
|
||||
|
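Editor's note: read_var_u32 decodes an unsigned LEB128 integer: each byte contributes 7 payload bits, the 0x80 bit marks continuation, and the shift >= 25 check above rejects a fifth byte carrying more than the 4 bits still available in a u32. A self-contained sketch of the same scheme with an equivalent (but differently phrased) overflow guard; this is illustrative, not the crate's implementation.

    /// Decode an unsigned LEB128 u32 from the front of `bytes`.
    /// Returns the value and the number of bytes consumed, or None on
    /// truncation/overflow. Error handling is reduced to Option for brevity.
    fn decode_uleb128_u32(bytes: &[u8]) -> Option<(u32, usize)> {
        let mut result: u32 = 0;
        let mut shift = 0;
        for (i, &byte) in bytes.iter().enumerate() {
            let payload = (byte & 0x7F) as u32;
            // The fifth byte may only use the remaining 4 bits of a u32.
            if shift == 28 && (payload >> 4) != 0 {
                return None;
            }
            // A sixth byte can never be valid for a u32.
            if shift > 28 {
                return None;
            }
            result |= payload << shift;
            if byte & 0x80 == 0 {
                // Continuation bit clear: this was the last byte.
                return Some((result, i + 1));
            }
            shift += 7;
        }
        None // Ran out of input before the terminating byte.
    }

    fn main() {
        // 624485 encodes as [0xE5, 0x8E, 0x26] in unsigned LEB128.
        assert_eq!(decode_uleb128_u32(&[0xE5, 0x8E, 0x26]), Some((624_485, 3)));
        assert_eq!(decode_uleb128_u32(&[0x80]), None); // truncated input
    }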
@ -513,10 +517,10 @@ impl<'a> BinaryReader<'a> {
|
|||
return Ok(());
|
||||
}
|
||||
}
|
||||
Err(BinaryReaderError::new(
|
||||
"Invalid var_32",
|
||||
self.original_position() - 1,
|
||||
))
|
||||
Err(BinaryReaderError {
|
||||
message: "Invalid var_32",
|
||||
offset: self.original_position() - 1,
|
||||
})
|
||||
}
|
||||
|
||||
/// Alias method for `BinaryReader::skip_var_u32`.
|
||||
|
@ -542,10 +546,10 @@ impl<'a> BinaryReader<'a> {
|
|||
pub fn skip_string(&mut self) -> Result<()> {
|
||||
let len = self.read_var_u32()? as usize;
|
||||
if len > MAX_WASM_STRING_SIZE {
|
||||
return Err(BinaryReaderError::new(
|
||||
"string size in out of bounds",
|
||||
self.original_position() - 1,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "string size in out of bounds",
|
||||
offset: self.original_position() - 1,
|
||||
});
|
||||
}
|
||||
self.skip_bytes(len)
|
||||
}
|
||||
|
@ -579,10 +583,10 @@ impl<'a> BinaryReader<'a> {
|
|||
let continuation_bit = (byte & 0x80) != 0;
|
||||
let sign_and_unused_bit = (byte << 1) as i8 >> (32 - shift);
|
||||
if continuation_bit || (sign_and_unused_bit != 0 && sign_and_unused_bit != -1) {
|
||||
return Err(BinaryReaderError::new(
|
||||
"Invalid var_i32",
|
||||
self.original_position() - 1,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "Invalid var_i32",
|
||||
offset: self.original_position() - 1,
|
||||
});
|
||||
}
|
||||
return Ok(result);
|
||||
}
|
||||
|
@ -616,10 +620,10 @@ impl<'a> BinaryReader<'a> {
|
|||
let continuation_bit = (byte & 0x80) != 0;
|
||||
let sign_and_unused_bit = (byte << 1) as i8 >> (33 - shift);
|
||||
if continuation_bit || (sign_and_unused_bit != 0 && sign_and_unused_bit != -1) {
|
||||
return Err(BinaryReaderError::new(
|
||||
"Invalid var_s33",
|
||||
self.original_position() - 1,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "Invalid var_s33",
|
||||
offset: self.original_position() - 1,
|
||||
});
|
||||
}
|
||||
return Ok(result);
|
||||
}
|
||||
|
@ -647,10 +651,10 @@ impl<'a> BinaryReader<'a> {
|
|||
let continuation_bit = (byte & 0x80) != 0;
|
||||
let sign_and_unused_bit = ((byte << 1) as i8) >> (64 - shift);
|
||||
if continuation_bit || (sign_and_unused_bit != 0 && sign_and_unused_bit != -1) {
|
||||
return Err(BinaryReaderError::new(
|
||||
"Invalid var_i64",
|
||||
self.original_position() - 1,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "Invalid var_i64",
|
||||
offset: self.original_position() - 1,
|
||||
});
|
||||
}
|
||||
return Ok(result);
|
||||
}
|
||||
|
@ -691,24 +695,25 @@ impl<'a> BinaryReader<'a> {
|
|||
pub fn read_string(&mut self) -> Result<&'a str> {
|
||||
let len = self.read_var_u32()? as usize;
|
||||
if len > MAX_WASM_STRING_SIZE {
|
||||
return Err(BinaryReaderError::new(
|
||||
"string size in out of bounds",
|
||||
self.original_position() - 1,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "string size in out of bounds",
|
||||
offset: self.original_position() - 1,
|
||||
});
|
||||
}
|
||||
let bytes = self.read_bytes(len)?;
|
||||
str::from_utf8(bytes).map_err(|_| {
|
||||
BinaryReaderError::new("invalid UTF-8 encoding", self.original_position() - 1)
|
||||
str::from_utf8(bytes).map_err(|_| BinaryReaderError {
|
||||
message: "non-utf8 string",
|
||||
offset: self.original_position() - 1,
|
||||
})
|
||||
}
|
||||
|
||||
fn read_memarg_of_align(&mut self, max_align: u32) -> Result<MemoryImmediate> {
|
||||
let imm = self.read_memarg()?;
|
||||
if imm.flags > max_align {
|
||||
return Err(BinaryReaderError::new(
|
||||
"alignment must not be larger than natural",
|
||||
self.original_position() - 1,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "Unexpected memarg alignment",
|
||||
offset: self.original_position() - 1,
|
||||
});
|
||||
}
|
||||
Ok(imm)
|
||||
}
|
||||
|
@ -919,10 +924,10 @@ impl<'a> BinaryReader<'a> {
|
|||
},
|
||||
|
||||
_ => {
|
||||
return Err(BinaryReaderError::new(
|
||||
"Unknown 0xFE opcode",
|
||||
self.original_position() - 1,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "Unknown 0xFE opcode",
|
||||
offset: self.original_position() - 1,
|
||||
});
|
||||
}
|
||||
})
|
||||
}
|
||||
|
@ -935,7 +940,10 @@ impl<'a> BinaryReader<'a> {
|
|||
self.position = position;
|
||||
let idx = self.read_var_s33()?;
|
||||
if idx < 0 || idx > (std::u32::MAX as i64) {
|
||||
return Err(BinaryReaderError::new("invalid function type", position));
|
||||
return Err(BinaryReaderError {
|
||||
message: "invalid function type",
|
||||
offset: position,
|
||||
});
|
||||
}
|
||||
Ok(TypeOrFuncType::FuncType(idx as u32))
|
||||
}
|
||||
|
@ -983,10 +991,10 @@ impl<'a> BinaryReader<'a> {
|
|||
0x1c => {
|
||||
let results = self.read_var_u32()?;
|
||||
if results != 1 {
|
||||
return Err(BinaryReaderError::new(
|
||||
"bad number of results",
|
||||
self.position,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "bad number of results",
|
||||
offset: self.position,
|
||||
});
|
||||
}
|
||||
Operator::TypedSelect {
|
||||
ty: self.read_type()?,
|
||||
|
@ -1241,10 +1249,10 @@ impl<'a> BinaryReader<'a> {
|
|||
0xfe => self.read_0xfe_operator()?,
|
||||
|
||||
_ => {
|
||||
return Err(BinaryReaderError::new(
|
||||
"Unknown opcode",
|
||||
self.original_position() - 1,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "Unknown opcode",
|
||||
offset: self.original_position() - 1,
|
||||
});
|
||||
}
|
||||
})
|
||||
}
|
||||
|
@ -1265,10 +1273,10 @@ impl<'a> BinaryReader<'a> {
|
|||
let segment = self.read_var_u32()?;
|
||||
let mem = self.read_u8()?;
|
||||
if mem != 0 {
|
||||
return Err(BinaryReaderError::new(
|
||||
"reserved byte must be zero",
|
||||
self.original_position() - 1,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "reserved byte must be zero",
|
||||
offset: self.original_position() - 1,
|
||||
});
|
||||
}
|
||||
Operator::MemoryInit { segment }
|
||||
}
|
||||
|
@ -1279,27 +1287,27 @@ impl<'a> BinaryReader<'a> {
|
|||
0x0a => {
|
||||
let dst = self.read_u8()?;
|
||||
if dst != 0 {
|
||||
return Err(BinaryReaderError::new(
|
||||
"reserved byte must be zero",
|
||||
self.original_position() - 1,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "reserved byte must be zero",
|
||||
offset: self.original_position() - 1,
|
||||
});
|
||||
}
|
||||
let src = self.read_u8()?;
|
||||
if src != 0 {
|
||||
return Err(BinaryReaderError::new(
|
||||
"reserved byte must be zero",
|
||||
self.original_position() - 1,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "reserved byte must be zero",
|
||||
offset: self.original_position() - 1,
|
||||
});
|
||||
}
|
||||
Operator::MemoryCopy
|
||||
}
|
||||
0x0b => {
|
||||
let mem = self.read_u8()?;
|
||||
if mem != 0 {
|
||||
return Err(BinaryReaderError::new(
|
||||
"reserved byte must be zero",
|
||||
self.original_position() - 1,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "reserved byte must be zero",
|
||||
offset: self.original_position() - 1,
|
||||
});
|
||||
}
|
||||
Operator::MemoryFill
|
||||
}
|
||||
|
@ -1336,10 +1344,10 @@ impl<'a> BinaryReader<'a> {
|
|||
}
|
||||
|
||||
_ => {
|
||||
return Err(BinaryReaderError::new(
|
||||
"Unknown 0xfc opcode",
|
||||
self.original_position() - 1,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "Unknown 0xfc opcode",
|
||||
offset: self.original_position() - 1,
|
||||
});
|
||||
}
|
||||
})
|
||||
}
|
||||
|
@ -1347,10 +1355,10 @@ impl<'a> BinaryReader<'a> {
|
|||
fn read_lane_index(&mut self, max: u32) -> Result<SIMDLaneIndex> {
|
||||
let index = self.read_u8()?;
|
||||
if index >= max {
|
||||
return Err(BinaryReaderError::new(
|
||||
"invalid lane index",
|
||||
self.original_position() - 1,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "invalid lane index",
|
||||
offset: self.original_position() - 1,
|
||||
});
|
||||
}
|
||||
Ok(index as SIMDLaneIndex)
|
||||
}
|
||||
|
@ -1481,10 +1489,6 @@ impl<'a> BinaryReader<'a> {
|
|||
0x5b => Operator::I8x16SubSaturateS,
|
||||
0x5c => Operator::I8x16SubSaturateU,
|
||||
0x5d => Operator::I8x16Mul,
|
||||
0x5e => Operator::I8x16MinS,
|
||||
0x5f => Operator::I8x16MinU,
|
||||
0x60 => Operator::I8x16MaxS,
|
||||
0x61 => Operator::I8x16MaxU,
|
||||
0x62 => Operator::I16x8Neg,
|
||||
0x63 => Operator::I16x8AnyTrue,
|
||||
0x64 => Operator::I16x8AllTrue,
|
||||
|
@ -1498,10 +1502,6 @@ impl<'a> BinaryReader<'a> {
|
|||
0x6c => Operator::I16x8SubSaturateS,
|
||||
0x6d => Operator::I16x8SubSaturateU,
|
||||
0x6e => Operator::I16x8Mul,
|
||||
0x6f => Operator::I16x8MinS,
|
||||
0x70 => Operator::I16x8MinU,
|
||||
0x71 => Operator::I16x8MaxS,
|
||||
0x72 => Operator::I16x8MaxU,
|
||||
0x73 => Operator::I32x4Neg,
|
||||
0x74 => Operator::I32x4AnyTrue,
|
||||
0x75 => Operator::I32x4AllTrue,
|
||||
|
@ -1511,10 +1511,6 @@ impl<'a> BinaryReader<'a> {
|
|||
0x79 => Operator::I32x4Add,
|
||||
0x7c => Operator::I32x4Sub,
|
||||
0x7f => Operator::I32x4Mul,
|
||||
0x80 => Operator::I32x4MinS,
|
||||
0x81 => Operator::I32x4MinU,
|
||||
0x82 => Operator::I32x4MaxS,
|
||||
0x83 => Operator::I32x4MaxU,
|
||||
0x84 => Operator::I64x2Neg,
|
||||
0x85 => Operator::I64x2AnyTrue,
|
||||
0x86 => Operator::I64x2AllTrue,
|
||||
|
@ -1604,10 +1600,10 @@ impl<'a> BinaryReader<'a> {
|
|||
0xd9 => Operator::I8x16RoundingAverageU,
|
||||
0xda => Operator::I16x8RoundingAverageU,
|
||||
_ => {
|
||||
return Err(BinaryReaderError::new(
|
||||
"Unknown 0xfd opcode",
|
||||
self.original_position() - 1,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "Unknown 0xfd opcode",
|
||||
offset: self.original_position() - 1,
|
||||
});
|
||||
}
|
||||
})
|
||||
}
|
||||
|
@ -1615,17 +1611,17 @@ impl<'a> BinaryReader<'a> {
|
|||
pub(crate) fn read_file_header(&mut self) -> Result<u32> {
|
||||
let magic_number = self.read_bytes(4)?;
|
||||
if magic_number != WASM_MAGIC_NUMBER {
|
||||
return Err(BinaryReaderError::new(
|
||||
"Bad magic number",
|
||||
self.original_position() - 4,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "Bad magic number",
|
||||
offset: self.original_position() - 4,
|
||||
});
|
||||
}
|
||||
let version = self.read_u32()?;
|
||||
if version != WASM_SUPPORTED_VERSION && version != WASM_EXPERIMENTAL_VERSION {
|
||||
return Err(BinaryReaderError::new(
|
||||
"Bad version number",
|
||||
self.original_position() - 4,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "Bad version number",
|
||||
offset: self.original_position() - 4,
|
||||
});
|
||||
}
|
||||
Ok(version)
|
||||
}
|
||||
|
@ -1649,10 +1645,10 @@ impl<'a> BinaryReader<'a> {
|
|||
0 => Ok(NameType::Module),
|
||||
1 => Ok(NameType::Function),
|
||||
2 => Ok(NameType::Local),
|
||||
_ => Err(BinaryReaderError::new(
|
||||
"Invalid name type",
|
||||
self.original_position() - 1,
|
||||
)),
|
||||
_ => Err(BinaryReaderError {
|
||||
message: "Invalid name type",
|
||||
offset: self.original_position() - 1,
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1661,10 +1657,10 @@ impl<'a> BinaryReader<'a> {
|
|||
Ok(match ty {
|
||||
1 => LinkingType::StackPointer(self.read_var_u32()?),
|
||||
_ => {
|
||||
return Err(BinaryReaderError::new(
|
||||
"Invalid linking type",
|
||||
self.original_position() - 1,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "Invalid linking type",
|
||||
offset: self.original_position() - 1,
|
||||
});
|
||||
}
|
||||
})
|
||||
}
|
||||
|
@ -1680,10 +1676,10 @@ impl<'a> BinaryReader<'a> {
|
|||
5 => Ok(RelocType::GlobalAddrI32),
|
||||
6 => Ok(RelocType::TypeIndexLEB),
|
||||
7 => Ok(RelocType::GlobalIndexLEB),
|
||||
_ => Err(BinaryReaderError::new(
|
||||
"Invalid reloc type",
|
||||
self.original_position() - 1,
|
||||
)),
|
||||
_ => Err(BinaryReaderError {
|
||||
message: "Invalid reloc type",
|
||||
offset: self.original_position() - 1,
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1730,11 +1726,9 @@ impl<'a> BrTable<'a> {
|
|||
while !reader.eof() {
|
||||
table.push(reader.read_var_u32()?);
|
||||
}
|
||||
let default_target = table.pop().ok_or_else(|| {
|
||||
BinaryReaderError::new(
|
||||
"br_table missing default target",
|
||||
reader.original_position(),
|
||||
)
|
||||
let default_target = table.pop().ok_or_else(|| BinaryReaderError {
|
||||
message: "br_table missing default target",
|
||||
offset: reader.original_position(),
|
||||
})?;
|
||||
Ok((table.into_boxed_slice(), default_target))
|
||||
}
|
||||
|
|
|
@ -14,11 +14,13 @@
|
|||
*/
|
||||
|
||||
use std::cmp::min;
|
||||
use std::result;
|
||||
use std::str;
|
||||
|
||||
use crate::primitives::{MemoryImmediate, Operator, SIMDLaneIndex, Type, TypeOrFuncType};
|
||||
use crate::{
|
||||
wasm_func_type_inputs, wasm_func_type_outputs, BinaryReaderError, WasmFuncType, WasmGlobalType,
|
||||
WasmMemoryType, WasmModuleResources, WasmTableType, WasmType,
|
||||
wasm_func_type_inputs, wasm_func_type_outputs, WasmFuncType, WasmGlobalType, WasmMemoryType,
|
||||
WasmModuleResources, WasmTableType, WasmType,
|
||||
};
|
||||
|
||||
/// Test if `subtype` is a subtype of `supertype`.
|
||||
|
@ -36,16 +38,10 @@ pub(crate) fn is_subtype_supertype(subtype: Type, supertype: Type) -> bool {
|
|||
struct BlockState {
|
||||
start_types: Vec<Type>,
|
||||
return_types: Vec<Type>,
|
||||
// Position in `FuncState::stack_types` array where block values
|
||||
// start.
|
||||
stack_starts_at: usize,
|
||||
// True for loop.
|
||||
jump_to_top: bool,
|
||||
is_else_allowed: bool,
|
||||
is_dead_code: bool,
|
||||
// Amount of the required polymorphic values at the stack_starts_at
|
||||
// position in `FuncState::stack_types` array. These values are
|
||||
// fictitious and are not actually present in the stack_types.
|
||||
polymorphic_values: Option<usize>,
|
||||
}
|
||||
|
||||
|
@ -114,7 +110,7 @@ impl FuncState {
|
|||
let len = self.stack_types.len();
|
||||
let remove_non_polymorphic = len
|
||||
.checked_sub(last_block.stack_starts_at)
|
||||
.ok_or_else(|| OperatorValidatorError::new("invalid block signature"))?
|
||||
.ok_or("invalid block signature")?
|
||||
.min(remove_count);
|
||||
self.stack_types.truncate(len - remove_non_polymorphic);
|
||||
let polymorphic_values = last_block.polymorphic_values.unwrap();
|
||||
|
@ -160,7 +156,6 @@ impl FuncState {
|
|||
}
|
||||
};
|
||||
if block_type == BlockType::If {
|
||||
// Collect conditional value from the stack_types.
|
||||
let last_block = self.blocks.last().unwrap();
|
||||
if !last_block.is_stack_polymorphic()
|
||||
|| self.stack_types.len() > last_block.stack_starts_at
|
||||
|
@ -171,23 +166,10 @@ impl FuncState {
|
|||
}
|
||||
for (i, ty) in start_types.iter().rev().enumerate() {
|
||||
if !self.assert_stack_type_at(i, *ty) {
|
||||
return Err(OperatorValidatorError::new("stack operand type mismatch"));
|
||||
return Err("stack operand type mismatch");
|
||||
}
|
||||
}
|
||||
let (stack_starts_at, polymorphic_values) = {
|
||||
// When stack for last block is polymorphic, ensure that
|
||||
// the polymorphic_values matches, and next block is informed about that.
|
||||
let last_block = self.blocks.last_mut().unwrap();
|
||||
if !last_block.is_stack_polymorphic()
|
||||
|| last_block.stack_starts_at + start_types.len() <= self.stack_types.len()
|
||||
{
|
||||
(self.stack_types.len() - start_types.len(), None)
|
||||
} else {
|
||||
let unknown_stack_types_len =
|
||||
last_block.stack_starts_at + start_types.len() - self.stack_types.len();
|
||||
(last_block.stack_starts_at, Some(unknown_stack_types_len))
|
||||
}
|
||||
};
|
||||
let stack_starts_at = self.stack_types.len() - start_types.len();
|
||||
self.blocks.push(BlockState {
|
||||
start_types,
|
||||
return_types,
|
||||
|
@ -195,7 +177,7 @@ impl FuncState {
|
|||
jump_to_top: block_type == BlockType::Loop,
|
||||
is_else_allowed: block_type == BlockType::If,
|
||||
is_dead_code: false,
|
||||
polymorphic_values,
|
||||
polymorphic_values: None,
|
||||
});
|
||||
Ok(())
|
||||
}
|
||||
|
@ -295,44 +277,7 @@ pub enum FunctionEnd {
|
|||
Yes,
|
||||
}
|
||||
|
||||
/// A wrapper around a `BinaryReaderError` where the inner error's offset is a
/// temporary placeholder value. This can be converted into a proper
/// `BinaryReaderError` via the `set_offset` method, which replaces the
/// placeholder offset with an actual offset.
pub(crate) struct OperatorValidatorError(BinaryReaderError);

/// Create an `OperatorValidatorError` with a format string.
macro_rules! format_op_err {
    ( $( $arg:expr ),* $(,)* ) => {
        OperatorValidatorError::new(format!( $( $arg ),* ))
    }
}

/// Early return an `Err(OperatorValidatorError)` with a format string.
macro_rules! bail_op_err {
    ( $( $arg:expr ),* $(,)* ) => {
        return Err(format_op_err!( $( $arg ),* ));
    }
}

impl OperatorValidatorError {
    /// Create a new `OperatorValidatorError` with a placeholder offset.
    pub(crate) fn new(message: impl Into<String>) -> Self {
        let offset = std::usize::MAX;
        let e = BinaryReaderError::new(message, offset);
        OperatorValidatorError(e)
    }

    /// Convert this `OperatorValidatorError` into a `BinaryReaderError` by
    /// supplying an actual offset to replace the internal placeholder offset.
    pub(crate) fn set_offset(mut self, offset: usize) -> BinaryReaderError {
        debug_assert_eq!(self.0.inner.offset, std::usize::MAX);
        self.0.inner.offset = offset;
        self.0
    }
}

type OperatorValidatorResult<T> = std::result::Result<T, OperatorValidatorError>;
type OperatorValidatorResult<T> = result::Result<T, &'static str>;
|
||||
|
||||
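Editor's note: the OperatorValidatorError removed above exists because the operator validator does not know the bytecode offset it is validating; it stores a placeholder offset (std::usize::MAX) and the caller patches in the real offset via set_offset before surfacing the error. The backout returns to plain &'static str validator errors. A self-contained sketch of the placeholder-offset pattern; the types and names below are hypothetical, not the crate's.

    /// Hypothetical stand-in for an error type that carries a bytecode offset.
    #[derive(Debug)]
    struct OffsetError {
        message: String,
        offset: usize,
    }

    /// Validator-side error whose offset is a placeholder until the caller,
    /// which tracks the current bytecode position, fills in the real value.
    #[derive(Debug)]
    struct PendingError(OffsetError);

    impl PendingError {
        fn new(message: impl Into<String>) -> Self {
            PendingError(OffsetError { message: message.into(), offset: usize::MAX })
        }

        /// Replace the placeholder offset with the actual one.
        fn at_offset(mut self, offset: usize) -> OffsetError {
            debug_assert_eq!(self.0.offset, usize::MAX);
            self.0.offset = offset;
            self.0
        }
    }

    fn validate(stack_len: usize) -> Result<(), PendingError> {
        if stack_len == 0 {
            return Err(PendingError::new("type mismatch: not enough operands"));
        }
        Ok(())
    }

    fn main() {
        // The caller knows the current bytecode offset and attaches it on failure.
        let current_offset = 0x2A;
        if let Err(e) = validate(0) {
            let err = e.at_offset(current_offset);
            println!("error at {:#x}: {}", err.offset, err.message);
        }
    }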
#[derive(Copy, Clone, Debug)]
|
||||
pub struct OperatorValidatorConfig {
|
||||
|
@ -358,30 +303,6 @@ pub(crate) const DEFAULT_OPERATOR_VALIDATOR_CONFIG: OperatorValidatorConfig =
|
|||
deterministic_only: true,
|
||||
};
|
||||
|
||||
pub(crate) fn check_value_type(
|
||||
ty: Type,
|
||||
operator_config: &OperatorValidatorConfig,
|
||||
) -> OperatorValidatorResult<()> {
|
||||
match ty {
|
||||
Type::I32 | Type::I64 | Type::F32 | Type::F64 => Ok(()),
|
||||
Type::NullRef | Type::AnyFunc | Type::AnyRef => {
|
||||
if !operator_config.enable_reference_types {
|
||||
return Err(OperatorValidatorError::new(
|
||||
"reference types support is not enabled",
|
||||
));
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
Type::V128 => {
|
||||
if !operator_config.enable_simd {
|
||||
return Err(OperatorValidatorError::new("SIMD support is not enabled"));
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
_ => Err(OperatorValidatorError::new("invalid value type")),
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct OperatorValidator {
|
||||
func_state: FuncState,
|
||||
|
@ -393,7 +314,7 @@ impl OperatorValidator {
|
|||
func_type: &F,
|
||||
locals: &[(u32, Type)],
|
||||
config: OperatorValidatorConfig,
|
||||
) -> OperatorValidatorResult<OperatorValidator>
|
||||
) -> OperatorValidator
|
||||
where
|
||||
F: WasmFuncType<Type = T>,
|
||||
T: WasmType,
|
||||
|
@ -403,7 +324,6 @@ impl OperatorValidator {
|
|||
.map(WasmType::to_parser_type)
|
||||
.collect::<Vec<_>>();
|
||||
for local in locals {
|
||||
check_value_type(local.1, &config)?;
|
||||
for _ in 0..local.0 {
|
||||
local_types.push(local.1);
|
||||
}
|
||||
|
@ -424,7 +344,7 @@ impl OperatorValidator {
|
|||
polymorphic_values: None,
|
||||
});
|
||||
|
||||
Ok(OperatorValidator {
|
||||
OperatorValidator {
|
||||
func_state: FuncState {
|
||||
local_types,
|
||||
blocks,
|
||||
|
@ -432,7 +352,7 @@ impl OperatorValidator {
|
|||
end_function: false,
|
||||
},
|
||||
config,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_dead_code(&self) -> bool {
|
||||
|
@ -441,9 +361,7 @@ impl OperatorValidator {
|
|||
|
||||
fn check_frame_size(&self, require_count: usize) -> OperatorValidatorResult<()> {
|
||||
if !self.func_state.assert_block_stack_len(0, require_count) {
|
||||
Err(OperatorValidatorError::new(
|
||||
"type mismatch: not enough operands",
|
||||
))
|
||||
Err("not enough operands")
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
|
@ -452,7 +370,7 @@ impl OperatorValidator {
|
|||
fn check_operands_1(&self, operand: Type) -> OperatorValidatorResult<()> {
|
||||
self.check_frame_size(1)?;
|
||||
if !self.func_state.assert_stack_type_at(0, operand) {
|
||||
return Err(OperatorValidatorError::new("stack operand type mismatch"));
|
||||
return Err("stack operand type mismatch");
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -460,10 +378,10 @@ impl OperatorValidator {
|
|||
fn check_operands_2(&self, operand1: Type, operand2: Type) -> OperatorValidatorResult<()> {
|
||||
self.check_frame_size(2)?;
|
||||
if !self.func_state.assert_stack_type_at(1, operand1) {
|
||||
return Err(OperatorValidatorError::new("stack operand type mismatch"));
|
||||
return Err("stack operand type mismatch");
|
||||
}
|
||||
if !self.func_state.assert_stack_type_at(0, operand2) {
|
||||
return Err(OperatorValidatorError::new("stack operand type mismatch"));
|
||||
return Err("stack operand type mismatch");
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -476,13 +394,13 @@ impl OperatorValidator {
|
|||
) -> OperatorValidatorResult<()> {
|
||||
self.check_frame_size(3)?;
|
||||
if !self.func_state.assert_stack_type_at(2, operand1) {
|
||||
return Err(OperatorValidatorError::new("stack operand type mismatch"));
|
||||
return Err("stack operand type mismatch");
|
||||
}
|
||||
if !self.func_state.assert_stack_type_at(1, operand2) {
|
||||
return Err(OperatorValidatorError::new("stack operand type mismatch"));
|
||||
return Err("stack operand type mismatch");
|
||||
}
|
||||
if !self.func_state.assert_stack_type_at(0, operand3) {
|
||||
return Err(OperatorValidatorError::new("stack operand type mismatch"));
|
||||
return Err("stack operand type mismatch");
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -498,7 +416,7 @@ impl OperatorValidator {
|
|||
.func_state
|
||||
.assert_stack_type_at(len - 1 - i, expected_type)
|
||||
{
|
||||
return Err(OperatorValidatorError::new("stack operand type mismatch"));
|
||||
return Err("stack operand type mismatch");
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
|
@ -510,10 +428,8 @@ impl OperatorValidator {
|
|||
reserve_items: usize,
|
||||
) -> OperatorValidatorResult<()> {
|
||||
if !self.config.enable_multi_value && block.return_types.len() > 1 {
|
||||
return Err(OperatorValidatorError::new(
|
||||
"blocks, loops, and ifs may only return at most one \
|
||||
value when multi-value is not enabled",
|
||||
));
|
||||
return Err("blocks, loops, and ifs may only return at most one \
|
||||
value when multi-value is not enabled");
|
||||
}
|
||||
let len = block.return_types.len();
|
||||
for i in 0..len {
|
||||
|
@ -521,9 +437,7 @@ impl OperatorValidator {
|
|||
.func_state
|
||||
.assert_stack_type_at(len - 1 - i + reserve_items, block.return_types[i])
|
||||
{
|
||||
return Err(OperatorValidatorError::new(
|
||||
"type mismatch: stack item type does not match block item type",
|
||||
));
|
||||
return Err("stack item type does not match block item type");
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
|
@ -532,9 +446,7 @@ impl OperatorValidator {
|
|||
fn check_block_return(&self) -> OperatorValidatorResult<()> {
|
||||
let len = self.func_state.last_block().return_types.len();
|
||||
if !self.func_state.assert_last_block_stack_len_exact(len) {
|
||||
return Err(OperatorValidatorError::new(
|
||||
"type mismatch: stack size does not match block type",
|
||||
));
|
||||
return Err("stack size does not match block type");
|
||||
}
|
||||
self.check_block_return_types(self.func_state.last_block(), 0)
|
||||
}
|
||||
|
@ -545,30 +457,12 @@ impl OperatorValidator {
|
|||
reserve_items: usize,
|
||||
) -> OperatorValidatorResult<()> {
|
||||
if relative_depth as usize >= self.func_state.blocks.len() {
|
||||
return Err(OperatorValidatorError::new(
|
||||
"unknown label: invalid block depth",
|
||||
));
|
||||
return Err("invalid block depth");
|
||||
}
|
||||
let block = self.func_state.block_at(relative_depth as usize);
|
||||
if block.jump_to_top {
|
||||
let len = block.start_types.len();
|
||||
if !self
|
||||
.func_state
|
||||
.assert_block_stack_len(0, reserve_items + len)
|
||||
{
|
||||
return Err(OperatorValidatorError::new(
|
||||
"type mismatch: stack size does not match target loop type",
|
||||
));
|
||||
}
|
||||
for i in 0..len {
|
||||
if !self
|
||||
.func_state
|
||||
.assert_stack_type_at(len - 1 - i + reserve_items, block.start_types[i])
|
||||
{
|
||||
return Err(OperatorValidatorError::new(
|
||||
"type mismatch: stack item type does not match block param type",
|
||||
));
|
||||
}
|
||||
if !self.func_state.assert_block_stack_len(0, reserve_items) {
|
||||
return Err("stack size does not match target loop type");
|
||||
}
|
||||
return Ok(());
|
||||
}
|
||||
|
@ -578,23 +472,17 @@ impl OperatorValidator {
|
|||
.func_state
|
||||
.assert_block_stack_len(0, len + reserve_items)
|
||||
{
|
||||
return Err(OperatorValidatorError::new(
|
||||
"type mismatch: stack size does not match target block type",
|
||||
));
|
||||
return Err("stack size does not match target block type");
|
||||
}
|
||||
self.check_block_return_types(block, reserve_items)
|
||||
}
|
||||
|
||||
fn match_block_return(&self, depth1: u32, depth2: u32) -> OperatorValidatorResult<()> {
|
||||
if depth1 as usize >= self.func_state.blocks.len() {
|
||||
return Err(OperatorValidatorError::new(
|
||||
"unknown label: invalid block depth",
|
||||
));
|
||||
return Err("invalid block depth");
|
||||
}
|
||||
if depth2 as usize >= self.func_state.blocks.len() {
|
||||
return Err(OperatorValidatorError::new(
|
||||
"unknown label: invalid block depth",
|
||||
));
|
||||
return Err("invalid block depth");
|
||||
}
|
||||
let block1 = self.func_state.block_at(depth1 as usize);
|
||||
let block2 = self.func_state.block_at(depth2 as usize);
|
||||
|
@ -603,19 +491,13 @@ impl OperatorValidator {
|
|||
if block1.jump_to_top || block2.jump_to_top {
|
||||
if block1.jump_to_top {
|
||||
if !block2.jump_to_top && !return_types2.is_empty() {
|
||||
return Err(OperatorValidatorError::new(
|
||||
"type mismatch: block types do not match",
|
||||
));
|
||||
return Err("block types do not match");
|
||||
}
|
||||
} else if !return_types1.is_empty() {
|
||||
return Err(OperatorValidatorError::new(
|
||||
"type mismatch: block types do not match",
|
||||
));
|
||||
return Err("block types do not match");
|
||||
}
|
||||
} else if *return_types1 != *return_types2 {
|
||||
return Err(OperatorValidatorError::new(
|
||||
"type mismatch: block types do not match",
|
||||
));
|
||||
return Err("block types do not match");
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -636,7 +518,7 @@ impl OperatorValidator {
|
|||
>,
|
||||
) -> OperatorValidatorResult<()> {
|
||||
if resources.memory_at(memory_index).is_none() {
|
||||
bail_op_err!("unknown memory {}", memory_index);
|
||||
return Err("no linear memories are present");
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -658,15 +540,9 @@ impl OperatorValidator {
|
|||
) -> OperatorValidatorResult<()> {
|
||||
match resources.memory_at(memory_index) {
|
||||
Some(memory) if !memory.is_shared() => {
|
||||
return Err(OperatorValidatorError::new(
|
||||
"atomic accesses require shared memory",
|
||||
))
|
||||
}
|
||||
None => {
|
||||
return Err(OperatorValidatorError::new(
|
||||
"no linear memories are present",
|
||||
))
|
||||
return Err("atomic accesses require shared memory")
|
||||
}
|
||||
None => return Err("no linear memories are present"),
|
||||
_ => Ok(()),
|
||||
}
|
||||
}
|
||||
|
@ -685,9 +561,7 @@ impl OperatorValidator {
|
|||
self.check_memory_index(0, resources)?;
|
||||
let align = memarg.flags;
|
||||
if align > max_align {
|
||||
return Err(OperatorValidatorError::new(
|
||||
"alignment must not be larger than natural",
|
||||
));
|
||||
return Err("alignment must not be larger than natural");
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
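The memarg hunk just above rejects alignment exponents larger than the access's natural alignment: in the wasm binary format the memarg alignment is encoded as log2 of the byte alignment, so an 8-byte `i64.load` allows at most exponent 3. A small standalone sketch of that rule, assuming (as the diff suggests with `let align = memarg.flags;`) that the flags value already holds the decoded exponent; `natural_align_exponent` is an illustrative helper, not wasmparser API.

    // Illustrative helper: log2 of the access width in bytes
    // (1 -> 0, 2 -> 1, 4 -> 2, 8 -> 3). Not part of wasmparser.
    fn natural_align_exponent(access_bytes: u32) -> u32 {
        access_bytes.trailing_zeros()
    }

    // Mirrors the restored check: the encoded exponent must not exceed the
    // natural alignment of the access.
    fn check_align(align: u32, max_align: u32) -> Result<(), &'static str> {
        if align > max_align {
            return Err("alignment must not be larger than natural");
        }
        Ok(())
    }

    fn main() {
        let max = natural_align_exponent(8); // i64.load -> 3
        assert!(check_align(3, max).is_ok()); // claims 8-byte alignment: accepted
        assert!(check_align(4, max).is_err()); // claims 16-byte alignment: rejected
    }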
@ -695,9 +569,7 @@ impl OperatorValidator {
|
|||
#[cfg(feature = "deterministic")]
|
||||
fn check_non_deterministic_enabled(&self) -> OperatorValidatorResult<()> {
|
||||
if !self.config.deterministic_only {
|
||||
return Err(OperatorValidatorError::new(
|
||||
"deterministic_only support is not enabled",
|
||||
));
|
||||
return Err("deterministic_only support is not enabled");
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -710,34 +582,28 @@ impl OperatorValidator {
|
|||
|
||||
fn check_threads_enabled(&self) -> OperatorValidatorResult<()> {
|
||||
if !self.config.enable_threads {
|
||||
return Err(OperatorValidatorError::new(
|
||||
"threads support is not enabled",
|
||||
));
|
||||
return Err("threads support is not enabled");
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn check_reference_types_enabled(&self) -> OperatorValidatorResult<()> {
|
||||
if !self.config.enable_reference_types {
|
||||
return Err(OperatorValidatorError::new(
|
||||
"reference types support is not enabled",
|
||||
));
|
||||
return Err("reference types support is not enabled");
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn check_simd_enabled(&self) -> OperatorValidatorResult<()> {
|
||||
if !self.config.enable_simd {
|
||||
return Err(OperatorValidatorError::new("SIMD support is not enabled"));
|
||||
return Err("SIMD support is not enabled");
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn check_bulk_memory_enabled(&self) -> OperatorValidatorResult<()> {
|
||||
if !self.config.enable_bulk_memory {
|
||||
return Err(OperatorValidatorError::new(
|
||||
"bulk memory support is not enabled",
|
||||
));
|
||||
return Err("bulk memory support is not enabled");
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -763,7 +629,7 @@ impl OperatorValidator {
|
|||
|
||||
fn check_simd_lane_index(&self, index: SIMDLaneIndex, max: u8) -> OperatorValidatorResult<()> {
|
||||
if index >= max {
|
||||
return Err(OperatorValidatorError::new("SIMD index out of bounds"));
|
||||
return Err("SIMD index out of bounds");
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -789,25 +655,21 @@ impl OperatorValidator {
|
|||
}
|
||||
TypeOrFuncType::Type(Type::V128) => self.check_simd_enabled(),
|
||||
TypeOrFuncType::FuncType(idx) => match resources.type_at(idx) {
|
||||
None => Err(OperatorValidatorError::new("type index out of bounds")),
|
||||
None => Err("type index out of bounds"),
|
||||
Some(ty) if !self.config.enable_multi_value => {
|
||||
if ty.len_outputs() > 1 {
|
||||
return Err(OperatorValidatorError::new(
|
||||
"blocks, loops, and ifs may only return at most one \
|
||||
value when multi-value is not enabled",
|
||||
));
|
||||
return Err("blocks, loops, and ifs may only return at most one \
|
||||
value when multi-value is not enabled");
|
||||
}
|
||||
if ty.len_inputs() > 0 {
|
||||
return Err(OperatorValidatorError::new(
|
||||
"blocks, loops, and ifs accept no parameters \
|
||||
when multi-value is not enabled",
|
||||
));
|
||||
return Err("blocks, loops, and ifs accept no parameters \
|
||||
when multi-value is not enabled");
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
Some(_) => Ok(()),
|
||||
},
|
||||
_ => Err(OperatorValidatorError::new("invalid block return type")),
|
||||
_ => Err("invalid block return type"),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -842,9 +704,7 @@ impl OperatorValidator {
|
|||
.func_state
|
||||
.assert_stack_type_at(len - 1 - i + skip, ty.to_parser_type())
|
||||
{
|
||||
return Err(OperatorValidatorError::new(
|
||||
"stack operand type mismatch for block",
|
||||
));
|
||||
return Err("stack operand type mismatch for block");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -880,7 +740,7 @@ impl OperatorValidator {
|
|||
};
|
||||
|
||||
if !ty.is_valid_for_old_select() {
|
||||
return Err(OperatorValidatorError::new("invalid type for select"));
|
||||
return Err("invalid type for select");
|
||||
}
|
||||
|
||||
Ok(Some(ty))
|
||||
|
@ -902,7 +762,7 @@ impl OperatorValidator {
|
|||
>,
|
||||
) -> OperatorValidatorResult<FunctionEnd> {
|
||||
if self.func_state.end_function {
|
||||
return Err(OperatorValidatorError::new("unexpected operator"));
|
||||
return Err("unexpected operator");
|
||||
}
|
||||
match *operator {
|
||||
Operator::Unreachable => self.func_state.start_dead_code(),
|
||||
|
@ -926,9 +786,7 @@ impl OperatorValidator {
|
|||
}
|
||||
Operator::Else => {
|
||||
if !self.func_state.last_block().is_else_allowed {
|
||||
return Err(OperatorValidatorError::new(
|
||||
"unexpected else: if block is not started",
|
||||
));
|
||||
return Err("unexpected else: if block is not started");
|
||||
}
|
||||
self.check_block_return()?;
|
||||
self.func_state.reset_block()
|
||||
|
@ -942,7 +800,7 @@ impl OperatorValidator {
|
|||
|
||||
let last_block = &self.func_state.last_block();
|
||||
if last_block.is_else_allowed && last_block.start_types != last_block.return_types {
|
||||
return Err(OperatorValidatorError::new("type mismatch: else is expected: if block has a type that can't be implemented with a no-op"));
|
||||
return Err("else is expected: if block has a type that can't be implemented with a no-op");
|
||||
}
|
||||
self.func_state.pop_block()
|
||||
}
|
||||
|
@ -993,25 +851,14 @@ impl OperatorValidator {
|
|||
wasm_func_type_outputs(ty).map(WasmType::to_parser_type),
|
||||
)?;
|
||||
}
|
||||
None => {
|
||||
bail_op_err!(
|
||||
"unknown function {}: function index out of bounds",
|
||||
function_index
|
||||
);
|
||||
}
|
||||
None => return Err("function index out of bounds"),
|
||||
},
|
||||
Operator::CallIndirect { index, table_index } => {
|
||||
if resources.table_at(table_index).is_none() {
|
||||
return Err(OperatorValidatorError::new(
|
||||
"unknown table: table index out of bounds",
|
||||
));
|
||||
return Err("table index out of bounds");
|
||||
}
|
||||
match resources.type_at(index) {
|
||||
None => {
|
||||
return Err(OperatorValidatorError::new(
|
||||
"unknown type: type index out of bounds",
|
||||
))
|
||||
}
|
||||
None => return Err("type index out of bounds"),
|
||||
Some(ty) => {
|
||||
let types = {
|
||||
let mut types = Vec::with_capacity(ty.len_inputs() + 1);
|
||||
|
@ -1041,14 +888,14 @@ impl OperatorValidator {
|
|||
}
|
||||
Operator::LocalGet { local_index } => {
|
||||
if local_index as usize >= self.func_state.local_types.len() {
|
||||
bail_op_err!("unknown local {}: local index out of bounds", local_index);
|
||||
return Err("local index out of bounds");
|
||||
}
|
||||
let local_type = self.func_state.local_types[local_index as usize];
|
||||
self.func_state.change_frame_with_type(0, local_type)?;
|
||||
}
|
||||
Operator::LocalSet { local_index } => {
|
||||
if local_index as usize >= self.func_state.local_types.len() {
|
||||
bail_op_err!("unknown local {}: local index out of bounds", local_index);
|
||||
return Err("local index out of bounds");
|
||||
}
|
||||
let local_type = self.func_state.local_types[local_index as usize];
|
||||
self.check_operands_1(local_type)?;
|
||||
|
@ -1056,7 +903,7 @@ impl OperatorValidator {
|
|||
}
|
||||
Operator::LocalTee { local_index } => {
|
||||
if local_index as usize >= self.func_state.local_types.len() {
|
||||
bail_op_err!("unknown local {}: local index out of bounds", local_index);
|
||||
return Err("local index out of bounds");
|
||||
}
|
||||
let local_type = self.func_state.local_types[local_index as usize];
|
||||
self.check_operands_1(local_type)?;
|
||||
|
@ -1067,24 +914,18 @@ impl OperatorValidator {
|
|||
self.func_state
|
||||
.change_frame_with_type(0, ty.content_type().to_parser_type())?;
|
||||
} else {
|
||||
return Err(OperatorValidatorError::new(
|
||||
"unknown global: global index out of bounds",
|
||||
));
|
||||
return Err("global index out of bounds");
|
||||
};
|
||||
}
|
||||
Operator::GlobalSet { global_index } => {
|
||||
if let Some(ty) = resources.global_at(global_index) {
|
||||
if !ty.is_mutable() {
|
||||
return Err(OperatorValidatorError::new(
|
||||
"global is immutable: cannot modify it with `global.set`",
|
||||
));
|
||||
return Err("global expected to be mutable");
|
||||
}
|
||||
self.check_operands_1(ty.content_type().to_parser_type())?;
|
||||
self.func_state.change_frame(1)?;
|
||||
} else {
|
||||
return Err(OperatorValidatorError::new(
|
||||
"unknown global: global index out of bounds",
|
||||
));
|
||||
return Err("global index out of bounds");
|
||||
};
|
||||
}
|
||||
Operator::I32Load { memarg } => {
|
||||
|
@ -1603,9 +1444,7 @@ impl OperatorValidator {
|
|||
Operator::AtomicFence { ref flags } => {
|
||||
self.check_threads_enabled()?;
|
||||
if *flags != 0 {
|
||||
return Err(OperatorValidatorError::new(
|
||||
"non-zero flags for fence not supported yet",
|
||||
));
|
||||
return Err("non-zero flags for fence not supported yet");
|
||||
}
|
||||
}
|
||||
Operator::RefNull => {
|
||||
|
@ -1620,9 +1459,7 @@ impl OperatorValidator {
|
|||
Operator::RefFunc { function_index } => {
|
||||
self.check_reference_types_enabled()?;
|
||||
if resources.func_type_id_at(function_index).is_none() {
|
||||
return Err(OperatorValidatorError::new(
|
||||
"unknown function: function index out of bounds",
|
||||
));
|
||||
return Err("function index out of bounds");
|
||||
}
|
||||
self.func_state.change_frame_with_type(0, Type::AnyFunc)?;
|
||||
}
|
||||
|
@ -1810,10 +1647,6 @@ impl OperatorValidator {
|
|||
| Operator::I8x16SubSaturateS
|
||||
| Operator::I8x16SubSaturateU
|
||||
| Operator::I8x16Mul
|
||||
| Operator::I8x16MinS
|
||||
| Operator::I8x16MinU
|
||||
| Operator::I8x16MaxS
|
||||
| Operator::I8x16MaxU
|
||||
| Operator::I16x8Add
|
||||
| Operator::I16x8AddSaturateS
|
||||
| Operator::I16x8AddSaturateU
|
||||
|
@ -1821,17 +1654,9 @@ impl OperatorValidator {
|
|||
| Operator::I16x8SubSaturateS
|
||||
| Operator::I16x8SubSaturateU
|
||||
| Operator::I16x8Mul
|
||||
| Operator::I16x8MinS
|
||||
| Operator::I16x8MinU
|
||||
| Operator::I16x8MaxS
|
||||
| Operator::I16x8MaxU
|
||||
| Operator::I32x4Add
|
||||
| Operator::I32x4Sub
|
||||
| Operator::I32x4Mul
|
||||
| Operator::I32x4MinS
|
||||
| Operator::I32x4MinU
|
||||
| Operator::I32x4MaxS
|
||||
| Operator::I32x4MaxU
|
||||
| Operator::I64x2Add
|
||||
| Operator::I64x2Sub
|
||||
| Operator::I64x2Mul
|
||||
|
@ -1960,20 +1785,17 @@ impl OperatorValidator {
|
|||
|
||||
Operator::MemoryInit { segment } => {
|
||||
self.check_bulk_memory_enabled()?;
|
||||
self.check_memory_index(0, resources)?;
|
||||
if segment >= resources.data_count() {
|
||||
bail_op_err!("unknown data segment {}", segment);
|
||||
return Err("segment index out of bounds");
|
||||
}
|
||||
self.check_memory_index(0, resources)?;
|
||||
self.check_operands_3(Type::I32, Type::I32, Type::I32)?;
|
||||
self.func_state.change_frame(3)?;
|
||||
}
|
||||
Operator::DataDrop { segment } => {
|
||||
self.check_bulk_memory_enabled()?;
|
||||
self.check_memory_index(0, resources)?;
|
||||
if segment >= resources.data_count() {
|
||||
return Err(OperatorValidatorError::new(
|
||||
"unknown data segment: segment index out of bounds",
|
||||
));
|
||||
return Err("segment index out of bounds");
|
||||
}
|
||||
}
|
||||
Operator::MemoryCopy | Operator::MemoryFill => {
|
||||
|
@ -1984,17 +1806,14 @@ impl OperatorValidator {
|
|||
}
|
||||
Operator::TableInit { segment, table } => {
|
||||
self.check_bulk_memory_enabled()?;
|
||||
if segment >= resources.element_count() {
|
||||
return Err("segment index out of bounds");
|
||||
}
|
||||
if table > 0 {
|
||||
self.check_reference_types_enabled()?;
|
||||
}
|
||||
if resources.table_at(table).is_none() {
|
||||
bail_op_err!("unknown table {}: table index out of bounds", table);
|
||||
}
|
||||
if segment >= resources.element_count() {
|
||||
bail_op_err!(
|
||||
"unknown element segment {}: segment index out of bounds",
|
||||
segment
|
||||
);
|
||||
return Err("table index out of bounds");
|
||||
}
|
||||
self.check_operands_3(Type::I32, Type::I32, Type::I32)?;
|
||||
self.func_state.change_frame(3)?;
|
||||
|
@ -2002,10 +1821,7 @@ impl OperatorValidator {
|
|||
Operator::ElemDrop { segment } => {
|
||||
self.check_bulk_memory_enabled()?;
|
||||
if segment >= resources.element_count() {
|
||||
bail_op_err!(
|
||||
"unknown element segment {}: segment index out of bounds",
|
||||
segment
|
||||
);
|
||||
return Err("segment index out of bounds");
|
||||
}
|
||||
}
|
||||
Operator::TableCopy {
|
||||
|
@ -2019,7 +1835,7 @@ impl OperatorValidator {
|
|||
if resources.table_at(src_table).is_none()
|
||||
|| resources.table_at(dst_table).is_none()
|
||||
{
|
||||
return Err(OperatorValidatorError::new("table index out of bounds"));
|
||||
return Err("table index out of bounds");
|
||||
}
|
||||
self.check_operands_3(Type::I32, Type::I32, Type::I32)?;
|
||||
self.func_state.change_frame(3)?;
|
||||
|
@ -2028,7 +1844,7 @@ impl OperatorValidator {
|
|||
self.check_reference_types_enabled()?;
|
||||
let ty = match resources.table_at(table) {
|
||||
Some(ty) => ty.element_type().to_parser_type(),
|
||||
None => return Err(OperatorValidatorError::new("table index out of bounds")),
|
||||
None => return Err("table index out of bounds"),
|
||||
};
|
||||
self.check_operands_1(Type::I32)?;
|
||||
self.func_state.change_frame_with_type(1, ty)?;
|
||||
|
@ -2037,7 +1853,7 @@ impl OperatorValidator {
|
|||
self.check_reference_types_enabled()?;
|
||||
let ty = match resources.table_at(table) {
|
||||
Some(ty) => ty.element_type().to_parser_type(),
|
||||
None => return Err(OperatorValidatorError::new("table index out of bounds")),
|
||||
None => return Err("table index out of bounds"),
|
||||
};
|
||||
self.check_operands_2(Type::I32, ty)?;
|
||||
self.func_state.change_frame(2)?;
|
||||
|
@ -2046,7 +1862,7 @@ impl OperatorValidator {
|
|||
self.check_reference_types_enabled()?;
|
||||
let ty = match resources.table_at(table) {
|
||||
Some(ty) => ty.element_type().to_parser_type(),
|
||||
None => return Err(OperatorValidatorError::new("table index out of bounds")),
|
||||
None => return Err("table index out of bounds"),
|
||||
};
|
||||
self.check_operands_2(ty, Type::I32)?;
|
||||
self.func_state.change_frame_with_type(2, Type::I32)?;
|
||||
|
@ -2054,7 +1870,7 @@ impl OperatorValidator {
|
|||
Operator::TableSize { table } => {
|
||||
self.check_reference_types_enabled()?;
|
||||
if resources.table_at(table).is_none() {
|
||||
return Err(OperatorValidatorError::new("table index out of bounds"));
|
||||
return Err("table index out of bounds");
|
||||
}
|
||||
self.func_state.change_frame_with_type(0, Type::I32)?;
|
||||
}
|
||||
|
@ -2062,7 +1878,7 @@ impl OperatorValidator {
|
|||
self.check_bulk_memory_enabled()?;
|
||||
let ty = match resources.table_at(table) {
|
||||
Some(ty) => ty.element_type().to_parser_type(),
|
||||
None => return Err(OperatorValidatorError::new("table index out of bounds")),
|
||||
None => return Err("table index out of bounds"),
|
||||
};
|
||||
self.check_operands_3(Type::I32, ty, Type::I32)?;
|
||||
self.func_state.change_frame(3)?;
|
||||
|
@ -2073,7 +1889,7 @@ impl OperatorValidator {
|
|||
|
||||
pub(crate) fn process_end_function(&self) -> OperatorValidatorResult<()> {
|
||||
if !self.func_state.end_function {
|
||||
return Err(OperatorValidatorError::new("expected end of function"));
|
||||
return Err("expected end of function");
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
@ -456,10 +456,10 @@ impl<'a> Parser<'a> {
|
|||
.get_items_reader()?;
|
||||
let num_elements = reader.get_count() as usize;
|
||||
if num_elements > MAX_WASM_TABLE_ENTRIES {
|
||||
return Err(BinaryReaderError::new(
|
||||
"num_elements is out of bounds",
|
||||
0, // reader.position - 1, // TODO offset
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "num_elements is out of bounds",
|
||||
offset: 0, // reader.position - 1, // TODO offset
|
||||
});
|
||||
}
|
||||
let mut elements = Vec::with_capacity(num_elements);
|
||||
for _ in 0..num_elements {
|
||||
|
@ -487,26 +487,27 @@ impl<'a> Parser<'a> {
|
|||
let mut reader = function_body.get_locals_reader()?;
|
||||
let local_count = reader.get_count() as usize;
|
||||
if local_count > MAX_WASM_FUNCTION_LOCALS {
|
||||
return Err(BinaryReaderError::new(
|
||||
"local_count is out of bounds",
|
||||
reader.original_position() - 1,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "local_count is out of bounds",
|
||||
offset: reader.original_position() - 1,
|
||||
});
|
||||
}
|
||||
let mut locals: Vec<(u32, Type)> = Vec::with_capacity(local_count);
|
||||
let mut locals_total: usize = 0;
|
||||
for _ in 0..local_count {
|
||||
let (count, ty) = reader.read()?;
|
||||
locals_total = locals_total.checked_add(count as usize).ok_or_else(|| {
|
||||
BinaryReaderError::new(
|
||||
"locals_total is out of bounds",
|
||||
reader.original_position() - 1,
|
||||
)
|
||||
})?;
|
||||
locals_total =
|
||||
locals_total
|
||||
.checked_add(count as usize)
|
||||
.ok_or_else(|| BinaryReaderError {
|
||||
message: "locals_total is out of bounds",
|
||||
offset: reader.original_position() - 1,
|
||||
})?;
|
||||
if locals_total > MAX_WASM_FUNCTION_LOCALS {
|
||||
return Err(BinaryReaderError::new(
|
||||
"locals_total is out of bounds",
|
||||
reader.original_position() - 1,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "locals_total is out of bounds",
|
||||
offset: reader.original_position() - 1,
|
||||
});
|
||||
}
|
||||
locals.push((count, ty));
|
||||
}
|
||||
|
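The locals-reading hunk above restores the overflow guard on the declared-locals total: each entry is a `(count, type)` pair, the running total is accumulated with `checked_add`, and anything past `MAX_WASM_FUNCTION_LOCALS` is rejected. A standalone sketch of the same guard; the limit value and the `Ty` enum are assumptions for illustration, not the crate's constants.

    // Illustrative limit; wasmparser defines MAX_WASM_FUNCTION_LOCALS in its
    // limits module (the value here is assumed).
    const MAX_WASM_FUNCTION_LOCALS: usize = 50_000;

    #[derive(Clone, Copy, Debug)]
    enum Ty {
        I32,
        I64,
    }

    // Same guard as the restored parser code: sum the per-entry counts with
    // checked_add and reject totals past the limit.
    fn sum_locals(entries: &[(u32, Ty)]) -> Result<usize, &'static str> {
        let mut total: usize = 0;
        for &(count, _ty) in entries {
            total = total
                .checked_add(count as usize)
                .ok_or("locals_total is out of bounds")?;
            if total > MAX_WASM_FUNCTION_LOCALS {
                return Err("locals_total is out of bounds");
            }
        }
        Ok(total)
    }

    fn main() {
        assert_eq!(sum_locals(&[(3, Ty::I32), (1, Ty::I64)]), Ok(4));
        assert!(sum_locals(&[(u32::MAX, Ty::I64)]).is_err());
    }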
@ -531,10 +532,10 @@ impl<'a> Parser<'a> {
|
|||
return Ok(());
|
||||
}
|
||||
let reader = self.operators_reader.as_ref().expect("operator reader");
|
||||
return Err(BinaryReaderError::new(
|
||||
"Expected end of function marker",
|
||||
reader.original_position(),
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "Expected end of function marker",
|
||||
offset: reader.original_position(),
|
||||
});
|
||||
}
|
||||
let reader = self.operators_reader.as_mut().expect("operator reader");
|
||||
let op = reader.read()?;
|
||||
|
@ -589,10 +590,10 @@ impl<'a> Parser<'a> {
|
|||
{
|
||||
let count = naming_reader.get_count() as usize;
|
||||
if count > limit {
|
||||
return Err(BinaryReaderError::new(
|
||||
"name map size is out of bound",
|
||||
naming_reader.original_position() - 1,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "name map size is out of bound",
|
||||
offset: naming_reader.original_position() - 1,
|
||||
});
|
||||
}
|
||||
let mut result = Vec::with_capacity(count);
|
||||
for _ in 0..count {
|
||||
|
@ -614,10 +615,10 @@ impl<'a> Parser<'a> {
|
|||
let mut reader = locals.get_function_local_reader()?;
|
||||
let funcs_len = reader.get_count() as usize;
|
||||
if funcs_len > MAX_WASM_FUNCTIONS {
|
||||
return Err(BinaryReaderError::new(
|
||||
"function count is out of bounds",
|
||||
reader.original_position() - 1,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "function count is out of bounds",
|
||||
offset: reader.original_position() - 1,
|
||||
});
|
||||
}
|
||||
let mut funcs: Vec<LocalName<'a>> = Vec::with_capacity(funcs_len);
|
||||
for _ in 0..funcs_len {
|
||||
|
|
|
@ -17,18 +17,10 @@ use std::error::Error;
|
|||
use std::fmt;
|
||||
use std::result;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct BinaryReaderError {
|
||||
// Wrap the actual error data in a `Box` so that the error is just one
|
||||
// word. This means that we can continue returning small `Result`s in
|
||||
// registers.
|
||||
pub(crate) inner: Box<BinaryReaderErrorInner>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub(crate) struct BinaryReaderErrorInner {
|
||||
pub(crate) message: String,
|
||||
pub(crate) offset: usize,
|
||||
pub message: &'static str,
|
||||
pub offset: usize,
|
||||
}
|
||||
|
||||
pub type Result<T> = result::Result<T, BinaryReaderError>;
|
||||
|
@ -37,30 +29,7 @@ impl Error for BinaryReaderError {}
|
|||
|
||||
impl fmt::Display for BinaryReaderError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(
|
||||
f,
|
||||
"{} (at offset {})",
|
||||
self.inner.message, self.inner.offset
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl BinaryReaderError {
|
||||
pub(crate) fn new(message: impl Into<String>, offset: usize) -> Self {
|
||||
let message = message.into();
|
||||
BinaryReaderError {
|
||||
inner: Box::new(BinaryReaderErrorInner { message, offset }),
|
||||
}
|
||||
}
|
||||
|
||||
/// Get this error's message.
|
||||
pub fn message(&self) -> &str {
|
||||
&self.inner.message
|
||||
}
|
||||
|
||||
/// Get the offset within the Wasm binary where the error occurred.
|
||||
pub fn offset(&self) -> usize {
|
||||
self.inner.offset
|
||||
write!(f, "{} (at offset {})", self.message, self.offset)
|
||||
}
|
||||
}
|
||||
|
||||
|
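For reference, the error.rs hunks above revert `BinaryReaderError` from the boxed-inner form (a heap-allocated `String` message kept behind a `Box` so the error stays one machine word) back to a plain `Copy` struct with public `&'static str` message and `usize` offset fields, which is why every construction site in this patch goes back to a struct literal. A minimal sketch of the restored shape and its `Display` output, written from what the hunks show rather than copied from the crate.

    use std::fmt;

    // Restored (pre-patch) shape as shown in the hunks: two public fields,
    // Copy, no boxing.
    #[derive(Debug, Copy, Clone)]
    pub struct BinaryReaderError {
        pub message: &'static str,
        pub offset: usize,
    }

    impl fmt::Display for BinaryReaderError {
        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
            write!(f, "{} (at offset {})", self.message, self.offset)
        }
    }

    fn main() {
        // Construction is a plain struct literal again, matching the hunks above.
        let err = BinaryReaderError {
            message: "Unexpected content in the start section",
            offset: 0x2a,
        };
        println!("{}", err); // Unexpected content in the start section (at offset 42)
    }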
@ -634,10 +603,6 @@ pub enum Operator<'a> {
|
|||
I8x16Sub,
|
||||
I8x16SubSaturateS,
|
||||
I8x16SubSaturateU,
|
||||
I8x16MinS,
|
||||
I8x16MinU,
|
||||
I8x16MaxS,
|
||||
I8x16MaxU,
|
||||
I8x16Mul,
|
||||
I16x8Neg,
|
||||
I16x8AnyTrue,
|
||||
|
@ -652,10 +617,6 @@ pub enum Operator<'a> {
|
|||
I16x8SubSaturateS,
|
||||
I16x8SubSaturateU,
|
||||
I16x8Mul,
|
||||
I16x8MinS,
|
||||
I16x8MinU,
|
||||
I16x8MaxS,
|
||||
I16x8MaxU,
|
||||
I32x4Neg,
|
||||
I32x4AnyTrue,
|
||||
I32x4AllTrue,
|
||||
|
@ -665,10 +626,6 @@ pub enum Operator<'a> {
|
|||
I32x4Add,
|
||||
I32x4Sub,
|
||||
I32x4Mul,
|
||||
I32x4MinS,
|
||||
I32x4MinU,
|
||||
I32x4MaxS,
|
||||
I32x4MaxU,
|
||||
I64x2Neg,
|
||||
I64x2AnyTrue,
|
||||
I64x2AllTrue,
|
||||
|
|
|
@ -151,10 +151,10 @@ impl<'a> CodeSectionReader<'a> {
|
|||
|
||||
fn verify_body_end(&self, end: usize) -> Result<()> {
|
||||
if self.reader.buffer.len() < end {
|
||||
return Err(BinaryReaderError::new(
|
||||
"Function body extends past end of the code section",
|
||||
self.reader.original_offset + self.reader.buffer.len(),
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "Function body extends past end of the code section",
|
||||
offset: self.reader.original_offset + self.reader.buffer.len(),
|
||||
});
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
@ -19,10 +19,10 @@ pub(crate) fn read_data_count_section_content(data: &[u8], offset: usize) -> Res
|
|||
let mut reader = BinaryReader::new_with_offset(data, offset);
|
||||
let count = reader.read_var_u32()?;
|
||||
if !reader.eof() {
|
||||
return Err(BinaryReaderError::new(
|
||||
"Unexpected content in the data count section",
|
||||
offset + reader.position,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "Unexpected content in the data count section",
|
||||
offset: offset + reader.position,
|
||||
});
|
||||
}
|
||||
Ok(count)
|
||||
}
|
||||
|
|
|
@ -55,10 +55,10 @@ impl<'a> DataSectionReader<'a> {
|
|||
|
||||
fn verify_data_end(&self, end: usize) -> Result<()> {
|
||||
if self.reader.buffer.len() < end {
|
||||
return Err(BinaryReaderError::new(
|
||||
"Data segment extends past end of the data section",
|
||||
self.reader.original_offset + self.reader.buffer.len(),
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "Data segment extends past end of the data section",
|
||||
offset: self.reader.original_offset + self.reader.buffer.len(),
|
||||
});
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -102,10 +102,10 @@ impl<'a> DataSectionReader<'a> {
|
|||
0 => 0,
|
||||
2 => self.reader.read_var_u32()?,
|
||||
_ => {
|
||||
return Err(BinaryReaderError::new(
|
||||
"invalid flags byte in data segment",
|
||||
self.reader.original_position() - 1,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "invalid flags byte in data segment",
|
||||
offset: self.reader.original_position() - 1,
|
||||
});
|
||||
}
|
||||
};
|
||||
let init_expr = {
|
||||
|
|
|
@ -93,11 +93,21 @@ impl<'a> ElementItemsReader<'a> {
|
|||
let ret = match self.reader.read_operator()? {
|
||||
Operator::RefNull => ElementItem::Null,
|
||||
Operator::RefFunc { function_index } => ElementItem::Func(function_index),
|
||||
_ => return Err(BinaryReaderError::new("invalid passive segment", offset)),
|
||||
_ => {
|
||||
return Err(BinaryReaderError {
|
||||
message: "invalid passive segment",
|
||||
offset,
|
||||
})
|
||||
}
|
||||
};
|
||||
match self.reader.read_operator()? {
|
||||
Operator::End => {}
|
||||
_ => return Err(BinaryReaderError::new("invalid passive segment", offset)),
|
||||
_ => {
|
||||
return Err(BinaryReaderError {
|
||||
message: "invalid passive segment",
|
||||
offset,
|
||||
})
|
||||
}
|
||||
}
|
||||
Ok(ret)
|
||||
} else {
|
||||
|
@ -195,10 +205,10 @@ impl<'a> ElementSectionReader<'a> {
|
|||
{
|
||||
let flags = self.reader.read_var_u32()?;
|
||||
if (flags & !0b111) != 0 {
|
||||
return Err(BinaryReaderError::new(
|
||||
"invalid flags byte in element segment",
|
||||
self.reader.original_position() - 1,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "invalid flags byte in element segment",
|
||||
offset: self.reader.original_position() - 1,
|
||||
});
|
||||
}
|
||||
let kind = if flags & 0b001 != 0 {
|
||||
if flags & 0b010 != 0 {
|
||||
|
@ -231,10 +241,10 @@ impl<'a> ElementSectionReader<'a> {
|
|||
match self.reader.read_external_kind()? {
|
||||
ExternalKind::Function => Type::AnyFunc,
|
||||
_ => {
|
||||
return Err(BinaryReaderError::new(
|
||||
"only the function external type is supported in elem segment",
|
||||
self.reader.original_position() - 1,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "only the function external type is supported in elem segment",
|
||||
offset: self.reader.original_position() - 1,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -368,16 +368,16 @@ impl<'a> ModuleReader<'a> {
|
|||
|
||||
fn verify_section_end(&self, end: usize) -> Result<()> {
|
||||
if self.reader.buffer.len() < end {
|
||||
return Err(BinaryReaderError::new(
|
||||
"Section body extends past end of file",
|
||||
self.reader.buffer.len(),
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "Section body extends past end of file",
|
||||
offset: self.reader.buffer.len(),
|
||||
});
|
||||
}
|
||||
if self.reader.position > end {
|
||||
return Err(BinaryReaderError::new(
|
||||
"Section header is too big to fit into section body",
|
||||
end,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "Section header is too big to fit into section body",
|
||||
offset: end,
|
||||
});
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
@ -174,10 +174,10 @@ impl<'a> NameSectionReader<'a> {
|
|||
|
||||
fn verify_section_end(&self, end: usize) -> Result<()> {
|
||||
if self.reader.buffer.len() < end {
|
||||
return Err(BinaryReaderError::new(
|
||||
"Name entry extends past end of the code section",
|
||||
self.reader.original_offset + self.reader.buffer.len(),
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "Name entry extends past end of the code section",
|
||||
offset: self.reader.original_offset + self.reader.buffer.len(),
|
||||
});
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
@ -42,10 +42,10 @@ impl<'a> OperatorsReader<'a> {
|
|||
if self.eof() {
|
||||
return Ok(());
|
||||
}
|
||||
Err(BinaryReaderError::new(
|
||||
"Unexpected data at the end of operators",
|
||||
self.reader.original_position(),
|
||||
))
|
||||
Err(BinaryReaderError {
|
||||
message: "Unexpected data at the end of operators",
|
||||
offset: self.reader.original_position(),
|
||||
})
|
||||
}
|
||||
|
||||
pub fn read<'b>(&mut self) -> Result<Operator<'b>>
|
||||
|
|
|
@ -24,10 +24,10 @@ pub trait SectionReader {
|
|||
if self.eof() {
|
||||
return Ok(());
|
||||
}
|
||||
Err(BinaryReaderError::new(
|
||||
"Unexpected data at the end of the section",
|
||||
self.original_position(),
|
||||
))
|
||||
Err(BinaryReaderError {
|
||||
message: "Unexpected data at the end of the section",
|
||||
offset: self.original_position(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -22,10 +22,10 @@ pub(crate) fn read_sourcemappingurl_section_content<'a>(
|
|||
let mut reader = BinaryReader::new_with_offset(data, offset);
|
||||
let url = reader.read_string()?;
|
||||
if !reader.eof() {
|
||||
return Err(BinaryReaderError::new(
|
||||
"Unexpected content in the sourceMappingURL section",
|
||||
offset + reader.position,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "Unexpected content in the sourceMappingURL section",
|
||||
offset: offset + reader.position,
|
||||
});
|
||||
}
|
||||
Ok(url)
|
||||
}
|
||||
|
|
|
@ -19,10 +19,10 @@ pub(crate) fn read_start_section_content(data: &[u8], offset: usize) -> Result<u
|
|||
let mut reader = BinaryReader::new_with_offset(data, offset);
|
||||
let start_index = reader.read_var_u32()?;
|
||||
if !reader.eof() {
|
||||
return Err(BinaryReaderError::new(
|
||||
"Unexpected content in the start section",
|
||||
offset + reader.position,
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "Unexpected content in the start section",
|
||||
offset: offset + reader.position,
|
||||
});
|
||||
}
|
||||
Ok(start_index)
|
||||
}
|
||||
|
|
|
@ -79,7 +79,7 @@ mod simple_tests {
|
|||
let state = parser.read();
|
||||
match *state {
|
||||
ParserState::EndWasm => break,
|
||||
ParserState::Error(ref err) => panic!("Error: {:?}", err),
|
||||
ParserState::Error(err) => panic!("Error: {:?}", err),
|
||||
_ => (),
|
||||
}
|
||||
max_iteration -= 1;
|
||||
|
@ -104,7 +104,7 @@ mod simple_tests {
|
|||
let state = parser.read();
|
||||
match *state {
|
||||
ParserState::EndWasm => break,
|
||||
ParserState::Error(ref err) => panic!("Error: {:?}", err),
|
||||
ParserState::Error(err) => panic!("Error: {:?}", err),
|
||||
_ => (),
|
||||
}
|
||||
max_iteration -= 1;
|
||||
|
@ -324,7 +324,6 @@ mod wast_tests {
|
|||
use std::fs::{read, read_dir};
|
||||
use std::str;
|
||||
|
||||
const WAST_TESTS_PATH: &str = "tests/wast";
|
||||
const SPEC_TESTS_PATH: &str = "testsuite";
|
||||
|
||||
fn default_config() -> ValidatingParserConfig {
|
||||
|
@ -342,26 +341,6 @@ mod wast_tests {
|
|||
}
|
||||
}
|
||||
|
||||
fn extract_config(wast: &str) -> ValidatingParserConfig {
|
||||
let first = wast.split('\n').next();
|
||||
if first.is_none() || !first.unwrap().starts_with(";;; ") {
|
||||
return default_config();
|
||||
}
|
||||
let first = first.unwrap();
|
||||
ValidatingParserConfig {
|
||||
operator_config: OperatorValidatorConfig {
|
||||
enable_threads: first.contains("--enable-threads"),
|
||||
enable_reference_types: first.contains("--enable-reference-types"),
|
||||
enable_simd: first.contains("--enable-simd"),
|
||||
enable_bulk_memory: first.contains("--enable-bulk-memory"),
|
||||
enable_multi_value: first.contains("--enable-multi-value"),
|
||||
|
||||
#[cfg(feature = "deterministic")]
|
||||
deterministic_only: true,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
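The restored `extract_config` above only honours the first line of a `.wast` file when it starts with `;;; `, turning any `--enable-*` flags on that line into the validator configuration and falling back to the defaults otherwise. A hedged, self-contained sketch of that first-line parsing; `DemoConfig` and `extract_demo_config` stand in for the crate's `ValidatingParserConfig`/`OperatorValidatorConfig` and are not real API.

    // Stand-in for the crate's nested config structs; the field names mirror
    // the flags handled by the restored extract_config above.
    #[derive(Default, Debug, PartialEq)]
    struct DemoConfig {
        enable_threads: bool,
        enable_reference_types: bool,
        enable_simd: bool,
        enable_bulk_memory: bool,
        enable_multi_value: bool,
    }

    fn extract_demo_config(wast: &str) -> DemoConfig {
        // Only a leading ";;; " comment line carries flags; anything else means defaults.
        let first = match wast.lines().next() {
            Some(line) if line.starts_with(";;; ") => line,
            _ => return DemoConfig::default(),
        };
        DemoConfig {
            enable_threads: first.contains("--enable-threads"),
            enable_reference_types: first.contains("--enable-reference-types"),
            enable_simd: first.contains("--enable-simd"),
            enable_bulk_memory: first.contains("--enable-bulk-memory"),
            enable_multi_value: first.contains("--enable-multi-value"),
        }
    }

    fn main() {
        let cfg = extract_demo_config(";;; --enable-simd --enable-multi-value\n(module)");
        assert!(cfg.enable_simd && cfg.enable_multi_value && !cfg.enable_threads);
    }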
fn validate_module(
|
||||
mut module: wast::Module,
|
||||
config: ValidatingParserConfig,
|
||||
|
@ -373,7 +352,7 @@ mod wast_tests {
|
|||
let state = parser.read();
|
||||
match *state {
|
||||
ParserState::EndWasm => break,
|
||||
ParserState::Error(ref err) => return Err(err.clone()),
|
||||
ParserState::Error(err) => return Err(err),
|
||||
_ => (),
|
||||
}
|
||||
max_iteration -= 1;
|
||||
|
@ -424,52 +403,22 @@ mod wast_tests {
|
|||
match directive {
|
||||
Module(module) | AssertUnlinkable { module, .. } => {
|
||||
if let Err(err) = validate_module(module, config.clone()) {
|
||||
panic!("{}:{}: invalid module: {}", filename, line, err.message());
|
||||
panic!("{}:{}: invalid module: {}", filename, line, err.message);
|
||||
}
|
||||
}
|
||||
AssertInvalid {
|
||||
module,
|
||||
message,
|
||||
span: _,
|
||||
} => match validate_module(module, config.clone()) {
|
||||
Ok(_) => {
|
||||
panic!(
|
||||
"{}:{}: invalid module was successfully parsed",
|
||||
filename, line
|
||||
);
|
||||
}
|
||||
Err(e) => {
|
||||
if message.contains("unknown table")
|
||||
&& e.message().contains("unknown element segment")
|
||||
{
|
||||
println!(
|
||||
"{}:{}: skipping until \
|
||||
https://github.com/WebAssembly/testsuite/pull/18 is merged",
|
||||
filename, line,
|
||||
);
|
||||
continue;
|
||||
}
|
||||
assert!(
|
||||
e.message().contains(message),
|
||||
"{file}:{line}: expected \"{spec}\", got \"{actual}\"",
|
||||
file = filename,
|
||||
line = line,
|
||||
spec = message,
|
||||
actual = e.message(),
|
||||
);
|
||||
}
|
||||
},
|
||||
AssertMalformed {
|
||||
AssertInvalid { module, .. }
|
||||
| AssertMalformed {
|
||||
module: wast::QuoteModule::Module(module),
|
||||
..
|
||||
} => {
|
||||
// TODO differentiate between assert_invalid and assert_malformed
|
||||
if let Ok(_) = validate_module(module, config.clone()) {
|
||||
panic!(
|
||||
"{}:{}: invalid module was successfully parsed",
|
||||
filename, line
|
||||
);
|
||||
}
|
||||
// TODO: Check the assert_malformed message
|
||||
// TODO: Check the assert_invalid or assert_malformed message
|
||||
}
|
||||
|
||||
AssertMalformed {
|
||||
|
@ -480,6 +429,7 @@ mod wast_tests {
|
|||
| Invoke { .. }
|
||||
| AssertTrap { .. }
|
||||
| AssertReturn { .. }
|
||||
| AssertReturnFunc { .. }
|
||||
| AssertExhaustion { .. } => {}
|
||||
}
|
||||
}
|
||||
|
@ -514,16 +464,20 @@ mod wast_tests {
|
|||
"simd",
|
||||
{
|
||||
let mut config: ValidatingParserConfig = default_config();
|
||||
config.operator_config.enable_reference_types = true;
|
||||
config.operator_config.enable_simd = true;
|
||||
config
|
||||
},
|
||||
|name, line| match (name, line) {
|
||||
// FIXME(WebAssembly/simd#140) needs a few updates to the
|
||||
// `*.wast` file to successfully parse it (or so I think)
|
||||
("simd_lane.wast", _) => true, // due to ";; Test operation with empty argument"
|
||||
("simd_conversions.wast", _) => true, // unknown `i64x2.trunc_sat_f64x2_s`
|
||||
("simd_load.wast", _) => true, // due to ";; Test operation with empty argument"
|
||||
("simd_lane.wast", _) => true,
|
||||
("simd_load_extend.wast", _) => true,
|
||||
("simd_f32x4_arith.wast", _) => true,
|
||||
("simd_f64x2_arith.wast", _) => true,
|
||||
("simd_f32x4.wast", _) => true,
|
||||
("simd_f64x2.wast", _) => true,
|
||||
("simd_const.wast", _) => true,
|
||||
("simd_load_splat.wast", _) => true,
|
||||
_ => false,
|
||||
},
|
||||
);
|
||||
|
@ -548,37 +502,11 @@ mod wast_tests {
|
|||
},
|
||||
|name, line| match (name, line) {
|
||||
("br_table.wast", _) | ("select.wast", _) => true,
|
||||
("binary.wast", 1057) => true,
|
||||
("elem.wast", _) => true,
|
||||
("ref_func.wast", _) => true,
|
||||
("table-sub.wast", _) => true,
|
||||
("table_grow.wast", _) => true,
|
||||
_ => false,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn run_wast_tests() {
|
||||
for entry in read_dir(WAST_TESTS_PATH).unwrap() {
|
||||
let dir = entry.unwrap();
|
||||
if !dir.file_type().unwrap().is_file()
|
||||
|| dir.path().extension().map(|s| s.to_str().unwrap()) != Some("wast")
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
let data = read(&dir.path()).expect("wast data");
|
||||
let config = extract_config(&String::from_utf8_lossy(&data));
|
||||
run_wabt_scripts(
|
||||
dir.file_name().to_str().expect("name"),
|
||||
&data,
|
||||
config,
|
||||
|_, _| false,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn run_spec_tests() {
|
||||
for entry in read_dir(SPEC_TESTS_PATH).unwrap() {
|
||||
|
|
|
@ -30,8 +30,8 @@ use crate::primitives::{
|
|||
};
|
||||
|
||||
use crate::operators_validator::{
|
||||
check_value_type, is_subtype_supertype, FunctionEnd, OperatorValidator,
|
||||
OperatorValidatorConfig, OperatorValidatorError, DEFAULT_OPERATOR_VALIDATOR_CONFIG,
|
||||
is_subtype_supertype, FunctionEnd, OperatorValidator, OperatorValidatorConfig,
|
||||
DEFAULT_OPERATOR_VALIDATOR_CONFIG,
|
||||
};
|
||||
use crate::parser::{Parser, ParserInput, ParserState, WasmDecoder};
|
||||
use crate::{ElemSectionEntryTable, ElementItem};
|
||||
|
@ -191,30 +191,37 @@ impl<'a> ValidatingParser<'a> {
|
|||
&self.resources
|
||||
}
|
||||
|
||||
fn set_validation_error(&mut self, message: impl Into<String>) {
|
||||
self.validation_error = Some(ParserState::Error(BinaryReaderError::new(
|
||||
fn set_validation_error(&mut self, message: &'static str) {
|
||||
self.validation_error = Some(ParserState::Error(BinaryReaderError {
|
||||
message,
|
||||
self.read_position.unwrap(),
|
||||
)))
|
||||
offset: self.read_position.unwrap(),
|
||||
}))
|
||||
}
|
||||
|
||||
fn set_operator_validation_error(&mut self, e: OperatorValidatorError) {
|
||||
let offset = self.read_position.unwrap();
|
||||
self.validation_error = Some(ParserState::Error(e.set_offset(offset)));
|
||||
}
|
||||
|
||||
fn create_error<T>(&self, message: impl Into<String>) -> ValidatorResult<'a, T> {
|
||||
Err(ParserState::Error(BinaryReaderError::new(
|
||||
fn create_error<T>(&self, message: &'static str) -> ValidatorResult<'a, T> {
|
||||
Err(ParserState::Error(BinaryReaderError {
|
||||
message,
|
||||
self.read_position.unwrap(),
|
||||
)))
|
||||
offset: self.read_position.unwrap(),
|
||||
}))
|
||||
}
|
||||
|
||||
fn check_value_type(&self, ty: Type) -> ValidatorResult<'a, ()> {
|
||||
check_value_type(ty, &self.config.operator_config).map_err(|e| {
|
||||
let offset = self.read_position.unwrap();
|
||||
ParserState::Error(e.set_offset(offset))
|
||||
})
|
||||
match ty {
|
||||
Type::I32 | Type::I64 | Type::F32 | Type::F64 => Ok(()),
|
||||
Type::NullRef | Type::AnyFunc | Type::AnyRef => {
|
||||
if !self.config.operator_config.enable_reference_types {
|
||||
return self.create_error("reference types support is not enabled");
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
Type::V128 => {
|
||||
if !self.config.operator_config.enable_simd {
|
||||
return self.create_error("SIMD support is not enabled");
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
_ => self.create_error("invalid value type"),
|
||||
}
|
||||
}
|
||||
|
||||
fn check_value_types(&self, types: &[Type]) -> ValidatorResult<'a, ()> {
|
||||
|
@ -226,7 +233,7 @@ impl<'a> ValidatingParser<'a> {
|
|||
|
||||
fn check_limits(&self, limits: &ResizableLimits) -> ValidatorResult<'a, ()> {
|
||||
if limits.maximum.is_some() && limits.initial > limits.maximum.unwrap() {
|
||||
return self.create_error("size minimum must not be greater than maximum");
|
||||
return self.create_error("maximum limits less than initial");
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -236,7 +243,7 @@ impl<'a> ValidatingParser<'a> {
|
|||
self.check_value_types(&*func_type.params)?;
|
||||
self.check_value_types(&*func_type.returns)?;
|
||||
if !self.config.operator_config.enable_multi_value && func_type.returns.len() > 1 {
|
||||
self.create_error("invalid result arity: func type returns multiple values")
|
||||
self.create_error("func type returns multiple values")
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
|
@ -261,11 +268,11 @@ impl<'a> ValidatingParser<'a> {
|
|||
self.check_limits(&memory_type.limits)?;
|
||||
let initial = memory_type.limits.initial;
|
||||
if initial as usize > MAX_WASM_MEMORY_PAGES {
|
||||
return self.create_error("memory size must be at most 65536 pages (4GiB)");
|
||||
return self.create_error("memory initial value exceeds limit");
|
||||
}
|
||||
let maximum = memory_type.limits.maximum;
|
||||
if maximum.is_some() && maximum.unwrap() as usize > MAX_WASM_MEMORY_PAGES {
|
||||
return self.create_error("memory size must be at most 65536 pages (4GiB)");
|
||||
return self.create_error("memory maximum value exceeds limit");
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -281,7 +288,7 @@ impl<'a> ValidatingParser<'a> {
|
|||
return self.create_error("functions count out of bounds");
|
||||
}
|
||||
if type_index as usize >= self.resources.types.len() {
|
||||
return self.create_error("unknown type: type index out of bounds");
|
||||
return self.create_error("type index out of bounds");
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -289,13 +296,13 @@ impl<'a> ValidatingParser<'a> {
|
|||
if !self.config.operator_config.enable_reference_types
|
||||
&& self.resources.tables.len() >= MAX_WASM_TABLES
|
||||
{
|
||||
return self.create_error("multiple tables: tables count must be at most 1");
|
||||
return self.create_error("tables count must be at most 1");
|
||||
}
|
||||
self.check_table_type(table_type)
|
||||
}
|
||||
ImportSectionEntryType::Memory(ref memory_type) => {
|
||||
if self.resources.memories.len() >= MAX_WASM_MEMORIES {
|
||||
return self.create_error("multiple memories: memory count must be at most 1");
|
||||
return self.create_error("memory count must be at most 1");
|
||||
}
|
||||
self.check_memory_type(memory_type)
|
||||
}
|
||||
|
@ -311,9 +318,7 @@ impl<'a> ValidatingParser<'a> {
|
|||
fn check_init_expression_operator(&self, operator: &Operator) -> ValidatorResult<'a, ()> {
|
||||
let state = self.init_expression_state.as_ref().unwrap();
|
||||
if state.validated {
|
||||
return self.create_error(
|
||||
"constant expression required: type mismatch: only one init_expr operator is expected",
|
||||
);
|
||||
return self.create_error("only one init_expr operator is expected");
|
||||
}
|
||||
let ty = match *operator {
|
||||
Operator::I32Const { .. } => Type::I32,
|
||||
|
@ -334,27 +339,20 @@ impl<'a> ValidatingParser<'a> {
|
|||
}
|
||||
Operator::GlobalGet { global_index } => {
|
||||
if global_index as usize >= state.global_count {
|
||||
return self
|
||||
.create_error("unknown global: init_expr global index out of bounds");
|
||||
return self.create_error("init_expr global index out of bounds");
|
||||
}
|
||||
self.resources.globals[global_index as usize].content_type
|
||||
}
|
||||
Operator::RefFunc { function_index } => {
|
||||
if function_index as usize >= state.function_count {
|
||||
return self.create_error(format!(
|
||||
"unknown function {}: init_expr function index out of bounds",
|
||||
function_index
|
||||
));
|
||||
return self.create_error("init_expr function index out of bounds");
|
||||
}
|
||||
Type::AnyFunc
|
||||
}
|
||||
_ => {
|
||||
return self
|
||||
.create_error("constant expression required: invalid init_expr operator")
|
||||
}
|
||||
_ => return self.create_error("invalid init_expr operator"),
|
||||
};
|
||||
if !is_subtype_supertype(ty, state.ty) {
|
||||
return self.create_error("type mismatch: invalid init_expr type");
|
||||
return self.create_error("invalid init_expr type");
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -366,30 +364,27 @@ impl<'a> ValidatingParser<'a> {
|
|||
index: u32,
|
||||
) -> ValidatorResult<'a, ()> {
|
||||
if self.exported_names.contains(field) {
|
||||
return self.create_error("duplicate export name");
|
||||
return self.create_error("non-unique export name");
|
||||
}
|
||||
match kind {
|
||||
ExternalKind::Function => {
|
||||
if index as usize >= self.resources.func_type_indices.len() {
|
||||
return self
|
||||
.create_error("unknown function: exported function index out of bounds");
|
||||
return self.create_error("exported function index out of bounds");
|
||||
}
|
||||
}
|
||||
ExternalKind::Table => {
|
||||
if index as usize >= self.resources.tables.len() {
|
||||
return self.create_error("unknown table: exported table index out of bounds");
|
||||
return self.create_error("exported table index out of bounds");
|
||||
}
|
||||
}
|
||||
ExternalKind::Memory => {
|
||||
if index as usize >= self.resources.memories.len() {
|
||||
return self
|
||||
.create_error("unknown memory: exported memory index out of bounds");
|
||||
return self.create_error("exported memory index out of bounds");
|
||||
}
|
||||
}
|
||||
ExternalKind::Global => {
|
||||
if index as usize >= self.resources.globals.len() {
|
||||
return self
|
||||
.create_error("unknown global: exported global index out of bounds");
|
||||
return self.create_error("exported global index out of bounds");
|
||||
}
|
||||
}
|
||||
};
|
||||
|
@ -398,7 +393,7 @@ impl<'a> ValidatingParser<'a> {
|
|||
|
||||
fn check_start(&self, func_index: u32) -> ValidatorResult<'a, ()> {
|
||||
if func_index as usize >= self.resources.func_type_indices.len() {
|
||||
return self.create_error("unknown function: start function index out of bounds");
|
||||
return self.create_error("start function index out of bounds");
|
||||
}
|
||||
let type_index = self.resources.func_type_indices[func_index as usize];
|
||||
let ty = &self.resources.types[type_index as usize];
|
||||
|
@ -477,7 +472,7 @@ impl<'a> ValidatingParser<'a> {
|
|||
}
|
||||
ParserState::FunctionSectionEntry(type_index) => {
|
||||
if type_index as usize >= self.resources.types.len() {
|
||||
self.set_validation_error("unknown type: func type index out of bounds");
|
||||
self.set_validation_error("func type index out of bounds");
|
||||
} else if self.resources.func_type_indices.len() >= MAX_WASM_FUNCTIONS {
|
||||
self.set_validation_error("functions count out of bounds");
|
||||
} else {
|
||||
|
@ -488,7 +483,7 @@ impl<'a> ValidatingParser<'a> {
|
|||
if !self.config.operator_config.enable_reference_types
|
||||
&& self.resources.tables.len() >= MAX_WASM_TABLES
|
||||
{
|
||||
self.set_validation_error("multiple tables: tables count must be at most 1");
|
||||
self.set_validation_error("tables count must be at most 1");
|
||||
} else {
|
||||
self.validation_error = self.check_table_type(table_type).err();
|
||||
self.resources.tables.push(table_type.clone());
|
||||
|
@ -496,9 +491,7 @@ impl<'a> ValidatingParser<'a> {
|
|||
}
|
||||
ParserState::MemorySectionEntry(ref memory_type) => {
|
||||
if self.resources.memories.len() >= MAX_WASM_MEMORIES {
|
||||
self.set_validation_error(
|
||||
"multiple memories: memories count must be at most 1",
|
||||
);
|
||||
self.set_validation_error("memories count must be at most 1");
|
||||
} else {
|
||||
self.validation_error = self.check_memory_type(memory_type).err();
|
||||
self.resources.memories.push(memory_type.clone());
|
||||
|
@ -527,7 +520,7 @@ impl<'a> ValidatingParser<'a> {
|
|||
}
|
||||
ParserState::EndInitExpressionBody => {
|
||||
if !self.init_expression_state.as_ref().unwrap().validated {
|
||||
self.set_validation_error("type mismatch: init_expr is empty");
|
||||
self.set_validation_error("init_expr is empty");
|
||||
}
|
||||
self.init_expression_state = None;
|
||||
}
|
||||
|
@ -547,9 +540,7 @@ impl<'a> ValidatingParser<'a> {
|
|||
let table = match self.resources.tables.get(table_index as usize) {
|
||||
Some(t) => t,
|
||||
None => {
|
||||
self.set_validation_error(
|
||||
"unknown table: element section table index out of bounds",
|
||||
);
|
||||
self.set_validation_error("element section table index out of bounds");
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
@ -573,9 +564,7 @@ impl<'a> ValidatingParser<'a> {
|
|||
for item in &**indices {
|
||||
if let ElementItem::Func(func_index) = item {
|
||||
if *func_index as usize >= self.resources.func_type_indices.len() {
|
||||
self.set_validation_error(
|
||||
"unknown function: element func index out of bounds",
|
||||
);
|
||||
self.set_validation_error("element func index out of bounds");
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
@ -592,14 +581,8 @@ impl<'a> ValidatingParser<'a> {
|
|||
let func_type =
|
||||
&self.resources.types[self.resources.func_type_indices[index] as usize];
|
||||
let operator_config = self.config.operator_config;
|
||||
match OperatorValidator::new(func_type, locals, operator_config) {
|
||||
Ok(validator) => self.current_operator_validator = Some(validator),
|
||||
Err(err) => {
|
||||
self.validation_error = Some(ParserState::Error(
|
||||
err.set_offset(self.read_position.unwrap()),
|
||||
));
|
||||
}
|
||||
}
|
||||
self.current_operator_validator =
|
||||
Some(OperatorValidator::new(func_type, locals, operator_config));
|
||||
}
|
||||
ParserState::CodeOperator(ref operator) => {
|
||||
let check = self
|
||||
|
@ -609,7 +592,7 @@ impl<'a> ValidatingParser<'a> {
|
|||
.process_operator(operator, &self.resources);
|
||||
|
||||
if let Err(err) = check {
|
||||
self.set_operator_validation_error(err);
|
||||
self.set_validation_error(err);
|
||||
}
|
||||
}
|
||||
ParserState::EndFunctionBody => {
|
||||
|
@ -619,7 +602,7 @@ impl<'a> ValidatingParser<'a> {
|
|||
.unwrap()
|
||||
.process_end_function();
|
||||
if let Err(err) = check {
|
||||
self.set_operator_validation_error(err);
|
||||
self.set_validation_error(err);
|
||||
}
|
||||
self.current_func_index += 1;
|
||||
self.current_operator_validator = None;
|
||||
|
@ -629,9 +612,7 @@ impl<'a> ValidatingParser<'a> {
|
|||
}
|
||||
ParserState::BeginActiveDataSectionEntry(memory_index) => {
|
||||
if memory_index as usize >= self.resources.memories.len() {
|
||||
self.set_validation_error(
|
||||
"unknown memory: data section memory index out of bounds",
|
||||
);
|
||||
self.set_validation_error("data section memory index out of bounds");
|
||||
} else {
|
||||
self.init_expression_state = Some(InitExpressionState {
|
||||
ty: Type::I32,
|
||||
|
@ -659,9 +640,7 @@ impl<'a> ValidatingParser<'a> {
|
|||
};
|
||||
}
|
||||
|
||||
pub fn create_validating_operator_parser<'b>(
|
||||
&mut self,
|
||||
) -> ValidatorResult<ValidatingOperatorParser<'b>>
|
||||
pub fn create_validating_operator_parser<'b>(&mut self) -> ValidatingOperatorParser<'b>
|
||||
where
|
||||
'a: 'b,
|
||||
{
|
||||
|
@ -677,20 +656,11 @@ impl<'a> ValidatingParser<'a> {
|
|||
&self.resources.types[self.resources.func_type_indices[index] as usize];
|
||||
let operator_config = self.config.operator_config;
|
||||
OperatorValidator::new(func_type, locals, operator_config)
|
||||
.map_err(|e| ParserState::Error(e.set_offset(self.read_position.unwrap())))?
|
||||
}
|
||||
_ => panic!("Invalid reader state"),
|
||||
};
|
||||
let reader = self.create_binary_reader();
|
||||
Ok(ValidatingOperatorParser::new(
|
||||
operator_validator,
|
||||
reader,
|
||||
func_body_offset,
|
||||
))
|
||||
}
|
||||
|
||||
pub fn current_position(&self) -> usize {
|
||||
self.parser.current_position()
|
||||
ValidatingOperatorParser::new(operator_validator, reader, func_body_offset)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -800,9 +770,7 @@ impl<'b> ValidatingOperatorParser<'b> {
|
|||
/// _ => continue
|
||||
/// }
|
||||
/// }
|
||||
/// let mut reader = parser
|
||||
/// .create_validating_operator_parser()
|
||||
/// .expect("validating parser");
|
||||
/// let mut reader = parser.create_validating_operator_parser();
|
||||
/// println!("Function {}", i);
|
||||
/// i += 1;
|
||||
/// while !reader.eof() {
|
||||
|
@ -830,16 +798,18 @@ impl<'b> ValidatingOperatorParser<'b> {
|
|||
let op = self.reader.read_operator()?;
|
||||
match self.operator_validator.process_operator(&op, resources) {
|
||||
Err(err) => {
|
||||
let offset = self.func_body_offset + self.reader.current_position();
|
||||
return Err(err.set_offset(offset));
|
||||
return Err(BinaryReaderError {
|
||||
message: err,
|
||||
offset: self.func_body_offset + self.reader.current_position(),
|
||||
});
|
||||
}
|
||||
Ok(FunctionEnd::Yes) => {
|
||||
self.end_function = true;
|
||||
if !self.reader.eof() {
|
||||
return Err(BinaryReaderError::new(
|
||||
"unexpected end of function",
|
||||
self.func_body_offset + self.reader.current_position(),
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "unexpected end of function",
|
||||
offset: self.func_body_offset + self.reader.current_position(),
|
||||
});
|
||||
}
|
||||
}
|
||||
_ => (),
|
||||
|
@ -872,23 +842,27 @@ pub fn validate_function_body<
|
|||
let mut locals_reader = function_body.get_locals_reader()?;
|
||||
let local_count = locals_reader.get_count() as usize;
|
||||
if local_count > MAX_WASM_FUNCTION_LOCALS {
|
||||
return Err(BinaryReaderError::new(
|
||||
"locals exceed maximum",
|
||||
locals_reader.original_position(),
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "locals exceed maximum",
|
||||
offset: locals_reader.original_position(),
|
||||
});
|
||||
}
|
||||
let mut locals: Vec<(u32, Type)> = Vec::with_capacity(local_count);
|
||||
let mut locals_total: usize = 0;
|
||||
for _ in 0..local_count {
|
||||
let (count, ty) = locals_reader.read()?;
|
||||
locals_total = locals_total.checked_add(count as usize).ok_or_else(|| {
|
||||
BinaryReaderError::new("locals overflow", locals_reader.original_position())
|
||||
})?;
|
||||
locals_total =
|
||||
locals_total
|
||||
.checked_add(count as usize)
|
||||
.ok_or_else(|| BinaryReaderError {
|
||||
message: "locals overflow",
|
||||
offset: locals_reader.original_position(),
|
||||
})?;
|
||||
if locals_total > MAX_WASM_FUNCTION_LOCALS {
|
||||
return Err(BinaryReaderError::new(
|
||||
"locals exceed maximum",
|
||||
locals_reader.original_position(),
|
||||
));
|
||||
return Err(BinaryReaderError {
|
||||
message: "locals exceed maximum",
|
||||
offset: locals_reader.original_position(),
|
||||
});
|
||||
}
|
||||
locals.push((count, ty));
|
||||
}
|
||||
|
@ -905,15 +879,14 @@ pub fn validate_function_body<
|
|||
// so I assumed it is considered a bug to access a non-existing function
|
||||
// id here and went with panicking instead of returning a proper error.
|
||||
.expect("the function type indexof the validated function itself is out of bounds");
|
||||
let mut operator_validator = OperatorValidator::new(func_type, &locals, operator_config)
|
||||
.map_err(|e| e.set_offset(offset))?;
|
||||
let mut operator_validator = OperatorValidator::new(func_type, &locals, operator_config);
|
||||
let mut eof_found = false;
|
||||
let mut last_op = 0;
|
||||
for item in operators_reader.into_iter_with_offsets() {
|
||||
let (ref op, offset) = item?;
|
||||
match operator_validator
|
||||
.process_operator(op, resources)
|
||||
.map_err(|e| e.set_offset(offset))?
|
||||
.map_err(|message| BinaryReaderError { message, offset })?
|
||||
{
|
||||
FunctionEnd::Yes => {
|
||||
eof_found = true;
|
||||
|
@ -924,7 +897,10 @@ pub fn validate_function_body<
|
|||
}
|
||||
}
|
||||
if !eof_found {
|
||||
return Err(BinaryReaderError::new("end of function not found", last_op));
|
||||
return Err(BinaryReaderError {
|
||||
message: "end of function not found",
|
||||
offset: last_op,
|
||||
});
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -940,7 +916,7 @@ pub fn validate(bytes: &[u8], config: Option<ValidatingParserConfig>) -> Result<
|
|||
let state = parser.read_with_input(next_input);
|
||||
match *state {
|
||||
ParserState::EndWasm => break,
|
||||
ParserState::Error(ref e) => return Err(e.clone()),
|
||||
ParserState::Error(e) => return Err(e),
|
||||
ParserState::BeginFunctionBody { range } => {
|
||||
parser_input = Some(ParserInput::SkipFunctionBody);
|
||||
func_ranges.push(range);
|
||||
|
|