Bug 1604120: Bump Cranelift to ec787eb281bb2e18e191508c17abe694e91f0677; r=lth

Differential Revision: https://phabricator.services.mozilla.com/D57293

--HG--
extra : moz-landing-system : lando
This commit is contained in:
Benjamin Bouvier 2019-12-16 14:06:58 +00:00
Родитель 6639140a09
Коммит d890f481f4
99 изменённых файлов: 298 добавлений и 20328 удалений

Просмотреть файл

@ -40,7 +40,7 @@ rev = "8069f8f4189982e0b38fa6dc8993dd4fab41f728"
[source."https://github.com/bytecodealliance/cranelift"]
git = "https://github.com/bytecodealliance/cranelift"
replace-with = "vendored-sources"
rev = "4727b70b67abfa4f3ae1c276454a0da7a76e1d49"
rev = "ec787eb281bb2e18e191508c17abe694e91f0677"
[source."https://github.com/alexcrichton/mio-named-pipes"]
branch = "master"

111
Cargo.lock сгенерированный
Просмотреть файл

@ -198,8 +198,8 @@ name = "baldrdash"
version = "0.1.0"
dependencies = [
"bindgen 0.51.1 (registry+https://github.com/rust-lang/crates.io-index)",
"cranelift-codegen 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=4727b70b67abfa4f3ae1c276454a0da7a76e1d49)",
"cranelift-wasm 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=4727b70b67abfa4f3ae1c276454a0da7a76e1d49)",
"cranelift-codegen 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=ec787eb281bb2e18e191508c17abe694e91f0677)",
"cranelift-wasm 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=ec787eb281bb2e18e191508c17abe694e91f0677)",
"env_logger 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"smallvec 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -651,21 +651,21 @@ dependencies = [
[[package]]
name = "cranelift-bforest"
version = "0.51.0"
source = "git+https://github.com/bytecodealliance/cranelift?rev=4727b70b67abfa4f3ae1c276454a0da7a76e1d49#4727b70b67abfa4f3ae1c276454a0da7a76e1d49"
source = "git+https://github.com/bytecodealliance/cranelift?rev=ec787eb281bb2e18e191508c17abe694e91f0677#ec787eb281bb2e18e191508c17abe694e91f0677"
dependencies = [
"cranelift-entity 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=4727b70b67abfa4f3ae1c276454a0da7a76e1d49)",
"cranelift-entity 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=ec787eb281bb2e18e191508c17abe694e91f0677)",
]
[[package]]
name = "cranelift-codegen"
version = "0.51.0"
source = "git+https://github.com/bytecodealliance/cranelift?rev=4727b70b67abfa4f3ae1c276454a0da7a76e1d49#4727b70b67abfa4f3ae1c276454a0da7a76e1d49"
source = "git+https://github.com/bytecodealliance/cranelift?rev=ec787eb281bb2e18e191508c17abe694e91f0677#ec787eb281bb2e18e191508c17abe694e91f0677"
dependencies = [
"byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
"cranelift-bforest 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=4727b70b67abfa4f3ae1c276454a0da7a76e1d49)",
"cranelift-codegen-meta 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=4727b70b67abfa4f3ae1c276454a0da7a76e1d49)",
"cranelift-codegen-shared 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=4727b70b67abfa4f3ae1c276454a0da7a76e1d49)",
"cranelift-entity 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=4727b70b67abfa4f3ae1c276454a0da7a76e1d49)",
"cranelift-bforest 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=ec787eb281bb2e18e191508c17abe694e91f0677)",
"cranelift-codegen-meta 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=ec787eb281bb2e18e191508c17abe694e91f0677)",
"cranelift-codegen-shared 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=ec787eb281bb2e18e191508c17abe694e91f0677)",
"cranelift-entity 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=ec787eb281bb2e18e191508c17abe694e91f0677)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"smallvec 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"target-lexicon 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -675,20 +675,16 @@ dependencies = [
[[package]]
name = "cranelift-codegen-meta"
version = "0.51.0"
source = "git+https://github.com/bytecodealliance/cranelift?rev=4727b70b67abfa4f3ae1c276454a0da7a76e1d49#4727b70b67abfa4f3ae1c276454a0da7a76e1d49"
source = "git+https://github.com/bytecodealliance/cranelift?rev=ec787eb281bb2e18e191508c17abe694e91f0677#ec787eb281bb2e18e191508c17abe694e91f0677"
dependencies = [
"cranelift-codegen-shared 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=4727b70b67abfa4f3ae1c276454a0da7a76e1d49)",
"cranelift-entity 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=4727b70b67abfa4f3ae1c276454a0da7a76e1d49)",
"cranelift-codegen-shared 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=ec787eb281bb2e18e191508c17abe694e91f0677)",
"cranelift-entity 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=ec787eb281bb2e18e191508c17abe694e91f0677)",
]
[[package]]
name = "cranelift-codegen-shared"
version = "0.51.0"
source = "git+https://github.com/bytecodealliance/cranelift?rev=4727b70b67abfa4f3ae1c276454a0da7a76e1d49#4727b70b67abfa4f3ae1c276454a0da7a76e1d49"
dependencies = [
"packed_struct 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"packed_struct_codegen 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
source = "git+https://github.com/bytecodealliance/cranelift?rev=ec787eb281bb2e18e191508c17abe694e91f0677#ec787eb281bb2e18e191508c17abe694e91f0677"
[[package]]
name = "cranelift-entity"
@ -698,14 +694,14 @@ source = "git+https://github.com/PLSysSec/lucet_sandbox_compiler#36f0e4deb5b080d
[[package]]
name = "cranelift-entity"
version = "0.51.0"
source = "git+https://github.com/bytecodealliance/cranelift?rev=4727b70b67abfa4f3ae1c276454a0da7a76e1d49#4727b70b67abfa4f3ae1c276454a0da7a76e1d49"
source = "git+https://github.com/bytecodealliance/cranelift?rev=ec787eb281bb2e18e191508c17abe694e91f0677#ec787eb281bb2e18e191508c17abe694e91f0677"
[[package]]
name = "cranelift-frontend"
version = "0.51.0"
source = "git+https://github.com/bytecodealliance/cranelift?rev=4727b70b67abfa4f3ae1c276454a0da7a76e1d49#4727b70b67abfa4f3ae1c276454a0da7a76e1d49"
source = "git+https://github.com/bytecodealliance/cranelift?rev=ec787eb281bb2e18e191508c17abe694e91f0677#ec787eb281bb2e18e191508c17abe694e91f0677"
dependencies = [
"cranelift-codegen 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=4727b70b67abfa4f3ae1c276454a0da7a76e1d49)",
"cranelift-codegen 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=ec787eb281bb2e18e191508c17abe694e91f0677)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"smallvec 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"target-lexicon 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -714,11 +710,11 @@ dependencies = [
[[package]]
name = "cranelift-wasm"
version = "0.51.0"
source = "git+https://github.com/bytecodealliance/cranelift?rev=4727b70b67abfa4f3ae1c276454a0da7a76e1d49#4727b70b67abfa4f3ae1c276454a0da7a76e1d49"
source = "git+https://github.com/bytecodealliance/cranelift?rev=ec787eb281bb2e18e191508c17abe694e91f0677#ec787eb281bb2e18e191508c17abe694e91f0677"
dependencies = [
"cranelift-codegen 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=4727b70b67abfa4f3ae1c276454a0da7a76e1d49)",
"cranelift-entity 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=4727b70b67abfa4f3ae1c276454a0da7a76e1d49)",
"cranelift-frontend 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=4727b70b67abfa4f3ae1c276454a0da7a76e1d49)",
"cranelift-codegen 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=ec787eb281bb2e18e191508c17abe694e91f0677)",
"cranelift-entity 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=ec787eb281bb2e18e191508c17abe694e91f0677)",
"cranelift-frontend 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=ec787eb281bb2e18e191508c17abe694e91f0677)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"thiserror 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
"wasmparser 0.39.2 (registry+https://github.com/rust-lang/crates.io-index)",
@ -2674,25 +2670,6 @@ dependencies = [
"cfg-if 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "packed_struct"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "packed_struct_codegen"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"packed_struct 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "parity-wasm"
version = "0.41.0"
@ -2938,11 +2915,6 @@ name = "quick-error"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "quote"
version = "0.3.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "quote"
version = "1.0.2"
@ -3800,16 +3772,6 @@ name = "svg_fmt"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "syn"
version = "0.11.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
"synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "syn"
version = "1.0.5"
@ -3820,14 +3782,6 @@ dependencies = [
"unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "synom"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "synstructure"
version = "0.12.1"
@ -4230,11 +4184,6 @@ name = "unicode-width"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "unicode-xid"
version = "0.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "unicode-xid"
version = "0.1.0"
@ -4751,14 +4700,14 @@ dependencies = [
"checksum coreaudio-sys-utils 0.1.0 (git+https://github.com/ChunMinChang/cubeb-coreaudio-rs?rev=5fcbd99e1b7356be1efcdc41654d495bd4c71c8c)" = "<none>"
"checksum cose 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "72fa26cb151d3ae4b70f63d67d0fed57ce04220feafafbae7f503bef7aae590d"
"checksum cose-c 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "49726015ab0ca765144fcca61e4a7a543a16b795a777fa53f554da2fffff9a94"
"checksum cranelift-bforest 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=4727b70b67abfa4f3ae1c276454a0da7a76e1d49)" = "<none>"
"checksum cranelift-codegen 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=4727b70b67abfa4f3ae1c276454a0da7a76e1d49)" = "<none>"
"checksum cranelift-codegen-meta 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=4727b70b67abfa4f3ae1c276454a0da7a76e1d49)" = "<none>"
"checksum cranelift-codegen-shared 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=4727b70b67abfa4f3ae1c276454a0da7a76e1d49)" = "<none>"
"checksum cranelift-bforest 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=ec787eb281bb2e18e191508c17abe694e91f0677)" = "<none>"
"checksum cranelift-codegen 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=ec787eb281bb2e18e191508c17abe694e91f0677)" = "<none>"
"checksum cranelift-codegen-meta 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=ec787eb281bb2e18e191508c17abe694e91f0677)" = "<none>"
"checksum cranelift-codegen-shared 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=ec787eb281bb2e18e191508c17abe694e91f0677)" = "<none>"
"checksum cranelift-entity 0.41.0 (git+https://github.com/PLSysSec/lucet_sandbox_compiler)" = "<none>"
"checksum cranelift-entity 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=4727b70b67abfa4f3ae1c276454a0da7a76e1d49)" = "<none>"
"checksum cranelift-frontend 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=4727b70b67abfa4f3ae1c276454a0da7a76e1d49)" = "<none>"
"checksum cranelift-wasm 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=4727b70b67abfa4f3ae1c276454a0da7a76e1d49)" = "<none>"
"checksum cranelift-entity 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=ec787eb281bb2e18e191508c17abe694e91f0677)" = "<none>"
"checksum cranelift-frontend 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=ec787eb281bb2e18e191508c17abe694e91f0677)" = "<none>"
"checksum cranelift-wasm 0.51.0 (git+https://github.com/bytecodealliance/cranelift?rev=ec787eb281bb2e18e191508c17abe694e91f0677)" = "<none>"
"checksum crc32fast 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ba125de2af0df55319f41944744ad91c71113bf74a4646efff39afe1f6842db1"
"checksum crossbeam-deque 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c3aa945d63861bfe624b55d153a39684da1e8c0bc8fba932f7ee3a3c16cea3ca"
"checksum crossbeam-epoch 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5064ebdbf05ce3cb95e45c8b086f72263f4166b29b97f6baff7ef7fe047b55ac"
@ -4915,8 +4864,6 @@ dependencies = [
"checksum ordered-float 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2f0015e9e8e28ee20c581cfbfe47c650cedeb9ed0721090e0b7ebb10b9cdbcc2"
"checksum owning_ref 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "49a4b8ea2179e6a2e27411d3bca09ca6dd630821cf6894c6c7c8467a8ee7ef13"
"checksum packed_simd 0.3.3 (git+https://github.com/hsivonen/packed_simd?branch=rust_1_32)" = "<none>"
"checksum packed_struct 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "90caf80e74380d94f2aabc83edb900b49123b3132442fb147f9155c87a756281"
"checksum packed_struct_codegen 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9f6fda15ebe37b7b28889bd4aa75bb134652eaec9eb99d1bf02f806fca4357fc"
"checksum parity-wasm 0.41.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc878dac00da22f8f61e7af3157988424567ab01d9920b962ef7dcbd7cd865"
"checksum parking_lot 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f842b1982eb6c2fe34036a4fbfb06dd185a3f5c8edfaacdf7d1ea10b07de6252"
"checksum parking_lot_core 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "b876b1b9e7ac6e1a74a6da34d25c42e17e8862aa409cbbbdcfc8d86c6f3bc62b"
@ -4943,7 +4890,6 @@ dependencies = [
"checksum pulse 0.3.0 (git+https://github.com/djg/cubeb-pulse-rs?rev=8069f8f4189982e0b38fa6dc8993dd4fab41f728)" = "<none>"
"checksum pulse-ffi 0.1.0 (git+https://github.com/djg/cubeb-pulse-rs?rev=8069f8f4189982e0b38fa6dc8993dd4fab41f728)" = "<none>"
"checksum quick-error 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eda5fe9b71976e62bc81b781206aaa076401769b2143379d3eb2118388babac4"
"checksum quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e920b65c65f10b2ae65c831a81a073a89edd28c7cce89475bff467ab4167a"
"checksum quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "053a8c8bcc71fcce321828dc897a98ab9760bef03a4fc36693c231e5b3216cfe"
"checksum rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca"
"checksum rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "3ae1b169243eaf61759b8475a998f0a385e42042370f3a7dbaf35246eacc8412"
@ -5022,9 +4968,7 @@ dependencies = [
"checksum string 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "00caf261d6f90f588f8450b8e1230fa0d5be49ee6140fdfbcb55335aff350970"
"checksum strsim 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bb4f380125926a99e52bc279241539c018323fab05ad6368b56f93d9369ff550"
"checksum svg_fmt 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c666f0fed8e1e20e057af770af9077d72f3d5a33157b8537c1475dd8ffd6d32b"
"checksum syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d3b891b9015c88c576343b9b3e41c2c11a51c219ef067b264bd9c8aa9b441dad"
"checksum syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "66850e97125af79138385e9b88339cbcd037e3f28ceab8c5ad98e64f0f1f80bf"
"checksum synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a393066ed9010ebaed60b9eafa373d4b1baac186dd7e008555b0f702b51945b6"
"checksum synstructure 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3f085a5855930c0441ca1288cf044ea4aecf4f43a91668abdb870b4ba546a203"
"checksum target-lexicon 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6f4c118a7a38378f305a9e111fcb2f7f838c0be324bfb31a77ea04f7f6e684b4"
"checksum tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9"
@ -5065,7 +5009,6 @@ dependencies = [
"checksum unicode-normalization 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "51ccda9ef9efa3f7ef5d91e8f9b83bbe6955f9bf86aec89d5cce2c874625920f"
"checksum unicode-segmentation 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "aa6024fc12ddfd1c6dbc14a80fa2324d4568849869b779f6bd37e5e4c03344d1"
"checksum unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "bf3a113775714a22dcb774d8ea3655c53a32debae63a063acc00a91cc586245f"
"checksum unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f860d7d29cf02cb2f3f359fd35991af3d30bac52c57d265a3c461074cb4dc"
"checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc"
"checksum unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c"
"checksum unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56"

Просмотреть файл

@ -68,8 +68,8 @@ spirv_cross = { git = "https://github.com/kvark/spirv_cross", branch = "wgpu" }
[patch.crates-io.cranelift-codegen]
git = "https://github.com/bytecodealliance/cranelift"
rev = "4727b70b67abfa4f3ae1c276454a0da7a76e1d49"
rev = "ec787eb281bb2e18e191508c17abe694e91f0677"
[patch.crates-io.cranelift-wasm]
git = "https://github.com/bytecodealliance/cranelift"
rev = "4727b70b67abfa4f3ae1c276454a0da7a76e1d49"
rev = "ec787eb281bb2e18e191508c17abe694e91f0677"

Просмотреть файл

@ -1 +1 @@
{"files":{"Cargo.toml":"65c94041eeaee1397206bacf6777b9f51cedae6e5309e6a0ceaf3a1a3c82f4ea","LICENSE":"268872b9816f90fd8e85db5a28d33f8150ebb8dd016653fb39ef1f94f2686bc5","README.md":"b123f056d0d458396679c5f7f2a16d2762af0258fcda4ac14b6655a95e5a0022","src/cdsl/ast.rs":"8fdd17d9d8c4bd0cf535599a40d7da0e5bf27fdcef817cd55ab7c580c8eb6a6c","src/cdsl/cpu_modes.rs":"7ceb99df347faf5e69b34a7e0d691e9c148d15587df09e99d46a7d37238d8d06","src/cdsl/encodings.rs":"d884a564815a03c23369bcf31d13b122ae5ba84d0c80eda9312f0c0a829bf794","src/cdsl/formats.rs":"63e638305aa3ca6dd409ddf0e5e9605eeac1cc2631103e42fc6cbc87703d9b63","src/cdsl/instructions.rs":"6ad4c75e2bf634475b6a57feb1582f39dd8e670680f68f66218274ad96b6b3f4","src/cdsl/isa.rs":"ccabd6848b69eb069c10db61c7e7f86080777495714bb53d03e663c40541be94","src/cdsl/mod.rs":"0aa827923bf4c45e5ee2359573bd863e00f474acd532739f49dcd74a27553882","src/cdsl/operands.rs":"1c3411504de9c83112ff48e0ff1cfbb2e4ba5a9a15c1716f411ef31a4df59899","src/cdsl/recipes.rs":"a3b4c49488762864856f9fb181df70650fceea07219d093f3c952229c67d35e1","src/cdsl/regs.rs":"05f93ab8504ee82d7cc9811a5b40e5cd004c327577b4c0b3dd957fc422f3c013","src/cdsl/settings.rs":"e6fd9a31925743b93b11f09c9c8271bab6aa2430aa053a2601957b4487df7d77","src/cdsl/type_inference.rs":"1efca8a095ffc899b7527bda6b9d9378c73d7283f8dceaa4819e8af599f8be21","src/cdsl/types.rs":"763cb82b1d8ceb6be682ed6c71c8c98cab686151d177aad6d8f8ef92a37c5639","src/cdsl/typevar.rs":"52f7242a35805a82baf2c788c3eb392e2fba0fcbf47f047f32eba81f34487c7e","src/cdsl/xform.rs":"55da0c3f2403147b535ab6ae5d69c623fbe839edecf2a3af1de84420cd58402d","src/default_map.rs":"101bb0282a124f9c921f6bd095f529e8753621450d783c3273b0b0394c2c5c03","src/error.rs":"e9b11b2feb2d867b94c8810fdc5a6c4e0d9131604a0bfa5340ff2639a55100b4","src/gen_binemit.rs":"515e243420b30d1e01f8ea630282d9b6d78a715e1951f3f20392e19a48164442","src/gen_encodings.rs":"f00cded6b68a9b48c9e3cd39a8b6f0ba136f4062c8f8666109158a72c62c3ed1","src/gen_inst.rs":"fac99641622591c0529c0ef789dd83290a624fc674c91a959f426
696f82044de","src/gen_legalizer.rs":"7a68c2b9fd77b6420ecb10539d9af0520374e4f2e30d5a345d98dafde157e6bd","src/gen_registers.rs":"3628949229c1cb5179ec5f655b9a1ddd0504ba74ffb9fb23741c85c9154b696f","src/gen_settings.rs":"f3cc3d31f6cc898f30606caf084f0de220db2d3b1b5e5e4145fa7c9a9a1597e2","src/gen_types.rs":"f6c090e1646a43bf2fe81ae0a7029cc6f7dc6d43285368f56d86c35a21c469a6","src/isa/arm32/mod.rs":"8e09ec1b3caf2d22dce8517b37c356047bfce9a6dea712467d867ed05c4bedaf","src/isa/arm64/mod.rs":"b01f030925d3f2af37d7df1b4a800eb7f0d24f74a46e9154fd8b6752643eb2d5","src/isa/mod.rs":"136141f99f217ba42b9e3f7f47238ab19cc974bb3bef2e2df7f7b5a683989d46","src/isa/riscv/encodings.rs":"8abb1968d917588bc5fc5f5be6dd66bdec23ac456ba65f8138237c8e891e843c","src/isa/riscv/mod.rs":"a7b461a30bbfbc1e3b33645422ff40d5b1761c30cb5d4a8aa12e9a3b7f7aee51","src/isa/riscv/recipes.rs":"c9424cffed54cc4d328879a4613b9f6a2c2b7cde7e6e17b4fccd5f661aaf92f2","src/isa/x86/encodings.rs":"71ba4d1cca1480437baecf1fe55ef8f9d759e540a697b5e09a2489567dfa8c26","src/isa/x86/instructions.rs":"e4a92f2b707e0a9af0317041eb9a8bc58a8bedcdbbe35f54dcfaf05699a50675","src/isa/x86/legalize.rs":"0624341293bc91f77a2bdc69c04a89a483ba18cc263058b92879b88127035c79","src/isa/x86/mod.rs":"49f0bc05898d1d8565e692ec2550855de15fd6cffa3b5b6e0f8d502cd813e306","src/isa/x86/opcodes.rs":"643bae64cd4050814adfb856046cf650979bec5d251a9d9a6e11bafb5a603c43","src/isa/x86/recipes.rs":"2e82a9244bbd7ca8b0e3423955a4c2948e9406955953edfb98676a954df4116a","src/isa/x86/registers.rs":"e24c9487f4c257b1089dac6bca0106cc673db77292cd000ca8e0c7e0d0103f63","src/isa/x86/settings.rs":"7f6266cd5098115ac24caea3be07a18c590c07b8cfe5f0912af3ed1d0d288330","src/lib.rs":"2491b0e74078914cb89d1778fa8174daf723fe76aaf7fed18741237d68f6df32","src/shared/entities.rs":"911b396da11188bd510de004956596f150e27540edd351c74028f99f5f3c79c5","src/shared/formats.rs":"d8cf211c392ec3c54d0101ef31b700c3222bc8e80b69a04b244953e449770336","src/shared/immediates.rs":"e4a57657f6af9853794804eb41c01204a2c13a632f44f55d90e156
a4b98c5f65","src/shared/instructions.rs":"cef3be79dfe3d1c2d0c4eef91cca2852ec5454c1e6bc326d8b64ade622da0ef2","src/shared/legalize.rs":"73e1b42743c324104f5a34120e1598146ce4ee7393137ecb5391b768c61f2605","src/shared/mod.rs":"bc497c14d083c29eefe4935cff9cd1bd138c071bc50f787248727a3858dc69f3","src/shared/settings.rs":"5775bb6b760ed9f54370b2ab0ae6bc020cafc6ad1369e90fec6144375641c27f","src/shared/types.rs":"4702df132f4b5d70cc9411ec5221ba0b1bd4479252274e0223ae57b6d0331247","src/srcgen.rs":"dcfc159c8599270f17e6a978c4be255abca51556b5ef0da497faec4a4a1e62ce","src/unique_table.rs":"31aa54330ca4786af772d32e8cb6158b6504b88fa93fe177bf0c6cbe545a8d35"},"package":null}
{"files":{"Cargo.toml":"65c94041eeaee1397206bacf6777b9f51cedae6e5309e6a0ceaf3a1a3c82f4ea","LICENSE":"268872b9816f90fd8e85db5a28d33f8150ebb8dd016653fb39ef1f94f2686bc5","README.md":"b123f056d0d458396679c5f7f2a16d2762af0258fcda4ac14b6655a95e5a0022","src/cdsl/ast.rs":"8fdd17d9d8c4bd0cf535599a40d7da0e5bf27fdcef817cd55ab7c580c8eb6a6c","src/cdsl/cpu_modes.rs":"7ceb99df347faf5e69b34a7e0d691e9c148d15587df09e99d46a7d37238d8d06","src/cdsl/encodings.rs":"d884a564815a03c23369bcf31d13b122ae5ba84d0c80eda9312f0c0a829bf794","src/cdsl/formats.rs":"63e638305aa3ca6dd409ddf0e5e9605eeac1cc2631103e42fc6cbc87703d9b63","src/cdsl/instructions.rs":"6ad4c75e2bf634475b6a57feb1582f39dd8e670680f68f66218274ad96b6b3f4","src/cdsl/isa.rs":"ccabd6848b69eb069c10db61c7e7f86080777495714bb53d03e663c40541be94","src/cdsl/mod.rs":"0aa827923bf4c45e5ee2359573bd863e00f474acd532739f49dcd74a27553882","src/cdsl/operands.rs":"1c3411504de9c83112ff48e0ff1cfbb2e4ba5a9a15c1716f411ef31a4df59899","src/cdsl/recipes.rs":"a3b4c49488762864856f9fb181df70650fceea07219d093f3c952229c67d35e1","src/cdsl/regs.rs":"05f93ab8504ee82d7cc9811a5b40e5cd004c327577b4c0b3dd957fc422f3c013","src/cdsl/settings.rs":"e6fd9a31925743b93b11f09c9c8271bab6aa2430aa053a2601957b4487df7d77","src/cdsl/type_inference.rs":"1efca8a095ffc899b7527bda6b9d9378c73d7283f8dceaa4819e8af599f8be21","src/cdsl/types.rs":"763cb82b1d8ceb6be682ed6c71c8c98cab686151d177aad6d8f8ef92a37c5639","src/cdsl/typevar.rs":"52f7242a35805a82baf2c788c3eb392e2fba0fcbf47f047f32eba81f34487c7e","src/cdsl/xform.rs":"55da0c3f2403147b535ab6ae5d69c623fbe839edecf2a3af1de84420cd58402d","src/default_map.rs":"101bb0282a124f9c921f6bd095f529e8753621450d783c3273b0b0394c2c5c03","src/error.rs":"e9b11b2feb2d867b94c8810fdc5a6c4e0d9131604a0bfa5340ff2639a55100b4","src/gen_binemit.rs":"515e243420b30d1e01f8ea630282d9b6d78a715e1951f3f20392e19a48164442","src/gen_encodings.rs":"f00cded6b68a9b48c9e3cd39a8b6f0ba136f4062c8f8666109158a72c62c3ed1","src/gen_inst.rs":"fac99641622591c0529c0ef789dd83290a624fc674c91a959f426
696f82044de","src/gen_legalizer.rs":"7a68c2b9fd77b6420ecb10539d9af0520374e4f2e30d5a345d98dafde157e6bd","src/gen_registers.rs":"3628949229c1cb5179ec5f655b9a1ddd0504ba74ffb9fb23741c85c9154b696f","src/gen_settings.rs":"f3cc3d31f6cc898f30606caf084f0de220db2d3b1b5e5e4145fa7c9a9a1597e2","src/gen_types.rs":"f6c090e1646a43bf2fe81ae0a7029cc6f7dc6d43285368f56d86c35a21c469a6","src/isa/arm32/mod.rs":"8e09ec1b3caf2d22dce8517b37c356047bfce9a6dea712467d867ed05c4bedaf","src/isa/arm64/mod.rs":"b01f030925d3f2af37d7df1b4a800eb7f0d24f74a46e9154fd8b6752643eb2d5","src/isa/mod.rs":"136141f99f217ba42b9e3f7f47238ab19cc974bb3bef2e2df7f7b5a683989d46","src/isa/riscv/encodings.rs":"8abb1968d917588bc5fc5f5be6dd66bdec23ac456ba65f8138237c8e891e843c","src/isa/riscv/mod.rs":"a7b461a30bbfbc1e3b33645422ff40d5b1761c30cb5d4a8aa12e9a3b7f7aee51","src/isa/riscv/recipes.rs":"c9424cffed54cc4d328879a4613b9f6a2c2b7cde7e6e17b4fccd5f661aaf92f2","src/isa/x86/encodings.rs":"71ba4d1cca1480437baecf1fe55ef8f9d759e540a697b5e09a2489567dfa8c26","src/isa/x86/instructions.rs":"e4a92f2b707e0a9af0317041eb9a8bc58a8bedcdbbe35f54dcfaf05699a50675","src/isa/x86/legalize.rs":"0624341293bc91f77a2bdc69c04a89a483ba18cc263058b92879b88127035c79","src/isa/x86/mod.rs":"49f0bc05898d1d8565e692ec2550855de15fd6cffa3b5b6e0f8d502cd813e306","src/isa/x86/opcodes.rs":"643bae64cd4050814adfb856046cf650979bec5d251a9d9a6e11bafb5a603c43","src/isa/x86/recipes.rs":"971277543466e91be6b41c1735d81ca2752c175ac079d7c7ab8694c87cab56fd","src/isa/x86/registers.rs":"e24c9487f4c257b1089dac6bca0106cc673db77292cd000ca8e0c7e0d0103f63","src/isa/x86/settings.rs":"7f6266cd5098115ac24caea3be07a18c590c07b8cfe5f0912af3ed1d0d288330","src/lib.rs":"2491b0e74078914cb89d1778fa8174daf723fe76aaf7fed18741237d68f6df32","src/shared/entities.rs":"911b396da11188bd510de004956596f150e27540edd351c74028f99f5f3c79c5","src/shared/formats.rs":"d8cf211c392ec3c54d0101ef31b700c3222bc8e80b69a04b244953e449770336","src/shared/immediates.rs":"e4a57657f6af9853794804eb41c01204a2c13a632f44f55d90e156
a4b98c5f65","src/shared/instructions.rs":"cef3be79dfe3d1c2d0c4eef91cca2852ec5454c1e6bc326d8b64ade622da0ef2","src/shared/legalize.rs":"73e1b42743c324104f5a34120e1598146ce4ee7393137ecb5391b768c61f2605","src/shared/mod.rs":"bc497c14d083c29eefe4935cff9cd1bd138c071bc50f787248727a3858dc69f3","src/shared/settings.rs":"5775bb6b760ed9f54370b2ab0ae6bc020cafc6ad1369e90fec6144375641c27f","src/shared/types.rs":"4702df132f4b5d70cc9411ec5221ba0b1bd4479252274e0223ae57b6d0331247","src/srcgen.rs":"dcfc159c8599270f17e6a978c4be255abca51556b5ef0da497faec4a4a1e62ce","src/unique_table.rs":"31aa54330ca4786af772d32e8cb6158b6504b88fa93fe177bf0c6cbe545a8d35"},"package":null}

Просмотреть файл

@ -100,7 +100,7 @@ impl<'builder> RecipeGroup<'builder> {
/// Given a sequence of opcode bytes, compute the recipe name prefix and encoding bits.
fn decode_opcodes(op_bytes: &[u8], rrr: u16, w: u16) -> (&'static str, u16) {
let enc = EncodingBits::new(op_bytes, rrr, w);
(enc.prefix.recipe_name_prefix(), enc.bits())
(enc.prefix().recipe_name_prefix(), enc.bits())
}
/// Given a snippet of Rust code (or None), replace the `PUT_OP` macro with the

Просмотреть файл

@ -1 +1 @@
{"files":{"Cargo.toml":"83d8a251c234fe53394e56a30d1ffcf305490ecd7ba21483dc647c84b946d334","LICENSE":"268872b9816f90fd8e85db5a28d33f8150ebb8dd016653fb39ef1f94f2686bc5","README.md":"a410bc2f5dcbde499c0cd299c2620bc8111e3c5b3fccdd9e2d85caf3c24fdab3","src/condcodes.rs":"b8d433b2217b86e172d25b6c65a3ce0cc8ca221062cad1b28b0c78d2159fbda9","src/constant_hash.rs":"ffc619f45aad62c6fdcb83553a05879691a72e9a0103375b2d6cc12d52cf72d0","src/constants.rs":"fed03a10a6316e06aa174091db6e7d1fbb5f73c82c31193012ec5ab52f1c603a","src/isa/mod.rs":"428a950eca14acbe783899ccb1aecf15027f8cbe205578308ebde203d10535f3","src/isa/x86/encoding_bits.rs":"4b71d7d8cda31497d11b3747deec9d905e6e7bb2f8de1f4d3eb2f5557ab5e395","src/isa/x86/mod.rs":"01ef4e4d7437f938badbe2137892183c1ac684da0f68a5bec7e06aad34f43b9b","src/lib.rs":"8a6cd81c1b73fc218682433d55f4be860684e3527b7dcac38cdb3f4d6f959a84"},"package":null}
{"files":{"Cargo.toml":"3ccd54d6e48bdc6b58a65e52f139cf8edc2038b6c62aec12be6daab19d61d185","LICENSE":"268872b9816f90fd8e85db5a28d33f8150ebb8dd016653fb39ef1f94f2686bc5","README.md":"a410bc2f5dcbde499c0cd299c2620bc8111e3c5b3fccdd9e2d85caf3c24fdab3","src/condcodes.rs":"b8d433b2217b86e172d25b6c65a3ce0cc8ca221062cad1b28b0c78d2159fbda9","src/constant_hash.rs":"ffc619f45aad62c6fdcb83553a05879691a72e9a0103375b2d6cc12d52cf72d0","src/constants.rs":"fed03a10a6316e06aa174091db6e7d1fbb5f73c82c31193012ec5ab52f1c603a","src/isa/mod.rs":"428a950eca14acbe783899ccb1aecf15027f8cbe205578308ebde203d10535f3","src/isa/x86/encoding_bits.rs":"b180e4319e10f0e1c229cd9a63b5af8bb63be86f8f98cf1aef203355746f51cd","src/isa/x86/mod.rs":"01ef4e4d7437f938badbe2137892183c1ac684da0f68a5bec7e06aad34f43b9b","src/lib.rs":"91f26f998f11fb9cb74d2ec171424e29badd417beef023674850ace57149c656"},"package":null}

Просмотреть файл

@ -8,6 +8,4 @@ repository = "https://github.com/bytecodealliance/cranelift"
readme = "README.md"
edition = "2018"
[dependencies]
packed_struct = "0.3"
packed_struct_codegen = "0.3"
# Since this is a shared dependency of several packages, please strive to keep this dependency-free.

Просмотреть файл

@ -1,6 +1,6 @@
//! Provides a named interface to the `u16` Encoding bits.
use packed_struct::prelude::*;
use std::ops::RangeInclusive;
/// Named interface to the `u16` Encoding bits, representing an opcode.
///
@ -27,61 +27,103 @@ use packed_struct::prelude::*;
/// 11: 0F 3A <op> (Op3/Mp3)
/// 12-14 rrr, opcode bits for the ModR/M byte for certain opcodes.
/// 15: REX.W bit (or VEX.W/E)
#[derive(Copy, Clone, PartialEq, PackedStruct)]
#[packed_struct(size_bytes = "2", bit_numbering = "lsb0")]
pub struct EncodingBits {
/// Instruction opcode byte, without the prefix.
#[packed_field(bits = "0:7")]
pub opcode_byte: u8,
/// Prefix kind for the instruction, as an enum.
#[packed_field(bits = "8:11", ty = "enum")]
pub prefix: OpcodePrefix,
/// Bits for the ModR/M byte for certain opcodes.
#[packed_field(bits = "12:14")]
pub rrr: Integer<u8, packed_bits::Bits3>,
/// REX.W bit (or VEX.W/E).
#[packed_field(bits = "15")]
pub rex_w: Integer<u8, packed_bits::Bits1>,
}
#[derive(Copy, Clone, PartialEq)]
pub struct EncodingBits(u16);
const OPCODE: RangeInclusive<u16> = 0..=7;
const OPCODE_PREFIX: RangeInclusive<u16> = 8..=11; // Includes pp and mm.
const RRR: RangeInclusive<u16> = 12..=14;
const REX_W: RangeInclusive<u16> = 15..=15;
impl From<u16> for EncodingBits {
fn from(bits: u16) -> EncodingBits {
let bytes: [u8; 2] = [((bits >> 8) & 0xff) as u8, (bits & 0xff) as u8];
EncodingBits::unpack(&bytes).expect("failed creating EncodingBits")
fn from(bits: u16) -> Self {
Self(bits)
}
}
impl EncodingBits {
/// Constructs a new EncodingBits from parts.
pub fn new(op_bytes: &[u8], rrr: u16, rex_w: u16) -> Self {
EncodingBits {
opcode_byte: op_bytes[op_bytes.len() - 1],
prefix: OpcodePrefix::from_opcode(op_bytes),
rrr: (rrr as u8).into(),
rex_w: (rex_w as u8).into(),
}
assert!(
!op_bytes.is_empty(),
"op_bytes must include at least one opcode byte"
);
let mut new = Self::from(0);
let last_byte = op_bytes[op_bytes.len() - 1];
new.write(OPCODE, last_byte as u16);
let prefix: u8 = OpcodePrefix::from_opcode(op_bytes).into();
new.write(OPCODE_PREFIX, prefix as u16);
new.write(RRR, rrr);
new.write(REX_W, rex_w);
new
}
/// Returns the raw bits.
#[inline]
pub fn bits(self) -> u16 {
let bytes: [u8; 2] = self.pack();
((bytes[0] as u16) << 8) | (bytes[1] as u16)
self.0
}
/// Convenience method for writing bits to specific range.
#[inline]
fn write(&mut self, range: RangeInclusive<u16>, value: u16) {
assert!(ExactSizeIterator::len(&range) > 0);
let size = range.end() - range.start() + 1; // Calculate the number of bits in the range.
let mask = (1 << size) - 1; // Generate a bit mask.
debug_assert!(
value <= mask,
"The written value should have fewer than {} bits.",
size
);
let mask_complement = !(mask << *range.start()); // Create the bitwise complement for the clear mask.
self.0 &= mask_complement; // Clear the bits in `range`.
let value = (value & mask) << *range.start(); // Place the value in the correct location.
self.0 |= value; // Modify the bits in `range`.
}
/// Convenience method for reading bits from a specific range.
#[inline]
fn read(self, range: RangeInclusive<u16>) -> u8 {
assert!(ExactSizeIterator::len(&range) > 0);
let size = range.end() - range.start() + 1; // Calculate the number of bits in the range.
debug_assert!(size <= 8, "This structure expects ranges of at most 8 bits");
let mask = (1 << size) - 1; // Generate a bit mask.
((self.0 >> *range.start()) & mask) as u8
}
/// Instruction opcode byte, without the prefix.
#[inline]
pub fn opcode_byte(self) -> u8 {
self.read(OPCODE)
}
/// Prefix kind for the instruction, as an enum.
#[inline]
pub fn prefix(self) -> OpcodePrefix {
OpcodePrefix::from(self.read(OPCODE_PREFIX))
}
/// Extracts the PP bits of the OpcodePrefix.
#[inline]
pub fn pp(self) -> u8 {
self.prefix.to_primitive() & 0x3
self.prefix().to_primitive() & 0x3
}
/// Extracts the MM bits of the OpcodePrefix.
#[inline]
pub fn mm(self) -> u8 {
(self.prefix.to_primitive() >> 2) & 0x3
(self.prefix().to_primitive() >> 2) & 0x3
}
/// Bits for the ModR/M byte for certain opcodes.
#[inline]
pub fn rrr(self) -> u8 {
self.read(RRR)
}
/// REX.W bit (or VEX.W/E).
#[inline]
pub fn rex_w(self) -> u8 {
self.read(REX_W)
}
}
@ -90,55 +132,103 @@ impl EncodingBits {
/// The prefix type occupies four of the EncodingBits.
#[allow(non_camel_case_types)]
#[allow(missing_docs)]
#[derive(Copy, Clone, Debug, Eq, PartialEq, PrimitiveEnum_u8)]
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum OpcodePrefix {
Op1 = 0b0000,
Mp1_66 = 0b0001,
Mp1_f3 = 0b0010,
Mp1_f2 = 0b0011,
Op2_0f = 0b0100,
Mp2_66_0f = 0b0101,
Mp2_f3_0f = 0b0110,
Mp2_f2_0f = 0b0111,
Op3_0f_38 = 0b1000,
Mp3_66_0f_38 = 0b1001,
Mp3_f3_0f_38 = 0b1010,
Mp3_f2_0f_38 = 0b1011,
Op3_0f_3a = 0b1100,
Mp3_66_0f_3a = 0b1101,
Mp3_f3_0f_3a = 0b1110,
Mp3_f2_0f_3a = 0b1111,
Op1,
Mp1_66,
Mp1_f3,
Mp1_f2,
Op2_0f,
Mp2_66_0f,
Mp2_f3_0f,
Mp2_f2_0f,
Op3_0f_38,
Mp3_66_0f_38,
Mp3_f3_0f_38,
Mp3_f2_0f_38,
Op3_0f_3a,
Mp3_66_0f_3a,
Mp3_f3_0f_3a,
Mp3_f2_0f_3a,
}
impl From<u8> for OpcodePrefix {
fn from(n: u8) -> OpcodePrefix {
OpcodePrefix::from_primitive(n).expect("invalid OpcodePrefix")
fn from(n: u8) -> Self {
use OpcodePrefix::*;
match n {
0b0000 => Op1,
0b0001 => Mp1_66,
0b0010 => Mp1_f3,
0b0011 => Mp1_f2,
0b0100 => Op2_0f,
0b0101 => Mp2_66_0f,
0b0110 => Mp2_f3_0f,
0b0111 => Mp2_f2_0f,
0b1000 => Op3_0f_38,
0b1001 => Mp3_66_0f_38,
0b1010 => Mp3_f3_0f_38,
0b1011 => Mp3_f2_0f_38,
0b1100 => Op3_0f_3a,
0b1101 => Mp3_66_0f_3a,
0b1110 => Mp3_f3_0f_3a,
0b1111 => Mp3_f2_0f_3a,
_ => panic!("invalid opcode prefix"),
}
}
}
impl Into<u8> for OpcodePrefix {
fn into(self) -> u8 {
use OpcodePrefix::*;
match self {
Op1 => 0b0000,
Mp1_66 => 0b0001,
Mp1_f3 => 0b0010,
Mp1_f2 => 0b0011,
Op2_0f => 0b0100,
Mp2_66_0f => 0b0101,
Mp2_f3_0f => 0b0110,
Mp2_f2_0f => 0b0111,
Op3_0f_38 => 0b1000,
Mp3_66_0f_38 => 0b1001,
Mp3_f3_0f_38 => 0b1010,
Mp3_f2_0f_38 => 0b1011,
Op3_0f_3a => 0b1100,
Mp3_66_0f_3a => 0b1101,
Mp3_f3_0f_3a => 0b1110,
Mp3_f2_0f_3a => 0b1111,
}
}
}
impl OpcodePrefix {
/// Convert an opcode prefix to a `u8`; this is a convenience proxy for `Into<u8>`.
fn to_primitive(self) -> u8 {
self.into()
}
/// Extracts the OpcodePrefix from the opcode.
pub fn from_opcode(op_bytes: &[u8]) -> OpcodePrefix {
pub fn from_opcode(op_bytes: &[u8]) -> Self {
assert!(!op_bytes.is_empty(), "at least one opcode byte");
let prefix_bytes = &op_bytes[..op_bytes.len() - 1];
match prefix_bytes {
[] => OpcodePrefix::Op1,
[0x66] => OpcodePrefix::Mp1_66,
[0xf3] => OpcodePrefix::Mp1_f3,
[0xf2] => OpcodePrefix::Mp1_f2,
[0x0f] => OpcodePrefix::Op2_0f,
[0x66, 0x0f] => OpcodePrefix::Mp2_66_0f,
[0xf3, 0x0f] => OpcodePrefix::Mp2_f3_0f,
[0xf2, 0x0f] => OpcodePrefix::Mp2_f2_0f,
[0x0f, 0x38] => OpcodePrefix::Op3_0f_38,
[0x66, 0x0f, 0x38] => OpcodePrefix::Mp3_66_0f_38,
[0xf3, 0x0f, 0x38] => OpcodePrefix::Mp3_f3_0f_38,
[0xf2, 0x0f, 0x38] => OpcodePrefix::Mp3_f2_0f_38,
[0x0f, 0x3a] => OpcodePrefix::Op3_0f_3a,
[0x66, 0x0f, 0x3a] => OpcodePrefix::Mp3_66_0f_3a,
[0xf3, 0x0f, 0x3a] => OpcodePrefix::Mp3_f3_0f_3a,
[0xf2, 0x0f, 0x3a] => OpcodePrefix::Mp3_f2_0f_3a,
[] => Self::Op1,
[0x66] => Self::Mp1_66,
[0xf3] => Self::Mp1_f3,
[0xf2] => Self::Mp1_f2,
[0x0f] => Self::Op2_0f,
[0x66, 0x0f] => Self::Mp2_66_0f,
[0xf3, 0x0f] => Self::Mp2_f3_0f,
[0xf2, 0x0f] => Self::Mp2_f2_0f,
[0x0f, 0x38] => Self::Op3_0f_38,
[0x66, 0x0f, 0x38] => Self::Mp3_66_0f_38,
[0xf3, 0x0f, 0x38] => Self::Mp3_f3_0f_38,
[0xf2, 0x0f, 0x38] => Self::Mp3_f2_0f_38,
[0x0f, 0x3a] => Self::Op3_0f_3a,
[0x66, 0x0f, 0x3a] => Self::Mp3_66_0f_3a,
[0xf3, 0x0f, 0x3a] => Self::Mp3_f3_0f_3a,
[0xf2, 0x0f, 0x3a] => Self::Mp3_f2_0f_3a,
_ => {
panic!("unexpected opcode sequence: {:?}", op_bytes);
}
@ -193,38 +283,93 @@ mod tests {
test_roundtrip(OpcodePrefix::Mp3_f2_0f_3a);
}
#[test]
fn prefix_to_name() {
assert_eq!(OpcodePrefix::Op1.recipe_name_prefix(), "Op1");
assert_eq!(OpcodePrefix::Op2_0f.recipe_name_prefix(), "Op2");
assert_eq!(OpcodePrefix::Op3_0f_38.recipe_name_prefix(), "Op3");
assert_eq!(OpcodePrefix::Mp1_66.recipe_name_prefix(), "Mp1");
assert_eq!(OpcodePrefix::Mp2_66_0f.recipe_name_prefix(), "Mp2");
assert_eq!(OpcodePrefix::Mp3_66_0f_3a.recipe_name_prefix(), "Mp3");
}
/// Tests that the opcode_byte is the lower of the EncodingBits.
#[test]
fn encodingbits_opcode_byte() {
let enc = EncodingBits::from(0x00ff);
assert_eq!(enc.opcode_byte, 0xff);
assert_eq!(enc.prefix.to_primitive(), 0x0);
assert_eq!(u8::from(enc.rrr), 0x0);
assert_eq!(u8::from(enc.rex_w), 0x0);
assert_eq!(enc.opcode_byte(), 0xff);
assert_eq!(enc.prefix().to_primitive(), 0x0);
assert_eq!(enc.rrr(), 0x0);
assert_eq!(enc.rex_w(), 0x0);
let enc = EncodingBits::from(0x00cd);
assert_eq!(enc.opcode_byte, 0xcd);
assert_eq!(enc.opcode_byte(), 0xcd);
}
/// Tests that the OpcodePrefix is encoded correctly.
#[test]
fn encodingbits_prefix() {
let enc = EncodingBits::from(0x0c00);
assert_eq!(enc.opcode_byte, 0x00);
assert_eq!(enc.prefix.to_primitive(), 0xc);
assert_eq!(enc.prefix, OpcodePrefix::Op3_0f_3a);
assert_eq!(u8::from(enc.rrr), 0x0);
assert_eq!(u8::from(enc.rex_w), 0x0);
assert_eq!(enc.opcode_byte(), 0x00);
assert_eq!(enc.prefix().to_primitive(), 0xc);
assert_eq!(enc.prefix(), OpcodePrefix::Op3_0f_3a);
assert_eq!(enc.rrr(), 0x0);
assert_eq!(enc.rex_w(), 0x0);
}
/// Tests that the PP bits are encoded correctly.
#[test]
fn encodingbits_pp() {
let enc = EncodingBits::from(0x0300);
assert_eq!(enc.opcode_byte(), 0x0);
assert_eq!(enc.pp(), 0x3);
assert_eq!(enc.mm(), 0x0);
assert_eq!(enc.rrr(), 0x0);
assert_eq!(enc.rex_w(), 0x0);
}
/// Tests that the MM bits are encoded correctly.
#[test]
fn encodingbits_mm() {
let enc = EncodingBits::from(0x0c00);
assert_eq!(enc.opcode_byte(), 0x0);
assert_eq!(enc.pp(), 0x00);
assert_eq!(enc.mm(), 0x3);
assert_eq!(enc.rrr(), 0x0);
assert_eq!(enc.rex_w(), 0x0);
}
/// Tests that the ModR/M bits are encoded correctly.
#[test]
fn encodingbits_rrr() {
let enc = EncodingBits::from(0x5000);
assert_eq!(enc.opcode_byte(), 0x0);
assert_eq!(enc.prefix().to_primitive(), 0x0);
assert_eq!(enc.rrr(), 0x5);
assert_eq!(enc.rex_w(), 0x0);
}
/// Tests that the REX.W bit is encoded correctly.
#[test]
fn encodingbits_rex_w() {
let enc = EncodingBits::from(0x8000);
assert_eq!(enc.opcode_byte, 0x00);
assert_eq!(enc.prefix.to_primitive(), 0x0);
assert_eq!(u8::from(enc.rrr), 0x0);
assert_eq!(u8::from(enc.rex_w), 0x1);
assert_eq!(enc.opcode_byte(), 0x00);
assert_eq!(enc.prefix().to_primitive(), 0x0);
assert_eq!(enc.rrr(), 0x0);
assert_eq!(enc.rex_w(), 0x1);
}
/// Tests setting and unsetting a bit using EncodingBits::write.
#[test]
fn encodingbits_flip() {
let mut bits = EncodingBits::from(0);
let range = 2..=2;
bits.write(range.clone(), 1);
assert_eq!(bits.bits(), 0b100);
bits.write(range, 0);
assert_eq!(bits.bits(), 0b000);
}
/// Tests a round-trip of EncodingBits from/to a u16 (hardcoded endianness).
@ -233,4 +378,26 @@ mod tests {
let bits: u16 = 0x1234;
assert_eq!(EncodingBits::from(bits).bits(), bits);
}
#[test]
// I purposely want to divide the bits using the ranges defined above.
#[allow(clippy::inconsistent_digit_grouping)]
fn encodingbits_construction() {
assert_eq!(
EncodingBits::new(&[0x66, 0x40], 5, 1).bits(),
0b1_101_0001_01000000 // 1 = rex_w, 101 = rrr, 0001 = prefix, 01000000 = opcode
);
}
#[test]
#[should_panic]
fn encodingbits_panics_at_write_to_invalid_range() {
EncodingBits::from(0).write(1..=0, 42);
}
#[test]
#[should_panic]
fn encodingbits_panics_at_read_to_invalid_range() {
EncodingBits::from(0).read(1..=0);
}
}

Просмотреть файл

@ -20,10 +20,6 @@
)
)]
use packed_struct;
#[macro_use]
extern crate packed_struct_codegen;
pub mod condcodes;
pub mod constant_hash;
pub mod constants;

Различия файлов скрыты, потому что одна или несколько строк слишком длинны

Просмотреть файл

@ -67,7 +67,7 @@ fn rex3(rm: RegUnit, reg: RegUnit, index: RegUnit) -> u8 {
// extracted from `bits`.
fn rex_prefix<CS: CodeSink + ?Sized>(bits: u16, rex: u8, sink: &mut CS) {
debug_assert_eq!(rex & 0xf8, BASE_REX);
let w = EncodingBits::from(bits).rex_w;
let w = EncodingBits::from(bits).rex_w();
sink.put1(rex | (u8::from(w) << 3));
}

Просмотреть файл

@ -1 +1 @@
{"files":{"Cargo.toml":"09183eb6dfd8a6d33a033b46c32f8f2eca2f052af135a7b0599ecc8d9409f38c","LICENSE":"268872b9816f90fd8e85db5a28d33f8150ebb8dd016653fb39ef1f94f2686bc5","README.md":"f46f9c5df1b10bad0e87d9c2ad9f5e65bbb6749ac8843cd80ec357daa3b22c3e","src/code_translator.rs":"e1c17cf3ddbbcfb3b3edc2cf560e9dacf7795f6f22934549bcdc72d4be83d2fe","src/environ/dummy.rs":"b918e7c7afd3be41d7b2b8587a1cb0119640508f475fc086f949a96d01b783d6","src/environ/mod.rs":"b6f33f619090ff497b4e22150d77a290f259716374ac2e377b73c47cd1dafe85","src/environ/spec.rs":"770b494bae5c50d6cc9e5e825e0455682f0d7e83373af0f231267ea394cc5aba","src/func_translator.rs":"0c18f09fa533b67f41ff1da9893eac8feabe9e7847620a5aa2c4ad0f2185afbb","src/lib.rs":"0dbbb3d5088799c3aaa94b083ca0c2f09906bd8fb36e9c0dd200b8122c50a8b6","src/module_translator.rs":"5e1bf9471d6f4f317bb2fb9b8697b5b08f7950520017c2869e69133e7f17a2b7","src/sections_translator.rs":"e9af588789b00c4e91e7a4bd87c7ea3c745dc1a5e00fc1f758ee8d9a46902a63","src/state/func_state.rs":"8394eb9b446fc286222b806c55689c19beb0a5b6c78c1d8dee7c19b0d5693661","src/state/mod.rs":"20014cb93615467b4d20321b52f67f66040417efcaa739a4804093bb559eed19","src/state/module_state.rs":"2f299b043deb806b48583fe54bbb46708f7d8a1454b7be0eb285568064e5a7f9","src/translation_utils.rs":"e3b56d34ab989e9e81a8b30b52fab11c886bfcb783b9c091d1a72ad35fdbe0d0","tests/wasm_testsuite.rs":"c1160bde8b0bd5d7d2eb5b6a066b60025dcebf758b1794fa32235c07e2fb2be2"},"package":null}
{"files":{"Cargo.toml":"09183eb6dfd8a6d33a033b46c32f8f2eca2f052af135a7b0599ecc8d9409f38c","LICENSE":"268872b9816f90fd8e85db5a28d33f8150ebb8dd016653fb39ef1f94f2686bc5","README.md":"f46f9c5df1b10bad0e87d9c2ad9f5e65bbb6749ac8843cd80ec357daa3b22c3e","src/code_translator.rs":"e60d0d35e54f2403101c4ed8af6202ae2d37cbaf63ebe21ccef6ef9ffa77ad72","src/environ/dummy.rs":"b918e7c7afd3be41d7b2b8587a1cb0119640508f475fc086f949a96d01b783d6","src/environ/mod.rs":"b6f33f619090ff497b4e22150d77a290f259716374ac2e377b73c47cd1dafe85","src/environ/spec.rs":"770b494bae5c50d6cc9e5e825e0455682f0d7e83373af0f231267ea394cc5aba","src/func_translator.rs":"0c18f09fa533b67f41ff1da9893eac8feabe9e7847620a5aa2c4ad0f2185afbb","src/lib.rs":"0dbbb3d5088799c3aaa94b083ca0c2f09906bd8fb36e9c0dd200b8122c50a8b6","src/module_translator.rs":"5e1bf9471d6f4f317bb2fb9b8697b5b08f7950520017c2869e69133e7f17a2b7","src/sections_translator.rs":"e9af588789b00c4e91e7a4bd87c7ea3c745dc1a5e00fc1f758ee8d9a46902a63","src/state/func_state.rs":"8394eb9b446fc286222b806c55689c19beb0a5b6c78c1d8dee7c19b0d5693661","src/state/mod.rs":"20014cb93615467b4d20321b52f67f66040417efcaa739a4804093bb559eed19","src/state/module_state.rs":"2f299b043deb806b48583fe54bbb46708f7d8a1454b7be0eb285568064e5a7f9","src/translation_utils.rs":"e3b56d34ab989e9e81a8b30b52fab11c886bfcb783b9c091d1a72ad35fdbe0d0","tests/wasm_testsuite.rs":"c1160bde8b0bd5d7d2eb5b6a066b60025dcebf758b1794fa32235c07e2fb2be2"},"package":null}

Просмотреть файл

@ -1093,15 +1093,16 @@ pub fn translate_operator<FE: FuncEnvironment + ?Sized>(
// the v128.const is typed in CLIF as a I8x16 but raw_bitcast to a different type before use
state.push1(value)
}
Operator::I8x16Splat
| Operator::I16x8Splat
| Operator::I32x4Splat
Operator::I8x16Splat | Operator::I16x8Splat => {
let reduced = builder.ins().ireduce(type_of(op).lane_type(), state.pop1());
let splatted = builder.ins().splat(type_of(op), reduced);
state.push1(splatted)
}
Operator::I32x4Splat
| Operator::I64x2Splat
| Operator::F32x4Splat
| Operator::F64x2Splat => {
let value_to_splat = state.pop1();
let ty = type_of(op);
let splatted = builder.ins().splat(ty, value_to_splat);
let splatted = builder.ins().splat(type_of(op), state.pop1());
state.push1(splatted)
}
Operator::I8x16ExtractLaneS { lane } | Operator::I16x8ExtractLaneS { lane } => {
@ -1226,14 +1227,16 @@ pub fn translate_operator<FE: FuncEnvironment + ?Sized>(
| Operator::I16x8AnyTrue
| Operator::I32x4AnyTrue
| Operator::I64x2AnyTrue => {
let bool_result = builder.ins().vany_true(state.pop1());
let a = pop1_with_bitcast(state, type_of(op), builder);
let bool_result = builder.ins().vany_true(a);
state.push1(builder.ins().bint(I32, bool_result))
}
Operator::I8x16AllTrue
| Operator::I16x8AllTrue
| Operator::I32x4AllTrue
| Operator::I64x2AllTrue => {
let bool_result = builder.ins().vall_true(state.pop1());
let a = pop1_with_bitcast(state, type_of(op), builder);
let bool_result = builder.ins().vall_true(a);
state.push1(builder.ins().bint(I32, bool_result))
}
Operator::I8x16Eq | Operator::I16x8Eq | Operator::I32x4Eq => {

Просмотреть файл

@ -1 +0,0 @@
{"files":{"Cargo.toml":"b9985d5ae03d3446c08cbc52832854a3c85ecfc09c0a2a15be103e5c8bfc25a0","build.rs":"b44f2ed845dbf7b0b4e5e054da5309323aedbd5ac3998ab9dcc472c4b2b12efe","src/debug_fmt.rs":"abb789967fe9f6145a8c311d2eaeea78b179803e56b9cf14caf1fecb6cfda2c5","src/internal_prelude/mod.rs":"05867860b1da0d7f8a4b6b7b63807ce8ec9f9d8384292595420a7a222dba18e7","src/internal_prelude/no_std.rs":"f3d96c950aee6e1b3c6e978fee14c4535de973208a05ea4f6f60cb17207897c5","src/internal_prelude/std.rs":"f3507ba4765aa6fbeb39be10f653d37dce7fe20ab9b059b092ede764a71ca0f1","src/lib.rs":"3f897eaf453be7d12245b8d26660789216dac97f989b73ea4e37985ebe82f977","src/packing.rs":"af391b45fa7b42ab7b7bb63784805aed7c7509e4482f7a782dcfee40d2707367","src/primitive_enum.rs":"75f45915bea9a3394fe81c9bce2369353ff88264fa920fd711e7235ed3d0cf21","src/types_array.rs":"bc72e7c9b90ea71906b6792b284fde90a8c169c158f90fdd2cd9b46603033650","src/types_basic.rs":"8895bcf3921af4fbddf972ca3ca2798166769447ffa5b6a9a7a4f024bc0b4350","src/types_bits.rs":"d66ea815180ebeb92b5caa1a78b04d3e1218bc3dfa244fcefc624b6ff2b8206c","src/types_num.rs":"dbf9b7ab08b7b69a6a82357e4f12ddea37559ac20179cdd6e634e47db447e06b","src/types_reserved.rs":"8b18f8e195819cd492e14f41fd58a4de0a5419f152add85fa95f2f273d3852fc"},"package":"90caf80e74380d94f2aabc83edb900b49123b3132442fb147f9155c87a756281"}

35
third_party/rust/packed_struct/Cargo.toml поставляемый
Просмотреть файл

@ -1,35 +0,0 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g. crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package]
name = "packed_struct"
version = "0.3.0"
authors = ["Rudi Benkovic <rudi.benkovic@gmail.com>"]
build = "build.rs"
description = "Binary-level structure packing and unpacking generator"
homepage = "http://www.hashmismatch.net/libraries/packed-struct/"
readme = "../README.md"
keywords = ["enum", "packing", "serialization"]
categories = ["encoding"]
license = "MIT OR Apache-2.0"
repository = "https://github.com/hashmismatch/packed_struct.rs"
[dependencies.serde]
version = "1.0"
default-features = false
[dependencies.serde_derive]
version = "1.0"
[features]
alloc = []
default = ["std"]
std = ["serde/std"]

31
third_party/rust/packed_struct/build.rs поставляемый
Просмотреть файл

@ -1,31 +0,0 @@
// build.rs
use std::env;
use std::fs::File;
use std::io::Write;
use std::path::Path;
fn main() {
let out_dir = env::var("OUT_DIR").unwrap();
let dest_path = Path::new(&out_dir).join("generate_bytes_and_bits.rs");
let mut f = File::create(&dest_path).unwrap();
let up_to_bytes = 32;
// bytes
for i in 1..(up_to_bytes + 1) {
let b = format!("bytes_type!(Bytes{}, {});\r\n", i, i);
f.write_all(b.as_bytes()).unwrap();
}
// bits
for i in 1..(up_to_bytes * 8) {
let b = format!("bits_type!(Bits{}, {}, Bytes{}, {});\r\n", i, i, (i as f32 / 8.0).ceil() as usize, if (i % 8) == 0 {
"BitsFullBytes"
} else {
"BitsPartialBytes"
});
f.write_all(b.as_bytes()).unwrap();
}
}

Просмотреть файл

@ -1,177 +0,0 @@
//! Helper structures for runtime packing visualization.
use internal_prelude::v1::*;
#[cfg(any(feature="alloc", feature="std"))]
pub trait PackedStructDebug {
fn fmt_fields(&self, fmt: &mut Formatter) -> Result<(), FmtError>;
fn packed_struct_display_header() -> &'static str;
}
pub struct DebugBinaryByteSlice<'a> {
pub bits: &'a Range<usize>,
pub slice: &'a [u8]
}
impl<'a> fmt::Binary for DebugBinaryByteSlice<'a> {
fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
for i in self.bits.start..(self.bits.end + 1) {
let byte = i / 8;
let bit = i % 8;
let bit = 7 - bit;
let src_byte = self.slice[byte];
let src_bit = (src_byte & (1 << bit)) == (1 << bit);
let s = if src_bit { "1" } else { "0" };
try!(fmt.write_str(s));
}
Ok(())
}
}
pub struct DebugBitField<'a> {
pub name: Cow<'a, str>,
pub bits: Range<usize>,
pub display_value: Cow<'a, str>
}
pub fn packable_fmt_fields(f: &mut Formatter, packed_bytes: &[u8], fields: &[DebugBitField]) -> fmt::Result {
if fields.len() == 0 {
return Ok(());
}
let max_field_length_name = fields.iter().map(|x| x.name.len()).max().unwrap();
let max_bit_width = fields.iter().map(|x| x.bits.len()).max().unwrap();
if max_bit_width > 32 {
for field in fields {
try!(write!(f, "{name:>0$} | {base_value:?}\r\n",
max_field_length_name + 1,
base_value = field.display_value,
name = field.name
));
}
} else {
for field in fields {
let debug_binary = DebugBinaryByteSlice {
bits: &field.bits,
slice: packed_bytes
};
try!(write!(f, "{name:>0$} | bits {bits_start:>3}:{bits_end:<3} | 0b{binary_value:>0width_bits$b}{dummy:>0spaces$} | {base_value:?}\r\n",
max_field_length_name + 1,
base_value = field.display_value,
binary_value = debug_binary,
dummy = "",
bits_start = field.bits.start,
bits_end = field.bits.end,
width_bits = field.bits.len(),
spaces = (max_bit_width - field.bits.len()) as usize,
name = field.name
));
}
}
Ok(())
}
pub struct PackedStructDisplay<'a, P: 'a, B: 'a> {
pub packed_struct: &'a P,
pub packed_struct_packed: PhantomData<B>,
pub header: bool,
pub raw_decimal: bool,
pub raw_hex: bool,
pub raw_binary: bool,
pub fields: bool
}
impl<'a, P, B> PackedStructDisplay<'a, P, B> {
pub fn new(packed_struct: &'a P) -> Self {
PackedStructDisplay {
packed_struct: packed_struct,
packed_struct_packed: Default::default(),
header: true,
raw_decimal: true,
raw_hex: true,
raw_binary: true,
fields: true
}
}
}
use packing::{PackedStruct, PackedStructSlice};
impl<'a, P, B> fmt::Display for PackedStructDisplay<'a, P, B> where P: PackedStruct<B> + PackedStructSlice + PackedStructDebug {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
match self.packed_struct.pack_to_vec() {
Ok(packed) => {
let l = packed.len();
if self.header {
try!(f.write_str(P::packed_struct_display_header()));
try!(f.write_str("\r\n"));
try!(f.write_str("\r\n"));
}
// decimal
if self.raw_decimal {
try!(f.write_str("Decimal\r\n"));
try!(f.write_str("["));
for i in 0..l {
try!(write!(f, "{}", packed[i]));
if (i + 1) != l {
try!(f.write_str(", "));
}
}
try!(f.write_str("]"));
try!(f.write_str("\r\n"));
try!(f.write_str("\r\n"));
}
// hex
if self.raw_hex {
try!(f.write_str("Hex\r\n"));
try!(f.write_str("["));
for i in 0..l {
try!(write!(f, "0x{:X}", packed[i]));
if (i + 1) != l {
try!(f.write_str(", "));
}
}
try!(f.write_str("]"));
try!(f.write_str("\r\n"));
try!(f.write_str("\r\n"));
}
if self.raw_binary {
try!(f.write_str("Binary\r\n"));
try!(f.write_str("["));
for i in 0..l {
try!(write!(f, "0b{:08b}", packed[i]));
if (i + 1) != l {
try!(f.write_str(", "));
}
}
try!(f.write_str("]"));
try!(f.write_str("\r\n"));
try!(f.write_str("\r\n"));
}
if self.fields {
try!(self.packed_struct.fmt_fields(f));
}
},
Err(e) => {
write!(f, "Error packing for display: {:?}", e)?;
}
}
Ok(())
}
}

Просмотреть файл

@ -1,7 +0,0 @@
#[cfg(not(feature="std"))]
#[path = "no_std.rs"]
pub mod v1;
#[cfg(feature="std")]
#[path = "std.rs"]
pub mod v1;

Просмотреть файл

@ -1,20 +0,0 @@
pub use core::marker::PhantomData;
pub use core::iter;
pub use core::cell::RefCell;
pub use core::fmt;
pub use core::fmt::{Debug, Display};
pub use core::fmt::Write as FmtWrite;
pub use core::fmt::Error as FmtError;
pub use core::ops::Range;
pub use core::num::Wrapping;
pub use core::cmp::*;
pub use core::mem;
pub use core::intrinsics::write_bytes;
pub use core::ops::Deref;
pub use core::slice;
#[cfg(feature="alloc")]
pub use alloc::vec::Vec;
#[cfg(feature="alloc")]
pub use alloc::borrow::Cow;

Просмотреть файл

@ -1,24 +0,0 @@
pub use std::prelude::v1::*;
pub use std::cell::RefCell;
pub use std::rc::Rc;
pub use std::num::Wrapping;
pub use std::fmt;
pub use std::fmt::format as format_to_string;
pub use std::fmt::Formatter;
pub use std::fmt::{Debug, Display};
pub use std::fmt::Write as FmtWrite;
pub use std::fmt::Error as FmtError;
pub use std::mem;
pub use std::marker::PhantomData;
pub use std::ops::Range;
pub use std::cmp::{min, max};
pub use std::ptr::write_bytes;
pub use std::iter;
pub use std::borrow::Cow;
pub use std::str::FromStr;
pub use std::io;
pub use std::io::Write;
pub use std::sync::Arc;
pub use std::str::from_utf8;
pub use std::ops::Deref;
pub use std::slice;

347
third_party/rust/packed_struct/src/lib.rs поставляемый
Просмотреть файл

@ -1,347 +0,0 @@
//! Bit-level packing and unpacking for Rust
//! ===========================================
//!
//! [![Build Status](https://travis-ci.org/hashmismatch/packed_struct.rs.svg?branch=master)](https://travis-ci.org/hashmismatch/packed_struct.rs)
//!
//! [![Documentation](https://docs.rs/packed_struct/badge.svg)](https://docs.rs/packed_struct)
//!
//! # Introduction
//!
//! Packing and unpacking bit-level structures is usually a programming tasks that needlessly reinvents the wheel. This library provides
//! a meta-programming approach, using attributes to define fields and how they should be packed. The resulting trait implementations
//! provide safe packing, unpacking and runtime debugging formatters with per-field documentation generated for each structure.
//!
//! # Features
//!
//! * Plain Rust structures, decorated with attributes
//! * MSB or LSB integers of user-defined bit widths
//! * Primitive enum code generation helper
//! * MSB0 or LSB0 bit positioning
//! * Documents the field's packing table
//! * Runtime packing visualization
//! * Nested packed types
//! * Arrays of packed structures as fields
//! * Reserved fields, their bits are always 0 or 1
//!
//! # Sample usage
//!
//! ## Cargo.toml
//!
//! ```toml
//! [dependencies]
//! packed_struct = "0.3"
//! packed_struct_codegen = "0.3"
//! ```
//! ## Including the library and the code generator
//!
//! ```rust
//! extern crate packed_struct;
//! #[macro_use]
//! extern crate packed_struct_codegen;
//! # fn main() {
//! # }
//! ```
//!
//! ## Example of a single-byte structure, with a 3 bit integer, primitive enum and a bool field.
//!
//! ```rust
//! extern crate packed_struct;
//! #[macro_use] extern crate packed_struct_codegen;
//!
//! use packed_struct::prelude::*;
//!
//! #[derive(PackedStruct)]
//! #[packed_struct(bit_numbering="msb0")]
//! pub struct TestPack {
//! #[packed_field(bits="0..=2")]
//! tiny_int: Integer<u8, packed_bits::Bits3>,
//! #[packed_field(bits="3..=4", ty="enum")]
//! mode: SelfTestMode,
//! #[packed_field(bits="7")]
//! enabled: bool
//! }
//!
//! #[derive(PrimitiveEnum_u8, Clone, Copy, Debug, PartialEq)]
//! pub enum SelfTestMode {
//! NormalMode = 0,
//! PositiveSignSelfTest = 1,
//! NegativeSignSelfTest = 2,
//! DebugMode = 3,
//! }
//!
//! fn main() {
//! let test = TestPack {
//! tiny_int: 5.into(),
//! mode: SelfTestMode::DebugMode,
//! enabled: true
//! };
//!
//! let packed = test.pack();
//! assert_eq!([0b10111001], packed);
//!
//! let unpacked = TestPack::unpack(&packed).unwrap();
//! assert_eq!(*unpacked.tiny_int, 5);
//! assert_eq!(unpacked.mode, SelfTestMode::DebugMode);
//! assert_eq!(unpacked.enabled, true);
//! }
//! ```
//!
//! # Packing attributes
//!
//! ## Syntax
//!
//! ```rust
//! extern crate packed_struct;
//! #[macro_use] extern crate packed_struct_codegen;
//!
//! #[derive(PackedStruct)]
//! #[packed_struct(attr1="val", attr2="val")]
//! pub struct Structure {
//! #[packed_field(attr1="val", attr2="val")]
//! field: u8
//! }
//! # fn main() {
//! # }
//! ```
//!
//! ## Per-structure attributes
//!
//! Attribute | Values | Comment
//! :--|:--|:--
//! ```size_bytes``` | ```1``` ... n | Size of the packed byte stream
//! ```bit_numbering``` | ```msb0``` or ```lsb0``` | Bit numbering for bit positioning of fields. Required if the bits attribute field is used.
//! ```endian``` | ```msb``` or ```lsb``` | Default integer endianness
//!
//! ## Per-field attributes
//!
//! Attribute | Values | Comment
//! :--|:--|:--
//! ```bits``` | ```0```, ```0..1```, ... | Position of the field in the packed structure. Three modes are supported: a single bit, the starting bit, or a range of bits. See details below.
//! ```bytes``` | ```0```, ```0..1```, ... | Same as above, multiplied by 8.
//! ```size_bits``` | ```1```, ... | Specifies the size of the packed structure. Mandatory for certain types. Specifying a range of bits like ```bits="0..2"``` can substite the required usage of ```size_bits```.
//! ```size_bytes``` | ```1```, ... | Same as above, multiplied by 8.
//! ```element_size_bits``` | ```1```, ... | For packed arrays, specifies the size of a single element of the array. Explicitly stating the size of the entire array can substite the usage of this attribute.
//! ```element_size_bytes``` | ```1```, ... | Same as above, multiplied by 8.
//! ```ty``` | ```enum``` | Packing helper for primitive enums.
//! ```endian``` | ```msb``` or ```lsb``` | Integer endianness. Applies to u16/i16 and larger types.
//!
//! ## Bit and byte positioning
//!
//! Used for either ```bits``` or ```bytes``` on fields. The examples are for MSB0 positioning.
//!
//! Value | Comment
//! :--|:--
//! ```0``` | A single bit or byte
//! ```0..```, ```0:``` | The field starts at bit zero
//! ```0..2``` | Exclusive range, bits zero and one
//! ```0:1```, ```0..=1``` | Inclusive range, bits zero and one
//!
//! # More examples
//!
//! ## Mixed endian integers
//!
//! ```rust
//! extern crate packed_struct;
//! #[macro_use] extern crate packed_struct_codegen;
//!
//! use packed_struct::prelude::*;
//!
//! #[derive(PackedStruct)]
//! pub struct EndianExample {
//! #[packed_field(endian="lsb")]
//! int1: u16,
//! #[packed_field(endian="msb")]
//! int2: i32
//! }
//!
//! fn main() {
//! let example = EndianExample {
//! int1: 0xBBAA,
//! int2: 0x11223344
//! };
//!
//! let packed = example.pack();
//! assert_eq!([0xAA, 0xBB, 0x11, 0x22, 0x33, 0x44], packed);
//! }
//! ```
//!
//! ## 24 bit LSB integers
//!
//! ```rust
//! extern crate packed_struct;
//! #[macro_use] extern crate packed_struct_codegen;
//!
//! use packed_struct::prelude::*;
//!
//! #[derive(PackedStruct)]
//! #[packed_struct(endian="lsb")]
//! pub struct LsbIntExample {
//! int1: Integer<u32, packed_bits::Bits24>,
//! }
//!
//! fn main() {
//! let example = LsbIntExample {
//! int1: 0xCCBBAA.into()
//! };
//!
//! let packed = example.pack();
//! assert_eq!([0xAA, 0xBB, 0xCC], packed);
//! }
//! ```
//!
//! ## Nested packed types within arrays
//!
//! ```rust
//! extern crate packed_struct;
//! #[macro_use] extern crate packed_struct_codegen;
//!
//! use packed_struct::prelude::*;
//!
//! #[derive(PackedStruct, Default, Debug, PartialEq)]
//! #[packed_struct(bit_numbering="msb0")]
//! pub struct TinyFlags {
//! _reserved: ReservedZero<packed_bits::Bits4>,
//! flag1: bool,
//! val1: Integer<u8, packed_bits::Bits2>,
//! flag2: bool
//! }
//!
//! #[derive(PackedStruct, Debug, PartialEq)]
//! pub struct Settings {
//! #[packed_field(element_size_bits="4")]
//! values: [TinyFlags; 4]
//! }
//!
//! fn main() {
//! let example = Settings {
//! values: [
//! TinyFlags { flag1: true, val1: 1.into(), flag2: false, .. TinyFlags::default() },
//! TinyFlags { flag1: true, val1: 2.into(), flag2: true, .. TinyFlags::default() },
//! TinyFlags { flag1: false, val1: 3.into(), flag2: false, .. TinyFlags::default() },
//! TinyFlags { flag1: true, val1: 0.into(), flag2: false, .. TinyFlags::default() },
//! ]
//! };
//!
//! let packed = example.pack();
//! let unpacked = Settings::unpack(&packed).unwrap();
//!
//! assert_eq!(example, unpacked);
//! }
//! ```
//!
//! # Primitive enums with simple discriminants
//!
//! Supported backing integer types: ```u8```, ```u16```, ```u32```, ```u64```, ```i8```, ```i16```, ```i32```, ```i64```.
//!
//! Explicit or implicit backing type:
//!
//! ```rust
//! extern crate packed_struct;
//! #[macro_use] extern crate packed_struct_codegen;
//!
//! #[derive(PrimitiveEnum, Clone, Copy)]
//! pub enum ImplicitType {
//! VariantMin = 0,
//! VariantMax = 255
//! }
//!
//! #[derive(PrimitiveEnum_i16, Clone, Copy)]
//! pub enum ExplicitType {
//! VariantMin = -32768,
//! VariantMax = 32767
//! }
//!
//! # fn main() {}
//! ```
//!
//! # Primitive enum packing with support for catch-all unknown values
//!
//! ```rust
//! # use packed_struct::prelude::*;
//! extern crate packed_struct;
//! #[macro_use] extern crate packed_struct_codegen;
//!
//! #[derive(PrimitiveEnum_u8, Debug, Clone, Copy)]
//! pub enum Field {
//! A = 1,
//! B = 2,
//! C = 3
//! }
//!
//! #[derive(PackedStruct, Debug, PartialEq)]
//! #[packed_struct(bit_numbering="msb0")]
//! pub struct Register {
//! #[packed_field(bits="0..4", ty="enum")]
//! field: EnumCatchAll<Field>
//! }
//!
//! # fn main() {}
//! ```
#![cfg_attr(not(feature = "std"), no_std)]
#![cfg_attr(feature="alloc", feature(alloc))]
#[cfg(feature="alloc")]
#[macro_use]
extern crate alloc;
extern crate serde;
#[macro_use] extern crate serde_derive;
mod internal_prelude;
#[macro_use]
mod packing;
mod primitive_enum;
pub use primitive_enum::*;
#[cfg(any(feature="alloc", feature="std"))]
pub mod debug_fmt;
mod types_array;
mod types_basic;
mod types_bits;
mod types_num;
mod types_reserved;
/// Implementations and wrappers for various packing types.
pub mod types {
    // Core packable primitives: bool, u8, i8 and the unit type.
    pub use super::types_basic::*;
    /// Types that specify the exact number of bits a packed integer should occupy.
    pub mod bits {
        pub use super::super::types_bits::*;
    }
    // Bit-width-limited integers plus their MSB/LSB packing wrappers.
    pub use super::types_num::*;
    // Plain byte-array packing (1..=32 bytes).
    pub use super::types_array::*;
    // Reserved filler fields that always pack as all-zero or all-one bits.
    pub use super::types_reserved::*;
}
pub use self::packing::*;
pub mod prelude {
    //! Re-exports the most useful traits and types. Meant to be glob imported.
    // NOTE: these are 2015-edition crate-root paths.
    pub use PackedStruct;
    pub use PackedStructSlice;
    pub use PackingError;
    pub use PrimitiveEnum;
    // With an allocator available, re-export the Cow-based string helpers;
    // otherwise fall back to the &'static str variant.
    #[cfg(any(feature="alloc", feature="std"))]
    pub use PrimitiveEnumDynamicStr;
    #[cfg(not(any(feature="alloc", feature="std")))]
    pub use PrimitiveEnumStaticStr;
    pub use EnumCatchAll;
    pub use types::*;
    pub use types::bits as packed_bits;
}

105
third_party/rust/packed_struct/src/packing.rs поставляемый
Просмотреть файл

@ -1,105 +0,0 @@
use internal_prelude::v1::*;
/// A structure that can be packed and unpacked from a byte array.
///
/// In case the structure occupies fewer bits than there are in the byte array,
/// the packed data should be aligned to the end of the array, with leading bits
/// being ignored.
///
/// 10 bits packs into: [0b00000011, 0b11111111]
pub trait PackedStruct<B> where Self: Sized {
    /// Packs the structure into a byte array.
    fn pack(&self) -> B;
    /// Unpacks the structure from a byte array.
    fn unpack(src: &B) -> Result<Self, PackingError>;
}
/// Information about a particular type that can be packed.
pub trait PackedStructInfo {
    /// Number of bits that this structure occupies when being packed.
    fn packed_bits() -> usize;
}
/// A structure that can be packed and unpacked from a slice of bytes.
pub trait PackedStructSlice where Self: Sized {
    /// Pack the structure into an output buffer.
    ///
    /// The buffer length must equal `packed_bytes()` exactly.
    fn pack_to_slice(&self, output: &mut [u8]) -> Result<(), PackingError>;
    /// Unpack the structure from a buffer.
    ///
    /// The buffer length must equal `packed_bytes()` exactly.
    fn unpack_from_slice(src: &[u8]) -> Result<Self, PackingError>;
    /// Number of bytes that this structure demands for packing or unpacking.
    fn packed_bytes() -> usize;
    #[cfg(any(feature="alloc", feature="std"))]
    /// Pack the structure into a new byte vector.
    fn pack_to_vec(&self) -> Result<Vec<u8>, PackingError> {
        // Allocate exactly the required size, then delegate to the
        // slice-based implementation.
        let mut buf = vec![0; Self::packed_bytes()];
        self.pack_to_slice(&mut buf)?;
        Ok(buf)
    }
}
#[derive(Debug, Copy, Clone, PartialEq, Serialize)]
/// Packing errors that might occur during packing or unpacking
pub enum PackingError {
    /// A field was given a value that cannot be represented when packed.
    InvalidValue,
    /// Bit-level manipulation failed.
    BitsError,
    /// The supplied buffer has fewer bytes than required.
    BufferTooSmall,
    /// The requested operation is not implemented.
    NotImplemented,
    /// The supplied buffer's length differs from the exact required size.
    BufferSizeMismatch { expected: usize, actual: usize }
}
impl Display for PackingError {
    // User-facing output reuses the derived Debug representation.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{:?}", self)
    }
}
#[cfg(feature="std")]
impl ::std::error::Error for PackingError {
    // NOTE(review): `description()` is the pre-Rust-1.42 Error API;
    // `Display` above is the preferred source of the message today.
    fn description(&self) -> &str {
        match *self {
            PackingError::InvalidValue => "Invalid value",
            PackingError::BitsError => "Bits error",
            PackingError::BufferTooSmall => "Buffer too small",
            PackingError::BufferSizeMismatch { .. } => "Buffer size mismatched",
            PackingError::NotImplemented => "Not implemented"
        }
    }
}
/// Implements `PackedStructSlice` for a type that already implements
/// `PackedStruct<[u8; $num_bytes]>`, by round-tripping through a
/// fixed-size stack array.
macro_rules! packing_slice {
    ($T: path; $num_bytes: expr) => (
        impl PackedStructSlice for $T {
            #[inline]
            fn pack_to_slice(&self, output: &mut [u8]) -> Result<(), PackingError> {
                if output.len() != $num_bytes {
                    return Err(PackingError::BufferTooSmall);
                }
                let packed = self.pack();
                // Fixed: dropped the stray `&mut` that uselessly borrowed
                // the unit value returned by `copy_from_slice`.
                output[..].copy_from_slice(&packed[..]);
                Ok(())
            }
            #[inline]
            fn unpack_from_slice(src: &[u8]) -> Result<Self, PackingError> {
                if src.len() != $num_bytes {
                    return Err(PackingError::BufferTooSmall);
                }
                // Copy into a correctly-sized array so the array-based
                // `unpack` can be reused.
                let mut s = [0; $num_bytes];
                s[..].copy_from_slice(src);
                Self::unpack(&s)
            }
            #[inline]
            fn packed_bytes() -> usize {
                $num_bytes
            }
        }
    )
}

Просмотреть файл

@ -1,107 +0,0 @@
use internal_prelude::v1::*;
/// An enum type that can be packed or unpacked from a simple primitive integer.
pub trait PrimitiveEnum where Self: Sized + Copy {
    /// The primitive type into which we serialize and deserialize ourselves.
    type Primitive: PartialEq + Sized + Copy + Debug;
    /// Convert from a primitive, might fail.
    fn from_primitive(val: Self::Primitive) -> Option<Self>;
    /// Convert to a primitive value.
    fn to_primitive(&self) -> Self::Primitive;
    /// Convert from a string value representing the variant. Case sensitive.
    fn from_str(s: &str) -> Option<Self>;
    /// Convert from a string value representing the variant. Lowercase.
    fn from_str_lower(s: &str) -> Option<Self>;
}
/// Static display formatters.
pub trait PrimitiveEnumStaticStr where Self: Sized + Copy + PrimitiveEnum {
    /// Display value, same as the name of a particular variant.
    fn to_display_str(&self) -> &'static str;
    /// A list of all possible variants.
    fn all_variants() -> &'static [Self];
}
#[cfg(any(feature="alloc", feature="std"))]
/// Dynamic display formatters.
pub trait PrimitiveEnumDynamicStr where Self: Sized + Copy + PrimitiveEnum {
    /// Display value, same as the name of a particular variant.
    fn to_display_str(&self) -> Cow<'static, str>;
    /// A list of all possible variants.
    fn all_variants() -> Cow<'static, [Self]>;
}
/// A wrapper for primitive enums that supports catching and retaining any values
/// that don't have defined discriminants.
///
/// Equality compares the primitive values, so `Enum(x)` equals
/// `CatchAll(x.to_primitive())`.
#[derive(Copy, Clone, Debug)]
pub enum EnumCatchAll<E> where E: PrimitiveEnum {
    /// A matched discriminant
    Enum(E),
    /// Some other value, stored as the primitive type
    CatchAll(E::Primitive)
}
impl<E> EnumCatchAll<E> where E: PrimitiveEnum {
    /// Wraps a matched enum variant.
    pub fn from_enum(v: E) -> Self {
        Self::Enum(v)
    }
}
impl<E> From<E> for EnumCatchAll<E> where E: PrimitiveEnum {
    fn from(v: E) -> Self {
        Self::Enum(v)
    }
}
impl<E> PartialEq<Self> for EnumCatchAll<E> where E: PrimitiveEnum {
    // Two values are equal when their primitive representations match,
    // regardless of which variant carries them.
    fn eq(&self, other: &Self) -> bool {
        self.to_primitive() == other.to_primitive()
    }
}
impl<E> PrimitiveEnum for EnumCatchAll<E>
    where E: PrimitiveEnum
{
    type Primitive = E::Primitive;
    /// Never fails: values without a matching discriminant are retained
    /// in the `CatchAll` variant.
    fn from_primitive(val: E::Primitive) -> Option<Self> {
        let wrapped = match E::from_primitive(val) {
            Some(known) => EnumCatchAll::Enum(known),
            None => EnumCatchAll::CatchAll(val),
        };
        Some(wrapped)
    }
    fn to_primitive(&self) -> E::Primitive {
        match self {
            EnumCatchAll::Enum(e) => e.to_primitive(),
            EnumCatchAll::CatchAll(raw) => *raw,
        }
    }
    fn from_str(s: &str) -> Option<Self> {
        E::from_str(s).map(EnumCatchAll::Enum)
    }
    fn from_str_lower(s: &str) -> Option<Self> {
        E::from_str_lower(s).map(EnumCatchAll::Enum)
    }
}
#[cfg(any(feature="alloc", feature="std"))]
impl<E> PrimitiveEnumDynamicStr for EnumCatchAll<E>
    where E: PrimitiveEnum + PrimitiveEnumDynamicStr
{
    /// Display value, same as the name of a particular variant.
    fn to_display_str(&self) -> Cow<'static, str> {
        match self {
            EnumCatchAll::Enum(e) => e.to_display_str(),
            EnumCatchAll::CatchAll(raw) => Cow::from(format!("Unknown value: {:?}", raw)),
        }
    }
    fn all_variants() -> Cow<'static, [Self]> {
        // Wrap every known variant; unknown values have no listing.
        let variants: Vec<Self> = E::all_variants()
            .iter()
            .copied()
            .map(EnumCatchAll::Enum)
            .collect();
        variants.into()
    }
}

Просмотреть файл

@ -1,58 +0,0 @@
use super::packing::*;
/// Implements `PackedStruct` and `PackedStructInfo` for a `[u8; $N]` byte
/// array; packing a raw byte array is simply a copy.
macro_rules! packable_u8_array {
    ($N: expr) => (
        impl PackedStruct<[u8; $N]> for [u8; $N] {
            #[inline]
            fn pack(&self) -> [u8; $N] {
                // A byte array packs to itself.
                *self
            }
            #[inline]
            fn unpack(src: &[u8; $N]) -> Result<[u8; $N], PackingError> {
                // A plain byte copy can never fail.
                Ok(*src)
            }
        }
        impl PackedStructInfo for [u8; $N] {
            #[inline]
            fn packed_bits() -> usize {
                $N * 8
            }
        }
    )
}
// Byte arrays of 1 through 32 bytes are packable. Pre-const-generics,
// each length needs its own macro-generated impl.
packable_u8_array!(1);
packable_u8_array!(2);
packable_u8_array!(3);
packable_u8_array!(4);
packable_u8_array!(5);
packable_u8_array!(6);
packable_u8_array!(7);
packable_u8_array!(8);
packable_u8_array!(9);
packable_u8_array!(10);
packable_u8_array!(11);
packable_u8_array!(12);
packable_u8_array!(13);
packable_u8_array!(14);
packable_u8_array!(15);
packable_u8_array!(16);
packable_u8_array!(17);
packable_u8_array!(18);
packable_u8_array!(19);
packable_u8_array!(20);
packable_u8_array!(21);
packable_u8_array!(22);
packable_u8_array!(23);
packable_u8_array!(24);
packable_u8_array!(25);
packable_u8_array!(26);
packable_u8_array!(27);
packable_u8_array!(28);
packable_u8_array!(29);
packable_u8_array!(30);
packable_u8_array!(31);
packable_u8_array!(32);

Просмотреть файл

@ -1,95 +0,0 @@
use super::packing::*;
/// A `bool` packs into one byte: 1 for true, 0 for false.
impl PackedStruct<[u8; 1]> for bool {
    #[inline]
    fn pack(&self) -> [u8; 1] {
        [*self as u8]
    }
    #[inline]
    fn unpack(src: &[u8; 1]) -> Result<bool, PackingError> {
        // Only the exact encodings 0 and 1 are accepted.
        match src[0] {
            0 => Ok(false),
            1 => Ok(true),
            _ => Err(PackingError::InvalidValue)
        }
    }
}
impl PackedStructInfo for bool {
    #[inline]
    fn packed_bits() -> usize {
        // A boolean occupies a single bit when bit-packed.
        1
    }
}
packing_slice!(bool; 1);
/// A `u8` packs as itself into a single byte.
impl PackedStruct<[u8; 1]> for u8 {
    #[inline]
    fn pack(&self) -> [u8; 1] {
        self.to_le_bytes()
    }
    #[inline]
    fn unpack(src: &[u8; 1]) -> Result<u8, PackingError> {
        Ok(u8::from_le_bytes(*src))
    }
}
impl PackedStructInfo for u8 {
    #[inline]
    fn packed_bits() -> usize {
        8
    }
}
packing_slice!(u8; 1);
/// An `i8` packs as its two's-complement byte.
impl PackedStruct<[u8; 1]> for i8 {
    #[inline]
    fn pack(&self) -> [u8; 1] {
        self.to_le_bytes()
    }
    #[inline]
    fn unpack(src: &[u8; 1]) -> Result<i8, PackingError> {
        Ok(i8::from_le_bytes(*src))
    }
}
impl PackedStructInfo for i8 {
    #[inline]
    fn packed_bits() -> usize {
        8
    }
}
packing_slice!(i8; 1);
/// The unit type occupies no space at all when packed.
impl PackedStruct<[u8; 0]> for () {
    #[inline]
    fn pack(&self) -> [u8; 0] {
        []
    }
    #[inline]
    fn unpack(_ignored: &[u8; 0]) -> Result<(), PackingError> {
        // There is nothing to decode; always succeeds.
        Ok(())
    }
}
impl PackedStructInfo for () {
    #[inline]
    fn packed_bits() -> usize {
        0
    }
}

Просмотреть файл

@ -1,123 +0,0 @@
//! Unit bit sizes, used as a type parameter to concrete types to signify their
//! intended size.
use internal_prelude::v1::*;
/// Number of bits that the generic type should occupy.
pub trait NumberOfBits: Copy + Clone + Debug + Default {
    /// Minimal number of bytes that this bit width requires.
    type Bytes: NumberOfBytes;
    /// The numerical number of bits.
    fn number_of_bits() -> u8;
}
/// These bits are a multiple of 8
pub trait BitsFullBytes {}
/// These bits are not a multiple of 8
pub trait BitsPartialBytes {}
/// Number of bytes that the generic type should occupy.
pub trait NumberOfBytes: Copy + Clone + Debug + Default {
    /// The byte array type that holds these bytes, for instance [u8; 2].
    type AsBytes: ByteArray;
    /// The numerical number of bytes.
    fn number_of_bytes() -> u8;
}
/// Helper that allows us to cast a fixed size array into a byte slice.
pub trait ByteArray: Default + Debug {
    /// Number of bytes in the array.
    fn len(&self) -> usize;
    /// Borrow the array as an immutable byte slice.
    fn as_bytes_slice(&self) -> &[u8];
    /// Borrow the array as a mutable byte slice.
    fn as_mut_bytes_slice(&mut self) -> &mut [u8];
    /// Shift the contents towards the end by `bytes`, zero-filling the front.
    fn rotate_right(&mut self, bytes: usize);
    /// Create an array with every byte set to `value`.
    fn new(value: u8) -> Self;
}
/// Defines a zero-sized marker type `$T` representing a width of `$N` bytes
/// and implements `ByteArray` for the matching `[u8; $N]` array.
macro_rules! bytes_type {
    ($T: ident, $N: expr) => {
        #[derive(Copy, Clone, Debug, Default, PartialEq)]
        pub struct $T;
        impl NumberOfBytes for $T {
            type AsBytes = [u8; $N];
            #[inline]
            fn number_of_bytes() -> u8 {
                $N
            }
        }
        impl ByteArray for [u8; $N] {
            #[inline]
            fn len(&self) -> usize {
                $N
            }
            #[inline]
            fn as_bytes_slice(&self) -> &[u8] {
                &self[..]
            }
            #[inline]
            fn as_mut_bytes_slice(&mut self) -> &mut [u8] {
                &mut self[..]
            }
            #[inline]
            fn rotate_right(&mut self, bytes: usize) {
                // Delegates to the shared free function below.
                bytes_rotate_right(self, bytes)
            }
            fn new(value: u8) -> Self {
                [value; $N]
            }
        }
    }
}
/// Defines a zero-sized marker type `$T` representing a width of `$N` bits,
/// tied to its minimal byte container `$TB` and tagged with the
/// full/partial-bytes marker trait `$TBK`.
macro_rules! bits_type {
    ($T: ident, $N: expr, $TB: ident, $TBK: ident) => {
        #[derive(Copy, Clone, Debug, Default, PartialEq)]
        pub struct $T;
        impl NumberOfBits for $T {
            type Bytes = $TB;
            #[inline]
            fn number_of_bits() -> u8 {
                $N
            }
        }
        impl $TBK for $T { }
    };
}
// The Bits1..Bits64 / Bytes1..Bytes8 marker types are generated at build time.
include!(concat!(env!("OUT_DIR"), "/generate_bytes_and_bits.rs"));
/// Shifts the contents of `s` towards the end by `bytes` positions,
/// zero-filling the vacated leading bytes; bytes shifted past the end
/// are discarded (despite the name, this is a shift, not a rotation).
///
/// Fixed: the previous implementation computed `s.len() - bytes - 1`,
/// which underflowed (and panicked) for an empty slice or whenever
/// `bytes >= s.len()`; those cases now simply zero the whole slice.
#[inline]
fn bytes_rotate_right(s: &mut [u8], bytes: usize) {
    if bytes == 0 {
        return;
    }
    let len = s.len();
    if bytes >= len {
        // Everything is shifted out; only the zero fill remains.
        for b in s.iter_mut() {
            *b = 0;
        }
        return;
    }
    // Move the retained prefix into place, then clear the leading bytes.
    s.copy_within(..len - bytes, bytes);
    for b in &mut s[..bytes] {
        *b = 0;
    }
}
#[test]
fn test_byte_rotation() {
    // Shifting right by one byte drops the last byte and zero-fills the front.
    let mut buf = [0xCC, 0xBB, 0xAA, 0x00];
    bytes_rotate_right(&mut buf, 1);
    assert_eq!(buf, [0x00, 0xCC, 0xBB, 0xAA]);
}

Просмотреть файл

@ -1,739 +0,0 @@
//! Integers that are limited by a bit width, with methods to store them
//! as a native type, packing and unpacking into byte arrays, with MSB/LSB
//! support.
use internal_prelude::v1::*;
use super::types_bits::*;
/// A bit-limited integer, stored in a native type that is at least
/// as many bits wide as the desired size.
#[derive(Default, Copy, Clone)]
pub struct Integer<T, B> {
    // The stored value; `from_primitive` masks it to the width of `B`.
    num: T,
    // Zero-sized marker carrying the bit-width type parameter.
    bits: PhantomData<B>
}
// Formatting delegates to the underlying native value.
impl<T, B> Debug for Integer<T, B> where T: Debug {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{:?}", self.num)
    }
}
impl<T, B> Display for Integer<T, B> where T: Display {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.num)
    }
}
use serde::ser::{Serialize, Serializer};
// Serde support round-trips through the native value.
impl<T, B> Serialize for Integer<T, B> where T: Serialize {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
        where S: Serializer
    {
        self.num.serialize(serializer)
    }
}
use serde::de::{Deserialize, Deserializer};
impl<'de, T, B> Deserialize<'de> for Integer<T, B> where T: Deserialize<'de>, T: Into<Integer<T, B>> {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
        where D: Deserializer<'de>
    {
        // Deserialize the native value, then convert (and mask) via `Into`.
        <T>::deserialize(deserializer).map(|n| n.into())
    }
}
impl<T, B> PartialEq for Integer<T, B> where T: PartialEq {
    fn eq(&self, other: &Self) -> bool {
        self.num.eq(&other.num)
    }
}
impl<T, B> Integer<T, B> where Self: Copy {
    /// Convert into a MSB packing helper
    pub fn as_packed_msb(&self) -> MsbInteger<T, B, Self> {
        MsbInteger(*self, Default::default(), Default::default())
    }
    /// Convert into a LSB packing helper
    pub fn as_packed_lsb(&self) -> LsbInteger<T, B, Self> {
        LsbInteger(*self, Default::default(), Default::default())
    }
}
/// Convert an integer of a specific bit width into native types.
pub trait SizedInteger<T, B: NumberOfBits> {
    /// The bit mask that is used for all incoming values. For an integer
    /// of width 8, that is 0xFF.
    fn value_bit_mask() -> T;
    /// Convert from the platform native type, applying the value mask.
    fn from_primitive(val: T) -> Self;
    /// Convert to the platform's native type.
    fn to_primitive(&self) -> T;
    /// Convert to a MSB byte representation. 0xAABB is converted into [0xAA, 0xBB].
    fn to_msb_bytes(&self) -> <<B as NumberOfBits>::Bytes as NumberOfBytes>::AsBytes;
    /// Convert to a LSB byte representation. 0xAABB is converted into [0xBB, 0xAA].
    ///
    /// Only available for widths that are whole bytes (`BitsFullBytes`).
    fn to_lsb_bytes(&self) -> <<B as NumberOfBits>::Bytes as NumberOfBytes>::AsBytes where B: BitsFullBytes;
    /// Convert from a MSB byte array.
    fn from_msb_bytes(bytes: &<<B as NumberOfBits>::Bytes as NumberOfBytes>::AsBytes) -> Self;
    /// Convert from a LSB byte array.
    ///
    /// Only available for widths that are whole bytes (`BitsFullBytes`).
    fn from_lsb_bytes(bytes: &<<B as NumberOfBits>::Bytes as NumberOfBytes>::AsBytes) -> Self where B: BitsFullBytes;
}
/// Convert a native platform integer type into a byte array.
pub trait IntegerAsBytes where Self: Sized {
    /// The byte array type, for instance [u8; 2].
    type AsBytes;
    /// Convert into a MSB byte array.
    fn to_msb_bytes(&self) -> Self::AsBytes;
    /// Convert into a LSB byte array.
    fn to_lsb_bytes(&self) -> Self::AsBytes;
    /// Convert from a MSB byte array.
    fn from_msb_bytes(bytes: &Self::AsBytes) -> Self;
    /// Convert from a LSB byte array.
    fn from_lsb_bytes(bytes: &Self::AsBytes) -> Self;
}
/// Splits an integer value into `$N` bytes, most significant first, by
/// shift-extraction (operates on the value, not its in-memory layout).
macro_rules! as_bytes {
    (1, $v: expr) => {
        [
            (($v >> 0) as u8 & 0xFF)
        ]
    };
    (2, $v: expr) => {
        [
            (($v >> 8) as u8 & 0xFF),
            (($v >> 0) as u8 & 0xFF),
        ]
    };
    (4, $v: expr) => {
        [
            (($v >> 24) as u8 & 0xFF),
            (($v >> 16) as u8 & 0xFF),
            (($v >> 8) as u8 & 0xFF),
            (($v >> 0) as u8 & 0xFF)
        ]
    };
    (8, $v: expr) => {
        [
            (($v >> 56) as u8 & 0xFF),
            (($v >> 48) as u8 & 0xFF),
            (($v >> 40) as u8 & 0xFF),
            (($v >> 32) as u8 & 0xFF),
            (($v >> 24) as u8 & 0xFF),
            (($v >> 16) as u8 & 0xFF),
            (($v >> 8) as u8 & 0xFF),
            (($v >> 0) as u8 & 0xFF)
        ]
    }
}
/// Reassembles an integer of type `$T` from `$N` bytes given most
/// significant first, by shift-or accumulation.
macro_rules! from_bytes {
    (1, $v: expr, $T: ident) => {
        $v[0] as $T
    };
    (2, $v: expr, $T: ident) => {
        (($v[0] as $T) << 8) |
        (($v[1] as $T) << 0)
    };
    (4, $v: expr, $T: ident) => {
        (($v[0] as $T) << 24) |
        (($v[1] as $T) << 16) |
        (($v[2] as $T) << 8) |
        (($v[3] as $T) << 0)
    };
    (8, $v: expr, $T: ident) => {
        (($v[0] as $T) << 56) |
        (($v[1] as $T) << 48) |
        (($v[2] as $T) << 40) |
        (($v[3] as $T) << 32) |
        (($v[4] as $T) << 24) |
        (($v[5] as $T) << 16) |
        (($v[6] as $T) << 8) |
        (($v[7] as $T) << 0)
    };
}
/// Implements `IntegerAsBytes` for a native integer type `$T` of `$N` bytes.
///
/// NOTE(review): `to_msb_bytes` applies `to_le()` and `to_lsb_bytes`
/// applies `to_be()` before shift-extracting. On a little-endian host
/// `to_le()` is the identity and the results are as expected; on a
/// big-endian host these pre-swaps change the value being extracted —
/// confirm whether big-endian targets are actually supported before
/// relying on this.
macro_rules! integer_as_bytes {
    ($T: ident, $N: tt) => {
        impl IntegerAsBytes for $T {
            type AsBytes = [u8; $N];
            #[inline]
            fn to_msb_bytes(&self) -> [u8; $N] {
                let n = self.to_le();
                as_bytes!($N, n)
            }
            #[inline]
            fn to_lsb_bytes(&self) -> [u8; $N] {
                let n = self.to_be();
                as_bytes!($N, n)
            }
            #[inline]
            fn from_msb_bytes(bytes: &[u8; $N]) -> Self {
                from_bytes!($N, bytes, $T)
            }
            #[inline]
            fn from_lsb_bytes(bytes: &[u8; $N]) -> Self {
                let n = from_bytes!($N, bytes, $T);
                n.to_be()
            }
        }
    };
}
// Every fixed-width native integer gets byte-conversion support.
integer_as_bytes!(u8, 1);
integer_as_bytes!(i8, 1);
integer_as_bytes!(u16, 2);
integer_as_bytes!(i16, 2);
integer_as_bytes!(u32, 4);
integer_as_bytes!(i32, 4);
integer_as_bytes!(u64, 8);
integer_as_bytes!(i64, 8);
/// Implements `SizedInteger` for `Integer<$T, $TB>`, plus the `From` and
/// `Deref` conversions between the wrapper and its native type `$T`.
///
/// The packed byte count (`$TB`'s byte width) may be smaller than the
/// native type's size, so conversions skip/take the relevant bytes.
macro_rules! integer_bytes_impl {
    ($T: ident, $TB: ident) => {
        impl SizedInteger<$T, $TB> for Integer<$T, $TB> {
            #[inline]
            fn value_bit_mask() -> $T {
                ones($TB::number_of_bits() as u64) as $T
            }
            #[inline]
            fn from_primitive(val: $T) -> Self {
                // Mask the incoming value down to the declared bit width.
                let v = val & Self::value_bit_mask();
                Integer { num: v, bits: Default::default() }
            }
            #[inline]
            fn to_primitive(&self) -> $T {
                self.num
            }
            #[inline]
            fn to_msb_bytes(&self) -> <<$TB as NumberOfBits>::Bytes as NumberOfBytes>::AsBytes
            {
                let mut ret: <<$TB as NumberOfBits>::Bytes as NumberOfBytes>::AsBytes = Default::default();
                let b = self.num.to_msb_bytes();
                // MSB-first: the significant bytes are at the end.
                let skip = b.len() - ret.len();
                ret.copy_from_slice(&b[skip..]);
                ret
            }
            #[inline]
            fn to_lsb_bytes(&self) -> <<$TB as NumberOfBits>::Bytes as NumberOfBytes>::AsBytes
            {
                let mut ret: <<$TB as NumberOfBits>::Bytes as NumberOfBytes>::AsBytes = Default::default();
                let b = self.num.to_lsb_bytes();
                // LSB-first: the significant bytes are at the start.
                let take = ret.len();
                ret.copy_from_slice(&b[0..take]);
                ret
            }
            #[inline]
            fn from_msb_bytes(bytes: &<<$TB as NumberOfBits>::Bytes as NumberOfBytes>::AsBytes) -> Self
            {
                let mut native_bytes = Default::default();
                {
                    // hack that infers the size of the native array...
                    <$T>::from_msb_bytes(&native_bytes);
                }
                let skip = native_bytes.len() - bytes.len();
                {
                    let native_bytes = &mut native_bytes[skip..];
                    native_bytes.copy_from_slice(&bytes[..]);
                }
                let v = <$T>::from_msb_bytes(&native_bytes);
                Self::from_primitive(v)
            }
            #[inline]
            fn from_lsb_bytes(bytes: &<<$TB as NumberOfBits>::Bytes as NumberOfBytes>::AsBytes) -> Self
            {
                let mut native_bytes = Default::default();
                {
                    // hack that infers the size of the native array...
                    <$T>::from_lsb_bytes(&native_bytes);
                }
                {
                    let take = bytes.len();
                    let native_bytes = &mut native_bytes[..take];
                    native_bytes.copy_from_slice(&bytes[..]);
                }
                let v = <$T>::from_lsb_bytes(&native_bytes);
                Self::from_primitive(v)
            }
        }
        impl From<$T> for Integer<$T, $TB> {
            fn from(v: $T) -> Self {
                Self::from_primitive(v)
            }
        }
        impl From<Integer<$T, $TB>> for $T {
            fn from(v: Integer<$T, $TB>) -> Self {
                v.to_primitive()
            }
        }
        impl Deref for Integer<$T, $TB> {
            type Target = $T;
            fn deref(&self) -> &$T {
                &self.num
            }
        }
    };
}
// Each bytesN_impl macro instantiates SizedInteger for every bit width
// whose packed form occupies N bytes. A given native type can back several
// groups (e.g. u32 backs both 3-byte and 4-byte packed widths).
macro_rules! bytes1_impl {
    ($T: ident) => {
        integer_bytes_impl!($T, Bits1);
        integer_bytes_impl!($T, Bits2);
        integer_bytes_impl!($T, Bits3);
        integer_bytes_impl!($T, Bits4);
        integer_bytes_impl!($T, Bits5);
        integer_bytes_impl!($T, Bits6);
        integer_bytes_impl!($T, Bits7);
        integer_bytes_impl!($T, Bits8);
    };
}
macro_rules! bytes2_impl {
    ($T: ident) => {
        integer_bytes_impl!($T, Bits9);
        integer_bytes_impl!($T, Bits10);
        integer_bytes_impl!($T, Bits11);
        integer_bytes_impl!($T, Bits12);
        integer_bytes_impl!($T, Bits13);
        integer_bytes_impl!($T, Bits14);
        integer_bytes_impl!($T, Bits15);
        integer_bytes_impl!($T, Bits16);
    };
}
macro_rules! bytes3_impl {
    ($T: ident) => {
        integer_bytes_impl!($T, Bits17);
        integer_bytes_impl!($T, Bits18);
        integer_bytes_impl!($T, Bits19);
        integer_bytes_impl!($T, Bits20);
        integer_bytes_impl!($T, Bits21);
        integer_bytes_impl!($T, Bits22);
        integer_bytes_impl!($T, Bits23);
        integer_bytes_impl!($T, Bits24);
    };
}
macro_rules! bytes4_impl {
    ($T: ident) => {
        integer_bytes_impl!($T, Bits25);
        integer_bytes_impl!($T, Bits26);
        integer_bytes_impl!($T, Bits27);
        integer_bytes_impl!($T, Bits28);
        integer_bytes_impl!($T, Bits29);
        integer_bytes_impl!($T, Bits30);
        integer_bytes_impl!($T, Bits31);
        integer_bytes_impl!($T, Bits32);
    };
}
macro_rules! bytes5_impl {
    ($T: ident) => {
        integer_bytes_impl!($T, Bits33);
        integer_bytes_impl!($T, Bits34);
        integer_bytes_impl!($T, Bits35);
        integer_bytes_impl!($T, Bits36);
        integer_bytes_impl!($T, Bits37);
        integer_bytes_impl!($T, Bits38);
        integer_bytes_impl!($T, Bits39);
        integer_bytes_impl!($T, Bits40);
    };
}
macro_rules! bytes6_impl {
    ($T: ident) => {
        integer_bytes_impl!($T, Bits41);
        integer_bytes_impl!($T, Bits42);
        integer_bytes_impl!($T, Bits43);
        integer_bytes_impl!($T, Bits44);
        integer_bytes_impl!($T, Bits45);
        integer_bytes_impl!($T, Bits46);
        integer_bytes_impl!($T, Bits47);
        integer_bytes_impl!($T, Bits48);
    };
}
macro_rules! bytes7_impl {
    ($T: ident) => {
        integer_bytes_impl!($T, Bits49);
        integer_bytes_impl!($T, Bits50);
        integer_bytes_impl!($T, Bits51);
        integer_bytes_impl!($T, Bits52);
        integer_bytes_impl!($T, Bits53);
        integer_bytes_impl!($T, Bits54);
        integer_bytes_impl!($T, Bits55);
        integer_bytes_impl!($T, Bits56);
    };
}
macro_rules! bytes8_impl {
    ($T: ident) => {
        integer_bytes_impl!($T, Bits57);
        integer_bytes_impl!($T, Bits58);
        integer_bytes_impl!($T, Bits59);
        integer_bytes_impl!($T, Bits60);
        integer_bytes_impl!($T, Bits61);
        integer_bytes_impl!($T, Bits62);
        integer_bytes_impl!($T, Bits63);
        integer_bytes_impl!($T, Bits64);
    };
}
bytes1_impl!(u8);
bytes1_impl!(i8);
bytes2_impl!(u16);
bytes2_impl!(i16);
bytes3_impl!(u32);
bytes3_impl!(i32);
bytes4_impl!(u32);
bytes4_impl!(i32);
bytes5_impl!(u64);
bytes5_impl!(i64);
bytes6_impl!(u64);
bytes6_impl!(i64);
bytes7_impl!(u64);
bytes7_impl!(i64);
bytes8_impl!(u64);
bytes8_impl!(i64);
/// A positive bit mask of the desired width.
///
///     ones(1) => 0b1
///     ones(2) => 0b11
///     ones(3) => 0b111
///
/// Widths of 64 or more saturate to an all-ones mask.
fn ones(n: u64) -> u64 {
    match n {
        0 => 0,
        1..=63 => (1u64 << n) - 1,
        _ => !0,
    }
}
// Sanity tests for the masking and byte-order conversions above.
#[test]
fn test_u8() {
    let byte: Integer<u8, Bits8> = 0.into();
    assert_eq!(0, *byte);
    assert_eq!(0xFF, <Integer<u8, Bits8>>::value_bit_mask());
}
#[test]
fn test_u16() {
    let val = 0xABCD;
    let num: Integer<u16, Bits16> = val.into();
    assert_eq!(val, *num);
    assert_eq!([0xAB, 0xCD], num.to_msb_bytes());
    assert_eq!([0xCD, 0xAB], num.to_lsb_bytes());
}
#[test]
fn test_u32() {
    let val = 0x4589ABCD;
    let num: Integer<u32, Bits32> = val.into();
    assert_eq!(val, *num);
    assert_eq!([0x45, 0x89, 0xAB, 0xCD], num.to_msb_bytes());
    assert_eq!([0xCD, 0xAB, 0x89, 0x45], num.to_lsb_bytes());
}
#[test]
fn test_u64() {
    let val = 0x1122334455667788;
    let num: Integer<u64, Bits64> = val.into();
    assert_eq!(val, *num);
    assert_eq!([0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88], num.to_msb_bytes());
    assert_eq!([0x88, 0x77, 0x66, 0x55, 0x44, 0x33, 0x22, 0x11], num.to_lsb_bytes());
}
#[test]
fn test_roundtrip_u32() {
    let val = 0x11223344;
    let num: Integer<u32, Bits32> = val.into();
    let msb_bytes = num.to_msb_bytes();
    let from_msb = u32::from_msb_bytes(&msb_bytes);
    assert_eq!(val, from_msb);
    let lsb_bytes = num.to_lsb_bytes();
    let from_lsb = u32::from_lsb_bytes(&lsb_bytes);
    assert_eq!(val, from_lsb);
}
// A 24-bit value packs into only three of u32's four bytes.
#[test]
fn test_roundtrip_u24() {
    let val = 0xCCBBAA;
    let num: Integer<u32, Bits24> = val.into();
    let msb_bytes = num.to_msb_bytes();
    assert_eq!([0xCC, 0xBB, 0xAA], msb_bytes);
    let from_msb = <Integer<u32, Bits24>>::from_msb_bytes(&msb_bytes);
    assert_eq!(val, *from_msb);
    let lsb_bytes = num.to_lsb_bytes();
    assert_eq!([0xAA, 0xBB, 0xCC], lsb_bytes);
    let from_lsb = <Integer<u32, Bits24>>::from_lsb_bytes(&lsb_bytes);
    assert_eq!(val, *from_lsb);
}
// A partial-byte width: the top nibble of the leading byte stays clear.
#[test]
fn test_roundtrip_u20() {
    let val = 0xFBBAA;
    let num: Integer<u32, Bits20> = val.into();
    let msb_bytes = num.to_msb_bytes();
    assert_eq!([0x0F, 0xBB, 0xAA], msb_bytes);
    let from_msb = <Integer<u32, Bits20>>::from_msb_bytes(&msb_bytes);
    assert_eq!(val, *from_msb);
}
use super::packing::{PackingError, PackedStruct, PackedStructInfo, PackedStructSlice};
/// A wrapper that packs the integer as an MSB-ordered byte array. Usually
/// invoked using code generation.
pub struct MsbInteger<T, B, I>(I, PhantomData<T>, PhantomData<B>);
// Transparent access to the wrapped integer.
impl<T, B, I> Deref for MsbInteger<T, B, I> {
    type Target = I;
    fn deref(&self) -> &I {
        &self.0
    }
}
impl<T, B, I> From<I> for MsbInteger<T, B, I> {
    fn from(i: I) -> Self {
        MsbInteger(i, Default::default(), Default::default())
    }
}
// Formatting delegates to the wrapped integer.
impl<T, B, I> Debug for MsbInteger<T, B, I> where I: Debug {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{:?}", self.0)
    }
}
impl<T, B, I> Display for MsbInteger<T, B, I> where I: Display {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}
impl<T, B, I> PackedStruct<<<B as NumberOfBits>::Bytes as NumberOfBytes>::AsBytes> for MsbInteger<T, B, I>
    where B: NumberOfBits, I: SizedInteger<T, B>
{
    fn pack(&self) -> <<B as NumberOfBits>::Bytes as NumberOfBytes>::AsBytes {
        self.0.to_msb_bytes()
    }
    #[inline]
    fn unpack(src: &<<B as NumberOfBits>::Bytes as NumberOfBytes>::AsBytes) -> Result<Self, PackingError> {
        let n = I::from_msb_bytes(src);
        let n = MsbInteger(n, Default::default(), Default::default());
        Ok(n)
    }
}
impl<T, B, I> PackedStructInfo for MsbInteger<T, B, I> where B: NumberOfBits {
    #[inline]
    fn packed_bits() -> usize {
        B::number_of_bits() as usize
    }
}
impl<T, B, I> PackedStructSlice for MsbInteger<T, B, I> where B: NumberOfBits, I: SizedInteger<T, B> {
    /// Packs into `output`, which must be exactly `packed_bytes()` bytes long.
    fn pack_to_slice(&self, output: &mut [u8]) -> Result<(), PackingError> {
        let expected_bytes = <B as NumberOfBits>::Bytes::number_of_bytes() as usize;
        if output.len() != expected_bytes {
            return Err(PackingError::BufferSizeMismatch { expected: expected_bytes, actual: output.len() });
        }
        let packed = self.pack();
        // Fixed: removed the stray `&mut` that pointlessly borrowed the
        // unit value returned by `copy_from_slice`.
        output.copy_from_slice(packed.as_bytes_slice());
        Ok(())
    }
    /// Unpacks from `src`, which must be exactly `packed_bytes()` bytes long.
    fn unpack_from_slice(src: &[u8]) -> Result<Self, PackingError> {
        let expected_bytes = <B as NumberOfBits>::Bytes::number_of_bytes() as usize;
        if src.len() != expected_bytes {
            return Err(PackingError::BufferSizeMismatch { expected: expected_bytes, actual: src.len() });
        }
        let mut s = Default::default();
        // hack to infer the type of `s` as the fixed-size byte array
        {
            Self::unpack(&s)?;
        }
        s.as_mut_bytes_slice().copy_from_slice(src);
        Self::unpack(&s)
    }
    fn packed_bytes() -> usize {
        <B as NumberOfBits>::Bytes::number_of_bytes() as usize
    }
}
/// A wrapper that packs the integer as an LSB-ordered byte array. Usually
/// invoked using code generation.
///
/// LSB packing is only defined for widths that are whole bytes, hence the
/// `BitsFullBytes` bounds below.
pub struct LsbInteger<T, B, I>(I, PhantomData<T>, PhantomData<B>);
// Transparent access to the wrapped integer.
impl<T, B, I> Deref for LsbInteger<T, B, I> where B: BitsFullBytes {
    type Target = I;
    fn deref(&self) -> &I {
        &self.0
    }
}
impl<T, B, I> From<I> for LsbInteger<T, B, I> where B: BitsFullBytes {
    fn from(i: I) -> Self {
        LsbInteger(i, Default::default(), Default::default())
    }
}
// Formatting delegates to the wrapped integer.
impl<T, B, I> Debug for LsbInteger<T, B, I> where I: Debug {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{:?}", self.0)
    }
}
impl<T, B, I> Display for LsbInteger<T, B, I> where I: Display {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}
impl<T, B, I> PackedStruct<<<B as NumberOfBits>::Bytes as NumberOfBytes>::AsBytes> for LsbInteger<T, B, I>
    where B: NumberOfBits, I: SizedInteger<T, B>, B: BitsFullBytes
{
    fn pack(&self) -> <<B as NumberOfBits>::Bytes as NumberOfBytes>::AsBytes {
        self.0.to_lsb_bytes()
    }
    #[inline]
    fn unpack(src: &<<B as NumberOfBits>::Bytes as NumberOfBytes>::AsBytes) -> Result<Self, PackingError> {
        let n = I::from_lsb_bytes(src);
        let n = LsbInteger(n, Default::default(), Default::default());
        Ok(n)
    }
}
impl<T, B, I> PackedStructInfo for LsbInteger<T, B, I> where B: NumberOfBits {
    #[inline]
    fn packed_bits() -> usize {
        B::number_of_bits() as usize
    }
}
impl<T, B, I> PackedStructSlice for LsbInteger<T, B, I> where B: NumberOfBits + BitsFullBytes, I: SizedInteger<T, B> {
    /// Packs into `output`, which must be exactly `packed_bytes()` bytes long.
    fn pack_to_slice(&self, output: &mut [u8]) -> Result<(), PackingError> {
        let expected_bytes = <B as NumberOfBits>::Bytes::number_of_bytes() as usize;
        if output.len() != expected_bytes {
            return Err(PackingError::BufferSizeMismatch { expected: expected_bytes, actual: output.len() });
        }
        let packed = self.pack();
        // Fixed: removed the stray `&mut` that pointlessly borrowed the
        // unit value returned by `copy_from_slice`.
        output.copy_from_slice(packed.as_bytes_slice());
        Ok(())
    }
    /// Unpacks from `src`, which must be exactly `packed_bytes()` bytes long.
    fn unpack_from_slice(src: &[u8]) -> Result<Self, PackingError> {
        let expected_bytes = <B as NumberOfBits>::Bytes::number_of_bytes() as usize;
        if src.len() != expected_bytes {
            return Err(PackingError::BufferSizeMismatch { expected: expected_bytes, actual: src.len() });
        }
        let mut s = Default::default();
        // hack to infer the type of `s` as the fixed-size byte array
        {
            Self::unpack(&s)?;
        }
        s.as_mut_bytes_slice().copy_from_slice(src);
        Self::unpack(&s)
    }
    fn packed_bytes() -> usize {
        <B as NumberOfBits>::Bytes::number_of_bytes() as usize
    }
}
// End-to-end tests for the MSB/LSB packing wrappers.
#[test]
fn test_packed_int_msb() {
    let val = 0xAABBCCDD;
    let typed: Integer<u32, Bits32> = val.into();
    let endian = typed.as_packed_msb();
    let packed = endian.pack();
    assert_eq!([0xAA, 0xBB, 0xCC, 0xDD], packed);
    let unpacked: MsbInteger<_, _, Integer<u32, Bits32>> = MsbInteger::unpack(&packed).unwrap();
    assert_eq!(val, **unpacked);
}
// Partial-byte widths pad the leading byte with zero bits.
#[test]
fn test_packed_int_partial() {
    let val = 0b10_10101010;
    let typed: Integer<u16, Bits10> = val.into();
    let endian = typed.as_packed_msb();
    let packed = endian.pack();
    assert_eq!([0b00000010, 0b10101010], packed);
    let unpacked: MsbInteger<_, _, Integer<u16, Bits10>> = MsbInteger::unpack(&packed).unwrap();
    assert_eq!(val, **unpacked);
}
#[test]
fn test_packed_int_lsb() {
    let val = 0xAABBCCDD;
    let typed: Integer<u32, Bits32> = val.into();
    let endian = typed.as_packed_lsb();
    let packed = endian.pack();
    assert_eq!([0xDD, 0xCC, 0xBB, 0xAA], packed);
    let unpacked: LsbInteger<_, _, Integer<u32, Bits32>> = LsbInteger::unpack(&packed).unwrap();
    assert_eq!(val, **unpacked);
}
#[test]
fn test_struct_info() {
    fn get_bits<P: PackedStructInfo>(_s: &P) -> usize { P::packed_bits() }
    let typed: Integer<u32, Bits30> = 123.into();
    let msb = typed.as_packed_msb();
    assert_eq!(30, get_bits(&msb));
}
#[test]
fn test_slice_packing() {
    let mut data = vec![0xAA, 0xBB, 0xCC, 0xDD];
    let unpacked = <MsbInteger<_, _, Integer<u32, Bits32>>>::unpack_from_slice(&data).unwrap();
    assert_eq!(0xAABBCCDD, **unpacked);
    unpacked.pack_to_slice(&mut data).unwrap();
    assert_eq!(&[0xAA, 0xBB, 0xCC, 0xDD], &data[..]);
}
// A sub-native-width (24-bit in a u32) value in LSB order.
#[test]
fn test_packed_int_lsb_sub() {
    let val = 0xAABBCC;
    let typed: Integer<u32, Bits24> = val.into();
    let endian = typed.as_packed_lsb();
    let packed = endian.pack();
    assert_eq!([0xCC, 0xBB, 0xAA], packed);
}

Просмотреть файл

@ -1,102 +0,0 @@
//! Reserved space in a packed structure, either just zeroes or ones.
use internal_prelude::v1::*;
/// Packs into a set of zeroes. Ignores the input when unpacking.
pub type ReservedZero<B> = ReservedBits<BitZero, B>;
// Plural-form alias for readability in generated code.
pub type ReservedZeroes<B> = ReservedZero<B>;
/// Packs into a set of ones. Ignores the input when unpacking.
pub type ReservedOne<B> = ReservedBits<BitOne, B>;
// Plural-form alias for readability in generated code.
pub type ReservedOnes<B> = ReservedOne<B>;
/// Supplies the fill byte used when packing a reserved field.
pub trait ReservedBitValue {
    /// The byte value every reserved byte is set to when packing.
    fn get_reserved_bit_value_byte() -> u8;
}
#[derive(Default, Copy, Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BitOne;
impl ReservedBitValue for BitOne {
    fn get_reserved_bit_value_byte() -> u8 {
        0xFF
    }
}
#[derive(Default, Copy, Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BitZero;
impl ReservedBitValue for BitZero {
    fn get_reserved_bit_value_byte() -> u8 {
        0
    }
}
/// Always packs into the associated bit value. Ignores the input when unpacking.
#[derive(Default, Copy, Clone, PartialEq, Serialize, Deserialize)]
pub struct ReservedBits<V, B> {
    // Zero-sized marker selecting the fill value (BitZero / BitOne).
    value: V,
    // Zero-sized marker carrying the bit-width type parameter.
    bits: PhantomData<B>
}
// Debug/Display output just states the fixed fill value; the zero-sized
// payload carries no runtime information.
impl<B> Debug for ReservedBits<BitZero, B> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "Reserved - always 0")
    }
}
impl<B> Display for ReservedBits<BitZero, B> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "Reserved - always 0")
    }
}
impl<B> Debug for ReservedBits<BitOne, B> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "Reserved - always 1")
    }
}
impl<B> Display for ReservedBits<BitOne, B> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "Reserved - always 1")
    }
}
use packing::*;
use types_bits::{NumberOfBits, NumberOfBytes, ByteArray};
// Packs to a byte array completely filled with the marker's byte value.
// Unpacking ignores the source bytes entirely and returns the default value,
// since reserved bits carry no information.
impl<V, B> PackedStruct<<<B as NumberOfBits>::Bytes as NumberOfBytes>::AsBytes> for ReservedBits<V, B> where Self: Default, V: ReservedBitValue, B: NumberOfBits {
    fn pack(&self) -> <<B as NumberOfBits>::Bytes as NumberOfBytes>::AsBytes {
        <<<B as NumberOfBits>::Bytes as NumberOfBytes>::AsBytes>::new(V::get_reserved_bit_value_byte())
    }
    fn unpack(_src: &<<B as NumberOfBits>::Bytes as NumberOfBytes>::AsBytes) -> Result<Self, PackingError> {
        Ok(Self:: default())
    }
}
impl<V, B> PackedStructInfo for ReservedBits<V, B> where B: NumberOfBits {
    #[inline]
    fn packed_bits() -> usize {
        B::number_of_bits() as usize
    }
}
impl<V, B> PackedStructSlice for ReservedBits<V, B> where Self: Default, V: ReservedBitValue, B: NumberOfBits {
    fn pack_to_slice(&self, output: &mut [u8]) -> Result<(), PackingError> {
        // NOTE(review): fills however many bytes the caller provides; unlike
        // the derive-generated impls there is no length check against
        // packed_bytes() here — confirm this is intentional.
        for v in output.iter_mut() {
            *v = V::get_reserved_bit_value_byte();
        }
        Ok(())
    }
    fn unpack_from_slice(_src: &[u8]) -> Result<Self, PackingError> {
        Ok(Self::default())
    }
    fn packed_bytes() -> usize {
        <B as NumberOfBits>::Bytes::number_of_bytes() as usize
    }
}

Просмотреть файл

@ -1 +0,0 @@
{"files":{"Cargo.toml":"c0ab6ac517366f39a39fab33d7918220fe36e2c85a243eb001801fd90f16af09","src/common.rs":"72529fa7a9b349bd7fb4fe6e2d66bfae06dd8a5c2a8c656d8f77660fa132f2c0","src/lib.rs":"30e0e777b5226f685b220f17fa68a7acfd5a86d025a76572e8f079e781cc26d3","src/pack.rs":"16cf2c1d5757a43369b8736f6224f4c3f1b8fc3e310e65525198b02727461cf6","src/pack_codegen.rs":"7ec5eeeecf358ce9016f5d693361fbfd5ab8cf5fab94a6e2c66e5dd3efb6160c","src/pack_codegen_docs.rs":"f12feed5d80ae82769f68a1e85cc00be867b23181c4a9475e31509519225b65a","src/pack_parse.rs":"2a7a3ff7b5aae4c9bd1a1725718685b815c5de9bd91b924e338a4781972dcc77","src/pack_parse_attributes.rs":"06aa07a343c84e82b6f4c736fd67f1740193c79ffaf8fe60697d56bf47c4cf3e","src/primitive_enum.rs":"731e77a3117d4836d4993d77e333de80ec8324699f29cf9edfa6880f996d5a9e","src/utils.rs":"4af1f8b2a052faf2dabbebcd3f31ee6ff3de3048834199c9561199135d6a44b1"},"package":"9f6fda15ebe37b7b28889bd4aa75bb134652eaec9eb99d1bf02f806fca4357fc"}

Просмотреть файл

@ -1,37 +0,0 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g. crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package]
name = "packed_struct_codegen"
version = "0.3.0"
authors = ["Rudi Benkovic <rudi.benkovic@gmail.com>"]
description = "This crate implements the code generation for the packed_struct library."
license = "MIT OR Apache-2.0"
repository = "https://github.com/hashmismatch/packed_struct.rs"
[lib]
proc-macro = true
[dependencies.packed_struct]
version = "0.3"
default-features = false
[dependencies.quote]
version = "^0.3.12"
[dependencies.syn]
version = "^0.11.4"
features = ["full", "parsing", "printing"]
[features]
alloc = ["packed_struct/alloc"]
default = ["std"]
std = ["packed_struct/std"]

Просмотреть файл

@ -1,37 +0,0 @@
extern crate syn;
// Path prefix used by generated code for fmt/collection types: `::std` when
// the `std` feature is enabled, otherwise the `alloc` crate.
#[cfg(feature="std")]
pub fn collections_prefix() -> syn::Ty {
    syn::parse_type("::std").unwrap()
}
#[cfg(not(feature="std"))]
pub fn collections_prefix() -> syn::Ty {
    syn::parse_type("::alloc").unwrap()
}
// Fully-qualified `Result` path for generated code; the `::core` variant is
// used when `std` is not available.
#[cfg(feature="std")]
pub fn result_type() -> syn::Ty {
    syn::parse_type("::std::result::Result").expect("result type parse error")
}
#[cfg(not(feature="std"))]
pub fn result_type() -> syn::Ty {
    syn::parse_type("::core::result::Result").expect("result type parse error")
}
/// Reports whether heap allocation is available, i.e. whether either the
/// `std` or the `alloc` crate feature is enabled.
pub fn alloc_supported() -> bool {
    // `cfg!` evaluates to a compile-time boolean, replacing the original
    // pair of mutually exclusive `#[cfg]`-gated blocks with one expression.
    cfg!(any(feature = "std", feature = "alloc"))
}
/// Debug-formatter code generation is enabled exactly when an allocator is
/// available (the formatting helpers require `std` or `alloc`).
pub fn include_debug_codegen() -> bool {
    alloc_supported()
}

Просмотреть файл

@ -1,87 +0,0 @@
#![recursion_limit = "192"]
extern crate proc_macro;
extern crate packed_struct;
extern crate syn;
#[macro_use]
extern crate quote;
use proc_macro::TokenStream;
mod pack;
mod pack_codegen;
mod pack_codegen_docs;
mod pack_parse;
mod pack_parse_attributes;
mod primitive_enum;
mod common;
mod utils;
#[proc_macro_derive(PackedStruct, attributes(packed_struct, packed_field))]
pub fn derive_packable_bytes(input: TokenStream) -> TokenStream {
    // Parse the annotated item, analyze its packed layout, then render the
    // generated impls back into a token stream.
    let source = input.to_string();
    let ast = syn::parse_macro_input(&source).unwrap();
    let generated = pack_codegen::derive_pack(&pack_parse::parse_struct(&ast));
    let rendered = quote!(#generated).to_string();
    rendered.parse().unwrap()
}
// `PrimitiveEnum` derive entry points. The plain variant passes `None`,
// leaving the primitive type choice to `primitive_enum::derive`; each
// suffixed variant pins an explicit primitive representation.
#[proc_macro_derive(PrimitiveEnum)]
pub fn derive_primitive_detect(input: TokenStream) -> TokenStream {
    derive_primitive(input, None)
}
#[proc_macro_derive(PrimitiveEnum_u8)]
pub fn derive_primitive_u8(input: TokenStream) -> TokenStream {
    derive_primitive(input, Some(syn::parse_type("u8").unwrap()))
}
#[proc_macro_derive(PrimitiveEnum_u16)]
pub fn derive_primitive_u16(input: TokenStream) -> TokenStream {
    derive_primitive(input, Some(syn::parse_type("u16").unwrap()))
}
#[proc_macro_derive(PrimitiveEnum_u32)]
pub fn derive_primitive_u32(input: TokenStream) -> TokenStream {
    derive_primitive(input, Some(syn::parse_type("u32").unwrap()))
}
#[proc_macro_derive(PrimitiveEnum_u64)]
pub fn derive_primitive_u64(input: TokenStream) -> TokenStream {
    derive_primitive(input, Some(syn::parse_type("u64").unwrap()))
}
#[proc_macro_derive(PrimitiveEnum_i8)]
pub fn derive_primitive_i8(input: TokenStream) -> TokenStream {
    derive_primitive(input, Some(syn::parse_type("i8").unwrap()))
}
#[proc_macro_derive(PrimitiveEnum_i16)]
pub fn derive_primitive_i16(input: TokenStream) -> TokenStream {
    derive_primitive(input, Some(syn::parse_type("i16").unwrap()))
}
#[proc_macro_derive(PrimitiveEnum_i32)]
pub fn derive_primitive_i32(input: TokenStream) -> TokenStream {
    derive_primitive(input, Some(syn::parse_type("i32").unwrap()))
}
#[proc_macro_derive(PrimitiveEnum_i64)]
pub fn derive_primitive_i64(input: TokenStream) -> TokenStream {
    derive_primitive(input, Some(syn::parse_type("i64").unwrap()))
}
/// Shared implementation behind all the `PrimitiveEnum*` derives; `ty` is
/// the explicitly requested primitive type, if any.
fn derive_primitive(input: TokenStream, ty: Option<syn::Ty>) -> TokenStream {
    // A parse failure is surfaced as a panic so the compiler reports it as
    // a macro expansion error (same message as before).
    let parsed = syn::parse_derive_input(&input.to_string())
        .unwrap_or_else(|msg| panic!("Internal error from `syn`: {}", msg));
    let prim = primitive_enum::derive(&parsed, ty);
    quote!(#prim).to_string().parse().unwrap()
}

Просмотреть файл

@ -1,64 +0,0 @@
extern crate quote;
extern crate syn;
use std::ops::*;
use pack_parse::*;
/// A field's width and requested position, before final bit ranges are
/// resolved.
#[derive(Debug)]
pub struct FieldMidPositioning {
    pub bit_width: usize,
    pub bits_position: BitsPositionParsed,
}
/// A parsed struct field: either a single value or a fixed-size array whose
/// elements are positioned individually.
#[derive(Debug)]
pub enum FieldKind {
    Regular {
        ident: syn::Ident,
        field: FieldRegular
    },
    Array {
        ident: syn::Ident,
        size: usize,
        elements: Vec<FieldRegular>
    }
}
/// A fully positioned scalar field (or one element of an array field).
#[derive(Debug)]
pub struct FieldRegular {
    pub ty: syn::Ty,
    // Conversion layers applied when packing, innermost first.
    pub serialization_wrappers: Vec<SerializationWrapper>,
    pub bit_width: usize,
    /// The range as parsed by our parser. A single byte: 0..7
    pub bit_range: Range<usize>,
    /// The range that can be used by rust's slices. A single byte: 0..8
    pub bit_range_rust: Range<usize>
}
/// A conversion layer between a field's declared type and its packed bytes.
/// (The "Endiannes" spelling is part of the existing surface; kept as-is.)
#[derive(Debug, Clone)]
pub enum SerializationWrapper {
    IntegerWrapper {
        integer: syn::Ty,
    },
    EndiannesWrapper {
        endian: syn::Ty
    },
    PrimitiveEnumWrapper
}
/// The complete analyzed struct handed to the code generators.
#[derive(Debug)]
pub struct PackStruct {
    pub ast: syn::MacroInput,
    pub fields: Vec<FieldKind>,
    pub num_bytes: usize,
    pub num_bits: usize
}

Просмотреть файл

@ -1,398 +0,0 @@
extern crate quote;
extern crate syn;
use pack::*;
use pack_codegen_docs::*;
use pack_parse::syn_to_string;
use common::*;
use utils::*;
/// Generates the `PackedStruct`, `PackedStructInfo` and `PackedStructSlice`
/// implementations (plus optional debug formatters) for an analyzed struct.
pub fn derive_pack(parsed: &PackStruct) -> quote::Tokens {
    let (impl_generics, ty_generics, where_clause) = parsed.ast.generics.split_for_impl();
    let name = &parsed.ast.ident;
    //let snake_name = to_snake_case(name.as_ref());
    let type_documentation = type_docs(parsed);
    let num_bytes = parsed.num_bytes;
    let num_bits = parsed.num_bits;
    //let num_fields = parsed.fields.len();
    // Token fragments accumulated per field: packing code, unpacking code,
    // and the struct-literal field initializers used by `unpack`.
    let mut pack_fields = vec![];
    let mut unpack_fields = vec![];
    let mut unpack_struct_set = vec![];
    {
        // Emits pack/unpack snippets for one regular field; `src` is the
        // expression read from `self`, `target` the local bound on unpack.
        let mut reg = |src: &syn::Ident, target: &syn::Ident, field: &FieldRegular| {
            let bits = pack_bits(field);
            let pack = pack_field(src, field);
            let unpack = unpack_field(field);
            let pack_bits = bits.pack;
            let unpack_bits = bits.unpack;
            pack_fields.push(quote! {
                {
                    let packed = { #pack };
                    #pack_bits
                }
            });
            unpack_fields.push(quote! {
                let #target = {
                    let bytes = { #unpack_bits };
                    #unpack
                };
            });
        };
        for field in &parsed.fields {
            match field {
                &FieldKind::Regular { ref ident, ref field } => {
                    reg(ident, ident, field);
                    unpack_struct_set.push(quote! {
                        #ident: #ident
                    });
                },
                &FieldKind::Array { ref ident, ref elements, .. } => {
                    // Arrays are flattened: element i reads `name[i]` and
                    // unpacks into a synthetic local `name_i`.
                    let mut array_unpacked_elements = vec![];
                    for (i, field) in elements.iter().enumerate() {
                        let src = syn::Ident::new(format!("{}[{}]", syn_to_string(ident), i));
                        let target = syn::Ident::new(format!("{}_{}", syn_to_string(ident), i));
                        reg(&src, &target, field);
                        array_unpacked_elements.push(target);
                    }
                    unpack_struct_set.push(quote! {
                        #ident: [
                            #(#array_unpacked_elements),*
                        ]
                    });
                }
            }
        }
    }
    let result_ty = result_type();
    // Runtime Display/debug formatting is only generated when an allocator
    // is available.
    let debug_fmt = if include_debug_codegen() {
        let q = struct_runtime_formatter(parsed);
        quote! {
            #q
            impl #impl_generics #name #ty_generics #where_clause {
                #[allow(dead_code)]
                /// Display formatter for console applications
                pub fn packed_struct_display_formatter<'a>(&'a self) -> ::packed_struct::debug_fmt::PackedStructDisplay<'a, Self, [u8; #num_bytes]> {
                    ::packed_struct::debug_fmt::PackedStructDisplay::new(self)
                }
            }
        }
    } else {
        quote! {}
    };
    // NOTE(review): the generated `&mut output[..].copy_from_slice(...)` and
    // `&mut s[..].copy_from_slice(src)` statements below borrow the `()`
    // returned by copy_from_slice; harmless, but stylistically odd in the
    // emitted code.
    quote! {
        #type_documentation
        impl #impl_generics ::packed_struct::PackedStruct<[u8; #num_bytes]> for #name #ty_generics #where_clause {
            #[inline]
            #[allow(unused_imports, unused_parens)]
            fn pack(&self) -> [u8; #num_bytes] {
                use ::packed_struct::*;
                let mut target = [0 as u8; #num_bytes];
                #(#pack_fields)*
                target
            }
            #[inline]
            #[allow(unused_imports, unused_parens)]
            fn unpack(src: &[u8; #num_bytes]) -> #result_ty <#name, ::packed_struct::PackingError> {
                use ::packed_struct::*;
                #(#unpack_fields)*
                Ok(#name {
                    #(#unpack_struct_set),*
                })
            }
        }
        impl ::packed_struct::PackedStructInfo for #name {
            #[inline]
            fn packed_bits() -> usize {
                #num_bits
            }
        }
        impl #impl_generics ::packed_struct::PackedStructSlice for #name #ty_generics #where_clause {
            #[inline]
            #[allow(unused_imports)]
            fn pack_to_slice(&self, output: &mut [u8]) -> #result_ty <(), ::packed_struct::PackingError> {
                use ::packed_struct::*;
                if output.len() != #num_bytes {
                    return Err(::packed_struct::PackingError::BufferTooSmall);
                }
                let packed = self.pack();
                &mut output[..].copy_from_slice(&packed[..]);
                Ok(())
            }
            #[inline]
            #[allow(unused_imports)]
            fn unpack_from_slice(src: &[u8]) -> #result_ty <Self, ::packed_struct::PackingError> {
                use ::packed_struct::*;
                if src.len() != #num_bytes {
                    return Err(::packed_struct::PackingError::BufferTooSmall);
                }
                let mut s = [0; #num_bytes];
                &mut s[..].copy_from_slice(src);
                Self::unpack(&s)
            }
            #[inline]
            fn packed_bytes() -> usize {
                #num_bytes
            }
        }
        #debug_fmt
    }
}
/// The generated token pair for moving one field's packed bytes into and
/// out of the struct's byte buffer.
struct PackBitsCopy {
    // Statements that copy the local `packed` bytes into `target`.
    pack: quote::Tokens,
    // Statements that build a local byte array `b` from `src`.
    unpack: quote::Tokens
}
/// Emits the byte/bit copying code for one field. Byte-aligned fields use a
/// straight slice copy; everything else gets per-byte mask-and-shift code.
fn pack_bits(field: &FieldRegular) -> PackBitsCopy {
    // memcpy
    if (field.bit_range_rust.start % 8) == 0 && (field.bit_range_rust.end % 8) == 0 &&
        (field.bit_range_rust.len() % 8) == 0 && field.bit_range_rust.len() >= 8
    {
        let start = field.bit_range_rust.start / 8;
        let end = field.bit_range_rust.end / 8;
        PackBitsCopy {
            pack: quote! {
                &mut target[#start..#end].copy_from_slice(&packed);
            },
            unpack: quote! {
                let mut b = [0; (#end - #start)];
                &mut b[..].copy_from_slice(&src[#start..#end]);
                b
            }
        }
    } else {
        // Unaligned path: compute how far the field's bits are shifted
        // relative to the byte grid, then emit mask/shift statements that
        // may touch two adjacent target bytes per packed byte.
        let packed_field_len = (field.bit_width as f32 / 8.0).ceil() as usize;
        let start_byte = (field.bit_range_rust.start as f32 / 8.0).floor() as usize;
        let shift = ((packed_field_len as isize*8) - (field.bit_width as isize)) - (field.bit_range_rust.start as isize - (start_byte as isize * 8));
        // Positive shift -> `<<`, negative -> `>>`, zero -> no shift tokens.
        let emit_shift = |s: isize| {
            if s == 0 {
                quote! {}
            } else if s > 0 {
                quote! { << #s }
            } else {
                let s = -s;
                quote! { >> #s }
            }
        };
        // `l` tracks how many low bits of the current packed byte are valid.
        let mut l = 8 - ((packed_field_len as isize*8) - field.bit_width as isize);
        let mut dst_byte = start_byte;
        let mut pack = vec![];
        let mut unpack = vec![];
        for i in 0..packed_field_len {
            let src_mask = ones_u8(l as u8);
            let bit_shift = emit_shift(shift);
            pack.push(quote! {
                let _a = #i;
                target[#dst_byte] |= (packed[#i] & #src_mask) #bit_shift;
            });
            let bit_shift = emit_shift(-shift);
            unpack.push(quote! {
                let _a = #i;
                b[#i] |= (src[#dst_byte] #bit_shift) & #src_mask;
            });
            // Handle bits that spill over into the neighboring byte.
            if shift < 0 && (dst_byte - start_byte) <= packed_field_len {
                let shift = 8+shift;
                let src_mask = ones_u8(8-shift as u8);
                let bit_shift = emit_shift(shift);
                pack.push(quote! {
                    let _b = #i;
                    target[#dst_byte + 1] |= (((packed[#i] & #src_mask) as u16) #bit_shift) as u8;
                });
                let bit_shift = emit_shift(-shift);
                unpack.push(quote! {
                    let _b = #i;
                    b[#i] |= (((src[#dst_byte + 1] as u16) #bit_shift) & #src_mask as u16) as u8;
                });
            } else if shift > 0 && (dst_byte - start_byte) <= packed_field_len && i < packed_field_len - 1 {
                let shift = -(8-shift);
                let bit_shift = emit_shift(shift);
                let src_mask = !ones_u8(-shift as u8);
                pack.push(quote! {
                    let _c = #i;
                    target[#dst_byte] |= (((packed[#i + 1] & #src_mask) as u16) #bit_shift) as u8;
                });
                let bit_shift = emit_shift(-shift);
                unpack.push(quote! {
                    let _c = #i;
                    b[#i + 1] |= (((src[#dst_byte] as u16) #bit_shift) & #src_mask as u16) as u8;
                });
            }
            dst_byte += 1;
            l += 8;
        }
        PackBitsCopy {
            pack: quote! {
                #(#pack)*
            },
            unpack: quote! {
                let mut b = [0; #packed_field_len];
                #(#unpack)*
                b
            }
        }
    }
}
/// Builds the expression that converts `self.#name` through the field's
/// serialization wrappers (enum -> primitive, primitive -> sized `Integer`,
/// then the endianness wrapper) and finally calls `.pack()` on the result.
fn pack_field(name: &syn::Ident, field: &FieldRegular) -> quote::Tokens {
    let mut output = quote! { (self.#name) };
    // Each wrapper nests the previous expression one level deeper.
    for wrapper in &field.serialization_wrappers {
        match wrapper {
            &SerializationWrapper::PrimitiveEnumWrapper => {
                output = quote! {
                    {
                        use ::packed_struct::PrimitiveEnum;
                        let primitive_integer = { #output }.to_primitive();
                        primitive_integer
                    }
                };
            },
            &SerializationWrapper::IntegerWrapper { ref integer } => {
                output = quote! {
                    {
                        use ::packed_struct::types::*;
                        use ::packed_struct::types::bits::*;
                        let sized_integer: #integer = { #output }.into();
                        sized_integer
                    }
                };
            },
            &SerializationWrapper::EndiannesWrapper { ref endian } => {
                output = quote! {
                    {
                        use ::packed_struct::types::*;
                        use ::packed_struct::types::bits::*;
                        let wrapper: #endian <_, _, _> = { #output }.into();
                        wrapper
                    }
                };
            }
        }
    }
    quote! {
        {
            { & #output }.pack()
        }
    }
}
/// Builds the statements that decode the raw `bytes` local back into the
/// field's declared type by unwinding the serialization wrappers in reverse
/// (outermost-first) order.
fn unpack_field(field: &FieldRegular) -> quote::Tokens {
    // Wrappers were recorded innermost-first for packing; reverse them so
    // decoding proceeds from the outside in.
    let wrappers: Vec<_> = field.serialization_wrappers.iter().rev().cloned().collect();
    let result_ty = result_type();
    let mut unpack = quote! { bytes };
    let mut i = 0;
    loop {
        // Look at the current wrapper and its successor: the endianness +
        // integer combination is consumed as a single pair.
        match (wrappers.get(i), wrappers.get(i+1)) {
            (Some(&SerializationWrapper::EndiannesWrapper { ref endian }), Some(&SerializationWrapper::IntegerWrapper { ref integer })) => {
                unpack = quote! {
                    use ::packed_struct::types::*;
                    use ::packed_struct::types::bits::*;
                    let res: #result_ty <#endian <_, _, #integer >, PackingError> = <#endian <_, _, _>>::unpack(& #unpack );
                    let unpacked = try!(res);
                    **unpacked
                };
                // Pair consumed: skip over the integer wrapper as well.
                i += 1;
            }
            (Some(&SerializationWrapper::PrimitiveEnumWrapper), _) => {
                let ty = &field.ty;
                unpack = quote! {
                    use ::packed_struct::PrimitiveEnum;
                    let primitive_integer: <#ty as PrimitiveEnum>::Primitive = { #unpack };
                    let r = <#ty>::from_primitive(primitive_integer).ok_or(PackingError::InvalidValue);
                    r?
                };
            },
            (Some(&SerializationWrapper::EndiannesWrapper { ref endian }), _) => {
                let integer_ty = &field.ty;
                unpack = quote! {
                    use ::packed_struct::types::*;
                    use ::packed_struct::types::bits::*;
                    let res: #result_ty <#endian <_, _, #integer_ty >, PackingError> = <#endian <_, _, #integer_ty >>::unpack(& #unpack );
                    let unpacked = try!(res);
                    *unpacked
                };
            },
            (None, None) => {
                // No wrappers at all: unpack directly into the field type.
                let ty = &field.ty;
                unpack = quote! {
                    <#ty>::unpack(& #unpack)?
                };
            },
            (_, _) => {
                panic!("unsupported wrappers: {:#?}", wrappers);
            }
        }
        i += 1;
        // Terminates immediately for wrapper-less fields, otherwise once
        // every wrapper has been consumed.
        if wrappers.len() == 0 || i > wrappers.len() - 1 { break; }
    }
    unpack
}

Просмотреть файл

@ -1,147 +0,0 @@
extern crate quote;
extern crate syn;
use pack::*;
use pack_parse::syn_to_string;
use common::*;
use utils::*;
/// Generates runtime debug-formatting support for a struct: a module-level
/// `debug_fields_*` function describing every field's bit range, plus
/// `PackedStructDebug` and `Display` impls.
pub fn struct_runtime_formatter(parsed: &PackStruct) -> quote::Tokens {
    let (impl_generics, ty_generics, where_clause) = parsed.ast.generics.split_for_impl();
    let name = &parsed.ast.ident;
    let snake_name = to_snake_case(name.as_ref());
    let stdlib_prefix = collections_prefix();
    // The helper function's name is derived from the type name to keep it
    // unique at module level.
    let debug_fields_fn = syn::Ident::from(format!("debug_fields_{}", snake_name));
    let display_header = format!("{} ({} {})",
        name,
        parsed.num_bytes,
        if parsed.num_bytes == 1 { "byte" } else { "bytes" }
    );
    let mut debug_fields = vec![];
    for field in &parsed.fields {
        match field {
            &FieldKind::Regular { ref ident, ref field } => {
                let ref name_str = ident.as_ref().to_string();
                let bits = syn::parse_expr(&format!("{}..{}", field.bit_range.start, field.bit_range.end)).unwrap();
                debug_fields.push(quote! {
                    ::packed_struct::debug_fmt::DebugBitField {
                        name: #name_str.into(),
                        bits: #bits,
                        display_value: format!("{:?}", src.#ident).into()
                    }
                });
            },
            &FieldKind::Array { ref ident, ref elements, .. } => {
                // One entry per array element, labelled "name[i]".
                for (i, field) in elements.iter().enumerate() {
                    let name_str = format!("{}[{}]", syn_to_string(ident), i);
                    let bits = syn::parse_expr(&format!("{}..{}", field.bit_range.start, field.bit_range.end)).unwrap();
                    debug_fields.push(quote! {
                        ::packed_struct::debug_fmt::DebugBitField {
                            name: #name_str.into(),
                            bits: #bits,
                            display_value: format!("{:?}", src.#ident[#i]).into()
                        }
                    });
                }
            }
        }
    }
    let num_fields = debug_fields.len();
    let num_bytes = parsed.num_bytes;
    let result_ty = result_type();
    quote! {
        #[doc(hidden)]
        pub fn #debug_fields_fn(src: &#name) -> [::packed_struct::debug_fmt::DebugBitField<'static>; #num_fields] {
            [#(#debug_fields),*]
        }
        #[allow(unused_imports)]
        impl #impl_generics ::packed_struct::debug_fmt::PackedStructDebug for #name #ty_generics #where_clause {
            fn fmt_fields(&self, fmt: &mut #stdlib_prefix::fmt::Formatter) -> #result_ty <(), #stdlib_prefix::fmt::Error> {
                use ::packed_struct::PackedStruct;
                let fields = #debug_fields_fn(self);
                let packed: [u8; #num_bytes] = self.pack();
                ::packed_struct::debug_fmt::packable_fmt_fields(fmt, &packed, &fields)
            }
            fn packed_struct_display_header() -> &'static str {
                #display_header
            }
        }
        #[allow(unused_imports)]
        impl #impl_generics #stdlib_prefix::fmt::Display for #name #ty_generics #where_clause {
            #[allow(unused_imports)]
            fn fmt(&self, f: &mut #stdlib_prefix::fmt::Formatter) -> #stdlib_prefix::fmt::Result {
                let display = ::packed_struct::debug_fmt::PackedStructDisplay::new(self);
                display.fmt(f)
            }
        }
    }
}
use std::ops::Range;
/// Generates the rustdoc tokens for the derived type: a summary sentence
/// plus an HTML table listing each field's MSB0 bit range, name and type.
pub fn type_docs(parsed: &PackStruct) -> quote::Tokens {
    let mut doc = quote! {};
    // Fixed typo in the generated doc text: "packed an unpacked".
    let mut doc_html = format!("/// Structure that can be packed and unpacked into {size_bytes} bytes.\r\n",
        size_bytes = parsed.num_bytes
    );
    doc_html.push_str("/// <table>\r\n");
    doc_html.push_str("/// <thead><tr><td>Bit, MSB0</td><td>Name</td><td>Type</td></tr></thead>\r\n");
    doc_html.push_str("/// <tbody>\r\n");
    {
        // Renders one table row; single-bit fields show one index, wider
        // fields show "start:end" (inclusive bounds).
        let mut emit_field_docs = |bits: &Range<usize>, field_ident, ty| {
            let bits_str = {
                if bits.start == bits.end {
                    format!("{}", bits.start)
                } else {
                    format!("{}:{}", bits.start, bits.end)
                }
            };
            // todo: friendly integer, reserved types. add LSB/MSB integer info.
            doc_html.push_str(&format!("/// <tr><td>{}</td><td>{}</td><td>{}</td></tr>\r\n", bits_str, field_ident, syn_to_string(ty)));
        };
        for field in &parsed.fields {
            match field {
                &FieldKind::Regular { ref ident, ref field } => {
                    emit_field_docs(&field.bit_range, ident.as_ref().to_string(), &field.ty);
                },
                &FieldKind::Array { ref ident, ref elements, .. } => {
                    for (i, field) in elements.iter().enumerate() {
                        emit_field_docs(&field.bit_range, format!("{}[{}]", syn_to_string(ident), i), &field.ty);
                    }
                }
            }
        }
    }
    doc_html.push_str("/// </tbody>\r\n");
    doc_html.push_str("/// </table>\r\n");
    doc.append(&doc_html);
    doc
}

Просмотреть файл

@ -1,484 +0,0 @@
extern crate quote;
extern crate syn;
use pack::*;
use pack_parse_attributes::*;
use utils::*;
use std::ops::Range;
/// Collects `name = "value"` pairs from `#[main_attribute(...)]` list
/// attributes, e.g. `#[packed_struct(size_bytes = "4")]`.
pub fn parse_sub_attributes(attributes: &Vec<syn::Attribute>, main_attribute: &str) -> Vec<(String, String)> {
    let mut r = vec![];
    for attr in attributes {
        if let &syn::Attribute { value: syn::MetaItem::List(ref ident, ref list), .. } = attr {
            // Only inspect the attribute we were asked about.
            if ident.as_ref() != main_attribute { continue; }
            for item in list {
                if let &syn::NestedMetaItem::MetaItem(syn::MetaItem::NameValue(ref ident, ref lit)) = item {
                    let n = ident.as_ref();
                    // Only string-literal values are recognized.
                    if let &syn::Lit::Str(ref v, _) = lit {
                        r.push((n.to_string(), v.to_string()));
                    }
                }
            }
        }
    }
    r
}
#[derive(Clone, Copy, Debug, PartialEq)]
/// Bit numbering mode for field positions.
/// https://en.wikipedia.org/wiki/Bit_numbering
pub enum BitNumbering {
    Lsb0,
    Msb0
}
impl BitNumbering {
    /// Case-insensitive parse of the attribute value; `None` if unrecognized.
    pub fn from_str(s: &str) -> Option<Self> {
        let s = s.to_lowercase();
        match s.as_str() {
            "lsb0" => Some(BitNumbering::Lsb0),
            "msb0" => Some(BitNumbering::Msb0),
            _ => None
        }
    }
}
#[derive(Clone, Copy, Debug)]
/// Byte order for multi-byte integer fields.
/// https://en.wikipedia.org/wiki/Endianness
pub enum IntegerEndianness {
    Msb,
    Lsb
}
impl IntegerEndianness {
    /// Accepts "lsb"/"le" and "msb"/"be", case-insensitively.
    pub fn from_str(s: &str) -> Option<Self> {
        let s = s.to_lowercase();
        match s.as_str() {
            "lsb" | "le" => Some(IntegerEndianness::Lsb),
            "msb" | "be" => Some(IntegerEndianness::Msb),
            _ => None
        }
    }
}
/// Returns the packed bit width for built-in and library-known types, or
/// `None` when the width cannot be derived from the type name alone.
fn get_builtin_type_bit_width(p: &syn::PathSegment) -> Option<usize> {
    match p.ident.as_ref() {
        "bool" => Some(1),
        "u8" | "i8" => Some(8),
        "u16" | "i16" => Some(16),
        "u32" | "i32" => Some(32),
        "u64" | "i64" => Some(64),
        "ReservedZero" | "ReservedZeroes" | "ReservedOne" | "ReservedOnes" |
        "Integer" => {
            // The width is encoded in a `BitsN` generic argument; find the
            // "Bits" marker textually and parse the digits that follow it.
            match p.parameters {
                ::syn::PathParameters::AngleBracketed(ref params) => {
                    for t in &params.types {
                        let b = syn_to_string(t);
                        if let Some(bits_pos) = b.find("Bits") {
                            // 4 == "Bits".len(); the remainder should be the number.
                            let possible_int = &b[(bits_pos + 4)..];
                            if let Ok(bits) = possible_int.parse::<usize>() {
                                return Some(bits);
                            }
                        }
                    }
                    None
                },
                _ => None
            }
        },
        _ => {
            None
        }
    }
}
/// First-pass analysis of a field: determines its total bit width and any
/// explicitly requested position, before actual bit ranges are assigned.
fn get_field_mid_positioning(field: &syn::Field) -> FieldMidPositioning {
    let mut array_size = 1;
    let bit_width_builtin: Option<usize>;
    let _ty = match field.ty {
        syn::Ty::Path (None, syn::Path { ref segments, .. }) => {
            // Only single-segment paths (simple type names) are supported.
            if segments.len() == 1 {
                let ref segment = segments[0];
                bit_width_builtin = get_builtin_type_bit_width(segment);
                segment.clone()
            } else {
                panic!("Unsupported path type: {:#?}", field.ty);
            }
        },
        syn::Ty::Array(ref ty, ref size) => {
            // Arrays: element type must be a simple path, size an integer literal.
            if let syn::Ty::Path (None, syn::Path { ref segments, .. }) = **ty {
                if segments.len() == 1 {
                    if let &syn::ConstExpr::Lit(syn::Lit::Int(size, _)) = size {
                        let ref segment = segments[0];
                        bit_width_builtin = get_builtin_type_bit_width(segment);
                        array_size = size as usize;
                        if size == 0 { panic!("Arrays sized 0 are not supported."); }
                        segment.clone()
                    } else {
                        panic!("unsupported array size: {:?}", size);
                    }
                } else {
                    panic!("Unsupported path type: {:#?}", ty);
                }
            } else {
                panic!("Unsupported path type: {:#?}", ty);
            }
        },
        _ => { panic!("Unsupported type: {:?}", field.ty); }
    };
    let field_attributes = PackFieldAttribute::parse_all(&parse_sub_attributes(&field.attrs, "packed_field"));
    // An explicit bit/byte position attribute wins; otherwise "Next".
    let bits_position = field_attributes.iter().filter_map(|a| match a {
        &PackFieldAttribute::BitPosition(b) | &PackFieldAttribute::BytePosition(b) => Some(b),
        _ => None
    }).next().unwrap_or(BitsPositionParsed::Next);
    // Width resolution precedence: size_bits attribute, element size
    // attribute, explicit range, then the type's intrinsic width.
    let bit_width = if let Some(bits) = field_attributes.iter().filter_map(|a| if let &PackFieldAttribute::SizeBits(bits) = a { Some(bits) } else { None }).next() {
        if array_size > 1 { panic!("Please use the 'element_size_bits' or 'element_size_bytes' for arrays."); }
        bits
    } else if let Some(bits) = field_attributes.iter().filter_map(|a| if let &PackFieldAttribute::ElementSizeBits(bits) = a { Some(bits) } else { None }).next() {
        bits * array_size
    } else if let BitsPositionParsed::Range(a, b) = bits_position {
        // Inclusive range, order-insensitive.
        (b as isize - a as isize).abs() as usize + 1
    } else if let Some(bit_width_builtin) = bit_width_builtin {
        // todo: is it even possible to hit this branch?
        bit_width_builtin * array_size
    } else {
        panic!("Couldn't determine the width of this field: {:?}", field);
    };
    FieldMidPositioning {
        bit_width: bit_width,
        bits_position: bits_position
    }
}
/// Second-pass parse of one field into a `FieldKind`, splitting arrays into
/// per-element `FieldRegular` entries with evenly divided bit ranges.
fn parse_field(field: &syn::Field, mp: &FieldMidPositioning, bit_range: &Range<usize>, default_endianness: Option<IntegerEndianness>) -> FieldKind {
    match field.ty {
        syn::Ty::Path (None, syn::Path { ref segments, .. }) => {
            if segments.len() == 1 {
                let ty = syn::parse_type(&syn_to_string(&segments[0])).expect("error parsing path segment to ty");
                return FieldKind::Regular {
                    ident: field.ident.clone().expect("mah ident?"),
                    field: parse_reg_field(field, &ty, bit_range, default_endianness)
                };
            } else {
                panic!("huh 1x");
            }
        },
        syn::Ty::Array(ref ty, ref size) => {
            if let syn::Ty::Path (None, syn::Path { ref segments, .. }) = **ty {
                if segments.len() == 1 {
                    if let &syn::ConstExpr::Lit(syn::Lit::Int(size, _)) = size {
                        let ty = syn::parse_type(&syn_to_string(&segments[0])).expect("error parsing path segment to ty");
                        // Divide the field's total width evenly among elements;
                        // any remainder is a declaration error.
                        let element_size_bits: usize = mp.bit_width as usize / size as usize;
                        if (mp.bit_width % element_size_bits) != 0 {
                            panic!("element and array size mismatch!");
                        }
                        let mut elements = vec![];
                        for i in 0..size as usize {
                            let s = bit_range.start + (i * element_size_bits);
                            // Inclusive per-element range, parser convention.
                            let element_bit_range = s..(s + element_size_bits - 1);
                            elements.push(parse_reg_field(field, &ty, &element_bit_range, default_endianness));
                            //panic!("field: {:#?}, mp: {:#?}, bit_range: {:#?}", field, mp, bit_range);
                        }
                        return FieldKind::Array {
                            ident: field.ident.clone().expect("mah ident?"),
                            size: size as usize,
                            elements: elements
                        };
                    }
                }
            }
        },
        _ => { }
    };
    // Fall-through for every unsupported shape above.
    panic!("Field not supported: {:?}", field);
}
/// Determines the serialization wrappers a scalar field needs: enums first
/// convert to their primitive, plain integers wrap into a bit-sized
/// `Integer`, and either form then gets an endianness wrapper.
fn parse_reg_field(field: &syn::Field, ty: &syn::Ty, bit_range: &Range<usize>, default_endianness: Option<IntegerEndianness>) -> FieldRegular {
    let mut wrappers = vec![];
    // `bit_range` bounds are inclusive, hence the +1.
    let bit_width = (bit_range.end - bit_range.start) + 1;
    let ty_str = syn_to_string(ty);
    let field_attributes = PackFieldAttribute::parse_all(&parse_sub_attributes(&field.attrs, "packed_field"));
    let is_enum_ty = field_attributes.iter().filter_map(|a| match a {
        &PackFieldAttribute::Ty(TyKind::Enum) => Some(()),
        _ => None
    }).next().is_some();
    let needs_int_wrap = {
        let int_types = ["u8", "i8", "u16", "i16", "u32", "i32", "u64", "i64"];
        is_enum_ty || int_types.iter().any(|t| t == &ty_str)
    };
    let needs_endiannes_wrap = {
        // Matches the textual form of `Integer<_, Bits_>` types.
        let our_int_ty = ty_str.starts_with("Integer < ") && ty_str.contains("Bits");
        our_int_ty || needs_int_wrap
    };
    if is_enum_ty {
        wrappers.push(SerializationWrapper::PrimitiveEnumWrapper);
    }
    if needs_int_wrap {
        // Enums serialize via their associated primitive type.
        let ty = if is_enum_ty {
            format!("<{} as PrimitiveEnum>::Primitive",syn_to_string(ty))
        } else {
            ty_str.clone()
        };
        let integer_wrap_ty = syn::parse_type(&format!("Integer<{}, Bits{}>", ty, bit_width)).unwrap();
        wrappers.push(SerializationWrapper::IntegerWrapper { integer: integer_wrap_ty });
    }
    if needs_endiannes_wrap {
        // Field-level attribute beats the struct-level default.
        let mut endiannes = if let Some(endiannes) = field_attributes
            .iter()
            .filter_map(|a| if let &PackFieldAttribute::IntEndiannes(endiannes) = a {
                Some(endiannes)
            } else {
                None
            }).next()
        {
            Some(endiannes)
        } else {
            default_endianness
        };
        // Fields of 8 bits or fewer are always treated as MSB.
        if bit_width <= 8 {
            endiannes = Some(IntegerEndianness::Msb);
        }
        if endiannes.is_none() {
            panic!("Missing serialization wrapper for simple type {:?} - did you specify the integer endiannes on the field or a default for the struct?", ty_str);
        }
        let ty_prefix = match endiannes.unwrap() {
            IntegerEndianness::Msb => "Msb",
            IntegerEndianness::Lsb => "Lsb"
        };
        let endiannes_wrap_ty = syn::parse_type(&format!("{}Integer", ty_prefix)).unwrap();
        wrappers.push(SerializationWrapper::EndiannesWrapper { endian: endiannes_wrap_ty });
    }
    FieldRegular {
        ty: ty.clone(),
        serialization_wrappers: wrappers,
        bit_width: bit_width,
        bit_range: bit_range.clone(),
        // Half-open variant usable directly for slicing.
        bit_range_rust: bit_range.start..(bit_range.end + 1)
    }
}
/// A field's position as written in the attribute, before resolution into a
/// concrete bit range.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum BitsPositionParsed {
    Next,
    Start(usize),
    Range(usize, usize)
}
impl BitsPositionParsed {
    // Converts into a boxed BitsRange resolver (NextBits, a start offset,
    // or an explicit range).
    fn to_bits_position(&self) -> Box<BitsRange> {
        match *self {
            BitsPositionParsed::Next => Box::new(NextBits),
            BitsPositionParsed::Start(s) => Box::new(s),
            BitsPositionParsed::Range(a, b) => Box::new(a..b)
        }
    }
    /// Normalizes a range so the smaller bound comes first.
    pub fn range_in_order(a: usize, b: usize) -> Self {
        BitsPositionParsed::Range(::std::cmp::min(a, b), ::std::cmp::max(a, b))
    }
}
/// Parses a decimal or hexadecimal ("0x"/"0X"-prefixed) number, trimming
/// surrounding whitespace first.
///
/// Panics with a descriptive message on invalid input; this runs at macro
/// expansion time, where panicking is the error-reporting mechanism.
pub fn parse_num(s: &str) -> usize {
    let s = s.trim();
    if s.starts_with("0x") || s.starts_with("0X") {
        // Skip the two prefix characters and parse the rest as hex.
        // `unwrap_or_else` avoids eagerly formatting the panic message on
        // every call (the original `expect(&format!(...))` always allocated).
        usize::from_str_radix(&s[2..], 16)
            .unwrap_or_else(|_| panic!("Invalid hex number: {:?}", s))
    } else {
        s.parse()
            .unwrap_or_else(|_| panic!("Invalid decimal number: {:?}", s))
    }
}
/// Top-level parser: turns the derive input AST into a `PackStruct` with
/// positioned fields and total bit/byte sizes, panicking on declaration
/// errors (unsupported shapes, missing attributes, overlapping fields).
pub fn parse_struct(ast: &syn::MacroInput) -> PackStruct {
    let attributes = PackStructAttribute::parse_all(&parse_sub_attributes(&ast.attrs, "packed_struct"));
    let fields: Vec<_> = match ast.body {
        syn::Body::Struct(syn::VariantData::Struct(ref fields)) => {
            fields.iter().collect()
        },
        _ => panic!("#[derive(PackedStruct)] can only be used with braced structs"),
    };
    if ast.generics.ty_params.len() > 0 {
        panic!("Structures with generic fields currently aren't supported.");
    }
    // Struct-level attributes: bit numbering mode, default integer
    // endianness, and an optional explicit total size in bytes.
    let bit_positioning = {
        attributes.iter().filter_map(|a| match a {
            &PackStructAttribute::BitNumbering(b) => Some(b),
            _ => None
        }).next()
    };
    let default_int_endianness = attributes.iter().filter_map(|a| match a {
        &PackStructAttribute::DefaultIntEndianness(i) => Some(i),
        _ => None
    }).next();
    let struct_size_bytes = attributes.iter().filter_map(|a| {
        if let &PackStructAttribute::SizeBytes(size_bytes) = a {
            Some(size_bytes)
        } else {
            None
        }}).next();
    let first_field_is_auto_positioned = {
        if let Some(ref field) = fields.first() {
            let mp = get_field_mid_positioning(field);
            mp.bits_position == BitsPositionParsed::Next
        } else {
            false
        }
    };
    let mut fields_parsed: Vec<FieldKind> = vec![];
    {
        let mut prev_bit_range = None;
        for field in &fields {
            let mp = get_field_mid_positioning(field);
            // Normalize the declared position into MSB0 coordinates.
            let bits_position = match (bit_positioning, mp.bits_position) {
                (Some(BitNumbering::Lsb0), BitsPositionParsed::Next) | (Some(BitNumbering::Lsb0), BitsPositionParsed::Start(_)) => {
                    panic!("LSB0 field positioning currently requires explicit, full field positions.");
                },
                (Some(BitNumbering::Lsb0), BitsPositionParsed::Range(start, end)) => {
                    if let Some(struct_size_bytes) = struct_size_bytes {
                        // Mirror the LSB0 range into MSB0 indexing.
                        BitsPositionParsed::range_in_order( (struct_size_bytes * 8) - 1 - start, (struct_size_bytes * 8) - 1 - end )
                    } else {
                        panic!("LSB0 field positioning currently requires explicit struct byte size.");
                    }
                },
                (None, p @ BitsPositionParsed::Next) => p,
                (Some(BitNumbering::Msb0), p) => p,
                (None, _) => {
                    panic!("Please explicitly specify the bit numbering mode on the struct with an attribute: #[packed_struct(bit_numbering=\"msb0\")] or \"lsb0\".");
                }
            };
            let bit_range = bits_position.to_bits_position().get_bits_range(mp.bit_width, &prev_bit_range);
            fields_parsed.push(parse_field(field, &mp, &bit_range, default_int_endianness));
            prev_bit_range = Some(bit_range);
        }
    }
    let num_bits: usize = {
        if let Some(struct_size_bytes) = struct_size_bytes {
            struct_size_bytes * 8
        } else {
            // Infer total size from the furthest bit any field reaches.
            let last_bit = fields_parsed.iter().map(|f| match f {
                &FieldKind::Regular { ref field, .. } => field.bit_range_rust.end,
                &FieldKind::Array { ref elements, .. } => elements.last().unwrap().bit_range_rust.end
            }).max().unwrap();
            last_bit
        }
    };
    let num_bytes = (num_bits as f32 / 8.0).ceil() as usize;
    if first_field_is_auto_positioned && (num_bits % 8) != 0 && struct_size_bytes == None {
        panic!("Please explicitly position the bits of the first field of this structure ({}), as alignment isn't obvious to the end user.", ast.ident);
    }
    // check for overlaps
    {
        // One slot per bit, recording which field claimed it.
        let mut bits = vec![None; num_bytes * 8];
        for field in &fields_parsed {
            let mut find_overlaps = |name: String, range: &Range<usize>| {
                for i in range.start .. (range.end+1) {
                    if let Some(&Some(ref n)) = bits.get(i) {
                        panic!("Overlap in bits between fields {} and {}", n, name);
                    }
                    bits[i] = Some(name.clone());
                }
            };
            match field {
                &FieldKind::Regular { ref field, ref ident } => {
                    find_overlaps(syn_to_string(ident), &field.bit_range);
                },
                &FieldKind::Array { ref ident, ref elements, .. } => {
                    for (i, field) in elements.iter().enumerate() {
                        find_overlaps(format!("{}[{}]", syn_to_string(ident), i), &field.bit_range);
                    }
                }
            }
        }
    }
    PackStruct {
        ast: ast.clone(),
        fields: fields_parsed,
        num_bytes: num_bytes,
        num_bits: num_bits
    }
}
/// Renders any `ToTokens` value to its Rust source text.
pub fn syn_to_string<T: ::quote::ToTokens>(thing: &T) -> String {
    String::from(syn_to_tokens(thing).as_str())
}
/// Appends the token representation of `thing` to an existing token stream.
pub fn append_to_tokens<T: ::quote::ToTokens>(thing: &T, tokens: &mut ::quote::Tokens) {
    ::quote::ToTokens::to_tokens(thing, tokens)
}
/// Converts any `ToTokens` value into a standalone token stream.
pub fn syn_to_tokens<T: ::quote::ToTokens>(thing: &T) -> quote::Tokens {
    let mut out = ::quote::Tokens::new();
    append_to_tokens(thing, &mut out);
    out
}

Просмотреть файл

@ -1,278 +0,0 @@
use pack_parse::*;
/// The struct-level attribute keys recognized by `#[packed_struct(...)]`.
#[derive(Clone, Copy)]
pub enum PackStructAttributeKind {
    SizeBytes,
    //SizeBits,
    DefaultIntEndianness,
    BitNumbering
}
impl PackStructAttributeKind {
    /// The attribute-name string this kind matches in the derive input.
    fn get_attr_name(&self) -> &'static str {
        use self::PackStructAttributeKind::*;
        match *self {
            SizeBytes => "size_bytes",
            //SizeBits => "size_bits",
            DefaultIntEndianness => "endian",
            BitNumbering => "bit_numbering"
        }
    }
}
/// A parsed struct-level `#[packed_struct(...)]` attribute value.
pub enum PackStructAttribute {
    /// Explicit total size of the packed structure, in bytes.
    SizeBytes(usize),
    //SizeBits(usize),
    /// Default endianness used for integer fields that do not specify one.
    DefaultIntEndianness(IntegerEndianness),
    /// Whether explicit field bit positions are interpreted MSB0 or LSB0.
    BitNumbering(BitNumbering)
}
impl PackStructAttribute {
    /// Parses one struct-level attribute key/value pair.
    ///
    /// Unrecognized keys yield `Err(())` so callers can simply skip them.
    /// Panics when a recognized key carries an invalid value.
    pub fn parse(name: &str, val: &str) -> Result<Self, ()> {
        if name == PackStructAttributeKind::DefaultIntEndianness.get_attr_name() {
            let endianness = IntegerEndianness::from_str(val)
                .expect(&format!("Invalid default int endian value: {}", val));
            Ok(PackStructAttribute::DefaultIntEndianness(endianness))
        } else if name == PackStructAttributeKind::BitNumbering.get_attr_name() {
            let numbering = BitNumbering::from_str(val)
                .expect("Invalid bit numbering attribute value");
            Ok(PackStructAttribute::BitNumbering(numbering))
        } else if name == PackStructAttributeKind::SizeBytes.get_attr_name() {
            Ok(PackStructAttribute::SizeBytes(parse_num(val)))
        } else {
            Err(())
        }
    }

    /// Collects every recognized struct attribute out of raw (name, value) pairs.
    pub fn parse_all(attributes: &Vec<(String, String)>) -> Vec<Self> {
        attributes
            .iter()
            .filter_map(|&(ref name, ref val)| Self::parse(name, val).ok())
            .collect()
    }
}
/// The field-level attribute keys recognized by the derive.
// NOTE(review): "IntEndiannes" is missing a final 's'; it is public naming,
// so the typo is kept rather than breaking downstream users.
#[derive(Clone, Copy)]
pub enum PackFieldAttributeKind {
    IntEndiannes,
    BitPosition,
    BytePosition,
    ElementSizeBytes,
    ElementSizeBits,
    SizeBytes,
    SizeBits,
    Ty
}
impl PackFieldAttributeKind {
    /// The attribute-name string this kind matches on a field.
    fn get_attr_name(&self) -> &'static str {
        use self::PackFieldAttributeKind::*;
        match *self {
            IntEndiannes => "endian",
            BitPosition => "bits",
            BytePosition => "bytes",
            SizeBytes => "size_bytes",
            SizeBits => "size_bits",
            ElementSizeBytes => "element_size_bytes",
            ElementSizeBits => "element_size_bits",
            Ty => "ty"
        }
    }
}
/// A parsed field-level attribute. Byte-based sizes (`size_bytes`,
/// `element_size_bytes`) are normalized to bits during parsing, so only
/// bit-based size variants exist here; byte *positions* keep their own variant.
pub enum PackFieldAttribute {
    IntEndiannes(IntegerEndianness),
    BitPosition(BitsPositionParsed),
    BytePosition(BitsPositionParsed),
    SizeBits(usize),
    ElementSizeBits(usize),
    Ty(TyKind)
}
/// How a field's type should be interpreted when packing.
pub enum TyKind {
    Enum
}
impl PackFieldAttribute {
    /// Parses one field-level attribute key/value pair.
    ///
    /// Byte-based sizes are normalized into bits (multiplied by 8); byte
    /// positions are converted by `parse_position_val` with a multiplier of 8.
    /// Unknown keys (or an unknown `ty` value) yield `Err(())`.
    pub fn parse(name: &str, val: &str) -> Result<Self, ()> {
        if name == PackFieldAttributeKind::IntEndiannes.get_attr_name() {
            Ok(PackFieldAttribute::IntEndiannes(IntegerEndianness::from_str(val).unwrap()))
        } else if name == PackFieldAttributeKind::BitPosition.get_attr_name() {
            Ok(PackFieldAttribute::BitPosition(parse_position_val(val, 1)))
        } else if name == PackFieldAttributeKind::BytePosition.get_attr_name() {
            Ok(PackFieldAttribute::BytePosition(parse_position_val(val, 8)))
        } else if name == PackFieldAttributeKind::SizeBytes.get_attr_name() {
            Ok(PackFieldAttribute::SizeBits(parse_num(val) * 8))
        } else if name == PackFieldAttributeKind::SizeBits.get_attr_name() {
            Ok(PackFieldAttribute::SizeBits(parse_num(val)))
        } else if name == PackFieldAttributeKind::ElementSizeBytes.get_attr_name() {
            Ok(PackFieldAttribute::ElementSizeBits(parse_num(val) * 8))
        } else if name == PackFieldAttributeKind::ElementSizeBits.get_attr_name() {
            Ok(PackFieldAttribute::ElementSizeBits(parse_num(val)))
        } else if name == PackFieldAttributeKind::Ty.get_attr_name() && val == "enum" {
            Ok(PackFieldAttribute::Ty(TyKind::Enum))
        } else {
            Err(())
        }
    }

    /// Collects every recognized field attribute out of raw (name, value) pairs.
    pub fn parse_all(attributes: &Vec<(String, String)>) -> Vec<Self> {
        attributes
            .iter()
            .filter_map(|&(ref name, ref val)| Self::parse(name, val).ok())
            .collect()
    }
}
/// Parses a bit/byte position attribute value into a parsed bit position.
/// `multiplier` is 1 for bit positions (`bits="..."`) and 8 for byte
/// positions (`bytes="..."`).
///
/// Supported formats:
///
/// Single bit
/// 0
///
/// Open ended, start bit:
/// 0..
/// 0:
///
/// Inclusive range
/// 0:1
/// 0..=1
///
/// Exclusive range
/// 0..2
///
/// Returns: INCLUSIVE range
pub fn parse_position_val(v: &str, multiplier: usize) -> BitsPositionParsed {
    let v = v.trim();
    // Branch order matters: the open-ended checks must run before the range
    // checks below, because e.g. "1.." also `contains("..")`.
    if v.ends_with("..") {
        let v = v.replace("..", "");
        let n = parse_num(&v);
        return BitsPositionParsed::Start(n * multiplier);
    } else if v.ends_with(":") {
        let v = v.replace(":", "");
        let n = parse_num(&v);
        return BitsPositionParsed::Start(n * multiplier);
    } else if v.contains(":") || v.contains("..=") {
        // inclusive
        let s: Vec<_> = {
            if v.contains(":") {
                v.split(":").collect()
            } else {
                v.split("..=").collect()
            }
        };
        if s.len() == 2 {
            let start = parse_num(s[0]);
            let end = parse_num(s[1]);
            // With a byte multiplier, expand both endpoints to whole bytes:
            // the end byte index covers bits up to ((end+1)*8)-1 inclusive.
            if multiplier > 1 {
                return BitsPositionParsed::range_in_order(start * multiplier, ((end+1) * multiplier)-1);
            } else {
                return BitsPositionParsed::range_in_order(start, end);
            }
        }
    } else if v.contains("..") {
        // exclusive
        let s: Vec<_> = v.split("..").collect();
        if s.len() == 2 {
            let start = parse_num(s[0]);
            let end = parse_num(s[1]);
            if end == 0 {
                panic!("Ending cannot be 0 for exclusive ranges.");
            }
            // Convert to inclusive by dropping the excluded end element first.
            if multiplier > 1 {
                return BitsPositionParsed::range_in_order(start * multiplier, ((end-1) * multiplier)-1);
            } else {
                return BitsPositionParsed::range_in_order(start, end - 1);
            }
        }
    } else {
        // single bit (or single byte when multiplier == 8)
        let start = parse_num(v);
        if multiplier > 1 {
            return BitsPositionParsed::Range(start * multiplier, ((start+1) * multiplier)-1);
        } else {
            return BitsPositionParsed::Range(start, start);
        }
    }
    // Reached when a range-looking value didn't split into exactly two parts.
    panic!("Invalid bits position. Tried to parse: '{}'", v);
}
#[test]
fn test_parse_position_val() {
    // Single values expand to a full bit (or byte) span.
    {
        assert_eq!(BitsPositionParsed::Range(1, 1), parse_position_val("1", 1));
        assert_eq!(BitsPositionParsed::Range(8, 15), parse_position_val("1", 8));
        assert_eq!(BitsPositionParsed::Range(0, 7), parse_position_val("0", 8));
    }
    // Open-ended starts: "N.." and "N:".
    {
        assert_eq!(BitsPositionParsed::Start(1), parse_position_val("1..", 1));
        assert_eq!(BitsPositionParsed::Start(1), parse_position_val("1:", 1));
    }
    // Inclusive ranges: "a:b" and "a..=b".
    {
        assert_eq!(BitsPositionParsed::Range(1, 2), parse_position_val("1:2", 1));
        assert_eq!(BitsPositionParsed::Range(8, 23), parse_position_val("1:2", 8));
        assert_eq!(BitsPositionParsed::Range(0, 15), parse_position_val("0:1", 8));
        assert_eq!(BitsPositionParsed::Range(1, 2), parse_position_val("1..=2", 1));
    }
    // Exclusive ranges: "a..b" become inclusive ranges.
    {
        assert_eq!(BitsPositionParsed::Range(1, 2), parse_position_val("1..3", 1));
        assert_eq!(BitsPositionParsed::Range(8, 15), parse_position_val("1..3", 8));
    }
    // Byte positions (multiplier = 8).
    {
        assert_eq!(BitsPositionParsed::Range(0, 7), parse_position_val("0", 8));
        assert_eq!(BitsPositionParsed::Range(8, 39), parse_position_val("1:4", 8));
        assert_eq!(BitsPositionParsed::Start(40), parse_position_val("5..", 8));
    }
}

Просмотреть файл

@ -1,274 +0,0 @@
extern crate quote;
extern crate syn;
use utils::*;
use common::collections_prefix;
/// Generates the `PrimitiveEnum` (and string-conversion) impls for a unitary enum.
///
/// `prim_type` is the caller-requested primitive representation; when `None`,
/// the narrowest integer type that can hold every discriminant is inferred
/// and used as the associated `Primitive` type.
pub fn derive(ast: &syn::DeriveInput, mut prim_type: Option<syn::Ty>) -> quote::Tokens {
    let stdlib_prefix = collections_prefix();
    let ref name = ast.ident;
    let v = get_unitary_enum(ast);
    //panic!("v: {:?}", v);
    // `discriminant literal => Some(Variant)` arms for `from_primitive`.
    let from_primitive_match: Vec<_> = v.iter().map(|x| {
        let d = x.discriminant;
        let d = syn::Lit::Int(d, syn::IntTy::Unsuffixed);
        // The discriminant is stored as a magnitude; re-apply the sign as a token.
        let negative = if x.negative {
            quote! { - }
        } else {
            quote! {}
        };
        let n = &x.variant.ident;
        quote! {
            #negative #d => Some(#name::#n)
        }}).collect();
    // `Variant => "Variant"` arms for the display-string conversions.
    let to_display_str: Vec<_> = v.iter().map(|x| {
        let n = &x.variant.ident;
        let d = n.as_ref().to_string();
        quote! {
            #name::#n => (#d)
        }}).collect();
    // `"Variant" => Some(Variant)` arms (exact-case lookup).
    let from_str: Vec<_> = v.iter().map(|x| {
        let n = &x.variant.ident;
        let d = n.as_ref().to_string();
        quote! {
            #d => Some(#name::#n)
        }}).collect();
    // `"variant" => Some(Variant)` arms (lowercased lookup).
    let from_str_lower: Vec<_> = v.iter().map(|x| {
        let n = &x.variant.ident;
        let d = n.as_ref().to_string().to_lowercase();
        quote! {
            #d => Some(#name::#n)
        }}).collect();
    let all_variants: Vec<_> = v.iter().map(|x| {
        let n = &x.variant.ident;
        quote! { #name::#n }
    }).collect();
    let all_variants_len = all_variants.len();
    // No explicit primitive type requested: infer the narrowest one that fits.
    if prim_type.is_none() {
        // For each variant, the smallest integer type able to hold its value:
        // either the literal's own suffix, or one derived from the magnitude.
        let min_ty: Vec<_> = v.iter().map(|d| {
            if d.int_ty != syn::IntTy::Isize && d.int_ty != syn::IntTy::Usize && d.int_ty != syn::IntTy::Unsuffixed {
                d.int_ty
            } else {
                if d.negative {
                    let n = d.discriminant as i64;
                    // NOTE(review): `n` is still the positive magnitude here, so
                    // this comparison is never true for in-range magnitudes;
                    // values needing i64 would fall through and get a too-small
                    // type. Looks like a latent bug — confirm before relying on
                    // inference for very large negative discriminants.
                    if n < <i32>::min_value() as i64 {
                        syn::IntTy::I64
                    } else {
                        let n = -n;
                        if n < <i16>::min_value() as i64 {
                            syn::IntTy::I32
                        } else if n < <i8>::min_value() as i64 {
                            syn::IntTy::I16
                        } else {
                            syn::IntTy::I8
                        }
                    }
                } else {
                    let n = d.discriminant as u64;
                    if n > <u32>::max_value() as u64 {
                        syn::IntTy::U64
                    } else if n > <u16>::max_value() as u64 {
                        syn::IntTy::U32
                    } else if n > <u8>::max_value() as u64 {
                        syn::IntTy::U16
                    } else {
                        syn::IntTy::U8
                    }
                }
            }
        }).collect();
        // first mention, higher priority
        let priority = [
            syn::IntTy::I64,
            syn::IntTy::I32,
            syn::IntTy::I16,
            syn::IntTy::I8,
            syn::IntTy::U64,
            syn::IntTy::U32,
            syn::IntTy::U16,
            syn::IntTy::U8
        ];
        // Pick the widest (highest-priority) type required by any variant.
        let mut ty = syn::IntTy::U8;
        for t in min_ty {
            if priority.iter().position(|&x| x == t).unwrap() < priority.iter().position(|&x| x == ty).unwrap() {
                ty = t;
            }
        }
        let ty_str = match ty {
            syn::IntTy::I64 => "i64",
            syn::IntTy::I32 => "i32",
            syn::IntTy::I16 => "i16",
            syn::IntTy::I8 => "i8",
            syn::IntTy::U64 => "u64",
            syn::IntTy::U32 => "u32",
            syn::IntTy::U16 => "u16",
            syn::IntTy::U8 => "u8",
            _ => panic!("out of bounds ty!")
        };
        prim_type = Some(syn::parse_type(&ty_str).unwrap());
    }
    let prim_type = prim_type.expect("Unable to detect the primitive type for this enum.");
    // e.g. `MyEnum` -> `MY_ENUM_ALL`: the const slice holding every variant.
    let all_variants_const_ident = syn::Ident::from(format!("{}_ALL", to_snake_case(name.as_ref()).to_uppercase() ));
    // Static-str trait impl; always emitted.
    let mut str_format = {
        let to_display_str = to_display_str.clone();
        let all_variants_const_ident = all_variants_const_ident.clone();
        quote! {
            impl ::packed_struct::PrimitiveEnumStaticStr for #name {
                #[inline]
                fn to_display_str(&self) -> &'static str {
                    match *self {
                        #(#to_display_str),*
                    }
                }
                #[inline]
                fn all_variants() -> &'static [Self] {
                    #all_variants_const_ident
                }
            }
        }
    };
    // The Cow-based dynamic-str impl requires an allocator; only emit it when
    // the build configuration supports alloc.
    if ::common::alloc_supported() {
        str_format.append(quote! {
            impl ::packed_struct::PrimitiveEnumDynamicStr for #name {
                #[inline]
                fn to_display_str(&self) -> #stdlib_prefix::borrow::Cow<'static, str> {
                    let s = match *self {
                        #(#to_display_str),*
                    };
                    s.into()
                }
                #[inline]
                fn all_variants() -> #stdlib_prefix::borrow::Cow<'static, [Self]> {
                    #stdlib_prefix::borrow::Cow::Borrowed(#all_variants_const_ident)
                }
            }
        });
    };
    quote! {
        const #all_variants_const_ident: &'static [#name; #all_variants_len] = &[ #(#all_variants),* ];
        impl ::packed_struct::PrimitiveEnum for #name {
            type Primitive = #prim_type;
            #[inline]
            fn from_primitive(val: #prim_type) -> Option<Self> {
                match val {
                    #(#from_primitive_match),* ,
                    _ => None
                }
            }
            #[inline]
            fn to_primitive(&self) -> #prim_type {
                *self as #prim_type
            }
            #[inline]
            fn from_str(s: &str) -> Option<Self> {
                match s {
                    #(#from_str),* ,
                    _ => None
                }
            }
            #[inline]
            fn from_str_lower(s: &str) -> Option<Self> {
                match s {
                    #(#from_str_lower),* ,
                    _ => None
                }
            }
        }
        #str_format
    }
}
/// One enum variant together with its resolved discriminant.
#[derive(Debug)]
struct Variant {
    variant: syn::Variant,
    // Magnitude of the discriminant; the sign lives in `negative`.
    discriminant: u64,
    negative: bool,
    // The literal's integer suffix, or `Unsuffixed` when auto-assigned.
    int_ty: syn::IntTy
}
/// Extracts the variants of a unitary (fieldless) enum together with their
/// discriminant values.
///
/// Discriminants are represented as (magnitude, sign) pairs: `discriminant`
/// holds the absolute value and `negative` the sign, mirroring how `syn`
/// models negated literals. Variants without an explicit discriminant
/// continue the sequence from the previous value.
///
/// Panics when the input is not an enum, or when a variant uses a const
/// expression other than an (optionally negated) integer literal.
fn get_unitary_enum(input: &syn::DeriveInput) -> Vec<Variant> {
    match input.body {
        syn::Body::Enum(ref variants) => {
            let mut r = Vec::new();
            // Running (magnitude, sign) of the previous discriminant, used to
            // auto-assign values to variants that have none.
            let mut d = 0;
            let mut neg = false;
            for variant in variants {
                // NOTE(review): a non-unit variant silently stops collection
                // here rather than panicking (the panic below only fires for
                // non-enum inputs) — confirm this truncation is intended.
                if variant.data != syn::VariantData::Unit {
                    break;
                }
                let (discriminant, negative, int_ty) = match variant.discriminant {
                    // Explicit positive literal, e.g. `A = 3`.
                    Some(syn::ConstExpr::Lit(syn::Lit::Int(v, int_ty))) => { (v, false, int_ty) },
                    // Explicit negated literal, e.g. `A = -3`.
                    Some(syn::ConstExpr::Unary(syn::UnOp::Neg, ref v)) => {
                        match **v {
                            syn::ConstExpr::Lit(syn::Lit::Int(v, int_ty)) => {
                                (v, true, int_ty)
                            },
                            ref p => {
                                panic!("Unsupported negated enum const expr: {:?}", p);
                            }
                        }
                    }
                    Some(ref p) => {
                        panic!("Unsupported enum const expr: {:?}", p);
                    },
                    None => {
                        // Implicit discriminant: previous value plus one. With a
                        // negative predecessor the magnitude shrinks by one and
                        // the sign flips off once the magnitude reaches zero.
                        // NOTE(review): a preceding `-0` discriminant would make
                        // `d - 1` underflow the u64 magnitude — confirm the
                        // parser can't produce that input.
                        if neg {
                            (d - 1, d - 1 != 0, syn::IntTy::Unsuffixed)
                        } else {
                            (d + 1, false, syn::IntTy::Unsuffixed)
                        }
                    }
                };
                r.push(Variant {
                    variant: variant.clone(),
                    discriminant: discriminant,
                    negative: negative,
                    int_ty: int_ty
                });
                d = discriminant;
                neg = negative;
            }
            return r;
        },
        _ => ()
    }
    panic!("Enum's variants must be unitary.");
}

Просмотреть файл

@ -1,75 +0,0 @@
use std::ops::*;
/// Marker selecting "place this field immediately after the previous one".
pub struct NextBits;

/// Computes the INCLUSIVE bit range a field occupies inside the packed
/// structure, given the field's bit width and the previous field's range.
pub trait BitsRange {
    fn get_bits_range(&self, packed_bit_width: usize, prev_range: &Option<Range<usize>>) -> Range<usize>;
}

impl BitsRange for usize {
    /// A bare number is an explicit start bit; the range spans the field width.
    fn get_bits_range(&self, packed_bit_width: usize, _prev_range: &Option<Range<usize>>) -> Range<usize> {
        let start = *self;
        start..start + packed_bit_width - 1
    }
}

impl BitsRange for Range<usize> {
    /// An explicit range is used verbatim (it is already inclusive).
    fn get_bits_range(&self, _packed_bit_width: usize, _prev_range: &Option<Range<usize>>) -> Range<usize> {
        self.clone()
    }
}

impl BitsRange for NextBits {
    /// `NextBits` continues right after the previous field, or starts at
    /// bit 0 when this is the first field.
    fn get_bits_range(&self, packed_bit_width: usize, prev_range: &Option<Range<usize>>) -> Range<usize> {
        match *prev_range {
            Some(ref prev) => {
                let start = prev.end + 1;
                start..start + packed_bit_width - 1
            }
            None => 0..packed_bit_width - 1,
        }
    }
}
/// Returns a byte with the `n` lowest bits set, saturating at 8 bits (`0xFF`).
pub fn ones_u8(n: u8) -> u8 {
    if n >= 8 {
        0b11111111
    } else {
        // 2^n - 1 sets exactly the n low bits; n < 8 so the shift can't overflow.
        (1u8 << n) - 1
    }
}
// From rustc
/// Converts a CamelCase identifier to snake_case, preserving any leading
/// underscores.
pub fn to_snake_case(mut str: &str) -> String {
    let mut words = vec![];
    // Keep leading underscores as empty word slots so they survive the join.
    str = str.trim_start_matches(|c: char| {
        let is_underscore = c == '_';
        if is_underscore {
            words.push(String::new());
        }
        is_underscore
    });
    for part in str.split('_') {
        if part.is_empty() {
            continue;
        }
        let mut previous_was_upper = false;
        let mut current = String::new();
        for ch in part.chars() {
            // Start a new word at a lower->upper boundary (never splitting a
            // lone quote marker).
            if !current.is_empty() && current != "'" && ch.is_uppercase() && !previous_was_upper {
                words.push(current);
                current = String::new();
            }
            previous_was_upper = ch.is_uppercase();
            current.extend(ch.to_lowercase());
        }
        words.push(current);
    }
    words.join("_")
}

Просмотреть файл

@ -1 +0,0 @@
{"files":{"Cargo.toml":"983b4a016a497eded8adc6a23963343693ab4520826a1bc916e6f8448c9bb309","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","README.md":"3390fa1362c73052a268ae97a4777c5bcd8e991f866f9cce88ddde8d0963fd4d","src/ident.rs":"830077b64dce8c8ede1fb6ab664cae72f5496f4ab6be21a5b4e3b5e4e57ec425","src/lib.rs":"f799c898057a4e8e1620b32c70e13d3fee7af3a0352a5a4a1da6393942d21cc7","src/to_tokens.rs":"a871cb84506fa9e0783ac29617d686327ce5d05a9711a3a10833430b00345ccc","src/tokens.rs":"9a6010d7c0c6ccbe8b7bb1dae99722c91fc0559ea6ac6ea03ab2baafc2ec77ae","tests/test.rs":"8951d46b62d6922cc5fc130759be9723a862947738dda25cc84ff8cfdfebf729"},"package":"7a6e920b65c65f10b2ae65c831a81a073a89edd28c7cce89475bff467ab4167a"}

10
third_party/rust/quote-0.3.15/Cargo.toml поставляемый
Просмотреть файл

@ -1,10 +0,0 @@
[package]
name = "quote"
version = "0.3.15" # don't forget to update version in readme for breaking changes
authors = ["David Tolnay <dtolnay@gmail.com>"]
license = "MIT/Apache-2.0"
description = "Quasi-quoting macro quote!(...)"
repository = "https://github.com/dtolnay/quote"
documentation = "https://docs.rs/quote/"
keywords = ["syn"]
include = ["Cargo.toml", "src/**/*.rs", "tests/**/*.rs", "README.md", "LICENSE-APACHE", "LICENSE-MIT"]

201
third_party/rust/quote-0.3.15/LICENSE-APACHE поставляемый
Просмотреть файл

@ -1,201 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

25
third_party/rust/quote-0.3.15/LICENSE-MIT поставляемый
Просмотреть файл

@ -1,25 +0,0 @@
Copyright (c) 2016 The Rust Project Developers
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

104
third_party/rust/quote-0.3.15/README.md поставляемый
Просмотреть файл

@ -1,104 +0,0 @@
Rust Quasi-Quoting
==================
[![Build Status](https://api.travis-ci.org/dtolnay/quote.svg?branch=master)](https://travis-ci.org/dtolnay/quote)
[![Latest Version](https://img.shields.io/crates/v/quote.svg)](https://crates.io/crates/quote)
[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/quote/)
Quasi-quoting without a Syntex dependency, intended for use with [Macros
1.1](https://github.com/rust-lang/rfcs/blob/master/text/1681-macros-1.1.md).
```toml
[dependencies]
quote = "0.3"
```
```rust
#[macro_use]
extern crate quote;
```
## What is quasi-quoting?
Quasi-quoting is a way of writing code and treating it as data, similar to
writing code inside of a double-quoted string literal except more friendly to
your text editor or IDE. It does not get in the way of syntax highlighting,
brace matching, indentation, or autocompletion, all of which you would lose by
writing code inside of double quotes.
Check out
[my meetup talk](https://air.mozilla.org/rust-meetup-december-2016-12-15/)
on the topic to learn more about the use case. Start the video at 3:00.
This crate is motivated by the Macros 1.1 use case, but is a general-purpose
Rust quasi-quoting library and is not specific to procedural macros.
## Syntax
The quote crate provides a `quote!` macro within which you can write Rust code
that gets packaged into a `quote::Tokens` and can be treated as data. You should
think of `quote::Tokens` as representing a fragment of Rust source code. Call
`to_string()` or `as_str()` on a Tokens to get back the fragment of source code
as a string.
Within the `quote!` macro, interpolation is done with `#var`. Any type
implementing the `quote::ToTokens` trait can be interpolated. This includes most
Rust primitive types as well as most of the syntax tree types from
[`syn`](https://github.com/dtolnay/syn).
```rust
let tokens = quote! {
struct SerializeWith #generics #where_clause {
value: &'a #field_ty,
phantom: ::std::marker::PhantomData<#item_ty>,
}
impl #generics serde::Serialize for SerializeWith #generics #where_clause {
fn serialize<S>(&self, s: &mut S) -> Result<(), S::Error>
where S: serde::Serializer
{
#path(self.value, s)
}
}
SerializeWith {
value: #value,
phantom: ::std::marker::PhantomData::<#item_ty>,
}
};
```
Repetition is done using `#(...)*` or `#(...),*` very similar to `macro_rules!`:
- `#(#var)*` - no separators
- `#(#var),*` - the character before the asterisk is used as a separator
- `#( struct #var; )*` - the repetition can contain other things
- `#( #k => println!("{}", #v), )*` - even multiple interpolations
Tokens can be interpolated into other quotes:
```rust
let t = quote! { /* ... */ };
return quote! { /* ... */ #t /* ... */ };
```
The `quote!` macro relies on deep recursion so some large invocations may fail
with "recursion limit reached" when you compile. If it fails, bump up the
recursion limit by adding `#![recursion_limit = "128"]` to your crate. An even
higher limit may be necessary for especially large invocations. You don't need
this unless the compiler tells you that you need it.
## License
Licensed under either of
* Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
* MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
at your option.
### Contribution
Unless you explicitly state otherwise, any contribution intentionally submitted
for inclusion in this crate by you, as defined in the Apache-2.0 license, shall
be dual licensed as above, without any additional terms or conditions.

57
third_party/rust/quote-0.3.15/src/ident.rs поставляемый
Просмотреть файл

@ -1,57 +0,0 @@
use {Tokens, ToTokens};
use std::borrow::Cow;
use std::fmt;
/// An identifier that should be interpolated without quotes.
#[derive(Debug, Clone, Eq, Hash)]
pub struct Ident(String);

impl Ident {
    /// Builds an `Ident` from anything convertible into one.
    pub fn new<T: Into<Ident>>(t: T) -> Self {
        t.into()
    }
}

impl<'a> From<&'a str> for Ident {
    fn from(s: &str) -> Self {
        Ident(String::from(s))
    }
}

impl<'a> From<Cow<'a, str>> for Ident {
    fn from(s: Cow<'a, str>) -> Self {
        Ident(s.into_owned())
    }
}

impl From<String> for Ident {
    fn from(s: String) -> Self {
        Ident(s)
    }
}

impl AsRef<str> for Ident {
    fn as_ref(&self) -> &str {
        self.0.as_str()
    }
}

impl fmt::Display for Ident {
    /// Delegates to the inner string so all format flags are honored.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        fmt::Display::fmt(&self.0, formatter)
    }
}

/// Compare against any string-like value.
impl<T: ?Sized> PartialEq<T> for Ident
    where T: AsRef<str>
{
    fn eq(&self, other: &T) -> bool {
        self.0.as_str() == other.as_ref()
    }
}
impl ToTokens for Ident {
    fn to_tokens(&self, tokens: &mut Tokens) {
        // Append the raw identifier text, unquoted, to the token stream.
        tokens.append(self.as_ref())
    }
}

252
third_party/rust/quote-0.3.15/src/lib.rs поставляемый
Просмотреть файл

@ -1,252 +0,0 @@
//! Quasi-quoting without a Syntex dependency, intended for use with [Macros
//! 1.1](https://github.com/rust-lang/rfcs/blob/master/text/1681-macros-1.1.md).
//!
//! ```toml
//! [dependencies]
//! quote = "0.3"
//! ```
//!
//! ```rust,ignore
//! #[macro_use]
//! extern crate quote;
//! ```
//!
//! Interpolation is done with `#var`:
//!
//! ```text
//! let tokens = quote! {
//! struct SerializeWith #generics #where_clause {
//! value: &'a #field_ty,
//! phantom: ::std::marker::PhantomData<#item_ty>,
//! }
//!
//! impl #generics serde::Serialize for SerializeWith #generics #where_clause {
//! fn serialize<S>(&self, s: &mut S) -> Result<(), S::Error>
//! where S: serde::Serializer
//! {
//! #path(self.value, s)
//! }
//! }
//!
//! SerializeWith {
//! value: #value,
//! phantom: ::std::marker::PhantomData::<#item_ty>,
//! }
//! };
//! ```
//!
//! Repetition is done using `#(...)*` or `#(...),*` very similar to `macro_rules!`:
//!
//! - `#(#var)*` - no separators
//! - `#(#var),*` - the character before the asterisk is used as a separator
//! - `#( struct #var; )*` - the repetition can contain other things
//! - `#( #k => println!("{}", #v), )*` - even multiple interpolations
//!
//! The return type of `quote!` is `quote::Tokens`. Tokens can be interpolated into
//! other quotes:
//!
//! ```text
//! let t = quote! { /* ... */ };
//! return quote! { /* ... */ #t /* ... */ };
//! ```
//!
//! Call `to_string()` or `as_str()` on a Tokens to get a `String` or `&str` of Rust
//! code.
//!
//! The `quote!` macro relies on deep recursion so some large invocations may fail
//! with "recursion limit reached" when you compile. If it fails, bump up the
//! recursion limit by adding `#![recursion_limit = "128"]` to your crate. An even
//! higher limit may be necessary for especially large invocations.
mod tokens;
pub use tokens::Tokens;
mod to_tokens;
pub use to_tokens::{ToTokens, ByteStr, Hex};
mod ident;
pub use ident::Ident;
/// The whole point.
#[macro_export]
macro_rules! quote {
() => {
$crate::Tokens::new()
};
($($tt:tt)+) => {
{
let mut _s = $crate::Tokens::new();
quote_each_token!(_s $($tt)*);
_s
}
};
}
// Extract the names of all #metavariables and pass them to the $finish macro.
//
// in: pounded_var_names!(then () a #b c #( #d )* #e)
// out: then!(() b d e)
#[macro_export]
#[doc(hidden)]
macro_rules! pounded_var_names {
($finish:ident ($($found:ident)*) # ( $($inner:tt)* ) $($rest:tt)*) => {
pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*)
};
($finish:ident ($($found:ident)*) # [ $($inner:tt)* ] $($rest:tt)*) => {
pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*)
};
($finish:ident ($($found:ident)*) # { $($inner:tt)* } $($rest:tt)*) => {
pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*)
};
($finish:ident ($($found:ident)*) # $first:ident $($rest:tt)*) => {
pounded_var_names!($finish ($($found)* $first) $($rest)*)
};
($finish:ident ($($found:ident)*) ( $($inner:tt)* ) $($rest:tt)*) => {
pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*)
};
($finish:ident ($($found:ident)*) [ $($inner:tt)* ] $($rest:tt)*) => {
pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*)
};
($finish:ident ($($found:ident)*) { $($inner:tt)* } $($rest:tt)*) => {
pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*)
};
($finish:ident ($($found:ident)*) $ignore:tt $($rest:tt)*) => {
pounded_var_names!($finish ($($found)*) $($rest)*)
};
($finish:ident ($($found:ident)*)) => {
$finish!(() $($found)*)
};
}
// in: nested_tuples_pat!(() a b c d e)
// out: ((((a b) c) d) e)
//
// in: nested_tuples_pat!(() a)
// out: a
#[macro_export]
#[doc(hidden)]
macro_rules! nested_tuples_pat {
(()) => {
&()
};
(() $first:ident $($rest:ident)*) => {
nested_tuples_pat!(($first) $($rest)*)
};
(($pat:pat) $first:ident $($rest:ident)*) => {
nested_tuples_pat!((($pat, $first)) $($rest)*)
};
(($done:pat)) => {
$done
};
}
// in: multi_zip_expr!(() a b c d e)
// out: a.into_iter().zip(b).zip(c).zip(d).zip(e)
//
// in: multi_zip_iter!(() a)
// out: a
#[macro_export]
#[doc(hidden)]
macro_rules! multi_zip_expr {
(()) => {
&[]
};
(() $single:ident) => {
$single
};
(() $first:ident $($rest:ident)*) => {
multi_zip_expr!(($first.into_iter()) $($rest)*)
};
(($zips:expr) $first:ident $($rest:ident)*) => {
multi_zip_expr!(($zips.zip($first)) $($rest)*)
};
(($done:expr)) => {
$done
};
}
#[macro_export]
#[doc(hidden)]
macro_rules! quote_each_token {
($tokens:ident) => {};
($tokens:ident # ! $($rest:tt)*) => {
$tokens.append("#");
$tokens.append("!");
quote_each_token!($tokens $($rest)*);
};
($tokens:ident # ( $($inner:tt)* ) * $($rest:tt)*) => {
for pounded_var_names!(nested_tuples_pat () $($inner)*)
in pounded_var_names!(multi_zip_expr () $($inner)*) {
quote_each_token!($tokens $($inner)*);
}
quote_each_token!($tokens $($rest)*);
};
($tokens:ident # ( $($inner:tt)* ) $sep:tt * $($rest:tt)*) => {
for (_i, pounded_var_names!(nested_tuples_pat () $($inner)*))
in pounded_var_names!(multi_zip_expr () $($inner)*).into_iter().enumerate() {
if _i > 0 {
$tokens.append(stringify!($sep));
}
quote_each_token!($tokens $($inner)*);
}
quote_each_token!($tokens $($rest)*);
};
($tokens:ident # [ $($inner:tt)* ] $($rest:tt)*) => {
$tokens.append("#");
$tokens.append("[");
quote_each_token!($tokens $($inner)*);
$tokens.append("]");
quote_each_token!($tokens $($rest)*);
};
($tokens:ident # $first:ident $($rest:tt)*) => {
$crate::ToTokens::to_tokens(&$first, &mut $tokens);
quote_each_token!($tokens $($rest)*);
};
($tokens:ident ( $($first:tt)* ) $($rest:tt)*) => {
$tokens.append("(");
quote_each_token!($tokens $($first)*);
$tokens.append(")");
quote_each_token!($tokens $($rest)*);
};
($tokens:ident [ $($first:tt)* ] $($rest:tt)*) => {
$tokens.append("[");
quote_each_token!($tokens $($first)*);
$tokens.append("]");
quote_each_token!($tokens $($rest)*);
};
($tokens:ident { $($first:tt)* } $($rest:tt)*) => {
$tokens.append("{");
quote_each_token!($tokens $($first)*);
$tokens.append("}");
quote_each_token!($tokens $($rest)*);
};
($tokens:ident $first:tt $($rest:tt)*) => {
$tokens.append(stringify!($first));
quote_each_token!($tokens $($rest)*);
};
}

357
third_party/rust/quote-0.3.15/src/to_tokens.rs поставляемый
Просмотреть файл

@ -1,357 +0,0 @@
use super::Tokens;
use std::borrow::Cow;
/// Types that can be interpolated inside a `quote!(...)` invocation.
pub trait ToTokens {
/// Write `self` to the given `Tokens`.
///
/// Example implementation for a struct representing Rust paths like
/// `std::cmp::PartialEq`:
///
/// ```ignore
/// pub struct Path {
/// pub global: bool,
/// pub segments: Vec<PathSegment>,
/// }
///
/// impl ToTokens for Path {
/// fn to_tokens(&self, tokens: &mut Tokens) {
/// for (i, segment) in self.segments.iter().enumerate() {
/// if i > 0 || self.global {
/// tokens.append("::");
/// }
/// segment.to_tokens(tokens);
/// }
/// }
/// }
/// ```
fn to_tokens(&self, &mut Tokens);
}
impl<'a, T: ?Sized + ToTokens> ToTokens for &'a T {
fn to_tokens(&self, tokens: &mut Tokens) {
(**self).to_tokens(tokens);
}
}
impl<'a, T: ?Sized + ToOwned + ToTokens> ToTokens for Cow<'a, T> {
fn to_tokens(&self, tokens: &mut Tokens) {
(**self).to_tokens(tokens);
}
}
impl<T: ?Sized + ToTokens> ToTokens for Box<T> {
fn to_tokens(&self, tokens: &mut Tokens) {
(**self).to_tokens(tokens);
}
}
impl<T: ToTokens> ToTokens for Option<T> {
fn to_tokens(&self, tokens: &mut Tokens) {
if let Some(ref t) = *self {
t.to_tokens(tokens);
}
}
}
impl ToTokens for str {
fn to_tokens(&self, tokens: &mut Tokens) {
let mut escaped = "\"".to_string();
for ch in self.chars() {
match ch {
'\0' => escaped.push_str(r"\0"),
'\'' => escaped.push_str("'"),
_ => escaped.extend(ch.escape_default().map(|c| c as char)),
}
}
escaped.push('"');
tokens.append(&escaped);
}
}
impl ToTokens for String {
fn to_tokens(&self, tokens: &mut Tokens) {
self.as_str().to_tokens(tokens);
}
}
impl ToTokens for char {
fn to_tokens(&self, tokens: &mut Tokens) {
match *self {
'\0' => tokens.append(r"'\0'"),
'"' => tokens.append("'\"'"),
_ => tokens.append(&format!("{:?}", self)),
}
}
}
/// Wrap a `&str` so it interpolates as a byte-string: `b"abc"`.
#[derive(Debug)]
pub struct ByteStr<'a>(pub &'a str);
impl<'a> ToTokens for ByteStr<'a> {
fn to_tokens(&self, tokens: &mut Tokens) {
let mut escaped = "b\"".to_string();
for b in self.0.bytes() {
match b {
b'\0' => escaped.push_str(r"\0"),
b'\t' => escaped.push_str(r"\t"),
b'\n' => escaped.push_str(r"\n"),
b'\r' => escaped.push_str(r"\r"),
b'"' => escaped.push_str("\\\""),
b'\\' => escaped.push_str("\\\\"),
b'\x20' ... b'\x7E' => escaped.push(b as char),
_ => escaped.push_str(&format!("\\x{:02X}", b)),
}
}
escaped.push('"');
tokens.append(&escaped);
}
}
macro_rules! impl_to_tokens_display {
($ty:ty) => {
impl ToTokens for $ty {
fn to_tokens(&self, tokens: &mut Tokens) {
tokens.append(&self.to_string());
}
}
};
}
impl_to_tokens_display!(Tokens);
impl_to_tokens_display!(bool);
/// Wrap an integer so it interpolates as a hexadecimal.
#[derive(Debug)]
pub struct Hex<T>(pub T);
macro_rules! impl_to_tokens_integer {
($ty:ty) => {
impl ToTokens for $ty {
fn to_tokens(&self, tokens: &mut Tokens) {
tokens.append(&format!(concat!("{}", stringify!($ty)), self));
}
}
impl ToTokens for Hex<$ty> {
fn to_tokens(&self, tokens: &mut Tokens) {
tokens.append(&format!(concat!("0x{:X}", stringify!($ty)), self.0));
}
}
};
}
impl_to_tokens_integer!(i8);
impl_to_tokens_integer!(i16);
impl_to_tokens_integer!(i32);
impl_to_tokens_integer!(i64);
impl_to_tokens_integer!(isize);
impl_to_tokens_integer!(u8);
impl_to_tokens_integer!(u16);
impl_to_tokens_integer!(u32);
impl_to_tokens_integer!(u64);
impl_to_tokens_integer!(usize);
macro_rules! impl_to_tokens_floating {
($ty:ty) => {
impl ToTokens for $ty {
fn to_tokens(&self, tokens: &mut Tokens) {
use std::num::FpCategory::*;
match self.classify() {
Zero | Subnormal | Normal => {
tokens.append(&format!(concat!("{}", stringify!($ty)), self));
}
Nan => {
tokens.append("::");
tokens.append("std");
tokens.append("::");
tokens.append(stringify!($ty));
tokens.append("::");
tokens.append("NAN");
}
Infinite => {
tokens.append("::");
tokens.append("std");
tokens.append("::");
tokens.append(stringify!($ty));
tokens.append("::");
if self.is_sign_positive() {
tokens.append("INFINITY");
} else {
tokens.append("NEG_INFINITY");
}
}
}
}
}
};
}
impl_to_tokens_floating!(f32);
impl_to_tokens_floating!(f64);
impl<T: ToTokens> ToTokens for [T] {
fn to_tokens(&self, tokens: &mut Tokens) {
tokens.append("[");
for item in self {
item.to_tokens(tokens);
tokens.append(",");
}
tokens.append("]");
}
}
impl<T: ToTokens> ToTokens for Vec<T> {
fn to_tokens(&self, tokens: &mut Tokens) {
self[..].to_tokens(tokens)
}
}
macro_rules! array_impls {
($($N:expr)+) => {
$(
impl<T: ToTokens> ToTokens for [T; $N] {
fn to_tokens(&self, tokens: &mut Tokens) {
self[..].to_tokens(tokens)
}
}
)+
}
}
array_impls! {
0 1 2 3 4 5 6 7 8 9
10 11 12 13 14 15 16 17 18 19
20 21 22 23 24 25 26 27 28 29
30 31 32
}
macro_rules! tuple_impls {
($(
$Tuple:ident {
$(($idx:tt) -> $T:ident)*
}
)+) => {
$(
impl<$($T: ToTokens),*> ToTokens for ($($T,)*) {
fn to_tokens(&self, tokens: &mut Tokens) {
tokens.append("(");
$(
self.$idx.to_tokens(tokens);
tokens.append(",");
)*
tokens.append(")");
}
}
)+
}
}
tuple_impls! {
Tuple0 {}
Tuple1 {
(0) -> A
}
Tuple2 {
(0) -> A
(1) -> B
}
Tuple3 {
(0) -> A
(1) -> B
(2) -> C
}
Tuple4 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
}
Tuple5 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
}
Tuple6 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
(5) -> F
}
Tuple7 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
(5) -> F
(6) -> G
}
Tuple8 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
(5) -> F
(6) -> G
(7) -> H
}
Tuple9 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
(5) -> F
(6) -> G
(7) -> H
(8) -> I
}
Tuple10 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
(5) -> F
(6) -> G
(7) -> H
(8) -> I
(9) -> J
}
Tuple11 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
(5) -> F
(6) -> G
(7) -> H
(8) -> I
(9) -> J
(10) -> K
}
Tuple12 {
(0) -> A
(1) -> B
(2) -> C
(3) -> D
(4) -> E
(5) -> F
(6) -> G
(7) -> H
(8) -> I
(9) -> J
(10) -> K
(11) -> L
}
}

156
third_party/rust/quote-0.3.15/src/tokens.rs поставляемый
Просмотреть файл

@ -1,156 +0,0 @@
use super::ToTokens;
use std::fmt::{self, Display};
use std::str::FromStr;
/// Tokens produced by a `quote!(...)` invocation.
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Tokens(String);
impl Tokens {
/// Empty tokens.
pub fn new() -> Self {
Tokens(String::new())
}
/// For use by `ToTokens` implementations.
///
/// ```
/// # #[macro_use] extern crate quote;
/// # use quote::{Tokens, ToTokens};
/// # fn main() {
/// struct X;
///
/// impl ToTokens for X {
/// fn to_tokens(&self, tokens: &mut Tokens) {
/// tokens.append("a");
/// tokens.append("b");
/// tokens.append("c");
/// }
/// }
///
/// let tokens = quote!(#X);
/// assert_eq!(tokens.as_str(), "a b c");
/// # }
/// ```
pub fn append<T: AsRef<str>>(&mut self, token: T) {
if !self.0.is_empty() && !token.as_ref().is_empty() {
self.0.push(' ');
}
self.0.push_str(token.as_ref());
}
/// For use by `ToTokens` implementations.
///
/// ```
/// # #[macro_use] extern crate quote;
/// # use quote::{Tokens, ToTokens};
/// # fn main() {
/// struct X;
///
/// impl ToTokens for X {
/// fn to_tokens(&self, tokens: &mut Tokens) {
/// tokens.append_all(&[true, false]);
/// }
/// }
///
/// let tokens = quote!(#X);
/// assert_eq!(tokens.as_str(), "true false");
/// # }
/// ```
pub fn append_all<T, I>(&mut self, iter: I)
where T: ToTokens,
I: IntoIterator<Item = T>
{
for token in iter {
token.to_tokens(self);
}
}
/// For use by `ToTokens` implementations.
///
/// ```
/// # #[macro_use] extern crate quote;
/// # use quote::{Tokens, ToTokens};
/// # fn main() {
/// struct X;
///
/// impl ToTokens for X {
/// fn to_tokens(&self, tokens: &mut Tokens) {
/// tokens.append_separated(&[true, false], ",");
/// }
/// }
///
/// let tokens = quote!(#X);
/// assert_eq!(tokens.as_str(), "true , false");
/// # }
/// ```
pub fn append_separated<T, I, S: AsRef<str>>(&mut self, iter: I, sep: S)
where T: ToTokens,
I: IntoIterator<Item = T>
{
for (i, token) in iter.into_iter().enumerate() {
if i > 0 {
self.append(sep.as_ref());
}
token.to_tokens(self);
}
}
/// For use by `ToTokens` implementations.
///
/// ```
/// # #[macro_use] extern crate quote;
/// # use quote::{Tokens, ToTokens};
/// # fn main() {
/// struct X;
///
/// impl ToTokens for X {
/// fn to_tokens(&self, tokens: &mut Tokens) {
/// tokens.append_terminated(&[true, false], ",");
/// }
/// }
///
/// let tokens = quote!(#X);
/// assert_eq!(tokens.as_str(), "true , false ,");
/// # }
/// ```
pub fn append_terminated<T, I, S: AsRef<str>>(&mut self, iter: I, term: S)
where T: ToTokens,
I: IntoIterator<Item = T>
{
for token in iter {
token.to_tokens(self);
self.append(term.as_ref());
}
}
pub fn as_str(&self) -> &str {
&self.0
}
pub fn into_string(self) -> String {
self.0
}
pub fn parse<T: FromStr>(&self) -> Result<T, T::Err> {
FromStr::from_str(&self.0)
}
}
impl Default for Tokens {
fn default() -> Self {
Tokens::new()
}
}
impl Display for Tokens {
fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> {
self.0.fmt(formatter)
}
}
impl AsRef<str> for Tokens {
fn as_ref(&self) -> &str {
&self.0
}
}

360
third_party/rust/quote-0.3.15/tests/test.rs поставляемый
Просмотреть файл

@ -1,360 +0,0 @@
use std::{f32, f64};
use std::borrow::Cow;
#[macro_use]
extern crate quote;
struct X;
impl quote::ToTokens for X {
fn to_tokens(&self, tokens: &mut quote::Tokens) {
tokens.append("X");
}
}
#[test]
fn test_quote_impl() {
let tokens = quote!(
impl<'a, T: ToTokens> ToTokens for &'a T {
fn to_tokens(&self, tokens: &mut Tokens) {
(**self).to_tokens(tokens)
}
}
);
let expected = concat!(
"impl < 'a , T : ToTokens > ToTokens for & 'a T { ",
"fn to_tokens ( & self , tokens : & mut Tokens ) { ",
"( * * self ) . to_tokens ( tokens ) ",
"} ",
"}"
);
assert_eq!(expected, tokens.as_str());
}
#[test]
fn test_append_tokens() {
let mut tokens = quote!(let x =);
tokens.append(quote!("Hello World!";));
let expected = "let x = \"Hello World!\" ;";
assert_eq!(expected, tokens.as_str());
}
#[test]
fn test_substitution() {
let x = X;
let tokens = quote!(#x <#x> (#x) [#x] {#x});
let expected = "X < X > ( X ) [ X ] { X }";
assert_eq!(expected, tokens.as_str());
}
#[test]
fn test_iter() {
let primes = &[X, X, X, X];
assert_eq!("X X X X", quote!(#(#primes)*).as_str());
assert_eq!("X , X , X , X ,", quote!(#(#primes,)*).as_str());
assert_eq!("X , X , X , X", quote!(#(#primes),*).as_str());
}
#[test]
fn test_advanced() {
let generics = quote!( <'a, T> );
let where_clause = quote!( where T: Serialize );
let field_ty = quote!( String );
let item_ty = quote!( Cow<'a, str> );
let path = quote!( SomeTrait::serialize_with );
let value = quote!( self.x );
let tokens = quote! {
struct SerializeWith #generics #where_clause {
value: &'a #field_ty,
phantom: ::std::marker::PhantomData<#item_ty>,
}
impl #generics ::serde::Serialize for SerializeWith #generics #where_clause {
fn serialize<S>(&self, s: &mut S) -> Result<(), S::Error>
where S: ::serde::Serializer
{
#path(self.value, s)
}
}
SerializeWith {
value: #value,
phantom: ::std::marker::PhantomData::<#item_ty>,
}
};
let expected = concat!(
"struct SerializeWith < 'a , T > where T : Serialize { ",
"value : & 'a String , ",
"phantom : :: std :: marker :: PhantomData < Cow < 'a , str > > , ",
"} ",
"impl < 'a , T > :: serde :: Serialize for SerializeWith < 'a , T > where T : Serialize { ",
"fn serialize < S > ( & self , s : & mut S ) -> Result < ( ) , S :: Error > ",
"where S : :: serde :: Serializer ",
"{ ",
"SomeTrait :: serialize_with ( self . value , s ) ",
"} ",
"} ",
"SerializeWith { ",
"value : self . x , ",
"phantom : :: std :: marker :: PhantomData :: < Cow < 'a , str > > , ",
"}"
);
assert_eq!(expected, tokens.as_str());
}
#[test]
fn test_unit() {
let x = ();
let tokens = quote!(#x);
let expected = "( )";
assert_eq!(expected, tokens.as_str());
}
#[test]
fn test_tuple() {
let x = ("foo", 4_u32);
let tokens = quote!(#x);
let expected = "( \"foo\" , 4u32 , )";
assert_eq!(expected, tokens.as_str());
}
#[test]
fn test_array() {
let x: [u32; 3] = [1, 2, 3];
let tokens = quote!(#x);
let expected = "[ 1u32 , 2u32 , 3u32 , ]";
assert_eq!(expected, tokens.as_str());
}
#[test]
fn test_slice() {
let x: &[u32] = &[1, 2, 3];
let tokens = quote!(&#x); // Note: explicit `&`
let expected = "& [ 1u32 , 2u32 , 3u32 , ]";
assert_eq!(expected, tokens.as_str());
}
#[test]
fn test_vec() {
let x: Vec<u32> = vec![1, 2, 3];
let tokens = quote!(vec!#x); // Note: explicit `vec!`
let expected = "vec ! [ 1u32 , 2u32 , 3u32 , ]";
assert_eq!(expected, tokens.as_str());
}
#[test]
fn test_integer() {
let ii8 = -1i8;
let ii16 = -1i16;
let ii32 = -1i32;
let ii64 = -1i64;
let iisize = -1isize;
let uu8 = 1u8;
let uu16 = 1u16;
let uu32 = 1u32;
let uu64 = 1u64;
let uusize = 1usize;
let tokens = quote! {
#ii8 #ii16 #ii32 #ii64 #iisize
#uu8 #uu16 #uu32 #uu64 #uusize
};
let expected = "-1i8 -1i16 -1i32 -1i64 -1isize 1u8 1u16 1u32 1u64 1usize";
assert_eq!(expected, tokens.as_str());
}
#[test]
fn test_hex() {
let hex = quote::Hex(0xFFFF_0000_u32);
let tokens = quote!(#hex);
let expected = "0xFFFF0000u32";
assert_eq!(expected, tokens.as_str());
}
#[test]
fn test_floating() {
let e32 = 2.71828f32;
let nan32 = f32::NAN;
let inf32 = f32::INFINITY;
let neginf32 = f32::NEG_INFINITY;
let e64 = 2.71828f64;
let nan64 = f64::NAN;
let inf64 = f64::INFINITY;
let neginf64 = f64::NEG_INFINITY;
let tokens = quote! {
#e32 @ #nan32 @ #inf32 @ #neginf32
#e64 @ #nan64 @ #inf64 @ #neginf64
};
let expected = concat!(
"2.71828f32 @ :: std :: f32 :: NAN @ :: std :: f32 :: INFINITY @ :: std :: f32 :: NEG_INFINITY ",
"2.71828f64 @ :: std :: f64 :: NAN @ :: std :: f64 :: INFINITY @ :: std :: f64 :: NEG_INFINITY",
);
assert_eq!(expected, tokens.as_str());
}
#[test]
fn test_char() {
let zero = '\0';
let pound = '#';
let quote = '"';
let apost = '\'';
let newline = '\n';
let heart = '\u{2764}';
let tokens = quote! {
#zero #pound #quote #apost #newline #heart
};
let expected = "'\\0' '#' '\"' '\\'' '\\n' '\u{2764}'";
assert_eq!(expected, tokens.as_str());
}
#[test]
fn test_str() {
let s = "\0 a 'b \" c";
let tokens = quote!(#s);
let expected = "\"\\0 a 'b \\\" c\"";
assert_eq!(expected, tokens.as_str());
}
#[test]
fn test_string() {
let s = "\0 a 'b \" c".to_string();
let tokens = quote!(#s);
let expected = "\"\\0 a 'b \\\" c\"";
assert_eq!(expected, tokens.as_str());
}
#[test]
fn test_byte_str() {
let s = quote::ByteStr("\0 a 'b \" c");
let tokens = quote!(#s);
let expected = "b\"\\0 a 'b \\\" c\"";
assert_eq!(expected, tokens.as_str());
}
#[test]
fn test_byte_str_escape() {
let s = quote::ByteStr("\u{3c3} \\ \" \n");
let tokens = quote!(#s);
let expected = "b\"\\xCF\\x83 \\\\ \\\" \\n\"";
assert_eq!(expected, tokens.as_str());
}
#[test]
fn test_ident() {
let foo = quote::Ident::from("Foo");
let bar = quote::Ident::from(format!("Bar{}", 7));
let tokens = quote!(struct #foo; enum #bar {});
let expected = "struct Foo ; enum Bar7 { }";
assert_eq!(expected, tokens.as_str());
}
#[test]
fn test_duplicate() {
let ch = 'x';
let tokens = quote!(#ch #ch);
let expected = "'x' 'x'";
assert_eq!(expected, tokens.as_str());
}
#[test]
fn test_fancy_repetition() {
let foo = vec!["a", "b"];
let bar = vec![true, false];
let tokens = quote! {
#(#foo: #bar),*
};
let expected = r#""a" : true , "b" : false"#;
assert_eq!(expected, tokens.as_str());
}
#[test]
fn test_nested_fancy_repetition() {
let nested = vec![vec!['a', 'b', 'c'], vec!['x', 'y', 'z']];
let tokens = quote! {
#(
#(#nested)*
),*
};
let expected = "'a' 'b' 'c' , 'x' 'y' 'z'";
assert_eq!(expected, tokens.as_str());
}
#[test]
fn test_empty_repetition() {
let tokens = quote!(#(a b)* #(c d),*);
assert_eq!("", tokens.as_str());
}
#[test]
fn test_variable_name_conflict() {
// The implementation of `#(...),*` uses the variable `_i` but it should be
// fine, if a little confusing when debugging.
let _i = vec!['a', 'b'];
let tokens = quote! { #(#_i),* };
let expected = "'a' , 'b'";
assert_eq!(expected, tokens.as_str());
}
#[test]
fn test_empty_quote() {
let tokens = quote!();
assert_eq!("", tokens.as_str());
}
#[test]
fn test_box_str() {
let b = "str".to_owned().into_boxed_str();
let tokens = quote! { #b };
assert_eq!("\"str\"", tokens.as_str());
}
#[test]
fn test_cow() {
let owned: Cow<quote::Ident> = Cow::Owned(quote::Ident::from("owned"));
let ident = quote::Ident::from("borrowed");
let borrowed = Cow::Borrowed(&ident);
let tokens = quote! { #owned #borrowed };
assert_eq!("owned borrowed", tokens.as_str());
}
#[test]
fn test_closure() {
fn field_i(i: usize) -> quote::Ident {
quote::Ident::new(format!("__field{}", i))
}
let fields = (0usize..3)
.map(field_i as fn(_) -> _)
.map(|var| quote! { #var });
let tokens = quote! { #(#fields)* };
assert_eq!("__field0 __field1 __field2", tokens.as_str());
}

Просмотреть файл

@ -1 +0,0 @@
{"files":{"Cargo.toml":"e1c76f5a888ab4a9047a9079a2c69a666170ef5bbdbd540720cbfe4b6c2a5b78","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","README.md":"aa140842ad00ec4f0601fefdeef5046bfeae3834d58c9ba6d9721d65885fc016","src/aster/generics.rs":"030a8e7f4de881ef60c171fe48bcb65aec8d58f3558f755a3b9b953b2c9f1819","src/aster/ident.rs":"e9d082664f008a56bd854011310b4258ab072740ba82e57495b6e8a868a5f36b","src/aster/invoke.rs":"2b1b993973ab4f5c8fa6d6a286576b2542edce21fe9904f5133c470c072e6d3f","src/aster/lifetime.rs":"304101622e102586946802ae17a0a76d53a7f3a3e72e520d0e2ac3c8664db3ef","src/aster/mod.rs":"12220f73b0021e72b4c50f6a513cff174b9c7267209aa23f183043d96ccc9ab7","src/aster/path.rs":"7298bcbd522e10a48ec9d54a1959eec4a0b6368fda9ef10d6e19fc488507d5bb","src/aster/qpath.rs":"5ba33af56ccf74f5c516ed542d117d1f6ca9f7dfd1a74d08b4ac50d95666c497","src/aster/ty.rs":"07d783269047f3be20e73ccc962bb4c4cd63c869d73de8bae7bef25b53986d09","src/aster/ty_param.rs":"4f17c12e0b7cb306cfdfaea648eaccee5116923b2abb4d35d085d88f70c40385","src/aster/where_predicate.rs":"5fb8ec3fcb67bcc1d9bb7b64cf2f5beb601aac6502d6db30c0cdf8641fa248d1","src/attr.rs":"2c0c14c45f39af22ea10e0d15c24ef349b23408b6c4e24b6e91c48d38a5e5ca2","src/constant.rs":"b68686cdf371d76d7ac548184d52e46fa1312e84b02a5b504fedbbc54a3b26ff","src/data.rs":"1d6c3c29b1d94a01fb6ec41b4144c22a8ebd7a7fe9074d87fbe2fd1776f2f38b","src/derive.rs":"5d474fa52c19c4d46ff79be39038254887ca01f1786c0032b54e0b5ad8697b03","src/escape.rs":"7263b3df626ad26e5b82b329557584f7cdd61589977ce82c9e794e1b61f042b2","src/expr.rs":"77e22fbf2d1003366296a05d42806a69fdaaa73b4a02e6a99438d8fc886d06b6","src/fold.rs":"879928ea8de2b228f9073658ffa100c689ec85edabfa4f876f9aee3b13057522","src/generics.rs":"02ddd46f39d771d7f229d69f763278e75ee50a5af2c7d2746080e959639726f7","src/ident.rs":"9eb6354d2b58e14191e44592c122501232539b53480389ab9e35d426c3962123","src/item.rs":"c91ec1b42387759
0acd3fa01b094f452ef6b177db6c177056f33caf61f3fe92d","src/krate.rs":"78f89e1f12f5b790d99d88a3a013178585f6715a27eb26f604e72e763a47dfdf","src/lib.rs":"2931fc34ec99b9ce1776debaca8114eb3531c0851ca584239c03637c90b1cf7d","src/lit.rs":"f8cdfd540f038f699cb546fc3cfc43ec6f72551aa12ca351ea0beb9c8100fa4c","src/mac.rs":"b3ba8e7531980abecec4a9f86f68ae136c5982617e0e37aaa823d288ba6f5e4e","src/op.rs":"232f84ba605ed50e70ee02169dd551548872135cf56f155637917ec3bf810ce1","src/ty.rs":"d71d75de0c0a6d27bc1d425a4ce282e42f7d6126e34ecaa7798353dffb231229","src/visit.rs":"a0c4c7d9768bd5b8fab5441932fc4075e7dc827b73144e5972a04fc7c2e676ff"},"package":"d3b891b9015c88c576343b9b3e41c2c11a51c219ef067b264bd9c8aa9b441dad"}

30
third_party/rust/syn-0.11.11/Cargo.toml поставляемый
Просмотреть файл

@ -1,30 +0,0 @@
[package]
name = "syn"
version = "0.11.11" # don't forget to update version in readme for breaking changes
authors = ["David Tolnay <dtolnay@gmail.com>"]
license = "MIT/Apache-2.0"
description = "Nom parser for Rust source code"
repository = "https://github.com/dtolnay/syn"
documentation = "https://dtolnay.github.io/syn/syn/"
categories = ["development-tools::procedural-macro-helpers"]
include = ["Cargo.toml", "src/**/*.rs", "README.md", "LICENSE-APACHE", "LICENSE-MIT"]
[features]
default = ["parsing", "printing"]
aster = []
full = []
parsing = ["unicode-xid", "synom"]
printing = ["quote"]
visit = []
fold = []
[dependencies]
quote = { version = "0.3.7", optional = true }
unicode-xid = { version = "0.0.4", optional = true }
synom = { version = "0.11", path = "synom", optional = true }
[dev-dependencies]
syntex_pos = "0.58"
syntex_syntax = "0.58"
tempdir = "0.3.5"
walkdir = "1.0.1"

201
third_party/rust/syn-0.11.11/LICENSE-APACHE поставляемый
Просмотреть файл

@ -1,201 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

25
third_party/rust/syn-0.11.11/LICENSE-MIT поставляемый
Просмотреть файл

@ -1,25 +0,0 @@
Copyright (c) 2016 The Rust Project Developers
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

205
third_party/rust/syn-0.11.11/README.md поставляемый
Просмотреть файл

@ -1,205 +0,0 @@
Nom parser for Rust source code
===============================
[![Build Status](https://api.travis-ci.org/dtolnay/syn.svg?branch=master)](https://travis-ci.org/dtolnay/syn)
[![Latest Version](https://img.shields.io/crates/v/syn.svg)](https://crates.io/crates/syn)
[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://dtolnay.github.io/syn/syn/)
Parse Rust source code without a Syntex dependency, intended for use with
[Macros 1.1](https://github.com/rust-lang/rfcs/blob/master/text/1681-macros-1.1.md).
Designed for fast compile time.
- Compile time for `syn` (from scratch including all dependencies): **6 seconds**
- Compile time for the `syntex`/`quasi`/`aster` stack: **60+ seconds**
If you get stuck with Macros 1.1 I am happy to provide help even if the issue is
not related to syn. Please file a ticket in this repo.
## Usage with Macros 1.1
```toml
[dependencies]
syn = "0.11"
quote = "0.3"
[lib]
proc-macro = true
```
```rust
extern crate proc_macro;
use proc_macro::TokenStream;
extern crate syn;
#[macro_use]
extern crate quote;
#[proc_macro_derive(MyMacro)]
pub fn my_macro(input: TokenStream) -> TokenStream {
let source = input.to_string();
// Parse the string representation into a syntax tree
let ast = syn::parse_derive_input(&source).unwrap();
// Build the output, possibly using quasi-quotation
let expanded = quote! {
// ...
};
// Parse back to a token stream and return it
expanded.parse().unwrap()
}
```
## Complete example
Suppose we have the following simple trait which returns the number of fields in
a struct:
```rust
trait NumFields {
fn num_fields() -> usize;
}
```
A complete Macros 1.1 implementation of `#[derive(NumFields)]` based on `syn`
and [`quote`](https://github.com/dtolnay/quote) looks like this:
```rust
extern crate proc_macro;
use proc_macro::TokenStream;
extern crate syn;
#[macro_use]
extern crate quote;
#[proc_macro_derive(NumFields)]
pub fn num_fields(input: TokenStream) -> TokenStream {
let source = input.to_string();
// Parse the string representation into a syntax tree
let ast = syn::parse_derive_input(&source).unwrap();
// Build the output
let expanded = expand_num_fields(&ast);
// Return the generated impl as a TokenStream
expanded.parse().unwrap()
}
fn expand_num_fields(ast: &syn::DeriveInput) -> quote::Tokens {
let n = match ast.body {
syn::Body::Struct(ref data) => data.fields().len(),
syn::Body::Enum(_) => panic!("#[derive(NumFields)] can only be used with structs"),
};
// Used in the quasi-quotation below as `#name`
let name = &ast.ident;
// Helper is provided for handling complex generic types correctly and effortlessly
let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl();
quote! {
// The generated impl
impl #impl_generics ::mycrate::NumFields for #name #ty_generics #where_clause {
fn num_fields() -> usize {
#n
}
}
}
}
```
For a more elaborate example that involves trait bounds, enums, and different
kinds of structs, check out [`DeepClone`] and [`deep-clone-derive`].
[`DeepClone`]: https://github.com/asajeffrey/deep-clone
[`deep-clone-derive`]: https://github.com/asajeffrey/deep-clone/blob/master/deep-clone-derive/lib.rs
## Testing
Macros 1.1 has a restriction that your proc-macro crate must export nothing but
`proc_macro_derive` functions, and also `proc_macro_derive` procedural macros
cannot be used from the same crate in which they are defined. These restrictions
may be lifted in the future but for now they make writing tests a bit trickier
than for other types of code.
In particular, you will not be able to write test functions like `#[test] fn
it_works() { ... }` in line with your code. Instead, either put tests in a
[`tests` directory](https://doc.rust-lang.org/book/testing.html#the-tests-directory)
or in a separate crate entirely.
Additionally, if your procedural macro implements a particular trait, that trait
must be defined in a separate crate from the procedural macro.
As a concrete example, suppose your procedural macro crate is called `my_derive`
and it implements a trait called `my_crate::MyTrait`. Your unit tests for the
procedural macro can go in `my_derive/tests/test.rs` or into a separate crate
`my_tests/tests/test.rs`. Either way the test would look something like this:
```rust
#[macro_use]
extern crate my_derive;
extern crate my_crate;
use my_crate::MyTrait;
#[test]
fn it_works() {
#[derive(MyTrait)]
struct S { /* ... */ }
/* test the thing */
}
```
## Debugging
When developing a procedural macro it can be helpful to look at what the
generated code looks like. Use `cargo rustc -- -Zunstable-options
--pretty=expanded` or the
[`cargo expand`](https://github.com/dtolnay/cargo-expand) subcommand.
To show the expanded code for some crate that uses your procedural macro, run
`cargo expand` from that crate. To show the expanded code for one of your own
test cases, run `cargo expand --test the_test_case` where the last argument is
the name of the test file without the `.rs` extension.
This write-up by Brandon W Maister discusses debugging in more detail:
[Debugging Rust's new Custom Derive
system](https://quodlibetor.github.io/posts/debugging-rusts-new-custom-derive-system/).
## Optional features
Syn puts a lot of functionality behind optional features in order to optimize
compile time for the most common use cases. These are the available features and
their effect on compile time. Dependencies are included in the compile times.
Features | Compile time | Functionality
--- | --- | ---
*(none)* | 3 sec | The data structures representing the AST of Rust structs, enums, and types.
parsing | 6 sec | Parsing Rust source code containing structs and enums into an AST.
printing | 4 sec | Printing an AST of structs and enums as Rust source code.
**parsing, printing** | **6 sec** | **This is the default.** Parsing and printing of Rust structs and enums. This is typically what you want for implementing Macros 1.1 custom derives.
full | 4 sec | The data structures representing the full AST of all possible Rust code.
full, parsing | 9 sec | Parsing any valid Rust source code to an AST.
full, printing | 6 sec | Turning an AST into Rust source code.
full, parsing, printing | 11 sec | Parsing and printing any Rust syntax.
## License
Licensed under either of
* Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
* MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
at your option.
### Contribution
Unless you explicitly state otherwise, any contribution intentionally submitted
for inclusion in this crate by you, as defined in the Apache-2.0 license, shall
be dual licensed as above, without any additional terms or conditions.

Просмотреть файл

@ -1,231 +0,0 @@
use {Generics, Ident, LifetimeDef, TyParam, WhereClause, WherePredicate};
use aster::invoke::{Identity, Invoke};
use aster::lifetime::{IntoLifetime, LifetimeDefBuilder, IntoLifetimeDef};
use aster::path::IntoPath;
use aster::ty_param::TyParamBuilder;
use aster::where_predicate::WherePredicateBuilder;
// NOTE(review): vendored third-party code (syn 0.11.11, `aster` builder API).
// Kept token-for-token identical to upstream; comments only.

/// Fluent builder for a `Generics` value (lifetime parameters, type
/// parameters and `where`-clause predicates). When `build()` is called the
/// finished `Generics` is handed to `callback`; the default callback
/// (`Identity`) simply returns it.
pub struct GenericsBuilder<F = Identity> {
    callback: F,                     // receives the finished `Generics`
    lifetimes: Vec<LifetimeDef>,     // accumulated lifetime parameters
    ty_params: Vec<TyParam>,         // accumulated type parameters
    predicates: Vec<WherePredicate>, // accumulated `where` predicates
}

impl GenericsBuilder {
    /// Empty builder whose callback returns the `Generics` unchanged.
    pub fn new() -> Self {
        GenericsBuilder::with_callback(Identity)
    }

    /// Builder pre-seeded from an existing `Generics`.
    pub fn from_generics(generics: Generics) -> Self {
        GenericsBuilder::from_generics_with_callback(generics, Identity)
    }
}

impl<F> GenericsBuilder<F>
    where F: Invoke<Generics>
{
    /// Empty builder that delivers its result to `callback`.
    pub fn with_callback(callback: F) -> Self {
        GenericsBuilder {
            callback: callback,
            lifetimes: Vec::new(),
            ty_params: Vec::new(),
            predicates: Vec::new(),
        }
    }

    /// Builder seeded from `generics` that delivers its result to `callback`.
    pub fn from_generics_with_callback(generics: Generics, callback: F) -> Self {
        GenericsBuilder {
            callback: callback,
            lifetimes: generics.lifetimes,
            ty_params: generics.ty_params,
            predicates: generics.where_clause.predicates,
        }
    }

    /// Merge all parts of another `Generics` into this builder.
    pub fn with(self, generics: Generics) -> Self {
        self.with_lifetimes(generics.lifetimes.into_iter())
            .with_ty_params(generics.ty_params.into_iter())
            .with_predicates(generics.where_clause.predicates.into_iter())
    }

    /// Append lifetime definitions from anything convertible to `LifetimeDef`.
    pub fn with_lifetimes<I, L>(mut self, iter: I) -> Self
        where I: IntoIterator<Item = L>,
              L: IntoLifetimeDef
    {
        let iter = iter.into_iter().map(|lifetime_def| lifetime_def.into_lifetime_def());
        self.lifetimes.extend(iter);
        self
    }

    /// Append bound-free lifetime definitions named by `iter`.
    pub fn with_lifetime_names<I, N>(mut self, iter: I) -> Self
        where I: IntoIterator<Item = N>,
              N: Into<Ident>
    {
        for name in iter {
            self = self.lifetime_name(name);
        }
        self
    }

    pub fn with_lifetime(mut self, lifetime: LifetimeDef) -> Self {
        self.lifetimes.push(lifetime);
        self
    }

    /// Shorthand for `lifetime(name).build()` — adds a lifetime with no bounds.
    pub fn lifetime_name<N>(self, name: N) -> Self
        where N: Into<Ident>
    {
        self.lifetime(name).build()
    }

    /// Start a sub-builder for one lifetime definition; its `build()` returns here.
    pub fn lifetime<N>(self, name: N) -> LifetimeDefBuilder<Self>
        where N: Into<Ident>
    {
        LifetimeDefBuilder::with_callback(name, self)
    }

    pub fn with_ty_params<I>(mut self, iter: I) -> Self
        where I: IntoIterator<Item = TyParam>
    {
        self.ty_params.extend(iter);
        self
    }

    /// Append bare (bound-free) type parameters named by `iter`.
    pub fn with_ty_param_ids<I, T>(mut self, iter: I) -> Self
        where I: IntoIterator<Item = T>,
              T: Into<Ident>
    {
        for id in iter {
            self = self.ty_param_id(id);
        }
        self
    }

    pub fn with_ty_param(mut self, ty_param: TyParam) -> Self {
        self.ty_params.push(ty_param);
        self
    }

    /// Shorthand for `ty_param(id).build()`.
    pub fn ty_param_id<I>(self, id: I) -> Self
        where I: Into<Ident>
    {
        self.ty_param(id).build()
    }

    /// Start a sub-builder for one type parameter; its `build()` returns here.
    pub fn ty_param<I>(self, id: I) -> TyParamBuilder<Self>
        where I: Into<Ident>
    {
        TyParamBuilder::with_callback(id, self)
    }

    pub fn with_predicates<I>(mut self, iter: I) -> Self
        where I: IntoIterator<Item = WherePredicate>
    {
        self.predicates.extend(iter);
        self
    }

    pub fn with_predicate(mut self, predicate: WherePredicate) -> Self {
        self.predicates.push(predicate);
        self
    }

    /// Start a sub-builder for one `where` predicate; its `build()` returns here.
    pub fn predicate(self) -> WherePredicateBuilder<Self> {
        WherePredicateBuilder::with_callback(self)
    }

    /// Add `lifetime` as an outlives-bound on every lifetime parameter AND
    /// every type parameter accumulated so far.
    pub fn add_lifetime_bound<L>(mut self, lifetime: L) -> Self
        where L: IntoLifetime
    {
        let lifetime = lifetime.into_lifetime();
        for lifetime_def in &mut self.lifetimes {
            lifetime_def.bounds.push(lifetime.clone());
        }
        // Type params are rebuilt through TyParamBuilder to append the bound.
        for ty_param in &mut self.ty_params {
            *ty_param = TyParamBuilder::from_ty_param(ty_param.clone())
                .lifetime_bound(lifetime.clone())
                .build();
        }
        self
    }

    /// Add the trait at `path` as a bound on every type parameter.
    pub fn add_ty_param_bound<P>(mut self, path: P) -> Self
        where P: IntoPath
    {
        let path = path.into_path();
        for ty_param in &mut self.ty_params {
            // First `build()` finishes the trait-bound sub-builder, the
            // second finishes the TyParamBuilder itself.
            *ty_param = TyParamBuilder::from_ty_param(ty_param.clone())
                .trait_bound(path.clone())
                .build()
                .build();
        }
        self
    }

    /// Drop all bounds and predicates, keeping only the bare parameters.
    pub fn strip_bounds(self) -> Self {
        self.strip_lifetimes().strip_ty_params().strip_predicates()
    }

    pub fn strip_lifetimes(mut self) -> Self {
        for lifetime in &mut self.lifetimes {
            lifetime.bounds = vec![];
        }
        self
    }

    pub fn strip_ty_params(mut self) -> Self {
        for ty_param in &mut self.ty_params {
            ty_param.bounds = vec![];
        }
        self
    }

    pub fn strip_predicates(mut self) -> Self {
        self.predicates = vec![];
        self
    }

    /// Assemble the final `Generics` and hand it to the callback.
    pub fn build(self) -> F::Result {
        self.callback.invoke(Generics {
            lifetimes: self.lifetimes,
            ty_params: self.ty_params,
            where_clause: WhereClause { predicates: self.predicates },
        })
    }
}

// The three Invoke impls below let sub-builders (lifetime / ty-param /
// predicate) deliver their finished item straight back into this builder.

impl<F> Invoke<LifetimeDef> for GenericsBuilder<F>
    where F: Invoke<Generics>
{
    type Result = Self;

    fn invoke(self, lifetime: LifetimeDef) -> Self {
        self.with_lifetime(lifetime)
    }
}

impl<F> Invoke<TyParam> for GenericsBuilder<F>
    where F: Invoke<Generics>
{
    type Result = Self;

    fn invoke(self, ty_param: TyParam) -> Self {
        self.with_ty_param(ty_param)
    }
}

impl<F> Invoke<WherePredicate> for GenericsBuilder<F>
    where F: Invoke<Generics>
{
    type Result = Self;

    fn invoke(self, predicate: WherePredicate) -> Self {
        self.with_predicate(predicate)
    }
}

Просмотреть файл

@ -1,39 +0,0 @@
use Ident;
// NOTE(review): vendored third-party code (syn 0.11.11). Comments only.

/// Cheap conversion of a borrowed value into an owned `Ident`.
/// Unlike `Into<Ident>`, this takes `&self`, so builders can accept
/// `&str`, `String`, `Ident`, and references to any of them uniformly.
pub trait ToIdent {
    fn to_ident(&self) -> Ident;
}

impl ToIdent for Ident {
    fn to_ident(&self) -> Ident {
        self.clone()
    }
}

impl<'a> ToIdent for &'a str {
    fn to_ident(&self) -> Ident {
        // `**self` is `str`; autoref makes this `<&str as Into<Ident>>::into`.
        (**self).into()
    }
}

impl ToIdent for String {
    fn to_ident(&self) -> Ident {
        self.clone().into()
    }
}

// Blanket impls so `&T` and `&mut T` work wherever `T: ToIdent` does.
impl<'a, T> ToIdent for &'a T
    where T: ToIdent
{
    fn to_ident(&self) -> Ident {
        (**self).to_ident()
    }
}

impl<'a, T> ToIdent for &'a mut T
    where T: ToIdent
{
    fn to_ident(&self) -> Ident {
        (**self).to_ident()
    }
}

Просмотреть файл

@ -1,16 +0,0 @@
// NOTE(review): vendored third-party code (syn 0.11.11). Comments only.

/// Continuation used to chain `aster` builders: when a sub-builder finishes
/// a value of type `A`, it calls `invoke` on its stored callback, which is
/// usually the parent builder (returning `Self`) so chaining can continue.
pub trait Invoke<A> {
    type Result;

    fn invoke(self, arg: A) -> Self::Result;
}

/// No-op continuation: returns the built value as-is. Used as the default
/// callback for top-level builders.
#[derive(Copy, Clone)]
pub struct Identity;

impl<A> Invoke<A> for Identity {
    type Result = A;

    fn invoke(self, arg: A) -> A {
        arg
    }
}

Просмотреть файл

@ -1,103 +0,0 @@
use {Ident, Lifetime, LifetimeDef};
use aster::invoke::{Invoke, Identity};
// ////////////////////////////////////////////////////////////////////////////
// NOTE(review): vendored third-party code (syn 0.11.11). Comments only.

/// Conversion into a `Lifetime` (e.g. from `"'a"`-style strings).
pub trait IntoLifetime {
    fn into_lifetime(self) -> Lifetime;
}

impl IntoLifetime for Lifetime {
    fn into_lifetime(self) -> Lifetime {
        self
    }
}

impl<'a> IntoLifetime for &'a str {
    fn into_lifetime(self) -> Lifetime {
        Lifetime { ident: self.into() }
    }
}

/// Conversion into a `LifetimeDef` (a lifetime parameter declaration,
/// i.e. a `Lifetime` plus attributes and outlives-bounds).
pub trait IntoLifetimeDef {
    fn into_lifetime_def(self) -> LifetimeDef;
}

impl IntoLifetimeDef for LifetimeDef {
    fn into_lifetime_def(self) -> LifetimeDef {
        self
    }
}

impl IntoLifetimeDef for Lifetime {
    fn into_lifetime_def(self) -> LifetimeDef {
        // A bare lifetime becomes a definition with no attrs and no bounds.
        LifetimeDef {
            attrs: vec![],
            lifetime: self,
            bounds: vec![],
        }
    }
}

impl<'a> IntoLifetimeDef for &'a str {
    fn into_lifetime_def(self) -> LifetimeDef {
        self.into_lifetime().into_lifetime_def()
    }
}

impl IntoLifetimeDef for String {
    fn into_lifetime_def(self) -> LifetimeDef {
        // Deref to `str` so the `&str` impl of IntoLifetime is used.
        (*self).into_lifetime().into_lifetime_def()
    }
}

/// Builder for a single `LifetimeDef`; `build()` hands the result to the
/// callback (typically the parent `GenericsBuilder`).
pub struct LifetimeDefBuilder<F = Identity> {
    callback: F,
    lifetime: Lifetime,       // the lifetime being declared
    bounds: Vec<Lifetime>,    // its outlives-bounds
}

impl LifetimeDefBuilder {
    pub fn new<N>(name: N) -> Self
        where N: Into<Ident>
    {
        LifetimeDefBuilder::with_callback(name, Identity)
    }
}

impl<F> LifetimeDefBuilder<F>
    where F: Invoke<LifetimeDef>
{
    pub fn with_callback<N>(name: N, callback: F) -> Self
        where N: Into<Ident>
    {
        let lifetime = Lifetime { ident: name.into() };

        LifetimeDefBuilder {
            callback: callback,
            lifetime: lifetime,
            bounds: Vec::new(),
        }
    }

    /// Add an outlives-bound (`'name`) to the lifetime being built.
    pub fn bound<N>(mut self, name: N) -> Self
        where N: Into<Ident>
    {
        let lifetime = Lifetime { ident: name.into() };

        self.bounds.push(lifetime);
        self
    }

    pub fn build(self) -> F::Result {
        self.callback.invoke(LifetimeDef {
            attrs: vec![],
            lifetime: self.lifetime,
            bounds: self.bounds,
        })
    }
}

33
third_party/rust/syn-0.11.11/src/aster/mod.rs поставляемый
Просмотреть файл

@ -1,33 +0,0 @@
use super::*;
pub mod generics;
pub mod ident;
pub mod invoke;
pub mod lifetime;
pub mod path;
pub mod qpath;
pub mod ty;
pub mod ty_param;
pub mod where_predicate;
// NOTE(review): vendored third-party code (syn 0.11.11, aster/mod.rs).
// Free-function entry points for the builder DSL. Comments only.

/// Convert anything `Into<Ident>` into an `Ident`.
pub fn id<I>(id: I) -> Ident
    where I: Into<Ident>
{
    id.into()
}

/// Start a `GenericsBuilder` seeded from an existing `Generics`.
pub fn from_generics(generics: Generics) -> generics::GenericsBuilder {
    generics::GenericsBuilder::from_generics(generics)
}

/// Start an empty `where`-predicate builder.
pub fn where_predicate() -> where_predicate::WherePredicateBuilder {
    where_predicate::WherePredicateBuilder::new()
}

/// Start an empty type builder.
pub fn ty() -> ty::TyBuilder {
    ty::TyBuilder::new()
}

/// Start an empty path builder.
pub fn path() -> path::PathBuilder {
    path::PathBuilder::new()
}

327
third_party/rust/syn-0.11.11/src/aster/path.rs поставляемый
Просмотреть файл

@ -1,327 +0,0 @@
use {AngleBracketedParameterData, Generics, Ident, Lifetime, ParenthesizedParameterData, Path,
PathParameters, PathSegment, Ty, TypeBinding};
use aster::ident::ToIdent;
use aster::invoke::{Invoke, Identity};
use aster::lifetime::IntoLifetime;
use aster::ty::TyBuilder;
// ////////////////////////////////////////////////////////////////////////////
// NOTE(review): vendored third-party code (syn 0.11.11, aster/path.rs).
// Kept token-for-token identical to upstream; comments only.

/// Conversion into a `Path` (e.g. from identifiers, strings, or slices of
/// identifier-like values for multi-segment paths).
pub trait IntoPath {
    fn into_path(self) -> Path;
}

impl IntoPath for Path {
    fn into_path(self) -> Path {
        self
    }
}

impl IntoPath for Ident {
    fn into_path(self) -> Path {
        PathBuilder::new().id(self).build()
    }
}

impl<'a> IntoPath for &'a str {
    fn into_path(self) -> Path {
        PathBuilder::new().id(self).build()
    }
}

impl IntoPath for String {
    fn into_path(self) -> Path {
        // Reborrow as &str and reuse that impl.
        (&*self).into_path()
    }
}

impl<'a, T> IntoPath for &'a [T]
    where T: ToIdent
{
    fn into_path(self) -> Path {
        // Each element becomes one path segment.
        PathBuilder::new().ids(self).build()
    }
}

// ////////////////////////////////////////////////////////////////////////////

/// Entry-point builder for a `Path`. Only records whether the path is
/// `global` (leading `::`); segments are added via `PathSegmentsBuilder`.
pub struct PathBuilder<F = Identity> {
    callback: F,
    global: bool,
}

impl PathBuilder {
    pub fn new() -> Self {
        PathBuilder::with_callback(Identity)
    }
}

impl<F> PathBuilder<F>
    where F: Invoke<Path>
{
    pub fn with_callback(callback: F) -> Self {
        PathBuilder {
            callback: callback,
            global: false,
        }
    }

    /// Finish with an already-constructed `Path`.
    pub fn build(self, path: Path) -> F::Result {
        self.callback.invoke(path)
    }

    /// Mark the path as fully qualified (leading `::`).
    pub fn global(mut self) -> Self {
        self.global = true;
        self
    }

    /// Add one segment per identifier in `ids`.
    /// Panics if `ids` is empty (a path needs at least one segment).
    pub fn ids<I, T>(self, ids: I) -> PathSegmentsBuilder<F>
        where I: IntoIterator<Item = T>,
              T: ToIdent
    {
        let mut ids = ids.into_iter();
        let id = ids.next().expect("passed path with no id");

        self.id(id).ids(ids)
    }

    /// Add a single plain segment (no generics) named `id`.
    pub fn id<I>(self, id: I) -> PathSegmentsBuilder<F>
        where I: ToIdent
    {
        self.segment(id).build()
    }

    /// Start a segment sub-builder (for segments with generic arguments).
    pub fn segment<I>(self, id: I) -> PathSegmentBuilder<PathSegmentsBuilder<F>>
        where I: ToIdent
    {
        // Hands ownership of callback/global to the segments builder, which
        // takes over accumulation from here on.
        PathSegmentBuilder::with_callback(id,
                                          PathSegmentsBuilder {
                                              callback: self.callback,
                                              global: self.global,
                                              segments: Vec::new(),
                                          })
    }
}

// ////////////////////////////////////////////////////////////////////////////

/// Accumulates `PathSegment`s and assembles the final `Path`.
pub struct PathSegmentsBuilder<F = Identity> {
    callback: F,
    global: bool,
    segments: Vec<PathSegment>,
}

impl<F> PathSegmentsBuilder<F>
    where F: Invoke<Path>
{
    pub fn ids<I, T>(mut self, ids: I) -> PathSegmentsBuilder<F>
        where I: IntoIterator<Item = T>,
              T: ToIdent
    {
        for id in ids {
            self = self.id(id);
        }

        self
    }

    pub fn id<T>(self, id: T) -> PathSegmentsBuilder<F>
        where T: ToIdent
    {
        self.segment(id).build()
    }

    pub fn segment<T>(self, id: T) -> PathSegmentBuilder<Self>
        where T: ToIdent
    {
        PathSegmentBuilder::with_callback(id, self)
    }

    pub fn build(self) -> F::Result {
        self.callback.invoke(Path {
            global: self.global,
            segments: self.segments,
        })
    }
}

// Lets a PathSegmentBuilder push its finished segment back into this builder.
impl<F> Invoke<PathSegment> for PathSegmentsBuilder<F> {
    type Result = Self;

    fn invoke(mut self, segment: PathSegment) -> Self {
        self.segments.push(segment);
        self
    }
}

// ////////////////////////////////////////////////////////////////////////////

/// Builder for one `PathSegment`: an identifier plus its generic arguments
/// (lifetimes, types, associated-type bindings) or parenthesized `Fn`-style
/// argument/return types.
pub struct PathSegmentBuilder<F = Identity> {
    callback: F,
    id: Ident,
    lifetimes: Vec<Lifetime>,
    tys: Vec<Ty>,
    bindings: Vec<TypeBinding>,
}

impl<F> PathSegmentBuilder<F>
    where F: Invoke<PathSegment>
{
    pub fn with_callback<I>(id: I, callback: F) -> Self
        where I: ToIdent
    {
        PathSegmentBuilder {
            callback: callback,
            id: id.to_ident(),
            lifetimes: Vec::new(),
            tys: Vec::new(),
            bindings: Vec::new(),
        }
    }

    /// Use a `Generics`' parameters as this segment's arguments.
    pub fn with_generics(self, generics: Generics) -> Self {
        // Strip off the bounds.
        let lifetimes = generics.lifetimes.iter().map(|lifetime_def| lifetime_def.lifetime.clone());

        let tys =
            generics.ty_params.iter().map(|ty_param| TyBuilder::new().id(ty_param.ident.clone()));

        self.with_lifetimes(lifetimes).with_tys(tys)
    }

    pub fn with_lifetimes<I, L>(mut self, iter: I) -> Self
        where I: IntoIterator<Item = L>,
              L: IntoLifetime
    {
        let iter = iter.into_iter().map(|lifetime| lifetime.into_lifetime());
        self.lifetimes.extend(iter);
        self
    }

    pub fn with_lifetime<L>(mut self, lifetime: L) -> Self
        where L: IntoLifetime
    {
        self.lifetimes.push(lifetime.into_lifetime());
        self
    }

    pub fn lifetime<N>(self, name: N) -> Self
        where N: ToIdent
    {
        let lifetime = Lifetime { ident: name.to_ident() };
        self.with_lifetime(lifetime)
    }

    pub fn with_tys<I>(mut self, iter: I) -> Self
        where I: IntoIterator<Item = Ty>
    {
        self.tys.extend(iter);
        self
    }

    pub fn with_ty(mut self, ty: Ty) -> Self {
        self.tys.push(ty);
        self
    }

    /// Start a type sub-builder whose result is appended to `tys`.
    pub fn ty(self) -> TyBuilder<Self> {
        TyBuilder::with_callback(self)
    }

    pub fn with_binding(mut self, binding: TypeBinding) -> Self {
        self.bindings.push(binding);
        self
    }

    /// Start building an associated-type binding `id = <ty>`.
    pub fn binding<T>(self, id: T) -> TyBuilder<TypeBindingBuilder<F>>
        where T: ToIdent
    {
        TyBuilder::with_callback(TypeBindingBuilder {
            id: id.to_ident(),
            builder: self,
        })
    }

    /// Finish as a parenthesized (`Fn`-style) segment with no return type.
    pub fn no_return(self) -> F::Result {
        self.build_return(None)
    }

    /// Finish as a parenthesized segment, building the return type next.
    pub fn return_(self) -> TyBuilder<PathSegmentReturnBuilder<F>> {
        TyBuilder::with_callback(PathSegmentReturnBuilder(self))
    }

    /// Finish as `Ident(inputs...) -> output` (parenthesized parameters).
    pub fn build_return(self, output: Option<Ty>) -> F::Result {
        let data = ParenthesizedParameterData {
            inputs: self.tys,
            output: output,
        };

        let parameters = PathParameters::Parenthesized(data);

        self.callback.invoke(PathSegment {
            ident: self.id,
            parameters: parameters,
        })
    }

    /// Finish as `Ident<'lifetimes, Tys, Bindings>` (angle-bracketed).
    pub fn build(self) -> F::Result {
        let data = AngleBracketedParameterData {
            lifetimes: self.lifetimes,
            types: self.tys,
            bindings: self.bindings,
        };

        let parameters = PathParameters::AngleBracketed(data);

        self.callback.invoke(PathSegment {
            ident: self.id,
            parameters: parameters,
        })
    }
}

// A finished type from `ty()` lands back here as a generic argument.
impl<F> Invoke<Ty> for PathSegmentBuilder<F>
    where F: Invoke<PathSegment>
{
    type Result = Self;

    fn invoke(self, ty: Ty) -> Self {
        self.with_ty(ty)
    }
}

// ////////////////////////////////////////////////////////////////////////////

/// Continuation used by `PathSegmentBuilder::binding`: pairs the pending
/// binding identifier with the type built next.
pub struct TypeBindingBuilder<F> {
    id: Ident,
    builder: PathSegmentBuilder<F>,
}

impl<F> Invoke<Ty> for TypeBindingBuilder<F>
    where F: Invoke<PathSegment>
{
    type Result = PathSegmentBuilder<F>;

    fn invoke(self, ty: Ty) -> Self::Result {
        let id = self.id;

        self.builder.with_binding(TypeBinding {
            ident: id,
            ty: ty,
        })
    }
}

// ////////////////////////////////////////////////////////////////////////////

/// Continuation used by `PathSegmentBuilder::return_`: the type built next
/// becomes the parenthesized segment's return type.
pub struct PathSegmentReturnBuilder<F>(PathSegmentBuilder<F>);

impl<F> Invoke<Ty> for PathSegmentReturnBuilder<F>
    where F: Invoke<PathSegment>
{
    type Result = F::Result;

    fn invoke(self, ty: Ty) -> Self::Result {
        self.0.build_return(Some(ty))
    }
}

Просмотреть файл

@ -1,143 +0,0 @@
use {Path, PathSegment, QSelf, Ty};
use aster::ident::ToIdent;
use aster::invoke::{Invoke, Identity};
use aster::path::{PathBuilder, PathSegmentBuilder};
use aster::ty::TyBuilder;
// ////////////////////////////////////////////////////////////////////////////
// NOTE(review): vendored third-party code (syn 0.11.11, aster/qpath.rs).
// Builders for qualified paths `<Ty as Trait>::Item`. Comments only.

/// Entry-point builder for a qualified path; produces a `(QSelf, Path)` pair.
pub struct QPathBuilder<F = Identity> {
    callback: F,
}

impl QPathBuilder {
    pub fn new() -> Self {
        QPathBuilder::with_callback(Identity)
    }
}

impl<F> QPathBuilder<F>
    where F: Invoke<(QSelf, Path)>
{
    /// Construct a `QPathBuilder` that will call the `callback` with a constructed `QSelf`
    /// and `Path`.
    pub fn with_callback(callback: F) -> Self {
        QPathBuilder { callback: callback }
    }

    /// Build a qualified path first by starting with a type builder.
    pub fn with_ty(self, ty: Ty) -> QPathTyBuilder<F> {
        QPathTyBuilder {
            builder: self,
            ty: ty,
        }
    }

    /// Build a qualified path first by starting with a type builder.
    pub fn ty(self) -> TyBuilder<Self> {
        TyBuilder::with_callback(self)
    }

    /// Build a qualified path with a concrete type and path.
    pub fn build(self, qself: QSelf, path: Path) -> F::Result {
        self.callback.invoke((qself, path))
    }
}

// The self-type built via `ty()` feeds into the next stage.
impl<F> Invoke<Ty> for QPathBuilder<F>
    where F: Invoke<(QSelf, Path)>
{
    type Result = QPathTyBuilder<F>;

    fn invoke(self, ty: Ty) -> QPathTyBuilder<F> {
        self.with_ty(ty)
    }
}

// ////////////////////////////////////////////////////////////////////////////

/// Second stage: the self-type is fixed; next comes the trait path
/// (via `as_`) or directly the item segment(s).
pub struct QPathTyBuilder<F> {
    builder: QPathBuilder<F>,
    ty: Ty,
}

impl<F> QPathTyBuilder<F>
    where F: Invoke<(QSelf, Path)>
{
    /// Build a qualified path with a path builder.
    pub fn as_(self) -> PathBuilder<Self> {
        PathBuilder::with_callback(self)
    }

    /// Shorthand for `<Ty>::id` — no trait qualifier (empty path).
    pub fn id<T>(self, id: T) -> F::Result
        where T: ToIdent
    {
        let path = Path {
            global: false,
            segments: vec![],
        };

        self.as_().build(path).id(id)
    }

    /// Like `id`, but starts a segment sub-builder for generic arguments.
    pub fn segment<T>(self, id: T) -> PathSegmentBuilder<QPathQSelfBuilder<F>>
        where T: ToIdent
    {
        let path = Path {
            global: false,
            segments: vec![],
        };

        self.as_().build(path).segment(id)
    }
}

impl<F> Invoke<Path> for QPathTyBuilder<F>
    where F: Invoke<(QSelf, Path)>
{
    type Result = QPathQSelfBuilder<F>;

    fn invoke(self, path: Path) -> QPathQSelfBuilder<F> {
        QPathQSelfBuilder {
            builder: self.builder,
            qself: QSelf {
                ty: Box::new(self.ty),
                // `position` marks how many leading segments belong to the
                // trait qualifier; item segments are appended after it.
                position: path.segments.len(),
            },
            path: path,
        }
    }
}

// ////////////////////////////////////////////////////////////////////////////

/// Final stage: `QSelf` is complete; appends the item segment(s) and builds.
pub struct QPathQSelfBuilder<F> {
    builder: QPathBuilder<F>,
    qself: QSelf,
    path: Path,
}

impl<F> QPathQSelfBuilder<F>
    where F: Invoke<(QSelf, Path)>
{
    pub fn id<T>(self, id: T) -> F::Result
        where T: ToIdent
    {
        self.segment(id).build()
    }

    pub fn segment<T>(self, id: T) -> PathSegmentBuilder<QPathQSelfBuilder<F>>
        where T: ToIdent
    {
        PathSegmentBuilder::with_callback(id, self)
    }
}

impl<F> Invoke<PathSegment> for QPathQSelfBuilder<F>
    where F: Invoke<(QSelf, Path)>
{
    type Result = F::Result;

    fn invoke(mut self, segment: PathSegment) -> F::Result {
        self.path.segments.push(segment);
        self.builder.build(self.qself, self.path)
    }
}

488
third_party/rust/syn-0.11.11/src/aster/ty.rs поставляемый
Просмотреть файл

@ -1,488 +0,0 @@
use {Generics, Lifetime, MutTy, Mutability, Path, QSelf, Ty, TyParamBound};
use aster::ident::ToIdent;
use aster::invoke::{Invoke, Identity};
use aster::lifetime::IntoLifetime;
use aster::path::PathBuilder;
use aster::qpath::QPathBuilder;
use aster::ty_param::TyParamBoundBuilder;
// ////////////////////////////////////////////////////////////////////////////
// NOTE(review): vendored third-party code (syn 0.11.11, aster/ty.rs).
// Comments only. (The generic `impl<F> TyBuilder<F>` continues below.)

/// Entry-point builder for a `Ty`; the finished type is handed to `callback`.
pub struct TyBuilder<F = Identity> {
    callback: F,
}

impl TyBuilder {
    pub fn new() -> Self {
        TyBuilder::with_callback(Identity)
    }
}
impl<F> TyBuilder<F>
    where F: Invoke<Ty>
{
    /// Creates a builder that hands the finished `Ty` to `callback`.
    pub fn with_callback(callback: F) -> Self {
        TyBuilder { callback: callback }
    }
    /// Terminal: forwards an already-constructed `Ty` to the callback.
    pub fn build(self, ty: Ty) -> F::Result {
        self.callback.invoke(ty)
    }
    /// Builds a bare path type from a single identifier, e.g. `Foo`.
    pub fn id<I>(self, id: I) -> F::Result
        where I: ToIdent
    {
        self.path().id(id).build()
    }
    /// Builds a path type (`Ty::Path` with no qualified self).
    pub fn build_path(self, path: Path) -> F::Result {
        self.build(Ty::Path(None, path))
    }
    /// Builds a qualified path type, e.g. `<T as Trait>::Assoc`.
    pub fn build_qpath(self, qself: QSelf, path: Path) -> F::Result {
        self.build(Ty::Path(Some(qself), path))
    }
    /// Opens a path builder; its result flows back via `TyPathBuilder`.
    pub fn path(self) -> PathBuilder<TyPathBuilder<F>> {
        PathBuilder::with_callback(TyPathBuilder(self))
    }
    /// Opens a qualified-path builder; result flows back via `TyQPathBuilder`.
    pub fn qpath(self) -> QPathBuilder<TyQPathBuilder<F>> {
        QPathBuilder::with_callback(TyQPathBuilder(self))
    }
    // Shorthands for the primitive types: each is just `self.id("<name>")`.
    pub fn isize(self) -> F::Result {
        self.id("isize")
    }
    pub fn i8(self) -> F::Result {
        self.id("i8")
    }
    pub fn i16(self) -> F::Result {
        self.id("i16")
    }
    pub fn i32(self) -> F::Result {
        self.id("i32")
    }
    pub fn i64(self) -> F::Result {
        self.id("i64")
    }
    pub fn usize(self) -> F::Result {
        self.id("usize")
    }
    pub fn u8(self) -> F::Result {
        self.id("u8")
    }
    pub fn u16(self) -> F::Result {
        self.id("u16")
    }
    pub fn u32(self) -> F::Result {
        self.id("u32")
    }
    pub fn u64(self) -> F::Result {
        self.id("u64")
    }
    pub fn f32(self) -> F::Result {
        self.id("f32")
    }
    pub fn f64(self) -> F::Result {
        self.id("f64")
    }
    pub fn bool(self) -> F::Result {
        self.id("bool")
    }
    /// Builds the unit type `()` (an empty tuple).
    pub fn unit(self) -> F::Result {
        self.tuple().build()
    }
    /// Opens a tuple-type builder, e.g. for `(A, B, C)`.
    pub fn tuple(self) -> TyTupleBuilder<F> {
        TyTupleBuilder {
            builder: self,
            tys: vec![],
        }
    }
    /// Builds a slice type `[ty]`.
    pub fn build_slice(self, ty: Ty) -> F::Result {
        self.build(Ty::Slice(Box::new(ty)))
    }
    /// Opens a nested builder for the element type of a slice.
    pub fn slice(self) -> TyBuilder<TySliceBuilder<F>> {
        TyBuilder::with_callback(TySliceBuilder(self))
    }
    /// Opens a reference-type builder (`&T`), immutable with no lifetime
    /// until configured otherwise.
    pub fn ref_(self) -> TyRefBuilder<F> {
        TyRefBuilder {
            builder: self,
            lifetime: None,
            mutability: Mutability::Immutable,
        }
    }
    /// Builds the never type `!`.
    pub fn never(self) -> F::Result {
        self.build(Ty::Never)
    }
    /// Builds the inferred type `_`.
    pub fn infer(self) -> F::Result {
        self.build(Ty::Infer)
    }
    // Each of the following wraps the next-built type in a well-known
    // std container path (see the corresponding *Builder Invoke impls).
    pub fn option(self) -> TyBuilder<TyOptionBuilder<F>> {
        TyBuilder::with_callback(TyOptionBuilder(self))
    }
    pub fn result(self) -> TyBuilder<TyResultOkBuilder<F>> {
        TyBuilder::with_callback(TyResultOkBuilder(self))
    }
    pub fn phantom_data(self) -> TyBuilder<TyPhantomDataBuilder<F>> {
        TyBuilder::with_callback(TyPhantomDataBuilder(self))
    }
    pub fn box_(self) -> TyBuilder<TyBoxBuilder<F>> {
        TyBuilder::with_callback(TyBoxBuilder(self))
    }
    pub fn iterator(self) -> TyBuilder<TyIteratorBuilder<F>> {
        TyBuilder::with_callback(TyIteratorBuilder(self))
    }
    /// Opens an `impl Trait` type builder with an empty bound list.
    pub fn impl_trait(self) -> TyImplTraitTyBuilder<F> {
        TyImplTraitTyBuilder {
            builder: self,
            bounds: Vec::new(),
        }
    }
}
// ////////////////////////////////////////////////////////////////////////////
/// Continuation: turns a finished `Path` back into a `Ty::Path`.
pub struct TyPathBuilder<F>(TyBuilder<F>);
impl<F> Invoke<Path> for TyPathBuilder<F>
    where F: Invoke<Ty>
{
    type Result = F::Result;
    fn invoke(self, path: Path) -> F::Result {
        let TyPathBuilder(ty_builder) = self;
        ty_builder.build_path(path)
    }
}
// ////////////////////////////////////////////////////////////////////////////
/// Continuation: turns a finished qualified path back into a `Ty::Path`.
pub struct TyQPathBuilder<F>(TyBuilder<F>);
impl<F> Invoke<(QSelf, Path)> for TyQPathBuilder<F>
    where F: Invoke<Ty>
{
    type Result = F::Result;
    fn invoke(self, (qself, path): (QSelf, Path)) -> F::Result {
        let TyQPathBuilder(ty_builder) = self;
        ty_builder.build_qpath(qself, path)
    }
}
// ////////////////////////////////////////////////////////////////////////////
/// Continuation: wraps the finished element type into a slice type `[T]`.
pub struct TySliceBuilder<F>(TyBuilder<F>);
impl<F> Invoke<Ty> for TySliceBuilder<F>
    where F: Invoke<Ty>
{
    type Result = F::Result;
    fn invoke(self, elem: Ty) -> F::Result {
        let TySliceBuilder(ty_builder) = self;
        ty_builder.build_slice(elem)
    }
}
// ////////////////////////////////////////////////////////////////////////////
/// Builder for reference types `&'a T` / `&'a mut T`.
pub struct TyRefBuilder<F> {
    builder: TyBuilder<F>,
    lifetime: Option<Lifetime>,
    mutability: Mutability,
}
impl<F> TyRefBuilder<F>
    where F: Invoke<Ty>
{
    /// Makes the reference mutable (`&mut`).
    pub fn mut_(self) -> Self {
        TyRefBuilder {
            mutability: Mutability::Mutable,
            ..self
        }
    }
    /// Attaches an explicit lifetime, e.g. `lifetime("'a")`.
    pub fn lifetime<N>(self, name: N) -> Self
        where N: ToIdent
    {
        TyRefBuilder {
            lifetime: Some(Lifetime { ident: name.to_ident() }),
            ..self
        }
    }
    /// Terminal: wraps `ty` as `Ty::Rptr` with the configured lifetime and
    /// mutability, and hands it to the underlying `TyBuilder`.
    pub fn build_ty(self, ty: Ty) -> F::Result {
        let pointee = MutTy {
            ty: ty,
            mutability: self.mutability,
        };
        self.builder.build(Ty::Rptr(self.lifetime, Box::new(pointee)))
    }
    /// Opens a nested builder for the referenced type.
    pub fn ty(self) -> TyBuilder<Self> {
        TyBuilder::with_callback(self)
    }
}
impl<F> Invoke<Ty> for TyRefBuilder<F>
    where F: Invoke<Ty>
{
    type Result = F::Result;
    fn invoke(self, ty: Ty) -> F::Result {
        self.build_ty(ty)
    }
}
// ////////////////////////////////////////////////////////////////////////////
/// Continuation: wraps the finished type as `::std::option::Option<T>`.
pub struct TyOptionBuilder<F>(TyBuilder<F>);
impl<F> Invoke<Ty> for TyOptionBuilder<F>
    where F: Invoke<Ty>
{
    type Result = F::Result;
    fn invoke(self, ty: Ty) -> F::Result {
        let segment = PathBuilder::new()
            .global()
            .id("std")
            .id("option")
            .segment("Option");
        let option_path = segment.with_ty(ty).build().build();
        self.0.build_path(option_path)
    }
}
// ////////////////////////////////////////////////////////////////////////////
/// First stage of `TyBuilder::result()`: receives the `Ok` type.
pub struct TyResultOkBuilder<F>(TyBuilder<F>);
impl<F> Invoke<Ty> for TyResultOkBuilder<F>
    where F: Invoke<Ty>
{
    // Receiving the Ok type does not finish the job: it yields a second
    // TyBuilder that waits for the Err type.
    type Result = TyBuilder<TyResultErrBuilder<F>>;
    fn invoke(self, ty: Ty) -> TyBuilder<TyResultErrBuilder<F>> {
        TyBuilder::with_callback(TyResultErrBuilder(self.0, ty))
    }
}
/// Second stage: carries the Ok type (`.1`) while receiving the Err type.
pub struct TyResultErrBuilder<F>(TyBuilder<F>, Ty);
impl<F> Invoke<Ty> for TyResultErrBuilder<F>
    where F: Invoke<Ty>
{
    type Result = F::Result;
    // Builds `::std::result::Result<Ok, Err>` once both types are known.
    fn invoke(self, ty: Ty) -> F::Result {
        let path = PathBuilder::new()
            .global()
            .id("std")
            .id("result")
            .segment("Result")
            .with_ty(self.1)
            .with_ty(ty)
            .build()
            .build();
        self.0.build_path(path)
    }
}
// ////////////////////////////////////////////////////////////////////////////
/// Continuation: wraps the finished type as `::std::marker::PhantomData<T>`.
pub struct TyPhantomDataBuilder<F>(TyBuilder<F>);
impl<F> Invoke<Ty> for TyPhantomDataBuilder<F>
    where F: Invoke<Ty>
{
    type Result = F::Result;
    fn invoke(self, ty: Ty) -> F::Result {
        let segment = PathBuilder::new()
            .global()
            .id("std")
            .id("marker")
            .segment("PhantomData");
        let marker_path = segment.with_ty(ty).build().build();
        self.0.build_path(marker_path)
    }
}
// ////////////////////////////////////////////////////////////////////////////
/// Continuation: wraps the finished type as `::std::boxed::Box<T>`.
pub struct TyBoxBuilder<F>(TyBuilder<F>);
impl<F> Invoke<Ty> for TyBoxBuilder<F>
    where F: Invoke<Ty>
{
    type Result = F::Result;
    fn invoke(self, ty: Ty) -> F::Result {
        let segment = PathBuilder::new()
            .global()
            .id("std")
            .id("boxed")
            .segment("Box");
        let box_path = segment.with_ty(ty).build().build();
        self.0.build_path(box_path)
    }
}
// ////////////////////////////////////////////////////////////////////////////
/// Continuation: wraps the finished type as
/// `::std::iter::Iterator<Item = T>` (a trait-object-style path type).
pub struct TyIteratorBuilder<F>(TyBuilder<F>);
impl<F> Invoke<Ty> for TyIteratorBuilder<F>
    where F: Invoke<Ty>
{
    type Result = F::Result;
    fn invoke(self, ty: Ty) -> F::Result {
        // `ty` becomes the associated-type binding `Item = ty`; the clone is
        // needed because `binding(...).build(...)` consumes its argument.
        let path = PathBuilder::new()
            .global()
            .id("std")
            .id("iter")
            .segment("Iterator")
            .binding("Item")
            .build(ty.clone())
            .build()
            .build();
        self.0.build_path(path)
    }
}
// ////////////////////////////////////////////////////////////////////////////
/// Builder for `impl Trait` types: accumulates trait/lifetime bounds and
/// finally emits `Ty::ImplTrait(bounds)`.
pub struct TyImplTraitTyBuilder<F> {
    builder: TyBuilder<F>,
    bounds: Vec<TyParamBound>,
}
impl<F> TyImplTraitTyBuilder<F>
    where F: Invoke<Ty>
{
    /// Adds every bound yielded by `iter`.
    ///
    /// Bound relaxed from `Iterator` to `IntoIterator` for consistency with
    /// `TyTupleBuilder::with_tys`; backward-compatible since every
    /// `Iterator` also implements `IntoIterator`.
    pub fn with_bounds<I>(mut self, iter: I) -> Self
        where I: IntoIterator<Item = TyParamBound>
    {
        self.bounds.extend(iter);
        self
    }
    /// Adds a single, already-built bound.
    pub fn with_bound(mut self, bound: TyParamBound) -> Self {
        self.bounds.push(bound);
        self
    }
    /// Opens a sub-builder for one bound; its result is appended via the
    /// `Invoke<TyParamBound>` impl below.
    pub fn bound(self) -> TyParamBoundBuilder<Self> {
        TyParamBoundBuilder::with_callback(self)
    }
    /// Copies the lifetimes declared on `generics` into the bound list.
    pub fn with_generics(self, generics: Generics) -> Self {
        self.with_lifetimes(generics.lifetimes.into_iter().map(|def| def.lifetime))
    }
    /// Adds one lifetime bound per item (also relaxed to `IntoIterator`).
    pub fn with_lifetimes<I, L>(self, lifetimes: I) -> Self
        where I: IntoIterator<Item = L>,
              L: IntoLifetime
    {
        let mut this = self;
        for lifetime in lifetimes {
            this = this.lifetime(lifetime);
        }
        this
    }
    /// Adds a single lifetime bound.
    pub fn lifetime<L>(self, lifetime: L) -> Self
        where L: IntoLifetime
    {
        self.bound().lifetime(lifetime)
    }
    /// Finishes the `impl Trait` type and hands it to the callback.
    pub fn build(self) -> F::Result {
        let bounds = self.bounds;
        self.builder.build(Ty::ImplTrait(bounds))
    }
}
impl<F> Invoke<TyParamBound> for TyImplTraitTyBuilder<F>
    where F: Invoke<Ty>
{
    type Result = Self;
    fn invoke(self, bound: TyParamBound) -> Self {
        self.with_bound(bound)
    }
}
// ////////////////////////////////////////////////////////////////////////////
/// Builder for tuple types: collects element types and emits `Ty::Tup`.
pub struct TyTupleBuilder<F> {
    builder: TyBuilder<F>,
    tys: Vec<Ty>,
}
impl<F> TyTupleBuilder<F>
    where F: Invoke<Ty>
{
    /// Appends one element type.
    pub fn with_ty(mut self, ty: Ty) -> Self {
        self.tys.push(ty);
        self
    }
    /// Appends every element type yielded by `iter`.
    pub fn with_tys<I>(mut self, iter: I) -> Self
        where I: IntoIterator<Item = Ty>
    {
        self.tys.extend(iter);
        self
    }
    /// Opens a nested builder for the next element type.
    pub fn ty(self) -> TyBuilder<Self> {
        TyBuilder::with_callback(self)
    }
    /// Finishes the tuple type and hands it to the callback.
    pub fn build(self) -> F::Result {
        let TyTupleBuilder { builder, tys } = self;
        builder.build(Ty::Tup(tys))
    }
}
impl<F> Invoke<Ty> for TyTupleBuilder<F>
    where F: Invoke<Ty>
{
    type Result = Self;
    fn invoke(self, ty: Ty) -> Self {
        self.with_ty(ty)
    }
}

Просмотреть файл

@ -1,262 +0,0 @@
use {Ident, LifetimeDef, Path, PolyTraitRef, TraitBoundModifier, Ty, TyParam, TyParamBound};
use aster::invoke::{Invoke, Identity};
use aster::lifetime::{IntoLifetime, IntoLifetimeDef, LifetimeDefBuilder};
use aster::path::{IntoPath, PathBuilder};
use aster::ty::TyBuilder;
// ////////////////////////////////////////////////////////////////////////////
/// Builder for a generic type parameter (`TyParam`): identifier, bounds and
/// an optional default type.
pub struct TyParamBuilder<F = Identity> {
    callback: F,
    id: Ident,
    bounds: Vec<TyParamBound>,
    default: Option<Ty>,
}
impl TyParamBuilder {
    /// Starts a fresh parameter named `id`; the built `TyParam` is returned
    /// directly (Identity callback).
    pub fn new<I>(id: I) -> Self
        where I: Into<Ident>
    {
        TyParamBuilder::with_callback(id, Identity)
    }
    /// Starts from an existing `TyParam`, preserving its bounds and default.
    pub fn from_ty_param(ty_param: TyParam) -> Self {
        TyParamBuilder::from_ty_param_with_callback(Identity, ty_param)
    }
}
impl<F> TyParamBuilder<F>
    where F: Invoke<TyParam>
{
    /// Starts a fresh parameter named `id`; the finished `TyParam` goes to
    /// `callback`.
    pub fn with_callback<I>(id: I, callback: F) -> Self
        where I: Into<Ident>
    {
        TyParamBuilder {
            callback: callback,
            id: id.into(),
            bounds: Vec::new(),
            default: None,
        }
    }
    /// Seeds the builder from an existing `TyParam`.
    pub fn from_ty_param_with_callback(callback: F, ty_param: TyParam) -> Self {
        TyParamBuilder {
            callback: callback,
            id: ty_param.ident,
            bounds: ty_param.bounds,
            default: ty_param.default,
        }
    }
    /// Sets the default type (`T = Default`).
    pub fn with_default(mut self, ty: Ty) -> Self {
        self.default = Some(ty);
        self
    }
    /// Opens a nested builder for the default type.
    pub fn default(self) -> TyBuilder<Self> {
        TyBuilder::with_callback(self)
    }
    /// Adds an already-built bound.
    pub fn with_bound(mut self, bound: TyParamBound) -> Self {
        self.bounds.push(bound);
        self
    }
    /// Opens a sub-builder for one bound.
    pub fn bound(self) -> TyParamBoundBuilder<Self> {
        TyParamBoundBuilder::with_callback(self)
    }
    /// Adds a trait bound with no modifier.
    pub fn with_trait_bound(self, trait_ref: PolyTraitRef) -> Self {
        self.bound().build_trait(trait_ref, TraitBoundModifier::None)
    }
    /// Opens a trait-reference builder for a trait bound on `path`.
    pub fn trait_bound<P>(self, path: P) -> PolyTraitRefBuilder<Self>
        where P: IntoPath
    {
        PolyTraitRefBuilder::with_callback(path, self)
    }
    /// Adds a lifetime bound, e.g. `T: 'a`.
    pub fn lifetime_bound<L>(mut self, lifetime: L) -> Self
        where L: IntoLifetime
    {
        let lifetime = lifetime.into_lifetime();
        self.bounds.push(TyParamBound::Region(lifetime));
        self
    }
    /// Finishes the `TyParam` (no attributes) and hands it to the callback.
    pub fn build(self) -> F::Result {
        self.callback.invoke(TyParam {
            attrs: vec![],
            ident: self.id,
            bounds: self.bounds,
            default: self.default,
        })
    }
}
// A `Ty` flowing back from a nested builder becomes the parameter's default.
impl<F> Invoke<Ty> for TyParamBuilder<F>
    where F: Invoke<TyParam>
{
    type Result = Self;
    fn invoke(self, ty: Ty) -> Self {
        self.with_default(ty)
    }
}
// A finished bound flowing back is appended to the bound list.
impl<F> Invoke<TyParamBound> for TyParamBuilder<F>
    where F: Invoke<TyParam>
{
    type Result = Self;
    fn invoke(self, bound: TyParamBound) -> Self {
        self.with_bound(bound)
    }
}
// A finished trait reference flowing back becomes an unmodified trait bound.
impl<F> Invoke<PolyTraitRef> for TyParamBuilder<F>
    where F: Invoke<TyParam>
{
    type Result = Self;
    fn invoke(self, trait_ref: PolyTraitRef) -> Self {
        self.with_trait_bound(trait_ref)
    }
}
// ////////////////////////////////////////////////////////////////////////////
/// Builder for a single `TyParamBound` (trait bound or lifetime bound).
pub struct TyParamBoundBuilder<F = Identity> {
    callback: F,
}
impl TyParamBoundBuilder {
    /// Creates a builder that returns the built bound directly.
    pub fn new() -> Self {
        TyParamBoundBuilder::with_callback(Identity)
    }
}
impl<F> TyParamBoundBuilder<F>
    where F: Invoke<TyParamBound>
{
    /// Creates a builder that hands the finished bound to `callback`.
    pub fn with_callback(callback: F) -> Self {
        TyParamBoundBuilder { callback: callback }
    }
    /// Terminal: builds a trait bound from a ready `PolyTraitRef` and the
    /// given modifier (`None` or `Maybe`, i.e. `?Trait`).
    pub fn build_trait(self, poly_trait: PolyTraitRef, modifier: TraitBoundModifier) -> F::Result {
        let bound = TyParamBound::Trait(poly_trait, modifier);
        self.callback.invoke(bound)
    }
    // Shared plumbing for `trait_` / `maybe_trait`: wraps self with the
    // modifier and opens the trait-reference builder.
    fn trait_with_modifier<P>(self,
                              path: P,
                              modifier: TraitBoundModifier)
                              -> PolyTraitRefBuilder<TraitTyParamBoundBuilder<F>>
        where P: IntoPath
    {
        let builder = TraitTyParamBoundBuilder {
            builder: self,
            modifier: modifier,
        };
        PolyTraitRefBuilder::with_callback(path, builder)
    }
    /// Opens a builder for a plain trait bound `Trait`.
    pub fn trait_<P>(self, path: P) -> PolyTraitRefBuilder<TraitTyParamBoundBuilder<F>>
        where P: IntoPath
    {
        self.trait_with_modifier(path, TraitBoundModifier::None)
    }
    /// Opens a builder for a relaxed bound `?Trait`.
    pub fn maybe_trait<P>(self, path: P) -> PolyTraitRefBuilder<TraitTyParamBoundBuilder<F>>
        where P: IntoPath
    {
        self.trait_with_modifier(path, TraitBoundModifier::Maybe)
    }
    /// Convenience: bound `::std::iter::Iterator<Item = ty>`.
    pub fn iterator(self, ty: Ty) -> PolyTraitRefBuilder<TraitTyParamBoundBuilder<F>> {
        let path = PathBuilder::new()
            .global()
            .id("std")
            .id("iter")
            .segment("Iterator")
            .binding("Item")
            .build(ty)
            .build()
            .build();
        self.trait_(path)
    }
    /// Terminal: builds a lifetime (region) bound.
    pub fn lifetime<L>(self, lifetime: L) -> F::Result
        where L: IntoLifetime
    {
        let lifetime = lifetime.into_lifetime();
        self.callback.invoke(TyParamBound::Region(lifetime))
    }
}
// ////////////////////////////////////////////////////////////////////////////
/// Continuation: carries the chosen `TraitBoundModifier` while the trait
/// reference is being built, then combines the two into a trait bound.
pub struct TraitTyParamBoundBuilder<F> {
    builder: TyParamBoundBuilder<F>,
    modifier: TraitBoundModifier,
}
impl<F> Invoke<PolyTraitRef> for TraitTyParamBoundBuilder<F>
    where F: Invoke<TyParamBound>
{
    type Result = F::Result;
    fn invoke(self, poly_trait: PolyTraitRef) -> Self::Result {
        self.builder.build_trait(poly_trait, self.modifier)
    }
}
// ////////////////////////////////////////////////////////////////////////////
/// Builder for a `PolyTraitRef`: a trait path plus optional higher-ranked
/// lifetimes (`for<'a> Trait<'a>`).
pub struct PolyTraitRefBuilder<F> {
    callback: F,
    trait_ref: Path,
    lifetimes: Vec<LifetimeDef>,
}
impl<F> PolyTraitRefBuilder<F>
    where F: Invoke<PolyTraitRef>
{
    /// Starts from the trait path; the finished ref goes to `callback`.
    pub fn with_callback<P>(path: P, callback: F) -> Self
        where P: IntoPath
    {
        PolyTraitRefBuilder {
            callback: callback,
            trait_ref: path.into_path(),
            lifetimes: Vec::new(),
        }
    }
    /// Adds an already-built `for<…>` lifetime definition.
    pub fn with_lifetime<L>(mut self, lifetime: L) -> Self
        where L: IntoLifetimeDef
    {
        self.lifetimes.push(lifetime.into_lifetime_def());
        self
    }
    /// Opens a sub-builder for one `for<…>` lifetime.
    pub fn lifetime<N>(self, name: N) -> LifetimeDefBuilder<Self>
        where N: Into<Ident>
    {
        LifetimeDefBuilder::with_callback(name, self)
    }
    /// Finishes the `PolyTraitRef` and hands it to the callback.
    pub fn build(self) -> F::Result {
        self.callback.invoke(PolyTraitRef {
            bound_lifetimes: self.lifetimes,
            trait_ref: self.trait_ref,
        })
    }
}
// A finished lifetime def flowing back is appended to the `for<…>` list.
impl<F> Invoke<LifetimeDef> for PolyTraitRefBuilder<F>
    where F: Invoke<PolyTraitRef>
{
    type Result = Self;
    fn invoke(self, lifetime: LifetimeDef) -> Self {
        self.with_lifetime(lifetime)
    }
}

Просмотреть файл

@ -1,259 +0,0 @@
use {Ident, Lifetime, LifetimeDef, Ty, TyParamBound, WhereBoundPredicate, WherePredicate,
WhereRegionPredicate};
use aster::invoke::{Invoke, Identity};
use aster::lifetime::{IntoLifetime, IntoLifetimeDef, LifetimeDefBuilder};
use aster::path::IntoPath;
use aster::ty::TyBuilder;
use aster::ty_param::{TyParamBoundBuilder, PolyTraitRefBuilder, TraitTyParamBoundBuilder};
// ////////////////////////////////////////////////////////////////////////////
/// Entry point for building one `where` predicate: either a type bound
/// (`T: Trait`) or a lifetime/region bound (`'a: 'b`).
pub struct WherePredicateBuilder<F = Identity> {
    callback: F,
}
impl WherePredicateBuilder {
    /// Creates a builder that returns the built predicate directly.
    pub fn new() -> Self {
        WherePredicateBuilder::with_callback(Identity)
    }
}
impl<F> WherePredicateBuilder<F>
    where F: Invoke<WherePredicate>
{
    /// Creates a builder that hands the predicate to `callback`.
    pub fn with_callback(callback: F) -> Self {
        WherePredicateBuilder { callback: callback }
    }
    /// Starts a type-bound predicate: first build the bounded type.
    pub fn bound(self) -> TyBuilder<Self> {
        TyBuilder::with_callback(self)
    }
    /// Starts a region predicate on `lifetime` (bounds added afterwards).
    pub fn lifetime<L>(self, lifetime: L) -> WhereRegionPredicateBuilder<F>
        where L: IntoLifetime
    {
        WhereRegionPredicateBuilder {
            callback: self.callback,
            lifetime: lifetime.into_lifetime(),
            bounds: Vec::new(),
        }
    }
}
// The bounded type flowing back switches the builder into the
// bound-collecting state.
impl<F> Invoke<Ty> for WherePredicateBuilder<F>
    where F: Invoke<WherePredicate>
{
    type Result = WhereBoundPredicateTyBuilder<F>;
    fn invoke(self, ty: Ty) -> Self::Result {
        WhereBoundPredicateTyBuilder {
            callback: self.callback,
            ty: ty,
            bound_lifetimes: Vec::new(),
        }
    }
}
// ////////////////////////////////////////////////////////////////////////////
/// Alternate entry point producing the same bound-predicate state as
/// `WherePredicateBuilder`; only its `Invoke<Ty>` impl is used.
pub struct WhereBoundPredicateBuilder<F> {
    callback: F,
}
impl<F> Invoke<Ty> for WhereBoundPredicateBuilder<F>
    where F: Invoke<WherePredicate>
{
    type Result = WhereBoundPredicateTyBuilder<F>;
    fn invoke(self, ty: Ty) -> Self::Result {
        WhereBoundPredicateTyBuilder {
            callback: self.callback,
            ty: ty,
            bound_lifetimes: Vec::new(),
        }
    }
}
// ////////////////////////////////////////////////////////////////////////////
/// State after the bounded type is known: collects optional `for<…>`
/// lifetimes, then transitions to the bound-collecting state on the first
/// bound.
pub struct WhereBoundPredicateTyBuilder<F> {
    callback: F,
    ty: Ty,
    bound_lifetimes: Vec<LifetimeDef>,
}
impl<F> WhereBoundPredicateTyBuilder<F>
    where F: Invoke<WherePredicate>
{
    /// Adds an already-built `for<…>` lifetime definition.
    pub fn with_for_lifetime<L>(mut self, lifetime: L) -> Self
        where L: IntoLifetimeDef
    {
        self.bound_lifetimes.push(lifetime.into_lifetime_def());
        self
    }
    /// Opens a sub-builder for one `for<…>` lifetime.
    pub fn for_lifetime<N>(self, name: N) -> LifetimeDefBuilder<Self>
        where N: Into<Ident>
    {
        LifetimeDefBuilder::with_callback(name, self)
    }
    /// Seeds the bound list with `bound` and moves to the bounds state.
    pub fn with_bound(self, bound: TyParamBound) -> WhereBoundPredicateTyBoundsBuilder<F> {
        WhereBoundPredicateTyBoundsBuilder {
            callback: self.callback,
            ty: self.ty,
            bound_lifetimes: self.bound_lifetimes,
            bounds: vec![bound],
        }
    }
    /// Opens a bound builder; its result seeds the bounds state.
    pub fn bound(self) -> TyParamBoundBuilder<WhereBoundPredicateTyBoundsBuilder<F>> {
        let builder = WhereBoundPredicateTyBoundsBuilder {
            callback: self.callback,
            ty: self.ty,
            bound_lifetimes: self.bound_lifetimes,
            bounds: vec![],
        };
        TyParamBoundBuilder::with_callback(builder)
    }
    /// Shorthand for a trait bound on `path`.
    pub fn trait_<P>
        (self,
         path: P)
         -> PolyTraitRefBuilder<TraitTyParamBoundBuilder<WhereBoundPredicateTyBoundsBuilder<F>>>
        where P: IntoPath
    {
        self.bound().trait_(path)
    }
    /// Shorthand for a lifetime bound.
    pub fn lifetime<L>(self, lifetime: L) -> WhereBoundPredicateTyBoundsBuilder<F>
        where L: IntoLifetime
    {
        self.bound().lifetime(lifetime)
    }
}
// A finished `for<…>` lifetime def flowing back is appended in place.
impl<F> Invoke<LifetimeDef> for WhereBoundPredicateTyBuilder<F>
    where F: Invoke<WherePredicate>
{
    type Result = Self;
    fn invoke(self, lifetime: LifetimeDef) -> Self {
        self.with_for_lifetime(lifetime)
    }
}
// The first finished bound transitions this builder to the bounds state.
impl<F> Invoke<TyParamBound> for WhereBoundPredicateTyBuilder<F>
    where F: Invoke<WherePredicate>
{
    type Result = WhereBoundPredicateTyBoundsBuilder<F>;
    fn invoke(self, bound: TyParamBound) -> Self::Result {
        self.with_bound(bound)
    }
}
// ////////////////////////////////////////////////////////////////////////////
/// Final state of a bound predicate: type, `for<…>` lifetimes and at least
/// one bound are known; `build` emits `WherePredicate::BoundPredicate`.
pub struct WhereBoundPredicateTyBoundsBuilder<F> {
    callback: F,
    ty: Ty,
    bound_lifetimes: Vec<LifetimeDef>,
    bounds: Vec<TyParamBound>,
}
impl<F> WhereBoundPredicateTyBoundsBuilder<F>
    where F: Invoke<WherePredicate>
{
    /// Adds an already-built `for<…>` lifetime definition.
    pub fn with_for_lifetime<L>(mut self, lifetime: L) -> Self
        where L: IntoLifetimeDef
    {
        self.bound_lifetimes.push(lifetime.into_lifetime_def());
        self
    }
    /// Opens a sub-builder for one `for<…>` lifetime.
    pub fn for_lifetime<N>(self, name: N) -> LifetimeDefBuilder<Self>
        where N: Into<Ident>
    {
        LifetimeDefBuilder::with_callback(name, self)
    }
    /// Appends an already-built bound.
    pub fn with_bound(mut self, bound: TyParamBound) -> Self {
        self.bounds.push(bound);
        self
    }
    /// Opens a bound builder; its result is appended via `Invoke` below.
    pub fn bound(self) -> TyParamBoundBuilder<Self> {
        TyParamBoundBuilder::with_callback(self)
    }
    /// Shorthand for an additional trait bound on `path`.
    pub fn trait_<P>(self, path: P) -> PolyTraitRefBuilder<TraitTyParamBoundBuilder<Self>>
        where P: IntoPath
    {
        self.bound().trait_(path)
    }
    /// Shorthand for an additional lifetime bound.
    pub fn lifetime<L>(self, lifetime: L) -> Self
        where L: IntoLifetime
    {
        self.bound().lifetime(lifetime)
    }
    /// Finishes the predicate and hands it to the callback.
    pub fn build(self) -> F::Result {
        let predicate = WhereBoundPredicate {
            bound_lifetimes: self.bound_lifetimes,
            bounded_ty: self.ty,
            bounds: self.bounds,
        };
        self.callback.invoke(WherePredicate::BoundPredicate(predicate))
    }
}
// Finished `for<…>` lifetime defs and bounds flow back in place.
impl<F> Invoke<LifetimeDef> for WhereBoundPredicateTyBoundsBuilder<F>
    where F: Invoke<WherePredicate>
{
    type Result = Self;
    fn invoke(self, lifetime: LifetimeDef) -> Self {
        self.with_for_lifetime(lifetime)
    }
}
impl<F> Invoke<TyParamBound> for WhereBoundPredicateTyBoundsBuilder<F>
    where F: Invoke<WherePredicate>
{
    type Result = Self;
    fn invoke(self, bound: TyParamBound) -> Self {
        self.with_bound(bound)
    }
}
// ////////////////////////////////////////////////////////////////////////////
/// Builder for a region predicate (`'a: 'b + 'c`): one bounded lifetime
/// plus the lifetimes that outlive it.
pub struct WhereRegionPredicateBuilder<F> {
    callback: F,
    lifetime: Lifetime,
    bounds: Vec<Lifetime>,
}
impl<F> WhereRegionPredicateBuilder<F>
    where F: Invoke<WherePredicate>
{
    /// Appends one outliving lifetime to the bound list.
    pub fn bound<L>(mut self, lifetime: L) -> Self
        where L: IntoLifetime
    {
        let bound = lifetime.into_lifetime();
        self.bounds.push(bound);
        self
    }
    /// Finishes the predicate and hands it to the callback.
    pub fn build(self) -> F::Result {
        let WhereRegionPredicateBuilder { callback, lifetime, bounds } = self;
        let predicate = WhereRegionPredicate {
            lifetime: lifetime,
            bounds: bounds,
        };
        callback.invoke(WherePredicate::RegionPredicate(predicate))
    }
}

305
third_party/rust/syn-0.11.11/src/attr.rs (vendored)
View file

@ -1,305 +0,0 @@
use super::*;
use std::iter;
/// Doc-comments are promoted to attributes that have `is_sugared_doc` = true
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct Attribute {
    pub style: AttrStyle,
    pub value: MetaItem,
    pub is_sugared_doc: bool,
}
impl Attribute {
    /// Name of the attribute's meta item, e.g. `test` for `#[test]`.
    pub fn name(&self) -> &str {
        self.value.name()
    }
}
/// Distinguishes between Attributes that decorate items and Attributes that
/// are contained as statements within items. These two cases need to be
/// distinguished for pretty-printing.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum AttrStyle {
    /// Attribute of the form `#![...]`.
    Outer,
    /// Attribute of the form `#[...]`.
    Inner,
}
/// A compile-time attribute item.
///
/// E.g. `#[test]`, `#[derive(..)]` or `#[feature = "foo"]`
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum MetaItem {
    /// Word meta item.
    ///
    /// E.g. `test` as in `#[test]`
    Word(Ident),
    /// List meta item.
    ///
    /// E.g. `derive(..)` as in `#[derive(..)]`
    List(Ident, Vec<NestedMetaItem>),
    /// Name-value meta item.
    ///
    /// E.g. `feature = "foo"` as in `#[feature = "foo"]`
    NameValue(Ident, Lit),
}
impl MetaItem {
    /// Name of the item.
    ///
    /// E.g. `test` as in `#[test]`, `derive` as in `#[derive(..)]`, and
    /// `feature` as in `#[feature = "foo"]`.
    pub fn name(&self) -> &str {
        // All three variants carry their name as the first field.
        match *self {
            MetaItem::Word(ref name) |
            MetaItem::List(ref name, _) |
            MetaItem::NameValue(ref name, _) => name.as_ref(),
        }
    }
}
/// Possible values inside of compile-time attribute lists.
///
/// E.g. the '..' in `#[name(..)]`.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum NestedMetaItem {
    /// A full `MetaItem`.
    ///
    /// E.g. `Copy` in `#[derive(Copy)]` would be a `MetaItem::Word(Ident::from("Copy"))`.
    MetaItem(MetaItem),
    /// A Rust literal.
    ///
    /// E.g. `"name"` in `#[rename("name")]`.
    Literal(Lit),
}
/// Helper to split a collection of attributes by their `AttrStyle`.
pub trait FilterAttrs<'a> {
    type Ret: Iterator<Item = &'a Attribute>;
    /// Keeps only `AttrStyle::Outer` attributes.
    fn outer(self) -> Self::Ret;
    /// Keeps only `AttrStyle::Inner` attributes.
    fn inner(self) -> Self::Ret;
}
impl<'a, T> FilterAttrs<'a> for T
    where T: IntoIterator<Item = &'a Attribute>
{
    // Plain fn pointers (not closures) so the iterator type is nameable.
    type Ret = iter::Filter<T::IntoIter, fn(&&Attribute) -> bool>;
    fn outer(self) -> Self::Ret {
        fn is_outer(attr: &&Attribute) -> bool {
            attr.style == AttrStyle::Outer
        }
        self.into_iter().filter(is_outer)
    }
    fn inner(self) -> Self::Ret {
        fn is_inner(attr: &&Attribute) -> bool {
            attr.style == AttrStyle::Inner
        }
        self.into_iter().filter(is_inner)
    }
}
#[cfg(feature = "parsing")]
pub mod parsing {
use super::*;
use ident::parsing::ident;
use lit::parsing::lit;
use synom::space::{block_comment, whitespace};
#[cfg(feature = "full")]
named!(pub inner_attr -> Attribute, alt!(
do_parse!(
punct!("#") >>
punct!("!") >>
punct!("[") >>
meta_item: meta_item >>
punct!("]") >>
(Attribute {
style: AttrStyle::Inner,
value: meta_item,
is_sugared_doc: false,
})
)
|
do_parse!(
punct!("//!") >>
content: take_until!("\n") >>
(Attribute {
style: AttrStyle::Inner,
value: MetaItem::NameValue(
"doc".into(),
format!("//!{}", content).into(),
),
is_sugared_doc: true,
})
)
|
do_parse!(
option!(whitespace) >>
peek!(tag!("/*!")) >>
com: block_comment >>
(Attribute {
style: AttrStyle::Inner,
value: MetaItem::NameValue(
"doc".into(),
com.into(),
),
is_sugared_doc: true,
})
)
));
named!(pub outer_attr -> Attribute, alt!(
do_parse!(
punct!("#") >>
punct!("[") >>
meta_item: meta_item >>
punct!("]") >>
(Attribute {
style: AttrStyle::Outer,
value: meta_item,
is_sugared_doc: false,
})
)
|
do_parse!(
punct!("///") >>
not!(tag!("/")) >>
content: take_until!("\n") >>
(Attribute {
style: AttrStyle::Outer,
value: MetaItem::NameValue(
"doc".into(),
format!("///{}", content).into(),
),
is_sugared_doc: true,
})
)
|
do_parse!(
option!(whitespace) >>
peek!(tuple!(tag!("/**"), not!(tag!("*")))) >>
com: block_comment >>
(Attribute {
style: AttrStyle::Outer,
value: MetaItem::NameValue(
"doc".into(),
com.into(),
),
is_sugared_doc: true,
})
)
));
named!(meta_item -> MetaItem, alt!(
do_parse!(
id: ident >>
punct!("(") >>
inner: terminated_list!(punct!(","), nested_meta_item) >>
punct!(")") >>
(MetaItem::List(id, inner))
)
|
do_parse!(
name: ident >>
punct!("=") >>
value: lit >>
(MetaItem::NameValue(name, value))
)
|
map!(ident, MetaItem::Word)
));
named!(nested_meta_item -> NestedMetaItem, alt!(
meta_item => { NestedMetaItem::MetaItem }
|
lit => { NestedMetaItem::Literal }
));
}
#[cfg(feature = "printing")]
mod printing {
    use super::*;
    use lit::{Lit, StrStyle};
    use quote::{Tokens, ToTokens};
    impl ToTokens for Attribute {
        fn to_tokens(&self, tokens: &mut Tokens) {
            // Sugared doc attributes round-trip back to their comment form
            // when the stored value still carries the comment markers.
            if let Attribute { style,
                               value: MetaItem::NameValue(ref name,
                                                          Lit::Str(ref value, StrStyle::Cooked)),
                               is_sugared_doc: true } = *self {
                if name == "doc" {
                    match style {
                        AttrStyle::Inner if value.starts_with("//!") => {
                            tokens.append(&format!("{}\n", value));
                            return;
                        }
                        AttrStyle::Inner if value.starts_with("/*!") => {
                            tokens.append(value);
                            return;
                        }
                        AttrStyle::Outer if value.starts_with("///") => {
                            tokens.append(&format!("{}\n", value));
                            return;
                        }
                        AttrStyle::Outer if value.starts_with("/**") => {
                            tokens.append(value);
                            return;
                        }
                        _ => {}
                    }
                }
            }
            // Fallback: print the explicit `#[...]` / `#![...]` form.
            tokens.append("#");
            if let AttrStyle::Inner = self.style {
                tokens.append("!");
            }
            tokens.append("[");
            self.value.to_tokens(tokens);
            tokens.append("]");
        }
    }
    impl ToTokens for MetaItem {
        fn to_tokens(&self, tokens: &mut Tokens) {
            match *self {
                MetaItem::Word(ref ident) => {
                    ident.to_tokens(tokens);
                }
                MetaItem::List(ref ident, ref inner) => {
                    ident.to_tokens(tokens);
                    tokens.append("(");
                    tokens.append_separated(inner, ",");
                    tokens.append(")");
                }
                MetaItem::NameValue(ref name, ref value) => {
                    name.to_tokens(tokens);
                    tokens.append("=");
                    value.to_tokens(tokens);
                }
            }
        }
    }
    impl ToTokens for NestedMetaItem {
        fn to_tokens(&self, tokens: &mut Tokens) {
            match *self {
                NestedMetaItem::MetaItem(ref nested) => {
                    nested.to_tokens(tokens);
                }
                NestedMetaItem::Literal(ref lit) => {
                    lit.to_tokens(tokens);
                }
            }
        }
    }
}

180
third_party/rust/syn-0.11.11/src/constant.rs поставляемый
Просмотреть файл

@ -1,180 +0,0 @@
use super::*;
/// A restricted constant expression, as allowed e.g. in array lengths and
/// enum discriminants.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum ConstExpr {
    /// A function call
    ///
    /// The first field resolves to the function itself,
    /// and the second field is the list of arguments
    Call(Box<ConstExpr>, Vec<ConstExpr>),
    /// A binary operation (For example: `a + b`, `a * b`)
    Binary(BinOp, Box<ConstExpr>, Box<ConstExpr>),
    /// A unary operation (For example: `!x`, `*x`)
    Unary(UnOp, Box<ConstExpr>),
    /// A literal (For example: `1`, `"foo"`)
    Lit(Lit),
    /// A cast (`foo as f64`)
    Cast(Box<ConstExpr>, Box<Ty>),
    /// Variable reference, possibly containing `::` and/or type
    /// parameters, e.g. foo::bar::<baz>.
    Path(Path),
    /// An indexing operation (`foo[2]`)
    Index(Box<ConstExpr>, Box<ConstExpr>),
    /// No-op: used solely so we can pretty-print faithfully
    Paren(Box<ConstExpr>),
    /// If compiling with full support for expression syntax, any expression is
    /// allowed
    Other(Other),
}
// Without the "full" feature there is no expression type to embed, so
// `Other` is an uninhabitable-in-practice placeholder.
#[cfg(not(feature = "full"))]
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct Other {
    _private: (),
}
// With the "full" feature, `Other` is a genuine expression.
#[cfg(feature = "full")]
pub type Other = Expr;
#[cfg(feature = "parsing")]
pub mod parsing {
    use super::*;
    use {BinOp, Ty};
    use lit::parsing::lit;
    use op::parsing::{binop, unop};
    use ty::parsing::{path, ty};
    // Parses a prefix expression first, then folds any number of postfix /
    // infix continuations (call, binary op, cast, index) onto it.
    named!(pub const_expr -> ConstExpr, do_parse!(
        mut e: alt!(
            expr_unary
            |
            expr_lit
            |
            expr_path
            |
            expr_paren
            // Cannot handle ConstExpr::Other here because for example
            // `[u32; n!()]` would end up successfully parsing `n` as
            // ConstExpr::Path and then fail to parse `!()`. Instead, callers
            // are required to handle Other. See ty::parsing::array_len and
            // data::parsing::discriminant.
        ) >>
        many0!(alt!(
            tap!(args: and_call => {
                e = ConstExpr::Call(Box::new(e), args);
            })
            |
            tap!(more: and_binary => {
                let (op, other) = more;
                e = ConstExpr::Binary(op, Box::new(e), Box::new(other));
            })
            |
            tap!(ty: and_cast => {
                e = ConstExpr::Cast(Box::new(e), Box::new(ty));
            })
            |
            tap!(i: and_index => {
                e = ConstExpr::Index(Box::new(e), Box::new(i));
            })
        )) >>
        (e)
    ));
    // `(a, b, c)` argument list of a call continuation.
    named!(and_call -> Vec<ConstExpr>, do_parse!(
        punct!("(") >>
        args: terminated_list!(punct!(","), const_expr) >>
        punct!(")") >>
        (args)
    ));
    // Infix operator followed by its right-hand operand.
    named!(and_binary -> (BinOp, ConstExpr), tuple!(binop, const_expr));
    // Prefix unary operator applied to a whole const expression.
    named!(expr_unary -> ConstExpr, do_parse!(
        operator: unop >>
        operand: const_expr >>
        (ConstExpr::Unary(operator, Box::new(operand)))
    ));
    named!(expr_lit -> ConstExpr, map!(lit, ConstExpr::Lit));
    named!(expr_path -> ConstExpr, map!(path, ConstExpr::Path));
    // `[index]` continuation.
    named!(and_index -> ConstExpr, delimited!(punct!("["), const_expr, punct!("]")));
    // Parenthesized sub-expression, kept as `Paren` for faithful printing.
    named!(expr_paren -> ConstExpr, do_parse!(
        punct!("(") >>
        e: const_expr >>
        punct!(")") >>
        (ConstExpr::Paren(Box::new(e)))
    ));
    // `as Ty` cast continuation.
    named!(and_cast -> Ty, do_parse!(
        keyword!("as") >>
        ty: ty >>
        (ty)
    ));
}
#[cfg(feature = "printing")]
mod printing {
    use super::*;
    use quote::{Tokens, ToTokens};
    impl ToTokens for ConstExpr {
        fn to_tokens(&self, tokens: &mut Tokens) {
            match *self {
                ConstExpr::Call(ref func, ref args) => {
                    func.to_tokens(tokens);
                    tokens.append("(");
                    tokens.append_separated(args, ",");
                    tokens.append(")");
                }
                ConstExpr::Binary(op, ref left, ref right) => {
                    left.to_tokens(tokens);
                    op.to_tokens(tokens);
                    right.to_tokens(tokens);
                }
                ConstExpr::Unary(op, ref expr) => {
                    op.to_tokens(tokens);
                    expr.to_tokens(tokens);
                }
                ConstExpr::Lit(ref lit) => lit.to_tokens(tokens),
                ConstExpr::Cast(ref expr, ref ty) => {
                    expr.to_tokens(tokens);
                    tokens.append("as");
                    ty.to_tokens(tokens);
                }
                ConstExpr::Path(ref path) => path.to_tokens(tokens),
                ConstExpr::Index(ref expr, ref index) => {
                    expr.to_tokens(tokens);
                    tokens.append("[");
                    index.to_tokens(tokens);
                    tokens.append("]");
                }
                ConstExpr::Paren(ref expr) => {
                    tokens.append("(");
                    expr.to_tokens(tokens);
                    tokens.append(")");
                }
                ConstExpr::Other(ref other) => {
                    other.to_tokens(tokens);
                }
            }
        }
    }
    // Without the "full" feature no `Other` value can ever be constructed,
    // so printing one is a logic error.
    #[cfg(not(feature = "full"))]
    impl ToTokens for Other {
        fn to_tokens(&self, _tokens: &mut Tokens) {
            unreachable!()
        }
    }
}

297
third_party/rust/syn-0.11.11/src/data.rs поставляемый
Просмотреть файл

@ -1,297 +0,0 @@
use super::*;
/// An enum variant.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct Variant {
    /// Name of the variant.
    pub ident: Ident,
    /// Attributes tagged on the variant.
    pub attrs: Vec<Attribute>,
    /// Type of variant.
    pub data: VariantData,
    /// Explicit discriminant, e.g. `Foo = 1`
    pub discriminant: Option<ConstExpr>,
}
/// Data stored within an enum variant or struct.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum VariantData {
    /// Struct variant, e.g. `Point { x: f64, y: f64 }`.
    Struct(Vec<Field>),
    /// Tuple variant, e.g. `Some(T)`.
    Tuple(Vec<Field>),
    /// Unit variant, e.g. `None`.
    Unit,
}
impl VariantData {
    /// Slice of the fields stored in the variant; empty for unit variants.
    pub fn fields(&self) -> &[Field] {
        match *self {
            VariantData::Struct(ref fields) => fields,
            VariantData::Tuple(ref fields) => fields,
            VariantData::Unit => &[],
        }
    }
    /// Mutable slice of the fields stored in the variant.
    pub fn fields_mut(&mut self) -> &mut [Field] {
        match *self {
            VariantData::Struct(ref mut fields) => fields,
            VariantData::Tuple(ref mut fields) => fields,
            VariantData::Unit => &mut [],
        }
    }
}
/// A field of a struct or enum variant.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct Field {
    /// Name of the field, if any.
    ///
    /// Fields of tuple structs have no names.
    pub ident: Option<Ident>,
    /// Visibility of the field.
    pub vis: Visibility,
    /// Attributes tagged on the field.
    pub attrs: Vec<Attribute>,
    /// Type of the field.
    pub ty: Ty,
}
/// Visibility level of an item.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum Visibility {
    /// Public, i.e. `pub`.
    Public,
    /// Crate-visible, i.e. `pub(crate)`.
    Crate,
    /// Restricted, e.g. `pub(some::module)`.
    Restricted(Box<Path>),
    /// Inherited, i.e. private.
    Inherited,
}
#[cfg(feature = "parsing")]
pub mod parsing {
use super::*;
use WhereClause;
#[cfg(feature = "full")]
use ConstExpr;
use attr::parsing::outer_attr;
#[cfg(feature = "full")]
use constant::parsing::const_expr;
#[cfg(feature = "full")]
use expr::parsing::expr;
use generics::parsing::where_clause;
use ident::parsing::ident;
use ty::parsing::{path, ty};
named!(pub struct_body -> (WhereClause, VariantData), alt!(
do_parse!(
wh: where_clause >>
body: struct_like_body >>
(wh, VariantData::Struct(body))
)
|
do_parse!(
body: tuple_like_body >>
wh: where_clause >>
punct!(";") >>
(wh, VariantData::Tuple(body))
)
|
do_parse!(
wh: where_clause >>
punct!(";") >>
(wh, VariantData::Unit)
)
));
named!(pub enum_body -> (WhereClause, Vec<Variant>), do_parse!(
wh: where_clause >>
punct!("{") >>
variants: terminated_list!(punct!(","), variant) >>
punct!("}") >>
(wh, variants)
));
named!(variant -> Variant, do_parse!(
attrs: many0!(outer_attr) >>
id: ident >>
data: alt!(
struct_like_body => { VariantData::Struct }
|
tuple_like_body => { VariantData::Tuple }
|
epsilon!() => { |_| VariantData::Unit }
) >>
disr: option!(preceded!(punct!("="), discriminant)) >>
(Variant {
ident: id,
attrs: attrs,
data: data,
discriminant: disr,
})
));
#[cfg(not(feature = "full"))]
use constant::parsing::const_expr as discriminant;
#[cfg(feature = "full")]
named!(discriminant -> ConstExpr, alt!(
terminated!(const_expr, after_discriminant)
|
terminated!(expr, after_discriminant) => { ConstExpr::Other }
));
#[cfg(feature = "full")]
named!(after_discriminant -> &str, peek!(alt!(punct!(",") | punct!("}"))));
named!(pub struct_like_body -> Vec<Field>, do_parse!(
punct!("{") >>
fields: terminated_list!(punct!(","), struct_field) >>
punct!("}") >>
(fields)
));
named!(tuple_like_body -> Vec<Field>, do_parse!(
punct!("(") >>
fields: terminated_list!(punct!(","), tuple_field) >>
punct!(")") >>
(fields)
));
named!(struct_field -> Field, do_parse!(
attrs: many0!(outer_attr) >>
vis: visibility >>
id: ident >>
punct!(":") >>
ty: ty >>
(Field {
ident: Some(id),
vis: vis,
attrs: attrs,
ty: ty,
})
));
named!(tuple_field -> Field, do_parse!(
attrs: many0!(outer_attr) >>
vis: visibility >>
ty: ty >>
(Field {
ident: None,
vis: vis,
attrs: attrs,
ty: ty,
})
));
named!(pub visibility -> Visibility, alt!(
do_parse!(
keyword!("pub") >>
punct!("(") >>
keyword!("crate") >>
punct!(")") >>
(Visibility::Crate)
)
|
do_parse!(
keyword!("pub") >>
punct!("(") >>
restricted: path >>
punct!(")") >>
(Visibility::Restricted(Box::new(restricted)))
)
|
keyword!("pub") => { |_| Visibility::Public }
|
epsilon!() => { |_| Visibility::Inherited }
));
}
#[cfg(feature = "printing")]
mod printing {
    //! `ToTokens` implementations that print variants, variant bodies,
    //! fields, and visibility back out as token streams.
    use super::*;
    use quote::{Tokens, ToTokens};
    impl ToTokens for Variant {
        fn to_tokens(&self, tokens: &mut Tokens) {
            // Order matters: attributes, name, body, then `= discriminant`.
            for attr in &self.attrs {
                attr.to_tokens(tokens);
            }
            self.ident.to_tokens(tokens);
            self.data.to_tokens(tokens);
            if let Some(ref disr) = self.discriminant {
                tokens.append("=");
                disr.to_tokens(tokens);
            }
        }
    }
    impl ToTokens for VariantData {
        fn to_tokens(&self, tokens: &mut Tokens) {
            match *self {
                VariantData::Struct(ref fields) => {
                    tokens.append("{");
                    tokens.append_separated(fields, ",");
                    tokens.append("}");
                }
                VariantData::Tuple(ref fields) => {
                    tokens.append("(");
                    tokens.append_separated(fields, ",");
                    tokens.append(")");
                }
                // Unit variants print nothing for their (absent) body.
                VariantData::Unit => {}
            }
        }
    }
    impl ToTokens for Field {
        fn to_tokens(&self, tokens: &mut Tokens) {
            for attr in &self.attrs {
                attr.to_tokens(tokens);
            }
            self.vis.to_tokens(tokens);
            // Tuple fields have no name, so `name :` is only printed
            // for named fields.
            if let Some(ref ident) = self.ident {
                ident.to_tokens(tokens);
                tokens.append(":");
            }
            self.ty.to_tokens(tokens);
        }
    }
    impl ToTokens for Visibility {
        fn to_tokens(&self, tokens: &mut Tokens) {
            match *self {
                Visibility::Public => tokens.append("pub"),
                Visibility::Crate => {
                    tokens.append("pub");
                    tokens.append("(");
                    tokens.append("crate");
                    tokens.append(")");
                }
                Visibility::Restricted(ref path) => {
                    tokens.append("pub");
                    tokens.append("(");
                    path.to_tokens(tokens);
                    tokens.append(")");
                }
                // Inherited (private) visibility has no tokens.
                Visibility::Inherited => {}
            }
        }
    }
}

124
third_party/rust/syn-0.11.11/src/derive.rs поставляемый
Просмотреть файл

@ -1,124 +0,0 @@
use super::*;
/// Struct or enum sent to a `proc_macro_derive` macro.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct DeriveInput {
    /// Name of the struct or enum.
    pub ident: Ident,
    /// Visibility of the struct or enum.
    pub vis: Visibility,
    /// Attributes tagged on the whole struct or enum.
    pub attrs: Vec<Attribute>,
    /// Generics required to complete the definition.
    pub generics: Generics,
    /// Data within the struct or enum.
    pub body: Body,
}
/// Body of a derived struct or enum.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum Body {
    /// It's an enum, holding its list of variants.
    Enum(Vec<Variant>),
    /// It's a struct, holding its single (braced/tuple/unit) body.
    Struct(VariantData),
}
#[cfg(feature = "parsing")]
pub mod parsing {
    //! synom parser for a complete `struct`/`enum` item as fed to a
    //! `proc_macro_derive` macro.
    use super::*;
    use Generics;
    use attr::parsing::outer_attr;
    use data::parsing::{visibility, struct_body, enum_body};
    use generics::parsing::generics;
    use ident::parsing::ident;
    // attrs, visibility, `struct`/`enum` keyword, name, generics, then the
    // body parser chosen by which keyword matched. The `where` clause is
    // parsed as part of the body and spliced back into `generics`.
    named!(pub derive_input -> DeriveInput, do_parse!(
        attrs: many0!(outer_attr) >>
        vis: visibility >>
        which: alt!(keyword!("struct") | keyword!("enum")) >>
        id: ident >>
        generics: generics >>
        item: switch!(value!(which),
            "struct" => map!(struct_body, move |(wh, body)| DeriveInput {
                ident: id,
                vis: vis,
                attrs: attrs,
                generics: Generics {
                    where_clause: wh,
                    .. generics
                },
                body: Body::Struct(body),
            })
            |
            "enum" => map!(enum_body, move |(wh, body)| DeriveInput {
                ident: id,
                vis: vis,
                attrs: attrs,
                generics: Generics {
                    where_clause: wh,
                    .. generics
                },
                body: Body::Enum(body),
            })
        ) >>
        (item)
    ));
}
#[cfg(feature = "printing")]
mod printing {
    //! Prints a `DeriveInput` back out as tokens, placing the `where`
    //! clause and trailing `;` where each struct form requires them.
    use super::*;
    use attr::FilterAttrs;
    use data::VariantData;
    use quote::{Tokens, ToTokens};
    impl ToTokens for DeriveInput {
        fn to_tokens(&self, tokens: &mut Tokens) {
            // Only outer attributes are reprinted here.
            for attr in self.attrs.outer() {
                attr.to_tokens(tokens);
            }
            self.vis.to_tokens(tokens);
            match self.body {
                Body::Enum(_) => tokens.append("enum"),
                Body::Struct(_) => tokens.append("struct"),
            }
            self.ident.to_tokens(tokens);
            self.generics.to_tokens(tokens);
            match self.body {
                Body::Enum(ref variants) => {
                    // enums: `where` before the braces; every variant gets
                    // a trailing comma.
                    self.generics.where_clause.to_tokens(tokens);
                    tokens.append("{");
                    for variant in variants {
                        variant.to_tokens(tokens);
                        tokens.append(",");
                    }
                    tokens.append("}");
                }
                Body::Struct(ref variant_data) => {
                    match *variant_data {
                        VariantData::Struct(_) => {
                            // braced struct: `where` before the body,
                            // no trailing semicolon.
                            self.generics.where_clause.to_tokens(tokens);
                            variant_data.to_tokens(tokens);
                            // no semicolon
                        }
                        VariantData::Tuple(_) => {
                            // tuple struct: body, then `where`, then `;`.
                            variant_data.to_tokens(tokens);
                            self.generics.where_clause.to_tokens(tokens);
                            tokens.append(";");
                        }
                        VariantData::Unit => {
                            // unit struct: `where` then `;`.
                            self.generics.where_clause.to_tokens(tokens);
                            tokens.append(";");
                        }
                    }
                }
            }
        }
    }
}

294
third_party/rust/syn-0.11.11/src/escape.rs поставляемый
Просмотреть файл

@ -1,294 +0,0 @@
use std::{char, str};
use std::num::ParseIntError;
use synom::IResult;
/// Interprets the escape sequences in the body of a double-quoted string
/// literal (the input starts just after the opening `"`).
///
/// On success, returns `Done` with the remaining input (beginning at the
/// closing `"`, which is NOT consumed) and the unescaped contents.
/// Any malformed escape breaks out of the loop and yields `Error`, as does
/// running out of input before a closing quote.
pub fn cooked_string(input: &str) -> IResult<&str, String> {
    let mut s = String::new();
    let mut chars = input.char_indices().peekable();
    while let Some((byte_offset, ch)) = chars.next() {
        match ch {
            '"' => {
                return IResult::Done(&input[byte_offset..], s);
            }
            '\r' => {
                // A literal CR must be part of a CRLF pair; it is
                // normalized to a single '\n'.
                if let Some((_, '\n')) = chars.next() {
                    s.push('\n');
                } else {
                    break;
                }
            }
            '\\' => {
                match chars.next() {
                    Some((_, 'x')) => {
                        match backslash_x_char(&mut chars) {
                            Some(ch) => s.push(ch),
                            None => break,
                        }
                    }
                    Some((_, 'n')) => s.push('\n'),
                    Some((_, 'r')) => s.push('\r'),
                    Some((_, 't')) => s.push('\t'),
                    Some((_, '\\')) => s.push('\\'),
                    Some((_, '0')) => s.push('\0'),
                    Some((_, 'u')) => {
                        match backslash_u(&mut chars) {
                            Some(ch) => s.push(ch),
                            None => break,
                        }
                    }
                    Some((_, '\'')) => s.push('\''),
                    Some((_, '"')) => s.push('"'),
                    Some((_, '\n')) | Some((_, '\r')) => {
                        // Line-continuation escape: a backslash before a
                        // newline swallows all following whitespace.
                        while let Some(&(_, ch)) = chars.peek() {
                            if ch.is_whitespace() {
                                chars.next();
                            } else {
                                break;
                            }
                        }
                    }
                    _ => break,
                }
            }
            ch => {
                s.push(ch);
            }
        }
    }
    // Fell out of the loop: unterminated literal or bad escape.
    IResult::Error
}
/// Interprets the escape sequences in the body of a byte-string literal
/// `b"..."` (the input starts just after the opening `"`).
///
/// On success, returns `Done` with the remaining input (beginning at the
/// closing `"`, which is NOT consumed) and the unescaped bytes. Non-ASCII
/// bytes (>= 0x80) and malformed escapes produce `Error`.
pub fn cooked_byte_string(mut input: &str) -> IResult<&str, Vec<u8>> {
    let mut vec = Vec::new();
    let mut bytes = input.bytes().enumerate();
    'outer: while let Some((offset, b)) = bytes.next() {
        match b {
            b'"' => {
                return IResult::Done(&input[offset..], vec);
            }
            b'\r' => {
                // A literal CR must be part of a CRLF pair; normalized to '\n'.
                if let Some((_, b'\n')) = bytes.next() {
                    vec.push(b'\n');
                } else {
                    break;
                }
            }
            b'\\' => {
                match bytes.next() {
                    Some((_, b'x')) => {
                        match backslash_x_byte(&mut bytes) {
                            Some(b) => vec.push(b),
                            None => break,
                        }
                    }
                    Some((_, b'n')) => vec.push(b'\n'),
                    Some((_, b'r')) => vec.push(b'\r'),
                    Some((_, b't')) => vec.push(b'\t'),
                    Some((_, b'\\')) => vec.push(b'\\'),
                    Some((_, b'0')) => vec.push(b'\0'),
                    Some((_, b'\'')) => vec.push(b'\''),
                    Some((_, b'"')) => vec.push(b'"'),
                    Some((newline, b'\n')) |
                    Some((newline, b'\r')) => {
                        // Line-continuation escape: skip following
                        // whitespace char-wise, then restart the byte
                        // iterator over the rest of the input.
                        let rest = &input[newline + 1..];
                        for (offset, ch) in rest.char_indices() {
                            if !ch.is_whitespace() {
                                input = &rest[offset..];
                                bytes = input.bytes().enumerate();
                                continue 'outer;
                            }
                        }
                        break;
                    }
                    _ => break,
                }
            }
            // Plain bytes must be ASCII inside a byte string.
            b if b < 0x80 => {
                vec.push(b);
            }
            _ => break,
        }
    }
    // Fell out of the loop: unterminated literal or bad escape.
    IResult::Error
}
pub fn cooked_char(input: &str) -> IResult<&str, char> {
let mut chars = input.char_indices();
let ch = match chars.next().map(|(_, ch)| ch) {
Some('\\') => {
match chars.next().map(|(_, ch)| ch) {
Some('x') => backslash_x_char(&mut chars),
Some('n') => Some('\n'),
Some('r') => Some('\r'),
Some('t') => Some('\t'),
Some('\\') => Some('\\'),
Some('0') => Some('\0'),
Some('u') => backslash_u(&mut chars),
Some('\'') => Some('\''),
Some('"') => Some('"'),
_ => None,
}
}
ch => ch,
};
match ch {
Some(ch) => IResult::Done(chars.as_str(), ch),
None => IResult::Error,
}
}
/// Interprets one (possibly escaped) byte from the body of a byte literal
/// `b'...'`. Returns `Done` with the rest of the input and the decoded
/// byte, or `Error` on an empty input / unknown escape.
pub fn cooked_byte(input: &str) -> IResult<&str, u8> {
    let mut bytes = input.bytes().enumerate();
    let first = bytes.next().map(|(_, b)| b);
    // A backslash starts an escape sequence; anything else (including
    // end-of-input, i.e. `None`) passes through unchanged.
    let decoded = if first == Some(b'\\') {
        match bytes.next().map(|(_, b)| b) {
            Some(b'x') => backslash_x_byte(&mut bytes),
            Some(b'n') => Some(b'\n'),
            Some(b'r') => Some(b'\r'),
            Some(b't') => Some(b'\t'),
            Some(b'\\') => Some(b'\\'),
            Some(b'0') => Some(b'\0'),
            Some(b'\'') => Some(b'\''),
            Some(b'"') => Some(b'"'),
            _ => None,
        }
    } else {
        first
    };
    match decoded {
        None => IResult::Error,
        Some(value) => {
            // The offset of the next unconsumed byte marks where the
            // remaining input starts; no next byte means it was all used.
            match bytes.next() {
                Some((offset, _)) => IResult::Done(&input[offset..], value),
                None => IResult::Done("", value),
            }
        }
    }
}
/// Parses the body of a raw string literal `r#"..."#` (input starts just
/// after the `r`). Returns the contents together with the number of `#`
/// characters used, so the caller can reconstruct the delimiter.
pub fn raw_string(input: &str) -> IResult<&str, (String, usize)> {
    let mut chars = input.char_indices();
    let mut n = 0;
    // Count the leading `#`s up to the opening quote; since `#` is one
    // byte, the quote's byte offset equals the hash count.
    while let Some((byte_offset, ch)) = chars.next() {
        match ch {
            '"' => {
                n = byte_offset;
                break;
            }
            '#' => {}
            _ => return IResult::Error,
        }
    }
    let mut s = String::new();
    for (byte_offset, ch) in chars {
        match ch {
            // The literal ends at a `"` followed by the same run of `#`s
            // (`input[..n]` is exactly that run).
            '"' if input[byte_offset + 1..].starts_with(&input[..n]) => {
                let rest = &input[byte_offset + 1 + n..];
                return IResult::Done(rest, (s, n));
            }
            // CR is dropped (raw strings normalize CRLF to LF).
            '\r' => {}
            _ => s.push(ch),
        }
    }
    IResult::Error
}
// Pulls the next character out of an `(offset, char)` iterator and checks
// it against the given pattern(s); evaluates to the character on a match,
// or does an early `return None` from the enclosing function otherwise.
macro_rules! next_ch {
    ($chars:ident @ $pat:pat $(| $rest:pat)*) => {
        match $chars.next() {
            Some((_, ch)) => match ch {
                $pat $(| $rest)* => ch,
                _ => return None,
            },
            None => return None,
        }
    };
}
// Abstraction over the inherent `from_str_radix` associated functions of
// `u8` and `u32`, so `from_hex!` can pick the result type by inference.
trait FromStrRadix: Sized {
    fn from_str_radix(src: &str, radix: u32) -> Result<Self, ParseIntError>;
}
impl FromStrRadix for u8 {
    fn from_str_radix(src: &str, radix: u32) -> Result<Self, ParseIntError> {
        u8::from_str_radix(src, radix)
    }
}
impl FromStrRadix for u32 {
    fn from_str_radix(src: &str, radix: u32) -> Result<Self, ParseIntError> {
        u32::from_str_radix(src, radix)
    }
}
// Concatenates hex-digit characters into a string and parses it base-16;
// the callers pass digits already validated by `next_ch!`, so the
// `unwrap`s cannot fail.
macro_rules! from_hex {
    ($($ch:ident)+) => {{
        let hex_bytes = &[$($ch as u8),*];
        let hex_str = str::from_utf8(hex_bytes).unwrap();
        FromStrRadix::from_str_radix(hex_str, 16).unwrap()
    }};
}
/// Decodes the two hex digits of a `\xNN` escape in a char/string literal.
/// The first digit is restricted to `0-7` because such escapes must not
/// exceed 0x7F in (non-byte) literals.
#[cfg_attr(feature = "cargo-clippy", allow(diverging_sub_expression))]
fn backslash_x_char<I>(chars: &mut I) -> Option<char>
    where I: Iterator<Item = (usize, char)>
{
    let a = next_ch!(chars @ '0'...'7');
    let b = next_ch!(chars @ '0'...'9' | 'a'...'f' | 'A'...'F');
    char::from_u32(from_hex!(a b))
}
/// Decodes the two hex digits of a `\xNN` escape in a byte/byte-string
/// literal, where the full 0x00-0xFF range is allowed.
#[cfg_attr(feature = "cargo-clippy", allow(diverging_sub_expression))]
fn backslash_x_byte<I>(chars: &mut I) -> Option<u8>
    where I: Iterator<Item = (usize, u8)>
{
    let a = next_ch!(chars @ b'0'...b'9' | b'a'...b'f' | b'A'...b'F');
    let b = next_ch!(chars @ b'0'...b'9' | b'a'...b'f' | b'A'...b'F');
    Some(from_hex!(a b))
}
/// Decodes a `\u{...}` escape: one to six hex digits inside braces. Each
/// step accepts either another digit or the closing `}`, at which point
/// the digits collected so far are converted to a `char` (which also
/// rejects surrogate/out-of-range code points via `char::from_u32`).
#[cfg_attr(feature = "cargo-clippy", allow(diverging_sub_expression, many_single_char_names))]
fn backslash_u<I>(chars: &mut I) -> Option<char>
    where I: Iterator<Item = (usize, char)>
{
    next_ch!(chars @ '{');
    let a = next_ch!(chars @ '0'...'9' | 'a'...'f' | 'A'...'F');
    let b = next_ch!(chars @ '0'...'9' | 'a'...'f' | 'A'...'F' | '}');
    if b == '}' {
        return char::from_u32(from_hex!(a));
    }
    let c = next_ch!(chars @ '0'...'9' | 'a'...'f' | 'A'...'F' | '}');
    if c == '}' {
        return char::from_u32(from_hex!(a b));
    }
    let d = next_ch!(chars @ '0'...'9' | 'a'...'f' | 'A'...'F' | '}');
    if d == '}' {
        return char::from_u32(from_hex!(a b c));
    }
    let e = next_ch!(chars @ '0'...'9' | 'a'...'f' | 'A'...'F' | '}');
    if e == '}' {
        return char::from_u32(from_hex!(a b c d));
    }
    let f = next_ch!(chars @ '0'...'9' | 'a'...'f' | 'A'...'F' | '}');
    if f == '}' {
        return char::from_u32(from_hex!(a b c d e));
    }
    // Six digits is the maximum; the next character must close the brace.
    next_ch!(chars @ '}');
    char::from_u32(from_hex!(a b c d e f))
}
// Checks hex, line-continuation, and 1- to 6-digit unicode escapes; the
// returned remainder must be the unconsumed closing quote.
#[test]
fn test_cooked_string() {
    let input = "\\x62 \\\n \\u{7} \\u{64} \\u{bf5} \\u{12ba} \\u{1F395} \\u{102345}\"";
    let expected = "\x62 \u{7} \u{64} \u{bf5} \u{12ba} \u{1F395} \u{102345}";
    assert_eq!(cooked_string(input),
               IResult::Done("\"", expected.to_string()));
}
// Checks hex and line-continuation escapes for byte strings, including a
// non-ASCII \xEF byte; the remainder must be the unconsumed closing quote.
#[test]
fn test_cooked_byte_string() {
    let input = "\\x62 \\\n \\xEF\"";
    let expected = b"\x62 \xEF";
    assert_eq!(cooked_byte_string(input),
               IResult::Done("\"", expected.to_vec()));
}

1721
third_party/rust/syn-0.11.11/src/expr.rs поставляемый

Разница между файлами не показана из-за своего большого размера Загрузить разницу

942
third_party/rust/syn-0.11.11/src/fold.rs поставляемый
Просмотреть файл

@ -1,942 +0,0 @@
// Adapted from libsyntax.
//! A Folder represents an AST->AST fold; it accepts an AST piece,
//! and returns a piece of the same type.
use super::*;
#[cfg(not(feature = "full"))]
use constant;
/// AST->AST fold.
///
/// Each method of the Folder trait is a hook to be potentially overridden. Each
/// method's default implementation recursively visits the substructure of the
/// input via the `noop_fold` methods, which perform an "identity fold", that
/// is, they return the same structure that they are given (for example the
/// `fold_crate` method by default calls `fold::noop_fold_crate`).
///
/// If you want to ensure that your code handles every variant explicitly, you
/// need to override each method and monitor future changes to `Folder` in case
/// a new method with a new default implementation gets introduced.
pub trait Folder {
    // Any additions to this trait should happen in form
    // of a call to a public `noop_*` function that only calls
    // out to the folder again, not other `noop_*` functions.
    //
    // This is a necessary API workaround to the problem of not
    // being able to call out to the super default method
    // in an overridden default method.
    fn fold_ident(&mut self, _ident: Ident) -> Ident {
        noop_fold_ident(self, _ident)
    }
    fn fold_derive_input(&mut self, derive_input: DeriveInput) -> DeriveInput {
        noop_fold_derive_input(self, derive_input)
    }
    fn fold_ty(&mut self, ty: Ty) -> Ty {
        noop_fold_ty(self, ty)
    }
    fn fold_generics(&mut self, generics: Generics) -> Generics {
        noop_fold_generics(self, generics)
    }
    fn fold_ty_param_bound(&mut self, bound: TyParamBound) -> TyParamBound {
        noop_fold_ty_param_bound(self, bound)
    }
    fn fold_poly_trait_ref(&mut self, trait_ref: PolyTraitRef) -> PolyTraitRef {
        noop_fold_poly_trait_ref(self, trait_ref)
    }
    fn fold_variant_data(&mut self, data: VariantData) -> VariantData {
        noop_fold_variant_data(self, data)
    }
    fn fold_field(&mut self, field: Field) -> Field {
        noop_fold_field(self, field)
    }
    fn fold_variant(&mut self, variant: Variant) -> Variant {
        noop_fold_variant(self, variant)
    }
    fn fold_lifetime(&mut self, _lifetime: Lifetime) -> Lifetime {
        noop_fold_lifetime(self, _lifetime)
    }
    fn fold_lifetime_def(&mut self, lifetime: LifetimeDef) -> LifetimeDef {
        noop_fold_lifetime_def(self, lifetime)
    }
    fn fold_path(&mut self, path: Path) -> Path {
        noop_fold_path(self, path)
    }
    fn fold_path_segment(&mut self, path_segment: PathSegment) -> PathSegment {
        noop_fold_path_segment(self, path_segment)
    }
    fn fold_path_parameters(&mut self, path_parameters: PathParameters) -> PathParameters {
        noop_fold_path_parameters(self, path_parameters)
    }
    fn fold_assoc_type_binding(&mut self, type_binding: TypeBinding) -> TypeBinding {
        noop_fold_assoc_type_binding(self, type_binding)
    }
    fn fold_attribute(&mut self, _attr: Attribute) -> Attribute {
        noop_fold_attribute(self, _attr)
    }
    fn fold_fn_ret_ty(&mut self, ret_ty: FunctionRetTy) -> FunctionRetTy {
        noop_fold_fn_ret_ty(self, ret_ty)
    }
    fn fold_const_expr(&mut self, expr: ConstExpr) -> ConstExpr {
        noop_fold_const_expr(self, expr)
    }
    fn fold_lit(&mut self, _lit: Lit) -> Lit {
        noop_fold_lit(self, _lit)
    }
    fn fold_mac(&mut self, mac: Mac) -> Mac {
        noop_fold_mac(self, mac)
    }
    // The hooks below only exist with the "full" feature, which enables
    // the item/expression/statement parts of the AST.
    #[cfg(feature = "full")]
    fn fold_crate(&mut self, _crate: Crate) -> Crate {
        noop_fold_crate(self, _crate)
    }
    #[cfg(feature = "full")]
    fn fold_item(&mut self, item: Item) -> Item {
        noop_fold_item(self, item)
    }
    #[cfg(feature = "full")]
    fn fold_expr(&mut self, expr: Expr) -> Expr {
        noop_fold_expr(self, expr)
    }
    #[cfg(feature = "full")]
    fn fold_foreign_item(&mut self, foreign_item: ForeignItem) -> ForeignItem {
        noop_fold_foreign_item(self, foreign_item)
    }
    #[cfg(feature = "full")]
    fn fold_pat(&mut self, pat: Pat) -> Pat {
        noop_fold_pat(self, pat)
    }
    #[cfg(feature = "full")]
    fn fold_fn_decl(&mut self, fn_decl: FnDecl) -> FnDecl {
        noop_fold_fn_decl(self, fn_decl)
    }
    #[cfg(feature = "full")]
    fn fold_trait_item(&mut self, trait_item: TraitItem) -> TraitItem {
        noop_fold_trait_item(self, trait_item)
    }
    #[cfg(feature = "full")]
    fn fold_impl_item(&mut self, impl_item: ImplItem) -> ImplItem {
        noop_fold_impl_item(self, impl_item)
    }
    #[cfg(feature = "full")]
    fn fold_method_sig(&mut self, method_sig: MethodSig) -> MethodSig {
        noop_fold_method_sig(self, method_sig)
    }
    #[cfg(feature = "full")]
    fn fold_stmt(&mut self, stmt: Stmt) -> Stmt {
        noop_fold_stmt(self, stmt)
    }
    #[cfg(feature = "full")]
    fn fold_block(&mut self, block: Block) -> Block {
        noop_fold_block(self, block)
    }
    #[cfg(feature = "full")]
    fn fold_local(&mut self, local: Local) -> Local {
        noop_fold_local(self, local)
    }
    #[cfg(feature = "full")]
    fn fold_view_path(&mut self, view_path: ViewPath) -> ViewPath {
        noop_fold_view_path(self, view_path)
    }
}
// Maps the contents of a container through a consume-once closure:
// `Box<T>` -> `Box<U>`. Internal helper for the noop folds.
trait LiftOnce<T, U> {
    type Output;
    fn lift<F>(self, f: F) -> Self::Output where F: FnOnce(T) -> U;
}
impl<T, U> LiftOnce<T, U> for Box<T> {
    type Output = Box<U>;
    // Clippy false positive
    // https://github.com/Manishearth/rust-clippy/issues/1478
    #[cfg_attr(feature = "cargo-clippy", allow(boxed_local))]
    fn lift<F>(self, f: F) -> Box<U>
        where F: FnOnce(T) -> U
    {
        Box::new(f(*self))
    }
}
// Maps every element of a container through a closure:
// `Vec<T>` -> `Vec<U>`. Internal helper for the noop folds.
trait LiftMut<T, U> {
    type Output;
    fn lift<F>(self, f: F) -> Self::Output where F: FnMut(T) -> U;
}
impl<T, U> LiftMut<T, U> for Vec<T> {
    type Output = Vec<U>;
    fn lift<F>(self, f: F) -> Vec<U>
        where F: FnMut(T) -> U
    {
        self.into_iter().map(f).collect()
    }
}
/// Identity fold for an identifier: leaf node, returned unchanged.
pub fn noop_fold_ident<F: ?Sized + Folder>(_: &mut F, _ident: Ident) -> Ident {
    _ident
}
/// Default fold for a `DeriveInput`: recurses into its name, visibility,
/// attributes, generics, and struct/enum body via the folder's hooks.
pub fn noop_fold_derive_input<F: ?Sized + Folder>(folder: &mut F,
                                                  DeriveInput{ ident,
                                                               vis,
                                                               attrs,
                                                               generics,
                                                               body }: DeriveInput) -> DeriveInput{
    use Body::*;
    DeriveInput {
        ident: folder.fold_ident(ident),
        vis: noop_fold_vis(folder, vis),
        attrs: attrs.lift(|a| folder.fold_attribute(a)),
        generics: folder.fold_generics(generics),
        body: match body {
            Enum(variants) => Enum(variants.lift(move |v| folder.fold_variant(v))),
            Struct(variant_data) => Struct(folder.fold_variant_data(variant_data)),
        },
    }
}
/// Default fold for a type: recurses into each `Ty` variant's children
/// (element types, lifetimes, paths, bounds, ...) via the folder's hooks.
pub fn noop_fold_ty<F: ?Sized + Folder>(folder: &mut F, ty: Ty) -> Ty {
    use Ty::*;
    match ty {
        Slice(inner) => Slice(inner.lift(|v| folder.fold_ty(v))),
        Paren(inner) => Paren(inner.lift(|v| folder.fold_ty(v))),
        Ptr(mutable_type) => {
            let mutable_type_ = *mutable_type;
            let MutTy { ty, mutability }: MutTy = mutable_type_;
            Ptr(Box::new(MutTy {
                ty: folder.fold_ty(ty),
                mutability: mutability,
            }))
        }
        Rptr(opt_lifetime, mutable_type) => {
            let mutable_type_ = *mutable_type;
            let MutTy { ty, mutability }: MutTy = mutable_type_;
            Rptr(opt_lifetime.map(|l| folder.fold_lifetime(l)),
                 Box::new(MutTy {
                     ty: folder.fold_ty(ty),
                     mutability: mutability,
                 }))
        }
        // `!` and `_` are leaves.
        Never => Never,
        Infer => Infer,
        Tup(tuple_element_types) => Tup(tuple_element_types.lift(|x| folder.fold_ty(x))),
        BareFn(bare_fn) => {
            let bf_ = *bare_fn;
            let BareFnTy { unsafety, abi, lifetimes, inputs, output, variadic } = bf_;
            BareFn(Box::new(BareFnTy {
                unsafety: unsafety,
                abi: abi,
                lifetimes: lifetimes.lift(|l| folder.fold_lifetime_def(l)),
                inputs: inputs.lift(|v| {
                    BareFnArg {
                        name: v.name.map(|n| folder.fold_ident(n)),
                        ty: folder.fold_ty(v.ty),
                    }
                }),
                output: folder.fold_fn_ret_ty(output),
                variadic: variadic,
            }))
        }
        Path(maybe_qself, path) => {
            Path(maybe_qself.map(|v| noop_fold_qself(folder, v)),
                 folder.fold_path(path))
        }
        Array(inner, len) => {
            Array({
                      inner.lift(|v| folder.fold_ty(v))
                  },
                  folder.fold_const_expr(len))
        }
        TraitObject(bounds) => TraitObject(bounds.lift(|v| folder.fold_ty_param_bound(v))),
        ImplTrait(bounds) => ImplTrait(bounds.lift(|v| folder.fold_ty_param_bound(v))),
        Mac(mac) => Mac(folder.fold_mac(mac)),
    }
}
/// Folds the self-type of a qualified path (`<T as Trait>::...`); the
/// `position` index into the path is preserved as-is.
fn noop_fold_qself<F: ?Sized + Folder>(folder: &mut F, QSelf { ty, position }: QSelf) -> QSelf {
    QSelf {
        ty: Box::new(folder.fold_ty(*(ty))),
        position: position,
    }
}
/// Default fold for generics: recurses into lifetime definitions, type
/// parameters (attrs, bounds, defaults), and each kind of `where` predicate.
pub fn noop_fold_generics<F: ?Sized + Folder>(folder: &mut F,
                                              Generics { lifetimes, ty_params, where_clause }: Generics)
                                              -> Generics{
    use WherePredicate::*;
    Generics {
        lifetimes: lifetimes.lift(|l| folder.fold_lifetime_def(l)),
        ty_params: ty_params.lift(|ty| {
            TyParam {
                attrs: ty.attrs.lift(|a| folder.fold_attribute(a)),
                ident: folder.fold_ident(ty.ident),
                bounds: ty.bounds.lift(|ty_pb| folder.fold_ty_param_bound(ty_pb)),
                default: ty.default.map(|v| folder.fold_ty(v)),
            }
        }),
        where_clause: WhereClause {
            predicates: where_clause.predicates.lift(|p| match p {
                // `T: Bound` predicates.
                BoundPredicate(bound_predicate) => {
                    BoundPredicate(WhereBoundPredicate {
                        bound_lifetimes: bound_predicate.bound_lifetimes
                            .lift(|l| folder.fold_lifetime_def(l)),
                        bounded_ty: folder.fold_ty(bound_predicate.bounded_ty),
                        bounds: bound_predicate.bounds
                            .lift(|ty_pb| folder.fold_ty_param_bound(ty_pb)),
                    })
                }
                // `'a: 'b` predicates.
                RegionPredicate(region_predicate) => {
                    RegionPredicate(WhereRegionPredicate {
                        lifetime: folder.fold_lifetime(region_predicate.lifetime),
                        bounds: region_predicate.bounds
                            .lift(|b| folder.fold_lifetime(b)),
                    })
                }
                // `T = U` predicates.
                EqPredicate(eq_predicate) => {
                    EqPredicate(WhereEqPredicate {
                        lhs_ty: folder.fold_ty(eq_predicate.lhs_ty),
                        rhs_ty: folder.fold_ty(eq_predicate.rhs_ty),
                    })
                }
            }),
        },
    }
}
/// Default fold for a type-parameter bound: a trait bound recurses into its
/// poly-trait-ref (keeping the `?` modifier), a lifetime bound into the
/// lifetime.
pub fn noop_fold_ty_param_bound<F: ?Sized + Folder>(folder: &mut F,
                                                    bound: TyParamBound)
                                                    -> TyParamBound {
    use TyParamBound::*;
    match bound {
        Trait(ty, modifier) => Trait(folder.fold_poly_trait_ref(ty), modifier),
        Region(lifetime) => Region(folder.fold_lifetime(lifetime)),
    }
}
/// Default fold for a poly-trait-ref (`for<'a> Trait<...>`): recurses into
/// the bound lifetimes and the trait path.
pub fn noop_fold_poly_trait_ref<F: ?Sized + Folder>(folder: &mut F,
                                                    trait_ref: PolyTraitRef)
                                                    -> PolyTraitRef {
    PolyTraitRef {
        bound_lifetimes: trait_ref.bound_lifetimes.lift(|bl| folder.fold_lifetime_def(bl)),
        trait_ref: folder.fold_path(trait_ref.trait_ref),
    }
}
/// Default fold for a struct/enum-variant body: folds each field; unit
/// bodies have nothing to recurse into.
pub fn noop_fold_variant_data<F: ?Sized + Folder>(folder: &mut F,
                                                  data: VariantData)
                                                  -> VariantData {
    use VariantData::*;
    match data {
        Struct(fields) => Struct(fields.lift(|f| folder.fold_field(f))),
        Tuple(fields) => Tuple(fields.lift(|f| folder.fold_field(f))),
        Unit => Unit,
    }
}
/// Default fold for a field: recurses into its optional name, visibility,
/// attributes, and type.
pub fn noop_fold_field<F: ?Sized + Folder>(folder: &mut F, field: Field) -> Field {
    Field {
        ident: field.ident.map(|i| folder.fold_ident(i)),
        vis: noop_fold_vis(folder, field.vis),
        attrs: field.attrs.lift(|a| folder.fold_attribute(a)),
        ty: folder.fold_ty(field.ty),
    }
}
/// Default fold for an enum variant: recurses into its name, attributes,
/// body, and optional discriminant expression.
pub fn noop_fold_variant<F: ?Sized + Folder>(folder: &mut F,
                                             Variant { ident, attrs, data, discriminant }: Variant)
                                             -> Variant{
    Variant {
        ident: folder.fold_ident(ident),
        attrs: attrs.lift(|v| folder.fold_attribute(v)),
        data: folder.fold_variant_data(data),
        discriminant: discriminant.map(|ce| folder.fold_const_expr(ce)),
    }
}
/// Default fold for a lifetime: folds the identifier it wraps.
pub fn noop_fold_lifetime<F: ?Sized + Folder>(folder: &mut F, _lifetime: Lifetime) -> Lifetime {
    Lifetime { ident: folder.fold_ident(_lifetime.ident) }
}
/// Default fold for a lifetime definition (`'a: 'b + 'c`): recurses into
/// its attributes, the lifetime itself, and its bounds.
pub fn noop_fold_lifetime_def<F: ?Sized + Folder>(folder: &mut F,
                                                  LifetimeDef { attrs, lifetime, bounds }: LifetimeDef)
                                                  -> LifetimeDef{
    LifetimeDef {
        attrs: attrs.lift(|x| folder.fold_attribute(x)),
        lifetime: folder.fold_lifetime(lifetime),
        bounds: bounds.lift(|l| folder.fold_lifetime(l)),
    }
}
/// Default fold for a path: folds each segment; the leading-`::` flag is
/// preserved unchanged.
pub fn noop_fold_path<F: ?Sized + Folder>(folder: &mut F, Path { global, segments }: Path) -> Path {
    Path {
        global: global,
        segments: segments.lift(|s| folder.fold_path_segment(s)),
    }
}
/// Default fold for a path segment: recurses into its identifier and its
/// angle-bracketed/parenthesized parameters.
pub fn noop_fold_path_segment<F: ?Sized + Folder>(folder: &mut F,
                                                  PathSegment { ident, parameters }: PathSegment)
                                                  -> PathSegment {
    PathSegment {
        ident: folder.fold_ident(ident),
        parameters: folder.fold_path_parameters(parameters),
    }
}
/// Default fold for path parameters: `<'a, T, Assoc = U>` recurses into
/// lifetimes, types, and bindings; `(A, B) -> C` into inputs and output.
pub fn noop_fold_path_parameters<F: ?Sized + Folder>(folder: &mut F,
                                                     path_parameters: PathParameters)
                                                     -> PathParameters {
    use PathParameters::*;
    match path_parameters {
        AngleBracketed(d) => {
            let AngleBracketedParameterData { lifetimes, types, bindings } = d;
            AngleBracketed(AngleBracketedParameterData {
                lifetimes: lifetimes.into_iter()
                    .map(|l| folder.fold_lifetime(l))
                    .collect(),
                types: types.lift(|ty| folder.fold_ty(ty)),
                bindings: bindings.lift(|tb| folder.fold_assoc_type_binding(tb)),
            })
        }
        Parenthesized(d) => {
            let ParenthesizedParameterData { inputs, output } = d;
            Parenthesized(ParenthesizedParameterData {
                inputs: inputs.lift(|i| folder.fold_ty(i)),
                output: output.map(|v| folder.fold_ty(v)),
            })
        }
    }
}
/// Default fold for an associated-type binding (`Assoc = Ty`): recurses
/// into the name and the bound type.
pub fn noop_fold_assoc_type_binding<F: ?Sized + Folder>(folder: &mut F,
                                                        TypeBinding { ident, ty }: TypeBinding)
                                                        -> TypeBinding {
    TypeBinding {
        ident: folder.fold_ident(ident),
        ty: folder.fold_ty(ty),
    }
}
/// Identity fold for an attribute: leaf node, returned unchanged.
pub fn noop_fold_attribute<F: ?Sized + Folder>(_: &mut F, _attr: Attribute) -> Attribute {
    _attr
}
/// Default fold for a function return type: `-> Ty` recurses into the
/// type; an elided return has nothing to fold.
pub fn noop_fold_fn_ret_ty<F: ?Sized + Folder>(folder: &mut F,
                                               ret_ty: FunctionRetTy)
                                               -> FunctionRetTy {
    use FunctionRetTy::*;
    match ret_ty {
        Default => Default,
        Ty(ty) => Ty(folder.fold_ty(ty)),
    }
}
/// Default fold for a const expression: recurses into each `ConstExpr`
/// variant's sub-expressions, types, paths, and literals.
pub fn noop_fold_const_expr<F: ?Sized + Folder>(folder: &mut F, expr: ConstExpr) -> ConstExpr {
    use ConstExpr::*;
    match expr {
        Call(f, args) => {
            Call(f.lift(|e| folder.fold_const_expr(e)),
                 args.lift(|v| folder.fold_const_expr(v)))
        }
        Binary(op, lhs, rhs) => {
            Binary(op,
                   lhs.lift(|e| folder.fold_const_expr(e)),
                   rhs.lift(|e| folder.fold_const_expr(e)))
        }
        Unary(op, e) => Unary(op, e.lift(|e| folder.fold_const_expr(e))),
        Lit(l) => Lit(folder.fold_lit(l)),
        Cast(e, ty) => {
            Cast(e.lift(|e| folder.fold_const_expr(e)),
                 ty.lift(|v| folder.fold_ty(v)))
        }
        Path(p) => Path(folder.fold_path(p)),
        Index(o, i) => {
            Index(o.lift(|e| folder.fold_const_expr(e)),
                  i.lift(|e| folder.fold_const_expr(e)))
        }
        Paren(no_op) => Paren(no_op.lift(|e| folder.fold_const_expr(e))),
        // `Other` holds a full expression only with the "full" feature;
        // the cfg'd helper below picks the right handling.
        Other(e) => Other(noop_fold_other_const_expr(folder, e)),
    }
}
// With the "full" feature, `ConstExpr::Other` wraps a real `Expr`, so it
// is folded through the folder's expression hook.
#[cfg(feature = "full")]
fn noop_fold_other_const_expr<F: ?Sized + Folder>(folder: &mut F, e: Expr) -> Expr {
    folder.fold_expr(e)
}
// Without "full", `constant::Other` is opaque and passes through unchanged.
#[cfg(not(feature = "full"))]
fn noop_fold_other_const_expr<F: ?Sized + Folder>(_: &mut F,
                                                  e: constant::Other)
                                                  -> constant::Other {
    e
}
/// Identity fold for a literal: leaf node, returned unchanged.
pub fn noop_fold_lit<F: ?Sized + Folder>(_: &mut F, _lit: Lit) -> Lit {
    _lit
}
/// Default fold for a token tree: folds literal/ident/lifetime tokens and
/// recurses into delimited groups; all other tokens pass through unchanged.
pub fn noop_fold_tt<F: ?Sized + Folder>(folder: &mut F, tt: TokenTree) -> TokenTree {
    use TokenTree::*;
    use Token::*;
    match tt {
        Token(token) => {
            Token(match token {
                Literal(lit) => Literal(folder.fold_lit(lit)),
                Ident(ident) => Ident(folder.fold_ident(ident)),
                Lifetime(ident) => Lifetime(folder.fold_ident(ident)),
                x => x,
            })
        }
        Delimited(super::Delimited { delim, tts }) => {
            Delimited(super::Delimited {
                delim: delim,
                tts: tts.lift(|v| noop_fold_tt(folder, v)),
            })
        }
    }
}
/// Default fold for a macro invocation: recurses into its path and each
/// token tree of its body.
pub fn noop_fold_mac<F: ?Sized + Folder>(folder: &mut F, Mac { path, tts }: Mac) -> Mac {
    Mac {
        path: folder.fold_path(path),
        tts: tts.lift(|tt| noop_fold_tt(folder, tt)),
    }
}
/// Default fold for a crate: recurses into its attributes and items; the
/// shebang line is preserved unchanged.
#[cfg(feature = "full")]
pub fn noop_fold_crate<F: ?Sized + Folder>(folder: &mut F,
                                           Crate { shebang, attrs, items }: Crate)
                                           -> Crate {
    Crate {
        shebang: shebang,
        attrs: attrs.lift(|a| folder.fold_attribute(a)),
        items: items.lift(|i| folder.fold_item(i)),
    }
}
/// Default fold for a block: folds each statement in order.
#[cfg(feature = "full")]
pub fn noop_fold_block<F: ?Sized + Folder>(folder: &mut F, block: Block) -> Block {
    Block { stmts: block.stmts.lift(|s| folder.fold_stmt(s)) }
}
fn noop_fold_vis<F: ?Sized + Folder>(folder: &mut F, vis: Visibility) -> Visibility {
use Visibility::*;
match vis {
Crate => Crate,
Inherited => Inherited,
Public => Public,
Restricted(path) => Restricted(path.lift(|p| folder.fold_path(p))),
}
}
/// Default fold for an item: recurses into its name, visibility,
/// attributes, and the children of each `ItemKind` variant.
#[cfg(feature = "full")]
pub fn noop_fold_item<F: ?Sized + Folder>(folder: &mut F,
                                          Item { ident, vis, attrs, node }: Item)
                                          -> Item {
    use ItemKind::*;
    Item {
        // `ident` was destructured by value and is not used again below,
        // so it can be moved into the fold directly (previously cloned
        // redundantly).
        ident: folder.fold_ident(ident),
        vis: noop_fold_vis(folder, vis),
        attrs: attrs.lift(|a| folder.fold_attribute(a)),
        node: match node {
            ExternCrate(name) => ExternCrate(name.map(|i| folder.fold_ident(i))),
            Use(view_path) => Use(Box::new(folder.fold_view_path(*view_path))),
            Static(ty, mutability, expr) => {
                Static(Box::new(folder.fold_ty(*ty)),
                       mutability,
                       expr.lift(|e| folder.fold_expr(e)))
            }
            Const(ty, expr) => {
                Const(ty.lift(|ty| folder.fold_ty(ty)),
                      expr.lift(|e| folder.fold_expr(e)))
            }
            Fn(fn_decl, unsafety, constness, abi, generics, block) => {
                Fn(fn_decl.lift(|v| folder.fold_fn_decl(v)),
                   unsafety,
                   constness,
                   abi,
                   folder.fold_generics(generics),
                   block.lift(|v| folder.fold_block(v)))
            }
            // `mod m;` (no inline body) carries `None` and folds nothing.
            Mod(items) => Mod(items.map(|items| items.lift(|i| folder.fold_item(i)))),
            ForeignMod(super::ForeignMod { abi, items }) => {
                ForeignMod(super::ForeignMod {
                    abi: abi,
                    items: items.lift(|foreign_item| {
                        folder.fold_foreign_item(foreign_item)
                    }),
                })
            }
            Ty(ty, generics) => {
                Ty(ty.lift(|ty| folder.fold_ty(ty)),
                   folder.fold_generics(generics))
            }
            Enum(variants, generics) => {
                Enum(variants.lift(|v| folder.fold_variant(v)),
                     folder.fold_generics(generics))
            }
            Struct(variant_data, generics) => {
                Struct(folder.fold_variant_data(variant_data),
                       folder.fold_generics(generics))
            }
            Union(variant_data, generics) => {
                Union(folder.fold_variant_data(variant_data),
                      folder.fold_generics(generics))
            }
            Trait(unsafety, generics, typbs, trait_items) => {
                Trait(unsafety,
                      folder.fold_generics(generics),
                      typbs.lift(|typb| folder.fold_ty_param_bound(typb)),
                      trait_items.lift(|ti| folder.fold_trait_item(ti)))
            }
            DefaultImpl(unsafety, path) => DefaultImpl(unsafety, folder.fold_path(path)),
            Impl(unsafety, impl_polarity, generics, path, ty, impl_items) => {
                Impl(unsafety,
                     impl_polarity,
                     folder.fold_generics(generics),
                     path.map(|p| folder.fold_path(p)),
                     ty.lift(|ty| folder.fold_ty(ty)),
                     impl_items.lift(|i| folder.fold_impl_item(i)))
            }
            Mac(mac) => Mac(folder.fold_mac(mac)),
        },
    }
}
#[cfg(feature = "full")]
/// Default ("noop") fold for an expression: rebuilds the `Expr` by walking
/// every child node through `folder`, leaving the structure itself unchanged.
/// `lift` maps through the `Box`/`Vec` wrapper without reallocating structure.
pub fn noop_fold_expr<F: ?Sized + Folder>(folder: &mut F, Expr { node, attrs }: Expr) -> Expr {
    use ExprKind::*;
    Expr {
        node: match node {
            // Fully qualified to avoid colliding with the `Box` type in scope.
            ExprKind::Box(e) => ExprKind::Box(e.lift(|e| folder.fold_expr(e))),
            InPlace(place, value) => {
                InPlace(place.lift(|e| folder.fold_expr(e)),
                        value.lift(|e| folder.fold_expr(e)))
            }
            Array(array) => Array(array.lift(|e| folder.fold_expr(e))),
            Call(function, args) => {
                Call(function.lift(|e| folder.fold_expr(e)),
                     args.lift(|e| folder.fold_expr(e)))
            }
            MethodCall(method, tys, args) => {
                MethodCall(folder.fold_ident(method),
                           tys.lift(|t| folder.fold_ty(t)),
                           args.lift(|e| folder.fold_expr(e)))
            }
            Tup(args) => Tup(args.lift(|e| folder.fold_expr(e))),
            Binary(bop, lhs, rhs) => {
                // Operator token itself is not foldable; only the operands are.
                Binary(bop,
                       lhs.lift(|e| folder.fold_expr(e)),
                       rhs.lift(|e| folder.fold_expr(e)))
            }
            Unary(uop, e) => Unary(uop, e.lift(|e| folder.fold_expr(e))),
            Lit(lit) => Lit(folder.fold_lit(lit)),
            Cast(e, ty) => {
                Cast(e.lift(|e| folder.fold_expr(e)),
                     ty.lift(|t| folder.fold_ty(t)))
            }
            Type(e, ty) => {
                Type(e.lift(|e| folder.fold_expr(e)),
                     ty.lift(|t| folder.fold_ty(t)))
            }
            If(e, if_block, else_block) => {
                If(e.lift(|e| folder.fold_expr(e)),
                   folder.fold_block(if_block),
                   else_block.map(|v| v.lift(|e| folder.fold_expr(e))))
            }
            IfLet(pat, expr, block, else_block) => {
                IfLet(pat.lift(|p| folder.fold_pat(p)),
                      expr.lift(|e| folder.fold_expr(e)),
                      folder.fold_block(block),
                      else_block.map(|v| v.lift(|e| folder.fold_expr(e))))
            }
            While(e, block, label) => {
                While(e.lift(|e| folder.fold_expr(e)),
                      folder.fold_block(block),
                      label.map(|i| folder.fold_ident(i)))
            }
            WhileLet(pat, expr, block, label) => {
                WhileLet(pat.lift(|p| folder.fold_pat(p)),
                         expr.lift(|e| folder.fold_expr(e)),
                         folder.fold_block(block),
                         label.map(|i| folder.fold_ident(i)))
            }
            ForLoop(pat, expr, block, label) => {
                ForLoop(pat.lift(|p| folder.fold_pat(p)),
                        expr.lift(|e| folder.fold_expr(e)),
                        folder.fold_block(block),
                        label.map(|i| folder.fold_ident(i)))
            }
            Loop(block, label) => {
                Loop(folder.fold_block(block),
                     label.map(|i| folder.fold_ident(i)))
            }
            Match(e, arms) => {
                // Each arm is folded field-wise: attributes, patterns, optional
                // guard, then the arm body.
                Match(e.lift(|e| folder.fold_expr(e)),
                      arms.lift(|Arm { attrs, pats, guard, body }: Arm| {
                    Arm {
                        attrs: attrs.lift(|a| folder.fold_attribute(a)),
                        pats: pats.lift(|p| folder.fold_pat(p)),
                        guard: guard.map(|v| v.lift(|e| folder.fold_expr(e))),
                        body: body.lift(|e| folder.fold_expr(e)),
                    }
                }))
            }
            Closure(capture_by, fn_decl, expr) => {
                Closure(capture_by,
                        fn_decl.lift(|v| folder.fold_fn_decl(v)),
                        expr.lift(|e| folder.fold_expr(e)))
            }
            Block(unsafety, block) => Block(unsafety, folder.fold_block(block)),
            Assign(lhs, rhs) => {
                Assign(lhs.lift(|e| folder.fold_expr(e)),
                       rhs.lift(|e| folder.fold_expr(e)))
            }
            AssignOp(bop, lhs, rhs) => {
                AssignOp(bop,
                         lhs.lift(|e| folder.fold_expr(e)),
                         rhs.lift(|e| folder.fold_expr(e)))
            }
            Field(expr, name) => Field(expr.lift(|e| folder.fold_expr(e)), folder.fold_ident(name)),
            // Numeric tuple index (`.0`) is passed through unchanged.
            TupField(expr, index) => TupField(expr.lift(|e| folder.fold_expr(e)), index),
            Index(expr, index) => {
                Index(expr.lift(|e| folder.fold_expr(e)),
                      index.lift(|e| folder.fold_expr(e)))
            }
            Range(lhs, rhs, limits) => {
                // Both endpoints are optional (`..`, `a..`, `..b`, `a..b`).
                Range(lhs.map(|v| v.lift(|e| folder.fold_expr(e))),
                      rhs.map(|v| v.lift(|e| folder.fold_expr(e))),
                      limits)
            }
            Path(qself, path) => {
                Path(qself.map(|v| noop_fold_qself(folder, v)),
                     folder.fold_path(path))
            }
            AddrOf(mutability, expr) => AddrOf(mutability, expr.lift(|e| folder.fold_expr(e))),
            Break(label, expr) => {
                Break(label.map(|i| folder.fold_ident(i)),
                      expr.map(|v| v.lift(|e| folder.fold_expr(e))))
            }
            Continue(label) => Continue(label.map(|i| folder.fold_ident(i))),
            Ret(expr) => Ret(expr.map(|v| v.lift(|e| folder.fold_expr(e)))),
            ExprKind::Mac(mac) => ExprKind::Mac(folder.fold_mac(mac)),
            Struct(path, fields, expr) => {
                // `expr` here is the optional base of a functional-record-update
                // (`Foo { a, ..base }`).
                Struct(folder.fold_path(path),
                       fields.lift(|FieldValue { ident, expr, is_shorthand, attrs }: FieldValue| {
                    FieldValue {
                        ident: folder.fold_ident(ident),
                        expr: folder.fold_expr(expr),
                        is_shorthand: is_shorthand,
                        attrs: attrs.lift(|v| folder.fold_attribute(v)),
                    }
                }),
                       expr.map(|v| v.lift(|e| folder.fold_expr(e))))
            }
            Repeat(element, number) => {
                Repeat(element.lift(|e| folder.fold_expr(e)),
                       number.lift(|e| folder.fold_expr(e)))
            }
            Paren(expr) => Paren(expr.lift(|e| folder.fold_expr(e))),
            Try(expr) => Try(expr.lift(|e| folder.fold_expr(e))),
        },
        attrs: attrs.into_iter().map(|a| folder.fold_attribute(a)).collect(),
    }
}
#[cfg(feature = "full")]
pub fn noop_fold_foreign_item<F: ?Sized + Folder>(folder: &mut F,
ForeignItem { ident, attrs, node, vis }: ForeignItem)
-> ForeignItem{
ForeignItem {
ident: folder.fold_ident(ident),
attrs: attrs.into_iter().map(|a| folder.fold_attribute(a)).collect(),
node: match node {
ForeignItemKind::Fn(fn_dcl, generics) => {
ForeignItemKind::Fn(fn_dcl.lift(|v| folder.fold_fn_decl(v)),
folder.fold_generics(generics))
}
ForeignItemKind::Static(ty, mutability) => {
ForeignItemKind::Static(ty.lift(|v| folder.fold_ty(v)), mutability)
}
},
vis: noop_fold_vis(folder, vis),
}
}
#[cfg(feature = "full")]
/// Default fold for a pattern: recursively folds every sub-pattern,
/// identifier, path, and embedded expression, preserving structure.
pub fn noop_fold_pat<F: ?Sized + Folder>(folder: &mut F, pat: Pat) -> Pat {
    use Pat::*;
    match pat {
        Wild => Wild,
        Ident(binding_mode, ident, pat) => {
            // `pat` is the optional sub-pattern of `ident @ subpat`.
            Ident(binding_mode,
                  folder.fold_ident(ident),
                  pat.map(|p| p.lift(|p| folder.fold_pat(p))))
        }
        Struct(path, field_patterns, dots) => {
            // `dots` records whether the pattern ended with `..`.
            Struct(folder.fold_path(path),
                   field_patterns.lift(|FieldPat { ident, pat, is_shorthand, attrs }: FieldPat| {
                FieldPat {
                    ident: folder.fold_ident(ident),
                    pat: pat.lift(|p| folder.fold_pat(p)),
                    is_shorthand: is_shorthand,
                    attrs: attrs.lift(|a| folder.fold_attribute(a)),
                }
            }),
                   dots)
        }
        TupleStruct(path, pats, len) => {
            TupleStruct(folder.fold_path(path),
                        pats.lift(|p| folder.fold_pat(p)),
                        len)
        }
        Path(qself, path) => {
            Path(qself.map(|v| noop_fold_qself(folder, v)),
                 folder.fold_path(path))
        }
        Tuple(pats, len) => Tuple(pats.lift(|p| folder.fold_pat(p)), len),
        Box(b) => Box(b.lift(|p| folder.fold_pat(p))),
        Ref(b, mutability) => Ref(b.lift(|p| folder.fold_pat(p)), mutability),
        Lit(expr) => Lit(expr.lift(|e| folder.fold_expr(e))),
        Range(l, r) => {
            Range(l.lift(|e| folder.fold_expr(e)),
                  r.lift(|e| folder.fold_expr(e)))
        }
        Slice(lefts, pat, rights) => {
            // `[lefts.., pat.., rights..]` — middle `pat` is the optional rest.
            Slice(lefts.lift(|p| folder.fold_pat(p)),
                  pat.map(|v| v.lift(|p| folder.fold_pat(p))),
                  rights.lift(|p| folder.fold_pat(p)))
        }
        Mac(mac) => Mac(folder.fold_mac(mac)),
    }
}
#[cfg(feature = "full")]
/// Default fold for a function declaration: folds each argument (its
/// lifetime, pattern, or type as appropriate) and the return type; the
/// `variadic` flag is carried through unchanged.
pub fn noop_fold_fn_decl<F: ?Sized + Folder>(folder: &mut F,
                                             FnDecl { inputs, output, variadic }: FnDecl)
                                             -> FnDecl {
    FnDecl {
        inputs: inputs.lift(|a| {
            use FnArg::*;
            match a {
                SelfRef(lifetime, mutability) => {
                    // `&'a self` / `&mut self` — only the lifetime is foldable.
                    SelfRef(lifetime.map(|v| folder.fold_lifetime(v)), mutability)
                }
                SelfValue(mutability) => SelfValue(mutability),
                Captured(pat, ty) => Captured(folder.fold_pat(pat), folder.fold_ty(ty)),
                Ignored(ty) => Ignored(folder.fold_ty(ty)),
            }
        }),
        output: folder.fold_fn_ret_ty(output),
        variadic: variadic,
    }
}
#[cfg(feature = "full")]
/// Default fold for an item inside a `trait { ... }` body: folds the
/// identifier, attributes, and the kind-specific payload. Optional parts
/// (default const value, default method body, default type) stay optional.
pub fn noop_fold_trait_item<F: ?Sized + Folder>(folder: &mut F,
                                                TraitItem { ident, attrs, node }: TraitItem)
                                                -> TraitItem {
    use TraitItemKind::*;
    TraitItem {
        ident: folder.fold_ident(ident),
        attrs: attrs.lift(|v| folder.fold_attribute(v)),
        node: match node {
            Const(ty, expr) => Const(folder.fold_ty(ty), expr.map(|v| folder.fold_expr(v))),
            Method(sig, block) => {
                Method(folder.fold_method_sig(sig),
                       block.map(|v| folder.fold_block(v)))
            }
            Type(ty_pbs, ty) => {
                Type(ty_pbs.lift(|v| folder.fold_ty_param_bound(v)),
                     ty.map(|v| folder.fold_ty(v)))
            }
            Macro(mac) => Macro(folder.fold_mac(mac)),
        },
    }
}
#[cfg(feature = "full")]
pub fn noop_fold_impl_item<F: ?Sized + Folder>(folder: &mut F,
ImplItem { ident, vis, defaultness, attrs, node }: ImplItem)
-> ImplItem{
use ImplItemKind::*;
ImplItem {
ident: folder.fold_ident(ident),
vis: noop_fold_vis(folder, vis),
defaultness: defaultness,
attrs: attrs.lift(|v| folder.fold_attribute(v)),
node: match node {
Const(ty, expr) => Const(folder.fold_ty(ty), folder.fold_expr(expr)),
Method(sig, block) => Method(folder.fold_method_sig(sig), folder.fold_block(block)),
Type(ty) => Type(folder.fold_ty(ty)),
Macro(mac) => Macro(folder.fold_mac(mac)),
},
}
}
#[cfg(feature = "full")]
/// Default fold for a method signature: the declaration and generics are
/// folded through `folder`; the qualifier flags (`unsafety`, `constness`,
/// `abi`) are passed through untouched.
pub fn noop_fold_method_sig<F: ?Sized + Folder>(folder: &mut F, sig: MethodSig) -> MethodSig {
    // Destructure in the body rather than in the parameter list so the
    // signature stays readable.
    let MethodSig { unsafety, constness, abi, decl, generics } = sig;
    MethodSig {
        unsafety: unsafety,
        constness: constness,
        abi: abi,
        decl: folder.fold_fn_decl(decl),
        generics: folder.fold_generics(generics),
    }
}
#[cfg(feature = "full")]
/// Default fold for a statement: dispatches to the appropriate fold for a
/// local binding, nested item, (semi-)expression, or macro statement.
pub fn noop_fold_stmt<F: ?Sized + Folder>(folder: &mut F, stmt: Stmt) -> Stmt {
    use Stmt::*;
    match stmt {
        Local(local) => Local(local.lift(|l| folder.fold_local(l))),
        Item(item) => Item(item.lift(|v| folder.fold_item(v))),
        Expr(expr) => Expr(expr.lift(|v| folder.fold_expr(v))),
        Semi(expr) => Semi(expr.lift(|v| folder.fold_expr(v))),
        Mac(mac_stmt) => {
            // The tuple is (macro, brace/semicolon style, attributes); only
            // the macro and attributes are foldable.
            Mac(mac_stmt.lift(|(mac, style, attrs)| {
                (folder.fold_mac(mac),
                 style,
                 attrs.lift(|a| folder.fold_attribute(a)))
            }))
        }
    }
}
#[cfg(feature = "full")]
/// Default fold for a `let` binding: folds the pattern, the optional type
/// ascription, the optional initializer, and any attached attributes.
pub fn noop_fold_local<F: ?Sized + Folder>(folder: &mut F,
                                           Local { pat, ty, init, attrs }: Local)
                                           -> Local {
    Local {
        pat: pat.lift(|v| folder.fold_pat(v)),
        ty: ty.map(|v| v.lift(|t| folder.fold_ty(t))),
        init: init.map(|v| v.lift(|e| folder.fold_expr(e))),
        attrs: attrs.lift(|a| folder.fold_attribute(a)),
    }
}
#[cfg(feature = "full")]
/// Default fold for the path portion of a `use` item: a simple path with
/// optional rename, a glob (`::*`), or a brace list (`::{a, b as c}`).
pub fn noop_fold_view_path<F: ?Sized + Folder>(folder: &mut F, view_path: ViewPath) -> ViewPath {
    use ViewPath::*;
    match view_path {
        Simple(path, ident) => Simple(folder.fold_path(path), ident.map(|i| folder.fold_ident(i))),
        Glob(path) => Glob(folder.fold_path(path)),
        List(path, items) => {
            List(folder.fold_path(path),
                 items.lift(|PathListItem { name, rename }: PathListItem| {
                PathListItem {
                    name: folder.fold_ident(name),
                    rename: rename.map(|i| folder.fold_ident(i)),
                }
            }))
        }
    }
}

513
third_party/rust/syn-0.11.11/src/generics.rs поставляемый
Просмотреть файл

@ -1,513 +0,0 @@
use super::*;
/// Represents lifetimes and type parameters attached to a declaration
/// of a function, enum, trait, etc.
#[derive(Debug, Clone, Eq, PartialEq, Default, Hash)]
pub struct Generics {
    /// Lifetime parameters, e.g. `'a` in `Foo<'a, T>`.
    pub lifetimes: Vec<LifetimeDef>,
    /// Type parameters, e.g. `T` in `Foo<'a, T>`.
    pub ty_params: Vec<TyParam>,
    /// The `where` clause, empty by default.
    pub where_clause: WhereClause,
}
#[cfg(feature = "printing")]
/// Returned by `Generics::split_for_impl`.
///
/// Prints generics suitable for the `impl<...>` position (bounds kept,
/// defaults omitted).
#[derive(Debug)]
pub struct ImplGenerics<'a>(&'a Generics);
#[cfg(feature = "printing")]
/// Returned by `Generics::split_for_impl`.
///
/// Prints only the parameter names, suitable for the type position.
#[derive(Debug)]
pub struct TyGenerics<'a>(&'a Generics);
#[cfg(feature = "printing")]
/// Returned by `TyGenerics::as_turbofish`.
#[derive(Debug)]
pub struct Turbofish<'a>(&'a Generics);
#[cfg(feature = "printing")]
impl Generics {
    /// Split a type's generics into the pieces required for impl'ing a trait
    /// for that type.
    ///
    /// ```
    /// # extern crate syn;
    /// # #[macro_use]
    /// # extern crate quote;
    /// # fn main() {
    /// # let generics: syn::Generics = Default::default();
    /// # let name = syn::Ident::new("MyType");
    /// let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
    /// quote! {
    ///     impl #impl_generics MyTrait for #name #ty_generics #where_clause {
    ///         // ...
    ///     }
    /// }
    /// # ;
    /// # }
    /// ```
    pub fn split_for_impl(&self) -> (ImplGenerics, TyGenerics, &WhereClause) {
        // All three views borrow `self`; they differ only in how they print.
        (ImplGenerics(self), TyGenerics(self), &self.where_clause)
    }
}
#[cfg(feature = "printing")]
impl<'a> TyGenerics<'a> {
    /// Turn a type's generics like `<X, Y>` into a turbofish like `::<X, Y>`.
    pub fn as_turbofish(&self) -> Turbofish {
        Turbofish(self.0)
    }
}
/// A lifetime such as `'a`; the identifier includes the leading apostrophe.
#[derive(Debug, Clone, Eq, PartialEq, Hash, Ord, PartialOrd)]
pub struct Lifetime {
    pub ident: Ident,
}
impl Lifetime {
    /// Create a lifetime from anything convertible to an `Ident`.
    ///
    /// # Panics
    ///
    /// Panics if the identifier does not start with `'`.
    pub fn new<T: Into<Ident>>(t: T) -> Self {
        let id = Ident::new(t);
        if !id.as_ref().starts_with('\'') {
            panic!("lifetime name must start with apostrophe as in \"'a\", \
                   got {:?}",
                  id.as_ref());
        }
        Lifetime { ident: id }
    }
}
/// A lifetime definition, e.g. `'a: 'b+'c+'d`
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct LifetimeDef {
    pub attrs: Vec<Attribute>,
    pub lifetime: Lifetime,
    /// Lifetimes this one must outlive (right of the `:`).
    pub bounds: Vec<Lifetime>,
}
impl LifetimeDef {
    /// Create an unbounded, attribute-free lifetime definition.
    ///
    /// # Panics
    ///
    /// Panics (via `Lifetime::new`) if `t` does not start with `'`.
    pub fn new<T: Into<Ident>>(t: T) -> Self {
        LifetimeDef {
            attrs: Vec::new(),
            lifetime: Lifetime::new(t),
            bounds: Vec::new(),
        }
    }
}
/// A generic type parameter, e.g. `T: Into<String>`.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct TyParam {
    pub attrs: Vec<Attribute>,
    pub ident: Ident,
    /// Trait and lifetime bounds (right of the `:`).
    pub bounds: Vec<TyParamBound>,
    /// Default type, e.g. `String` in `T = String`.
    pub default: Option<Ty>,
}
impl From<Ident> for TyParam {
    /// A bare identifier becomes an unbounded parameter with no default.
    fn from(ident: Ident) -> Self {
        TyParam {
            attrs: vec![],
            ident: ident,
            bounds: vec![],
            default: None,
        }
    }
}
/// The AST represents all type param bounds as types.
/// `typeck::collect::compute_bounds` matches these against
/// the "special" built-in traits (see `middle::lang_items`) and
/// detects Copy, Send and Sync.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum TyParamBound {
    /// A trait bound, possibly relaxed (`?Sized`).
    Trait(PolyTraitRef, TraitBoundModifier),
    /// A lifetime bound, e.g. `'a` in `T: 'a`.
    Region(Lifetime),
}
/// A modifier on a bound, currently this is only used for `?Sized`, where the
/// modifier is `Maybe`. Negative bounds should also be handled here.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum TraitBoundModifier {
    None,
    Maybe,
}
/// A `where` clause in a definition
#[derive(Debug, Clone, Eq, PartialEq, Default, Hash)]
pub struct WhereClause {
    pub predicates: Vec<WherePredicate>,
}
impl WhereClause {
    /// An empty `where` clause (prints as nothing).
    pub fn none() -> Self {
        WhereClause { predicates: Vec::new() }
    }
}
/// A single predicate in a `where` clause
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum WherePredicate {
    /// A type binding, e.g. `for<'c> Foo: Send+Clone+'c`
    BoundPredicate(WhereBoundPredicate),
    /// A lifetime predicate, e.g. `'a: 'b+'c`
    RegionPredicate(WhereRegionPredicate),
    /// An equality predicate (unsupported)
    EqPredicate(WhereEqPredicate),
}
/// A type bound.
///
/// E.g. `for<'c> Foo: Send+Clone+'c`
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct WhereBoundPredicate {
    /// Any lifetimes from a `for` binding
    pub bound_lifetimes: Vec<LifetimeDef>,
    /// The type being bounded
    pub bounded_ty: Ty,
    /// Trait and lifetime bounds (`Clone+Send+'static`)
    pub bounds: Vec<TyParamBound>,
}
/// A lifetime predicate.
///
/// E.g. `'a: 'b+'c`
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct WhereRegionPredicate {
    pub lifetime: Lifetime,
    /// Lifetimes `lifetime` must outlive.
    pub bounds: Vec<Lifetime>,
}
/// An equality predicate (unsupported).
///
/// E.g. `T=int`
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct WhereEqPredicate {
    pub lhs_ty: Ty,
    pub rhs_ty: Ty,
}
#[cfg(feature = "parsing")]
/// nom-style (`synom`) parsers for generics, lifetimes, bounds, and
/// `where` clauses. Each `named!` macro defines a `fn(&str) -> IResult`.
pub mod parsing {
    use super::*;
    use attr::parsing::outer_attr;
    use ident::parsing::ident;
    use ty::parsing::{ty, poly_trait_ref};
    // `<'a, T, ...>` or nothing; the where clause is parsed separately and
    // therefore defaulted here.
    named!(pub generics -> Generics, map!(
        alt!(
            do_parse!(
                punct!("<") >>
                lifetimes: separated_list!(punct!(","), lifetime_def) >>
                ty_params: opt_vec!(preceded!(
                    // Separating comma only if lifetimes came first.
                    cond!(!lifetimes.is_empty(), punct!(",")),
                    separated_nonempty_list!(punct!(","), ty_param)
                )) >>
                // Optional trailing comma.
                cond!(!lifetimes.is_empty() || !ty_params.is_empty(), option!(punct!(","))) >>
                punct!(">") >>
                (lifetimes, ty_params)
            )
            |
            epsilon!() => { |_| (Vec::new(), Vec::new()) }
        ),
        |(lifetimes, ty_params)| Generics {
            lifetimes: lifetimes,
            ty_params: ty_params,
            where_clause: Default::default(),
        }
    ));
    // `'ident` or `'static`; the apostrophe is folded into the stored ident.
    named!(pub lifetime -> Lifetime, preceded!(
        punct!("'"),
        alt!(
            map!(ident, |id| Lifetime {
                ident: format!("'{}", id).into(),
            })
            |
            map!(keyword!("static"), |_| Lifetime {
                ident: "'static".into(),
            })
        )
    ));
    // `#[attr] 'a: 'b + 'c` with optional bounds.
    named!(pub lifetime_def -> LifetimeDef, do_parse!(
        attrs: many0!(outer_attr) >>
        life: lifetime >>
        bounds: opt_vec!(preceded!(
            punct!(":"),
            separated_list!(punct!("+"), lifetime)
        )) >>
        (LifetimeDef {
            attrs: attrs,
            lifetime: life,
            bounds: bounds,
        })
    ));
    // Optional higher-ranked binder `for<'a, 'b>`; empty vec when absent.
    named!(pub bound_lifetimes -> Vec<LifetimeDef>, opt_vec!(do_parse!(
        keyword!("for") >>
        punct!("<") >>
        lifetimes: terminated_list!(punct!(","), lifetime_def) >>
        punct!(">") >>
        (lifetimes)
    )));
    // `#[attr] T: Bound + 'a = Default`; bounds and default both optional.
    named!(ty_param -> TyParam, do_parse!(
        attrs: many0!(outer_attr) >>
        id: ident >>
        bounds: opt_vec!(preceded!(
            punct!(":"),
            separated_nonempty_list!(punct!("+"), ty_param_bound)
        )) >>
        default: option!(preceded!(
            punct!("="),
            ty
        )) >>
        (TyParam {
            attrs: attrs,
            ident: id,
            bounds: bounds,
            default: default,
        })
    ));
    // `?Trait`, a lifetime, or a plain trait bound — tried in that order.
    named!(pub ty_param_bound -> TyParamBound, alt!(
        preceded!(punct!("?"), poly_trait_ref) => {
            |poly| TyParamBound::Trait(poly, TraitBoundModifier::Maybe)
        }
        |
        lifetime => { TyParamBound::Region }
        |
        poly_trait_ref => {
            |poly| TyParamBound::Trait(poly, TraitBoundModifier::None)
        }
    ));
    // `where P1, P2,` (optional trailing comma) or nothing at all.
    named!(pub where_clause -> WhereClause, alt!(
        do_parse!(
            keyword!("where") >>
            predicates: separated_nonempty_list!(punct!(","), where_predicate) >>
            option!(punct!(",")) >>
            (WhereClause { predicates: predicates })
        )
        |
        epsilon!() => { |_| Default::default() }
    ));
    // Either a lifetime predicate (`'a: 'b+'c`) or a type-bound predicate
    // (`for<'a> T: Bound`); equality predicates are not parsed.
    named!(where_predicate -> WherePredicate, alt!(
        do_parse!(
            ident: lifetime >>
            bounds: opt_vec!(preceded!(
                punct!(":"),
                separated_list!(punct!("+"), lifetime)
            )) >>
            (WherePredicate::RegionPredicate(WhereRegionPredicate {
                lifetime: ident,
                bounds: bounds,
            }))
        )
        |
        do_parse!(
            bound_lifetimes: bound_lifetimes >>
            bounded_ty: ty >>
            punct!(":") >>
            bounds: separated_nonempty_list!(punct!("+"), ty_param_bound) >>
            (WherePredicate::BoundPredicate(WhereBoundPredicate {
                bound_lifetimes: bound_lifetimes,
                bounded_ty: bounded_ty,
                bounds: bounds,
            }))
        )
    ));
}
#[cfg(feature = "printing")]
/// `quote::ToTokens` implementations that print generics and `where`
/// clauses back out as token streams.
mod printing {
    use super::*;
    use attr::FilterAttrs;
    use quote::{Tokens, ToTokens};
    impl ToTokens for Generics {
        // Full form: lifetimes then type params, with bounds and defaults
        // (the latter printed by `TyParam::to_tokens`).
        fn to_tokens(&self, tokens: &mut Tokens) {
            let has_lifetimes = !self.lifetimes.is_empty();
            let has_ty_params = !self.ty_params.is_empty();
            if has_lifetimes || has_ty_params {
                tokens.append("<");
                tokens.append_separated(&self.lifetimes, ",");
                if has_lifetimes && has_ty_params {
                    tokens.append(",");
                }
                tokens.append_separated(&self.ty_params, ",");
                tokens.append(">");
            }
        }
    }
    impl<'a> ToTokens for ImplGenerics<'a> {
        fn to_tokens(&self, tokens: &mut Tokens) {
            let has_lifetimes = !self.0.lifetimes.is_empty();
            let has_ty_params = !self.0.ty_params.is_empty();
            if has_lifetimes || has_ty_params {
                tokens.append("<");
                tokens.append_separated(&self.0.lifetimes, ",");
                // Leave off the type parameter defaults
                for (i, ty_param) in self.0
                        .ty_params
                        .iter()
                        .enumerate() {
                    if i > 0 || has_lifetimes {
                        tokens.append(",");
                    }
                    tokens.append_all(ty_param.attrs.outer());
                    ty_param.ident.to_tokens(tokens);
                    if !ty_param.bounds.is_empty() {
                        tokens.append(":");
                        tokens.append_separated(&ty_param.bounds, "+");
                    }
                }
                tokens.append(">");
            }
        }
    }
    impl<'a> ToTokens for TyGenerics<'a> {
        fn to_tokens(&self, tokens: &mut Tokens) {
            let has_lifetimes = !self.0.lifetimes.is_empty();
            let has_ty_params = !self.0.ty_params.is_empty();
            if has_lifetimes || has_ty_params {
                tokens.append("<");
                // Leave off the lifetime bounds and attributes
                let lifetimes = self.0
                    .lifetimes
                    .iter()
                    .map(|ld| &ld.lifetime);
                tokens.append_separated(lifetimes, ",");
                if has_lifetimes && has_ty_params {
                    tokens.append(",");
                }
                // Leave off the type parameter bounds, defaults, and attributes
                let ty_params = self.0
                    .ty_params
                    .iter()
                    .map(|tp| &tp.ident);
                tokens.append_separated(ty_params, ",");
                tokens.append(">");
            }
        }
    }
    impl<'a> ToTokens for Turbofish<'a> {
        // Same as TyGenerics but prefixed with `::`; prints nothing when
        // there are no parameters at all.
        fn to_tokens(&self, tokens: &mut Tokens) {
            let has_lifetimes = !self.0.lifetimes.is_empty();
            let has_ty_params = !self.0.ty_params.is_empty();
            if has_lifetimes || has_ty_params {
                tokens.append("::");
                TyGenerics(self.0).to_tokens(tokens);
            }
        }
    }
    impl ToTokens for Lifetime {
        fn to_tokens(&self, tokens: &mut Tokens) {
            self.ident.to_tokens(tokens);
        }
    }
    impl ToTokens for LifetimeDef {
        fn to_tokens(&self, tokens: &mut Tokens) {
            tokens.append_all(self.attrs.outer());
            self.lifetime.to_tokens(tokens);
            if !self.bounds.is_empty() {
                tokens.append(":");
                tokens.append_separated(&self.bounds, "+");
            }
        }
    }
    impl ToTokens for TyParam {
        fn to_tokens(&self, tokens: &mut Tokens) {
            tokens.append_all(self.attrs.outer());
            self.ident.to_tokens(tokens);
            if !self.bounds.is_empty() {
                tokens.append(":");
                tokens.append_separated(&self.bounds, "+");
            }
            if let Some(ref default) = self.default {
                tokens.append("=");
                default.to_tokens(tokens);
            }
        }
    }
    impl ToTokens for TyParamBound {
        fn to_tokens(&self, tokens: &mut Tokens) {
            match *self {
                TyParamBound::Region(ref lifetime) => lifetime.to_tokens(tokens),
                TyParamBound::Trait(ref trait_ref, modifier) => {
                    match modifier {
                        TraitBoundModifier::None => {}
                        TraitBoundModifier::Maybe => tokens.append("?"),
                    }
                    trait_ref.to_tokens(tokens);
                }
            }
        }
    }
    impl ToTokens for WhereClause {
        // An empty clause prints as nothing, matching `WhereClause::none()`.
        fn to_tokens(&self, tokens: &mut Tokens) {
            if !self.predicates.is_empty() {
                tokens.append("where");
                tokens.append_separated(&self.predicates, ",");
            }
        }
    }
    impl ToTokens for WherePredicate {
        fn to_tokens(&self, tokens: &mut Tokens) {
            match *self {
                WherePredicate::BoundPredicate(ref predicate) => {
                    predicate.to_tokens(tokens);
                }
                WherePredicate::RegionPredicate(ref predicate) => {
                    predicate.to_tokens(tokens);
                }
                WherePredicate::EqPredicate(ref predicate) => {
                    predicate.to_tokens(tokens);
                }
            }
        }
    }
    impl ToTokens for WhereBoundPredicate {
        fn to_tokens(&self, tokens: &mut Tokens) {
            if !self.bound_lifetimes.is_empty() {
                tokens.append("for");
                tokens.append("<");
                tokens.append_separated(&self.bound_lifetimes, ",");
                tokens.append(">");
            }
            self.bounded_ty.to_tokens(tokens);
            if !self.bounds.is_empty() {
                tokens.append(":");
                tokens.append_separated(&self.bounds, "+");
            }
        }
    }
    impl ToTokens for WhereRegionPredicate {
        fn to_tokens(&self, tokens: &mut Tokens) {
            self.lifetime.to_tokens(tokens);
            if !self.bounds.is_empty() {
                tokens.append(":");
                tokens.append_separated(&self.bounds, "+");
            }
        }
    }
    impl ToTokens for WhereEqPredicate {
        fn to_tokens(&self, tokens: &mut Tokens) {
            self.lhs_ty.to_tokens(tokens);
            tokens.append("=");
            self.rhs_ty.to_tokens(tokens);
        }
    }
}

129
third_party/rust/syn-0.11.11/src/ident.rs поставляемый
Просмотреть файл

@ -1,129 +0,0 @@
use std::borrow::Cow;
use std::fmt::{self, Display};
/// A word of Rust source: identifiers, keywords, and lifetimes are all
/// stored as `Ident`. Thin newtype over `String`.
#[derive(Debug, Clone, Eq, Hash, Ord, PartialOrd)]
pub struct Ident(String);
impl Ident {
    /// Convenience constructor; equivalent to `t.into()`.
    pub fn new<T: Into<Ident>>(t: T) -> Self {
        t.into()
    }
}
// Conversions into `Ident` from the common string-ish types, plus `usize`
// (used for tuple-field indices like `.0`).
impl<'a> From<&'a str> for Ident {
    fn from(s: &str) -> Self {
        Ident(s.to_owned())
    }
}
impl<'a> From<Cow<'a, str>> for Ident {
    fn from(s: Cow<'a, str>) -> Self {
        Ident(s.into_owned())
    }
}
impl From<String> for Ident {
    fn from(s: String) -> Self {
        Ident(s)
    }
}
impl From<usize> for Ident {
    fn from(u: usize) -> Self {
        Ident(u.to_string())
    }
}
impl AsRef<str> for Ident {
    fn as_ref(&self) -> &str {
        &self.0
    }
}
impl Display for Ident {
    // Prints the bare identifier text, no quoting.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        self.0.fmt(formatter)
    }
}
// Compare directly against anything string-like (`id == "foo"`).
impl<T: ?Sized> PartialEq<T> for Ident
    where T: AsRef<str>
{
    fn eq(&self, other: &T) -> bool {
        self.0 == other.as_ref()
    }
}
#[cfg(feature = "parsing")]
/// Hand-written identifier parsers (not `named!` macros) because they need
/// character-level control via `unicode-xid`.
pub mod parsing {
    use super::*;
    use synom::IResult;
    use synom::space::skip_whitespace;
    use unicode_xid::UnicodeXID;
    /// Parse an identifier, rejecting reserved words.
    pub fn ident(input: &str) -> IResult<&str, Ident> {
        let (rest, id) = match word(input) {
            IResult::Done(rest, id) => (rest, id),
            IResult::Error => return IResult::Error,
        };
        match id.as_ref() {
            // From https://doc.rust-lang.org/grammar.html#keywords
            "abstract" | "alignof" | "as" | "become" | "box" | "break" | "const" | "continue" |
            "crate" | "do" | "else" | "enum" | "extern" | "false" | "final" | "fn" | "for" |
            "if" | "impl" | "in" | "let" | "loop" | "macro" | "match" | "mod" | "move" |
            "mut" | "offsetof" | "override" | "priv" | "proc" | "pub" | "pure" | "ref" |
            "return" | "Self" | "self" | "sizeof" | "static" | "struct" | "super" | "trait" |
            "true" | "type" | "typeof" | "unsafe" | "unsized" | "use" | "virtual" | "where" |
            "while" | "yield" => IResult::Error,
            _ => IResult::Done(rest, id),
        }
    }
    /// Parse an XID word: keywords are allowed here, unlike `ident`.
    pub fn word(mut input: &str) -> IResult<&str, Ident> {
        input = skip_whitespace(input);
        let mut chars = input.char_indices();
        // First char must be XID_Start or `_`.
        match chars.next() {
            Some((_, ch)) if UnicodeXID::is_xid_start(ch) || ch == '_' => {}
            _ => return IResult::Error,
        }
        for (i, ch) in chars {
            if !UnicodeXID::is_xid_continue(ch) {
                return IResult::Done(&input[i..], input[..i].into());
            }
        }
        // Entire remaining input was one word.
        IResult::Done("", input.into())
    }
    #[cfg(feature = "full")]
    /// Looser variant: accepts any run of XID characters (e.g. `123` in a
    /// tuple index position), failing only on an empty match.
    pub fn wordlike(mut input: &str) -> IResult<&str, Ident> {
        input = skip_whitespace(input);
        for (i, ch) in input.char_indices() {
            if !UnicodeXID::is_xid_start(ch) && !UnicodeXID::is_xid_continue(ch) {
                return if i == 0 {
                    IResult::Error
                } else {
                    IResult::Done(&input[i..], input[..i].into())
                };
            }
        }
        IResult::Done("", input.into())
    }
}
#[cfg(feature = "printing")]
/// `ToTokens` support: an identifier is emitted as a single raw token.
mod printing {
    use super::*;
    use quote::{Tokens, ToTokens};
    impl ToTokens for Ident {
        fn to_tokens(&self, tokens: &mut Tokens) {
            tokens.append(self.as_ref())
        }
    }
}

1477
third_party/rust/syn-0.11.11/src/item.rs поставляемый

Разница между файлами не показана из-за своего большого размера Загрузить разницу

57
third_party/rust/syn-0.11.11/src/krate.rs поставляемый
Просмотреть файл

@ -1,57 +0,0 @@
use super::*;
/// A whole source file: optional shebang line, inner attributes
/// (`#![...]`), and the top-level items.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct Crate {
    /// `#!...` first line, if present (stored without trailing newline).
    pub shebang: Option<String>,
    pub attrs: Vec<Attribute>,
    pub items: Vec<Item>,
}
#[cfg(feature = "parsing")]
/// Parser for an entire crate/file.
pub mod parsing {
    use super::*;
    use attr::parsing::inner_attr;
    use item::parsing::items;
    named!(pub krate -> Crate, do_parse!(
        // A UTF-8 BOM may precede everything and is discarded.
        option!(byte_order_mark) >>
        shebang: option!(shebang) >>
        attrs: many0!(inner_attr) >>
        items: items >>
        (Crate {
            shebang: shebang,
            attrs: attrs,
            items: items,
        })
    ));
    named!(byte_order_mark -> &str, tag!("\u{feff}"));
    // `#!` followed by anything but `[` (which would be an inner attribute),
    // up to end of line.
    named!(shebang -> String, do_parse!(
        tag!("#!") >>
        not!(tag!("[")) >>
        content: take_until!("\n") >>
        (format!("#!{}", content))
    ));
}
#[cfg(feature = "printing")]
/// Prints a crate back out: shebang first, then inner attributes, then items.
mod printing {
    use super::*;
    use attr::FilterAttrs;
    use quote::{Tokens, ToTokens};
    impl ToTokens for Crate {
        fn to_tokens(&self, tokens: &mut Tokens) {
            if let Some(ref shebang) = self.shebang {
                // Newline keeps the shebang on its own line.
                tokens.append(&format!("{}\n", shebang));
            }
            for attr in self.attrs.inner() {
                attr.to_tokens(tokens);
            }
            for item in &self.items {
                item.to_tokens(tokens);
            }
        }
    }
}

211
third_party/rust/syn-0.11.11/src/lib.rs поставляемый
Просмотреть файл

@ -1,211 +0,0 @@
#![doc(html_root_url = "https://dtolnay.github.io/syn")]
#![cfg_attr(feature = "cargo-clippy", allow(large_enum_variant))]
#[cfg(feature = "printing")]
extern crate quote;
#[cfg(feature = "parsing")]
extern crate unicode_xid;
#[cfg(feature = "parsing")]
#[macro_use]
extern crate synom;
#[cfg(feature = "aster")]
pub mod aster;
mod attr;
pub use attr::{Attribute, AttrStyle, MetaItem, NestedMetaItem};
mod constant;
pub use constant::ConstExpr;
mod data;
pub use data::{Field, Variant, VariantData, Visibility};
#[cfg(feature = "parsing")]
mod escape;
#[cfg(feature = "full")]
mod expr;
#[cfg(feature = "full")]
pub use expr::{Arm, BindingMode, Block, CaptureBy, Expr, ExprKind, FieldPat, FieldValue, Local,
MacStmtStyle, Pat, RangeLimits, Stmt};
mod generics;
pub use generics::{Generics, Lifetime, LifetimeDef, TraitBoundModifier, TyParam, TyParamBound,
WhereBoundPredicate, WhereClause, WhereEqPredicate, WherePredicate,
WhereRegionPredicate};
#[cfg(feature = "printing")]
pub use generics::{ImplGenerics, Turbofish, TyGenerics};
mod ident;
pub use ident::Ident;
#[cfg(feature = "full")]
mod item;
#[cfg(feature = "full")]
pub use item::{Constness, Defaultness, FnArg, FnDecl, ForeignItemKind, ForeignItem, ForeignMod,
ImplItem, ImplItemKind, ImplPolarity, Item, ItemKind, MethodSig, PathListItem,
TraitItem, TraitItemKind, ViewPath};
#[cfg(feature = "full")]
mod krate;
#[cfg(feature = "full")]
pub use krate::Crate;
mod lit;
pub use lit::{FloatTy, IntTy, Lit, StrStyle};
#[cfg(feature = "parsing")]
pub use lit::{ByteStrLit, FloatLit, IntLit, StrLit};
mod mac;
pub use mac::{BinOpToken, DelimToken, Delimited, Mac, Token, TokenTree};
mod derive;
pub use derive::{Body, DeriveInput};
// Deprecated. Use `DeriveInput` instead.
#[doc(hidden)]
pub type MacroInput = DeriveInput;
mod op;
pub use op::{BinOp, UnOp};
mod ty;
pub use ty::{Abi, AngleBracketedParameterData, BareFnArg, BareFnTy, FunctionRetTy, MutTy,
Mutability, ParenthesizedParameterData, Path, PathParameters, PathSegment,
PolyTraitRef, QSelf, Ty, TypeBinding, Unsafety};
#[cfg(feature = "visit")]
pub mod visit;
#[cfg(feature = "fold")]
pub mod fold;
#[cfg(feature = "parsing")]
pub use parsing::*;
#[cfg(feature = "parsing")]
/// Public entry points of the crate: each wraps an internal `synom` parser
/// and converts its `IResult` into `Result<T, String>`, requiring the whole
/// input (minus trailing whitespace) to be consumed.
mod parsing {
    use super::*;
    use {derive, generics, ident, mac, ty, attr};
    use synom::{space, IResult};
    #[cfg(feature = "full")]
    use {expr, item, krate};
    pub fn parse_derive_input(input: &str) -> Result<DeriveInput, String> {
        unwrap("derive input", derive::parsing::derive_input, input)
    }
    #[cfg(feature = "full")]
    pub fn parse_crate(input: &str) -> Result<Crate, String> {
        unwrap("crate", krate::parsing::krate, input)
    }
    #[cfg(feature = "full")]
    pub fn parse_item(input: &str) -> Result<Item, String> {
        unwrap("item", item::parsing::item, input)
    }
    #[cfg(feature = "full")]
    pub fn parse_items(input: &str) -> Result<Vec<Item>, String> {
        unwrap("items", item::parsing::items, input)
    }
    #[cfg(feature = "full")]
    pub fn parse_expr(input: &str) -> Result<Expr, String> {
        unwrap("expression", expr::parsing::expr, input)
    }
    pub fn parse_type(input: &str) -> Result<Ty, String> {
        unwrap("type", ty::parsing::ty, input)
    }
    pub fn parse_path(input: &str) -> Result<Path, String> {
        unwrap("path", ty::parsing::path, input)
    }
    pub fn parse_where_clause(input: &str) -> Result<WhereClause, String> {
        unwrap("where clause", generics::parsing::where_clause, input)
    }
    pub fn parse_token_trees(input: &str) -> Result<Vec<TokenTree>, String> {
        unwrap("token trees", mac::parsing::token_trees, input)
    }
    pub fn parse_ident(input: &str) -> Result<Ident, String> {
        unwrap("identifier", ident::parsing::ident, input)
    }
    pub fn parse_ty_param_bound(input: &str) -> Result<TyParamBound, String> {
        unwrap("type parameter bound",
               generics::parsing::ty_param_bound,
               input)
    }
    pub fn parse_outer_attr(input: &str) -> Result<Attribute, String> {
        unwrap("outer attribute", attr::parsing::outer_attr, input)
    }
    #[cfg(feature = "full")]
    pub fn parse_inner_attr(input: &str) -> Result<Attribute, String> {
        unwrap("inner attribute", attr::parsing::inner_attr, input)
    }
    // Deprecated. Use `parse_derive_input` instead.
    #[doc(hidden)]
    pub fn parse_macro_input(input: &str) -> Result<MacroInput, String> {
        parse_derive_input(input)
    }
    /// Run parser `f` on `input`, demanding that nothing but whitespace
    /// remain afterwards; `name` is used only for the error messages.
    fn unwrap<T>(name: &'static str,
                 f: fn(&str) -> IResult<&str, T>,
                 input: &str)
                 -> Result<T, String> {
        match f(input) {
            IResult::Done(mut rest, t) => {
                rest = space::skip_whitespace(rest);
                if rest.is_empty() {
                    Ok(t)
                } else if rest.len() == input.len() {
                    // parsed nothing
                    Err(format!("failed to parse {}: {:?}", name, rest))
                } else {
                    Err(format!("unparsed tokens after {}: {:?}", name, rest))
                }
            }
            IResult::Error => Err(format!("failed to parse {}: {:?}", name, input)),
        }
    }
}
#[cfg(feature = "parsing")]
pub mod parse {
    //! This module contains a set of exported nom parsers which can be used to
    //! parse custom grammars when used alongside the `synom` crate.
    //!
    //! Internally, `syn` uses a fork of `nom` called `synom` which resolves a
    //! persistent pitfall of using `nom` to parse Rust by eliminating the
    //! `IResult::Incomplete` variant. The `synom` crate should be used instead
    //! of `nom` when working with the parsers in this module.
    pub use synom::IResult;
    // Re-exports only; the implementations live in the sibling modules.
    #[cfg(feature = "full")]
    pub use item::parsing::item;
    #[cfg(feature = "full")]
    pub use expr::parsing::{expr, pat, block, stmt};
    pub use lit::parsing::{lit, string, byte_string, byte, character, float, int, boolean};
    pub use ty::parsing::{ty, path};
    pub use mac::parsing::token_tree as tt;
    pub use ident::parsing::ident;
    pub use generics::parsing::lifetime;
}

484
third_party/rust/syn-0.11.11/src/lit.rs поставляемый
Просмотреть файл

@ -1,484 +0,0 @@
/// Literal kind.
///
/// E.g. `"foo"`, `42`, `12.34` or `bool`
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum Lit {
    /// A string literal (`"foo"`)
    Str(String, StrStyle),
    /// A byte string (`b"foo"`)
    ByteStr(Vec<u8>, StrStyle),
    /// A byte char (`b'f'`)
    Byte(u8),
    /// A character literal (`'a'`)
    Char(char),
    /// An integer literal (`1`)
    Int(u64, IntTy),
    /// A float literal (`1f64` or `1E10f64` or `1.0E10`)
    ///
    /// Stored as text so no precision is lost.
    Float(String, FloatTy),
    /// A boolean literal
    Bool(bool),
}
/// Quoting style of a (byte) string literal.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum StrStyle {
    /// A regular string, like `"foo"`
    Cooked,
    /// A raw string, like `r##"foo"##`
    ///
    /// The uint is the number of `#` symbols used
    Raw(usize),
}
// Ergonomic conversions into `Lit`; all string-ish inputs become the
// `Cooked` (non-raw) style.
impl From<String> for Lit {
    fn from(input: String) -> Lit {
        Lit::Str(input, StrStyle::Cooked)
    }
}
impl<'a> From<&'a str> for Lit {
    fn from(input: &str) -> Lit {
        Lit::Str(input.into(), StrStyle::Cooked)
    }
}
impl From<Vec<u8>> for Lit {
    fn from(input: Vec<u8>) -> Lit {
        Lit::ByteStr(input, StrStyle::Cooked)
    }
}
impl<'a> From<&'a [u8]> for Lit {
    fn from(input: &[u8]) -> Lit {
        Lit::ByteStr(input.into(), StrStyle::Cooked)
    }
}
impl From<char> for Lit {
    fn from(input: char) -> Lit {
        Lit::Char(input)
    }
}
impl From<bool> for Lit {
    fn from(input: bool) -> Lit {
        Lit::Bool(input)
    }
}
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum IntTy {
Isize,
I8,
I16,
I32,
I64,
Usize,
U8,
U16,
U32,
U64,
Unsuffixed,
}
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum FloatTy {
F32,
F64,
Unsuffixed,
}
macro_rules! impl_from_for_lit {
(Int, [$($rust_type:ty => $syn_type:expr),+]) => {
$(
impl From<$rust_type> for Lit {
fn from(input: $rust_type) -> Lit {
Lit::Int(input as u64, $syn_type)
}
}
)+
};
(Float, [$($rust_type:ty => $syn_type:expr),+]) => {
$(
impl From<$rust_type> for Lit {
fn from(input: $rust_type) -> Lit {
Lit::Float(format!("{}", input), $syn_type)
}
}
)+
};
}
impl_from_for_lit! {Int, [
isize => IntTy::Isize,
i8 => IntTy::I8,
i16 => IntTy::I16,
i32 => IntTy::I32,
i64 => IntTy::I64,
usize => IntTy::Usize,
u8 => IntTy::U8,
u16 => IntTy::U16,
u32 => IntTy::U32,
u64 => IntTy::U64
]}
impl_from_for_lit! {Float, [
f32 => FloatTy::F32,
f64 => FloatTy::F64
]}
#[cfg(feature = "parsing")]
#[derive(Debug, Clone)]
pub struct StrLit {
pub value: String,
pub style: StrStyle,
}
#[cfg(feature = "parsing")]
#[derive(Debug, Clone)]
pub struct ByteStrLit {
pub value: Vec<u8>,
pub style: StrStyle,
}
#[cfg(feature = "parsing")]
#[derive(Debug, Clone)]
pub struct IntLit {
pub value: u64,
pub suffix: IntTy,
}
#[cfg(feature = "parsing")]
#[derive(Debug, Clone)]
pub struct FloatLit {
pub value: String,
pub suffix: FloatTy,
}
#[cfg(feature = "parsing")]
pub mod parsing {
use super::*;
use escape::{cooked_byte, cooked_byte_string, cooked_char, cooked_string, raw_string};
use synom::space::skip_whitespace;
use synom::IResult;
use unicode_xid::UnicodeXID;
named!(pub lit -> Lit, alt!(
string => { |StrLit { value, style }| Lit::Str(value, style) }
|
byte_string => { |ByteStrLit { value, style }| Lit::ByteStr(value, style) }
|
byte => { |b| Lit::Byte(b) }
|
character => { |ch| Lit::Char(ch) }
|
float => { |FloatLit { value, suffix }| Lit::Float(value, suffix) } // must be before int
|
int => { |IntLit { value, suffix }| Lit::Int(value, suffix) }
|
boolean => { |value| Lit::Bool(value) }
));
named!(pub string -> StrLit, alt!(
quoted_string => { |s| StrLit { value: s, style: StrStyle::Cooked } }
|
preceded!(
punct!("r"),
raw_string
) => { |(s, n)| StrLit { value: s, style: StrStyle::Raw(n) }}
));
named!(pub quoted_string -> String, delimited!(
punct!("\""),
cooked_string,
tag!("\"")
));
named!(pub byte_string -> ByteStrLit, alt!(
delimited!(
punct!("b\""),
cooked_byte_string,
tag!("\"")
) => { |vec| ByteStrLit { value: vec, style: StrStyle::Cooked } }
|
preceded!(
punct!("br"),
raw_string
) => { |(s, n): (String, _)| ByteStrLit { value: s.into_bytes(), style: StrStyle::Raw(n) } }
));
named!(pub byte -> u8, do_parse!(
punct!("b") >>
tag!("'") >>
b: cooked_byte >>
tag!("'") >>
(b)
));
named!(pub character -> char, do_parse!(
punct!("'") >>
ch: cooked_char >>
tag!("'") >>
(ch)
));
named!(pub float -> FloatLit, do_parse!(
value: float_string >>
suffix: alt!(
tag!("f32") => { |_| FloatTy::F32 }
|
tag!("f64") => { |_| FloatTy::F64 }
|
epsilon!() => { |_| FloatTy::Unsuffixed }
) >>
(FloatLit { value: value, suffix: suffix })
));
named!(pub int -> IntLit, do_parse!(
value: digits >>
suffix: alt!(
tag!("isize") => { |_| IntTy::Isize }
|
tag!("i8") => { |_| IntTy::I8 }
|
tag!("i16") => { |_| IntTy::I16 }
|
tag!("i32") => { |_| IntTy::I32 }
|
tag!("i64") => { |_| IntTy::I64 }
|
tag!("usize") => { |_| IntTy::Usize }
|
tag!("u8") => { |_| IntTy::U8 }
|
tag!("u16") => { |_| IntTy::U16 }
|
tag!("u32") => { |_| IntTy::U32 }
|
tag!("u64") => { |_| IntTy::U64 }
|
epsilon!() => { |_| IntTy::Unsuffixed }
) >>
(IntLit { value: value, suffix: suffix })
));
named!(pub boolean -> bool, alt!(
keyword!("true") => { |_| true }
|
keyword!("false") => { |_| false }
));
fn float_string(mut input: &str) -> IResult<&str, String> {
input = skip_whitespace(input);
let mut chars = input.chars().peekable();
match chars.next() {
Some(ch) if ch >= '0' && ch <= '9' => {}
_ => return IResult::Error,
}
let mut len = 1;
let mut has_dot = false;
let mut has_exp = false;
while let Some(&ch) = chars.peek() {
match ch {
'0'...'9' | '_' => {
chars.next();
len += 1;
}
'.' => {
if has_dot {
break;
}
chars.next();
if chars.peek()
.map(|&ch| ch == '.' || UnicodeXID::is_xid_start(ch))
.unwrap_or(false) {
return IResult::Error;
}
len += 1;
has_dot = true;
}
'e' | 'E' => {
chars.next();
len += 1;
has_exp = true;
break;
}
_ => break,
}
}
let rest = &input[len..];
if !(has_dot || has_exp || rest.starts_with("f32") || rest.starts_with("f64")) {
return IResult::Error;
}
if has_exp {
let mut has_exp_value = false;
while let Some(&ch) = chars.peek() {
match ch {
'+' | '-' => {
if has_exp_value {
break;
}
chars.next();
len += 1;
}
'0'...'9' => {
chars.next();
len += 1;
has_exp_value = true;
}
'_' => {
chars.next();
len += 1;
}
_ => break,
}
}
if !has_exp_value {
return IResult::Error;
}
}
IResult::Done(&input[len..], input[..len].replace("_", ""))
}
pub fn digits(mut input: &str) -> IResult<&str, u64> {
input = skip_whitespace(input);
let base = if input.starts_with("0x") {
input = &input[2..];
16
} else if input.starts_with("0o") {
input = &input[2..];
8
} else if input.starts_with("0b") {
input = &input[2..];
2
} else {
10
};
let mut value = 0u64;
let mut len = 0;
let mut empty = true;
for b in input.bytes() {
let digit = match b {
b'0'...b'9' => (b - b'0') as u64,
b'a'...b'f' => 10 + (b - b'a') as u64,
b'A'...b'F' => 10 + (b - b'A') as u64,
b'_' => {
if empty && base == 10 {
return IResult::Error;
}
len += 1;
continue;
}
_ => break,
};
if digit >= base {
return IResult::Error;
}
value = match value.checked_mul(base) {
Some(value) => value,
None => return IResult::Error,
};
value = match value.checked_add(digit) {
Some(value) => value,
None => return IResult::Error,
};
len += 1;
empty = false;
}
if empty {
IResult::Error
} else {
IResult::Done(&input[len..], value)
}
}
}
#[cfg(feature = "printing")]
mod printing {
use super::*;
use quote::{Tokens, ToTokens};
use std::{ascii, iter};
use std::fmt::{self, Display};
use std::str;
impl ToTokens for Lit {
fn to_tokens(&self, tokens: &mut Tokens) {
match *self {
Lit::Str(ref s, StrStyle::Cooked) => s.to_tokens(tokens),
Lit::Str(ref s, StrStyle::Raw(n)) => {
tokens.append(&format!("r{delim}\"{string}\"{delim}",
delim = iter::repeat("#").take(n).collect::<String>(),
string = s));
}
Lit::ByteStr(ref v, StrStyle::Cooked) => {
let mut escaped = "b\"".to_string();
for &ch in v.iter() {
match ch {
0 => escaped.push_str(r"\0"),
b'\'' => escaped.push('\''),
_ => escaped.extend(ascii::escape_default(ch).map(|c| c as char)),
}
}
escaped.push('"');
tokens.append(&escaped);
}
Lit::ByteStr(ref vec, StrStyle::Raw(n)) => {
tokens.append(&format!("br{delim}\"{string}\"{delim}",
delim = iter::repeat("#").take(n).collect::<String>(),
string = str::from_utf8(vec).unwrap()));
}
Lit::Byte(b) => {
match b {
0 => tokens.append(r"b'\0'"),
b'\"' => tokens.append("b'\"'"),
_ => {
let mut escaped = "b'".to_string();
escaped.extend(ascii::escape_default(b).map(|c| c as char));
escaped.push('\'');
tokens.append(&escaped);
}
}
}
Lit::Char(ch) => ch.to_tokens(tokens),
Lit::Int(value, ty) => tokens.append(&format!("{}{}", value, ty)),
Lit::Float(ref value, ty) => tokens.append(&format!("{}{}", value, ty)),
Lit::Bool(true) => tokens.append("true"),
Lit::Bool(false) => tokens.append("false"),
}
}
}
impl Display for IntTy {
fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> {
match *self {
IntTy::Isize => formatter.write_str("isize"),
IntTy::I8 => formatter.write_str("i8"),
IntTy::I16 => formatter.write_str("i16"),
IntTy::I32 => formatter.write_str("i32"),
IntTy::I64 => formatter.write_str("i64"),
IntTy::Usize => formatter.write_str("usize"),
IntTy::U8 => formatter.write_str("u8"),
IntTy::U16 => formatter.write_str("u16"),
IntTy::U32 => formatter.write_str("u32"),
IntTy::U64 => formatter.write_str("u64"),
IntTy::Unsuffixed => Ok(()),
}
}
}
impl Display for FloatTy {
fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> {
match *self {
FloatTy::F32 => formatter.write_str("f32"),
FloatTy::F64 => formatter.write_str("f64"),
FloatTy::Unsuffixed => Ok(()),
}
}
}
}

430
third_party/rust/syn-0.11.11/src/mac.rs поставляемый
Просмотреть файл

@ -1,430 +0,0 @@
use super::*;
/// Represents a macro invocation. The Path indicates which macro
/// is being invoked, and the vector of token-trees contains the source
/// of the macro invocation.
///
/// NB: the additional ident for a `macro_rules`-style macro is actually
/// stored in the enclosing item. Oog.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct Mac {
pub path: Path,
pub tts: Vec<TokenTree>,
}
/// When the main rust parser encounters a syntax-extension invocation, it
/// parses the arguments to the invocation as a token-tree. This is a very
/// loose structure, such that all sorts of different AST-fragments can
/// be passed to syntax extensions using a uniform type.
///
/// If the syntax extension is an MBE macro, it will attempt to match its
/// LHS token tree against the provided token tree, and if it finds a
/// match, will transcribe the RHS token tree, splicing in any captured
/// `macro_parser::matched_nonterminals` into the `SubstNt`s it finds.
///
/// The RHS of an MBE macro is the only place `SubstNt`s are substituted.
/// Nothing special happens to misnamed or misplaced `SubstNt`s.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum TokenTree {
/// A single token
Token(Token),
/// A delimited sequence of token trees
Delimited(Delimited),
}
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct Delimited {
/// The type of delimiter
pub delim: DelimToken,
/// The delimited sequence of token trees
pub tts: Vec<TokenTree>,
}
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum Token {
// Expression-operator symbols.
Eq,
Lt,
Le,
EqEq,
Ne,
Ge,
Gt,
AndAnd,
OrOr,
Not,
Tilde,
BinOp(BinOpToken),
BinOpEq(BinOpToken),
// Structural symbols
At,
Dot,
DotDot,
DotDotDot,
Comma,
Semi,
Colon,
ModSep,
RArrow,
LArrow,
FatArrow,
Pound,
Dollar,
Question,
// Literals
Literal(Lit),
// Name components
Ident(Ident),
Underscore,
Lifetime(Ident),
DocComment(String),
}
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum BinOpToken {
Plus,
Minus,
Star,
Slash,
Percent,
Caret,
And,
Or,
Shl,
Shr,
}
/// A delimiter token
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum DelimToken {
/// A round parenthesis: `(` or `)`
Paren,
/// A square bracket: `[` or `]`
Bracket,
/// A curly brace: `{` or `}`
Brace,
}
#[cfg(feature = "parsing")]
pub mod parsing {
use super::*;
use Lifetime;
use generics::parsing::lifetime;
use ident::parsing::word;
use lit::parsing::lit;
use synom::space::{block_comment, whitespace};
use ty::parsing::path;
named!(pub mac -> Mac, do_parse!(
what: path >>
punct!("!") >>
body: delimited >>
(Mac {
path: what,
tts: vec![TokenTree::Delimited(body)],
})
));
named!(pub token_trees -> Vec<TokenTree>, many0!(token_tree));
named!(pub delimited -> Delimited, alt!(
delimited!(
punct!("("),
token_trees,
punct!(")")
) => { |tts| Delimited { delim: DelimToken::Paren, tts: tts } }
|
delimited!(
punct!("["),
token_trees,
punct!("]")
) => { |tts| Delimited { delim: DelimToken::Bracket, tts: tts } }
|
delimited!(
punct!("{"),
token_trees,
punct!("}")
) => { |tts| Delimited { delim: DelimToken::Brace, tts: tts } }
));
named!(pub token_tree -> TokenTree, alt!(
map!(token, TokenTree::Token)
|
map!(delimited, TokenTree::Delimited)
));
named!(token -> Token, alt!(
keyword!("_") => { |_| Token::Underscore }
|
punct!("&&") => { |_| Token::AndAnd } // must be before BinOp
|
punct!("||") => { |_| Token::OrOr } // must be before BinOp
|
punct!("->") => { |_| Token::RArrow } // must be before BinOp
|
punct!("<-") => { |_| Token::LArrow } // must be before Lt
|
punct!("=>") => { |_| Token::FatArrow } // must be before Eq
|
punct!("...") => { |_| Token::DotDotDot } // must be before DotDot
|
punct!("..") => { |_| Token::DotDot } // must be before Dot
|
punct!(".") => { |_| Token::Dot }
|
map!(doc_comment, Token::DocComment) // must be before bin_op
|
map!(bin_op_eq, Token::BinOpEq) // must be before bin_op
|
map!(bin_op, Token::BinOp)
|
map!(lit, Token::Literal)
|
map!(word, Token::Ident)
|
map!(lifetime, |lt: Lifetime| Token::Lifetime(lt.ident))
|
punct!("<=") => { |_| Token::Le }
|
punct!("==") => { |_| Token::EqEq }
|
punct!("!=") => { |_| Token::Ne }
|
punct!(">=") => { |_| Token::Ge }
|
punct!("::") => { |_| Token::ModSep }
|
punct!("=") => { |_| Token::Eq }
|
punct!("<") => { |_| Token::Lt }
|
punct!(">") => { |_| Token::Gt }
|
punct!("!") => { |_| Token::Not }
|
punct!("~") => { |_| Token::Tilde }
|
punct!("@") => { |_| Token::At }
|
punct!(",") => { |_| Token::Comma }
|
punct!(";") => { |_| Token::Semi }
|
punct!(":") => { |_| Token::Colon }
|
punct!("#") => { |_| Token::Pound }
|
punct!("$") => { |_| Token::Dollar }
|
punct!("?") => { |_| Token::Question }
));
named!(bin_op -> BinOpToken, alt!(
punct!("+") => { |_| BinOpToken::Plus }
|
punct!("-") => { |_| BinOpToken::Minus }
|
punct!("*") => { |_| BinOpToken::Star }
|
punct!("/") => { |_| BinOpToken::Slash }
|
punct!("%") => { |_| BinOpToken::Percent }
|
punct!("^") => { |_| BinOpToken::Caret }
|
punct!("&") => { |_| BinOpToken::And }
|
punct!("|") => { |_| BinOpToken::Or }
|
punct!("<<") => { |_| BinOpToken::Shl }
|
punct!(">>") => { |_| BinOpToken::Shr }
));
named!(bin_op_eq -> BinOpToken, alt!(
punct!("+=") => { |_| BinOpToken::Plus }
|
punct!("-=") => { |_| BinOpToken::Minus }
|
punct!("*=") => { |_| BinOpToken::Star }
|
punct!("/=") => { |_| BinOpToken::Slash }
|
punct!("%=") => { |_| BinOpToken::Percent }
|
punct!("^=") => { |_| BinOpToken::Caret }
|
punct!("&=") => { |_| BinOpToken::And }
|
punct!("|=") => { |_| BinOpToken::Or }
|
punct!("<<=") => { |_| BinOpToken::Shl }
|
punct!(">>=") => { |_| BinOpToken::Shr }
));
named!(doc_comment -> String, alt!(
do_parse!(
punct!("//!") >>
content: take_until!("\n") >>
(format!("//!{}", content))
)
|
do_parse!(
option!(whitespace) >>
peek!(tag!("/*!")) >>
com: block_comment >>
(com.to_owned())
)
|
do_parse!(
punct!("///") >>
not!(tag!("/")) >>
content: take_until!("\n") >>
(format!("///{}", content))
)
|
do_parse!(
option!(whitespace) >>
peek!(tuple!(tag!("/**"), not!(tag!("*")))) >>
com: block_comment >>
(com.to_owned())
)
));
}
#[cfg(feature = "printing")]
mod printing {
use super::*;
use quote::{Tokens, ToTokens};
impl ToTokens for Mac {
fn to_tokens(&self, tokens: &mut Tokens) {
self.path.to_tokens(tokens);
tokens.append("!");
for tt in &self.tts {
tt.to_tokens(tokens);
}
}
}
impl ToTokens for TokenTree {
fn to_tokens(&self, tokens: &mut Tokens) {
match *self {
TokenTree::Token(ref token) => token.to_tokens(tokens),
TokenTree::Delimited(ref delimited) => delimited.to_tokens(tokens),
}
}
}
impl DelimToken {
fn open(&self) -> &'static str {
match *self {
DelimToken::Paren => "(",
DelimToken::Bracket => "[",
DelimToken::Brace => "{",
}
}
fn close(&self) -> &'static str {
match *self {
DelimToken::Paren => ")",
DelimToken::Bracket => "]",
DelimToken::Brace => "}",
}
}
}
impl ToTokens for Delimited {
fn to_tokens(&self, tokens: &mut Tokens) {
tokens.append(self.delim.open());
for tt in &self.tts {
tt.to_tokens(tokens);
}
tokens.append(self.delim.close());
}
}
impl ToTokens for Token {
fn to_tokens(&self, tokens: &mut Tokens) {
match *self {
Token::Eq => tokens.append("="),
Token::Lt => tokens.append("<"),
Token::Le => tokens.append("<="),
Token::EqEq => tokens.append("=="),
Token::Ne => tokens.append("!="),
Token::Ge => tokens.append(">="),
Token::Gt => tokens.append(">"),
Token::AndAnd => tokens.append("&&"),
Token::OrOr => tokens.append("||"),
Token::Not => tokens.append("!"),
Token::Tilde => tokens.append("~"),
Token::BinOp(binop) => tokens.append(binop.op()),
Token::BinOpEq(binop) => tokens.append(binop.assign_op()),
Token::At => tokens.append("@"),
Token::Dot => tokens.append("."),
Token::DotDot => tokens.append(".."),
Token::DotDotDot => tokens.append("..."),
Token::Comma => tokens.append(","),
Token::Semi => tokens.append(";"),
Token::Colon => tokens.append(":"),
Token::ModSep => tokens.append("::"),
Token::RArrow => tokens.append("->"),
Token::LArrow => tokens.append("<-"),
Token::FatArrow => tokens.append("=>"),
Token::Pound => tokens.append("#"),
Token::Dollar => tokens.append("$"),
Token::Question => tokens.append("?"),
Token::Literal(ref lit) => lit.to_tokens(tokens),
Token::Ident(ref ident) |
Token::Lifetime(ref ident) => ident.to_tokens(tokens),
Token::Underscore => tokens.append("_"),
Token::DocComment(ref com) => {
tokens.append(&format!("{}\n", com));
}
}
}
}
impl BinOpToken {
fn op(&self) -> &'static str {
match *self {
BinOpToken::Plus => "+",
BinOpToken::Minus => "-",
BinOpToken::Star => "*",
BinOpToken::Slash => "/",
BinOpToken::Percent => "%",
BinOpToken::Caret => "^",
BinOpToken::And => "&",
BinOpToken::Or => "|",
BinOpToken::Shl => "<<",
BinOpToken::Shr => ">>",
}
}
fn assign_op(&self) -> &'static str {
match *self {
BinOpToken::Plus => "+=",
BinOpToken::Minus => "-=",
BinOpToken::Star => "*=",
BinOpToken::Slash => "/=",
BinOpToken::Percent => "%=",
BinOpToken::Caret => "^=",
BinOpToken::And => "&=",
BinOpToken::Or => "|=",
BinOpToken::Shl => "<<=",
BinOpToken::Shr => ">>=",
}
}
}
impl ToTokens for BinOpToken {
fn to_tokens(&self, tokens: &mut Tokens) {
tokens.append(self.op());
}
}
}

192
third_party/rust/syn-0.11.11/src/op.rs поставляемый
Просмотреть файл

@ -1,192 +0,0 @@
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum BinOp {
/// The `+` operator (addition)
Add,
/// The `-` operator (subtraction)
Sub,
/// The `*` operator (multiplication)
Mul,
/// The `/` operator (division)
Div,
/// The `%` operator (modulus)
Rem,
/// The `&&` operator (logical and)
And,
/// The `||` operator (logical or)
Or,
/// The `^` operator (bitwise xor)
BitXor,
/// The `&` operator (bitwise and)
BitAnd,
/// The `|` operator (bitwise or)
BitOr,
/// The `<<` operator (shift left)
Shl,
/// The `>>` operator (shift right)
Shr,
/// The `==` operator (equality)
Eq,
/// The `<` operator (less than)
Lt,
/// The `<=` operator (less than or equal to)
Le,
/// The `!=` operator (not equal to)
Ne,
/// The `>=` operator (greater than or equal to)
Ge,
/// The `>` operator (greater than)
Gt,
}
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum UnOp {
/// The `*` operator for dereferencing
Deref,
/// The `!` operator for logical inversion
Not,
/// The `-` operator for negation
Neg,
}
#[cfg(feature = "parsing")]
pub mod parsing {
use super::*;
named!(pub binop -> BinOp, alt!(
punct!("&&") => { |_| BinOp::And }
|
punct!("||") => { |_| BinOp::Or }
|
punct!("<<") => { |_| BinOp::Shl }
|
punct!(">>") => { |_| BinOp::Shr }
|
punct!("==") => { |_| BinOp::Eq }
|
punct!("<=") => { |_| BinOp::Le }
|
punct!("!=") => { |_| BinOp::Ne }
|
punct!(">=") => { |_| BinOp::Ge }
|
punct!("+") => { |_| BinOp::Add }
|
punct!("-") => { |_| BinOp::Sub }
|
punct!("*") => { |_| BinOp::Mul }
|
punct!("/") => { |_| BinOp::Div }
|
punct!("%") => { |_| BinOp::Rem }
|
punct!("^") => { |_| BinOp::BitXor }
|
punct!("&") => { |_| BinOp::BitAnd }
|
punct!("|") => { |_| BinOp::BitOr }
|
punct!("<") => { |_| BinOp::Lt }
|
punct!(">") => { |_| BinOp::Gt }
));
#[cfg(feature = "full")]
named!(pub assign_op -> BinOp, alt!(
punct!("+=") => { |_| BinOp::Add }
|
punct!("-=") => { |_| BinOp::Sub }
|
punct!("*=") => { |_| BinOp::Mul }
|
punct!("/=") => { |_| BinOp::Div }
|
punct!("%=") => { |_| BinOp::Rem }
|
punct!("^=") => { |_| BinOp::BitXor }
|
punct!("&=") => { |_| BinOp::BitAnd }
|
punct!("|=") => { |_| BinOp::BitOr }
|
punct!("<<=") => { |_| BinOp::Shl }
|
punct!(">>=") => { |_| BinOp::Shr }
));
named!(pub unop -> UnOp, alt!(
punct!("*") => { |_| UnOp::Deref }
|
punct!("!") => { |_| UnOp::Not }
|
punct!("-") => { |_| UnOp::Neg }
));
}
#[cfg(feature = "printing")]
mod printing {
use super::*;
use quote::{Tokens, ToTokens};
impl BinOp {
pub fn op(&self) -> &'static str {
match *self {
BinOp::Add => "+",
BinOp::Sub => "-",
BinOp::Mul => "*",
BinOp::Div => "/",
BinOp::Rem => "%",
BinOp::And => "&&",
BinOp::Or => "||",
BinOp::BitXor => "^",
BinOp::BitAnd => "&",
BinOp::BitOr => "|",
BinOp::Shl => "<<",
BinOp::Shr => ">>",
BinOp::Eq => "==",
BinOp::Lt => "<",
BinOp::Le => "<=",
BinOp::Ne => "!=",
BinOp::Ge => ">=",
BinOp::Gt => ">",
}
}
pub fn assign_op(&self) -> Option<&'static str> {
match *self {
BinOp::Add => Some("+="),
BinOp::Sub => Some("-="),
BinOp::Mul => Some("*="),
BinOp::Div => Some("/="),
BinOp::Rem => Some("%="),
BinOp::BitXor => Some("^="),
BinOp::BitAnd => Some("&="),
BinOp::BitOr => Some("|="),
BinOp::Shl => Some("<<="),
BinOp::Shr => Some(">>="),
_ => None,
}
}
}
impl ToTokens for BinOp {
fn to_tokens(&self, tokens: &mut Tokens) {
tokens.append(self.op());
}
}
impl UnOp {
pub fn op(&self) -> &'static str {
match *self {
UnOp::Deref => "*",
UnOp::Not => "!",
UnOp::Neg => "-",
}
}
}
impl ToTokens for UnOp {
fn to_tokens(&self, tokens: &mut Tokens) {
tokens.append(self.op());
}
}
}

844
third_party/rust/syn-0.11.11/src/ty.rs поставляемый
Просмотреть файл

@ -1,844 +0,0 @@
use super::*;
/// The different kinds of types recognized by the compiler
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum Ty {
/// A variable-length array (`[T]`)
Slice(Box<Ty>),
/// A fixed length array (`[T; n]`)
Array(Box<Ty>, ConstExpr),
/// A raw pointer (`*const T` or `*mut T`)
Ptr(Box<MutTy>),
/// A reference (`&'a T` or `&'a mut T`)
Rptr(Option<Lifetime>, Box<MutTy>),
/// A bare function (e.g. `fn(usize) -> bool`)
BareFn(Box<BareFnTy>),
/// The never type (`!`)
Never,
/// A tuple (`(A, B, C, D, ...)`)
Tup(Vec<Ty>),
/// A path (`module::module::...::Type`), optionally
/// "qualified", e.g. `<Vec<T> as SomeTrait>::SomeType`.
///
/// Type parameters are stored in the Path itself
Path(Option<QSelf>, Path),
/// A trait object type `Bound1 + Bound2 + Bound3`
/// where `Bound` is a trait or a lifetime.
TraitObject(Vec<TyParamBound>),
/// An `impl Bound1 + Bound2 + Bound3` type
/// where `Bound` is a trait or a lifetime.
ImplTrait(Vec<TyParamBound>),
/// No-op; kept solely so that we can pretty-print faithfully
Paren(Box<Ty>),
/// TyKind::Infer means the type should be inferred instead of it having been
/// specified. This can appear anywhere in a type.
Infer,
/// A macro in the type position.
Mac(Mac),
}
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct MutTy {
pub ty: Ty,
pub mutability: Mutability,
}
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum Mutability {
Mutable,
Immutable,
}
/// A "Path" is essentially Rust's notion of a name.
///
/// It's represented as a sequence of identifiers,
/// along with a bunch of supporting information.
///
/// E.g. `std::cmp::PartialEq`
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct Path {
/// A `::foo` path, is relative to the crate root rather than current
/// module (like paths in an import).
pub global: bool,
/// The segments in the path: the things separated by `::`.
pub segments: Vec<PathSegment>,
}
impl<T> From<T> for Path
where T: Into<PathSegment>
{
fn from(segment: T) -> Self {
Path {
global: false,
segments: vec![segment.into()],
}
}
}
/// A segment of a path: an identifier, an optional lifetime, and a set of types.
///
/// E.g. `std`, `String` or `Box<T>`
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct PathSegment {
/// The identifier portion of this path segment.
pub ident: Ident,
/// Type/lifetime parameters attached to this path. They come in
/// two flavors: `Path<A,B,C>` and `Path(A,B) -> C`. Note that
/// this is more than just simple syntactic sugar; the use of
/// parens affects the region binding rules, so we preserve the
/// distinction.
pub parameters: PathParameters,
}
impl<T> From<T> for PathSegment
where T: Into<Ident>
{
fn from(ident: T) -> Self {
PathSegment {
ident: ident.into(),
parameters: PathParameters::none(),
}
}
}
/// Parameters of a path segment.
///
/// E.g. `<A, B>` as in `Foo<A, B>` or `(A, B)` as in `Foo(A, B)`
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum PathParameters {
/// The `<'a, A, B, C>` in `foo::bar::baz::<'a, A, B, C>`
AngleBracketed(AngleBracketedParameterData),
/// The `(A, B)` and `C` in `Foo(A, B) -> C`
Parenthesized(ParenthesizedParameterData),
}
impl PathParameters {
pub fn none() -> Self {
PathParameters::AngleBracketed(AngleBracketedParameterData::default())
}
pub fn is_empty(&self) -> bool {
match *self {
PathParameters::AngleBracketed(ref bracketed) => {
bracketed.lifetimes.is_empty() && bracketed.types.is_empty() &&
bracketed.bindings.is_empty()
}
PathParameters::Parenthesized(_) => false,
}
}
}
/// A path like `Foo<'a, T>`
#[derive(Debug, Clone, Eq, PartialEq, Default, Hash)]
pub struct AngleBracketedParameterData {
/// The lifetime parameters for this path segment.
pub lifetimes: Vec<Lifetime>,
/// The type parameters for this path segment, if present.
pub types: Vec<Ty>,
/// Bindings (equality constraints) on associated types, if present.
///
/// E.g., `Foo<A=Bar>`.
pub bindings: Vec<TypeBinding>,
}
/// Bind a type to an associated type: `A=Foo`.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct TypeBinding {
pub ident: Ident,
pub ty: Ty,
}
/// A path like `Foo(A,B) -> C`
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct ParenthesizedParameterData {
/// `(A, B)`
pub inputs: Vec<Ty>,
/// `C`
pub output: Option<Ty>,
}
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct PolyTraitRef {
/// The `'a` in `<'a> Foo<&'a T>`
pub bound_lifetimes: Vec<LifetimeDef>,
/// The `Foo<&'a T>` in `<'a> Foo<&'a T>`
pub trait_ref: Path,
}
/// The explicit Self type in a "qualified path". The actual
/// path, including the trait and the associated item, is stored
/// separately. `position` represents the index of the associated
/// item qualified with this Self type.
///
/// ```rust,ignore
/// <Vec<T> as a::b::Trait>::AssociatedItem
/// ^~~~~ ~~~~~~~~~~~~~~^
/// ty position = 3
///
/// <Vec<T>>::AssociatedItem
/// ^~~~~ ^
/// ty position = 0
/// ```
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct QSelf {
pub ty: Box<Ty>,
pub position: usize,
}
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct BareFnTy {
pub unsafety: Unsafety,
pub abi: Option<Abi>,
pub lifetimes: Vec<LifetimeDef>,
pub inputs: Vec<BareFnArg>,
pub output: FunctionRetTy,
pub variadic: bool,
}
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum Unsafety {
Unsafe,
Normal,
}
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum Abi {
Named(String),
Rust,
}
/// An argument in a function type.
///
/// E.g. `bar: usize` as in `fn foo(bar: usize)`
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct BareFnArg {
pub name: Option<Ident>,
pub ty: Ty,
}
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum FunctionRetTy {
/// Return type is not specified.
///
/// Functions default to `()` and
/// closures default to inference. Span points to where return
/// type would be inserted.
Default,
/// Everything else
Ty(Ty),
}
#[cfg(feature = "parsing")]
pub mod parsing {
use super::*;
use {TyParamBound, TraitBoundModifier};
#[cfg(feature = "full")]
use ConstExpr;
#[cfg(feature = "full")]
use constant::parsing::const_expr;
#[cfg(feature = "full")]
use expr::parsing::expr;
use generics::parsing::{lifetime, lifetime_def, ty_param_bound, bound_lifetimes};
use ident::parsing::ident;
use lit::parsing::quoted_string;
use mac::parsing::mac;
use std::str;
named!(pub ty -> Ty, alt!(
ty_paren // must be before ty_tup
|
ty_mac // must be before ty_path
|
ty_path // must be before ty_poly_trait_ref
|
ty_vec
|
ty_array
|
ty_ptr
|
ty_rptr
|
ty_bare_fn
|
ty_never
|
ty_tup
|
ty_poly_trait_ref
|
ty_impl_trait
));
named!(ty_mac -> Ty, map!(mac, Ty::Mac));
named!(ty_vec -> Ty, do_parse!(
punct!("[") >>
elem: ty >>
punct!("]") >>
(Ty::Slice(Box::new(elem)))
));
named!(ty_array -> Ty, do_parse!(
punct!("[") >>
elem: ty >>
punct!(";") >>
len: array_len >>
punct!("]") >>
(Ty::Array(Box::new(elem), len))
));
#[cfg(not(feature = "full"))]
use constant::parsing::const_expr as array_len;
#[cfg(feature = "full")]
named!(array_len -> ConstExpr, alt!(
terminated!(const_expr, after_array_len)
|
terminated!(expr, after_array_len) => { ConstExpr::Other }
));
#[cfg(feature = "full")]
named!(after_array_len -> &str, peek!(punct!("]")));
named!(ty_ptr -> Ty, do_parse!(
punct!("*") >>
mutability: alt!(
keyword!("const") => { |_| Mutability::Immutable }
|
keyword!("mut") => { |_| Mutability::Mutable }
) >>
target: ty >>
(Ty::Ptr(Box::new(MutTy {
ty: target,
mutability: mutability,
})))
));
named!(ty_rptr -> Ty, do_parse!(
punct!("&") >>
life: option!(lifetime) >>
mutability: mutability >>
target: ty >>
(Ty::Rptr(life, Box::new(MutTy {
ty: target,
mutability: mutability,
})))
));
named!(ty_bare_fn -> Ty, do_parse!(
lifetimes: opt_vec!(do_parse!(
keyword!("for") >>
punct!("<") >>
lifetimes: terminated_list!(punct!(","), lifetime_def) >>
punct!(">") >>
(lifetimes)
)) >>
unsafety: unsafety >>
abi: option!(abi) >>
keyword!("fn") >>
punct!("(") >>
inputs: separated_list!(punct!(","), fn_arg) >>
trailing_comma: option!(punct!(",")) >>
variadic: option!(cond_reduce!(trailing_comma.is_some(), punct!("..."))) >>
punct!(")") >>
output: option!(preceded!(
punct!("->"),
ty
)) >>
(Ty::BareFn(Box::new(BareFnTy {
unsafety: unsafety,
abi: abi,
lifetimes: lifetimes,
inputs: inputs,
output: match output {
Some(ty) => FunctionRetTy::Ty(ty),
None => FunctionRetTy::Default,
},
variadic: variadic.is_some(),
})))
));
named!(ty_never -> Ty, map!(punct!("!"), |_| Ty::Never));
named!(ty_tup -> Ty, do_parse!(
punct!("(") >>
elems: terminated_list!(punct!(","), ty) >>
punct!(")") >>
(Ty::Tup(elems))
));
named!(ty_path -> Ty, do_parse!(
qpath: qpath >>
parenthesized: cond!(
qpath.1.segments.last().unwrap().parameters == PathParameters::none(),
option!(parenthesized_parameter_data)
) >>
bounds: many0!(preceded!(punct!("+"), ty_param_bound)) >>
({
let (qself, mut path) = qpath;
if let Some(Some(parenthesized)) = parenthesized {
path.segments.last_mut().unwrap().parameters = parenthesized;
}
if bounds.is_empty() {
Ty::Path(qself, path)
} else {
let path = TyParamBound::Trait(
PolyTraitRef {
bound_lifetimes: Vec::new(),
trait_ref: path,
},
TraitBoundModifier::None,
);
let bounds = Some(path).into_iter().chain(bounds).collect();
Ty::TraitObject(bounds)
}
})
));
named!(parenthesized_parameter_data -> PathParameters, do_parse!(
punct!("(") >>
inputs: terminated_list!(punct!(","), ty) >>
punct!(")") >>
output: option!(preceded!(
punct!("->"),
ty
)) >>
(PathParameters::Parenthesized(
ParenthesizedParameterData {
inputs: inputs,
output: output,
},
))
));
// Possibly-qualified path: a plain `path`, a fully qualified
// `<Ty as Trait>::rest` / `<Ty>::rest`, or the keyword `self`.
named!(pub qpath -> (Option<QSelf>, Path), alt!(
    map!(path, |p| (None, p))
    |
    do_parse!(
        punct!("<") >>
        this: map!(ty, Box::new) >>
        path: option!(preceded!(
            keyword!("as"),
            path
        )) >>
        punct!(">") >>
        punct!("::") >>
        rest: separated_nonempty_list!(punct!("::"), path_segment) >>
        ({
            match path {
                Some(mut path) => {
                    // `<Ty as Trait>::rest`: `position` records how many
                    // leading segments belong to the trait path.
                    let pos = path.segments.len();
                    path.segments.extend(rest);
                    (Some(QSelf { ty: this, position: pos }), path)
                }
                None => {
                    // `<Ty>::rest`: no trait, so position is zero and the
                    // path consists only of the trailing segments.
                    (Some(QSelf { ty: this, position: 0 }), Path {
                        global: false,
                        segments: rest,
                    })
                }
            }
        })
    )
    |
    map!(keyword!("self"), |_| (None, "self".into()))
));
// Trait object written as a `+`-separated bound list, e.g. `'a + Trait`.
named!(ty_poly_trait_ref -> Ty, map!(
    separated_nonempty_list!(punct!("+"), ty_param_bound),
    Ty::TraitObject
));
// `impl Trait1 + Trait2 + ...`.
named!(ty_impl_trait -> Ty, do_parse!(
    keyword!("impl") >>
    elem: separated_nonempty_list!(punct!("+"), ty_param_bound) >>
    (Ty::ImplTrait(elem))
));
// Parenthesized type `(T)`; kept distinct from a 1-tuple, which requires
// a trailing comma and is handled by `ty_tup`.
named!(ty_paren -> Ty, do_parse!(
    punct!("(") >>
    elem: ty >>
    punct!(")") >>
    (Ty::Paren(Box::new(elem)))
));
// Optional `mut` keyword; absence parses as immutable (the `epsilon!`
// branch always succeeds, so this parser never fails).
named!(pub mutability -> Mutability, alt!(
    keyword!("mut") => { |_| Mutability::Mutable }
    |
    epsilon!() => { |_| Mutability::Immutable }
));
// Plain path: optional leading `::` (global) then `::`-separated segments.
named!(pub path -> Path, do_parse!(
    global: option!(punct!("::")) >>
    segments: separated_nonempty_list!(punct!("::"), path_segment) >>
    (Path {
        global: global.is_some(),
        segments: segments,
    })
));
// One path segment, optionally with angle-bracketed parameters:
// `Ident<'a, T, Assoc = U>`. The identifier may be absent (as produced by
// `<T>`-style qualified paths) and may also be `super`/`self`/`Self`.
named!(path_segment -> PathSegment, alt!(
    do_parse!(
        id: option!(ident) >>
        punct!("<") >>
        lifetimes: separated_list!(punct!(","), lifetime) >>
        // Types come after lifetimes; a separating comma is required only
        // when lifetimes were present.
        types: opt_vec!(preceded!(
            cond!(!lifetimes.is_empty(), punct!(",")),
            separated_nonempty_list!(
                punct!(","),
                // `not!(punct!("="))` keeps `Assoc = U` bindings out of the
                // plain type list.
                terminated!(ty, not!(punct!("=")))
            )
        )) >>
        bindings: opt_vec!(preceded!(
            cond!(!lifetimes.is_empty() || !types.is_empty(), punct!(",")),
            separated_nonempty_list!(punct!(","), type_binding)
        )) >>
        // Permit a single trailing comma before `>` when anything was parsed.
        cond!(!lifetimes.is_empty() || !types.is_empty() || !bindings.is_empty(), option!(punct!(","))) >>
        punct!(">") >>
        (PathSegment {
            // A missing identifier becomes the empty-string ident.
            ident: id.unwrap_or_else(|| "".into()),
            parameters: PathParameters::AngleBracketed(
                AngleBracketedParameterData {
                    lifetimes: lifetimes,
                    types: types,
                    bindings: bindings,
                }
            ),
        })
    )
    |
    map!(ident, Into::into)
    |
    map!(alt!(
        keyword!("super")
        |
        keyword!("self")
        |
        keyword!("Self")
    ), Into::into)
));
// Associated-type binding inside angle brackets: `Ident = Ty`.
named!(type_binding -> TypeBinding, do_parse!(
    id: ident >>
    punct!("=") >>
    ty: ty >>
    (TypeBinding {
        ident: id,
        ty: ty,
    })
));
// `for<'a> Trait`, optionally with `Fn(A) -> B`-style parenthesized
// parameters grafted onto the final path segment.
named!(pub poly_trait_ref -> PolyTraitRef, do_parse!(
    bound_lifetimes: bound_lifetimes >>
    trait_ref: path >>
    // Only legal when the last segment has no parameters yet.
    parenthesized: option!(cond_reduce!(
        trait_ref.segments.last().unwrap().parameters == PathParameters::none(),
        parenthesized_parameter_data
    )) >>
    ({
        let mut trait_ref = trait_ref;
        if let Some(parenthesized) = parenthesized {
            trait_ref.segments.last_mut().unwrap().parameters = parenthesized;
        }
        PolyTraitRef {
            bound_lifetimes: bound_lifetimes,
            trait_ref: trait_ref,
        }
    })
));
// Bare-fn argument: optional `name:` prefix then a type. The
// `not!(tag!(":"))` lookahead stops a path like `a::b` from being
// misread as name `a` followed by `:b`.
named!(pub fn_arg -> BareFnArg, do_parse!(
    name: option!(do_parse!(
        name: ident >>
        punct!(":") >>
        not!(tag!(":")) >> // not ::
        (name)
    )) >>
    ty: ty >>
    (BareFnArg {
        name: name,
        ty: ty,
    })
));
// Optional `unsafe` keyword; like `mutability`, this parser never fails.
named!(pub unsafety -> Unsafety, alt!(
    keyword!("unsafe") => { |_| Unsafety::Unsafe }
    |
    epsilon!() => { |_| Unsafety::Normal }
));
// `extern` or `extern "name"`. A bare `extern` with no string literal is
// represented here as `Abi::Rust`.
named!(pub abi -> Abi, do_parse!(
    keyword!("extern") >>
    name: option!(quoted_string) >>
    (match name {
        Some(name) => Abi::Named(name),
        None => Abi::Rust,
    })
));
}
// Token-printing half of the type module: `ToTokens` impls that emit each
// AST node back out as a token stream via `quote::Tokens`. The output must
// round-trip through the parsers above, which is why e.g. an empty path
// segment still prints `<>`.
#[cfg(feature = "printing")]
mod printing {
    use super::*;
    use quote::{Tokens, ToTokens};

    impl ToTokens for Ty {
        fn to_tokens(&self, tokens: &mut Tokens) {
            match *self {
                Ty::Slice(ref inner) => {
                    tokens.append("[");
                    inner.to_tokens(tokens);
                    tokens.append("]");
                }
                Ty::Array(ref inner, ref len) => {
                    tokens.append("[");
                    inner.to_tokens(tokens);
                    tokens.append(";");
                    len.to_tokens(tokens);
                    tokens.append("]");
                }
                Ty::Ptr(ref target) => {
                    // Raw pointers must always print a qualifier:
                    // `*mut T` or `*const T`.
                    tokens.append("*");
                    match target.mutability {
                        Mutability::Mutable => tokens.append("mut"),
                        Mutability::Immutable => tokens.append("const"),
                    }
                    target.ty.to_tokens(tokens);
                }
                Ty::Rptr(ref lifetime, ref target) => {
                    tokens.append("&");
                    lifetime.to_tokens(tokens);
                    target.mutability.to_tokens(tokens);
                    target.ty.to_tokens(tokens);
                }
                Ty::BareFn(ref func) => {
                    func.to_tokens(tokens);
                }
                Ty::Never => {
                    tokens.append("!");
                }
                Ty::Tup(ref elems) => {
                    tokens.append("(");
                    tokens.append_separated(elems, ",");
                    // A 1-tuple needs the trailing comma to stay a tuple
                    // rather than a parenthesized type.
                    if elems.len() == 1 {
                        tokens.append(",");
                    }
                    tokens.append(")");
                }
                Ty::Path(None, ref path) => {
                    path.to_tokens(tokens);
                }
                Ty::Path(Some(ref qself), ref path) => {
                    // Qualified path: `<Ty as Trait>::rest` when
                    // `position > 0`, otherwise `<Ty>::rest`.
                    tokens.append("<");
                    qself.ty.to_tokens(tokens);
                    if qself.position > 0 {
                        tokens.append("as");
                        // The first `position` segments are the trait path.
                        for (i, segment) in path.segments
                            .iter()
                            .take(qself.position)
                            .enumerate() {
                            if i > 0 || path.global {
                                tokens.append("::");
                            }
                            segment.to_tokens(tokens);
                        }
                    }
                    tokens.append(">");
                    // The remaining segments follow the closing `>`.
                    for segment in path.segments.iter().skip(qself.position) {
                        tokens.append("::");
                        segment.to_tokens(tokens);
                    }
                }
                Ty::TraitObject(ref bounds) => {
                    tokens.append_separated(bounds, "+");
                }
                Ty::ImplTrait(ref bounds) => {
                    tokens.append("impl");
                    tokens.append_separated(bounds, "+");
                }
                Ty::Paren(ref inner) => {
                    tokens.append("(");
                    inner.to_tokens(tokens);
                    tokens.append(")");
                }
                Ty::Infer => {
                    tokens.append("_");
                }
                Ty::Mac(ref mac) => mac.to_tokens(tokens),
            }
        }
    }

    impl ToTokens for Mutability {
        fn to_tokens(&self, tokens: &mut Tokens) {
            // Immutability is the default and prints nothing.
            if let Mutability::Mutable = *self {
                tokens.append("mut");
            }
        }
    }

    impl ToTokens for Path {
        fn to_tokens(&self, tokens: &mut Tokens) {
            for (i, segment) in self.segments.iter().enumerate() {
                // `global` paths get a leading `::`.
                if i > 0 || self.global {
                    tokens.append("::");
                }
                segment.to_tokens(tokens);
            }
        }
    }

    impl ToTokens for PathSegment {
        fn to_tokens(&self, tokens: &mut Tokens) {
            self.ident.to_tokens(tokens);
            if self.ident.as_ref().is_empty() && self.parameters.is_empty() {
                // An identifier-less, parameter-less segment (from a
                // `<T>`-style qualified path) prints as `<>` — presumably
                // so the output remains re-parseable; confirm upstream.
                tokens.append("<");
                tokens.append(">");
            } else {
                self.parameters.to_tokens(tokens);
            }
        }
    }

    impl ToTokens for PathParameters {
        fn to_tokens(&self, tokens: &mut Tokens) {
            match *self {
                PathParameters::AngleBracketed(ref parameters) => {
                    parameters.to_tokens(tokens);
                }
                PathParameters::Parenthesized(ref parameters) => {
                    parameters.to_tokens(tokens);
                }
            }
        }
    }

    impl ToTokens for AngleBracketedParameterData {
        fn to_tokens(&self, tokens: &mut Tokens) {
            let has_lifetimes = !self.lifetimes.is_empty();
            let has_types = !self.types.is_empty();
            let has_bindings = !self.bindings.is_empty();
            // Entirely empty parameter data prints nothing (no `<>`).
            if !has_lifetimes && !has_types && !has_bindings {
                return;
            }
            tokens.append("<");
            // Fixed order: lifetimes, then types, then bindings, with a
            // single comma between consecutive items across the groups.
            let mut first = true;
            for lifetime in &self.lifetimes {
                if !first {
                    tokens.append(",");
                }
                lifetime.to_tokens(tokens);
                first = false;
            }
            for ty in &self.types {
                if !first {
                    tokens.append(",");
                }
                ty.to_tokens(tokens);
                first = false;
            }
            for binding in &self.bindings {
                if !first {
                    tokens.append(",");
                }
                binding.to_tokens(tokens);
                first = false;
            }
            tokens.append(">");
        }
    }

    impl ToTokens for TypeBinding {
        fn to_tokens(&self, tokens: &mut Tokens) {
            self.ident.to_tokens(tokens);
            tokens.append("=");
            self.ty.to_tokens(tokens);
        }
    }

    impl ToTokens for ParenthesizedParameterData {
        fn to_tokens(&self, tokens: &mut Tokens) {
            tokens.append("(");
            tokens.append_separated(&self.inputs, ",");
            tokens.append(")");
            if let Some(ref output) = self.output {
                tokens.append("->");
                output.to_tokens(tokens);
            }
        }
    }

    impl ToTokens for PolyTraitRef {
        fn to_tokens(&self, tokens: &mut Tokens) {
            // Higher-ranked binder `for<'a, 'b>` only when present.
            if !self.bound_lifetimes.is_empty() {
                tokens.append("for");
                tokens.append("<");
                tokens.append_separated(&self.bound_lifetimes, ",");
                tokens.append(">");
            }
            self.trait_ref.to_tokens(tokens);
        }
    }

    impl ToTokens for BareFnTy {
        fn to_tokens(&self, tokens: &mut Tokens) {
            if !self.lifetimes.is_empty() {
                tokens.append("for");
                tokens.append("<");
                tokens.append_separated(&self.lifetimes, ",");
                tokens.append(">");
            }
            self.unsafety.to_tokens(tokens);
            self.abi.to_tokens(tokens);
            tokens.append("fn");
            tokens.append("(");
            tokens.append_separated(&self.inputs, ",");
            if self.variadic {
                // `...` needs a separating comma unless it is the only
                // thing in the argument list.
                if !self.inputs.is_empty() {
                    tokens.append(",");
                }
                tokens.append("...");
            }
            tokens.append(")");
            // The default return type is implicit and prints nothing.
            if let FunctionRetTy::Ty(ref ty) = self.output {
                tokens.append("->");
                ty.to_tokens(tokens);
            }
        }
    }

    impl ToTokens for BareFnArg {
        fn to_tokens(&self, tokens: &mut Tokens) {
            if let Some(ref name) = self.name {
                name.to_tokens(tokens);
                tokens.append(":");
            }
            self.ty.to_tokens(tokens);
        }
    }

    impl ToTokens for Unsafety {
        fn to_tokens(&self, tokens: &mut Tokens) {
            match *self {
                Unsafety::Unsafe => tokens.append("unsafe"),
                Unsafety::Normal => {
                    // nothing
                }
            }
        }
    }

    impl ToTokens for Abi {
        fn to_tokens(&self, tokens: &mut Tokens) {
            tokens.append("extern");
            match *self {
                Abi::Named(ref named) => named.to_tokens(tokens),
                // NOTE(review): `Abi::Rust` still prints the bare `extern`
                // keyword here — matches how the parser above reads it back.
                Abi::Rust => {}
            }
        }
    }
}

778
third_party/rust/syn-0.11.11/src/visit.rs поставляемый
Просмотреть файл

@ -1,778 +0,0 @@
// Adapted from libsyntax.
//! AST walker. Each overridden visit method has full control over what
//! happens with its node, it can do its own traversal of the node's children,
//! call `visit::walk_*` to apply the default traversal algorithm, or prevent
//! deeper traversal by doing nothing.
//!
//! Note: it is an important invariant that the default visitor walks the body
//! of a function in "execution order" (more concretely, reverse post-order
//! with respect to the CFG implied by the AST), meaning that if AST node A may
//! execute before AST node B, then A is visited first. The borrow checker in
//! particular relies on this property.
//!
//! Note: walking an AST before macro expansion is probably a bad idea. For
//! instance, a walker looking for item names in a module will miss all of
//! those that are created by the expansion of a macro.
use super::*;
/// Each method of the Visitor trait is a hook to be potentially
/// overridden. Each method's default implementation recursively visits
/// the substructure of the input via the corresponding `walk` method;
/// e.g. the `visit_mod` method by default calls `visit::walk_mod`.
///
/// If you want to ensure that your code handles every variant
/// explicitly, you need to override each method. (And you also need
/// to monitor future changes to `Visitor` in case a new method with a
/// new default implementation gets introduced.)
pub trait Visitor: Sized {
    // Leaf nodes: these defaults are no-ops rather than walks.
    fn visit_ident(&mut self, _ident: &Ident) {}
    fn visit_derive_input(&mut self, derive_input: &DeriveInput) {
        walk_derive_input(self, derive_input)
    }
    fn visit_ty(&mut self, ty: &Ty) {
        walk_ty(self, ty)
    }
    fn visit_generics(&mut self, generics: &Generics) {
        walk_generics(self, generics)
    }
    fn visit_ty_param_bound(&mut self, bound: &TyParamBound) {
        walk_ty_param_bound(self, bound)
    }
    fn visit_poly_trait_ref(&mut self, trait_ref: &PolyTraitRef, modifier: &TraitBoundModifier) {
        walk_poly_trait_ref(self, trait_ref, modifier)
    }
    fn visit_variant_data(&mut self, data: &VariantData, _ident: &Ident, _generics: &Generics) {
        walk_variant_data(self, data)
    }
    fn visit_field(&mut self, field: &Field) {
        walk_field(self, field)
    }
    fn visit_variant(&mut self, variant: &Variant, generics: &Generics) {
        walk_variant(self, variant, generics)
    }
    fn visit_lifetime(&mut self, _lifetime: &Lifetime) {}
    fn visit_lifetime_def(&mut self, lifetime: &LifetimeDef) {
        walk_lifetime_def(self, lifetime)
    }
    fn visit_path(&mut self, path: &Path) {
        walk_path(self, path)
    }
    fn visit_path_segment(&mut self, path_segment: &PathSegment) {
        walk_path_segment(self, path_segment)
    }
    fn visit_path_parameters(&mut self, path_parameters: &PathParameters) {
        walk_path_parameters(self, path_parameters)
    }
    fn visit_assoc_type_binding(&mut self, type_binding: &TypeBinding) {
        walk_assoc_type_binding(self, type_binding)
    }
    fn visit_attribute(&mut self, _attr: &Attribute) {}
    fn visit_fn_ret_ty(&mut self, ret_ty: &FunctionRetTy) {
        walk_fn_ret_ty(self, ret_ty)
    }
    fn visit_const_expr(&mut self, expr: &ConstExpr) {
        walk_const_expr(self, expr)
    }
    fn visit_lit(&mut self, _lit: &Lit) {}
    fn visit_mac(&mut self, mac: &Mac) {
        walk_mac(self, mac);
    }
    // The hooks below exist only with the "full" feature, which adds the
    // item/expression/statement AST on top of the derive-input AST.
    #[cfg(feature = "full")]
    fn visit_crate(&mut self, _crate: &Crate) {
        walk_crate(self, _crate);
    }
    #[cfg(feature = "full")]
    fn visit_item(&mut self, item: &Item) {
        walk_item(self, item);
    }
    #[cfg(feature = "full")]
    fn visit_expr(&mut self, expr: &Expr) {
        walk_expr(self, expr);
    }
    #[cfg(feature = "full")]
    fn visit_foreign_item(&mut self, foreign_item: &ForeignItem) {
        walk_foreign_item(self, foreign_item);
    }
    #[cfg(feature = "full")]
    fn visit_pat(&mut self, pat: &Pat) {
        walk_pat(self, pat);
    }
    #[cfg(feature = "full")]
    fn visit_fn_decl(&mut self, fn_decl: &FnDecl) {
        walk_fn_decl(self, fn_decl);
    }
    #[cfg(feature = "full")]
    fn visit_trait_item(&mut self, trait_item: &TraitItem) {
        walk_trait_item(self, trait_item);
    }
    #[cfg(feature = "full")]
    fn visit_impl_item(&mut self, impl_item: &ImplItem) {
        walk_impl_item(self, impl_item);
    }
    #[cfg(feature = "full")]
    fn visit_method_sig(&mut self, method_sig: &MethodSig) {
        walk_method_sig(self, method_sig);
    }
    #[cfg(feature = "full")]
    fn visit_stmt(&mut self, stmt: &Stmt) {
        walk_stmt(self, stmt);
    }
    #[cfg(feature = "full")]
    fn visit_local(&mut self, local: &Local) {
        walk_local(self, local);
    }
    #[cfg(feature = "full")]
    fn visit_view_path(&mut self, view_path: &ViewPath) {
        walk_view_path(self, view_path);
    }
}
// Call `$visitor.$method(elem, $extra_args...)` for every element of
// `$list`, preserving iteration order.
macro_rules! walk_list {
    ($visitor:expr, $method:ident, $list:expr $(, $extra_args:expr)*) => {
        for elem in $list {
            $visitor.$method(elem $(, $extra_args)*)
        }
    };
}
/// Visit the identifier contained in `opt_ident`, doing nothing for `None`.
pub fn walk_opt_ident<V: Visitor>(visitor: &mut V, opt_ident: &Option<Ident>) {
    match *opt_ident {
        Some(ref ident) => visitor.visit_ident(ident),
        None => {}
    }
}
// Visit the defined lifetime first, then its outlives-bounds.
pub fn walk_lifetime_def<V: Visitor>(visitor: &mut V, lifetime_def: &LifetimeDef) {
    visitor.visit_lifetime(&lifetime_def.lifetime);
    walk_list!(visitor, visit_lifetime, &lifetime_def.bounds);
}
// Visit a `for<'a> Trait` reference: binder lifetimes, then the trait
// path. The bound modifier is accepted but not traversed (it has no
// children).
pub fn walk_poly_trait_ref<V>(visitor: &mut V, trait_ref: &PolyTraitRef, _: &TraitBoundModifier)
    where V: Visitor
{
    walk_list!(visitor, visit_lifetime_def, &trait_ref.bound_lifetimes);
    visitor.visit_path(&trait_ref.trait_ref);
}
// Visit a `#[derive]` input: name, generics, then the enum variants or
// struct body, and finally the attributes.
pub fn walk_derive_input<V: Visitor>(visitor: &mut V, derive_input: &DeriveInput) {
    visitor.visit_ident(&derive_input.ident);
    visitor.visit_generics(&derive_input.generics);
    match derive_input.body {
        Body::Enum(ref variants) => {
            walk_list!(visitor, visit_variant, variants, &derive_input.generics);
        }
        Body::Struct(ref variant_data) => {
            visitor.visit_variant_data(variant_data, &derive_input.ident, &derive_input.generics);
        }
    }
    walk_list!(visitor, visit_attribute, &derive_input.attrs);
}
// Visit one enum variant: its name, field data, then attributes.
pub fn walk_variant<V>(visitor: &mut V, variant: &Variant, generics: &Generics)
    where V: Visitor
{
    visitor.visit_ident(&variant.ident);
    visitor.visit_variant_data(&variant.data, &variant.ident, generics);
    walk_list!(visitor, visit_attribute, &variant.attrs);
}
// Default traversal of a type node, dispatching on every `Ty` variant.
pub fn walk_ty<V: Visitor>(visitor: &mut V, ty: &Ty) {
    match *ty {
        Ty::Slice(ref inner) |
        Ty::Paren(ref inner) => visitor.visit_ty(inner),
        Ty::Ptr(ref mutable_type) => visitor.visit_ty(&mutable_type.ty),
        Ty::Rptr(ref opt_lifetime, ref mutable_type) => {
            // `opt_lifetime` is an Option, which iterates 0 or 1 times.
            walk_list!(visitor, visit_lifetime, opt_lifetime);
            visitor.visit_ty(&mutable_type.ty)
        }
        // Leaf variants with nothing to traverse.
        Ty::Never | Ty::Infer => {}
        Ty::Tup(ref tuple_element_types) => {
            walk_list!(visitor, visit_ty, tuple_element_types);
        }
        Ty::BareFn(ref bare_fn) => {
            walk_list!(visitor, visit_lifetime_def, &bare_fn.lifetimes);
            for argument in &bare_fn.inputs {
                walk_opt_ident(visitor, &argument.name);
                visitor.visit_ty(&argument.ty)
            }
            visitor.visit_fn_ret_ty(&bare_fn.output)
        }
        Ty::Path(ref maybe_qself, ref path) => {
            // The qualified-self type, if any, is visited before the path.
            if let Some(ref qself) = *maybe_qself {
                visitor.visit_ty(&qself.ty);
            }
            visitor.visit_path(path);
        }
        Ty::Array(ref inner, ref len) => {
            visitor.visit_ty(inner);
            visitor.visit_const_expr(len);
        }
        Ty::TraitObject(ref bounds) |
        Ty::ImplTrait(ref bounds) => {
            walk_list!(visitor, visit_ty_param_bound, bounds);
        }
        Ty::Mac(ref mac) => {
            visitor.visit_mac(mac);
        }
    }
}
// Visit each path segment in order.
pub fn walk_path<V: Visitor>(visitor: &mut V, path: &Path) {
    for segment in &path.segments {
        visitor.visit_path_segment(segment);
    }
}
// Visit a segment's identifier, then its generic parameters.
pub fn walk_path_segment<V: Visitor>(visitor: &mut V, segment: &PathSegment) {
    visitor.visit_ident(&segment.ident);
    visitor.visit_path_parameters(&segment.parameters);
}
// Traverse either form of path parameters: `<...>` or `(A, B) -> C`.
pub fn walk_path_parameters<V>(visitor: &mut V, path_parameters: &PathParameters)
    where V: Visitor
{
    match *path_parameters {
        PathParameters::AngleBracketed(ref data) => {
            // NOTE(review): types are visited before lifetimes here, the
            // reverse of their syntactic order — preserved as-is.
            walk_list!(visitor, visit_ty, &data.types);
            walk_list!(visitor, visit_lifetime, &data.lifetimes);
            walk_list!(visitor, visit_assoc_type_binding, &data.bindings);
        }
        PathParameters::Parenthesized(ref data) => {
            walk_list!(visitor, visit_ty, &data.inputs);
            // `output` is an Option, iterating at most once.
            walk_list!(visitor, visit_ty, &data.output);
        }
    }
}
pub fn walk_assoc_type_binding<V: Visitor>(visitor: &mut V, type_binding: &TypeBinding) {
visitor.visit_ident(&type_binding.ident);
visitor.visit_ty(&type_binding.ty);
}
// Dispatch on a type-parameter bound: trait bound or lifetime bound.
pub fn walk_ty_param_bound<V: Visitor>(visitor: &mut V, bound: &TyParamBound) {
    match *bound {
        TyParamBound::Trait(ref ty, ref modifier) => {
            visitor.visit_poly_trait_ref(ty, modifier);
        }
        TyParamBound::Region(ref lifetime) => {
            visitor.visit_lifetime(lifetime);
        }
    }
}
// Traverse generics: type parameters (name, bounds, default), lifetime
// definitions, then every predicate in the where-clause.
pub fn walk_generics<V: Visitor>(visitor: &mut V, generics: &Generics) {
    for param in &generics.ty_params {
        visitor.visit_ident(&param.ident);
        walk_list!(visitor, visit_ty_param_bound, &param.bounds);
        // `default` is an Option, iterating at most once.
        walk_list!(visitor, visit_ty, &param.default);
    }
    walk_list!(visitor, visit_lifetime_def, &generics.lifetimes);
    for predicate in &generics.where_clause.predicates {
        match *predicate {
            WherePredicate::BoundPredicate(WhereBoundPredicate { ref bounded_ty,
                                                                 ref bounds,
                                                                 ref bound_lifetimes,
                                                                 .. }) => {
                visitor.visit_ty(bounded_ty);
                walk_list!(visitor, visit_ty_param_bound, bounds);
                walk_list!(visitor, visit_lifetime_def, bound_lifetimes);
            }
            WherePredicate::RegionPredicate(WhereRegionPredicate { ref lifetime,
                                                                   ref bounds,
                                                                   .. }) => {
                visitor.visit_lifetime(lifetime);
                walk_list!(visitor, visit_lifetime, bounds);
            }
            WherePredicate::EqPredicate(WhereEqPredicate { ref lhs_ty, ref rhs_ty, .. }) => {
                visitor.visit_ty(lhs_ty);
                visitor.visit_ty(rhs_ty);
            }
        }
    }
}
// Visit the explicit return type; the default (no `->`) has no children.
pub fn walk_fn_ret_ty<V: Visitor>(visitor: &mut V, ret_ty: &FunctionRetTy) {
    if let FunctionRetTy::Ty(ref output_ty) = *ret_ty {
        visitor.visit_ty(output_ty)
    }
}
/// Visit every field of a struct or enum-variant body in declaration order.
pub fn walk_variant_data<V: Visitor>(visitor: &mut V, data: &VariantData) {
    for field in data.fields() {
        visitor.visit_field(field);
    }
}
// Visit a field: optional name, type, then attributes.
pub fn walk_field<V: Visitor>(visitor: &mut V, field: &Field) {
    walk_opt_ident(visitor, &field.ident);
    visitor.visit_ty(&field.ty);
    walk_list!(visitor, visit_attribute, &field.attrs);
}
// Traverse a const expression (array lengths etc.), dispatching on each
// `ConstExpr` variant.
pub fn walk_const_expr<V: Visitor>(visitor: &mut V, len: &ConstExpr) {
    match *len {
        ConstExpr::Call(ref function, ref args) => {
            visitor.visit_const_expr(function);
            walk_list!(visitor, visit_const_expr, args);
        }
        ConstExpr::Binary(_op, ref left, ref right) => {
            visitor.visit_const_expr(left);
            visitor.visit_const_expr(right);
        }
        ConstExpr::Unary(_op, ref v) => {
            visitor.visit_const_expr(v);
        }
        ConstExpr::Lit(ref lit) => {
            visitor.visit_lit(lit);
        }
        ConstExpr::Cast(ref expr, ref ty) => {
            visitor.visit_const_expr(expr);
            visitor.visit_ty(ty);
        }
        ConstExpr::Path(ref path) => {
            visitor.visit_path(path);
        }
        ConstExpr::Index(ref expr, ref index) => {
            visitor.visit_const_expr(expr);
            visitor.visit_const_expr(index);
        }
        ConstExpr::Paren(ref expr) => {
            visitor.visit_const_expr(expr);
        }
        ConstExpr::Other(ref other) => {
            // `Other` holds a full `Expr` only with the "full" feature;
            // without it the payload is an uninhabitable placeholder and
            // the walk is a no-op.
            #[cfg(feature = "full")]
            fn walk_other<V: Visitor>(visitor: &mut V, other: &Expr) {
                visitor.visit_expr(other);
            }
            #[cfg(not(feature = "full"))]
            fn walk_other<V: Visitor>(_: &mut V, _: &super::constant::Other) {}
            walk_other(visitor, other);
        }
    }
}
pub fn walk_mac<V: Visitor>(visitor: &mut V, mac: &Mac) {
visitor.visit_path(&mac.path);
}
// Visit a whole crate: attributes first, then items.
#[cfg(feature = "full")]
pub fn walk_crate<V: Visitor>(visitor: &mut V, _crate: &Crate) {
    walk_list!(visitor, visit_attribute, &_crate.attrs);
    walk_list!(visitor, visit_item, &_crate.items);
}
// Traverse one item: common name + attributes first, then per-kind
// children in source order.
#[cfg(feature = "full")]
pub fn walk_item<V: Visitor>(visitor: &mut V, item: &Item) {
    visitor.visit_ident(&item.ident);
    walk_list!(visitor, visit_attribute, &item.attrs);
    match item.node {
        ItemKind::ExternCrate(ref ident) => {
            // The optional `as rename` identifier.
            walk_opt_ident(visitor, ident);
        }
        ItemKind::Use(ref view_path) => {
            visitor.visit_view_path(view_path);
        }
        ItemKind::Static(ref ty, _, ref expr) |
        ItemKind::Const(ref ty, ref expr) => {
            visitor.visit_ty(ty);
            visitor.visit_expr(expr);
        }
        ItemKind::Fn(ref decl, _, _, _, ref generics, ref body) => {
            visitor.visit_fn_decl(decl);
            visitor.visit_generics(generics);
            walk_list!(visitor, visit_stmt, &body.stmts);
        }
        ItemKind::Mod(ref maybe_items) => {
            // `None` is a `mod foo;` declaration with no inline body.
            if let Some(ref items) = *maybe_items {
                walk_list!(visitor, visit_item, items);
            }
        }
        ItemKind::ForeignMod(ref foreign_mod) => {
            walk_list!(visitor, visit_foreign_item, &foreign_mod.items);
        }
        ItemKind::Ty(ref ty, ref generics) => {
            visitor.visit_ty(ty);
            visitor.visit_generics(generics);
        }
        ItemKind::Enum(ref variant, ref generics) => {
            walk_list!(visitor, visit_variant, variant, generics);
        }
        ItemKind::Struct(ref variant_data, ref generics) |
        ItemKind::Union(ref variant_data, ref generics) => {
            visitor.visit_variant_data(variant_data, &item.ident, generics);
        }
        ItemKind::Trait(_, ref generics, ref bounds, ref trait_items) => {
            visitor.visit_generics(generics);
            walk_list!(visitor, visit_ty_param_bound, bounds);
            walk_list!(visitor, visit_trait_item, trait_items);
        }
        ItemKind::DefaultImpl(_, ref path) => {
            visitor.visit_path(path);
        }
        ItemKind::Impl(_, _, ref generics, ref maybe_path, ref ty, ref impl_items) => {
            visitor.visit_generics(generics);
            // The trait path is present only for trait impls.
            if let Some(ref path) = *maybe_path {
                visitor.visit_path(path);
            }
            visitor.visit_ty(ty);
            walk_list!(visitor, visit_impl_item, impl_items);
        }
        ItemKind::Mac(ref mac) => visitor.visit_mac(mac),
    }
}
// Traverse one expression. Children are visited in "execution order"
// (see module docs): e.g. a `match` scrutinee before its arms, an
// assignment's LHS before its RHS as written.
#[cfg(feature = "full")]
#[cfg_attr(feature = "cargo-clippy", allow(cyclomatic_complexity))]
pub fn walk_expr<V: Visitor>(visitor: &mut V, expr: &Expr) {
    walk_list!(visitor, visit_attribute, &expr.attrs);
    match expr.node {
        ExprKind::InPlace(ref place, ref value) => {
            visitor.visit_expr(place);
            visitor.visit_expr(value);
        }
        ExprKind::Call(ref callee, ref args) => {
            visitor.visit_expr(callee);
            walk_list!(visitor, visit_expr, args);
        }
        ExprKind::MethodCall(ref name, ref ty_args, ref args) => {
            visitor.visit_ident(name);
            walk_list!(visitor, visit_ty, ty_args);
            walk_list!(visitor, visit_expr, args);
        }
        ExprKind::Array(ref exprs) |
        ExprKind::Tup(ref exprs) => {
            walk_list!(visitor, visit_expr, exprs);
        }
        ExprKind::Unary(_, ref operand) => {
            visitor.visit_expr(operand);
        }
        ExprKind::Lit(ref lit) => {
            visitor.visit_lit(lit);
        }
        ExprKind::Cast(ref expr, ref ty) |
        ExprKind::Type(ref expr, ref ty) => {
            visitor.visit_expr(expr);
            visitor.visit_ty(ty);
        }
        ExprKind::If(ref cond, ref cons, ref maybe_alt) => {
            visitor.visit_expr(cond);
            walk_list!(visitor, visit_stmt, &cons.stmts);
            if let Some(ref alt) = *maybe_alt {
                visitor.visit_expr(alt);
            }
        }
        ExprKind::IfLet(ref pat, ref cond, ref cons, ref maybe_alt) => {
            visitor.visit_pat(pat);
            visitor.visit_expr(cond);
            walk_list!(visitor, visit_stmt, &cons.stmts);
            if let Some(ref alt) = *maybe_alt {
                visitor.visit_expr(alt);
            }
        }
        ExprKind::While(ref cond, ref body, ref label) => {
            visitor.visit_expr(cond);
            walk_list!(visitor, visit_stmt, &body.stmts);
            walk_opt_ident(visitor, label);
        }
        ExprKind::WhileLet(ref pat, ref cond, ref body, ref label) => {
            visitor.visit_pat(pat);
            visitor.visit_expr(cond);
            walk_list!(visitor, visit_stmt, &body.stmts);
            walk_opt_ident(visitor, label);
        }
        ExprKind::ForLoop(ref pat, ref expr, ref body, ref label) => {
            visitor.visit_pat(pat);
            visitor.visit_expr(expr);
            walk_list!(visitor, visit_stmt, &body.stmts);
            walk_opt_ident(visitor, label);
        }
        ExprKind::Loop(ref body, ref label) => {
            walk_list!(visitor, visit_stmt, &body.stmts);
            walk_opt_ident(visitor, label);
        }
        ExprKind::Match(ref expr, ref arms) => {
            visitor.visit_expr(expr);
            // Each arm: attributes, patterns, optional guard, then body.
            for &Arm { ref attrs, ref pats, ref guard, ref body } in arms {
                walk_list!(visitor, visit_attribute, attrs);
                walk_list!(visitor, visit_pat, pats);
                if let Some(ref guard) = *guard {
                    visitor.visit_expr(guard);
                }
                visitor.visit_expr(body);
            }
        }
        ExprKind::Closure(_, ref decl, ref expr) => {
            visitor.visit_fn_decl(decl);
            visitor.visit_expr(expr);
        }
        ExprKind::Block(_, ref block) => {
            walk_list!(visitor, visit_stmt, &block.stmts);
        }
        ExprKind::Binary(_, ref lhs, ref rhs) |
        ExprKind::Assign(ref lhs, ref rhs) |
        ExprKind::AssignOp(_, ref lhs, ref rhs) => {
            visitor.visit_expr(lhs);
            visitor.visit_expr(rhs);
        }
        ExprKind::Field(ref obj, ref field) => {
            visitor.visit_expr(obj);
            visitor.visit_ident(field);
        }
        ExprKind::TupField(ref obj, _) => {
            visitor.visit_expr(obj);
        }
        ExprKind::Index(ref obj, ref idx) => {
            visitor.visit_expr(obj);
            visitor.visit_expr(idx);
        }
        ExprKind::Range(ref maybe_start, ref maybe_end, _) => {
            if let Some(ref start) = *maybe_start {
                visitor.visit_expr(start);
            }
            if let Some(ref end) = *maybe_end {
                visitor.visit_expr(end);
            }
        }
        ExprKind::Path(ref maybe_qself, ref path) => {
            if let Some(ref qself) = *maybe_qself {
                visitor.visit_ty(&qself.ty);
            }
            visitor.visit_path(path);
        }
        ExprKind::Break(ref maybe_label, ref maybe_expr) => {
            walk_opt_ident(visitor, maybe_label);
            if let Some(ref expr) = *maybe_expr {
                visitor.visit_expr(expr);
            }
        }
        ExprKind::Continue(ref maybe_label) => {
            walk_opt_ident(visitor, maybe_label);
        }
        ExprKind::Ret(ref maybe_expr) => {
            if let Some(ref expr) = *maybe_expr {
                visitor.visit_expr(expr);
            }
        }
        ExprKind::Mac(ref mac) => {
            visitor.visit_mac(mac);
        }
        ExprKind::Struct(ref path, ref fields, ref maybe_base) => {
            visitor.visit_path(path);
            for &FieldValue { ref ident, ref expr, .. } in fields {
                visitor.visit_ident(ident);
                visitor.visit_expr(expr);
            }
            // The `..base` functional-update expression, if present.
            if let Some(ref base) = *maybe_base {
                visitor.visit_expr(base);
            }
        }
        ExprKind::Repeat(ref value, ref times) => {
            visitor.visit_expr(value);
            visitor.visit_expr(times);
        }
        ExprKind::Box(ref expr) |
        ExprKind::AddrOf(_, ref expr) |
        ExprKind::Paren(ref expr) |
        ExprKind::Try(ref expr) => {
            visitor.visit_expr(expr);
        }
    }
}
// Traverse an item inside an `extern { ... }` block.
#[cfg(feature = "full")]
pub fn walk_foreign_item<V: Visitor>(visitor: &mut V, foreign_item: &ForeignItem) {
    visitor.visit_ident(&foreign_item.ident);
    walk_list!(visitor, visit_attribute, &foreign_item.attrs);
    match foreign_item.node {
        ForeignItemKind::Fn(ref decl, ref generics) => {
            visitor.visit_fn_decl(decl);
            visitor.visit_generics(generics);
        }
        ForeignItemKind::Static(ref ty, _) => {
            visitor.visit_ty(ty);
        }
    }
}
// Traverse one pattern, dispatching on each `Pat` variant.
#[cfg(feature = "full")]
pub fn walk_pat<V: Visitor>(visitor: &mut V, pat: &Pat) {
    match *pat {
        Pat::Wild => {}
        Pat::Ident(_, ref ident, ref maybe_pat) => {
            visitor.visit_ident(ident);
            // `ident @ subpattern` binding.
            if let Some(ref pat) = *maybe_pat {
                visitor.visit_pat(pat);
            }
        }
        Pat::Struct(ref path, ref field_pats, _) => {
            visitor.visit_path(path);
            for &FieldPat { ref ident, ref pat, .. } in field_pats {
                visitor.visit_ident(ident);
                visitor.visit_pat(pat);
            }
        }
        Pat::TupleStruct(ref path, ref pats, _) => {
            visitor.visit_path(path);
            walk_list!(visitor, visit_pat, pats);
        }
        Pat::Path(ref maybe_qself, ref path) => {
            if let Some(ref qself) = *maybe_qself {
                visitor.visit_ty(&qself.ty);
            }
            visitor.visit_path(path);
        }
        Pat::Tuple(ref pats, _) => {
            walk_list!(visitor, visit_pat, pats);
        }
        Pat::Box(ref pat) |
        Pat::Ref(ref pat, _) => {
            visitor.visit_pat(pat);
        }
        Pat::Lit(ref expr) => {
            visitor.visit_expr(expr);
        }
        Pat::Range(ref start, ref end) => {
            visitor.visit_expr(start);
            visitor.visit_expr(end);
        }
        Pat::Slice(ref start, ref maybe_mid, ref end) => {
            // `[start.., mid.., ..end]`: front patterns, optional middle
            // rest-binding, then tail patterns.
            walk_list!(visitor, visit_pat, start);
            if let Some(ref mid) = *maybe_mid {
                visitor.visit_pat(mid);
            }
            walk_list!(visitor, visit_pat, end);
        }
        Pat::Mac(ref mac) => {
            visitor.visit_mac(mac);
        }
    }
}
// Traverse a function signature: each argument, then the return type.
#[cfg(feature = "full")]
pub fn walk_fn_decl<V: Visitor>(visitor: &mut V, fn_decl: &FnDecl) {
    for input in &fn_decl.inputs {
        match *input {
            // `self` receivers carry no visitable children.
            FnArg::SelfRef(_, _) |
            FnArg::SelfValue(_) => {}
            FnArg::Captured(ref pat, ref ty) => {
                visitor.visit_pat(pat);
                visitor.visit_ty(ty);
            }
            FnArg::Ignored(ref ty) => {
                visitor.visit_ty(ty);
            }
        }
    }
    visitor.visit_fn_ret_ty(&fn_decl.output);
}
// Traverse one trait item (associated const/method/type or a macro).
#[cfg(feature = "full")]
pub fn walk_trait_item<V: Visitor>(visitor: &mut V, trait_item: &TraitItem) {
    visitor.visit_ident(&trait_item.ident);
    walk_list!(visitor, visit_attribute, &trait_item.attrs);
    match trait_item.node {
        TraitItemKind::Const(ref ty, ref maybe_expr) => {
            visitor.visit_ty(ty);
            // Default value is optional in a trait.
            if let Some(ref expr) = *maybe_expr {
                visitor.visit_expr(expr);
            }
        }
        TraitItemKind::Method(ref method_sig, ref maybe_block) => {
            visitor.visit_method_sig(method_sig);
            // Default method body is optional in a trait.
            if let Some(ref block) = *maybe_block {
                walk_list!(visitor, visit_stmt, &block.stmts);
            }
        }
        TraitItemKind::Type(ref bounds, ref maybe_ty) => {
            walk_list!(visitor, visit_ty_param_bound, bounds);
            if let Some(ref ty) = *maybe_ty {
                visitor.visit_ty(ty);
            }
        }
        TraitItemKind::Macro(ref mac) => {
            visitor.visit_mac(mac);
        }
    }
}
// Traverse one impl item; unlike trait items, bodies/values are mandatory.
#[cfg(feature = "full")]
pub fn walk_impl_item<V: Visitor>(visitor: &mut V, impl_item: &ImplItem) {
    visitor.visit_ident(&impl_item.ident);
    walk_list!(visitor, visit_attribute, &impl_item.attrs);
    match impl_item.node {
        ImplItemKind::Const(ref ty, ref expr) => {
            visitor.visit_ty(ty);
            visitor.visit_expr(expr);
        }
        ImplItemKind::Method(ref method_sig, ref block) => {
            visitor.visit_method_sig(method_sig);
            walk_list!(visitor, visit_stmt, &block.stmts);
        }
        ImplItemKind::Type(ref ty) => {
            visitor.visit_ty(ty);
        }
        ImplItemKind::Macro(ref mac) => {
            visitor.visit_mac(mac);
        }
    }
}
/// Visit a method signature: its declaration, then its generics.
#[cfg(feature = "full")]
pub fn walk_method_sig<V: Visitor>(visitor: &mut V, method_sig: &MethodSig) {
    let MethodSig { ref decl, ref generics, .. } = *method_sig;
    visitor.visit_fn_decl(decl);
    visitor.visit_generics(generics);
}
// Traverse one statement: local binding, nested item, expression
// (with or without semicolon), or a statement macro.
#[cfg(feature = "full")]
pub fn walk_stmt<V: Visitor>(visitor: &mut V, stmt: &Stmt) {
    match *stmt {
        Stmt::Local(ref local) => {
            visitor.visit_local(local);
        }
        Stmt::Item(ref item) => {
            visitor.visit_item(item);
        }
        Stmt::Expr(ref expr) |
        Stmt::Semi(ref expr) => {
            visitor.visit_expr(expr);
        }
        Stmt::Mac(ref details) => {
            // The boxed tuple is (macro, delimiter-style, attributes);
            // the style has no children to visit.
            let (ref mac, _, ref attrs) = **details;
            visitor.visit_mac(mac);
            walk_list!(visitor, visit_attribute, attrs);
        }
    }
}
// Traverse a `let` binding: pattern, optional type ascription, optional
// initializer, then attributes.
#[cfg(feature = "full")]
pub fn walk_local<V: Visitor>(visitor: &mut V, local: &Local) {
    visitor.visit_pat(&local.pat);
    if let Some(ref ty) = local.ty {
        visitor.visit_ty(ty);
    }
    if let Some(ref init) = local.init {
        visitor.visit_expr(init);
    }
    walk_list!(visitor, visit_attribute, &local.attrs);
}
// Traverse a `use` path: `use a::b [as c]`, `use a::*`, or `use a::{...}`.
#[cfg(feature = "full")]
pub fn walk_view_path<V: Visitor>(visitor: &mut V, view_path: &ViewPath) {
    match *view_path {
        ViewPath::Simple(ref path, ref maybe_ident) => {
            visitor.visit_path(path);
            // Optional `as rename`.
            walk_opt_ident(visitor, maybe_ident);
        }
        ViewPath::Glob(ref path) => {
            visitor.visit_path(path);
        }
        ViewPath::List(ref path, ref items) => {
            visitor.visit_path(path);
            for &PathListItem { ref name, ref rename } in items {
                visitor.visit_ident(name);
                walk_opt_ident(visitor, rename);
            }
        }
    }
}

1
third_party/rust/synom/.cargo-checksum.json поставляемый
Просмотреть файл

@ -1 +0,0 @@
{"files":{"Cargo.toml":"29e0c995838581264edd9230cb79799698f7677c0384c6c84f61beadcbc42917","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","README.md":"b2d43372ff5db084b4f2ef5178e1fbdba83e0f05a9cfc298f188cc130e8de794","src/helper.rs":"010d2b18a73eabdd4045e254910df90bb7f6314c63b2d2b6f1b9d908fa2d2cd6","src/lib.rs":"93c66afd80f9806ddac92b9d3a6382fad34c6b52cb3a8d37ae1067ae1ed5bec9","src/space.rs":"6fe05780dd47ed9d264bb436cbccf385f86bafc4debdd39bf250099b8210f660"},"package":"a393066ed9010ebaed60b9eafa373d4b1baac186dd7e008555b0f702b51945b6"}

20
third_party/rust/synom/Cargo.toml поставляемый
Просмотреть файл

@ -1,20 +0,0 @@
[package]
name = "synom"
version = "0.11.3"
authors = ["David Tolnay <dtolnay@gmail.com>"]
license = "MIT/Apache-2.0"
description = "Stripped-down Nom parser used by Syn"
repository = "https://github.com/dtolnay/syn"
documentation = "https://docs.rs/synom/"
categories = ["development-tools::procedural-macro-helpers"]
readme = "../README.md"
include = ["Cargo.toml", "src/**/*.rs", "README.md", "LICENSE-APACHE", "LICENSE-MIT"]
[dependencies]
unicode-xid = "0.0.4"
[dev-dependencies.syn]
version = "0.11"
path = ".."
features = ["parsing", "full"]
default-features = false

201
third_party/rust/synom/LICENSE-APACHE поставляемый
Просмотреть файл

@ -1,201 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

25
third_party/rust/synom/LICENSE-MIT поставляемый
Просмотреть файл

@ -1,25 +0,0 @@
Copyright (c) 2016 The Rust Project Developers
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

199
third_party/rust/synom/README.md поставляемый
Просмотреть файл

@ -1,199 +0,0 @@
Nom parser for Rust source code
===============================
[![Build Status](https://api.travis-ci.org/dtolnay/syn.svg?branch=master)](https://travis-ci.org/dtolnay/syn)
[![Latest Version](https://img.shields.io/crates/v/syn.svg)](https://crates.io/crates/syn)
[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://dtolnay.github.io/syn/syn/)
Parse Rust source code without a Syntex dependency, intended for use with
[Macros 1.1](https://github.com/rust-lang/rfcs/blob/master/text/1681-macros-1.1.md).
Designed for fast compile time.
- Compile time for `syn` (from scratch including all dependencies): **6 seconds**
- Compile time for the `syntex`/`quasi`/`aster` stack: **60+ seconds**
If you get stuck with Macros 1.1 I am happy to provide help even if the issue is
not related to syn. Please file a ticket in this repo.
## Usage with Macros 1.1
```toml
[dependencies]
syn = "0.11"
quote = "0.3"
[lib]
proc-macro = true
```
```rust
extern crate proc_macro;
use proc_macro::TokenStream;
extern crate syn;
#[macro_use]
extern crate quote;
#[proc_macro_derive(MyMacro)]
pub fn my_macro(input: TokenStream) -> TokenStream {
let source = input.to_string();
// Parse the string representation into a syntax tree
let ast = syn::parse_derive_input(&source).unwrap();
// Build the output, possibly using quasi-quotation
let expanded = quote! {
// ...
};
// Parse back to a token stream and return it
expanded.parse().unwrap()
}
```
## Complete example
Suppose we have the following simple trait which returns the number of fields in
a struct:
```rust
trait NumFields {
fn num_fields() -> usize;
}
```
A complete Macros 1.1 implementation of `#[derive(NumFields)]` based on `syn`
and [`quote`](https://github.com/dtolnay/quote) looks like this:
```rust
extern crate proc_macro;
use proc_macro::TokenStream;
extern crate syn;
#[macro_use]
extern crate quote;
#[proc_macro_derive(NumFields)]
pub fn num_fields(input: TokenStream) -> TokenStream {
let source = input.to_string();
// Parse the string representation into a syntax tree
let ast = syn::parse_derive_input(&source).unwrap();
// Build the output
let expanded = expand_num_fields(&ast);
// Return the generated impl as a TokenStream
expanded.parse().unwrap()
}
fn expand_num_fields(ast: &syn::DeriveInput) -> quote::Tokens {
let n = match ast.body {
syn::Body::Struct(ref data) => data.fields().len(),
syn::Body::Enum(_) => panic!("#[derive(NumFields)] can only be used with structs"),
};
// Used in the quasi-quotation below as `#name`
let name = &ast.ident;
// Helper is provided for handling complex generic types correctly and effortlessly
let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl();
quote! {
// The generated impl
impl #impl_generics ::mycrate::NumFields for #name #ty_generics #where_clause {
fn num_fields() -> usize {
#n
}
}
}
}
```
## Testing
Macros 1.1 has a restriction that your proc-macro crate must export nothing but
`proc_macro_derive` functions, and also `proc_macro_derive` procedural macros
cannot be used from the same crate in which they are defined. These restrictions
may be lifted in the future but for now they make writing tests a bit trickier
than for other types of code.
In particular, you will not be able to write test functions like `#[test] fn
it_works() { ... }` in line with your code. Instead, either put tests in a
[`tests` directory](https://doc.rust-lang.org/book/testing.html#the-tests-directory)
or in a separate crate entirely.
Additionally, if your procedural macro implements a particular trait, that trait
must be defined in a separate crate from the procedural macro.
As a concrete example, suppose your procedural macro crate is called `my_derive`
and it implements a trait called `my_crate::MyTrait`. Your unit tests for the
procedural macro can go in `my_derive/tests/test.rs` or into a separate crate
`my_tests/tests/test.rs`. Either way the test would look something like this:
```rust
#[macro_use]
extern crate my_derive;
extern crate my_crate;
use my_crate::MyTrait;
#[test]
fn it_works() {
#[derive(MyTrait)]
struct S { /* ... */ }
/* test the thing */
}
```
## Debugging
When developing a procedural macro it can be helpful to look at what the
generated code looks like. Use `cargo rustc -- -Zunstable-options
--pretty=expanded` or the
[`cargo expand`](https://github.com/dtolnay/cargo-expand) subcommand.
To show the expanded code for some crate that uses your procedural macro, run
`cargo expand` from that crate. To show the expanded code for one of your own
test cases, run `cargo expand --test the_test_case` where the last argument is
the name of the test file without the `.rs` extension.
This write-up by Brandon W Maister discusses debugging in more detail:
[Debugging Rust's new Custom Derive
system](https://quodlibetor.github.io/posts/debugging-rusts-new-custom-derive-system/).
## Optional features
Syn puts a lot of functionality behind optional features in order to optimize
compile time for the most common use cases. These are the available features and
their effect on compile time. Dependencies are included in the compile times.
Features | Compile time | Functionality
--- | --- | ---
*(none)* | 3 sec | The data structures representing the AST of Rust structs, enums, and types.
parsing | 6 sec | Parsing Rust source code containing structs and enums into an AST.
printing | 4 sec | Printing an AST of structs and enums as Rust source code.
**parsing, printing** | **6 sec** | **This is the default.** Parsing and printing of Rust structs and enums. This is typically what you want for implementing Macros 1.1 custom derives.
full | 4 sec | The data structures representing the full AST of all possible Rust code.
full, parsing | 9 sec | Parsing any valid Rust source code to an AST.
full, printing | 6 sec | Turning an AST into Rust source code.
full, parsing, printing | 11 sec | Parsing and printing any Rust syntax.
## License
Licensed under either of
* Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
* MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
at your option.
### Contribution
Unless you explicitly state otherwise, any contribution intentionally submitted
for inclusion in this crate by you, as defined in the Apache-2.0 license, shall
be dual licensed as above, without any additional terms or conditions.

543
third_party/rust/synom/src/helper.rs поставляемый
Просмотреть файл

@ -1,543 +0,0 @@
use IResult;
use space::{skip_whitespace, word_break};
/// Parse a piece of punctuation like "+" or "+=".
///
/// See also `keyword!` for parsing keywords, which are subtly different from
/// punctuation.
///
/// - **Syntax:** `punct!("...")`
/// - **Output:** `&str`
///
/// ```rust
/// extern crate syn;
/// #[macro_use] extern crate synom;
///
/// // Parse zero or more bangs.
/// named!(many_bangs -> Vec<&str>,
/// many0!(punct!("!"))
/// );
///
/// fn main() {
/// let input = "!! !";
/// let parsed = many_bangs(input).expect("bangs");
/// assert_eq!(parsed, ["!", "!", "!"]);
/// }
/// ```
#[macro_export]
macro_rules! punct {
($i:expr, $punct:expr) => {
$crate::helper::punct($i, $punct)
};
}
// Not public API.
#[doc(hidden)]
pub fn punct<'a>(input: &'a str, token: &'static str) -> IResult<&'a str, &'a str> {
let input = skip_whitespace(input);
if input.starts_with(token) {
IResult::Done(&input[token.len()..], token)
} else {
IResult::Error
}
}
/// Parse a keyword like "fn" or "struct".
///
/// See also `punct!` for parsing punctuation, which are subtly different from
/// keywords.
///
/// - **Syntax:** `keyword!("...")`
/// - **Output:** `&str`
///
/// ```rust
/// extern crate syn;
/// #[macro_use] extern crate synom;
///
/// use synom::IResult;
///
/// // Parse zero or more "bang" keywords.
/// named!(many_bangs -> Vec<&str>,
/// terminated!(
/// many0!(keyword!("bang")),
/// punct!(";")
/// )
/// );
///
/// fn main() {
/// let input = "bang bang bang;";
/// let parsed = many_bangs(input).expect("bangs");
/// assert_eq!(parsed, ["bang", "bang", "bang"]);
///
/// let input = "bangbang;";
/// let err = many_bangs(input);
/// assert_eq!(err, IResult::Error);
/// }
/// ```
#[macro_export]
macro_rules! keyword {
($i:expr, $keyword:expr) => {
$crate::helper::keyword($i, $keyword)
};
}
// Not public API.
#[doc(hidden)]
pub fn keyword<'a>(input: &'a str, token: &'static str) -> IResult<&'a str, &'a str> {
match punct(input, token) {
IResult::Done(rest, _) => {
match word_break(rest) {
IResult::Done(_, _) => IResult::Done(rest, token),
IResult::Error => IResult::Error,
}
}
IResult::Error => IResult::Error,
}
}
/// Turn a failed parse into `None` and a successful parse into `Some`.
///
/// - **Syntax:** `option!(THING)`
/// - **Output:** `Option<THING>`
///
/// ```rust
/// extern crate syn;
/// #[macro_use] extern crate synom;
///
/// named!(maybe_bang -> Option<&str>, option!(punct!("!")));
///
/// fn main() {
/// let input = "!";
/// let parsed = maybe_bang(input).expect("maybe bang");
/// assert_eq!(parsed, Some("!"));
///
/// let input = "";
/// let parsed = maybe_bang(input).expect("maybe bang");
/// assert_eq!(parsed, None);
/// }
/// ```
#[macro_export]
macro_rules! option {
($i:expr, $submac:ident!( $($args:tt)* )) => {
match $submac!($i, $($args)*) {
$crate::IResult::Done(i, o) => $crate::IResult::Done(i, Some(o)),
$crate::IResult::Error => $crate::IResult::Done($i, None),
}
};
($i:expr, $f:expr) => {
option!($i, call!($f));
};
}
/// Turn a failed parse into an empty vector. The argument parser must itself
/// return a vector.
///
/// This is often more convenient than `option!(...)` when the argument produces
/// a vector.
///
/// - **Syntax:** `opt_vec!(THING)`
/// - **Output:** `THING`, which must be `Vec<T>`
///
/// ```rust
/// extern crate syn;
/// #[macro_use] extern crate synom;
///
/// use syn::{Lifetime, Ty};
/// use syn::parse::{lifetime, ty};
///
/// named!(bound_lifetimes -> (Vec<Lifetime>, Ty), tuple!(
/// opt_vec!(do_parse!(
/// keyword!("for") >>
/// punct!("<") >>
/// lifetimes: terminated_list!(punct!(","), lifetime) >>
/// punct!(">") >>
/// (lifetimes)
/// )),
/// ty
/// ));
///
/// fn main() {
/// let input = "for<'a, 'b> fn(&'a A) -> &'b B";
/// let parsed = bound_lifetimes(input).expect("bound lifetimes");
/// assert_eq!(parsed.0, [Lifetime::new("'a"), Lifetime::new("'b")]);
/// println!("{:?}", parsed);
///
/// let input = "From<String>";
/// let parsed = bound_lifetimes(input).expect("bound lifetimes");
/// assert!(parsed.0.is_empty());
/// println!("{:?}", parsed);
/// }
/// ```
#[macro_export]
macro_rules! opt_vec {
($i:expr, $submac:ident!( $($args:tt)* )) => {
match $submac!($i, $($args)*) {
$crate::IResult::Done(i, o) => $crate::IResult::Done(i, o),
$crate::IResult::Error => $crate::IResult::Done($i, Vec::new()),
}
};
}
/// Parses nothing and always succeeds.
///
/// This can be useful as a fallthrough case in `alt!`.
///
/// - **Syntax:** `epsilon!()`
/// - **Output:** `()`
///
/// ```rust
/// extern crate syn;
/// #[macro_use] extern crate synom;
///
/// use syn::Mutability;
///
/// named!(mutability -> Mutability, alt!(
/// keyword!("mut") => { |_| Mutability::Mutable }
/// |
/// epsilon!() => { |_| Mutability::Immutable }
/// ));
///
/// fn main() {
/// let input = "mut";
/// let parsed = mutability(input).expect("mutability");
/// assert_eq!(parsed, Mutability::Mutable);
///
/// let input = "";
/// let parsed = mutability(input).expect("mutability");
/// assert_eq!(parsed, Mutability::Immutable);
/// }
/// ```
#[macro_export]
macro_rules! epsilon {
($i:expr,) => {
$crate::IResult::Done($i, ())
};
}
/// Run a parser, binding the result to a name, and then evaluating an
/// expression.
///
/// Discards the result of the expression and parser.
///
/// - **Syntax:** `tap!(NAME : THING => EXPR)`
/// - **Output:** `()`
///
/// ```rust
/// extern crate syn;
/// #[macro_use] extern crate synom;
///
/// use syn::{Expr, ExprKind};
/// use syn::parse::expr;
///
/// named!(expr_with_arrow_call -> Expr, do_parse!(
/// mut e: expr >>
/// many0!(tap!(arg: tuple!(punct!("=>"), expr) => {
/// e = Expr {
/// node: ExprKind::Call(Box::new(e), vec![arg.1]),
/// attrs: Vec::new(),
/// };
/// })) >>
/// (e)
/// ));
///
/// fn main() {
/// let input = "something => argument1 => argument2";
///
/// let parsed = expr_with_arrow_call(input).expect("expr with arrow call");
///
/// println!("{:?}", parsed);
/// }
/// ```
#[doc(hidden)]
#[macro_export]
macro_rules! tap {
($i:expr, $name:ident : $submac:ident!( $($args:tt)* ) => $e:expr) => {
match $submac!($i, $($args)*) {
$crate::IResult::Done(i, o) => {
let $name = o;
$e;
$crate::IResult::Done(i, ())
}
$crate::IResult::Error => $crate::IResult::Error,
}
};
($i:expr, $name:ident : $f:expr => $e:expr) => {
tap!($i, $name: call!($f) => $e);
};
}
/// Zero or more values separated by some separator. Does not allow a trailing
/// seperator.
///
/// - **Syntax:** `separated_list!(punct!("..."), THING)`
/// - **Output:** `Vec<THING>`
///
/// You may also be looking for:
///
/// - `separated_nonempty_list!` - one or more values
/// - `terminated_list!` - zero or more, allows trailing separator
/// - `many0!` - zero or more, no separator
///
/// ```rust
/// extern crate syn;
/// #[macro_use] extern crate synom;
///
/// use syn::Expr;
/// use syn::parse::expr;
///
/// named!(expr_list -> Vec<Expr>,
/// separated_list!(punct!(","), expr)
/// );
///
/// fn main() {
/// let input = "1 + 1, things, Construct { this: thing }";
///
/// let parsed = expr_list(input).expect("expr list");
/// assert_eq!(parsed.len(), 3);
/// }
/// ```
///
/// ```rust
/// extern crate syn;
/// #[macro_use] extern crate synom;
///
/// use syn::Ident;
/// use syn::parse::ident;
///
/// named!(run_on -> Vec<Ident>,
/// terminated!(
/// separated_list!(keyword!("and"), preceded!(punct!("$"), ident)),
/// punct!("...")
/// )
/// );
///
/// fn main() {
/// let input = "$expr and $ident and $pat ...";
///
/// let parsed = run_on(input).expect("run-on sentence");
/// assert_eq!(parsed.len(), 3);
/// assert_eq!(parsed[0], "expr");
/// assert_eq!(parsed[1], "ident");
/// assert_eq!(parsed[2], "pat");
/// }
/// ```
#[macro_export]
macro_rules! separated_list {
// Try to use this branch if possible - makes a difference in compile time.
($i:expr, punct!($sep:expr), $f:ident) => {
$crate::helper::separated_list($i, $sep, $f, false)
};
($i:expr, $sepmac:ident!( $($separgs:tt)* ), $fmac:ident!( $($fargs:tt)* )) => {{
let mut res = ::std::vec::Vec::new();
let mut input = $i;
// get the first element
match $fmac!(input, $($fargs)*) {
$crate::IResult::Error => $crate::IResult::Done(input, res),
$crate::IResult::Done(i, o) => {
if i.len() == input.len() {
$crate::IResult::Error
} else {
res.push(o);
input = i;
// get the separator first
while let $crate::IResult::Done(i2, _) = $sepmac!(input, $($separgs)*) {
if i2.len() == input.len() {
break;
}
// get the element next
if let $crate::IResult::Done(i3, o3) = $fmac!(i2, $($fargs)*) {
if i3.len() == i2.len() {
break;
}
res.push(o3);
input = i3;
} else {
break;
}
}
$crate::IResult::Done(input, res)
}
}
}
}};
($i:expr, $sepmac:ident!( $($separgs:tt)* ), $f:expr) => {
separated_list!($i, $sepmac!($($separgs)*), call!($f))
};
($i:expr, $sep:expr, $fmac:ident!( $($fargs:tt)* )) => {
separated_list!($i, call!($sep), $fmac!($($fargs)*))
};
($i:expr, $sep:expr, $f:expr) => {
separated_list!($i, call!($sep), call!($f))
};
}
/// Zero or more values separated by some separator. A trailing separator is
/// allowed.
///
/// - **Syntax:** `terminated_list!(punct!("..."), THING)`
/// - **Output:** `Vec<THING>`
///
/// You may also be looking for:
///
/// - `separated_list!` - zero or more, allows trailing separator
/// - `separated_nonempty_list!` - one or more values
/// - `many0!` - zero or more, no separator
///
/// ```rust
/// extern crate syn;
/// #[macro_use] extern crate synom;
///
/// use syn::Expr;
/// use syn::parse::expr;
///
/// named!(expr_list -> Vec<Expr>,
/// terminated_list!(punct!(","), expr)
/// );
///
/// fn main() {
/// let input = "1 + 1, things, Construct { this: thing },";
///
/// let parsed = expr_list(input).expect("expr list");
/// assert_eq!(parsed.len(), 3);
/// }
/// ```
///
/// ```rust
/// extern crate syn;
/// #[macro_use] extern crate synom;
///
/// use syn::Ident;
/// use syn::parse::ident;
///
/// named!(run_on -> Vec<Ident>,
/// terminated!(
/// terminated_list!(keyword!("and"), preceded!(punct!("$"), ident)),
/// punct!("...")
/// )
/// );
///
/// fn main() {
/// let input = "$expr and $ident and $pat and ...";
///
/// let parsed = run_on(input).expect("run-on sentence");
/// assert_eq!(parsed.len(), 3);
/// assert_eq!(parsed[0], "expr");
/// assert_eq!(parsed[1], "ident");
/// assert_eq!(parsed[2], "pat");
/// }
/// ```
#[macro_export]
macro_rules! terminated_list {
// Try to use this branch if possible - makes a difference in compile time.
($i:expr, punct!($sep:expr), $f:ident) => {
$crate::helper::separated_list($i, $sep, $f, true)
};
($i:expr, $sepmac:ident!( $($separgs:tt)* ), $fmac:ident!( $($fargs:tt)* )) => {{
let mut res = ::std::vec::Vec::new();
let mut input = $i;
// get the first element
match $fmac!(input, $($fargs)*) {
$crate::IResult::Error => $crate::IResult::Done(input, res),
$crate::IResult::Done(i, o) => {
if i.len() == input.len() {
$crate::IResult::Error
} else {
res.push(o);
input = i;
// get the separator first
while let $crate::IResult::Done(i2, _) = $sepmac!(input, $($separgs)*) {
if i2.len() == input.len() {
break;
}
// get the element next
if let $crate::IResult::Done(i3, o3) = $fmac!(i2, $($fargs)*) {
if i3.len() == i2.len() {
break;
}
res.push(o3);
input = i3;
} else {
break;
}
}
if let $crate::IResult::Done(after, _) = $sepmac!(input, $($separgs)*) {
input = after;
}
$crate::IResult::Done(input, res)
}
}
}
}};
($i:expr, $sepmac:ident!( $($separgs:tt)* ), $f:expr) => {
terminated_list!($i, $sepmac!($($separgs)*), call!($f))
};
($i:expr, $sep:expr, $fmac:ident!( $($fargs:tt)* )) => {
terminated_list!($i, call!($sep), $fmac!($($fargs)*))
};
($i:expr, $sep:expr, $f:expr) => {
terminated_list!($i, call!($sep), call!($f))
};
}
// Not public API.
#[doc(hidden)]
pub fn separated_list<'a, T>(mut input: &'a str,
sep: &'static str,
f: fn(&'a str) -> IResult<&'a str, T>,
terminated: bool)
-> IResult<&'a str, Vec<T>> {
let mut res = Vec::new();
// get the first element
match f(input) {
IResult::Error => IResult::Done(input, res),
IResult::Done(i, o) => {
if i.len() == input.len() {
IResult::Error
} else {
res.push(o);
input = i;
// get the separator first
while let IResult::Done(i2, _) = punct(input, sep) {
if i2.len() == input.len() {
break;
}
// get the element next
if let IResult::Done(i3, o3) = f(i2) {
if i3.len() == i2.len() {
break;
}
res.push(o3);
input = i3;
} else {
break;
}
}
if terminated {
if let IResult::Done(after, _) = punct(input, sep) {
input = after;
}
}
IResult::Done(input, res)
}
}
}
}

1225
third_party/rust/synom/src/lib.rs поставляемый

Разница между файлами не показана из-за своего большого размера Загрузить разницу

99
third_party/rust/synom/src/space.rs поставляемый
Просмотреть файл

@ -1,99 +0,0 @@
use IResult;
use unicode_xid::UnicodeXID;
pub fn whitespace(input: &str) -> IResult<&str, ()> {
if input.is_empty() {
return IResult::Error;
}
let bytes = input.as_bytes();
let mut i = 0;
while i < bytes.len() {
let s = &input[i..];
if bytes[i] == b'/' {
if s.starts_with("//") && (!s.starts_with("///") || s.starts_with("////")) &&
!s.starts_with("//!") {
if let Some(len) = s.find('\n') {
i += len + 1;
continue;
}
break;
} else if s.starts_with("/*") && (!s.starts_with("/**") || s.starts_with("/***")) &&
!s.starts_with("/*!") {
match block_comment(s) {
IResult::Done(_, com) => {
i += com.len();
continue;
}
IResult::Error => {
return IResult::Error;
}
}
}
}
match bytes[i] {
b' ' | 0x09...0x0d => {
i += 1;
continue;
}
b if b <= 0x7f => {}
_ => {
let ch = s.chars().next().unwrap();
if is_whitespace(ch) {
i += ch.len_utf8();
continue;
}
}
}
return if i > 0 {
IResult::Done(s, ())
} else {
IResult::Error
};
}
IResult::Done("", ())
}
/// Parse a (possibly nested) `/* … */` block comment at the start of `input`.
///
/// On success returns `IResult::Done(rest, comment)` where `comment` is the
/// full comment text including both delimiters.  Returns `IResult::Error` if
/// `input` does not start with `/*` or the comment is unterminated.
pub fn block_comment(input: &str) -> IResult<&str, &str> {
    if !input.starts_with("/*") {
        return IResult::Error;
    }

    // Rust block comments nest, so we must track nesting depth; the opening
    // `/*` is counted on the first loop iteration.
    let mut depth = 0;
    let bytes = input.as_bytes();
    let mut i = 0;
    // Stop one byte early: every test below also reads bytes[i + 1].
    let upper = bytes.len() - 1;
    while i < upper {
        if bytes[i] == b'/' && bytes[i + 1] == b'*' {
            depth += 1;
            i += 1; // eat '*'
        } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
            depth -= 1;
            if depth == 0 {
                // Outermost comment closed; split just past the "*/".
                return IResult::Done(&input[i + 2..], &input[..i + 2]);
            }
            i += 1; // eat '/'
        }
        i += 1;
    }
    // Ran out of input before the comment closed.
    IResult::Error
}
pub fn word_break(input: &str) -> IResult<&str, ()> {
match input.chars().next() {
Some(ch) if UnicodeXID::is_xid_continue(ch) => IResult::Error,
Some(_) | None => IResult::Done(input, ()),
}
}
/// Infallible variant of `whitespace`: returns the remaining input after any
/// leading whitespace/comments, or the input unchanged if none was found.
pub fn skip_whitespace(input: &str) -> &str {
    if let IResult::Done(rest, _) = whitespace(input) {
        rest
    } else {
        input
    }
}
/// Character-level whitespace test matching rustc's lexer: standard Unicode
/// whitespace plus the directionality marks.
fn is_whitespace(ch: char) -> bool {
    match ch {
        // Rust treats LRM (U+200E) and RLM (U+200F) as whitespace too.
        '\u{200e}' | '\u{200f}' => true,
        _ => ch.is_whitespace(),
    }
}

Просмотреть файл

@ -1 +0,0 @@
{"files":{"COPYRIGHT":"23860c2a7b5d96b21569afedf033469bab9fe14a1b24a35068b8641c578ce24d","Cargo.toml":"1be08526c07c40136071faf603a382456ad1a6bafbb45d9ace01c52b50a9a55c","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"7b63ecd5f1902af1b63729947373683c32745c16a10e8e6292e2e2dcd7e90ae0","README.md":"67998486b32f4fe46abbbaa411b92528750e7f0e22452dc8a5b95d87d80fde75","scripts/unicode.py":"762eea92dd51238c6bf877570bde1149932ba15cf87be1618fc21cd53e941733","src/lib.rs":"4a89fadf452ae7c53536eaa4496f951a3153f8189dd1cbc532648731d30f0b11","src/tables.rs":"0643459b6ebeeed83aecd7604f0ea29c06bea7ce6c1cd9acd4988d27ace1ec53","src/tests.rs":"35a459382e190197e7b9a78832ae79f310b48a02a5b4227bf9bbc89d46c8deac"},"package":"8c1f860d7d29cf02cb2f3f359fd35991af3d30bac52c57d265a3c461074cb4dc"}

Просмотреть файл

@ -1,7 +0,0 @@
Licensed under the Apache License, Version 2.0
<LICENSE-APACHE or
http://www.apache.org/licenses/LICENSE-2.0> or the MIT
license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
at your option. All files in the project carrying such
notice may not be copied, modified, or distributed except
according to those terms.

26
third_party/rust/unicode-xid-0.0.4/Cargo.toml поставляемый
Просмотреть файл

@ -1,26 +0,0 @@
[package]
name = "unicode-xid"
version = "0.0.4"
authors = ["erick.tryzelaar <erick.tryzelaar@gmail.com>",
"kwantam <kwantam@gmail.com>",
]
homepage = "https://github.com/unicode-rs/unicode-xid"
repository = "https://github.com/unicode-rs/unicode-xid"
documentation = "https://unicode-rs.github.io/unicode-xid"
license = "MIT/Apache-2.0"
keywords = ["text", "unicode", "xid"]
readme = "README.md"
description = """
Determine whether characters have the XID_Start
or XID_Continue properties according to
Unicode Standard Annex #31.
"""
exclude = [ "target/*", "Cargo.lock" ]
[features]
default = []
no_std = []
bench = []

Просмотреть файл

@ -1,201 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

Просмотреть файл

@ -1,25 +0,0 @@
Copyright (c) 2015 The Rust Project Developers
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

34
third_party/rust/unicode-xid-0.0.4/README.md поставляемый
Просмотреть файл

@ -1,34 +0,0 @@
# unicode-xid
Determine if a `char` is a valid identifier for a parser and/or lexer according to
[Unicode Standard Annex #31](http://www.unicode.org/reports/tr31/) rules.
[![Build Status](https://travis-ci.org/unicode-rs/unicode-xid.svg)](https://travis-ci.org/unicode-rs/unicode-xid)
[Documentation](https://unicode-rs.github.io/unicode-xid/unicode_xid/index.html)
```rust
extern crate unicode_xid;
use unicode_xid::UnicodeXID;
fn main() {
let ch = 'a';
println!("Is {} a valid start of an identifier? {}", ch, UnicodeXID::is_xid_start(ch));
}
```
# features
unicode-xid supports a `no_std` feature. This eliminates dependence
on std, and instead uses equivalent functions from core.
# crates.io
You can use this package in your project by adding the following
to your `Cargo.toml`:
```toml
[dependencies]
unicode-xid = "0.0.4"
```

Просмотреть файл

@ -1,187 +0,0 @@
#!/usr/bin/env python
#
# Copyright 2011-2015 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
# This script uses the following Unicode tables:
# - DerivedCoreProperties.txt
# - ReadMe.txt
#
# Since this should not require frequent updates, we just store this
# out-of-line and check the unicode.rs file into git.
import fileinput, re, os, sys
preamble = '''// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// NOTE: The following code was generated by "scripts/unicode.py", do not edit directly
#![allow(missing_docs, non_upper_case_globals, non_snake_case)]
'''
def fetch(f):
    # Download Unicode data file `f` from unicode.org into the current
    # directory via curl, unless it is already present.  Exits the whole
    # process with status 1 if the file still cannot be found afterwards.
    if not os.path.exists(os.path.basename(f)):
        os.system("curl -O http://www.unicode.org/Public/UNIDATA/%s"
                  % f)

    if not os.path.exists(os.path.basename(f)):
        sys.stderr.write("cannot load %s" % f)
        exit(1)
def group_cat(cat):
    """Collapse a collection of code points into sorted inclusive ranges.

    Duplicates are discarded; each maximal run of consecutive values becomes
    one ``(lo, hi)`` tuple.  ``cat`` must be non-empty.
    """
    points = sorted(set(cat))
    ranges = []
    start = end = points.pop(0)
    for point in points:
        # sorted(set(...)) guarantees strictly increasing values.
        assert point > end, \
            "cur_end: %s, letter: %s" % (hex(end), hex(point))
        if point == end + 1:
            # Extends the current consecutive run.
            end = point
        else:
            # Gap found: close the current range, open a new one.
            ranges.append((start, end))
            start = end = point
    ranges.append((start, end))
    return ranges
def ungroup_cat(cat):
    """Expand inclusive ``(lo, hi)`` ranges back into a flat list of values."""
    values = []
    for lo, hi in cat:
        values.extend(range(lo, hi + 1))
    return values
def format_table_content(f, content, indent):
    """Write comma-separated ``content`` to file ``f``, wrapped to <98 cols.

    Every output line is prefixed with ``indent`` spaces; chunks within a
    line are joined by ", " and a trailing "," is emitted at each wrap.
    The final line is written without a trailing newline.
    """
    pad = " " * indent
    line = pad
    first = True
    for chunk in content.split(","):
        if len(line) + len(chunk) < 98:
            if first:
                line += chunk
            else:
                line += ", " + chunk
            first = False
        else:
            # Line full: flush it and start a fresh one with this chunk.
            f.write(line + ",\n")
            line = pad + chunk
    f.write(line)
def load_properties(f, interestingprops):
    # Parse a Unicode property file `f` (downloading it first if necessary)
    # into a dict mapping property name -> list of inclusive (lo, hi)
    # code-point ranges.  If `interestingprops` is non-empty, only those
    # property names are retained.
    fetch(f)
    props = {}
    # Single code point line:  XXXX ; PropName
    re1 = re.compile("^ *([0-9A-F]+) *; *(\w+)")
    # Code point range line:   XXXX..YYYY ; PropName
    re2 = re.compile("^ *([0-9A-F]+)\.\.([0-9A-F]+) *; *(\w+)")

    for line in fileinput.input(os.path.basename(f)):
        prop = None
        d_lo = 0
        d_hi = 0
        m = re1.match(line)
        if m:
            d_lo = m.group(1)
            d_hi = m.group(1)
            prop = m.group(2)
        else:
            m = re2.match(line)
            if m:
                d_lo = m.group(1)
                d_hi = m.group(2)
                prop = m.group(3)
            else:
                # Comment/blank/unrecognized line.
                continue
        if interestingprops and prop not in interestingprops:
            continue
        d_lo = int(d_lo, 16)
        d_hi = int(d_hi, 16)
        if prop not in props:
            props[prop] = []
        props[prop].append((d_lo, d_hi))

    # optimize if possible: merge adjacent/overlapping ranges.
    for prop in props:
        props[prop] = group_cat(ungroup_cat(props[prop]))

    return props
def escape_char(c):
    """Render code point ``c`` as a Rust char literal, e.g. ``'\\u{1f}'``."""
    return "'\\u{%x}'" % (c,)
def emit_bsearch_range_table(f):
    # Emit the Rust helper that binary-searches a sorted, non-overlapping
    # (lo, hi) range table for membership of a char.
    # NOTE(review): the extraction collapsed whitespace; the template body
    # below is reformatted to match the generated tables.rs — verify against
    # upstream before regenerating.
    f.write("""
fn bsearch_range_table(c: char, r: &'static [(char,char)]) -> bool {
    use core::cmp::Ordering::{Equal, Less, Greater};
    r.binary_search_by(|&(lo,hi)| {
        if lo <= c && c <= hi { Equal }
        else if hi < c { Less }
        else { Greater }
    }).is_ok()
}\n
""")
def emit_table(f, name, t_data, t_type = "&'static [(char, char)]", is_pub=True,
        pfun=lambda x: "(%s,%s)" % (escape_char(x[0]), escape_char(x[1])), is_const=True):
    # Emit one Rust table binding named `name` containing `t_data`.
    #   t_type:   Rust type annotation for the binding
    #   is_pub:   prefix the binding with `pub`
    #   pfun:     formats a single element of t_data (default: char range)
    #   is_const: emit `const` (otherwise `let`)
    pub_string = "const"
    if not is_const:
        pub_string = "let"
    if is_pub:
        pub_string = "pub " + pub_string
    f.write("    %s %s: %s = &[\n" % (pub_string, name, t_type))
    data = ""
    first = True
    for dat in t_data:
        if not first:
            data += ","
        first = False
        data += pfun(dat)
    # Wrap the element list at ~98 columns with an 8-space indent.
    format_table_content(f, data, 8)
    f.write("\n    ];\n\n")
def emit_property_module(f, mod, tbl, emit):
    # Emit a Rust module `mod` containing, for each property name in `emit`,
    # its range table (from `tbl`) and a `fn <prop>(c: char) -> bool` lookup
    # that delegates to the crate-level bsearch_range_table helper.
    f.write("pub mod %s {\n" % mod)
    for cat in sorted(emit):
        emit_table(f, "%s_table" % cat, tbl[cat])
        f.write("    pub fn %s(c: char) -> bool {\n" % cat)
        f.write("        super::bsearch_range_table(c, %s_table)\n" % cat)
        f.write("    }\n\n")
    f.write("}\n\n")
if __name__ == "__main__":
    # Regenerate tables.rs from scratch from the published Unicode data.
    r = "tables.rs"
    if os.path.exists(r):
        os.remove(r)
    with open(r, "w") as rf:
        # write the file's preamble
        rf.write(preamble)

        # download and parse all the data
        fetch("ReadMe.txt")
        with open("ReadMe.txt") as readme:
            # ReadMe.txt states which Unicode version UNIDATA currently holds.
            pattern = "for Version (\d+)\.(\d+)\.(\d+) of the Unicode"
            unicode_version = re.search(pattern, readme.read()).groups()
        rf.write("""
/// The version of [Unicode](http://www.unicode.org/)
/// that this version of unicode-xid is based on.
pub const UNICODE_VERSION: (u64, u64, u64) = (%s, %s, %s);
""" % unicode_version)
        emit_bsearch_range_table(rf)

        # XID_Start / XID_Continue back is_xid_start / is_xid_continue.
        want_derived = ["XID_Start", "XID_Continue"]
        derived = load_properties("DerivedCoreProperties.txt", want_derived)
        emit_property_module(rf, "derived_property", derived, want_derived)

87
third_party/rust/unicode-xid-0.0.4/src/lib.rs поставляемый
Просмотреть файл

@ -1,87 +0,0 @@
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Determine if a `char` is a valid identifier for a parser and/or lexer according to
//! [Unicode Standard Annex #31](http://www.unicode.org/reports/tr31/) rules.
//!
//! ```rust
//! extern crate unicode_xid;
//!
//! use unicode_xid::UnicodeXID;
//!
//! fn main() {
//! let ch = 'a';
//! println!("Is {} a valid start of an identifier? {}", ch, UnicodeXID::is_xid_start(ch));
//! }
//! ```
//!
//! # features
//!
//! unicode-xid supports a `no_std` feature. This eliminates dependence
//! on std, and instead uses equivalent functions from core.
//!
//! # crates.io
//!
//! You can use this package in your project by adding the following
//! to your `Cargo.toml`:
//!
//! ```toml
//! [dependencies]
//! unicode-xid = "0.0.4"
//! ```
#![deny(missing_docs, unsafe_code)]
#![doc(html_logo_url = "https://unicode-rs.github.io/unicode-rs_sm.png",
html_favicon_url = "https://unicode-rs.github.io/unicode-rs_sm.png")]
#![no_std]
#![cfg_attr(feature = "bench", feature(test, unicode))]
#[cfg(test)]
#[macro_use]
extern crate std;
#[cfg(feature = "bench")]
extern crate test;
use tables::derived_property;
pub use tables::UNICODE_VERSION;
mod tables;
#[cfg(test)]
mod tests;
/// Methods for determining if a character is a valid identifier character.
///
/// Implemented for `char` below; lookups are backed by generated range
/// tables in the `tables` module.
pub trait UnicodeXID {
    /// Returns whether the specified character satisfies the 'XID_Start'
    /// Unicode property.
    ///
    /// 'XID_Start' is a Unicode Derived Property specified in
    /// [UAX #31](http://unicode.org/reports/tr31/#NFKC_Modifications),
    /// mostly similar to ID_Start but modified for closure under NFKx.
    fn is_xid_start(self) -> bool;

    /// Returns whether the specified `char` satisfies the 'XID_Continue'
    /// Unicode property.
    ///
    /// 'XID_Continue' is a Unicode Derived Property specified in
    /// [UAX #31](http://unicode.org/reports/tr31/#NFKC_Modifications),
    /// mostly similar to 'ID_Continue' but modified for closure under NFKx.
    fn is_xid_continue(self) -> bool;
}
impl UnicodeXID for char {
    /// Delegates to the generated `derived_property::XID_Start` range table.
    #[inline]
    fn is_xid_start(self) -> bool {
        derived_property::XID_Start(self)
    }

    /// Delegates to the generated `derived_property::XID_Continue` range table.
    #[inline]
    fn is_xid_continue(self) -> bool {
        derived_property::XID_Continue(self)
    }
}

Просмотреть файл

@ -1,426 +0,0 @@
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// NOTE: The following code was generated by "scripts/unicode.py", do not edit directly
#![allow(missing_docs, non_upper_case_globals, non_snake_case)]
/// The version of [Unicode](http://www.unicode.org/)
/// that this version of unicode-xid is based on.
pub const UNICODE_VERSION: (u64, u64, u64) = (9, 0, 0);
/// Binary-search a sorted, non-overlapping table of inclusive `(lo, hi)`
/// char ranges; returns true iff `c` falls inside some range.
fn bsearch_range_table(c: char, r: &'static [(char, char)]) -> bool {
    use core::cmp::Ordering;
    r.binary_search_by(|&(lo, hi)| {
        // Order each range relative to `c` so the search converges on the
        // (unique) range that could contain it.
        if c < lo {
            Ordering::Greater
        } else if c > hi {
            Ordering::Less
        } else {
            Ordering::Equal
        }
    }).is_ok()
}
pub mod derived_property {
pub const XID_Continue_table: &'static [(char, char)] = &[
('\u{30}', '\u{39}'), ('\u{41}', '\u{5a}'), ('\u{5f}', '\u{5f}'), ('\u{61}', '\u{7a}'),
('\u{aa}', '\u{aa}'), ('\u{b5}', '\u{b5}'), ('\u{b7}', '\u{b7}'), ('\u{ba}', '\u{ba}'),
('\u{c0}', '\u{d6}'), ('\u{d8}', '\u{f6}'), ('\u{f8}', '\u{2c1}'), ('\u{2c6}', '\u{2d1}'),
('\u{2e0}', '\u{2e4}'), ('\u{2ec}', '\u{2ec}'), ('\u{2ee}', '\u{2ee}'), ('\u{300}',
'\u{374}'), ('\u{376}', '\u{377}'), ('\u{37b}', '\u{37d}'), ('\u{37f}', '\u{37f}'),
('\u{386}', '\u{38a}'), ('\u{38c}', '\u{38c}'), ('\u{38e}', '\u{3a1}'), ('\u{3a3}',
'\u{3f5}'), ('\u{3f7}', '\u{481}'), ('\u{483}', '\u{487}'), ('\u{48a}', '\u{52f}'),
('\u{531}', '\u{556}'), ('\u{559}', '\u{559}'), ('\u{561}', '\u{587}'), ('\u{591}',
'\u{5bd}'), ('\u{5bf}', '\u{5bf}'), ('\u{5c1}', '\u{5c2}'), ('\u{5c4}', '\u{5c5}'),
('\u{5c7}', '\u{5c7}'), ('\u{5d0}', '\u{5ea}'), ('\u{5f0}', '\u{5f2}'), ('\u{610}',
'\u{61a}'), ('\u{620}', '\u{669}'), ('\u{66e}', '\u{6d3}'), ('\u{6d5}', '\u{6dc}'),
('\u{6df}', '\u{6e8}'), ('\u{6ea}', '\u{6fc}'), ('\u{6ff}', '\u{6ff}'), ('\u{710}',
'\u{74a}'), ('\u{74d}', '\u{7b1}'), ('\u{7c0}', '\u{7f5}'), ('\u{7fa}', '\u{7fa}'),
('\u{800}', '\u{82d}'), ('\u{840}', '\u{85b}'), ('\u{8a0}', '\u{8b4}'), ('\u{8b6}',
'\u{8bd}'), ('\u{8d4}', '\u{8e1}'), ('\u{8e3}', '\u{963}'), ('\u{966}', '\u{96f}'),
('\u{971}', '\u{983}'), ('\u{985}', '\u{98c}'), ('\u{98f}', '\u{990}'), ('\u{993}',
'\u{9a8}'), ('\u{9aa}', '\u{9b0}'), ('\u{9b2}', '\u{9b2}'), ('\u{9b6}', '\u{9b9}'),
('\u{9bc}', '\u{9c4}'), ('\u{9c7}', '\u{9c8}'), ('\u{9cb}', '\u{9ce}'), ('\u{9d7}',
'\u{9d7}'), ('\u{9dc}', '\u{9dd}'), ('\u{9df}', '\u{9e3}'), ('\u{9e6}', '\u{9f1}'),
('\u{a01}', '\u{a03}'), ('\u{a05}', '\u{a0a}'), ('\u{a0f}', '\u{a10}'), ('\u{a13}',
'\u{a28}'), ('\u{a2a}', '\u{a30}'), ('\u{a32}', '\u{a33}'), ('\u{a35}', '\u{a36}'),
('\u{a38}', '\u{a39}'), ('\u{a3c}', '\u{a3c}'), ('\u{a3e}', '\u{a42}'), ('\u{a47}',
'\u{a48}'), ('\u{a4b}', '\u{a4d}'), ('\u{a51}', '\u{a51}'), ('\u{a59}', '\u{a5c}'),
('\u{a5e}', '\u{a5e}'), ('\u{a66}', '\u{a75}'), ('\u{a81}', '\u{a83}'), ('\u{a85}',
'\u{a8d}'), ('\u{a8f}', '\u{a91}'), ('\u{a93}', '\u{aa8}'), ('\u{aaa}', '\u{ab0}'),
('\u{ab2}', '\u{ab3}'), ('\u{ab5}', '\u{ab9}'), ('\u{abc}', '\u{ac5}'), ('\u{ac7}',
'\u{ac9}'), ('\u{acb}', '\u{acd}'), ('\u{ad0}', '\u{ad0}'), ('\u{ae0}', '\u{ae3}'),
('\u{ae6}', '\u{aef}'), ('\u{af9}', '\u{af9}'), ('\u{b01}', '\u{b03}'), ('\u{b05}',
'\u{b0c}'), ('\u{b0f}', '\u{b10}'), ('\u{b13}', '\u{b28}'), ('\u{b2a}', '\u{b30}'),
('\u{b32}', '\u{b33}'), ('\u{b35}', '\u{b39}'), ('\u{b3c}', '\u{b44}'), ('\u{b47}',
'\u{b48}'), ('\u{b4b}', '\u{b4d}'), ('\u{b56}', '\u{b57}'), ('\u{b5c}', '\u{b5d}'),
('\u{b5f}', '\u{b63}'), ('\u{b66}', '\u{b6f}'), ('\u{b71}', '\u{b71}'), ('\u{b82}',
'\u{b83}'), ('\u{b85}', '\u{b8a}'), ('\u{b8e}', '\u{b90}'), ('\u{b92}', '\u{b95}'),
('\u{b99}', '\u{b9a}'), ('\u{b9c}', '\u{b9c}'), ('\u{b9e}', '\u{b9f}'), ('\u{ba3}',
'\u{ba4}'), ('\u{ba8}', '\u{baa}'), ('\u{bae}', '\u{bb9}'), ('\u{bbe}', '\u{bc2}'),
('\u{bc6}', '\u{bc8}'), ('\u{bca}', '\u{bcd}'), ('\u{bd0}', '\u{bd0}'), ('\u{bd7}',
'\u{bd7}'), ('\u{be6}', '\u{bef}'), ('\u{c00}', '\u{c03}'), ('\u{c05}', '\u{c0c}'),
('\u{c0e}', '\u{c10}'), ('\u{c12}', '\u{c28}'), ('\u{c2a}', '\u{c39}'), ('\u{c3d}',
'\u{c44}'), ('\u{c46}', '\u{c48}'), ('\u{c4a}', '\u{c4d}'), ('\u{c55}', '\u{c56}'),
('\u{c58}', '\u{c5a}'), ('\u{c60}', '\u{c63}'), ('\u{c66}', '\u{c6f}'), ('\u{c80}',
'\u{c83}'), ('\u{c85}', '\u{c8c}'), ('\u{c8e}', '\u{c90}'), ('\u{c92}', '\u{ca8}'),
('\u{caa}', '\u{cb3}'), ('\u{cb5}', '\u{cb9}'), ('\u{cbc}', '\u{cc4}'), ('\u{cc6}',
'\u{cc8}'), ('\u{cca}', '\u{ccd}'), ('\u{cd5}', '\u{cd6}'), ('\u{cde}', '\u{cde}'),
('\u{ce0}', '\u{ce3}'), ('\u{ce6}', '\u{cef}'), ('\u{cf1}', '\u{cf2}'), ('\u{d01}',
'\u{d03}'), ('\u{d05}', '\u{d0c}'), ('\u{d0e}', '\u{d10}'), ('\u{d12}', '\u{d3a}'),
('\u{d3d}', '\u{d44}'), ('\u{d46}', '\u{d48}'), ('\u{d4a}', '\u{d4e}'), ('\u{d54}',
'\u{d57}'), ('\u{d5f}', '\u{d63}'), ('\u{d66}', '\u{d6f}'), ('\u{d7a}', '\u{d7f}'),
('\u{d82}', '\u{d83}'), ('\u{d85}', '\u{d96}'), ('\u{d9a}', '\u{db1}'), ('\u{db3}',
'\u{dbb}'), ('\u{dbd}', '\u{dbd}'), ('\u{dc0}', '\u{dc6}'), ('\u{dca}', '\u{dca}'),
('\u{dcf}', '\u{dd4}'), ('\u{dd6}', '\u{dd6}'), ('\u{dd8}', '\u{ddf}'), ('\u{de6}',
'\u{def}'), ('\u{df2}', '\u{df3}'), ('\u{e01}', '\u{e3a}'), ('\u{e40}', '\u{e4e}'),
('\u{e50}', '\u{e59}'), ('\u{e81}', '\u{e82}'), ('\u{e84}', '\u{e84}'), ('\u{e87}',
'\u{e88}'), ('\u{e8a}', '\u{e8a}'), ('\u{e8d}', '\u{e8d}'), ('\u{e94}', '\u{e97}'),
('\u{e99}', '\u{e9f}'), ('\u{ea1}', '\u{ea3}'), ('\u{ea5}', '\u{ea5}'), ('\u{ea7}',
'\u{ea7}'), ('\u{eaa}', '\u{eab}'), ('\u{ead}', '\u{eb9}'), ('\u{ebb}', '\u{ebd}'),
('\u{ec0}', '\u{ec4}'), ('\u{ec6}', '\u{ec6}'), ('\u{ec8}', '\u{ecd}'), ('\u{ed0}',
'\u{ed9}'), ('\u{edc}', '\u{edf}'), ('\u{f00}', '\u{f00}'), ('\u{f18}', '\u{f19}'),
('\u{f20}', '\u{f29}'), ('\u{f35}', '\u{f35}'), ('\u{f37}', '\u{f37}'), ('\u{f39}',
'\u{f39}'), ('\u{f3e}', '\u{f47}'), ('\u{f49}', '\u{f6c}'), ('\u{f71}', '\u{f84}'),
('\u{f86}', '\u{f97}'), ('\u{f99}', '\u{fbc}'), ('\u{fc6}', '\u{fc6}'), ('\u{1000}',
'\u{1049}'), ('\u{1050}', '\u{109d}'), ('\u{10a0}', '\u{10c5}'), ('\u{10c7}', '\u{10c7}'),
('\u{10cd}', '\u{10cd}'), ('\u{10d0}', '\u{10fa}'), ('\u{10fc}', '\u{1248}'), ('\u{124a}',
'\u{124d}'), ('\u{1250}', '\u{1256}'), ('\u{1258}', '\u{1258}'), ('\u{125a}', '\u{125d}'),
('\u{1260}', '\u{1288}'), ('\u{128a}', '\u{128d}'), ('\u{1290}', '\u{12b0}'), ('\u{12b2}',
'\u{12b5}'), ('\u{12b8}', '\u{12be}'), ('\u{12c0}', '\u{12c0}'), ('\u{12c2}', '\u{12c5}'),
('\u{12c8}', '\u{12d6}'), ('\u{12d8}', '\u{1310}'), ('\u{1312}', '\u{1315}'), ('\u{1318}',
'\u{135a}'), ('\u{135d}', '\u{135f}'), ('\u{1369}', '\u{1371}'), ('\u{1380}', '\u{138f}'),
('\u{13a0}', '\u{13f5}'), ('\u{13f8}', '\u{13fd}'), ('\u{1401}', '\u{166c}'), ('\u{166f}',
'\u{167f}'), ('\u{1681}', '\u{169a}'), ('\u{16a0}', '\u{16ea}'), ('\u{16ee}', '\u{16f8}'),
('\u{1700}', '\u{170c}'), ('\u{170e}', '\u{1714}'), ('\u{1720}', '\u{1734}'), ('\u{1740}',
'\u{1753}'), ('\u{1760}', '\u{176c}'), ('\u{176e}', '\u{1770}'), ('\u{1772}', '\u{1773}'),
('\u{1780}', '\u{17d3}'), ('\u{17d7}', '\u{17d7}'), ('\u{17dc}', '\u{17dd}'), ('\u{17e0}',
'\u{17e9}'), ('\u{180b}', '\u{180d}'), ('\u{1810}', '\u{1819}'), ('\u{1820}', '\u{1877}'),
('\u{1880}', '\u{18aa}'), ('\u{18b0}', '\u{18f5}'), ('\u{1900}', '\u{191e}'), ('\u{1920}',
'\u{192b}'), ('\u{1930}', '\u{193b}'), ('\u{1946}', '\u{196d}'), ('\u{1970}', '\u{1974}'),
('\u{1980}', '\u{19ab}'), ('\u{19b0}', '\u{19c9}'), ('\u{19d0}', '\u{19da}'), ('\u{1a00}',
'\u{1a1b}'), ('\u{1a20}', '\u{1a5e}'), ('\u{1a60}', '\u{1a7c}'), ('\u{1a7f}', '\u{1a89}'),
('\u{1a90}', '\u{1a99}'), ('\u{1aa7}', '\u{1aa7}'), ('\u{1ab0}', '\u{1abd}'), ('\u{1b00}',
'\u{1b4b}'), ('\u{1b50}', '\u{1b59}'), ('\u{1b6b}', '\u{1b73}'), ('\u{1b80}', '\u{1bf3}'),
('\u{1c00}', '\u{1c37}'), ('\u{1c40}', '\u{1c49}'), ('\u{1c4d}', '\u{1c7d}'), ('\u{1c80}',
'\u{1c88}'), ('\u{1cd0}', '\u{1cd2}'), ('\u{1cd4}', '\u{1cf6}'), ('\u{1cf8}', '\u{1cf9}'),
('\u{1d00}', '\u{1df5}'), ('\u{1dfb}', '\u{1f15}'), ('\u{1f18}', '\u{1f1d}'), ('\u{1f20}',
'\u{1f45}'), ('\u{1f48}', '\u{1f4d}'), ('\u{1f50}', '\u{1f57}'), ('\u{1f59}', '\u{1f59}'),
('\u{1f5b}', '\u{1f5b}'), ('\u{1f5d}', '\u{1f5d}'), ('\u{1f5f}', '\u{1f7d}'), ('\u{1f80}',
'\u{1fb4}'), ('\u{1fb6}', '\u{1fbc}'), ('\u{1fbe}', '\u{1fbe}'), ('\u{1fc2}', '\u{1fc4}'),
('\u{1fc6}', '\u{1fcc}'), ('\u{1fd0}', '\u{1fd3}'), ('\u{1fd6}', '\u{1fdb}'), ('\u{1fe0}',
'\u{1fec}'), ('\u{1ff2}', '\u{1ff4}'), ('\u{1ff6}', '\u{1ffc}'), ('\u{203f}', '\u{2040}'),
('\u{2054}', '\u{2054}'), ('\u{2071}', '\u{2071}'), ('\u{207f}', '\u{207f}'), ('\u{2090}',
'\u{209c}'), ('\u{20d0}', '\u{20dc}'), ('\u{20e1}', '\u{20e1}'), ('\u{20e5}', '\u{20f0}'),
('\u{2102}', '\u{2102}'), ('\u{2107}', '\u{2107}'), ('\u{210a}', '\u{2113}'), ('\u{2115}',
'\u{2115}'), ('\u{2118}', '\u{211d}'), ('\u{2124}', '\u{2124}'), ('\u{2126}', '\u{2126}'),
('\u{2128}', '\u{2128}'), ('\u{212a}', '\u{2139}'), ('\u{213c}', '\u{213f}'), ('\u{2145}',
'\u{2149}'), ('\u{214e}', '\u{214e}'), ('\u{2160}', '\u{2188}'), ('\u{2c00}', '\u{2c2e}'),
('\u{2c30}', '\u{2c5e}'), ('\u{2c60}', '\u{2ce4}'), ('\u{2ceb}', '\u{2cf3}'), ('\u{2d00}',
'\u{2d25}'), ('\u{2d27}', '\u{2d27}'), ('\u{2d2d}', '\u{2d2d}'), ('\u{2d30}', '\u{2d67}'),
('\u{2d6f}', '\u{2d6f}'), ('\u{2d7f}', '\u{2d96}'), ('\u{2da0}', '\u{2da6}'), ('\u{2da8}',
'\u{2dae}'), ('\u{2db0}', '\u{2db6}'), ('\u{2db8}', '\u{2dbe}'), ('\u{2dc0}', '\u{2dc6}'),
('\u{2dc8}', '\u{2dce}'), ('\u{2dd0}', '\u{2dd6}'), ('\u{2dd8}', '\u{2dde}'), ('\u{2de0}',
'\u{2dff}'), ('\u{3005}', '\u{3007}'), ('\u{3021}', '\u{302f}'), ('\u{3031}', '\u{3035}'),
('\u{3038}', '\u{303c}'), ('\u{3041}', '\u{3096}'), ('\u{3099}', '\u{309a}'), ('\u{309d}',
'\u{309f}'), ('\u{30a1}', '\u{30fa}'), ('\u{30fc}', '\u{30ff}'), ('\u{3105}', '\u{312d}'),
('\u{3131}', '\u{318e}'), ('\u{31a0}', '\u{31ba}'), ('\u{31f0}', '\u{31ff}'), ('\u{3400}',
'\u{4db5}'), ('\u{4e00}', '\u{9fd5}'), ('\u{a000}', '\u{a48c}'), ('\u{a4d0}', '\u{a4fd}'),
('\u{a500}', '\u{a60c}'), ('\u{a610}', '\u{a62b}'), ('\u{a640}', '\u{a66f}'), ('\u{a674}',
'\u{a67d}'), ('\u{a67f}', '\u{a6f1}'), ('\u{a717}', '\u{a71f}'), ('\u{a722}', '\u{a788}'),
('\u{a78b}', '\u{a7ae}'), ('\u{a7b0}', '\u{a7b7}'), ('\u{a7f7}', '\u{a827}'), ('\u{a840}',
'\u{a873}'), ('\u{a880}', '\u{a8c5}'), ('\u{a8d0}', '\u{a8d9}'), ('\u{a8e0}', '\u{a8f7}'),
('\u{a8fb}', '\u{a8fb}'), ('\u{a8fd}', '\u{a8fd}'), ('\u{a900}', '\u{a92d}'), ('\u{a930}',
'\u{a953}'), ('\u{a960}', '\u{a97c}'), ('\u{a980}', '\u{a9c0}'), ('\u{a9cf}', '\u{a9d9}'),
('\u{a9e0}', '\u{a9fe}'), ('\u{aa00}', '\u{aa36}'), ('\u{aa40}', '\u{aa4d}'), ('\u{aa50}',
'\u{aa59}'), ('\u{aa60}', '\u{aa76}'), ('\u{aa7a}', '\u{aac2}'), ('\u{aadb}', '\u{aadd}'),
('\u{aae0}', '\u{aaef}'), ('\u{aaf2}', '\u{aaf6}'), ('\u{ab01}', '\u{ab06}'), ('\u{ab09}',
'\u{ab0e}'), ('\u{ab11}', '\u{ab16}'), ('\u{ab20}', '\u{ab26}'), ('\u{ab28}', '\u{ab2e}'),
('\u{ab30}', '\u{ab5a}'), ('\u{ab5c}', '\u{ab65}'), ('\u{ab70}', '\u{abea}'), ('\u{abec}',
'\u{abed}'), ('\u{abf0}', '\u{abf9}'), ('\u{ac00}', '\u{d7a3}'), ('\u{d7b0}', '\u{d7c6}'),
('\u{d7cb}', '\u{d7fb}'), ('\u{f900}', '\u{fa6d}'), ('\u{fa70}', '\u{fad9}'), ('\u{fb00}',
'\u{fb06}'), ('\u{fb13}', '\u{fb17}'), ('\u{fb1d}', '\u{fb28}'), ('\u{fb2a}', '\u{fb36}'),
('\u{fb38}', '\u{fb3c}'), ('\u{fb3e}', '\u{fb3e}'), ('\u{fb40}', '\u{fb41}'), ('\u{fb43}',
'\u{fb44}'), ('\u{fb46}', '\u{fbb1}'), ('\u{fbd3}', '\u{fc5d}'), ('\u{fc64}', '\u{fd3d}'),
('\u{fd50}', '\u{fd8f}'), ('\u{fd92}', '\u{fdc7}'), ('\u{fdf0}', '\u{fdf9}'), ('\u{fe00}',
'\u{fe0f}'), ('\u{fe20}', '\u{fe2f}'), ('\u{fe33}', '\u{fe34}'), ('\u{fe4d}', '\u{fe4f}'),
('\u{fe71}', '\u{fe71}'), ('\u{fe73}', '\u{fe73}'), ('\u{fe77}', '\u{fe77}'), ('\u{fe79}',
'\u{fe79}'), ('\u{fe7b}', '\u{fe7b}'), ('\u{fe7d}', '\u{fe7d}'), ('\u{fe7f}', '\u{fefc}'),
('\u{ff10}', '\u{ff19}'), ('\u{ff21}', '\u{ff3a}'), ('\u{ff3f}', '\u{ff3f}'), ('\u{ff41}',
'\u{ff5a}'), ('\u{ff66}', '\u{ffbe}'), ('\u{ffc2}', '\u{ffc7}'), ('\u{ffca}', '\u{ffcf}'),
('\u{ffd2}', '\u{ffd7}'), ('\u{ffda}', '\u{ffdc}'), ('\u{10000}', '\u{1000b}'),
('\u{1000d}', '\u{10026}'), ('\u{10028}', '\u{1003a}'), ('\u{1003c}', '\u{1003d}'),
('\u{1003f}', '\u{1004d}'), ('\u{10050}', '\u{1005d}'), ('\u{10080}', '\u{100fa}'),
('\u{10140}', '\u{10174}'), ('\u{101fd}', '\u{101fd}'), ('\u{10280}', '\u{1029c}'),
('\u{102a0}', '\u{102d0}'), ('\u{102e0}', '\u{102e0}'), ('\u{10300}', '\u{1031f}'),
('\u{10330}', '\u{1034a}'), ('\u{10350}', '\u{1037a}'), ('\u{10380}', '\u{1039d}'),
('\u{103a0}', '\u{103c3}'), ('\u{103c8}', '\u{103cf}'), ('\u{103d1}', '\u{103d5}'),
('\u{10400}', '\u{1049d}'), ('\u{104a0}', '\u{104a9}'), ('\u{104b0}', '\u{104d3}'),
('\u{104d8}', '\u{104fb}'), ('\u{10500}', '\u{10527}'), ('\u{10530}', '\u{10563}'),
('\u{10600}', '\u{10736}'), ('\u{10740}', '\u{10755}'), ('\u{10760}', '\u{10767}'),
('\u{10800}', '\u{10805}'), ('\u{10808}', '\u{10808}'), ('\u{1080a}', '\u{10835}'),
('\u{10837}', '\u{10838}'), ('\u{1083c}', '\u{1083c}'), ('\u{1083f}', '\u{10855}'),
('\u{10860}', '\u{10876}'), ('\u{10880}', '\u{1089e}'), ('\u{108e0}', '\u{108f2}'),
('\u{108f4}', '\u{108f5}'), ('\u{10900}', '\u{10915}'), ('\u{10920}', '\u{10939}'),
('\u{10980}', '\u{109b7}'), ('\u{109be}', '\u{109bf}'), ('\u{10a00}', '\u{10a03}'),
('\u{10a05}', '\u{10a06}'), ('\u{10a0c}', '\u{10a13}'), ('\u{10a15}', '\u{10a17}'),
('\u{10a19}', '\u{10a33}'), ('\u{10a38}', '\u{10a3a}'), ('\u{10a3f}', '\u{10a3f}'),
('\u{10a60}', '\u{10a7c}'), ('\u{10a80}', '\u{10a9c}'), ('\u{10ac0}', '\u{10ac7}'),
('\u{10ac9}', '\u{10ae6}'), ('\u{10b00}', '\u{10b35}'), ('\u{10b40}', '\u{10b55}'),
('\u{10b60}', '\u{10b72}'), ('\u{10b80}', '\u{10b91}'), ('\u{10c00}', '\u{10c48}'),
('\u{10c80}', '\u{10cb2}'), ('\u{10cc0}', '\u{10cf2}'), ('\u{11000}', '\u{11046}'),
('\u{11066}', '\u{1106f}'), ('\u{1107f}', '\u{110ba}'), ('\u{110d0}', '\u{110e8}'),
('\u{110f0}', '\u{110f9}'), ('\u{11100}', '\u{11134}'), ('\u{11136}', '\u{1113f}'),
('\u{11150}', '\u{11173}'), ('\u{11176}', '\u{11176}'), ('\u{11180}', '\u{111c4}'),
('\u{111ca}', '\u{111cc}'), ('\u{111d0}', '\u{111da}'), ('\u{111dc}', '\u{111dc}'),
('\u{11200}', '\u{11211}'), ('\u{11213}', '\u{11237}'), ('\u{1123e}', '\u{1123e}'),
('\u{11280}', '\u{11286}'), ('\u{11288}', '\u{11288}'), ('\u{1128a}', '\u{1128d}'),
('\u{1128f}', '\u{1129d}'), ('\u{1129f}', '\u{112a8}'), ('\u{112b0}', '\u{112ea}'),
('\u{112f0}', '\u{112f9}'), ('\u{11300}', '\u{11303}'), ('\u{11305}', '\u{1130c}'),
('\u{1130f}', '\u{11310}'), ('\u{11313}', '\u{11328}'), ('\u{1132a}', '\u{11330}'),
('\u{11332}', '\u{11333}'), ('\u{11335}', '\u{11339}'), ('\u{1133c}', '\u{11344}'),
('\u{11347}', '\u{11348}'), ('\u{1134b}', '\u{1134d}'), ('\u{11350}', '\u{11350}'),
('\u{11357}', '\u{11357}'), ('\u{1135d}', '\u{11363}'), ('\u{11366}', '\u{1136c}'),
('\u{11370}', '\u{11374}'), ('\u{11400}', '\u{1144a}'), ('\u{11450}', '\u{11459}'),
('\u{11480}', '\u{114c5}'), ('\u{114c7}', '\u{114c7}'), ('\u{114d0}', '\u{114d9}'),
('\u{11580}', '\u{115b5}'), ('\u{115b8}', '\u{115c0}'), ('\u{115d8}', '\u{115dd}'),
('\u{11600}', '\u{11640}'), ('\u{11644}', '\u{11644}'), ('\u{11650}', '\u{11659}'),
('\u{11680}', '\u{116b7}'), ('\u{116c0}', '\u{116c9}'), ('\u{11700}', '\u{11719}'),
('\u{1171d}', '\u{1172b}'), ('\u{11730}', '\u{11739}'), ('\u{118a0}', '\u{118e9}'),
('\u{118ff}', '\u{118ff}'), ('\u{11ac0}', '\u{11af8}'), ('\u{11c00}', '\u{11c08}'),
('\u{11c0a}', '\u{11c36}'), ('\u{11c38}', '\u{11c40}'), ('\u{11c50}', '\u{11c59}'),
('\u{11c72}', '\u{11c8f}'), ('\u{11c92}', '\u{11ca7}'), ('\u{11ca9}', '\u{11cb6}'),
('\u{12000}', '\u{12399}'), ('\u{12400}', '\u{1246e}'), ('\u{12480}', '\u{12543}'),
('\u{13000}', '\u{1342e}'), ('\u{14400}', '\u{14646}'), ('\u{16800}', '\u{16a38}'),
('\u{16a40}', '\u{16a5e}'), ('\u{16a60}', '\u{16a69}'), ('\u{16ad0}', '\u{16aed}'),
('\u{16af0}', '\u{16af4}'), ('\u{16b00}', '\u{16b36}'), ('\u{16b40}', '\u{16b43}'),
('\u{16b50}', '\u{16b59}'), ('\u{16b63}', '\u{16b77}'), ('\u{16b7d}', '\u{16b8f}'),
('\u{16f00}', '\u{16f44}'), ('\u{16f50}', '\u{16f7e}'), ('\u{16f8f}', '\u{16f9f}'),
('\u{16fe0}', '\u{16fe0}'), ('\u{17000}', '\u{187ec}'), ('\u{18800}', '\u{18af2}'),
('\u{1b000}', '\u{1b001}'), ('\u{1bc00}', '\u{1bc6a}'), ('\u{1bc70}', '\u{1bc7c}'),
('\u{1bc80}', '\u{1bc88}'), ('\u{1bc90}', '\u{1bc99}'), ('\u{1bc9d}', '\u{1bc9e}'),
('\u{1d165}', '\u{1d169}'), ('\u{1d16d}', '\u{1d172}'), ('\u{1d17b}', '\u{1d182}'),
('\u{1d185}', '\u{1d18b}'), ('\u{1d1aa}', '\u{1d1ad}'), ('\u{1d242}', '\u{1d244}'),
('\u{1d400}', '\u{1d454}'), ('\u{1d456}', '\u{1d49c}'), ('\u{1d49e}', '\u{1d49f}'),
('\u{1d4a2}', '\u{1d4a2}'), ('\u{1d4a5}', '\u{1d4a6}'), ('\u{1d4a9}', '\u{1d4ac}'),
('\u{1d4ae}', '\u{1d4b9}'), ('\u{1d4bb}', '\u{1d4bb}'), ('\u{1d4bd}', '\u{1d4c3}'),
('\u{1d4c5}', '\u{1d505}'), ('\u{1d507}', '\u{1d50a}'), ('\u{1d50d}', '\u{1d514}'),
('\u{1d516}', '\u{1d51c}'), ('\u{1d51e}', '\u{1d539}'), ('\u{1d53b}', '\u{1d53e}'),
('\u{1d540}', '\u{1d544}'), ('\u{1d546}', '\u{1d546}'), ('\u{1d54a}', '\u{1d550}'),
('\u{1d552}', '\u{1d6a5}'), ('\u{1d6a8}', '\u{1d6c0}'), ('\u{1d6c2}', '\u{1d6da}'),
('\u{1d6dc}', '\u{1d6fa}'), ('\u{1d6fc}', '\u{1d714}'), ('\u{1d716}', '\u{1d734}'),
('\u{1d736}', '\u{1d74e}'), ('\u{1d750}', '\u{1d76e}'), ('\u{1d770}', '\u{1d788}'),
('\u{1d78a}', '\u{1d7a8}'), ('\u{1d7aa}', '\u{1d7c2}'), ('\u{1d7c4}', '\u{1d7cb}'),
('\u{1d7ce}', '\u{1d7ff}'), ('\u{1da00}', '\u{1da36}'), ('\u{1da3b}', '\u{1da6c}'),
('\u{1da75}', '\u{1da75}'), ('\u{1da84}', '\u{1da84}'), ('\u{1da9b}', '\u{1da9f}'),
('\u{1daa1}', '\u{1daaf}'), ('\u{1e000}', '\u{1e006}'), ('\u{1e008}', '\u{1e018}'),
('\u{1e01b}', '\u{1e021}'), ('\u{1e023}', '\u{1e024}'), ('\u{1e026}', '\u{1e02a}'),
('\u{1e800}', '\u{1e8c4}'), ('\u{1e8d0}', '\u{1e8d6}'), ('\u{1e900}', '\u{1e94a}'),
('\u{1e950}', '\u{1e959}'), ('\u{1ee00}', '\u{1ee03}'), ('\u{1ee05}', '\u{1ee1f}'),
('\u{1ee21}', '\u{1ee22}'), ('\u{1ee24}', '\u{1ee24}'), ('\u{1ee27}', '\u{1ee27}'),
('\u{1ee29}', '\u{1ee32}'), ('\u{1ee34}', '\u{1ee37}'), ('\u{1ee39}', '\u{1ee39}'),
('\u{1ee3b}', '\u{1ee3b}'), ('\u{1ee42}', '\u{1ee42}'), ('\u{1ee47}', '\u{1ee47}'),
('\u{1ee49}', '\u{1ee49}'), ('\u{1ee4b}', '\u{1ee4b}'), ('\u{1ee4d}', '\u{1ee4f}'),
('\u{1ee51}', '\u{1ee52}'), ('\u{1ee54}', '\u{1ee54}'), ('\u{1ee57}', '\u{1ee57}'),
('\u{1ee59}', '\u{1ee59}'), ('\u{1ee5b}', '\u{1ee5b}'), ('\u{1ee5d}', '\u{1ee5d}'),
('\u{1ee5f}', '\u{1ee5f}'), ('\u{1ee61}', '\u{1ee62}'), ('\u{1ee64}', '\u{1ee64}'),
('\u{1ee67}', '\u{1ee6a}'), ('\u{1ee6c}', '\u{1ee72}'), ('\u{1ee74}', '\u{1ee77}'),
('\u{1ee79}', '\u{1ee7c}'), ('\u{1ee7e}', '\u{1ee7e}'), ('\u{1ee80}', '\u{1ee89}'),
('\u{1ee8b}', '\u{1ee9b}'), ('\u{1eea1}', '\u{1eea3}'), ('\u{1eea5}', '\u{1eea9}'),
('\u{1eeab}', '\u{1eebb}'), ('\u{20000}', '\u{2a6d6}'), ('\u{2a700}', '\u{2b734}'),
('\u{2b740}', '\u{2b81d}'), ('\u{2b820}', '\u{2cea1}'), ('\u{2f800}', '\u{2fa1d}'),
('\u{e0100}', '\u{e01ef}')
];
    /// Returns `true` if `c` carries the `XID_Continue` derived property,
    /// i.e. it may appear in an identifier after the first character
    /// (binary search over the generated range table above).
    pub fn XID_Continue(c: char) -> bool {
        super::bsearch_range_table(c, XID_Continue_table)
    }
pub const XID_Start_table: &'static [(char, char)] = &[
('\u{41}', '\u{5a}'), ('\u{61}', '\u{7a}'), ('\u{aa}', '\u{aa}'), ('\u{b5}', '\u{b5}'),
('\u{ba}', '\u{ba}'), ('\u{c0}', '\u{d6}'), ('\u{d8}', '\u{f6}'), ('\u{f8}', '\u{2c1}'),
('\u{2c6}', '\u{2d1}'), ('\u{2e0}', '\u{2e4}'), ('\u{2ec}', '\u{2ec}'), ('\u{2ee}',
'\u{2ee}'), ('\u{370}', '\u{374}'), ('\u{376}', '\u{377}'), ('\u{37b}', '\u{37d}'),
('\u{37f}', '\u{37f}'), ('\u{386}', '\u{386}'), ('\u{388}', '\u{38a}'), ('\u{38c}',
'\u{38c}'), ('\u{38e}', '\u{3a1}'), ('\u{3a3}', '\u{3f5}'), ('\u{3f7}', '\u{481}'),
('\u{48a}', '\u{52f}'), ('\u{531}', '\u{556}'), ('\u{559}', '\u{559}'), ('\u{561}',
'\u{587}'), ('\u{5d0}', '\u{5ea}'), ('\u{5f0}', '\u{5f2}'), ('\u{620}', '\u{64a}'),
('\u{66e}', '\u{66f}'), ('\u{671}', '\u{6d3}'), ('\u{6d5}', '\u{6d5}'), ('\u{6e5}',
'\u{6e6}'), ('\u{6ee}', '\u{6ef}'), ('\u{6fa}', '\u{6fc}'), ('\u{6ff}', '\u{6ff}'),
('\u{710}', '\u{710}'), ('\u{712}', '\u{72f}'), ('\u{74d}', '\u{7a5}'), ('\u{7b1}',
'\u{7b1}'), ('\u{7ca}', '\u{7ea}'), ('\u{7f4}', '\u{7f5}'), ('\u{7fa}', '\u{7fa}'),
('\u{800}', '\u{815}'), ('\u{81a}', '\u{81a}'), ('\u{824}', '\u{824}'), ('\u{828}',
'\u{828}'), ('\u{840}', '\u{858}'), ('\u{8a0}', '\u{8b4}'), ('\u{8b6}', '\u{8bd}'),
('\u{904}', '\u{939}'), ('\u{93d}', '\u{93d}'), ('\u{950}', '\u{950}'), ('\u{958}',
'\u{961}'), ('\u{971}', '\u{980}'), ('\u{985}', '\u{98c}'), ('\u{98f}', '\u{990}'),
('\u{993}', '\u{9a8}'), ('\u{9aa}', '\u{9b0}'), ('\u{9b2}', '\u{9b2}'), ('\u{9b6}',
'\u{9b9}'), ('\u{9bd}', '\u{9bd}'), ('\u{9ce}', '\u{9ce}'), ('\u{9dc}', '\u{9dd}'),
('\u{9df}', '\u{9e1}'), ('\u{9f0}', '\u{9f1}'), ('\u{a05}', '\u{a0a}'), ('\u{a0f}',
'\u{a10}'), ('\u{a13}', '\u{a28}'), ('\u{a2a}', '\u{a30}'), ('\u{a32}', '\u{a33}'),
('\u{a35}', '\u{a36}'), ('\u{a38}', '\u{a39}'), ('\u{a59}', '\u{a5c}'), ('\u{a5e}',
'\u{a5e}'), ('\u{a72}', '\u{a74}'), ('\u{a85}', '\u{a8d}'), ('\u{a8f}', '\u{a91}'),
('\u{a93}', '\u{aa8}'), ('\u{aaa}', '\u{ab0}'), ('\u{ab2}', '\u{ab3}'), ('\u{ab5}',
'\u{ab9}'), ('\u{abd}', '\u{abd}'), ('\u{ad0}', '\u{ad0}'), ('\u{ae0}', '\u{ae1}'),
('\u{af9}', '\u{af9}'), ('\u{b05}', '\u{b0c}'), ('\u{b0f}', '\u{b10}'), ('\u{b13}',
'\u{b28}'), ('\u{b2a}', '\u{b30}'), ('\u{b32}', '\u{b33}'), ('\u{b35}', '\u{b39}'),
('\u{b3d}', '\u{b3d}'), ('\u{b5c}', '\u{b5d}'), ('\u{b5f}', '\u{b61}'), ('\u{b71}',
'\u{b71}'), ('\u{b83}', '\u{b83}'), ('\u{b85}', '\u{b8a}'), ('\u{b8e}', '\u{b90}'),
('\u{b92}', '\u{b95}'), ('\u{b99}', '\u{b9a}'), ('\u{b9c}', '\u{b9c}'), ('\u{b9e}',
'\u{b9f}'), ('\u{ba3}', '\u{ba4}'), ('\u{ba8}', '\u{baa}'), ('\u{bae}', '\u{bb9}'),
('\u{bd0}', '\u{bd0}'), ('\u{c05}', '\u{c0c}'), ('\u{c0e}', '\u{c10}'), ('\u{c12}',
'\u{c28}'), ('\u{c2a}', '\u{c39}'), ('\u{c3d}', '\u{c3d}'), ('\u{c58}', '\u{c5a}'),
('\u{c60}', '\u{c61}'), ('\u{c80}', '\u{c80}'), ('\u{c85}', '\u{c8c}'), ('\u{c8e}',
'\u{c90}'), ('\u{c92}', '\u{ca8}'), ('\u{caa}', '\u{cb3}'), ('\u{cb5}', '\u{cb9}'),
('\u{cbd}', '\u{cbd}'), ('\u{cde}', '\u{cde}'), ('\u{ce0}', '\u{ce1}'), ('\u{cf1}',
'\u{cf2}'), ('\u{d05}', '\u{d0c}'), ('\u{d0e}', '\u{d10}'), ('\u{d12}', '\u{d3a}'),
('\u{d3d}', '\u{d3d}'), ('\u{d4e}', '\u{d4e}'), ('\u{d54}', '\u{d56}'), ('\u{d5f}',
'\u{d61}'), ('\u{d7a}', '\u{d7f}'), ('\u{d85}', '\u{d96}'), ('\u{d9a}', '\u{db1}'),
('\u{db3}', '\u{dbb}'), ('\u{dbd}', '\u{dbd}'), ('\u{dc0}', '\u{dc6}'), ('\u{e01}',
'\u{e30}'), ('\u{e32}', '\u{e32}'), ('\u{e40}', '\u{e46}'), ('\u{e81}', '\u{e82}'),
('\u{e84}', '\u{e84}'), ('\u{e87}', '\u{e88}'), ('\u{e8a}', '\u{e8a}'), ('\u{e8d}',
'\u{e8d}'), ('\u{e94}', '\u{e97}'), ('\u{e99}', '\u{e9f}'), ('\u{ea1}', '\u{ea3}'),
('\u{ea5}', '\u{ea5}'), ('\u{ea7}', '\u{ea7}'), ('\u{eaa}', '\u{eab}'), ('\u{ead}',
'\u{eb0}'), ('\u{eb2}', '\u{eb2}'), ('\u{ebd}', '\u{ebd}'), ('\u{ec0}', '\u{ec4}'),
('\u{ec6}', '\u{ec6}'), ('\u{edc}', '\u{edf}'), ('\u{f00}', '\u{f00}'), ('\u{f40}',
'\u{f47}'), ('\u{f49}', '\u{f6c}'), ('\u{f88}', '\u{f8c}'), ('\u{1000}', '\u{102a}'),
('\u{103f}', '\u{103f}'), ('\u{1050}', '\u{1055}'), ('\u{105a}', '\u{105d}'), ('\u{1061}',
'\u{1061}'), ('\u{1065}', '\u{1066}'), ('\u{106e}', '\u{1070}'), ('\u{1075}', '\u{1081}'),
('\u{108e}', '\u{108e}'), ('\u{10a0}', '\u{10c5}'), ('\u{10c7}', '\u{10c7}'), ('\u{10cd}',
'\u{10cd}'), ('\u{10d0}', '\u{10fa}'), ('\u{10fc}', '\u{1248}'), ('\u{124a}', '\u{124d}'),
('\u{1250}', '\u{1256}'), ('\u{1258}', '\u{1258}'), ('\u{125a}', '\u{125d}'), ('\u{1260}',
'\u{1288}'), ('\u{128a}', '\u{128d}'), ('\u{1290}', '\u{12b0}'), ('\u{12b2}', '\u{12b5}'),
('\u{12b8}', '\u{12be}'), ('\u{12c0}', '\u{12c0}'), ('\u{12c2}', '\u{12c5}'), ('\u{12c8}',
'\u{12d6}'), ('\u{12d8}', '\u{1310}'), ('\u{1312}', '\u{1315}'), ('\u{1318}', '\u{135a}'),
('\u{1380}', '\u{138f}'), ('\u{13a0}', '\u{13f5}'), ('\u{13f8}', '\u{13fd}'), ('\u{1401}',
'\u{166c}'), ('\u{166f}', '\u{167f}'), ('\u{1681}', '\u{169a}'), ('\u{16a0}', '\u{16ea}'),
('\u{16ee}', '\u{16f8}'), ('\u{1700}', '\u{170c}'), ('\u{170e}', '\u{1711}'), ('\u{1720}',
'\u{1731}'), ('\u{1740}', '\u{1751}'), ('\u{1760}', '\u{176c}'), ('\u{176e}', '\u{1770}'),
('\u{1780}', '\u{17b3}'), ('\u{17d7}', '\u{17d7}'), ('\u{17dc}', '\u{17dc}'), ('\u{1820}',
'\u{1877}'), ('\u{1880}', '\u{18a8}'), ('\u{18aa}', '\u{18aa}'), ('\u{18b0}', '\u{18f5}'),
('\u{1900}', '\u{191e}'), ('\u{1950}', '\u{196d}'), ('\u{1970}', '\u{1974}'), ('\u{1980}',
'\u{19ab}'), ('\u{19b0}', '\u{19c9}'), ('\u{1a00}', '\u{1a16}'), ('\u{1a20}', '\u{1a54}'),
('\u{1aa7}', '\u{1aa7}'), ('\u{1b05}', '\u{1b33}'), ('\u{1b45}', '\u{1b4b}'), ('\u{1b83}',
'\u{1ba0}'), ('\u{1bae}', '\u{1baf}'), ('\u{1bba}', '\u{1be5}'), ('\u{1c00}', '\u{1c23}'),
('\u{1c4d}', '\u{1c4f}'), ('\u{1c5a}', '\u{1c7d}'), ('\u{1c80}', '\u{1c88}'), ('\u{1ce9}',
'\u{1cec}'), ('\u{1cee}', '\u{1cf1}'), ('\u{1cf5}', '\u{1cf6}'), ('\u{1d00}', '\u{1dbf}'),
('\u{1e00}', '\u{1f15}'), ('\u{1f18}', '\u{1f1d}'), ('\u{1f20}', '\u{1f45}'), ('\u{1f48}',
'\u{1f4d}'), ('\u{1f50}', '\u{1f57}'), ('\u{1f59}', '\u{1f59}'), ('\u{1f5b}', '\u{1f5b}'),
('\u{1f5d}', '\u{1f5d}'), ('\u{1f5f}', '\u{1f7d}'), ('\u{1f80}', '\u{1fb4}'), ('\u{1fb6}',
'\u{1fbc}'), ('\u{1fbe}', '\u{1fbe}'), ('\u{1fc2}', '\u{1fc4}'), ('\u{1fc6}', '\u{1fcc}'),
('\u{1fd0}', '\u{1fd3}'), ('\u{1fd6}', '\u{1fdb}'), ('\u{1fe0}', '\u{1fec}'), ('\u{1ff2}',
'\u{1ff4}'), ('\u{1ff6}', '\u{1ffc}'), ('\u{2071}', '\u{2071}'), ('\u{207f}', '\u{207f}'),
('\u{2090}', '\u{209c}'), ('\u{2102}', '\u{2102}'), ('\u{2107}', '\u{2107}'), ('\u{210a}',
'\u{2113}'), ('\u{2115}', '\u{2115}'), ('\u{2118}', '\u{211d}'), ('\u{2124}', '\u{2124}'),
('\u{2126}', '\u{2126}'), ('\u{2128}', '\u{2128}'), ('\u{212a}', '\u{2139}'), ('\u{213c}',
'\u{213f}'), ('\u{2145}', '\u{2149}'), ('\u{214e}', '\u{214e}'), ('\u{2160}', '\u{2188}'),
('\u{2c00}', '\u{2c2e}'), ('\u{2c30}', '\u{2c5e}'), ('\u{2c60}', '\u{2ce4}'), ('\u{2ceb}',
'\u{2cee}'), ('\u{2cf2}', '\u{2cf3}'), ('\u{2d00}', '\u{2d25}'), ('\u{2d27}', '\u{2d27}'),
('\u{2d2d}', '\u{2d2d}'), ('\u{2d30}', '\u{2d67}'), ('\u{2d6f}', '\u{2d6f}'), ('\u{2d80}',
'\u{2d96}'), ('\u{2da0}', '\u{2da6}'), ('\u{2da8}', '\u{2dae}'), ('\u{2db0}', '\u{2db6}'),
('\u{2db8}', '\u{2dbe}'), ('\u{2dc0}', '\u{2dc6}'), ('\u{2dc8}', '\u{2dce}'), ('\u{2dd0}',
'\u{2dd6}'), ('\u{2dd8}', '\u{2dde}'), ('\u{3005}', '\u{3007}'), ('\u{3021}', '\u{3029}'),
('\u{3031}', '\u{3035}'), ('\u{3038}', '\u{303c}'), ('\u{3041}', '\u{3096}'), ('\u{309d}',
'\u{309f}'), ('\u{30a1}', '\u{30fa}'), ('\u{30fc}', '\u{30ff}'), ('\u{3105}', '\u{312d}'),
('\u{3131}', '\u{318e}'), ('\u{31a0}', '\u{31ba}'), ('\u{31f0}', '\u{31ff}'), ('\u{3400}',
'\u{4db5}'), ('\u{4e00}', '\u{9fd5}'), ('\u{a000}', '\u{a48c}'), ('\u{a4d0}', '\u{a4fd}'),
('\u{a500}', '\u{a60c}'), ('\u{a610}', '\u{a61f}'), ('\u{a62a}', '\u{a62b}'), ('\u{a640}',
'\u{a66e}'), ('\u{a67f}', '\u{a69d}'), ('\u{a6a0}', '\u{a6ef}'), ('\u{a717}', '\u{a71f}'),
('\u{a722}', '\u{a788}'), ('\u{a78b}', '\u{a7ae}'), ('\u{a7b0}', '\u{a7b7}'), ('\u{a7f7}',
'\u{a801}'), ('\u{a803}', '\u{a805}'), ('\u{a807}', '\u{a80a}'), ('\u{a80c}', '\u{a822}'),
('\u{a840}', '\u{a873}'), ('\u{a882}', '\u{a8b3}'), ('\u{a8f2}', '\u{a8f7}'), ('\u{a8fb}',
'\u{a8fb}'), ('\u{a8fd}', '\u{a8fd}'), ('\u{a90a}', '\u{a925}'), ('\u{a930}', '\u{a946}'),
('\u{a960}', '\u{a97c}'), ('\u{a984}', '\u{a9b2}'), ('\u{a9cf}', '\u{a9cf}'), ('\u{a9e0}',
'\u{a9e4}'), ('\u{a9e6}', '\u{a9ef}'), ('\u{a9fa}', '\u{a9fe}'), ('\u{aa00}', '\u{aa28}'),
('\u{aa40}', '\u{aa42}'), ('\u{aa44}', '\u{aa4b}'), ('\u{aa60}', '\u{aa76}'), ('\u{aa7a}',
'\u{aa7a}'), ('\u{aa7e}', '\u{aaaf}'), ('\u{aab1}', '\u{aab1}'), ('\u{aab5}', '\u{aab6}'),
('\u{aab9}', '\u{aabd}'), ('\u{aac0}', '\u{aac0}'), ('\u{aac2}', '\u{aac2}'), ('\u{aadb}',
'\u{aadd}'), ('\u{aae0}', '\u{aaea}'), ('\u{aaf2}', '\u{aaf4}'), ('\u{ab01}', '\u{ab06}'),
('\u{ab09}', '\u{ab0e}'), ('\u{ab11}', '\u{ab16}'), ('\u{ab20}', '\u{ab26}'), ('\u{ab28}',
'\u{ab2e}'), ('\u{ab30}', '\u{ab5a}'), ('\u{ab5c}', '\u{ab65}'), ('\u{ab70}', '\u{abe2}'),
('\u{ac00}', '\u{d7a3}'), ('\u{d7b0}', '\u{d7c6}'), ('\u{d7cb}', '\u{d7fb}'), ('\u{f900}',
'\u{fa6d}'), ('\u{fa70}', '\u{fad9}'), ('\u{fb00}', '\u{fb06}'), ('\u{fb13}', '\u{fb17}'),
('\u{fb1d}', '\u{fb1d}'), ('\u{fb1f}', '\u{fb28}'), ('\u{fb2a}', '\u{fb36}'), ('\u{fb38}',
'\u{fb3c}'), ('\u{fb3e}', '\u{fb3e}'), ('\u{fb40}', '\u{fb41}'), ('\u{fb43}', '\u{fb44}'),
('\u{fb46}', '\u{fbb1}'), ('\u{fbd3}', '\u{fc5d}'), ('\u{fc64}', '\u{fd3d}'), ('\u{fd50}',
'\u{fd8f}'), ('\u{fd92}', '\u{fdc7}'), ('\u{fdf0}', '\u{fdf9}'), ('\u{fe71}', '\u{fe71}'),
('\u{fe73}', '\u{fe73}'), ('\u{fe77}', '\u{fe77}'), ('\u{fe79}', '\u{fe79}'), ('\u{fe7b}',
'\u{fe7b}'), ('\u{fe7d}', '\u{fe7d}'), ('\u{fe7f}', '\u{fefc}'), ('\u{ff21}', '\u{ff3a}'),
('\u{ff41}', '\u{ff5a}'), ('\u{ff66}', '\u{ff9d}'), ('\u{ffa0}', '\u{ffbe}'), ('\u{ffc2}',
'\u{ffc7}'), ('\u{ffca}', '\u{ffcf}'), ('\u{ffd2}', '\u{ffd7}'), ('\u{ffda}', '\u{ffdc}'),
('\u{10000}', '\u{1000b}'), ('\u{1000d}', '\u{10026}'), ('\u{10028}', '\u{1003a}'),
('\u{1003c}', '\u{1003d}'), ('\u{1003f}', '\u{1004d}'), ('\u{10050}', '\u{1005d}'),
('\u{10080}', '\u{100fa}'), ('\u{10140}', '\u{10174}'), ('\u{10280}', '\u{1029c}'),
('\u{102a0}', '\u{102d0}'), ('\u{10300}', '\u{1031f}'), ('\u{10330}', '\u{1034a}'),
('\u{10350}', '\u{10375}'), ('\u{10380}', '\u{1039d}'), ('\u{103a0}', '\u{103c3}'),
('\u{103c8}', '\u{103cf}'), ('\u{103d1}', '\u{103d5}'), ('\u{10400}', '\u{1049d}'),
('\u{104b0}', '\u{104d3}'), ('\u{104d8}', '\u{104fb}'), ('\u{10500}', '\u{10527}'),
('\u{10530}', '\u{10563}'), ('\u{10600}', '\u{10736}'), ('\u{10740}', '\u{10755}'),
('\u{10760}', '\u{10767}'), ('\u{10800}', '\u{10805}'), ('\u{10808}', '\u{10808}'),
('\u{1080a}', '\u{10835}'), ('\u{10837}', '\u{10838}'), ('\u{1083c}', '\u{1083c}'),
('\u{1083f}', '\u{10855}'), ('\u{10860}', '\u{10876}'), ('\u{10880}', '\u{1089e}'),
('\u{108e0}', '\u{108f2}'), ('\u{108f4}', '\u{108f5}'), ('\u{10900}', '\u{10915}'),
('\u{10920}', '\u{10939}'), ('\u{10980}', '\u{109b7}'), ('\u{109be}', '\u{109bf}'),
('\u{10a00}', '\u{10a00}'), ('\u{10a10}', '\u{10a13}'), ('\u{10a15}', '\u{10a17}'),
('\u{10a19}', '\u{10a33}'), ('\u{10a60}', '\u{10a7c}'), ('\u{10a80}', '\u{10a9c}'),
('\u{10ac0}', '\u{10ac7}'), ('\u{10ac9}', '\u{10ae4}'), ('\u{10b00}', '\u{10b35}'),
('\u{10b40}', '\u{10b55}'), ('\u{10b60}', '\u{10b72}'), ('\u{10b80}', '\u{10b91}'),
('\u{10c00}', '\u{10c48}'), ('\u{10c80}', '\u{10cb2}'), ('\u{10cc0}', '\u{10cf2}'),
('\u{11003}', '\u{11037}'), ('\u{11083}', '\u{110af}'), ('\u{110d0}', '\u{110e8}'),
('\u{11103}', '\u{11126}'), ('\u{11150}', '\u{11172}'), ('\u{11176}', '\u{11176}'),
('\u{11183}', '\u{111b2}'), ('\u{111c1}', '\u{111c4}'), ('\u{111da}', '\u{111da}'),
('\u{111dc}', '\u{111dc}'), ('\u{11200}', '\u{11211}'), ('\u{11213}', '\u{1122b}'),
('\u{11280}', '\u{11286}'), ('\u{11288}', '\u{11288}'), ('\u{1128a}', '\u{1128d}'),
('\u{1128f}', '\u{1129d}'), ('\u{1129f}', '\u{112a8}'), ('\u{112b0}', '\u{112de}'),
('\u{11305}', '\u{1130c}'), ('\u{1130f}', '\u{11310}'), ('\u{11313}', '\u{11328}'),
('\u{1132a}', '\u{11330}'), ('\u{11332}', '\u{11333}'), ('\u{11335}', '\u{11339}'),
('\u{1133d}', '\u{1133d}'), ('\u{11350}', '\u{11350}'), ('\u{1135d}', '\u{11361}'),
('\u{11400}', '\u{11434}'), ('\u{11447}', '\u{1144a}'), ('\u{11480}', '\u{114af}'),
('\u{114c4}', '\u{114c5}'), ('\u{114c7}', '\u{114c7}'), ('\u{11580}', '\u{115ae}'),
('\u{115d8}', '\u{115db}'), ('\u{11600}', '\u{1162f}'), ('\u{11644}', '\u{11644}'),
('\u{11680}', '\u{116aa}'), ('\u{11700}', '\u{11719}'), ('\u{118a0}', '\u{118df}'),
('\u{118ff}', '\u{118ff}'), ('\u{11ac0}', '\u{11af8}'), ('\u{11c00}', '\u{11c08}'),
('\u{11c0a}', '\u{11c2e}'), ('\u{11c40}', '\u{11c40}'), ('\u{11c72}', '\u{11c8f}'),
('\u{12000}', '\u{12399}'), ('\u{12400}', '\u{1246e}'), ('\u{12480}', '\u{12543}'),
('\u{13000}', '\u{1342e}'), ('\u{14400}', '\u{14646}'), ('\u{16800}', '\u{16a38}'),
('\u{16a40}', '\u{16a5e}'), ('\u{16ad0}', '\u{16aed}'), ('\u{16b00}', '\u{16b2f}'),
('\u{16b40}', '\u{16b43}'), ('\u{16b63}', '\u{16b77}'), ('\u{16b7d}', '\u{16b8f}'),
('\u{16f00}', '\u{16f44}'), ('\u{16f50}', '\u{16f50}'), ('\u{16f93}', '\u{16f9f}'),
('\u{16fe0}', '\u{16fe0}'), ('\u{17000}', '\u{187ec}'), ('\u{18800}', '\u{18af2}'),
('\u{1b000}', '\u{1b001}'), ('\u{1bc00}', '\u{1bc6a}'), ('\u{1bc70}', '\u{1bc7c}'),
('\u{1bc80}', '\u{1bc88}'), ('\u{1bc90}', '\u{1bc99}'), ('\u{1d400}', '\u{1d454}'),
('\u{1d456}', '\u{1d49c}'), ('\u{1d49e}', '\u{1d49f}'), ('\u{1d4a2}', '\u{1d4a2}'),
('\u{1d4a5}', '\u{1d4a6}'), ('\u{1d4a9}', '\u{1d4ac}'), ('\u{1d4ae}', '\u{1d4b9}'),
('\u{1d4bb}', '\u{1d4bb}'), ('\u{1d4bd}', '\u{1d4c3}'), ('\u{1d4c5}', '\u{1d505}'),
('\u{1d507}', '\u{1d50a}'), ('\u{1d50d}', '\u{1d514}'), ('\u{1d516}', '\u{1d51c}'),
('\u{1d51e}', '\u{1d539}'), ('\u{1d53b}', '\u{1d53e}'), ('\u{1d540}', '\u{1d544}'),
('\u{1d546}', '\u{1d546}'), ('\u{1d54a}', '\u{1d550}'), ('\u{1d552}', '\u{1d6a5}'),
('\u{1d6a8}', '\u{1d6c0}'), ('\u{1d6c2}', '\u{1d6da}'), ('\u{1d6dc}', '\u{1d6fa}'),
('\u{1d6fc}', '\u{1d714}'), ('\u{1d716}', '\u{1d734}'), ('\u{1d736}', '\u{1d74e}'),
('\u{1d750}', '\u{1d76e}'), ('\u{1d770}', '\u{1d788}'), ('\u{1d78a}', '\u{1d7a8}'),
('\u{1d7aa}', '\u{1d7c2}'), ('\u{1d7c4}', '\u{1d7cb}'), ('\u{1e800}', '\u{1e8c4}'),
('\u{1e900}', '\u{1e943}'), ('\u{1ee00}', '\u{1ee03}'), ('\u{1ee05}', '\u{1ee1f}'),
('\u{1ee21}', '\u{1ee22}'), ('\u{1ee24}', '\u{1ee24}'), ('\u{1ee27}', '\u{1ee27}'),
('\u{1ee29}', '\u{1ee32}'), ('\u{1ee34}', '\u{1ee37}'), ('\u{1ee39}', '\u{1ee39}'),
('\u{1ee3b}', '\u{1ee3b}'), ('\u{1ee42}', '\u{1ee42}'), ('\u{1ee47}', '\u{1ee47}'),
('\u{1ee49}', '\u{1ee49}'), ('\u{1ee4b}', '\u{1ee4b}'), ('\u{1ee4d}', '\u{1ee4f}'),
('\u{1ee51}', '\u{1ee52}'), ('\u{1ee54}', '\u{1ee54}'), ('\u{1ee57}', '\u{1ee57}'),
('\u{1ee59}', '\u{1ee59}'), ('\u{1ee5b}', '\u{1ee5b}'), ('\u{1ee5d}', '\u{1ee5d}'),
('\u{1ee5f}', '\u{1ee5f}'), ('\u{1ee61}', '\u{1ee62}'), ('\u{1ee64}', '\u{1ee64}'),
('\u{1ee67}', '\u{1ee6a}'), ('\u{1ee6c}', '\u{1ee72}'), ('\u{1ee74}', '\u{1ee77}'),
('\u{1ee79}', '\u{1ee7c}'), ('\u{1ee7e}', '\u{1ee7e}'), ('\u{1ee80}', '\u{1ee89}'),
('\u{1ee8b}', '\u{1ee9b}'), ('\u{1eea1}', '\u{1eea3}'), ('\u{1eea5}', '\u{1eea9}'),
('\u{1eeab}', '\u{1eebb}'), ('\u{20000}', '\u{2a6d6}'), ('\u{2a700}', '\u{2b734}'),
('\u{2b740}', '\u{2b81d}'), ('\u{2b820}', '\u{2cea1}'), ('\u{2f800}', '\u{2fa1d}')
];
    /// Returns `true` if `c` carries the `XID_Start` derived property,
    /// i.e. it may begin an identifier
    /// (binary search over the generated range table above).
    pub fn XID_Start(c: char) -> bool {
        super::bsearch_range_table(c, XID_Start_table)
    }
}

Просмотреть файл

@ -1,113 +0,0 @@
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[cfg(feature = "bench")]
use std::iter;
#[cfg(feature = "bench")]
use test::Bencher;
#[cfg(feature = "bench")]
use std::prelude::v1::*;
use super::UnicodeXID;
#[cfg(feature = "bench")]
#[bench]
/// Benchmark this crate's `is_xid_start` over a 4 KiB all-ASCII string.
fn cargo_is_xid_start(b: &mut Bencher) {
    let input: String = "a".repeat(4096);
    b.bytes = input.len() as u64;
    b.iter(|| input.chars().all(UnicodeXID::is_xid_start));
}
#[cfg(feature = "bench")]
#[bench]
/// Benchmark the standard library's `char::is_xid_start` on the same
/// 4 KiB ASCII input, for comparison against the crate's table lookup.
fn stdlib_is_xid_start(b: &mut Bencher) {
    let input: String = "a".repeat(4096);
    b.bytes = input.len() as u64;
    b.iter(|| input.chars().all(char::is_xid_start));
}
#[cfg(feature = "bench")]
#[bench]
/// Benchmark this crate's `is_xid_continue` over a 4 KiB all-ASCII string.
fn cargo_xid_continue(b: &mut Bencher) {
    let input: String = "a".repeat(4096);
    b.bytes = input.len() as u64;
    b.iter(|| input.chars().all(UnicodeXID::is_xid_continue));
}
#[cfg(feature = "bench")]
#[bench]
/// Benchmark the standard library's `char::is_xid_continue` on the same
/// 4 KiB ASCII input, for comparison against the crate's table lookup.
fn stdlib_xid_continue(b: &mut Bencher) {
    let input: String = "a".repeat(4096);
    b.bytes = input.len() as u64;
    b.iter(|| input.chars().all(char::is_xid_continue));
}
#[test]
/// Characters that must be classified as `XID_Start`: ASCII letter
/// boundaries plus two supplementary-plane letters.
fn test_is_xid_start() {
    let samples = ['A', 'Z', 'a', 'z', '\u{1000d}', '\u{10026}'];
    for &ch in samples.iter() {
        assert!(UnicodeXID::is_xid_start(ch), "{}", ch);
    }
}
#[test]
/// Characters that must NOT be classified as `XID_Start`: controls,
/// digits, punctuation, a modifier letter, and a noncharacter.
fn test_is_not_xid_start() {
    let samples = [
        '\x00', '\x01', '0', '9', ' ', '[', '<', '{', '(', '\u{02c2}', '\u{ffff}',
    ];
    for &ch in samples.iter() {
        assert!(!UnicodeXID::is_xid_start(ch), "{}", ch);
    }
}
#[test]
/// Characters that must be classified as `XID_Continue`: digits, letter
/// boundaries, underscore, plus two supplementary-plane letters.
fn test_is_xid_continue() {
    let samples = ['0', '9', 'A', 'Z', 'a', 'z', '_', '\u{1000d}', '\u{10026}'];
    for &ch in samples.iter() {
        assert!(UnicodeXID::is_xid_continue(ch), "{}", ch);
    }
}
#[test]
/// Characters that must NOT be classified as `XID_Continue`: controls,
/// punctuation, a modifier letter, and a noncharacter.
fn test_is_not_xid_continue() {
    let samples = ['\x00', '\x01', ' ', '[', '<', '{', '(', '\u{02c2}', '\u{ffff}'];
    for ch in samples.iter().copied() {
        assert!(!UnicodeXID::is_xid_continue(ch), "{}", ch);
    }
}