зеркало из https://github.com/mozilla/gecko-dev.git
Bug 1583471 - Don't pull new syn / quote / etc. dependencies just yet. r=bbouvier
Transition to syn 1.0 is ongoing, but these take a long time to build. Hopefully we already patch coreaudio-sys manually so all the crates dependent on bindgen are effectively in-tree. I published v0.51.1-oldsyn to avoid pulling all these dependencies for now. Differential Revision: https://phabricator.services.mozilla.com/D46966 --HG-- rename : third_party/rust/quote-0.6.11/tests/conditional/integer128.rs => third_party/rust/quote/tests/conditional/integer128.rs rename : third_party/rust/unicode-xid-0.1.0/scripts/unicode.py => third_party/rust/unicode-xid/scripts/unicode.py extra : moz-landing-system : lando
This commit is contained in:
Родитель
e207459263
Коммит
41ff579eac
|
@ -188,7 +188,7 @@ dependencies = [
|
|||
name = "baldrdash"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"bindgen 0.51.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"bindgen 0.51.1-oldsyn (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"cranelift-codegen 0.42.0 (git+https://github.com/CraneStation/Cranelift?rev=9c6f8feb0f28f50434c0cf67f3f7c07486a42b7e)",
|
||||
"cranelift-wasm 0.42.0 (git+https://github.com/CraneStation/Cranelift?rev=9c6f8feb0f28f50434c0cf67f3f7c07486a42b7e)",
|
||||
"env_logger 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
|
@ -241,7 +241,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "bindgen"
|
||||
version = "0.51.1"
|
||||
version = "0.51.1-oldsyn"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
|
@ -250,8 +250,8 @@ dependencies = [
|
|||
"clang-sys 0.28.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"peeking_take_while 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"proc-macro2 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"proc-macro2 0.4.27 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"quote 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"regex 1.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"shlex 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
|
@ -558,7 +558,7 @@ dependencies = [
|
|||
name = "coreaudio-sys"
|
||||
version = "0.2.2"
|
||||
dependencies = [
|
||||
"bindgen 0.51.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"bindgen 0.51.1-oldsyn (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -1472,7 +1472,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
name = "js"
|
||||
version = "0.1.4"
|
||||
dependencies = [
|
||||
"bindgen 0.51.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"bindgen 0.51.1-oldsyn (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"cmake 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"env_logger 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"glob 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
|
@ -2251,14 +2251,6 @@ dependencies = [
|
|||
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro2"
|
||||
version = "1.0.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "procedural-masquerade"
|
||||
version = "0.1.1"
|
||||
|
@ -2304,14 +2296,6 @@ dependencies = [
|
|||
"proc-macro2 0.4.27 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "quote"
|
||||
version = "1.0.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"proc-macro2 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand"
|
||||
version = "0.6.5"
|
||||
|
@ -2879,7 +2863,7 @@ dependencies = [
|
|||
"app_units 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"arrayvec 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"atomic_refcell 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"bindgen 0.51.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"bindgen 0.51.1-oldsyn (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"cssparser 0.25.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
|
@ -3364,11 +3348,6 @@ name = "unicode-xid"
|
|||
version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "unicode-xid"
|
||||
version = "0.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "unreachable"
|
||||
version = "1.0.0"
|
||||
|
@ -3783,7 +3762,7 @@ dependencies = [
|
|||
"checksum base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0b25d992356d2eb0ed82172f5248873db5560c4721f564b13cb5193bda5e668e"
|
||||
"checksum binary-space-partition 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "88ceb0d16c4fd0e42876e298d7d3ce3780dd9ebdcbe4199816a32c77e08597ff"
|
||||
"checksum bincode 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bda13183df33055cbb84b847becce220d392df502ebe7a4a78d7021771ed94d0"
|
||||
"checksum bindgen 0.51.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ebd71393f1ec0509b553aa012b9b58e81dadbdff7130bd3b8cba576e69b32f75"
|
||||
"checksum bindgen 0.51.1-oldsyn (registry+https://github.com/rust-lang/crates.io-index)" = "ae8266cdd336dfd53d71a95c33251232f64553b8770ebd85158039b3a734244b"
|
||||
"checksum binjs_meta 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6c9a0da2208ceb785c1626fa8b7d250d2e5546ae230294b4a998e4f818c1768e"
|
||||
"checksum bit-vec 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f59bbe95d4e52a6398ec21238d31577f2b28a9d86807f06ca59d191d8440d0bb"
|
||||
"checksum bit_reverse 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "5e97e02db5a2899c0377f3d6031d5da8296ca2b47abef6ed699de51b9e40a28c"
|
||||
|
@ -3955,11 +3934,9 @@ dependencies = [
|
|||
"checksum podio 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e5422a1ee1bc57cc47ae717b0137314258138f38fd5f3cea083f43a9725383a0"
|
||||
"checksum precomputed-hash 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c"
|
||||
"checksum proc-macro2 0.4.27 (registry+https://github.com/rust-lang/crates.io-index)" = "4d317f9caece796be1980837fd5cb3dfec5613ebdb04ad0956deea83ce168915"
|
||||
"checksum proc-macro2 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "afdc77cc74ec70ed262262942ebb7dac3d479e9e5cfa2da1841c0806f6cdabcc"
|
||||
"checksum procedural-masquerade 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9f566249236c6ca4340f7ca78968271f0ed2b0f234007a61b66f9ecd0af09260"
|
||||
"checksum quick-error 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eda5fe9b71976e62bc81b781206aaa076401769b2143379d3eb2118388babac4"
|
||||
"checksum quote 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)" = "cdd8e04bd9c52e0342b406469d494fcb033be4bdbe5c606016defbb1681411e1"
|
||||
"checksum quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "053a8c8bcc71fcce321828dc897a98ab9760bef03a4fc36693c231e5b3216cfe"
|
||||
"checksum rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca"
|
||||
"checksum rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef"
|
||||
"checksum rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b"
|
||||
|
@ -4059,7 +4036,6 @@ dependencies = [
|
|||
"checksum unicode-segmentation 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "aa6024fc12ddfd1c6dbc14a80fa2324d4568849869b779f6bd37e5e4c03344d1"
|
||||
"checksum unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "bf3a113775714a22dcb774d8ea3655c53a32debae63a063acc00a91cc586245f"
|
||||
"checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc"
|
||||
"checksum unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c"
|
||||
"checksum unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56"
|
||||
"checksum url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "75b414f6c464c879d7f9babf951f23bc3743fb7313c081b2e6ca719067ea9d61"
|
||||
"checksum urlencoding 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3df3561629a8bb4c57e5a2e4c43348d9e29c7c29d9b1c4c1f47166deca8f37ed"
|
||||
|
|
|
@ -7,7 +7,7 @@ license = "MPL-2.0"
|
|||
|
||||
[build-dependencies]
|
||||
env_logger = {version = "0.6", default-features = false} # disable `regex` to reduce code size
|
||||
bindgen = {version = "0.51.1", default-features = false} # disable `logging` to reduce code size
|
||||
bindgen = {version = "0.51.1-oldsyn", default-features = false} # disable `logging` to reduce code size
|
||||
cmake = "0.1"
|
||||
glob = "0.3"
|
||||
|
||||
|
|
|
@ -20,7 +20,7 @@ env_logger = "0.6"
|
|||
smallvec = { version = "0.6.6" }
|
||||
|
||||
[build-dependencies]
|
||||
bindgen = {version = "0.51.1", default-features = false} # disable `logging` to reduce code size
|
||||
bindgen = {version = "0.51.1-oldsyn", default-features = false} # disable `logging` to reduce code size
|
||||
|
||||
[features]
|
||||
default = ['cranelift-codegen/std']
|
||||
|
|
|
@ -84,7 +84,7 @@ void = "1.0.2"
|
|||
[build-dependencies]
|
||||
lazy_static = "1"
|
||||
log = "0.4"
|
||||
bindgen = {version = "0.51.1", optional = true, default-features = false}
|
||||
bindgen = {version = "0.51.1-oldsyn", optional = true, default-features = false}
|
||||
regex = {version = "1.0", optional = true}
|
||||
walkdir = "2.1.4"
|
||||
toml = {version = "0.4.5", optional = true, default-features = false}
|
||||
|
|
|
@ -1 +1 @@
|
|||
{"files":{"Cargo.lock":"f1b56f3cb914b4ed3214d3ce87d599398b399841718fc938c1b5a309356a44ea","Cargo.toml":"a4656cdd7bd0794e6f10ba78ed3c9a82cd86bfcbec59be7731ee90984de64bde","LICENSE":"c23953d9deb0a3312dbeaf6c128a657f3591acee45067612fa68405eaa4525db","README.md":"5a1f556c6a57c0a6ccc65e19c27718e0f4b32381a8efcc80f6601b33c58c5d59","build.rs":"e1f148e01150af6a66b6af2e5d955c8b9fa092cb4697bae2bcec8a00119055ae","csmith-fuzzing/README.md":"7107b70fedb0c0a0cadb3c439a49c1bd0119a6d38dc63b1aecc74d1942256ef2","src/callbacks.rs":"82e0be9ca02e9a652af934ed546f1cedfc6db0716643123d9a5aab33b360c7d0","src/clang.rs":"66e86bfbbe872cc247cf3bc88a2155e25f587414834023515d184dc13f8f7287","src/codegen/bitfield_unit.rs":"a8fb1a2d97a99685106fcaac87d2013f79d2690d6a46ff05ad1e3629b6075664","src/codegen/bitfield_unit_tests.rs":"dd252134118450800b516e375c872e17b4c1aee63a7b8adbe5b2cd53434bbc7e","src/codegen/error.rs":"ebc9e0f50c6adc9558b76ce774346c02316500a1ebe3cbf56ed00e5e9fe3e456","src/codegen/helpers.rs":"304c9eb56ea6b2c054e1f9fefd5812b0df3a156eee5876f3051fd0b48c7aeac3","src/codegen/impl_debug.rs":"428df604b4be105e3275275e8be81e8306b940abddc1b89675f98a01bf5de8c1","src/codegen/impl_partialeq.rs":"83707f7b13501dc413c904a17163cb54af11e56138f36dfef40ce46c823200fd","src/codegen/mod.rs":"42732503dd25ed4b7924b71862f9100cf281e22f99016540da61a602c78a3650","src/codegen/struct_layout.rs":"482bab6384e65c78346de4f8d8e4d1c3b7df38250788b58bdd1f7b1c7bf70bac","src/extra_assertions.rs":"494534bd4f18b80d89b180c8a93733e6617edcf7deac413e9a73fd6e7bc9ced7","src/features.rs":"2d82f0700c22ea44e010a89c3ae857c3feaf2c85cab3fe4d0277a41a8c2841c4","src/ir/analysis/derive.rs":"2a2322f178760859cdb4b2d45d947ff213c7c684840b4ade46b7ceb34fa6705b","src/ir/analysis/has_destructor.rs":"10380d06ed03d058f10f6f6835d9b8fbebac455a1ea218780430a0ffd8d63472","src/ir/analysis/has_float.rs":"1838ba81eb05a9c3e311687e2247d561cc5093377b15ef8008257025ea56da04","src/ir/analysis/has_type_param_in_array.rs":"dddc5511a705e3a653b5e754e359637031b4862e1a1fc1e17f
711fb2fbfc1cef","src/ir/analysis/has_vtable.rs":"8da9deec23c4552ecd5b883eaa036e4f2174a5949194c333a62ef463d28dcb6a","src/ir/analysis/mod.rs":"54993cb77df1870bb12cbc6b3a243c2da942cdc967a7d21dacb430601b49b2a1","src/ir/analysis/sizedness.rs":"d0673e19add38a07680ae3a9a5e998a0b2c3917e68efb6639ffe7ea193ada1b1","src/ir/analysis/template_params.rs":"9b662b5ec99cd8409d771a16ee42df500962b0c26f0da85e430ede19cc2b17c9","src/ir/annotations.rs":"268f90fc1d40fadee329c26b183b2aaa9de98f9246fea580404ee0e626315546","src/ir/comment.rs":"31d64a49ae3d9c3c348fa2539e03306ca3a23fae429cab452e42b31ecf632145","src/ir/comp.rs":"73d5d32d70b8e62d33ad4ed6bcbb9b23273c59b5b45570b85a2357c6e1116028","src/ir/context.rs":"c30be52b22fdb489afb34426bcb2e048ae2594846b15324693dd1b71e7dc3369","src/ir/derive.rs":"e5581852eec87918901a129284b4965aefc8a19394187a8095779a084f28fabe","src/ir/dot.rs":"5da8336bf5fd8efabd784a06e0d764eb91566c19ced8ce017a24ae237f0cbe18","src/ir/enum_ty.rs":"c303f3b271d2703c2487e4afaf4b8c9b5bbedb9e1c6a8044de667c21ad8f67fb","src/ir/function.rs":"7a25a55d7f2ded1724894bd1f7ee4766a4bf5f193967bf3a2628ec604b918018","src/ir/int.rs":"68a86182743ec338d58e42203364dc7c8970cb7ec3550433ca92f0c9489b4442","src/ir/item.rs":"203fe53efb0203e0ddc3fb9fcff7b2068f80f252d249a39c137e0cc070663a49","src/ir/item_kind.rs":"7666a1ff1b8260978b790a08b4139ab56b5c65714a5652bbcec7faa7443adc36","src/ir/layout.rs":"936f96fafab34e35b622a5f9e56b0fbd2c97d2e9222470e3687f882f40db1349","src/ir/mod.rs":"713cd537434567003197a123cbae679602c715e976d22f7b23dafd0826ea4c70","src/ir/module.rs":"a26bb0ac90d4cabb0a45d9c1a42b5515c74d4c233dc084e2f85161eac12bff15","src/ir/objc.rs":"ced8242068d5daa2940469693f7277c79368019f8e30ce1e4f55d834bf24c411","src/ir/template.rs":"6c2823c9bab82ab1d70f4d643e8f4d6420be5eafcb78324fb69649e407561cec","src/ir/traversal.rs":"5ac088277f4dfe2918d81b9294aaee41fd83db8e46def66a05f89de078bf4c49","src/ir/ty.rs":"5af2b62d278c679b7c4e597263fce01113e90242e7d263b948d93bc4274dfe9a","src/ir/var.rs":"9226241b188877b6a7bea6523e1
4318a8523a6dba57c4f15809c377f87540061","src/lib.rs":"b968f8d0858e3145137a2e33c0913acf19d21f884f914bc513bc18eea1c37bf1","src/log_stubs.rs":"6dfdd908b7c6453da416cf232893768f9480e551ca4add0858ef88bf71ee6ceb","src/main.rs":"6b42a74dfd5c3bde75b7fb984a82f3b3d652abd45aa54b31a40fbda6b02ae674","src/options.rs":"f08facc9d58cb79c7ab93c9d614f13d4d3eca2b5801012da56490a790a8d8c4c","src/parse.rs":"be7d13cc84fae79ec7b3aa9e77063fa475a48d74a854423e2c72d75006a25202","src/regex_set.rs":"5cb72fc3714c0d79e9e942d003349c0775fafd7cd0c9603c65f5261883bbf9cf","src/time.rs":"8efe317e7c6b5ba8e0865ce7b49ca775ee8a02590f4241ef62f647fa3c22b68e"},"package":"ebd71393f1ec0509b553aa012b9b58e81dadbdff7130bd3b8cba576e69b32f75"}
|
||||
{"files":{"Cargo.lock":"08acbc65a73a371ed1468a7937888e4a92da259bcff32f3f76c2207b45f0f75f","Cargo.toml":"03fdbfce44fd7e90fd59b8ffc43bcb8cb92ce05c9f4aa22c21040a2fc7979d63","LICENSE":"c23953d9deb0a3312dbeaf6c128a657f3591acee45067612fa68405eaa4525db","README.md":"5a1f556c6a57c0a6ccc65e19c27718e0f4b32381a8efcc80f6601b33c58c5d59","build.rs":"e1f148e01150af6a66b6af2e5d955c8b9fa092cb4697bae2bcec8a00119055ae","csmith-fuzzing/README.md":"7107b70fedb0c0a0cadb3c439a49c1bd0119a6d38dc63b1aecc74d1942256ef2","src/callbacks.rs":"82e0be9ca02e9a652af934ed546f1cedfc6db0716643123d9a5aab33b360c7d0","src/clang.rs":"66e86bfbbe872cc247cf3bc88a2155e25f587414834023515d184dc13f8f7287","src/codegen/bitfield_unit.rs":"a8fb1a2d97a99685106fcaac87d2013f79d2690d6a46ff05ad1e3629b6075664","src/codegen/bitfield_unit_tests.rs":"dd252134118450800b516e375c872e17b4c1aee63a7b8adbe5b2cd53434bbc7e","src/codegen/error.rs":"ebc9e0f50c6adc9558b76ce774346c02316500a1ebe3cbf56ed00e5e9fe3e456","src/codegen/helpers.rs":"304c9eb56ea6b2c054e1f9fefd5812b0df3a156eee5876f3051fd0b48c7aeac3","src/codegen/impl_debug.rs":"428df604b4be105e3275275e8be81e8306b940abddc1b89675f98a01bf5de8c1","src/codegen/impl_partialeq.rs":"83707f7b13501dc413c904a17163cb54af11e56138f36dfef40ce46c823200fd","src/codegen/mod.rs":"42732503dd25ed4b7924b71862f9100cf281e22f99016540da61a602c78a3650","src/codegen/struct_layout.rs":"482bab6384e65c78346de4f8d8e4d1c3b7df38250788b58bdd1f7b1c7bf70bac","src/extra_assertions.rs":"494534bd4f18b80d89b180c8a93733e6617edcf7deac413e9a73fd6e7bc9ced7","src/features.rs":"2d82f0700c22ea44e010a89c3ae857c3feaf2c85cab3fe4d0277a41a8c2841c4","src/ir/analysis/derive.rs":"2a2322f178760859cdb4b2d45d947ff213c7c684840b4ade46b7ceb34fa6705b","src/ir/analysis/has_destructor.rs":"10380d06ed03d058f10f6f6835d9b8fbebac455a1ea218780430a0ffd8d63472","src/ir/analysis/has_float.rs":"1838ba81eb05a9c3e311687e2247d561cc5093377b15ef8008257025ea56da04","src/ir/analysis/has_type_param_in_array.rs":"dddc5511a705e3a653b5e754e359637031b4862e1a1fc1e17f
711fb2fbfc1cef","src/ir/analysis/has_vtable.rs":"8da9deec23c4552ecd5b883eaa036e4f2174a5949194c333a62ef463d28dcb6a","src/ir/analysis/mod.rs":"54993cb77df1870bb12cbc6b3a243c2da942cdc967a7d21dacb430601b49b2a1","src/ir/analysis/sizedness.rs":"d0673e19add38a07680ae3a9a5e998a0b2c3917e68efb6639ffe7ea193ada1b1","src/ir/analysis/template_params.rs":"9b662b5ec99cd8409d771a16ee42df500962b0c26f0da85e430ede19cc2b17c9","src/ir/annotations.rs":"268f90fc1d40fadee329c26b183b2aaa9de98f9246fea580404ee0e626315546","src/ir/comment.rs":"31d64a49ae3d9c3c348fa2539e03306ca3a23fae429cab452e42b31ecf632145","src/ir/comp.rs":"73d5d32d70b8e62d33ad4ed6bcbb9b23273c59b5b45570b85a2357c6e1116028","src/ir/context.rs":"c30be52b22fdb489afb34426bcb2e048ae2594846b15324693dd1b71e7dc3369","src/ir/derive.rs":"e5581852eec87918901a129284b4965aefc8a19394187a8095779a084f28fabe","src/ir/dot.rs":"5da8336bf5fd8efabd784a06e0d764eb91566c19ced8ce017a24ae237f0cbe18","src/ir/enum_ty.rs":"c303f3b271d2703c2487e4afaf4b8c9b5bbedb9e1c6a8044de667c21ad8f67fb","src/ir/function.rs":"7a25a55d7f2ded1724894bd1f7ee4766a4bf5f193967bf3a2628ec604b918018","src/ir/int.rs":"68a86182743ec338d58e42203364dc7c8970cb7ec3550433ca92f0c9489b4442","src/ir/item.rs":"203fe53efb0203e0ddc3fb9fcff7b2068f80f252d249a39c137e0cc070663a49","src/ir/item_kind.rs":"7666a1ff1b8260978b790a08b4139ab56b5c65714a5652bbcec7faa7443adc36","src/ir/layout.rs":"936f96fafab34e35b622a5f9e56b0fbd2c97d2e9222470e3687f882f40db1349","src/ir/mod.rs":"713cd537434567003197a123cbae679602c715e976d22f7b23dafd0826ea4c70","src/ir/module.rs":"a26bb0ac90d4cabb0a45d9c1a42b5515c74d4c233dc084e2f85161eac12bff15","src/ir/objc.rs":"ced8242068d5daa2940469693f7277c79368019f8e30ce1e4f55d834bf24c411","src/ir/template.rs":"6c2823c9bab82ab1d70f4d643e8f4d6420be5eafcb78324fb69649e407561cec","src/ir/traversal.rs":"c9bc1b8d7f1673d6f709e14bc04b684ec7097c2d12de446a2327296d5efc9547","src/ir/ty.rs":"5af2b62d278c679b7c4e597263fce01113e90242e7d263b948d93bc4274dfe9a","src/ir/var.rs":"9226241b188877b6a7bea6523e1
4318a8523a6dba57c4f15809c377f87540061","src/lib.rs":"b968f8d0858e3145137a2e33c0913acf19d21f884f914bc513bc18eea1c37bf1","src/log_stubs.rs":"6dfdd908b7c6453da416cf232893768f9480e551ca4add0858ef88bf71ee6ceb","src/main.rs":"6b42a74dfd5c3bde75b7fb984a82f3b3d652abd45aa54b31a40fbda6b02ae674","src/options.rs":"f08facc9d58cb79c7ab93c9d614f13d4d3eca2b5801012da56490a790a8d8c4c","src/parse.rs":"be7d13cc84fae79ec7b3aa9e77063fa475a48d74a854423e2c72d75006a25202","src/regex_set.rs":"5cb72fc3714c0d79e9e942d003349c0775fafd7cd0c9603c65f5261883bbf9cf","src/time.rs":"8efe317e7c6b5ba8e0865ce7b49ca775ee8a02590f4241ef62f647fa3c22b68e"},"package":"ae8266cdd336dfd53d71a95c33251232f64553b8770ebd85158039b3a734244b"}
|
|
@ -28,7 +28,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "bindgen"
|
||||
version = "0.51.1"
|
||||
version = "0.51.1-oldsyn"
|
||||
dependencies = [
|
||||
"bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"cexpr 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
|
@ -40,8 +40,8 @@ dependencies = [
|
|||
"lazy_static 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"log 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"peeking_take_while 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"proc-macro2 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"quote 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"proc-macro2 0.4.19 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"quote 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"regex 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"shlex 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
|
@ -185,10 +185,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
|
||||
[[package]]
|
||||
name = "proc-macro2"
|
||||
version = "1.0.0"
|
||||
version = "0.4.19"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -198,10 +198,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
|
||||
[[package]]
|
||||
name = "quote"
|
||||
version = "1.0.0"
|
||||
version = "0.6.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"proc-macro2 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"proc-macro2 0.4.19 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -301,7 +301,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
|
||||
[[package]]
|
||||
name = "unicode-xid"
|
||||
version = "0.2.0"
|
||||
version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
|
@ -385,9 +385,9 @@ dependencies = [
|
|||
"checksum memchr 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4b3629fe9fdbff6daa6c33b90f7c08355c1aca05a3d01fa8063b822fcf185f3b"
|
||||
"checksum nom 4.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "898696750eb5c3ce5eb5afbfbe46e7f7c4e1936e19d3e97be4b7937da7b6d114"
|
||||
"checksum peeking_take_while 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099"
|
||||
"checksum proc-macro2 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "19f287c234c9b2d0308d692dee5c449c1a171167a6f8150f7cf2a49d8fd96967"
|
||||
"checksum proc-macro2 0.4.19 (registry+https://github.com/rust-lang/crates.io-index)" = "ffe022fb8c8bd254524b0b3305906c1921fa37a84a644e29079a9e62200c3901"
|
||||
"checksum quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9274b940887ce9addde99c4eee6b5c44cc494b182b97e73dc8ffdcb3397fd3f0"
|
||||
"checksum quote 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7ab938ebe6f1c82426b5fb82eaf10c3e3028c53deaa3fbe38f5904b37cf4d767"
|
||||
"checksum quote 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)" = "dd636425967c33af890042c483632d33fa7a18f19ad1d7ea72e8998c6ef8dea5"
|
||||
"checksum redox_syscall 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)" = "c214e91d3ecf43e9a4e41e578973adeb14b474f2bee858742d127af75a0112b1"
|
||||
"checksum redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7e891cfe48e9100a70a3b6eb652fef28920c117d366339687bd5576160db0f76"
|
||||
"checksum regex 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "2069749032ea3ec200ca51e4a31df41759190a88edca0d2d86ee8bedf7073341"
|
||||
|
@ -401,7 +401,7 @@ dependencies = [
|
|||
"checksum thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c6b53e329000edc2b34dbe8545fd20e55a333362d0a321909685a19bd28c3f1b"
|
||||
"checksum ucd-util 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "fd2be2d6639d0f8fe6cdda291ad456e23629558d466e2789d2c3e9892bda285d"
|
||||
"checksum unicode-width 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "882386231c45df4700b275c7ff55b6f3698780a650026380e72dabe76fa46526"
|
||||
"checksum unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c"
|
||||
"checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc"
|
||||
"checksum utf8-ranges 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "fd70f467df6810094968e2fce0ee1bd0e87157aceb026a8c083bcf5e25b9efe4"
|
||||
"checksum vec_map 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "05c78687fb1a80548ae3250346c3db86a80a7cdd77bda190189f2d0a0987c81a"
|
||||
"checksum version_check 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "7716c242968ee87e5542f8021178248f267f295a5c4803beae8b8b7fd9bc6051"
|
||||
|
|
|
@ -12,7 +12,7 @@
|
|||
|
||||
[package]
|
||||
name = "bindgen"
|
||||
version = "0.51.1"
|
||||
version = "0.51.1-oldsyn"
|
||||
authors = ["Jyun-Yan You <jyyou.tw@gmail.com>", "Emilio Cobos Álvarez <emilio@crisal.io>", "Nick Fitzgerald <fitzgen@gmail.com>", "The Servo project developers"]
|
||||
build = "build.rs"
|
||||
include = ["LICENSE", "README.md", "Cargo.toml", "build.rs", "src/*.rs", "src/**/*.rs"]
|
||||
|
@ -65,11 +65,11 @@ optional = true
|
|||
version = "0.1.2"
|
||||
|
||||
[dependencies.proc-macro2]
|
||||
version = "1"
|
||||
version = "0.4"
|
||||
default-features = false
|
||||
|
||||
[dependencies.quote]
|
||||
version = "1"
|
||||
version = "0.6"
|
||||
default-features = false
|
||||
|
||||
[dependencies.regex]
|
||||
|
|
|
@ -503,6 +503,6 @@ mod tests {
|
|||
#[allow(dead_code)]
|
||||
fn traversal_predicate_is_object_safe() {
|
||||
// This should compile only if TraversalPredicate is object safe.
|
||||
fn takes_by_trait_object(_: &dyn TraversalPredicate) {}
|
||||
fn takes_by_trait_object(_: &TraversalPredicate) {}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -11,7 +11,7 @@ repository = "https://github.com/RustAudio/coreaudio-sys.git"
|
|||
build = "build.rs"
|
||||
|
||||
[build-dependencies]
|
||||
bindgen = {version = "0.51", default-features = false}
|
||||
bindgen = {version = "0.51.1-oldsyn", default-features = false}
|
||||
|
||||
[features]
|
||||
default = ["audio_toolbox", "audio_unit", "core_audio", "open_al", "core_midi"]
|
||||
|
@ -20,4 +20,4 @@ audio_unit = []
|
|||
core_audio = []
|
||||
open_al = []
|
||||
core_midi = []
|
||||
nobindgen = []
|
||||
nobindgen = []
|
||||
|
|
|
@ -1 +0,0 @@
|
|||
{"files":{"Cargo.toml":"b523856472549844b4bf20eca0473d955a7e5eeb95c70eddd31a05ac455427bb","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"89857eaaa305afe540abcf56fabae0194dfb4e7906a8098b7206acb23ed11ce8","build.rs":"36fa668f3bf309f243d0e977e8428446cc424303139c1f63410b3c2e30445aec","src/fallback.rs":"e4d1bcb1e92383a2285e6c947dd74b0e34144904948db68127faea627f5dd6ff","src/lib.rs":"896a1d212e30902ff051313808007406ca4471c27880a6ef19508f0ebb8333ee","src/strnom.rs":"60f5380106dbe568cca7abd09877e133c874fbee95d502e4830425c4613a640d","src/wrapper.rs":"0d7fe28ab2b7ee02b8eb8c5a636da364c60f6704b23e7db0a1ddd57c742f54b1","tests/marker.rs":"0227d07bbc7f2e2ad34662a6acb65668b7dc2f79141c4faa672703a04e27bea0","tests/test.rs":"166d35835355bdaa85bcf69de4dfb56ccddd8acf2e1a8cbc506782632b151674"},"package":"4d317f9caece796be1980837fd5cb3dfec5613ebdb04ad0956deea83ce168915"}
|
|
@ -1,39 +0,0 @@
|
|||
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
|
||||
#
|
||||
# When uploading crates to the registry Cargo will automatically
|
||||
# "normalize" Cargo.toml files for maximal compatibility
|
||||
# with all versions of Cargo and also rewrite `path` dependencies
|
||||
# to registry (e.g. crates.io) dependencies
|
||||
#
|
||||
# If you believe there's an error in this file please file an
|
||||
# issue against the rust-lang/cargo repository. If you're
|
||||
# editing this file be aware that the upstream Cargo.toml
|
||||
# will likely look very different (and much more reasonable)
|
||||
|
||||
[package]
|
||||
name = "proc-macro2"
|
||||
version = "0.4.27"
|
||||
authors = ["Alex Crichton <alex@alexcrichton.com>"]
|
||||
build = "build.rs"
|
||||
description = "A stable implementation of the upcoming new `proc_macro` API. Comes with an\noption, off by default, to also reimplement itself in terms of the upstream\nunstable API.\n"
|
||||
homepage = "https://github.com/alexcrichton/proc-macro2"
|
||||
documentation = "https://docs.rs/proc-macro2"
|
||||
readme = "README.md"
|
||||
keywords = ["macros"]
|
||||
license = "MIT/Apache-2.0"
|
||||
repository = "https://github.com/alexcrichton/proc-macro2"
|
||||
[package.metadata.docs.rs]
|
||||
rustc-args = ["--cfg", "procmacro2_semver_exempt"]
|
||||
rustdoc-args = ["--cfg", "procmacro2_semver_exempt"]
|
||||
[dependencies.unicode-xid]
|
||||
version = "0.1"
|
||||
[dev-dependencies.quote]
|
||||
version = "0.6"
|
||||
|
||||
[features]
|
||||
default = ["proc-macro"]
|
||||
nightly = []
|
||||
proc-macro = []
|
||||
span-locations = []
|
||||
[badges.travis-ci]
|
||||
repository = "alexcrichton/proc-macro2"
|
|
@ -1,201 +0,0 @@
|
|||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
|
@ -1,25 +0,0 @@
|
|||
Copyright (c) 2014 Alex Crichton
|
||||
|
||||
Permission is hereby granted, free of charge, to any
|
||||
person obtaining a copy of this software and associated
|
||||
documentation files (the "Software"), to deal in the
|
||||
Software without restriction, including without
|
||||
limitation the rights to use, copy, modify, merge,
|
||||
publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software
|
||||
is furnished to do so, subject to the following
|
||||
conditions:
|
||||
|
||||
The above copyright notice and this permission notice
|
||||
shall be included in all copies or substantial portions
|
||||
of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
|
||||
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
|
||||
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
|
||||
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
|
||||
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
||||
DEALINGS IN THE SOFTWARE.
|
|
@ -1,100 +0,0 @@
|
|||
# proc-macro2
|
||||
|
||||
[![Build Status](https://api.travis-ci.com/alexcrichton/proc-macro2.svg?branch=master)](https://travis-ci.com/alexcrichton/proc-macro2)
|
||||
[![Latest Version](https://img.shields.io/crates/v/proc-macro2.svg)](https://crates.io/crates/proc-macro2)
|
||||
[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/proc-macro2)
|
||||
|
||||
A wrapper around the procedural macro API of the compiler's `proc_macro` crate.
|
||||
This library serves three purposes:
|
||||
|
||||
- **Bring proc-macro-like functionality to other contexts like build.rs and
|
||||
main.rs.** Types from `proc_macro` are entirely specific to procedural macros
|
||||
and cannot ever exist in code outside of a procedural macro. Meanwhile
|
||||
`proc_macro2` types may exist anywhere including non-macro code. By developing
|
||||
foundational libraries like [syn] and [quote] against `proc_macro2` rather
|
||||
than `proc_macro`, the procedural macro ecosystem becomes easily applicable to
|
||||
many other use cases and we avoid reimplementing non-macro equivalents of
|
||||
those libraries.
|
||||
|
||||
- **Make procedural macros unit testable.** As a consequence of being specific
|
||||
to procedural macros, nothing that uses `proc_macro` can be executed from a
|
||||
unit test. In order for helper libraries or components of a macro to be
|
||||
testable in isolation, they must be implemented using `proc_macro2`.
|
||||
|
||||
- **Provide the latest and greatest APIs across all compiler versions.**
|
||||
Procedural macros were first introduced to Rust in 1.15.0 with an extremely
|
||||
minimal interface. Since then, many improvements have landed to make macros
|
||||
more flexible and easier to write. This library tracks the procedural macro
|
||||
API of the most recent stable compiler but employs a polyfill to provide that
|
||||
API consistently across any compiler since 1.15.0.
|
||||
|
||||
[syn]: https://github.com/dtolnay/syn
|
||||
[quote]: https://github.com/dtolnay/quote
|
||||
|
||||
## Usage
|
||||
|
||||
```toml
|
||||
[dependencies]
|
||||
proc-macro2 = "0.4"
|
||||
```
|
||||
|
||||
The skeleton of a typical procedural macro typically looks like this:
|
||||
|
||||
```rust
|
||||
extern crate proc_macro;
|
||||
|
||||
#[proc_macro_derive(MyDerive)]
|
||||
pub fn my_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
|
||||
let input = proc_macro2::TokenStream::from(input);
|
||||
|
||||
let output: proc_macro2::TokenStream = {
|
||||
/* transform input */
|
||||
};
|
||||
|
||||
proc_macro::TokenStream::from(output)
|
||||
}
|
||||
```
|
||||
|
||||
If parsing with [Syn], you'll use [`parse_macro_input!`] instead to propagate
|
||||
parse errors correctly back to the compiler when parsing fails.
|
||||
|
||||
[`parse_macro_input!`]: https://docs.rs/syn/0.15/syn/macro.parse_macro_input.html
|
||||
|
||||
## Unstable features
|
||||
|
||||
The default feature set of proc-macro2 tracks the most recent stable compiler
|
||||
API. Functionality in `proc_macro` that is not yet stable is not exposed by
|
||||
proc-macro2 by default.
|
||||
|
||||
To opt into the additional APIs available in the most recent nightly compiler,
|
||||
the `procmacro2_semver_exempt` config flag must be passed to rustc. As usual, we
|
||||
will polyfill those nightly-only APIs all the way back to Rust 1.15.0. As these
|
||||
are unstable APIs that track the nightly compiler, minor versions of proc-macro2
|
||||
may make breaking changes to them at any time.
|
||||
|
||||
```
|
||||
RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo build
|
||||
```
|
||||
|
||||
Note that this must not only be done for your crate, but for any crate that
|
||||
depends on your crate. This infectious nature is intentional, as it serves as a
|
||||
reminder that you are outside of the normal semver guarantees.
|
||||
|
||||
Semver exempt methods are marked as such in the proc-macro2 documentation.
|
||||
|
||||
# License
|
||||
|
||||
This project is licensed under either of
|
||||
|
||||
* Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
|
||||
http://www.apache.org/licenses/LICENSE-2.0)
|
||||
* MIT license ([LICENSE-MIT](LICENSE-MIT) or
|
||||
http://opensource.org/licenses/MIT)
|
||||
|
||||
at your option.
|
||||
|
||||
### Contribution
|
||||
|
||||
Unless you explicitly state otherwise, any contribution intentionally submitted
|
||||
for inclusion in Serde by you, as defined in the Apache-2.0 license, shall be
|
||||
dual licensed as above, without any additional terms or conditions.
|
|
@ -1,133 +0,0 @@
|
|||
// rustc-cfg emitted by the build script:
|
||||
//
|
||||
// "u128"
|
||||
// Include u128 and i128 constructors for proc_macro2::Literal. Enabled on
|
||||
// any compiler 1.26+.
|
||||
//
|
||||
// "use_proc_macro"
|
||||
// Link to extern crate proc_macro. Available on any compiler and any target
|
||||
// except wasm32. Requires "proc-macro" Cargo cfg to be enabled (default is
|
||||
// enabled). On wasm32 we never link to proc_macro even if "proc-macro" cfg
|
||||
// is enabled.
|
||||
//
|
||||
// "wrap_proc_macro"
|
||||
// Wrap types from libproc_macro rather than polyfilling the whole API.
|
||||
// Enabled on rustc 1.29+ as long as procmacro2_semver_exempt is not set,
|
||||
// because we can't emulate the unstable API without emulating everything
|
||||
// else. Also enabled unconditionally on nightly, in which case the
|
||||
// procmacro2_semver_exempt surface area is implemented by using the
|
||||
// nightly-only proc_macro API.
|
||||
//
|
||||
// "slow_extend"
|
||||
// Fallback when `impl Extend for TokenStream` is not available. These impls
|
||||
// were added one version later than the rest of the proc_macro token API.
|
||||
// Enabled on rustc 1.29 only.
|
||||
//
|
||||
// "nightly"
|
||||
// Enable the Span::unwrap method. This is to support proc_macro_span and
|
||||
// proc_macro_diagnostic use on the nightly channel without requiring the
|
||||
// semver exemption opt-in. Enabled when building with nightly.
|
||||
//
|
||||
// "super_unstable"
|
||||
// Implement the semver exempt API in terms of the nightly-only proc_macro
|
||||
// API. Enabled when using procmacro2_semver_exempt on a nightly compiler.
|
||||
//
|
||||
// "span_locations"
|
||||
// Provide methods Span::start and Span::end which give the line/column
|
||||
// location of a token. Enabled by procmacro2_semver_exempt or the
|
||||
// "span-locations" Cargo cfg. This is behind a cfg because tracking
|
||||
// location inside spans is a performance hit.
|
||||
|
||||
use std::env;
|
||||
use std::process::Command;
|
||||
use std::str;
|
||||
|
||||
fn main() {
|
||||
println!("cargo:rerun-if-changed=build.rs");
|
||||
|
||||
let target = env::var("TARGET").unwrap();
|
||||
|
||||
let version = match rustc_version() {
|
||||
Some(version) => version,
|
||||
None => return,
|
||||
};
|
||||
|
||||
if version.minor >= 26 {
|
||||
println!("cargo:rustc-cfg=u128");
|
||||
}
|
||||
|
||||
let semver_exempt = cfg!(procmacro2_semver_exempt);
|
||||
if semver_exempt {
|
||||
// https://github.com/alexcrichton/proc-macro2/issues/147
|
||||
println!("cargo:rustc-cfg=procmacro2_semver_exempt");
|
||||
}
|
||||
|
||||
if semver_exempt || cfg!(feature = "span-locations") {
|
||||
println!("cargo:rustc-cfg=span_locations");
|
||||
}
|
||||
|
||||
if !enable_use_proc_macro(&target) {
|
||||
return;
|
||||
}
|
||||
|
||||
println!("cargo:rustc-cfg=use_proc_macro");
|
||||
|
||||
// Rust 1.29 stabilized the necessary APIs in the `proc_macro` crate
|
||||
if version.nightly || version.minor >= 29 && !semver_exempt {
|
||||
println!("cargo:rustc-cfg=wrap_proc_macro");
|
||||
}
|
||||
|
||||
if version.minor == 29 {
|
||||
println!("cargo:rustc-cfg=slow_extend");
|
||||
}
|
||||
|
||||
if version.nightly {
|
||||
println!("cargo:rustc-cfg=nightly");
|
||||
}
|
||||
|
||||
if semver_exempt && version.nightly {
|
||||
println!("cargo:rustc-cfg=super_unstable");
|
||||
}
|
||||
}
|
||||
|
||||
fn enable_use_proc_macro(target: &str) -> bool {
|
||||
// wasm targets don't have the `proc_macro` crate, disable this feature.
|
||||
if target.contains("wasm32") {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Otherwise, only enable it if our feature is actually enabled.
|
||||
cfg!(feature = "proc-macro")
|
||||
}
|
||||
|
||||
struct RustcVersion {
|
||||
minor: u32,
|
||||
nightly: bool,
|
||||
}
|
||||
|
||||
fn rustc_version() -> Option<RustcVersion> {
|
||||
macro_rules! otry {
|
||||
($e:expr) => {
|
||||
match $e {
|
||||
Some(e) => e,
|
||||
None => return None,
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
let rustc = otry!(env::var_os("RUSTC"));
|
||||
let output = otry!(Command::new(rustc).arg("--version").output().ok());
|
||||
let version = otry!(str::from_utf8(&output.stdout).ok());
|
||||
let nightly = version.contains("nightly");
|
||||
let mut pieces = version.split('.');
|
||||
if pieces.next() != Some("rustc 1") {
|
||||
return None;
|
||||
}
|
||||
let minor = otry!(pieces.next());
|
||||
let minor = otry!(minor.parse().ok());
|
||||
|
||||
Some(RustcVersion {
|
||||
minor: minor,
|
||||
nightly: nightly,
|
||||
})
|
||||
}
|
Разница между файлами не показана из-за своего большого размера
Загрузить разницу
Разница между файлами не показана из-за своего большого размера
Загрузить разницу
|
@ -1,393 +0,0 @@
|
|||
//! Adapted from [`nom`](https://github.com/Geal/nom).
|
||||
|
||||
use std::str::{Bytes, CharIndices, Chars};
|
||||
|
||||
use unicode_xid::UnicodeXID;
|
||||
|
||||
use fallback::LexError;
|
||||
|
||||
#[derive(Copy, Clone, Eq, PartialEq)]
|
||||
pub struct Cursor<'a> {
|
||||
pub rest: &'a str,
|
||||
#[cfg(span_locations)]
|
||||
pub off: u32,
|
||||
}
|
||||
|
||||
impl<'a> Cursor<'a> {
|
||||
#[cfg(not(span_locations))]
|
||||
pub fn advance(&self, amt: usize) -> Cursor<'a> {
|
||||
Cursor {
|
||||
rest: &self.rest[amt..],
|
||||
}
|
||||
}
|
||||
#[cfg(span_locations)]
|
||||
pub fn advance(&self, amt: usize) -> Cursor<'a> {
|
||||
Cursor {
|
||||
rest: &self.rest[amt..],
|
||||
off: self.off + (amt as u32),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn find(&self, p: char) -> Option<usize> {
|
||||
self.rest.find(p)
|
||||
}
|
||||
|
||||
pub fn starts_with(&self, s: &str) -> bool {
|
||||
self.rest.starts_with(s)
|
||||
}
|
||||
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.rest.is_empty()
|
||||
}
|
||||
|
||||
pub fn len(&self) -> usize {
|
||||
self.rest.len()
|
||||
}
|
||||
|
||||
pub fn as_bytes(&self) -> &'a [u8] {
|
||||
self.rest.as_bytes()
|
||||
}
|
||||
|
||||
pub fn bytes(&self) -> Bytes<'a> {
|
||||
self.rest.bytes()
|
||||
}
|
||||
|
||||
pub fn chars(&self) -> Chars<'a> {
|
||||
self.rest.chars()
|
||||
}
|
||||
|
||||
pub fn char_indices(&self) -> CharIndices<'a> {
|
||||
self.rest.char_indices()
|
||||
}
|
||||
}
|
||||
|
||||
pub type PResult<'a, O> = Result<(Cursor<'a>, O), LexError>;
|
||||
|
||||
pub fn whitespace(input: Cursor) -> PResult<()> {
|
||||
if input.is_empty() {
|
||||
return Err(LexError);
|
||||
}
|
||||
|
||||
let bytes = input.as_bytes();
|
||||
let mut i = 0;
|
||||
while i < bytes.len() {
|
||||
let s = input.advance(i);
|
||||
if bytes[i] == b'/' {
|
||||
if s.starts_with("//")
|
||||
&& (!s.starts_with("///") || s.starts_with("////"))
|
||||
&& !s.starts_with("//!")
|
||||
{
|
||||
if let Some(len) = s.find('\n') {
|
||||
i += len + 1;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
} else if s.starts_with("/**/") {
|
||||
i += 4;
|
||||
continue;
|
||||
} else if s.starts_with("/*")
|
||||
&& (!s.starts_with("/**") || s.starts_with("/***"))
|
||||
&& !s.starts_with("/*!")
|
||||
{
|
||||
let (_, com) = block_comment(s)?;
|
||||
i += com.len();
|
||||
continue;
|
||||
}
|
||||
}
|
||||
match bytes[i] {
|
||||
b' ' | 0x09...0x0d => {
|
||||
i += 1;
|
||||
continue;
|
||||
}
|
||||
b if b <= 0x7f => {}
|
||||
_ => {
|
||||
let ch = s.chars().next().unwrap();
|
||||
if is_whitespace(ch) {
|
||||
i += ch.len_utf8();
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
return if i > 0 { Ok((s, ())) } else { Err(LexError) };
|
||||
}
|
||||
Ok((input.advance(input.len()), ()))
|
||||
}
|
||||
|
||||
pub fn block_comment(input: Cursor) -> PResult<&str> {
|
||||
if !input.starts_with("/*") {
|
||||
return Err(LexError);
|
||||
}
|
||||
|
||||
let mut depth = 0;
|
||||
let bytes = input.as_bytes();
|
||||
let mut i = 0;
|
||||
let upper = bytes.len() - 1;
|
||||
while i < upper {
|
||||
if bytes[i] == b'/' && bytes[i + 1] == b'*' {
|
||||
depth += 1;
|
||||
i += 1; // eat '*'
|
||||
} else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
|
||||
depth -= 1;
|
||||
if depth == 0 {
|
||||
return Ok((input.advance(i + 2), &input.rest[..i + 2]));
|
||||
}
|
||||
i += 1; // eat '/'
|
||||
}
|
||||
i += 1;
|
||||
}
|
||||
Err(LexError)
|
||||
}
|
||||
|
||||
pub fn skip_whitespace(input: Cursor) -> Cursor {
|
||||
match whitespace(input) {
|
||||
Ok((rest, _)) => rest,
|
||||
Err(LexError) => input,
|
||||
}
|
||||
}
|
||||
|
||||
fn is_whitespace(ch: char) -> bool {
|
||||
// Rust treats left-to-right mark and right-to-left mark as whitespace
|
||||
ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
|
||||
}
|
||||
|
||||
pub fn word_break(input: Cursor) -> PResult<()> {
|
||||
match input.chars().next() {
|
||||
Some(ch) if UnicodeXID::is_xid_continue(ch) => Err(LexError),
|
||||
Some(_) | None => Ok((input, ())),
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! named {
|
||||
($name:ident -> $o:ty, $submac:ident!( $($args:tt)* )) => {
|
||||
fn $name<'a>(i: Cursor<'a>) -> $crate::strnom::PResult<'a, $o> {
|
||||
$submac!(i, $($args)*)
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! alt {
|
||||
($i:expr, $e:ident | $($rest:tt)*) => {
|
||||
alt!($i, call!($e) | $($rest)*)
|
||||
};
|
||||
|
||||
($i:expr, $subrule:ident!( $($args:tt)*) | $($rest:tt)*) => {
|
||||
match $subrule!($i, $($args)*) {
|
||||
res @ Ok(_) => res,
|
||||
_ => alt!($i, $($rest)*)
|
||||
}
|
||||
};
|
||||
|
||||
($i:expr, $subrule:ident!( $($args:tt)* ) => { $gen:expr } | $($rest:tt)+) => {
|
||||
match $subrule!($i, $($args)*) {
|
||||
Ok((i, o)) => Ok((i, $gen(o))),
|
||||
Err(LexError) => alt!($i, $($rest)*)
|
||||
}
|
||||
};
|
||||
|
||||
($i:expr, $e:ident => { $gen:expr } | $($rest:tt)*) => {
|
||||
alt!($i, call!($e) => { $gen } | $($rest)*)
|
||||
};
|
||||
|
||||
($i:expr, $e:ident => { $gen:expr }) => {
|
||||
alt!($i, call!($e) => { $gen })
|
||||
};
|
||||
|
||||
($i:expr, $subrule:ident!( $($args:tt)* ) => { $gen:expr }) => {
|
||||
match $subrule!($i, $($args)*) {
|
||||
Ok((i, o)) => Ok((i, $gen(o))),
|
||||
Err(LexError) => Err(LexError),
|
||||
}
|
||||
};
|
||||
|
||||
($i:expr, $e:ident) => {
|
||||
alt!($i, call!($e))
|
||||
};
|
||||
|
||||
($i:expr, $subrule:ident!( $($args:tt)*)) => {
|
||||
$subrule!($i, $($args)*)
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! do_parse {
|
||||
($i:expr, ( $($rest:expr),* )) => {
|
||||
Ok(($i, ( $($rest),* )))
|
||||
};
|
||||
|
||||
($i:expr, $e:ident >> $($rest:tt)*) => {
|
||||
do_parse!($i, call!($e) >> $($rest)*)
|
||||
};
|
||||
|
||||
($i:expr, $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => {
|
||||
match $submac!($i, $($args)*) {
|
||||
Err(LexError) => Err(LexError),
|
||||
Ok((i, _)) => do_parse!(i, $($rest)*),
|
||||
}
|
||||
};
|
||||
|
||||
($i:expr, $field:ident : $e:ident >> $($rest:tt)*) => {
|
||||
do_parse!($i, $field: call!($e) >> $($rest)*)
|
||||
};
|
||||
|
||||
($i:expr, $field:ident : $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => {
|
||||
match $submac!($i, $($args)*) {
|
||||
Err(LexError) => Err(LexError),
|
||||
Ok((i, o)) => {
|
||||
let $field = o;
|
||||
do_parse!(i, $($rest)*)
|
||||
},
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! peek {
|
||||
($i:expr, $submac:ident!( $($args:tt)* )) => {
|
||||
match $submac!($i, $($args)*) {
|
||||
Ok((_, o)) => Ok(($i, o)),
|
||||
Err(LexError) => Err(LexError),
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! call {
|
||||
($i:expr, $fun:expr $(, $args:expr)*) => {
|
||||
$fun($i $(, $args)*)
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! option {
|
||||
($i:expr, $f:expr) => {
|
||||
match $f($i) {
|
||||
Ok((i, o)) => Ok((i, Some(o))),
|
||||
Err(LexError) => Ok(($i, None)),
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! take_until_newline_or_eof {
|
||||
($i:expr,) => {{
|
||||
if $i.len() == 0 {
|
||||
Ok(($i, ""))
|
||||
} else {
|
||||
match $i.find('\n') {
|
||||
Some(i) => Ok(($i.advance(i), &$i.rest[..i])),
|
||||
None => Ok(($i.advance($i.len()), &$i.rest[..$i.len()])),
|
||||
}
|
||||
}
|
||||
}};
|
||||
}
|
||||
|
||||
macro_rules! tuple {
|
||||
($i:expr, $($rest:tt)*) => {
|
||||
tuple_parser!($i, (), $($rest)*)
|
||||
};
|
||||
}
|
||||
|
||||
/// Do not use directly. Use `tuple!`.
|
||||
macro_rules! tuple_parser {
|
||||
($i:expr, ($($parsed:tt),*), $e:ident, $($rest:tt)*) => {
|
||||
tuple_parser!($i, ($($parsed),*), call!($e), $($rest)*)
|
||||
};
|
||||
|
||||
($i:expr, (), $submac:ident!( $($args:tt)* ), $($rest:tt)*) => {
|
||||
match $submac!($i, $($args)*) {
|
||||
Err(LexError) => Err(LexError),
|
||||
Ok((i, o)) => tuple_parser!(i, (o), $($rest)*),
|
||||
}
|
||||
};
|
||||
|
||||
($i:expr, ($($parsed:tt)*), $submac:ident!( $($args:tt)* ), $($rest:tt)*) => {
|
||||
match $submac!($i, $($args)*) {
|
||||
Err(LexError) => Err(LexError),
|
||||
Ok((i, o)) => tuple_parser!(i, ($($parsed)* , o), $($rest)*),
|
||||
}
|
||||
};
|
||||
|
||||
($i:expr, ($($parsed:tt),*), $e:ident) => {
|
||||
tuple_parser!($i, ($($parsed),*), call!($e))
|
||||
};
|
||||
|
||||
($i:expr, (), $submac:ident!( $($args:tt)* )) => {
|
||||
$submac!($i, $($args)*)
|
||||
};
|
||||
|
||||
($i:expr, ($($parsed:expr),*), $submac:ident!( $($args:tt)* )) => {
|
||||
match $submac!($i, $($args)*) {
|
||||
Err(LexError) => Err(LexError),
|
||||
Ok((i, o)) => Ok((i, ($($parsed),*, o)))
|
||||
}
|
||||
};
|
||||
|
||||
($i:expr, ($($parsed:expr),*)) => {
|
||||
Ok(($i, ($($parsed),*)))
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! not {
|
||||
($i:expr, $submac:ident!( $($args:tt)* )) => {
|
||||
match $submac!($i, $($args)*) {
|
||||
Ok((_, _)) => Err(LexError),
|
||||
Err(LexError) => Ok(($i, ())),
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! tag {
|
||||
($i:expr, $tag:expr) => {
|
||||
if $i.starts_with($tag) {
|
||||
Ok(($i.advance($tag.len()), &$i.rest[..$tag.len()]))
|
||||
} else {
|
||||
Err(LexError)
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! punct {
|
||||
($i:expr, $punct:expr) => {
|
||||
$crate::strnom::punct($i, $punct)
|
||||
};
|
||||
}
|
||||
|
||||
/// Do not use directly. Use `punct!`.
|
||||
pub fn punct<'a>(input: Cursor<'a>, token: &'static str) -> PResult<'a, &'a str> {
|
||||
let input = skip_whitespace(input);
|
||||
if input.starts_with(token) {
|
||||
Ok((input.advance(token.len()), token))
|
||||
} else {
|
||||
Err(LexError)
|
||||
}
|
||||
}
|
||||
|
||||
/// Run two parsers in sequence and keep only the SECOND result,
/// discarding the first (e.g. skip a prefix, keep the payload).
macro_rules! preceded {
    ($i:expr, $submac:ident!( $($args:tt)* ), $submac2:ident!( $($args2:tt)* )) => {
        match tuple!($i, $submac!($($args)*), $submac2!($($args2)*)) {
            // `_` is the first parser's output; `o` is the one we keep.
            Ok((remaining, (_, o))) => Ok((remaining, o)),
            Err(LexError) => Err(LexError),
        }
    };

    // Second parser given as a bare function name: normalize to `call!`.
    ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => {
        preceded!($i, $submac!($($args)*), call!($g))
    };
}

/// Run three parsers (open delimiter, content, close delimiter) and keep
/// only the MIDDLE result — e.g. the body between a pair of brackets.
macro_rules! delimited {
    ($i:expr, $submac:ident!( $($args:tt)* ), $($rest:tt)+) => {
        match tuple_parser!($i, (), $submac!($($args)*), $($rest)*) {
            Err(LexError) => Err(LexError),
            // The tuple is (open, content, close); keep `o` (the middle).
            Ok((i1, (_, o, _))) => Ok((i1, o))
        }
    };
}

/// Run a parser and transform its successful output with `$g` (applied
/// through `call!`, so `$g` may be a function or closure-like expr).
macro_rules! map {
    ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => {
        match $submac!($i, $($args)*) {
            Err(LexError) => Err(LexError),
            Ok((i, o)) => Ok((i, call!(o, $g)))
        }
    };

    // Parser given as a bare function: normalize to `call!` and recurse.
    ($i:expr, $f:expr, $g:expr) => {
        map!($i, call!($f), $g)
    };
}
|
|
@ -1,926 +0,0 @@
|
|||
use std::fmt;
|
||||
use std::iter;
|
||||
use std::panic::{self, PanicInfo};
|
||||
#[cfg(super_unstable)]
|
||||
use std::path::PathBuf;
|
||||
use std::str::FromStr;
|
||||
|
||||
use fallback;
|
||||
use proc_macro;
|
||||
|
||||
use {Delimiter, Punct, Spacing, TokenTree};
|
||||
|
||||
/// A token stream that is either backed by the compiler's real
/// `proc_macro::TokenStream` (available inside procedural macro
/// invocations on a working toolchain) or by this crate's pure-Rust
/// `fallback` implementation. Every API in this module dispatches on
/// this two-variant split; mixing variants is a bug (see `mismatch()`).
#[derive(Clone)]
pub enum TokenStream {
    Compiler(proc_macro::TokenStream),
    Fallback(fallback::TokenStream),
}

/// Lex-time error, mirroring the Compiler/Fallback split of
/// `TokenStream`. Produced when parsing a string into tokens fails.
pub enum LexError {
    Compiler(proc_macro::LexError),
    Fallback(fallback::LexError),
}
|
||||
|
||||
/// Returns true when the compiler's `proc_macro` API is usable from the
/// current context. The answer is probed once (via `catch_unwind` around
/// `proc_macro::Span::call_site()`) and cached in a process-wide atomic:
/// 0 = not probed yet, 1 = does not work, 2 = works.
fn nightly_works() -> bool {
    use std::sync::atomic::*;
    use std::sync::Once;

    // NOTE(review): `ATOMIC_USIZE_INIT` is the pre-1.34 const initializer;
    // kept as-is because this is pinned vendored code.
    static WORKS: AtomicUsize = ATOMIC_USIZE_INIT;
    static INIT: Once = Once::new();

    // Fast path: the probe already ran.
    match WORKS.load(Ordering::SeqCst) {
        1 => return false,
        2 => return true,
        _ => {}
    }

    // Swap in a null panic hook to avoid printing "thread panicked" to stderr,
    // then use catch_unwind to determine whether the compiler's proc_macro is
    // working. When proc-macro2 is used from outside of a procedural macro all
    // of the proc_macro crate's APIs currently panic.
    //
    // The Once is to prevent the possibility of this ordering:
    //
    //     thread 1 calls take_hook, gets the user's original hook
    //     thread 1 calls set_hook with the null hook
    //     thread 2 calls take_hook, thinks null hook is the original hook
    //     thread 2 calls set_hook with the null hook
    //     thread 1 calls set_hook with the actual original hook
    //     thread 2 calls set_hook with what it thinks is the original hook
    //
    // in which the user's hook has been lost.
    //
    // There is still a race condition where a panic in a different thread can
    // happen during the interval that the user's original panic hook is
    // unregistered such that their hook is incorrectly not called. This is
    // sufficiently unlikely and less bad than printing panic messages to stderr
    // on correct use of this crate. Maybe there is a libstd feature request
    // here. For now, if a user needs to guarantee that this failure mode does
    // not occur, they need to call e.g. `proc_macro2::Span::call_site()` from
    // the main thread before launching any other threads.
    INIT.call_once(|| {
        // Bare trait object syntax: this predates `dyn Trait`.
        type PanicHook = Fn(&PanicInfo) + Sync + Send + 'static;

        let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ });
        // Raw-pointer identity of our hook, used below to detect whether
        // another thread replaced the hook while we held it swapped out.
        let sanity_check = &*null_hook as *const PanicHook;
        let original_hook = panic::take_hook();
        panic::set_hook(null_hook);

        // The actual probe: does calling into proc_macro panic?
        let works = panic::catch_unwind(|| proc_macro::Span::call_site()).is_ok();
        WORKS.store(works as usize + 1, Ordering::SeqCst);

        let hopefully_null_hook = panic::take_hook();
        panic::set_hook(original_hook);
        if sanity_check != &*hopefully_null_hook {
            panic!("observed race condition in proc_macro2::nightly_works");
        }
    });
    // Recurse: WORKS is now guaranteed to be 1 or 2, so the match at the
    // top returns on this second pass.
    nightly_works()
}

/// Unconditional panic used whenever a Compiler-backed value meets a
/// Fallback-backed value (or vice versa) — a state this module treats as
/// a caller bug rather than something to paper over.
fn mismatch() -> ! {
    panic!("stable/nightly mismatch")
}
|
||||
|
||||
impl TokenStream {
|
||||
pub fn new() -> TokenStream {
|
||||
if nightly_works() {
|
||||
TokenStream::Compiler(proc_macro::TokenStream::new())
|
||||
} else {
|
||||
TokenStream::Fallback(fallback::TokenStream::new())
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_empty(&self) -> bool {
|
||||
match self {
|
||||
TokenStream::Compiler(tts) => tts.is_empty(),
|
||||
TokenStream::Fallback(tts) => tts.is_empty(),
|
||||
}
|
||||
}
|
||||
|
||||
fn unwrap_nightly(self) -> proc_macro::TokenStream {
|
||||
match self {
|
||||
TokenStream::Compiler(s) => s,
|
||||
TokenStream::Fallback(_) => mismatch(),
|
||||
}
|
||||
}
|
||||
|
||||
fn unwrap_stable(self) -> fallback::TokenStream {
|
||||
match self {
|
||||
TokenStream::Compiler(_) => mismatch(),
|
||||
TokenStream::Fallback(s) => s,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for TokenStream {
    type Err = LexError;

    /// Parse source text into tokens with whichever backend is active.
    /// The `?` converts the backend's error via the `From` impls on
    /// `LexError` below.
    fn from_str(src: &str) -> Result<TokenStream, LexError> {
        if nightly_works() {
            Ok(TokenStream::Compiler(src.parse()?))
        } else {
            Ok(TokenStream::Fallback(src.parse()?))
        }
    }
}

impl fmt::Display for TokenStream {
    // Delegate display (round-trippable source text) to the backend.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            TokenStream::Compiler(tts) => tts.fmt(f),
            TokenStream::Fallback(tts) => tts.fmt(f),
        }
    }
}
|
||||
|
||||
impl From<proc_macro::TokenStream> for TokenStream {
    // A real compiler stream always wraps as the Compiler variant.
    fn from(inner: proc_macro::TokenStream) -> TokenStream {
        TokenStream::Compiler(inner)
    }
}

impl From<TokenStream> for proc_macro::TokenStream {
    /// Convert back into a compiler stream. A fallback stream has no
    /// direct representation, so it round-trips through its string form
    /// and is re-lexed by the compiler (`unwrap`: the text of a valid
    /// stream is expected to re-parse).
    fn from(inner: TokenStream) -> proc_macro::TokenStream {
        match inner {
            TokenStream::Compiler(inner) => inner,
            TokenStream::Fallback(inner) => inner.to_string().parse().unwrap(),
        }
    }
}

impl From<fallback::TokenStream> for TokenStream {
    fn from(inner: fallback::TokenStream) -> TokenStream {
        TokenStream::Fallback(inner)
    }
}

impl From<TokenTree> for TokenStream {
    /// Build a one-token stream. On the fallback path the whole tree is
    /// handed to the fallback implementation; on the compiler path each
    /// wrapper tree is unwrapped into its `proc_macro` counterpart.
    fn from(token: TokenTree) -> TokenStream {
        if !nightly_works() {
            return TokenStream::Fallback(token.into());
        }
        let tt: proc_macro::TokenTree = match token {
            TokenTree::Group(tt) => tt.inner.unwrap_nightly().into(),
            // Punct has no inner wrapper type, so spacing/char/span are
            // copied field by field into a fresh proc_macro::Punct.
            TokenTree::Punct(tt) => {
                let spacing = match tt.spacing() {
                    Spacing::Joint => proc_macro::Spacing::Joint,
                    Spacing::Alone => proc_macro::Spacing::Alone,
                };
                let mut op = proc_macro::Punct::new(tt.as_char(), spacing);
                op.set_span(tt.span().inner.unwrap_nightly());
                op.into()
            }
            TokenTree::Ident(tt) => tt.inner.unwrap_nightly().into(),
            TokenTree::Literal(tt) => tt.inner.unwrap_nightly().into(),
        };
        TokenStream::Compiler(tt.into())
    }
}
|
||||
|
||||
impl iter::FromIterator<TokenTree> for TokenStream {
    /// Collect trees into a single stream, choosing the backend once up
    /// front; every element must then land on that same backend
    /// (`mismatch()` otherwise).
    fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
        if nightly_works() {
            let trees = trees
                .into_iter()
                .map(TokenStream::from)
                .flat_map(|t| match t {
                    TokenStream::Compiler(s) => s,
                    TokenStream::Fallback(_) => mismatch(),
                });
            TokenStream::Compiler(trees.collect())
        } else {
            TokenStream::Fallback(trees.into_iter().collect())
        }
    }
}

impl iter::FromIterator<TokenStream> for TokenStream {
    /// Concatenate streams. The first stream decides the backend; the
    /// rest must match. The two Compiler arms are alternatives selected
    /// by the `slow_extend` cfg: old toolchains lacked
    /// `TokenStream::extend`, so the slow path re-collects instead.
    fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
        let mut streams = streams.into_iter();
        match streams.next() {
            #[cfg(slow_extend)]
            Some(TokenStream::Compiler(first)) => {
                let stream = iter::once(first)
                    .chain(streams.map(|s| match s {
                        TokenStream::Compiler(s) => s,
                        TokenStream::Fallback(_) => mismatch(),
                    }))
                    .collect();
                TokenStream::Compiler(stream)
            }
            #[cfg(not(slow_extend))]
            Some(TokenStream::Compiler(mut first)) => {
                first.extend(streams.map(|s| match s {
                    TokenStream::Compiler(s) => s,
                    TokenStream::Fallback(_) => mismatch(),
                }));
                TokenStream::Compiler(first)
            }
            Some(TokenStream::Fallback(mut first)) => {
                first.extend(streams.map(|s| match s {
                    TokenStream::Fallback(s) => s,
                    TokenStream::Compiler(_) => mismatch(),
                }));
                TokenStream::Fallback(first)
            }
            // Empty iterator: fall back to the default-constructed stream.
            None => TokenStream::new(),
        }
    }
}
|
||||
|
||||
impl Extend<TokenTree> for TokenStream {
    /// Append trees in place. On the Compiler side, `slow_extend` picks
    /// between the native `extend` (new toolchains) and a clone + chain +
    /// re-collect workaround (old toolchains without `extend`).
    fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
        match self {
            TokenStream::Compiler(tts) => {
                #[cfg(not(slow_extend))]
                {
                    tts.extend(
                        streams
                            .into_iter()
                            .map(|t| TokenStream::from(t).unwrap_nightly()),
                    );
                }
                #[cfg(slow_extend)]
                {
                    // No in-place extend available: rebuild the stream
                    // from a clone of the old tokens plus the new ones.
                    *tts =
                        tts.clone()
                            .into_iter()
                            .chain(streams.into_iter().map(TokenStream::from).flat_map(
                                |t| match t {
                                    TokenStream::Compiler(tts) => tts.into_iter(),
                                    _ => mismatch(),
                                },
                            ))
                            .collect();
                }
            }
            TokenStream::Fallback(tts) => tts.extend(streams),
        }
    }
}

impl Extend<TokenStream> for TokenStream {
    /// Append whole streams in place; same Compiler fast/slow split as
    /// the TokenTree impl above, and every appended stream must be on
    /// the same backend as `self`.
    fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
        match self {
            TokenStream::Compiler(tts) => {
                #[cfg(not(slow_extend))]
                {
                    tts.extend(streams.into_iter().map(|stream| stream.unwrap_nightly()));
                }
                #[cfg(slow_extend)]
                {
                    *tts = tts
                        .clone()
                        .into_iter()
                        .chain(streams.into_iter().flat_map(|t| match t {
                            TokenStream::Compiler(tts) => tts.into_iter(),
                            _ => mismatch(),
                        }))
                        .collect();
                }
            }
            TokenStream::Fallback(tts) => {
                tts.extend(streams.into_iter().map(|stream| stream.unwrap_stable()))
            }
        }
    }
}
|
||||
|
||||
impl fmt::Debug for TokenStream {
    // Debug output delegates to whichever backend holds the tokens.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            TokenStream::Compiler(tts) => tts.fmt(f),
            TokenStream::Fallback(tts) => tts.fmt(f),
        }
    }
}

// These two `From` impls are what let `?` convert backend lex errors in
// `TokenStream::from_str` above.
impl From<proc_macro::LexError> for LexError {
    fn from(e: proc_macro::LexError) -> LexError {
        LexError::Compiler(e)
    }
}

impl From<fallback::LexError> for LexError {
    fn from(e: fallback::LexError) -> LexError {
        LexError::Fallback(e)
    }
}

impl fmt::Debug for LexError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            LexError::Compiler(e) => e.fmt(f),
            LexError::Fallback(e) => e.fmt(f),
        }
    }
}
|
||||
|
||||
/// Iterator over the trees of a `TokenStream`, mirroring the
/// Compiler/Fallback split of the stream it came from.
pub enum TokenTreeIter {
    Compiler(proc_macro::token_stream::IntoIter),
    Fallback(fallback::TokenTreeIter),
}

impl IntoIterator for TokenStream {
    type Item = TokenTree;
    type IntoIter = TokenTreeIter;

    fn into_iter(self) -> TokenTreeIter {
        match self {
            TokenStream::Compiler(tts) => TokenTreeIter::Compiler(tts.into_iter()),
            TokenStream::Fallback(tts) => TokenTreeIter::Fallback(tts.into_iter()),
        }
    }
}

impl Iterator for TokenTreeIter {
    type Item = TokenTree;

    /// Yield the next tree. Fallback trees already have the wrapper
    /// type and return directly; compiler trees are re-wrapped into
    /// this crate's `TokenTree` variant by variant.
    fn next(&mut self) -> Option<TokenTree> {
        let token = match self {
            TokenTreeIter::Compiler(iter) => iter.next()?,
            TokenTreeIter::Fallback(iter) => return iter.next(),
        };
        Some(match token {
            proc_macro::TokenTree::Group(tt) => ::Group::_new(Group::Compiler(tt)).into(),
            // Punct is rebuilt field by field (char, spacing, span)
            // since it has no inner wrapper handle.
            proc_macro::TokenTree::Punct(tt) => {
                let spacing = match tt.spacing() {
                    proc_macro::Spacing::Joint => Spacing::Joint,
                    proc_macro::Spacing::Alone => Spacing::Alone,
                };
                let mut o = Punct::new(tt.as_char(), spacing);
                o.set_span(::Span::_new(Span::Compiler(tt.span())));
                o.into()
            }
            proc_macro::TokenTree::Ident(s) => ::Ident::_new(Ident::Compiler(s)).into(),
            proc_macro::TokenTree::Literal(l) => ::Literal::_new(Literal::Compiler(l)).into(),
        })
    }

    // Forward the backend's size hint so collect() can preallocate.
    fn size_hint(&self) -> (usize, Option<usize>) {
        match self {
            TokenTreeIter::Compiler(tts) => tts.size_hint(),
            TokenTreeIter::Fallback(tts) => tts.size_hint(),
        }
    }
}

impl fmt::Debug for TokenTreeIter {
    // Opaque debug: the backend iterators are not Debug themselves.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("TokenTreeIter").finish()
    }
}
|
||||
|
||||
/// Source file of a `Span`; only compiled under the `super_unstable`
/// cfg because the underlying compiler API is itself unstable.
#[derive(Clone, PartialEq, Eq)]
#[cfg(super_unstable)]
pub enum SourceFile {
    Compiler(proc_macro::SourceFile),
    Fallback(fallback::SourceFile),
}

#[cfg(super_unstable)]
impl SourceFile {
    // Internal constructor for the compiler-backed variant.
    fn nightly(sf: proc_macro::SourceFile) -> Self {
        SourceFile::Compiler(sf)
    }

    /// Get the path to this source file as a string.
    pub fn path(&self) -> PathBuf {
        match self {
            SourceFile::Compiler(a) => a.path(),
            SourceFile::Fallback(a) => a.path(),
        }
    }

    // Delegates the backend's notion of whether this file exists on
    // disk (as opposed to e.g. a macro-generated pseudo-file).
    pub fn is_real(&self) -> bool {
        match self {
            SourceFile::Compiler(a) => a.is_real(),
            SourceFile::Fallback(a) => a.is_real(),
        }
    }
}

#[cfg(super_unstable)]
impl fmt::Debug for SourceFile {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            SourceFile::Compiler(a) => a.fmt(f),
            SourceFile::Fallback(a) => a.fmt(f),
        }
    }
}

/// A line/column position inside a source file, used by
/// `Span::start`/`Span::end`; gated on the features that expose them.
#[cfg(any(super_unstable, feature = "span-locations"))]
pub struct LineColumn {
    pub line: usize,
    pub column: usize,
}
|
||||
|
||||
/// A region of source code, backed either by the compiler's `Span` or
/// by the fallback implementation. Like the other types here, the two
/// variants must never be mixed (see `mismatch()`).
#[derive(Copy, Clone)]
pub enum Span {
    Compiler(proc_macro::Span),
    Fallback(fallback::Span),
}

impl Span {
    /// Span at the macro call site, from whichever backend is active.
    pub fn call_site() -> Span {
        if nightly_works() {
            Span::Compiler(proc_macro::Span::call_site())
        } else {
            Span::Fallback(fallback::Span::call_site())
        }
    }

    /// Span at the macro definition site (unstable compiler API).
    #[cfg(super_unstable)]
    pub fn def_site() -> Span {
        if nightly_works() {
            Span::Compiler(proc_macro::Span::def_site())
        } else {
            Span::Fallback(fallback::Span::def_site())
        }
    }

    // Hygiene of `self`, location of `other` (unstable compiler API).
    #[cfg(super_unstable)]
    pub fn resolved_at(&self, other: Span) -> Span {
        match (self, other) {
            (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.resolved_at(b)),
            (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.resolved_at(b)),
            _ => mismatch(),
        }
    }

    // Location of `self`, hygiene of `other` (unstable compiler API).
    #[cfg(super_unstable)]
    pub fn located_at(&self, other: Span) -> Span {
        match (self, other) {
            (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.located_at(b)),
            (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.located_at(b)),
            _ => mismatch(),
        }
    }

    /// Extract the compiler span. Unlike `unwrap_nightly` this is part
    /// of the public API, hence the user-facing panic message.
    pub fn unwrap(self) -> proc_macro::Span {
        match self {
            Span::Compiler(s) => s,
            Span::Fallback(_) => panic!("proc_macro::Span is only available in procedural macros"),
        }
    }

    #[cfg(super_unstable)]
    pub fn source_file(&self) -> SourceFile {
        match self {
            Span::Compiler(s) => SourceFile::nightly(s.source_file()),
            Span::Fallback(s) => SourceFile::Fallback(s.source_file()),
        }
    }

    /// Start position of the span. On a compiler span without the
    /// `nightly` cfg the real location API is unavailable, so a dummy
    /// 0:0 position is returned.
    #[cfg(any(super_unstable, feature = "span-locations"))]
    pub fn start(&self) -> LineColumn {
        match self {
            #[cfg(nightly)]
            Span::Compiler(s) => {
                let proc_macro::LineColumn { line, column } = s.start();
                LineColumn { line, column }
            }
            #[cfg(not(nightly))]
            Span::Compiler(_) => LineColumn { line: 0, column: 0 },
            Span::Fallback(s) => {
                let fallback::LineColumn { line, column } = s.start();
                LineColumn { line, column }
            }
        }
    }

    /// End position of the span; same nightly/dummy split as `start`.
    #[cfg(any(super_unstable, feature = "span-locations"))]
    pub fn end(&self) -> LineColumn {
        match self {
            #[cfg(nightly)]
            Span::Compiler(s) => {
                let proc_macro::LineColumn { line, column } = s.end();
                LineColumn { line, column }
            }
            #[cfg(not(nightly))]
            Span::Compiler(_) => LineColumn { line: 0, column: 0 },
            Span::Fallback(s) => {
                let fallback::LineColumn { line, column } = s.end();
                LineColumn { line, column }
            }
        }
    }

    /// Smallest span covering both `self` and `other`; `None` when the
    /// backend cannot join them (or the variants are mixed — note this
    /// returns None rather than panicking, unlike the methods above).
    #[cfg(super_unstable)]
    pub fn join(&self, other: Span) -> Option<Span> {
        let ret = match (self, other) {
            (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.join(b)?),
            (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.join(b)?),
            _ => return None,
        };
        Some(ret)
    }

    // Inherent `eq` (Span deliberately does not implement PartialEq);
    // mixed variants compare unequal instead of panicking.
    #[cfg(super_unstable)]
    pub fn eq(&self, other: &Span) -> bool {
        match (self, other) {
            (Span::Compiler(a), Span::Compiler(b)) => a.eq(b),
            (Span::Fallback(a), Span::Fallback(b)) => a.eq(b),
            _ => false,
        }
    }

    fn unwrap_nightly(self) -> proc_macro::Span {
        match self {
            Span::Compiler(s) => s,
            Span::Fallback(_) => mismatch(),
        }
    }
}
|
||||
|
||||
// Converts a compiler span straight into the crate-root public `::Span`
// wrapper (not this module's internal enum).
impl From<proc_macro::Span> for ::Span {
    fn from(proc_span: proc_macro::Span) -> ::Span {
        ::Span::_new(Span::Compiler(proc_span))
    }
}

impl From<fallback::Span> for Span {
    fn from(inner: fallback::Span) -> Span {
        Span::Fallback(inner)
    }
}

impl fmt::Debug for Span {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Span::Compiler(s) => s.fmt(f),
            Span::Fallback(s) => s.fmt(f),
        }
    }
}

/// Attach a `"span"` field to a token's debug output. Compiler spans
/// are always shown; fallback spans decide for themselves whether they
/// carry enough information to be worth printing.
pub fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
    match span {
        Span::Compiler(s) => {
            debug.field("span", &s);
        }
        Span::Fallback(s) => fallback::debug_span_field_if_nontrivial(debug, s),
    }
}
|
||||
|
||||
/// A delimited group of tokens (`(...)`, `[...]`, `{...}`, or an
/// invisible `None` delimiter), on either backend.
#[derive(Clone)]
pub enum Group {
    Compiler(proc_macro::Group),
    Fallback(fallback::Group),
}

impl Group {
    /// Build a group around `stream`; the stream's backend decides the
    /// group's backend, and the delimiter enum is translated for the
    /// compiler case.
    pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
        match stream {
            TokenStream::Compiler(stream) => {
                let delimiter = match delimiter {
                    Delimiter::Parenthesis => proc_macro::Delimiter::Parenthesis,
                    Delimiter::Bracket => proc_macro::Delimiter::Bracket,
                    Delimiter::Brace => proc_macro::Delimiter::Brace,
                    Delimiter::None => proc_macro::Delimiter::None,
                };
                Group::Compiler(proc_macro::Group::new(delimiter, stream))
            }
            TokenStream::Fallback(stream) => {
                Group::Fallback(fallback::Group::new(delimiter, stream))
            }
        }
    }

    // Translate the compiler's delimiter enum back to this crate's.
    pub fn delimiter(&self) -> Delimiter {
        match self {
            Group::Compiler(g) => match g.delimiter() {
                proc_macro::Delimiter::Parenthesis => Delimiter::Parenthesis,
                proc_macro::Delimiter::Bracket => Delimiter::Bracket,
                proc_macro::Delimiter::Brace => Delimiter::Brace,
                proc_macro::Delimiter::None => Delimiter::None,
            },
            Group::Fallback(g) => g.delimiter(),
        }
    }

    /// The tokens inside the delimiters, re-wrapped on the same backend.
    pub fn stream(&self) -> TokenStream {
        match self {
            Group::Compiler(g) => TokenStream::Compiler(g.stream()),
            Group::Fallback(g) => TokenStream::Fallback(g.stream()),
        }
    }

    /// Span of the whole group including both delimiters.
    pub fn span(&self) -> Span {
        match self {
            Group::Compiler(g) => Span::Compiler(g.span()),
            Group::Fallback(g) => Span::Fallback(g.span()),
        }
    }

    // Span of just the opening delimiter (unstable compiler API).
    #[cfg(super_unstable)]
    pub fn span_open(&self) -> Span {
        match self {
            Group::Compiler(g) => Span::Compiler(g.span_open()),
            Group::Fallback(g) => Span::Fallback(g.span_open()),
        }
    }

    // Span of just the closing delimiter (unstable compiler API).
    #[cfg(super_unstable)]
    pub fn span_close(&self) -> Span {
        match self {
            Group::Compiler(g) => Span::Compiler(g.span_close()),
            Group::Fallback(g) => Span::Fallback(g.span_close()),
        }
    }

    /// Set the group's span; the span must be on the same backend as
    /// the group itself.
    pub fn set_span(&mut self, span: Span) {
        match (self, span) {
            (Group::Compiler(g), Span::Compiler(s)) => g.set_span(s),
            (Group::Fallback(g), Span::Fallback(s)) => g.set_span(s),
            _ => mismatch(),
        }
    }

    fn unwrap_nightly(self) -> proc_macro::Group {
        match self {
            Group::Compiler(g) => g,
            Group::Fallback(_) => mismatch(),
        }
    }
}

impl From<fallback::Group> for Group {
    fn from(g: fallback::Group) -> Self {
        Group::Fallback(g)
    }
}

impl fmt::Display for Group {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Group::Compiler(group) => group.fmt(formatter),
            Group::Fallback(group) => group.fmt(formatter),
        }
    }
}

impl fmt::Debug for Group {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Group::Compiler(group) => group.fmt(formatter),
            Group::Fallback(group) => group.fmt(formatter),
        }
    }
}
|
||||
|
||||
/// An identifier token, on either backend.
#[derive(Clone)]
pub enum Ident {
    Compiler(proc_macro::Ident),
    Fallback(fallback::Ident),
}

impl Ident {
    /// Build an identifier; the span's backend decides the ident's
    /// backend.
    pub fn new(string: &str, span: Span) -> Ident {
        match span {
            Span::Compiler(s) => Ident::Compiler(proc_macro::Ident::new(string, s)),
            Span::Fallback(s) => Ident::Fallback(fallback::Ident::new(string, s)),
        }
    }

    /// Build a raw identifier (`r#...`). The compiler API of this era
    /// had no raw-ident constructor, so the raw form is obtained by
    /// parsing `string` as a token stream and pulling the ident back
    /// out; anything else that comes out is a caller bug (`panic!`).
    pub fn new_raw(string: &str, span: Span) -> Ident {
        match span {
            Span::Compiler(s) => {
                let p: proc_macro::TokenStream = string.parse().unwrap();
                let ident = match p.into_iter().next() {
                    Some(proc_macro::TokenTree::Ident(mut i)) => {
                        i.set_span(s);
                        i
                    }
                    _ => panic!(),
                };
                Ident::Compiler(ident)
            }
            Span::Fallback(s) => Ident::Fallback(fallback::Ident::new_raw(string, s)),
        }
    }

    pub fn span(&self) -> Span {
        match self {
            Ident::Compiler(t) => Span::Compiler(t.span()),
            Ident::Fallback(t) => Span::Fallback(t.span()),
        }
    }

    /// Set the span; must be on the same backend as the ident.
    pub fn set_span(&mut self, span: Span) {
        match (self, span) {
            (Ident::Compiler(t), Span::Compiler(s)) => t.set_span(s),
            (Ident::Fallback(t), Span::Fallback(s)) => t.set_span(s),
            _ => mismatch(),
        }
    }

    fn unwrap_nightly(self) -> proc_macro::Ident {
        match self {
            Ident::Compiler(s) => s,
            Ident::Fallback(_) => mismatch(),
        }
    }
}

impl PartialEq for Ident {
    fn eq(&self, other: &Ident) -> bool {
        match (self, other) {
            // proc_macro::Ident is not PartialEq; compare textually.
            (Ident::Compiler(t), Ident::Compiler(o)) => t.to_string() == o.to_string(),
            (Ident::Fallback(t), Ident::Fallback(o)) => t == o,
            // Mixed backends panic here (unlike Span::eq's `false`).
            _ => mismatch(),
        }
    }
}

// Compare an ident against any string-like value (`&str`, `String`, ...).
impl<T> PartialEq<T> for Ident
where
    T: ?Sized + AsRef<str>,
{
    fn eq(&self, other: &T) -> bool {
        let other = other.as_ref();
        match self {
            Ident::Compiler(t) => t.to_string() == other,
            Ident::Fallback(t) => t == other,
        }
    }
}

impl fmt::Display for Ident {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Ident::Compiler(t) => t.fmt(f),
            Ident::Fallback(t) => t.fmt(f),
        }
    }
}

impl fmt::Debug for Ident {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Ident::Compiler(t) => t.fmt(f),
            Ident::Fallback(t) => t.fmt(f),
        }
    }
}
|
||||
|
||||
/// A literal token (string, char, number, ...), on either backend.
#[derive(Clone)]
pub enum Literal {
    Compiler(proc_macro::Literal),
    Fallback(fallback::Literal),
}

/// Generates one constructor per `name => type` pair for suffixed
/// numeric literals (e.g. `10u8`), dispatching on `nightly_works()`
/// exactly like the hand-written constructors in `impl Literal` below.
macro_rules! suffixed_numbers {
    ($($name:ident => $kind:ident,)*) => ($(
        pub fn $name(n: $kind) -> Literal {
            if nightly_works() {
                Literal::Compiler(proc_macro::Literal::$name(n))
            } else {
                Literal::Fallback(fallback::Literal::$name(n))
            }
        }
    )*)
}

/// Same shape as `suffixed_numbers!`, but for unsuffixed integer
/// literals (e.g. `10`). Kept as a separate macro purely for naming
/// clarity at the invocation sites.
macro_rules! unsuffixed_integers {
    ($($name:ident => $kind:ident,)*) => ($(
        pub fn $name(n: $kind) -> Literal {
            if nightly_works() {
                Literal::Compiler(proc_macro::Literal::$name(n))
            } else {
                Literal::Fallback(fallback::Literal::$name(n))
            }
        }
    )*)
}
|
||||
|
||||
impl Literal {
    // Suffixed numeric constructors (`Literal::u8_suffixed(1)` -> `1u8`).
    suffixed_numbers! {
        u8_suffixed => u8,
        u16_suffixed => u16,
        u32_suffixed => u32,
        u64_suffixed => u64,
        usize_suffixed => usize,
        i8_suffixed => i8,
        i16_suffixed => i16,
        i32_suffixed => i32,
        i64_suffixed => i64,
        isize_suffixed => isize,

        f32_suffixed => f32,
        f64_suffixed => f64,
    }

    // 128-bit variants only on toolchains with i128/u128 support.
    #[cfg(u128)]
    suffixed_numbers! {
        i128_suffixed => i128,
        u128_suffixed => u128,
    }

    // Unsuffixed integer constructors (`Literal::u8_unsuffixed(1)` -> `1`).
    unsuffixed_integers! {
        u8_unsuffixed => u8,
        u16_unsuffixed => u16,
        u32_unsuffixed => u32,
        u64_unsuffixed => u64,
        usize_unsuffixed => usize,
        i8_unsuffixed => i8,
        i16_unsuffixed => i16,
        i32_unsuffixed => i32,
        i64_unsuffixed => i64,
        isize_unsuffixed => isize,
    }

    #[cfg(u128)]
    unsuffixed_integers! {
        i128_unsuffixed => i128,
        u128_unsuffixed => u128,
    }

    // Unsuffixed floats are hand-written rather than macro-generated:
    // they are not covered by `unsuffixed_integers!`.
    pub fn f32_unsuffixed(f: f32) -> Literal {
        if nightly_works() {
            Literal::Compiler(proc_macro::Literal::f32_unsuffixed(f))
        } else {
            Literal::Fallback(fallback::Literal::f32_unsuffixed(f))
        }
    }

    pub fn f64_unsuffixed(f: f64) -> Literal {
        if nightly_works() {
            Literal::Compiler(proc_macro::Literal::f64_unsuffixed(f))
        } else {
            Literal::Fallback(fallback::Literal::f64_unsuffixed(f))
        }
    }

    /// String literal, with escaping handled by the backend.
    pub fn string(t: &str) -> Literal {
        if nightly_works() {
            Literal::Compiler(proc_macro::Literal::string(t))
        } else {
            Literal::Fallback(fallback::Literal::string(t))
        }
    }

    /// Character literal.
    pub fn character(t: char) -> Literal {
        if nightly_works() {
            Literal::Compiler(proc_macro::Literal::character(t))
        } else {
            Literal::Fallback(fallback::Literal::character(t))
        }
    }

    /// Byte-string literal (`b"..."`).
    pub fn byte_string(bytes: &[u8]) -> Literal {
        if nightly_works() {
            Literal::Compiler(proc_macro::Literal::byte_string(bytes))
        } else {
            Literal::Fallback(fallback::Literal::byte_string(bytes))
        }
    }

    pub fn span(&self) -> Span {
        match self {
            Literal::Compiler(lit) => Span::Compiler(lit.span()),
            Literal::Fallback(lit) => Span::Fallback(lit.span()),
        }
    }

    /// Set the span; must be on the same backend as the literal.
    pub fn set_span(&mut self, span: Span) {
        match (self, span) {
            (Literal::Compiler(lit), Span::Compiler(s)) => lit.set_span(s),
            (Literal::Fallback(lit), Span::Fallback(s)) => lit.set_span(s),
            _ => mismatch(),
        }
    }

    fn unwrap_nightly(self) -> proc_macro::Literal {
        match self {
            Literal::Compiler(s) => s,
            Literal::Fallback(_) => mismatch(),
        }
    }
}

impl From<fallback::Literal> for Literal {
    fn from(s: fallback::Literal) -> Literal {
        Literal::Fallback(s)
    }
}

impl fmt::Display for Literal {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Literal::Compiler(t) => t.fmt(f),
            Literal::Fallback(t) => t.fmt(f),
        }
    }
}

impl fmt::Debug for Literal {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Literal::Compiler(t) => t.fmt(f),
            Literal::Fallback(t) => t.fmt(f),
        }
    }
}
|
|
@ -1,61 +0,0 @@
|
|||
extern crate proc_macro2;
|
||||
|
||||
use proc_macro2::*;
|
||||
|
||||
/// Generates a `#[test]` per type asserting marker-trait status.
///
/// `assert_impl!(T is A and B)` fails to COMPILE unless `T: A + B`.
/// `assert_impl!(T is not A or B)` fails to compile if `T` implements
/// any of the listed markers (the trick: a trait-method call that would
/// become ambiguous, see comments in the arm below).
macro_rules! assert_impl {
    ($ty:ident is $($marker:ident) and +) => {
        #[test]
        #[allow(non_snake_case)]
        fn $ty() {
            // Monomorphizing this fn for $ty is the whole assertion.
            fn assert_implemented<T: $($marker +)+>() {}
            assert_implemented::<$ty>();
        }
    };

    ($ty:ident is not $($marker:ident) or +) => {
        #[test]
        #[allow(non_snake_case)]
        fn $ty() {
            $(
                {
                    // Implemented for types that implement $marker.
                    trait IsNotImplemented {
                        fn assert_not_implemented() {}
                    }
                    impl<T: $marker> IsNotImplemented for T {}

                    // Implemented for the type being tested.
                    trait IsImplemented {
                        fn assert_not_implemented() {}
                    }
                    impl IsImplemented for $ty {}

                    // If $ty does not implement $marker, there is no ambiguity
                    // in the following trait method call.
                    <$ty>::assert_not_implemented();
                }
            )+
        }
    };
}

// Plain-data enums are expected to be thread-safe...
assert_impl!(Delimiter is Send and Sync);
assert_impl!(Spacing is Send and Sync);

// ...while every token/span type must stay !Send + !Sync (the compiler
// backend is thread-local, so exposing Send/Sync would be unsound).
assert_impl!(Group is not Send or Sync);
assert_impl!(Ident is not Send or Sync);
assert_impl!(LexError is not Send or Sync);
assert_impl!(Literal is not Send or Sync);
assert_impl!(Punct is not Send or Sync);
assert_impl!(Span is not Send or Sync);
assert_impl!(TokenStream is not Send or Sync);
assert_impl!(TokenTree is not Send or Sync);

// Types only exposed under the semver-exempt cfg.
#[cfg(procmacro2_semver_exempt)]
mod semver_exempt {
    use super::*;

    assert_impl!(LineColumn is Send and Sync);

    assert_impl!(SourceFile is not Send or Sync);
}
|
|
@ -1,389 +0,0 @@
|
|||
extern crate proc_macro2;
|
||||
|
||||
use std::str::{self, FromStr};
|
||||
|
||||
use proc_macro2::{Ident, Literal, Spacing, Span, TokenStream, TokenTree};
|
||||
|
||||
// Identifiers print back as their exact text, keywords included.
#[test]
fn terms() {
    assert_eq!(
        Ident::new("String", Span::call_site()).to_string(),
        "String"
    );
    assert_eq!(Ident::new("fn", Span::call_site()).to_string(), "fn");
    assert_eq!(Ident::new("_", Span::call_site()).to_string(), "_");
}

// Raw identifiers gain the `r#` prefix on display.
#[test]
#[cfg(procmacro2_semver_exempt)]
fn raw_terms() {
    assert_eq!(
        Ident::new_raw("String", Span::call_site()).to_string(),
        "r#String"
    );
    assert_eq!(Ident::new_raw("fn", Span::call_site()).to_string(), "r#fn");
    assert_eq!(Ident::new_raw("_", Span::call_site()).to_string(), "r#_");
}

// Invalid ident text must panic with the exact messages below.
#[test]
#[should_panic(expected = "Ident is not allowed to be empty; use Option<Ident>")]
fn term_empty() {
    Ident::new("", Span::call_site());
}

#[test]
#[should_panic(expected = "Ident cannot be a number; use Literal instead")]
fn term_number() {
    Ident::new("255", Span::call_site());
}

#[test]
#[should_panic(expected = "\"a#\" is not a valid Ident")]
fn term_invalid() {
    Ident::new("a#", Span::call_site());
}

// `r#`-prefixed strings passed to plain `new` are rejected too.
#[test]
#[should_panic(expected = "not a valid Ident")]
fn raw_term_empty() {
    Ident::new("r#", Span::call_site());
}

#[test]
#[should_panic(expected = "not a valid Ident")]
fn raw_term_number() {
    Ident::new("r#255", Span::call_site());
}

#[test]
#[should_panic(expected = "\"r#a#\" is not a valid Ident")]
fn raw_term_invalid() {
    Ident::new("r#a#", Span::call_site());
}

// Lifetime-shaped strings are not identifiers either.
#[test]
#[should_panic(expected = "not a valid Ident")]
fn lifetime_empty() {
    Ident::new("'", Span::call_site());
}

#[test]
#[should_panic(expected = "not a valid Ident")]
fn lifetime_number() {
    Ident::new("'255", Span::call_site());
}

#[test]
#[should_panic(expected = r#""\'a#" is not a valid Ident"#)]
fn lifetime_invalid() {
    Ident::new("'a#", Span::call_site());
}

// Literal constructors render with escaping and suffix rules intact.
#[test]
fn literals() {
    assert_eq!(Literal::string("foo").to_string(), "\"foo\"");
    assert_eq!(Literal::string("\"").to_string(), "\"\\\"\"");
    assert_eq!(Literal::f32_unsuffixed(10.0).to_string(), "10.0");
}
|
||||
|
||||
#[test]
|
||||
fn roundtrip() {
|
||||
fn roundtrip(p: &str) {
|
||||
println!("parse: {}", p);
|
||||
let s = p.parse::<TokenStream>().unwrap().to_string();
|
||||
println!("first: {}", s);
|
||||
let s2 = s.to_string().parse::<TokenStream>().unwrap().to_string();
|
||||
assert_eq!(s, s2);
|
||||
}
|
||||
roundtrip("a");
|
||||
roundtrip("<<");
|
||||
roundtrip("<<=");
|
||||
roundtrip(
|
||||
"
|
||||
1
|
||||
1.0
|
||||
1f32
|
||||
2f64
|
||||
1usize
|
||||
4isize
|
||||
4e10
|
||||
1_000
|
||||
1_0i32
|
||||
8u8
|
||||
9
|
||||
0
|
||||
0xffffffffffffffffffffffffffffffff
|
||||
",
|
||||
);
|
||||
roundtrip("'a");
|
||||
roundtrip("'_");
|
||||
roundtrip("'static");
|
||||
roundtrip("'\\u{10__FFFF}'");
|
||||
roundtrip("\"\\u{10_F0FF__}foo\\u{1_0_0_0__}\"");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn fail() {
|
||||
fn fail(p: &str) {
|
||||
if let Ok(s) = p.parse::<TokenStream>() {
|
||||
panic!("should have failed to parse: {}\n{:#?}", p, s);
|
||||
}
|
||||
}
|
||||
fail("1x");
|
||||
fail("1u80");
|
||||
fail("1f320");
|
||||
fail("' static");
|
||||
fail("r#1");
|
||||
fail("r#_");
|
||||
}
|
||||
|
||||
#[cfg(span_locations)]
|
||||
#[test]
|
||||
fn span_test() {
|
||||
use proc_macro2::TokenTree;
|
||||
|
||||
fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
|
||||
let ts = p.parse::<TokenStream>().unwrap();
|
||||
check_spans_internal(ts, &mut lines);
|
||||
}
|
||||
|
||||
fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
|
||||
for i in ts {
|
||||
if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
|
||||
*lines = rest;
|
||||
|
||||
let start = i.span().start();
|
||||
assert_eq!(start.line, sline, "sline did not match for {}", i);
|
||||
assert_eq!(start.column, scol, "scol did not match for {}", i);
|
||||
|
||||
let end = i.span().end();
|
||||
assert_eq!(end.line, eline, "eline did not match for {}", i);
|
||||
assert_eq!(end.column, ecol, "ecol did not match for {}", i);
|
||||
|
||||
match i {
|
||||
TokenTree::Group(ref g) => {
|
||||
check_spans_internal(g.stream().clone(), lines);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
check_spans(
|
||||
"\
|
||||
/// This is a document comment
|
||||
testing 123
|
||||
{
|
||||
testing 234
|
||||
}",
|
||||
&[
|
||||
(1, 0, 1, 30), // #
|
||||
(1, 0, 1, 30), // [ ... ]
|
||||
(1, 0, 1, 30), // doc
|
||||
(1, 0, 1, 30), // =
|
||||
(1, 0, 1, 30), // "This is..."
|
||||
(2, 0, 2, 7), // testing
|
||||
(2, 8, 2, 11), // 123
|
||||
(3, 0, 5, 1), // { ... }
|
||||
(4, 2, 4, 9), // testing
|
||||
(4, 10, 4, 13), // 234
|
||||
],
|
||||
);
|
||||
}
|
||||
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
#[cfg(not(nightly))]
|
||||
#[test]
|
||||
fn default_span() {
|
||||
let start = Span::call_site().start();
|
||||
assert_eq!(start.line, 1);
|
||||
assert_eq!(start.column, 0);
|
||||
let end = Span::call_site().end();
|
||||
assert_eq!(end.line, 1);
|
||||
assert_eq!(end.column, 0);
|
||||
let source_file = Span::call_site().source_file();
|
||||
assert_eq!(source_file.path().to_string_lossy(), "<unspecified>");
|
||||
assert!(!source_file.is_real());
|
||||
}
|
||||
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
#[test]
|
||||
fn span_join() {
|
||||
let source1 = "aaa\nbbb"
|
||||
.parse::<TokenStream>()
|
||||
.unwrap()
|
||||
.into_iter()
|
||||
.collect::<Vec<_>>();
|
||||
let source2 = "ccc\nddd"
|
||||
.parse::<TokenStream>()
|
||||
.unwrap()
|
||||
.into_iter()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
assert!(source1[0].span().source_file() != source2[0].span().source_file());
|
||||
assert_eq!(
|
||||
source1[0].span().source_file(),
|
||||
source1[1].span().source_file()
|
||||
);
|
||||
|
||||
let joined1 = source1[0].span().join(source1[1].span());
|
||||
let joined2 = source1[0].span().join(source2[0].span());
|
||||
assert!(joined1.is_some());
|
||||
assert!(joined2.is_none());
|
||||
|
||||
let start = joined1.unwrap().start();
|
||||
let end = joined1.unwrap().end();
|
||||
assert_eq!(start.line, 1);
|
||||
assert_eq!(start.column, 0);
|
||||
assert_eq!(end.line, 2);
|
||||
assert_eq!(end.column, 3);
|
||||
|
||||
assert_eq!(
|
||||
joined1.unwrap().source_file(),
|
||||
source1[0].span().source_file()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn no_panic() {
|
||||
let s = str::from_utf8(b"b\'\xc2\x86 \x00\x00\x00^\"").unwrap();
|
||||
assert!(s.parse::<proc_macro2::TokenStream>().is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn tricky_doc_comment() {
|
||||
let stream = "/**/".parse::<proc_macro2::TokenStream>().unwrap();
|
||||
let tokens = stream.into_iter().collect::<Vec<_>>();
|
||||
assert!(tokens.is_empty(), "not empty -- {:?}", tokens);
|
||||
|
||||
let stream = "/// doc".parse::<proc_macro2::TokenStream>().unwrap();
|
||||
let tokens = stream.into_iter().collect::<Vec<_>>();
|
||||
assert!(tokens.len() == 2, "not length 2 -- {:?}", tokens);
|
||||
match tokens[0] {
|
||||
proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '#'),
|
||||
_ => panic!("wrong token {:?}", tokens[0]),
|
||||
}
|
||||
let mut tokens = match tokens[1] {
|
||||
proc_macro2::TokenTree::Group(ref tt) => {
|
||||
assert_eq!(tt.delimiter(), proc_macro2::Delimiter::Bracket);
|
||||
tt.stream().into_iter()
|
||||
}
|
||||
_ => panic!("wrong token {:?}", tokens[0]),
|
||||
};
|
||||
|
||||
match tokens.next().unwrap() {
|
||||
proc_macro2::TokenTree::Ident(ref tt) => assert_eq!(tt.to_string(), "doc"),
|
||||
t => panic!("wrong token {:?}", t),
|
||||
}
|
||||
match tokens.next().unwrap() {
|
||||
proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '='),
|
||||
t => panic!("wrong token {:?}", t),
|
||||
}
|
||||
match tokens.next().unwrap() {
|
||||
proc_macro2::TokenTree::Literal(ref tt) => {
|
||||
assert_eq!(tt.to_string(), "\" doc\"");
|
||||
}
|
||||
t => panic!("wrong token {:?}", t),
|
||||
}
|
||||
assert!(tokens.next().is_none());
|
||||
|
||||
let stream = "//! doc".parse::<proc_macro2::TokenStream>().unwrap();
|
||||
let tokens = stream.into_iter().collect::<Vec<_>>();
|
||||
assert!(tokens.len() == 3, "not length 3 -- {:?}", tokens);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_before_comment() {
|
||||
let mut tts = TokenStream::from_str("~// comment").unwrap().into_iter();
|
||||
match tts.next().unwrap() {
|
||||
TokenTree::Punct(tt) => {
|
||||
assert_eq!(tt.as_char(), '~');
|
||||
assert_eq!(tt.spacing(), Spacing::Alone);
|
||||
}
|
||||
wrong => panic!("wrong token {:?}", wrong),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn raw_identifier() {
|
||||
let mut tts = TokenStream::from_str("r#dyn").unwrap().into_iter();
|
||||
match tts.next().unwrap() {
|
||||
TokenTree::Ident(raw) => assert_eq!("r#dyn", raw.to_string()),
|
||||
wrong => panic!("wrong token {:?}", wrong),
|
||||
}
|
||||
assert!(tts.next().is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_debug_ident() {
|
||||
let ident = Ident::new("proc_macro", Span::call_site());
|
||||
|
||||
#[cfg(not(procmacro2_semver_exempt))]
|
||||
let expected = "Ident(proc_macro)";
|
||||
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
let expected = "Ident { sym: proc_macro, span: bytes(0..0) }";
|
||||
|
||||
assert_eq!(expected, format!("{:?}", ident));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_debug_tokenstream() {
|
||||
let tts = TokenStream::from_str("[a + 1]").unwrap();
|
||||
|
||||
#[cfg(not(procmacro2_semver_exempt))]
|
||||
let expected = "\
|
||||
TokenStream [
|
||||
Group {
|
||||
delimiter: Bracket,
|
||||
stream: TokenStream [
|
||||
Ident {
|
||||
sym: a
|
||||
},
|
||||
Punct {
|
||||
op: '+',
|
||||
spacing: Alone
|
||||
},
|
||||
Literal {
|
||||
lit: 1
|
||||
}
|
||||
]
|
||||
}
|
||||
]\
|
||||
";
|
||||
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
let expected = "\
|
||||
TokenStream [
|
||||
Group {
|
||||
delimiter: Bracket,
|
||||
stream: TokenStream [
|
||||
Ident {
|
||||
sym: a,
|
||||
span: bytes(2..3)
|
||||
},
|
||||
Punct {
|
||||
op: '+',
|
||||
spacing: Alone,
|
||||
span: bytes(4..5)
|
||||
},
|
||||
Literal {
|
||||
lit: 1,
|
||||
span: bytes(6..7)
|
||||
}
|
||||
],
|
||||
span: bytes(1..8)
|
||||
}
|
||||
]\
|
||||
";
|
||||
|
||||
assert_eq!(expected, format!("{:#?}", tts));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn default_tokenstream_is_empty() {
|
||||
let default_token_stream: TokenStream = Default::default();
|
||||
|
||||
assert!(default_token_stream.is_empty());
|
||||
}
|
|
@ -1 +1 @@
|
|||
{"files":{"Cargo.toml":"9d18d9cad8a90dd6eb3f9ff06357a9f9a93fdb4697445bbdb4b77be361377708","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"362a2156f7645528061b6e8487a2eb0f32f1693012ed82ee57afa05c039bba0d","build.rs":"0cc6e2cb919ddbff59cf1d810283939f97a59f0037540c0f2ee3453237635ff8","src/fallback.rs":"5c6379a90735e27abcc40253b223158c6b1e5784f3850bc423335363e87ef038","src/lib.rs":"3c257d875da825fb74522d74459d4ac697ab3c998f58af57aa17ae9dfaa19308","src/strnom.rs":"37f7791f73f123817ad5403af1d4e2a0714be27401729a2d451bc80b1f26bac9","src/wrapper.rs":"6e9aa48b55da1edd81a72552d6705e251ea5e77827a611bed5fa6a89ee9e3d59","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"c2652e3ae1dfcb94d2e6313b29712c5dcbd0fe62026913e67bb7cebd7560aade","tests/test.rs":"8c427be9cba1fa8d4a16647e53e3545e5863e29e2c0b311c93c9dd1399abf6a1"},"package":"afdc77cc74ec70ed262262942ebb7dac3d479e9e5cfa2da1841c0806f6cdabcc"}
|
||||
{"files":{"Cargo.toml":"b523856472549844b4bf20eca0473d955a7e5eeb95c70eddd31a05ac455427bb","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"89857eaaa305afe540abcf56fabae0194dfb4e7906a8098b7206acb23ed11ce8","build.rs":"36fa668f3bf309f243d0e977e8428446cc424303139c1f63410b3c2e30445aec","src/fallback.rs":"e4d1bcb1e92383a2285e6c947dd74b0e34144904948db68127faea627f5dd6ff","src/lib.rs":"896a1d212e30902ff051313808007406ca4471c27880a6ef19508f0ebb8333ee","src/strnom.rs":"60f5380106dbe568cca7abd09877e133c874fbee95d502e4830425c4613a640d","src/wrapper.rs":"0d7fe28ab2b7ee02b8eb8c5a636da364c60f6704b23e7db0a1ddd57c742f54b1","tests/marker.rs":"0227d07bbc7f2e2ad34662a6acb65668b7dc2f79141c4faa672703a04e27bea0","tests/test.rs":"166d35835355bdaa85bcf69de4dfb56ccddd8acf2e1a8cbc506782632b151674"},"package":"4d317f9caece796be1980837fd5cb3dfec5613ebdb04ad0956deea83ce168915"}
|
|
@ -3,7 +3,7 @@
|
|||
# When uploading crates to the registry Cargo will automatically
|
||||
# "normalize" Cargo.toml files for maximal compatibility
|
||||
# with all versions of Cargo and also rewrite `path` dependencies
|
||||
# to registry (e.g., crates.io) dependencies
|
||||
# to registry (e.g. crates.io) dependencies
|
||||
#
|
||||
# If you believe there's an error in this file please file an
|
||||
# issue against the rust-lang/cargo repository. If you're
|
||||
|
@ -11,28 +11,24 @@
|
|||
# will likely look very different (and much more reasonable)
|
||||
|
||||
[package]
|
||||
edition = "2018"
|
||||
name = "proc-macro2"
|
||||
version = "1.0.4"
|
||||
version = "0.4.27"
|
||||
authors = ["Alex Crichton <alex@alexcrichton.com>"]
|
||||
build = "build.rs"
|
||||
description = "A stable implementation of the upcoming new `proc_macro` API. Comes with an\noption, off by default, to also reimplement itself in terms of the upstream\nunstable API.\n"
|
||||
homepage = "https://github.com/alexcrichton/proc-macro2"
|
||||
documentation = "https://docs.rs/proc-macro2"
|
||||
readme = "README.md"
|
||||
keywords = ["macros"]
|
||||
license = "MIT OR Apache-2.0"
|
||||
license = "MIT/Apache-2.0"
|
||||
repository = "https://github.com/alexcrichton/proc-macro2"
|
||||
[package.metadata.docs.rs]
|
||||
rustc-args = ["--cfg", "procmacro2_semver_exempt"]
|
||||
rustdoc-args = ["--cfg", "procmacro2_semver_exempt"]
|
||||
|
||||
[lib]
|
||||
name = "proc_macro2"
|
||||
[dependencies.unicode-xid]
|
||||
version = "0.2"
|
||||
version = "0.1"
|
||||
[dev-dependencies.quote]
|
||||
version = "1.0"
|
||||
default_features = false
|
||||
version = "0.6"
|
||||
|
||||
[features]
|
||||
default = ["proc-macro"]
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/proc-macro2)
|
||||
|
||||
A wrapper around the procedural macro API of the compiler's `proc_macro` crate.
|
||||
This library serves two purposes:
|
||||
This library serves three purposes:
|
||||
|
||||
- **Bring proc-macro-like functionality to other contexts like build.rs and
|
||||
main.rs.** Types from `proc_macro` are entirely specific to procedural macros
|
||||
|
@ -21,6 +21,13 @@ This library serves two purposes:
|
|||
unit test. In order for helper libraries or components of a macro to be
|
||||
testable in isolation, they must be implemented using `proc_macro2`.
|
||||
|
||||
- **Provide the latest and greatest APIs across all compiler versions.**
|
||||
Procedural macros were first introduced to Rust in 1.15.0 with an extremely
|
||||
minimal interface. Since then, many improvements have landed to make macros
|
||||
more flexible and easier to write. This library tracks the procedural macro
|
||||
API of the most recent stable compiler but employs a polyfill to provide that
|
||||
API consistently across any compiler since 1.15.0.
|
||||
|
||||
[syn]: https://github.com/dtolnay/syn
|
||||
[quote]: https://github.com/dtolnay/quote
|
||||
|
||||
|
@ -28,7 +35,7 @@ This library serves two purposes:
|
|||
|
||||
```toml
|
||||
[dependencies]
|
||||
proc-macro2 = "1.0"
|
||||
proc-macro2 = "0.4"
|
||||
```
|
||||
|
||||
The skeleton of a typical procedural macro typically looks like this:
|
||||
|
@ -51,7 +58,7 @@ pub fn my_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
|
|||
If parsing with [Syn], you'll use [`parse_macro_input!`] instead to propagate
|
||||
parse errors correctly back to the compiler when parsing fails.
|
||||
|
||||
[`parse_macro_input!`]: https://docs.rs/syn/1.0/syn/macro.parse_macro_input.html
|
||||
[`parse_macro_input!`]: https://docs.rs/syn/0.15/syn/macro.parse_macro_input.html
|
||||
|
||||
## Unstable features
|
||||
|
||||
|
@ -60,10 +67,10 @@ API. Functionality in `proc_macro` that is not yet stable is not exposed by
|
|||
proc-macro2 by default.
|
||||
|
||||
To opt into the additional APIs available in the most recent nightly compiler,
|
||||
the `procmacro2_semver_exempt` config flag must be passed to rustc. We will
|
||||
polyfill those nightly-only APIs back to Rust 1.31.0. As these are unstable APIs
|
||||
that track the nightly compiler, minor versions of proc-macro2 may make breaking
|
||||
changes to them at any time.
|
||||
the `procmacro2_semver_exempt` config flag must be passed to rustc. As usual, we
|
||||
will polyfill those nightly-only APIs all the way back to Rust 1.15.0. As these
|
||||
are unstable APIs that track the nightly compiler, minor versions of proc-macro2
|
||||
may make breaking changes to them at any time.
|
||||
|
||||
```
|
||||
RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo build
|
||||
|
@ -75,19 +82,19 @@ reminder that you are outside of the normal semver guarantees.
|
|||
|
||||
Semver exempt methods are marked as such in the proc-macro2 documentation.
|
||||
|
||||
<br>
|
||||
# License
|
||||
|
||||
#### License
|
||||
This project is licensed under either of
|
||||
|
||||
<sup>
|
||||
Licensed under either of <a href="LICENSE-APACHE">Apache License, Version
|
||||
2.0</a> or <a href="LICENSE-MIT">MIT license</a> at your option.
|
||||
</sup>
|
||||
* Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
|
||||
http://www.apache.org/licenses/LICENSE-2.0)
|
||||
* MIT license ([LICENSE-MIT](LICENSE-MIT) or
|
||||
http://opensource.org/licenses/MIT)
|
||||
|
||||
<br>
|
||||
at your option.
|
||||
|
||||
### Contribution
|
||||
|
||||
<sub>
|
||||
Unless you explicitly state otherwise, any contribution intentionally submitted
|
||||
for inclusion in this crate by you, as defined in the Apache-2.0 license, shall
|
||||
be dual licensed as above, without any additional terms or conditions.
|
||||
</sub>
|
||||
for inclusion in Serde by you, as defined in the Apache-2.0 license, shall be
|
||||
dual licensed as above, without any additional terms or conditions.
|
||||
|
|
|
@ -1,5 +1,9 @@
|
|||
// rustc-cfg emitted by the build script:
|
||||
//
|
||||
// "u128"
|
||||
// Include u128 and i128 constructors for proc_macro2::Literal. Enabled on
|
||||
// any compiler 1.26+.
|
||||
//
|
||||
// "use_proc_macro"
|
||||
// Link to extern crate proc_macro. Available on any compiler and any target
|
||||
// except wasm32. Requires "proc-macro" Cargo cfg to be enabled (default is
|
||||
|
@ -14,11 +18,15 @@
|
|||
// procmacro2_semver_exempt surface area is implemented by using the
|
||||
// nightly-only proc_macro API.
|
||||
//
|
||||
// "proc_macro_span"
|
||||
// Enable non-dummy behavior of Span::start and Span::end methods which
|
||||
// requires an unstable compiler feature. Enabled when building with
|
||||
// nightly, unless `-Z allow-feature` in RUSTFLAGS disallows unstable
|
||||
// features.
|
||||
// "slow_extend"
|
||||
// Fallback when `impl Extend for TokenStream` is not available. These impls
|
||||
// were added one version later than the rest of the proc_macro token API.
|
||||
// Enabled on rustc 1.29 only.
|
||||
//
|
||||
// "nightly"
|
||||
// Enable the Span::unwrap method. This is to support proc_macro_span and
|
||||
// proc_macro_diagnostic use on the nightly channel without requiring the
|
||||
// semver exemption opt-in. Enabled when building with nightly.
|
||||
//
|
||||
// "super_unstable"
|
||||
// Implement the semver exempt API in terms of the nightly-only proc_macro
|
||||
|
@ -31,20 +39,21 @@
|
|||
// location inside spans is a performance hit.
|
||||
|
||||
use std::env;
|
||||
use std::process::{self, Command};
|
||||
use std::process::Command;
|
||||
use std::str;
|
||||
|
||||
fn main() {
|
||||
println!("cargo:rerun-if-changed=build.rs");
|
||||
|
||||
let target = env::var("TARGET").unwrap();
|
||||
|
||||
let version = match rustc_version() {
|
||||
Some(version) => version,
|
||||
None => return,
|
||||
};
|
||||
|
||||
if version.minor < 31 {
|
||||
eprintln!("Minimum supported rustc version is 1.31");
|
||||
process::exit(1);
|
||||
if version.minor >= 26 {
|
||||
println!("cargo:rustc-cfg=u128");
|
||||
}
|
||||
|
||||
let semver_exempt = cfg!(procmacro2_semver_exempt);
|
||||
|
@ -57,19 +66,23 @@ fn main() {
|
|||
println!("cargo:rustc-cfg=span_locations");
|
||||
}
|
||||
|
||||
let target = env::var("TARGET").unwrap();
|
||||
if !enable_use_proc_macro(&target) {
|
||||
return;
|
||||
}
|
||||
|
||||
println!("cargo:rustc-cfg=use_proc_macro");
|
||||
|
||||
if version.nightly || !semver_exempt {
|
||||
// Rust 1.29 stabilized the necessary APIs in the `proc_macro` crate
|
||||
if version.nightly || version.minor >= 29 && !semver_exempt {
|
||||
println!("cargo:rustc-cfg=wrap_proc_macro");
|
||||
}
|
||||
|
||||
if version.nightly && feature_allowed("proc_macro_span") {
|
||||
println!("cargo:rustc-cfg=proc_macro_span");
|
||||
if version.minor == 29 {
|
||||
println!("cargo:rustc-cfg=slow_extend");
|
||||
}
|
||||
|
||||
if version.nightly {
|
||||
println!("cargo:rustc-cfg=nightly");
|
||||
}
|
||||
|
||||
if semver_exempt && version.nightly {
|
||||
|
@ -93,37 +106,28 @@ struct RustcVersion {
|
|||
}
|
||||
|
||||
fn rustc_version() -> Option<RustcVersion> {
|
||||
let rustc = env::var_os("RUSTC")?;
|
||||
let output = Command::new(rustc).arg("--version").output().ok()?;
|
||||
let version = str::from_utf8(&output.stdout).ok()?;
|
||||
let nightly = version.contains("nightly") || version.contains("dev");
|
||||
macro_rules! otry {
|
||||
($e:expr) => {
|
||||
match $e {
|
||||
Some(e) => e,
|
||||
None => return None,
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
let rustc = otry!(env::var_os("RUSTC"));
|
||||
let output = otry!(Command::new(rustc).arg("--version").output().ok());
|
||||
let version = otry!(str::from_utf8(&output.stdout).ok());
|
||||
let nightly = version.contains("nightly");
|
||||
let mut pieces = version.split('.');
|
||||
if pieces.next() != Some("rustc 1") {
|
||||
return None;
|
||||
}
|
||||
let minor = pieces.next()?.parse().ok()?;
|
||||
Some(RustcVersion { minor, nightly })
|
||||
}
|
||||
|
||||
fn feature_allowed(feature: &str) -> bool {
|
||||
// Recognized formats:
|
||||
//
|
||||
// -Z allow-features=feature1,feature2
|
||||
//
|
||||
// -Zallow-features=feature1,feature2
|
||||
|
||||
if let Some(rustflags) = env::var_os("RUSTFLAGS") {
|
||||
for mut flag in rustflags.to_string_lossy().split(' ') {
|
||||
if flag.starts_with("-Z") {
|
||||
flag = &flag["-Z".len()..];
|
||||
}
|
||||
if flag.starts_with("allow-features=") {
|
||||
flag = &flag["allow-features=".len()..];
|
||||
return flag.split(',').any(|allowed| allowed == feature);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// No allow-features= flag, allowed by default.
|
||||
true
|
||||
let minor = otry!(pieces.next());
|
||||
let minor = otry!(minor.parse().ok());
|
||||
|
||||
Some(RustcVersion {
|
||||
minor: minor,
|
||||
nightly: nightly,
|
||||
})
|
||||
}
|
||||
|
|
|
@ -1,20 +1,20 @@
|
|||
#[cfg(span_locations)]
|
||||
use std::cell::RefCell;
|
||||
#[cfg(span_locations)]
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
use std::cmp;
|
||||
use std::fmt;
|
||||
use std::iter;
|
||||
use std::ops::RangeBounds;
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::str::FromStr;
|
||||
use std::vec;
|
||||
|
||||
use crate::strnom::{block_comment, skip_whitespace, whitespace, word_break, Cursor, PResult};
|
||||
use crate::{Delimiter, Punct, Spacing, TokenTree};
|
||||
use strnom::{block_comment, skip_whitespace, whitespace, word_break, Cursor, PResult};
|
||||
use unicode_xid::UnicodeXID;
|
||||
|
||||
use {Delimiter, Punct, Spacing, TokenTree};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct TokenStream {
|
||||
inner: Vec<TokenTree>,
|
||||
|
@ -35,8 +35,8 @@ impl TokenStream {
|
|||
|
||||
#[cfg(span_locations)]
|
||||
fn get_cursor(src: &str) -> Cursor {
|
||||
// Create a dummy file & add it to the source map
|
||||
SOURCE_MAP.with(|cm| {
|
||||
// Create a dummy file & add it to the codemap
|
||||
CODEMAP.with(|cm| {
|
||||
let mut cm = cm.borrow_mut();
|
||||
let name = format!("<parsed string {}>", cm.files.len());
|
||||
let span = cm.add_file(&name, src);
|
||||
|
@ -56,7 +56,7 @@ impl FromStr for TokenStream {
|
|||
type Err = LexError;
|
||||
|
||||
fn from_str(src: &str) -> Result<TokenStream, LexError> {
|
||||
// Create a dummy file & add it to the source map
|
||||
// Create a dummy file & add it to the codemap
|
||||
let cursor = get_cursor(src);
|
||||
|
||||
match token_stream(cursor) {
|
||||
|
@ -118,8 +118,8 @@ impl fmt::Debug for TokenStream {
|
|||
}
|
||||
|
||||
#[cfg(use_proc_macro)]
|
||||
impl From<proc_macro::TokenStream> for TokenStream {
|
||||
fn from(inner: proc_macro::TokenStream) -> TokenStream {
|
||||
impl From<::proc_macro::TokenStream> for TokenStream {
|
||||
fn from(inner: ::proc_macro::TokenStream) -> TokenStream {
|
||||
inner
|
||||
.to_string()
|
||||
.parse()
|
||||
|
@ -128,8 +128,8 @@ impl From<proc_macro::TokenStream> for TokenStream {
|
|||
}
|
||||
|
||||
#[cfg(use_proc_macro)]
|
||||
impl From<TokenStream> for proc_macro::TokenStream {
|
||||
fn from(inner: TokenStream) -> proc_macro::TokenStream {
|
||||
impl From<TokenStream> for ::proc_macro::TokenStream {
|
||||
fn from(inner: TokenStream) -> ::proc_macro::TokenStream {
|
||||
inner
|
||||
.to_string()
|
||||
.parse()
|
||||
|
@ -225,7 +225,7 @@ pub struct LineColumn {
|
|||
|
||||
#[cfg(span_locations)]
|
||||
thread_local! {
|
||||
static SOURCE_MAP: RefCell<SourceMap> = RefCell::new(SourceMap {
|
||||
static CODEMAP: RefCell<Codemap> = RefCell::new(Codemap {
|
||||
// NOTE: We start with a single dummy file which all call_site() and
|
||||
// def_site() spans reference.
|
||||
files: vec![{
|
||||
|
@ -295,12 +295,12 @@ fn lines_offsets(s: &str) -> Vec<usize> {
|
|||
}
|
||||
|
||||
#[cfg(span_locations)]
|
||||
struct SourceMap {
|
||||
struct Codemap {
|
||||
files: Vec<FileInfo>,
|
||||
}
|
||||
|
||||
#[cfg(span_locations)]
|
||||
impl SourceMap {
|
||||
impl Codemap {
|
||||
fn next_start_pos(&self) -> u32 {
|
||||
// Add 1 so there's always space between files.
|
||||
//
|
||||
|
@ -314,19 +314,22 @@ impl SourceMap {
|
|||
let lo = self.next_start_pos();
|
||||
// XXX(nika): Shouild we bother doing a checked cast or checked add here?
|
||||
let span = Span {
|
||||
lo,
|
||||
lo: lo,
|
||||
hi: lo + (src.len() as u32),
|
||||
};
|
||||
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
self.files.push(FileInfo {
|
||||
name: name.to_owned(),
|
||||
span,
|
||||
lines,
|
||||
span: span,
|
||||
lines: lines,
|
||||
});
|
||||
|
||||
#[cfg(not(procmacro2_semver_exempt))]
|
||||
self.files.push(FileInfo { span, lines });
|
||||
self.files.push(FileInfo {
|
||||
span: span,
|
||||
lines: lines,
|
||||
});
|
||||
let _ = name;
|
||||
|
||||
span
|
||||
|
@ -381,7 +384,7 @@ impl Span {
|
|||
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
pub fn source_file(&self) -> SourceFile {
|
||||
SOURCE_MAP.with(|cm| {
|
||||
CODEMAP.with(|cm| {
|
||||
let cm = cm.borrow();
|
||||
let fi = cm.fileinfo(*self);
|
||||
SourceFile {
|
||||
|
@ -392,7 +395,7 @@ impl Span {
|
|||
|
||||
#[cfg(span_locations)]
|
||||
pub fn start(&self) -> LineColumn {
|
||||
SOURCE_MAP.with(|cm| {
|
||||
CODEMAP.with(|cm| {
|
||||
let cm = cm.borrow();
|
||||
let fi = cm.fileinfo(*self);
|
||||
fi.offset_line_column(self.lo as usize)
|
||||
|
@ -401,21 +404,16 @@ impl Span {
|
|||
|
||||
#[cfg(span_locations)]
|
||||
pub fn end(&self) -> LineColumn {
|
||||
SOURCE_MAP.with(|cm| {
|
||||
CODEMAP.with(|cm| {
|
||||
let cm = cm.borrow();
|
||||
let fi = cm.fileinfo(*self);
|
||||
fi.offset_line_column(self.hi as usize)
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(not(span_locations))]
|
||||
pub fn join(&self, _other: Span) -> Option<Span> {
|
||||
Some(Span {})
|
||||
}
|
||||
|
||||
#[cfg(span_locations)]
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
pub fn join(&self, other: Span) -> Option<Span> {
|
||||
SOURCE_MAP.with(|cm| {
|
||||
CODEMAP.with(|cm| {
|
||||
let cm = cm.borrow();
|
||||
// If `other` is not within the same FileInfo as us, return None.
|
||||
if !cm.fileinfo(*self).span_within(other) {
|
||||
|
@ -455,8 +453,8 @@ pub struct Group {
|
|||
impl Group {
|
||||
pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
|
||||
Group {
|
||||
delimiter,
|
||||
stream,
|
||||
delimiter: delimiter,
|
||||
stream: stream,
|
||||
span: Span::call_site(),
|
||||
}
|
||||
}
|
||||
|
@ -473,10 +471,12 @@ impl Group {
|
|||
self.span
|
||||
}
|
||||
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
pub fn span_open(&self) -> Span {
|
||||
self.span
|
||||
}
|
||||
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
pub fn span_close(&self) -> Span {
|
||||
self.span
|
||||
}
|
||||
|
@ -523,12 +523,12 @@ pub struct Ident {
|
|||
|
||||
impl Ident {
|
||||
fn _new(string: &str, raw: bool, span: Span) -> Ident {
|
||||
validate_ident(string);
|
||||
validate_term(string);
|
||||
|
||||
Ident {
|
||||
sym: string.to_owned(),
|
||||
span,
|
||||
raw,
|
||||
span: span,
|
||||
raw: raw,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -566,7 +566,7 @@ fn is_ident_continue(c: char) -> bool {
|
|||
|| (c > '\x7f' && UnicodeXID::is_xid_continue(c))
|
||||
}
|
||||
|
||||
fn validate_ident(string: &str) {
|
||||
fn validate_term(string: &str) {
|
||||
let validate = string;
|
||||
if validate.is_empty() {
|
||||
panic!("Ident is not allowed to be empty; use Option<Ident>");
|
||||
|
@ -671,7 +671,7 @@ macro_rules! unsuffixed_numbers {
|
|||
impl Literal {
|
||||
fn _new(text: String) -> Literal {
|
||||
Literal {
|
||||
text,
|
||||
text: text,
|
||||
span: Span::call_site(),
|
||||
}
|
||||
}
|
||||
|
@ -681,34 +681,42 @@ impl Literal {
|
|||
u16_suffixed => u16,
|
||||
u32_suffixed => u32,
|
||||
u64_suffixed => u64,
|
||||
u128_suffixed => u128,
|
||||
usize_suffixed => usize,
|
||||
i8_suffixed => i8,
|
||||
i16_suffixed => i16,
|
||||
i32_suffixed => i32,
|
||||
i64_suffixed => i64,
|
||||
i128_suffixed => i128,
|
||||
isize_suffixed => isize,
|
||||
|
||||
f32_suffixed => f32,
|
||||
f64_suffixed => f64,
|
||||
}
|
||||
|
||||
#[cfg(u128)]
|
||||
suffixed_numbers! {
|
||||
u128_suffixed => u128,
|
||||
i128_suffixed => i128,
|
||||
}
|
||||
|
||||
unsuffixed_numbers! {
|
||||
u8_unsuffixed => u8,
|
||||
u16_unsuffixed => u16,
|
||||
u32_unsuffixed => u32,
|
||||
u64_unsuffixed => u64,
|
||||
u128_unsuffixed => u128,
|
||||
usize_unsuffixed => usize,
|
||||
i8_unsuffixed => i8,
|
||||
i16_unsuffixed => i16,
|
||||
i32_unsuffixed => i32,
|
||||
i64_unsuffixed => i64,
|
||||
i128_unsuffixed => i128,
|
||||
isize_unsuffixed => isize,
|
||||
}
|
||||
|
||||
#[cfg(u128)]
|
||||
unsuffixed_numbers! {
|
||||
u128_unsuffixed => u128,
|
||||
i128_unsuffixed => i128,
|
||||
}
|
||||
|
||||
pub fn f32_unsuffixed(f: f32) -> Literal {
|
||||
let mut s = f.to_string();
|
||||
if !s.contains(".") {
|
||||
|
@ -726,31 +734,17 @@ impl Literal {
|
|||
}
|
||||
|
||||
pub fn string(t: &str) -> Literal {
|
||||
let mut text = String::with_capacity(t.len() + 2);
|
||||
text.push('"');
|
||||
for c in t.chars() {
|
||||
if c == '\'' {
|
||||
// escape_default turns this into "\'" which is unnecessary.
|
||||
text.push(c);
|
||||
} else {
|
||||
text.extend(c.escape_default());
|
||||
}
|
||||
}
|
||||
text.push('"');
|
||||
Literal::_new(text)
|
||||
let mut s = t
|
||||
.chars()
|
||||
.flat_map(|c| c.escape_default())
|
||||
.collect::<String>();
|
||||
s.push('"');
|
||||
s.insert(0, '"');
|
||||
Literal::_new(s)
|
||||
}
|
||||
|
||||
pub fn character(t: char) -> Literal {
|
||||
let mut text = String::new();
|
||||
text.push('\'');
|
||||
if t == '"' {
|
||||
// escape_default turns this into '\"' which is unnecessary.
|
||||
text.push(t);
|
||||
} else {
|
||||
text.extend(t.escape_default());
|
||||
}
|
||||
text.push('\'');
|
||||
Literal::_new(text)
|
||||
Literal::_new(format!("'{}'", t.escape_default().collect::<String>()))
|
||||
}
|
||||
|
||||
pub fn byte_string(bytes: &[u8]) -> Literal {
|
||||
|
@ -763,7 +757,7 @@ impl Literal {
|
|||
b'\r' => escaped.push_str(r"\r"),
|
||||
b'"' => escaped.push_str("\\\""),
|
||||
b'\\' => escaped.push_str("\\\\"),
|
||||
b'\x20'..=b'\x7E' => escaped.push(*b as char),
|
||||
b'\x20'...b'\x7E' => escaped.push(*b as char),
|
||||
_ => escaped.push_str(&format!("\\x{:02X}", b)),
|
||||
}
|
||||
}
|
||||
|
@ -778,10 +772,6 @@ impl Literal {
|
|||
pub fn set_span(&mut self, span: Span) {
|
||||
self.span = span;
|
||||
}
|
||||
|
||||
pub fn subspan<R: RangeBounds<usize>>(&self, _range: R) -> Option<Span> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for Literal {
|
||||
|
@ -827,21 +817,21 @@ fn token_stream(mut input: Cursor) -> PResult<TokenStream> {
|
|||
fn spanned<'a, T>(
|
||||
input: Cursor<'a>,
|
||||
f: fn(Cursor<'a>) -> PResult<'a, T>,
|
||||
) -> PResult<'a, (T, crate::Span)> {
|
||||
) -> PResult<'a, (T, ::Span)> {
|
||||
let (a, b) = f(skip_whitespace(input))?;
|
||||
Ok((a, ((b, crate::Span::_new_stable(Span::call_site())))))
|
||||
Ok((a, ((b, ::Span::_new_stable(Span::call_site())))))
|
||||
}
|
||||
|
||||
#[cfg(span_locations)]
|
||||
fn spanned<'a, T>(
|
||||
input: Cursor<'a>,
|
||||
f: fn(Cursor<'a>) -> PResult<'a, T>,
|
||||
) -> PResult<'a, (T, crate::Span)> {
|
||||
) -> PResult<'a, (T, ::Span)> {
|
||||
let input = skip_whitespace(input);
|
||||
let lo = input.off;
|
||||
let (a, b) = f(input)?;
|
||||
let hi = a.off;
|
||||
let span = crate::Span::_new_stable(Span { lo, hi });
|
||||
let span = ::Span::_new_stable(Span { lo: lo, hi: hi });
|
||||
Ok((a, (b, span)))
|
||||
}
|
||||
|
||||
|
@ -852,9 +842,9 @@ fn token_tree(input: Cursor) -> PResult<TokenTree> {
|
|||
}
|
||||
|
||||
named!(token_kind -> TokenTree, alt!(
|
||||
map!(group, |g| TokenTree::Group(crate::Group::_new_stable(g)))
|
||||
map!(group, |g| TokenTree::Group(::Group::_new_stable(g)))
|
||||
|
|
||||
map!(literal, |l| TokenTree::Literal(crate::Literal::_new_stable(l))) // must be before symbol
|
||||
map!(literal, |l| TokenTree::Literal(::Literal::_new_stable(l))) // must be before symbol
|
||||
|
|
||||
map!(op, TokenTree::Punct)
|
||||
|
|
||||
|
@ -886,27 +876,14 @@ fn symbol_leading_ws(input: Cursor) -> PResult<TokenTree> {
|
|||
}
|
||||
|
||||
fn symbol(input: Cursor) -> PResult<TokenTree> {
|
||||
let raw = input.starts_with("r#");
|
||||
let rest = input.advance((raw as usize) << 1);
|
||||
|
||||
let (rest, sym) = symbol_not_raw(rest)?;
|
||||
|
||||
if !raw {
|
||||
let ident = crate::Ident::new(sym, crate::Span::call_site());
|
||||
return Ok((rest, ident.into()));
|
||||
}
|
||||
|
||||
if sym == "_" {
|
||||
return Err(LexError);
|
||||
}
|
||||
|
||||
let ident = crate::Ident::_new_raw(sym, crate::Span::call_site());
|
||||
Ok((rest, ident.into()))
|
||||
}
|
||||
|
||||
fn symbol_not_raw(input: Cursor) -> PResult<&str> {
|
||||
let mut chars = input.char_indices();
|
||||
|
||||
let raw = input.starts_with("r#");
|
||||
if raw {
|
||||
chars.next();
|
||||
chars.next();
|
||||
}
|
||||
|
||||
match chars.next() {
|
||||
Some((_, ch)) if is_ident_start(ch) => {}
|
||||
_ => return Err(LexError),
|
||||
|
@ -920,7 +897,17 @@ fn symbol_not_raw(input: Cursor) -> PResult<&str> {
|
|||
}
|
||||
}
|
||||
|
||||
Ok((input.advance(end), &input.rest[..end]))
|
||||
let a = &input.rest[..end];
|
||||
if a == "r#_" {
|
||||
Err(LexError)
|
||||
} else {
|
||||
let ident = if raw {
|
||||
::Ident::_new_raw(&a[2..], ::Span::call_site())
|
||||
} else {
|
||||
::Ident::new(a, ::Span::call_site())
|
||||
};
|
||||
Ok((input.advance(end), ident.into()))
|
||||
}
|
||||
}
|
||||
|
||||
fn literal(input: Cursor) -> PResult<Literal> {
|
||||
|
@ -960,12 +947,10 @@ named!(string -> (), alt!(
|
|||
) => { |_| () }
|
||||
));
|
||||
|
||||
named!(quoted_string -> (), do_parse!(
|
||||
punct!("\"") >>
|
||||
cooked_string >>
|
||||
tag!("\"") >>
|
||||
option!(symbol_not_raw) >>
|
||||
(())
|
||||
named!(quoted_string -> (), delimited!(
|
||||
punct!("\""),
|
||||
cooked_string,
|
||||
tag!("\"")
|
||||
));
|
||||
|
||||
fn cooked_string(input: Cursor) -> PResult<()> {
|
||||
|
@ -1174,8 +1159,8 @@ fn backslash_x_char<I>(chars: &mut I) -> bool
|
|||
where
|
||||
I: Iterator<Item = (usize, char)>,
|
||||
{
|
||||
next_ch!(chars @ '0'..='7');
|
||||
next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
|
||||
next_ch!(chars @ '0'...'7');
|
||||
next_ch!(chars @ '0'...'9' | 'a'...'f' | 'A'...'F');
|
||||
true
|
||||
}
|
||||
|
||||
|
@ -1183,8 +1168,8 @@ fn backslash_x_byte<I>(chars: &mut I) -> bool
|
|||
where
|
||||
I: Iterator<Item = (usize, u8)>,
|
||||
{
|
||||
next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
|
||||
next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
|
||||
next_ch!(chars @ b'0'...b'9' | b'a'...b'f' | b'A'...b'F');
|
||||
next_ch!(chars @ b'0'...b'9' | b'a'...b'f' | b'A'...b'F');
|
||||
true
|
||||
}
|
||||
|
||||
|
@ -1193,9 +1178,9 @@ where
|
|||
I: Iterator<Item = (usize, char)>,
|
||||
{
|
||||
next_ch!(chars @ '{');
|
||||
next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
|
||||
next_ch!(chars @ '0'...'9' | 'a'...'f' | 'A'...'F');
|
||||
loop {
|
||||
let c = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F' | '_' | '}');
|
||||
let c = next_ch!(chars @ '0'...'9' | 'a'...'f' | 'A'...'F' | '_' | '}');
|
||||
if c == '}' {
|
||||
return true;
|
||||
}
|
||||
|
@ -1203,10 +1188,10 @@ where
|
|||
}
|
||||
|
||||
fn float(input: Cursor) -> PResult<()> {
|
||||
let (mut rest, ()) = float_digits(input)?;
|
||||
if let Some(ch) = rest.chars().next() {
|
||||
if is_ident_start(ch) {
|
||||
rest = symbol_not_raw(rest)?.0;
|
||||
let (rest, ()) = float_digits(input)?;
|
||||
for suffix in &["f32", "f64"] {
|
||||
if rest.starts_with(suffix) {
|
||||
return word_break(rest.advance(suffix.len()));
|
||||
}
|
||||
}
|
||||
word_break(rest)
|
||||
|
@ -1224,7 +1209,7 @@ fn float_digits(input: Cursor) -> PResult<()> {
|
|||
let mut has_exp = false;
|
||||
while let Some(&ch) = chars.peek() {
|
||||
match ch {
|
||||
'0'..='9' | '_' => {
|
||||
'0'...'9' | '_' => {
|
||||
chars.next();
|
||||
len += 1;
|
||||
}
|
||||
|
@ -1235,7 +1220,7 @@ fn float_digits(input: Cursor) -> PResult<()> {
|
|||
chars.next();
|
||||
if chars
|
||||
.peek()
|
||||
.map(|&ch| ch == '.' || is_ident_start(ch))
|
||||
.map(|&ch| ch == '.' || UnicodeXID::is_xid_start(ch))
|
||||
.unwrap_or(false)
|
||||
{
|
||||
return Err(LexError);
|
||||
|
@ -1269,7 +1254,7 @@ fn float_digits(input: Cursor) -> PResult<()> {
|
|||
chars.next();
|
||||
len += 1;
|
||||
}
|
||||
'0'..='9' => {
|
||||
'0'...'9' => {
|
||||
chars.next();
|
||||
len += 1;
|
||||
has_exp_value = true;
|
||||
|
@ -1290,10 +1275,12 @@ fn float_digits(input: Cursor) -> PResult<()> {
|
|||
}
|
||||
|
||||
fn int(input: Cursor) -> PResult<()> {
|
||||
let (mut rest, ()) = digits(input)?;
|
||||
if let Some(ch) = rest.chars().next() {
|
||||
if is_ident_start(ch) {
|
||||
rest = symbol_not_raw(rest)?.0;
|
||||
let (rest, ()) = digits(input)?;
|
||||
for suffix in &[
|
||||
"isize", "i8", "i16", "i32", "i64", "i128", "usize", "u8", "u16", "u32", "u64", "u128",
|
||||
] {
|
||||
if rest.starts_with(suffix) {
|
||||
return word_break(rest.advance(suffix.len()));
|
||||
}
|
||||
}
|
||||
word_break(rest)
|
||||
|
@ -1317,9 +1304,9 @@ fn digits(mut input: Cursor) -> PResult<()> {
|
|||
let mut empty = true;
|
||||
for b in input.bytes() {
|
||||
let digit = match b {
|
||||
b'0'..=b'9' => (b - b'0') as u64,
|
||||
b'a'..=b'f' => 10 + (b - b'a') as u64,
|
||||
b'A'..=b'F' => 10 + (b - b'A') as u64,
|
||||
b'0'...b'9' => (b - b'0') as u64,
|
||||
b'a'...b'f' => 10 + (b - b'a') as u64,
|
||||
b'A'...b'F' => 10 + (b - b'A') as u64,
|
||||
b'_' => {
|
||||
if empty && base == 10 {
|
||||
return Err(LexError);
|
||||
|
@ -1389,15 +1376,15 @@ fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
|
|||
trees.push(Punct::new('!', Spacing::Alone).into());
|
||||
}
|
||||
let mut stream = vec![
|
||||
TokenTree::Ident(crate::Ident::new("doc", span)),
|
||||
TokenTree::Ident(::Ident::new("doc", span)),
|
||||
TokenTree::Punct(Punct::new('=', Spacing::Alone)),
|
||||
TokenTree::Literal(crate::Literal::string(comment)),
|
||||
TokenTree::Literal(::Literal::string(comment)),
|
||||
];
|
||||
for tt in stream.iter_mut() {
|
||||
tt.set_span(span);
|
||||
}
|
||||
let group = Group::new(Delimiter::Bracket, stream.into_iter().collect());
|
||||
trees.push(crate::Group::_new_stable(group).into());
|
||||
trees.push(::Group::_new_stable(group).into());
|
||||
for tt in trees.iter_mut() {
|
||||
tt.set_span(span);
|
||||
}
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
//! A wrapper around the procedural macro API of the compiler's [`proc_macro`]
|
||||
//! crate. This library serves two purposes:
|
||||
//! crate. This library serves three purposes:
|
||||
//!
|
||||
//! [`proc_macro`]: https://doc.rust-lang.org/proc_macro/
|
||||
//!
|
||||
|
@ -18,6 +18,14 @@
|
|||
//! a macro to be testable in isolation, they must be implemented using
|
||||
//! `proc_macro2`.
|
||||
//!
|
||||
//! - **Provide the latest and greatest APIs across all compiler versions.**
|
||||
//! Procedural macros were first introduced to Rust in 1.15.0 with an
|
||||
//! extremely minimal interface. Since then, many improvements have landed to
|
||||
//! make macros more flexible and easier to write. This library tracks the
|
||||
//! procedural macro API of the most recent stable compiler but employs a
|
||||
//! polyfill to provide that API consistently across any compiler since
|
||||
//! 1.15.0.
|
||||
//!
|
||||
//! [syn]: https://github.com/dtolnay/syn
|
||||
//! [quote]: https://github.com/dtolnay/quote
|
||||
//!
|
||||
|
@ -25,13 +33,12 @@
|
|||
//!
|
||||
//! The skeleton of a typical procedural macro typically looks like this:
|
||||
//!
|
||||
//! ```
|
||||
//! ```edition2018
|
||||
//! extern crate proc_macro;
|
||||
//!
|
||||
//! # const IGNORE: &str = stringify! {
|
||||
//! #[proc_macro_derive(MyDerive)]
|
||||
//! # };
|
||||
//! # #[cfg(wrap_proc_macro)]
|
||||
//! pub fn my_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
|
||||
//! let input = proc_macro2::TokenStream::from(input);
|
||||
//!
|
||||
|
@ -47,7 +54,7 @@
|
|||
//! If parsing with [Syn], you'll use [`parse_macro_input!`] instead to
|
||||
//! propagate parse errors correctly back to the compiler when parsing fails.
|
||||
//!
|
||||
//! [`parse_macro_input!`]: https://docs.rs/syn/1.0/syn/macro.parse_macro_input.html
|
||||
//! [`parse_macro_input!`]: https://docs.rs/syn/0.15/syn/macro.parse_macro_input.html
|
||||
//!
|
||||
//! # Unstable features
|
||||
//!
|
||||
|
@ -57,9 +64,9 @@
|
|||
//!
|
||||
//! To opt into the additional APIs available in the most recent nightly
|
||||
//! compiler, the `procmacro2_semver_exempt` config flag must be passed to
|
||||
//! rustc. We will polyfill those nightly-only APIs back to Rust 1.31.0. As
|
||||
//! these are unstable APIs that track the nightly compiler, minor versions of
|
||||
//! proc-macro2 may make breaking changes to them at any time.
|
||||
//! rustc. As usual, we will polyfill those nightly-only APIs all the way back
|
||||
//! to Rust 1.15.0. As these are unstable APIs that track the nightly compiler,
|
||||
//! minor versions of proc-macro2 may make breaking changes to them at any time.
|
||||
//!
|
||||
//! ```sh
|
||||
//! RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo build
|
||||
|
@ -70,27 +77,21 @@
|
|||
//! as a reminder that you are outside of the normal semver guarantees.
|
||||
//!
|
||||
//! Semver exempt methods are marked as such in the proc-macro2 documentation.
|
||||
//!
|
||||
//! # Thread-Safety
|
||||
//!
|
||||
//! Most types in this crate are `!Sync` because the underlying compiler
|
||||
//! types make use of thread-local memory, meaning they cannot be accessed from
|
||||
//! a different thread.
|
||||
|
||||
// Proc-macro2 types in rustdoc of other crates get linked to here.
|
||||
#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.4")]
|
||||
#![cfg_attr(any(proc_macro_span, super_unstable), feature(proc_macro_span))]
|
||||
#![doc(html_root_url = "https://docs.rs/proc-macro2/0.4.27")]
|
||||
#![cfg_attr(nightly, feature(proc_macro_span))]
|
||||
#![cfg_attr(super_unstable, feature(proc_macro_raw_ident, proc_macro_def_site))]
|
||||
|
||||
#[cfg(use_proc_macro)]
|
||||
extern crate proc_macro;
|
||||
extern crate unicode_xid;
|
||||
|
||||
use std::cmp::Ordering;
|
||||
use std::fmt;
|
||||
use std::hash::{Hash, Hasher};
|
||||
use std::iter::FromIterator;
|
||||
use std::marker;
|
||||
use std::ops::RangeBounds;
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
use std::path::PathBuf;
|
||||
use std::rc::Rc;
|
||||
|
@ -101,7 +102,7 @@ mod strnom;
|
|||
mod fallback;
|
||||
|
||||
#[cfg(not(wrap_proc_macro))]
|
||||
use crate::fallback as imp;
|
||||
use fallback as imp;
|
||||
#[path = "wrapper.rs"]
|
||||
#[cfg(wrap_proc_macro)]
|
||||
mod imp;
|
||||
|
@ -128,7 +129,7 @@ pub struct LexError {
|
|||
impl TokenStream {
|
||||
fn _new(inner: imp::TokenStream) -> TokenStream {
|
||||
TokenStream {
|
||||
inner,
|
||||
inner: inner,
|
||||
_marker: marker::PhantomData,
|
||||
}
|
||||
}
|
||||
|
@ -145,6 +146,11 @@ impl TokenStream {
|
|||
TokenStream::_new(imp::TokenStream::new())
|
||||
}
|
||||
|
||||
#[deprecated(since = "0.4.4", note = "please use TokenStream::new")]
|
||||
pub fn empty() -> TokenStream {
|
||||
TokenStream::new()
|
||||
}
|
||||
|
||||
/// Checks if this `TokenStream` is empty.
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.inner.is_empty()
|
||||
|
@ -193,12 +199,6 @@ impl From<TokenStream> for proc_macro::TokenStream {
|
|||
}
|
||||
}
|
||||
|
||||
impl From<TokenTree> for TokenStream {
|
||||
fn from(token: TokenTree) -> Self {
|
||||
TokenStream::_new(imp::TokenStream::from(token))
|
||||
}
|
||||
}
|
||||
|
||||
impl Extend<TokenTree> for TokenStream {
|
||||
fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
|
||||
self.inner.extend(streams)
|
||||
|
@ -261,7 +261,7 @@ pub struct SourceFile {
|
|||
impl SourceFile {
|
||||
fn _new(inner: imp::SourceFile) -> Self {
|
||||
SourceFile {
|
||||
inner,
|
||||
inner: inner,
|
||||
_marker: marker::PhantomData,
|
||||
}
|
||||
}
|
||||
|
@ -301,7 +301,6 @@ impl fmt::Debug for SourceFile {
|
|||
///
|
||||
/// This type is semver exempt and not exposed by default.
|
||||
#[cfg(span_locations)]
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
|
||||
pub struct LineColumn {
|
||||
/// The 1-indexed line in the source file on which the span starts or ends
|
||||
/// (inclusive).
|
||||
|
@ -321,7 +320,7 @@ pub struct Span {
|
|||
impl Span {
|
||||
fn _new(inner: imp::Span) -> Span {
|
||||
Span {
|
||||
inner,
|
||||
inner: inner,
|
||||
_marker: marker::PhantomData,
|
||||
}
|
||||
}
|
||||
|
@ -404,7 +403,10 @@ impl Span {
|
|||
#[cfg(span_locations)]
|
||||
pub fn start(&self) -> LineColumn {
|
||||
let imp::LineColumn { line, column } = self.inner.start();
|
||||
LineColumn { line, column }
|
||||
LineColumn {
|
||||
line: line,
|
||||
column: column,
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the ending line/column in the source file for this span.
|
||||
|
@ -413,23 +415,23 @@ impl Span {
|
|||
#[cfg(span_locations)]
|
||||
pub fn end(&self) -> LineColumn {
|
||||
let imp::LineColumn { line, column } = self.inner.end();
|
||||
LineColumn { line, column }
|
||||
LineColumn {
|
||||
line: line,
|
||||
column: column,
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a new span encompassing `self` and `other`.
|
||||
///
|
||||
/// Returns `None` if `self` and `other` are from different files.
|
||||
///
|
||||
/// Warning: the underlying [`proc_macro::Span::join`] method is
|
||||
/// nightly-only. When called from within a procedural macro not using a
|
||||
/// nightly compiler, this method will always return `None`.
|
||||
///
|
||||
/// [`proc_macro::Span::join`]: https://doc.rust-lang.org/proc_macro/struct.Span.html#method.join
|
||||
/// This method is semver exempt and not exposed by default.
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
pub fn join(&self, other: Span) -> Option<Span> {
|
||||
self.inner.join(other.inner).map(Span::_new)
|
||||
}
|
||||
|
||||
/// Compares two spans to see if they're equal.
|
||||
/// Compares to spans to see if they're equal.
|
||||
///
|
||||
/// This method is semver exempt and not exposed by default.
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
|
@ -573,7 +575,7 @@ pub enum Delimiter {
|
|||
|
||||
impl Group {
|
||||
fn _new(inner: imp::Group) -> Self {
|
||||
Group { inner }
|
||||
Group { inner: inner }
|
||||
}
|
||||
|
||||
fn _new_stable(inner: fallback::Group) -> Self {
|
||||
|
@ -623,6 +625,7 @@ impl Group {
|
|||
/// pub fn span_open(&self) -> Span {
|
||||
/// ^
|
||||
/// ```
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
pub fn span_open(&self) -> Span {
|
||||
Span::_new(self.inner.span_open())
|
||||
}
|
||||
|
@ -633,6 +636,7 @@ impl Group {
|
|||
/// pub fn span_close(&self) -> Span {
|
||||
/// ^
|
||||
/// ```
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
pub fn span_close(&self) -> Span {
|
||||
Span::_new(self.inner.span_close())
|
||||
}
|
||||
|
@ -680,7 +684,7 @@ pub struct Punct {
|
|||
pub enum Spacing {
|
||||
/// E.g. `+` is `Alone` in `+ =`, `+ident` or `+()`.
|
||||
Alone,
|
||||
/// E.g. `+` is `Joint` in `+=` or `'` is `Joint` in `'#`.
|
||||
/// E.g. `+` is `Joint` in `+=` or `'#`.
|
||||
///
|
||||
/// Additionally, single quote `'` can join with identifiers to form
|
||||
/// lifetimes `'ident`.
|
||||
|
@ -697,8 +701,8 @@ impl Punct {
|
|||
/// which can be further configured with the `set_span` method below.
|
||||
pub fn new(op: char, spacing: Spacing) -> Punct {
|
||||
Punct {
|
||||
op,
|
||||
spacing,
|
||||
op: op,
|
||||
spacing: spacing,
|
||||
span: Span::call_site(),
|
||||
}
|
||||
}
|
||||
|
@ -760,7 +764,7 @@ impl fmt::Debug for Punct {
|
|||
/// Rust keywords. Use `input.call(Ident::parse_any)` when parsing to match the
|
||||
/// behaviour of `Ident::new`.
|
||||
///
|
||||
/// [`Parse`]: https://docs.rs/syn/1.0/syn/parse/trait.Parse.html
|
||||
/// [`Parse`]: https://docs.rs/syn/0.15/syn/parse/trait.Parse.html
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
|
@ -768,7 +772,7 @@ impl fmt::Debug for Punct {
|
|||
/// A span must be provided explicitly which governs the name resolution
|
||||
/// behavior of the resulting identifier.
|
||||
///
|
||||
/// ```
|
||||
/// ```edition2018
|
||||
/// use proc_macro2::{Ident, Span};
|
||||
///
|
||||
/// fn main() {
|
||||
|
@ -780,7 +784,7 @@ impl fmt::Debug for Punct {
|
|||
///
|
||||
/// An ident can be interpolated into a token stream using the `quote!` macro.
|
||||
///
|
||||
/// ```
|
||||
/// ```edition2018
|
||||
/// use proc_macro2::{Ident, Span};
|
||||
/// use quote::quote;
|
||||
///
|
||||
|
@ -799,7 +803,7 @@ impl fmt::Debug for Punct {
|
|||
/// A string representation of the ident is available through the `to_string()`
|
||||
/// method.
|
||||
///
|
||||
/// ```
|
||||
/// ```edition2018
|
||||
/// # use proc_macro2::{Ident, Span};
|
||||
/// #
|
||||
/// # let ident = Ident::new("another_identifier", Span::call_site());
|
||||
|
@ -819,7 +823,7 @@ pub struct Ident {
|
|||
impl Ident {
|
||||
fn _new(inner: imp::Ident) -> Ident {
|
||||
Ident {
|
||||
inner,
|
||||
inner: inner,
|
||||
_marker: marker::PhantomData,
|
||||
}
|
||||
}
|
||||
|
@ -849,12 +853,7 @@ impl Ident {
|
|||
/// # Panics
|
||||
///
|
||||
/// Panics if the input string is neither a keyword nor a legal variable
|
||||
/// name. If you are not sure whether the string contains an identifier and
|
||||
/// need to handle an error case, use
|
||||
/// <a href="https://docs.rs/syn/1.0/syn/fn.parse_str.html"><code
|
||||
/// style="padding-right:0;">syn::parse_str</code></a><code
|
||||
/// style="padding-left:0;">::<Ident></code>
|
||||
/// rather than `Ident::new`.
|
||||
/// name.
|
||||
pub fn new(string: &str, span: Span) -> Ident {
|
||||
Ident::_new(imp::Ident::new(string, span.inner))
|
||||
}
|
||||
|
@ -987,7 +986,7 @@ macro_rules! unsuffixed_int_literals {
|
|||
impl Literal {
|
||||
fn _new(inner: imp::Literal) -> Literal {
|
||||
Literal {
|
||||
inner,
|
||||
inner: inner,
|
||||
_marker: marker::PhantomData,
|
||||
}
|
||||
}
|
||||
|
@ -1004,62 +1003,44 @@ impl Literal {
|
|||
u16_suffixed => u16,
|
||||
u32_suffixed => u32,
|
||||
u64_suffixed => u64,
|
||||
u128_suffixed => u128,
|
||||
usize_suffixed => usize,
|
||||
i8_suffixed => i8,
|
||||
i16_suffixed => i16,
|
||||
i32_suffixed => i32,
|
||||
i64_suffixed => i64,
|
||||
i128_suffixed => i128,
|
||||
isize_suffixed => isize,
|
||||
}
|
||||
|
||||
#[cfg(u128)]
|
||||
suffixed_int_literals! {
|
||||
u128_suffixed => u128,
|
||||
i128_suffixed => i128,
|
||||
}
|
||||
|
||||
unsuffixed_int_literals! {
|
||||
u8_unsuffixed => u8,
|
||||
u16_unsuffixed => u16,
|
||||
u32_unsuffixed => u32,
|
||||
u64_unsuffixed => u64,
|
||||
u128_unsuffixed => u128,
|
||||
usize_unsuffixed => usize,
|
||||
i8_unsuffixed => i8,
|
||||
i16_unsuffixed => i16,
|
||||
i32_unsuffixed => i32,
|
||||
i64_unsuffixed => i64,
|
||||
i128_unsuffixed => i128,
|
||||
isize_unsuffixed => isize,
|
||||
}
|
||||
|
||||
/// Creates a new unsuffixed floating-point literal.
|
||||
///
|
||||
/// This constructor is similar to those like `Literal::i8_unsuffixed` where
|
||||
/// the float's value is emitted directly into the token but no suffix is
|
||||
/// used, so it may be inferred to be a `f64` later in the compiler.
|
||||
/// Literals created from negative numbers may not survive rountrips through
|
||||
/// `TokenStream` or strings and may be broken into two tokens (`-` and
|
||||
/// positive literal).
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// This function requires that the specified float is finite, for example
|
||||
/// if it is infinity or NaN this function will panic.
|
||||
#[cfg(u128)]
|
||||
unsuffixed_int_literals! {
|
||||
u128_unsuffixed => u128,
|
||||
i128_unsuffixed => i128,
|
||||
}
|
||||
|
||||
pub fn f64_unsuffixed(f: f64) -> Literal {
|
||||
assert!(f.is_finite());
|
||||
Literal::_new(imp::Literal::f64_unsuffixed(f))
|
||||
}
|
||||
|
||||
/// Creates a new suffixed floating-point literal.
|
||||
///
|
||||
/// This constructor will create a literal like `1.0f64` where the value
|
||||
/// specified is the preceding part of the token and `f64` is the suffix of
|
||||
/// the token. This token will always be inferred to be an `f64` in the
|
||||
/// compiler. Literals created from negative numbers may not survive
|
||||
/// rountrips through `TokenStream` or strings and may be broken into two
|
||||
/// tokens (`-` and positive literal).
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// This function requires that the specified float is finite, for example
|
||||
/// if it is infinity or NaN this function will panic.
|
||||
pub fn f64_suffixed(f: f64) -> Literal {
|
||||
assert!(f.is_finite());
|
||||
Literal::_new(imp::Literal::f64_suffixed(f))
|
||||
|
@ -1083,61 +1064,30 @@ impl Literal {
|
|||
Literal::_new(imp::Literal::f32_unsuffixed(f))
|
||||
}
|
||||
|
||||
/// Creates a new suffixed floating-point literal.
|
||||
///
|
||||
/// This constructor will create a literal like `1.0f32` where the value
|
||||
/// specified is the preceding part of the token and `f32` is the suffix of
|
||||
/// the token. This token will always be inferred to be an `f32` in the
|
||||
/// compiler. Literals created from negative numbers may not survive
|
||||
/// rountrips through `TokenStream` or strings and may be broken into two
|
||||
/// tokens (`-` and positive literal).
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// This function requires that the specified float is finite, for example
|
||||
/// if it is infinity or NaN this function will panic.
|
||||
pub fn f32_suffixed(f: f32) -> Literal {
|
||||
assert!(f.is_finite());
|
||||
Literal::_new(imp::Literal::f32_suffixed(f))
|
||||
}
|
||||
|
||||
/// String literal.
|
||||
pub fn string(string: &str) -> Literal {
|
||||
Literal::_new(imp::Literal::string(string))
|
||||
}
|
||||
|
||||
/// Character literal.
|
||||
pub fn character(ch: char) -> Literal {
|
||||
Literal::_new(imp::Literal::character(ch))
|
||||
}
|
||||
|
||||
/// Byte string literal.
|
||||
pub fn byte_string(s: &[u8]) -> Literal {
|
||||
Literal::_new(imp::Literal::byte_string(s))
|
||||
}
|
||||
|
||||
/// Returns the span encompassing this literal.
|
||||
pub fn span(&self) -> Span {
|
||||
Span::_new(self.inner.span())
|
||||
}
|
||||
|
||||
/// Configures the span associated for this literal.
|
||||
pub fn set_span(&mut self, span: Span) {
|
||||
self.inner.set_span(span.inner);
|
||||
}
|
||||
|
||||
/// Returns a `Span` that is a subset of `self.span()` containing only
|
||||
/// the source bytes in range `range`. Returns `None` if the would-be
|
||||
/// trimmed span is outside the bounds of `self`.
|
||||
///
|
||||
/// Warning: the underlying [`proc_macro::Literal::subspan`] method is
|
||||
/// nightly-only. When called from within a procedural macro not using a
|
||||
/// nightly compiler, this method will always return `None`.
|
||||
///
|
||||
/// [`proc_macro::Literal::subspan`]: https://doc.rust-lang.org/proc_macro/struct.Literal.html#method.subspan
|
||||
pub fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> {
|
||||
self.inner.subspan(range).map(Span::_new)
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for Literal {
|
||||
|
@ -1158,14 +1108,14 @@ pub mod token_stream {
|
|||
use std::marker;
|
||||
use std::rc::Rc;
|
||||
|
||||
pub use crate::TokenStream;
|
||||
use crate::{imp, TokenTree};
|
||||
use imp;
|
||||
pub use TokenStream;
|
||||
use TokenTree;
|
||||
|
||||
/// An iterator over `TokenStream`'s `TokenTree`s.
|
||||
///
|
||||
/// The iteration is "shallow", e.g. the iterator doesn't recurse into
|
||||
/// delimited groups, and returns whole groups as token trees.
|
||||
#[derive(Clone)]
|
||||
pub struct IntoIter {
|
||||
inner: imp::TokenTreeIter,
|
||||
_marker: marker::PhantomData<Rc<()>>,
|
||||
|
|
|
@ -1,9 +1,11 @@
|
|||
//! Adapted from [`nom`](https://github.com/Geal/nom).
|
||||
|
||||
use crate::fallback::LexError;
|
||||
use std::str::{Bytes, CharIndices, Chars};
|
||||
|
||||
use unicode_xid::UnicodeXID;
|
||||
|
||||
use fallback::LexError;
|
||||
|
||||
#[derive(Copy, Clone, Eq, PartialEq)]
|
||||
pub struct Cursor<'a> {
|
||||
pub rest: &'a str,
|
||||
|
@ -93,7 +95,7 @@ pub fn whitespace(input: Cursor) -> PResult<()> {
|
|||
}
|
||||
}
|
||||
match bytes[i] {
|
||||
b' ' | 0x09..=0x0d => {
|
||||
b' ' | 0x09...0x0d => {
|
||||
i += 1;
|
||||
continue;
|
||||
}
|
||||
|
|
|
@ -1,12 +1,14 @@
|
|||
use std::fmt;
|
||||
use std::iter;
|
||||
use std::ops::RangeBounds;
|
||||
use std::panic::{self, PanicInfo};
|
||||
#[cfg(super_unstable)]
|
||||
use std::path::PathBuf;
|
||||
use std::str::FromStr;
|
||||
|
||||
use crate::{fallback, Delimiter, Punct, Spacing, TokenTree};
|
||||
use fallback;
|
||||
use proc_macro;
|
||||
|
||||
use {Delimiter, Punct, Spacing, TokenTree};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub enum TokenStream {
|
||||
|
@ -23,7 +25,7 @@ fn nightly_works() -> bool {
|
|||
use std::sync::atomic::*;
|
||||
use std::sync::Once;
|
||||
|
||||
static WORKS: AtomicUsize = AtomicUsize::new(0);
|
||||
static WORKS: AtomicUsize = ATOMIC_USIZE_INIT;
|
||||
static INIT: Once = Once::new();
|
||||
|
||||
match WORKS.load(Ordering::SeqCst) {
|
||||
|
@ -57,7 +59,7 @@ fn nightly_works() -> bool {
|
|||
// not occur, they need to call e.g. `proc_macro2::Span::call_site()` from
|
||||
// the main thread before launching any other threads.
|
||||
INIT.call_once(|| {
|
||||
type PanicHook = dyn Fn(&PanicInfo) + Sync + Send + 'static;
|
||||
type PanicHook = Fn(&PanicInfo) + Sync + Send + 'static;
|
||||
|
||||
let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ });
|
||||
let sanity_check = &*null_hook as *const PanicHook;
|
||||
|
@ -197,6 +199,17 @@ impl iter::FromIterator<TokenStream> for TokenStream {
|
|||
fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
|
||||
let mut streams = streams.into_iter();
|
||||
match streams.next() {
|
||||
#[cfg(slow_extend)]
|
||||
Some(TokenStream::Compiler(first)) => {
|
||||
let stream = iter::once(first)
|
||||
.chain(streams.map(|s| match s {
|
||||
TokenStream::Compiler(s) => s,
|
||||
TokenStream::Fallback(_) => mismatch(),
|
||||
}))
|
||||
.collect();
|
||||
TokenStream::Compiler(stream)
|
||||
}
|
||||
#[cfg(not(slow_extend))]
|
||||
Some(TokenStream::Compiler(mut first)) => {
|
||||
first.extend(streams.map(|s| match s {
|
||||
TokenStream::Compiler(s) => s,
|
||||
|
@ -220,11 +233,27 @@ impl Extend<TokenTree> for TokenStream {
|
|||
fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
|
||||
match self {
|
||||
TokenStream::Compiler(tts) => {
|
||||
tts.extend(
|
||||
streams
|
||||
.into_iter()
|
||||
.map(|t| TokenStream::from(t).unwrap_nightly()),
|
||||
);
|
||||
#[cfg(not(slow_extend))]
|
||||
{
|
||||
tts.extend(
|
||||
streams
|
||||
.into_iter()
|
||||
.map(|t| TokenStream::from(t).unwrap_nightly()),
|
||||
);
|
||||
}
|
||||
#[cfg(slow_extend)]
|
||||
{
|
||||
*tts =
|
||||
tts.clone()
|
||||
.into_iter()
|
||||
.chain(streams.into_iter().map(TokenStream::from).flat_map(
|
||||
|t| match t {
|
||||
TokenStream::Compiler(tts) => tts.into_iter(),
|
||||
_ => mismatch(),
|
||||
},
|
||||
))
|
||||
.collect();
|
||||
}
|
||||
}
|
||||
TokenStream::Fallback(tts) => tts.extend(streams),
|
||||
}
|
||||
|
@ -288,7 +317,6 @@ impl fmt::Debug for LexError {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub enum TokenTreeIter {
|
||||
Compiler(proc_macro::token_stream::IntoIter),
|
||||
Fallback(fallback::TokenTreeIter),
|
||||
|
@ -315,18 +343,18 @@ impl Iterator for TokenTreeIter {
|
|||
TokenTreeIter::Fallback(iter) => return iter.next(),
|
||||
};
|
||||
Some(match token {
|
||||
proc_macro::TokenTree::Group(tt) => crate::Group::_new(Group::Compiler(tt)).into(),
|
||||
proc_macro::TokenTree::Group(tt) => ::Group::_new(Group::Compiler(tt)).into(),
|
||||
proc_macro::TokenTree::Punct(tt) => {
|
||||
let spacing = match tt.spacing() {
|
||||
proc_macro::Spacing::Joint => Spacing::Joint,
|
||||
proc_macro::Spacing::Alone => Spacing::Alone,
|
||||
};
|
||||
let mut o = Punct::new(tt.as_char(), spacing);
|
||||
o.set_span(crate::Span::_new(Span::Compiler(tt.span())));
|
||||
o.set_span(::Span::_new(Span::Compiler(tt.span())));
|
||||
o.into()
|
||||
}
|
||||
proc_macro::TokenTree::Ident(s) => crate::Ident::_new(Ident::Compiler(s)).into(),
|
||||
proc_macro::TokenTree::Literal(l) => crate::Literal::_new(Literal::Compiler(l)).into(),
|
||||
proc_macro::TokenTree::Ident(s) => ::Ident::_new(Ident::Compiler(s)).into(),
|
||||
proc_macro::TokenTree::Literal(l) => ::Literal::_new(Literal::Compiler(l)).into(),
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -449,12 +477,12 @@ impl Span {
|
|||
#[cfg(any(super_unstable, feature = "span-locations"))]
|
||||
pub fn start(&self) -> LineColumn {
|
||||
match self {
|
||||
#[cfg(proc_macro_span)]
|
||||
#[cfg(nightly)]
|
||||
Span::Compiler(s) => {
|
||||
let proc_macro::LineColumn { line, column } = s.start();
|
||||
LineColumn { line, column }
|
||||
}
|
||||
#[cfg(not(proc_macro_span))]
|
||||
#[cfg(not(nightly))]
|
||||
Span::Compiler(_) => LineColumn { line: 0, column: 0 },
|
||||
Span::Fallback(s) => {
|
||||
let fallback::LineColumn { line, column } = s.start();
|
||||
|
@ -466,12 +494,12 @@ impl Span {
|
|||
#[cfg(any(super_unstable, feature = "span-locations"))]
|
||||
pub fn end(&self) -> LineColumn {
|
||||
match self {
|
||||
#[cfg(proc_macro_span)]
|
||||
#[cfg(nightly)]
|
||||
Span::Compiler(s) => {
|
||||
let proc_macro::LineColumn { line, column } = s.end();
|
||||
LineColumn { line, column }
|
||||
}
|
||||
#[cfg(not(proc_macro_span))]
|
||||
#[cfg(not(nightly))]
|
||||
Span::Compiler(_) => LineColumn { line: 0, column: 0 },
|
||||
Span::Fallback(s) => {
|
||||
let fallback::LineColumn { line, column } = s.end();
|
||||
|
@ -480,9 +508,9 @@ impl Span {
|
|||
}
|
||||
}
|
||||
|
||||
#[cfg(super_unstable)]
|
||||
pub fn join(&self, other: Span) -> Option<Span> {
|
||||
let ret = match (self, other) {
|
||||
#[cfg(proc_macro_span)]
|
||||
(Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.join(b)?),
|
||||
(Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.join(b)?),
|
||||
_ => return None,
|
||||
|
@ -507,9 +535,9 @@ impl Span {
|
|||
}
|
||||
}
|
||||
|
||||
impl From<proc_macro::Span> for crate::Span {
|
||||
fn from(proc_span: proc_macro::Span) -> crate::Span {
|
||||
crate::Span::_new(Span::Compiler(proc_span))
|
||||
impl From<proc_macro::Span> for ::Span {
|
||||
fn from(proc_span: proc_macro::Span) -> ::Span {
|
||||
::Span::_new(Span::Compiler(proc_span))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -587,22 +615,18 @@ impl Group {
|
|||
}
|
||||
}
|
||||
|
||||
#[cfg(super_unstable)]
|
||||
pub fn span_open(&self) -> Span {
|
||||
match self {
|
||||
#[cfg(proc_macro_span)]
|
||||
Group::Compiler(g) => Span::Compiler(g.span_open()),
|
||||
#[cfg(not(proc_macro_span))]
|
||||
Group::Compiler(g) => Span::Compiler(g.span()),
|
||||
Group::Fallback(g) => Span::Fallback(g.span_open()),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(super_unstable)]
|
||||
pub fn span_close(&self) -> Span {
|
||||
match self {
|
||||
#[cfg(proc_macro_span)]
|
||||
Group::Compiler(g) => Span::Compiler(g.span_close()),
|
||||
#[cfg(not(proc_macro_span))]
|
||||
Group::Compiler(g) => Span::Compiler(g.span()),
|
||||
Group::Fallback(g) => Span::Fallback(g.span_close()),
|
||||
}
|
||||
}
|
||||
|
@ -778,34 +802,42 @@ impl Literal {
|
|||
u16_suffixed => u16,
|
||||
u32_suffixed => u32,
|
||||
u64_suffixed => u64,
|
||||
u128_suffixed => u128,
|
||||
usize_suffixed => usize,
|
||||
i8_suffixed => i8,
|
||||
i16_suffixed => i16,
|
||||
i32_suffixed => i32,
|
||||
i64_suffixed => i64,
|
||||
i128_suffixed => i128,
|
||||
isize_suffixed => isize,
|
||||
|
||||
f32_suffixed => f32,
|
||||
f64_suffixed => f64,
|
||||
}
|
||||
|
||||
#[cfg(u128)]
|
||||
suffixed_numbers! {
|
||||
i128_suffixed => i128,
|
||||
u128_suffixed => u128,
|
||||
}
|
||||
|
||||
unsuffixed_integers! {
|
||||
u8_unsuffixed => u8,
|
||||
u16_unsuffixed => u16,
|
||||
u32_unsuffixed => u32,
|
||||
u64_unsuffixed => u64,
|
||||
u128_unsuffixed => u128,
|
||||
usize_unsuffixed => usize,
|
||||
i8_unsuffixed => i8,
|
||||
i16_unsuffixed => i16,
|
||||
i32_unsuffixed => i32,
|
||||
i64_unsuffixed => i64,
|
||||
i128_unsuffixed => i128,
|
||||
isize_unsuffixed => isize,
|
||||
}
|
||||
|
||||
#[cfg(u128)]
|
||||
unsuffixed_integers! {
|
||||
i128_unsuffixed => i128,
|
||||
u128_unsuffixed => u128,
|
||||
}
|
||||
|
||||
pub fn f32_unsuffixed(f: f32) -> Literal {
|
||||
if nightly_works() {
|
||||
Literal::Compiler(proc_macro::Literal::f32_unsuffixed(f))
|
||||
|
@ -861,16 +893,6 @@ impl Literal {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> {
|
||||
match self {
|
||||
#[cfg(proc_macro_span)]
|
||||
Literal::Compiler(lit) => lit.subspan(range).map(Span::Compiler),
|
||||
#[cfg(not(proc_macro_span))]
|
||||
Literal::Compiler(_lit) => None,
|
||||
Literal::Fallback(lit) => lit.subspan(range).map(Span::Fallback),
|
||||
}
|
||||
}
|
||||
|
||||
fn unwrap_nightly(self) -> proc_macro::Literal {
|
||||
match self {
|
||||
Literal::Compiler(s) => s,
|
||||
|
|
|
@ -1,8 +0,0 @@
|
|||
#[test]
|
||||
#[ignore]
|
||||
fn make_sure_no_proc_macro() {
|
||||
assert!(
|
||||
!cfg!(feature = "proc-macro"),
|
||||
"still compiled with proc_macro?"
|
||||
);
|
||||
}
|
|
@ -1,3 +1,5 @@
|
|||
extern crate proc_macro2;
|
||||
|
||||
use proc_macro2::*;
|
||||
|
||||
macro_rules! assert_impl {
|
||||
|
|
|
@ -1,9 +1,11 @@
|
|||
extern crate proc_macro2;
|
||||
|
||||
use std::str::{self, FromStr};
|
||||
|
||||
use proc_macro2::{Ident, Literal, Spacing, Span, TokenStream, TokenTree};
|
||||
|
||||
#[test]
|
||||
fn idents() {
|
||||
fn terms() {
|
||||
assert_eq!(
|
||||
Ident::new("String", Span::call_site()).to_string(),
|
||||
"String"
|
||||
|
@ -14,7 +16,7 @@ fn idents() {
|
|||
|
||||
#[test]
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
fn raw_idents() {
|
||||
fn raw_terms() {
|
||||
assert_eq!(
|
||||
Ident::new_raw("String", Span::call_site()).to_string(),
|
||||
"r#String"
|
||||
|
@ -25,37 +27,37 @@ fn raw_idents() {
|
|||
|
||||
#[test]
|
||||
#[should_panic(expected = "Ident is not allowed to be empty; use Option<Ident>")]
|
||||
fn ident_empty() {
|
||||
fn term_empty() {
|
||||
Ident::new("", Span::call_site());
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic(expected = "Ident cannot be a number; use Literal instead")]
|
||||
fn ident_number() {
|
||||
fn term_number() {
|
||||
Ident::new("255", Span::call_site());
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic(expected = "\"a#\" is not a valid Ident")]
|
||||
fn ident_invalid() {
|
||||
fn term_invalid() {
|
||||
Ident::new("a#", Span::call_site());
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic(expected = "not a valid Ident")]
|
||||
fn raw_ident_empty() {
|
||||
fn raw_term_empty() {
|
||||
Ident::new("r#", Span::call_site());
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic(expected = "not a valid Ident")]
|
||||
fn raw_ident_number() {
|
||||
fn raw_term_number() {
|
||||
Ident::new("r#255", Span::call_site());
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic(expected = "\"r#a#\" is not a valid Ident")]
|
||||
fn raw_ident_invalid() {
|
||||
fn raw_term_invalid() {
|
||||
Ident::new("r#a#", Span::call_site());
|
||||
}
|
||||
|
||||
|
@ -78,40 +80,12 @@ fn lifetime_invalid() {
|
|||
}
|
||||
|
||||
#[test]
|
||||
fn literal_string() {
|
||||
fn literals() {
|
||||
assert_eq!(Literal::string("foo").to_string(), "\"foo\"");
|
||||
assert_eq!(Literal::string("\"").to_string(), "\"\\\"\"");
|
||||
assert_eq!(Literal::string("didn't").to_string(), "\"didn't\"");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn literal_character() {
|
||||
assert_eq!(Literal::character('x').to_string(), "'x'");
|
||||
assert_eq!(Literal::character('\'').to_string(), "'\\''");
|
||||
assert_eq!(Literal::character('"').to_string(), "'\"'");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn literal_float() {
|
||||
assert_eq!(Literal::f32_unsuffixed(10.0).to_string(), "10.0");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn literal_suffix() {
|
||||
fn token_count(p: &str) -> usize {
|
||||
p.parse::<TokenStream>().unwrap().into_iter().count()
|
||||
}
|
||||
|
||||
assert_eq!(token_count("999u256"), 1);
|
||||
assert_eq!(token_count("999r#u256"), 3);
|
||||
assert_eq!(token_count("1."), 1);
|
||||
assert_eq!(token_count("1.f32"), 3);
|
||||
assert_eq!(token_count("1.0_0"), 1);
|
||||
assert_eq!(token_count("1._0"), 3);
|
||||
assert_eq!(token_count("1._m"), 3);
|
||||
assert_eq!(token_count("\"\"s"), 1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn roundtrip() {
|
||||
fn roundtrip(p: &str) {
|
||||
|
@ -139,9 +113,6 @@ fn roundtrip() {
|
|||
9
|
||||
0
|
||||
0xffffffffffffffffffffffffffffffff
|
||||
1x
|
||||
1u80
|
||||
1f320
|
||||
",
|
||||
);
|
||||
roundtrip("'a");
|
||||
|
@ -158,6 +129,9 @@ fn fail() {
|
|||
panic!("should have failed to parse: {}\n{:#?}", p, s);
|
||||
}
|
||||
}
|
||||
fail("1x");
|
||||
fail("1u80");
|
||||
fail("1f320");
|
||||
fail("' static");
|
||||
fail("r#1");
|
||||
fail("r#_");
|
||||
|
@ -360,27 +334,6 @@ fn test_debug_tokenstream() {
|
|||
|
||||
#[cfg(not(procmacro2_semver_exempt))]
|
||||
let expected = "\
|
||||
TokenStream [
|
||||
Group {
|
||||
delimiter: Bracket,
|
||||
stream: TokenStream [
|
||||
Ident {
|
||||
sym: a,
|
||||
},
|
||||
Punct {
|
||||
op: '+',
|
||||
spacing: Alone,
|
||||
},
|
||||
Literal {
|
||||
lit: 1,
|
||||
},
|
||||
],
|
||||
},
|
||||
]\
|
||||
";
|
||||
|
||||
#[cfg(not(procmacro2_semver_exempt))]
|
||||
let expected_before_trailing_commas = "\
|
||||
TokenStream [
|
||||
Group {
|
||||
delimiter: Bracket,
|
||||
|
@ -402,31 +355,6 @@ TokenStream [
|
|||
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
let expected = "\
|
||||
TokenStream [
|
||||
Group {
|
||||
delimiter: Bracket,
|
||||
stream: TokenStream [
|
||||
Ident {
|
||||
sym: a,
|
||||
span: bytes(2..3),
|
||||
},
|
||||
Punct {
|
||||
op: '+',
|
||||
spacing: Alone,
|
||||
span: bytes(4..5),
|
||||
},
|
||||
Literal {
|
||||
lit: 1,
|
||||
span: bytes(6..7),
|
||||
},
|
||||
],
|
||||
span: bytes(1..8),
|
||||
},
|
||||
]\
|
||||
";
|
||||
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
let expected_before_trailing_commas = "\
|
||||
TokenStream [
|
||||
Group {
|
||||
delimiter: Bracket,
|
||||
|
@ -450,12 +378,7 @@ TokenStream [
|
|||
]\
|
||||
";
|
||||
|
||||
let actual = format!("{:#?}", tts);
|
||||
if actual.ends_with(",\n]") {
|
||||
assert_eq!(expected, actual);
|
||||
} else {
|
||||
assert_eq!(expected_before_trailing_commas, actual);
|
||||
}
|
||||
assert_eq!(expected, format!("{:#?}", tts));
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
|
@ -1 +0,0 @@
|
|||
{"files":{"Cargo.toml":"68f4dc89836a05a2347086addab1849567ef8073c552ec0dfca8f96fd20550f9","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","README.md":"d9392d4c7af3bf9714f0a95801d64de46ffd4558cdfeea0eb85b414e555abb72","src/ext.rs":"03919239a20f8393288783a21bf6fdee12e405d13d162c9faa6f8f5ce54b003b","src/lib.rs":"5345b4d2e6f923724cec35c62d7397e6f04d5503d2d813bff7bbaa7ffc39a9cf","src/to_tokens.rs":"0dcd15cba2aa83abeb47b9a1babce7a29643b5efa2fe620b070cb37bb21a84f1","tests/conditional/integer128.rs":"d83e21a91efbaa801a82ae499111bdda2d31edaa620e78c0199eba42d69c9ee6","tests/test.rs":"810013d7fd77b738abd0ace90ce2f2f3e219c757652eabab29bc1c0ce4a73b24"},"package":"cdd8e04bd9c52e0342b406469d494fcb033be4bdbe5c606016defbb1681411e1"}
|
|
@ -1,33 +0,0 @@
|
|||
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
|
||||
#
|
||||
# When uploading crates to the registry Cargo will automatically
|
||||
# "normalize" Cargo.toml files for maximal compatibility
|
||||
# with all versions of Cargo and also rewrite `path` dependencies
|
||||
# to registry (e.g. crates.io) dependencies
|
||||
#
|
||||
# If you believe there's an error in this file please file an
|
||||
# issue against the rust-lang/cargo repository. If you're
|
||||
# editing this file be aware that the upstream Cargo.toml
|
||||
# will likely look very different (and much more reasonable)
|
||||
|
||||
[package]
|
||||
name = "quote"
|
||||
version = "0.6.11"
|
||||
authors = ["David Tolnay <dtolnay@gmail.com>"]
|
||||
include = ["Cargo.toml", "src/**/*.rs", "tests/**/*.rs", "README.md", "LICENSE-APACHE", "LICENSE-MIT"]
|
||||
description = "Quasi-quoting macro quote!(...)"
|
||||
documentation = "https://docs.rs/quote/"
|
||||
readme = "README.md"
|
||||
keywords = ["syn"]
|
||||
categories = ["development-tools::procedural-macro-helpers"]
|
||||
license = "MIT/Apache-2.0"
|
||||
repository = "https://github.com/dtolnay/quote"
|
||||
[dependencies.proc-macro2]
|
||||
version = "0.4.21"
|
||||
default-features = false
|
||||
|
||||
[features]
|
||||
default = ["proc-macro"]
|
||||
proc-macro = ["proc-macro2/proc-macro"]
|
||||
[badges.travis-ci]
|
||||
repository = "dtolnay/quote"
|
|
@ -1,201 +0,0 @@
|
|||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
|
@ -1,25 +0,0 @@
|
|||
Copyright (c) 2016 The Rust Project Developers
|
||||
|
||||
Permission is hereby granted, free of charge, to any
|
||||
person obtaining a copy of this software and associated
|
||||
documentation files (the "Software"), to deal in the
|
||||
Software without restriction, including without
|
||||
limitation the rights to use, copy, modify, merge,
|
||||
publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software
|
||||
is furnished to do so, subject to the following
|
||||
conditions:
|
||||
|
||||
The above copyright notice and this permission notice
|
||||
shall be included in all copies or substantial portions
|
||||
of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
|
||||
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
|
||||
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
|
||||
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
|
||||
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
||||
DEALINGS IN THE SOFTWARE.
|
|
@ -1,241 +0,0 @@
|
|||
Rust Quasi-Quoting
|
||||
==================
|
||||
|
||||
[![Build Status](https://api.travis-ci.org/dtolnay/quote.svg?branch=master)](https://travis-ci.org/dtolnay/quote)
|
||||
[![Latest Version](https://img.shields.io/crates/v/quote.svg)](https://crates.io/crates/quote)
|
||||
[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/quote/)
|
||||
|
||||
This crate provides the [`quote!`] macro for turning Rust syntax tree data
|
||||
structures into tokens of source code.
|
||||
|
||||
[`quote!`]: https://docs.rs/quote/0.6/quote/macro.quote.html
|
||||
|
||||
Procedural macros in Rust receive a stream of tokens as input, execute arbitrary
|
||||
Rust code to determine how to manipulate those tokens, and produce a stream of
|
||||
tokens to hand back to the compiler to compile into the caller's crate.
|
||||
Quasi-quoting is a solution to one piece of that -- producing tokens to return
|
||||
to the compiler.
|
||||
|
||||
The idea of quasi-quoting is that we write *code* that we treat as *data*.
|
||||
Within the `quote!` macro, we can write what looks like code to our text editor
|
||||
or IDE. We get all the benefits of the editor's brace matching, syntax
|
||||
highlighting, indentation, and maybe autocompletion. But rather than compiling
|
||||
that as code into the current crate, we can treat it as data, pass it around,
|
||||
mutate it, and eventually hand it back to the compiler as tokens to compile into
|
||||
the macro caller's crate.
|
||||
|
||||
This crate is motivated by the procedural macro use case, but is a
|
||||
general-purpose Rust quasi-quoting library and is not specific to procedural
|
||||
macros.
|
||||
|
||||
*Version requirement: Quote supports any compiler version back to Rust's very
|
||||
first support for procedural macros in Rust 1.15.0.*
|
||||
|
||||
[*Release notes*](https://github.com/dtolnay/quote/releases)
|
||||
|
||||
```toml
|
||||
[dependencies]
|
||||
quote = "0.6"
|
||||
```
|
||||
|
||||
## Syntax
|
||||
|
||||
The quote crate provides a [`quote!`] macro within which you can write Rust code
|
||||
that gets packaged into a [`TokenStream`] and can be treated as data. You should
|
||||
think of `TokenStream` as representing a fragment of Rust source code.
|
||||
|
||||
[`TokenStream`]: https://docs.rs/proc-macro2/0.4/proc_macro2/struct.TokenStream.html
|
||||
|
||||
Within the `quote!` macro, interpolation is done with `#var`. Any type
|
||||
implementing the [`quote::ToTokens`] trait can be interpolated. This includes
|
||||
most Rust primitive types as well as most of the syntax tree types from [`syn`].
|
||||
|
||||
[`quote::ToTokens`]: https://docs.rs/quote/0.6/quote/trait.ToTokens.html
|
||||
[`syn`]: https://github.com/dtolnay/syn
|
||||
|
||||
```rust
|
||||
let tokens = quote! {
|
||||
struct SerializeWith #generics #where_clause {
|
||||
value: &'a #field_ty,
|
||||
phantom: core::marker::PhantomData<#item_ty>,
|
||||
}
|
||||
|
||||
impl #generics serde::Serialize for SerializeWith #generics #where_clause {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
#path(self.value, serializer)
|
||||
}
|
||||
}
|
||||
|
||||
SerializeWith {
|
||||
value: #value,
|
||||
phantom: core::marker::PhantomData::<#item_ty>,
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
## Repetition
|
||||
|
||||
Repetition is done using `#(...)*` or `#(...),*` similar to `macro_rules!`. This
|
||||
iterates through the elements of any variable interpolated within the repetition
|
||||
and inserts a copy of the repetition body for each one. The variables in an
|
||||
interpolation may be anything that implements `IntoIterator`, including `Vec` or
|
||||
a pre-existing iterator.
|
||||
|
||||
- `#(#var)*` — no separators
|
||||
- `#(#var),*` — the character before the asterisk is used as a separator
|
||||
- `#( struct #var; )*` — the repetition can contain other things
|
||||
- `#( #k => println!("{}", #v), )*` — even multiple interpolations
|
||||
|
||||
Note that there is a difference between `#(#var ,)*` and `#(#var),*`—the latter
|
||||
does not produce a trailing comma. This matches the behavior of delimiters in
|
||||
`macro_rules!`.
|
||||
|
||||
## Returning tokens to the compiler
|
||||
|
||||
The `quote!` macro evaluates to an expression of type
|
||||
`proc_macro2::TokenStream`. Meanwhile Rust procedural macros are expected to
|
||||
return the type `proc_macro::TokenStream`.
|
||||
|
||||
The difference between the two types is that `proc_macro` types are entirely
|
||||
specific to procedural macros and cannot ever exist in code outside of a
|
||||
procedural macro, while `proc_macro2` types may exist anywhere including tests
|
||||
and non-macro code like main.rs and build.rs. This is why even the procedural
|
||||
macro ecosystem is largely built around `proc_macro2`, because that ensures the
|
||||
libraries are unit testable and accessible in non-macro contexts.
|
||||
|
||||
There is a [`From`]-conversion in both directions so returning the output of
|
||||
`quote!` from a procedural macro usually looks like `tokens.into()` or
|
||||
`proc_macro::TokenStream::from(tokens)`.
|
||||
|
||||
[`From`]: https://doc.rust-lang.org/std/convert/trait.From.html
|
||||
|
||||
## Examples
|
||||
|
||||
### Combining quoted fragments
|
||||
|
||||
Usually you don't end up constructing an entire final `TokenStream` in one
|
||||
piece. Different parts may come from different helper functions. The tokens
|
||||
produced by `quote!` themselves implement `ToTokens` and so can be interpolated
|
||||
into later `quote!` invocations to build up a final result.
|
||||
|
||||
```rust
|
||||
let type_definition = quote! {...};
|
||||
let methods = quote! {...};
|
||||
|
||||
let tokens = quote! {
|
||||
#type_definition
|
||||
#methods
|
||||
};
|
||||
```
|
||||
|
||||
### Constructing identifiers
|
||||
|
||||
Suppose we have an identifier `ident` which came from somewhere in a macro
|
||||
input and we need to modify it in some way for the macro output. Let's consider
|
||||
prepending the identifier with an underscore.
|
||||
|
||||
Simply interpolating the identifier next to an underscore will not have the
|
||||
behavior of concatenating them. The underscore and the identifier will continue
|
||||
to be two separate tokens as if you had written `_ x`.
|
||||
|
||||
```rust
|
||||
// incorrect
|
||||
quote! {
|
||||
let mut _#ident = 0;
|
||||
}
|
||||
```
|
||||
|
||||
The solution is to perform token-level manipulations using the APIs provided by
|
||||
Syn and proc-macro2.
|
||||
|
||||
```rust
|
||||
let concatenated = format!("_{}", ident);
|
||||
let varname = syn::Ident::new(&concatenated, ident.span());
|
||||
quote! {
|
||||
let mut #varname = 0;
|
||||
}
|
||||
```
|
||||
|
||||
### Making method calls
|
||||
|
||||
Let's say our macro requires some type specified in the macro input to have a
|
||||
constructor called `new`. We have the type in a variable called `field_type` of
|
||||
type `syn::Type` and want to invoke the constructor.
|
||||
|
||||
```rust
|
||||
// incorrect
|
||||
quote! {
|
||||
let value = #field_type::new();
|
||||
}
|
||||
```
|
||||
|
||||
This works only sometimes. If `field_type` is `String`, the expanded code
|
||||
contains `String::new()` which is fine. But if `field_type` is something like
|
||||
`Vec<i32>` then the expanded code is `Vec<i32>::new()` which is invalid syntax.
|
||||
Ordinarily in handwritten Rust we would write `Vec::<i32>::new()` but for macros
|
||||
often the following is more convenient.
|
||||
|
||||
```rust
|
||||
quote! {
|
||||
let value = <#field_type>::new();
|
||||
}
|
||||
```
|
||||
|
||||
This expands to `<Vec<i32>>::new()` which behaves correctly.
|
||||
|
||||
A similar pattern is appropriate for trait methods.
|
||||
|
||||
```rust
|
||||
quote! {
|
||||
let value = <#field_type as core::default::Default>::default();
|
||||
}
|
||||
```
|
||||
|
||||
## Hygiene
|
||||
|
||||
Any interpolated tokens preserve the `Span` information provided by their
|
||||
`ToTokens` implementation. Tokens that originate within a `quote!` invocation
|
||||
are spanned with [`Span::call_site()`].
|
||||
|
||||
[`Span::call_site()`]: https://docs.rs/proc-macro2/0.4/proc_macro2/struct.Span.html#method.call_site
|
||||
|
||||
A different span can be provided explicitly through the [`quote_spanned!`]
|
||||
macro.
|
||||
|
||||
[`quote_spanned!`]: https://docs.rs/quote/0.6/quote/macro.quote_spanned.html
|
||||
|
||||
### Limitations
|
||||
|
||||
- A non-repeating variable may not be interpolated inside of a repeating block
|
||||
([#7]).
|
||||
- The same variable may not be interpolated more than once inside of a repeating
|
||||
block ([#8]).
|
||||
|
||||
[#7]: https://github.com/dtolnay/quote/issues/7
|
||||
[#8]: https://github.com/dtolnay/quote/issues/8
|
||||
|
||||
### Recursion limit
|
||||
|
||||
The `quote!` macro relies on deep recursion so some large invocations may fail
|
||||
with "recursion limit reached" when you compile. If it fails, bump up the
|
||||
recursion limit by adding `#![recursion_limit = "128"]` to your crate. An even
|
||||
higher limit may be necessary for especially large invocations. You don't need
|
||||
this unless the compiler tells you that you need it.
|
||||
|
||||
## License
|
||||
|
||||
Licensed under either of
|
||||
|
||||
* Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
|
||||
* MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
|
||||
|
||||
at your option.
|
||||
|
||||
### Contribution
|
||||
|
||||
Unless you explicitly state otherwise, any contribution intentionally submitted
|
||||
for inclusion in this crate by you, as defined in the Apache-2.0 license, shall
|
||||
be dual licensed as above, without any additional terms or conditions.
|
|
@ -1,112 +0,0 @@
|
|||
use super::ToTokens;
|
||||
|
||||
use std::iter;
|
||||
|
||||
use proc_macro2::{TokenStream, TokenTree};
|
||||
|
||||
/// TokenStream extension trait with methods for appending tokens.
|
||||
///
|
||||
/// This trait is sealed and cannot be implemented outside of the `quote` crate.
|
||||
pub trait TokenStreamExt: private::Sealed {
|
||||
/// For use by `ToTokens` implementations.
|
||||
///
|
||||
/// Appends the token specified to this list of tokens.
|
||||
fn append<U>(&mut self, token: U)
|
||||
where
|
||||
U: Into<TokenTree>;
|
||||
|
||||
/// For use by `ToTokens` implementations.
|
||||
///
|
||||
/// ```edition2018
|
||||
/// # use quote::{quote, TokenStreamExt, ToTokens};
|
||||
/// # use proc_macro2::TokenStream;
|
||||
/// #
|
||||
/// struct X;
|
||||
///
|
||||
/// impl ToTokens for X {
|
||||
/// fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
/// tokens.append_all(&[true, false]);
|
||||
/// }
|
||||
/// }
|
||||
///
|
||||
/// let tokens = quote!(#X);
|
||||
/// assert_eq!(tokens.to_string(), "true false");
|
||||
/// ```
|
||||
fn append_all<T, I>(&mut self, iter: I)
|
||||
where
|
||||
T: ToTokens,
|
||||
I: IntoIterator<Item = T>;
|
||||
|
||||
/// For use by `ToTokens` implementations.
|
||||
///
|
||||
/// Appends all of the items in the iterator `I`, separated by the tokens
|
||||
/// `U`.
|
||||
fn append_separated<T, I, U>(&mut self, iter: I, op: U)
|
||||
where
|
||||
T: ToTokens,
|
||||
I: IntoIterator<Item = T>,
|
||||
U: ToTokens;
|
||||
|
||||
/// For use by `ToTokens` implementations.
|
||||
///
|
||||
/// Appends all tokens in the iterator `I`, appending `U` after each
|
||||
/// element, including after the last element of the iterator.
|
||||
fn append_terminated<T, I, U>(&mut self, iter: I, term: U)
|
||||
where
|
||||
T: ToTokens,
|
||||
I: IntoIterator<Item = T>,
|
||||
U: ToTokens;
|
||||
}
|
||||
|
||||
impl TokenStreamExt for TokenStream {
|
||||
fn append<U>(&mut self, token: U)
|
||||
where
|
||||
U: Into<TokenTree>,
|
||||
{
|
||||
self.extend(iter::once(token.into()));
|
||||
}
|
||||
|
||||
fn append_all<T, I>(&mut self, iter: I)
|
||||
where
|
||||
T: ToTokens,
|
||||
I: IntoIterator<Item = T>,
|
||||
{
|
||||
for token in iter {
|
||||
token.to_tokens(self);
|
||||
}
|
||||
}
|
||||
|
||||
fn append_separated<T, I, U>(&mut self, iter: I, op: U)
|
||||
where
|
||||
T: ToTokens,
|
||||
I: IntoIterator<Item = T>,
|
||||
U: ToTokens,
|
||||
{
|
||||
for (i, token) in iter.into_iter().enumerate() {
|
||||
if i > 0 {
|
||||
op.to_tokens(self);
|
||||
}
|
||||
token.to_tokens(self);
|
||||
}
|
||||
}
|
||||
|
||||
fn append_terminated<T, I, U>(&mut self, iter: I, term: U)
|
||||
where
|
||||
T: ToTokens,
|
||||
I: IntoIterator<Item = T>,
|
||||
U: ToTokens,
|
||||
{
|
||||
for token in iter {
|
||||
token.to_tokens(self);
|
||||
term.to_tokens(self);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
mod private {
|
||||
use proc_macro2::TokenStream;
|
||||
|
||||
pub trait Sealed {}
|
||||
|
||||
impl Sealed for TokenStream {}
|
||||
}
|
|
@ -1,969 +0,0 @@
|
|||
//! This crate provides the [`quote!`] macro for turning Rust syntax tree data
|
||||
//! structures into tokens of source code.
|
||||
//!
|
||||
//! [`quote!`]: macro.quote.html
|
||||
//!
|
||||
//! Procedural macros in Rust receive a stream of tokens as input, execute
|
||||
//! arbitrary Rust code to determine how to manipulate those tokens, and produce
|
||||
//! a stream of tokens to hand back to the compiler to compile into the caller's
|
||||
//! crate. Quasi-quoting is a solution to one piece of that -- producing tokens
|
||||
//! to return to the compiler.
|
||||
//!
|
||||
//! The idea of quasi-quoting is that we write *code* that we treat as *data*.
|
||||
//! Within the `quote!` macro, we can write what looks like code to our text
|
||||
//! editor or IDE. We get all the benefits of the editor's brace matching,
|
||||
//! syntax highlighting, indentation, and maybe autocompletion. But rather than
|
||||
//! compiling that as code into the current crate, we can treat it as data, pass
|
||||
//! it around, mutate it, and eventually hand it back to the compiler as tokens
|
||||
//! to compile into the macro caller's crate.
|
||||
//!
|
||||
//! This crate is motivated by the procedural macro use case, but is a
|
||||
//! general-purpose Rust quasi-quoting library and is not specific to procedural
|
||||
//! macros.
|
||||
//!
|
||||
//! *Version requirement: Quote supports any compiler version back to Rust's
|
||||
//! very first support for procedural macros in Rust 1.15.0.*
|
||||
//!
|
||||
//! ```toml
|
||||
//! [dependencies]
|
||||
//! quote = "0.6"
|
||||
//! ```
|
||||
//!
|
||||
//! # Example
|
||||
//!
|
||||
//! The following quasi-quoted block of code is something you might find in [a]
|
||||
//! procedural macro having to do with data structure serialization. The `#var`
|
||||
//! syntax performs interpolation of runtime variables into the quoted tokens.
|
||||
//! Check out the documentation of the [`quote!`] macro for more detail about
|
||||
//! the syntax. See also the [`quote_spanned!`] macro which is important for
|
||||
//! implementing hygienic procedural macros.
|
||||
//!
|
||||
//! [a]: https://serde.rs/
|
||||
//! [`quote_spanned!`]: macro.quote_spanned.html
|
||||
//!
|
||||
//! ```edition2018
|
||||
//! # use quote::quote;
|
||||
//! #
|
||||
//! # let generics = "";
|
||||
//! # let where_clause = "";
|
||||
//! # let field_ty = "";
|
||||
//! # let item_ty = "";
|
||||
//! # let path = "";
|
||||
//! # let value = "";
|
||||
//! #
|
||||
//! let tokens = quote! {
|
||||
//! struct SerializeWith #generics #where_clause {
|
||||
//! value: &'a #field_ty,
|
||||
//! phantom: core::marker::PhantomData<#item_ty>,
|
||||
//! }
|
||||
//!
|
||||
//! impl #generics serde::Serialize for SerializeWith #generics #where_clause {
|
||||
//! fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
//! where
|
||||
//! S: serde::Serializer,
|
||||
//! {
|
||||
//! #path(self.value, serializer)
|
||||
//! }
|
||||
//! }
|
||||
//!
|
||||
//! SerializeWith {
|
||||
//! value: #value,
|
||||
//! phantom: core::marker::PhantomData::<#item_ty>,
|
||||
//! }
|
||||
//! };
|
||||
//! ```
|
||||
//!
|
||||
//! # Recursion limit
|
||||
//!
|
||||
//! The `quote!` macro relies on deep recursion so some large invocations may
|
||||
//! fail with "recursion limit reached" when you compile. If it fails, bump up
|
||||
//! the recursion limit by adding `#![recursion_limit = "128"]` to your crate.
|
||||
//! An even higher limit may be necessary for especially large invocations.
|
||||
|
||||
// Quote types in rustdoc of other crates get linked to here.
|
||||
#![doc(html_root_url = "https://docs.rs/quote/0.6.11")]
|
||||
|
||||
#[cfg(all(
|
||||
not(all(target_arch = "wasm32", target_os = "unknown")),
|
||||
feature = "proc-macro"
|
||||
))]
|
||||
extern crate proc_macro;
|
||||
extern crate proc_macro2;
|
||||
|
||||
mod ext;
|
||||
pub use ext::TokenStreamExt;
|
||||
|
||||
mod to_tokens;
|
||||
pub use to_tokens::ToTokens;
|
||||
|
||||
// Not public API.
|
||||
#[doc(hidden)]
|
||||
pub mod __rt {
|
||||
use ext::TokenStreamExt;
|
||||
pub use proc_macro2::*;
|
||||
|
||||
fn is_ident_start(c: u8) -> bool {
|
||||
(b'a' <= c && c <= b'z') || (b'A' <= c && c <= b'Z') || c == b'_'
|
||||
}
|
||||
|
||||
fn is_ident_continue(c: u8) -> bool {
|
||||
(b'a' <= c && c <= b'z')
|
||||
|| (b'A' <= c && c <= b'Z')
|
||||
|| c == b'_'
|
||||
|| (b'0' <= c && c <= b'9')
|
||||
}
|
||||
|
||||
fn is_ident(token: &str) -> bool {
|
||||
if token.bytes().all(|digit| digit >= b'0' && digit <= b'9') {
|
||||
return false;
|
||||
}
|
||||
|
||||
let mut bytes = token.bytes();
|
||||
let first = bytes.next().unwrap();
|
||||
if !is_ident_start(first) {
|
||||
return false;
|
||||
}
|
||||
for ch in bytes {
|
||||
if !is_ident_continue(ch) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
true
|
||||
}
|
||||
|
||||
pub fn parse(tokens: &mut TokenStream, span: Span, s: &str) {
|
||||
if is_ident(s) {
|
||||
// Fast path, since idents are the most common token.
|
||||
tokens.append(Ident::new(s, span));
|
||||
} else {
|
||||
let s: TokenStream = s.parse().expect("invalid token stream");
|
||||
tokens.extend(s.into_iter().map(|mut t| {
|
||||
t.set_span(span);
|
||||
t
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! push_punct {
|
||||
($name:ident $char1:tt) => {
|
||||
pub fn $name(tokens: &mut TokenStream, span: Span) {
|
||||
let mut punct = Punct::new($char1, Spacing::Alone);
|
||||
punct.set_span(span);
|
||||
tokens.append(punct);
|
||||
}
|
||||
};
|
||||
($name:ident $char1:tt $char2:tt) => {
|
||||
pub fn $name(tokens: &mut TokenStream, span: Span) {
|
||||
let mut punct = Punct::new($char1, Spacing::Joint);
|
||||
punct.set_span(span);
|
||||
tokens.append(punct);
|
||||
let mut punct = Punct::new($char2, Spacing::Alone);
|
||||
punct.set_span(span);
|
||||
tokens.append(punct);
|
||||
}
|
||||
};
|
||||
($name:ident $char1:tt $char2:tt $char3:tt) => {
|
||||
pub fn $name(tokens: &mut TokenStream, span: Span) {
|
||||
let mut punct = Punct::new($char1, Spacing::Joint);
|
||||
punct.set_span(span);
|
||||
tokens.append(punct);
|
||||
let mut punct = Punct::new($char2, Spacing::Joint);
|
||||
punct.set_span(span);
|
||||
tokens.append(punct);
|
||||
let mut punct = Punct::new($char3, Spacing::Alone);
|
||||
punct.set_span(span);
|
||||
tokens.append(punct);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
push_punct!(push_add '+');
|
||||
push_punct!(push_add_eq '+' '=');
|
||||
push_punct!(push_and '&');
|
||||
push_punct!(push_and_and '&' '&');
|
||||
push_punct!(push_and_eq '&' '=');
|
||||
push_punct!(push_at '@');
|
||||
push_punct!(push_bang '!');
|
||||
push_punct!(push_caret '^');
|
||||
push_punct!(push_caret_eq '^' '=');
|
||||
push_punct!(push_colon ':');
|
||||
push_punct!(push_colon2 ':' ':');
|
||||
push_punct!(push_comma ',');
|
||||
push_punct!(push_div '/');
|
||||
push_punct!(push_div_eq '/' '=');
|
||||
push_punct!(push_dot '.');
|
||||
push_punct!(push_dot2 '.' '.');
|
||||
push_punct!(push_dot3 '.' '.' '.');
|
||||
push_punct!(push_dot_dot_eq '.' '.' '=');
|
||||
push_punct!(push_eq '=');
|
||||
push_punct!(push_eq_eq '=' '=');
|
||||
push_punct!(push_ge '>' '=');
|
||||
push_punct!(push_gt '>');
|
||||
push_punct!(push_le '<' '=');
|
||||
push_punct!(push_lt '<');
|
||||
push_punct!(push_mul_eq '*' '=');
|
||||
push_punct!(push_ne '!' '=');
|
||||
push_punct!(push_or '|');
|
||||
push_punct!(push_or_eq '|' '=');
|
||||
push_punct!(push_or_or '|' '|');
|
||||
push_punct!(push_pound '#');
|
||||
push_punct!(push_question '?');
|
||||
push_punct!(push_rarrow '-' '>');
|
||||
push_punct!(push_larrow '<' '-');
|
||||
push_punct!(push_rem '%');
|
||||
push_punct!(push_rem_eq '%' '=');
|
||||
push_punct!(push_fat_arrow '=' '>');
|
||||
push_punct!(push_semi ';');
|
||||
push_punct!(push_shl '<' '<');
|
||||
push_punct!(push_shl_eq '<' '<' '=');
|
||||
push_punct!(push_shr '>' '>');
|
||||
push_punct!(push_shr_eq '>' '>' '=');
|
||||
push_punct!(push_star '*');
|
||||
push_punct!(push_sub '-');
|
||||
push_punct!(push_sub_eq '-' '=');
|
||||
}
|
||||
|
||||
/// The whole point.
|
||||
///
|
||||
/// Performs variable interpolation against the input and produces it as
|
||||
/// [`TokenStream`]. For returning tokens to the compiler in a procedural macro, use
|
||||
/// `into()` to build a `TokenStream`.
|
||||
///
|
||||
/// [`TokenStream`]: https://docs.rs/proc-macro2/0.4/proc_macro2/struct.TokenStream.html
|
||||
///
|
||||
/// # Interpolation
|
||||
///
|
||||
/// Variable interpolation is done with `#var` (similar to `$var` in
|
||||
/// `macro_rules!` macros). This grabs the `var` variable that is currently in
|
||||
/// scope and inserts it in that location in the output tokens. Any type
|
||||
/// implementing the [`ToTokens`] trait can be interpolated. This includes most
|
||||
/// Rust primitive types as well as most of the syntax tree types from the [Syn]
|
||||
/// crate.
|
||||
///
|
||||
/// [`ToTokens`]: trait.ToTokens.html
|
||||
/// [Syn]: https://github.com/dtolnay/syn
|
||||
///
|
||||
/// Repetition is done using `#(...)*` or `#(...),*` again similar to
|
||||
/// `macro_rules!`. This iterates through the elements of any variable
|
||||
/// interpolated within the repetition and inserts a copy of the repetition body
|
||||
/// for each one. The variables in an interpolation may be anything that
|
||||
/// implements `IntoIterator`, including `Vec` or a pre-existing iterator.
|
||||
///
|
||||
/// - `#(#var)*` — no separators
|
||||
/// - `#(#var),*` — the character before the asterisk is used as a separator
|
||||
/// - `#( struct #var; )*` — the repetition can contain other tokens
|
||||
/// - `#( #k => println!("{}", #v), )*` — even multiple interpolations
|
||||
///
|
||||
/// # Hygiene
|
||||
///
|
||||
/// Any interpolated tokens preserve the `Span` information provided by their
|
||||
/// `ToTokens` implementation. Tokens that originate within the `quote!`
|
||||
/// invocation are spanned with [`Span::call_site()`].
|
||||
///
|
||||
/// [`Span::call_site()`]: https://docs.rs/proc-macro2/0.4/proc_macro2/struct.Span.html#method.call_site
|
||||
///
|
||||
/// A different span can be provided through the [`quote_spanned!`] macro.
|
||||
///
|
||||
/// [`quote_spanned!`]: macro.quote_spanned.html
|
||||
///
|
||||
/// # Return type
|
||||
///
|
||||
/// The macro evaluates to an expression of type `proc_macro2::TokenStream`.
|
||||
/// Meanwhile Rust procedural macros are expected to return the type
|
||||
/// `proc_macro::TokenStream`.
|
||||
///
|
||||
/// The difference between the two types is that `proc_macro` types are entirely
|
||||
/// specific to procedural macros and cannot ever exist in code outside of a
|
||||
/// procedural macro, while `proc_macro2` types may exist anywhere including
|
||||
/// tests and non-macro code like main.rs and build.rs. This is why even the
|
||||
/// procedural macro ecosystem is largely built around `proc_macro2`, because
|
||||
/// that ensures the libraries are unit testable and accessible in non-macro
|
||||
/// contexts.
|
||||
///
|
||||
/// There is a [`From`]-conversion in both directions so returning the output of
|
||||
/// `quote!` from a procedural macro usually looks like `tokens.into()` or
|
||||
/// `proc_macro::TokenStream::from(tokens)`.
|
||||
///
|
||||
/// [`From`]: https://doc.rust-lang.org/std/convert/trait.From.html
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ## Procedural macro
|
||||
///
|
||||
/// The structure of a basic procedural macro is as follows. Refer to the [Syn]
|
||||
/// crate for further useful guidance on using `quote!` as part of a procedural
|
||||
/// macro.
|
||||
///
|
||||
/// [Syn]: https://github.com/dtolnay/syn
|
||||
///
|
||||
/// ```edition2018
|
||||
/// # #[cfg(any())]
|
||||
/// extern crate proc_macro;
|
||||
/// # use proc_macro2 as proc_macro;
|
||||
///
|
||||
/// use proc_macro::TokenStream;
|
||||
/// use quote::quote;
|
||||
///
|
||||
/// # const IGNORE_TOKENS: &'static str = stringify! {
|
||||
/// #[proc_macro_derive(HeapSize)]
|
||||
/// # };
|
||||
/// pub fn derive_heap_size(input: TokenStream) -> TokenStream {
|
||||
/// // Parse the input and figure out what implementation to generate...
|
||||
/// # const IGNORE_TOKENS: &'static str = stringify! {
|
||||
/// let name = /* ... */;
|
||||
/// let expr = /* ... */;
|
||||
/// # };
|
||||
/// #
|
||||
/// # let name = 0;
|
||||
/// # let expr = 0;
|
||||
///
|
||||
/// let expanded = quote! {
|
||||
/// // The generated impl.
|
||||
/// impl heapsize::HeapSize for #name {
|
||||
/// fn heap_size_of_children(&self) -> usize {
|
||||
/// #expr
|
||||
/// }
|
||||
/// }
|
||||
/// };
|
||||
///
|
||||
/// // Hand the output tokens back to the compiler.
|
||||
/// TokenStream::from(expanded)
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// ## Combining quoted fragments
|
||||
///
|
||||
/// Usually you don't end up constructing an entire final `TokenStream` in one
|
||||
/// piece. Different parts may come from different helper functions. The tokens
|
||||
/// produced by `quote!` themselves implement `ToTokens` and so can be
|
||||
/// interpolated into later `quote!` invocations to build up a final result.
|
||||
///
|
||||
/// ```edition2018
|
||||
/// # use quote::quote;
|
||||
/// #
|
||||
/// let type_definition = quote! {...};
|
||||
/// let methods = quote! {...};
|
||||
///
|
||||
/// let tokens = quote! {
|
||||
/// #type_definition
|
||||
/// #methods
|
||||
/// };
|
||||
/// ```
|
||||
///
|
||||
/// ## Constructing identifiers
|
||||
///
|
||||
/// Suppose we have an identifier `ident` which came from somewhere in a macro
|
||||
/// input and we need to modify it in some way for the macro output. Let's
|
||||
/// consider prepending the identifier with an underscore.
|
||||
///
|
||||
/// Simply interpolating the identifier next to an underscore will not have the
|
||||
/// behavior of concatenating them. The underscore and the identifier will
|
||||
/// continue to be two separate tokens as if you had written `_ x`.
|
||||
///
|
||||
/// ```edition2018
|
||||
/// # use proc_macro2::{self as syn, Span};
|
||||
/// # use quote::quote;
|
||||
/// #
|
||||
/// # let ident = syn::Ident::new("i", Span::call_site());
|
||||
/// #
|
||||
/// // incorrect
|
||||
/// quote! {
|
||||
/// let mut _#ident = 0;
|
||||
/// }
|
||||
/// # ;
|
||||
/// ```
|
||||
///
|
||||
/// The solution is to perform token-level manipulations using the APIs provided
|
||||
/// by Syn and proc-macro2.
|
||||
///
|
||||
/// ```edition2018
|
||||
/// # use proc_macro2::{self as syn, Span};
|
||||
/// # use quote::quote;
|
||||
/// #
|
||||
/// # let ident = syn::Ident::new("i", Span::call_site());
|
||||
/// #
|
||||
/// let concatenated = format!("_{}", ident);
|
||||
/// let varname = syn::Ident::new(&concatenated, ident.span());
|
||||
/// quote! {
|
||||
/// let mut #varname = 0;
|
||||
/// }
|
||||
/// # ;
|
||||
/// ```
|
||||
///
|
||||
/// ## Making method calls
|
||||
///
|
||||
/// Let's say our macro requires some type specified in the macro input to have
|
||||
/// a constructor called `new`. We have the type in a variable called
|
||||
/// `field_type` of type `syn::Type` and want to invoke the constructor.
|
||||
///
|
||||
/// ```edition2018
|
||||
/// # use quote::quote;
|
||||
/// #
|
||||
/// # let field_type = quote!(...);
|
||||
/// #
|
||||
/// // incorrect
|
||||
/// quote! {
|
||||
/// let value = #field_type::new();
|
||||
/// }
|
||||
/// # ;
|
||||
/// ```
|
||||
///
|
||||
/// This works only sometimes. If `field_type` is `String`, the expanded code
|
||||
/// contains `String::new()` which is fine. But if `field_type` is something
|
||||
/// like `Vec<i32>` then the expanded code is `Vec<i32>::new()` which is invalid
|
||||
/// syntax. Ordinarily in handwritten Rust we would write `Vec::<i32>::new()`
|
||||
/// but for macros often the following is more convenient.
|
||||
///
|
||||
/// ```edition2018
|
||||
/// # use quote::quote;
|
||||
/// #
|
||||
/// # let field_type = quote!(...);
|
||||
/// #
|
||||
/// quote! {
|
||||
/// let value = <#field_type>::new();
|
||||
/// }
|
||||
/// # ;
|
||||
/// ```
|
||||
///
|
||||
/// This expands to `<Vec<i32>>::new()` which behaves correctly.
|
||||
///
|
||||
/// A similar pattern is appropriate for trait methods.
|
||||
///
|
||||
/// ```edition2018
|
||||
/// # use quote::quote;
|
||||
/// #
|
||||
/// # let field_type = quote!(...);
|
||||
/// #
|
||||
/// quote! {
|
||||
/// let value = <#field_type as core::default::Default>::default();
|
||||
/// }
|
||||
/// # ;
|
||||
/// ```
|
||||
#[macro_export(local_inner_macros)]
|
||||
macro_rules! quote {
|
||||
($($tt:tt)*) => (quote_spanned!($crate::__rt::Span::call_site()=> $($tt)*));
|
||||
}
|
||||
|
||||
/// Same as `quote!`, but applies a given span to all tokens originating within
|
||||
/// the macro invocation.
|
||||
///
|
||||
/// # Syntax
|
||||
///
|
||||
/// A span expression of type [`Span`], followed by `=>`, followed by the tokens
|
||||
/// to quote. The span expression should be brief -- use a variable for anything
|
||||
/// more than a few characters. There should be no space before the `=>` token.
|
||||
///
|
||||
/// [`Span`]: https://docs.rs/proc-macro2/0.4/proc_macro2/struct.Span.html
|
||||
///
|
||||
/// ```edition2018
|
||||
/// # use proc_macro2::Span;
|
||||
/// # use quote::quote_spanned;
|
||||
/// #
|
||||
/// # const IGNORE_TOKENS: &'static str = stringify! {
|
||||
/// let span = /* ... */;
|
||||
/// # };
|
||||
/// # let span = Span::call_site();
|
||||
/// # let init = 0;
|
||||
///
|
||||
/// // On one line, use parentheses.
|
||||
/// let tokens = quote_spanned!(span=> Box::into_raw(Box::new(#init)));
|
||||
///
|
||||
/// // On multiple lines, place the span at the top and use braces.
|
||||
/// let tokens = quote_spanned! {span=>
|
||||
/// Box::into_raw(Box::new(#init))
|
||||
/// };
|
||||
/// ```
|
||||
///
|
||||
/// The lack of space before the `=>` should look jarring to Rust programmers
|
||||
/// and this is intentional. The formatting is designed to be visibly
|
||||
/// off-balance and draw the eye a particular way, due to the span expression
|
||||
/// being evaluated in the context of the procedural macro and the remaining
|
||||
/// tokens being evaluated in the generated code.
|
||||
///
|
||||
/// # Hygiene
|
||||
///
|
||||
/// Any interpolated tokens preserve the `Span` information provided by their
|
||||
/// `ToTokens` implementation. Tokens that originate within the `quote_spanned!`
|
||||
/// invocation are spanned with the given span argument.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// The following procedural macro code uses `quote_spanned!` to assert that a
|
||||
/// particular Rust type implements the [`Sync`] trait so that references can be
|
||||
/// safely shared between threads.
|
||||
///
|
||||
/// [`Sync`]: https://doc.rust-lang.org/std/marker/trait.Sync.html
|
||||
///
|
||||
/// ```edition2018
|
||||
/// # use quote::{quote_spanned, TokenStreamExt, ToTokens};
|
||||
/// # use proc_macro2::{Span, TokenStream};
|
||||
/// #
|
||||
/// # struct Type;
|
||||
/// #
|
||||
/// # impl Type {
|
||||
/// # fn span(&self) -> Span {
|
||||
/// # Span::call_site()
|
||||
/// # }
|
||||
/// # }
|
||||
/// #
|
||||
/// # impl ToTokens for Type {
|
||||
/// # fn to_tokens(&self, _tokens: &mut TokenStream) {}
|
||||
/// # }
|
||||
/// #
|
||||
/// # let ty = Type;
|
||||
/// # let call_site = Span::call_site();
|
||||
/// #
|
||||
/// let ty_span = ty.span();
|
||||
/// let assert_sync = quote_spanned! {ty_span=>
|
||||
/// struct _AssertSync where #ty: Sync;
|
||||
/// };
|
||||
/// ```
|
||||
///
|
||||
/// If the assertion fails, the user will see an error like the following. The
|
||||
/// input span of their type is hightlighted in the error.
|
||||
///
|
||||
/// ```text
|
||||
/// error[E0277]: the trait bound `*const (): std::marker::Sync` is not satisfied
|
||||
/// --> src/main.rs:10:21
|
||||
/// |
|
||||
/// 10 | static ref PTR: *const () = &();
|
||||
/// | ^^^^^^^^^ `*const ()` cannot be shared between threads safely
|
||||
/// ```
|
||||
///
|
||||
/// In this example it is important for the where-clause to be spanned with the
|
||||
/// line/column information of the user's input type so that error messages are
|
||||
/// placed appropriately by the compiler. But it is also incredibly important
|
||||
/// that `Sync` resolves at the macro definition site and not the macro call
|
||||
/// site. If we resolve `Sync` at the same span that the user's type is going to
|
||||
/// be resolved, then they could bypass our check by defining their own trait
|
||||
/// named `Sync` that is implemented for their type.
|
||||
#[macro_export(local_inner_macros)]
|
||||
macro_rules! quote_spanned {
|
||||
($span:expr=> $($tt:tt)*) => {
|
||||
{
|
||||
let mut _s = $crate::__rt::TokenStream::new();
|
||||
let _span = $span;
|
||||
quote_each_token!(_s _span $($tt)*);
|
||||
_s
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
// Extract the names of all #metavariables and pass them to the $finish macro.
|
||||
//
|
||||
// in: pounded_var_names!(then () a #b c #( #d )* #e)
|
||||
// out: then!(() b d e)
|
||||
#[macro_export(local_inner_macros)]
|
||||
#[doc(hidden)]
|
||||
macro_rules! pounded_var_names {
|
||||
($finish:ident ($($found:ident)*) # ( $($inner:tt)* ) $($rest:tt)*) => {
|
||||
pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*)
|
||||
};
|
||||
|
||||
($finish:ident ($($found:ident)*) # [ $($inner:tt)* ] $($rest:tt)*) => {
|
||||
pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*)
|
||||
};
|
||||
|
||||
($finish:ident ($($found:ident)*) # { $($inner:tt)* } $($rest:tt)*) => {
|
||||
pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*)
|
||||
};
|
||||
|
||||
($finish:ident ($($found:ident)*) # $first:ident $($rest:tt)*) => {
|
||||
pounded_var_names!($finish ($($found)* $first) $($rest)*)
|
||||
};
|
||||
|
||||
($finish:ident ($($found:ident)*) ( $($inner:tt)* ) $($rest:tt)*) => {
|
||||
pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*)
|
||||
};
|
||||
|
||||
($finish:ident ($($found:ident)*) [ $($inner:tt)* ] $($rest:tt)*) => {
|
||||
pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*)
|
||||
};
|
||||
|
||||
($finish:ident ($($found:ident)*) { $($inner:tt)* } $($rest:tt)*) => {
|
||||
pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*)
|
||||
};
|
||||
|
||||
($finish:ident ($($found:ident)*) $ignore:tt $($rest:tt)*) => {
|
||||
pounded_var_names!($finish ($($found)*) $($rest)*)
|
||||
};
|
||||
|
||||
($finish:ident ($($found:ident)*)) => {
|
||||
$finish!(() $($found)*)
|
||||
};
|
||||
}
|
||||
|
||||
// in: nested_tuples_pat!(() a b c d e)
|
||||
// out: ((((a b) c) d) e)
|
||||
//
|
||||
// in: nested_tuples_pat!(() a)
|
||||
// out: a
|
||||
#[macro_export(local_inner_macros)]
|
||||
#[doc(hidden)]
|
||||
macro_rules! nested_tuples_pat {
|
||||
(()) => {
|
||||
&()
|
||||
};
|
||||
|
||||
(() $first:ident $($rest:ident)*) => {
|
||||
nested_tuples_pat!(($first) $($rest)*)
|
||||
};
|
||||
|
||||
(($pat:pat) $first:ident $($rest:ident)*) => {
|
||||
nested_tuples_pat!((($pat, $first)) $($rest)*)
|
||||
};
|
||||
|
||||
(($done:pat)) => {
|
||||
$done
|
||||
};
|
||||
}
|
||||
|
||||
// in: multi_zip_expr!(() a b c d e)
|
||||
// out: a.into_iter().zip(b).zip(c).zip(d).zip(e)
|
||||
//
|
||||
// in: multi_zip_iter!(() a)
|
||||
// out: a
|
||||
#[macro_export(local_inner_macros)]
|
||||
#[doc(hidden)]
|
||||
macro_rules! multi_zip_expr {
|
||||
(()) => {
|
||||
&[]
|
||||
};
|
||||
|
||||
(() $single:ident) => {
|
||||
$single
|
||||
};
|
||||
|
||||
(() $first:ident $($rest:ident)*) => {
|
||||
multi_zip_expr!(($first.into_iter()) $($rest)*)
|
||||
};
|
||||
|
||||
(($zips:expr) $first:ident $($rest:ident)*) => {
|
||||
multi_zip_expr!(($zips.zip($first)) $($rest)*)
|
||||
};
|
||||
|
||||
(($done:expr)) => {
|
||||
$done
|
||||
};
|
||||
}
|
||||
|
||||
#[macro_export(local_inner_macros)]
|
||||
#[doc(hidden)]
|
||||
macro_rules! quote_each_token {
|
||||
($tokens:ident $span:ident) => {};
|
||||
|
||||
($tokens:ident $span:ident # ! $($rest:tt)*) => {
|
||||
quote_each_token!($tokens $span #);
|
||||
quote_each_token!($tokens $span !);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident # ( $($inner:tt)* ) * $($rest:tt)*) => {
|
||||
for pounded_var_names!(nested_tuples_pat () $($inner)*)
|
||||
in pounded_var_names!(multi_zip_expr () $($inner)*) {
|
||||
quote_each_token!($tokens $span $($inner)*);
|
||||
}
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident # ( $($inner:tt)* ) $sep:tt * $($rest:tt)*) => {
|
||||
for (_i, pounded_var_names!(nested_tuples_pat () $($inner)*))
|
||||
in pounded_var_names!(multi_zip_expr () $($inner)*).into_iter().enumerate() {
|
||||
if _i > 0 {
|
||||
quote_each_token!($tokens $span $sep);
|
||||
}
|
||||
quote_each_token!($tokens $span $($inner)*);
|
||||
}
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident # [ $($inner:tt)* ] $($rest:tt)*) => {
|
||||
quote_each_token!($tokens $span #);
|
||||
$tokens.extend({
|
||||
let mut g = $crate::__rt::Group::new(
|
||||
$crate::__rt::Delimiter::Bracket,
|
||||
quote_spanned!($span=> $($inner)*),
|
||||
);
|
||||
g.set_span($span);
|
||||
Some($crate::__rt::TokenTree::from(g))
|
||||
});
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident # $first:ident $($rest:tt)*) => {
|
||||
$crate::ToTokens::to_tokens(&$first, &mut $tokens);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident ( $($first:tt)* ) $($rest:tt)*) => {
|
||||
$tokens.extend({
|
||||
let mut g = $crate::__rt::Group::new(
|
||||
$crate::__rt::Delimiter::Parenthesis,
|
||||
quote_spanned!($span=> $($first)*),
|
||||
);
|
||||
g.set_span($span);
|
||||
Some($crate::__rt::TokenTree::from(g))
|
||||
});
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident [ $($first:tt)* ] $($rest:tt)*) => {
|
||||
$tokens.extend({
|
||||
let mut g = $crate::__rt::Group::new(
|
||||
$crate::__rt::Delimiter::Bracket,
|
||||
quote_spanned!($span=> $($first)*),
|
||||
);
|
||||
g.set_span($span);
|
||||
Some($crate::__rt::TokenTree::from(g))
|
||||
});
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident { $($first:tt)* } $($rest:tt)*) => {
|
||||
$tokens.extend({
|
||||
let mut g = $crate::__rt::Group::new(
|
||||
$crate::__rt::Delimiter::Brace,
|
||||
quote_spanned!($span=> $($first)*),
|
||||
);
|
||||
g.set_span($span);
|
||||
Some($crate::__rt::TokenTree::from(g))
|
||||
});
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident + $($rest:tt)*) => {
|
||||
$crate::__rt::push_add(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident += $($rest:tt)*) => {
|
||||
$crate::__rt::push_add_eq(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident & $($rest:tt)*) => {
|
||||
$crate::__rt::push_and(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident && $($rest:tt)*) => {
|
||||
$crate::__rt::push_and_and(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident &= $($rest:tt)*) => {
|
||||
$crate::__rt::push_and_eq(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident @ $($rest:tt)*) => {
|
||||
$crate::__rt::push_at(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident ! $($rest:tt)*) => {
|
||||
$crate::__rt::push_bang(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident ^ $($rest:tt)*) => {
|
||||
$crate::__rt::push_caret(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident ^= $($rest:tt)*) => {
|
||||
$crate::__rt::push_caret_eq(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident : $($rest:tt)*) => {
|
||||
$crate::__rt::push_colon(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident :: $($rest:tt)*) => {
|
||||
$crate::__rt::push_colon2(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident , $($rest:tt)*) => {
|
||||
$crate::__rt::push_comma(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident / $($rest:tt)*) => {
|
||||
$crate::__rt::push_div(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident /= $($rest:tt)*) => {
|
||||
$crate::__rt::push_div_eq(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident . $($rest:tt)*) => {
|
||||
$crate::__rt::push_dot(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident .. $($rest:tt)*) => {
|
||||
$crate::__rt::push_dot2(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident ... $($rest:tt)*) => {
|
||||
$crate::__rt::push_dot3(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident ..= $($rest:tt)*) => {
|
||||
$crate::__rt::push_dot_dot_eq(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident = $($rest:tt)*) => {
|
||||
$crate::__rt::push_eq(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident == $($rest:tt)*) => {
|
||||
$crate::__rt::push_eq_eq(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident >= $($rest:tt)*) => {
|
||||
$crate::__rt::push_ge(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident > $($rest:tt)*) => {
|
||||
$crate::__rt::push_gt(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident <= $($rest:tt)*) => {
|
||||
$crate::__rt::push_le(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident < $($rest:tt)*) => {
|
||||
$crate::__rt::push_lt(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident *= $($rest:tt)*) => {
|
||||
$crate::__rt::push_mul_eq(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident != $($rest:tt)*) => {
|
||||
$crate::__rt::push_ne(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident | $($rest:tt)*) => {
|
||||
$crate::__rt::push_or(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident |= $($rest:tt)*) => {
|
||||
$crate::__rt::push_or_eq(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident || $($rest:tt)*) => {
|
||||
$crate::__rt::push_or_or(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident # $($rest:tt)*) => {
|
||||
$crate::__rt::push_pound(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident ? $($rest:tt)*) => {
|
||||
$crate::__rt::push_question(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident -> $($rest:tt)*) => {
|
||||
$crate::__rt::push_rarrow(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident <- $($rest:tt)*) => {
|
||||
$crate::__rt::push_larrow(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident % $($rest:tt)*) => {
|
||||
$crate::__rt::push_rem(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident %= $($rest:tt)*) => {
|
||||
$crate::__rt::push_rem_eq(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident => $($rest:tt)*) => {
|
||||
$crate::__rt::push_fat_arrow(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident ; $($rest:tt)*) => {
|
||||
$crate::__rt::push_semi(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident << $($rest:tt)*) => {
|
||||
$crate::__rt::push_shl(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident <<= $($rest:tt)*) => {
|
||||
$crate::__rt::push_shl_eq(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident >> $($rest:tt)*) => {
|
||||
$crate::__rt::push_shr(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident >>= $($rest:tt)*) => {
|
||||
$crate::__rt::push_shr_eq(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident * $($rest:tt)*) => {
|
||||
$crate::__rt::push_star(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident - $($rest:tt)*) => {
|
||||
$crate::__rt::push_sub(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident -= $($rest:tt)*) => {
|
||||
$crate::__rt::push_sub_eq(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident $first:tt $($rest:tt)*) => {
|
||||
$crate::__rt::parse(&mut $tokens, $span, quote_stringify!($first));
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
}
|
||||
|
||||
// Unhygienically invoke whatever `stringify` the caller has in scope i.e. not a
|
||||
// local macro. The macros marked `local_inner_macros` above cannot invoke
|
||||
// `stringify` directly.
|
||||
#[macro_export]
|
||||
#[doc(hidden)]
|
||||
macro_rules! quote_stringify {
|
||||
($tt:tt) => {
|
||||
stringify!($tt)
|
||||
};
|
||||
}
|
|
@ -1,198 +0,0 @@
|
|||
use super::TokenStreamExt;
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::iter;
|
||||
|
||||
use proc_macro2::{Group, Ident, Literal, Punct, Span, TokenStream, TokenTree};
|
||||
|
||||
/// Types that can be interpolated inside a [`quote!`] invocation.
|
||||
///
|
||||
/// [`quote!`]: macro.quote.html
|
||||
pub trait ToTokens {
|
||||
/// Write `self` to the given `TokenStream`.
|
||||
///
|
||||
/// The token append methods provided by the [`TokenStreamExt`] extension
|
||||
/// trait may be useful for implementing `ToTokens`.
|
||||
///
|
||||
/// [`TokenStreamExt`]: trait.TokenStreamExt.html
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// Example implementation for a struct representing Rust paths like
|
||||
/// `std::cmp::PartialEq`:
|
||||
///
|
||||
/// ```edition2018
|
||||
/// use proc_macro2::{TokenTree, Spacing, Span, Punct, TokenStream};
|
||||
/// use quote::{TokenStreamExt, ToTokens};
|
||||
///
|
||||
/// pub struct Path {
|
||||
/// pub global: bool,
|
||||
/// pub segments: Vec<PathSegment>,
|
||||
/// }
|
||||
///
|
||||
/// impl ToTokens for Path {
|
||||
/// fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
/// for (i, segment) in self.segments.iter().enumerate() {
|
||||
/// if i > 0 || self.global {
|
||||
/// // Double colon `::`
|
||||
/// tokens.append(Punct::new(':', Spacing::Joint));
|
||||
/// tokens.append(Punct::new(':', Spacing::Alone));
|
||||
/// }
|
||||
/// segment.to_tokens(tokens);
|
||||
/// }
|
||||
/// }
|
||||
/// }
|
||||
/// #
|
||||
/// # pub struct PathSegment;
|
||||
/// #
|
||||
/// # impl ToTokens for PathSegment {
|
||||
/// # fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
/// # unimplemented!()
|
||||
/// # }
|
||||
/// # }
|
||||
/// ```
|
||||
fn to_tokens(&self, tokens: &mut TokenStream);
|
||||
|
||||
/// Convert `self` directly into a `TokenStream` object.
|
||||
///
|
||||
/// This method is implicitly implemented using `to_tokens`, and acts as a
|
||||
/// convenience method for consumers of the `ToTokens` trait.
|
||||
fn into_token_stream(self) -> TokenStream
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
let mut tokens = TokenStream::new();
|
||||
self.to_tokens(&mut tokens);
|
||||
tokens
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T: ?Sized + ToTokens> ToTokens for &'a T {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
(**self).to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T: ?Sized + ToTokens> ToTokens for &'a mut T {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
(**self).to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T: ?Sized + ToOwned + ToTokens> ToTokens for Cow<'a, T> {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
(**self).to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + ToTokens> ToTokens for Box<T> {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
(**self).to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ToTokens> ToTokens for Option<T> {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
if let Some(ref t) = *self {
|
||||
t.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for str {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append(Literal::string(self));
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for String {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.as_str().to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! primitive {
|
||||
($($t:ident => $name:ident)*) => ($(
|
||||
impl ToTokens for $t {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append(Literal::$name(*self));
|
||||
}
|
||||
}
|
||||
)*)
|
||||
}
|
||||
|
||||
primitive! {
|
||||
i8 => i8_suffixed
|
||||
i16 => i16_suffixed
|
||||
i32 => i32_suffixed
|
||||
i64 => i64_suffixed
|
||||
isize => isize_suffixed
|
||||
|
||||
u8 => u8_suffixed
|
||||
u16 => u16_suffixed
|
||||
u32 => u32_suffixed
|
||||
u64 => u64_suffixed
|
||||
usize => usize_suffixed
|
||||
|
||||
f32 => f32_suffixed
|
||||
f64 => f64_suffixed
|
||||
}
|
||||
|
||||
#[cfg(integer128)]
|
||||
primitive! {
|
||||
i128 => i128_suffixed
|
||||
u128 => u128_suffixed
|
||||
}
|
||||
|
||||
impl ToTokens for char {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append(Literal::character(*self));
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for bool {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let word = if *self { "true" } else { "false" };
|
||||
tokens.append(Ident::new(word, Span::call_site()));
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for Group {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append(self.clone());
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for Ident {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append(self.clone());
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for Punct {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append(self.clone());
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for Literal {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append(self.clone());
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for TokenTree {
|
||||
fn to_tokens(&self, dst: &mut TokenStream) {
|
||||
dst.append(self.clone());
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for TokenStream {
|
||||
fn to_tokens(&self, dst: &mut TokenStream) {
|
||||
dst.extend(iter::once(self.clone()));
|
||||
}
|
||||
|
||||
fn into_token_stream(self) -> TokenStream {
|
||||
self
|
||||
}
|
||||
}
|
|
@ -1,295 +0,0 @@
|
|||
#![cfg_attr(feature = "cargo-clippy", allow(blacklisted_name))]
|
||||
|
||||
use std::borrow::Cow;
|
||||
|
||||
extern crate proc_macro2;
|
||||
#[macro_use]
|
||||
extern crate quote;
|
||||
|
||||
use proc_macro2::{Ident, Span, TokenStream};
|
||||
use quote::TokenStreamExt;
|
||||
|
||||
mod conditional {
|
||||
#[cfg(integer128)]
|
||||
mod integer128;
|
||||
}
|
||||
|
||||
struct X;
|
||||
|
||||
impl quote::ToTokens for X {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append(Ident::new("X", Span::call_site()));
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_quote_impl() {
|
||||
let tokens = quote! {
|
||||
impl<'a, T: ToTokens> ToTokens for &'a T {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
(**self).to_tokens(tokens)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let expected = concat!(
|
||||
"impl < 'a , T : ToTokens > ToTokens for & 'a T { ",
|
||||
"fn to_tokens ( & self , tokens : & mut TokenStream ) { ",
|
||||
"( * * self ) . to_tokens ( tokens ) ",
|
||||
"} ",
|
||||
"}"
|
||||
);
|
||||
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_substitution() {
|
||||
let x = X;
|
||||
let tokens = quote!(#x <#x> (#x) [#x] {#x});
|
||||
|
||||
let expected = "X < X > ( X ) [ X ] { X }";
|
||||
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_iter() {
|
||||
let primes = &[X, X, X, X];
|
||||
|
||||
assert_eq!("X X X X", quote!(#(#primes)*).to_string());
|
||||
|
||||
assert_eq!("X , X , X , X ,", quote!(#(#primes,)*).to_string());
|
||||
|
||||
assert_eq!("X , X , X , X", quote!(#(#primes),*).to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_advanced() {
|
||||
let generics = quote!( <'a, T> );
|
||||
|
||||
let where_clause = quote!( where T: Serialize );
|
||||
|
||||
let field_ty = quote!(String);
|
||||
|
||||
let item_ty = quote!(Cow<'a, str>);
|
||||
|
||||
let path = quote!(SomeTrait::serialize_with);
|
||||
|
||||
let value = quote!(self.x);
|
||||
|
||||
let tokens = quote! {
|
||||
struct SerializeWith #generics #where_clause {
|
||||
value: &'a #field_ty,
|
||||
phantom: ::std::marker::PhantomData<#item_ty>,
|
||||
}
|
||||
|
||||
impl #generics ::serde::Serialize for SerializeWith #generics #where_clause {
|
||||
fn serialize<S>(&self, s: &mut S) -> Result<(), S::Error>
|
||||
where S: ::serde::Serializer
|
||||
{
|
||||
#path(self.value, s)
|
||||
}
|
||||
}
|
||||
|
||||
SerializeWith {
|
||||
value: #value,
|
||||
phantom: ::std::marker::PhantomData::<#item_ty>,
|
||||
}
|
||||
};
|
||||
|
||||
let expected = concat!(
|
||||
"struct SerializeWith < 'a , T > where T : Serialize { ",
|
||||
"value : & 'a String , ",
|
||||
"phantom : :: std :: marker :: PhantomData < Cow < 'a , str > > , ",
|
||||
"} ",
|
||||
"impl < 'a , T > :: serde :: Serialize for SerializeWith < 'a , T > where T : Serialize { ",
|
||||
"fn serialize < S > ( & self , s : & mut S ) -> Result < ( ) , S :: Error > ",
|
||||
"where S : :: serde :: Serializer ",
|
||||
"{ ",
|
||||
"SomeTrait :: serialize_with ( self . value , s ) ",
|
||||
"} ",
|
||||
"} ",
|
||||
"SerializeWith { ",
|
||||
"value : self . x , ",
|
||||
"phantom : :: std :: marker :: PhantomData :: < Cow < 'a , str > > , ",
|
||||
"}"
|
||||
);
|
||||
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_integer() {
|
||||
let ii8 = -1i8;
|
||||
let ii16 = -1i16;
|
||||
let ii32 = -1i32;
|
||||
let ii64 = -1i64;
|
||||
let iisize = -1isize;
|
||||
let uu8 = 1u8;
|
||||
let uu16 = 1u16;
|
||||
let uu32 = 1u32;
|
||||
let uu64 = 1u64;
|
||||
let uusize = 1usize;
|
||||
|
||||
let tokens = quote! {
|
||||
#ii8 #ii16 #ii32 #ii64 #iisize
|
||||
#uu8 #uu16 #uu32 #uu64 #uusize
|
||||
};
|
||||
let expected = "-1i8 -1i16 -1i32 -1i64 -1isize 1u8 1u16 1u32 1u64 1usize";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_floating() {
|
||||
let e32 = 2.345f32;
|
||||
|
||||
let e64 = 2.345f64;
|
||||
|
||||
let tokens = quote! {
|
||||
#e32
|
||||
#e64
|
||||
};
|
||||
let expected = concat!("2.345f32 2.345f64");
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_char() {
|
||||
let zero = '\0';
|
||||
let pound = '#';
|
||||
let quote = '"';
|
||||
let apost = '\'';
|
||||
let newline = '\n';
|
||||
let heart = '\u{2764}';
|
||||
|
||||
let tokens = quote! {
|
||||
#zero #pound #quote #apost #newline #heart
|
||||
};
|
||||
let expected = "'\\u{0}' '#' '\\\"' '\\'' '\\n' '\\u{2764}'";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_str() {
|
||||
let s = "\0 a 'b \" c";
|
||||
let tokens = quote!(#s);
|
||||
let expected = "\"\\u{0} a \\'b \\\" c\"";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_string() {
|
||||
let s = "\0 a 'b \" c".to_string();
|
||||
let tokens = quote!(#s);
|
||||
let expected = "\"\\u{0} a \\'b \\\" c\"";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_ident() {
|
||||
let foo = Ident::new("Foo", Span::call_site());
|
||||
let bar = Ident::new(&format!("Bar{}", 7), Span::call_site());
|
||||
let tokens = quote!(struct #foo; enum #bar {});
|
||||
let expected = "struct Foo ; enum Bar7 { }";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_duplicate() {
|
||||
let ch = 'x';
|
||||
|
||||
let tokens = quote!(#ch #ch);
|
||||
|
||||
let expected = "'x' 'x'";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fancy_repetition() {
|
||||
let foo = vec!["a", "b"];
|
||||
let bar = vec![true, false];
|
||||
|
||||
let tokens = quote! {
|
||||
#(#foo: #bar),*
|
||||
};
|
||||
|
||||
let expected = r#""a" : true , "b" : false"#;
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_nested_fancy_repetition() {
|
||||
let nested = vec![vec!['a', 'b', 'c'], vec!['x', 'y', 'z']];
|
||||
|
||||
let tokens = quote! {
|
||||
#(
|
||||
#(#nested)*
|
||||
),*
|
||||
};
|
||||
|
||||
let expected = "'a' 'b' 'c' , 'x' 'y' 'z'";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_empty_repetition() {
|
||||
let tokens = quote!(#(a b)* #(c d),*);
|
||||
assert_eq!("", tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_variable_name_conflict() {
|
||||
// The implementation of `#(...),*` uses the variable `_i` but it should be
|
||||
// fine, if a little confusing when debugging.
|
||||
let _i = vec!['a', 'b'];
|
||||
let tokens = quote! { #(#_i),* };
|
||||
let expected = "'a' , 'b'";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_empty_quote() {
|
||||
let tokens = quote!();
|
||||
assert_eq!("", tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_box_str() {
|
||||
let b = "str".to_owned().into_boxed_str();
|
||||
let tokens = quote! { #b };
|
||||
assert_eq!("\"str\"", tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_cow() {
|
||||
let owned: Cow<Ident> = Cow::Owned(Ident::new("owned", Span::call_site()));
|
||||
|
||||
let ident = Ident::new("borrowed", Span::call_site());
|
||||
let borrowed = Cow::Borrowed(&ident);
|
||||
|
||||
let tokens = quote! { #owned #borrowed };
|
||||
assert_eq!("owned borrowed", tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_closure() {
|
||||
fn field_i(i: usize) -> Ident {
|
||||
Ident::new(&format!("__field{}", i), Span::call_site())
|
||||
}
|
||||
|
||||
let fields = (0usize..3)
|
||||
.map(field_i as fn(_) -> _)
|
||||
.map(|var| quote! { #var });
|
||||
|
||||
let tokens = quote! { #(#fields)* };
|
||||
assert_eq!("__field0 __field1 __field2", tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_append_tokens() {
|
||||
let mut a = quote!(a);
|
||||
let b = quote!(b);
|
||||
a.append_all(b);
|
||||
assert_eq!("a b", a.to_string());
|
||||
}
|
|
@ -1 +1 @@
|
|||
{"files":{"Cargo.toml":"b5c36a5bffa3623f84002fa884157ae303d2dae68d2f8a6d73ba87e82d7c56d7","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","README.md":"ea5abae24fdf6d9be51c80427bd12b95d146c4660e872599910cf062d6fbab9a","src/ext.rs":"a9fed3a1a4c9d3f2de717ba808af99291b995db2cbf8067f4b6927c39cc62bc6","src/format.rs":"46bf0859e6da5ec195a409ba8bbd2029d32a30d169c30c4c8aee7020f478a8a2","src/ident_fragment.rs":"0824dca06942d8e097d220db0ace0fe3ae7cf08f0a86e9828d012c131b6590c2","src/lib.rs":"bce63d6d9822373dab6f9a1f3df419b5753625e618474c304f05ab3b38845760","src/runtime.rs":"13263adfb56e2c597c69277b3500ab35ca8a08f60ba6a66f921ffa5cdc09bde2","src/spanned.rs":"adc0ed742ad17327c375879472d435cea168c208c303f53eb93cb2c0f10f3650","src/to_tokens.rs":"e589c1643479a9003d4dd1d9fa63714042b106f1b16d8ea3903cfe2f73a020f5","tests/compiletest.rs":"0a52a44786aea1c299c695bf948b2ed2081e4cc344e5c2cadceab4eb03d0010d","tests/test.rs":"92062fb9ba4a3b74345fede8e09e1d376107f98dcd79931a794433fa2d74aeb5","tests/ui/does-not-have-iter-interpolated-dup.rs":"ad13eea21d4cdd2ab6c082f633392e1ff20fb0d1af5f2177041e0bf7f30da695","tests/ui/does-not-have-iter-interpolated.rs":"83a5b3f240651adcbe4b6e51076d76d653ad439b37442cf4054f1fd3c073f3b7","tests/ui/does-not-have-iter-separated.rs":"fe413c48331d5e3a7ae5fef6a5892a90c72f610d54595879eb49d0a94154ba3f","tests/ui/does-not-have-iter.rs":"09dc9499d861b63cebb0848b855b78e2dc9497bfde37ba6339f3625ae009a62f","tests/ui/not-quotable.rs":"5759d0884943417609f28faadc70254a3e2fd3d9bd6ff7297a3fb70a77fafd8a","tests/ui/not-repeatable.rs":"b08405e02d46712d47e48ec8d0d68c93d8ebf3bb299714a373c2c954de79f6bd","tests/ui/wrong-type-span.rs":"5f310cb7fde3ef51bad01e7f286d244e3b6e67396cd2ea7eab77275c9d902699"},"package":"053a8c8bcc71fcce321828dc897a98ab9760bef03a4fc36693c231e5b3216cfe"}
|
||||
{"files":{"Cargo.toml":"68f4dc89836a05a2347086addab1849567ef8073c552ec0dfca8f96fd20550f9","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","README.md":"d9392d4c7af3bf9714f0a95801d64de46ffd4558cdfeea0eb85b414e555abb72","src/ext.rs":"03919239a20f8393288783a21bf6fdee12e405d13d162c9faa6f8f5ce54b003b","src/lib.rs":"5345b4d2e6f923724cec35c62d7397e6f04d5503d2d813bff7bbaa7ffc39a9cf","src/to_tokens.rs":"0dcd15cba2aa83abeb47b9a1babce7a29643b5efa2fe620b070cb37bb21a84f1","tests/conditional/integer128.rs":"d83e21a91efbaa801a82ae499111bdda2d31edaa620e78c0199eba42d69c9ee6","tests/test.rs":"810013d7fd77b738abd0ace90ce2f2f3e219c757652eabab29bc1c0ce4a73b24"},"package":"cdd8e04bd9c52e0342b406469d494fcb033be4bdbe5c606016defbb1681411e1"}
|
|
@ -3,7 +3,7 @@
|
|||
# When uploading crates to the registry Cargo will automatically
|
||||
# "normalize" Cargo.toml files for maximal compatibility
|
||||
# with all versions of Cargo and also rewrite `path` dependencies
|
||||
# to registry (e.g., crates.io) dependencies
|
||||
# to registry (e.g. crates.io) dependencies
|
||||
#
|
||||
# If you believe there's an error in this file please file an
|
||||
# issue against the rust-lang/cargo repository. If you're
|
||||
|
@ -11,9 +11,8 @@
|
|||
# will likely look very different (and much more reasonable)
|
||||
|
||||
[package]
|
||||
edition = "2018"
|
||||
name = "quote"
|
||||
version = "1.0.2"
|
||||
version = "0.6.11"
|
||||
authors = ["David Tolnay <dtolnay@gmail.com>"]
|
||||
include = ["Cargo.toml", "src/**/*.rs", "tests/**/*.rs", "README.md", "LICENSE-APACHE", "LICENSE-MIT"]
|
||||
description = "Quasi-quoting macro quote!(...)"
|
||||
|
@ -21,19 +20,11 @@ documentation = "https://docs.rs/quote/"
|
|||
readme = "README.md"
|
||||
keywords = ["syn"]
|
||||
categories = ["development-tools::procedural-macro-helpers"]
|
||||
license = "MIT OR Apache-2.0"
|
||||
license = "MIT/Apache-2.0"
|
||||
repository = "https://github.com/dtolnay/quote"
|
||||
|
||||
[lib]
|
||||
name = "quote"
|
||||
[dependencies.proc-macro2]
|
||||
version = "1.0"
|
||||
version = "0.4.21"
|
||||
default-features = false
|
||||
[dev-dependencies.rustversion]
|
||||
version = "0.1"
|
||||
|
||||
[dev-dependencies.trybuild]
|
||||
version = "1.0"
|
||||
|
||||
[features]
|
||||
default = ["proc-macro"]
|
||||
|
|
|
@ -8,13 +8,13 @@ Rust Quasi-Quoting
|
|||
This crate provides the [`quote!`] macro for turning Rust syntax tree data
|
||||
structures into tokens of source code.
|
||||
|
||||
[`quote!`]: https://docs.rs/quote/1.0/quote/macro.quote.html
|
||||
[`quote!`]: https://docs.rs/quote/0.6/quote/macro.quote.html
|
||||
|
||||
Procedural macros in Rust receive a stream of tokens as input, execute arbitrary
|
||||
Rust code to determine how to manipulate those tokens, and produce a stream of
|
||||
tokens to hand back to the compiler to compile into the caller's crate.
|
||||
Quasi-quoting is a solution to one piece of that — producing tokens to
|
||||
return to the compiler.
|
||||
Quasi-quoting is a solution to one piece of that -- producing tokens to return
|
||||
to the compiler.
|
||||
|
||||
The idea of quasi-quoting is that we write *code* that we treat as *data*.
|
||||
Within the `quote!` macro, we can write what looks like code to our text editor
|
||||
|
@ -35,7 +35,7 @@ first support for procedural macros in Rust 1.15.0.*
|
|||
|
||||
```toml
|
||||
[dependencies]
|
||||
quote = "1.0"
|
||||
quote = "0.6"
|
||||
```
|
||||
|
||||
## Syntax
|
||||
|
@ -44,13 +44,13 @@ The quote crate provides a [`quote!`] macro within which you can write Rust code
|
|||
that gets packaged into a [`TokenStream`] and can be treated as data. You should
|
||||
think of `TokenStream` as representing a fragment of Rust source code.
|
||||
|
||||
[`TokenStream`]: https://docs.rs/proc-macro2/1.0/proc_macro2/struct.TokenStream.html
|
||||
[`TokenStream`]: https://docs.rs/proc-macro2/0.4/proc_macro2/struct.TokenStream.html
|
||||
|
||||
Within the `quote!` macro, interpolation is done with `#var`. Any type
|
||||
implementing the [`quote::ToTokens`] trait can be interpolated. This includes
|
||||
most Rust primitive types as well as most of the syntax tree types from [`syn`].
|
||||
|
||||
[`quote::ToTokens`]: https://docs.rs/quote/1.0/quote/trait.ToTokens.html
|
||||
[`quote::ToTokens`]: https://docs.rs/quote/0.6/quote/trait.ToTokens.html
|
||||
[`syn`]: https://github.com/dtolnay/syn
|
||||
|
||||
```rust
|
||||
|
@ -148,20 +148,8 @@ quote! {
|
|||
}
|
||||
```
|
||||
|
||||
The solution is to build a new identifier token with the correct value. As this
|
||||
is such a common case, the `format_ident!` macro provides a convenient utility
|
||||
for doing so correctly.
|
||||
|
||||
```rust
|
||||
let varname = format_ident!("_{}", ident);
|
||||
quote! {
|
||||
let mut #varname = 0;
|
||||
}
|
||||
```
|
||||
|
||||
Alternatively, the APIs provided by Syn and proc-macro2 can be used to directly
|
||||
build the identifier. This is roughly equivalent to the above, but will not
|
||||
handle `ident` being a raw identifier.
|
||||
The solution is to perform token-level manipulations using the APIs provided by
|
||||
Syn and proc-macro2.
|
||||
|
||||
```rust
|
||||
let concatenated = format!("_{}", ident);
|
||||
|
@ -212,26 +200,42 @@ Any interpolated tokens preserve the `Span` information provided by their
|
|||
`ToTokens` implementation. Tokens that originate within a `quote!` invocation
|
||||
are spanned with [`Span::call_site()`].
|
||||
|
||||
[`Span::call_site()`]: https://docs.rs/proc-macro2/1.0/proc_macro2/struct.Span.html#method.call_site
|
||||
[`Span::call_site()`]: https://docs.rs/proc-macro2/0.4/proc_macro2/struct.Span.html#method.call_site
|
||||
|
||||
A different span can be provided explicitly through the [`quote_spanned!`]
|
||||
macro.
|
||||
|
||||
[`quote_spanned!`]: https://docs.rs/quote/1.0/quote/macro.quote_spanned.html
|
||||
[`quote_spanned!`]: https://docs.rs/quote/0.6/quote/macro.quote_spanned.html
|
||||
|
||||
<br>
|
||||
### Limitations
|
||||
|
||||
#### License
|
||||
- A non-repeating variable may not be interpolated inside of a repeating block
|
||||
([#7]).
|
||||
- The same variable may not be interpolated more than once inside of a repeating
|
||||
block ([#8]).
|
||||
|
||||
<sup>
|
||||
Licensed under either of <a href="LICENSE-APACHE">Apache License, Version
|
||||
2.0</a> or <a href="LICENSE-MIT">MIT license</a> at your option.
|
||||
</sup>
|
||||
[#7]: https://github.com/dtolnay/quote/issues/7
|
||||
[#8]: https://github.com/dtolnay/quote/issues/8
|
||||
|
||||
<br>
|
||||
### Recursion limit
|
||||
|
||||
The `quote!` macro relies on deep recursion so some large invocations may fail
|
||||
with "recursion limit reached" when you compile. If it fails, bump up the
|
||||
recursion limit by adding `#![recursion_limit = "128"]` to your crate. An even
|
||||
higher limit may be necessary for especially large invocations. You don't need
|
||||
this unless the compiler tells you that you need it.
|
||||
|
||||
## License
|
||||
|
||||
Licensed under either of
|
||||
|
||||
* Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
|
||||
* MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
|
||||
|
||||
at your option.
|
||||
|
||||
### Contribution
|
||||
|
||||
<sub>
|
||||
Unless you explicitly state otherwise, any contribution intentionally submitted
|
||||
for inclusion in this crate by you, as defined in the Apache-2.0 license, shall
|
||||
be dual licensed as above, without any additional terms or conditions.
|
||||
</sub>
|
||||
|
|
|
@ -17,7 +17,7 @@ pub trait TokenStreamExt: private::Sealed {
|
|||
|
||||
/// For use by `ToTokens` implementations.
|
||||
///
|
||||
/// ```
|
||||
/// ```edition2018
|
||||
/// # use quote::{quote, TokenStreamExt, ToTokens};
|
||||
/// # use proc_macro2::TokenStream;
|
||||
/// #
|
||||
|
@ -32,29 +32,29 @@ pub trait TokenStreamExt: private::Sealed {
|
|||
/// let tokens = quote!(#X);
|
||||
/// assert_eq!(tokens.to_string(), "true false");
|
||||
/// ```
|
||||
fn append_all<I>(&mut self, iter: I)
|
||||
fn append_all<T, I>(&mut self, iter: I)
|
||||
where
|
||||
I: IntoIterator,
|
||||
I::Item: ToTokens;
|
||||
T: ToTokens,
|
||||
I: IntoIterator<Item = T>;
|
||||
|
||||
/// For use by `ToTokens` implementations.
|
||||
///
|
||||
/// Appends all of the items in the iterator `I`, separated by the tokens
|
||||
/// `U`.
|
||||
fn append_separated<I, U>(&mut self, iter: I, op: U)
|
||||
fn append_separated<T, I, U>(&mut self, iter: I, op: U)
|
||||
where
|
||||
I: IntoIterator,
|
||||
I::Item: ToTokens,
|
||||
T: ToTokens,
|
||||
I: IntoIterator<Item = T>,
|
||||
U: ToTokens;
|
||||
|
||||
/// For use by `ToTokens` implementations.
|
||||
///
|
||||
/// Appends all tokens in the iterator `I`, appending `U` after each
|
||||
/// element, including after the last element of the iterator.
|
||||
fn append_terminated<I, U>(&mut self, iter: I, term: U)
|
||||
fn append_terminated<T, I, U>(&mut self, iter: I, term: U)
|
||||
where
|
||||
I: IntoIterator,
|
||||
I::Item: ToTokens,
|
||||
T: ToTokens,
|
||||
I: IntoIterator<Item = T>,
|
||||
U: ToTokens;
|
||||
}
|
||||
|
||||
|
@ -66,20 +66,20 @@ impl TokenStreamExt for TokenStream {
|
|||
self.extend(iter::once(token.into()));
|
||||
}
|
||||
|
||||
fn append_all<I>(&mut self, iter: I)
|
||||
fn append_all<T, I>(&mut self, iter: I)
|
||||
where
|
||||
I: IntoIterator,
|
||||
I::Item: ToTokens,
|
||||
T: ToTokens,
|
||||
I: IntoIterator<Item = T>,
|
||||
{
|
||||
for token in iter {
|
||||
token.to_tokens(self);
|
||||
}
|
||||
}
|
||||
|
||||
fn append_separated<I, U>(&mut self, iter: I, op: U)
|
||||
fn append_separated<T, I, U>(&mut self, iter: I, op: U)
|
||||
where
|
||||
I: IntoIterator,
|
||||
I::Item: ToTokens,
|
||||
T: ToTokens,
|
||||
I: IntoIterator<Item = T>,
|
||||
U: ToTokens,
|
||||
{
|
||||
for (i, token) in iter.into_iter().enumerate() {
|
||||
|
@ -90,10 +90,10 @@ impl TokenStreamExt for TokenStream {
|
|||
}
|
||||
}
|
||||
|
||||
fn append_terminated<I, U>(&mut self, iter: I, term: U)
|
||||
fn append_terminated<T, I, U>(&mut self, iter: I, term: U)
|
||||
where
|
||||
I: IntoIterator,
|
||||
I::Item: ToTokens,
|
||||
T: ToTokens,
|
||||
I: IntoIterator<Item = T>,
|
||||
U: ToTokens,
|
||||
{
|
||||
for token in iter {
|
||||
|
|
|
@ -1,164 +0,0 @@
|
|||
/// Formatting macro for constructing `Ident`s.
|
||||
///
|
||||
/// <br>
|
||||
///
|
||||
/// # Syntax
|
||||
///
|
||||
/// Syntax is copied from the [`format!`] macro, supporting both positional and
|
||||
/// named arguments.
|
||||
///
|
||||
/// Only a limited set of formatting traits are supported. The current mapping
|
||||
/// of format types to traits is:
|
||||
///
|
||||
/// * `{}` ⇒ [`IdentFragment`]
|
||||
/// * `{:o}` ⇒ [`Octal`](`std::fmt::Octal`)
|
||||
/// * `{:x}` ⇒ [`LowerHex`](`std::fmt::LowerHex`)
|
||||
/// * `{:X}` ⇒ [`UpperHex`](`std::fmt::UpperHex`)
|
||||
/// * `{:b}` ⇒ [`Binary`](`std::fmt::Binary`)
|
||||
///
|
||||
/// See [`std::fmt`] for more information.
|
||||
///
|
||||
/// <br>
|
||||
///
|
||||
/// # IdentFragment
|
||||
///
|
||||
/// Unlike `format!`, this macro uses the [`IdentFragment`] formatting trait by
|
||||
/// default. This trait is like `Display`, with a few differences:
|
||||
///
|
||||
/// * `IdentFragment` is only implemented for a limited set of types, such as
|
||||
/// unsigned integers and strings.
|
||||
/// * [`Ident`] arguments will have their `r#` prefixes stripped, if present.
|
||||
///
|
||||
/// [`Ident`]: `proc_macro2::Ident`
|
||||
///
|
||||
/// <br>
|
||||
///
|
||||
/// # Hygiene
|
||||
///
|
||||
/// The [`Span`] of the first `Ident` argument is used as the span of the final
|
||||
/// identifier, falling back to [`Span::call_site`] when no identifiers are
|
||||
/// provided.
|
||||
///
|
||||
/// ```
|
||||
/// # use quote::format_ident;
|
||||
/// # let ident = format_ident!("Ident");
|
||||
/// // If `ident` is an Ident, the span of `my_ident` will be inherited from it.
|
||||
/// let my_ident = format_ident!("My{}{}", ident, "IsCool");
|
||||
/// assert_eq!(my_ident, "MyIdentIsCool");
|
||||
/// ```
|
||||
///
|
||||
/// Alternatively, the span can be overridden by passing the `span` named
|
||||
/// argument.
|
||||
///
|
||||
/// ```
|
||||
/// # use quote::format_ident;
|
||||
/// # const IGNORE_TOKENS: &'static str = stringify! {
|
||||
/// let my_span = /* ... */;
|
||||
/// # };
|
||||
/// # let my_span = proc_macro2::Span::call_site();
|
||||
/// format_ident!("MyIdent", span = my_span);
|
||||
/// ```
|
||||
///
|
||||
/// [`Span`]: `proc_macro2::Span`
|
||||
/// [`Span::call_site`]: `proc_macro2::Span::call_site`
|
||||
///
|
||||
/// <p><br></p>
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// This method will panic if the resulting formatted string is not a valid
|
||||
/// identifier.
|
||||
///
|
||||
/// <br>
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Composing raw and non-raw identifiers:
|
||||
/// ```
|
||||
/// # use quote::format_ident;
|
||||
/// let my_ident = format_ident!("My{}", "Ident");
|
||||
/// assert_eq!(my_ident, "MyIdent");
|
||||
///
|
||||
/// let raw = format_ident!("r#Raw");
|
||||
/// assert_eq!(raw, "r#Raw");
|
||||
///
|
||||
/// let my_ident_raw = format_ident!("{}Is{}", my_ident, raw);
|
||||
/// assert_eq!(my_ident_raw, "MyIdentIsRaw");
|
||||
/// ```
|
||||
///
|
||||
/// Integer formatting options:
|
||||
/// ```
|
||||
/// # use quote::format_ident;
|
||||
/// let num: u32 = 10;
|
||||
///
|
||||
/// let decimal = format_ident!("Id_{}", num);
|
||||
/// assert_eq!(decimal, "Id_10");
|
||||
///
|
||||
/// let octal = format_ident!("Id_{:o}", num);
|
||||
/// assert_eq!(octal, "Id_12");
|
||||
///
|
||||
/// let binary = format_ident!("Id_{:b}", num);
|
||||
/// assert_eq!(binary, "Id_1010");
|
||||
///
|
||||
/// let lower_hex = format_ident!("Id_{:x}", num);
|
||||
/// assert_eq!(lower_hex, "Id_a");
|
||||
///
|
||||
/// let upper_hex = format_ident!("Id_{:X}", num);
|
||||
/// assert_eq!(upper_hex, "Id_A");
|
||||
/// ```
|
||||
#[macro_export]
|
||||
macro_rules! format_ident {
|
||||
($fmt:expr) => {
|
||||
$crate::format_ident_impl!([
|
||||
::std::option::Option::None,
|
||||
$fmt
|
||||
])
|
||||
};
|
||||
|
||||
($fmt:expr, $($rest:tt)*) => {
|
||||
$crate::format_ident_impl!([
|
||||
::std::option::Option::None,
|
||||
$fmt
|
||||
] $($rest)*)
|
||||
};
|
||||
}
|
||||
|
||||
#[macro_export]
|
||||
#[doc(hidden)]
|
||||
macro_rules! format_ident_impl {
|
||||
// Final state
|
||||
([$span:expr, $($fmt:tt)*]) => {
|
||||
$crate::__rt::mk_ident(&format!($($fmt)*), $span)
|
||||
};
|
||||
|
||||
// Span argument
|
||||
([$old:expr, $($fmt:tt)*] span = $span:expr) => {
|
||||
$crate::format_ident_impl!([$old, $($fmt)*] span = $span,)
|
||||
};
|
||||
([$old:expr, $($fmt:tt)*] span = $span:expr, $($rest:tt)*) => {
|
||||
$crate::format_ident_impl!([
|
||||
::std::option::Option::Some::<$crate::__rt::Span>($span),
|
||||
$($fmt)*
|
||||
] $($rest)*)
|
||||
};
|
||||
|
||||
// Named argument
|
||||
([$span:expr, $($fmt:tt)*] $name:ident = $arg:expr) => {
|
||||
$crate::format_ident_impl!([$span, $($fmt)*] $name = $arg,)
|
||||
};
|
||||
([$span:expr, $($fmt:tt)*] $name:ident = $arg:expr, $($rest:tt)*) => {
|
||||
match $crate::__rt::IdentFragmentAdapter(&$arg) {
|
||||
arg => $crate::format_ident_impl!([$span.or(arg.span()), $($fmt)*, $name = arg] $($rest)*),
|
||||
}
|
||||
};
|
||||
|
||||
// Positional argument
|
||||
([$span:expr, $($fmt:tt)*] $arg:expr) => {
|
||||
$crate::format_ident_impl!([$span, $($fmt)*] $arg,)
|
||||
};
|
||||
([$span:expr, $($fmt:tt)*] $arg:expr, $($rest:tt)*) => {
|
||||
match $crate::__rt::IdentFragmentAdapter(&$arg) {
|
||||
arg => $crate::format_ident_impl!([$span.or(arg.span()), $($fmt)*, arg] $($rest)*),
|
||||
}
|
||||
};
|
||||
}
|
|
@ -1,72 +0,0 @@
|
|||
use proc_macro2::{Ident, Span};
|
||||
use std::fmt;
|
||||
|
||||
/// Specialized formatting trait used by `format_ident!`.
|
||||
///
|
||||
/// [`Ident`] arguments formatted using this trait will have their `r#` prefix
|
||||
/// stripped, if present.
|
||||
///
|
||||
/// See [`format_ident!`] for more information.
|
||||
pub trait IdentFragment {
|
||||
/// Format this value as an identifier fragment.
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result;
|
||||
|
||||
/// Span associated with this `IdentFragment`.
|
||||
///
|
||||
/// If non-`None`, may be inherited by formatted identifiers.
|
||||
fn span(&self) -> Option<Span> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T: IdentFragment + ?Sized> IdentFragment for &'a T {
|
||||
fn span(&self) -> Option<Span> {
|
||||
<T as IdentFragment>::span(*self)
|
||||
}
|
||||
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
IdentFragment::fmt(*self, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T: IdentFragment + ?Sized> IdentFragment for &'a mut T {
|
||||
fn span(&self) -> Option<Span> {
|
||||
<T as IdentFragment>::span(*self)
|
||||
}
|
||||
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
IdentFragment::fmt(*self, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl IdentFragment for Ident {
|
||||
fn span(&self) -> Option<Span> {
|
||||
Some(self.span())
|
||||
}
|
||||
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
let id = self.to_string();
|
||||
if id.starts_with("r#") {
|
||||
fmt::Display::fmt(&id[2..], f)
|
||||
} else {
|
||||
fmt::Display::fmt(&id[..], f)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Limited set of types which this is implemented for, as we want to avoid types
|
||||
// which will often include non-identifier characters in their `Display` impl.
|
||||
macro_rules! ident_fragment_display {
|
||||
($($T:ty),*) => {
|
||||
$(
|
||||
impl IdentFragment for $T {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
fmt::Display::fmt(self, f)
|
||||
}
|
||||
}
|
||||
)*
|
||||
}
|
||||
}
|
||||
|
||||
ident_fragment_display!(bool, str, String);
|
||||
ident_fragment_display!(u8, u16, u32, u64, u128, usize);
|
Разница между файлами не показана из-за своего большого размера
Загрузить разницу
|
@ -1,373 +0,0 @@
|
|||
use crate::{IdentFragment, ToTokens, TokenStreamExt};
|
||||
use std::fmt;
|
||||
use std::ops::BitOr;
|
||||
|
||||
pub use proc_macro2::*;
|
||||
|
||||
pub struct HasIterator; // True
|
||||
pub struct ThereIsNoIteratorInRepetition; // False
|
||||
|
||||
impl BitOr<ThereIsNoIteratorInRepetition> for ThereIsNoIteratorInRepetition {
|
||||
type Output = ThereIsNoIteratorInRepetition;
|
||||
fn bitor(self, _rhs: ThereIsNoIteratorInRepetition) -> ThereIsNoIteratorInRepetition {
|
||||
ThereIsNoIteratorInRepetition
|
||||
}
|
||||
}
|
||||
|
||||
impl BitOr<ThereIsNoIteratorInRepetition> for HasIterator {
|
||||
type Output = HasIterator;
|
||||
fn bitor(self, _rhs: ThereIsNoIteratorInRepetition) -> HasIterator {
|
||||
HasIterator
|
||||
}
|
||||
}
|
||||
|
||||
impl BitOr<HasIterator> for ThereIsNoIteratorInRepetition {
|
||||
type Output = HasIterator;
|
||||
fn bitor(self, _rhs: HasIterator) -> HasIterator {
|
||||
HasIterator
|
||||
}
|
||||
}
|
||||
|
||||
impl BitOr<HasIterator> for HasIterator {
|
||||
type Output = HasIterator;
|
||||
fn bitor(self, _rhs: HasIterator) -> HasIterator {
|
||||
HasIterator
|
||||
}
|
||||
}
|
||||
|
||||
/// Extension traits used by the implementation of `quote!`. These are defined
|
||||
/// in separate traits, rather than as a single trait due to ambiguity issues.
|
||||
///
|
||||
/// These traits expose a `quote_into_iter` method which should allow calling
|
||||
/// whichever impl happens to be applicable. Calling that method repeatedly on
|
||||
/// the returned value should be idempotent.
|
||||
pub mod ext {
|
||||
use super::RepInterp;
|
||||
use super::{HasIterator as HasIter, ThereIsNoIteratorInRepetition as DoesNotHaveIter};
|
||||
use crate::ToTokens;
|
||||
use std::collections::btree_set::{self, BTreeSet};
|
||||
use std::slice;
|
||||
|
||||
/// Extension trait providing the `quote_into_iter` method on iterators.
|
||||
pub trait RepIteratorExt: Iterator + Sized {
|
||||
fn quote_into_iter(self) -> (Self, HasIter) {
|
||||
(self, HasIter)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Iterator> RepIteratorExt for T {}
|
||||
|
||||
/// Extension trait providing the `quote_into_iter` method for
|
||||
/// non-iterable types. These types interpolate the same value in each
|
||||
/// iteration of the repetition.
|
||||
pub trait RepToTokensExt {
|
||||
/// Pretend to be an iterator for the purposes of `quote_into_iter`.
|
||||
/// This allows repeated calls to `quote_into_iter` to continue
|
||||
/// correctly returning DoesNotHaveIter.
|
||||
fn next(&self) -> Option<&Self> {
|
||||
Some(self)
|
||||
}
|
||||
|
||||
fn quote_into_iter(&self) -> (&Self, DoesNotHaveIter) {
|
||||
(self, DoesNotHaveIter)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ToTokens + ?Sized> RepToTokensExt for T {}
|
||||
|
||||
/// Extension trait providing the `quote_into_iter` method for types that
|
||||
/// can be referenced as an iterator.
|
||||
pub trait RepAsIteratorExt<'q> {
|
||||
type Iter: Iterator;
|
||||
|
||||
fn quote_into_iter(&'q self) -> (Self::Iter, HasIter);
|
||||
}
|
||||
|
||||
impl<'q, 'a, T: RepAsIteratorExt<'q> + ?Sized> RepAsIteratorExt<'q> for &'a T {
|
||||
type Iter = T::Iter;
|
||||
|
||||
fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) {
|
||||
<T as RepAsIteratorExt>::quote_into_iter(*self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'q, 'a, T: RepAsIteratorExt<'q> + ?Sized> RepAsIteratorExt<'q> for &'a mut T {
|
||||
type Iter = T::Iter;
|
||||
|
||||
fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) {
|
||||
<T as RepAsIteratorExt>::quote_into_iter(*self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'q, T: 'q> RepAsIteratorExt<'q> for [T] {
|
||||
type Iter = slice::Iter<'q, T>;
|
||||
|
||||
fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) {
|
||||
(self.iter(), HasIter)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'q, T: 'q> RepAsIteratorExt<'q> for Vec<T> {
|
||||
type Iter = slice::Iter<'q, T>;
|
||||
|
||||
fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) {
|
||||
(self.iter(), HasIter)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'q, T: 'q> RepAsIteratorExt<'q> for BTreeSet<T> {
|
||||
type Iter = btree_set::Iter<'q, T>;
|
||||
|
||||
fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) {
|
||||
(self.iter(), HasIter)
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! array_rep_slice {
|
||||
($($l:tt)*) => {
|
||||
$(
|
||||
impl<'q, T: 'q> RepAsIteratorExt<'q> for [T; $l] {
|
||||
type Iter = slice::Iter<'q, T>;
|
||||
|
||||
fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) {
|
||||
(self.iter(), HasIter)
|
||||
}
|
||||
}
|
||||
)*
|
||||
}
|
||||
}
|
||||
|
||||
array_rep_slice!(
|
||||
0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
|
||||
17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32
|
||||
);
|
||||
|
||||
impl<'q, T: RepAsIteratorExt<'q>> RepAsIteratorExt<'q> for RepInterp<T> {
|
||||
type Iter = T::Iter;
|
||||
|
||||
fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) {
|
||||
self.0.quote_into_iter()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Helper type used within interpolations to allow for repeated binding names.
|
||||
// Implements the relevant traits, and exports a dummy `next()` method.
|
||||
#[derive(Copy, Clone)]
|
||||
pub struct RepInterp<T>(pub T);
|
||||
|
||||
impl<T> RepInterp<T> {
|
||||
// This method is intended to look like `Iterator::next`, and is called when
|
||||
// a name is bound multiple times, as the previous binding will shadow the
|
||||
// original `Iterator` object. This allows us to avoid advancing the
|
||||
// iterator multiple times per iteration.
|
||||
pub fn next(self) -> Option<T> {
|
||||
Some(self.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Iterator> Iterator for RepInterp<T> {
|
||||
type Item = T::Item;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.0.next()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ToTokens> ToTokens for RepInterp<T> {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.0.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
fn is_ident_start(c: u8) -> bool {
|
||||
(b'a' <= c && c <= b'z') || (b'A' <= c && c <= b'Z') || c == b'_'
|
||||
}
|
||||
|
||||
fn is_ident_continue(c: u8) -> bool {
|
||||
(b'a' <= c && c <= b'z') || (b'A' <= c && c <= b'Z') || c == b'_' || (b'0' <= c && c <= b'9')
|
||||
}
|
||||
|
||||
fn is_ident(token: &str) -> bool {
|
||||
let mut iter = token.bytes();
|
||||
let first_ok = iter.next().map(is_ident_start).unwrap_or(false);
|
||||
|
||||
first_ok && iter.all(is_ident_continue)
|
||||
}
|
||||
|
||||
pub fn parse(tokens: &mut TokenStream, span: Span, s: &str) {
|
||||
if is_ident(s) {
|
||||
// Fast path, since idents are the most common token.
|
||||
tokens.append(Ident::new(s, span));
|
||||
} else {
|
||||
let s: TokenStream = s.parse().expect("invalid token stream");
|
||||
tokens.extend(s.into_iter().map(|mut t| {
|
||||
t.set_span(span);
|
||||
t
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! push_punct {
|
||||
($name:ident $char1:tt) => {
|
||||
pub fn $name(tokens: &mut TokenStream, span: Span) {
|
||||
let mut punct = Punct::new($char1, Spacing::Alone);
|
||||
punct.set_span(span);
|
||||
tokens.append(punct);
|
||||
}
|
||||
};
|
||||
($name:ident $char1:tt $char2:tt) => {
|
||||
pub fn $name(tokens: &mut TokenStream, span: Span) {
|
||||
let mut punct = Punct::new($char1, Spacing::Joint);
|
||||
punct.set_span(span);
|
||||
tokens.append(punct);
|
||||
let mut punct = Punct::new($char2, Spacing::Alone);
|
||||
punct.set_span(span);
|
||||
tokens.append(punct);
|
||||
}
|
||||
};
|
||||
($name:ident $char1:tt $char2:tt $char3:tt) => {
|
||||
pub fn $name(tokens: &mut TokenStream, span: Span) {
|
||||
let mut punct = Punct::new($char1, Spacing::Joint);
|
||||
punct.set_span(span);
|
||||
tokens.append(punct);
|
||||
let mut punct = Punct::new($char2, Spacing::Joint);
|
||||
punct.set_span(span);
|
||||
tokens.append(punct);
|
||||
let mut punct = Punct::new($char3, Spacing::Alone);
|
||||
punct.set_span(span);
|
||||
tokens.append(punct);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
push_punct!(push_add '+');
|
||||
push_punct!(push_add_eq '+' '=');
|
||||
push_punct!(push_and '&');
|
||||
push_punct!(push_and_and '&' '&');
|
||||
push_punct!(push_and_eq '&' '=');
|
||||
push_punct!(push_at '@');
|
||||
push_punct!(push_bang '!');
|
||||
push_punct!(push_caret '^');
|
||||
push_punct!(push_caret_eq '^' '=');
|
||||
push_punct!(push_colon ':');
|
||||
push_punct!(push_colon2 ':' ':');
|
||||
push_punct!(push_comma ',');
|
||||
push_punct!(push_div '/');
|
||||
push_punct!(push_div_eq '/' '=');
|
||||
push_punct!(push_dot '.');
|
||||
push_punct!(push_dot2 '.' '.');
|
||||
push_punct!(push_dot3 '.' '.' '.');
|
||||
push_punct!(push_dot_dot_eq '.' '.' '=');
|
||||
push_punct!(push_eq '=');
|
||||
push_punct!(push_eq_eq '=' '=');
|
||||
push_punct!(push_ge '>' '=');
|
||||
push_punct!(push_gt '>');
|
||||
push_punct!(push_le '<' '=');
|
||||
push_punct!(push_lt '<');
|
||||
push_punct!(push_mul_eq '*' '=');
|
||||
push_punct!(push_ne '!' '=');
|
||||
push_punct!(push_or '|');
|
||||
push_punct!(push_or_eq '|' '=');
|
||||
push_punct!(push_or_or '|' '|');
|
||||
push_punct!(push_pound '#');
|
||||
push_punct!(push_question '?');
|
||||
push_punct!(push_rarrow '-' '>');
|
||||
push_punct!(push_larrow '<' '-');
|
||||
push_punct!(push_rem '%');
|
||||
push_punct!(push_rem_eq '%' '=');
|
||||
push_punct!(push_fat_arrow '=' '>');
|
||||
push_punct!(push_semi ';');
|
||||
push_punct!(push_shl '<' '<');
|
||||
push_punct!(push_shl_eq '<' '<' '=');
|
||||
push_punct!(push_shr '>' '>');
|
||||
push_punct!(push_shr_eq '>' '>' '=');
|
||||
push_punct!(push_star '*');
|
||||
push_punct!(push_sub '-');
|
||||
push_punct!(push_sub_eq '-' '=');
|
||||
|
||||
// Helper method for constructing identifiers from the `format_ident!` macro,
|
||||
// handling `r#` prefixes.
|
||||
//
|
||||
// Directly parsing the input string may produce a valid identifier,
|
||||
// although the input string was invalid, due to ignored characters such as
|
||||
// whitespace and comments. Instead, we always create a non-raw identifier
|
||||
// to validate that the string is OK, and only parse again if needed.
|
||||
//
|
||||
// The `is_ident` method defined above is insufficient for validation, as it
|
||||
// will reject non-ASCII identifiers.
|
||||
pub fn mk_ident(id: &str, span: Option<Span>) -> Ident {
|
||||
let span = span.unwrap_or_else(Span::call_site);
|
||||
|
||||
let is_raw = id.starts_with("r#");
|
||||
let unraw = Ident::new(if is_raw { &id[2..] } else { id }, span);
|
||||
if !is_raw {
|
||||
return unraw;
|
||||
}
|
||||
|
||||
// At this point, the identifier is raw, and the unraw-ed version of it was
|
||||
// successfully converted into an identifier. Try to produce a valid raw
|
||||
// identifier by running the `TokenStream` parser, and unwrapping the first
|
||||
// token as an `Ident`.
|
||||
//
|
||||
// FIXME: When `Ident::new_raw` becomes stable, this method should be
|
||||
// updated to call it when available.
|
||||
match id.parse::<TokenStream>() {
|
||||
Ok(ts) => {
|
||||
let mut iter = ts.into_iter();
|
||||
match (iter.next(), iter.next()) {
|
||||
(Some(TokenTree::Ident(mut id)), None) => {
|
||||
id.set_span(span);
|
||||
id
|
||||
}
|
||||
_ => unreachable!("valid raw ident fails to parse"),
|
||||
}
|
||||
}
|
||||
Err(_) => unreachable!("valid raw ident fails to parse"),
|
||||
}
|
||||
}
|
||||
|
||||
// Adapts from `IdentFragment` to `fmt::Display` for use by the `format_ident!`
|
||||
// macro, and exposes span information from these fragments.
|
||||
//
|
||||
// This struct also has forwarding implementations of the formatting traits
|
||||
// `Octal`, `LowerHex`, `UpperHex`, and `Binary` to allow for their use within
|
||||
// `format_ident!`.
|
||||
#[derive(Copy, Clone)]
|
||||
pub struct IdentFragmentAdapter<T: IdentFragment>(pub T);
|
||||
|
||||
impl<T: IdentFragment> IdentFragmentAdapter<T> {
|
||||
pub fn span(&self) -> Option<Span> {
|
||||
self.0.span()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: IdentFragment> fmt::Display for IdentFragmentAdapter<T> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
IdentFragment::fmt(&self.0, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: IdentFragment + fmt::Octal> fmt::Octal for IdentFragmentAdapter<T> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
fmt::Octal::fmt(&self.0, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: IdentFragment + fmt::LowerHex> fmt::LowerHex for IdentFragmentAdapter<T> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
fmt::LowerHex::fmt(&self.0, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: IdentFragment + fmt::UpperHex> fmt::UpperHex for IdentFragmentAdapter<T> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
fmt::UpperHex::fmt(&self.0, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: IdentFragment + fmt::Binary> fmt::Binary for IdentFragmentAdapter<T> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
fmt::Binary::fmt(&self.0, f)
|
||||
}
|
||||
}
|
|
@ -1,42 +0,0 @@
|
|||
use crate::ToTokens;
|
||||
use proc_macro2::{Span, TokenStream};
|
||||
|
||||
pub trait Spanned {
|
||||
fn __span(&self) -> Span;
|
||||
}
|
||||
|
||||
impl Spanned for Span {
|
||||
fn __span(&self) -> Span {
|
||||
*self
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + ToTokens> Spanned for T {
|
||||
fn __span(&self) -> Span {
|
||||
join_spans(self.into_token_stream())
|
||||
}
|
||||
}
|
||||
|
||||
fn join_spans(tokens: TokenStream) -> Span {
|
||||
let mut iter = tokens.into_iter().filter_map(|tt| {
|
||||
// FIXME: This shouldn't be required, since optimally spans should
|
||||
// never be invalid. This filter_map can probably be removed when
|
||||
// https://github.com/rust-lang/rust/issues/43081 is resolved.
|
||||
let span = tt.span();
|
||||
let debug = format!("{:?}", span);
|
||||
if debug.ends_with("bytes(0..0)") {
|
||||
None
|
||||
} else {
|
||||
Some(span)
|
||||
}
|
||||
});
|
||||
|
||||
let first = match iter.next() {
|
||||
Some(span) => span,
|
||||
None => return Span::call_site(),
|
||||
};
|
||||
|
||||
iter.fold(None, |_prev, next| Some(next))
|
||||
.and_then(|last| first.join(last))
|
||||
.unwrap_or(first)
|
||||
}
|
|
@ -2,11 +2,10 @@ use super::TokenStreamExt;
|
|||
|
||||
use std::borrow::Cow;
|
||||
use std::iter;
|
||||
use std::rc::Rc;
|
||||
|
||||
use proc_macro2::{Group, Ident, Literal, Punct, Span, TokenStream, TokenTree};
|
||||
|
||||
/// Types that can be interpolated inside a `quote!` invocation.
|
||||
/// Types that can be interpolated inside a [`quote!`] invocation.
|
||||
///
|
||||
/// [`quote!`]: macro.quote.html
|
||||
pub trait ToTokens {
|
||||
|
@ -22,7 +21,7 @@ pub trait ToTokens {
|
|||
/// Example implementation for a struct representing Rust paths like
|
||||
/// `std::cmp::PartialEq`:
|
||||
///
|
||||
/// ```
|
||||
/// ```edition2018
|
||||
/// use proc_macro2::{TokenTree, Spacing, Span, Punct, TokenStream};
|
||||
/// use quote::{TokenStreamExt, ToTokens};
|
||||
///
|
||||
|
@ -54,16 +53,6 @@ pub trait ToTokens {
|
|||
/// ```
|
||||
fn to_tokens(&self, tokens: &mut TokenStream);
|
||||
|
||||
/// Convert `self` directly into a `TokenStream` object.
|
||||
///
|
||||
/// This method is implicitly implemented using `to_tokens`, and acts as a
|
||||
/// convenience method for consumers of the `ToTokens` trait.
|
||||
fn to_token_stream(&self) -> TokenStream {
|
||||
let mut tokens = TokenStream::new();
|
||||
self.to_tokens(&mut tokens);
|
||||
tokens
|
||||
}
|
||||
|
||||
/// Convert `self` directly into a `TokenStream` object.
|
||||
///
|
||||
/// This method is implicitly implemented using `to_tokens`, and acts as a
|
||||
|
@ -72,7 +61,9 @@ pub trait ToTokens {
|
|||
where
|
||||
Self: Sized,
|
||||
{
|
||||
self.to_token_stream()
|
||||
let mut tokens = TokenStream::new();
|
||||
self.to_tokens(&mut tokens);
|
||||
tokens
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -100,12 +91,6 @@ impl<T: ?Sized + ToTokens> ToTokens for Box<T> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + ToTokens> ToTokens for Rc<T> {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
(**self).to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ToTokens> ToTokens for Option<T> {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
if let Some(ref t) = *self {
|
||||
|
@ -141,20 +126,24 @@ primitive! {
|
|||
i16 => i16_suffixed
|
||||
i32 => i32_suffixed
|
||||
i64 => i64_suffixed
|
||||
i128 => i128_suffixed
|
||||
isize => isize_suffixed
|
||||
|
||||
u8 => u8_suffixed
|
||||
u16 => u16_suffixed
|
||||
u32 => u32_suffixed
|
||||
u64 => u64_suffixed
|
||||
u128 => u128_suffixed
|
||||
usize => usize_suffixed
|
||||
|
||||
f32 => f32_suffixed
|
||||
f64 => f64_suffixed
|
||||
}
|
||||
|
||||
#[cfg(integer128)]
|
||||
primitive! {
|
||||
i128 => i128_suffixed
|
||||
u128 => u128_suffixed
|
||||
}
|
||||
|
||||
impl ToTokens for char {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append(Literal::character(*self));
|
||||
|
|
|
@ -1,6 +0,0 @@
|
|||
#[rustversion::attr(not(nightly), ignore)]
|
||||
#[test]
|
||||
fn ui() {
|
||||
let t = trybuild::TestCases::new();
|
||||
t.compile_fail("tests/ui/*.rs");
|
||||
}
|
|
@ -1,10 +1,18 @@
|
|||
#![cfg_attr(feature = "cargo-clippy", allow(blacklisted_name))]
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::collections::BTreeSet;
|
||||
|
||||
extern crate proc_macro2;
|
||||
#[macro_use]
|
||||
extern crate quote;
|
||||
|
||||
use proc_macro2::{Ident, Span, TokenStream};
|
||||
use quote::{format_ident, quote, TokenStreamExt};
|
||||
use quote::TokenStreamExt;
|
||||
|
||||
mod conditional {
|
||||
#[cfg(integer128)]
|
||||
mod integer128;
|
||||
}
|
||||
|
||||
struct X;
|
||||
|
||||
|
@ -117,20 +125,18 @@ fn test_integer() {
|
|||
let ii16 = -1i16;
|
||||
let ii32 = -1i32;
|
||||
let ii64 = -1i64;
|
||||
let ii128 = -1i128;
|
||||
let iisize = -1isize;
|
||||
let uu8 = 1u8;
|
||||
let uu16 = 1u16;
|
||||
let uu32 = 1u32;
|
||||
let uu64 = 1u64;
|
||||
let uu128 = 1u128;
|
||||
let uusize = 1usize;
|
||||
|
||||
let tokens = quote! {
|
||||
#ii8 #ii16 #ii32 #ii64 #ii128 #iisize
|
||||
#uu8 #uu16 #uu32 #uu64 #uu128 #uusize
|
||||
#ii8 #ii16 #ii32 #ii64 #iisize
|
||||
#uu8 #uu16 #uu32 #uu64 #uusize
|
||||
};
|
||||
let expected = "-1i8 -1i16 -1i32 -1i64 -1i128 -1isize 1u8 1u16 1u32 1u64 1u128 1usize";
|
||||
let expected = "-1i8 -1i16 -1i32 -1i64 -1isize 1u8 1u16 1u32 1u64 1usize";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
|
@ -160,7 +166,7 @@ fn test_char() {
|
|||
let tokens = quote! {
|
||||
#zero #pound #quote #apost #newline #heart
|
||||
};
|
||||
let expected = "'\\u{0}' '#' '\"' '\\'' '\\n' '\\u{2764}'";
|
||||
let expected = "'\\u{0}' '#' '\\\"' '\\'' '\\n' '\\u{2764}'";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
|
@ -168,7 +174,7 @@ fn test_char() {
|
|||
fn test_str() {
|
||||
let s = "\0 a 'b \" c";
|
||||
let tokens = quote!(#s);
|
||||
let expected = "\"\\u{0} a 'b \\\" c\"";
|
||||
let expected = "\"\\u{0} a \\'b \\\" c\"";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
|
@ -176,7 +182,7 @@ fn test_str() {
|
|||
fn test_string() {
|
||||
let s = "\0 a 'b \" c".to_string();
|
||||
let tokens = quote!(#s);
|
||||
let expected = "\"\\u{0} a 'b \\\" c\"";
|
||||
let expected = "\"\\u{0} a \\'b \\\" c\"";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
|
@ -227,42 +233,9 @@ fn test_nested_fancy_repetition() {
|
|||
}
|
||||
|
||||
#[test]
|
||||
fn test_duplicate_name_repetition() {
|
||||
let foo = &["a", "b"];
|
||||
|
||||
let tokens = quote! {
|
||||
#(#foo: #foo),*
|
||||
#(#foo: #foo),*
|
||||
};
|
||||
|
||||
let expected = r#""a" : "a" , "b" : "b" "a" : "a" , "b" : "b""#;
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_duplicate_name_repetition_no_copy() {
|
||||
let foo = vec!["a".to_owned(), "b".to_owned()];
|
||||
|
||||
let tokens = quote! {
|
||||
#(#foo: #foo),*
|
||||
};
|
||||
|
||||
let expected = r#""a" : "a" , "b" : "b""#;
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_btreeset_repetition() {
|
||||
let mut set = BTreeSet::new();
|
||||
set.insert("a".to_owned());
|
||||
set.insert("b".to_owned());
|
||||
|
||||
let tokens = quote! {
|
||||
#(#set: #set),*
|
||||
};
|
||||
|
||||
let expected = r#""a" : "a" , "b" : "b""#;
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
fn test_empty_repetition() {
|
||||
let tokens = quote!(#(a b)* #(c d),*);
|
||||
assert_eq!("", tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -275,19 +248,6 @@ fn test_variable_name_conflict() {
|
|||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_nonrep_in_repetition() {
|
||||
let rep = vec!["a", "b"];
|
||||
let nonrep = "c";
|
||||
|
||||
let tokens = quote! {
|
||||
#(#rep #rep : #nonrep #nonrep),*
|
||||
};
|
||||
|
||||
let expected = r#""a" "a" : "c" "c" , "b" "b" : "c" "c""#;
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_empty_quote() {
|
||||
let tokens = quote!();
|
||||
|
@ -315,7 +275,7 @@ fn test_cow() {
|
|||
#[test]
|
||||
fn test_closure() {
|
||||
fn field_i(i: usize) -> Ident {
|
||||
format_ident!("__field{}", i)
|
||||
Ident::new(&format!("__field{}", i), Span::call_site())
|
||||
}
|
||||
|
||||
let fields = (0usize..3)
|
||||
|
@ -333,97 +293,3 @@ fn test_append_tokens() {
|
|||
a.append_all(b);
|
||||
assert_eq!("a b", a.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_format_ident() {
|
||||
let id0 = format_ident!("Aa");
|
||||
let id1 = format_ident!("Hello{x}", x = id0);
|
||||
let id2 = format_ident!("Hello{x}", x = 5usize);
|
||||
let id3 = format_ident!("Hello{}_{x}", id0, x = 10usize);
|
||||
let id4 = format_ident!("Aa", span = Span::call_site());
|
||||
|
||||
assert_eq!(id0, "Aa");
|
||||
assert_eq!(id1, "HelloAa");
|
||||
assert_eq!(id2, "Hello5");
|
||||
assert_eq!(id3, "HelloAa_10");
|
||||
assert_eq!(id4, "Aa");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_format_ident_strip_raw() {
|
||||
let id = format_ident!("r#struct");
|
||||
let my_id = format_ident!("MyId{}", id);
|
||||
let raw_my_id = format_ident!("r#MyId{}", id);
|
||||
|
||||
assert_eq!(id, "r#struct");
|
||||
assert_eq!(my_id, "MyIdstruct");
|
||||
assert_eq!(raw_my_id, "r#MyIdstruct");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_outer_line_comment() {
|
||||
let tokens = quote! {
|
||||
/// doc
|
||||
};
|
||||
let expected = "# [ doc = r\" doc\" ]";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_inner_line_comment() {
|
||||
let tokens = quote! {
|
||||
//! doc
|
||||
};
|
||||
let expected = "# ! [ doc = r\" doc\" ]";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_outer_block_comment() {
|
||||
let tokens = quote! {
|
||||
/** doc */
|
||||
};
|
||||
let expected = "# [ doc = r\" doc \" ]";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_inner_block_comment() {
|
||||
let tokens = quote! {
|
||||
/*! doc */
|
||||
};
|
||||
let expected = "# ! [ doc = r\" doc \" ]";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_outer_attr() {
|
||||
let tokens = quote! {
|
||||
#[inline]
|
||||
};
|
||||
let expected = "# [ inline ]";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_inner_attr() {
|
||||
let tokens = quote! {
|
||||
#![no_std]
|
||||
};
|
||||
let expected = "# ! [ no_std ]";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
// https://github.com/dtolnay/quote/issues/130
|
||||
#[test]
|
||||
fn test_star_after_repetition() {
|
||||
let c = vec!['0', '1'];
|
||||
let tokens = quote! {
|
||||
#(
|
||||
f(#c);
|
||||
)*
|
||||
*out = None;
|
||||
};
|
||||
let expected = "f ( '0' ) ; f ( '1' ) ; * out = None ;";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
|
|
@ -1,9 +0,0 @@
|
|||
use quote::quote;
|
||||
|
||||
fn main() {
|
||||
let nonrep = "";
|
||||
|
||||
// Without some protection against repetitions with no iterator somewhere
|
||||
// inside, this would loop infinitely.
|
||||
quote!(#(#nonrep #nonrep)*);
|
||||
}
|
|
@ -1,9 +0,0 @@
|
|||
use quote::quote;
|
||||
|
||||
fn main() {
|
||||
let nonrep = "";
|
||||
|
||||
// Without some protection against repetitions with no iterator somewhere
|
||||
// inside, this would loop infinitely.
|
||||
quote!(#(#nonrep)*);
|
||||
}
|
|
@ -1,5 +0,0 @@
|
|||
use quote::quote;
|
||||
|
||||
fn main() {
|
||||
quote!(#(a b),*);
|
||||
}
|
|
@ -1,5 +0,0 @@
|
|||
use quote::quote;
|
||||
|
||||
fn main() {
|
||||
quote!(#(a b)*);
|
||||
}
|
|
@ -1,7 +0,0 @@
|
|||
use quote::quote;
|
||||
use std::net::Ipv4Addr;
|
||||
|
||||
fn main() {
|
||||
let ip = Ipv4Addr::LOCALHOST;
|
||||
let _ = quote! { #ip };
|
||||
}
|
|
@ -1,7 +0,0 @@
|
|||
use quote::quote;
|
||||
use std::net::Ipv4Addr;
|
||||
|
||||
fn main() {
|
||||
let ip = Ipv4Addr::LOCALHOST;
|
||||
let _ = quote! { #(#ip)* };
|
||||
}
|
|
@ -1,7 +0,0 @@
|
|||
use quote::quote_spanned;
|
||||
|
||||
fn main() {
|
||||
let span = "";
|
||||
let x = 0;
|
||||
quote_spanned!(span=> #x);
|
||||
}
|
|
@ -1 +0,0 @@
|
|||
{"files":{"COPYRIGHT":"23860c2a7b5d96b21569afedf033469bab9fe14a1b24a35068b8641c578ce24d","Cargo.toml":"aafcae4002bee71546a6aa40a97b9124a69f169ee7e3a9e3262338e32b4c2b9b","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"7b63ecd5f1902af1b63729947373683c32745c16a10e8e6292e2e2dcd7e90ae0","README.md":"67998486b32f4fe46abbbaa411b92528750e7f0e22452dc8a5b95d87d80fde75","scripts/unicode.py":"762eea92dd51238c6bf877570bde1149932ba15cf87be1618fc21cd53e941733","src/lib.rs":"4a89fadf452ae7c53536eaa4496f951a3153f8189dd1cbc532648731d30f0b11","src/tables.rs":"0643459b6ebeeed83aecd7604f0ea29c06bea7ce6c1cd9acd4988d27ace1ec53","src/tests.rs":"35a459382e190197e7b9a78832ae79f310b48a02a5b4227bf9bbc89d46c8deac"},"package":"fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc"}
|
|
@ -1,7 +0,0 @@
|
|||
Licensed under the Apache License, Version 2.0
|
||||
<LICENSE-APACHE or
|
||||
http://www.apache.org/licenses/LICENSE-2.0> or the MIT
|
||||
license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
|
||||
at your option. All files in the project carrying such
|
||||
notice may not be copied, modified, or distributed except
|
||||
according to those terms.
|
|
@ -1,26 +0,0 @@
|
|||
[package]
|
||||
|
||||
name = "unicode-xid"
|
||||
version = "0.1.0"
|
||||
authors = ["erick.tryzelaar <erick.tryzelaar@gmail.com>",
|
||||
"kwantam <kwantam@gmail.com>",
|
||||
]
|
||||
|
||||
homepage = "https://github.com/unicode-rs/unicode-xid"
|
||||
repository = "https://github.com/unicode-rs/unicode-xid"
|
||||
documentation = "https://unicode-rs.github.io/unicode-xid"
|
||||
license = "MIT/Apache-2.0"
|
||||
keywords = ["text", "unicode", "xid"]
|
||||
readme = "README.md"
|
||||
description = """
|
||||
Determine whether characters have the XID_Start
|
||||
or XID_Continue properties according to
|
||||
Unicode Standard Annex #31.
|
||||
"""
|
||||
|
||||
exclude = [ "target/*", "Cargo.lock" ]
|
||||
|
||||
[features]
|
||||
default = []
|
||||
no_std = []
|
||||
bench = []
|
|
@ -1,201 +0,0 @@
|
|||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
|
@ -1,25 +0,0 @@
|
|||
Copyright (c) 2015 The Rust Project Developers
|
||||
|
||||
Permission is hereby granted, free of charge, to any
|
||||
person obtaining a copy of this software and associated
|
||||
documentation files (the "Software"), to deal in the
|
||||
Software without restriction, including without
|
||||
limitation the rights to use, copy, modify, merge,
|
||||
publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software
|
||||
is furnished to do so, subject to the following
|
||||
conditions:
|
||||
|
||||
The above copyright notice and this permission notice
|
||||
shall be included in all copies or substantial portions
|
||||
of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
|
||||
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
|
||||
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
|
||||
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
|
||||
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
||||
DEALINGS IN THE SOFTWARE.
|
|
@ -1,34 +0,0 @@
|
|||
# unicode-xid
|
||||
|
||||
Determine if a `char` is a valid identifier for a parser and/or lexer according to
|
||||
[Unicode Standard Annex #31](http://www.unicode.org/reports/tr31/) rules.
|
||||
|
||||
[![Build Status](https://travis-ci.org/unicode-rs/unicode-xid.svg)](https://travis-ci.org/unicode-rs/unicode-xid)
|
||||
|
||||
[Documentation](https://unicode-rs.github.io/unicode-xid/unicode_xid/index.html)
|
||||
|
||||
```rust
|
||||
extern crate unicode_xid;
|
||||
|
||||
use unicode_xid::UnicodeXID;
|
||||
|
||||
fn main() {
|
||||
let ch = 'a';
|
||||
println!("Is {} a valid start of an identifier? {}", ch, UnicodeXID::is_xid_start(ch));
|
||||
}
|
||||
```
|
||||
|
||||
# features
|
||||
|
||||
unicode-xid supports a `no_std` feature. This eliminates dependence
|
||||
on std, and instead uses equivalent functions from core.
|
||||
|
||||
# crates.io
|
||||
|
||||
You can use this package in your project by adding the following
|
||||
to your `Cargo.toml`:
|
||||
|
||||
```toml
|
||||
[dependencies]
|
||||
unicode-xid = "0.0.4"
|
||||
```
|
|
@ -1,87 +0,0 @@
|
|||
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
//! Determine if a `char` is a valid identifier for a parser and/or lexer according to
|
||||
//! [Unicode Standard Annex #31](http://www.unicode.org/reports/tr31/) rules.
|
||||
//!
|
||||
//! ```rust
|
||||
//! extern crate unicode_xid;
|
||||
//!
|
||||
//! use unicode_xid::UnicodeXID;
|
||||
//!
|
||||
//! fn main() {
|
||||
//! let ch = 'a';
|
||||
//! println!("Is {} a valid start of an identifier? {}", ch, UnicodeXID::is_xid_start(ch));
|
||||
//! }
|
||||
//! ```
|
||||
//!
|
||||
//! # features
|
||||
//!
|
||||
//! unicode-xid supports a `no_std` feature. This eliminates dependence
|
||||
//! on std, and instead uses equivalent functions from core.
|
||||
//!
|
||||
//! # crates.io
|
||||
//!
|
||||
//! You can use this package in your project by adding the following
|
||||
//! to your `Cargo.toml`:
|
||||
//!
|
||||
//! ```toml
|
||||
//! [dependencies]
|
||||
//! unicode-xid = "0.0.4"
|
||||
//! ```
|
||||
|
||||
#![deny(missing_docs, unsafe_code)]
|
||||
#![doc(html_logo_url = "https://unicode-rs.github.io/unicode-rs_sm.png",
|
||||
html_favicon_url = "https://unicode-rs.github.io/unicode-rs_sm.png")]
|
||||
|
||||
#![no_std]
|
||||
#![cfg_attr(feature = "bench", feature(test, unicode))]
|
||||
|
||||
#[cfg(test)]
|
||||
#[macro_use]
|
||||
extern crate std;
|
||||
|
||||
#[cfg(feature = "bench")]
|
||||
extern crate test;
|
||||
|
||||
use tables::derived_property;
|
||||
pub use tables::UNICODE_VERSION;
|
||||
|
||||
mod tables;
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
/// Methods for determining if a character is a valid identifier character.
|
||||
pub trait UnicodeXID {
|
||||
/// Returns whether the specified character satisfies the 'XID_Start'
|
||||
/// Unicode property.
|
||||
///
|
||||
/// 'XID_Start' is a Unicode Derived Property specified in
|
||||
/// [UAX #31](http://unicode.org/reports/tr31/#NFKC_Modifications),
|
||||
/// mostly similar to ID_Start but modified for closure under NFKx.
|
||||
fn is_xid_start(self) -> bool;
|
||||
|
||||
/// Returns whether the specified `char` satisfies the 'XID_Continue'
|
||||
/// Unicode property.
|
||||
///
|
||||
/// 'XID_Continue' is a Unicode Derived Property specified in
|
||||
/// [UAX #31](http://unicode.org/reports/tr31/#NFKC_Modifications),
|
||||
/// mostly similar to 'ID_Continue' but modified for closure under NFKx.
|
||||
fn is_xid_continue(self) -> bool;
|
||||
}
|
||||
|
||||
impl UnicodeXID for char {
|
||||
#[inline]
|
||||
fn is_xid_start(self) -> bool { derived_property::XID_Start(self) }
|
||||
|
||||
#[inline]
|
||||
fn is_xid_continue(self) -> bool { derived_property::XID_Continue(self) }
|
||||
}
|
|
@ -1,426 +0,0 @@
|
|||
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
// NOTE: The following code was generated by "scripts/unicode.py", do not edit directly
|
||||
|
||||
#![allow(missing_docs, non_upper_case_globals, non_snake_case)]
|
||||
|
||||
/// The version of [Unicode](http://www.unicode.org/)
|
||||
/// that this version of unicode-xid is based on.
|
||||
pub const UNICODE_VERSION: (u64, u64, u64) = (9, 0, 0);
|
||||
|
||||
fn bsearch_range_table(c: char, r: &'static [(char,char)]) -> bool {
|
||||
use core::cmp::Ordering::{Equal, Less, Greater};
|
||||
|
||||
r.binary_search_by(|&(lo,hi)| {
|
||||
if lo <= c && c <= hi { Equal }
|
||||
else if hi < c { Less }
|
||||
else { Greater }
|
||||
}).is_ok()
|
||||
}
|
||||
|
||||
pub mod derived_property {
|
||||
pub const XID_Continue_table: &'static [(char, char)] = &[
|
||||
('\u{30}', '\u{39}'), ('\u{41}', '\u{5a}'), ('\u{5f}', '\u{5f}'), ('\u{61}', '\u{7a}'),
|
||||
('\u{aa}', '\u{aa}'), ('\u{b5}', '\u{b5}'), ('\u{b7}', '\u{b7}'), ('\u{ba}', '\u{ba}'),
|
||||
('\u{c0}', '\u{d6}'), ('\u{d8}', '\u{f6}'), ('\u{f8}', '\u{2c1}'), ('\u{2c6}', '\u{2d1}'),
|
||||
('\u{2e0}', '\u{2e4}'), ('\u{2ec}', '\u{2ec}'), ('\u{2ee}', '\u{2ee}'), ('\u{300}',
|
||||
'\u{374}'), ('\u{376}', '\u{377}'), ('\u{37b}', '\u{37d}'), ('\u{37f}', '\u{37f}'),
|
||||
('\u{386}', '\u{38a}'), ('\u{38c}', '\u{38c}'), ('\u{38e}', '\u{3a1}'), ('\u{3a3}',
|
||||
'\u{3f5}'), ('\u{3f7}', '\u{481}'), ('\u{483}', '\u{487}'), ('\u{48a}', '\u{52f}'),
|
||||
('\u{531}', '\u{556}'), ('\u{559}', '\u{559}'), ('\u{561}', '\u{587}'), ('\u{591}',
|
||||
'\u{5bd}'), ('\u{5bf}', '\u{5bf}'), ('\u{5c1}', '\u{5c2}'), ('\u{5c4}', '\u{5c5}'),
|
||||
('\u{5c7}', '\u{5c7}'), ('\u{5d0}', '\u{5ea}'), ('\u{5f0}', '\u{5f2}'), ('\u{610}',
|
||||
'\u{61a}'), ('\u{620}', '\u{669}'), ('\u{66e}', '\u{6d3}'), ('\u{6d5}', '\u{6dc}'),
|
||||
('\u{6df}', '\u{6e8}'), ('\u{6ea}', '\u{6fc}'), ('\u{6ff}', '\u{6ff}'), ('\u{710}',
|
||||
'\u{74a}'), ('\u{74d}', '\u{7b1}'), ('\u{7c0}', '\u{7f5}'), ('\u{7fa}', '\u{7fa}'),
|
||||
('\u{800}', '\u{82d}'), ('\u{840}', '\u{85b}'), ('\u{8a0}', '\u{8b4}'), ('\u{8b6}',
|
||||
'\u{8bd}'), ('\u{8d4}', '\u{8e1}'), ('\u{8e3}', '\u{963}'), ('\u{966}', '\u{96f}'),
|
||||
('\u{971}', '\u{983}'), ('\u{985}', '\u{98c}'), ('\u{98f}', '\u{990}'), ('\u{993}',
|
||||
'\u{9a8}'), ('\u{9aa}', '\u{9b0}'), ('\u{9b2}', '\u{9b2}'), ('\u{9b6}', '\u{9b9}'),
|
||||
('\u{9bc}', '\u{9c4}'), ('\u{9c7}', '\u{9c8}'), ('\u{9cb}', '\u{9ce}'), ('\u{9d7}',
|
||||
'\u{9d7}'), ('\u{9dc}', '\u{9dd}'), ('\u{9df}', '\u{9e3}'), ('\u{9e6}', '\u{9f1}'),
|
||||
('\u{a01}', '\u{a03}'), ('\u{a05}', '\u{a0a}'), ('\u{a0f}', '\u{a10}'), ('\u{a13}',
|
||||
'\u{a28}'), ('\u{a2a}', '\u{a30}'), ('\u{a32}', '\u{a33}'), ('\u{a35}', '\u{a36}'),
|
||||
('\u{a38}', '\u{a39}'), ('\u{a3c}', '\u{a3c}'), ('\u{a3e}', '\u{a42}'), ('\u{a47}',
|
||||
'\u{a48}'), ('\u{a4b}', '\u{a4d}'), ('\u{a51}', '\u{a51}'), ('\u{a59}', '\u{a5c}'),
|
||||
('\u{a5e}', '\u{a5e}'), ('\u{a66}', '\u{a75}'), ('\u{a81}', '\u{a83}'), ('\u{a85}',
|
||||
'\u{a8d}'), ('\u{a8f}', '\u{a91}'), ('\u{a93}', '\u{aa8}'), ('\u{aaa}', '\u{ab0}'),
|
||||
('\u{ab2}', '\u{ab3}'), ('\u{ab5}', '\u{ab9}'), ('\u{abc}', '\u{ac5}'), ('\u{ac7}',
|
||||
'\u{ac9}'), ('\u{acb}', '\u{acd}'), ('\u{ad0}', '\u{ad0}'), ('\u{ae0}', '\u{ae3}'),
|
||||
('\u{ae6}', '\u{aef}'), ('\u{af9}', '\u{af9}'), ('\u{b01}', '\u{b03}'), ('\u{b05}',
|
||||
'\u{b0c}'), ('\u{b0f}', '\u{b10}'), ('\u{b13}', '\u{b28}'), ('\u{b2a}', '\u{b30}'),
|
||||
('\u{b32}', '\u{b33}'), ('\u{b35}', '\u{b39}'), ('\u{b3c}', '\u{b44}'), ('\u{b47}',
|
||||
'\u{b48}'), ('\u{b4b}', '\u{b4d}'), ('\u{b56}', '\u{b57}'), ('\u{b5c}', '\u{b5d}'),
|
||||
('\u{b5f}', '\u{b63}'), ('\u{b66}', '\u{b6f}'), ('\u{b71}', '\u{b71}'), ('\u{b82}',
|
||||
'\u{b83}'), ('\u{b85}', '\u{b8a}'), ('\u{b8e}', '\u{b90}'), ('\u{b92}', '\u{b95}'),
|
||||
('\u{b99}', '\u{b9a}'), ('\u{b9c}', '\u{b9c}'), ('\u{b9e}', '\u{b9f}'), ('\u{ba3}',
|
||||
'\u{ba4}'), ('\u{ba8}', '\u{baa}'), ('\u{bae}', '\u{bb9}'), ('\u{bbe}', '\u{bc2}'),
|
||||
('\u{bc6}', '\u{bc8}'), ('\u{bca}', '\u{bcd}'), ('\u{bd0}', '\u{bd0}'), ('\u{bd7}',
|
||||
'\u{bd7}'), ('\u{be6}', '\u{bef}'), ('\u{c00}', '\u{c03}'), ('\u{c05}', '\u{c0c}'),
|
||||
('\u{c0e}', '\u{c10}'), ('\u{c12}', '\u{c28}'), ('\u{c2a}', '\u{c39}'), ('\u{c3d}',
|
||||
'\u{c44}'), ('\u{c46}', '\u{c48}'), ('\u{c4a}', '\u{c4d}'), ('\u{c55}', '\u{c56}'),
|
||||
('\u{c58}', '\u{c5a}'), ('\u{c60}', '\u{c63}'), ('\u{c66}', '\u{c6f}'), ('\u{c80}',
|
||||
'\u{c83}'), ('\u{c85}', '\u{c8c}'), ('\u{c8e}', '\u{c90}'), ('\u{c92}', '\u{ca8}'),
|
||||
('\u{caa}', '\u{cb3}'), ('\u{cb5}', '\u{cb9}'), ('\u{cbc}', '\u{cc4}'), ('\u{cc6}',
|
||||
'\u{cc8}'), ('\u{cca}', '\u{ccd}'), ('\u{cd5}', '\u{cd6}'), ('\u{cde}', '\u{cde}'),
|
||||
('\u{ce0}', '\u{ce3}'), ('\u{ce6}', '\u{cef}'), ('\u{cf1}', '\u{cf2}'), ('\u{d01}',
|
||||
'\u{d03}'), ('\u{d05}', '\u{d0c}'), ('\u{d0e}', '\u{d10}'), ('\u{d12}', '\u{d3a}'),
|
||||
('\u{d3d}', '\u{d44}'), ('\u{d46}', '\u{d48}'), ('\u{d4a}', '\u{d4e}'), ('\u{d54}',
|
||||
'\u{d57}'), ('\u{d5f}', '\u{d63}'), ('\u{d66}', '\u{d6f}'), ('\u{d7a}', '\u{d7f}'),
|
||||
('\u{d82}', '\u{d83}'), ('\u{d85}', '\u{d96}'), ('\u{d9a}', '\u{db1}'), ('\u{db3}',
|
||||
'\u{dbb}'), ('\u{dbd}', '\u{dbd}'), ('\u{dc0}', '\u{dc6}'), ('\u{dca}', '\u{dca}'),
|
||||
('\u{dcf}', '\u{dd4}'), ('\u{dd6}', '\u{dd6}'), ('\u{dd8}', '\u{ddf}'), ('\u{de6}',
|
||||
'\u{def}'), ('\u{df2}', '\u{df3}'), ('\u{e01}', '\u{e3a}'), ('\u{e40}', '\u{e4e}'),
|
||||
('\u{e50}', '\u{e59}'), ('\u{e81}', '\u{e82}'), ('\u{e84}', '\u{e84}'), ('\u{e87}',
|
||||
'\u{e88}'), ('\u{e8a}', '\u{e8a}'), ('\u{e8d}', '\u{e8d}'), ('\u{e94}', '\u{e97}'),
|
||||
('\u{e99}', '\u{e9f}'), ('\u{ea1}', '\u{ea3}'), ('\u{ea5}', '\u{ea5}'), ('\u{ea7}',
|
||||
'\u{ea7}'), ('\u{eaa}', '\u{eab}'), ('\u{ead}', '\u{eb9}'), ('\u{ebb}', '\u{ebd}'),
|
||||
('\u{ec0}', '\u{ec4}'), ('\u{ec6}', '\u{ec6}'), ('\u{ec8}', '\u{ecd}'), ('\u{ed0}',
|
||||
'\u{ed9}'), ('\u{edc}', '\u{edf}'), ('\u{f00}', '\u{f00}'), ('\u{f18}', '\u{f19}'),
|
||||
('\u{f20}', '\u{f29}'), ('\u{f35}', '\u{f35}'), ('\u{f37}', '\u{f37}'), ('\u{f39}',
|
||||
'\u{f39}'), ('\u{f3e}', '\u{f47}'), ('\u{f49}', '\u{f6c}'), ('\u{f71}', '\u{f84}'),
|
||||
('\u{f86}', '\u{f97}'), ('\u{f99}', '\u{fbc}'), ('\u{fc6}', '\u{fc6}'), ('\u{1000}',
|
||||
'\u{1049}'), ('\u{1050}', '\u{109d}'), ('\u{10a0}', '\u{10c5}'), ('\u{10c7}', '\u{10c7}'),
|
||||
('\u{10cd}', '\u{10cd}'), ('\u{10d0}', '\u{10fa}'), ('\u{10fc}', '\u{1248}'), ('\u{124a}',
|
||||
'\u{124d}'), ('\u{1250}', '\u{1256}'), ('\u{1258}', '\u{1258}'), ('\u{125a}', '\u{125d}'),
|
||||
('\u{1260}', '\u{1288}'), ('\u{128a}', '\u{128d}'), ('\u{1290}', '\u{12b0}'), ('\u{12b2}',
|
||||
'\u{12b5}'), ('\u{12b8}', '\u{12be}'), ('\u{12c0}', '\u{12c0}'), ('\u{12c2}', '\u{12c5}'),
|
||||
('\u{12c8}', '\u{12d6}'), ('\u{12d8}', '\u{1310}'), ('\u{1312}', '\u{1315}'), ('\u{1318}',
|
||||
'\u{135a}'), ('\u{135d}', '\u{135f}'), ('\u{1369}', '\u{1371}'), ('\u{1380}', '\u{138f}'),
|
||||
('\u{13a0}', '\u{13f5}'), ('\u{13f8}', '\u{13fd}'), ('\u{1401}', '\u{166c}'), ('\u{166f}',
|
||||
'\u{167f}'), ('\u{1681}', '\u{169a}'), ('\u{16a0}', '\u{16ea}'), ('\u{16ee}', '\u{16f8}'),
|
||||
('\u{1700}', '\u{170c}'), ('\u{170e}', '\u{1714}'), ('\u{1720}', '\u{1734}'), ('\u{1740}',
|
||||
'\u{1753}'), ('\u{1760}', '\u{176c}'), ('\u{176e}', '\u{1770}'), ('\u{1772}', '\u{1773}'),
|
||||
('\u{1780}', '\u{17d3}'), ('\u{17d7}', '\u{17d7}'), ('\u{17dc}', '\u{17dd}'), ('\u{17e0}',
|
||||
'\u{17e9}'), ('\u{180b}', '\u{180d}'), ('\u{1810}', '\u{1819}'), ('\u{1820}', '\u{1877}'),
|
||||
('\u{1880}', '\u{18aa}'), ('\u{18b0}', '\u{18f5}'), ('\u{1900}', '\u{191e}'), ('\u{1920}',
|
||||
'\u{192b}'), ('\u{1930}', '\u{193b}'), ('\u{1946}', '\u{196d}'), ('\u{1970}', '\u{1974}'),
|
||||
('\u{1980}', '\u{19ab}'), ('\u{19b0}', '\u{19c9}'), ('\u{19d0}', '\u{19da}'), ('\u{1a00}',
|
||||
'\u{1a1b}'), ('\u{1a20}', '\u{1a5e}'), ('\u{1a60}', '\u{1a7c}'), ('\u{1a7f}', '\u{1a89}'),
|
||||
('\u{1a90}', '\u{1a99}'), ('\u{1aa7}', '\u{1aa7}'), ('\u{1ab0}', '\u{1abd}'), ('\u{1b00}',
|
||||
'\u{1b4b}'), ('\u{1b50}', '\u{1b59}'), ('\u{1b6b}', '\u{1b73}'), ('\u{1b80}', '\u{1bf3}'),
|
||||
('\u{1c00}', '\u{1c37}'), ('\u{1c40}', '\u{1c49}'), ('\u{1c4d}', '\u{1c7d}'), ('\u{1c80}',
|
||||
'\u{1c88}'), ('\u{1cd0}', '\u{1cd2}'), ('\u{1cd4}', '\u{1cf6}'), ('\u{1cf8}', '\u{1cf9}'),
|
||||
('\u{1d00}', '\u{1df5}'), ('\u{1dfb}', '\u{1f15}'), ('\u{1f18}', '\u{1f1d}'), ('\u{1f20}',
|
||||
'\u{1f45}'), ('\u{1f48}', '\u{1f4d}'), ('\u{1f50}', '\u{1f57}'), ('\u{1f59}', '\u{1f59}'),
|
||||
('\u{1f5b}', '\u{1f5b}'), ('\u{1f5d}', '\u{1f5d}'), ('\u{1f5f}', '\u{1f7d}'), ('\u{1f80}',
|
||||
'\u{1fb4}'), ('\u{1fb6}', '\u{1fbc}'), ('\u{1fbe}', '\u{1fbe}'), ('\u{1fc2}', '\u{1fc4}'),
|
||||
('\u{1fc6}', '\u{1fcc}'), ('\u{1fd0}', '\u{1fd3}'), ('\u{1fd6}', '\u{1fdb}'), ('\u{1fe0}',
|
||||
'\u{1fec}'), ('\u{1ff2}', '\u{1ff4}'), ('\u{1ff6}', '\u{1ffc}'), ('\u{203f}', '\u{2040}'),
|
||||
('\u{2054}', '\u{2054}'), ('\u{2071}', '\u{2071}'), ('\u{207f}', '\u{207f}'), ('\u{2090}',
|
||||
'\u{209c}'), ('\u{20d0}', '\u{20dc}'), ('\u{20e1}', '\u{20e1}'), ('\u{20e5}', '\u{20f0}'),
|
||||
('\u{2102}', '\u{2102}'), ('\u{2107}', '\u{2107}'), ('\u{210a}', '\u{2113}'), ('\u{2115}',
|
||||
'\u{2115}'), ('\u{2118}', '\u{211d}'), ('\u{2124}', '\u{2124}'), ('\u{2126}', '\u{2126}'),
|
||||
('\u{2128}', '\u{2128}'), ('\u{212a}', '\u{2139}'), ('\u{213c}', '\u{213f}'), ('\u{2145}',
|
||||
'\u{2149}'), ('\u{214e}', '\u{214e}'), ('\u{2160}', '\u{2188}'), ('\u{2c00}', '\u{2c2e}'),
|
||||
('\u{2c30}', '\u{2c5e}'), ('\u{2c60}', '\u{2ce4}'), ('\u{2ceb}', '\u{2cf3}'), ('\u{2d00}',
|
||||
'\u{2d25}'), ('\u{2d27}', '\u{2d27}'), ('\u{2d2d}', '\u{2d2d}'), ('\u{2d30}', '\u{2d67}'),
|
||||
('\u{2d6f}', '\u{2d6f}'), ('\u{2d7f}', '\u{2d96}'), ('\u{2da0}', '\u{2da6}'), ('\u{2da8}',
|
||||
'\u{2dae}'), ('\u{2db0}', '\u{2db6}'), ('\u{2db8}', '\u{2dbe}'), ('\u{2dc0}', '\u{2dc6}'),
|
||||
('\u{2dc8}', '\u{2dce}'), ('\u{2dd0}', '\u{2dd6}'), ('\u{2dd8}', '\u{2dde}'), ('\u{2de0}',
|
||||
'\u{2dff}'), ('\u{3005}', '\u{3007}'), ('\u{3021}', '\u{302f}'), ('\u{3031}', '\u{3035}'),
|
||||
('\u{3038}', '\u{303c}'), ('\u{3041}', '\u{3096}'), ('\u{3099}', '\u{309a}'), ('\u{309d}',
|
||||
'\u{309f}'), ('\u{30a1}', '\u{30fa}'), ('\u{30fc}', '\u{30ff}'), ('\u{3105}', '\u{312d}'),
|
||||
('\u{3131}', '\u{318e}'), ('\u{31a0}', '\u{31ba}'), ('\u{31f0}', '\u{31ff}'), ('\u{3400}',
|
||||
'\u{4db5}'), ('\u{4e00}', '\u{9fd5}'), ('\u{a000}', '\u{a48c}'), ('\u{a4d0}', '\u{a4fd}'),
|
||||
('\u{a500}', '\u{a60c}'), ('\u{a610}', '\u{a62b}'), ('\u{a640}', '\u{a66f}'), ('\u{a674}',
|
||||
'\u{a67d}'), ('\u{a67f}', '\u{a6f1}'), ('\u{a717}', '\u{a71f}'), ('\u{a722}', '\u{a788}'),
|
||||
('\u{a78b}', '\u{a7ae}'), ('\u{a7b0}', '\u{a7b7}'), ('\u{a7f7}', '\u{a827}'), ('\u{a840}',
|
||||
'\u{a873}'), ('\u{a880}', '\u{a8c5}'), ('\u{a8d0}', '\u{a8d9}'), ('\u{a8e0}', '\u{a8f7}'),
|
||||
('\u{a8fb}', '\u{a8fb}'), ('\u{a8fd}', '\u{a8fd}'), ('\u{a900}', '\u{a92d}'), ('\u{a930}',
|
||||
'\u{a953}'), ('\u{a960}', '\u{a97c}'), ('\u{a980}', '\u{a9c0}'), ('\u{a9cf}', '\u{a9d9}'),
|
||||
('\u{a9e0}', '\u{a9fe}'), ('\u{aa00}', '\u{aa36}'), ('\u{aa40}', '\u{aa4d}'), ('\u{aa50}',
|
||||
'\u{aa59}'), ('\u{aa60}', '\u{aa76}'), ('\u{aa7a}', '\u{aac2}'), ('\u{aadb}', '\u{aadd}'),
|
||||
('\u{aae0}', '\u{aaef}'), ('\u{aaf2}', '\u{aaf6}'), ('\u{ab01}', '\u{ab06}'), ('\u{ab09}',
|
||||
'\u{ab0e}'), ('\u{ab11}', '\u{ab16}'), ('\u{ab20}', '\u{ab26}'), ('\u{ab28}', '\u{ab2e}'),
|
||||
('\u{ab30}', '\u{ab5a}'), ('\u{ab5c}', '\u{ab65}'), ('\u{ab70}', '\u{abea}'), ('\u{abec}',
|
||||
'\u{abed}'), ('\u{abf0}', '\u{abf9}'), ('\u{ac00}', '\u{d7a3}'), ('\u{d7b0}', '\u{d7c6}'),
|
||||
('\u{d7cb}', '\u{d7fb}'), ('\u{f900}', '\u{fa6d}'), ('\u{fa70}', '\u{fad9}'), ('\u{fb00}',
|
||||
'\u{fb06}'), ('\u{fb13}', '\u{fb17}'), ('\u{fb1d}', '\u{fb28}'), ('\u{fb2a}', '\u{fb36}'),
|
||||
('\u{fb38}', '\u{fb3c}'), ('\u{fb3e}', '\u{fb3e}'), ('\u{fb40}', '\u{fb41}'), ('\u{fb43}',
|
||||
'\u{fb44}'), ('\u{fb46}', '\u{fbb1}'), ('\u{fbd3}', '\u{fc5d}'), ('\u{fc64}', '\u{fd3d}'),
|
||||
('\u{fd50}', '\u{fd8f}'), ('\u{fd92}', '\u{fdc7}'), ('\u{fdf0}', '\u{fdf9}'), ('\u{fe00}',
|
||||
'\u{fe0f}'), ('\u{fe20}', '\u{fe2f}'), ('\u{fe33}', '\u{fe34}'), ('\u{fe4d}', '\u{fe4f}'),
|
||||
('\u{fe71}', '\u{fe71}'), ('\u{fe73}', '\u{fe73}'), ('\u{fe77}', '\u{fe77}'), ('\u{fe79}',
|
||||
'\u{fe79}'), ('\u{fe7b}', '\u{fe7b}'), ('\u{fe7d}', '\u{fe7d}'), ('\u{fe7f}', '\u{fefc}'),
|
||||
('\u{ff10}', '\u{ff19}'), ('\u{ff21}', '\u{ff3a}'), ('\u{ff3f}', '\u{ff3f}'), ('\u{ff41}',
|
||||
'\u{ff5a}'), ('\u{ff66}', '\u{ffbe}'), ('\u{ffc2}', '\u{ffc7}'), ('\u{ffca}', '\u{ffcf}'),
|
||||
('\u{ffd2}', '\u{ffd7}'), ('\u{ffda}', '\u{ffdc}'), ('\u{10000}', '\u{1000b}'),
|
||||
('\u{1000d}', '\u{10026}'), ('\u{10028}', '\u{1003a}'), ('\u{1003c}', '\u{1003d}'),
|
||||
('\u{1003f}', '\u{1004d}'), ('\u{10050}', '\u{1005d}'), ('\u{10080}', '\u{100fa}'),
|
||||
('\u{10140}', '\u{10174}'), ('\u{101fd}', '\u{101fd}'), ('\u{10280}', '\u{1029c}'),
|
||||
('\u{102a0}', '\u{102d0}'), ('\u{102e0}', '\u{102e0}'), ('\u{10300}', '\u{1031f}'),
|
||||
('\u{10330}', '\u{1034a}'), ('\u{10350}', '\u{1037a}'), ('\u{10380}', '\u{1039d}'),
|
||||
('\u{103a0}', '\u{103c3}'), ('\u{103c8}', '\u{103cf}'), ('\u{103d1}', '\u{103d5}'),
|
||||
('\u{10400}', '\u{1049d}'), ('\u{104a0}', '\u{104a9}'), ('\u{104b0}', '\u{104d3}'),
|
||||
('\u{104d8}', '\u{104fb}'), ('\u{10500}', '\u{10527}'), ('\u{10530}', '\u{10563}'),
|
||||
('\u{10600}', '\u{10736}'), ('\u{10740}', '\u{10755}'), ('\u{10760}', '\u{10767}'),
|
||||
('\u{10800}', '\u{10805}'), ('\u{10808}', '\u{10808}'), ('\u{1080a}', '\u{10835}'),
|
||||
('\u{10837}', '\u{10838}'), ('\u{1083c}', '\u{1083c}'), ('\u{1083f}', '\u{10855}'),
|
||||
('\u{10860}', '\u{10876}'), ('\u{10880}', '\u{1089e}'), ('\u{108e0}', '\u{108f2}'),
|
||||
('\u{108f4}', '\u{108f5}'), ('\u{10900}', '\u{10915}'), ('\u{10920}', '\u{10939}'),
|
||||
('\u{10980}', '\u{109b7}'), ('\u{109be}', '\u{109bf}'), ('\u{10a00}', '\u{10a03}'),
|
||||
('\u{10a05}', '\u{10a06}'), ('\u{10a0c}', '\u{10a13}'), ('\u{10a15}', '\u{10a17}'),
|
||||
('\u{10a19}', '\u{10a33}'), ('\u{10a38}', '\u{10a3a}'), ('\u{10a3f}', '\u{10a3f}'),
|
||||
('\u{10a60}', '\u{10a7c}'), ('\u{10a80}', '\u{10a9c}'), ('\u{10ac0}', '\u{10ac7}'),
|
||||
('\u{10ac9}', '\u{10ae6}'), ('\u{10b00}', '\u{10b35}'), ('\u{10b40}', '\u{10b55}'),
|
||||
('\u{10b60}', '\u{10b72}'), ('\u{10b80}', '\u{10b91}'), ('\u{10c00}', '\u{10c48}'),
|
||||
('\u{10c80}', '\u{10cb2}'), ('\u{10cc0}', '\u{10cf2}'), ('\u{11000}', '\u{11046}'),
|
||||
('\u{11066}', '\u{1106f}'), ('\u{1107f}', '\u{110ba}'), ('\u{110d0}', '\u{110e8}'),
|
||||
('\u{110f0}', '\u{110f9}'), ('\u{11100}', '\u{11134}'), ('\u{11136}', '\u{1113f}'),
|
||||
('\u{11150}', '\u{11173}'), ('\u{11176}', '\u{11176}'), ('\u{11180}', '\u{111c4}'),
|
||||
('\u{111ca}', '\u{111cc}'), ('\u{111d0}', '\u{111da}'), ('\u{111dc}', '\u{111dc}'),
|
||||
('\u{11200}', '\u{11211}'), ('\u{11213}', '\u{11237}'), ('\u{1123e}', '\u{1123e}'),
|
||||
('\u{11280}', '\u{11286}'), ('\u{11288}', '\u{11288}'), ('\u{1128a}', '\u{1128d}'),
|
||||
('\u{1128f}', '\u{1129d}'), ('\u{1129f}', '\u{112a8}'), ('\u{112b0}', '\u{112ea}'),
|
||||
('\u{112f0}', '\u{112f9}'), ('\u{11300}', '\u{11303}'), ('\u{11305}', '\u{1130c}'),
|
||||
('\u{1130f}', '\u{11310}'), ('\u{11313}', '\u{11328}'), ('\u{1132a}', '\u{11330}'),
|
||||
('\u{11332}', '\u{11333}'), ('\u{11335}', '\u{11339}'), ('\u{1133c}', '\u{11344}'),
|
||||
('\u{11347}', '\u{11348}'), ('\u{1134b}', '\u{1134d}'), ('\u{11350}', '\u{11350}'),
|
||||
('\u{11357}', '\u{11357}'), ('\u{1135d}', '\u{11363}'), ('\u{11366}', '\u{1136c}'),
|
||||
('\u{11370}', '\u{11374}'), ('\u{11400}', '\u{1144a}'), ('\u{11450}', '\u{11459}'),
|
||||
('\u{11480}', '\u{114c5}'), ('\u{114c7}', '\u{114c7}'), ('\u{114d0}', '\u{114d9}'),
|
||||
('\u{11580}', '\u{115b5}'), ('\u{115b8}', '\u{115c0}'), ('\u{115d8}', '\u{115dd}'),
|
||||
('\u{11600}', '\u{11640}'), ('\u{11644}', '\u{11644}'), ('\u{11650}', '\u{11659}'),
|
||||
('\u{11680}', '\u{116b7}'), ('\u{116c0}', '\u{116c9}'), ('\u{11700}', '\u{11719}'),
|
||||
('\u{1171d}', '\u{1172b}'), ('\u{11730}', '\u{11739}'), ('\u{118a0}', '\u{118e9}'),
|
||||
('\u{118ff}', '\u{118ff}'), ('\u{11ac0}', '\u{11af8}'), ('\u{11c00}', '\u{11c08}'),
|
||||
('\u{11c0a}', '\u{11c36}'), ('\u{11c38}', '\u{11c40}'), ('\u{11c50}', '\u{11c59}'),
|
||||
('\u{11c72}', '\u{11c8f}'), ('\u{11c92}', '\u{11ca7}'), ('\u{11ca9}', '\u{11cb6}'),
|
||||
('\u{12000}', '\u{12399}'), ('\u{12400}', '\u{1246e}'), ('\u{12480}', '\u{12543}'),
|
||||
('\u{13000}', '\u{1342e}'), ('\u{14400}', '\u{14646}'), ('\u{16800}', '\u{16a38}'),
|
||||
('\u{16a40}', '\u{16a5e}'), ('\u{16a60}', '\u{16a69}'), ('\u{16ad0}', '\u{16aed}'),
|
||||
('\u{16af0}', '\u{16af4}'), ('\u{16b00}', '\u{16b36}'), ('\u{16b40}', '\u{16b43}'),
|
||||
('\u{16b50}', '\u{16b59}'), ('\u{16b63}', '\u{16b77}'), ('\u{16b7d}', '\u{16b8f}'),
|
||||
('\u{16f00}', '\u{16f44}'), ('\u{16f50}', '\u{16f7e}'), ('\u{16f8f}', '\u{16f9f}'),
|
||||
('\u{16fe0}', '\u{16fe0}'), ('\u{17000}', '\u{187ec}'), ('\u{18800}', '\u{18af2}'),
|
||||
('\u{1b000}', '\u{1b001}'), ('\u{1bc00}', '\u{1bc6a}'), ('\u{1bc70}', '\u{1bc7c}'),
|
||||
('\u{1bc80}', '\u{1bc88}'), ('\u{1bc90}', '\u{1bc99}'), ('\u{1bc9d}', '\u{1bc9e}'),
|
||||
('\u{1d165}', '\u{1d169}'), ('\u{1d16d}', '\u{1d172}'), ('\u{1d17b}', '\u{1d182}'),
|
||||
('\u{1d185}', '\u{1d18b}'), ('\u{1d1aa}', '\u{1d1ad}'), ('\u{1d242}', '\u{1d244}'),
|
||||
('\u{1d400}', '\u{1d454}'), ('\u{1d456}', '\u{1d49c}'), ('\u{1d49e}', '\u{1d49f}'),
|
||||
('\u{1d4a2}', '\u{1d4a2}'), ('\u{1d4a5}', '\u{1d4a6}'), ('\u{1d4a9}', '\u{1d4ac}'),
|
||||
('\u{1d4ae}', '\u{1d4b9}'), ('\u{1d4bb}', '\u{1d4bb}'), ('\u{1d4bd}', '\u{1d4c3}'),
|
||||
('\u{1d4c5}', '\u{1d505}'), ('\u{1d507}', '\u{1d50a}'), ('\u{1d50d}', '\u{1d514}'),
|
||||
('\u{1d516}', '\u{1d51c}'), ('\u{1d51e}', '\u{1d539}'), ('\u{1d53b}', '\u{1d53e}'),
|
||||
('\u{1d540}', '\u{1d544}'), ('\u{1d546}', '\u{1d546}'), ('\u{1d54a}', '\u{1d550}'),
|
||||
('\u{1d552}', '\u{1d6a5}'), ('\u{1d6a8}', '\u{1d6c0}'), ('\u{1d6c2}', '\u{1d6da}'),
|
||||
('\u{1d6dc}', '\u{1d6fa}'), ('\u{1d6fc}', '\u{1d714}'), ('\u{1d716}', '\u{1d734}'),
|
||||
('\u{1d736}', '\u{1d74e}'), ('\u{1d750}', '\u{1d76e}'), ('\u{1d770}', '\u{1d788}'),
|
||||
('\u{1d78a}', '\u{1d7a8}'), ('\u{1d7aa}', '\u{1d7c2}'), ('\u{1d7c4}', '\u{1d7cb}'),
|
||||
('\u{1d7ce}', '\u{1d7ff}'), ('\u{1da00}', '\u{1da36}'), ('\u{1da3b}', '\u{1da6c}'),
|
||||
('\u{1da75}', '\u{1da75}'), ('\u{1da84}', '\u{1da84}'), ('\u{1da9b}', '\u{1da9f}'),
|
||||
('\u{1daa1}', '\u{1daaf}'), ('\u{1e000}', '\u{1e006}'), ('\u{1e008}', '\u{1e018}'),
|
||||
('\u{1e01b}', '\u{1e021}'), ('\u{1e023}', '\u{1e024}'), ('\u{1e026}', '\u{1e02a}'),
|
||||
('\u{1e800}', '\u{1e8c4}'), ('\u{1e8d0}', '\u{1e8d6}'), ('\u{1e900}', '\u{1e94a}'),
|
||||
('\u{1e950}', '\u{1e959}'), ('\u{1ee00}', '\u{1ee03}'), ('\u{1ee05}', '\u{1ee1f}'),
|
||||
('\u{1ee21}', '\u{1ee22}'), ('\u{1ee24}', '\u{1ee24}'), ('\u{1ee27}', '\u{1ee27}'),
|
||||
('\u{1ee29}', '\u{1ee32}'), ('\u{1ee34}', '\u{1ee37}'), ('\u{1ee39}', '\u{1ee39}'),
|
||||
('\u{1ee3b}', '\u{1ee3b}'), ('\u{1ee42}', '\u{1ee42}'), ('\u{1ee47}', '\u{1ee47}'),
|
||||
('\u{1ee49}', '\u{1ee49}'), ('\u{1ee4b}', '\u{1ee4b}'), ('\u{1ee4d}', '\u{1ee4f}'),
|
||||
('\u{1ee51}', '\u{1ee52}'), ('\u{1ee54}', '\u{1ee54}'), ('\u{1ee57}', '\u{1ee57}'),
|
||||
('\u{1ee59}', '\u{1ee59}'), ('\u{1ee5b}', '\u{1ee5b}'), ('\u{1ee5d}', '\u{1ee5d}'),
|
||||
('\u{1ee5f}', '\u{1ee5f}'), ('\u{1ee61}', '\u{1ee62}'), ('\u{1ee64}', '\u{1ee64}'),
|
||||
('\u{1ee67}', '\u{1ee6a}'), ('\u{1ee6c}', '\u{1ee72}'), ('\u{1ee74}', '\u{1ee77}'),
|
||||
('\u{1ee79}', '\u{1ee7c}'), ('\u{1ee7e}', '\u{1ee7e}'), ('\u{1ee80}', '\u{1ee89}'),
|
||||
('\u{1ee8b}', '\u{1ee9b}'), ('\u{1eea1}', '\u{1eea3}'), ('\u{1eea5}', '\u{1eea9}'),
|
||||
('\u{1eeab}', '\u{1eebb}'), ('\u{20000}', '\u{2a6d6}'), ('\u{2a700}', '\u{2b734}'),
|
||||
('\u{2b740}', '\u{2b81d}'), ('\u{2b820}', '\u{2cea1}'), ('\u{2f800}', '\u{2fa1d}'),
|
||||
('\u{e0100}', '\u{e01ef}')
|
||||
];
|
||||
|
||||
pub fn XID_Continue(c: char) -> bool {
|
||||
super::bsearch_range_table(c, XID_Continue_table)
|
||||
}
|
||||
|
||||
pub const XID_Start_table: &'static [(char, char)] = &[
|
||||
('\u{41}', '\u{5a}'), ('\u{61}', '\u{7a}'), ('\u{aa}', '\u{aa}'), ('\u{b5}', '\u{b5}'),
|
||||
('\u{ba}', '\u{ba}'), ('\u{c0}', '\u{d6}'), ('\u{d8}', '\u{f6}'), ('\u{f8}', '\u{2c1}'),
|
||||
('\u{2c6}', '\u{2d1}'), ('\u{2e0}', '\u{2e4}'), ('\u{2ec}', '\u{2ec}'), ('\u{2ee}',
|
||||
'\u{2ee}'), ('\u{370}', '\u{374}'), ('\u{376}', '\u{377}'), ('\u{37b}', '\u{37d}'),
|
||||
('\u{37f}', '\u{37f}'), ('\u{386}', '\u{386}'), ('\u{388}', '\u{38a}'), ('\u{38c}',
|
||||
'\u{38c}'), ('\u{38e}', '\u{3a1}'), ('\u{3a3}', '\u{3f5}'), ('\u{3f7}', '\u{481}'),
|
||||
('\u{48a}', '\u{52f}'), ('\u{531}', '\u{556}'), ('\u{559}', '\u{559}'), ('\u{561}',
|
||||
'\u{587}'), ('\u{5d0}', '\u{5ea}'), ('\u{5f0}', '\u{5f2}'), ('\u{620}', '\u{64a}'),
|
||||
('\u{66e}', '\u{66f}'), ('\u{671}', '\u{6d3}'), ('\u{6d5}', '\u{6d5}'), ('\u{6e5}',
|
||||
'\u{6e6}'), ('\u{6ee}', '\u{6ef}'), ('\u{6fa}', '\u{6fc}'), ('\u{6ff}', '\u{6ff}'),
|
||||
('\u{710}', '\u{710}'), ('\u{712}', '\u{72f}'), ('\u{74d}', '\u{7a5}'), ('\u{7b1}',
|
||||
'\u{7b1}'), ('\u{7ca}', '\u{7ea}'), ('\u{7f4}', '\u{7f5}'), ('\u{7fa}', '\u{7fa}'),
|
||||
('\u{800}', '\u{815}'), ('\u{81a}', '\u{81a}'), ('\u{824}', '\u{824}'), ('\u{828}',
|
||||
'\u{828}'), ('\u{840}', '\u{858}'), ('\u{8a0}', '\u{8b4}'), ('\u{8b6}', '\u{8bd}'),
|
||||
('\u{904}', '\u{939}'), ('\u{93d}', '\u{93d}'), ('\u{950}', '\u{950}'), ('\u{958}',
|
||||
'\u{961}'), ('\u{971}', '\u{980}'), ('\u{985}', '\u{98c}'), ('\u{98f}', '\u{990}'),
|
||||
('\u{993}', '\u{9a8}'), ('\u{9aa}', '\u{9b0}'), ('\u{9b2}', '\u{9b2}'), ('\u{9b6}',
|
||||
'\u{9b9}'), ('\u{9bd}', '\u{9bd}'), ('\u{9ce}', '\u{9ce}'), ('\u{9dc}', '\u{9dd}'),
|
||||
('\u{9df}', '\u{9e1}'), ('\u{9f0}', '\u{9f1}'), ('\u{a05}', '\u{a0a}'), ('\u{a0f}',
|
||||
'\u{a10}'), ('\u{a13}', '\u{a28}'), ('\u{a2a}', '\u{a30}'), ('\u{a32}', '\u{a33}'),
|
||||
('\u{a35}', '\u{a36}'), ('\u{a38}', '\u{a39}'), ('\u{a59}', '\u{a5c}'), ('\u{a5e}',
|
||||
'\u{a5e}'), ('\u{a72}', '\u{a74}'), ('\u{a85}', '\u{a8d}'), ('\u{a8f}', '\u{a91}'),
|
||||
('\u{a93}', '\u{aa8}'), ('\u{aaa}', '\u{ab0}'), ('\u{ab2}', '\u{ab3}'), ('\u{ab5}',
|
||||
'\u{ab9}'), ('\u{abd}', '\u{abd}'), ('\u{ad0}', '\u{ad0}'), ('\u{ae0}', '\u{ae1}'),
|
||||
('\u{af9}', '\u{af9}'), ('\u{b05}', '\u{b0c}'), ('\u{b0f}', '\u{b10}'), ('\u{b13}',
|
||||
'\u{b28}'), ('\u{b2a}', '\u{b30}'), ('\u{b32}', '\u{b33}'), ('\u{b35}', '\u{b39}'),
|
||||
('\u{b3d}', '\u{b3d}'), ('\u{b5c}', '\u{b5d}'), ('\u{b5f}', '\u{b61}'), ('\u{b71}',
|
||||
'\u{b71}'), ('\u{b83}', '\u{b83}'), ('\u{b85}', '\u{b8a}'), ('\u{b8e}', '\u{b90}'),
|
||||
('\u{b92}', '\u{b95}'), ('\u{b99}', '\u{b9a}'), ('\u{b9c}', '\u{b9c}'), ('\u{b9e}',
|
||||
'\u{b9f}'), ('\u{ba3}', '\u{ba4}'), ('\u{ba8}', '\u{baa}'), ('\u{bae}', '\u{bb9}'),
|
||||
('\u{bd0}', '\u{bd0}'), ('\u{c05}', '\u{c0c}'), ('\u{c0e}', '\u{c10}'), ('\u{c12}',
|
||||
'\u{c28}'), ('\u{c2a}', '\u{c39}'), ('\u{c3d}', '\u{c3d}'), ('\u{c58}', '\u{c5a}'),
|
||||
('\u{c60}', '\u{c61}'), ('\u{c80}', '\u{c80}'), ('\u{c85}', '\u{c8c}'), ('\u{c8e}',
|
||||
'\u{c90}'), ('\u{c92}', '\u{ca8}'), ('\u{caa}', '\u{cb3}'), ('\u{cb5}', '\u{cb9}'),
|
||||
('\u{cbd}', '\u{cbd}'), ('\u{cde}', '\u{cde}'), ('\u{ce0}', '\u{ce1}'), ('\u{cf1}',
|
||||
'\u{cf2}'), ('\u{d05}', '\u{d0c}'), ('\u{d0e}', '\u{d10}'), ('\u{d12}', '\u{d3a}'),
|
||||
('\u{d3d}', '\u{d3d}'), ('\u{d4e}', '\u{d4e}'), ('\u{d54}', '\u{d56}'), ('\u{d5f}',
|
||||
'\u{d61}'), ('\u{d7a}', '\u{d7f}'), ('\u{d85}', '\u{d96}'), ('\u{d9a}', '\u{db1}'),
|
||||
('\u{db3}', '\u{dbb}'), ('\u{dbd}', '\u{dbd}'), ('\u{dc0}', '\u{dc6}'), ('\u{e01}',
|
||||
'\u{e30}'), ('\u{e32}', '\u{e32}'), ('\u{e40}', '\u{e46}'), ('\u{e81}', '\u{e82}'),
|
||||
('\u{e84}', '\u{e84}'), ('\u{e87}', '\u{e88}'), ('\u{e8a}', '\u{e8a}'), ('\u{e8d}',
|
||||
'\u{e8d}'), ('\u{e94}', '\u{e97}'), ('\u{e99}', '\u{e9f}'), ('\u{ea1}', '\u{ea3}'),
|
||||
('\u{ea5}', '\u{ea5}'), ('\u{ea7}', '\u{ea7}'), ('\u{eaa}', '\u{eab}'), ('\u{ead}',
|
||||
'\u{eb0}'), ('\u{eb2}', '\u{eb2}'), ('\u{ebd}', '\u{ebd}'), ('\u{ec0}', '\u{ec4}'),
|
||||
('\u{ec6}', '\u{ec6}'), ('\u{edc}', '\u{edf}'), ('\u{f00}', '\u{f00}'), ('\u{f40}',
|
||||
'\u{f47}'), ('\u{f49}', '\u{f6c}'), ('\u{f88}', '\u{f8c}'), ('\u{1000}', '\u{102a}'),
|
||||
('\u{103f}', '\u{103f}'), ('\u{1050}', '\u{1055}'), ('\u{105a}', '\u{105d}'), ('\u{1061}',
|
||||
'\u{1061}'), ('\u{1065}', '\u{1066}'), ('\u{106e}', '\u{1070}'), ('\u{1075}', '\u{1081}'),
|
||||
('\u{108e}', '\u{108e}'), ('\u{10a0}', '\u{10c5}'), ('\u{10c7}', '\u{10c7}'), ('\u{10cd}',
|
||||
'\u{10cd}'), ('\u{10d0}', '\u{10fa}'), ('\u{10fc}', '\u{1248}'), ('\u{124a}', '\u{124d}'),
|
||||
('\u{1250}', '\u{1256}'), ('\u{1258}', '\u{1258}'), ('\u{125a}', '\u{125d}'), ('\u{1260}',
|
||||
'\u{1288}'), ('\u{128a}', '\u{128d}'), ('\u{1290}', '\u{12b0}'), ('\u{12b2}', '\u{12b5}'),
|
||||
('\u{12b8}', '\u{12be}'), ('\u{12c0}', '\u{12c0}'), ('\u{12c2}', '\u{12c5}'), ('\u{12c8}',
|
||||
'\u{12d6}'), ('\u{12d8}', '\u{1310}'), ('\u{1312}', '\u{1315}'), ('\u{1318}', '\u{135a}'),
|
||||
('\u{1380}', '\u{138f}'), ('\u{13a0}', '\u{13f5}'), ('\u{13f8}', '\u{13fd}'), ('\u{1401}',
|
||||
'\u{166c}'), ('\u{166f}', '\u{167f}'), ('\u{1681}', '\u{169a}'), ('\u{16a0}', '\u{16ea}'),
|
||||
('\u{16ee}', '\u{16f8}'), ('\u{1700}', '\u{170c}'), ('\u{170e}', '\u{1711}'), ('\u{1720}',
|
||||
'\u{1731}'), ('\u{1740}', '\u{1751}'), ('\u{1760}', '\u{176c}'), ('\u{176e}', '\u{1770}'),
|
||||
('\u{1780}', '\u{17b3}'), ('\u{17d7}', '\u{17d7}'), ('\u{17dc}', '\u{17dc}'), ('\u{1820}',
|
||||
'\u{1877}'), ('\u{1880}', '\u{18a8}'), ('\u{18aa}', '\u{18aa}'), ('\u{18b0}', '\u{18f5}'),
|
||||
('\u{1900}', '\u{191e}'), ('\u{1950}', '\u{196d}'), ('\u{1970}', '\u{1974}'), ('\u{1980}',
|
||||
'\u{19ab}'), ('\u{19b0}', '\u{19c9}'), ('\u{1a00}', '\u{1a16}'), ('\u{1a20}', '\u{1a54}'),
|
||||
('\u{1aa7}', '\u{1aa7}'), ('\u{1b05}', '\u{1b33}'), ('\u{1b45}', '\u{1b4b}'), ('\u{1b83}',
|
||||
'\u{1ba0}'), ('\u{1bae}', '\u{1baf}'), ('\u{1bba}', '\u{1be5}'), ('\u{1c00}', '\u{1c23}'),
|
||||
('\u{1c4d}', '\u{1c4f}'), ('\u{1c5a}', '\u{1c7d}'), ('\u{1c80}', '\u{1c88}'), ('\u{1ce9}',
|
||||
'\u{1cec}'), ('\u{1cee}', '\u{1cf1}'), ('\u{1cf5}', '\u{1cf6}'), ('\u{1d00}', '\u{1dbf}'),
|
||||
('\u{1e00}', '\u{1f15}'), ('\u{1f18}', '\u{1f1d}'), ('\u{1f20}', '\u{1f45}'), ('\u{1f48}',
|
||||
'\u{1f4d}'), ('\u{1f50}', '\u{1f57}'), ('\u{1f59}', '\u{1f59}'), ('\u{1f5b}', '\u{1f5b}'),
|
||||
('\u{1f5d}', '\u{1f5d}'), ('\u{1f5f}', '\u{1f7d}'), ('\u{1f80}', '\u{1fb4}'), ('\u{1fb6}',
|
||||
'\u{1fbc}'), ('\u{1fbe}', '\u{1fbe}'), ('\u{1fc2}', '\u{1fc4}'), ('\u{1fc6}', '\u{1fcc}'),
|
||||
('\u{1fd0}', '\u{1fd3}'), ('\u{1fd6}', '\u{1fdb}'), ('\u{1fe0}', '\u{1fec}'), ('\u{1ff2}',
|
||||
'\u{1ff4}'), ('\u{1ff6}', '\u{1ffc}'), ('\u{2071}', '\u{2071}'), ('\u{207f}', '\u{207f}'),
|
||||
('\u{2090}', '\u{209c}'), ('\u{2102}', '\u{2102}'), ('\u{2107}', '\u{2107}'), ('\u{210a}',
|
||||
'\u{2113}'), ('\u{2115}', '\u{2115}'), ('\u{2118}', '\u{211d}'), ('\u{2124}', '\u{2124}'),
|
||||
('\u{2126}', '\u{2126}'), ('\u{2128}', '\u{2128}'), ('\u{212a}', '\u{2139}'), ('\u{213c}',
|
||||
'\u{213f}'), ('\u{2145}', '\u{2149}'), ('\u{214e}', '\u{214e}'), ('\u{2160}', '\u{2188}'),
|
||||
('\u{2c00}', '\u{2c2e}'), ('\u{2c30}', '\u{2c5e}'), ('\u{2c60}', '\u{2ce4}'), ('\u{2ceb}',
|
||||
'\u{2cee}'), ('\u{2cf2}', '\u{2cf3}'), ('\u{2d00}', '\u{2d25}'), ('\u{2d27}', '\u{2d27}'),
|
||||
('\u{2d2d}', '\u{2d2d}'), ('\u{2d30}', '\u{2d67}'), ('\u{2d6f}', '\u{2d6f}'), ('\u{2d80}',
|
||||
'\u{2d96}'), ('\u{2da0}', '\u{2da6}'), ('\u{2da8}', '\u{2dae}'), ('\u{2db0}', '\u{2db6}'),
|
||||
('\u{2db8}', '\u{2dbe}'), ('\u{2dc0}', '\u{2dc6}'), ('\u{2dc8}', '\u{2dce}'), ('\u{2dd0}',
|
||||
'\u{2dd6}'), ('\u{2dd8}', '\u{2dde}'), ('\u{3005}', '\u{3007}'), ('\u{3021}', '\u{3029}'),
|
||||
('\u{3031}', '\u{3035}'), ('\u{3038}', '\u{303c}'), ('\u{3041}', '\u{3096}'), ('\u{309d}',
|
||||
'\u{309f}'), ('\u{30a1}', '\u{30fa}'), ('\u{30fc}', '\u{30ff}'), ('\u{3105}', '\u{312d}'),
|
||||
('\u{3131}', '\u{318e}'), ('\u{31a0}', '\u{31ba}'), ('\u{31f0}', '\u{31ff}'), ('\u{3400}',
|
||||
'\u{4db5}'), ('\u{4e00}', '\u{9fd5}'), ('\u{a000}', '\u{a48c}'), ('\u{a4d0}', '\u{a4fd}'),
|
||||
('\u{a500}', '\u{a60c}'), ('\u{a610}', '\u{a61f}'), ('\u{a62a}', '\u{a62b}'), ('\u{a640}',
|
||||
'\u{a66e}'), ('\u{a67f}', '\u{a69d}'), ('\u{a6a0}', '\u{a6ef}'), ('\u{a717}', '\u{a71f}'),
|
||||
('\u{a722}', '\u{a788}'), ('\u{a78b}', '\u{a7ae}'), ('\u{a7b0}', '\u{a7b7}'), ('\u{a7f7}',
|
||||
'\u{a801}'), ('\u{a803}', '\u{a805}'), ('\u{a807}', '\u{a80a}'), ('\u{a80c}', '\u{a822}'),
|
||||
('\u{a840}', '\u{a873}'), ('\u{a882}', '\u{a8b3}'), ('\u{a8f2}', '\u{a8f7}'), ('\u{a8fb}',
|
||||
'\u{a8fb}'), ('\u{a8fd}', '\u{a8fd}'), ('\u{a90a}', '\u{a925}'), ('\u{a930}', '\u{a946}'),
|
||||
('\u{a960}', '\u{a97c}'), ('\u{a984}', '\u{a9b2}'), ('\u{a9cf}', '\u{a9cf}'), ('\u{a9e0}',
|
||||
'\u{a9e4}'), ('\u{a9e6}', '\u{a9ef}'), ('\u{a9fa}', '\u{a9fe}'), ('\u{aa00}', '\u{aa28}'),
|
||||
('\u{aa40}', '\u{aa42}'), ('\u{aa44}', '\u{aa4b}'), ('\u{aa60}', '\u{aa76}'), ('\u{aa7a}',
|
||||
'\u{aa7a}'), ('\u{aa7e}', '\u{aaaf}'), ('\u{aab1}', '\u{aab1}'), ('\u{aab5}', '\u{aab6}'),
|
||||
('\u{aab9}', '\u{aabd}'), ('\u{aac0}', '\u{aac0}'), ('\u{aac2}', '\u{aac2}'), ('\u{aadb}',
|
||||
'\u{aadd}'), ('\u{aae0}', '\u{aaea}'), ('\u{aaf2}', '\u{aaf4}'), ('\u{ab01}', '\u{ab06}'),
|
||||
('\u{ab09}', '\u{ab0e}'), ('\u{ab11}', '\u{ab16}'), ('\u{ab20}', '\u{ab26}'), ('\u{ab28}',
|
||||
'\u{ab2e}'), ('\u{ab30}', '\u{ab5a}'), ('\u{ab5c}', '\u{ab65}'), ('\u{ab70}', '\u{abe2}'),
|
||||
('\u{ac00}', '\u{d7a3}'), ('\u{d7b0}', '\u{d7c6}'), ('\u{d7cb}', '\u{d7fb}'), ('\u{f900}',
|
||||
'\u{fa6d}'), ('\u{fa70}', '\u{fad9}'), ('\u{fb00}', '\u{fb06}'), ('\u{fb13}', '\u{fb17}'),
|
||||
('\u{fb1d}', '\u{fb1d}'), ('\u{fb1f}', '\u{fb28}'), ('\u{fb2a}', '\u{fb36}'), ('\u{fb38}',
|
||||
'\u{fb3c}'), ('\u{fb3e}', '\u{fb3e}'), ('\u{fb40}', '\u{fb41}'), ('\u{fb43}', '\u{fb44}'),
|
||||
('\u{fb46}', '\u{fbb1}'), ('\u{fbd3}', '\u{fc5d}'), ('\u{fc64}', '\u{fd3d}'), ('\u{fd50}',
|
||||
'\u{fd8f}'), ('\u{fd92}', '\u{fdc7}'), ('\u{fdf0}', '\u{fdf9}'), ('\u{fe71}', '\u{fe71}'),
|
||||
('\u{fe73}', '\u{fe73}'), ('\u{fe77}', '\u{fe77}'), ('\u{fe79}', '\u{fe79}'), ('\u{fe7b}',
|
||||
'\u{fe7b}'), ('\u{fe7d}', '\u{fe7d}'), ('\u{fe7f}', '\u{fefc}'), ('\u{ff21}', '\u{ff3a}'),
|
||||
('\u{ff41}', '\u{ff5a}'), ('\u{ff66}', '\u{ff9d}'), ('\u{ffa0}', '\u{ffbe}'), ('\u{ffc2}',
|
||||
'\u{ffc7}'), ('\u{ffca}', '\u{ffcf}'), ('\u{ffd2}', '\u{ffd7}'), ('\u{ffda}', '\u{ffdc}'),
|
||||
('\u{10000}', '\u{1000b}'), ('\u{1000d}', '\u{10026}'), ('\u{10028}', '\u{1003a}'),
|
||||
('\u{1003c}', '\u{1003d}'), ('\u{1003f}', '\u{1004d}'), ('\u{10050}', '\u{1005d}'),
|
||||
('\u{10080}', '\u{100fa}'), ('\u{10140}', '\u{10174}'), ('\u{10280}', '\u{1029c}'),
|
||||
('\u{102a0}', '\u{102d0}'), ('\u{10300}', '\u{1031f}'), ('\u{10330}', '\u{1034a}'),
|
||||
('\u{10350}', '\u{10375}'), ('\u{10380}', '\u{1039d}'), ('\u{103a0}', '\u{103c3}'),
|
||||
('\u{103c8}', '\u{103cf}'), ('\u{103d1}', '\u{103d5}'), ('\u{10400}', '\u{1049d}'),
|
||||
('\u{104b0}', '\u{104d3}'), ('\u{104d8}', '\u{104fb}'), ('\u{10500}', '\u{10527}'),
|
||||
('\u{10530}', '\u{10563}'), ('\u{10600}', '\u{10736}'), ('\u{10740}', '\u{10755}'),
|
||||
('\u{10760}', '\u{10767}'), ('\u{10800}', '\u{10805}'), ('\u{10808}', '\u{10808}'),
|
||||
('\u{1080a}', '\u{10835}'), ('\u{10837}', '\u{10838}'), ('\u{1083c}', '\u{1083c}'),
|
||||
('\u{1083f}', '\u{10855}'), ('\u{10860}', '\u{10876}'), ('\u{10880}', '\u{1089e}'),
|
||||
('\u{108e0}', '\u{108f2}'), ('\u{108f4}', '\u{108f5}'), ('\u{10900}', '\u{10915}'),
|
||||
('\u{10920}', '\u{10939}'), ('\u{10980}', '\u{109b7}'), ('\u{109be}', '\u{109bf}'),
|
||||
('\u{10a00}', '\u{10a00}'), ('\u{10a10}', '\u{10a13}'), ('\u{10a15}', '\u{10a17}'),
|
||||
('\u{10a19}', '\u{10a33}'), ('\u{10a60}', '\u{10a7c}'), ('\u{10a80}', '\u{10a9c}'),
|
||||
('\u{10ac0}', '\u{10ac7}'), ('\u{10ac9}', '\u{10ae4}'), ('\u{10b00}', '\u{10b35}'),
|
||||
('\u{10b40}', '\u{10b55}'), ('\u{10b60}', '\u{10b72}'), ('\u{10b80}', '\u{10b91}'),
|
||||
('\u{10c00}', '\u{10c48}'), ('\u{10c80}', '\u{10cb2}'), ('\u{10cc0}', '\u{10cf2}'),
|
||||
('\u{11003}', '\u{11037}'), ('\u{11083}', '\u{110af}'), ('\u{110d0}', '\u{110e8}'),
|
||||
('\u{11103}', '\u{11126}'), ('\u{11150}', '\u{11172}'), ('\u{11176}', '\u{11176}'),
|
||||
('\u{11183}', '\u{111b2}'), ('\u{111c1}', '\u{111c4}'), ('\u{111da}', '\u{111da}'),
|
||||
('\u{111dc}', '\u{111dc}'), ('\u{11200}', '\u{11211}'), ('\u{11213}', '\u{1122b}'),
|
||||
('\u{11280}', '\u{11286}'), ('\u{11288}', '\u{11288}'), ('\u{1128a}', '\u{1128d}'),
|
||||
('\u{1128f}', '\u{1129d}'), ('\u{1129f}', '\u{112a8}'), ('\u{112b0}', '\u{112de}'),
|
||||
('\u{11305}', '\u{1130c}'), ('\u{1130f}', '\u{11310}'), ('\u{11313}', '\u{11328}'),
|
||||
('\u{1132a}', '\u{11330}'), ('\u{11332}', '\u{11333}'), ('\u{11335}', '\u{11339}'),
|
||||
('\u{1133d}', '\u{1133d}'), ('\u{11350}', '\u{11350}'), ('\u{1135d}', '\u{11361}'),
|
||||
('\u{11400}', '\u{11434}'), ('\u{11447}', '\u{1144a}'), ('\u{11480}', '\u{114af}'),
|
||||
('\u{114c4}', '\u{114c5}'), ('\u{114c7}', '\u{114c7}'), ('\u{11580}', '\u{115ae}'),
|
||||
('\u{115d8}', '\u{115db}'), ('\u{11600}', '\u{1162f}'), ('\u{11644}', '\u{11644}'),
|
||||
('\u{11680}', '\u{116aa}'), ('\u{11700}', '\u{11719}'), ('\u{118a0}', '\u{118df}'),
|
||||
('\u{118ff}', '\u{118ff}'), ('\u{11ac0}', '\u{11af8}'), ('\u{11c00}', '\u{11c08}'),
|
||||
('\u{11c0a}', '\u{11c2e}'), ('\u{11c40}', '\u{11c40}'), ('\u{11c72}', '\u{11c8f}'),
|
||||
('\u{12000}', '\u{12399}'), ('\u{12400}', '\u{1246e}'), ('\u{12480}', '\u{12543}'),
|
||||
('\u{13000}', '\u{1342e}'), ('\u{14400}', '\u{14646}'), ('\u{16800}', '\u{16a38}'),
|
||||
('\u{16a40}', '\u{16a5e}'), ('\u{16ad0}', '\u{16aed}'), ('\u{16b00}', '\u{16b2f}'),
|
||||
('\u{16b40}', '\u{16b43}'), ('\u{16b63}', '\u{16b77}'), ('\u{16b7d}', '\u{16b8f}'),
|
||||
('\u{16f00}', '\u{16f44}'), ('\u{16f50}', '\u{16f50}'), ('\u{16f93}', '\u{16f9f}'),
|
||||
('\u{16fe0}', '\u{16fe0}'), ('\u{17000}', '\u{187ec}'), ('\u{18800}', '\u{18af2}'),
|
||||
('\u{1b000}', '\u{1b001}'), ('\u{1bc00}', '\u{1bc6a}'), ('\u{1bc70}', '\u{1bc7c}'),
|
||||
('\u{1bc80}', '\u{1bc88}'), ('\u{1bc90}', '\u{1bc99}'), ('\u{1d400}', '\u{1d454}'),
|
||||
('\u{1d456}', '\u{1d49c}'), ('\u{1d49e}', '\u{1d49f}'), ('\u{1d4a2}', '\u{1d4a2}'),
|
||||
('\u{1d4a5}', '\u{1d4a6}'), ('\u{1d4a9}', '\u{1d4ac}'), ('\u{1d4ae}', '\u{1d4b9}'),
|
||||
('\u{1d4bb}', '\u{1d4bb}'), ('\u{1d4bd}', '\u{1d4c3}'), ('\u{1d4c5}', '\u{1d505}'),
|
||||
('\u{1d507}', '\u{1d50a}'), ('\u{1d50d}', '\u{1d514}'), ('\u{1d516}', '\u{1d51c}'),
|
||||
('\u{1d51e}', '\u{1d539}'), ('\u{1d53b}', '\u{1d53e}'), ('\u{1d540}', '\u{1d544}'),
|
||||
('\u{1d546}', '\u{1d546}'), ('\u{1d54a}', '\u{1d550}'), ('\u{1d552}', '\u{1d6a5}'),
|
||||
('\u{1d6a8}', '\u{1d6c0}'), ('\u{1d6c2}', '\u{1d6da}'), ('\u{1d6dc}', '\u{1d6fa}'),
|
||||
('\u{1d6fc}', '\u{1d714}'), ('\u{1d716}', '\u{1d734}'), ('\u{1d736}', '\u{1d74e}'),
|
||||
('\u{1d750}', '\u{1d76e}'), ('\u{1d770}', '\u{1d788}'), ('\u{1d78a}', '\u{1d7a8}'),
|
||||
('\u{1d7aa}', '\u{1d7c2}'), ('\u{1d7c4}', '\u{1d7cb}'), ('\u{1e800}', '\u{1e8c4}'),
|
||||
('\u{1e900}', '\u{1e943}'), ('\u{1ee00}', '\u{1ee03}'), ('\u{1ee05}', '\u{1ee1f}'),
|
||||
('\u{1ee21}', '\u{1ee22}'), ('\u{1ee24}', '\u{1ee24}'), ('\u{1ee27}', '\u{1ee27}'),
|
||||
('\u{1ee29}', '\u{1ee32}'), ('\u{1ee34}', '\u{1ee37}'), ('\u{1ee39}', '\u{1ee39}'),
|
||||
('\u{1ee3b}', '\u{1ee3b}'), ('\u{1ee42}', '\u{1ee42}'), ('\u{1ee47}', '\u{1ee47}'),
|
||||
('\u{1ee49}', '\u{1ee49}'), ('\u{1ee4b}', '\u{1ee4b}'), ('\u{1ee4d}', '\u{1ee4f}'),
|
||||
('\u{1ee51}', '\u{1ee52}'), ('\u{1ee54}', '\u{1ee54}'), ('\u{1ee57}', '\u{1ee57}'),
|
||||
('\u{1ee59}', '\u{1ee59}'), ('\u{1ee5b}', '\u{1ee5b}'), ('\u{1ee5d}', '\u{1ee5d}'),
|
||||
('\u{1ee5f}', '\u{1ee5f}'), ('\u{1ee61}', '\u{1ee62}'), ('\u{1ee64}', '\u{1ee64}'),
|
||||
('\u{1ee67}', '\u{1ee6a}'), ('\u{1ee6c}', '\u{1ee72}'), ('\u{1ee74}', '\u{1ee77}'),
|
||||
('\u{1ee79}', '\u{1ee7c}'), ('\u{1ee7e}', '\u{1ee7e}'), ('\u{1ee80}', '\u{1ee89}'),
|
||||
('\u{1ee8b}', '\u{1ee9b}'), ('\u{1eea1}', '\u{1eea3}'), ('\u{1eea5}', '\u{1eea9}'),
|
||||
('\u{1eeab}', '\u{1eebb}'), ('\u{20000}', '\u{2a6d6}'), ('\u{2a700}', '\u{2b734}'),
|
||||
('\u{2b740}', '\u{2b81d}'), ('\u{2b820}', '\u{2cea1}'), ('\u{2f800}', '\u{2fa1d}')
|
||||
];
|
||||
|
||||
pub fn XID_Start(c: char) -> bool {
|
||||
super::bsearch_range_table(c, XID_Start_table)
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -1,113 +0,0 @@
|
|||
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
#[cfg(feature = "bench")]
|
||||
use std::iter;
|
||||
#[cfg(feature = "bench")]
|
||||
use test::Bencher;
|
||||
#[cfg(feature = "bench")]
|
||||
use std::prelude::v1::*;
|
||||
|
||||
use super::UnicodeXID;
|
||||
|
||||
#[cfg(feature = "bench")]
|
||||
#[bench]
|
||||
fn cargo_is_xid_start(b: &mut Bencher) {
|
||||
let string = iter::repeat('a').take(4096).collect::<String>();
|
||||
|
||||
b.bytes = string.len() as u64;
|
||||
b.iter(|| {
|
||||
string.chars().all(UnicodeXID::is_xid_start)
|
||||
});
|
||||
}
|
||||
|
||||
#[cfg(feature = "bench")]
|
||||
#[bench]
|
||||
fn stdlib_is_xid_start(b: &mut Bencher) {
|
||||
let string = iter::repeat('a').take(4096).collect::<String>();
|
||||
|
||||
b.bytes = string.len() as u64;
|
||||
b.iter(|| {
|
||||
string.chars().all(char::is_xid_start)
|
||||
});
|
||||
}
|
||||
|
||||
#[cfg(feature = "bench")]
|
||||
#[bench]
|
||||
fn cargo_xid_continue(b: &mut Bencher) {
|
||||
let string = iter::repeat('a').take(4096).collect::<String>();
|
||||
|
||||
b.bytes = string.len() as u64;
|
||||
b.iter(|| {
|
||||
string.chars().all(UnicodeXID::is_xid_continue)
|
||||
});
|
||||
}
|
||||
|
||||
#[cfg(feature = "bench")]
|
||||
#[bench]
|
||||
fn stdlib_xid_continue(b: &mut Bencher) {
|
||||
let string = iter::repeat('a').take(4096).collect::<String>();
|
||||
|
||||
b.bytes = string.len() as u64;
|
||||
b.iter(|| {
|
||||
string.chars().all(char::is_xid_continue)
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_is_xid_start() {
|
||||
let chars = [
|
||||
'A', 'Z', 'a', 'z',
|
||||
'\u{1000d}', '\u{10026}',
|
||||
];
|
||||
|
||||
for ch in &chars {
|
||||
assert!(UnicodeXID::is_xid_start(*ch), "{}", ch);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_is_not_xid_start() {
|
||||
let chars = [
|
||||
'\x00', '\x01',
|
||||
'0', '9',
|
||||
' ', '[', '<', '{', '(',
|
||||
'\u{02c2}', '\u{ffff}',
|
||||
];
|
||||
|
||||
for ch in &chars {
|
||||
assert!(!UnicodeXID::is_xid_start(*ch), "{}", ch);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_is_xid_continue() {
|
||||
let chars = [
|
||||
'0', '9', 'A', 'Z', 'a', 'z', '_',
|
||||
'\u{1000d}', '\u{10026}',
|
||||
];
|
||||
|
||||
for ch in &chars {
|
||||
assert!(UnicodeXID::is_xid_continue(*ch), "{}", ch);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_is_not_xid_continue() {
|
||||
let chars = [
|
||||
'\x00', '\x01',
|
||||
' ', '[', '<', '{', '(',
|
||||
'\u{02c2}', '\u{ffff}',
|
||||
];
|
||||
|
||||
for &ch in &chars {
|
||||
assert!(!UnicodeXID::is_xid_continue(ch), "{}", ch);
|
||||
}
|
||||
}
|
|
@ -1 +1 @@
|
|||
{"files":{"COPYRIGHT":"23860c2a7b5d96b21569afedf033469bab9fe14a1b24a35068b8641c578ce24d","Cargo.toml":"d675b1531f28cec902162b875d7718cbbacdbb97d60a043940d08d2368e660f3","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"7b63ecd5f1902af1b63729947373683c32745c16a10e8e6292e2e2dcd7e90ae0","README.md":"c9874f691222e560b8a468adf8c471d30a6efe8d02d6f3457dcbc4f95ac1e978","src/lib.rs":"4038be7839c000997565e21eaebf20842d389a64b057fcd519b674c03c466f77","src/tables.rs":"40f3711b453ea58a02e69a37f66ebe4fcb49d2cc4da5b013c732539cfceee48c","src/tests.rs":"c2bb7412b6cee1a330c85f3d60eb77e98162f98f268b6fe9f748d8910f78a026"},"package":"826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c"}
|
||||
{"files":{"COPYRIGHT":"23860c2a7b5d96b21569afedf033469bab9fe14a1b24a35068b8641c578ce24d","Cargo.toml":"aafcae4002bee71546a6aa40a97b9124a69f169ee7e3a9e3262338e32b4c2b9b","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"7b63ecd5f1902af1b63729947373683c32745c16a10e8e6292e2e2dcd7e90ae0","README.md":"67998486b32f4fe46abbbaa411b92528750e7f0e22452dc8a5b95d87d80fde75","scripts/unicode.py":"762eea92dd51238c6bf877570bde1149932ba15cf87be1618fc21cd53e941733","src/lib.rs":"4a89fadf452ae7c53536eaa4496f951a3153f8189dd1cbc532648731d30f0b11","src/tables.rs":"0643459b6ebeeed83aecd7604f0ea29c06bea7ce6c1cd9acd4988d27ace1ec53","src/tests.rs":"35a459382e190197e7b9a78832ae79f310b48a02a5b4227bf9bbc89d46c8deac"},"package":"fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc"}
|
|
@ -1,31 +1,26 @@
|
|||
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
|
||||
#
|
||||
# When uploading crates to the registry Cargo will automatically
|
||||
# "normalize" Cargo.toml files for maximal compatibility
|
||||
# with all versions of Cargo and also rewrite `path` dependencies
|
||||
# to registry (e.g., crates.io) dependencies
|
||||
#
|
||||
# If you believe there's an error in this file please file an
|
||||
# issue against the rust-lang/cargo repository. If you're
|
||||
# editing this file be aware that the upstream Cargo.toml
|
||||
# will likely look very different (and much more reasonable)
|
||||
|
||||
[package]
|
||||
|
||||
name = "unicode-xid"
|
||||
version = "0.2.0"
|
||||
authors = ["erick.tryzelaar <erick.tryzelaar@gmail.com>", "kwantam <kwantam@gmail.com>"]
|
||||
exclude = ["/scripts/*", "/.travis.yml"]
|
||||
description = "Determine whether characters have the XID_Start\nor XID_Continue properties according to\nUnicode Standard Annex #31.\n"
|
||||
version = "0.1.0"
|
||||
authors = ["erick.tryzelaar <erick.tryzelaar@gmail.com>",
|
||||
"kwantam <kwantam@gmail.com>",
|
||||
]
|
||||
|
||||
homepage = "https://github.com/unicode-rs/unicode-xid"
|
||||
documentation = "https://unicode-rs.github.io/unicode-xid"
|
||||
readme = "README.md"
|
||||
keywords = ["text", "unicode", "xid"]
|
||||
license = "MIT OR Apache-2.0"
|
||||
repository = "https://github.com/unicode-rs/unicode-xid"
|
||||
documentation = "https://unicode-rs.github.io/unicode-xid"
|
||||
license = "MIT/Apache-2.0"
|
||||
keywords = ["text", "unicode", "xid"]
|
||||
readme = "README.md"
|
||||
description = """
|
||||
Determine whether characters have the XID_Start
|
||||
or XID_Continue properties according to
|
||||
Unicode Standard Annex #31.
|
||||
"""
|
||||
|
||||
exclude = [ "target/*", "Cargo.lock" ]
|
||||
|
||||
[features]
|
||||
bench = []
|
||||
default = []
|
||||
no_std = []
|
||||
[badges.travis-ci]
|
||||
repository = "unicode-rs/unicode-xid"
|
||||
bench = []
|
||||
|
|
|
@ -30,15 +30,5 @@ to your `Cargo.toml`:
|
|||
|
||||
```toml
|
||||
[dependencies]
|
||||
unicode-xid = "0.1.0"
|
||||
unicode-xid = "0.0.4"
|
||||
```
|
||||
|
||||
# changelog
|
||||
|
||||
## 0.2.0
|
||||
|
||||
- Update to Unicode 12.1.0.
|
||||
|
||||
## 0.1.0
|
||||
|
||||
- Initial release.
|
||||
|
|
|
@ -42,7 +42,7 @@
|
|||
html_favicon_url = "https://unicode-rs.github.io/unicode-rs_sm.png")]
|
||||
|
||||
#![no_std]
|
||||
#![cfg_attr(feature = "bench", feature(test, unicode_internals))]
|
||||
#![cfg_attr(feature = "bench", feature(test, unicode))]
|
||||
|
||||
#[cfg(test)]
|
||||
#[macro_use]
|
||||
|
|
|
@ -14,9 +14,9 @@
|
|||
|
||||
/// The version of [Unicode](http://www.unicode.org/)
|
||||
/// that this version of unicode-xid is based on.
|
||||
pub const UNICODE_VERSION: (u64, u64, u64) = (12, 1, 0);
|
||||
pub const UNICODE_VERSION: (u64, u64, u64) = (9, 0, 0);
|
||||
|
||||
fn bsearch_range_table(c: char, r: &[(char,char)]) -> bool {
|
||||
fn bsearch_range_table(c: char, r: &'static [(char,char)]) -> bool {
|
||||
use core::cmp::Ordering::{Equal, Less, Greater};
|
||||
|
||||
r.binary_search_by(|&(lo,hi)| {
|
||||
|
@ -27,7 +27,7 @@ fn bsearch_range_table(c: char, r: &[(char,char)]) -> bool {
|
|||
}
|
||||
|
||||
pub mod derived_property {
|
||||
pub const XID_Continue_table: &[(char, char)] = &[
|
||||
pub const XID_Continue_table: &'static [(char, char)] = &[
|
||||
('\u{30}', '\u{39}'), ('\u{41}', '\u{5a}'), ('\u{5f}', '\u{5f}'), ('\u{61}', '\u{7a}'),
|
||||
('\u{aa}', '\u{aa}'), ('\u{b5}', '\u{b5}'), ('\u{b7}', '\u{b7}'), ('\u{ba}', '\u{ba}'),
|
||||
('\u{c0}', '\u{d6}'), ('\u{d8}', '\u{f6}'), ('\u{f8}', '\u{2c1}'), ('\u{2c6}', '\u{2d1}'),
|
||||
|
@ -35,412 +35,387 @@ pub mod derived_property {
|
|||
'\u{374}'), ('\u{376}', '\u{377}'), ('\u{37b}', '\u{37d}'), ('\u{37f}', '\u{37f}'),
|
||||
('\u{386}', '\u{38a}'), ('\u{38c}', '\u{38c}'), ('\u{38e}', '\u{3a1}'), ('\u{3a3}',
|
||||
'\u{3f5}'), ('\u{3f7}', '\u{481}'), ('\u{483}', '\u{487}'), ('\u{48a}', '\u{52f}'),
|
||||
('\u{531}', '\u{556}'), ('\u{559}', '\u{559}'), ('\u{560}', '\u{588}'), ('\u{591}',
|
||||
('\u{531}', '\u{556}'), ('\u{559}', '\u{559}'), ('\u{561}', '\u{587}'), ('\u{591}',
|
||||
'\u{5bd}'), ('\u{5bf}', '\u{5bf}'), ('\u{5c1}', '\u{5c2}'), ('\u{5c4}', '\u{5c5}'),
|
||||
('\u{5c7}', '\u{5c7}'), ('\u{5d0}', '\u{5ea}'), ('\u{5ef}', '\u{5f2}'), ('\u{610}',
|
||||
('\u{5c7}', '\u{5c7}'), ('\u{5d0}', '\u{5ea}'), ('\u{5f0}', '\u{5f2}'), ('\u{610}',
|
||||
'\u{61a}'), ('\u{620}', '\u{669}'), ('\u{66e}', '\u{6d3}'), ('\u{6d5}', '\u{6dc}'),
|
||||
('\u{6df}', '\u{6e8}'), ('\u{6ea}', '\u{6fc}'), ('\u{6ff}', '\u{6ff}'), ('\u{710}',
|
||||
'\u{74a}'), ('\u{74d}', '\u{7b1}'), ('\u{7c0}', '\u{7f5}'), ('\u{7fa}', '\u{7fa}'),
|
||||
('\u{7fd}', '\u{7fd}'), ('\u{800}', '\u{82d}'), ('\u{840}', '\u{85b}'), ('\u{860}',
|
||||
'\u{86a}'), ('\u{8a0}', '\u{8b4}'), ('\u{8b6}', '\u{8bd}'), ('\u{8d3}', '\u{8e1}'),
|
||||
('\u{8e3}', '\u{963}'), ('\u{966}', '\u{96f}'), ('\u{971}', '\u{983}'), ('\u{985}',
|
||||
'\u{98c}'), ('\u{98f}', '\u{990}'), ('\u{993}', '\u{9a8}'), ('\u{9aa}', '\u{9b0}'),
|
||||
('\u{9b2}', '\u{9b2}'), ('\u{9b6}', '\u{9b9}'), ('\u{9bc}', '\u{9c4}'), ('\u{9c7}',
|
||||
'\u{9c8}'), ('\u{9cb}', '\u{9ce}'), ('\u{9d7}', '\u{9d7}'), ('\u{9dc}', '\u{9dd}'),
|
||||
('\u{9df}', '\u{9e3}'), ('\u{9e6}', '\u{9f1}'), ('\u{9fc}', '\u{9fc}'), ('\u{9fe}',
|
||||
'\u{9fe}'), ('\u{a01}', '\u{a03}'), ('\u{a05}', '\u{a0a}'), ('\u{a0f}', '\u{a10}'),
|
||||
('\u{a13}', '\u{a28}'), ('\u{a2a}', '\u{a30}'), ('\u{a32}', '\u{a33}'), ('\u{a35}',
|
||||
'\u{a36}'), ('\u{a38}', '\u{a39}'), ('\u{a3c}', '\u{a3c}'), ('\u{a3e}', '\u{a42}'),
|
||||
('\u{a47}', '\u{a48}'), ('\u{a4b}', '\u{a4d}'), ('\u{a51}', '\u{a51}'), ('\u{a59}',
|
||||
'\u{a5c}'), ('\u{a5e}', '\u{a5e}'), ('\u{a66}', '\u{a75}'), ('\u{a81}', '\u{a83}'),
|
||||
('\u{a85}', '\u{a8d}'), ('\u{a8f}', '\u{a91}'), ('\u{a93}', '\u{aa8}'), ('\u{aaa}',
|
||||
'\u{ab0}'), ('\u{ab2}', '\u{ab3}'), ('\u{ab5}', '\u{ab9}'), ('\u{abc}', '\u{ac5}'),
|
||||
('\u{ac7}', '\u{ac9}'), ('\u{acb}', '\u{acd}'), ('\u{ad0}', '\u{ad0}'), ('\u{ae0}',
|
||||
'\u{ae3}'), ('\u{ae6}', '\u{aef}'), ('\u{af9}', '\u{aff}'), ('\u{b01}', '\u{b03}'),
|
||||
('\u{b05}', '\u{b0c}'), ('\u{b0f}', '\u{b10}'), ('\u{b13}', '\u{b28}'), ('\u{b2a}',
|
||||
'\u{b30}'), ('\u{b32}', '\u{b33}'), ('\u{b35}', '\u{b39}'), ('\u{b3c}', '\u{b44}'),
|
||||
('\u{b47}', '\u{b48}'), ('\u{b4b}', '\u{b4d}'), ('\u{b56}', '\u{b57}'), ('\u{b5c}',
|
||||
'\u{b5d}'), ('\u{b5f}', '\u{b63}'), ('\u{b66}', '\u{b6f}'), ('\u{b71}', '\u{b71}'),
|
||||
('\u{b82}', '\u{b83}'), ('\u{b85}', '\u{b8a}'), ('\u{b8e}', '\u{b90}'), ('\u{b92}',
|
||||
'\u{b95}'), ('\u{b99}', '\u{b9a}'), ('\u{b9c}', '\u{b9c}'), ('\u{b9e}', '\u{b9f}'),
|
||||
('\u{ba3}', '\u{ba4}'), ('\u{ba8}', '\u{baa}'), ('\u{bae}', '\u{bb9}'), ('\u{bbe}',
|
||||
'\u{bc2}'), ('\u{bc6}', '\u{bc8}'), ('\u{bca}', '\u{bcd}'), ('\u{bd0}', '\u{bd0}'),
|
||||
('\u{bd7}', '\u{bd7}'), ('\u{be6}', '\u{bef}'), ('\u{c00}', '\u{c0c}'), ('\u{c0e}',
|
||||
'\u{c10}'), ('\u{c12}', '\u{c28}'), ('\u{c2a}', '\u{c39}'), ('\u{c3d}', '\u{c44}'),
|
||||
('\u{c46}', '\u{c48}'), ('\u{c4a}', '\u{c4d}'), ('\u{c55}', '\u{c56}'), ('\u{c58}',
|
||||
'\u{c5a}'), ('\u{c60}', '\u{c63}'), ('\u{c66}', '\u{c6f}'), ('\u{c80}', '\u{c83}'),
|
||||
('\u{c85}', '\u{c8c}'), ('\u{c8e}', '\u{c90}'), ('\u{c92}', '\u{ca8}'), ('\u{caa}',
|
||||
'\u{cb3}'), ('\u{cb5}', '\u{cb9}'), ('\u{cbc}', '\u{cc4}'), ('\u{cc6}', '\u{cc8}'),
|
||||
('\u{cca}', '\u{ccd}'), ('\u{cd5}', '\u{cd6}'), ('\u{cde}', '\u{cde}'), ('\u{ce0}',
|
||||
'\u{ce3}'), ('\u{ce6}', '\u{cef}'), ('\u{cf1}', '\u{cf2}'), ('\u{d00}', '\u{d03}'),
|
||||
('\u{d05}', '\u{d0c}'), ('\u{d0e}', '\u{d10}'), ('\u{d12}', '\u{d44}'), ('\u{d46}',
|
||||
'\u{d48}'), ('\u{d4a}', '\u{d4e}'), ('\u{d54}', '\u{d57}'), ('\u{d5f}', '\u{d63}'),
|
||||
('\u{d66}', '\u{d6f}'), ('\u{d7a}', '\u{d7f}'), ('\u{d82}', '\u{d83}'), ('\u{d85}',
|
||||
'\u{d96}'), ('\u{d9a}', '\u{db1}'), ('\u{db3}', '\u{dbb}'), ('\u{dbd}', '\u{dbd}'),
|
||||
('\u{dc0}', '\u{dc6}'), ('\u{dca}', '\u{dca}'), ('\u{dcf}', '\u{dd4}'), ('\u{dd6}',
|
||||
'\u{dd6}'), ('\u{dd8}', '\u{ddf}'), ('\u{de6}', '\u{def}'), ('\u{df2}', '\u{df3}'),
|
||||
('\u{e01}', '\u{e3a}'), ('\u{e40}', '\u{e4e}'), ('\u{e50}', '\u{e59}'), ('\u{e81}',
|
||||
'\u{e82}'), ('\u{e84}', '\u{e84}'), ('\u{e86}', '\u{e8a}'), ('\u{e8c}', '\u{ea3}'),
|
||||
('\u{ea5}', '\u{ea5}'), ('\u{ea7}', '\u{ebd}'), ('\u{ec0}', '\u{ec4}'), ('\u{ec6}',
|
||||
'\u{ec6}'), ('\u{ec8}', '\u{ecd}'), ('\u{ed0}', '\u{ed9}'), ('\u{edc}', '\u{edf}'),
|
||||
('\u{f00}', '\u{f00}'), ('\u{f18}', '\u{f19}'), ('\u{f20}', '\u{f29}'), ('\u{f35}',
|
||||
'\u{f35}'), ('\u{f37}', '\u{f37}'), ('\u{f39}', '\u{f39}'), ('\u{f3e}', '\u{f47}'),
|
||||
('\u{f49}', '\u{f6c}'), ('\u{f71}', '\u{f84}'), ('\u{f86}', '\u{f97}'), ('\u{f99}',
|
||||
'\u{fbc}'), ('\u{fc6}', '\u{fc6}'), ('\u{1000}', '\u{1049}'), ('\u{1050}', '\u{109d}'),
|
||||
('\u{10a0}', '\u{10c5}'), ('\u{10c7}', '\u{10c7}'), ('\u{10cd}', '\u{10cd}'), ('\u{10d0}',
|
||||
'\u{10fa}'), ('\u{10fc}', '\u{1248}'), ('\u{124a}', '\u{124d}'), ('\u{1250}', '\u{1256}'),
|
||||
('\u{1258}', '\u{1258}'), ('\u{125a}', '\u{125d}'), ('\u{1260}', '\u{1288}'), ('\u{128a}',
|
||||
'\u{128d}'), ('\u{1290}', '\u{12b0}'), ('\u{12b2}', '\u{12b5}'), ('\u{12b8}', '\u{12be}'),
|
||||
('\u{12c0}', '\u{12c0}'), ('\u{12c2}', '\u{12c5}'), ('\u{12c8}', '\u{12d6}'), ('\u{12d8}',
|
||||
'\u{1310}'), ('\u{1312}', '\u{1315}'), ('\u{1318}', '\u{135a}'), ('\u{135d}', '\u{135f}'),
|
||||
('\u{1369}', '\u{1371}'), ('\u{1380}', '\u{138f}'), ('\u{13a0}', '\u{13f5}'), ('\u{13f8}',
|
||||
'\u{13fd}'), ('\u{1401}', '\u{166c}'), ('\u{166f}', '\u{167f}'), ('\u{1681}', '\u{169a}'),
|
||||
('\u{16a0}', '\u{16ea}'), ('\u{16ee}', '\u{16f8}'), ('\u{1700}', '\u{170c}'), ('\u{170e}',
|
||||
'\u{1714}'), ('\u{1720}', '\u{1734}'), ('\u{1740}', '\u{1753}'), ('\u{1760}', '\u{176c}'),
|
||||
('\u{176e}', '\u{1770}'), ('\u{1772}', '\u{1773}'), ('\u{1780}', '\u{17d3}'), ('\u{17d7}',
|
||||
'\u{17d7}'), ('\u{17dc}', '\u{17dd}'), ('\u{17e0}', '\u{17e9}'), ('\u{180b}', '\u{180d}'),
|
||||
('\u{1810}', '\u{1819}'), ('\u{1820}', '\u{1878}'), ('\u{1880}', '\u{18aa}'), ('\u{18b0}',
|
||||
'\u{18f5}'), ('\u{1900}', '\u{191e}'), ('\u{1920}', '\u{192b}'), ('\u{1930}', '\u{193b}'),
|
||||
('\u{1946}', '\u{196d}'), ('\u{1970}', '\u{1974}'), ('\u{1980}', '\u{19ab}'), ('\u{19b0}',
|
||||
'\u{19c9}'), ('\u{19d0}', '\u{19da}'), ('\u{1a00}', '\u{1a1b}'), ('\u{1a20}', '\u{1a5e}'),
|
||||
('\u{1a60}', '\u{1a7c}'), ('\u{1a7f}', '\u{1a89}'), ('\u{1a90}', '\u{1a99}'), ('\u{1aa7}',
|
||||
'\u{1aa7}'), ('\u{1ab0}', '\u{1abd}'), ('\u{1b00}', '\u{1b4b}'), ('\u{1b50}', '\u{1b59}'),
|
||||
('\u{1b6b}', '\u{1b73}'), ('\u{1b80}', '\u{1bf3}'), ('\u{1c00}', '\u{1c37}'), ('\u{1c40}',
|
||||
'\u{1c49}'), ('\u{1c4d}', '\u{1c7d}'), ('\u{1c80}', '\u{1c88}'), ('\u{1c90}', '\u{1cba}'),
|
||||
('\u{1cbd}', '\u{1cbf}'), ('\u{1cd0}', '\u{1cd2}'), ('\u{1cd4}', '\u{1cfa}'), ('\u{1d00}',
|
||||
'\u{1df9}'), ('\u{1dfb}', '\u{1f15}'), ('\u{1f18}', '\u{1f1d}'), ('\u{1f20}', '\u{1f45}'),
|
||||
('\u{1f48}', '\u{1f4d}'), ('\u{1f50}', '\u{1f57}'), ('\u{1f59}', '\u{1f59}'), ('\u{1f5b}',
|
||||
'\u{1f5b}'), ('\u{1f5d}', '\u{1f5d}'), ('\u{1f5f}', '\u{1f7d}'), ('\u{1f80}', '\u{1fb4}'),
|
||||
('\u{1fb6}', '\u{1fbc}'), ('\u{1fbe}', '\u{1fbe}'), ('\u{1fc2}', '\u{1fc4}'), ('\u{1fc6}',
|
||||
'\u{1fcc}'), ('\u{1fd0}', '\u{1fd3}'), ('\u{1fd6}', '\u{1fdb}'), ('\u{1fe0}', '\u{1fec}'),
|
||||
('\u{1ff2}', '\u{1ff4}'), ('\u{1ff6}', '\u{1ffc}'), ('\u{203f}', '\u{2040}'), ('\u{2054}',
|
||||
'\u{2054}'), ('\u{2071}', '\u{2071}'), ('\u{207f}', '\u{207f}'), ('\u{2090}', '\u{209c}'),
|
||||
('\u{20d0}', '\u{20dc}'), ('\u{20e1}', '\u{20e1}'), ('\u{20e5}', '\u{20f0}'), ('\u{2102}',
|
||||
'\u{2102}'), ('\u{2107}', '\u{2107}'), ('\u{210a}', '\u{2113}'), ('\u{2115}', '\u{2115}'),
|
||||
('\u{2118}', '\u{211d}'), ('\u{2124}', '\u{2124}'), ('\u{2126}', '\u{2126}'), ('\u{2128}',
|
||||
'\u{2128}'), ('\u{212a}', '\u{2139}'), ('\u{213c}', '\u{213f}'), ('\u{2145}', '\u{2149}'),
|
||||
('\u{214e}', '\u{214e}'), ('\u{2160}', '\u{2188}'), ('\u{2c00}', '\u{2c2e}'), ('\u{2c30}',
|
||||
'\u{2c5e}'), ('\u{2c60}', '\u{2ce4}'), ('\u{2ceb}', '\u{2cf3}'), ('\u{2d00}', '\u{2d25}'),
|
||||
('\u{2d27}', '\u{2d27}'), ('\u{2d2d}', '\u{2d2d}'), ('\u{2d30}', '\u{2d67}'), ('\u{2d6f}',
|
||||
'\u{2d6f}'), ('\u{2d7f}', '\u{2d96}'), ('\u{2da0}', '\u{2da6}'), ('\u{2da8}', '\u{2dae}'),
|
||||
('\u{2db0}', '\u{2db6}'), ('\u{2db8}', '\u{2dbe}'), ('\u{2dc0}', '\u{2dc6}'), ('\u{2dc8}',
|
||||
'\u{2dce}'), ('\u{2dd0}', '\u{2dd6}'), ('\u{2dd8}', '\u{2dde}'), ('\u{2de0}', '\u{2dff}'),
|
||||
('\u{3005}', '\u{3007}'), ('\u{3021}', '\u{302f}'), ('\u{3031}', '\u{3035}'), ('\u{3038}',
|
||||
'\u{303c}'), ('\u{3041}', '\u{3096}'), ('\u{3099}', '\u{309a}'), ('\u{309d}', '\u{309f}'),
|
||||
('\u{30a1}', '\u{30fa}'), ('\u{30fc}', '\u{30ff}'), ('\u{3105}', '\u{312f}'), ('\u{3131}',
|
||||
'\u{318e}'), ('\u{31a0}', '\u{31ba}'), ('\u{31f0}', '\u{31ff}'), ('\u{3400}', '\u{4db5}'),
|
||||
('\u{4e00}', '\u{9fef}'), ('\u{a000}', '\u{a48c}'), ('\u{a4d0}', '\u{a4fd}'), ('\u{a500}',
|
||||
'\u{a60c}'), ('\u{a610}', '\u{a62b}'), ('\u{a640}', '\u{a66f}'), ('\u{a674}', '\u{a67d}'),
|
||||
('\u{a67f}', '\u{a6f1}'), ('\u{a717}', '\u{a71f}'), ('\u{a722}', '\u{a788}'), ('\u{a78b}',
|
||||
'\u{a7bf}'), ('\u{a7c2}', '\u{a7c6}'), ('\u{a7f7}', '\u{a827}'), ('\u{a840}', '\u{a873}'),
|
||||
('\u{a880}', '\u{a8c5}'), ('\u{a8d0}', '\u{a8d9}'), ('\u{a8e0}', '\u{a8f7}'), ('\u{a8fb}',
|
||||
'\u{a8fb}'), ('\u{a8fd}', '\u{a92d}'), ('\u{a930}', '\u{a953}'), ('\u{a960}', '\u{a97c}'),
|
||||
('\u{a980}', '\u{a9c0}'), ('\u{a9cf}', '\u{a9d9}'), ('\u{a9e0}', '\u{a9fe}'), ('\u{aa00}',
|
||||
'\u{aa36}'), ('\u{aa40}', '\u{aa4d}'), ('\u{aa50}', '\u{aa59}'), ('\u{aa60}', '\u{aa76}'),
|
||||
('\u{aa7a}', '\u{aac2}'), ('\u{aadb}', '\u{aadd}'), ('\u{aae0}', '\u{aaef}'), ('\u{aaf2}',
|
||||
'\u{aaf6}'), ('\u{ab01}', '\u{ab06}'), ('\u{ab09}', '\u{ab0e}'), ('\u{ab11}', '\u{ab16}'),
|
||||
('\u{ab20}', '\u{ab26}'), ('\u{ab28}', '\u{ab2e}'), ('\u{ab30}', '\u{ab5a}'), ('\u{ab5c}',
|
||||
'\u{ab67}'), ('\u{ab70}', '\u{abea}'), ('\u{abec}', '\u{abed}'), ('\u{abf0}', '\u{abf9}'),
|
||||
('\u{ac00}', '\u{d7a3}'), ('\u{d7b0}', '\u{d7c6}'), ('\u{d7cb}', '\u{d7fb}'), ('\u{f900}',
|
||||
'\u{fa6d}'), ('\u{fa70}', '\u{fad9}'), ('\u{fb00}', '\u{fb06}'), ('\u{fb13}', '\u{fb17}'),
|
||||
('\u{fb1d}', '\u{fb28}'), ('\u{fb2a}', '\u{fb36}'), ('\u{fb38}', '\u{fb3c}'), ('\u{fb3e}',
|
||||
'\u{fb3e}'), ('\u{fb40}', '\u{fb41}'), ('\u{fb43}', '\u{fb44}'), ('\u{fb46}', '\u{fbb1}'),
|
||||
('\u{fbd3}', '\u{fc5d}'), ('\u{fc64}', '\u{fd3d}'), ('\u{fd50}', '\u{fd8f}'), ('\u{fd92}',
|
||||
'\u{fdc7}'), ('\u{fdf0}', '\u{fdf9}'), ('\u{fe00}', '\u{fe0f}'), ('\u{fe20}', '\u{fe2f}'),
|
||||
('\u{fe33}', '\u{fe34}'), ('\u{fe4d}', '\u{fe4f}'), ('\u{fe71}', '\u{fe71}'), ('\u{fe73}',
|
||||
'\u{fe73}'), ('\u{fe77}', '\u{fe77}'), ('\u{fe79}', '\u{fe79}'), ('\u{fe7b}', '\u{fe7b}'),
|
||||
('\u{fe7d}', '\u{fe7d}'), ('\u{fe7f}', '\u{fefc}'), ('\u{ff10}', '\u{ff19}'), ('\u{ff21}',
|
||||
'\u{ff3a}'), ('\u{ff3f}', '\u{ff3f}'), ('\u{ff41}', '\u{ff5a}'), ('\u{ff66}', '\u{ffbe}'),
|
||||
('\u{ffc2}', '\u{ffc7}'), ('\u{ffca}', '\u{ffcf}'), ('\u{ffd2}', '\u{ffd7}'), ('\u{ffda}',
|
||||
'\u{ffdc}'), ('\u{10000}', '\u{1000b}'), ('\u{1000d}', '\u{10026}'), ('\u{10028}',
|
||||
'\u{1003a}'), ('\u{1003c}', '\u{1003d}'), ('\u{1003f}', '\u{1004d}'), ('\u{10050}',
|
||||
'\u{1005d}'), ('\u{10080}', '\u{100fa}'), ('\u{10140}', '\u{10174}'), ('\u{101fd}',
|
||||
'\u{101fd}'), ('\u{10280}', '\u{1029c}'), ('\u{102a0}', '\u{102d0}'), ('\u{102e0}',
|
||||
'\u{102e0}'), ('\u{10300}', '\u{1031f}'), ('\u{1032d}', '\u{1034a}'), ('\u{10350}',
|
||||
'\u{1037a}'), ('\u{10380}', '\u{1039d}'), ('\u{103a0}', '\u{103c3}'), ('\u{103c8}',
|
||||
'\u{103cf}'), ('\u{103d1}', '\u{103d5}'), ('\u{10400}', '\u{1049d}'), ('\u{104a0}',
|
||||
'\u{104a9}'), ('\u{104b0}', '\u{104d3}'), ('\u{104d8}', '\u{104fb}'), ('\u{10500}',
|
||||
'\u{10527}'), ('\u{10530}', '\u{10563}'), ('\u{10600}', '\u{10736}'), ('\u{10740}',
|
||||
'\u{10755}'), ('\u{10760}', '\u{10767}'), ('\u{10800}', '\u{10805}'), ('\u{10808}',
|
||||
'\u{10808}'), ('\u{1080a}', '\u{10835}'), ('\u{10837}', '\u{10838}'), ('\u{1083c}',
|
||||
'\u{1083c}'), ('\u{1083f}', '\u{10855}'), ('\u{10860}', '\u{10876}'), ('\u{10880}',
|
||||
'\u{1089e}'), ('\u{108e0}', '\u{108f2}'), ('\u{108f4}', '\u{108f5}'), ('\u{10900}',
|
||||
'\u{10915}'), ('\u{10920}', '\u{10939}'), ('\u{10980}', '\u{109b7}'), ('\u{109be}',
|
||||
'\u{109bf}'), ('\u{10a00}', '\u{10a03}'), ('\u{10a05}', '\u{10a06}'), ('\u{10a0c}',
|
||||
'\u{10a13}'), ('\u{10a15}', '\u{10a17}'), ('\u{10a19}', '\u{10a35}'), ('\u{10a38}',
|
||||
'\u{10a3a}'), ('\u{10a3f}', '\u{10a3f}'), ('\u{10a60}', '\u{10a7c}'), ('\u{10a80}',
|
||||
'\u{10a9c}'), ('\u{10ac0}', '\u{10ac7}'), ('\u{10ac9}', '\u{10ae6}'), ('\u{10b00}',
|
||||
'\u{10b35}'), ('\u{10b40}', '\u{10b55}'), ('\u{10b60}', '\u{10b72}'), ('\u{10b80}',
|
||||
'\u{10b91}'), ('\u{10c00}', '\u{10c48}'), ('\u{10c80}', '\u{10cb2}'), ('\u{10cc0}',
|
||||
'\u{10cf2}'), ('\u{10d00}', '\u{10d27}'), ('\u{10d30}', '\u{10d39}'), ('\u{10f00}',
|
||||
'\u{10f1c}'), ('\u{10f27}', '\u{10f27}'), ('\u{10f30}', '\u{10f50}'), ('\u{10fe0}',
|
||||
'\u{10ff6}'), ('\u{11000}', '\u{11046}'), ('\u{11066}', '\u{1106f}'), ('\u{1107f}',
|
||||
'\u{110ba}'), ('\u{110d0}', '\u{110e8}'), ('\u{110f0}', '\u{110f9}'), ('\u{11100}',
|
||||
'\u{11134}'), ('\u{11136}', '\u{1113f}'), ('\u{11144}', '\u{11146}'), ('\u{11150}',
|
||||
'\u{11173}'), ('\u{11176}', '\u{11176}'), ('\u{11180}', '\u{111c4}'), ('\u{111c9}',
|
||||
'\u{111cc}'), ('\u{111d0}', '\u{111da}'), ('\u{111dc}', '\u{111dc}'), ('\u{11200}',
|
||||
'\u{11211}'), ('\u{11213}', '\u{11237}'), ('\u{1123e}', '\u{1123e}'), ('\u{11280}',
|
||||
'\u{11286}'), ('\u{11288}', '\u{11288}'), ('\u{1128a}', '\u{1128d}'), ('\u{1128f}',
|
||||
'\u{1129d}'), ('\u{1129f}', '\u{112a8}'), ('\u{112b0}', '\u{112ea}'), ('\u{112f0}',
|
||||
'\u{112f9}'), ('\u{11300}', '\u{11303}'), ('\u{11305}', '\u{1130c}'), ('\u{1130f}',
|
||||
'\u{11310}'), ('\u{11313}', '\u{11328}'), ('\u{1132a}', '\u{11330}'), ('\u{11332}',
|
||||
'\u{11333}'), ('\u{11335}', '\u{11339}'), ('\u{1133b}', '\u{11344}'), ('\u{11347}',
|
||||
'\u{11348}'), ('\u{1134b}', '\u{1134d}'), ('\u{11350}', '\u{11350}'), ('\u{11357}',
|
||||
'\u{11357}'), ('\u{1135d}', '\u{11363}'), ('\u{11366}', '\u{1136c}'), ('\u{11370}',
|
||||
'\u{11374}'), ('\u{11400}', '\u{1144a}'), ('\u{11450}', '\u{11459}'), ('\u{1145e}',
|
||||
'\u{1145f}'), ('\u{11480}', '\u{114c5}'), ('\u{114c7}', '\u{114c7}'), ('\u{114d0}',
|
||||
'\u{114d9}'), ('\u{11580}', '\u{115b5}'), ('\u{115b8}', '\u{115c0}'), ('\u{115d8}',
|
||||
'\u{115dd}'), ('\u{11600}', '\u{11640}'), ('\u{11644}', '\u{11644}'), ('\u{11650}',
|
||||
'\u{11659}'), ('\u{11680}', '\u{116b8}'), ('\u{116c0}', '\u{116c9}'), ('\u{11700}',
|
||||
'\u{1171a}'), ('\u{1171d}', '\u{1172b}'), ('\u{11730}', '\u{11739}'), ('\u{11800}',
|
||||
'\u{1183a}'), ('\u{118a0}', '\u{118e9}'), ('\u{118ff}', '\u{118ff}'), ('\u{119a0}',
|
||||
'\u{119a7}'), ('\u{119aa}', '\u{119d7}'), ('\u{119da}', '\u{119e1}'), ('\u{119e3}',
|
||||
'\u{119e4}'), ('\u{11a00}', '\u{11a3e}'), ('\u{11a47}', '\u{11a47}'), ('\u{11a50}',
|
||||
'\u{11a99}'), ('\u{11a9d}', '\u{11a9d}'), ('\u{11ac0}', '\u{11af8}'), ('\u{11c00}',
|
||||
'\u{11c08}'), ('\u{11c0a}', '\u{11c36}'), ('\u{11c38}', '\u{11c40}'), ('\u{11c50}',
|
||||
'\u{11c59}'), ('\u{11c72}', '\u{11c8f}'), ('\u{11c92}', '\u{11ca7}'), ('\u{11ca9}',
|
||||
'\u{11cb6}'), ('\u{11d00}', '\u{11d06}'), ('\u{11d08}', '\u{11d09}'), ('\u{11d0b}',
|
||||
'\u{11d36}'), ('\u{11d3a}', '\u{11d3a}'), ('\u{11d3c}', '\u{11d3d}'), ('\u{11d3f}',
|
||||
'\u{11d47}'), ('\u{11d50}', '\u{11d59}'), ('\u{11d60}', '\u{11d65}'), ('\u{11d67}',
|
||||
'\u{11d68}'), ('\u{11d6a}', '\u{11d8e}'), ('\u{11d90}', '\u{11d91}'), ('\u{11d93}',
|
||||
'\u{11d98}'), ('\u{11da0}', '\u{11da9}'), ('\u{11ee0}', '\u{11ef6}'), ('\u{12000}',
|
||||
'\u{12399}'), ('\u{12400}', '\u{1246e}'), ('\u{12480}', '\u{12543}'), ('\u{13000}',
|
||||
'\u{1342e}'), ('\u{14400}', '\u{14646}'), ('\u{16800}', '\u{16a38}'), ('\u{16a40}',
|
||||
'\u{16a5e}'), ('\u{16a60}', '\u{16a69}'), ('\u{16ad0}', '\u{16aed}'), ('\u{16af0}',
|
||||
'\u{16af4}'), ('\u{16b00}', '\u{16b36}'), ('\u{16b40}', '\u{16b43}'), ('\u{16b50}',
|
||||
'\u{16b59}'), ('\u{16b63}', '\u{16b77}'), ('\u{16b7d}', '\u{16b8f}'), ('\u{16e40}',
|
||||
'\u{16e7f}'), ('\u{16f00}', '\u{16f4a}'), ('\u{16f4f}', '\u{16f87}'), ('\u{16f8f}',
|
||||
'\u{16f9f}'), ('\u{16fe0}', '\u{16fe1}'), ('\u{16fe3}', '\u{16fe3}'), ('\u{17000}',
|
||||
'\u{187f7}'), ('\u{18800}', '\u{18af2}'), ('\u{1b000}', '\u{1b11e}'), ('\u{1b150}',
|
||||
'\u{1b152}'), ('\u{1b164}', '\u{1b167}'), ('\u{1b170}', '\u{1b2fb}'), ('\u{1bc00}',
|
||||
'\u{1bc6a}'), ('\u{1bc70}', '\u{1bc7c}'), ('\u{1bc80}', '\u{1bc88}'), ('\u{1bc90}',
|
||||
'\u{1bc99}'), ('\u{1bc9d}', '\u{1bc9e}'), ('\u{1d165}', '\u{1d169}'), ('\u{1d16d}',
|
||||
'\u{1d172}'), ('\u{1d17b}', '\u{1d182}'), ('\u{1d185}', '\u{1d18b}'), ('\u{1d1aa}',
|
||||
'\u{1d1ad}'), ('\u{1d242}', '\u{1d244}'), ('\u{1d400}', '\u{1d454}'), ('\u{1d456}',
|
||||
'\u{1d49c}'), ('\u{1d49e}', '\u{1d49f}'), ('\u{1d4a2}', '\u{1d4a2}'), ('\u{1d4a5}',
|
||||
'\u{1d4a6}'), ('\u{1d4a9}', '\u{1d4ac}'), ('\u{1d4ae}', '\u{1d4b9}'), ('\u{1d4bb}',
|
||||
'\u{1d4bb}'), ('\u{1d4bd}', '\u{1d4c3}'), ('\u{1d4c5}', '\u{1d505}'), ('\u{1d507}',
|
||||
'\u{1d50a}'), ('\u{1d50d}', '\u{1d514}'), ('\u{1d516}', '\u{1d51c}'), ('\u{1d51e}',
|
||||
'\u{1d539}'), ('\u{1d53b}', '\u{1d53e}'), ('\u{1d540}', '\u{1d544}'), ('\u{1d546}',
|
||||
'\u{1d546}'), ('\u{1d54a}', '\u{1d550}'), ('\u{1d552}', '\u{1d6a5}'), ('\u{1d6a8}',
|
||||
'\u{1d6c0}'), ('\u{1d6c2}', '\u{1d6da}'), ('\u{1d6dc}', '\u{1d6fa}'), ('\u{1d6fc}',
|
||||
'\u{1d714}'), ('\u{1d716}', '\u{1d734}'), ('\u{1d736}', '\u{1d74e}'), ('\u{1d750}',
|
||||
'\u{1d76e}'), ('\u{1d770}', '\u{1d788}'), ('\u{1d78a}', '\u{1d7a8}'), ('\u{1d7aa}',
|
||||
'\u{1d7c2}'), ('\u{1d7c4}', '\u{1d7cb}'), ('\u{1d7ce}', '\u{1d7ff}'), ('\u{1da00}',
|
||||
'\u{1da36}'), ('\u{1da3b}', '\u{1da6c}'), ('\u{1da75}', '\u{1da75}'), ('\u{1da84}',
|
||||
'\u{1da84}'), ('\u{1da9b}', '\u{1da9f}'), ('\u{1daa1}', '\u{1daaf}'), ('\u{1e000}',
|
||||
'\u{1e006}'), ('\u{1e008}', '\u{1e018}'), ('\u{1e01b}', '\u{1e021}'), ('\u{1e023}',
|
||||
'\u{1e024}'), ('\u{1e026}', '\u{1e02a}'), ('\u{1e100}', '\u{1e12c}'), ('\u{1e130}',
|
||||
'\u{1e13d}'), ('\u{1e140}', '\u{1e149}'), ('\u{1e14e}', '\u{1e14e}'), ('\u{1e2c0}',
|
||||
'\u{1e2f9}'), ('\u{1e800}', '\u{1e8c4}'), ('\u{1e8d0}', '\u{1e8d6}'), ('\u{1e900}',
|
||||
'\u{1e94b}'), ('\u{1e950}', '\u{1e959}'), ('\u{1ee00}', '\u{1ee03}'), ('\u{1ee05}',
|
||||
'\u{1ee1f}'), ('\u{1ee21}', '\u{1ee22}'), ('\u{1ee24}', '\u{1ee24}'), ('\u{1ee27}',
|
||||
'\u{1ee27}'), ('\u{1ee29}', '\u{1ee32}'), ('\u{1ee34}', '\u{1ee37}'), ('\u{1ee39}',
|
||||
'\u{1ee39}'), ('\u{1ee3b}', '\u{1ee3b}'), ('\u{1ee42}', '\u{1ee42}'), ('\u{1ee47}',
|
||||
'\u{1ee47}'), ('\u{1ee49}', '\u{1ee49}'), ('\u{1ee4b}', '\u{1ee4b}'), ('\u{1ee4d}',
|
||||
'\u{1ee4f}'), ('\u{1ee51}', '\u{1ee52}'), ('\u{1ee54}', '\u{1ee54}'), ('\u{1ee57}',
|
||||
'\u{1ee57}'), ('\u{1ee59}', '\u{1ee59}'), ('\u{1ee5b}', '\u{1ee5b}'), ('\u{1ee5d}',
|
||||
'\u{1ee5d}'), ('\u{1ee5f}', '\u{1ee5f}'), ('\u{1ee61}', '\u{1ee62}'), ('\u{1ee64}',
|
||||
'\u{1ee64}'), ('\u{1ee67}', '\u{1ee6a}'), ('\u{1ee6c}', '\u{1ee72}'), ('\u{1ee74}',
|
||||
'\u{1ee77}'), ('\u{1ee79}', '\u{1ee7c}'), ('\u{1ee7e}', '\u{1ee7e}'), ('\u{1ee80}',
|
||||
'\u{1ee89}'), ('\u{1ee8b}', '\u{1ee9b}'), ('\u{1eea1}', '\u{1eea3}'), ('\u{1eea5}',
|
||||
'\u{1eea9}'), ('\u{1eeab}', '\u{1eebb}'), ('\u{20000}', '\u{2a6d6}'), ('\u{2a700}',
|
||||
'\u{2b734}'), ('\u{2b740}', '\u{2b81d}'), ('\u{2b820}', '\u{2cea1}'), ('\u{2ceb0}',
|
||||
'\u{2ebe0}'), ('\u{2f800}', '\u{2fa1d}'), ('\u{e0100}', '\u{e01ef}')
|
||||
];
|
||||
|
||||
pub fn XID_Continue(c: char) -> bool {
|
||||
super::bsearch_range_table(c, XID_Continue_table)
|
||||
}
|
||||
|
||||
pub const XID_Start_table: &[(char, char)] = &[
|
||||
('\u{41}', '\u{5a}'), ('\u{61}', '\u{7a}'), ('\u{aa}', '\u{aa}'), ('\u{b5}', '\u{b5}'),
|
||||
('\u{ba}', '\u{ba}'), ('\u{c0}', '\u{d6}'), ('\u{d8}', '\u{f6}'), ('\u{f8}', '\u{2c1}'),
|
||||
('\u{2c6}', '\u{2d1}'), ('\u{2e0}', '\u{2e4}'), ('\u{2ec}', '\u{2ec}'), ('\u{2ee}',
|
||||
'\u{2ee}'), ('\u{370}', '\u{374}'), ('\u{376}', '\u{377}'), ('\u{37b}', '\u{37d}'),
|
||||
('\u{37f}', '\u{37f}'), ('\u{386}', '\u{386}'), ('\u{388}', '\u{38a}'), ('\u{38c}',
|
||||
'\u{38c}'), ('\u{38e}', '\u{3a1}'), ('\u{3a3}', '\u{3f5}'), ('\u{3f7}', '\u{481}'),
|
||||
('\u{48a}', '\u{52f}'), ('\u{531}', '\u{556}'), ('\u{559}', '\u{559}'), ('\u{560}',
|
||||
'\u{588}'), ('\u{5d0}', '\u{5ea}'), ('\u{5ef}', '\u{5f2}'), ('\u{620}', '\u{64a}'),
|
||||
('\u{66e}', '\u{66f}'), ('\u{671}', '\u{6d3}'), ('\u{6d5}', '\u{6d5}'), ('\u{6e5}',
|
||||
'\u{6e6}'), ('\u{6ee}', '\u{6ef}'), ('\u{6fa}', '\u{6fc}'), ('\u{6ff}', '\u{6ff}'),
|
||||
('\u{710}', '\u{710}'), ('\u{712}', '\u{72f}'), ('\u{74d}', '\u{7a5}'), ('\u{7b1}',
|
||||
'\u{7b1}'), ('\u{7ca}', '\u{7ea}'), ('\u{7f4}', '\u{7f5}'), ('\u{7fa}', '\u{7fa}'),
|
||||
('\u{800}', '\u{815}'), ('\u{81a}', '\u{81a}'), ('\u{824}', '\u{824}'), ('\u{828}',
|
||||
'\u{828}'), ('\u{840}', '\u{858}'), ('\u{860}', '\u{86a}'), ('\u{8a0}', '\u{8b4}'),
|
||||
('\u{8b6}', '\u{8bd}'), ('\u{904}', '\u{939}'), ('\u{93d}', '\u{93d}'), ('\u{950}',
|
||||
'\u{950}'), ('\u{958}', '\u{961}'), ('\u{971}', '\u{980}'), ('\u{985}', '\u{98c}'),
|
||||
('\u{98f}', '\u{990}'), ('\u{993}', '\u{9a8}'), ('\u{9aa}', '\u{9b0}'), ('\u{9b2}',
|
||||
'\u{9b2}'), ('\u{9b6}', '\u{9b9}'), ('\u{9bd}', '\u{9bd}'), ('\u{9ce}', '\u{9ce}'),
|
||||
('\u{9dc}', '\u{9dd}'), ('\u{9df}', '\u{9e1}'), ('\u{9f0}', '\u{9f1}'), ('\u{9fc}',
|
||||
'\u{9fc}'), ('\u{a05}', '\u{a0a}'), ('\u{a0f}', '\u{a10}'), ('\u{a13}', '\u{a28}'),
|
||||
('\u{a2a}', '\u{a30}'), ('\u{a32}', '\u{a33}'), ('\u{a35}', '\u{a36}'), ('\u{a38}',
|
||||
'\u{a39}'), ('\u{a59}', '\u{a5c}'), ('\u{a5e}', '\u{a5e}'), ('\u{a72}', '\u{a74}'),
|
||||
('\u{a85}', '\u{a8d}'), ('\u{a8f}', '\u{a91}'), ('\u{a93}', '\u{aa8}'), ('\u{aaa}',
|
||||
'\u{ab0}'), ('\u{ab2}', '\u{ab3}'), ('\u{ab5}', '\u{ab9}'), ('\u{abd}', '\u{abd}'),
|
||||
('\u{ad0}', '\u{ad0}'), ('\u{ae0}', '\u{ae1}'), ('\u{af9}', '\u{af9}'), ('\u{b05}',
|
||||
('\u{800}', '\u{82d}'), ('\u{840}', '\u{85b}'), ('\u{8a0}', '\u{8b4}'), ('\u{8b6}',
|
||||
'\u{8bd}'), ('\u{8d4}', '\u{8e1}'), ('\u{8e3}', '\u{963}'), ('\u{966}', '\u{96f}'),
|
||||
('\u{971}', '\u{983}'), ('\u{985}', '\u{98c}'), ('\u{98f}', '\u{990}'), ('\u{993}',
|
||||
'\u{9a8}'), ('\u{9aa}', '\u{9b0}'), ('\u{9b2}', '\u{9b2}'), ('\u{9b6}', '\u{9b9}'),
|
||||
('\u{9bc}', '\u{9c4}'), ('\u{9c7}', '\u{9c8}'), ('\u{9cb}', '\u{9ce}'), ('\u{9d7}',
|
||||
'\u{9d7}'), ('\u{9dc}', '\u{9dd}'), ('\u{9df}', '\u{9e3}'), ('\u{9e6}', '\u{9f1}'),
|
||||
('\u{a01}', '\u{a03}'), ('\u{a05}', '\u{a0a}'), ('\u{a0f}', '\u{a10}'), ('\u{a13}',
|
||||
'\u{a28}'), ('\u{a2a}', '\u{a30}'), ('\u{a32}', '\u{a33}'), ('\u{a35}', '\u{a36}'),
|
||||
('\u{a38}', '\u{a39}'), ('\u{a3c}', '\u{a3c}'), ('\u{a3e}', '\u{a42}'), ('\u{a47}',
|
||||
'\u{a48}'), ('\u{a4b}', '\u{a4d}'), ('\u{a51}', '\u{a51}'), ('\u{a59}', '\u{a5c}'),
|
||||
('\u{a5e}', '\u{a5e}'), ('\u{a66}', '\u{a75}'), ('\u{a81}', '\u{a83}'), ('\u{a85}',
|
||||
'\u{a8d}'), ('\u{a8f}', '\u{a91}'), ('\u{a93}', '\u{aa8}'), ('\u{aaa}', '\u{ab0}'),
|
||||
('\u{ab2}', '\u{ab3}'), ('\u{ab5}', '\u{ab9}'), ('\u{abc}', '\u{ac5}'), ('\u{ac7}',
|
||||
'\u{ac9}'), ('\u{acb}', '\u{acd}'), ('\u{ad0}', '\u{ad0}'), ('\u{ae0}', '\u{ae3}'),
|
||||
('\u{ae6}', '\u{aef}'), ('\u{af9}', '\u{af9}'), ('\u{b01}', '\u{b03}'), ('\u{b05}',
|
||||
'\u{b0c}'), ('\u{b0f}', '\u{b10}'), ('\u{b13}', '\u{b28}'), ('\u{b2a}', '\u{b30}'),
|
||||
('\u{b32}', '\u{b33}'), ('\u{b35}', '\u{b39}'), ('\u{b3d}', '\u{b3d}'), ('\u{b5c}',
|
||||
'\u{b5d}'), ('\u{b5f}', '\u{b61}'), ('\u{b71}', '\u{b71}'), ('\u{b83}', '\u{b83}'),
|
||||
('\u{b85}', '\u{b8a}'), ('\u{b8e}', '\u{b90}'), ('\u{b92}', '\u{b95}'), ('\u{b99}',
|
||||
'\u{b9a}'), ('\u{b9c}', '\u{b9c}'), ('\u{b9e}', '\u{b9f}'), ('\u{ba3}', '\u{ba4}'),
|
||||
('\u{ba8}', '\u{baa}'), ('\u{bae}', '\u{bb9}'), ('\u{bd0}', '\u{bd0}'), ('\u{c05}',
|
||||
'\u{c0c}'), ('\u{c0e}', '\u{c10}'), ('\u{c12}', '\u{c28}'), ('\u{c2a}', '\u{c39}'),
|
||||
('\u{c3d}', '\u{c3d}'), ('\u{c58}', '\u{c5a}'), ('\u{c60}', '\u{c61}'), ('\u{c80}',
|
||||
'\u{c80}'), ('\u{c85}', '\u{c8c}'), ('\u{c8e}', '\u{c90}'), ('\u{c92}', '\u{ca8}'),
|
||||
('\u{caa}', '\u{cb3}'), ('\u{cb5}', '\u{cb9}'), ('\u{cbd}', '\u{cbd}'), ('\u{cde}',
|
||||
'\u{cde}'), ('\u{ce0}', '\u{ce1}'), ('\u{cf1}', '\u{cf2}'), ('\u{d05}', '\u{d0c}'),
|
||||
('\u{d0e}', '\u{d10}'), ('\u{d12}', '\u{d3a}'), ('\u{d3d}', '\u{d3d}'), ('\u{d4e}',
|
||||
'\u{d4e}'), ('\u{d54}', '\u{d56}'), ('\u{d5f}', '\u{d61}'), ('\u{d7a}', '\u{d7f}'),
|
||||
('\u{d85}', '\u{d96}'), ('\u{d9a}', '\u{db1}'), ('\u{db3}', '\u{dbb}'), ('\u{dbd}',
|
||||
'\u{dbd}'), ('\u{dc0}', '\u{dc6}'), ('\u{e01}', '\u{e30}'), ('\u{e32}', '\u{e32}'),
|
||||
('\u{e40}', '\u{e46}'), ('\u{e81}', '\u{e82}'), ('\u{e84}', '\u{e84}'), ('\u{e86}',
|
||||
'\u{e8a}'), ('\u{e8c}', '\u{ea3}'), ('\u{ea5}', '\u{ea5}'), ('\u{ea7}', '\u{eb0}'),
|
||||
('\u{eb2}', '\u{eb2}'), ('\u{ebd}', '\u{ebd}'), ('\u{ec0}', '\u{ec4}'), ('\u{ec6}',
|
||||
'\u{ec6}'), ('\u{edc}', '\u{edf}'), ('\u{f00}', '\u{f00}'), ('\u{f40}', '\u{f47}'),
|
||||
('\u{f49}', '\u{f6c}'), ('\u{f88}', '\u{f8c}'), ('\u{1000}', '\u{102a}'), ('\u{103f}',
|
||||
'\u{103f}'), ('\u{1050}', '\u{1055}'), ('\u{105a}', '\u{105d}'), ('\u{1061}', '\u{1061}'),
|
||||
('\u{1065}', '\u{1066}'), ('\u{106e}', '\u{1070}'), ('\u{1075}', '\u{1081}'), ('\u{108e}',
|
||||
'\u{108e}'), ('\u{10a0}', '\u{10c5}'), ('\u{10c7}', '\u{10c7}'), ('\u{10cd}', '\u{10cd}'),
|
||||
('\u{10d0}', '\u{10fa}'), ('\u{10fc}', '\u{1248}'), ('\u{124a}', '\u{124d}'), ('\u{1250}',
|
||||
'\u{1256}'), ('\u{1258}', '\u{1258}'), ('\u{125a}', '\u{125d}'), ('\u{1260}', '\u{1288}'),
|
||||
('\u{128a}', '\u{128d}'), ('\u{1290}', '\u{12b0}'), ('\u{12b2}', '\u{12b5}'), ('\u{12b8}',
|
||||
'\u{12be}'), ('\u{12c0}', '\u{12c0}'), ('\u{12c2}', '\u{12c5}'), ('\u{12c8}', '\u{12d6}'),
|
||||
('\u{12d8}', '\u{1310}'), ('\u{1312}', '\u{1315}'), ('\u{1318}', '\u{135a}'), ('\u{1380}',
|
||||
'\u{138f}'), ('\u{13a0}', '\u{13f5}'), ('\u{13f8}', '\u{13fd}'), ('\u{1401}', '\u{166c}'),
|
||||
('\u{166f}', '\u{167f}'), ('\u{1681}', '\u{169a}'), ('\u{16a0}', '\u{16ea}'), ('\u{16ee}',
|
||||
'\u{16f8}'), ('\u{1700}', '\u{170c}'), ('\u{170e}', '\u{1711}'), ('\u{1720}', '\u{1731}'),
|
||||
('\u{1740}', '\u{1751}'), ('\u{1760}', '\u{176c}'), ('\u{176e}', '\u{1770}'), ('\u{1780}',
|
||||
'\u{17b3}'), ('\u{17d7}', '\u{17d7}'), ('\u{17dc}', '\u{17dc}'), ('\u{1820}', '\u{1878}'),
|
||||
('\u{1880}', '\u{18a8}'), ('\u{18aa}', '\u{18aa}'), ('\u{18b0}', '\u{18f5}'), ('\u{1900}',
|
||||
'\u{191e}'), ('\u{1950}', '\u{196d}'), ('\u{1970}', '\u{1974}'), ('\u{1980}', '\u{19ab}'),
|
||||
('\u{19b0}', '\u{19c9}'), ('\u{1a00}', '\u{1a16}'), ('\u{1a20}', '\u{1a54}'), ('\u{1aa7}',
|
||||
'\u{1aa7}'), ('\u{1b05}', '\u{1b33}'), ('\u{1b45}', '\u{1b4b}'), ('\u{1b83}', '\u{1ba0}'),
|
||||
('\u{1bae}', '\u{1baf}'), ('\u{1bba}', '\u{1be5}'), ('\u{1c00}', '\u{1c23}'), ('\u{1c4d}',
|
||||
'\u{1c4f}'), ('\u{1c5a}', '\u{1c7d}'), ('\u{1c80}', '\u{1c88}'), ('\u{1c90}', '\u{1cba}'),
|
||||
('\u{1cbd}', '\u{1cbf}'), ('\u{1ce9}', '\u{1cec}'), ('\u{1cee}', '\u{1cf3}'), ('\u{1cf5}',
|
||||
'\u{1cf6}'), ('\u{1cfa}', '\u{1cfa}'), ('\u{1d00}', '\u{1dbf}'), ('\u{1e00}', '\u{1f15}'),
|
||||
('\u{1f18}', '\u{1f1d}'), ('\u{1f20}', '\u{1f45}'), ('\u{1f48}', '\u{1f4d}'), ('\u{1f50}',
|
||||
'\u{1f57}'), ('\u{1f59}', '\u{1f59}'), ('\u{1f5b}', '\u{1f5b}'), ('\u{1f5d}', '\u{1f5d}'),
|
||||
('\u{1f5f}', '\u{1f7d}'), ('\u{1f80}', '\u{1fb4}'), ('\u{1fb6}', '\u{1fbc}'), ('\u{1fbe}',
|
||||
'\u{1fbe}'), ('\u{1fc2}', '\u{1fc4}'), ('\u{1fc6}', '\u{1fcc}'), ('\u{1fd0}', '\u{1fd3}'),
|
||||
('\u{1fd6}', '\u{1fdb}'), ('\u{1fe0}', '\u{1fec}'), ('\u{1ff2}', '\u{1ff4}'), ('\u{1ff6}',
|
||||
'\u{1ffc}'), ('\u{2071}', '\u{2071}'), ('\u{207f}', '\u{207f}'), ('\u{2090}', '\u{209c}'),
|
||||
('\u{b32}', '\u{b33}'), ('\u{b35}', '\u{b39}'), ('\u{b3c}', '\u{b44}'), ('\u{b47}',
|
||||
'\u{b48}'), ('\u{b4b}', '\u{b4d}'), ('\u{b56}', '\u{b57}'), ('\u{b5c}', '\u{b5d}'),
|
||||
('\u{b5f}', '\u{b63}'), ('\u{b66}', '\u{b6f}'), ('\u{b71}', '\u{b71}'), ('\u{b82}',
|
||||
'\u{b83}'), ('\u{b85}', '\u{b8a}'), ('\u{b8e}', '\u{b90}'), ('\u{b92}', '\u{b95}'),
|
||||
('\u{b99}', '\u{b9a}'), ('\u{b9c}', '\u{b9c}'), ('\u{b9e}', '\u{b9f}'), ('\u{ba3}',
|
||||
'\u{ba4}'), ('\u{ba8}', '\u{baa}'), ('\u{bae}', '\u{bb9}'), ('\u{bbe}', '\u{bc2}'),
|
||||
('\u{bc6}', '\u{bc8}'), ('\u{bca}', '\u{bcd}'), ('\u{bd0}', '\u{bd0}'), ('\u{bd7}',
|
||||
'\u{bd7}'), ('\u{be6}', '\u{bef}'), ('\u{c00}', '\u{c03}'), ('\u{c05}', '\u{c0c}'),
|
||||
('\u{c0e}', '\u{c10}'), ('\u{c12}', '\u{c28}'), ('\u{c2a}', '\u{c39}'), ('\u{c3d}',
|
||||
'\u{c44}'), ('\u{c46}', '\u{c48}'), ('\u{c4a}', '\u{c4d}'), ('\u{c55}', '\u{c56}'),
|
||||
('\u{c58}', '\u{c5a}'), ('\u{c60}', '\u{c63}'), ('\u{c66}', '\u{c6f}'), ('\u{c80}',
|
||||
'\u{c83}'), ('\u{c85}', '\u{c8c}'), ('\u{c8e}', '\u{c90}'), ('\u{c92}', '\u{ca8}'),
|
||||
('\u{caa}', '\u{cb3}'), ('\u{cb5}', '\u{cb9}'), ('\u{cbc}', '\u{cc4}'), ('\u{cc6}',
|
||||
'\u{cc8}'), ('\u{cca}', '\u{ccd}'), ('\u{cd5}', '\u{cd6}'), ('\u{cde}', '\u{cde}'),
|
||||
('\u{ce0}', '\u{ce3}'), ('\u{ce6}', '\u{cef}'), ('\u{cf1}', '\u{cf2}'), ('\u{d01}',
|
||||
'\u{d03}'), ('\u{d05}', '\u{d0c}'), ('\u{d0e}', '\u{d10}'), ('\u{d12}', '\u{d3a}'),
|
||||
('\u{d3d}', '\u{d44}'), ('\u{d46}', '\u{d48}'), ('\u{d4a}', '\u{d4e}'), ('\u{d54}',
|
||||
'\u{d57}'), ('\u{d5f}', '\u{d63}'), ('\u{d66}', '\u{d6f}'), ('\u{d7a}', '\u{d7f}'),
|
||||
('\u{d82}', '\u{d83}'), ('\u{d85}', '\u{d96}'), ('\u{d9a}', '\u{db1}'), ('\u{db3}',
|
||||
'\u{dbb}'), ('\u{dbd}', '\u{dbd}'), ('\u{dc0}', '\u{dc6}'), ('\u{dca}', '\u{dca}'),
|
||||
('\u{dcf}', '\u{dd4}'), ('\u{dd6}', '\u{dd6}'), ('\u{dd8}', '\u{ddf}'), ('\u{de6}',
|
||||
'\u{def}'), ('\u{df2}', '\u{df3}'), ('\u{e01}', '\u{e3a}'), ('\u{e40}', '\u{e4e}'),
|
||||
('\u{e50}', '\u{e59}'), ('\u{e81}', '\u{e82}'), ('\u{e84}', '\u{e84}'), ('\u{e87}',
|
||||
'\u{e88}'), ('\u{e8a}', '\u{e8a}'), ('\u{e8d}', '\u{e8d}'), ('\u{e94}', '\u{e97}'),
|
||||
('\u{e99}', '\u{e9f}'), ('\u{ea1}', '\u{ea3}'), ('\u{ea5}', '\u{ea5}'), ('\u{ea7}',
|
||||
'\u{ea7}'), ('\u{eaa}', '\u{eab}'), ('\u{ead}', '\u{eb9}'), ('\u{ebb}', '\u{ebd}'),
|
||||
('\u{ec0}', '\u{ec4}'), ('\u{ec6}', '\u{ec6}'), ('\u{ec8}', '\u{ecd}'), ('\u{ed0}',
|
||||
'\u{ed9}'), ('\u{edc}', '\u{edf}'), ('\u{f00}', '\u{f00}'), ('\u{f18}', '\u{f19}'),
|
||||
('\u{f20}', '\u{f29}'), ('\u{f35}', '\u{f35}'), ('\u{f37}', '\u{f37}'), ('\u{f39}',
|
||||
'\u{f39}'), ('\u{f3e}', '\u{f47}'), ('\u{f49}', '\u{f6c}'), ('\u{f71}', '\u{f84}'),
|
||||
('\u{f86}', '\u{f97}'), ('\u{f99}', '\u{fbc}'), ('\u{fc6}', '\u{fc6}'), ('\u{1000}',
|
||||
'\u{1049}'), ('\u{1050}', '\u{109d}'), ('\u{10a0}', '\u{10c5}'), ('\u{10c7}', '\u{10c7}'),
|
||||
('\u{10cd}', '\u{10cd}'), ('\u{10d0}', '\u{10fa}'), ('\u{10fc}', '\u{1248}'), ('\u{124a}',
|
||||
'\u{124d}'), ('\u{1250}', '\u{1256}'), ('\u{1258}', '\u{1258}'), ('\u{125a}', '\u{125d}'),
|
||||
('\u{1260}', '\u{1288}'), ('\u{128a}', '\u{128d}'), ('\u{1290}', '\u{12b0}'), ('\u{12b2}',
|
||||
'\u{12b5}'), ('\u{12b8}', '\u{12be}'), ('\u{12c0}', '\u{12c0}'), ('\u{12c2}', '\u{12c5}'),
|
||||
('\u{12c8}', '\u{12d6}'), ('\u{12d8}', '\u{1310}'), ('\u{1312}', '\u{1315}'), ('\u{1318}',
|
||||
'\u{135a}'), ('\u{135d}', '\u{135f}'), ('\u{1369}', '\u{1371}'), ('\u{1380}', '\u{138f}'),
|
||||
('\u{13a0}', '\u{13f5}'), ('\u{13f8}', '\u{13fd}'), ('\u{1401}', '\u{166c}'), ('\u{166f}',
|
||||
'\u{167f}'), ('\u{1681}', '\u{169a}'), ('\u{16a0}', '\u{16ea}'), ('\u{16ee}', '\u{16f8}'),
|
||||
('\u{1700}', '\u{170c}'), ('\u{170e}', '\u{1714}'), ('\u{1720}', '\u{1734}'), ('\u{1740}',
|
||||
'\u{1753}'), ('\u{1760}', '\u{176c}'), ('\u{176e}', '\u{1770}'), ('\u{1772}', '\u{1773}'),
|
||||
('\u{1780}', '\u{17d3}'), ('\u{17d7}', '\u{17d7}'), ('\u{17dc}', '\u{17dd}'), ('\u{17e0}',
|
||||
'\u{17e9}'), ('\u{180b}', '\u{180d}'), ('\u{1810}', '\u{1819}'), ('\u{1820}', '\u{1877}'),
|
||||
('\u{1880}', '\u{18aa}'), ('\u{18b0}', '\u{18f5}'), ('\u{1900}', '\u{191e}'), ('\u{1920}',
|
||||
'\u{192b}'), ('\u{1930}', '\u{193b}'), ('\u{1946}', '\u{196d}'), ('\u{1970}', '\u{1974}'),
|
||||
('\u{1980}', '\u{19ab}'), ('\u{19b0}', '\u{19c9}'), ('\u{19d0}', '\u{19da}'), ('\u{1a00}',
|
||||
'\u{1a1b}'), ('\u{1a20}', '\u{1a5e}'), ('\u{1a60}', '\u{1a7c}'), ('\u{1a7f}', '\u{1a89}'),
|
||||
('\u{1a90}', '\u{1a99}'), ('\u{1aa7}', '\u{1aa7}'), ('\u{1ab0}', '\u{1abd}'), ('\u{1b00}',
|
||||
'\u{1b4b}'), ('\u{1b50}', '\u{1b59}'), ('\u{1b6b}', '\u{1b73}'), ('\u{1b80}', '\u{1bf3}'),
|
||||
('\u{1c00}', '\u{1c37}'), ('\u{1c40}', '\u{1c49}'), ('\u{1c4d}', '\u{1c7d}'), ('\u{1c80}',
|
||||
'\u{1c88}'), ('\u{1cd0}', '\u{1cd2}'), ('\u{1cd4}', '\u{1cf6}'), ('\u{1cf8}', '\u{1cf9}'),
|
||||
('\u{1d00}', '\u{1df5}'), ('\u{1dfb}', '\u{1f15}'), ('\u{1f18}', '\u{1f1d}'), ('\u{1f20}',
|
||||
'\u{1f45}'), ('\u{1f48}', '\u{1f4d}'), ('\u{1f50}', '\u{1f57}'), ('\u{1f59}', '\u{1f59}'),
|
||||
('\u{1f5b}', '\u{1f5b}'), ('\u{1f5d}', '\u{1f5d}'), ('\u{1f5f}', '\u{1f7d}'), ('\u{1f80}',
|
||||
'\u{1fb4}'), ('\u{1fb6}', '\u{1fbc}'), ('\u{1fbe}', '\u{1fbe}'), ('\u{1fc2}', '\u{1fc4}'),
|
||||
('\u{1fc6}', '\u{1fcc}'), ('\u{1fd0}', '\u{1fd3}'), ('\u{1fd6}', '\u{1fdb}'), ('\u{1fe0}',
|
||||
'\u{1fec}'), ('\u{1ff2}', '\u{1ff4}'), ('\u{1ff6}', '\u{1ffc}'), ('\u{203f}', '\u{2040}'),
|
||||
('\u{2054}', '\u{2054}'), ('\u{2071}', '\u{2071}'), ('\u{207f}', '\u{207f}'), ('\u{2090}',
|
||||
'\u{209c}'), ('\u{20d0}', '\u{20dc}'), ('\u{20e1}', '\u{20e1}'), ('\u{20e5}', '\u{20f0}'),
|
||||
('\u{2102}', '\u{2102}'), ('\u{2107}', '\u{2107}'), ('\u{210a}', '\u{2113}'), ('\u{2115}',
|
||||
'\u{2115}'), ('\u{2118}', '\u{211d}'), ('\u{2124}', '\u{2124}'), ('\u{2126}', '\u{2126}'),
|
||||
('\u{2128}', '\u{2128}'), ('\u{212a}', '\u{2139}'), ('\u{213c}', '\u{213f}'), ('\u{2145}',
|
||||
'\u{2149}'), ('\u{214e}', '\u{214e}'), ('\u{2160}', '\u{2188}'), ('\u{2c00}', '\u{2c2e}'),
|
||||
('\u{2c30}', '\u{2c5e}'), ('\u{2c60}', '\u{2ce4}'), ('\u{2ceb}', '\u{2cee}'), ('\u{2cf2}',
|
||||
'\u{2cf3}'), ('\u{2d00}', '\u{2d25}'), ('\u{2d27}', '\u{2d27}'), ('\u{2d2d}', '\u{2d2d}'),
|
||||
('\u{2d30}', '\u{2d67}'), ('\u{2d6f}', '\u{2d6f}'), ('\u{2d80}', '\u{2d96}'), ('\u{2da0}',
|
||||
'\u{2da6}'), ('\u{2da8}', '\u{2dae}'), ('\u{2db0}', '\u{2db6}'), ('\u{2db8}', '\u{2dbe}'),
|
||||
('\u{2dc0}', '\u{2dc6}'), ('\u{2dc8}', '\u{2dce}'), ('\u{2dd0}', '\u{2dd6}'), ('\u{2dd8}',
|
||||
'\u{2dde}'), ('\u{3005}', '\u{3007}'), ('\u{3021}', '\u{3029}'), ('\u{3031}', '\u{3035}'),
|
||||
('\u{3038}', '\u{303c}'), ('\u{3041}', '\u{3096}'), ('\u{309d}', '\u{309f}'), ('\u{30a1}',
|
||||
'\u{30fa}'), ('\u{30fc}', '\u{30ff}'), ('\u{3105}', '\u{312f}'), ('\u{3131}', '\u{318e}'),
|
||||
('\u{31a0}', '\u{31ba}'), ('\u{31f0}', '\u{31ff}'), ('\u{3400}', '\u{4db5}'), ('\u{4e00}',
|
||||
'\u{9fef}'), ('\u{a000}', '\u{a48c}'), ('\u{a4d0}', '\u{a4fd}'), ('\u{a500}', '\u{a60c}'),
|
||||
('\u{a610}', '\u{a61f}'), ('\u{a62a}', '\u{a62b}'), ('\u{a640}', '\u{a66e}'), ('\u{a67f}',
|
||||
'\u{a69d}'), ('\u{a6a0}', '\u{a6ef}'), ('\u{a717}', '\u{a71f}'), ('\u{a722}', '\u{a788}'),
|
||||
('\u{a78b}', '\u{a7bf}'), ('\u{a7c2}', '\u{a7c6}'), ('\u{a7f7}', '\u{a801}'), ('\u{a803}',
|
||||
'\u{a805}'), ('\u{a807}', '\u{a80a}'), ('\u{a80c}', '\u{a822}'), ('\u{a840}', '\u{a873}'),
|
||||
('\u{a882}', '\u{a8b3}'), ('\u{a8f2}', '\u{a8f7}'), ('\u{a8fb}', '\u{a8fb}'), ('\u{a8fd}',
|
||||
'\u{a8fe}'), ('\u{a90a}', '\u{a925}'), ('\u{a930}', '\u{a946}'), ('\u{a960}', '\u{a97c}'),
|
||||
('\u{a984}', '\u{a9b2}'), ('\u{a9cf}', '\u{a9cf}'), ('\u{a9e0}', '\u{a9e4}'), ('\u{a9e6}',
|
||||
'\u{a9ef}'), ('\u{a9fa}', '\u{a9fe}'), ('\u{aa00}', '\u{aa28}'), ('\u{aa40}', '\u{aa42}'),
|
||||
('\u{aa44}', '\u{aa4b}'), ('\u{aa60}', '\u{aa76}'), ('\u{aa7a}', '\u{aa7a}'), ('\u{aa7e}',
|
||||
'\u{aaaf}'), ('\u{aab1}', '\u{aab1}'), ('\u{aab5}', '\u{aab6}'), ('\u{aab9}', '\u{aabd}'),
|
||||
('\u{aac0}', '\u{aac0}'), ('\u{aac2}', '\u{aac2}'), ('\u{aadb}', '\u{aadd}'), ('\u{aae0}',
|
||||
'\u{aaea}'), ('\u{aaf2}', '\u{aaf4}'), ('\u{ab01}', '\u{ab06}'), ('\u{ab09}', '\u{ab0e}'),
|
||||
('\u{ab11}', '\u{ab16}'), ('\u{ab20}', '\u{ab26}'), ('\u{ab28}', '\u{ab2e}'), ('\u{ab30}',
|
||||
'\u{ab5a}'), ('\u{ab5c}', '\u{ab67}'), ('\u{ab70}', '\u{abe2}'), ('\u{ac00}', '\u{d7a3}'),
|
||||
('\u{d7b0}', '\u{d7c6}'), ('\u{d7cb}', '\u{d7fb}'), ('\u{f900}', '\u{fa6d}'), ('\u{fa70}',
|
||||
'\u{fad9}'), ('\u{fb00}', '\u{fb06}'), ('\u{fb13}', '\u{fb17}'), ('\u{fb1d}', '\u{fb1d}'),
|
||||
('\u{fb1f}', '\u{fb28}'), ('\u{fb2a}', '\u{fb36}'), ('\u{fb38}', '\u{fb3c}'), ('\u{fb3e}',
|
||||
'\u{fb3e}'), ('\u{fb40}', '\u{fb41}'), ('\u{fb43}', '\u{fb44}'), ('\u{fb46}', '\u{fbb1}'),
|
||||
('\u{fbd3}', '\u{fc5d}'), ('\u{fc64}', '\u{fd3d}'), ('\u{fd50}', '\u{fd8f}'), ('\u{fd92}',
|
||||
'\u{fdc7}'), ('\u{fdf0}', '\u{fdf9}'), ('\u{fe71}', '\u{fe71}'), ('\u{fe73}', '\u{fe73}'),
|
||||
('\u{fe77}', '\u{fe77}'), ('\u{fe79}', '\u{fe79}'), ('\u{fe7b}', '\u{fe7b}'), ('\u{fe7d}',
|
||||
'\u{fe7d}'), ('\u{fe7f}', '\u{fefc}'), ('\u{ff21}', '\u{ff3a}'), ('\u{ff41}', '\u{ff5a}'),
|
||||
('\u{ff66}', '\u{ff9d}'), ('\u{ffa0}', '\u{ffbe}'), ('\u{ffc2}', '\u{ffc7}'), ('\u{ffca}',
|
||||
'\u{ffcf}'), ('\u{ffd2}', '\u{ffd7}'), ('\u{ffda}', '\u{ffdc}'), ('\u{10000}', '\u{1000b}'),
|
||||
('\u{2c30}', '\u{2c5e}'), ('\u{2c60}', '\u{2ce4}'), ('\u{2ceb}', '\u{2cf3}'), ('\u{2d00}',
|
||||
'\u{2d25}'), ('\u{2d27}', '\u{2d27}'), ('\u{2d2d}', '\u{2d2d}'), ('\u{2d30}', '\u{2d67}'),
|
||||
('\u{2d6f}', '\u{2d6f}'), ('\u{2d7f}', '\u{2d96}'), ('\u{2da0}', '\u{2da6}'), ('\u{2da8}',
|
||||
'\u{2dae}'), ('\u{2db0}', '\u{2db6}'), ('\u{2db8}', '\u{2dbe}'), ('\u{2dc0}', '\u{2dc6}'),
|
||||
('\u{2dc8}', '\u{2dce}'), ('\u{2dd0}', '\u{2dd6}'), ('\u{2dd8}', '\u{2dde}'), ('\u{2de0}',
|
||||
'\u{2dff}'), ('\u{3005}', '\u{3007}'), ('\u{3021}', '\u{302f}'), ('\u{3031}', '\u{3035}'),
|
||||
('\u{3038}', '\u{303c}'), ('\u{3041}', '\u{3096}'), ('\u{3099}', '\u{309a}'), ('\u{309d}',
|
||||
'\u{309f}'), ('\u{30a1}', '\u{30fa}'), ('\u{30fc}', '\u{30ff}'), ('\u{3105}', '\u{312d}'),
|
||||
('\u{3131}', '\u{318e}'), ('\u{31a0}', '\u{31ba}'), ('\u{31f0}', '\u{31ff}'), ('\u{3400}',
|
||||
'\u{4db5}'), ('\u{4e00}', '\u{9fd5}'), ('\u{a000}', '\u{a48c}'), ('\u{a4d0}', '\u{a4fd}'),
|
||||
('\u{a500}', '\u{a60c}'), ('\u{a610}', '\u{a62b}'), ('\u{a640}', '\u{a66f}'), ('\u{a674}',
|
||||
'\u{a67d}'), ('\u{a67f}', '\u{a6f1}'), ('\u{a717}', '\u{a71f}'), ('\u{a722}', '\u{a788}'),
|
||||
('\u{a78b}', '\u{a7ae}'), ('\u{a7b0}', '\u{a7b7}'), ('\u{a7f7}', '\u{a827}'), ('\u{a840}',
|
||||
'\u{a873}'), ('\u{a880}', '\u{a8c5}'), ('\u{a8d0}', '\u{a8d9}'), ('\u{a8e0}', '\u{a8f7}'),
|
||||
('\u{a8fb}', '\u{a8fb}'), ('\u{a8fd}', '\u{a8fd}'), ('\u{a900}', '\u{a92d}'), ('\u{a930}',
|
||||
'\u{a953}'), ('\u{a960}', '\u{a97c}'), ('\u{a980}', '\u{a9c0}'), ('\u{a9cf}', '\u{a9d9}'),
|
||||
('\u{a9e0}', '\u{a9fe}'), ('\u{aa00}', '\u{aa36}'), ('\u{aa40}', '\u{aa4d}'), ('\u{aa50}',
|
||||
'\u{aa59}'), ('\u{aa60}', '\u{aa76}'), ('\u{aa7a}', '\u{aac2}'), ('\u{aadb}', '\u{aadd}'),
|
||||
('\u{aae0}', '\u{aaef}'), ('\u{aaf2}', '\u{aaf6}'), ('\u{ab01}', '\u{ab06}'), ('\u{ab09}',
|
||||
'\u{ab0e}'), ('\u{ab11}', '\u{ab16}'), ('\u{ab20}', '\u{ab26}'), ('\u{ab28}', '\u{ab2e}'),
|
||||
('\u{ab30}', '\u{ab5a}'), ('\u{ab5c}', '\u{ab65}'), ('\u{ab70}', '\u{abea}'), ('\u{abec}',
|
||||
'\u{abed}'), ('\u{abf0}', '\u{abf9}'), ('\u{ac00}', '\u{d7a3}'), ('\u{d7b0}', '\u{d7c6}'),
|
||||
('\u{d7cb}', '\u{d7fb}'), ('\u{f900}', '\u{fa6d}'), ('\u{fa70}', '\u{fad9}'), ('\u{fb00}',
|
||||
'\u{fb06}'), ('\u{fb13}', '\u{fb17}'), ('\u{fb1d}', '\u{fb28}'), ('\u{fb2a}', '\u{fb36}'),
|
||||
('\u{fb38}', '\u{fb3c}'), ('\u{fb3e}', '\u{fb3e}'), ('\u{fb40}', '\u{fb41}'), ('\u{fb43}',
|
||||
'\u{fb44}'), ('\u{fb46}', '\u{fbb1}'), ('\u{fbd3}', '\u{fc5d}'), ('\u{fc64}', '\u{fd3d}'),
|
||||
('\u{fd50}', '\u{fd8f}'), ('\u{fd92}', '\u{fdc7}'), ('\u{fdf0}', '\u{fdf9}'), ('\u{fe00}',
|
||||
'\u{fe0f}'), ('\u{fe20}', '\u{fe2f}'), ('\u{fe33}', '\u{fe34}'), ('\u{fe4d}', '\u{fe4f}'),
|
||||
('\u{fe71}', '\u{fe71}'), ('\u{fe73}', '\u{fe73}'), ('\u{fe77}', '\u{fe77}'), ('\u{fe79}',
|
||||
'\u{fe79}'), ('\u{fe7b}', '\u{fe7b}'), ('\u{fe7d}', '\u{fe7d}'), ('\u{fe7f}', '\u{fefc}'),
|
||||
('\u{ff10}', '\u{ff19}'), ('\u{ff21}', '\u{ff3a}'), ('\u{ff3f}', '\u{ff3f}'), ('\u{ff41}',
|
||||
'\u{ff5a}'), ('\u{ff66}', '\u{ffbe}'), ('\u{ffc2}', '\u{ffc7}'), ('\u{ffca}', '\u{ffcf}'),
|
||||
('\u{ffd2}', '\u{ffd7}'), ('\u{ffda}', '\u{ffdc}'), ('\u{10000}', '\u{1000b}'),
|
||||
('\u{1000d}', '\u{10026}'), ('\u{10028}', '\u{1003a}'), ('\u{1003c}', '\u{1003d}'),
|
||||
('\u{1003f}', '\u{1004d}'), ('\u{10050}', '\u{1005d}'), ('\u{10080}', '\u{100fa}'),
|
||||
('\u{10140}', '\u{10174}'), ('\u{10280}', '\u{1029c}'), ('\u{102a0}', '\u{102d0}'),
|
||||
('\u{10300}', '\u{1031f}'), ('\u{1032d}', '\u{1034a}'), ('\u{10350}', '\u{10375}'),
|
||||
('\u{10380}', '\u{1039d}'), ('\u{103a0}', '\u{103c3}'), ('\u{103c8}', '\u{103cf}'),
|
||||
('\u{103d1}', '\u{103d5}'), ('\u{10400}', '\u{1049d}'), ('\u{104b0}', '\u{104d3}'),
|
||||
('\u{10140}', '\u{10174}'), ('\u{101fd}', '\u{101fd}'), ('\u{10280}', '\u{1029c}'),
|
||||
('\u{102a0}', '\u{102d0}'), ('\u{102e0}', '\u{102e0}'), ('\u{10300}', '\u{1031f}'),
|
||||
('\u{10330}', '\u{1034a}'), ('\u{10350}', '\u{1037a}'), ('\u{10380}', '\u{1039d}'),
|
||||
('\u{103a0}', '\u{103c3}'), ('\u{103c8}', '\u{103cf}'), ('\u{103d1}', '\u{103d5}'),
|
||||
('\u{10400}', '\u{1049d}'), ('\u{104a0}', '\u{104a9}'), ('\u{104b0}', '\u{104d3}'),
|
||||
('\u{104d8}', '\u{104fb}'), ('\u{10500}', '\u{10527}'), ('\u{10530}', '\u{10563}'),
|
||||
('\u{10600}', '\u{10736}'), ('\u{10740}', '\u{10755}'), ('\u{10760}', '\u{10767}'),
|
||||
('\u{10800}', '\u{10805}'), ('\u{10808}', '\u{10808}'), ('\u{1080a}', '\u{10835}'),
|
||||
('\u{10837}', '\u{10838}'), ('\u{1083c}', '\u{1083c}'), ('\u{1083f}', '\u{10855}'),
|
||||
('\u{10860}', '\u{10876}'), ('\u{10880}', '\u{1089e}'), ('\u{108e0}', '\u{108f2}'),
|
||||
('\u{108f4}', '\u{108f5}'), ('\u{10900}', '\u{10915}'), ('\u{10920}', '\u{10939}'),
|
||||
('\u{10980}', '\u{109b7}'), ('\u{109be}', '\u{109bf}'), ('\u{10a00}', '\u{10a00}'),
|
||||
('\u{10a10}', '\u{10a13}'), ('\u{10a15}', '\u{10a17}'), ('\u{10a19}', '\u{10a35}'),
|
||||
('\u{10980}', '\u{109b7}'), ('\u{109be}', '\u{109bf}'), ('\u{10a00}', '\u{10a03}'),
|
||||
('\u{10a05}', '\u{10a06}'), ('\u{10a0c}', '\u{10a13}'), ('\u{10a15}', '\u{10a17}'),
|
||||
('\u{10a19}', '\u{10a33}'), ('\u{10a38}', '\u{10a3a}'), ('\u{10a3f}', '\u{10a3f}'),
|
||||
('\u{10a60}', '\u{10a7c}'), ('\u{10a80}', '\u{10a9c}'), ('\u{10ac0}', '\u{10ac7}'),
|
||||
('\u{10ac9}', '\u{10ae4}'), ('\u{10b00}', '\u{10b35}'), ('\u{10b40}', '\u{10b55}'),
|
||||
('\u{10ac9}', '\u{10ae6}'), ('\u{10b00}', '\u{10b35}'), ('\u{10b40}', '\u{10b55}'),
|
||||
('\u{10b60}', '\u{10b72}'), ('\u{10b80}', '\u{10b91}'), ('\u{10c00}', '\u{10c48}'),
|
||||
('\u{10c80}', '\u{10cb2}'), ('\u{10cc0}', '\u{10cf2}'), ('\u{10d00}', '\u{10d23}'),
|
||||
('\u{10f00}', '\u{10f1c}'), ('\u{10f27}', '\u{10f27}'), ('\u{10f30}', '\u{10f45}'),
|
||||
('\u{10fe0}', '\u{10ff6}'), ('\u{11003}', '\u{11037}'), ('\u{11083}', '\u{110af}'),
|
||||
('\u{110d0}', '\u{110e8}'), ('\u{11103}', '\u{11126}'), ('\u{11144}', '\u{11144}'),
|
||||
('\u{11150}', '\u{11172}'), ('\u{11176}', '\u{11176}'), ('\u{11183}', '\u{111b2}'),
|
||||
('\u{111c1}', '\u{111c4}'), ('\u{111da}', '\u{111da}'), ('\u{111dc}', '\u{111dc}'),
|
||||
('\u{11200}', '\u{11211}'), ('\u{11213}', '\u{1122b}'), ('\u{11280}', '\u{11286}'),
|
||||
('\u{11288}', '\u{11288}'), ('\u{1128a}', '\u{1128d}'), ('\u{1128f}', '\u{1129d}'),
|
||||
('\u{1129f}', '\u{112a8}'), ('\u{112b0}', '\u{112de}'), ('\u{11305}', '\u{1130c}'),
|
||||
('\u{10c80}', '\u{10cb2}'), ('\u{10cc0}', '\u{10cf2}'), ('\u{11000}', '\u{11046}'),
|
||||
('\u{11066}', '\u{1106f}'), ('\u{1107f}', '\u{110ba}'), ('\u{110d0}', '\u{110e8}'),
|
||||
('\u{110f0}', '\u{110f9}'), ('\u{11100}', '\u{11134}'), ('\u{11136}', '\u{1113f}'),
|
||||
('\u{11150}', '\u{11173}'), ('\u{11176}', '\u{11176}'), ('\u{11180}', '\u{111c4}'),
|
||||
('\u{111ca}', '\u{111cc}'), ('\u{111d0}', '\u{111da}'), ('\u{111dc}', '\u{111dc}'),
|
||||
('\u{11200}', '\u{11211}'), ('\u{11213}', '\u{11237}'), ('\u{1123e}', '\u{1123e}'),
|
||||
('\u{11280}', '\u{11286}'), ('\u{11288}', '\u{11288}'), ('\u{1128a}', '\u{1128d}'),
|
||||
('\u{1128f}', '\u{1129d}'), ('\u{1129f}', '\u{112a8}'), ('\u{112b0}', '\u{112ea}'),
|
||||
('\u{112f0}', '\u{112f9}'), ('\u{11300}', '\u{11303}'), ('\u{11305}', '\u{1130c}'),
|
||||
('\u{1130f}', '\u{11310}'), ('\u{11313}', '\u{11328}'), ('\u{1132a}', '\u{11330}'),
|
||||
('\u{11332}', '\u{11333}'), ('\u{11335}', '\u{11339}'), ('\u{1133d}', '\u{1133d}'),
|
||||
('\u{11350}', '\u{11350}'), ('\u{1135d}', '\u{11361}'), ('\u{11400}', '\u{11434}'),
|
||||
('\u{11447}', '\u{1144a}'), ('\u{1145f}', '\u{1145f}'), ('\u{11480}', '\u{114af}'),
|
||||
('\u{11332}', '\u{11333}'), ('\u{11335}', '\u{11339}'), ('\u{1133c}', '\u{11344}'),
|
||||
('\u{11347}', '\u{11348}'), ('\u{1134b}', '\u{1134d}'), ('\u{11350}', '\u{11350}'),
|
||||
('\u{11357}', '\u{11357}'), ('\u{1135d}', '\u{11363}'), ('\u{11366}', '\u{1136c}'),
|
||||
('\u{11370}', '\u{11374}'), ('\u{11400}', '\u{1144a}'), ('\u{11450}', '\u{11459}'),
|
||||
('\u{11480}', '\u{114c5}'), ('\u{114c7}', '\u{114c7}'), ('\u{114d0}', '\u{114d9}'),
|
||||
('\u{11580}', '\u{115b5}'), ('\u{115b8}', '\u{115c0}'), ('\u{115d8}', '\u{115dd}'),
|
||||
('\u{11600}', '\u{11640}'), ('\u{11644}', '\u{11644}'), ('\u{11650}', '\u{11659}'),
|
||||
('\u{11680}', '\u{116b7}'), ('\u{116c0}', '\u{116c9}'), ('\u{11700}', '\u{11719}'),
|
||||
('\u{1171d}', '\u{1172b}'), ('\u{11730}', '\u{11739}'), ('\u{118a0}', '\u{118e9}'),
|
||||
('\u{118ff}', '\u{118ff}'), ('\u{11ac0}', '\u{11af8}'), ('\u{11c00}', '\u{11c08}'),
|
||||
('\u{11c0a}', '\u{11c36}'), ('\u{11c38}', '\u{11c40}'), ('\u{11c50}', '\u{11c59}'),
|
||||
('\u{11c72}', '\u{11c8f}'), ('\u{11c92}', '\u{11ca7}'), ('\u{11ca9}', '\u{11cb6}'),
|
||||
('\u{12000}', '\u{12399}'), ('\u{12400}', '\u{1246e}'), ('\u{12480}', '\u{12543}'),
|
||||
('\u{13000}', '\u{1342e}'), ('\u{14400}', '\u{14646}'), ('\u{16800}', '\u{16a38}'),
|
||||
('\u{16a40}', '\u{16a5e}'), ('\u{16a60}', '\u{16a69}'), ('\u{16ad0}', '\u{16aed}'),
|
||||
('\u{16af0}', '\u{16af4}'), ('\u{16b00}', '\u{16b36}'), ('\u{16b40}', '\u{16b43}'),
|
||||
('\u{16b50}', '\u{16b59}'), ('\u{16b63}', '\u{16b77}'), ('\u{16b7d}', '\u{16b8f}'),
|
||||
('\u{16f00}', '\u{16f44}'), ('\u{16f50}', '\u{16f7e}'), ('\u{16f8f}', '\u{16f9f}'),
|
||||
('\u{16fe0}', '\u{16fe0}'), ('\u{17000}', '\u{187ec}'), ('\u{18800}', '\u{18af2}'),
|
||||
('\u{1b000}', '\u{1b001}'), ('\u{1bc00}', '\u{1bc6a}'), ('\u{1bc70}', '\u{1bc7c}'),
|
||||
('\u{1bc80}', '\u{1bc88}'), ('\u{1bc90}', '\u{1bc99}'), ('\u{1bc9d}', '\u{1bc9e}'),
|
||||
('\u{1d165}', '\u{1d169}'), ('\u{1d16d}', '\u{1d172}'), ('\u{1d17b}', '\u{1d182}'),
|
||||
('\u{1d185}', '\u{1d18b}'), ('\u{1d1aa}', '\u{1d1ad}'), ('\u{1d242}', '\u{1d244}'),
|
||||
('\u{1d400}', '\u{1d454}'), ('\u{1d456}', '\u{1d49c}'), ('\u{1d49e}', '\u{1d49f}'),
|
||||
('\u{1d4a2}', '\u{1d4a2}'), ('\u{1d4a5}', '\u{1d4a6}'), ('\u{1d4a9}', '\u{1d4ac}'),
|
||||
('\u{1d4ae}', '\u{1d4b9}'), ('\u{1d4bb}', '\u{1d4bb}'), ('\u{1d4bd}', '\u{1d4c3}'),
|
||||
('\u{1d4c5}', '\u{1d505}'), ('\u{1d507}', '\u{1d50a}'), ('\u{1d50d}', '\u{1d514}'),
|
||||
('\u{1d516}', '\u{1d51c}'), ('\u{1d51e}', '\u{1d539}'), ('\u{1d53b}', '\u{1d53e}'),
|
||||
('\u{1d540}', '\u{1d544}'), ('\u{1d546}', '\u{1d546}'), ('\u{1d54a}', '\u{1d550}'),
|
||||
('\u{1d552}', '\u{1d6a5}'), ('\u{1d6a8}', '\u{1d6c0}'), ('\u{1d6c2}', '\u{1d6da}'),
|
||||
('\u{1d6dc}', '\u{1d6fa}'), ('\u{1d6fc}', '\u{1d714}'), ('\u{1d716}', '\u{1d734}'),
|
||||
('\u{1d736}', '\u{1d74e}'), ('\u{1d750}', '\u{1d76e}'), ('\u{1d770}', '\u{1d788}'),
|
||||
('\u{1d78a}', '\u{1d7a8}'), ('\u{1d7aa}', '\u{1d7c2}'), ('\u{1d7c4}', '\u{1d7cb}'),
|
||||
('\u{1d7ce}', '\u{1d7ff}'), ('\u{1da00}', '\u{1da36}'), ('\u{1da3b}', '\u{1da6c}'),
|
||||
('\u{1da75}', '\u{1da75}'), ('\u{1da84}', '\u{1da84}'), ('\u{1da9b}', '\u{1da9f}'),
|
||||
('\u{1daa1}', '\u{1daaf}'), ('\u{1e000}', '\u{1e006}'), ('\u{1e008}', '\u{1e018}'),
|
||||
('\u{1e01b}', '\u{1e021}'), ('\u{1e023}', '\u{1e024}'), ('\u{1e026}', '\u{1e02a}'),
|
||||
('\u{1e800}', '\u{1e8c4}'), ('\u{1e8d0}', '\u{1e8d6}'), ('\u{1e900}', '\u{1e94a}'),
|
||||
('\u{1e950}', '\u{1e959}'), ('\u{1ee00}', '\u{1ee03}'), ('\u{1ee05}', '\u{1ee1f}'),
|
||||
('\u{1ee21}', '\u{1ee22}'), ('\u{1ee24}', '\u{1ee24}'), ('\u{1ee27}', '\u{1ee27}'),
|
||||
('\u{1ee29}', '\u{1ee32}'), ('\u{1ee34}', '\u{1ee37}'), ('\u{1ee39}', '\u{1ee39}'),
|
||||
('\u{1ee3b}', '\u{1ee3b}'), ('\u{1ee42}', '\u{1ee42}'), ('\u{1ee47}', '\u{1ee47}'),
|
||||
('\u{1ee49}', '\u{1ee49}'), ('\u{1ee4b}', '\u{1ee4b}'), ('\u{1ee4d}', '\u{1ee4f}'),
|
||||
('\u{1ee51}', '\u{1ee52}'), ('\u{1ee54}', '\u{1ee54}'), ('\u{1ee57}', '\u{1ee57}'),
|
||||
('\u{1ee59}', '\u{1ee59}'), ('\u{1ee5b}', '\u{1ee5b}'), ('\u{1ee5d}', '\u{1ee5d}'),
|
||||
('\u{1ee5f}', '\u{1ee5f}'), ('\u{1ee61}', '\u{1ee62}'), ('\u{1ee64}', '\u{1ee64}'),
|
||||
('\u{1ee67}', '\u{1ee6a}'), ('\u{1ee6c}', '\u{1ee72}'), ('\u{1ee74}', '\u{1ee77}'),
|
||||
('\u{1ee79}', '\u{1ee7c}'), ('\u{1ee7e}', '\u{1ee7e}'), ('\u{1ee80}', '\u{1ee89}'),
|
||||
('\u{1ee8b}', '\u{1ee9b}'), ('\u{1eea1}', '\u{1eea3}'), ('\u{1eea5}', '\u{1eea9}'),
|
||||
('\u{1eeab}', '\u{1eebb}'), ('\u{20000}', '\u{2a6d6}'), ('\u{2a700}', '\u{2b734}'),
|
||||
('\u{2b740}', '\u{2b81d}'), ('\u{2b820}', '\u{2cea1}'), ('\u{2f800}', '\u{2fa1d}'),
|
||||
('\u{e0100}', '\u{e01ef}')
|
||||
];
|
||||
|
||||
pub fn XID_Continue(c: char) -> bool {
|
||||
super::bsearch_range_table(c, XID_Continue_table)
|
||||
}
|
||||
|
||||
pub const XID_Start_table: &'static [(char, char)] = &[
|
||||
('\u{41}', '\u{5a}'), ('\u{61}', '\u{7a}'), ('\u{aa}', '\u{aa}'), ('\u{b5}', '\u{b5}'),
|
||||
('\u{ba}', '\u{ba}'), ('\u{c0}', '\u{d6}'), ('\u{d8}', '\u{f6}'), ('\u{f8}', '\u{2c1}'),
|
||||
('\u{2c6}', '\u{2d1}'), ('\u{2e0}', '\u{2e4}'), ('\u{2ec}', '\u{2ec}'), ('\u{2ee}',
|
||||
'\u{2ee}'), ('\u{370}', '\u{374}'), ('\u{376}', '\u{377}'), ('\u{37b}', '\u{37d}'),
|
||||
('\u{37f}', '\u{37f}'), ('\u{386}', '\u{386}'), ('\u{388}', '\u{38a}'), ('\u{38c}',
|
||||
'\u{38c}'), ('\u{38e}', '\u{3a1}'), ('\u{3a3}', '\u{3f5}'), ('\u{3f7}', '\u{481}'),
|
||||
('\u{48a}', '\u{52f}'), ('\u{531}', '\u{556}'), ('\u{559}', '\u{559}'), ('\u{561}',
|
||||
'\u{587}'), ('\u{5d0}', '\u{5ea}'), ('\u{5f0}', '\u{5f2}'), ('\u{620}', '\u{64a}'),
|
||||
('\u{66e}', '\u{66f}'), ('\u{671}', '\u{6d3}'), ('\u{6d5}', '\u{6d5}'), ('\u{6e5}',
|
||||
'\u{6e6}'), ('\u{6ee}', '\u{6ef}'), ('\u{6fa}', '\u{6fc}'), ('\u{6ff}', '\u{6ff}'),
|
||||
('\u{710}', '\u{710}'), ('\u{712}', '\u{72f}'), ('\u{74d}', '\u{7a5}'), ('\u{7b1}',
|
||||
'\u{7b1}'), ('\u{7ca}', '\u{7ea}'), ('\u{7f4}', '\u{7f5}'), ('\u{7fa}', '\u{7fa}'),
|
||||
('\u{800}', '\u{815}'), ('\u{81a}', '\u{81a}'), ('\u{824}', '\u{824}'), ('\u{828}',
|
||||
'\u{828}'), ('\u{840}', '\u{858}'), ('\u{8a0}', '\u{8b4}'), ('\u{8b6}', '\u{8bd}'),
|
||||
('\u{904}', '\u{939}'), ('\u{93d}', '\u{93d}'), ('\u{950}', '\u{950}'), ('\u{958}',
|
||||
'\u{961}'), ('\u{971}', '\u{980}'), ('\u{985}', '\u{98c}'), ('\u{98f}', '\u{990}'),
|
||||
('\u{993}', '\u{9a8}'), ('\u{9aa}', '\u{9b0}'), ('\u{9b2}', '\u{9b2}'), ('\u{9b6}',
|
||||
'\u{9b9}'), ('\u{9bd}', '\u{9bd}'), ('\u{9ce}', '\u{9ce}'), ('\u{9dc}', '\u{9dd}'),
|
||||
('\u{9df}', '\u{9e1}'), ('\u{9f0}', '\u{9f1}'), ('\u{a05}', '\u{a0a}'), ('\u{a0f}',
|
||||
'\u{a10}'), ('\u{a13}', '\u{a28}'), ('\u{a2a}', '\u{a30}'), ('\u{a32}', '\u{a33}'),
|
||||
('\u{a35}', '\u{a36}'), ('\u{a38}', '\u{a39}'), ('\u{a59}', '\u{a5c}'), ('\u{a5e}',
|
||||
'\u{a5e}'), ('\u{a72}', '\u{a74}'), ('\u{a85}', '\u{a8d}'), ('\u{a8f}', '\u{a91}'),
|
||||
('\u{a93}', '\u{aa8}'), ('\u{aaa}', '\u{ab0}'), ('\u{ab2}', '\u{ab3}'), ('\u{ab5}',
|
||||
'\u{ab9}'), ('\u{abd}', '\u{abd}'), ('\u{ad0}', '\u{ad0}'), ('\u{ae0}', '\u{ae1}'),
|
||||
('\u{af9}', '\u{af9}'), ('\u{b05}', '\u{b0c}'), ('\u{b0f}', '\u{b10}'), ('\u{b13}',
|
||||
'\u{b28}'), ('\u{b2a}', '\u{b30}'), ('\u{b32}', '\u{b33}'), ('\u{b35}', '\u{b39}'),
|
||||
('\u{b3d}', '\u{b3d}'), ('\u{b5c}', '\u{b5d}'), ('\u{b5f}', '\u{b61}'), ('\u{b71}',
|
||||
'\u{b71}'), ('\u{b83}', '\u{b83}'), ('\u{b85}', '\u{b8a}'), ('\u{b8e}', '\u{b90}'),
|
||||
('\u{b92}', '\u{b95}'), ('\u{b99}', '\u{b9a}'), ('\u{b9c}', '\u{b9c}'), ('\u{b9e}',
|
||||
'\u{b9f}'), ('\u{ba3}', '\u{ba4}'), ('\u{ba8}', '\u{baa}'), ('\u{bae}', '\u{bb9}'),
|
||||
('\u{bd0}', '\u{bd0}'), ('\u{c05}', '\u{c0c}'), ('\u{c0e}', '\u{c10}'), ('\u{c12}',
|
||||
'\u{c28}'), ('\u{c2a}', '\u{c39}'), ('\u{c3d}', '\u{c3d}'), ('\u{c58}', '\u{c5a}'),
|
||||
('\u{c60}', '\u{c61}'), ('\u{c80}', '\u{c80}'), ('\u{c85}', '\u{c8c}'), ('\u{c8e}',
|
||||
'\u{c90}'), ('\u{c92}', '\u{ca8}'), ('\u{caa}', '\u{cb3}'), ('\u{cb5}', '\u{cb9}'),
|
||||
('\u{cbd}', '\u{cbd}'), ('\u{cde}', '\u{cde}'), ('\u{ce0}', '\u{ce1}'), ('\u{cf1}',
|
||||
'\u{cf2}'), ('\u{d05}', '\u{d0c}'), ('\u{d0e}', '\u{d10}'), ('\u{d12}', '\u{d3a}'),
|
||||
('\u{d3d}', '\u{d3d}'), ('\u{d4e}', '\u{d4e}'), ('\u{d54}', '\u{d56}'), ('\u{d5f}',
|
||||
'\u{d61}'), ('\u{d7a}', '\u{d7f}'), ('\u{d85}', '\u{d96}'), ('\u{d9a}', '\u{db1}'),
|
||||
('\u{db3}', '\u{dbb}'), ('\u{dbd}', '\u{dbd}'), ('\u{dc0}', '\u{dc6}'), ('\u{e01}',
|
||||
'\u{e30}'), ('\u{e32}', '\u{e32}'), ('\u{e40}', '\u{e46}'), ('\u{e81}', '\u{e82}'),
|
||||
('\u{e84}', '\u{e84}'), ('\u{e87}', '\u{e88}'), ('\u{e8a}', '\u{e8a}'), ('\u{e8d}',
|
||||
'\u{e8d}'), ('\u{e94}', '\u{e97}'), ('\u{e99}', '\u{e9f}'), ('\u{ea1}', '\u{ea3}'),
|
||||
('\u{ea5}', '\u{ea5}'), ('\u{ea7}', '\u{ea7}'), ('\u{eaa}', '\u{eab}'), ('\u{ead}',
|
||||
'\u{eb0}'), ('\u{eb2}', '\u{eb2}'), ('\u{ebd}', '\u{ebd}'), ('\u{ec0}', '\u{ec4}'),
|
||||
('\u{ec6}', '\u{ec6}'), ('\u{edc}', '\u{edf}'), ('\u{f00}', '\u{f00}'), ('\u{f40}',
|
||||
'\u{f47}'), ('\u{f49}', '\u{f6c}'), ('\u{f88}', '\u{f8c}'), ('\u{1000}', '\u{102a}'),
|
||||
('\u{103f}', '\u{103f}'), ('\u{1050}', '\u{1055}'), ('\u{105a}', '\u{105d}'), ('\u{1061}',
|
||||
'\u{1061}'), ('\u{1065}', '\u{1066}'), ('\u{106e}', '\u{1070}'), ('\u{1075}', '\u{1081}'),
|
||||
('\u{108e}', '\u{108e}'), ('\u{10a0}', '\u{10c5}'), ('\u{10c7}', '\u{10c7}'), ('\u{10cd}',
|
||||
'\u{10cd}'), ('\u{10d0}', '\u{10fa}'), ('\u{10fc}', '\u{1248}'), ('\u{124a}', '\u{124d}'),
|
||||
('\u{1250}', '\u{1256}'), ('\u{1258}', '\u{1258}'), ('\u{125a}', '\u{125d}'), ('\u{1260}',
|
||||
'\u{1288}'), ('\u{128a}', '\u{128d}'), ('\u{1290}', '\u{12b0}'), ('\u{12b2}', '\u{12b5}'),
|
||||
('\u{12b8}', '\u{12be}'), ('\u{12c0}', '\u{12c0}'), ('\u{12c2}', '\u{12c5}'), ('\u{12c8}',
|
||||
'\u{12d6}'), ('\u{12d8}', '\u{1310}'), ('\u{1312}', '\u{1315}'), ('\u{1318}', '\u{135a}'),
|
||||
('\u{1380}', '\u{138f}'), ('\u{13a0}', '\u{13f5}'), ('\u{13f8}', '\u{13fd}'), ('\u{1401}',
|
||||
'\u{166c}'), ('\u{166f}', '\u{167f}'), ('\u{1681}', '\u{169a}'), ('\u{16a0}', '\u{16ea}'),
|
||||
('\u{16ee}', '\u{16f8}'), ('\u{1700}', '\u{170c}'), ('\u{170e}', '\u{1711}'), ('\u{1720}',
|
||||
'\u{1731}'), ('\u{1740}', '\u{1751}'), ('\u{1760}', '\u{176c}'), ('\u{176e}', '\u{1770}'),
|
||||
('\u{1780}', '\u{17b3}'), ('\u{17d7}', '\u{17d7}'), ('\u{17dc}', '\u{17dc}'), ('\u{1820}',
|
||||
'\u{1877}'), ('\u{1880}', '\u{18a8}'), ('\u{18aa}', '\u{18aa}'), ('\u{18b0}', '\u{18f5}'),
|
||||
('\u{1900}', '\u{191e}'), ('\u{1950}', '\u{196d}'), ('\u{1970}', '\u{1974}'), ('\u{1980}',
|
||||
'\u{19ab}'), ('\u{19b0}', '\u{19c9}'), ('\u{1a00}', '\u{1a16}'), ('\u{1a20}', '\u{1a54}'),
|
||||
('\u{1aa7}', '\u{1aa7}'), ('\u{1b05}', '\u{1b33}'), ('\u{1b45}', '\u{1b4b}'), ('\u{1b83}',
|
||||
'\u{1ba0}'), ('\u{1bae}', '\u{1baf}'), ('\u{1bba}', '\u{1be5}'), ('\u{1c00}', '\u{1c23}'),
|
||||
('\u{1c4d}', '\u{1c4f}'), ('\u{1c5a}', '\u{1c7d}'), ('\u{1c80}', '\u{1c88}'), ('\u{1ce9}',
|
||||
'\u{1cec}'), ('\u{1cee}', '\u{1cf1}'), ('\u{1cf5}', '\u{1cf6}'), ('\u{1d00}', '\u{1dbf}'),
|
||||
('\u{1e00}', '\u{1f15}'), ('\u{1f18}', '\u{1f1d}'), ('\u{1f20}', '\u{1f45}'), ('\u{1f48}',
|
||||
'\u{1f4d}'), ('\u{1f50}', '\u{1f57}'), ('\u{1f59}', '\u{1f59}'), ('\u{1f5b}', '\u{1f5b}'),
|
||||
('\u{1f5d}', '\u{1f5d}'), ('\u{1f5f}', '\u{1f7d}'), ('\u{1f80}', '\u{1fb4}'), ('\u{1fb6}',
|
||||
'\u{1fbc}'), ('\u{1fbe}', '\u{1fbe}'), ('\u{1fc2}', '\u{1fc4}'), ('\u{1fc6}', '\u{1fcc}'),
|
||||
('\u{1fd0}', '\u{1fd3}'), ('\u{1fd6}', '\u{1fdb}'), ('\u{1fe0}', '\u{1fec}'), ('\u{1ff2}',
|
||||
'\u{1ff4}'), ('\u{1ff6}', '\u{1ffc}'), ('\u{2071}', '\u{2071}'), ('\u{207f}', '\u{207f}'),
|
||||
('\u{2090}', '\u{209c}'), ('\u{2102}', '\u{2102}'), ('\u{2107}', '\u{2107}'), ('\u{210a}',
|
||||
'\u{2113}'), ('\u{2115}', '\u{2115}'), ('\u{2118}', '\u{211d}'), ('\u{2124}', '\u{2124}'),
|
||||
('\u{2126}', '\u{2126}'), ('\u{2128}', '\u{2128}'), ('\u{212a}', '\u{2139}'), ('\u{213c}',
|
||||
'\u{213f}'), ('\u{2145}', '\u{2149}'), ('\u{214e}', '\u{214e}'), ('\u{2160}', '\u{2188}'),
|
||||
('\u{2c00}', '\u{2c2e}'), ('\u{2c30}', '\u{2c5e}'), ('\u{2c60}', '\u{2ce4}'), ('\u{2ceb}',
|
||||
'\u{2cee}'), ('\u{2cf2}', '\u{2cf3}'), ('\u{2d00}', '\u{2d25}'), ('\u{2d27}', '\u{2d27}'),
|
||||
('\u{2d2d}', '\u{2d2d}'), ('\u{2d30}', '\u{2d67}'), ('\u{2d6f}', '\u{2d6f}'), ('\u{2d80}',
|
||||
'\u{2d96}'), ('\u{2da0}', '\u{2da6}'), ('\u{2da8}', '\u{2dae}'), ('\u{2db0}', '\u{2db6}'),
|
||||
('\u{2db8}', '\u{2dbe}'), ('\u{2dc0}', '\u{2dc6}'), ('\u{2dc8}', '\u{2dce}'), ('\u{2dd0}',
|
||||
'\u{2dd6}'), ('\u{2dd8}', '\u{2dde}'), ('\u{3005}', '\u{3007}'), ('\u{3021}', '\u{3029}'),
|
||||
('\u{3031}', '\u{3035}'), ('\u{3038}', '\u{303c}'), ('\u{3041}', '\u{3096}'), ('\u{309d}',
|
||||
'\u{309f}'), ('\u{30a1}', '\u{30fa}'), ('\u{30fc}', '\u{30ff}'), ('\u{3105}', '\u{312d}'),
|
||||
('\u{3131}', '\u{318e}'), ('\u{31a0}', '\u{31ba}'), ('\u{31f0}', '\u{31ff}'), ('\u{3400}',
|
||||
'\u{4db5}'), ('\u{4e00}', '\u{9fd5}'), ('\u{a000}', '\u{a48c}'), ('\u{a4d0}', '\u{a4fd}'),
|
||||
('\u{a500}', '\u{a60c}'), ('\u{a610}', '\u{a61f}'), ('\u{a62a}', '\u{a62b}'), ('\u{a640}',
|
||||
'\u{a66e}'), ('\u{a67f}', '\u{a69d}'), ('\u{a6a0}', '\u{a6ef}'), ('\u{a717}', '\u{a71f}'),
|
||||
('\u{a722}', '\u{a788}'), ('\u{a78b}', '\u{a7ae}'), ('\u{a7b0}', '\u{a7b7}'), ('\u{a7f7}',
|
||||
'\u{a801}'), ('\u{a803}', '\u{a805}'), ('\u{a807}', '\u{a80a}'), ('\u{a80c}', '\u{a822}'),
|
||||
('\u{a840}', '\u{a873}'), ('\u{a882}', '\u{a8b3}'), ('\u{a8f2}', '\u{a8f7}'), ('\u{a8fb}',
|
||||
'\u{a8fb}'), ('\u{a8fd}', '\u{a8fd}'), ('\u{a90a}', '\u{a925}'), ('\u{a930}', '\u{a946}'),
|
||||
('\u{a960}', '\u{a97c}'), ('\u{a984}', '\u{a9b2}'), ('\u{a9cf}', '\u{a9cf}'), ('\u{a9e0}',
|
||||
'\u{a9e4}'), ('\u{a9e6}', '\u{a9ef}'), ('\u{a9fa}', '\u{a9fe}'), ('\u{aa00}', '\u{aa28}'),
|
||||
('\u{aa40}', '\u{aa42}'), ('\u{aa44}', '\u{aa4b}'), ('\u{aa60}', '\u{aa76}'), ('\u{aa7a}',
|
||||
'\u{aa7a}'), ('\u{aa7e}', '\u{aaaf}'), ('\u{aab1}', '\u{aab1}'), ('\u{aab5}', '\u{aab6}'),
|
||||
('\u{aab9}', '\u{aabd}'), ('\u{aac0}', '\u{aac0}'), ('\u{aac2}', '\u{aac2}'), ('\u{aadb}',
|
||||
'\u{aadd}'), ('\u{aae0}', '\u{aaea}'), ('\u{aaf2}', '\u{aaf4}'), ('\u{ab01}', '\u{ab06}'),
|
||||
('\u{ab09}', '\u{ab0e}'), ('\u{ab11}', '\u{ab16}'), ('\u{ab20}', '\u{ab26}'), ('\u{ab28}',
|
||||
'\u{ab2e}'), ('\u{ab30}', '\u{ab5a}'), ('\u{ab5c}', '\u{ab65}'), ('\u{ab70}', '\u{abe2}'),
|
||||
('\u{ac00}', '\u{d7a3}'), ('\u{d7b0}', '\u{d7c6}'), ('\u{d7cb}', '\u{d7fb}'), ('\u{f900}',
|
||||
'\u{fa6d}'), ('\u{fa70}', '\u{fad9}'), ('\u{fb00}', '\u{fb06}'), ('\u{fb13}', '\u{fb17}'),
|
||||
('\u{fb1d}', '\u{fb1d}'), ('\u{fb1f}', '\u{fb28}'), ('\u{fb2a}', '\u{fb36}'), ('\u{fb38}',
|
||||
'\u{fb3c}'), ('\u{fb3e}', '\u{fb3e}'), ('\u{fb40}', '\u{fb41}'), ('\u{fb43}', '\u{fb44}'),
|
||||
('\u{fb46}', '\u{fbb1}'), ('\u{fbd3}', '\u{fc5d}'), ('\u{fc64}', '\u{fd3d}'), ('\u{fd50}',
|
||||
'\u{fd8f}'), ('\u{fd92}', '\u{fdc7}'), ('\u{fdf0}', '\u{fdf9}'), ('\u{fe71}', '\u{fe71}'),
|
||||
('\u{fe73}', '\u{fe73}'), ('\u{fe77}', '\u{fe77}'), ('\u{fe79}', '\u{fe79}'), ('\u{fe7b}',
|
||||
'\u{fe7b}'), ('\u{fe7d}', '\u{fe7d}'), ('\u{fe7f}', '\u{fefc}'), ('\u{ff21}', '\u{ff3a}'),
|
||||
('\u{ff41}', '\u{ff5a}'), ('\u{ff66}', '\u{ff9d}'), ('\u{ffa0}', '\u{ffbe}'), ('\u{ffc2}',
|
||||
'\u{ffc7}'), ('\u{ffca}', '\u{ffcf}'), ('\u{ffd2}', '\u{ffd7}'), ('\u{ffda}', '\u{ffdc}'),
|
||||
('\u{10000}', '\u{1000b}'), ('\u{1000d}', '\u{10026}'), ('\u{10028}', '\u{1003a}'),
|
||||
('\u{1003c}', '\u{1003d}'), ('\u{1003f}', '\u{1004d}'), ('\u{10050}', '\u{1005d}'),
|
||||
('\u{10080}', '\u{100fa}'), ('\u{10140}', '\u{10174}'), ('\u{10280}', '\u{1029c}'),
|
||||
('\u{102a0}', '\u{102d0}'), ('\u{10300}', '\u{1031f}'), ('\u{10330}', '\u{1034a}'),
|
||||
('\u{10350}', '\u{10375}'), ('\u{10380}', '\u{1039d}'), ('\u{103a0}', '\u{103c3}'),
|
||||
('\u{103c8}', '\u{103cf}'), ('\u{103d1}', '\u{103d5}'), ('\u{10400}', '\u{1049d}'),
|
||||
('\u{104b0}', '\u{104d3}'), ('\u{104d8}', '\u{104fb}'), ('\u{10500}', '\u{10527}'),
|
||||
('\u{10530}', '\u{10563}'), ('\u{10600}', '\u{10736}'), ('\u{10740}', '\u{10755}'),
|
||||
('\u{10760}', '\u{10767}'), ('\u{10800}', '\u{10805}'), ('\u{10808}', '\u{10808}'),
|
||||
('\u{1080a}', '\u{10835}'), ('\u{10837}', '\u{10838}'), ('\u{1083c}', '\u{1083c}'),
|
||||
('\u{1083f}', '\u{10855}'), ('\u{10860}', '\u{10876}'), ('\u{10880}', '\u{1089e}'),
|
||||
('\u{108e0}', '\u{108f2}'), ('\u{108f4}', '\u{108f5}'), ('\u{10900}', '\u{10915}'),
|
||||
('\u{10920}', '\u{10939}'), ('\u{10980}', '\u{109b7}'), ('\u{109be}', '\u{109bf}'),
|
||||
('\u{10a00}', '\u{10a00}'), ('\u{10a10}', '\u{10a13}'), ('\u{10a15}', '\u{10a17}'),
|
||||
('\u{10a19}', '\u{10a33}'), ('\u{10a60}', '\u{10a7c}'), ('\u{10a80}', '\u{10a9c}'),
|
||||
('\u{10ac0}', '\u{10ac7}'), ('\u{10ac9}', '\u{10ae4}'), ('\u{10b00}', '\u{10b35}'),
|
||||
('\u{10b40}', '\u{10b55}'), ('\u{10b60}', '\u{10b72}'), ('\u{10b80}', '\u{10b91}'),
|
||||
('\u{10c00}', '\u{10c48}'), ('\u{10c80}', '\u{10cb2}'), ('\u{10cc0}', '\u{10cf2}'),
|
||||
('\u{11003}', '\u{11037}'), ('\u{11083}', '\u{110af}'), ('\u{110d0}', '\u{110e8}'),
|
||||
('\u{11103}', '\u{11126}'), ('\u{11150}', '\u{11172}'), ('\u{11176}', '\u{11176}'),
|
||||
('\u{11183}', '\u{111b2}'), ('\u{111c1}', '\u{111c4}'), ('\u{111da}', '\u{111da}'),
|
||||
('\u{111dc}', '\u{111dc}'), ('\u{11200}', '\u{11211}'), ('\u{11213}', '\u{1122b}'),
|
||||
('\u{11280}', '\u{11286}'), ('\u{11288}', '\u{11288}'), ('\u{1128a}', '\u{1128d}'),
|
||||
('\u{1128f}', '\u{1129d}'), ('\u{1129f}', '\u{112a8}'), ('\u{112b0}', '\u{112de}'),
|
||||
('\u{11305}', '\u{1130c}'), ('\u{1130f}', '\u{11310}'), ('\u{11313}', '\u{11328}'),
|
||||
('\u{1132a}', '\u{11330}'), ('\u{11332}', '\u{11333}'), ('\u{11335}', '\u{11339}'),
|
||||
('\u{1133d}', '\u{1133d}'), ('\u{11350}', '\u{11350}'), ('\u{1135d}', '\u{11361}'),
|
||||
('\u{11400}', '\u{11434}'), ('\u{11447}', '\u{1144a}'), ('\u{11480}', '\u{114af}'),
|
||||
('\u{114c4}', '\u{114c5}'), ('\u{114c7}', '\u{114c7}'), ('\u{11580}', '\u{115ae}'),
|
||||
('\u{115d8}', '\u{115db}'), ('\u{11600}', '\u{1162f}'), ('\u{11644}', '\u{11644}'),
|
||||
('\u{11680}', '\u{116aa}'), ('\u{116b8}', '\u{116b8}'), ('\u{11700}', '\u{1171a}'),
|
||||
('\u{11800}', '\u{1182b}'), ('\u{118a0}', '\u{118df}'), ('\u{118ff}', '\u{118ff}'),
|
||||
('\u{119a0}', '\u{119a7}'), ('\u{119aa}', '\u{119d0}'), ('\u{119e1}', '\u{119e1}'),
|
||||
('\u{119e3}', '\u{119e3}'), ('\u{11a00}', '\u{11a00}'), ('\u{11a0b}', '\u{11a32}'),
|
||||
('\u{11a3a}', '\u{11a3a}'), ('\u{11a50}', '\u{11a50}'), ('\u{11a5c}', '\u{11a89}'),
|
||||
('\u{11a9d}', '\u{11a9d}'), ('\u{11ac0}', '\u{11af8}'), ('\u{11c00}', '\u{11c08}'),
|
||||
('\u{11680}', '\u{116aa}'), ('\u{11700}', '\u{11719}'), ('\u{118a0}', '\u{118df}'),
|
||||
('\u{118ff}', '\u{118ff}'), ('\u{11ac0}', '\u{11af8}'), ('\u{11c00}', '\u{11c08}'),
|
||||
('\u{11c0a}', '\u{11c2e}'), ('\u{11c40}', '\u{11c40}'), ('\u{11c72}', '\u{11c8f}'),
|
||||
('\u{11d00}', '\u{11d06}'), ('\u{11d08}', '\u{11d09}'), ('\u{11d0b}', '\u{11d30}'),
|
||||
('\u{11d46}', '\u{11d46}'), ('\u{11d60}', '\u{11d65}'), ('\u{11d67}', '\u{11d68}'),
|
||||
('\u{11d6a}', '\u{11d89}'), ('\u{11d98}', '\u{11d98}'), ('\u{11ee0}', '\u{11ef2}'),
|
||||
('\u{12000}', '\u{12399}'), ('\u{12400}', '\u{1246e}'), ('\u{12480}', '\u{12543}'),
|
||||
('\u{13000}', '\u{1342e}'), ('\u{14400}', '\u{14646}'), ('\u{16800}', '\u{16a38}'),
|
||||
('\u{16a40}', '\u{16a5e}'), ('\u{16ad0}', '\u{16aed}'), ('\u{16b00}', '\u{16b2f}'),
|
||||
('\u{16b40}', '\u{16b43}'), ('\u{16b63}', '\u{16b77}'), ('\u{16b7d}', '\u{16b8f}'),
|
||||
('\u{16e40}', '\u{16e7f}'), ('\u{16f00}', '\u{16f4a}'), ('\u{16f50}', '\u{16f50}'),
|
||||
('\u{16f93}', '\u{16f9f}'), ('\u{16fe0}', '\u{16fe1}'), ('\u{16fe3}', '\u{16fe3}'),
|
||||
('\u{17000}', '\u{187f7}'), ('\u{18800}', '\u{18af2}'), ('\u{1b000}', '\u{1b11e}'),
|
||||
('\u{1b150}', '\u{1b152}'), ('\u{1b164}', '\u{1b167}'), ('\u{1b170}', '\u{1b2fb}'),
|
||||
('\u{1bc00}', '\u{1bc6a}'), ('\u{1bc70}', '\u{1bc7c}'), ('\u{1bc80}', '\u{1bc88}'),
|
||||
('\u{1bc90}', '\u{1bc99}'), ('\u{1d400}', '\u{1d454}'), ('\u{1d456}', '\u{1d49c}'),
|
||||
('\u{1d49e}', '\u{1d49f}'), ('\u{1d4a2}', '\u{1d4a2}'), ('\u{1d4a5}', '\u{1d4a6}'),
|
||||
('\u{1d4a9}', '\u{1d4ac}'), ('\u{1d4ae}', '\u{1d4b9}'), ('\u{1d4bb}', '\u{1d4bb}'),
|
||||
('\u{1d4bd}', '\u{1d4c3}'), ('\u{1d4c5}', '\u{1d505}'), ('\u{1d507}', '\u{1d50a}'),
|
||||
('\u{1d50d}', '\u{1d514}'), ('\u{1d516}', '\u{1d51c}'), ('\u{1d51e}', '\u{1d539}'),
|
||||
('\u{1d53b}', '\u{1d53e}'), ('\u{1d540}', '\u{1d544}'), ('\u{1d546}', '\u{1d546}'),
|
||||
('\u{1d54a}', '\u{1d550}'), ('\u{1d552}', '\u{1d6a5}'), ('\u{1d6a8}', '\u{1d6c0}'),
|
||||
('\u{1d6c2}', '\u{1d6da}'), ('\u{1d6dc}', '\u{1d6fa}'), ('\u{1d6fc}', '\u{1d714}'),
|
||||
('\u{1d716}', '\u{1d734}'), ('\u{1d736}', '\u{1d74e}'), ('\u{1d750}', '\u{1d76e}'),
|
||||
('\u{1d770}', '\u{1d788}'), ('\u{1d78a}', '\u{1d7a8}'), ('\u{1d7aa}', '\u{1d7c2}'),
|
||||
('\u{1d7c4}', '\u{1d7cb}'), ('\u{1e100}', '\u{1e12c}'), ('\u{1e137}', '\u{1e13d}'),
|
||||
('\u{1e14e}', '\u{1e14e}'), ('\u{1e2c0}', '\u{1e2eb}'), ('\u{1e800}', '\u{1e8c4}'),
|
||||
('\u{1e900}', '\u{1e943}'), ('\u{1e94b}', '\u{1e94b}'), ('\u{1ee00}', '\u{1ee03}'),
|
||||
('\u{1ee05}', '\u{1ee1f}'), ('\u{1ee21}', '\u{1ee22}'), ('\u{1ee24}', '\u{1ee24}'),
|
||||
('\u{1ee27}', '\u{1ee27}'), ('\u{1ee29}', '\u{1ee32}'), ('\u{1ee34}', '\u{1ee37}'),
|
||||
('\u{1ee39}', '\u{1ee39}'), ('\u{1ee3b}', '\u{1ee3b}'), ('\u{1ee42}', '\u{1ee42}'),
|
||||
('\u{1ee47}', '\u{1ee47}'), ('\u{1ee49}', '\u{1ee49}'), ('\u{1ee4b}', '\u{1ee4b}'),
|
||||
('\u{1ee4d}', '\u{1ee4f}'), ('\u{1ee51}', '\u{1ee52}'), ('\u{1ee54}', '\u{1ee54}'),
|
||||
('\u{1ee57}', '\u{1ee57}'), ('\u{1ee59}', '\u{1ee59}'), ('\u{1ee5b}', '\u{1ee5b}'),
|
||||
('\u{1ee5d}', '\u{1ee5d}'), ('\u{1ee5f}', '\u{1ee5f}'), ('\u{1ee61}', '\u{1ee62}'),
|
||||
('\u{1ee64}', '\u{1ee64}'), ('\u{1ee67}', '\u{1ee6a}'), ('\u{1ee6c}', '\u{1ee72}'),
|
||||
('\u{1ee74}', '\u{1ee77}'), ('\u{1ee79}', '\u{1ee7c}'), ('\u{1ee7e}', '\u{1ee7e}'),
|
||||
('\u{1ee80}', '\u{1ee89}'), ('\u{1ee8b}', '\u{1ee9b}'), ('\u{1eea1}', '\u{1eea3}'),
|
||||
('\u{1eea5}', '\u{1eea9}'), ('\u{1eeab}', '\u{1eebb}'), ('\u{20000}', '\u{2a6d6}'),
|
||||
('\u{2a700}', '\u{2b734}'), ('\u{2b740}', '\u{2b81d}'), ('\u{2b820}', '\u{2cea1}'),
|
||||
('\u{2ceb0}', '\u{2ebe0}'), ('\u{2f800}', '\u{2fa1d}')
|
||||
('\u{16f00}', '\u{16f44}'), ('\u{16f50}', '\u{16f50}'), ('\u{16f93}', '\u{16f9f}'),
|
||||
('\u{16fe0}', '\u{16fe0}'), ('\u{17000}', '\u{187ec}'), ('\u{18800}', '\u{18af2}'),
|
||||
('\u{1b000}', '\u{1b001}'), ('\u{1bc00}', '\u{1bc6a}'), ('\u{1bc70}', '\u{1bc7c}'),
|
||||
('\u{1bc80}', '\u{1bc88}'), ('\u{1bc90}', '\u{1bc99}'), ('\u{1d400}', '\u{1d454}'),
|
||||
('\u{1d456}', '\u{1d49c}'), ('\u{1d49e}', '\u{1d49f}'), ('\u{1d4a2}', '\u{1d4a2}'),
|
||||
('\u{1d4a5}', '\u{1d4a6}'), ('\u{1d4a9}', '\u{1d4ac}'), ('\u{1d4ae}', '\u{1d4b9}'),
|
||||
('\u{1d4bb}', '\u{1d4bb}'), ('\u{1d4bd}', '\u{1d4c3}'), ('\u{1d4c5}', '\u{1d505}'),
|
||||
('\u{1d507}', '\u{1d50a}'), ('\u{1d50d}', '\u{1d514}'), ('\u{1d516}', '\u{1d51c}'),
|
||||
('\u{1d51e}', '\u{1d539}'), ('\u{1d53b}', '\u{1d53e}'), ('\u{1d540}', '\u{1d544}'),
|
||||
('\u{1d546}', '\u{1d546}'), ('\u{1d54a}', '\u{1d550}'), ('\u{1d552}', '\u{1d6a5}'),
|
||||
('\u{1d6a8}', '\u{1d6c0}'), ('\u{1d6c2}', '\u{1d6da}'), ('\u{1d6dc}', '\u{1d6fa}'),
|
||||
('\u{1d6fc}', '\u{1d714}'), ('\u{1d716}', '\u{1d734}'), ('\u{1d736}', '\u{1d74e}'),
|
||||
('\u{1d750}', '\u{1d76e}'), ('\u{1d770}', '\u{1d788}'), ('\u{1d78a}', '\u{1d7a8}'),
|
||||
('\u{1d7aa}', '\u{1d7c2}'), ('\u{1d7c4}', '\u{1d7cb}'), ('\u{1e800}', '\u{1e8c4}'),
|
||||
('\u{1e900}', '\u{1e943}'), ('\u{1ee00}', '\u{1ee03}'), ('\u{1ee05}', '\u{1ee1f}'),
|
||||
('\u{1ee21}', '\u{1ee22}'), ('\u{1ee24}', '\u{1ee24}'), ('\u{1ee27}', '\u{1ee27}'),
|
||||
('\u{1ee29}', '\u{1ee32}'), ('\u{1ee34}', '\u{1ee37}'), ('\u{1ee39}', '\u{1ee39}'),
|
||||
('\u{1ee3b}', '\u{1ee3b}'), ('\u{1ee42}', '\u{1ee42}'), ('\u{1ee47}', '\u{1ee47}'),
|
||||
('\u{1ee49}', '\u{1ee49}'), ('\u{1ee4b}', '\u{1ee4b}'), ('\u{1ee4d}', '\u{1ee4f}'),
|
||||
('\u{1ee51}', '\u{1ee52}'), ('\u{1ee54}', '\u{1ee54}'), ('\u{1ee57}', '\u{1ee57}'),
|
||||
('\u{1ee59}', '\u{1ee59}'), ('\u{1ee5b}', '\u{1ee5b}'), ('\u{1ee5d}', '\u{1ee5d}'),
|
||||
('\u{1ee5f}', '\u{1ee5f}'), ('\u{1ee61}', '\u{1ee62}'), ('\u{1ee64}', '\u{1ee64}'),
|
||||
('\u{1ee67}', '\u{1ee6a}'), ('\u{1ee6c}', '\u{1ee72}'), ('\u{1ee74}', '\u{1ee77}'),
|
||||
('\u{1ee79}', '\u{1ee7c}'), ('\u{1ee7e}', '\u{1ee7e}'), ('\u{1ee80}', '\u{1ee89}'),
|
||||
('\u{1ee8b}', '\u{1ee9b}'), ('\u{1eea1}', '\u{1eea3}'), ('\u{1eea5}', '\u{1eea9}'),
|
||||
('\u{1eeab}', '\u{1eebb}'), ('\u{20000}', '\u{2a6d6}'), ('\u{2a700}', '\u{2b734}'),
|
||||
('\u{2b740}', '\u{2b81d}'), ('\u{2b820}', '\u{2cea1}'), ('\u{2f800}', '\u{2fa1d}')
|
||||
];
|
||||
|
||||
pub fn XID_Start(c: char) -> bool {
|
||||
|
|
|
@ -15,6 +15,8 @@ use test::Bencher;
|
|||
#[cfg(feature = "bench")]
|
||||
use std::prelude::v1::*;
|
||||
|
||||
use super::UnicodeXID;
|
||||
|
||||
#[cfg(feature = "bench")]
|
||||
#[bench]
|
||||
fn cargo_is_xid_start(b: &mut Bencher) {
|
||||
|
@ -22,7 +24,7 @@ fn cargo_is_xid_start(b: &mut Bencher) {
|
|||
|
||||
b.bytes = string.len() as u64;
|
||||
b.iter(|| {
|
||||
string.chars().all(super::UnicodeXID::is_xid_start)
|
||||
string.chars().all(UnicodeXID::is_xid_start)
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -44,7 +46,7 @@ fn cargo_xid_continue(b: &mut Bencher) {
|
|||
|
||||
b.bytes = string.len() as u64;
|
||||
b.iter(|| {
|
||||
string.chars().all(super::UnicodeXID::is_xid_continue)
|
||||
string.chars().all(UnicodeXID::is_xid_continue)
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -67,7 +69,7 @@ fn test_is_xid_start() {
|
|||
];
|
||||
|
||||
for ch in &chars {
|
||||
assert!(super::UnicodeXID::is_xid_start(*ch), "{}", ch);
|
||||
assert!(UnicodeXID::is_xid_start(*ch), "{}", ch);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -81,7 +83,7 @@ fn test_is_not_xid_start() {
|
|||
];
|
||||
|
||||
for ch in &chars {
|
||||
assert!(!super::UnicodeXID::is_xid_start(*ch), "{}", ch);
|
||||
assert!(!UnicodeXID::is_xid_start(*ch), "{}", ch);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -93,7 +95,7 @@ fn test_is_xid_continue() {
|
|||
];
|
||||
|
||||
for ch in &chars {
|
||||
assert!(super::UnicodeXID::is_xid_continue(*ch), "{}", ch);
|
||||
assert!(UnicodeXID::is_xid_continue(*ch), "{}", ch);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -106,6 +108,6 @@ fn test_is_not_xid_continue() {
|
|||
];
|
||||
|
||||
for &ch in &chars {
|
||||
assert!(!super::UnicodeXID::is_xid_continue(ch), "{}", ch);
|
||||
assert!(!UnicodeXID::is_xid_continue(ch), "{}", ch);
|
||||
}
|
||||
}
|
||||
|
|
Загрузка…
Ссылка в новой задаче