зеркало из https://github.com/mozilla/gecko-dev.git
Bug 1583471: Update bindgen to 0.51.1 througout the tree; r=emilio
Differential Revision: https://phabricator.services.mozilla.com/D46962 --HG-- rename : third_party/rust/quote/tests/conditional/integer128.rs => third_party/rust/quote-0.6.11/tests/conditional/integer128.rs rename : third_party/rust/unicode-xid/scripts/unicode.py => third_party/rust/unicode-xid-0.1.0/scripts/unicode.py extra : moz-landing-system : lando
This commit is contained in:
Родитель
83f6ab778e
Коммит
e207459263
|
@ -188,7 +188,7 @@ dependencies = [
|
|||
name = "baldrdash"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"bindgen 0.51.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"bindgen 0.51.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"cranelift-codegen 0.42.0 (git+https://github.com/CraneStation/Cranelift?rev=9c6f8feb0f28f50434c0cf67f3f7c07486a42b7e)",
|
||||
"cranelift-wasm 0.42.0 (git+https://github.com/CraneStation/Cranelift?rev=9c6f8feb0f28f50434c0cf67f3f7c07486a42b7e)",
|
||||
"env_logger 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
|
@ -241,21 +241,20 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "bindgen"
|
||||
version = "0.51.0"
|
||||
version = "0.51.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"cexpr 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"cfg-if 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"clang-sys 0.28.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"fxhash 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"peeking_take_while 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"proc-macro2 0.4.27 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"quote 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"proc-macro2 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"regex 1.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"shlex 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"which 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -559,7 +558,7 @@ dependencies = [
|
|||
name = "coreaudio-sys"
|
||||
version = "0.2.2"
|
||||
dependencies = [
|
||||
"bindgen 0.51.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"bindgen 0.51.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -1473,7 +1472,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
name = "js"
|
||||
version = "0.1.4"
|
||||
dependencies = [
|
||||
"bindgen 0.51.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"bindgen 0.51.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"cmake 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"env_logger 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"glob 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
|
@ -2252,6 +2251,14 @@ dependencies = [
|
|||
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro2"
|
||||
version = "1.0.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "procedural-masquerade"
|
||||
version = "0.1.1"
|
||||
|
@ -2297,6 +2304,14 @@ dependencies = [
|
|||
"proc-macro2 0.4.27 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "quote"
|
||||
version = "1.0.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"proc-macro2 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand"
|
||||
version = "0.6.5"
|
||||
|
@ -2555,6 +2570,14 @@ name = "rustc-demangle"
|
|||
version = "0.1.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "rustc-hash"
|
||||
version = "1.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rustc_version"
|
||||
version = "0.2.3"
|
||||
|
@ -2856,7 +2879,7 @@ dependencies = [
|
|||
"app_units 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"arrayvec 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"atomic_refcell 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"bindgen 0.51.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"bindgen 0.51.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"cssparser 0.25.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
|
@ -3341,6 +3364,11 @@ name = "unicode-xid"
|
|||
version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "unicode-xid"
|
||||
version = "0.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "unreachable"
|
||||
version = "1.0.0"
|
||||
|
@ -3573,14 +3601,6 @@ dependencies = [
|
|||
"nom 4.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "which"
|
||||
version = "1.0.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "winapi"
|
||||
version = "0.2.8"
|
||||
|
@ -3763,7 +3783,7 @@ dependencies = [
|
|||
"checksum base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0b25d992356d2eb0ed82172f5248873db5560c4721f564b13cb5193bda5e668e"
|
||||
"checksum binary-space-partition 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "88ceb0d16c4fd0e42876e298d7d3ce3780dd9ebdcbe4199816a32c77e08597ff"
|
||||
"checksum bincode 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bda13183df33055cbb84b847becce220d392df502ebe7a4a78d7021771ed94d0"
|
||||
"checksum bindgen 0.51.0 (registry+https://github.com/rust-lang/crates.io-index)" = "18270cdd7065ec045a6bb4bdcd5144d14a78b3aedb3bc5111e688773ac8b9ad0"
|
||||
"checksum bindgen 0.51.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ebd71393f1ec0509b553aa012b9b58e81dadbdff7130bd3b8cba576e69b32f75"
|
||||
"checksum binjs_meta 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6c9a0da2208ceb785c1626fa8b7d250d2e5546ae230294b4a998e4f818c1768e"
|
||||
"checksum bit-vec 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f59bbe95d4e52a6398ec21238d31577f2b28a9d86807f06ca59d191d8440d0bb"
|
||||
"checksum bit_reverse 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "5e97e02db5a2899c0377f3d6031d5da8296ca2b47abef6ed699de51b9e40a28c"
|
||||
|
@ -3935,9 +3955,11 @@ dependencies = [
|
|||
"checksum podio 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e5422a1ee1bc57cc47ae717b0137314258138f38fd5f3cea083f43a9725383a0"
|
||||
"checksum precomputed-hash 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c"
|
||||
"checksum proc-macro2 0.4.27 (registry+https://github.com/rust-lang/crates.io-index)" = "4d317f9caece796be1980837fd5cb3dfec5613ebdb04ad0956deea83ce168915"
|
||||
"checksum proc-macro2 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "afdc77cc74ec70ed262262942ebb7dac3d479e9e5cfa2da1841c0806f6cdabcc"
|
||||
"checksum procedural-masquerade 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9f566249236c6ca4340f7ca78968271f0ed2b0f234007a61b66f9ecd0af09260"
|
||||
"checksum quick-error 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eda5fe9b71976e62bc81b781206aaa076401769b2143379d3eb2118388babac4"
|
||||
"checksum quote 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)" = "cdd8e04bd9c52e0342b406469d494fcb033be4bdbe5c606016defbb1681411e1"
|
||||
"checksum quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "053a8c8bcc71fcce321828dc897a98ab9760bef03a4fc36693c231e5b3216cfe"
|
||||
"checksum rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca"
|
||||
"checksum rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef"
|
||||
"checksum rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b"
|
||||
|
@ -3965,6 +3987,7 @@ dependencies = [
|
|||
"checksum rust-ini 0.10.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8a654c5bda722c699be6b0fe4c0d90de218928da5b724c3e467fc48865c37263"
|
||||
"checksum rust_cascade 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "f3fe4900d38dab1ad21a515e44687dd0711e6b0ec5b214a3b1aa8857343bcf3a"
|
||||
"checksum rustc-demangle 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "76d7ba1feafada44f2d38eed812bd2489a03c0f5abb975799251518b68848649"
|
||||
"checksum rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7540fc8b0c49f096ee9c961cda096467dce8084bec6bdca2fc83895fd9b28cb8"
|
||||
"checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"
|
||||
"checksum ryu 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)" = "fd0568787116e13c652377b6846f5931454a363a8fdf8ae50463ee40935b278b"
|
||||
"checksum safemem 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8dca453248a96cb0749e36ccdfe2b0b4e54a61bfef89fb97ec621eb8e0a93dd9"
|
||||
|
@ -4036,6 +4059,7 @@ dependencies = [
|
|||
"checksum unicode-segmentation 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "aa6024fc12ddfd1c6dbc14a80fa2324d4568849869b779f6bd37e5e4c03344d1"
|
||||
"checksum unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "bf3a113775714a22dcb774d8ea3655c53a32debae63a063acc00a91cc586245f"
|
||||
"checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc"
|
||||
"checksum unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c"
|
||||
"checksum unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56"
|
||||
"checksum url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "75b414f6c464c879d7f9babf951f23bc3743fb7313c081b2e6ca719067ea9d61"
|
||||
"checksum urlencoding 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3df3561629a8bb4c57e5a2e4c43348d9e29c7c29d9b1c4c1f47166deca8f37ed"
|
||||
|
@ -4050,7 +4074,6 @@ dependencies = [
|
|||
"checksum warp 0.1.19 (registry+https://github.com/rust-lang/crates.io-index)" = "33857527c63bc514452f885d0a57019f28139c58fef2b3566016ecc0d44e5d24"
|
||||
"checksum wasmparser 0.37.0 (registry+https://github.com/rust-lang/crates.io-index)" = "82dbea680995dad585289fd47889cf9614133ebfcc3bda95737ef8bdc9e11db6"
|
||||
"checksum weedle 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "26a4c67f132386d965390b8a734d5d10adbcd30eb5cc74bd9229af8b83f10044"
|
||||
"checksum which 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "4be6cfa54dab45266e98b5d7be2f8ce959ddd49abd141a05d52dce4b07f803bb"
|
||||
"checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
|
||||
"checksum winapi 0.3.6 (git+https://github.com/froydnj/winapi-rs?branch=aarch64)" = "<none>"
|
||||
"checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
|
||||
|
|
|
@ -7,7 +7,7 @@ license = "MPL-2.0"
|
|||
|
||||
[build-dependencies]
|
||||
env_logger = {version = "0.6", default-features = false} # disable `regex` to reduce code size
|
||||
bindgen = {version = "0.51", default-features = false} # disable `logging` to reduce code size
|
||||
bindgen = {version = "0.51.1", default-features = false} # disable `logging` to reduce code size
|
||||
cmake = "0.1"
|
||||
glob = "0.3"
|
||||
|
||||
|
|
|
@ -20,7 +20,7 @@ env_logger = "0.6"
|
|||
smallvec = { version = "0.6.6" }
|
||||
|
||||
[build-dependencies]
|
||||
bindgen = {version = "0.51", default-features = false} # disable `logging` to reduce code size
|
||||
bindgen = {version = "0.51.1", default-features = false} # disable `logging` to reduce code size
|
||||
|
||||
[features]
|
||||
default = ['cranelift-codegen/std']
|
||||
|
|
|
@ -84,7 +84,7 @@ void = "1.0.2"
|
|||
[build-dependencies]
|
||||
lazy_static = "1"
|
||||
log = "0.4"
|
||||
bindgen = {version = "0.51", optional = true, default-features = false}
|
||||
bindgen = {version = "0.51.1", optional = true, default-features = false}
|
||||
regex = {version = "1.0", optional = true}
|
||||
walkdir = "2.1.4"
|
||||
toml = {version = "0.4.5", optional = true, default-features = false}
|
||||
|
|
|
@ -1 +1 @@
|
|||
{"files":{"Cargo.toml":"a970d1a9e47f029fe3e3ce43ae4292054ad60c4490e035b86b669fcf32015624","LICENSE":"c23953d9deb0a3312dbeaf6c128a657f3591acee45067612fa68405eaa4525db","README.md":"5a1f556c6a57c0a6ccc65e19c27718e0f4b32381a8efcc80f6601b33c58c5d59","build.rs":"a9f6915c54d75f357ce32f96327bf4df53dc81a505b70831978f9dac6f43841d","csmith-fuzzing/README.md":"7107b70fedb0c0a0cadb3c439a49c1bd0119a6d38dc63b1aecc74d1942256ef2","src/callbacks.rs":"b24d7982332c6a35928f134184ddf4072fe4545a45546b97b9b0e0c1fbb77c08","src/clang.rs":"0009b8b6e3f6c33ec2776ec4cb8de2625ae8be22c9f0433c39b06bdd9fc7db4d","src/codegen/bitfield_unit.rs":"87be2d7f7774327a1608c2c4d64df869e17fb34b7efdacb4030b6794d98e40f3","src/codegen/bitfield_unit_tests.rs":"2073ac6a36e0bc9afaef5b1207966817c8fb7a1a9f6368c3b1b8f79822efbfba","src/codegen/error.rs":"2613af1d833377fd4a70719f4a09951d9d45dc9227827b9a2a938a1bcaaea2dd","src/codegen/helpers.rs":"aa0daff2061c6de31acfbd113190889e0f7ca2d2b8d4f46740bfd5832c33e4d2","src/codegen/impl_debug.rs":"f82969461d522fb758eca552ceaf189122a404cbb47fcc16008bfe52fc62aefa","src/codegen/impl_partialeq.rs":"d40d9ee2849c4d3d557b033c4d3af5e6de4a44347f67c0f016198086338811af","src/codegen/mod.rs":"ad116e2af755219bd556e8ee00ca48562f64a161d0b2c94846ae01297446ea0d","src/codegen/struct_layout.rs":"3fa5524aff82365ce292b0cc85080514c85a6dbd31bce90f001773b995dda28e","src/extra_assertions.rs":"494534bd4f18b80d89b180c8a93733e6617edcf7deac413e9a73fd6e7bc9ced7","src/features.rs":"82511f1bb8cbd04d98f57b000903b0264237962af93a72a58220437213eba1ef","src/ir/analysis/derive.rs":"325d4c1c1e6194e743f42a2316f1501b0ef852fe309f2e9cac3434825ad235f0","src/ir/analysis/has_destructor.rs":"63644f479738df35e531d3324ff892614083c3656e0747aa34d9f20dada878ec","src/ir/analysis/has_float.rs":"76162a309e4285a806755a08c687a3e7bc894a100a63da4e88584035e215b11d","src/ir/analysis/has_type_param_in_array.rs":"fdbc0af28a144c88ea2de83e6e6da5e1ffb40e3dd63fd7a708095d085bb06f94","src/ir/analysis/has_vtable.rs":"5788372d27bdbaaf0454bc17be31a54
80918bc41a8a1c4832e8c61185c07f9cd","src/ir/analysis/mod.rs":"1f218e15c19f6666512908abc853fa7ff9ca5d0fafd94f026d9e4b0ce287ec3c","src/ir/analysis/sizedness.rs":"b73865b6689d4f3546edd91909a47c329c4ae630ea97715d29bc683ae1dc17ad","src/ir/analysis/template_params.rs":"6312c008bbc80f50e72a766756c8daddea0b6eeb31ec924b83a231df931e170e","src/ir/annotations.rs":"39a5ab19f4d5dfa617577e4a0d0d2b67b5369d480c7cca4b14d172458c9843f0","src/ir/comment.rs":"c48abe01c5af0f09f583a89f1394bc6c161b40f6c8f0f600bbfe3c907b47969b","src/ir/comp.rs":"78e245835efcd22b5cc878a8a7031171116c708408bdb48b0c9284a067041e56","src/ir/context.rs":"8fd64654343295e0e4a43efe5db6f64315dcd50a5015c3d86e90aae992e2fa9f","src/ir/derive.rs":"34f9aa76b6c9c05136bb69dcd6455397faef571a567254d2c541d50a962994db","src/ir/dot.rs":"95ed2968fc3239d87892e9f1edf1ed6dd18630d949564961765967ea1d16960c","src/ir/enum_ty.rs":"7658cf68c00b1732dfa599c2d6b6a93a82de8401142591c3afe5fcb27d901a66","src/ir/function.rs":"c2feb2e26d47aa96a74af9912ada26be077e2b0c36d46fa10167da7109590500","src/ir/int.rs":"5b8d5bcedb04f39dc2d7e571bc04114b1f1e09cf294afe944c2e7879451c4378","src/ir/item.rs":"3bcdb69b793350e5744aec3577cdbb1e5068ece5220c38763cecd82dfb5e8f03","src/ir/item_kind.rs":"dbeae8c4fd0e5c9485d325aea040e056a1f2cd6d43fc927dee8fe1c0c59a7197","src/ir/layout.rs":"d49582081f5f86f7595afbe4845f38fb3b969a840b568f4a49b265e7d790bb5b","src/ir/mod.rs":"2eae90f207fad2e45957ec9287064992a419e3fc916aba84faff2ea25cbeb5ee","src/ir/module.rs":"c4d90bf38fe3672e01923734ccbdb7951ea929949d5f413a9c2aee12395a5094","src/ir/objc.rs":"758aa955a0c5d6ad82606c88a1f4cd1d93e666b71e82d43b18b1aaae96cf888a","src/ir/template.rs":"c0f8570b927dfd6a421fc4ce3094ec837a3ed936445225dbfac961e8e0842ae5","src/ir/traversal.rs":"ea751379a5aec02f93f8d2c61e18232776b1f000dbeae64b9a7195ba21a19dd6","src/ir/ty.rs":"e6771c8102b9f01b0c4b664bf1151b4773b599634a83895376ce122ca9f74f8b","src/ir/var.rs":"8bdafb6d02f2c55ae11c28d88b19fb7a65ba8466da12ff039ae4c16c790b291e","src/lib.rs":"a07ef7a3d099493555ae5a58b7b
4bf9106a978e38a23b5ff445a83b92727ab62","src/log_stubs.rs":"6dfdd908b7c6453da416cf232893768f9480e551ca4add0858ef88bf71ee6ceb","src/main.rs":"8c96cd2a051e3f09b1b87b75cd9ed77e82e889c8309ebd3e4bc782960cf63e58","src/options.rs":"5b309b225cc51e665bd42ed3e7965a7cd73d984e4455a2d76987fc42ab271ff8","src/parse.rs":"be7d13cc84fae79ec7b3aa9e77063fa475a48d74a854423e2c72d75006a25202","src/regex_set.rs":"5cb72fc3714c0d79e9e942d003349c0775fafd7cd0c9603c65f5261883bbf9cf","src/time.rs":"3b763e6fee51d0eb01228dfe28bc28a9f692aff73b2a7b90a030902e0238fca6"},"package":"18270cdd7065ec045a6bb4bdcd5144d14a78b3aedb3bc5111e688773ac8b9ad0"}
|
||||
{"files":{"Cargo.lock":"f1b56f3cb914b4ed3214d3ce87d599398b399841718fc938c1b5a309356a44ea","Cargo.toml":"a4656cdd7bd0794e6f10ba78ed3c9a82cd86bfcbec59be7731ee90984de64bde","LICENSE":"c23953d9deb0a3312dbeaf6c128a657f3591acee45067612fa68405eaa4525db","README.md":"5a1f556c6a57c0a6ccc65e19c27718e0f4b32381a8efcc80f6601b33c58c5d59","build.rs":"e1f148e01150af6a66b6af2e5d955c8b9fa092cb4697bae2bcec8a00119055ae","csmith-fuzzing/README.md":"7107b70fedb0c0a0cadb3c439a49c1bd0119a6d38dc63b1aecc74d1942256ef2","src/callbacks.rs":"82e0be9ca02e9a652af934ed546f1cedfc6db0716643123d9a5aab33b360c7d0","src/clang.rs":"66e86bfbbe872cc247cf3bc88a2155e25f587414834023515d184dc13f8f7287","src/codegen/bitfield_unit.rs":"a8fb1a2d97a99685106fcaac87d2013f79d2690d6a46ff05ad1e3629b6075664","src/codegen/bitfield_unit_tests.rs":"dd252134118450800b516e375c872e17b4c1aee63a7b8adbe5b2cd53434bbc7e","src/codegen/error.rs":"ebc9e0f50c6adc9558b76ce774346c02316500a1ebe3cbf56ed00e5e9fe3e456","src/codegen/helpers.rs":"304c9eb56ea6b2c054e1f9fefd5812b0df3a156eee5876f3051fd0b48c7aeac3","src/codegen/impl_debug.rs":"428df604b4be105e3275275e8be81e8306b940abddc1b89675f98a01bf5de8c1","src/codegen/impl_partialeq.rs":"83707f7b13501dc413c904a17163cb54af11e56138f36dfef40ce46c823200fd","src/codegen/mod.rs":"42732503dd25ed4b7924b71862f9100cf281e22f99016540da61a602c78a3650","src/codegen/struct_layout.rs":"482bab6384e65c78346de4f8d8e4d1c3b7df38250788b58bdd1f7b1c7bf70bac","src/extra_assertions.rs":"494534bd4f18b80d89b180c8a93733e6617edcf7deac413e9a73fd6e7bc9ced7","src/features.rs":"2d82f0700c22ea44e010a89c3ae857c3feaf2c85cab3fe4d0277a41a8c2841c4","src/ir/analysis/derive.rs":"2a2322f178760859cdb4b2d45d947ff213c7c684840b4ade46b7ceb34fa6705b","src/ir/analysis/has_destructor.rs":"10380d06ed03d058f10f6f6835d9b8fbebac455a1ea218780430a0ffd8d63472","src/ir/analysis/has_float.rs":"1838ba81eb05a9c3e311687e2247d561cc5093377b15ef8008257025ea56da04","src/ir/analysis/has_type_param_in_array.rs":"dddc5511a705e3a653b5e754e359637031b4862e1a1fc1e17f
711fb2fbfc1cef","src/ir/analysis/has_vtable.rs":"8da9deec23c4552ecd5b883eaa036e4f2174a5949194c333a62ef463d28dcb6a","src/ir/analysis/mod.rs":"54993cb77df1870bb12cbc6b3a243c2da942cdc967a7d21dacb430601b49b2a1","src/ir/analysis/sizedness.rs":"d0673e19add38a07680ae3a9a5e998a0b2c3917e68efb6639ffe7ea193ada1b1","src/ir/analysis/template_params.rs":"9b662b5ec99cd8409d771a16ee42df500962b0c26f0da85e430ede19cc2b17c9","src/ir/annotations.rs":"268f90fc1d40fadee329c26b183b2aaa9de98f9246fea580404ee0e626315546","src/ir/comment.rs":"31d64a49ae3d9c3c348fa2539e03306ca3a23fae429cab452e42b31ecf632145","src/ir/comp.rs":"73d5d32d70b8e62d33ad4ed6bcbb9b23273c59b5b45570b85a2357c6e1116028","src/ir/context.rs":"c30be52b22fdb489afb34426bcb2e048ae2594846b15324693dd1b71e7dc3369","src/ir/derive.rs":"e5581852eec87918901a129284b4965aefc8a19394187a8095779a084f28fabe","src/ir/dot.rs":"5da8336bf5fd8efabd784a06e0d764eb91566c19ced8ce017a24ae237f0cbe18","src/ir/enum_ty.rs":"c303f3b271d2703c2487e4afaf4b8c9b5bbedb9e1c6a8044de667c21ad8f67fb","src/ir/function.rs":"7a25a55d7f2ded1724894bd1f7ee4766a4bf5f193967bf3a2628ec604b918018","src/ir/int.rs":"68a86182743ec338d58e42203364dc7c8970cb7ec3550433ca92f0c9489b4442","src/ir/item.rs":"203fe53efb0203e0ddc3fb9fcff7b2068f80f252d249a39c137e0cc070663a49","src/ir/item_kind.rs":"7666a1ff1b8260978b790a08b4139ab56b5c65714a5652bbcec7faa7443adc36","src/ir/layout.rs":"936f96fafab34e35b622a5f9e56b0fbd2c97d2e9222470e3687f882f40db1349","src/ir/mod.rs":"713cd537434567003197a123cbae679602c715e976d22f7b23dafd0826ea4c70","src/ir/module.rs":"a26bb0ac90d4cabb0a45d9c1a42b5515c74d4c233dc084e2f85161eac12bff15","src/ir/objc.rs":"ced8242068d5daa2940469693f7277c79368019f8e30ce1e4f55d834bf24c411","src/ir/template.rs":"6c2823c9bab82ab1d70f4d643e8f4d6420be5eafcb78324fb69649e407561cec","src/ir/traversal.rs":"5ac088277f4dfe2918d81b9294aaee41fd83db8e46def66a05f89de078bf4c49","src/ir/ty.rs":"5af2b62d278c679b7c4e597263fce01113e90242e7d263b948d93bc4274dfe9a","src/ir/var.rs":"9226241b188877b6a7bea6523e1
4318a8523a6dba57c4f15809c377f87540061","src/lib.rs":"b968f8d0858e3145137a2e33c0913acf19d21f884f914bc513bc18eea1c37bf1","src/log_stubs.rs":"6dfdd908b7c6453da416cf232893768f9480e551ca4add0858ef88bf71ee6ceb","src/main.rs":"6b42a74dfd5c3bde75b7fb984a82f3b3d652abd45aa54b31a40fbda6b02ae674","src/options.rs":"f08facc9d58cb79c7ab93c9d614f13d4d3eca2b5801012da56490a790a8d8c4c","src/parse.rs":"be7d13cc84fae79ec7b3aa9e77063fa475a48d74a854423e2c72d75006a25202","src/regex_set.rs":"5cb72fc3714c0d79e9e942d003349c0775fafd7cd0c9603c65f5261883bbf9cf","src/time.rs":"8efe317e7c6b5ba8e0865ce7b49ca775ee8a02590f4241ef62f647fa3c22b68e"},"package":"ebd71393f1ec0509b553aa012b9b58e81dadbdff7130bd3b8cba576e69b32f75"}
|
|
@ -0,0 +1,413 @@
|
|||
# This file is automatically @generated by Cargo.
|
||||
# It is not intended for manual editing.
|
||||
[[package]]
|
||||
name = "aho-corasick"
|
||||
version = "0.6.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"memchr 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ansi_term"
|
||||
version = "0.11.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"winapi 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "atty"
|
||||
version = "0.2.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"termion 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bindgen"
|
||||
version = "0.51.1"
|
||||
dependencies = [
|
||||
"bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"cexpr 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"cfg-if 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"clang-sys 0.28.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"clap 2.32.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"diff 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"env_logger 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"log 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"peeking_take_while 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"proc-macro2 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"quote 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"regex 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"shlex 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"which 3.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bitflags"
|
||||
version = "1.0.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "byteorder"
|
||||
version = "1.2.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "cc"
|
||||
version = "1.0.25"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "cexpr"
|
||||
version = "0.3.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"nom 4.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cfg-if"
|
||||
version = "0.1.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "clang-sys"
|
||||
version = "0.28.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libloading 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "clap"
|
||||
version = "2.32.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"atty 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"strsim 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"textwrap 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"unicode-width 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"vec_map 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "diff"
|
||||
version = "0.1.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "env_logger"
|
||||
version = "0.6.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"atty 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"humantime 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"log 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"regex 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"termcolor 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "glob"
|
||||
version = "0.2.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "humantime"
|
||||
version = "1.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "lazy_static"
|
||||
version = "1.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"version_check 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.43"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "libloading"
|
||||
version = "0.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"cc 1.0.25 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "log"
|
||||
version = "0.4.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"cfg-if 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "memchr"
|
||||
version = "2.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"cfg-if 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"version_check 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nom"
|
||||
version = "4.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"memchr 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "peeking_take_while"
|
||||
version = "0.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro2"
|
||||
version = "1.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "quick-error"
|
||||
version = "1.2.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "quote"
|
||||
version = "1.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"proc-macro2 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "redox_syscall"
|
||||
version = "0.1.40"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "redox_termios"
|
||||
version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"redox_syscall 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex"
|
||||
version = "1.0.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"aho-corasick 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"memchr 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"regex-syntax 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"utf8-ranges 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex-syntax"
|
||||
version = "0.6.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"ucd-util 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rustc-hash"
|
||||
version = "1.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "shlex"
|
||||
version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "strsim"
|
||||
version = "0.7.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "termcolor"
|
||||
version = "1.0.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"wincolor 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "termion"
|
||||
version = "1.5.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"redox_syscall 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "textwrap"
|
||||
version = "0.10.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"unicode-width 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "thread_local"
|
||||
version = "0.3.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"lazy_static 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ucd-util"
|
||||
version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "unicode-width"
|
||||
version = "0.1.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "unicode-xid"
|
||||
version = "0.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "utf8-ranges"
|
||||
version = "1.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "vec_map"
|
||||
version = "0.8.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "version_check"
|
||||
version = "0.1.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "which"
|
||||
version = "3.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "winapi"
|
||||
version = "0.3.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "winapi-i686-pc-windows-gnu"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "winapi-util"
|
||||
version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"winapi 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "winapi-x86_64-pc-windows-gnu"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "wincolor"
|
||||
version = "1.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"winapi 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi-util 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[metadata]
|
||||
"checksum aho-corasick 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)" = "68f56c7353e5a9547cbd76ed90f7bb5ffc3ba09d4ea9bd1d8c06c8b1142eeb5a"
|
||||
"checksum ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b"
|
||||
"checksum atty 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "9a7d5b8723950951411ee34d271d99dddcc2035a16ab25310ea2c8cfd4369652"
|
||||
"checksum bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "228047a76f468627ca71776ecdebd732a3423081fcf5125585bcd7c49886ce12"
|
||||
"checksum byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "94f88df23a25417badc922ab0f5716cc1330e87f71ddd9203b3a3ccd9cedf75d"
|
||||
"checksum cc 1.0.25 (registry+https://github.com/rust-lang/crates.io-index)" = "f159dfd43363c4d08055a07703eb7a3406b0dac4d0584d96965a3262db3c9d16"
|
||||
"checksum cexpr 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8fc0086be9ca82f7fc89fc873435531cb898b86e850005850de1f820e2db6e9b"
|
||||
"checksum cfg-if 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "0c4e7bb64a8ebb0d856483e1e682ea3422f883c5f5615a90d51a2c82fe87fdd3"
|
||||
"checksum clang-sys 0.28.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4227269cec09f5f83ff160be12a1e9b0262dd1aa305302d5ba296c2ebd291055"
|
||||
"checksum clap 2.32.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b957d88f4b6a63b9d70d5f454ac8011819c6efa7727858f458ab71c756ce2d3e"
|
||||
"checksum diff 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "3c2b69f912779fbb121ceb775d74d51e915af17aaebc38d28a592843a2dd0a3a"
|
||||
"checksum env_logger 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "afb070faf94c85d17d50ca44f6ad076bce18ae92f0037d350947240a36e9d42e"
|
||||
"checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb"
|
||||
"checksum humantime 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0484fda3e7007f2a4a0d9c3a703ca38c71c54c55602ce4660c419fd32e188c9e"
|
||||
"checksum lazy_static 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ca488b89a5657b0a2ecd45b95609b3e848cf1755da332a0da46e2b2b1cb371a7"
|
||||
"checksum libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)" = "76e3a3ef172f1a0b9a9ff0dd1491ae5e6c948b94479a3021819ba7d860c8645d"
|
||||
"checksum libloading 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9c3ad660d7cb8c5822cd83d10897b0f1f1526792737a179e73896152f85b88c2"
|
||||
"checksum log 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "d4fcce5fa49cc693c312001daf1d13411c4a5283796bac1084299ea3e567113f"
|
||||
"checksum memchr 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4b3629fe9fdbff6daa6c33b90f7c08355c1aca05a3d01fa8063b822fcf185f3b"
|
||||
"checksum nom 4.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "898696750eb5c3ce5eb5afbfbe46e7f7c4e1936e19d3e97be4b7937da7b6d114"
|
||||
"checksum peeking_take_while 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099"
|
||||
"checksum proc-macro2 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "19f287c234c9b2d0308d692dee5c449c1a171167a6f8150f7cf2a49d8fd96967"
|
||||
"checksum quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9274b940887ce9addde99c4eee6b5c44cc494b182b97e73dc8ffdcb3397fd3f0"
|
||||
"checksum quote 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7ab938ebe6f1c82426b5fb82eaf10c3e3028c53deaa3fbe38f5904b37cf4d767"
|
||||
"checksum redox_syscall 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)" = "c214e91d3ecf43e9a4e41e578973adeb14b474f2bee858742d127af75a0112b1"
|
||||
"checksum redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7e891cfe48e9100a70a3b6eb652fef28920c117d366339687bd5576160db0f76"
|
||||
"checksum regex 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "2069749032ea3ec200ca51e4a31df41759190a88edca0d2d86ee8bedf7073341"
|
||||
"checksum regex-syntax 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "747ba3b235651f6e2f67dfa8bcdcd073ddb7c243cb21c442fc12395dfcac212d"
|
||||
"checksum rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7540fc8b0c49f096ee9c961cda096467dce8084bec6bdca2fc83895fd9b28cb8"
|
||||
"checksum shlex 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7fdf1b9db47230893d76faad238fd6097fd6d6a9245cd7a4d90dbd639536bbd2"
|
||||
"checksum strsim 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bb4f380125926a99e52bc279241539c018323fab05ad6368b56f93d9369ff550"
|
||||
"checksum termcolor 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "4096add70612622289f2fdcdbd5086dc81c1e2675e6ae58d6c4f62a16c6d7f2f"
|
||||
"checksum termion 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "689a3bdfaab439fd92bc87df5c4c78417d3cbe537487274e9b0b2dce76e92096"
|
||||
"checksum textwrap 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "307686869c93e71f94da64286f9a9524c0f308a9e1c87a583de8e9c9039ad3f6"
|
||||
"checksum thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c6b53e329000edc2b34dbe8545fd20e55a333362d0a321909685a19bd28c3f1b"
|
||||
"checksum ucd-util 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "fd2be2d6639d0f8fe6cdda291ad456e23629558d466e2789d2c3e9892bda285d"
|
||||
"checksum unicode-width 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "882386231c45df4700b275c7ff55b6f3698780a650026380e72dabe76fa46526"
|
||||
"checksum unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c"
|
||||
"checksum utf8-ranges 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "fd70f467df6810094968e2fce0ee1bd0e87157aceb026a8c083bcf5e25b9efe4"
|
||||
"checksum vec_map 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "05c78687fb1a80548ae3250346c3db86a80a7cdd77bda190189f2d0a0987c81a"
|
||||
"checksum version_check 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "7716c242968ee87e5542f8021178248f267f295a5c4803beae8b8b7fd9bc6051"
|
||||
"checksum which 3.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "240a31163872f7e8e49f35b42b58485e35355b07eb009d9f3686733541339a69"
|
||||
"checksum winapi 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "773ef9dcc5f24b7d850d0ff101e542ff24c3b090a9768e03ff889fdef41f00fd"
|
||||
"checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
|
||||
"checksum winapi-util 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "afc5508759c5bf4285e61feb862b6083c8480aec864fa17a81fdec6f69b461ab"
|
||||
"checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
|
||||
"checksum wincolor 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "561ed901ae465d6185fa7864d63fbd5720d0ef718366c9a4dc83cf6170d7e9ba"
|
|
@ -12,7 +12,7 @@
|
|||
|
||||
[package]
|
||||
name = "bindgen"
|
||||
version = "0.51.0"
|
||||
version = "0.51.1"
|
||||
authors = ["Jyun-Yan You <jyyou.tw@gmail.com>", "Emilio Cobos Álvarez <emilio@crisal.io>", "Nick Fitzgerald <fitzgen@gmail.com>", "The Servo project developers"]
|
||||
build = "build.rs"
|
||||
include = ["LICENSE", "README.md", "Cargo.toml", "build.rs", "src/*.rs", "src/**/*.rs"]
|
||||
|
@ -54,9 +54,6 @@ optional = true
|
|||
version = "0.6"
|
||||
optional = true
|
||||
|
||||
[dependencies.fxhash]
|
||||
version = "0.2"
|
||||
|
||||
[dependencies.lazy_static]
|
||||
version = "1"
|
||||
|
||||
|
@ -68,21 +65,26 @@ optional = true
|
|||
version = "0.1.2"
|
||||
|
||||
[dependencies.proc-macro2]
|
||||
version = "0.4"
|
||||
version = "1"
|
||||
default-features = false
|
||||
|
||||
[dependencies.quote]
|
||||
version = "0.6"
|
||||
version = "1"
|
||||
default-features = false
|
||||
|
||||
[dependencies.regex]
|
||||
version = "1.0"
|
||||
|
||||
[dependencies.rustc-hash]
|
||||
version = "1.0.1"
|
||||
|
||||
[dependencies.shlex]
|
||||
version = "0.1"
|
||||
|
||||
[dependencies.which]
|
||||
version = ">=1.0, <3.0"
|
||||
version = "3.0"
|
||||
optional = true
|
||||
default-features = false
|
||||
[dev-dependencies.clap]
|
||||
version = "2"
|
||||
|
||||
|
@ -93,7 +95,7 @@ version = "0.1"
|
|||
version = "0.1"
|
||||
|
||||
[features]
|
||||
default = ["logging", "clap"]
|
||||
default = ["logging", "clap", "which-rustfmt"]
|
||||
logging = ["env_logger", "log"]
|
||||
static = []
|
||||
testing_only_docs = []
|
||||
|
@ -102,5 +104,6 @@ testing_only_libclang_3_8 = []
|
|||
testing_only_libclang_3_9 = []
|
||||
testing_only_libclang_4 = []
|
||||
testing_only_libclang_5 = []
|
||||
which-rustfmt = ["which"]
|
||||
[badges.travis-ci]
|
||||
repository = "rust-lang/rust-bindgen"
|
||||
|
|
|
@ -7,8 +7,8 @@ mod target {
|
|||
pub fn main() {
|
||||
let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap());
|
||||
|
||||
let mut dst = File::create(Path::new(&out_dir).join("host-target.txt"))
|
||||
.unwrap();
|
||||
let mut dst =
|
||||
File::create(Path::new(&out_dir).join("host-target.txt")).unwrap();
|
||||
dst.write_all(env::var("TARGET").unwrap().as_bytes())
|
||||
.unwrap();
|
||||
}
|
||||
|
@ -24,8 +24,8 @@ mod testgen {
|
|||
|
||||
pub fn main() {
|
||||
let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap());
|
||||
let mut dst = File::create(Path::new(&out_dir).join("tests.rs"))
|
||||
.unwrap();
|
||||
let mut dst =
|
||||
File::create(Path::new(&out_dir).join("tests.rs")).unwrap();
|
||||
|
||||
let manifest_dir =
|
||||
PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());
|
||||
|
@ -57,7 +57,8 @@ mod testgen {
|
|||
"test_header!(header_{}, {:?});",
|
||||
func,
|
||||
entry.path(),
|
||||
).unwrap();
|
||||
)
|
||||
.unwrap();
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
|
|
@ -37,8 +37,7 @@ pub trait ParseCallbacks: fmt::Debug + UnwindSafe {
|
|||
|
||||
/// This will be run on every string macro. The callback can not influence the further
|
||||
/// treatment of the macro, but may use the value to generate additional code or configuration.
|
||||
fn str_macro(&self, _name: &str, _value: &[u8]) {
|
||||
}
|
||||
fn str_macro(&self, _name: &str, _value: &[u8]) {}
|
||||
|
||||
/// This function should return whether, given an enum variant
|
||||
/// name, and value, this enum variant will forcibly be a constant.
|
||||
|
|
|
@ -6,14 +6,14 @@
|
|||
|
||||
use cexpr;
|
||||
use clang_sys::*;
|
||||
use regex;
|
||||
use ir::context::BindgenContext;
|
||||
use std::{mem, ptr, slice};
|
||||
use regex;
|
||||
use std::ffi::{CStr, CString};
|
||||
use std::fmt;
|
||||
use std::hash::Hash;
|
||||
use std::hash::Hasher;
|
||||
use std::os::raw::{c_char, c_int, c_uint, c_ulong, c_longlong, c_ulonglong};
|
||||
use std::os::raw::{c_char, c_int, c_longlong, c_uint, c_ulong, c_ulonglong};
|
||||
use std::{mem, ptr, slice};
|
||||
|
||||
/// A cursor into the Clang AST, pointing to an AST node.
|
||||
///
|
||||
|
@ -43,7 +43,11 @@ impl Cursor {
|
|||
/// The USR can be used to compare entities across translation units.
|
||||
pub fn usr(&self) -> Option<String> {
|
||||
let s = unsafe { cxstring_into_string(clang_getCursorUSR(self.x)) };
|
||||
if s.is_empty() { None } else { Some(s) }
|
||||
if s.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(s)
|
||||
}
|
||||
}
|
||||
|
||||
/// Is this cursor's referent a declaration?
|
||||
|
@ -210,8 +214,9 @@ impl Cursor {
|
|||
|
||||
while semantic_parent.is_some() &&
|
||||
(semantic_parent.unwrap().kind() == CXCursor_Namespace ||
|
||||
semantic_parent.unwrap().kind() == CXCursor_NamespaceAlias ||
|
||||
semantic_parent.unwrap().kind() == CXCursor_NamespaceRef)
|
||||
semantic_parent.unwrap().kind() ==
|
||||
CXCursor_NamespaceAlias ||
|
||||
semantic_parent.unwrap().kind() == CXCursor_NamespaceRef)
|
||||
{
|
||||
semantic_parent =
|
||||
semantic_parent.unwrap().fallible_semantic_parent();
|
||||
|
@ -300,7 +305,11 @@ impl Cursor {
|
|||
let s = unsafe {
|
||||
cxstring_into_string(clang_Cursor_getRawCommentText(self.x))
|
||||
};
|
||||
if s.is_empty() { None } else { Some(s) }
|
||||
if s.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(s)
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the referent's parsed comment.
|
||||
|
@ -346,7 +355,11 @@ impl Cursor {
|
|||
x: clang_getCursorReferenced(self.x),
|
||||
};
|
||||
|
||||
if ret.is_valid() { Some(ret) } else { None }
|
||||
if ret.is_valid() {
|
||||
Some(ret)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -371,7 +384,11 @@ impl Cursor {
|
|||
let ret = Cursor {
|
||||
x: clang_getSpecializedCursorTemplate(self.x),
|
||||
};
|
||||
if ret.is_valid() { Some(ret) } else { None }
|
||||
if ret.is_valid() {
|
||||
Some(ret)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -438,11 +455,13 @@ impl Cursor {
|
|||
pub fn contains_cursor(&self, kind: CXCursorKind) -> bool {
|
||||
let mut found = false;
|
||||
|
||||
self.visit(|c| if c.kind() == kind {
|
||||
found = true;
|
||||
CXChildVisit_Break
|
||||
} else {
|
||||
CXChildVisit_Continue
|
||||
self.visit(|c| {
|
||||
if c.kind() == kind {
|
||||
found = true;
|
||||
CXChildVisit_Break
|
||||
} else {
|
||||
CXChildVisit_Continue
|
||||
}
|
||||
});
|
||||
|
||||
found
|
||||
|
@ -459,7 +478,11 @@ impl Cursor {
|
|||
pub fn bit_width(&self) -> Option<u32> {
|
||||
unsafe {
|
||||
let w = clang_getFieldDeclBitWidth(self.x);
|
||||
if w == -1 { None } else { Some(w as u32) }
|
||||
if w == -1 {
|
||||
None
|
||||
} else {
|
||||
Some(w as u32)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -470,7 +493,11 @@ impl Cursor {
|
|||
let t = Type {
|
||||
x: clang_getEnumDeclIntegerType(self.x),
|
||||
};
|
||||
if t.is_valid() { Some(t) } else { None }
|
||||
if t.is_valid() {
|
||||
Some(t)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -509,7 +536,8 @@ impl Cursor {
|
|||
self.visit(|cur| {
|
||||
if cur.kind() == CXCursor_UnexposedAttr {
|
||||
found_attr = cur.tokens().iter().any(|t| {
|
||||
t.kind == CXToken_Identifier && t.spelling() == attr.as_bytes()
|
||||
t.kind == CXToken_Identifier &&
|
||||
t.spelling() == attr.as_bytes()
|
||||
});
|
||||
|
||||
if found_attr {
|
||||
|
@ -530,7 +558,11 @@ impl Cursor {
|
|||
x: unsafe { clang_getTypedefDeclUnderlyingType(self.x) },
|
||||
};
|
||||
|
||||
if inner.is_valid() { Some(inner) } else { None }
|
||||
if inner.is_valid() {
|
||||
Some(inner)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the linkage kind for this cursor's referent.
|
||||
|
@ -559,12 +591,11 @@ impl Cursor {
|
|||
// CXCursor_FunctionDecl |
|
||||
// CXCursor_CXXMethod => {
|
||||
self.num_args().ok().map(|num| {
|
||||
(0..num).map(|i| {
|
||||
Cursor {
|
||||
(0..num)
|
||||
.map(|i| Cursor {
|
||||
x: unsafe { clang_Cursor_getArgument(self.x, i as c_uint) },
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
})
|
||||
.collect()
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -576,7 +607,11 @@ impl Cursor {
|
|||
pub fn num_args(&self) -> Result<u32, ()> {
|
||||
unsafe {
|
||||
let w = clang_Cursor_getNumArguments(self.x);
|
||||
if w == -1 { Err(()) } else { Ok(w as u32) }
|
||||
if w == -1 {
|
||||
Err(())
|
||||
} else {
|
||||
Ok(w as u32)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -642,7 +677,11 @@ impl Cursor {
|
|||
let rt = Type {
|
||||
x: unsafe { clang_getCursorResultType(self.x) },
|
||||
};
|
||||
if rt.is_valid() { Some(rt) } else { None }
|
||||
if rt.is_valid() {
|
||||
Some(rt)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Gets the tokens that correspond to that cursor.
|
||||
|
@ -654,26 +693,29 @@ impl Cursor {
|
|||
pub fn cexpr_tokens(self) -> Vec<cexpr::token::Token> {
|
||||
use cexpr::token;
|
||||
|
||||
self.tokens().iter().filter_map(|token| {
|
||||
let kind = match token.kind {
|
||||
CXToken_Punctuation => token::Kind::Punctuation,
|
||||
CXToken_Literal => token::Kind::Literal,
|
||||
CXToken_Identifier => token::Kind::Identifier,
|
||||
CXToken_Keyword => token::Kind::Keyword,
|
||||
// NB: cexpr is not too happy about comments inside
|
||||
// expressions, so we strip them down here.
|
||||
CXToken_Comment => return None,
|
||||
_ => {
|
||||
error!("Found unexpected token kind: {:?}", token);
|
||||
return None;
|
||||
}
|
||||
};
|
||||
self.tokens()
|
||||
.iter()
|
||||
.filter_map(|token| {
|
||||
let kind = match token.kind {
|
||||
CXToken_Punctuation => token::Kind::Punctuation,
|
||||
CXToken_Literal => token::Kind::Literal,
|
||||
CXToken_Identifier => token::Kind::Identifier,
|
||||
CXToken_Keyword => token::Kind::Keyword,
|
||||
// NB: cexpr is not too happy about comments inside
|
||||
// expressions, so we strip them down here.
|
||||
CXToken_Comment => return None,
|
||||
_ => {
|
||||
error!("Found unexpected token kind: {:?}", token);
|
||||
return None;
|
||||
}
|
||||
};
|
||||
|
||||
Some(token::Token {
|
||||
kind,
|
||||
raw: token.spelling().to_vec().into_boxed_slice(),
|
||||
Some(token::Token {
|
||||
kind,
|
||||
raw: token.spelling().to_vec().into_boxed_slice(),
|
||||
})
|
||||
})
|
||||
}).collect()
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -690,11 +732,14 @@ impl<'a> RawTokens<'a> {
|
|||
let mut tokens = ptr::null_mut();
|
||||
let mut token_count = 0;
|
||||
let range = cursor.extent();
|
||||
let tu = unsafe {
|
||||
clang_Cursor_getTranslationUnit(cursor.x)
|
||||
};
|
||||
let tu = unsafe { clang_Cursor_getTranslationUnit(cursor.x) };
|
||||
unsafe { clang_tokenize(tu, range, &mut tokens, &mut token_count) };
|
||||
Self { cursor, tu, tokens, token_count }
|
||||
Self {
|
||||
cursor,
|
||||
tu,
|
||||
tokens,
|
||||
token_count,
|
||||
}
|
||||
}
|
||||
|
||||
fn as_slice(&self) -> &[CXToken] {
|
||||
|
@ -717,7 +762,11 @@ impl<'a> Drop for RawTokens<'a> {
|
|||
fn drop(&mut self) {
|
||||
if !self.tokens.is_null() {
|
||||
unsafe {
|
||||
clang_disposeTokens(self.tu, self.tokens, self.token_count as c_uint);
|
||||
clang_disposeTokens(
|
||||
self.tu,
|
||||
self.tokens,
|
||||
self.token_count as c_uint,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -790,9 +839,7 @@ where
|
|||
Visitor: FnMut(Cursor) -> CXChildVisitResult,
|
||||
{
|
||||
let func: &mut Visitor = unsafe { mem::transmute(data) };
|
||||
let child = Cursor {
|
||||
x: cur,
|
||||
};
|
||||
let child = Cursor { x: cur };
|
||||
|
||||
(*func)(child)
|
||||
}
|
||||
|
@ -942,8 +989,9 @@ impl Type {
|
|||
fn clang_size_of(&self, ctx: &BindgenContext) -> c_longlong {
|
||||
match self.kind() {
|
||||
// Work-around https://bugs.llvm.org/show_bug.cgi?id=40975
|
||||
CXType_RValueReference |
|
||||
CXType_LValueReference => ctx.target_pointer_size() as c_longlong,
|
||||
CXType_RValueReference | CXType_LValueReference => {
|
||||
ctx.target_pointer_size() as c_longlong
|
||||
}
|
||||
// Work-around https://bugs.llvm.org/show_bug.cgi?id=40813
|
||||
CXType_Auto if self.is_non_deductible_auto_type() => return -6,
|
||||
_ => unsafe { clang_Type_getSizeOf(self.x) },
|
||||
|
@ -954,8 +1002,9 @@ impl Type {
|
|||
fn clang_align_of(&self, ctx: &BindgenContext) -> c_longlong {
|
||||
match self.kind() {
|
||||
// Work-around https://bugs.llvm.org/show_bug.cgi?id=40975
|
||||
CXType_RValueReference |
|
||||
CXType_LValueReference => ctx.target_pointer_size() as c_longlong,
|
||||
CXType_RValueReference | CXType_LValueReference => {
|
||||
ctx.target_pointer_size() as c_longlong
|
||||
}
|
||||
// Work-around https://bugs.llvm.org/show_bug.cgi?id=40813
|
||||
CXType_Auto if self.is_non_deductible_auto_type() => return -6,
|
||||
_ => unsafe { clang_Type_getAlignOf(self.x) },
|
||||
|
@ -966,11 +1015,18 @@ impl Type {
|
|||
/// for them.
|
||||
pub fn size(&self, ctx: &BindgenContext) -> usize {
|
||||
let val = self.clang_size_of(ctx);
|
||||
if val < 0 { 0 } else { val as usize }
|
||||
if val < 0 {
|
||||
0
|
||||
} else {
|
||||
val as usize
|
||||
}
|
||||
}
|
||||
|
||||
/// What is the size of this type?
|
||||
pub fn fallible_size(&self, ctx: &BindgenContext) -> Result<usize, LayoutError> {
|
||||
pub fn fallible_size(
|
||||
&self,
|
||||
ctx: &BindgenContext,
|
||||
) -> Result<usize, LayoutError> {
|
||||
let val = self.clang_size_of(ctx);
|
||||
if val < 0 {
|
||||
Err(LayoutError::from(val as i32))
|
||||
|
@ -983,11 +1039,18 @@ impl Type {
|
|||
/// returning `0`.
|
||||
pub fn align(&self, ctx: &BindgenContext) -> usize {
|
||||
let val = self.clang_align_of(ctx);
|
||||
if val < 0 { 0 } else { val as usize }
|
||||
if val < 0 {
|
||||
0
|
||||
} else {
|
||||
val as usize
|
||||
}
|
||||
}
|
||||
|
||||
/// What is the alignment of this type?
|
||||
pub fn fallible_align(&self, ctx: &BindgenContext) -> Result<usize, LayoutError> {
|
||||
pub fn fallible_align(
|
||||
&self,
|
||||
ctx: &BindgenContext,
|
||||
) -> Result<usize, LayoutError> {
|
||||
let val = self.clang_align_of(ctx);
|
||||
if val < 0 {
|
||||
Err(LayoutError::from(val as i32))
|
||||
|
@ -998,7 +1061,10 @@ impl Type {
|
|||
|
||||
/// Get the layout for this type, or an error describing why it does not
|
||||
/// have a valid layout.
|
||||
pub fn fallible_layout(&self, ctx: &BindgenContext) -> Result<::ir::layout::Layout, LayoutError> {
|
||||
pub fn fallible_layout(
|
||||
&self,
|
||||
ctx: &BindgenContext,
|
||||
) -> Result<::ir::layout::Layout, LayoutError> {
|
||||
use ir::layout::Layout;
|
||||
let size = self.fallible_size(ctx)?;
|
||||
let align = self.fallible_align(ctx)?;
|
||||
|
@ -1012,7 +1078,7 @@ impl Type {
|
|||
// question correctly. However, that's no reason to panic when
|
||||
// generating bindings for simple C headers with an old libclang.
|
||||
if !clang_Type_getNumTemplateArguments::is_loaded() {
|
||||
return None
|
||||
return None;
|
||||
}
|
||||
|
||||
let n = unsafe { clang_Type_getNumTemplateArguments(self.x) };
|
||||
|
@ -1027,12 +1093,10 @@ impl Type {
|
|||
/// If this type is a class template specialization, return its
|
||||
/// template arguments. Otherwise, return None.
|
||||
pub fn template_args(&self) -> Option<TypeTemplateArgIterator> {
|
||||
self.num_template_args().map(|n| {
|
||||
TypeTemplateArgIterator {
|
||||
x: self.x,
|
||||
length: n,
|
||||
index: 0,
|
||||
}
|
||||
self.num_template_args().map(|n| TypeTemplateArgIterator {
|
||||
x: self.x,
|
||||
length: n,
|
||||
index: 0,
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -1041,12 +1105,11 @@ impl Type {
|
|||
/// Returns None if the type is not a function prototype.
|
||||
pub fn args(&self) -> Option<Vec<Type>> {
|
||||
self.num_args().ok().map(|num| {
|
||||
(0..num).map(|i| {
|
||||
Type {
|
||||
(0..num)
|
||||
.map(|i| Type {
|
||||
x: unsafe { clang_getArgType(self.x, i as c_uint) },
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
})
|
||||
.collect()
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -1056,11 +1119,14 @@ impl Type {
|
|||
pub fn num_args(&self) -> Result<u32, ()> {
|
||||
unsafe {
|
||||
let w = clang_getNumArgTypes(self.x);
|
||||
if w == -1 { Err(()) } else { Ok(w as u32) }
|
||||
if w == -1 {
|
||||
Err(())
|
||||
} else {
|
||||
Ok(w as u32)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/// Given that this type is a pointer type, return the type that it points
|
||||
/// to.
|
||||
pub fn pointee_type(&self) -> Option<Type> {
|
||||
|
@ -1126,7 +1192,11 @@ impl Type {
|
|||
let rt = Type {
|
||||
x: unsafe { clang_getResultType(self.x) },
|
||||
};
|
||||
if rt.is_valid() { Some(rt) } else { None }
|
||||
if rt.is_valid() {
|
||||
Some(rt)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Given that this type is a function type, get its calling convention. If
|
||||
|
@ -1186,15 +1256,19 @@ impl Type {
|
|||
// This is terrible :(
|
||||
fn hacky_parse_associated_type<S: AsRef<str>>(spelling: S) -> bool {
|
||||
lazy_static! {
|
||||
static ref ASSOC_TYPE_RE: regex::Regex =
|
||||
regex::Regex::new(r"typename type\-parameter\-\d+\-\d+::.+").unwrap();
|
||||
static ref ASSOC_TYPE_RE: regex::Regex = regex::Regex::new(
|
||||
r"typename type\-parameter\-\d+\-\d+::.+"
|
||||
)
|
||||
.unwrap();
|
||||
}
|
||||
ASSOC_TYPE_RE.is_match(spelling.as_ref())
|
||||
}
|
||||
|
||||
self.kind() == CXType_Unexposed &&
|
||||
(hacky_parse_associated_type(self.spelling()) ||
|
||||
hacky_parse_associated_type(self.canonical_type().spelling()))
|
||||
hacky_parse_associated_type(
|
||||
self.canonical_type().spelling(),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1263,20 +1337,9 @@ impl SourceLocation {
|
|||
let mut col = 0;
|
||||
let mut off = 0;
|
||||
clang_getSpellingLocation(
|
||||
self.x,
|
||||
&mut file,
|
||||
&mut line,
|
||||
&mut col,
|
||||
&mut off,
|
||||
self.x, &mut file, &mut line, &mut col, &mut off,
|
||||
);
|
||||
(
|
||||
File {
|
||||
x: file,
|
||||
},
|
||||
line as usize,
|
||||
col as usize,
|
||||
off as usize,
|
||||
)
|
||||
(File { x: file }, line as usize, col as usize, off as usize)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1375,14 +1438,14 @@ impl Iterator for CommentAttributesIterator {
|
|||
self.index += 1;
|
||||
Some(CommentAttribute {
|
||||
name: unsafe {
|
||||
cxstring_into_string(
|
||||
clang_HTMLStartTag_getAttrName(self.x, idx),
|
||||
)
|
||||
cxstring_into_string(clang_HTMLStartTag_getAttrName(
|
||||
self.x, idx,
|
||||
))
|
||||
},
|
||||
value: unsafe {
|
||||
cxstring_into_string(
|
||||
clang_HTMLStartTag_getAttrValue(self.x, idx),
|
||||
)
|
||||
cxstring_into_string(clang_HTMLStartTag_getAttrValue(
|
||||
self.x, idx,
|
||||
))
|
||||
},
|
||||
})
|
||||
} else {
|
||||
|
@ -1508,9 +1571,7 @@ impl TranslationUnit {
|
|||
if tu.is_null() {
|
||||
None
|
||||
} else {
|
||||
Some(TranslationUnit {
|
||||
x: tu,
|
||||
})
|
||||
Some(TranslationUnit { x: tu })
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1552,7 +1613,6 @@ impl Drop for TranslationUnit {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
/// A diagnostic message generated while parsing a translation unit.
|
||||
pub struct Diagnostic {
|
||||
x: CXDiagnostic,
|
||||
|
@ -1615,8 +1675,7 @@ impl fmt::Debug for UnsavedFile {
|
|||
write!(
|
||||
fmt,
|
||||
"UnsavedFile(name: {:?}, contents: {:?})",
|
||||
self.name,
|
||||
self.contents
|
||||
self.name, self.contents
|
||||
)
|
||||
}
|
||||
}
|
||||
|
@ -1672,7 +1731,11 @@ pub fn ast_dump(c: &Cursor, depth: isize) -> CXChildVisitResult {
|
|||
if templ_kind != CXCursor_NoDeclFound {
|
||||
print_indent(
|
||||
depth,
|
||||
format!(" {}template-kind = {}", prefix, kind_to_str(templ_kind)),
|
||||
format!(
|
||||
" {}template-kind = {}",
|
||||
prefix,
|
||||
kind_to_str(templ_kind)
|
||||
),
|
||||
);
|
||||
}
|
||||
if let Some(usr) = c.usr() {
|
||||
|
@ -1769,18 +1832,18 @@ pub fn ast_dump(c: &Cursor, depth: isize) -> CXChildVisitResult {
|
|||
depth,
|
||||
format!(" {}spelling = \"{}\"", prefix, ty.spelling()),
|
||||
);
|
||||
let num_template_args = if clang_Type_getNumTemplateArguments::is_loaded() {
|
||||
unsafe { clang_Type_getNumTemplateArguments(ty.x) }
|
||||
} else {
|
||||
-1
|
||||
};
|
||||
let num_template_args =
|
||||
if clang_Type_getNumTemplateArguments::is_loaded() {
|
||||
unsafe { clang_Type_getNumTemplateArguments(ty.x) }
|
||||
} else {
|
||||
-1
|
||||
};
|
||||
if num_template_args >= 0 {
|
||||
print_indent(
|
||||
depth,
|
||||
format!(
|
||||
" {}number-of-template-args = {}",
|
||||
prefix,
|
||||
num_template_args
|
||||
prefix, num_template_args
|
||||
),
|
||||
);
|
||||
}
|
||||
|
@ -1882,7 +1945,8 @@ impl EvalResult {
|
|||
let mut found_cant_eval = false;
|
||||
cursor.visit(|c| {
|
||||
if c.kind() == CXCursor_TypeRef &&
|
||||
c.cur_type().canonical_type().kind() == CXType_Unexposed {
|
||||
c.cur_type().canonical_type().kind() == CXType_Unexposed
|
||||
{
|
||||
found_cant_eval = true;
|
||||
return CXChildVisit_Break;
|
||||
}
|
||||
|
@ -1922,7 +1986,7 @@ impl EvalResult {
|
|||
if !clang_EvalResult_isUnsignedInt::is_loaded() {
|
||||
// FIXME(emilio): There's no way to detect underflow here, and clang
|
||||
// will just happily give us a value.
|
||||
return Some(unsafe { clang_EvalResult_getAsInt(self.x) } as i64)
|
||||
return Some(unsafe { clang_EvalResult_getAsInt(self.x) } as i64);
|
||||
}
|
||||
|
||||
if unsafe { clang_EvalResult_isUnsignedInt(self.x) } != 0 {
|
||||
|
@ -1931,7 +1995,7 @@ impl EvalResult {
|
|||
return None;
|
||||
}
|
||||
|
||||
return Some(value as i64)
|
||||
return Some(value as i64);
|
||||
}
|
||||
|
||||
let value = unsafe { clang_EvalResult_getAsLongLong(self.x) };
|
||||
|
|
|
@ -1,19 +1,14 @@
|
|||
#[repr(C)]
|
||||
#[derive(Copy, Clone, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)]
|
||||
pub struct __BindgenBitfieldUnit<Storage, Align>
|
||||
{
|
||||
pub struct __BindgenBitfieldUnit<Storage, Align> {
|
||||
storage: Storage,
|
||||
align: [Align; 0],
|
||||
}
|
||||
|
||||
impl<Storage, Align> __BindgenBitfieldUnit<Storage, Align>
|
||||
{
|
||||
impl<Storage, Align> __BindgenBitfieldUnit<Storage, Align> {
|
||||
#[inline]
|
||||
pub const fn new(storage: Storage) -> Self {
|
||||
Self {
|
||||
storage,
|
||||
align: [],
|
||||
}
|
||||
Self { storage, align: [] }
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -28,12 +23,11 @@ where
|
|||
let byte_index = index / 8;
|
||||
let byte = self.storage.as_ref()[byte_index];
|
||||
|
||||
let bit_index =
|
||||
if cfg!(target_endian = "big") {
|
||||
7 - (index % 8)
|
||||
} else {
|
||||
index % 8
|
||||
};
|
||||
let bit_index = if cfg!(target_endian = "big") {
|
||||
7 - (index % 8)
|
||||
} else {
|
||||
index % 8
|
||||
};
|
||||
|
||||
let mask = 1 << bit_index;
|
||||
|
||||
|
@ -47,12 +41,11 @@ where
|
|||
let byte_index = index / 8;
|
||||
let byte = &mut self.storage.as_mut()[byte_index];
|
||||
|
||||
let bit_index =
|
||||
if cfg!(target_endian = "big") {
|
||||
7 - (index % 8)
|
||||
} else {
|
||||
index % 8
|
||||
};
|
||||
let bit_index = if cfg!(target_endian = "big") {
|
||||
7 - (index % 8)
|
||||
} else {
|
||||
index % 8
|
||||
};
|
||||
|
||||
let mask = 1 << bit_index;
|
||||
if val {
|
||||
|
@ -66,18 +59,20 @@ where
|
|||
pub fn get(&self, bit_offset: usize, bit_width: u8) -> u64 {
|
||||
debug_assert!(bit_width <= 64);
|
||||
debug_assert!(bit_offset / 8 < self.storage.as_ref().len());
|
||||
debug_assert!((bit_offset + (bit_width as usize)) / 8 <= self.storage.as_ref().len());
|
||||
debug_assert!(
|
||||
(bit_offset + (bit_width as usize)) / 8 <=
|
||||
self.storage.as_ref().len()
|
||||
);
|
||||
|
||||
let mut val = 0;
|
||||
|
||||
for i in 0..(bit_width as usize) {
|
||||
if self.get_bit(i + bit_offset) {
|
||||
let index =
|
||||
if cfg!(target_endian = "big") {
|
||||
bit_width as usize - 1 - i
|
||||
} else {
|
||||
i
|
||||
};
|
||||
let index = if cfg!(target_endian = "big") {
|
||||
bit_width as usize - 1 - i
|
||||
} else {
|
||||
i
|
||||
};
|
||||
val |= 1 << index;
|
||||
}
|
||||
}
|
||||
|
@ -89,17 +84,19 @@ where
|
|||
pub fn set(&mut self, bit_offset: usize, bit_width: u8, val: u64) {
|
||||
debug_assert!(bit_width <= 64);
|
||||
debug_assert!(bit_offset / 8 < self.storage.as_ref().len());
|
||||
debug_assert!((bit_offset + (bit_width as usize)) / 8 <= self.storage.as_ref().len());
|
||||
debug_assert!(
|
||||
(bit_offset + (bit_width as usize)) / 8 <=
|
||||
self.storage.as_ref().len()
|
||||
);
|
||||
|
||||
for i in 0..(bit_width as usize) {
|
||||
let mask = 1 << i;
|
||||
let val_bit_is_set = val & mask == mask;
|
||||
let index =
|
||||
if cfg!(target_endian = "big") {
|
||||
bit_width as usize - 1 - i
|
||||
} else {
|
||||
i
|
||||
};
|
||||
let index = if cfg!(target_endian = "big") {
|
||||
bit_width as usize - 1 - i
|
||||
} else {
|
||||
i
|
||||
};
|
||||
self.set_bit(index + bit_offset, val_bit_is_set);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -26,7 +26,8 @@ use std::mem;
|
|||
|
||||
#[test]
|
||||
fn bitfield_unit_get_bit() {
|
||||
let unit = __BindgenBitfieldUnit::<[u8; 2], u64>::new([0b10011101, 0b00011101]);
|
||||
let unit =
|
||||
__BindgenBitfieldUnit::<[u8; 2], u64>::new([0b10011101, 0b00011101]);
|
||||
|
||||
let mut bits = vec![];
|
||||
for i in 0..16 {
|
||||
|
@ -35,32 +36,21 @@ fn bitfield_unit_get_bit() {
|
|||
|
||||
println!();
|
||||
println!("bits = {:?}", bits);
|
||||
assert_eq!(bits, &[
|
||||
// 0b10011101
|
||||
true,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
true ,
|
||||
|
||||
// 0b00011101
|
||||
true,
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
false
|
||||
]);
|
||||
assert_eq!(
|
||||
bits,
|
||||
&[
|
||||
// 0b10011101
|
||||
true, false, true, true, true, false, false, true,
|
||||
// 0b00011101
|
||||
true, false, true, true, true, false, false, false
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bitfield_unit_set_bit() {
|
||||
let mut unit = __BindgenBitfieldUnit::<[u8; 2], u64>::new([0b00000000, 0b00000000]);
|
||||
let mut unit =
|
||||
__BindgenBitfieldUnit::<[u8; 2], u64>::new([0b00000000, 0b00000000]);
|
||||
|
||||
for i in 0..16 {
|
||||
if i % 3 == 0 {
|
||||
|
@ -72,7 +62,8 @@ fn bitfield_unit_set_bit() {
|
|||
assert_eq!(unit.get_bit(i), i % 3 == 0);
|
||||
}
|
||||
|
||||
let mut unit = __BindgenBitfieldUnit::<[u8; 2], u64>::new([0b11111111, 0b11111111]);
|
||||
let mut unit =
|
||||
__BindgenBitfieldUnit::<[u8; 2], u64>::new([0b11111111, 0b11111111]);
|
||||
|
||||
for i in 0..16 {
|
||||
if i % 3 == 0 {
|
||||
|
@ -87,15 +78,39 @@ fn bitfield_unit_set_bit() {
|
|||
|
||||
#[test]
|
||||
fn bitfield_unit_align() {
|
||||
assert_eq!(mem::align_of::<__BindgenBitfieldUnit<[u8; 1], u8>>(), mem::align_of::<u8>());
|
||||
assert_eq!(mem::align_of::<__BindgenBitfieldUnit<[u8; 1], u16>>(), mem::align_of::<u16>());
|
||||
assert_eq!(mem::align_of::<__BindgenBitfieldUnit<[u8; 1], u32>>(), mem::align_of::<u32>());
|
||||
assert_eq!(mem::align_of::<__BindgenBitfieldUnit<[u8; 1], u64>>(), mem::align_of::<u64>());
|
||||
assert_eq!(
|
||||
mem::align_of::<__BindgenBitfieldUnit<[u8; 1], u8>>(),
|
||||
mem::align_of::<u8>()
|
||||
);
|
||||
assert_eq!(
|
||||
mem::align_of::<__BindgenBitfieldUnit<[u8; 1], u16>>(),
|
||||
mem::align_of::<u16>()
|
||||
);
|
||||
assert_eq!(
|
||||
mem::align_of::<__BindgenBitfieldUnit<[u8; 1], u32>>(),
|
||||
mem::align_of::<u32>()
|
||||
);
|
||||
assert_eq!(
|
||||
mem::align_of::<__BindgenBitfieldUnit<[u8; 1], u64>>(),
|
||||
mem::align_of::<u64>()
|
||||
);
|
||||
|
||||
assert_eq!(mem::align_of::<__BindgenBitfieldUnit<[u8; 8], u8>>(), mem::align_of::<u8>());
|
||||
assert_eq!(mem::align_of::<__BindgenBitfieldUnit<[u8; 8], u16>>(), mem::align_of::<u16>());
|
||||
assert_eq!(mem::align_of::<__BindgenBitfieldUnit<[u8; 8], u32>>(), mem::align_of::<u32>());
|
||||
assert_eq!(mem::align_of::<__BindgenBitfieldUnit<[u8; 8], u64>>(), mem::align_of::<u64>());
|
||||
assert_eq!(
|
||||
mem::align_of::<__BindgenBitfieldUnit<[u8; 8], u8>>(),
|
||||
mem::align_of::<u8>()
|
||||
);
|
||||
assert_eq!(
|
||||
mem::align_of::<__BindgenBitfieldUnit<[u8; 8], u16>>(),
|
||||
mem::align_of::<u16>()
|
||||
);
|
||||
assert_eq!(
|
||||
mem::align_of::<__BindgenBitfieldUnit<[u8; 8], u32>>(),
|
||||
mem::align_of::<u32>()
|
||||
);
|
||||
assert_eq!(
|
||||
mem::align_of::<__BindgenBitfieldUnit<[u8; 8], u64>>(),
|
||||
mem::align_of::<u64>()
|
||||
);
|
||||
}
|
||||
|
||||
macro_rules! bitfield_unit_get {
|
||||
|
|
|
@ -20,7 +20,7 @@ impl fmt::Display for Error {
|
|||
}
|
||||
|
||||
impl error::Error for Error {
|
||||
fn cause(&self) -> Option<&error::Error> {
|
||||
fn cause(&self) -> Option<&dyn error::Error> {
|
||||
None
|
||||
}
|
||||
|
||||
|
|
|
@ -17,14 +17,20 @@ pub mod attributes {
|
|||
}
|
||||
|
||||
pub fn repr_list(which_ones: &[&str]) -> TokenStream {
|
||||
let which_ones = which_ones.iter().cloned().map(|one| TokenStream::from_str(one).expect("repr to be valid"));
|
||||
let which_ones = which_ones
|
||||
.iter()
|
||||
.cloned()
|
||||
.map(|one| TokenStream::from_str(one).expect("repr to be valid"));
|
||||
quote! {
|
||||
#[repr( #( #which_ones ),* )]
|
||||
}
|
||||
}
|
||||
|
||||
pub fn derives(which_ones: &[&str]) -> TokenStream {
|
||||
let which_ones = which_ones.iter().cloned().map(|one| Ident::new(one, Span::call_site()));
|
||||
let which_ones = which_ones
|
||||
.iter()
|
||||
.cloned()
|
||||
.map(|one| Ident::new(one, Span::call_site()));
|
||||
quote! {
|
||||
#[derive( #( #which_ones ),* )]
|
||||
}
|
||||
|
@ -97,7 +103,10 @@ pub fn blob(ctx: &BindgenContext, layout: Layout) -> TokenStream {
|
|||
}
|
||||
|
||||
/// Integer type of the same size as the given `Layout`.
|
||||
pub fn integer_type(ctx: &BindgenContext, layout: Layout) -> Option<TokenStream> {
|
||||
pub fn integer_type(
|
||||
ctx: &BindgenContext,
|
||||
layout: Layout,
|
||||
) -> Option<TokenStream> {
|
||||
let name = Layout::known_type_for_size(ctx, layout.size)?;
|
||||
let name = Ident::new(name, Span::call_site());
|
||||
Some(quote! { #name })
|
||||
|
@ -131,8 +140,8 @@ pub mod ast_ty {
|
|||
use ir::function::FunctionSig;
|
||||
use ir::layout::Layout;
|
||||
use ir::ty::FloatKind;
|
||||
use std::str::FromStr;
|
||||
use proc_macro2::{self, TokenStream};
|
||||
use std::str::FromStr;
|
||||
|
||||
pub fn raw_type(ctx: &BindgenContext, name: &str) -> TokenStream {
|
||||
let ident = ctx.rust_ident_raw(name);
|
||||
|
@ -171,7 +180,8 @@ pub mod ast_ty {
|
|||
8 => quote! { f64 },
|
||||
// TODO(emilio): If rust ever gains f128 we should
|
||||
// use it here and below.
|
||||
_ => super::integer_type(ctx, layout).unwrap_or(quote! { f64 }),
|
||||
_ => super::integer_type(ctx, layout)
|
||||
.unwrap_or(quote! { f64 }),
|
||||
}
|
||||
}
|
||||
None => {
|
||||
|
@ -219,10 +229,7 @@ pub mod ast_ty {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn float_expr(
|
||||
ctx: &BindgenContext,
|
||||
f: f64,
|
||||
) -> Result<TokenStream, ()> {
|
||||
pub fn float_expr(ctx: &BindgenContext, f: f64) -> Result<TokenStream, ()> {
|
||||
if f.is_finite() {
|
||||
let val = proc_macro2::Literal::f64_unsuffixed(f);
|
||||
|
||||
|
@ -261,17 +268,16 @@ pub mod ast_ty {
|
|||
signature
|
||||
.argument_types()
|
||||
.iter()
|
||||
.map(|&(ref name, _ty)| {
|
||||
match *name {
|
||||
Some(ref name) => {
|
||||
let name = ctx.rust_ident(name);
|
||||
quote! { #name }
|
||||
}
|
||||
None => {
|
||||
unnamed_arguments += 1;
|
||||
let name = ctx.rust_ident(format!("arg{}", unnamed_arguments));
|
||||
quote! { #name }
|
||||
}
|
||||
.map(|&(ref name, _ty)| match *name {
|
||||
Some(ref name) => {
|
||||
let name = ctx.rust_ident(name);
|
||||
quote! { #name }
|
||||
}
|
||||
None => {
|
||||
unnamed_arguments += 1;
|
||||
let name =
|
||||
ctx.rust_ident(format!("arg{}", unnamed_arguments));
|
||||
quote! { #name }
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
use ir::comp::{BitfieldUnit, CompKind, Field, FieldData, FieldMethods};
|
||||
use ir::context::BindgenContext;
|
||||
use ir::item::{HasTypeParamInArray, IsOpaque, Item, ItemCanonicalName};
|
||||
use ir::ty::{RUST_DERIVE_IN_ARRAY_LIMIT, TypeKind};
|
||||
use ir::ty::{TypeKind, RUST_DERIVE_IN_ARRAY_LIMIT};
|
||||
use proc_macro2;
|
||||
|
||||
pub fn gen_debug_impl(
|
||||
|
@ -177,9 +177,10 @@ impl<'a> ImplDebug<'a> for Item {
|
|||
TypeKind::Array(_, len) => {
|
||||
// Generics are not required to implement Debug
|
||||
if self.has_type_param_in_array(ctx) {
|
||||
Some(
|
||||
(format!("{}: Array with length {}", name, len), vec![]),
|
||||
)
|
||||
Some((
|
||||
format!("{}: Array with length {}", name, len),
|
||||
vec![],
|
||||
))
|
||||
} else if len < RUST_DERIVE_IN_ARRAY_LIMIT {
|
||||
// The simple case
|
||||
debug_print(name, quote! { #name_ident })
|
||||
|
@ -187,9 +188,7 @@ impl<'a> ImplDebug<'a> for Item {
|
|||
if ctx.options().use_core {
|
||||
// There is no String in core; reducing field visibility to avoid breaking
|
||||
// no_std setups.
|
||||
Some((
|
||||
format!("{}: [...]", name), vec![]
|
||||
))
|
||||
Some((format!("{}: [...]", name), vec![]))
|
||||
} else {
|
||||
// Let's implement our own print function
|
||||
Some((
|
||||
|
@ -209,16 +208,14 @@ impl<'a> ImplDebug<'a> for Item {
|
|||
if ctx.options().use_core {
|
||||
// There is no format! in core; reducing field visibility to avoid breaking
|
||||
// no_std setups.
|
||||
Some((
|
||||
format!("{}(...)", name), vec![]
|
||||
))
|
||||
Some((format!("{}(...)", name), vec![]))
|
||||
} else {
|
||||
let self_ids = 0..len;
|
||||
Some((
|
||||
format!("{}({{}})", name),
|
||||
vec![quote! {
|
||||
#(format!("{:?}", self.#self_ids)),*
|
||||
}]
|
||||
}],
|
||||
))
|
||||
}
|
||||
}
|
||||
|
@ -235,8 +232,9 @@ impl<'a> ImplDebug<'a> for Item {
|
|||
let inner_type = ctx.resolve_type(inner).canonical_type(ctx);
|
||||
match *inner_type.kind() {
|
||||
TypeKind::Function(ref sig)
|
||||
if !sig.function_pointers_can_derive() => {
|
||||
Some((format!("{}: FunctionPointer", name), vec![]))
|
||||
if !sig.function_pointers_can_derive() =>
|
||||
{
|
||||
Some((format!("{}: FunctionPointer", name), vec![]))
|
||||
}
|
||||
_ => debug_print(name, quote! { #name_ident }),
|
||||
}
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
|
||||
use ir::comp::{CompInfo, CompKind, Field, FieldMethods};
|
||||
use ir::context::BindgenContext;
|
||||
use ir::item::{IsOpaque, Item};
|
||||
|
@ -50,15 +49,17 @@ pub fn gen_partialeq_impl(
|
|||
let name = fd.name().unwrap();
|
||||
tokens.push(gen_field(ctx, ty_item, name));
|
||||
}
|
||||
Field::Bitfields(ref bu) => for bitfield in bu.bitfields() {
|
||||
if let Some(_) = bitfield.name() {
|
||||
let getter_name = bitfield.getter_name();
|
||||
let name_ident = ctx.rust_ident_raw(getter_name);
|
||||
tokens.push(quote! {
|
||||
self.#name_ident () == other.#name_ident ()
|
||||
});
|
||||
Field::Bitfields(ref bu) => {
|
||||
for bitfield in bu.bitfields() {
|
||||
if let Some(_) = bitfield.name() {
|
||||
let getter_name = bitfield.getter_name();
|
||||
let name_ident = ctx.rust_ident_raw(getter_name);
|
||||
tokens.push(quote! {
|
||||
self.#name_ident () == other.#name_ident ()
|
||||
});
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -70,8 +71,14 @@ pub fn gen_partialeq_impl(
|
|||
})
|
||||
}
|
||||
|
||||
fn gen_field(ctx: &BindgenContext, ty_item: &Item, name: &str) -> proc_macro2::TokenStream {
|
||||
fn quote_equals(name_ident: proc_macro2::Ident) -> proc_macro2::TokenStream {
|
||||
fn gen_field(
|
||||
ctx: &BindgenContext,
|
||||
ty_item: &Item,
|
||||
name: &str,
|
||||
) -> proc_macro2::TokenStream {
|
||||
fn quote_equals(
|
||||
name_ident: proc_macro2::Ident,
|
||||
) -> proc_macro2::TokenStream {
|
||||
quote! { self.#name_ident == other.#name_ident }
|
||||
}
|
||||
|
||||
|
@ -106,20 +113,22 @@ fn gen_field(ctx: &BindgenContext, ty_item: &Item, name: &str) -> proc_macro2::T
|
|||
}
|
||||
}
|
||||
|
||||
TypeKind::Array(_, len) => if len <= RUST_DERIVE_IN_ARRAY_LIMIT {
|
||||
quote_equals(name_ident)
|
||||
} else {
|
||||
quote! {
|
||||
&self. #name_ident [..] == &other. #name_ident [..]
|
||||
TypeKind::Array(_, len) => {
|
||||
if len <= RUST_DERIVE_IN_ARRAY_LIMIT {
|
||||
quote_equals(name_ident)
|
||||
} else {
|
||||
quote! {
|
||||
&self. #name_ident [..] == &other. #name_ident [..]
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
TypeKind::Vector(_, len) => {
|
||||
let self_ids = 0..len;
|
||||
let other_ids = 0..len;
|
||||
quote! {
|
||||
#(self.#self_ids == other.#other_ids &&)* true
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
TypeKind::ResolvedTypeRef(t) |
|
||||
TypeKind::TemplateAlias(t, _) |
|
||||
|
|
Разница между файлами не показана из-за своего большого размера
Загрузить разницу
|
@ -166,7 +166,8 @@ impl<'a> StructLayoutTracker<'a> {
|
|||
// This means that the structs in the array are super-unsafe to
|
||||
// access, since they won't be properly aligned, but there's not too
|
||||
// much we can do about it.
|
||||
if let Some(layout) = self.ctx.resolve_type(inner).layout(self.ctx) {
|
||||
if let Some(layout) = self.ctx.resolve_type(inner).layout(self.ctx)
|
||||
{
|
||||
if layout.align > self.ctx.target_pointer_size() {
|
||||
field_layout.size =
|
||||
align_to(layout.size, layout.align) * len;
|
||||
|
@ -212,7 +213,10 @@ impl<'a> StructLayoutTracker<'a> {
|
|||
if need_padding && padding_bytes != 0 {
|
||||
Some(Layout::new(
|
||||
padding_bytes,
|
||||
cmp::min(field_layout.align, self.ctx.target_pointer_size())
|
||||
cmp::min(
|
||||
field_layout.align,
|
||||
self.ctx.target_pointer_size(),
|
||||
),
|
||||
))
|
||||
} else {
|
||||
None
|
||||
|
@ -235,11 +239,13 @@ impl<'a> StructLayoutTracker<'a> {
|
|||
padding_layout.map(|layout| self.padding_field(layout))
|
||||
}
|
||||
|
||||
pub fn pad_struct(&mut self, layout: Layout) -> Option<proc_macro2::TokenStream> {
|
||||
pub fn pad_struct(
|
||||
&mut self,
|
||||
layout: Layout,
|
||||
) -> Option<proc_macro2::TokenStream> {
|
||||
debug!(
|
||||
"pad_struct:\n\tself = {:#?}\n\tlayout = {:#?}",
|
||||
self,
|
||||
layout
|
||||
self, layout
|
||||
);
|
||||
|
||||
if layout.size < self.latest_offset {
|
||||
|
@ -261,15 +267,15 @@ impl<'a> StructLayoutTracker<'a> {
|
|||
// other fields.
|
||||
if padding_bytes > 0 &&
|
||||
(padding_bytes >= layout.align ||
|
||||
(self.last_field_was_bitfield &&
|
||||
padding_bytes >=
|
||||
self.latest_field_layout.unwrap().align) ||
|
||||
layout.align > self.ctx.target_pointer_size())
|
||||
(self.last_field_was_bitfield &&
|
||||
padding_bytes >=
|
||||
self.latest_field_layout.unwrap().align) ||
|
||||
layout.align > self.ctx.target_pointer_size())
|
||||
{
|
||||
let layout = if self.is_packed {
|
||||
Layout::new(padding_bytes, 1)
|
||||
} else if self.last_field_was_bitfield ||
|
||||
layout.align > self.ctx.target_pointer_size()
|
||||
layout.align > self.ctx.target_pointer_size()
|
||||
{
|
||||
// We've already given up on alignment here.
|
||||
Layout::for_size(self.ctx, padding_bytes)
|
||||
|
@ -315,7 +321,10 @@ impl<'a> StructLayoutTracker<'a> {
|
|||
|
||||
self.padding_count += 1;
|
||||
|
||||
let padding_field_name = Ident::new(&format!("__bindgen_padding_{}", padding_count), Span::call_site());
|
||||
let padding_field_name = Ident::new(
|
||||
&format!("__bindgen_padding_{}", padding_count),
|
||||
Span::call_site(),
|
||||
);
|
||||
|
||||
self.max_field_align = cmp::max(self.max_field_align, layout.align);
|
||||
|
||||
|
@ -342,9 +351,7 @@ impl<'a> StructLayoutTracker<'a> {
|
|||
// current field alignment and the bitfield size and alignment are.
|
||||
debug!(
|
||||
"align_to_bitfield? {}: {:?} {:?}",
|
||||
self.last_field_was_bitfield,
|
||||
layout,
|
||||
new_field_layout
|
||||
self.last_field_was_bitfield, layout, new_field_layout
|
||||
);
|
||||
|
||||
// Avoid divide-by-zero errors if align is 0.
|
||||
|
|
|
@ -227,27 +227,27 @@ mod test {
|
|||
fn target_features() {
|
||||
let f_1_0 = RustFeatures::from(RustTarget::Stable_1_0);
|
||||
assert!(
|
||||
!f_1_0.untagged_union
|
||||
&& !f_1_0.associated_const
|
||||
&& !f_1_0.builtin_clone_impls
|
||||
&& !f_1_0.repr_align
|
||||
&& !f_1_0.thiscall_abi
|
||||
!f_1_0.untagged_union &&
|
||||
!f_1_0.associated_const &&
|
||||
!f_1_0.builtin_clone_impls &&
|
||||
!f_1_0.repr_align &&
|
||||
!f_1_0.thiscall_abi
|
||||
);
|
||||
let f_1_21 = RustFeatures::from(RustTarget::Stable_1_21);
|
||||
assert!(
|
||||
f_1_21.untagged_union
|
||||
&& f_1_21.associated_const
|
||||
&& f_1_21.builtin_clone_impls
|
||||
&& !f_1_21.repr_align
|
||||
&& !f_1_21.thiscall_abi
|
||||
f_1_21.untagged_union &&
|
||||
f_1_21.associated_const &&
|
||||
f_1_21.builtin_clone_impls &&
|
||||
!f_1_21.repr_align &&
|
||||
!f_1_21.thiscall_abi
|
||||
);
|
||||
let f_nightly = RustFeatures::from(RustTarget::Nightly);
|
||||
assert!(
|
||||
f_nightly.untagged_union
|
||||
&& f_nightly.associated_const
|
||||
&& f_nightly.builtin_clone_impls
|
||||
&& f_nightly.repr_align
|
||||
&& f_nightly.thiscall_abi
|
||||
f_nightly.untagged_union &&
|
||||
f_nightly.associated_const &&
|
||||
f_nightly.builtin_clone_impls &&
|
||||
f_nightly.repr_align &&
|
||||
f_nightly.thiscall_abi
|
||||
);
|
||||
}
|
||||
|
||||
|
|
|
@ -2,18 +2,18 @@
|
|||
|
||||
use std::fmt;
|
||||
|
||||
use super::{ConstrainResult, MonotoneFramework, generate_dependencies};
|
||||
use super::{generate_dependencies, ConstrainResult, MonotoneFramework};
|
||||
use ir::analysis::has_vtable::HasVtable;
|
||||
use ir::comp::CompKind;
|
||||
use ir::context::{BindgenContext, ItemId};
|
||||
use ir::derive::CanDerive;
|
||||
use ir::function::FunctionSig;
|
||||
use ir::item::{Item, IsOpaque};
|
||||
use ir::item::{IsOpaque, Item};
|
||||
use ir::template::TemplateParameters;
|
||||
use ir::traversal::{EdgeKind, Trace};
|
||||
use ir::ty::RUST_DERIVE_IN_ARRAY_LIMIT;
|
||||
use ir::ty::{TypeKind, Type};
|
||||
use {HashSet, HashMap, Entry};
|
||||
use ir::ty::{Type, TypeKind};
|
||||
use {Entry, HashMap, HashSet};
|
||||
|
||||
/// Which trait to consider when doing the `CannotDerive` analysis.
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
|
@ -110,19 +110,26 @@ impl<'ctx> CannotDerive<'ctx> {
|
|||
can_derive: CanDerive,
|
||||
) -> ConstrainResult {
|
||||
let id = id.into();
|
||||
trace!("inserting {:?} can_derive<{}>={:?}", id, self.derive_trait, can_derive);
|
||||
trace!(
|
||||
"inserting {:?} can_derive<{}>={:?}",
|
||||
id,
|
||||
self.derive_trait,
|
||||
can_derive
|
||||
);
|
||||
|
||||
if let CanDerive::Yes = can_derive {
|
||||
return ConstrainResult::Same;
|
||||
}
|
||||
|
||||
match self.can_derive.entry(id) {
|
||||
Entry::Occupied(mut entry) => if *entry.get() < can_derive {
|
||||
entry.insert(can_derive);
|
||||
ConstrainResult::Changed
|
||||
} else {
|
||||
ConstrainResult::Same
|
||||
},
|
||||
Entry::Occupied(mut entry) => {
|
||||
if *entry.get() < can_derive {
|
||||
entry.insert(can_derive);
|
||||
ConstrainResult::Changed
|
||||
} else {
|
||||
ConstrainResult::Same
|
||||
}
|
||||
}
|
||||
Entry::Vacant(entry) => {
|
||||
entry.insert(can_derive);
|
||||
ConstrainResult::Changed
|
||||
|
@ -132,41 +139,50 @@ impl<'ctx> CannotDerive<'ctx> {
|
|||
|
||||
fn constrain_type(&mut self, item: &Item, ty: &Type) -> CanDerive {
|
||||
if !self.ctx.whitelisted_items().contains(&item.id()) {
|
||||
trace!(" cannot derive {} for blacklisted type", self.derive_trait);
|
||||
trace!(
|
||||
" cannot derive {} for blacklisted type",
|
||||
self.derive_trait
|
||||
);
|
||||
return CanDerive::No;
|
||||
}
|
||||
|
||||
if self.derive_trait.not_by_name(self.ctx, &item) {
|
||||
trace!(" cannot derive {} for explicitly excluded type", self.derive_trait);
|
||||
trace!(
|
||||
" cannot derive {} for explicitly excluded type",
|
||||
self.derive_trait
|
||||
);
|
||||
return CanDerive::No;
|
||||
}
|
||||
|
||||
trace!("ty: {:?}", ty);
|
||||
if item.is_opaque(self.ctx, &()) {
|
||||
if !self.derive_trait.can_derive_union()
|
||||
&& ty.is_union()
|
||||
&& self.ctx.options().rust_features().untagged_union
|
||||
if !self.derive_trait.can_derive_union() &&
|
||||
ty.is_union() &&
|
||||
self.ctx.options().rust_features().untagged_union
|
||||
{
|
||||
trace!(
|
||||
" cannot derive {} for Rust unions", self.derive_trait
|
||||
" cannot derive {} for Rust unions",
|
||||
self.derive_trait
|
||||
);
|
||||
return CanDerive::No;
|
||||
}
|
||||
|
||||
let layout_can_derive = ty.layout(self.ctx)
|
||||
.map_or(CanDerive::Yes, |l| {
|
||||
let layout_can_derive =
|
||||
ty.layout(self.ctx).map_or(CanDerive::Yes, |l| {
|
||||
l.opaque().array_size_within_derive_limit(self.ctx)
|
||||
});
|
||||
|
||||
match layout_can_derive {
|
||||
CanDerive::Yes => {
|
||||
trace!(
|
||||
" we can trivially derive {} for the layout", self.derive_trait
|
||||
" we can trivially derive {} for the layout",
|
||||
self.derive_trait
|
||||
);
|
||||
}
|
||||
_ => {
|
||||
trace!(
|
||||
" we cannot derive {} for the layout", self.derive_trait
|
||||
" we cannot derive {} for the layout",
|
||||
self.derive_trait
|
||||
);
|
||||
}
|
||||
};
|
||||
|
@ -194,7 +210,7 @@ impl<'ctx> CannotDerive<'ctx> {
|
|||
let inner_type =
|
||||
self.ctx.resolve_type(inner).canonical_type(self.ctx);
|
||||
if let TypeKind::Function(ref sig) = *inner_type.kind() {
|
||||
return self.derive_trait.can_derive_fnptr(sig)
|
||||
return self.derive_trait.can_derive_fnptr(sig);
|
||||
} else {
|
||||
return self.derive_trait.can_derive_pointer();
|
||||
}
|
||||
|
@ -205,31 +221,34 @@ impl<'ctx> CannotDerive<'ctx> {
|
|||
|
||||
// Complex cases need more information
|
||||
TypeKind::Array(t, len) => {
|
||||
let inner_type = self.can_derive
|
||||
.get(&t.into())
|
||||
.cloned()
|
||||
.unwrap_or_default();
|
||||
let inner_type =
|
||||
self.can_derive.get(&t.into()).cloned().unwrap_or_default();
|
||||
if inner_type != CanDerive::Yes {
|
||||
trace!(
|
||||
" arrays of T for which we cannot derive {} \
|
||||
also cannot derive {}", self.derive_trait, self.derive_trait
|
||||
also cannot derive {}",
|
||||
self.derive_trait,
|
||||
self.derive_trait
|
||||
);
|
||||
return CanDerive::No;
|
||||
}
|
||||
|
||||
if len == 0 && !self.derive_trait.can_derive_incomplete_array() {
|
||||
if len == 0 && !self.derive_trait.can_derive_incomplete_array()
|
||||
{
|
||||
trace!(
|
||||
" cannot derive {} for incomplete arrays", self.derive_trait
|
||||
" cannot derive {} for incomplete arrays",
|
||||
self.derive_trait
|
||||
);
|
||||
return CanDerive::No;
|
||||
} else {
|
||||
if self.derive_trait.can_derive_large_array() {
|
||||
trace!(" array can derive {}", self.derive_trait);
|
||||
return CanDerive::Yes;
|
||||
trace!(" array can derive {}", self.derive_trait);
|
||||
return CanDerive::Yes;
|
||||
} else {
|
||||
if len <= RUST_DERIVE_IN_ARRAY_LIMIT {
|
||||
trace!(
|
||||
" array is small enough to derive {}", self.derive_trait
|
||||
" array is small enough to derive {}",
|
||||
self.derive_trait
|
||||
);
|
||||
return CanDerive::Yes;
|
||||
} else {
|
||||
|
@ -242,19 +261,19 @@ impl<'ctx> CannotDerive<'ctx> {
|
|||
}
|
||||
}
|
||||
TypeKind::Vector(t, len) => {
|
||||
let inner_type = self.can_derive
|
||||
.get(&t.into())
|
||||
.cloned()
|
||||
.unwrap_or_default();
|
||||
let inner_type =
|
||||
self.can_derive.get(&t.into()).cloned().unwrap_or_default();
|
||||
if inner_type != CanDerive::Yes {
|
||||
trace!(
|
||||
" vectors of T for which we cannot derive {} \
|
||||
also cannot derive {}", self.derive_trait, self.derive_trait
|
||||
also cannot derive {}",
|
||||
self.derive_trait,
|
||||
self.derive_trait
|
||||
);
|
||||
return CanDerive::No;
|
||||
}
|
||||
assert_ne!(len, 0, "vectors cannot have zero length");
|
||||
return self.derive_trait.can_derive_vector()
|
||||
return self.derive_trait.can_derive_vector();
|
||||
}
|
||||
|
||||
TypeKind::Comp(ref info) => {
|
||||
|
@ -263,19 +282,28 @@ impl<'ctx> CannotDerive<'ctx> {
|
|||
"The early ty.is_opaque check should have handled this case"
|
||||
);
|
||||
|
||||
if !self.derive_trait.can_derive_compound_forward_decl()
|
||||
&& info.is_forward_declaration() {
|
||||
trace!(" cannot derive {} for forward decls", self.derive_trait);
|
||||
if !self.derive_trait.can_derive_compound_forward_decl() &&
|
||||
info.is_forward_declaration()
|
||||
{
|
||||
trace!(
|
||||
" cannot derive {} for forward decls",
|
||||
self.derive_trait
|
||||
);
|
||||
return CanDerive::No;
|
||||
}
|
||||
|
||||
// NOTE: Take into account that while unions in C and C++ are copied by
|
||||
// default, the may have an explicit destructor in C++, so we can't
|
||||
// defer this check just for the union case.
|
||||
if !self.derive_trait.can_derive_compound_with_destructor()
|
||||
&& self.ctx.lookup_has_destructor(item.id().expect_type_id(self.ctx))
|
||||
if !self.derive_trait.can_derive_compound_with_destructor() &&
|
||||
self.ctx.lookup_has_destructor(
|
||||
item.id().expect_type_id(self.ctx),
|
||||
)
|
||||
{
|
||||
trace!(" comp has destructor which cannot derive {}", self.derive_trait);
|
||||
trace!(
|
||||
" comp has destructor which cannot derive {}",
|
||||
self.derive_trait
|
||||
);
|
||||
return CanDerive::No;
|
||||
}
|
||||
|
||||
|
@ -284,34 +312,39 @@ impl<'ctx> CannotDerive<'ctx> {
|
|||
if self.ctx.options().rust_features().untagged_union &&
|
||||
// https://github.com/rust-lang/rust/issues/36640
|
||||
(!info.self_template_params(self.ctx).is_empty() ||
|
||||
!item.all_template_params(self.ctx).is_empty()) {
|
||||
!item.all_template_params(self.ctx).is_empty())
|
||||
{
|
||||
trace!(
|
||||
" cannot derive {} for Rust union because issue 36640", self.derive_trait
|
||||
);
|
||||
return CanDerive::No;
|
||||
}
|
||||
// fall through to be same as non-union handling
|
||||
// fall through to be same as non-union handling
|
||||
} else {
|
||||
if self.ctx.options().rust_features().untagged_union {
|
||||
trace!(
|
||||
" cannot derive {} for Rust unions", self.derive_trait
|
||||
" cannot derive {} for Rust unions",
|
||||
self.derive_trait
|
||||
);
|
||||
return CanDerive::No;
|
||||
}
|
||||
|
||||
let layout_can_derive =
|
||||
ty.layout(self.ctx).map_or(CanDerive::Yes, |l| {
|
||||
l.opaque().array_size_within_derive_limit(self.ctx)
|
||||
l.opaque()
|
||||
.array_size_within_derive_limit(self.ctx)
|
||||
});
|
||||
match layout_can_derive {
|
||||
CanDerive::Yes => {
|
||||
trace!(
|
||||
" union layout can trivially derive {}", self.derive_trait
|
||||
" union layout can trivially derive {}",
|
||||
self.derive_trait
|
||||
);
|
||||
}
|
||||
_ => {
|
||||
trace!(
|
||||
" union layout cannot derive {}", self.derive_trait
|
||||
" union layout cannot derive {}",
|
||||
self.derive_trait
|
||||
);
|
||||
}
|
||||
};
|
||||
|
@ -319,9 +352,13 @@ impl<'ctx> CannotDerive<'ctx> {
|
|||
}
|
||||
}
|
||||
|
||||
if !self.derive_trait.can_derive_compound_with_vtable()
|
||||
&& item.has_vtable(self.ctx) {
|
||||
trace!(" cannot derive {} for comp with vtable", self.derive_trait);
|
||||
if !self.derive_trait.can_derive_compound_with_vtable() &&
|
||||
item.has_vtable(self.ctx)
|
||||
{
|
||||
trace!(
|
||||
" cannot derive {} for comp with vtable",
|
||||
self.derive_trait
|
||||
);
|
||||
return CanDerive::No;
|
||||
}
|
||||
|
||||
|
@ -348,7 +385,11 @@ impl<'ctx> CannotDerive<'ctx> {
|
|||
}
|
||||
}
|
||||
|
||||
fn constrain_join(&mut self, item: &Item, consider_edge: EdgePredicate) -> CanDerive {
|
||||
fn constrain_join(
|
||||
&mut self,
|
||||
item: &Item,
|
||||
consider_edge: EdgePredicate,
|
||||
) -> CanDerive {
|
||||
let mut candidate = None;
|
||||
|
||||
item.trace(
|
||||
|
@ -378,7 +419,10 @@ impl<'ctx> CannotDerive<'ctx> {
|
|||
);
|
||||
|
||||
if candidate.is_none() {
|
||||
trace!(" can derive {} because there are no members", self.derive_trait);
|
||||
trace!(
|
||||
" can derive {} because there are no members",
|
||||
self.derive_trait
|
||||
);
|
||||
}
|
||||
candidate.unwrap_or_default()
|
||||
}
|
||||
|
@ -389,8 +433,10 @@ impl DeriveTrait {
|
|||
match self {
|
||||
DeriveTrait::Copy => ctx.no_copy_by_name(item),
|
||||
DeriveTrait::Hash => ctx.no_hash_by_name(item),
|
||||
DeriveTrait::PartialEqOrPartialOrd => ctx.no_partialeq_by_name(item),
|
||||
_ => false
|
||||
DeriveTrait::PartialEqOrPartialOrd => {
|
||||
ctx.no_partialeq_by_name(item)
|
||||
}
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -398,17 +444,16 @@ impl DeriveTrait {
|
|||
match self {
|
||||
DeriveTrait::PartialEqOrPartialOrd => consider_edge_default,
|
||||
_ => |kind| match kind {
|
||||
EdgeKind::BaseMember |
|
||||
EdgeKind::Field => true,
|
||||
EdgeKind::BaseMember | EdgeKind::Field => true,
|
||||
_ => false,
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn consider_edge_typeref(&self) -> EdgePredicate {
|
||||
match self {
|
||||
DeriveTrait::PartialEqOrPartialOrd => consider_edge_default,
|
||||
_ => |kind| kind == EdgeKind::TypeReference
|
||||
_ => |kind| kind == EdgeKind::TypeReference,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -416,10 +461,11 @@ impl DeriveTrait {
|
|||
match self {
|
||||
DeriveTrait::PartialEqOrPartialOrd => consider_edge_default,
|
||||
_ => |kind| match kind {
|
||||
EdgeKind::TemplateArgument |
|
||||
EdgeKind::TemplateDeclaration => true,
|
||||
EdgeKind::TemplateArgument | EdgeKind::TemplateDeclaration => {
|
||||
true
|
||||
}
|
||||
_ => false,
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -460,16 +506,16 @@ impl DeriveTrait {
|
|||
|
||||
fn can_derive_incomplete_array(&self) -> bool {
|
||||
match self {
|
||||
DeriveTrait::Copy | DeriveTrait::Hash | DeriveTrait::PartialEqOrPartialOrd => false,
|
||||
DeriveTrait::Copy |
|
||||
DeriveTrait::Hash |
|
||||
DeriveTrait::PartialEqOrPartialOrd => false,
|
||||
_ => true,
|
||||
}
|
||||
}
|
||||
|
||||
fn can_derive_fnptr(&self, f: &FunctionSig) -> CanDerive {
|
||||
match (self, f.function_pointers_can_derive()) {
|
||||
(DeriveTrait::Copy, _) |
|
||||
(DeriveTrait::Default, _) |
|
||||
(_, true) => {
|
||||
(DeriveTrait::Copy, _) | (DeriveTrait::Default, _) | (_, true) => {
|
||||
trace!(" function pointer can derive {}", self);
|
||||
CanDerive::Yes
|
||||
}
|
||||
|
@ -526,21 +572,23 @@ impl DeriveTrait {
|
|||
(DeriveTrait::Default, TypeKind::ObjCSel) => {
|
||||
trace!(" types that always cannot derive Default");
|
||||
CanDerive::No
|
||||
},
|
||||
(DeriveTrait::Default, TypeKind::UnresolvedTypeRef(..)) => unreachable!(
|
||||
"Type with unresolved type ref can't reach derive default"
|
||||
),
|
||||
}
|
||||
(DeriveTrait::Default, TypeKind::UnresolvedTypeRef(..)) => {
|
||||
unreachable!(
|
||||
"Type with unresolved type ref can't reach derive default"
|
||||
)
|
||||
}
|
||||
// === Hash ===
|
||||
(DeriveTrait::Hash, TypeKind::Float(..)) |
|
||||
(DeriveTrait::Hash, TypeKind::Complex(..)) => {
|
||||
trace!(" float cannot derive Hash");
|
||||
CanDerive::No
|
||||
},
|
||||
}
|
||||
// === others ===
|
||||
_ => {
|
||||
trace!(" simple type that can always derive {}", self);
|
||||
CanDerive::Yes
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -602,9 +650,7 @@ impl<'ctx> MonotoneFramework for CannotDerive<'ctx> {
|
|||
trace!("constrain: {:?}", id);
|
||||
|
||||
if let Some(CanDerive::No) = self.can_derive.get(&id).cloned() {
|
||||
trace!(
|
||||
" already know it cannot derive {}", self.derive_trait
|
||||
);
|
||||
trace!(" already know it cannot derive {}", self.derive_trait);
|
||||
return ConstrainResult::Same;
|
||||
}
|
||||
|
||||
|
@ -614,7 +660,9 @@ impl<'ctx> MonotoneFramework for CannotDerive<'ctx> {
|
|||
let mut can_derive = self.constrain_type(item, ty);
|
||||
if let CanDerive::Yes = can_derive {
|
||||
if !self.derive_trait.can_derive_large_array() &&
|
||||
ty.layout(self.ctx).map_or(false, |l| l.align > RUST_DERIVE_IN_ARRAY_LIMIT)
|
||||
ty.layout(self.ctx).map_or(false, |l| {
|
||||
l.align > RUST_DERIVE_IN_ARRAY_LIMIT
|
||||
})
|
||||
{
|
||||
// We have to be conservative: the struct *could* have enough
|
||||
// padding that we emit an array that is longer than
|
||||
|
@ -647,12 +695,10 @@ impl<'ctx> MonotoneFramework for CannotDerive<'ctx> {
|
|||
|
||||
impl<'ctx> From<CannotDerive<'ctx>> for HashMap<ItemId, CanDerive> {
|
||||
fn from(analysis: CannotDerive<'ctx>) -> Self {
|
||||
extra_assert!(
|
||||
analysis
|
||||
.can_derive
|
||||
.values()
|
||||
.all(|v| *v != CanDerive::Yes)
|
||||
);
|
||||
extra_assert!(analysis
|
||||
.can_derive
|
||||
.values()
|
||||
.all(|v| *v != CanDerive::Yes));
|
||||
|
||||
analysis.can_derive
|
||||
}
|
||||
|
@ -662,9 +708,11 @@ impl<'ctx> From<CannotDerive<'ctx>> for HashMap<ItemId, CanDerive> {
|
|||
///
|
||||
/// Elements that are not `CanDerive::Yes` are kept in the set, so that it
|
||||
/// represents all items that cannot derive.
|
||||
pub fn as_cannot_derive_set(can_derive: HashMap<ItemId, CanDerive>) -> HashSet<ItemId> {
|
||||
pub fn as_cannot_derive_set(
|
||||
can_derive: HashMap<ItemId, CanDerive>,
|
||||
) -> HashSet<ItemId> {
|
||||
can_derive
|
||||
.into_iter()
|
||||
.filter_map(|(k, v)| if v != CanDerive::Yes { Some(k) } else { None } )
|
||||
.filter_map(|(k, v)| if v != CanDerive::Yes { Some(k) } else { None })
|
||||
.collect()
|
||||
}
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
//! Determining which types have destructors
|
||||
|
||||
use super::{ConstrainResult, MonotoneFramework, generate_dependencies};
|
||||
use super::{generate_dependencies, ConstrainResult, MonotoneFramework};
|
||||
use ir::comp::{CompKind, Field, FieldMethods};
|
||||
use ir::context::{BindgenContext, ItemId};
|
||||
use ir::traversal::EdgeKind;
|
||||
use ir::comp::{CompKind, Field, FieldMethods};
|
||||
use ir::ty::TypeKind;
|
||||
use {HashMap, HashSet};
|
||||
|
||||
|
@ -121,14 +121,14 @@ impl<'ctx> MonotoneFramework for HasDestructorAnalysis<'ctx> {
|
|||
let base_or_field_destructor =
|
||||
info.base_members().iter().any(|base| {
|
||||
self.have_destructor.contains(&base.ty.into())
|
||||
}) ||
|
||||
info.fields().iter().any(|field| {
|
||||
match *field {
|
||||
Field::DataMember(ref data) =>
|
||||
self.have_destructor.contains(&data.ty().into()),
|
||||
Field::Bitfields(_) => false
|
||||
}
|
||||
});
|
||||
}) || info.fields().iter().any(
|
||||
|field| match *field {
|
||||
Field::DataMember(ref data) => self
|
||||
.have_destructor
|
||||
.contains(&data.ty().into()),
|
||||
Field::Bitfields(_) => false,
|
||||
},
|
||||
);
|
||||
if base_or_field_destructor {
|
||||
self.insert(id)
|
||||
} else {
|
||||
|
@ -139,9 +139,9 @@ impl<'ctx> MonotoneFramework for HasDestructorAnalysis<'ctx> {
|
|||
}
|
||||
|
||||
TypeKind::TemplateInstantiation(ref inst) => {
|
||||
let definition_or_arg_destructor =
|
||||
self.have_destructor.contains(&inst.template_definition().into())
|
||||
||
|
||||
let definition_or_arg_destructor = self
|
||||
.have_destructor
|
||||
.contains(&inst.template_definition().into()) ||
|
||||
inst.template_arguments().iter().any(|arg| {
|
||||
self.have_destructor.contains(&arg.into())
|
||||
});
|
||||
|
|
|
@ -1,12 +1,12 @@
|
|||
//! Determining which types has float.
|
||||
|
||||
use super::{ConstrainResult, MonotoneFramework, generate_dependencies};
|
||||
use {HashSet, HashMap};
|
||||
use super::{generate_dependencies, ConstrainResult, MonotoneFramework};
|
||||
use ir::comp::Field;
|
||||
use ir::comp::FieldMethods;
|
||||
use ir::context::{BindgenContext, ItemId};
|
||||
use ir::traversal::EdgeKind;
|
||||
use ir::ty::TypeKind;
|
||||
use ir::comp::Field;
|
||||
use ir::comp::FieldMethods;
|
||||
use {HashMap, HashSet};
|
||||
|
||||
/// An analysis that finds for each IR item whether it has float or not.
|
||||
///
|
||||
|
@ -132,24 +132,27 @@ impl<'ctx> MonotoneFramework for HasFloat<'ctx> {
|
|||
ConstrainResult::Same
|
||||
}
|
||||
|
||||
TypeKind::Float(..) |
|
||||
TypeKind::Complex(..) => {
|
||||
TypeKind::Float(..) | TypeKind::Complex(..) => {
|
||||
trace!(" float type has float");
|
||||
self.insert(id)
|
||||
}
|
||||
|
||||
TypeKind::Array(t, _) => {
|
||||
if self.has_float.contains(&t.into()) {
|
||||
trace!(" Array with type T that has float also has float");
|
||||
return self.insert(id)
|
||||
trace!(
|
||||
" Array with type T that has float also has float"
|
||||
);
|
||||
return self.insert(id);
|
||||
}
|
||||
trace!(" Array with type T that do not have float also do not have float");
|
||||
ConstrainResult::Same
|
||||
}
|
||||
TypeKind::Vector(t, _) => {
|
||||
if self.has_float.contains(&t.into()) {
|
||||
trace!(" Vector with type T that has float also has float");
|
||||
return self.insert(id)
|
||||
trace!(
|
||||
" Vector with type T that has float also has float"
|
||||
);
|
||||
return self.insert(id);
|
||||
}
|
||||
trace!(" Vector with type T that do not have float also do not have float");
|
||||
ConstrainResult::Same
|
||||
|
@ -160,8 +163,10 @@ impl<'ctx> MonotoneFramework for HasFloat<'ctx> {
|
|||
TypeKind::Alias(t) |
|
||||
TypeKind::BlockPointer(t) => {
|
||||
if self.has_float.contains(&t.into()) {
|
||||
trace!(" aliases and type refs to T which have float \
|
||||
also have float");
|
||||
trace!(
|
||||
" aliases and type refs to T which have float \
|
||||
also have float"
|
||||
);
|
||||
self.insert(id)
|
||||
} else {
|
||||
trace!(" aliases and type refs to T which do not have float \
|
||||
|
@ -171,28 +176,23 @@ impl<'ctx> MonotoneFramework for HasFloat<'ctx> {
|
|||
}
|
||||
|
||||
TypeKind::Comp(ref info) => {
|
||||
let bases_have = info.base_members()
|
||||
let bases_have = info
|
||||
.base_members()
|
||||
.iter()
|
||||
.any(|base| self.has_float.contains(&base.ty.into()));
|
||||
if bases_have {
|
||||
trace!(" bases have float, so we also have");
|
||||
return self.insert(id);
|
||||
}
|
||||
let fields_have = info.fields()
|
||||
.iter()
|
||||
.any(|f| {
|
||||
match *f {
|
||||
Field::DataMember(ref data) => {
|
||||
self.has_float.contains(&data.ty().into())
|
||||
}
|
||||
Field::Bitfields(ref bfu) => {
|
||||
bfu.bitfields()
|
||||
.iter().any(|b| {
|
||||
self.has_float.contains(&b.ty().into())
|
||||
})
|
||||
},
|
||||
}
|
||||
});
|
||||
let fields_have = info.fields().iter().any(|f| match *f {
|
||||
Field::DataMember(ref data) => {
|
||||
self.has_float.contains(&data.ty().into())
|
||||
}
|
||||
Field::Bitfields(ref bfu) => bfu
|
||||
.bitfields()
|
||||
.iter()
|
||||
.any(|b| self.has_float.contains(&b.ty().into())),
|
||||
});
|
||||
if fields_have {
|
||||
trace!(" fields have float, so we also have");
|
||||
return self.insert(id);
|
||||
|
@ -203,20 +203,26 @@ impl<'ctx> MonotoneFramework for HasFloat<'ctx> {
|
|||
}
|
||||
|
||||
TypeKind::TemplateInstantiation(ref template) => {
|
||||
let args_have = template.template_arguments()
|
||||
let args_have = template
|
||||
.template_arguments()
|
||||
.iter()
|
||||
.any(|arg| self.has_float.contains(&arg.into()));
|
||||
if args_have {
|
||||
trace!(" template args have float, so \
|
||||
insantiation also has float");
|
||||
trace!(
|
||||
" template args have float, so \
|
||||
insantiation also has float"
|
||||
);
|
||||
return self.insert(id);
|
||||
}
|
||||
|
||||
let def_has = self.has_float
|
||||
let def_has = self
|
||||
.has_float
|
||||
.contains(&template.template_definition().into());
|
||||
if def_has {
|
||||
trace!(" template definition has float, so \
|
||||
insantiation also has");
|
||||
trace!(
|
||||
" template definition has float, so \
|
||||
insantiation also has"
|
||||
);
|
||||
return self.insert(id);
|
||||
}
|
||||
|
||||
|
@ -227,7 +233,8 @@ impl<'ctx> MonotoneFramework for HasFloat<'ctx> {
|
|||
}
|
||||
|
||||
fn each_depending_on<F>(&self, id: ItemId, mut f: F)
|
||||
where F: FnMut(ItemId),
|
||||
where
|
||||
F: FnMut(ItemId),
|
||||
{
|
||||
if let Some(edges) = self.dependencies.get(&id) {
|
||||
for item in edges {
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
//! Determining which types has typed parameters in array.
|
||||
|
||||
use super::{ConstrainResult, MonotoneFramework, generate_dependencies};
|
||||
use super::{generate_dependencies, ConstrainResult, MonotoneFramework};
|
||||
use ir::comp::Field;
|
||||
use ir::comp::FieldMethods;
|
||||
use ir::context::{BindgenContext, ItemId};
|
||||
|
@ -88,9 +88,7 @@ impl<'ctx> MonotoneFramework for HasTypeParameterInArray<'ctx> {
|
|||
type Extra = &'ctx BindgenContext;
|
||||
type Output = HashSet<ItemId>;
|
||||
|
||||
fn new(
|
||||
ctx: &'ctx BindgenContext,
|
||||
) -> HasTypeParameterInArray<'ctx> {
|
||||
fn new(ctx: &'ctx BindgenContext) -> HasTypeParameterInArray<'ctx> {
|
||||
let has_type_parameter_in_array = HashSet::default();
|
||||
let dependencies = generate_dependencies(ctx, Self::consider_edge);
|
||||
|
||||
|
@ -169,7 +167,7 @@ impl<'ctx> MonotoneFramework for HasTypeParameterInArray<'ctx> {
|
|||
if self.has_type_parameter_in_array.contains(&t.into()) {
|
||||
trace!(
|
||||
" aliases and type refs to T which have array \
|
||||
also have array"
|
||||
also have array"
|
||||
);
|
||||
self.insert(id)
|
||||
} else {
|
||||
|
@ -190,9 +188,9 @@ impl<'ctx> MonotoneFramework for HasTypeParameterInArray<'ctx> {
|
|||
return self.insert(id);
|
||||
}
|
||||
let fields_have = info.fields().iter().any(|f| match *f {
|
||||
Field::DataMember(ref data) => {
|
||||
self.has_type_parameter_in_array.contains(&data.ty().into())
|
||||
}
|
||||
Field::DataMember(ref data) => self
|
||||
.has_type_parameter_in_array
|
||||
.contains(&data.ty().into()),
|
||||
Field::Bitfields(..) => false,
|
||||
});
|
||||
if fields_have {
|
||||
|
@ -212,18 +210,18 @@ impl<'ctx> MonotoneFramework for HasTypeParameterInArray<'ctx> {
|
|||
if args_have {
|
||||
trace!(
|
||||
" template args have array, so \
|
||||
insantiation also has array"
|
||||
insantiation also has array"
|
||||
);
|
||||
return self.insert(id);
|
||||
}
|
||||
|
||||
let def_has = self.has_type_parameter_in_array.contains(
|
||||
&template.template_definition().into(),
|
||||
);
|
||||
let def_has = self
|
||||
.has_type_parameter_in_array
|
||||
.contains(&template.template_definition().into());
|
||||
if def_has {
|
||||
trace!(
|
||||
" template definition has array, so \
|
||||
insantiation also has"
|
||||
insantiation also has"
|
||||
);
|
||||
return self.insert(id);
|
||||
}
|
||||
|
|
|
@ -1,25 +1,25 @@
|
|||
//! Determining which types has vtable
|
||||
|
||||
use super::{ConstrainResult, MonotoneFramework, generate_dependencies};
|
||||
use super::{generate_dependencies, ConstrainResult, MonotoneFramework};
|
||||
use ir::context::{BindgenContext, ItemId};
|
||||
use ir::traversal::EdgeKind;
|
||||
use ir::ty::TypeKind;
|
||||
use std::cmp;
|
||||
use std::ops;
|
||||
use {HashMap, Entry};
|
||||
use {Entry, HashMap};
|
||||
|
||||
/// The result of the `HasVtableAnalysis` for an individual item.
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, Ord)]
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
|
||||
pub enum HasVtableResult {
|
||||
/// The item has a vtable, but the actual vtable pointer is in a base
|
||||
/// member.
|
||||
BaseHasVtable,
|
||||
/// The item does not have a vtable pointer.
|
||||
No,
|
||||
|
||||
/// The item has a vtable and the actual vtable pointer is within this item.
|
||||
SelfHasVtable,
|
||||
|
||||
/// The item does not have a vtable pointer.
|
||||
No
|
||||
/// The item has a vtable, but the actual vtable pointer is in a base
|
||||
/// member.
|
||||
BaseHasVtable,
|
||||
}
|
||||
|
||||
impl Default for HasVtableResult {
|
||||
|
@ -28,21 +28,6 @@ impl Default for HasVtableResult {
|
|||
}
|
||||
}
|
||||
|
||||
impl cmp::PartialOrd for HasVtableResult {
|
||||
fn partial_cmp(&self, rhs: &Self) -> Option<cmp::Ordering> {
|
||||
use self::HasVtableResult::*;
|
||||
|
||||
match (*self, *rhs) {
|
||||
(x, y) if x == y => Some(cmp::Ordering::Equal),
|
||||
(BaseHasVtable, _) => Some(cmp::Ordering::Greater),
|
||||
(_, BaseHasVtable) => Some(cmp::Ordering::Less),
|
||||
(SelfHasVtable, _) => Some(cmp::Ordering::Greater),
|
||||
(_, SelfHasVtable) => Some(cmp::Ordering::Less),
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl HasVtableResult {
|
||||
/// Take the least upper bound of `self` and `rhs`.
|
||||
pub fn join(self, rhs: Self) -> Self {
|
||||
|
@ -104,7 +89,11 @@ impl<'ctx> HasVtableAnalysis<'ctx> {
|
|||
}
|
||||
}
|
||||
|
||||
fn insert<Id: Into<ItemId>>(&mut self, id: Id, result: HasVtableResult) -> ConstrainResult {
|
||||
fn insert<Id: Into<ItemId>>(
|
||||
&mut self,
|
||||
id: Id,
|
||||
result: HasVtableResult,
|
||||
) -> ConstrainResult {
|
||||
if let HasVtableResult::No = result {
|
||||
return ConstrainResult::Same;
|
||||
}
|
||||
|
@ -176,7 +165,9 @@ impl<'ctx> MonotoneFramework for HasVtableAnalysis<'ctx> {
|
|||
TypeKind::Alias(t) |
|
||||
TypeKind::ResolvedTypeRef(t) |
|
||||
TypeKind::Reference(t) => {
|
||||
trace!(" aliases and references forward to their inner type");
|
||||
trace!(
|
||||
" aliases and references forward to their inner type"
|
||||
);
|
||||
self.forward(t, id)
|
||||
}
|
||||
|
||||
|
@ -224,9 +215,10 @@ impl<'ctx> MonotoneFramework for HasVtableAnalysis<'ctx> {
|
|||
impl<'ctx> From<HasVtableAnalysis<'ctx>> for HashMap<ItemId, HasVtableResult> {
|
||||
fn from(analysis: HasVtableAnalysis<'ctx>) -> Self {
|
||||
// We let the lack of an entry mean "No" to save space.
|
||||
extra_assert!(analysis.have_vtable.values().all(|v| {
|
||||
*v != HasVtableResult::No
|
||||
}));
|
||||
extra_assert!(analysis
|
||||
.have_vtable
|
||||
.values()
|
||||
.all(|v| { *v != HasVtableResult::No }));
|
||||
|
||||
analysis.have_vtable
|
||||
}
|
||||
|
|
|
@ -41,7 +41,7 @@
|
|||
mod template_params;
|
||||
pub use self::template_params::UsedTemplateParameters;
|
||||
mod derive;
|
||||
pub use self::derive::{CannotDerive, DeriveTrait, as_cannot_derive_set};
|
||||
pub use self::derive::{as_cannot_derive_set, CannotDerive, DeriveTrait};
|
||||
mod has_vtable;
|
||||
pub use self::has_vtable::{HasVtable, HasVtableAnalysis, HasVtableResult};
|
||||
mod has_destructor;
|
||||
|
@ -56,9 +56,9 @@ pub use self::sizedness::{Sizedness, SizednessAnalysis, SizednessResult};
|
|||
use ir::context::{BindgenContext, ItemId};
|
||||
|
||||
use ir::traversal::{EdgeKind, Trace};
|
||||
use HashMap;
|
||||
use std::fmt;
|
||||
use std::ops;
|
||||
use HashMap;
|
||||
|
||||
/// An analysis in the monotone framework.
|
||||
///
|
||||
|
@ -164,10 +164,9 @@ where
|
|||
|
||||
while let Some(node) = worklist.pop() {
|
||||
if let ConstrainResult::Changed = analysis.constrain(node) {
|
||||
analysis.each_depending_on(
|
||||
node,
|
||||
|needs_work| { worklist.push(needs_work); },
|
||||
);
|
||||
analysis.each_depending_on(node, |needs_work| {
|
||||
worklist.push(needs_work);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -196,9 +195,10 @@ where
|
|||
if ctx.whitelisted_items().contains(&sub_item) &&
|
||||
consider_edge(edge_kind)
|
||||
{
|
||||
dependencies.entry(sub_item).or_insert(vec![]).push(
|
||||
item,
|
||||
);
|
||||
dependencies
|
||||
.entry(sub_item)
|
||||
.or_insert(vec![])
|
||||
.push(item);
|
||||
}
|
||||
},
|
||||
&(),
|
||||
|
@ -325,13 +325,17 @@ mod tests {
|
|||
// Yes, what follows is a **terribly** inefficient set union
|
||||
// implementation. Don't copy this code outside of this test!
|
||||
|
||||
let original_size =
|
||||
self.reachable.entry(node).or_insert(HashSet::default()).len();
|
||||
let original_size = self
|
||||
.reachable
|
||||
.entry(node)
|
||||
.or_insert(HashSet::default())
|
||||
.len();
|
||||
|
||||
for sub_node in self.graph.0[&node].iter() {
|
||||
self.reachable.get_mut(&node).unwrap().insert(*sub_node);
|
||||
|
||||
let sub_reachable = self.reachable
|
||||
let sub_reachable = self
|
||||
.reachable
|
||||
.entry(*sub_node)
|
||||
.or_insert(HashSet::default())
|
||||
.clone();
|
||||
|
|
|
@ -1,12 +1,14 @@
|
|||
//! Determining the sizedness of types (as base classes and otherwise).
|
||||
|
||||
use super::{ConstrainResult, MonotoneFramework, HasVtable, generate_dependencies};
|
||||
use super::{
|
||||
generate_dependencies, ConstrainResult, HasVtable, MonotoneFramework,
|
||||
};
|
||||
use ir::context::{BindgenContext, TypeId};
|
||||
use ir::item::IsOpaque;
|
||||
use ir::traversal::EdgeKind;
|
||||
use ir::ty::TypeKind;
|
||||
use std::{cmp, ops};
|
||||
use {HashMap, Entry};
|
||||
use {Entry, HashMap};
|
||||
|
||||
/// The result of the `Sizedness` analysis for an individual item.
|
||||
///
|
||||
|
@ -22,13 +24,14 @@ use {HashMap, Entry};
|
|||
///
|
||||
/// We initially assume that all types are `ZeroSized` and then update our
|
||||
/// understanding as we learn more about each type.
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, Ord)]
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
|
||||
pub enum SizednessResult {
|
||||
/// Has some size that is known to be greater than zero. That doesn't mean
|
||||
/// it has a static size, but it is not zero sized for sure. In other words,
|
||||
/// it might contain an incomplete array or some other dynamically sized
|
||||
/// type.
|
||||
NonZeroSized,
|
||||
/// The type is zero-sized.
|
||||
///
|
||||
/// This means that if it is a C++ type, and is not being used as a base
|
||||
/// member, then we must add an `_address` byte to enforce the
|
||||
/// unique-address-per-distinct-object-instance rule.
|
||||
ZeroSized,
|
||||
|
||||
/// Whether this type is zero-sized or not depends on whether a type
|
||||
/// parameter is zero-sized or not.
|
||||
|
@ -52,12 +55,11 @@ pub enum SizednessResult {
|
|||
/// https://github.com/rust-lang/rust-bindgen/issues/586
|
||||
DependsOnTypeParam,
|
||||
|
||||
/// The type is zero-sized.
|
||||
///
|
||||
/// This means that if it is a C++ type, and is not being used as a base
|
||||
/// member, then we must add an `_address` byte to enforce the
|
||||
/// unique-address-per-distinct-object-instance rule.
|
||||
ZeroSized,
|
||||
/// Has some size that is known to be greater than zero. That doesn't mean
|
||||
/// it has a static size, but it is not zero sized for sure. In other words,
|
||||
/// it might contain an incomplete array or some other dynamically sized
|
||||
/// type.
|
||||
NonZeroSized,
|
||||
}
|
||||
|
||||
impl Default for SizednessResult {
|
||||
|
@ -66,21 +68,6 @@ impl Default for SizednessResult {
|
|||
}
|
||||
}
|
||||
|
||||
impl cmp::PartialOrd for SizednessResult {
|
||||
fn partial_cmp(&self, rhs: &Self) -> Option<cmp::Ordering> {
|
||||
use self::SizednessResult::*;
|
||||
|
||||
match (*self, *rhs) {
|
||||
(x, y) if x == y => Some(cmp::Ordering::Equal),
|
||||
(NonZeroSized, _) => Some(cmp::Ordering::Greater),
|
||||
(_, NonZeroSized) => Some(cmp::Ordering::Less),
|
||||
(DependsOnTypeParam, _) => Some(cmp::Ordering::Greater),
|
||||
(_, DependsOnTypeParam) => Some(cmp::Ordering::Less),
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl SizednessResult {
|
||||
/// Take the least upper bound of `self` and `rhs`.
|
||||
pub fn join(self, rhs: Self) -> Self {
|
||||
|
@ -140,7 +127,11 @@ impl<'ctx> SizednessAnalysis<'ctx> {
|
|||
|
||||
/// Insert an incremental result, and return whether this updated our
|
||||
/// knowledge of types and we should continue the analysis.
|
||||
fn insert(&mut self, id: TypeId, result: SizednessResult) -> ConstrainResult {
|
||||
fn insert(
|
||||
&mut self,
|
||||
id: TypeId,
|
||||
result: SizednessResult,
|
||||
) -> ConstrainResult {
|
||||
trace!("inserting {:?} for {:?}", result, id);
|
||||
|
||||
if let SizednessResult::ZeroSized = result {
|
||||
|
@ -180,15 +171,15 @@ impl<'ctx> MonotoneFramework for SizednessAnalysis<'ctx> {
|
|||
let dependencies = generate_dependencies(ctx, Self::consider_edge)
|
||||
.into_iter()
|
||||
.filter_map(|(id, sub_ids)| {
|
||||
id.as_type_id(ctx)
|
||||
.map(|id| {
|
||||
(
|
||||
id,
|
||||
sub_ids.into_iter()
|
||||
.filter_map(|s| s.as_type_id(ctx))
|
||||
.collect::<Vec<_>>()
|
||||
)
|
||||
})
|
||||
id.as_type_id(ctx).map(|id| {
|
||||
(
|
||||
id,
|
||||
sub_ids
|
||||
.into_iter()
|
||||
.filter_map(|s| s.as_type_id(ctx))
|
||||
.collect::<Vec<_>>(),
|
||||
)
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
|
||||
|
@ -213,7 +204,9 @@ impl<'ctx> MonotoneFramework for SizednessAnalysis<'ctx> {
|
|||
fn constrain(&mut self, id: TypeId) -> ConstrainResult {
|
||||
trace!("constrain {:?}", id);
|
||||
|
||||
if let Some(SizednessResult::NonZeroSized) = self.sized.get(&id).cloned() {
|
||||
if let Some(SizednessResult::NonZeroSized) =
|
||||
self.sized.get(&id).cloned()
|
||||
{
|
||||
trace!(" already know it is not zero-sized");
|
||||
return ConstrainResult::Same;
|
||||
}
|
||||
|
@ -227,8 +220,8 @@ impl<'ctx> MonotoneFramework for SizednessAnalysis<'ctx> {
|
|||
|
||||
if id.is_opaque(self.ctx, &()) {
|
||||
trace!(" type is opaque; checking layout...");
|
||||
let result = ty.layout(self.ctx)
|
||||
.map_or(SizednessResult::ZeroSized, |l| {
|
||||
let result =
|
||||
ty.layout(self.ctx).map_or(SizednessResult::ZeroSized, |l| {
|
||||
if l.size == 0 {
|
||||
trace!(" ...layout has size == 0");
|
||||
SizednessResult::ZeroSized
|
||||
|
@ -247,8 +240,10 @@ impl<'ctx> MonotoneFramework for SizednessAnalysis<'ctx> {
|
|||
}
|
||||
|
||||
TypeKind::TypeParam => {
|
||||
trace!(" type params sizedness depends on what they're \
|
||||
instantiated as");
|
||||
trace!(
|
||||
" type params sizedness depends on what they're \
|
||||
instantiated as"
|
||||
);
|
||||
self.insert(id, SizednessResult::DependsOnTypeParam)
|
||||
}
|
||||
|
||||
|
@ -280,8 +275,10 @@ impl<'ctx> MonotoneFramework for SizednessAnalysis<'ctx> {
|
|||
}
|
||||
|
||||
TypeKind::TemplateInstantiation(ref inst) => {
|
||||
trace!(" template instantiations are zero-sized if their \
|
||||
definition is zero-sized");
|
||||
trace!(
|
||||
" template instantiations are zero-sized if their \
|
||||
definition is zero-sized"
|
||||
);
|
||||
self.forward(inst.template_definition(), id)
|
||||
}
|
||||
|
||||
|
@ -305,7 +302,8 @@ impl<'ctx> MonotoneFramework for SizednessAnalysis<'ctx> {
|
|||
return self.insert(id, SizednessResult::NonZeroSized);
|
||||
}
|
||||
|
||||
let result = info.base_members()
|
||||
let result = info
|
||||
.base_members()
|
||||
.iter()
|
||||
.filter_map(|base| self.sized.get(&base.ty))
|
||||
.fold(SizednessResult::ZeroSized, |a, b| a.join(*b));
|
||||
|
@ -339,9 +337,10 @@ impl<'ctx> MonotoneFramework for SizednessAnalysis<'ctx> {
|
|||
impl<'ctx> From<SizednessAnalysis<'ctx>> for HashMap<TypeId, SizednessResult> {
|
||||
fn from(analysis: SizednessAnalysis<'ctx>) -> Self {
|
||||
// We let the lack of an entry mean "ZeroSized" to save space.
|
||||
extra_assert!(analysis.sized.values().all(|v| {
|
||||
*v != SizednessResult::ZeroSized
|
||||
}));
|
||||
extra_assert!(analysis
|
||||
.sized
|
||||
.values()
|
||||
.all(|v| { *v != SizednessResult::ZeroSized }));
|
||||
|
||||
analysis.sized
|
||||
}
|
||||
|
|
|
@ -203,18 +203,21 @@ impl<'ctx> UsedTemplateParameters<'ctx> {
|
|||
}
|
||||
}
|
||||
|
||||
fn take_this_id_usage_set<Id: Into<ItemId>>(&mut self, this_id: Id) -> ItemSet {
|
||||
fn take_this_id_usage_set<Id: Into<ItemId>>(
|
||||
&mut self,
|
||||
this_id: Id,
|
||||
) -> ItemSet {
|
||||
let this_id = this_id.into();
|
||||
self.used
|
||||
.get_mut(&this_id)
|
||||
.expect(
|
||||
"Should have a set of used template params for every item \
|
||||
id",
|
||||
id",
|
||||
)
|
||||
.take()
|
||||
.expect(
|
||||
"Should maintain the invariant that all used template param \
|
||||
sets are `Some` upon entry of `constrain`",
|
||||
sets are `Some` upon entry of `constrain`",
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -231,7 +234,7 @@ impl<'ctx> UsedTemplateParameters<'ctx> {
|
|||
) {
|
||||
trace!(
|
||||
" instantiation of blacklisted template, uses all template \
|
||||
arguments"
|
||||
arguments"
|
||||
);
|
||||
|
||||
let args = instantiation
|
||||
|
@ -252,8 +255,8 @@ impl<'ctx> UsedTemplateParameters<'ctx> {
|
|||
.as_ref()
|
||||
.expect(
|
||||
"Because a != this_id, and all used template \
|
||||
param sets other than this_id's are `Some`, \
|
||||
a's used template param set should be `Some`",
|
||||
param sets other than this_id's are `Some`, \
|
||||
a's used template param set should be `Some`",
|
||||
)
|
||||
.iter()
|
||||
.cloned()
|
||||
|
@ -289,7 +292,7 @@ impl<'ctx> UsedTemplateParameters<'ctx> {
|
|||
for (arg, param) in args.iter().zip(params.iter()) {
|
||||
trace!(
|
||||
" instantiation's argument {:?} is used if definition's \
|
||||
parameter {:?} is used",
|
||||
parameter {:?} is used",
|
||||
arg,
|
||||
param
|
||||
);
|
||||
|
@ -297,7 +300,8 @@ impl<'ctx> UsedTemplateParameters<'ctx> {
|
|||
if used_by_def.contains(¶m.into()) {
|
||||
trace!(" param is used by template definition");
|
||||
|
||||
let arg = arg.into_resolver()
|
||||
let arg = arg
|
||||
.into_resolver()
|
||||
.through_type_refs()
|
||||
.through_type_aliases()
|
||||
.resolve(self.ctx)
|
||||
|
@ -307,15 +311,16 @@ impl<'ctx> UsedTemplateParameters<'ctx> {
|
|||
continue;
|
||||
}
|
||||
|
||||
let used_by_arg = self.used
|
||||
let used_by_arg = self
|
||||
.used
|
||||
.get(&arg)
|
||||
.expect("Should have a used entry for the template arg")
|
||||
.as_ref()
|
||||
.expect(
|
||||
"Because arg != this_id, and all used template \
|
||||
param sets other than this_id's are `Some`, \
|
||||
arg's used template param set should be \
|
||||
`Some`",
|
||||
param sets other than this_id's are `Some`, \
|
||||
arg's used template param set should be \
|
||||
`Some`",
|
||||
)
|
||||
.iter()
|
||||
.cloned();
|
||||
|
@ -339,7 +344,8 @@ impl<'ctx> UsedTemplateParameters<'ctx> {
|
|||
return;
|
||||
}
|
||||
|
||||
let used_by_sub_id = self.used
|
||||
let used_by_sub_id = self
|
||||
.used
|
||||
.get(&sub_id)
|
||||
.expect("Should have a used set for the sub_id successor")
|
||||
.as_ref()
|
||||
|
@ -370,9 +376,7 @@ impl<'ctx> MonotoneFramework for UsedTemplateParameters<'ctx> {
|
|||
type Extra = &'ctx BindgenContext;
|
||||
type Output = HashMap<ItemId, ItemSet>;
|
||||
|
||||
fn new(
|
||||
ctx: &'ctx BindgenContext,
|
||||
) -> UsedTemplateParameters<'ctx> {
|
||||
fn new(ctx: &'ctx BindgenContext) -> UsedTemplateParameters<'ctx> {
|
||||
let mut used = HashMap::default();
|
||||
let mut dependencies = HashMap::default();
|
||||
let whitelisted_items: HashSet<_> =
|
||||
|
@ -383,7 +387,13 @@ impl<'ctx> MonotoneFramework for UsedTemplateParameters<'ctx> {
|
|||
.cloned()
|
||||
.flat_map(|i| {
|
||||
let mut reachable = vec![i];
|
||||
i.trace(ctx, &mut |s, _| { reachable.push(s); }, &());
|
||||
i.trace(
|
||||
ctx,
|
||||
&mut |s, _| {
|
||||
reachable.push(s);
|
||||
},
|
||||
&(),
|
||||
);
|
||||
reachable
|
||||
})
|
||||
.collect();
|
||||
|
@ -399,9 +409,10 @@ impl<'ctx> MonotoneFramework for UsedTemplateParameters<'ctx> {
|
|||
ctx,
|
||||
&mut |sub_item: ItemId, _| {
|
||||
used.entry(sub_item).or_insert(Some(ItemSet::new()));
|
||||
dependencies.entry(sub_item).or_insert(vec![]).push(
|
||||
item,
|
||||
);
|
||||
dependencies
|
||||
.entry(sub_item)
|
||||
.or_insert(vec![])
|
||||
.push(item);
|
||||
},
|
||||
&(),
|
||||
);
|
||||
|
@ -418,11 +429,11 @@ impl<'ctx> MonotoneFramework for UsedTemplateParameters<'ctx> {
|
|||
// Although template definitions should always have
|
||||
// template parameters, there is a single exception:
|
||||
// opaque templates. Hence the unwrap_or.
|
||||
let params =
|
||||
decl.self_template_params(ctx);
|
||||
let params = decl.self_template_params(ctx);
|
||||
|
||||
for (arg, param) in args.iter().zip(params.iter()) {
|
||||
let arg = arg.into_resolver()
|
||||
let arg = arg
|
||||
.into_resolver()
|
||||
.through_type_aliases()
|
||||
.through_type_refs()
|
||||
.resolve(ctx)
|
||||
|
@ -487,7 +498,13 @@ impl<'ctx> MonotoneFramework for UsedTemplateParameters<'ctx> {
|
|||
.cloned()
|
||||
.flat_map(|i| {
|
||||
let mut reachable = vec![i];
|
||||
i.trace(self.ctx, &mut |s, _| { reachable.push(s); }, &());
|
||||
i.trace(
|
||||
self.ctx,
|
||||
&mut |s, _| {
|
||||
reachable.push(s);
|
||||
},
|
||||
&(),
|
||||
);
|
||||
reachable
|
||||
})
|
||||
.collect()
|
||||
|
@ -520,9 +537,9 @@ impl<'ctx> MonotoneFramework for UsedTemplateParameters<'ctx> {
|
|||
// Template instantiations only use their template arguments if the
|
||||
// template definition uses the corresponding template parameter.
|
||||
Some(&TypeKind::TemplateInstantiation(ref inst)) => {
|
||||
if self.whitelisted_items.contains(
|
||||
&inst.template_definition().into(),
|
||||
)
|
||||
if self
|
||||
.whitelisted_items
|
||||
.contains(&inst.template_definition().into())
|
||||
{
|
||||
self.constrain_instantiation(
|
||||
id,
|
||||
|
@ -548,7 +565,7 @@ impl<'ctx> MonotoneFramework for UsedTemplateParameters<'ctx> {
|
|||
assert!(
|
||||
new_len >= original_len,
|
||||
"This is the property that ensures this function is monotone -- \
|
||||
if it doesn't hold, the analysis might never terminate!"
|
||||
if it doesn't hold, the analysis might never terminate!"
|
||||
);
|
||||
|
||||
// Put the set back in the hash map and restore our invariant.
|
||||
|
@ -576,8 +593,7 @@ impl<'ctx> MonotoneFramework for UsedTemplateParameters<'ctx> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<'ctx> From<UsedTemplateParameters<'ctx>>
|
||||
for HashMap<ItemId, ItemSet> {
|
||||
impl<'ctx> From<UsedTemplateParameters<'ctx>> for HashMap<ItemId, ItemSet> {
|
||||
fn from(used_templ_params: UsedTemplateParameters<'ctx>) -> Self {
|
||||
used_templ_params
|
||||
.used
|
||||
|
|
|
@ -94,7 +94,11 @@ impl Annotations {
|
|||
let mut matched_one = false;
|
||||
anno.parse(&cursor.comment(), &mut matched_one);
|
||||
|
||||
if matched_one { Some(anno) } else { None }
|
||||
if matched_one {
|
||||
Some(anno)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Should this type be hidden?
|
||||
|
@ -157,9 +161,10 @@ impl Annotations {
|
|||
use clang_sys::CXComment_HTMLStartTag;
|
||||
if comment.kind() == CXComment_HTMLStartTag &&
|
||||
comment.get_tag_name() == "div" &&
|
||||
comment.get_tag_attrs().next().map_or(false, |attr| {
|
||||
attr.name == "rustbindgen"
|
||||
})
|
||||
comment
|
||||
.get_tag_attrs()
|
||||
.next()
|
||||
.map_or(false, |attr| attr.name == "rustbindgen")
|
||||
{
|
||||
*matched = true;
|
||||
for attr in comment.get_tag_attrs() {
|
||||
|
@ -168,14 +173,11 @@ impl Annotations {
|
|||
"hide" => self.hide = true,
|
||||
"nocopy" => self.disallow_copy = true,
|
||||
"replaces" => {
|
||||
self.use_instead_of =
|
||||
Some(
|
||||
attr.value.split("::").map(Into::into).collect(),
|
||||
)
|
||||
}
|
||||
"derive" => {
|
||||
self.derives.push(attr.value)
|
||||
self.use_instead_of = Some(
|
||||
attr.value.split("::").map(Into::into).collect(),
|
||||
)
|
||||
}
|
||||
"derive" => self.derives.push(attr.value),
|
||||
"private" => {
|
||||
self.private_fields = Some(attr.value != "false")
|
||||
}
|
||||
|
|
|
@ -67,7 +67,8 @@ fn preprocess_multi_line(comment: &str, indent: usize) -> String {
|
|||
let indent = make_indent(indent);
|
||||
// Strip any potential `*` characters preceding each line.
|
||||
let mut is_first = true;
|
||||
let mut lines: Vec<_> = comment.lines()
|
||||
let mut lines: Vec<_> = comment
|
||||
.lines()
|
||||
.map(|line| line.trim().trim_start_matches('*').trim_start_matches('!'))
|
||||
.skip_while(|line| line.trim().is_empty()) // Skip the first empty lines.
|
||||
.map(|line| {
|
||||
|
@ -78,7 +79,10 @@ fn preprocess_multi_line(comment: &str, indent: usize) -> String {
|
|||
.collect();
|
||||
|
||||
// Remove the trailing line corresponding to the `*/`.
|
||||
if lines.last().map_or(false, |l| l.trim().is_empty() || l.trim() == "///") {
|
||||
if lines
|
||||
.last()
|
||||
.map_or(false, |l| l.trim().is_empty() || l.trim() == "///")
|
||||
{
|
||||
lines.pop();
|
||||
}
|
||||
|
||||
|
|
|
@ -52,13 +52,13 @@ pub enum MethodKind {
|
|||
},
|
||||
}
|
||||
|
||||
|
||||
impl MethodKind {
|
||||
/// Is this a destructor method?
|
||||
pub fn is_destructor(&self) -> bool {
|
||||
match *self {
|
||||
MethodKind::Destructor |
|
||||
MethodKind::VirtualDestructor { .. } => true,
|
||||
MethodKind::Destructor | MethodKind::VirtualDestructor { .. } => {
|
||||
true
|
||||
}
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
@ -87,7 +87,11 @@ pub struct Method {
|
|||
|
||||
impl Method {
|
||||
/// Construct a new `Method`.
|
||||
pub fn new(kind: MethodKind, signature: FunctionId, is_const: bool) -> Self {
|
||||
pub fn new(
|
||||
kind: MethodKind,
|
||||
signature: FunctionId,
|
||||
is_const: bool,
|
||||
) -> Self {
|
||||
Method {
|
||||
kind,
|
||||
signature,
|
||||
|
@ -198,9 +202,7 @@ impl Field {
|
|||
/// Get this field's layout.
|
||||
pub fn layout(&self, ctx: &BindgenContext) -> Option<Layout> {
|
||||
match *self {
|
||||
Field::Bitfields(BitfieldUnit {
|
||||
layout, ..
|
||||
}) => Some(layout),
|
||||
Field::Bitfields(BitfieldUnit { layout, .. }) => Some(layout),
|
||||
Field::DataMember(ref data) => {
|
||||
ctx.resolve_type(data.ty).layout(ctx)
|
||||
}
|
||||
|
@ -219,9 +221,7 @@ impl Trace for Field {
|
|||
Field::DataMember(ref data) => {
|
||||
tracer.visit_kind(data.ty.into(), EdgeKind::Field);
|
||||
}
|
||||
Field::Bitfields(BitfieldUnit {
|
||||
ref bitfields, ..
|
||||
}) => {
|
||||
Field::Bitfields(BitfieldUnit { ref bitfields, .. }) => {
|
||||
for bf in bitfields {
|
||||
tracer.visit_kind(bf.ty().into(), EdgeKind::Field);
|
||||
}
|
||||
|
@ -242,10 +242,10 @@ impl DotAttributes for Field {
|
|||
match *self {
|
||||
Field::DataMember(ref data) => data.dot_attributes(ctx, out),
|
||||
Field::Bitfields(BitfieldUnit {
|
||||
layout,
|
||||
ref bitfields,
|
||||
..
|
||||
}) => {
|
||||
layout,
|
||||
ref bitfields,
|
||||
..
|
||||
}) => {
|
||||
writeln!(
|
||||
out,
|
||||
r#"<tr>
|
||||
|
@ -259,8 +259,7 @@ impl DotAttributes for Field {
|
|||
<td>unit.align</td><td>{}</td>
|
||||
</tr>
|
||||
"#,
|
||||
layout.size,
|
||||
layout.align
|
||||
layout.size, layout.align
|
||||
)?;
|
||||
for bf in bitfields {
|
||||
bf.dot_attributes(ctx, out)?;
|
||||
|
@ -373,7 +372,10 @@ impl Bitfield {
|
|||
/// Panics if called before assigning bitfield accessor names or if
|
||||
/// this bitfield have no name.
|
||||
pub fn getter_name(&self) -> &str {
|
||||
assert!(self.name().is_some(), "`Bitfield::getter_name` called on anonymous field");
|
||||
assert!(
|
||||
self.name().is_some(),
|
||||
"`Bitfield::getter_name` called on anonymous field"
|
||||
);
|
||||
self.getter_name.as_ref().expect(
|
||||
"`Bitfield::getter_name` should only be called after\
|
||||
assigning bitfield accessor names",
|
||||
|
@ -385,7 +387,10 @@ impl Bitfield {
|
|||
/// Panics if called before assigning bitfield accessor names or if
|
||||
/// this bitfield have no name.
|
||||
pub fn setter_name(&self) -> &str {
|
||||
assert!(self.name().is_some(), "`Bitfield::setter_name` called on anonymous field");
|
||||
assert!(
|
||||
self.name().is_some(),
|
||||
"`Bitfield::setter_name` called on anonymous field"
|
||||
);
|
||||
self.setter_name.as_ref().expect(
|
||||
"`Bitfield::setter_name` should only be called\
|
||||
after assigning bitfield accessor names",
|
||||
|
@ -423,7 +428,6 @@ impl FieldMethods for Bitfield {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
/// A raw field might be either of a plain data member or a bitfield within a
|
||||
/// bitfield allocation unit, but we haven't processed it and determined which
|
||||
/// yet (which would involve allocating it into a bitfield unit if it is a
|
||||
|
@ -597,9 +601,8 @@ where
|
|||
|
||||
for bitfield in raw_bitfields {
|
||||
let bitfield_width = bitfield.bitfield_width().unwrap() as usize;
|
||||
let bitfield_layout = ctx.resolve_type(bitfield.ty())
|
||||
.layout(ctx)
|
||||
.ok_or(())?;
|
||||
let bitfield_layout =
|
||||
ctx.resolve_type(bitfield.ty()).layout(ctx).ok_or(())?;
|
||||
let bitfield_size = bitfield_layout.size;
|
||||
let bitfield_align = bitfield_layout.align;
|
||||
|
||||
|
@ -628,8 +631,8 @@ where
|
|||
} else {
|
||||
if offset != 0 &&
|
||||
(bitfield_width == 0 ||
|
||||
(offset & (bitfield_align * 8 - 1)) + bitfield_width >
|
||||
bitfield_size * 8)
|
||||
(offset & (bitfield_align * 8 - 1)) + bitfield_width >
|
||||
bitfield_size * 8)
|
||||
{
|
||||
offset = align_to(offset, bitfield_align * 8);
|
||||
}
|
||||
|
@ -724,20 +727,17 @@ impl CompFields {
|
|||
}
|
||||
};
|
||||
|
||||
let result =
|
||||
raw_fields_to_fields_and_bitfield_units(ctx, raws);
|
||||
let result = raw_fields_to_fields_and_bitfield_units(ctx, raws);
|
||||
|
||||
match result {
|
||||
Ok(fields_and_units) => {
|
||||
mem::replace(
|
||||
self,
|
||||
CompFields::AfterComputingBitfieldUnits(fields_and_units));
|
||||
CompFields::AfterComputingBitfieldUnits(fields_and_units),
|
||||
);
|
||||
}
|
||||
Err(()) => {
|
||||
mem::replace(
|
||||
self,
|
||||
CompFields::ErrorComputingBitfieldUnits
|
||||
);
|
||||
mem::replace(self, CompFields::ErrorComputingBitfieldUnits);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -754,7 +754,11 @@ impl CompFields {
|
|||
}
|
||||
};
|
||||
|
||||
fn has_method(methods: &[Method], ctx: &BindgenContext, name: &str) -> bool {
|
||||
fn has_method(
|
||||
methods: &[Method],
|
||||
ctx: &BindgenContext,
|
||||
name: &str,
|
||||
) -> bool {
|
||||
methods.iter().any(|method| {
|
||||
let method_name = ctx.resolve_func(method.signature()).name();
|
||||
method_name == name || ctx.rust_mangle(&method_name) == name
|
||||
|
@ -776,7 +780,8 @@ impl CompFields {
|
|||
.map(|bitfield_name| {
|
||||
let bitfield_name = bitfield_name.to_string();
|
||||
let getter = {
|
||||
let mut getter = ctx.rust_mangle(&bitfield_name).to_string();
|
||||
let mut getter =
|
||||
ctx.rust_mangle(&bitfield_name).to_string();
|
||||
if has_method(methods, ctx, &getter) {
|
||||
getter.push_str("_bindgen_bitfield");
|
||||
}
|
||||
|
@ -803,21 +808,24 @@ impl CompFields {
|
|||
}
|
||||
|
||||
anon_field_counter += 1;
|
||||
let generated_name = format!("__bindgen_anon_{}", anon_field_counter);
|
||||
let generated_name =
|
||||
format!("__bindgen_anon_{}", anon_field_counter);
|
||||
*name = Some(generated_name);
|
||||
}
|
||||
Field::Bitfields(ref mut bu) => for bitfield in &mut bu.bitfields {
|
||||
if bitfield.name().is_none() {
|
||||
continue;
|
||||
}
|
||||
Field::Bitfields(ref mut bu) => {
|
||||
for bitfield in &mut bu.bitfields {
|
||||
if bitfield.name().is_none() {
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Some(AccessorNamesPair { getter, setter }) =
|
||||
accessor_names.remove(bitfield.name().unwrap())
|
||||
{
|
||||
bitfield.getter_name = Some(getter);
|
||||
bitfield.setter_name = Some(setter);
|
||||
if let Some(AccessorNamesPair { getter, setter }) =
|
||||
accessor_names.remove(bitfield.name().unwrap())
|
||||
{
|
||||
bitfield.getter_name = Some(getter);
|
||||
bitfield.setter_name = Some(setter);
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1185,8 +1193,7 @@ impl CompInfo {
|
|||
let mut ci = CompInfo::new(kind);
|
||||
ci.is_forward_declaration =
|
||||
location.map_or(true, |cur| match cur.kind() {
|
||||
CXCursor_StructDecl |
|
||||
CXCursor_UnionDecl |
|
||||
CXCursor_StructDecl | CXCursor_UnionDecl |
|
||||
CXCursor_ClassDecl => !cur.is_definition(),
|
||||
_ => false,
|
||||
});
|
||||
|
@ -1195,16 +1202,20 @@ impl CompInfo {
|
|||
cursor.visit(|cur| {
|
||||
if cur.kind() != CXCursor_FieldDecl {
|
||||
if let Some((ty, clang_ty, offset)) =
|
||||
maybe_anonymous_struct_field.take() {
|
||||
maybe_anonymous_struct_field.take()
|
||||
{
|
||||
if cur.kind() == CXCursor_TypedefDecl &&
|
||||
cur.typedef_type().unwrap().canonical_type() == clang_ty {
|
||||
cur.typedef_type().unwrap().canonical_type() ==
|
||||
clang_ty
|
||||
{
|
||||
// Typedefs of anonymous structs appear later in the ast
|
||||
// than the struct itself, that would otherwise be an
|
||||
// anonymous field. Detect that case here, and do
|
||||
// nothing.
|
||||
} else {
|
||||
let field =
|
||||
RawField::new(None, ty, None, None, None, false, offset);
|
||||
let field = RawField::new(
|
||||
None, ty, None, None, None, false, offset,
|
||||
);
|
||||
ci.fields.append_raw_field(field);
|
||||
}
|
||||
}
|
||||
|
@ -1213,7 +1224,8 @@ impl CompInfo {
|
|||
match cur.kind() {
|
||||
CXCursor_FieldDecl => {
|
||||
if let Some((ty, clang_ty, offset)) =
|
||||
maybe_anonymous_struct_field.take() {
|
||||
maybe_anonymous_struct_field.take()
|
||||
{
|
||||
let mut used = false;
|
||||
cur.visit(|child| {
|
||||
if child.cur_type() == clang_ty {
|
||||
|
@ -1222,22 +1234,20 @@ impl CompInfo {
|
|||
CXChildVisit_Continue
|
||||
});
|
||||
if !used {
|
||||
let field = RawField::new(None,
|
||||
ty,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
false,
|
||||
offset);
|
||||
let field = RawField::new(
|
||||
None, ty, None, None, None, false, offset,
|
||||
);
|
||||
ci.fields.append_raw_field(field);
|
||||
}
|
||||
}
|
||||
|
||||
let bit_width = cur.bit_width();
|
||||
let field_type = Item::from_ty_or_ref(cur.cur_type(),
|
||||
cur,
|
||||
Some(potential_id),
|
||||
ctx);
|
||||
let field_type = Item::from_ty_or_ref(
|
||||
cur.cur_type(),
|
||||
cur,
|
||||
Some(potential_id),
|
||||
ctx,
|
||||
);
|
||||
|
||||
let comment = cur.raw_comment();
|
||||
let annotations = Annotations::new(&cur);
|
||||
|
@ -1247,18 +1257,22 @@ impl CompInfo {
|
|||
|
||||
// Name can be empty if there are bitfields, for example,
|
||||
// see tests/headers/struct_with_bitfields.h
|
||||
assert!(!name.is_empty() || bit_width.is_some(),
|
||||
"Empty field name?");
|
||||
assert!(
|
||||
!name.is_empty() || bit_width.is_some(),
|
||||
"Empty field name?"
|
||||
);
|
||||
|
||||
let name = if name.is_empty() { None } else { Some(name) };
|
||||
|
||||
let field = RawField::new(name,
|
||||
field_type,
|
||||
comment,
|
||||
annotations,
|
||||
bit_width,
|
||||
is_mutable,
|
||||
offset);
|
||||
let field = RawField::new(
|
||||
name,
|
||||
field_type,
|
||||
comment,
|
||||
annotations,
|
||||
bit_width,
|
||||
is_mutable,
|
||||
offset,
|
||||
);
|
||||
ci.fields.append_raw_field(field);
|
||||
|
||||
// No we look for things like attributes and stuff.
|
||||
|
@ -1268,7 +1282,6 @@ impl CompInfo {
|
|||
}
|
||||
CXChildVisit_Continue
|
||||
});
|
||||
|
||||
}
|
||||
CXCursor_UnexposedAttr => {
|
||||
ci.found_unknown_attr = true;
|
||||
|
@ -1293,8 +1306,8 @@ impl CompInfo {
|
|||
// definition, it's a valid inner type.
|
||||
//
|
||||
// [1]: https://github.com/rust-lang/rust-bindgen/issues/482
|
||||
let is_inner_struct = cur.semantic_parent() == cursor ||
|
||||
cur.is_definition();
|
||||
let is_inner_struct =
|
||||
cur.semantic_parent() == cursor || cur.is_definition();
|
||||
if !is_inner_struct {
|
||||
return CXChildVisit_Continue;
|
||||
}
|
||||
|
@ -1311,19 +1324,22 @@ impl CompInfo {
|
|||
// A declaration of an union or a struct without name could
|
||||
// also be an unnamed field, unfortunately.
|
||||
if cur.spelling().is_empty() &&
|
||||
cur.kind() != CXCursor_EnumDecl {
|
||||
cur.kind() != CXCursor_EnumDecl
|
||||
{
|
||||
let ty = cur.cur_type();
|
||||
let offset = cur.offset_of_field().ok();
|
||||
maybe_anonymous_struct_field = Some((inner, ty, offset));
|
||||
maybe_anonymous_struct_field =
|
||||
Some((inner, ty, offset));
|
||||
}
|
||||
}
|
||||
CXCursor_PackedAttr => {
|
||||
ci.packed_attr = true;
|
||||
}
|
||||
CXCursor_TemplateTypeParameter => {
|
||||
let param = Item::type_param(None, cur, ctx)
|
||||
.expect("Item::type_param should't fail when pointing \
|
||||
at a TemplateTypeParameter");
|
||||
let param = Item::type_param(None, cur, ctx).expect(
|
||||
"Item::type_param should't fail when pointing \
|
||||
at a TemplateTypeParameter",
|
||||
);
|
||||
ci.template_params.push(param);
|
||||
}
|
||||
CXCursor_CXXBaseSpecifier => {
|
||||
|
@ -1348,8 +1364,7 @@ impl CompInfo {
|
|||
field_name: field_name,
|
||||
});
|
||||
}
|
||||
CXCursor_Constructor |
|
||||
CXCursor_Destructor |
|
||||
CXCursor_Constructor | CXCursor_Destructor |
|
||||
CXCursor_CXXMethod => {
|
||||
let is_virtual = cur.method_is_virtual();
|
||||
let is_static = cur.method_is_static();
|
||||
|
@ -1376,9 +1391,14 @@ impl CompInfo {
|
|||
// `FunctionSig`.
|
||||
let signature =
|
||||
match Item::parse(cur, Some(potential_id), ctx) {
|
||||
Ok(item) if ctx.resolve_item(item)
|
||||
.kind()
|
||||
.is_function() => item,
|
||||
Ok(item)
|
||||
if ctx
|
||||
.resolve_item(item)
|
||||
.kind()
|
||||
.is_function() =>
|
||||
{
|
||||
item
|
||||
}
|
||||
_ => return CXChildVisit_Continue,
|
||||
};
|
||||
|
||||
|
@ -1424,7 +1444,8 @@ impl CompInfo {
|
|||
CXCursor_VarDecl => {
|
||||
let linkage = cur.linkage();
|
||||
if linkage != CXLinkage_External &&
|
||||
linkage != CXLinkage_UniqueExternal {
|
||||
linkage != CXLinkage_UniqueExternal
|
||||
{
|
||||
return CXChildVisit_Continue;
|
||||
}
|
||||
|
||||
|
@ -1433,9 +1454,8 @@ impl CompInfo {
|
|||
return CXChildVisit_Continue;
|
||||
}
|
||||
|
||||
if let Ok(item) = Item::parse(cur,
|
||||
Some(potential_id),
|
||||
ctx) {
|
||||
if let Ok(item) = Item::parse(cur, Some(potential_id), ctx)
|
||||
{
|
||||
ci.inner_vars.push(item.as_var_id_unchecked());
|
||||
}
|
||||
}
|
||||
|
@ -1445,11 +1465,13 @@ impl CompInfo {
|
|||
CXCursor_FunctionTemplate |
|
||||
CXCursor_ConversionFunction => {}
|
||||
_ => {
|
||||
warn!("unhandled comp member `{}` (kind {:?}) in `{}` ({})",
|
||||
cur.spelling(),
|
||||
clang::kind_to_str(cur.kind()),
|
||||
cursor.spelling(),
|
||||
cur.location());
|
||||
warn!(
|
||||
"unhandled comp member `{}` (kind {:?}) in `{}` ({})",
|
||||
cur.spelling(),
|
||||
clang::kind_to_str(cur.kind()),
|
||||
cursor.spelling(),
|
||||
cur.location()
|
||||
);
|
||||
}
|
||||
}
|
||||
CXChildVisit_Continue
|
||||
|
@ -1470,16 +1492,13 @@ impl CompInfo {
|
|||
use clang_sys::*;
|
||||
Ok(match cursor.kind() {
|
||||
CXCursor_UnionDecl => CompKind::Union,
|
||||
CXCursor_ClassDecl |
|
||||
CXCursor_StructDecl => CompKind::Struct,
|
||||
CXCursor_ClassDecl | CXCursor_StructDecl => CompKind::Struct,
|
||||
CXCursor_CXXBaseSpecifier |
|
||||
CXCursor_ClassTemplatePartialSpecialization |
|
||||
CXCursor_ClassTemplate => {
|
||||
match cursor.template_kind() {
|
||||
CXCursor_UnionDecl => CompKind::Union,
|
||||
_ => CompKind::Struct,
|
||||
}
|
||||
}
|
||||
CXCursor_ClassTemplate => match cursor.template_kind() {
|
||||
CXCursor_UnionDecl => CompKind::Union,
|
||||
_ => CompKind::Struct,
|
||||
},
|
||||
_ => {
|
||||
warn!("Unknown kind for comp type: {:?}", cursor);
|
||||
return Err(ParseError::Continue);
|
||||
|
@ -1505,9 +1524,13 @@ impl CompInfo {
|
|||
}
|
||||
|
||||
/// Is this compound type packed?
|
||||
pub fn is_packed(&self, ctx: &BindgenContext, layout: &Option<Layout>) -> bool {
|
||||
pub fn is_packed(
|
||||
&self,
|
||||
ctx: &BindgenContext,
|
||||
layout: &Option<Layout>,
|
||||
) -> bool {
|
||||
if self.packed_attr {
|
||||
return true
|
||||
return true;
|
||||
}
|
||||
|
||||
// Even though `libclang` doesn't expose `#pragma packed(...)`, we can
|
||||
|
@ -1632,7 +1655,7 @@ impl IsOpaque for CompInfo {
|
|||
|
||||
fn is_opaque(&self, ctx: &BindgenContext, layout: &Option<Layout>) -> bool {
|
||||
if self.has_non_type_template_params {
|
||||
return true
|
||||
return true;
|
||||
}
|
||||
|
||||
// When we do not have the layout for a bitfield's type (for example, it
|
||||
|
@ -1647,17 +1670,14 @@ impl IsOpaque for CompInfo {
|
|||
// some strange things going on, and the best we can do is make the
|
||||
// whole struct opaque.
|
||||
if self.fields().iter().any(|f| match *f {
|
||||
Field::DataMember(_) => {
|
||||
false
|
||||
},
|
||||
Field::Bitfields(ref unit) => {
|
||||
unit.bitfields().iter().any(|bf| {
|
||||
let bitfield_layout = ctx.resolve_type(bf.ty())
|
||||
.layout(ctx)
|
||||
.expect("Bitfield without layout? Gah!");
|
||||
bf.width() / 8 > bitfield_layout.size as u32
|
||||
})
|
||||
}
|
||||
Field::DataMember(_) => false,
|
||||
Field::Bitfields(ref unit) => unit.bitfields().iter().any(|bf| {
|
||||
let bitfield_layout = ctx
|
||||
.resolve_type(bf.ty())
|
||||
.layout(ctx)
|
||||
.expect("Bitfield without layout? Gah!");
|
||||
bf.width() / 8 > bitfield_layout.size as u32
|
||||
}),
|
||||
}) {
|
||||
return true;
|
||||
}
|
||||
|
@ -1668,7 +1688,9 @@ impl IsOpaque for CompInfo {
|
|||
//
|
||||
// See https://github.com/rust-lang/rust-bindgen/issues/537 and
|
||||
// https://github.com/rust-lang/rust/issues/33158
|
||||
if self.is_packed(ctx, layout) && layout.map_or(false, |l| l.align > 1) {
|
||||
if self.is_packed(ctx, layout) &&
|
||||
layout.map_or(false, |l| l.align > 1)
|
||||
{
|
||||
warn!("Found a type that is both packed and aligned to greater than \
|
||||
1; Rust before version 1.33 doesn't have `#[repr(packed(N))]`, so we \
|
||||
are treating it as opaque. You may wish to set bindgen's rust target \
|
||||
|
@ -1682,10 +1704,7 @@ impl IsOpaque for CompInfo {
|
|||
}
|
||||
|
||||
impl TemplateParameters for CompInfo {
|
||||
fn self_template_params(
|
||||
&self,
|
||||
_ctx: &BindgenContext,
|
||||
) -> Vec<TypeId> {
|
||||
fn self_template_params(&self, _ctx: &BindgenContext) -> Vec<TypeId> {
|
||||
self.template_params.clone()
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,13 +1,17 @@
|
|||
//! Common context that is passed around during parsing and codegen.
|
||||
|
||||
use super::analysis::{CannotDerive, DeriveTrait, as_cannot_derive_set,
|
||||
HasTypeParameterInArray, HasVtableAnalysis,
|
||||
HasVtableResult, HasDestructorAnalysis,
|
||||
UsedTemplateParameters, HasFloat, SizednessAnalysis,
|
||||
SizednessResult, analyze};
|
||||
use super::derive::{CanDeriveCopy, CanDeriveDebug, CanDeriveDefault,
|
||||
CanDeriveHash, CanDerivePartialOrd, CanDeriveOrd,
|
||||
CanDerivePartialEq, CanDeriveEq, CanDerive};
|
||||
use super::super::time::Timer;
|
||||
use super::analysis::{
|
||||
analyze, as_cannot_derive_set, CannotDerive, DeriveTrait,
|
||||
HasDestructorAnalysis, HasFloat, HasTypeParameterInArray,
|
||||
HasVtableAnalysis, HasVtableResult, SizednessAnalysis, SizednessResult,
|
||||
UsedTemplateParameters,
|
||||
};
|
||||
use super::derive::{
|
||||
CanDerive, CanDeriveCopy, CanDeriveDebug, CanDeriveDefault, CanDeriveEq,
|
||||
CanDeriveHash, CanDeriveOrd, CanDerivePartialEq, CanDerivePartialOrd,
|
||||
};
|
||||
use super::function::Function;
|
||||
use super::int::IntKind;
|
||||
use super::item::{IsOpaque, Item, ItemAncestors, ItemSet};
|
||||
use super::item_kind::ItemKind;
|
||||
|
@ -15,9 +19,6 @@ use super::module::{Module, ModuleKind};
|
|||
use super::template::{TemplateInstantiation, TemplateParameters};
|
||||
use super::traversal::{self, Edge, ItemTraversal};
|
||||
use super::ty::{FloatKind, Type, TypeKind};
|
||||
use super::function::Function;
|
||||
use super::super::time::Timer;
|
||||
use BindgenOptions;
|
||||
use callbacks::ParseCallbacks;
|
||||
use cexpr;
|
||||
use clang::{self, Cursor};
|
||||
|
@ -26,10 +27,11 @@ use parse::ClangItemParser;
|
|||
use proc_macro2::{Ident, Span};
|
||||
use std::borrow::Cow;
|
||||
use std::cell::Cell;
|
||||
use std::collections::HashMap as StdHashMap;
|
||||
use std::iter::IntoIterator;
|
||||
use std::mem;
|
||||
use std::collections::HashMap as StdHashMap;
|
||||
use {HashMap, HashSet, Entry};
|
||||
use BindgenOptions;
|
||||
use {Entry, HashMap, HashSet};
|
||||
|
||||
/// An identifier for some kind of IR item.
|
||||
#[derive(Debug, Copy, Clone, Eq, PartialOrd, Ord, Hash)]
|
||||
|
@ -199,7 +201,7 @@ impl ItemId {
|
|||
|
||||
impl<T> ::std::cmp::PartialEq<T> for ItemId
|
||||
where
|
||||
T: Copy + Into<ItemId>
|
||||
T: Copy + Into<ItemId>,
|
||||
{
|
||||
fn eq(&self, rhs: &T) -> bool {
|
||||
let rhs: ItemId = (*rhs).into();
|
||||
|
@ -209,7 +211,7 @@ where
|
|||
|
||||
impl<T> CanDeriveDebug for T
|
||||
where
|
||||
T: Copy + Into<ItemId>
|
||||
T: Copy + Into<ItemId>,
|
||||
{
|
||||
fn can_derive_debug(&self, ctx: &BindgenContext) -> bool {
|
||||
ctx.options().derive_debug && ctx.lookup_can_derive_debug(*self)
|
||||
|
@ -218,7 +220,7 @@ where
|
|||
|
||||
impl<T> CanDeriveDefault for T
|
||||
where
|
||||
T: Copy + Into<ItemId>
|
||||
T: Copy + Into<ItemId>,
|
||||
{
|
||||
fn can_derive_default(&self, ctx: &BindgenContext) -> bool {
|
||||
ctx.options().derive_default && ctx.lookup_can_derive_default(*self)
|
||||
|
@ -227,7 +229,7 @@ where
|
|||
|
||||
impl<T> CanDeriveCopy for T
|
||||
where
|
||||
T: Copy + Into<ItemId>
|
||||
T: Copy + Into<ItemId>,
|
||||
{
|
||||
fn can_derive_copy(&self, ctx: &BindgenContext) -> bool {
|
||||
ctx.options().derive_copy && ctx.lookup_can_derive_copy(*self)
|
||||
|
@ -236,7 +238,7 @@ where
|
|||
|
||||
impl<T> CanDeriveHash for T
|
||||
where
|
||||
T: Copy + Into<ItemId>
|
||||
T: Copy + Into<ItemId>,
|
||||
{
|
||||
fn can_derive_hash(&self, ctx: &BindgenContext) -> bool {
|
||||
ctx.options().derive_hash && ctx.lookup_can_derive_hash(*self)
|
||||
|
@ -245,42 +247,46 @@ where
|
|||
|
||||
impl<T> CanDerivePartialOrd for T
|
||||
where
|
||||
T: Copy + Into<ItemId>
|
||||
T: Copy + Into<ItemId>,
|
||||
{
|
||||
fn can_derive_partialord(&self, ctx: &BindgenContext) -> bool {
|
||||
ctx.options().derive_partialord &&
|
||||
ctx.lookup_can_derive_partialeq_or_partialord(*self) == CanDerive::Yes
|
||||
ctx.lookup_can_derive_partialeq_or_partialord(*self) ==
|
||||
CanDerive::Yes
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> CanDerivePartialEq for T
|
||||
where
|
||||
T: Copy + Into<ItemId>
|
||||
T: Copy + Into<ItemId>,
|
||||
{
|
||||
fn can_derive_partialeq(&self, ctx: &BindgenContext) -> bool {
|
||||
ctx.options().derive_partialeq &&
|
||||
ctx.lookup_can_derive_partialeq_or_partialord(*self) == CanDerive::Yes
|
||||
ctx.lookup_can_derive_partialeq_or_partialord(*self) ==
|
||||
CanDerive::Yes
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> CanDeriveEq for T
|
||||
where
|
||||
T: Copy + Into<ItemId>
|
||||
T: Copy + Into<ItemId>,
|
||||
{
|
||||
fn can_derive_eq(&self, ctx: &BindgenContext) -> bool {
|
||||
ctx.options().derive_eq &&
|
||||
ctx.lookup_can_derive_partialeq_or_partialord(*self) == CanDerive::Yes &&
|
||||
ctx.lookup_can_derive_partialeq_or_partialord(*self) ==
|
||||
CanDerive::Yes &&
|
||||
!ctx.lookup_has_float(*self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> CanDeriveOrd for T
|
||||
where
|
||||
T: Copy + Into<ItemId>
|
||||
T: Copy + Into<ItemId>,
|
||||
{
|
||||
fn can_derive_ord(&self, ctx: &BindgenContext) -> bool {
|
||||
ctx.options().derive_ord &&
|
||||
ctx.lookup_can_derive_partialeq_or_partialord(*self) == CanDerive::Yes &&
|
||||
ctx.lookup_can_derive_partialeq_or_partialord(*self) ==
|
||||
CanDerive::Yes &&
|
||||
!ctx.lookup_has_float(*self)
|
||||
}
|
||||
}
|
||||
|
@ -476,7 +482,7 @@ impl<'ctx> Iterator for WhitelistedItemsTraversal<'ctx> {
|
|||
let id = self.traversal.next()?;
|
||||
|
||||
if self.ctx.resolve_item(id).is_blacklisted(self.ctx) {
|
||||
continue
|
||||
continue;
|
||||
}
|
||||
|
||||
return Some(id);
|
||||
|
@ -526,7 +532,7 @@ fn find_effective_target(clang_args: &[String]) -> (String, bool) {
|
|||
|
||||
// If we're running from a build script, try to find the cargo target.
|
||||
if let Ok(t) = env::var("TARGET") {
|
||||
return (t, false)
|
||||
return (t, false);
|
||||
}
|
||||
|
||||
(HOST_TARGET.to_owned(), false)
|
||||
|
@ -577,8 +583,11 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
if let Some(ref ti) = target_info {
|
||||
if effective_target == HOST_TARGET {
|
||||
assert_eq!(
|
||||
ti.pointer_width / 8, mem::size_of::<*mut ()>(),
|
||||
"{:?} {:?}", effective_target, HOST_TARGET
|
||||
ti.pointer_width / 8,
|
||||
mem::size_of::<*mut ()>(),
|
||||
"{:?} {:?}",
|
||||
effective_target,
|
||||
HOST_TARGET
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -661,7 +670,7 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
}
|
||||
|
||||
/// Get the user-provided callbacks by reference, if any.
|
||||
pub fn parse_callbacks(&self) -> Option<&ParseCallbacks> {
|
||||
pub fn parse_callbacks(&self) -> Option<&dyn ParseCallbacks> {
|
||||
self.options().parse_callbacks.as_ref().map(|t| &**t)
|
||||
}
|
||||
|
||||
|
@ -677,12 +686,11 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
) {
|
||||
debug!(
|
||||
"BindgenContext::add_item({:?}, declaration: {:?}, loc: {:?}",
|
||||
item,
|
||||
declaration,
|
||||
location
|
||||
item, declaration, location
|
||||
);
|
||||
debug_assert!(
|
||||
declaration.is_some() || !item.kind().is_type() ||
|
||||
declaration.is_some() ||
|
||||
!item.kind().is_type() ||
|
||||
item.kind().expect_type().is_builtin_or_type_param() ||
|
||||
item.kind().expect_type().is_opaque(self, &item) ||
|
||||
item.kind().expect_type().is_unresolved_ref(),
|
||||
|
@ -692,8 +700,8 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
let id = item.id();
|
||||
let is_type = item.kind().is_type();
|
||||
let is_unnamed = is_type && item.expect_type().name().is_none();
|
||||
let is_template_instantiation = is_type &&
|
||||
item.expect_type().is_template_instantiation();
|
||||
let is_template_instantiation =
|
||||
is_type && item.expect_type().is_template_instantiation();
|
||||
|
||||
if item.id() != self.root_module {
|
||||
self.add_item_to_module(&item);
|
||||
|
@ -731,7 +739,10 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
debug!(
|
||||
"Invalid declaration {:?} found for type {:?}",
|
||||
declaration,
|
||||
self.resolve_item_fallible(id).unwrap().kind().expect_type()
|
||||
self.resolve_item_fallible(id)
|
||||
.unwrap()
|
||||
.kind()
|
||||
.expect_type()
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
@ -743,8 +754,7 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
} else {
|
||||
warn!(
|
||||
"Valid declaration with no USR: {:?}, {:?}",
|
||||
declaration,
|
||||
location
|
||||
declaration, location
|
||||
);
|
||||
TypeKey::Declaration(declaration)
|
||||
};
|
||||
|
@ -794,8 +804,7 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
pub fn add_type_param(&mut self, item: Item, definition: clang::Cursor) {
|
||||
debug!(
|
||||
"BindgenContext::add_type_param: item = {:?}; definition = {:?}",
|
||||
item,
|
||||
definition
|
||||
item, definition
|
||||
);
|
||||
|
||||
assert!(
|
||||
|
@ -816,7 +825,9 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
"should not have already associated an item with the given id"
|
||||
);
|
||||
|
||||
let old_named_ty = self.type_params.insert(definition, id.as_type_id_unchecked());
|
||||
let old_named_ty = self
|
||||
.type_params
|
||||
.insert(definition, id.as_type_id_unchecked());
|
||||
assert!(
|
||||
old_named_ty.is_none(),
|
||||
"should not have already associated a named type with this id"
|
||||
|
@ -841,61 +852,16 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
name.contains("?") ||
|
||||
name.contains("$") ||
|
||||
match name {
|
||||
"abstract" |
|
||||
"alignof" |
|
||||
"as" |
|
||||
"async" |
|
||||
"become" |
|
||||
"box" |
|
||||
"break" |
|
||||
"const" |
|
||||
"continue" |
|
||||
"crate" |
|
||||
"do" |
|
||||
"else" |
|
||||
"enum" |
|
||||
"extern" |
|
||||
"false" |
|
||||
"final" |
|
||||
"fn" |
|
||||
"for" |
|
||||
"if" |
|
||||
"impl" |
|
||||
"in" |
|
||||
"let" |
|
||||
"loop" |
|
||||
"macro" |
|
||||
"match" |
|
||||
"mod" |
|
||||
"move" |
|
||||
"mut" |
|
||||
"offsetof" |
|
||||
"override" |
|
||||
"priv" |
|
||||
"proc" |
|
||||
"pub" |
|
||||
"pure" |
|
||||
"ref" |
|
||||
"return" |
|
||||
"Self" |
|
||||
"self" |
|
||||
"sizeof" |
|
||||
"static" |
|
||||
"struct" |
|
||||
"super" |
|
||||
"trait" |
|
||||
"true" |
|
||||
"type" |
|
||||
"typeof" |
|
||||
"unsafe" |
|
||||
"unsized" |
|
||||
"use" |
|
||||
"virtual" |
|
||||
"where" |
|
||||
"while" |
|
||||
"yield" |
|
||||
"bool" |
|
||||
"_" => true,
|
||||
"abstract" | "alignof" | "as" | "async" | "become" |
|
||||
"box" | "break" | "const" | "continue" | "crate" | "do" |
|
||||
"else" | "enum" | "extern" | "false" | "final" | "fn" |
|
||||
"for" | "if" | "impl" | "in" | "let" | "loop" | "macro" |
|
||||
"match" | "mod" | "move" | "mut" | "offsetof" |
|
||||
"override" | "priv" | "proc" | "pub" | "pure" | "ref" |
|
||||
"return" | "Self" | "self" | "sizeof" | "static" |
|
||||
"struct" | "super" | "trait" | "true" | "type" | "typeof" |
|
||||
"unsafe" | "unsized" | "use" | "virtual" | "where" |
|
||||
"while" | "yield" | "bool" | "_" => true,
|
||||
_ => false,
|
||||
}
|
||||
{
|
||||
|
@ -912,7 +878,7 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
/// Returns a mangled name as a rust identifier.
|
||||
pub fn rust_ident<S>(&self, name: S) -> Ident
|
||||
where
|
||||
S: AsRef<str>
|
||||
S: AsRef<str>,
|
||||
{
|
||||
self.rust_ident_raw(self.rust_mangle(name.as_ref()))
|
||||
}
|
||||
|
@ -920,20 +886,17 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
/// Returns a mangled name as a rust identifier.
|
||||
pub fn rust_ident_raw<T>(&self, name: T) -> Ident
|
||||
where
|
||||
T: AsRef<str>
|
||||
T: AsRef<str>,
|
||||
{
|
||||
Ident::new(name.as_ref(), Span::call_site())
|
||||
}
|
||||
|
||||
/// Iterate over all items that have been defined.
|
||||
pub fn items(&self) -> impl Iterator<Item = (ItemId, &Item)> {
|
||||
self.items
|
||||
.iter()
|
||||
.enumerate()
|
||||
.filter_map(|(index, item)| {
|
||||
let item = item.as_ref()?;
|
||||
Some((ItemId(index), item))
|
||||
})
|
||||
self.items.iter().enumerate().filter_map(|(index, item)| {
|
||||
let item = item.as_ref()?;
|
||||
Some((ItemId(index), item))
|
||||
})
|
||||
}
|
||||
|
||||
/// Have we collected all unresolved type references yet?
|
||||
|
@ -971,19 +934,20 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
let typerefs = self.collect_typerefs();
|
||||
|
||||
for (id, ty, loc, parent_id) in typerefs {
|
||||
let _resolved = {
|
||||
let resolved = Item::from_ty(&ty, loc, parent_id, self)
|
||||
let _resolved =
|
||||
{
|
||||
let resolved = Item::from_ty(&ty, loc, parent_id, self)
|
||||
.unwrap_or_else(|_| {
|
||||
warn!("Could not resolve type reference, falling back \
|
||||
to opaque blob");
|
||||
Item::new_opaque_type(self.next_item_id(), &ty, self)
|
||||
});
|
||||
|
||||
let item = self.items[id.0].as_mut().unwrap();
|
||||
*item.kind_mut().as_type_mut().unwrap().kind_mut() =
|
||||
TypeKind::ResolvedTypeRef(resolved);
|
||||
resolved
|
||||
};
|
||||
let item = self.items[id.0].as_mut().unwrap();
|
||||
*item.kind_mut().as_type_mut().unwrap().kind_mut() =
|
||||
TypeKind::ResolvedTypeRef(resolved);
|
||||
resolved
|
||||
};
|
||||
|
||||
// Something in the STL is trolling me. I don't need this assertion
|
||||
// right now, but worth investigating properly once this lands.
|
||||
|
@ -1008,7 +972,7 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
/// closure is made.
|
||||
fn with_loaned_item<F, T>(&mut self, id: ItemId, f: F) -> T
|
||||
where
|
||||
F: (FnOnce(&BindgenContext, &mut Item) -> T)
|
||||
F: (FnOnce(&BindgenContext, &mut Item) -> T),
|
||||
{
|
||||
let mut item = self.items[id.0].take().unwrap();
|
||||
|
||||
|
@ -1043,7 +1007,8 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
fn deanonymize_fields(&mut self) {
|
||||
let _t = self.timer("deanonymize_fields");
|
||||
|
||||
let comp_item_ids: Vec<ItemId> = self.items()
|
||||
let comp_item_ids: Vec<ItemId> = self
|
||||
.items()
|
||||
.filter_map(|(id, item)| {
|
||||
if item.kind().as_type()?.is_comp() {
|
||||
return Some(id);
|
||||
|
@ -1108,7 +1073,10 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
// We set this just after parsing the annotation. It's
|
||||
// very unlikely, but this can happen.
|
||||
if self.resolve_item_fallible(*replacement).is_some() {
|
||||
replacements.push((id.expect_type_id(self), replacement.expect_type_id(self)));
|
||||
replacements.push((
|
||||
id.expect_type_id(self),
|
||||
replacement.expect_type_id(self),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1157,15 +1125,14 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
})
|
||||
})
|
||||
};
|
||||
let old_module = old_module.expect(
|
||||
"Every replacement item should be in a module",
|
||||
);
|
||||
let old_module = old_module
|
||||
.expect("Every replacement item should be in a module");
|
||||
|
||||
let new_module = {
|
||||
let immut_self = &*self;
|
||||
new_parent.ancestors(immut_self).find(|id| {
|
||||
immut_self.resolve_item(*id).is_module()
|
||||
})
|
||||
new_parent
|
||||
.ancestors(immut_self)
|
||||
.find(|id| immut_self.resolve_item(*id).is_module())
|
||||
};
|
||||
let new_module = new_module.unwrap_or(self.root_module.into());
|
||||
|
||||
|
@ -1274,26 +1241,25 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
|
||||
assert!(
|
||||
{
|
||||
let id = id.into_resolver()
|
||||
let id = id
|
||||
.into_resolver()
|
||||
.through_type_refs()
|
||||
.through_type_aliases()
|
||||
.resolve(self)
|
||||
.id();
|
||||
id.ancestors(self).chain(Some(self.root_module.into())).any(
|
||||
|ancestor| {
|
||||
id.ancestors(self)
|
||||
.chain(Some(self.root_module.into()))
|
||||
.any(|ancestor| {
|
||||
debug!(
|
||||
"Checking if {:?} is a child of {:?}",
|
||||
id,
|
||||
ancestor
|
||||
id, ancestor
|
||||
);
|
||||
self.resolve_item(ancestor).as_module().map_or(
|
||||
false,
|
||||
|m| {
|
||||
self.resolve_item(ancestor)
|
||||
.as_module()
|
||||
.map_or(false, |m| {
|
||||
m.children().contains(&id)
|
||||
},
|
||||
)
|
||||
},
|
||||
)
|
||||
})
|
||||
})
|
||||
},
|
||||
"{:?} should be in some ancestor module's children set",
|
||||
id
|
||||
|
@ -1377,7 +1343,10 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
let mut used_params = HashMap::default();
|
||||
for &id in self.whitelisted_items() {
|
||||
used_params.entry(id).or_insert(
|
||||
id.self_template_params(self).into_iter().map(|p| p.into()).collect()
|
||||
id.self_template_params(self)
|
||||
.into_iter()
|
||||
.map(|p| p.into())
|
||||
.collect(),
|
||||
);
|
||||
}
|
||||
self.used_template_parameters = Some(used_params);
|
||||
|
@ -1493,12 +1462,16 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
///
|
||||
/// Panics if the id resolves to an item that is not a type.
|
||||
pub fn safe_resolve_type(&self, type_id: TypeId) -> Option<&Type> {
|
||||
self.resolve_item_fallible(type_id).map(|t| t.kind().expect_type())
|
||||
self.resolve_item_fallible(type_id)
|
||||
.map(|t| t.kind().expect_type())
|
||||
}
|
||||
|
||||
/// Resolve the given `ItemId` into an `Item`, or `None` if no such item
|
||||
/// exists.
|
||||
pub fn resolve_item_fallible<Id: Into<ItemId>>(&self, id: Id) -> Option<&Item> {
|
||||
pub fn resolve_item_fallible<Id: Into<ItemId>>(
|
||||
&self,
|
||||
id: Id,
|
||||
) -> Option<&Item> {
|
||||
self.items.get(id.into().0)?.as_ref()
|
||||
}
|
||||
|
||||
|
@ -1537,12 +1510,11 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
/// Returns a known semantic parent for a given definition.
|
||||
pub fn known_semantic_parent(
|
||||
&self,
|
||||
definition: clang::Cursor
|
||||
definition: clang::Cursor,
|
||||
) -> Option<ItemId> {
|
||||
self.semantic_parents.get(&definition).cloned()
|
||||
}
|
||||
|
||||
|
||||
/// Given a cursor pointing to the location of a template instantiation,
|
||||
/// return a tuple of the form `(declaration_cursor, declaration_id,
|
||||
/// num_expected_template_args)`.
|
||||
|
@ -1560,7 +1532,8 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
.and_then(|canon_decl| {
|
||||
self.get_resolved_type(&canon_decl).and_then(
|
||||
|template_decl_id| {
|
||||
let num_params = template_decl_id.num_self_template_params(self);
|
||||
let num_params =
|
||||
template_decl_id.num_self_template_params(self);
|
||||
if num_params == 0 {
|
||||
None
|
||||
} else {
|
||||
|
@ -1590,7 +1563,8 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
.cloned()
|
||||
})
|
||||
.and_then(|template_decl| {
|
||||
let num_template_params = template_decl.num_self_template_params(self);
|
||||
let num_template_params =
|
||||
template_decl.num_self_template_params(self);
|
||||
if num_template_params == 0 {
|
||||
None
|
||||
} else {
|
||||
|
@ -1644,11 +1618,12 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
ty: &clang::Type,
|
||||
location: clang::Cursor,
|
||||
) -> Option<TypeId> {
|
||||
let num_expected_args = self.resolve_type(template).num_self_template_params(self);
|
||||
let num_expected_args =
|
||||
self.resolve_type(template).num_self_template_params(self);
|
||||
if num_expected_args == 0 {
|
||||
warn!(
|
||||
"Tried to instantiate a template for which we could not \
|
||||
determine any template parameters"
|
||||
determine any template parameters"
|
||||
);
|
||||
return None;
|
||||
}
|
||||
|
@ -1668,13 +1643,14 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
// being specialized via the `location`'s type, and if we do not
|
||||
// filter it out, we'll add an extra layer of template instantiation
|
||||
// on accident.
|
||||
let idx = children.iter().position(|c| {
|
||||
c.kind() == clang_sys::CXCursor_TemplateRef
|
||||
});
|
||||
let idx = children
|
||||
.iter()
|
||||
.position(|c| c.kind() == clang_sys::CXCursor_TemplateRef);
|
||||
if let Some(idx) = idx {
|
||||
if children.iter().take(idx).all(|c| {
|
||||
c.kind() == clang_sys::CXCursor_NamespaceRef
|
||||
})
|
||||
if children
|
||||
.iter()
|
||||
.take(idx)
|
||||
.all(|c| c.kind() == clang_sys::CXCursor_NamespaceRef)
|
||||
{
|
||||
children = children.into_iter().skip(idx + 1).collect();
|
||||
}
|
||||
|
@ -1701,8 +1677,13 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
args.push(ty);
|
||||
}
|
||||
clang_sys::CXCursor_TemplateRef => {
|
||||
let (template_decl_cursor, template_decl_id, num_expected_template_args) =
|
||||
self.get_declaration_info_for_template_instantiation(child)?;
|
||||
let (
|
||||
template_decl_cursor,
|
||||
template_decl_id,
|
||||
num_expected_template_args,
|
||||
) = self.get_declaration_info_for_template_instantiation(
|
||||
child,
|
||||
)?;
|
||||
|
||||
if num_expected_template_args == 0 ||
|
||||
child.has_at_least_num_children(
|
||||
|
@ -1727,7 +1708,7 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
if args_len < num_expected_template_args {
|
||||
warn!(
|
||||
"Found a template instantiation without \
|
||||
enough template arguments"
|
||||
enough template arguments"
|
||||
);
|
||||
return None;
|
||||
}
|
||||
|
@ -1767,7 +1748,7 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
// Bypass all the validations in add_item explicitly.
|
||||
debug!(
|
||||
"instantiate_template: inserting nested \
|
||||
instantiation item: {:?}",
|
||||
instantiation item: {:?}",
|
||||
sub_item
|
||||
);
|
||||
self.add_item_to_module(&sub_item);
|
||||
|
@ -1795,7 +1776,7 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
// situation...
|
||||
warn!(
|
||||
"Found template instantiated with a const value; \
|
||||
bindgen can't handle this kind of template instantiation!"
|
||||
bindgen can't handle this kind of template instantiation!"
|
||||
);
|
||||
return None;
|
||||
}
|
||||
|
@ -1803,7 +1784,7 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
if args.len() != num_expected_args {
|
||||
warn!(
|
||||
"Found a template with an unexpected number of template \
|
||||
arguments"
|
||||
arguments"
|
||||
);
|
||||
return None;
|
||||
}
|
||||
|
@ -1845,9 +1826,9 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
self.types
|
||||
.get(&TypeKey::Declaration(*decl.cursor()))
|
||||
.or_else(|| {
|
||||
decl.cursor().usr().and_then(
|
||||
|usr| self.types.get(&TypeKey::USR(usr)),
|
||||
)
|
||||
decl.cursor()
|
||||
.usr()
|
||||
.and_then(|usr| self.types.get(&TypeKey::USR(usr)))
|
||||
})
|
||||
.cloned()
|
||||
}
|
||||
|
@ -1864,19 +1845,14 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
use clang_sys::{CXCursor_TypeAliasTemplateDecl, CXCursor_TypeRef};
|
||||
debug!(
|
||||
"builtin_or_resolved_ty: {:?}, {:?}, {:?}",
|
||||
ty,
|
||||
location,
|
||||
parent_id
|
||||
ty, location, parent_id
|
||||
);
|
||||
|
||||
if let Some(decl) = ty.canonical_declaration(location.as_ref()) {
|
||||
if let Some(id) = self.get_resolved_type(&decl) {
|
||||
debug!(
|
||||
"Already resolved ty {:?}, {:?}, {:?} {:?}",
|
||||
id,
|
||||
decl,
|
||||
ty,
|
||||
location
|
||||
id, decl, ty, location
|
||||
);
|
||||
// If the declaration already exists, then either:
|
||||
//
|
||||
|
@ -1908,7 +1884,8 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
return None;
|
||||
}
|
||||
|
||||
return self.instantiate_template(with_id, id, ty, location)
|
||||
return self
|
||||
.instantiate_template(with_id, id, ty, location)
|
||||
.or_else(|| Some(id));
|
||||
}
|
||||
|
||||
|
@ -1935,13 +1912,7 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
parent_id: Option<ItemId>,
|
||||
ty: &clang::Type,
|
||||
) -> TypeId {
|
||||
self.build_wrapper(
|
||||
with_id,
|
||||
wrapped_id,
|
||||
parent_id,
|
||||
ty,
|
||||
ty.is_const(),
|
||||
)
|
||||
self.build_wrapper(with_id, wrapped_id, parent_id, ty, ty.is_const())
|
||||
}
|
||||
|
||||
/// A wrapper over a type that adds a const qualifier explicitly.
|
||||
|
@ -1955,11 +1926,7 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
ty: &clang::Type,
|
||||
) -> TypeId {
|
||||
self.build_wrapper(
|
||||
with_id,
|
||||
wrapped_id,
|
||||
parent_id,
|
||||
ty,
|
||||
/* is_const = */ true,
|
||||
with_id, wrapped_id, parent_id, ty, /* is_const = */ true,
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -2001,12 +1968,8 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
CXType_Bool => TypeKind::Int(IntKind::Bool),
|
||||
CXType_Int => TypeKind::Int(IntKind::Int),
|
||||
CXType_UInt => TypeKind::Int(IntKind::UInt),
|
||||
CXType_Char_S => TypeKind::Int(IntKind::Char {
|
||||
is_signed: true,
|
||||
}),
|
||||
CXType_Char_U => TypeKind::Int(IntKind::Char {
|
||||
is_signed: false,
|
||||
}),
|
||||
CXType_Char_S => TypeKind::Int(IntKind::Char { is_signed: true }),
|
||||
CXType_Char_U => TypeKind::Int(IntKind::Char { is_signed: false }),
|
||||
CXType_SChar => TypeKind::Int(IntKind::SChar),
|
||||
CXType_UChar => TypeKind::Int(IntKind::UChar),
|
||||
CXType_Short => TypeKind::Int(IntKind::Short),
|
||||
|
@ -2032,13 +1995,10 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
CXType_Double => FloatKind::Double,
|
||||
CXType_LongDouble => FloatKind::LongDouble,
|
||||
CXType_Float128 => FloatKind::Float128,
|
||||
_ => {
|
||||
panic!(
|
||||
"Non floating-type complex? {:?}, {:?}",
|
||||
ty,
|
||||
float_type,
|
||||
)
|
||||
},
|
||||
_ => panic!(
|
||||
"Non floating-type complex? {:?}, {:?}",
|
||||
ty, float_type,
|
||||
),
|
||||
};
|
||||
TypeKind::Complex(float_kind)
|
||||
}
|
||||
|
@ -2050,8 +2010,13 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
let layout = ty.fallible_layout(self).ok();
|
||||
let ty = Type::new(Some(spelling), layout, type_kind, is_const);
|
||||
let id = self.next_item_id();
|
||||
let item =
|
||||
Item::new(id, None, None, self.root_module.into(), ItemKind::Type(ty));
|
||||
let item = Item::new(
|
||||
id,
|
||||
None,
|
||||
None,
|
||||
self.root_module.into(),
|
||||
ItemKind::Type(ty),
|
||||
);
|
||||
self.add_builtin_item(item);
|
||||
Some(id.as_type_id_unchecked())
|
||||
}
|
||||
|
@ -2067,7 +2032,9 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
}
|
||||
|
||||
/// Get the currently parsed macros.
|
||||
pub fn parsed_macros(&self) -> &StdHashMap<Vec<u8>, cexpr::expr::EvalResult> {
|
||||
pub fn parsed_macros(
|
||||
&self,
|
||||
) -> &StdHashMap<Vec<u8>, cexpr::expr::EvalResult> {
|
||||
debug_assert!(!self.in_codegen_phase());
|
||||
&self.parsed_macros
|
||||
}
|
||||
|
@ -2096,15 +2063,14 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
Entry::Vacant(entry) => {
|
||||
debug!(
|
||||
"Defining replacement for {:?} as {:?}",
|
||||
name,
|
||||
potential_ty
|
||||
name, potential_ty
|
||||
);
|
||||
entry.insert(potential_ty);
|
||||
}
|
||||
Entry::Occupied(occupied) => {
|
||||
warn!(
|
||||
"Replacement for {:?} already defined as {:?}; \
|
||||
ignoring duplicate replacement definition as {:?}",
|
||||
ignoring duplicate replacement definition as {:?}",
|
||||
name,
|
||||
occupied.get(),
|
||||
potential_ty
|
||||
|
@ -2115,7 +2081,11 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
|
||||
/// Has the item with the given `name` and `id` been replaced by another
|
||||
/// type?
|
||||
pub fn is_replaced_type<Id: Into<ItemId>>(&self, path: &[String], id: Id) -> bool {
|
||||
pub fn is_replaced_type<Id: Into<ItemId>>(
|
||||
&self,
|
||||
path: &[String],
|
||||
id: Id,
|
||||
) -> bool {
|
||||
let id = id.into();
|
||||
match self.replacements.get(path) {
|
||||
Some(replaced_by) if *replaced_by != id => true,
|
||||
|
@ -2185,7 +2155,8 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
}
|
||||
name if found_namespace_keyword => {
|
||||
if module_name.is_none() {
|
||||
module_name = Some(String::from_utf8_lossy(name).into_owned());
|
||||
module_name =
|
||||
Some(String::from_utf8_lossy(name).into_owned());
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
@ -2273,7 +2244,8 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
let _t = self.timer("compute_whitelisted_and_codegen_items");
|
||||
|
||||
let roots = {
|
||||
let mut roots = self.items()
|
||||
let mut roots = self
|
||||
.items()
|
||||
// Only consider roots that are enabled for codegen.
|
||||
.filter(|&(_, item)| item.is_enabled_for_codegen(self))
|
||||
.filter(|&(_, item)| {
|
||||
|
@ -2281,9 +2253,10 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
// game.
|
||||
if self.options().whitelisted_types.is_empty() &&
|
||||
self.options().whitelisted_functions.is_empty() &&
|
||||
self.options().whitelisted_vars.is_empty() {
|
||||
return true;
|
||||
}
|
||||
self.options().whitelisted_vars.is_empty()
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
// If this is a type that explicitly replaces another, we assume
|
||||
// you know what you're doing.
|
||||
|
@ -2324,7 +2297,7 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
TypeKind::ResolvedTypeRef(..) |
|
||||
TypeKind::Opaque |
|
||||
TypeKind::TypeParam => return true,
|
||||
_ => {},
|
||||
_ => {}
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -2338,7 +2311,6 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
return false;
|
||||
}
|
||||
|
||||
|
||||
let enum_ = match *ty.kind() {
|
||||
TypeKind::Enum(ref e) => e,
|
||||
_ => return false,
|
||||
|
@ -2354,9 +2326,7 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
prefix_path.push(variant.name().into());
|
||||
let name = prefix_path[1..].join("::");
|
||||
prefix_path.pop().unwrap();
|
||||
self.options()
|
||||
.whitelisted_vars
|
||||
.matches(&name)
|
||||
self.options().whitelisted_vars.matches(&name)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
@ -2386,14 +2356,16 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
self,
|
||||
roots.clone(),
|
||||
whitelisted_items_predicate,
|
||||
).collect::<ItemSet>();
|
||||
)
|
||||
.collect::<ItemSet>();
|
||||
|
||||
let codegen_items = if self.options().whitelist_recursively {
|
||||
WhitelistedItemsTraversal::new(
|
||||
self,
|
||||
roots.clone(),
|
||||
traversal::codegen_edges,
|
||||
).collect::<ItemSet>()
|
||||
)
|
||||
.collect::<ItemSet>()
|
||||
} else {
|
||||
whitelisted.clone()
|
||||
};
|
||||
|
@ -2439,7 +2411,11 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
let _t = self.timer("compute_cannot_derive_debug");
|
||||
assert!(self.cannot_derive_debug.is_none());
|
||||
if self.options.derive_debug {
|
||||
self.cannot_derive_debug = Some(as_cannot_derive_set(analyze::<CannotDerive>((self, DeriveTrait::Debug))));
|
||||
self.cannot_derive_debug =
|
||||
Some(as_cannot_derive_set(analyze::<CannotDerive>((
|
||||
self,
|
||||
DeriveTrait::Debug,
|
||||
))));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2463,7 +2439,10 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
assert!(self.cannot_derive_default.is_none());
|
||||
if self.options.derive_default {
|
||||
self.cannot_derive_default =
|
||||
Some(as_cannot_derive_set(analyze::<CannotDerive>((self, DeriveTrait::Default))));
|
||||
Some(as_cannot_derive_set(analyze::<CannotDerive>((
|
||||
self,
|
||||
DeriveTrait::Default,
|
||||
))));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2485,7 +2464,11 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
fn compute_cannot_derive_copy(&mut self) {
|
||||
let _t = self.timer("compute_cannot_derive_copy");
|
||||
assert!(self.cannot_derive_copy.is_none());
|
||||
self.cannot_derive_copy = Some(as_cannot_derive_set(analyze::<CannotDerive>((self, DeriveTrait::Copy))));
|
||||
self.cannot_derive_copy =
|
||||
Some(as_cannot_derive_set(analyze::<CannotDerive>((
|
||||
self,
|
||||
DeriveTrait::Copy,
|
||||
))));
|
||||
}
|
||||
|
||||
/// Compute whether we can derive hash.
|
||||
|
@ -2493,7 +2476,11 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
let _t = self.timer("compute_cannot_derive_hash");
|
||||
assert!(self.cannot_derive_hash.is_none());
|
||||
if self.options.derive_hash {
|
||||
self.cannot_derive_hash = Some(as_cannot_derive_set(analyze::<CannotDerive>((self, DeriveTrait::Hash))));
|
||||
self.cannot_derive_hash =
|
||||
Some(as_cannot_derive_set(analyze::<CannotDerive>((
|
||||
self,
|
||||
DeriveTrait::Hash,
|
||||
))));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2515,13 +2502,23 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
fn compute_cannot_derive_partialord_partialeq_or_eq(&mut self) {
|
||||
let _t = self.timer("compute_cannot_derive_partialord_partialeq_or_eq");
|
||||
assert!(self.cannot_derive_partialeq_or_partialord.is_none());
|
||||
if self.options.derive_partialord || self.options.derive_partialeq || self.options.derive_eq {
|
||||
self.cannot_derive_partialeq_or_partialord = Some(analyze::<CannotDerive>((self, DeriveTrait::PartialEqOrPartialOrd)));
|
||||
if self.options.derive_partialord ||
|
||||
self.options.derive_partialeq ||
|
||||
self.options.derive_eq
|
||||
{
|
||||
self.cannot_derive_partialeq_or_partialord =
|
||||
Some(analyze::<CannotDerive>((
|
||||
self,
|
||||
DeriveTrait::PartialEqOrPartialOrd,
|
||||
)));
|
||||
}
|
||||
}
|
||||
|
||||
/// Look up whether the item with `id` can derive `Partial{Eq,Ord}`.
|
||||
pub fn lookup_can_derive_partialeq_or_partialord<Id: Into<ItemId>>(&self, id: Id) -> CanDerive {
|
||||
pub fn lookup_can_derive_partialeq_or_partialord<Id: Into<ItemId>>(
|
||||
&self,
|
||||
id: Id,
|
||||
) -> CanDerive {
|
||||
let id = id.into();
|
||||
assert!(
|
||||
self.in_codegen_phase(),
|
||||
|
@ -2530,7 +2527,8 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
|
||||
// Look up the computed value for whether the item with `id` can
|
||||
// derive partialeq or not.
|
||||
self.cannot_derive_partialeq_or_partialord.as_ref()
|
||||
self.cannot_derive_partialeq_or_partialord
|
||||
.as_ref()
|
||||
.unwrap()
|
||||
.get(&id)
|
||||
.cloned()
|
||||
|
@ -2561,7 +2559,10 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
}
|
||||
|
||||
/// Look up whether the item with `id` has type parameter in array or not.
|
||||
pub fn lookup_has_type_param_in_array<Id: Into<ItemId>>(&self, id: Id) -> bool {
|
||||
pub fn lookup_has_type_param_in_array<Id: Into<ItemId>>(
|
||||
&self,
|
||||
id: Id,
|
||||
) -> bool {
|
||||
assert!(
|
||||
self.in_codegen_phase(),
|
||||
"We only compute has array when we enter codegen"
|
||||
|
@ -2569,7 +2570,10 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
|
||||
// Look up the computed value for whether the item with `id` has
|
||||
// type parameter in array or not.
|
||||
self.has_type_param_in_array.as_ref().unwrap().contains(&id.into())
|
||||
self.has_type_param_in_array
|
||||
.as_ref()
|
||||
.unwrap()
|
||||
.contains(&id.into())
|
||||
}
|
||||
|
||||
/// Compute whether the type has float.
|
||||
|
@ -2583,8 +2587,10 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
|
||||
/// Look up whether the item with `id` has array or not.
|
||||
pub fn lookup_has_float<Id: Into<ItemId>>(&self, id: Id) -> bool {
|
||||
assert!(self.in_codegen_phase(),
|
||||
"We only compute has float when we enter codegen");
|
||||
assert!(
|
||||
self.in_codegen_phase(),
|
||||
"We only compute has float when we enter codegen"
|
||||
);
|
||||
|
||||
// Look up the computed value for whether the item with `id` has
|
||||
// float or not.
|
||||
|
@ -2627,7 +2633,7 @@ impl ItemId {
|
|||
|
||||
impl<T> From<T> for ItemResolver
|
||||
where
|
||||
T: Into<ItemId>
|
||||
T: Into<ItemId>,
|
||||
{
|
||||
fn from(id: T) -> ItemResolver {
|
||||
ItemResolver::new(id)
|
||||
|
@ -2667,14 +2673,16 @@ impl ItemResolver {
|
|||
let ty_kind = item.as_type().map(|t| t.kind());
|
||||
match ty_kind {
|
||||
Some(&TypeKind::ResolvedTypeRef(next_id))
|
||||
if self.through_type_refs => {
|
||||
if self.through_type_refs =>
|
||||
{
|
||||
id = next_id.into();
|
||||
}
|
||||
// We intentionally ignore template aliases here, as they are
|
||||
// more complicated, and don't represent a simple renaming of
|
||||
// some type.
|
||||
Some(&TypeKind::Alias(next_id))
|
||||
if self.through_type_aliases => {
|
||||
if self.through_type_aliases =>
|
||||
{
|
||||
id = next_id.into();
|
||||
}
|
||||
_ => return item,
|
||||
|
@ -2696,10 +2704,7 @@ impl PartialType {
|
|||
/// Construct a new `PartialType`.
|
||||
pub fn new(decl: Cursor, id: ItemId) -> PartialType {
|
||||
// assert!(decl == decl.canonical());
|
||||
PartialType {
|
||||
decl: decl,
|
||||
id: id,
|
||||
}
|
||||
PartialType { decl: decl, id: id }
|
||||
}
|
||||
|
||||
/// The cursor pointing to this partial type's declaration location.
|
||||
|
@ -2715,10 +2720,7 @@ impl PartialType {
|
|||
}
|
||||
|
||||
impl TemplateParameters for PartialType {
|
||||
fn self_template_params(
|
||||
&self,
|
||||
_ctx: &BindgenContext,
|
||||
) -> Vec<TypeId> {
|
||||
fn self_template_params(&self, _ctx: &BindgenContext) -> Vec<TypeId> {
|
||||
// Maybe at some point we will eagerly parse named types, but for now we
|
||||
// don't and this information is unavailable.
|
||||
vec![]
|
||||
|
|
|
@ -92,10 +92,10 @@ pub trait CanDeriveOrd {
|
|||
///
|
||||
/// Initially we assume that we can derive trait for all types and then
|
||||
/// update our understanding as we learn more about each type.
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Ord)]
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
|
||||
pub enum CanDerive {
|
||||
/// No, we cannot.
|
||||
No,
|
||||
/// Yes, we can derive automatically.
|
||||
Yes,
|
||||
|
||||
/// The only thing that stops us from automatically deriving is that
|
||||
/// array with more than maximum number of elements is used.
|
||||
|
@ -103,8 +103,8 @@ pub enum CanDerive {
|
|||
/// This means we probably can "manually" implement such trait.
|
||||
Manually,
|
||||
|
||||
/// Yes, we can derive automatically.
|
||||
Yes,
|
||||
/// No, we cannot.
|
||||
No,
|
||||
}
|
||||
|
||||
impl Default for CanDerive {
|
||||
|
@ -113,22 +113,6 @@ impl Default for CanDerive {
|
|||
}
|
||||
}
|
||||
|
||||
impl cmp::PartialOrd for CanDerive {
|
||||
fn partial_cmp(&self, rhs: &Self) -> Option<cmp::Ordering> {
|
||||
use self::CanDerive::*;
|
||||
|
||||
let ordering = match (*self, *rhs) {
|
||||
(x, y) if x == y => cmp::Ordering::Equal,
|
||||
(No, _) => cmp::Ordering::Greater,
|
||||
(_, No) => cmp::Ordering::Less,
|
||||
(Manually, _) => cmp::Ordering::Greater,
|
||||
(_, Manually) => cmp::Ordering::Less,
|
||||
_ => unreachable!()
|
||||
};
|
||||
Some(ordering)
|
||||
}
|
||||
}
|
||||
|
||||
impl CanDerive {
|
||||
/// Take the least upper bound of `self` and `rhs`.
|
||||
pub fn join(self, rhs: Self) -> Self {
|
||||
|
|
|
@ -60,11 +60,7 @@ where
|
|||
id.as_usize(),
|
||||
sub_id.as_usize(),
|
||||
edge_kind,
|
||||
if is_whitelisted {
|
||||
"black"
|
||||
} else {
|
||||
"gray"
|
||||
}
|
||||
if is_whitelisted { "black" } else { "gray" }
|
||||
) {
|
||||
Ok(_) => {}
|
||||
Err(e) => err = Some(Err(e)),
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
//! Intermediate representation for C/C++ enumerations.
|
||||
|
||||
use super::super::codegen::EnumVariation;
|
||||
use super::context::{BindgenContext, TypeId};
|
||||
use super::item::Item;
|
||||
use super::super::codegen::EnumVariation;
|
||||
use super::ty::TypeKind;
|
||||
use clang;
|
||||
use ir::annotations::Annotations;
|
||||
|
@ -38,10 +38,7 @@ pub struct Enum {
|
|||
impl Enum {
|
||||
/// Construct a new `Enum` with the given representation and variants.
|
||||
pub fn new(repr: Option<TypeId>, variants: Vec<EnumVariant>) -> Self {
|
||||
Enum {
|
||||
repr,
|
||||
variants,
|
||||
}
|
||||
Enum { repr, variants }
|
||||
}
|
||||
|
||||
/// Get this enumeration's representation.
|
||||
|
@ -67,15 +64,15 @@ impl Enum {
|
|||
}
|
||||
|
||||
let declaration = ty.declaration().canonical();
|
||||
let repr = declaration.enum_type().and_then(|et| {
|
||||
Item::from_ty(&et, declaration, None, ctx).ok()
|
||||
});
|
||||
let repr = declaration
|
||||
.enum_type()
|
||||
.and_then(|et| Item::from_ty(&et, declaration, None, ctx).ok());
|
||||
let mut variants = vec![];
|
||||
|
||||
// Assume signedness since the default type by the C standard is an int.
|
||||
let is_signed = repr.and_then(
|
||||
|r| ctx.resolve_type(r).safe_canonical_type(ctx),
|
||||
).map_or(true, |ty| match *ty.kind() {
|
||||
let is_signed = repr
|
||||
.and_then(|r| ctx.resolve_type(r).safe_canonical_type(ctx))
|
||||
.map_or(true, |ty| match *ty.kind() {
|
||||
TypeKind::Int(ref int_kind) => int_kind.is_signed(),
|
||||
ref other => {
|
||||
panic!("Since when enums can be non-integers? {:?}", other)
|
||||
|
@ -101,9 +98,11 @@ impl Enum {
|
|||
if let Some(val) = value {
|
||||
let name = cursor.spelling();
|
||||
let annotations = Annotations::new(&cursor);
|
||||
let custom_behavior = ctx.parse_callbacks()
|
||||
let custom_behavior = ctx
|
||||
.parse_callbacks()
|
||||
.and_then(|callbacks| {
|
||||
callbacks.enum_variant_behavior(type_name, &name, val)
|
||||
callbacks
|
||||
.enum_variant_behavior(type_name, &name, val)
|
||||
})
|
||||
.or_else(|| {
|
||||
let annotations = annotations.as_ref()?;
|
||||
|
@ -116,12 +115,17 @@ impl Enum {
|
|||
}
|
||||
});
|
||||
|
||||
let name = ctx.parse_callbacks()
|
||||
let name = ctx
|
||||
.parse_callbacks()
|
||||
.and_then(|callbacks| {
|
||||
callbacks.enum_variant_name(type_name, &name, val)
|
||||
})
|
||||
.or_else(|| {
|
||||
annotations.as_ref()?.use_instead_of()?.last().cloned()
|
||||
annotations
|
||||
.as_ref()?
|
||||
.use_instead_of()?
|
||||
.last()
|
||||
.cloned()
|
||||
})
|
||||
.unwrap_or(name);
|
||||
|
||||
|
@ -139,7 +143,12 @@ impl Enum {
|
|||
Ok(Enum::new(repr, variants))
|
||||
}
|
||||
|
||||
fn is_matching_enum(&self, ctx: &BindgenContext, enums: &RegexSet, item: &Item) -> bool {
|
||||
fn is_matching_enum(
|
||||
&self,
|
||||
ctx: &BindgenContext,
|
||||
enums: &RegexSet,
|
||||
item: &Item,
|
||||
) -> bool {
|
||||
let path = item.canonical_path(ctx);
|
||||
let enum_ty = item.expect_type();
|
||||
|
||||
|
@ -156,18 +165,46 @@ impl Enum {
|
|||
}
|
||||
|
||||
/// Returns the final representation of the enum.
|
||||
pub fn computed_enum_variation(&self, ctx: &BindgenContext, item: &Item) -> EnumVariation {
|
||||
pub fn computed_enum_variation(
|
||||
&self,
|
||||
ctx: &BindgenContext,
|
||||
item: &Item,
|
||||
) -> EnumVariation {
|
||||
// ModuleConsts has higher precedence before Rust in order to avoid
|
||||
// problems with overlapping match patterns.
|
||||
if self.is_matching_enum(ctx, &ctx.options().constified_enum_modules, item) {
|
||||
if self.is_matching_enum(
|
||||
ctx,
|
||||
&ctx.options().constified_enum_modules,
|
||||
item,
|
||||
) {
|
||||
EnumVariation::ModuleConsts
|
||||
} else if self.is_matching_enum(ctx, &ctx.options().bitfield_enums, item) {
|
||||
} else if self.is_matching_enum(
|
||||
ctx,
|
||||
&ctx.options().bitfield_enums,
|
||||
item,
|
||||
) {
|
||||
EnumVariation::Bitfield
|
||||
} else if self.is_matching_enum(ctx, &ctx.options().rustified_enums, item) {
|
||||
EnumVariation::Rust { non_exhaustive: false }
|
||||
} else if self.is_matching_enum(ctx, &ctx.options().rustified_non_exhaustive_enums, item) {
|
||||
EnumVariation::Rust { non_exhaustive: true }
|
||||
} else if self.is_matching_enum(ctx, &ctx.options().constified_enums, item) {
|
||||
} else if self.is_matching_enum(
|
||||
ctx,
|
||||
&ctx.options().rustified_enums,
|
||||
item,
|
||||
) {
|
||||
EnumVariation::Rust {
|
||||
non_exhaustive: false,
|
||||
}
|
||||
} else if self.is_matching_enum(
|
||||
ctx,
|
||||
&ctx.options().rustified_non_exhaustive_enums,
|
||||
item,
|
||||
) {
|
||||
EnumVariation::Rust {
|
||||
non_exhaustive: true,
|
||||
}
|
||||
} else if self.is_matching_enum(
|
||||
ctx,
|
||||
&ctx.options().constified_enums,
|
||||
item,
|
||||
) {
|
||||
EnumVariation::Consts
|
||||
} else {
|
||||
ctx.options().default_enum_style
|
||||
|
@ -235,16 +272,14 @@ impl EnumVariant {
|
|||
/// Returns whether this variant should be enforced to be a constant by code
|
||||
/// generation.
|
||||
pub fn force_constification(&self) -> bool {
|
||||
self.custom_behavior.map_or(false, |b| {
|
||||
b == EnumVariantCustomBehavior::Constify
|
||||
})
|
||||
self.custom_behavior
|
||||
.map_or(false, |b| b == EnumVariantCustomBehavior::Constify)
|
||||
}
|
||||
|
||||
/// Returns whether the current variant should be hidden completely from the
|
||||
/// resulting rust enum.
|
||||
pub fn hidden(&self) -> bool {
|
||||
self.custom_behavior.map_or(false, |b| {
|
||||
b == EnumVariantCustomBehavior::Hide
|
||||
})
|
||||
self.custom_behavior
|
||||
.map_or(false, |b| b == EnumVariantCustomBehavior::Hide)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -9,9 +9,9 @@ use super::ty::TypeKind;
|
|||
use clang;
|
||||
use clang_sys::{self, CXCallingConv};
|
||||
use parse::{ClangItemParser, ClangSubItemParser, ParseError, ParseResult};
|
||||
use proc_macro2;
|
||||
use quote;
|
||||
use quote::TokenStreamExt;
|
||||
use proc_macro2;
|
||||
use std::io;
|
||||
|
||||
const RUST_DERIVE_FUNPTR_LIMIT: usize = 12;
|
||||
|
@ -30,18 +30,18 @@ impl FunctionKind {
|
|||
// FIXME(emilio): Deduplicate logic with `ir::comp`.
|
||||
Some(match cursor.kind() {
|
||||
clang_sys::CXCursor_FunctionDecl => FunctionKind::Function,
|
||||
clang_sys::CXCursor_Constructor => FunctionKind::Method(
|
||||
MethodKind::Constructor,
|
||||
),
|
||||
clang_sys::CXCursor_Destructor => FunctionKind::Method(
|
||||
if cursor.method_is_virtual() {
|
||||
clang_sys::CXCursor_Constructor => {
|
||||
FunctionKind::Method(MethodKind::Constructor)
|
||||
}
|
||||
clang_sys::CXCursor_Destructor => {
|
||||
FunctionKind::Method(if cursor.method_is_virtual() {
|
||||
MethodKind::VirtualDestructor {
|
||||
pure_virtual: cursor.method_is_pure_virtual(),
|
||||
}
|
||||
} else {
|
||||
MethodKind::Destructor
|
||||
}
|
||||
),
|
||||
})
|
||||
}
|
||||
clang_sys::CXCursor_CXXMethod => {
|
||||
if cursor.method_is_virtual() {
|
||||
FunctionKind::Method(MethodKind::Virtual {
|
||||
|
@ -64,7 +64,7 @@ pub enum Linkage {
|
|||
/// Externally visible and can be linked against
|
||||
External,
|
||||
/// Not exposed externally. 'static inline' functions will have this kind of linkage
|
||||
Internal
|
||||
Internal,
|
||||
}
|
||||
|
||||
/// A function declaration, with a signature, arguments, and argument names.
|
||||
|
@ -100,7 +100,7 @@ impl Function {
|
|||
signature: TypeId,
|
||||
comment: Option<String>,
|
||||
kind: FunctionKind,
|
||||
linkage: Linkage
|
||||
linkage: Linkage,
|
||||
) -> Self {
|
||||
Function {
|
||||
name,
|
||||
|
@ -136,7 +136,6 @@ impl Function {
|
|||
pub fn linkage(&self) -> Linkage {
|
||||
self.linkage
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
impl DotAttributes for Function {
|
||||
|
@ -325,16 +324,16 @@ fn args_from_ty_and_cursor(
|
|||
cursor_args
|
||||
.map(Some)
|
||||
.chain(std::iter::repeat(None))
|
||||
.zip(
|
||||
type_args
|
||||
.map(Some)
|
||||
.chain(std::iter::repeat(None))
|
||||
)
|
||||
.zip(type_args.map(Some).chain(std::iter::repeat(None)))
|
||||
.take_while(|(cur, ty)| cur.is_some() || ty.is_some())
|
||||
.map(|(arg_cur, arg_ty)| {
|
||||
let name = arg_cur
|
||||
.map(|a| a.spelling())
|
||||
.and_then(|name| if name.is_empty() { None} else { Some(name) });
|
||||
let name = arg_cur.map(|a| a.spelling()).and_then(|name| {
|
||||
if name.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(name)
|
||||
}
|
||||
});
|
||||
|
||||
let cursor = arg_cur.unwrap_or(*cursor);
|
||||
let ty = arg_ty.unwrap_or(cursor.cur_type());
|
||||
|
@ -404,7 +403,7 @@ impl FunctionSig {
|
|||
CXCursor_ObjCInstanceMethodDecl |
|
||||
CXCursor_ObjCClassMethodDecl => {
|
||||
args_from_ty_and_cursor(&ty, &cursor, ctx)
|
||||
},
|
||||
}
|
||||
_ => {
|
||||
// For non-CXCursor_FunctionDecl, visiting the cursor's children
|
||||
// is the only reliable way to get parameter names.
|
||||
|
@ -424,8 +423,7 @@ impl FunctionSig {
|
|||
}
|
||||
};
|
||||
|
||||
let must_use =
|
||||
ctx.options().enable_function_attribute_detection &&
|
||||
let must_use = ctx.options().enable_function_attribute_detection &&
|
||||
cursor.has_simple_attr("warn_unused_result");
|
||||
let is_method = kind == CXCursor_CXXMethod;
|
||||
let is_constructor = kind == CXCursor_Constructor;
|
||||
|
@ -475,9 +473,9 @@ impl FunctionSig {
|
|||
let ty_ret_type = if kind == CXCursor_ObjCInstanceMethodDecl ||
|
||||
kind == CXCursor_ObjCClassMethodDecl
|
||||
{
|
||||
ty.ret_type().or_else(|| cursor.ret_type()).ok_or(
|
||||
ParseError::Continue,
|
||||
)?
|
||||
ty.ret_type()
|
||||
.or_else(|| cursor.ret_type())
|
||||
.ok_or(ParseError::Continue)?
|
||||
} else {
|
||||
ty.ret_type().ok_or(ParseError::Continue)?
|
||||
};
|
||||
|
@ -583,12 +581,11 @@ impl ClangSubItemParser for Function {
|
|||
let linkage = match linkage {
|
||||
CXLinkage_External | CXLinkage_UniqueExternal => Linkage::External,
|
||||
CXLinkage_Internal => Linkage::Internal,
|
||||
_ => return Err(ParseError::Continue)
|
||||
_ => return Err(ParseError::Continue),
|
||||
};
|
||||
|
||||
// Grab the signature using Item::from_ty.
|
||||
let sig =
|
||||
Item::from_ty(&cursor.cur_type(), cursor, None, context)?;
|
||||
let sig = Item::from_ty(&cursor.cur_type(), cursor, None, context)?;
|
||||
|
||||
let mut name = cursor.spelling();
|
||||
assert!(!name.is_empty(), "Empty function name?");
|
||||
|
@ -610,7 +607,8 @@ impl ClangSubItemParser for Function {
|
|||
let mangled_name = cursor_mangling(context, &cursor);
|
||||
let comment = cursor.raw_comment();
|
||||
|
||||
let function = Self::new(name, mangled_name, sig, comment, kind, linkage);
|
||||
let function =
|
||||
Self::new(name, mangled_name, sig, comment, kind, linkage);
|
||||
Ok(ParseResult::New(function, Some(cursor)))
|
||||
}
|
||||
}
|
||||
|
|
|
@ -101,9 +101,7 @@ impl IntKind {
|
|||
|
||||
Char { is_signed } => is_signed,
|
||||
|
||||
Custom {
|
||||
is_signed, ..
|
||||
} => is_signed,
|
||||
Custom { is_signed, .. } => is_signed,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -113,14 +111,7 @@ impl IntKind {
|
|||
pub fn known_size(&self) -> Option<usize> {
|
||||
use self::IntKind::*;
|
||||
Some(match *self {
|
||||
Bool |
|
||||
UChar |
|
||||
SChar |
|
||||
U8 |
|
||||
I8 |
|
||||
Char {
|
||||
..
|
||||
} => 1,
|
||||
Bool | UChar | SChar | U8 | I8 | Char { .. } => 1,
|
||||
U16 | I16 => 2,
|
||||
U32 | I32 => 4,
|
||||
U64 | I64 => 8,
|
||||
|
|
|
@ -1,19 +1,20 @@
|
|||
//! Bindgen's core intermediate representation type.
|
||||
|
||||
use super::super::codegen::{EnumVariation, CONSTIFIED_ENUM_MODULE_REPR_NAME};
|
||||
use super::analysis::{HasVtable, HasVtableResult, Sizedness, SizednessResult};
|
||||
use super::annotations::Annotations;
|
||||
use super::comment;
|
||||
use super::comp::MethodKind;
|
||||
use super::context::{BindgenContext, ItemId, PartialType, TypeId};
|
||||
use super::derive::{CanDeriveCopy, CanDeriveDebug, CanDeriveDefault,
|
||||
CanDeriveHash, CanDerivePartialOrd, CanDeriveOrd,
|
||||
CanDerivePartialEq, CanDeriveEq};
|
||||
use super::derive::{
|
||||
CanDeriveCopy, CanDeriveDebug, CanDeriveDefault, CanDeriveEq,
|
||||
CanDeriveHash, CanDeriveOrd, CanDerivePartialEq, CanDerivePartialOrd,
|
||||
};
|
||||
use super::dot::DotAttributes;
|
||||
use super::function::{Function, FunctionKind};
|
||||
use super::item_kind::ItemKind;
|
||||
use super::layout::Opaque;
|
||||
use super::module::Module;
|
||||
use super::super::codegen::{CONSTIFIED_ENUM_MODULE_REPR_NAME, EnumVariation};
|
||||
use super::template::{AsTemplateParam, TemplateParameters};
|
||||
use super::traversal::{EdgeKind, Trace, Tracer};
|
||||
use super::ty::{Type, TypeKind};
|
||||
|
@ -94,10 +95,7 @@ pub trait HasFloat {
|
|||
/// up to (but not including) the implicit root module.
|
||||
pub trait ItemAncestors {
|
||||
/// Get an iterable over this item's ancestors.
|
||||
fn ancestors<'a>(
|
||||
&self,
|
||||
ctx: &'a BindgenContext,
|
||||
) -> ItemAncestorsIter<'a>;
|
||||
fn ancestors<'a>(&self, ctx: &'a BindgenContext) -> ItemAncestorsIter<'a>;
|
||||
}
|
||||
|
||||
cfg_if! {
|
||||
|
@ -158,7 +156,8 @@ impl<'a> Iterator for ItemAncestorsIter<'a> {
|
|||
|
||||
impl<T> AsTemplateParam for T
|
||||
where
|
||||
T: Copy + Into<ItemId> {
|
||||
T: Copy + Into<ItemId>,
|
||||
{
|
||||
type Extra = ();
|
||||
|
||||
fn as_template_param(
|
||||
|
@ -201,7 +200,7 @@ impl AsTemplateParam for ItemKind {
|
|||
|
||||
impl<T> ItemCanonicalName for T
|
||||
where
|
||||
T: Copy + Into<ItemId>
|
||||
T: Copy + Into<ItemId>,
|
||||
{
|
||||
fn canonical_name(&self, ctx: &BindgenContext) -> String {
|
||||
debug_assert!(
|
||||
|
@ -213,8 +212,8 @@ where
|
|||
}
|
||||
|
||||
impl<T> ItemCanonicalPath for T
|
||||
where
|
||||
T: Copy + Into<ItemId>
|
||||
where
|
||||
T: Copy + Into<ItemId>,
|
||||
{
|
||||
fn namespace_aware_canonical_path(
|
||||
&self,
|
||||
|
@ -238,28 +237,22 @@ impl<T> ItemCanonicalPath for T
|
|||
|
||||
impl<T> ItemAncestors for T
|
||||
where
|
||||
T: Copy + Into<ItemId>
|
||||
T: Copy + Into<ItemId>,
|
||||
{
|
||||
fn ancestors<'a>(
|
||||
&self,
|
||||
ctx: &'a BindgenContext,
|
||||
) -> ItemAncestorsIter<'a> {
|
||||
fn ancestors<'a>(&self, ctx: &'a BindgenContext) -> ItemAncestorsIter<'a> {
|
||||
ItemAncestorsIter::new(ctx, *self)
|
||||
}
|
||||
}
|
||||
|
||||
impl ItemAncestors for Item {
|
||||
fn ancestors<'a>(
|
||||
&self,
|
||||
ctx: &'a BindgenContext,
|
||||
) -> ItemAncestorsIter<'a> {
|
||||
fn ancestors<'a>(&self, ctx: &'a BindgenContext) -> ItemAncestorsIter<'a> {
|
||||
self.id().ancestors(ctx)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Id> Trace for Id
|
||||
where
|
||||
Id: Copy + Into<ItemId>
|
||||
Id: Copy + Into<ItemId>,
|
||||
{
|
||||
type Extra = ();
|
||||
|
||||
|
@ -495,10 +488,10 @@ impl Item {
|
|||
ctx.options().conservative_inline_namespaces
|
||||
})
|
||||
})
|
||||
.count() + 1
|
||||
.count() +
|
||||
1
|
||||
}
|
||||
|
||||
|
||||
/// Get this `Item`'s comment, if it has any, already preprocessed and with
|
||||
/// the right indentation.
|
||||
pub fn comment(&self, ctx: &BindgenContext) -> Option<String> {
|
||||
|
@ -567,7 +560,8 @@ impl Item {
|
|||
pub fn is_toplevel(&self, ctx: &BindgenContext) -> bool {
|
||||
// FIXME: Workaround for some types falling behind when parsing weird
|
||||
// stl classes, for example.
|
||||
if ctx.options().enable_cxx_namespaces && self.kind().is_module() &&
|
||||
if ctx.options().enable_cxx_namespaces &&
|
||||
self.kind().is_module() &&
|
||||
self.id() != ctx.root_module()
|
||||
{
|
||||
return false;
|
||||
|
@ -583,7 +577,7 @@ impl Item {
|
|||
if parent_item.id() == ctx.root_module() {
|
||||
return true;
|
||||
} else if ctx.options().enable_cxx_namespaces ||
|
||||
!parent_item.kind().is_module()
|
||||
!parent_item.kind().is_module()
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
@ -638,18 +632,17 @@ impl Item {
|
|||
let path = self.path_for_whitelisting(ctx);
|
||||
let name = path[1..].join("::");
|
||||
ctx.options().blacklisted_items.matches(&name) ||
|
||||
match self.kind {
|
||||
ItemKind::Type(..) => {
|
||||
ctx.options().blacklisted_types.matches(&name) ||
|
||||
ctx.is_replaced_type(&path, self.id)
|
||||
match self.kind {
|
||||
ItemKind::Type(..) => {
|
||||
ctx.options().blacklisted_types.matches(&name) ||
|
||||
ctx.is_replaced_type(&path, self.id)
|
||||
}
|
||||
ItemKind::Function(..) => {
|
||||
ctx.options().blacklisted_functions.matches(&name)
|
||||
}
|
||||
// TODO: Add constant / namespace blacklisting?
|
||||
ItemKind::Var(..) | ItemKind::Module(..) => false,
|
||||
}
|
||||
ItemKind::Function(..) => {
|
||||
ctx.options().blacklisted_functions.matches(&name)
|
||||
}
|
||||
// TODO: Add constant / namespace blacklisting?
|
||||
ItemKind::Var(..) |
|
||||
ItemKind::Module(..) => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Is this a reference to another type?
|
||||
|
@ -666,10 +659,7 @@ impl Item {
|
|||
}
|
||||
|
||||
/// Take out item NameOptions
|
||||
pub fn name<'a>(
|
||||
&'a self,
|
||||
ctx: &'a BindgenContext,
|
||||
) -> NameOptions<'a> {
|
||||
pub fn name<'a>(&'a self, ctx: &'a BindgenContext) -> NameOptions<'a> {
|
||||
NameOptions::new(self, ctx)
|
||||
}
|
||||
|
||||
|
@ -687,17 +677,15 @@ impl Item {
|
|||
}
|
||||
|
||||
match *item.kind() {
|
||||
ItemKind::Type(ref ty) => {
|
||||
match *ty.kind() {
|
||||
TypeKind::ResolvedTypeRef(inner) => {
|
||||
item = ctx.resolve_item(inner);
|
||||
}
|
||||
TypeKind::TemplateInstantiation(ref inst) => {
|
||||
item = ctx.resolve_item(inst.template_definition());
|
||||
}
|
||||
_ => return item.id(),
|
||||
ItemKind::Type(ref ty) => match *ty.kind() {
|
||||
TypeKind::ResolvedTypeRef(inner) => {
|
||||
item = ctx.resolve_item(inner);
|
||||
}
|
||||
}
|
||||
TypeKind::TemplateInstantiation(ref inst) => {
|
||||
item = ctx.resolve_item(inst.template_definition());
|
||||
}
|
||||
_ => return item.id(),
|
||||
},
|
||||
_ => return item.id(),
|
||||
}
|
||||
}
|
||||
|
@ -752,7 +740,8 @@ impl Item {
|
|||
if let TypeKind::Comp(ref ci) = *ty.kind() {
|
||||
// All the constructors have the same name, so no need to
|
||||
// resolve and check.
|
||||
return ci.constructors()
|
||||
return ci
|
||||
.constructors()
|
||||
.iter()
|
||||
.position(|c| *c == self.id())
|
||||
.or_else(|| {
|
||||
|
@ -937,7 +926,8 @@ impl Item {
|
|||
|
||||
match *type_.kind() {
|
||||
TypeKind::Enum(ref enum_) => {
|
||||
enum_.computed_enum_variation(ctx, self) == EnumVariation::ModuleConsts
|
||||
enum_.computed_enum_variation(ctx, self) ==
|
||||
EnumVariation::ModuleConsts
|
||||
}
|
||||
TypeKind::Alias(inner_id) => {
|
||||
// TODO(emilio): Make this "hop through type aliases that aren't
|
||||
|
@ -962,17 +952,21 @@ impl Item {
|
|||
ItemKind::Module(..) => true,
|
||||
ItemKind::Var(_) => cc.vars(),
|
||||
ItemKind::Type(_) => cc.types(),
|
||||
ItemKind::Function(ref f) => {
|
||||
match f.kind() {
|
||||
FunctionKind::Function => cc.functions(),
|
||||
FunctionKind::Method(MethodKind::Constructor) => cc.constructors(),
|
||||
FunctionKind::Method(MethodKind::Destructor) |
|
||||
FunctionKind::Method(MethodKind::VirtualDestructor { .. }) => cc.destructors(),
|
||||
FunctionKind::Method(MethodKind::Static) |
|
||||
FunctionKind::Method(MethodKind::Normal) |
|
||||
FunctionKind::Method(MethodKind::Virtual { .. }) => cc.methods(),
|
||||
ItemKind::Function(ref f) => match f.kind() {
|
||||
FunctionKind::Function => cc.functions(),
|
||||
FunctionKind::Method(MethodKind::Constructor) => {
|
||||
cc.constructors()
|
||||
}
|
||||
}
|
||||
FunctionKind::Method(MethodKind::Destructor) |
|
||||
FunctionKind::Method(MethodKind::VirtualDestructor {
|
||||
..
|
||||
}) => cc.destructors(),
|
||||
FunctionKind::Method(MethodKind::Static) |
|
||||
FunctionKind::Method(MethodKind::Normal) |
|
||||
FunctionKind::Method(MethodKind::Virtual { .. }) => {
|
||||
cc.methods()
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -982,7 +976,11 @@ impl Item {
|
|||
self.compute_path(ctx, UserMangled::No)
|
||||
}
|
||||
|
||||
fn compute_path(&self, ctx: &BindgenContext, mangled: UserMangled) -> Vec<String> {
|
||||
fn compute_path(
|
||||
&self,
|
||||
ctx: &BindgenContext,
|
||||
mangled: UserMangled,
|
||||
) -> Vec<String> {
|
||||
if let Some(path) = self.annotations().use_instead_of() {
|
||||
let mut ret =
|
||||
vec![ctx.resolve_item(ctx.root_module()).name(ctx).get()];
|
||||
|
@ -1017,7 +1015,7 @@ impl Item {
|
|||
|
||||
impl<T> IsOpaque for T
|
||||
where
|
||||
T: Copy + Into<ItemId>
|
||||
T: Copy + Into<ItemId>,
|
||||
{
|
||||
type Extra = ();
|
||||
|
||||
|
@ -1046,7 +1044,7 @@ impl IsOpaque for Item {
|
|||
|
||||
impl<T> HasVtable for T
|
||||
where
|
||||
T: Copy + Into<ItemId>
|
||||
T: Copy + Into<ItemId>,
|
||||
{
|
||||
fn has_vtable(&self, ctx: &BindgenContext) -> bool {
|
||||
let id: ItemId = (*self).into();
|
||||
|
@ -1079,7 +1077,7 @@ impl HasVtable for Item {
|
|||
|
||||
impl<T> Sizedness for T
|
||||
where
|
||||
T: Copy + Into<ItemId>
|
||||
T: Copy + Into<ItemId>,
|
||||
{
|
||||
fn sizedness(&self, ctx: &BindgenContext) -> SizednessResult {
|
||||
let id: ItemId = (*self).into();
|
||||
|
@ -1096,7 +1094,7 @@ impl Sizedness for Item {
|
|||
|
||||
impl<T> HasTypeParamInArray for T
|
||||
where
|
||||
T: Copy + Into<ItemId>
|
||||
T: Copy + Into<ItemId>,
|
||||
{
|
||||
fn has_type_param_in_array(&self, ctx: &BindgenContext) -> bool {
|
||||
debug_assert!(
|
||||
|
@ -1119,19 +1117,23 @@ impl HasTypeParamInArray for Item {
|
|||
|
||||
impl<T> HasFloat for T
|
||||
where
|
||||
T: Copy + Into<ItemId>
|
||||
T: Copy + Into<ItemId>,
|
||||
{
|
||||
fn has_float(&self, ctx: &BindgenContext) -> bool {
|
||||
debug_assert!(ctx.in_codegen_phase(),
|
||||
"You're not supposed to call this yet");
|
||||
debug_assert!(
|
||||
ctx.in_codegen_phase(),
|
||||
"You're not supposed to call this yet"
|
||||
);
|
||||
ctx.lookup_has_float(*self)
|
||||
}
|
||||
}
|
||||
|
||||
impl HasFloat for Item {
|
||||
fn has_float(&self, ctx: &BindgenContext) -> bool {
|
||||
debug_assert!(ctx.in_codegen_phase(),
|
||||
"You're not supposed to call this yet");
|
||||
debug_assert!(
|
||||
ctx.in_codegen_phase(),
|
||||
"You're not supposed to call this yet"
|
||||
);
|
||||
ctx.lookup_has_float(self.id())
|
||||
}
|
||||
}
|
||||
|
@ -1166,40 +1168,30 @@ impl DotAttributes for Item {
|
|||
|
||||
impl<T> TemplateParameters for T
|
||||
where
|
||||
T: Copy + Into<ItemId>
|
||||
T: Copy + Into<ItemId>,
|
||||
{
|
||||
fn self_template_params(
|
||||
&self,
|
||||
ctx: &BindgenContext,
|
||||
) -> Vec<TypeId> {
|
||||
ctx.resolve_item_fallible(*self).map_or(vec![], |item| {
|
||||
item.self_template_params(ctx)
|
||||
})
|
||||
fn self_template_params(&self, ctx: &BindgenContext) -> Vec<TypeId> {
|
||||
ctx.resolve_item_fallible(*self)
|
||||
.map_or(vec![], |item| item.self_template_params(ctx))
|
||||
}
|
||||
}
|
||||
|
||||
impl TemplateParameters for Item {
|
||||
fn self_template_params(
|
||||
&self,
|
||||
ctx: &BindgenContext,
|
||||
) -> Vec<TypeId> {
|
||||
fn self_template_params(&self, ctx: &BindgenContext) -> Vec<TypeId> {
|
||||
self.kind.self_template_params(ctx)
|
||||
}
|
||||
}
|
||||
|
||||
impl TemplateParameters for ItemKind {
|
||||
fn self_template_params(
|
||||
&self,
|
||||
ctx: &BindgenContext,
|
||||
) -> Vec<TypeId> {
|
||||
fn self_template_params(&self, ctx: &BindgenContext) -> Vec<TypeId> {
|
||||
match *self {
|
||||
ItemKind::Type(ref ty) => ty.self_template_params(ctx),
|
||||
// If we start emitting bindings to explicitly instantiated
|
||||
// functions, then we'll need to check ItemKind::Function for
|
||||
// template params.
|
||||
ItemKind::Function(_) |
|
||||
ItemKind::Module(_) |
|
||||
ItemKind::Var(_) => vec![],
|
||||
ItemKind::Function(_) | ItemKind::Module(_) | ItemKind::Var(_) => {
|
||||
vec![]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1256,14 +1248,13 @@ impl ClangItemParser for Item {
|
|||
id.as_type_id_unchecked()
|
||||
}
|
||||
|
||||
|
||||
fn parse(
|
||||
cursor: clang::Cursor,
|
||||
parent_id: Option<ItemId>,
|
||||
ctx: &mut BindgenContext,
|
||||
) -> Result<ItemId, ParseError> {
|
||||
use ir::var::Var;
|
||||
use clang_sys::*;
|
||||
use ir::var::Var;
|
||||
|
||||
if !cursor.is_valid() {
|
||||
return Err(ParseError::Continue);
|
||||
|
@ -1281,20 +1272,28 @@ impl ClangItemParser for Item {
|
|||
Ok(ParseResult::New(item, declaration)) => {
|
||||
let id = ctx.next_item_id();
|
||||
|
||||
ctx.add_item(Item::new(id, comment, annotations,
|
||||
relevant_parent_id,
|
||||
ItemKind::$what(item)),
|
||||
declaration,
|
||||
Some(cursor));
|
||||
ctx.add_item(
|
||||
Item::new(
|
||||
id,
|
||||
comment,
|
||||
annotations,
|
||||
relevant_parent_id,
|
||||
ItemKind::$what(item),
|
||||
),
|
||||
declaration,
|
||||
Some(cursor),
|
||||
);
|
||||
return Ok(id);
|
||||
}
|
||||
Ok(ParseResult::AlreadyResolved(id)) => {
|
||||
return Ok(id);
|
||||
}
|
||||
Err(ParseError::Recurse) => return Err(ParseError::Recurse),
|
||||
Err(ParseError::Continue) => {},
|
||||
Err(ParseError::Recurse) => {
|
||||
return Err(ParseError::Recurse)
|
||||
}
|
||||
Err(ParseError::Continue) => {}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
try_parse!(Module);
|
||||
|
@ -1315,7 +1314,6 @@ impl ClangItemParser for Item {
|
|||
let definition = cursor.definition();
|
||||
let applicable_cursor = definition.unwrap_or(cursor);
|
||||
|
||||
|
||||
let relevant_parent_id = match definition {
|
||||
Some(definition) => {
|
||||
if definition != cursor {
|
||||
|
@ -1325,7 +1323,8 @@ impl ClangItemParser for Item {
|
|||
cursor,
|
||||
parent_id,
|
||||
ctx,
|
||||
).into());
|
||||
)
|
||||
.into());
|
||||
}
|
||||
ctx.known_semantic_parent(definition)
|
||||
.or(parent_id)
|
||||
|
@ -1412,10 +1411,7 @@ impl ClangItemParser for Item {
|
|||
) -> TypeId {
|
||||
debug!(
|
||||
"from_ty_or_ref_with_id: {:?} {:?}, {:?}, {:?}",
|
||||
potential_id,
|
||||
ty,
|
||||
location,
|
||||
parent_id
|
||||
potential_id, ty, location, parent_id
|
||||
);
|
||||
|
||||
if ctx.collected_typerefs() {
|
||||
|
@ -1426,9 +1422,8 @@ impl ClangItemParser for Item {
|
|||
location,
|
||||
parent_id,
|
||||
ctx,
|
||||
).unwrap_or_else(
|
||||
|_| Item::new_opaque_type(potential_id, &ty, ctx),
|
||||
);
|
||||
)
|
||||
.unwrap_or_else(|_| Item::new_opaque_type(potential_id, &ty, ctx));
|
||||
}
|
||||
|
||||
if let Some(ty) = ctx.builtin_or_resolved_ty(
|
||||
|
@ -1436,8 +1431,7 @@ impl ClangItemParser for Item {
|
|||
parent_id,
|
||||
&ty,
|
||||
Some(location),
|
||||
)
|
||||
{
|
||||
) {
|
||||
debug!("{:?} already resolved: {:?}", ty, location);
|
||||
return ty;
|
||||
}
|
||||
|
@ -1491,17 +1485,14 @@ impl ClangItemParser for Item {
|
|||
|
||||
debug!(
|
||||
"Item::from_ty_with_id: {:?}\n\
|
||||
\tty = {:?},\n\
|
||||
\tlocation = {:?}",
|
||||
id,
|
||||
ty,
|
||||
location
|
||||
\tty = {:?},\n\
|
||||
\tlocation = {:?}",
|
||||
id, ty, location
|
||||
);
|
||||
|
||||
if ty.kind() == clang_sys::CXType_Unexposed ||
|
||||
location.cur_type().kind() == clang_sys::CXType_Unexposed
|
||||
{
|
||||
|
||||
if ty.is_associated_type() ||
|
||||
location.cur_type().is_associated_type()
|
||||
{
|
||||
|
@ -1528,12 +1519,8 @@ impl ClangItemParser for Item {
|
|||
}
|
||||
}
|
||||
|
||||
if let Some(ty) = ctx.builtin_or_resolved_ty(
|
||||
id,
|
||||
parent_id,
|
||||
ty,
|
||||
Some(location),
|
||||
)
|
||||
if let Some(ty) =
|
||||
ctx.builtin_or_resolved_ty(id, parent_id, ty, Some(location))
|
||||
{
|
||||
return Ok(ty);
|
||||
}
|
||||
|
@ -1550,11 +1537,10 @@ impl ClangItemParser for Item {
|
|||
};
|
||||
|
||||
if valid_decl {
|
||||
if let Some(partial) = ctx.currently_parsed_types().iter().find(
|
||||
|ty| {
|
||||
*ty.decl() == declaration_to_look_for
|
||||
},
|
||||
)
|
||||
if let Some(partial) = ctx
|
||||
.currently_parsed_types()
|
||||
.iter()
|
||||
.find(|ty| *ty.decl() == declaration_to_look_for)
|
||||
{
|
||||
debug!("Avoiding recursion parsing type: {:?}", ty);
|
||||
// Unchecked because we haven't finished this type yet.
|
||||
|
@ -1571,7 +1557,9 @@ impl ClangItemParser for Item {
|
|||
let result = Type::from_clang_ty(id, ty, location, parent_id, ctx);
|
||||
let relevant_parent_id = parent_id.unwrap_or(current_module);
|
||||
let ret = match result {
|
||||
Ok(ParseResult::AlreadyResolved(ty)) => Ok(ty.as_type_id_unchecked()),
|
||||
Ok(ParseResult::AlreadyResolved(ty)) => {
|
||||
Ok(ty.as_type_id_unchecked())
|
||||
}
|
||||
Ok(ParseResult::New(item, declaration)) => {
|
||||
ctx.add_item(
|
||||
Item::new(
|
||||
|
@ -1619,7 +1607,7 @@ impl ClangItemParser for Item {
|
|||
if let Err(ParseError::Recurse) = result {
|
||||
warn!(
|
||||
"Unknown type, assuming named template type: \
|
||||
id = {:?}; spelling = {}",
|
||||
id = {:?}; spelling = {}",
|
||||
id,
|
||||
ty.spelling()
|
||||
);
|
||||
|
@ -1652,9 +1640,9 @@ impl ClangItemParser for Item {
|
|||
|
||||
debug!(
|
||||
"Item::type_param:\n\
|
||||
\twith_id = {:?},\n\
|
||||
\tty = {} {:?},\n\
|
||||
\tlocation: {:?}",
|
||||
\twith_id = {:?},\n\
|
||||
\tty = {} {:?},\n\
|
||||
\tlocation: {:?}",
|
||||
with_id,
|
||||
ty.spelling(),
|
||||
ty,
|
||||
|
@ -1735,50 +1723,51 @@ impl ClangItemParser for Item {
|
|||
(refd_spelling.is_empty() && ANON_TYPE_PARAM_RE.is_match(spelling.as_ref()))
|
||||
}
|
||||
|
||||
let definition =
|
||||
if is_template_with_spelling(&location, &ty_spelling) {
|
||||
// Situation (1)
|
||||
location
|
||||
} else if location.kind() == clang_sys::CXCursor_TypeRef {
|
||||
// Situation (2)
|
||||
match location.referenced() {
|
||||
Some(refd)
|
||||
if is_template_with_spelling(&refd, &ty_spelling) => {
|
||||
refd
|
||||
}
|
||||
_ => return None,
|
||||
let definition = if is_template_with_spelling(&location, &ty_spelling) {
|
||||
// Situation (1)
|
||||
location
|
||||
} else if location.kind() == clang_sys::CXCursor_TypeRef {
|
||||
// Situation (2)
|
||||
match location.referenced() {
|
||||
Some(refd)
|
||||
if is_template_with_spelling(&refd, &ty_spelling) =>
|
||||
{
|
||||
refd
|
||||
}
|
||||
} else {
|
||||
// Situation (3)
|
||||
let mut definition = None;
|
||||
_ => return None,
|
||||
}
|
||||
} else {
|
||||
// Situation (3)
|
||||
let mut definition = None;
|
||||
|
||||
location.visit(|child| {
|
||||
let child_ty = child.cur_type();
|
||||
if child_ty.kind() == clang_sys::CXCursor_TypeRef &&
|
||||
child_ty.spelling() == ty_spelling
|
||||
{
|
||||
match child.referenced() {
|
||||
Some(refd)
|
||||
if is_template_with_spelling(
|
||||
&refd,
|
||||
&ty_spelling,
|
||||
) => {
|
||||
definition = Some(refd);
|
||||
return clang_sys::CXChildVisit_Break;
|
||||
}
|
||||
_ => {}
|
||||
location.visit(|child| {
|
||||
let child_ty = child.cur_type();
|
||||
if child_ty.kind() == clang_sys::CXCursor_TypeRef &&
|
||||
child_ty.spelling() == ty_spelling
|
||||
{
|
||||
match child.referenced() {
|
||||
Some(refd)
|
||||
if is_template_with_spelling(
|
||||
&refd,
|
||||
&ty_spelling,
|
||||
) =>
|
||||
{
|
||||
definition = Some(refd);
|
||||
return clang_sys::CXChildVisit_Break;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
clang_sys::CXChildVisit_Continue
|
||||
});
|
||||
|
||||
if let Some(def) = definition {
|
||||
def
|
||||
} else {
|
||||
return None;
|
||||
}
|
||||
};
|
||||
|
||||
clang_sys::CXChildVisit_Continue
|
||||
});
|
||||
|
||||
if let Some(def) = definition {
|
||||
def
|
||||
} else {
|
||||
return None;
|
||||
}
|
||||
};
|
||||
assert!(is_template_with_spelling(&definition, &ty_spelling));
|
||||
|
||||
// Named types are always parented to the root module. They are never
|
||||
|
@ -1789,9 +1778,12 @@ impl ClangItemParser for Item {
|
|||
|
||||
if let Some(id) = ctx.get_type_param(&definition) {
|
||||
if let Some(with_id) = with_id {
|
||||
return Some(
|
||||
ctx.build_ty_wrapper(with_id, id, Some(parent), &ty),
|
||||
);
|
||||
return Some(ctx.build_ty_wrapper(
|
||||
with_id,
|
||||
id,
|
||||
Some(parent),
|
||||
&ty,
|
||||
));
|
||||
} else {
|
||||
return Some(id);
|
||||
}
|
||||
|
|
|
@ -135,11 +135,7 @@ impl DotAttributes for ItemKind {
|
|||
where
|
||||
W: io::Write,
|
||||
{
|
||||
writeln!(
|
||||
out,
|
||||
"<tr><td>kind</td><td>{}</td></tr>",
|
||||
self.kind_name()
|
||||
)?;
|
||||
writeln!(out, "<tr><td>kind</td><td>{}</td></tr>", self.kind_name())?;
|
||||
|
||||
match *self {
|
||||
ItemKind::Module(ref module) => module.dot_attributes(ctx, out),
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
//! Intermediate representation for the physical layout of some type.
|
||||
|
||||
use super::derive::CanDerive;
|
||||
use super::ty::{RUST_DERIVE_IN_ARRAY_LIMIT, Type, TypeKind};
|
||||
use ir::context::BindgenContext;
|
||||
use super::ty::{Type, TypeKind, RUST_DERIVE_IN_ARRAY_LIMIT};
|
||||
use clang;
|
||||
use ir::context::BindgenContext;
|
||||
use std::cmp;
|
||||
|
||||
/// A type that represents the struct layout of a type.
|
||||
|
@ -107,7 +107,10 @@ impl Opaque {
|
|||
|
||||
/// Return the known rust type we should use to create a correctly-aligned
|
||||
/// field with this layout.
|
||||
pub fn known_rust_type_for_array(&self,ctx: &BindgenContext) -> Option<&'static str> {
|
||||
pub fn known_rust_type_for_array(
|
||||
&self,
|
||||
ctx: &BindgenContext,
|
||||
) -> Option<&'static str> {
|
||||
Layout::known_type_for_size(ctx, self.0.align)
|
||||
}
|
||||
|
||||
|
@ -124,10 +127,14 @@ impl Opaque {
|
|||
/// Return `true` if this opaque layout's array size will fit within the
|
||||
/// maximum number of array elements that Rust allows deriving traits
|
||||
/// with. Return `false` otherwise.
|
||||
pub fn array_size_within_derive_limit(&self, ctx: &BindgenContext) -> CanDerive {
|
||||
if self.array_size(ctx).map_or(false, |size| {
|
||||
size <= RUST_DERIVE_IN_ARRAY_LIMIT
|
||||
}) {
|
||||
pub fn array_size_within_derive_limit(
|
||||
&self,
|
||||
ctx: &BindgenContext,
|
||||
) -> CanDerive {
|
||||
if self
|
||||
.array_size(ctx)
|
||||
.map_or(false, |size| size <= RUST_DERIVE_IN_ARRAY_LIMIT)
|
||||
{
|
||||
CanDerive::Yes
|
||||
} else {
|
||||
CanDerive::Manually
|
||||
|
|
|
@ -3,10 +3,10 @@
|
|||
//! Parsing C/C++ generates the IR, while code generation outputs Rust code from
|
||||
//! the IR.
|
||||
|
||||
pub mod annotations;
|
||||
pub mod analysis;
|
||||
pub mod comp;
|
||||
pub mod annotations;
|
||||
pub mod comment;
|
||||
pub mod comp;
|
||||
pub mod context;
|
||||
pub mod derive;
|
||||
pub mod dot;
|
||||
|
@ -17,8 +17,8 @@ pub mod item;
|
|||
pub mod item_kind;
|
||||
pub mod layout;
|
||||
pub mod module;
|
||||
pub mod objc;
|
||||
pub mod template;
|
||||
pub mod traversal;
|
||||
pub mod ty;
|
||||
pub mod var;
|
||||
pub mod objc;
|
||||
|
|
|
@ -82,9 +82,9 @@ impl ClangSubItemParser for Module {
|
|||
CXCursor_Namespace => {
|
||||
let module_id = ctx.module(cursor);
|
||||
ctx.with_module(module_id, |ctx| {
|
||||
cursor.visit(
|
||||
|cursor| parse_one(ctx, cursor, Some(module_id.into())),
|
||||
)
|
||||
cursor.visit(|cursor| {
|
||||
parse_one(ctx, cursor, Some(module_id.into()))
|
||||
})
|
||||
});
|
||||
|
||||
Ok(ParseResult::AlreadyResolved(module_id.into()))
|
||||
|
|
|
@ -12,7 +12,7 @@ use clang_sys::CXCursor_ObjCClassRef;
|
|||
use clang_sys::CXCursor_ObjCInstanceMethodDecl;
|
||||
use clang_sys::CXCursor_ObjCProtocolDecl;
|
||||
use clang_sys::CXCursor_ObjCProtocolRef;
|
||||
use proc_macro2::{TokenStream, Ident, Span};
|
||||
use proc_macro2::{Ident, Span, TokenStream};
|
||||
|
||||
/// Objective C interface as used in TypeKind
|
||||
///
|
||||
|
@ -212,7 +212,8 @@ impl ObjCMethod {
|
|||
|
||||
/// Formats the method call
|
||||
pub fn format_method_call(&self, args: &[TokenStream]) -> TokenStream {
|
||||
let split_name: Vec<_> = self.name
|
||||
let split_name: Vec<_> = self
|
||||
.name
|
||||
.split(':')
|
||||
.filter(|p| !p.is_empty())
|
||||
.map(|name| Ident::new(name, Span::call_site()))
|
||||
|
@ -242,7 +243,7 @@ impl ObjCMethod {
|
|||
let name_and_sig: Vec<&str> = arg.split(' ').collect();
|
||||
let name = name_and_sig[0];
|
||||
args_without_types.push(Ident::new(name, Span::call_site()))
|
||||
};
|
||||
}
|
||||
|
||||
let args = split_name
|
||||
.into_iter()
|
||||
|
|
|
@ -99,7 +99,7 @@ use parse::ClangItemParser;
|
|||
/// ... |Wtf | ... | [T] |
|
||||
/// ... |Qux | ... | [] |
|
||||
/// ----+------+-----+----------------------+
|
||||
pub trait TemplateParameters : Sized {
|
||||
pub trait TemplateParameters: Sized {
|
||||
/// Get the set of `ItemId`s that make up this template declaration's free
|
||||
/// template parameters.
|
||||
///
|
||||
|
@ -135,9 +135,11 @@ pub trait TemplateParameters : Sized {
|
|||
Self: ItemAncestors,
|
||||
{
|
||||
let ancestors: Vec<_> = self.ancestors(ctx).collect();
|
||||
ancestors.into_iter().rev().flat_map(|id| {
|
||||
id.self_template_params(ctx).into_iter()
|
||||
}).collect()
|
||||
ancestors
|
||||
.into_iter()
|
||||
.rev()
|
||||
.flat_map(|id| id.self_template_params(ctx).into_iter())
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get only the set of template parameters that this item uses. This is a
|
||||
|
@ -153,10 +155,11 @@ pub trait TemplateParameters : Sized {
|
|||
);
|
||||
|
||||
let id = *self.as_ref();
|
||||
ctx.resolve_item(id).all_template_params(ctx)
|
||||
.into_iter()
|
||||
.filter(|p| ctx.uses_template_parameter(id, *p))
|
||||
.collect()
|
||||
ctx.resolve_item(id)
|
||||
.all_template_params(ctx)
|
||||
.into_iter()
|
||||
.filter(|p| ctx.uses_template_parameter(id, *p))
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -221,34 +224,33 @@ impl TemplateInstantiation {
|
|||
) -> Option<TemplateInstantiation> {
|
||||
use clang_sys::*;
|
||||
|
||||
let template_args = ty.template_args()
|
||||
.map_or(vec![], |args| {
|
||||
match ty.canonical_type().template_args() {
|
||||
Some(canonical_args) => {
|
||||
let arg_count = args.len();
|
||||
args.chain(canonical_args.skip(arg_count))
|
||||
.filter(|t| t.kind() != CXType_Invalid)
|
||||
.map(|t| {
|
||||
Item::from_ty_or_ref(t, t.declaration(), None, ctx)
|
||||
}).collect()
|
||||
}
|
||||
None => {
|
||||
args.filter(|t| t.kind() != CXType_Invalid)
|
||||
.map(|t| {
|
||||
Item::from_ty_or_ref(t, t.declaration(), None, ctx)
|
||||
}).collect()
|
||||
}
|
||||
}
|
||||
});
|
||||
let template_args = ty.template_args().map_or(vec![], |args| match ty
|
||||
.canonical_type()
|
||||
.template_args()
|
||||
{
|
||||
Some(canonical_args) => {
|
||||
let arg_count = args.len();
|
||||
args.chain(canonical_args.skip(arg_count))
|
||||
.filter(|t| t.kind() != CXType_Invalid)
|
||||
.map(|t| {
|
||||
Item::from_ty_or_ref(t, t.declaration(), None, ctx)
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
None => args
|
||||
.filter(|t| t.kind() != CXType_Invalid)
|
||||
.map(|t| Item::from_ty_or_ref(t, t.declaration(), None, ctx))
|
||||
.collect(),
|
||||
});
|
||||
|
||||
let declaration = ty.declaration();
|
||||
let definition =
|
||||
if declaration.kind() == CXCursor_TypeAliasTemplateDecl {
|
||||
Some(declaration)
|
||||
} else {
|
||||
declaration.specialized().or_else(|| {
|
||||
let mut template_ref = None;
|
||||
ty.declaration().visit(|child| {
|
||||
let definition = if declaration.kind() == CXCursor_TypeAliasTemplateDecl
|
||||
{
|
||||
Some(declaration)
|
||||
} else {
|
||||
declaration.specialized().or_else(|| {
|
||||
let mut template_ref = None;
|
||||
ty.declaration().visit(|child| {
|
||||
if child.kind() == CXCursor_TemplateRef {
|
||||
template_ref = Some(child);
|
||||
return CXVisit_Break;
|
||||
|
@ -261,9 +263,9 @@ impl TemplateInstantiation {
|
|||
CXChildVisit_Recurse
|
||||
});
|
||||
|
||||
template_ref.and_then(|cur| cur.referenced())
|
||||
})
|
||||
};
|
||||
template_ref.and_then(|cur| cur.referenced())
|
||||
})
|
||||
};
|
||||
|
||||
let definition = match definition {
|
||||
Some(def) => def,
|
||||
|
@ -271,7 +273,7 @@ impl TemplateInstantiation {
|
|||
if !ty.declaration().is_builtin() {
|
||||
warn!(
|
||||
"Could not find template definition for template \
|
||||
instantiation"
|
||||
instantiation"
|
||||
);
|
||||
}
|
||||
return None;
|
||||
|
@ -305,7 +307,8 @@ impl IsOpaque for TemplateInstantiation {
|
|||
// arguments properly.
|
||||
|
||||
let mut path = item.canonical_path(ctx);
|
||||
let args: Vec<_> = self.template_arguments()
|
||||
let args: Vec<_> = self
|
||||
.template_arguments()
|
||||
.iter()
|
||||
.map(|arg| {
|
||||
let arg_path = arg.canonical_path(ctx);
|
||||
|
@ -330,7 +333,8 @@ impl Trace for TemplateInstantiation {
|
|||
where
|
||||
T: Tracer,
|
||||
{
|
||||
tracer.visit_kind(self.definition.into(), EdgeKind::TemplateDeclaration);
|
||||
tracer
|
||||
.visit_kind(self.definition.into(), EdgeKind::TemplateDeclaration);
|
||||
for arg in self.template_arguments() {
|
||||
tracer.visit_kind(arg.into(), EdgeKind::TemplateArgument);
|
||||
}
|
||||
|
|
|
@ -20,10 +20,7 @@ pub struct Edge {
|
|||
impl Edge {
|
||||
/// Construct a new edge whose referent is `to` and is of the given `kind`.
|
||||
pub fn new(to: ItemId, kind: EdgeKind) -> Edge {
|
||||
Edge {
|
||||
to,
|
||||
kind,
|
||||
}
|
||||
Edge { to, kind }
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -236,7 +233,7 @@ pub fn codegen_edges(ctx: &BindgenContext, edge: Edge) -> bool {
|
|||
EdgeKind::InnerVar => cc.vars(),
|
||||
EdgeKind::Method => cc.methods(),
|
||||
EdgeKind::Constructor => cc.constructors(),
|
||||
EdgeKind::Destructor => cc.destructors()
|
||||
EdgeKind::Destructor => cc.destructors(),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -269,10 +266,7 @@ impl<'ctx> TraversalStorage<'ctx> for ItemSet {
|
|||
/// each item. This is useful for providing debug assertions with meaningful
|
||||
/// diagnostic messages about dangling items.
|
||||
#[derive(Debug)]
|
||||
pub struct Paths<'ctx>(
|
||||
BTreeMap<ItemId, ItemId>,
|
||||
&'ctx BindgenContext
|
||||
);
|
||||
pub struct Paths<'ctx>(BTreeMap<ItemId, ItemId>, &'ctx BindgenContext);
|
||||
|
||||
impl<'ctx> TraversalStorage<'ctx> for Paths<'ctx> {
|
||||
fn new(ctx: &'ctx BindgenContext) -> Self {
|
||||
|
@ -289,7 +283,7 @@ impl<'ctx> TraversalStorage<'ctx> for Paths<'ctx> {
|
|||
loop {
|
||||
let predecessor = *self.0.get(¤t).expect(
|
||||
"We know we found this item id, so it must have a \
|
||||
predecessor",
|
||||
predecessor",
|
||||
);
|
||||
if predecessor == current {
|
||||
break;
|
||||
|
@ -300,8 +294,7 @@ impl<'ctx> TraversalStorage<'ctx> for Paths<'ctx> {
|
|||
path.reverse();
|
||||
panic!(
|
||||
"Found reference to dangling id = {:?}\nvia path = {:?}",
|
||||
item,
|
||||
path
|
||||
item, path
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -495,13 +488,12 @@ where
|
|||
///
|
||||
/// See `BindgenContext::assert_no_dangling_item_traversal` for more
|
||||
/// information.
|
||||
pub type AssertNoDanglingItemsTraversal<'ctx> =
|
||||
ItemTraversal<
|
||||
'ctx,
|
||||
Paths<'ctx>,
|
||||
VecDeque<ItemId>,
|
||||
for<'a> fn(&'a BindgenContext, Edge) -> bool,
|
||||
>;
|
||||
pub type AssertNoDanglingItemsTraversal<'ctx> = ItemTraversal<
|
||||
'ctx,
|
||||
Paths<'ctx>,
|
||||
VecDeque<ItemId>,
|
||||
for<'a> fn(&'a BindgenContext, Edge) -> bool,
|
||||
>;
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
|
@ -511,6 +503,6 @@ mod tests {
|
|||
#[allow(dead_code)]
|
||||
fn traversal_predicate_is_object_safe() {
|
||||
// This should compile only if TraversalPredicate is object safe.
|
||||
fn takes_by_trait_object(_: &TraversalPredicate) {}
|
||||
fn takes_by_trait_object(_: &dyn TraversalPredicate) {}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -9,8 +9,9 @@ use super::int::IntKind;
|
|||
use super::item::{IsOpaque, Item};
|
||||
use super::layout::{Layout, Opaque};
|
||||
use super::objc::ObjCInterface;
|
||||
use super::template::{AsTemplateParam, TemplateInstantiation,
|
||||
TemplateParameters};
|
||||
use super::template::{
|
||||
AsTemplateParam, TemplateInstantiation, TemplateParameters,
|
||||
};
|
||||
use super::traversal::{EdgeKind, Trace, Tracer};
|
||||
use clang::{self, Cursor};
|
||||
use parse::{ClangItemParser, ParseError, ParseResult};
|
||||
|
@ -235,7 +236,11 @@ impl Type {
|
|||
pub fn is_incomplete_array(&self, ctx: &BindgenContext) -> Option<ItemId> {
|
||||
match self.kind {
|
||||
TypeKind::Array(item, len) => {
|
||||
if len == 0 { Some(item.into()) } else { None }
|
||||
if len == 0 {
|
||||
Some(item.into())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
TypeKind::ResolvedTypeRef(inner) => {
|
||||
ctx.resolve_type(inner).is_incomplete_array(ctx)
|
||||
|
@ -249,20 +254,15 @@ impl Type {
|
|||
self.layout.or_else(|| {
|
||||
match self.kind {
|
||||
TypeKind::Comp(ref ci) => ci.layout(ctx),
|
||||
TypeKind::Array(inner, length) if length == 0 => {
|
||||
Some(Layout::new(
|
||||
0,
|
||||
ctx.resolve_type(inner).layout(ctx)?.align,
|
||||
))
|
||||
}
|
||||
TypeKind::Array(inner, length) if length == 0 => Some(
|
||||
Layout::new(0, ctx.resolve_type(inner).layout(ctx)?.align),
|
||||
),
|
||||
// FIXME(emilio): This is a hack for anonymous union templates.
|
||||
// Use the actual pointer size!
|
||||
TypeKind::Pointer(..) => {
|
||||
Some(Layout::new(
|
||||
ctx.target_pointer_size(),
|
||||
ctx.target_pointer_size(),
|
||||
))
|
||||
}
|
||||
TypeKind::Pointer(..) => Some(Layout::new(
|
||||
ctx.target_pointer_size(),
|
||||
ctx.target_pointer_size(),
|
||||
)),
|
||||
TypeKind::ResolvedTypeRef(inner) => {
|
||||
ctx.resolve_type(inner).layout(ctx)
|
||||
}
|
||||
|
@ -301,8 +301,12 @@ impl Type {
|
|||
ctx: &BindgenContext,
|
||||
) -> Option<Cow<'a, str>> {
|
||||
let name_info = match *self.kind() {
|
||||
TypeKind::Pointer(inner) => Some((inner.into(), Cow::Borrowed("ptr"))),
|
||||
TypeKind::Reference(inner) => Some((inner.into(), Cow::Borrowed("ref"))),
|
||||
TypeKind::Pointer(inner) => {
|
||||
Some((inner.into(), Cow::Borrowed("ptr")))
|
||||
}
|
||||
TypeKind::Reference(inner) => {
|
||||
Some((inner.into(), Cow::Borrowed("ref")))
|
||||
}
|
||||
TypeKind::Array(inner, length) => {
|
||||
Some((inner, format!("array{}", length).into()))
|
||||
}
|
||||
|
@ -323,9 +327,8 @@ impl Type {
|
|||
&'tr self,
|
||||
ctx: &'tr BindgenContext,
|
||||
) -> &'tr Type {
|
||||
self.safe_canonical_type(ctx).expect(
|
||||
"Should have been resolved after parsing!",
|
||||
)
|
||||
self.safe_canonical_type(ctx)
|
||||
.expect("Should have been resolved after parsing!")
|
||||
}
|
||||
|
||||
/// Returns the canonical type of this type, that is, the "inner type".
|
||||
|
@ -362,10 +365,9 @@ impl Type {
|
|||
TypeKind::TemplateAlias(inner, _) => {
|
||||
ctx.resolve_type(inner).safe_canonical_type(ctx)
|
||||
}
|
||||
TypeKind::TemplateInstantiation(ref inst) => {
|
||||
ctx.resolve_type(inst.template_definition())
|
||||
.safe_canonical_type(ctx)
|
||||
}
|
||||
TypeKind::TemplateInstantiation(ref inst) => ctx
|
||||
.resolve_type(inst.template_definition())
|
||||
.safe_canonical_type(ctx),
|
||||
|
||||
TypeKind::UnresolvedTypeRef(..) => None,
|
||||
}
|
||||
|
@ -445,8 +447,7 @@ impl DotAttributes for Type {
|
|||
out,
|
||||
"<tr><td>size</td><td>{}</td></tr>
|
||||
<tr><td>align</td><td>{}</td></tr>",
|
||||
layout.size,
|
||||
layout.align
|
||||
layout.size, layout.align
|
||||
)?;
|
||||
if layout.packed {
|
||||
writeln!(out, "<tr><td>packed</td><td>true</td></tr>")?;
|
||||
|
@ -470,7 +471,11 @@ impl DotAttributes for TypeKind {
|
|||
where
|
||||
W: io::Write,
|
||||
{
|
||||
writeln!(out, "<tr><td>type kind</td><td>{}</td></tr>", self.kind_name())?;
|
||||
writeln!(
|
||||
out,
|
||||
"<tr><td>type kind</td><td>{}</td></tr>",
|
||||
self.kind_name()
|
||||
)?;
|
||||
|
||||
if let TypeKind::Comp(ref comp) = *self {
|
||||
comp.dot_attributes(ctx, out)?;
|
||||
|
@ -559,19 +564,13 @@ fn is_invalid_type_param_empty_name() {
|
|||
}
|
||||
|
||||
impl TemplateParameters for Type {
|
||||
fn self_template_params(
|
||||
&self,
|
||||
ctx: &BindgenContext,
|
||||
) -> Vec<TypeId> {
|
||||
fn self_template_params(&self, ctx: &BindgenContext) -> Vec<TypeId> {
|
||||
self.kind.self_template_params(ctx)
|
||||
}
|
||||
}
|
||||
|
||||
impl TemplateParameters for TypeKind {
|
||||
fn self_template_params(
|
||||
&self,
|
||||
ctx: &BindgenContext,
|
||||
) -> Vec<TypeId> {
|
||||
fn self_template_params(&self, ctx: &BindgenContext) -> Vec<TypeId> {
|
||||
match *self {
|
||||
TypeKind::ResolvedTypeRef(id) => {
|
||||
ctx.resolve_type(id).self_template_params(ctx)
|
||||
|
@ -687,7 +686,7 @@ pub enum TypeKind {
|
|||
clang::Type,
|
||||
clang::Cursor,
|
||||
/* parent_id */
|
||||
Option<ItemId>
|
||||
Option<ItemId>,
|
||||
),
|
||||
|
||||
/// An indirection to another type.
|
||||
|
@ -742,9 +741,7 @@ impl Type {
|
|||
|
||||
debug!(
|
||||
"from_clang_ty: {:?}, ty: {:?}, loc: {:?}",
|
||||
potential_id,
|
||||
ty,
|
||||
location
|
||||
potential_id, ty, location
|
||||
);
|
||||
debug!("currently_parsed_types: {:?}", ctx.currently_parsed_types());
|
||||
|
||||
|
@ -753,8 +750,9 @@ impl Type {
|
|||
// Parse objc protocols as if they were interfaces
|
||||
let mut ty_kind = ty.kind();
|
||||
match location.kind() {
|
||||
CXCursor_ObjCProtocolDecl |
|
||||
CXCursor_ObjCCategoryDecl => ty_kind = CXType_ObjCInterface,
|
||||
CXCursor_ObjCProtocolDecl | CXCursor_ObjCCategoryDecl => {
|
||||
ty_kind = CXType_ObjCInterface
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
|
@ -764,10 +762,10 @@ impl Type {
|
|||
// We are rewriting them as id to suppress multiple conflicting
|
||||
// typedefs at root level
|
||||
if ty_kind == CXType_Typedef {
|
||||
let is_template_type_param = ty.declaration().kind() ==
|
||||
CXCursor_TemplateTypeParameter;
|
||||
let is_canonical_objcpointer = canonical_ty.kind() ==
|
||||
CXType_ObjCObjectPointer;
|
||||
let is_template_type_param =
|
||||
ty.declaration().kind() == CXCursor_TemplateTypeParameter;
|
||||
let is_canonical_objcpointer =
|
||||
canonical_ty.kind() == CXType_ObjCObjectPointer;
|
||||
|
||||
// We have found a template type for objc interface
|
||||
if is_canonical_objcpointer && is_template_type_param {
|
||||
|
@ -775,19 +773,19 @@ impl Type {
|
|||
// To keep it simple, just name them ids
|
||||
name = "id".to_owned();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
if location.kind() == CXCursor_ClassTemplatePartialSpecialization {
|
||||
// Sorry! (Not sorry)
|
||||
warn!(
|
||||
"Found a partial template specialization; bindgen does not \
|
||||
support partial template specialization! Constructing \
|
||||
opaque type instead."
|
||||
);
|
||||
return Ok(
|
||||
ParseResult::New(Opaque::from_clang_ty(&canonical_ty, ctx), None),
|
||||
support partial template specialization! Constructing \
|
||||
opaque type instead."
|
||||
);
|
||||
return Ok(ParseResult::New(
|
||||
Opaque::from_clang_ty(&canonical_ty, ctx),
|
||||
None,
|
||||
));
|
||||
}
|
||||
|
||||
let kind = if location.kind() == CXCursor_TemplateRef ||
|
||||
|
@ -800,7 +798,8 @@ impl Type {
|
|||
}
|
||||
} else {
|
||||
match ty_kind {
|
||||
CXType_Unexposed if *ty != canonical_ty &&
|
||||
CXType_Unexposed
|
||||
if *ty != canonical_ty &&
|
||||
canonical_ty.kind() != CXType_Invalid &&
|
||||
ty.ret_type().is_none() &&
|
||||
// Sometime clang desugars some types more than
|
||||
|
@ -815,13 +814,16 @@ impl Type {
|
|||
// => { ... }
|
||||
//
|
||||
// etc.
|
||||
!canonical_ty.spelling().contains("type-parameter") => {
|
||||
!canonical_ty.spelling().contains("type-parameter") =>
|
||||
{
|
||||
debug!("Looking for canonical type: {:?}", canonical_ty);
|
||||
return Self::from_clang_ty(potential_id,
|
||||
&canonical_ty,
|
||||
location,
|
||||
parent_id,
|
||||
ctx);
|
||||
return Self::from_clang_ty(
|
||||
potential_id,
|
||||
&canonical_ty,
|
||||
location,
|
||||
parent_id,
|
||||
ctx,
|
||||
);
|
||||
}
|
||||
CXType_Unexposed | CXType_Invalid => {
|
||||
// For some reason Clang doesn't give us any hint in some
|
||||
|
@ -837,23 +839,21 @@ impl Type {
|
|||
} else if ty.is_fully_instantiated_template() {
|
||||
debug!(
|
||||
"Template specialization: {:?}, {:?} {:?}",
|
||||
ty,
|
||||
location,
|
||||
canonical_ty
|
||||
ty, location, canonical_ty
|
||||
);
|
||||
let complex = CompInfo::from_ty(
|
||||
potential_id,
|
||||
ty,
|
||||
Some(location),
|
||||
ctx,
|
||||
).expect("C'mon");
|
||||
)
|
||||
.expect("C'mon");
|
||||
TypeKind::Comp(complex)
|
||||
} else {
|
||||
match location.kind() {
|
||||
CXCursor_CXXBaseSpecifier |
|
||||
CXCursor_ClassTemplate => {
|
||||
if location.kind() ==
|
||||
CXCursor_CXXBaseSpecifier
|
||||
if location.kind() == CXCursor_CXXBaseSpecifier
|
||||
{
|
||||
// In the case we're parsing a base specifier
|
||||
// inside an unexposed or invalid type, it means
|
||||
|
@ -896,8 +896,7 @@ impl Type {
|
|||
// [4]: inherit-namespaced.hpp
|
||||
if location.spelling().chars().all(|c| {
|
||||
c.is_alphanumeric() || c == '_'
|
||||
})
|
||||
{
|
||||
}) {
|
||||
return Err(ParseError::Recurse);
|
||||
}
|
||||
} else {
|
||||
|
@ -915,13 +914,14 @@ impl Type {
|
|||
Err(_) => {
|
||||
warn!(
|
||||
"Could not create complex type \
|
||||
from class template or base \
|
||||
specifier, using opaque blob"
|
||||
);
|
||||
let opaque = Opaque::from_clang_ty(ty, ctx);
|
||||
return Ok(
|
||||
ParseResult::New(opaque, None),
|
||||
from class template or base \
|
||||
specifier, using opaque blob"
|
||||
);
|
||||
let opaque =
|
||||
Opaque::from_clang_ty(ty, ctx);
|
||||
return Ok(ParseResult::New(
|
||||
opaque, None,
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -944,7 +944,8 @@ impl Type {
|
|||
|
||||
name = current.spelling();
|
||||
|
||||
let inner_ty = cur.typedef_type()
|
||||
let inner_ty = cur
|
||||
.typedef_type()
|
||||
.expect("Not valid Type?");
|
||||
inner = Ok(Item::from_ty_or_ref(
|
||||
inner_ty,
|
||||
|
@ -954,13 +955,14 @@ impl Type {
|
|||
));
|
||||
}
|
||||
CXCursor_TemplateTypeParameter => {
|
||||
let param =
|
||||
Item::type_param(None,
|
||||
cur,
|
||||
ctx)
|
||||
.expect("Item::type_param shouldn't \
|
||||
ever fail if we are looking \
|
||||
at a TemplateTypeParameter");
|
||||
let param = Item::type_param(
|
||||
None, cur, ctx,
|
||||
)
|
||||
.expect(
|
||||
"Item::type_param shouldn't \
|
||||
ever fail if we are looking \
|
||||
at a TemplateTypeParameter",
|
||||
);
|
||||
args.push(param);
|
||||
}
|
||||
_ => {}
|
||||
|
@ -973,7 +975,7 @@ impl Type {
|
|||
Err(..) => {
|
||||
error!(
|
||||
"Failed to parse template alias \
|
||||
{:?}",
|
||||
{:?}",
|
||||
location
|
||||
);
|
||||
return Err(ParseError::Continue);
|
||||
|
@ -1009,10 +1011,8 @@ impl Type {
|
|||
|
||||
debug!(
|
||||
"TypeRef: location = {:?}; referenced = \
|
||||
{:?}; referenced_ty = {:?}",
|
||||
location,
|
||||
referenced,
|
||||
referenced_ty
|
||||
{:?}; referenced_ty = {:?}",
|
||||
location, referenced, referenced_ty
|
||||
);
|
||||
|
||||
let id = Item::from_ty_or_ref_with_id(
|
||||
|
@ -1022,7 +1022,9 @@ impl Type {
|
|||
parent_id,
|
||||
ctx,
|
||||
);
|
||||
return Ok(ParseResult::AlreadyResolved(id.into()));
|
||||
return Ok(ParseResult::AlreadyResolved(
|
||||
id.into(),
|
||||
));
|
||||
}
|
||||
CXCursor_NamespaceRef => {
|
||||
return Err(ParseError::Continue);
|
||||
|
@ -1078,11 +1080,10 @@ impl Type {
|
|||
let inner =
|
||||
Item::from_ty_or_ref(pointee, location, None, ctx);
|
||||
TypeKind::BlockPointer(inner)
|
||||
},
|
||||
}
|
||||
// XXX: RValueReference is most likely wrong, but I don't think we
|
||||
// can even add bindings for that, so huh.
|
||||
CXType_RValueReference |
|
||||
CXType_LValueReference => {
|
||||
CXType_RValueReference | CXType_LValueReference => {
|
||||
let inner = Item::from_ty_or_ref(
|
||||
ty.pointee_type().unwrap(),
|
||||
location,
|
||||
|
@ -1092,14 +1093,14 @@ impl Type {
|
|||
TypeKind::Reference(inner)
|
||||
}
|
||||
// XXX DependentSizedArray is wrong
|
||||
CXType_VariableArray |
|
||||
CXType_DependentSizedArray => {
|
||||
CXType_VariableArray | CXType_DependentSizedArray => {
|
||||
let inner = Item::from_ty(
|
||||
ty.elem_type().as_ref().unwrap(),
|
||||
location,
|
||||
None,
|
||||
ctx,
|
||||
).expect("Not able to resolve array element?");
|
||||
)
|
||||
.expect("Not able to resolve array element?");
|
||||
TypeKind::Pointer(inner)
|
||||
}
|
||||
CXType_IncompleteArray => {
|
||||
|
@ -1108,13 +1109,12 @@ impl Type {
|
|||
location,
|
||||
None,
|
||||
ctx,
|
||||
).expect("Not able to resolve array element?");
|
||||
)
|
||||
.expect("Not able to resolve array element?");
|
||||
TypeKind::Array(inner, 0)
|
||||
}
|
||||
CXType_FunctionNoProto |
|
||||
CXType_FunctionProto => {
|
||||
let signature =
|
||||
FunctionSig::from_ty(ty, &location, ctx)?;
|
||||
CXType_FunctionNoProto | CXType_FunctionProto => {
|
||||
let signature = FunctionSig::from_ty(ty, &location, ctx)?;
|
||||
TypeKind::Function(signature)
|
||||
}
|
||||
CXType_Typedef => {
|
||||
|
@ -1141,7 +1141,8 @@ impl Type {
|
|||
ty,
|
||||
Some(location),
|
||||
ctx,
|
||||
).expect("Not a complex type?");
|
||||
)
|
||||
.expect("Not a complex type?");
|
||||
|
||||
if name.is_empty() {
|
||||
// The pretty-printed name may contain typedefed name,
|
||||
|
@ -1160,7 +1161,8 @@ impl Type {
|
|||
location,
|
||||
None,
|
||||
ctx,
|
||||
).expect("Not able to resolve vector element?");
|
||||
)
|
||||
.expect("Not able to resolve vector element?");
|
||||
TypeKind::Vector(inner, ty.num_elements().unwrap())
|
||||
}
|
||||
CXType_ConstantArray => {
|
||||
|
@ -1169,7 +1171,8 @@ impl Type {
|
|||
location,
|
||||
None,
|
||||
ctx,
|
||||
).expect("Not able to resolve array element?");
|
||||
)
|
||||
.expect("Not able to resolve array element?");
|
||||
TypeKind::Array(inner, ty.num_elements().unwrap())
|
||||
}
|
||||
CXType_Elaborated => {
|
||||
|
@ -1183,8 +1186,7 @@ impl Type {
|
|||
}
|
||||
CXType_ObjCId => TypeKind::ObjCId,
|
||||
CXType_ObjCSel => TypeKind::ObjCSel,
|
||||
CXType_ObjCClass |
|
||||
CXType_ObjCInterface => {
|
||||
CXType_ObjCClass | CXType_ObjCInterface => {
|
||||
let interface = ObjCInterface::from_ty(&location, ctx)
|
||||
.expect("Not a valid objc interface?");
|
||||
name = interface.rust_name();
|
||||
|
|
|
@ -1,12 +1,12 @@
|
|||
//! Intermediate representation of variables.
|
||||
|
||||
use callbacks::MacroParsingBehavior;
|
||||
use super::context::{BindgenContext, TypeId};
|
||||
use super::dot::DotAttributes;
|
||||
use super::function::cursor_mangling;
|
||||
use super::int::IntKind;
|
||||
use super::item::Item;
|
||||
use super::ty::{FloatKind, TypeKind};
|
||||
use callbacks::MacroParsingBehavior;
|
||||
use cexpr;
|
||||
use clang;
|
||||
use parse::{ClangItemParser, ClangSubItemParser, ParseError, ParseResult};
|
||||
|
@ -133,9 +133,9 @@ impl ClangSubItemParser for Var {
|
|||
cursor: clang::Cursor,
|
||||
ctx: &mut BindgenContext,
|
||||
) -> Result<ParseResult<Self>, ParseError> {
|
||||
use clang_sys::*;
|
||||
use cexpr::expr::EvalResult;
|
||||
use cexpr::literal::CChar;
|
||||
use clang_sys::*;
|
||||
match cursor.kind() {
|
||||
CXCursor_MacroDefinition => {
|
||||
if let Some(callbacks) = ctx.parse_callbacks() {
|
||||
|
@ -205,9 +205,12 @@ impl ClangSubItemParser for Var {
|
|||
(TypeKind::Pointer(char_ty), VarType::String(val))
|
||||
}
|
||||
EvalResult::Int(Wrapping(value)) => {
|
||||
let kind = ctx.parse_callbacks()
|
||||
let kind = ctx
|
||||
.parse_callbacks()
|
||||
.and_then(|c| c.int_macro(&name, value))
|
||||
.unwrap_or_else(|| default_macro_constant_type(value));
|
||||
.unwrap_or_else(|| {
|
||||
default_macro_constant_type(value)
|
||||
});
|
||||
|
||||
(TypeKind::Int(kind), VarType::Int(value))
|
||||
}
|
||||
|
@ -239,7 +242,7 @@ impl ClangSubItemParser for Var {
|
|||
ty.kind(),
|
||||
CXType_Auto,
|
||||
"Couldn't resolve constant type, and it \
|
||||
wasn't an nondeductible auto type!"
|
||||
wasn't an nondeductible auto type!"
|
||||
);
|
||||
return Err(e);
|
||||
}
|
||||
|
@ -249,9 +252,9 @@ impl ClangSubItemParser for Var {
|
|||
// tests/headers/inner_const.hpp
|
||||
//
|
||||
// That's fine because in that case we know it's not a literal.
|
||||
let canonical_ty = ctx.safe_resolve_type(ty).and_then(|t| {
|
||||
t.safe_canonical_type(ctx)
|
||||
});
|
||||
let canonical_ty = ctx
|
||||
.safe_resolve_type(ty)
|
||||
.and_then(|t| t.safe_canonical_type(ctx));
|
||||
|
||||
let is_integer = canonical_ty.map_or(false, |t| t.is_integer());
|
||||
let is_float = canonical_ty.map_or(false, |t| t.is_float());
|
||||
|
@ -266,27 +269,29 @@ impl ClangSubItemParser for Var {
|
|||
_ => unreachable!(),
|
||||
};
|
||||
|
||||
let mut val = cursor
|
||||
.evaluate()
|
||||
.and_then(|v| v.as_int());
|
||||
let mut val = cursor.evaluate().and_then(|v| v.as_int());
|
||||
if val.is_none() || !kind.signedness_matches(val.unwrap()) {
|
||||
let tu = ctx.translation_unit();
|
||||
val = get_integer_literal_from_cursor(&cursor, tu);
|
||||
}
|
||||
|
||||
val.map(|val| if kind == IntKind::Bool {
|
||||
VarType::Bool(val != 0)
|
||||
} else {
|
||||
VarType::Int(val)
|
||||
val.map(|val| {
|
||||
if kind == IntKind::Bool {
|
||||
VarType::Bool(val != 0)
|
||||
} else {
|
||||
VarType::Int(val)
|
||||
}
|
||||
})
|
||||
} else if is_float {
|
||||
cursor.evaluate().and_then(|v| v.as_double()).map(
|
||||
VarType::Float,
|
||||
)
|
||||
cursor
|
||||
.evaluate()
|
||||
.and_then(|v| v.as_double())
|
||||
.map(VarType::Float)
|
||||
} else {
|
||||
cursor.evaluate().and_then(|v| v.as_literal_string()).map(
|
||||
VarType::String,
|
||||
)
|
||||
cursor
|
||||
.evaluate()
|
||||
.and_then(|v| v.as_literal_string())
|
||||
.map(VarType::String)
|
||||
};
|
||||
|
||||
let mangling = cursor_mangling(ctx, &cursor);
|
||||
|
@ -355,8 +360,7 @@ fn get_integer_literal_from_cursor(
|
|||
let mut value = None;
|
||||
cursor.visit(|c| {
|
||||
match c.kind() {
|
||||
CXCursor_IntegerLiteral |
|
||||
CXCursor_UnaryOperator => {
|
||||
CXCursor_IntegerLiteral | CXCursor_UnaryOperator => {
|
||||
value = parse_int_literal_tokens(&c);
|
||||
}
|
||||
CXCursor_UnexposedExpr => {
|
||||
|
|
|
@ -14,7 +14,7 @@
|
|||
// constant.
|
||||
#![allow(non_upper_case_globals)]
|
||||
// `quote!` nests quite deeply.
|
||||
#![recursion_limit="128"]
|
||||
#![recursion_limit = "128"]
|
||||
|
||||
#[macro_use]
|
||||
extern crate bitflags;
|
||||
|
@ -23,7 +23,7 @@ extern crate cexpr;
|
|||
#[allow(unused_extern_crates)]
|
||||
extern crate cfg_if;
|
||||
extern crate clang_sys;
|
||||
extern crate fxhash;
|
||||
extern crate rustc_hash;
|
||||
#[macro_use]
|
||||
extern crate lazy_static;
|
||||
extern crate peeking_take_while;
|
||||
|
@ -32,6 +32,7 @@ extern crate quote;
|
|||
extern crate proc_macro2;
|
||||
extern crate regex;
|
||||
extern crate shlex;
|
||||
#[cfg(feature = "which-rustfmt")]
|
||||
extern crate which;
|
||||
|
||||
#[cfg(feature = "logging")]
|
||||
|
@ -81,26 +82,26 @@ doc_mod!(ir, ir_docs);
|
|||
doc_mod!(parse, parse_docs);
|
||||
doc_mod!(regex_set, regex_set_docs);
|
||||
|
||||
pub use features::{LATEST_STABLE_RUST, RUST_TARGET_STRINGS, RustTarget};
|
||||
pub use codegen::EnumVariation;
|
||||
use features::RustFeatures;
|
||||
pub use features::{RustTarget, LATEST_STABLE_RUST, RUST_TARGET_STRINGS};
|
||||
use ir::context::{BindgenContext, ItemId};
|
||||
use ir::item::Item;
|
||||
use parse::{ClangItemParser, ParseError};
|
||||
use regex_set::RegexSet;
|
||||
pub use codegen::EnumVariation;
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::fs::{File, OpenOptions};
|
||||
use std::io::{self, Write};
|
||||
use std::{env, iter};
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process::{Command, Stdio};
|
||||
use std::sync::Arc;
|
||||
use std::{env, iter};
|
||||
|
||||
// Some convenient typedefs for a fast hash map and hash set.
|
||||
type HashMap<K, V> = ::fxhash::FxHashMap<K, V>;
|
||||
type HashSet<K> = ::fxhash::FxHashSet<K>;
|
||||
pub(crate) use ::std::collections::hash_map::Entry;
|
||||
type HashMap<K, V> = ::rustc_hash::FxHashMap<K, V>;
|
||||
type HashSet<K> = ::rustc_hash::FxHashSet<K>;
|
||||
pub(crate) use std::collections::hash_map::Entry;
|
||||
|
||||
fn args_are_cpp(clang_args: &[String]) -> bool {
|
||||
return clang_args
|
||||
|
@ -225,13 +226,20 @@ impl Builder {
|
|||
|
||||
if self.options.default_enum_style != Default::default() {
|
||||
output_vector.push("--default-enum-style=".into());
|
||||
output_vector.push(match self.options.default_enum_style {
|
||||
codegen::EnumVariation::Rust { non_exhaustive: false } => "rust",
|
||||
codegen::EnumVariation::Rust { non_exhaustive: true } => "rust_non_exhaustive",
|
||||
codegen::EnumVariation::Bitfield => "bitfield",
|
||||
codegen::EnumVariation::Consts => "consts",
|
||||
codegen::EnumVariation::ModuleConsts => "moduleconsts",
|
||||
}.into())
|
||||
output_vector.push(
|
||||
match self.options.default_enum_style {
|
||||
codegen::EnumVariation::Rust {
|
||||
non_exhaustive: false,
|
||||
} => "rust",
|
||||
codegen::EnumVariation::Rust {
|
||||
non_exhaustive: true,
|
||||
} => "rust_non_exhaustive",
|
||||
codegen::EnumVariation::Bitfield => "bitfield",
|
||||
codegen::EnumVariation::Consts => "consts",
|
||||
codegen::EnumVariation::ModuleConsts => "moduleconsts",
|
||||
}
|
||||
.into(),
|
||||
)
|
||||
}
|
||||
|
||||
self.options
|
||||
|
@ -538,7 +546,8 @@ impl Builder {
|
|||
output_vector.push("--no-rustfmt-bindings".into());
|
||||
}
|
||||
|
||||
if let Some(path) = self.options
|
||||
if let Some(path) = self
|
||||
.options
|
||||
.rustfmt_configuration_file
|
||||
.as_ref()
|
||||
.and_then(|f| f.to_str())
|
||||
|
@ -611,9 +620,8 @@ impl Builder {
|
|||
///
|
||||
/// The file `name` will be added to the clang arguments.
|
||||
pub fn header_contents(mut self, name: &str, contents: &str) -> Builder {
|
||||
self.input_header_contents.push(
|
||||
(name.into(), contents.into()),
|
||||
);
|
||||
self.input_header_contents
|
||||
.push((name.into(), contents.into()));
|
||||
self
|
||||
}
|
||||
|
||||
|
@ -800,7 +808,10 @@ impl Builder {
|
|||
}
|
||||
|
||||
/// Set the default style of code to generate for enums
|
||||
pub fn default_enum_style(mut self, arg: codegen::EnumVariation) -> Builder {
|
||||
pub fn default_enum_style(
|
||||
mut self,
|
||||
arg: codegen::EnumVariation,
|
||||
) -> Builder {
|
||||
self.options.default_enum_style = arg;
|
||||
self
|
||||
}
|
||||
|
@ -834,7 +845,10 @@ impl Builder {
|
|||
///
|
||||
/// This makes bindgen generate enums instead of constants. Regular
|
||||
/// expressions are supported.
|
||||
pub fn rustified_non_exhaustive_enum<T: AsRef<str>>(mut self, arg: T) -> Builder {
|
||||
pub fn rustified_non_exhaustive_enum<T: AsRef<str>>(
|
||||
mut self,
|
||||
arg: T,
|
||||
) -> Builder {
|
||||
self.options.rustified_non_exhaustive_enums.insert(arg);
|
||||
self
|
||||
}
|
||||
|
@ -1146,7 +1160,7 @@ impl Builder {
|
|||
/// [`ParseCallbacks`](./callbacks/trait.ParseCallbacks.html) documentation.
|
||||
pub fn parse_callbacks(
|
||||
mut self,
|
||||
cb: Box<callbacks::ParseCallbacks>,
|
||||
cb: Box<dyn callbacks::ParseCallbacks>,
|
||||
) -> Self {
|
||||
self.options.parse_callbacks = Some(cb);
|
||||
self
|
||||
|
@ -1200,7 +1214,9 @@ impl Builder {
|
|||
/// Generate the Rust bindings using the options built up thus far.
|
||||
pub fn generate(mut self) -> Result<Bindings, ()> {
|
||||
// Add any extra arguments from the environment to the clang command line.
|
||||
if let Some(extra_clang_args) = env::var("BINDGEN_EXTRA_CLANG_ARGS").ok() {
|
||||
if let Some(extra_clang_args) =
|
||||
env::var("BINDGEN_EXTRA_CLANG_ARGS").ok()
|
||||
{
|
||||
// Try to parse it with shell quoting. If we fail, make it one single big argument.
|
||||
if let Some(strings) = shlex::split(&extra_clang_args) {
|
||||
self.options.clang_args.extend(strings);
|
||||
|
@ -1211,18 +1227,18 @@ impl Builder {
|
|||
|
||||
// Transform input headers to arguments on the clang command line.
|
||||
self.options.input_header = self.input_headers.pop();
|
||||
self.options.clang_args.extend(
|
||||
self.input_headers
|
||||
.drain(..)
|
||||
.flat_map(|header| {
|
||||
iter::once("-include".into()).chain(iter::once(header))
|
||||
}),
|
||||
);
|
||||
self.options
|
||||
.clang_args
|
||||
.extend(self.input_headers.drain(..).flat_map(|header| {
|
||||
iter::once("-include".into()).chain(iter::once(header))
|
||||
}));
|
||||
|
||||
self.options.input_unsaved_files.extend(
|
||||
self.input_header_contents.drain(..).map(|(name, contents)| {
|
||||
clang::UnsavedFile::new(&name, &contents)
|
||||
}),
|
||||
self.input_header_contents
|
||||
.drain(..)
|
||||
.map(|(name, contents)| {
|
||||
clang::UnsavedFile::new(&name, &contents)
|
||||
}),
|
||||
);
|
||||
|
||||
Bindings::generate(self.options)
|
||||
|
@ -1235,14 +1251,19 @@ impl Builder {
|
|||
/// `__bindgen.ii`
|
||||
pub fn dump_preprocessed_input(&self) -> io::Result<()> {
|
||||
fn check_is_cpp(name_file: &str) -> bool {
|
||||
name_file.ends_with(".hpp") || name_file.ends_with(".hxx")
|
||||
|| name_file.ends_with(".hh")
|
||||
|| name_file.ends_with(".h++")
|
||||
name_file.ends_with(".hpp") ||
|
||||
name_file.ends_with(".hxx") ||
|
||||
name_file.ends_with(".hh") ||
|
||||
name_file.ends_with(".h++")
|
||||
}
|
||||
|
||||
let clang = clang_sys::support::Clang::find(None, &[]).ok_or_else(|| {
|
||||
io::Error::new(io::ErrorKind::Other, "Cannot find clang executable")
|
||||
})?;
|
||||
let clang =
|
||||
clang_sys::support::Clang::find(None, &[]).ok_or_else(|| {
|
||||
io::Error::new(
|
||||
io::ErrorKind::Other,
|
||||
"Cannot find clang executable",
|
||||
)
|
||||
})?;
|
||||
|
||||
// The contents of a wrapper file that includes all the input header
|
||||
// files.
|
||||
|
@ -1500,7 +1521,7 @@ struct BindgenOptions {
|
|||
|
||||
/// A user-provided visitor to allow customizing different kinds of
|
||||
/// situations.
|
||||
parse_callbacks: Option<Box<callbacks::ParseCallbacks>>,
|
||||
parse_callbacks: Option<Box<dyn callbacks::ParseCallbacks>>,
|
||||
|
||||
/// Which kind of items should we generate? By default, we'll generate all
|
||||
/// of them.
|
||||
|
@ -1565,7 +1586,6 @@ struct BindgenOptions {
|
|||
|
||||
/// The absolute path to the rustfmt configuration file, if None, the standard rustfmt
|
||||
/// options are used.
|
||||
|
||||
rustfmt_configuration_file: Option<PathBuf>,
|
||||
|
||||
/// The set of types that we should not derive `PartialEq` for.
|
||||
|
@ -1709,9 +1729,10 @@ fn ensure_libclang_is_loaded() {
|
|||
lazy_static! {
|
||||
static ref LIBCLANG: Arc<clang_sys::SharedLibrary> = {
|
||||
clang_sys::load().expect("Unable to find libclang");
|
||||
clang_sys::get_library()
|
||||
.expect("We just loaded libclang and it had better still be \
|
||||
here!")
|
||||
clang_sys::get_library().expect(
|
||||
"We just loaded libclang and it had better still be \
|
||||
here!",
|
||||
)
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -1732,7 +1753,10 @@ impl Bindings {
|
|||
) -> Result<Bindings, ()> {
|
||||
ensure_libclang_is_loaded();
|
||||
|
||||
debug!("Generating bindings, libclang at {}", clang_sys::get_library().unwrap().path().display());
|
||||
debug!(
|
||||
"Generating bindings, libclang at {}",
|
||||
clang_sys::get_library().unwrap().path().display()
|
||||
);
|
||||
|
||||
options.build();
|
||||
|
||||
|
@ -1745,32 +1769,45 @@ impl Bindings {
|
|||
// promote them to `-isystem`.
|
||||
let clang_args_for_clang_sys = {
|
||||
let mut last_was_include_prefix = false;
|
||||
options.clang_args.iter().filter(|arg| {
|
||||
if last_was_include_prefix {
|
||||
last_was_include_prefix = false;
|
||||
return false;
|
||||
}
|
||||
options
|
||||
.clang_args
|
||||
.iter()
|
||||
.filter(|arg| {
|
||||
if last_was_include_prefix {
|
||||
last_was_include_prefix = false;
|
||||
return false;
|
||||
}
|
||||
|
||||
let arg = &**arg;
|
||||
let arg = &**arg;
|
||||
|
||||
// https://clang.llvm.org/docs/ClangCommandLineReference.html
|
||||
// -isystem and -isystem-after are harmless.
|
||||
if arg == "-I" || arg == "--include-directory" {
|
||||
last_was_include_prefix = true;
|
||||
return false;
|
||||
}
|
||||
// https://clang.llvm.org/docs/ClangCommandLineReference.html
|
||||
// -isystem and -isystem-after are harmless.
|
||||
if arg == "-I" || arg == "--include-directory" {
|
||||
last_was_include_prefix = true;
|
||||
return false;
|
||||
}
|
||||
|
||||
if arg.starts_with("-I") || arg.starts_with("--include-directory=") {
|
||||
return false;
|
||||
}
|
||||
if arg.starts_with("-I") ||
|
||||
arg.starts_with("--include-directory=")
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
true
|
||||
}).cloned().collect::<Vec<_>>()
|
||||
true
|
||||
})
|
||||
.cloned()
|
||||
.collect::<Vec<_>>()
|
||||
};
|
||||
|
||||
debug!("Trying to find clang with flags: {:?}", clang_args_for_clang_sys);
|
||||
debug!(
|
||||
"Trying to find clang with flags: {:?}",
|
||||
clang_args_for_clang_sys
|
||||
);
|
||||
|
||||
let clang = match clang_sys::support::Clang::find(None, &clang_args_for_clang_sys) {
|
||||
let clang = match clang_sys::support::Clang::find(
|
||||
None,
|
||||
&clang_args_for_clang_sys,
|
||||
) {
|
||||
None => return,
|
||||
Some(clang) => clang,
|
||||
};
|
||||
|
@ -1780,9 +1817,9 @@ impl Bindings {
|
|||
// Whether we are working with C or C++ inputs.
|
||||
let is_cpp = args_are_cpp(&options.clang_args);
|
||||
let search_paths = if is_cpp {
|
||||
clang.cpp_search_paths
|
||||
clang.cpp_search_paths
|
||||
} else {
|
||||
clang.c_search_paths
|
||||
clang.c_search_paths
|
||||
};
|
||||
|
||||
if let Some(search_paths) = search_paths {
|
||||
|
@ -1815,7 +1852,10 @@ impl Bindings {
|
|||
return Err(());
|
||||
}
|
||||
if !can_read(&md.permissions()) {
|
||||
eprintln!("error: insufficient permissions to read '{}'", h);
|
||||
eprintln!(
|
||||
"error: insufficient permissions to read '{}'",
|
||||
h
|
||||
);
|
||||
return Err(());
|
||||
}
|
||||
options.clang_args.push(h.clone())
|
||||
|
@ -1835,8 +1875,7 @@ impl Bindings {
|
|||
let mut context = BindgenContext::new(options);
|
||||
|
||||
{
|
||||
let _t = time::Timer::new("parse")
|
||||
.with_output(time_phases);
|
||||
let _t = time::Timer::new("parse").with_output(time_phases);
|
||||
parse(&mut context)?;
|
||||
}
|
||||
|
||||
|
@ -1846,14 +1885,14 @@ impl Bindings {
|
|||
options: options,
|
||||
module: quote! {
|
||||
#( #items )*
|
||||
}
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
/// Convert these bindings into source text (with raw lines prepended).
|
||||
pub fn to_string(&self) -> String {
|
||||
let mut bytes = vec![];
|
||||
self.write(Box::new(&mut bytes) as Box<Write>)
|
||||
self.write(Box::new(&mut bytes) as Box<dyn Write>)
|
||||
.expect("writing to a vec cannot fail");
|
||||
String::from_utf8(bytes)
|
||||
.expect("we should only write bindings that are valid utf-8")
|
||||
|
@ -1871,7 +1910,7 @@ impl Bindings {
|
|||
}
|
||||
|
||||
/// Write these bindings as source text to the given `Write`able.
|
||||
pub fn write<'a>(&self, mut writer: Box<Write + 'a>) -> io::Result<()> {
|
||||
pub fn write<'a>(&self, mut writer: Box<dyn Write + 'a>) -> io::Result<()> {
|
||||
writer.write(
|
||||
"/* automatically generated by rust-bindgen */\n\n".as_bytes(),
|
||||
)?;
|
||||
|
@ -1890,11 +1929,14 @@ impl Bindings {
|
|||
match self.rustfmt_generated_string(&bindings) {
|
||||
Ok(rustfmt_bindings) => {
|
||||
writer.write(rustfmt_bindings.as_bytes())?;
|
||||
},
|
||||
}
|
||||
Err(err) => {
|
||||
eprintln!("Failed to run rustfmt: {} (non-fatal, continuing)", err);
|
||||
eprintln!(
|
||||
"Failed to run rustfmt: {} (non-fatal, continuing)",
|
||||
err
|
||||
);
|
||||
writer.write(bindings.as_bytes())?;
|
||||
},
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -1908,10 +1950,18 @@ impl Bindings {
|
|||
if let Ok(rustfmt) = env::var("RUSTFMT") {
|
||||
return Ok(Cow::Owned(rustfmt.into()));
|
||||
}
|
||||
#[cfg(feature = "which-rustfmt")]
|
||||
match which::which("rustfmt") {
|
||||
Ok(p) => Ok(Cow::Owned(p)),
|
||||
Err(e) => Err(io::Error::new(io::ErrorKind::Other, format!("{}", e))),
|
||||
Err(e) => {
|
||||
Err(io::Error::new(io::ErrorKind::Other, format!("{}", e)))
|
||||
}
|
||||
}
|
||||
#[cfg(not(feature = "which-rustfmt"))]
|
||||
Err(io::Error::new(
|
||||
io::ErrorKind::Other,
|
||||
"which wasn't enabled, and no rustfmt binary specified",
|
||||
))
|
||||
}
|
||||
|
||||
/// Checks if rustfmt_bindings is set and runs rustfmt on the string
|
||||
|
@ -1929,11 +1979,10 @@ impl Bindings {
|
|||
let rustfmt = self.rustfmt_path()?;
|
||||
let mut cmd = Command::new(&*rustfmt);
|
||||
|
||||
cmd
|
||||
.stdin(Stdio::piped())
|
||||
.stdout(Stdio::piped());
|
||||
cmd.stdin(Stdio::piped()).stdout(Stdio::piped());
|
||||
|
||||
if let Some(path) = self.options
|
||||
if let Some(path) = self
|
||||
.options
|
||||
.rustfmt_configuration_file
|
||||
.as_ref()
|
||||
.and_then(|f| f.to_str())
|
||||
|
@ -1959,29 +2008,28 @@ impl Bindings {
|
|||
io::copy(&mut child_stdout, &mut output)?;
|
||||
|
||||
let status = child.wait()?;
|
||||
let source = stdin_handle.join()
|
||||
.expect("The thread writing to rustfmt's stdin doesn't do \
|
||||
anything that could panic");
|
||||
let source = stdin_handle.join().expect(
|
||||
"The thread writing to rustfmt's stdin doesn't do \
|
||||
anything that could panic",
|
||||
);
|
||||
|
||||
match String::from_utf8(output) {
|
||||
Ok(bindings) => {
|
||||
match status.code() {
|
||||
Some(0) => Ok(Cow::Owned(bindings)),
|
||||
Some(2) => Err(io::Error::new(
|
||||
io::ErrorKind::Other,
|
||||
"Rustfmt parsing errors.".to_string(),
|
||||
)),
|
||||
Some(3) => {
|
||||
warn!("Rustfmt could not format some lines.");
|
||||
Ok(Cow::Owned(bindings))
|
||||
}
|
||||
_ => Err(io::Error::new(
|
||||
io::ErrorKind::Other,
|
||||
"Internal rustfmt error".to_string(),
|
||||
)),
|
||||
Ok(bindings) => match status.code() {
|
||||
Some(0) => Ok(Cow::Owned(bindings)),
|
||||
Some(2) => Err(io::Error::new(
|
||||
io::ErrorKind::Other,
|
||||
"Rustfmt parsing errors.".to_string(),
|
||||
)),
|
||||
Some(3) => {
|
||||
warn!("Rustfmt could not format some lines.");
|
||||
Ok(Cow::Owned(bindings))
|
||||
}
|
||||
_ => Err(io::Error::new(
|
||||
io::ErrorKind::Other,
|
||||
"Internal rustfmt error".to_string(),
|
||||
)),
|
||||
},
|
||||
_ => Ok(Cow::Owned(source))
|
||||
_ => Ok(Cow::Owned(source)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -2032,7 +2080,6 @@ fn parse(context: &mut BindgenContext) -> Result<(), ()> {
|
|||
let cursor = context.translation_unit().cursor();
|
||||
|
||||
if context.options().emit_ast {
|
||||
|
||||
fn dump_if_not_builtin(cur: &clang::Cursor) -> CXChildVisitResult {
|
||||
if !cur.is_builtin() {
|
||||
clang::ast_dump(&cur, 0)
|
||||
|
@ -2072,9 +2119,10 @@ pub fn clang_version() -> ClangVersion {
|
|||
}
|
||||
|
||||
let raw_v: String = clang::extract_clang_version();
|
||||
let split_v: Option<Vec<&str>> = raw_v.split_whitespace().nth(2).map(|v| {
|
||||
v.split('.').collect()
|
||||
});
|
||||
let split_v: Option<Vec<&str>> = raw_v
|
||||
.split_whitespace()
|
||||
.nth(2)
|
||||
.map(|v| v.split('.').collect());
|
||||
match split_v {
|
||||
Some(v) => {
|
||||
if v.len() >= 2 {
|
||||
|
@ -2111,13 +2159,14 @@ fn commandline_flag_unit_test_function() {
|
|||
"--no-derive-default",
|
||||
"--generate",
|
||||
"functions,types,vars,methods,constructors,destructors",
|
||||
].iter()
|
||||
.map(|&x| x.into())
|
||||
.collect::<Vec<String>>();
|
||||
]
|
||||
.iter()
|
||||
.map(|&x| x.into())
|
||||
.collect::<Vec<String>>();
|
||||
|
||||
assert!(test_cases.iter().all(
|
||||
|ref x| command_line_flags.contains(x),
|
||||
));
|
||||
assert!(test_cases
|
||||
.iter()
|
||||
.all(|ref x| command_line_flags.contains(x),));
|
||||
|
||||
//Test 2
|
||||
let bindings = ::builder()
|
||||
|
@ -2136,13 +2185,13 @@ fn commandline_flag_unit_test_function() {
|
|||
"Distinct_Type",
|
||||
"--whitelist-function",
|
||||
"safe_function",
|
||||
].iter()
|
||||
.map(|&x| x.into())
|
||||
.collect::<Vec<String>>();
|
||||
]
|
||||
.iter()
|
||||
.map(|&x| x.into())
|
||||
.collect::<Vec<String>>();
|
||||
println!("{:?}", command_line_flags);
|
||||
|
||||
assert!(test_cases.iter().all(
|
||||
|ref x| command_line_flags.contains(x),
|
||||
));
|
||||
|
||||
assert!(test_cases
|
||||
.iter()
|
||||
.all(|ref x| command_line_flags.contains(x),));
|
||||
}
|
||||
|
|
|
@ -45,7 +45,6 @@ pub fn main() {
|
|||
|
||||
match builder_from_flags(bind_args.into_iter()) {
|
||||
Ok((builder, output, verbose)) => {
|
||||
|
||||
let builder_result = panic::catch_unwind(|| {
|
||||
builder.generate().expect("Unable to generate bindings")
|
||||
});
|
||||
|
@ -71,12 +70,12 @@ fn print_verbose_err() {
|
|||
println!("Bindgen unexpectedly panicked");
|
||||
println!(
|
||||
"This may be caused by one of the known-unsupported \
|
||||
things (https://rust-lang.github.io/rust-bindgen/cpp.html), \
|
||||
please modify the bindgen flags to work around it as \
|
||||
described in https://rust-lang.github.io/rust-bindgen/cpp.html"
|
||||
things (https://rust-lang.github.io/rust-bindgen/cpp.html), \
|
||||
please modify the bindgen flags to work around it as \
|
||||
described in https://rust-lang.github.io/rust-bindgen/cpp.html"
|
||||
);
|
||||
println!(
|
||||
"Otherwise, please file an issue at \
|
||||
https://github.com/rust-lang/rust-bindgen/issues/new"
|
||||
https://github.com/rust-lang/rust-bindgen/issues/new"
|
||||
);
|
||||
}
|
||||
|
|
|
@ -1,14 +1,12 @@
|
|||
use bindgen::{Builder, CodegenConfig, RUST_TARGET_STRINGS, RustTarget, builder, EnumVariation};
|
||||
use bindgen::{builder, Builder, CodegenConfig, EnumVariation, RustTarget, RUST_TARGET_STRINGS};
|
||||
use clap::{App, Arg};
|
||||
use std::fs::File;
|
||||
use std::io::{self, Error, ErrorKind, Write, stderr};
|
||||
use std::io::{self, stderr, Error, ErrorKind, Write};
|
||||
use std::path::PathBuf;
|
||||
use std::str::FromStr;
|
||||
|
||||
/// Construct a new [`Builder`](./struct.Builder.html) from command line flags.
|
||||
pub fn builder_from_flags<I>(
|
||||
args: I,
|
||||
) -> Result<(Builder, Box<io::Write>, bool), io::Error>
|
||||
pub fn builder_from_flags<I>(args: I) -> Result<(Builder, Box<dyn io::Write>, bool), io::Error>
|
||||
where
|
||||
I: Iterator<Item = String>,
|
||||
{
|
||||
|
@ -31,12 +29,20 @@ where
|
|||
.help("The default style of code used to generate enums.")
|
||||
.value_name("variant")
|
||||
.default_value("consts")
|
||||
.possible_values(&["consts", "moduleconsts", "bitfield", "rust", "rust_non_exhaustive"])
|
||||
.possible_values(&[
|
||||
"consts",
|
||||
"moduleconsts",
|
||||
"bitfield",
|
||||
"rust",
|
||||
"rust_non_exhaustive",
|
||||
])
|
||||
.multiple(false),
|
||||
Arg::with_name("bitfield-enum")
|
||||
.long("bitfield-enum")
|
||||
.help("Mark any enum whose name matches <regex> as a set of \
|
||||
bitfield flags.")
|
||||
.help(
|
||||
"Mark any enum whose name matches <regex> as a set of \
|
||||
bitfield flags.",
|
||||
)
|
||||
.value_name("regex")
|
||||
.takes_value(true)
|
||||
.multiple(true)
|
||||
|
@ -50,16 +56,20 @@ where
|
|||
.number_of_values(1),
|
||||
Arg::with_name("constified-enum")
|
||||
.long("constified-enum")
|
||||
.help("Mark any enum whose name matches <regex> as a series of \
|
||||
constants.")
|
||||
.help(
|
||||
"Mark any enum whose name matches <regex> as a series of \
|
||||
constants.",
|
||||
)
|
||||
.value_name("regex")
|
||||
.takes_value(true)
|
||||
.multiple(true)
|
||||
.number_of_values(1),
|
||||
Arg::with_name("constified-enum-module")
|
||||
.long("constified-enum-module")
|
||||
.help("Mark any enum whose name matches <regex> as a module of \
|
||||
constants.")
|
||||
.help(
|
||||
"Mark any enum whose name matches <regex> as a module of \
|
||||
constants.",
|
||||
)
|
||||
.value_name("regex")
|
||||
.takes_value(true)
|
||||
.multiple(true)
|
||||
|
@ -98,14 +108,16 @@ where
|
|||
.long("no-derive-default")
|
||||
.hidden(true)
|
||||
.help("Avoid deriving Default on any type."),
|
||||
Arg::with_name("impl-debug")
|
||||
.long("impl-debug")
|
||||
.help("Create Debug implementation, if it can not be derived \
|
||||
automatically."),
|
||||
Arg::with_name("impl-debug").long("impl-debug").help(
|
||||
"Create Debug implementation, if it can not be derived \
|
||||
automatically.",
|
||||
),
|
||||
Arg::with_name("impl-partialeq")
|
||||
.long("impl-partialeq")
|
||||
.help("Create PartialEq implementation, if it can not be derived \
|
||||
automatically."),
|
||||
.help(
|
||||
"Create PartialEq implementation, if it can not be derived \
|
||||
automatically.",
|
||||
),
|
||||
Arg::with_name("with-derive-default")
|
||||
.long("with-derive-default")
|
||||
.help("Derive Default on any type."),
|
||||
|
@ -120,22 +132,30 @@ where
|
|||
.help("Derive partialord on any type."),
|
||||
Arg::with_name("with-derive-eq")
|
||||
.long("with-derive-eq")
|
||||
.help("Derive eq on any type. Enable this option also \
|
||||
enables --with-derive-partialeq"),
|
||||
.help(
|
||||
"Derive eq on any type. Enable this option also \
|
||||
enables --with-derive-partialeq",
|
||||
),
|
||||
Arg::with_name("with-derive-ord")
|
||||
.long("with-derive-ord")
|
||||
.help("Derive ord on any type. Enable this option also \
|
||||
enables --with-derive-partialord"),
|
||||
.help(
|
||||
"Derive ord on any type. Enable this option also \
|
||||
enables --with-derive-partialord",
|
||||
),
|
||||
Arg::with_name("no-doc-comments")
|
||||
.long("no-doc-comments")
|
||||
.help("Avoid including doc comments in the output, see: \
|
||||
https://github.com/rust-lang/rust-bindgen/issues/426"),
|
||||
.help(
|
||||
"Avoid including doc comments in the output, see: \
|
||||
https://github.com/rust-lang/rust-bindgen/issues/426",
|
||||
),
|
||||
Arg::with_name("no-recursive-whitelist")
|
||||
.long("no-recursive-whitelist")
|
||||
.help("Disable whitelisting types recursively. This will cause \
|
||||
bindgen to emit Rust code that won't compile! See the \
|
||||
`bindgen::Builder::whitelist_recursively` method's \
|
||||
documentation for details."),
|
||||
.help(
|
||||
"Disable whitelisting types recursively. This will cause \
|
||||
bindgen to emit Rust code that won't compile! See the \
|
||||
`bindgen::Builder::whitelist_recursively` method's \
|
||||
documentation for details.",
|
||||
),
|
||||
Arg::with_name("objc-extern-crate")
|
||||
.long("objc-extern-crate")
|
||||
.help("Use extern crate instead of use for objc."),
|
||||
|
@ -148,23 +168,23 @@ where
|
|||
Arg::with_name("distrust-clang-mangling")
|
||||
.long("distrust-clang-mangling")
|
||||
.help("Do not trust the libclang-provided mangling"),
|
||||
Arg::with_name("builtins")
|
||||
.long("builtins")
|
||||
.help("Output bindings for builtin definitions, e.g. \
|
||||
__builtin_va_list."),
|
||||
Arg::with_name("builtins").long("builtins").help(
|
||||
"Output bindings for builtin definitions, e.g. \
|
||||
__builtin_va_list.",
|
||||
),
|
||||
Arg::with_name("ctypes-prefix")
|
||||
.long("ctypes-prefix")
|
||||
.help("Use the given prefix before raw types instead of \
|
||||
::std::os::raw.")
|
||||
.help(
|
||||
"Use the given prefix before raw types instead of \
|
||||
::std::os::raw.",
|
||||
)
|
||||
.value_name("prefix")
|
||||
.takes_value(true),
|
||||
Arg::with_name("time-phases")
|
||||
.long("time-phases")
|
||||
.help("Time the different bindgen phases and print to stderr"),
|
||||
// All positional arguments after the end of options marker, `--`
|
||||
Arg::with_name("clang-args")
|
||||
.last(true)
|
||||
.multiple(true),
|
||||
Arg::with_name("clang-args").last(true).multiple(true),
|
||||
Arg::with_name("emit-clang-ast")
|
||||
.long("emit-clang-ast")
|
||||
.help("Output the Clang AST for debugging purposes."),
|
||||
|
@ -181,18 +201,24 @@ where
|
|||
.help("Enable support for C++ namespaces."),
|
||||
Arg::with_name("disable-name-namespacing")
|
||||
.long("disable-name-namespacing")
|
||||
.help("Disable namespacing via mangling, causing bindgen to \
|
||||
generate names like \"Baz\" instead of \"foo_bar_Baz\" \
|
||||
for an input name \"foo::bar::Baz\"."),
|
||||
.help(
|
||||
"Disable namespacing via mangling, causing bindgen to \
|
||||
generate names like \"Baz\" instead of \"foo_bar_Baz\" \
|
||||
for an input name \"foo::bar::Baz\".",
|
||||
),
|
||||
Arg::with_name("ignore-functions")
|
||||
.long("ignore-functions")
|
||||
.help("Do not generate bindings for functions or methods. This \
|
||||
is useful when you only care about struct layouts."),
|
||||
.help(
|
||||
"Do not generate bindings for functions or methods. This \
|
||||
is useful when you only care about struct layouts.",
|
||||
),
|
||||
Arg::with_name("generate")
|
||||
.long("generate")
|
||||
.help("Generate only given items, split by commas. \
|
||||
Valid values are \"functions\",\"types\", \"vars\", \
|
||||
\"methods\", \"constructors\" and \"destructors\".")
|
||||
.help(
|
||||
"Generate only given items, split by commas. \
|
||||
Valid values are \"functions\",\"types\", \"vars\", \
|
||||
\"methods\", \"constructors\" and \"destructors\".",
|
||||
)
|
||||
.takes_value(true),
|
||||
Arg::with_name("ignore-methods")
|
||||
.long("ignore-methods")
|
||||
|
@ -237,16 +263,20 @@ where
|
|||
.help("Use types from Rust core instead of std."),
|
||||
Arg::with_name("conservative-inline-namespaces")
|
||||
.long("conservative-inline-namespaces")
|
||||
.help("Conservatively generate inline namespaces to avoid name \
|
||||
conflicts."),
|
||||
.help(
|
||||
"Conservatively generate inline namespaces to avoid name \
|
||||
conflicts.",
|
||||
),
|
||||
Arg::with_name("use-msvc-mangling")
|
||||
.long("use-msvc-mangling")
|
||||
.help("MSVC C++ ABI mangling. DEPRECATED: Has no effect."),
|
||||
Arg::with_name("whitelist-function")
|
||||
.long("whitelist-function")
|
||||
.help("Whitelist all the free-standing functions matching \
|
||||
<regex>. Other non-whitelisted functions will not be \
|
||||
generated.")
|
||||
.help(
|
||||
"Whitelist all the free-standing functions matching \
|
||||
<regex>. Other non-whitelisted functions will not be \
|
||||
generated.",
|
||||
)
|
||||
.value_name("regex")
|
||||
.takes_value(true)
|
||||
.multiple(true)
|
||||
|
@ -256,17 +286,21 @@ where
|
|||
.help("Generate inline functions."),
|
||||
Arg::with_name("whitelist-type")
|
||||
.long("whitelist-type")
|
||||
.help("Only generate types matching <regex>. Other non-whitelisted types will \
|
||||
not be generated.")
|
||||
.help(
|
||||
"Only generate types matching <regex>. Other non-whitelisted types will \
|
||||
not be generated.",
|
||||
)
|
||||
.value_name("regex")
|
||||
.takes_value(true)
|
||||
.multiple(true)
|
||||
.number_of_values(1),
|
||||
Arg::with_name("whitelist-var")
|
||||
.long("whitelist-var")
|
||||
.help("Whitelist all the free-standing variables matching \
|
||||
<regex>. Other non-whitelisted variables will not be \
|
||||
generated.")
|
||||
.help(
|
||||
"Whitelist all the free-standing variables matching \
|
||||
<regex>. Other non-whitelisted variables will not be \
|
||||
generated.",
|
||||
)
|
||||
.value_name("regex")
|
||||
.takes_value(true)
|
||||
.multiple(true)
|
||||
|
@ -276,27 +310,35 @@ where
|
|||
.help("Print verbose error messages."),
|
||||
Arg::with_name("dump-preprocessed-input")
|
||||
.long("dump-preprocessed-input")
|
||||
.help("Preprocess and dump the input header files to disk. \
|
||||
Useful when debugging bindgen, using C-Reduce, or when \
|
||||
filing issues. The resulting file will be named \
|
||||
something like `__bindgen.i` or `__bindgen.ii`."),
|
||||
.help(
|
||||
"Preprocess and dump the input header files to disk. \
|
||||
Useful when debugging bindgen, using C-Reduce, or when \
|
||||
filing issues. The resulting file will be named \
|
||||
something like `__bindgen.i` or `__bindgen.ii`.",
|
||||
),
|
||||
Arg::with_name("no-record-matches")
|
||||
.long("no-record-matches")
|
||||
.help("Do not record matching items in the regex sets. \
|
||||
This disables reporting of unused items."),
|
||||
.help(
|
||||
"Do not record matching items in the regex sets. \
|
||||
This disables reporting of unused items.",
|
||||
),
|
||||
Arg::with_name("no-rustfmt-bindings")
|
||||
.long("no-rustfmt-bindings")
|
||||
.help("Do not format the generated bindings with rustfmt."),
|
||||
Arg::with_name("rustfmt-bindings")
|
||||
.long("rustfmt-bindings")
|
||||
.help("Format the generated bindings with rustfmt. DEPRECATED: \
|
||||
--rustfmt-bindings is now enabled by default. Disable \
|
||||
with --no-rustfmt-bindings."),
|
||||
.help(
|
||||
"Format the generated bindings with rustfmt. DEPRECATED: \
|
||||
--rustfmt-bindings is now enabled by default. Disable \
|
||||
with --no-rustfmt-bindings.",
|
||||
),
|
||||
Arg::with_name("rustfmt-configuration-file")
|
||||
.long("rustfmt-configuration-file")
|
||||
.help("The absolute path to the rustfmt configuration file. \
|
||||
The configuration file will be used for formatting the bindings. \
|
||||
This parameter is incompatible with --no-rustfmt-bindings.")
|
||||
.help(
|
||||
"The absolute path to the rustfmt configuration file. \
|
||||
The configuration file will be used for formatting the bindings. \
|
||||
This parameter is incompatible with --no-rustfmt-bindings.",
|
||||
)
|
||||
.value_name("path")
|
||||
.takes_value(true)
|
||||
.multiple(false)
|
||||
|
@ -324,8 +366,10 @@ where
|
|||
.number_of_values(1),
|
||||
Arg::with_name("enable-function-attribute-detection")
|
||||
.long("enable-function-attribute-detection")
|
||||
.help("Enables detecting unexposed attributes in functions (slow).
|
||||
Used to generate #[must_use] annotations."),
|
||||
.help(
|
||||
"Enables detecting unexposed attributes in functions (slow).
|
||||
Used to generate #[must_use] annotations.",
|
||||
),
|
||||
Arg::with_name("use-array-pointers-in-arguments")
|
||||
.long("use-array-pointers-in-arguments")
|
||||
.help("Use `*const [T; size]` instead of `*const T` for C arrays"),
|
||||
|
@ -345,7 +389,8 @@ where
|
|||
writeln!(
|
||||
&mut stderr(),
|
||||
"warning: the `--unstable-rust` option is deprecated"
|
||||
).expect("Unable to write error message");
|
||||
)
|
||||
.expect("Unable to write error message");
|
||||
}
|
||||
|
||||
if let Some(rust_target) = matches.value_of("rust-target") {
|
||||
|
@ -600,9 +645,9 @@ where
|
|||
|
||||
let output = if let Some(path) = matches.value_of("output") {
|
||||
let file = File::create(path)?;
|
||||
Box::new(io::BufWriter::new(file)) as Box<io::Write>
|
||||
Box::new(io::BufWriter::new(file)) as Box<dyn io::Write>
|
||||
} else {
|
||||
Box::new(io::BufWriter::new(io::stdout())) as Box<io::Write>
|
||||
Box::new(io::BufWriter::new(io::stdout())) as Box<dyn io::Write>
|
||||
};
|
||||
|
||||
if matches.is_present("dump-preprocessed-input") {
|
||||
|
@ -624,7 +669,7 @@ where
|
|||
if no_rustfmt_bindings {
|
||||
return Err(Error::new(
|
||||
ErrorKind::Other,
|
||||
"Cannot supply both --rustfmt-configuration-file and --no-rustfmt-bindings"
|
||||
"Cannot supply both --rustfmt-configuration-file and --no-rustfmt-bindings",
|
||||
));
|
||||
}
|
||||
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
use std::io::{self, Write};
|
||||
use std::time::{Instant, Duration};
|
||||
|
||||
use std::time::{Duration, Instant};
|
||||
|
||||
/// RAII timer to measure how long phases take.
|
||||
#[derive(Debug)]
|
||||
|
@ -10,7 +9,6 @@ pub struct Timer<'a> {
|
|||
start: Instant,
|
||||
}
|
||||
|
||||
|
||||
impl<'a> Timer<'a> {
|
||||
/// Creates a Timer with the given name, and starts it. By default,
|
||||
/// will print to stderr when it is `drop`'d
|
||||
|
@ -18,7 +16,7 @@ impl<'a> Timer<'a> {
|
|||
Timer {
|
||||
output: true,
|
||||
name,
|
||||
start: Instant::now()
|
||||
start: Instant::now(),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -37,19 +35,16 @@ impl<'a> Timer<'a> {
|
|||
fn print_elapsed(&mut self) {
|
||||
if self.output {
|
||||
let elapsed = self.elapsed();
|
||||
let time = (elapsed.as_secs() as f64) * 1e3
|
||||
+ (elapsed.subsec_nanos() as f64) / 1e6;
|
||||
let time = (elapsed.as_secs() as f64) * 1e3 +
|
||||
(elapsed.subsec_nanos() as f64) / 1e6;
|
||||
let stderr = io::stderr();
|
||||
// Arbitrary output format, subject to change.
|
||||
writeln!(stderr.lock(),
|
||||
" time: {:>9.3} ms.\t{}",
|
||||
time, self.name)
|
||||
.expect("timer write should not fail");
|
||||
writeln!(stderr.lock(), " time: {:>9.3} ms.\t{}", time, self.name)
|
||||
.expect("timer write should not fail");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
impl<'a> Drop for Timer<'a> {
|
||||
fn drop(&mut self) {
|
||||
self.print_elapsed();
|
||||
|
|
|
@ -0,0 +1 @@
|
|||
{"files":{"Cargo.toml":"b523856472549844b4bf20eca0473d955a7e5eeb95c70eddd31a05ac455427bb","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"89857eaaa305afe540abcf56fabae0194dfb4e7906a8098b7206acb23ed11ce8","build.rs":"36fa668f3bf309f243d0e977e8428446cc424303139c1f63410b3c2e30445aec","src/fallback.rs":"e4d1bcb1e92383a2285e6c947dd74b0e34144904948db68127faea627f5dd6ff","src/lib.rs":"896a1d212e30902ff051313808007406ca4471c27880a6ef19508f0ebb8333ee","src/strnom.rs":"60f5380106dbe568cca7abd09877e133c874fbee95d502e4830425c4613a640d","src/wrapper.rs":"0d7fe28ab2b7ee02b8eb8c5a636da364c60f6704b23e7db0a1ddd57c742f54b1","tests/marker.rs":"0227d07bbc7f2e2ad34662a6acb65668b7dc2f79141c4faa672703a04e27bea0","tests/test.rs":"166d35835355bdaa85bcf69de4dfb56ccddd8acf2e1a8cbc506782632b151674"},"package":"4d317f9caece796be1980837fd5cb3dfec5613ebdb04ad0956deea83ce168915"}
|
|
@ -0,0 +1,39 @@
|
|||
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
|
||||
#
|
||||
# When uploading crates to the registry Cargo will automatically
|
||||
# "normalize" Cargo.toml files for maximal compatibility
|
||||
# with all versions of Cargo and also rewrite `path` dependencies
|
||||
# to registry (e.g. crates.io) dependencies
|
||||
#
|
||||
# If you believe there's an error in this file please file an
|
||||
# issue against the rust-lang/cargo repository. If you're
|
||||
# editing this file be aware that the upstream Cargo.toml
|
||||
# will likely look very different (and much more reasonable)
|
||||
|
||||
[package]
|
||||
name = "proc-macro2"
|
||||
version = "0.4.27"
|
||||
authors = ["Alex Crichton <alex@alexcrichton.com>"]
|
||||
build = "build.rs"
|
||||
description = "A stable implementation of the upcoming new `proc_macro` API. Comes with an\noption, off by default, to also reimplement itself in terms of the upstream\nunstable API.\n"
|
||||
homepage = "https://github.com/alexcrichton/proc-macro2"
|
||||
documentation = "https://docs.rs/proc-macro2"
|
||||
readme = "README.md"
|
||||
keywords = ["macros"]
|
||||
license = "MIT/Apache-2.0"
|
||||
repository = "https://github.com/alexcrichton/proc-macro2"
|
||||
[package.metadata.docs.rs]
|
||||
rustc-args = ["--cfg", "procmacro2_semver_exempt"]
|
||||
rustdoc-args = ["--cfg", "procmacro2_semver_exempt"]
|
||||
[dependencies.unicode-xid]
|
||||
version = "0.1"
|
||||
[dev-dependencies.quote]
|
||||
version = "0.6"
|
||||
|
||||
[features]
|
||||
default = ["proc-macro"]
|
||||
nightly = []
|
||||
proc-macro = []
|
||||
span-locations = []
|
||||
[badges.travis-ci]
|
||||
repository = "alexcrichton/proc-macro2"
|
|
@ -0,0 +1,201 @@
|
|||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
|
@ -0,0 +1,25 @@
|
|||
Copyright (c) 2014 Alex Crichton
|
||||
|
||||
Permission is hereby granted, free of charge, to any
|
||||
person obtaining a copy of this software and associated
|
||||
documentation files (the "Software"), to deal in the
|
||||
Software without restriction, including without
|
||||
limitation the rights to use, copy, modify, merge,
|
||||
publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software
|
||||
is furnished to do so, subject to the following
|
||||
conditions:
|
||||
|
||||
The above copyright notice and this permission notice
|
||||
shall be included in all copies or substantial portions
|
||||
of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
|
||||
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
|
||||
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
|
||||
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
|
||||
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
||||
DEALINGS IN THE SOFTWARE.
|
|
@ -0,0 +1,100 @@
|
|||
# proc-macro2
|
||||
|
||||
[![Build Status](https://api.travis-ci.com/alexcrichton/proc-macro2.svg?branch=master)](https://travis-ci.com/alexcrichton/proc-macro2)
|
||||
[![Latest Version](https://img.shields.io/crates/v/proc-macro2.svg)](https://crates.io/crates/proc-macro2)
|
||||
[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/proc-macro2)
|
||||
|
||||
A wrapper around the procedural macro API of the compiler's `proc_macro` crate.
|
||||
This library serves three purposes:
|
||||
|
||||
- **Bring proc-macro-like functionality to other contexts like build.rs and
|
||||
main.rs.** Types from `proc_macro` are entirely specific to procedural macros
|
||||
and cannot ever exist in code outside of a procedural macro. Meanwhile
|
||||
`proc_macro2` types may exist anywhere including non-macro code. By developing
|
||||
foundational libraries like [syn] and [quote] against `proc_macro2` rather
|
||||
than `proc_macro`, the procedural macro ecosystem becomes easily applicable to
|
||||
many other use cases and we avoid reimplementing non-macro equivalents of
|
||||
those libraries.
|
||||
|
||||
- **Make procedural macros unit testable.** As a consequence of being specific
|
||||
to procedural macros, nothing that uses `proc_macro` can be executed from a
|
||||
unit test. In order for helper libraries or components of a macro to be
|
||||
testable in isolation, they must be implemented using `proc_macro2`.
|
||||
|
||||
- **Provide the latest and greatest APIs across all compiler versions.**
|
||||
Procedural macros were first introduced to Rust in 1.15.0 with an extremely
|
||||
minimal interface. Since then, many improvements have landed to make macros
|
||||
more flexible and easier to write. This library tracks the procedural macro
|
||||
API of the most recent stable compiler but employs a polyfill to provide that
|
||||
API consistently across any compiler since 1.15.0.
|
||||
|
||||
[syn]: https://github.com/dtolnay/syn
|
||||
[quote]: https://github.com/dtolnay/quote
|
||||
|
||||
## Usage
|
||||
|
||||
```toml
|
||||
[dependencies]
|
||||
proc-macro2 = "0.4"
|
||||
```
|
||||
|
||||
The skeleton of a typical procedural macro typically looks like this:
|
||||
|
||||
```rust
|
||||
extern crate proc_macro;
|
||||
|
||||
#[proc_macro_derive(MyDerive)]
|
||||
pub fn my_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
|
||||
let input = proc_macro2::TokenStream::from(input);
|
||||
|
||||
let output: proc_macro2::TokenStream = {
|
||||
/* transform input */
|
||||
};
|
||||
|
||||
proc_macro::TokenStream::from(output)
|
||||
}
|
||||
```
|
||||
|
||||
If parsing with [Syn], you'll use [`parse_macro_input!`] instead to propagate
|
||||
parse errors correctly back to the compiler when parsing fails.
|
||||
|
||||
[`parse_macro_input!`]: https://docs.rs/syn/0.15/syn/macro.parse_macro_input.html
|
||||
|
||||
## Unstable features
|
||||
|
||||
The default feature set of proc-macro2 tracks the most recent stable compiler
|
||||
API. Functionality in `proc_macro` that is not yet stable is not exposed by
|
||||
proc-macro2 by default.
|
||||
|
||||
To opt into the additional APIs available in the most recent nightly compiler,
|
||||
the `procmacro2_semver_exempt` config flag must be passed to rustc. As usual, we
|
||||
will polyfill those nightly-only APIs all the way back to Rust 1.15.0. As these
|
||||
are unstable APIs that track the nightly compiler, minor versions of proc-macro2
|
||||
may make breaking changes to them at any time.
|
||||
|
||||
```
|
||||
RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo build
|
||||
```
|
||||
|
||||
Note that this must not only be done for your crate, but for any crate that
|
||||
depends on your crate. This infectious nature is intentional, as it serves as a
|
||||
reminder that you are outside of the normal semver guarantees.
|
||||
|
||||
Semver exempt methods are marked as such in the proc-macro2 documentation.
|
||||
|
||||
# License
|
||||
|
||||
This project is licensed under either of
|
||||
|
||||
* Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
|
||||
http://www.apache.org/licenses/LICENSE-2.0)
|
||||
* MIT license ([LICENSE-MIT](LICENSE-MIT) or
|
||||
http://opensource.org/licenses/MIT)
|
||||
|
||||
at your option.
|
||||
|
||||
### Contribution
|
||||
|
||||
Unless you explicitly state otherwise, any contribution intentionally submitted
|
||||
for inclusion in Serde by you, as defined in the Apache-2.0 license, shall be
|
||||
dual licensed as above, without any additional terms or conditions.
|
|
@ -0,0 +1,133 @@
|
|||
// rustc-cfg emitted by the build script:
|
||||
//
|
||||
// "u128"
|
||||
// Include u128 and i128 constructors for proc_macro2::Literal. Enabled on
|
||||
// any compiler 1.26+.
|
||||
//
|
||||
// "use_proc_macro"
|
||||
// Link to extern crate proc_macro. Available on any compiler and any target
|
||||
// except wasm32. Requires "proc-macro" Cargo cfg to be enabled (default is
|
||||
// enabled). On wasm32 we never link to proc_macro even if "proc-macro" cfg
|
||||
// is enabled.
|
||||
//
|
||||
// "wrap_proc_macro"
|
||||
// Wrap types from libproc_macro rather than polyfilling the whole API.
|
||||
// Enabled on rustc 1.29+ as long as procmacro2_semver_exempt is not set,
|
||||
// because we can't emulate the unstable API without emulating everything
|
||||
// else. Also enabled unconditionally on nightly, in which case the
|
||||
// procmacro2_semver_exempt surface area is implemented by using the
|
||||
// nightly-only proc_macro API.
|
||||
//
|
||||
// "slow_extend"
|
||||
// Fallback when `impl Extend for TokenStream` is not available. These impls
|
||||
// were added one version later than the rest of the proc_macro token API.
|
||||
// Enabled on rustc 1.29 only.
|
||||
//
|
||||
// "nightly"
|
||||
// Enable the Span::unwrap method. This is to support proc_macro_span and
|
||||
// proc_macro_diagnostic use on the nightly channel without requiring the
|
||||
// semver exemption opt-in. Enabled when building with nightly.
|
||||
//
|
||||
// "super_unstable"
|
||||
// Implement the semver exempt API in terms of the nightly-only proc_macro
|
||||
// API. Enabled when using procmacro2_semver_exempt on a nightly compiler.
|
||||
//
|
||||
// "span_locations"
|
||||
// Provide methods Span::start and Span::end which give the line/column
|
||||
// location of a token. Enabled by procmacro2_semver_exempt or the
|
||||
// "span-locations" Cargo cfg. This is behind a cfg because tracking
|
||||
// location inside spans is a performance hit.
|
||||
|
||||
use std::env;
|
||||
use std::process::Command;
|
||||
use std::str;
|
||||
|
||||
fn main() {
|
||||
println!("cargo:rerun-if-changed=build.rs");
|
||||
|
||||
let target = env::var("TARGET").unwrap();
|
||||
|
||||
let version = match rustc_version() {
|
||||
Some(version) => version,
|
||||
None => return,
|
||||
};
|
||||
|
||||
if version.minor >= 26 {
|
||||
println!("cargo:rustc-cfg=u128");
|
||||
}
|
||||
|
||||
let semver_exempt = cfg!(procmacro2_semver_exempt);
|
||||
if semver_exempt {
|
||||
// https://github.com/alexcrichton/proc-macro2/issues/147
|
||||
println!("cargo:rustc-cfg=procmacro2_semver_exempt");
|
||||
}
|
||||
|
||||
if semver_exempt || cfg!(feature = "span-locations") {
|
||||
println!("cargo:rustc-cfg=span_locations");
|
||||
}
|
||||
|
||||
if !enable_use_proc_macro(&target) {
|
||||
return;
|
||||
}
|
||||
|
||||
println!("cargo:rustc-cfg=use_proc_macro");
|
||||
|
||||
// Rust 1.29 stabilized the necessary APIs in the `proc_macro` crate
|
||||
if version.nightly || version.minor >= 29 && !semver_exempt {
|
||||
println!("cargo:rustc-cfg=wrap_proc_macro");
|
||||
}
|
||||
|
||||
if version.minor == 29 {
|
||||
println!("cargo:rustc-cfg=slow_extend");
|
||||
}
|
||||
|
||||
if version.nightly {
|
||||
println!("cargo:rustc-cfg=nightly");
|
||||
}
|
||||
|
||||
if semver_exempt && version.nightly {
|
||||
println!("cargo:rustc-cfg=super_unstable");
|
||||
}
|
||||
}
|
||||
|
||||
fn enable_use_proc_macro(target: &str) -> bool {
|
||||
// wasm targets don't have the `proc_macro` crate, disable this feature.
|
||||
if target.contains("wasm32") {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Otherwise, only enable it if our feature is actually enabled.
|
||||
cfg!(feature = "proc-macro")
|
||||
}
|
||||
|
||||
struct RustcVersion {
|
||||
minor: u32,
|
||||
nightly: bool,
|
||||
}
|
||||
|
||||
fn rustc_version() -> Option<RustcVersion> {
|
||||
macro_rules! otry {
|
||||
($e:expr) => {
|
||||
match $e {
|
||||
Some(e) => e,
|
||||
None => return None,
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
let rustc = otry!(env::var_os("RUSTC"));
|
||||
let output = otry!(Command::new(rustc).arg("--version").output().ok());
|
||||
let version = otry!(str::from_utf8(&output.stdout).ok());
|
||||
let nightly = version.contains("nightly");
|
||||
let mut pieces = version.split('.');
|
||||
if pieces.next() != Some("rustc 1") {
|
||||
return None;
|
||||
}
|
||||
let minor = otry!(pieces.next());
|
||||
let minor = otry!(minor.parse().ok());
|
||||
|
||||
Some(RustcVersion {
|
||||
minor: minor,
|
||||
nightly: nightly,
|
||||
})
|
||||
}
|
Разница между файлами не показана из-за своего большого размера
Загрузить разницу
Разница между файлами не показана из-за своего большого размера
Загрузить разницу
|
@ -0,0 +1,393 @@
|
|||
//! Adapted from [`nom`](https://github.com/Geal/nom).
|
||||
|
||||
use std::str::{Bytes, CharIndices, Chars};
|
||||
|
||||
use unicode_xid::UnicodeXID;
|
||||
|
||||
use fallback::LexError;
|
||||
|
||||
#[derive(Copy, Clone, Eq, PartialEq)]
|
||||
pub struct Cursor<'a> {
|
||||
pub rest: &'a str,
|
||||
#[cfg(span_locations)]
|
||||
pub off: u32,
|
||||
}
|
||||
|
||||
impl<'a> Cursor<'a> {
|
||||
#[cfg(not(span_locations))]
|
||||
pub fn advance(&self, amt: usize) -> Cursor<'a> {
|
||||
Cursor {
|
||||
rest: &self.rest[amt..],
|
||||
}
|
||||
}
|
||||
#[cfg(span_locations)]
|
||||
pub fn advance(&self, amt: usize) -> Cursor<'a> {
|
||||
Cursor {
|
||||
rest: &self.rest[amt..],
|
||||
off: self.off + (amt as u32),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn find(&self, p: char) -> Option<usize> {
|
||||
self.rest.find(p)
|
||||
}
|
||||
|
||||
pub fn starts_with(&self, s: &str) -> bool {
|
||||
self.rest.starts_with(s)
|
||||
}
|
||||
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.rest.is_empty()
|
||||
}
|
||||
|
||||
pub fn len(&self) -> usize {
|
||||
self.rest.len()
|
||||
}
|
||||
|
||||
pub fn as_bytes(&self) -> &'a [u8] {
|
||||
self.rest.as_bytes()
|
||||
}
|
||||
|
||||
pub fn bytes(&self) -> Bytes<'a> {
|
||||
self.rest.bytes()
|
||||
}
|
||||
|
||||
pub fn chars(&self) -> Chars<'a> {
|
||||
self.rest.chars()
|
||||
}
|
||||
|
||||
pub fn char_indices(&self) -> CharIndices<'a> {
|
||||
self.rest.char_indices()
|
||||
}
|
||||
}
|
||||
|
||||
pub type PResult<'a, O> = Result<(Cursor<'a>, O), LexError>;
|
||||
|
||||
pub fn whitespace(input: Cursor) -> PResult<()> {
|
||||
if input.is_empty() {
|
||||
return Err(LexError);
|
||||
}
|
||||
|
||||
let bytes = input.as_bytes();
|
||||
let mut i = 0;
|
||||
while i < bytes.len() {
|
||||
let s = input.advance(i);
|
||||
if bytes[i] == b'/' {
|
||||
if s.starts_with("//")
|
||||
&& (!s.starts_with("///") || s.starts_with("////"))
|
||||
&& !s.starts_with("//!")
|
||||
{
|
||||
if let Some(len) = s.find('\n') {
|
||||
i += len + 1;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
} else if s.starts_with("/**/") {
|
||||
i += 4;
|
||||
continue;
|
||||
} else if s.starts_with("/*")
|
||||
&& (!s.starts_with("/**") || s.starts_with("/***"))
|
||||
&& !s.starts_with("/*!")
|
||||
{
|
||||
let (_, com) = block_comment(s)?;
|
||||
i += com.len();
|
||||
continue;
|
||||
}
|
||||
}
|
||||
match bytes[i] {
|
||||
b' ' | 0x09...0x0d => {
|
||||
i += 1;
|
||||
continue;
|
||||
}
|
||||
b if b <= 0x7f => {}
|
||||
_ => {
|
||||
let ch = s.chars().next().unwrap();
|
||||
if is_whitespace(ch) {
|
||||
i += ch.len_utf8();
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
return if i > 0 { Ok((s, ())) } else { Err(LexError) };
|
||||
}
|
||||
Ok((input.advance(input.len()), ()))
|
||||
}
|
||||
|
||||
pub fn block_comment(input: Cursor) -> PResult<&str> {
|
||||
if !input.starts_with("/*") {
|
||||
return Err(LexError);
|
||||
}
|
||||
|
||||
let mut depth = 0;
|
||||
let bytes = input.as_bytes();
|
||||
let mut i = 0;
|
||||
let upper = bytes.len() - 1;
|
||||
while i < upper {
|
||||
if bytes[i] == b'/' && bytes[i + 1] == b'*' {
|
||||
depth += 1;
|
||||
i += 1; // eat '*'
|
||||
} else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
|
||||
depth -= 1;
|
||||
if depth == 0 {
|
||||
return Ok((input.advance(i + 2), &input.rest[..i + 2]));
|
||||
}
|
||||
i += 1; // eat '/'
|
||||
}
|
||||
i += 1;
|
||||
}
|
||||
Err(LexError)
|
||||
}
|
||||
|
||||
pub fn skip_whitespace(input: Cursor) -> Cursor {
|
||||
match whitespace(input) {
|
||||
Ok((rest, _)) => rest,
|
||||
Err(LexError) => input,
|
||||
}
|
||||
}
|
||||
|
||||
fn is_whitespace(ch: char) -> bool {
|
||||
// Rust treats left-to-right mark and right-to-left mark as whitespace
|
||||
ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
|
||||
}
|
||||
|
||||
pub fn word_break(input: Cursor) -> PResult<()> {
|
||||
match input.chars().next() {
|
||||
Some(ch) if UnicodeXID::is_xid_continue(ch) => Err(LexError),
|
||||
Some(_) | None => Ok((input, ())),
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! named {
|
||||
($name:ident -> $o:ty, $submac:ident!( $($args:tt)* )) => {
|
||||
fn $name<'a>(i: Cursor<'a>) -> $crate::strnom::PResult<'a, $o> {
|
||||
$submac!(i, $($args)*)
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! alt {
|
||||
($i:expr, $e:ident | $($rest:tt)*) => {
|
||||
alt!($i, call!($e) | $($rest)*)
|
||||
};
|
||||
|
||||
($i:expr, $subrule:ident!( $($args:tt)*) | $($rest:tt)*) => {
|
||||
match $subrule!($i, $($args)*) {
|
||||
res @ Ok(_) => res,
|
||||
_ => alt!($i, $($rest)*)
|
||||
}
|
||||
};
|
||||
|
||||
($i:expr, $subrule:ident!( $($args:tt)* ) => { $gen:expr } | $($rest:tt)+) => {
|
||||
match $subrule!($i, $($args)*) {
|
||||
Ok((i, o)) => Ok((i, $gen(o))),
|
||||
Err(LexError) => alt!($i, $($rest)*)
|
||||
}
|
||||
};
|
||||
|
||||
($i:expr, $e:ident => { $gen:expr } | $($rest:tt)*) => {
|
||||
alt!($i, call!($e) => { $gen } | $($rest)*)
|
||||
};
|
||||
|
||||
($i:expr, $e:ident => { $gen:expr }) => {
|
||||
alt!($i, call!($e) => { $gen })
|
||||
};
|
||||
|
||||
($i:expr, $subrule:ident!( $($args:tt)* ) => { $gen:expr }) => {
|
||||
match $subrule!($i, $($args)*) {
|
||||
Ok((i, o)) => Ok((i, $gen(o))),
|
||||
Err(LexError) => Err(LexError),
|
||||
}
|
||||
};
|
||||
|
||||
($i:expr, $e:ident) => {
|
||||
alt!($i, call!($e))
|
||||
};
|
||||
|
||||
($i:expr, $subrule:ident!( $($args:tt)*)) => {
|
||||
$subrule!($i, $($args)*)
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! do_parse {
|
||||
($i:expr, ( $($rest:expr),* )) => {
|
||||
Ok(($i, ( $($rest),* )))
|
||||
};
|
||||
|
||||
($i:expr, $e:ident >> $($rest:tt)*) => {
|
||||
do_parse!($i, call!($e) >> $($rest)*)
|
||||
};
|
||||
|
||||
($i:expr, $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => {
|
||||
match $submac!($i, $($args)*) {
|
||||
Err(LexError) => Err(LexError),
|
||||
Ok((i, _)) => do_parse!(i, $($rest)*),
|
||||
}
|
||||
};
|
||||
|
||||
($i:expr, $field:ident : $e:ident >> $($rest:tt)*) => {
|
||||
do_parse!($i, $field: call!($e) >> $($rest)*)
|
||||
};
|
||||
|
||||
($i:expr, $field:ident : $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => {
|
||||
match $submac!($i, $($args)*) {
|
||||
Err(LexError) => Err(LexError),
|
||||
Ok((i, o)) => {
|
||||
let $field = o;
|
||||
do_parse!(i, $($rest)*)
|
||||
},
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! peek {
|
||||
($i:expr, $submac:ident!( $($args:tt)* )) => {
|
||||
match $submac!($i, $($args)*) {
|
||||
Ok((_, o)) => Ok(($i, o)),
|
||||
Err(LexError) => Err(LexError),
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! call {
|
||||
($i:expr, $fun:expr $(, $args:expr)*) => {
|
||||
$fun($i $(, $args)*)
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! option {
|
||||
($i:expr, $f:expr) => {
|
||||
match $f($i) {
|
||||
Ok((i, o)) => Ok((i, Some(o))),
|
||||
Err(LexError) => Ok(($i, None)),
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! take_until_newline_or_eof {
|
||||
($i:expr,) => {{
|
||||
if $i.len() == 0 {
|
||||
Ok(($i, ""))
|
||||
} else {
|
||||
match $i.find('\n') {
|
||||
Some(i) => Ok(($i.advance(i), &$i.rest[..i])),
|
||||
None => Ok(($i.advance($i.len()), &$i.rest[..$i.len()])),
|
||||
}
|
||||
}
|
||||
}};
|
||||
}
|
||||
|
||||
macro_rules! tuple {
|
||||
($i:expr, $($rest:tt)*) => {
|
||||
tuple_parser!($i, (), $($rest)*)
|
||||
};
|
||||
}
|
||||
|
||||
/// Do not use directly. Use `tuple!`.
///
/// Worker for `tuple!`: the second argument is an accumulator of outputs
/// gathered so far. Rules with a trailing comma handle "more parsers remain";
/// rules without one handle the final parser, where the accumulated outputs
/// plus the last output become the result tuple.
macro_rules! tuple_parser {
    // Non-final bare-identifier parser; normalize to the `call!` form.
    ($i:expr, ($($parsed:tt),*), $e:ident, $($rest:tt)*) => {
        tuple_parser!($i, ($($parsed),*), call!($e), $($rest)*)
    };

    // First parser of the sequence: seed the accumulator with its output.
    ($i:expr, (), $submac:ident!( $($args:tt)* ), $($rest:tt)*) => {
        match $submac!($i, $($args)*) {
            Err(LexError) => Err(LexError),
            Ok((i, o)) => tuple_parser!(i, (o), $($rest)*),
        }
    };

    // Middle parser: append its output to the non-empty accumulator.
    ($i:expr, ($($parsed:tt)*), $submac:ident!( $($args:tt)* ), $($rest:tt)*) => {
        match $submac!($i, $($args)*) {
            Err(LexError) => Err(LexError),
            Ok((i, o)) => tuple_parser!(i, ($($parsed)* , o), $($rest)*),
        }
    };

    // Final bare-identifier parser; normalize to the `call!` form.
    ($i:expr, ($($parsed:tt),*), $e:ident) => {
        tuple_parser!($i, ($($parsed),*), call!($e))
    };

    // Final parser with an empty accumulator (single-element tuple!): the
    // sub-parser's result is returned directly, not wrapped in a 1-tuple.
    ($i:expr, (), $submac:ident!( $($args:tt)* )) => {
        $submac!($i, $($args)*)
    };

    // Final parser with accumulated outputs: flatten into the result tuple.
    ($i:expr, ($($parsed:expr),*), $submac:ident!( $($args:tt)* )) => {
        match $submac!($i, $($args)*) {
            Err(LexError) => Err(LexError),
            Ok((i, o)) => Ok((i, ($($parsed),*, o)))
        }
    };

    // Degenerate case: no parser left at all; return what was accumulated.
    ($i:expr, ($($parsed:expr),*)) => {
        Ok(($i, ($($parsed),*)))
    };
}
|
||||
|
||||
// Negative lookahead: succeeds (consuming nothing, yielding `()`) only when
// the inner parser FAILS; the inner parser's success becomes an error.
macro_rules! not {
    ($i:expr, $submac:ident!( $($args:tt)* )) => {
        match $submac!($i, $($args)*) {
            Ok((_, _)) => Err(LexError),
            Err(LexError) => Ok(($i, ())),
        }
    };
}
|
||||
|
||||
// Matches a literal string prefix. On success, consumes exactly `$tag.len()`
// bytes and returns the matched slice of the input; otherwise fails.
macro_rules! tag {
    ($i:expr, $tag:expr) => {
        if $i.starts_with($tag) {
            Ok(($i.advance($tag.len()), &$i.rest[..$tag.len()]))
        } else {
            Err(LexError)
        }
    };
}
|
||||
|
||||
// Thin macro wrapper around `strnom::punct` so punctuation matching fits the
// same `parser!(input, args)` calling convention as the other combinators.
macro_rules! punct {
    ($i:expr, $punct:expr) => {
        $crate::strnom::punct($i, $punct)
    };
}
|
||||
|
||||
/// Do not use directly. Use `punct!`.
///
/// Skips leading whitespace, then matches the literal `token`. On success the
/// cursor is advanced past the token and the static token string is returned.
pub fn punct<'a>(input: Cursor<'a>, token: &'static str) -> PResult<'a, &'a str> {
    let input = skip_whitespace(input);
    if input.starts_with(token) {
        Ok((input.advance(token.len()), token))
    } else {
        Err(LexError)
    }
}
|
||||
|
||||
// Runs two parsers in sequence and keeps only the SECOND output, discarding
// the first (e.g. skip a keyword, keep what follows).
macro_rules! preceded {
    ($i:expr, $submac:ident!( $($args:tt)* ), $submac2:ident!( $($args2:tt)* )) => {
        match tuple!($i, $submac!($($args)*), $submac2!($($args2)*)) {
            Ok((remaining, (_, o))) => Ok((remaining, o)),
            Err(LexError) => Err(LexError),
        }
    };

    // Second parser given as a bare function; normalize to the `call!` form.
    ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => {
        preceded!($i, $submac!($($args)*), call!($g))
    };
}
|
||||
|
||||
// Runs three parsers in sequence (open delimiter, body, close delimiter) and
// keeps only the middle output.
macro_rules! delimited {
    ($i:expr, $submac:ident!( $($args:tt)* ), $($rest:tt)+) => {
        match tuple_parser!($i, (), $submac!($($args)*), $($rest)*) {
            Err(LexError) => Err(LexError),
            Ok((i1, (_, o, _))) => Ok((i1, o))
        }
    };
}
|
||||
|
||||
// Applies a function to a successful parser's output, leaving the remaining
// input untouched. `call!(o, $g)` expands to `$g(o)`.
macro_rules! map {
    ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => {
        match $submac!($i, $($args)*) {
            Err(LexError) => Err(LexError),
            Ok((i, o)) => Ok((i, call!(o, $g)))
        }
    };

    // Parser given as a bare function; normalize to the `call!` form.
    ($i:expr, $f:expr, $g:expr) => {
        map!($i, call!($f), $g)
    };
}
|
|
@ -0,0 +1,926 @@
|
|||
use std::fmt;
|
||||
use std::iter;
|
||||
use std::panic::{self, PanicInfo};
|
||||
#[cfg(super_unstable)]
|
||||
use std::path::PathBuf;
|
||||
use std::str::FromStr;
|
||||
|
||||
use fallback;
|
||||
use proc_macro;
|
||||
|
||||
use {Delimiter, Punct, Spacing, TokenTree};
|
||||
|
||||
/// A token stream that is either backed by the compiler's real `proc_macro`
/// implementation (usable only inside a procedural macro invocation) or by
/// this crate's pure-Rust fallback.
#[derive(Clone)]
pub enum TokenStream {
    Compiler(proc_macro::TokenStream),
    Fallback(fallback::TokenStream),
}
|
||||
|
||||
/// Lexing failure from whichever backend attempted the parse.
pub enum LexError {
    Compiler(proc_macro::LexError),
    Fallback(fallback::LexError),
}
|
||||
|
||||
/// Returns true when the compiler's `proc_macro` API is actually usable in
/// the current context. The result is probed once (by calling
/// `proc_macro::Span::call_site()` under `catch_unwind`) and cached in a
/// static for all later calls.
fn nightly_works() -> bool {
    use std::sync::atomic::*;
    use std::sync::Once;

    // Cached probe result: 0 = not yet probed, 1 = broken, 2 = works.
    // NOTE(review): `ATOMIC_USIZE_INIT` is deprecated in later Rust in favor
    // of `AtomicUsize::new(0)`; kept as-is since this vendored code targets
    // an older toolchain.
    static WORKS: AtomicUsize = ATOMIC_USIZE_INIT;
    static INIT: Once = Once::new();

    match WORKS.load(Ordering::SeqCst) {
        1 => return false,
        2 => return true,
        _ => {}
    }

    // Swap in a null panic hook to avoid printing "thread panicked" to stderr,
    // then use catch_unwind to determine whether the compiler's proc_macro is
    // working. When proc-macro2 is used from outside of a procedural macro all
    // of the proc_macro crate's APIs currently panic.
    //
    // The Once is to prevent the possibility of this ordering:
    //
    //     thread 1 calls take_hook, gets the user's original hook
    //     thread 1 calls set_hook with the null hook
    //     thread 2 calls take_hook, thinks null hook is the original hook
    //     thread 2 calls set_hook with the null hook
    //     thread 1 calls set_hook with the actual original hook
    //     thread 2 calls set_hook with what it thinks is the original hook
    //
    // in which the user's hook has been lost.
    //
    // There is still a race condition where a panic in a different thread can
    // happen during the interval that the user's original panic hook is
    // unregistered such that their hook is incorrectly not called. This is
    // sufficiently unlikely and less bad than printing panic messages to stderr
    // on correct use of this crate. Maybe there is a libstd feature request
    // here. For now, if a user needs to guarantee that this failure mode does
    // not occur, they need to call e.g. `proc_macro2::Span::call_site()` from
    // the main thread before launching any other threads.
    INIT.call_once(|| {
        // NOTE(review): bare-trait-object syntax (`Fn(...)` without `dyn`);
        // deprecated in Rust 2018 but valid for this crate's minimum version.
        type PanicHook = Fn(&PanicInfo) + Sync + Send + 'static;

        let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ });
        // Raw pointer to the null hook, kept so we can verify afterwards that
        // nobody else replaced the hook while we had it installed.
        let sanity_check = &*null_hook as *const PanicHook;
        let original_hook = panic::take_hook();
        panic::set_hook(null_hook);

        let works = panic::catch_unwind(|| proc_macro::Span::call_site()).is_ok();
        // Encode the tri-state: store 1 (broken) or 2 (works).
        WORKS.store(works as usize + 1, Ordering::SeqCst);

        let hopefully_null_hook = panic::take_hook();
        panic::set_hook(original_hook);
        if sanity_check != &*hopefully_null_hook {
            panic!("observed race condition in proc_macro2::nightly_works");
        }
    });
    // Re-read the now-populated cache via the fast path above.
    nightly_works()
}
|
||||
|
||||
/// Panics on an impossible mixing of Compiler- and Fallback-backed values;
/// reachable only if crate invariants are violated.
fn mismatch() -> ! {
    panic!("stable/nightly mismatch")
}
|
||||
|
||||
impl TokenStream {
    /// Creates an empty stream, choosing the compiler backend when available.
    pub fn new() -> TokenStream {
        if nightly_works() {
            TokenStream::Compiler(proc_macro::TokenStream::new())
        } else {
            TokenStream::Fallback(fallback::TokenStream::new())
        }
    }

    pub fn is_empty(&self) -> bool {
        match self {
            TokenStream::Compiler(tts) => tts.is_empty(),
            TokenStream::Fallback(tts) => tts.is_empty(),
        }
    }

    // Extracts the compiler-backed stream; panics on a fallback value.
    fn unwrap_nightly(self) -> proc_macro::TokenStream {
        match self {
            TokenStream::Compiler(s) => s,
            TokenStream::Fallback(_) => mismatch(),
        }
    }

    // Extracts the fallback-backed stream; panics on a compiler value.
    fn unwrap_stable(self) -> fallback::TokenStream {
        match self {
            TokenStream::Compiler(_) => mismatch(),
            TokenStream::Fallback(s) => s,
        }
    }
}
|
||||
|
||||
impl FromStr for TokenStream {
    type Err = LexError;

    /// Parses source text with whichever backend is active; backend lex
    /// errors convert into `LexError` via the `From` impls below.
    fn from_str(src: &str) -> Result<TokenStream, LexError> {
        if nightly_works() {
            Ok(TokenStream::Compiler(src.parse()?))
        } else {
            Ok(TokenStream::Fallback(src.parse()?))
        }
    }
}
|
||||
|
||||
// Delegates rendering to the active backend.
impl fmt::Display for TokenStream {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            TokenStream::Compiler(tts) => tts.fmt(f),
            TokenStream::Fallback(tts) => tts.fmt(f),
        }
    }
}
|
||||
|
||||
// Wraps a real compiler stream without conversion.
impl From<proc_macro::TokenStream> for TokenStream {
    fn from(inner: proc_macro::TokenStream) -> TokenStream {
        TokenStream::Compiler(inner)
    }
}
|
||||
|
||||
impl From<TokenStream> for proc_macro::TokenStream {
    fn from(inner: TokenStream) -> proc_macro::TokenStream {
        match inner {
            TokenStream::Compiler(inner) => inner,
            // A fallback stream has no compiler representation, so round-trip
            // through its string form and re-lex with the compiler.
            TokenStream::Fallback(inner) => inner.to_string().parse().unwrap(),
        }
    }
}
|
||||
|
||||
// Wraps a fallback stream without conversion.
impl From<fallback::TokenStream> for TokenStream {
    fn from(inner: fallback::TokenStream) -> TokenStream {
        TokenStream::Fallback(inner)
    }
}
|
||||
|
||||
impl From<TokenTree> for TokenStream {
    /// Converts a single wrapper-level token tree into a one-token stream,
    /// lowering it to the compiler's types when the compiler backend works.
    fn from(token: TokenTree) -> TokenStream {
        if !nightly_works() {
            return TokenStream::Fallback(token.into());
        }
        let tt: proc_macro::TokenTree = match token {
            TokenTree::Group(tt) => tt.inner.unwrap_nightly().into(),
            TokenTree::Punct(tt) => {
                // Punct has no inner compiler value; rebuild it field by field.
                let spacing = match tt.spacing() {
                    Spacing::Joint => proc_macro::Spacing::Joint,
                    Spacing::Alone => proc_macro::Spacing::Alone,
                };
                let mut op = proc_macro::Punct::new(tt.as_char(), spacing);
                op.set_span(tt.span().inner.unwrap_nightly());
                op.into()
            }
            TokenTree::Ident(tt) => tt.inner.unwrap_nightly().into(),
            TokenTree::Literal(tt) => tt.inner.unwrap_nightly().into(),
        };
        TokenStream::Compiler(tt.into())
    }
}
|
||||
|
||||
impl iter::FromIterator<TokenTree> for TokenStream {
    /// Collects token trees into a single stream on the active backend.
    fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
        if nightly_works() {
            // Each tree becomes a one-token compiler stream (via the `From`
            // impl above); flat_map splices them into one stream.
            let trees = trees
                .into_iter()
                .map(TokenStream::from)
                .flat_map(|t| match t {
                    TokenStream::Compiler(s) => s,
                    TokenStream::Fallback(_) => mismatch(),
                });
            TokenStream::Compiler(trees.collect())
        } else {
            TokenStream::Fallback(trees.into_iter().collect())
        }
    }
}
|
||||
|
||||
impl iter::FromIterator<TokenStream> for TokenStream {
    /// Concatenates streams; the first stream's backend decides which backend
    /// the result uses, and mixing backends panics via `mismatch`.
    fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
        let mut streams = streams.into_iter();
        match streams.next() {
            // Older-toolchain path (cfg from the build script): compiler
            // streams lacked an efficient `Extend`, so rebuild via collect.
            #[cfg(slow_extend)]
            Some(TokenStream::Compiler(first)) => {
                let stream = iter::once(first)
                    .chain(streams.map(|s| match s {
                        TokenStream::Compiler(s) => s,
                        TokenStream::Fallback(_) => mismatch(),
                    }))
                    .collect();
                TokenStream::Compiler(stream)
            }
            // Newer toolchains: extend the first stream in place.
            #[cfg(not(slow_extend))]
            Some(TokenStream::Compiler(mut first)) => {
                first.extend(streams.map(|s| match s {
                    TokenStream::Compiler(s) => s,
                    TokenStream::Fallback(_) => mismatch(),
                }));
                TokenStream::Compiler(first)
            }
            Some(TokenStream::Fallback(mut first)) => {
                first.extend(streams.map(|s| match s {
                    TokenStream::Fallback(s) => s,
                    TokenStream::Compiler(_) => mismatch(),
                }));
                TokenStream::Fallback(first)
            }
            None => TokenStream::new(),
        }
    }
}
|
||||
|
||||
impl Extend<TokenTree> for TokenStream {
    /// Appends token trees in place; trees must lower to the same backend as
    /// `self` or `mismatch` panics.
    fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
        match self {
            TokenStream::Compiler(tts) => {
                #[cfg(not(slow_extend))]
                {
                    tts.extend(
                        streams
                            .into_iter()
                            .map(|t| TokenStream::from(t).unwrap_nightly()),
                    );
                }
                // Older toolchains: no in-place extend on compiler streams, so
                // clone the existing tokens and rebuild the whole stream.
                #[cfg(slow_extend)]
                {
                    *tts =
                        tts.clone()
                            .into_iter()
                            .chain(streams.into_iter().map(TokenStream::from).flat_map(
                                |t| match t {
                                    TokenStream::Compiler(tts) => tts.into_iter(),
                                    _ => mismatch(),
                                },
                            ))
                            .collect();
                }
            }
            TokenStream::Fallback(tts) => tts.extend(streams),
        }
    }
}
|
||||
|
||||
impl Extend<TokenStream> for TokenStream {
    /// Appends whole streams in place; every stream must share `self`'s
    /// backend or `mismatch` panics.
    fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
        match self {
            TokenStream::Compiler(tts) => {
                #[cfg(not(slow_extend))]
                {
                    tts.extend(streams.into_iter().map(|stream| stream.unwrap_nightly()));
                }
                // Older toolchains: rebuild via clone + chain + collect (see
                // the Extend<TokenTree> impl for the same workaround).
                #[cfg(slow_extend)]
                {
                    *tts = tts
                        .clone()
                        .into_iter()
                        .chain(streams.into_iter().flat_map(|t| match t {
                            TokenStream::Compiler(tts) => tts.into_iter(),
                            _ => mismatch(),
                        }))
                        .collect();
                }
            }
            TokenStream::Fallback(tts) => {
                tts.extend(streams.into_iter().map(|stream| stream.unwrap_stable()))
            }
        }
    }
}
|
||||
|
||||
// Delegates debug formatting to the active backend.
impl fmt::Debug for TokenStream {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            TokenStream::Compiler(tts) => tts.fmt(f),
            TokenStream::Fallback(tts) => tts.fmt(f),
        }
    }
}
|
||||
|
||||
// Lets `?` convert compiler lex errors in `FromStr` above.
impl From<proc_macro::LexError> for LexError {
    fn from(e: proc_macro::LexError) -> LexError {
        LexError::Compiler(e)
    }
}
|
||||
|
||||
// Lets `?` convert fallback lex errors in `FromStr` above.
impl From<fallback::LexError> for LexError {
    fn from(e: fallback::LexError) -> LexError {
        LexError::Fallback(e)
    }
}
|
||||
|
||||
// Delegates debug formatting to the wrapped backend error.
impl fmt::Debug for LexError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            LexError::Compiler(e) => e.fmt(f),
            LexError::Fallback(e) => e.fmt(f),
        }
    }
}
|
||||
|
||||
/// Owning iterator over a `TokenStream`, wrapping whichever backend's
/// iterator the stream was built on.
pub enum TokenTreeIter {
    Compiler(proc_macro::token_stream::IntoIter),
    Fallback(fallback::TokenTreeIter),
}
|
||||
|
||||
impl IntoIterator for TokenStream {
    type Item = TokenTree;
    type IntoIter = TokenTreeIter;

    // Dispatches to the matching backend iterator.
    fn into_iter(self) -> TokenTreeIter {
        match self {
            TokenStream::Compiler(tts) => TokenTreeIter::Compiler(tts.into_iter()),
            TokenStream::Fallback(tts) => TokenTreeIter::Fallback(tts.into_iter()),
        }
    }
}
|
||||
|
||||
impl Iterator for TokenTreeIter {
    type Item = TokenTree;

    /// Yields the next wrapper-level `TokenTree`. Fallback items are already
    /// in wrapper form; compiler items are re-wrapped variant by variant.
    fn next(&mut self) -> Option<TokenTree> {
        let token = match self {
            TokenTreeIter::Compiler(iter) => iter.next()?,
            TokenTreeIter::Fallback(iter) => return iter.next(),
        };
        Some(match token {
            proc_macro::TokenTree::Group(tt) => ::Group::_new(Group::Compiler(tt)).into(),
            proc_macro::TokenTree::Punct(tt) => {
                // Punct is rebuilt field by field, mirroring the lowering in
                // `From<TokenTree> for TokenStream`.
                let spacing = match tt.spacing() {
                    proc_macro::Spacing::Joint => Spacing::Joint,
                    proc_macro::Spacing::Alone => Spacing::Alone,
                };
                let mut o = Punct::new(tt.as_char(), spacing);
                o.set_span(::Span::_new(Span::Compiler(tt.span())));
                o.into()
            }
            proc_macro::TokenTree::Ident(s) => ::Ident::_new(Ident::Compiler(s)).into(),
            proc_macro::TokenTree::Literal(l) => ::Literal::_new(Literal::Compiler(l)).into(),
        })
    }

    // Forwards the underlying iterator's size estimate.
    fn size_hint(&self) -> (usize, Option<usize>) {
        match self {
            TokenTreeIter::Compiler(tts) => tts.size_hint(),
            TokenTreeIter::Fallback(tts) => tts.size_hint(),
        }
    }
}
|
||||
|
||||
// Opaque debug representation; the inner iterators are not Debug.
impl fmt::Debug for TokenTreeIter {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("TokenTreeIter").finish()
    }
}
|
||||
|
||||
/// Source-file handle for span introspection; only compiled on toolchains
/// exposing the unstable API (`super_unstable` cfg).
#[derive(Clone, PartialEq, Eq)]
#[cfg(super_unstable)]
pub enum SourceFile {
    Compiler(proc_macro::SourceFile),
    Fallback(fallback::SourceFile),
}
|
||||
|
||||
#[cfg(super_unstable)]
impl SourceFile {
    // Wraps a compiler source file.
    fn nightly(sf: proc_macro::SourceFile) -> Self {
        SourceFile::Compiler(sf)
    }

    /// Get the path to this source file as a string.
    pub fn path(&self) -> PathBuf {
        match self {
            SourceFile::Compiler(a) => a.path(),
            SourceFile::Fallback(a) => a.path(),
        }
    }

    // Whether the file corresponds to a real on-disk path.
    pub fn is_real(&self) -> bool {
        match self {
            SourceFile::Compiler(a) => a.is_real(),
            SourceFile::Fallback(a) => a.is_real(),
        }
    }
}
|
||||
|
||||
// Delegates debug formatting to the active backend.
#[cfg(super_unstable)]
impl fmt::Debug for SourceFile {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            SourceFile::Compiler(a) => a.fmt(f),
            SourceFile::Fallback(a) => a.fmt(f),
        }
    }
}
|
||||
|
||||
/// A line/column position within a source file, used by `Span::start`/`end`.
#[cfg(any(super_unstable, feature = "span-locations"))]
pub struct LineColumn {
    pub line: usize,
    pub column: usize,
}
|
||||
|
||||
/// A source span backed by either the compiler or the fallback implementation.
#[derive(Copy, Clone)]
pub enum Span {
    Compiler(proc_macro::Span),
    Fallback(fallback::Span),
}
|
||||
|
||||
impl Span {
    /// A span at the macro call site, on the active backend.
    pub fn call_site() -> Span {
        if nightly_works() {
            Span::Compiler(proc_macro::Span::call_site())
        } else {
            Span::Fallback(fallback::Span::call_site())
        }
    }

    /// A span at the macro definition site (unstable API only).
    #[cfg(super_unstable)]
    pub fn def_site() -> Span {
        if nightly_works() {
            Span::Compiler(proc_macro::Span::def_site())
        } else {
            Span::Fallback(fallback::Span::def_site())
        }
    }

    // Combines two same-backend spans; mixing backends panics.
    #[cfg(super_unstable)]
    pub fn resolved_at(&self, other: Span) -> Span {
        match (self, other) {
            (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.resolved_at(b)),
            (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.resolved_at(b)),
            _ => mismatch(),
        }
    }

    #[cfg(super_unstable)]
    pub fn located_at(&self, other: Span) -> Span {
        match (self, other) {
            (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.located_at(b)),
            (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.located_at(b)),
            _ => mismatch(),
        }
    }

    /// Extracts the real compiler span; panics (with a user-facing message)
    /// when called outside a procedural macro, where only fallback spans exist.
    pub fn unwrap(self) -> proc_macro::Span {
        match self {
            Span::Compiler(s) => s,
            Span::Fallback(_) => panic!("proc_macro::Span is only available in procedural macros"),
        }
    }

    #[cfg(super_unstable)]
    pub fn source_file(&self) -> SourceFile {
        match self {
            Span::Compiler(s) => SourceFile::nightly(s.source_file()),
            Span::Fallback(s) => SourceFile::Fallback(s.source_file()),
        }
    }

    /// Start position of the span. On non-nightly compilers the compiler
    /// backend cannot report positions, so 0:0 is returned as a placeholder.
    #[cfg(any(super_unstable, feature = "span-locations"))]
    pub fn start(&self) -> LineColumn {
        match self {
            #[cfg(nightly)]
            Span::Compiler(s) => {
                let proc_macro::LineColumn { line, column } = s.start();
                LineColumn { line, column }
            }
            #[cfg(not(nightly))]
            Span::Compiler(_) => LineColumn { line: 0, column: 0 },
            Span::Fallback(s) => {
                let fallback::LineColumn { line, column } = s.start();
                LineColumn { line, column }
            }
        }
    }

    /// End position of the span; same 0:0 placeholder caveat as `start`.
    #[cfg(any(super_unstable, feature = "span-locations"))]
    pub fn end(&self) -> LineColumn {
        match self {
            #[cfg(nightly)]
            Span::Compiler(s) => {
                let proc_macro::LineColumn { line, column } = s.end();
                LineColumn { line, column }
            }
            #[cfg(not(nightly))]
            Span::Compiler(_) => LineColumn { line: 0, column: 0 },
            Span::Fallback(s) => {
                let fallback::LineColumn { line, column } = s.end();
                LineColumn { line, column }
            }
        }
    }

    /// Joins two spans; returns None for mixed backends or when the backend
    /// itself cannot join them.
    #[cfg(super_unstable)]
    pub fn join(&self, other: Span) -> Option<Span> {
        let ret = match (self, other) {
            (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.join(b)?),
            (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.join(b)?),
            _ => return None,
        };
        Some(ret)
    }

    // Span equality; mixed backends compare unequal rather than panicking.
    #[cfg(super_unstable)]
    pub fn eq(&self, other: &Span) -> bool {
        match (self, other) {
            (Span::Compiler(a), Span::Compiler(b)) => a.eq(b),
            (Span::Fallback(a), Span::Fallback(b)) => a.eq(b),
            _ => false,
        }
    }

    // Internal: extracts the compiler span; panics on fallback.
    fn unwrap_nightly(self) -> proc_macro::Span {
        match self {
            Span::Compiler(s) => s,
            Span::Fallback(_) => mismatch(),
        }
    }
}
|
||||
|
||||
// Wraps a compiler span directly into the crate's public `Span` type.
impl From<proc_macro::Span> for ::Span {
    fn from(proc_span: proc_macro::Span) -> ::Span {
        ::Span::_new(Span::Compiler(proc_span))
    }
}
|
||||
|
||||
// Wraps a fallback span without conversion.
impl From<fallback::Span> for Span {
    fn from(inner: fallback::Span) -> Span {
        Span::Fallback(inner)
    }
}
|
||||
|
||||
// Delegates debug formatting to the active backend.
impl fmt::Debug for Span {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Span::Compiler(s) => s.fmt(f),
            Span::Fallback(s) => s.fmt(f),
        }
    }
}
|
||||
|
||||
/// Adds a "span" field to a token's Debug output. Compiler spans are always
/// shown; fallback spans decide for themselves whether they carry enough
/// information to be worth printing.
pub fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
    match span {
        Span::Compiler(s) => {
            debug.field("span", &s);
        }
        Span::Fallback(s) => fallback::debug_span_field_if_nontrivial(debug, s),
    }
}
|
||||
|
||||
/// A delimited token group backed by either the compiler or the fallback.
#[derive(Clone)]
pub enum Group {
    Compiler(proc_macro::Group),
    Fallback(fallback::Group),
}
|
||||
|
||||
impl Group {
    /// Builds a group around `stream`; the stream's backend determines the
    /// group's backend, translating the delimiter enum for the compiler case.
    pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
        match stream {
            TokenStream::Compiler(stream) => {
                let delimiter = match delimiter {
                    Delimiter::Parenthesis => proc_macro::Delimiter::Parenthesis,
                    Delimiter::Bracket => proc_macro::Delimiter::Bracket,
                    Delimiter::Brace => proc_macro::Delimiter::Brace,
                    Delimiter::None => proc_macro::Delimiter::None,
                };
                Group::Compiler(proc_macro::Group::new(delimiter, stream))
            }
            TokenStream::Fallback(stream) => {
                Group::Fallback(fallback::Group::new(delimiter, stream))
            }
        }
    }

    // Translates the compiler's delimiter back into the wrapper enum.
    pub fn delimiter(&self) -> Delimiter {
        match self {
            Group::Compiler(g) => match g.delimiter() {
                proc_macro::Delimiter::Parenthesis => Delimiter::Parenthesis,
                proc_macro::Delimiter::Bracket => Delimiter::Bracket,
                proc_macro::Delimiter::Brace => Delimiter::Brace,
                proc_macro::Delimiter::None => Delimiter::None,
            },
            Group::Fallback(g) => g.delimiter(),
        }
    }

    // The tokens inside the delimiters, wrapped in the matching backend.
    pub fn stream(&self) -> TokenStream {
        match self {
            Group::Compiler(g) => TokenStream::Compiler(g.stream()),
            Group::Fallback(g) => TokenStream::Fallback(g.stream()),
        }
    }

    pub fn span(&self) -> Span {
        match self {
            Group::Compiler(g) => Span::Compiler(g.span()),
            Group::Fallback(g) => Span::Fallback(g.span()),
        }
    }

    // Span of just the opening delimiter (unstable API only).
    #[cfg(super_unstable)]
    pub fn span_open(&self) -> Span {
        match self {
            Group::Compiler(g) => Span::Compiler(g.span_open()),
            Group::Fallback(g) => Span::Fallback(g.span_open()),
        }
    }

    // Span of just the closing delimiter (unstable API only).
    #[cfg(super_unstable)]
    pub fn span_close(&self) -> Span {
        match self {
            Group::Compiler(g) => Span::Compiler(g.span_close()),
            Group::Fallback(g) => Span::Fallback(g.span_close()),
        }
    }

    // Span and group must share a backend; mixing panics.
    pub fn set_span(&mut self, span: Span) {
        match (self, span) {
            (Group::Compiler(g), Span::Compiler(s)) => g.set_span(s),
            (Group::Fallback(g), Span::Fallback(s)) => g.set_span(s),
            _ => mismatch(),
        }
    }

    // Internal: extracts the compiler group; panics on fallback.
    fn unwrap_nightly(self) -> proc_macro::Group {
        match self {
            Group::Compiler(g) => g,
            Group::Fallback(_) => mismatch(),
        }
    }
}
|
||||
|
||||
// Wraps a fallback group without conversion.
impl From<fallback::Group> for Group {
    fn from(g: fallback::Group) -> Self {
        Group::Fallback(g)
    }
}
|
||||
|
||||
// Delegates rendering to the active backend.
impl fmt::Display for Group {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Group::Compiler(group) => group.fmt(formatter),
            Group::Fallback(group) => group.fmt(formatter),
        }
    }
}
|
||||
|
||||
// Delegates debug formatting to the active backend.
impl fmt::Debug for Group {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Group::Compiler(group) => group.fmt(formatter),
            Group::Fallback(group) => group.fmt(formatter),
        }
    }
}
|
||||
|
||||
/// An identifier backed by either the compiler or the fallback.
#[derive(Clone)]
pub enum Ident {
    Compiler(proc_macro::Ident),
    Fallback(fallback::Ident),
}
|
||||
|
||||
impl Ident {
    /// Creates an identifier on the same backend as `span`.
    pub fn new(string: &str, span: Span) -> Ident {
        match span {
            Span::Compiler(s) => Ident::Compiler(proc_macro::Ident::new(string, s)),
            Span::Fallback(s) => Ident::Fallback(fallback::Ident::new(string, s)),
        }
    }

    /// Creates a raw identifier (`r#...`). The compiler backend used here has
    /// no direct raw-ident constructor, so the string is round-tripped through
    /// the lexer and the resulting ident is re-spanned.
    pub fn new_raw(string: &str, span: Span) -> Ident {
        match span {
            Span::Compiler(s) => {
                let p: proc_macro::TokenStream = string.parse().unwrap();
                let ident = match p.into_iter().next() {
                    Some(proc_macro::TokenTree::Ident(mut i)) => {
                        i.set_span(s);
                        i
                    }
                    // `string` did not lex to a single ident: caller bug.
                    _ => panic!(),
                };
                Ident::Compiler(ident)
            }
            Span::Fallback(s) => Ident::Fallback(fallback::Ident::new_raw(string, s)),
        }
    }

    pub fn span(&self) -> Span {
        match self {
            Ident::Compiler(t) => Span::Compiler(t.span()),
            Ident::Fallback(t) => Span::Fallback(t.span()),
        }
    }

    // Span and ident must share a backend; mixing panics.
    pub fn set_span(&mut self, span: Span) {
        match (self, span) {
            (Ident::Compiler(t), Span::Compiler(s)) => t.set_span(s),
            (Ident::Fallback(t), Span::Fallback(s)) => t.set_span(s),
            _ => mismatch(),
        }
    }

    // Internal: extracts the compiler ident; panics on fallback.
    fn unwrap_nightly(self) -> proc_macro::Ident {
        match self {
            Ident::Compiler(s) => s,
            Ident::Fallback(_) => mismatch(),
        }
    }
}
|
||||
|
||||
impl PartialEq for Ident {
    fn eq(&self, other: &Ident) -> bool {
        match (self, other) {
            // Compiler idents lack PartialEq, so compare by rendered text.
            (Ident::Compiler(t), Ident::Compiler(o)) => t.to_string() == o.to_string(),
            (Ident::Fallback(t), Ident::Fallback(o)) => t == o,
            // Cross-backend comparison is a crate invariant violation.
            _ => mismatch(),
        }
    }
}
|
||||
|
||||
// Allows comparing an Ident directly against string-like values
// (e.g. `ident == "fn"`), by rendered text.
impl<T> PartialEq<T> for Ident
where
    T: ?Sized + AsRef<str>,
{
    fn eq(&self, other: &T) -> bool {
        let other = other.as_ref();
        match self {
            Ident::Compiler(t) => t.to_string() == other,
            Ident::Fallback(t) => t == other,
        }
    }
}
|
||||
|
||||
// Delegates rendering to the active backend.
impl fmt::Display for Ident {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Ident::Compiler(t) => t.fmt(f),
            Ident::Fallback(t) => t.fmt(f),
        }
    }
}
|
||||
|
||||
// Delegates debug formatting to the active backend.
impl fmt::Debug for Ident {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Ident::Compiler(t) => t.fmt(f),
            Ident::Fallback(t) => t.fmt(f),
        }
    }
}
|
||||
|
||||
/// A literal token backed by either the compiler or the fallback.
#[derive(Clone)]
pub enum Literal {
    Compiler(proc_macro::Literal),
    Fallback(fallback::Literal),
}
|
||||
|
||||
// Generates one constructor per `name => type` pair for suffixed numeric
// literals (e.g. `10u8`), dispatching to the active backend's same-named
// constructor. Expanded inside `impl Literal` below.
macro_rules! suffixed_numbers {
    ($($name:ident => $kind:ident,)*) => ($(
        pub fn $name(n: $kind) -> Literal {
            if nightly_works() {
                Literal::Compiler(proc_macro::Literal::$name(n))
            } else {
                Literal::Fallback(fallback::Literal::$name(n))
            }
        }
    )*)
}
|
||||
|
||||
// Same as `suffixed_numbers!` but for unsuffixed integer literals (e.g. `10`);
// kept as a separate macro so the two groups can be invoked independently.
macro_rules! unsuffixed_integers {
    ($($name:ident => $kind:ident,)*) => ($(
        pub fn $name(n: $kind) -> Literal {
            if nightly_works() {
                Literal::Compiler(proc_macro::Literal::$name(n))
            } else {
                Literal::Fallback(fallback::Literal::$name(n))
            }
        }
    )*)
}
|
||||
|
||||
impl Literal {
    // Suffixed numeric constructors (`Literal::u8_suffixed(1)` -> `1u8`, ...).
    suffixed_numbers! {
        u8_suffixed => u8,
        u16_suffixed => u16,
        u32_suffixed => u32,
        u64_suffixed => u64,
        usize_suffixed => usize,
        i8_suffixed => i8,
        i16_suffixed => i16,
        i32_suffixed => i32,
        i64_suffixed => i64,
        isize_suffixed => isize,

        f32_suffixed => f32,
        f64_suffixed => f64,
    }

    // 128-bit variants only when the toolchain supports i128/u128 (build cfg).
    #[cfg(u128)]
    suffixed_numbers! {
        i128_suffixed => i128,
        u128_suffixed => u128,
    }

    // Unsuffixed integer constructors (`Literal::u8_unsuffixed(1)` -> `1`).
    unsuffixed_integers! {
        u8_unsuffixed => u8,
        u16_unsuffixed => u16,
        u32_unsuffixed => u32,
        u64_unsuffixed => u64,
        usize_unsuffixed => usize,
        i8_unsuffixed => i8,
        i16_unsuffixed => i16,
        i32_unsuffixed => i32,
        i64_unsuffixed => i64,
        isize_unsuffixed => isize,
    }

    #[cfg(u128)]
    unsuffixed_integers! {
        i128_unsuffixed => i128,
        u128_unsuffixed => u128,
    }

    // Float constructors are written out by hand rather than via the macro;
    // each dispatches to the active backend like the generated ones.
    pub fn f32_unsuffixed(f: f32) -> Literal {
        if nightly_works() {
            Literal::Compiler(proc_macro::Literal::f32_unsuffixed(f))
        } else {
            Literal::Fallback(fallback::Literal::f32_unsuffixed(f))
        }
    }

    pub fn f64_unsuffixed(f: f64) -> Literal {
        if nightly_works() {
            Literal::Compiler(proc_macro::Literal::f64_unsuffixed(f))
        } else {
            Literal::Fallback(fallback::Literal::f64_unsuffixed(f))
        }
    }

    pub fn string(t: &str) -> Literal {
        if nightly_works() {
            Literal::Compiler(proc_macro::Literal::string(t))
        } else {
            Literal::Fallback(fallback::Literal::string(t))
        }
    }

    pub fn character(t: char) -> Literal {
        if nightly_works() {
            Literal::Compiler(proc_macro::Literal::character(t))
        } else {
            Literal::Fallback(fallback::Literal::character(t))
        }
    }

    pub fn byte_string(bytes: &[u8]) -> Literal {
        if nightly_works() {
            Literal::Compiler(proc_macro::Literal::byte_string(bytes))
        } else {
            Literal::Fallback(fallback::Literal::byte_string(bytes))
        }
    }

    pub fn span(&self) -> Span {
        match self {
            Literal::Compiler(lit) => Span::Compiler(lit.span()),
            Literal::Fallback(lit) => Span::Fallback(lit.span()),
        }
    }

    // Span and literal must share a backend; mixing panics.
    pub fn set_span(&mut self, span: Span) {
        match (self, span) {
            (Literal::Compiler(lit), Span::Compiler(s)) => lit.set_span(s),
            (Literal::Fallback(lit), Span::Fallback(s)) => lit.set_span(s),
            _ => mismatch(),
        }
    }

    // Internal: extracts the compiler literal; panics on fallback.
    fn unwrap_nightly(self) -> proc_macro::Literal {
        match self {
            Literal::Compiler(s) => s,
            Literal::Fallback(_) => mismatch(),
        }
    }
}
|
||||
|
||||
// Wraps a fallback literal without conversion.
impl From<fallback::Literal> for Literal {
    fn from(s: fallback::Literal) -> Literal {
        Literal::Fallback(s)
    }
}
|
||||
|
||||
// Delegates rendering to the active backend.
impl fmt::Display for Literal {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Literal::Compiler(t) => t.fmt(f),
            Literal::Fallback(t) => t.fmt(f),
        }
    }
}
|
||||
|
||||
// Delegates debug formatting to the active backend.
impl fmt::Debug for Literal {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Literal::Compiler(t) => t.fmt(f),
            Literal::Fallback(t) => t.fmt(f),
        }
    }
}
|
|
@ -0,0 +1,61 @@
|
|||
extern crate proc_macro2;
|
||||
|
||||
use proc_macro2::*;
|
||||
|
||||
// Compile-time marker-trait checks, one generated #[test] per invocation.
//
//   assert_impl!(Ty is Send and Sync);      // asserts the traits ARE implemented
//   assert_impl!(Ty is not Send or Sync);   // asserts NONE of them are
macro_rules! assert_impl {
    ($ty:ident is $($marker:ident) and +) => {
        #[test]
        #[allow(non_snake_case)]
        fn $ty() {
            // Fails to compile unless $ty satisfies every listed bound.
            fn assert_implemented<T: $($marker +)+>() {}
            assert_implemented::<$ty>();
        }
    };

    ($ty:ident is not $($marker:ident) or +) => {
        #[test]
        #[allow(non_snake_case)]
        fn $ty() {
            $(
                {
                    // Implemented for types that implement $marker.
                    trait IsNotImplemented {
                        fn assert_not_implemented() {}
                    }
                    impl<T: $marker> IsNotImplemented for T {}

                    // Implemented for the type being tested.
                    trait IsImplemented {
                        fn assert_not_implemented() {}
                    }
                    impl IsImplemented for $ty {}

                    // If $ty does not implement $marker, there is no ambiguity
                    // in the following trait method call. If it DID implement
                    // $marker, both traits would apply and compilation would
                    // fail with an ambiguous-method error.
                    <$ty>::assert_not_implemented();
                }
            )+
        }
    };
}
|
||||
|
||||
assert_impl!(Delimiter is Send and Sync);
|
||||
assert_impl!(Spacing is Send and Sync);
|
||||
|
||||
assert_impl!(Group is not Send or Sync);
|
||||
assert_impl!(Ident is not Send or Sync);
|
||||
assert_impl!(LexError is not Send or Sync);
|
||||
assert_impl!(Literal is not Send or Sync);
|
||||
assert_impl!(Punct is not Send or Sync);
|
||||
assert_impl!(Span is not Send or Sync);
|
||||
assert_impl!(TokenStream is not Send or Sync);
|
||||
assert_impl!(TokenTree is not Send or Sync);
|
||||
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
mod semver_exempt {
|
||||
use super::*;
|
||||
|
||||
assert_impl!(LineColumn is Send and Sync);
|
||||
|
||||
assert_impl!(SourceFile is not Send or Sync);
|
||||
}
|
|
@ -0,0 +1,389 @@
|
|||
extern crate proc_macro2;
|
||||
|
||||
use std::str::{self, FromStr};
|
||||
|
||||
use proc_macro2::{Ident, Literal, Spacing, Span, TokenStream, TokenTree};
|
||||
|
||||
#[test]
|
||||
fn terms() {
|
||||
assert_eq!(
|
||||
Ident::new("String", Span::call_site()).to_string(),
|
||||
"String"
|
||||
);
|
||||
assert_eq!(Ident::new("fn", Span::call_site()).to_string(), "fn");
|
||||
assert_eq!(Ident::new("_", Span::call_site()).to_string(), "_");
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
fn raw_terms() {
|
||||
assert_eq!(
|
||||
Ident::new_raw("String", Span::call_site()).to_string(),
|
||||
"r#String"
|
||||
);
|
||||
assert_eq!(Ident::new_raw("fn", Span::call_site()).to_string(), "r#fn");
|
||||
assert_eq!(Ident::new_raw("_", Span::call_site()).to_string(), "r#_");
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic(expected = "Ident is not allowed to be empty; use Option<Ident>")]
|
||||
fn term_empty() {
|
||||
Ident::new("", Span::call_site());
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic(expected = "Ident cannot be a number; use Literal instead")]
|
||||
fn term_number() {
|
||||
Ident::new("255", Span::call_site());
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic(expected = "\"a#\" is not a valid Ident")]
|
||||
fn term_invalid() {
|
||||
Ident::new("a#", Span::call_site());
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic(expected = "not a valid Ident")]
|
||||
fn raw_term_empty() {
|
||||
Ident::new("r#", Span::call_site());
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic(expected = "not a valid Ident")]
|
||||
fn raw_term_number() {
|
||||
Ident::new("r#255", Span::call_site());
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic(expected = "\"r#a#\" is not a valid Ident")]
|
||||
fn raw_term_invalid() {
|
||||
Ident::new("r#a#", Span::call_site());
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic(expected = "not a valid Ident")]
|
||||
fn lifetime_empty() {
|
||||
Ident::new("'", Span::call_site());
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic(expected = "not a valid Ident")]
|
||||
fn lifetime_number() {
|
||||
Ident::new("'255", Span::call_site());
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic(expected = r#""\'a#" is not a valid Ident"#)]
|
||||
fn lifetime_invalid() {
|
||||
Ident::new("'a#", Span::call_site());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn literals() {
|
||||
assert_eq!(Literal::string("foo").to_string(), "\"foo\"");
|
||||
assert_eq!(Literal::string("\"").to_string(), "\"\\\"\"");
|
||||
assert_eq!(Literal::f32_unsuffixed(10.0).to_string(), "10.0");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn roundtrip() {
|
||||
fn roundtrip(p: &str) {
|
||||
println!("parse: {}", p);
|
||||
let s = p.parse::<TokenStream>().unwrap().to_string();
|
||||
println!("first: {}", s);
|
||||
let s2 = s.to_string().parse::<TokenStream>().unwrap().to_string();
|
||||
assert_eq!(s, s2);
|
||||
}
|
||||
roundtrip("a");
|
||||
roundtrip("<<");
|
||||
roundtrip("<<=");
|
||||
roundtrip(
|
||||
"
|
||||
1
|
||||
1.0
|
||||
1f32
|
||||
2f64
|
||||
1usize
|
||||
4isize
|
||||
4e10
|
||||
1_000
|
||||
1_0i32
|
||||
8u8
|
||||
9
|
||||
0
|
||||
0xffffffffffffffffffffffffffffffff
|
||||
",
|
||||
);
|
||||
roundtrip("'a");
|
||||
roundtrip("'_");
|
||||
roundtrip("'static");
|
||||
roundtrip("'\\u{10__FFFF}'");
|
||||
roundtrip("\"\\u{10_F0FF__}foo\\u{1_0_0_0__}\"");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn fail() {
|
||||
fn fail(p: &str) {
|
||||
if let Ok(s) = p.parse::<TokenStream>() {
|
||||
panic!("should have failed to parse: {}\n{:#?}", p, s);
|
||||
}
|
||||
}
|
||||
fail("1x");
|
||||
fail("1u80");
|
||||
fail("1f320");
|
||||
fail("' static");
|
||||
fail("r#1");
|
||||
fail("r#_");
|
||||
}
|
||||
|
||||
#[cfg(span_locations)]
|
||||
#[test]
|
||||
fn span_test() {
|
||||
use proc_macro2::TokenTree;
|
||||
|
||||
fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
|
||||
let ts = p.parse::<TokenStream>().unwrap();
|
||||
check_spans_internal(ts, &mut lines);
|
||||
}
|
||||
|
||||
fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
|
||||
for i in ts {
|
||||
if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
|
||||
*lines = rest;
|
||||
|
||||
let start = i.span().start();
|
||||
assert_eq!(start.line, sline, "sline did not match for {}", i);
|
||||
assert_eq!(start.column, scol, "scol did not match for {}", i);
|
||||
|
||||
let end = i.span().end();
|
||||
assert_eq!(end.line, eline, "eline did not match for {}", i);
|
||||
assert_eq!(end.column, ecol, "ecol did not match for {}", i);
|
||||
|
||||
match i {
|
||||
TokenTree::Group(ref g) => {
|
||||
check_spans_internal(g.stream().clone(), lines);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
check_spans(
|
||||
"\
|
||||
/// This is a document comment
|
||||
testing 123
|
||||
{
|
||||
testing 234
|
||||
}",
|
||||
&[
|
||||
(1, 0, 1, 30), // #
|
||||
(1, 0, 1, 30), // [ ... ]
|
||||
(1, 0, 1, 30), // doc
|
||||
(1, 0, 1, 30), // =
|
||||
(1, 0, 1, 30), // "This is..."
|
||||
(2, 0, 2, 7), // testing
|
||||
(2, 8, 2, 11), // 123
|
||||
(3, 0, 5, 1), // { ... }
|
||||
(4, 2, 4, 9), // testing
|
||||
(4, 10, 4, 13), // 234
|
||||
],
|
||||
);
|
||||
}
|
||||
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
#[cfg(not(nightly))]
|
||||
#[test]
|
||||
fn default_span() {
|
||||
let start = Span::call_site().start();
|
||||
assert_eq!(start.line, 1);
|
||||
assert_eq!(start.column, 0);
|
||||
let end = Span::call_site().end();
|
||||
assert_eq!(end.line, 1);
|
||||
assert_eq!(end.column, 0);
|
||||
let source_file = Span::call_site().source_file();
|
||||
assert_eq!(source_file.path().to_string_lossy(), "<unspecified>");
|
||||
assert!(!source_file.is_real());
|
||||
}
|
||||
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
#[test]
|
||||
fn span_join() {
|
||||
let source1 = "aaa\nbbb"
|
||||
.parse::<TokenStream>()
|
||||
.unwrap()
|
||||
.into_iter()
|
||||
.collect::<Vec<_>>();
|
||||
let source2 = "ccc\nddd"
|
||||
.parse::<TokenStream>()
|
||||
.unwrap()
|
||||
.into_iter()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
assert!(source1[0].span().source_file() != source2[0].span().source_file());
|
||||
assert_eq!(
|
||||
source1[0].span().source_file(),
|
||||
source1[1].span().source_file()
|
||||
);
|
||||
|
||||
let joined1 = source1[0].span().join(source1[1].span());
|
||||
let joined2 = source1[0].span().join(source2[0].span());
|
||||
assert!(joined1.is_some());
|
||||
assert!(joined2.is_none());
|
||||
|
||||
let start = joined1.unwrap().start();
|
||||
let end = joined1.unwrap().end();
|
||||
assert_eq!(start.line, 1);
|
||||
assert_eq!(start.column, 0);
|
||||
assert_eq!(end.line, 2);
|
||||
assert_eq!(end.column, 3);
|
||||
|
||||
assert_eq!(
|
||||
joined1.unwrap().source_file(),
|
||||
source1[0].span().source_file()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn no_panic() {
|
||||
let s = str::from_utf8(b"b\'\xc2\x86 \x00\x00\x00^\"").unwrap();
|
||||
assert!(s.parse::<proc_macro2::TokenStream>().is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn tricky_doc_comment() {
|
||||
let stream = "/**/".parse::<proc_macro2::TokenStream>().unwrap();
|
||||
let tokens = stream.into_iter().collect::<Vec<_>>();
|
||||
assert!(tokens.is_empty(), "not empty -- {:?}", tokens);
|
||||
|
||||
let stream = "/// doc".parse::<proc_macro2::TokenStream>().unwrap();
|
||||
let tokens = stream.into_iter().collect::<Vec<_>>();
|
||||
assert!(tokens.len() == 2, "not length 2 -- {:?}", tokens);
|
||||
match tokens[0] {
|
||||
proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '#'),
|
||||
_ => panic!("wrong token {:?}", tokens[0]),
|
||||
}
|
||||
let mut tokens = match tokens[1] {
|
||||
proc_macro2::TokenTree::Group(ref tt) => {
|
||||
assert_eq!(tt.delimiter(), proc_macro2::Delimiter::Bracket);
|
||||
tt.stream().into_iter()
|
||||
}
|
||||
_ => panic!("wrong token {:?}", tokens[0]),
|
||||
};
|
||||
|
||||
match tokens.next().unwrap() {
|
||||
proc_macro2::TokenTree::Ident(ref tt) => assert_eq!(tt.to_string(), "doc"),
|
||||
t => panic!("wrong token {:?}", t),
|
||||
}
|
||||
match tokens.next().unwrap() {
|
||||
proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '='),
|
||||
t => panic!("wrong token {:?}", t),
|
||||
}
|
||||
match tokens.next().unwrap() {
|
||||
proc_macro2::TokenTree::Literal(ref tt) => {
|
||||
assert_eq!(tt.to_string(), "\" doc\"");
|
||||
}
|
||||
t => panic!("wrong token {:?}", t),
|
||||
}
|
||||
assert!(tokens.next().is_none());
|
||||
|
||||
let stream = "//! doc".parse::<proc_macro2::TokenStream>().unwrap();
|
||||
let tokens = stream.into_iter().collect::<Vec<_>>();
|
||||
assert!(tokens.len() == 3, "not length 3 -- {:?}", tokens);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn op_before_comment() {
|
||||
let mut tts = TokenStream::from_str("~// comment").unwrap().into_iter();
|
||||
match tts.next().unwrap() {
|
||||
TokenTree::Punct(tt) => {
|
||||
assert_eq!(tt.as_char(), '~');
|
||||
assert_eq!(tt.spacing(), Spacing::Alone);
|
||||
}
|
||||
wrong => panic!("wrong token {:?}", wrong),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn raw_identifier() {
|
||||
let mut tts = TokenStream::from_str("r#dyn").unwrap().into_iter();
|
||||
match tts.next().unwrap() {
|
||||
TokenTree::Ident(raw) => assert_eq!("r#dyn", raw.to_string()),
|
||||
wrong => panic!("wrong token {:?}", wrong),
|
||||
}
|
||||
assert!(tts.next().is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_debug_ident() {
|
||||
let ident = Ident::new("proc_macro", Span::call_site());
|
||||
|
||||
#[cfg(not(procmacro2_semver_exempt))]
|
||||
let expected = "Ident(proc_macro)";
|
||||
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
let expected = "Ident { sym: proc_macro, span: bytes(0..0) }";
|
||||
|
||||
assert_eq!(expected, format!("{:?}", ident));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_debug_tokenstream() {
|
||||
let tts = TokenStream::from_str("[a + 1]").unwrap();
|
||||
|
||||
#[cfg(not(procmacro2_semver_exempt))]
|
||||
let expected = "\
|
||||
TokenStream [
|
||||
Group {
|
||||
delimiter: Bracket,
|
||||
stream: TokenStream [
|
||||
Ident {
|
||||
sym: a
|
||||
},
|
||||
Punct {
|
||||
op: '+',
|
||||
spacing: Alone
|
||||
},
|
||||
Literal {
|
||||
lit: 1
|
||||
}
|
||||
]
|
||||
}
|
||||
]\
|
||||
";
|
||||
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
let expected = "\
|
||||
TokenStream [
|
||||
Group {
|
||||
delimiter: Bracket,
|
||||
stream: TokenStream [
|
||||
Ident {
|
||||
sym: a,
|
||||
span: bytes(2..3)
|
||||
},
|
||||
Punct {
|
||||
op: '+',
|
||||
spacing: Alone,
|
||||
span: bytes(4..5)
|
||||
},
|
||||
Literal {
|
||||
lit: 1,
|
||||
span: bytes(6..7)
|
||||
}
|
||||
],
|
||||
span: bytes(1..8)
|
||||
}
|
||||
]\
|
||||
";
|
||||
|
||||
assert_eq!(expected, format!("{:#?}", tts));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn default_tokenstream_is_empty() {
|
||||
let default_token_stream: TokenStream = Default::default();
|
||||
|
||||
assert!(default_token_stream.is_empty());
|
||||
}
|
|
@ -1 +1 @@
|
|||
{"files":{"Cargo.toml":"b523856472549844b4bf20eca0473d955a7e5eeb95c70eddd31a05ac455427bb","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"89857eaaa305afe540abcf56fabae0194dfb4e7906a8098b7206acb23ed11ce8","build.rs":"36fa668f3bf309f243d0e977e8428446cc424303139c1f63410b3c2e30445aec","src/fallback.rs":"e4d1bcb1e92383a2285e6c947dd74b0e34144904948db68127faea627f5dd6ff","src/lib.rs":"896a1d212e30902ff051313808007406ca4471c27880a6ef19508f0ebb8333ee","src/strnom.rs":"60f5380106dbe568cca7abd09877e133c874fbee95d502e4830425c4613a640d","src/wrapper.rs":"0d7fe28ab2b7ee02b8eb8c5a636da364c60f6704b23e7db0a1ddd57c742f54b1","tests/marker.rs":"0227d07bbc7f2e2ad34662a6acb65668b7dc2f79141c4faa672703a04e27bea0","tests/test.rs":"166d35835355bdaa85bcf69de4dfb56ccddd8acf2e1a8cbc506782632b151674"},"package":"4d317f9caece796be1980837fd5cb3dfec5613ebdb04ad0956deea83ce168915"}
|
||||
{"files":{"Cargo.toml":"9d18d9cad8a90dd6eb3f9ff06357a9f9a93fdb4697445bbdb4b77be361377708","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"362a2156f7645528061b6e8487a2eb0f32f1693012ed82ee57afa05c039bba0d","build.rs":"0cc6e2cb919ddbff59cf1d810283939f97a59f0037540c0f2ee3453237635ff8","src/fallback.rs":"5c6379a90735e27abcc40253b223158c6b1e5784f3850bc423335363e87ef038","src/lib.rs":"3c257d875da825fb74522d74459d4ac697ab3c998f58af57aa17ae9dfaa19308","src/strnom.rs":"37f7791f73f123817ad5403af1d4e2a0714be27401729a2d451bc80b1f26bac9","src/wrapper.rs":"6e9aa48b55da1edd81a72552d6705e251ea5e77827a611bed5fa6a89ee9e3d59","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"c2652e3ae1dfcb94d2e6313b29712c5dcbd0fe62026913e67bb7cebd7560aade","tests/test.rs":"8c427be9cba1fa8d4a16647e53e3545e5863e29e2c0b311c93c9dd1399abf6a1"},"package":"afdc77cc74ec70ed262262942ebb7dac3d479e9e5cfa2da1841c0806f6cdabcc"}
|
|
@ -3,7 +3,7 @@
|
|||
# When uploading crates to the registry Cargo will automatically
|
||||
# "normalize" Cargo.toml files for maximal compatibility
|
||||
# with all versions of Cargo and also rewrite `path` dependencies
|
||||
# to registry (e.g. crates.io) dependencies
|
||||
# to registry (e.g., crates.io) dependencies
|
||||
#
|
||||
# If you believe there's an error in this file please file an
|
||||
# issue against the rust-lang/cargo repository. If you're
|
||||
|
@ -11,24 +11,28 @@
|
|||
# will likely look very different (and much more reasonable)
|
||||
|
||||
[package]
|
||||
edition = "2018"
|
||||
name = "proc-macro2"
|
||||
version = "0.4.27"
|
||||
version = "1.0.4"
|
||||
authors = ["Alex Crichton <alex@alexcrichton.com>"]
|
||||
build = "build.rs"
|
||||
description = "A stable implementation of the upcoming new `proc_macro` API. Comes with an\noption, off by default, to also reimplement itself in terms of the upstream\nunstable API.\n"
|
||||
homepage = "https://github.com/alexcrichton/proc-macro2"
|
||||
documentation = "https://docs.rs/proc-macro2"
|
||||
readme = "README.md"
|
||||
keywords = ["macros"]
|
||||
license = "MIT/Apache-2.0"
|
||||
license = "MIT OR Apache-2.0"
|
||||
repository = "https://github.com/alexcrichton/proc-macro2"
|
||||
[package.metadata.docs.rs]
|
||||
rustc-args = ["--cfg", "procmacro2_semver_exempt"]
|
||||
rustdoc-args = ["--cfg", "procmacro2_semver_exempt"]
|
||||
|
||||
[lib]
|
||||
name = "proc_macro2"
|
||||
[dependencies.unicode-xid]
|
||||
version = "0.1"
|
||||
version = "0.2"
|
||||
[dev-dependencies.quote]
|
||||
version = "0.6"
|
||||
version = "1.0"
|
||||
default_features = false
|
||||
|
||||
[features]
|
||||
default = ["proc-macro"]
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/proc-macro2)
|
||||
|
||||
A wrapper around the procedural macro API of the compiler's `proc_macro` crate.
|
||||
This library serves three purposes:
|
||||
This library serves two purposes:
|
||||
|
||||
- **Bring proc-macro-like functionality to other contexts like build.rs and
|
||||
main.rs.** Types from `proc_macro` are entirely specific to procedural macros
|
||||
|
@ -21,13 +21,6 @@ This library serves three purposes:
|
|||
unit test. In order for helper libraries or components of a macro to be
|
||||
testable in isolation, they must be implemented using `proc_macro2`.
|
||||
|
||||
- **Provide the latest and greatest APIs across all compiler versions.**
|
||||
Procedural macros were first introduced to Rust in 1.15.0 with an extremely
|
||||
minimal interface. Since then, many improvements have landed to make macros
|
||||
more flexible and easier to write. This library tracks the procedural macro
|
||||
API of the most recent stable compiler but employs a polyfill to provide that
|
||||
API consistently across any compiler since 1.15.0.
|
||||
|
||||
[syn]: https://github.com/dtolnay/syn
|
||||
[quote]: https://github.com/dtolnay/quote
|
||||
|
||||
|
@ -35,7 +28,7 @@ This library serves three purposes:
|
|||
|
||||
```toml
|
||||
[dependencies]
|
||||
proc-macro2 = "0.4"
|
||||
proc-macro2 = "1.0"
|
||||
```
|
||||
|
||||
The skeleton of a typical procedural macro typically looks like this:
|
||||
|
@ -58,7 +51,7 @@ pub fn my_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
|
|||
If parsing with [Syn], you'll use [`parse_macro_input!`] instead to propagate
|
||||
parse errors correctly back to the compiler when parsing fails.
|
||||
|
||||
[`parse_macro_input!`]: https://docs.rs/syn/0.15/syn/macro.parse_macro_input.html
|
||||
[`parse_macro_input!`]: https://docs.rs/syn/1.0/syn/macro.parse_macro_input.html
|
||||
|
||||
## Unstable features
|
||||
|
||||
|
@ -67,10 +60,10 @@ API. Functionality in `proc_macro` that is not yet stable is not exposed by
|
|||
proc-macro2 by default.
|
||||
|
||||
To opt into the additional APIs available in the most recent nightly compiler,
|
||||
the `procmacro2_semver_exempt` config flag must be passed to rustc. As usual, we
|
||||
will polyfill those nightly-only APIs all the way back to Rust 1.15.0. As these
|
||||
are unstable APIs that track the nightly compiler, minor versions of proc-macro2
|
||||
may make breaking changes to them at any time.
|
||||
the `procmacro2_semver_exempt` config flag must be passed to rustc. We will
|
||||
polyfill those nightly-only APIs back to Rust 1.31.0. As these are unstable APIs
|
||||
that track the nightly compiler, minor versions of proc-macro2 may make breaking
|
||||
changes to them at any time.
|
||||
|
||||
```
|
||||
RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo build
|
||||
|
@ -82,19 +75,19 @@ reminder that you are outside of the normal semver guarantees.
|
|||
|
||||
Semver exempt methods are marked as such in the proc-macro2 documentation.
|
||||
|
||||
# License
|
||||
<br>
|
||||
|
||||
This project is licensed under either of
|
||||
#### License
|
||||
|
||||
* Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
|
||||
http://www.apache.org/licenses/LICENSE-2.0)
|
||||
* MIT license ([LICENSE-MIT](LICENSE-MIT) or
|
||||
http://opensource.org/licenses/MIT)
|
||||
<sup>
|
||||
Licensed under either of <a href="LICENSE-APACHE">Apache License, Version
|
||||
2.0</a> or <a href="LICENSE-MIT">MIT license</a> at your option.
|
||||
</sup>
|
||||
|
||||
at your option.
|
||||
|
||||
### Contribution
|
||||
<br>
|
||||
|
||||
<sub>
|
||||
Unless you explicitly state otherwise, any contribution intentionally submitted
|
||||
for inclusion in Serde by you, as defined in the Apache-2.0 license, shall be
|
||||
dual licensed as above, without any additional terms or conditions.
|
||||
for inclusion in this crate by you, as defined in the Apache-2.0 license, shall
|
||||
be dual licensed as above, without any additional terms or conditions.
|
||||
</sub>
|
||||
|
|
|
@ -1,9 +1,5 @@
|
|||
// rustc-cfg emitted by the build script:
|
||||
//
|
||||
// "u128"
|
||||
// Include u128 and i128 constructors for proc_macro2::Literal. Enabled on
|
||||
// any compiler 1.26+.
|
||||
//
|
||||
// "use_proc_macro"
|
||||
// Link to extern crate proc_macro. Available on any compiler and any target
|
||||
// except wasm32. Requires "proc-macro" Cargo cfg to be enabled (default is
|
||||
|
@ -18,15 +14,11 @@
|
|||
// procmacro2_semver_exempt surface area is implemented by using the
|
||||
// nightly-only proc_macro API.
|
||||
//
|
||||
// "slow_extend"
|
||||
// Fallback when `impl Extend for TokenStream` is not available. These impls
|
||||
// were added one version later than the rest of the proc_macro token API.
|
||||
// Enabled on rustc 1.29 only.
|
||||
//
|
||||
// "nightly"
|
||||
// Enable the Span::unwrap method. This is to support proc_macro_span and
|
||||
// proc_macro_diagnostic use on the nightly channel without requiring the
|
||||
// semver exemption opt-in. Enabled when building with nightly.
|
||||
// "proc_macro_span"
|
||||
// Enable non-dummy behavior of Span::start and Span::end methods which
|
||||
// requires an unstable compiler feature. Enabled when building with
|
||||
// nightly, unless `-Z allow-feature` in RUSTFLAGS disallows unstable
|
||||
// features.
|
||||
//
|
||||
// "super_unstable"
|
||||
// Implement the semver exempt API in terms of the nightly-only proc_macro
|
||||
|
@ -39,21 +31,20 @@
|
|||
// location inside spans is a performance hit.
|
||||
|
||||
use std::env;
|
||||
use std::process::Command;
|
||||
use std::process::{self, Command};
|
||||
use std::str;
|
||||
|
||||
fn main() {
|
||||
println!("cargo:rerun-if-changed=build.rs");
|
||||
|
||||
let target = env::var("TARGET").unwrap();
|
||||
|
||||
let version = match rustc_version() {
|
||||
Some(version) => version,
|
||||
None => return,
|
||||
};
|
||||
|
||||
if version.minor >= 26 {
|
||||
println!("cargo:rustc-cfg=u128");
|
||||
if version.minor < 31 {
|
||||
eprintln!("Minimum supported rustc version is 1.31");
|
||||
process::exit(1);
|
||||
}
|
||||
|
||||
let semver_exempt = cfg!(procmacro2_semver_exempt);
|
||||
|
@ -66,23 +57,19 @@ fn main() {
|
|||
println!("cargo:rustc-cfg=span_locations");
|
||||
}
|
||||
|
||||
let target = env::var("TARGET").unwrap();
|
||||
if !enable_use_proc_macro(&target) {
|
||||
return;
|
||||
}
|
||||
|
||||
println!("cargo:rustc-cfg=use_proc_macro");
|
||||
|
||||
// Rust 1.29 stabilized the necessary APIs in the `proc_macro` crate
|
||||
if version.nightly || version.minor >= 29 && !semver_exempt {
|
||||
if version.nightly || !semver_exempt {
|
||||
println!("cargo:rustc-cfg=wrap_proc_macro");
|
||||
}
|
||||
|
||||
if version.minor == 29 {
|
||||
println!("cargo:rustc-cfg=slow_extend");
|
||||
}
|
||||
|
||||
if version.nightly {
|
||||
println!("cargo:rustc-cfg=nightly");
|
||||
if version.nightly && feature_allowed("proc_macro_span") {
|
||||
println!("cargo:rustc-cfg=proc_macro_span");
|
||||
}
|
||||
|
||||
if semver_exempt && version.nightly {
|
||||
|
@ -106,28 +93,37 @@ struct RustcVersion {
|
|||
}
|
||||
|
||||
fn rustc_version() -> Option<RustcVersion> {
|
||||
macro_rules! otry {
|
||||
($e:expr) => {
|
||||
match $e {
|
||||
Some(e) => e,
|
||||
None => return None,
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
let rustc = otry!(env::var_os("RUSTC"));
|
||||
let output = otry!(Command::new(rustc).arg("--version").output().ok());
|
||||
let version = otry!(str::from_utf8(&output.stdout).ok());
|
||||
let nightly = version.contains("nightly");
|
||||
let rustc = env::var_os("RUSTC")?;
|
||||
let output = Command::new(rustc).arg("--version").output().ok()?;
|
||||
let version = str::from_utf8(&output.stdout).ok()?;
|
||||
let nightly = version.contains("nightly") || version.contains("dev");
|
||||
let mut pieces = version.split('.');
|
||||
if pieces.next() != Some("rustc 1") {
|
||||
return None;
|
||||
}
|
||||
let minor = otry!(pieces.next());
|
||||
let minor = otry!(minor.parse().ok());
|
||||
|
||||
Some(RustcVersion {
|
||||
minor: minor,
|
||||
nightly: nightly,
|
||||
})
|
||||
let minor = pieces.next()?.parse().ok()?;
|
||||
Some(RustcVersion { minor, nightly })
|
||||
}
|
||||
|
||||
fn feature_allowed(feature: &str) -> bool {
|
||||
// Recognized formats:
|
||||
//
|
||||
// -Z allow-features=feature1,feature2
|
||||
//
|
||||
// -Zallow-features=feature1,feature2
|
||||
|
||||
if let Some(rustflags) = env::var_os("RUSTFLAGS") {
|
||||
for mut flag in rustflags.to_string_lossy().split(' ') {
|
||||
if flag.starts_with("-Z") {
|
||||
flag = &flag["-Z".len()..];
|
||||
}
|
||||
if flag.starts_with("allow-features=") {
|
||||
flag = &flag["allow-features=".len()..];
|
||||
return flag.split(',').any(|allowed| allowed == feature);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// No allow-features= flag, allowed by default.
|
||||
true
|
||||
}
|
||||
|
|
|
@ -1,20 +1,20 @@
|
|||
#[cfg(span_locations)]
|
||||
use std::cell::RefCell;
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
#[cfg(span_locations)]
|
||||
use std::cmp;
|
||||
use std::fmt;
|
||||
use std::iter;
|
||||
use std::ops::RangeBounds;
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
use std::str::FromStr;
|
||||
use std::vec;
|
||||
|
||||
use strnom::{block_comment, skip_whitespace, whitespace, word_break, Cursor, PResult};
|
||||
use crate::strnom::{block_comment, skip_whitespace, whitespace, word_break, Cursor, PResult};
|
||||
use crate::{Delimiter, Punct, Spacing, TokenTree};
|
||||
use unicode_xid::UnicodeXID;
|
||||
|
||||
use {Delimiter, Punct, Spacing, TokenTree};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct TokenStream {
|
||||
inner: Vec<TokenTree>,
|
||||
|
@ -35,8 +35,8 @@ impl TokenStream {
|
|||
|
||||
#[cfg(span_locations)]
|
||||
fn get_cursor(src: &str) -> Cursor {
|
||||
// Create a dummy file & add it to the codemap
|
||||
CODEMAP.with(|cm| {
|
||||
// Create a dummy file & add it to the source map
|
||||
SOURCE_MAP.with(|cm| {
|
||||
let mut cm = cm.borrow_mut();
|
||||
let name = format!("<parsed string {}>", cm.files.len());
|
||||
let span = cm.add_file(&name, src);
|
||||
|
@ -56,7 +56,7 @@ impl FromStr for TokenStream {
|
|||
type Err = LexError;
|
||||
|
||||
fn from_str(src: &str) -> Result<TokenStream, LexError> {
|
||||
// Create a dummy file & add it to the codemap
|
||||
// Create a dummy file & add it to the source map
|
||||
let cursor = get_cursor(src);
|
||||
|
||||
match token_stream(cursor) {
|
||||
|
@ -118,8 +118,8 @@ impl fmt::Debug for TokenStream {
|
|||
}
|
||||
|
||||
#[cfg(use_proc_macro)]
|
||||
impl From<::proc_macro::TokenStream> for TokenStream {
|
||||
fn from(inner: ::proc_macro::TokenStream) -> TokenStream {
|
||||
impl From<proc_macro::TokenStream> for TokenStream {
|
||||
fn from(inner: proc_macro::TokenStream) -> TokenStream {
|
||||
inner
|
||||
.to_string()
|
||||
.parse()
|
||||
|
@ -128,8 +128,8 @@ impl From<::proc_macro::TokenStream> for TokenStream {
|
|||
}
|
||||
|
||||
#[cfg(use_proc_macro)]
|
||||
impl From<TokenStream> for ::proc_macro::TokenStream {
|
||||
fn from(inner: TokenStream) -> ::proc_macro::TokenStream {
|
||||
impl From<TokenStream> for proc_macro::TokenStream {
|
||||
fn from(inner: TokenStream) -> proc_macro::TokenStream {
|
||||
inner
|
||||
.to_string()
|
||||
.parse()
|
||||
|
@ -225,7 +225,7 @@ pub struct LineColumn {
|
|||
|
||||
#[cfg(span_locations)]
|
||||
thread_local! {
|
||||
static CODEMAP: RefCell<Codemap> = RefCell::new(Codemap {
|
||||
static SOURCE_MAP: RefCell<SourceMap> = RefCell::new(SourceMap {
|
||||
// NOTE: We start with a single dummy file which all call_site() and
|
||||
// def_site() spans reference.
|
||||
files: vec![{
|
||||
|
@ -295,12 +295,12 @@ fn lines_offsets(s: &str) -> Vec<usize> {
|
|||
}
|
||||
|
||||
#[cfg(span_locations)]
|
||||
struct Codemap {
|
||||
struct SourceMap {
|
||||
files: Vec<FileInfo>,
|
||||
}
|
||||
|
||||
#[cfg(span_locations)]
|
||||
impl Codemap {
|
||||
impl SourceMap {
|
||||
fn next_start_pos(&self) -> u32 {
|
||||
// Add 1 so there's always space between files.
|
||||
//
|
||||
|
@ -314,22 +314,19 @@ impl Codemap {
|
|||
let lo = self.next_start_pos();
|
||||
// XXX(nika): Shouild we bother doing a checked cast or checked add here?
|
||||
let span = Span {
|
||||
lo: lo,
|
||||
lo,
|
||||
hi: lo + (src.len() as u32),
|
||||
};
|
||||
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
self.files.push(FileInfo {
|
||||
name: name.to_owned(),
|
||||
span: span,
|
||||
lines: lines,
|
||||
span,
|
||||
lines,
|
||||
});
|
||||
|
||||
#[cfg(not(procmacro2_semver_exempt))]
|
||||
self.files.push(FileInfo {
|
||||
span: span,
|
||||
lines: lines,
|
||||
});
|
||||
self.files.push(FileInfo { span, lines });
|
||||
let _ = name;
|
||||
|
||||
span
|
||||
|
@ -384,7 +381,7 @@ impl Span {
|
|||
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
pub fn source_file(&self) -> SourceFile {
|
||||
CODEMAP.with(|cm| {
|
||||
SOURCE_MAP.with(|cm| {
|
||||
let cm = cm.borrow();
|
||||
let fi = cm.fileinfo(*self);
|
||||
SourceFile {
|
||||
|
@ -395,7 +392,7 @@ impl Span {
|
|||
|
||||
#[cfg(span_locations)]
|
||||
pub fn start(&self) -> LineColumn {
|
||||
CODEMAP.with(|cm| {
|
||||
SOURCE_MAP.with(|cm| {
|
||||
let cm = cm.borrow();
|
||||
let fi = cm.fileinfo(*self);
|
||||
fi.offset_line_column(self.lo as usize)
|
||||
|
@ -404,16 +401,21 @@ impl Span {
|
|||
|
||||
#[cfg(span_locations)]
|
||||
pub fn end(&self) -> LineColumn {
|
||||
CODEMAP.with(|cm| {
|
||||
SOURCE_MAP.with(|cm| {
|
||||
let cm = cm.borrow();
|
||||
let fi = cm.fileinfo(*self);
|
||||
fi.offset_line_column(self.hi as usize)
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
#[cfg(not(span_locations))]
|
||||
pub fn join(&self, _other: Span) -> Option<Span> {
|
||||
Some(Span {})
|
||||
}
|
||||
|
||||
#[cfg(span_locations)]
|
||||
pub fn join(&self, other: Span) -> Option<Span> {
|
||||
CODEMAP.with(|cm| {
|
||||
SOURCE_MAP.with(|cm| {
|
||||
let cm = cm.borrow();
|
||||
// If `other` is not within the same FileInfo as us, return None.
|
||||
if !cm.fileinfo(*self).span_within(other) {
|
||||
|
@ -453,8 +455,8 @@ pub struct Group {
|
|||
impl Group {
|
||||
pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
|
||||
Group {
|
||||
delimiter: delimiter,
|
||||
stream: stream,
|
||||
delimiter,
|
||||
stream,
|
||||
span: Span::call_site(),
|
||||
}
|
||||
}
|
||||
|
@ -471,12 +473,10 @@ impl Group {
|
|||
self.span
|
||||
}
|
||||
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
pub fn span_open(&self) -> Span {
|
||||
self.span
|
||||
}
|
||||
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
pub fn span_close(&self) -> Span {
|
||||
self.span
|
||||
}
|
||||
|
@ -523,12 +523,12 @@ pub struct Ident {
|
|||
|
||||
impl Ident {
|
||||
fn _new(string: &str, raw: bool, span: Span) -> Ident {
|
||||
validate_term(string);
|
||||
validate_ident(string);
|
||||
|
||||
Ident {
|
||||
sym: string.to_owned(),
|
||||
span: span,
|
||||
raw: raw,
|
||||
span,
|
||||
raw,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -566,7 +566,7 @@ fn is_ident_continue(c: char) -> bool {
|
|||
|| (c > '\x7f' && UnicodeXID::is_xid_continue(c))
|
||||
}
|
||||
|
||||
fn validate_term(string: &str) {
|
||||
fn validate_ident(string: &str) {
|
||||
let validate = string;
|
||||
if validate.is_empty() {
|
||||
panic!("Ident is not allowed to be empty; use Option<Ident>");
|
||||
|
@ -671,7 +671,7 @@ macro_rules! unsuffixed_numbers {
|
|||
impl Literal {
|
||||
fn _new(text: String) -> Literal {
|
||||
Literal {
|
||||
text: text,
|
||||
text,
|
||||
span: Span::call_site(),
|
||||
}
|
||||
}
|
||||
|
@ -681,40 +681,32 @@ impl Literal {
|
|||
u16_suffixed => u16,
|
||||
u32_suffixed => u32,
|
||||
u64_suffixed => u64,
|
||||
u128_suffixed => u128,
|
||||
usize_suffixed => usize,
|
||||
i8_suffixed => i8,
|
||||
i16_suffixed => i16,
|
||||
i32_suffixed => i32,
|
||||
i64_suffixed => i64,
|
||||
i128_suffixed => i128,
|
||||
isize_suffixed => isize,
|
||||
|
||||
f32_suffixed => f32,
|
||||
f64_suffixed => f64,
|
||||
}
|
||||
|
||||
#[cfg(u128)]
|
||||
suffixed_numbers! {
|
||||
u128_suffixed => u128,
|
||||
i128_suffixed => i128,
|
||||
}
|
||||
|
||||
unsuffixed_numbers! {
|
||||
u8_unsuffixed => u8,
|
||||
u16_unsuffixed => u16,
|
||||
u32_unsuffixed => u32,
|
||||
u64_unsuffixed => u64,
|
||||
u128_unsuffixed => u128,
|
||||
usize_unsuffixed => usize,
|
||||
i8_unsuffixed => i8,
|
||||
i16_unsuffixed => i16,
|
||||
i32_unsuffixed => i32,
|
||||
i64_unsuffixed => i64,
|
||||
isize_unsuffixed => isize,
|
||||
}
|
||||
|
||||
#[cfg(u128)]
|
||||
unsuffixed_numbers! {
|
||||
u128_unsuffixed => u128,
|
||||
i128_unsuffixed => i128,
|
||||
isize_unsuffixed => isize,
|
||||
}
|
||||
|
||||
pub fn f32_unsuffixed(f: f32) -> Literal {
|
||||
|
@ -734,17 +726,31 @@ impl Literal {
|
|||
}
|
||||
|
||||
pub fn string(t: &str) -> Literal {
|
||||
let mut s = t
|
||||
.chars()
|
||||
.flat_map(|c| c.escape_default())
|
||||
.collect::<String>();
|
||||
s.push('"');
|
||||
s.insert(0, '"');
|
||||
Literal::_new(s)
|
||||
let mut text = String::with_capacity(t.len() + 2);
|
||||
text.push('"');
|
||||
for c in t.chars() {
|
||||
if c == '\'' {
|
||||
// escape_default turns this into "\'" which is unnecessary.
|
||||
text.push(c);
|
||||
} else {
|
||||
text.extend(c.escape_default());
|
||||
}
|
||||
}
|
||||
text.push('"');
|
||||
Literal::_new(text)
|
||||
}
|
||||
|
||||
pub fn character(t: char) -> Literal {
|
||||
Literal::_new(format!("'{}'", t.escape_default().collect::<String>()))
|
||||
let mut text = String::new();
|
||||
text.push('\'');
|
||||
if t == '"' {
|
||||
// escape_default turns this into '\"' which is unnecessary.
|
||||
text.push(t);
|
||||
} else {
|
||||
text.extend(t.escape_default());
|
||||
}
|
||||
text.push('\'');
|
||||
Literal::_new(text)
|
||||
}
|
||||
|
||||
pub fn byte_string(bytes: &[u8]) -> Literal {
|
||||
|
@ -757,7 +763,7 @@ impl Literal {
|
|||
b'\r' => escaped.push_str(r"\r"),
|
||||
b'"' => escaped.push_str("\\\""),
|
||||
b'\\' => escaped.push_str("\\\\"),
|
||||
b'\x20'...b'\x7E' => escaped.push(*b as char),
|
||||
b'\x20'..=b'\x7E' => escaped.push(*b as char),
|
||||
_ => escaped.push_str(&format!("\\x{:02X}", b)),
|
||||
}
|
||||
}
|
||||
|
@ -772,6 +778,10 @@ impl Literal {
|
|||
pub fn set_span(&mut self, span: Span) {
|
||||
self.span = span;
|
||||
}
|
||||
|
||||
pub fn subspan<R: RangeBounds<usize>>(&self, _range: R) -> Option<Span> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for Literal {
|
||||
|
@ -817,21 +827,21 @@ fn token_stream(mut input: Cursor) -> PResult<TokenStream> {
|
|||
fn spanned<'a, T>(
|
||||
input: Cursor<'a>,
|
||||
f: fn(Cursor<'a>) -> PResult<'a, T>,
|
||||
) -> PResult<'a, (T, ::Span)> {
|
||||
) -> PResult<'a, (T, crate::Span)> {
|
||||
let (a, b) = f(skip_whitespace(input))?;
|
||||
Ok((a, ((b, ::Span::_new_stable(Span::call_site())))))
|
||||
Ok((a, ((b, crate::Span::_new_stable(Span::call_site())))))
|
||||
}
|
||||
|
||||
#[cfg(span_locations)]
|
||||
fn spanned<'a, T>(
|
||||
input: Cursor<'a>,
|
||||
f: fn(Cursor<'a>) -> PResult<'a, T>,
|
||||
) -> PResult<'a, (T, ::Span)> {
|
||||
) -> PResult<'a, (T, crate::Span)> {
|
||||
let input = skip_whitespace(input);
|
||||
let lo = input.off;
|
||||
let (a, b) = f(input)?;
|
||||
let hi = a.off;
|
||||
let span = ::Span::_new_stable(Span { lo: lo, hi: hi });
|
||||
let span = crate::Span::_new_stable(Span { lo, hi });
|
||||
Ok((a, (b, span)))
|
||||
}
|
||||
|
||||
|
@ -842,9 +852,9 @@ fn token_tree(input: Cursor) -> PResult<TokenTree> {
|
|||
}
|
||||
|
||||
named!(token_kind -> TokenTree, alt!(
|
||||
map!(group, |g| TokenTree::Group(::Group::_new_stable(g)))
|
||||
map!(group, |g| TokenTree::Group(crate::Group::_new_stable(g)))
|
||||
|
|
||||
map!(literal, |l| TokenTree::Literal(::Literal::_new_stable(l))) // must be before symbol
|
||||
map!(literal, |l| TokenTree::Literal(crate::Literal::_new_stable(l))) // must be before symbol
|
||||
|
|
||||
map!(op, TokenTree::Punct)
|
||||
|
|
||||
|
@ -876,14 +886,27 @@ fn symbol_leading_ws(input: Cursor) -> PResult<TokenTree> {
|
|||
}
|
||||
|
||||
fn symbol(input: Cursor) -> PResult<TokenTree> {
|
||||
let mut chars = input.char_indices();
|
||||
|
||||
let raw = input.starts_with("r#");
|
||||
if raw {
|
||||
chars.next();
|
||||
chars.next();
|
||||
let rest = input.advance((raw as usize) << 1);
|
||||
|
||||
let (rest, sym) = symbol_not_raw(rest)?;
|
||||
|
||||
if !raw {
|
||||
let ident = crate::Ident::new(sym, crate::Span::call_site());
|
||||
return Ok((rest, ident.into()));
|
||||
}
|
||||
|
||||
if sym == "_" {
|
||||
return Err(LexError);
|
||||
}
|
||||
|
||||
let ident = crate::Ident::_new_raw(sym, crate::Span::call_site());
|
||||
Ok((rest, ident.into()))
|
||||
}
|
||||
|
||||
fn symbol_not_raw(input: Cursor) -> PResult<&str> {
|
||||
let mut chars = input.char_indices();
|
||||
|
||||
match chars.next() {
|
||||
Some((_, ch)) if is_ident_start(ch) => {}
|
||||
_ => return Err(LexError),
|
||||
|
@ -897,17 +920,7 @@ fn symbol(input: Cursor) -> PResult<TokenTree> {
|
|||
}
|
||||
}
|
||||
|
||||
let a = &input.rest[..end];
|
||||
if a == "r#_" {
|
||||
Err(LexError)
|
||||
} else {
|
||||
let ident = if raw {
|
||||
::Ident::_new_raw(&a[2..], ::Span::call_site())
|
||||
} else {
|
||||
::Ident::new(a, ::Span::call_site())
|
||||
};
|
||||
Ok((input.advance(end), ident.into()))
|
||||
}
|
||||
Ok((input.advance(end), &input.rest[..end]))
|
||||
}
|
||||
|
||||
fn literal(input: Cursor) -> PResult<Literal> {
|
||||
|
@ -947,10 +960,12 @@ named!(string -> (), alt!(
|
|||
) => { |_| () }
|
||||
));
|
||||
|
||||
named!(quoted_string -> (), delimited!(
|
||||
punct!("\""),
|
||||
cooked_string,
|
||||
tag!("\"")
|
||||
named!(quoted_string -> (), do_parse!(
|
||||
punct!("\"") >>
|
||||
cooked_string >>
|
||||
tag!("\"") >>
|
||||
option!(symbol_not_raw) >>
|
||||
(())
|
||||
));
|
||||
|
||||
fn cooked_string(input: Cursor) -> PResult<()> {
|
||||
|
@ -1159,8 +1174,8 @@ fn backslash_x_char<I>(chars: &mut I) -> bool
|
|||
where
|
||||
I: Iterator<Item = (usize, char)>,
|
||||
{
|
||||
next_ch!(chars @ '0'...'7');
|
||||
next_ch!(chars @ '0'...'9' | 'a'...'f' | 'A'...'F');
|
||||
next_ch!(chars @ '0'..='7');
|
||||
next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
|
||||
true
|
||||
}
|
||||
|
||||
|
@ -1168,8 +1183,8 @@ fn backslash_x_byte<I>(chars: &mut I) -> bool
|
|||
where
|
||||
I: Iterator<Item = (usize, u8)>,
|
||||
{
|
||||
next_ch!(chars @ b'0'...b'9' | b'a'...b'f' | b'A'...b'F');
|
||||
next_ch!(chars @ b'0'...b'9' | b'a'...b'f' | b'A'...b'F');
|
||||
next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
|
||||
next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
|
||||
true
|
||||
}
|
||||
|
||||
|
@ -1178,9 +1193,9 @@ where
|
|||
I: Iterator<Item = (usize, char)>,
|
||||
{
|
||||
next_ch!(chars @ '{');
|
||||
next_ch!(chars @ '0'...'9' | 'a'...'f' | 'A'...'F');
|
||||
next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
|
||||
loop {
|
||||
let c = next_ch!(chars @ '0'...'9' | 'a'...'f' | 'A'...'F' | '_' | '}');
|
||||
let c = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F' | '_' | '}');
|
||||
if c == '}' {
|
||||
return true;
|
||||
}
|
||||
|
@ -1188,10 +1203,10 @@ where
|
|||
}
|
||||
|
||||
fn float(input: Cursor) -> PResult<()> {
|
||||
let (rest, ()) = float_digits(input)?;
|
||||
for suffix in &["f32", "f64"] {
|
||||
if rest.starts_with(suffix) {
|
||||
return word_break(rest.advance(suffix.len()));
|
||||
let (mut rest, ()) = float_digits(input)?;
|
||||
if let Some(ch) = rest.chars().next() {
|
||||
if is_ident_start(ch) {
|
||||
rest = symbol_not_raw(rest)?.0;
|
||||
}
|
||||
}
|
||||
word_break(rest)
|
||||
|
@ -1209,7 +1224,7 @@ fn float_digits(input: Cursor) -> PResult<()> {
|
|||
let mut has_exp = false;
|
||||
while let Some(&ch) = chars.peek() {
|
||||
match ch {
|
||||
'0'...'9' | '_' => {
|
||||
'0'..='9' | '_' => {
|
||||
chars.next();
|
||||
len += 1;
|
||||
}
|
||||
|
@ -1220,7 +1235,7 @@ fn float_digits(input: Cursor) -> PResult<()> {
|
|||
chars.next();
|
||||
if chars
|
||||
.peek()
|
||||
.map(|&ch| ch == '.' || UnicodeXID::is_xid_start(ch))
|
||||
.map(|&ch| ch == '.' || is_ident_start(ch))
|
||||
.unwrap_or(false)
|
||||
{
|
||||
return Err(LexError);
|
||||
|
@ -1254,7 +1269,7 @@ fn float_digits(input: Cursor) -> PResult<()> {
|
|||
chars.next();
|
||||
len += 1;
|
||||
}
|
||||
'0'...'9' => {
|
||||
'0'..='9' => {
|
||||
chars.next();
|
||||
len += 1;
|
||||
has_exp_value = true;
|
||||
|
@ -1275,12 +1290,10 @@ fn float_digits(input: Cursor) -> PResult<()> {
|
|||
}
|
||||
|
||||
fn int(input: Cursor) -> PResult<()> {
|
||||
let (rest, ()) = digits(input)?;
|
||||
for suffix in &[
|
||||
"isize", "i8", "i16", "i32", "i64", "i128", "usize", "u8", "u16", "u32", "u64", "u128",
|
||||
] {
|
||||
if rest.starts_with(suffix) {
|
||||
return word_break(rest.advance(suffix.len()));
|
||||
let (mut rest, ()) = digits(input)?;
|
||||
if let Some(ch) = rest.chars().next() {
|
||||
if is_ident_start(ch) {
|
||||
rest = symbol_not_raw(rest)?.0;
|
||||
}
|
||||
}
|
||||
word_break(rest)
|
||||
|
@ -1304,9 +1317,9 @@ fn digits(mut input: Cursor) -> PResult<()> {
|
|||
let mut empty = true;
|
||||
for b in input.bytes() {
|
||||
let digit = match b {
|
||||
b'0'...b'9' => (b - b'0') as u64,
|
||||
b'a'...b'f' => 10 + (b - b'a') as u64,
|
||||
b'A'...b'F' => 10 + (b - b'A') as u64,
|
||||
b'0'..=b'9' => (b - b'0') as u64,
|
||||
b'a'..=b'f' => 10 + (b - b'a') as u64,
|
||||
b'A'..=b'F' => 10 + (b - b'A') as u64,
|
||||
b'_' => {
|
||||
if empty && base == 10 {
|
||||
return Err(LexError);
|
||||
|
@ -1376,15 +1389,15 @@ fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
|
|||
trees.push(Punct::new('!', Spacing::Alone).into());
|
||||
}
|
||||
let mut stream = vec![
|
||||
TokenTree::Ident(::Ident::new("doc", span)),
|
||||
TokenTree::Ident(crate::Ident::new("doc", span)),
|
||||
TokenTree::Punct(Punct::new('=', Spacing::Alone)),
|
||||
TokenTree::Literal(::Literal::string(comment)),
|
||||
TokenTree::Literal(crate::Literal::string(comment)),
|
||||
];
|
||||
for tt in stream.iter_mut() {
|
||||
tt.set_span(span);
|
||||
}
|
||||
let group = Group::new(Delimiter::Bracket, stream.into_iter().collect());
|
||||
trees.push(::Group::_new_stable(group).into());
|
||||
trees.push(crate::Group::_new_stable(group).into());
|
||||
for tt in trees.iter_mut() {
|
||||
tt.set_span(span);
|
||||
}
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
//! A wrapper around the procedural macro API of the compiler's [`proc_macro`]
|
||||
//! crate. This library serves three purposes:
|
||||
//! crate. This library serves two purposes:
|
||||
//!
|
||||
//! [`proc_macro`]: https://doc.rust-lang.org/proc_macro/
|
||||
//!
|
||||
|
@ -18,14 +18,6 @@
|
|||
//! a macro to be testable in isolation, they must be implemented using
|
||||
//! `proc_macro2`.
|
||||
//!
|
||||
//! - **Provide the latest and greatest APIs across all compiler versions.**
|
||||
//! Procedural macros were first introduced to Rust in 1.15.0 with an
|
||||
//! extremely minimal interface. Since then, many improvements have landed to
|
||||
//! make macros more flexible and easier to write. This library tracks the
|
||||
//! procedural macro API of the most recent stable compiler but employs a
|
||||
//! polyfill to provide that API consistently across any compiler since
|
||||
//! 1.15.0.
|
||||
//!
|
||||
//! [syn]: https://github.com/dtolnay/syn
|
||||
//! [quote]: https://github.com/dtolnay/quote
|
||||
//!
|
||||
|
@ -33,12 +25,13 @@
|
|||
//!
|
||||
//! The skeleton of a typical procedural macro typically looks like this:
|
||||
//!
|
||||
//! ```edition2018
|
||||
//! ```
|
||||
//! extern crate proc_macro;
|
||||
//!
|
||||
//! # const IGNORE: &str = stringify! {
|
||||
//! #[proc_macro_derive(MyDerive)]
|
||||
//! # };
|
||||
//! # #[cfg(wrap_proc_macro)]
|
||||
//! pub fn my_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
|
||||
//! let input = proc_macro2::TokenStream::from(input);
|
||||
//!
|
||||
|
@ -54,7 +47,7 @@
|
|||
//! If parsing with [Syn], you'll use [`parse_macro_input!`] instead to
|
||||
//! propagate parse errors correctly back to the compiler when parsing fails.
|
||||
//!
|
||||
//! [`parse_macro_input!`]: https://docs.rs/syn/0.15/syn/macro.parse_macro_input.html
|
||||
//! [`parse_macro_input!`]: https://docs.rs/syn/1.0/syn/macro.parse_macro_input.html
|
||||
//!
|
||||
//! # Unstable features
|
||||
//!
|
||||
|
@ -64,9 +57,9 @@
|
|||
//!
|
||||
//! To opt into the additional APIs available in the most recent nightly
|
||||
//! compiler, the `procmacro2_semver_exempt` config flag must be passed to
|
||||
//! rustc. As usual, we will polyfill those nightly-only APIs all the way back
|
||||
//! to Rust 1.15.0. As these are unstable APIs that track the nightly compiler,
|
||||
//! minor versions of proc-macro2 may make breaking changes to them at any time.
|
||||
//! rustc. We will polyfill those nightly-only APIs back to Rust 1.31.0. As
|
||||
//! these are unstable APIs that track the nightly compiler, minor versions of
|
||||
//! proc-macro2 may make breaking changes to them at any time.
|
||||
//!
|
||||
//! ```sh
|
||||
//! RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo build
|
||||
|
@ -77,21 +70,27 @@
|
|||
//! as a reminder that you are outside of the normal semver guarantees.
|
||||
//!
|
||||
//! Semver exempt methods are marked as such in the proc-macro2 documentation.
|
||||
//!
|
||||
//! # Thread-Safety
|
||||
//!
|
||||
//! Most types in this crate are `!Sync` because the underlying compiler
|
||||
//! types make use of thread-local memory, meaning they cannot be accessed from
|
||||
//! a different thread.
|
||||
|
||||
// Proc-macro2 types in rustdoc of other crates get linked to here.
|
||||
#![doc(html_root_url = "https://docs.rs/proc-macro2/0.4.27")]
|
||||
#![cfg_attr(nightly, feature(proc_macro_span))]
|
||||
#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.4")]
|
||||
#![cfg_attr(any(proc_macro_span, super_unstable), feature(proc_macro_span))]
|
||||
#![cfg_attr(super_unstable, feature(proc_macro_raw_ident, proc_macro_def_site))]
|
||||
|
||||
#[cfg(use_proc_macro)]
|
||||
extern crate proc_macro;
|
||||
extern crate unicode_xid;
|
||||
|
||||
use std::cmp::Ordering;
|
||||
use std::fmt;
|
||||
use std::hash::{Hash, Hasher};
|
||||
use std::iter::FromIterator;
|
||||
use std::marker;
|
||||
use std::ops::RangeBounds;
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
use std::path::PathBuf;
|
||||
use std::rc::Rc;
|
||||
|
@ -102,7 +101,7 @@ mod strnom;
|
|||
mod fallback;
|
||||
|
||||
#[cfg(not(wrap_proc_macro))]
|
||||
use fallback as imp;
|
||||
use crate::fallback as imp;
|
||||
#[path = "wrapper.rs"]
|
||||
#[cfg(wrap_proc_macro)]
|
||||
mod imp;
|
||||
|
@ -129,7 +128,7 @@ pub struct LexError {
|
|||
impl TokenStream {
|
||||
fn _new(inner: imp::TokenStream) -> TokenStream {
|
||||
TokenStream {
|
||||
inner: inner,
|
||||
inner,
|
||||
_marker: marker::PhantomData,
|
||||
}
|
||||
}
|
||||
|
@ -146,11 +145,6 @@ impl TokenStream {
|
|||
TokenStream::_new(imp::TokenStream::new())
|
||||
}
|
||||
|
||||
#[deprecated(since = "0.4.4", note = "please use TokenStream::new")]
|
||||
pub fn empty() -> TokenStream {
|
||||
TokenStream::new()
|
||||
}
|
||||
|
||||
/// Checks if this `TokenStream` is empty.
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.inner.is_empty()
|
||||
|
@ -199,6 +193,12 @@ impl From<TokenStream> for proc_macro::TokenStream {
|
|||
}
|
||||
}
|
||||
|
||||
impl From<TokenTree> for TokenStream {
|
||||
fn from(token: TokenTree) -> Self {
|
||||
TokenStream::_new(imp::TokenStream::from(token))
|
||||
}
|
||||
}
|
||||
|
||||
impl Extend<TokenTree> for TokenStream {
|
||||
fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
|
||||
self.inner.extend(streams)
|
||||
|
@ -261,7 +261,7 @@ pub struct SourceFile {
|
|||
impl SourceFile {
|
||||
fn _new(inner: imp::SourceFile) -> Self {
|
||||
SourceFile {
|
||||
inner: inner,
|
||||
inner,
|
||||
_marker: marker::PhantomData,
|
||||
}
|
||||
}
|
||||
|
@ -301,6 +301,7 @@ impl fmt::Debug for SourceFile {
|
|||
///
|
||||
/// This type is semver exempt and not exposed by default.
|
||||
#[cfg(span_locations)]
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
|
||||
pub struct LineColumn {
|
||||
/// The 1-indexed line in the source file on which the span starts or ends
|
||||
/// (inclusive).
|
||||
|
@ -320,7 +321,7 @@ pub struct Span {
|
|||
impl Span {
|
||||
fn _new(inner: imp::Span) -> Span {
|
||||
Span {
|
||||
inner: inner,
|
||||
inner,
|
||||
_marker: marker::PhantomData,
|
||||
}
|
||||
}
|
||||
|
@ -403,10 +404,7 @@ impl Span {
|
|||
#[cfg(span_locations)]
|
||||
pub fn start(&self) -> LineColumn {
|
||||
let imp::LineColumn { line, column } = self.inner.start();
|
||||
LineColumn {
|
||||
line: line,
|
||||
column: column,
|
||||
}
|
||||
LineColumn { line, column }
|
||||
}
|
||||
|
||||
/// Get the ending line/column in the source file for this span.
|
||||
|
@ -415,23 +413,23 @@ impl Span {
|
|||
#[cfg(span_locations)]
|
||||
pub fn end(&self) -> LineColumn {
|
||||
let imp::LineColumn { line, column } = self.inner.end();
|
||||
LineColumn {
|
||||
line: line,
|
||||
column: column,
|
||||
}
|
||||
LineColumn { line, column }
|
||||
}
|
||||
|
||||
/// Create a new span encompassing `self` and `other`.
|
||||
///
|
||||
/// Returns `None` if `self` and `other` are from different files.
|
||||
///
|
||||
/// This method is semver exempt and not exposed by default.
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
/// Warning: the underlying [`proc_macro::Span::join`] method is
|
||||
/// nightly-only. When called from within a procedural macro not using a
|
||||
/// nightly compiler, this method will always return `None`.
|
||||
///
|
||||
/// [`proc_macro::Span::join`]: https://doc.rust-lang.org/proc_macro/struct.Span.html#method.join
|
||||
pub fn join(&self, other: Span) -> Option<Span> {
|
||||
self.inner.join(other.inner).map(Span::_new)
|
||||
}
|
||||
|
||||
/// Compares to spans to see if they're equal.
|
||||
/// Compares two spans to see if they're equal.
|
||||
///
|
||||
/// This method is semver exempt and not exposed by default.
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
|
@ -575,7 +573,7 @@ pub enum Delimiter {
|
|||
|
||||
impl Group {
|
||||
fn _new(inner: imp::Group) -> Self {
|
||||
Group { inner: inner }
|
||||
Group { inner }
|
||||
}
|
||||
|
||||
fn _new_stable(inner: fallback::Group) -> Self {
|
||||
|
@ -625,7 +623,6 @@ impl Group {
|
|||
/// pub fn span_open(&self) -> Span {
|
||||
/// ^
|
||||
/// ```
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
pub fn span_open(&self) -> Span {
|
||||
Span::_new(self.inner.span_open())
|
||||
}
|
||||
|
@ -636,7 +633,6 @@ impl Group {
|
|||
/// pub fn span_close(&self) -> Span {
|
||||
/// ^
|
||||
/// ```
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
pub fn span_close(&self) -> Span {
|
||||
Span::_new(self.inner.span_close())
|
||||
}
|
||||
|
@ -684,7 +680,7 @@ pub struct Punct {
|
|||
pub enum Spacing {
|
||||
/// E.g. `+` is `Alone` in `+ =`, `+ident` or `+()`.
|
||||
Alone,
|
||||
/// E.g. `+` is `Joint` in `+=` or `'#`.
|
||||
/// E.g. `+` is `Joint` in `+=` or `'` is `Joint` in `'#`.
|
||||
///
|
||||
/// Additionally, single quote `'` can join with identifiers to form
|
||||
/// lifetimes `'ident`.
|
||||
|
@ -701,8 +697,8 @@ impl Punct {
|
|||
/// which can be further configured with the `set_span` method below.
|
||||
pub fn new(op: char, spacing: Spacing) -> Punct {
|
||||
Punct {
|
||||
op: op,
|
||||
spacing: spacing,
|
||||
op,
|
||||
spacing,
|
||||
span: Span::call_site(),
|
||||
}
|
||||
}
|
||||
|
@ -764,7 +760,7 @@ impl fmt::Debug for Punct {
|
|||
/// Rust keywords. Use `input.call(Ident::parse_any)` when parsing to match the
|
||||
/// behaviour of `Ident::new`.
|
||||
///
|
||||
/// [`Parse`]: https://docs.rs/syn/0.15/syn/parse/trait.Parse.html
|
||||
/// [`Parse`]: https://docs.rs/syn/1.0/syn/parse/trait.Parse.html
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
|
@ -772,7 +768,7 @@ impl fmt::Debug for Punct {
|
|||
/// A span must be provided explicitly which governs the name resolution
|
||||
/// behavior of the resulting identifier.
|
||||
///
|
||||
/// ```edition2018
|
||||
/// ```
|
||||
/// use proc_macro2::{Ident, Span};
|
||||
///
|
||||
/// fn main() {
|
||||
|
@ -784,7 +780,7 @@ impl fmt::Debug for Punct {
|
|||
///
|
||||
/// An ident can be interpolated into a token stream using the `quote!` macro.
|
||||
///
|
||||
/// ```edition2018
|
||||
/// ```
|
||||
/// use proc_macro2::{Ident, Span};
|
||||
/// use quote::quote;
|
||||
///
|
||||
|
@ -803,7 +799,7 @@ impl fmt::Debug for Punct {
|
|||
/// A string representation of the ident is available through the `to_string()`
|
||||
/// method.
|
||||
///
|
||||
/// ```edition2018
|
||||
/// ```
|
||||
/// # use proc_macro2::{Ident, Span};
|
||||
/// #
|
||||
/// # let ident = Ident::new("another_identifier", Span::call_site());
|
||||
|
@ -823,7 +819,7 @@ pub struct Ident {
|
|||
impl Ident {
|
||||
fn _new(inner: imp::Ident) -> Ident {
|
||||
Ident {
|
||||
inner: inner,
|
||||
inner,
|
||||
_marker: marker::PhantomData,
|
||||
}
|
||||
}
|
||||
|
@ -853,7 +849,12 @@ impl Ident {
|
|||
/// # Panics
|
||||
///
|
||||
/// Panics if the input string is neither a keyword nor a legal variable
|
||||
/// name.
|
||||
/// name. If you are not sure whether the string contains an identifier and
|
||||
/// need to handle an error case, use
|
||||
/// <a href="https://docs.rs/syn/1.0/syn/fn.parse_str.html"><code
|
||||
/// style="padding-right:0;">syn::parse_str</code></a><code
|
||||
/// style="padding-left:0;">::<Ident></code>
|
||||
/// rather than `Ident::new`.
|
||||
pub fn new(string: &str, span: Span) -> Ident {
|
||||
Ident::_new(imp::Ident::new(string, span.inner))
|
||||
}
|
||||
|
@ -986,7 +987,7 @@ macro_rules! unsuffixed_int_literals {
|
|||
impl Literal {
|
||||
fn _new(inner: imp::Literal) -> Literal {
|
||||
Literal {
|
||||
inner: inner,
|
||||
inner,
|
||||
_marker: marker::PhantomData,
|
||||
}
|
||||
}
|
||||
|
@ -1003,18 +1004,14 @@ impl Literal {
|
|||
u16_suffixed => u16,
|
||||
u32_suffixed => u32,
|
||||
u64_suffixed => u64,
|
||||
u128_suffixed => u128,
|
||||
usize_suffixed => usize,
|
||||
i8_suffixed => i8,
|
||||
i16_suffixed => i16,
|
||||
i32_suffixed => i32,
|
||||
i64_suffixed => i64,
|
||||
isize_suffixed => isize,
|
||||
}
|
||||
|
||||
#[cfg(u128)]
|
||||
suffixed_int_literals! {
|
||||
u128_suffixed => u128,
|
||||
i128_suffixed => i128,
|
||||
isize_suffixed => isize,
|
||||
}
|
||||
|
||||
unsuffixed_int_literals! {
|
||||
|
@ -1022,25 +1019,47 @@ impl Literal {
|
|||
u16_unsuffixed => u16,
|
||||
u32_unsuffixed => u32,
|
||||
u64_unsuffixed => u64,
|
||||
u128_unsuffixed => u128,
|
||||
usize_unsuffixed => usize,
|
||||
i8_unsuffixed => i8,
|
||||
i16_unsuffixed => i16,
|
||||
i32_unsuffixed => i32,
|
||||
i64_unsuffixed => i64,
|
||||
i128_unsuffixed => i128,
|
||||
isize_unsuffixed => isize,
|
||||
}
|
||||
|
||||
#[cfg(u128)]
|
||||
unsuffixed_int_literals! {
|
||||
u128_unsuffixed => u128,
|
||||
i128_unsuffixed => i128,
|
||||
}
|
||||
|
||||
/// Creates a new unsuffixed floating-point literal.
|
||||
///
|
||||
/// This constructor is similar to those like `Literal::i8_unsuffixed` where
|
||||
/// the float's value is emitted directly into the token but no suffix is
|
||||
/// used, so it may be inferred to be a `f64` later in the compiler.
|
||||
/// Literals created from negative numbers may not survive rountrips through
|
||||
/// `TokenStream` or strings and may be broken into two tokens (`-` and
|
||||
/// positive literal).
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// This function requires that the specified float is finite, for example
|
||||
/// if it is infinity or NaN this function will panic.
|
||||
pub fn f64_unsuffixed(f: f64) -> Literal {
|
||||
assert!(f.is_finite());
|
||||
Literal::_new(imp::Literal::f64_unsuffixed(f))
|
||||
}
|
||||
|
||||
/// Creates a new suffixed floating-point literal.
|
||||
///
|
||||
/// This constructor will create a literal like `1.0f64` where the value
|
||||
/// specified is the preceding part of the token and `f64` is the suffix of
|
||||
/// the token. This token will always be inferred to be an `f64` in the
|
||||
/// compiler. Literals created from negative numbers may not survive
|
||||
/// rountrips through `TokenStream` or strings and may be broken into two
|
||||
/// tokens (`-` and positive literal).
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// This function requires that the specified float is finite, for example
|
||||
/// if it is infinity or NaN this function will panic.
|
||||
pub fn f64_suffixed(f: f64) -> Literal {
|
||||
assert!(f.is_finite());
|
||||
Literal::_new(imp::Literal::f64_suffixed(f))
|
||||
|
@ -1064,30 +1083,61 @@ impl Literal {
|
|||
Literal::_new(imp::Literal::f32_unsuffixed(f))
|
||||
}
|
||||
|
||||
/// Creates a new suffixed floating-point literal.
|
||||
///
|
||||
/// This constructor will create a literal like `1.0f32` where the value
|
||||
/// specified is the preceding part of the token and `f32` is the suffix of
|
||||
/// the token. This token will always be inferred to be an `f32` in the
|
||||
/// compiler. Literals created from negative numbers may not survive
|
||||
/// rountrips through `TokenStream` or strings and may be broken into two
|
||||
/// tokens (`-` and positive literal).
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// This function requires that the specified float is finite, for example
|
||||
/// if it is infinity or NaN this function will panic.
|
||||
pub fn f32_suffixed(f: f32) -> Literal {
|
||||
assert!(f.is_finite());
|
||||
Literal::_new(imp::Literal::f32_suffixed(f))
|
||||
}
|
||||
|
||||
/// String literal.
|
||||
pub fn string(string: &str) -> Literal {
|
||||
Literal::_new(imp::Literal::string(string))
|
||||
}
|
||||
|
||||
/// Character literal.
|
||||
pub fn character(ch: char) -> Literal {
|
||||
Literal::_new(imp::Literal::character(ch))
|
||||
}
|
||||
|
||||
/// Byte string literal.
|
||||
pub fn byte_string(s: &[u8]) -> Literal {
|
||||
Literal::_new(imp::Literal::byte_string(s))
|
||||
}
|
||||
|
||||
/// Returns the span encompassing this literal.
|
||||
pub fn span(&self) -> Span {
|
||||
Span::_new(self.inner.span())
|
||||
}
|
||||
|
||||
/// Configures the span associated for this literal.
|
||||
pub fn set_span(&mut self, span: Span) {
|
||||
self.inner.set_span(span.inner);
|
||||
}
|
||||
|
||||
/// Returns a `Span` that is a subset of `self.span()` containing only
|
||||
/// the source bytes in range `range`. Returns `None` if the would-be
|
||||
/// trimmed span is outside the bounds of `self`.
|
||||
///
|
||||
/// Warning: the underlying [`proc_macro::Literal::subspan`] method is
|
||||
/// nightly-only. When called from within a procedural macro not using a
|
||||
/// nightly compiler, this method will always return `None`.
|
||||
///
|
||||
/// [`proc_macro::Literal::subspan`]: https://doc.rust-lang.org/proc_macro/struct.Literal.html#method.subspan
|
||||
pub fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> {
|
||||
self.inner.subspan(range).map(Span::_new)
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for Literal {
|
||||
|
@ -1108,14 +1158,14 @@ pub mod token_stream {
|
|||
use std::marker;
|
||||
use std::rc::Rc;
|
||||
|
||||
use imp;
|
||||
pub use TokenStream;
|
||||
use TokenTree;
|
||||
pub use crate::TokenStream;
|
||||
use crate::{imp, TokenTree};
|
||||
|
||||
/// An iterator over `TokenStream`'s `TokenTree`s.
|
||||
///
|
||||
/// The iteration is "shallow", e.g. the iterator doesn't recurse into
|
||||
/// delimited groups, and returns whole groups as token trees.
|
||||
#[derive(Clone)]
|
||||
pub struct IntoIter {
|
||||
inner: imp::TokenTreeIter,
|
||||
_marker: marker::PhantomData<Rc<()>>,
|
||||
|
|
|
@ -1,11 +1,9 @@
|
|||
//! Adapted from [`nom`](https://github.com/Geal/nom).
|
||||
|
||||
use crate::fallback::LexError;
|
||||
use std::str::{Bytes, CharIndices, Chars};
|
||||
|
||||
use unicode_xid::UnicodeXID;
|
||||
|
||||
use fallback::LexError;
|
||||
|
||||
#[derive(Copy, Clone, Eq, PartialEq)]
|
||||
pub struct Cursor<'a> {
|
||||
pub rest: &'a str,
|
||||
|
@ -95,7 +93,7 @@ pub fn whitespace(input: Cursor) -> PResult<()> {
|
|||
}
|
||||
}
|
||||
match bytes[i] {
|
||||
b' ' | 0x09...0x0d => {
|
||||
b' ' | 0x09..=0x0d => {
|
||||
i += 1;
|
||||
continue;
|
||||
}
|
||||
|
|
|
@ -1,14 +1,12 @@
|
|||
use std::fmt;
|
||||
use std::iter;
|
||||
use std::ops::RangeBounds;
|
||||
use std::panic::{self, PanicInfo};
|
||||
#[cfg(super_unstable)]
|
||||
use std::path::PathBuf;
|
||||
use std::str::FromStr;
|
||||
|
||||
use fallback;
|
||||
use proc_macro;
|
||||
|
||||
use {Delimiter, Punct, Spacing, TokenTree};
|
||||
use crate::{fallback, Delimiter, Punct, Spacing, TokenTree};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub enum TokenStream {
|
||||
|
@ -25,7 +23,7 @@ fn nightly_works() -> bool {
|
|||
use std::sync::atomic::*;
|
||||
use std::sync::Once;
|
||||
|
||||
static WORKS: AtomicUsize = ATOMIC_USIZE_INIT;
|
||||
static WORKS: AtomicUsize = AtomicUsize::new(0);
|
||||
static INIT: Once = Once::new();
|
||||
|
||||
match WORKS.load(Ordering::SeqCst) {
|
||||
|
@ -59,7 +57,7 @@ fn nightly_works() -> bool {
|
|||
// not occur, they need to call e.g. `proc_macro2::Span::call_site()` from
|
||||
// the main thread before launching any other threads.
|
||||
INIT.call_once(|| {
|
||||
type PanicHook = Fn(&PanicInfo) + Sync + Send + 'static;
|
||||
type PanicHook = dyn Fn(&PanicInfo) + Sync + Send + 'static;
|
||||
|
||||
let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ });
|
||||
let sanity_check = &*null_hook as *const PanicHook;
|
||||
|
@ -199,17 +197,6 @@ impl iter::FromIterator<TokenStream> for TokenStream {
|
|||
fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
|
||||
let mut streams = streams.into_iter();
|
||||
match streams.next() {
|
||||
#[cfg(slow_extend)]
|
||||
Some(TokenStream::Compiler(first)) => {
|
||||
let stream = iter::once(first)
|
||||
.chain(streams.map(|s| match s {
|
||||
TokenStream::Compiler(s) => s,
|
||||
TokenStream::Fallback(_) => mismatch(),
|
||||
}))
|
||||
.collect();
|
||||
TokenStream::Compiler(stream)
|
||||
}
|
||||
#[cfg(not(slow_extend))]
|
||||
Some(TokenStream::Compiler(mut first)) => {
|
||||
first.extend(streams.map(|s| match s {
|
||||
TokenStream::Compiler(s) => s,
|
||||
|
@ -233,27 +220,11 @@ impl Extend<TokenTree> for TokenStream {
|
|||
fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
|
||||
match self {
|
||||
TokenStream::Compiler(tts) => {
|
||||
#[cfg(not(slow_extend))]
|
||||
{
|
||||
tts.extend(
|
||||
streams
|
||||
.into_iter()
|
||||
.map(|t| TokenStream::from(t).unwrap_nightly()),
|
||||
);
|
||||
}
|
||||
#[cfg(slow_extend)]
|
||||
{
|
||||
*tts =
|
||||
tts.clone()
|
||||
.into_iter()
|
||||
.chain(streams.into_iter().map(TokenStream::from).flat_map(
|
||||
|t| match t {
|
||||
TokenStream::Compiler(tts) => tts.into_iter(),
|
||||
_ => mismatch(),
|
||||
},
|
||||
))
|
||||
.collect();
|
||||
}
|
||||
tts.extend(
|
||||
streams
|
||||
.into_iter()
|
||||
.map(|t| TokenStream::from(t).unwrap_nightly()),
|
||||
);
|
||||
}
|
||||
TokenStream::Fallback(tts) => tts.extend(streams),
|
||||
}
|
||||
|
@ -317,6 +288,7 @@ impl fmt::Debug for LexError {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub enum TokenTreeIter {
|
||||
Compiler(proc_macro::token_stream::IntoIter),
|
||||
Fallback(fallback::TokenTreeIter),
|
||||
|
@ -343,18 +315,18 @@ impl Iterator for TokenTreeIter {
|
|||
TokenTreeIter::Fallback(iter) => return iter.next(),
|
||||
};
|
||||
Some(match token {
|
||||
proc_macro::TokenTree::Group(tt) => ::Group::_new(Group::Compiler(tt)).into(),
|
||||
proc_macro::TokenTree::Group(tt) => crate::Group::_new(Group::Compiler(tt)).into(),
|
||||
proc_macro::TokenTree::Punct(tt) => {
|
||||
let spacing = match tt.spacing() {
|
||||
proc_macro::Spacing::Joint => Spacing::Joint,
|
||||
proc_macro::Spacing::Alone => Spacing::Alone,
|
||||
};
|
||||
let mut o = Punct::new(tt.as_char(), spacing);
|
||||
o.set_span(::Span::_new(Span::Compiler(tt.span())));
|
||||
o.set_span(crate::Span::_new(Span::Compiler(tt.span())));
|
||||
o.into()
|
||||
}
|
||||
proc_macro::TokenTree::Ident(s) => ::Ident::_new(Ident::Compiler(s)).into(),
|
||||
proc_macro::TokenTree::Literal(l) => ::Literal::_new(Literal::Compiler(l)).into(),
|
||||
proc_macro::TokenTree::Ident(s) => crate::Ident::_new(Ident::Compiler(s)).into(),
|
||||
proc_macro::TokenTree::Literal(l) => crate::Literal::_new(Literal::Compiler(l)).into(),
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -477,12 +449,12 @@ impl Span {
|
|||
#[cfg(any(super_unstable, feature = "span-locations"))]
|
||||
pub fn start(&self) -> LineColumn {
|
||||
match self {
|
||||
#[cfg(nightly)]
|
||||
#[cfg(proc_macro_span)]
|
||||
Span::Compiler(s) => {
|
||||
let proc_macro::LineColumn { line, column } = s.start();
|
||||
LineColumn { line, column }
|
||||
}
|
||||
#[cfg(not(nightly))]
|
||||
#[cfg(not(proc_macro_span))]
|
||||
Span::Compiler(_) => LineColumn { line: 0, column: 0 },
|
||||
Span::Fallback(s) => {
|
||||
let fallback::LineColumn { line, column } = s.start();
|
||||
|
@ -494,12 +466,12 @@ impl Span {
|
|||
#[cfg(any(super_unstable, feature = "span-locations"))]
|
||||
pub fn end(&self) -> LineColumn {
|
||||
match self {
|
||||
#[cfg(nightly)]
|
||||
#[cfg(proc_macro_span)]
|
||||
Span::Compiler(s) => {
|
||||
let proc_macro::LineColumn { line, column } = s.end();
|
||||
LineColumn { line, column }
|
||||
}
|
||||
#[cfg(not(nightly))]
|
||||
#[cfg(not(proc_macro_span))]
|
||||
Span::Compiler(_) => LineColumn { line: 0, column: 0 },
|
||||
Span::Fallback(s) => {
|
||||
let fallback::LineColumn { line, column } = s.end();
|
||||
|
@ -508,9 +480,9 @@ impl Span {
|
|||
}
|
||||
}
|
||||
|
||||
#[cfg(super_unstable)]
|
||||
pub fn join(&self, other: Span) -> Option<Span> {
|
||||
let ret = match (self, other) {
|
||||
#[cfg(proc_macro_span)]
|
||||
(Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.join(b)?),
|
||||
(Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.join(b)?),
|
||||
_ => return None,
|
||||
|
@ -535,9 +507,9 @@ impl Span {
|
|||
}
|
||||
}
|
||||
|
||||
impl From<proc_macro::Span> for ::Span {
|
||||
fn from(proc_span: proc_macro::Span) -> ::Span {
|
||||
::Span::_new(Span::Compiler(proc_span))
|
||||
impl From<proc_macro::Span> for crate::Span {
|
||||
fn from(proc_span: proc_macro::Span) -> crate::Span {
|
||||
crate::Span::_new(Span::Compiler(proc_span))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -615,18 +587,22 @@ impl Group {
|
|||
}
|
||||
}
|
||||
|
||||
#[cfg(super_unstable)]
|
||||
pub fn span_open(&self) -> Span {
|
||||
match self {
|
||||
#[cfg(proc_macro_span)]
|
||||
Group::Compiler(g) => Span::Compiler(g.span_open()),
|
||||
#[cfg(not(proc_macro_span))]
|
||||
Group::Compiler(g) => Span::Compiler(g.span()),
|
||||
Group::Fallback(g) => Span::Fallback(g.span_open()),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(super_unstable)]
|
||||
pub fn span_close(&self) -> Span {
|
||||
match self {
|
||||
#[cfg(proc_macro_span)]
|
||||
Group::Compiler(g) => Span::Compiler(g.span_close()),
|
||||
#[cfg(not(proc_macro_span))]
|
||||
Group::Compiler(g) => Span::Compiler(g.span()),
|
||||
Group::Fallback(g) => Span::Fallback(g.span_close()),
|
||||
}
|
||||
}
|
||||
|
@ -802,40 +778,32 @@ impl Literal {
|
|||
u16_suffixed => u16,
|
||||
u32_suffixed => u32,
|
||||
u64_suffixed => u64,
|
||||
u128_suffixed => u128,
|
||||
usize_suffixed => usize,
|
||||
i8_suffixed => i8,
|
||||
i16_suffixed => i16,
|
||||
i32_suffixed => i32,
|
||||
i64_suffixed => i64,
|
||||
i128_suffixed => i128,
|
||||
isize_suffixed => isize,
|
||||
|
||||
f32_suffixed => f32,
|
||||
f64_suffixed => f64,
|
||||
}
|
||||
|
||||
#[cfg(u128)]
|
||||
suffixed_numbers! {
|
||||
i128_suffixed => i128,
|
||||
u128_suffixed => u128,
|
||||
}
|
||||
|
||||
unsuffixed_integers! {
|
||||
u8_unsuffixed => u8,
|
||||
u16_unsuffixed => u16,
|
||||
u32_unsuffixed => u32,
|
||||
u64_unsuffixed => u64,
|
||||
u128_unsuffixed => u128,
|
||||
usize_unsuffixed => usize,
|
||||
i8_unsuffixed => i8,
|
||||
i16_unsuffixed => i16,
|
||||
i32_unsuffixed => i32,
|
||||
i64_unsuffixed => i64,
|
||||
isize_unsuffixed => isize,
|
||||
}
|
||||
|
||||
#[cfg(u128)]
|
||||
unsuffixed_integers! {
|
||||
i128_unsuffixed => i128,
|
||||
u128_unsuffixed => u128,
|
||||
isize_unsuffixed => isize,
|
||||
}
|
||||
|
||||
pub fn f32_unsuffixed(f: f32) -> Literal {
|
||||
|
@ -893,6 +861,16 @@ impl Literal {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> {
|
||||
match self {
|
||||
#[cfg(proc_macro_span)]
|
||||
Literal::Compiler(lit) => lit.subspan(range).map(Span::Compiler),
|
||||
#[cfg(not(proc_macro_span))]
|
||||
Literal::Compiler(_lit) => None,
|
||||
Literal::Fallback(lit) => lit.subspan(range).map(Span::Fallback),
|
||||
}
|
||||
}
|
||||
|
||||
fn unwrap_nightly(self) -> proc_macro::Literal {
|
||||
match self {
|
||||
Literal::Compiler(s) => s,
|
||||
|
|
|
@ -0,0 +1,8 @@
|
|||
#[test]
|
||||
#[ignore]
|
||||
fn make_sure_no_proc_macro() {
|
||||
assert!(
|
||||
!cfg!(feature = "proc-macro"),
|
||||
"still compiled with proc_macro?"
|
||||
);
|
||||
}
|
|
@ -1,5 +1,3 @@
|
|||
extern crate proc_macro2;
|
||||
|
||||
use proc_macro2::*;
|
||||
|
||||
macro_rules! assert_impl {
|
||||
|
|
|
@ -1,11 +1,9 @@
|
|||
extern crate proc_macro2;
|
||||
|
||||
use std::str::{self, FromStr};
|
||||
|
||||
use proc_macro2::{Ident, Literal, Spacing, Span, TokenStream, TokenTree};
|
||||
|
||||
#[test]
|
||||
fn terms() {
|
||||
fn idents() {
|
||||
assert_eq!(
|
||||
Ident::new("String", Span::call_site()).to_string(),
|
||||
"String"
|
||||
|
@ -16,7 +14,7 @@ fn terms() {
|
|||
|
||||
#[test]
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
fn raw_terms() {
|
||||
fn raw_idents() {
|
||||
assert_eq!(
|
||||
Ident::new_raw("String", Span::call_site()).to_string(),
|
||||
"r#String"
|
||||
|
@ -27,37 +25,37 @@ fn raw_terms() {
|
|||
|
||||
#[test]
|
||||
#[should_panic(expected = "Ident is not allowed to be empty; use Option<Ident>")]
|
||||
fn term_empty() {
|
||||
fn ident_empty() {
|
||||
Ident::new("", Span::call_site());
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic(expected = "Ident cannot be a number; use Literal instead")]
|
||||
fn term_number() {
|
||||
fn ident_number() {
|
||||
Ident::new("255", Span::call_site());
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic(expected = "\"a#\" is not a valid Ident")]
|
||||
fn term_invalid() {
|
||||
fn ident_invalid() {
|
||||
Ident::new("a#", Span::call_site());
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic(expected = "not a valid Ident")]
|
||||
fn raw_term_empty() {
|
||||
fn raw_ident_empty() {
|
||||
Ident::new("r#", Span::call_site());
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic(expected = "not a valid Ident")]
|
||||
fn raw_term_number() {
|
||||
fn raw_ident_number() {
|
||||
Ident::new("r#255", Span::call_site());
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic(expected = "\"r#a#\" is not a valid Ident")]
|
||||
fn raw_term_invalid() {
|
||||
fn raw_ident_invalid() {
|
||||
Ident::new("r#a#", Span::call_site());
|
||||
}
|
||||
|
||||
|
@ -80,12 +78,40 @@ fn lifetime_invalid() {
|
|||
}
|
||||
|
||||
#[test]
|
||||
fn literals() {
|
||||
fn literal_string() {
|
||||
assert_eq!(Literal::string("foo").to_string(), "\"foo\"");
|
||||
assert_eq!(Literal::string("\"").to_string(), "\"\\\"\"");
|
||||
assert_eq!(Literal::string("didn't").to_string(), "\"didn't\"");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn literal_character() {
|
||||
assert_eq!(Literal::character('x').to_string(), "'x'");
|
||||
assert_eq!(Literal::character('\'').to_string(), "'\\''");
|
||||
assert_eq!(Literal::character('"').to_string(), "'\"'");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn literal_float() {
|
||||
assert_eq!(Literal::f32_unsuffixed(10.0).to_string(), "10.0");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn literal_suffix() {
|
||||
fn token_count(p: &str) -> usize {
|
||||
p.parse::<TokenStream>().unwrap().into_iter().count()
|
||||
}
|
||||
|
||||
assert_eq!(token_count("999u256"), 1);
|
||||
assert_eq!(token_count("999r#u256"), 3);
|
||||
assert_eq!(token_count("1."), 1);
|
||||
assert_eq!(token_count("1.f32"), 3);
|
||||
assert_eq!(token_count("1.0_0"), 1);
|
||||
assert_eq!(token_count("1._0"), 3);
|
||||
assert_eq!(token_count("1._m"), 3);
|
||||
assert_eq!(token_count("\"\"s"), 1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn roundtrip() {
|
||||
fn roundtrip(p: &str) {
|
||||
|
@ -113,6 +139,9 @@ fn roundtrip() {
|
|||
9
|
||||
0
|
||||
0xffffffffffffffffffffffffffffffff
|
||||
1x
|
||||
1u80
|
||||
1f320
|
||||
",
|
||||
);
|
||||
roundtrip("'a");
|
||||
|
@ -129,9 +158,6 @@ fn fail() {
|
|||
panic!("should have failed to parse: {}\n{:#?}", p, s);
|
||||
}
|
||||
}
|
||||
fail("1x");
|
||||
fail("1u80");
|
||||
fail("1f320");
|
||||
fail("' static");
|
||||
fail("r#1");
|
||||
fail("r#_");
|
||||
|
@ -334,6 +360,27 @@ fn test_debug_tokenstream() {
|
|||
|
||||
#[cfg(not(procmacro2_semver_exempt))]
|
||||
let expected = "\
|
||||
TokenStream [
|
||||
Group {
|
||||
delimiter: Bracket,
|
||||
stream: TokenStream [
|
||||
Ident {
|
||||
sym: a,
|
||||
},
|
||||
Punct {
|
||||
op: '+',
|
||||
spacing: Alone,
|
||||
},
|
||||
Literal {
|
||||
lit: 1,
|
||||
},
|
||||
],
|
||||
},
|
||||
]\
|
||||
";
|
||||
|
||||
#[cfg(not(procmacro2_semver_exempt))]
|
||||
let expected_before_trailing_commas = "\
|
||||
TokenStream [
|
||||
Group {
|
||||
delimiter: Bracket,
|
||||
|
@ -355,6 +402,31 @@ TokenStream [
|
|||
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
let expected = "\
|
||||
TokenStream [
|
||||
Group {
|
||||
delimiter: Bracket,
|
||||
stream: TokenStream [
|
||||
Ident {
|
||||
sym: a,
|
||||
span: bytes(2..3),
|
||||
},
|
||||
Punct {
|
||||
op: '+',
|
||||
spacing: Alone,
|
||||
span: bytes(4..5),
|
||||
},
|
||||
Literal {
|
||||
lit: 1,
|
||||
span: bytes(6..7),
|
||||
},
|
||||
],
|
||||
span: bytes(1..8),
|
||||
},
|
||||
]\
|
||||
";
|
||||
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
let expected_before_trailing_commas = "\
|
||||
TokenStream [
|
||||
Group {
|
||||
delimiter: Bracket,
|
||||
|
@ -378,7 +450,12 @@ TokenStream [
|
|||
]\
|
||||
";
|
||||
|
||||
assert_eq!(expected, format!("{:#?}", tts));
|
||||
let actual = format!("{:#?}", tts);
|
||||
if actual.ends_with(",\n]") {
|
||||
assert_eq!(expected, actual);
|
||||
} else {
|
||||
assert_eq!(expected_before_trailing_commas, actual);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
|
@ -0,0 +1 @@
|
|||
{"files":{"Cargo.toml":"68f4dc89836a05a2347086addab1849567ef8073c552ec0dfca8f96fd20550f9","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","README.md":"d9392d4c7af3bf9714f0a95801d64de46ffd4558cdfeea0eb85b414e555abb72","src/ext.rs":"03919239a20f8393288783a21bf6fdee12e405d13d162c9faa6f8f5ce54b003b","src/lib.rs":"5345b4d2e6f923724cec35c62d7397e6f04d5503d2d813bff7bbaa7ffc39a9cf","src/to_tokens.rs":"0dcd15cba2aa83abeb47b9a1babce7a29643b5efa2fe620b070cb37bb21a84f1","tests/conditional/integer128.rs":"d83e21a91efbaa801a82ae499111bdda2d31edaa620e78c0199eba42d69c9ee6","tests/test.rs":"810013d7fd77b738abd0ace90ce2f2f3e219c757652eabab29bc1c0ce4a73b24"},"package":"cdd8e04bd9c52e0342b406469d494fcb033be4bdbe5c606016defbb1681411e1"}
|
|
@ -0,0 +1,33 @@
|
|||
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
|
||||
#
|
||||
# When uploading crates to the registry Cargo will automatically
|
||||
# "normalize" Cargo.toml files for maximal compatibility
|
||||
# with all versions of Cargo and also rewrite `path` dependencies
|
||||
# to registry (e.g. crates.io) dependencies
|
||||
#
|
||||
# If you believe there's an error in this file please file an
|
||||
# issue against the rust-lang/cargo repository. If you're
|
||||
# editing this file be aware that the upstream Cargo.toml
|
||||
# will likely look very different (and much more reasonable)
|
||||
|
||||
[package]
|
||||
name = "quote"
|
||||
version = "0.6.11"
|
||||
authors = ["David Tolnay <dtolnay@gmail.com>"]
|
||||
include = ["Cargo.toml", "src/**/*.rs", "tests/**/*.rs", "README.md", "LICENSE-APACHE", "LICENSE-MIT"]
|
||||
description = "Quasi-quoting macro quote!(...)"
|
||||
documentation = "https://docs.rs/quote/"
|
||||
readme = "README.md"
|
||||
keywords = ["syn"]
|
||||
categories = ["development-tools::procedural-macro-helpers"]
|
||||
license = "MIT/Apache-2.0"
|
||||
repository = "https://github.com/dtolnay/quote"
|
||||
[dependencies.proc-macro2]
|
||||
version = "0.4.21"
|
||||
default-features = false
|
||||
|
||||
[features]
|
||||
default = ["proc-macro"]
|
||||
proc-macro = ["proc-macro2/proc-macro"]
|
||||
[badges.travis-ci]
|
||||
repository = "dtolnay/quote"
|
|
@ -0,0 +1,201 @@
|
|||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
|
@ -0,0 +1,25 @@
|
|||
Copyright (c) 2016 The Rust Project Developers
|
||||
|
||||
Permission is hereby granted, free of charge, to any
|
||||
person obtaining a copy of this software and associated
|
||||
documentation files (the "Software"), to deal in the
|
||||
Software without restriction, including without
|
||||
limitation the rights to use, copy, modify, merge,
|
||||
publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software
|
||||
is furnished to do so, subject to the following
|
||||
conditions:
|
||||
|
||||
The above copyright notice and this permission notice
|
||||
shall be included in all copies or substantial portions
|
||||
of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
|
||||
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
|
||||
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
|
||||
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
|
||||
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
||||
DEALINGS IN THE SOFTWARE.
|
|
@ -0,0 +1,241 @@
|
|||
Rust Quasi-Quoting
|
||||
==================
|
||||
|
||||
[![Build Status](https://api.travis-ci.org/dtolnay/quote.svg?branch=master)](https://travis-ci.org/dtolnay/quote)
|
||||
[![Latest Version](https://img.shields.io/crates/v/quote.svg)](https://crates.io/crates/quote)
|
||||
[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/quote/)
|
||||
|
||||
This crate provides the [`quote!`] macro for turning Rust syntax tree data
|
||||
structures into tokens of source code.
|
||||
|
||||
[`quote!`]: https://docs.rs/quote/0.6/quote/macro.quote.html
|
||||
|
||||
Procedural macros in Rust receive a stream of tokens as input, execute arbitrary
|
||||
Rust code to determine how to manipulate those tokens, and produce a stream of
|
||||
tokens to hand back to the compiler to compile into the caller's crate.
|
||||
Quasi-quoting is a solution to one piece of that -- producing tokens to return
|
||||
to the compiler.
|
||||
|
||||
The idea of quasi-quoting is that we write *code* that we treat as *data*.
|
||||
Within the `quote!` macro, we can write what looks like code to our text editor
|
||||
or IDE. We get all the benefits of the editor's brace matching, syntax
|
||||
highlighting, indentation, and maybe autocompletion. But rather than compiling
|
||||
that as code into the current crate, we can treat it as data, pass it around,
|
||||
mutate it, and eventually hand it back to the compiler as tokens to compile into
|
||||
the macro caller's crate.
|
||||
|
||||
This crate is motivated by the procedural macro use case, but is a
|
||||
general-purpose Rust quasi-quoting library and is not specific to procedural
|
||||
macros.
|
||||
|
||||
*Version requirement: Quote supports any compiler version back to Rust's very
|
||||
first support for procedural macros in Rust 1.15.0.*
|
||||
|
||||
[*Release notes*](https://github.com/dtolnay/quote/releases)
|
||||
|
||||
```toml
|
||||
[dependencies]
|
||||
quote = "0.6"
|
||||
```
|
||||
|
||||
## Syntax
|
||||
|
||||
The quote crate provides a [`quote!`] macro within which you can write Rust code
|
||||
that gets packaged into a [`TokenStream`] and can be treated as data. You should
|
||||
think of `TokenStream` as representing a fragment of Rust source code.
|
||||
|
||||
[`TokenStream`]: https://docs.rs/proc-macro2/0.4/proc_macro2/struct.TokenStream.html
|
||||
|
||||
Within the `quote!` macro, interpolation is done with `#var`. Any type
|
||||
implementing the [`quote::ToTokens`] trait can be interpolated. This includes
|
||||
most Rust primitive types as well as most of the syntax tree types from [`syn`].
|
||||
|
||||
[`quote::ToTokens`]: https://docs.rs/quote/0.6/quote/trait.ToTokens.html
|
||||
[`syn`]: https://github.com/dtolnay/syn
|
||||
|
||||
```rust
|
||||
let tokens = quote! {
|
||||
struct SerializeWith #generics #where_clause {
|
||||
value: &'a #field_ty,
|
||||
phantom: core::marker::PhantomData<#item_ty>,
|
||||
}
|
||||
|
||||
impl #generics serde::Serialize for SerializeWith #generics #where_clause {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
#path(self.value, serializer)
|
||||
}
|
||||
}
|
||||
|
||||
SerializeWith {
|
||||
value: #value,
|
||||
phantom: core::marker::PhantomData::<#item_ty>,
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
## Repetition
|
||||
|
||||
Repetition is done using `#(...)*` or `#(...),*` similar to `macro_rules!`. This
|
||||
iterates through the elements of any variable interpolated within the repetition
|
||||
and inserts a copy of the repetition body for each one. The variables in an
|
||||
interpolation may be anything that implements `IntoIterator`, including `Vec` or
|
||||
a pre-existing iterator.
|
||||
|
||||
- `#(#var)*` — no separators
|
||||
- `#(#var),*` — the character before the asterisk is used as a separator
|
||||
- `#( struct #var; )*` — the repetition can contain other things
|
||||
- `#( #k => println!("{}", #v), )*` — even multiple interpolations
|
||||
|
||||
Note that there is a difference between `#(#var ,)*` and `#(#var),*`—the latter
|
||||
does not produce a trailing comma. This matches the behavior of delimiters in
|
||||
`macro_rules!`.
|
||||
|
||||
## Returning tokens to the compiler
|
||||
|
||||
The `quote!` macro evaluates to an expression of type
|
||||
`proc_macro2::TokenStream`. Meanwhile Rust procedural macros are expected to
|
||||
return the type `proc_macro::TokenStream`.
|
||||
|
||||
The difference between the two types is that `proc_macro` types are entirely
|
||||
specific to procedural macros and cannot ever exist in code outside of a
|
||||
procedural macro, while `proc_macro2` types may exist anywhere including tests
|
||||
and non-macro code like main.rs and build.rs. This is why even the procedural
|
||||
macro ecosystem is largely built around `proc_macro2`, because that ensures the
|
||||
libraries are unit testable and accessible in non-macro contexts.
|
||||
|
||||
There is a [`From`]-conversion in both directions so returning the output of
|
||||
`quote!` from a procedural macro usually looks like `tokens.into()` or
|
||||
`proc_macro::TokenStream::from(tokens)`.
|
||||
|
||||
[`From`]: https://doc.rust-lang.org/std/convert/trait.From.html
|
||||
|
||||
## Examples
|
||||
|
||||
### Combining quoted fragments
|
||||
|
||||
Usually you don't end up constructing an entire final `TokenStream` in one
|
||||
piece. Different parts may come from different helper functions. The tokens
|
||||
produced by `quote!` themselves implement `ToTokens` and so can be interpolated
|
||||
into later `quote!` invocations to build up a final result.
|
||||
|
||||
```rust
|
||||
let type_definition = quote! {...};
|
||||
let methods = quote! {...};
|
||||
|
||||
let tokens = quote! {
|
||||
#type_definition
|
||||
#methods
|
||||
};
|
||||
```
|
||||
|
||||
### Constructing identifiers
|
||||
|
||||
Suppose we have an identifier `ident` which came from somewhere in a macro
|
||||
input and we need to modify it in some way for the macro output. Let's consider
|
||||
prepending the identifier with an underscore.
|
||||
|
||||
Simply interpolating the identifier next to an underscore will not have the
|
||||
behavior of concatenating them. The underscore and the identifier will continue
|
||||
to be two separate tokens as if you had written `_ x`.
|
||||
|
||||
```rust
|
||||
// incorrect
|
||||
quote! {
|
||||
let mut _#ident = 0;
|
||||
}
|
||||
```
|
||||
|
||||
The solution is to perform token-level manipulations using the APIs provided by
|
||||
Syn and proc-macro2.
|
||||
|
||||
```rust
|
||||
let concatenated = format!("_{}", ident);
|
||||
let varname = syn::Ident::new(&concatenated, ident.span());
|
||||
quote! {
|
||||
let mut #varname = 0;
|
||||
}
|
||||
```
|
||||
|
||||
### Making method calls
|
||||
|
||||
Let's say our macro requires some type specified in the macro input to have a
|
||||
constructor called `new`. We have the type in a variable called `field_type` of
|
||||
type `syn::Type` and want to invoke the constructor.
|
||||
|
||||
```rust
|
||||
// incorrect
|
||||
quote! {
|
||||
let value = #field_type::new();
|
||||
}
|
||||
```
|
||||
|
||||
This works only sometimes. If `field_type` is `String`, the expanded code
|
||||
contains `String::new()` which is fine. But if `field_type` is something like
|
||||
`Vec<i32>` then the expanded code is `Vec<i32>::new()` which is invalid syntax.
|
||||
Ordinarily in handwritten Rust we would write `Vec::<i32>::new()` but for macros
|
||||
often the following is more convenient.
|
||||
|
||||
```rust
|
||||
quote! {
|
||||
let value = <#field_type>::new();
|
||||
}
|
||||
```
|
||||
|
||||
This expands to `<Vec<i32>>::new()` which behaves correctly.
|
||||
|
||||
A similar pattern is appropriate for trait methods.
|
||||
|
||||
```rust
|
||||
quote! {
|
||||
let value = <#field_type as core::default::Default>::default();
|
||||
}
|
||||
```
|
||||
|
||||
## Hygiene
|
||||
|
||||
Any interpolated tokens preserve the `Span` information provided by their
|
||||
`ToTokens` implementation. Tokens that originate within a `quote!` invocation
|
||||
are spanned with [`Span::call_site()`].
|
||||
|
||||
[`Span::call_site()`]: https://docs.rs/proc-macro2/0.4/proc_macro2/struct.Span.html#method.call_site
|
||||
|
||||
A different span can be provided explicitly through the [`quote_spanned!`]
|
||||
macro.
|
||||
|
||||
[`quote_spanned!`]: https://docs.rs/quote/0.6/quote/macro.quote_spanned.html
|
||||
|
||||
### Limitations
|
||||
|
||||
- A non-repeating variable may not be interpolated inside of a repeating block
|
||||
([#7]).
|
||||
- The same variable may not be interpolated more than once inside of a repeating
|
||||
block ([#8]).
|
||||
|
||||
[#7]: https://github.com/dtolnay/quote/issues/7
|
||||
[#8]: https://github.com/dtolnay/quote/issues/8
|
||||
|
||||
### Recursion limit
|
||||
|
||||
The `quote!` macro relies on deep recursion so some large invocations may fail
|
||||
with "recursion limit reached" when you compile. If it fails, bump up the
|
||||
recursion limit by adding `#![recursion_limit = "128"]` to your crate. An even
|
||||
higher limit may be necessary for especially large invocations. You don't need
|
||||
this unless the compiler tells you that you need it.
|
||||
|
||||
## License
|
||||
|
||||
Licensed under either of
|
||||
|
||||
* Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
|
||||
* MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
|
||||
|
||||
at your option.
|
||||
|
||||
### Contribution
|
||||
|
||||
Unless you explicitly state otherwise, any contribution intentionally submitted
|
||||
for inclusion in this crate by you, as defined in the Apache-2.0 license, shall
|
||||
be dual licensed as above, without any additional terms or conditions.
|
|
@ -0,0 +1,112 @@
|
|||
use super::ToTokens;
|
||||
|
||||
use std::iter;
|
||||
|
||||
use proc_macro2::{TokenStream, TokenTree};
|
||||
|
||||
/// TokenStream extension trait with methods for appending tokens.
///
/// This trait is sealed and cannot be implemented outside of the `quote` crate.
pub trait TokenStreamExt: private::Sealed {
    /// For use by `ToTokens` implementations.
    ///
    /// Appends the token specified to this list of tokens.
    fn append<U>(&mut self, token: U)
    where
        U: Into<TokenTree>;

    /// For use by `ToTokens` implementations.
    ///
    /// Appends each item of the iterator in order, with no separator between
    /// items.
    ///
    /// ```edition2018
    /// # use quote::{quote, TokenStreamExt, ToTokens};
    /// # use proc_macro2::TokenStream;
    /// #
    /// struct X;
    ///
    /// impl ToTokens for X {
    ///     fn to_tokens(&self, tokens: &mut TokenStream) {
    ///         tokens.append_all(&[true, false]);
    ///     }
    /// }
    ///
    /// let tokens = quote!(#X);
    /// assert_eq!(tokens.to_string(), "true false");
    /// ```
    fn append_all<T, I>(&mut self, iter: I)
    where
        T: ToTokens,
        I: IntoIterator<Item = T>;

    /// For use by `ToTokens` implementations.
    ///
    /// Appends all of the items in the iterator `I`, separated by the tokens
    /// `U`.
    // No separator is emitted after the final item (c.f. `append_terminated`).
    fn append_separated<T, I, U>(&mut self, iter: I, op: U)
    where
        T: ToTokens,
        I: IntoIterator<Item = T>,
        U: ToTokens;

    /// For use by `ToTokens` implementations.
    ///
    /// Appends all tokens in the iterator `I`, appending `U` after each
    /// element, including after the last element of the iterator.
    fn append_terminated<T, I, U>(&mut self, iter: I, term: U)
    where
        T: ToTokens,
        I: IntoIterator<Item = T>,
        U: ToTokens;
}
|
||||
|
||||
impl TokenStreamExt for TokenStream {
|
||||
fn append<U>(&mut self, token: U)
|
||||
where
|
||||
U: Into<TokenTree>,
|
||||
{
|
||||
self.extend(iter::once(token.into()));
|
||||
}
|
||||
|
||||
fn append_all<T, I>(&mut self, iter: I)
|
||||
where
|
||||
T: ToTokens,
|
||||
I: IntoIterator<Item = T>,
|
||||
{
|
||||
for token in iter {
|
||||
token.to_tokens(self);
|
||||
}
|
||||
}
|
||||
|
||||
fn append_separated<T, I, U>(&mut self, iter: I, op: U)
|
||||
where
|
||||
T: ToTokens,
|
||||
I: IntoIterator<Item = T>,
|
||||
U: ToTokens,
|
||||
{
|
||||
for (i, token) in iter.into_iter().enumerate() {
|
||||
if i > 0 {
|
||||
op.to_tokens(self);
|
||||
}
|
||||
token.to_tokens(self);
|
||||
}
|
||||
}
|
||||
|
||||
fn append_terminated<T, I, U>(&mut self, iter: I, term: U)
|
||||
where
|
||||
T: ToTokens,
|
||||
I: IntoIterator<Item = T>,
|
||||
U: ToTokens,
|
||||
{
|
||||
for token in iter {
|
||||
token.to_tokens(self);
|
||||
term.to_tokens(self);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
mod private {
|
||||
use proc_macro2::TokenStream;
|
||||
|
||||
pub trait Sealed {}
|
||||
|
||||
impl Sealed for TokenStream {}
|
||||
}
|
|
@ -0,0 +1,969 @@
|
|||
//! This crate provides the [`quote!`] macro for turning Rust syntax tree data
|
||||
//! structures into tokens of source code.
|
||||
//!
|
||||
//! [`quote!`]: macro.quote.html
|
||||
//!
|
||||
//! Procedural macros in Rust receive a stream of tokens as input, execute
|
||||
//! arbitrary Rust code to determine how to manipulate those tokens, and produce
|
||||
//! a stream of tokens to hand back to the compiler to compile into the caller's
|
||||
//! crate. Quasi-quoting is a solution to one piece of that -- producing tokens
|
||||
//! to return to the compiler.
|
||||
//!
|
||||
//! The idea of quasi-quoting is that we write *code* that we treat as *data*.
|
||||
//! Within the `quote!` macro, we can write what looks like code to our text
|
||||
//! editor or IDE. We get all the benefits of the editor's brace matching,
|
||||
//! syntax highlighting, indentation, and maybe autocompletion. But rather than
|
||||
//! compiling that as code into the current crate, we can treat it as data, pass
|
||||
//! it around, mutate it, and eventually hand it back to the compiler as tokens
|
||||
//! to compile into the macro caller's crate.
|
||||
//!
|
||||
//! This crate is motivated by the procedural macro use case, but is a
|
||||
//! general-purpose Rust quasi-quoting library and is not specific to procedural
|
||||
//! macros.
|
||||
//!
|
||||
//! *Version requirement: Quote supports any compiler version back to Rust's
|
||||
//! very first support for procedural macros in Rust 1.15.0.*
|
||||
//!
|
||||
//! ```toml
|
||||
//! [dependencies]
|
||||
//! quote = "0.6"
|
||||
//! ```
|
||||
//!
|
||||
//! # Example
|
||||
//!
|
||||
//! The following quasi-quoted block of code is something you might find in [a]
|
||||
//! procedural macro having to do with data structure serialization. The `#var`
|
||||
//! syntax performs interpolation of runtime variables into the quoted tokens.
|
||||
//! Check out the documentation of the [`quote!`] macro for more detail about
|
||||
//! the syntax. See also the [`quote_spanned!`] macro which is important for
|
||||
//! implementing hygienic procedural macros.
|
||||
//!
|
||||
//! [a]: https://serde.rs/
|
||||
//! [`quote_spanned!`]: macro.quote_spanned.html
|
||||
//!
|
||||
//! ```edition2018
|
||||
//! # use quote::quote;
|
||||
//! #
|
||||
//! # let generics = "";
|
||||
//! # let where_clause = "";
|
||||
//! # let field_ty = "";
|
||||
//! # let item_ty = "";
|
||||
//! # let path = "";
|
||||
//! # let value = "";
|
||||
//! #
|
||||
//! let tokens = quote! {
|
||||
//! struct SerializeWith #generics #where_clause {
|
||||
//! value: &'a #field_ty,
|
||||
//! phantom: core::marker::PhantomData<#item_ty>,
|
||||
//! }
|
||||
//!
|
||||
//! impl #generics serde::Serialize for SerializeWith #generics #where_clause {
|
||||
//! fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
//! where
|
||||
//! S: serde::Serializer,
|
||||
//! {
|
||||
//! #path(self.value, serializer)
|
||||
//! }
|
||||
//! }
|
||||
//!
|
||||
//! SerializeWith {
|
||||
//! value: #value,
|
||||
//! phantom: core::marker::PhantomData::<#item_ty>,
|
||||
//! }
|
||||
//! };
|
||||
//! ```
|
||||
//!
|
||||
//! # Recursion limit
|
||||
//!
|
||||
//! The `quote!` macro relies on deep recursion so some large invocations may
|
||||
//! fail with "recursion limit reached" when you compile. If it fails, bump up
|
||||
//! the recursion limit by adding `#![recursion_limit = "128"]` to your crate.
|
||||
//! An even higher limit may be necessary for especially large invocations.
|
||||
|
||||
// Quote types in rustdoc of other crates get linked to here.
|
||||
#![doc(html_root_url = "https://docs.rs/quote/0.6.11")]
|
||||
|
||||
#[cfg(all(
|
||||
not(all(target_arch = "wasm32", target_os = "unknown")),
|
||||
feature = "proc-macro"
|
||||
))]
|
||||
extern crate proc_macro;
|
||||
extern crate proc_macro2;
|
||||
|
||||
mod ext;
|
||||
pub use ext::TokenStreamExt;
|
||||
|
||||
mod to_tokens;
|
||||
pub use to_tokens::ToTokens;
|
||||
|
||||
// Not public API.
//
// Runtime support for the expansion of `quote!`/`quote_spanned!`. The macros
// expand to calls into this module, so everything here must be `pub`, but it
// is `#[doc(hidden)]` and carries no stability guarantee.
#[doc(hidden)]
pub mod __rt {
    use ext::TokenStreamExt;
    pub use proc_macro2::*;

    // True for bytes that can begin an identifier: ASCII letters and `_`.
    fn is_ident_start(c: u8) -> bool {
        (b'a' <= c && c <= b'z') || (b'A' <= c && c <= b'Z') || c == b'_'
    }

    // True for bytes that can continue an identifier: letters, `_`, digits.
    fn is_ident_continue(c: u8) -> bool {
        (b'a' <= c && c <= b'z')
            || (b'A' <= c && c <= b'Z')
            || c == b'_'
            || (b'0' <= c && c <= b'9')
    }

    // Whether `token` lexes as a single ASCII identifier. Anything that is not
    // an identifier falls through to the full tokenizer in `parse` below.
    fn is_ident(token: &str) -> bool {
        // Reject all-digit strings: those are integer literals, not idents.
        // Note `all` on an empty iterator returns true, so the empty string is
        // also rejected here — which is what makes the `unwrap` below safe.
        if token.bytes().all(|digit| digit >= b'0' && digit <= b'9') {
            return false;
        }

        let mut bytes = token.bytes();
        let first = bytes.next().unwrap();
        if !is_ident_start(first) {
            return false;
        }
        for ch in bytes {
            if !is_ident_continue(ch) {
                return false;
            }
        }
        true
    }

    // Tokenize `s`, respan each resulting top-level token tree to `span`, and
    // append everything to `tokens`.
    pub fn parse(tokens: &mut TokenStream, span: Span, s: &str) {
        if is_ident(s) {
            // Fast path, since idents are the most common token.
            tokens.append(Ident::new(s, span));
        } else {
            // `expect` is acceptable: `quote!` only passes stringified tokens
            // it captured itself, so a parse failure is an internal bug.
            let s: TokenStream = s.parse().expect("invalid token stream");
            tokens.extend(s.into_iter().map(|mut t| {
                t.set_span(span);
                t
            }));
        }
    }

    // Defines `pub fn $name(tokens, span)` appending a 1-, 2-, or 3-character
    // punctuation sequence. Every character except the last uses
    // `Spacing::Joint` so that multi-character operators like `=>` or `<<=`
    // are glued back into a single logical token by the lexer.
    macro_rules! push_punct {
        ($name:ident $char1:tt) => {
            pub fn $name(tokens: &mut TokenStream, span: Span) {
                let mut punct = Punct::new($char1, Spacing::Alone);
                punct.set_span(span);
                tokens.append(punct);
            }
        };
        ($name:ident $char1:tt $char2:tt) => {
            pub fn $name(tokens: &mut TokenStream, span: Span) {
                let mut punct = Punct::new($char1, Spacing::Joint);
                punct.set_span(span);
                tokens.append(punct);
                let mut punct = Punct::new($char2, Spacing::Alone);
                punct.set_span(span);
                tokens.append(punct);
            }
        };
        ($name:ident $char1:tt $char2:tt $char3:tt) => {
            pub fn $name(tokens: &mut TokenStream, span: Span) {
                let mut punct = Punct::new($char1, Spacing::Joint);
                punct.set_span(span);
                tokens.append(punct);
                let mut punct = Punct::new($char2, Spacing::Joint);
                punct.set_span(span);
                tokens.append(punct);
                let mut punct = Punct::new($char3, Spacing::Alone);
                punct.set_span(span);
                tokens.append(punct);
            }
        };
    }

    // One helper per Rust punctuation token; `quote_each_token!` dispatches
    // each literal operator it encounters to the matching function here.
    push_punct!(push_add '+');
    push_punct!(push_add_eq '+' '=');
    push_punct!(push_and '&');
    push_punct!(push_and_and '&' '&');
    push_punct!(push_and_eq '&' '=');
    push_punct!(push_at '@');
    push_punct!(push_bang '!');
    push_punct!(push_caret '^');
    push_punct!(push_caret_eq '^' '=');
    push_punct!(push_colon ':');
    push_punct!(push_colon2 ':' ':');
    push_punct!(push_comma ',');
    push_punct!(push_div '/');
    push_punct!(push_div_eq '/' '=');
    push_punct!(push_dot '.');
    push_punct!(push_dot2 '.' '.');
    push_punct!(push_dot3 '.' '.' '.');
    push_punct!(push_dot_dot_eq '.' '.' '=');
    push_punct!(push_eq '=');
    push_punct!(push_eq_eq '=' '=');
    push_punct!(push_ge '>' '=');
    push_punct!(push_gt '>');
    push_punct!(push_le '<' '=');
    push_punct!(push_lt '<');
    push_punct!(push_mul_eq '*' '=');
    push_punct!(push_ne '!' '=');
    push_punct!(push_or '|');
    push_punct!(push_or_eq '|' '=');
    push_punct!(push_or_or '|' '|');
    push_punct!(push_pound '#');
    push_punct!(push_question '?');
    push_punct!(push_rarrow '-' '>');
    push_punct!(push_larrow '<' '-');
    push_punct!(push_rem '%');
    push_punct!(push_rem_eq '%' '=');
    push_punct!(push_fat_arrow '=' '>');
    push_punct!(push_semi ';');
    push_punct!(push_shl '<' '<');
    push_punct!(push_shl_eq '<' '<' '=');
    push_punct!(push_shr '>' '>');
    push_punct!(push_shr_eq '>' '>' '=');
    push_punct!(push_star '*');
    push_punct!(push_sub '-');
    push_punct!(push_sub_eq '-' '=');
}
|
||||
|
||||
/// The whole point.
|
||||
///
|
||||
/// Performs variable interpolation against the input and produces it as
|
||||
/// [`TokenStream`]. For returning tokens to the compiler in a procedural macro, use
|
||||
/// `into()` to build a `TokenStream`.
|
||||
///
|
||||
/// [`TokenStream`]: https://docs.rs/proc-macro2/0.4/proc_macro2/struct.TokenStream.html
|
||||
///
|
||||
/// # Interpolation
|
||||
///
|
||||
/// Variable interpolation is done with `#var` (similar to `$var` in
|
||||
/// `macro_rules!` macros). This grabs the `var` variable that is currently in
|
||||
/// scope and inserts it in that location in the output tokens. Any type
|
||||
/// implementing the [`ToTokens`] trait can be interpolated. This includes most
|
||||
/// Rust primitive types as well as most of the syntax tree types from the [Syn]
|
||||
/// crate.
|
||||
///
|
||||
/// [`ToTokens`]: trait.ToTokens.html
|
||||
/// [Syn]: https://github.com/dtolnay/syn
|
||||
///
|
||||
/// Repetition is done using `#(...)*` or `#(...),*` again similar to
|
||||
/// `macro_rules!`. This iterates through the elements of any variable
|
||||
/// interpolated within the repetition and inserts a copy of the repetition body
|
||||
/// for each one. The variables in an interpolation may be anything that
|
||||
/// implements `IntoIterator`, including `Vec` or a pre-existing iterator.
|
||||
///
|
||||
/// - `#(#var)*` — no separators
|
||||
/// - `#(#var),*` — the character before the asterisk is used as a separator
|
||||
/// - `#( struct #var; )*` — the repetition can contain other tokens
|
||||
/// - `#( #k => println!("{}", #v), )*` — even multiple interpolations
|
||||
///
|
||||
/// # Hygiene
|
||||
///
|
||||
/// Any interpolated tokens preserve the `Span` information provided by their
|
||||
/// `ToTokens` implementation. Tokens that originate within the `quote!`
|
||||
/// invocation are spanned with [`Span::call_site()`].
|
||||
///
|
||||
/// [`Span::call_site()`]: https://docs.rs/proc-macro2/0.4/proc_macro2/struct.Span.html#method.call_site
|
||||
///
|
||||
/// A different span can be provided through the [`quote_spanned!`] macro.
|
||||
///
|
||||
/// [`quote_spanned!`]: macro.quote_spanned.html
|
||||
///
|
||||
/// # Return type
|
||||
///
|
||||
/// The macro evaluates to an expression of type `proc_macro2::TokenStream`.
|
||||
/// Meanwhile Rust procedural macros are expected to return the type
|
||||
/// `proc_macro::TokenStream`.
|
||||
///
|
||||
/// The difference between the two types is that `proc_macro` types are entirely
|
||||
/// specific to procedural macros and cannot ever exist in code outside of a
|
||||
/// procedural macro, while `proc_macro2` types may exist anywhere including
|
||||
/// tests and non-macro code like main.rs and build.rs. This is why even the
|
||||
/// procedural macro ecosystem is largely built around `proc_macro2`, because
|
||||
/// that ensures the libraries are unit testable and accessible in non-macro
|
||||
/// contexts.
|
||||
///
|
||||
/// There is a [`From`]-conversion in both directions so returning the output of
|
||||
/// `quote!` from a procedural macro usually looks like `tokens.into()` or
|
||||
/// `proc_macro::TokenStream::from(tokens)`.
|
||||
///
|
||||
/// [`From`]: https://doc.rust-lang.org/std/convert/trait.From.html
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ## Procedural macro
|
||||
///
|
||||
/// The structure of a basic procedural macro is as follows. Refer to the [Syn]
|
||||
/// crate for further useful guidance on using `quote!` as part of a procedural
|
||||
/// macro.
|
||||
///
|
||||
/// [Syn]: https://github.com/dtolnay/syn
|
||||
///
|
||||
/// ```edition2018
|
||||
/// # #[cfg(any())]
|
||||
/// extern crate proc_macro;
|
||||
/// # use proc_macro2 as proc_macro;
|
||||
///
|
||||
/// use proc_macro::TokenStream;
|
||||
/// use quote::quote;
|
||||
///
|
||||
/// # const IGNORE_TOKENS: &'static str = stringify! {
|
||||
/// #[proc_macro_derive(HeapSize)]
|
||||
/// # };
|
||||
/// pub fn derive_heap_size(input: TokenStream) -> TokenStream {
|
||||
/// // Parse the input and figure out what implementation to generate...
|
||||
/// # const IGNORE_TOKENS: &'static str = stringify! {
|
||||
/// let name = /* ... */;
|
||||
/// let expr = /* ... */;
|
||||
/// # };
|
||||
/// #
|
||||
/// # let name = 0;
|
||||
/// # let expr = 0;
|
||||
///
|
||||
/// let expanded = quote! {
|
||||
/// // The generated impl.
|
||||
/// impl heapsize::HeapSize for #name {
|
||||
/// fn heap_size_of_children(&self) -> usize {
|
||||
/// #expr
|
||||
/// }
|
||||
/// }
|
||||
/// };
|
||||
///
|
||||
/// // Hand the output tokens back to the compiler.
|
||||
/// TokenStream::from(expanded)
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// ## Combining quoted fragments
|
||||
///
|
||||
/// Usually you don't end up constructing an entire final `TokenStream` in one
|
||||
/// piece. Different parts may come from different helper functions. The tokens
|
||||
/// produced by `quote!` themselves implement `ToTokens` and so can be
|
||||
/// interpolated into later `quote!` invocations to build up a final result.
|
||||
///
|
||||
/// ```edition2018
|
||||
/// # use quote::quote;
|
||||
/// #
|
||||
/// let type_definition = quote! {...};
|
||||
/// let methods = quote! {...};
|
||||
///
|
||||
/// let tokens = quote! {
|
||||
/// #type_definition
|
||||
/// #methods
|
||||
/// };
|
||||
/// ```
|
||||
///
|
||||
/// ## Constructing identifiers
|
||||
///
|
||||
/// Suppose we have an identifier `ident` which came from somewhere in a macro
|
||||
/// input and we need to modify it in some way for the macro output. Let's
|
||||
/// consider prepending the identifier with an underscore.
|
||||
///
|
||||
/// Simply interpolating the identifier next to an underscore will not have the
|
||||
/// behavior of concatenating them. The underscore and the identifier will
|
||||
/// continue to be two separate tokens as if you had written `_ x`.
|
||||
///
|
||||
/// ```edition2018
|
||||
/// # use proc_macro2::{self as syn, Span};
|
||||
/// # use quote::quote;
|
||||
/// #
|
||||
/// # let ident = syn::Ident::new("i", Span::call_site());
|
||||
/// #
|
||||
/// // incorrect
|
||||
/// quote! {
|
||||
/// let mut _#ident = 0;
|
||||
/// }
|
||||
/// # ;
|
||||
/// ```
|
||||
///
|
||||
/// The solution is to perform token-level manipulations using the APIs provided
|
||||
/// by Syn and proc-macro2.
|
||||
///
|
||||
/// ```edition2018
|
||||
/// # use proc_macro2::{self as syn, Span};
|
||||
/// # use quote::quote;
|
||||
/// #
|
||||
/// # let ident = syn::Ident::new("i", Span::call_site());
|
||||
/// #
|
||||
/// let concatenated = format!("_{}", ident);
|
||||
/// let varname = syn::Ident::new(&concatenated, ident.span());
|
||||
/// quote! {
|
||||
/// let mut #varname = 0;
|
||||
/// }
|
||||
/// # ;
|
||||
/// ```
|
||||
///
|
||||
/// ## Making method calls
|
||||
///
|
||||
/// Let's say our macro requires some type specified in the macro input to have
|
||||
/// a constructor called `new`. We have the type in a variable called
|
||||
/// `field_type` of type `syn::Type` and want to invoke the constructor.
|
||||
///
|
||||
/// ```edition2018
|
||||
/// # use quote::quote;
|
||||
/// #
|
||||
/// # let field_type = quote!(...);
|
||||
/// #
|
||||
/// // incorrect
|
||||
/// quote! {
|
||||
/// let value = #field_type::new();
|
||||
/// }
|
||||
/// # ;
|
||||
/// ```
|
||||
///
|
||||
/// This works only sometimes. If `field_type` is `String`, the expanded code
|
||||
/// contains `String::new()` which is fine. But if `field_type` is something
|
||||
/// like `Vec<i32>` then the expanded code is `Vec<i32>::new()` which is invalid
|
||||
/// syntax. Ordinarily in handwritten Rust we would write `Vec::<i32>::new()`
|
||||
/// but for macros often the following is more convenient.
|
||||
///
|
||||
/// ```edition2018
|
||||
/// # use quote::quote;
|
||||
/// #
|
||||
/// # let field_type = quote!(...);
|
||||
/// #
|
||||
/// quote! {
|
||||
/// let value = <#field_type>::new();
|
||||
/// }
|
||||
/// # ;
|
||||
/// ```
|
||||
///
|
||||
/// This expands to `<Vec<i32>>::new()` which behaves correctly.
|
||||
///
|
||||
/// A similar pattern is appropriate for trait methods.
|
||||
///
|
||||
/// ```edition2018
|
||||
/// # use quote::quote;
|
||||
/// #
|
||||
/// # let field_type = quote!(...);
|
||||
/// #
|
||||
/// quote! {
|
||||
/// let value = <#field_type as core::default::Default>::default();
|
||||
/// }
|
||||
/// # ;
|
||||
/// ```
|
||||
#[macro_export(local_inner_macros)]
macro_rules! quote {
    // `quote!` is simply `quote_spanned!` with the span fixed to the macro
    // call site. All real work happens in `quote_each_token!` via that macro.
    ($($tt:tt)*) => (quote_spanned!($crate::__rt::Span::call_site()=> $($tt)*));
}
|
||||
|
||||
/// Same as `quote!`, but applies a given span to all tokens originating within
|
||||
/// the macro invocation.
|
||||
///
|
||||
/// # Syntax
|
||||
///
|
||||
/// A span expression of type [`Span`], followed by `=>`, followed by the tokens
|
||||
/// to quote. The span expression should be brief -- use a variable for anything
|
||||
/// more than a few characters. There should be no space before the `=>` token.
|
||||
///
|
||||
/// [`Span`]: https://docs.rs/proc-macro2/0.4/proc_macro2/struct.Span.html
|
||||
///
|
||||
/// ```edition2018
|
||||
/// # use proc_macro2::Span;
|
||||
/// # use quote::quote_spanned;
|
||||
/// #
|
||||
/// # const IGNORE_TOKENS: &'static str = stringify! {
|
||||
/// let span = /* ... */;
|
||||
/// # };
|
||||
/// # let span = Span::call_site();
|
||||
/// # let init = 0;
|
||||
///
|
||||
/// // On one line, use parentheses.
|
||||
/// let tokens = quote_spanned!(span=> Box::into_raw(Box::new(#init)));
|
||||
///
|
||||
/// // On multiple lines, place the span at the top and use braces.
|
||||
/// let tokens = quote_spanned! {span=>
|
||||
/// Box::into_raw(Box::new(#init))
|
||||
/// };
|
||||
/// ```
|
||||
///
|
||||
/// The lack of space before the `=>` should look jarring to Rust programmers
|
||||
/// and this is intentional. The formatting is designed to be visibly
|
||||
/// off-balance and draw the eye a particular way, due to the span expression
|
||||
/// being evaluated in the context of the procedural macro and the remaining
|
||||
/// tokens being evaluated in the generated code.
|
||||
///
|
||||
/// # Hygiene
|
||||
///
|
||||
/// Any interpolated tokens preserve the `Span` information provided by their
|
||||
/// `ToTokens` implementation. Tokens that originate within the `quote_spanned!`
|
||||
/// invocation are spanned with the given span argument.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// The following procedural macro code uses `quote_spanned!` to assert that a
|
||||
/// particular Rust type implements the [`Sync`] trait so that references can be
|
||||
/// safely shared between threads.
|
||||
///
|
||||
/// [`Sync`]: https://doc.rust-lang.org/std/marker/trait.Sync.html
|
||||
///
|
||||
/// ```edition2018
|
||||
/// # use quote::{quote_spanned, TokenStreamExt, ToTokens};
|
||||
/// # use proc_macro2::{Span, TokenStream};
|
||||
/// #
|
||||
/// # struct Type;
|
||||
/// #
|
||||
/// # impl Type {
|
||||
/// # fn span(&self) -> Span {
|
||||
/// # Span::call_site()
|
||||
/// # }
|
||||
/// # }
|
||||
/// #
|
||||
/// # impl ToTokens for Type {
|
||||
/// # fn to_tokens(&self, _tokens: &mut TokenStream) {}
|
||||
/// # }
|
||||
/// #
|
||||
/// # let ty = Type;
|
||||
/// # let call_site = Span::call_site();
|
||||
/// #
|
||||
/// let ty_span = ty.span();
|
||||
/// let assert_sync = quote_spanned! {ty_span=>
|
||||
/// struct _AssertSync where #ty: Sync;
|
||||
/// };
|
||||
/// ```
|
||||
///
|
||||
/// If the assertion fails, the user will see an error like the following. The
|
||||
/// input span of their type is hightlighted in the error.
|
||||
///
|
||||
/// ```text
|
||||
/// error[E0277]: the trait bound `*const (): std::marker::Sync` is not satisfied
|
||||
/// --> src/main.rs:10:21
|
||||
/// |
|
||||
/// 10 | static ref PTR: *const () = &();
|
||||
/// | ^^^^^^^^^ `*const ()` cannot be shared between threads safely
|
||||
/// ```
|
||||
///
|
||||
/// In this example it is important for the where-clause to be spanned with the
|
||||
/// line/column information of the user's input type so that error messages are
|
||||
/// placed appropriately by the compiler. But it is also incredibly important
|
||||
/// that `Sync` resolves at the macro definition site and not the macro call
|
||||
/// site. If we resolve `Sync` at the same span that the user's type is going to
|
||||
/// be resolved, then they could bypass our check by defining their own trait
|
||||
/// named `Sync` that is implemented for their type.
|
||||
#[macro_export(local_inner_macros)]
macro_rules! quote_spanned {
    ($span:expr=> $($tt:tt)*) => {
        {
            // Leading underscores keep the compiler from warning about unused
            // variables when the token list is empty. The span expression is
            // bound once up front so it is evaluated exactly one time even
            // though every quoted token uses it.
            let mut _s = $crate::__rt::TokenStream::new();
            let _span = $span;
            quote_each_token!(_s _span $($tt)*);
            _s
        }
    };
}
|
||||
|
||||
// Extract the names of all #metavariables and pass them to the $finish macro.
//
// in: pounded_var_names!(then () a #b c #( #d )* #e)
// out: then!(() b d e)
//
// NOTE: arm order matters. The `#`-prefixed-group arms must precede the plain
// group arms, and the catch-all `$ignore:tt` arm must come last, because
// `macro_rules!` tries arms top to bottom.
#[macro_export(local_inner_macros)]
#[doc(hidden)]
macro_rules! pounded_var_names {
    // Recurse into a `#( ... )` group, splicing its contents into the input.
    ($finish:ident ($($found:ident)*) # ( $($inner:tt)* ) $($rest:tt)*) => {
        pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*)
    };

    // Same for `#[ ... ]`.
    ($finish:ident ($($found:ident)*) # [ $($inner:tt)* ] $($rest:tt)*) => {
        pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*)
    };

    // Same for `#{ ... }`.
    ($finish:ident ($($found:ident)*) # { $($inner:tt)* } $($rest:tt)*) => {
        pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*)
    };

    // `#ident` — record the metavariable name in the accumulator.
    ($finish:ident ($($found:ident)*) # $first:ident $($rest:tt)*) => {
        pounded_var_names!($finish ($($found)* $first) $($rest)*)
    };

    // Plain delimited groups: recurse into their contents.
    ($finish:ident ($($found:ident)*) ( $($inner:tt)* ) $($rest:tt)*) => {
        pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*)
    };

    ($finish:ident ($($found:ident)*) [ $($inner:tt)* ] $($rest:tt)*) => {
        pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*)
    };

    ($finish:ident ($($found:ident)*) { $($inner:tt)* } $($rest:tt)*) => {
        pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*)
    };

    // Any other single token: skip it.
    ($finish:ident ($($found:ident)*) $ignore:tt $($rest:tt)*) => {
        pounded_var_names!($finish ($($found)*) $($rest)*)
    };

    // Input exhausted — hand the accumulated names to the continuation macro.
    ($finish:ident ($($found:ident)*)) => {
        $finish!(() $($found)*)
    };
}
|
||||
|
||||
// Build the pattern matching the element type of `multi_zip_expr!`'s iterator:
// `.zip()` nests tuples to the left, so the pattern must nest the same way.
//
// in: nested_tuples_pat!(() a b c d e)
// out: ((((a b) c) d) e)
//
// in: nested_tuples_pat!(() a)
// out: a
#[macro_export(local_inner_macros)]
#[doc(hidden)]
macro_rules! nested_tuples_pat {
    // No variables at all: match the `&()` yielded by `multi_zip_expr!(())`.
    (()) => {
        &()
    };

    // Seed the accumulator with the first variable.
    (() $first:ident $($rest:ident)*) => {
        nested_tuples_pat!(($first) $($rest)*)
    };

    // Wrap the accumulated pattern with the next variable, nesting leftward.
    (($pat:pat) $first:ident $($rest:ident)*) => {
        nested_tuples_pat!((($pat, $first)) $($rest)*)
    };

    // Done — emit the finished pattern.
    (($done:pat)) => {
        $done
    };
}
|
||||
|
||||
// Build one iterator that walks all interpolated variables in lockstep by
// chaining `.zip()`; paired with `nested_tuples_pat!` for destructuring.
//
// in: multi_zip_expr!(() a b c d e)
// out: a.into_iter().zip(b).zip(c).zip(d).zip(e)
//
// in: multi_zip_iter!(() a)
// out: a
#[macro_export(local_inner_macros)]
#[doc(hidden)]
macro_rules! multi_zip_expr {
    // No variables: iterate an empty slice so the repetition runs zero times.
    (()) => {
        &[]
    };

    // Exactly one variable: use it directly, no zipping required.
    (() $single:ident) => {
        $single
    };

    // Two or more: seed the accumulator with the first iterator.
    (() $first:ident $($rest:ident)*) => {
        multi_zip_expr!(($first.into_iter()) $($rest)*)
    };

    // Zip each remaining variable onto the accumulated expression.
    (($zips:expr) $first:ident $($rest:ident)*) => {
        multi_zip_expr!(($zips.zip($first)) $($rest)*)
    };

    // Done — emit the finished expression.
    (($done:expr)) => {
        $done
    };
}
|
||||
|
||||
#[macro_export(local_inner_macros)]
|
||||
#[doc(hidden)]
|
||||
macro_rules! quote_each_token {
|
||||
($tokens:ident $span:ident) => {};
|
||||
|
||||
($tokens:ident $span:ident # ! $($rest:tt)*) => {
|
||||
quote_each_token!($tokens $span #);
|
||||
quote_each_token!($tokens $span !);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident # ( $($inner:tt)* ) * $($rest:tt)*) => {
|
||||
for pounded_var_names!(nested_tuples_pat () $($inner)*)
|
||||
in pounded_var_names!(multi_zip_expr () $($inner)*) {
|
||||
quote_each_token!($tokens $span $($inner)*);
|
||||
}
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident # ( $($inner:tt)* ) $sep:tt * $($rest:tt)*) => {
|
||||
for (_i, pounded_var_names!(nested_tuples_pat () $($inner)*))
|
||||
in pounded_var_names!(multi_zip_expr () $($inner)*).into_iter().enumerate() {
|
||||
if _i > 0 {
|
||||
quote_each_token!($tokens $span $sep);
|
||||
}
|
||||
quote_each_token!($tokens $span $($inner)*);
|
||||
}
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident # [ $($inner:tt)* ] $($rest:tt)*) => {
|
||||
quote_each_token!($tokens $span #);
|
||||
$tokens.extend({
|
||||
let mut g = $crate::__rt::Group::new(
|
||||
$crate::__rt::Delimiter::Bracket,
|
||||
quote_spanned!($span=> $($inner)*),
|
||||
);
|
||||
g.set_span($span);
|
||||
Some($crate::__rt::TokenTree::from(g))
|
||||
});
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident # $first:ident $($rest:tt)*) => {
|
||||
$crate::ToTokens::to_tokens(&$first, &mut $tokens);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident ( $($first:tt)* ) $($rest:tt)*) => {
|
||||
$tokens.extend({
|
||||
let mut g = $crate::__rt::Group::new(
|
||||
$crate::__rt::Delimiter::Parenthesis,
|
||||
quote_spanned!($span=> $($first)*),
|
||||
);
|
||||
g.set_span($span);
|
||||
Some($crate::__rt::TokenTree::from(g))
|
||||
});
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident [ $($first:tt)* ] $($rest:tt)*) => {
|
||||
$tokens.extend({
|
||||
let mut g = $crate::__rt::Group::new(
|
||||
$crate::__rt::Delimiter::Bracket,
|
||||
quote_spanned!($span=> $($first)*),
|
||||
);
|
||||
g.set_span($span);
|
||||
Some($crate::__rt::TokenTree::from(g))
|
||||
});
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident { $($first:tt)* } $($rest:tt)*) => {
|
||||
$tokens.extend({
|
||||
let mut g = $crate::__rt::Group::new(
|
||||
$crate::__rt::Delimiter::Brace,
|
||||
quote_spanned!($span=> $($first)*),
|
||||
);
|
||||
g.set_span($span);
|
||||
Some($crate::__rt::TokenTree::from(g))
|
||||
});
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident + $($rest:tt)*) => {
|
||||
$crate::__rt::push_add(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident += $($rest:tt)*) => {
|
||||
$crate::__rt::push_add_eq(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident & $($rest:tt)*) => {
|
||||
$crate::__rt::push_and(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident && $($rest:tt)*) => {
|
||||
$crate::__rt::push_and_and(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident &= $($rest:tt)*) => {
|
||||
$crate::__rt::push_and_eq(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident @ $($rest:tt)*) => {
|
||||
$crate::__rt::push_at(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident ! $($rest:tt)*) => {
|
||||
$crate::__rt::push_bang(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident ^ $($rest:tt)*) => {
|
||||
$crate::__rt::push_caret(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident ^= $($rest:tt)*) => {
|
||||
$crate::__rt::push_caret_eq(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident : $($rest:tt)*) => {
|
||||
$crate::__rt::push_colon(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident :: $($rest:tt)*) => {
|
||||
$crate::__rt::push_colon2(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident , $($rest:tt)*) => {
|
||||
$crate::__rt::push_comma(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident / $($rest:tt)*) => {
|
||||
$crate::__rt::push_div(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident /= $($rest:tt)*) => {
|
||||
$crate::__rt::push_div_eq(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident . $($rest:tt)*) => {
|
||||
$crate::__rt::push_dot(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident .. $($rest:tt)*) => {
|
||||
$crate::__rt::push_dot2(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident ... $($rest:tt)*) => {
|
||||
$crate::__rt::push_dot3(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident ..= $($rest:tt)*) => {
|
||||
$crate::__rt::push_dot_dot_eq(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident = $($rest:tt)*) => {
|
||||
$crate::__rt::push_eq(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident == $($rest:tt)*) => {
|
||||
$crate::__rt::push_eq_eq(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident >= $($rest:tt)*) => {
|
||||
$crate::__rt::push_ge(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident > $($rest:tt)*) => {
|
||||
$crate::__rt::push_gt(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident <= $($rest:tt)*) => {
|
||||
$crate::__rt::push_le(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident < $($rest:tt)*) => {
|
||||
$crate::__rt::push_lt(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident *= $($rest:tt)*) => {
|
||||
$crate::__rt::push_mul_eq(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident != $($rest:tt)*) => {
|
||||
$crate::__rt::push_ne(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident | $($rest:tt)*) => {
|
||||
$crate::__rt::push_or(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident |= $($rest:tt)*) => {
|
||||
$crate::__rt::push_or_eq(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident || $($rest:tt)*) => {
|
||||
$crate::__rt::push_or_or(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident # $($rest:tt)*) => {
|
||||
$crate::__rt::push_pound(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident ? $($rest:tt)*) => {
|
||||
$crate::__rt::push_question(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident -> $($rest:tt)*) => {
|
||||
$crate::__rt::push_rarrow(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident <- $($rest:tt)*) => {
|
||||
$crate::__rt::push_larrow(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident % $($rest:tt)*) => {
|
||||
$crate::__rt::push_rem(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident %= $($rest:tt)*) => {
|
||||
$crate::__rt::push_rem_eq(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident => $($rest:tt)*) => {
|
||||
$crate::__rt::push_fat_arrow(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident ; $($rest:tt)*) => {
|
||||
$crate::__rt::push_semi(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident << $($rest:tt)*) => {
|
||||
$crate::__rt::push_shl(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident <<= $($rest:tt)*) => {
|
||||
$crate::__rt::push_shl_eq(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident >> $($rest:tt)*) => {
|
||||
$crate::__rt::push_shr(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident >>= $($rest:tt)*) => {
|
||||
$crate::__rt::push_shr_eq(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident * $($rest:tt)*) => {
|
||||
$crate::__rt::push_star(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident - $($rest:tt)*) => {
|
||||
$crate::__rt::push_sub(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident -= $($rest:tt)*) => {
|
||||
$crate::__rt::push_sub_eq(&mut $tokens, $span);
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
|
||||
($tokens:ident $span:ident $first:tt $($rest:tt)*) => {
|
||||
$crate::__rt::parse(&mut $tokens, $span, quote_stringify!($first));
|
||||
quote_each_token!($tokens $span $($rest)*);
|
||||
};
|
||||
}
|
||||
|
||||
// Unhygienically invoke whatever `stringify` the caller has in scope i.e. not a
|
||||
// local macro. The macros marked `local_inner_macros` above cannot invoke
|
||||
// `stringify` directly.
|
||||
#[macro_export]
|
||||
#[doc(hidden)]
|
||||
macro_rules! quote_stringify {
|
||||
($tt:tt) => {
|
||||
stringify!($tt)
|
||||
};
|
||||
}
|
|
@ -0,0 +1,198 @@
|
|||
use super::TokenStreamExt;
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::iter;
|
||||
|
||||
use proc_macro2::{Group, Ident, Literal, Punct, Span, TokenStream, TokenTree};
|
||||
|
||||
/// Types that can be interpolated inside a [`quote!`] invocation.
|
||||
///
|
||||
/// [`quote!`]: macro.quote.html
|
||||
pub trait ToTokens {
|
||||
/// Write `self` to the given `TokenStream`.
|
||||
///
|
||||
/// The token append methods provided by the [`TokenStreamExt`] extension
|
||||
/// trait may be useful for implementing `ToTokens`.
|
||||
///
|
||||
/// [`TokenStreamExt`]: trait.TokenStreamExt.html
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// Example implementation for a struct representing Rust paths like
|
||||
/// `std::cmp::PartialEq`:
|
||||
///
|
||||
/// ```edition2018
|
||||
/// use proc_macro2::{TokenTree, Spacing, Span, Punct, TokenStream};
|
||||
/// use quote::{TokenStreamExt, ToTokens};
|
||||
///
|
||||
/// pub struct Path {
|
||||
/// pub global: bool,
|
||||
/// pub segments: Vec<PathSegment>,
|
||||
/// }
|
||||
///
|
||||
/// impl ToTokens for Path {
|
||||
/// fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
/// for (i, segment) in self.segments.iter().enumerate() {
|
||||
/// if i > 0 || self.global {
|
||||
/// // Double colon `::`
|
||||
/// tokens.append(Punct::new(':', Spacing::Joint));
|
||||
/// tokens.append(Punct::new(':', Spacing::Alone));
|
||||
/// }
|
||||
/// segment.to_tokens(tokens);
|
||||
/// }
|
||||
/// }
|
||||
/// }
|
||||
/// #
|
||||
/// # pub struct PathSegment;
|
||||
/// #
|
||||
/// # impl ToTokens for PathSegment {
|
||||
/// # fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
/// # unimplemented!()
|
||||
/// # }
|
||||
/// # }
|
||||
/// ```
|
||||
fn to_tokens(&self, tokens: &mut TokenStream);
|
||||
|
||||
/// Convert `self` directly into a `TokenStream` object.
|
||||
///
|
||||
/// This method is implicitly implemented using `to_tokens`, and acts as a
|
||||
/// convenience method for consumers of the `ToTokens` trait.
|
||||
fn into_token_stream(self) -> TokenStream
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
let mut tokens = TokenStream::new();
|
||||
self.to_tokens(&mut tokens);
|
||||
tokens
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T: ?Sized + ToTokens> ToTokens for &'a T {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
(**self).to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T: ?Sized + ToTokens> ToTokens for &'a mut T {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
(**self).to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T: ?Sized + ToOwned + ToTokens> ToTokens for Cow<'a, T> {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
(**self).to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + ToTokens> ToTokens for Box<T> {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
(**self).to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ToTokens> ToTokens for Option<T> {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
if let Some(ref t) = *self {
|
||||
t.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for str {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append(Literal::string(self));
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for String {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.as_str().to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! primitive {
|
||||
($($t:ident => $name:ident)*) => ($(
|
||||
impl ToTokens for $t {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append(Literal::$name(*self));
|
||||
}
|
||||
}
|
||||
)*)
|
||||
}
|
||||
|
||||
primitive! {
|
||||
i8 => i8_suffixed
|
||||
i16 => i16_suffixed
|
||||
i32 => i32_suffixed
|
||||
i64 => i64_suffixed
|
||||
isize => isize_suffixed
|
||||
|
||||
u8 => u8_suffixed
|
||||
u16 => u16_suffixed
|
||||
u32 => u32_suffixed
|
||||
u64 => u64_suffixed
|
||||
usize => usize_suffixed
|
||||
|
||||
f32 => f32_suffixed
|
||||
f64 => f64_suffixed
|
||||
}
|
||||
|
||||
#[cfg(integer128)]
|
||||
primitive! {
|
||||
i128 => i128_suffixed
|
||||
u128 => u128_suffixed
|
||||
}
|
||||
|
||||
impl ToTokens for char {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append(Literal::character(*self));
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for bool {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let word = if *self { "true" } else { "false" };
|
||||
tokens.append(Ident::new(word, Span::call_site()));
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for Group {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append(self.clone());
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for Ident {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append(self.clone());
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for Punct {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append(self.clone());
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for Literal {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append(self.clone());
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for TokenTree {
|
||||
fn to_tokens(&self, dst: &mut TokenStream) {
|
||||
dst.append(self.clone());
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for TokenStream {
|
||||
fn to_tokens(&self, dst: &mut TokenStream) {
|
||||
dst.extend(iter::once(self.clone()));
|
||||
}
|
||||
|
||||
fn into_token_stream(self) -> TokenStream {
|
||||
self
|
||||
}
|
||||
}
|
|
@ -0,0 +1,295 @@
|
|||
#![cfg_attr(feature = "cargo-clippy", allow(blacklisted_name))]
|
||||
|
||||
use std::borrow::Cow;
|
||||
|
||||
extern crate proc_macro2;
|
||||
#[macro_use]
|
||||
extern crate quote;
|
||||
|
||||
use proc_macro2::{Ident, Span, TokenStream};
|
||||
use quote::TokenStreamExt;
|
||||
|
||||
mod conditional {
|
||||
#[cfg(integer128)]
|
||||
mod integer128;
|
||||
}
|
||||
|
||||
struct X;
|
||||
|
||||
impl quote::ToTokens for X {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append(Ident::new("X", Span::call_site()));
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_quote_impl() {
|
||||
let tokens = quote! {
|
||||
impl<'a, T: ToTokens> ToTokens for &'a T {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
(**self).to_tokens(tokens)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let expected = concat!(
|
||||
"impl < 'a , T : ToTokens > ToTokens for & 'a T { ",
|
||||
"fn to_tokens ( & self , tokens : & mut TokenStream ) { ",
|
||||
"( * * self ) . to_tokens ( tokens ) ",
|
||||
"} ",
|
||||
"}"
|
||||
);
|
||||
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_substitution() {
|
||||
let x = X;
|
||||
let tokens = quote!(#x <#x> (#x) [#x] {#x});
|
||||
|
||||
let expected = "X < X > ( X ) [ X ] { X }";
|
||||
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_iter() {
|
||||
let primes = &[X, X, X, X];
|
||||
|
||||
assert_eq!("X X X X", quote!(#(#primes)*).to_string());
|
||||
|
||||
assert_eq!("X , X , X , X ,", quote!(#(#primes,)*).to_string());
|
||||
|
||||
assert_eq!("X , X , X , X", quote!(#(#primes),*).to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_advanced() {
|
||||
let generics = quote!( <'a, T> );
|
||||
|
||||
let where_clause = quote!( where T: Serialize );
|
||||
|
||||
let field_ty = quote!(String);
|
||||
|
||||
let item_ty = quote!(Cow<'a, str>);
|
||||
|
||||
let path = quote!(SomeTrait::serialize_with);
|
||||
|
||||
let value = quote!(self.x);
|
||||
|
||||
let tokens = quote! {
|
||||
struct SerializeWith #generics #where_clause {
|
||||
value: &'a #field_ty,
|
||||
phantom: ::std::marker::PhantomData<#item_ty>,
|
||||
}
|
||||
|
||||
impl #generics ::serde::Serialize for SerializeWith #generics #where_clause {
|
||||
fn serialize<S>(&self, s: &mut S) -> Result<(), S::Error>
|
||||
where S: ::serde::Serializer
|
||||
{
|
||||
#path(self.value, s)
|
||||
}
|
||||
}
|
||||
|
||||
SerializeWith {
|
||||
value: #value,
|
||||
phantom: ::std::marker::PhantomData::<#item_ty>,
|
||||
}
|
||||
};
|
||||
|
||||
let expected = concat!(
|
||||
"struct SerializeWith < 'a , T > where T : Serialize { ",
|
||||
"value : & 'a String , ",
|
||||
"phantom : :: std :: marker :: PhantomData < Cow < 'a , str > > , ",
|
||||
"} ",
|
||||
"impl < 'a , T > :: serde :: Serialize for SerializeWith < 'a , T > where T : Serialize { ",
|
||||
"fn serialize < S > ( & self , s : & mut S ) -> Result < ( ) , S :: Error > ",
|
||||
"where S : :: serde :: Serializer ",
|
||||
"{ ",
|
||||
"SomeTrait :: serialize_with ( self . value , s ) ",
|
||||
"} ",
|
||||
"} ",
|
||||
"SerializeWith { ",
|
||||
"value : self . x , ",
|
||||
"phantom : :: std :: marker :: PhantomData :: < Cow < 'a , str > > , ",
|
||||
"}"
|
||||
);
|
||||
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_integer() {
|
||||
let ii8 = -1i8;
|
||||
let ii16 = -1i16;
|
||||
let ii32 = -1i32;
|
||||
let ii64 = -1i64;
|
||||
let iisize = -1isize;
|
||||
let uu8 = 1u8;
|
||||
let uu16 = 1u16;
|
||||
let uu32 = 1u32;
|
||||
let uu64 = 1u64;
|
||||
let uusize = 1usize;
|
||||
|
||||
let tokens = quote! {
|
||||
#ii8 #ii16 #ii32 #ii64 #iisize
|
||||
#uu8 #uu16 #uu32 #uu64 #uusize
|
||||
};
|
||||
let expected = "-1i8 -1i16 -1i32 -1i64 -1isize 1u8 1u16 1u32 1u64 1usize";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_floating() {
|
||||
let e32 = 2.345f32;
|
||||
|
||||
let e64 = 2.345f64;
|
||||
|
||||
let tokens = quote! {
|
||||
#e32
|
||||
#e64
|
||||
};
|
||||
let expected = concat!("2.345f32 2.345f64");
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_char() {
|
||||
let zero = '\0';
|
||||
let pound = '#';
|
||||
let quote = '"';
|
||||
let apost = '\'';
|
||||
let newline = '\n';
|
||||
let heart = '\u{2764}';
|
||||
|
||||
let tokens = quote! {
|
||||
#zero #pound #quote #apost #newline #heart
|
||||
};
|
||||
let expected = "'\\u{0}' '#' '\\\"' '\\'' '\\n' '\\u{2764}'";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_str() {
|
||||
let s = "\0 a 'b \" c";
|
||||
let tokens = quote!(#s);
|
||||
let expected = "\"\\u{0} a \\'b \\\" c\"";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_string() {
|
||||
let s = "\0 a 'b \" c".to_string();
|
||||
let tokens = quote!(#s);
|
||||
let expected = "\"\\u{0} a \\'b \\\" c\"";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_ident() {
|
||||
let foo = Ident::new("Foo", Span::call_site());
|
||||
let bar = Ident::new(&format!("Bar{}", 7), Span::call_site());
|
||||
let tokens = quote!(struct #foo; enum #bar {});
|
||||
let expected = "struct Foo ; enum Bar7 { }";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_duplicate() {
|
||||
let ch = 'x';
|
||||
|
||||
let tokens = quote!(#ch #ch);
|
||||
|
||||
let expected = "'x' 'x'";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fancy_repetition() {
|
||||
let foo = vec!["a", "b"];
|
||||
let bar = vec![true, false];
|
||||
|
||||
let tokens = quote! {
|
||||
#(#foo: #bar),*
|
||||
};
|
||||
|
||||
let expected = r#""a" : true , "b" : false"#;
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_nested_fancy_repetition() {
|
||||
let nested = vec![vec!['a', 'b', 'c'], vec!['x', 'y', 'z']];
|
||||
|
||||
let tokens = quote! {
|
||||
#(
|
||||
#(#nested)*
|
||||
),*
|
||||
};
|
||||
|
||||
let expected = "'a' 'b' 'c' , 'x' 'y' 'z'";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_empty_repetition() {
|
||||
let tokens = quote!(#(a b)* #(c d),*);
|
||||
assert_eq!("", tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_variable_name_conflict() {
|
||||
// The implementation of `#(...),*` uses the variable `_i` but it should be
|
||||
// fine, if a little confusing when debugging.
|
||||
let _i = vec!['a', 'b'];
|
||||
let tokens = quote! { #(#_i),* };
|
||||
let expected = "'a' , 'b'";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_empty_quote() {
|
||||
let tokens = quote!();
|
||||
assert_eq!("", tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_box_str() {
|
||||
let b = "str".to_owned().into_boxed_str();
|
||||
let tokens = quote! { #b };
|
||||
assert_eq!("\"str\"", tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_cow() {
|
||||
let owned: Cow<Ident> = Cow::Owned(Ident::new("owned", Span::call_site()));
|
||||
|
||||
let ident = Ident::new("borrowed", Span::call_site());
|
||||
let borrowed = Cow::Borrowed(&ident);
|
||||
|
||||
let tokens = quote! { #owned #borrowed };
|
||||
assert_eq!("owned borrowed", tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_closure() {
|
||||
fn field_i(i: usize) -> Ident {
|
||||
Ident::new(&format!("__field{}", i), Span::call_site())
|
||||
}
|
||||
|
||||
let fields = (0usize..3)
|
||||
.map(field_i as fn(_) -> _)
|
||||
.map(|var| quote! { #var });
|
||||
|
||||
let tokens = quote! { #(#fields)* };
|
||||
assert_eq!("__field0 __field1 __field2", tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_append_tokens() {
|
||||
let mut a = quote!(a);
|
||||
let b = quote!(b);
|
||||
a.append_all(b);
|
||||
assert_eq!("a b", a.to_string());
|
||||
}
|
|
@ -1 +1 @@
|
|||
{"files":{"Cargo.toml":"68f4dc89836a05a2347086addab1849567ef8073c552ec0dfca8f96fd20550f9","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","README.md":"d9392d4c7af3bf9714f0a95801d64de46ffd4558cdfeea0eb85b414e555abb72","src/ext.rs":"03919239a20f8393288783a21bf6fdee12e405d13d162c9faa6f8f5ce54b003b","src/lib.rs":"5345b4d2e6f923724cec35c62d7397e6f04d5503d2d813bff7bbaa7ffc39a9cf","src/to_tokens.rs":"0dcd15cba2aa83abeb47b9a1babce7a29643b5efa2fe620b070cb37bb21a84f1","tests/conditional/integer128.rs":"d83e21a91efbaa801a82ae499111bdda2d31edaa620e78c0199eba42d69c9ee6","tests/test.rs":"810013d7fd77b738abd0ace90ce2f2f3e219c757652eabab29bc1c0ce4a73b24"},"package":"cdd8e04bd9c52e0342b406469d494fcb033be4bdbe5c606016defbb1681411e1"}
|
||||
{"files":{"Cargo.toml":"b5c36a5bffa3623f84002fa884157ae303d2dae68d2f8a6d73ba87e82d7c56d7","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","README.md":"ea5abae24fdf6d9be51c80427bd12b95d146c4660e872599910cf062d6fbab9a","src/ext.rs":"a9fed3a1a4c9d3f2de717ba808af99291b995db2cbf8067f4b6927c39cc62bc6","src/format.rs":"46bf0859e6da5ec195a409ba8bbd2029d32a30d169c30c4c8aee7020f478a8a2","src/ident_fragment.rs":"0824dca06942d8e097d220db0ace0fe3ae7cf08f0a86e9828d012c131b6590c2","src/lib.rs":"bce63d6d9822373dab6f9a1f3df419b5753625e618474c304f05ab3b38845760","src/runtime.rs":"13263adfb56e2c597c69277b3500ab35ca8a08f60ba6a66f921ffa5cdc09bde2","src/spanned.rs":"adc0ed742ad17327c375879472d435cea168c208c303f53eb93cb2c0f10f3650","src/to_tokens.rs":"e589c1643479a9003d4dd1d9fa63714042b106f1b16d8ea3903cfe2f73a020f5","tests/compiletest.rs":"0a52a44786aea1c299c695bf948b2ed2081e4cc344e5c2cadceab4eb03d0010d","tests/test.rs":"92062fb9ba4a3b74345fede8e09e1d376107f98dcd79931a794433fa2d74aeb5","tests/ui/does-not-have-iter-interpolated-dup.rs":"ad13eea21d4cdd2ab6c082f633392e1ff20fb0d1af5f2177041e0bf7f30da695","tests/ui/does-not-have-iter-interpolated.rs":"83a5b3f240651adcbe4b6e51076d76d653ad439b37442cf4054f1fd3c073f3b7","tests/ui/does-not-have-iter-separated.rs":"fe413c48331d5e3a7ae5fef6a5892a90c72f610d54595879eb49d0a94154ba3f","tests/ui/does-not-have-iter.rs":"09dc9499d861b63cebb0848b855b78e2dc9497bfde37ba6339f3625ae009a62f","tests/ui/not-quotable.rs":"5759d0884943417609f28faadc70254a3e2fd3d9bd6ff7297a3fb70a77fafd8a","tests/ui/not-repeatable.rs":"b08405e02d46712d47e48ec8d0d68c93d8ebf3bb299714a373c2c954de79f6bd","tests/ui/wrong-type-span.rs":"5f310cb7fde3ef51bad01e7f286d244e3b6e67396cd2ea7eab77275c9d902699"},"package":"053a8c8bcc71fcce321828dc897a98ab9760bef03a4fc36693c231e5b3216cfe"}
|
|
@ -3,7 +3,7 @@
|
|||
# When uploading crates to the registry Cargo will automatically
|
||||
# "normalize" Cargo.toml files for maximal compatibility
|
||||
# with all versions of Cargo and also rewrite `path` dependencies
|
||||
# to registry (e.g. crates.io) dependencies
|
||||
# to registry (e.g., crates.io) dependencies
|
||||
#
|
||||
# If you believe there's an error in this file please file an
|
||||
# issue against the rust-lang/cargo repository. If you're
|
||||
|
@ -11,8 +11,9 @@
|
|||
# will likely look very different (and much more reasonable)
|
||||
|
||||
[package]
|
||||
edition = "2018"
|
||||
name = "quote"
|
||||
version = "0.6.11"
|
||||
version = "1.0.2"
|
||||
authors = ["David Tolnay <dtolnay@gmail.com>"]
|
||||
include = ["Cargo.toml", "src/**/*.rs", "tests/**/*.rs", "README.md", "LICENSE-APACHE", "LICENSE-MIT"]
|
||||
description = "Quasi-quoting macro quote!(...)"
|
||||
|
@ -20,11 +21,19 @@ documentation = "https://docs.rs/quote/"
|
|||
readme = "README.md"
|
||||
keywords = ["syn"]
|
||||
categories = ["development-tools::procedural-macro-helpers"]
|
||||
license = "MIT/Apache-2.0"
|
||||
license = "MIT OR Apache-2.0"
|
||||
repository = "https://github.com/dtolnay/quote"
|
||||
|
||||
[lib]
|
||||
name = "quote"
|
||||
[dependencies.proc-macro2]
|
||||
version = "0.4.21"
|
||||
version = "1.0"
|
||||
default-features = false
|
||||
[dev-dependencies.rustversion]
|
||||
version = "0.1"
|
||||
|
||||
[dev-dependencies.trybuild]
|
||||
version = "1.0"
|
||||
|
||||
[features]
|
||||
default = ["proc-macro"]
|
||||
|
|
|
@ -8,13 +8,13 @@ Rust Quasi-Quoting
|
|||
This crate provides the [`quote!`] macro for turning Rust syntax tree data
|
||||
structures into tokens of source code.
|
||||
|
||||
[`quote!`]: https://docs.rs/quote/0.6/quote/macro.quote.html
|
||||
[`quote!`]: https://docs.rs/quote/1.0/quote/macro.quote.html
|
||||
|
||||
Procedural macros in Rust receive a stream of tokens as input, execute arbitrary
|
||||
Rust code to determine how to manipulate those tokens, and produce a stream of
|
||||
tokens to hand back to the compiler to compile into the caller's crate.
|
||||
Quasi-quoting is a solution to one piece of that -- producing tokens to return
|
||||
to the compiler.
|
||||
Quasi-quoting is a solution to one piece of that — producing tokens to
|
||||
return to the compiler.
|
||||
|
||||
The idea of quasi-quoting is that we write *code* that we treat as *data*.
|
||||
Within the `quote!` macro, we can write what looks like code to our text editor
|
||||
|
@ -35,7 +35,7 @@ first support for procedural macros in Rust 1.15.0.*
|
|||
|
||||
```toml
|
||||
[dependencies]
|
||||
quote = "0.6"
|
||||
quote = "1.0"
|
||||
```
|
||||
|
||||
## Syntax
|
||||
|
@ -44,13 +44,13 @@ The quote crate provides a [`quote!`] macro within which you can write Rust code
|
|||
that gets packaged into a [`TokenStream`] and can be treated as data. You should
|
||||
think of `TokenStream` as representing a fragment of Rust source code.
|
||||
|
||||
[`TokenStream`]: https://docs.rs/proc-macro2/0.4/proc_macro2/struct.TokenStream.html
|
||||
[`TokenStream`]: https://docs.rs/proc-macro2/1.0/proc_macro2/struct.TokenStream.html
|
||||
|
||||
Within the `quote!` macro, interpolation is done with `#var`. Any type
|
||||
implementing the [`quote::ToTokens`] trait can be interpolated. This includes
|
||||
most Rust primitive types as well as most of the syntax tree types from [`syn`].
|
||||
|
||||
[`quote::ToTokens`]: https://docs.rs/quote/0.6/quote/trait.ToTokens.html
|
||||
[`quote::ToTokens`]: https://docs.rs/quote/1.0/quote/trait.ToTokens.html
|
||||
[`syn`]: https://github.com/dtolnay/syn
|
||||
|
||||
```rust
|
||||
|
@ -148,8 +148,20 @@ quote! {
|
|||
}
|
||||
```
|
||||
|
||||
The solution is to perform token-level manipulations using the APIs provided by
|
||||
Syn and proc-macro2.
|
||||
The solution is to build a new identifier token with the correct value. As this
|
||||
is such a common case, the `format_ident!` macro provides a convenient utility
|
||||
for doing so correctly.
|
||||
|
||||
```rust
|
||||
let varname = format_ident!("_{}", ident);
|
||||
quote! {
|
||||
let mut #varname = 0;
|
||||
}
|
||||
```
|
||||
|
||||
Alternatively, the APIs provided by Syn and proc-macro2 can be used to directly
|
||||
build the identifier. This is roughly equivalent to the above, but will not
|
||||
handle `ident` being a raw identifier.
|
||||
|
||||
```rust
|
||||
let concatenated = format!("_{}", ident);
|
||||
|
@ -200,42 +212,26 @@ Any interpolated tokens preserve the `Span` information provided by their
|
|||
`ToTokens` implementation. Tokens that originate within a `quote!` invocation
|
||||
are spanned with [`Span::call_site()`].
|
||||
|
||||
[`Span::call_site()`]: https://docs.rs/proc-macro2/0.4/proc_macro2/struct.Span.html#method.call_site
|
||||
[`Span::call_site()`]: https://docs.rs/proc-macro2/1.0/proc_macro2/struct.Span.html#method.call_site
|
||||
|
||||
A different span can be provided explicitly through the [`quote_spanned!`]
|
||||
macro.
|
||||
|
||||
[`quote_spanned!`]: https://docs.rs/quote/0.6/quote/macro.quote_spanned.html
|
||||
[`quote_spanned!`]: https://docs.rs/quote/1.0/quote/macro.quote_spanned.html
|
||||
|
||||
### Limitations
|
||||
<br>
|
||||
|
||||
- A non-repeating variable may not be interpolated inside of a repeating block
|
||||
([#7]).
|
||||
- The same variable may not be interpolated more than once inside of a repeating
|
||||
block ([#8]).
|
||||
#### License
|
||||
|
||||
[#7]: https://github.com/dtolnay/quote/issues/7
|
||||
[#8]: https://github.com/dtolnay/quote/issues/8
|
||||
<sup>
|
||||
Licensed under either of <a href="LICENSE-APACHE">Apache License, Version
|
||||
2.0</a> or <a href="LICENSE-MIT">MIT license</a> at your option.
|
||||
</sup>
|
||||
|
||||
### Recursion limit
|
||||
|
||||
The `quote!` macro relies on deep recursion so some large invocations may fail
|
||||
with "recursion limit reached" when you compile. If it fails, bump up the
|
||||
recursion limit by adding `#![recursion_limit = "128"]` to your crate. An even
|
||||
higher limit may be necessary for especially large invocations. You don't need
|
||||
this unless the compiler tells you that you need it.
|
||||
|
||||
## License
|
||||
|
||||
Licensed under either of
|
||||
|
||||
* Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
|
||||
* MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
|
||||
|
||||
at your option.
|
||||
|
||||
### Contribution
|
||||
<br>
|
||||
|
||||
<sub>
|
||||
Unless you explicitly state otherwise, any contribution intentionally submitted
|
||||
for inclusion in this crate by you, as defined in the Apache-2.0 license, shall
|
||||
be dual licensed as above, without any additional terms or conditions.
|
||||
</sub>
|
||||
|
|
|
@ -17,7 +17,7 @@ pub trait TokenStreamExt: private::Sealed {
|
|||
|
||||
/// For use by `ToTokens` implementations.
|
||||
///
|
||||
/// ```edition2018
|
||||
/// ```
|
||||
/// # use quote::{quote, TokenStreamExt, ToTokens};
|
||||
/// # use proc_macro2::TokenStream;
|
||||
/// #
|
||||
|
@ -32,29 +32,29 @@ pub trait TokenStreamExt: private::Sealed {
|
|||
/// let tokens = quote!(#X);
|
||||
/// assert_eq!(tokens.to_string(), "true false");
|
||||
/// ```
|
||||
fn append_all<T, I>(&mut self, iter: I)
|
||||
fn append_all<I>(&mut self, iter: I)
|
||||
where
|
||||
T: ToTokens,
|
||||
I: IntoIterator<Item = T>;
|
||||
I: IntoIterator,
|
||||
I::Item: ToTokens;
|
||||
|
||||
/// For use by `ToTokens` implementations.
|
||||
///
|
||||
/// Appends all of the items in the iterator `I`, separated by the tokens
|
||||
/// `U`.
|
||||
fn append_separated<T, I, U>(&mut self, iter: I, op: U)
|
||||
fn append_separated<I, U>(&mut self, iter: I, op: U)
|
||||
where
|
||||
T: ToTokens,
|
||||
I: IntoIterator<Item = T>,
|
||||
I: IntoIterator,
|
||||
I::Item: ToTokens,
|
||||
U: ToTokens;
|
||||
|
||||
/// For use by `ToTokens` implementations.
|
||||
///
|
||||
/// Appends all tokens in the iterator `I`, appending `U` after each
|
||||
/// element, including after the last element of the iterator.
|
||||
fn append_terminated<T, I, U>(&mut self, iter: I, term: U)
|
||||
fn append_terminated<I, U>(&mut self, iter: I, term: U)
|
||||
where
|
||||
T: ToTokens,
|
||||
I: IntoIterator<Item = T>,
|
||||
I: IntoIterator,
|
||||
I::Item: ToTokens,
|
||||
U: ToTokens;
|
||||
}
|
||||
|
||||
|
@ -66,20 +66,20 @@ impl TokenStreamExt for TokenStream {
|
|||
self.extend(iter::once(token.into()));
|
||||
}
|
||||
|
||||
fn append_all<T, I>(&mut self, iter: I)
|
||||
fn append_all<I>(&mut self, iter: I)
|
||||
where
|
||||
T: ToTokens,
|
||||
I: IntoIterator<Item = T>,
|
||||
I: IntoIterator,
|
||||
I::Item: ToTokens,
|
||||
{
|
||||
for token in iter {
|
||||
token.to_tokens(self);
|
||||
}
|
||||
}
|
||||
|
||||
fn append_separated<T, I, U>(&mut self, iter: I, op: U)
|
||||
fn append_separated<I, U>(&mut self, iter: I, op: U)
|
||||
where
|
||||
T: ToTokens,
|
||||
I: IntoIterator<Item = T>,
|
||||
I: IntoIterator,
|
||||
I::Item: ToTokens,
|
||||
U: ToTokens,
|
||||
{
|
||||
for (i, token) in iter.into_iter().enumerate() {
|
||||
|
@ -90,10 +90,10 @@ impl TokenStreamExt for TokenStream {
|
|||
}
|
||||
}
|
||||
|
||||
fn append_terminated<T, I, U>(&mut self, iter: I, term: U)
|
||||
fn append_terminated<I, U>(&mut self, iter: I, term: U)
|
||||
where
|
||||
T: ToTokens,
|
||||
I: IntoIterator<Item = T>,
|
||||
I: IntoIterator,
|
||||
I::Item: ToTokens,
|
||||
U: ToTokens,
|
||||
{
|
||||
for token in iter {
|
||||
|
|
|
@ -0,0 +1,164 @@
|
|||
/// Formatting macro for constructing `Ident`s.
|
||||
///
|
||||
/// <br>
|
||||
///
|
||||
/// # Syntax
|
||||
///
|
||||
/// Syntax is copied from the [`format!`] macro, supporting both positional and
|
||||
/// named arguments.
|
||||
///
|
||||
/// Only a limited set of formatting traits are supported. The current mapping
|
||||
/// of format types to traits is:
|
||||
///
|
||||
/// * `{}` ⇒ [`IdentFragment`]
|
||||
/// * `{:o}` ⇒ [`Octal`](`std::fmt::Octal`)
|
||||
/// * `{:x}` ⇒ [`LowerHex`](`std::fmt::LowerHex`)
|
||||
/// * `{:X}` ⇒ [`UpperHex`](`std::fmt::UpperHex`)
|
||||
/// * `{:b}` ⇒ [`Binary`](`std::fmt::Binary`)
|
||||
///
|
||||
/// See [`std::fmt`] for more information.
|
||||
///
|
||||
/// <br>
|
||||
///
|
||||
/// # IdentFragment
|
||||
///
|
||||
/// Unlike `format!`, this macro uses the [`IdentFragment`] formatting trait by
|
||||
/// default. This trait is like `Display`, with a few differences:
|
||||
///
|
||||
/// * `IdentFragment` is only implemented for a limited set of types, such as
|
||||
/// unsigned integers and strings.
|
||||
/// * [`Ident`] arguments will have their `r#` prefixes stripped, if present.
|
||||
///
|
||||
/// [`Ident`]: `proc_macro2::Ident`
|
||||
///
|
||||
/// <br>
|
||||
///
|
||||
/// # Hygiene
|
||||
///
|
||||
/// The [`Span`] of the first `Ident` argument is used as the span of the final
|
||||
/// identifier, falling back to [`Span::call_site`] when no identifiers are
|
||||
/// provided.
|
||||
///
|
||||
/// ```
|
||||
/// # use quote::format_ident;
|
||||
/// # let ident = format_ident!("Ident");
|
||||
/// // If `ident` is an Ident, the span of `my_ident` will be inherited from it.
|
||||
/// let my_ident = format_ident!("My{}{}", ident, "IsCool");
|
||||
/// assert_eq!(my_ident, "MyIdentIsCool");
|
||||
/// ```
|
||||
///
|
||||
/// Alternatively, the span can be overridden by passing the `span` named
|
||||
/// argument.
|
||||
///
|
||||
/// ```
|
||||
/// # use quote::format_ident;
|
||||
/// # const IGNORE_TOKENS: &'static str = stringify! {
|
||||
/// let my_span = /* ... */;
|
||||
/// # };
|
||||
/// # let my_span = proc_macro2::Span::call_site();
|
||||
/// format_ident!("MyIdent", span = my_span);
|
||||
/// ```
|
||||
///
|
||||
/// [`Span`]: `proc_macro2::Span`
|
||||
/// [`Span::call_site`]: `proc_macro2::Span::call_site`
|
||||
///
|
||||
/// <p><br></p>
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// This method will panic if the resulting formatted string is not a valid
|
||||
/// identifier.
|
||||
///
|
||||
/// <br>
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Composing raw and non-raw identifiers:
|
||||
/// ```
|
||||
/// # use quote::format_ident;
|
||||
/// let my_ident = format_ident!("My{}", "Ident");
|
||||
/// assert_eq!(my_ident, "MyIdent");
|
||||
///
|
||||
/// let raw = format_ident!("r#Raw");
|
||||
/// assert_eq!(raw, "r#Raw");
|
||||
///
|
||||
/// let my_ident_raw = format_ident!("{}Is{}", my_ident, raw);
|
||||
/// assert_eq!(my_ident_raw, "MyIdentIsRaw");
|
||||
/// ```
|
||||
///
|
||||
/// Integer formatting options:
|
||||
/// ```
|
||||
/// # use quote::format_ident;
|
||||
/// let num: u32 = 10;
|
||||
///
|
||||
/// let decimal = format_ident!("Id_{}", num);
|
||||
/// assert_eq!(decimal, "Id_10");
|
||||
///
|
||||
/// let octal = format_ident!("Id_{:o}", num);
|
||||
/// assert_eq!(octal, "Id_12");
|
||||
///
|
||||
/// let binary = format_ident!("Id_{:b}", num);
|
||||
/// assert_eq!(binary, "Id_1010");
|
||||
///
|
||||
/// let lower_hex = format_ident!("Id_{:x}", num);
|
||||
/// assert_eq!(lower_hex, "Id_a");
|
||||
///
|
||||
/// let upper_hex = format_ident!("Id_{:X}", num);
|
||||
/// assert_eq!(upper_hex, "Id_A");
|
||||
/// ```
|
||||
#[macro_export]
|
||||
macro_rules! format_ident {
|
||||
($fmt:expr) => {
|
||||
$crate::format_ident_impl!([
|
||||
::std::option::Option::None,
|
||||
$fmt
|
||||
])
|
||||
};
|
||||
|
||||
($fmt:expr, $($rest:tt)*) => {
|
||||
$crate::format_ident_impl!([
|
||||
::std::option::Option::None,
|
||||
$fmt
|
||||
] $($rest)*)
|
||||
};
|
||||
}
|
||||
|
||||
#[macro_export]
|
||||
#[doc(hidden)]
|
||||
macro_rules! format_ident_impl {
|
||||
// Final state
|
||||
([$span:expr, $($fmt:tt)*]) => {
|
||||
$crate::__rt::mk_ident(&format!($($fmt)*), $span)
|
||||
};
|
||||
|
||||
// Span argument
|
||||
([$old:expr, $($fmt:tt)*] span = $span:expr) => {
|
||||
$crate::format_ident_impl!([$old, $($fmt)*] span = $span,)
|
||||
};
|
||||
([$old:expr, $($fmt:tt)*] span = $span:expr, $($rest:tt)*) => {
|
||||
$crate::format_ident_impl!([
|
||||
::std::option::Option::Some::<$crate::__rt::Span>($span),
|
||||
$($fmt)*
|
||||
] $($rest)*)
|
||||
};
|
||||
|
||||
// Named argument
|
||||
([$span:expr, $($fmt:tt)*] $name:ident = $arg:expr) => {
|
||||
$crate::format_ident_impl!([$span, $($fmt)*] $name = $arg,)
|
||||
};
|
||||
([$span:expr, $($fmt:tt)*] $name:ident = $arg:expr, $($rest:tt)*) => {
|
||||
match $crate::__rt::IdentFragmentAdapter(&$arg) {
|
||||
arg => $crate::format_ident_impl!([$span.or(arg.span()), $($fmt)*, $name = arg] $($rest)*),
|
||||
}
|
||||
};
|
||||
|
||||
// Positional argument
|
||||
([$span:expr, $($fmt:tt)*] $arg:expr) => {
|
||||
$crate::format_ident_impl!([$span, $($fmt)*] $arg,)
|
||||
};
|
||||
([$span:expr, $($fmt:tt)*] $arg:expr, $($rest:tt)*) => {
|
||||
match $crate::__rt::IdentFragmentAdapter(&$arg) {
|
||||
arg => $crate::format_ident_impl!([$span.or(arg.span()), $($fmt)*, arg] $($rest)*),
|
||||
}
|
||||
};
|
||||
}
|
|
@ -0,0 +1,72 @@
|
|||
use proc_macro2::{Ident, Span};
|
||||
use std::fmt;
|
||||
|
||||
/// Specialized formatting trait used by `format_ident!`.
|
||||
///
|
||||
/// [`Ident`] arguments formatted using this trait will have their `r#` prefix
|
||||
/// stripped, if present.
|
||||
///
|
||||
/// See [`format_ident!`] for more information.
|
||||
pub trait IdentFragment {
|
||||
/// Format this value as an identifier fragment.
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result;
|
||||
|
||||
/// Span associated with this `IdentFragment`.
|
||||
///
|
||||
/// If non-`None`, may be inherited by formatted identifiers.
|
||||
fn span(&self) -> Option<Span> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T: IdentFragment + ?Sized> IdentFragment for &'a T {
|
||||
fn span(&self) -> Option<Span> {
|
||||
<T as IdentFragment>::span(*self)
|
||||
}
|
||||
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
IdentFragment::fmt(*self, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T: IdentFragment + ?Sized> IdentFragment for &'a mut T {
|
||||
fn span(&self) -> Option<Span> {
|
||||
<T as IdentFragment>::span(*self)
|
||||
}
|
||||
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
IdentFragment::fmt(*self, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl IdentFragment for Ident {
|
||||
fn span(&self) -> Option<Span> {
|
||||
Some(self.span())
|
||||
}
|
||||
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
let id = self.to_string();
|
||||
if id.starts_with("r#") {
|
||||
fmt::Display::fmt(&id[2..], f)
|
||||
} else {
|
||||
fmt::Display::fmt(&id[..], f)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Limited set of types which this is implemented for, as we want to avoid types
|
||||
// which will often include non-identifier characters in their `Display` impl.
|
||||
macro_rules! ident_fragment_display {
|
||||
($($T:ty),*) => {
|
||||
$(
|
||||
impl IdentFragment for $T {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
fmt::Display::fmt(self, f)
|
||||
}
|
||||
}
|
||||
)*
|
||||
}
|
||||
}
|
||||
|
||||
ident_fragment_display!(bool, str, String);
|
||||
ident_fragment_display!(u8, u16, u32, u64, u128, usize);
|
Разница между файлами не показана из-за своего большого размера
Загрузить разницу
|
@ -0,0 +1,373 @@
|
|||
use crate::{IdentFragment, ToTokens, TokenStreamExt};
|
||||
use std::fmt;
|
||||
use std::ops::BitOr;
|
||||
|
||||
pub use proc_macro2::*;
|
||||
|
||||
pub struct HasIterator; // True
|
||||
pub struct ThereIsNoIteratorInRepetition; // False
|
||||
|
||||
impl BitOr<ThereIsNoIteratorInRepetition> for ThereIsNoIteratorInRepetition {
|
||||
type Output = ThereIsNoIteratorInRepetition;
|
||||
fn bitor(self, _rhs: ThereIsNoIteratorInRepetition) -> ThereIsNoIteratorInRepetition {
|
||||
ThereIsNoIteratorInRepetition
|
||||
}
|
||||
}
|
||||
|
||||
impl BitOr<ThereIsNoIteratorInRepetition> for HasIterator {
|
||||
type Output = HasIterator;
|
||||
fn bitor(self, _rhs: ThereIsNoIteratorInRepetition) -> HasIterator {
|
||||
HasIterator
|
||||
}
|
||||
}
|
||||
|
||||
impl BitOr<HasIterator> for ThereIsNoIteratorInRepetition {
|
||||
type Output = HasIterator;
|
||||
fn bitor(self, _rhs: HasIterator) -> HasIterator {
|
||||
HasIterator
|
||||
}
|
||||
}
|
||||
|
||||
impl BitOr<HasIterator> for HasIterator {
|
||||
type Output = HasIterator;
|
||||
fn bitor(self, _rhs: HasIterator) -> HasIterator {
|
||||
HasIterator
|
||||
}
|
||||
}
|
||||
|
||||
/// Extension traits used by the implementation of `quote!`. These are defined
|
||||
/// in separate traits, rather than as a single trait due to ambiguity issues.
|
||||
///
|
||||
/// These traits expose a `quote_into_iter` method which should allow calling
|
||||
/// whichever impl happens to be applicable. Calling that method repeatedly on
|
||||
/// the returned value should be idempotent.
|
||||
pub mod ext {
|
||||
use super::RepInterp;
|
||||
use super::{HasIterator as HasIter, ThereIsNoIteratorInRepetition as DoesNotHaveIter};
|
||||
use crate::ToTokens;
|
||||
use std::collections::btree_set::{self, BTreeSet};
|
||||
use std::slice;
|
||||
|
||||
/// Extension trait providing the `quote_into_iter` method on iterators.
|
||||
pub trait RepIteratorExt: Iterator + Sized {
|
||||
fn quote_into_iter(self) -> (Self, HasIter) {
|
||||
(self, HasIter)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Iterator> RepIteratorExt for T {}
|
||||
|
||||
/// Extension trait providing the `quote_into_iter` method for
|
||||
/// non-iterable types. These types interpolate the same value in each
|
||||
/// iteration of the repetition.
|
||||
pub trait RepToTokensExt {
|
||||
/// Pretend to be an iterator for the purposes of `quote_into_iter`.
|
||||
/// This allows repeated calls to `quote_into_iter` to continue
|
||||
/// correctly returning DoesNotHaveIter.
|
||||
fn next(&self) -> Option<&Self> {
|
||||
Some(self)
|
||||
}
|
||||
|
||||
fn quote_into_iter(&self) -> (&Self, DoesNotHaveIter) {
|
||||
(self, DoesNotHaveIter)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ToTokens + ?Sized> RepToTokensExt for T {}
|
||||
|
||||
/// Extension trait providing the `quote_into_iter` method for types that
|
||||
/// can be referenced as an iterator.
|
||||
pub trait RepAsIteratorExt<'q> {
|
||||
type Iter: Iterator;
|
||||
|
||||
fn quote_into_iter(&'q self) -> (Self::Iter, HasIter);
|
||||
}
|
||||
|
||||
impl<'q, 'a, T: RepAsIteratorExt<'q> + ?Sized> RepAsIteratorExt<'q> for &'a T {
|
||||
type Iter = T::Iter;
|
||||
|
||||
fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) {
|
||||
<T as RepAsIteratorExt>::quote_into_iter(*self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'q, 'a, T: RepAsIteratorExt<'q> + ?Sized> RepAsIteratorExt<'q> for &'a mut T {
|
||||
type Iter = T::Iter;
|
||||
|
||||
fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) {
|
||||
<T as RepAsIteratorExt>::quote_into_iter(*self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'q, T: 'q> RepAsIteratorExt<'q> for [T] {
|
||||
type Iter = slice::Iter<'q, T>;
|
||||
|
||||
fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) {
|
||||
(self.iter(), HasIter)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'q, T: 'q> RepAsIteratorExt<'q> for Vec<T> {
|
||||
type Iter = slice::Iter<'q, T>;
|
||||
|
||||
fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) {
|
||||
(self.iter(), HasIter)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'q, T: 'q> RepAsIteratorExt<'q> for BTreeSet<T> {
|
||||
type Iter = btree_set::Iter<'q, T>;
|
||||
|
||||
fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) {
|
||||
(self.iter(), HasIter)
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! array_rep_slice {
|
||||
($($l:tt)*) => {
|
||||
$(
|
||||
impl<'q, T: 'q> RepAsIteratorExt<'q> for [T; $l] {
|
||||
type Iter = slice::Iter<'q, T>;
|
||||
|
||||
fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) {
|
||||
(self.iter(), HasIter)
|
||||
}
|
||||
}
|
||||
)*
|
||||
}
|
||||
}
|
||||
|
||||
array_rep_slice!(
|
||||
0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
|
||||
17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32
|
||||
);
|
||||
|
||||
impl<'q, T: RepAsIteratorExt<'q>> RepAsIteratorExt<'q> for RepInterp<T> {
|
||||
type Iter = T::Iter;
|
||||
|
||||
fn quote_into_iter(&'q self) -> (Self::Iter, HasIter) {
|
||||
self.0.quote_into_iter()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Helper type used within interpolations to allow for repeated binding names.
|
||||
// Implements the relevant traits, and exports a dummy `next()` method.
|
||||
#[derive(Copy, Clone)]
|
||||
pub struct RepInterp<T>(pub T);
|
||||
|
||||
impl<T> RepInterp<T> {
|
||||
// This method is intended to look like `Iterator::next`, and is called when
|
||||
// a name is bound multiple times, as the previous binding will shadow the
|
||||
// original `Iterator` object. This allows us to avoid advancing the
|
||||
// iterator multiple times per iteration.
|
||||
pub fn next(self) -> Option<T> {
|
||||
Some(self.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Iterator> Iterator for RepInterp<T> {
|
||||
type Item = T::Item;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.0.next()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ToTokens> ToTokens for RepInterp<T> {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.0.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
fn is_ident_start(c: u8) -> bool {
|
||||
(b'a' <= c && c <= b'z') || (b'A' <= c && c <= b'Z') || c == b'_'
|
||||
}
|
||||
|
||||
fn is_ident_continue(c: u8) -> bool {
|
||||
(b'a' <= c && c <= b'z') || (b'A' <= c && c <= b'Z') || c == b'_' || (b'0' <= c && c <= b'9')
|
||||
}
|
||||
|
||||
fn is_ident(token: &str) -> bool {
|
||||
let mut iter = token.bytes();
|
||||
let first_ok = iter.next().map(is_ident_start).unwrap_or(false);
|
||||
|
||||
first_ok && iter.all(is_ident_continue)
|
||||
}
|
||||
|
||||
pub fn parse(tokens: &mut TokenStream, span: Span, s: &str) {
|
||||
if is_ident(s) {
|
||||
// Fast path, since idents are the most common token.
|
||||
tokens.append(Ident::new(s, span));
|
||||
} else {
|
||||
let s: TokenStream = s.parse().expect("invalid token stream");
|
||||
tokens.extend(s.into_iter().map(|mut t| {
|
||||
t.set_span(span);
|
||||
t
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! push_punct {
|
||||
($name:ident $char1:tt) => {
|
||||
pub fn $name(tokens: &mut TokenStream, span: Span) {
|
||||
let mut punct = Punct::new($char1, Spacing::Alone);
|
||||
punct.set_span(span);
|
||||
tokens.append(punct);
|
||||
}
|
||||
};
|
||||
($name:ident $char1:tt $char2:tt) => {
|
||||
pub fn $name(tokens: &mut TokenStream, span: Span) {
|
||||
let mut punct = Punct::new($char1, Spacing::Joint);
|
||||
punct.set_span(span);
|
||||
tokens.append(punct);
|
||||
let mut punct = Punct::new($char2, Spacing::Alone);
|
||||
punct.set_span(span);
|
||||
tokens.append(punct);
|
||||
}
|
||||
};
|
||||
($name:ident $char1:tt $char2:tt $char3:tt) => {
|
||||
pub fn $name(tokens: &mut TokenStream, span: Span) {
|
||||
let mut punct = Punct::new($char1, Spacing::Joint);
|
||||
punct.set_span(span);
|
||||
tokens.append(punct);
|
||||
let mut punct = Punct::new($char2, Spacing::Joint);
|
||||
punct.set_span(span);
|
||||
tokens.append(punct);
|
||||
let mut punct = Punct::new($char3, Spacing::Alone);
|
||||
punct.set_span(span);
|
||||
tokens.append(punct);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
push_punct!(push_add '+');
|
||||
push_punct!(push_add_eq '+' '=');
|
||||
push_punct!(push_and '&');
|
||||
push_punct!(push_and_and '&' '&');
|
||||
push_punct!(push_and_eq '&' '=');
|
||||
push_punct!(push_at '@');
|
||||
push_punct!(push_bang '!');
|
||||
push_punct!(push_caret '^');
|
||||
push_punct!(push_caret_eq '^' '=');
|
||||
push_punct!(push_colon ':');
|
||||
push_punct!(push_colon2 ':' ':');
|
||||
push_punct!(push_comma ',');
|
||||
push_punct!(push_div '/');
|
||||
push_punct!(push_div_eq '/' '=');
|
||||
push_punct!(push_dot '.');
|
||||
push_punct!(push_dot2 '.' '.');
|
||||
push_punct!(push_dot3 '.' '.' '.');
|
||||
push_punct!(push_dot_dot_eq '.' '.' '=');
|
||||
push_punct!(push_eq '=');
|
||||
push_punct!(push_eq_eq '=' '=');
|
||||
push_punct!(push_ge '>' '=');
|
||||
push_punct!(push_gt '>');
|
||||
push_punct!(push_le '<' '=');
|
||||
push_punct!(push_lt '<');
|
||||
push_punct!(push_mul_eq '*' '=');
|
||||
push_punct!(push_ne '!' '=');
|
||||
push_punct!(push_or '|');
|
||||
push_punct!(push_or_eq '|' '=');
|
||||
push_punct!(push_or_or '|' '|');
|
||||
push_punct!(push_pound '#');
|
||||
push_punct!(push_question '?');
|
||||
push_punct!(push_rarrow '-' '>');
|
||||
push_punct!(push_larrow '<' '-');
|
||||
push_punct!(push_rem '%');
|
||||
push_punct!(push_rem_eq '%' '=');
|
||||
push_punct!(push_fat_arrow '=' '>');
|
||||
push_punct!(push_semi ';');
|
||||
push_punct!(push_shl '<' '<');
|
||||
push_punct!(push_shl_eq '<' '<' '=');
|
||||
push_punct!(push_shr '>' '>');
|
||||
push_punct!(push_shr_eq '>' '>' '=');
|
||||
push_punct!(push_star '*');
|
||||
push_punct!(push_sub '-');
|
||||
push_punct!(push_sub_eq '-' '=');
|
||||
|
||||
// Helper method for constructing identifiers from the `format_ident!` macro,
|
||||
// handling `r#` prefixes.
|
||||
//
|
||||
// Directly parsing the input string may produce a valid identifier,
|
||||
// although the input string was invalid, due to ignored characters such as
|
||||
// whitespace and comments. Instead, we always create a non-raw identifier
|
||||
// to validate that the string is OK, and only parse again if needed.
|
||||
//
|
||||
// The `is_ident` method defined above is insufficient for validation, as it
|
||||
// will reject non-ASCII identifiers.
|
||||
pub fn mk_ident(id: &str, span: Option<Span>) -> Ident {
|
||||
let span = span.unwrap_or_else(Span::call_site);
|
||||
|
||||
let is_raw = id.starts_with("r#");
|
||||
let unraw = Ident::new(if is_raw { &id[2..] } else { id }, span);
|
||||
if !is_raw {
|
||||
return unraw;
|
||||
}
|
||||
|
||||
// At this point, the identifier is raw, and the unraw-ed version of it was
|
||||
// successfully converted into an identifier. Try to produce a valid raw
|
||||
// identifier by running the `TokenStream` parser, and unwrapping the first
|
||||
// token as an `Ident`.
|
||||
//
|
||||
// FIXME: When `Ident::new_raw` becomes stable, this method should be
|
||||
// updated to call it when available.
|
||||
match id.parse::<TokenStream>() {
|
||||
Ok(ts) => {
|
||||
let mut iter = ts.into_iter();
|
||||
match (iter.next(), iter.next()) {
|
||||
(Some(TokenTree::Ident(mut id)), None) => {
|
||||
id.set_span(span);
|
||||
id
|
||||
}
|
||||
_ => unreachable!("valid raw ident fails to parse"),
|
||||
}
|
||||
}
|
||||
Err(_) => unreachable!("valid raw ident fails to parse"),
|
||||
}
|
||||
}
|
||||
|
||||
// Adapts from `IdentFragment` to `fmt::Display` for use by the `format_ident!`
|
||||
// macro, and exposes span information from these fragments.
|
||||
//
|
||||
// This struct also has forwarding implementations of the formatting traits
|
||||
// `Octal`, `LowerHex`, `UpperHex`, and `Binary` to allow for their use within
|
||||
// `format_ident!`.
|
||||
#[derive(Copy, Clone)]
|
||||
pub struct IdentFragmentAdapter<T: IdentFragment>(pub T);
|
||||
|
||||
impl<T: IdentFragment> IdentFragmentAdapter<T> {
|
||||
pub fn span(&self) -> Option<Span> {
|
||||
self.0.span()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: IdentFragment> fmt::Display for IdentFragmentAdapter<T> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
IdentFragment::fmt(&self.0, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: IdentFragment + fmt::Octal> fmt::Octal for IdentFragmentAdapter<T> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
fmt::Octal::fmt(&self.0, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: IdentFragment + fmt::LowerHex> fmt::LowerHex for IdentFragmentAdapter<T> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
fmt::LowerHex::fmt(&self.0, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: IdentFragment + fmt::UpperHex> fmt::UpperHex for IdentFragmentAdapter<T> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
fmt::UpperHex::fmt(&self.0, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: IdentFragment + fmt::Binary> fmt::Binary for IdentFragmentAdapter<T> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
fmt::Binary::fmt(&self.0, f)
|
||||
}
|
||||
}
|
|
@ -0,0 +1,42 @@
|
|||
use crate::ToTokens;
|
||||
use proc_macro2::{Span, TokenStream};
|
||||
|
||||
pub trait Spanned {
|
||||
fn __span(&self) -> Span;
|
||||
}
|
||||
|
||||
impl Spanned for Span {
|
||||
fn __span(&self) -> Span {
|
||||
*self
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + ToTokens> Spanned for T {
|
||||
fn __span(&self) -> Span {
|
||||
join_spans(self.into_token_stream())
|
||||
}
|
||||
}
|
||||
|
||||
fn join_spans(tokens: TokenStream) -> Span {
|
||||
let mut iter = tokens.into_iter().filter_map(|tt| {
|
||||
// FIXME: This shouldn't be required, since optimally spans should
|
||||
// never be invalid. This filter_map can probably be removed when
|
||||
// https://github.com/rust-lang/rust/issues/43081 is resolved.
|
||||
let span = tt.span();
|
||||
let debug = format!("{:?}", span);
|
||||
if debug.ends_with("bytes(0..0)") {
|
||||
None
|
||||
} else {
|
||||
Some(span)
|
||||
}
|
||||
});
|
||||
|
||||
let first = match iter.next() {
|
||||
Some(span) => span,
|
||||
None => return Span::call_site(),
|
||||
};
|
||||
|
||||
iter.fold(None, |_prev, next| Some(next))
|
||||
.and_then(|last| first.join(last))
|
||||
.unwrap_or(first)
|
||||
}
|
|
@ -2,10 +2,11 @@ use super::TokenStreamExt;
|
|||
|
||||
use std::borrow::Cow;
|
||||
use std::iter;
|
||||
use std::rc::Rc;
|
||||
|
||||
use proc_macro2::{Group, Ident, Literal, Punct, Span, TokenStream, TokenTree};
|
||||
|
||||
/// Types that can be interpolated inside a [`quote!`] invocation.
|
||||
/// Types that can be interpolated inside a `quote!` invocation.
|
||||
///
|
||||
/// [`quote!`]: macro.quote.html
|
||||
pub trait ToTokens {
|
||||
|
@ -21,7 +22,7 @@ pub trait ToTokens {
|
|||
/// Example implementation for a struct representing Rust paths like
|
||||
/// `std::cmp::PartialEq`:
|
||||
///
|
||||
/// ```edition2018
|
||||
/// ```
|
||||
/// use proc_macro2::{TokenTree, Spacing, Span, Punct, TokenStream};
|
||||
/// use quote::{TokenStreamExt, ToTokens};
|
||||
///
|
||||
|
@ -53,6 +54,16 @@ pub trait ToTokens {
|
|||
/// ```
|
||||
fn to_tokens(&self, tokens: &mut TokenStream);
|
||||
|
||||
/// Convert `self` directly into a `TokenStream` object.
|
||||
///
|
||||
/// This method is implicitly implemented using `to_tokens`, and acts as a
|
||||
/// convenience method for consumers of the `ToTokens` trait.
|
||||
fn to_token_stream(&self) -> TokenStream {
|
||||
let mut tokens = TokenStream::new();
|
||||
self.to_tokens(&mut tokens);
|
||||
tokens
|
||||
}
|
||||
|
||||
/// Convert `self` directly into a `TokenStream` object.
|
||||
///
|
||||
/// This method is implicitly implemented using `to_tokens`, and acts as a
|
||||
|
@ -61,9 +72,7 @@ pub trait ToTokens {
|
|||
where
|
||||
Self: Sized,
|
||||
{
|
||||
let mut tokens = TokenStream::new();
|
||||
self.to_tokens(&mut tokens);
|
||||
tokens
|
||||
self.to_token_stream()
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -91,6 +100,12 @@ impl<T: ?Sized + ToTokens> ToTokens for Box<T> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + ToTokens> ToTokens for Rc<T> {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
(**self).to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ToTokens> ToTokens for Option<T> {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
if let Some(ref t) = *self {
|
||||
|
@ -126,24 +141,20 @@ primitive! {
|
|||
i16 => i16_suffixed
|
||||
i32 => i32_suffixed
|
||||
i64 => i64_suffixed
|
||||
i128 => i128_suffixed
|
||||
isize => isize_suffixed
|
||||
|
||||
u8 => u8_suffixed
|
||||
u16 => u16_suffixed
|
||||
u32 => u32_suffixed
|
||||
u64 => u64_suffixed
|
||||
u128 => u128_suffixed
|
||||
usize => usize_suffixed
|
||||
|
||||
f32 => f32_suffixed
|
||||
f64 => f64_suffixed
|
||||
}
|
||||
|
||||
#[cfg(integer128)]
|
||||
primitive! {
|
||||
i128 => i128_suffixed
|
||||
u128 => u128_suffixed
|
||||
}
|
||||
|
||||
impl ToTokens for char {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append(Literal::character(*self));
|
||||
|
|
|
@ -0,0 +1,6 @@
|
|||
#[rustversion::attr(not(nightly), ignore)]
|
||||
#[test]
|
||||
fn ui() {
|
||||
let t = trybuild::TestCases::new();
|
||||
t.compile_fail("tests/ui/*.rs");
|
||||
}
|
|
@ -1,18 +1,10 @@
|
|||
#![cfg_attr(feature = "cargo-clippy", allow(blacklisted_name))]
|
||||
|
||||
use std::borrow::Cow;
|
||||
|
||||
extern crate proc_macro2;
|
||||
#[macro_use]
|
||||
extern crate quote;
|
||||
use std::collections::BTreeSet;
|
||||
|
||||
use proc_macro2::{Ident, Span, TokenStream};
|
||||
use quote::TokenStreamExt;
|
||||
|
||||
mod conditional {
|
||||
#[cfg(integer128)]
|
||||
mod integer128;
|
||||
}
|
||||
use quote::{format_ident, quote, TokenStreamExt};
|
||||
|
||||
struct X;
|
||||
|
||||
|
@ -125,18 +117,20 @@ fn test_integer() {
|
|||
let ii16 = -1i16;
|
||||
let ii32 = -1i32;
|
||||
let ii64 = -1i64;
|
||||
let ii128 = -1i128;
|
||||
let iisize = -1isize;
|
||||
let uu8 = 1u8;
|
||||
let uu16 = 1u16;
|
||||
let uu32 = 1u32;
|
||||
let uu64 = 1u64;
|
||||
let uu128 = 1u128;
|
||||
let uusize = 1usize;
|
||||
|
||||
let tokens = quote! {
|
||||
#ii8 #ii16 #ii32 #ii64 #iisize
|
||||
#uu8 #uu16 #uu32 #uu64 #uusize
|
||||
#ii8 #ii16 #ii32 #ii64 #ii128 #iisize
|
||||
#uu8 #uu16 #uu32 #uu64 #uu128 #uusize
|
||||
};
|
||||
let expected = "-1i8 -1i16 -1i32 -1i64 -1isize 1u8 1u16 1u32 1u64 1usize";
|
||||
let expected = "-1i8 -1i16 -1i32 -1i64 -1i128 -1isize 1u8 1u16 1u32 1u64 1u128 1usize";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
|
@ -166,7 +160,7 @@ fn test_char() {
|
|||
let tokens = quote! {
|
||||
#zero #pound #quote #apost #newline #heart
|
||||
};
|
||||
let expected = "'\\u{0}' '#' '\\\"' '\\'' '\\n' '\\u{2764}'";
|
||||
let expected = "'\\u{0}' '#' '\"' '\\'' '\\n' '\\u{2764}'";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
|
@ -174,7 +168,7 @@ fn test_char() {
|
|||
fn test_str() {
|
||||
let s = "\0 a 'b \" c";
|
||||
let tokens = quote!(#s);
|
||||
let expected = "\"\\u{0} a \\'b \\\" c\"";
|
||||
let expected = "\"\\u{0} a 'b \\\" c\"";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
|
@ -182,7 +176,7 @@ fn test_str() {
|
|||
fn test_string() {
|
||||
let s = "\0 a 'b \" c".to_string();
|
||||
let tokens = quote!(#s);
|
||||
let expected = "\"\\u{0} a \\'b \\\" c\"";
|
||||
let expected = "\"\\u{0} a 'b \\\" c\"";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
|
@ -233,9 +227,42 @@ fn test_nested_fancy_repetition() {
|
|||
}
|
||||
|
||||
#[test]
|
||||
fn test_empty_repetition() {
|
||||
let tokens = quote!(#(a b)* #(c d),*);
|
||||
assert_eq!("", tokens.to_string());
|
||||
fn test_duplicate_name_repetition() {
|
||||
let foo = &["a", "b"];
|
||||
|
||||
let tokens = quote! {
|
||||
#(#foo: #foo),*
|
||||
#(#foo: #foo),*
|
||||
};
|
||||
|
||||
let expected = r#""a" : "a" , "b" : "b" "a" : "a" , "b" : "b""#;
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_duplicate_name_repetition_no_copy() {
|
||||
let foo = vec!["a".to_owned(), "b".to_owned()];
|
||||
|
||||
let tokens = quote! {
|
||||
#(#foo: #foo),*
|
||||
};
|
||||
|
||||
let expected = r#""a" : "a" , "b" : "b""#;
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_btreeset_repetition() {
|
||||
let mut set = BTreeSet::new();
|
||||
set.insert("a".to_owned());
|
||||
set.insert("b".to_owned());
|
||||
|
||||
let tokens = quote! {
|
||||
#(#set: #set),*
|
||||
};
|
||||
|
||||
let expected = r#""a" : "a" , "b" : "b""#;
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -248,6 +275,19 @@ fn test_variable_name_conflict() {
|
|||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_nonrep_in_repetition() {
|
||||
let rep = vec!["a", "b"];
|
||||
let nonrep = "c";
|
||||
|
||||
let tokens = quote! {
|
||||
#(#rep #rep : #nonrep #nonrep),*
|
||||
};
|
||||
|
||||
let expected = r#""a" "a" : "c" "c" , "b" "b" : "c" "c""#;
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_empty_quote() {
|
||||
let tokens = quote!();
|
||||
|
@ -275,7 +315,7 @@ fn test_cow() {
|
|||
#[test]
|
||||
fn test_closure() {
|
||||
fn field_i(i: usize) -> Ident {
|
||||
Ident::new(&format!("__field{}", i), Span::call_site())
|
||||
format_ident!("__field{}", i)
|
||||
}
|
||||
|
||||
let fields = (0usize..3)
|
||||
|
@ -293,3 +333,97 @@ fn test_append_tokens() {
|
|||
a.append_all(b);
|
||||
assert_eq!("a b", a.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_format_ident() {
|
||||
let id0 = format_ident!("Aa");
|
||||
let id1 = format_ident!("Hello{x}", x = id0);
|
||||
let id2 = format_ident!("Hello{x}", x = 5usize);
|
||||
let id3 = format_ident!("Hello{}_{x}", id0, x = 10usize);
|
||||
let id4 = format_ident!("Aa", span = Span::call_site());
|
||||
|
||||
assert_eq!(id0, "Aa");
|
||||
assert_eq!(id1, "HelloAa");
|
||||
assert_eq!(id2, "Hello5");
|
||||
assert_eq!(id3, "HelloAa_10");
|
||||
assert_eq!(id4, "Aa");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_format_ident_strip_raw() {
|
||||
let id = format_ident!("r#struct");
|
||||
let my_id = format_ident!("MyId{}", id);
|
||||
let raw_my_id = format_ident!("r#MyId{}", id);
|
||||
|
||||
assert_eq!(id, "r#struct");
|
||||
assert_eq!(my_id, "MyIdstruct");
|
||||
assert_eq!(raw_my_id, "r#MyIdstruct");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_outer_line_comment() {
|
||||
let tokens = quote! {
|
||||
/// doc
|
||||
};
|
||||
let expected = "# [ doc = r\" doc\" ]";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_inner_line_comment() {
|
||||
let tokens = quote! {
|
||||
//! doc
|
||||
};
|
||||
let expected = "# ! [ doc = r\" doc\" ]";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_outer_block_comment() {
|
||||
let tokens = quote! {
|
||||
/** doc */
|
||||
};
|
||||
let expected = "# [ doc = r\" doc \" ]";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_inner_block_comment() {
|
||||
let tokens = quote! {
|
||||
/*! doc */
|
||||
};
|
||||
let expected = "# ! [ doc = r\" doc \" ]";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_outer_attr() {
|
||||
let tokens = quote! {
|
||||
#[inline]
|
||||
};
|
||||
let expected = "# [ inline ]";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_inner_attr() {
|
||||
let tokens = quote! {
|
||||
#![no_std]
|
||||
};
|
||||
let expected = "# ! [ no_std ]";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
||||
// https://github.com/dtolnay/quote/issues/130
|
||||
#[test]
|
||||
fn test_star_after_repetition() {
|
||||
let c = vec!['0', '1'];
|
||||
let tokens = quote! {
|
||||
#(
|
||||
f(#c);
|
||||
)*
|
||||
*out = None;
|
||||
};
|
||||
let expected = "f ( '0' ) ; f ( '1' ) ; * out = None ;";
|
||||
assert_eq!(expected, tokens.to_string());
|
||||
}
|
||||
|
|
|
@ -0,0 +1,9 @@
|
|||
use quote::quote;
|
||||
|
||||
fn main() {
|
||||
let nonrep = "";
|
||||
|
||||
// Without some protection against repetitions with no iterator somewhere
|
||||
// inside, this would loop infinitely.
|
||||
quote!(#(#nonrep #nonrep)*);
|
||||
}
|
|
@ -0,0 +1,9 @@
|
|||
use quote::quote;
|
||||
|
||||
fn main() {
|
||||
let nonrep = "";
|
||||
|
||||
// Without some protection against repetitions with no iterator somewhere
|
||||
// inside, this would loop infinitely.
|
||||
quote!(#(#nonrep)*);
|
||||
}
|
|
@ -0,0 +1,5 @@
|
|||
use quote::quote;
|
||||
|
||||
fn main() {
|
||||
quote!(#(a b),*);
|
||||
}
|
|
@ -0,0 +1,5 @@
|
|||
use quote::quote;
|
||||
|
||||
fn main() {
|
||||
quote!(#(a b)*);
|
||||
}
|
|
@ -0,0 +1,7 @@
|
|||
use quote::quote;
|
||||
use std::net::Ipv4Addr;
|
||||
|
||||
fn main() {
|
||||
let ip = Ipv4Addr::LOCALHOST;
|
||||
let _ = quote! { #ip };
|
||||
}
|
Некоторые файлы не были показаны из-за слишком большого количества измененных файлов Показать больше
Загрузка…
Ссылка в новой задаче