Mirror of https://github.com/mozilla/gecko-dev.git
Bug 1511811 - Go back to bindgen 0.43 since I can't manage to make the tup build succeed.
This commit is contained in:
Parent
bd99a5f577
Commit
9ba570d998
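Most of the churn in the vendored bindgen sources below is the mechanical difference between the quote/proc-macro2 generations the two releases build against: bindgen 0.44 uses proc-macro2 0.4 and quote 0.6 (`Ident`, `proc_macro2::TokenStream`), while bindgen 0.43 uses proc-macro2 0.3 and quote 0.5 (`Term`, `quote::Tokens`). A minimal sketch of the older style being restored here, with the 0.44-era equivalents noted in comments; this is an illustration of the pattern, not code taken from the tree:

```rust
// Illustrative sketch only (assumes quote 0.5 + proc-macro2 0.3, 2015 edition).
#[macro_use]
extern crate quote;
extern crate proc_macro2;

use proc_macro2::{Span, Term};

// bindgen 0.44 (quote 0.6 / proc-macro2 0.4) would spell this roughly as:
//   fn repr(which: &str) -> proc_macro2::TokenStream {
//       let which = Ident::new(which, Span::call_site());
//       ...
//   }
fn repr(which: &str) -> quote::Tokens {
    // `Term` is the pre-0.4 name of what later became `Ident`.
    let which = Term::new(which, Span::call_site());
    quote! {
        #[repr( #which )]
    }
}

fn main() {
    // Prints a token-stream rendering of `#[repr(C)]` (token spacing may differ).
    println!("{}", repr("C"));
}
```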
@@ -160,7 +160,7 @@ dependencies = [
 name = "baldrdash"
 version = "0.1.0"
 dependencies = [
-"bindgen 0.44.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"bindgen 0.43.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "cranelift-codegen 0.25.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "cranelift-wasm 0.25.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "env_logger 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)",

@@ -214,7 +214,7 @@ dependencies = [
 
 [[package]]
 name = "bindgen"
-version = "0.44.0"
+version = "0.43.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",

@@ -224,10 +224,10 @@ dependencies = [
 "clap 2.31.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "lazy_static 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "peeking_take_while 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
-"proc-macro2 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)",
-"quote 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
+"proc-macro2 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+"quote 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "regex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
-"which 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+"which 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]

@@ -1214,7 +1214,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 name = "js"
 version = "0.1.4"
 dependencies = [
-"bindgen 0.44.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"bindgen 0.43.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "cmake 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)",
 "env_logger 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)",
 "glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",

@@ -1238,7 +1238,7 @@ name = "jsrust_shared"
 version = "0.1.0"
 dependencies = [
 "baldrdash 0.1.0",
-"bindgen 0.44.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"bindgen 0.43.2 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]

@@ -2360,7 +2360,7 @@ dependencies = [
 "app_units 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "arrayvec 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
 "atomic_refcell 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
-"bindgen 0.44.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"bindgen 0.43.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
 "byteorder 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",

@@ -2991,10 +2991,9 @@ dependencies = [
 
 [[package]]
 name = "which"
-version = "2.0.1"
+version = "1.0.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
-"failure 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
 "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 

@@ -3122,7 +3121,7 @@ dependencies = [
 "checksum base64 0.9.2 (registry+https://github.com/rust-lang/crates.io-index)" = "85415d2594767338a74a30c1d370b2f3262ec1b4ed2d7bba5b3faf4de40467d9"
 "checksum binary-space-partition 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "88ceb0d16c4fd0e42876e298d7d3ce3780dd9ebdcbe4199816a32c77e08597ff"
 "checksum bincode 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bda13183df33055cbb84b847becce220d392df502ebe7a4a78d7021771ed94d0"
-"checksum bindgen 0.44.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d8de1946c252145e09ed00ea90a8685fce15b8f94bb4dc1a0daaf2a9b375be61"
+"checksum bindgen 0.43.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6d52d263eacd15d26cbcf215d254b410bd58212aaa2d3c453a04b2d3b3adcf41"
 "checksum binjs_meta 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "430239e4551e42b80fa5d92322ac80ea38c9dda56e5d5582e057e2288352b71a"
 "checksum bit-set 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6f1efcc46c18245a69c38fcc5cc650f16d3a59d034f3106e9ed63748f695730a"
 "checksum bit-vec 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4440d5cb623bb7390ae27fec0bb6c61111969860f8e3ae198bfa0663645e67cf"

@@ -3382,7 +3381,7 @@ dependencies = [
 "checksum want 0.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "797464475f30ddb8830cc529aaaae648d581f99e2036a928877dfde027ddf6b3"
 "checksum wasmparser 0.22.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1b4e0f66e314a8e63ff5c3cc5103f7d0a3de9ee98bb61a960adcf7f1d9debd2f"
 "checksum webidl 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d0f807f7488d680893f7188aa09d7672a3a0a8461975a098a2edf0a52e3fee29"
-"checksum which 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b57acb10231b9493c8472b20cb57317d0679a49e0bdbee44b3b803a6473af164"
+"checksum which 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e84a603e7e0b1ce1aa1ee2b109c7be00155ce52df5081590d1ffb93f4f515cb2"
 "checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
 "checksum winapi 0.3.6 (git+https://github.com/froydnj/winapi-rs?branch=aarch64)" = "<none>"
 "checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
@@ -7,7 +7,7 @@ license = "MPL-2.0"
 
 [build-dependencies]
 env_logger = {version = "0.5", default-features = false} # disable `regex` to reduce code size
-bindgen = {version = "0.44", default-features = false} # disable `logging` to reduce code size
+bindgen = {version = "0.43", default-features = false} # disable `logging` to reduce code size
 cmake = "0.1"
 glob = "0.2.11"
 
@@ -12,7 +12,7 @@ path = "lib.rs"
 baldrdash = { path = "../../wasm/cranelift" }
 
 [build-dependencies]
-bindgen = {version = "0.44", default-features = false} # disable `logging` to reduce code size
+bindgen = {version = "0.43", default-features = false} # disable `logging` to reduce code size
 
 # Uncomment this to enable perf support in release mode.
 #[profile.release]
@@ -15,7 +15,7 @@ log = { version = "0.4.4", default-features = false, features = ["release_max_le
 env_logger = "0.5.6"
 
 [build-dependencies]
-bindgen = {version = "0.44", default-features = false} # disable `logging` to reduce code size
+bindgen = {version = "0.43", default-features = false} # disable `logging` to reduce code size
 
 # Uncomment this to enable perf support in release mode.
 #[profile.release]
@@ -5,19 +5,6 @@
 from __future__ import absolute_import, unicode_literals
 
 cargo_extra_outputs = {
-'backtrace-sys': [
-'backtrace-supported.h',
-'config.h',
-'src/libbacktrace/alloc.o',
-'src/libbacktrace/dwarf.o',
-'src/libbacktrace/fileline.o',
-'src/libbacktrace/posix.o',
-'src/libbacktrace/read.o',
-'src/libbacktrace/sort.o',
-'src/libbacktrace/state.o',
-'src/libbacktrace/elf.o',
-'libbacktrace.a',
-],
 'bindgen': [
 'tests.rs',
 'host-target.txt',
@@ -78,7 +78,7 @@ void = "1.0.2"
 [build-dependencies]
 lazy_static = "1"
 log = "0.4"
-bindgen = { version = "0.44", optional = true, default-features = false }
+bindgen = { version = "0.43", optional = true, default-features = false }
 regex = {version = "1.0", optional = true}
 walkdir = "2.1.4"
 toml = {version = "0.4.5", optional = true, default-features = false}
@@ -1 +1 @@
{"files":{"Cargo.toml":"2dc33b3636721ad992f5c9a50dd8e673e68746826d0b825b23d40e5e98329696","LICENSE":"c23953d9deb0a3312dbeaf6c128a657f3591acee45067612fa68405eaa4525db","README.md":"0b50adc1da2d15211d61cab2ff8b9f1e8eccc37ae25695ba7a0c21b77389aa4c","build.rs":"032a1c51963894a421b0535f9227796d88768ac5f665a81d2edced69dc6d106a","src/callbacks.rs":"b24d7982332c6a35928f134184ddf4072fe4545a45546b97b9b0e0c1fbb77c08","src/clang.rs":"ae7b7d992e1a85c9d813257aa3cab2f93a287fd58bf178e8dce427648bf51000","src/codegen/bitfield_unit.rs":"88b0604322dc449fc9284850eadc1f5d14b42fa747d4258bae0b6b9535f52dfd","src/codegen/bitfield_unit_tests.rs":"2073ac6a36e0bc9afaef5b1207966817c8fb7a1a9f6368c3b1b8f79822efbfba","src/codegen/error.rs":"2613af1d833377fd4a70719f4a09951d9d45dc9227827b9a2a938a1bcaaea2dd","src/codegen/helpers.rs":"dcd9f57b80ccf4a46874123ee0d5511c2b06390382eb6fdf88a06e56c04b21d9","src/codegen/impl_debug.rs":"19a8f75a1513efb523f33fd02246976e81842d38f249261b0d1a671365f78caf","src/codegen/impl_partialeq.rs":"d40d9ee2849c4d3d557b033c4d3af5e6de4a44347f67c0f016198086338811af","src/codegen/mod.rs":"3f673a7b17b289af66990cc132421c0385d852da6b404ac2ffe53b33c53c2aba","src/codegen/struct_layout.rs":"43132726f981b2d90f957fa6a0909fe48c07ca3e19b83886f24c876a33c61848","src/extra_assertions.rs":"449549c4a7a50c3f0b06332452b2fb6c9b23f31ca8e5e1656fe6c7f21e8ef7fa","src/features.rs":"5fe97362ab406a09422473b8f94603832130ecab10dc4df8feebaebc1793ac1b","src/ir/analysis/derive_copy.rs":"b7e12cdc74937909529e4cefe9f43b3ee0a5590f07392b73481811ac9fddedd2","src/ir/analysis/derive_debug.rs":"cf9346ecb3afd4e94094a2723e4d76c76c55f42a13dc1d5ec6564d25d3a46cf4","src/ir/analysis/derive_default.rs":"87332eccd5accbfbf7fad2e1511be4f8945b0538ae3e0628c8af17d16068691f","src/ir/analysis/derive_hash.rs":"521ea1dbe221755042a95e8e8dcb594e427e54be2eb869c61ebbdb27fec5aa77","src/ir/analysis/derive_partialeq_or_partialord.rs":"3c5d051f69401fe50b56143143eca3e71674d6a87d0013c31745b75d0f3d584f","src/ir/analysis/has_destructor.rs":"d9aaaceba580b48eb0df4e5537b34b417c51ccdfeb8f6b72484f3bf4992317fe","src/ir/analysis/has_float.rs":"5f7ee1b834978817041d884fee4648b31ecb66c62aafb8e7a9a17e5ac434bfe5","src/ir/analysis/has_type_param_in_array.rs":"abf74468b923c015aaf67599e50857267516010472819a79ca494fe02dd6ac93","src/ir/analysis/has_vtable.rs":"37765e954ef792e369a58ccfe1d827a00fe9bce680466da1d6523671b94b6c92","src/ir/analysis/mod.rs":"ea5ace45c77e855674bb565ba0fef556f60e3293b0ddcf11d3a5a6ec15ab0648","src/ir/analysis/sizedness.rs":"4f788bff0ceb0e008d70145510340ab636e5203787316f0be41f789ce9b2f73d","src/ir/analysis/template_params.rs":"6554dd1240142ec0e7299e678b696725f5cba99243d1c3d1cbf58d4764082fd6","src/ir/annotations.rs":"39a5ab19f4d5dfa617577e4a0d0d2b67b5369d480c7cca4b14d172458c9843f0","src/ir/comment.rs":"1b068d5834da7360aec4cb80d9c55219cedbb2ae8b9727a39ec7d156c88fe0b5","src/ir/comp.rs":"7b22f3ff19ca45a6fbfe7ea015109d43f4ddf65b33b47b1c37829fcb87cdff9b","src/ir/context.rs":"12f584e20af7c6074a07d66aa9efbe38d9bef211e3011ccc558d392e98ce6af9","src/ir/derive.rs":"19601e76528d6cce8e04a66572e75da4e9efdecc4d60a983fc68c11958e9f3ec","src/ir/dot.rs":"d01f1621ab67e368d854a82bd6bb0b8dd52f3c2c733de8eaf81aece9543818cb","src/ir/enum_ty.rs":"9cc242d6b3c1866665594e8b306860ee39c0ea42d22198d46b7fded473fe3e84","src/ir/function.rs":"33292992af5513e9814fb9d39a6ebf12934fc8887db3d9f49ff6aa9ec47d8573","src/ir/int.rs":"07e0c7dbd2dd977177fae3acd2a14adf271c6cf9ff4b57cddc11d50734fd4801","src/ir/item.rs":"8de5fe624696c170aa3d96105acd365a19eebfaa3ac1aa451a147960c3a88018","src/ir/item_kind.rs":"dbeae8c4f
d0e5c9485d325aea040e056a1f2cd6d43fc927dee8fe1c0c59a7197","src/ir/layout.rs":"e722edffcd34914b534813da5af6fe8ba69927a54e0ec88ae1733f5ddf0e50b1","src/ir/mod.rs":"2eae90f207fad2e45957ec9287064992a419e3fc916aba84faff2ea25cbeb5ee","src/ir/module.rs":"c4d90bf38fe3672e01923734ccbdb7951ea929949d5f413a9c2aee12395a5094","src/ir/objc.rs":"557531c8cbb2da37045c11fd5af667ef2bbd790212e232d261359d98fa871abf","src/ir/template.rs":"c0f8570b927dfd6a421fc4ce3094ec837a3ed936445225dbfac961e8e0842ae5","src/ir/traversal.rs":"ea751379a5aec02f93f8d2c61e18232776b1f000dbeae64b9a7195ba21a19dd6","src/ir/ty.rs":"1068a7e4916d69b5034a76c47b67e6257db906cc16dad6d8af4bdb39ad52cd84","src/ir/var.rs":"92b08b8abab9caa23a0ff5121b7ad0165b6f356dd7d51a687826c9428967c082","src/lib.rs":"70f9100949cac6f603324dc91e01e14b2649190d183a647ad1686e9f75a1312d","src/log_stubs.rs":"6dfdd908b7c6453da416cf232893768f9480e551ca4add0858ef88bf71ee6ceb","src/main.rs":"e519053bcdde6bc88f60f955246a02d53b3db1cc5ccd1612e6675b790b7460b0","src/options.rs":"a4b4028542d6292363fc97621c704bf1b4e7eb149e9cb86b52e30aad0be13b99","src/parse.rs":"be7d13cc84fae79ec7b3aa9e77063fa475a48d74a854423e2c72d75006a25202","src/regex_set.rs":"c417889726b5e3325f9375551bf23fd54c9b40020151c364741ea6126ede386b","src/time.rs":"3b763e6fee51d0eb01228dfe28bc28a9f692aff73b2a7b90a030902e0238fca6"},"package":"d8de1946c252145e09ed00ea90a8685fce15b8f94bb4dc1a0daaf2a9b375be61"}
{"files":{"Cargo.toml":"e0559de35f6564bbfc4779f43d9104d604befb7ff7de5baf591379c285544d3c","LICENSE":"c23953d9deb0a3312dbeaf6c128a657f3591acee45067612fa68405eaa4525db","README.md":"630d1a1d123c131bad0fec23173e263ba8ecc064b5cd8446d4cab7ffd197db45","build.rs":"032a1c51963894a421b0535f9227796d88768ac5f665a81d2edced69dc6d106a","src/callbacks.rs":"936198c967ca4205ab043ce2264d8188d0716ad7c294cebdaacde2b486224450","src/clang.rs":"b25f8d455e3cd89d416a4c5e55d828db9691f4def82109c1dd12457e5ca2c13c","src/codegen/bitfield_unit.rs":"88b0604322dc449fc9284850eadc1f5d14b42fa747d4258bae0b6b9535f52dfd","src/codegen/bitfield_unit_tests.rs":"2073ac6a36e0bc9afaef5b1207966817c8fb7a1a9f6368c3b1b8f79822efbfba","src/codegen/error.rs":"2613af1d833377fd4a70719f4a09951d9d45dc9227827b9a2a938a1bcaaea2dd","src/codegen/helpers.rs":"2c890c96a1a6b72ada63593cb544f005476fb176d7181553713e83710dc8eefd","src/codegen/impl_debug.rs":"43b977b8d16073d021977ce57f3c22eb5b1083493905ae19a171e2271939f574","src/codegen/impl_partialeq.rs":"671dd0eac712bf8281e11a7b3e545a443c6e9e2c8ee7fbebeb03c76667ca206b","src/codegen/mod.rs":"57a6c0dc52af70b08f54e744b629df67c5528a8d63ccb9485cc1af91d02dadc0","src/codegen/struct_layout.rs":"b77f03dfbbed408a5fa6e693560aea8dc902fe7d10d847ce39122e6961078515","src/extra_assertions.rs":"449549c4a7a50c3f0b06332452b2fb6c9b23f31ca8e5e1656fe6c7f21e8ef7fa","src/features.rs":"be74e03d4f00582fa8970439da52057b04204b450193833953ed84772933bd46","src/ir/analysis/derive_copy.rs":"b7e12cdc74937909529e4cefe9f43b3ee0a5590f07392b73481811ac9fddedd2","src/ir/analysis/derive_debug.rs":"cf9346ecb3afd4e94094a2723e4d76c76c55f42a13dc1d5ec6564d25d3a46cf4","src/ir/analysis/derive_default.rs":"87332eccd5accbfbf7fad2e1511be4f8945b0538ae3e0628c8af17d16068691f","src/ir/analysis/derive_hash.rs":"521ea1dbe221755042a95e8e8dcb594e427e54be2eb869c61ebbdb27fec5aa77","src/ir/analysis/derive_partialeq_or_partialord.rs":"3c5d051f69401fe50b56143143eca3e71674d6a87d0013c31745b75d0f3d584f","src/ir/analysis/has_destructor.rs":"d9aaaceba580b48eb0df4e5537b34b417c51ccdfeb8f6b72484f3bf4992317fe","src/ir/analysis/has_float.rs":"5f7ee1b834978817041d884fee4648b31ecb66c62aafb8e7a9a17e5ac434bfe5","src/ir/analysis/has_type_param_in_array.rs":"abf74468b923c015aaf67599e50857267516010472819a79ca494fe02dd6ac93","src/ir/analysis/has_vtable.rs":"37765e954ef792e369a58ccfe1d827a00fe9bce680466da1d6523671b94b6c92","src/ir/analysis/mod.rs":"ea5ace45c77e855674bb565ba0fef556f60e3293b0ddcf11d3a5a6ec15ab0648","src/ir/analysis/sizedness.rs":"4f788bff0ceb0e008d70145510340ab636e5203787316f0be41f789ce9b2f73d","src/ir/analysis/template_params.rs":"6554dd1240142ec0e7299e678b696725f5cba99243d1c3d1cbf58d4764082fd6","src/ir/annotations.rs":"ef106afcbe6084c18bd13a37ee3c1cdc9596bfb055db8c773d81f8f15fec3208","src/ir/comment.rs":"1b068d5834da7360aec4cb80d9c55219cedbb2ae8b9727a39ec7d156c88fe0b5","src/ir/comp.rs":"7b22f3ff19ca45a6fbfe7ea015109d43f4ddf65b33b47b1c37829fcb87cdff9b","src/ir/context.rs":"925ba08ad614c40b0578be524206a662aec53f959d47d3b2cc84389718fc485b","src/ir/derive.rs":"19601e76528d6cce8e04a66572e75da4e9efdecc4d60a983fc68c11958e9f3ec","src/ir/dot.rs":"d01f1621ab67e368d854a82bd6bb0b8dd52f3c2c733de8eaf81aece9543818cb","src/ir/enum_ty.rs":"9cc242d6b3c1866665594e8b306860ee39c0ea42d22198d46b7fded473fe3e84","src/ir/function.rs":"c497a6e07e95dc65be73f12396e344929973243d5cf7808a97c5309b0b090ef8","src/ir/int.rs":"07e0c7dbd2dd977177fae3acd2a14adf271c6cf9ff4b57cddc11d50734fd4801","src/ir/item.rs":"d626a0054df8254a504b44019dc531a933ec1bd3961b1465a602f0d767e0ad4e","src/ir/item_kind.rs":"dbeae8c4f
d0e5c9485d325aea040e056a1f2cd6d43fc927dee8fe1c0c59a7197","src/ir/layout.rs":"e722edffcd34914b534813da5af6fe8ba69927a54e0ec88ae1733f5ddf0e50b1","src/ir/mod.rs":"2eae90f207fad2e45957ec9287064992a419e3fc916aba84faff2ea25cbeb5ee","src/ir/module.rs":"c4d90bf38fe3672e01923734ccbdb7951ea929949d5f413a9c2aee12395a5094","src/ir/objc.rs":"828a890acdc8b10c44e69e2ed4a4f5d8c0e734606d3a8cc71658dcf43a49acf4","src/ir/template.rs":"c0f8570b927dfd6a421fc4ce3094ec837a3ed936445225dbfac961e8e0842ae5","src/ir/traversal.rs":"ea751379a5aec02f93f8d2c61e18232776b1f000dbeae64b9a7195ba21a19dd6","src/ir/ty.rs":"1068a7e4916d69b5034a76c47b67e6257db906cc16dad6d8af4bdb39ad52cd84","src/ir/var.rs":"5c0caaa505faef18e334c6198b3634b6f390d14cf9da629226cd78617fd3594b","src/lib.rs":"994d8495557cadc8c4a748e2643b35c6850f2c7130e35c8abf4ae02b83cfeff7","src/log_stubs.rs":"6dfdd908b7c6453da416cf232893768f9480e551ca4add0858ef88bf71ee6ceb","src/main.rs":"e519053bcdde6bc88f60f955246a02d53b3db1cc5ccd1612e6675b790b7460b0","src/options.rs":"a4b4028542d6292363fc97621c704bf1b4e7eb149e9cb86b52e30aad0be13b99","src/parse.rs":"be7d13cc84fae79ec7b3aa9e77063fa475a48d74a854423e2c72d75006a25202","src/regex_set.rs":"c417889726b5e3325f9375551bf23fd54c9b40020151c364741ea6126ede386b","src/time.rs":"3b763e6fee51d0eb01228dfe28bc28a9f692aff73b2a7b90a030902e0238fca6"},"package":"6d52d263eacd15d26cbcf215d254b410bd58212aaa2d3c453a04b2d3b3adcf41"}
@@ -12,18 +12,18 @@
 
 [package]
 name = "bindgen"
-version = "0.44.0"
+version = "0.43.2"
 authors = ["Jyun-Yan You <jyyou.tw@gmail.com>", "Emilio Cobos Álvarez <emilio@crisal.io>", "Nick Fitzgerald <fitzgen@gmail.com>", "The Servo project developers"]
 build = "build.rs"
 include = ["LICENSE", "README.md", "Cargo.toml", "build.rs", "src/*.rs", "src/**/*.rs"]
 description = "Automatically generates Rust FFI bindings to C and C++ libraries."
-homepage = "https://rust-lang.github.io/rust-bindgen/"
+homepage = "https://rust-lang-nursery.github.io/rust-bindgen/"
 documentation = "https://docs.rs/bindgen"
 readme = "README.md"
 keywords = ["bindings", "ffi", "code-generation"]
 categories = ["external-ffi-bindings", "development-tools::ffi"]
 license = "BSD-3-Clause"
-repository = "https://github.com/rust-lang/rust-bindgen"
+repository = "https://github.com/rust-lang-nursery/rust-bindgen"
 
 [lib]
 path = "src/lib.rs"

@@ -63,18 +63,18 @@ optional = true
 version = "0.1.2"
 
 [dependencies.proc-macro2]
-version = "0.4"
+version = "0.3.2, < 0.3.6"
 default-features = false
 
 [dependencies.quote]
-version = "0.6"
+version = "0.5"
 default-features = false
 
 [dependencies.regex]
 version = "1.0"
 
 [dependencies.which]
-version = "2.0"
+version = "1.0.2"
 [dev-dependencies.clap]
 version = "2"
 

@@ -95,4 +95,4 @@ testing_only_libclang_3_9 = []
 testing_only_libclang_4 = []
 testing_only_libclang_5 = []
 [badges.travis-ci]
-repository = "rust-lang/rust-bindgen"
+repository = "rust-lang-nursery/rust-bindgen"
@@ -1,5 +1,7 @@
 # `bindgen`
 
+[`impl period`](https://blog.rust-lang.org/2017/09/18/impl-future-for-rust.html) has been started! Join us at [Gitter.im](https://gitter.im/rust-impl-period/WG-dev-tools-bindgen).
+
 **`bindgen` automatically generates Rust FFI bindings to C (and some C++) libraries.**
 
 For example, given the C header `doggo.h`:

@@ -32,7 +34,7 @@ extern "C" {
 
 ## Users Guide
 
-[📚 Read the `bindgen` users guide here! 📚](https://rust-lang.github.io/rust-bindgen)
+[📚 Read the `bindgen` users guide here! 📚](https://rust-lang-nursery.github.io/rust-bindgen)
 
 ## API Reference
 
@@ -35,11 +35,6 @@ pub trait ParseCallbacks: fmt::Debug + UnwindSafe {
 None
 }
 
-/// This will be run on every string macro. The callback can not influence the further
-/// treatment of the macro, but may use the value to generate additional code or configuration.
-fn str_macro(&self, _name: &str, _value: &[u8]) {
-}
-
 /// This function should return whether, given an enum variant
 /// name, and value, this enum variant will forcibly be a constant.
 fn enum_variant_behavior(
@@ -499,31 +499,6 @@ impl Cursor {
 }
 }
 
-/// Does this cursor have the given simple attribute?
-///
-/// Note that this will only work for attributes that don't have an existing libclang
-/// CursorKind, e.g. pure, const, etc.
-pub fn has_simple_attr(&self, attr: &str) -> bool {
-let mut found_attr = false;
-self.visit(|cur| {
-if cur.kind() == CXCursor_UnexposedAttr {
-found_attr = cur.tokens().map(|tokens| {
-tokens.iter().any(|t| {
-t.kind == CXToken_Identifier && t.spelling == attr
-})
-}).unwrap_or(false);
-
-if found_attr {
-return CXChildVisit_Break;
-}
-}
-
-CXChildVisit_Continue
-});
-
-found_attr
-}
-
 /// Given that this cursor's referent is a `typedef`, get the `Type` that is
 /// being aliased.
 pub fn typedef_type(&self) -> Option<Type> {
@@ -2,54 +2,52 @@
 
 use ir::context::BindgenContext;
 use ir::layout::Layout;
-use proc_macro2::{self, Ident, Span};
-use quote::TokenStreamExt;
+use quote;
+use proc_macro2::{Term, Span};
 
 pub mod attributes {
-use proc_macro2::{self, Ident, Span};
+use quote;
+use proc_macro2::{Term, Span};
 
-pub fn repr(which: &str) -> proc_macro2::TokenStream {
-let which = Ident::new(which, Span::call_site());
+pub fn repr(which: &str) -> quote::Tokens {
+let which = Term::new(which, Span::call_site());
 quote! {
 #[repr( #which )]
 }
 }
 
-pub fn repr_list(which_ones: &[&str]) -> proc_macro2::TokenStream {
-let which_ones = which_ones.iter().cloned().map(|one| Ident::new(one, Span::call_site()));
+pub fn repr_list(which_ones: &[&str]) -> quote::Tokens {
+let which_ones = which_ones.iter().cloned().map(|one| Term::new(one, Span::call_site()));
 quote! {
 #[repr( #( #which_ones ),* )]
 }
 }
 
-pub fn derives(which_ones: &[&str]) -> proc_macro2::TokenStream {
-let which_ones = which_ones.iter().cloned().map(|one| Ident::new(one, Span::call_site()));
+pub fn derives(which_ones: &[&str]) -> quote::Tokens {
+let which_ones = which_ones.iter().cloned().map(|one| Term::new(one, Span::call_site()));
 quote! {
 #[derive( #( #which_ones ),* )]
 }
 }
 
-pub fn inline() -> proc_macro2::TokenStream {
+pub fn inline() -> quote::Tokens {
 quote! {
 #[inline]
 }
 }
 
-pub fn must_use() -> proc_macro2::TokenStream {
-quote! {
-#[must_use]
-}
+pub fn doc(comment: String) -> quote::Tokens {
+// Doc comments are already preprocessed into nice `///` formats by the
+// time they get here. Just make sure that we have newlines around it so
+// that nothing else gets wrapped into the comment.
+let mut tokens = quote! {};
+tokens.append(Term::new("\n", Span::call_site()));
+tokens.append(Term::new(&comment, Span::call_site()));
+tokens.append(Term::new("\n", Span::call_site()));
+tokens
 }
 
-pub fn doc(comment: String) -> proc_macro2::TokenStream {
-use std::str::FromStr;
-
-// NOTE(emilio): By this point comments are already preprocessed and in
-// `///` form. Quote turns them into `#[doc]` comments, but oh well.
-proc_macro2::TokenStream::from_str(&comment).unwrap()
-}
-
-pub fn link_name(name: &str) -> proc_macro2::TokenStream {
+pub fn link_name(name: &str) -> quote::Tokens {
 // LLVM mangles the name by default but it's already mangled.
 // Prefixing the name with \u{1} should tell LLVM to not mangle it.
 let name = format!("\u{1}{}", name);

@@ -61,7 +59,7 @@ pub mod attributes {
 
 /// Generates a proper type for a field or type with a given `Layout`, that is,
 /// a type with the correct size and alignment restrictions.
-pub fn blob(ctx: &BindgenContext, layout: Layout) -> proc_macro2::TokenStream {
+pub fn blob(ctx: &BindgenContext, layout: Layout) -> quote::Tokens {
 let opaque = layout.opaque();
 
 // FIXME(emilio, #412): We fall back to byte alignment, but there are

@@ -76,7 +74,7 @@ pub fn blob(ctx: &BindgenContext, layout: Layout) -> proc_macro2::TokenStream {
 }
 };
 
-let ty_name = Ident::new(ty_name, Span::call_site());
+let ty_name = Term::new(ty_name, Span::call_site());
 
 let data_len = opaque.array_size(ctx).unwrap_or(layout.size);
 

@@ -92,14 +90,14 @@ pub fn blob(ctx: &BindgenContext, layout: Layout) -> proc_macro2::TokenStream {
 }
 
 /// Integer type of the same size as the given `Layout`.
-pub fn integer_type(ctx: &BindgenContext, layout: Layout) -> Option<proc_macro2::TokenStream> {
+pub fn integer_type(ctx: &BindgenContext, layout: Layout) -> Option<quote::Tokens> {
 let name = Layout::known_type_for_size(ctx, layout.size)?;
-let name = Ident::new(name, Span::call_site());
+let name = Term::new(name, Span::call_site());
 Some(quote! { #name })
 }
 
 /// Generates a bitfield allocation unit type for a type with the given `Layout`.
-pub fn bitfield_unit(ctx: &BindgenContext, layout: Layout) -> proc_macro2::TokenStream {
+pub fn bitfield_unit(ctx: &BindgenContext, layout: Layout) -> quote::Tokens {
 let mut tokens = quote! {};
 
 if ctx.options().enable_cxx_namespaces {

@@ -126,9 +124,10 @@ pub mod ast_ty {
 use ir::function::FunctionSig;
 use ir::layout::Layout;
 use ir::ty::FloatKind;
+use quote;
 use proc_macro2;
 
-pub fn raw_type(ctx: &BindgenContext, name: &str) -> proc_macro2::TokenStream {
+pub fn raw_type(ctx: &BindgenContext, name: &str) -> quote::Tokens {
 let ident = ctx.rust_ident_raw(name);
 match ctx.options().ctypes_prefix {
 Some(ref prefix) => {

@@ -147,7 +146,7 @@ pub mod ast_ty {
 ctx: &BindgenContext,
 fk: FloatKind,
 layout: Option<Layout>,
-) -> proc_macro2::TokenStream {
+) -> quote::Tokens {
 // TODO: we probably should take the type layout into account more
 // often?
 //

@@ -187,25 +186,25 @@ pub mod ast_ty {
 }
 }
 
-pub fn int_expr(val: i64) -> proc_macro2::TokenStream {
+pub fn int_expr(val: i64) -> quote::Tokens {
 // Don't use quote! { #val } because that adds the type suffix.
 let val = proc_macro2::Literal::i64_unsuffixed(val);
 quote!(#val)
 }
 
-pub fn uint_expr(val: u64) -> proc_macro2::TokenStream {
+pub fn uint_expr(val: u64) -> quote::Tokens {
 // Don't use quote! { #val } because that adds the type suffix.
 let val = proc_macro2::Literal::u64_unsuffixed(val);
 quote!(#val)
 }
 
-pub fn byte_array_expr(bytes: &[u8]) -> proc_macro2::TokenStream {
+pub fn byte_array_expr(bytes: &[u8]) -> quote::Tokens {
 let mut bytes: Vec<_> = bytes.iter().cloned().collect();
 bytes.push(0);
 quote! { [ #(#bytes),* ] }
 }
 
-pub fn cstr_expr(mut string: String) -> proc_macro2::TokenStream {
+pub fn cstr_expr(mut string: String) -> quote::Tokens {
 string.push('\0');
 let b = proc_macro2::Literal::byte_string(&string.as_bytes());
 quote! {

@@ -216,7 +215,7 @@ pub mod ast_ty {
 pub fn float_expr(
 ctx: &BindgenContext,
 f: f64,
-) -> Result<proc_macro2::TokenStream, ()> {
+) -> Result<quote::Tokens, ()> {
 if f.is_finite() {
 let val = proc_macro2::Literal::f64_unsuffixed(f);
 

@@ -250,7 +249,7 @@ pub mod ast_ty {
 pub fn arguments_from_signature(
 signature: &FunctionSig,
 ctx: &BindgenContext,
-) -> Vec<proc_macro2::TokenStream> {
+) -> Vec<quote::Tokens> {
 let mut unnamed_arguments = 0;
 signature
 .argument_types()
@@ -3,14 +3,14 @@ use ir::context::BindgenContext;
 use ir::derive::CanTriviallyDeriveDebug;
 use ir::item::{HasTypeParamInArray, IsOpaque, Item, ItemCanonicalName};
 use ir::ty::{RUST_DERIVE_IN_ARRAY_LIMIT, TypeKind};
-use proc_macro2;
+use quote;
 
 pub fn gen_debug_impl(
 ctx: &BindgenContext,
 fields: &[Field],
 item: &Item,
 kind: CompKind,
-) -> proc_macro2::TokenStream {
+) -> quote::Tokens {
 let struct_name = item.canonical_name(ctx);
 let mut format_string = format!("{} {{{{ ", struct_name);
 let mut tokens = vec![];

@@ -63,7 +63,7 @@ pub trait ImplDebug<'a> {
 &self,
 ctx: &BindgenContext,
 extra: Self::Extra,
-) -> Option<(String, Vec<proc_macro2::TokenStream>)>;
+) -> Option<(String, Vec<quote::Tokens>)>;
 }
 
 impl<'a> ImplDebug<'a> for FieldData {

@@ -73,7 +73,7 @@ impl<'a> ImplDebug<'a> for FieldData {
 &self,
 ctx: &BindgenContext,
 _: Self::Extra,
-) -> Option<(String, Vec<proc_macro2::TokenStream>)> {
+) -> Option<(String, Vec<quote::Tokens>)> {
 if let Some(name) = self.name() {
 ctx.resolve_item(self.ty()).impl_debug(ctx, name)
 } else {

@@ -89,7 +89,7 @@ impl<'a> ImplDebug<'a> for BitfieldUnit {
 &self,
 ctx: &BindgenContext,
 _: Self::Extra,
-) -> Option<(String, Vec<proc_macro2::TokenStream>)> {
+) -> Option<(String, Vec<quote::Tokens>)> {
 let mut format_string = String::new();
 let mut tokens = vec![];
 for (i, bitfield) in self.bitfields().iter().enumerate() {

@@ -118,7 +118,7 @@ impl<'a> ImplDebug<'a> for Item {
 &self,
 ctx: &BindgenContext,
 name: &str,
-) -> Option<(String, Vec<proc_macro2::TokenStream>)> {
+) -> Option<(String, Vec<quote::Tokens>)> {
 let name_ident = ctx.rust_ident(name);
 
 // We don't know if blacklisted items `impl Debug` or not, so we can't

@@ -136,8 +136,8 @@ impl<'a> ImplDebug<'a> for Item {
 
 fn debug_print(
 name: &str,
-name_ident: proc_macro2::TokenStream,
-) -> Option<(String, Vec<proc_macro2::TokenStream>)> {
+name_ident: quote::Tokens,
+) -> Option<(String, Vec<quote::Tokens>)> {
 Some((
 format!("{}: {{:?}}", name),
 vec![quote! {
@@ -3,6 +3,7 @@ use ir::comp::{CompInfo, CompKind, Field, FieldMethods};
 use ir::context::BindgenContext;
 use ir::item::{IsOpaque, Item};
 use ir::ty::{TypeKind, RUST_DERIVE_IN_ARRAY_LIMIT};
+use quote;
 use proc_macro2;
 
 /// Generate a manual implementation of `PartialEq` trait for the

@@ -11,8 +12,8 @@ pub fn gen_partialeq_impl(
 ctx: &BindgenContext,
 comp_info: &CompInfo,
 item: &Item,
-ty_for_impl: &proc_macro2::TokenStream,
-) -> Option<proc_macro2::TokenStream> {
+ty_for_impl: &quote::Tokens,
+) -> Option<quote::Tokens> {
 let mut tokens = vec![];
 
 if item.is_opaque(ctx, &()) {

@@ -70,8 +71,8 @@ pub fn gen_partialeq_impl(
 })
 }
 
-fn gen_field(ctx: &BindgenContext, ty_item: &Item, name: &str) -> proc_macro2::TokenStream {
-fn quote_equals(name_ident: proc_macro2::Ident) -> proc_macro2::TokenStream {
+fn gen_field(ctx: &BindgenContext, ty_item: &Item, name: &str) -> quote::Tokens {
+fn quote_equals(name_ident: proc_macro2::Term) -> quote::Tokens {
 quote! { self.#name_ident == other.#name_ident }
 }
 
@@ -37,8 +37,8 @@ use ir::template::{AsTemplateParam, TemplateInstantiation, TemplateParameters};
 use ir::ty::{Type, TypeKind};
 use ir::var::Var;
 
-use quote::TokenStreamExt;
-use proc_macro2::{self, Ident, Span};
+use quote;
+use proc_macro2::{self, Term, Span};
 
 use std;
 use std::borrow::Cow;

@@ -48,12 +48,11 @@ use std::collections::hash_map::{Entry, HashMap};
 use std::fmt::Write;
 use std::iter;
 use std::ops;
-use std::str::FromStr;
 
 // Name of type defined in constified enum module
 pub static CONSTIFIED_ENUM_MODULE_REPR_NAME: &'static str = "Type";
 
-fn top_level_path(ctx: &BindgenContext, item: &Item) -> Vec<proc_macro2::TokenStream> {
+fn top_level_path(ctx: &BindgenContext, item: &Item) -> Vec<quote::Tokens> {
 let mut path = vec![quote! { self }];
 
 if ctx.options().enable_cxx_namespaces {

@@ -65,7 +64,7 @@ fn top_level_path(ctx: &BindgenContext, item: &Item) -> Vec<proc_macro2::TokenSt
 path
 }
 
-fn root_import(ctx: &BindgenContext, module: &Item) -> proc_macro2::TokenStream {
+fn root_import(ctx: &BindgenContext, module: &Item) -> quote::Tokens {
 assert!(ctx.options().enable_cxx_namespaces, "Somebody messed it up");
 assert!(module.is_module());
 

@@ -77,7 +76,7 @@ fn root_import(ctx: &BindgenContext, module: &Item) -> proc_macro2::TokenStream
 
 
 let mut tokens = quote! {};
-tokens.append_separated(path, quote!(::));
+tokens.append_separated(path, Term::new("::", Span::call_site()));
 
 quote! {
 #[allow(unused_imports)]

@@ -86,7 +85,7 @@ fn root_import(ctx: &BindgenContext, module: &Item) -> proc_macro2::TokenStream
 }
 
 struct CodegenResult<'a> {
-items: Vec<proc_macro2::TokenStream>,
+items: Vec<quote::Tokens>,
 
 /// A monotonic counter used to add stable unique id's to stuff that doesn't
 /// need to be referenced by anything.

@@ -213,7 +212,7 @@ impl<'a> CodegenResult<'a> {
 self.vars_seen.insert(name.into());
 }
 
-fn inner<F>(&mut self, cb: F) -> Vec<proc_macro2::TokenStream>
+fn inner<F>(&mut self, cb: F) -> Vec<quote::Tokens>
 where
 F: FnOnce(&mut Self),
 {

@@ -232,7 +231,7 @@ impl<'a> CodegenResult<'a> {
 }
 
 impl<'a> ops::Deref for CodegenResult<'a> {
-type Target = Vec<proc_macro2::TokenStream>;
+type Target = Vec<quote::Tokens>;
 
 fn deref(&self) -> &Self::Target {
 &self.items

@@ -248,11 +247,11 @@ impl<'a> ops::DerefMut for CodegenResult<'a> {
 /// A trait to convert a rust type into a pointer, optionally const, to the same
 /// type.
 trait ToPtr {
-fn to_ptr(self, is_const: bool) -> proc_macro2::TokenStream;
+fn to_ptr(self, is_const: bool) -> quote::Tokens;
 }
 
-impl ToPtr for proc_macro2::TokenStream {
-fn to_ptr(self, is_const: bool) -> proc_macro2::TokenStream {
+impl ToPtr for quote::Tokens {
+fn to_ptr(self, is_const: bool) -> quote::Tokens {
 if is_const {
 quote! { *const #self }
 } else {

@@ -261,7 +260,7 @@ impl ToPtr for proc_macro2::TokenStream {
 }
 }
 
-/// An extension trait for `proc_macro2::TokenStream` that lets us append any implicit
+/// An extension trait for `quote::Tokens` that lets us append any implicit
 /// template parameters that exist for some type, if necessary.
 trait AppendImplicitTemplateParams {
 fn append_implicit_template_params(

@@ -271,7 +270,7 @@ trait AppendImplicitTemplateParams {
 );
 }
 
-impl AppendImplicitTemplateParams for proc_macro2::TokenStream {
+impl AppendImplicitTemplateParams for quote::Tokens {
 fn append_implicit_template_params(
 &mut self,
 ctx: &BindgenContext,

@@ -440,7 +439,10 @@ impl CodeGenerator for Module {
 if let Some(raw_lines) = ctx.options().module_lines.get(&path) {
 for raw_line in raw_lines {
 found_any = true;
-result.push(proc_macro2::TokenStream::from_str(raw_line).unwrap());
+// FIXME(emilio): The use of `Term` is an abuse, but we abuse it
+// in a bunch more places.
+let line = Term::new(raw_line, Span::call_site());
+result.push(quote! { #line });
 }
 }
 

@@ -754,7 +756,7 @@ impl CodeGenerator for Type {
 pub use
 });
 let path = top_level_path(ctx, item);
-tokens.append_separated(path, quote!(::));
+tokens.append_separated(path, Term::new("::", Span::call_site()));
 tokens.append_all(quote! {
 :: #inner_rust_type as #rust_name ;
 });
@@ -866,7 +868,7 @@ impl<'a> TryToRustTy for Vtable<'a> {
 &self,
 ctx: &BindgenContext,
 _: &(),
-) -> error::Result<proc_macro2::TokenStream> {
+) -> error::Result<quote::Tokens> {
 let name = ctx.rust_ident(self.canonical_name(ctx));
 Ok(quote! {
 #name

@@ -961,8 +963,8 @@ trait FieldCodegen<'a> {
 methods: &mut M,
 extra: Self::Extra,
 ) where
-F: Extend<proc_macro2::TokenStream>,
-M: Extend<proc_macro2::TokenStream>;
+F: Extend<quote::Tokens>,
+M: Extend<quote::Tokens>;
 }
 
 impl<'a> FieldCodegen<'a> for Field {

@@ -981,8 +983,8 @@ impl<'a> FieldCodegen<'a> for Field {
 methods: &mut M,
 _: (),
 ) where
-F: Extend<proc_macro2::TokenStream>,
-M: Extend<proc_macro2::TokenStream>,
+F: Extend<quote::Tokens>,
+M: Extend<quote::Tokens>,
 {
 match *self {
 Field::DataMember(ref data) => {

@@ -1033,8 +1035,8 @@ impl<'a> FieldCodegen<'a> for FieldData {
 methods: &mut M,
 _: (),
 ) where
-F: Extend<proc_macro2::TokenStream>,
-M: Extend<proc_macro2::TokenStream>,
+F: Extend<quote::Tokens>,
+M: Extend<quote::Tokens>,
 {
 // Bitfields are handled by `FieldCodegen` implementations for
 // `BitfieldUnit` and `Bitfield`.

@@ -1168,8 +1170,8 @@ impl<'a> FieldCodegen<'a> for FieldData {
 
 impl BitfieldUnit {
 /// Get the constructor name for this bitfield unit.
-fn ctor_name(&self) -> proc_macro2::TokenStream {
-let ctor_name = Ident::new(&format!("new_bitfield_{}", self.nth()), Span::call_site());
+fn ctor_name(&self) -> quote::Tokens {
+let ctor_name = Term::new(&format!("new_bitfield_{}", self.nth()), Span::call_site());
 quote! {
 #ctor_name
 }

@@ -1187,9 +1189,9 @@ impl Bitfield {
 fn extend_ctor_impl(
 &self,
 ctx: &BindgenContext,
-param_name: proc_macro2::TokenStream,
-mut ctor_impl: proc_macro2::TokenStream,
-) -> proc_macro2::TokenStream {
+param_name: quote::Tokens,
+mut ctor_impl: quote::Tokens,
+) -> quote::Tokens {
 let bitfield_ty = ctx.resolve_type(self.ty());
 let bitfield_ty_layout = bitfield_ty.layout(ctx).expect(
 "Bitfield without layout? Gah!",

@@ -1233,8 +1235,8 @@ impl<'a> FieldCodegen<'a> for BitfieldUnit {
 methods: &mut M,
 _: (),
 ) where
-F: Extend<proc_macro2::TokenStream>,
-M: Extend<proc_macro2::TokenStream>,
+F: Extend<quote::Tokens>,
+M: Extend<quote::Tokens>,
 {
 result.saw_bitfield_unit();
 

@@ -1331,7 +1333,7 @@ impl<'a> FieldCodegen<'a> for BitfieldUnit {
 fn bitfield_getter_name(
 ctx: &BindgenContext,
 bitfield: &Bitfield,
-) -> proc_macro2::TokenStream {
+) -> quote::Tokens {
 let name = bitfield.getter_name();
 let name = ctx.rust_ident_raw(name);
 quote! { #name }

@@ -1340,7 +1342,7 @@ fn bitfield_getter_name(
 fn bitfield_setter_name(
 ctx: &BindgenContext,
 bitfield: &Bitfield,
-) -> proc_macro2::TokenStream {
+) -> quote::Tokens {
 let setter = bitfield.setter_name();
 let setter = ctx.rust_ident_raw(setter);
 quote! { #setter }

@@ -1362,13 +1364,13 @@ impl<'a> FieldCodegen<'a> for Bitfield {
 methods: &mut M,
 (unit_field_name, bitfield_representable_as_int): (&'a str, &mut bool),
 ) where
-F: Extend<proc_macro2::TokenStream>,
-M: Extend<proc_macro2::TokenStream>,
+F: Extend<quote::Tokens>,
+M: Extend<quote::Tokens>,
 {
 let prefix = ctx.trait_prefix();
 let getter_name = bitfield_getter_name(ctx, self);
 let setter_name = bitfield_setter_name(ctx, self);
-let unit_field_ident = Ident::new(unit_field_name, Span::call_site());
+let unit_field_ident = Term::new(unit_field_name, Span::call_site());
 
 let bitfield_ty_item = ctx.resolve_item(self.ty());
 let bitfield_ty = bitfield_ty_item.expect_type();
@@ -1762,8 +1764,6 @@ impl CodeGenerator for CompInfo {
 derives.push("Eq");
 }
 
-derives.extend(item.annotations().derives().iter().map(String::as_str));
-
 if !derives.is_empty() {
 attributes.push(attributes::derives(&derives))
 }

@@ -1803,7 +1803,7 @@ impl CodeGenerator for CompInfo {
 if self.found_unknown_attr() {
 warn!(
 "Type {} has an unknown attribute that may affect layout",
-canonical_ident
+canonical_ident.as_str()
 );
 }
 

@@ -1817,7 +1817,7 @@ impl CodeGenerator for CompInfo {
 if ctx.options().layout_tests && !self.is_forward_declaration() {
 if let Some(layout) = layout {
 let fn_name =
-format!("bindgen_test_layout_{}", canonical_ident);
+format!("bindgen_test_layout_{}", canonical_ident.as_str());
 let fn_name = ctx.rust_ident_raw(fn_name);
 let prefix = ctx.trait_prefix();
 let size_of_expr = quote! {

@@ -1879,7 +1879,7 @@ impl CodeGenerator for CompInfo {
 })
 })
 })
-.collect::<Vec<proc_macro2::TokenStream>>();
+.collect::<Vec<quote::Tokens>>();
 
 asserts
 };

@@ -2019,7 +2019,7 @@ trait MethodCodegen {
 fn codegen_method<'a>(
 &self,
 ctx: &BindgenContext,
-methods: &mut Vec<proc_macro2::TokenStream>,
+methods: &mut Vec<quote::Tokens>,
 method_names: &mut HashMap<String, usize>,
 result: &mut CodegenResult<'a>,
 parent: &CompInfo,

@@ -2030,7 +2030,7 @@ impl MethodCodegen for Method {
 fn codegen_method<'a>(
 &self,
 ctx: &BindgenContext,
-methods: &mut Vec<proc_macro2::TokenStream>,
+methods: &mut Vec<quote::Tokens>,
 method_names: &mut HashMap<String, usize>,
 result: &mut CodegenResult<'a>,
 _parent: &CompInfo,
@@ -2154,13 +2154,9 @@ impl MethodCodegen for Method {
 let mut attrs = vec![];
 attrs.push(attributes::inline());
 
-if signature.must_use() && ctx.options().rust_features().must_use_function {
-attrs.push(attributes::must_use());
-}
-
 let name = ctx.rust_ident(&name);
 methods.push(quote! {
-#(#attrs)*
+#[inline]
 pub unsafe fn #name ( #( #args ),* ) #ret {
 #block
 }
@@ -2235,24 +2231,24 @@ impl std::str::FromStr for EnumVariation {
 enum EnumBuilder<'a> {
 Rust {
 codegen_depth: usize,
-attrs: Vec<proc_macro2::TokenStream>,
-ident: Ident,
-tokens: proc_macro2::TokenStream,
+attrs: Vec<quote::Tokens>,
+ident: Term,
+tokens: quote::Tokens,
 emitted_any_variants: bool,
 },
 Bitfield {
 codegen_depth: usize,
 canonical_name: &'a str,
-tokens: proc_macro2::TokenStream,
+tokens: quote::Tokens,
 },
 Consts {
-variants: Vec<proc_macro2::TokenStream>,
+variants: Vec<quote::Tokens>,
 codegen_depth: usize,
 },
 ModuleConsts {
 codegen_depth: usize,
 module_name: &'a str,
-module_items: Vec<proc_macro2::TokenStream>,
+module_items: Vec<quote::Tokens>,
 },
 }
 

@@ -2271,12 +2267,12 @@ impl<'a> EnumBuilder<'a> {
 /// the representation, and which variation it should be generated as.
 fn new(
 name: &'a str,
-attrs: Vec<proc_macro2::TokenStream>,
-repr: proc_macro2::TokenStream,
+attrs: Vec<quote::Tokens>,
+repr: quote::Tokens,
 enum_variation: EnumVariation,
 enum_codegen_depth: usize,
 ) -> Self {
-let ident = Ident::new(name, Span::call_site());
+let ident = Term::new(name, Span::call_site());
 
 match enum_variation {
 EnumVariation::Bitfield => {

@@ -2314,7 +2310,7 @@ impl<'a> EnumBuilder<'a> {
 }
 
 EnumVariation::ModuleConsts => {
-let ident = Ident::new(CONSTIFIED_ENUM_MODULE_REPR_NAME, Span::call_site());
+let ident = Term::new(CONSTIFIED_ENUM_MODULE_REPR_NAME, Span::call_site());
 let type_definition = quote! {
 #( #attrs )*
 pub type #ident = #repr;

@@ -2335,7 +2331,7 @@ impl<'a> EnumBuilder<'a> {
 ctx: &BindgenContext,
 variant: &EnumVariant,
 mangling_prefix: Option<&str>,
-rust_ty: proc_macro2::TokenStream,
+rust_ty: quote::Tokens,
 result: &mut CodegenResult<'b>,
 is_ty_named: bool,
 ) -> Self {

@@ -2437,9 +2433,9 @@ impl<'a> EnumBuilder<'a> {
 fn build<'b>(
 self,
 ctx: &BindgenContext,
-rust_ty: proc_macro2::TokenStream,
+rust_ty: quote::Tokens,
 result: &mut CodegenResult<'b>,
-) -> proc_macro2::TokenStream {
+) -> quote::Tokens {
 match self {
 EnumBuilder::Rust { attrs, ident, tokens, emitted_any_variants, .. } => {
 let variants = if !emitted_any_variants {
@@ -2610,23 +2606,23 @@ impl CodeGenerator for Enum {
 ctx: &BindgenContext,
 enum_: &Type,
 // Only to avoid recomputing every time.
-enum_canonical_name: &Ident,
+enum_canonical_name: &Term,
 // May be the same as "variant" if it's because the
 // enum is unnamed and we still haven't seen the
 // value.
-variant_name: &Ident,
-referenced_name: &Ident,
-enum_rust_ty: proc_macro2::TokenStream,
+variant_name: &str,
+referenced_name: &Term,
+enum_rust_ty: quote::Tokens,
 result: &mut CodegenResult<'a>,
 ) {
 let constant_name = if enum_.name().is_some() {
 if ctx.options().prepend_enum_name {
-format!("{}_{}", enum_canonical_name, variant_name)
+format!("{}_{}", enum_canonical_name.as_str(), variant_name)
 } else {
-format!("{}", variant_name)
+variant_name.into()
 }
 } else {
-format!("{}", variant_name)
+variant_name.into()
 };
 let constant_name = ctx.rust_ident(constant_name);
 

@@ -2650,7 +2646,7 @@ impl CodeGenerator for Enum {
 );
 
 // A map where we keep a value -> variant relation.
-let mut seen_values = HashMap::<_, Ident>::new();
+let mut seen_values = HashMap::<_, Term>::new();
 let enum_rust_ty = item.to_rust_ty_or_opaque(ctx, &());
 let is_toplevel = item.is_toplevel(ctx);
 

@@ -2722,7 +2718,7 @@ impl CodeGenerator for Enum {
 ctx,
 enum_ty,
 &ident,
-&Ident::new(&*mangled_name, Span::call_site()),
+&*mangled_name,
 existing_variant_name,
 enum_rust_ty.clone(),
 result,

@@ -2763,11 +2759,11 @@ impl CodeGenerator for Enum {
 let parent_name =
 parent_canonical_name.as_ref().unwrap();
 
-Ident::new(
+Term::new(
 &format!(
 "{}_{}",
 parent_name,
-variant_name
+variant_name.as_str()
 ),
 Span::call_site()
 )

@@ -2777,7 +2773,7 @@ impl CodeGenerator for Enum {
 ctx,
 enum_ty,
 &ident,
-&mangled_name,
+mangled_name.as_str(),
 &variant_name,
 enum_rust_ty.clone(),
 result,

@@ -2814,7 +2810,7 @@ trait TryToOpaque {
 &self,
 ctx: &BindgenContext,
 extra: &Self::Extra,
-) -> error::Result<proc_macro2::TokenStream> {
+) -> error::Result<quote::Tokens> {
 self.try_get_layout(ctx, extra).map(|layout| {
 helpers::blob(ctx, layout)
 })

@@ -2841,7 +2837,7 @@ trait ToOpaque: TryToOpaque {
 &self,
 ctx: &BindgenContext,
 extra: &Self::Extra,
-) -> proc_macro2::TokenStream {
+) -> quote::Tokens {
 let layout = self.get_layout(ctx, extra);
 helpers::blob(ctx, layout)
 }

@@ -2867,7 +2863,7 @@ trait TryToRustTy {
 &self,
 ctx: &BindgenContext,
 extra: &Self::Extra,
-) -> error::Result<proc_macro2::TokenStream>;
+) -> error::Result<quote::Tokens>;
 }
 
 /// Fallible conversion to a Rust type or an opaque blob with the correct size

@@ -2882,7 +2878,7 @@ trait TryToRustTyOrOpaque: TryToRustTy + TryToOpaque {
 &self,
 ctx: &BindgenContext,
 extra: &<Self as TryToRustTyOrOpaque>::Extra,
-) -> error::Result<proc_macro2::TokenStream>;
+) -> error::Result<quote::Tokens>;
 }
 
 impl<E, T> TryToRustTyOrOpaque for T

@@ -2896,7 +2892,7 @@ where
 &self,
 ctx: &BindgenContext,
 extra: &E,
-) -> error::Result<proc_macro2::TokenStream> {
+) -> error::Result<quote::Tokens> {
 self.try_to_rust_ty(ctx, extra).or_else(
 |_| if let Ok(layout) =
 self.try_get_layout(ctx, extra)

@@ -2933,7 +2929,7 @@ trait ToRustTyOrOpaque: TryToRustTy + ToOpaque {
 &self,
 ctx: &BindgenContext,
 extra: &<Self as ToRustTyOrOpaque>::Extra,
-) -> proc_macro2::TokenStream;
+) -> quote::Tokens;
 }
 
 impl<E, T> ToRustTyOrOpaque for T

@@ -2946,7 +2942,7 @@ where
 &self,
 ctx: &BindgenContext,
 extra: &E,
-) -> proc_macro2::TokenStream {
+) -> quote::Tokens {
 self.try_to_rust_ty(ctx, extra).unwrap_or_else(|_| {
 self.to_opaque(ctx, extra)
 })

@@ -2978,7 +2974,7 @@ where
 &self,
 ctx: &BindgenContext,
 _: &(),
-) -> error::Result<proc_macro2::TokenStream> {
+) -> error::Result<quote::Tokens> {
 ctx.resolve_item((*self).into()).try_to_rust_ty(ctx, &())
 }
 }
@@ -3002,7 +2998,7 @@ impl TryToRustTy for Item {
 &self,
 ctx: &BindgenContext,
 _: &(),
-) -> error::Result<proc_macro2::TokenStream> {
+) -> error::Result<quote::Tokens> {
 self.kind().expect_type().try_to_rust_ty(ctx, self)
 }
 }

@@ -3026,7 +3022,7 @@ impl TryToRustTy for Type {
 &self,
 ctx: &BindgenContext,
 item: &Item,
-) -> error::Result<proc_macro2::TokenStream> {
+) -> error::Result<quote::Tokens> {
 use self::helpers::ast_ty::*;
 
 match *self.kind() {

@@ -3128,7 +3124,7 @@ impl TryToRustTy for Type {
 }
 TypeKind::Enum(..) => {
 let path = item.namespace_aware_canonical_path(ctx);
-let path = proc_macro2::TokenStream::from_str(&path.join("::")).unwrap();
+let path = Term::new(&path.join("::"), Span::call_site());
 Ok(quote!(#path))
 }
 TypeKind::TemplateInstantiation(ref inst) => {

@@ -3231,7 +3227,7 @@ impl TryToRustTy for TemplateInstantiation {
 &self,
 ctx: &BindgenContext,
 item: &Item,
-) -> error::Result<proc_macro2::TokenStream> {
+) -> error::Result<quote::Tokens> {
 if self.is_opaque(ctx, item) {
 return Err(error::Error::InstantiationOfOpaqueType);
 }

@@ -3243,7 +3239,7 @@ impl TryToRustTy for TemplateInstantiation {
 
 let mut ty = quote! {};
 let def_path = def.namespace_aware_canonical_path(ctx);
-ty.append_separated(def_path.into_iter().map(|p| ctx.rust_ident(p)), quote!(::));
+ty.append_separated(def_path.into_iter().map(|p| ctx.rust_ident(p)), Term::new("::", Span::call_site()));
 
 let def_params = def.self_template_params(ctx);
 if def_params.is_empty() {

@@ -3295,7 +3291,7 @@ impl TryToRustTy for FunctionSig {
 &self,
 ctx: &BindgenContext,
 _: &(),
-) -> error::Result<proc_macro2::TokenStream> {
+) -> error::Result<quote::Tokens> {
 // TODO: we might want to consider ignoring the reference return value.
 let ret = utils::fnsig_return_ty(ctx, &self);
 let arguments = utils::fnsig_arguments(ctx, &self);

@@ -3304,7 +3300,7 @@ impl TryToRustTy for FunctionSig {
 match abi {
 Abi::ThisCall if !ctx.options().rust_features().thiscall_abi => {
 warn!("Skipping function with thiscall ABI that isn't supported by the configured Rust target");
-Ok(proc_macro2::TokenStream::new())
+Ok(quote::Tokens::new())
 }
 _ => {
 Ok(quote! {

@@ -3378,10 +3374,6 @@ impl CodeGenerator for Function {
 
 let mut attributes = vec![];
 
-if signature.must_use() && ctx.options().rust_features().must_use_function {
-attributes.push(attributes::must_use());
-}
-
 if let Some(comment) = item.comment(ctx) {
 attributes.push(attributes::doc(comment));
 }

@@ -3436,7 +3428,7 @@ fn objc_method_codegen(
 method: &ObjCMethod,
 class_name: Option<&str>,
 prefix: &str,
-) -> (proc_macro2::TokenStream, proc_macro2::TokenStream) {
+) -> (quote::Tokens, quote::Tokens) {
 let signature = method.signature();
 let fn_args = utils::fnsig_arguments(ctx, signature);
 let fn_ret = utils::fnsig_return_ty(ctx, signature);

@@ -3550,7 +3542,7 @@ impl CodeGenerator for ObjCInterface {
 }
 }
 
-pub(crate) fn codegen(context: BindgenContext) -> (Vec<proc_macro2::TokenStream>, BindgenOptions) {
+pub(crate) fn codegen(context: BindgenContext) -> (Vec<quote::Tokens>, BindgenOptions) {
 context.gen(|context| {
 let _t = context.timer("codegen");
 let counter = Cell::new(0);
@ -3586,12 +3578,12 @@ mod utils {
use ir::function::FunctionSig;
use ir::item::{Item, ItemCanonicalPath};
use ir::ty::TypeKind;
use proc_macro2;
use quote;
use proc_macro2::{Term, Span};
use std::mem;
use std::str::FromStr;

pub fn prepend_bitfield_unit_type(result: &mut Vec<proc_macro2::TokenStream>) {
let bitfield_unit_type = proc_macro2::TokenStream::from_str(include_str!("./bitfield_unit.rs")).unwrap();
pub fn prepend_bitfield_unit_type(result: &mut Vec<quote::Tokens>) {
let bitfield_unit_type = Term::new(include_str!("./bitfield_unit.rs"), Span::call_site());
let bitfield_unit_type = quote!(#bitfield_unit_type);

let items = vec![bitfield_unit_type];
@ -3601,7 +3593,7 @@ mod utils {

pub fn prepend_objc_header(
ctx: &BindgenContext,
result: &mut Vec<proc_macro2::TokenStream>,
result: &mut Vec<quote::Tokens>,
) {
let use_objc = if ctx.options().objc_extern_crate {
quote! {
@ -3626,7 +3618,7 @@ mod utils {

pub fn prepend_block_header(
ctx: &BindgenContext,
result: &mut Vec<proc_macro2::TokenStream>,
result: &mut Vec<quote::Tokens>,
) {
let use_block = if ctx.options().block_extern_crate {
quote! {
@ -3645,7 +3637,7 @@ mod utils {

pub fn prepend_union_types(
ctx: &BindgenContext,
result: &mut Vec<proc_macro2::TokenStream>,
result: &mut Vec<quote::Tokens>,
) {
let prefix = ctx.trait_prefix();

@ -3744,7 +3736,7 @@ mod utils {

pub fn prepend_incomplete_array_types(
ctx: &BindgenContext,
result: &mut Vec<proc_macro2::TokenStream>,
result: &mut Vec<quote::Tokens>,
) {
let prefix = ctx.trait_prefix();

@ -3817,7 +3809,7 @@ mod utils {
}

pub fn prepend_complex_type(
result: &mut Vec<proc_macro2::TokenStream>,
result: &mut Vec<quote::Tokens>,
) {
let complex_type = quote! {
#[derive(PartialEq, Copy, Clone, Hash, Debug, Default)]
@ -3836,14 +3828,18 @@ mod utils {
pub fn build_path(
item: &Item,
ctx: &BindgenContext,
) -> error::Result<proc_macro2::TokenStream> {
) -> error::Result<quote::Tokens> {
use proc_macro2::{Term, Span};

let path = item.namespace_aware_canonical_path(ctx);
let tokens = proc_macro2::TokenStream::from_str(&path.join("::")).unwrap();
let path = Term::new(&path.join("::"), Span::call_site());
let tokens = quote! {#path};
//tokens.append_separated(path, "::");

Ok(tokens)
}

fn primitive_ty(ctx: &BindgenContext, name: &str) -> proc_macro2::TokenStream {
fn primitive_ty(ctx: &BindgenContext, name: &str) -> quote::Tokens {
let ident = ctx.rust_ident_raw(name);
quote! {
#ident
@ -3853,7 +3849,7 @@ mod utils {
pub fn type_from_named(
ctx: &BindgenContext,
name: &str,
) -> Option<proc_macro2::TokenStream> {
) -> Option<quote::Tokens> {
// FIXME: We could use the inner item to check this is really a
// primitive type but, who the heck overrides these anyway?
Some(match name {
@ -3876,7 +3872,7 @@ mod utils {
pub fn fnsig_return_ty(
ctx: &BindgenContext,
sig: &FunctionSig,
) -> proc_macro2::TokenStream {
) -> quote::Tokens {
let return_item = ctx.resolve_item(sig.return_type());
if let TypeKind::Void = *return_item.kind().expect_type().kind() {
quote! { }
@ -3891,7 +3887,7 @@ mod utils {
pub fn fnsig_arguments(
ctx: &BindgenContext,
sig: &FunctionSig,
) -> Vec<proc_macro2::TokenStream> {
) -> Vec<quote::Tokens> {
use super::ToPtr;

let mut unnamed_arguments = 0;
@ -3954,7 +3950,7 @@ mod utils {
pub fn fnsig_block(
ctx: &BindgenContext,
sig: &FunctionSig,
) -> proc_macro2::TokenStream {
) -> quote::Tokens {
let args = sig.argument_types().iter().map(|&(_, ty)| {
let arg_item = ctx.resolve_item(ty);

@ -6,7 +6,8 @@ use ir::comp::CompInfo;
use ir::context::BindgenContext;
use ir::layout::Layout;
use ir::ty::{Type, TypeKind};
use proc_macro2::{self, Ident, Span};
use quote;
use proc_macro2::{Term, Span};
use std::cmp;

/// Trace the layout of struct.
@ -153,7 +154,7 @@ impl<'a> StructLayoutTracker<'a> {
field_name: &str,
field_ty: &Type,
field_offset: Option<usize>,
) -> Option<proc_macro2::TokenStream> {
) -> Option<quote::Tokens> {
let mut field_layout = field_ty.layout(self.ctx)?;

if let TypeKind::Array(inner, len) =
@ -235,7 +236,7 @@ impl<'a> StructLayoutTracker<'a> {
padding_layout.map(|layout| self.padding_field(layout))
}

pub fn pad_struct(&mut self, layout: Layout) -> Option<proc_macro2::TokenStream> {
pub fn pad_struct(&mut self, layout: Layout) -> Option<quote::Tokens> {
debug!(
"pad_struct:\n\tself = {:#?}\n\tlayout = {:#?}",
self,
@ -309,13 +310,13 @@ impl<'a> StructLayoutTracker<'a> {
align_to(self.latest_offset, layout.align) - self.latest_offset
}

fn padding_field(&mut self, layout: Layout) -> proc_macro2::TokenStream {
fn padding_field(&mut self, layout: Layout) -> quote::Tokens {
let ty = helpers::blob(self.ctx, layout);
let padding_count = self.padding_count;

self.padding_count += 1;

let padding_field_name = Ident::new(&format!("__bindgen_padding_{}", padding_count), Span::call_site());
let padding_field_name = Term::new(&format!("__bindgen_padding_{}", padding_count), Span::call_site());

self.max_field_align = cmp::max(self.max_field_align, layout.align);

@ -98,8 +98,6 @@ macro_rules! rust_target_base {
|
|||
=> Stable_1_25 => 1.25;
|
||||
/// Rust stable 1.26
|
||||
=> Stable_1_26 => 1.26;
|
||||
/// Rust stable 1.27
|
||||
=> Stable_1_27 => 1.27;
|
||||
/// Nightly rust
|
||||
=> Nightly => nightly;
|
||||
);
|
||||
|
@ -180,10 +178,6 @@ rust_feature_def!(
|
|||
/// [i128 / u128 support](https://doc.rust-lang.org/std/primitive.i128.html)
|
||||
=> i128_and_u128;
|
||||
}
|
||||
Stable_1_27 {
|
||||
/// `must_use` attribute on functions ([PR](https://github.com/rust-lang/rust/pull/48925))
|
||||
=> must_use_function;
|
||||
}
|
||||
Nightly {
|
||||
/// `thiscall` calling convention ([Tracking issue](https://github.com/rust-lang/rust/issues/42202))
|
||||
=> thiscall_abi;
|
||||
|
|
|
@ -58,8 +58,6 @@ pub struct Annotations {
|
|||
/// In that case, bindgen will generate a constant for `Bar` instead of
|
||||
/// `Baz`.
|
||||
constify_enum_variant: bool,
|
||||
/// List of explicit derives for this type.
|
||||
derives: Vec<String>,
|
||||
}
|
||||
|
||||
fn parse_accessor(s: &str) -> FieldAccessorKind {
|
||||
|
@ -81,7 +79,6 @@ impl Default for Annotations {
|
|||
private_fields: None,
|
||||
accessor_kind: None,
|
||||
constify_enum_variant: false,
|
||||
derives: vec![],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -133,11 +130,6 @@ impl Annotations {
|
|||
self.use_instead_of.as_ref().map(|s| &**s)
|
||||
}
|
||||
|
||||
/// The list of derives that have been specified in this annotation.
|
||||
pub fn derives(&self) -> &[String] {
|
||||
&self.derives
|
||||
}
|
||||
|
||||
/// Should we avoid implementing the `Copy` trait?
|
||||
pub fn disallow_copy(&self) -> bool {
|
||||
self.disallow_copy
|
||||
|
@ -173,9 +165,6 @@ impl Annotations {
|
|||
attr.value.split("::").map(Into::into).collect(),
|
||||
)
|
||||
}
|
||||
"derive" => {
|
||||
self.derives.push(attr.value)
|
||||
}
|
||||
"private" => {
|
||||
self.private_fields = Some(attr.value != "false")
|
||||
}
|
||||
|
|
|
@ -10,7 +10,7 @@ use super::derive::{CanDeriveCopy, CanDeriveDebug, CanDeriveDefault,
|
|||
CanDeriveHash, CanDerivePartialOrd, CanDeriveOrd,
|
||||
CanDerivePartialEq, CanDeriveEq, CanDerive};
|
||||
use super::int::IntKind;
|
||||
use super::item::{IsOpaque, Item, ItemAncestors, ItemSet};
|
||||
use super::item::{IsOpaque, Item, ItemAncestors, ItemCanonicalPath, ItemSet};
|
||||
use super::item_kind::ItemKind;
|
||||
use super::module::{Module, ModuleKind};
|
||||
use super::template::{TemplateInstantiation, TemplateParameters};
|
||||
|
@ -24,7 +24,7 @@ use cexpr;
|
|||
use clang::{self, Cursor};
|
||||
use clang_sys;
|
||||
use parse::ClangItemParser;
|
||||
use proc_macro2::{Ident, Span};
|
||||
use proc_macro2::{Term, Span};
|
||||
use std::borrow::Cow;
|
||||
use std::cell::Cell;
|
||||
use std::collections::{HashMap, HashSet, hash_map};
|
||||
|
@ -922,7 +922,7 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
}
|
||||
|
||||
/// Returns a mangled name as a rust identifier.
|
||||
pub fn rust_ident<S>(&self, name: S) -> Ident
|
||||
pub fn rust_ident<S>(&self, name: S) -> Term
|
||||
where
|
||||
S: AsRef<str>
|
||||
{
|
||||
|
@ -930,11 +930,11 @@ If you encounter an error missing from this list, please file an issue or a PR!"
}

/// Returns a mangled name as a rust identifier.
pub fn rust_ident_raw<T>(&self, name: T) -> Ident
pub fn rust_ident_raw<T>(&self, name: T) -> Term
where
T: AsRef<str>
{
Ident::new(name.as_ref(), Span::call_site())
Term::new(name.as_ref(), Span::call_site())
}

/// Iterate over all items that have been defined.
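For context, a minimal sketch (not part of this patch) of how proc-macro2 0.3's `Term` stands in for 0.4's `Ident` when an identifier is spliced into quoted tokens, as `rust_ident_raw` does in the hunk above; the helper and field names are illustrative only:

    extern crate proc_macro2; // assumed 0.3, per the lockfile change
    #[macro_use]
    extern crate quote;       // assumed 0.5, per the lockfile change

    use proc_macro2::{Span, Term};

    // Builds an identifier-like token and interpolates it into a token stream.
    fn field_decl(name: &str) -> quote::Tokens {
        let ident = Term::new(name, Span::call_site());
        quote! { pub #ident: u32, }
    }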
@ -1107,7 +1107,7 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
_ => continue,
|
||||
}
|
||||
|
||||
let path = item.path_for_whitelisting(self);
|
||||
let path = item.canonical_path(self);
|
||||
let replacement = self.replacements.get(&path[1..]);
|
||||
|
||||
if let Some(replacement) = replacement {
|
||||
|
@ -2307,7 +2307,7 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
return true;
|
||||
}
|
||||
|
||||
let name = item.path_for_whitelisting(self)[1..].join("::");
|
||||
let name = item.canonical_path(self)[1..].join("::");
|
||||
debug!("whitelisted_items: testing {:?}", name);
|
||||
match *item.kind() {
|
||||
ItemKind::Module(..) => true,
|
||||
|
@ -2324,7 +2324,7 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
|
||||
let parent = self.resolve_item(item.parent_id());
|
||||
if parent.is_module() {
|
||||
let mut prefix_path = parent.path_for_whitelisting(self);
|
||||
let mut prefix_path = parent.canonical_path(self);
|
||||
|
||||
// Unnamed top-level enums are special and we
|
||||
// whitelist them via the `whitelisted_vars` filter,
|
||||
|
@ -2393,7 +2393,7 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
|
||||
/// Convenient method for getting the prefix to use for most traits in
|
||||
/// codegen depending on the `use_core` option.
|
||||
pub fn trait_prefix(&self) -> Ident {
|
||||
pub fn trait_prefix(&self) -> Term {
|
||||
if self.options().use_core {
|
||||
self.rust_ident_raw("core")
|
||||
} else {
|
||||
|
@ -2570,19 +2570,19 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
|
||||
/// Check if `--no-partialeq` flag is enabled for this item.
|
||||
pub fn no_partialeq_by_name(&self, item: &Item) -> bool {
|
||||
let name = item.path_for_whitelisting(self)[1..].join("::");
|
||||
let name = item.canonical_path(self)[1..].join("::");
|
||||
self.options().no_partialeq_types.matches(&name)
|
||||
}
|
||||
|
||||
/// Check if `--no-copy` flag is enabled for this item.
|
||||
pub fn no_copy_by_name(&self, item: &Item) -> bool {
|
||||
let name = item.path_for_whitelisting(self)[1..].join("::");
|
||||
let name = item.canonical_path(self)[1..].join("::");
|
||||
self.options().no_copy_types.matches(&name)
|
||||
}
|
||||
|
||||
/// Check if `--no-hash` flag is enabled for this item.
|
||||
pub fn no_hash_by_name(&self, item: &Item) -> bool {
|
||||
let name = item.path_for_whitelisting(self)[1..].join("::");
|
||||
let name = item.canonical_path(self)[1..].join("::");
|
||||
self.options().no_hash_types.matches(&name)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -12,8 +12,6 @@ use ir::derive::{CanTriviallyDeriveDebug, CanTriviallyDeriveHash,
|
|||
CanTriviallyDerivePartialEqOrPartialOrd, CanDerive};
|
||||
use parse::{ClangItemParser, ClangSubItemParser, ParseError, ParseResult};
|
||||
use quote;
|
||||
use quote::TokenStreamExt;
|
||||
use proc_macro2;
|
||||
use std::io;
|
||||
|
||||
const RUST_DERIVE_FUNPTR_LIMIT: usize = 12;
|
||||
|
@ -194,7 +192,7 @@ impl Abi {
|
|||
}
|
||||
|
||||
impl quote::ToTokens for Abi {
|
||||
fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
|
||||
fn to_tokens(&self, tokens: &mut quote::Tokens) {
|
||||
tokens.append_all(match *self {
|
||||
Abi::C => quote! { "C" },
|
||||
Abi::Stdcall => quote! { "stdcall" },
|
||||
|
@ -223,9 +221,6 @@ pub struct FunctionSig {
|
|||
/// Whether this function is variadic.
|
||||
is_variadic: bool,
|
||||
|
||||
/// Whether this function's return value must be used.
|
||||
must_use: bool,
|
||||
|
||||
/// The ABI of this function.
|
||||
abi: Abi,
|
||||
}
|
||||
|
@ -313,16 +308,14 @@ impl FunctionSig {
|
|||
/// Construct a new function signature.
|
||||
pub fn new(
|
||||
return_type: TypeId,
|
||||
argument_types: Vec<(Option<String>, TypeId)>,
|
||||
arguments: Vec<(Option<String>, TypeId)>,
|
||||
is_variadic: bool,
|
||||
must_use: bool,
|
||||
abi: Abi,
|
||||
) -> Self {
|
||||
FunctionSig {
|
||||
return_type,
|
||||
argument_types,
|
||||
is_variadic,
|
||||
must_use,
|
||||
return_type: return_type,
|
||||
argument_types: arguments,
|
||||
is_variadic: is_variadic,
|
||||
abi: abi,
|
||||
}
|
||||
}
|
||||
|
@ -394,7 +387,6 @@ impl FunctionSig {
|
|||
}
|
||||
};
|
||||
|
||||
let must_use = cursor.has_simple_attr("warn_unused_result");
|
||||
let is_method = cursor.kind() == CXCursor_CXXMethod;
|
||||
let is_constructor = cursor.kind() == CXCursor_Constructor;
|
||||
let is_destructor = cursor.kind() == CXCursor_Destructor;
|
||||
|
@ -466,7 +458,7 @@ impl FunctionSig {
|
|||
warn!("Unknown calling convention: {:?}", call_conv);
|
||||
}
|
||||
|
||||
Ok(Self::new(ret.into(), args, ty.is_variadic(), must_use, abi))
|
||||
Ok(Self::new(ret.into(), args, ty.is_variadic(), abi))
|
||||
}
|
||||
|
||||
/// Get this function signature's return type.
|
||||
|
@ -492,11 +484,6 @@ impl FunctionSig {
|
|||
self.is_variadic && !self.argument_types.is_empty()
|
||||
}
|
||||
|
||||
/// Must this function's return value be used?
|
||||
pub fn must_use(&self) -> bool {
|
||||
self.must_use
|
||||
}
|
||||
|
||||
/// Are function pointers with this signature able to derive Rust traits?
|
||||
/// Rust only supports deriving traits for function pointers with a limited
|
||||
/// number of parameters and a couple ABIs.
|
||||
|
|
|
@ -635,7 +635,7 @@ impl Item {
|
|||
return true;
|
||||
}
|
||||
|
||||
let path = self.path_for_whitelisting(ctx);
|
||||
let path = self.canonical_path(ctx);
|
||||
let name = path[1..].join("::");
|
||||
ctx.options().blacklisted_items.matches(&name) ||
|
||||
match self.kind {
|
||||
|
@ -875,13 +875,10 @@ impl Item {
|
|||
|
||||
let name = names.join("_");
|
||||
|
||||
let name = if opt.user_mangled == UserMangled::Yes {
|
||||
ctx.parse_callbacks()
|
||||
.and_then(|callbacks| callbacks.item_name(&name))
|
||||
.unwrap_or(name)
|
||||
} else {
|
||||
name
|
||||
};
|
||||
let name = ctx
|
||||
.parse_callbacks()
|
||||
.and_then(|callbacks| callbacks.item_name(&name))
|
||||
.unwrap_or(name);
|
||||
|
||||
ctx.rust_mangle(&name).into_owned()
|
||||
}
|
||||
|
@ -975,44 +972,6 @@ impl Item {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the path we should use for whitelisting / blacklisting, which
|
||||
/// doesn't include user-mangling.
|
||||
pub fn path_for_whitelisting(&self, ctx: &BindgenContext) -> Vec<String> {
|
||||
self.compute_path(ctx, UserMangled::No)
|
||||
}
|
||||
|
||||
fn compute_path(&self, ctx: &BindgenContext, mangled: UserMangled) -> Vec<String> {
|
||||
if let Some(path) = self.annotations().use_instead_of() {
|
||||
let mut ret =
|
||||
vec![ctx.resolve_item(ctx.root_module()).name(ctx).get()];
|
||||
ret.extend_from_slice(path);
|
||||
return ret;
|
||||
}
|
||||
|
||||
let target = ctx.resolve_item(self.name_target(ctx));
|
||||
let mut path: Vec<_> = target
|
||||
.ancestors(ctx)
|
||||
.chain(iter::once(ctx.root_module().into()))
|
||||
.map(|id| ctx.resolve_item(id))
|
||||
.filter(|item| {
|
||||
item.id() == target.id() ||
|
||||
item.as_module().map_or(false, |module| {
|
||||
!module.is_inline() ||
|
||||
ctx.options().conservative_inline_namespaces
|
||||
})
|
||||
})
|
||||
.map(|item| {
|
||||
ctx.resolve_item(item.name_target(ctx))
|
||||
.name(ctx)
|
||||
.within_namespaces()
|
||||
.user_mangled(mangled)
|
||||
.get()
|
||||
})
|
||||
.collect();
|
||||
path.reverse();
|
||||
path
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> IsOpaque for T
|
||||
|
@ -1040,7 +999,7 @@ impl IsOpaque for Item {
|
|||
);
|
||||
self.annotations.opaque() ||
|
||||
self.as_type().map_or(false, |ty| ty.is_opaque(ctx, self)) ||
|
||||
ctx.opaque_by_name(&self.path_for_whitelisting(ctx))
|
||||
ctx.opaque_by_name(&self.canonical_path(ctx))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1862,19 +1821,35 @@ impl ItemCanonicalPath for Item {
|
|||
}
|
||||
|
||||
fn canonical_path(&self, ctx: &BindgenContext) -> Vec<String> {
|
||||
self.compute_path(ctx, UserMangled::Yes)
|
||||
}
|
||||
}
|
||||
if let Some(path) = self.annotations().use_instead_of() {
|
||||
let mut ret =
|
||||
vec![ctx.resolve_item(ctx.root_module()).name(ctx).get()];
|
||||
ret.extend_from_slice(path);
|
||||
return ret;
|
||||
}
|
||||
|
||||
/// Whether to use the user-mangled name (mangled by the `item_name` callback or
|
||||
/// not.
|
||||
///
|
||||
/// Most of the callers probably want just yes, but the ones dealing with
|
||||
/// whitelisting and blacklisting don't.
|
||||
#[derive(Copy, Clone, Debug, PartialEq)]
|
||||
enum UserMangled {
|
||||
No,
|
||||
Yes,
|
||||
let target = ctx.resolve_item(self.name_target(ctx));
|
||||
let mut path: Vec<_> = target
|
||||
.ancestors(ctx)
|
||||
.chain(iter::once(ctx.root_module().into()))
|
||||
.map(|id| ctx.resolve_item(id))
|
||||
.filter(|item| {
|
||||
item.id() == target.id() ||
|
||||
item.as_module().map_or(false, |module| {
|
||||
!module.is_inline() ||
|
||||
ctx.options().conservative_inline_namespaces
|
||||
})
|
||||
})
|
||||
.map(|item| {
|
||||
ctx.resolve_item(item.name_target(ctx))
|
||||
.name(ctx)
|
||||
.within_namespaces()
|
||||
.get()
|
||||
})
|
||||
.collect();
|
||||
path.reverse();
|
||||
path
|
||||
}
|
||||
}
|
||||
|
||||
/// Builder struct for naming variations, which hold inside different
|
||||
|
@ -1884,7 +1859,6 @@ pub struct NameOptions<'a> {
|
|||
item: &'a Item,
|
||||
ctx: &'a BindgenContext,
|
||||
within_namespaces: bool,
|
||||
user_mangled: UserMangled,
|
||||
}
|
||||
|
||||
impl<'a> NameOptions<'a> {
|
||||
|
@ -1894,7 +1868,6 @@ impl<'a> NameOptions<'a> {
|
|||
item: item,
|
||||
ctx: ctx,
|
||||
within_namespaces: false,
|
||||
user_mangled: UserMangled::Yes,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1905,11 +1878,6 @@ impl<'a> NameOptions<'a> {
|
|||
self
|
||||
}
|
||||
|
||||
fn user_mangled(&mut self, user_mangled: UserMangled) -> &mut Self {
|
||||
self.user_mangled = user_mangled;
|
||||
self
|
||||
}
|
||||
|
||||
/// Construct a name `String`
|
||||
pub fn get(&self) -> String {
|
||||
self.item.real_canonical_name(self.ctx, self)
|
||||
|
|
|
@ -12,7 +12,8 @@ use clang_sys::CXCursor_ObjCClassRef;
|
|||
use clang_sys::CXCursor_ObjCInstanceMethodDecl;
|
||||
use clang_sys::CXCursor_ObjCProtocolDecl;
|
||||
use clang_sys::CXCursor_ObjCProtocolRef;
|
||||
use proc_macro2::{TokenStream, Ident, Span};
|
||||
use quote;
|
||||
use proc_macro2::{Term, Span};
|
||||
|
||||
/// Objective C interface as used in TypeKind
|
||||
///
|
||||
|
@ -212,11 +213,11 @@ impl ObjCMethod {
|
|||
}
|
||||
|
||||
/// Formats the method call
|
||||
pub fn format_method_call(&self, args: &[TokenStream]) -> TokenStream {
|
||||
pub fn format_method_call(&self, args: &[quote::Tokens]) -> quote::Tokens {
|
||||
let split_name: Vec<_> = self.name
|
||||
.split(':')
|
||||
.filter(|p| !p.is_empty())
|
||||
.map(|name| Ident::new(name, Span::call_site()))
|
||||
.map(|name| Term::new(name, Span::call_site()))
|
||||
.collect();
|
||||
|
||||
// No arguments
|
||||
|
@ -242,7 +243,7 @@ impl ObjCMethod {
|
|||
let arg = arg.to_string();
|
||||
let name_and_sig: Vec<&str> = arg.split(' ').collect();
|
||||
let name = name_and_sig[0];
|
||||
args_without_types.push(Ident::new(name, Span::call_site()))
|
||||
args_without_types.push(Term::new(name, Span::call_site()))
|
||||
};
|
||||
|
||||
let args = split_name
|
||||
|
|
|
@ -199,9 +199,6 @@ impl ClangSubItemParser for Var {
|
|||
true,
|
||||
ctx,
|
||||
);
|
||||
if let Some(callbacks) = ctx.parse_callbacks() {
|
||||
callbacks.str_macro(&name, &val);
|
||||
}
|
||||
(TypeKind::Pointer(char_ty), VarType::String(val))
|
||||
}
|
||||
EvalResult::Int(Wrapping(value)) => {
|
||||
|
|
|
@ -1675,7 +1675,7 @@ fn ensure_libclang_is_loaded() {
#[derive(Debug)]
pub struct Bindings {
options: BindgenOptions,
module: proc_macro2::TokenStream,
module: quote::Tokens,
}

impl Bindings {
@ -1863,7 +1863,7 @@ impl Bindings {
None => {
let path = which::which("rustfmt")
.map_err(|e| {
io::Error::new(io::ErrorKind::Other, format!("{}", e))
io::Error::new(io::ErrorKind::Other, e.to_owned())
})?;

Cow::Owned(path)
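A minimal sketch (not part of this patch) of how the downgraded `which` 1.0.5 API is consumed here: its `which()` returns `Result<PathBuf, &'static str>` (see the vendored source further down), so the error is adapted to `io::Error` by hand. The function name is illustrative only:

    extern crate which; // assumed 1.0.5, per the lockfile change

    use std::io;
    use std::path::PathBuf;

    // Locates rustfmt on PATH and converts the &'static str error into an
    // io::Error, mirroring the updated call in the hunk above.
    fn find_rustfmt() -> io::Result<PathBuf> {
        which::which("rustfmt")
            .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_owned()))
    }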
@ -1 +1 @@
|
|||
{"files":{"Cargo.toml":"62733232b8d4a9f167d51d248d9907c303cb806455a36a442917dc9774d254d3","LICENSE.txt":"0041560f5d419c30e1594567f3b7ac2bc078ff6a68f437e0348ba85d9cf99112","README.md":"2d1ccfee41e041a9d2f39c5337086593cb0da100ee2b316afcc3d51dbff060b1","appveyor.yml":"e4869d4badf63843e322477ccdd117e3e3b29f28dd76f2c5733156f6c30780ef","src/checker.rs":"3187d4f53a03430c548c213df5271f33f5238c29408be10e0d6cb6256c2bffb4","src/error.rs":"d579a2e37c3a1a39f229d882bf1975271100a379df98c991607815056c384610","src/finder.rs":"de85f10fa1e9df8c682c988e334dc1fb9e752741c89e7af35d76c08c8230e7bd","src/helper.rs":"1ea08b0c4675ac46ee71ef2225bdef8a6d055a9644d5d42ba94f0c8e9d2d3279","src/lib.rs":"9c336275288f113826d909ada1a8a83bd040499213bec1d34c526b1bfc368b60"},"package":"b57acb10231b9493c8472b20cb57317d0679a49e0bdbee44b3b803a6473af164"}
|
||||
{"files":{"Cargo.toml":"3f02328e981dc20612ba4f46567c5fe74a70843b5b3a2413ae8c56b83052c3c7","LICENSE.txt":"0041560f5d419c30e1594567f3b7ac2bc078ff6a68f437e0348ba85d9cf99112","README.md":"0d0396a19e15ffb94087423c835bc67e92489ff3f014bff65d0fd939ac18cb68","appveyor.yml":"e4869d4badf63843e322477ccdd117e3e3b29f28dd76f2c5733156f6c30780ef","src/lib.rs":"9fc692e7bdfa43dbc1e38e68b257e0a07a2b5e99158cdd7da1f5e5ee659e55e7"},"package":"e84a603e7e0b1ce1aa1ee2b109c7be00155ce52df5081590d1ffb93f4f515cb2"}
|
|
@ -12,20 +12,15 @@
|
|||
|
||||
[package]
|
||||
name = "which"
|
||||
version = "2.0.1"
|
||||
authors = ["Harry Fei <tiziyuanfang@gmail.com>"]
|
||||
version = "1.0.5"
|
||||
authors = ["fangyuanziti <tiziyuanfang@gmail.com>"]
|
||||
description = "A Rust equivalent of Unix command \"which\". Locate installed execuable in cross platforms."
|
||||
documentation = "https://docs.rs/which/"
|
||||
readme = "README.md"
|
||||
keywords = ["which", "which-rs", "unix", "command"]
|
||||
categories = ["os", "filesystem"]
|
||||
license = "MIT"
|
||||
repository = "https://github.com/harryfei/which-rs.git"
|
||||
[dependencies.failure]
|
||||
version = "0.1.1"
|
||||
features = ["std"]
|
||||
default-features = false
|
||||
|
||||
repository = "https://github.com/fangyuanziti/which-rs.git"
|
||||
[dependencies.libc]
|
||||
version = "0.2.10"
|
||||
[dev-dependencies.tempdir]
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
[![Travis Build Status](https://travis-ci.org/harryfei/which-rs.svg?branch=master)](https://travis-ci.org/harryfei/which-rs)
|
||||
[![Travis Build Status](https://travis-ci.org/fangyuanziti/which-rs.svg?branch=master)](https://travis-ci.org/fangyuanziti/which-rs)
|
||||
[![Appveyor Build status](https://ci.appveyor.com/api/projects/status/1y40b135iaixs9x6?svg=true)](https://ci.appveyor.com/project/HarryFei/which-rs)
|
||||
|
||||
# which
|
||||
|
|
|
@ -1,70 +0,0 @@
|
|||
use std::fs;
|
||||
#[cfg(unix)]
|
||||
use std::ffi::CString;
|
||||
#[cfg(unix)]
|
||||
use std::os::unix::ffi::OsStrExt;
|
||||
use std::path::Path;
|
||||
#[cfg(unix)]
|
||||
use libc;
|
||||
use finder::Checker;
|
||||
|
||||
pub struct ExecutableChecker;
|
||||
|
||||
impl ExecutableChecker {
|
||||
pub fn new() -> ExecutableChecker {
|
||||
ExecutableChecker
|
||||
}
|
||||
}
|
||||
|
||||
impl Checker for ExecutableChecker {
|
||||
#[cfg(unix)]
|
||||
fn is_valid(&self, path: &Path) -> bool {
|
||||
CString::new(path.as_os_str().as_bytes())
|
||||
.and_then(|c| Ok(unsafe { libc::access(c.as_ptr(), libc::X_OK) == 0 }))
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
fn is_valid(&self, _path: &Path) -> bool {
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
pub struct ExistedChecker;
|
||||
|
||||
impl ExistedChecker {
|
||||
pub fn new() -> ExistedChecker {
|
||||
ExistedChecker
|
||||
}
|
||||
}
|
||||
|
||||
impl Checker for ExistedChecker {
|
||||
fn is_valid(&self, path: &Path) -> bool {
|
||||
fs::metadata(path)
|
||||
.map(|metadata| metadata.is_file())
|
||||
.unwrap_or(false)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct CompositeChecker {
|
||||
checkers: Vec<Box<Checker>>,
|
||||
}
|
||||
|
||||
impl CompositeChecker {
|
||||
pub fn new() -> CompositeChecker {
|
||||
CompositeChecker {
|
||||
checkers: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn add_checker(mut self, checker: Box<Checker>) -> CompositeChecker {
|
||||
self.checkers.push(checker);
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl Checker for CompositeChecker {
|
||||
fn is_valid(&self, path: &Path) -> bool {
|
||||
self.checkers.iter().all(|checker| checker.is_valid(path))
|
||||
}
|
||||
}
|
|
@ -1,70 +0,0 @@
|
|||
use failure::{Backtrace, Context, Fail};
|
||||
use std;
|
||||
use std::fmt::{self, Display};
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Error {
|
||||
inner: Context<ErrorKind>,
|
||||
}
|
||||
|
||||
// To suppress false positives from cargo-clippy
|
||||
#[cfg_attr(feature = "cargo-clippy", allow(empty_line_after_outer_attr))]
|
||||
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
|
||||
pub enum ErrorKind {
|
||||
BadAbsolutePath,
|
||||
BadRelativePath,
|
||||
CannotFindBinaryPath,
|
||||
CannotGetCurrentDir,
|
||||
}
|
||||
|
||||
impl Fail for ErrorKind {}
|
||||
|
||||
impl Display for ErrorKind {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
let display = match *self {
|
||||
ErrorKind::BadAbsolutePath => "Bad absolute path",
|
||||
ErrorKind::BadRelativePath => "Bad relative path",
|
||||
ErrorKind::CannotFindBinaryPath => "Cannot find binary path",
|
||||
ErrorKind::CannotGetCurrentDir => "Cannot get current directory",
|
||||
};
|
||||
f.write_str(display)
|
||||
}
|
||||
}
|
||||
|
||||
impl Fail for Error {
|
||||
fn cause(&self) -> Option<&Fail> {
|
||||
self.inner.cause()
|
||||
}
|
||||
|
||||
fn backtrace(&self) -> Option<&Backtrace> {
|
||||
self.inner.backtrace()
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for Error {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
Display::fmt(&self.inner, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl Error {
|
||||
pub fn kind(&self) -> ErrorKind {
|
||||
*self.inner.get_context()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<ErrorKind> for Error {
|
||||
fn from(kind: ErrorKind) -> Error {
|
||||
Error {
|
||||
inner: Context::new(kind),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Context<ErrorKind>> for Error {
|
||||
fn from(inner: Context<ErrorKind>) -> Error {
|
||||
Error { inner }
|
||||
}
|
||||
}
|
||||
|
||||
pub type Result<T> = std::result::Result<T, Error>;
|
|
@ -1,155 +0,0 @@
|
|||
use error::*;
|
||||
#[cfg(windows)]
|
||||
use helper::has_executable_extension;
|
||||
use std::env;
|
||||
use std::ffi::OsStr;
|
||||
#[cfg(windows)]
|
||||
use std::ffi::OsString;
|
||||
use std::iter;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
pub trait Checker {
|
||||
fn is_valid(&self, path: &Path) -> bool;
|
||||
}
|
||||
|
||||
trait PathExt {
|
||||
fn has_separator(&self) -> bool;
|
||||
|
||||
fn to_absolute<P>(self, cwd: P) -> PathBuf
|
||||
where
|
||||
P: AsRef<Path>;
|
||||
}
|
||||
|
||||
impl PathExt for PathBuf {
|
||||
fn has_separator(&self) -> bool {
|
||||
self.components().count() > 1
|
||||
}
|
||||
|
||||
fn to_absolute<P>(self, cwd: P) -> PathBuf
|
||||
where
|
||||
P: AsRef<Path>,
|
||||
{
|
||||
if self.is_absolute() {
|
||||
self
|
||||
} else {
|
||||
let mut new_path = PathBuf::from(cwd.as_ref());
|
||||
new_path.push(self);
|
||||
new_path
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Finder;
|
||||
|
||||
impl Finder {
|
||||
pub fn new() -> Finder {
|
||||
Finder
|
||||
}
|
||||
|
||||
pub fn find<T, U, V>(
|
||||
&self,
|
||||
binary_name: T,
|
||||
paths: Option<U>,
|
||||
cwd: V,
|
||||
binary_checker: &Checker,
|
||||
) -> Result<PathBuf>
|
||||
where
|
||||
T: AsRef<OsStr>,
|
||||
U: AsRef<OsStr>,
|
||||
V: AsRef<Path>,
|
||||
{
|
||||
let path = PathBuf::from(&binary_name);
|
||||
|
||||
let binary_path_candidates: Box<dyn Iterator<Item = _>> = if path.has_separator() {
|
||||
// Search binary in cwd if the path have a path separator.
|
||||
let candidates = Self::cwd_search_candidates(path, cwd).into_iter();
|
||||
Box::new(candidates)
|
||||
} else {
|
||||
// Search binary in PATHs(defined in environment variable).
|
||||
let p = paths.ok_or(ErrorKind::CannotFindBinaryPath)?;
|
||||
let paths: Vec<_> = env::split_paths(&p).collect();
|
||||
|
||||
let candidates = Self::path_search_candidates(path, paths).into_iter();
|
||||
|
||||
Box::new(candidates)
|
||||
};
|
||||
|
||||
for p in binary_path_candidates {
|
||||
// find a valid binary
|
||||
if binary_checker.is_valid(&p) {
|
||||
return Ok(p);
|
||||
}
|
||||
}
|
||||
|
||||
// can't find any binary
|
||||
return Err(ErrorKind::CannotFindBinaryPath.into());
|
||||
}
|
||||
|
||||
fn cwd_search_candidates<C>(binary_name: PathBuf, cwd: C) -> impl IntoIterator<Item = PathBuf>
|
||||
where
|
||||
C: AsRef<Path>,
|
||||
{
|
||||
let path = binary_name.to_absolute(cwd);
|
||||
|
||||
Self::append_extension(iter::once(path))
|
||||
}
|
||||
|
||||
fn path_search_candidates<P>(
|
||||
binary_name: PathBuf,
|
||||
paths: P,
|
||||
) -> impl IntoIterator<Item = PathBuf>
|
||||
where
|
||||
P: IntoIterator<Item = PathBuf>,
|
||||
{
|
||||
let new_paths = paths.into_iter().map(move |p| p.join(binary_name.clone()));
|
||||
|
||||
Self::append_extension(new_paths)
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
fn append_extension<P>(paths: P) -> impl IntoIterator<Item = PathBuf>
|
||||
where
|
||||
P: IntoIterator<Item = PathBuf>,
|
||||
{
|
||||
paths
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
fn append_extension<P>(paths: P) -> impl IntoIterator<Item = PathBuf>
|
||||
where
|
||||
P: IntoIterator<Item = PathBuf>,
|
||||
{
|
||||
// Read PATHEXT env variable and split it into vector of String
|
||||
let path_exts =
|
||||
env::var_os("PATHEXT").unwrap_or(OsString::from(env::consts::EXE_EXTENSION));
|
||||
|
||||
let exe_extension_vec = env::split_paths(&path_exts)
|
||||
.filter_map(|e| e.to_str().map(|e| e.to_owned()))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
paths
|
||||
.into_iter()
|
||||
.flat_map(move |p| -> Box<dyn Iterator<Item = _>> {
|
||||
// Check if path already have executable extension
|
||||
if has_executable_extension(&p, &exe_extension_vec) {
|
||||
Box::new(iter::once(p))
|
||||
} else {
|
||||
// Appended paths with windows executable extensions.
|
||||
// e.g. path `c:/windows/bin` will expend to:
|
||||
// c:/windows/bin.COM
|
||||
// c:/windows/bin.EXE
|
||||
// c:/windows/bin.CMD
|
||||
// ...
|
||||
let ps = exe_extension_vec.clone().into_iter().map(move |e| {
|
||||
// Append the extension.
|
||||
let mut p = p.clone().to_path_buf().into_os_string();
|
||||
p.push(e);
|
||||
|
||||
PathBuf::from(p)
|
||||
});
|
||||
|
||||
Box::new(ps)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
|
@ -1,40 +0,0 @@
|
|||
use std::path::Path;
|
||||
|
||||
/// Check if given path has extension which in the given vector.
|
||||
pub fn has_executable_extension<T: AsRef<Path>, S: AsRef<str>>(path: T, exts_vec: &Vec<S>) -> bool {
|
||||
let ext = path.as_ref().extension().and_then(|e| e.to_str());
|
||||
match ext {
|
||||
Some(ext) => exts_vec
|
||||
.iter()
|
||||
.any(|e| ext.eq_ignore_ascii_case(&e.as_ref()[1..])),
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[test]
|
||||
fn test_extension_in_extension_vector() {
|
||||
// Case insensitive
|
||||
assert!(has_executable_extension(
|
||||
PathBuf::from("foo.exe"),
|
||||
&vec![".COM", ".EXE", ".CMD"]
|
||||
));
|
||||
|
||||
assert!(has_executable_extension(
|
||||
PathBuf::from("foo.CMD"),
|
||||
&vec![".COM", ".EXE", ".CMD"]
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_extension_not_in_extension_vector() {
|
||||
assert!(!has_executable_extension(
|
||||
PathBuf::from("foo.bar"),
|
||||
&vec![".COM", ".EXE", ".CMD"]
|
||||
));
|
||||
}
|
||||
}
|
|
@ -13,33 +13,47 @@
|
|||
//!
|
||||
//! ```
|
||||
|
||||
extern crate failure;
|
||||
extern crate libc;
|
||||
#[cfg(test)]
|
||||
extern crate tempdir;
|
||||
|
||||
use failure::ResultExt;
|
||||
mod checker;
|
||||
mod error;
|
||||
mod finder;
|
||||
#[cfg(windows)]
|
||||
mod helper;
|
||||
|
||||
use std::env;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::{env, fs};
|
||||
|
||||
// Remove the `AsciiExt` will make `which-rs` build failed in older versions of Rust.
|
||||
// Please Keep it here though we don't need it in the new Rust version(>=1.23).
|
||||
#[allow(unused_imports)]
|
||||
// Please Keep it here though we don't need it in the latest Rust version.
|
||||
#[allow(unused)]
|
||||
use std::ascii::AsciiExt;
|
||||
|
||||
#[cfg(unix)]
|
||||
use std::ffi::CString;
|
||||
use std::ffi::OsStr;
|
||||
#[cfg(unix)]
|
||||
use std::os::unix::ffi::OsStrExt;
|
||||
|
||||
/// Like `Path::with_extension`, but don't replace an existing extension.
|
||||
fn ensure_exe_extension<T: AsRef<Path>>(path: T) -> PathBuf {
|
||||
if env::consts::EXE_EXTENSION.is_empty() {
|
||||
// Nothing to do.
|
||||
path.as_ref().to_path_buf()
|
||||
} else {
|
||||
match path.as_ref()
|
||||
.extension()
|
||||
.and_then(|e| e.to_str())
|
||||
.map(|e| e.eq_ignore_ascii_case(env::consts::EXE_EXTENSION)) {
|
||||
// Already has the right extension.
|
||||
Some(true) => path.as_ref().to_path_buf(),
|
||||
_ => {
|
||||
// Append the extension.
|
||||
let mut s = path.as_ref().to_path_buf().into_os_string();
|
||||
s.push(".");
|
||||
s.push(env::consts::EXE_EXTENSION);
|
||||
PathBuf::from(s)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
use checker::CompositeChecker;
|
||||
use checker::ExecutableChecker;
|
||||
use checker::ExistedChecker;
|
||||
pub use error::*;
|
||||
use finder::Finder;
|
||||
|
||||
/// Find a exectable binary's path by name.
|
||||
///
|
||||
|
@ -62,17 +76,17 @@ use finder::Finder;
|
|||
/// assert_eq!(result, PathBuf::from("/usr/bin/rustc"));
|
||||
///
|
||||
/// ```
|
||||
pub fn which<T: AsRef<OsStr>>(binary_name: T) -> Result<PathBuf> {
|
||||
let cwd = env::current_dir().context(ErrorKind::CannotGetCurrentDir)?;
|
||||
which_in(binary_name, env::var_os("PATH"), &cwd)
|
||||
pub fn which<T: AsRef<OsStr>>(binary_name: T) -> Result<PathBuf, &'static str> {
|
||||
env::current_dir()
|
||||
.or_else(|_| Err("Couldn't get current directory"))
|
||||
.and_then(|cwd| which_in(binary_name, env::var_os("PATH"), &cwd))
|
||||
}
|
||||
|
||||
/// Find `binary_name` in the path list `paths`, using `cwd` to resolve relative paths.
|
||||
pub fn which_in<T, U, V>(binary_name: T, paths: Option<U>, cwd: V) -> Result<PathBuf>
|
||||
where
|
||||
T: AsRef<OsStr>,
|
||||
U: AsRef<OsStr>,
|
||||
V: AsRef<Path>,
|
||||
pub fn which_in<T, U, V>(binary_name: T, paths: Option<U>, cwd: V) -> Result<PathBuf, &'static str>
|
||||
where T: AsRef<OsStr>,
|
||||
U: AsRef<OsStr>,
|
||||
V: AsRef<Path>
|
||||
{
|
||||
let binary_checker = CompositeChecker::new()
|
||||
.add_checker(Box::new(ExistedChecker::new()))
|
||||
|
@ -83,6 +97,148 @@ where
|
|||
finder.find(binary_name, paths, cwd, &binary_checker)
|
||||
}
|
||||
|
||||
struct Finder;
|
||||
|
||||
impl Finder {
|
||||
fn new() -> Finder {
|
||||
Finder
|
||||
}
|
||||
|
||||
fn find<T, U, V>(&self,
|
||||
binary_name: T,
|
||||
paths: Option<U>,
|
||||
cwd: V,
|
||||
binary_checker: &Checker)
|
||||
-> Result<PathBuf, &'static str>
|
||||
where T: AsRef<OsStr>,
|
||||
U: AsRef<OsStr>,
|
||||
V: AsRef<Path>
|
||||
{
|
||||
|
||||
let path = ensure_exe_extension(binary_name.as_ref());
|
||||
|
||||
// Does it have a path separator?
|
||||
if path.components().count() > 1 {
|
||||
if path.is_absolute() {
|
||||
if binary_checker.is_valid(&path) {
|
||||
// Already fine.
|
||||
Ok(path)
|
||||
} else {
|
||||
// Absolute path but it's not usable.
|
||||
Err("Bad absolute path")
|
||||
}
|
||||
} else {
|
||||
// Try to make it absolute.
|
||||
let mut new_path = PathBuf::from(cwd.as_ref());
|
||||
new_path.push(path);
|
||||
let new_path = ensure_exe_extension(new_path);
|
||||
if binary_checker.is_valid(&new_path) {
|
||||
Ok(new_path)
|
||||
} else {
|
||||
// File doesn't exist or isn't executable.
|
||||
Err("Bad relative path")
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// No separator, look it up in `paths`.
|
||||
paths.and_then(|paths| {
|
||||
env::split_paths(paths.as_ref())
|
||||
.map(|p| ensure_exe_extension(p.join(binary_name.as_ref())))
|
||||
.skip_while(|p| !(binary_checker.is_valid(&p)))
|
||||
.next()
|
||||
})
|
||||
.ok_or("Cannot find binary path")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
trait Checker {
|
||||
fn is_valid(&self, path: &Path) -> bool;
|
||||
}
|
||||
|
||||
struct ExecutableChecker;
|
||||
|
||||
impl ExecutableChecker {
|
||||
fn new() -> ExecutableChecker {
|
||||
ExecutableChecker
|
||||
}
|
||||
}
|
||||
|
||||
impl Checker for ExecutableChecker {
|
||||
#[cfg(unix)]
|
||||
fn is_valid(&self, path: &Path) -> bool {
|
||||
CString::new(path.as_os_str().as_bytes())
|
||||
.and_then(|c| Ok(unsafe { libc::access(c.as_ptr(), libc::X_OK) == 0 }))
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
#[cfg(not(unix))]
|
||||
fn is_valid(&self, _path: &Path) -> bool {
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
struct ExistedChecker;
|
||||
|
||||
impl ExistedChecker {
|
||||
fn new() -> ExistedChecker {
|
||||
ExistedChecker
|
||||
}
|
||||
}
|
||||
|
||||
impl Checker for ExistedChecker {
|
||||
fn is_valid(&self, path: &Path) -> bool {
|
||||
fs::metadata(path)
|
||||
.map(|metadata| metadata.is_file())
|
||||
.unwrap_or(false)
|
||||
}
|
||||
}
|
||||
|
||||
struct CompositeChecker {
|
||||
checkers: Vec<Box<Checker>>,
|
||||
}
|
||||
|
||||
impl CompositeChecker {
|
||||
fn new() -> CompositeChecker {
|
||||
CompositeChecker { checkers: Vec::new() }
|
||||
}
|
||||
|
||||
fn add_checker(mut self, checker: Box<Checker>) -> CompositeChecker {
|
||||
self.checkers.push(checker);
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl Checker for CompositeChecker {
|
||||
fn is_valid(&self, path: &Path) -> bool {
|
||||
self.checkers
|
||||
.iter()
|
||||
.all(|checker| checker.is_valid(path))
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_exe_extension() {
|
||||
let expected = PathBuf::from("foo").with_extension(env::consts::EXE_EXTENSION);
|
||||
assert_eq!(expected, ensure_exe_extension(PathBuf::from("foo")));
|
||||
let p = expected.clone();
|
||||
assert_eq!(expected, ensure_exe_extension(p));
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(windows)]
|
||||
fn test_exe_extension_existing_extension() {
|
||||
assert_eq!(PathBuf::from("foo.bar.exe"),
|
||||
ensure_exe_extension("foo.bar"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(windows)]
|
||||
fn test_exe_extension_existing_extension_uppercase() {
|
||||
assert_eq!(PathBuf::from("foo.EXE"), ensure_exe_extension("foo.EXE"));
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
|
@ -107,10 +263,10 @@ mod test {
|
|||
const BIN_NAME: &'static str = "bin";
|
||||
|
||||
#[cfg(unix)]
|
||||
fn mk_bin(dir: &Path, path: &str, extension: &str) -> io::Result<PathBuf> {
|
||||
fn mk_bin(dir: &Path, path: &str) -> io::Result<PathBuf> {
|
||||
use libc;
|
||||
use std::os::unix::fs::OpenOptionsExt;
|
||||
let bin = dir.join(path).with_extension(extension);
|
||||
let bin = dir.join(path).with_extension(env::consts::EXE_EXTENSION);
|
||||
fs::OpenOptions::new()
|
||||
.write(true)
|
||||
.create(true)
|
||||
|
@ -119,26 +275,17 @@ mod test {
|
|||
.and_then(|_f| bin.canonicalize())
|
||||
}
|
||||
|
||||
fn touch(dir: &Path, path: &str, extension: &str) -> io::Result<PathBuf> {
|
||||
let b = dir.join(path).with_extension(extension);
|
||||
fn touch(dir: &Path, path: &str) -> io::Result<PathBuf> {
|
||||
let b = dir.join(path).with_extension(env::consts::EXE_EXTENSION);
|
||||
fs::File::create(&b).and_then(|_f| b.canonicalize())
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
fn mk_bin(dir: &Path, path: &str, extension: &str) -> io::Result<PathBuf> {
|
||||
touch(dir, path, extension)
|
||||
#[cfg(not(unix))]
|
||||
fn mk_bin(dir: &Path, path: &str) -> io::Result<PathBuf> {
|
||||
touch(dir, path)
|
||||
}
|
||||
|
||||
impl TestFixture {
|
||||
// tmp/a/bin
|
||||
// tmp/a/bin.exe
|
||||
// tmp/a/bin.cmd
|
||||
// tmp/b/bin
|
||||
// tmp/b/bin.exe
|
||||
// tmp/b/bin.cmd
|
||||
// tmp/c/bin
|
||||
// tmp/c/bin.exe
|
||||
// tmp/c/bin.cmd
|
||||
pub fn new() -> TestFixture {
|
||||
let tempdir = TempDir::new("which_tests").unwrap();
|
||||
let mut builder = fs::DirBuilder::new();
|
||||
|
@ -148,9 +295,7 @@ mod test {
|
|||
for d in SUBDIRS.iter() {
|
||||
let p = tempdir.path().join(d);
|
||||
builder.create(&p).unwrap();
|
||||
bins.push(mk_bin(&p, &BIN_NAME, "").unwrap());
|
||||
bins.push(mk_bin(&p, &BIN_NAME, "exe").unwrap());
|
||||
bins.push(mk_bin(&p, &BIN_NAME, "cmd").unwrap());
|
||||
bins.push(mk_bin(&p, &BIN_NAME).unwrap());
|
||||
paths.push(p);
|
||||
}
|
||||
TestFixture {
|
||||
|
@ -161,16 +306,16 @@ mod test {
|
|||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn touch(&self, path: &str, extension: &str) -> io::Result<PathBuf> {
|
||||
touch(self.tempdir.path(), &path, &extension)
|
||||
pub fn touch(&self, path: &str) -> io::Result<PathBuf> {
|
||||
touch(self.tempdir.path(), &path)
|
||||
}
|
||||
|
||||
pub fn mk_bin(&self, path: &str, extension: &str) -> io::Result<PathBuf> {
|
||||
mk_bin(self.tempdir.path(), &path, &extension)
|
||||
pub fn mk_bin(&self, path: &str) -> io::Result<PathBuf> {
|
||||
mk_bin(self.tempdir.path(), &path)
|
||||
}
|
||||
}
|
||||
|
||||
fn _which<T: AsRef<OsStr>>(f: &TestFixture, path: T) -> Result<PathBuf> {
|
||||
fn _which<T: AsRef<OsStr>>(f: &TestFixture, path: T) -> Result<PathBuf, &'static str> {
|
||||
which_in(path, Some(f.paths.clone()), f.tempdir.path())
|
||||
}
|
||||
|
||||
|
@ -181,52 +326,28 @@ mod test {
|
|||
let result = which("rustc");
|
||||
assert!(result.is_ok());
|
||||
|
||||
let which_result = Command::new("which").arg("rustc").output();
|
||||
let which_result = Command::new("which")
|
||||
.arg("rustc")
|
||||
.output();
|
||||
|
||||
assert_eq!(
|
||||
String::from(result.unwrap().to_str().unwrap()),
|
||||
String::from_utf8(which_result.unwrap().stdout)
|
||||
.unwrap()
|
||||
.trim()
|
||||
);
|
||||
assert_eq!(String::from(result.unwrap().to_str().unwrap()),
|
||||
String::from_utf8(which_result.unwrap().stdout).unwrap().trim());
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(unix)]
|
||||
fn test_which() {
|
||||
let f = TestFixture::new();
|
||||
assert_eq!(
|
||||
_which(&f, &BIN_NAME).unwrap().canonicalize().unwrap(),
|
||||
f.bins[0]
|
||||
)
|
||||
assert_eq!(_which(&f, &BIN_NAME).unwrap().canonicalize().unwrap(),
|
||||
f.bins[0])
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(windows)]
|
||||
fn test_which() {
|
||||
let f = TestFixture::new();
|
||||
assert_eq!(
|
||||
_which(&f, &BIN_NAME).unwrap().canonicalize().unwrap(),
|
||||
f.bins[1]
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(unix)]
|
||||
fn test_which_extension() {
|
||||
let f = TestFixture::new();
|
||||
let b = Path::new(&BIN_NAME).with_extension("");
|
||||
let b = Path::new(&BIN_NAME).with_extension(env::consts::EXE_EXTENSION);
|
||||
assert_eq!(_which(&f, &b).unwrap().canonicalize().unwrap(), f.bins[0])
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(windows)]
|
||||
fn test_which_extension() {
|
||||
let f = TestFixture::new();
|
||||
let b = Path::new(&BIN_NAME).with_extension("cmd");
|
||||
assert_eq!(_which(&f, &b).unwrap().canonicalize().unwrap(), f.bins[2])
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_which_not_found() {
|
||||
let f = TestFixture::new();
|
||||
|
@ -236,28 +357,15 @@ mod test {
|
|||
#[test]
|
||||
fn test_which_second() {
|
||||
let f = TestFixture::new();
|
||||
let b = f.mk_bin("b/another", env::consts::EXE_EXTENSION).unwrap();
|
||||
let b = f.mk_bin("b/another").unwrap();
|
||||
assert_eq!(_which(&f, "another").unwrap().canonicalize().unwrap(), b);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(unix)]
|
||||
fn test_which_absolute() {
|
||||
let f = TestFixture::new();
|
||||
assert_eq!(
|
||||
_which(&f, &f.bins[3]).unwrap().canonicalize().unwrap(),
|
||||
f.bins[3].canonicalize().unwrap()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(windows)]
|
||||
fn test_which_absolute() {
|
||||
let f = TestFixture::new();
|
||||
assert_eq!(
|
||||
_which(&f, &f.bins[4]).unwrap().canonicalize().unwrap(),
|
||||
f.bins[4].canonicalize().unwrap()
|
||||
);
|
||||
assert_eq!(_which(&f, &f.bins[1]).unwrap().canonicalize().unwrap(),
|
||||
f.bins[1].canonicalize().unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -266,81 +374,35 @@ mod test {
|
|||
// Test that an absolute path with an uppercase extension
|
||||
// is accepted.
|
||||
let f = TestFixture::new();
|
||||
let p = &f.bins[4];
|
||||
assert_eq!(
|
||||
_which(&f, &p).unwrap().canonicalize().unwrap(),
|
||||
f.bins[4].canonicalize().unwrap()
|
||||
);
|
||||
let p = f.bins[1].with_extension("EXE");
|
||||
assert_eq!(_which(&f, &p).unwrap().canonicalize().unwrap(),
|
||||
f.bins[1].canonicalize().unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(unix)]
|
||||
fn test_which_absolute_extension() {
|
||||
let f = TestFixture::new();
|
||||
// Don't append EXE_EXTENSION here.
|
||||
let b = f.bins[3].parent().unwrap().join(&BIN_NAME);
|
||||
assert_eq!(
|
||||
_which(&f, &b).unwrap().canonicalize().unwrap(),
|
||||
f.bins[3].canonicalize().unwrap()
|
||||
);
|
||||
let b = f.bins[1].parent().unwrap().join(&BIN_NAME);
|
||||
assert_eq!(_which(&f, &b).unwrap().canonicalize().unwrap(),
|
||||
f.bins[1].canonicalize().unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(windows)]
|
||||
fn test_which_absolute_extension() {
|
||||
let f = TestFixture::new();
|
||||
// Don't append EXE_EXTENSION here.
|
||||
let b = f.bins[4].parent().unwrap().join(&BIN_NAME);
|
||||
assert_eq!(
|
||||
_which(&f, &b).unwrap().canonicalize().unwrap(),
|
||||
f.bins[4].canonicalize().unwrap()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(unix)]
|
||||
fn test_which_relative() {
|
||||
let f = TestFixture::new();
|
||||
assert_eq!(
|
||||
_which(&f, "b/bin").unwrap().canonicalize().unwrap(),
|
||||
f.bins[3].canonicalize().unwrap()
|
||||
);
|
||||
assert_eq!(_which(&f, "b/bin").unwrap().canonicalize().unwrap(),
|
||||
f.bins[1].canonicalize().unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(windows)]
|
||||
fn test_which_relative() {
|
||||
let f = TestFixture::new();
|
||||
assert_eq!(
|
||||
_which(&f, "b/bin").unwrap().canonicalize().unwrap(),
|
||||
f.bins[4].canonicalize().unwrap()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(unix)]
|
||||
fn test_which_relative_extension() {
|
||||
// test_which_relative tests a relative path without an extension,
|
||||
// so test a relative path with an extension here.
|
||||
let f = TestFixture::new();
|
||||
let b = Path::new("b/bin").with_extension(env::consts::EXE_EXTENSION);
|
||||
assert_eq!(
|
||||
_which(&f, &b).unwrap().canonicalize().unwrap(),
|
||||
f.bins[3].canonicalize().unwrap()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(windows)]
|
||||
fn test_which_relative_extension() {
|
||||
// test_which_relative tests a relative path without an extension,
|
||||
// so test a relative path with an extension here.
|
||||
let f = TestFixture::new();
|
||||
let b = Path::new("b/bin").with_extension("cmd");
|
||||
assert_eq!(
|
||||
_which(&f, &b).unwrap().canonicalize().unwrap(),
|
||||
f.bins[5].canonicalize().unwrap()
|
||||
);
|
||||
assert_eq!(_which(&f, &b).unwrap().canonicalize().unwrap(),
|
||||
f.bins[1].canonicalize().unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -350,30 +412,15 @@ mod test {
|
|||
// is accepted.
|
||||
let f = TestFixture::new();
|
||||
let b = Path::new("b/bin").with_extension("EXE");
|
||||
assert_eq!(
|
||||
_which(&f, &b).unwrap().canonicalize().unwrap(),
|
||||
f.bins[4].canonicalize().unwrap()
|
||||
);
|
||||
assert_eq!(_which(&f, &b).unwrap().canonicalize().unwrap(),
|
||||
f.bins[1].canonicalize().unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(unix)]
|
||||
fn test_which_relative_leading_dot() {
|
||||
let f = TestFixture::new();
|
||||
assert_eq!(
|
||||
_which(&f, "./b/bin").unwrap().canonicalize().unwrap(),
|
||||
f.bins[3].canonicalize().unwrap()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(windows)]
|
||||
fn test_which_relative_leading_dot() {
|
||||
let f = TestFixture::new();
|
||||
assert_eq!(
|
||||
_which(&f, "./b/bin").unwrap().canonicalize().unwrap(),
|
||||
f.bins[4].canonicalize().unwrap()
|
||||
);
|
||||
assert_eq!(_which(&f, "./b/bin").unwrap().canonicalize().unwrap(),
|
||||
f.bins[1].canonicalize().unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -381,7 +428,7 @@ mod test {
|
|||
fn test_which_non_executable() {
|
||||
// Shouldn't return non-executable files.
|
||||
let f = TestFixture::new();
|
||||
f.touch("b/another", "").unwrap();
|
||||
f.touch("b/another").unwrap();
|
||||
assert!(_which(&f, "another").is_err());
|
||||
}
|
||||
|
||||
|
@ -390,7 +437,7 @@ mod test {
|
|||
fn test_which_absolute_non_executable() {
|
||||
// Shouldn't return non-executable files, even if given an absolute path.
|
||||
let f = TestFixture::new();
|
||||
let b = f.touch("b/another", "").unwrap();
|
||||
let b = f.touch("b/another").unwrap();
|
||||
assert!(_which(&f, &b).is_err());
|
||||
}
|
||||
|
||||
|
@ -399,20 +446,7 @@ mod test {
|
|||
fn test_which_relative_non_executable() {
|
||||
// Shouldn't return non-executable files.
|
||||
let f = TestFixture::new();
|
||||
f.touch("b/another", "").unwrap();
|
||||
f.touch("b/another").unwrap();
|
||||
assert!(_which(&f, "b/another").is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_failure() {
|
||||
let f = TestFixture::new();
|
||||
|
||||
let run = || -> std::result::Result<PathBuf, failure::Error> {
|
||||
// Test the conversion to failure
|
||||
let p = _which(&f, "./b/bin")?;
|
||||
Ok(p)
|
||||
};
|
||||
|
||||
let _ = run();
|
||||
}
|
||||
}
|
||||
|
|