Mirror of https://github.com/mozilla/gecko-dev.git
Bug 1770894 - Force update bindgen to 0.59. r=emilio
Differential Revision: https://phabricator.services.mozilla.com/D147150
This commit is contained in:
Parent: 3419858c99
Commit: 0f7d648b3b
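The diff below follows the shim-crate pattern mozilla-central uses to force a semver-incompatible upgrade without touching every consumer: a small in-tree crate claims the fake version 0.56.999 (so it still satisfies existing `bindgen = "0.56"` requirements), does nothing but re-export the real 0.59 release, and the workspace patch table points the `bindgen` entry at it. A minimal sketch of the three pieces, assuming the patch entries live under `[patch.crates-io]` in the top-level Cargo.toml (the file paths and versions follow the diff below; nothing here is new API):

    # build/rust/bindgen/Cargo.toml — the shim crate
    [package]
    name = "bindgen"
    version = "0.56.999"     # satisfies callers that still ask for 0.56.x
    edition = "2018"

    [dependencies.bindgen]
    version = "0.59"         # the real upstream release being forwarded
    default-features = false

    // build/rust/bindgen/lib.rs — forward the whole public API
    pub use bindgen::*;

    # top-level Cargo.toml
    [patch.crates-io]
    bindgen = { path = "build/rust/bindgen" }

Crates that still declare a 0.56 dependency resolve to the shim and end up compiling against bindgen 0.59.2; the same trick is applied to cexpr (a 0.6.999 shim wrapping cexpr 0.4) so that bindgen 0.59 does not drag in a second copy of nom.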
@@ -338,7 +338,7 @@ version = "1.1.0"
 name = "baldrdash"
 version = "0.1.0"
 dependencies = [
- "bindgen",
+ "bindgen 0.56.999",
  "cranelift-codegen",
  "cranelift-wasm",
  "env_logger",
@@ -393,12 +393,19 @@ dependencies = [

 [[package]]
 name = "bindgen"
-version = "0.56.0"
+version = "0.56.999"
+dependencies = [
+ "bindgen 0.59.2",
+]
+
+[[package]]
+name = "bindgen"
+version = "0.59.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2da379dbebc0b76ef63ca68d8fc6e71c0f13e59432e0987e508c1820e6ab5239"
+checksum = "2bd2a9a458e8f4304c52c43ebb0cfbd520289f8379a52e329a38afda99bf8eb8"
 dependencies = [
  "bitflags",
- "cexpr",
+ "cexpr 0.6.999",
  "clang-sys",
  "lazy_static",
  "lazycell",
@@ -635,6 +642,13 @@ dependencies = [
  "nom",
 ]

+[[package]]
+name = "cexpr"
+version = "0.6.999"
+dependencies = [
+ "cexpr 0.4.0",
+]
+
 [[package]]
 name = "cfg-if"
 version = "0.1.10"
@ -829,7 +843,7 @@ version = "0.2.9"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ca4679a59dbd8c15f064c012dfe8c1163b9453224238b59bb9328c142b8b248b"
|
||||
dependencies = [
|
||||
"bindgen",
|
||||
"bindgen 0.56.999",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -1907,7 +1921,7 @@ name = "gecko-profiler"
|
|||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"bincode",
|
||||
"bindgen",
|
||||
"bindgen 0.56.999",
|
||||
"lazy_static",
|
||||
"mozbuild",
|
||||
"profiler-macros",
|
||||
|
@ -2414,7 +2428,7 @@ name = "http3server"
|
|||
version = "0.1.1"
|
||||
dependencies = [
|
||||
"base64 0.10.1",
|
||||
"bindgen",
|
||||
"bindgen 0.56.999",
|
||||
"log",
|
||||
"mio 0.6.23",
|
||||
"mio-extras",
|
||||
|
@ -3337,7 +3351,7 @@ dependencies = [
|
|||
name = "mozilla-central-workspace-hack"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"bindgen",
|
||||
"bindgen 0.56.999",
|
||||
"libc",
|
||||
"quote",
|
||||
"serde",
|
||||
|
@ -3482,7 +3496,7 @@ name = "neqo-crypto"
|
|||
version = "0.5.7"
|
||||
source = "git+https://github.com/mozilla/neqo?tag=v0.5.7#f3de275b12c40f45718ce43a0482e771ba6cd4b8"
|
||||
dependencies = [
|
||||
"bindgen",
|
||||
"bindgen 0.56.999",
|
||||
"log",
|
||||
"mozbuild",
|
||||
"neqo-common",
|
||||
|
@ -4746,9 +4760,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "shlex"
|
||||
version = "0.1.1"
|
||||
version = "1.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7fdf1b9db47230893d76faad238fd6097fd6d6a9245cd7a4d90dbd639536bbd2"
|
||||
checksum = "43b2853a4d09f215c24cc5489c992ce46052d359b5109343cbafbf26bc62f8a3"
|
||||
|
||||
[[package]]
|
||||
name = "siphasher"
|
||||
|
@ -4880,7 +4894,7 @@ dependencies = [
|
|||
"app_units",
|
||||
"arrayvec 0.5.2",
|
||||
"atomic_refcell",
|
||||
"bindgen",
|
||||
"bindgen 0.56.999",
|
||||
"bitflags",
|
||||
"byteorder",
|
||||
"cssparser",
|
||||
|
|
|
@@ -93,6 +93,12 @@ mozbuild = { path = "build/rust/mozbuild" }
 # Patch itoa 0.4 to 1.0.
 itoa = { path = "build/rust/itoa" }

+# Patch cexpr 0.6 down to cexpr 0.4, which is enough for bindgen 0.59's needs and avoids pulling another version of nom.
+cexpr = { path = "build/rust/cexpr" }
+
+# Patch bindgen 0.56 to 0.59.
+bindgen = { path = "build/rust/bindgen" }
+
 # Patch autocfg to hide rustc output. Workaround for https://github.com/cuviper/autocfg/issues/30
 autocfg = { path = "third_party/rust/autocfg" }

@@ -0,0 +1,18 @@
+[package]
+name = "bindgen"
+version = "0.56.999"
+edition = "2018"
+license = "BSD-3-Clause"
+
+[lib]
+path = "lib.rs"
+
+[dependencies.bindgen]
+version = "0.59"
+default-features = false
+
+[features]
+logging = ["bindgen/logging"]
+runtime = ["bindgen/runtime"]
+static = ["bindgen/static"]
+which-rustfmt = ["bindgen/which-rustfmt"]
@@ -0,0 +1,26 @@
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+//
+// * Redistributions of source code must retain the above copyright notice, this
+//   list of conditions and the following disclaimer.
+//
+// * Redistributions in binary form must reproduce the above copyright notice,
+//   this list of conditions and the following disclaimer in the documentation
+//   and/or other materials provided with the distribution.
+//
+// * Neither the name of the copyright holder nor the names of its
+//   contributors may be used to endorse or promote products derived from
+//   this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+pub use bindgen::*;
@@ -0,0 +1,11 @@
+[package]
+name = "cexpr"
+version = "0.6.999"
+edition = "2018"
+license = "Apache-2.0/MIT"
+
+[lib]
+path = "lib.rs"
+
+[dependencies]
+cexpr = "0.4"
@@ -0,0 +1,5 @@
+// Licensed under the Apache License, Version 2.0
+// <http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <http://opensource.org/licenses/MIT>, at your option.
+
+pub use cexpr::*;
@ -1 +1 @@
|
|||
{"files":{"Cargo.lock":"836e8f8431bd4ebdac9b1251676f6afa755757e401455259fe659e7280be8230","Cargo.toml":"3a585a6e27a177f08dedcb21f7d555e9db58fa158203273b228db91ebee4e6b3","LICENSE":"c23953d9deb0a3312dbeaf6c128a657f3591acee45067612fa68405eaa4525db","README.md":"29fe30d7a2729922b13a578bc8f5eedc808fd0f2ef67a3f12017548baf8f293a","build.rs":"3fe1e534c99df4ee207606794f133fb187c0948e055389f74c904994ecaed38a","csmith-fuzzing/README.md":"7107b70fedb0c0a0cadb3c439a49c1bd0119a6d38dc63b1aecc74d1942256ef2","src/callbacks.rs":"1e5a118b94977938751758ac0495b1d41ce5e280c066614a4a7cbd930f326350","src/clang.rs":"aa0644278a8319506be08904c0f6706fbcdcd72eb1e85564b8c7488bd810e126","src/codegen/bitfield_unit.rs":"a8fb1a2d97a99685106fcaac87d2013f79d2690d6a46ff05ad1e3629b6075664","src/codegen/bitfield_unit_tests.rs":"dd252134118450800b516e375c872e17b4c1aee63a7b8adbe5b2cd53434bbc7e","src/codegen/dyngen.rs":"15149bc927e5b2706f93e52a6b26ef55384b3baf40bfc9bc4343e9820479f26b","src/codegen/error.rs":"5e308b8c54b68511fc8ea2ad15ddac510172c4ff460a80a265336440b0c9653d","src/codegen/helpers.rs":"ea83104addb8af31736aaeb850e10c694cd434befe7ffaaa206208f722d72c58","src/codegen/impl_debug.rs":"1ff9ec754b610c98c757b114c6509473ead0e1a9375e9089a7fa40a41a753709","src/codegen/impl_partialeq.rs":"5e526fd88dd15dd1f04addd3c6ecea1d3da92293fadf04346d6c716791f436f9","src/codegen/mod.rs":"19fd11feefab0ff9ecaf8a01583583008269adce805508fb61b9a8acc49da586","src/codegen/struct_layout.rs":"b62c3569dcfb011daa4d09f1aa9eb732da69546c3deb9f247fa8ce7114dbc7b9","src/extra_assertions.rs":"494534bd4f18b80d89b180c8a93733e6617edcf7deac413e9a73fd6e7bc9ced7","src/features.rs":"fafb85510b1dfc9a41ed71f7d765fca49b236deb4ee567e00204e751362aaf23","src/ir/analysis/derive.rs":"ff4821d810961696008a57ae496f95ebcdc14b4c439fe87d78a84817442fa759","src/ir/analysis/has_destructor.rs":"d9a3a24bd4cabc87cddb0c76d27da1691f8f37ffb8eadf5b5975a1c44dea99c2","src/ir/analysis/has_float.rs":"5242cc07ec4d4bdf5a792e1f8ee5758a87838314917d42dbb9dcfc19620520ce","src/ir/analysis/has_type_param_in_array.rs":"ec3fb67f782abb4c866da91bce3f7ee6f8e2310c47a54065282431b909233f7d","src/ir/analysis/has_vtable.rs":"63e2d0f62171811893615c11453bc7b39438d0d83c3eb444dec2346140d86efe","src/ir/analysis/mod.rs":"2c54f0cd6f3d86cf3fcb07d9d0be06cde839cab4170671c80d806a3f27820faf","src/ir/analysis/sizedness.rs":"17f1f2b6affd025f73853b9b5a76b3f157b5f9e563e9eaa374735fcb84c13308","src/ir/analysis/template_params.rs":"da949976a7fd04d6fc564ea6a77dfdbf4f5bf05db64687ed7a0616cba598a42d","src/ir/annotations.rs":"1c931d7bbba1e1613e9cccaab58d14f75e79b831b5c881e41f5b5257a9cbced7","src/ir/comment.rs":"31d64a49ae3d9c3c348fa2539e03306ca3a23fae429cab452e42b31ecf632145","src/ir/comp.rs":"abaa90e27dc6416f1b8db003f87888e7651d5b46c4d4526153980e5621612e54","src/ir/context.rs":"3a76458a5aa74075a60a5cd752ed59ad3943054f55b017700389f78072935215","src/ir/derive.rs":"e5581852eec87918901a129284b4965aefc8a19394187a8095779a084f28fabe","src/ir/dot.rs":"e25ff72ac174a798894c9673d81bdfb86fa9f4228b34a14ce0dc741a186a52bd","src/ir/enum_ty.rs":"e49e3c6ffc0289584e2f836fe56a4b7ebf6ca3f8b602248141d67b9f533770cc","src/ir/function.rs":"aa454ace56bda8074b2865933282aa124624310c8bc0c994d454f5799f4e88be","src/ir/int.rs":"68a86182743ec338d58e42203364dc7c8970cb7ec3550433ca92f0c9489b4442","src/ir/item.rs":"a71bdacc7419ec86d52ac534158cf4bfa4600e9cbc214c0075766700f5b053b0","src/ir/item_kind.rs":"7666a1ff1b8260978b790a08b4139ab56b5c65714a5652bbcec7faa7443adc36","src/ir/layout.rs":"755e3787c262de434a53a8c326f0e825f95415ed6b0f925c1cddf208ca8e3bc4","src/ir/mod.rs":"713
cd537434567003197a123cbae679602c715e976d22f7b23dafd0826ea4c70","src/ir/module.rs":"70cf6ddfeabe6cdc13fdc767c783216c073404848d827e85fc6c2de3a19b5c3f","src/ir/objc.rs":"195fb2a3e4371b90244f3a8f295fd80cc77e0f2daf8fd27e3d8e5b78bd6b55d6","src/ir/template.rs":"44bd7214cf1e7f70e60694115082aac5b8a6c1687fff584cd08cdcfadabc5734","src/ir/traversal.rs":"5ac088277f4dfe2918d81b9294aaee41fd83db8e46def66a05f89de078bf4c49","src/ir/ty.rs":"8f2b970da76850685c4d334289af6dede7742862d7a81f2236115afaa1a92fa9","src/ir/var.rs":"86e9f19403fb9231ba60dec0a04e5b56fe28a37c7a5e6f676c978789c9d93c5a","src/lib.rs":"ed2d0aeb48b28b4a96b8e76a10e00b10cb6cc32c0a686d536f9021463b7ee0e8","src/log_stubs.rs":"6dfdd908b7c6453da416cf232893768f9480e551ca4add0858ef88bf71ee6ceb","src/main.rs":"74e582c37b146090332b1496f5b4bca02c7629d03a4ae40302cb4a723f08e445","src/options.rs":"119358b741601dafc13560856f6e4b4f78b6cd2b19067893c2672ba8f5dc6de1","src/parse.rs":"4ffc54415eadb622ee488603862788c78361ef2c889de25259441a340c2a010f","src/regex_set.rs":"6c46357fb1ee68250e5e017cbf691f340041489ae78599eee7a5665a6ddce27f","src/time.rs":"8efe317e7c6b5ba8e0865ce7b49ca775ee8a02590f4241ef62f647fa3c22b68e"},"package":"2da379dbebc0b76ef63ca68d8fc6e71c0f13e59432e0987e508c1820e6ab5239"}
|
||||
{"files":{"Cargo.lock":"a915231b52b67320c7d440eb711c99632e4b948e5dcbeb6835e18bd0d798c76a","Cargo.toml":"655f82f7efb9e6b434a4710d8b1ea5b8c2116bccc6d8a4f87a7abc9e0c69051b","LICENSE":"c23953d9deb0a3312dbeaf6c128a657f3591acee45067612fa68405eaa4525db","README.md":"c093205492ab9f00f275c50aacfc9058264d3dcc7c7d2ff83e0cc4858d1cee49","build.rs":"d53484feea4cd147cd80280ac270c24ab727386acabb043e6347c44ac5369d0e","csmith-fuzzing/README.md":"7107b70fedb0c0a0cadb3c439a49c1bd0119a6d38dc63b1aecc74d1942256ef2","src/callbacks.rs":"cb4ca440e356dde75919a5298b75cbf145c981c2e1da62907337706286dd5c9e","src/clang.rs":"6b02ae174012372d00b442b5ec5a66a6122a091217039e5ba4917578c769d01f","src/codegen/bitfield_unit.rs":"fddeaeab5859f4e82081865595b7705f5c0774d997df95fa5c655b81b9cae125","src/codegen/bitfield_unit_tests.rs":"9df86490de5e9d66ccea583dcb686dd440375dc1a3c3cf89a89d5de3883bf28a","src/codegen/dyngen.rs":"b1bca96fbd81b1c0678122df8d28f3b60cd74047a43d0d298d69feb06eecf459","src/codegen/error.rs":"5e308b8c54b68511fc8ea2ad15ddac510172c4ff460a80a265336440b0c9653d","src/codegen/helpers.rs":"b4e2ee991e83fda62b0aebd562b948eba785179cb4aa1a154d00ffad215b7be5","src/codegen/impl_debug.rs":"71d8e28873ba2de466f2637a824746963702f0511728037d72ee5670c51194cb","src/codegen/impl_partialeq.rs":"f4599e32c66179ae515a6518a3e94b686689cf59f7dd9ab618c3fb69f17d2c77","src/codegen/mod.rs":"a286fa9a31254ce317c4baad05af446b59aaa23fb80aba9f260e67d15c64ff8c","src/codegen/struct_layout.rs":"d03e66412f4bb1fa59c623873b2a22e100d029a002c07aaf4586f4852a410b54","src/deps.rs":"de4a91d1d252295e1abaf4ab1f90f7be618c67649edb12081c3a501e61398a75","src/extra_assertions.rs":"494534bd4f18b80d89b180c8a93733e6617edcf7deac413e9a73fd6e7bc9ced7","src/features.rs":"f93bb757400580a75adc6a187cdeb032ec4d6efe7d3fcb9a6864472edd875580","src/ir/analysis/derive.rs":"066d35cdb7523c5edd141394286911128261b4db23cc17520e3b3111ef1bb51e","src/ir/analysis/has_destructor.rs":"7a82f01e7e0595a31b56f7c398fa3019b3fe9100a2a73b56768f7e6943dcc3ce","src/ir/analysis/has_float.rs":"58ea1e38a59ef208400fd65d426cb5b288949df2d383b3a194fa01b99d2a87fa","src/ir/analysis/has_type_param_in_array.rs":"d1b9eb119dc14f662eb9bd1394c859f485479e4912589709cdd33f6752094e22","src/ir/analysis/has_vtable.rs":"368cf30fbe3fab7190fab48718b948caac5da8c9e797b709488716b919315636","src/ir/analysis/mod.rs":"cde4ce0667d1895008c9b2af479211c828740fcb59fa13d600cbdc100fa8bdc5","src/ir/analysis/sizedness.rs":"944443d6aab35d2dd80e4f5e59176ac1e1c463ba2f0eb25d33f1d95dfac1a6d0","src/ir/analysis/template_params.rs":"a2d2e247c2f51cd90e83f11bce0305c2e498232d015f88192b44e8522e7fd8b1","src/ir/annotations.rs":"456276ef7f9b04e40b7b10aa7570d98b11aae8efe676679881459ae878bbecfc","src/ir/comment.rs":"9c0c4789c0893b636fac42228f8a0292a06cb4f2b7431895490784dd16b7f79a","src/ir/comp.rs":"811a2abfbf8ed6925327ad005a460ca698d40a2d5d4698015e1bcd4e7d2c9cf0","src/ir/context.rs":"df486590515ffaab8b51c96699a239de202569a8718d9c4b79a8ccc8808cee69","src/ir/derive.rs":"e5581852eec87918901a129284b4965aefc8a19394187a8095779a084f28fabe","src/ir/dot.rs":"2d79d698e6ac59ce032840e62ff11103abed1d5e9e700cf383b492333eeebe1f","src/ir/enum_ty.rs":"c2d928bb1a8453922c962cb11a7ab3b737c5651599141ece8d31e21e6eb74585","src/ir/function.rs":"3e13078b36ee02142017cfbbaaeb9e64ef485a12e151096e12f54a8fde984505","src/ir/int.rs":"68a86182743ec338d58e42203364dc7c8970cb7ec3550433ca92f0c9489b4442","src/ir/item.rs":"1c79d6dd400ab01545a19214847245b440690bfe129895f164bef460ee41b857","src/ir/item_kind.rs":"7666a1ff1b8260978b790a08b4139ab56b5c65714a5652bbcec7faa7443adc36","src/ir/layout.rs":"d6bd9
a14b94320f9e2517bf9fc9ffaf4220954fa24d77d90bba070dbede7392b","src/ir/mod.rs":"713cd537434567003197a123cbae679602c715e976d22f7b23dafd0826ea4c70","src/ir/module.rs":"7cae5561bcf84a5c3b1ee8f1c3336a33b7f44f0d5ffe885fb108289983fe763e","src/ir/objc.rs":"dd394c1db6546cbe5111ce5cd2f211f9839aba81c5e7228c2a68fba386bc259f","src/ir/template.rs":"3bb3e7f6ec28eff73c2032922d30b93d70da117b848e9cb02bdf6c9a74294f7f","src/ir/traversal.rs":"105d93bc2f1f55033c621667a0e55153844eec34560ae88183f799d0d0c1a6f2","src/ir/ty.rs":"2ecae57f018732b6daf1c08fc98765456a9e6a24cbceaf7f1bc004676b0113ee","src/ir/var.rs":"fe7720438af43fa3bbe3850aff331bb47131b2c21e975f92bfbcdc182789105a","src/lib.rs":"0f148aef6fd6ae814df29317fe5860d0c1747c40d5182f2518d3b81a03b6587a","src/log_stubs.rs":"9f974e041e35c8c7e29985d27ae5cd0858d68f8676d1dc005c6388d7d011707f","src/main.rs":"188cd89581490eb5f26a194cc25e4f38f3e0b93eed7ad591bc73362febd26b72","src/options.rs":"14190fae2aaad331f0660e4cc1d5a1fea0c2c88696091715867a3e7282a1d7b5","src/parse.rs":"4ffc54415eadb622ee488603862788c78361ef2c889de25259441a340c2a010f","src/regex_set.rs":"6c46357fb1ee68250e5e017cbf691f340041489ae78599eee7a5665a6ddce27f","src/time.rs":"8efe317e7c6b5ba8e0865ce7b49ca775ee8a02590f4241ef62f647fa3c22b68e"},"package":"2bd2a9a458e8f4304c52c43ebb0cfbd520289f8379a52e329a38afda99bf8eb8"}
|
|
@ -1,5 +1,7 @@
|
|||
# This file is automatically @generated by Cargo.
|
||||
# It is not intended for manual editing.
|
||||
version = 3
|
||||
|
||||
[[package]]
|
||||
name = "aho-corasick"
|
||||
version = "0.7.15"
|
||||
|
@ -31,7 +33,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "bindgen"
|
||||
version = "0.56.0"
|
||||
version = "0.59.2"
|
||||
dependencies = [
|
||||
"bitflags",
|
||||
"cexpr",
|
||||
|
@ -48,6 +50,7 @@ dependencies = [
|
|||
"regex",
|
||||
"rustc-hash",
|
||||
"shlex",
|
||||
"tempfile",
|
||||
"which",
|
||||
]
|
||||
|
||||
|
@ -59,19 +62,13 @@ checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693"
|
|||
|
||||
[[package]]
|
||||
name = "cexpr"
|
||||
version = "0.4.0"
|
||||
version = "0.6.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f4aedb84272dbe89af497cf81375129abda4fc0a9e7c5d317498c15cc30c0d27"
|
||||
checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766"
|
||||
dependencies = [
|
||||
"nom",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cfg-if"
|
||||
version = "0.1.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
|
||||
|
||||
[[package]]
|
||||
name = "cfg-if"
|
||||
version = "1.0.0"
|
||||
|
@ -80,9 +77,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
|
|||
|
||||
[[package]]
|
||||
name = "clang-sys"
|
||||
version = "1.0.3"
|
||||
version = "1.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0659001ab56b791be01d4b729c44376edc6718cf389a502e579b77b758f3296c"
|
||||
checksum = "853eda514c284c2287f4bf20ae614f8781f40a81d32ecda6e91449304dfe077c"
|
||||
dependencies = [
|
||||
"glob",
|
||||
"libc",
|
||||
|
@ -111,10 +108,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "0e25ea47919b1560c4e3b7fe0aaab9becf5b84a10325ddf7db0f0ba5e1026499"
|
||||
|
||||
[[package]]
|
||||
name = "env_logger"
|
||||
version = "0.8.1"
|
||||
name = "either"
|
||||
version = "1.6.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "54532e3223c5af90a6a757c90b5c5521564b07e5e7a958681bcd2afad421cdcd"
|
||||
checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457"
|
||||
|
||||
[[package]]
|
||||
name = "env_logger"
|
||||
version = "0.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0b2cf0344971ee6c64c31be0d530793fba457d322dfec2810c453d0ef228f9c3"
|
||||
dependencies = [
|
||||
"atty",
|
||||
"humantime",
|
||||
|
@ -123,6 +126,17 @@ dependencies = [
|
|||
"termcolor",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "getrandom"
|
||||
version = "0.2.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7fcd999463524c52659517fe2cea98493cfe485d10565e7b0fb07dbba7ad2753"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"libc",
|
||||
"wasi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "glob"
|
||||
version = "0.3.0"
|
||||
|
@ -131,18 +145,18 @@ checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574"
|
|||
|
||||
[[package]]
|
||||
name = "hermit-abi"
|
||||
version = "0.1.17"
|
||||
version = "0.1.19"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5aca5565f760fb5b220e499d72710ed156fdb74e631659e99377d9ebfbd13ae8"
|
||||
checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
|
||||
dependencies = [
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "humantime"
|
||||
version = "2.0.1"
|
||||
version = "2.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3c1ad908cc71012b7bea4d0c53ba96a8cba9962f048fa68d143376143d863b7a"
|
||||
checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
|
||||
|
||||
[[package]]
|
||||
name = "lazy_static"
|
||||
|
@ -158,27 +172,27 @@ checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55"
|
|||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.80"
|
||||
version = "0.2.98"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4d58d1b70b004888f764dfbf6a26a3b0342a1632d33968e4a179d8011c760614"
|
||||
checksum = "320cfe77175da3a483efed4bc0adc1968ca050b098ce4f2f1c13a56626128790"
|
||||
|
||||
[[package]]
|
||||
name = "libloading"
|
||||
version = "0.6.5"
|
||||
version = "0.7.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1090080fe06ec2648d0da3881d9453d97e71a45f00eb179af7fdd7e3f686fdb0"
|
||||
checksum = "6f84d96438c15fcd6c3f244c8fce01d1e2b9c6b5623e9c711dc9286d8fc92d6a"
|
||||
dependencies = [
|
||||
"cfg-if 1.0.0",
|
||||
"cfg-if",
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "log"
|
||||
version = "0.4.11"
|
||||
version = "0.4.14"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4fabed175da42fed1fa0746b0ea71f412aa9d35e76e95e59b192c64b9dc2bf8b"
|
||||
checksum = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710"
|
||||
dependencies = [
|
||||
"cfg-if 0.1.10",
|
||||
"cfg-if",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -188,12 +202,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "0ee1c47aaa256ecabcaea351eae4a9b01ef39ed810004e298d2511ed284b1525"
|
||||
|
||||
[[package]]
|
||||
name = "nom"
|
||||
version = "5.1.2"
|
||||
name = "minimal-lexical"
|
||||
version = "0.1.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ffb4262d26ed83a1c0a33a38fe2bb15797329c85770da05e6b828ddb782627af"
|
||||
checksum = "9c64630dcdd71f1a64c435f54885086a0de5d6a12d104d69b165fb7d5286d677"
|
||||
|
||||
[[package]]
|
||||
name = "nom"
|
||||
version = "7.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7ffd9d26838a953b4af82cbeb9f1592c6798916983959be223a7124e992742c1"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
"minimal-lexical",
|
||||
"version_check",
|
||||
]
|
||||
|
||||
|
@ -204,40 +225,103 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099"
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro2"
|
||||
version = "1.0.24"
|
||||
name = "ppv-lite86"
|
||||
version = "0.2.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71"
|
||||
checksum = "ac74c624d6b2d21f425f752262f42188365d7b8ff1aff74c82e45136510a4857"
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro2"
|
||||
version = "1.0.28"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5c7ed8b8c7b886ea3ed7dde405212185f423ab44682667c8c6dd14aa1d9f6612"
|
||||
dependencies = [
|
||||
"unicode-xid",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "quote"
|
||||
version = "1.0.7"
|
||||
version = "1.0.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "aa563d17ecb180e500da1cfd2b028310ac758de548efdd203e18f283af693f37"
|
||||
checksum = "c3d0b9745dc2debf507c8422de05d7226cc1f0644216dfdfead988f9b1ab32a7"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex"
|
||||
version = "1.4.2"
|
||||
name = "rand"
|
||||
version = "0.8.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "38cf2c13ed4745de91a5eb834e11c00bcc3709e773173b2ce4c56c9fbde04b9c"
|
||||
checksum = "2e7573632e6454cf6b99d7aac4ccca54be06da05aca2ef7423d22d27d4d4bcd8"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"rand_chacha",
|
||||
"rand_core",
|
||||
"rand_hc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand_chacha"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
|
||||
dependencies = [
|
||||
"ppv-lite86",
|
||||
"rand_core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand_core"
|
||||
version = "0.6.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7"
|
||||
dependencies = [
|
||||
"getrandom",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand_hc"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d51e9f596de227fda2ea6c84607f5558e196eeaf43c986b724ba4fb8fdf497e7"
|
||||
dependencies = [
|
||||
"rand_core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "redox_syscall"
|
||||
version = "0.2.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5ab49abadf3f9e1c4bc499e8845e152ad87d2ad2d30371841171169e9d75feee"
|
||||
dependencies = [
|
||||
"bitflags",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex"
|
||||
version = "1.4.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2a26af418b574bd56588335b3a3659a65725d4e636eb1016c2f9e3b38c7cc759"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"memchr",
|
||||
"regex-syntax",
|
||||
"thread_local",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex-syntax"
|
||||
version = "0.6.21"
|
||||
version = "0.6.25"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3b181ba2dcf07aaccad5448e8ead58db5b742cf85dfe035e2227f137a539a189"
|
||||
checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b"
|
||||
|
||||
[[package]]
|
||||
name = "remove_dir_all"
|
||||
version = "0.5.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7"
|
||||
dependencies = [
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rustc-hash"
|
||||
|
@ -247,9 +331,9 @@ checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
|
|||
|
||||
[[package]]
|
||||
name = "shlex"
|
||||
version = "0.1.1"
|
||||
version = "1.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7fdf1b9db47230893d76faad238fd6097fd6d6a9245cd7a4d90dbd639536bbd2"
|
||||
checksum = "42a568c8f2cd051a4d283bd6eb0343ac214c1b0f1ac19f93e1175b2dee38c73d"
|
||||
|
||||
[[package]]
|
||||
name = "strsim"
|
||||
|
@ -258,10 +342,24 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a"
|
||||
|
||||
[[package]]
|
||||
name = "termcolor"
|
||||
version = "1.1.0"
|
||||
name = "tempfile"
|
||||
version = "3.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bb6bfa289a4d7c5766392812c0a1f4c1ba45afa1ad47803c11e1f407d846d75f"
|
||||
checksum = "dac1c663cfc93810f88aed9b8941d48cabf856a1b111c29a40439018d870eb22"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"libc",
|
||||
"rand",
|
||||
"redox_syscall",
|
||||
"remove_dir_all",
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "termcolor"
|
||||
version = "1.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2dfed899f0eb03f32ee8c6a0aabdb8a7949659e3466561fc0adf54e26d88c5f4"
|
||||
dependencies = [
|
||||
"winapi-util",
|
||||
]
|
||||
|
@ -275,15 +373,6 @@ dependencies = [
|
|||
"unicode-width",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "thread_local"
|
||||
version = "1.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d40c6d1b69745a6ec6fb1ca717914848da4b44ae29d9b3080cbee91d72a69b14"
|
||||
dependencies = [
|
||||
"lazy_static",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "unicode-width"
|
||||
version = "0.1.8"
|
||||
|
@ -292,9 +381,9 @@ checksum = "9337591893a19b88d8d87f2cec1e73fad5cdfd10e5a6f349f498ad6ea2ffb1e3"
|
|||
|
||||
[[package]]
|
||||
name = "unicode-xid"
|
||||
version = "0.2.1"
|
||||
version = "0.2.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564"
|
||||
checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3"
|
||||
|
||||
[[package]]
|
||||
name = "vec_map"
|
||||
|
@ -304,16 +393,24 @@ checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191"
|
|||
|
||||
[[package]]
|
||||
name = "version_check"
|
||||
version = "0.9.2"
|
||||
version = "0.9.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b5a972e5669d67ba988ce3dc826706fb0a8b01471c088cb0b6110b805cc36aed"
|
||||
checksum = "5fecdca9a5291cc2b8dcf7dc02453fee791a280f3743cb0905f8822ae463b3fe"
|
||||
|
||||
[[package]]
|
||||
name = "wasi"
|
||||
version = "0.10.2+wasi-snapshot-preview1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6"
|
||||
|
||||
[[package]]
|
||||
name = "which"
|
||||
version = "3.1.1"
|
||||
version = "4.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d011071ae14a2f6671d0b74080ae0cd8ebf3a6f8c9589a2cd45f23126fe29724"
|
||||
checksum = "7cc009ab82a2afc94b9e467ab4214aee9cad1356cd9191264203d7d72006e00d"
|
||||
dependencies = [
|
||||
"either",
|
||||
"lazy_static",
|
||||
"libc",
|
||||
]
|
||||
|
||||
|
|
|
@ -3,17 +3,16 @@
|
|||
# When uploading crates to the registry Cargo will automatically
|
||||
# "normalize" Cargo.toml files for maximal compatibility
|
||||
# with all versions of Cargo and also rewrite `path` dependencies
|
||||
# to registry (e.g., crates.io) dependencies
|
||||
# to registry (e.g., crates.io) dependencies.
|
||||
#
|
||||
# If you believe there's an error in this file please file an
|
||||
# issue against the rust-lang/cargo repository. If you're
|
||||
# editing this file be aware that the upstream Cargo.toml
|
||||
# will likely look very different (and much more reasonable)
|
||||
# If you are reading this file be aware that the original Cargo.toml
|
||||
# will likely look very different (and much more reasonable).
|
||||
# See Cargo.toml.orig for the original contents.
|
||||
|
||||
[package]
|
||||
edition = "2018"
|
||||
name = "bindgen"
|
||||
version = "0.56.0"
|
||||
version = "0.59.2"
|
||||
authors = ["Jyun-Yan You <jyyou.tw@gmail.com>", "Emilio Cobos Álvarez <emilio@crisal.io>", "Nick Fitzgerald <fitzgen@gmail.com>", "The Servo project developers"]
|
||||
build = "build.rs"
|
||||
include = ["LICENSE", "README.md", "Cargo.toml", "build.rs", "src/*.rs", "src/**/*.rs"]
|
||||
|
@ -38,7 +37,7 @@ required-features = ["clap"]
|
|||
version = "1.0.3"
|
||||
|
||||
[dependencies.cexpr]
|
||||
version = "0.4"
|
||||
version = "0.6"
|
||||
|
||||
[dependencies.clang-sys]
|
||||
version = "1"
|
||||
|
@ -49,7 +48,7 @@ version = "2"
|
|||
optional = true
|
||||
|
||||
[dependencies.env_logger]
|
||||
version = "0.8"
|
||||
version = "0.9.0"
|
||||
optional = true
|
||||
|
||||
[dependencies.lazy_static]
|
||||
|
@ -82,10 +81,10 @@ default-features = false
|
|||
version = "1.0.1"
|
||||
|
||||
[dependencies.shlex]
|
||||
version = "0.1"
|
||||
version = "1"
|
||||
|
||||
[dependencies.which]
|
||||
version = "3.0"
|
||||
version = "4.2.1"
|
||||
optional = true
|
||||
default-features = false
|
||||
[dev-dependencies.clap]
|
||||
|
@ -95,7 +94,10 @@ version = "2"
|
|||
version = "0.1"
|
||||
|
||||
[dev-dependencies.shlex]
|
||||
version = "0.1"
|
||||
version = "1"
|
||||
|
||||
[dev-dependencies.tempfile]
|
||||
version = "3"
|
||||
|
||||
[features]
|
||||
default = ["logging", "clap", "runtime", "which-rustfmt"]
|
||||
|
|
|
@ -39,7 +39,7 @@ extern "C" {
|
|||
|
||||
## MSRV
|
||||
|
||||
The minimum supported Rust version is **1.40**.
|
||||
The minimum supported Rust version is **1.46**.
|
||||
|
||||
No MSRV bump policy has been established yet, so MSRV may increase in any release.
|
||||
|
||||
|
@ -60,6 +60,10 @@ End-users should set these environment variables to modify `bindgen`'s behavior
|
|||
- Examples:
|
||||
- Specify alternate sysroot: `--sysroot=/path/to/sysroot`
|
||||
- Add include search path with spaces: `-I"/path/with spaces"`
|
||||
- `BINDGEN_EXTRA_CLANG_ARGS_<TARGET>`: similar to `BINDGEN_EXTRA_CLANG_ARGS`,
|
||||
but used to set per-target arguments to pass to clang. Useful to set system include
|
||||
directories in a target-specific way in cross-compilation environments with multiple targets.
|
||||
Has precedence over `BINDGEN_EXTRA_CLANG_ARGS`.
|
||||
|
||||
Additionally, `bindgen` uses `libclang` to parse C and C++ header files.
|
||||
To modify how `bindgen` searches for `libclang`, see the [`clang-sys` documentation][clang-sys-env].
|
||||
|
|
|
@ -79,4 +79,12 @@ fn main() {
|
|||
println!("cargo:rerun-if-env-changed=LIBCLANG_PATH");
|
||||
println!("cargo:rerun-if-env-changed=LIBCLANG_STATIC_PATH");
|
||||
println!("cargo:rerun-if-env-changed=BINDGEN_EXTRA_CLANG_ARGS");
|
||||
println!(
|
||||
"cargo:rerun-if-env-changed=BINDGEN_EXTRA_CLANG_ARGS_{}",
|
||||
std::env::var("TARGET").unwrap()
|
||||
);
|
||||
println!(
|
||||
"cargo:rerun-if-env-changed=BINDGEN_EXTRA_CLANG_ARGS_{}",
|
||||
std::env::var("TARGET").unwrap().replace("-", "_")
|
||||
);
|
||||
}
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
//! A public API for more fine-grained customization of bindgen behavior.
|
||||
|
||||
pub use crate::ir::analysis::DeriveTrait;
|
||||
pub use crate::ir::derive::CanDerive as ImplementsTrait;
|
||||
pub use crate::ir::enum_ty::{EnumVariantCustomBehavior, EnumVariantValue};
|
||||
pub use crate::ir::int::IntKind;
|
||||
use std::fmt;
|
||||
|
@ -76,4 +78,29 @@ pub trait ParseCallbacks: fmt::Debug + UnwindSafe {
|
|||
|
||||
/// This will be called on every file inclusion, with the full path of the included file.
|
||||
fn include_file(&self, _filename: &str) {}
|
||||
|
||||
/// This will be called to determine whether a particular blocklisted type
|
||||
/// implements a trait or not. This will be used to implement traits on
|
||||
/// other types containing the blocklisted type.
|
||||
///
|
||||
/// * `None`: use the default behavior
|
||||
/// * `Some(ImplementsTrait::Yes)`: `_name` implements `_derive_trait`
|
||||
/// * `Some(ImplementsTrait::Manually)`: any type including `_name` can't
|
||||
/// derive `_derive_trait` but can implemented it manually
|
||||
/// * `Some(ImplementsTrait::No)`: `_name` doesn't implement `_derive_trait`
|
||||
fn blocklisted_type_implements_trait(
|
||||
&self,
|
||||
_name: &str,
|
||||
_derive_trait: DeriveTrait,
|
||||
) -> Option<ImplementsTrait> {
|
||||
None
|
||||
}
|
||||
|
||||
/// Provide a list of custom derive attributes.
|
||||
///
|
||||
/// If no additional attributes are wanted, this function should return an
|
||||
/// empty `Vec`.
|
||||
fn add_derives(&self, _name: &str) -> Vec<String> {
|
||||
vec![]
|
||||
}
|
||||
}
|
||||
|
|
|
@ -4,9 +4,7 @@
|
|||
#![allow(non_upper_case_globals, dead_code)]
|
||||
|
||||
use crate::ir::context::BindgenContext;
|
||||
use cexpr;
|
||||
use clang_sys::*;
|
||||
use regex;
|
||||
use std::ffi::{CStr, CString};
|
||||
use std::fmt;
|
||||
use std::hash::Hash;
|
||||
|
@ -85,7 +83,7 @@ impl Cursor {
|
|||
|
||||
let mut result = Vec::with_capacity(count);
|
||||
for i in 0..count {
|
||||
let string_ptr = (*manglings).Strings.offset(i as isize);
|
||||
let string_ptr = (*manglings).Strings.add(i);
|
||||
result.push(cxstring_to_string_leaky(*string_ptr));
|
||||
}
|
||||
clang_disposeStringSet(manglings);
|
||||
|
@ -223,12 +221,12 @@ impl Cursor {
|
|||
/// not tracking the type declaration but the location of the cursor, given
|
||||
/// clang doesn't expose a proper declaration for these types.
|
||||
pub fn is_template_like(&self) -> bool {
|
||||
match self.kind() {
|
||||
matches!(
|
||||
self.kind(),
|
||||
CXCursor_ClassTemplate |
|
||||
CXCursor_ClassTemplatePartialSpecialization |
|
||||
CXCursor_TypeAliasTemplateDecl => true,
|
||||
_ => false,
|
||||
}
|
||||
CXCursor_ClassTemplatePartialSpecialization |
|
||||
CXCursor_TypeAliasTemplateDecl
|
||||
)
|
||||
}
|
||||
|
||||
/// Is this Cursor pointing to a function-like macro definition?
|
||||
|
@ -241,7 +239,7 @@ impl Cursor {
|
|||
self.x.kind
|
||||
}
|
||||
|
||||
/// Returns true is the cursor is a definition
|
||||
/// Returns true if the cursor is a definition
|
||||
pub fn is_definition(&self) -> bool {
|
||||
unsafe { clang_isCursorDefinition(self.x) != 0 }
|
||||
}
|
||||
|
@ -275,7 +273,7 @@ impl Cursor {
|
|||
return parent.is_in_non_fully_specialized_template();
|
||||
}
|
||||
|
||||
return true;
|
||||
true
|
||||
}
|
||||
|
||||
/// Is this cursor pointing a valid referent?
|
||||
|
@ -402,12 +400,9 @@ impl Cursor {
|
|||
where
|
||||
Visitor: FnMut(Cursor) -> CXChildVisitResult,
|
||||
{
|
||||
let data = &mut visitor as *mut Visitor;
|
||||
unsafe {
|
||||
clang_visitChildren(
|
||||
self.x,
|
||||
visit_children::<Visitor>,
|
||||
mem::transmute(&mut visitor),
|
||||
);
|
||||
clang_visitChildren(self.x, visit_children::<Visitor>, data.cast());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -469,6 +464,27 @@ impl Cursor {
|
|||
unsafe { clang_Cursor_isFunctionInlined(self.x) != 0 }
|
||||
}
|
||||
|
||||
/// Is the referent a defaulted function?
|
||||
pub fn is_defaulted_function(&self) -> bool {
|
||||
unsafe { clang_CXXMethod_isDefaulted(self.x) != 0 }
|
||||
}
|
||||
|
||||
/// Is the referent a deleted function?
|
||||
pub fn is_deleted_function(&self) -> bool {
|
||||
// Unfortunately, libclang doesn't yet have an API for checking if a
|
||||
// member function is deleted, but the following should be a good
|
||||
// enough approximation.
|
||||
// Deleted functions are implicitly inline according to paragraph 4 of
|
||||
// [dcl.fct.def.delete] in the C++ standard. Normal inline functions
|
||||
// have a definition in the same translation unit, so if this is an
|
||||
// inline function without a definition, and it's not a defaulted
|
||||
// function, we can reasonably safely conclude that it's a deleted
|
||||
// function.
|
||||
self.is_inlined_function() &&
|
||||
self.definition().is_none() &&
|
||||
!self.is_defaulted_function()
|
||||
}
|
||||
|
||||
/// Get the width of this cursor's referent bit field, or `None` if the
|
||||
/// referent is not a bit field.
|
||||
pub fn bit_width(&self) -> Option<u32> {
|
||||
|
@ -632,6 +648,15 @@ impl Cursor {
|
|||
unsafe { clang_getCXXAccessSpecifier(self.x) }
|
||||
}
|
||||
|
||||
/// Is the cursor's referrent publically accessible in C++?
|
||||
///
|
||||
/// Returns true if self.access_specifier() is `CX_CXXPublic` or
|
||||
/// `CX_CXXInvalidAccessSpecifier`.
|
||||
pub fn public_accessible(&self) -> bool {
|
||||
let access = self.access_specifier();
|
||||
access == CX_CXXPublic || access == CX_CXXInvalidAccessSpecifier
|
||||
}
|
||||
|
||||
/// Is this cursor's referent a field declaration that is marked as
|
||||
/// `mutable`?
|
||||
pub fn is_mutable_field(&self) -> bool {
|
||||
|
@ -870,7 +895,7 @@ extern "C" fn visit_children<Visitor>(
|
|||
where
|
||||
Visitor: FnMut(Cursor) -> CXChildVisitResult,
|
||||
{
|
||||
let func: &mut Visitor = unsafe { mem::transmute(data) };
|
||||
let func: &mut Visitor = unsafe { &mut *(data as *mut Visitor) };
|
||||
let child = Cursor { x: cur };
|
||||
|
||||
(*func)(child)
|
||||
|
@ -997,7 +1022,7 @@ impl Type {
|
|||
let s = unsafe { cxstring_into_string(clang_getTypeSpelling(self.x)) };
|
||||
// Clang 5.0 introduced changes in the spelling API so it returned the
|
||||
// full qualified name. Let's undo that here.
|
||||
if s.split("::").all(|s| is_valid_identifier(s)) {
|
||||
if s.split("::").all(is_valid_identifier) {
|
||||
if let Some(s) = s.split("::").last() {
|
||||
return s.to_owned();
|
||||
}
|
||||
|
@ -1025,7 +1050,7 @@ impl Type {
|
|||
ctx.target_pointer_size() as c_longlong
|
||||
}
|
||||
// Work-around https://bugs.llvm.org/show_bug.cgi?id=40813
|
||||
CXType_Auto if self.is_non_deductible_auto_type() => return -6,
|
||||
CXType_Auto if self.is_non_deductible_auto_type() => -6,
|
||||
_ => unsafe { clang_Type_getSizeOf(self.x) },
|
||||
}
|
||||
}
|
||||
|
@ -1038,7 +1063,7 @@ impl Type {
|
|||
ctx.target_pointer_size() as c_longlong
|
||||
}
|
||||
// Work-around https://bugs.llvm.org/show_bug.cgi?id=40813
|
||||
CXType_Auto if self.is_non_deductible_auto_type() => return -6,
|
||||
CXType_Auto if self.is_non_deductible_auto_type() => -6,
|
||||
_ => unsafe { clang_Type_getAlignOf(self.x) },
|
||||
}
|
||||
}
|
||||
|
@ -1256,12 +1281,12 @@ impl Type {
|
|||
// nasty... But can happen in <type_traits>. Unfortunately I couldn't
|
||||
// reduce it enough :(
|
||||
self.template_args().map_or(false, |args| args.len() > 0) &&
|
||||
match self.declaration().kind() {
|
||||
!matches!(
|
||||
self.declaration().kind(),
|
||||
CXCursor_ClassTemplatePartialSpecialization |
|
||||
CXCursor_TypeAliasTemplateDecl |
|
||||
CXCursor_TemplateTemplateParameter => false,
|
||||
_ => true,
|
||||
}
|
||||
CXCursor_TypeAliasTemplateDecl |
|
||||
CXCursor_TemplateTemplateParameter
|
||||
)
|
||||
}
|
||||
|
||||
/// Is this type an associated template type? Eg `T::Associated` in
|
||||
|
@ -1376,6 +1401,12 @@ impl fmt::Display for SourceLocation {
|
|||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for SourceLocation {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(f, "{}", self)
|
||||
}
|
||||
}
|
||||
|
||||
/// A comment in the source text.
|
||||
///
|
||||
/// Comments are sort of parsed by Clang, and have a tree structure.
|
||||
|
@ -1674,11 +1705,7 @@ impl UnsavedFile {
|
|||
Contents: contents.as_ptr(),
|
||||
Length: contents.as_bytes().len() as c_ulong,
|
||||
};
|
||||
UnsavedFile {
|
||||
x: x,
|
||||
name: name,
|
||||
contents: contents,
|
||||
}
|
||||
UnsavedFile { x, name, contents }
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1789,7 +1816,7 @@ pub fn ast_dump(c: &Cursor, depth: isize) -> CXChildVisitResult {
|
|||
|
||||
if let Some(refd) = c.referenced() {
|
||||
if refd != *c {
|
||||
println!("");
|
||||
println!();
|
||||
print_cursor(
|
||||
depth,
|
||||
String::from(prefix) + "referenced.",
|
||||
|
@ -1800,7 +1827,7 @@ pub fn ast_dump(c: &Cursor, depth: isize) -> CXChildVisitResult {
|
|||
|
||||
let canonical = c.canonical();
|
||||
if canonical != *c {
|
||||
println!("");
|
||||
println!();
|
||||
print_cursor(
|
||||
depth,
|
||||
String::from(prefix) + "canonical.",
|
||||
|
@ -1810,7 +1837,7 @@ pub fn ast_dump(c: &Cursor, depth: isize) -> CXChildVisitResult {
|
|||
|
||||
if let Some(specialized) = c.specialized() {
|
||||
if specialized != *c {
|
||||
println!("");
|
||||
println!();
|
||||
print_cursor(
|
||||
depth,
|
||||
String::from(prefix) + "specialized.",
|
||||
|
@ -1820,7 +1847,7 @@ pub fn ast_dump(c: &Cursor, depth: isize) -> CXChildVisitResult {
|
|||
}
|
||||
|
||||
if let Some(parent) = c.fallible_semantic_parent() {
|
||||
println!("");
|
||||
println!();
|
||||
print_cursor(
|
||||
depth,
|
||||
String::from(prefix) + "semantic-parent.",
|
||||
|
@ -1868,34 +1895,34 @@ pub fn ast_dump(c: &Cursor, depth: isize) -> CXChildVisitResult {
|
|||
|
||||
let canonical = ty.canonical_type();
|
||||
if canonical != *ty {
|
||||
println!("");
|
||||
println!();
|
||||
print_type(depth, String::from(prefix) + "canonical.", &canonical);
|
||||
}
|
||||
|
||||
if let Some(pointee) = ty.pointee_type() {
|
||||
if pointee != *ty {
|
||||
println!("");
|
||||
println!();
|
||||
print_type(depth, String::from(prefix) + "pointee.", &pointee);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(elem) = ty.elem_type() {
|
||||
if elem != *ty {
|
||||
println!("");
|
||||
println!();
|
||||
print_type(depth, String::from(prefix) + "elements.", &elem);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(ret) = ty.ret_type() {
|
||||
if ret != *ty {
|
||||
println!("");
|
||||
println!();
|
||||
print_type(depth, String::from(prefix) + "return.", &ret);
|
||||
}
|
||||
}
|
||||
|
||||
let named = ty.named();
|
||||
if named != *ty && named.is_valid() {
|
||||
println!("");
|
||||
println!();
|
||||
print_type(depth, String::from(prefix) + "named.", &named);
|
||||
}
|
||||
}
|
||||
|
@ -1903,13 +1930,13 @@ pub fn ast_dump(c: &Cursor, depth: isize) -> CXChildVisitResult {
|
|||
print_indent(depth, "(");
|
||||
print_cursor(depth, "", c);
|
||||
|
||||
println!("");
|
||||
println!();
|
||||
let ty = c.cur_type();
|
||||
print_type(depth, "type.", &ty);
|
||||
|
||||
let declaration = ty.declaration();
|
||||
if declaration != *c && declaration.kind() != CXCursor_NoDeclFound {
|
||||
println!("");
|
||||
println!();
|
||||
print_cursor(depth, "type.declaration.", &declaration);
|
||||
}
|
||||
|
||||
|
@ -1917,7 +1944,7 @@ pub fn ast_dump(c: &Cursor, depth: isize) -> CXChildVisitResult {
|
|||
let mut found_children = false;
|
||||
c.visit(|s| {
|
||||
if !found_children {
|
||||
println!("");
|
||||
println!();
|
||||
found_children = true;
|
||||
}
|
||||
ast_dump(&s, depth + 1)
|
||||
|
|
|
@ -1,18 +1,17 @@
|
|||
#[repr(C)]
|
||||
#[derive(Copy, Clone, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)]
|
||||
pub struct __BindgenBitfieldUnit<Storage, Align> {
|
||||
pub struct __BindgenBitfieldUnit<Storage> {
|
||||
storage: Storage,
|
||||
align: [Align; 0],
|
||||
}
|
||||
|
||||
impl<Storage, Align> __BindgenBitfieldUnit<Storage, Align> {
|
||||
impl<Storage> __BindgenBitfieldUnit<Storage> {
|
||||
#[inline]
|
||||
pub const fn new(storage: Storage) -> Self {
|
||||
Self { storage, align: [] }
|
||||
Self { storage }
|
||||
}
|
||||
}
|
||||
|
||||
impl<Storage, Align> __BindgenBitfieldUnit<Storage, Align>
|
||||
impl<Storage> __BindgenBitfieldUnit<Storage>
|
||||
where
|
||||
Storage: AsRef<[u8]> + AsMut<[u8]>,
|
||||
{
|
||||
|
|
|
@ -22,12 +22,10 @@
|
|||
//! ```
|
||||
|
||||
use super::bitfield_unit::__BindgenBitfieldUnit;
|
||||
use std::mem;
|
||||
|
||||
#[test]
|
||||
fn bitfield_unit_get_bit() {
|
||||
let unit =
|
||||
__BindgenBitfieldUnit::<[u8; 2], u64>::new([0b10011101, 0b00011101]);
|
||||
let unit = __BindgenBitfieldUnit::<[u8; 2]>::new([0b10011101, 0b00011101]);
|
||||
|
||||
let mut bits = vec![];
|
||||
for i in 0..16 {
|
||||
|
@ -50,7 +48,7 @@ fn bitfield_unit_get_bit() {
|
|||
#[test]
|
||||
fn bitfield_unit_set_bit() {
|
||||
let mut unit =
|
||||
__BindgenBitfieldUnit::<[u8; 2], u64>::new([0b00000000, 0b00000000]);
|
||||
__BindgenBitfieldUnit::<[u8; 2]>::new([0b00000000, 0b00000000]);
|
||||
|
||||
for i in 0..16 {
|
||||
if i % 3 == 0 {
|
||||
|
@ -63,7 +61,7 @@ fn bitfield_unit_set_bit() {
|
|||
}
|
||||
|
||||
let mut unit =
|
||||
__BindgenBitfieldUnit::<[u8; 2], u64>::new([0b11111111, 0b11111111]);
|
||||
__BindgenBitfieldUnit::<[u8; 2]>::new([0b11111111, 0b11111111]);
|
||||
|
||||
for i in 0..16 {
|
||||
if i % 3 == 0 {
|
||||
|
@ -76,43 +74,6 @@ fn bitfield_unit_set_bit() {
|
|||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bitfield_unit_align() {
|
||||
assert_eq!(
|
||||
mem::align_of::<__BindgenBitfieldUnit<[u8; 1], u8>>(),
|
||||
mem::align_of::<u8>()
|
||||
);
|
||||
assert_eq!(
|
||||
mem::align_of::<__BindgenBitfieldUnit<[u8; 1], u16>>(),
|
||||
mem::align_of::<u16>()
|
||||
);
|
||||
assert_eq!(
|
||||
mem::align_of::<__BindgenBitfieldUnit<[u8; 1], u32>>(),
|
||||
mem::align_of::<u32>()
|
||||
);
|
||||
assert_eq!(
|
||||
mem::align_of::<__BindgenBitfieldUnit<[u8; 1], u64>>(),
|
||||
mem::align_of::<u64>()
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
mem::align_of::<__BindgenBitfieldUnit<[u8; 8], u8>>(),
|
||||
mem::align_of::<u8>()
|
||||
);
|
||||
assert_eq!(
|
||||
mem::align_of::<__BindgenBitfieldUnit<[u8; 8], u16>>(),
|
||||
mem::align_of::<u16>()
|
||||
);
|
||||
assert_eq!(
|
||||
mem::align_of::<__BindgenBitfieldUnit<[u8; 8], u32>>(),
|
||||
mem::align_of::<u32>()
|
||||
);
|
||||
assert_eq!(
|
||||
mem::align_of::<__BindgenBitfieldUnit<[u8; 8], u64>>(),
|
||||
mem::align_of::<u64>()
|
||||
);
|
||||
}
|
||||
|
||||
macro_rules! bitfield_unit_get {
|
||||
(
|
||||
$(
|
||||
|
@ -123,7 +84,7 @@ macro_rules! bitfield_unit_get {
|
|||
fn bitfield_unit_get() {
|
||||
$({
|
||||
let expected = $expected;
|
||||
let unit = __BindgenBitfieldUnit::<_, u64>::new($storage);
|
||||
let unit = __BindgenBitfieldUnit::<_>::new($storage);
|
||||
let actual = unit.get($start, $len);
|
||||
|
||||
println!();
|
||||
|
@ -223,7 +184,7 @@ macro_rules! bitfield_unit_set {
|
|||
#[test]
|
||||
fn bitfield_unit_set() {
|
||||
$(
|
||||
let mut unit = __BindgenBitfieldUnit::<[u8; 4], u64>::new([0, 0, 0, 0]);
|
||||
let mut unit = __BindgenBitfieldUnit::<[u8; 4]>::new([0, 0, 0, 0]);
|
||||
unit.set($start, $len, $val);
|
||||
let actual = unit.get(0, 32);
|
||||
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
use crate::codegen;
|
||||
use crate::ir::function::Abi;
|
||||
use proc_macro2::Ident;
|
||||
|
||||
|
@ -76,6 +77,7 @@ impl DynamicItems {
|
|||
let constructor_inits = &self.constructor_inits;
|
||||
let init_fields = &self.init_fields;
|
||||
let struct_implementation = &self.struct_implementation;
|
||||
|
||||
quote! {
|
||||
extern crate libloading;
|
||||
|
||||
|
@ -89,14 +91,20 @@ impl DynamicItems {
|
|||
path: P
|
||||
) -> Result<Self, ::libloading::Error>
|
||||
where P: AsRef<::std::ffi::OsStr> {
|
||||
let __library = ::libloading::Library::new(path)?;
|
||||
let library = ::libloading::Library::new(path)?;
|
||||
Self::from_library(library)
|
||||
}
|
||||
|
||||
pub unsafe fn from_library<L>(
|
||||
library: L
|
||||
) -> Result<Self, ::libloading::Error>
|
||||
where L: Into<::libloading::Library> {
|
||||
let __library = library.into();
|
||||
#( #constructor_inits )*
|
||||
Ok(
|
||||
#lib_ident {
|
||||
__library,
|
||||
#( #init_fields ),*
|
||||
}
|
||||
)
|
||||
Ok(#lib_ident {
|
||||
__library,
|
||||
#( #init_fields ),*
|
||||
})
|
||||
}
|
||||
|
||||
#( #struct_implementation )*
|
||||
|
@ -109,6 +117,7 @@ impl DynamicItems {
|
|||
ident: Ident,
|
||||
abi: Abi,
|
||||
is_variadic: bool,
|
||||
is_required: bool,
|
||||
args: Vec<proc_macro2::TokenStream>,
|
||||
args_identifiers: Vec<proc_macro2::TokenStream>,
|
||||
ret: proc_macro2::TokenStream,
|
||||
|
@ -118,24 +127,48 @@ impl DynamicItems {
|
|||
assert_eq!(args.len(), args_identifiers.len());
|
||||
}
|
||||
|
||||
let signature = quote! { unsafe extern #abi fn ( #( #args),* ) #ret };
|
||||
let member = if is_required {
|
||||
signature
|
||||
} else {
|
||||
quote! { Result<#signature, ::libloading::Error> }
|
||||
};
|
||||
|
||||
self.struct_members.push(quote! {
|
||||
pub #ident: Result<unsafe extern #abi fn ( #( #args ),* ) #ret, ::libloading::Error>,
|
||||
pub #ident: #member,
|
||||
});
|
||||
|
||||
// N.B: If the signature was required, it won't be wrapped in a Result<...>
|
||||
// and we can simply call it directly.
|
||||
let fn_ = if is_required {
|
||||
quote! { self.#ident }
|
||||
} else {
|
||||
quote! { self.#ident.as_ref().expect("Expected function, got error.") }
|
||||
};
|
||||
let call_body = quote! {
|
||||
(#fn_)(#( #args_identifiers ),*)
|
||||
};
|
||||
|
||||
// We can't implement variadic functions from C easily, so we allow to
|
||||
// access the function pointer so that the user can call it just fine.
|
||||
if !is_variadic {
|
||||
self.struct_implementation.push(quote! {
|
||||
pub unsafe fn #ident ( &self, #( #args ),* ) -> #ret_ty {
|
||||
let sym = self.#ident.as_ref().expect("Expected function, got error.");
|
||||
(sym)(#( #args_identifiers ),*)
|
||||
#call_body
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
let ident_str = ident.to_string();
|
||||
self.constructor_inits.push(quote! {
|
||||
let #ident = __library.get(#ident_str.as_bytes()).map(|sym| *sym);
|
||||
// N.B: Unwrap the signature upon construction if it is required to be resolved.
|
||||
let ident_str = codegen::helpers::ast_ty::cstr_expr(ident.to_string());
|
||||
self.constructor_inits.push(if is_required {
|
||||
quote! {
|
||||
let #ident = __library.get(#ident_str).map(|sym| *sym)?;
|
||||
}
|
||||
} else {
|
||||
quote! {
|
||||
let #ident = __library.get(#ident_str).map(|sym| *sym);
|
||||
}
|
||||
});
|
||||
|
||||
self.init_fields.push(quote! {
|
||||
|
|
|
@ -120,16 +120,9 @@ pub fn bitfield_unit(ctx: &BindgenContext, layout: Layout) -> TokenStream {
|
|||
tokens.append_all(quote! { root:: });
|
||||
}
|
||||
|
||||
let align = match layout.align {
|
||||
n if n >= 8 => quote! { u64 },
|
||||
4 => quote! { u32 },
|
||||
2 => quote! { u16 },
|
||||
_ => quote! { u8 },
|
||||
};
|
||||
|
||||
let size = layout.size;
|
||||
tokens.append_all(quote! {
|
||||
__BindgenBitfieldUnit<[u8; #size], #align>
|
||||
__BindgenBitfieldUnit<[u8; #size]>
|
||||
});
|
||||
|
||||
tokens
|
||||
|
@ -237,14 +230,14 @@ pub mod ast_ty {
|
|||
}
|
||||
|
||||
pub fn byte_array_expr(bytes: &[u8]) -> TokenStream {
|
||||
let mut bytes: Vec<_> = bytes.iter().cloned().collect();
|
||||
let mut bytes: Vec<_> = bytes.to_vec();
|
||||
bytes.push(0);
|
||||
quote! { [ #(#bytes),* ] }
|
||||
}
|
||||
|
||||
pub fn cstr_expr(mut string: String) -> TokenStream {
|
||||
string.push('\0');
|
||||
let b = proc_macro2::Literal::byte_string(&string.as_bytes());
|
||||
let b = proc_macro2::Literal::byte_string(string.as_bytes());
|
||||
quote! {
|
||||
#b
|
||||
}
|
||||
|
@ -278,7 +271,7 @@ pub mod ast_ty {
|
|||
}
|
||||
|
||||
warn!("Unknown non-finite float number: {:?}", f);
|
||||
return Err(());
|
||||
Err(())
|
||||
}
|
||||
|
||||
pub fn arguments_from_signature(
|
||||
|
|
|
@ -2,7 +2,6 @@ use crate::ir::comp::{BitfieldUnit, CompKind, Field, FieldData, FieldMethods};
use crate::ir::context::BindgenContext;
use crate::ir::item::{HasTypeParamInArray, IsOpaque, Item, ItemCanonicalName};
use crate::ir::ty::{TypeKind, RUST_DERIVE_IN_ARRAY_LIMIT};
use proc_macro2;

pub fn gen_debug_impl(
ctx: &BindgenContext,

@ -23,8 +22,8 @@ pub fn gen_debug_impl(
}
CompKind::Struct => {
let processed_fields = fields.iter().filter_map(|f| match f {
&Field::DataMember(ref fd) => fd.impl_debug(ctx, ()),
&Field::Bitfields(ref bu) => bu.impl_debug(ctx, ()),
Field::DataMember(ref fd) => fd.impl_debug(ctx, ()),
Field::Bitfields(ref bu) => bu.impl_debug(ctx, ()),
});

for (i, (fstring, toks)) in processed_fields.enumerate() {

@ -120,9 +119,9 @@ impl<'a> ImplDebug<'a> for Item {
) -> Option<(String, Vec<proc_macro2::TokenStream>)> {
let name_ident = ctx.rust_ident(name);

// We don't know if blacklisted items `impl Debug` or not, so we can't
// We don't know if blocklisted items `impl Debug` or not, so we can't
// add them to the format string we're building up.
if !ctx.whitelisted_items().contains(&self.id()) {
if !ctx.allowlisted_items().contains(&self.id()) {
return None;
}

@ -181,27 +180,27 @@ impl<'a> ImplDebug<'a> for Item {
format!("{}: Array with length {}", name, len),
vec![],
))
} else if len < RUST_DERIVE_IN_ARRAY_LIMIT {
} else if len < RUST_DERIVE_IN_ARRAY_LIMIT ||
ctx.options().rust_features().larger_arrays
{
// The simple case
debug_print(name, quote! { #name_ident })
} else if ctx.options().use_core {
// There is no String in core; reducing field visibility to avoid breaking
// no_std setups.
Some((format!("{}: [...]", name), vec![]))
} else {
if ctx.options().use_core {
// There is no String in core; reducing field visibility to avoid breaking
// no_std setups.
Some((format!("{}: [...]", name), vec![]))
} else {
// Let's implement our own print function
Some((
format!("{}: [{{}}]", name),
vec![quote! {
self.#name_ident
.iter()
.enumerate()
.map(|(i, v)| format!("{}{:?}", if i > 0 { ", " } else { "" }, v))
.collect::<String>()
}],
))
}
// Let's implement our own print function
Some((
format!("{}: [{{}}]", name),
vec![quote! {
self.#name_ident
.iter()
.enumerate()
.map(|(i, v)| format!("{}{:?}", if i > 0 { ", " } else { "" }, v))
.collect::<String>()
}],
))
}
}
TypeKind::Vector(_, len) => {

@ -2,7 +2,6 @@ use crate::ir::comp::{CompInfo, CompKind, Field, FieldMethods};
use crate::ir::context::BindgenContext;
use crate::ir::item::{IsOpaque, Item};
use crate::ir::ty::{TypeKind, RUST_DERIVE_IN_ARRAY_LIMIT};
use proc_macro2;

/// Generate a manual implementation of `PartialEq` trait for the
/// specified compound type.

@ -51,7 +50,7 @@ pub fn gen_partialeq_impl(
}
Field::Bitfields(ref bu) => {
for bitfield in bu.bitfields() {
if let Some(_) = bitfield.name() {
if bitfield.name().is_some() {
let getter_name = bitfield.getter_name();
let name_ident = ctx.rust_ident_raw(getter_name);
tokens.push(quote! {

@ -104,7 +103,7 @@ fn gen_field(
TypeKind::Opaque => quote_equals(name_ident),

TypeKind::TemplateInstantiation(ref inst) => {
if inst.is_opaque(ctx, &ty_item) {
if inst.is_opaque(ctx, ty_item) {
quote! {
&self. #name_ident [..] == &other. #name_ident [..]
}

@ -114,7 +113,9 @@ fn gen_field(
}

TypeKind::Array(_, len) => {
if len <= RUST_DERIVE_IN_ARRAY_LIMIT {
if len <= RUST_DERIVE_IN_ARRAY_LIMIT ||
ctx.options().rust_features().larger_arrays
{
quote_equals(name_ident)
} else {
quote! {

Diff not shown for this file because of its size.

@ -18,6 +18,8 @@ pub struct StructLayoutTracker<'a> {
ctx: &'a BindgenContext,
comp: &'a CompInfo,
is_packed: bool,
known_type_layout: Option<Layout>,
is_rust_union: bool,
latest_offset: usize,
padding_count: usize,
latest_field_layout: Option<Layout>,

@ -86,11 +88,17 @@ impl<'a> StructLayoutTracker<'a> {
ty: &'a Type,
name: &'a str,
) -> Self {
let known_type_layout = ty.layout(ctx);
let is_packed = comp.is_packed(ctx, known_type_layout.as_ref());
let is_rust_union = comp.is_union() &&
comp.can_be_rust_union(ctx, known_type_layout.as_ref());
StructLayoutTracker {
name,
ctx,
comp,
is_packed: comp.is_packed(ctx, &ty.layout(ctx)),
is_packed,
known_type_layout,
is_rust_union,
latest_offset: 0,
padding_count: 0,
latest_field_layout: None,

@ -99,6 +107,10 @@ impl<'a> StructLayoutTracker<'a> {
}
}

pub fn is_rust_union(&self) -> bool {
self.is_rust_union
}

pub fn saw_vtable(&mut self) {
debug!("saw vtable for {}", self.name);

@ -139,18 +151,9 @@ impl<'a> StructLayoutTracker<'a> {
// actually generate the dummy alignment.
}

pub fn saw_union(&mut self, layout: Layout) {
debug!("saw union for {}: {:?}", self.name, layout);
self.align_to_latest_field(layout);

self.latest_offset += self.padding_bytes(layout) + layout.size;
self.latest_field_layout = Some(layout);
self.max_field_align = cmp::max(self.max_field_align, layout.align);
}

/// Add a padding field if necessary for a given new field _before_ adding
/// that field.
pub fn pad_field(
/// Returns a padding field if necessary for a given new field _before_
/// adding that field.
pub fn saw_field(
&mut self,
field_name: &str,
field_ty: &Type,

@ -177,26 +180,50 @@ impl<'a> StructLayoutTracker<'a> {
}
}
}
self.saw_field_with_layout(field_name, field_layout, field_offset)
}

pub fn saw_field_with_layout(
&mut self,
field_name: &str,
field_layout: Layout,
field_offset: Option<usize>,
) -> Option<proc_macro2::TokenStream> {
let will_merge_with_bitfield = self.align_to_latest_field(field_layout);

let padding_layout = if self.is_packed {
let is_union = self.comp.is_union();
let padding_bytes = match field_offset {
Some(offset) if offset / 8 > self.latest_offset => {
offset / 8 - self.latest_offset
}
_ => {
if will_merge_with_bitfield ||
field_layout.align == 0 ||
is_union
{
0
} else if !self.is_packed {
self.padding_bytes(field_layout)
} else if let Some(l) = self.known_type_layout {
self.padding_bytes(l)
} else {
0
}
}
};

self.latest_offset += padding_bytes;

let padding_layout = if self.is_packed || is_union {
None
} else {
let padding_bytes = match field_offset {
Some(offset) if offset / 8 > self.latest_offset => {
offset / 8 - self.latest_offset
}
_ if will_merge_with_bitfield || field_layout.align == 0 => 0,
_ => self.padding_bytes(field_layout),
};
let force_padding = self.ctx.options().force_explicit_padding;

// Otherwise the padding is useless.
let need_padding = padding_bytes >= field_layout.align ||
let need_padding = force_padding ||
padding_bytes >= field_layout.align ||
field_layout.align > MAX_GUARANTEED_ALIGN;

self.latest_offset += padding_bytes;

debug!(
"Offset: <padding>: {} -> {}",
self.latest_offset - padding_bytes,

@ -212,11 +239,14 @@ impl<'a> StructLayoutTracker<'a> {
field_layout
);

let padding_align = if force_padding {
1
} else {
cmp::min(field_layout.align, MAX_GUARANTEED_ALIGN)
};

if need_padding && padding_bytes != 0 {
Some(Layout::new(
padding_bytes,
cmp::min(field_layout.align, MAX_GUARANTEED_ALIGN),
))
Some(Layout::new(padding_bytes, padding_align))
} else {
None
}

@ -238,6 +268,37 @@ impl<'a> StructLayoutTracker<'a> {
padding_layout.map(|layout| self.padding_field(layout))
}

pub fn add_tail_padding(
&mut self,
comp_name: &str,
comp_layout: Layout,
) -> Option<proc_macro2::TokenStream> {
// Only emit an padding field at the end of a struct if the
// user configures explicit padding.
if !self.ctx.options().force_explicit_padding {
return None;
}

// Padding doesn't make sense for rust unions.
if self.is_rust_union {
return None;
}

if self.latest_offset == comp_layout.size {
// This struct does not contain tail padding.
return None;
}

trace!(
"need a tail padding field for {}: offset {} -> size {}",
comp_name,
self.latest_offset,
comp_layout.size
);
let size = comp_layout.size - self.latest_offset;
Some(self.padding_field(Layout::new(size, 0)))
}

pub fn pad_struct(
&mut self,
layout: Layout,

@ -372,6 +433,6 @@ impl<'a> StructLayoutTracker<'a> {

// Else, just align the obvious way.
self.latest_offset += self.padding_bytes(layout);
return false;
false
}
}

@ -0,0 +1,20 @@
/// Generating build depfiles from parsed bindings.
use std::{collections::BTreeSet, path::PathBuf};

#[derive(Debug)]
pub(crate) struct DepfileSpec {
pub output_module: String,
pub depfile_path: PathBuf,
}

impl DepfileSpec {
pub fn write(&self, deps: &BTreeSet<String>) -> std::io::Result<()> {
let mut buf = format!("{}:", self.output_module);

for file in deps {
buf = format!("{} {}", buf, file);
}

std::fs::write(&self.depfile_path, &buf)
}
}

@ -87,8 +87,9 @@ macro_rules! rust_target_base {
$x_macro!(
/// Rust stable 1.0
=> Stable_1_0 => 1.0;
/// Rust stable 1.1
=> Stable_1_1 => 1.1;
/// Rust stable 1.17
/// * Static lifetime elision ([RFC 1623](https://github.com/rust-lang/rfcs/blob/master/text/1623-static.md))
=> Stable_1_17 => 1.17;
/// Rust stable 1.19
/// * Untagged unions ([RFC 1444](https://github.com/rust-lang/rfcs/blob/master/text/1444-union.md))
=> Stable_1_19 => 1.19;

@ -123,6 +124,9 @@ macro_rules! rust_target_base {
/// Rust stable 1.40
/// * `non_exhaustive` enums/structs ([Tracking issue](https://github.com/rust-lang/rust/issues/44109))
=> Stable_1_40 => 1.40;
/// Rust stable 1.47
/// * `larger_arrays` ([Tracking issue](https://github.com/rust-lang/rust/pull/74060))
=> Stable_1_47 => 1.47;
/// Nightly rust
/// * `thiscall` calling convention ([Tracking issue](https://github.com/rust-lang/rust/issues/42202))
=> Nightly => nightly;

@ -134,7 +138,7 @@ rust_target_base!(rust_target_def);
rust_target_base!(rust_target_values_def);

/// Latest stable release of Rust
pub const LATEST_STABLE_RUST: RustTarget = RustTarget::Stable_1_40;
pub const LATEST_STABLE_RUST: RustTarget = RustTarget::Stable_1_47;

/// Create RustFeatures struct definition, new(), and a getter for each field
macro_rules! rust_feature_def {

@ -188,6 +192,9 @@ macro_rules! rust_feature_def {
// documentation for the relevant variant in the rust_target_base macro
// definition.
rust_feature_def!(
Stable_1_17 {
=> static_lifetime_elision;
}
Stable_1_19 {
=> untagged_union;
}

@ -222,6 +229,9 @@ rust_feature_def!(
Stable_1_40 {
=> non_exhaustive;
}
Stable_1_47 {
=> larger_arrays;
}
Nightly {
=> thiscall_abi;
}

@ -243,7 +253,8 @@ mod test {
fn target_features() {
let f_1_0 = RustFeatures::from(RustTarget::Stable_1_0);
assert!(
!f_1_0.core_ffi_c_void &&
!f_1_0.static_lifetime_elision &&
!f_1_0.core_ffi_c_void &&
!f_1_0.untagged_union &&
!f_1_0.associated_const &&
!f_1_0.builtin_clone_impls &&

@ -252,7 +263,8 @@ mod test {
);
let f_1_21 = RustFeatures::from(RustTarget::Stable_1_21);
assert!(
!f_1_21.core_ffi_c_void &&
f_1_21.static_lifetime_elision &&
!f_1_21.core_ffi_c_void &&
f_1_21.untagged_union &&
f_1_21.associated_const &&
f_1_21.builtin_clone_impls &&

@ -261,7 +273,8 @@ mod test {
);
let f_nightly = RustFeatures::from(RustTarget::Nightly);
assert!(
f_nightly.core_ffi_c_void &&
f_nightly.static_lifetime_elision &&
f_nightly.core_ffi_c_void &&
f_nightly.untagged_union &&
f_nightly.associated_const &&
f_nightly.builtin_clone_impls &&

@ -280,6 +293,7 @@ mod test {
#[test]
fn str_to_target() {
test_target("1.0", RustTarget::Stable_1_0);
test_target("1.17", RustTarget::Stable_1_17);
test_target("1.19", RustTarget::Stable_1_19);
test_target("1.21", RustTarget::Stable_1_21);
test_target("1.25", RustTarget::Stable_1_25);

@ -9,6 +9,7 @@ use crate::ir::context::{BindgenContext, ItemId};
|
|||
use crate::ir::derive::CanDerive;
|
||||
use crate::ir::function::FunctionSig;
|
||||
use crate::ir::item::{IsOpaque, Item};
|
||||
use crate::ir::layout::Layout;
|
||||
use crate::ir::template::TemplateParameters;
|
||||
use crate::ir::traversal::{EdgeKind, Trace};
|
||||
use crate::ir::ty::RUST_DERIVE_IN_ARRAY_LIMIT;
|
||||
|
@ -16,7 +17,7 @@ use crate::ir::ty::{Type, TypeKind};
|
|||
use crate::{Entry, HashMap, HashSet};
|
||||
|
||||
/// Which trait to consider when doing the `CannotDerive` analysis.
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
|
||||
pub enum DeriveTrait {
|
||||
/// The `Copy` trait.
|
||||
Copy,
|
||||
|
@ -138,15 +139,28 @@ impl<'ctx> CannotDerive<'ctx> {
|
|||
}
|
||||
|
||||
fn constrain_type(&mut self, item: &Item, ty: &Type) -> CanDerive {
|
||||
if !self.ctx.whitelisted_items().contains(&item.id()) {
|
||||
trace!(
|
||||
" cannot derive {} for blacklisted type",
|
||||
self.derive_trait
|
||||
);
|
||||
return CanDerive::No;
|
||||
if !self.ctx.allowlisted_items().contains(&item.id()) {
|
||||
let can_derive = self
|
||||
.ctx
|
||||
.blocklisted_type_implements_trait(item, self.derive_trait);
|
||||
match can_derive {
|
||||
CanDerive::Yes => trace!(
|
||||
" blocklisted type explicitly implements {}",
|
||||
self.derive_trait
|
||||
),
|
||||
CanDerive::Manually => trace!(
|
||||
" blocklisted type requires manual implementation of {}",
|
||||
self.derive_trait
|
||||
),
|
||||
CanDerive::No => trace!(
|
||||
" cannot derive {} for blocklisted type",
|
||||
self.derive_trait
|
||||
),
|
||||
}
|
||||
return can_derive;
|
||||
}
|
||||
|
||||
if self.derive_trait.not_by_name(self.ctx, &item) {
|
||||
if self.derive_trait.not_by_name(self.ctx, item) {
|
||||
trace!(
|
||||
" cannot derive {} for explicitly excluded type",
|
||||
self.derive_trait
|
||||
|
@ -210,13 +224,13 @@ impl<'ctx> CannotDerive<'ctx> {
|
|||
let inner_type =
|
||||
self.ctx.resolve_type(inner).canonical_type(self.ctx);
|
||||
if let TypeKind::Function(ref sig) = *inner_type.kind() {
|
||||
return self.derive_trait.can_derive_fnptr(sig);
|
||||
self.derive_trait.can_derive_fnptr(sig)
|
||||
} else {
|
||||
return self.derive_trait.can_derive_pointer();
|
||||
self.derive_trait.can_derive_pointer()
|
||||
}
|
||||
}
|
||||
TypeKind::Function(ref sig) => {
|
||||
return self.derive_trait.can_derive_fnptr(sig)
|
||||
self.derive_trait.can_derive_fnptr(sig)
|
||||
}
|
||||
|
||||
// Complex cases need more information
|
||||
|
@ -242,7 +256,7 @@ impl<'ctx> CannotDerive<'ctx> {
|
|||
return CanDerive::No;
|
||||
}
|
||||
|
||||
if self.derive_trait.can_derive_large_array() {
|
||||
if self.derive_trait.can_derive_large_array(self.ctx) {
|
||||
trace!(" array can derive {}", self.derive_trait);
|
||||
return CanDerive::Yes;
|
||||
}
|
||||
|
@ -257,7 +271,7 @@ impl<'ctx> CannotDerive<'ctx> {
|
|||
" array is small enough to derive {}",
|
||||
self.derive_trait
|
||||
);
|
||||
return CanDerive::Yes;
|
||||
CanDerive::Yes
|
||||
}
|
||||
TypeKind::Vector(t, len) => {
|
||||
let inner_type =
|
||||
|
@ -272,7 +286,7 @@ impl<'ctx> CannotDerive<'ctx> {
|
|||
return CanDerive::No;
|
||||
}
|
||||
assert_ne!(len, 0, "vectors cannot have zero length");
|
||||
return self.derive_trait.can_derive_vector();
|
||||
self.derive_trait.can_derive_vector()
|
||||
}
|
||||
|
||||
TypeKind::Comp(ref info) => {
|
||||
|
@ -364,7 +378,7 @@ impl<'ctx> CannotDerive<'ctx> {
|
|||
// Bitfield units are always represented as arrays of u8, but
|
||||
// they're not traced as arrays, so we need to check here
|
||||
// instead.
|
||||
if !self.derive_trait.can_derive_large_array() &&
|
||||
if !self.derive_trait.can_derive_large_array(self.ctx) &&
|
||||
info.has_too_large_bitfield_unit() &&
|
||||
!item.is_opaque(self.ctx, &())
|
||||
{
|
||||
|
@ -376,7 +390,7 @@ impl<'ctx> CannotDerive<'ctx> {
|
|||
}
|
||||
|
||||
let pred = self.derive_trait.consider_edge_comp();
|
||||
return self.constrain_join(item, pred);
|
||||
self.constrain_join(item, pred)
|
||||
}
|
||||
|
||||
TypeKind::ResolvedTypeRef(..) |
|
||||
|
@ -384,12 +398,12 @@ impl<'ctx> CannotDerive<'ctx> {
|
|||
TypeKind::Alias(..) |
|
||||
TypeKind::BlockPointer(..) => {
|
||||
let pred = self.derive_trait.consider_edge_typeref();
|
||||
return self.constrain_join(item, pred);
|
||||
self.constrain_join(item, pred)
|
||||
}
|
||||
|
||||
TypeKind::TemplateInstantiation(..) => {
|
||||
let pred = self.derive_trait.consider_edge_tmpl_inst();
|
||||
return self.constrain_join(item, pred);
|
||||
self.constrain_join(item, pred)
|
||||
}
|
||||
|
||||
TypeKind::Opaque => unreachable!(
|
||||
|
@ -457,10 +471,7 @@ impl DeriveTrait {
|
|||
fn consider_edge_comp(&self) -> EdgePredicate {
|
||||
match self {
|
||||
DeriveTrait::PartialEqOrPartialOrd => consider_edge_default,
|
||||
_ => |kind| match kind {
|
||||
EdgeKind::BaseMember | EdgeKind::Field => true,
|
||||
_ => false,
|
||||
},
|
||||
_ => |kind| matches!(kind, EdgeKind::BaseMember | EdgeKind::Field),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -483,48 +494,37 @@ impl DeriveTrait {
|
|||
}
|
||||
}
|
||||
|
||||
fn can_derive_large_array(&self) -> bool {
|
||||
match self {
|
||||
DeriveTrait::Copy => true,
|
||||
_ => false,
|
||||
fn can_derive_large_array(&self, ctx: &BindgenContext) -> bool {
|
||||
if ctx.options().rust_features().larger_arrays {
|
||||
!matches!(self, DeriveTrait::Default)
|
||||
} else {
|
||||
matches!(self, DeriveTrait::Copy)
|
||||
}
|
||||
}
|
||||
|
||||
fn can_derive_union(&self) -> bool {
|
||||
match self {
|
||||
DeriveTrait::Copy => true,
|
||||
_ => false,
|
||||
}
|
||||
matches!(self, DeriveTrait::Copy)
|
||||
}
|
||||
|
||||
fn can_derive_compound_with_destructor(&self) -> bool {
|
||||
match self {
|
||||
DeriveTrait::Copy => false,
|
||||
_ => true,
|
||||
}
|
||||
!matches!(self, DeriveTrait::Copy)
|
||||
}
|
||||
|
||||
fn can_derive_compound_with_vtable(&self) -> bool {
|
||||
match self {
|
||||
DeriveTrait::Default => false,
|
||||
_ => true,
|
||||
}
|
||||
!matches!(self, DeriveTrait::Default)
|
||||
}
|
||||
|
||||
fn can_derive_compound_forward_decl(&self) -> bool {
|
||||
match self {
|
||||
DeriveTrait::Copy | DeriveTrait::Debug => true,
|
||||
_ => false,
|
||||
}
|
||||
matches!(self, DeriveTrait::Copy | DeriveTrait::Debug)
|
||||
}
|
||||
|
||||
fn can_derive_incomplete_array(&self) -> bool {
|
||||
match self {
|
||||
!matches!(
|
||||
self,
|
||||
DeriveTrait::Copy |
|
||||
DeriveTrait::Hash |
|
||||
DeriveTrait::PartialEqOrPartialOrd => false,
|
||||
_ => true,
|
||||
}
|
||||
DeriveTrait::Hash |
|
||||
DeriveTrait::PartialEqOrPartialOrd
|
||||
)
|
||||
}
|
||||
|
||||
fn can_derive_fnptr(&self, f: &FunctionSig) -> CanDerive {
|
||||
|
@ -640,10 +640,10 @@ impl<'ctx> MonotoneFramework for CannotDerive<'ctx> {
|
|||
}
|
||||
|
||||
fn initial_worklist(&self) -> Vec<ItemId> {
|
||||
// The transitive closure of all whitelisted items, including explicitly
|
||||
// blacklisted items.
|
||||
// The transitive closure of all allowlisted items, including explicitly
|
||||
// blocklisted items.
|
||||
self.ctx
|
||||
.whitelisted_items()
|
||||
.allowlisted_items()
|
||||
.iter()
|
||||
.cloned()
|
||||
.flat_map(|i| {
|
||||
|
@ -673,10 +673,10 @@ impl<'ctx> MonotoneFramework for CannotDerive<'ctx> {
|
|||
Some(ty) => {
|
||||
let mut can_derive = self.constrain_type(item, ty);
|
||||
if let CanDerive::Yes = can_derive {
|
||||
if !self.derive_trait.can_derive_large_array() &&
|
||||
ty.layout(self.ctx).map_or(false, |l| {
|
||||
l.align > RUST_DERIVE_IN_ARRAY_LIMIT
|
||||
})
|
||||
let is_reached_limit =
|
||||
|l: Layout| l.align > RUST_DERIVE_IN_ARRAY_LIMIT;
|
||||
if !self.derive_trait.can_derive_large_array(self.ctx) &&
|
||||
ty.layout(self.ctx).map_or(false, is_reached_limit)
|
||||
{
|
||||
// We have to be conservative: the struct *could* have enough
|
||||
// padding that we emit an array that is longer than
|
||||
|
|
|
@ -41,16 +41,16 @@ pub struct HasDestructorAnalysis<'ctx> {

impl<'ctx> HasDestructorAnalysis<'ctx> {
fn consider_edge(kind: EdgeKind) -> bool {
match kind {
// These are the only edges that can affect whether a type has a
// destructor or not.
// These are the only edges that can affect whether a type has a
// destructor or not.
matches!(
kind,
EdgeKind::TypeReference |
EdgeKind::BaseMember |
EdgeKind::Field |
EdgeKind::TemplateArgument |
EdgeKind::TemplateDeclaration => true,
_ => false,
}
EdgeKind::BaseMember |
EdgeKind::Field |
EdgeKind::TemplateArgument |
EdgeKind::TemplateDeclaration
)
}

fn insert<Id: Into<ItemId>>(&mut self, id: Id) -> ConstrainResult {

@ -83,7 +83,7 @@ impl<'ctx> MonotoneFramework for HasDestructorAnalysis<'ctx> {
}

fn initial_worklist(&self) -> Vec<ItemId> {
self.ctx.whitelisted_items().iter().cloned().collect()
self.ctx.allowlisted_items().iter().cloned().collect()
}

fn constrain(&mut self, id: ItemId) -> ConstrainResult {

@ -94,7 +94,7 @@ impl<'ctx> MonotoneFramework for HasFloat<'ctx> {
}

fn initial_worklist(&self) -> Vec<ItemId> {
self.ctx.whitelisted_items().iter().cloned().collect()
self.ctx.allowlisted_items().iter().cloned().collect()
}

fn constrain(&mut self, id: ItemId) -> ConstrainResult {

@ -100,7 +100,7 @@ impl<'ctx> MonotoneFramework for HasTypeParameterInArray<'ctx> {
}

fn initial_worklist(&self) -> Vec<ItemId> {
self.ctx.whitelisted_items().iter().cloned().collect()
self.ctx.allowlisted_items().iter().cloned().collect()
}

fn constrain(&mut self, id: ItemId) -> ConstrainResult {

@ -79,14 +79,14 @@ pub struct HasVtableAnalysis<'ctx> {

impl<'ctx> HasVtableAnalysis<'ctx> {
fn consider_edge(kind: EdgeKind) -> bool {
match kind {
// These are the only edges that can affect whether a type has a
// vtable or not.
// These are the only edges that can affect whether a type has a
// vtable or not.
matches!(
kind,
EdgeKind::TypeReference |
EdgeKind::BaseMember |
EdgeKind::TemplateDeclaration => true,
_ => false,
}
EdgeKind::BaseMember |
EdgeKind::TemplateDeclaration
)
}

fn insert<Id: Into<ItemId>>(

@ -147,7 +147,7 @@ impl<'ctx> MonotoneFramework for HasVtableAnalysis<'ctx> {
}

fn initial_worklist(&self) -> Vec<ItemId> {
self.ctx.whitelisted_items().iter().cloned().collect()
self.ctx.allowlisted_items().iter().cloned().collect()
}

fn constrain(&mut self, id: ItemId) -> ConstrainResult {

@ -183,8 +183,8 @@ where
{
let mut dependencies = HashMap::default();

for &item in ctx.whitelisted_items() {
dependencies.entry(item).or_insert(vec![]);
for &item in ctx.allowlisted_items() {
dependencies.entry(item).or_insert_with(Vec::new);

{
// We reverse our natural IR graph edges to find dependencies

@ -192,12 +192,12 @@ where
item.trace(
ctx,
&mut |sub_item: ItemId, edge_kind| {
if ctx.whitelisted_items().contains(&sub_item) &&
if ctx.allowlisted_items().contains(&sub_item) &&
consider_edge(edge_kind)
{
dependencies
.entry(sub_item)
.or_insert(vec![])
.or_insert_with(Vec::new)
.push(item);
}
},

@ -112,17 +112,17 @@ pub struct SizednessAnalysis<'ctx> {

impl<'ctx> SizednessAnalysis<'ctx> {
fn consider_edge(kind: EdgeKind) -> bool {
match kind {
// These are the only edges that can affect whether a type is
// zero-sized or not.
// These are the only edges that can affect whether a type is
// zero-sized or not.
matches!(
kind,
EdgeKind::TemplateArgument |
EdgeKind::TemplateParameterDefinition |
EdgeKind::TemplateDeclaration |
EdgeKind::TypeReference |
EdgeKind::BaseMember |
EdgeKind::Field => true,
_ => false,
}
EdgeKind::TemplateParameterDefinition |
EdgeKind::TemplateDeclaration |
EdgeKind::TypeReference |
EdgeKind::BaseMember |
EdgeKind::Field
)
}

/// Insert an incremental result, and return whether this updated our

@ -194,7 +194,7 @@ impl<'ctx> MonotoneFramework for SizednessAnalysis<'ctx> {

fn initial_worklist(&self) -> Vec<TypeId> {
self.ctx
.whitelisted_items()
.allowlisted_items()
.iter()
.cloned()
.filter_map(|id| id.as_type_id(self.ctx))

@ -137,13 +137,13 @@ use crate::{HashMap, HashSet};
|
|||
/// analysis. If we didn't, then we would mistakenly determine that ever
|
||||
/// template parameter is always used.
|
||||
///
|
||||
/// The final wrinkle is handling of blacklisted types. Normally, we say that
|
||||
/// the set of whitelisted items is the transitive closure of items explicitly
|
||||
/// called out for whitelisting, *without* any items explicitly called out as
|
||||
/// blacklisted. However, for the purposes of this analysis's correctness, we
|
||||
/// The final wrinkle is handling of blocklisted types. Normally, we say that
|
||||
/// the set of allowlisted items is the transitive closure of items explicitly
|
||||
/// called out for allowlisting, *without* any items explicitly called out as
|
||||
/// blocklisted. However, for the purposes of this analysis's correctness, we
|
||||
/// simplify and consider run the analysis on the full transitive closure of
|
||||
/// whitelisted items. We do, however, treat instantiations of blacklisted items
|
||||
/// specially; see `constrain_instantiation_of_blacklisted_template` and its
|
||||
/// allowlisted items. We do, however, treat instantiations of blocklisted items
|
||||
/// specially; see `constrain_instantiation_of_blocklisted_template` and its
|
||||
/// documentation for details.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct UsedTemplateParameters<'ctx> {
|
||||
|
@ -155,10 +155,10 @@ pub struct UsedTemplateParameters<'ctx> {
|
|||
|
||||
dependencies: HashMap<ItemId, Vec<ItemId>>,
|
||||
|
||||
// The set of whitelisted items, without any blacklisted items reachable
|
||||
// from the whitelisted items which would otherwise be considered
|
||||
// whitelisted as well.
|
||||
whitelisted_items: HashSet<ItemId>,
|
||||
// The set of allowlisted items, without any blocklisted items reachable
|
||||
// from the allowlisted items which would otherwise be considered
|
||||
// allowlisted as well.
|
||||
allowlisted_items: HashSet<ItemId>,
|
||||
}
|
||||
|
||||
impl<'ctx> UsedTemplateParameters<'ctx> {
|
||||
|
@ -221,25 +221,25 @@ impl<'ctx> UsedTemplateParameters<'ctx> {
|
|||
)
|
||||
}
|
||||
|
||||
/// We say that blacklisted items use all of their template parameters. The
|
||||
/// blacklisted type is most likely implemented explicitly by the user,
|
||||
/// We say that blocklisted items use all of their template parameters. The
|
||||
/// blocklisted type is most likely implemented explicitly by the user,
|
||||
/// since it won't be in the generated bindings, and we don't know exactly
|
||||
/// what they'll to with template parameters, but we can push the issue down
|
||||
/// the line to them.
|
||||
fn constrain_instantiation_of_blacklisted_template(
|
||||
fn constrain_instantiation_of_blocklisted_template(
|
||||
&self,
|
||||
this_id: ItemId,
|
||||
used_by_this_id: &mut ItemSet,
|
||||
instantiation: &TemplateInstantiation,
|
||||
) {
|
||||
trace!(
|
||||
" instantiation of blacklisted template, uses all template \
|
||||
" instantiation of blocklisted template, uses all template \
|
||||
arguments"
|
||||
);
|
||||
|
||||
let args = instantiation
|
||||
.template_arguments()
|
||||
.into_iter()
|
||||
.iter()
|
||||
.map(|a| {
|
||||
a.into_resolver()
|
||||
.through_type_refs()
|
||||
|
@ -379,10 +379,10 @@ impl<'ctx> MonotoneFramework for UsedTemplateParameters<'ctx> {
|
|||
fn new(ctx: &'ctx BindgenContext) -> UsedTemplateParameters<'ctx> {
|
||||
let mut used = HashMap::default();
|
||||
let mut dependencies = HashMap::default();
|
||||
let whitelisted_items: HashSet<_> =
|
||||
ctx.whitelisted_items().iter().cloned().collect();
|
||||
let allowlisted_items: HashSet<_> =
|
||||
ctx.allowlisted_items().iter().cloned().collect();
|
||||
|
||||
let whitelisted_and_blacklisted_items: ItemSet = whitelisted_items
|
||||
let allowlisted_and_blocklisted_items: ItemSet = allowlisted_items
|
||||
.iter()
|
||||
.cloned()
|
||||
.flat_map(|i| {
|
||||
|
@ -398,9 +398,9 @@ impl<'ctx> MonotoneFramework for UsedTemplateParameters<'ctx> {
|
|||
})
|
||||
.collect();
|
||||
|
||||
for item in whitelisted_and_blacklisted_items {
|
||||
dependencies.entry(item).or_insert(vec![]);
|
||||
used.entry(item).or_insert(Some(ItemSet::new()));
|
||||
for item in allowlisted_and_blocklisted_items {
|
||||
dependencies.entry(item).or_insert_with(Vec::new);
|
||||
used.entry(item).or_insert_with(|| Some(ItemSet::new()));
|
||||
|
||||
{
|
||||
// We reverse our natural IR graph edges to find dependencies
|
||||
|
@ -408,10 +408,11 @@ impl<'ctx> MonotoneFramework for UsedTemplateParameters<'ctx> {
|
|||
item.trace(
|
||||
ctx,
|
||||
&mut |sub_item: ItemId, _| {
|
||||
used.entry(sub_item).or_insert(Some(ItemSet::new()));
|
||||
used.entry(sub_item)
|
||||
.or_insert_with(|| Some(ItemSet::new()));
|
||||
dependencies
|
||||
.entry(sub_item)
|
||||
.or_insert(vec![])
|
||||
.or_insert_with(Vec::new)
|
||||
.push(item);
|
||||
},
|
||||
&(),
|
||||
|
@ -421,53 +422,56 @@ impl<'ctx> MonotoneFramework for UsedTemplateParameters<'ctx> {
|
|||
// Additionally, whether a template instantiation's template
|
||||
// arguments are used depends on whether the template declaration's
|
||||
// generic template parameters are used.
|
||||
ctx.resolve_item(item).as_type().map(|ty| match ty.kind() {
|
||||
&TypeKind::TemplateInstantiation(ref inst) => {
|
||||
let decl = ctx.resolve_type(inst.template_definition());
|
||||
let args = inst.template_arguments();
|
||||
let item_kind =
|
||||
ctx.resolve_item(item).as_type().map(|ty| ty.kind());
|
||||
if let Some(&TypeKind::TemplateInstantiation(ref inst)) = item_kind
|
||||
{
|
||||
let decl = ctx.resolve_type(inst.template_definition());
|
||||
let args = inst.template_arguments();
|
||||
|
||||
// Although template definitions should always have
|
||||
// template parameters, there is a single exception:
|
||||
// opaque templates. Hence the unwrap_or.
|
||||
let params = decl.self_template_params(ctx);
|
||||
// Although template definitions should always have
|
||||
// template parameters, there is a single exception:
|
||||
// opaque templates. Hence the unwrap_or.
|
||||
let params = decl.self_template_params(ctx);
|
||||
|
||||
for (arg, param) in args.iter().zip(params.iter()) {
|
||||
let arg = arg
|
||||
.into_resolver()
|
||||
.through_type_aliases()
|
||||
.through_type_refs()
|
||||
.resolve(ctx)
|
||||
.id();
|
||||
for (arg, param) in args.iter().zip(params.iter()) {
|
||||
let arg = arg
|
||||
.into_resolver()
|
||||
.through_type_aliases()
|
||||
.through_type_refs()
|
||||
.resolve(ctx)
|
||||
.id();
|
||||
|
||||
let param = param
|
||||
.into_resolver()
|
||||
.through_type_aliases()
|
||||
.through_type_refs()
|
||||
.resolve(ctx)
|
||||
.id();
|
||||
let param = param
|
||||
.into_resolver()
|
||||
.through_type_aliases()
|
||||
.through_type_refs()
|
||||
.resolve(ctx)
|
||||
.id();
|
||||
|
||||
used.entry(arg).or_insert(Some(ItemSet::new()));
|
||||
used.entry(param).or_insert(Some(ItemSet::new()));
|
||||
used.entry(arg).or_insert_with(|| Some(ItemSet::new()));
|
||||
used.entry(param).or_insert_with(|| Some(ItemSet::new()));
|
||||
|
||||
dependencies.entry(arg).or_insert(vec![]).push(param);
|
||||
}
|
||||
dependencies
|
||||
.entry(arg)
|
||||
.or_insert_with(Vec::new)
|
||||
.push(param);
|
||||
}
|
||||
_ => {}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if cfg!(feature = "testing_only_extra_assertions") {
|
||||
// Invariant: The `used` map has an entry for every whitelisted
|
||||
// item, as well as all explicitly blacklisted items that are
|
||||
// reachable from whitelisted items.
|
||||
// Invariant: The `used` map has an entry for every allowlisted
|
||||
// item, as well as all explicitly blocklisted items that are
|
||||
// reachable from allowlisted items.
|
||||
//
|
||||
// Invariant: the `dependencies` map has an entry for every
|
||||
// whitelisted item.
|
||||
// allowlisted item.
|
||||
//
|
||||
// (This is so that every item we call `constrain` on is guaranteed
|
||||
// to have a set of template parameters, and we can allow
|
||||
// blacklisted templates to use all of their parameters).
|
||||
for item in whitelisted_items.iter() {
|
||||
// blocklisted templates to use all of their parameters).
|
||||
for item in allowlisted_items.iter() {
|
||||
extra_assert!(used.contains_key(item));
|
||||
extra_assert!(dependencies.contains_key(item));
|
||||
item.trace(
|
||||
|
@ -482,18 +486,18 @@ impl<'ctx> MonotoneFramework for UsedTemplateParameters<'ctx> {
|
|||
}
|
||||
|
||||
UsedTemplateParameters {
|
||||
ctx: ctx,
|
||||
used: used,
|
||||
dependencies: dependencies,
|
||||
whitelisted_items: whitelisted_items,
|
||||
ctx,
|
||||
used,
|
||||
dependencies,
|
||||
allowlisted_items,
|
||||
}
|
||||
}
|
||||
|
||||
fn initial_worklist(&self) -> Vec<ItemId> {
|
||||
// The transitive closure of all whitelisted items, including explicitly
|
||||
// blacklisted items.
|
||||
// The transitive closure of all allowlisted items, including explicitly
|
||||
// blocklisted items.
|
||||
self.ctx
|
||||
.whitelisted_items()
|
||||
.allowlisted_items()
|
||||
.iter()
|
||||
.cloned()
|
||||
.flat_map(|i| {
|
||||
|
@ -538,7 +542,7 @@ impl<'ctx> MonotoneFramework for UsedTemplateParameters<'ctx> {
|
|||
// template definition uses the corresponding template parameter.
|
||||
Some(&TypeKind::TemplateInstantiation(ref inst)) => {
|
||||
if self
|
||||
.whitelisted_items
|
||||
.allowlisted_items
|
||||
.contains(&inst.template_definition().into())
|
||||
{
|
||||
self.constrain_instantiation(
|
||||
|
@ -547,7 +551,7 @@ impl<'ctx> MonotoneFramework for UsedTemplateParameters<'ctx> {
|
|||
inst,
|
||||
);
|
||||
} else {
|
||||
self.constrain_instantiation_of_blacklisted_template(
|
||||
self.constrain_instantiation_of_blocklisted_template(
|
||||
id,
|
||||
&mut used_by_this_id,
|
||||
inst,
|
||||
|
|
|
@ -25,7 +25,7 @@ pub enum FieldAccessorKind {
|
|||
/// documentation:
|
||||
///
|
||||
/// http://www.stack.nl/~dimitri/doxygen/manual/docblocks.html
|
||||
#[derive(Clone, PartialEq, Debug)]
|
||||
#[derive(Default, Clone, PartialEq, Debug)]
|
||||
pub struct Annotations {
|
||||
/// Whether this item is marked as opaque. Only applies to types.
|
||||
opaque: bool,
|
||||
|
@ -42,6 +42,8 @@ pub struct Annotations {
|
|||
disallow_debug: bool,
|
||||
/// Manually disable deriving/implement default on this type.
|
||||
disallow_default: bool,
|
||||
/// Whether to add a #[must_use] annotation to this type.
|
||||
must_use_type: bool,
|
||||
/// Whether fields should be marked as private or not. You can set this on
|
||||
/// structs (it will apply to all the fields), or individual fields.
|
||||
private_fields: Option<bool>,
|
||||
|
@ -75,23 +77,6 @@ fn parse_accessor(s: &str) -> FieldAccessorKind {
|
|||
}
|
||||
}
|
||||
|
||||
impl Default for Annotations {
|
||||
fn default() -> Self {
|
||||
Annotations {
|
||||
opaque: false,
|
||||
hide: false,
|
||||
use_instead_of: None,
|
||||
disallow_copy: false,
|
||||
disallow_debug: false,
|
||||
disallow_default: false,
|
||||
private_fields: None,
|
||||
accessor_kind: None,
|
||||
constify_enum_variant: false,
|
||||
derives: vec![],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Annotations {
|
||||
/// Construct new annotations for the given cursor and its bindgen comments
|
||||
/// (if any).
|
||||
|
@ -140,7 +125,7 @@ impl Annotations {
|
|||
///
|
||||
/// That is, code for `Foo` is used to generate `Bar`.
|
||||
pub fn use_instead_of(&self) -> Option<&[String]> {
|
||||
self.use_instead_of.as_ref().map(|s| &**s)
|
||||
self.use_instead_of.as_deref()
|
||||
}
|
||||
|
||||
/// The list of derives that have been specified in this annotation.
|
||||
|
@ -163,6 +148,11 @@ impl Annotations {
|
|||
self.disallow_default
|
||||
}
|
||||
|
||||
/// Should this type get a `#[must_use]` annotation?
|
||||
pub fn must_use_type(&self) -> bool {
|
||||
self.must_use_type
|
||||
}
|
||||
|
||||
/// Should the fields be private?
|
||||
pub fn private_fields(&self) -> Option<bool> {
|
||||
self.private_fields
|
||||
|
@ -190,6 +180,7 @@ impl Annotations {
|
|||
"nocopy" => self.disallow_copy = true,
|
||||
"nodebug" => self.disallow_debug = true,
|
||||
"nodefault" => self.disallow_default = true,
|
||||
"mustusetype" => self.must_use_type = true,
|
||||
"replaces" => {
|
||||
self.use_instead_of = Some(
|
||||
attr.value.split("::").map(Into::into).collect(),
|
||||
|
|
|
@ -1,7 +1,5 @@
//! Utilities for manipulating C/C++ comments.

use std::iter;

/// The type of a comment.
#[derive(Debug, PartialEq, Eq)]
enum Kind {

@ -15,7 +13,7 @@ enum Kind {

/// Preprocesses a C/C++ comment so that it is a valid Rust comment.
pub fn preprocess(comment: &str, indent: usize) -> String {
match self::kind(&comment) {
match self::kind(comment) {
Some(Kind::SingleLines) => preprocess_single_lines(comment, indent),
Some(Kind::MultiLine) => preprocess_multi_line(comment, indent),
None => comment.to_owned(),

@ -35,7 +33,7 @@ fn kind(comment: &str) -> Option<Kind> {

fn make_indent(indent: usize) -> String {
const RUST_INDENTATION: usize = 4;
iter::repeat(' ').take(indent * RUST_INDENTATION).collect()
" ".repeat(indent * RUST_INDENTATION)
}

/// Preprocesses multiple single line comments.

@ -111,11 +111,10 @@ impl Method {
|
|||
|
||||
/// Is this a virtual method?
|
||||
pub fn is_virtual(&self) -> bool {
|
||||
match self.kind {
|
||||
MethodKind::Virtual { .. } |
|
||||
MethodKind::VirtualDestructor { .. } => true,
|
||||
_ => false,
|
||||
}
|
||||
matches!(
|
||||
self.kind,
|
||||
MethodKind::Virtual { .. } | MethodKind::VirtualDestructor { .. }
|
||||
)
|
||||
}
|
||||
|
||||
/// Is this a static method?
|
||||
|
@ -148,8 +147,8 @@ pub trait FieldMethods {
|
|||
/// If this is a bitfield, how many bits does it need?
|
||||
fn bitfield_width(&self) -> Option<u32>;
|
||||
|
||||
/// Is this field marked as `mutable`?
|
||||
fn is_mutable(&self) -> bool;
|
||||
/// Is this feild declared public?
|
||||
fn is_public(&self) -> bool;
|
||||
|
||||
/// Get the annotations for this field.
|
||||
fn annotations(&self) -> &Annotations;
|
||||
|
@ -415,8 +414,8 @@ impl FieldMethods for Bitfield {
|
|||
self.data.bitfield_width()
|
||||
}
|
||||
|
||||
fn is_mutable(&self) -> bool {
|
||||
self.data.is_mutable()
|
||||
fn is_public(&self) -> bool {
|
||||
self.data.is_public()
|
||||
}
|
||||
|
||||
fn annotations(&self) -> &Annotations {
|
||||
|
@ -443,7 +442,7 @@ impl RawField {
|
|||
comment: Option<String>,
|
||||
annotations: Option<Annotations>,
|
||||
bitfield_width: Option<u32>,
|
||||
mutable: bool,
|
||||
public: bool,
|
||||
offset: Option<usize>,
|
||||
) -> RawField {
|
||||
RawField(FieldData {
|
||||
|
@ -452,7 +451,7 @@ impl RawField {
|
|||
comment,
|
||||
annotations: annotations.unwrap_or_default(),
|
||||
bitfield_width,
|
||||
mutable,
|
||||
public,
|
||||
offset,
|
||||
})
|
||||
}
|
||||
|
@ -475,8 +474,8 @@ impl FieldMethods for RawField {
|
|||
self.0.bitfield_width()
|
||||
}
|
||||
|
||||
fn is_mutable(&self) -> bool {
|
||||
self.0.is_mutable()
|
||||
fn is_public(&self) -> bool {
|
||||
self.0.is_public()
|
||||
}
|
||||
|
||||
fn annotations(&self) -> &Annotations {
|
||||
|
@ -588,7 +587,7 @@ where
|
|||
} else {
|
||||
bytes_from_bits_pow2(unit_align_in_bits)
|
||||
};
|
||||
let size = align_to(unit_size_in_bits, align * 8) / 8;
|
||||
let size = align_to(unit_size_in_bits, 8) / 8;
|
||||
let layout = Layout::new(size, align);
|
||||
fields.extend(Some(Field::Bitfields(BitfieldUnit {
|
||||
nth: *bitfield_unit_count,
|
||||
|
@ -630,7 +629,7 @@ where
|
|||
bitfield_unit_count,
|
||||
unit_size_in_bits,
|
||||
unit_align,
|
||||
mem::replace(&mut bitfields_in_unit, vec![]),
|
||||
mem::take(&mut bitfields_in_unit),
|
||||
packed,
|
||||
);
|
||||
|
||||
|
@ -639,15 +638,12 @@ where
|
|||
offset = 0;
|
||||
unit_align = 0;
|
||||
}
|
||||
} else {
|
||||
if offset != 0 &&
|
||||
(bitfield_width == 0 ||
|
||||
(offset & (bitfield_align * 8 - 1)) +
|
||||
bitfield_width >
|
||||
bitfield_size * 8)
|
||||
{
|
||||
offset = align_to(offset, bitfield_align * 8);
|
||||
}
|
||||
} else if offset != 0 &&
|
||||
(bitfield_width == 0 ||
|
||||
(offset & (bitfield_align * 8 - 1)) + bitfield_width >
|
||||
bitfield_size * 8)
|
||||
{
|
||||
offset = align_to(offset, bitfield_align * 8);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -706,24 +702,24 @@ where
|
|||
/// after.
|
||||
#[derive(Debug)]
|
||||
enum CompFields {
|
||||
BeforeComputingBitfieldUnits(Vec<RawField>),
|
||||
AfterComputingBitfieldUnits {
|
||||
Before(Vec<RawField>),
|
||||
After {
|
||||
fields: Vec<Field>,
|
||||
has_bitfield_units: bool,
|
||||
},
|
||||
ErrorComputingBitfieldUnits,
|
||||
Error,
|
||||
}
|
||||
|
||||
impl Default for CompFields {
|
||||
fn default() -> CompFields {
|
||||
CompFields::BeforeComputingBitfieldUnits(vec![])
|
||||
CompFields::Before(vec![])
|
||||
}
|
||||
}
|
||||
|
||||
impl CompFields {
|
||||
fn append_raw_field(&mut self, raw: RawField) {
|
||||
match *self {
|
||||
CompFields::BeforeComputingBitfieldUnits(ref mut raws) => {
|
||||
CompFields::Before(ref mut raws) => {
|
||||
raws.push(raw);
|
||||
}
|
||||
_ => {
|
||||
|
@ -736,9 +732,7 @@ impl CompFields {
|
|||
|
||||
fn compute_bitfield_units(&mut self, ctx: &BindgenContext, packed: bool) {
|
||||
let raws = match *self {
|
||||
CompFields::BeforeComputingBitfieldUnits(ref mut raws) => {
|
||||
mem::replace(raws, vec![])
|
||||
}
|
||||
CompFields::Before(ref mut raws) => mem::take(raws),
|
||||
_ => {
|
||||
panic!("Already computed bitfield units");
|
||||
}
|
||||
|
@ -748,25 +742,23 @@ impl CompFields {
|
|||
|
||||
match result {
|
||||
Ok((fields, has_bitfield_units)) => {
|
||||
*self = CompFields::AfterComputingBitfieldUnits {
|
||||
*self = CompFields::After {
|
||||
fields,
|
||||
has_bitfield_units,
|
||||
};
|
||||
}
|
||||
Err(()) => {
|
||||
*self = CompFields::ErrorComputingBitfieldUnits;
|
||||
*self = CompFields::Error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn deanonymize_fields(&mut self, ctx: &BindgenContext, methods: &[Method]) {
|
||||
let fields = match *self {
|
||||
CompFields::AfterComputingBitfieldUnits {
|
||||
ref mut fields, ..
|
||||
} => fields,
|
||||
CompFields::After { ref mut fields, .. } => fields,
|
||||
// Nothing to do here.
|
||||
CompFields::ErrorComputingBitfieldUnits => return,
|
||||
CompFields::BeforeComputingBitfieldUnits(_) => {
|
||||
CompFields::Error => return,
|
||||
CompFields::Before(_) => {
|
||||
panic!("Not yet computed bitfield units.");
|
||||
}
|
||||
};
|
||||
|
@ -778,7 +770,7 @@ impl CompFields {
|
|||
) -> bool {
|
||||
methods.iter().any(|method| {
|
||||
let method_name = ctx.resolve_func(method.signature()).name();
|
||||
method_name == name || ctx.rust_mangle(&method_name) == name
|
||||
method_name == name || ctx.rust_mangle(method_name) == name
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -820,7 +812,7 @@ impl CompFields {
|
|||
for field in fields.iter_mut() {
|
||||
match *field {
|
||||
Field::DataMember(FieldData { ref mut name, .. }) => {
|
||||
if let Some(_) = *name {
|
||||
if name.is_some() {
|
||||
continue;
|
||||
}
|
||||
|
||||
|
@ -858,13 +850,13 @@ impl Trace for CompFields {
|
|||
T: Tracer,
|
||||
{
|
||||
match *self {
|
||||
CompFields::ErrorComputingBitfieldUnits => {}
|
||||
CompFields::BeforeComputingBitfieldUnits(ref fields) => {
|
||||
CompFields::Error => {}
|
||||
CompFields::Before(ref fields) => {
|
||||
for f in fields {
|
||||
tracer.visit_kind(f.ty().into(), EdgeKind::Field);
|
||||
}
|
||||
}
|
||||
CompFields::AfterComputingBitfieldUnits { ref fields, .. } => {
|
||||
CompFields::After { ref fields, .. } => {
|
||||
for f in fields {
|
||||
f.trace(context, tracer, &());
|
||||
}
|
||||
|
@ -891,8 +883,8 @@ pub struct FieldData {
|
|||
/// If this field is a bitfield, and how many bits does it contain if it is.
|
||||
bitfield_width: Option<u32>,
|
||||
|
||||
/// If the C++ field is marked as `mutable`
|
||||
mutable: bool,
|
||||
/// If the C++ field is declared `public`
|
||||
public: bool,
|
||||
|
||||
/// The offset of the field (in bits)
|
||||
offset: Option<usize>,
|
||||
|
@ -900,7 +892,7 @@ pub struct FieldData {
|
|||
|
||||
impl FieldMethods for FieldData {
|
||||
fn name(&self) -> Option<&str> {
|
||||
self.name.as_ref().map(|n| &**n)
|
||||
self.name.as_deref()
|
||||
}
|
||||
|
||||
fn ty(&self) -> TypeId {
|
||||
|
@ -908,15 +900,15 @@ impl FieldMethods for FieldData {
|
|||
}
|
||||
|
||||
fn comment(&self) -> Option<&str> {
|
||||
self.comment.as_ref().map(|c| &**c)
|
||||
self.comment.as_deref()
|
||||
}
|
||||
|
||||
fn bitfield_width(&self) -> Option<u32> {
|
||||
self.bitfield_width
|
||||
}
|
||||
|
||||
fn is_mutable(&self) -> bool {
|
||||
self.mutable
|
||||
fn is_public(&self) -> bool {
|
||||
self.public
|
||||
}
|
||||
|
||||
fn annotations(&self) -> &Annotations {
|
||||
|
@ -954,6 +946,8 @@ pub struct Base {
|
|||
pub kind: BaseKind,
|
||||
/// Name of the field in which this base should be stored.
|
||||
pub field_name: String,
|
||||
/// Whether this base is inherited from publically.
|
||||
pub is_pub: bool,
|
||||
}
|
||||
|
||||
impl Base {
|
||||
|
@ -981,6 +975,11 @@ impl Base {
|
|||
|
||||
true
|
||||
}
|
||||
|
||||
/// Whether this base is inherited from publically.
|
||||
pub fn is_public(&self) -> bool {
|
||||
self.is_pub
|
||||
}
|
||||
}
|
||||
|
||||
/// A compound type.
|
||||
|
@ -1106,21 +1105,17 @@ impl CompInfo {
|
|||
}
|
||||
|
||||
// empty union case
|
||||
if self.fields().is_empty() {
|
||||
if !self.has_fields() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let mut max_size = 0;
|
||||
// Don't allow align(0)
|
||||
let mut max_align = 1;
|
||||
for field in self.fields() {
|
||||
let field_layout = field.layout(ctx);
|
||||
|
||||
if let Some(layout) = field_layout {
|
||||
max_size = cmp::max(max_size, layout.size);
|
||||
max_align = cmp::max(max_align, layout.align);
|
||||
}
|
||||
}
|
||||
self.each_known_field_layout(ctx, |layout| {
|
||||
max_size = cmp::max(max_size, layout.size);
|
||||
max_align = cmp::max(max_align, layout.align);
|
||||
});
|
||||
|
||||
Some(Layout::new(max_size, max_align))
|
||||
}
|
||||
|
@ -1128,24 +1123,54 @@ impl CompInfo {
|
|||
/// Get this type's set of fields.
|
||||
pub fn fields(&self) -> &[Field] {
|
||||
match self.fields {
|
||||
CompFields::ErrorComputingBitfieldUnits => &[],
|
||||
CompFields::AfterComputingBitfieldUnits { ref fields, .. } => {
|
||||
fields
|
||||
}
|
||||
CompFields::BeforeComputingBitfieldUnits(_) => {
|
||||
CompFields::Error => &[],
|
||||
CompFields::After { ref fields, .. } => fields,
|
||||
CompFields::Before(..) => {
|
||||
panic!("Should always have computed bitfield units first");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn has_fields(&self) -> bool {
|
||||
match self.fields {
|
||||
CompFields::Error => false,
|
||||
CompFields::After { ref fields, .. } => !fields.is_empty(),
|
||||
CompFields::Before(ref raw_fields) => !raw_fields.is_empty(),
|
||||
}
|
||||
}
|
||||
|
||||
fn each_known_field_layout(
|
||||
&self,
|
||||
ctx: &BindgenContext,
|
||||
mut callback: impl FnMut(Layout),
|
||||
) {
|
||||
match self.fields {
|
||||
CompFields::Error => {}
|
||||
CompFields::After { ref fields, .. } => {
|
||||
for field in fields.iter() {
|
||||
if let Some(layout) = field.layout(ctx) {
|
||||
callback(layout);
|
||||
}
|
||||
}
|
||||
}
|
||||
CompFields::Before(ref raw_fields) => {
|
||||
for field in raw_fields.iter() {
|
||||
let field_ty = ctx.resolve_type(field.0.ty);
|
||||
if let Some(layout) = field_ty.layout(ctx) {
|
||||
callback(layout);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn has_bitfields(&self) -> bool {
|
||||
match self.fields {
|
||||
CompFields::ErrorComputingBitfieldUnits => false,
|
||||
CompFields::AfterComputingBitfieldUnits {
|
||||
has_bitfield_units,
|
||||
..
|
||||
CompFields::Error => false,
|
||||
CompFields::After {
|
||||
has_bitfield_units, ..
|
||||
} => has_bitfield_units,
|
||||
CompFields::BeforeComputingBitfieldUnits(_) => {
|
||||
CompFields::Before(_) => {
|
||||
panic!("Should always have computed bitfield units first");
|
||||
}
|
||||
}
|
||||
|
@ -1242,6 +1267,7 @@ impl CompInfo {
|
|||
let mut ci = CompInfo::new(kind);
|
||||
ci.is_forward_declaration =
|
||||
location.map_or(true, |cur| match cur.kind() {
|
||||
CXCursor_ParmDecl => true,
|
||||
CXCursor_StructDecl | CXCursor_UnionDecl |
|
||||
CXCursor_ClassDecl => !cur.is_definition(),
|
||||
_ => false,
|
||||
|
@ -1250,7 +1276,7 @@ impl CompInfo {
|
|||
let mut maybe_anonymous_struct_field = None;
|
||||
cursor.visit(|cur| {
|
||||
if cur.kind() != CXCursor_FieldDecl {
|
||||
if let Some((ty, clang_ty, offset)) =
|
||||
if let Some((ty, clang_ty, public, offset)) =
|
||||
maybe_anonymous_struct_field.take()
|
||||
{
|
||||
if cur.kind() == CXCursor_TypedefDecl &&
|
||||
|
@ -1263,7 +1289,7 @@ impl CompInfo {
|
|||
// nothing.
|
||||
} else {
|
||||
let field = RawField::new(
|
||||
None, ty, None, None, None, false, offset,
|
||||
None, ty, None, None, None, public, offset,
|
||||
);
|
||||
ci.fields.append_raw_field(field);
|
||||
}
|
||||
|
@ -1272,7 +1298,7 @@ impl CompInfo {
|
|||
|
||||
match cur.kind() {
|
||||
CXCursor_FieldDecl => {
|
||||
if let Some((ty, clang_ty, offset)) =
|
||||
if let Some((ty, clang_ty, public, offset)) =
|
||||
maybe_anonymous_struct_field.take()
|
||||
{
|
||||
let mut used = false;
|
||||
|
@ -1282,9 +1308,10 @@ impl CompInfo {
|
|||
}
|
||||
CXChildVisit_Continue
|
||||
});
|
||||
|
||||
if !used {
|
||||
let field = RawField::new(
|
||||
None, ty, None, None, None, false, offset,
|
||||
None, ty, None, None, None, public, offset,
|
||||
);
|
||||
ci.fields.append_raw_field(field);
|
||||
}
|
||||
|
@ -1301,7 +1328,7 @@ impl CompInfo {
|
|||
let comment = cur.raw_comment();
|
||||
let annotations = Annotations::new(&cur);
|
||||
let name = cur.spelling();
|
||||
let is_mutable = cursor.is_mutable_field();
|
||||
let is_public = cur.public_accessible();
|
||||
let offset = cur.offset_of_field().ok();
|
||||
|
||||
// Name can be empty if there are bitfields, for example,
|
||||
|
@ -1319,7 +1346,7 @@ impl CompInfo {
|
|||
comment,
|
||||
annotations,
|
||||
bit_width,
|
||||
is_mutable,
|
||||
is_public,
|
||||
offset,
|
||||
);
|
||||
ci.fields.append_raw_field(field);
|
||||
|
@ -1366,19 +1393,26 @@ impl CompInfo {
|
|||
let inner = Item::parse(cur, Some(potential_id), ctx)
|
||||
.expect("Inner ClassDecl");
|
||||
|
||||
let inner = inner.expect_type_id(ctx);
|
||||
// If we avoided recursion parsing this type (in
|
||||
// `Item::from_ty_with_id()`), then this might not be a
|
||||
// valid type ID, so check and gracefully handle this.
|
||||
if ctx.resolve_item_fallible(inner).is_some() {
|
||||
let inner = inner.expect_type_id(ctx);
|
||||
|
||||
ci.inner_types.push(inner);
|
||||
ci.inner_types.push(inner);
|
||||
|
||||
// A declaration of an union or a struct without name could
|
||||
// also be an unnamed field, unfortunately.
|
||||
if cur.spelling().is_empty() &&
|
||||
cur.kind() != CXCursor_EnumDecl
|
||||
{
|
||||
let ty = cur.cur_type();
|
||||
let offset = cur.offset_of_field().ok();
|
||||
maybe_anonymous_struct_field =
|
||||
Some((inner, ty, offset));
|
||||
// A declaration of an union or a struct without name
|
||||
// could also be an unnamed field, unfortunately.
|
||||
if cur.spelling().is_empty() &&
|
||||
cur.kind() != CXCursor_EnumDecl
|
||||
{
|
||||
let ty = cur.cur_type();
|
||||
let public = cur.public_accessible();
|
||||
let offset = cur.offset_of_field().ok();
|
||||
|
||||
maybe_anonymous_struct_field =
|
||||
Some((inner, ty, public, offset));
|
||||
}
|
||||
}
|
||||
}
|
||||
CXCursor_PackedAttr => {
|
||||
|
@ -1411,6 +1445,8 @@ impl CompInfo {
|
|||
ty: type_id,
|
||||
kind,
|
||||
field_name,
|
||||
is_pub: cur.access_specifier() ==
|
||||
clang_sys::CX_CXXPublic,
|
||||
});
|
||||
}
|
||||
CXCursor_Constructor | CXCursor_Destructor |
|
||||
|
@ -1526,9 +1562,9 @@ impl CompInfo {
|
|||
CXChildVisit_Continue
|
||||
});
|
||||
|
||||
if let Some((ty, _, offset)) = maybe_anonymous_struct_field {
|
||||
if let Some((ty, _, public, offset)) = maybe_anonymous_struct_field {
|
||||
let field =
|
||||
RawField::new(None, ty, None, None, None, false, offset);
|
||||
RawField::new(None, ty, None, None, None, public, offset);
|
||||
ci.fields.append_raw_field(field);
|
||||
}
|
||||
|
||||
|
@ -1576,7 +1612,7 @@ impl CompInfo {
|
|||
pub fn is_packed(
|
||||
&self,
|
||||
ctx: &BindgenContext,
|
||||
layout: &Option<Layout>,
|
||||
layout: Option<&Layout>,
|
||||
) -> bool {
|
||||
if self.packed_attr {
|
||||
return true;
|
||||
|
@ -1584,24 +1620,18 @@ impl CompInfo {
|
|||
|
||||
// Even though `libclang` doesn't expose `#pragma packed(...)`, we can
|
||||
// detect it through its effects.
|
||||
if let Some(ref parent_layout) = *layout {
|
||||
if self.fields().iter().any(|f| match *f {
|
||||
Field::Bitfields(ref unit) => {
|
||||
unit.layout().align > parent_layout.align
|
||||
}
|
||||
Field::DataMember(ref data) => {
|
||||
let field_ty = ctx.resolve_type(data.ty());
|
||||
field_ty.layout(ctx).map_or(false, |field_ty_layout| {
|
||||
field_ty_layout.align > parent_layout.align
|
||||
})
|
||||
}
|
||||
}) {
|
||||
if let Some(parent_layout) = layout {
|
||||
let mut packed = false;
|
||||
self.each_known_field_layout(ctx, |layout| {
|
||||
packed = packed || layout.align > parent_layout.align;
|
||||
});
|
||||
if packed {
|
||||
info!("Found a struct that was defined within `#pragma packed(...)`");
|
||||
return true;
|
||||
} else if self.has_own_virtual_method {
|
||||
if parent_layout.align == 1 {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
if self.has_own_virtual_method && parent_layout.align == 1 {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1614,10 +1644,13 @@ impl CompInfo {
|
|||
}
|
||||
|
||||
/// Compute this compound structure's bitfield allocation units.
|
||||
pub fn compute_bitfield_units(&mut self, ctx: &BindgenContext) {
|
||||
// TODO(emilio): If we could detect #pragma packed here we'd fix layout
|
||||
// tests in divide-by-zero-in-struct-layout.rs
|
||||
self.fields.compute_bitfield_units(ctx, self.packed_attr)
|
||||
pub fn compute_bitfield_units(
|
||||
&mut self,
|
||||
ctx: &BindgenContext,
|
||||
layout: Option<&Layout>,
|
||||
) {
|
||||
let packed = self.is_packed(ctx, layout);
|
||||
self.fields.compute_bitfield_units(ctx, packed)
|
||||
}
|
||||
|
||||
/// Assign for each anonymous field a generated name.
|
||||
|
@ -1630,7 +1663,12 @@ impl CompInfo {
|
|||
/// Requirements:
|
||||
/// 1. Current RustTarget allows for `untagged_union`
|
||||
/// 2. Each field can derive `Copy`
|
||||
pub fn can_be_rust_union(&self, ctx: &BindgenContext) -> bool {
|
||||
/// 3. It's not zero-sized.
|
||||
pub fn can_be_rust_union(
|
||||
&self,
|
||||
ctx: &BindgenContext,
|
||||
layout: Option<&Layout>,
|
||||
) -> bool {
|
||||
if !ctx.options().rust_features().untagged_union {
|
||||
return false;
|
||||
}
|
||||
|
@ -1639,12 +1677,22 @@ impl CompInfo {
|
|||
return false;
|
||||
}
|
||||
|
||||
self.fields().iter().all(|f| match *f {
|
||||
let all_can_copy = self.fields().iter().all(|f| match *f {
|
||||
Field::DataMember(ref field_data) => {
|
||||
field_data.ty().can_derive_copy(ctx)
|
||||
}
|
||||
Field::Bitfields(_) => true,
|
||||
})
|
||||
});
|
||||
|
||||
if !all_can_copy {
|
||||
return false;
|
||||
}
|
||||
|
||||
if layout.map_or(false, |l| l.size == 0) {
|
||||
return false;
|
||||
}
|
||||
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1713,7 +1761,7 @@ impl IsOpaque for CompInfo {
|
|||
// is a type parameter), then we can't compute bitfield units. We are
|
||||
// left with no choice but to make the whole struct opaque, or else we
|
||||
// might generate structs with incorrect sizes and alignments.
|
||||
if let CompFields::ErrorComputingBitfieldUnits = self.fields {
|
||||
if let CompFields::Error = self.fields {
|
||||
return true;
|
||||
}
|
||||
|
||||
|
@ -1739,7 +1787,7 @@ impl IsOpaque for CompInfo {
|
|||
//
|
||||
// See https://github.com/rust-lang/rust-bindgen/issues/537 and
|
||||
// https://github.com/rust-lang/rust/issues/33158
|
||||
if self.is_packed(ctx, layout) &&
|
||||
if self.is_packed(ctx, layout.as_ref()) &&
|
||||
layout.map_or(false, |l| l.align > 1)
|
||||
{
|
||||
warn!("Found a type that is both packed and aligned to greater than \
|
||||
@@ -28,8 +28,8 @@ use cexpr;
 use clang_sys;
 use proc_macro2::{Ident, Span};
 use std::borrow::Cow;
-use std::cell::Cell;
-use std::collections::HashMap as StdHashMap;
+use std::cell::{Cell, RefCell};
+use std::collections::{BTreeSet, HashMap as StdHashMap};
 use std::iter::IntoIterator;
 use std::mem;
@@ -299,7 +299,7 @@ where
 /// types.
 #[derive(Eq, PartialEq, Hash, Debug)]
 enum TypeKey {
-    USR(String),
+    Usr(String),
     Declaration(Cursor),
 }
@@ -354,6 +354,9 @@ pub struct BindgenContext {
     /// This needs to be an std::HashMap because the cexpr API requires it.
     parsed_macros: StdHashMap<Vec<u8>, cexpr::expr::EvalResult>,

+    /// A set of all the included filenames.
+    deps: BTreeSet<String>,
+
     /// The active replacements collected from replaces="xxx" annotations.
     replacements: HashMap<Vec<String>, ItemId>,
@ -376,14 +379,18 @@ pub struct BindgenContext {
|
|||
/// Whether a bindgen complex was generated
|
||||
generated_bindgen_complex: Cell<bool>,
|
||||
|
||||
/// The set of `ItemId`s that are whitelisted. This the very first thing
|
||||
/// The set of `ItemId`s that are allowlisted. This the very first thing
|
||||
/// computed after parsing our IR, and before running any of our analyses.
|
||||
whitelisted: Option<ItemSet>,
|
||||
allowlisted: Option<ItemSet>,
|
||||
|
||||
/// The set of `ItemId`s that are whitelisted for code generation _and_ that
|
||||
/// Cache for calls to `ParseCallbacks::blocklisted_type_implements_trait`
|
||||
blocklisted_types_implement_traits:
|
||||
RefCell<HashMap<DeriveTrait, HashMap<ItemId, CanDerive>>>,
|
||||
|
||||
/// The set of `ItemId`s that are allowlisted for code generation _and_ that
|
||||
/// we should generate accounting for the codegen options.
|
||||
///
|
||||
/// It's computed right after computing the whitelisted items.
|
||||
/// It's computed right after computing the allowlisted items.
|
||||
codegen_items: Option<ItemSet>,
|
||||
|
||||
/// Map from an item's id to the set of template parameter items that it
|
||||
|
@ -463,8 +470,8 @@ pub struct BindgenContext {
|
|||
has_float: Option<HashSet<ItemId>>,
|
||||
}
|
||||
|
||||
/// A traversal of whitelisted items.
|
||||
struct WhitelistedItemsTraversal<'ctx> {
|
||||
/// A traversal of allowlisted items.
|
||||
struct AllowlistedItemsTraversal<'ctx> {
|
||||
ctx: &'ctx BindgenContext,
|
||||
traversal: ItemTraversal<
|
||||
'ctx,
|
||||
|
@ -474,14 +481,14 @@ struct WhitelistedItemsTraversal<'ctx> {
|
|||
>,
|
||||
}
|
||||
|
||||
impl<'ctx> Iterator for WhitelistedItemsTraversal<'ctx> {
|
||||
impl<'ctx> Iterator for AllowlistedItemsTraversal<'ctx> {
|
||||
type Item = ItemId;
|
||||
|
||||
fn next(&mut self) -> Option<ItemId> {
|
||||
loop {
|
||||
let id = self.traversal.next()?;
|
||||
|
||||
if self.ctx.resolve_item(id).is_blacklisted(self.ctx) {
|
||||
if self.ctx.resolve_item(id).is_blocklisted(self.ctx) {
|
||||
continue;
|
||||
}
|
||||
|
||||
|
@ -490,8 +497,8 @@ impl<'ctx> Iterator for WhitelistedItemsTraversal<'ctx> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<'ctx> WhitelistedItemsTraversal<'ctx> {
|
||||
/// Construct a new whitelisted items traversal.
|
||||
impl<'ctx> AllowlistedItemsTraversal<'ctx> {
|
||||
/// Construct a new allowlisted items traversal.
|
||||
pub fn new<R>(
|
||||
ctx: &'ctx BindgenContext,
|
||||
roots: R,
|
||||
|
@ -500,7 +507,7 @@ impl<'ctx> WhitelistedItemsTraversal<'ctx> {
|
|||
where
|
||||
R: IntoIterator<Item = ItemId>,
|
||||
{
|
||||
WhitelistedItemsTraversal {
|
||||
AllowlistedItemsTraversal {
|
||||
ctx,
|
||||
traversal: ItemTraversal::new(ctx, roots, predicate),
|
||||
}
|
||||
|
@ -541,8 +548,16 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
let root_module = Self::build_root_module(ItemId(0));
|
||||
let root_module_id = root_module.id().as_module_id_unchecked();
|
||||
|
||||
// depfiles need to include the explicitly listed headers too
|
||||
let mut deps = BTreeSet::default();
|
||||
if let Some(filename) = &options.input_header {
|
||||
deps.insert(filename.clone());
|
||||
}
|
||||
deps.extend(options.extra_input_headers.iter().cloned());
|
||||
|
||||
BindgenContext {
|
||||
items: vec![Some(root_module)],
|
||||
deps,
|
||||
types: Default::default(),
|
||||
type_params: Default::default(),
|
||||
modules: Default::default(),
|
||||
|
@ -559,7 +574,8 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
target_info,
|
||||
options,
|
||||
generated_bindgen_complex: Cell::new(false),
|
||||
whitelisted: None,
|
||||
allowlisted: None,
|
||||
blocklisted_types_implement_traits: Default::default(),
|
||||
codegen_items: None,
|
||||
used_template_parameters: None,
|
||||
need_bitfield_allocation: Default::default(),
|
||||
|
@ -624,7 +640,20 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
|
||||
/// Get the user-provided callbacks by reference, if any.
|
||||
pub fn parse_callbacks(&self) -> Option<&dyn ParseCallbacks> {
|
||||
self.options().parse_callbacks.as_ref().map(|t| &**t)
|
||||
self.options().parse_callbacks.as_deref()
|
||||
}
|
||||
|
||||
/// Add another path to the set of included files.
|
||||
pub fn include_file(&mut self, filename: String) {
|
||||
if let Some(cbs) = self.parse_callbacks() {
|
||||
cbs.include_file(&filename);
|
||||
}
|
||||
self.deps.insert(filename);
|
||||
}
|
||||
|
||||
/// Get any included files.
|
||||
pub fn deps(&self) -> &BTreeSet<String> {
|
||||
&self.deps
|
||||
}
|
||||
|
||||
/// Define a new item.
|
||||
|
@ -673,8 +702,10 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
// Unnamed items can have an USR, but they can't be referenced from
|
||||
// other sites explicitly and the USR can match if the unnamed items are
|
||||
// nested, so don't bother tracking them.
|
||||
if is_type && !is_template_instantiation && declaration.is_some() {
|
||||
let mut declaration = declaration.unwrap();
|
||||
if !is_type || is_template_instantiation {
|
||||
return;
|
||||
}
|
||||
if let Some(mut declaration) = declaration {
|
||||
if !declaration.is_valid() {
|
||||
if let Some(location) = location {
|
||||
if location.is_template_like() {
|
||||
|
@ -703,7 +734,7 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
let key = if is_unnamed {
|
||||
TypeKey::Declaration(declaration)
|
||||
} else if let Some(usr) = declaration.usr() {
|
||||
TypeKey::USR(usr)
|
||||
TypeKey::Usr(usr)
|
||||
} else {
|
||||
warn!(
|
||||
"Valid declaration with no USR: {:?}, {:?}",
|
||||
|
@ -718,8 +749,8 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
}
|
||||
|
||||
/// Ensure that every item (other than the root module) is in a module's
|
||||
/// children list. This is to make sure that every whitelisted item get's
|
||||
/// codegen'd, even if its parent is not whitelisted. See issue #769 for
|
||||
/// children list. This is to make sure that every allowlisted item get's
|
||||
/// codegen'd, even if its parent is not allowlisted. See issue #769 for
|
||||
/// details.
|
||||
fn add_item_to_module(&mut self, item: &Item) {
|
||||
assert!(item.id() != self.root_module);
|
||||
|
@ -800,31 +831,32 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
// TODO: Move all this syntax crap to other part of the code.
|
||||
|
||||
/// Mangles a name so it doesn't conflict with any keyword.
|
||||
#[rustfmt::skip]
|
||||
pub fn rust_mangle<'a>(&self, name: &'a str) -> Cow<'a, str> {
|
||||
if name.contains("@") ||
|
||||
name.contains("?") ||
|
||||
name.contains("$") ||
|
||||
match name {
|
||||
if name.contains('@') ||
|
||||
name.contains('?') ||
|
||||
name.contains('$') ||
|
||||
matches!(
|
||||
name,
|
||||
"abstract" | "alignof" | "as" | "async" | "become" |
|
||||
"box" | "break" | "const" | "continue" | "crate" | "do" |
|
||||
"else" | "enum" | "extern" | "false" | "final" | "fn" |
|
||||
"for" | "if" | "impl" | "in" | "let" | "loop" | "macro" |
|
||||
"match" | "mod" | "move" | "mut" | "offsetof" |
|
||||
"override" | "priv" | "proc" | "pub" | "pure" | "ref" |
|
||||
"return" | "Self" | "self" | "sizeof" | "static" |
|
||||
"struct" | "super" | "trait" | "true" | "type" | "typeof" |
|
||||
"unsafe" | "unsized" | "use" | "virtual" | "where" |
|
||||
"while" | "yield" | "str" | "bool" | "f32" | "f64" |
|
||||
"usize" | "isize" | "u128" | "i128" | "u64" | "i64" |
|
||||
"u32" | "i32" | "u16" | "i16" | "u8" | "i8" | "_" => true,
|
||||
_ => false,
|
||||
}
|
||||
"box" | "break" | "const" | "continue" | "crate" | "do" |
|
||||
"dyn" | "else" | "enum" | "extern" | "false" | "final" |
|
||||
"fn" | "for" | "if" | "impl" | "in" | "let" | "loop" |
|
||||
"macro" | "match" | "mod" | "move" | "mut" | "offsetof" |
|
||||
"override" | "priv" | "proc" | "pub" | "pure" | "ref" |
|
||||
"return" | "Self" | "self" | "sizeof" | "static" |
|
||||
"struct" | "super" | "trait" | "true" | "try" | "type" | "typeof" |
|
||||
"unsafe" | "unsized" | "use" | "virtual" | "where" |
|
||||
"while" | "yield" | "str" | "bool" | "f32" | "f64" |
|
||||
"usize" | "isize" | "u128" | "i128" | "u64" | "i64" |
|
||||
"u32" | "i32" | "u16" | "i16" | "u8" | "i8" | "_"
|
||||
)
|
||||
{
|
||||
let mut s = name.to_owned();
|
||||
s = s.replace("@", "_");
|
||||
s = s.replace("?", "_");
|
||||
s = s.replace("$", "_");
|
||||
s.push_str("_");
|
||||
s.push('_');
|
||||
return Cow::Owned(s);
|
||||
}
|
||||
Cow::Borrowed(name)
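The rewrite of `rust_mangle` above swaps a hand-rolled `match ... => true, _ => false` block for the `matches!` macro and switches to single-character `replace`/`push` calls; behaviorally it still renames identifiers that collide with Rust keywords or contain characters a Rust identifier cannot hold. A small self-contained sketch of that shape; the keyword list here is deliberately abbreviated compared to the real function:

use std::borrow::Cow;

/// Append an underscore to names that collide with (a few) Rust keywords,
/// and replace characters that cannot appear in Rust identifiers.
fn rust_mangle(name: &str) -> Cow<'_, str> {
    if name.contains('@') ||
        name.contains('?') ||
        name.contains('$') ||
        matches!(name, "as" | "box" | "match" | "type" | "use" | "_")
    {
        let mut s = name.to_owned();
        s = s.replace('@', "_");
        s = s.replace('?', "_");
        s = s.replace('$', "_");
        s.push('_');
        return Cow::Owned(s);
    }
    Cow::Borrowed(name)
}

fn main() {
    assert_eq!(rust_mangle("match"), "match_");
    assert_eq!(rust_mangle("foo$bar"), "foo_bar_");
    assert_eq!(rust_mangle("plain"), "plain");
}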
|
||||
|
@ -874,11 +906,10 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
None => continue,
|
||||
};
|
||||
|
||||
match *ty.kind() {
|
||||
TypeKind::UnresolvedTypeRef(ref ty, loc, parent_id) => {
|
||||
typerefs.push((id, ty.clone(), loc, parent_id));
|
||||
}
|
||||
_ => {}
|
||||
if let TypeKind::UnresolvedTypeRef(ref ty, loc, parent_id) =
|
||||
*ty.kind()
|
||||
{
|
||||
typerefs.push((id, *ty, loc, parent_id));
|
||||
};
|
||||
}
|
||||
typerefs
|
||||
|
@ -949,15 +980,14 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
assert!(self.collected_typerefs());
|
||||
|
||||
let need_bitfield_allocation =
|
||||
mem::replace(&mut self.need_bitfield_allocation, vec![]);
|
||||
mem::take(&mut self.need_bitfield_allocation);
|
||||
for id in need_bitfield_allocation {
|
||||
self.with_loaned_item(id, |ctx, item| {
|
||||
item.kind_mut()
|
||||
.as_type_mut()
|
||||
let ty = item.kind_mut().as_type_mut().unwrap();
|
||||
let layout = ty.layout(ctx);
|
||||
ty.as_comp_mut()
|
||||
.unwrap()
|
||||
.as_comp_mut()
|
||||
.unwrap()
|
||||
.compute_bitfield_units(ctx);
|
||||
.compute_bitfield_units(ctx, layout.as_ref());
|
||||
});
|
||||
}
|
||||
}
|
||||
|
@ -1024,7 +1054,7 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
_ => continue,
|
||||
}
|
||||
|
||||
let path = item.path_for_whitelisting(self);
|
||||
let path = item.path_for_allowlisting(self);
|
||||
let replacement = self.replacements.get(&path[1..]);
|
||||
|
||||
if let Some(replacement) = replacement {
|
||||
|
@ -1093,7 +1123,8 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
.ancestors(immut_self)
|
||||
.find(|id| immut_self.resolve_item(*id).is_module())
|
||||
};
|
||||
let new_module = new_module.unwrap_or(self.root_module.into());
|
||||
let new_module =
|
||||
new_module.unwrap_or_else(|| self.root_module.into());
|
||||
|
||||
if new_module == old_module {
|
||||
// Already in the correct module.
|
||||
|
@ -1134,10 +1165,10 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
|
||||
self.assert_no_dangling_references();
|
||||
|
||||
// Compute the whitelisted set after processing replacements and
|
||||
// Compute the allowlisted set after processing replacements and
|
||||
// resolving type refs, as those are the final mutations of the IR
|
||||
// graph, and their completion means that the IR graph is now frozen.
|
||||
self.compute_whitelisted_and_codegen_items();
|
||||
self.compute_allowlisted_and_codegen_items();
|
||||
|
||||
// Make sure to do this after processing replacements, since that messes
|
||||
// with the parentage and module children, and we want to assert that it
|
||||
|
@ -1293,20 +1324,20 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
|
||||
fn find_used_template_parameters(&mut self) {
|
||||
let _t = self.timer("find_used_template_parameters");
|
||||
if self.options.whitelist_recursively {
|
||||
if self.options.allowlist_recursively {
|
||||
let used_params = analyze::<UsedTemplateParameters>(self);
|
||||
self.used_template_parameters = Some(used_params);
|
||||
} else {
|
||||
// If you aren't recursively whitelisting, then we can't really make
|
||||
// If you aren't recursively allowlisting, then we can't really make
|
||||
// any sense of template parameter usage, and you're on your own.
|
||||
let mut used_params = HashMap::default();
|
||||
for &id in self.whitelisted_items() {
|
||||
used_params.entry(id).or_insert(
|
||||
for &id in self.allowlisted_items() {
|
||||
used_params.entry(id).or_insert_with(|| {
|
||||
id.self_template_params(self)
|
||||
.into_iter()
|
||||
.map(|p| p.into())
|
||||
.collect(),
|
||||
);
|
||||
.collect()
|
||||
});
|
||||
}
|
||||
self.used_template_parameters = Some(used_params);
|
||||
}
|
||||
|
@ -1319,9 +1350,9 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
/// template usage information is only computed as we enter the codegen
|
||||
/// phase.
|
||||
///
|
||||
/// If the item is blacklisted, then we say that it always uses the template
|
||||
/// If the item is blocklisted, then we say that it always uses the template
|
||||
/// parameter. This is a little subtle. The template parameter usage
|
||||
/// analysis only considers whitelisted items, and if any blacklisted item
|
||||
/// analysis only considers allowlisted items, and if any blocklisted item
|
||||
/// shows up in the generated bindings, it is the user's responsibility to
|
||||
/// manually provide a definition for them. To give them the most
|
||||
/// flexibility when doing that, we assume that they use every template
|
||||
|
@ -1336,7 +1367,7 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
"We only compute template parameter usage as we enter codegen"
|
||||
);
|
||||
|
||||
if self.resolve_item(item).is_blacklisted(self) {
|
||||
if self.resolve_item(item).is_blocklisted(self) {
|
||||
return true;
|
||||
}
|
||||
|
||||
|
@ -1392,7 +1423,7 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
|
||||
fn build_root_module(id: ItemId) -> Item {
|
||||
let module = Module::new(Some("root".into()), ModuleKind::Normal);
|
||||
Item::new(id, None, None, id, ItemKind::Module(module))
|
||||
Item::new(id, None, None, id, ItemKind::Module(module), None)
|
||||
}
|
||||
|
||||
/// Get the root module.
|
||||
|
@ -1702,6 +1733,7 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
None,
|
||||
self.current_module.into(),
|
||||
ItemKind::Type(sub_ty),
|
||||
Some(child.location()),
|
||||
);
|
||||
|
||||
// Bypass all the validations in add_item explicitly.
|
||||
|
@ -1766,6 +1798,7 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
None,
|
||||
self.current_module.into(),
|
||||
ItemKind::Type(ty),
|
||||
Some(location.location()),
|
||||
);
|
||||
|
||||
// Bypass all the validations in add_item explicitly.
|
||||
|
@ -1787,7 +1820,7 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
.or_else(|| {
|
||||
decl.cursor()
|
||||
.usr()
|
||||
.and_then(|usr| self.types.get(&TypeKey::USR(usr)))
|
||||
.and_then(|usr| self.types.get(&TypeKey::Usr(usr)))
|
||||
})
|
||||
.cloned()
|
||||
}
|
||||
|
@ -1803,8 +1836,8 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
) -> Option<TypeId> {
|
||||
use clang_sys::{CXCursor_TypeAliasTemplateDecl, CXCursor_TypeRef};
|
||||
debug!(
|
||||
"builtin_or_resolved_ty: {:?}, {:?}, {:?}",
|
||||
ty, location, parent_id
|
||||
"builtin_or_resolved_ty: {:?}, {:?}, {:?}, {:?}",
|
||||
ty, location, with_id, parent_id
|
||||
);
|
||||
|
||||
if let Some(decl) = ty.canonical_declaration(location.as_ref()) {
|
||||
|
@ -1820,32 +1853,32 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
// of it, or
|
||||
// * we have already parsed and resolved this type, and
|
||||
// there's nothing left to do.
|
||||
if decl.cursor().is_template_like() &&
|
||||
*ty != decl.cursor().cur_type() &&
|
||||
location.is_some()
|
||||
{
|
||||
let location = location.unwrap();
|
||||
|
||||
// For specialized type aliases, there's no way to get the
|
||||
// template parameters as of this writing (for a struct
|
||||
// specialization we wouldn't be in this branch anyway).
|
||||
//
|
||||
// Explicitly return `None` if there aren't any
|
||||
// unspecialized parameters (contains any `TypeRef`) so we
|
||||
// resolve the canonical type if there is one and it's
|
||||
// exposed.
|
||||
//
|
||||
// This is _tricky_, I know :(
|
||||
if decl.cursor().kind() == CXCursor_TypeAliasTemplateDecl &&
|
||||
!location.contains_cursor(CXCursor_TypeRef) &&
|
||||
ty.canonical_type().is_valid_and_exposed()
|
||||
if let Some(location) = location {
|
||||
if decl.cursor().is_template_like() &&
|
||||
*ty != decl.cursor().cur_type()
|
||||
{
|
||||
return None;
|
||||
}
|
||||
// For specialized type aliases, there's no way to get the
|
||||
// template parameters as of this writing (for a struct
|
||||
// specialization we wouldn't be in this branch anyway).
|
||||
//
|
||||
// Explicitly return `None` if there aren't any
|
||||
// unspecialized parameters (contains any `TypeRef`) so we
|
||||
// resolve the canonical type if there is one and it's
|
||||
// exposed.
|
||||
//
|
||||
// This is _tricky_, I know :(
|
||||
if decl.cursor().kind() ==
|
||||
CXCursor_TypeAliasTemplateDecl &&
|
||||
!location.contains_cursor(CXCursor_TypeRef) &&
|
||||
ty.canonical_type().is_valid_and_exposed()
|
||||
{
|
||||
return None;
|
||||
}
|
||||
|
||||
return self
|
||||
.instantiate_template(with_id, id, ty, location)
|
||||
.or_else(|| Some(id));
|
||||
return self
|
||||
.instantiate_template(with_id, id, ty, location)
|
||||
.or(Some(id));
|
||||
}
|
||||
}
|
||||
|
||||
return Some(self.build_ty_wrapper(with_id, id, parent_id, ty));
|
||||
|
@ -1899,14 +1932,16 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
) -> TypeId {
|
||||
let spelling = ty.spelling();
|
||||
let layout = ty.fallible_layout(self).ok();
|
||||
let location = ty.declaration().location();
|
||||
let type_kind = TypeKind::ResolvedTypeRef(wrapped_id);
|
||||
let ty = Type::new(Some(spelling), layout, type_kind, is_const);
|
||||
let item = Item::new(
|
||||
with_id,
|
||||
None,
|
||||
None,
|
||||
parent_id.unwrap_or(self.current_module.into()),
|
||||
parent_id.unwrap_or_else(|| self.current_module.into()),
|
||||
ItemKind::Type(ty),
|
||||
Some(location),
|
||||
);
|
||||
self.add_builtin_item(item);
|
||||
with_id.as_type_id_unchecked()
|
||||
|
@ -1967,6 +2002,7 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
let spelling = ty.spelling();
|
||||
let is_const = ty.is_const();
|
||||
let layout = ty.fallible_layout(self).ok();
|
||||
let location = ty.declaration().location();
|
||||
let ty = Type::new(Some(spelling), layout, type_kind, is_const);
|
||||
let id = self.next_item_id();
|
||||
let item = Item::new(
|
||||
|
@ -1975,6 +2011,7 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
None,
|
||||
self.root_module.into(),
|
||||
ItemKind::Type(ty),
|
||||
Some(location),
|
||||
);
|
||||
self.add_builtin_item(item);
|
||||
Some(id.as_type_id_unchecked())
|
||||
|
@ -2046,10 +2083,7 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
id: Id,
|
||||
) -> bool {
|
||||
let id = id.into();
|
||||
match self.replacements.get(path) {
|
||||
Some(replaced_by) if *replaced_by != id => true,
|
||||
_ => false,
|
||||
}
|
||||
matches!(self.replacements.get(path), Some(replaced_by) if *replaced_by != id)
|
||||
}
|
||||
|
||||
/// Is the type with the given `name` marked as opaque?
|
||||
|
@ -2084,11 +2118,9 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
module_name = Some(spelling)
|
||||
}
|
||||
|
||||
let tokens = cursor.tokens();
|
||||
let mut iter = tokens.iter();
|
||||
let mut kind = ModuleKind::Normal;
|
||||
let mut found_namespace_keyword = false;
|
||||
while let Some(token) = iter.next() {
|
||||
for token in cursor.tokens().iter() {
|
||||
match token.spelling() {
|
||||
b"inline" => {
|
||||
assert!(!found_namespace_keyword);
|
||||
|
@ -2168,6 +2200,7 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
None,
|
||||
self.current_module.into(),
|
||||
ItemKind::Module(module),
|
||||
Some(cursor.location()),
|
||||
);
|
||||
|
||||
let module_id = module.id().as_module_id_unchecked();
|
||||
|
@ -2194,15 +2227,60 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
self.current_module = previous_id;
|
||||
}
|
||||
|
||||
/// Iterate over all (explicitly or transitively) whitelisted items.
|
||||
/// Iterate over all (explicitly or transitively) allowlisted items.
|
||||
///
|
||||
/// If no items are explicitly whitelisted, then all items are considered
|
||||
/// whitelisted.
|
||||
pub fn whitelisted_items(&self) -> &ItemSet {
|
||||
/// If no items are explicitly allowlisted, then all items are considered
|
||||
/// allowlisted.
|
||||
pub fn allowlisted_items(&self) -> &ItemSet {
|
||||
assert!(self.in_codegen_phase());
|
||||
assert!(self.current_module == self.root_module);
|
||||
|
||||
self.whitelisted.as_ref().unwrap()
|
||||
self.allowlisted.as_ref().unwrap()
|
||||
}
|
||||
|
||||
/// Check whether a particular blocklisted type implements a trait or not.
|
||||
/// Results may be cached.
|
||||
pub fn blocklisted_type_implements_trait(
|
||||
&self,
|
||||
item: &Item,
|
||||
derive_trait: DeriveTrait,
|
||||
) -> CanDerive {
|
||||
assert!(self.in_codegen_phase());
|
||||
assert!(self.current_module == self.root_module);
|
||||
|
||||
*self
|
||||
.blocklisted_types_implement_traits
|
||||
.borrow_mut()
|
||||
.entry(derive_trait)
|
||||
.or_default()
|
||||
.entry(item.id())
|
||||
.or_insert_with(|| {
|
||||
item.expect_type()
|
||||
.name()
|
||||
.and_then(|name| match self.options.parse_callbacks {
|
||||
Some(ref cb) => cb.blocklisted_type_implements_trait(
|
||||
name,
|
||||
derive_trait,
|
||||
),
|
||||
// Sized integer types from <stdint.h> get mapped to Rust primitive
|
||||
// types regardless of whether they are blocklisted, so ensure that
|
||||
// standard traits are considered derivable for them too.
|
||||
None => match name {
|
||||
"int8_t" | "uint8_t" | "int16_t" | "uint16_t" |
|
||||
"int32_t" | "uint32_t" | "int64_t" |
|
||||
"uint64_t" | "uintptr_t" | "intptr_t" |
|
||||
"ptrdiff_t" => Some(CanDerive::Yes),
|
||||
"size_t" if self.options.size_t_is_usize => {
|
||||
Some(CanDerive::Yes)
|
||||
}
|
||||
"ssize_t" if self.options.size_t_is_usize => {
|
||||
Some(CanDerive::Yes)
|
||||
}
|
||||
_ => Some(CanDerive::No),
|
||||
},
|
||||
})
|
||||
.unwrap_or(CanDerive::No)
|
||||
})
|
||||
}
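`blocklisted_type_implements_trait` takes `&self` yet still caches its answers, which is why the new `blocklisted_types_implement_traits` field is a `RefCell<HashMap<...>>`: interior mutability lets the per-(trait, item) result be filled in lazily with `entry(..).or_insert_with(..)`. A generic sketch of that memoization pattern; the `Cache` type and key shape are illustrative, not bindgen's API:

use std::cell::RefCell;
use std::collections::HashMap;

/// Memoizes an expensive query behind a `&self` method by keeping the
/// cache in a `RefCell`.
struct Cache {
    results: RefCell<HashMap<(String, u32), bool>>,
}

impl Cache {
    fn new() -> Self {
        Cache { results: RefCell::new(HashMap::new()) }
    }

    /// `compute` only runs the first time a given key is requested.
    fn query(&self, trait_name: &str, item_id: u32, compute: impl FnOnce() -> bool) -> bool {
        *self
            .results
            .borrow_mut()
            .entry((trait_name.to_owned(), item_id))
            .or_insert_with(compute)
    }
}

fn main() {
    let cache = Cache::new();
    let first = cache.query("Copy", 7, || {
        println!("computing...");
        true
    });
    // Second lookup hits the cache; the closure is not run again.
    let second = cache.query("Copy", 7, || unreachable!());
    assert_eq!(first, second);
}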
|
||||
|
||||
/// Get a reference to the set of items we should generate.
|
||||
|
@ -2212,12 +2290,12 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
self.codegen_items.as_ref().unwrap()
|
||||
}
|
||||
|
||||
/// Compute the whitelisted items set and populate `self.whitelisted`.
|
||||
fn compute_whitelisted_and_codegen_items(&mut self) {
|
||||
/// Compute the allowlisted items set and populate `self.allowlisted`.
|
||||
fn compute_allowlisted_and_codegen_items(&mut self) {
|
||||
assert!(self.in_codegen_phase());
|
||||
assert!(self.current_module == self.root_module);
|
||||
assert!(self.whitelisted.is_none());
|
||||
let _t = self.timer("compute_whitelisted_and_codegen_items");
|
||||
assert!(self.allowlisted.is_none());
|
||||
let _t = self.timer("compute_allowlisted_and_codegen_items");
|
||||
|
||||
let roots = {
|
||||
let mut roots = self
|
||||
|
@ -2225,11 +2303,11 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
// Only consider roots that are enabled for codegen.
|
||||
.filter(|&(_, item)| item.is_enabled_for_codegen(self))
|
||||
.filter(|&(_, item)| {
|
||||
// If nothing is explicitly whitelisted, then everything is fair
|
||||
// If nothing is explicitly allowlisted, then everything is fair
|
||||
// game.
|
||||
if self.options().whitelisted_types.is_empty() &&
|
||||
self.options().whitelisted_functions.is_empty() &&
|
||||
self.options().whitelisted_vars.is_empty()
|
||||
if self.options().allowlisted_types.is_empty() &&
|
||||
self.options().allowlisted_functions.is_empty() &&
|
||||
self.options().allowlisted_vars.is_empty()
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
@ -2240,25 +2318,25 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
return true;
|
||||
}
|
||||
|
||||
let name = item.path_for_whitelisting(self)[1..].join("::");
|
||||
debug!("whitelisted_items: testing {:?}", name);
|
||||
let name = item.path_for_allowlisting(self)[1..].join("::");
|
||||
debug!("allowlisted_items: testing {:?}", name);
|
||||
match *item.kind() {
|
||||
ItemKind::Module(..) => true,
|
||||
ItemKind::Function(_) => {
|
||||
self.options().whitelisted_functions.matches(&name)
|
||||
self.options().allowlisted_functions.matches(&name)
|
||||
}
|
||||
ItemKind::Var(_) => {
|
||||
self.options().whitelisted_vars.matches(&name)
|
||||
self.options().allowlisted_vars.matches(&name)
|
||||
}
|
||||
ItemKind::Type(ref ty) => {
|
||||
if self.options().whitelisted_types.matches(&name) {
|
||||
if self.options().allowlisted_types.matches(&name) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Auto-whitelist types that don't need code
|
||||
// generation if not whitelisting recursively, to
|
||||
// Auto-allowlist types that don't need code
|
||||
// generation if not allowlisting recursively, to
|
||||
// make the #[derive] analysis not be lame.
|
||||
if !self.options().whitelist_recursively {
|
||||
if !self.options().allowlist_recursively {
|
||||
match *ty.kind() {
|
||||
TypeKind::Void |
|
||||
TypeKind::NullPtr |
|
||||
|
@ -2278,7 +2356,7 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
}
|
||||
|
||||
// Unnamed top-level enums are special and we
|
||||
// whitelist them via the `whitelisted_vars` filter,
|
||||
// allowlist them via the `allowlisted_vars` filter,
|
||||
// since they're effectively top-level constants,
|
||||
// and there's no way for them to be referenced
|
||||
// consistently.
|
||||
|
@ -2297,12 +2375,14 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
}
|
||||
|
||||
let mut prefix_path =
|
||||
parent.path_for_whitelisting(self).clone();
|
||||
parent.path_for_allowlisting(self).clone();
|
||||
enum_.variants().iter().any(|variant| {
|
||||
prefix_path.push(variant.name().into());
|
||||
prefix_path.push(
|
||||
variant.name_for_allowlisting().into(),
|
||||
);
|
||||
let name = prefix_path[1..].join("::");
|
||||
prefix_path.pop().unwrap();
|
||||
self.options().whitelisted_vars.matches(&name)
|
||||
self.options().allowlisted_vars.matches(&name)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
@ -2317,48 +2397,48 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
roots
|
||||
};
|
||||
|
||||
let whitelisted_items_predicate =
|
||||
if self.options().whitelist_recursively {
|
||||
let allowlisted_items_predicate =
|
||||
if self.options().allowlist_recursively {
|
||||
traversal::all_edges
|
||||
} else {
|
||||
// Only follow InnerType edges from the whitelisted roots.
|
||||
// Only follow InnerType edges from the allowlisted roots.
|
||||
// Such inner types (e.g. anonymous structs/unions) are
|
||||
// always emitted by codegen, and they need to be whitelisted
|
||||
// always emitted by codegen, and they need to be allowlisted
|
||||
// to make sure they are processed by e.g. the derive analysis.
|
||||
traversal::only_inner_type_edges
|
||||
};
|
||||
|
||||
let whitelisted = WhitelistedItemsTraversal::new(
|
||||
let allowlisted = AllowlistedItemsTraversal::new(
|
||||
self,
|
||||
roots.clone(),
|
||||
whitelisted_items_predicate,
|
||||
allowlisted_items_predicate,
|
||||
)
|
||||
.collect::<ItemSet>();
|
||||
|
||||
let codegen_items = if self.options().whitelist_recursively {
|
||||
WhitelistedItemsTraversal::new(
|
||||
let codegen_items = if self.options().allowlist_recursively {
|
||||
AllowlistedItemsTraversal::new(
|
||||
self,
|
||||
roots.clone(),
|
||||
roots,
|
||||
traversal::codegen_edges,
|
||||
)
|
||||
.collect::<ItemSet>()
|
||||
} else {
|
||||
whitelisted.clone()
|
||||
allowlisted.clone()
|
||||
};
|
||||
|
||||
self.whitelisted = Some(whitelisted);
|
||||
self.allowlisted = Some(allowlisted);
|
||||
self.codegen_items = Some(codegen_items);
|
||||
|
||||
for item in self.options().whitelisted_functions.unmatched_items() {
|
||||
warn!("unused option: --whitelist-function {}", item);
|
||||
for item in self.options().allowlisted_functions.unmatched_items() {
|
||||
warn!("unused option: --allowlist-function {}", item);
|
||||
}
|
||||
|
||||
for item in self.options().whitelisted_vars.unmatched_items() {
|
||||
warn!("unused option: --whitelist-var {}", item);
|
||||
for item in self.options().allowlisted_vars.unmatched_items() {
|
||||
warn!("unused option: --allowlist-var {}", item);
|
||||
}
|
||||
|
||||
for item in self.options().whitelisted_types.unmatched_items() {
|
||||
warn!("unused option: --whitelist-type {}", item);
|
||||
for item in self.options().allowlisted_types.unmatched_items() {
|
||||
warn!("unused option: --allowlist-type {}", item);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2575,33 +2655,39 @@ If you encounter an error missing from this list, please file an issue or a PR!"
|
|||
|
||||
/// Check if `--no-partialeq` flag is enabled for this item.
|
||||
pub fn no_partialeq_by_name(&self, item: &Item) -> bool {
|
||||
let name = item.path_for_whitelisting(self)[1..].join("::");
|
||||
let name = item.path_for_allowlisting(self)[1..].join("::");
|
||||
self.options().no_partialeq_types.matches(&name)
|
||||
}
|
||||
|
||||
/// Check if `--no-copy` flag is enabled for this item.
|
||||
pub fn no_copy_by_name(&self, item: &Item) -> bool {
|
||||
let name = item.path_for_whitelisting(self)[1..].join("::");
|
||||
let name = item.path_for_allowlisting(self)[1..].join("::");
|
||||
self.options().no_copy_types.matches(&name)
|
||||
}
|
||||
|
||||
/// Check if `--no-debug` flag is enabled for this item.
|
||||
pub fn no_debug_by_name(&self, item: &Item) -> bool {
|
||||
let name = item.path_for_whitelisting(self)[1..].join("::");
|
||||
let name = item.path_for_allowlisting(self)[1..].join("::");
|
||||
self.options().no_debug_types.matches(&name)
|
||||
}
|
||||
|
||||
/// Check if `--no-default` flag is enabled for this item.
|
||||
pub fn no_default_by_name(&self, item: &Item) -> bool {
|
||||
let name = item.path_for_whitelisting(self)[1..].join("::");
|
||||
let name = item.path_for_allowlisting(self)[1..].join("::");
|
||||
self.options().no_default_types.matches(&name)
|
||||
}
|
||||
|
||||
/// Check if `--no-hash` flag is enabled for this item.
|
||||
pub fn no_hash_by_name(&self, item: &Item) -> bool {
|
||||
let name = item.path_for_whitelisting(self)[1..].join("::");
|
||||
let name = item.path_for_allowlisting(self)[1..].join("::");
|
||||
self.options().no_hash_types.matches(&name)
|
||||
}
|
||||
|
||||
/// Check if `--must-use-type` flag is enabled for this item.
|
||||
pub fn must_use_type_by_name(&self, item: &Item) -> bool {
|
||||
let name = item.path_for_allowlisting(self)[1..].join("::");
|
||||
self.options().must_use_types.matches(&name)
|
||||
}
|
||||
}
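Every `--no-*` and `--must-use-type` check above follows the same shape: take the item's allowlisting path, drop the synthetic root module, join the rest with `::`, and test the result against a set of user-supplied patterns. A standalone sketch of that shape using the `regex` crate's `RegexSet` (an assumption made for illustration; bindgen wraps its own `RegexSet` type):

// Assumes a Cargo dependency on the `regex` crate.
use regex::RegexSet;

/// Join a namespace path like ["root", "ns", "Foo"] into "ns::Foo"
/// (dropping the synthetic root module) and test it against user-supplied
/// patterns, mirroring the --no-partialeq / --must-use-type checks.
fn matches_by_name(path: &[String], patterns: &RegexSet) -> bool {
    let name = path[1..].join("::");
    patterns.is_match(&name)
}

fn main() {
    let patterns = RegexSet::new(["^ns::Foo$", "Bar.*"]).unwrap();
    let path: Vec<String> =
        ["root", "ns", "Foo"].iter().map(|s| s.to_string()).collect();
    assert!(matches_by_name(&path, &patterns));
}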
|
||||
|
||||
/// A builder struct for configuring item resolution options.
|
||||
|
@ -2633,7 +2719,7 @@ impl ItemResolver {
|
|||
pub fn new<Id: Into<ItemId>>(id: Id) -> ItemResolver {
|
||||
let id = id.into();
|
||||
ItemResolver {
|
||||
id: id,
|
||||
id,
|
||||
through_type_refs: false,
|
||||
through_type_aliases: false,
|
||||
}
|
||||
|
@ -2656,8 +2742,16 @@ impl ItemResolver {
|
|||
assert!(ctx.collected_typerefs());
|
||||
|
||||
let mut id = self.id;
|
||||
let mut seen_ids = HashSet::default();
|
||||
loop {
|
||||
let item = ctx.resolve_item(id);
|
||||
|
||||
// Detect cycles and bail out. These can happen in certain cases
|
||||
// involving incomplete qualified dependent types (#2085).
|
||||
if !seen_ids.insert(id) {
|
||||
return item;
|
||||
}
|
||||
|
||||
let ty_kind = item.as_type().map(|t| t.kind());
|
||||
match ty_kind {
|
||||
Some(&TypeKind::ResolvedTypeRef(next_id))
|
||||
|
@ -2692,7 +2786,7 @@ impl PartialType {
|
|||
/// Construct a new `PartialType`.
|
||||
pub fn new(decl: Cursor, id: ItemId) -> PartialType {
|
||||
// assert!(decl == decl.canonical());
|
||||
PartialType { decl: decl, id: id }
|
||||
PartialType { decl, id }
|
||||
}
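The `seen_ids` set added to `ItemResolver::resolve` in the hunk above is plain cycle detection: keep chasing `ResolvedTypeRef` links, but bail out as soon as an id repeats, which issue #2085 showed can happen with incomplete qualified dependent types. A self-contained sketch of the same pattern over a toy id graph:

use std::collections::{HashMap, HashSet};

/// Follow `next` links starting at `start`, stopping either when a node has
/// no successor or when we revisit a node (a reference cycle).
fn resolve(start: u32, next: &HashMap<u32, u32>) -> u32 {
    let mut id = start;
    let mut seen = HashSet::new();
    loop {
        // `insert` returns false if the id was already present: a cycle.
        if !seen.insert(id) {
            return id;
        }
        match next.get(&id) {
            Some(&next_id) => id = next_id,
            None => return id,
        }
    }
}

fn main() {
    // 1 -> 2 -> 3 -> 1 forms a cycle; resolution stops instead of spinning.
    let cyclic = HashMap::from([(1u32, 2u32), (2, 3), (3, 1)]);
    assert_eq!(resolve(1, &cyclic), 1);

    // 5 -> 6 terminates normally.
    let chain = HashMap::from([(5u32, 6u32)]);
    assert_eq!(resolve(5, &chain), 6);
}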
|
||||
|
||||
/// The cursor pointing to this partial type's declaration location.
|
||||
@@ -32,13 +32,13 @@ where
     let mut err: Option<io::Result<_>> = None;

     for (id, item) in ctx.items() {
-        let is_whitelisted = ctx.whitelisted_items().contains(&id);
+        let is_allowlisted = ctx.allowlisted_items().contains(&id);

         writeln!(
             &mut dot_file,
             r#"{} [fontname="courier", color={}, label=< <table border="0" align="left">"#,
             id.as_usize(),
-            if is_whitelisted { "black" } else { "gray" }
+            if is_allowlisted { "black" } else { "gray" }
         )?;
         item.dot_attributes(ctx, &mut dot_file)?;
         writeln!(&mut dot_file, r#"</table> >];"#)?;
@ -56,7 +56,7 @@ where
|
|||
id.as_usize(),
|
||||
sub_id.as_usize(),
|
||||
edge_kind,
|
||||
if is_whitelisted { "black" } else { "gray" }
|
||||
if is_allowlisted { "black" } else { "gray" }
|
||||
) {
|
||||
Ok(_) => {}
|
||||
Err(e) => err = Some(Err(e)),
|
||||
|
|
|
@ -86,7 +86,7 @@ impl Enum {
|
|||
} else {
|
||||
Some(type_name)
|
||||
};
|
||||
let type_name = type_name.as_ref().map(String::as_str);
|
||||
let type_name = type_name.as_deref();
|
||||
|
||||
let definition = declaration.definition().unwrap_or(declaration);
|
||||
definition.visit(|cursor| {
|
||||
|
@ -118,7 +118,7 @@ impl Enum {
|
|||
}
|
||||
});
|
||||
|
||||
let name = ctx
|
||||
let new_name = ctx
|
||||
.parse_callbacks()
|
||||
.and_then(|callbacks| {
|
||||
callbacks.enum_variant_name(type_name, &name, val)
|
||||
|
@ -130,10 +130,11 @@ impl Enum {
|
|||
.last()
|
||||
.cloned()
|
||||
})
|
||||
.unwrap_or(name);
|
||||
.unwrap_or_else(|| name.clone());
|
||||
|
||||
let comment = cursor.raw_comment();
|
||||
variants.push(EnumVariant::new(
|
||||
new_name,
|
||||
name,
|
||||
comment,
|
||||
val,
|
||||
|
@ -152,7 +153,7 @@ impl Enum {
|
|||
enums: &RegexSet,
|
||||
item: &Item,
|
||||
) -> bool {
|
||||
let path = item.path_for_whitelisting(ctx);
|
||||
let path = item.path_for_allowlisting(ctx);
|
||||
let enum_ty = item.expect_type();
|
||||
|
||||
if enums.matches(&path[1..].join("::")) {
|
||||
|
@ -224,6 +225,9 @@ pub struct EnumVariant {
|
|||
/// The name of the variant.
|
||||
name: String,
|
||||
|
||||
/// The original name of the variant (without user mangling)
|
||||
name_for_allowlisting: String,
|
||||
|
||||
/// An optional doc comment.
|
||||
comment: Option<String>,
|
||||
|
||||
|
@ -251,12 +255,14 @@ impl EnumVariant {
|
|||
/// Construct a new enumeration variant from the given parts.
|
||||
pub fn new(
|
||||
name: String,
|
||||
name_for_allowlisting: String,
|
||||
comment: Option<String>,
|
||||
val: EnumVariantValue,
|
||||
custom_behavior: Option<EnumVariantCustomBehavior>,
|
||||
) -> Self {
|
||||
EnumVariant {
|
||||
name,
|
||||
name_for_allowlisting,
|
||||
comment,
|
||||
val,
|
||||
custom_behavior,
|
||||
|
@ -268,6 +274,11 @@ impl EnumVariant {
|
|||
&self.name
|
||||
}
|
||||
|
||||
/// Get this variant's name.
|
||||
pub fn name_for_allowlisting(&self) -> &str {
|
||||
&self.name_for_allowlisting
|
||||
}
|
||||
|
||||
/// Get this variant's value.
|
||||
pub fn val(&self) -> EnumVariantValue {
|
||||
self.val
|
||||
|
@ -275,7 +286,7 @@ impl EnumVariant {
|
|||
|
||||
/// Get this variant's documentation.
|
||||
pub fn comment(&self) -> Option<&str> {
|
||||
self.comment.as_ref().map(|s| &**s)
|
||||
self.comment.as_deref()
|
||||
}
|
||||
|
||||
/// Returns whether this variant should be enforced to be a constant by code
|
||||
|
|
|
@ -28,7 +28,9 @@ pub enum FunctionKind {
|
|||
}
|
||||
|
||||
impl FunctionKind {
|
||||
fn from_cursor(cursor: &clang::Cursor) -> Option<FunctionKind> {
|
||||
/// Given a clang cursor, return the kind of function it represents, or
|
||||
/// `None` otherwise.
|
||||
pub fn from_cursor(cursor: &clang::Cursor) -> Option<FunctionKind> {
|
||||
// FIXME(emilio): Deduplicate logic with `ir::comp`.
|
||||
Some(match cursor.kind() {
|
||||
clang_sys::CXCursor_FunctionDecl => FunctionKind::Function,
|
||||
|
@ -121,7 +123,7 @@ impl Function {
|
|||
|
||||
/// Get this function's name.
|
||||
pub fn mangled_name(&self) -> Option<&str> {
|
||||
self.mangled_name.as_ref().map(|n| &**n)
|
||||
self.mangled_name.as_deref()
|
||||
}
|
||||
|
||||
/// Get this function's signature type.
|
||||
|
@ -185,10 +187,7 @@ pub enum Abi {
|
|||
impl Abi {
|
||||
/// Returns whether this Abi is known or not.
|
||||
fn is_unknown(&self) -> bool {
|
||||
match *self {
|
||||
Abi::Unknown(..) => true,
|
||||
_ => false,
|
||||
}
|
||||
matches!(*self, Abi::Unknown(..))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -338,7 +337,7 @@ fn args_from_ty_and_cursor(
|
|||
});
|
||||
|
||||
let cursor = arg_cur.unwrap_or(*cursor);
|
||||
let ty = arg_ty.unwrap_or(cursor.cur_type());
|
||||
let ty = arg_ty.unwrap_or_else(|| cursor.cur_type());
|
||||
(name, Item::from_ty_or_ref(ty, cursor, None, ctx))
|
||||
})
|
||||
.collect()
|
||||
|
@ -358,7 +357,7 @@ impl FunctionSig {
|
|||
argument_types,
|
||||
is_variadic,
|
||||
must_use,
|
||||
abi: abi,
|
||||
abi,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -409,7 +408,7 @@ impl FunctionSig {
|
|||
CXCursor_CXXMethod |
|
||||
CXCursor_ObjCInstanceMethodDecl |
|
||||
CXCursor_ObjCClassMethodDecl => {
|
||||
args_from_ty_and_cursor(&ty, &cursor, ctx)
|
||||
args_from_ty_and_cursor(ty, &cursor, ctx)
|
||||
}
|
||||
_ => {
|
||||
// For non-CXCursor_FunctionDecl, visiting the cursor's children
|
||||
|
@ -432,7 +431,7 @@ impl FunctionSig {
|
|||
// right AST for functions tagged as stdcall and such...
|
||||
//
|
||||
// https://bugs.llvm.org/show_bug.cgi?id=45919
|
||||
args_from_ty_and_cursor(&ty, &cursor, ctx)
|
||||
args_from_ty_and_cursor(ty, &cursor, ctx)
|
||||
} else {
|
||||
args
|
||||
}
|
||||
|
@ -520,7 +519,7 @@ impl FunctionSig {
|
|||
warn!("Unknown calling convention: {:?}", call_conv);
|
||||
}
|
||||
|
||||
Ok(Self::new(ret.into(), args, ty.is_variadic(), must_use, abi))
|
||||
Ok(Self::new(ret, args, ty.is_variadic(), must_use, abi))
|
||||
}
|
||||
|
||||
/// Get this function signature's return type.
|
||||
|
@ -565,10 +564,7 @@ impl FunctionSig {
|
|||
return false;
|
||||
}
|
||||
|
||||
match self.abi {
|
||||
Abi::C | Abi::Unknown(..) => true,
|
||||
_ => false,
|
||||
}
|
||||
matches!(self.abi, Abi::C | Abi::Unknown(..))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -595,10 +591,13 @@ impl ClangSubItemParser for Function {
|
|||
return Err(ParseError::Continue);
|
||||
}
|
||||
|
||||
if !context.options().generate_inline_functions &&
|
||||
cursor.is_inlined_function()
|
||||
{
|
||||
return Err(ParseError::Continue);
|
||||
if cursor.is_inlined_function() {
|
||||
if !context.options().generate_inline_functions {
|
||||
return Err(ParseError::Continue);
|
||||
}
|
||||
if cursor.is_deleted_function() {
|
||||
return Err(ParseError::Continue);
|
||||
}
|
||||
}
|
||||
|
||||
let linkage = cursor.linkage();
|
||||
|
|
|
@ -4,7 +4,7 @@ use super::super::codegen::{EnumVariation, CONSTIFIED_ENUM_MODULE_REPR_NAME};
|
|||
use super::analysis::{HasVtable, HasVtableResult, Sizedness, SizednessResult};
|
||||
use super::annotations::Annotations;
|
||||
use super::comment;
|
||||
use super::comp::MethodKind;
|
||||
use super::comp::{CompKind, MethodKind};
|
||||
use super::context::{BindgenContext, ItemId, PartialType, TypeId};
|
||||
use super::derive::{
|
||||
CanDeriveCopy, CanDeriveDebug, CanDeriveDefault, CanDeriveEq,
|
||||
|
@ -273,10 +273,10 @@ impl Trace for Item {
|
|||
where
|
||||
T: Tracer,
|
||||
{
|
||||
// Even if this item is blacklisted/hidden, we want to trace it. It is
|
||||
// Even if this item is blocklisted/hidden, we want to trace it. It is
|
||||
// traversal iterators' consumers' responsibility to filter items as
|
||||
// needed. Generally, this filtering happens in the implementation of
|
||||
// `Iterator` for `WhitelistedItems`. Fully tracing blacklisted items is
|
||||
// `Iterator` for `allowlistedItems`. Fully tracing blocklisted items is
|
||||
// necessary for things like the template parameter usage analysis to
|
||||
// function correctly.
|
||||
|
||||
|
@ -301,12 +301,12 @@ impl Trace for Item {
|
|||
}
|
||||
ItemKind::Module(_) => {
|
||||
// Module -> children edges are "weak", and we do not want to
|
||||
// trace them. If we did, then whitelisting wouldn't work as
|
||||
// trace them. If we did, then allowlisting wouldn't work as
|
||||
// expected: everything in every module would end up
|
||||
// whitelisted.
|
||||
// allowlisted.
|
||||
//
|
||||
// TODO: make a new edge kind for module -> children edges and
|
||||
// filter them during whitelisting traversals.
|
||||
// filter them during allowlisting traversals.
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -400,9 +400,9 @@ pub struct Item {
|
|||
/// considerably faster in those cases.
|
||||
canonical_name: LazyCell<String>,
|
||||
|
||||
/// The path to use for whitelisting and other name-based checks, as
|
||||
/// returned by `path_for_whitelisting`, lazily constructed.
|
||||
path_for_whitelisting: LazyCell<Vec<String>>,
|
||||
/// The path to use for allowlisting and other name-based checks, as
|
||||
/// returned by `path_for_allowlisting`, lazily constructed.
|
||||
path_for_allowlisting: LazyCell<Vec<String>>,
|
||||
|
||||
/// A doc comment over the item, if any.
|
||||
comment: Option<String>,
|
||||
|
@ -417,6 +417,8 @@ pub struct Item {
|
|||
parent_id: ItemId,
|
||||
/// The item kind.
|
||||
kind: ItemKind,
|
||||
/// The source location of the item.
|
||||
location: Option<clang::SourceLocation>,
|
||||
}
|
||||
|
||||
impl AsRef<ItemId> for Item {
|
||||
|
@ -433,18 +435,20 @@ impl Item {
|
|||
annotations: Option<Annotations>,
|
||||
parent_id: ItemId,
|
||||
kind: ItemKind,
|
||||
location: Option<clang::SourceLocation>,
|
||||
) -> Self {
|
||||
debug_assert!(id != parent_id || kind.is_module());
|
||||
Item {
|
||||
id: id,
|
||||
id,
|
||||
local_id: LazyCell::new(),
|
||||
next_child_local_id: Cell::new(1),
|
||||
canonical_name: LazyCell::new(),
|
||||
path_for_whitelisting: LazyCell::new(),
|
||||
parent_id: parent_id,
|
||||
comment: comment,
|
||||
path_for_allowlisting: LazyCell::new(),
|
||||
parent_id,
|
||||
comment,
|
||||
annotations: annotations.unwrap_or_default(),
|
||||
kind: kind,
|
||||
kind,
|
||||
location,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -454,10 +458,15 @@ impl Item {
|
|||
ty: &clang::Type,
|
||||
ctx: &mut BindgenContext,
|
||||
) -> TypeId {
|
||||
let location = ty.declaration().location();
|
||||
let ty = Opaque::from_clang_ty(ty, ctx);
|
||||
let kind = ItemKind::Type(ty);
|
||||
let parent = ctx.root_module().into();
|
||||
ctx.add_item(Item::new(with_id, None, None, parent, kind), None, None);
|
||||
ctx.add_item(
|
||||
Item::new(with_id, None, None, parent, kind, Some(location)),
|
||||
None,
|
||||
None,
|
||||
);
|
||||
with_id.as_type_id_unchecked()
|
||||
}
|
||||
|
||||
|
@ -612,10 +621,7 @@ impl Item {
|
|||
|
||||
/// Is this item a module?
|
||||
pub fn is_module(&self) -> bool {
|
||||
match self.kind {
|
||||
ItemKind::Module(..) => true,
|
||||
_ => false,
|
||||
}
|
||||
matches!(self.kind, ItemKind::Module(..))
|
||||
}
|
||||
|
||||
/// Get this item's annotations.
|
||||
|
@ -623,10 +629,10 @@ impl Item {
|
|||
&self.annotations
|
||||
}
|
||||
|
||||
/// Whether this item should be blacklisted.
|
||||
/// Whether this item should be blocklisted.
|
||||
///
|
||||
/// This may be due to either annotations or to other kind of configuration.
|
||||
pub fn is_blacklisted(&self, ctx: &BindgenContext) -> bool {
|
||||
pub fn is_blocklisted(&self, ctx: &BindgenContext) -> bool {
|
||||
debug_assert!(
|
||||
ctx.in_codegen_phase(),
|
||||
"You're not supposed to call this yet"
|
||||
|
@ -635,18 +641,29 @@ impl Item {
|
|||
return true;
|
||||
}
|
||||
|
||||
let path = self.path_for_whitelisting(ctx);
|
||||
if !ctx.options().blocklisted_files.is_empty() {
|
||||
if let Some(location) = &self.location {
|
||||
let (file, _, _, _) = location.location();
|
||||
if let Some(filename) = file.name() {
|
||||
if ctx.options().blocklisted_files.matches(&filename) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let path = self.path_for_allowlisting(ctx);
|
||||
let name = path[1..].join("::");
|
||||
ctx.options().blacklisted_items.matches(&name) ||
|
||||
ctx.options().blocklisted_items.matches(&name) ||
|
||||
match self.kind {
|
||||
ItemKind::Type(..) => {
|
||||
ctx.options().blacklisted_types.matches(&name) ||
|
||||
ctx.is_replaced_type(&path, self.id)
|
||||
ctx.options().blocklisted_types.matches(&name) ||
|
||||
ctx.is_replaced_type(path, self.id)
|
||||
}
|
||||
ItemKind::Function(..) => {
|
||||
ctx.options().blacklisted_functions.matches(&name)
|
||||
ctx.options().blocklisted_functions.matches(&name)
|
||||
}
|
||||
// TODO: Add constant / namespace blacklisting?
|
||||
// TODO: Add constant / namespace blocklisting?
|
||||
ItemKind::Var(..) | ItemKind::Module(..) => false,
|
||||
}
|
||||
}
|
||||
|
@ -658,10 +675,7 @@ impl Item {
|
|||
|
||||
/// Is this item a var type?
|
||||
pub fn is_var(&self) -> bool {
|
||||
match *self.kind() {
|
||||
ItemKind::Var(..) => true,
|
||||
_ => false,
|
||||
}
|
||||
matches!(*self.kind(), ItemKind::Var(..))
|
||||
}
|
||||
|
||||
/// Take out item NameOptions
|
||||
|
@ -722,7 +736,7 @@ impl Item {
|
|||
.through_type_refs()
|
||||
.resolve(ctx)
|
||||
.push_disambiguated_name(ctx, to, level + 1);
|
||||
to.push_str("_");
|
||||
to.push('_');
|
||||
}
|
||||
to.push_str(&format!("close{}", level));
|
||||
}
|
||||
|
@ -835,7 +849,7 @@ impl Item {
|
|||
if ctx.options().enable_cxx_namespaces {
|
||||
return path.last().unwrap().clone();
|
||||
}
|
||||
return path.join("_").to_owned();
|
||||
return path.join("_");
|
||||
}
|
||||
|
||||
let base_name = target.base_name(ctx);
|
||||
|
@ -873,7 +887,7 @@ impl Item {
|
|||
|
||||
// If target is anonymous we need find its first named ancestor.
|
||||
if target.is_anon() {
|
||||
while let Some(id) = ids_iter.next() {
|
||||
for id in ids_iter.by_ref() {
|
||||
ids.push(id);
|
||||
|
||||
if !ctx.resolve_item(id).is_anon() {
|
||||
|
@ -904,6 +918,12 @@ impl Item {
|
|||
names.push(base_name);
|
||||
}
|
||||
|
||||
if ctx.options().c_naming {
|
||||
if let Some(prefix) = self.c_naming_prefix() {
|
||||
names.insert(0, prefix.to_string());
|
||||
}
|
||||
}
|
||||
|
||||
let name = names.join("_");
|
||||
|
||||
let name = if opt.user_mangled == UserMangled::Yes {
|
||||
|
@ -1012,10 +1032,10 @@ impl Item {
|
|||
}
|
||||
}
|
||||
|
||||
/// Returns the path we should use for whitelisting / blacklisting, which
|
||||
/// Returns the path we should use for allowlisting / blocklisting, which
|
||||
/// doesn't include user-mangling.
|
||||
pub fn path_for_whitelisting(&self, ctx: &BindgenContext) -> &Vec<String> {
|
||||
self.path_for_whitelisting
|
||||
pub fn path_for_allowlisting(&self, ctx: &BindgenContext) -> &Vec<String> {
|
||||
self.path_for_allowlisting
|
||||
.borrow_with(|| self.compute_path(ctx, UserMangled::No))
|
||||
}
|
||||
|
||||
|
@ -1054,6 +1074,23 @@ impl Item {
|
|||
path.reverse();
|
||||
path
|
||||
}
|
||||
|
||||
/// Returns a prefix for the canonical name when C naming is enabled.
|
||||
fn c_naming_prefix(&self) -> Option<&str> {
|
||||
let ty = match self.kind {
|
||||
ItemKind::Type(ref ty) => ty,
|
||||
_ => return None,
|
||||
};
|
||||
|
||||
Some(match ty.kind() {
|
||||
TypeKind::Comp(ref ci) => match ci.kind() {
|
||||
CompKind::Struct => "struct",
|
||||
CompKind::Union => "union",
|
||||
},
|
||||
TypeKind::Enum(..) => "enum",
|
||||
_ => return None,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> IsOpaque for T
|
||||
|
@ -1081,7 +1118,7 @@ impl IsOpaque for Item {
|
|||
);
|
||||
self.annotations.opaque() ||
|
||||
self.as_type().map_or(false, |ty| ty.is_opaque(ctx, self)) ||
|
||||
ctx.opaque_by_name(&self.path_for_whitelisting(ctx))
|
||||
ctx.opaque_by_name(self.path_for_allowlisting(ctx))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1091,20 +1128,16 @@ where
|
|||
{
|
||||
fn has_vtable(&self, ctx: &BindgenContext) -> bool {
|
||||
let id: ItemId = (*self).into();
|
||||
id.as_type_id(ctx)
|
||||
.map_or(false, |id| match ctx.lookup_has_vtable(id) {
|
||||
HasVtableResult::No => false,
|
||||
_ => true,
|
||||
})
|
||||
id.as_type_id(ctx).map_or(false, |id| {
|
||||
!matches!(ctx.lookup_has_vtable(id), HasVtableResult::No)
|
||||
})
|
||||
}
|
||||
|
||||
fn has_vtable_ptr(&self, ctx: &BindgenContext) -> bool {
|
||||
let id: ItemId = (*self).into();
|
||||
id.as_type_id(ctx)
|
||||
.map_or(false, |id| match ctx.lookup_has_vtable(id) {
|
||||
HasVtableResult::SelfHasVtable => true,
|
||||
_ => false,
|
||||
})
|
||||
id.as_type_id(ctx).map_or(false, |id| {
|
||||
matches!(ctx.lookup_has_vtable(id), HasVtableResult::SelfHasVtable)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1284,7 +1317,7 @@ impl ClangItemParser for Item {
|
|||
let id = ctx.next_item_id();
|
||||
let module = ctx.root_module().into();
|
||||
ctx.add_item(
|
||||
Item::new(id, None, None, module, ItemKind::Type(ty)),
|
||||
Item::new(id, None, None, module, ItemKind::Type(ty), None),
|
||||
None,
|
||||
None,
|
||||
);
|
||||
|
@ -1322,6 +1355,7 @@ impl ClangItemParser for Item {
|
|||
annotations,
|
||||
relevant_parent_id,
|
||||
ItemKind::$what(item),
|
||||
Some(cursor.location()),
|
||||
),
|
||||
declaration,
|
||||
Some(cursor),
|
||||
|
@ -1369,7 +1403,7 @@ impl ClangItemParser for Item {
|
|||
}
|
||||
ctx.known_semantic_parent(definition)
|
||||
.or(parent_id)
|
||||
.unwrap_or(ctx.current_module().into())
|
||||
.unwrap_or_else(|| ctx.current_module().into())
|
||||
}
|
||||
None => relevant_parent_id,
|
||||
};
|
||||
|
@ -1390,7 +1424,7 @@ impl ClangItemParser for Item {
|
|||
if cursor.kind() == CXCursor_UnexposedDecl {
|
||||
Err(ParseError::Recurse)
|
||||
} else {
|
||||
// We whitelist cursors here known to be unhandled, to prevent being
|
||||
// We allowlist cursors here known to be unhandled, to prevent being
|
||||
// too noisy about this.
|
||||
match cursor.kind() {
|
||||
CXCursor_MacroDefinition |
|
||||
|
@ -1415,9 +1449,7 @@ impl ClangItemParser for Item {
|
|||
);
|
||||
}
|
||||
Some(filename) => {
|
||||
if let Some(cb) = ctx.parse_callbacks() {
|
||||
cb.include_file(&filename)
|
||||
}
|
||||
ctx.include_file(filename);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1503,8 +1535,9 @@ impl ClangItemParser for Item {
|
|||
potential_id,
|
||||
None,
|
||||
None,
|
||||
parent_id.unwrap_or(current_module.into()),
|
||||
parent_id.unwrap_or_else(|| current_module.into()),
|
||||
ItemKind::Type(Type::new(None, None, kind, is_const)),
|
||||
Some(location.location()),
|
||||
),
|
||||
None,
|
||||
None,
|
||||
|
@ -1560,9 +1593,21 @@ impl ClangItemParser for Item {
|
|||
}
|
||||
}
|
||||
|
||||
// Treat all types that are declared inside functions as opaque. The Rust binding
|
||||
// won't be able to do anything with them anyway.
|
||||
//
|
||||
// (If we don't do this check here, we can have subtle logic bugs because we generally
|
||||
// ignore function bodies. See issue #2036.)
|
||||
if let Some(ref parent) = ty.declaration().fallible_semantic_parent() {
|
||||
if FunctionKind::from_cursor(parent).is_some() {
|
||||
debug!("Skipping type declared inside function: {:?}", ty);
|
||||
return Ok(Item::new_opaque_type(id, ty, ctx));
|
||||
}
|
||||
}
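The new early-return above treats any type declared inside a function body as opaque, since bindgen generally ignores function bodies and nothing outside the function could name such a type anyway. A toy sketch of that parent-kind check; the cursor types here are simplified stand-ins for libclang's cursors:

enum CursorKind {
    Struct,
    FunctionDecl,
    TranslationUnit,
}

struct Cursor {
    kind: CursorKind,
    semantic_parent: Option<Box<Cursor>>,
}

/// Mirrors the new check: a type whose semantic parent is a function-like
/// declaration gets treated as opaque, since nothing outside that function
/// body can refer to it anyway.
fn should_be_opaque(ty_decl: &Cursor) -> bool {
    match &ty_decl.semantic_parent {
        Some(parent) => matches!(parent.kind, CursorKind::FunctionDecl),
        None => false,
    }
}

fn main() {
    let inside_fn = Cursor {
        kind: CursorKind::Struct,
        semantic_parent: Some(Box::new(Cursor {
            kind: CursorKind::FunctionDecl,
            semantic_parent: None,
        })),
    };
    assert!(should_be_opaque(&inside_fn));

    let top_level = Cursor {
        kind: CursorKind::Struct,
        semantic_parent: Some(Box::new(Cursor {
            kind: CursorKind::TranslationUnit,
            semantic_parent: None,
        })),
    };
    assert!(!should_be_opaque(&top_level));
}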
|
||||
|
||||
let decl = {
|
||||
let decl = ty.declaration();
|
||||
decl.definition().unwrap_or(decl)
|
||||
let canonical_def = ty.canonical_type().declaration().definition();
|
||||
canonical_def.unwrap_or_else(|| ty.declaration())
|
||||
};
|
||||
|
||||
let comment = decl.raw_comment().or_else(|| location.raw_comment());
|
||||
|
@ -1570,7 +1615,7 @@ impl ClangItemParser for Item {
|
|||
Annotations::new(&decl).or_else(|| Annotations::new(&location));
|
||||
|
||||
if let Some(ref annotations) = annotations {
|
||||
if let Some(ref replaced) = annotations.use_instead_of() {
|
||||
if let Some(replaced) = annotations.use_instead_of() {
|
||||
ctx.replace(replaced, id);
|
||||
}
|
||||
}
|
||||
|
@ -1624,6 +1669,7 @@ impl ClangItemParser for Item {
|
|||
annotations,
|
||||
relevant_parent_id,
|
||||
ItemKind::Type(item),
|
||||
Some(location.location()),
|
||||
),
|
||||
declaration,
|
||||
Some(location),
|
||||
|
@ -1818,11 +1864,7 @@ impl ClangItemParser for Item {
|
|||
clang_sys::CXChildVisit_Continue
|
||||
});
|
||||
|
||||
if let Some(def) = definition {
|
||||
def
|
||||
} else {
|
||||
return None;
|
||||
}
|
||||
definition?
|
||||
};
|
||||
assert!(is_template_with_spelling(&definition, &ty_spelling));
|
||||
|
||||
|
@ -1856,6 +1898,7 @@ impl ClangItemParser for Item {
|
|||
None,
|
||||
parent,
|
||||
ItemKind::Type(Type::named(name)),
|
||||
Some(location.location()),
|
||||
);
|
||||
ctx.add_type_param(item, definition);
|
||||
Some(id.as_type_id_unchecked())
|
||||
|
@ -1906,7 +1949,7 @@ impl ItemCanonicalPath for Item {
|
|||
path.push(CONSTIFIED_ENUM_MODULE_REPR_NAME.into());
|
||||
}
|
||||
|
||||
return path;
|
||||
path
|
||||
}
|
||||
|
||||
fn canonical_path(&self, ctx: &BindgenContext) -> Vec<String> {
|
||||
|
@ -1918,7 +1961,7 @@ impl ItemCanonicalPath for Item {
|
|||
/// not.
|
||||
///
|
||||
/// Most of the callers probably want just yes, but the ones dealing with
|
||||
/// whitelisting and blacklisting don't.
|
||||
/// allowlisting and blocklisting don't.
|
||||
#[derive(Copy, Clone, Debug, PartialEq)]
|
||||
enum UserMangled {
|
||||
No,
|
||||
|
@ -1939,8 +1982,8 @@ impl<'a> NameOptions<'a> {
|
|||
/// Construct a new `NameOptions`
|
||||
pub fn new(item: &'a Item, ctx: &'a BindgenContext) -> Self {
|
||||
NameOptions {
|
||||
item: item,
|
||||
ctx: ctx,
|
||||
item,
|
||||
ctx,
|
||||
within_namespaces: false,
|
||||
user_mangled: UserMangled::Yes,
|
||||
}
|
||||
|
|
|
@ -64,7 +64,7 @@ impl Layout {
|
|||
next_align *= 2;
|
||||
}
|
||||
Layout {
|
||||
size: size,
|
||||
size,
|
||||
align: next_align / 2,
|
||||
packed: false,
|
||||
}
|
||||
|
|
|
@ -32,15 +32,15 @@ impl Module {
|
|||
/// Construct a new `Module`.
|
||||
pub fn new(name: Option<String>, kind: ModuleKind) -> Self {
|
||||
Module {
|
||||
name: name,
|
||||
kind: kind,
|
||||
name,
|
||||
kind,
|
||||
children: ItemSet::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Get this module's name.
|
||||
pub fn name(&self) -> Option<&str> {
|
||||
self.name.as_ref().map(|n| &**n)
|
||||
self.name.as_deref()
|
||||
}
|
||||
|
||||
/// Get a mutable reference to this module's children.
|
||||
|
|
|
@ -89,12 +89,10 @@ impl ObjCInterface {
|
|||
pub fn rust_name(&self) -> String {
|
||||
if let Some(ref cat) = self.category {
|
||||
format!("{}_{}", self.name(), cat)
|
||||
} else if self.is_protocol {
|
||||
format!("P{}", self.name())
|
||||
} else {
|
||||
if self.is_protocol {
|
||||
format!("P{}", self.name())
|
||||
} else {
|
||||
format!("I{}", self.name().to_owned())
|
||||
}
|
||||
format!("I{}", self.name().to_owned())
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -149,28 +147,34 @@ impl ObjCInterface {
|
|||
// Gather protocols this interface conforms to
|
||||
let needle = format!("P{}", c.spelling());
|
||||
let items_map = ctx.items();
|
||||
debug!("Interface {} conforms to {}, find the item", interface.name, needle);
|
||||
debug!(
|
||||
"Interface {} conforms to {}, find the item",
|
||||
interface.name, needle
|
||||
);
|
||||
|
||||
for (id, item) in items_map
|
||||
{
|
||||
for (id, item) in items_map {
|
||||
if let Some(ty) = item.as_type() {
|
||||
match *ty.kind() {
|
||||
TypeKind::ObjCInterface(ref protocol) => {
|
||||
if protocol.is_protocol
|
||||
{
|
||||
debug!("Checking protocol {}, ty.name {:?}", protocol.name, ty.name());
|
||||
if Some(needle.as_ref()) == ty.name() {
|
||||
debug!("Found conforming protocol {:?}", item);
|
||||
interface.conforms_to.push(id);
|
||||
break;
|
||||
}
|
||||
if let TypeKind::ObjCInterface(ref protocol) =
|
||||
*ty.kind()
|
||||
{
|
||||
if protocol.is_protocol {
|
||||
debug!(
|
||||
"Checking protocol {}, ty.name {:?}",
|
||||
protocol.name,
|
||||
ty.name()
|
||||
);
|
||||
if Some(needle.as_ref()) == ty.name() {
|
||||
debug!(
|
||||
"Found conforming protocol {:?}",
|
||||
item
|
||||
);
|
||||
interface.conforms_to.push(id);
|
||||
break;
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
CXCursor_ObjCInstanceMethodDecl |
|
||||
CXCursor_ObjCClassMethodDecl => {
|
||||
|
@ -178,8 +182,10 @@ impl ObjCInterface {
|
|||
let signature =
|
||||
FunctionSig::from_ty(&c.cur_type(), &c, ctx)
|
||||
.expect("Invalid function sig");
|
||||
let is_class_method = c.kind() == CXCursor_ObjCClassMethodDecl;
|
||||
let method = ObjCMethod::new(&name, signature, is_class_method);
|
||||
let is_class_method =
|
||||
c.kind() == CXCursor_ObjCClassMethodDecl;
|
||||
let method =
|
||||
ObjCMethod::new(&name, signature, is_class_method);
|
||||
interface.add_method(method);
|
||||
}
|
||||
CXCursor_TemplateTypeParameter => {
|
||||
|
@ -189,7 +195,7 @@ impl ObjCInterface {
|
|||
CXCursor_ObjCSuperClassRef => {
|
||||
let item = Item::from_ty_or_ref(c.cur_type(), c, None, ctx);
|
||||
interface.parent_class = Some(item.into());
|
||||
},
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
CXChildVisit_Continue
|
||||
|
@ -218,7 +224,7 @@ impl ObjCMethod {
|
|||
|
||||
ObjCMethod {
|
||||
name: name.to_owned(),
|
||||
rust_name: rust_name.to_owned(),
|
||||
rust_name,
|
||||
signature,
|
||||
is_class_method,
|
||||
}
|
||||
|
@ -261,7 +267,7 @@ impl ObjCMethod {
|
|||
.collect();
|
||||
|
||||
// No arguments
|
||||
if args.len() == 0 && split_name.len() == 1 {
|
||||
if args.is_empty() && split_name.len() == 1 {
|
||||
let name = &split_name[0];
|
||||
return quote! {
|
||||
#name
|
||||
|
@ -269,13 +275,12 @@ impl ObjCMethod {
|
|||
}
|
||||
|
||||
// Check right amount of arguments
|
||||
if args.len() != split_name.len() - 1 {
|
||||
panic!(
|
||||
"Incorrect method name or arguments for objc method, {:?} vs {:?}",
|
||||
args,
|
||||
split_name,
|
||||
);
|
||||
}
|
||||
assert!(
|
||||
args.len() == split_name.len() - 1,
|
||||
"Incorrect method name or arguments for objc method, {:?} vs {:?}",
|
||||
args,
|
||||
split_name
|
||||
);
|
||||
|
||||
// Get arguments without type signatures to pass to `msg_send!`
|
||||
let mut args_without_types = vec![];
|
||||
|
|
|
@ -134,10 +134,10 @@ pub trait TemplateParameters: Sized {
|
|||
where
|
||||
Self: ItemAncestors,
|
||||
{
|
||||
let ancestors: Vec<_> = self.ancestors(ctx).collect();
|
||||
let mut ancestors: Vec<_> = self.ancestors(ctx).collect();
|
||||
ancestors.reverse();
|
||||
ancestors
|
||||
.into_iter()
|
||||
.rev()
|
||||
.flat_map(|id| id.self_template_params(ctx).into_iter())
|
||||
.collect()
|
||||
}
|
||||
|
@ -306,13 +306,13 @@ impl IsOpaque for TemplateInstantiation {
|
|||
// correct fix is to make `canonical_{name,path}` include template
|
||||
// arguments properly.
|
||||
|
||||
let mut path = item.path_for_whitelisting(ctx).clone();
|
||||
let mut path = item.path_for_allowlisting(ctx).clone();
|
||||
let args: Vec<_> = self
|
||||
.template_arguments()
|
||||
.iter()
|
||||
.map(|arg| {
|
||||
let arg_path =
|
||||
ctx.resolve_item(*arg).path_for_whitelisting(ctx);
|
||||
ctx.resolve_item(*arg).path_for_allowlisting(ctx);
|
||||
arg_path[1..].join("::")
|
||||
})
|
||||
.collect();
|
||||
|
|
|
@ -24,9 +24,9 @@ impl Edge {
|
|||
}
|
||||
}
|
||||
|
||||
impl Into<ItemId> for Edge {
|
||||
fn into(self) -> ItemId {
|
||||
self.to
|
||||
impl From<Edge> for ItemId {
|
||||
fn from(val: Edge) -> Self {
|
||||
val.to
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -201,7 +201,7 @@ pub fn all_edges(_: &BindgenContext, _: Edge) -> bool {
|
|||
/// A `TraversalPredicate` implementation that only follows
|
||||
/// `EdgeKind::InnerType` edges, and therefore traversals using this predicate
|
||||
/// will only visit the traversal's roots and their inner types. This is used
|
||||
/// in no-recursive-whitelist mode, where inner types such as anonymous
|
||||
/// in no-recursive-allowlist mode, where inner types such as anonymous
|
||||
/// structs/unions still need to be processed.
|
||||
pub fn only_inner_type_edges(_: &BindgenContext, edge: Edge) -> bool {
|
||||
edge.kind == EdgeKind::InnerType
|
||||
|
@ -377,7 +377,7 @@ pub trait Trace {
|
|||
|
||||
/// An graph traversal of the transitive closure of references between items.
|
||||
///
|
||||
/// See `BindgenContext::whitelisted_items` for more information.
|
||||
/// See `BindgenContext::allowlisted_items` for more information.
|
||||
pub struct ItemTraversal<'ctx, Storage, Queue, Predicate>
|
||||
where
|
||||
Storage: TraversalStorage<'ctx>,
|
||||
|
@ -424,10 +424,10 @@ where
|
|||
}
|
||||
|
||||
ItemTraversal {
|
||||
ctx: ctx,
|
||||
seen: seen,
|
||||
queue: queue,
|
||||
predicate: predicate,
|
||||
ctx,
|
||||
seen,
|
||||
queue,
|
||||
predicate,
|
||||
currently_traversing: None,
|
||||
}
|
||||
}
|
||||
|
|
|
@ -39,7 +39,6 @@ pub struct Type {
|
|||
/// traits, and so if we have a type containing an array with more than this
|
||||
/// many items, we won't be able to derive common traits on that type.
|
||||
///
|
||||
/// We need type-level integers yesterday :'(
|
||||
pub const RUST_DERIVE_IN_ARRAY_LIMIT: usize = 32;
|
||||
|
||||
impl Type {
|
||||
|
@ -88,23 +87,17 @@ impl Type {
|
|||
|
||||
/// Get this type's name.
|
||||
pub fn name(&self) -> Option<&str> {
|
||||
self.name.as_ref().map(|name| &**name)
|
||||
self.name.as_deref()
|
||||
}
|
||||
|
||||
/// Whether this is a block pointer type.
|
||||
pub fn is_block_pointer(&self) -> bool {
|
||||
match self.kind {
|
||||
TypeKind::BlockPointer(..) => true,
|
||||
_ => false,
|
||||
}
|
||||
matches!(self.kind, TypeKind::BlockPointer(..))
|
||||
}
|
||||
|
||||
/// Is this a compound type?
|
||||
pub fn is_comp(&self) -> bool {
|
||||
match self.kind {
|
||||
TypeKind::Comp(..) => true,
|
||||
_ => false,
|
||||
}
|
||||
matches!(self.kind, TypeKind::Comp(..))
|
||||
}
|
||||
|
||||
/// Is this a union?
|
||||
|
@ -117,58 +110,43 @@ impl Type {
|
|||
|
||||
/// Is this type of kind `TypeKind::TypeParam`?
|
||||
pub fn is_type_param(&self) -> bool {
|
||||
match self.kind {
|
||||
TypeKind::TypeParam => true,
|
||||
_ => false,
|
||||
}
|
||||
matches!(self.kind, TypeKind::TypeParam)
|
||||
}
|
||||
|
||||
/// Is this a template instantiation type?
|
||||
pub fn is_template_instantiation(&self) -> bool {
|
||||
match self.kind {
|
||||
TypeKind::TemplateInstantiation(..) => true,
|
||||
_ => false,
|
||||
}
|
||||
matches!(self.kind, TypeKind::TemplateInstantiation(..))
|
||||
}
|
||||
|
||||
/// Is this a template alias type?
|
||||
pub fn is_template_alias(&self) -> bool {
|
||||
match self.kind {
|
||||
TypeKind::TemplateAlias(..) => true,
|
||||
_ => false,
|
||||
}
|
||||
matches!(self.kind, TypeKind::TemplateAlias(..))
|
||||
}
|
||||
|
||||
/// Is this a function type?
|
||||
pub fn is_function(&self) -> bool {
|
||||
match self.kind {
|
||||
TypeKind::Function(..) => true,
|
||||
_ => false,
|
||||
}
|
||||
matches!(self.kind, TypeKind::Function(..))
|
||||
}
|
||||
|
||||
/// Is this an enum type?
|
||||
pub fn is_enum(&self) -> bool {
|
||||
match self.kind {
|
||||
TypeKind::Enum(..) => true,
|
||||
_ => false,
|
||||
}
|
||||
matches!(self.kind, TypeKind::Enum(..))
|
||||
}
|
||||
|
||||
/// Is this either a builtin or named type?
|
||||
pub fn is_builtin_or_type_param(&self) -> bool {
|
||||
match self.kind {
|
||||
matches!(
|
||||
self.kind,
|
||||
TypeKind::Void |
|
||||
TypeKind::NullPtr |
|
||||
TypeKind::Function(..) |
|
||||
TypeKind::Array(..) |
|
||||
TypeKind::Reference(..) |
|
||||
TypeKind::Pointer(..) |
|
||||
TypeKind::Int(..) |
|
||||
TypeKind::Float(..) |
|
||||
TypeKind::TypeParam => true,
|
||||
_ => false,
|
||||
}
|
||||
TypeKind::NullPtr |
|
||||
TypeKind::Function(..) |
|
||||
TypeKind::Array(..) |
|
||||
TypeKind::Reference(..) |
|
||||
TypeKind::Pointer(..) |
|
||||
TypeKind::Int(..) |
|
||||
TypeKind::Float(..) |
|
||||
TypeKind::TypeParam
|
||||
)
|
||||
}
|
||||
|
||||
/// Creates a new named type, with name `name`.
|
||||
|
@ -179,26 +157,17 @@ impl Type {
|
|||
|
||||
/// Is this a floating point type?
|
||||
pub fn is_float(&self) -> bool {
|
||||
match self.kind {
|
||||
TypeKind::Float(..) => true,
|
||||
_ => false,
|
||||
}
|
||||
matches!(self.kind, TypeKind::Float(..))
|
||||
}
|
||||
|
||||
/// Is this a boolean type?
|
||||
pub fn is_bool(&self) -> bool {
|
||||
match self.kind {
|
||||
TypeKind::Int(IntKind::Bool) => true,
|
||||
_ => false,
|
||||
}
|
||||
matches!(self.kind, TypeKind::Int(IntKind::Bool))
|
||||
}
|
||||
|
||||
/// Is this an integer type?
|
||||
pub fn is_integer(&self) -> bool {
|
||||
match self.kind {
|
||||
TypeKind::Int(..) => true,
|
||||
_ => false,
|
||||
}
|
||||
matches!(self.kind, TypeKind::Int(..))
|
||||
}
|
||||
|
||||
/// Cast this type to an integer kind, or `None` if it is not an integer
|
||||
|
@ -217,19 +186,15 @@ impl Type {
|
|||
|
||||
/// Is this a reference to another type?
|
||||
pub fn is_type_ref(&self) -> bool {
|
||||
match self.kind {
|
||||
TypeKind::ResolvedTypeRef(_) |
|
||||
TypeKind::UnresolvedTypeRef(_, _, _) => true,
|
||||
_ => false,
|
||||
}
|
||||
matches!(
|
||||
self.kind,
|
||||
TypeKind::ResolvedTypeRef(_) | TypeKind::UnresolvedTypeRef(_, _, _)
|
||||
)
|
||||
}
|
||||
|
||||
/// Is this an unresolved reference?
|
||||
pub fn is_unresolved_ref(&self) -> bool {
|
||||
match self.kind {
|
||||
TypeKind::UnresolvedTypeRef(_, _, _) => true,
|
||||
_ => false,
|
||||
}
|
||||
matches!(self.kind, TypeKind::UnresolvedTypeRef(_, _, _))
|
||||
}
|
||||
|
||||
/// Is this a incomplete array type?
|
||||
|
@ -279,14 +244,14 @@ impl Type {
|
|||
match self.kind {
|
||||
TypeKind::TypeParam => {
|
||||
let name = self.name().expect("Unnamed named type?");
|
||||
!clang::is_valid_identifier(&name)
|
||||
!clang::is_valid_identifier(name)
|
||||
}
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Takes `name`, and returns a suitable identifier representation for it.
|
||||
fn sanitize_name<'a>(name: &'a str) -> Cow<'a, str> {
|
||||
fn sanitize_name(name: &str) -> Cow<str> {
|
||||
if clang::is_valid_identifier(name) {
|
||||
return Cow::Borrowed(name);
|
||||
}
|
||||
|
@ -301,12 +266,8 @@ impl Type {
|
|||
ctx: &BindgenContext,
|
||||
) -> Option<Cow<'a, str>> {
|
||||
let name_info = match *self.kind() {
|
||||
TypeKind::Pointer(inner) => {
|
||||
Some((inner.into(), Cow::Borrowed("ptr")))
|
||||
}
|
||||
TypeKind::Reference(inner) => {
|
||||
Some((inner.into(), Cow::Borrowed("ref")))
|
||||
}
|
||||
TypeKind::Pointer(inner) => Some((inner, Cow::Borrowed("ptr"))),
|
||||
TypeKind::Reference(inner) => Some((inner, Cow::Borrowed("ref"))),
|
||||
TypeKind::Array(inner, length) => {
|
||||
Some((inner, format!("array{}", length).into()))
|
||||
}
|
||||
|
@ -376,16 +337,16 @@ impl Type {
|
|||
/// There are some types we don't want to stop at when finding an opaque
|
||||
/// item, so we can arrive to the proper item that needs to be generated.
|
||||
pub fn should_be_traced_unconditionally(&self) -> bool {
|
||||
match self.kind {
|
||||
matches!(
|
||||
self.kind,
|
||||
TypeKind::Comp(..) |
|
||||
TypeKind::Function(..) |
|
||||
TypeKind::Pointer(..) |
|
||||
TypeKind::Array(..) |
|
||||
TypeKind::Reference(..) |
|
||||
TypeKind::TemplateInstantiation(..) |
|
||||
TypeKind::ResolvedTypeRef(..) => true,
|
||||
_ => false,
|
||||
}
|
||||
TypeKind::Function(..) |
|
||||
TypeKind::Pointer(..) |
|
||||
TypeKind::Array(..) |
|
||||
TypeKind::Reference(..) |
|
||||
TypeKind::TemplateInstantiation(..) |
|
||||
TypeKind::ResolvedTypeRef(..)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -792,7 +753,7 @@ impl Type {
|
|||
(ty.template_args().is_some() && ty_kind != CXType_Typedef)
|
||||
{
|
||||
// This is a template instantiation.
|
||||
match TemplateInstantiation::from_ty(&ty, ctx) {
|
||||
match TemplateInstantiation::from_ty(ty, ctx) {
|
||||
Some(inst) => TypeKind::TemplateInstantiation(inst),
|
||||
None => TypeKind::Opaque,
|
||||
}
|
||||
|
@ -1121,7 +1082,16 @@ impl Type {
|
|||
let inner = cursor.typedef_type().expect("Not valid Type?");
|
||||
let inner =
|
||||
Item::from_ty_or_ref(inner, location, None, ctx);
|
||||
TypeKind::Alias(inner)
|
||||
if inner == potential_id {
|
||||
warn!(
|
||||
"Generating oqaque type instead of self-referential \
|
||||
typedef");
|
||||
// This can happen if we bail out of recursive situations
|
||||
// within the clang parsing.
|
||||
TypeKind::Opaque
|
||||
} else {
|
||||
TypeKind::Alias(inner)
|
||||
}
|
||||
}
|
||||
CXType_Enum => {
|
||||
let enum_ = Enum::from_ty(ty, ctx).expect("Not an enum?");
|
||||
|
|
|
@ -88,7 +88,7 @@ impl Var {
|
|||
|
||||
/// Get this variable's mangled name.
|
||||
pub fn mangled_name(&self) -> Option<&str> {
|
||||
self.mangled_name.as_ref().map(|n| &**n)
|
||||
self.mangled_name.as_deref()
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -117,7 +117,6 @@ impl DotAttributes for Var {
|
|||
}
|
||||
}
|
||||
|
||||
// TODO(emilio): we could make this more (or less) granular, I guess.
|
||||
fn default_macro_constant_type(ctx: &BindgenContext, value: i64) -> IntKind {
|
||||
if value < 0 ||
|
||||
ctx.options().default_macro_constant_type ==
|
||||
|
@ -125,13 +124,28 @@ fn default_macro_constant_type(ctx: &BindgenContext, value: i64) -> IntKind {
|
|||
{
|
||||
if value < i32::min_value() as i64 || value > i32::max_value() as i64 {
|
||||
IntKind::I64
|
||||
} else {
|
||||
} else if !ctx.options().fit_macro_constants ||
|
||||
value < i16::min_value() as i64 ||
|
||||
value > i16::max_value() as i64
|
||||
{
|
||||
IntKind::I32
|
||||
} else if value < i8::min_value() as i64 ||
|
||||
value > i8::max_value() as i64
|
||||
{
|
||||
IntKind::I16
|
||||
} else {
|
||||
IntKind::I8
|
||||
}
|
||||
} else if value > u32::max_value() as i64 {
|
||||
IntKind::U64
|
||||
} else {
|
||||
} else if !ctx.options().fit_macro_constants ||
|
||||
value > u16::max_value() as i64
|
||||
{
|
||||
IntKind::U32
|
||||
} else if value > u8::max_value() as i64 {
|
||||
IntKind::U16
|
||||
} else {
|
||||
IntKind::U8
|
||||
}
|
||||
}
|
||||
|
||||
|
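To make the widened/narrowed branches above easier to follow, here is a minimal, self-contained sketch that mirrors the same range checks. It is a sketch only, not the bindgen function: `fit` stands in for ctx.options().fit_macro_constants, and the default_macro_constant_type == Signed condition that also feeds the signed branch is left out.

// Standalone mirror of the range checks in default_macro_constant_type above.
fn pick_width(value: i64, fit: bool) -> &'static str {
    if value < 0 {
        if value < i32::MIN as i64 || value > i32::MAX as i64 {
            "i64"
        } else if !fit || value < i16::MIN as i64 || value > i16::MAX as i64 {
            "i32"
        } else if value < i8::MIN as i64 || value > i8::MAX as i64 {
            "i16"
        } else {
            "i8"
        }
    } else if value > u32::MAX as i64 {
        "u64"
    } else if !fit || value > u16::MAX as i64 {
        "u32"
    } else if value > u8::MAX as i64 {
        "u16"
    } else {
        "u8"
    }
}

fn main() {
    assert_eq!(pick_width(300, false), "u32"); // default: never narrower than 32 bits
    assert_eq!(pick_width(300, true), "u16");  // --fit-macro-constant-types shrinks it
    assert_eq!(pick_width(-7, true), "i8");
    assert_eq!(pick_width(5_000_000_000, true), "u64"); // exceeds u32::MAX
}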
@ -268,7 +282,7 @@ impl ClangSubItemParser for Var {
|
|||
.parse_callbacks()
|
||||
.and_then(|c| c.int_macro(&name, value))
|
||||
.unwrap_or_else(|| {
|
||||
default_macro_constant_type(&ctx, value)
|
||||
default_macro_constant_type(ctx, value)
|
||||
});
|
||||
|
||||
(TypeKind::Int(kind), VarType::Int(value))
|
||||
|
@ -384,11 +398,8 @@ fn parse_macro(
|
|||
|
||||
let parser = expr::IdentifierParser::new(ctx.parsed_macros());
|
||||
|
||||
match parser.macro_definition(&cexpr_tokens) {
|
||||
Ok((_, (id, val))) => {
|
||||
return Some((id.into(), val));
|
||||
}
|
||||
_ => {}
|
||||
if let Ok((_, (id, val))) = parser.macro_definition(&cexpr_tokens) {
|
||||
return Some((id.into(), val));
|
||||
}
|
||||
|
||||
// Try without the last token, to workaround a libclang bug in versions
|
||||
|
|
|
@ -51,6 +51,7 @@ macro_rules! doc_mod {
|
|||
|
||||
mod clang;
|
||||
mod codegen;
|
||||
mod deps;
|
||||
mod features;
|
||||
mod ir;
|
||||
mod parse;
|
||||
|
@ -88,12 +89,28 @@ type HashSet<K> = ::rustc_hash::FxHashSet<K>;
|
|||
pub(crate) use std::collections::hash_map::Entry;
|
||||
|
||||
/// Default prefix for the anon fields.
|
||||
pub const DEFAULT_ANON_FIELDS_PREFIX: &'static str = "__bindgen_anon_";
|
||||
pub const DEFAULT_ANON_FIELDS_PREFIX: &str = "__bindgen_anon_";
|
||||
|
||||
fn file_is_cpp(name_file: &str) -> bool {
|
||||
name_file.ends_with(".hpp") ||
|
||||
name_file.ends_with(".hxx") ||
|
||||
name_file.ends_with(".hh") ||
|
||||
name_file.ends_with(".h++")
|
||||
}
|
||||
|
||||
fn args_are_cpp(clang_args: &[String]) -> bool {
|
||||
return clang_args
|
||||
.windows(2)
|
||||
.any(|w| w[0] == "-xc++" || w[1] == "-xc++" || w == &["-x", "c++"]);
|
||||
for w in clang_args.windows(2) {
|
||||
if w[0] == "-xc++" || w[1] == "-xc++" {
|
||||
return true;
|
||||
}
|
||||
if w[0] == "-x" && w[1] == "c++" {
|
||||
return true;
|
||||
}
|
||||
if w[0] == "-include" && file_is_cpp(&w[1]) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
|
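As a rough illustration of what the rewritten helpers above count as a C++ invocation, here is a small standalone sketch; the helper bodies mirror the logic above in spirit rather than verbatim, and real callers go through BindgenOptions.

fn file_is_cpp(name: &str) -> bool {
    // Same extension check as the helper above.
    name.ends_with(".hpp") ||
        name.ends_with(".hxx") ||
        name.ends_with(".hh") ||
        name.ends_with(".h++")
}

fn args_are_cpp(clang_args: &[String]) -> bool {
    // Scan adjacent argument pairs, exactly like the loop above.
    clang_args.windows(2).any(|w| {
        w[0] == "-xc++" ||
            w[1] == "-xc++" ||
            (w[0] == "-x" && w[1] == "c++") ||
            (w[0] == "-include" && file_is_cpp(&w[1]))
    })
}

fn main() {
    let cpp = vec!["-x".to_string(), "c++".to_string()];
    let c = vec!["-I/usr/include".to_string(), "-DFOO".to_string()];
    assert!(args_are_cpp(&cpp));
    assert!(!args_are_cpp(&c));
    assert!(file_is_cpp("wrapper.hpp"));
}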
||||
bitflags! {
|
||||
|
@ -161,8 +178,8 @@ impl Default for CodegenConfig {
|
|||
///
|
||||
/// // Configure and generate bindings.
|
||||
/// let bindings = builder().header("path/to/input/header")
|
||||
/// .whitelist_type("SomeCoolClass")
|
||||
/// .whitelist_function("do_some_cool_thing")
|
||||
/// .allowlist_type("SomeCoolClass")
|
||||
/// .allowlist_function("do_some_cool_thing")
|
||||
/// .generate()?;
|
||||
///
|
||||
/// // Write the generated bindings to an output file.
|
||||
|
@ -288,18 +305,20 @@ impl Builder {
|
|||
(&self.options.type_alias, "--type-alias"),
|
||||
(&self.options.new_type_alias, "--new-type-alias"),
|
||||
(&self.options.new_type_alias_deref, "--new-type-alias-deref"),
|
||||
(&self.options.blacklisted_types, "--blacklist-type"),
|
||||
(&self.options.blacklisted_functions, "--blacklist-function"),
|
||||
(&self.options.blacklisted_items, "--blacklist-item"),
|
||||
(&self.options.blocklisted_types, "--blocklist-type"),
|
||||
(&self.options.blocklisted_functions, "--blocklist-function"),
|
||||
(&self.options.blocklisted_items, "--blocklist-item"),
|
||||
(&self.options.blocklisted_files, "--blocklist-file"),
|
||||
(&self.options.opaque_types, "--opaque-type"),
|
||||
(&self.options.whitelisted_functions, "--whitelist-function"),
|
||||
(&self.options.whitelisted_types, "--whitelist-type"),
|
||||
(&self.options.whitelisted_vars, "--whitelist-var"),
|
||||
(&self.options.allowlisted_functions, "--allowlist-function"),
|
||||
(&self.options.allowlisted_types, "--allowlist-type"),
|
||||
(&self.options.allowlisted_vars, "--allowlist-var"),
|
||||
(&self.options.no_partialeq_types, "--no-partialeq"),
|
||||
(&self.options.no_copy_types, "--no-copy"),
|
||||
(&self.options.no_debug_types, "--no-debug"),
|
||||
(&self.options.no_default_types, "--no-default"),
|
||||
(&self.options.no_hash_types, "--no-hash"),
|
||||
(&self.options.must_use_types, "--must-use-type"),
|
||||
];
|
||||
|
||||
for (set, flag) in regex_sets {
|
||||
|
@ -363,8 +382,8 @@ impl Builder {
|
|||
output_vector.push("--no-doc-comments".into());
|
||||
}
|
||||
|
||||
if !self.options.whitelist_recursively {
|
||||
output_vector.push("--no-recursive-whitelist".into());
|
||||
if !self.options.allowlist_recursively {
|
||||
output_vector.push("--no-recursive-allowlist".into());
|
||||
}
|
||||
|
||||
if self.options.objc_extern_crate {
|
||||
|
@ -462,6 +481,10 @@ impl Builder {
|
|||
output_vector.push("--no-prepend-enum-name".into());
|
||||
}
|
||||
|
||||
if self.options.fit_macro_constants {
|
||||
output_vector.push("--fit-macro-constant-types".into());
|
||||
}
|
||||
|
||||
if self.options.array_pointers_in_arguments {
|
||||
output_vector.push("--use-array-pointers-in-arguments".into());
|
||||
}
|
||||
|
@ -478,6 +501,14 @@ impl Builder {
|
|||
output_vector.push(line.clone());
|
||||
}
|
||||
|
||||
for (module, lines) in &self.options.module_lines {
|
||||
for line in lines.iter() {
|
||||
output_vector.push("--module-raw-line".into());
|
||||
output_vector.push(module.clone());
|
||||
output_vector.push(line.clone());
|
||||
}
|
||||
}
|
||||
|
||||
if self.options.use_core {
|
||||
output_vector.push("--use-core".into());
|
||||
}
|
||||
|
@ -512,10 +543,29 @@ impl Builder {
|
|||
output_vector.push(path.into());
|
||||
}
|
||||
|
||||
if self.options.dynamic_library_name.is_some() {
|
||||
let libname = self.options.dynamic_library_name.as_ref().unwrap();
|
||||
if let Some(ref name) = self.options.dynamic_library_name {
|
||||
output_vector.push("--dynamic-loading".into());
|
||||
output_vector.push(libname.clone());
|
||||
output_vector.push(name.clone());
|
||||
}
|
||||
|
||||
if self.options.dynamic_link_require_all {
|
||||
output_vector.push("--dynamic-link-require-all".into());
|
||||
}
|
||||
|
||||
if self.options.respect_cxx_access_specs {
|
||||
output_vector.push("--respect-cxx-access-specs".into());
|
||||
}
|
||||
|
||||
if self.options.translate_enum_integer_types {
|
||||
output_vector.push("--translate-enum-integer-types".into());
|
||||
}
|
||||
|
||||
if self.options.c_naming {
|
||||
output_vector.push("--c-naming".into());
|
||||
}
|
||||
|
||||
if self.options.force_explicit_padding {
|
||||
output_vector.push("--explicit-padding".into());
|
||||
}
|
||||
|
||||
// Add clang arguments
|
||||
|
@ -565,12 +615,33 @@ impl Builder {
|
|||
self
|
||||
}
|
||||
|
||||
/// Add a depfile output which will be written alongside the generated bindings.
|
||||
pub fn depfile<H: Into<String>, D: Into<PathBuf>>(
|
||||
mut self,
|
||||
output_module: H,
|
||||
depfile: D,
|
||||
) -> Builder {
|
||||
self.options.depfile = Some(deps::DepfileSpec {
|
||||
output_module: output_module.into(),
|
||||
depfile_path: depfile.into(),
|
||||
});
|
||||
self
|
||||
}
|
||||
|
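A hedged sketch of how the new depfile builder method might be called from a build script; the header name, module name, and output paths are made up for illustration.

// build.rs sketch (paths and names are hypothetical).
fn main() {
    let bindings = bindgen::builder()
        .header("wrapper.h")
        // Ask bindgen to also write a Makefile-style depfile listing every
        // header the generated module depends on.
        .depfile("bindings", "bindings.d")
        .generate()
        .expect("bindgen failed");
    bindings
        .write_to_file("bindings.rs")
        .expect("could not write bindings");
}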
||||
/// Add `contents` as an input C/C++ header named `name`.
|
||||
///
|
||||
/// The file `name` will be added to the clang arguments.
|
||||
pub fn header_contents(mut self, name: &str, contents: &str) -> Builder {
|
||||
// Apparently clang relies on having a virtual FS corresponding to
|
||||
// the real one, so we need absolute paths here
|
||||
let absolute_path = env::current_dir()
|
||||
.expect("Cannot retrieve current directory")
|
||||
.join(name)
|
||||
.to_str()
|
||||
.expect("Cannot convert current directory name to string")
|
||||
.to_owned();
|
||||
self.input_header_contents
|
||||
.push((name.into(), contents.into()));
|
||||
.push((absolute_path, contents.into()));
|
||||
self
|
||||
}
|
||||
|
||||
|
@ -603,12 +674,7 @@ impl Builder {
|
|||
}
|
||||
|
||||
/// Whether the generated bindings should contain documentation comments
|
||||
/// (docstrings) or not.
|
||||
///
|
||||
/// This ideally will always be true, but it may need to be false until we
|
||||
/// implement some processing on comments to work around issues as described
|
||||
/// in [rust-bindgen issue
|
||||
/// #426](https://github.com/rust-lang/rust-bindgen/issues/426).
|
||||
/// (docstrings) or not. This is set to true by default.
|
||||
///
|
||||
/// Note that clang by default excludes comments from system headers, pass
|
||||
/// `-fretain-comments-from-system-headers` as
|
||||
|
@ -622,9 +688,9 @@ impl Builder {
|
|||
self
|
||||
}
|
||||
|
||||
/// Whether to whitelist recursively or not. Defaults to true.
|
||||
/// Whether to allowlist recursively or not. Defaults to true.
|
||||
///
|
||||
/// Given that we have explicitly whitelisted the "initiate_dance_party"
|
||||
/// Given that we have explicitly allowlisted the "initiate_dance_party"
|
||||
/// function in this C header:
|
||||
///
|
||||
/// ```c
|
||||
|
@ -637,23 +703,29 @@ impl Builder {
|
|||
///
|
||||
/// We would normally generate bindings to both the `initiate_dance_party`
|
||||
/// function and the `MoonBoots` struct that it transitively references. By
|
||||
/// configuring with `whitelist_recursively(false)`, `bindgen` will not emit
|
||||
/// bindings for anything except the explicitly whitelisted items, and there
|
||||
/// configuring with `allowlist_recursively(false)`, `bindgen` will not emit
|
||||
/// bindings for anything except the explicitly allowlisted items, and there
|
||||
/// would be no emitted struct definition for `MoonBoots`. However, the
|
||||
/// `initiate_dance_party` function would still reference `MoonBoots`!
|
||||
///
|
||||
/// **Disabling this feature will almost certainly cause `bindgen` to emit
|
||||
/// bindings that will not compile!** If you disable this feature, then it
|
||||
/// is *your* responsibility to provide definitions for every type that is
|
||||
/// referenced from an explicitly whitelisted item. One way to provide the
|
||||
/// referenced from an explicitly allowlisted item. One way to provide the
|
||||
/// definitions is by using the [`Builder::raw_line`](#method.raw_line)
|
||||
/// method, another would be to define them in Rust and then `include!(...)`
|
||||
/// the bindings immediately afterwards.
|
||||
pub fn whitelist_recursively(mut self, doit: bool) -> Self {
|
||||
self.options.whitelist_recursively = doit;
|
||||
pub fn allowlist_recursively(mut self, doit: bool) -> Self {
|
||||
self.options.allowlist_recursively = doit;
|
||||
self
|
||||
}
|
||||
|
||||
/// Deprecated alias for allowlist_recursively.
|
||||
#[deprecated(note = "Use allowlist_recursively instead")]
|
||||
pub fn whitelist_recursively(self, doit: bool) -> Self {
|
||||
self.allowlist_recursively(doit)
|
||||
}
|
||||
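A minimal sketch of the workflow the allowlist_recursively documentation above describes: with recursion disabled, the blocked-out type is supplied by hand through raw_line so the bindings still compile. The header name and the placeholder definition are illustrative only.

// Sketch; "dance_party.h" comes from the doc example above.
fn main() {
    let bindings = bindgen::builder()
        .header("dance_party.h")
        .allowlist_function("initiate_dance_party")
        // Do not pull in types reachable from the allowlisted function...
        .allowlist_recursively(false)
        // ...so provide a stand-in for the referenced type ourselves.
        .raw_line("#[repr(C)] pub struct MoonBoots { _private: [u8; 0] }")
        .generate()
        .expect("bindgen failed");
    println!("{}", bindings);
}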
|
||||
/// Generate `#[macro_use] extern crate objc;` instead of `use objc;`
|
||||
/// in the prologue of the files generated from objective-c files
|
||||
pub fn objc_extern_crate(mut self, doit: bool) -> Self {
|
||||
|
@ -688,30 +760,53 @@ impl Builder {
|
|||
|
||||
/// Hide the given type from the generated bindings. Regular expressions are
|
||||
/// supported.
|
||||
#[deprecated(note = "Use blacklist_type instead")]
|
||||
#[deprecated(note = "Use blocklist_type instead")]
|
||||
pub fn hide_type<T: AsRef<str>>(self, arg: T) -> Builder {
|
||||
self.blacklist_type(arg)
|
||||
self.blocklist_type(arg)
|
||||
}
|
||||
|
||||
/// Hide the given type from the generated bindings. Regular expressions are
|
||||
/// supported.
|
||||
#[deprecated(note = "Use blocklist_type instead")]
|
||||
pub fn blacklist_type<T: AsRef<str>>(self, arg: T) -> Builder {
|
||||
self.blocklist_type(arg)
|
||||
}
|
||||
|
||||
/// Hide the given type from the generated bindings. Regular expressions are
|
||||
/// supported.
|
||||
///
|
||||
/// To blacklist types prefixed with "mylib" use `"mylib_.*"`.
|
||||
/// To blocklist types prefixed with "mylib" use `"mylib_.*"`.
|
||||
/// For more complicated expressions check
|
||||
/// [regex](https://docs.rs/regex/*/regex/) docs
|
||||
pub fn blacklist_type<T: AsRef<str>>(mut self, arg: T) -> Builder {
|
||||
self.options.blacklisted_types.insert(arg);
|
||||
pub fn blocklist_type<T: AsRef<str>>(mut self, arg: T) -> Builder {
|
||||
self.options.blocklisted_types.insert(arg);
|
||||
self
|
||||
}
|
||||
|
||||
/// Hide the given function from the generated bindings. Regular expressions
|
||||
/// are supported.
|
||||
#[deprecated(note = "Use blocklist_function instead")]
|
||||
pub fn blacklist_function<T: AsRef<str>>(self, arg: T) -> Builder {
|
||||
self.blocklist_function(arg)
|
||||
}
|
||||
|
||||
/// Hide the given function from the generated bindings. Regular expressions
|
||||
/// are supported.
|
||||
///
|
||||
/// To blacklist functions prefixed with "mylib" use `"mylib_.*"`.
|
||||
/// To blocklist functions prefixed with "mylib" use `"mylib_.*"`.
|
||||
/// For more complicated expressions check
|
||||
/// [regex](https://docs.rs/regex/*/regex/) docs
|
||||
pub fn blacklist_function<T: AsRef<str>>(mut self, arg: T) -> Builder {
|
||||
self.options.blacklisted_functions.insert(arg);
|
||||
pub fn blocklist_function<T: AsRef<str>>(mut self, arg: T) -> Builder {
|
||||
self.options.blocklisted_functions.insert(arg);
|
||||
self
|
||||
}
|
||||
|
||||
/// Hide the given item from the generated bindings, regardless of
|
||||
/// whether it's a type, function, module, etc. Regular
|
||||
/// expressions are supported.
|
||||
#[deprecated(note = "Use blocklist_item instead")]
|
||||
pub fn blacklist_item<T: AsRef<str>>(mut self, arg: T) -> Builder {
|
||||
self.options.blocklisted_items.insert(arg);
|
||||
self
|
||||
}
|
||||
|
||||
|
@ -719,11 +814,18 @@ impl Builder {
|
|||
/// whether it's a type, function, module, etc. Regular
|
||||
/// expressions are supported.
|
||||
///
|
||||
/// To blacklist items prefixed with "mylib" use `"mylib_.*"`.
|
||||
/// To blocklist items prefixed with "mylib" use `"mylib_.*"`.
|
||||
/// For more complicated expressions check
|
||||
/// [regex](https://docs.rs/regex/*/regex/) docs
|
||||
pub fn blacklist_item<T: AsRef<str>>(mut self, arg: T) -> Builder {
|
||||
self.options.blacklisted_items.insert(arg);
|
||||
pub fn blocklist_item<T: AsRef<str>>(mut self, arg: T) -> Builder {
|
||||
self.options.blocklisted_items.insert(arg);
|
||||
self
|
||||
}
|
||||
|
||||
/// Hide any contents of the given file from the generated bindings,
|
||||
/// regardless of whether it's a type, function, module etc.
|
||||
pub fn blocklist_file<T: AsRef<str>>(mut self, arg: T) -> Builder {
|
||||
self.options.blocklisted_files.insert(arg);
|
||||
self
|
||||
}
|
||||
|
||||
|
@ -738,64 +840,86 @@ impl Builder {
|
|||
self
|
||||
}
|
||||
|
||||
/// Whitelist the given type so that it (and all types that it transitively
|
||||
/// Allowlist the given type so that it (and all types that it transitively
|
||||
/// refers to) appears in the generated bindings. Regular expressions are
|
||||
/// supported.
|
||||
#[deprecated(note = "use whitelist_type instead")]
|
||||
#[deprecated(note = "use allowlist_type instead")]
|
||||
pub fn whitelisted_type<T: AsRef<str>>(self, arg: T) -> Builder {
|
||||
self.whitelist_type(arg)
|
||||
self.allowlist_type(arg)
|
||||
}
|
||||
|
||||
/// Whitelist the given type so that it (and all types that it transitively
|
||||
/// Allowlist the given type so that it (and all types that it transitively
|
||||
/// refers to) appears in the generated bindings. Regular expressions are
|
||||
/// supported.
|
||||
#[deprecated(note = "use allowlist_type instead")]
|
||||
pub fn whitelist_type<T: AsRef<str>>(self, arg: T) -> Builder {
|
||||
self.allowlist_type(arg)
|
||||
}
|
||||
|
||||
/// Allowlist the given type so that it (and all types that it transitively
|
||||
/// refers to) appears in the generated bindings. Regular expressions are
|
||||
/// supported.
|
||||
///
|
||||
/// To whitelist types prefixed with "mylib" use `"mylib_.*"`.
|
||||
/// To allowlist types prefixed with "mylib" use `"mylib_.*"`.
|
||||
/// For more complicated expressions check
|
||||
/// [regex](https://docs.rs/regex/*/regex/) docs
|
||||
pub fn whitelist_type<T: AsRef<str>>(mut self, arg: T) -> Builder {
|
||||
self.options.whitelisted_types.insert(arg);
|
||||
pub fn allowlist_type<T: AsRef<str>>(mut self, arg: T) -> Builder {
|
||||
self.options.allowlisted_types.insert(arg);
|
||||
self
|
||||
}
|
||||
|
||||
/// Whitelist the given function so that it (and all types that it
|
||||
/// Allowlist the given function so that it (and all types that it
|
||||
/// transitively refers to) appears in the generated bindings. Regular
|
||||
/// expressions are supported.
|
||||
///
|
||||
/// To whitelist functions prefixed with "mylib" use `"mylib_.*"`.
|
||||
/// To allowlist functions prefixed with "mylib" use `"mylib_.*"`.
|
||||
/// For more complicated expressions check
|
||||
/// [regex](https://docs.rs/regex/*/regex/) docs
|
||||
pub fn whitelist_function<T: AsRef<str>>(mut self, arg: T) -> Builder {
|
||||
self.options.whitelisted_functions.insert(arg);
|
||||
pub fn allowlist_function<T: AsRef<str>>(mut self, arg: T) -> Builder {
|
||||
self.options.allowlisted_functions.insert(arg);
|
||||
self
|
||||
}
|
||||
|
||||
/// Whitelist the given function.
|
||||
/// Allowlist the given function.
|
||||
///
|
||||
/// Deprecated: use whitelist_function instead.
|
||||
#[deprecated(note = "use whitelist_function instead")]
|
||||
/// Deprecated: use allowlist_function instead.
|
||||
#[deprecated(note = "use allowlist_function instead")]
|
||||
pub fn whitelist_function<T: AsRef<str>>(self, arg: T) -> Builder {
|
||||
self.allowlist_function(arg)
|
||||
}
|
||||
|
||||
/// Allowlist the given function.
|
||||
///
|
||||
/// Deprecated: use allowlist_function instead.
|
||||
#[deprecated(note = "use allowlist_function instead")]
|
||||
pub fn whitelisted_function<T: AsRef<str>>(self, arg: T) -> Builder {
|
||||
self.whitelist_function(arg)
|
||||
self.allowlist_function(arg)
|
||||
}
|
||||
|
||||
/// Whitelist the given variable so that it (and all types that it
|
||||
/// Allowlist the given variable so that it (and all types that it
|
||||
/// transitively refers to) appears in the generated bindings. Regular
|
||||
/// expressions are supported.
|
||||
///
|
||||
/// To whitelist variables prefixed with "mylib" use `"mylib_.*"`.
|
||||
/// To allowlist variables prefixed with "mylib" use `"mylib_.*"`.
|
||||
/// For more complicated expressions check
|
||||
/// [regex](https://docs.rs/regex/*/regex/) docs
|
||||
pub fn whitelist_var<T: AsRef<str>>(mut self, arg: T) -> Builder {
|
||||
self.options.whitelisted_vars.insert(arg);
|
||||
pub fn allowlist_var<T: AsRef<str>>(mut self, arg: T) -> Builder {
|
||||
self.options.allowlisted_vars.insert(arg);
|
||||
self
|
||||
}
|
||||
|
||||
/// Whitelist the given variable.
|
||||
/// Deprecated: use allowlist_var instead.
|
||||
#[deprecated(note = "use allowlist_var instead")]
|
||||
pub fn whitelist_var<T: AsRef<str>>(self, arg: T) -> Builder {
|
||||
self.allowlist_var(arg)
|
||||
}
|
||||
|
||||
/// Allowlist the given variable.
|
||||
///
|
||||
/// Deprecated: use whitelist_var instead.
|
||||
#[deprecated(note = "use whitelist_var instead")]
|
||||
/// Deprecated: use allowlist_var instead.
|
||||
#[deprecated(note = "use allowlist_var instead")]
|
||||
pub fn whitelisted_var<T: AsRef<str>>(self, arg: T) -> Builder {
|
||||
self.whitelist_var(arg)
|
||||
self.allowlist_var(arg)
|
||||
}
|
||||
|
||||
/// Set the default style of code to generate for enums
|
||||
|
@ -1124,7 +1248,7 @@ impl Builder {
|
|||
/// This method disables that behavior.
|
||||
///
|
||||
/// Note that this intentionally does not change the names used for
|
||||
/// whitelisting and blacklisting, which should still be mangled with the
|
||||
/// allowlisting and blocklisting, which should still be mangled with the
|
||||
/// namespaces.
|
||||
///
|
||||
/// Note, also, that this option may cause bindgen to generate duplicate
|
||||
|
@ -1264,6 +1388,12 @@ impl Builder {
|
|||
self
|
||||
}
|
||||
|
||||
/// Whether to try to fit macro constants to types smaller than u32/i32
|
||||
pub fn fit_macro_constants(mut self, doit: bool) -> Self {
|
||||
self.options.fit_macro_constants = doit;
|
||||
self
|
||||
}
|
||||
|
||||
/// Prepend the enum name to constant or newtype variants.
|
||||
pub fn prepend_enum_name(mut self, doit: bool) -> Self {
|
||||
self.options.prepend_enum_name = doit;
|
||||
|
@ -1302,11 +1432,22 @@ impl Builder {
|
|||
self
|
||||
}
|
||||
|
||||
/// If true, always emit explicit padding fields.
|
||||
///
|
||||
/// If a struct needs to be serialized in its native format (padding bytes
|
||||
/// and all), for example writing it to a file or sending it on the network,
|
||||
/// then this should be enabled, as anything reading the padding bytes of
|
||||
/// a struct may lead to Undefined Behavior.
|
||||
pub fn explicit_padding(mut self, doit: bool) -> Self {
|
||||
self.options.force_explicit_padding = doit;
|
||||
self
|
||||
}
|
||||
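As a rough illustration of the trade-off described above, enabling explicit padding makes bindgen spell out the padding bytes as named fields, which is what you want when the struct is written out byte-for-byte. The header and file names below are hypothetical, and the exact padding field names are up to bindgen.

// Sketch: force explicit padding for structs that go over the wire.
fn main() {
    let bindings = bindgen::builder()
        .header("wire_format.h") // hypothetical header with on-disk/on-wire structs
        .explicit_padding(true)
        .generate()
        .expect("bindgen failed");
    bindings
        .write_to_file("wire_format_bindings.rs")
        .expect("could not write bindings");
}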
|
||||
/// Generate the Rust bindings using the options built up thus far.
|
||||
pub fn generate(mut self) -> Result<Bindings, ()> {
|
||||
// Add any extra arguments from the environment to the clang command line.
|
||||
if let Some(extra_clang_args) =
|
||||
env::var("BINDGEN_EXTRA_CLANG_ARGS").ok()
|
||||
get_target_dependent_env_var("BINDGEN_EXTRA_CLANG_ARGS")
|
||||
{
|
||||
// Try to parse it with shell quoting. If we fail, make it one single big argument.
|
||||
if let Some(strings) = shlex::split(&extra_clang_args) {
|
||||
|
@ -1318,11 +1459,13 @@ impl Builder {
|
|||
|
||||
// Transform input headers to arguments on the clang command line.
|
||||
self.options.input_header = self.input_headers.pop();
|
||||
self.options
|
||||
.clang_args
|
||||
.extend(self.input_headers.drain(..).flat_map(|header| {
|
||||
iter::once("-include".into()).chain(iter::once(header))
|
||||
}));
|
||||
self.options.extra_input_headers = self.input_headers;
|
||||
self.options.clang_args.extend(
|
||||
self.options.extra_input_headers.iter().flat_map(|header| {
|
||||
iter::once("-include".into())
|
||||
.chain(iter::once(header.to_string()))
|
||||
}),
|
||||
);
|
||||
|
||||
self.options.input_unsaved_files.extend(
|
||||
self.input_header_contents
|
||||
|
@ -1341,13 +1484,6 @@ impl Builder {
|
|||
/// issues. The resulting file will be named something like `__bindgen.i` or
|
||||
/// `__bindgen.ii`
|
||||
pub fn dump_preprocessed_input(&self) -> io::Result<()> {
|
||||
fn check_is_cpp(name_file: &str) -> bool {
|
||||
name_file.ends_with(".hpp") ||
|
||||
name_file.ends_with(".hxx") ||
|
||||
name_file.ends_with(".hh") ||
|
||||
name_file.ends_with(".h++")
|
||||
}
|
||||
|
||||
let clang =
|
||||
clang_sys::support::Clang::find(None, &[]).ok_or_else(|| {
|
||||
io::Error::new(
|
||||
|
@ -1365,7 +1501,7 @@ impl Builder {
|
|||
|
||||
// For each input header, add `#include "$header"`.
|
||||
for header in &self.input_headers {
|
||||
is_cpp |= check_is_cpp(header);
|
||||
is_cpp |= file_is_cpp(header);
|
||||
|
||||
wrapper_contents.push_str("#include \"");
|
||||
wrapper_contents.push_str(header);
|
||||
|
@ -1375,7 +1511,7 @@ impl Builder {
|
|||
// For each input header content, add a prefix line of `#line 0 "$name"`
|
||||
// followed by the contents.
|
||||
for &(ref name, ref contents) in &self.input_header_contents {
|
||||
is_cpp |= check_is_cpp(name);
|
||||
is_cpp |= file_is_cpp(name);
|
||||
|
||||
wrapper_contents.push_str("#line 0 \"");
|
||||
wrapper_contents.push_str(name);
|
||||
|
@ -1461,6 +1597,13 @@ impl Builder {
|
|||
self
|
||||
}
|
||||
|
||||
/// Add `#[must_use]` for the given type. Regular
|
||||
/// expressions are supported.
|
||||
pub fn must_use_type<T: Into<String>>(mut self, arg: T) -> Builder {
|
||||
self.options.must_use_types.insert(arg.into());
|
||||
self
|
||||
}
|
||||
|
||||
/// Set whether `arr[size]` should be treated as `*mut T` or `*mut [T; size]` (same for mut)
|
||||
pub fn array_pointers_in_arguments(mut self, doit: bool) -> Self {
|
||||
self.options.array_pointers_in_arguments = doit;
|
||||
|
@ -1484,22 +1627,59 @@ impl Builder {
|
|||
self.options.dynamic_library_name = Some(dynamic_library_name.into());
|
||||
self
|
||||
}
|
||||
|
||||
/// Require successful linkage for all routines in a shared library.
|
||||
/// This allows us to optimize function calls by being able to safely assume function pointers
|
||||
/// are valid.
|
||||
pub fn dynamic_link_require_all(mut self, req: bool) -> Self {
|
||||
self.options.dynamic_link_require_all = req;
|
||||
self
|
||||
}
|
||||
|
||||
/// Generate bindings as `pub` only if the bound item is publicly accessible by C++.
|
||||
pub fn respect_cxx_access_specs(mut self, doit: bool) -> Self {
|
||||
self.options.respect_cxx_access_specs = doit;
|
||||
self
|
||||
}
|
||||
|
||||
/// Always translate enum integer types to native Rust integer types.
|
||||
///
|
||||
/// This will result in enums having types such as `u32` and `i16` instead
|
||||
/// of `c_uint` and `c_short`. Types for Rustified enums are always
|
||||
/// translated.
|
||||
pub fn translate_enum_integer_types(mut self, doit: bool) -> Self {
|
||||
self.options.translate_enum_integer_types = doit;
|
||||
self
|
||||
}
|
||||
|
||||
/// Generate types with C style naming.
|
||||
///
|
||||
/// This will add prefixes to the generated type names. For example instead of a struct `A` we
|
||||
/// will generate struct `struct_A`. Currently applies to structs, unions, and enums.
|
||||
pub fn c_naming(mut self, doit: bool) -> Self {
|
||||
self.options.c_naming = doit;
|
||||
self
|
||||
}
|
||||
}
|
||||
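A short sketch combining the dynamic-loading and access-spec options added above; the library and header names are hypothetical.

fn main() {
    let bindings = bindgen::builder()
        .header("libfoo.h") // hypothetical
        // Generate a struct that loads symbols from libfoo at runtime...
        .dynamic_library_name("libfoo")
        // ...and fail loading unless every declared routine resolves, so calls
        // can skip per-symbol validity checks.
        .dynamic_link_require_all(true)
        .respect_cxx_access_specs(true)
        .generate()
        .expect("bindgen failed");
    bindings
        .write_to_file("libfoo_bindings.rs")
        .expect("could not write bindings");
}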
|
||||
/// Configuration options for generated bindings.
|
||||
#[derive(Debug)]
|
||||
struct BindgenOptions {
|
||||
/// The set of types that have been blacklisted and should not appear
|
||||
/// The set of types that have been blocklisted and should not appear
|
||||
/// anywhere in the generated code.
|
||||
blacklisted_types: RegexSet,
|
||||
blocklisted_types: RegexSet,
|
||||
|
||||
/// The set of functions that have been blacklisted and should not appear
|
||||
/// The set of functions that have been blocklisted and should not appear
|
||||
/// in the generated code.
|
||||
blacklisted_functions: RegexSet,
|
||||
blocklisted_functions: RegexSet,
|
||||
|
||||
/// The set of items, regardless of item-type, that have been
|
||||
/// blacklisted and should not appear in the generated code.
|
||||
blacklisted_items: RegexSet,
|
||||
/// blocklisted and should not appear in the generated code.
|
||||
blocklisted_items: RegexSet,
|
||||
|
||||
/// The set of files whose contents should be blocklisted and should not
|
||||
/// appear in the generated code.
|
||||
blocklisted_files: RegexSet,
|
||||
|
||||
/// The set of types that should be treated as opaque structures in the
|
||||
/// generated code.
|
||||
|
@ -1508,19 +1688,22 @@ struct BindgenOptions {
|
|||
/// The explicit rustfmt path.
|
||||
rustfmt_path: Option<PathBuf>,
|
||||
|
||||
/// The path to which we should write a Makefile-syntax depfile (if any).
|
||||
depfile: Option<deps::DepfileSpec>,
|
||||
|
||||
/// The set of types that we should have bindings for in the generated
|
||||
/// code.
|
||||
///
|
||||
/// This includes all types transitively reachable from any type in this
|
||||
/// set. One might think of whitelisted types/vars/functions as GC roots,
|
||||
/// set. One might think of allowlisted types/vars/functions as GC roots,
|
||||
/// and the generated Rust code as including everything that gets marked.
|
||||
whitelisted_types: RegexSet,
|
||||
allowlisted_types: RegexSet,
|
||||
|
||||
/// Whitelisted functions. See docs for `whitelisted_types` for more.
|
||||
whitelisted_functions: RegexSet,
|
||||
/// Allowlisted functions. See docs for `allowlisted_types` for more.
|
||||
allowlisted_functions: RegexSet,
|
||||
|
||||
/// Whitelisted variables. See docs for `whitelisted_types` for more.
|
||||
whitelisted_vars: RegexSet,
|
||||
/// Allowlisted variables. See docs for `allowlisted_types` for more.
|
||||
allowlisted_vars: RegexSet,
|
||||
|
||||
/// The default style of code to generate for enums
|
||||
default_enum_style: codegen::EnumVariation,
|
||||
|
@ -1669,6 +1852,9 @@ struct BindgenOptions {
|
|||
/// The input header file.
|
||||
input_header: Option<String>,
|
||||
|
||||
/// Any additional input header files.
|
||||
extra_input_headers: Vec<String>,
|
||||
|
||||
/// Unsaved files for input.
|
||||
input_unsaved_files: Vec<clang::UnsavedFile>,
|
||||
|
||||
|
@ -1686,14 +1872,14 @@ struct BindgenOptions {
|
|||
conservative_inline_namespaces: bool,
|
||||
|
||||
/// Whether to keep documentation comments in the generated output. See the
|
||||
/// documentation for more details.
|
||||
/// documentation for more details. Defaults to true.
|
||||
generate_comments: bool,
|
||||
|
||||
/// Whether to generate inline functions. Defaults to false.
|
||||
generate_inline_functions: bool,
|
||||
|
||||
/// Whether to whitelist types recursively. Defaults to true.
|
||||
whitelist_recursively: bool,
|
||||
/// Whether to allowlist types recursively. Defaults to true.
|
||||
allowlist_recursively: bool,
|
||||
|
||||
/// Instead of emitting 'use objc;' to files generated from objective c files,
|
||||
/// generate '#[macro_use] extern crate objc;'
|
||||
|
@ -1719,6 +1905,9 @@ struct BindgenOptions {
|
|||
/// Whether to detect include paths using clang_sys.
|
||||
detect_include_paths: bool,
|
||||
|
||||
/// Whether to try to fit macro constants into types smaller than u32/i32
|
||||
fit_macro_constants: bool,
|
||||
|
||||
/// Whether to prepend the enum name to constant or newtype variants.
|
||||
prepend_enum_name: bool,
|
||||
|
||||
|
@ -1730,7 +1919,7 @@ struct BindgenOptions {
|
|||
|
||||
/// Whether we should record which items in the regex sets ever matched.
|
||||
///
|
||||
/// This may be a bit slower, but will enable reporting of unused whitelist
|
||||
/// This may be a bit slower, but will enable reporting of unused allowlist
|
||||
/// items via the `error!` log.
|
||||
record_matches: bool,
|
||||
|
||||
|
@ -1759,6 +1948,9 @@ struct BindgenOptions {
|
|||
/// The set of types that we should not derive `Hash` for.
|
||||
no_hash_types: RegexSet,
|
||||
|
||||
/// The set of types that we should be annotated with `#[must_use]`.
|
||||
must_use_types: RegexSet,
|
||||
|
||||
/// Decide if C arrays should be regular pointers in rust or array pointers
|
||||
array_pointers_in_arguments: bool,
|
||||
|
||||
|
@ -1768,6 +1960,24 @@ struct BindgenOptions {
|
|||
/// The name of the dynamic library (if we are generating bindings for a shared library). If
|
||||
/// this is None, no dynamic bindings are created.
|
||||
dynamic_library_name: Option<String>,
|
||||
|
||||
/// Require successful linkage for all routines in a shared library.
|
||||
/// This allows us to optimize function calls by being able to safely assume function pointers
|
||||
/// are valid. No effect if `dynamic_library_name` is None.
|
||||
dynamic_link_require_all: bool,
|
||||
|
||||
/// Only make generated bindings `pub` if the items would be publicly accessible
|
||||
/// by C++.
|
||||
respect_cxx_access_specs: bool,
|
||||
|
||||
/// Always translate enum integer types to native Rust integer types.
|
||||
translate_enum_integer_types: bool,
|
||||
|
||||
/// Generate types with C style naming.
|
||||
c_naming: bool,
|
||||
|
||||
/// Always output explicit padding fields
|
||||
force_explicit_padding: bool,
|
||||
}
|
||||
|
||||
/// TODO(emilio): This is sort of a lie (see the error message that results from
|
||||
|
@ -1778,12 +1988,13 @@ impl ::std::panic::UnwindSafe for BindgenOptions {}
|
|||
impl BindgenOptions {
|
||||
fn build(&mut self) {
|
||||
let mut regex_sets = [
|
||||
&mut self.whitelisted_vars,
|
||||
&mut self.whitelisted_types,
|
||||
&mut self.whitelisted_functions,
|
||||
&mut self.blacklisted_types,
|
||||
&mut self.blacklisted_functions,
|
||||
&mut self.blacklisted_items,
|
||||
&mut self.allowlisted_vars,
|
||||
&mut self.allowlisted_types,
|
||||
&mut self.allowlisted_functions,
|
||||
&mut self.blocklisted_types,
|
||||
&mut self.blocklisted_functions,
|
||||
&mut self.blocklisted_items,
|
||||
&mut self.blocklisted_files,
|
||||
&mut self.opaque_types,
|
||||
&mut self.bitfield_enums,
|
||||
&mut self.constified_enums,
|
||||
|
@ -1799,6 +2010,7 @@ impl BindgenOptions {
|
|||
&mut self.no_debug_types,
|
||||
&mut self.no_default_types,
|
||||
&mut self.no_hash_types,
|
||||
&mut self.must_use_types,
|
||||
];
|
||||
let record_matches = self.record_matches;
|
||||
for regex_set in &mut regex_sets {
|
||||
|
@ -1827,14 +2039,16 @@ impl Default for BindgenOptions {
|
|||
BindgenOptions {
|
||||
rust_target,
|
||||
rust_features: rust_target.into(),
|
||||
blacklisted_types: Default::default(),
|
||||
blacklisted_functions: Default::default(),
|
||||
blacklisted_items: Default::default(),
|
||||
blocklisted_types: Default::default(),
|
||||
blocklisted_functions: Default::default(),
|
||||
blocklisted_items: Default::default(),
|
||||
blocklisted_files: Default::default(),
|
||||
opaque_types: Default::default(),
|
||||
rustfmt_path: Default::default(),
|
||||
whitelisted_types: Default::default(),
|
||||
whitelisted_functions: Default::default(),
|
||||
whitelisted_vars: Default::default(),
|
||||
depfile: Default::default(),
|
||||
allowlisted_types: Default::default(),
|
||||
allowlisted_functions: Default::default(),
|
||||
allowlisted_vars: Default::default(),
|
||||
default_enum_style: Default::default(),
|
||||
bitfield_enums: Default::default(),
|
||||
newtype_enums: Default::default(),
|
||||
|
@ -1877,18 +2091,20 @@ impl Default for BindgenOptions {
|
|||
module_lines: HashMap::default(),
|
||||
clang_args: vec![],
|
||||
input_header: None,
|
||||
extra_input_headers: vec![],
|
||||
input_unsaved_files: vec![],
|
||||
parse_callbacks: None,
|
||||
codegen_config: CodegenConfig::all(),
|
||||
conservative_inline_namespaces: false,
|
||||
generate_comments: true,
|
||||
generate_inline_functions: false,
|
||||
whitelist_recursively: true,
|
||||
allowlist_recursively: true,
|
||||
generate_block: false,
|
||||
objc_extern_crate: false,
|
||||
block_extern_crate: false,
|
||||
enable_mangling: true,
|
||||
detect_include_paths: true,
|
||||
fit_macro_constants: false,
|
||||
prepend_enum_name: true,
|
||||
time_phases: false,
|
||||
record_matches: true,
|
||||
|
@ -1900,9 +2116,15 @@ impl Default for BindgenOptions {
|
|||
no_debug_types: Default::default(),
|
||||
no_default_types: Default::default(),
|
||||
no_hash_types: Default::default(),
|
||||
must_use_types: Default::default(),
|
||||
array_pointers_in_arguments: false,
|
||||
wasm_import_module_name: None,
|
||||
dynamic_library_name: None,
|
||||
dynamic_link_require_all: false,
|
||||
respect_cxx_access_specs: false,
|
||||
translate_enum_integer_types: false,
|
||||
c_naming: false,
|
||||
force_explicit_padding: false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1940,7 +2162,7 @@ pub struct Bindings {
|
|||
module: proc_macro2::TokenStream,
|
||||
}
|
||||
|
||||
pub(crate) const HOST_TARGET: &'static str =
|
||||
pub(crate) const HOST_TARGET: &str =
|
||||
include_str!(concat!(env!("OUT_DIR"), "/host-target.txt"));
|
||||
|
||||
// Some architecture triplets are different between rust and libclang, see #1211
|
||||
|
@ -1948,7 +2170,8 @@ pub(crate) const HOST_TARGET: &'static str =
|
|||
fn rust_to_clang_target(rust_target: &str) -> String {
|
||||
if rust_target.starts_with("aarch64-apple-") {
|
||||
let mut clang_target = "arm64-apple-".to_owned();
|
||||
clang_target.push_str(&rust_target["aarch64-apple-".len()..]);
|
||||
clang_target
|
||||
.push_str(rust_target.strip_prefix("aarch64-apple-").unwrap());
|
||||
return clang_target;
|
||||
}
|
||||
rust_target.to_owned()
|
||||
|
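The renaming only touches the vendor prefix, so under the rewritten helper something like the following holds; this is a standalone mirror for illustration, since the real function is private to the crate.

// Illustrative mirror of the mapping performed above.
fn rust_to_clang_target(rust_target: &str) -> String {
    match rust_target.strip_prefix("aarch64-apple-") {
        Some(rest) => format!("arm64-apple-{}", rest),
        None => rust_target.to_owned(),
    }
}

fn main() {
    assert_eq!(rust_to_clang_target("aarch64-apple-darwin"), "arm64-apple-darwin");
    assert_eq!(
        rust_to_clang_target("x86_64-unknown-linux-gnu"),
        "x86_64-unknown-linux-gnu"
    );
}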
@ -2069,7 +2292,9 @@ impl Bindings {
|
|||
debug!("Found clang: {:?}", clang);
|
||||
|
||||
// Whether we are working with C or C++ inputs.
|
||||
let is_cpp = args_are_cpp(&options.clang_args);
|
||||
let is_cpp = args_are_cpp(&options.clang_args) ||
|
||||
options.input_header.as_deref().map_or(false, file_is_cpp);
|
||||
|
||||
let search_paths = if is_cpp {
|
||||
clang.cpp_search_paths
|
||||
} else {
|
||||
|
@ -2119,7 +2344,10 @@ impl Bindings {
|
|||
}
|
||||
}
|
||||
|
||||
for f in options.input_unsaved_files.iter() {
|
||||
for (idx, f) in options.input_unsaved_files.iter().enumerate() {
|
||||
if idx != 0 || options.input_header.is_some() {
|
||||
options.clang_args.push("-include".to_owned());
|
||||
}
|
||||
options.clang_args.push(f.name.to_str().unwrap().to_owned())
|
||||
}
|
||||
|
||||
|
@ -2153,15 +2381,6 @@ impl Bindings {
|
|||
})
|
||||
}
|
||||
|
||||
/// Convert these bindings into source text (with raw lines prepended).
|
||||
pub fn to_string(&self) -> String {
|
||||
let mut bytes = vec![];
|
||||
self.write(Box::new(&mut bytes) as Box<dyn Write>)
|
||||
.expect("writing to a vec cannot fail");
|
||||
String::from_utf8(bytes)
|
||||
.expect("we should only write bindings that are valid utf-8")
|
||||
}
|
||||
|
||||
/// Write these bindings as source text to a file.
|
||||
pub fn write_to_file<P: AsRef<Path>>(&self, path: P) -> io::Result<()> {
|
||||
let file = OpenOptions::new()
|
||||
|
@ -2211,7 +2430,7 @@ impl Bindings {
|
|||
}
|
||||
|
||||
/// Gets the rustfmt path to rustfmt the generated bindings.
|
||||
fn rustfmt_path<'a>(&'a self) -> io::Result<Cow<'a, PathBuf>> {
|
||||
fn rustfmt_path(&self) -> io::Result<Cow<PathBuf>> {
|
||||
debug_assert!(self.options.rustfmt_bindings);
|
||||
if let Some(ref p) = self.options.rustfmt_path {
|
||||
return Ok(Cow::Borrowed(p));
|
||||
|
@ -2302,6 +2521,18 @@ impl Bindings {
|
|||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for Bindings {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let mut bytes = vec![];
|
||||
self.write(Box::new(&mut bytes) as Box<dyn Write>)
|
||||
.expect("writing to a vec cannot fail");
|
||||
f.write_str(
|
||||
std::str::from_utf8(&bytes)
|
||||
.expect("we should only write bindings that are valid utf-8"),
|
||||
)
|
||||
}
|
||||
}
|
||||
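With Display implemented, callers can keep using the to_string()/format! path that the removed inherent to_string method used to provide; a sketch, with a hypothetical header name.

// Sketch: Bindings now renders through the std Display machinery.
fn main() {
    let bindings = bindgen::builder()
        .header("wrapper.h") // hypothetical header
        .generate()
        .expect("bindgen failed");
    // to_string() comes for free from impl Display; same text that write() produces.
    let source: String = bindings.to_string();
    println!("{}", source.len());
}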
|
||||
/// Determines whether the given cursor is in any of the files matched by the
|
||||
/// options.
|
||||
fn filter_builtins(ctx: &BindgenContext, cursor: &clang::Cursor) -> bool {
|
||||
|
@ -2350,7 +2581,7 @@ fn parse(context: &mut BindgenContext) -> Result<(), ()> {
|
|||
if context.options().emit_ast {
|
||||
fn dump_if_not_builtin(cur: &clang::Cursor) -> CXChildVisitResult {
|
||||
if !cur.is_builtin() {
|
||||
clang::ast_dump(&cur, 0)
|
||||
clang::ast_dump(cur, 0)
|
||||
} else {
|
||||
CXChildVisit_Continue
|
||||
}
|
||||
|
@ -2383,28 +2614,23 @@ pub struct ClangVersion {
|
|||
pub fn clang_version() -> ClangVersion {
|
||||
ensure_libclang_is_loaded();
|
||||
|
||||
//Debian clang version 11.0.1-2
|
||||
let raw_v: String = clang::extract_clang_version();
|
||||
let split_v: Option<Vec<&str>> = raw_v
|
||||
.split_whitespace()
|
||||
.nth(2)
|
||||
.find(|t| t.chars().next().map_or(false, |v| v.is_ascii_digit()))
|
||||
.map(|v| v.split('.').collect());
|
||||
match split_v {
|
||||
Some(v) => {
|
||||
if v.len() >= 2 {
|
||||
let maybe_major = v[0].parse::<u32>();
|
||||
let maybe_minor = v[1].parse::<u32>();
|
||||
match (maybe_major, maybe_minor) {
|
||||
(Ok(major), Ok(minor)) => {
|
||||
return ClangVersion {
|
||||
parsed: Some((major, minor)),
|
||||
full: raw_v.clone(),
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
if let Some(v) = split_v {
|
||||
if v.len() >= 2 {
|
||||
let maybe_major = v[0].parse::<u32>();
|
||||
let maybe_minor = v[1].parse::<u32>();
|
||||
if let (Ok(major), Ok(minor)) = (maybe_major, maybe_minor) {
|
||||
return ClangVersion {
|
||||
parsed: Some((major, minor)),
|
||||
full: raw_v.clone(),
|
||||
};
|
||||
}
|
||||
}
|
||||
None => {}
|
||||
};
|
||||
ClangVersion {
|
||||
parsed: None,
|
||||
|
@ -2412,10 +2638,25 @@ pub fn clang_version() -> ClangVersion {
|
|||
}
|
||||
}
|
||||
|
||||
/// Looks for the env var `var_${TARGET}`, and falls back to just `var` when it is not found.
|
||||
fn get_target_dependent_env_var(var: &str) -> Option<String> {
|
||||
if let Ok(target) = env::var("TARGET") {
|
||||
if let Ok(v) = env::var(&format!("{}_{}", var, target)) {
|
||||
return Some(v);
|
||||
}
|
||||
if let Ok(v) =
|
||||
env::var(&format!("{}_{}", var, target.replace("-", "_")))
|
||||
{
|
||||
return Some(v);
|
||||
}
|
||||
}
|
||||
env::var(var).ok()
|
||||
}
|
||||
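So for a cross build, either the target-suffixed variable or the plain one can win, in that order; a standalone illustration of the precedence, with an example target triple.

use std::env;

// Standalone mirror of the lookup order above.
fn get_target_dependent_env_var(var: &str) -> Option<String> {
    if let Ok(target) = env::var("TARGET") {
        // 1. VAR_<target triple> as-is.
        if let Ok(v) = env::var(format!("{}_{}", var, target)) {
            return Some(v);
        }
        // 2. VAR_<target triple> with dashes turned into underscores.
        if let Ok(v) = env::var(format!("{}_{}", var, target.replace('-', "_"))) {
            return Some(v);
        }
    }
    // 3. Finally the unsuffixed variable.
    env::var(var).ok()
}

fn main() {
    env::set_var("TARGET", "aarch64-unknown-linux-gnu");
    env::set_var(
        "BINDGEN_EXTRA_CLANG_ARGS_aarch64_unknown_linux_gnu",
        "--sysroot=/opt/aarch64",
    );
    assert_eq!(
        get_target_dependent_env_var("BINDGEN_EXTRA_CLANG_ARGS").as_deref(),
        Some("--sysroot=/opt/aarch64")
    );
}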
|
||||
/// A ParseCallbacks implementation that will act on file includes by echoing a rerun-if-changed
|
||||
/// line
|
||||
///
|
||||
/// When running in side a `build.rs` script, this can be used to make cargo invalidate the
|
||||
/// When running inside a `build.rs` script, this can be used to make cargo invalidate the
|
||||
/// generated bindings whenever any of the files included from the header change:
|
||||
/// ```
|
||||
/// use bindgen::builder;
|
||||
|
@@ -2457,8 +2698,8 @@ fn commandline_flag_unit_test_function() {
//Test 2
let bindings = crate::builder()
.header("input_header")
.whitelist_type("Distinct_Type")
.whitelist_function("safe_function");
.allowlist_type("Distinct_Type")
.allowlist_function("safe_function");

let command_line_flags = bindings.command_line_flags();
let test_cases = vec![

@@ -2467,9 +2708,9 @@ fn commandline_flag_unit_test_function() {
"--no-derive-default",
"--generate",
"functions,types,vars,methods,constructors,destructors",
"--whitelist-type",
"--allowlist-type",
"Distinct_Type",
"--whitelist-function",
"--allowlist-function",
"safe_function",
]
.iter()

@@ -1,30 +1,32 @@
#![allow(unused)]

macro_rules! log {
(target: $target:expr, $lvl:expr, $($arg:tt)+) => {
(target: $target:expr, $lvl:expr, $($arg:tt)+) => {{
let _ = $target;
let _ = log!($lvl, $($arg)+);
};
}};
($lvl:expr, $($arg:tt)+) => {{
let _ = $lvl;
let _ = format_args!($($arg)+);
}};
}
macro_rules! error {
(target: $target:expr, $($arg:tt)*) => { log!($target, $($arg)*); };
($($arg:tt)*) => { log!("", $($arg)*); };
(target: $target:expr, $($arg:tt)+) => { log!(target: $target, "", $($arg)+) };
($($arg:tt)+) => { log!("", $($arg)+) };
}
macro_rules! warn {
(target: $target:expr, $($arg:tt)*) => { log!($target, $($arg)*); };
($($arg:tt)*) => { log!("", $($arg)*); };
(target: $target:expr, $($arg:tt)*) => { log!(target: $target, "", $($arg)*) };
($($arg:tt)*) => { log!("", $($arg)*) };
}
macro_rules! info {
(target: $target:expr, $($arg:tt)*) => { log!($target, $($arg)*); };
($($arg:tt)*) => { log!("", $($arg)*); };
(target: $target:expr, $($arg:tt)+) => { log!(target: $target, "", $($arg)+) };
($($arg:tt)+) => { log!("", $($arg)+) };
}
macro_rules! debug {
(target: $target:expr, $($arg:tt)*) => { log!($target, $($arg)*); };
($($arg:tt)*) => { log!("", $($arg)*); };
(target: $target:expr, $($arg:tt)+) => { log!(target: $target, "", $($arg)+) };
($($arg:tt)+) => { log!("", $($arg)+) };
}
macro_rules! trace {
(target: $target:expr, $($arg:tt)*) => { log!($target, $($arg)*); };
($($arg:tt)*) => { log!("", $($arg)*); };
(target: $target:expr, $($arg:tt)+) => { log!(target: $target, "", $($arg)+) };
($($arg:tt)+) => { log!("", $($arg)+) };
}

@@ -37,7 +37,7 @@ fn clang_version_check() {
);

if expected_version.is_some() {
assert_eq!(version.parsed, version.parsed);
// assert_eq!(version.parsed, version.parsed);
}
}

@@ -45,9 +45,7 @@ pub fn main() {
#[cfg(feature = "logging")]
env_logger::init();

let bind_args: Vec<_> = env::args().collect();

match builder_from_flags(bind_args.into_iter()) {
match builder_from_flags(env::args()) {
Ok((builder, output, verbose)) => {
clang_version_check();
let builder_result = panic::catch_unwind(|| {

@@ -30,6 +30,10 @@ where
Arg::with_name("header")
.help("C or C++ header file")
.required(true),
Arg::with_name("depfile")
.long("depfile")
.takes_value(true)
.help("Path to write depfile to"),
Arg::with_name("default-enum-style")
.long("default-enum-style")
.help("The default style of code used to generate enums.")

@@ -136,27 +140,38 @@ where
.takes_value(true)
.multiple(true)
.number_of_values(1),
Arg::with_name("blacklist-type")
.long("blacklist-type")
Arg::with_name("blocklist-type")
.alias("blacklist-type")
.long("blocklist-type")
.help("Mark <type> as hidden.")
.value_name("type")
.takes_value(true)
.multiple(true)
.number_of_values(1),
Arg::with_name("blacklist-function")
.long("blacklist-function")
Arg::with_name("blocklist-function")
.alias("blacklist-function")
.long("blocklist-function")
.help("Mark <function> as hidden.")
.value_name("function")
.takes_value(true)
.multiple(true)
.number_of_values(1),
Arg::with_name("blacklist-item")
.long("blacklist-item")
Arg::with_name("blocklist-item")
.alias("blacklist-item")
.long("blocklist-item")
.help("Mark <item> as hidden.")
.value_name("item")
.takes_value(true)
.multiple(true)
.number_of_values(1),
Arg::with_name("blocklist-file")
.alias("blacklist-file")
.long("blocklist-file")
.help("Mark all contents of <path> as hidden.")
.value_name("path")
.takes_value(true)
.multiple(true)
.number_of_values(1),
Arg::with_name("no-layout-tests")
.long("no-layout-tests")
.help("Avoid generating layout tests for any type."),

@@ -210,12 +225,13 @@ where
"Avoid including doc comments in the output, see: \
https://github.com/rust-lang/rust-bindgen/issues/426",
),
Arg::with_name("no-recursive-whitelist")
.long("no-recursive-whitelist")
Arg::with_name("no-recursive-allowlist")
.long("no-recursive-allowlist")
.alias("no-recursive-whitelist")
.help(
"Disable whitelisting types recursively. This will cause \
"Disable allowlisting types recursively. This will cause \
bindgen to emit Rust code that won't compile! See the \
`bindgen::Builder::whitelist_recursively` method's \
`bindgen::Builder::allowlist_recursively` method's \
documentation for details.",
),
Arg::with_name("objc-extern-crate")

@@ -316,6 +332,9 @@ where
Arg::with_name("no-include-path-detection")
.long("no-include-path-detection")
.help("Do not try to detect default include paths"),
Arg::with_name("fit-macro-constant-types")
.long("fit-macro-constant-types")
.help("Try to fit macro constants into types smaller than u32/i32"),
Arg::with_name("unstable-rust")
.long("unstable-rust")
.help("Generate unstable Rust code (deprecated; use --rust-target instead).")

@@ -338,6 +357,13 @@ where
.takes_value(true)
.multiple(true)
.number_of_values(1),
Arg::with_name("module-raw-line")
.long("module-raw-line")
.help("Add a raw line of Rust code to a given module.")
.takes_value(true)
.multiple(true)
.number_of_values(2)
.value_names(&["module-name", "raw-line"]),
Arg::with_name("rust-target")
.long("rust-target")
.help(&rust_target_help)

@@ -354,11 +380,12 @@ where
Arg::with_name("use-msvc-mangling")
.long("use-msvc-mangling")
.help("MSVC C++ ABI mangling. DEPRECATED: Has no effect."),
Arg::with_name("whitelist-function")
.long("whitelist-function")
Arg::with_name("allowlist-function")
.long("allowlist-function")
.alias("whitelist-function")
.help(
"Whitelist all the free-standing functions matching \
<regex>. Other non-whitelisted functions will not be \
"Allowlist all the free-standing functions matching \
<regex>. Other non-allowlisted functions will not be \
generated.",
)
.value_name("regex")

@@ -368,21 +395,23 @@ where
Arg::with_name("generate-inline-functions")
.long("generate-inline-functions")
.help("Generate inline functions."),
Arg::with_name("whitelist-type")
.long("whitelist-type")
Arg::with_name("allowlist-type")
.long("allowlist-type")
.alias("whitelist-type")
.help(
"Only generate types matching <regex>. Other non-whitelisted types will \
"Only generate types matching <regex>. Other non-allowlisted types will \
not be generated.",
)
.value_name("regex")
.takes_value(true)
.multiple(true)
.number_of_values(1),
Arg::with_name("whitelist-var")
.long("whitelist-var")
Arg::with_name("allowlist-var")
.long("allowlist-var")
.alias("whitelist-var")
.help(
"Whitelist all the free-standing variables matching \
<regex>. Other non-whitelisted variables will not be \
"Allowlist all the free-standing variables matching \
<regex>. Other non-allowlisted variables will not be \
generated.",
)
.value_name("regex")

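For reference, a hedged sketch of how the renamed CLI flags above map onto the bindgen 0.59 builder API; the header name and regexes are made-up placeholders, and the old whitelist_*/blacklist_* methods remain available as deprecated aliases.

```rust
fn main() {
    let _builder = bindgen::builder()
        .header("wrapper.h")                  // placeholder header
        .allowlist_type("Distinct_Type")      // --allowlist-type (was --whitelist-type)
        .allowlist_function("safe_function")  // --allowlist-function (was --whitelist-function)
        .allowlist_var("CONFIG_.*")           // --allowlist-var (was --whitelist-var)
        .blocklist_item("HIDDEN_.*")          // --blocklist-item (was --blacklist-item)
        .blocklist_file(".*/private\\.h");    // --blocklist-file (not present in 0.56)
}
```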
@@ -465,6 +494,13 @@ where
.takes_value(true)
.multiple(true)
.number_of_values(1),
Arg::with_name("must-use-type")
.long("must-use-type")
.help("Add #[must_use] annotation to types matching <regex>.")
.value_name("regex")
.takes_value(true)
.multiple(true)
.number_of_values(1),
Arg::with_name("enable-function-attribute-detection")
.long("enable-function-attribute-detection")
.help(

@@ -483,6 +519,21 @@ where
.long("dynamic-loading")
.takes_value(true)
.help("Use dynamic loading mode with the given library name."),
Arg::with_name("dynamic-link-require-all")
.long("dynamic-link-require-all")
.help("Require successful linkage to all functions in the library."),
Arg::with_name("respect-cxx-access-specs")
.long("respect-cxx-access-specs")
.help("Makes generated bindings `pub` only for items if the items are publically accessible in C++."),
Arg::with_name("translate-enum-integer-types")
.long("translate-enum-integer-types")
.help("Always translate enum integer types to native Rust integer types."),
Arg::with_name("c-naming")
.long("c-naming")
.help("Generate types with C style naming."),
Arg::with_name("explicit-padding")
.long("explicit-padding")
.help("Always output explicit padding fields."),
]) // .args()
.get_matches_from(args);

@@ -569,21 +620,27 @@ where
}
}

if let Some(hidden_types) = matches.values_of("blacklist-type") {
if let Some(hidden_types) = matches.values_of("blocklist-type") {
for ty in hidden_types {
builder = builder.blacklist_type(ty);
builder = builder.blocklist_type(ty);
}
}

if let Some(hidden_functions) = matches.values_of("blacklist-function") {
if let Some(hidden_functions) = matches.values_of("blocklist-function") {
for fun in hidden_functions {
builder = builder.blacklist_function(fun);
builder = builder.blocklist_function(fun);
}
}

if let Some(hidden_identifiers) = matches.values_of("blacklist-item") {
if let Some(hidden_identifiers) = matches.values_of("blocklist-item") {
for id in hidden_identifiers {
builder = builder.blacklist_item(id);
builder = builder.blocklist_item(id);
}
}

if let Some(hidden_files) = matches.values_of("blocklist-file") {
for file in hidden_files {
builder = builder.blocklist_file(file);
}
}

@@ -647,6 +704,10 @@ where
builder = builder.detect_include_paths(false);
}

if matches.is_present("fit-macro-constant-types") {
builder = builder.fit_macro_constants(true);
}

if matches.is_present("time-phases") {
builder = builder.time_phases(true);
}

@@ -670,7 +731,7 @@ where

if let Some(what_to_generate) = matches.value_of("generate") {
let mut config = CodegenConfig::empty();
for what in what_to_generate.split(",") {
for what in what_to_generate.split(',') {
match what {
"functions" => config.insert(CodegenConfig::FUNCTIONS),
"types" => config.insert(CodegenConfig::TYPES),

@@ -741,8 +802,8 @@ where
builder = builder.generate_comments(false);
}

if matches.is_present("no-recursive-whitelist") {
builder = builder.whitelist_recursively(false);
if matches.is_present("no-recursive-allowlist") {
builder = builder.allowlist_recursively(false);
}

if matches.is_present("objc-extern-crate") {

@@ -769,6 +830,13 @@ where
}
}

if let Some(mut values) = matches.values_of("module-raw-line") {
while let Some(module) = values.next() {
let line = values.next().unwrap();
builder = builder.module_raw_line(module, line);
}
}

if matches.is_present("use-core") {
builder = builder.use_core();
}

@@ -785,21 +853,21 @@ where
builder = builder.generate_inline_functions(true);
}

if let Some(whitelist) = matches.values_of("whitelist-function") {
for regex in whitelist {
builder = builder.whitelist_function(regex);
if let Some(allowlist) = matches.values_of("allowlist-function") {
for regex in allowlist {
builder = builder.allowlist_function(regex);
}
}

if let Some(whitelist) = matches.values_of("whitelist-type") {
for regex in whitelist {
builder = builder.whitelist_type(regex);
if let Some(allowlist) = matches.values_of("allowlist-type") {
for regex in allowlist {
builder = builder.allowlist_type(regex);
}
}

if let Some(whitelist) = matches.values_of("whitelist-var") {
for regex in whitelist {
builder = builder.whitelist_var(regex);
if let Some(allowlist) = matches.values_of("allowlist-var") {
for regex in allowlist {
builder = builder.allowlist_var(regex);
}
}

@@ -811,8 +879,14 @@ where

let output = if let Some(path) = matches.value_of("output") {
let file = File::create(path)?;
if let Some(depfile) = matches.value_of("depfile") {
builder = builder.depfile(path, depfile);
}
Box::new(io::BufWriter::new(file)) as Box<dyn io::Write>
} else {
if let Some(depfile) = matches.value_of("depfile") {
builder = builder.depfile("-", depfile);
}
Box::new(io::BufWriter::new(io::stdout())) as Box<dyn io::Write>
};

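The new --depfile plumbing above forwards to Builder::depfile. A minimal sketch of what the flag wires up, with hypothetical file names (not taken from the diff): the first argument names the output the depfile rule is for, the second is where the Makefile-style list of every header read gets written.

```rust
fn main() {
    let bindings = bindgen::builder()
        .header("wrapper.h") // placeholder header
        // Write a dependency file so a build system can rebuild the bindings
        // whenever any of the headers bindgen actually read has changed.
        .depfile("bindings.rs", "bindings.d")
        .generate()
        .expect("unable to generate bindings");
    bindings
        .write_to_file("bindings.rs")
        .expect("couldn't write bindings");
}
```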
@@ -890,10 +964,36 @@ where
}
}

if let Some(must_use_type) = matches.values_of("must-use-type") {
for regex in must_use_type {
builder = builder.must_use_type(regex);
}
}

if let Some(dynamic_library_name) = matches.value_of("dynamic-loading") {
builder = builder.dynamic_library_name(dynamic_library_name);
}

if matches.is_present("dynamic-link-require-all") {
builder = builder.dynamic_link_require_all(true);
}

if matches.is_present("respect-cxx-access-specs") {
builder = builder.respect_cxx_access_specs(true);
}

if matches.is_present("translate-enum-integer-types") {
builder = builder.translate_enum_integer_types(true);
}

if matches.is_present("c-naming") {
builder = builder.c_naming(true);
}

if matches.is_present("explicit-padding") {
builder = builder.explicit_padding(true);
}

let verbose = matches.is_present("verbose");

Ok((builder, output, verbose))

@@ -1 +1 @@
{"files":{"Cargo.toml":"9e05d860f5fba962d28a1a133a455f31cc6cd08c45565e6d682adf0e19f899ba","src/lib.rs":"9bf5a63ce0b189fbbc1e4505e684027db50d4a267e92a5af4e38b4569abd0f54"},"package":"7fdf1b9db47230893d76faad238fd6097fd6d6a9245cd7a4d90dbd639536bbd2"}
{"files":{"CHANGELOG.md":"ae8160bce335d8cb67f0d522402ed7bdb47266ca774d2ba3edc661783c86bbbe","Cargo.toml":"cdc548ec58d7bcee2494dcab1de5996cdfc748622d685e1cf74a50d54edbdf34","LICENSE-APACHE":"553fffcd9b1cb158bc3e9edc35da85ca5c3b3d7d2e61c883ebcfa8a65814b583","LICENSE-MIT":"4455bf75a91154108304cb283e0fea9948c14f13e20d60887cf2552449dea3b1","README.md":"7b378c1f3f7a3c7a8a819a736a43aa6e5d984d11b412224ef25597dd1ae2fac2","src/lib.rs":"1a3880eb7688af89736e52de8deac316698e664b8b1b64f80c346bf79b18f8b8"},"package":"43b2853a4d09f215c24cc5489c992ce46052d359b5109343cbafbf26bc62f8a3"}

@@ -0,0 +1,17 @@
# 1.1.0

* Adds the `std` feature (enabled by default)
* Disabling the `std` feature makes the crate work in `#![no_std]` mode, assuming presence of the `alloc` crate

# 1.0.0

* Adds the `join` convenience function.
* Fixes parsing of `'\\n'` to match the behavior of bash/Zsh/Python `shlex`. The result was previously `\n`, now it is `\\n`.

# 0.1.1

* Adds handling of `#` comments.

# 0.1.0

This is the initial release.

@@ -1,9 +1,24 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)

[package]
name = "shlex"
version = "0.1.1"
authors = ["comex <comexk@gmail.com>"]
license = "MIT/Apache-2.0"
version = "1.1.0"
authors = ["comex <comexk@gmail.com>", "Fenhl <fenhl@fenhl.net>"]
description = "Split a string into shell words, like Python's shlex."
categories = ["command-line-interface", "parser-implementations"]
license = "MIT OR Apache-2.0"
repository = "https://github.com/comex/rust-shlex"
description = """
Split a string into shell words, like Python's shlex.
"""

[features]
default = ["std"]
std = []

@@ -0,0 +1,13 @@
Copyright 2015 Nicholas Allegra (comex).

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

@@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) 2015 Nicholas Allegra (comex).

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

@@ -0,0 +1,30 @@

Same idea as (but implementation not directly based on) the Python shlex
module. However, this implementation does not support any of the Python
module's customization because it makes parsing slower and is fairly useless.
You only get the default settings of shlex.split, which mimic the POSIX shell:
<https://pubs.opengroup.org/onlinepubs/9699919799/utilities/V3_chap02.html>

This implementation also deviates from the Python version in not treating \r
specially, which I believe is more compliant.

The algorithms in this crate are oblivious to UTF-8 high bytes, so they iterate
over the bytes directly as a micro-optimization.

Disabling the `std` feature (which is enabled by default) will allow the crate
to work in `no_std` environments, where the `alloc` crate, and a global
allocator, are available.

# LICENSE

The source code in this repository is Licensed under either of
- Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
https://www.apache.org/licenses/LICENSE-2.0)
- MIT license ([LICENSE-MIT](LICENSE-MIT) or
https://opensource.org/licenses/MIT)

at your option.

Unless you explicitly state otherwise, any contribution intentionally submitted
for inclusion in the work by you, as defined in the Apache-2.0 license, shall
be dual licensed as above, without any additional terms or conditions.

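A short sketch of the shlex 1.1 surface the README and the lib.rs diff below describe; the input strings are arbitrary examples. split gives POSIX-shell-style words, quote escapes a single word, and join (new since 1.0) quotes and concatenates.

```rust
fn main() {
    // POSIX-shell-style splitting; None signals an erroneous input,
    // e.g. an unterminated quotation.
    assert_eq!(
        shlex::split("foo 'bar baz'"),
        Some(vec!["foo".to_owned(), "bar baz".to_owned()])
    );
    assert_eq!(shlex::split("\"unterminated"), None);

    // Quote a single word so a POSIX shell reads it back verbatim.
    assert_eq!(shlex::quote("foo bar"), "\"foo bar\"");

    // join() quotes each word as needed and separates them with single spaces.
    assert_eq!(shlex::join(vec!["foo bar", "baz"]), "\"foo bar\" baz");
}
```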
@@ -1,26 +1,38 @@
// Copyright 2015 Nicholas Allegra (comex).
// Licensed under the Apache License, Version 2.0 <http://www.apache.org/licenses/LICENSE-2.0> or
// the MIT license <http://opensource.org/licenses/MIT>, at your option. This file may not be
// Licensed under the Apache License, Version 2.0 <https://www.apache.org/licenses/LICENSE-2.0> or
// the MIT license <https://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.

//! Same idea as (but implementation not directly based on) the Python shlex module. However, this
//! implementation does not support any of the Python module's customization because it makes
//! parsing slower and is fairly useless. You only get the default settings of shlex.split, which
//! mimic the POSIX shell:
//! http://pubs.opengroup.org/onlinepubs/9699919799/utilities/V3_chap02.html
//! <https://pubs.opengroup.org/onlinepubs/9699919799/utilities/V3_chap02.html>
//!
//! This implementation also deviates from the Python version in not treating \r specially, which I
//! believe is more compliant.
//! This implementation also deviates from the Python version in not treating `\r` specially, which
//! I believe is more compliant.
//!
//! The algorithms in this crate are oblivious to UTF-8 high bytes, so they iterate over the bytes
//! directly as a micro-optimization.
//!
//! Disabling the `std` feature (which is enabled by default) will allow the crate to work in
//! `no_std` environments, where the `alloc` crate, and a global allocator, are available.

use std::borrow::Cow;
#![cfg_attr(not(feature = "std"), no_std)]

extern crate alloc;
use alloc::vec::Vec;
use alloc::borrow::Cow;
use alloc::string::String;
#[cfg(test)]
use alloc::vec;
#[cfg(test)]
use alloc::borrow::ToOwned;

/// An iterator that takes an input string and splits it into the words using the same syntax as
/// the POSIX shell.
pub struct Shlex<'a> {
in_iter: std::str::Bytes<'a>,
in_iter: core::str::Bytes<'a>,
/// The number of newlines read so far, plus one.
pub line_no: usize,
/// An input string is erroneous if it ends while inside a quotation or right after an

@@ -96,17 +108,6 @@ impl<'a> Shlex<'a> {
loop {
if let Some(ch2) = self.next_char() {
match ch2 as char {
'\\' => {
if let Some(ch3) = self.next_char() {
match ch3 as char {
// for single quotes, only these can be escaped
'\'' | '\\' => { result.push(ch3); },
_ => { result.push('\\' as u8); result.push(ch3); }
}
} else {
return Err(());
}
},
'\'' => { return Ok(()); },
_ => { result.push(ch2); },
}

@@ -181,6 +182,15 @@ pub fn quote(in_str: &str) -> Cow<str> {
}
}

/// Convenience function that consumes an iterable of words and turns it into a single string,
/// quoting words when necessary. Consecutive words will be separated by a single space.
pub fn join<'a, I: IntoIterator<Item = &'a str>>(words: I) -> String {
words.into_iter()
.map(quote)
.collect::<Vec<_>>()
.join(" ")
}

#[cfg(test)]
static SPLIT_TEST_ITEMS: &'static [(&'static str, Option<&'static [&'static str]>)] = &[
("foo$baz", Some(&["foo$baz"])),

@@ -191,7 +201,7 @@ static SPLIT_TEST_ITEMS: &'static [(&'static str, Option<&'static [&'static str]
("foo\\\nbar", Some(&["foobar"])),
("\"foo\\\nbar\"", Some(&["foobar"])),
("'baz\\$b'", Some(&["baz\\$b"])),
("'baz\\\''", Some(&["baz\'"])),
("'baz\\\''", None),
("\\", None),
("\"\\", None),
("'\\", None),

@@ -201,6 +211,8 @@ static SPLIT_TEST_ITEMS: &'static [(&'static str, Option<&'static [&'static str]
("foo #bar", Some(&["foo"])),
("foo#bar", Some(&["foo#bar"])),
("foo\"#bar", None),
("'\\n'", Some(&["\\n"])),
("'\\\\n'", Some(&["\\\\n"])),
];

#[test]

@@ -227,3 +239,11 @@ fn test_quote() {
assert_eq!(quote("\""), "\"\\\"\"");
assert_eq!(quote(""), "\"\"");
}

#[test]
fn test_join() {
assert_eq!(join(vec![]), "");
assert_eq!(join(vec![""]), "\"\"");
assert_eq!(join(vec!["a", "b"]), "a b");
assert_eq!(join(vec!["foo bar", "baz"]), "\"foo bar\" baz");
}