Bug 1412486: Revendor rust dependencies. r=me

MozReview-Commit-ID: 5mVyHlwTia1


--HG--
rename : third_party/rust/bindgen/.cargo-checksum.json => third_party/rust/bindgen-0.30.0/.cargo-checksum.json
rename : third_party/rust/bindgen/Cargo.toml => third_party/rust/bindgen-0.30.0/Cargo.toml
rename : third_party/rust/bindgen/build.rs => third_party/rust/bindgen-0.30.0/build.rs
rename : third_party/rust/bindgen/src/codegen/derive_debug.rs => third_party/rust/bindgen-0.30.0/src/codegen/derive_debug.rs
rename : third_party/rust/bindgen-0.29.1/src/codegen/error.rs => third_party/rust/bindgen-0.30.0/src/codegen/error.rs
rename : third_party/rust/bindgen/src/codegen/helpers.rs => third_party/rust/bindgen-0.30.0/src/codegen/helpers.rs
rename : third_party/rust/bindgen/src/codegen/mod.rs => third_party/rust/bindgen-0.30.0/src/codegen/mod.rs
rename : third_party/rust/bindgen/src/codegen/struct_layout.rs => third_party/rust/bindgen-0.30.0/src/codegen/struct_layout.rs
rename : third_party/rust/bindgen-0.29.1/src/extra_assertions.rs => third_party/rust/bindgen-0.30.0/src/extra_assertions.rs
rename : third_party/rust/bindgen/src/features.rs => third_party/rust/bindgen-0.30.0/src/features.rs
rename : third_party/rust/bindgen/src/ir/analysis/derive_copy.rs => third_party/rust/bindgen-0.30.0/src/ir/analysis/derive_copy.rs
rename : third_party/rust/bindgen/src/ir/analysis/derive_debug.rs => third_party/rust/bindgen-0.30.0/src/ir/analysis/derive_debug.rs
rename : third_party/rust/bindgen/src/ir/analysis/derive_default.rs => third_party/rust/bindgen-0.30.0/src/ir/analysis/derive_default.rs
rename : third_party/rust/bindgen/src/ir/analysis/derive_hash.rs => third_party/rust/bindgen-0.30.0/src/ir/analysis/derive_hash.rs
rename : third_party/rust/bindgen/src/ir/analysis/derive_partial_eq.rs => third_party/rust/bindgen-0.30.0/src/ir/analysis/derive_partial_eq.rs
rename : third_party/rust/bindgen/src/ir/analysis/has_destructor.rs => third_party/rust/bindgen-0.30.0/src/ir/analysis/has_destructor.rs
rename : third_party/rust/bindgen/src/ir/analysis/has_float.rs => third_party/rust/bindgen-0.30.0/src/ir/analysis/has_float.rs
rename : third_party/rust/bindgen/src/ir/analysis/has_type_param_in_array.rs => third_party/rust/bindgen-0.30.0/src/ir/analysis/has_type_param_in_array.rs
rename : third_party/rust/bindgen/src/ir/analysis/has_vtable.rs => third_party/rust/bindgen-0.30.0/src/ir/analysis/has_vtable.rs
rename : third_party/rust/bindgen/src/ir/analysis/mod.rs => third_party/rust/bindgen-0.30.0/src/ir/analysis/mod.rs
rename : third_party/rust/bindgen/src/ir/analysis/template_params.rs => third_party/rust/bindgen-0.30.0/src/ir/analysis/template_params.rs
rename : third_party/rust/bindgen/src/ir/comp.rs => third_party/rust/bindgen-0.30.0/src/ir/comp.rs
rename : third_party/rust/bindgen/src/ir/context.rs => third_party/rust/bindgen-0.30.0/src/ir/context.rs
rename : third_party/rust/bindgen/src/ir/derive.rs => third_party/rust/bindgen-0.30.0/src/ir/derive.rs
rename : third_party/rust/bindgen/src/ir/dot.rs => third_party/rust/bindgen-0.30.0/src/ir/dot.rs
rename : third_party/rust/bindgen/src/ir/enum_ty.rs => third_party/rust/bindgen-0.30.0/src/ir/enum_ty.rs
rename : third_party/rust/bindgen/src/ir/function.rs => third_party/rust/bindgen-0.30.0/src/ir/function.rs
rename : third_party/rust/bindgen/src/ir/item.rs => third_party/rust/bindgen-0.30.0/src/ir/item.rs
rename : third_party/rust/bindgen/src/ir/layout.rs => third_party/rust/bindgen-0.30.0/src/ir/layout.rs
rename : third_party/rust/bindgen-0.29.1/src/ir/mod.rs => third_party/rust/bindgen-0.30.0/src/ir/mod.rs
rename : third_party/rust/bindgen/src/ir/module.rs => third_party/rust/bindgen-0.30.0/src/ir/module.rs
rename : third_party/rust/bindgen/src/ir/objc.rs => third_party/rust/bindgen-0.30.0/src/ir/objc.rs
rename : third_party/rust/bindgen/src/ir/template.rs => third_party/rust/bindgen-0.30.0/src/ir/template.rs
rename : third_party/rust/bindgen/src/ir/traversal.rs => third_party/rust/bindgen-0.30.0/src/ir/traversal.rs
rename : third_party/rust/bindgen/src/ir/ty.rs => third_party/rust/bindgen-0.30.0/src/ir/ty.rs
rename : third_party/rust/bindgen/src/ir/var.rs => third_party/rust/bindgen-0.30.0/src/ir/var.rs
rename : third_party/rust/bindgen/src/lib.rs => third_party/rust/bindgen-0.30.0/src/lib.rs
rename : third_party/rust/bindgen-0.29.1/src/log_stubs.rs => third_party/rust/bindgen-0.30.0/src/log_stubs.rs
rename : third_party/rust/bindgen/src/main.rs => third_party/rust/bindgen-0.30.0/src/main.rs
rename : third_party/rust/bindgen/src/options.rs => third_party/rust/bindgen-0.30.0/src/options.rs
rename : third_party/rust/bindgen/src/parse.rs => third_party/rust/bindgen-0.30.0/src/parse.rs
rename : third_party/rust/bindgen/src/regex_set.rs => third_party/rust/bindgen-0.30.0/src/regex_set.rs
rename : third_party/rust/which/.cargo-checksum.json => third_party/rust/which-1.0.2/.cargo-checksum.json
rename : third_party/rust/which/Cargo.toml => third_party/rust/which-1.0.2/Cargo.toml
rename : third_party/rust/which/README.md => third_party/rust/which-1.0.2/README.md
Emilio Cobos Álvarez 2017-10-31 23:32:11 +01:00
Parent 0cd9bd902c
Commit 97fd5248b0
181 changed files with 12417 additions and 20627 deletions

Some file diffs are hidden because one or more lines are too long

@@ -1,61 +0,0 @@
<!-- Thanks for filing a bindgen issue! We appreciate it :-) -->
### Input C/C++ Header
```C++
// Insert your minimal C or C++ header here.
//
// It should *NOT* have any `#include`s! Not all systems have the same header
// files, and therefore any `#include` harms reproducibility. Additionally,
// the test case isn't minimal since the included file almost assuredly
// contains things that aren't necessary to reproduce the bug, and makes
// tracking it down much more difficult.
//
// Use the `--dump-preprocessed-input` flag or the
// `bindgen::Builder::dump_preprocessed_input` method to make your test case
// standalone and without `#include`s, and then use C-Reduce to minimize it:
// https://github.com/rust-lang-nursery/rust-bindgen/blob/master/CONTRIBUTING.md#using-creduce-to-minimize-test-cases
```
### Bindgen Invocation
<!-- Place either the `bindgen::Builder` or the command line flags used here. -->
```Rust
bindgen::Builder::default()
.header("input.h")
.generate()
.unwrap()
```
or
```
$ bindgen input.h --whatever --flags
```
### Actual Results
```
Insert panic message and backtrace (set the `RUST_BACKTRACE=1` env var) here.
```
and/or
```rust
// Insert the (incorrect/buggy) generated bindings here
```
and/or
```
Insert compilation errors generated when compiling the bindings with rustc here
```
### Expected Results
<!--
Replace this with a description of what you expected instead of the actual
results. The more precise, the better! For example, if a struct in the generated
bindings is missing a field that exists in the C/C++ struct, note that here.
-->

third_party/rust/bindgen-0.29.1/.travis.yml (vendored)

@@ -1,48 +0,0 @@
language: rust
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- gcc-5
os:
- linux
rust:
- stable
env:
global:
- CARGO_TARGET_DIR=/tmp/bindgen
matrix:
- LLVM_VERSION=3.7.1 BINDGEN_FEATURES=testing_only_libclang_3_8
- LLVM_VERSION=3.8.1 BINDGEN_FEATURES=testing_only_libclang_3_8
- LLVM_VERSION=3.9.0 BINDGEN_FEATURES=testing_only_libclang_3_9
- LLVM_VERSION=4.0.0 BINDGEN_FEATURES=testing_only_libclang_4
matrix:
fast_finish: true
allow_failures:
- env: LLVM_VERSION=3.7.1 BINDGEN_FEATURES=testing_only_libclang_3_8
cache:
directories:
- $HOME/.cargo
before_install: . ./ci/before_install.sh
script:
# - ./ci/assert-rustfmt.sh
- BINDGEN_FEATURES="$BINDGEN_FEATURES" ./ci/assert-docs.sh
- BINDGEN_FEATURES="$BINDGEN_FEATURES" ./ci/test.sh
- ./ci/test-book.sh
after_success:
- test "$TRAVIS_PULL_REQUEST" == "false" &&
test "$TRAVIS_BRANCH" == "master" &&
./ci/deploy-book.sh
notifications:
webhooks: http://build.servo.org:54856/travis

@@ -1,339 +0,0 @@
# Contributing to `bindgen`
Hi! We'd love to have your contributions! If you want help or mentorship, reach
out to us in a GitHub issue, or stop by
[#servo on irc.mozilla.org](irc://irc.mozilla.org#servo) and introduce yourself.
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
- [Code of Conduct](#code-of-conduct)
- [Filing an Issue](#filing-an-issue)
- [Looking to Start Contributing to `bindgen`?](#looking-to-start-contributing-to-bindgen)
- [Building](#building)
- [Testing](#testing)
- [Overview](#overview)
- [Running All Tests](#running-all-tests)
- [Running a Single Test](#running-a-single-test)
- [Authoring New Tests](#authoring-new-tests)
- [Test Expectations and `libclang` Versions](#test-expectations-and-libclang-versions)
- [Automatic code formatting](#automatic-code-formatting)
- [Pull Requests and Code Reviews](#pull-requests-and-code-reviews)
- [Generating Graphviz Dot Files](#generating-graphviz-dot-files)
- [Debug Logging](#debug-logging)
- [Using `creduce` to Minimize Test Cases](#using-creduce-to-minimize-test-cases)
- [Isolating Your Test Case](#isolating-your-test-case)
- [Writing a Predicate Script](#writing-a-predicate-script)
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
## Code of Conduct
We abide by the [Rust Code of Conduct][coc] and ask that you do as well.
[coc]: https://www.rust-lang.org/en-US/conduct.html
## Filing an Issue
Think you've found a bug? File an issue! To help us understand and reproduce the
issue, provide us with:
* A (preferably reduced) C/C++ header file that reproduces the issue
* The `bindgen` flags used to reproduce the issue with the header file
* The expected `bindgen` output
* The actual `bindgen` output
* The [debugging logs](#logs) generated when running `bindgen` on this testcase
## Looking to Start Contributing to `bindgen`?
* [Issues labeled "easy"](https://github.com/rust-lang-nursery/rust-bindgen/issues?q=is%3Aopen+is%3Aissue+label%3AE-easy)
* [Issues labeled "less easy"](https://github.com/rust-lang-nursery/rust-bindgen/issues?q=is%3Aopen+is%3Aissue+label%3AE-less-easy)
* Still can't find something to work on? [Drop a comment here](https://github.com/rust-lang-nursery/rust-bindgen/issues/747)
## Building
To build the `bindgen` library and the `bindgen` executable:
```
$ cargo build
```
If you installed multiple versions of llvm, it may not be able to locate the
latest version of libclang. In that case, you may want to either uninstall other
versions of llvm, or specify the path of the desired libclang explicitly:
```
$ export LIBCLANG_PATH=path/to/clang-3.9/lib
```
On Linux and macOS, you may also need to add a path to `libclang.so` (usually
the same path as above) to the library search path. This can be done as below:
```
$ export LD_LIBRARY_PATH=path/to/clang-3.9/lib # for Linux
$ export DYLD_LIBRARY_PATH=path/to/clang-3.9/lib # for macOS
```
Additionally, you may want to build and test with the `testing_only_docs`
feature to ensure that you aren't forgetting to document types and functions. CI
will catch it if you forget, but the turnaround will be a lot slower ;)
```
$ cargo build --features testing_only_docs
```
## Testing
Code for binding generation and testing thereof is in the `bindgen` crate.
The following sections assume you are working in that subdirectory.
### Overview
Input C/C++ test headers reside in the `tests/headers` directory. Expected
output Rust bindings live in `tests/expectations/tests`.
For example, `tests/headers/my_header.h`'s expected generated Rust bindings
would be `tests/expectations/tests/my_header.rs`.
Run `cargo test` to compare generated Rust bindings to the expectations.
### Running All Tests
```
$ cargo test --features testing_only_libclang_$VERSION
```
Where `$VERSION` is one of:
* `4`
* `3_9`
* `3_8`
depending on which version of `libclang` you have installed.
### Running a Single Test
To generate bindings for a single test header, compile the bindings, and run the
layout assertion tests for those bindings, use the `tests/test-one.sh`
script. It supports fuzzy searching for test headers. For example, to test
`tests/headers/what_is_going_on.hpp`, execute this command:
```
$ ./tests/test-one.sh going
```
### Authoring New Tests
To add a new test header to the suite, simply put it in the `tests/headers`
directory. Next, run `bindgen` to generate the initial expected output Rust
bindings. Put those in `tests/expectations/tests`.
If your new test requires certain flags to be passed to `bindgen`, you can
specify them at the top of the test header, with a comment like this:
```c
// bindgen-flags: --enable-cxx-namespaces -- -std=c++14
```
Then verify the new Rust bindings compile and pass some basic tests:
```
$ cargo test -p tests_expectations
```
### Test Expectations and `libclang` Versions
If a test generates different bindings across different `libclang` versions (for
example, because we take advantage of better/newer APIs when possible), then you
can add multiple test expectations, one for each supported `libclang`
version. Instead of having a single `tests/expectations/tests/my_test.rs` file,
add each of:
* `tests/expectations/tests/libclang-4/my_test.rs`
* `tests/expectations/tests/libclang-3.9/my_test.rs`
* `tests/expectations/tests/libclang-3.8/my_test.rs`
If you need to update the test expectations for a test file that generates
different bindings for different `libclang` versions, you *don't* need to have
many versions of `libclang` installed locally. Just make a work-in-progress pull
request, and then when Travis CI fails, it will log a diff of the
expectations. Use the diff to patch the appropriate expectation file locally and
then update your pull request.
## Automatic code formatting
We use [`rustfmt`](https://github.com/rust-lang-nursery/rustfmt) to enforce a
consistent code style across the whole `bindgen` code base. This is enforced in
CI, and your pull requests will get automatically rejected if you don't
re-format with the latest `rustfmt` before pushing.
You can install the latest version of `rustfmt` with this command:
```
$ cargo install -f rustfmt
```
Ensure that `~/.cargo/bin` is on your path.
Once that is taken care of, you can (re)format all code by running this command:
```
$ cargo fmt
```
The code style is described in the `rustfmt.toml` file in top level of the repo.
## Pull Requests and Code Reviews
Ensure that each commit stands alone, and passes tests. This enables better `git
bisect`ing when needed. If your commits do not stand on their own, then rebase
them on top of the latest master and squash them into a single commit.
All pull requests undergo code review before merging. To request review, comment
`r? @github_username_of_reviewer`. They will respond with `r+` to approve the
pull request, or may leave feedback and request changes to the pull request. Any
changes should be squashed into the original commit.
Unsure who to ask for review? Ask any of:
* `@emilio`
* `@fitzgen`
More resources:
* [Servo's GitHub Workflow](https://github.com/servo/servo/wiki/Github-workflow)
* [Beginner's Guide to Rebasing and Squashing](https://github.com/servo/servo/wiki/Beginner's-guide-to-rebasing-and-squashing)
## Generating Graphviz Dot Files
We can generate [Graphviz](http://graphviz.org/pdf/dotguide.pdf) dot files from
our internal representation of a C/C++ input header, and then you can create a
PNG or PDF from it with Graphviz's `dot` program. This is very useful when
debugging bindgen!
First, make sure you have Graphviz and `dot` installed:
```
$ brew install graphviz # OS X
$ sudo dnf install graphviz # Fedora
$ # Etc...
```
Then, use the `--emit-ir-graphviz` flag to generate a `dot` file from our IR:
```
$ cargo run -- example.hpp --emit-ir-graphviz output.dot
```
Finally, convert the `dot` file to an image:
```
$ dot -Tpng output.dot -o output.png
```
The final result will look something like this:
[![An example graphviz rendering of our IR](./example-graphviz-ir.png)](./example-graphviz-ir.png)
## Debug Logging
To help debug what `bindgen` is doing, you can define the environment variable
`RUST_LOG=bindgen` to get a bunch of debugging log spew.
```
$ RUST_LOG=bindgen ./target/debug/bindgen [flags...] ~/path/to/some/header.h
```
This logging can also be used when debugging failing tests:
```
$ RUST_LOG=bindgen cargo test
```
## Using `creduce` to Minimize Test Cases
If you are hacking on `bindgen` and find a test case that causes an unexpected
panic, results in bad Rust bindings, or some other incorrectness in `bindgen`,
then using `creduce` can help reduce the test case to a minimal one.
[Follow these instructions for building and/or installing `creduce`.](https://github.com/csmith-project/creduce/blob/master/INSTALL)
Running `creduce` requires two things:
1. Your isolated test case, and
2. A script to act as a predicate script describing whether the behavior you're
trying to isolate occurred.
With those two things in hand, running `creduce` looks like this:
$ creduce ./predicate.sh ./isolated_test_case.h
### Isolating Your Test Case
If you're using `bindgen` as a command line tool, pass
the `--dump-preprocessed-input` flag.
If you're using `bindgen` as a Rust library, invoke the
`bindgen::Builder::dump_preprocessed_input` method where you call
`bindgen::Builder::generate`.
Afterwards, there should be a `__bindgen.i` or `__bindgen.ii` file containing
the combined and preprocessed input headers, which is usable as an isolated,
standalone test case.
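For the library route, a minimal sketch of this step might look like the following (the header name and clang arg are placeholders for illustration, not taken from this commit):
```rust
// Hypothetical snippet: dump the combined, preprocessed input so it can be
// handed to creduce. `my_header.hpp` and the clang arg are placeholders.
extern crate bindgen;

fn main() {
    bindgen::Builder::default()
        .header("my_header.hpp")
        .clang_arg("-std=c++14")
        // Writes __bindgen.i / __bindgen.ii (the preprocessed input headers)
        // into the current working directory.
        .dump_preprocessed_input()
        .expect("could not dump preprocessed input");
}
```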
### Writing a Predicate Script
Writing a `predicate.sh` script for a `bindgen` test case is fairly
straightforward. One potential gotcha is that `creduce` can and will attempt to
reduce test cases into invalid C/C++ code. That might be useful for C/C++
compilers, but we generally only care about valid C/C++ input headers.
Here is a skeleton predicate script:
```bash
#!/usr/bin/env bash
# Exit the script with a nonzero exit code if:
# * any individual command finishes with a nonzero exit code, or
# * we access any undefined variable.
set -eu
# Print out Rust backtraces on panic. Useful for minimizing a particular panic.
export RUST_BACKTRACE=1
# If the `libclang.so` you're using for `bindgen` isn't the system
# `libclang.so`, let the linker find it.
export LD_LIBRARY_PATH=~/path/to/your/directory/containing/libclang
# Make sure that the reduced test case is valid C/C++ by compiling it. If it
# isn't valid C/C++, this command will exit with a nonzero exit code and cause
# the whole script to do the same.
clang[++ --std=c++14] -c ./pre_processed_header.hpp
# Run `bindgen` and `grep` for the thing you're hunting down! Make sure to include
# `2>&1` to get at stderr if you're hunting down a panic.
~/src/rust-bindgen/target/debug/bindgen \
./pre_processed_header.hpp \
[ <extra flags> ] \
2>&1 \
| grep "<pattern in generated bindings or a panic string or ...>"
```
When hunting down a panic, I `grep`ed like this:
... | grep "thread main panicked at '<panic error message here>'"
When hunting down bad codegen for a base member, I `grep`ed like this:
... | grep "pub _base: MyInvalidBaseTypeThatShouldntBeHere"
That's pretty much it! I want to impress upon you that `creduce` is *really*
helpful and has enabled me to reduce 30k lines of test case into 5 lines. And it
works pretty quickly too. Super valuable tool to have in your belt when hacking
on `bindgen`!
Happy bug hunting and test case reducing!
[More information on using `creduce`.](https://embed.cs.utah.edu/creduce/using/)

third_party/rust/bindgen-0.29.1/LICENSE (vendored)

@@ -1,28 +0,0 @@
Copyright (c) 2013, Jyun-Yan You
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the author nor the names of his contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
SUCH DAMAGE.

third_party/rust/bindgen-0.29.1/README.md (vendored)

@@ -1,43 +0,0 @@
# `bindgen`
**`bindgen` automatically generates Rust FFI bindings to C and C++ libraries.**
For example, given the C header `doggo.h`:
```c
typedef struct Doggo {
int many;
char wow;
} Doggo;
void eleven_out_of_ten_majestic_af(Doggo* pupper);
```
`bindgen` produces Rust FFI code allowing you to call into the `doggo` library's
functions and use its types:
```rust
/* automatically generated by rust-bindgen */
#[repr(C)]
pub struct Doggo {
pub many: ::std::os::raw::c_int,
pub wow: ::std::os::raw::c_char,
}
extern "C" {
pub fn eleven_out_of_ten_majestic_af(pupper: *mut Doggo);
}
```
## Users Guide
[📚 Read the `bindgen` users guide here! 📚](https://rust-lang-nursery.github.io/rust-bindgen)
## API Reference
[API reference documentation is on docs.rs](https://docs.rs/bindgen)
## Contributing
[See `CONTRIBUTING.md` for hacking on `bindgen`!](./CONTRIBUTING.md)

third_party/rust/bindgen-0.29.1/appveyor.yml (vendored)

@@ -1,57 +0,0 @@
environment:
RUST_BACKTRACE: 1
RUST_CHANNEL: "%Configuration%"
matrix:
- TARGET: gnu
LLVM_VERSION: 3.9.0-2
BINDGEN_FEATURES: testing_only_libclang_3_9
- TARGET: gnu
LLVM_VERSION: 4.0.0-1
BINDGEN_FEATURES: testing_only_libclang_4
- TARGET: msvc
LLVM_VERSION: 3.9.0
BINDGEN_FEATURES: testing_only_libclang_3_9
- TARGET: msvc
LLVM_VERSION: 4.0.0
BINDGEN_FEATURES: testing_only_libclang_4
configuration:
- stable
- nightly
platform:
- x64
- x86
branches:
only:
- master
install:
- if %PLATFORM% == x86 (set RUST_PLATFORM=i686&set MINGW_BITS=32) else (set RUST_PLATFORM=x86_64&set MINGW_BITS=64)
- echo %RUST_CHANNEL%
- echo %RUST_PLATFORM%
- echo %MINGW_BITS%
- echo %RUST_PLATFORM%-pc-windows-%TARGET%
# install Rust
- appveyor-retry appveyor DownloadFile https://win.rustup.rs/ -FileName rustup-init.exe
- rustup-init.exe -y --default-host %RUST_PLATFORM%-pc-windows-%TARGET% --default-toolchain %RUST_CHANNEL%
- set PATH=%PATH%;C:\Users\appveyor\.cargo\bin
# install LLVM for GNU
- if %TARGET%==gnu set PATH=C:\msys64\mingw%MINGW_BITS%\bin;C:\msys64\usr\bin\;%PATH%
- if %TARGET%==gnu set "MINGW_URL=http://repo.msys2.org/mingw/%RUST_PLATFORM%/mingw-w64-%RUST_PLATFORM%"
- if %TARGET%==gnu set "URL_VER=%LLVM_VERSION%-any.pkg.tar.xz"
- if %TARGET%==gnu bash -lc "pacman -U --noconfirm $MINGW_URL-clang-$URL_VER $MINGW_URL-llvm-$URL_VER"
- if %TARGET%==gnu bash -lc "clang --version"
# install LLVM for MSVC
- if %TARGET%==msvc appveyor-retry appveyor DownloadFile http://releases.llvm.org/%LLVM_VERSION%/LLVM-%LLVM_VERSION%-win64.exe -FileName llvm-installer.exe
- if %TARGET%==msvc 7z x llvm-installer.exe -oc:\llvm-binary
- if %TARGET%==msvc set PATH=C:\llvm-binary\bin;%PATH%
- if %TARGET%==msvc where clang
- if %TARGET%==msvc clang --version
build_script:
- if %TARGET%==msvc .\ci\test.bat
- if %TARGET%==gnu bash -lc "export BINDGEN_FEATURES=$BINDGEN_FEATURES; cd $APPVEYOR_BUILD_FOLDER; ./ci/test.sh"
test: off

@@ -1,3 +0,0 @@
title = "The `bindgen` User Guide"
author = "The Servo project developers"
description = "`bindgen` automatically generates Rust FFI bindings to C and C++ libraries."

@@ -1,21 +0,0 @@
# Summary
- [Introduction](./introduction.md)
- [Requirements](./requirements.md)
- [Library Usage with `build.rs`](./library-usage.md)
- [Tutorial](./tutorial-0.md)
- [Add `bindgen` as a Build Dependency](./tutorial-1.md)
- [Create a `wrapper.h` Header](./tutorial-2.md)
- [Create a `build.rs` File](./tutorial-3.md)
- [Include the Generated Bindings in `src/lib.rs`](./tutorial-4.md)
- [Write a Sanity Test](./tutorial-5.md)
- [Publish Your Crate!](./tutorial-6.md)
- [Command Line Usage](./command-line-usage.md)
- [Customizing the Generated Bindings](./customizing-generated-bindings.md)
- [Whitelisting](./whitelisting.md)
- [Blacklisting](./blacklisting.md)
- [Treating a Type as an Opaque Blob of Bytes](./opaque.md)
- [Replacing One Type with Another](./replacing-types.md)
- [Preventing the Derivation of `Copy` and `Clone`](./nocopy.md)
- [Generating Bindings to C++](./cpp.md)
- [Using Unions](./using-unions.md)

@@ -1,26 +0,0 @@
# Blacklisting
If you need to provide your own custom translation of some type (for example,
because you need to wrap one of its fields in an `UnsafeCell`), you can
explicitly blacklist generation of its definition. Uses of the blacklisted type
will still appear in other types' definitions. (If you don't want the type to
appear in the bindings at
all, [make it opaque](./opaque.html) instead of
blacklisting it.)
### Library
* [`bindgen::Builder::hide_type`](https://docs.rs/bindgen/0.23.1/bindgen/struct.Builder.html#method.hide_type)
### Command Line
* `--blacklist-type <type>`
### Annotations
```cpp
/// <div rustbindgen hide></div>
class Foo {
// ...
};
```
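For reference, a rough library-side sketch of the same thing (the `wrapper.h` and `Foo` names are made up for illustration):
```rust
// Hypothetical example: blacklist `Foo` via the builder method named above.
extern crate bindgen;

fn main() {
    let bindings = bindgen::Builder::default()
        .header("wrapper.h")
        // Don't emit a definition for `Foo`; other types may still mention it.
        .hide_type("Foo")
        .generate()
        .expect("could not generate bindings");

    bindings
        .write_to_file("bindings.rs")
        .expect("could not write bindings");
}
```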

@@ -1 +0,0 @@
# Chapter 1

@@ -1,27 +0,0 @@
# Command Line Usage
Install the `bindgen` executable with `cargo`:
```bash
$ cargo install bindgen
```
The `bindgen` executable is installed to `~/.cargo/bin`. You have to add that
directory to your `$PATH` to use `bindgen`.
`bindgen` takes the path to an input C or C++ header file, and optionally an
output file path for the generated bindings. If the output file path is not
supplied, the bindings are printed to `stdout`.
If we wanted to generate Rust FFI bindings from a C header named `input.h` and
put them in the `bindings.rs` file, we would invoke `bindgen` like this:
```bash
$ bindgen input.h -o bindings.rs
```
For more details, pass the `--help` flag:
```bash
$ bindgen --help
```

@@ -1,27 +0,0 @@
# Generating Bindings to C++
`bindgen` can handle a surprising number of C++ features, but not all of
them. When `bindgen` can't translate some C++ construct into Rust, it usually
comes down to one of two things:
1. Rust has no equivalent language feature
2. C++ is *hard!*
Notable C++ features that are unsupported or only partially supported, and for
which `bindgen` *should* generate opaque blobs whenever it finds an occurrence
of them in a type it is generating bindings for:
* Template specialization
* Partial template specialization
* Traits templates
* SFINAE
When passing in header files, the file will automatically be treated as C++ if
it ends in `.hpp`. If it doesn't, the `-x c++` clang args can be used to force
C++ mode. You probably also want to pass `-std=c++14` or a similar clang arg.
You pretty much **must** use [whitelisting](./whitelisting.html) when working
with C++ to avoid pulling in all of the `std::*` types, many of which `bindgen`
cannot handle. Additionally, you may want to mark other types
as [opaque](./opaque.html) that `bindgen` stumbles on.
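As a hedged sketch of how that advice translates to the builder API (the header name and whitelist pattern are placeholders):
```rust
// Hypothetical example: force C++ mode, pick a standard, and whitelist only
// the namespace we care about so `std::*` isn't pulled in wholesale.
extern crate bindgen;

fn main() {
    let bindings = bindgen::Builder::default()
        .header("cpp_api.hpp")
        .clang_arg("-x")
        .clang_arg("c++")
        .clang_arg("-std=c++14")
        .whitelisted_type("my_ns::.*")
        .generate()
        .expect("could not generate bindings");

    bindings
        .write_to_file("bindings.rs")
        .expect("could not write bindings");
}
```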

@@ -1,28 +0,0 @@
# Customizing the Generated Bindings
The translation of classes, structs, enums, and typedefs can be adjusted in a
few ways:
1. By using the `bindgen::Builder`'s configuration methods, when using `bindgen`
as a library.
2. By passing extra flags and options to the `bindgen` executable.
3. By adding an annotation comment to the C/C++ source code. Annotations are
specially formatted HTML tags inside doxygen style comments:
* For single line comments:
```c
/// <div rustbindgen></div>
```
* For multi-line comments:
```c
/**
* <div rustbindgen></div>
*/
```
We'll leave the nitty-gritty details to
the [docs.rs API reference](https://docs.rs/bindgen) and `bindgen --help`, but
provide higher level concept documentation here.

@@ -1,34 +0,0 @@
# Introduction
**[`bindgen`](https://github.com/rust-lang-nursery/rust-bindgen) automatically generates Rust
FFI bindings to C and C++ libraries.**
For example, given the C header `cool.h`:
```c
typedef struct CoolStruct {
int x;
int y;
} CoolStruct;
void cool_function(int i, char c, CoolStruct* cs);
```
`bindgen` produces Rust FFI code allowing you to call into the `cool` library's
functions and use its types:
```rust
/* automatically generated by rust-bindgen */
#[repr(C)]
pub struct CoolStruct {
pub x: ::std::os::raw::c_int,
pub y: ::std::os::raw::c_int,
}
extern "C" {
pub fn cool_function(i: ::std::os::raw::c_int,
c: ::std::os::raw::c_char,
cs: *mut CoolStruct);
}
```

@@ -1,22 +0,0 @@
# Library Usage with `build.rs`
💡 This is the recommended way to use `bindgen`. 💡
Often times C and C++ headers will have platform- and architecture-specific
`#ifdef`s that affect the shape of the Rust FFI bindings we need to create to
interface Rust code with the outside world. By using `bindgen` as a library
inside your `build.rs`, you can generate bindings for the current target
on-the-fly. Otherwise, you would need to generate and maintain
`x86_64-unknown-linux-gnu-bindings.rs`, `x86_64-apple-darwin-bindings.rs`,
etc... separate bindings files for each of your supported targets, which can be
a huge pain. The downside is that everyone building your crate also needs
`libclang` available to run `bindgen`.
## Library API Documentation
[📚 There is complete API reference documentation on docs.rs 📚](https://docs.rs/bindgen)
## Tutorial
The next section contains a detailed, step-by-step tutorial for using `bindgen`
as a library inside `build.rs`.

@@ -1,20 +0,0 @@
# Preventing the Derivation of `Copy` and `Clone`
`bindgen` will attempt to derive the `Copy` and `Clone` traits on a best-effort
basis. Sometimes, it might not understand that although adding `#[derive(Copy,
Clone)]` to a translated type definition will compile, it still shouldn't do
that for reasons it can't know. In these cases, the `nocopy` annotation can be
used to prevent bindgen from autoderiving the `Copy` and `Clone` traits for a type.
```c
/**
* Although bindgen can't know, this struct is not safe to move because pthread
* mutexes can't move in memory!
*
* <div rustbindgen nocopy></div>
*/
struct MyMutexWrapper {
pthread_mutex_t raw;
// ...
};
```

@@ -1,26 +0,0 @@
# Treating a Type as an Opaque Blob of Bytes
Sometimes a type definition is simply not translatable to Rust, for example it
uses
[C++'s SFINAE](https://en.wikipedia.org/wiki/Substitution_failure_is_not_an_error) for
which Rust has no equivalent. In these cases, it is best to treat all
occurrences of the type as an opaque blob of bytes with a size and
alignment. `bindgen` will attempt to detect such cases and do this
automatically, but other times it needs some explicit help from you.
### Library
* [`bindgen::Builder::opaque_type`](https://docs.rs/bindgen/0.23.1/bindgen/struct.Builder.html#method.opaque_type)
### Command Line
* `--opaque-type <type>`
### Annotation
```cpp
/// <div rustbindgen opaque></div>
class Foo {
// ...
};
```
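A minimal library-side sketch of the opaque option (the header and type names are placeholders):
```rust
// Hypothetical example: keep `Foo` as an opaque blob of bytes with the right
// size and alignment instead of trying to translate its definition.
extern crate bindgen;

fn main() {
    let _bindings = bindgen::Builder::default()
        .header("wrapper.h")
        .opaque_type("Foo")
        .generate()
        .expect("could not generate bindings");
}
```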

@@ -1,27 +0,0 @@
# Replacing One Type with Another
The `replaces` annotation can be used to use a type as a replacement for another
(presumably more complex) type. This is used in Stylo to generate bindings for
structures that for multiple reasons are too complex for bindgen to understand.
For example, in a C++ header:
```cpp
/**
* <div rustbindgen replaces="nsTArray"></div>
*/
template<typename T>
class nsTArray_Simple {
T* mBuffer;
public:
// The existence of a destructor here prevents bindgen from deriving the Clone
// trait via a simple memory copy.
~nsTArray_Simple() {};
};
```
That way, after code generation, the bindings for the `nsTArray` type are
the ones that would be generated for `nsTArray_Simple`.
Replacing is only available as an annotation. To replace a C or C++ definition
with a Rust definition, use [blacklisting](./blacklisting.html).

@@ -1,67 +0,0 @@
# Requirements
This page lists the requirements for running `bindgen` and how to get them.
## Clang
`bindgen` leverages `libclang` to preprocess, parse, and type check C and C++
header files.
It is recommended to use Clang 3.9 or greater, however `bindgen` can run with
older Clangs with some features disabled.
* **If you are generating bindings to C,** 3.7 and 3.8 will probably work OK for
you.
* **If you are generating bindings to C++,** you almost definitely want 3.9 or
greater.
### Installing Clang 3.9
#### Windows
Download and install the official pre-built binary from
[LLVM download page](http://releases.llvm.org/download.html).
#### macOS
If you use Homebrew:
```bash
$ brew install llvm
```
If you use MacPorts:
```bash
$ port install clang-3.9
```
#### Debian-based Linuxes
```bash
# apt-get install llvm-3.9-dev libclang-3.9-dev clang-3.9
```
Ubuntu 16.10 provides the necessary packages directly. If you are using an older
version of Ubuntu or other Debian-based distros, you may need to add the LLVM
repos to get version 3.9. See http://apt.llvm.org/.
#### Arch
```bash
# pacman -S clang
```
#### From source
If your package manager doesn't yet offer Clang 3.9, you'll need to build from
source. For that, follow the
instructions [here](http://clang.llvm.org/get_started.html).
Those instructions list optional steps. For `bindgen`:
* Check out and build clang
* Check out and build the extra-clang-tools
* You do not need to check out or build compiler-rt
* You do not need to check out or build libcxx

@@ -1,12 +0,0 @@
# Tutorial
The following tutorial is adapted from [this blog post][tutorial].
What follows is a whirlwind introductory tutorial to using `bindgen` from inside
`build.rs`. We'll generate bindings to `bzip2` (which is available on most
systems) on-the-fly.
[**TL;DR?** The full tutorial code is available here.][example]
[tutorial]: http://fitzgeraldnick.com/2016/12/14/using-libbindgen-in-build-rs.html
[example]: https://github.com/fitzgen/bindgen-tutorial-bzip2-sys

@@ -1,9 +0,0 @@
# Add `bindgen` as a Build Dependency
Declare a build-time dependency on `bindgen` by adding it to the
`[build-dependencies]` section of our crate's `Cargo.toml` metadata file:
```toml
[build-dependencies]
bindgen = "0.26.3"
```

@@ -1,20 +0,0 @@
# Create a `wrapper.h` Header
The `wrapper.h` file will include all the various headers containing
declarations of structs and functions we would like bindings for. In the
particular case of `bzip2`, this is pretty easy since the entire public API is
contained in a single header. For a project like [SpiderMonkey][spidermonkey],
where the public API is split across multiple header files and grouped by
functionality, we'd want to include all those headers we want to bind to in this
single `wrapper.h` entry point for `bindgen`.
Here is our `wrapper.h`:
```c
#include <bzlib.h>
```
This is also where we would add any [replacement types](./replacing-types.html),
if we were using some.
[spidermonkey]: https://developer.mozilla.org/en-US/docs/Mozilla/Projects/SpiderMonkey/How_to_embed_the_JavaScript_engine

@@ -1,46 +0,0 @@
# Create a `build.rs` File
We create a `build.rs` file in our crate's root. Cargo will pick up on the existence of this file, and compile and execute it before the rest of the crate is built.
This can be used to generate code at compile time.
And of course in our case, we will be generating Rust FFI
bindings to `bzip2` at compile time. The resulting bindings will be written to
`$OUT_DIR/bindings.rs` where `$OUT_DIR` is chosen by `cargo` and is something
like `./target/debug/build/bindgen-tutorial-bzip2-sys-afc7747d7eafd720/out/`.
```rust,ignore
extern crate bindgen;
use std::env;
use std::path::PathBuf;
fn main() {
// Tell cargo to tell rustc to link the system bzip2
// shared library.
println!("cargo:rustc-link-lib=bz2");
// The bindgen::Builder is the main entry point
// to bindgen, and lets you build up options for
// the resulting bindings.
let bindings = bindgen::Builder::default()
// The input header we would like to generate
// bindings for.
.header("wrapper.h")
// Finish the builder and generate the bindings.
.generate()
// Unwrap the Result and panic on failure.
.expect("Unable to generate bindings");
// Write the bindings to the $OUT_DIR/bindings.rs file.
let out_path = PathBuf::from(env::var("OUT_DIR").unwrap());
bindings
.write_to_file(out_path.join("bindings.rs"))
.expect("Couldn't write bindings!");
}
```
Now, when we run `cargo build`, our bindings to `bzip2` are generated on the
fly!
[There's more info about `build.rs` files in the crates.io documentation.][build-rs]
[build-rs]: http://doc.crates.io/build-script.html

@@ -1,57 +0,0 @@
# Include the Generated Bindings in `src/lib.rs`
We can use the `include!` macro to dump our generated bindings right into our
crate's main entry point, `src/lib.rs`:
```rust,ignore
#![allow(non_upper_case_globals)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
include!(concat!(env!("OUT_DIR"), "/bindings.rs"));
```
Because `bzip2`'s symbols do not follow Rust's style conventions, we suppress a
bunch of warnings with a few `#![allow(...)]` pragmas.
We can run `cargo build` again to check that the bindings themselves compile:
```bash
$ cargo build
Compiling bindgen-tutorial-bzip2-sys v0.1.0
Finished debug [unoptimized + debuginfo] target(s) in 62.8 secs
```
And we can run `cargo test` to verify that the layout, size, and alignment of
our generated Rust FFI structs match what `bindgen` thinks they should be:
```bash
$ cargo test
Compiling bindgen-tutorial-bzip2-sys v0.1.0
Finished debug [unoptimized + debuginfo] target(s) in 0.0 secs
Running target/debug/deps/bzip2_sys-10413fc2af207810
running 14 tests
test bindgen_test_layout___darwin_pthread_handler_rec ... ok
test bindgen_test_layout___sFILE ... ok
test bindgen_test_layout___sbuf ... ok
test bindgen_test_layout__bindgen_ty_1 ... ok
test bindgen_test_layout__bindgen_ty_2 ... ok
test bindgen_test_layout__opaque_pthread_attr_t ... ok
test bindgen_test_layout__opaque_pthread_cond_t ... ok
test bindgen_test_layout__opaque_pthread_mutex_t ... ok
test bindgen_test_layout__opaque_pthread_condattr_t ... ok
test bindgen_test_layout__opaque_pthread_mutexattr_t ... ok
test bindgen_test_layout__opaque_pthread_once_t ... ok
test bindgen_test_layout__opaque_pthread_rwlock_t ... ok
test bindgen_test_layout__opaque_pthread_rwlockattr_t ... ok
test bindgen_test_layout__opaque_pthread_t ... ok
test result: ok. 14 passed; 0 failed; 0 ignored; 0 measured
Doc-tests bindgen-tutorial-bzip2-sys
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
```

@@ -1,169 +0,0 @@
# Write a Sanity Test
Finally, to tie everything together, let's write a sanity test that round trips
some text through compression and decompression, and then asserts that it came
back out the same as it went in. This is a little wordy with the raw FFI
bindings, but ordinarily we wouldn't ask people to do this; we'd provide a nice,
Rust-y API on top of the raw FFI bindings for them. However, since the point
here is to test the bindings themselves, our sanity test uses them directly.
The test data I'm round tripping are some Futurama quotes I got off the internet
and put in the `futurama-quotes.txt` file, which is read into a `&'static str`
at compile time via the `include_str!("../futurama-quotes.txt")` macro
invocation.
Without further ado, here is the test, which should be appended to the bottom of
our `src/lib.rs` file:
```rust
#[cfg(test)]
mod tests {
use super::*;
use std::mem;
#[test]
fn round_trip_compression_decompression() {
unsafe {
let input = include_str!("../futurama-quotes.txt").as_bytes();
let mut compressed_output: Vec<u8> = vec![0; input.len()];
let mut decompressed_output: Vec<u8> = vec![0; input.len()];
// Construct a compression stream.
let mut stream: bz_stream = mem::zeroed();
let result = BZ2_bzCompressInit(&mut stream as *mut _,
1, // 1 x 100000 block size
4, // verbosity (4 = most verbose)
0); // default work factor
match result {
r if r == (BZ_CONFIG_ERROR as _) => panic!("BZ_CONFIG_ERROR"),
r if r == (BZ_PARAM_ERROR as _) => panic!("BZ_PARAM_ERROR"),
r if r == (BZ_MEM_ERROR as _) => panic!("BZ_MEM_ERROR"),
r if r == (BZ_OK as _) => {},
r => panic!("Unknown return value = {}", r),
}
// Compress `input` into `compressed_output`.
stream.next_in = input.as_ptr() as *mut _;
stream.avail_in = input.len() as _;
stream.next_out = compressed_output.as_mut_ptr() as *mut _;
stream.avail_out = compressed_output.len() as _;
let result = BZ2_bzCompress(&mut stream as *mut _, BZ_FINISH as _);
match result {
r if r == (BZ_RUN_OK as _) => panic!("BZ_RUN_OK"),
r if r == (BZ_FLUSH_OK as _) => panic!("BZ_FLUSH_OK"),
r if r == (BZ_FINISH_OK as _) => panic!("BZ_FINISH_OK"),
r if r == (BZ_SEQUENCE_ERROR as _) => panic!("BZ_SEQUENCE_ERROR"),
r if r == (BZ_STREAM_END as _) => {},
r => panic!("Unknown return value = {}", r),
}
// Finish the compression stream.
let result = BZ2_bzCompressEnd(&mut stream as *mut _);
match result {
r if r == (BZ_PARAM_ERROR as _) => panic!("BZ_PARAM_ERROR"),
r if r == (BZ_OK as _) => {},
r => panic!("Unknown return value = {}", r),
}
// Construct a decompression stream.
let mut stream: bz_stream = mem::zeroed();
let result = BZ2_bzDecompressInit(&mut stream as *mut _,
4, // verbosity (4 = most verbose)
0); // default small factor
match result {
r if r == (BZ_CONFIG_ERROR as _) => panic!("BZ_CONFIG_ERROR"),
r if r == (BZ_PARAM_ERROR as _) => panic!("BZ_PARAM_ERROR"),
r if r == (BZ_MEM_ERROR as _) => panic!("BZ_MEM_ERROR"),
r if r == (BZ_OK as _) => {},
r => panic!("Unknown return value = {}", r),
}
// Decompress `compressed_output` into `decompressed_output`.
stream.next_in = compressed_output.as_ptr() as *mut _;
stream.avail_in = compressed_output.len() as _;
stream.next_out = decompressed_output.as_mut_ptr() as *mut _;
stream.avail_out = decompressed_output.len() as _;
let result = BZ2_bzDecompress(&mut stream as *mut _);
match result {
r if r == (BZ_PARAM_ERROR as _) => panic!("BZ_PARAM_ERROR"),
r if r == (BZ_DATA_ERROR as _) => panic!("BZ_DATA_ERROR"),
r if r == (BZ_DATA_ERROR_MAGIC as _) => panic!("BZ_DATA_ERROR_MAGIC"),
r if r == (BZ_MEM_ERROR as _) => panic!("BZ_MEM_ERROR"),
r if r == (BZ_OK as _) => panic!("BZ_OK"),
r if r == (BZ_STREAM_END as _) => {},
r => panic!("Unknown return value = {}", r),
}
// Close the decompression stream.
let result = BZ2_bzDecompressEnd(&mut stream as *mut _);
match result {
r if r == (BZ_PARAM_ERROR as _) => panic!("BZ_PARAM_ERROR"),
r if r == (BZ_OK as _) => {},
r => panic!("Unknown return value = {}", r),
}
assert_eq!(input, &decompressed_output[..]);
}
}
}
```
Now let's run `cargo test` again and verify that everything is linking and binding
properly!
```bash
$ cargo test
Compiling bindgen-tutorial-bzip2-sys v0.1.0
Finished debug [unoptimized + debuginfo] target(s) in 0.54 secs
Running target/debug/deps/bindgen_tutorial_bzip2_sys-1c5626bbc4401c3a
running 15 tests
test bindgen_test_layout___darwin_pthread_handler_rec ... ok
test bindgen_test_layout___sFILE ... ok
test bindgen_test_layout___sbuf ... ok
test bindgen_test_layout__bindgen_ty_1 ... ok
test bindgen_test_layout__bindgen_ty_2 ... ok
test bindgen_test_layout__opaque_pthread_attr_t ... ok
test bindgen_test_layout__opaque_pthread_cond_t ... ok
test bindgen_test_layout__opaque_pthread_condattr_t ... ok
test bindgen_test_layout__opaque_pthread_mutex_t ... ok
test bindgen_test_layout__opaque_pthread_mutexattr_t ... ok
test bindgen_test_layout__opaque_pthread_once_t ... ok
test bindgen_test_layout__opaque_pthread_rwlock_t ... ok
test bindgen_test_layout__opaque_pthread_rwlockattr_t ... ok
test bindgen_test_layout__opaque_pthread_t ... ok
block 1: crc = 0x47bfca17, combined CRC = 0x47bfca17, size = 2857
bucket sorting ...
depth 1 has 2849 unresolved strings
depth 2 has 2702 unresolved strings
depth 4 has 1508 unresolved strings
depth 8 has 538 unresolved strings
depth 16 has 148 unresolved strings
depth 32 has 0 unresolved strings
reconstructing block ...
2857 in block, 2221 after MTF & 1-2 coding, 61+2 syms in use
initial group 5, [0 .. 1], has 570 syms (25.7%)
initial group 4, [2 .. 2], has 256 syms (11.5%)
initial group 3, [3 .. 6], has 554 syms (24.9%)
initial group 2, [7 .. 12], has 372 syms (16.7%)
initial group 1, [13 .. 62], has 469 syms (21.1%)
pass 1: size is 2743, grp uses are 13 6 15 0 11
pass 2: size is 1216, grp uses are 13 7 15 0 10
pass 3: size is 1214, grp uses are 13 8 14 0 10
pass 4: size is 1213, grp uses are 13 9 13 0 10
bytes: mapping 19, selectors 17, code lengths 79, codes 1213
final combined CRC = 0x47bfca17
[1: huff+mtf rt+rld {0x47bfca17, 0x47bfca17}]
combined CRCs: stored = 0x47bfca17, computed = 0x47bfca17
test tests::round_trip_compression_decompression ... ok
test result: ok. 15 passed; 0 failed; 0 ignored; 0 measured
Doc-tests bindgen-tutorial-bzip2-sys
running 0 tests
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
```

@@ -1,13 +0,0 @@
# Publish Your Crate!
That's it! Now we can publish our crate on crates.io and we can write a nice,
Rust-y API wrapping the raw FFI bindings in a safe interface. However, there is
already a [`bzip2-sys`][bz-sys] crate providing raw FFI bindings, and there is
already a [`bzip2`][bz] crate providing a nice, safe, Rust-y API on top of the
bindings, so we have nothing left to do here!
Check out the [full code on Github!][example]
[bz-sys]: https://crates.io/crates/bzip2-sys
[bz]: https://crates.io/crates/bzip2
[example]: https://github.com/fitzgen/bindgen-tutorial-bzip2-sys

@@ -1,134 +0,0 @@
# Using the Union Types Generated by Bindgen
**NOTE:** As of Rust version 1.17, Rust does not have a stable `union` type. Issue [#32836](https://github.com/rust-lang/rust/issues/32836) tracks the stabilization of a `union` type in Rust.
With the `--unstable-rust` flag, bindgen will generate the preliminary unstable `union` type.
In general, most interactions with unions (either reading or writing) are unsafe.
For this discussion, we will use the following C type definitions:
```c
typedef struct {
int32_t a;
int32_t b;
} alpha_t;
typedef struct {
uint32_t c;
uint16_t d;
uint16_t e;
uint8_t f;
} beta_t;
typedef union {
alpha_t alfa;
beta_t bravo;
} greek_t;
```
## Relevant Bindgen Options
### Library
* [`bindgen::Builder::unstable_rust()`](https://docs.rs/bindgen/0.25.3/bindgen/struct.Builder.html#method.unstable_rust)
* [`bindgen::Builder::derive_default()`](https://docs.rs/bindgen/0.25.3/bindgen/struct.Builder.html#method.derive_default)
### Command Line
* `--unstable-rust`
* `--with-derive-default`
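Combining the two library options listed above, a rough builder sketch (the header name is a placeholder, and this assumes the 0.2x-era method names linked here) could look like:
```rust
// Hypothetical example: emit real (unstable) `union` types and derive
// `Default` so zero-initialization is easy.
extern crate bindgen;

fn main() {
    let bindings = bindgen::Builder::default()
        .header("unions.h")
        .unstable_rust(true)
        .derive_default(true)
        .generate()
        .expect("could not generate bindings");

    bindings
        .write_to_file("bindings.rs")
        .expect("could not write bindings");
}
```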
## Using the unstable `union` version
With `struct`s generated by bindgen from C, it is possible to initialize fields in a "normal" rust way:
```rust,ignore
mod bindings;
fn main() {
let x = bindings::alpha_t {
a: 1,
b: -1,
};
}
```
When using the unstable `union` type, there are two choices for initialization: Zeroed, and with a specific variant.
```rust,ignore
#![feature(untagged_unions)]
mod bindings_unstable;
fn unstable() {
// Initialize the union to zero
let x = bindings_unstable::greek_t::default();
// If the `--with-derive-default` option is not used, the following may be used
// to initialize the union to zero:
let x = unsafe{ std::mem::zeroed::<bindings_unstable::greek_t>() };
// Or, it is possible to initialize exactly one variant of the union:
let x = bindings_unstable::greek_t {
alfa: bindings_unstable::alpha_t {
a: 1,
b: -1,
},
};
unsafe {
println!("{:?}", x.alfa); // alpha_t { a: 1, b: -1 }
println!("{:?}", x.bravo); // beta_t { c: 1, d: 65535, e: 65535, f: 127 }
}
}
```
## Using the stable BindgenUnion types
For versions of Rust that do not support the new `union` type, bindgen will generate types which provide union-like access to structure fields.
Interacting with these unions is slightly different than the new `union` types. Whenever a variant of the union is accessed, it must be done through a reference.
```rust,ignore
mod bindings;
fn stable() {
// `default()` or `zeroed()` may still be used with Bindgen's Union types
let mut x = bindings::greek_t::default();
// This will not work:
// let x = bindings::greek_t {
// alfa: bindings::alpha_t {
// a: 1,
// b: -1,
// },
// };
// Instead, access the field through `.as_ref()` and `.as_mut()` helpers:
unsafe {
*x.alfa.as_mut() = bindings::alpha_t {
a: 1,
b: -1,
};
println!("{:?}", x.alfa.as_ref()); // alpha_t { a: 1, b: -1 }
println!("{:?}", x.bravo.as_ref()); // beta_t { c: 1, d: 65535, e: 65535, f: 0 }
}
}
```
If you attempt to access a BindgenUnion field directly, you will see errors like this:
```text
error[E0308]: mismatched types
--> src/main.rs:44:15
|
44 | alfa: bindings::alpha_t {
| _______________^
45 | | a: 1,
46 | | b: -1,
47 | | },
| |_________^ expected struct `bindings::__BindgenUnionField`, found struct `bindings::alpha_t`
|
= note: expected type `bindings::__BindgenUnionField<bindings::alpha_t>`
found type `bindings::alpha_t`
```

@@ -1,31 +0,0 @@
# Whitelisting
Whitelisting allows us to be precise about which type, function, and global
variable definitions `bindgen` generates bindings for. By default, if we don't
specify any whitelisting rules, everything is considered whitelisted. This may
not be desirable because either
* the generated bindings contain a lot of extra definitions we don't plan on using, or
* the header file contains C++ features for which Rust does not have a
corresponding form (such as partial template specialization), and we would
like to avoid these definitions
If we specify whitelisting rules, then `bindgen` will only generate bindings to
types, functions, and global variables that match the whitelisting rules, or are
transitively used by a definition that matches them.
### Library
* [`bindgen::Builder::whitelisted_type`](https://docs.rs/bindgen/0.23.1/bindgen/struct.Builder.html#method.whitelisted_type)
* [`bindgen::Builder::whitelisted_function`](https://docs.rs/bindgen/0.23.1/bindgen/struct.Builder.html#method.whitelisted_function)
* [`bindgen::Builder::whitelisted_var`](https://docs.rs/bindgen/0.23.1/bindgen/struct.Builder.html#method.whitelisted_var)
### Command Line
* `--whitelist-type <type>`
* `--whitelist-function <function>`
* `--whitelist-var <var>`
### Annotations
None.
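A short, illustrative builder sketch tying the three whitelisting knobs together (the header and patterns echo the `cool.h` example from the introduction and are placeholders, not part of this change):
```rust
// Hypothetical example: only whitelisted items (plus whatever they
// transitively use) end up in the generated bindings.
extern crate bindgen;

fn main() {
    let _bindings = bindgen::Builder::default()
        .header("cool.h")
        .whitelisted_type("CoolStruct")
        .whitelisted_function("cool_.*")
        .whitelisted_var("COOL_.*")
        .generate()
        .expect("could not generate bindings");
}
```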

@@ -1,6 +0,0 @@
#!/usr/bin/env bash
set -xeu
cd "$(dirname "$0")/.."
cargo check --features "$BINDGEN_FEATURES testing_only_docs"

@@ -1,7 +0,0 @@
@echo off
cd "%~dp0.."
git add -u
git diff @
git diff-index --quiet HEAD

@@ -1,8 +0,0 @@
#!/usr/bin/env bash
set -xeu
cd "$(dirname "$0")/.."
git add -u
git diff @
git diff-index --quiet HEAD

@@ -1,16 +0,0 @@
#!/usr/bin/env bash
set -xeu
cd "$(dirname "$0")/.."
# Ensure we have the most up-to-date `rustfmt`.
cargo install -f rustfmt
# Run `rustfmt` on the crate! If `rustfmt` can't make a long line shorter, it
# prints an error and exits non-zero, so tell it to kindly shut its yapper and
# make sure it doesn't cause us to exit this whole script non-zero.
cargo fmt --quiet || true
# Exit non-zero if this resulted in any diffs.
./ci/assert-no-diff.sh

@@ -1,31 +0,0 @@
set -ex
pushd ~
# Workaround for Travis CI macOS bug (https://github.com/travis-ci/travis-ci/issues/6307)
if [ "${TRAVIS_OS_NAME}" == "osx" ]; then
rvm get head || true
fi
function llvm_download() {
export LLVM_VERSION_TRIPLE="${LLVM_VERSION}"
export LLVM=clang+llvm-${LLVM_VERSION_TRIPLE}-x86_64-$1
wget http://llvm.org/releases/${LLVM_VERSION_TRIPLE}/${LLVM}.tar.xz
mkdir llvm
tar -xf ${LLVM}.tar.xz -C llvm --strip-components=1
export LLVM_CONFIG_PATH=`pwd`/llvm/bin/llvm-config
if [ "${TRAVIS_OS_NAME}" == "osx" ]; then
cp llvm/lib/libclang.dylib /usr/local/lib/libclang.dylib
fi
}
if [ "${TRAVIS_OS_NAME}" == "linux" ]; then
llvm_download linux-gnu-ubuntu-14.04
else
llvm_download apple-darwin
fi
popd
set +e

@@ -1,33 +0,0 @@
#!/usr/bin/env bash
set -xeu
cd "$(dirname "$0")/../book"
# Ensure mdbook is installed.
cargo install mdbook --vers "^0.0.22" || true
export PATH="$PATH:~/.cargo/bin"
# Get the git revision we are on.
rev=$(git rev-parse --short HEAD)
# Build the users guide book and go into the built book's directory.
rm -rf ./book
mdbook build
cd ./book
# Make the built book directory a new git repo, fetch upstream, make a new
# commit on gh-pages, and push it upstream.
git init
git config user.name "Travis CI"
git config user.email "builds@travis-ci.org"
git remote add upstream "https://$GH_TOKEN@github.com/rust-lang-nursery/rust-bindgen.git"
git fetch upstream
git reset upstream/gh-pages
touch .
git add -A .
git commit -m "Rebuild users guide at ${rev}"
git push upstream HEAD:gh-pages

@@ -1,23 +0,0 @@
#!/usr/bin/env bash
# Don't allow any system include directives in tests.
set -eu
cd "$(dirname "$0")/.."
echo "Checking for #include directives of system headers..."
grep -rn '#include\s*<.*>' tests/headers || {
echo "Found none; OK!"
exit 0
}
echo "
Found a test with an #include directive of a system header file!
There is no guarantee that the system running the tests has the header
file, let alone the same version of it that you have. Any test with such an
include directive won't reliably produce consistent bindings across systems.
"
exit 1

@@ -1,10 +0,0 @@
#!/usr/bin/env bash
set -xeu
cd "$(dirname "$0")/../book"
cargo install mdbook --vers "^0.0.22" || true
export PATH="$PATH:~/.cargo/bin"
mdbook build
mdbook test

third_party/rust/bindgen-0.29.1/ci/test.bat (vendored)

@@ -1,49 +0,0 @@
@echo off
cd "%~dp0.."
set RUST_BACKTRACE=1
if not defined BINDGEN_FEATURES (
echo Environment variable BINDGEN_FEATURES must be defined.
exit /B 1
)
findstr /r /c:"#include *<.*>" tests\headers\* >nul 2>&1 && (
echo Found a test with an #include directive of a system header file!
echo.
echo There is no guarantee that the system running the tests has the header
echo file, let alone the same version of it that you have. Any test with such an
echo include directive won't reliably produce consistent bindings across systems.
exit /B 1
) || (
echo Found none. OK!
set ERRORLEVEL=0
)
@echo on
::Regenerate the test headers' bindings in debug and release modes, and assert
::that we always get the expected generated bindings.
cargo test --features "%BINDGEN_FEATURES%" || exit /b 1
call .\ci\assert-no-diff.bat
cargo test --features "%BINDGEN_FEATURES% testing_only_extra_assertions" || exit /b 1
call .\ci\assert-no-diff.bat
cargo test --release --features "%BINDGEN_FEATURES% testing_only_extra_assertions" || exit /b 1
call .\ci\assert-no-diff.bat
::Now test the expectations' size and alignment tests.
pushd tests\expectations
cargo test || exit /b 1
cargo test --release || exit /b 1
popd
::And finally, test our example bindgen + build.rs integration template project.
cd bindgen-integration
cargo test --features "%BINDGEN_FEATURES%" || exit /b 1
cargo test --release --features "%BINDGEN_FEATURES%" || exit /b 1

third_party/rust/bindgen-0.29.1/ci/test.sh (vendored)

@@ -1,38 +0,0 @@
#!/usr/bin/env bash
set -xeu
cd "$(dirname "$0")/.."
export RUST_BACKTRACE=1
# Disallow system header file includes in our test suite.
./ci/no-includes.sh
# Regenerate the test headers' bindings in debug and release modes, and assert
# that we always get the expected generated bindings.
cargo test --features "$BINDGEN_FEATURES"
./ci/assert-no-diff.sh
cargo test --features "$BINDGEN_FEATURES testing_only_extra_assertions"
./ci/assert-no-diff.sh
cargo test --release --features "$BINDGEN_FEATURES testing_only_extra_assertions"
./ci/assert-no-diff.sh
if [ -v "${TRAVIS_OS_NAME}" ]; then
# Now test the expectations' size and alignment tests.
pushd tests/expectations
cargo test
cargo test --release
popd
# And finally, test our example bindgen + build.rs integration template project.
cd bindgen-integration
cargo test --features "$BINDGEN_FEATURES"
cargo test --release --features "$BINDGEN_FEATURES"
fi

third_party/rust/bindgen-0.29.1/example-graphviz-ir.png (vendored binary file, 1.3 MiB; not shown)

third_party/rust/bindgen-0.29.1/rustfmt.toml (vendored)

@ -1,10 +0,0 @@
max_width = 80
format_strings = false
fn_brace_style = "SameLineWhere"
item_brace_style = "SameLineWhere"
struct_lit_multiline_style = "ForceMulti"
where_trailing_comma = true
reorder_imports = true
reorder_imported_names = true
normalize_comments = false
write_mode = "Overwrite"


@ -1,97 +0,0 @@
//! Traits for determining whether we can derive traits for a thing or not.
use super::context::BindgenContext;
/// A trait that encapsulates the logic for whether or not we can derive `Debug`
/// for a given thing.
///
/// This should ideally be a no-op that just returns `true`, but instead it needs
/// to recursively check whether all of the type's members can derive `Debug`,
/// because Rust only implements the standard traits for arrays of at most 32
/// elements.
pub trait CanDeriveDebug {
/// Return `true` if `Debug` can be derived for this thing, `false`
/// otherwise.
fn can_derive_debug(&self, ctx: &BindgenContext) -> bool;
}
/// A trait that encapsulates the logic for whether or not we can derive `Debug`.
/// The difference from `CanDeriveDebug` is that implementors of this trait may
/// not use recursion or look up results from the fixed-point analysis; it is a
/// helper trait for that analysis.
pub trait CanTriviallyDeriveDebug {
/// Serve the same purpose as the Extra in CanDeriveDebug.
type Extra;
/// Return `true` if `Debug` can be derived for this thing, `false`
/// otherwise.
fn can_trivially_derive_debug(&self,
ctx: &BindgenContext,
extra: Self::Extra)
-> bool;
}
/// A trait that encapsulates the logic for whether or not we can derive `Copy`
/// for a given thing.
pub trait CanDeriveCopy<'a> {
/// Implementations can define this type to get access to any extra
/// information required to determine whether they can derive `Copy`. If
/// extra information is unneeded, then this should simply be the unit type.
type Extra;
/// Return `true` if `Copy` can be derived for this thing, `false`
/// otherwise.
fn can_derive_copy(&'a self,
ctx: &'a BindgenContext,
extra: Self::Extra)
-> bool;
/// For some reason, deriving `Copy` for a type containing an array of a type
/// that is not known to be `Copy` is a compile error. E.g.:
///
/// ```rust
/// #[derive(Copy, Clone)]
/// struct A<T> {
/// member: T,
/// }
/// ```
///
/// is fine, while:
///
/// ```rust,ignore
/// #[derive(Copy, Clone)]
/// struct A<T> {
/// member: [T; 1],
/// }
/// ```
///
/// is an error.
///
/// That's the whole point of the existence of `can_derive_copy_in_array`.
fn can_derive_copy_in_array(&'a self,
ctx: &'a BindgenContext,
extra: Self::Extra)
-> bool;
}
/// A trait that encapsulates the logic for whether or not we can derive `Default`
/// for a given thing.
///
/// This should ideally be a no-op that just returns `true`, but instead it needs
/// to recursively check whether all of the type's members can derive `Default`,
/// because Rust only implements the standard traits for arrays of at most 32
/// elements.
pub trait CanDeriveDefault<'a> {
/// Implementations can define this type to get access to any extra
/// information required to determine whether they can derive `Default`. If
/// extra information is unneeded, then this should simply be the unit type.
type Extra;
/// Return `true` if `Default` can be derived for this thing, `false`
/// otherwise.
fn can_derive_default(&self,
ctx: &BindgenContext,
extra: Self::Extra)
-> bool;
}
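
The 32-element limit mentioned in the doc comments above is the reason these checks have to recurse at all: on the Rust releases bindgen supported at the time, the standard library only implemented traits such as `Debug` for arrays of up to 32 elements, so a derive on anything containing a longer array fails to compile. A minimal sketch of that failure mode (the struct names here are illustrative, not from bindgen):

// Compiles: [u8; 32] implements Debug on the toolchains bindgen targeted.
#[derive(Debug)]
struct Within32 {
    data: [u8; 32],
}

// Would not compile on those toolchains: [u8; 33] has no Debug impl, so the
// derived implementation for Beyond32 could not be generated.
// #[derive(Debug)]
// struct Beyond32 {
//     data: [u8; 33],
// }

fn main() {
    println!("{:?}", Within32 { data: [0; 32] });
}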

third_party/rust/bindgen-0.29.1/src/uses.rs (vendored)

@ -1,105 +0,0 @@
//! Take in our IR and output a C/C++ file with dummy uses of each IR type.
//!
//! Say that we had this C++ header, `header.hpp`:
//!
//! ```c++
//! class Point {
//! int x;
//! int y;
//! }
//!
//! enum Bar {
//! THIS,
//! THAT,
//! OTHER
//! }
//! ```
//!
//! If we generated dummy uses for this header, we would get a `.cpp` file like
//! this:
//!
//! ```c++
//! #include "header.hpp"
//!
//! void dummy(Point*) {}
//! void dummy(Bar*) {}
//! ```
//!
//! This is useful because we can compile this `.cpp` file into an object file,
//! and then compare its debugging information to the debugging information
//! generated for our Rust bindings. These two sets of debugging information had
//! better agree on the C/C++ types' physical layout, or else our bindings are
//! incorrect!
//!
//! "But you still haven't explained why we have to generate the dummy uses" you
//! complain. Well if the types are never used, then they are elided when the
//! C/C++ compiler generates debugging information.
use ir::context::BindgenContext;
use ir::item::{Item, ItemAncestors, ItemCanonicalName};
use ir::template::TemplateParameters;
use std::io;
// Like `canonical_path`, except we always take namespaces into account, ignore
// the generated names of anonymous items, and return a `String`.
//
// TODO: Would it be easier to try and demangle the USR?
fn namespaced_name(ctx: &BindgenContext, item: &Item) -> String {
let mut names: Vec<_> = item.ancestors(ctx)
.map(|id| ctx.resolve_item(id).canonical_name(ctx))
.filter(|name| !name.starts_with("_bindgen_"))
.collect();
names.reverse();
names.join("::")
}
/// Generate the dummy uses for all the items in the given context, and write
/// the dummy uses to `dest`.
pub fn generate_dummy_uses<W>(ctx: &mut BindgenContext,
mut dest: W)
-> io::Result<()>
where W: io::Write,
{
ctx.gen(|ctx| {
let input_header = ctx.options()
.input_header
.as_ref()
.expect("Should not generate dummy uses without an input header");
try!(writeln!(dest, "/* automatically generated by rust-bindgen */"));
try!(writeln!(dest, ""));
try!(writeln!(dest, "#include \"{}\"", input_header));
try!(writeln!(dest, ""));
let type_items = ctx.whitelisted_items()
.iter()
.cloned()
.map(|id| ctx.resolve_item(id))
.filter(|item| {
// We only want type items.
if let Some(ty) = item.kind().as_type() {
// However, we don't want anonymous types, as we can't
// generate dummy uses for them.
ty.name().is_some() &&
// Nor do we want builtin types or named template type
// arguments. Again, we can't generate dummy uses for
// these.
!ty.is_builtin_or_named() &&
// And finally, we won't be creating any dummy
// instantiations, so ignore template declarations and
// instantiations.
item.all_template_params(ctx).is_none()
} else {
false
}
})
.map(|item| namespaced_name(ctx, item))
.enumerate();
for (idx, name) in type_items {
try!(writeln!(dest, "void dummy{}({}*) {{ }}", idx, name));
}
Ok(())
})
}

third_party/rust/bindgen-0.30.0/.cargo-checksum.json (vendored, new file)

@ -0,0 +1 @@
{"files":{"Cargo.toml":"8f81f53eb8f6cfc4d8c0e5eaed51b0a7d85a60c9b710fd9df62d25a1db189f92","build.rs":"deff14b2204700f9fd40ba426148d648626461a4ce372c49b8c0f92e09646c80","src/callbacks.rs":"c5c4e5bc8c49cb191d1b100339772fdc7dd1dbf5025a9de1ecaafb70f86cb48f","src/clang.rs":"541a016580c98c2e2af36c3c11b80127c26090795a380480c925c5f411f8100d","src/codegen/derive_debug.rs":"77e16be27a6999726978bf33dc54227cf3b074101ebd55e90f655340cf05ba8b","src/codegen/error.rs":"2613af1d833377fd4a70719f4a09951d9d45dc9227827b9a2a938a1bcaaea2dd","src/codegen/helpers.rs":"19c5b4a86df9410d7e6cb27c2a8797dd205e4c96eab203798b70cd30dd35e572","src/codegen/mod.rs":"4488f141de2abb5d1fa36df7818daeeebba4f5237a9e43101fc9d805e0a80436","src/codegen/struct_layout.rs":"b92fef035e5deaf3fe9f3c2d4ea61a758b873f6f193fe68693955d9d14a396cb","src/extra_assertions.rs":"449549c4a7a50c3f0b06332452b2fb6c9b23f31ca8e5e1656fe6c7f21e8ef7fa","src/features.rs":"b686a3e4ce5712473d0a7c0f817bef29b9337265ec6df1278087a708e1180108","src/ir/analysis/derive_copy.rs":"e17960cd79d764a36fd7cea8bad944a8994fc9cb3a20080955f28509b9e66c9e","src/ir/analysis/derive_debug.rs":"ffb933c46cc26c0ed7c7ccf16a0a19dddb3b0108ca913bd41b785c3afbd4ee0b","src/ir/analysis/derive_default.rs":"740b281eddf9f9f0606963fef7485e9219e7ebedeb7966c83c63f723d1deb62f","src/ir/analysis/derive_hash.rs":"6c046a54d495e3d6ec666f23c6209212321f72f1ed843523e8a9aa0cd6421c9e","src/ir/analysis/derive_partial_eq.rs":"fdd0d78861f146ce9f62c49979009cfad82ec9b96c8c79415513158fc9bf7ad0","src/ir/analysis/has_destructor.rs":"42fdc74e363bc32dbe51558cb5b330bad0e40d79b4cd3c6ff36b6f418a68c0ad","src/ir/analysis/has_float.rs":"02b7ccf9a99b1d96e3a0ec712de45461ab714184129f2634de46b33fb1758ccd","src/ir/analysis/has_type_param_in_array.rs":"39f10af6a6b7af17ee505777dbd10989942272b44efba2a1e320d8b4bbabe0f0","src/ir/analysis/has_vtable.rs":"33def5eb43270fff87455a0e8d262817876e2cf8c676b8cb6c8ec37e84dd99d1","src/ir/analysis/mod.rs":"10a7817a9c990dd8125e4ca9ed1fe02b9a0e27c4dd0320e909bb55f727ed8191","src/ir/analysis/template_params.rs":"e1a3709c3c07b7be21e3912339820f86992b56af44d923919023b7e015f41755","src/ir/annotations.rs":"ef106afcbe6084c18bd13a37ee3c1cdc9596bfb055db8c773d81f8f15fec3208","src/ir/comment.rs":"36f2a1d3970fdbf3d72c1f094043902747cde395215bdf7e9103926d9df011fd","src/ir/comp.rs":"062ea5ec95717e32b26be39bd4664179ff790831042205d795af1a4654922c8d","src/ir/context.rs":"68dbaa11ae5041965e08de24955852982f10c764decb7ba1de6c82073e95916c","src/ir/derive.rs":"608e9bacd6d6d29f5b4357fe2f7cdda62e79c77271b022e5275d22abc22788d3","src/ir/dot.rs":"173e57c3017182279bff98ea5edfd8e6e007a25e70da27139578a637a0a747bc","src/ir/enum_ty.rs":"d633d4d36a64cafd4e1e4ba82872058d5a0aada007e47353e4905ce1fe7d16ec","src/ir/function.rs":"409b779469c8092100991bc442b0b5bcfe9d5510edb71db12a38181df7519938","src/ir/int.rs":"1f61a472288afe489d9320bc8b13920333ece57891ae8570b4c4f25ab50688e6","src/ir/item.rs":"bff6369353b37a209236d750d274c0282a8669c9f7bee2b0eeea706e17537d1f","src/ir/item_kind.rs":"13048962657436a757ff9483d96f2ce351ec5f827ecab4921ed643f6f115c497","src/ir/layout.rs":"39c415271629fc5a43373bcd5ba8bfa26ebb2544aa9e28598b0141214111bb67","src/ir/mod.rs":"2eae90f207fad2e45957ec9287064992a419e3fc916aba84faff2ea25cbeb5ee","src/ir/module.rs":"5d46d631cec17ef1d1882da60080898760181c2ddf991473afdd464bf8c7d867","src/ir/objc.rs":"52454e14371535ff4da4025cf45fee3d3beadbe36759a6ebf0d1d7048a00714f","src/ir/template.rs":"cc96a205dec677962376cec0bdbf53820d633378fa92d9faeb34953d2943a928","src/ir/traversal.rs":"521fdd685ba8c684199cbc8131561ed39aed852dd0d1e76e17d8d2a3d20
4428b","src/ir/ty.rs":"263e7c5794b56dd0499db8b102169f70881b5ff1d15ded1fe49fc24c29d9ab34","src/ir/var.rs":"c60354e164e357480e72b20d7f5c2f7188da8af38ad1db0a3f79806ef60388ab","src/lib.rs":"6efe2ba78af7a2c790e73e03ca6876c24a56636b3b2fb74926a866675dc7ee71","src/log_stubs.rs":"6dfdd908b7c6453da416cf232893768f9480e551ca4add0858ef88bf71ee6ceb","src/main.rs":"6ebd647814b339bbd318f5d55f54ef98091138c2c7208436a88204be56a5a49b","src/options.rs":"cced11c8ba947953098df62beb7980dd72d9aa9c6fd3c8dba5aac745bdcd2315","src/parse.rs":"812171946f0ec8a83f26f9041151ad9dcdff11961e5154e4dae4be248f86b296","src/regex_set.rs":"e4cc668c724a80f7dd7d028f4a22146680513b40cf3a5255551c41079d302309"},"package":"33024f55a754d920637461adf87fb485702a69bdf7ac1d307b7e18da93bae505"}


@ -12,10 +12,10 @@
[package]
name = "bindgen"
version = "0.29.1"
version = "0.30.0"
authors = ["Jyun-Yan You <jyyou.tw@gmail.com>", "Emilio Cobos Álvarez <emilio@crisal.io>", "Nick Fitzgerald <fitzgen@gmail.com>", "The Servo project developers"]
build = "build.rs"
exclude = ["bindgen-integration", "ci", "tests/**", "*.orig"]
include = ["Cargo.toml", "build.rs", "src/*.rs", "src/**/*.rs"]
description = "Automatically generates Rust FFI bindings to C and C++ libraries."
documentation = "https://docs.rs/bindgen"
readme = "README.md"
@ -31,65 +31,68 @@ path = "src/lib.rs"
name = "bindgen"
path = "src/main.rs"
doc = false
[dependencies.syntex_syntax]
version = "0.58"
[dependencies.aster]
version = "0.41"
features = ["with-syntex"]
[dependencies.cfg-if]
version = "0.1.0"
[dependencies.log]
version = "0.3"
optional = true
[dependencies.regex]
version = "0.2"
[dependencies.peeking_take_while]
version = "0.1.2"
[dependencies.clang-sys]
version = "0.21.0"
features = ["runtime", "clang_3_9"]
[dependencies.clap]
version = "2"
[dependencies.quasi]
version = "0.32"
features = ["with-syntex"]
[dependencies.aster]
version = "0.41"
features = ["with-syntex"]
[dependencies.which]
version = "1.0.2"
[dependencies.clang-sys]
version = "0.19.0"
features = ["runtime", "clang_3_9"]
[dependencies.env_logger]
version = "0.4"
optional = true
[dependencies.cexpr]
version = "0.2"
[dependencies.syntex_syntax]
version = "0.58"
[dependencies.lazy_static]
version = "0.2.1"
[dev-dependencies.diff]
version = "0.1"
[dependencies.cfg-if]
version = "0.1.0"
[dependencies.peeking_take_while]
version = "0.1.2"
[dependencies.clap]
version = "2"
[dependencies.regex]
version = "0.2"
[dependencies.cexpr]
version = "0.2"
[dev-dependencies.shlex]
version = "0.1"
[dev-dependencies.clap]
version = "2"
[dev-dependencies.diff]
version = "0.1"
[build-dependencies.quasi_codegen]
version = "0.32"
[features]
testing_only_libclang_3_9 = []
default = ["logging"]
testing_only_extra_assertions = []
logging = ["env_logger", "log"]
static = []
testing_only_libclang_3_9 = []
testing_only_libclang_4 = []
testing_only_libclang_3_8 = []
default = ["logging"]
testing_only_docs = []
testing_only_extra_assertions = []
testing_only_libclang_3_8 = []
[badges.travis-ci]
repository = "rust-lang-nursery/rust-bindgen"


@ -16,9 +16,10 @@ mod codegen {
println!("cargo:rerun-if-changed=src/codegen/helpers.rs");
println!("cargo:rerun-if-changed=src/codegen/struct_layout.rs");
let mut dst =
File::create(Path::new(&out_dir).join("host-target.txt")).unwrap();
dst.write_all(env::var("TARGET").unwrap().as_bytes()).unwrap();
let mut dst = File::create(Path::new(&out_dir).join("host-target.txt"))
.unwrap();
dst.write_all(env::var("TARGET").unwrap().as_bytes())
.unwrap();
}
}
@ -32,9 +33,11 @@ mod testgen {
pub fn main() {
let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap());
let mut dst = File::create(Path::new(&out_dir).join("tests.rs")).unwrap();
let mut dst = File::create(Path::new(&out_dir).join("tests.rs"))
.unwrap();
let manifest_dir = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());
let manifest_dir =
PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());
let headers_dir = manifest_dir.join("tests").join("headers");
let headers = match fs::read_dir(headers_dir) {
@ -51,12 +54,19 @@ mod testgen {
for entry in entries {
match entry.path().extension().and_then(OsStr::to_str) {
Some("h") | Some("hpp") => {
let func = entry.file_name().to_str().unwrap()
let func = entry
.file_name()
.to_str()
.unwrap()
.replace(|c| !char::is_alphanumeric(c), "_")
.replace("__", "_")
.to_lowercase();
writeln!(dst, "test_header!(header_{}, {:?});",
func, entry.path()).unwrap();
writeln!(
dst,
"test_header!(header_{}, {:?});",
func,
entry.path()
).unwrap();
}
_ => {}
}
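
The testgen step above turns each header file name into a Rust test name: every non-alphanumeric character becomes an underscore, doubled underscores are collapsed once, and the result is lowercased. A standalone sketch of that transformation (the `sanitize` helper and the sample file name are illustrative, not part of bindgen):

// Mirrors the name derivation in build.rs's testgen module.
fn sanitize(file_name: &str) -> String {
    file_name
        .replace(|c: char| !c.is_alphanumeric(), "_")
        .replace("__", "_")
        .to_lowercase()
}

fn main() {
    // A header named "16-byte-alignment.h" would produce
    // test_header!(header_16_byte_alignment_h, ...).
    assert_eq!(sanitize("16-byte-alignment.h"), "16_byte_alignment_h");
}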


@ -8,7 +8,6 @@ use std::panic::UnwindSafe;
/// A trait to allow configuring different kinds of types in different
/// situations.
pub trait ParseCallbacks: fmt::Debug + UnwindSafe {
/// This function will be run on every macro that is identified
fn parsed_macro(&self, _name: &str) {}
@ -21,11 +20,12 @@ pub trait ParseCallbacks: fmt::Debug + UnwindSafe {
/// Given an enum variant's name and value, this function should return
/// whether that enum variant will forcibly be a constant.
fn enum_variant_behavior(&self,
_enum_name: Option<&str>,
_variant_name: &str,
_variant_value: EnumVariantValue)
-> Option<EnumVariantCustomBehavior> {
fn enum_variant_behavior(
&self,
_enum_name: Option<&str>,
_variant_name: &str,
_variant_value: EnumVariantValue,
) -> Option<EnumVariantCustomBehavior> {
None
}
}
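
`ParseCallbacks` is the hook a consumer implements to observe parsing as it happens; the diff above only reformats the `enum_variant_behavior` signature. A minimal sketch of an implementation that logs macro names and keeps every other default (the struct name is illustrative, and this assumes the trait is reachable at `bindgen::callbacks::ParseCallbacks` as in other releases):

use bindgen::callbacks::ParseCallbacks;

// Logs each macro the parser identifies; all other callbacks, including
// enum_variant_behavior shown above, keep their default implementations.
#[derive(Debug)]
struct MacroLogger;

impl ParseCallbacks for MacroLogger {
    fn parsed_macro(&self, name: &str) {
        println!("parsed macro: {}", name);
    }
}

Such a type would typically be handed to the builder via something like parse_callbacks(Box::new(MacroLogger)); the exact builder method name is assumed here rather than taken from this diff.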


@ -24,12 +24,14 @@ pub struct Cursor {
impl fmt::Debug for Cursor {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
write!(fmt,
"Cursor({} kind: {}, loc: {}, usr: {:?})",
self.spelling(),
kind_to_str(self.kind()),
self.location(),
self.usr())
write!(
fmt,
"Cursor({} kind: {}, loc: {}, usr: {:?})",
self.spelling(),
kind_to_str(self.kind()),
self.location(),
self.usr()
)
}
}
@ -213,11 +215,12 @@ impl Cursor {
let mut semantic_parent = self.fallible_semantic_parent();
while semantic_parent.is_some() &&
(semantic_parent.unwrap().kind() == CXCursor_Namespace ||
semantic_parent.unwrap().kind() == CXCursor_NamespaceAlias ||
semantic_parent.unwrap().kind() == CXCursor_NamespaceRef) {
semantic_parent = semantic_parent.unwrap()
.fallible_semantic_parent();
(semantic_parent.unwrap().kind() == CXCursor_Namespace ||
semantic_parent.unwrap().kind() == CXCursor_NamespaceAlias ||
semantic_parent.unwrap().kind() == CXCursor_NamespaceRef)
{
semantic_parent =
semantic_parent.unwrap().fallible_semantic_parent();
}
let tu = self.translation_unit();
@ -256,8 +259,8 @@ impl Cursor {
/// remaining free template arguments?
pub fn is_fully_specialized_template(&self) -> bool {
self.is_template_specialization() &&
self.kind() != CXCursor_ClassTemplatePartialSpecialization &&
self.num_template_args().unwrap_or(0) > 0
self.kind() != CXCursor_ClassTemplatePartialSpecialization &&
self.num_template_args().unwrap_or(0) > 0
}
/// Is the referent a template specialization that still has remaining free
@ -388,12 +391,15 @@ impl Cursor {
///
/// Call the given function on each AST node traversed.
pub fn visit<Visitor>(&self, mut visitor: Visitor)
where Visitor: FnMut(Cursor) -> CXChildVisitResult,
where
Visitor: FnMut(Cursor) -> CXChildVisitResult,
{
unsafe {
clang_visitChildren(self.x,
visit_children::<Visitor>,
mem::transmute(&mut visitor));
clang_visitChildren(
self.x,
visit_children::<Visitor>,
mem::transmute(&mut visitor),
);
}
}
@ -451,7 +457,7 @@ impl Cursor {
/// Is the referent an inlined function?
pub fn is_inlined_function(&self) -> bool {
clang_Cursor_isFunctionInlined::is_loaded() &&
unsafe { clang_Cursor_isFunctionInlined(self.x) != 0 }
unsafe { clang_Cursor_isFunctionInlined(self.x) != 0 }
}
/// Get the width of this cursor's referent bit field, or `None` if the
@ -572,7 +578,7 @@ impl Cursor {
/// `mutable`?
pub fn is_mutable_field(&self) -> bool {
clang_CXXField_isMutable::is_loaded() &&
unsafe { clang_CXXField_isMutable(self.x) != 0 }
unsafe { clang_CXXField_isMutable(self.x) != 0 }
}
/// Get the offset of the field represented by the Cursor.
@ -628,18 +634,21 @@ impl Cursor {
/// (including '_') and does not start with a digit.
pub fn is_valid_identifier(name: &str) -> bool {
let mut chars = name.chars();
let first_valid = chars.next()
let first_valid = chars
.next()
.map(|c| c.is_alphabetic() || c == '_')
.unwrap_or(false);
first_valid && chars.all(|c| c.is_alphanumeric() || c == '_')
}
extern "C" fn visit_children<Visitor>(cur: CXCursor,
_parent: CXCursor,
data: CXClientData)
-> CXChildVisitResult
where Visitor: FnMut(Cursor) -> CXChildVisitResult,
extern "C" fn visit_children<Visitor>(
cur: CXCursor,
_parent: CXCursor,
data: CXClientData,
) -> CXChildVisitResult
where
Visitor: FnMut(Cursor) -> CXChildVisitResult,
{
let func: &mut Visitor = unsafe { mem::transmute(data) };
let child = Cursor {
@ -679,13 +688,15 @@ impl Eq for Type {}
impl fmt::Debug for Type {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
write!(fmt,
"Type({}, kind: {}, cconv: {}, decl: {:?}, canon: {:?})",
self.spelling(),
type_to_str(self.kind()),
self.call_conv(),
self.declaration(),
self.declaration().canonical())
write!(
fmt,
"Type({}, kind: {}, cconv: {}, decl: {:?}, canon: {:?})",
self.spelling(),
type_to_str(self.kind()),
self.call_conv(),
self.declaration(),
self.declaration().canonical()
)
}
}
@ -738,9 +749,10 @@ impl Type {
}
/// Get the canonical declaration of this type, if it is available.
pub fn canonical_declaration(&self,
location: Option<&Cursor>)
-> Option<CanonicalTypeDeclaration> {
pub fn canonical_declaration(
&self,
location: Option<&Cursor>,
) -> Option<CanonicalTypeDeclaration> {
let mut declaration = self.declaration();
if !declaration.is_valid() {
if let Some(location) = location {
@ -957,12 +969,12 @@ impl Type {
// nasty... But can happen in <type_traits>. Unfortunately I couldn't
// reduce it enough :(
self.template_args().map_or(false, |args| args.len() > 0) &&
match self.declaration().kind() {
CXCursor_ClassTemplatePartialSpecialization |
CXCursor_TypeAliasTemplateDecl |
CXCursor_TemplateTemplateParameter => false,
_ => true,
}
match self.declaration().kind() {
CXCursor_ClassTemplatePartialSpecialization |
CXCursor_TypeAliasTemplateDecl |
CXCursor_TemplateTemplateParameter => false,
_ => true,
}
}
/// Is this type an associated template type? Eg `T::Associated` in
@ -985,8 +997,8 @@ impl Type {
}
self.kind() == CXType_Unexposed &&
(hacky_parse_associated_type(self.spelling()) ||
hacky_parse_associated_type(self.canonical_type().spelling()))
(hacky_parse_associated_type(self.spelling()) ||
hacky_parse_associated_type(self.canonical_type().spelling()))
}
}
@ -1054,17 +1066,21 @@ impl SourceLocation {
let mut line = 0;
let mut col = 0;
let mut off = 0;
clang_getSpellingLocation(self.x,
&mut file,
&mut line,
&mut col,
&mut off);
(File {
x: file,
},
line as usize,
col as usize,
off as usize)
clang_getSpellingLocation(
self.x,
&mut file,
&mut line,
&mut col,
&mut off,
);
(
File {
x: file,
},
line as usize,
col as usize,
off as usize,
)
}
}
}
@ -1164,11 +1180,13 @@ impl Iterator for CommentAttributesIterator {
Some(CommentAttribute {
name: unsafe {
cxstring_into_string(
clang_HTMLStartTag_getAttrName(self.x, idx))
clang_HTMLStartTag_getAttrName(self.x, idx),
)
},
value: unsafe {
cxstring_into_string(
clang_HTMLStartTag_getAttrValue(self.x, idx))
clang_HTMLStartTag_getAttrValue(self.x, idx),
)
},
})
} else {
@ -1264,27 +1282,32 @@ impl fmt::Debug for TranslationUnit {
impl TranslationUnit {
/// Parse a source file into a translation unit.
pub fn parse(ix: &Index,
file: &str,
cmd_args: &[String],
unsaved: &[UnsavedFile],
opts: CXTranslationUnit_Flags)
-> Option<TranslationUnit> {
pub fn parse(
ix: &Index,
file: &str,
cmd_args: &[String],
unsaved: &[UnsavedFile],
opts: CXTranslationUnit_Flags,
) -> Option<TranslationUnit> {
let fname = CString::new(file).unwrap();
let _c_args: Vec<CString> =
cmd_args.iter().map(|s| CString::new(s.clone()).unwrap()).collect();
let _c_args: Vec<CString> = cmd_args
.iter()
.map(|s| CString::new(s.clone()).unwrap())
.collect();
let c_args: Vec<*const c_char> =
_c_args.iter().map(|s| s.as_ptr()).collect();
let mut c_unsaved: Vec<CXUnsavedFile> =
unsaved.iter().map(|f| f.x).collect();
let tu = unsafe {
clang_parseTranslationUnit(ix.x,
fname.as_ptr(),
c_args.as_ptr(),
c_args.len() as c_int,
c_unsaved.as_mut_ptr(),
c_unsaved.len() as c_uint,
opts)
clang_parseTranslationUnit(
ix.x,
fname.as_ptr(),
c_args.as_ptr(),
c_args.len() as c_int,
c_unsaved.as_mut_ptr(),
c_unsaved.len() as c_uint,
opts,
)
};
if tu.is_null() {
None
@ -1337,8 +1360,8 @@ impl TranslationUnit {
return None;
}
let token_array = slice::from_raw_parts(token_ptr,
num_tokens as usize);
let token_array =
slice::from_raw_parts(token_ptr, num_tokens as usize);
for &token in token_array.iter() {
let kind = clang_getTokenKind(token);
let spelling =
@ -1356,9 +1379,10 @@ impl TranslationUnit {
/// Convert a set of tokens from clang into `cexpr` tokens, for further
/// processing.
pub fn cexpr_tokens(&self,
cursor: &Cursor)
-> Option<Vec<cexpr::token::Token>> {
pub fn cexpr_tokens(
&self,
cursor: &Cursor,
) -> Option<Vec<cexpr::token::Token>> {
use cexpr::token;
self.tokens(cursor).map(|tokens| {
@ -1375,7 +1399,7 @@ impl TranslationUnit {
CXToken_Comment => return None,
_ => {
error!("Found unexpected token kind: {:?}", token);
return None
return None;
}
};
@ -1457,10 +1481,12 @@ impl UnsavedFile {
impl fmt::Debug for UnsavedFile {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
write!(fmt,
"UnsavedFile(name: {:?}, contents: {:?})",
self.name,
self.contents)
write!(
fmt,
"UnsavedFile(name: {:?}, contents: {:?})",
self.name,
self.contents
)
}
}
@ -1485,30 +1511,38 @@ pub fn ast_dump(c: &Cursor, depth: isize) -> CXChildVisitResult {
fn print_cursor<S: AsRef<str>>(depth: isize, prefix: S, c: &Cursor) {
let prefix = prefix.as_ref();
print_indent(depth,
format!(" {}kind = {}", prefix, kind_to_str(c.kind())));
print_indent(depth,
format!(" {}spelling = \"{}\"", prefix, c.spelling()));
print_indent(
depth,
format!(" {}kind = {}", prefix, kind_to_str(c.kind())),
);
print_indent(
depth,
format!(" {}spelling = \"{}\"", prefix, c.spelling()),
);
print_indent(depth, format!(" {}location = {}", prefix, c.location()));
print_indent(depth,
format!(" {}is-definition? {}",
prefix,
c.is_definition()));
print_indent(depth,
format!(" {}is-declaration? {}",
prefix,
c.is_declaration()));
print_indent(depth,
format!(" {}is-inlined-function? {}",
prefix,
c.is_inlined_function()));
print_indent(
depth,
format!(" {}is-definition? {}", prefix, c.is_definition()),
);
print_indent(
depth,
format!(" {}is-declaration? {}", prefix, c.is_declaration()),
);
print_indent(
depth,
format!(
" {}is-inlined-function? {}",
prefix,
c.is_inlined_function()
),
);
let templ_kind = c.template_kind();
if templ_kind != CXCursor_NoDeclFound {
print_indent(depth,
format!(" {}template-kind = {}",
prefix,
kind_to_str(templ_kind)));
print_indent(
depth,
format!(" {}template-kind = {}", prefix, kind_to_str(templ_kind)),
);
}
if let Some(usr) = c.usr() {
print_indent(depth, format!(" {}usr = \"{}\"", prefix, usr));
@ -1517,67 +1551,75 @@ pub fn ast_dump(c: &Cursor, depth: isize) -> CXChildVisitResult {
print_indent(depth, format!(" {}number-of-args = {}", prefix, num));
}
if let Some(num) = c.num_template_args() {
print_indent(depth,
format!(" {}number-of-template-args = {}",
prefix,
num));
print_indent(
depth,
format!(" {}number-of-template-args = {}", prefix, num),
);
}
if let Some(width) = c.bit_width() {
print_indent(depth, format!(" {}bit-width = {}", prefix, width));
}
if let Some(ty) = c.enum_type() {
print_indent(depth,
format!(" {}enum-type = {}",
prefix,
type_to_str(ty.kind())));
print_indent(
depth,
format!(" {}enum-type = {}", prefix, type_to_str(ty.kind())),
);
}
if let Some(val) = c.enum_val_signed() {
print_indent(depth, format!(" {}enum-val = {}", prefix, val));
}
if let Some(ty) = c.typedef_type() {
print_indent(depth,
format!(" {}typedef-type = {}",
prefix,
type_to_str(ty.kind())));
print_indent(
depth,
format!(" {}typedef-type = {}", prefix, type_to_str(ty.kind())),
);
}
if let Some(ty) = c.ret_type() {
print_indent(depth,
format!(" {}ret-type = {}",
prefix,
type_to_str(ty.kind())));
print_indent(
depth,
format!(" {}ret-type = {}", prefix, type_to_str(ty.kind())),
);
}
if let Some(refd) = c.referenced() {
if refd != *c {
println!("");
print_cursor(depth,
String::from(prefix) + "referenced.",
&refd);
print_cursor(
depth,
String::from(prefix) + "referenced.",
&refd,
);
}
}
let canonical = c.canonical();
if canonical != *c {
println!("");
print_cursor(depth,
String::from(prefix) + "canonical.",
&canonical);
print_cursor(
depth,
String::from(prefix) + "canonical.",
&canonical,
);
}
if let Some(specialized) = c.specialized() {
if specialized != *c {
println!("");
print_cursor(depth,
String::from(prefix) + "specialized.",
&specialized);
print_cursor(
depth,
String::from(prefix) + "specialized.",
&specialized,
);
}
}
if let Some(parent) = c.fallible_semantic_parent() {
println!("");
print_cursor(depth,
String::from(prefix) + "semantic-parent.",
&parent);
print_cursor(
depth,
String::from(prefix) + "semantic-parent.",
&parent,
);
}
}
@ -1592,22 +1634,32 @@ pub fn ast_dump(c: &Cursor, depth: isize) -> CXChildVisitResult {
print_indent(depth, format!(" {}cconv = {}", prefix, ty.call_conv()));
print_indent(depth,
format!(" {}spelling = \"{}\"", prefix, ty.spelling()));
print_indent(
depth,
format!(" {}spelling = \"{}\"", prefix, ty.spelling()),
);
let num_template_args =
unsafe { clang_Type_getNumTemplateArguments(ty.x) };
if num_template_args >= 0 {
print_indent(depth,
format!(" {}number-of-template-args = {}",
prefix,
num_template_args));
print_indent(
depth,
format!(
" {}number-of-template-args = {}",
prefix,
num_template_args
),
);
}
if let Some(num) = ty.num_elements() {
print_indent(depth,
format!(" {}number-of-elements = {}", prefix, num));
print_indent(
depth,
format!(" {}number-of-elements = {}", prefix, num),
);
}
print_indent(depth,
format!(" {}is-variadic? {}", prefix, ty.is_variadic()));
print_indent(
depth,
format!(" {}is-variadic? {}", prefix, ty.is_variadic()),
);
let canonical = ty.canonical_type();
if canonical != *ty {
@ -1699,7 +1751,8 @@ impl EvalResult {
// `CXType_Unexposed` from evaluation.
let mut found_cant_eval = false;
cursor.visit(|c| if c.kind() == CXCursor_TypeRef &&
c.cur_type().kind() == CXType_Unexposed {
c.cur_type().kind() == CXType_Unexposed
{
found_cant_eval = true;
CXChildVisit_Break
} else {


@ -11,19 +11,35 @@ pub mod attributes {
use syntax::ast;
pub fn allow(which_ones: &[&str]) -> ast::Attribute {
aster::AstBuilder::new().attr().list("allow").words(which_ones).build()
aster::AstBuilder::new()
.attr()
.list("allow")
.words(which_ones)
.build()
}
pub fn repr(which: &str) -> ast::Attribute {
aster::AstBuilder::new().attr().list("repr").words(&[which]).build()
aster::AstBuilder::new()
.attr()
.list("repr")
.words(&[which])
.build()
}
pub fn repr_list(which_ones: &[&str]) -> ast::Attribute {
aster::AstBuilder::new().attr().list("repr").words(which_ones).build()
aster::AstBuilder::new()
.attr()
.list("repr")
.words(which_ones)
.build()
}
pub fn derives(which_ones: &[&str]) -> ast::Attribute {
aster::AstBuilder::new().attr().list("derive").words(which_ones).build()
aster::AstBuilder::new()
.attr()
.list("derive")
.words(which_ones)
.build()
}
pub fn inline() -> ast::Attribute {
@ -35,46 +51,37 @@ pub mod attributes {
}
pub fn link_name(name: &str) -> ast::Attribute {
aster::AstBuilder::new().attr().name_value("link_name").str(name)
aster::AstBuilder::new()
.attr()
.name_value("link_name")
.str(name)
}
}
/// Generates a proper type for a field or type with a given `Layout`, that is,
/// a type with the correct size and alignment restrictions.
pub struct BlobTyBuilder {
layout: Layout,
}
pub fn blob(layout: Layout) -> P<ast::Ty> {
let opaque = layout.opaque();
impl BlobTyBuilder {
pub fn new(layout: Layout) -> Self {
BlobTyBuilder {
layout: layout,
// FIXME(emilio, #412): We fall back to byte alignment, but there are
// some things that legitimately are more than 8-byte aligned.
//
// Eventually we should be able to `unwrap` here, but...
let ty_name = match opaque.known_rust_type_for_array() {
Some(ty) => ty,
None => {
warn!("Found unknown alignment on code generation!");
"u8"
}
}
};
pub fn build(self) -> P<ast::Ty> {
let opaque = self.layout.opaque();
let data_len = opaque.array_size().unwrap_or(layout.size);
// FIXME(emilio, #412): We fall back to byte alignment, but there are
// some things that legitimately are more than 8-byte aligned.
//
// Eventually we should be able to `unwrap` here, but...
let ty_name = match opaque.known_rust_type_for_array() {
Some(ty) => ty,
None => {
warn!("Found unknown alignment on code generation!");
"u8"
}
};
let data_len = opaque.array_size().unwrap_or(self.layout.size);
let inner_ty = aster::AstBuilder::new().ty().path().id(ty_name).build();
if data_len == 1 {
inner_ty
} else {
aster::ty::TyBuilder::new().array(data_len).build(inner_ty)
}
let inner_ty = aster::AstBuilder::new().ty().path().id(ty_name).build();
if data_len == 1 {
inner_ty
} else {
aster::ty::TyBuilder::new().array(data_len).build(inner_ty)
}
}
@ -97,9 +104,10 @@ pub mod ast_ty {
}
}
pub fn float_kind_rust_type(ctx: &BindgenContext,
fk: FloatKind)
-> P<ast::Ty> {
pub fn float_kind_rust_type(
ctx: &BindgenContext,
fk: FloatKind,
) -> P<ast::Ty> {
// TODO: we probably should just take the type layout into
// account?
//
@ -153,14 +161,17 @@ pub mod ast_ty {
pub fn cstr_expr(mut string: String) -> P<ast::Expr> {
string.push('\0');
aster::AstBuilder::new()
.expr()
.build_lit(aster::AstBuilder::new().lit().byte_str(string))
aster::AstBuilder::new().expr().build_lit(
aster::AstBuilder::new()
.lit()
.byte_str(string),
)
}
pub fn float_expr(ctx: &BindgenContext,
f: f64)
-> Result<P<ast::Expr>, ()> {
pub fn float_expr(
ctx: &BindgenContext,
f: f64,
) -> Result<P<ast::Expr>, ()> {
use aster::symbol::ToSymbol;
if f.is_finite() {
@ -171,8 +182,9 @@ pub mod ast_ty {
string.push('.');
}
let kind = ast::LitKind::FloatUnsuffixed(string.as_str().to_symbol());
return Ok(aster::AstBuilder::new().expr().lit().build_lit(kind))
let kind =
ast::LitKind::FloatUnsuffixed(string.as_str().to_symbol());
return Ok(aster::AstBuilder::new().expr().lit().build_lit(kind));
}
let prefix = ctx.trait_prefix();
@ -192,13 +204,15 @@ pub mod ast_ty {
return Err(());
}
pub fn arguments_from_signature(signature: &FunctionSig,
ctx: &BindgenContext)
-> Vec<P<ast::Expr>> {
pub fn arguments_from_signature(
signature: &FunctionSig,
ctx: &BindgenContext,
) -> Vec<P<ast::Expr>> {
// TODO: We need to keep in sync the argument names, so we should unify
// this with the other loop that decides them.
let mut unnamed_arguments = 0;
signature.argument_types()
signature
.argument_types()
.iter()
.map(|&(ref name, _ty)| {
let arg_name = match *name {

The diff for this file is not shown because of its large size.


@ -1,6 +1,6 @@
//! Helpers for code generation that need struct layout
use super::helpers::BlobTyBuilder;
use super::helpers;
use aster::struct_field::StructFieldBuilder;
@ -81,7 +81,11 @@ fn test_bytes_from_bits_pow2() {
}
impl<'a, 'ctx> StructLayoutTracker<'a, 'ctx> {
pub fn new(ctx: &'a BindgenContext<'ctx>, comp: &'a CompInfo, name: &'a str) -> Self {
pub fn new(
ctx: &'a BindgenContext<'ctx>,
comp: &'a CompInfo,
name: &'a str,
) -> Self {
StructLayoutTracker {
name: name,
ctx: ctx,
@ -121,9 +125,11 @@ impl<'a, 'ctx> StructLayoutTracker<'a, 'ctx> {
self.latest_offset += layout.size;
debug!("Offset: <bitfield>: {} -> {}",
self.latest_offset - layout.size,
self.latest_offset);
debug!(
"Offset: <bitfield>: {} -> {}",
self.latest_offset - layout.size,
self.latest_offset
);
self.latest_field_layout = Some(layout);
self.last_field_was_bitfield = true;
@ -143,30 +149,33 @@ impl<'a, 'ctx> StructLayoutTracker<'a, 'ctx> {
/// Add a padding field if necessary for a given new field _before_ adding
/// that field.
pub fn pad_field(&mut self,
field_name: &str,
field_ty: &Type,
field_offset: Option<usize>)
-> Option<ast::StructField> {
pub fn pad_field(
&mut self,
field_name: &str,
field_ty: &Type,
field_offset: Option<usize>,
) -> Option<ast::StructField> {
let mut field_layout = match field_ty.layout(self.ctx) {
Some(l) => l,
None => return None,
};
if let TypeKind::Array(inner, len) =
*field_ty.canonical_type(self.ctx).kind() {
*field_ty.canonical_type(self.ctx).kind()
{
// FIXME(emilio): As an _ultra_ hack, we correct the layout returned
// by arrays of structs that have a bigger alignment than what we
// can support.
//
// This means that the structs in the array are super-unsafe to
// access, since they won't be properly aligned, but *shrug*.
if let Some(layout) = self.ctx
.resolve_type(inner)
.layout(self.ctx) {
if let Some(layout) = self.ctx.resolve_type(inner).layout(
self.ctx,
)
{
if layout.align > mem::size_of::<*mut ()>() {
field_layout.size = align_to(layout.size, layout.align) *
len;
len;
field_layout.align = mem::size_of::<*mut ()>();
}
}
@ -187,25 +196,30 @@ impl<'a, 'ctx> StructLayoutTracker<'a, 'ctx> {
// Otherwise the padding is useless.
let need_padding = padding_bytes >= field_layout.align ||
field_layout.align > mem::size_of::<*mut ()>();
field_layout.align > mem::size_of::<*mut ()>();
self.latest_offset += padding_bytes;
debug!("Offset: <padding>: {} -> {}",
self.latest_offset - padding_bytes,
self.latest_offset);
debug!(
"Offset: <padding>: {} -> {}",
self.latest_offset - padding_bytes,
self.latest_offset
);
debug!("align field {} to {}/{} with {} padding bytes {:?}",
field_name,
self.latest_offset,
field_offset.unwrap_or(0) / 8,
padding_bytes,
field_layout);
debug!(
"align field {} to {}/{} with {} padding bytes {:?}",
field_name,
self.latest_offset,
field_offset.unwrap_or(0) / 8,
padding_bytes,
field_layout
);
if need_padding && padding_bytes != 0 {
Some(Layout::new(padding_bytes,
cmp::min(field_layout.align,
mem::size_of::<*mut ()>())))
Some(Layout::new(
padding_bytes,
cmp::min(field_layout.align, mem::size_of::<*mut ()>()),
))
} else {
None
}
@ -213,25 +227,33 @@ impl<'a, 'ctx> StructLayoutTracker<'a, 'ctx> {
self.latest_offset += field_layout.size;
self.latest_field_layout = Some(field_layout);
self.max_field_align = cmp::max(self.max_field_align,
field_layout.align);
self.max_field_align =
cmp::max(self.max_field_align, field_layout.align);
self.last_field_was_bitfield = false;
debug!("Offset: {}: {} -> {}",
field_name,
self.latest_offset - field_layout.size,
self.latest_offset);
debug!(
"Offset: {}: {} -> {}",
field_name,
self.latest_offset - field_layout.size,
self.latest_offset
);
padding_layout.map(|layout| self.padding_field(layout))
}
pub fn pad_struct(&mut self, layout: Layout) -> Option<ast::StructField> {
debug!("pad_struct:\n\tself = {:#?}\n\tlayout = {:#?}", self, layout);
debug!(
"pad_struct:\n\tself = {:#?}\n\tlayout = {:#?}",
self,
layout
);
if layout.size < self.latest_offset {
error!("Calculated wrong layout for {}, too more {} bytes",
self.name,
self.latest_offset - layout.size);
error!(
"Calculated wrong layout for {}, too more {} bytes",
self.name,
self.latest_offset - layout.size
);
return None;
}
@ -244,14 +266,17 @@ impl<'a, 'ctx> StructLayoutTracker<'a, 'ctx> {
// regardless, because bitfields don't respect alignment as strictly as
// other fields.
if padding_bytes > 0 &&
(padding_bytes >= layout.align ||
(self.last_field_was_bitfield &&
padding_bytes >= self.latest_field_layout.unwrap().align) ||
layout.align > mem::size_of::<*mut ()>()) {
(padding_bytes >= layout.align ||
(self.last_field_was_bitfield &&
padding_bytes >=
self.latest_field_layout.unwrap().align) ||
layout.align > mem::size_of::<*mut ()>())
{
let layout = if self.comp.packed() {
Layout::new(padding_bytes, 1)
} else if self.last_field_was_bitfield ||
layout.align > mem::size_of::<*mut ()>() {
layout.align > mem::size_of::<*mut ()>()
{
// We've already given up on alignment here.
Layout::for_size(padding_bytes)
} else {
@ -268,12 +293,15 @@ impl<'a, 'ctx> StructLayoutTracker<'a, 'ctx> {
pub fn align_struct(&self, layout: Layout) -> Option<ast::StructField> {
if self.max_field_align < layout.align &&
layout.align <= mem::size_of::<*mut ()>() {
let ty = BlobTyBuilder::new(Layout::new(0, layout.align)).build();
layout.align <= mem::size_of::<*mut ()>()
{
let ty = helpers::blob(Layout::new(0, layout.align));
Some(StructFieldBuilder::named("__bindgen_align")
.pub_()
.build_ty(ty))
Some(
StructFieldBuilder::named("__bindgen_align")
.pub_()
.build_ty(ty),
)
} else {
None
}
@ -284,7 +312,7 @@ impl<'a, 'ctx> StructLayoutTracker<'a, 'ctx> {
}
fn padding_field(&mut self, layout: Layout) -> ast::StructField {
let ty = BlobTyBuilder::new(layout).build();
let ty = helpers::blob(layout);
let padding_count = self.padding_count;
self.padding_count += 1;
@ -293,7 +321,9 @@ impl<'a, 'ctx> StructLayoutTracker<'a, 'ctx> {
self.max_field_align = cmp::max(self.max_field_align, layout.align);
StructFieldBuilder::named(padding_field_name).pub_().build_ty(ty)
StructFieldBuilder::named(padding_field_name)
.pub_()
.build_ty(ty)
}
/// Returns whether the new field is known to merge with a bitfield.
@ -312,14 +342,17 @@ impl<'a, 'ctx> StructLayoutTracker<'a, 'ctx> {
// If it was, we may or may not need to align, depending on what the
// current field alignment and the bitfield size and alignment are.
debug!("align_to_bitfield? {}: {:?} {:?}",
self.last_field_was_bitfield,
layout,
new_field_layout);
debug!(
"align_to_bitfield? {}: {:?} {:?}",
self.last_field_was_bitfield,
layout,
new_field_layout
);
if self.last_field_was_bitfield &&
new_field_layout.align <= layout.size % layout.align &&
new_field_layout.size <= layout.size % layout.align {
new_field_layout.align <= layout.size % layout.align &&
new_field_layout.size <= layout.size % layout.align
{
// The new field will be coalesced into some of the remaining bits.
//
// FIXME(emilio): I think this may not catch everything?
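
The padding logic above keeps a running `latest_offset` and emits a blob field whenever the next field's real offset is ahead of it; the rounding it relies on is the usual align-up computation referenced here as `align_to`. A plausible, self-contained version of that arithmetic (a sketch of the helper's behavior, not the exact code from struct_layout.rs):

// Round `size` up to the next multiple of `align`.
fn align_to(size: usize, align: usize) -> usize {
    ((size + align - 1) / align) * align
}

fn main() {
    // A u8 followed by a u32 needs 3 bytes of padding before the u32,
    // giving a struct of size 8 with 4-byte alignment.
    let offset_after_u8 = 1;
    let u32_align = 4;
    assert_eq!(align_to(offset_after_u8, u32_align) - offset_after_u8, 3);
    assert_eq!(align_to(5, 4), 8);
}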

third_party/rust/bindgen-0.30.0/src/features.rs (vendored, new file)

@ -0,0 +1,185 @@
//! Contains code for selecting features
#![deny(missing_docs)]
#![deny(warnings)]
#![deny(unused_extern_crates)]
use std::io;
use std::str::FromStr;
/// Define RustTarget struct definition, Default impl, and conversions
/// between RustTarget and String.
macro_rules! rust_target_def {
( $( $( #[$attr:meta] )* => $release:ident => $value:expr; )* ) => {
/// Represents the version of the Rust language to target.
///
/// To support a beta release, use the corresponding stable release.
///
/// This enum will have more variants added as necessary.
#[derive(Debug, Copy, Clone, Eq, PartialEq, PartialOrd, Hash)]
#[allow(non_camel_case_types)]
pub enum RustTarget {
$(
$(
#[$attr]
)*
$release,
)*
}
impl Default for RustTarget {
/// Gives the latest stable Rust version
fn default() -> RustTarget {
LATEST_STABLE_RUST
}
}
impl FromStr for RustTarget {
type Err = io::Error;
/// Create a `RustTarget` from a string.
///
/// * The stable/beta versions of Rust are of the form "1.0",
/// "1.19", etc.
/// * The nightly version should be specified with "nightly".
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s.as_ref() {
$(
stringify!($value) => Ok(RustTarget::$release),
)*
_ => Err(
io::Error::new(
io::ErrorKind::InvalidInput,
concat!(
"Got an invalid rust target. Accepted values ",
"are of the form ",
"\"1.0\" or \"nightly\"."))),
}
}
}
impl From<RustTarget> for String {
fn from(target: RustTarget) -> Self {
match target {
$(
RustTarget::$release => stringify!($value),
)*
}.into()
}
}
}
}
/// Defines an array slice with all RustTarget values
macro_rules! rust_target_values_def {
( $( $( #[$attr:meta] )* => $release:ident => $value:expr; )* ) => {
/// Strings of allowed `RustTarget` values
pub static RUST_TARGET_STRINGS: &'static [&str] = &[
$(
stringify!($value),
)*
];
}
}
/// Defines macro which takes a macro
macro_rules! rust_target_base {
( $x_macro:ident ) => {
$x_macro!(
/// Rust stable 1.0
=> Stable_1_0 => 1.0;
/// Rust stable 1.19
=> Stable_1_19 => 1.19;
/// Nightly rust
=> Nightly => nightly;
);
}
}
rust_target_base!(rust_target_def);
rust_target_base!(rust_target_values_def);
/// Latest stable release of Rust
pub const LATEST_STABLE_RUST: RustTarget = RustTarget::Stable_1_19;
/// Create RustFeatures struct definition, new(), and a getter for each field
macro_rules! rust_feature_def {
( $( $( #[$attr:meta] )* => $feature:ident; )* ) => {
/// Features supported by a rust target
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub struct RustFeatures {
$(
$feature: bool,
)*
}
impl RustFeatures {
/// Gives a RustFeatures struct with all features disabled
fn new() -> Self {
RustFeatures {
$(
$feature: false,
)*
}
}
$(
$(
#[$attr]
)*
pub fn $feature(&self) -> bool {
self.$feature
}
)*
}
}
}
rust_feature_def!(
/// Untagged unions ([RFC 1444](https://github.com/rust-lang/rfcs/blob/master/text/1444-union.md))
=> untagged_union;
/// Constant function ([RFC 911](https://github.com/rust-lang/rfcs/blob/master/text/0911-const-fn.md))
=> const_fn;
);
impl From<RustTarget> for RustFeatures {
fn from(rust_target: RustTarget) -> Self {
let mut features = RustFeatures::new();
if rust_target >= RustTarget::Stable_1_19 {
features.untagged_union = true;
}
if rust_target >= RustTarget::Nightly {
features.const_fn = true;
}
features
}
}
impl Default for RustFeatures {
fn default() -> Self {
let default_rust_target: RustTarget = Default::default();
Self::from(default_rust_target)
}
}
#[cfg(test)]
mod test {
#![allow(unused_imports)]
use super::*;
fn test_target(target_str: &str, target: RustTarget) {
let target_string: String = target.into();
assert_eq!(target_str, target_string);
assert_eq!(target, RustTarget::from_str(target_str).unwrap());
}
#[test]
fn str_to_target() {
test_target("1.0", RustTarget::Stable_1_0);
test_target("1.19", RustTarget::Stable_1_19);
test_target("nightly", RustTarget::Nightly);
}
}
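
Given the macros above, mapping a target to its feature set is a pure function of the version ordering: `Stable_1_19` and newer enable untagged unions, and only `Nightly` enables `const fn`. A short check in the style of the module's own test block (it would sit alongside `str_to_target` and reuse its `use super::*`):

#[test]
fn features_follow_target() {
    let stable = RustFeatures::from(RustTarget::Stable_1_19);
    assert!(stable.untagged_union());
    assert!(!stable.const_fn());

    let nightly = RustFeatures::from(RustTarget::Nightly);
    assert!(nightly.untagged_union());
    assert!(nightly.const_fn());

    assert_eq!(RustTarget::default(), LATEST_STABLE_RUST);
}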

third_party/rust/bindgen-0.30.0/src/ir/analysis/derive_copy.rs (vendored, new file)

@ -0,0 +1,327 @@
//! Determining which types for which we can emit `#[derive(Copy)]`.
use super::{ConstrainResult, MonotoneFramework, generate_dependencies};
use ir::comp::CompKind;
use ir::comp::Field;
use ir::comp::FieldMethods;
use ir::context::{BindgenContext, ItemId};
use ir::derive::CanTriviallyDeriveCopy;
use ir::item::IsOpaque;
use ir::template::TemplateParameters;
use ir::traversal::EdgeKind;
use ir::ty::TypeKind;
use std::collections::HashMap;
use std::collections::HashSet;
/// An analysis that finds for each IR item whether copy cannot be derived.
///
/// We use the monotone constraint function `cannot_derive_copy`, defined as
/// follows:
///
/// * If T is Opaque and layout of the type is known, get this layout as opaque
/// type and check whether it can be derived using trivial checks.
/// * If T is Array type, copy cannot be derived if the length of the array is
/// larger than the limit or the type of data the array contains cannot derive
/// copy.
/// * If T is a type alias, a templated alias or an indirection to another type,
/// copy cannot be derived if the type T refers to cannot be derived copy.
/// * If T is a compound type, copy cannot be derived if any of its base member
/// or field cannot be derived copy.
/// * If T is an instantiation of an abstract template definition, T cannot be
/// derived copy if any of the template arguments or template definition
/// cannot derive copy.
#[derive(Debug, Clone)]
pub struct CannotDeriveCopy<'ctx, 'gen>
where
'gen: 'ctx,
{
ctx: &'ctx BindgenContext<'gen>,
// The incremental result of this analysis's computation. Everything in this
// set cannot derive copy.
cannot_derive_copy: HashSet<ItemId>,
// Dependencies saying that if a key ItemId has been inserted into the
// `cannot_derive_copy` set, then each of
// the ids in Vec<ItemId> need to be considered again.
//
// This is a subset of the natural IR graph with reversed edges, where we
// only include the edges from the IR graph that can affect whether a type
// can derive copy or not.
dependencies: HashMap<ItemId, Vec<ItemId>>,
}
impl<'ctx, 'gen> CannotDeriveCopy<'ctx, 'gen> {
fn consider_edge(kind: EdgeKind) -> bool {
match kind {
// These are the only edges that can affect whether a type can derive
// copy or not.
EdgeKind::BaseMember |
EdgeKind::Field |
EdgeKind::TypeReference |
EdgeKind::VarType |
EdgeKind::TemplateArgument |
EdgeKind::TemplateDeclaration |
EdgeKind::TemplateParameterDefinition => true,
EdgeKind::Constructor |
EdgeKind::Destructor |
EdgeKind::FunctionReturn |
EdgeKind::FunctionParameter |
EdgeKind::InnerType |
EdgeKind::InnerVar |
EdgeKind::Method => false,
EdgeKind::Generic => false,
}
}
fn insert(&mut self, id: ItemId) -> ConstrainResult {
trace!("inserting {:?} into the cannot_derive_copy set", id);
let was_not_already_in_set = self.cannot_derive_copy.insert(id);
assert!(
was_not_already_in_set,
"We shouldn't try and insert {:?} twice because if it was \
already in the set, `constrain` should have exited early.",
id
);
ConstrainResult::Changed
}
}
impl<'ctx, 'gen> MonotoneFramework for CannotDeriveCopy<'ctx, 'gen> {
type Node = ItemId;
type Extra = &'ctx BindgenContext<'gen>;
type Output = HashSet<ItemId>;
fn new(ctx: &'ctx BindgenContext<'gen>) -> CannotDeriveCopy<'ctx, 'gen> {
let cannot_derive_copy = HashSet::new();
let dependencies = generate_dependencies(ctx, Self::consider_edge);
CannotDeriveCopy {
ctx,
cannot_derive_copy,
dependencies,
}
}
fn initial_worklist(&self) -> Vec<ItemId> {
self.ctx.whitelisted_items().iter().cloned().collect()
}
fn constrain(&mut self, id: ItemId) -> ConstrainResult {
trace!("constrain: {:?}", id);
if self.cannot_derive_copy.contains(&id) {
trace!(" already know it cannot derive Copy");
return ConstrainResult::Same;
}
let item = self.ctx.resolve_item(id);
let ty = match item.as_type() {
Some(ty) => ty,
None => {
trace!(" not a type; ignoring");
return ConstrainResult::Same;
}
};
if item.is_opaque(self.ctx, &()) {
let layout_can_derive = ty.layout(self.ctx).map_or(true, |l| {
l.opaque().can_trivially_derive_copy()
});
return if layout_can_derive {
trace!(" we can trivially derive Copy for the layout");
ConstrainResult::Same
} else {
trace!(" we cannot derive Copy for the layout");
self.insert(id)
};
}
match *ty.kind() {
// Handle the simple cases. These can derive copy without further
// information.
TypeKind::Void |
TypeKind::NullPtr |
TypeKind::Int(..) |
TypeKind::Float(..) |
TypeKind::Complex(..) |
TypeKind::Function(..) |
TypeKind::Enum(..) |
TypeKind::Reference(..) |
TypeKind::TypeParam |
TypeKind::BlockPointer |
TypeKind::Pointer(..) |
TypeKind::UnresolvedTypeRef(..) |
TypeKind::ObjCInterface(..) |
TypeKind::ObjCId |
TypeKind::ObjCSel => {
trace!(" simple type that can always derive Copy");
ConstrainResult::Same
}
TypeKind::Array(t, len) => {
let cant_derive_copy = self.cannot_derive_copy.contains(&t);
if cant_derive_copy {
trace!(
" arrays of T for which we cannot derive Copy \
also cannot derive Copy"
);
return self.insert(id);
}
if len > 0 {
trace!(" array can derive Copy with positive length");
ConstrainResult::Same
} else {
trace!(" array cannot derive Copy with 0 length");
self.insert(id)
}
}
TypeKind::ResolvedTypeRef(t) |
TypeKind::TemplateAlias(t, _) |
TypeKind::Alias(t) => {
let cant_derive_copy = self.cannot_derive_copy.contains(&t);
if cant_derive_copy {
trace!(
" arrays of T for which we cannot derive Copy \
also cannot derive Copy"
);
return self.insert(id);
}
trace!(
" aliases and type refs to T which can derive \
Copy can also derive Copy"
);
ConstrainResult::Same
}
TypeKind::Comp(ref info) => {
assert!(
!info.has_non_type_template_params(),
"The early ty.is_opaque check should have handled this case"
);
// NOTE: Take into account that while unions in C and C++ are copied by
// default, they may have an explicit destructor in C++, so we can't
// defer this check just for the union case.
if self.ctx.lookup_item_id_has_destructor(&id) {
trace!(" comp has destructor which cannot derive copy");
return self.insert(id);
}
if info.kind() == CompKind::Union {
if !self.ctx.options().rust_features().untagged_union() {
// NOTE: If there's no template parameters we can derive copy
// unconditionally, since arrays are magical for rustc, and
// __BindgenUnionField always implements copy.
trace!(
" comp can always derive debug if it's a Union and no template parameters"
);
return ConstrainResult::Same;
}
// https://github.com/rust-lang/rust/issues/36640
if info.self_template_params(self.ctx).is_some() ||
item.used_template_params(self.ctx).is_some()
{
trace!(
" comp cannot derive copy because issue 36640"
);
return self.insert(id);
}
}
let bases_cannot_derive =
info.base_members().iter().any(|base| {
self.cannot_derive_copy.contains(&base.ty)
});
if bases_cannot_derive {
trace!(
" base members cannot derive Copy, so we can't \
either"
);
return self.insert(id);
}
let fields_cannot_derive =
info.fields().iter().any(|f| match *f {
Field::DataMember(ref data) => {
self.cannot_derive_copy.contains(&data.ty())
}
Field::Bitfields(ref bfu) => {
bfu.bitfields().iter().any(|b| {
self.cannot_derive_copy.contains(&b.ty())
})
}
});
if fields_cannot_derive {
trace!(" fields cannot derive Copy, so we can't either");
return self.insert(id);
}
trace!(" comp can derive Copy");
ConstrainResult::Same
}
TypeKind::TemplateInstantiation(ref template) => {
let args_cannot_derive =
template.template_arguments().iter().any(|arg| {
self.cannot_derive_copy.contains(&arg)
});
if args_cannot_derive {
trace!(
" template args cannot derive Copy, so \
insantiation can't either"
);
return self.insert(id);
}
assert!(
!template.template_definition().is_opaque(self.ctx, &()),
"The early ty.is_opaque check should have handled this case"
);
let def_cannot_derive = self.cannot_derive_copy.contains(
&template.template_definition(),
);
if def_cannot_derive {
trace!(
" template definition cannot derive Copy, so \
insantiation can't either"
);
return self.insert(id);
}
trace!(" template instantiation can derive Copy");
ConstrainResult::Same
}
TypeKind::Opaque => {
unreachable!(
"The early ty.is_opaque check should have handled this case"
)
}
}
}
fn each_depending_on<F>(&self, id: ItemId, mut f: F)
where
F: FnMut(ItemId),
{
if let Some(edges) = self.dependencies.get(&id) {
for item in edges {
trace!("enqueue {:?} into worklist", item);
f(*item);
}
}
}
}
impl<'ctx, 'gen> From<CannotDeriveCopy<'ctx, 'gen>> for HashSet<ItemId> {
fn from(analysis: CannotDeriveCopy<'ctx, 'gen>) -> Self {
analysis.cannot_derive_copy
}
}
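
The analysis above is one instance of the crate's monotone-framework pattern: start with an empty `cannot_derive_copy` set, run `constrain` over a worklist of items, and whenever the set grows, re-enqueue everything that depends on the changed item until a fixed point is reached. A simplified sketch of that driver loop (the real `analyze` lives in `ir/analysis/mod.rs`; this illustrates the shape, not its exact code):

use std::collections::VecDeque;

// A pared-down view of the MonotoneFramework trait used by CannotDeriveCopy.
trait Monotone {
    type Node: Copy + Eq;
    fn initial_worklist(&self) -> Vec<Self::Node>;
    // Returns true if the analysis state changed for `node`.
    fn constrain(&mut self, node: Self::Node) -> bool;
    fn each_depending_on<F: FnMut(Self::Node)>(&self, node: Self::Node, f: F);
}

// Run the worklist algorithm to a fixed point.
fn analyze<A: Monotone>(analysis: &mut A) {
    let mut worklist: VecDeque<A::Node> = analysis.initial_worklist().into();
    while let Some(node) = worklist.pop_front() {
        if analysis.constrain(node) {
            // The "cannot derive" set grew, so every dependent of `node`
            // has to be reconsidered.
            analysis.each_depending_on(node, |dep| worklist.push_back(dep));
        }
    }
}

Termination is guaranteed because the tracked set only ever grows and is bounded by the number of whitelisted items.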


@ -1,17 +1,17 @@
//! Determining which types for which we can emit `#[derive(Debug)]`.
use super::{ConstrainResult, MonotoneFramework, generate_dependencies};
use std::collections::HashSet;
use std::collections::HashMap;
use ir::comp::CompKind;
use ir::comp::Field;
use ir::comp::FieldMethods;
use ir::context::{BindgenContext, ItemId};
use ir::derive::CanTriviallyDeriveDebug;
use ir::item::IsOpaque;
use ir::traversal::EdgeKind;
use ir::ty::RUST_DERIVE_IN_ARRAY_LIMIT;
use ir::ty::TypeKind;
use ir::comp::Field;
use ir::comp::FieldMethods;
use ir::derive::CanTriviallyDeriveDebug;
use ir::comp::CompKind;
use std::collections::HashMap;
use std::collections::HashSet;
/// An analysis that finds for each IR item whether debug cannot be derived.
///
@ -34,7 +34,8 @@ use ir::comp::CompKind;
/// cannot derive debug.
#[derive(Debug, Clone)]
pub struct CannotDeriveDebug<'ctx, 'gen>
where 'gen: 'ctx
where
'gen: 'ctx,
{
ctx: &'ctx BindgenContext<'gen>,
@ -130,7 +131,7 @@ impl<'ctx, 'gen> MonotoneFramework for CannotDeriveDebug<'ctx, 'gen> {
if ty.is_opaque(self.ctx, item) {
let layout_can_derive = ty.layout(self.ctx).map_or(true, |l| {
l.opaque().can_trivially_derive_debug(self.ctx, ())
l.opaque().can_trivially_derive_debug()
});
return if layout_can_derive {
trace!(" we can trivially derive Debug for the layout");
@ -141,7 +142,10 @@ impl<'ctx, 'gen> MonotoneFramework for CannotDeriveDebug<'ctx, 'gen> {
};
}
if ty.layout(self.ctx).map_or(false, |l| l.align > RUST_DERIVE_IN_ARRAY_LIMIT) {
if ty.layout(self.ctx).map_or(false, |l| {
l.align > RUST_DERIVE_IN_ARRAY_LIMIT
})
{
// We have to be conservative: the struct *could* have enough
// padding that we emit an array that is longer than
// `RUST_DERIVE_IN_ARRAY_LIMIT`. If we moved padding calculations
@ -162,7 +166,7 @@ impl<'ctx, 'gen> MonotoneFramework for CannotDeriveDebug<'ctx, 'gen> {
TypeKind::Enum(..) |
TypeKind::Reference(..) |
TypeKind::BlockPointer |
TypeKind::Named |
TypeKind::TypeParam |
TypeKind::UnresolvedTypeRef(..) |
TypeKind::ObjCInterface(..) |
TypeKind::ObjCId |
@ -173,8 +177,10 @@ impl<'ctx, 'gen> MonotoneFramework for CannotDeriveDebug<'ctx, 'gen> {
TypeKind::Array(t, len) => {
if self.cannot_derive_debug.contains(&t) {
trace!(" arrays of T for which we cannot derive Debug \
also cannot derive Debug");
trace!(
" arrays of T for which we cannot derive Debug \
also cannot derive Debug"
);
return self.insert(id);
}
@ -191,12 +197,16 @@ impl<'ctx, 'gen> MonotoneFramework for CannotDeriveDebug<'ctx, 'gen> {
TypeKind::TemplateAlias(t, _) |
TypeKind::Alias(t) => {
if self.cannot_derive_debug.contains(&t) {
trace!(" aliases and type refs to T which cannot derive \
Debug also cannot derive Debug");
trace!(
" aliases and type refs to T which cannot derive \
Debug also cannot derive Debug"
);
self.insert(id)
} else {
trace!(" aliases and type refs to T which can derive \
Debug can also derive Debug");
trace!(
" aliases and type refs to T which can derive \
Debug can also derive Debug"
);
ConstrainResult::Same
}
}
@ -208,14 +218,15 @@ impl<'ctx, 'gen> MonotoneFramework for CannotDeriveDebug<'ctx, 'gen> {
);
if info.kind() == CompKind::Union {
if self.ctx.options().unstable_rust {
if self.ctx.options().rust_features().untagged_union() {
trace!(" cannot derive Debug for Rust unions");
return self.insert(id);
}
if ty.layout(self.ctx)
.map_or(true,
|l| l.opaque().can_trivially_derive_debug(self.ctx, ())) {
if ty.layout(self.ctx).map_or(true, |l| {
l.opaque().can_trivially_derive_debug()
})
{
trace!(" union layout can trivially derive Debug");
return ConstrainResult::Same;
} else {
@ -224,32 +235,33 @@ impl<'ctx, 'gen> MonotoneFramework for CannotDeriveDebug<'ctx, 'gen> {
}
}
let bases_cannot_derive = info.base_members()
.iter()
.any(|base| self.cannot_derive_debug.contains(&base.ty));
let bases_cannot_derive =
info.base_members().iter().any(|base| {
self.cannot_derive_debug.contains(&base.ty)
});
if bases_cannot_derive {
trace!(" base members cannot derive Debug, so we can't \
either");
trace!(
" base members cannot derive Debug, so we can't \
either"
);
return self.insert(id);
}
let fields_cannot_derive = info.fields()
.iter()
.any(|f| {
match *f {
Field::DataMember(ref data) => {
self.cannot_derive_debug.contains(&data.ty())
}
Field::Bitfields(ref bfu) => {
bfu.bitfields()
.iter().any(|b| {
self.cannot_derive_debug.contains(&b.ty())
})
}
let fields_cannot_derive =
info.fields().iter().any(|f| match *f {
Field::DataMember(ref data) => {
self.cannot_derive_debug.contains(&data.ty())
}
Field::Bitfields(ref bfu) => {
bfu.bitfields().iter().any(|b| {
self.cannot_derive_debug.contains(&b.ty())
})
}
});
if fields_cannot_derive {
trace!(" fields cannot derive Debug, so we can't either");
trace!(
" fields cannot derive Debug, so we can't either"
);
return self.insert(id);
}
@ -258,10 +270,13 @@ impl<'ctx, 'gen> MonotoneFramework for CannotDeriveDebug<'ctx, 'gen> {
}
TypeKind::Pointer(inner) => {
let inner_type = self.ctx.resolve_type(inner).canonical_type(self.ctx);
let inner_type =
self.ctx.resolve_type(inner).canonical_type(self.ctx);
if let TypeKind::Function(ref sig) = *inner_type.kind() {
if !sig.can_trivially_derive_debug(&self.ctx, ()) {
trace!(" function pointer that can't trivially derive Debug");
if !sig.can_trivially_derive_debug() {
trace!(
" function pointer that can't trivially derive Debug"
);
return self.insert(id);
}
}
@ -270,12 +285,15 @@ impl<'ctx, 'gen> MonotoneFramework for CannotDeriveDebug<'ctx, 'gen> {
}
TypeKind::TemplateInstantiation(ref template) => {
let args_cannot_derive = template.template_arguments()
.iter()
.any(|arg| self.cannot_derive_debug.contains(&arg));
let args_cannot_derive =
template.template_arguments().iter().any(|arg| {
self.cannot_derive_debug.contains(&arg)
});
if args_cannot_derive {
trace!(" template args cannot derive Debug, so \
instantiation can't either");
trace!(
" template args cannot derive Debug, so \
instantiation can't either"
);
return self.insert(id);
}
@ -283,11 +301,14 @@ impl<'ctx, 'gen> MonotoneFramework for CannotDeriveDebug<'ctx, 'gen> {
!template.template_definition().is_opaque(self.ctx, &()),
"The early ty.is_opaque check should have handled this case"
);
let def_cannot_derive = self.cannot_derive_debug
.contains(&template.template_definition());
let def_cannot_derive = self.cannot_derive_debug.contains(
&template.template_definition(),
);
if def_cannot_derive {
trace!(" template definition cannot derive Debug, so \
instantiation can't either");
trace!(
" template definition cannot derive Debug, so \
instantiation can't either"
);
return self.insert(id);
}
@ -304,7 +325,8 @@ impl<'ctx, 'gen> MonotoneFramework for CannotDeriveDebug<'ctx, 'gen> {
}
fn each_depending_on<F>(&self, id: ItemId, mut f: F)
where F: FnMut(ItemId),
where
F: FnMut(ItemId),
{
if let Some(edges) = self.dependencies.get(&id) {
for item in edges {


@ -0,0 +1,394 @@
//! Determining for which types we can emit `#[derive(Default)]`.
use super::{ConstrainResult, HasVtable, MonotoneFramework};
use ir::comp::CompKind;
use ir::comp::Field;
use ir::comp::FieldMethods;
use ir::context::{BindgenContext, ItemId};
use ir::derive::CanTriviallyDeriveDefault;
use ir::item::IsOpaque;
use ir::item::ItemSet;
use ir::traversal::EdgeKind;
use ir::traversal::Trace;
use ir::ty::RUST_DERIVE_IN_ARRAY_LIMIT;
use ir::ty::TypeKind;
use std::collections::HashMap;
use std::collections::HashSet;
/// An analysis that finds for each IR item whether default cannot be derived.
///
/// We use the monotone constraint function `cannot_derive_default`, defined as
/// follows:
///
/// * If T is Opaque and the layout of the type is known, get this layout as an
/// opaque type and check whether it can be derived using trivial checks.
/// * If T is an array type, default cannot be derived if the length of the
/// array is larger than the limit or if the element type cannot derive
/// default.
/// * If T is a type alias, a templated alias or an indirection to another type,
/// default cannot be derived if the type T refers to cannot derive default.
/// * If T is a compound type, default cannot be derived if any of its base
/// members or fields cannot derive default.
#[derive(Debug, Clone)]
pub struct CannotDeriveDefault<'ctx, 'gen>
where
'gen: 'ctx,
{
ctx: &'ctx BindgenContext<'gen>,
// The incremental result of this analysis's computation. Everything in this
// set cannot derive default.
cannot_derive_default: HashSet<ItemId>,
// Dependencies saying that if a key ItemId has been inserted into the
// `cannot_derive_default` set, then each of the ids in Vec<ItemId> need to be
// considered again.
//
// This is a subset of the natural IR graph with reversed edges, where we
// only include the edges from the IR graph that can affect whether a type
// can derive default or not.
dependencies: HashMap<ItemId, Vec<ItemId>>,
}
impl<'ctx, 'gen> CannotDeriveDefault<'ctx, 'gen> {
fn consider_edge(kind: EdgeKind) -> bool {
match kind {
// These are the only edges that can affect whether a type can derive
// default or not.
EdgeKind::BaseMember |
EdgeKind::Field |
EdgeKind::TypeReference |
EdgeKind::VarType |
EdgeKind::TemplateArgument |
EdgeKind::TemplateDeclaration |
EdgeKind::TemplateParameterDefinition => true,
EdgeKind::Constructor |
EdgeKind::Destructor |
EdgeKind::FunctionReturn |
EdgeKind::FunctionParameter |
EdgeKind::InnerType |
EdgeKind::InnerVar |
EdgeKind::Method => false,
EdgeKind::Generic => false,
}
}
fn insert(&mut self, id: ItemId) -> ConstrainResult {
trace!("inserting {:?} into the cannot_derive_default set", id);
let was_not_already_in_set = self.cannot_derive_default.insert(id);
assert!(
was_not_already_in_set,
"We shouldn't try and insert {:?} twice because if it was \
already in the set, `constrain` should have exited early.",
id
);
ConstrainResult::Changed
}
}
impl<'ctx, 'gen> MonotoneFramework for CannotDeriveDefault<'ctx, 'gen> {
type Node = ItemId;
type Extra = &'ctx BindgenContext<'gen>;
type Output = HashSet<ItemId>;
fn new(ctx: &'ctx BindgenContext<'gen>) -> CannotDeriveDefault<'ctx, 'gen> {
let mut dependencies = HashMap::new();
let cannot_derive_default = HashSet::new();
let whitelisted_items: HashSet<_> =
ctx.whitelisted_items().iter().cloned().collect();
let whitelisted_and_blacklisted_items: ItemSet = whitelisted_items
.iter()
.cloned()
.flat_map(|i| {
let mut reachable = vec![i];
i.trace(ctx, &mut |s, _| { reachable.push(s); }, &());
reachable
})
.collect();
for item in whitelisted_and_blacklisted_items {
dependencies.entry(item).or_insert(vec![]);
{
// We reverse our natural IR graph edges to find dependencies
// between nodes.
item.trace(
ctx,
&mut |sub_item: ItemId, edge_kind| {
if ctx.whitelisted_items().contains(&sub_item) &&
Self::consider_edge(edge_kind)
{
dependencies
.entry(sub_item)
.or_insert(vec![])
.push(item);
}
},
&(),
);
}
}
CannotDeriveDefault {
ctx,
cannot_derive_default,
dependencies,
}
}
fn initial_worklist(&self) -> Vec<ItemId> {
self.ctx.whitelisted_items().iter().cloned().collect()
}
fn constrain(&mut self, id: ItemId) -> ConstrainResult {
trace!("constrain: {:?}", id);
if self.cannot_derive_default.contains(&id) {
trace!(" already know it cannot derive Default");
return ConstrainResult::Same;
}
let item = self.ctx.resolve_item(id);
let ty = match item.as_type() {
Some(ty) => ty,
None => {
trace!(" not a type; ignoring");
return ConstrainResult::Same;
}
};
if item.is_opaque(self.ctx, &()) {
let layout_can_derive = ty.layout(self.ctx).map_or(true, |l| {
l.opaque().can_trivially_derive_default()
});
return if layout_can_derive {
trace!(" we can trivially derive Default for the layout");
ConstrainResult::Same
} else {
trace!(" we cannot derive Default for the layout");
self.insert(id)
};
}
if ty.layout(self.ctx).map_or(false, |l| {
l.align > RUST_DERIVE_IN_ARRAY_LIMIT
})
{
// We have to be conservative: the struct *could* have enough
// padding that we emit an array that is longer than
// `RUST_DERIVE_IN_ARRAY_LIMIT`. If we moved padding calculations
// into the IR and computed them before this analysis, then we could
// be precise rather than conservative here.
return self.insert(id);
}
match *ty.kind() {
// Handle the simple cases. These can derive Default without further
// information.
TypeKind::Function(..) |
TypeKind::Int(..) |
TypeKind::Float(..) |
TypeKind::Complex(..) => {
trace!(" simple type that can always derive Default");
ConstrainResult::Same
}
TypeKind::Void |
TypeKind::TypeParam |
TypeKind::Reference(..) |
TypeKind::NullPtr |
TypeKind::Pointer(..) |
TypeKind::BlockPointer |
TypeKind::ObjCId |
TypeKind::ObjCSel |
TypeKind::ObjCInterface(..) |
TypeKind::Enum(..) => {
trace!(" types that always cannot derive Default");
self.insert(id)
}
TypeKind::Array(t, len) => {
if self.cannot_derive_default.contains(&t) {
trace!(
" arrays of T for which we cannot derive Default \
also cannot derive Default"
);
return self.insert(id);
}
if len <= RUST_DERIVE_IN_ARRAY_LIMIT {
trace!(" array is small enough to derive Default");
ConstrainResult::Same
} else {
trace!(" array is too large to derive Default");
self.insert(id)
}
}
TypeKind::ResolvedTypeRef(t) |
TypeKind::TemplateAlias(t, _) |
TypeKind::Alias(t) => {
if self.cannot_derive_default.contains(&t) {
trace!(
" aliases and type refs to T which cannot derive \
Default also cannot derive Default"
);
self.insert(id)
} else {
trace!(
" aliases and type refs to T which can derive \
Default can also derive Default"
);
ConstrainResult::Same
}
}
TypeKind::Comp(ref info) => {
assert!(
!info.has_non_type_template_params(),
"The early ty.is_opaque check should have handled this case"
);
if info.kind() == CompKind::Union {
if self.ctx.options().rust_features().untagged_union() {
trace!(" cannot derive Default for Rust unions");
return self.insert(id);
}
if ty.layout(self.ctx).map_or(true, |l| {
l.opaque().can_trivially_derive_default()
})
{
trace!(" union layout can trivially derive Default");
return ConstrainResult::Same;
} else {
trace!(" union layout cannot derive Default");
return self.insert(id);
}
}
if item.has_vtable(self.ctx) {
trace!(" comp with vtable cannot derive Default");
return self.insert(id);
}
let bases_cannot_derive =
info.base_members().iter().any(|base| {
!self.ctx.whitelisted_items().contains(&base.ty) ||
self.cannot_derive_default.contains(&base.ty)
});
if bases_cannot_derive {
trace!(
" base members cannot derive Default, so we can't \
either"
);
return self.insert(id);
}
let fields_cannot_derive =
info.fields().iter().any(|f| match *f {
Field::DataMember(ref data) => {
!self.ctx.whitelisted_items().contains(
&data.ty(),
) ||
self.cannot_derive_default.contains(&data.ty())
}
Field::Bitfields(ref bfu) => {
bfu.bitfields().iter().any(|b| {
!self.ctx.whitelisted_items().contains(
&b.ty(),
) ||
self.cannot_derive_default.contains(&b.ty())
})
}
});
if fields_cannot_derive {
trace!(
" fields cannot derive Default, so we can't either"
);
return self.insert(id);
}
trace!(" comp can derive Default");
ConstrainResult::Same
}
TypeKind::TemplateInstantiation(ref template) => {
if self.ctx.whitelisted_items().contains(
&template.template_definition(),
)
{
let args_cannot_derive =
template.template_arguments().iter().any(|arg| {
self.cannot_derive_default.contains(&arg)
});
if args_cannot_derive {
trace!(
" template args cannot derive Default, so \
instantiation can't either"
);
return self.insert(id);
}
assert!(
!template.template_definition().is_opaque(self.ctx, &()),
"The early ty.is_opaque check should have handled this case"
);
let def_cannot_derive =
self.cannot_derive_default.contains(&template
.template_definition());
if def_cannot_derive {
trace!(
" template definition cannot derive Default, so \
instantiation can't either"
);
return self.insert(id);
}
trace!(" template instantiation can derive Default");
ConstrainResult::Same
} else {
trace!(
" blacklisted template instantiation cannot derive default"
);
return self.insert(id);
}
}
TypeKind::Opaque => {
unreachable!(
"The early ty.is_opaque check should have handled this case"
)
}
TypeKind::UnresolvedTypeRef(..) => {
unreachable!(
"Type with unresolved type ref can't reach derive default"
)
}
}
}
fn each_depending_on<F>(&self, id: ItemId, mut f: F)
where
F: FnMut(ItemId),
{
if let Some(edges) = self.dependencies.get(&id) {
for item in edges {
trace!("enqueue {:?} into worklist", item);
f(*item);
}
}
}
}
impl<'ctx, 'gen> From<CannotDeriveDefault<'ctx, 'gen>> for HashSet<ItemId> {
fn from(analysis: CannotDeriveDefault<'ctx, 'gen>) -> Self {
analysis.cannot_derive_default
}
}
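Aside (editor's illustration, not part of the vendored diff): the RUST_DERIVE_IN_ARRAY_LIMIT check above is conservative because the standard library only provides `Default` (and, at the time of this vendoring, most other derivable traits) for arrays of at most 32 elements, so a generated struct with a longer array field could not simply `#[derive(Default)]`. A minimal sketch of that boundary:

#[derive(Default, Debug)]
struct Small {
    buf: [u8; 32], // [u8; 32] implements Default, so the derive works
}

// #[derive(Default)]              // this would fail to compile:
// struct TooBig { buf: [u8; 33] } // `[u8; 33]` does not implement `Default`

fn main() {
    let s = Small::default();
    assert_eq!(s.buf.len(), 32);
    println!("{:?}", s);
}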

third_party/rust/bindgen-0.30.0/src/ir/analysis/derive_hash.rs (vendored, new file)

@ -0,0 +1,361 @@
//! Determining for which types we can emit `#[derive(Hash)]`.
use super::{ConstrainResult, MonotoneFramework, generate_dependencies};
use ir::comp::CompKind;
use ir::comp::Field;
use ir::comp::FieldMethods;
use ir::context::{BindgenContext, ItemId};
use ir::derive::CanTriviallyDeriveHash;
use ir::item::IsOpaque;
use ir::traversal::EdgeKind;
use ir::ty::RUST_DERIVE_IN_ARRAY_LIMIT;
use ir::ty::TypeKind;
use std::collections::HashMap;
use std::collections::HashSet;
/// An analysis that finds for each IR item whether hash cannot be derived.
///
/// We use the monotone constraint function `cannot_derive_hash`, defined as
/// follows:
///
/// * If T is Opaque and the layout of the type is known, get this layout as an
/// opaque type and check whether it can be derived using trivial checks.
/// * If T is an array type, hash cannot be derived if the length of the array
/// is larger than the limit or if the element type cannot derive hash.
/// * If T is a type alias, a templated alias or an indirection to another type,
/// hash cannot be derived if the type T refers to cannot derive hash.
/// * If T is a compound type, hash cannot be derived if any of its base
/// members or fields cannot derive hash.
/// * If T is a pointer, hash cannot be derived if T is a function pointer
/// whose signature cannot derive hash.
/// * If T is an instantiation of an abstract template definition, hash cannot
/// be derived if any of the template arguments or the template definition
/// cannot derive hash.
#[derive(Debug, Clone)]
pub struct CannotDeriveHash<'ctx, 'gen>
where
'gen: 'ctx,
{
ctx: &'ctx BindgenContext<'gen>,
// The incremental result of this analysis's computation. Everything in this
// set cannot derive hash.
cannot_derive_hash: HashSet<ItemId>,
// Dependencies saying that if a key ItemId has been inserted into the
// `cannot_derive_hash` set, then each of the ids in Vec<ItemId> need to be
// considered again.
//
// This is a subset of the natural IR graph with reversed edges, where we
// only include the edges from the IR graph that can affect whether a type
// can derive hash or not.
dependencies: HashMap<ItemId, Vec<ItemId>>,
}
impl<'ctx, 'gen> CannotDeriveHash<'ctx, 'gen> {
fn consider_edge(kind: EdgeKind) -> bool {
match kind {
// These are the only edges that can affect whether a type can derive
// hash or not.
EdgeKind::BaseMember |
EdgeKind::Field |
EdgeKind::TypeReference |
EdgeKind::VarType |
EdgeKind::TemplateArgument |
EdgeKind::TemplateDeclaration |
EdgeKind::TemplateParameterDefinition => true,
EdgeKind::Constructor |
EdgeKind::Destructor |
EdgeKind::FunctionReturn |
EdgeKind::FunctionParameter |
EdgeKind::InnerType |
EdgeKind::InnerVar |
EdgeKind::Method => false,
EdgeKind::Generic => false,
}
}
fn insert(&mut self, id: ItemId) -> ConstrainResult {
trace!("inserting {:?} into the cannot_derive_hash set", id);
let was_not_already_in_set = self.cannot_derive_hash.insert(id);
assert!(
was_not_already_in_set,
"We shouldn't try and insert {:?} twice because if it was \
already in the set, `constrain` should have exited early.",
id
);
ConstrainResult::Changed
}
}
impl<'ctx, 'gen> MonotoneFramework for CannotDeriveHash<'ctx, 'gen> {
type Node = ItemId;
type Extra = &'ctx BindgenContext<'gen>;
type Output = HashSet<ItemId>;
fn new(ctx: &'ctx BindgenContext<'gen>) -> CannotDeriveHash<'ctx, 'gen> {
let cannot_derive_hash = HashSet::new();
let dependencies = generate_dependencies(ctx, Self::consider_edge);
CannotDeriveHash {
ctx,
cannot_derive_hash,
dependencies,
}
}
fn initial_worklist(&self) -> Vec<ItemId> {
self.ctx.whitelisted_items().iter().cloned().collect()
}
fn constrain(&mut self, id: ItemId) -> ConstrainResult {
trace!("constrain: {:?}", id);
if self.cannot_derive_hash.contains(&id) {
trace!(" already know it cannot derive Hash");
return ConstrainResult::Same;
}
let item = self.ctx.resolve_item(id);
let ty = match item.as_type() {
Some(ty) => ty,
None => {
trace!(" not a type; ignoring");
return ConstrainResult::Same;
}
};
if item.is_opaque(self.ctx, &()) {
let layout_can_derive = ty.layout(self.ctx).map_or(true, |l| {
l.opaque().can_trivially_derive_hash()
});
return if layout_can_derive {
trace!(" we can trivially derive Hash for the layout");
ConstrainResult::Same
} else {
trace!(" we cannot derive Hash for the layout");
self.insert(id)
};
}
if ty.layout(self.ctx).map_or(false, |l| {
l.align > RUST_DERIVE_IN_ARRAY_LIMIT
})
{
// We have to be conservative: the struct *could* have enough
// padding that we emit an array that is longer than
// `RUST_DERIVE_IN_ARRAY_LIMIT`. If we moved padding calculations
// into the IR and computed them before this analysis, then we could
// be precise rather than conservative here.
return self.insert(id);
}
match *ty.kind() {
// Handle the simple cases. These can derive hash without further
// information.
TypeKind::Void |
TypeKind::NullPtr |
TypeKind::Int(..) |
TypeKind::Enum(..) |
TypeKind::TypeParam |
TypeKind::UnresolvedTypeRef(..) |
TypeKind::BlockPointer |
TypeKind::Reference(..) |
TypeKind::ObjCInterface(..) |
TypeKind::ObjCId |
TypeKind::ObjCSel => {
trace!(" simple type that can always derive Hash");
ConstrainResult::Same
}
TypeKind::Complex(..) |
TypeKind::Float(..) => {
trace!(" float cannot derive Hash");
self.insert(id)
}
TypeKind::Array(t, len) => {
if self.cannot_derive_hash.contains(&t) {
trace!(
" arrays of T for which we cannot derive Hash \
also cannot derive Hash"
);
return self.insert(id);
}
if len <= RUST_DERIVE_IN_ARRAY_LIMIT {
trace!(" array is small enough to derive Hash");
ConstrainResult::Same
} else {
trace!(" array is too large to derive Hash");
self.insert(id)
}
}
TypeKind::Pointer(inner) => {
let inner_type =
self.ctx.resolve_type(inner).canonical_type(self.ctx);
if let TypeKind::Function(ref sig) = *inner_type.kind() {
if !sig.can_trivially_derive_hash() {
trace!(
" function pointer that can't trivially derive Hash"
);
return self.insert(id);
}
}
trace!(" pointers can derive Hash");
ConstrainResult::Same
}
TypeKind::Function(ref sig) => {
if !sig.can_trivially_derive_hash() {
trace!(" function that can't trivially derive Hash");
return self.insert(id);
}
trace!(" function can derive Hash");
ConstrainResult::Same
}
TypeKind::ResolvedTypeRef(t) |
TypeKind::TemplateAlias(t, _) |
TypeKind::Alias(t) => {
if self.cannot_derive_hash.contains(&t) {
trace!(
" aliases and type refs to T which cannot derive \
Hash also cannot derive Hash"
);
self.insert(id)
} else {
trace!(
" aliases and type refs to T which can derive \
Hash can also derive Hash"
);
ConstrainResult::Same
}
}
TypeKind::Comp(ref info) => {
assert!(
!info.has_non_type_template_params(),
"The early ty.is_opaque check should have handled this case"
);
if info.kind() == CompKind::Union {
if self.ctx.options().rust_features().untagged_union() {
trace!(" cannot derive Hash for Rust unions");
return self.insert(id);
}
if ty.layout(self.ctx).map_or(true, |l| {
l.opaque().can_trivially_derive_hash()
})
{
trace!(" union layout can trivially derive Hash");
return ConstrainResult::Same;
} else {
trace!(" union layout cannot derive Hash");
return self.insert(id);
}
}
let bases_cannot_derive =
info.base_members().iter().any(|base| {
!self.ctx.whitelisted_items().contains(&base.ty) ||
self.cannot_derive_hash.contains(&base.ty)
});
if bases_cannot_derive {
trace!(
" base members cannot derive Hash, so we can't \
either"
);
return self.insert(id);
}
let fields_cannot_derive =
info.fields().iter().any(|f| match *f {
Field::DataMember(ref data) => {
!self.ctx.whitelisted_items().contains(
&data.ty(),
) ||
self.cannot_derive_hash.contains(&data.ty())
}
Field::Bitfields(ref bfu) => {
bfu.bitfields().iter().any(|b| {
!self.ctx.whitelisted_items().contains(
&b.ty(),
) ||
self.cannot_derive_hash.contains(&b.ty())
})
}
});
if fields_cannot_derive {
trace!(" fields cannot derive Hash, so we can't either");
return self.insert(id);
}
trace!(" comp can derive Hash");
ConstrainResult::Same
}
TypeKind::TemplateInstantiation(ref template) => {
let args_cannot_derive =
template.template_arguments().iter().any(|arg| {
self.cannot_derive_hash.contains(&arg)
});
if args_cannot_derive {
trace!(
" template args cannot derive Hash, so \
instantiation can't either"
);
return self.insert(id);
}
assert!(
!template.template_definition().is_opaque(self.ctx, &()),
"The early ty.is_opaque check should have handled this case"
);
let def_cannot_derive = self.cannot_derive_hash.contains(
&template.template_definition(),
);
if def_cannot_derive {
trace!(
" template definition cannot derive Hash, so \
instantiation can't either"
);
return self.insert(id);
}
trace!(" template instantiation can derive Hash");
ConstrainResult::Same
}
TypeKind::Opaque => {
unreachable!(
"The early ty.is_opaque check should have handled this case"
)
}
}
}
fn each_depending_on<F>(&self, id: ItemId, mut f: F)
where
F: FnMut(ItemId),
{
if let Some(edges) = self.dependencies.get(&id) {
for item in edges {
trace!("enqueue {:?} into worklist", item);
f(*item);
}
}
}
}
impl<'ctx, 'gen> From<CannotDeriveHash<'ctx, 'gen>> for HashSet<ItemId> {
fn from(analysis: CannotDeriveHash<'ctx, 'gen>) -> Self {
analysis.cannot_derive_hash
}
}
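Aside (editor's illustration, not part of the vendored diff): the Float/Complex arm above marks such types as unable to derive `Hash` because `f32` and `f64` do not implement `Hash`, while purely integral layouts hash fine. A minimal sketch:

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

#[derive(Hash)]
struct IntPair {
    a: u32,
    b: i64,
}

// #[derive(Hash)]              // would not compile:
// struct WithFloat { x: f32 }  // `f32` does not implement `Hash`

fn main() {
    let mut hasher = DefaultHasher::new();
    IntPair { a: 1, b: 2 }.hash(&mut hasher);
    println!("hash = {:x}", hasher.finish());
}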


@ -0,0 +1,179 @@
//! Determining which types have destructors
use super::{ConstrainResult, MonotoneFramework, generate_dependencies};
use ir::context::{BindgenContext, ItemId};
use ir::traversal::EdgeKind;
use ir::comp::{CompKind, Field, FieldMethods};
use ir::ty::TypeKind;
use std::collections::HashMap;
use std::collections::HashSet;
/// An analysis that finds for each IR item whether it has a destructor or not
///
/// We use the monotone function `has_destructor`, defined as follows:
///
/// * If T is a type alias, a templated alias, or an indirection to another type,
/// T has a destructor if the type T refers to has a destructor.
/// * If T is a compound type, T has a destructor if we saw a destructor when parsing it,
/// or if it's a struct, T has a destructor if any of its base members has a destructor,
/// or if any of its fields have a destructor.
/// * If T is an instantiation of an abstract template definition, T has
/// a destructor if its template definition has a destructor,
/// or if any of the template arguments has a destructor.
/// * If T is the type of a field, that field has a destructor if it's not a bitfield,
/// and if T has a destructor.
#[derive(Debug, Clone)]
pub struct HasDestructorAnalysis<'ctx, 'gen>
where
'gen: 'ctx,
{
ctx: &'ctx BindgenContext<'gen>,
// The incremental result of this analysis's computation. Everything in this
// set definitely has a destructor.
have_destructor: HashSet<ItemId>,
// Dependencies saying that if a key ItemId has been inserted into the
// `have_destructor` set, then each of the ids in Vec<ItemId> need to be
// considered again.
//
// This is a subset of the natural IR graph with reversed edges, where we
// only include the edges from the IR graph that can affect whether a type
// has a destructor or not.
dependencies: HashMap<ItemId, Vec<ItemId>>,
}
impl<'ctx, 'gen> HasDestructorAnalysis<'ctx, 'gen> {
fn consider_edge(kind: EdgeKind) -> bool {
match kind {
// These are the only edges that can affect whether a type has a
// destructor or not.
EdgeKind::TypeReference |
EdgeKind::BaseMember |
EdgeKind::Field |
EdgeKind::TemplateArgument |
EdgeKind::TemplateDeclaration => true,
_ => false,
}
}
fn insert(&mut self, id: ItemId) -> ConstrainResult {
let was_not_already_in_set = self.have_destructor.insert(id);
assert!(
was_not_already_in_set,
"We shouldn't try and insert {:?} twice because if it was \
already in the set, `constrain` should have exited early.",
id
);
ConstrainResult::Changed
}
}
impl<'ctx, 'gen> MonotoneFramework for HasDestructorAnalysis<'ctx, 'gen> {
type Node = ItemId;
type Extra = &'ctx BindgenContext<'gen>;
type Output = HashSet<ItemId>;
fn new(ctx: &'ctx BindgenContext<'gen>) -> Self {
let have_destructor = HashSet::new();
let dependencies = generate_dependencies(ctx, Self::consider_edge);
HasDestructorAnalysis {
ctx,
have_destructor,
dependencies,
}
}
fn initial_worklist(&self) -> Vec<ItemId> {
self.ctx.whitelisted_items().iter().cloned().collect()
}
fn constrain(&mut self, id: ItemId) -> ConstrainResult {
if self.have_destructor.contains(&id) {
// We've already computed that this type has a destructor and that can't
// change.
return ConstrainResult::Same;
}
let item = self.ctx.resolve_item(id);
let ty = match item.as_type() {
None => return ConstrainResult::Same,
Some(ty) => ty,
};
match *ty.kind() {
TypeKind::TemplateAlias(t, _) |
TypeKind::Alias(t) |
TypeKind::ResolvedTypeRef(t) => {
if self.have_destructor.contains(&t) {
self.insert(id)
} else {
ConstrainResult::Same
}
}
TypeKind::Comp(ref info) => {
if info.has_own_destructor() {
return self.insert(id);
}
match info.kind() {
CompKind::Union => ConstrainResult::Same,
CompKind::Struct => {
let base_or_field_destructor =
info.base_members().iter().any(|base| {
self.have_destructor.contains(&base.ty)
}) ||
info.fields().iter().any(|field| {
match *field {
Field::DataMember(ref data) =>
self.have_destructor.contains(&data.ty()),
Field::Bitfields(_) => false
}
});
if base_or_field_destructor {
self.insert(id)
} else {
ConstrainResult::Same
}
}
}
}
TypeKind::TemplateInstantiation(ref inst) => {
let definition_or_arg_destructor =
self.have_destructor.contains(&inst.template_definition())
||
inst.template_arguments().iter().any(|arg| {
self.have_destructor.contains(arg)
});
if definition_or_arg_destructor {
self.insert(id)
} else {
ConstrainResult::Same
}
}
_ => ConstrainResult::Same,
}
}
fn each_depending_on<F>(&self, id: ItemId, mut f: F)
where
F: FnMut(ItemId),
{
if let Some(edges) = self.dependencies.get(&id) {
for item in edges {
trace!("enqueue {:?} into worklist", item);
f(*item);
}
}
}
}
impl<'ctx, 'gen> From<HasDestructorAnalysis<'ctx, 'gen>> for HashSet<ItemId> {
fn from(analysis: HasDestructorAnalysis<'ctx, 'gen>) -> Self {
analysis.have_destructor
}
}
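Aside (editor's illustration, not part of the vendored diff): the propagation rule above, where a struct has a destructor when any base or field does, mirrors how Rust's own drop glue behaves, and `std::mem::needs_drop` makes that easy to observe:

use std::mem::needs_drop;

struct PlainData {
    _x: u32,
    _y: [u8; 16],
}

struct OwnsHeap {
    _x: u32,
    _buf: String, // String has a destructor, so OwnsHeap needs drop glue too
}

fn main() {
    assert!(!needs_drop::<PlainData>()); // only Copy data: no destructor
    assert!(needs_drop::<String>());
    assert!(needs_drop::<OwnsHeap>());   // destructor-ness propagated from the field
    println!("ok");
}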

third_party/rust/bindgen-0.30.0/src/ir/analysis/has_float.rs (vendored, new file)

@ -0,0 +1,239 @@
//! Determining which types have a float.
use super::{ConstrainResult, MonotoneFramework, generate_dependencies};
use std::collections::HashSet;
use std::collections::HashMap;
use ir::context::{BindgenContext, ItemId};
use ir::traversal::EdgeKind;
use ir::ty::TypeKind;
use ir::comp::Field;
use ir::comp::FieldMethods;
/// An analysis that finds for each IR item whether it has a float or not.
///
/// We use the monotone constraint function `has_float`,
/// defined as follows:
///
/// * If T is a float or a complex float, T trivially has a float.
/// * If T is a type alias, a templated alias or an indirection to another type,
/// it has a float if the type it refers to has one.
/// * If T is a compound type, it has a float if any of its base members or
/// fields has one.
/// * If T is an instantiation of an abstract template definition, T has a
/// float if any of the template arguments or the template definition has one.
#[derive(Debug, Clone)]
pub struct HasFloat<'ctx, 'gen>
where 'gen: 'ctx
{
ctx: &'ctx BindgenContext<'gen>,
// The incremental result of this analysis's computation. Everything in this
// set has float.
has_float: HashSet<ItemId>,
// Dependencies saying that if a key ItemId has been inserted into the
// `has_float` set, then each of the ids in Vec<ItemId> need to be
// considered again.
//
// This is a subset of the natural IR graph with reversed edges, where we
// only include the edges from the IR graph that can affect whether a type
// has float or not.
dependencies: HashMap<ItemId, Vec<ItemId>>,
}
impl<'ctx, 'gen> HasFloat<'ctx, 'gen> {
fn consider_edge(kind: EdgeKind) -> bool {
match kind {
EdgeKind::BaseMember |
EdgeKind::Field |
EdgeKind::TypeReference |
EdgeKind::VarType |
EdgeKind::TemplateArgument |
EdgeKind::TemplateDeclaration |
EdgeKind::TemplateParameterDefinition => true,
EdgeKind::Constructor |
EdgeKind::Destructor |
EdgeKind::FunctionReturn |
EdgeKind::FunctionParameter |
EdgeKind::InnerType |
EdgeKind::InnerVar |
EdgeKind::Method => false,
EdgeKind::Generic => false,
}
}
fn insert(&mut self, id: ItemId) -> ConstrainResult {
trace!("inserting {:?} into the has_float set", id);
let was_not_already_in_set = self.has_float.insert(id);
assert!(
was_not_already_in_set,
"We shouldn't try and insert {:?} twice because if it was \
already in the set, `constrain` should have exited early.",
id
);
ConstrainResult::Changed
}
}
impl<'ctx, 'gen> MonotoneFramework for HasFloat<'ctx, 'gen> {
type Node = ItemId;
type Extra = &'ctx BindgenContext<'gen>;
type Output = HashSet<ItemId>;
fn new(ctx: &'ctx BindgenContext<'gen>) -> HasFloat<'ctx, 'gen> {
let has_float = HashSet::new();
let dependencies = generate_dependencies(ctx, Self::consider_edge);
HasFloat {
ctx,
has_float,
dependencies,
}
}
fn initial_worklist(&self) -> Vec<ItemId> {
self.ctx.whitelisted_items().iter().cloned().collect()
}
fn constrain(&mut self, id: ItemId) -> ConstrainResult {
trace!("constrain: {:?}", id);
if self.has_float.contains(&id) {
trace!(" already know it has float");
return ConstrainResult::Same;
}
let item = self.ctx.resolve_item(id);
let ty = match item.as_type() {
Some(ty) => ty,
None => {
trace!(" not a type; ignoring");
return ConstrainResult::Same;
}
};
match *ty.kind() {
TypeKind::Void |
TypeKind::NullPtr |
TypeKind::Int(..) |
TypeKind::Function(..) |
TypeKind::Enum(..) |
TypeKind::Reference(..) |
TypeKind::BlockPointer |
TypeKind::TypeParam |
TypeKind::Opaque |
TypeKind::Pointer(..) |
TypeKind::UnresolvedTypeRef(..) |
TypeKind::ObjCInterface(..) |
TypeKind::ObjCId |
TypeKind::ObjCSel => {
trace!(" simple type that does not have float");
ConstrainResult::Same
}
TypeKind::Float(..) |
TypeKind::Complex(..) => {
trace!(" float type has float");
self.insert(id)
}
TypeKind::Array(t, _) => {
if self.has_float.contains(&t) {
trace!(" Array with type T that has float also has float");
return self.insert(id)
}
trace!(" Array with type T that does not have float also does not have float");
ConstrainResult::Same
}
TypeKind::ResolvedTypeRef(t) |
TypeKind::TemplateAlias(t, _) |
TypeKind::Alias(t) => {
if self.has_float.contains(&t) {
trace!(" aliases and type refs to T which have float \
also have float");
self.insert(id)
} else {
trace!(" aliases and type refs to T which do not have float \
also do not have float");
ConstrainResult::Same
}
}
TypeKind::Comp(ref info) => {
let bases_have = info.base_members()
.iter()
.any(|base| self.has_float.contains(&base.ty));
if bases_have {
trace!(" bases have float, so we also have");
return self.insert(id);
}
let fields_have = info.fields()
.iter()
.any(|f| {
match *f {
Field::DataMember(ref data) => {
self.has_float.contains(&data.ty())
}
Field::Bitfields(ref bfu) => {
bfu.bitfields()
.iter().any(|b| {
self.has_float.contains(&b.ty())
})
},
}
});
if fields_have {
trace!(" fields have float, so we also have");
return self.insert(id);
}
trace!(" comp doesn't have float");
ConstrainResult::Same
}
TypeKind::TemplateInstantiation(ref template) => {
let args_have = template.template_arguments()
.iter()
.any(|arg| self.has_float.contains(&arg));
if args_have {
trace!(" template args have float, so \
instantiation also has float");
return self.insert(id);
}
let def_has = self.has_float
.contains(&template.template_definition());
if def_has {
trace!(" template definition has float, so \
instantiation also has");
return self.insert(id);
}
trace!(" template instantiation does not have float");
ConstrainResult::Same
}
}
}
fn each_depending_on<F>(&self, id: ItemId, mut f: F)
where F: FnMut(ItemId),
{
if let Some(edges) = self.dependencies.get(&id) {
for item in edges {
trace!("enqueue {:?} into worklist", item);
f(*item);
}
}
}
}
impl<'ctx, 'gen> From<HasFloat<'ctx, 'gen>> for HashSet<ItemId> {
fn from(analysis: HasFloat<'ctx, 'gen>) -> Self {
analysis.has_float
}
}
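Aside (editor's illustration, not part of the vendored diff): tracking whether a float is reachable matters because `f32`/`f64` implement only `PartialEq`/`PartialOrd` and neither `Eq` nor `Hash` (NaN is not even equal to itself), so derive decisions downstream presumably consult this set. A quick reminder of the NaN behaviour:

fn main() {
    let nan = std::f64::NAN;
    assert!(nan != nan);                     // PartialEq, but not reflexive for NaN
    assert_eq!(nan.partial_cmp(&nan), None); // and no total order
    println!("ok");
}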


@ -0,0 +1,256 @@
//! Determining which types have type parameters in arrays.
use super::{ConstrainResult, MonotoneFramework, generate_dependencies};
use ir::comp::Field;
use ir::comp::FieldMethods;
use ir::context::{BindgenContext, ItemId};
use ir::traversal::EdgeKind;
use ir::ty::TypeKind;
use std::collections::HashMap;
use std::collections::HashSet;
/// An analysis that finds for each IR item whether it has a type parameter in an array or not.
///
/// We use the monotone constraint function `has_type_parameter_in_array`,
/// defined as follows:
///
/// * If T is an array type over a type parameter, T trivially has one.
/// * If T is a type alias, a templated alias or an indirection to another type,
/// it has a type parameter in an array if the type it refers to has one.
/// * If T is a compound type, it has a type parameter in an array if any of
/// its base members or fields has one.
/// * If T is an instantiation of an abstract template definition, T has a
/// type parameter in an array if any of the template arguments or the
/// template definition has one.
#[derive(Debug, Clone)]
pub struct HasTypeParameterInArray<'ctx, 'gen>
where
'gen: 'ctx,
{
ctx: &'ctx BindgenContext<'gen>,
// The incremental result of this analysis's computation. Everything in this
// set has array.
has_type_parameter_in_array: HashSet<ItemId>,
// Dependencies saying that if a key ItemId has been inserted into the
// `has_type_parameter_in_array` set, then each of the ids in Vec<ItemId> need to be
// considered again.
//
// This is a subset of the natural IR graph with reversed edges, where we
// only include the edges from the IR graph that can affect whether a type
// has array or not.
dependencies: HashMap<ItemId, Vec<ItemId>>,
}
impl<'ctx, 'gen> HasTypeParameterInArray<'ctx, 'gen> {
fn consider_edge(kind: EdgeKind) -> bool {
match kind {
// These are the only edges that can affect whether a type has type parameter
// in array or not.
EdgeKind::BaseMember |
EdgeKind::Field |
EdgeKind::TypeReference |
EdgeKind::VarType |
EdgeKind::TemplateArgument |
EdgeKind::TemplateDeclaration |
EdgeKind::TemplateParameterDefinition => true,
EdgeKind::Constructor |
EdgeKind::Destructor |
EdgeKind::FunctionReturn |
EdgeKind::FunctionParameter |
EdgeKind::InnerType |
EdgeKind::InnerVar |
EdgeKind::Method => false,
EdgeKind::Generic => false,
}
}
fn insert(&mut self, id: ItemId) -> ConstrainResult {
trace!(
"inserting {:?} into the has_type_parameter_in_array set",
id
);
let was_not_already_in_set =
self.has_type_parameter_in_array.insert(id);
assert!(
was_not_already_in_set,
"We shouldn't try and insert {:?} twice because if it was \
already in the set, `constrain` should have exited early.",
id
);
ConstrainResult::Changed
}
}
impl<'ctx, 'gen> MonotoneFramework for HasTypeParameterInArray<'ctx, 'gen> {
type Node = ItemId;
type Extra = &'ctx BindgenContext<'gen>;
type Output = HashSet<ItemId>;
fn new(
ctx: &'ctx BindgenContext<'gen>,
) -> HasTypeParameterInArray<'ctx, 'gen> {
let has_type_parameter_in_array = HashSet::new();
let dependencies = generate_dependencies(ctx, Self::consider_edge);
HasTypeParameterInArray {
ctx,
has_type_parameter_in_array,
dependencies,
}
}
fn initial_worklist(&self) -> Vec<ItemId> {
self.ctx.whitelisted_items().iter().cloned().collect()
}
fn constrain(&mut self, id: ItemId) -> ConstrainResult {
trace!("constrain: {:?}", id);
if self.has_type_parameter_in_array.contains(&id) {
trace!(" already know it has a type parameter in an array");
return ConstrainResult::Same;
}
let item = self.ctx.resolve_item(id);
let ty = match item.as_type() {
Some(ty) => ty,
None => {
trace!(" not a type; ignoring");
return ConstrainResult::Same;
}
};
match *ty.kind() {
// Handle the simple cases. These cannot have a type parameter in an array
// without further information.
TypeKind::Void |
TypeKind::NullPtr |
TypeKind::Int(..) |
TypeKind::Float(..) |
TypeKind::Complex(..) |
TypeKind::Function(..) |
TypeKind::Enum(..) |
TypeKind::Reference(..) |
TypeKind::BlockPointer |
TypeKind::TypeParam |
TypeKind::Opaque |
TypeKind::Pointer(..) |
TypeKind::UnresolvedTypeRef(..) |
TypeKind::ObjCInterface(..) |
TypeKind::ObjCId |
TypeKind::ObjCSel => {
trace!(" simple type that does not have array");
ConstrainResult::Same
}
TypeKind::Array(t, _) => {
let inner_ty =
self.ctx.resolve_type(t).canonical_type(self.ctx);
match *inner_ty.kind() {
TypeKind::TypeParam => {
trace!(" Array with Named type has type parameter");
self.insert(id)
}
_ => {
trace!(
" Array without Named type does not have type parameter"
);
ConstrainResult::Same
}
}
}
TypeKind::ResolvedTypeRef(t) |
TypeKind::TemplateAlias(t, _) |
TypeKind::Alias(t) => {
if self.has_type_parameter_in_array.contains(&t) {
trace!(
" aliases and type refs to T which have array \
also have array"
);
self.insert(id)
} else {
trace!(
" aliases and type refs to T which do not have array \
also do not have array"
);
ConstrainResult::Same
}
}
TypeKind::Comp(ref info) => {
let bases_have = info.base_members().iter().any(|base| {
self.has_type_parameter_in_array.contains(&base.ty)
});
if bases_have {
trace!(" bases have array, so we also have");
return self.insert(id);
}
let fields_have = info.fields().iter().any(|f| match *f {
Field::DataMember(ref data) => {
self.has_type_parameter_in_array.contains(&data.ty())
}
Field::Bitfields(..) => false,
});
if fields_have {
trace!(" fields have array, so we also have");
return self.insert(id);
}
trace!(" comp doesn't have array");
ConstrainResult::Same
}
TypeKind::TemplateInstantiation(ref template) => {
let args_have =
template.template_arguments().iter().any(|arg| {
self.has_type_parameter_in_array.contains(&arg)
});
if args_have {
trace!(
" template args have array, so \
instantiation also has array"
);
return self.insert(id);
}
let def_has = self.has_type_parameter_in_array.contains(
&template.template_definition(),
);
if def_has {
trace!(
" template definition has array, so \
instantiation also has"
);
return self.insert(id);
}
trace!(" template instantiation does not have array");
ConstrainResult::Same
}
}
}
fn each_depending_on<F>(&self, id: ItemId, mut f: F)
where
F: FnMut(ItemId),
{
if let Some(edges) = self.dependencies.get(&id) {
for item in edges {
trace!("enqueue {:?} into worklist", item);
f(*item);
}
}
}
}
impl<'ctx, 'gen> From<HasTypeParameterInArray<'ctx, 'gen>> for HashSet<ItemId> {
fn from(analysis: HasTypeParameterInArray<'ctx, 'gen>) -> Self {
analysis.has_type_parameter_in_array
}
}
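Aside (editor's illustration, not part of the vendored diff): a self-contained miniature of the rules documented above, walking a toy type tree instead of bindgen's IR; the `Ty` enum and helper names are invented for the sketch:

enum Ty {
    Int,
    Param,            // a named type parameter
    Array(Box<Ty>),   // array of some element type
    Alias(Box<Ty>),   // alias / typedef indirection
    Comp(Vec<Ty>),    // compound type with fields
}

fn resolves_to_param(ty: &Ty) -> bool {
    match ty {
        Ty::Param => true,
        Ty::Alias(inner) => resolves_to_param(inner),
        _ => false,
    }
}

fn has_type_param_in_array(ty: &Ty) -> bool {
    match ty {
        Ty::Int | Ty::Param => false,
        Ty::Array(elem) => resolves_to_param(elem),
        Ty::Alias(inner) => has_type_param_in_array(inner),
        Ty::Comp(fields) => fields.iter().any(has_type_param_in_array),
    }
}

fn main() {
    let field = Ty::Alias(Box::new(Ty::Array(Box::new(Ty::Param))));
    assert!(has_type_param_in_array(&Ty::Comp(vec![Ty::Int, field])));
    assert!(!has_type_param_in_array(&Ty::Array(Box::new(Ty::Int))));
    println!("ok");
}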


@ -1,10 +1,11 @@
//! Determining which types have a vtable
use super::{ConstrainResult, MonotoneFramework, generate_dependencies};
use std::collections::HashSet;
use std::collections::HashMap;
use ir::context::{BindgenContext, ItemId};
use ir::traversal::EdgeKind;
use ir::ty::TypeKind;
use std::collections::HashMap;
use std::collections::HashSet;
/// An analysis that finds for each IR item whether it has vtable or not
///
@ -18,7 +19,8 @@ use ir::ty::TypeKind;
/// vtable if template definition has vtable
#[derive(Debug, Clone)]
pub struct HasVtableAnalysis<'ctx, 'gen>
where 'gen: 'ctx
where
'gen: 'ctx,
{
ctx: &'ctx BindgenContext<'gen>,
@ -90,7 +92,7 @@ impl<'ctx, 'gen> MonotoneFramework for HasVtableAnalysis<'ctx, 'gen> {
let item = self.ctx.resolve_item(id);
let ty = match item.as_type() {
None => return ConstrainResult::Same,
Some(ty) => ty
Some(ty) => ty,
};
// TODO #851: figure out a way to handle deriving from template type parameters.
@ -104,7 +106,7 @@ impl<'ctx, 'gen> MonotoneFramework for HasVtableAnalysis<'ctx, 'gen> {
} else {
ConstrainResult::Same
}
},
}
TypeKind::Comp(ref info) => {
if info.has_own_virtual_method() {
@ -118,7 +120,7 @@ impl<'ctx, 'gen> MonotoneFramework for HasVtableAnalysis<'ctx, 'gen> {
} else {
ConstrainResult::Same
}
},
}
TypeKind::TemplateInstantiation(ref inst) => {
if self.have_vtable.contains(&inst.template_definition()) {
@ -126,14 +128,15 @@ impl<'ctx, 'gen> MonotoneFramework for HasVtableAnalysis<'ctx, 'gen> {
} else {
ConstrainResult::Same
}
},
}
_ => ConstrainResult::Same,
}
}
fn each_depending_on<F>(&self, id: ItemId, mut f: F)
where F: FnMut(ItemId),
where
F: FnMut(ItemId),
{
if let Some(edges) = self.dependencies.get(&id) {
for item in edges {


@ -43,8 +43,22 @@ pub use self::template_params::UsedTemplateParameters;
mod derive_debug;
pub use self::derive_debug::CannotDeriveDebug;
mod has_vtable;
pub use self::has_vtable::HasVtableAnalysis;
pub use self::has_vtable::HasVtable;
pub use self::has_vtable::HasVtableAnalysis;
mod has_destructor;
pub use self::has_destructor::HasDestructorAnalysis;
mod derive_default;
pub use self::derive_default::CannotDeriveDefault;
mod derive_copy;
pub use self::derive_copy::CannotDeriveCopy;
mod has_type_param_in_array;
pub use self::has_type_param_in_array::HasTypeParameterInArray;
mod derive_hash;
pub use self::derive_hash::CannotDeriveHash;
mod derive_partial_eq;
pub use self::derive_partial_eq::CannotDerivePartialEq;
mod has_float;
pub use self::has_float::HasFloat;
use ir::context::{BindgenContext, ItemId};
use ir::traversal::{EdgeKind, Trace};
@ -105,7 +119,8 @@ pub trait MonotoneFramework: Sized + fmt::Debug {
/// queue up in the worklist when `constrain(node)` reports updated
/// information.
fn each_depending_on<F>(&self, node: Self::Node, f: F)
where F: FnMut(Self::Node);
where
F: FnMut(Self::Node);
}
/// Whether an analysis's `constrain` function modified the incremental results
@ -121,16 +136,18 @@ pub enum ConstrainResult {
/// Run an analysis in the monotone framework.
pub fn analyze<Analysis>(extra: Analysis::Extra) -> Analysis::Output
where Analysis: MonotoneFramework,
where
Analysis: MonotoneFramework,
{
let mut analysis = Analysis::new(extra);
let mut worklist = analysis.initial_worklist();
while let Some(node) = worklist.pop() {
if let ConstrainResult::Changed = analysis.constrain(node) {
analysis.each_depending_on(node, |needs_work| {
worklist.push(needs_work);
});
analysis.each_depending_on(
node,
|needs_work| { worklist.push(needs_work); },
);
}
}
@ -138,8 +155,13 @@ pub fn analyze<Analysis>(extra: Analysis::Extra) -> Analysis::Output
}
/// Generate the dependency map for analysis
pub fn generate_dependencies<F>(ctx: &BindgenContext, consider_edge: F) -> HashMap<ItemId, Vec<ItemId>>
where F: Fn(EdgeKind) -> bool {
pub fn generate_dependencies<F>(
ctx: &BindgenContext,
consider_edge: F,
) -> HashMap<ItemId, Vec<ItemId>>
where
F: Fn(EdgeKind) -> bool,
{
let mut dependencies = HashMap::new();
for &item in ctx.whitelisted_items() {
@ -148,14 +170,19 @@ pub fn generate_dependencies<F>(ctx: &BindgenContext, consider_edge: F) -> HashM
{
// We reverse our natural IR graph edges to find dependencies
// between nodes.
item.trace(ctx, &mut |sub_item: ItemId, edge_kind| {
if ctx.whitelisted_items().contains(&sub_item) &&
consider_edge(edge_kind) {
dependencies.entry(sub_item)
.or_insert(vec![])
.push(item);
item.trace(
ctx,
&mut |sub_item: ItemId, edge_kind| {
if ctx.whitelisted_items().contains(&sub_item) &&
consider_edge(edge_kind)
{
dependencies.entry(sub_item).or_insert(vec![]).push(
item,
);
}
}, &());
},
&(),
);
}
}
dependencies
@ -303,7 +330,8 @@ mod tests {
}
fn each_depending_on<F>(&self, node: Node, mut f: F)
where F: FnMut(Node),
where
F: FnMut(Node),
{
for dep in self.reversed.0[&node].iter() {
f(*dep);
@ -324,7 +352,8 @@ mod tests {
println!("reachable = {:#?}", reachable);
fn nodes<A>(nodes: A) -> HashSet<Node>
where A: AsRef<[usize]>,
where
A: AsRef<[usize]>,
{
nodes.as_ref().iter().cloned().map(Node).collect()
}
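Aside (editor's illustration, not part of the vendored diff): `analyze` above is a standard worklist fixed point: pop a node, run `constrain`, and when the result changed, re-enqueue everything that depends on it through the reversed edges built by `generate_dependencies`. A self-contained miniature of the same loop, with plain integers standing in for `ItemId`s:

use std::collections::{HashMap, HashSet};

fn main() {
    // Reversed dependency edges: "when 1 changes, reconsider 2", etc.
    let mut reverse_deps: HashMap<u32, Vec<u32>> = HashMap::new();
    reverse_deps.insert(1, vec![2]);
    reverse_deps.insert(2, vec![3]);

    // Seed: item 1 cannot derive the trait; propagate that fact to every
    // item that (transitively) contains it, until nothing changes.
    let mut cannot_derive: HashSet<u32> = HashSet::new();
    let mut worklist = vec![1u32];
    while let Some(node) = worklist.pop() {
        // `insert` returning true plays the role of ConstrainResult::Changed.
        if cannot_derive.insert(node) {
            if let Some(edges) = reverse_deps.get(&node) {
                worklist.extend(edges.iter().cloned());
            }
        }
    }

    let expected: HashSet<u32> = [1u32, 2, 3].iter().cloned().collect();
    assert_eq!(cannot_derive, expected);
    println!("fixed point: {:?}", cannot_derive);
}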


@ -147,7 +147,8 @@ use std::collections::{HashMap, HashSet};
/// documentation for details.
#[derive(Debug, Clone)]
pub struct UsedTemplateParameters<'ctx, 'gen>
where 'gen: 'ctx,
where
'gen: 'ctx,
{
ctx: &'ctx BindgenContext<'gen>,
@ -208,11 +209,15 @@ impl<'ctx, 'gen> UsedTemplateParameters<'ctx, 'gen> {
fn take_this_id_usage_set(&mut self, this_id: ItemId) -> ItemSet {
self.used
.get_mut(&this_id)
.expect("Should have a set of used template params for every item \
id")
.expect(
"Should have a set of used template params for every item \
id",
)
.take()
.expect("Should maintain the invariant that all used template param \
sets are `Some` upon entry of `constrain`")
.expect(
"Should maintain the invariant that all used template param \
sets are `Some` upon entry of `constrain`",
)
}
/// We say that blacklisted items use all of their template parameters. The
@ -220,14 +225,19 @@ impl<'ctx, 'gen> UsedTemplateParameters<'ctx, 'gen> {
/// since it won't be in the generated bindings, and we don't know exactly
/// what they'll do with template parameters, but we can push the issue down
/// the line to them.
fn constrain_instantiation_of_blacklisted_template(&self,
this_id: ItemId,
used_by_this_id: &mut ItemSet,
instantiation: &TemplateInstantiation) {
trace!(" instantiation of blacklisted template, uses all template \
arguments");
fn constrain_instantiation_of_blacklisted_template(
&self,
this_id: ItemId,
used_by_this_id: &mut ItemSet,
instantiation: &TemplateInstantiation,
) {
trace!(
" instantiation of blacklisted template, uses all template \
arguments"
);
let args = instantiation.template_arguments()
let args = instantiation
.template_arguments()
.into_iter()
.map(|a| {
a.into_resolver()
@ -238,12 +248,15 @@ impl<'ctx, 'gen> UsedTemplateParameters<'ctx, 'gen> {
})
.filter(|a| *a != this_id)
.flat_map(|a| {
self.used.get(&a)
self.used
.get(&a)
.expect("Should have a used entry for the template arg")
.as_ref()
.expect("Because a != this_id, and all used template \
.expect(
"Because a != this_id, and all used template \
param sets other than this_id's are `Some`, \
a's used template param set should be `Some`")
a's used template param set should be `Some`",
)
.iter()
.cloned()
});
@ -253,17 +266,18 @@ impl<'ctx, 'gen> UsedTemplateParameters<'ctx, 'gen> {
/// A template instantiation's concrete template argument is only used if
/// the template definition uses the corresponding template parameter.
fn constrain_instantiation(&self,
this_id: ItemId,
used_by_this_id: &mut ItemSet,
instantiation: &TemplateInstantiation) {
fn constrain_instantiation(
&self,
this_id: ItemId,
used_by_this_id: &mut ItemSet,
instantiation: &TemplateInstantiation,
) {
trace!(" template instantiation");
let decl = self.ctx.resolve_type(instantiation.template_definition());
let args = instantiation.template_arguments();
let params = decl.self_template_params(self.ctx)
.unwrap_or(vec![]);
let params = decl.self_template_params(self.ctx).unwrap_or(vec![]);
debug_assert!(this_id != instantiation.template_definition());
let used_by_def = self.used
@ -275,10 +289,12 @@ impl<'ctx, 'gen> UsedTemplateParameters<'ctx, 'gen> {
instantiation itself");
for (arg, param) in args.iter().zip(params.iter()) {
trace!(" instantiation's argument {:?} is used if definition's \
trace!(
" instantiation's argument {:?} is used if definition's \
parameter {:?} is used",
arg,
param);
arg,
param
);
if used_by_def.contains(param) {
trace!(" param is used by template definition");
@ -297,10 +313,12 @@ impl<'ctx, 'gen> UsedTemplateParameters<'ctx, 'gen> {
.get(&arg)
.expect("Should have a used entry for the template arg")
.as_ref()
.expect("Because arg != this_id, and all used template \
.expect(
"Because arg != this_id, and all used template \
param sets other than this_id's are `Some`, \
arg's used template param set should be \
`Some`")
`Some`",
)
.iter()
.cloned();
used_by_this_id.extend(used_by_arg);
@ -313,31 +331,39 @@ impl<'ctx, 'gen> UsedTemplateParameters<'ctx, 'gen> {
fn constrain_join(&self, used_by_this_id: &mut ItemSet, item: &Item) {
trace!(" other item: join with successors' usage");
item.trace(self.ctx, &mut |sub_id, edge_kind| {
// Ignore ourselves, since union with ourself is a
// no-op. Ignore edges that aren't relevant to the
// analysis.
if sub_id == item.id() || !Self::consider_edge(edge_kind) {
return;
}
item.trace(
self.ctx,
&mut |sub_id, edge_kind| {
// Ignore ourselves, since union with ourself is a
// no-op. Ignore edges that aren't relevant to the
// analysis.
if sub_id == item.id() || !Self::consider_edge(edge_kind) {
return;
}
let used_by_sub_id = self.used
.get(&sub_id)
.expect("Should have a used set for the sub_id successor")
.as_ref()
.expect("Because sub_id != id, and all used template \
let used_by_sub_id = self.used
.get(&sub_id)
.expect("Should have a used set for the sub_id successor")
.as_ref()
.expect(
"Because sub_id != id, and all used template \
param sets other than id's are `Some`, \
sub_id's used template param set should be \
`Some`")
.iter()
.cloned();
`Some`",
)
.iter()
.cloned();
trace!(" union with {:?}'s usage: {:?}",
sub_id,
used_by_sub_id.clone().collect::<Vec<_>>());
trace!(
" union with {:?}'s usage: {:?}",
sub_id,
used_by_sub_id.clone().collect::<Vec<_>>()
);
used_by_this_id.extend(used_by_sub_id);
}, &());
used_by_this_id.extend(used_by_sub_id);
},
&(),
);
}
}
@ -346,22 +372,20 @@ impl<'ctx, 'gen> MonotoneFramework for UsedTemplateParameters<'ctx, 'gen> {
type Extra = &'ctx BindgenContext<'gen>;
type Output = HashMap<ItemId, ItemSet>;
fn new(ctx: &'ctx BindgenContext<'gen>)
-> UsedTemplateParameters<'ctx, 'gen> {
fn new(
ctx: &'ctx BindgenContext<'gen>,
) -> UsedTemplateParameters<'ctx, 'gen> {
let mut used = HashMap::new();
let mut dependencies = HashMap::new();
let whitelisted_items: HashSet<_> = ctx.whitelisted_items()
.iter()
.cloned()
.collect();
let whitelisted_items: HashSet<_> =
ctx.whitelisted_items().iter().cloned().collect();
let whitelisted_and_blacklisted_items: ItemSet = whitelisted_items.iter()
let whitelisted_and_blacklisted_items: ItemSet = whitelisted_items
.iter()
.cloned()
.flat_map(|i| {
let mut reachable = vec![i];
i.trace(ctx, &mut |s, _| {
reachable.push(s);
}, &());
i.trace(ctx, &mut |s, _| { reachable.push(s); }, &());
reachable
})
.collect();
@ -373,53 +397,54 @@ impl<'ctx, 'gen> MonotoneFramework for UsedTemplateParameters<'ctx, 'gen> {
{
// We reverse our natural IR graph edges to find dependencies
// between nodes.
item.trace(ctx, &mut |sub_item: ItemId, _| {
used.entry(sub_item).or_insert(Some(ItemSet::new()));
dependencies.entry(sub_item)
.or_insert(vec![])
.push(item);
}, &());
item.trace(
ctx,
&mut |sub_item: ItemId, _| {
used.entry(sub_item).or_insert(Some(ItemSet::new()));
dependencies.entry(sub_item).or_insert(vec![]).push(
item,
);
},
&(),
);
}
// Additionally, whether a template instantiation's template
// arguments are used depends on whether the template declaration's
// generic template parameters are used.
ctx.resolve_item(item)
.as_type()
.map(|ty| match ty.kind() {
&TypeKind::TemplateInstantiation(ref inst) => {
let decl = ctx.resolve_type(inst.template_definition());
let args = inst.template_arguments();
ctx.resolve_item(item).as_type().map(|ty| match ty.kind() {
&TypeKind::TemplateInstantiation(ref inst) => {
let decl = ctx.resolve_type(inst.template_definition());
let args = inst.template_arguments();
// Although template definitions should always have
// template parameters, there is a single exception:
// opaque templates. Hence the unwrap_or.
let params = decl.self_template_params(ctx)
.unwrap_or(vec![]);
// Although template definitions should always have
// template parameters, there is a single exception:
// opaque templates. Hence the unwrap_or.
let params =
decl.self_template_params(ctx).unwrap_or(vec![]);
for (arg, param) in args.iter().zip(params.iter()) {
let arg = arg.into_resolver()
.through_type_aliases()
.through_type_refs()
.resolve(ctx)
.id();
for (arg, param) in args.iter().zip(params.iter()) {
let arg = arg.into_resolver()
.through_type_aliases()
.through_type_refs()
.resolve(ctx)
.id();
let param = param.into_resolver()
.through_type_aliases()
.through_type_refs()
.resolve(ctx)
.id();
let param = param
.into_resolver()
.through_type_aliases()
.through_type_refs()
.resolve(ctx)
.id();
used.entry(arg).or_insert(Some(ItemSet::new()));
used.entry(param).or_insert(Some(ItemSet::new()));
used.entry(arg).or_insert(Some(ItemSet::new()));
used.entry(param).or_insert(Some(ItemSet::new()));
dependencies.entry(arg)
.or_insert(vec![])
.push(param);
}
dependencies.entry(arg).or_insert(vec![]).push(param);
}
_ => {}
});
}
_ => {}
});
}
if cfg!(feature = "testing_only_extra_assertions") {
@ -436,10 +461,14 @@ impl<'ctx, 'gen> MonotoneFramework for UsedTemplateParameters<'ctx, 'gen> {
for item in whitelisted_items.iter() {
extra_assert!(used.contains_key(item));
extra_assert!(dependencies.contains_key(item));
item.trace(ctx, &mut |sub_item, _| {
extra_assert!(used.contains_key(&sub_item));
extra_assert!(dependencies.contains_key(&sub_item));
}, &())
item.trace(
ctx,
&mut |sub_item, _| {
extra_assert!(used.contains_key(&sub_item));
extra_assert!(dependencies.contains_key(&sub_item));
},
&(),
)
}
}
@ -460,9 +489,7 @@ impl<'ctx, 'gen> MonotoneFramework for UsedTemplateParameters<'ctx, 'gen> {
.cloned()
.flat_map(|i| {
let mut reachable = vec![i];
i.trace(self.ctx, &mut |s, _| {
reachable.push(s);
}, &());
i.trace(self.ctx, &mut |s, _| { reachable.push(s); }, &());
reachable
})
.collect()
@ -488,19 +515,28 @@ impl<'ctx, 'gen> MonotoneFramework for UsedTemplateParameters<'ctx, 'gen> {
let ty_kind = item.as_type().map(|ty| ty.kind());
match ty_kind {
// Named template type parameters trivially use themselves.
Some(&TypeKind::Named) => {
Some(&TypeKind::TypeParam) => {
trace!(" named type, trivially uses itself");
used_by_this_id.insert(id);
}
// Template instantiations only use their template arguments if the
// template definition uses the corresponding template parameter.
Some(&TypeKind::TemplateInstantiation(ref inst)) => {
if self.whitelisted_items.contains(&inst.template_definition()) {
self.constrain_instantiation(id, &mut used_by_this_id, inst);
if self.whitelisted_items.contains(
&inst.template_definition(),
)
{
self.constrain_instantiation(
id,
&mut used_by_this_id,
inst,
);
} else {
self.constrain_instantiation_of_blacklisted_template(id,
&mut used_by_this_id,
inst);
self.constrain_instantiation_of_blacklisted_template(
id,
&mut used_by_this_id,
inst,
);
}
}
// Otherwise, add the union of each of its referent item's template
@ -511,9 +547,11 @@ impl<'ctx, 'gen> MonotoneFramework for UsedTemplateParameters<'ctx, 'gen> {
trace!(" finally, used set is {:?}", used_by_this_id);
let new_len = used_by_this_id.len();
assert!(new_len >= original_len,
"This is the property that ensures this function is monotone -- \
if it doesn't hold, the analysis might never terminate!");
assert!(
new_len >= original_len,
"This is the property that ensures this function is monotone -- \
if it doesn't hold, the analysis might never terminate!"
);
// Put the set back in the hash map and restore our invariant.
debug_assert!(self.used[&id].is_none());
@ -528,7 +566,8 @@ impl<'ctx, 'gen> MonotoneFramework for UsedTemplateParameters<'ctx, 'gen> {
}
fn each_depending_on<F>(&self, item: ItemId, mut f: F)
where F: FnMut(ItemId),
where
F: FnMut(ItemId),
{
if let Some(edges) = self.dependencies.get(&item) {
for item in edges {
@ -542,7 +581,8 @@ impl<'ctx, 'gen> MonotoneFramework for UsedTemplateParameters<'ctx, 'gen> {
impl<'ctx, 'gen> From<UsedTemplateParameters<'ctx, 'gen>>
for HashMap<ItemId, ItemSet> {
fn from(used_templ_params: UsedTemplateParameters<'ctx, 'gen>) -> Self {
used_templ_params.used
used_templ_params
.used
.into_iter()
.map(|(k, v)| (k, v.unwrap()))
.collect()
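Aside (editor's illustration, not part of the vendored diff): the core rule of this analysis in miniature. An instantiation's concrete argument only counts as used when the template definition uses the matching parameter; the flag slice and argument names below are invented for the sketch:

fn used_arguments<'a>(def_uses_param: &[bool], args: &[&'a str]) -> Vec<&'a str> {
    def_uses_param
        .iter()
        .copied()
        .zip(args.iter().copied())
        .filter(|&(used, _)| used)
        .map(|(_, arg)| arg)
        .collect()
}

fn main() {
    // Think `template <class T, class U> struct S { T t; };`: the definition
    // only uses its first parameter, so only the first argument is "used".
    assert_eq!(used_arguments(&[true, false], &["int", "float"]), vec!["int"]);
    println!("ok");
}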


@ -148,10 +148,11 @@ impl Annotations {
fn parse(&mut self, comment: &clang::Comment, matched: &mut bool) {
use clang_sys::CXComment_HTMLStartTag;
if comment.kind() == CXComment_HTMLStartTag &&
comment.get_tag_name() == "div" &&
comment.get_tag_attrs()
.next()
.map_or(false, |attr| attr.name == "rustbindgen") {
comment.get_tag_name() == "div" &&
comment.get_tag_attrs().next().map_or(false, |attr| {
attr.name == "rustbindgen"
})
{
*matched = true;
for attr in comment.get_tag_attrs() {
match attr.name.as_str() {
@ -159,10 +160,10 @@ impl Annotations {
"hide" => self.hide = true,
"nocopy" => self.disallow_copy = true,
"replaces" => {
self.use_instead_of = Some(attr.value
.split("::")
.map(Into::into)
.collect())
self.use_instead_of =
Some(
attr.value.split("::").map(Into::into).collect(),
)
}
"private" => {
self.private_fields = Some(attr.value != "false")


@ -47,7 +47,8 @@ fn preprocess_single_lines(comment: &str, indent: usize) -> String {
let indent = make_indent(indent);
let mut is_first = true;
let lines: Vec<_> = comment.lines()
let lines: Vec<_> = comment
.lines()
.map(|l| l.trim_left_matches('/').trim())
.map(|l| {
let indent = if is_first { "" } else { &*indent };
@ -60,12 +61,13 @@ fn preprocess_single_lines(comment: &str, indent: usize) -> String {
}
fn preprocess_multi_line(comment: &str, indent: usize) -> String {
let comment = comment.trim_left_matches('/')
.trim_left_matches('*')
.trim_left_matches('!')
.trim_right_matches('/')
.trim_right_matches('*')
.trim();
let comment = comment
.trim_left_matches('/')
.trim_left_matches('*')
.trim_left_matches('!')
.trim_right_matches('/')
.trim_right_matches('*')
.trim();
let indent = make_indent(indent);
// Strip any potential `*` characters preceding each line.
@ -109,10 +111,14 @@ mod test {
#[test]
fn processes_multi_lines_correctly() {
assert_eq!(preprocess("/** hello \n * world \n * foo \n */", 0),
"/// hello\n/// world\n/// foo");
assert_eq!(
preprocess("/** hello \n * world \n * foo \n */", 0),
"/// hello\n/// world\n/// foo"
);
assert_eq!(preprocess("/**\nhello\n*world\n*foo\n*/", 0),
"/// hello\n/// world\n/// foo");
assert_eq!(
preprocess("/**\nhello\n*world\n*foo\n*/", 0),
"/// hello\n/// world\n/// foo"
);
}
}
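
The reflowed tests above keep the expected strings intact: block comments are rewritten as /// doc comments, one line each, with the leading * and / decoration stripped. The same behaviour can be approximated in a few lines; block_to_line_doc is an invented name, not bindgen's preprocess:

// Illustrative sketch only; approximates what the vendored tests assert.
fn block_to_line_doc(comment: &str) -> String {
    comment
        .trim_start_matches('/')
        .trim_start_matches('*')
        .trim_end_matches('/')
        .trim_end_matches('*')
        .lines()
        .map(|l| l.trim().trim_start_matches('*').trim())
        .filter(|l| !l.is_empty())
        .map(|l| format!("/// {}", l))
        .collect::<Vec<_>>()
        .join("\n")
}

fn main() {
    // Same input/expected pair as `processes_multi_lines_correctly` above.
    assert_eq!(
        block_to_line_doc("/** hello \n * world \n * foo \n */"),
        "/// hello\n/// world\n/// foo"
    );
}
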


@ -2,18 +2,17 @@
use super::annotations::Annotations;
use super::context::{BindgenContext, ItemId};
use super::derive::{CanDeriveCopy, CanDeriveDefault};
use super::dot::DotAttributes;
use super::item::{IsOpaque, Item};
use super::layout::Layout;
use super::traversal::{EdgeKind, Trace, Tracer};
use super::ty::RUST_DERIVE_IN_ARRAY_LIMIT;
// use super::ty::RUST_DERIVE_IN_ARRAY_LIMIT;
use super::template::TemplateParameters;
use super::traversal::{EdgeKind, Trace, Tracer};
use clang;
use codegen::struct_layout::{align_to, bytes_from_bits_pow2};
use ir::derive::CanDeriveCopy;
use parse::{ClangItemParser, ParseError};
use peeking_take_while::PeekableExt;
use std::cell::Cell;
use std::cmp;
use std::io;
use std::mem;
@ -75,7 +74,7 @@ impl Method {
/// Is this a destructor method?
pub fn is_destructor(&self) -> bool {
self.kind == MethodKind::Destructor ||
self.kind == MethodKind::VirtualDestructor
self.kind == MethodKind::VirtualDestructor
}
/// Is this a constructor?
@ -86,7 +85,7 @@ impl Method {
/// Is this a virtual method?
pub fn is_virtual(&self) -> bool {
self.kind == MethodKind::Virtual ||
self.kind == MethodKind::VirtualDestructor
self.kind == MethodKind::VirtualDestructor
}
/// Is this a static method?
@ -170,18 +169,12 @@ pub enum Field {
}
impl Field {
fn has_destructor(&self, ctx: &BindgenContext) -> bool {
match *self {
Field::DataMember(ref data) => ctx.resolve_type(data.ty).has_destructor(ctx),
// Bitfields may not be of a type that has a destructor.
Field::Bitfields(BitfieldUnit { .. }) => false,
}
}
/// Get this field's layout.
pub fn layout(&self, ctx: &BindgenContext) -> Option<Layout> {
match *self {
Field::Bitfields(BitfieldUnit { layout, ..}) => Some(layout),
Field::Bitfields(BitfieldUnit {
layout, ..
}) => Some(layout),
Field::DataMember(ref data) => {
ctx.resolve_type(data.ty).layout(ctx)
}
@ -193,13 +186,16 @@ impl Trace for Field {
type Extra = ();
fn trace<T>(&self, _: &BindgenContext, tracer: &mut T, _: &())
where T: Tracer,
where
T: Tracer,
{
match *self {
Field::DataMember(ref data) => {
tracer.visit_kind(data.ty, EdgeKind::Field);
}
Field::Bitfields(BitfieldUnit { ref bitfields, .. }) => {
Field::Bitfields(BitfieldUnit {
ref bitfields, ..
}) => {
for bf in bitfields {
tracer.visit_kind(bf.ty(), EdgeKind::Field);
}
@ -209,16 +205,24 @@ impl Trace for Field {
}
impl DotAttributes for Field {
fn dot_attributes<W>(&self, ctx: &BindgenContext, out: &mut W) -> io::Result<()>
where W: io::Write
fn dot_attributes<W>(
&self,
ctx: &BindgenContext,
out: &mut W,
) -> io::Result<()>
where
W: io::Write,
{
match *self {
Field::DataMember(ref data) => {
data.dot_attributes(ctx, out)
}
Field::Bitfields(BitfieldUnit { layout, ref bitfields, .. }) => {
writeln!(out,
r#"<tr>
Field::DataMember(ref data) => data.dot_attributes(ctx, out),
Field::Bitfields(BitfieldUnit {
layout,
ref bitfields,
..
}) => {
writeln!(
out,
r#"<tr>
<td>bitfield unit</td>
<td>
<table border="0">
@ -229,8 +233,9 @@ impl DotAttributes for Field {
<td>unit.align</td><td>{}</td>
</tr>
"#,
layout.size,
layout.align)?;
layout.size,
layout.align
)?;
for bf in bitfields {
bf.dot_attributes(ctx, out)?;
}
@ -241,25 +246,39 @@ impl DotAttributes for Field {
}
impl DotAttributes for FieldData {
fn dot_attributes<W>(&self, _ctx: &BindgenContext, out: &mut W) -> io::Result<()>
where W: io::Write
fn dot_attributes<W>(
&self,
_ctx: &BindgenContext,
out: &mut W,
) -> io::Result<()>
where
W: io::Write,
{
writeln!(out,
"<tr><td>{}</td><td>{:?}</td></tr>",
self.name().unwrap_or("(anonymous)"),
self.ty())
writeln!(
out,
"<tr><td>{}</td><td>{:?}</td></tr>",
self.name().unwrap_or("(anonymous)"),
self.ty()
)
}
}
impl DotAttributes for Bitfield {
fn dot_attributes<W>(&self, _ctx: &BindgenContext, out: &mut W) -> io::Result<()>
where W: io::Write
fn dot_attributes<W>(
&self,
_ctx: &BindgenContext,
out: &mut W,
) -> io::Result<()>
where
W: io::Write,
{
writeln!(out,
"<tr><td>{} : {}</td><td>{:?}</td></tr>",
self.name(),
self.width(),
self.ty())
writeln!(
out,
"<tr><td>{} : {}</td><td>{:?}</td></tr>",
self.name(),
self.width(),
self.ty()
)
}
}
@ -359,14 +378,15 @@ struct RawField(FieldData);
impl RawField {
/// Construct a new `RawField`.
fn new(name: Option<String>,
ty: ItemId,
comment: Option<String>,
annotations: Option<Annotations>,
bitfield: Option<u32>,
mutable: bool,
offset: Option<usize>)
-> RawField {
fn new(
name: Option<String>,
ty: ItemId,
comment: Option<String>,
annotations: Option<Annotations>,
bitfield: Option<u32>,
mutable: bool,
offset: Option<usize>,
) -> RawField {
RawField(FieldData {
name: name,
ty: ty,
@ -411,10 +431,12 @@ impl FieldMethods for RawField {
/// Convert the given ordered set of raw fields into a list of either plain data
/// members, and/or bitfield units containing multiple bitfields.
fn raw_fields_to_fields_and_bitfield_units<I>(ctx: &BindgenContext,
raw_fields: I)
-> Vec<Field>
where I: IntoIterator<Item=RawField>
fn raw_fields_to_fields_and_bitfield_units<I>(
ctx: &BindgenContext,
raw_fields: I,
) -> Vec<Field>
where
I: IntoIterator<Item = RawField>,
{
let mut raw_fields = raw_fields.into_iter().fuse().peekable();
let mut fields = vec![];
@ -444,26 +466,32 @@ fn raw_fields_to_fields_and_bitfield_units<I>(ctx: &BindgenContext,
break;
}
bitfields_to_allocation_units(ctx,
&mut bitfield_unit_count,
&mut fields,
bitfields);
bitfields_to_allocation_units(
ctx,
&mut bitfield_unit_count,
&mut fields,
bitfields,
);
}
assert!(raw_fields.next().is_none(),
"The above loop should consume all items in `raw_fields`");
assert!(
raw_fields.next().is_none(),
"The above loop should consume all items in `raw_fields`"
);
fields
}
/// Given a set of contiguous raw bitfields, group and allocate them into
/// (potentially multiple) bitfield units.
fn bitfields_to_allocation_units<E, I>(ctx: &BindgenContext,
bitfield_unit_count: &mut usize,
mut fields: &mut E,
raw_bitfields: I)
where E: Extend<Field>,
I: IntoIterator<Item=RawField>
fn bitfields_to_allocation_units<E, I>(
ctx: &BindgenContext,
bitfield_unit_count: &mut usize,
fields: &mut E,
raw_bitfields: I,
) where
E: Extend<Field>,
I: IntoIterator<Item = RawField>,
{
assert!(ctx.collected_typerefs());
@ -478,12 +506,14 @@ fn bitfields_to_allocation_units<E, I>(ctx: &BindgenContext,
// TODO(emilio): Take into account C++'s wide bitfields, and
// packing, sigh.
fn flush_allocation_unit<E>(mut fields: &mut E,
bitfield_unit_count: &mut usize,
unit_size_in_bits: usize,
unit_align_in_bits: usize,
bitfields: Vec<Bitfield>)
where E: Extend<Field>
fn flush_allocation_unit<E>(
fields: &mut E,
bitfield_unit_count: &mut usize,
unit_size_in_bits: usize,
unit_align_in_bits: usize,
bitfields: Vec<Bitfield>,
) where
E: Extend<Field>,
{
*bitfield_unit_count += 1;
let align = bytes_from_bits_pow2(unit_align_in_bits);
@ -508,26 +538,28 @@ fn bitfields_to_allocation_units<E, I>(ctx: &BindgenContext,
for bitfield in raw_bitfields {
let bitfield_width = bitfield.bitfield().unwrap() as usize;
let bitfield_layout =
ctx.resolve_type(bitfield.ty())
.layout(ctx)
.expect("Bitfield without layout? Gah!");
let bitfield_layout = ctx.resolve_type(bitfield.ty())
.layout(ctx)
.expect("Bitfield without layout? Gah!");
let bitfield_size = bitfield_layout.size;
let bitfield_align = bitfield_layout.align;
let mut offset = unit_size_in_bits;
if is_ms_struct {
if unit_size_in_bits != 0 &&
(bitfield_width == 0 ||
bitfield_width > unfilled_bits_in_unit) {
(bitfield_width == 0 ||
bitfield_width > unfilled_bits_in_unit)
{
// We've reached the end of this allocation unit, so flush it
// and its bitfields.
unit_size_in_bits = align_to(unit_size_in_bits, unit_align * 8);
flush_allocation_unit(fields,
bitfield_unit_count,
unit_size_in_bits,
unit_align,
mem::replace(&mut bitfields_in_unit, vec![]));
flush_allocation_unit(
fields,
bitfield_unit_count,
unit_size_in_bits,
unit_align,
mem::replace(&mut bitfields_in_unit, vec![]),
);
// Now we're working on a fresh bitfield allocation unit, so reset
// the current unit size and alignment.
@ -541,7 +573,9 @@ fn bitfields_to_allocation_units<E, I>(ctx: &BindgenContext,
} else {
if offset != 0 &&
(bitfield_width == 0 ||
(offset & (bitfield_align * 8 - 1)) + bitfield_width > bitfield_size * 8) {
(offset & (bitfield_align * 8 - 1)) + bitfield_width >
bitfield_size * 8)
{
offset = align_to(offset, bitfield_align * 8);
}
}
@ -572,11 +606,13 @@ fn bitfields_to_allocation_units<E, I>(ctx: &BindgenContext,
if unit_size_in_bits != 0 {
// Flush the last allocation unit and its bitfields.
flush_allocation_unit(fields,
bitfield_unit_count,
unit_size_in_bits,
unit_align,
bitfields_in_unit);
flush_allocation_unit(
fields,
bitfield_unit_count,
unit_size_in_bits,
unit_align,
bitfields_in_unit,
);
}
}
@ -606,7 +642,9 @@ impl CompFields {
raws.push(raw);
}
CompFields::AfterComputingBitfieldUnits(_) => {
panic!("Must not append new fields after computing bitfield allocation units");
panic!(
"Must not append new fields after computing bitfield allocation units"
);
}
}
}
@ -621,8 +659,12 @@ impl CompFields {
}
};
let fields_and_units = raw_fields_to_fields_and_bitfield_units(ctx, raws);
mem::replace(self, CompFields::AfterComputingBitfieldUnits(fields_and_units));
let fields_and_units =
raw_fields_to_fields_and_bitfield_units(ctx, raws);
mem::replace(
self,
CompFields::AfterComputingBitfieldUnits(fields_and_units),
);
}
}
@ -630,7 +672,8 @@ impl Trace for CompFields {
type Extra = ();
fn trace<T>(&self, context: &BindgenContext, tracer: &mut T, _: &())
where T: Tracer,
where
T: Tracer,
{
match *self {
CompFields::BeforeComputingBitfieldUnits(ref fields) => {
@ -702,42 +745,6 @@ impl FieldMethods for FieldData {
}
}
impl<'a> CanDeriveDefault<'a> for Field {
type Extra = ();
fn can_derive_default(&self, ctx: &BindgenContext, _: ()) -> bool {
match *self {
Field::DataMember(ref data) => data.ty.can_derive_default(ctx, ()),
Field::Bitfields(BitfieldUnit { ref bitfields, .. }) => bitfields.iter().all(|b| {
b.ty().can_derive_default(ctx, ())
}),
}
}
}
impl<'a> CanDeriveCopy<'a> for Field {
type Extra = ();
fn can_derive_copy(&self, ctx: &BindgenContext, _: ()) -> bool {
match *self {
Field::DataMember(ref data) => data.ty.can_derive_copy(ctx, ()),
Field::Bitfields(BitfieldUnit { ref bitfields, .. }) => bitfields.iter().all(|b| {
b.ty().can_derive_copy(ctx, ())
}),
}
}
fn can_derive_copy_in_array(&self, ctx: &BindgenContext, _: ()) -> bool {
match *self {
Field::DataMember(ref data) => data.ty.can_derive_copy_in_array(ctx, ()),
Field::Bitfields(BitfieldUnit { ref bitfields, .. }) => bitfields.iter().all(|b| {
b.ty().can_derive_copy_in_array(ctx, ())
}),
}
}
}
/// The kind of inheritance a base class is using.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum BaseKind {
@ -786,7 +793,7 @@ pub struct CompInfo {
/// The abstract template parameters of this class. Note that these are NOT
/// concrete template arguments, and should always be a
/// `Type(TypeKind::Named(name))`. For concrete template arguments, see
/// `Type(TypeKind::TypeParam(name))`. For concrete template arguments, see
/// `TypeKind::TemplateInstantiation`.
template_params: Vec<ItemId>,
@ -845,14 +852,6 @@ pub struct CompInfo {
/// and pray, or behave as an opaque type.
found_unknown_attr: bool,
/// Used to detect if we've run in a can_derive_default cycle while cycling
/// around the template arguments.
detect_derive_default_cycle: Cell<bool>,
/// Used to detect if we've run in a has_destructor cycle while cycling
/// around the template arguments.
detect_has_destructor_cycle: Cell<bool>,
/// Used to indicate when a struct has been forward declared. Usually used
/// in headers so that APIs can't modify them directly.
is_forward_declaration: bool,
@ -877,8 +876,6 @@ impl CompInfo {
has_non_type_template_params: false,
packed: false,
found_unknown_attr: false,
detect_derive_default_cycle: Cell::new(false),
detect_has_destructor_cycle: Cell::new(false),
is_forward_declaration: false,
}
}
@ -886,37 +883,12 @@ impl CompInfo {
/// Is this compound type unsized?
pub fn is_unsized(&self, ctx: &BindgenContext, itemid: &ItemId) -> bool {
!ctx.lookup_item_id_has_vtable(itemid) && self.fields().is_empty() &&
self.base_members.iter().all(|base| {
ctx.resolve_type(base.ty).canonical_type(ctx).is_unsized(ctx, &base.ty)
})
}
/// Does this compound type have a destructor?
pub fn has_destructor(&self, ctx: &BindgenContext) -> bool {
if self.detect_has_destructor_cycle.get() {
warn!("Cycle detected looking for destructors");
// Assume no destructor, since we don't have an explicit one.
return false;
}
self.detect_has_destructor_cycle.set(true);
let has_destructor = self.has_destructor ||
match self.kind {
CompKind::Union => false,
CompKind::Struct => {
self.base_members.iter().any(|base| {
ctx.resolve_type(base.ty).has_destructor(ctx)
}) ||
self.fields().iter().any(|field| {
field.has_destructor(ctx)
})
}
};
self.detect_has_destructor_cycle.set(false);
has_destructor
self.base_members.iter().all(|base| {
ctx.resolve_type(base.ty).canonical_type(ctx).is_unsized(
ctx,
&base.ty,
)
})
}
/// Compute the layout of this type.
@ -971,6 +943,11 @@ impl CompInfo {
return self.has_own_virtual_method;
}
/// Did we see a destructor when parsing this type?
pub fn has_own_destructor(&self) -> bool {
self.has_destructor
}
/// Get this type's set of methods.
pub fn methods(&self) -> &[Method] {
&self.methods
@ -1002,14 +979,17 @@ impl CompInfo {
}
/// Construct a new compound type from a Clang type.
pub fn from_ty(potential_id: ItemId,
ty: &clang::Type,
location: Option<clang::Cursor>,
ctx: &mut BindgenContext)
-> Result<Self, ParseError> {
pub fn from_ty(
potential_id: ItemId,
ty: &clang::Type,
location: Option<clang::Cursor>,
ctx: &mut BindgenContext,
) -> Result<Self, ParseError> {
use clang_sys::*;
assert!(ty.template_args().is_none(),
"We handle template instantiations elsewhere");
assert!(
ty.template_args().is_none(),
"We handle template instantiations elsewhere"
);
let mut cursor = ty.declaration();
let mut kind = Self::kind_from_cursor(&cursor);
@ -1160,8 +1140,8 @@ impl CompInfo {
ci.packed = true;
}
CXCursor_TemplateTypeParameter => {
let param = Item::named_type(None, cur, ctx)
.expect("Item::named_type should't fail when pointing \
let param = Item::type_param(None, cur, ctx)
.expect("Item::type_param should't fail when pointing \
at a TemplateTypeParameter");
ci.template_params.push(param);
}
@ -1287,8 +1267,9 @@ impl CompInfo {
Ok(ci)
}
fn kind_from_cursor(cursor: &clang::Cursor)
-> Result<CompKind, ParseError> {
fn kind_from_cursor(
cursor: &clang::Cursor,
) -> Result<CompKind, ParseError> {
use clang_sys::*;
Ok(match cursor.kind() {
CXCursor_UnionDecl => CompKind::Union,
@ -1333,7 +1314,11 @@ impl CompInfo {
/// Returns whether this type needs an explicit vtable because it has
/// virtual methods and none of its base classes has already a vtable.
pub fn needs_explicit_vtable(&self, ctx: &BindgenContext, item: &Item) -> bool {
pub fn needs_explicit_vtable(
&self,
ctx: &BindgenContext,
item: &Item,
) -> bool {
ctx.lookup_item_id_has_vtable(&item.id()) &&
!self.base_members.iter().any(|base| {
// NB: Ideally, we could rely in all these types being `comp`, and
@ -1358,11 +1343,31 @@ impl CompInfo {
pub fn compute_bitfield_units(&mut self, ctx: &BindgenContext) {
self.fields.compute_bitfield_units(ctx);
}
/// Returns whether the current union can be represented as a Rust `union`
///
/// Requirements:
/// 1. Current RustTarget allows for `untagged_union`
/// 2. Each field can derive `Copy`
pub fn can_be_rust_union(&self, ctx: &BindgenContext) -> bool {
ctx.options().rust_features().untagged_union() &&
self.fields().iter().all(|f| match *f {
Field::DataMember(ref field_data) => {
field_data.ty().can_derive_copy(ctx)
}
Field::Bitfields(_) => false,
})
}
}
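
The new can_be_rust_union helper encodes the two requirements spelled out in its doc comment: the selected RustTarget must support the (then-unstable) untagged_union feature, and every data member must be able to derive Copy, which also rules out bitfield units. A minimal illustration of the underlying rustc rule, with invented types; not part of the vendored code:

// Illustrative sketch only.
#[allow(dead_code)]
#[derive(Copy, Clone)]
struct Pod {
    x: u32,
}

#[allow(dead_code)]
union PodUnion {
    a: u32,
    b: Pod,
}

fn main() {
    let u = PodUnion { a: 5 };
    // Reading any union field is unsafe; this compiles because all fields
    // are Copy. Swap `b: Pod` for a non-Copy field such as `String` and
    // rustc rejects the union unless the field is wrapped in ManuallyDrop,
    // which is why `can_be_rust_union` checks `can_derive_copy` per field.
    unsafe {
        println!("{}", u.a);
    }
}
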
impl DotAttributes for CompInfo {
fn dot_attributes<W>(&self, ctx: &BindgenContext, out: &mut W) -> io::Result<()>
where W: io::Write
fn dot_attributes<W>(
&self,
ctx: &BindgenContext,
out: &mut W,
) -> io::Result<()>
where
W: io::Write,
{
writeln!(out, "<tr><td>CompKind</td><td>{:?}</td></tr>", self.kind)?;
@ -1379,7 +1384,10 @@ impl DotAttributes for CompInfo {
}
if self.has_non_type_template_params {
writeln!(out, "<tr><td>has_non_type_template_params</td><td>true</td></tr>")?;
writeln!(
out,
"<tr><td>has_non_type_template_params</td><td>true</td></tr>"
)?;
}
if self.packed {
@ -1387,7 +1395,10 @@ impl DotAttributes for CompInfo {
}
if self.is_forward_declaration {
writeln!(out, "<tr><td>is_forward_declaration</td><td>true</td></tr>")?;
writeln!(
out,
"<tr><td>is_forward_declaration</td><td>true</td></tr>"
)?;
}
if !self.fields().is_empty() {
@ -1411,9 +1422,10 @@ impl IsOpaque for CompInfo {
}
impl TemplateParameters for CompInfo {
fn self_template_params(&self,
_ctx: &BindgenContext)
-> Option<Vec<ItemId>> {
fn self_template_params(
&self,
_ctx: &BindgenContext,
) -> Option<Vec<ItemId>> {
if self.template_params.is_empty() {
None
} else {
@ -1422,105 +1434,12 @@ impl TemplateParameters for CompInfo {
}
}
impl<'a> CanDeriveDefault<'a> for CompInfo {
type Extra = (&'a Item, Option<Layout>);
fn can_derive_default(&self,
ctx: &BindgenContext,
(item, layout): (&Item, Option<Layout>))
-> bool {
// We can reach here recursively via template parameters of a member,
// for example.
if self.detect_derive_default_cycle.get() {
warn!("Derive default cycle detected!");
return true;
}
if layout.map_or(false, |l| l.align > RUST_DERIVE_IN_ARRAY_LIMIT) {
return false;
}
if self.kind == CompKind::Union {
if ctx.options().unstable_rust {
return false;
}
return layout.map_or(true, |l| l.opaque().can_derive_default(ctx, ()));
}
if self.has_non_type_template_params {
return layout.map_or(true, |l| l.opaque().can_derive_default(ctx, ()));
}
self.detect_derive_default_cycle.set(true);
let can_derive_default = !ctx.lookup_item_id_has_vtable(&item.id()) &&
!self.needs_explicit_vtable(ctx, item) &&
self.base_members
.iter()
.all(|base| base.ty.can_derive_default(ctx, ())) &&
self.fields()
.iter()
.all(|f| f.can_derive_default(ctx, ()));
self.detect_derive_default_cycle.set(false);
can_derive_default
}
}
impl<'a> CanDeriveCopy<'a> for CompInfo {
type Extra = (&'a Item, Option<Layout>);
fn can_derive_copy(&self,
ctx: &BindgenContext,
(item, layout): (&Item, Option<Layout>))
-> bool {
if self.has_non_type_template_params() {
return layout.map_or(true, |l| l.opaque().can_derive_copy(ctx, ()));
}
// NOTE: Take into account that while unions in C and C++ are copied by
// default, the may have an explicit destructor in C++, so we can't
// defer this check just for the union case.
if self.has_destructor(ctx) {
return false;
}
if self.kind == CompKind::Union {
if !ctx.options().unstable_rust {
// NOTE: If there's no template parameters we can derive copy
// unconditionally, since arrays are magical for rustc, and
// __BindgenUnionField always implements copy.
return true;
}
// https://github.com/rust-lang/rust/issues/36640
if !self.template_params.is_empty() ||
item.used_template_params(ctx).is_some() {
return false;
}
}
self.base_members
.iter()
.all(|base| base.ty.can_derive_copy(ctx, ())) &&
self.fields().iter().all(|field| field.can_derive_copy(ctx, ()))
}
fn can_derive_copy_in_array(&self,
ctx: &BindgenContext,
extra: (&Item, Option<Layout>))
-> bool {
self.can_derive_copy(ctx, extra)
}
}
impl Trace for CompInfo {
type Extra = Item;
fn trace<T>(&self, context: &BindgenContext, tracer: &mut T, item: &Item)
where T: Tracer,
where
T: Tracer,
{
let params = item.all_template_params(context).unwrap_or(vec![]);
for p in params {

Diff not shown because of its large size.

third_party/rust/bindgen-0.30.0/src/ir/derive.rs (vendored, new file, 129 lines added)

@ -0,0 +1,129 @@
//! Traits for determining whether we can derive traits for a thing or not.
use super::context::BindgenContext;
/// A trait that encapsulates the logic for whether or not we can derive `Debug`
/// for a given thing.
///
/// This should ideally be a no-op that just returns `true`, but instead needs
/// to be a recursive method that checks whether all the proper members can
/// derive debug or not, because of the limit rust has on 32 items as max in the
/// array.
pub trait CanDeriveDebug {
/// Return `true` if `Debug` can be derived for this thing, `false`
/// otherwise.
fn can_derive_debug(&self, ctx: &BindgenContext) -> bool;
}
/// A trait that encapsulates the logic for whether or not we can derive `Debug`.
/// The difference between this trait and the CanDeriveDebug is that the type
/// implementing this trait cannot use recursion or lookup result from fix point
/// analysis. It's a helper trait for fix point analysis.
pub trait CanTriviallyDeriveDebug {
/// Return `true` if `Debug` can be derived for this thing, `false`
/// otherwise.
fn can_trivially_derive_debug(&self) -> bool;
}
/// A trait that encapsulates the logic for whether or not we can derive `Copy`
/// for a given thing.
pub trait CanDeriveCopy<'a> {
/// Return `true` if `Copy` can be derived for this thing, `false`
/// otherwise.
fn can_derive_copy(&'a self, ctx: &'a BindgenContext) -> bool;
}
/// A trait that encapsulates the logic for whether or not we can derive `Copy`.
/// The difference between this trait and the CanDeriveCopy is that the type
/// implementing this trait cannot use recursion or lookup result from fix point
/// analysis. It's a helper trait for fix point analysis.
pub trait CanTriviallyDeriveCopy {
/// Return `true` if `Copy` can be derived for this thing, `false`
/// otherwise.
fn can_trivially_derive_copy(&self) -> bool;
}
/// A trait that encapsulates the logic for whether or not we can derive `Default`
/// for a given thing.
///
/// This should ideally be a no-op that just returns `true`, but instead needs
/// to be a recursive method that checks whether all the proper members can
/// derive default or not, because of the limit rust has on 32 items as max in the
/// array.
pub trait CanDeriveDefault {
/// Return `true` if `Default` can be derived for this thing, `false`
/// otherwise.
fn can_derive_default(&self, ctx: &BindgenContext) -> bool;
}
/// A trait that encapsulates the logic for whether or not we can derive `Default`.
/// The difference between this trait and the CanDeriveDefault is that the type
/// implementing this trait cannot use recursion or lookup result from fix point
/// analysis. It's a helper trait for fix point analysis.
pub trait CanTriviallyDeriveDefault {
/// Return `true` if `Default` can be derived for this thing, `false`
/// otherwise.
fn can_trivially_derive_default(&self) -> bool;
}
/// A trait that encapsulates the logic for whether or not we can derive `Hash`
/// for a given thing.
///
/// This should ideally be a no-op that just returns `true`, but instead needs
/// to be a recursive method that checks whether all the proper members can
/// derive default or not, because of the limit rust has on 32 items as max in the
/// array.
pub trait CanDeriveHash {
/// Return `true` if `Default` can be derived for this thing, `false`
/// otherwise.
fn can_derive_hash(&self, ctx: &BindgenContext) -> bool;
}
/// A trait that encapsulates the logic for whether or not we can derive `PartialEq`
/// for a given thing.
///
/// This should ideally be a no-op that just returns `true`, but instead needs
/// to be a recursive method that checks whether all the proper members can
/// derive default or not, because of the limit rust has on 32 items as max in the
/// array.
pub trait CanDerivePartialEq {
/// Return `true` if `Default` can be derived for this thing, `false`
/// otherwise.
fn can_derive_partialeq(&self, ctx: &BindgenContext) -> bool;
}
/// A trait that encapsulates the logic for whether or not we can derive `Eq`
/// for a given thing.
///
/// This should ideally be a no-op that just returns `true`, but instead needs
/// to be a recursive method that checks whether all the proper members can
/// derive eq or not, because of the limit rust has on 32 items as max in the
/// array.
pub trait CanDeriveEq {
/// Return `true` if `Eq` can be derived for this thing, `false`
/// otherwise.
fn can_derive_eq(&self,
ctx: &BindgenContext)
-> bool;
}
/// A trait that encapsulates the logic for whether or not we can derive `Hash`.
/// The difference between this trait and the CanDeriveHash is that the type
/// implementing this trait cannot use recursion or lookup result from fix point
/// analysis. It's a helper trait for fix point analysis.
pub trait CanTriviallyDeriveHash {
/// Return `true` if `Hash` can be derived for this thing, `false`
/// otherwise.
fn can_trivially_derive_hash(&self) -> bool;
}
/// A trait that encapsulates the logic for whether or not we can derive `PartialEq`.
/// The difference between this trait and the CanDerivePartialEq is that the type
/// implementing this trait cannot use recursion or lookup result from fix point
/// analysis. It's a helper trait for fix point analysis.
pub trait CanTriviallyDerivePartialEq {
/// Return `true` if `PartialEq` can be derived for this thing, `false`
/// otherwise.
fn can_trivially_derive_partialeq(&self) -> bool;
}
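
As the doc comments in this new file spell out, the derive checks come in two families: the CanDerive* traits take a BindgenContext and may lean on recursion or the fix-point analyses, while the CanTriviallyDerive* traits must answer from local information only, so the analyses can use them as per-node predicates. A toy illustration of the trivial side, with an invented type rather than bindgen's IR:

// Illustrative sketch only.
trait CanTriviallyDeriveDebug {
    fn can_trivially_derive_debug(&self) -> bool;
}

struct ArrayField {
    len: usize,
}

// A "trivial" check answers from purely local information, with no
// context and no recursion; the recursive CanDeriveDebug-style traits
// layer whole-graph reasoning on top of results like this one.
impl CanTriviallyDeriveDebug for ArrayField {
    fn can_trivially_derive_debug(&self) -> bool {
        // The 32-item array limit the doc comments above refer to.
        self.len <= 32
    }
}

fn main() {
    assert!(ArrayField { len: 8 }.can_trivially_derive_debug());
    assert!(!ArrayField { len: 64 }.can_trivially_derive_debug());
}
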


@ -11,16 +11,19 @@ use std::path::Path;
pub trait DotAttributes {
/// Write this thing's attributes to the given output. Each attribute must
/// be its own `<tr>...</tr>`.
fn dot_attributes<W>(&self,
ctx: &BindgenContext,
out: &mut W)
-> io::Result<()>
where W: io::Write;
fn dot_attributes<W>(
&self,
ctx: &BindgenContext,
out: &mut W,
) -> io::Result<()>
where
W: io::Write;
}
/// Write a graphviz dot file containing our IR.
pub fn write_dot_file<P>(ctx: &BindgenContext, path: P) -> io::Result<()>
where P: AsRef<Path>,
where
P: AsRef<Path>,
{
let file = try!(File::create(path));
let mut dot_file = io::BufWriter::new(file);
@ -29,28 +32,34 @@ pub fn write_dot_file<P>(ctx: &BindgenContext, path: P) -> io::Result<()>
let mut err: Option<io::Result<_>> = None;
for (id, item) in ctx.items() {
try!(writeln!(&mut dot_file,
r#"{} [fontname="courier", label=< <table border="0" align="left">"#,
id.as_usize()));
try!(writeln!(
&mut dot_file,
r#"{} [fontname="courier", label=< <table border="0" align="left">"#,
id.as_usize()
));
try!(item.dot_attributes(ctx, &mut dot_file));
try!(writeln!(&mut dot_file, r#"</table> >];"#));
item.trace(ctx,
&mut |sub_id: ItemId, edge_kind| {
if err.is_some() {
return;
}
item.trace(
ctx,
&mut |sub_id: ItemId, edge_kind| {
if err.is_some() {
return;
}
match writeln!(&mut dot_file,
"{} -> {} [label={:?}];",
id.as_usize(),
sub_id.as_usize(),
edge_kind) {
Ok(_) => {}
Err(e) => err = Some(Err(e)),
}
},
&());
match writeln!(
&mut dot_file,
"{} -> {} [label={:?}];",
id.as_usize(),
sub_id.as_usize(),
edge_kind
) {
Ok(_) => {}
Err(e) => err = Some(Err(e)),
}
},
&(),
);
if let Some(err) = err {
return err;
@ -58,10 +67,12 @@ pub fn write_dot_file<P>(ctx: &BindgenContext, path: P) -> io::Result<()>
if let Some(module) = item.as_module() {
for child in module.children() {
try!(writeln!(&mut dot_file,
"{} -> {} [style=dotted]",
item.id().as_usize(),
child.as_usize()));
try!(writeln!(
&mut dot_file,
"{} -> {} [style=dotted]",
item.id().as_usize(),
child.as_usize()
));
}
}
}


@ -53,9 +53,10 @@ impl Enum {
}
/// Construct an enumeration from the given Clang type.
pub fn from_ty(ty: &clang::Type,
ctx: &mut BindgenContext)
-> Result<Self, ParseError> {
pub fn from_ty(
ty: &clang::Type,
ctx: &mut BindgenContext,
) -> Result<Self, ParseError> {
use clang_sys::*;
debug!("Enum::from_ty {:?}", ty);
@ -64,20 +65,20 @@ impl Enum {
}
let declaration = ty.declaration().canonical();
let repr = declaration.enum_type()
.and_then(|et| Item::from_ty(&et, declaration, None, ctx).ok());
let repr = declaration.enum_type().and_then(|et| {
Item::from_ty(&et, declaration, None, ctx).ok()
});
let mut variants = vec![];
// Assume signedness since the default type by the C standard is an int.
let is_signed =
repr.and_then(|r| ctx.resolve_type(r).safe_canonical_type(ctx))
.map_or(true, |ty| match *ty.kind() {
TypeKind::Int(ref int_kind) => int_kind.is_signed(),
ref other => {
panic!("Since when enums can be non-integers? {:?}",
other)
}
});
let is_signed = repr.and_then(
|r| ctx.resolve_type(r).safe_canonical_type(ctx),
).map_or(true, |ty| match *ty.kind() {
TypeKind::Int(ref int_kind) => int_kind.is_signed(),
ref other => {
panic!("Since when enums can be non-integers? {:?}", other)
}
});
let type_name = ty.spelling();
let type_name = if type_name.is_empty() {
@ -98,26 +99,28 @@ impl Enum {
if let Some(val) = value {
let name = cursor.spelling();
let custom_behavior = ctx.parse_callbacks()
.and_then(|t| {
t.enum_variant_behavior(type_name, &name, val)
})
.and_then(
|t| t.enum_variant_behavior(type_name, &name, val),
)
.or_else(|| {
Annotations::new(&cursor)
.and_then(|anno| if anno.hide() {
Annotations::new(&cursor).and_then(
|anno| if anno.hide() {
Some(EnumVariantCustomBehavior::Hide)
} else if
anno.constify_enum_variant() {
} else if anno.constify_enum_variant() {
Some(EnumVariantCustomBehavior::Constify)
} else {
None
})
},
)
});
let comment = cursor.raw_comment();
variants.push(EnumVariant::new(name,
comment,
val,
custom_behavior));
variants.push(EnumVariant::new(
name,
comment,
val,
custom_behavior,
));
}
}
CXChildVisit_Continue
@ -126,15 +129,19 @@ impl Enum {
}
/// Whether the enum should be an constified enum module
pub fn is_constified_enum_module(&self, ctx: &BindgenContext, item: &Item) -> bool {
pub fn is_constified_enum_module(
&self,
ctx: &BindgenContext,
item: &Item,
) -> bool {
let name = item.canonical_name(ctx);
let enum_ty = item.expect_type();
ctx.options().constified_enum_modules.matches(&name) ||
(enum_ty.name().is_none() &&
self.variants()
.iter()
.any(|v| ctx.options().constified_enum_modules.matches(&v.name())))
(enum_ty.name().is_none() &&
self.variants().iter().any(|v| {
ctx.options().constified_enum_modules.matches(&v.name())
}))
}
}
@ -166,11 +173,12 @@ pub enum EnumVariantValue {
impl EnumVariant {
/// Construct a new enumeration variant from the given parts.
pub fn new(name: String,
comment: Option<String>,
val: EnumVariantValue,
custom_behavior: Option<EnumVariantCustomBehavior>)
-> Self {
pub fn new(
name: String,
comment: Option<String>,
val: EnumVariantValue,
custom_behavior: Option<EnumVariantCustomBehavior>,
) -> Self {
EnumVariant {
name: name,
comment: comment,
@ -192,14 +200,16 @@ impl EnumVariant {
/// Returns whether this variant should be enforced to be a constant by code
/// generation.
pub fn force_constification(&self) -> bool {
self.custom_behavior
.map_or(false, |b| b == EnumVariantCustomBehavior::Constify)
self.custom_behavior.map_or(false, |b| {
b == EnumVariantCustomBehavior::Constify
})
}
/// Returns whether the current variant should be hidden completely from the
/// resulting rust enum.
pub fn hidden(&self) -> bool {
self.custom_behavior
.map_or(false, |b| b == EnumVariantCustomBehavior::Hide)
self.custom_behavior.map_or(false, |b| {
b == EnumVariantCustomBehavior::Hide
})
}
}


@ -8,11 +8,14 @@ use super::traversal::{EdgeKind, Trace, Tracer};
use super::ty::TypeKind;
use clang;
use clang_sys::{self, CXCallingConv};
use ir::derive::CanTriviallyDeriveDebug;
use ir::derive::{CanTriviallyDeriveDebug, CanTriviallyDeriveHash,
CanTriviallyDerivePartialEq};
use parse::{ClangItemParser, ClangSubItemParser, ParseError, ParseResult};
use std::io;
use syntax::abi;
const RUST_DERIVE_FUNPTR_LIMIT: usize = 12;
/// What kind of a function are we looking at?
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum FunctionKind {
@ -26,8 +29,12 @@ impl FunctionKind {
fn from_cursor(cursor: &clang::Cursor) -> Option<FunctionKind> {
Some(match cursor.kind() {
clang_sys::CXCursor_FunctionDecl => FunctionKind::Function,
clang_sys::CXCursor_Constructor => FunctionKind::Method(MethodKind::Constructor),
clang_sys::CXCursor_Destructor => FunctionKind::Method(MethodKind::Destructor),
clang_sys::CXCursor_Constructor => FunctionKind::Method(
MethodKind::Constructor,
),
clang_sys::CXCursor_Destructor => FunctionKind::Method(
MethodKind::Destructor,
),
clang_sys::CXCursor_CXXMethod => {
if cursor.method_is_virtual() {
FunctionKind::Method(MethodKind::Virtual)
@ -66,12 +73,13 @@ pub struct Function {
impl Function {
/// Construct a new function.
pub fn new(name: String,
mangled_name: Option<String>,
sig: ItemId,
comment: Option<String>,
kind: FunctionKind)
-> Self {
pub fn new(
name: String,
mangled_name: Option<String>,
sig: ItemId,
comment: Option<String>,
kind: FunctionKind,
) -> Self {
Function {
name: name,
mangled_name: mangled_name,
@ -103,17 +111,22 @@ impl Function {
}
impl DotAttributes for Function {
fn dot_attributes<W>(&self,
_ctx: &BindgenContext,
out: &mut W)
-> io::Result<()>
where W: io::Write,
fn dot_attributes<W>(
&self,
_ctx: &BindgenContext,
out: &mut W,
) -> io::Result<()>
where
W: io::Write,
{
if let Some(ref mangled) = self.mangled_name {
let mangled: String = mangled.chars().flat_map(|c| c.escape_default()).collect();
try!(writeln!(out,
"<tr><td>mangled name</td><td>{}</td></tr>",
mangled));
let mangled: String =
mangled.chars().flat_map(|c| c.escape_default()).collect();
try!(writeln!(
out,
"<tr><td>mangled name</td><td>{}</td></tr>",
mangled
));
}
Ok(())
@ -174,18 +187,23 @@ fn mangling_hack_if_needed(ctx: &BindgenContext, symbol: &mut String) {
match symbol.chars().next().unwrap() {
// Stripping leading underscore for all names on Darwin and
// C linkage functions on Win32.
'_' => { symbol.remove(0); }
'_' => {
symbol.remove(0);
}
// Stop Rust from prepending underscore for variables on Win32.
'?' => { symbol.insert(0, '\x01'); }
'?' => {
symbol.insert(0, '\x01');
}
_ => {}
}
}
}
/// Get the mangled name for the cursor's referent.
pub fn cursor_mangling(ctx: &BindgenContext,
cursor: &clang::Cursor)
-> Option<String> {
pub fn cursor_mangling(
ctx: &BindgenContext,
cursor: &clang::Cursor,
) -> Option<String> {
use clang_sys;
if !ctx.options().enable_mangling {
return None;
@ -244,11 +262,12 @@ pub fn cursor_mangling(ctx: &BindgenContext,
impl FunctionSig {
/// Construct a new function signature.
pub fn new(return_type: ItemId,
arguments: Vec<(Option<String>, ItemId)>,
is_variadic: bool,
abi: Abi)
-> Self {
pub fn new(
return_type: ItemId,
arguments: Vec<(Option<String>, ItemId)>,
is_variadic: bool,
abi: Abi,
) -> Self {
FunctionSig {
return_type: return_type,
argument_types: arguments,
@ -258,10 +277,11 @@ impl FunctionSig {
}
/// Construct a new function signature from the given Clang type.
pub fn from_ty(ty: &clang::Type,
cursor: &clang::Cursor,
ctx: &mut BindgenContext)
-> Result<Self, ParseError> {
pub fn from_ty(
ty: &clang::Type,
cursor: &clang::Cursor,
ctx: &mut BindgenContext,
) -> Result<Self, ParseError> {
use clang_sys::*;
debug!("FunctionSig::from_ty {:?} {:?}", ty, cursor);
@ -290,7 +310,8 @@ impl FunctionSig {
CXCursor_ObjCClassMethodDecl => {
// For CXCursor_FunctionDecl, cursor.args() is the reliable way
// to get parameter names and types.
cursor.args()
cursor
.args()
.unwrap()
.iter()
.map(|arg| {
@ -326,7 +347,8 @@ impl FunctionSig {
let is_constructor = cursor.kind() == CXCursor_Constructor;
let is_destructor = cursor.kind() == CXCursor_Destructor;
if (is_constructor || is_destructor || is_method) &&
cursor.lexical_parent() != cursor.semantic_parent() {
cursor.lexical_parent() != cursor.semantic_parent()
{
// Only parse constructors once.
return Err(ParseError::Continue);
}
@ -350,10 +372,11 @@ impl FunctionSig {
}
let ty_ret_type = if cursor.kind() == CXCursor_ObjCInstanceMethodDecl ||
cursor.kind() == CXCursor_ObjCClassMethodDecl {
try!(ty.ret_type()
.or_else(|| cursor.ret_type())
.ok_or(ParseError::Continue))
cursor.kind() == CXCursor_ObjCClassMethodDecl
{
try!(ty.ret_type().or_else(|| cursor.ret_type()).ok_or(
ParseError::Continue,
))
} else {
try!(ty.ret_type().ok_or(ParseError::Continue))
};
@ -393,9 +416,10 @@ impl FunctionSig {
}
impl ClangSubItemParser for Function {
fn parse(cursor: clang::Cursor,
context: &mut BindgenContext)
-> Result<ParseResult<Self>, ParseError> {
fn parse(
cursor: clang::Cursor,
context: &mut BindgenContext,
) -> Result<ParseResult<Self>, ParseError> {
use clang_sys::*;
let kind = match FunctionKind::from_cursor(&cursor) {
@ -415,13 +439,15 @@ impl ClangSubItemParser for Function {
}
if !context.options().generate_inline_functions &&
cursor.is_inlined_function() {
cursor.is_inlined_function()
{
return Err(ParseError::Continue);
}
let linkage = cursor.linkage();
if linkage != CXLinkage_External &&
linkage != CXLinkage_UniqueExternal {
linkage != CXLinkage_UniqueExternal
{
return Err(ParseError::Continue);
}
@ -462,7 +488,8 @@ impl Trace for FunctionSig {
type Extra = ();
fn trace<T>(&self, _: &BindgenContext, tracer: &mut T, _: &())
where T: Tracer,
where
T: Tracer,
{
tracer.visit_kind(self.return_type(), EdgeKind::FunctionReturn);
@ -480,10 +507,35 @@ impl Trace for FunctionSig {
//
// Note that copy is always derived, so we don't need to implement it.
impl CanTriviallyDeriveDebug for FunctionSig {
type Extra = ();
fn can_trivially_derive_debug(&self, _ctx: &BindgenContext, _: ()) -> bool {
const RUST_DERIVE_FUNPTR_LIMIT: usize = 12;
fn can_trivially_derive_debug(&self) -> bool {
if self.argument_types.len() > RUST_DERIVE_FUNPTR_LIMIT {
return false;
}
match self.abi {
Abi::Known(abi::Abi::C) |
Abi::Unknown(..) => true,
_ => false,
}
}
}
impl CanTriviallyDeriveHash for FunctionSig {
fn can_trivially_derive_hash(&self) -> bool {
if self.argument_types.len() > RUST_DERIVE_FUNPTR_LIMIT {
return false;
}
match self.abi {
Abi::Known(abi::Abi::C) |
Abi::Unknown(..) => true,
_ => false,
}
}
}
impl CanTriviallyDerivePartialEq for FunctionSig {
fn can_trivially_derive_partialeq(&self) -> bool {
if self.argument_types.len() > RUST_DERIVE_FUNPTR_LIMIT {
return false;
}
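
The hoisted RUST_DERIVE_FUNPTR_LIMIT constant (12) matches a standard-library boundary: traits such as Debug, Hash and PartialEq are only implemented for function pointers with at most twelve arguments, so all three impls above share the same early-out. A small compile-time illustration; assert_impls_debug is an invented helper, not vendored code:

// Illustrative check only.
use std::fmt::Debug;

fn assert_impls_debug<T: Debug>() {}

fn main() {
    // Debug is implemented for fn pointers with up to 12 arguments,
    // which is the boundary RUST_DERIVE_FUNPTR_LIMIT mirrors.
    assert_impls_debug::<fn(u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8)>();
    // A 13-argument fn pointer would fail this bound at compile time,
    // so signatures past the limit report "cannot trivially derive".
}
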


@ -93,9 +93,13 @@ impl IntKind {
SChar | Short | Int | Long | LongLong | I8 | I16 | I32 | I64 |
I128 => true,
Char { is_signed } => is_signed,
Char {
is_signed,
} => is_signed,
Custom { is_signed, .. } => is_signed,
Custom {
is_signed, ..
} => is_signed,
}
}
@ -105,7 +109,14 @@ impl IntKind {
pub fn known_size(&self) -> Option<usize> {
use self::IntKind::*;
Some(match *self {
Bool | UChar | SChar | U8 | I8 | Char { .. } => 1,
Bool |
UChar |
SChar |
U8 |
I8 |
Char {
..
} => 1,
U16 | I16 => 2,
U32 | I32 => 4,
U64 | I64 => 8,

Diff not shown because of its large size.


@ -127,15 +127,19 @@ impl ItemKind {
}
impl DotAttributes for ItemKind {
fn dot_attributes<W>(&self,
ctx: &BindgenContext,
out: &mut W)
-> io::Result<()>
where W: io::Write,
fn dot_attributes<W>(
&self,
ctx: &BindgenContext,
out: &mut W,
) -> io::Result<()>
where
W: io::Write,
{
try!(writeln!(out,
"<tr><td>kind</td><td>{}</td></tr>",
self.kind_name()));
try!(writeln!(
out,
"<tr><td>kind</td><td>{}</td></tr>",
self.kind_name()
));
match *self {
ItemKind::Module(ref module) => module.dot_attributes(ctx, out),


@ -1,7 +1,8 @@
//! Intermediate representation for the physical layout of some type.
use super::context::BindgenContext;
use super::derive::{CanDeriveCopy, CanTriviallyDeriveDebug, CanDeriveDefault};
use super::derive::{CanTriviallyDeriveCopy, CanTriviallyDeriveDebug,
CanTriviallyDeriveDefault, CanTriviallyDeriveHash,
CanTriviallyDerivePartialEq};
use super::ty::{RUST_DERIVE_IN_ARRAY_LIMIT, Type, TypeKind};
use clang;
use std::{cmp, mem};
@ -21,8 +22,10 @@ pub struct Layout {
fn test_layout_for_size() {
let ptr_size = mem::size_of::<*mut ()>();
assert_eq!(Layout::for_size(ptr_size), Layout::new(ptr_size, ptr_size));
assert_eq!(Layout::for_size(3 * ptr_size),
Layout::new(3 * ptr_size, ptr_size));
assert_eq!(
Layout::for_size(3 * ptr_size),
Layout::new(3 * ptr_size, ptr_size)
);
}
impl Layout {
@ -41,7 +44,8 @@ impl Layout {
pub fn for_size(size: usize) -> Self {
let mut next_align = 2;
while size % next_align == 0 &&
next_align <= mem::size_of::<*mut ()>() {
next_align <= mem::size_of::<*mut ()>()
{
next_align *= 2;
}
Layout {
@ -103,32 +107,41 @@ impl Opaque {
}
impl CanTriviallyDeriveDebug for Opaque {
type Extra = ();
fn can_trivially_derive_debug(&self, _: &BindgenContext, _: ()) -> bool {
self.array_size()
.map_or(false, |size| size <= RUST_DERIVE_IN_ARRAY_LIMIT)
fn can_trivially_derive_debug(&self) -> bool {
self.array_size().map_or(false, |size| {
size <= RUST_DERIVE_IN_ARRAY_LIMIT
})
}
}
impl<'a> CanDeriveDefault<'a> for Opaque {
type Extra = ();
fn can_derive_default(&self, _: &BindgenContext, _: ()) -> bool {
self.array_size()
.map_or(false, |size| size <= RUST_DERIVE_IN_ARRAY_LIMIT)
impl CanTriviallyDeriveDefault for Opaque {
fn can_trivially_derive_default(&self) -> bool {
self.array_size().map_or(false, |size| {
size <= RUST_DERIVE_IN_ARRAY_LIMIT
})
}
}
impl<'a> CanDeriveCopy<'a> for Opaque {
type Extra = ();
fn can_derive_copy(&self, _: &BindgenContext, _: ()) -> bool {
self.array_size()
.map_or(false, |size| size <= RUST_DERIVE_IN_ARRAY_LIMIT)
}
fn can_derive_copy_in_array(&self, ctx: &BindgenContext, _: ()) -> bool {
self.can_derive_copy(ctx, ())
impl CanTriviallyDeriveCopy for Opaque {
fn can_trivially_derive_copy(&self) -> bool {
self.array_size().map_or(false, |size| {
size <= RUST_DERIVE_IN_ARRAY_LIMIT
})
}
}
impl CanTriviallyDeriveHash for Opaque {
fn can_trivially_derive_hash(&self) -> bool {
self.array_size().map_or(false, |size| {
size <= RUST_DERIVE_IN_ARRAY_LIMIT
})
}
}
impl CanTriviallyDerivePartialEq for Opaque {
fn can_trivially_derive_partialeq(&self) -> bool {
self.array_size().map_or(false, |size| {
size <= RUST_DERIVE_IN_ARRAY_LIMIT
})
}
}
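
Every one of the Opaque impls above reduces to the same check against RUST_DERIVE_IN_ARRAY_LIMIT; the limit exists because, before const generics, the standard library only implemented traits like Debug, Default and Hash for arrays of up to 32 elements (the "32 items as max in the array" the derive.rs doc comments mention). The shared shape of the check, as a standalone sketch with an invented helper name:

// Illustrative sketch only.
const RUST_DERIVE_IN_ARRAY_LIMIT: usize = 32;

// `array_size` is None when the opaque blob has no usable array layout;
// be conservative and refuse to derive in that case.
fn fits_derive_limit(array_size: Option<usize>) -> bool {
    array_size.map_or(false, |size| size <= RUST_DERIVE_IN_ARRAY_LIMIT)
}

fn main() {
    assert!(fits_derive_limit(Some(16)));
    assert!(!fits_derive_limit(Some(64)));
    assert!(!fits_derive_limit(None));
}
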


@ -60,28 +60,31 @@ impl Module {
}
impl DotAttributes for Module {
fn dot_attributes<W>(&self,
_ctx: &BindgenContext,
out: &mut W)
-> io::Result<()>
where W: io::Write,
fn dot_attributes<W>(
&self,
_ctx: &BindgenContext,
out: &mut W,
) -> io::Result<()>
where
W: io::Write,
{
writeln!(out, "<tr><td>ModuleKind</td><td>{:?}</td></tr>", self.kind)
}
}
impl ClangSubItemParser for Module {
fn parse(cursor: clang::Cursor,
ctx: &mut BindgenContext)
-> Result<ParseResult<Self>, ParseError> {
fn parse(
cursor: clang::Cursor,
ctx: &mut BindgenContext,
) -> Result<ParseResult<Self>, ParseError> {
use clang_sys::*;
match cursor.kind() {
CXCursor_Namespace => {
let module_id = ctx.module(cursor);
ctx.with_module(module_id, |ctx| {
cursor.visit(|cursor| {
parse_one(ctx, cursor, Some(module_id))
})
cursor.visit(
|cursor| parse_one(ctx, cursor, Some(module_id)),
)
});
Ok(ParseResult::AlreadyResolved(module_id))


@ -95,9 +95,10 @@ impl ObjCInterface {
}
/// Parses the Objective C interface from the cursor
pub fn from_ty(cursor: &clang::Cursor,
ctx: &mut BindgenContext)
-> Option<Self> {
pub fn from_ty(
cursor: &clang::Cursor,
ctx: &mut BindgenContext,
) -> Option<Self> {
let name = cursor.spelling();
let mut interface = Self::new(&name);
@ -170,10 +171,11 @@ impl ObjCInterface {
}
impl ObjCMethod {
fn new(name: &str,
signature: FunctionSig,
is_class_method: bool)
-> ObjCMethod {
fn new(
name: &str,
signature: FunctionSig,
is_class_method: bool,
) -> ObjCMethod {
let split_name: Vec<&str> = name.split(':').collect();
let rust_name = split_name.join("_");
@ -220,12 +222,15 @@ impl ObjCMethod {
// Check right amount of arguments
if args.len() != split_name.len() {
panic!("Incorrect method name or arguments for objc method, {:?} vs {:?}",
args,
split_name);
panic!(
"Incorrect method name or arguments for objc method, {:?} vs {:?}",
args,
split_name
);
}
split_name.iter()
split_name
.iter()
.zip(args.iter())
.map(|parts| format!("{}:{} ", parts.0, parts.1))
.collect::<Vec<_>>()
@ -237,7 +242,8 @@ impl Trace for ObjCInterface {
type Extra = ();
fn trace<T>(&self, context: &BindgenContext, tracer: &mut T, _: &())
where T: Tracer,
where
T: Tracer,
{
for method in &self.methods {
method.signature.trace(context, tracer, &());


@ -28,7 +28,6 @@
//! ```
use super::context::{BindgenContext, ItemId};
use super::derive::{CanDeriveCopy};
use super::item::{IsOpaque, Item, ItemAncestors, ItemCanonicalPath};
use super::traversal::{EdgeKind, Trace, Tracer};
use clang;
@ -109,9 +108,8 @@ pub trait TemplateParameters {
/// parameters. Of course, Rust does not allow generic parameters to be
/// anything but types, so we must treat them as opaque, and avoid
/// instantiating them.
fn self_template_params(&self,
ctx: &BindgenContext)
-> Option<Vec<ItemId>>;
fn self_template_params(&self, ctx: &BindgenContext)
-> Option<Vec<ItemId>>;
/// Get the number of free template parameters this template declaration
/// has.
@ -139,7 +137,8 @@ pub trait TemplateParameters {
/// `Foo<int,char>::Inner`. `Foo` *must* be instantiated with template
/// arguments before we can gain access to the `Inner` member type.
fn all_template_params(&self, ctx: &BindgenContext) -> Option<Vec<ItemId>>
where Self: ItemAncestors,
where
Self: ItemAncestors,
{
let each_self_params: Vec<Vec<_>> = self.ancestors(ctx)
.filter_map(|id| id.self_template_params(ctx))
@ -147,10 +146,13 @@ pub trait TemplateParameters {
if each_self_params.is_empty() {
None
} else {
Some(each_self_params.into_iter()
.rev()
.flat_map(|params| params)
.collect())
Some(
each_self_params
.into_iter()
.rev()
.flat_map(|params| params)
.collect(),
)
}
}
@ -158,19 +160,23 @@ pub trait TemplateParameters {
/// subset of `all_template_params` and does not necessarily contain any of
/// `self_template_params`.
fn used_template_params(&self, ctx: &BindgenContext) -> Option<Vec<ItemId>>
where Self: AsRef<ItemId>,
where
Self: AsRef<ItemId>,
{
assert!(ctx.in_codegen_phase(),
"template parameter usage is not computed until codegen");
assert!(
ctx.in_codegen_phase(),
"template parameter usage is not computed until codegen"
);
let id = *self.as_ref();
ctx.resolve_item(id)
.all_template_params(ctx)
.map(|all_params| {
all_params.into_iter()
ctx.resolve_item(id).all_template_params(ctx).map(
|all_params| {
all_params
.into_iter()
.filter(|p| ctx.uses_template_parameter(id, *p))
.collect()
})
},
)
}
}
@ -180,13 +186,18 @@ pub trait AsTemplateParam {
type Extra;
/// Convert this thing to the item id of a named template type parameter.
fn as_template_param(&self,
ctx: &BindgenContext,
extra: &Self::Extra)
-> Option<ItemId>;
fn as_template_param(
&self,
ctx: &BindgenContext,
extra: &Self::Extra,
) -> Option<ItemId>;
/// Is this a named template type parameter?
fn is_template_param(&self, ctx: &BindgenContext, extra: &Self::Extra) -> bool {
fn is_template_param(
&self,
ctx: &BindgenContext,
extra: &Self::Extra,
) -> bool {
self.as_template_param(ctx, extra).is_some()
}
}
@ -203,10 +214,12 @@ pub struct TemplateInstantiation {
impl TemplateInstantiation {
/// Construct a new template instantiation from the given parts.
pub fn new<I>(template_definition: ItemId,
template_args: I)
-> TemplateInstantiation
where I: IntoIterator<Item = ItemId>,
pub fn new<I>(
template_definition: ItemId,
template_args: I,
) -> TemplateInstantiation
where
I: IntoIterator<Item = ItemId>,
{
TemplateInstantiation {
definition: template_definition,
@ -225,9 +238,10 @@ impl TemplateInstantiation {
}
/// Parse a `TemplateInstantiation` from a clang `Type`.
pub fn from_ty(ty: &clang::Type,
ctx: &mut BindgenContext)
-> Option<TemplateInstantiation> {
pub fn from_ty(
ty: &clang::Type,
ctx: &mut BindgenContext,
) -> Option<TemplateInstantiation> {
use clang_sys::*;
let template_args = ty.template_args()
@ -251,51 +265,49 @@ impl TemplateInstantiation {
});
let declaration = ty.declaration();
let definition = if declaration.kind() == CXCursor_TypeAliasTemplateDecl {
Some(declaration)
} else {
declaration
.specialized()
.or_else(|| {
let definition =
if declaration.kind() == CXCursor_TypeAliasTemplateDecl {
Some(declaration)
} else {
declaration.specialized().or_else(|| {
let mut template_ref = None;
ty.declaration().visit(|child| {
if child.kind() == CXCursor_TemplateRef {
template_ref = Some(child);
return CXVisit_Break;
}
if child.kind() == CXCursor_TemplateRef {
template_ref = Some(child);
return CXVisit_Break;
}
// Instantiations of template aliases might have the
// TemplateRef to the template alias definition arbitrarily
// deep, so we need to recurse here and not only visit
// direct children.
CXChildVisit_Recurse
});
// Instantiations of template aliases might have the
// TemplateRef to the template alias definition arbitrarily
// deep, so we need to recurse here and not only visit
// direct children.
CXChildVisit_Recurse
});
template_ref.and_then(|cur| cur.referenced())
})
};
};
let definition = match definition {
Some(def) => def,
None => {
if !ty.declaration().is_builtin() {
warn!("Could not find template definition for template \
instantiation");
warn!(
"Could not find template definition for template \
instantiation"
);
}
return None
return None;
}
};
let template_definition =
Item::from_ty_or_ref(definition.cur_type(), definition, None, ctx);
Some(TemplateInstantiation::new(template_definition, template_args))
}
/// Does this instantiation have a destructor?
pub fn has_destructor(&self, ctx: &BindgenContext) -> bool {
ctx.resolve_type(self.definition).has_destructor(ctx) ||
self.args.iter().any(|arg| ctx.resolve_type(*arg).has_destructor(ctx))
Some(TemplateInstantiation::new(
template_definition,
template_args,
))
}
}
@ -321,9 +333,10 @@ impl IsOpaque for TemplateInstantiation {
.map(|arg| {
let arg_path = arg.canonical_path(ctx);
arg_path[1..].join("::")
}).collect();
})
.collect();
{
let mut last = path.last_mut().unwrap();
let last = path.last_mut().unwrap();
last.push('<');
last.push_str(&args.join(", "));
last.push('>');
@ -333,25 +346,12 @@ impl IsOpaque for TemplateInstantiation {
}
}
impl<'a> CanDeriveCopy<'a> for TemplateInstantiation {
type Extra = ();
fn can_derive_copy(&self, ctx: &BindgenContext, _: ()) -> bool {
self.definition.can_derive_copy(ctx, ()) &&
self.args.iter().all(|arg| arg.can_derive_copy(ctx, ()))
}
fn can_derive_copy_in_array(&self, ctx: &BindgenContext, _: ()) -> bool {
self.definition.can_derive_copy_in_array(ctx, ()) &&
self.args.iter().all(|arg| arg.can_derive_copy_in_array(ctx, ()))
}
}
impl Trace for TemplateInstantiation {
type Extra = ();
fn trace<T>(&self, _ctx: &BindgenContext, tracer: &mut T, _: &())
where T: Tracer,
where
T: Tracer,
{
tracer.visit_kind(self.definition, EdgeKind::TemplateDeclaration);
for &item in self.template_arguments() {


@ -214,7 +214,9 @@ pub fn no_edges(_: &BindgenContext, _: Edge) -> bool {
pub fn codegen_edges(ctx: &BindgenContext, edge: Edge) -> bool {
let cc = &ctx.options().codegen_config;
match edge.kind {
EdgeKind::Generic => ctx.resolve_item(edge.to).is_enabled_for_codegen(ctx),
EdgeKind::Generic => {
ctx.resolve_item(edge.to).is_enabled_for_codegen(ctx)
}
// We statically know the kind of item that non-generic edges can point
// to, so we don't need to actually resolve the item and check
@ -265,12 +267,16 @@ impl<'ctx, 'gen> TraversalStorage<'ctx, 'gen> for ItemSet {
/// each item. This is useful for providing debug assertions with meaningful
/// diagnostic messages about dangling items.
#[derive(Debug)]
pub struct Paths<'ctx, 'gen>(BTreeMap<ItemId, ItemId>,
&'ctx BindgenContext<'gen>)
where 'gen: 'ctx;
pub struct Paths<'ctx, 'gen>(
BTreeMap<ItemId, ItemId>,
&'ctx BindgenContext<'gen>
)
where
'gen: 'ctx;
impl<'ctx, 'gen> TraversalStorage<'ctx, 'gen> for Paths<'ctx, 'gen>
where 'gen: 'ctx,
where
'gen: 'ctx,
{
fn new(ctx: &'ctx BindgenContext<'gen>) -> Self {
Paths(BTreeMap::new(), ctx)
@ -284,10 +290,10 @@ impl<'ctx, 'gen> TraversalStorage<'ctx, 'gen> for Paths<'ctx, 'gen>
let mut path = vec![];
let mut current = item;
loop {
let predecessor = *self.0
.get(&current)
.expect("We know we found this item id, so it must have a \
predecessor");
let predecessor = *self.0.get(&current).expect(
"We know we found this item id, so it must have a \
predecessor",
);
if predecessor == current {
break;
}
@ -295,9 +301,11 @@ impl<'ctx, 'gen> TraversalStorage<'ctx, 'gen> for Paths<'ctx, 'gen>
current = predecessor;
}
path.reverse();
panic!("Found reference to dangling id = {:?}\nvia path = {:?}",
item,
path);
panic!(
"Found reference to dangling id = {:?}\nvia path = {:?}",
item,
path
);
}
newly_discovered
@ -349,7 +357,8 @@ pub trait Tracer {
}
impl<F> Tracer for F
where F: FnMut(ItemId, EdgeKind),
where
F: FnMut(ItemId, EdgeKind),
{
fn visit_kind(&mut self, item: ItemId, kind: EdgeKind) {
(*self)(item, kind)
@ -367,21 +376,24 @@ pub trait Trace {
type Extra;
/// Trace all of this item's outgoing edges to other items.
fn trace<T>(&self,
context: &BindgenContext,
tracer: &mut T,
extra: &Self::Extra)
where T: Tracer;
fn trace<T>(
&self,
context: &BindgenContext,
tracer: &mut T,
extra: &Self::Extra,
) where
T: Tracer;
}
/// An graph traversal of the transitive closure of references between items.
///
/// See `BindgenContext::whitelisted_items` for more information.
pub struct ItemTraversal<'ctx, 'gen, Storage, Queue, Predicate>
where 'gen: 'ctx,
Storage: TraversalStorage<'ctx, 'gen>,
Queue: TraversalQueue,
Predicate: TraversalPredicate,
where
'gen: 'ctx,
Storage: TraversalStorage<'ctx, 'gen>,
Queue: TraversalQueue,
Predicate: TraversalPredicate,
{
ctx: &'ctx BindgenContext<'gen>,
@ -398,22 +410,22 @@ pub struct ItemTraversal<'ctx, 'gen, Storage, Queue, Predicate>
currently_traversing: Option<ItemId>,
}
impl<'ctx, 'gen, Storage, Queue, Predicate> ItemTraversal<'ctx,
'gen,
Storage,
Queue,
Predicate>
where 'gen: 'ctx,
Storage: TraversalStorage<'ctx, 'gen>,
Queue: TraversalQueue,
Predicate: TraversalPredicate,
impl<'ctx, 'gen, Storage, Queue, Predicate>
ItemTraversal<'ctx, 'gen, Storage, Queue, Predicate>
where
'gen: 'ctx,
Storage: TraversalStorage<'ctx, 'gen>,
Queue: TraversalQueue,
Predicate: TraversalPredicate,
{
/// Begin a new traversal, starting from the given roots.
pub fn new<R>(ctx: &'ctx BindgenContext<'gen>,
roots: R,
predicate: Predicate)
-> ItemTraversal<'ctx, 'gen, Storage, Queue, Predicate>
where R: IntoIterator<Item = ItemId>,
pub fn new<R>(
ctx: &'ctx BindgenContext<'gen>,
roots: R,
predicate: Predicate,
) -> ItemTraversal<'ctx, 'gen, Storage, Queue, Predicate>
where
R: IntoIterator<Item = ItemId>,
{
let mut seen = Storage::new(ctx);
let mut queue = Queue::default();
@ -435,10 +447,11 @@ impl<'ctx, 'gen, Storage, Queue, Predicate> ItemTraversal<'ctx,
impl<'ctx, 'gen, Storage, Queue, Predicate> Tracer
for ItemTraversal<'ctx, 'gen, Storage, Queue, Predicate>
where 'gen: 'ctx,
Storage: TraversalStorage<'ctx, 'gen>,
Queue: TraversalQueue,
Predicate: TraversalPredicate,
where
'gen: 'ctx,
Storage: TraversalStorage<'ctx, 'gen>,
Queue: TraversalQueue,
Predicate: TraversalPredicate,
{
fn visit_kind(&mut self, item: ItemId, kind: EdgeKind) {
let edge = Edge::new(item, kind);
@ -446,8 +459,8 @@ impl<'ctx, 'gen, Storage, Queue, Predicate> Tracer
return;
}
let is_newly_discovered = self.seen
.add(self.currently_traversing, item);
let is_newly_discovered =
self.seen.add(self.currently_traversing, item);
if is_newly_discovered {
self.queue.push(item)
}
@ -456,10 +469,11 @@ impl<'ctx, 'gen, Storage, Queue, Predicate> Tracer
impl<'ctx, 'gen, Storage, Queue, Predicate> Iterator
for ItemTraversal<'ctx, 'gen, Storage, Queue, Predicate>
where 'gen: 'ctx,
Storage: TraversalStorage<'ctx, 'gen>,
Queue: TraversalQueue,
Predicate: TraversalPredicate,
where
'gen: 'ctx,
Storage: TraversalStorage<'ctx, 'gen>,
Queue: TraversalQueue,
Predicate: TraversalPredicate,
{
type Item = ItemId;
@ -470,10 +484,14 @@ impl<'ctx, 'gen, Storage, Queue, Predicate> Iterator
};
let newly_discovered = self.seen.add(None, id);
debug_assert!(!newly_discovered,
"should have already seen anything we get out of our queue");
debug_assert!(self.ctx.resolve_item_fallible(id).is_some(),
"should only get IDs of actual items in our context during traversal");
debug_assert!(
!newly_discovered,
"should have already seen anything we get out of our queue"
);
debug_assert!(
self.ctx.resolve_item_fallible(id).is_some(),
"should only get IDs of actual items in our context during traversal"
);
self.currently_traversing = Some(id);
id.trace(self.ctx, self, &());
@ -488,11 +506,13 @@ impl<'ctx, 'gen, Storage, Queue, Predicate> Iterator
/// See `BindgenContext::assert_no_dangling_item_traversal` for more
/// information.
pub type AssertNoDanglingItemsTraversal<'ctx, 'gen> =
ItemTraversal<'ctx,
'gen,
Paths<'ctx, 'gen>,
VecDeque<ItemId>,
for<'a> fn(&'a BindgenContext, Edge) -> bool>;
ItemTraversal<
'ctx,
'gen,
Paths<'ctx, 'gen>,
VecDeque<ItemId>,
for<'a> fn(&'a BindgenContext, Edge) -> bool,
>;
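// Illustrative usage sketch, not from the sources: `keep_all` is a
// hypothetical predicate that follows every edge. The debugging-oriented
// alias above is used only because its concrete storage, queue and predicate
// parameters are spelled out here; it additionally asserts that nothing
// reachable dangles.
#[allow(dead_code)]
fn keep_all(_ctx: &BindgenContext, _edge: Edge) -> bool {
    true
}
#[allow(dead_code)]
fn reachable_from<'ctx, 'gen>(
    ctx: &'ctx BindgenContext<'gen>,
    root: ItemId,
) -> Vec<ItemId> {
    // Walk everything transitively referenced by `root`, collecting the ids.
    AssertNoDanglingItemsTraversal::new(ctx, Some(root), keep_all).collect()
}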
#[cfg(test)]
mod tests {


@ -2,7 +2,6 @@
use super::comp::CompInfo;
use super::context::{BindgenContext, ItemId};
use super::derive::{CanDeriveCopy, CanDeriveDefault};
use super::dot::DotAttributes;
use super::enum_ty::Enum;
use super::function::FunctionSig;
@ -10,10 +9,12 @@ use super::int::IntKind;
use super::item::{IsOpaque, Item};
use super::layout::{Layout, Opaque};
use super::objc::ObjCInterface;
use super::template::{AsTemplateParam, TemplateInstantiation, TemplateParameters};
use super::template::{AsTemplateParam, TemplateInstantiation,
TemplateParameters};
use super::traversal::{EdgeKind, Trace, Tracer};
use clang::{self, Cursor};
use parse::{ClangItemParser, ParseError, ParseResult};
use std::borrow::Cow;
use std::io;
use std::mem;
@ -61,11 +62,12 @@ impl Type {
}
/// Construct a new `Type`.
pub fn new(name: Option<String>,
layout: Option<Layout>,
kind: TypeKind,
is_const: bool)
-> Self {
pub fn new(
name: Option<String>,
layout: Option<Layout>,
kind: TypeKind,
is_const: bool,
) -> Self {
Type {
name: name,
layout: layout,
@ -97,10 +99,10 @@ impl Type {
}
}
/// Is this type of kind `TypeKind::Named`?
pub fn is_named(&self) -> bool {
/// Is this type of kind `TypeKind::TypeParam`?
pub fn is_type_param(&self) -> bool {
match self.kind {
TypeKind::Named => true,
TypeKind::TypeParam => true,
_ => false,
}
}
@ -138,7 +140,7 @@ impl Type {
}
/// Is this either a builtin or named type?
pub fn is_builtin_or_named(&self) -> bool {
pub fn is_builtin_or_type_param(&self) -> bool {
match self.kind {
TypeKind::Void |
TypeKind::NullPtr |
@ -149,19 +151,15 @@ impl Type {
TypeKind::BlockPointer |
TypeKind::Int(..) |
TypeKind::Float(..) |
TypeKind::Named => true,
TypeKind::TypeParam => true,
_ => false,
}
}
/// Creates a new named type, with name `name`.
pub fn named(name: String) -> Self {
let name = if name.is_empty() {
None
} else {
Some(name)
};
Self::new(name, None, TypeKind::Named, false)
let name = if name.is_empty() { None } else { Some(name) };
Self::new(name, None, TypeKind::TypeParam, false)
}
/// Is this a floating point type?
@ -226,8 +224,10 @@ impl Type {
// Use the actual pointer size!
TypeKind::Pointer(..) |
TypeKind::BlockPointer => {
Some(Layout::new(mem::size_of::<*mut ()>(),
mem::align_of::<*mut ()>()))
Some(Layout::new(
mem::size_of::<*mut ()>(),
mem::align_of::<*mut ()>(),
))
}
TypeKind::ResolvedTypeRef(inner) => {
ctx.resolve_type(inner).layout(ctx)
@ -237,48 +237,61 @@ impl Type {
})
}
/// Returns whether this type has a destructor.
pub fn has_destructor(&self, ctx: &BindgenContext) -> bool {
match self.kind {
TypeKind::TemplateAlias(t, _) |
TypeKind::Alias(t) |
TypeKind::ResolvedTypeRef(t) => {
ctx.resolve_type(t).has_destructor(ctx)
}
TypeKind::TemplateInstantiation(ref inst) => {
inst.has_destructor(ctx)
}
TypeKind::Comp(ref info) => info.has_destructor(ctx),
_ => false,
}
}
/// Whether this named type is an invalid C++ identifier. This is done to
/// avoid generating invalid code with some cases we can't handle, see:
///
/// tests/headers/381-decltype-alias.hpp
pub fn is_invalid_named_type(&self) -> bool {
pub fn is_invalid_type_param(&self) -> bool {
match self.kind {
TypeKind::Named => {
TypeKind::TypeParam => {
let name = self.name().expect("Unnamed named type?");
!Self::is_valid_identifier(&name)
!clang::is_valid_identifier(&name)
}
_ => false,
}
}
/// Checks whether the name looks like an identifier,
/// i.e. is alphanumeric (including '_') and does not start with a digit.
pub fn is_valid_identifier(name: &str) -> bool {
clang::is_valid_identifier(name)
/// Takes `name`, and returns a suitable identifier representation for it.
fn sanitize_name<'a>(name: &'a str) -> Cow<'a, str> {
if clang::is_valid_identifier(name) {
return Cow::Borrowed(name);
}
let name = name.replace(|c| c == ' ' || c == ':' || c == '.', "_");
Cow::Owned(name)
}
/// Get this type's sanitized name.
pub fn sanitized_name<'a>(
&'a self,
ctx: &BindgenContext,
) -> Option<Cow<'a, str>> {
let name_info = match *self.kind() {
TypeKind::Pointer(inner) => Some((inner, Cow::Borrowed("ptr"))),
TypeKind::Reference(inner) => Some((inner, Cow::Borrowed("ref"))),
TypeKind::Array(inner, length) => {
Some((inner, format!("array{}", length).into()))
}
_ => None,
};
if let Some((inner, prefix)) = name_info {
ctx.resolve_item(inner)
.expect_type()
.sanitized_name(ctx)
.map(|name| format!("{}_{}", prefix, name).into())
} else {
self.name().map(Self::sanitize_name)
}
}
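// Illustrative behaviour (editor's example, not from the sources): names that
// are already valid identifiers are returned borrowed and untouched, anything
// else has spaces, colons and dots replaced with underscores:
//
//     sanitize_name("Foo")          -> "Foo"
//     sanitize_name("unsigned int") -> "unsigned_int"
//     sanitize_name("ns::Foo.Bar")  -> "ns__Foo_Bar"
//
// `sanitized_name` additionally prefixes wrapper types, so a pointer to
// `unsigned int` would come out roughly as `ptr_unsigned_int`, and a
// three-element array of it as `array3_unsigned_int`.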
/// See safe_canonical_type.
pub fn canonical_type<'tr>(&'tr self,
ctx: &'tr BindgenContext)
-> &'tr Type {
self.safe_canonical_type(ctx)
.expect("Should have been resolved after parsing!")
pub fn canonical_type<'tr>(
&'tr self,
ctx: &'tr BindgenContext,
) -> &'tr Type {
self.safe_canonical_type(ctx).expect(
"Should have been resolved after parsing!",
)
}
/// Returns the canonical type of this type, that is, the "inner type".
@ -286,11 +299,12 @@ impl Type {
/// For example, for a `typedef`, the canonical type would be the
/// `typedef`ed type; for a template instantiation, it would be the template
/// it's specializing; and so on. Returns `None` if the type is unresolved.
pub fn safe_canonical_type<'tr>(&'tr self,
ctx: &'tr BindgenContext)
-> Option<&'tr Type> {
pub fn safe_canonical_type<'tr>(
&'tr self,
ctx: &'tr BindgenContext,
) -> Option<&'tr Type> {
match self.kind {
TypeKind::Named |
TypeKind::TypeParam |
TypeKind::Array(..) |
TypeKind::Comp(..) |
TypeKind::Opaque |
@ -344,7 +358,9 @@ impl IsOpaque for Type {
fn is_opaque(&self, ctx: &BindgenContext, item: &Item) -> bool {
match self.kind {
TypeKind::Opaque => true,
TypeKind::TemplateInstantiation(ref inst) => inst.is_opaque(ctx, item),
TypeKind::TemplateInstantiation(ref inst) => {
inst.is_opaque(ctx, item)
}
TypeKind::Comp(ref comp) => comp.is_opaque(ctx, &()),
TypeKind::ResolvedTypeRef(to) => to.is_opaque(ctx, &()),
_ => false,
@ -355,7 +371,11 @@ impl IsOpaque for Type {
impl AsTemplateParam for Type {
type Extra = Item;
fn as_template_param(&self, ctx: &BindgenContext, item: &Item) -> Option<ItemId> {
fn as_template_param(
&self,
ctx: &BindgenContext,
item: &Item,
) -> Option<ItemId> {
self.kind.as_template_param(ctx, item)
}
}
@ -363,9 +383,13 @@ impl AsTemplateParam for Type {
impl AsTemplateParam for TypeKind {
type Extra = Item;
fn as_template_param(&self, ctx: &BindgenContext, item: &Item) -> Option<ItemId> {
fn as_template_param(
&self,
ctx: &BindgenContext,
item: &Item,
) -> Option<ItemId> {
match *self {
TypeKind::Named => Some(item.id()),
TypeKind::TypeParam => Some(item.id()),
TypeKind::ResolvedTypeRef(id) => id.as_template_param(ctx, &()),
_ => None,
}
@ -373,18 +397,22 @@ impl AsTemplateParam for TypeKind {
}
impl DotAttributes for Type {
fn dot_attributes<W>(&self,
ctx: &BindgenContext,
out: &mut W)
-> io::Result<()>
where W: io::Write,
fn dot_attributes<W>(
&self,
ctx: &BindgenContext,
out: &mut W,
) -> io::Result<()>
where
W: io::Write,
{
if let Some(ref layout) = self.layout {
try!(writeln!(out,
"<tr><td>size</td><td>{}</td></tr>
try!(writeln!(
out,
"<tr><td>size</td><td>{}</td></tr>
<tr><td>align</td><td>{}</td></tr>",
layout.size,
layout.align));
layout.size,
layout.align
));
if layout.packed {
try!(writeln!(out, "<tr><td>packed</td><td>true</td></tr>"));
}
@ -399,38 +427,14 @@ impl DotAttributes for Type {
}
impl DotAttributes for TypeKind {
fn dot_attributes<W>(&self,
ctx: &BindgenContext,
out: &mut W)
-> io::Result<()>
where W: io::Write,
fn dot_attributes<W>(
&self,
ctx: &BindgenContext,
out: &mut W,
) -> io::Result<()>
where
W: io::Write,
{
write!(out,
"<tr><td>TypeKind</td><td>{}</td></tr>",
match *self {
TypeKind::Void => "Void",
TypeKind::NullPtr => "NullPtr",
TypeKind::Comp(..) => "Comp",
TypeKind::Opaque => "Opaque",
TypeKind::Int(..) => "Int",
TypeKind::Float(..) => "Float",
TypeKind::Complex(..) => "Complex",
TypeKind::Alias(..) => "Alias",
TypeKind::TemplateAlias(..) => "TemplateAlias",
TypeKind::Array(..) => "Array",
TypeKind::Function(..) => "Function",
TypeKind::Enum(..) => "Enum",
TypeKind::Pointer(..) => "Pointer",
TypeKind::BlockPointer => "BlockPointer",
TypeKind::Reference(..) => "Reference",
TypeKind::TemplateInstantiation(..) => "TemplateInstantiation",
TypeKind::ResolvedTypeRef(..) => "ResolvedTypeRef",
TypeKind::Named => "Named",
TypeKind::ObjCId => "ObjCId",
TypeKind::ObjCSel => "ObjCSel",
TypeKind::ObjCInterface(..) => "ObjCInterface",
TypeKind::UnresolvedTypeRef(..) => unreachable!("there shouldn't be any more of these anymore"),
})?;
if let TypeKind::Comp(ref comp) = *self {
comp.dot_attributes(ctx, out)?;
}
@ -440,62 +444,67 @@ impl DotAttributes for TypeKind {
}
#[test]
fn is_invalid_named_type_valid() {
let ty = Type::new(Some("foo".into()), None, TypeKind::Named, false);
assert!(!ty.is_invalid_named_type())
fn is_invalid_type_param_valid() {
let ty = Type::new(Some("foo".into()), None, TypeKind::TypeParam, false);
assert!(!ty.is_invalid_type_param())
}
#[test]
fn is_invalid_named_type_valid_underscore_and_numbers() {
let ty =
Type::new(Some("_foo123456789_".into()), None, TypeKind::Named, false);
assert!(!ty.is_invalid_named_type())
fn is_invalid_type_param_valid_underscore_and_numbers() {
let ty = Type::new(
Some("_foo123456789_".into()),
None,
TypeKind::TypeParam,
false,
);
assert!(!ty.is_invalid_type_param())
}
#[test]
fn is_invalid_named_type_valid_unnamed_kind() {
fn is_invalid_type_param_valid_unnamed_kind() {
let ty = Type::new(Some("foo".into()), None, TypeKind::Void, false);
assert!(!ty.is_invalid_named_type())
assert!(!ty.is_invalid_type_param())
}
#[test]
fn is_invalid_named_type_invalid_start() {
let ty = Type::new(Some("1foo".into()), None, TypeKind::Named, false);
assert!(ty.is_invalid_named_type())
fn is_invalid_type_param_invalid_start() {
let ty = Type::new(Some("1foo".into()), None, TypeKind::TypeParam, false);
assert!(ty.is_invalid_type_param())
}
#[test]
fn is_invalid_named_type_invalid_remaing() {
let ty = Type::new(Some("foo-".into()), None, TypeKind::Named, false);
assert!(ty.is_invalid_named_type())
fn is_invalid_type_param_invalid_remaing() {
let ty = Type::new(Some("foo-".into()), None, TypeKind::TypeParam, false);
assert!(ty.is_invalid_type_param())
}
#[test]
#[should_panic]
fn is_invalid_named_type_unnamed() {
let ty = Type::new(None, None, TypeKind::Named, false);
assert!(ty.is_invalid_named_type())
fn is_invalid_type_param_unnamed() {
let ty = Type::new(None, None, TypeKind::TypeParam, false);
assert!(ty.is_invalid_type_param())
}
#[test]
fn is_invalid_named_type_empty_name() {
let ty = Type::new(Some("".into()), None, TypeKind::Named, false);
assert!(ty.is_invalid_named_type())
fn is_invalid_type_param_empty_name() {
let ty = Type::new(Some("".into()), None, TypeKind::TypeParam, false);
assert!(ty.is_invalid_type_param())
}
impl TemplateParameters for Type {
fn self_template_params(&self,
ctx: &BindgenContext)
-> Option<Vec<ItemId>> {
fn self_template_params(
&self,
ctx: &BindgenContext,
) -> Option<Vec<ItemId>> {
self.kind.self_template_params(ctx)
}
}
impl TemplateParameters for TypeKind {
fn self_template_params(&self,
ctx: &BindgenContext)
-> Option<Vec<ItemId>> {
fn self_template_params(
&self,
ctx: &BindgenContext,
) -> Option<Vec<ItemId>> {
match *self {
TypeKind::ResolvedTypeRef(id) => {
ctx.resolve_type(id).self_template_params(ctx)
@ -517,7 +526,7 @@ impl TemplateParameters for TypeKind {
TypeKind::BlockPointer |
TypeKind::Reference(_) |
TypeKind::UnresolvedTypeRef(..) |
TypeKind::Named |
TypeKind::TypeParam |
TypeKind::Alias(_) |
TypeKind::ObjCId |
TypeKind::ObjCSel |
@ -526,87 +535,6 @@ impl TemplateParameters for TypeKind {
}
}
impl<'a> CanDeriveDefault<'a> for Type {
type Extra = &'a Item;
fn can_derive_default(&self, ctx: &BindgenContext, item: &Item) -> bool {
match self.kind {
TypeKind::Array(t, len) => {
len <= RUST_DERIVE_IN_ARRAY_LIMIT &&
t.can_derive_default(ctx, ())
}
TypeKind::ResolvedTypeRef(t) |
TypeKind::TemplateAlias(t, _) |
TypeKind::Alias(t) => t.can_derive_default(ctx, ()),
TypeKind::Comp(ref info) => {
info.can_derive_default(ctx, (&item, self.layout(ctx)))
}
TypeKind::Opaque => {
self.layout
.map_or(true, |l| l.opaque().can_derive_default(ctx, ()))
}
TypeKind::Void |
TypeKind::Named |
TypeKind::TemplateInstantiation(..) |
TypeKind::Reference(..) |
TypeKind::NullPtr |
TypeKind::Pointer(..) |
TypeKind::BlockPointer |
TypeKind::ObjCId |
TypeKind::ObjCSel |
TypeKind::ObjCInterface(..) |
TypeKind::Enum(..) => false,
TypeKind::Function(..) |
TypeKind::Int(..) |
TypeKind::Float(..) |
TypeKind::Complex(..) => true,
TypeKind::UnresolvedTypeRef(..) => unreachable!(),
}
}
}
impl<'a> CanDeriveCopy<'a> for Type {
type Extra = &'a Item;
fn can_derive_copy(&self, ctx: &BindgenContext, item: &Item) -> bool {
match self.kind {
TypeKind::Array(t, len) => {
len <= RUST_DERIVE_IN_ARRAY_LIMIT &&
t.can_derive_copy_in_array(ctx, ())
}
TypeKind::ResolvedTypeRef(t) |
TypeKind::TemplateAlias(t, _) |
TypeKind::Alias(t) => t.can_derive_copy(ctx, ()),
TypeKind::TemplateInstantiation(ref inst) => {
inst.can_derive_copy(ctx, ())
}
TypeKind::Comp(ref info) => {
info.can_derive_copy(ctx, (item, self.layout(ctx)))
}
TypeKind::Opaque => {
self.layout
.map_or(true, |l| l.opaque().can_derive_copy(ctx, ()))
}
_ => true,
}
}
fn can_derive_copy_in_array(&self,
ctx: &BindgenContext,
item: &Item)
-> bool {
match self.kind {
TypeKind::ResolvedTypeRef(t) |
TypeKind::TemplateAlias(t, _) |
TypeKind::Alias(t) |
TypeKind::Array(t, _) => t.can_derive_copy_in_array(ctx, ()),
TypeKind::Named => false,
_ => self.can_derive_copy(ctx, item),
}
}
}
/// The kind of float this type represents.
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum FloatKind {
@ -695,10 +623,12 @@ pub enum TypeKind {
/// already known types, and are converted to ResolvedTypeRef.
///
/// see tests/headers/typeref.hpp to see somewhere where this is a problem.
UnresolvedTypeRef(clang::Type,
clang::Cursor,
/* parent_id */
Option<ItemId>),
UnresolvedTypeRef(
clang::Type,
clang::Cursor,
/* parent_id */
Option<ItemId>
),
/// An indirection to another type.
///
@ -707,7 +637,7 @@ pub enum TypeKind {
ResolvedTypeRef(ItemId),
/// A named type, that is, a template parameter.
Named,
TypeParam,
/// Objective C interface. Always referenced through a pointer
ObjCInterface(ObjCInterface),
@ -743,7 +673,7 @@ impl Type {
let definition = inst.template_definition();
ctx.resolve_type(definition).is_unsized(ctx, &definition)
}
TypeKind::Named |
TypeKind::TypeParam |
TypeKind::Int(..) |
TypeKind::Float(..) |
TypeKind::Complex(..) |
@ -769,18 +699,21 @@ impl Type {
///
/// It's sort of nasty and full of special-casing, but hopefully the
/// comments in every special case justify why they're there.
pub fn from_clang_ty(potential_id: ItemId,
ty: &clang::Type,
location: Cursor,
parent_id: Option<ItemId>,
ctx: &mut BindgenContext)
-> Result<ParseResult<Self>, ParseError> {
pub fn from_clang_ty(
potential_id: ItemId,
ty: &clang::Type,
location: Cursor,
parent_id: Option<ItemId>,
ctx: &mut BindgenContext,
) -> Result<ParseResult<Self>, ParseError> {
use clang_sys::*;
{
let already_resolved = ctx.builtin_or_resolved_ty(potential_id,
parent_id,
ty,
Some(location));
let already_resolved = ctx.builtin_or_resolved_ty(
potential_id,
parent_id,
ty,
Some(location),
);
if let Some(ty) = already_resolved {
debug!("{:?} already resolved: {:?}", ty, location);
return Ok(ParseResult::AlreadyResolved(ty));
@ -791,10 +724,12 @@ impl Type {
let cursor = ty.declaration();
let mut name = cursor.spelling();
debug!("from_clang_ty: {:?}, ty: {:?}, loc: {:?}",
potential_id,
ty,
location);
debug!(
"from_clang_ty: {:?}, ty: {:?}, loc: {:?}",
potential_id,
ty,
location
);
debug!("currently_parsed_types: {:?}", ctx.currently_parsed_types());
let canonical_ty = ty.canonical_type();
@ -813,8 +748,10 @@ impl Type {
// We are rewriting them as id to suppress multiple conflicting
// typedefs at root level
if ty_kind == CXType_Typedef {
let is_template_type_param = ty.declaration().kind() == CXCursor_TemplateTypeParameter;
let is_canonical_objcpointer = canonical_ty.kind() == CXType_ObjCObjectPointer;
let is_template_type_param = ty.declaration().kind() ==
CXCursor_TemplateTypeParameter;
let is_canonical_objcpointer = canonical_ty.kind() ==
CXType_ObjCObjectPointer;
// We have found a template type for objc interface
if is_canonical_objcpointer && is_template_type_param {
@ -827,16 +764,19 @@ impl Type {
if location.kind() == CXCursor_ClassTemplatePartialSpecialization {
// Sorry! (Not sorry)
warn!("Found a partial template specialization; bindgen does not \
warn!(
"Found a partial template specialization; bindgen does not \
support partial template specialization! Constructing \
opaque type instead.");
return Ok(ParseResult::New(Opaque::from_clang_ty(&canonical_ty),
None));
opaque type instead."
);
return Ok(
ParseResult::New(Opaque::from_clang_ty(&canonical_ty), None),
);
}
let kind = if location.kind() == CXCursor_TemplateRef ||
(ty.template_args().is_some() &&
ty_kind != CXType_Typedef) {
(ty.template_args().is_some() && ty_kind != CXType_Typedef)
{
// This is a template instantiation.
match TemplateInstantiation::from_ty(&ty, ctx) {
Some(inst) => TypeKind::TemplateInstantiation(inst),
@ -876,25 +816,29 @@ impl Type {
let signature =
try!(FunctionSig::from_ty(ty, &location, ctx));
TypeKind::Function(signature)
// Same here, with template specialisations we can safely
// assume this is a Comp(..)
// Same here, with template specialisations we can safely
// assume this is a Comp(..)
} else if ty.is_fully_instantiated_template() {
debug!("Template specialization: {:?}, {:?} {:?}",
ty,
location,
canonical_ty);
let complex = CompInfo::from_ty(potential_id,
ty,
Some(location),
ctx)
.expect("C'mon");
debug!(
"Template specialization: {:?}, {:?} {:?}",
ty,
location,
canonical_ty
);
let complex = CompInfo::from_ty(
potential_id,
ty,
Some(location),
ctx,
).expect("C'mon");
TypeKind::Comp(complex)
} else {
match location.kind() {
CXCursor_CXXBaseSpecifier |
CXCursor_ClassTemplate => {
if location.kind() ==
CXCursor_CXXBaseSpecifier {
CXCursor_CXXBaseSpecifier
{
// In the case we're parsing a base specifier
// inside an unexposed or invalid type, it means
// that we're parsing one of two things:
@ -934,29 +878,34 @@ impl Type {
// [2]: forward-inherit-struct-with-fields.hpp
// [3]: forward-inherit-struct.hpp
// [4]: inherit-namespaced.hpp
if location.spelling()
.chars()
.all(|c| {
c.is_alphanumeric() || c == '_'
}) {
if location.spelling().chars().all(|c| {
c.is_alphanumeric() || c == '_'
})
{
return Err(ParseError::Recurse);
}
} else {
name = location.spelling();
}
let complex = CompInfo::from_ty(potential_id,
ty,
Some(location),
ctx);
let complex = CompInfo::from_ty(
potential_id,
ty,
Some(location),
ctx,
);
match complex {
Ok(complex) => TypeKind::Comp(complex),
Err(_) => {
warn!("Could not create complex type \
warn!(
"Could not create complex type \
from class template or base \
specifier, using opaque blob");
specifier, using opaque blob"
);
let opaque = Opaque::from_clang_ty(ty);
return Ok(ParseResult::New(opaque, None));
return Ok(
ParseResult::New(opaque, None),
);
}
}
}
@ -987,10 +936,10 @@ impl Type {
}
CXCursor_TemplateTypeParameter => {
let param =
Item::named_type(None,
Item::type_param(None,
cur,
ctx)
.expect("Item::named_type shouldn't \
.expect("Item::type_param shouldn't \
ever fail if we are looking \
at a TemplateTypeParameter");
args.push(param);
@ -1003,9 +952,11 @@ impl Type {
let inner_type = match inner {
Ok(inner) => inner,
Err(..) => {
error!("Failed to parse template alias \
error!(
"Failed to parse template alias \
{:?}",
location);
location
);
return Err(ParseError::Continue);
}
};
@ -1016,35 +967,42 @@ impl Type {
let referenced = location.referenced().unwrap();
let referenced_ty = referenced.cur_type();
debug!("TemplateRef: location = {:?}; referenced = \
debug!(
"TemplateRef: location = {:?}; referenced = \
{:?}; referenced_ty = {:?}",
location,
referenced,
referenced_ty);
location,
referenced,
referenced_ty
);
return Self::from_clang_ty(potential_id,
&referenced_ty,
referenced,
parent_id,
ctx);
return Self::from_clang_ty(
potential_id,
&referenced_ty,
referenced,
parent_id,
ctx,
);
}
CXCursor_TypeRef => {
let referenced = location.referenced().unwrap();
let referenced_ty = referenced.cur_type();
let declaration = referenced_ty.declaration();
debug!("TypeRef: location = {:?}; referenced = \
debug!(
"TypeRef: location = {:?}; referenced = \
{:?}; referenced_ty = {:?}",
location,
referenced,
referenced_ty);
location,
referenced,
referenced_ty
);
let item =
Item::from_ty_or_ref_with_id(potential_id,
referenced_ty,
declaration,
parent_id,
ctx);
let item = Item::from_ty_or_ref_with_id(
potential_id,
referenced_ty,
declaration,
parent_id,
ctx,
);
return Ok(ParseResult::AlreadyResolved(item));
}
CXCursor_NamespaceRef => {
@ -1052,10 +1010,12 @@ impl Type {
}
_ => {
if ty.kind() == CXType_Unexposed {
warn!("Unexposed type {:?}, recursing inside, \
warn!(
"Unexposed type {:?}, recursing inside, \
loc: {:?}",
ty,
location);
ty,
location
);
return Err(ParseError::Recurse);
}
@ -1071,11 +1031,13 @@ impl Type {
return Err(ParseError::Continue);
}
return Self::from_clang_ty(potential_id,
&canonical_ty,
location,
parent_id,
ctx);
return Self::from_clang_ty(
potential_id,
&canonical_ty,
location,
parent_id,
ctx,
);
}
// NOTE: We don't resolve pointers eagerly because the pointee type
// might not have been parsed, and if it contains templates or
@ -1108,8 +1070,8 @@ impl Type {
// pointers, etc, which isn't trivial given function pointers
// are mostly unexposed. I don't have the time for it right now.
let mut pointee = ty.pointee_type().unwrap();
let canonical_pointee = canonical_ty.pointee_type()
.unwrap();
let canonical_pointee =
canonical_ty.pointee_type().unwrap();
if pointee.call_conv() != canonical_pointee.call_conv() {
pointee = canonical_pointee;
}
@ -1122,29 +1084,32 @@ impl Type {
// can even add bindings for that, so huh.
CXType_RValueReference |
CXType_LValueReference => {
let inner = Item::from_ty_or_ref(ty.pointee_type()
.unwrap(),
location,
None,
ctx);
let inner = Item::from_ty_or_ref(
ty.pointee_type().unwrap(),
location,
None,
ctx,
);
TypeKind::Reference(inner)
}
// XXX DependentSizedArray is wrong
CXType_VariableArray |
CXType_DependentSizedArray => {
let inner = Item::from_ty(ty.elem_type().as_ref().unwrap(),
location,
None,
ctx)
.expect("Not able to resolve array element?");
let inner = Item::from_ty(
ty.elem_type().as_ref().unwrap(),
location,
None,
ctx,
).expect("Not able to resolve array element?");
TypeKind::Pointer(inner)
}
CXType_IncompleteArray => {
let inner = Item::from_ty(ty.elem_type().as_ref().unwrap(),
location,
None,
ctx)
.expect("Not able to resolve array element?");
let inner = Item::from_ty(
ty.elem_type().as_ref().unwrap(),
location,
None,
ctx,
).expect("Not able to resolve array element?");
TypeKind::Array(inner, 0)
}
CXType_FunctionNoProto |
@ -1164,7 +1129,7 @@ impl Type {
if name.is_empty() {
let pretty_name = ty.spelling();
if Self::is_valid_identifier(&pretty_name) {
if clang::is_valid_identifier(&pretty_name) {
name = pretty_name;
}
}
@ -1172,17 +1137,18 @@ impl Type {
TypeKind::Enum(enum_)
}
CXType_Record => {
let complex = CompInfo::from_ty(potential_id,
ty,
Some(location),
ctx)
.expect("Not a complex type?");
let complex = CompInfo::from_ty(
potential_id,
ty,
Some(location),
ctx,
).expect("Not a complex type?");
if name.is_empty() {
// The pretty-printed name may contain typedefed name,
// but may also be "struct (anonymous at .h:1)"
let pretty_name = ty.spelling();
if Self::is_valid_identifier(&pretty_name) {
if clang::is_valid_identifier(&pretty_name) {
name = pretty_name;
}
}
@ -1196,19 +1162,22 @@ impl Type {
// That being said, that should be fixed eventually.
CXType_Vector |
CXType_ConstantArray => {
let inner = Item::from_ty(ty.elem_type().as_ref().unwrap(),
location,
None,
ctx)
.expect("Not able to resolve array element?");
let inner = Item::from_ty(
ty.elem_type().as_ref().unwrap(),
location,
None,
ctx,
).expect("Not able to resolve array element?");
TypeKind::Array(inner, ty.num_elements().unwrap())
}
CXType_Elaborated => {
return Self::from_clang_ty(potential_id,
&ty.named(),
location,
parent_id,
ctx);
return Self::from_clang_ty(
potential_id,
&ty.named(),
location,
parent_id,
ctx,
);
}
CXType_ObjCId => TypeKind::ObjCId,
CXType_ObjCSel => TypeKind::ObjCSel,
@ -1220,10 +1189,12 @@ impl Type {
TypeKind::ObjCInterface(interface)
}
_ => {
error!("unsupported type: kind = {:?}; ty = {:?}; at {:?}",
ty.kind(),
ty,
location);
error!(
"unsupported type: kind = {:?}; ty = {:?}; at {:?}",
ty.kind(),
ty,
location
);
return Err(ParseError::Continue);
}
}
@ -1242,7 +1213,8 @@ impl Trace for Type {
type Extra = Item;
fn trace<T>(&self, context: &BindgenContext, tracer: &mut T, item: &Item)
where T: Tracer,
where
T: Tracer,
{
match *self.kind() {
TypeKind::Pointer(inner) |
@ -1255,8 +1227,10 @@ impl Trace for Type {
TypeKind::TemplateAlias(inner, ref template_params) => {
tracer.visit_kind(inner, EdgeKind::TypeReference);
for &item in template_params {
tracer.visit_kind(item,
EdgeKind::TemplateParameterDefinition);
tracer.visit_kind(
item,
EdgeKind::TemplateParameterDefinition,
);
}
}
TypeKind::TemplateInstantiation(ref inst) => {
@ -1280,7 +1254,7 @@ impl Trace for Type {
// None of these variants have edges to other items and types.
TypeKind::Opaque |
TypeKind::UnresolvedTypeRef(_, _, None) |
TypeKind::Named |
TypeKind::TypeParam |
TypeKind::Void |
TypeKind::NullPtr |
TypeKind::Int(_) |


@ -44,12 +44,13 @@ pub struct Var {
impl Var {
/// Construct a new `Var`.
pub fn new(name: String,
mangled: Option<String>,
ty: ItemId,
val: Option<VarType>,
is_const: bool)
-> Var {
pub fn new(
name: String,
mangled: Option<String>,
ty: ItemId,
val: Option<VarType>,
is_const: bool,
) -> Var {
assert!(!name.is_empty());
Var {
name: name,
@ -87,20 +88,24 @@ impl Var {
}
impl DotAttributes for Var {
fn dot_attributes<W>(&self,
_ctx: &BindgenContext,
out: &mut W)
-> io::Result<()>
where W: io::Write,
fn dot_attributes<W>(
&self,
_ctx: &BindgenContext,
out: &mut W,
) -> io::Result<()>
where
W: io::Write,
{
if self.is_const {
try!(writeln!(out, "<tr><td>const</td><td>true</td></tr>"));
}
if let Some(ref mangled) = self.mangled_name {
try!(writeln!(out,
"<tr><td>mangled name</td><td>{}</td></tr>",
mangled));
try!(writeln!(
out,
"<tr><td>mangled name</td><td>{}</td></tr>",
mangled
));
}
Ok(())
@ -108,9 +113,10 @@ impl DotAttributes for Var {
}
impl ClangSubItemParser for Var {
fn parse(cursor: clang::Cursor,
ctx: &mut BindgenContext)
-> Result<ParseResult<Self>, ParseError> {
fn parse(
cursor: clang::Cursor,
ctx: &mut BindgenContext,
) -> Result<ParseResult<Self>, ParseError> {
use clang_sys::*;
use cexpr::expr::EvalResult;
use cexpr::literal::CChar;
@ -168,10 +174,11 @@ impl ClangSubItemParser for Var {
(TypeKind::Int(IntKind::U8), VarType::Char(c))
}
EvalResult::Str(val) => {
let char_ty =
Item::builtin_type(TypeKind::Int(IntKind::U8),
true,
ctx);
let char_ty = Item::builtin_type(
TypeKind::Int(IntKind::U8),
true,
ctx,
);
(TypeKind::Pointer(char_ty), VarType::String(val))
}
EvalResult::Int(Wrapping(value)) => {
@ -195,8 +202,10 @@ impl ClangSubItemParser for Var {
let ty = Item::builtin_type(type_kind, true, ctx);
Ok(ParseResult::New(Var::new(name, None, ty, Some(val), true),
Some(cursor)))
Ok(ParseResult::New(
Var::new(name, None, ty, Some(val), true),
Some(cursor),
))
}
CXCursor_VarDecl => {
let name = cursor.spelling();
@ -213,10 +222,12 @@ impl ClangSubItemParser for Var {
let ty = match Item::from_ty(&ty, cursor, None, ctx) {
Ok(ty) => ty,
Err(e) => {
assert_eq!(ty.kind(),
CXType_Auto,
"Couldn't resolve constant type, and it \
wasn't a nondeductible auto type!");
assert_eq!(
ty.kind(),
CXType_Auto,
"Couldn't resolve constant type, and it \
wasn't a nondeductible auto type!"
);
return Err(e);
}
};
@ -225,8 +236,9 @@ impl ClangSubItemParser for Var {
// tests/headers/inner_const.hpp
//
// That's fine because in that case we know it's not a literal.
let canonical_ty = ctx.safe_resolve_type(ty)
.and_then(|t| t.safe_canonical_type(ctx));
let canonical_ty = ctx.safe_resolve_type(ty).and_then(|t| {
t.safe_canonical_type(ctx)
});
let is_integer = canonical_ty.map_or(false, |t| t.is_integer());
let is_float = canonical_ty.map_or(false, |t| t.is_float());
@ -241,7 +253,8 @@ impl ClangSubItemParser for Var {
_ => unreachable!(),
};
let mut val = cursor.evaluate()
let mut val = cursor
.evaluate()
.and_then(|v| v.as_int())
.map(|val| val as i64);
if val.is_none() || !kind.signedness_matches(val.unwrap()) {
@ -255,13 +268,13 @@ impl ClangSubItemParser for Var {
VarType::Int(val)
})
} else if is_float {
cursor.evaluate()
.and_then(|v| v.as_double())
.map(VarType::Float)
cursor.evaluate().and_then(|v| v.as_double()).map(
VarType::Float,
)
} else {
cursor.evaluate()
.and_then(|v| v.as_literal_string())
.map(VarType::String)
cursor.evaluate().and_then(|v| v.as_literal_string()).map(
VarType::String,
)
};
let mangling = cursor_mangling(ctx, &cursor);
@ -278,10 +291,11 @@ impl ClangSubItemParser for Var {
}
/// Try and parse a macro using all the macros parsed until now.
fn parse_macro(ctx: &BindgenContext,
cursor: &clang::Cursor,
unit: &clang::TranslationUnit)
-> Option<(Vec<u8>, cexpr::expr::EvalResult)> {
fn parse_macro(
ctx: &BindgenContext,
cursor: &clang::Cursor,
unit: &clang::TranslationUnit,
) -> Option<(Vec<u8>, cexpr::expr::EvalResult)> {
use cexpr::{expr, nom};
let mut cexpr_tokens = match unit.cexpr_tokens(cursor) {
@ -309,16 +323,15 @@ fn parse_macro(ctx: &BindgenContext,
}
match parser.macro_definition(&cexpr_tokens) {
nom::IResult::Done(_, (id, val)) => {
Some((id.into(), val))
}
_ => None
nom::IResult::Done(_, (id, val)) => Some((id.into(), val)),
_ => None,
}
}
fn parse_int_literal_tokens(cursor: &clang::Cursor,
unit: &clang::TranslationUnit)
-> Option<i64> {
fn parse_int_literal_tokens(
cursor: &clang::Cursor,
unit: &clang::TranslationUnit,
) -> Option<i64> {
use cexpr::{expr, nom};
use cexpr::expr::EvalResult;
@ -334,9 +347,10 @@ fn parse_int_literal_tokens(cursor: &clang::Cursor,
}
}
fn get_integer_literal_from_cursor(cursor: &clang::Cursor,
unit: &clang::TranslationUnit)
-> Option<i64> {
fn get_integer_literal_from_cursor(
cursor: &clang::Cursor,
unit: &clang::TranslationUnit,
) -> Option<i64> {
use clang_sys::*;
let mut value = None;
cursor.visit(|c| {


@ -4,15 +4,12 @@
//! functions and use types defined in the header.
//!
//! See the [`Builder`](./struct.Builder.html) struct for usage.
#![deny(missing_docs)]
// #![deny(warnings)]
#![deny(warnings)]
#![deny(unused_extern_crates)]
// We internally use the deprecated BindgenOptions all over the place. Once we
// remove its `pub` declaration, we can un-deprecate it and remove this pragma.
#![allow(deprecated)]
// To avoid rather annoying warnings when matching with CXCursor_xxx as a
// constant.
#![allow(non_upper_case_globals)]
@ -29,6 +26,7 @@ extern crate peeking_take_while;
extern crate regex;
#[macro_use]
extern crate lazy_static;
extern crate which;
#[cfg(feature = "logging")]
#[macro_use]
@ -62,10 +60,10 @@ macro_rules! doc_mod {
}
mod clang;
mod features;
mod ir;
mod parse;
mod regex_set;
mod uses;
pub mod callbacks;
@ -73,23 +71,25 @@ pub mod callbacks;
mod codegen;
doc_mod!(clang, clang_docs);
doc_mod!(features, features_docs);
doc_mod!(ir, ir_docs);
doc_mod!(parse, parse_docs);
doc_mod!(regex_set, regex_set_docs);
doc_mod!(uses, uses_docs);
mod codegen {
include!(concat!(env!("OUT_DIR"), "/codegen.rs"));
}
pub use features::{LATEST_STABLE_RUST, RUST_TARGET_STRINGS, RustTarget};
use features::RustFeatures;
use ir::context::{BindgenContext, ItemId};
use ir::item::Item;
use parse::{ClangItemParser, ParseError};
use regex_set::RegexSet;
use std::fs::{File, OpenOptions};
use std::iter;
use std::io::{self, Write};
use std::iter;
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};
use std::sync::Arc;
@ -181,7 +181,7 @@ pub fn builder() -> Builder {
}
impl Builder {
/// Generates the command line flags used for creating `Builder`.
/// Generates the command line flags used for creating `Builder`.
pub fn command_line_flags(&self) -> Vec<String> {
let mut output_vector: Vec<String> = Vec::new();
@ -190,14 +190,20 @@ impl Builder {
output_vector.push(header);
}
output_vector.push(self.options.rust_target.into());
self.options
.bitfield_enums
.get_items()
.iter()
.map(|item| {
output_vector.push("--bitfield-enum".into());
output_vector.push(item.trim_left_matches("^").trim_right_matches("$").into());
})
output_vector.push("--bitfield-enum".into());
output_vector.push(
item.trim_left_matches("^")
.trim_right_matches("$")
.into(),
);
})
.count();
self.options
@ -205,9 +211,13 @@ impl Builder {
.get_items()
.iter()
.map(|item| {
output_vector.push("--constified-enum".into());
output_vector.push(item.trim_left_matches("^").trim_right_matches("$").into());
})
output_vector.push("--constified-enum".into());
output_vector.push(
item.trim_left_matches("^")
.trim_right_matches("$")
.into(),
);
})
.count();
self.options
@ -215,9 +225,13 @@ impl Builder {
.get_items()
.iter()
.map(|item| {
output_vector.push("--constified-enum-module".into());
output_vector.push(item.trim_left_matches("^").trim_right_matches("$").into());
})
output_vector.push("--constified-enum-module".into());
output_vector.push(
item.trim_left_matches("^")
.trim_right_matches("$")
.into(),
);
})
.count();
self.options
@ -225,9 +239,13 @@ impl Builder {
.get_items()
.iter()
.map(|item| {
output_vector.push("--blacklist-type".into());
output_vector.push(item.trim_left_matches("^").trim_right_matches("$").into());
})
output_vector.push("--blacklist-type".into());
output_vector.push(
item.trim_left_matches("^")
.trim_right_matches("$")
.into(),
);
})
.count();
if !self.options.layout_tests {
@ -238,12 +256,28 @@ impl Builder {
output_vector.push("--no-derive-debug".into());
}
if !self.options.impl_debug {
output_vector.push("--impl-debug".into());
}
if !self.options.derive_default {
output_vector.push("--no-derive-default".into());
} else {
output_vector.push("--with-derive-default".into());
}
if self.options.derive_hash {
output_vector.push("--with-derive-hash".into());
}
if self.options.derive_partialeq {
output_vector.push("--with-derive-partialeq".into());
}
if self.options.derive_eq {
output_vector.push("--with-derive-eq".into());
}
if !self.options.generate_comments {
output_vector.push("--no-doc-comments".into());
}
@ -265,11 +299,6 @@ impl Builder {
output_vector.push(prefix.clone());
}
if let Some(ref dummy) = self.options.dummy_uses {
output_vector.push("--dummy-uses".into());
output_vector.push(dummy.clone());
}
if self.options.emit_ast {
output_vector.push("--emit-clang-ast".into());
}
@ -292,9 +321,13 @@ impl Builder {
.links
.iter()
.map(|&(ref item, _)| {
output_vector.push("--framework".into());
output_vector.push(item.trim_left_matches("^").trim_right_matches("$").into());
})
output_vector.push("--framework".into());
output_vector.push(
item.trim_left_matches("^")
.trim_right_matches("$")
.into(),
);
})
.count();
if !self.options.codegen_config.functions {
@ -304,7 +337,7 @@ impl Builder {
output_vector.push("--generate".into());
//Temporary placeholder for below 4 options
let mut options:Vec<String> = Vec::new();
let mut options: Vec<String> = Vec::new();
if self.options.codegen_config.functions {
options.push("function".into());
}
@ -334,9 +367,13 @@ impl Builder {
.links
.iter()
.map(|&(ref item, _)| {
output_vector.push("--clang-args".into());
output_vector.push(item.trim_left_matches("^").trim_right_matches("$").into());
})
output_vector.push("--clang-args".into());
output_vector.push(
item.trim_left_matches("^")
.trim_right_matches("$")
.into(),
);
})
.count();
if !self.options.convert_floats {
@ -347,36 +384,44 @@ impl Builder {
output_vector.push("--no-prepend-enum-name".into());
}
if !self.options.unstable_rust {
output_vector.push("--unstable-rust".into());
}
self.options
.opaque_types
.get_items()
.iter()
.map(|item| {
output_vector.push("--opaque-type".into());
output_vector.push(item.trim_left_matches("^").trim_right_matches("$").into());
})
output_vector.push("--opaque-type".into());
output_vector.push(
item.trim_left_matches("^")
.trim_right_matches("$")
.into(),
);
})
.count();
self.options
.raw_lines
.iter()
.map(|item| {
output_vector.push("--raw-line".into());
output_vector.push(item.trim_left_matches("^").trim_right_matches("$").into());
})
output_vector.push("--raw-line".into());
output_vector.push(
item.trim_left_matches("^")
.trim_right_matches("$")
.into(),
);
})
.count();
self.options
.links
.iter()
.map(|&(ref item, _)| {
output_vector.push("--static".into());
output_vector.push(item.trim_left_matches("^").trim_right_matches("$").into());
})
output_vector.push("--static".into());
output_vector.push(
item.trim_left_matches("^")
.trim_right_matches("$")
.into(),
);
})
.count();
if self.options.use_core {
@ -392,9 +437,13 @@ impl Builder {
.get_items()
.iter()
.map(|item| {
output_vector.push("--whitelist-function".into());
output_vector.push(item.trim_left_matches("^").trim_right_matches("$").into());
})
output_vector.push("--whitelist-function".into());
output_vector.push(
item.trim_left_matches("^")
.trim_right_matches("$")
.into(),
);
})
.count();
self.options
@ -402,9 +451,13 @@ impl Builder {
.get_items()
.iter()
.map(|item| {
output_vector.push("--whitelist-type".into());
output_vector.push(item.trim_left_matches("^").trim_right_matches("$").into());
})
output_vector.push("--whitelist-type".into());
output_vector.push(
item.trim_left_matches("^")
.trim_right_matches("$")
.into(),
);
})
.count();
self.options
@ -412,24 +465,40 @@ impl Builder {
.get_items()
.iter()
.map(|item| {
output_vector.push("--whitelist-var".into());
output_vector.push(item.trim_left_matches("^").trim_right_matches("$").into());
})
output_vector.push("--whitelist-var".into());
output_vector.push(
item.trim_left_matches("^")
.trim_right_matches("$")
.into(),
);
})
.count();
output_vector.push("--".into());
if !self.options.clang_args.is_empty() {
output_vector.extend(
self.options
.clang_args
.iter()
.cloned()
);
output_vector.extend(self.options.clang_args.iter().cloned());
}
if self.input_headers.len() > 1 {
output_vector.extend(self.input_headers[..self.input_headers.len() - 1].iter().cloned());
output_vector.extend(
self.input_headers[..self.input_headers.len() - 1]
.iter()
.cloned(),
);
}
if !self.options.rustfmt_bindings {
output_vector.push("--rustfmt-bindings".into());
}
if let Some(path) = self.options
.rustfmt_configuration_file
.as_ref()
.and_then(|f| f.to_str())
{
output_vector.push("--rustfmt-configuration-file".into());
output_vector.push(path.into());
}
output_vector
@ -466,7 +535,17 @@ impl Builder {
///
/// The file `name` will be added to the clang arguments.
pub fn header_contents(mut self, name: &str, contents: &str) -> Builder {
self.input_header_contents.push((name.into(), contents.into()));
self.input_header_contents.push(
(name.into(), contents.into()),
);
self
}
/// Specify the rust target
///
/// The default is the latest stable Rust version
pub fn rust_target(mut self, rust_target: RustTarget) -> Self {
self.options.set_rust_target(rust_target);
self
}
@ -519,13 +598,6 @@ impl Builder {
self
}
/// Generate a C/C++ file that includes the header and has dummy uses of
/// every type defined in the header.
pub fn dummy_uses<T: Into<String>>(mut self, dummy_uses: T) -> Builder {
self.options.dummy_uses = Some(dummy_uses.into());
self
}
/// Hide the given type from the generated bindings. Regular expressions are
/// supported.
pub fn hide_type<T: AsRef<str>>(mut self, arg: T) -> Builder {
@ -609,8 +681,10 @@ impl Builder {
/// Add arguments to be passed straight through to clang.
pub fn clang_args<I>(mut self, iter: I) -> Builder
where I: IntoIterator,
I::Item: AsRef<str> {
where
I: IntoIterator,
I::Item: AsRef<str>,
{
for arg in iter {
self = self.clang_arg(arg.as_ref())
}
@ -631,7 +705,9 @@ impl Builder {
/// Make the generated bindings link the given framework.
pub fn link_framework<T: Into<String>>(mut self, library: T) -> Builder {
self.options.links.push((library.into(), LinkType::Framework));
self.options.links.push(
(library.into(), LinkType::Framework),
);
self
}
@ -660,12 +736,44 @@ impl Builder {
self
}
/// Set whether `Debug` should be implemented if it cannot be derived automatically.
pub fn impl_debug(mut self, doit: bool) -> Self {
self.options.impl_debug = doit;
self
}
/// Set whether `Default` should be derived by default.
pub fn derive_default(mut self, doit: bool) -> Self {
self.options.derive_default = doit;
self
}
/// Set whether `Hash` should be derived by default.
pub fn derive_hash(mut self, doit: bool) -> Self {
self.options.derive_hash = doit;
self
}
/// Set whether `PartialEq` should be derived by default.
/// If we can't derive `PartialEq`, we can't derive `Eq` either, so
/// `derive_eq` is also set to `false` when `doit` is `false`.
pub fn derive_partialeq(mut self, doit: bool) -> Self {
self.options.derive_partialeq = doit;
if !doit {
self.options.derive_eq = false;
}
self
}
/// Set whether `Eq` should be derived by default.
/// `Eq` can't be derived without `PartialEq`, so this also sets
/// `derive_partialeq` to the same value.
pub fn derive_eq(mut self, doit: bool) -> Self {
self.options.derive_eq = doit;
self.options.derive_partialeq = doit;
self
}
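// Hypothetical downstream usage (e.g. a consumer's build.rs), shown as a
// comment because it lives outside this crate; "wrapper.h" is made up:
//
//     let builder = bindgen::builder()
//         .header("wrapper.h")
//         .derive_eq(true); // implicitly enables derive_partialeq as well
//
// Conversely, `.derive_partialeq(false)` would clear `derive_eq` again.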
/// Emit Clang AST.
pub fn emit_clang_ast(mut self) -> Builder {
self.options.emit_ast = true;
@ -758,9 +866,14 @@ impl Builder {
}
/// Avoid generating any unstable Rust, such as Rust unions, in the generated bindings.
pub fn unstable_rust(mut self, doit: bool) -> Self {
self.options.unstable_rust = doit;
self
#[deprecated(note = "please use `rust_target` instead")]
pub fn unstable_rust(self, doit: bool) -> Self {
let rust_target = if doit {
RustTarget::Nightly
} else {
LATEST_STABLE_RUST
};
self.rust_target(rust_target)
}
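// Hypothetical migration example, shown as a comment because it belongs in a
// downstream crate: instead of the deprecated `unstable_rust(true)`, select
// the target explicitly ("wrapper.h" is made up):
//
//     let builder = bindgen::builder()
//         .header("wrapper.h")
//         .rust_target(bindgen::RustTarget::Nightly);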
/// Use core instead of libstd in the generated bindings.
@ -777,7 +890,10 @@ impl Builder {
/// Allows configuring types in different situations, see the
/// [`ParseCallbacks`](./callbacks/trait.ParseCallbacks.html) documentation.
pub fn parse_callbacks(mut self, cb: Box<callbacks::ParseCallbacks>) -> Self {
pub fn parse_callbacks(
mut self,
cb: Box<callbacks::ParseCallbacks>,
) -> Self {
self.options.parse_callbacks = Some(cb);
self
}
@ -795,6 +911,20 @@ impl Builder {
self
}
/// Set whether rustfmt should format the generated bindings.
pub fn rustfmt_bindings(mut self, doit: bool) -> Self {
self.options.rustfmt_bindings = doit;
self
}
/// Set the absolute path to the rustfmt configuration file. If `None`, the
/// standard rustfmt options are used.
pub fn rustfmt_configuration_file(mut self, path: Option<PathBuf>) -> Self {
self = self.rustfmt_bindings(true);
self.options.rustfmt_configuration_file = path;
self
}
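// Hypothetical downstream usage (paths are made up): pointing at a
// configuration file also turns `rustfmt_bindings` on by itself.
//
//     let builder = bindgen::builder()
//         .header("wrapper.h")
//         .rustfmt_configuration_file(Some(
//             std::path::PathBuf::from("rustfmt.toml"),
//         ));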
/// Generate the Rust bindings using the options built up thus far.
pub fn generate<'ctx>(mut self) -> Result<Bindings<'ctx>, ()> {
self.options.input_header = self.input_headers.pop();
@ -802,15 +932,14 @@ impl Builder {
self.input_headers
.drain(..)
.flat_map(|header| {
iter::once("-include".into())
.chain(iter::once(header))
})
iter::once("-include".into()).chain(iter::once(header))
}),
);
self.options.input_unsaved_files.extend(
self.input_header_contents
.drain(..)
.map(|(name, contents)| clang::UnsavedFile::new(&name, &contents))
self.input_header_contents.drain(..).map(|(name, contents)| {
clang::UnsavedFile::new(&name, &contents)
}),
);
Bindings::generate(self.options, None)
@ -822,9 +951,9 @@ impl Builder {
/// issues. The resulting file will be named something like `__bindgen.i` or
/// `__bindgen.ii`
pub fn dump_preprocessed_input(&self) -> io::Result<()> {
let clang = clang_sys::support::Clang::find(None, &[])
.ok_or_else(|| io::Error::new(io::ErrorKind::Other,
"Cannot find clang executable"))?;
let clang = clang_sys::support::Clang::find(None, &[]).ok_or_else(|| {
io::Error::new(io::ErrorKind::Other, "Cannot find clang executable")
})?;
// The contents of a wrapper file that includes all the input header
// files.
@ -893,8 +1022,10 @@ impl Builder {
if child.wait()?.success() {
Ok(())
} else {
Err(io::Error::new(io::ErrorKind::Other,
"clang exited with non-zero status"))
Err(io::Error::new(
io::ErrorKind::Other,
"clang exited with non-zero status",
))
}
}
}
@ -965,13 +1096,25 @@ pub struct BindgenOptions {
/// and types.
pub derive_debug: bool,
/// True if we should implement the Debug trait for C/C++ structures and types
/// that do not support automatically deriving Debug.
pub impl_debug: bool,
/// True if we should derive Default trait implementations for C/C++ structures
/// and types.
pub derive_default: bool,
/// True if we can use unstable Rust code in the bindings, false if we
/// cannot.
pub unstable_rust: bool,
/// True if we should derive Hash trait implementations for C/C++ structures
/// and types.
pub derive_hash: bool,
/// True if we should derive PartialEq trait implementations for C/C++ structures
/// and types.
pub derive_partialeq: bool,
/// True if we should derive Eq trait implementations for C/C++ structures
/// and types.
pub derive_eq: bool,
/// True if we should avoid using libstd to use libcore instead.
pub use_core: bool,
@ -1001,10 +1144,6 @@ pub struct BindgenOptions {
/// Unsaved files for input.
pub input_unsaved_files: Vec<clang::UnsavedFile>,
/// Generate a dummy C/C++ file that includes the header and has dummy uses
/// of all types defined therein. See the `uses` module for more.
pub dummy_uses: Option<String>,
/// A user-provided visitor to allow customizing different kinds of
/// situations.
pub parse_callbacks: Option<Box<callbacks::ParseCallbacks>>,
@ -1043,6 +1182,19 @@ pub struct BindgenOptions {
/// Whether to prepend the enum name to bitfield or constant variants.
pub prepend_enum_name: bool,
/// Version of the Rust compiler to target
rust_target: RustTarget,
/// Features to enable, derived from `rust_target`
rust_features: RustFeatures,
/// Whether rustfmt should format the generated bindings.
pub rustfmt_bindings: bool,
/// The absolute path to the rustfmt configuration file. If `None`, the
/// standard rustfmt options are used.
pub rustfmt_configuration_file: Option<PathBuf>,
}
/// TODO(emilio): This is sort of a lie (see the error message that results from
@ -1061,11 +1213,33 @@ impl BindgenOptions {
self.constified_enum_modules.build();
self.constified_enums.build();
}
/// Update rust target version
pub fn set_rust_target(&mut self, rust_target: RustTarget) {
self.rust_target = rust_target;
// Keep rust_features synced with rust_target
self.rust_features = rust_target.into();
}
/// Get target Rust version
pub fn rust_target(&self) -> RustTarget {
self.rust_target
}
/// Get features supported by target Rust version
pub fn rust_features(&self) -> RustFeatures {
self.rust_features
}
}
impl Default for BindgenOptions {
fn default() -> BindgenOptions {
let rust_target = RustTarget::default();
BindgenOptions {
rust_target: rust_target,
rust_features: rust_target.into(),
hidden_types: Default::default(),
opaque_types: Default::default(),
whitelisted_types: Default::default(),
@ -1081,10 +1255,13 @@ impl Default for BindgenOptions {
emit_ir_graphviz: None,
layout_tests: true,
derive_debug: true,
impl_debug: false,
derive_default: false,
derive_hash: false,
derive_partialeq: false,
derive_eq: false,
enable_cxx_namespaces: false,
disable_name_namespacing: false,
unstable_rust: false,
use_core: false,
ctypes_prefix: None,
namespaced_constants: true,
@ -1094,7 +1271,6 @@ impl Default for BindgenOptions {
clang_args: vec![],
input_header: None,
input_unsaved_files: vec![],
dummy_uses: None,
parse_callbacks: None,
codegen_config: CodegenConfig::all(),
conservative_inline_namespaces: false,
@ -1104,6 +1280,8 @@ impl Default for BindgenOptions {
objc_extern_crate: false,
enable_mangling: true,
prepend_enum_name: true,
rustfmt_bindings: false,
rustfmt_configuration_file: None,
}
}
}
@ -1154,9 +1332,10 @@ impl<'ctx> Bindings<'ctx> {
///
/// Deprecated - use a `Builder` instead
#[deprecated]
pub fn generate(mut options: BindgenOptions,
span: Option<Span>)
-> Result<Bindings<'ctx>, ()> {
pub fn generate(
mut options: BindgenOptions,
span: Option<Span>,
) -> Result<Bindings<'ctx>, ()> {
let span = span.unwrap_or(DUMMY_SP);
ensure_libclang_is_loaded();
@ -1190,10 +1369,15 @@ impl<'ctx> Bindings<'ctx> {
};
// TODO: Make this path fixup configurable?
if let Some(clang) = clang_sys::support::Clang::find(None, &clang_args_for_clang_sys) {
if let Some(clang) = clang_sys::support::Clang::find(
None,
&clang_args_for_clang_sys,
)
{
// If --target is specified, assume caller knows what they're doing
// and don't mess with include paths for them
let has_target_arg = options.clang_args
let has_target_arg = options
.clang_args
.iter()
.rposition(|arg| arg.starts_with("--target"))
.is_some();
@ -1243,25 +1427,34 @@ impl<'ctx> Bindings<'ctx> {
let mut mod_str = vec![];
{
let ref_writer = Box::new(mod_str.by_ref()) as Box<Write>;
self.write(ref_writer).expect("Could not write bindings to string");
self.write(ref_writer).expect(
"Could not write bindings to string",
);
}
String::from_utf8(mod_str).unwrap()
}
/// Write these bindings as source text to a file.
pub fn write_to_file<P: AsRef<Path>>(&self, path: P) -> io::Result<()> {
let file = try!(OpenOptions::new()
.write(true)
.truncate(true)
.create(true)
.open(path));
self.write(Box::new(file))
{
let file = try!(
OpenOptions::new()
.write(true)
.truncate(true)
.create(true)
.open(path.as_ref())
);
self.write(Box::new(file))?;
}
self.rustfmt_generated_file(path.as_ref())
}
/// Write these bindings as source text to the given `Write`able.
pub fn write<'a>(&self, mut writer: Box<Write + 'a>) -> io::Result<()> {
try!(writer.write("/* automatically generated by rust-bindgen */\n\n"
.as_bytes()));
try!(writer.write(
"/* automatically generated by rust-bindgen */\n\n".as_bytes(),
));
for line in self.context.options().raw_lines.iter() {
try!(writer.write(line.as_bytes()));
@ -1278,27 +1471,61 @@ impl<'ctx> Bindings<'ctx> {
ps.s.out.flush()
}
/// Generate and write dummy uses of all the types we parsed, if we've been
/// requested to do so in the options.
///
/// See the `uses` module for more information.
pub fn write_dummy_uses(&mut self) -> io::Result<()> {
let file = if let Some(ref dummy_path) =
self.context.options().dummy_uses {
Some(try!(OpenOptions::new()
.write(true)
.truncate(true)
.create(true)
.open(dummy_path)))
} else {
None
};
if let Some(file) = file {
try!(uses::generate_dummy_uses(&mut self.context, file));
/// Checks if rustfmt_bindings is set and runs rustfmt on the file
fn rustfmt_generated_file(&self, file: &Path) -> io::Result<()> {
if !self.context.options().rustfmt_bindings {
return Ok(());
}
Ok(())
let rustfmt = if let Ok(rustfmt) = which::which("rustfmt") {
rustfmt
} else {
return Err(io::Error::new(
io::ErrorKind::Other,
"Rustfmt activated, but it could not be found in global path.",
));
};
let mut cmd = Command::new(rustfmt);
if let Some(path) = self.context
.options()
.rustfmt_configuration_file
.as_ref()
.and_then(|f| f.to_str())
{
cmd.args(&["--config-path", path]);
}
if let Ok(output) = cmd.arg(file).output() {
if !output.status.success() {
let stderr = String::from_utf8_lossy(&output.stderr);
match output.status.code() {
Some(2) => Err(io::Error::new(
io::ErrorKind::Other,
format!("Rustfmt parsing errors:\n{}", stderr),
)),
Some(3) => {
warn!(
"Rustfmt could not format some lines:\n{}",
stderr
);
Ok(())
}
_ => Err(io::Error::new(
io::ErrorKind::Other,
format!("Internal rustfmt error:\n{}", stderr),
)),
}
} else {
Ok(())
}
} else {
Err(io::Error::new(
io::ErrorKind::Other,
"Error executing rustfmt!",
))
}
}
}
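// Illustrative sketch (hypothetical output path): with `rustfmt_bindings`
// enabled, writing the bindings also runs rustfmt on the written file via
// `rustfmt_generated_file` above.
//
//     let bindings = bindgen::builder()
//         .header("wrapper.h")
//         .rustfmt_bindings(true)
//         .generate()
//         .expect("bindgen failed");
//     bindings
//         .write_to_file("src/bindings.rs")
//         .expect("couldn't write bindings");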
@ -1309,10 +1536,11 @@ fn filter_builtins(ctx: &BindgenContext, cursor: &clang::Cursor) -> bool {
}
/// Parse one `Item` from the Clang cursor.
pub fn parse_one(ctx: &mut BindgenContext,
cursor: clang::Cursor,
parent: Option<ItemId>)
-> clang_sys::CXChildVisitResult {
pub fn parse_one(
ctx: &mut BindgenContext,
cursor: clang::Cursor,
parent: Option<ItemId>,
) -> clang_sys::CXChildVisitResult {
if !filter_builtins(ctx, &cursor) {
return CXChildVisit_Continue;
}
@ -1363,8 +1591,10 @@ fn parse(context: &mut BindgenContext) -> Result<(), ()> {
cursor.visit(|cursor| parse_one(context, cursor, None))
});
assert!(context.current_module() == context.root_module(),
"How did this happen?");
assert!(
context.current_module() == context.root_module(),
"How did this happen?"
);
Ok(())
}
@ -1385,9 +1615,9 @@ pub fn clang_version() -> ClangVersion {
}
let raw_v: String = clang::extract_clang_version();
let split_v: Option<Vec<&str>> = raw_v.split_whitespace()
.nth(2)
.map(|v| v.split('.').collect());
let split_v: Option<Vec<&str>> = raw_v.split_whitespace().nth(2).map(|v| {
v.split('.').collect()
});
match split_v {
Some(v) => {
if v.len() >= 2 {
@ -1419,30 +1649,41 @@ fn commandline_flag_unit_test_function() {
let bindings = ::builder();
let command_line_flags = bindings.command_line_flags();
let test_cases = vec!["--no-derive-default",
"--generate", "function,types,vars,methods,constructors,destructors"]
.iter()
.map(|&x| x.into())
.collect::<Vec<String>>();
let test_cases = vec![
"--no-derive-default",
"--generate",
"function,types,vars,methods,constructors,destructors",
].iter()
.map(|&x| x.into())
.collect::<Vec<String>>();
assert!(test_cases.iter().all(|ref x| command_line_flags.contains(x)) );
assert!(test_cases.iter().all(
|ref x| command_line_flags.contains(x),
));
//Test 2
let bindings = ::builder().header("input_header")
.whitelisted_type("Distinct_Type")
.whitelisted_function("safe_function");
let bindings = ::builder()
.header("input_header")
.whitelisted_type("Distinct_Type")
.whitelisted_function("safe_function");
let command_line_flags = bindings.command_line_flags();
let test_cases = vec!["input_header",
"--no-derive-default",
"--generate", "function,types,vars,methods,constructors,destructors",
"--whitelist-type", "Distinct_Type",
"--whitelist-function", "safe_function"]
.iter()
.map(|&x| x.into())
.collect::<Vec<String>>();
let test_cases = vec![
"input_header",
"--no-derive-default",
"--generate",
"function,types,vars,methods,constructors,destructors",
"--whitelist-type",
"Distinct_Type",
"--whitelist-function",
"safe_function",
].iter()
.map(|&x| x.into())
.collect::<Vec<String>>();
println!("{:?}", command_line_flags);
assert!(test_cases.iter().all(|ref x| command_line_flags.contains(x)) );
assert!(test_cases.iter().all(
|ref x| command_line_flags.contains(x),
));
}


@ -1,8 +1,8 @@
extern crate bindgen;
#[cfg(feature="logging")]
#[cfg(feature = "logging")]
extern crate env_logger;
#[macro_use]
#[cfg(feature="logging")]
#[cfg(feature = "logging")]
extern crate log;
extern crate clang_sys;
extern crate clap;
@ -12,21 +12,20 @@ use std::env;
use std::panic;
#[macro_use]
#[cfg(not(feature="logging"))]
#[cfg(not(feature = "logging"))]
mod log_stubs;
mod options;
use options::builder_from_flags;
pub fn main() {
#[cfg(feature="logging")]
#[cfg(feature = "logging")]
log::set_logger(|max_log_level| {
use env_logger::Logger;
let env_logger = Logger::new();
max_log_level.set(env_logger.filter());
Box::new(env_logger)
})
.expect("Failed to set logger.");
use env_logger::Logger;
let env_logger = Logger::new();
max_log_level.set(env_logger.filter());
Box::new(env_logger)
}).expect("Failed to set logger.");
let bind_args: Vec<_> = env::args().collect();
@ -64,11 +63,8 @@ pub fn main() {
std::process::exit(1);
}
let mut bindings = builder_result.unwrap();
bindings.write(output)
.expect("Unable to write output");
bindings.write_dummy_uses()
.expect("Unable to write dummy uses to file.");
let bindings = builder_result.unwrap();
bindings.write(output).expect("Unable to write output");
}
Err(error) => {
println!("{}", error);
@ -79,10 +75,14 @@ pub fn main() {
fn print_verbose_err() {
println!("Bindgen unexpectedly panicked");
println!("This may be caused by one of the known-unsupported \
println!(
"This may be caused by one of the known-unsupported \
things (https://github.com/rust-lang-nursery/rust-bindgen#c), \
please modify the bindgen flags to work around it as \
described in https://github.com/rust-lang-nursery/rust-bindgen#c");
println!("Otherwise, please file an issue at \
https://github.com/rust-lang-nursery/rust-bindgen/issues/new");
described in https://github.com/rust-lang-nursery/rust-bindgen#c"
);
println!(
"Otherwise, please file an issue at \
https://github.com/rust-lang-nursery/rust-bindgen/issues/new"
);
}

@ -1,14 +1,23 @@
use bindgen::{Builder, CodegenConfig, builder};
use bindgen::{Builder, CodegenConfig, RUST_TARGET_STRINGS, RustTarget, builder};
use clap::{App, Arg};
use std::fs::File;
use std::io::{self, Error, ErrorKind};
use std::io::{self, Error, ErrorKind, Write, stderr};
use std::path::PathBuf;
use std::str::FromStr;
/// Construct a new [`Builder`](./struct.Builder.html) from command line flags.
pub fn builder_from_flags<I>
(args: I)
-> Result<(Builder, Box<io::Write>, bool), io::Error>
where I: Iterator<Item = String>,
pub fn builder_from_flags<I>(
args: I,
) -> Result<(Builder, Box<io::Write>, bool), io::Error>
where
I: Iterator<Item = String>,
{
let rust_target_help = format!(
"Version of the Rust compiler to target. Valid options are: {:?}. Defaults to {:?}.",
RUST_TARGET_STRINGS,
String::from(RustTarget::default())
);
let matches = App::new("bindgen")
.version(env!("CARGO_PKG_VERSION"))
.about("Generates Rust bindings from C/C++ headers.")
@ -55,6 +64,10 @@ pub fn builder_from_flags<I>
Arg::with_name("no-derive-debug")
.long("no-derive-debug")
.help("Avoid deriving Debug on any type."),
Arg::with_name("impl-debug")
.long("impl-debug")
.help("Create Debug implementation, if it can not be derived \
automatically."),
Arg::with_name("no-derive-default")
.long("no-derive-default")
.hidden(true)
@ -62,6 +75,15 @@ pub fn builder_from_flags<I>
Arg::with_name("with-derive-default")
.long("with-derive-default")
.help("Derive Default on any type."),
Arg::with_name("with-derive-hash")
.long("with-derive-hash")
.help("Derive hash on any type."),
Arg::with_name("with-derive-partialeq")
.long("with-derive-partialeq")
.help("Derive partialeq on any type."),
Arg::with_name("with-derive-eq")
.long("with-derive-eq")
.help("Derive eq on any type. Enable this option also enables --with-derive-partialeq"),
Arg::with_name("no-doc-comments")
.long("no-doc-comments")
.help("Avoid including doc comments in the output, see: \
@ -88,11 +110,6 @@ pub fn builder_from_flags<I>
// All positional arguments after the end of options marker, `--`
Arg::with_name("clang-args")
.multiple(true),
Arg::with_name("dummy-uses")
.long("dummy-uses")
.help("For testing purposes, generate a C/C++ file containing \
dummy uses of all types defined in the input header.")
.takes_value(true),
Arg::with_name("emit-clang-ast")
.long("emit-clang-ast")
.help("Output the Clang AST for debugging purposes."),
@ -146,7 +163,7 @@ pub fn builder_from_flags<I>
.help("Do not prepend the enum name to bitfield or constant variants."),
Arg::with_name("unstable-rust")
.long("unstable-rust")
.help("Generate unstable Rust code.")
.help("Generate unstable Rust code (deprecated; use --rust-target instead).")
.multiple(true), // FIXME: Pass legacy test suite
Arg::with_name("opaque-type")
.long("opaque-type")
@ -166,6 +183,10 @@ pub fn builder_from_flags<I>
.takes_value(true)
.multiple(true)
.number_of_values(1),
Arg::with_name("rust-target")
.long("rust-target")
.help(&rust_target_help)
.takes_value(true),
Arg::with_name("static")
.long("static-link")
.help("Link to static library.")
@ -219,7 +240,20 @@ pub fn builder_from_flags<I>
.help("Preprocess and dump the input header files to disk. \
Useful when debugging bindgen, using C-Reduce, or when \
filing issues. The resulting file will be named \
something like `__bindgen.i` or `__bindgen.ii`.")
something like `__bindgen.i` or `__bindgen.ii`."),
Arg::with_name("rustfmt-bindings")
.long("rustfmt-bindings")
.help("Format the generated bindings with rustfmt. \
Rustfmt needs to be in the global PATH."),
Arg::with_name("rustfmt-configuration-file")
.long("rustfmt-configuration-file")
.help("The absolute path to the rustfmt configuration file. \
The configuration file will be used for formatting the bindings. \
Setting this parameter will automatically set --rustfmt-bindings.")
.value_name("path")
.takes_value(true)
.multiple(false)
.number_of_values(1),
]) // .args()
.get_matches_from(args);
@ -231,6 +265,18 @@ pub fn builder_from_flags<I>
return Err(Error::new(ErrorKind::Other, "Header not found"));
}
if matches.is_present("unstable-rust") {
builder = builder.rust_target(RustTarget::Nightly);
writeln!(
&mut stderr(),
"warning: the `--unstable-rust` option is deprecated"
).expect("Unable to write error message");
}
if let Some(rust_target) = matches.value_of("rust-target") {
builder = builder.rust_target(RustTarget::from_str(rust_target)?);
}
if let Some(bitfields) = matches.values_of("bitfield-enum") {
for regex in bitfields {
builder = builder.bitfield_enum(regex);
@ -266,10 +312,26 @@ pub fn builder_from_flags<I>
builder = builder.derive_debug(false);
}
if matches.is_present("impl-debug") {
builder = builder.impl_debug(true);
}
if matches.is_present("with-derive-default") {
builder = builder.derive_default(true);
}
if matches.is_present("with-derive-hash") {
builder = builder.derive_hash(true);
}
if matches.is_present("with-derive-partialeq") {
builder = builder.derive_partialeq(true);
}
if matches.is_present("with-derive-eq") {
builder = builder.derive_eq(true);
}
if matches.is_present("no-derive-default") {
builder = builder.derive_default(false);
}
@ -282,10 +344,6 @@ pub fn builder_from_flags<I>
builder = builder.ctypes_prefix(prefix);
}
if let Some(dummy) = matches.value_of("dummy-uses") {
builder = builder.dummy_uses(dummy);
}
if let Some(links) = matches.values_of("dynamic") {
for library in links {
builder = builder.link(library);
@ -303,9 +361,10 @@ pub fn builder_from_flags<I>
"constructors" => config.constructors = true,
"destructors" => config.destructors = true,
otherwise => {
return Err(Error::new(ErrorKind::Other,
format!("Unknown generate item: {}",
otherwise)));
return Err(Error::new(
ErrorKind::Other,
format!("Unknown generate item: {}", otherwise),
));
}
}
}
@ -346,10 +405,6 @@ pub fn builder_from_flags<I>
builder = builder.ignore_methods();
}
if matches.is_present("unstable-rust") {
builder = builder.unstable_rust(true);
}
if matches.is_present("no-convert-floats") {
builder = builder.no_convert_floats();
}
@ -435,6 +490,30 @@ pub fn builder_from_flags<I>
builder.dump_preprocessed_input()?;
}
if matches.is_present("rustfmt-bindings") {
builder = builder.rustfmt_bindings(true);
}
if let Some(path_str) = matches.value_of("rustfmt-configuration-file") {
let path = PathBuf::from(path_str);
if !path.is_absolute() {
return Err(Error::new(
ErrorKind::Other,
"--rustfmt-configuration--file needs to be an absolute path!",
));
}
if path.to_str().is_none() {
return Err(Error::new(
ErrorKind::Other,
"--rustfmt-configuration-file contains non-valid UTF8 characters.",
));
}
builder = builder.rustfmt_configuration_file(Some(path));
}
let verbose = matches.is_present("verbose");
Ok((builder, output, verbose))
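
A minimal sketch of driving the function above from inside the bindgen binary crate, where `options::builder_from_flags` is in scope; the header path and target version are placeholders, and the leading "bindgen" element plays the role of the program name that clap expects:

    use std::io;

    fn run() -> Result<(), io::Error> {
        let args = vec!["bindgen", "input.h", "--rust-target", "1.21"]
            .into_iter()
            .map(String::from);
        let (_builder, _output, _verbose) = builder_from_flags(args)?;
        Ok(())
    }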

@ -34,61 +34,69 @@ pub trait ClangSubItemParser: Sized {
///
/// The fact that it is a reference guarantees it's held by the context, and
/// allows returning already existing types.
fn parse(cursor: clang::Cursor,
context: &mut BindgenContext)
-> Result<ParseResult<Self>, ParseError>;
fn parse(
cursor: clang::Cursor,
context: &mut BindgenContext,
) -> Result<ParseResult<Self>, ParseError>;
}
/// An intermediate representation item that can be parsed from a Clang cursor.
pub trait ClangItemParser: Sized {
/// Parse this item from the given Clang cursor.
fn parse(cursor: clang::Cursor,
parent: Option<ItemId>,
context: &mut BindgenContext)
-> Result<ItemId, ParseError>;
fn parse(
cursor: clang::Cursor,
parent: Option<ItemId>,
context: &mut BindgenContext,
) -> Result<ItemId, ParseError>;
/// Parse this item from the given Clang type.
fn from_ty(ty: &clang::Type,
location: clang::Cursor,
parent: Option<ItemId>,
ctx: &mut BindgenContext)
-> Result<ItemId, ParseError>;
fn from_ty(
ty: &clang::Type,
location: clang::Cursor,
parent: Option<ItemId>,
ctx: &mut BindgenContext,
) -> Result<ItemId, ParseError>;
/// Identical to `from_ty`, but use the given `id` as the `ItemId` for the
/// newly parsed item.
fn from_ty_with_id(id: ItemId,
ty: &clang::Type,
location: clang::Cursor,
parent: Option<ItemId>,
ctx: &mut BindgenContext)
-> Result<ItemId, ParseError>;
fn from_ty_with_id(
id: ItemId,
ty: &clang::Type,
location: clang::Cursor,
parent: Option<ItemId>,
ctx: &mut BindgenContext,
) -> Result<ItemId, ParseError>;
/// Parse this item from the given Clang type, or if we haven't resolved all
/// the other items this one depends on, an unresolved reference.
fn from_ty_or_ref(ty: clang::Type,
location: clang::Cursor,
parent_id: Option<ItemId>,
context: &mut BindgenContext)
-> ItemId;
fn from_ty_or_ref(
ty: clang::Type,
location: clang::Cursor,
parent_id: Option<ItemId>,
context: &mut BindgenContext,
) -> ItemId;
/// Identical to `from_ty_or_ref`, but use the given `potential_id` as the
/// `ItemId` for the newly parsed item.
fn from_ty_or_ref_with_id(potential_id: ItemId,
ty: clang::Type,
location: clang::Cursor,
parent_id: Option<ItemId>,
context: &mut BindgenContext)
-> ItemId;
fn from_ty_or_ref_with_id(
potential_id: ItemId,
ty: clang::Type,
location: clang::Cursor,
parent_id: Option<ItemId>,
context: &mut BindgenContext,
) -> ItemId;
/// Create a named template type.
fn named_type(with_id: Option<ItemId>,
location: clang::Cursor,
ctx: &mut BindgenContext)
-> Option<ItemId>;
fn type_param(
with_id: Option<ItemId>,
location: clang::Cursor,
ctx: &mut BindgenContext,
) -> Option<ItemId>;
/// Create a builtin type.
fn builtin_type(kind: TypeKind,
is_const: bool,
context: &mut BindgenContext)
-> ItemId;
fn builtin_type(
kind: TypeKind,
is_const: bool,
context: &mut BindgenContext,
) -> ItemId;
}

@ -20,8 +20,9 @@ impl RegexSet {
/// Extend this set with every regex in the iterator.
pub fn extend<I, S>(&mut self, iter: I)
where I: IntoIterator<Item = S>,
S: AsRef<str>,
where
I: IntoIterator<Item = S>,
S: AsRef<str>,
{
for s in iter.into_iter() {
self.insert(s)
@ -30,12 +31,13 @@ impl RegexSet {
/// Insert a new regex into this set.
pub fn insert<S>(&mut self, string: S)
where S: AsRef<str>,
where
S: AsRef<str>,
{
self.items.push(format!("^{}$", string.as_ref()));
self.set = None;
}
/// Returns a slice of the `String`s stored in the `items` field.
pub fn get_items(&self) -> &[String] {
&self.items[..]
@ -61,10 +63,13 @@ impl RegexSet {
/// Does the given `string` match any of the regexes in this set?
pub fn matches<S>(&self, string: S) -> bool
where S: AsRef<str>,
where
S: AsRef<str>,
{
let s = string.as_ref();
self.set.as_ref().map(|set| set.is_match(s)).unwrap_or(false)
self.set.as_ref().map(|set| set.is_match(s)).unwrap_or(
false,
)
}
}
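
The insert/matches pair above anchors every pattern with `^...$` before compiling it, so callers get whole-name matching. A standalone sketch of the same anchoring behaviour using the regex crate directly (this is not bindgen's RegexSet API, just an illustration):

    extern crate regex;

    fn main() {
        // Inserting "Foo" into the set effectively compiles "^Foo$",
        // so "Foo" matches but "FooBar" does not.
        let anchored = regex::Regex::new("^Foo$").unwrap();
        assert!(anchored.is_match("Foo"));
        assert!(!anchored.is_match("FooBar"));
    }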

@ -1 +1 @@
{"files":{"Cargo.toml":"8f81f53eb8f6cfc4d8c0e5eaed51b0a7d85a60c9b710fd9df62d25a1db189f92","build.rs":"deff14b2204700f9fd40ba426148d648626461a4ce372c49b8c0f92e09646c80","src/callbacks.rs":"c5c4e5bc8c49cb191d1b100339772fdc7dd1dbf5025a9de1ecaafb70f86cb48f","src/clang.rs":"541a016580c98c2e2af36c3c11b80127c26090795a380480c925c5f411f8100d","src/codegen/derive_debug.rs":"77e16be27a6999726978bf33dc54227cf3b074101ebd55e90f655340cf05ba8b","src/codegen/error.rs":"2613af1d833377fd4a70719f4a09951d9d45dc9227827b9a2a938a1bcaaea2dd","src/codegen/helpers.rs":"19c5b4a86df9410d7e6cb27c2a8797dd205e4c96eab203798b70cd30dd35e572","src/codegen/mod.rs":"4488f141de2abb5d1fa36df7818daeeebba4f5237a9e43101fc9d805e0a80436","src/codegen/struct_layout.rs":"b92fef035e5deaf3fe9f3c2d4ea61a758b873f6f193fe68693955d9d14a396cb","src/extra_assertions.rs":"449549c4a7a50c3f0b06332452b2fb6c9b23f31ca8e5e1656fe6c7f21e8ef7fa","src/features.rs":"b686a3e4ce5712473d0a7c0f817bef29b9337265ec6df1278087a708e1180108","src/ir/analysis/derive_copy.rs":"e17960cd79d764a36fd7cea8bad944a8994fc9cb3a20080955f28509b9e66c9e","src/ir/analysis/derive_debug.rs":"ffb933c46cc26c0ed7c7ccf16a0a19dddb3b0108ca913bd41b785c3afbd4ee0b","src/ir/analysis/derive_default.rs":"740b281eddf9f9f0606963fef7485e9219e7ebedeb7966c83c63f723d1deb62f","src/ir/analysis/derive_hash.rs":"6c046a54d495e3d6ec666f23c6209212321f72f1ed843523e8a9aa0cd6421c9e","src/ir/analysis/derive_partial_eq.rs":"fdd0d78861f146ce9f62c49979009cfad82ec9b96c8c79415513158fc9bf7ad0","src/ir/analysis/has_destructor.rs":"42fdc74e363bc32dbe51558cb5b330bad0e40d79b4cd3c6ff36b6f418a68c0ad","src/ir/analysis/has_float.rs":"02b7ccf9a99b1d96e3a0ec712de45461ab714184129f2634de46b33fb1758ccd","src/ir/analysis/has_type_param_in_array.rs":"39f10af6a6b7af17ee505777dbd10989942272b44efba2a1e320d8b4bbabe0f0","src/ir/analysis/has_vtable.rs":"33def5eb43270fff87455a0e8d262817876e2cf8c676b8cb6c8ec37e84dd99d1","src/ir/analysis/mod.rs":"10a7817a9c990dd8125e4ca9ed1fe02b9a0e27c4dd0320e909bb55f727ed8191","src/ir/analysis/template_params.rs":"e1a3709c3c07b7be21e3912339820f86992b56af44d923919023b7e015f41755","src/ir/annotations.rs":"ef106afcbe6084c18bd13a37ee3c1cdc9596bfb055db8c773d81f8f15fec3208","src/ir/comment.rs":"36f2a1d3970fdbf3d72c1f094043902747cde395215bdf7e9103926d9df011fd","src/ir/comp.rs":"062ea5ec95717e32b26be39bd4664179ff790831042205d795af1a4654922c8d","src/ir/context.rs":"68dbaa11ae5041965e08de24955852982f10c764decb7ba1de6c82073e95916c","src/ir/derive.rs":"608e9bacd6d6d29f5b4357fe2f7cdda62e79c77271b022e5275d22abc22788d3","src/ir/dot.rs":"173e57c3017182279bff98ea5edfd8e6e007a25e70da27139578a637a0a747bc","src/ir/enum_ty.rs":"d633d4d36a64cafd4e1e4ba82872058d5a0aada007e47353e4905ce1fe7d16ec","src/ir/function.rs":"409b779469c8092100991bc442b0b5bcfe9d5510edb71db12a38181df7519938","src/ir/int.rs":"1f61a472288afe489d9320bc8b13920333ece57891ae8570b4c4f25ab50688e6","src/ir/item.rs":"bff6369353b37a209236d750d274c0282a8669c9f7bee2b0eeea706e17537d1f","src/ir/item_kind.rs":"13048962657436a757ff9483d96f2ce351ec5f827ecab4921ed643f6f115c497","src/ir/layout.rs":"39c415271629fc5a43373bcd5ba8bfa26ebb2544aa9e28598b0141214111bb67","src/ir/mod.rs":"2eae90f207fad2e45957ec9287064992a419e3fc916aba84faff2ea25cbeb5ee","src/ir/module.rs":"5d46d631cec17ef1d1882da60080898760181c2ddf991473afdd464bf8c7d867","src/ir/objc.rs":"52454e14371535ff4da4025cf45fee3d3beadbe36759a6ebf0d1d7048a00714f","src/ir/template.rs":"cc96a205dec677962376cec0bdbf53820d633378fa92d9faeb34953d2943a928","src/ir/traversal.rs":"521fdd685ba8c684199cbc8131561ed39aed852dd0d1e76e17d8d2a3d20
4428b","src/ir/ty.rs":"263e7c5794b56dd0499db8b102169f70881b5ff1d15ded1fe49fc24c29d9ab34","src/ir/var.rs":"c60354e164e357480e72b20d7f5c2f7188da8af38ad1db0a3f79806ef60388ab","src/lib.rs":"6efe2ba78af7a2c790e73e03ca6876c24a56636b3b2fb74926a866675dc7ee71","src/log_stubs.rs":"6dfdd908b7c6453da416cf232893768f9480e551ca4add0858ef88bf71ee6ceb","src/main.rs":"6ebd647814b339bbd318f5d55f54ef98091138c2c7208436a88204be56a5a49b","src/options.rs":"cced11c8ba947953098df62beb7980dd72d9aa9c6fd3c8dba5aac745bdcd2315","src/parse.rs":"812171946f0ec8a83f26f9041151ad9dcdff11961e5154e4dae4be248f86b296","src/regex_set.rs":"e4cc668c724a80f7dd7d028f4a22146680513b40cf3a5255551c41079d302309"},"package":"33024f55a754d920637461adf87fb485702a69bdf7ac1d307b7e18da93bae505"}
{"files":{"Cargo.toml":"314227b3a01aea704a0b7b1a2280350ccf965823e3d2ae307bda4b332f88b863","build.rs":"032a1c51963894a421b0535f9227796d88768ac5f665a81d2edced69dc6d106a","src/callbacks.rs":"c5c4e5bc8c49cb191d1b100339772fdc7dd1dbf5025a9de1ecaafb70f86cb48f","src/clang.rs":"541a016580c98c2e2af36c3c11b80127c26090795a380480c925c5f411f8100d","src/codegen/error.rs":"2613af1d833377fd4a70719f4a09951d9d45dc9227827b9a2a938a1bcaaea2dd","src/codegen/helpers.rs":"c7ce49fca07d7690f24fe3440af635a66c758a03d3d79396623289fd191215f6","src/codegen/impl_debug.rs":"e2ffd5b6ed936698aa4b9e7e3459d353383792707ad51f829a18a822f69cab0e","src/codegen/impl_partialeq.rs":"e86050b98f57fa4496dbde0beea319a89e46290309d274f626361779549b95bd","src/codegen/mod.rs":"92249f5626cf12d2f9f032277562b1f5e797cdc28c47efc778c6f6fdfb2ecedc","src/codegen/struct_layout.rs":"45f5c4182b6d4d890bb37f47cff13d1c8fb2392cc15d64fb7afccd12f91276dc","src/extra_assertions.rs":"449549c4a7a50c3f0b06332452b2fb6c9b23f31ca8e5e1656fe6c7f21e8ef7fa","src/features.rs":"8b1c8ea23bd3d0e00dddf3897639c43f0c18057f9f3e5dee48e09a8017cf1a67","src/ir/analysis/derive_copy.rs":"c8d700dcde2419d2687377a7cb1024ad3722bbcb83d0b1031618f57d484793b4","src/ir/analysis/derive_debug.rs":"1d6621c0fa5d899310cc175cb99703606ed34fd7f7ad77bb60f012f25ba504af","src/ir/analysis/derive_default.rs":"3af97eaa9bdc91a0bded060b393d0bb23ef9dcf59a7a6ed7d79814f35f73b66a","src/ir/analysis/derive_hash.rs":"c8a95040352b3d10f8edb46f6ae513d4f15ec87197668b1fc1d7cf2cb416054f","src/ir/analysis/derive_partialeq_or_partialord.rs":"1f7c097bde85ebb4082a3a318a753db13e8b4733fa1433dcc054f137dc0e6c2d","src/ir/analysis/has_destructor.rs":"d9aaaceba580b48eb0df4e5537b34b417c51ccdfeb8f6b72484f3bf4992317fe","src/ir/analysis/has_float.rs":"2a0465503d2c8247eaf916bd6a03594f3dc0370533d9a7c58cc5afb86693816c","src/ir/analysis/has_type_param_in_array.rs":"fcb1c78b6000f1f5eb8d8147e2afdaba9eb0e3a81b61e72537048dfdbeea7bcd","src/ir/analysis/has_vtable.rs":"37765e954ef792e369a58ccfe1d827a00fe9bce680466da1d6523671b94b6c92","src/ir/analysis/mod.rs":"ea5ace45c77e855674bb565ba0fef556f60e3293b0ddcf11d3a5a6ec15ab0648","src/ir/analysis/sizedness.rs":"3d3c8bde40604d53bb64273a3cbd8c55936a7dfe1de9b2ba92fc2c45572624b4","src/ir/analysis/template_params.rs":"5c6ee7a251a321ef5733e2e7ac3264621b4181268babcc008b69dbfc37691fb1","src/ir/annotations.rs":"ef106afcbe6084c18bd13a37ee3c1cdc9596bfb055db8c773d81f8f15fec3208","src/ir/comment.rs":"36f2a1d3970fdbf3d72c1f094043902747cde395215bdf7e9103926d9df011fd","src/ir/comp.rs":"b952cec08cdd126779b630ce523e285610bd4f4da563c661a8b92c556a4e1628","src/ir/context.rs":"2fa83bb1a062636f50e71452ae9cc00e772862f1c160a38d6f0c80a320fedfd8","src/ir/derive.rs":"1fd6ad621e3c60b950acbd51fbe386d1f0fadb7c1889c723245afff45e42e143","src/ir/dot.rs":"eca03a493a7fe48044b832c37c71d51f1bd4e8689feb6f1cb8bc0422541c7284","src/ir/enum_ty.rs":"7f6b56d47d47974548219d221eaa8a9bdea40fc85f405e2d28af5e4a629d748e","src/ir/function.rs":"b95d381633cfd652e46d2322dd039c328dbd3d2959730916a2786b80fc8f862d","src/ir/int.rs":"1f61a472288afe489d9320bc8b13920333ece57891ae8570b4c4f25ab50688e6","src/ir/item.rs":"a46f5968c85861d44356bacdd26b7e5925aa4ba715acfedf08d47122818b00c3","src/ir/item_kind.rs":"13048962657436a757ff9483d96f2ce351ec5f827ecab4921ed643f6f115c497","src/ir/layout.rs":"e3d1adf1ad2fa5bd96530cdd5097db3d9cc7b44d33ec23a04fcfccecd9cf4469","src/ir/mod.rs":"2eae90f207fad2e45957ec9287064992a419e3fc916aba84faff2ea25cbeb5ee","src/ir/module.rs":"c4d90bf38fe3672e01923734ccbdb7951ea929949d5f413a9c2aee12395a5094","src/ir/objc.rs":"05068c4fbf42429c4ac2a233c874
f18ffcf7dc1744398e400a5a48d0e7a972f2","src/ir/template.rs":"bcd750450a4df0200a6e7958f9c96a09b91e3ccd29c60712f2b9d3458f1234aa","src/ir/traversal.rs":"a108f2b2a1b3d2e679274eead8cf6f8fc78454e88943527d56bb30b5f071f104","src/ir/ty.rs":"944dcee00762d77640d1bc9aacad16c489c2d2cb9d2c5fd00cbc97a5599984e4","src/ir/var.rs":"685718e5df2e146fde3c00a18940cbd30d25b5055a254cf14385aa269d98d906","src/lib.rs":"9dfc84f927bb3ba0737be29e668bf55943fa08bb73656fec7b5dbb70556ae235","src/log_stubs.rs":"6dfdd908b7c6453da416cf232893768f9480e551ca4add0858ef88bf71ee6ceb","src/main.rs":"fa0a21b4aca4df72896f911de3ee252bec87abb4c871e39b53e90923181a1658","src/options.rs":"8507b27bec74b3729b79c306ced738d60df026d63939f35e62332c4505497054","src/parse.rs":"be7d13cc84fae79ec7b3aa9e77063fa475a48d74a854423e2c72d75006a25202","src/regex_set.rs":"a55241f2117f15729d174790f386e255fcb224b692325bbe6716dbb1d6874881","src/time.rs":"a02befb48d10dcc31e3f9571b2fa1c40f97fafe6f6ae7d7fc4f8fd01f1a169ba"},"package":"57253399c086f4f29e57ffd3b5cdbc23a806a00292619351aa4cfa39cb49d4ea"}

third_party/rust/bindgen/Cargo.toml (vendored)

@ -12,7 +12,7 @@
[package]
name = "bindgen"
version = "0.30.0"
version = "0.31.3"
authors = ["Jyun-Yan You <jyyou.tw@gmail.com>", "Emilio Cobos Álvarez <emilio@crisal.io>", "Nick Fitzgerald <fitzgen@gmail.com>", "The Servo project developers"]
build = "build.rs"
include = ["Cargo.toml", "build.rs", "src/*.rs", "src/**/*.rs"]
@ -31,68 +31,58 @@ path = "src/lib.rs"
name = "bindgen"
path = "src/main.rs"
doc = false
[dependencies.log]
version = "0.3"
optional = true
[dependencies.cexpr]
version = "0.2"
[dependencies.quasi]
version = "0.32"
features = ["with-syntex"]
[dependencies.aster]
version = "0.41"
features = ["with-syntex"]
[dependencies.which]
version = "1.0.2"
[dependencies.cfg-if]
version = "0.1.0"
[dependencies.clang-sys]
version = "0.19.0"
version = "0.21.0"
features = ["runtime", "clang_3_9"]
[dependencies.clap]
version = "2"
[dependencies.env_logger]
version = "0.4"
optional = true
[dependencies.syntex_syntax]
version = "0.58"
[dependencies.lazy_static]
version = "0.2.1"
[dependencies.cfg-if]
version = "0.1.0"
[dependencies.log]
version = "0.3"
optional = true
[dependencies.peeking_take_while]
version = "0.1.2"
[dependencies.clap]
version = "2"
[dependencies.quote]
version = "0.3.15"
[dependencies.regex]
version = "0.2"
[dependencies.cexpr]
version = "0.2"
[dev-dependencies.shlex]
version = "0.1"
[dependencies.which]
version = "1.0.2"
[dev-dependencies.clap]
version = "2"
[dev-dependencies.diff]
version = "0.1"
[build-dependencies.quasi_codegen]
version = "0.32"
[dev-dependencies.shlex]
version = "0.1"
[features]
testing_only_extra_assertions = []
default = ["logging"]
logging = ["env_logger", "log"]
static = []
testing_only_docs = []
testing_only_extra_assertions = []
testing_only_libclang_3_8 = []
testing_only_libclang_3_9 = []
testing_only_libclang_4 = []
default = ["logging"]
testing_only_docs = []
testing_only_libclang_3_8 = []
[badges.travis-ci]
repository = "rust-lang-nursery/rust-bindgen"

third_party/rust/bindgen/build.rs (vendored)

@ -1,5 +1,4 @@
mod codegen {
extern crate quasi_codegen;
mod target {
use std::env;
use std::fs::File;
use std::io::Write;
@ -7,14 +6,6 @@ mod codegen {
pub fn main() {
let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap());
let src = Path::new("src/codegen/mod.rs");
let dst = Path::new(&out_dir).join("codegen.rs");
quasi_codegen::expand(&src, &dst).unwrap();
println!("cargo:rerun-if-changed=src/codegen/mod.rs");
println!("cargo:rerun-if-changed=src/codegen/error.rs");
println!("cargo:rerun-if-changed=src/codegen/helpers.rs");
println!("cargo:rerun-if-changed=src/codegen/struct_layout.rs");
let mut dst = File::create(Path::new(&out_dir).join("host-target.txt"))
.unwrap();
@ -77,6 +68,6 @@ mod testgen {
}
fn main() {
codegen::main();
target::main();
testgen::main();
}

@ -1,66 +1,62 @@
//! Helpers for code generation that don't need macro expansion.
use aster;
use ir::layout::Layout;
use syntax::ast;
use syntax::ptr::P;
use quote;
pub mod attributes {
use aster;
use syntax::ast;
use quote;
pub fn allow(which_ones: &[&str]) -> ast::Attribute {
aster::AstBuilder::new()
.attr()
.list("allow")
.words(which_ones)
.build()
pub fn repr(which: &str) -> quote::Tokens {
let which = quote::Ident::new(which);
quote! {
#[repr( #which )]
}
}
pub fn repr(which: &str) -> ast::Attribute {
aster::AstBuilder::new()
.attr()
.list("repr")
.words(&[which])
.build()
pub fn repr_list(which_ones: &[&str]) -> quote::Tokens {
let which_ones = which_ones.iter().cloned().map(quote::Ident::new);
quote! {
#[repr( #( #which_ones ),* )]
}
}
pub fn repr_list(which_ones: &[&str]) -> ast::Attribute {
aster::AstBuilder::new()
.attr()
.list("repr")
.words(which_ones)
.build()
pub fn derives(which_ones: &[&str]) -> quote::Tokens {
let which_ones = which_ones.iter().cloned().map(quote::Ident::new);
quote! {
#[derive( #( #which_ones ),* )]
}
}
pub fn derives(which_ones: &[&str]) -> ast::Attribute {
aster::AstBuilder::new()
.attr()
.list("derive")
.words(which_ones)
.build()
pub fn inline() -> quote::Tokens {
quote! {
#[inline]
}
}
pub fn inline() -> ast::Attribute {
aster::AstBuilder::new().attr().word("inline")
pub fn doc(comment: String) -> quote::Tokens {
// Doc comments are already preprocessed into nice `///` formats by the
// time they get here. Just make sure that we have newlines around it so
// that nothing else gets wrapped into the comment.
let mut tokens = quote! {};
tokens.append("\n");
tokens.append(comment);
tokens.append("\n");
tokens
}
pub fn doc(comment: String) -> ast::Attribute {
aster::AstBuilder::new().attr().doc(&*comment)
}
pub fn link_name(name: &str) -> ast::Attribute {
aster::AstBuilder::new()
.attr()
.name_value("link_name")
.str(name)
pub fn link_name(name: &str) -> quote::Tokens {
// LLVM mangles the name by default but it's already mangled.
// Prefixing the name with \u{1} should tell LLVM to not mangle it.
let name = format!("\u{1}{}", name);
quote! {
#[link_name = #name]
}
}
}
/// Generates a proper type for a field or type with a given `Layout`, that is,
/// a type with the correct size and alignment restrictions.
pub fn blob(layout: Layout) -> P<ast::Ty> {
pub fn blob(layout: Layout) -> quote::Tokens {
let opaque = layout.opaque();
// FIXME(emilio, #412): We fall back to byte alignment, but there are
@ -75,39 +71,46 @@ pub fn blob(layout: Layout) -> P<ast::Ty> {
}
};
let ty_name = quote::Ident::new(ty_name);
let data_len = opaque.array_size().unwrap_or(layout.size);
let inner_ty = aster::AstBuilder::new().ty().path().id(ty_name).build();
if data_len == 1 {
inner_ty
quote! {
#ty_name
}
} else {
aster::ty::TyBuilder::new().array(data_len).build(inner_ty)
quote! {
[ #ty_name ; #data_len ]
}
}
}
pub mod ast_ty {
use aster;
use ir::context::BindgenContext;
use ir::function::FunctionSig;
use ir::ty::FloatKind;
use syntax::ast;
use syntax::ptr::P;
use quote;
pub fn raw_type(ctx: &BindgenContext, name: &str) -> P<ast::Ty> {
let ident = ctx.rust_ident_raw(&name);
pub fn raw_type(ctx: &BindgenContext, name: &str) -> quote::Tokens {
let ident = ctx.rust_ident_raw(name);
match ctx.options().ctypes_prefix {
Some(ref prefix) => {
let prefix = ctx.rust_ident_raw(prefix);
quote_ty!(ctx.ext_cx(), $prefix::$ident)
let prefix = ctx.rust_ident_raw(prefix.as_str());
quote! {
#prefix::#ident
}
}
None => quote_ty!(ctx.ext_cx(), ::std::os::raw::$ident),
None => quote! {
::std::os::raw::#ident
},
}
}
pub fn float_kind_rust_type(
ctx: &BindgenContext,
fk: FloatKind,
) -> P<ast::Ty> {
) -> quote::Tokens {
// TODO: we probably should just take the type layout into
// account?
//
@ -116,64 +119,57 @@ pub mod ast_ty {
// FIXME: `c_longdouble` doesn't seem to be defined in some
// systems, so we use `c_double` directly.
match (fk, ctx.options().convert_floats) {
(FloatKind::Float, true) => aster::ty::TyBuilder::new().f32(),
(FloatKind::Float, true) => quote! { f32 },
(FloatKind::Double, true) |
(FloatKind::LongDouble, true) => aster::ty::TyBuilder::new().f64(),
(FloatKind::LongDouble, true) => quote! { f64 },
(FloatKind::Float, false) => raw_type(ctx, "c_float"),
(FloatKind::Double, false) |
(FloatKind::LongDouble, false) => raw_type(ctx, "c_double"),
(FloatKind::Float128, _) => {
aster::ty::TyBuilder::new().array(16).u8()
}
(FloatKind::Float128, _) => quote! { [u8; 16] },
}
}
pub fn int_expr(val: i64) -> P<ast::Expr> {
use std::i64;
let expr = aster::AstBuilder::new().expr();
pub fn int_expr(val: i64) -> quote::Tokens {
// Don't use quote! { #val } because that adds the type suffix.
let mut tokens = quote! {};
tokens.append(val.to_string());
tokens
}
// This is not representable as an i64 if it's negative, so we
// special-case it.
//
// Fix in aster incoming.
if val == i64::MIN {
expr.neg().uint(1u64 << 63)
} else {
expr.int(val)
pub fn uint_expr(val: u64) -> quote::Tokens {
// Don't use quote! { #val } because that adds the type suffix.
let mut tokens = quote! {};
tokens.append(val.to_string());
tokens
}
/// Returns hex representation of the given value.
pub fn hex_expr(val: u64) -> quote::Tokens {
let mut tokens = quote! {};
tokens.append(format!("{:#x}", val));
tokens
}
pub fn byte_array_expr(bytes: &[u8]) -> quote::Tokens {
let mut bytes: Vec<_> = bytes.iter().cloned().collect();
bytes.push(0);
quote! {
#bytes
}
}
pub fn bool_expr(val: bool) -> P<ast::Expr> {
aster::AstBuilder::new().expr().bool(val)
}
pub fn byte_array_expr(bytes: &[u8]) -> P<ast::Expr> {
let mut vec = Vec::with_capacity(bytes.len() + 1);
for byte in bytes {
vec.push(int_expr(*byte as i64));
}
vec.push(int_expr(0));
let kind = ast::ExprKind::Array(vec);
aster::AstBuilder::new().expr().build_expr_kind(kind)
}
pub fn cstr_expr(mut string: String) -> P<ast::Expr> {
pub fn cstr_expr(mut string: String) -> quote::Tokens {
string.push('\0');
aster::AstBuilder::new().expr().build_lit(
aster::AstBuilder::new()
.lit()
.byte_str(string),
)
let b = quote::ByteStr(&string);
quote! {
#b
}
}
pub fn float_expr(
ctx: &BindgenContext,
f: f64,
) -> Result<P<ast::Expr>, ()> {
use aster::symbol::ToSymbol;
) -> Result<quote::Tokens, ()> {
if f.is_finite() {
let mut string = f.to_string();
@ -182,21 +178,28 @@ pub mod ast_ty {
string.push('.');
}
let kind =
ast::LitKind::FloatUnsuffixed(string.as_str().to_symbol());
return Ok(aster::AstBuilder::new().expr().lit().build_lit(kind));
let mut tokens = quote! {};
tokens.append(string);
return Ok(tokens);
}
let prefix = ctx.trait_prefix();
if f.is_nan() {
return Ok(quote_expr!(ctx.ext_cx(), ::$prefix::f64::NAN));
return Ok(quote! {
::#prefix::f64::NAN
});
}
if f.is_infinite() {
return Ok(if f.is_sign_positive() {
quote_expr!(ctx.ext_cx(), ::$prefix::f64::INFINITY)
quote! {
::#prefix::f64::INFINITY
}
} else {
quote_expr!(ctx.ext_cx(), ::$prefix::f64::NEG_INFINITY)
quote! {
::#prefix::f64::NEG_INFINITY
}
});
}
@ -207,23 +210,24 @@ pub mod ast_ty {
pub fn arguments_from_signature(
signature: &FunctionSig,
ctx: &BindgenContext,
) -> Vec<P<ast::Expr>> {
// TODO: We need to keep in sync the argument names, so we should unify
// this with the other loop that decides them.
) -> Vec<quote::Tokens> {
let mut unnamed_arguments = 0;
signature
.argument_types()
.iter()
.map(|&(ref name, _ty)| {
let arg_name = match *name {
Some(ref name) => ctx.rust_mangle(name).into_owned(),
match *name {
Some(ref name) => {
let name = ctx.rust_ident(name);
quote! { #name }
}
None => {
unnamed_arguments += 1;
format!("arg{}", unnamed_arguments)
let name = ctx.rust_ident(format!("arg{}", unnamed_arguments));
quote! { #name }
}
};
aster::expr::ExprBuilder::new().id(arg_name)
}
})
.collect::<Vec<_>>()
.collect()
}
}
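
A standalone sketch of the quote 0.3 pattern these rewritten helpers rely on, assuming `quote = "0.3"` as in the updated Cargo.toml; it shows the same Ident/interpolation mechanism used by `repr()` above and is not part of bindgen's public API:

    #[macro_use]
    extern crate quote;

    fn main() {
        // The aster AstBuilder calls are replaced by direct quote!
        // expansions; for example, building a #[repr(C)] attribute:
        let which = quote::Ident::new("C");
        let attr = quote! { #[repr( #which )] };
        println!("{}", attr);
    }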

third_party/rust/bindgen/src/codegen/impl_debug.rs (vendored, new file)

@ -0,0 +1,222 @@
use ir::comp::{BitfieldUnit, CompKind, Field, FieldData, FieldMethods};
use ir::context::BindgenContext;
use ir::derive::CanTriviallyDeriveDebug;
use ir::item::{HasTypeParamInArray, IsOpaque, Item, ItemCanonicalName};
use ir::ty::{RUST_DERIVE_IN_ARRAY_LIMIT, TypeKind};
use quote;
pub fn gen_debug_impl(
ctx: &BindgenContext,
fields: &[Field],
item: &Item,
kind: CompKind,
) -> quote::Tokens {
let struct_name = item.canonical_name(ctx);
let mut format_string = format!("{} {{{{ ", struct_name);
let mut tokens = vec![];
if item.is_opaque(ctx, &()) {
format_string.push_str("opaque");
} else {
match kind {
CompKind::Union => {
format_string.push_str("union");
}
CompKind::Struct => {
let processed_fields = fields.iter().filter_map(|f| match f {
&Field::DataMember(ref fd) => fd.impl_debug(ctx, ()),
&Field::Bitfields(ref bu) => bu.impl_debug(ctx, ()),
});
for (i, (fstring, toks)) in processed_fields.enumerate() {
if i > 0 {
format_string.push_str(", ");
}
tokens.extend(toks);
format_string.push_str(&fstring);
}
}
}
}
format_string.push_str(" }}");
tokens.insert(0, quote! { #format_string });
quote! {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, #( #tokens ),*)
}
}
}
/// A trait for things for which we can generate tokens that contribute to a
/// generated `impl Debug`.
pub trait ImplDebug<'a> {
/// Any extra parameter required by this particular `ImplDebug` implementation.
type Extra;
/// Generate a format string snippet to be included in the larger `impl Debug`
/// format string, and the code to get the format string's interpolation values.
fn impl_debug(
&self,
ctx: &BindgenContext,
extra: Self::Extra,
) -> Option<(String, Vec<quote::Tokens>)>;
}
impl<'a> ImplDebug<'a> for FieldData {
type Extra = ();
fn impl_debug(
&self,
ctx: &BindgenContext,
_: Self::Extra,
) -> Option<(String, Vec<quote::Tokens>)> {
if let Some(name) = self.name() {
ctx.resolve_item(self.ty()).impl_debug(ctx, name)
} else {
None
}
}
}
impl<'a> ImplDebug<'a> for BitfieldUnit {
type Extra = ();
fn impl_debug(
&self,
ctx: &BindgenContext,
_: Self::Extra,
) -> Option<(String, Vec<quote::Tokens>)> {
let mut format_string = String::new();
let mut tokens = vec![];
for (i, bitfield) in self.bitfields().iter().enumerate() {
if i > 0 {
format_string.push_str(", ");
}
if let Some(bitfield_name) = bitfield.name() {
format_string.push_str(&format!("{} : {{:?}}", bitfield_name));
let getter_name = bitfield.getter_name();
let name_ident = ctx.rust_ident_raw(getter_name);
tokens.push(quote! {
self.#name_ident ()
});
}
}
Some((format_string, tokens))
}
}
impl<'a> ImplDebug<'a> for Item {
type Extra = &'a str;
fn impl_debug(
&self,
ctx: &BindgenContext,
name: &str,
) -> Option<(String, Vec<quote::Tokens>)> {
let name_ident = ctx.rust_ident(name);
// We don't know if blacklisted items `impl Debug` or not, so we can't
// add them to the format string we're building up.
if !ctx.whitelisted_items().contains(&self.id()) {
return None;
}
let ty = match self.as_type() {
Some(ty) => ty,
None => {
return None;
}
};
fn debug_print(
name: &str,
name_ident: quote::Tokens,
) -> Option<(String, Vec<quote::Tokens>)> {
Some((
format!("{}: {{:?}}", name),
vec![quote! {
self.#name_ident
}],
))
}
match *ty.kind() {
// Handle the simple cases.
TypeKind::Void |
TypeKind::NullPtr |
TypeKind::Int(..) |
TypeKind::Float(..) |
TypeKind::Complex(..) |
TypeKind::Function(..) |
TypeKind::Enum(..) |
TypeKind::Reference(..) |
TypeKind::BlockPointer |
TypeKind::UnresolvedTypeRef(..) |
TypeKind::ObjCInterface(..) |
TypeKind::ObjCId |
TypeKind::Comp(..) |
TypeKind::ObjCSel => debug_print(name, quote! { #name_ident }),
TypeKind::TemplateInstantiation(ref inst) => {
if inst.is_opaque(ctx, self) {
Some((format!("{}: opaque", name), vec![]))
} else {
debug_print(name, quote! { #name_ident })
}
}
// The generic is not required to implement Debug, so we cannot debug-print that type
TypeKind::TypeParam => {
Some((format!("{}: Non-debuggable generic", name), vec![]))
}
TypeKind::Array(_, len) => {
// Generics are not required to implement Debug
if self.has_type_param_in_array(ctx) {
Some(
(format!("{}: Array with length {}", name, len), vec![]),
)
} else if len < RUST_DERIVE_IN_ARRAY_LIMIT {
// The simple case
debug_print(name, quote! { #name_ident })
} else {
// Let's implement our own print function
Some((
format!("{}: [{{}}]", name),
vec![quote! {
self.#name_ident
.iter()
.enumerate()
.map(|(i, v)| format!("{}{:?}", if i > 0 { ", " } else { "" }, v))
.collect::<String>()
}],
))
}
}
TypeKind::ResolvedTypeRef(t) |
TypeKind::TemplateAlias(t, _) |
TypeKind::Alias(t) => {
// We follow the aliases
ctx.resolve_item(t).impl_debug(ctx, name)
}
TypeKind::Pointer(inner) => {
let inner_type = ctx.resolve_type(inner).canonical_type(ctx);
match *inner_type.kind() {
TypeKind::Function(ref sig)
if !sig.can_trivially_derive_debug() => {
Some((format!("{}: FunctionPointer", name), vec![]))
}
_ => debug_print(name, quote! { #name_ident }),
}
}
TypeKind::Opaque => None,
}
}
}
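
To show the shape of what `gen_debug_impl` expands to, here is an illustrative hand-written equivalent for a hypothetical struct with one small field and one array longer than RUST_DERIVE_IN_ARRAY_LIMIT; it is not verbatim bindgen output:

    struct Foo {
        x: u32,
        big: [u32; 64],
    }

    impl ::std::fmt::Debug for Foo {
        fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
            // Small fields go through {:?}; arrays past the derive limit get
            // the iterator-based printer built above.
            write!(f, "Foo {{ x: {:?}, big: [{}] }}",
                   self.x,
                   self.big
                       .iter()
                       .enumerate()
                       .map(|(i, v)| format!("{}{:?}", if i > 0 { ", " } else { "" }, v))
                       .collect::<String>())
        }
    }

    fn main() {
        println!("{:?}", Foo { x: 1, big: [0; 64] });
    }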

third_party/rust/bindgen/src/codegen/impl_partialeq.rs (vendored, new file)

@ -0,0 +1,125 @@
use ir::comp::{CompInfo, CompKind, Field, FieldMethods};
use ir::context::BindgenContext;
use ir::item::{IsOpaque, Item};
use ir::ty::{TypeKind, RUST_DERIVE_IN_ARRAY_LIMIT};
use quote;
/// Generate a manual implementation of `PartialEq` trait for the
/// specified compound type.
pub fn gen_partialeq_impl(
ctx: &BindgenContext,
comp_info: &CompInfo,
item: &Item,
ty_for_impl: &quote::Tokens,
) -> Option<quote::Tokens> {
let mut tokens = vec![];
if item.is_opaque(ctx, &()) {
tokens.push(quote! {
&self._bindgen_opaque_blob[..] == &other._bindgen_opaque_blob[..]
});
} else if comp_info.kind() == CompKind::Union {
assert!(!ctx.options().rust_features().untagged_union());
tokens.push(quote! {
&self.bindgen_union_field[..] == &other.bindgen_union_field[..]
});
} else {
for base in comp_info.base_members().iter() {
if !base.requires_storage(ctx) {
continue;
}
let ty_item = ctx.resolve_item(base.ty);
let field_name = &base.field_name;
if ty_item.is_opaque(ctx, &()) {
let field_name = ctx.rust_ident(field_name);
tokens.push(quote! {
&self. #field_name [..] == &other. #field_name [..]
});
} else {
tokens.push(gen_field(ctx, ty_item, field_name));
}
}
for field in comp_info.fields() {
match *field {
Field::DataMember(ref fd) => {
let ty_item = ctx.resolve_item(fd.ty());
let name = fd.name().unwrap();
tokens.push(gen_field(ctx, ty_item, name));
}
Field::Bitfields(ref bu) => for bitfield in bu.bitfields() {
if let Some(_) = bitfield.name() {
let getter_name = bitfield.getter_name();
let name_ident = ctx.rust_ident_raw(getter_name);
tokens.push(quote! {
self.#name_ident () == other.#name_ident ()
});
}
},
}
}
}
Some(quote! {
fn eq(&self, other: & #ty_for_impl) -> bool {
#( #tokens )&&*
}
})
}
fn gen_field(ctx: &BindgenContext, ty_item: &Item, name: &str) -> quote::Tokens {
fn quote_equals(name_ident: quote::Ident) -> quote::Tokens {
quote! { self.#name_ident == other.#name_ident }
}
let name_ident = ctx.rust_ident(name);
let ty = ty_item.expect_type();
match *ty.kind() {
TypeKind::Void |
TypeKind::NullPtr |
TypeKind::Int(..) |
TypeKind::Complex(..) |
TypeKind::Float(..) |
TypeKind::Enum(..) |
TypeKind::TypeParam |
TypeKind::UnresolvedTypeRef(..) |
TypeKind::BlockPointer |
TypeKind::Reference(..) |
TypeKind::ObjCInterface(..) |
TypeKind::ObjCId |
TypeKind::ObjCSel |
TypeKind::Comp(..) |
TypeKind::Pointer(_) |
TypeKind::Function(..) |
TypeKind::Opaque => quote_equals(name_ident),
TypeKind::TemplateInstantiation(ref inst) => {
if inst.is_opaque(ctx, &ty_item) {
quote! {
&self. #name_ident [..] == &other. #name_ident [..]
}
} else {
quote_equals(name_ident)
}
}
TypeKind::Array(_, len) => if len <= RUST_DERIVE_IN_ARRAY_LIMIT {
quote_equals(name_ident)
} else {
quote! {
&self. #name_ident [..] == &other. #name_ident [..]
}
},
TypeKind::ResolvedTypeRef(t) |
TypeKind::TemplateAlias(t, _) |
TypeKind::Alias(t) => {
let inner_item = ctx.resolve_item(t);
gen_field(ctx, inner_item, name)
}
}
}
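
Likewise, the tokens produced by `gen_partialeq_impl` expand to something shaped like this hand-written equivalent for a hypothetical struct: scalar fields compare directly, over-limit arrays compare as slices, and the clauses are joined by the `#( #tokens )&&*` repetition:

    struct Foo {
        a: u32,
        big: [u8; 64],
    }

    impl PartialEq for Foo {
        fn eq(&self, other: &Foo) -> bool {
            self.a == other.a &&
                &self.big[..] == &other.big[..]
        }
    }

    fn main() {
        let x = Foo { a: 1, big: [0; 64] };
        let y = Foo { a: 1, big: [0; 64] };
        assert!(x == y);
    }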

third_party/rust/bindgen/src/codegen/mod.rs (vendored)

(diff not shown because of its large size)

@ -2,22 +2,19 @@
use super::helpers;
use aster::struct_field::StructFieldBuilder;
use ir::comp::CompInfo;
use ir::context::BindgenContext;
use ir::layout::Layout;
use ir::ty::{Type, TypeKind};
use quote;
use std::cmp;
use std::mem;
use syntax::ast;
/// Trace the layout of struct.
#[derive(Debug)]
pub struct StructLayoutTracker<'a, 'ctx: 'a> {
pub struct StructLayoutTracker<'a> {
name: &'a str,
ctx: &'a BindgenContext<'ctx>,
ctx: &'a BindgenContext,
comp: &'a CompInfo,
latest_offset: usize,
padding_count: usize,
@ -80,9 +77,9 @@ fn test_bytes_from_bits_pow2() {
}
}
impl<'a, 'ctx> StructLayoutTracker<'a, 'ctx> {
impl<'a> StructLayoutTracker<'a> {
pub fn new(
ctx: &'a BindgenContext<'ctx>,
ctx: &'a BindgenContext,
comp: &'a CompInfo,
name: &'a str,
) -> Self {
@ -154,7 +151,7 @@ impl<'a, 'ctx> StructLayoutTracker<'a, 'ctx> {
field_name: &str,
field_ty: &Type,
field_offset: Option<usize>,
) -> Option<ast::StructField> {
) -> Option<quote::Tokens> {
let mut field_layout = match field_ty.layout(self.ctx) {
Some(l) => l,
None => return None,
@ -241,7 +238,7 @@ impl<'a, 'ctx> StructLayoutTracker<'a, 'ctx> {
padding_layout.map(|layout| self.padding_field(layout))
}
pub fn pad_struct(&mut self, layout: Layout) -> Option<ast::StructField> {
pub fn pad_struct(&mut self, layout: Layout) -> Option<quote::Tokens> {
debug!(
"pad_struct:\n\tself = {:#?}\n\tlayout = {:#?}",
self,
@ -291,39 +288,28 @@ impl<'a, 'ctx> StructLayoutTracker<'a, 'ctx> {
}
}
pub fn align_struct(&self, layout: Layout) -> Option<ast::StructField> {
if self.max_field_align < layout.align &&
pub fn requires_explicit_align(&self, layout: Layout) -> bool {
self.max_field_align < layout.align &&
layout.align <= mem::size_of::<*mut ()>()
{
let ty = helpers::blob(Layout::new(0, layout.align));
Some(
StructFieldBuilder::named("__bindgen_align")
.pub_()
.build_ty(ty),
)
} else {
None
}
}
fn padding_bytes(&self, layout: Layout) -> usize {
align_to(self.latest_offset, layout.align) - self.latest_offset
}
fn padding_field(&mut self, layout: Layout) -> ast::StructField {
fn padding_field(&mut self, layout: Layout) -> quote::Tokens {
let ty = helpers::blob(layout);
let padding_count = self.padding_count;
self.padding_count += 1;
let padding_field_name = format!("__bindgen_padding_{}", padding_count);
let padding_field_name = quote::Ident::new(format!("__bindgen_padding_{}", padding_count));
self.max_field_align = cmp::max(self.max_field_align, layout.align);
StructFieldBuilder::named(padding_field_name)
.pub_()
.build_ty(ty)
quote! {
pub #padding_field_name : #ty ,
}
}
/// Returns whether the new field is known to merge with a bitfield.

third_party/rust/bindgen/src/features.rs (vendored)

@ -90,6 +90,8 @@ macro_rules! rust_target_base {
=> Stable_1_0 => 1.0;
/// Rust stable 1.19
=> Stable_1_19 => 1.19;
/// Rust stable 1.21
=> Stable_1_21 => 1.21;
/// Nightly rust
=> Nightly => nightly;
);
@ -100,7 +102,7 @@ rust_target_base!(rust_target_def);
rust_target_base!(rust_target_values_def);
/// Latest stable release of Rust
pub const LATEST_STABLE_RUST: RustTarget = RustTarget::Stable_1_19;
pub const LATEST_STABLE_RUST: RustTarget = RustTarget::Stable_1_21;
/// Create RustFeatures struct definition, new(), and a getter for each field
macro_rules! rust_feature_def {
@ -140,6 +142,10 @@ rust_feature_def!(
=> untagged_union;
/// Constant function ([RFC 911](https://github.com/rust-lang/rfcs/blob/master/text/0911-const-fn.md))
=> const_fn;
/// `thiscall` calling convention ([Tracking issue](https://github.com/rust-lang/rust/issues/42202))
=> thiscall_abi;
/// builtin impls for `Clone` ([PR](https://github.com/rust-lang/rust/pull/43690))
=> builtin_clone_impls;
);
impl From<RustTarget> for RustFeatures {
@ -150,8 +156,13 @@ impl From<RustTarget> for RustFeatures {
features.untagged_union = true;
}
if rust_target >= RustTarget::Stable_1_21 {
features.builtin_clone_impls = true;
}
if rust_target >= RustTarget::Nightly {
features.const_fn = true;
features.thiscall_abi = true;
}
features
@ -180,6 +191,7 @@ mod test {
fn str_to_target() {
test_target("1.0", RustTarget::Stable_1_0);
test_target("1.19", RustTarget::Stable_1_19);
test_target("1.21", RustTarget::Stable_1_21);
test_target("nightly", RustTarget::Nightly);
}
}
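
A minimal sketch of opting into the new 1.21 target from user code (equivalent to passing `--rust-target 1.21` on the command line), which enables the builtin Clone impls feature added above; the header path is a placeholder:

    extern crate bindgen;

    use std::str::FromStr;

    fn main() {
        let target = bindgen::RustTarget::from_str("1.21").unwrap();
        let _builder = bindgen::builder()
            .header("input.h")
            .rust_target(target);
    }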

Some files were not shown because too many files changed.