Bug 1469228: Revendor rust dependencies. r=me

MozReview-Commit-ID: KEx2kxGH0iO
This commit is contained in:
Emilio Cobos Álvarez 2018-06-18 10:39:42 +02:00
Родитель 659f21db51
Коммит 5d02e2677d
76 изменённых файлов: 557 добавлений и 28918 удалений

Просмотреть файл

@ -1 +1 @@
{"files":{"Cargo.toml":"e173ef38709c8afee2488a6b0acf94c7695c3cebe89b3fe444253808bede8405","LICENSE":"2c6fc9268c3b765da5bf34fe4909425437f61be05674c2516c7f8cf1251c20aa","src/lib.rs":"71e7248b21b5e603e31060ecf241cf204efdfea5a0b400d084601f6c8bdfe11c"},"package":"f9f316203d1ea36f4f18316822806f6999aa3dc5ed1adf51e35b77e3b3933d78"} {"files":{"Cargo.toml":"517c9ae719b876cf67d9bd8dfec71d0b7a35ffe6e68668c35e21e2f6c3dfdc05","LICENSE":"2c6fc9268c3b765da5bf34fe4909425437f61be05674c2516c7f8cf1251c20aa","src/lib.rs":"71e7248b21b5e603e31060ecf241cf204efdfea5a0b400d084601f6c8bdfe11c"},"package":"0472c17c83d3ec1af32fb6ee2b3ad56ae0b6e69355d63d1d30602055c34324a8"}

6
third_party/rust/cstr-macros/Cargo.toml поставляемый
Просмотреть файл

@ -12,7 +12,7 @@
[package] [package]
name = "cstr-macros" name = "cstr-macros"
version = "0.1.2" version = "0.1.3"
authors = ["Xidorn Quan <me@upsuper.org>"] authors = ["Xidorn Quan <me@upsuper.org>"]
description = "Procedural macros for cstr" description = "Procedural macros for cstr"
license = "MIT" license = "MIT"
@ -24,8 +24,8 @@ proc-macro = true
version = "0.1" version = "0.1"
[dependencies.syn] [dependencies.syn]
version = "0.12" version = "0.13"
features = ["derive", "parsing"] features = ["derive", "parsing"]
default-features = false default-features = false
[dev-dependencies.quote] [dev-dependencies.quote]
version = "0.4" version = "0.5"

Просмотреть файл

@ -1 +1 @@
{"files":{".travis.yml":"2e3d3211e52ff52d83a0a2a495a28175dbcf2a30ab680d7c8f20622751b04f78","CHANGELOG.md":"fa0ef2d2b2d5382962d371a68e5eba052f5b303d70090e21c2d1f4c5e52b9851","Cargo.toml":"ee3bb349f21a8bf3801692a0af134b3e1071f86bd1d4aa52b47f45c7e9d696eb","LICENSE":"8ea93490d74a5a1b1af3ff71d786271b3f1e5f0bea79ac16e02ec533cef040d6","README.md":"7e05868f02bae698ee3193b86e86f25faa4dbc63661062e1d3f7bff590cfb313","examples/consume_fields.rs":"f32d3873b61e22b1ded500571ec7120420b4825ee7f807d07409e3a257407add","examples/fallible_read.rs":"4e2d558f8a77c4fffa79bde5a6c48df3dbc932822e7bc7cf0a903d1ea38b8d6f","publish.sh":"42795a155247c69402f8c4c917c9874a06dfde5a7606c8b59fc4e9ccd34233dd","src/lib.rs":"c13e19cd0363784d9ec3605bafcaf74648594fb419162143c4ecc3308a8ec695","tests/accrue_errors.rs":"7a995118cfa75ac90accf9a35e17b07a00e8528c8ccc5dd8700ba7b4c59393c1","tests/custom_bound.rs":"cca7c557ac0a2efd9554d31f0df9a98c4f6f01b9f211107846732cc1fe9f7856","tests/enums_newtype.rs":"328ebbbb4aa540f06f13158dff22cf9d98d45dba2331f75e4aa169d348397b76","tests/enums_struct.rs":"560a8bfdea9eca7f8c2024bc8740e71ef1a0681cea36b098ceadba520fad8640","tests/enums_unit.rs":"5e9458af9d695706e181b6622dcbc8b80b9eae33dcc1f15da7eecfd3e7037777","tests/error.rs":"69d12e969555fc1d72990d7596b54adcb469da95f1f257d0273f31bc9c855db5","tests/from_variant.rs":"af60c9dec64e80e2ac3beafb942d8edc1100a1342bb97378e6a35f878dd1fb50","tests/generics.rs":"3d884d65cb6b57d4bc4b3f7c39f321b2df3cd339fa15db5b66dc7c97ef84df17","tests/happy_path.rs":"5143dbf33b59fcab94be61affefddf485857f1a5cb3d45d5583463423d417cdf","tests/multiple.rs":"20e1c5110449db46df68c5a4cdb6e0c4c0e9a6b47451fe73f1523a1cf730196d","tests/newtype.rs":"a8709857e2067bc01b388a11230db5764c9e5fe2341c98d6c819adc01472b988","tests/skip.rs":"e34034c6b5fae80c8cf2caa762a41ab3d971f8af50f1022e68ba299218477892","tests/split_declaration.rs":"d55219ec0dce001ccd1975f0b4fbe0f5e8c5792a1ddf2de5a210d380bc6761e0","tests/supports.rs":"1131c2afd42e20e4a39f922758cbb8d7c5a0167ae41f9cd1cd14b40db055c
d10"},"package":"d3effd06d4057f275cb7858889f4952920bab78dd8ff0f6e7dfe0c8d2e67ed89"} {"files":{".travis.yml":"2e3d3211e52ff52d83a0a2a495a28175dbcf2a30ab680d7c8f20622751b04f78","CHANGELOG.md":"d7322023ffc58d041c542f8de0f43167f34ba4fdacc9a5014308d97055f7b729","Cargo.toml":"c1d8f7f99c24eb373e5aefaf3c678eea57d72552fdbb3547872b62b0d28aa07f","LICENSE":"8ea93490d74a5a1b1af3ff71d786271b3f1e5f0bea79ac16e02ec533cef040d6","README.md":"7e05868f02bae698ee3193b86e86f25faa4dbc63661062e1d3f7bff590cfb313","examples/consume_fields.rs":"f32d3873b61e22b1ded500571ec7120420b4825ee7f807d07409e3a257407add","examples/fallible_read.rs":"4e2d558f8a77c4fffa79bde5a6c48df3dbc932822e7bc7cf0a903d1ea38b8d6f","publish.sh":"42795a155247c69402f8c4c917c9874a06dfde5a7606c8b59fc4e9ccd34233dd","src/lib.rs":"c13e19cd0363784d9ec3605bafcaf74648594fb419162143c4ecc3308a8ec695","tests/accrue_errors.rs":"7a995118cfa75ac90accf9a35e17b07a00e8528c8ccc5dd8700ba7b4c59393c1","tests/custom_bound.rs":"cca7c557ac0a2efd9554d31f0df9a98c4f6f01b9f211107846732cc1fe9f7856","tests/enums_newtype.rs":"328ebbbb4aa540f06f13158dff22cf9d98d45dba2331f75e4aa169d348397b76","tests/enums_struct.rs":"560a8bfdea9eca7f8c2024bc8740e71ef1a0681cea36b098ceadba520fad8640","tests/enums_unit.rs":"5e9458af9d695706e181b6622dcbc8b80b9eae33dcc1f15da7eecfd3e7037777","tests/error.rs":"69d12e969555fc1d72990d7596b54adcb469da95f1f257d0273f31bc9c855db5","tests/from_variant.rs":"af60c9dec64e80e2ac3beafb942d8edc1100a1342bb97378e6a35f878dd1fb50","tests/generics.rs":"3d884d65cb6b57d4bc4b3f7c39f321b2df3cd339fa15db5b66dc7c97ef84df17","tests/happy_path.rs":"5143dbf33b59fcab94be61affefddf485857f1a5cb3d45d5583463423d417cdf","tests/multiple.rs":"20e1c5110449db46df68c5a4cdb6e0c4c0e9a6b47451fe73f1523a1cf730196d","tests/newtype.rs":"a8709857e2067bc01b388a11230db5764c9e5fe2341c98d6c819adc01472b988","tests/skip.rs":"e34034c6b5fae80c8cf2caa762a41ab3d971f8af50f1022e68ba299218477892","tests/split_declaration.rs":"d55219ec0dce001ccd1975f0b4fbe0f5e8c5792a1ddf2de5a210d380bc6761e0
","tests/supports.rs":"1131c2afd42e20e4a39f922758cbb8d7c5a0167ae41f9cd1cd14b40db055cd10"},"package":"2a78af487e4eb8f4421a1770687b328af6bb4494ca93435210678c6eea875c11"}

15
third_party/rust/darling/CHANGELOG.md поставляемый
Просмотреть файл

@ -1,6 +1,19 @@
# Changelog # Changelog
## Unreleased Changes ## Unreleased Features
_None_
## v0.4.0 (April 5, 2018)
- Update dependencies on `proc-macro`, `quote`, and `syn` [#26](https://github.com/TedDriggs/darling/pull/26). Thanks to @hcpl
## v0.3.3 (April 2, 2018)
**YANKED**
## v0.3.2 (March 13, 2018)
- Derive `Default` on `darling::Ignored` (fixes [#25](https://github.com/TedDriggs/darling/issues/25)).
## v0.3.1 (March 7, 2018)
- Support proc-macro2/nightly [#24](https://github.com/TedDriggs/darling/pull/24). Thanks to @kdy1
## v0.3.0 (January 26, 2018) ## v0.3.0 (January 26, 2018)

12
third_party/rust/darling/Cargo.toml поставляемый
Просмотреть файл

@ -12,22 +12,22 @@
[package] [package]
name = "darling" name = "darling"
version = "0.3.0" version = "0.4.0"
authors = ["Ted Driggs <ted.driggs@outlook.com>"] authors = ["Ted Driggs <ted.driggs@outlook.com>"]
description = "A proc-macro library for reading attributes into structs when\nimplementing custom derives.\n" description = "A proc-macro library for reading attributes into structs when\nimplementing custom derives.\n"
documentation = "https://docs.rs/darling/0.3.0" documentation = "https://docs.rs/darling/0.4.0"
readme = "README.md" readme = "README.md"
license = "MIT" license = "MIT"
repository = "https://github.com/TedDriggs/darling" repository = "https://github.com/TedDriggs/darling"
[dependencies.darling_core] [dependencies.darling_core]
version = "=0.3.0" version = "=0.4.0"
[dependencies.darling_macro] [dependencies.darling_macro]
version = "=0.3.0" version = "=0.4.0"
[dev-dependencies.quote] [dev-dependencies.quote]
version = "0.4" version = "0.5"
[dev-dependencies.syn] [dev-dependencies.syn]
version = "0.12.10" version = "0.13"
[badges.travis-ci] [badges.travis-ci]
repository = "TedDriggs/darling" repository = "TedDriggs/darling"

Просмотреть файл

@ -1 +1 @@
{"files":{"Cargo.toml":"64569afb788cd8b08c9d13a994f39b9123138f0391cd2aa989d4949eadaf09ad","src/ast.rs":"2538b41e2a579c0a5a49e02f911120ffff39d48dfc0d80570a5fcfe95c971794","src/codegen/default_expr.rs":"8cea4bf29096ad1add5325430a12865173a13a821e1888bed8120ec6120a7764","src/codegen/error.rs":"2a1bde9a20c664f26c6a0017e35ddf82885a31b8be42a628ea5549013b1eab44","src/codegen/field.rs":"ad8355c7bb87269c7dcc1d27695b0f8de410b546625d33d5a219fbadf85f8230","src/codegen/fmi_impl.rs":"89a66b24d7527989dd90ca71d9409fd8cdcf3a659fa1a670448032a4b384e83c","src/codegen/from_derive_impl.rs":"36507c9eddd354a50f96cd28e737c914be494c83ae61202b533524a9d90a2ca9","src/codegen/from_field.rs":"586866442f6628fd055f139b018a8c5c13e3aea20954ec741517aa9ab731c163","src/codegen/from_variant_impl.rs":"d42ecd82d3159aa7ee89ed81ed355c927dea9df2a298cf1db0c486699b77eac2","src/codegen/mod.rs":"46cdb1b4a76eb2e56f01e2c9e2879aed9b1c21ecbed42575a2eeccabf446a27a","src/codegen/outer_from_impl.rs":"a484fc3faed8a722327df18cb5179812b60ff62795a3b00b2b968a40bddec52a","src/codegen/trait_impl.rs":"715ce9dcb82d855e9dd8f2a70599bc3c5328acde70c92b7db5bd4c276598a7d0","src/codegen/variant.rs":"294045aefcfcb54e8b9536d6d91251a46115271869578b3212ae36ae76883b18","src/codegen/variant_data.rs":"efdee90d7e9acce39672024502ceb0616bc162d11e59d255fcbcd23f00f80806","src/error.rs":"55f33c960123799e1ccb27676d780751c0b62d1f52ccb9a2ac69cea4acfe55db","src/from_derive_input.rs":"ea55cc1b2bc17495d36fb616f7cd7aa78e4b74ea7c8554eed9d392ee853700c3","src/from_field.rs":"b42c2fc037aebdd55f195d722ba20075d3e095f03e89890a0d37d406d6419d87","src/from_meta_item.rs":"c4cf05c3649db57ead2d7fd6ae3973b08e74ad20022ac62a7e5e49aa74a46a57","src/from_variant.rs":"2baeb09c8a95ff606d9d5ca8992f10bbad9c4925590332af1a8b5bdae54ebae8","src/lib.rs":"e8b381a74c9303a620e7f210174bfab4c44e3137cba387a9547376982e3fb10a","src/macros.rs":"ff0c87953220702d8440894a7f0f557b1aae930096663c0c98e7ca686a94f305","src/options/core.rs":"689067ee0901714e53caeef5d5634c4bc02f52ff06e3ff286410eecaca665
734","src/options/forward_attrs.rs":"35a83a4ae695872413d964d9050e35a0075c8386c286d291b1ecf1779f9ba8a3","src/options/from_derive.rs":"502e18c3d9f90d7a4cebc8c6b60181ab6068958a0ba2e70fe645528dee34b231","src/options/from_field.rs":"7222be5e62026184169f12adb08403abc89d66c53e678f8d8b43afaeceed9e4f","src/options/from_meta_item.rs":"cbc2d747e9e35e0d68b26c9f1592914bb4924cac01a6cdaf9137f643a72b551a","src/options/from_variant.rs":"6f8538da3fb61e614552839ee32bc479d33b5227d7f9d9b357d8d05146b96dac","src/options/input_field.rs":"364c7a30d0c320197062706ba12507742b97513bb64a644243b03210ef3bb334","src/options/input_variant.rs":"2fc064fb87a73542a012a31aa5fd9702cf58b52a1bf37dabbfa7fb2e758ff9cc","src/options/mod.rs":"ceefde4d1dba9b5f3822f667c34a6eb963e5a899973475456bfe7939177f0e19","src/options/outer_from.rs":"3125aad9f8c033727fd3ef4ef1e1881c77fa52463f78342c408bf135e8533037","src/options/shape.rs":"118af560da80a46d6e3f8980c3d9b4863319d224a8b2985520901bfea0eba531","src/util/ident_list.rs":"11b5008380ace89d5745cdd83b73a2841c5382f05d3a8942ba998a7e4d6abb31","src/util/ignored.rs":"89e0b5bc0f9dd8b77a63d5c1b7b3a7bb4b68d539fb97efe0d86cd1dbb46be1e8","src/util/mod.rs":"0c9ee0ba8ec03ca654fd298bd0d82588f224e3743227e6cba2beba4ab2f4dee4","src/util/over_ride.rs":"f63637ff73b3f377a4b1c38714a0f108b98ff40a96dd3ffbebb1e4ecc7523813"},"package":"167dd3e235c2f1da16a635c282630452cdf49191eb05711de1bcd1d3d5068c00"} 
{"files":{"Cargo.toml":"06561e115990be3f278ed12704d0eb575c971242cbdfe8ebb32a8132280e054e","src/ast.rs":"2538b41e2a579c0a5a49e02f911120ffff39d48dfc0d80570a5fcfe95c971794","src/codegen/default_expr.rs":"28d750fb5ed3a6344007bf545c48e4d9a15c175209903d4009efc0de257adf2e","src/codegen/error.rs":"2a1bde9a20c664f26c6a0017e35ddf82885a31b8be42a628ea5549013b1eab44","src/codegen/field.rs":"ad8355c7bb87269c7dcc1d27695b0f8de410b546625d33d5a219fbadf85f8230","src/codegen/fmi_impl.rs":"89a66b24d7527989dd90ca71d9409fd8cdcf3a659fa1a670448032a4b384e83c","src/codegen/from_derive_impl.rs":"36507c9eddd354a50f96cd28e737c914be494c83ae61202b533524a9d90a2ca9","src/codegen/from_field.rs":"586866442f6628fd055f139b018a8c5c13e3aea20954ec741517aa9ab731c163","src/codegen/from_variant_impl.rs":"d42ecd82d3159aa7ee89ed81ed355c927dea9df2a298cf1db0c486699b77eac2","src/codegen/mod.rs":"46cdb1b4a76eb2e56f01e2c9e2879aed9b1c21ecbed42575a2eeccabf446a27a","src/codegen/outer_from_impl.rs":"2314c1594bd63e682ebd4a4b4954b2b9f16aa50b1422c05568bce97ae29f9727","src/codegen/trait_impl.rs":"715ce9dcb82d855e9dd8f2a70599bc3c5328acde70c92b7db5bd4c276598a7d0","src/codegen/variant.rs":"294045aefcfcb54e8b9536d6d91251a46115271869578b3212ae36ae76883b18","src/codegen/variant_data.rs":"efdee90d7e9acce39672024502ceb0616bc162d11e59d255fcbcd23f00f80806","src/error.rs":"55f33c960123799e1ccb27676d780751c0b62d1f52ccb9a2ac69cea4acfe55db","src/from_derive_input.rs":"ea55cc1b2bc17495d36fb616f7cd7aa78e4b74ea7c8554eed9d392ee853700c3","src/from_field.rs":"b42c2fc037aebdd55f195d722ba20075d3e095f03e89890a0d37d406d6419d87","src/from_meta_item.rs":"996ccec9dca998ff41f65bb346e5cc75952af5d61339c6951bebdbf8db1212c5","src/from_variant.rs":"2baeb09c8a95ff606d9d5ca8992f10bbad9c4925590332af1a8b5bdae54ebae8","src/lib.rs":"58b910cecc1f1962c2d6059db384f065099547c34631d9ddcc35099db8e16405","src/macros.rs":"ef249cd9ca593aac423b4242df1c39c31610438da094c21562d74a7e5823c700","src/options/core.rs":"689067ee0901714e53caeef5d5634c4bc02f52ff06e3ff286410eecaca665
734","src/options/forward_attrs.rs":"35a83a4ae695872413d964d9050e35a0075c8386c286d291b1ecf1779f9ba8a3","src/options/from_derive.rs":"502e18c3d9f90d7a4cebc8c6b60181ab6068958a0ba2e70fe645528dee34b231","src/options/from_field.rs":"7222be5e62026184169f12adb08403abc89d66c53e678f8d8b43afaeceed9e4f","src/options/from_meta_item.rs":"cbc2d747e9e35e0d68b26c9f1592914bb4924cac01a6cdaf9137f643a72b551a","src/options/from_variant.rs":"6f8538da3fb61e614552839ee32bc479d33b5227d7f9d9b357d8d05146b96dac","src/options/input_field.rs":"6d43c2907694c4187e9f182f7945fc769ce210cde8eb1b4a336dea2a7fce3710","src/options/input_variant.rs":"2fc064fb87a73542a012a31aa5fd9702cf58b52a1bf37dabbfa7fb2e758ff9cc","src/options/mod.rs":"ceefde4d1dba9b5f3822f667c34a6eb963e5a899973475456bfe7939177f0e19","src/options/outer_from.rs":"3125aad9f8c033727fd3ef4ef1e1881c77fa52463f78342c408bf135e8533037","src/options/shape.rs":"118af560da80a46d6e3f8980c3d9b4863319d224a8b2985520901bfea0eba531","src/util/ident_list.rs":"11b5008380ace89d5745cdd83b73a2841c5382f05d3a8942ba998a7e4d6abb31","src/util/ignored.rs":"66e2e3201e17e8fffe2f249a4327b8178a20304624a47c0149fe8dd5e05d187c","src/util/mod.rs":"0c9ee0ba8ec03ca654fd298bd0d82588f224e3743227e6cba2beba4ab2f4dee4","src/util/over_ride.rs":"f63637ff73b3f377a4b1c38714a0f108b98ff40a96dd3ffbebb1e4ecc7523813"},"package":"b315f49c7b6db3708bca6e6913c194581a44ec619b7a39e131d4dd63733a3698"}

9
third_party/rust/darling_core/Cargo.toml поставляемый
Просмотреть файл

@ -12,7 +12,7 @@
[package] [package]
name = "darling_core" name = "darling_core"
version = "0.3.0" version = "0.4.0"
authors = ["Ted Driggs <ted.driggs@outlook.com>"] authors = ["Ted Driggs <ted.driggs@outlook.com>"]
description = "Helper crate for proc-macro library for reading attributes into structs when\nimplementing custom derives. Use https://crates.io/crates/darling in your code.\n" description = "Helper crate for proc-macro library for reading attributes into structs when\nimplementing custom derives. Use https://crates.io/crates/darling in your code.\n"
license = "MIT" license = "MIT"
@ -20,11 +20,14 @@ repository = "https://github.com/TedDriggs/darling"
[dependencies.ident_case] [dependencies.ident_case]
version = "1.0.0" version = "1.0.0"
[dependencies.proc-macro2]
version = "0.3"
[dependencies.quote] [dependencies.quote]
version = "0.4" version = "0.5"
[dependencies.syn] [dependencies.syn]
version = "0.12.10" version = "0.13"
features = ["extra-traits"] features = ["extra-traits"]
[features] [features]

Просмотреть файл

@ -24,7 +24,7 @@ impl<'a> ToTokens for DefaultExpression<'a> {
fn to_tokens(&self, tokens: &mut Tokens) { fn to_tokens(&self, tokens: &mut Tokens) {
tokens.append_all(match *self { tokens.append_all(match *self {
DefaultExpression::Inherit(ident) => { DefaultExpression::Inherit(ident) => {
let dsn = Ident::from(DEFAULT_STRUCT_NAME); let dsn = Ident::new(DEFAULT_STRUCT_NAME, ::proc_macro2::Span::call_site());
quote!(#dsn.#ident) quote!(#dsn.#ident)
}, },
DefaultExpression::Explicit(path) => quote!(#path()), DefaultExpression::Explicit(path) => quote!(#path()),
@ -38,7 +38,7 @@ pub struct DefaultDeclaration<'a>(&'a DefaultExpression<'a>);
impl<'a> ToTokens for DefaultDeclaration<'a> { impl<'a> ToTokens for DefaultDeclaration<'a> {
fn to_tokens(&self, tokens: &mut Tokens) { fn to_tokens(&self, tokens: &mut Tokens) {
let name = Ident::from(DEFAULT_STRUCT_NAME); let name = Ident::new(DEFAULT_STRUCT_NAME, ::proc_macro2::Span::call_site());
let expr = self.0; let expr = self.0;
tokens.append_all(quote!(let #name: Self = #expr;)); tokens.append_all(quote!(let #name: Self = #expr;));
} }

Просмотреть файл

@ -37,6 +37,7 @@ fn compute_impl_bounds(bound: Path, mut generics: Generics) -> Generics {
} }
let added_bound = TypeParamBound::Trait(TraitBound { let added_bound = TypeParamBound::Trait(TraitBound {
paren_token: None,
modifier: TraitBoundModifier::None, modifier: TraitBoundModifier::None,
lifetimes: None, lifetimes: None,
path: bound, path: bound,

Просмотреть файл

@ -192,7 +192,7 @@ impl FromMetaItem for isize {
impl FromMetaItem for syn::Ident { impl FromMetaItem for syn::Ident {
fn from_string(value: &str) -> Result<Self> { fn from_string(value: &str) -> Result<Self> {
Ok(syn::Ident::from(value)) Ok(syn::Ident::new(value, ::proc_macro2::Span::call_site()))
} }
} }

1
third_party/rust/darling_core/src/lib.rs поставляемый
Просмотреть файл

@ -5,6 +5,7 @@ extern crate quote;
#[macro_use] #[macro_use]
extern crate syn; extern crate syn;
extern crate proc_macro2;
extern crate ident_case; extern crate ident_case;

6
third_party/rust/darling_core/src/macros.rs поставляемый
Просмотреть файл

@ -1,3 +1,9 @@
macro_rules! quote {
($($tt:tt)*) => {
quote_spanned!(::proc_macro2::Span::call_site() => $($tt)*)
};
}
macro_rules! path { macro_rules! path {
($($path:tt)+) => { ($($path:tt)+) => {
parse_quote!($($path)+) parse_quote!($($path)+)

Просмотреть файл

@ -61,7 +61,7 @@ impl InputField {
} }
pub fn from_field(f: &syn::Field, parent: Option<&Core>) -> Result<Self> { pub fn from_field(f: &syn::Field, parent: Option<&Core>) -> Result<Self> {
let ident = f.ident.clone().unwrap_or(syn::Ident::from("__unnamed")); let ident = f.ident.clone().unwrap_or(syn::Ident::new("__unnamed", ::proc_macro2::Span::call_site()));
let ty = f.ty.clone(); let ty = f.ty.clone();
let base = Self::new(ident, ty).parse_attributes(&f.attrs)?; let base = Self::new(ident, ty).parse_attributes(&f.attrs)?;

Просмотреть файл

@ -6,7 +6,7 @@ use {FromMetaItem, FromDeriveInput, FromField, FromVariant, Result};
/// ///
/// All meta-items, fields, and variants will be successfully read into /// All meta-items, fields, and variants will be successfully read into
/// the `Ignored` struct, with all properties discarded. /// the `Ignored` struct, with all properties discarded.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)]
pub struct Ignored; pub struct Ignored;
impl FromMetaItem for Ignored { impl FromMetaItem for Ignored {

Просмотреть файл

@ -1 +1 @@
{"files":{"Cargo.toml":"24ebe6f6d1e4bd5ca3dd416bda62ab1d12d05f6990d30cff0fd301cc83273ecc","src/lib.rs":"d900da894985945215cb4494ebd4e8b5f697c19bf9e624a1bb03d22a0a5367a5"},"package":"c53edaba455f6073a10c27c72440860eb3f60444f8c8660a391032eeae744d82"} {"files":{"Cargo.toml":"cf9d579ee6af881a7aa52d43d637b4afa9cf589bfda3fa63159538d681855330","src/lib.rs":"d900da894985945215cb4494ebd4e8b5f697c19bf9e624a1bb03d22a0a5367a5"},"package":"eb69a38fdeaeaf3db712e1df170de67ee9dfc24fb88ca3e9d21e703ec25a4d8e"}

8
third_party/rust/darling_macro/Cargo.toml поставляемый
Просмотреть файл

@ -12,7 +12,7 @@
[package] [package]
name = "darling_macro" name = "darling_macro"
version = "0.3.0" version = "0.4.0"
authors = ["Ted Driggs <ted.driggs@outlook.com>"] authors = ["Ted Driggs <ted.driggs@outlook.com>"]
description = "Internal support for a proc-macro library for reading attributes into structs when\nimplementing custom derives. Use https://crates.io/crates/darling in your code.\n" description = "Internal support for a proc-macro library for reading attributes into structs when\nimplementing custom derives. Use https://crates.io/crates/darling in your code.\n"
license = "MIT" license = "MIT"
@ -21,10 +21,10 @@ repository = "https://github.com/TedDriggs/darling"
[lib] [lib]
proc-macro = true proc-macro = true
[dependencies.darling_core] [dependencies.darling_core]
version = "=0.3.0" version = "=0.4.0"
[dependencies.quote] [dependencies.quote]
version = "0.4" version = "0.5"
[dependencies.syn] [dependencies.syn]
version = "0.12" version = "0.13"

Просмотреть файл

@ -1 +0,0 @@
{"files":{".travis.yml":"e455a0ed5c3dd056d31f4c7be088bc94f21cab6595a23f2f015b1efc0ac2b55c","Cargo.toml":"0b700f1e7b8ba76ce4678d36b6906d38455e88f51085ea9f120d6ca63f13d5d7","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"adf2e822923878c2ebf4a0a782898c598fc6f57a3af905b85d57fc716c836687","src/lib.rs":"fbae25504264b185d877fb8784d4d88333ea34a7cbeddca3277dc8421f179933","src/macros.rs":"414505e520b8d705b4ce5a64ec2e82d6d1af0b88567454169486a668fbc1e9c8","src/stable.rs":"6363c4c0ef989c2ec81aa75be71c69a103d45a1de439f3f3bcb6806d8a78a172","src/strnom.rs":"1baded8543a9930798fb16092fe51e9074591902e327e0f94eb1c908a6370de9","src/unstable.rs":"110d27103e37427b3d1dcb45b6ba9dc9f5641a255766a43d5db0f4fd10a341ed","tests/test.rs":"9e75d5289abc1dc58c1df00ae051d8c3cd2c0d7830cca5ad689007c05acffe26"},"package":"d1cb7aaaa4bf022ec2b14ff2f2ba1643a22f3cee88df014a85e14b392282c61d"}

Просмотреть файл

@ -1,31 +0,0 @@
language: rust
sudo: false
matrix:
include:
- rust: 1.15.0
- rust: stable
- rust: beta
- rust: nightly
before_script:
- pip install 'travis-cargo<0.2' --user && export PATH=$HOME/.local/bin:$PATH
script:
- cargo test
- cargo build --features nightly
- RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test
- RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo build --features nightly
- RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo doc --no-deps
after_success:
- travis-cargo --only nightly doc-upload
script:
- cargo test
- RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test
env:
global:
- TRAVIS_CARGO_NIGHTLY_FEATURE=""
- secure: "NAsZghAVTAksrm4WP4I66VmD2wW0eRbwB+ZKHUQfvbgUaCRvVdp4WBbWXGU/f/yHgDFWZwljWR4iPMiBwAK8nZsQFRuLFdHrOOHqbkj639LLdT9A07s1zLMB1GfR1fDttzrGhm903pbT2yxSyqqpahGYM7TaGDYYmKYIk4XyVNA5F5Sk7RI+rCecKraoYDeUEFbjWWYtU2FkEXsELEKj0emX5reWkR+wja3QokFcRZ25+Zd2dRC0K8W5QcY2UokLzKncBMCTC5q70H616S3r/9qW67Si1njsJ7RzP0NlZQUNQ/VCvwr4LCr9w+AD9i1SZtXxuux77tWEWSJvBzUc82dDMUv/floJuF7HTulSxxQoRm+fbzpXj9mgaJNiUHXru6ZRTCRVRUSXpcAco94bVoy/jnjrTe3jgAIZK5w14zA8yLw1Jxof31DlbcWORxgF+6fnY2nKPRN2oiQ50+jm1AuGDZX59/wMiu1QlkjOBHtikHp+u+7mp3SkkM04DvuQ/tWODQQnOOtrA0EB3i5H1zeTSnUcmbJufUljWWOvF1QYII08MccqwfG1KWbpobvdu+cV2iVhkq/lNCEL3Ai101CnmSCnMz+9oK/XxYOrx2TnaD9ootOKgnk7XWxF19GZecQx6O2hHTouxvB/0KcRPGWmMWl0H88f3T/Obql8bG8="
notifications:
email:
on_success: never

31
third_party/rust/proc-macro2-0.2.2/Cargo.toml поставляемый
Просмотреть файл

@ -1,31 +0,0 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g. crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package]
name = "proc-macro2"
version = "0.2.2"
authors = ["Alex Crichton <alex@alexcrichton.com>"]
description = "A stable implementation of the upcoming new `proc_macro` API. Comes with an\noption, off by default, to also reimplement itself in terms of the upstream\nunstable API.\n"
homepage = "https://github.com/alexcrichton/proc-macro2"
documentation = "https://docs.rs/proc-macro2"
readme = "README.md"
keywords = ["macros"]
license = "MIT/Apache-2.0"
repository = "https://github.com/alexcrichton/proc-macro2"
[lib]
doctest = false
[dependencies.unicode-xid]
version = "0.1"
[features]
nightly = []

Просмотреть файл

@ -1,201 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

Просмотреть файл

@ -1,25 +0,0 @@
Copyright (c) 2014 Alex Crichton
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

98
third_party/rust/proc-macro2-0.2.2/README.md поставляемый
Просмотреть файл

@ -1,98 +0,0 @@
# proc-macro2
[![Build Status](https://api.travis-ci.org/alexcrichton/proc-macro2.svg?branch=master)](https://travis-ci.org/alexcrichton/proc-macro2)
[![Latest Version](https://img.shields.io/crates/v/proc-macro2.svg)](https://crates.io/crates/proc-macro2)
[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/proc-macro2)
A small shim over the `proc_macro` crate in the compiler intended to multiplex
the current stable interface (as of 2017-07-05) and the [upcoming richer
interface][upcoming].
[upcoming]: https://github.com/rust-lang/rust/pull/40939
The upcoming support has features like:
* Span information on tokens
* No need to go in/out through strings
* Structured input/output
The hope is that libraries ported to `proc_macro2` will be trivial to port to
the real `proc_macro` crate once the support on nightly is stabilize.
## Usage
This crate by default compiles on the stable version of the compiler. It only
uses the stable surface area of the `proc_macro` crate upstream in the compiler
itself. Usage is done via:
```toml
[dependencies]
proc-macro2 = "0.2"
```
followed by
```rust
extern crate proc_macro;
extern crate proc_macro2;
#[proc_macro_derive(MyDerive)]
pub fn my_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
let input: proc_macro2::TokenStream = input.into();
let output: proc_macro2::TokenStream = {
/* transform input */
};
output.into()
}
```
If you'd like you can enable the `nightly` feature in this crate. This will
cause it to compile against the **unstable and nightly-only** features of the
`proc_macro` crate. This in turn requires a nightly compiler. This should help
preserve span information, however, coming in from the compiler itself.
You can enable this feature via:
```toml
[dependencies]
proc-macro2 = { version = "0.2", features = ["nightly"] }
```
## Unstable Features
`proc-macro2` supports exporting some methods from `proc_macro` which are
currently highly unstable, and may not be stabilized in the first pass of
`proc_macro` stabilizations. These features are not exported by default. Minor
versions of `proc-macro2` may make breaking changes to them at any time.
To enable these features, the `procmacro2_semver_exempt` config flag must be
passed to rustc.
```
RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo build
```
Note that this must not only be done for your crate, but for any crate that
depends on your crate. This infectious nature is intentional, as it serves as a
reminder that you are outside of the normal semver guarantees.
# License
This project is licensed under either of
* Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
http://www.apache.org/licenses/LICENSE-2.0)
* MIT license ([LICENSE-MIT](LICENSE-MIT) or
http://opensource.org/licenses/MIT)
at your option.
### Contribution
Unless you explicitly state otherwise, any contribution intentionally submitted
for inclusion in Serde by you, as defined in the Apache-2.0 license, shall be
dual licensed as above, without any additional terms or conditions.

337
third_party/rust/proc-macro2-0.2.2/src/lib.rs поставляемый
Просмотреть файл

@ -1,337 +0,0 @@
//! A "shim crate" intended to multiplex the `proc_macro` API on to stable Rust.
//!
//! Procedural macros in Rust operate over the upstream
//! `proc_macro::TokenStream` type. This type currently is quite conservative
//! and exposed no internal implementation details. Nightly compilers, however,
//! contain a much richer interface. This richer interface allows fine-grained
//! inspection of the token stream which avoids stringification/re-lexing and
//! also preserves span information.
//!
//! The upcoming APIs added to `proc_macro` upstream are the foundation for
//! productive procedural macros in the ecosystem. To help prepare the ecosystem
//! for using them this crate serves to both compile on stable and nightly and
//! mirrors the API-to-be. The intention is that procedural macros which switch
//! to use this crate will be trivially able to switch to the upstream
//! `proc_macro` crate once its API stabilizes.
//!
//! In the meantime this crate also has a `nightly` Cargo feature which
//! enables it to reimplement itself with the unstable API of `proc_macro`.
//! This'll allow immediate usage of the beneficial upstream API, particularly
//! around preserving span information.
// Proc-macro2 types in rustdoc of other crates get linked to here.
#![doc(html_root_url = "https://docs.rs/proc-macro2/0.2.2")]
#![cfg_attr(feature = "nightly", feature(proc_macro))]
extern crate proc_macro;
#[cfg(not(feature = "nightly"))]
extern crate unicode_xid;
use std::fmt;
use std::str::FromStr;
use std::iter::FromIterator;
#[macro_use]
#[cfg(not(feature = "nightly"))]
mod strnom;
#[path = "stable.rs"]
#[cfg(not(feature = "nightly"))]
mod imp;
#[path = "unstable.rs"]
#[cfg(feature = "nightly")]
mod imp;
#[macro_use]
mod macros;
#[derive(Clone)]
pub struct TokenStream(imp::TokenStream);
pub struct LexError(imp::LexError);
impl FromStr for TokenStream {
type Err = LexError;
fn from_str(src: &str) -> Result<TokenStream, LexError> {
match src.parse() {
Ok(e) => Ok(TokenStream(e)),
Err(e) => Err(LexError(e)),
}
}
}
impl From<proc_macro::TokenStream> for TokenStream {
fn from(inner: proc_macro::TokenStream) -> TokenStream {
TokenStream(inner.into())
}
}
impl From<TokenStream> for proc_macro::TokenStream {
fn from(inner: TokenStream) -> proc_macro::TokenStream {
inner.0.into()
}
}
impl From<TokenTree> for TokenStream {
fn from(tree: TokenTree) -> TokenStream {
TokenStream(tree.into())
}
}
impl<T: Into<TokenStream>> FromIterator<T> for TokenStream {
fn from_iter<I: IntoIterator<Item = T>>(streams: I) -> Self {
TokenStream(streams.into_iter().map(|t| t.into().0).collect())
}
}
impl IntoIterator for TokenStream {
type Item = TokenTree;
type IntoIter = TokenTreeIter;
fn into_iter(self) -> TokenTreeIter {
TokenTreeIter(self.0.into_iter())
}
}
impl TokenStream {
pub fn empty() -> TokenStream {
TokenStream(imp::TokenStream::empty())
}
pub fn is_empty(&self) -> bool {
self.0.is_empty()
}
}
// Returned by reference, so we can't easily wrap it.
#[cfg(procmacro2_semver_exempt)]
pub use imp::FileName;
#[cfg(procmacro2_semver_exempt)]
#[derive(Clone, PartialEq, Eq)]
pub struct SourceFile(imp::SourceFile);
#[cfg(procmacro2_semver_exempt)]
impl SourceFile {
/// Get the path to this source file as a string.
pub fn path(&self) -> &FileName {
self.0.path()
}
pub fn is_real(&self) -> bool {
self.0.is_real()
}
}
#[cfg(procmacro2_semver_exempt)]
impl AsRef<FileName> for SourceFile {
fn as_ref(&self) -> &FileName {
self.0.path()
}
}
#[cfg(procmacro2_semver_exempt)]
impl fmt::Debug for SourceFile {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(f)
}
}
#[cfg(procmacro2_semver_exempt)]
pub struct LineColumn {
pub line: usize,
pub column: usize,
}
#[derive(Copy, Clone)]
pub struct Span(imp::Span);
impl Span {
pub fn call_site() -> Span {
Span(imp::Span::call_site())
}
pub fn def_site() -> Span {
Span(imp::Span::def_site())
}
/// Creates a new span with the same line/column information as `self` but
/// that resolves symbols as though it were at `other`.
pub fn resolved_at(&self, other: Span) -> Span {
Span(self.0.resolved_at(other.0))
}
/// Creates a new span with the same name resolution behavior as `self` but
/// with the line/column information of `other`.
pub fn located_at(&self, other: Span) -> Span {
Span(self.0.located_at(other.0))
}
/// This method is only available when the `"nightly"` feature is enabled.
#[cfg(feature = "nightly")]
pub fn unstable(self) -> proc_macro::Span {
self.0.unstable()
}
#[cfg(procmacro2_semver_exempt)]
pub fn source_file(&self) -> SourceFile {
SourceFile(self.0.source_file())
}
#[cfg(procmacro2_semver_exempt)]
pub fn start(&self) -> LineColumn {
let imp::LineColumn{ line, column } = self.0.start();
LineColumn { line: line, column: column }
}
#[cfg(procmacro2_semver_exempt)]
pub fn end(&self) -> LineColumn {
let imp::LineColumn{ line, column } = self.0.end();
LineColumn { line: line, column: column }
}
#[cfg(procmacro2_semver_exempt)]
pub fn join(&self, other: Span) -> Option<Span> {
self.0.join(other.0).map(Span)
}
}
#[derive(Clone, Debug)]
pub struct TokenTree {
pub span: Span,
pub kind: TokenNode,
}
impl From<TokenNode> for TokenTree {
fn from(kind: TokenNode) -> TokenTree {
TokenTree { span: Span::def_site(), kind: kind }
}
}
impl fmt::Display for TokenTree {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
TokenStream::from(self.clone()).fmt(f)
}
}
#[derive(Clone, Debug)]
pub enum TokenNode {
Group(Delimiter, TokenStream),
Term(Term),
Op(char, Spacing),
Literal(Literal),
}
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum Delimiter {
Parenthesis,
Brace,
Bracket,
None,
}
#[derive(Copy, Clone)]
pub struct Term(imp::Term);
impl Term {
pub fn intern(string: &str) -> Term {
Term(imp::Term::intern(string))
}
pub fn as_str(&self) -> &str {
self.0.as_str()
}
}
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum Spacing {
Alone,
Joint,
}
#[derive(Clone)]
pub struct Literal(imp::Literal);
macro_rules! int_literals {
($($kind:ident,)*) => ($(
pub fn $kind(n: $kind) -> Literal {
Literal(n.into())
}
)*)
}
impl Literal {
pub fn integer(s: i64) -> Literal {
Literal(imp::Literal::integer(s))
}
int_literals! {
u8, u16, u32, u64, usize,
i8, i16, i32, i64, isize,
}
pub fn float(f: f64) -> Literal {
Literal(imp::Literal::float(f))
}
pub fn f64(f: f64) -> Literal {
Literal(f.into())
}
pub fn f32(f: f32) -> Literal {
Literal(f.into())
}
pub fn string(string: &str) -> Literal {
Literal(string.into())
}
pub fn character(ch: char) -> Literal {
Literal(ch.into())
}
pub fn byte_string(s: &[u8]) -> Literal {
Literal(imp::Literal::byte_string(s))
}
// =======================================================================
// Not present upstream in proc_macro yet
pub fn byte_char(b: u8) -> Literal {
Literal(imp::Literal::byte_char(b))
}
pub fn doccomment(s: &str) -> Literal {
Literal(imp::Literal::doccomment(s))
}
pub fn raw_string(s: &str, pounds: usize) -> Literal {
Literal(imp::Literal::raw_string(s, pounds))
}
pub fn raw_byte_string(s: &str, pounds: usize) -> Literal {
Literal(imp::Literal::raw_byte_string(s, pounds))
}
}
pub struct TokenTreeIter(imp::TokenTreeIter);
impl Iterator for TokenTreeIter {
type Item = TokenTree;
fn next(&mut self) -> Option<TokenTree> {
self.0.next()
}
}
forward_fmt!(Debug for LexError);
forward_fmt!(Debug for Literal);
forward_fmt!(Debug for Span);
forward_fmt!(Debug for Term);
forward_fmt!(Debug for TokenTreeIter);
forward_fmt!(Debug for TokenStream);
forward_fmt!(Display for Literal);
forward_fmt!(Display for TokenStream);

Просмотреть файл

@ -1,9 +0,0 @@
macro_rules! forward_fmt {
($tr:ident for $ty:ident) => {
impl ::std::fmt::$tr for $ty {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
::std::fmt::$tr::fmt(&self.0, f)
}
}
}
}

1206
third_party/rust/proc-macro2-0.2.2/src/stable.rs поставляемый

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -1,447 +0,0 @@
//! Adapted from [`nom`](https://github.com/Geal/nom).
use std::str::{Chars, CharIndices, Bytes};
use unicode_xid::UnicodeXID;
use imp::LexError;
#[derive(Copy, Clone, Eq, PartialEq)]
pub struct Cursor<'a> {
pub rest: &'a str,
#[cfg(procmacro2_semver_exempt)]
pub off: u32,
}
impl<'a> Cursor<'a> {
#[cfg(not(procmacro2_semver_exempt))]
pub fn advance(&self, amt: usize) -> Cursor<'a> {
Cursor {
rest: &self.rest[amt..],
}
}
#[cfg(procmacro2_semver_exempt)]
pub fn advance(&self, amt: usize) -> Cursor<'a> {
Cursor {
rest: &self.rest[amt..],
off: self.off + (amt as u32),
}
}
pub fn find(&self, p: char) -> Option<usize> {
self.rest.find(p)
}
pub fn starts_with(&self, s: &str) -> bool {
self.rest.starts_with(s)
}
pub fn is_empty(&self) -> bool {
self.rest.is_empty()
}
pub fn len(&self) -> usize {
self.rest.len()
}
pub fn as_bytes(&self) -> &'a [u8] {
self.rest.as_bytes()
}
pub fn bytes(&self) -> Bytes<'a> {
self.rest.bytes()
}
pub fn chars(&self) -> Chars<'a> {
self.rest.chars()
}
pub fn char_indices(&self) -> CharIndices<'a> {
self.rest.char_indices()
}
}
pub type PResult<'a, O> = Result<(Cursor<'a>, O), LexError>;
pub fn whitespace(input: Cursor) -> PResult<()> {
if input.is_empty() {
return Err(LexError);
}
let bytes = input.as_bytes();
let mut i = 0;
while i < bytes.len() {
let s = input.advance(i);
if bytes[i] == b'/' {
if s.starts_with("//") && (!s.starts_with("///") || s.starts_with("////")) &&
!s.starts_with("//!") {
if let Some(len) = s.find('\n') {
i += len + 1;
continue;
}
break;
} else if s.starts_with("/**/") {
i += 4;
continue
} else if s.starts_with("/*") && (!s.starts_with("/**") || s.starts_with("/***")) &&
!s.starts_with("/*!") {
let (_, com) = block_comment(s)?;
i += com.len();
continue;
}
}
match bytes[i] {
b' ' | 0x09...0x0d => {
i += 1;
continue;
}
b if b <= 0x7f => {}
_ => {
let ch = s.chars().next().unwrap();
if is_whitespace(ch) {
i += ch.len_utf8();
continue;
}
}
}
return if i > 0 {
Ok((s, ()))
} else {
Err(LexError)
};
}
Ok((input.advance(input.len()), ()))
}
pub fn block_comment(input: Cursor) -> PResult<&str> {
if !input.starts_with("/*") {
return Err(LexError);
}
let mut depth = 0;
let bytes = input.as_bytes();
let mut i = 0;
let upper = bytes.len() - 1;
while i < upper {
if bytes[i] == b'/' && bytes[i + 1] == b'*' {
depth += 1;
i += 1; // eat '*'
} else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
depth -= 1;
if depth == 0 {
return Ok((input.advance(i + 2), &input.rest[..i + 2]));
}
i += 1; // eat '/'
}
i += 1;
}
Err(LexError)
}
pub fn skip_whitespace(input: Cursor) -> Cursor {
match whitespace(input) {
Ok((rest, _)) => rest,
Err(LexError) => input,
}
}
fn is_whitespace(ch: char) -> bool {
// Rust treats left-to-right mark and right-to-left mark as whitespace
ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
}
pub fn word_break(input: Cursor) -> PResult<()> {
match input.chars().next() {
Some(ch) if UnicodeXID::is_xid_continue(ch) => Err(LexError),
Some(_) | None => Ok((input, ())),
}
}
macro_rules! named {
($name:ident -> $o:ty, $submac:ident!( $($args:tt)* )) => {
fn $name<'a>(i: Cursor<'a>) -> $crate::strnom::PResult<'a, $o> {
$submac!(i, $($args)*)
}
};
}
macro_rules! alt {
($i:expr, $e:ident | $($rest:tt)*) => {
alt!($i, call!($e) | $($rest)*)
};
($i:expr, $subrule:ident!( $($args:tt)*) | $($rest:tt)*) => {
match $subrule!($i, $($args)*) {
res @ Ok(_) => res,
_ => alt!($i, $($rest)*)
}
};
($i:expr, $subrule:ident!( $($args:tt)* ) => { $gen:expr } | $($rest:tt)+) => {
match $subrule!($i, $($args)*) {
Ok((i, o)) => Ok((i, $gen(o))),
Err(LexError) => alt!($i, $($rest)*)
}
};
($i:expr, $e:ident => { $gen:expr } | $($rest:tt)*) => {
alt!($i, call!($e) => { $gen } | $($rest)*)
};
($i:expr, $e:ident => { $gen:expr }) => {
alt!($i, call!($e) => { $gen })
};
($i:expr, $subrule:ident!( $($args:tt)* ) => { $gen:expr }) => {
match $subrule!($i, $($args)*) {
Ok((i, o)) => Ok((i, $gen(o))),
Err(LexError) => Err(LexError),
}
};
($i:expr, $e:ident) => {
alt!($i, call!($e))
};
($i:expr, $subrule:ident!( $($args:tt)*)) => {
$subrule!($i, $($args)*)
};
}
macro_rules! do_parse {
($i:expr, ( $($rest:expr),* )) => {
Ok(($i, ( $($rest),* )))
};
($i:expr, $e:ident >> $($rest:tt)*) => {
do_parse!($i, call!($e) >> $($rest)*)
};
($i:expr, $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => {
match $submac!($i, $($args)*) {
Err(LexError) => Err(LexError),
Ok((i, _)) => do_parse!(i, $($rest)*),
}
};
($i:expr, $field:ident : $e:ident >> $($rest:tt)*) => {
do_parse!($i, $field: call!($e) >> $($rest)*)
};
($i:expr, $field:ident : $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => {
match $submac!($i, $($args)*) {
Err(LexError) => Err(LexError),
Ok((i, o)) => {
let $field = o;
do_parse!(i, $($rest)*)
},
}
};
}
macro_rules! peek {
($i:expr, $submac:ident!( $($args:tt)* )) => {
match $submac!($i, $($args)*) {
Ok((_, o)) => Ok(($i, o)),
Err(LexError) => Err(LexError),
}
};
}
macro_rules! call {
($i:expr, $fun:expr $(, $args:expr)*) => {
$fun($i $(, $args)*)
};
}
macro_rules! option {
($i:expr, $f:expr) => {
match $f($i) {
Ok((i, o)) => Ok((i, Some(o))),
Err(LexError) => Ok(($i, None)),
}
};
}
macro_rules! take_until {
($i:expr, $substr:expr) => {{
if $substr.len() > $i.len() {
Err(LexError)
} else {
let substr_vec: Vec<char> = $substr.chars().collect();
let mut window: Vec<char> = vec![];
let mut offset = $i.len();
let mut parsed = false;
for (o, c) in $i.char_indices() {
window.push(c);
if window.len() > substr_vec.len() {
window.remove(0);
}
if window == substr_vec {
parsed = true;
window.pop();
let window_len: usize = window.iter()
.map(|x| x.len_utf8())
.fold(0, |x, y| x + y);
offset = o - window_len;
break;
}
}
if parsed {
Ok(($i.advance(offset), &$i.rest[..offset]))
} else {
Err(LexError)
}
}
}};
}
macro_rules! tuple {
($i:expr, $($rest:tt)*) => {
tuple_parser!($i, (), $($rest)*)
};
}
/// Do not use directly. Use `tuple!`.
macro_rules! tuple_parser {
($i:expr, ($($parsed:tt),*), $e:ident, $($rest:tt)*) => {
tuple_parser!($i, ($($parsed),*), call!($e), $($rest)*)
};
($i:expr, (), $submac:ident!( $($args:tt)* ), $($rest:tt)*) => {
match $submac!($i, $($args)*) {
Err(LexError) => Err(LexError),
Ok((i, o)) => tuple_parser!(i, (o), $($rest)*),
}
};
($i:expr, ($($parsed:tt)*), $submac:ident!( $($args:tt)* ), $($rest:tt)*) => {
match $submac!($i, $($args)*) {
Err(LexError) => Err(LexError),
Ok((i, o)) => tuple_parser!(i, ($($parsed)* , o), $($rest)*),
}
};
($i:expr, ($($parsed:tt),*), $e:ident) => {
tuple_parser!($i, ($($parsed),*), call!($e))
};
($i:expr, (), $submac:ident!( $($args:tt)* )) => {
$submac!($i, $($args)*)
};
($i:expr, ($($parsed:expr),*), $submac:ident!( $($args:tt)* )) => {
match $submac!($i, $($args)*) {
Err(LexError) => Err(LexError),
Ok((i, o)) => Ok((i, ($($parsed),*, o)))
}
};
($i:expr, ($($parsed:expr),*)) => {
Ok(($i, ($($parsed),*)))
};
}
macro_rules! not {
($i:expr, $submac:ident!( $($args:tt)* )) => {
match $submac!($i, $($args)*) {
Ok((_, _)) => Err(LexError),
Err(LexError) => Ok(($i, ())),
}
};
}
macro_rules! tag {
($i:expr, $tag:expr) => {
if $i.starts_with($tag) {
Ok(($i.advance($tag.len()), &$i.rest[..$tag.len()]))
} else {
Err(LexError)
}
};
}
macro_rules! punct {
($i:expr, $punct:expr) => {
$crate::strnom::punct($i, $punct)
};
}
/// Do not use directly. Use `punct!`.
pub fn punct<'a>(input: Cursor<'a>, token: &'static str) -> PResult<'a, &'a str> {
let input = skip_whitespace(input);
if input.starts_with(token) {
Ok((input.advance(token.len()), token))
} else {
Err(LexError)
}
}
macro_rules! preceded {
($i:expr, $submac:ident!( $($args:tt)* ), $submac2:ident!( $($args2:tt)* )) => {
match tuple!($i, $submac!($($args)*), $submac2!($($args2)*)) {
Ok((remaining, (_, o))) => Ok((remaining, o)),
Err(LexError) => Err(LexError),
}
};
($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => {
preceded!($i, $submac!($($args)*), call!($g))
};
}
macro_rules! delimited {
($i:expr, $submac:ident!( $($args:tt)* ), $($rest:tt)+) => {
match tuple_parser!($i, (), $submac!($($args)*), $($rest)*) {
Err(LexError) => Err(LexError),
Ok((i1, (_, o, _))) => Ok((i1, o))
}
};
}
macro_rules! map {
($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => {
match $submac!($i, $($args)*) {
Err(LexError) => Err(LexError),
Ok((i, o)) => Ok((i, call!(o, $g)))
}
};
($i:expr, $f:expr, $g:expr) => {
map!($i, call!($f), $g)
};
}
macro_rules! many0 {
($i:expr, $f:expr) => {{
let ret;
let mut res = ::std::vec::Vec::new();
let mut input = $i;
loop {
if input.is_empty() {
ret = Ok((input, res));
break;
}
match $f(input) {
Err(LexError) => {
ret = Ok((input, res));
break;
}
Ok((i, o)) => {
// loop trip must always consume (otherwise infinite loops)
if i.len() == input.len() {
ret = Err(LexError);
break;
}
res.push(o);
input = i;
}
}
}
ret
}};
}

Просмотреть файл

@ -1,406 +0,0 @@
use std::ascii;
use std::fmt;
use std::iter;
use std::str::FromStr;
use proc_macro;
use {TokenTree, TokenNode, Delimiter, Spacing};
#[derive(Clone)]
pub struct TokenStream(proc_macro::TokenStream);
pub struct LexError(proc_macro::LexError);
impl TokenStream {
pub fn empty() -> TokenStream {
TokenStream(proc_macro::TokenStream::empty())
}
pub fn is_empty(&self) -> bool {
self.0.is_empty()
}
}
impl FromStr for TokenStream {
type Err = LexError;
fn from_str(src: &str) -> Result<TokenStream, LexError> {
Ok(TokenStream(src.parse().map_err(LexError)?))
}
}
impl fmt::Display for TokenStream {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(f)
}
}
impl From<proc_macro::TokenStream> for TokenStream {
fn from(inner: proc_macro::TokenStream) -> TokenStream {
TokenStream(inner)
}
}
impl From<TokenStream> for proc_macro::TokenStream {
fn from(inner: TokenStream) -> proc_macro::TokenStream {
inner.0
}
}
impl From<TokenTree> for TokenStream {
fn from(tree: TokenTree) -> TokenStream {
TokenStream(proc_macro::TokenTree {
span: (tree.span.0).0,
kind: match tree.kind {
TokenNode::Group(delim, s) => {
let delim = match delim {
Delimiter::Parenthesis => proc_macro::Delimiter::Parenthesis,
Delimiter::Bracket => proc_macro::Delimiter::Bracket,
Delimiter::Brace => proc_macro::Delimiter::Brace,
Delimiter::None => proc_macro::Delimiter::None,
};
proc_macro::TokenNode::Group(delim, (s.0).0)
}
TokenNode::Op(ch, kind) => {
let kind = match kind {
Spacing::Joint => proc_macro::Spacing::Joint,
Spacing::Alone => proc_macro::Spacing::Alone,
};
proc_macro::TokenNode::Op(ch, kind)
}
TokenNode::Term(s) => {
proc_macro::TokenNode::Term((s.0).0)
}
TokenNode::Literal(l) => {
proc_macro::TokenNode::Literal((l.0).0)
}
},
}.into())
}
}
impl iter::FromIterator<TokenStream> for TokenStream {
fn from_iter<I: IntoIterator<Item=TokenStream>>(streams: I) -> Self {
let streams = streams.into_iter().map(|s| s.0);
TokenStream(streams.collect::<proc_macro::TokenStream>())
}
}
impl fmt::Debug for TokenStream {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(f)
}
}
impl fmt::Debug for LexError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(f)
}
}
pub struct TokenTreeIter(proc_macro::TokenTreeIter);
impl IntoIterator for TokenStream {
type Item = TokenTree;
type IntoIter = TokenTreeIter;
fn into_iter(self) -> TokenTreeIter {
TokenTreeIter(self.0.into_iter())
}
}
impl Iterator for TokenTreeIter {
type Item = TokenTree;
fn next(&mut self) -> Option<TokenTree> {
let token = match self.0.next() {
Some(n) => n,
None => return None,
};
Some(TokenTree {
span: ::Span(Span(token.span)),
kind: match token.kind {
proc_macro::TokenNode::Group(delim, s) => {
let delim = match delim {
proc_macro::Delimiter::Parenthesis => Delimiter::Parenthesis,
proc_macro::Delimiter::Bracket => Delimiter::Bracket,
proc_macro::Delimiter::Brace => Delimiter::Brace,
proc_macro::Delimiter::None => Delimiter::None,
};
TokenNode::Group(delim, ::TokenStream(TokenStream(s)))
}
proc_macro::TokenNode::Op(ch, kind) => {
let kind = match kind {
proc_macro::Spacing::Joint => Spacing::Joint,
proc_macro::Spacing::Alone => Spacing::Alone,
};
TokenNode::Op(ch, kind)
}
proc_macro::TokenNode::Term(s) => {
TokenNode::Term(::Term(Term(s)))
}
proc_macro::TokenNode::Literal(l) => {
TokenNode::Literal(::Literal(Literal(l)))
}
},
})
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.0.size_hint()
}
}
impl fmt::Debug for TokenTreeIter {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("TokenTreeIter").finish()
}
}
#[cfg(procmacro2_semver_exempt)]
#[derive(Clone, PartialEq, Eq)]
pub struct FileName(String);
#[cfg(procmacro2_semver_exempt)]
impl fmt::Display for FileName {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(f)
}
}
// NOTE: We have to generate our own filename object here because we can't wrap
// the one provided by proc_macro.
#[cfg(procmacro2_semver_exempt)]
#[derive(Clone, PartialEq, Eq)]
pub struct SourceFile(proc_macro::SourceFile, FileName);
#[cfg(procmacro2_semver_exempt)]
impl SourceFile {
fn new(sf: proc_macro::SourceFile) -> Self {
let filename = FileName(sf.path().to_string());
SourceFile(sf, filename)
}
/// Get the path to this source file as a string.
pub fn path(&self) -> &FileName {
&self.1
}
pub fn is_real(&self) -> bool {
self.0.is_real()
}
}
#[cfg(procmacro2_semver_exempt)]
impl AsRef<FileName> for SourceFile {
fn as_ref(&self) -> &FileName {
self.path()
}
}
#[cfg(procmacro2_semver_exempt)]
impl fmt::Debug for SourceFile {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(f)
}
}
#[cfg(procmacro2_semver_exempt)]
pub struct LineColumn {
pub line: usize,
pub column: usize,
}
#[derive(Copy, Clone)]
pub struct Span(proc_macro::Span);
impl From<proc_macro::Span> for ::Span {
fn from(proc_span: proc_macro::Span) -> ::Span {
::Span(Span(proc_span))
}
}
impl Span {
pub fn call_site() -> Span {
Span(proc_macro::Span::call_site())
}
pub fn def_site() -> Span {
Span(proc_macro::Span::def_site())
}
pub fn resolved_at(&self, other: Span) -> Span {
Span(self.0.resolved_at(other.0))
}
pub fn located_at(&self, other: Span) -> Span {
Span(self.0.located_at(other.0))
}
pub fn unstable(self) -> proc_macro::Span {
self.0
}
#[cfg(procmacro2_semver_exempt)]
pub fn source_file(&self) -> SourceFile {
SourceFile::new(self.0.source_file())
}
#[cfg(procmacro2_semver_exempt)]
pub fn start(&self) -> LineColumn {
let proc_macro::LineColumn{ line, column } = self.0.start();
LineColumn { line, column }
}
#[cfg(procmacro2_semver_exempt)]
pub fn end(&self) -> LineColumn {
let proc_macro::LineColumn{ line, column } = self.0.end();
LineColumn { line, column }
}
#[cfg(procmacro2_semver_exempt)]
pub fn join(&self, other: Span) -> Option<Span> {
self.0.join(other.0).map(Span)
}
}
impl fmt::Debug for Span {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(f)
}
}
#[derive(Copy, Clone)]
pub struct Term(proc_macro::Term);
impl Term {
pub fn intern(string: &str) -> Term {
Term(proc_macro::Term::intern(string))
}
pub fn as_str(&self) -> &str {
self.0.as_str()
}
}
impl fmt::Debug for Term {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(f)
}
}
#[derive(Clone)]
pub struct Literal(proc_macro::Literal);
impl Literal {
pub fn byte_char(byte: u8) -> Literal {
match byte {
0 => Literal(to_literal("b'\\0'")),
b'\"' => Literal(to_literal("b'\"'")),
n => {
let mut escaped = "b'".to_string();
escaped.extend(ascii::escape_default(n).map(|c| c as char));
escaped.push('\'');
Literal(to_literal(&escaped))
}
}
}
pub fn byte_string(bytes: &[u8]) -> Literal {
Literal(proc_macro::Literal::byte_string(bytes))
}
pub fn doccomment(s: &str) -> Literal {
Literal(to_literal(s))
}
pub fn float(s: f64) -> Literal {
Literal(proc_macro::Literal::float(s))
}
pub fn integer(s: i64) -> Literal {
Literal(proc_macro::Literal::integer(s.into()))
}
pub fn raw_string(s: &str, pounds: usize) -> Literal {
let mut ret = format!("r");
ret.extend((0..pounds).map(|_| "#"));
ret.push('"');
ret.push_str(s);
ret.push('"');
ret.extend((0..pounds).map(|_| "#"));
Literal(to_literal(&ret))
}
pub fn raw_byte_string(s: &str, pounds: usize) -> Literal {
let mut ret = format!("br");
ret.extend((0..pounds).map(|_| "#"));
ret.push('"');
ret.push_str(s);
ret.push('"');
ret.extend((0..pounds).map(|_| "#"));
Literal(to_literal(&ret))
}
}
impl fmt::Display for Literal {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(f)
}
}
impl fmt::Debug for Literal {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(f)
}
}
fn to_literal(s: &str) -> proc_macro::Literal {
let stream = s.parse::<proc_macro::TokenStream>().unwrap();
match stream.into_iter().next().unwrap().kind {
proc_macro::TokenNode::Literal(l) => l,
_ => unreachable!(),
}
}
macro_rules! ints {
($($t:ident,)*) => {$(
impl From<$t> for Literal {
fn from(t: $t) -> Literal {
Literal(proc_macro::Literal::$t(t))
}
}
)*}
}
ints! {
u8, u16, u32, u64, usize,
i8, i16, i32, i64, isize,
}
macro_rules! floats {
($($t:ident,)*) => {$(
impl From<$t> for Literal {
fn from(t: $t) -> Literal {
Literal(proc_macro::Literal::$t(t))
}
}
)*}
}
floats! {
f32, f64,
}
impl<'a> From<&'a str> for Literal {
fn from(t: &'a str) -> Literal {
Literal(proc_macro::Literal::string(t))
}
}
impl From<char> for Literal {
fn from(t: char) -> Literal {
Literal(proc_macro::Literal::character(t))
}
}

Просмотреть файл

@ -1,179 +0,0 @@
extern crate proc_macro2;
use std::str;
use proc_macro2::{Term, Literal, TokenStream};
#[cfg(procmacro2_semver_exempt)]
use proc_macro2::TokenNode;
#[cfg(procmacro2_semver_exempt)]
#[cfg(not(feature = "nightly"))]
use proc_macro2::Span;
#[test]
fn symbols() {
assert_eq!(Term::intern("foo").as_str(), "foo");
assert_eq!(Term::intern("bar").as_str(), "bar");
}
#[test]
fn literals() {
assert_eq!(Literal::string("foo").to_string(), "\"foo\"");
assert_eq!(Literal::string("\"").to_string(), "\"\\\"\"");
assert_eq!(Literal::float(10.0).to_string(), "10.0");
}
#[test]
fn roundtrip() {
fn roundtrip(p: &str) {
println!("parse: {}", p);
let s = p.parse::<TokenStream>().unwrap().to_string();
println!("first: {}", s);
let s2 = s.to_string().parse::<TokenStream>().unwrap().to_string();
assert_eq!(s, s2);
}
roundtrip("a");
roundtrip("<<");
roundtrip("<<=");
roundtrip("
/// a
wut
");
roundtrip("
1
1.0
1f32
2f64
1usize
4isize
4e10
1_000
1_0i32
8u8
9
0
0xffffffffffffffffffffffffffffffff
");
roundtrip("'a");
roundtrip("'static");
roundtrip("'\\u{10__FFFF}'");
roundtrip("\"\\u{10_F0FF__}foo\\u{1_0_0_0__}\"");
}
#[test]
fn fail() {
fn fail(p: &str) {
if p.parse::<TokenStream>().is_ok() {
panic!("should have failed to parse: {}", p);
}
}
fail("1x");
fail("1u80");
fail("1f320");
fail("' static");
fail("'mut");
}
#[cfg(procmacro2_semver_exempt)]
#[test]
fn span_test() {
fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
let ts = p.parse::<TokenStream>().unwrap();
check_spans_internal(ts, &mut lines);
}
fn check_spans_internal(
ts: TokenStream,
lines: &mut &[(usize, usize, usize, usize)],
) {
for i in ts {
if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
*lines = rest;
let start = i.span.start();
assert_eq!(start.line, sline, "sline did not match for {}", i);
assert_eq!(start.column, scol, "scol did not match for {}", i);
let end = i.span.end();
assert_eq!(end.line, eline, "eline did not match for {}", i);
assert_eq!(end.column, ecol, "ecol did not match for {}", i);
match i.kind {
TokenNode::Group(_, stream) =>
check_spans_internal(stream, lines),
_ => {}
}
}
}
}
check_spans("\
/// This is a document comment
testing 123
{
testing 234
}", &[
(1, 0, 1, 30),
(2, 0, 2, 7),
(2, 8, 2, 11),
(3, 0, 5, 1),
(4, 2, 4, 9),
(4, 10, 4, 13),
]);
}
#[cfg(procmacro2_semver_exempt)]
#[cfg(not(feature = "nightly"))]
#[test]
fn default_span() {
    // With the fallback (non-nightly) implementation, a call-site span has
    // no real location: line 1, column 0, and an unreal source file named
    // "<unspecified>".
    let begin = Span::call_site().start();
    assert_eq!(begin.line, 1);
    assert_eq!(begin.column, 0);

    let finish = Span::call_site().end();
    assert_eq!(finish.line, 1);
    assert_eq!(finish.column, 0);

    let file = Span::call_site().source_file();
    assert_eq!(file.path().to_string(), "<unspecified>");
    assert!(!file.is_real());
}
#[cfg(procmacro2_semver_exempt)]
#[test]
fn span_join() {
    let first: Vec<_> =
        "aaa\nbbb".parse::<TokenStream>().unwrap().into_iter().collect();
    let second: Vec<_> =
        "ccc\nddd".parse::<TokenStream>().unwrap().into_iter().collect();

    // Tokens from separate parses live in distinct source files; tokens from
    // one parse share a file.
    assert!(first[0].span.source_file() != second[0].span.source_file());
    assert_eq!(first[0].span.source_file(), first[1].span.source_file());

    // Joining is only defined within a single source file.
    let same_file = first[0].span.join(first[1].span);
    let cross_file = first[0].span.join(second[0].span);
    assert!(same_file.is_some());
    assert!(cross_file.is_none());

    // The joined span covers from the start of `aaa` to the end of `bbb`.
    let joined = same_file.unwrap();
    let start = joined.start();
    let end = joined.end();
    assert_eq!(start.line, 1);
    assert_eq!(start.column, 0);
    assert_eq!(end.line, 2);
    assert_eq!(end.column, 3);
    assert_eq!(joined.source_file(), first[0].span.source_file());
}
#[test]
fn no_panic() {
    // Malformed input resembling a byte literal (with NUL bytes embedded)
    // must be reported as a parse error, never cause a panic.
    let input = str::from_utf8(b"b\'\xc2\x86 \x00\x00\x00^\"").unwrap();
    let parsed = input.parse::<proc_macro2::TokenStream>();
    assert!(parsed.is_err());
}
#[test]
fn tricky_doc_comment() {
    // Fix: test name was misspelled "tricky_doc_commaent"; test fns have no
    // callers, so renaming is safe.
    //
    // An empty block comment `/**/` is not a doc comment and must lex to an
    // empty token stream.
    let stream = "/**/".parse::<proc_macro2::TokenStream>().unwrap();
    let tokens = stream.into_iter().collect::<Vec<_>>();
    assert!(tokens.is_empty(), "not empty -- {:?}", tokens);
}

Просмотреть файл

@ -1 +0,0 @@
{"files":{"Cargo.toml":"c8e98953df6fdcc4bdf6a1b7d970c214e8f5eb0f21da327d1c0916735303cd3a","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","README.md":"f1812dcc3e666d6bebca97f3739058e1bd8de1a2542c9a8cb258d0a259bd59e1","src/lib.rs":"b63a044edeff7ae12d0733e0a7fe64babf9b593b624fa753639ad3f340f24031","src/to_tokens.rs":"3b7fe0934ce2d9c23d9851ec624349cfa6e9d5cd9ed31c67f25cecce50dc218f","src/tokens.rs":"963474535197c1a79bf60af570470e7a89dce43546ee3186920197fdb40bdd9b","tests/test.rs":"8db237707035f55af7c7ef82c2c3892a048411963dccd019da0148bacae8c3d2"},"package":"1eca14c727ad12702eb4b6bfb5a232287dcf8385cb8ca83a3eeaf6519c44c408"}

24
third_party/rust/quote-0.4.2/Cargo.toml поставляемый
Просмотреть файл

@ -1,24 +0,0 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g. crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package]
name = "quote"
version = "0.4.2"
authors = ["David Tolnay <dtolnay@gmail.com>"]
include = ["Cargo.toml", "src/**/*.rs", "tests/**/*.rs", "README.md", "LICENSE-APACHE", "LICENSE-MIT"]
description = "Quasi-quoting macro quote!(...)"
documentation = "https://docs.rs/quote/"
keywords = ["syn"]
license = "MIT/Apache-2.0"
repository = "https://github.com/dtolnay/quote"
[dependencies.proc-macro2]
version = "0.2"

201
third_party/rust/quote-0.4.2/LICENSE-APACHE поставляемый
Просмотреть файл

@ -1,201 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

25
third_party/rust/quote-0.4.2/LICENSE-MIT поставляемый
Просмотреть файл

@ -1,25 +0,0 @@
Copyright (c) 2016 The Rust Project Developers
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

135
third_party/rust/quote-0.4.2/README.md поставляемый
Просмотреть файл

@ -1,135 +0,0 @@
Rust Quasi-Quoting
==================
[![Build Status](https://api.travis-ci.org/dtolnay/quote.svg?branch=master)](https://travis-ci.org/dtolnay/quote)
[![Latest Version](https://img.shields.io/crates/v/quote.svg)](https://crates.io/crates/quote)
[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/quote/)
This crate provides the [`quote!`] macro for turning Rust syntax tree data
structures into tokens of source code.
[`quote!`]: https://docs.rs/quote/0.4/quote/macro.quote.html
Procedural macros in Rust receive a stream of tokens as input, execute arbitrary
Rust code to determine how to manipulate those tokens, and produce a stream of
tokens to hand back to the compiler to compile into the caller's crate.
Quasi-quoting is a solution to one piece of that -- producing tokens to return
to the compiler.
The idea of quasi-quoting is that we write *code* that we treat as *data*.
Within the `quote!` macro, we can write what looks like code to our text editor
or IDE. We get all the benefits of the editor's brace matching, syntax
highlighting, indentation, and maybe autocompletion. But rather than compiling
that as code into the current crate, we can treat it as data, pass it around,
mutate it, and eventually hand it back to the compiler as tokens to compile into
the macro caller's crate.
This crate is motivated by the procedural macro use case, but is a
general-purpose Rust quasi-quoting library and is not specific to procedural
macros.
*Version requirement: Quote supports any compiler version back to Rust's very
first support for procedural macros in Rust 1.15.0.*
```toml
[dependencies]
quote = "0.4"
```
```rust
#[macro_use]
extern crate quote;
```
## Syntax
The quote crate provides a [`quote!`] macro within which you can write Rust code
that gets packaged into a [`quote::Tokens`] and can be treated as data. You
should think of `Tokens` as representing a fragment of Rust source code. Call
`to_string()` on a `Tokens` to get back the fragment of source code as a string,
or call `into()` to stream them as a `TokenStream` back to the compiler in a
procedural macro.
[`quote::Tokens`]: https://docs.rs/quote/0.4/quote/struct.Tokens.html
Within the `quote!` macro, interpolation is done with `#var`. Any type
implementing the [`quote::ToTokens`] trait can be interpolated. This includes
most Rust primitive types as well as most of the syntax tree types from [`syn`].
[`quote::ToTokens`]: https://docs.rs/quote/0.4/quote/trait.ToTokens.html
[`syn`]: https://github.com/dtolnay/syn
```rust
let tokens = quote! {
struct SerializeWith #generics #where_clause {
value: &'a #field_ty,
phantom: ::std::marker::PhantomData<#item_ty>,
}
impl #generics serde::Serialize for SerializeWith #generics #where_clause {
fn serialize<S>(&self, s: &mut S) -> Result<(), S::Error>
where S: serde::Serializer
{
#path(self.value, s)
}
}
SerializeWith {
value: #value,
phantom: ::std::marker::PhantomData::<#item_ty>,
}
};
```
## Repetition
Repetition is done using `#(...)*` or `#(...),*` similar to `macro_rules!`. This
iterates through the elements of any variable interpolated within the repetition
and inserts a copy of the repetition body for each one. The variables in an
interpolation may be anything that implements `IntoIterator`, including `Vec` or
a pre-existing iterator.
- `#(#var)*` — no separators
- `#(#var),*` — the character before the asterisk is used as a separator
- `#( struct #var; )*` — the repetition can contain other things
- `#( #k => println!("{}", #v), )*` — even multiple interpolations
Note that there is a difference between `#(#var ,)*` and `#(#var),*`—the latter
does not produce a trailing comma. This matches the behavior of delimiters in
`macro_rules!`.
## Hygiene
Any interpolated tokens preserve the `Span` information provided by their
`ToTokens` implementation. Tokens that originate within a `quote!` invocation
are spanned with [`Span::def_site()`].
[`Span::def_site()`]: https://docs.rs/proc-macro2/0.2/proc_macro2/struct.Span.html#method.def_site
A different span can be provided explicitly through the [`quote_spanned!`]
macro.
[`quote_spanned!`]: https://docs.rs/quote/0.4/quote/macro.quote_spanned.html
### Recursion limit
The `quote!` macro relies on deep recursion so some large invocations may fail
with "recursion limit reached" when you compile. If it fails, bump up the
recursion limit by adding `#![recursion_limit = "128"]` to your crate. An even
higher limit may be necessary for especially large invocations. You don't need
this unless the compiler tells you that you need it.
## License
Licensed under either of
* Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
* MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
at your option.
### Contribution
Unless you explicitly state otherwise, any contribution intentionally submitted
for inclusion in this crate by you, as defined in the Apache-2.0 license, shall
be dual licensed as above, without any additional terms or conditions.

501
third_party/rust/quote-0.4.2/src/lib.rs поставляемый
Просмотреть файл

@ -1,501 +0,0 @@
//! This crate provides the [`quote!`] macro for turning Rust syntax tree data
//! structures into tokens of source code.
//!
//! [`quote!`]: macro.quote.html
//!
//! Procedural macros in Rust receive a stream of tokens as input, execute
//! arbitrary Rust code to determine how to manipulate those tokens, and produce
//! a stream of tokens to hand back to the compiler to compile into the caller's
//! crate. Quasi-quoting is a solution to one piece of that -- producing tokens
//! to return to the compiler.
//!
//! The idea of quasi-quoting is that we write *code* that we treat as *data*.
//! Within the `quote!` macro, we can write what looks like code to our text
//! editor or IDE. We get all the benefits of the editor's brace matching,
//! syntax highlighting, indentation, and maybe autocompletion. But rather than
//! compiling that as code into the current crate, we can treat it as data, pass
//! it around, mutate it, and eventually hand it back to the compiler as tokens
//! to compile into the macro caller's crate.
//!
//! This crate is motivated by the procedural macro use case, but is a
//! general-purpose Rust quasi-quoting library and is not specific to procedural
//! macros.
//!
//! *Version requirement: Quote supports any compiler version back to Rust's
//! very first support for procedural macros in Rust 1.15.0.*
//!
//! ```toml
//! [dependencies]
//! quote = "0.4"
//! ```
//!
//! ```
//! #[macro_use]
//! extern crate quote;
//! #
//! # fn main() {}
//! ```
//!
//! # Example
//!
//! The following quasi-quoted block of code is something you might find in [a]
//! procedural macro having to do with data structure serialization. The `#var`
//! syntax performs interpolation of runtime variables into the quoted tokens.
//! Check out the documentation of the [`quote!`] macro for more detail about
//! the syntax. See also the [`quote_spanned!`] macro which is important for
//! implementing hygienic procedural macros.
//!
//! [a]: https://serde.rs/
//! [`quote_spanned!`]: macro.quote_spanned.html
//!
//! ```
//! # #[macro_use]
//! # extern crate quote;
//! #
//! # fn main() {
//! # let generics = "";
//! # let where_clause = "";
//! # let field_ty = "";
//! # let item_ty = "";
//! # let path = "";
//! # let value = "";
//! #
//! let tokens = quote! {
//! struct SerializeWith #generics #where_clause {
//! value: &'a #field_ty,
//! phantom: ::std::marker::PhantomData<#item_ty>,
//! }
//!
//! impl #generics serde::Serialize for SerializeWith #generics #where_clause {
//! fn serialize<S>(&self, s: &mut S) -> Result<(), S::Error>
//! where S: serde::Serializer
//! {
//! #path(self.value, s)
//! }
//! }
//!
//! SerializeWith {
//! value: #value,
//! phantom: ::std::marker::PhantomData::<#item_ty>,
//! }
//! };
//! #
//! # }
//! ```
//!
//! ## Recursion limit
//!
//! The `quote!` macro relies on deep recursion so some large invocations may
//! fail with "recursion limit reached" when you compile. If it fails, bump up
//! the recursion limit by adding `#![recursion_limit = "128"]` to your crate.
//! An even higher limit may be necessary for especially large invocations.
// Quote types in rustdoc of other crates get linked to here.
#![doc(html_root_url = "https://docs.rs/quote/0.4.2")]
extern crate proc_macro2;
extern crate proc_macro;
mod tokens;
pub use tokens::Tokens;
mod to_tokens;
pub use to_tokens::ToTokens;
// Not public API.
#[doc(hidden)]
pub mod __rt {
    // Not public API.
    pub use proc_macro2::*;

    // Not public API.
    //
    // Parses `s` into a token stream and appends it to `tokens`, overwriting
    // the span of each top-level token with `span`.
    pub fn parse(tokens: &mut ::Tokens, span: Span, s: &str) {
        let stream: TokenStream = s.parse().expect("invalid token stream");
        let respanned = stream.into_iter().map(|mut tree| {
            tree.span = span;
            tree
        });
        tokens.append_all(respanned);
    }

    // Not public API.
    //
    // Appends a single token of the given `kind`, spanned with `span`.
    pub fn append_kind(tokens: &mut ::Tokens, span: Span, kind: TokenNode) {
        let tree = TokenTree {
            span: span,
            kind: kind,
        };
        tokens.append(tree)
    }
}
/// The whole point.
///
/// Performs variable interpolation against the input and produces it as
/// [`Tokens`]. For returning tokens to the compiler in a procedural macro, use
/// `into()` to build a `TokenStream`.
///
/// [`Tokens`]: struct.Tokens.html
///
/// # Interpolation
///
/// Variable interpolation is done with `#var` (similar to `$var` in
/// `macro_rules!` macros). This grabs the `var` variable that is currently in
/// scope and inserts it in that location in the output tokens. The variable
/// must implement the [`ToTokens`] trait.
///
/// [`ToTokens`]: trait.ToTokens.html
///
/// Repetition is done using `#(...)*` or `#(...),*` again similar to
/// `macro_rules!`. This iterates through the elements of any variable
/// interpolated within the repetition and inserts a copy of the repetition body
/// for each one. The variables in an interpolation may be anything that
/// implements `IntoIterator`, including `Vec` or a pre-existing iterator.
///
/// - `#(#var)*` — no separators
/// - `#(#var),*` — the character before the asterisk is used as a separator
/// - `#( struct #var; )*` — the repetition can contain other tokens
/// - `#( #k => println!("{}", #v), )*` — even multiple interpolations
///
/// # Hygiene
///
/// Any interpolated tokens preserve the `Span` information provided by their
/// `ToTokens` implementation. Tokens that originate within the `quote!`
/// invocation are spanned with [`Span::def_site()`].
///
/// [`Span::def_site()`]: https://docs.rs/proc-macro2/0.2/proc_macro2/struct.Span.html#method.def_site
///
/// A different span can be provided through the [`quote_spanned!`] macro.
///
/// [`quote_spanned!`]: macro.quote_spanned.html
///
/// # Example
///
/// ```
/// extern crate proc_macro;
///
/// #[macro_use]
/// extern crate quote;
///
/// use proc_macro::TokenStream;
///
/// # const IGNORE_TOKENS: &'static str = stringify! {
/// #[proc_macro_derive(HeapSize)]
/// # };
/// pub fn derive_heap_size(input: TokenStream) -> TokenStream {
/// // Parse the input and figure out what implementation to generate...
/// # const IGNORE_TOKENS: &'static str = stringify! {
/// let name = /* ... */;
/// let expr = /* ... */;
/// # };
/// #
/// # let name = 0;
/// # let expr = 0;
///
/// let expanded = quote! {
/// // The generated impl.
/// impl ::heapsize::HeapSize for #name {
/// fn heap_size_of_children(&self) -> usize {
/// #expr
/// }
/// }
/// };
///
/// // Hand the output tokens back to the compiler.
/// expanded.into()
/// }
/// #
/// # fn main() {}
/// ```
// Thin wrapper: delegates to `quote_spanned!`, spanning every token that
// originates inside the invocation with the macro definition site.
#[macro_export]
macro_rules! quote {
    ($($tt:tt)*) => (quote_spanned!($crate::__rt::Span::def_site()=> $($tt)*));
}
/// Same as `quote!`, but applies a given span to all tokens originating within
/// the macro invocation.
///
/// # Syntax
///
/// A span expression of type [`Span`], followed by `=>`, followed by the tokens
/// to quote. The span expression should be brief -- use a variable for anything
/// more than a few characters. There should be no space before the `=>` token.
///
/// [`Span`]: https://docs.rs/proc-macro2/0.2/proc_macro2/struct.Span.html
///
/// ```
/// # #[macro_use]
/// # extern crate quote;
/// # extern crate proc_macro2;
/// #
/// # use proc_macro2::Span;
/// #
/// # fn main() {
/// # const IGNORE_TOKENS: &'static str = stringify! {
/// let span = /* ... */;
/// # };
/// # let span = Span::call_site();
/// # let init = 0;
///
/// // On one line, use parentheses.
/// let tokens = quote_spanned!(span=> Box::into_raw(Box::new(#init)));
///
/// // On multiple lines, place the span at the top and use braces.
/// let tokens = quote_spanned! {span=>
/// Box::into_raw(Box::new(#init))
/// };
/// # }
/// ```
///
/// # Hygiene
///
/// Any interpolated tokens preserve the `Span` information provided by their
/// `ToTokens` implementation. Tokens that originate within the `quote_spanned!`
/// invocation are spanned with the given span argument.
///
/// # Example
///
/// The following procedural macro code uses `quote_spanned!` to assert that a
/// particular Rust type implements the [`Sync`] trait so that references can be
/// safely shared between threads.
///
/// [`Sync`]: https://doc.rust-lang.org/std/marker/trait.Sync.html
///
/// ```
/// # #[macro_use]
/// # extern crate quote;
/// # extern crate proc_macro2;
/// #
/// # use quote::{Tokens, ToTokens};
/// # use proc_macro2::Span;
/// #
/// # struct Type;
/// #
/// # impl Type {
/// # fn span(&self) -> Span {
/// # Span::call_site()
/// # }
/// # }
/// #
/// # impl ToTokens for Type {
/// # fn to_tokens(&self, _tokens: &mut Tokens) {}
/// # }
/// #
/// # fn main() {
/// # let ty = Type;
/// # let def_site = Span::def_site();
/// #
/// let ty_span = ty.span().resolved_at(def_site);
/// let assert_sync = quote_spanned! {ty_span=>
/// struct _AssertSync where #ty: Sync;
/// };
/// # }
/// ```
///
/// If the assertion fails, the user will see an error like the following. The
/// input span of their type is highlighted in the error.
///
/// ```text
/// error[E0277]: the trait bound `*const (): std::marker::Sync` is not satisfied
/// --> src/main.rs:10:21
/// |
/// 10 | static ref PTR: *const () = &();
/// | ^^^^^^^^^ `*const ()` cannot be shared between threads safely
/// ```
///
/// In this example it is important for the where-clause to be spanned with the
/// line/column information of the user's input type so that error messages are
/// placed appropriately by the compiler. But it is also incredibly important
/// that `Sync` resolves at the macro definition site and not the macro call
/// site. If we resolve `Sync` at the same span that the user's type is going to
/// be resolved, then they could bypass our check by defining their own trait
/// named `Sync` that is implemented for their type.
#[macro_export]
macro_rules! quote_spanned {
    ($span:expr=> $($tt:tt)*) => {
        {
            // Evaluate the span expression exactly once, then stream every
            // quoted token into a fresh `Tokens` carrying that span.
            let mut _s = $crate::Tokens::new();
            let _span = $span;
            quote_each_token!(_s _span $($tt)*);
            _s
        }
    };
}
// Extract the names of all #metavariables and pass them to the $finish macro.
//
// in: pounded_var_names!(then () a #b c #( #d )* #e)
// out: then!(() b d e)
#[macro_export]
#[doc(hidden)]
macro_rules! pounded_var_names {
    // `#(...)`, `#[...]`, `#{...}`: recurse into the delimited contents.
    ($finish:ident ($($found:ident)*) # ( $($inner:tt)* ) $($rest:tt)*) => {
        pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*)
    };
    ($finish:ident ($($found:ident)*) # [ $($inner:tt)* ] $($rest:tt)*) => {
        pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*)
    };
    ($finish:ident ($($found:ident)*) # { $($inner:tt)* } $($rest:tt)*) => {
        pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*)
    };
    // `#ident`: record the metavariable name in the accumulator.
    ($finish:ident ($($found:ident)*) # $first:ident $($rest:tt)*) => {
        pounded_var_names!($finish ($($found)* $first) $($rest)*)
    };
    // Plain delimited groups: recurse into their contents.
    ($finish:ident ($($found:ident)*) ( $($inner:tt)* ) $($rest:tt)*) => {
        pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*)
    };
    ($finish:ident ($($found:ident)*) [ $($inner:tt)* ] $($rest:tt)*) => {
        pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*)
    };
    ($finish:ident ($($found:ident)*) { $($inner:tt)* } $($rest:tt)*) => {
        pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*)
    };
    // Any other token: skip it.
    ($finish:ident ($($found:ident)*) $ignore:tt $($rest:tt)*) => {
        pounded_var_names!($finish ($($found)*) $($rest)*)
    };
    // Input exhausted: hand the accumulated names to `$finish`.
    ($finish:ident ($($found:ident)*)) => {
        $finish!(() $($found)*)
    };
}
// in: nested_tuples_pat!(() a b c d e)
// out: ((((a b) c) d) e)
//
// in: nested_tuples_pat!(() a)
// out: a
#[macro_export]
#[doc(hidden)]
macro_rules! nested_tuples_pat {
    // No names at all: pattern for the empty-slice expression produced by
    // `multi_zip_expr!` in the same case.
    (()) => {
        &()
    };
    // Seed the accumulator with the first name.
    (() $first:ident $($rest:ident)*) => {
        nested_tuples_pat!(($first) $($rest)*)
    };
    // Fold each subsequent name into a left-nested tuple pattern.
    (($pat:pat) $first:ident $($rest:ident)*) => {
        nested_tuples_pat!((($pat, $first)) $($rest)*)
    };
    // All names consumed: emit the accumulated pattern.
    (($done:pat)) => {
        $done
    };
}
// in: multi_zip_expr!(() a b c d e)
// out: a.into_iter().zip(b).zip(c).zip(d).zip(e)
//
// in: multi_zip_expr!(() a)
// out: a
#[macro_export]
#[doc(hidden)]
macro_rules! multi_zip_expr {
    // No names: empty-slice expression, matched by `nested_tuples_pat!`'s
    // `&()` pattern in the corresponding case.
    (()) => {
        &[]
    };
    // Exactly one name: use it directly, no zipping needed.
    (() $single:ident) => {
        $single
    };
    // Two or more names: seed the accumulator with the first iterator.
    (() $first:ident $($rest:ident)*) => {
        multi_zip_expr!(($first.into_iter()) $($rest)*)
    };
    // Fold each subsequent name in with `.zip(...)`.
    (($zips:expr) $first:ident $($rest:ident)*) => {
        multi_zip_expr!(($zips.zip($first)) $($rest)*)
    };
    // All names consumed: emit the accumulated expression.
    (($done:expr)) => {
        $done
    };
}
#[macro_export]
#[doc(hidden)]
macro_rules! quote_each_token {
($tokens:ident $span:ident) => {};
($tokens:ident $span:ident # ! $($rest:tt)*) => {
quote_each_token!($tokens $span #);
quote_each_token!($tokens $span !);
quote_each_token!($tokens $span $($rest)*);
};
($tokens:ident $span:ident # ( $($inner:tt)* ) * $($rest:tt)*) => {
for pounded_var_names!(nested_tuples_pat () $($inner)*)
in pounded_var_names!(multi_zip_expr () $($inner)*) {
quote_each_token!($tokens $span $($inner)*);
}
quote_each_token!($tokens $span $($rest)*);
};
($tokens:ident $span:ident # ( $($inner:tt)* ) $sep:tt * $($rest:tt)*) => {
for (_i, pounded_var_names!(nested_tuples_pat () $($inner)*))
in pounded_var_names!(multi_zip_expr () $($inner)*).into_iter().enumerate() {
if _i > 0 {
quote_each_token!($tokens $span $sep);
}
quote_each_token!($tokens $span $($inner)*);
}
quote_each_token!($tokens $span $($rest)*);
};
($tokens:ident $span:ident # [ $($inner:tt)* ] $($rest:tt)*) => {
quote_each_token!($tokens $span #);
$crate::__rt::append_kind(&mut $tokens,
$span,
$crate::__rt::TokenNode::Group(
$crate::__rt::Delimiter::Bracket,
quote_spanned!($span=> $($inner)*).into()
));
quote_each_token!($tokens $span $($rest)*);
};
($tokens:ident $span:ident # $first:ident $($rest:tt)*) => {
$crate::ToTokens::to_tokens(&$first, &mut $tokens);
quote_each_token!($tokens $span $($rest)*);
};
($tokens:ident $span:ident ( $($first:tt)* ) $($rest:tt)*) => {
$crate::__rt::append_kind(&mut $tokens,
$span,
$crate::__rt::TokenNode::Group(
$crate::__rt::Delimiter::Parenthesis,
quote_spanned!($span=> $($first)*).into()
));
quote_each_token!($tokens $span $($rest)*);
};
($tokens:ident $span:ident [ $($first:tt)* ] $($rest:tt)*) => {
$crate::__rt::append_kind(&mut $tokens,
$span,
$crate::__rt::TokenNode::Group(
$crate::__rt::Delimiter::Bracket,
quote_spanned!($span=> $($first)*).into()
));
quote_each_token!($tokens $span $($rest)*);
};
($tokens:ident $span:ident { $($first:tt)* } $($rest:tt)*) => {
$crate::__rt::append_kind(&mut $tokens,
$span,
$crate::__rt::TokenNode::Group(
$crate::__rt::Delimiter::Brace,
quote_spanned!($span=> $($first)*).into()
));
quote_each_token!($tokens $span $($rest)*);
};
($tokens:ident $span:ident $first:tt $($rest:tt)*) => {
// TODO: this seems slow... special case some `:tt` arguments?
$crate::__rt::parse(&mut $tokens, $span, stringify!($first));
quote_each_token!($tokens $span $($rest)*);
};
}

175
third_party/rust/quote-0.4.2/src/to_tokens.rs поставляемый
Просмотреть файл

@ -1,175 +0,0 @@
use super::Tokens;
use std::borrow::Cow;
use proc_macro2::{Literal, Span, Term, TokenNode, TokenTree, TokenStream};
fn tt(kind: TokenNode) -> TokenTree {
TokenTree {
span: Span::def_site(),
kind: kind,
}
}
/// Types that can be interpolated inside a [`quote!`] invocation.
///
/// [`quote!`]: macro.quote.html
pub trait ToTokens {
/// Write `self` to the given `Tokens`.
///
/// Example implementation for a struct representing Rust paths like
/// `std::cmp::PartialEq`:
///
/// ```
/// extern crate quote;
/// use quote::{Tokens, ToTokens};
///
/// extern crate proc_macro2;
/// use proc_macro2::{TokenTree, TokenNode, Spacing, Span};
///
/// pub struct Path {
/// pub global: bool,
/// pub segments: Vec<PathSegment>,
/// }
///
/// impl ToTokens for Path {
/// fn to_tokens(&self, tokens: &mut Tokens) {
/// for (i, segment) in self.segments.iter().enumerate() {
/// if i > 0 || self.global {
/// // Double colon `::`
/// tokens.append(TokenTree {
/// span: Span::def_site(),
/// kind: TokenNode::Op(':', Spacing::Joint),
/// });
/// tokens.append(TokenTree {
/// span: Span::def_site(),
/// kind: TokenNode::Op(':', Spacing::Alone),
/// });
/// }
/// segment.to_tokens(tokens);
/// }
/// }
/// }
/// #
/// # pub struct PathSegment;
/// #
/// # impl ToTokens for PathSegment {
/// # fn to_tokens(&self, tokens: &mut Tokens) {
/// # unimplemented!()
/// # }
/// # }
/// #
/// # fn main() {}
/// ```
fn to_tokens(&self, tokens: &mut Tokens);
/// Convert `self` directly into a `Tokens` object.
///
/// This method is implicitly implemented using `to_tokens`, and acts as a
/// convenience method for consumers of the `ToTokens` trait.
fn into_tokens(self) -> Tokens
where
Self: Sized,
{
let mut tokens = Tokens::new();
self.to_tokens(&mut tokens);
tokens
}
}
impl<'a, T: ?Sized + ToTokens> ToTokens for &'a T {
fn to_tokens(&self, tokens: &mut Tokens) {
(**self).to_tokens(tokens);
}
}
impl<'a, T: ?Sized + ToOwned + ToTokens> ToTokens for Cow<'a, T> {
fn to_tokens(&self, tokens: &mut Tokens) {
(**self).to_tokens(tokens);
}
}
impl<T: ?Sized + ToTokens> ToTokens for Box<T> {
fn to_tokens(&self, tokens: &mut Tokens) {
(**self).to_tokens(tokens);
}
}
impl<T: ToTokens> ToTokens for Option<T> {
fn to_tokens(&self, tokens: &mut Tokens) {
if let Some(ref t) = *self {
t.to_tokens(tokens);
}
}
}
impl ToTokens for str {
fn to_tokens(&self, tokens: &mut Tokens) {
tokens.append(tt(TokenNode::Literal(Literal::string(self))));
}
}
impl ToTokens for String {
fn to_tokens(&self, tokens: &mut Tokens) {
self.as_str().to_tokens(tokens);
}
}
macro_rules! primitive {
($($t:ident)*) => ($(
impl ToTokens for $t {
fn to_tokens(&self, tokens: &mut Tokens) {
tokens.append(tt(TokenNode::Literal(Literal::$t(*self))));
}
}
)*)
}
primitive! {
i8 i16 i32 i64 isize
u8 u16 u32 u64 usize
f32 f64
}
impl ToTokens for char {
fn to_tokens(&self, tokens: &mut Tokens) {
tokens.append(tt(TokenNode::Literal(Literal::character(*self))));
}
}
impl ToTokens for bool {
fn to_tokens(&self, tokens: &mut Tokens) {
let word = if *self { "true" } else { "false" };
tokens.append(tt(TokenNode::Term(Term::intern(word))));
}
}
impl ToTokens for Term {
fn to_tokens(&self, tokens: &mut Tokens) {
tokens.append(tt(TokenNode::Term(*self)));
}
}
impl ToTokens for Literal {
fn to_tokens(&self, tokens: &mut Tokens) {
tokens.append(tt(TokenNode::Literal(self.clone())));
}
}
impl ToTokens for TokenNode {
fn to_tokens(&self, tokens: &mut Tokens) {
tokens.append(tt(self.clone()));
}
}
impl ToTokens for TokenTree {
fn to_tokens(&self, dst: &mut Tokens) {
dst.append(self.clone());
}
}
impl ToTokens for TokenStream {
fn to_tokens(&self, dst: &mut Tokens) {
dst.append_all(self.clone().into_iter());
}
}

264
third_party/rust/quote-0.4.2/src/tokens.rs поставляемый
Просмотреть файл

@ -1,264 +0,0 @@
use super::ToTokens;
use std::fmt::{self, Debug, Display};
use std::hash::{Hash, Hasher};
use proc_macro;
use proc_macro2::{TokenStream, TokenTree};
/// Tokens produced by a [`quote!`] invocation.
///
/// [`quote!`]: macro.quote.html
#[derive(Clone, Default)]
pub struct Tokens {
tts: Vec<TokenTree>,
}
impl Tokens {
/// Empty tokens.
pub fn new() -> Self {
Tokens { tts: Vec::new() }
}
/// For use by `ToTokens` implementations.
///
/// Appends the token specified to this list of tokens.
pub fn append<U>(&mut self, token: U)
where
U: Into<TokenTree>,
{
self.tts.push(token.into());
}
/// For use by `ToTokens` implementations.
///
/// ```
/// # #[macro_use] extern crate quote;
/// # use quote::{Tokens, ToTokens};
/// # fn main() {
/// struct X;
///
/// impl ToTokens for X {
/// fn to_tokens(&self, tokens: &mut Tokens) {
/// tokens.append_all(&[true, false]);
/// }
/// }
///
/// let tokens = quote!(#X);
/// assert_eq!(tokens.to_string(), "true false");
/// # }
/// ```
pub fn append_all<T, I>(&mut self, iter: I)
where
T: ToTokens,
I: IntoIterator<Item = T>,
{
for token in iter {
token.to_tokens(self);
}
}
/// For use by `ToTokens` implementations.
///
/// Appends all of the items in the iterator `I`, separated by the tokens
/// `U`.
pub fn append_separated<T, I, U>(&mut self, iter: I, op: U)
where
T: ToTokens,
I: IntoIterator<Item = T>,
U: ToTokens,
{
for (i, token) in iter.into_iter().enumerate() {
if i > 0 {
op.to_tokens(self);
}
token.to_tokens(self);
}
}
/// For use by `ToTokens` implementations.
///
/// Appends all tokens in the iterator `I`, appending `U` after each
/// element, including after the last element of the iterator.
pub fn append_terminated<T, I, U>(&mut self, iter: I, term: U)
where
T: ToTokens,
I: IntoIterator<Item = T>,
U: ToTokens,
{
for token in iter {
token.to_tokens(self);
term.to_tokens(self);
}
}
}
impl ToTokens for Tokens {
fn to_tokens(&self, dst: &mut Tokens) {
dst.tts.extend(self.tts.iter().cloned());
}
fn into_tokens(self) -> Tokens {
self
}
}
impl From<Tokens> for TokenStream {
fn from(tokens: Tokens) -> TokenStream {
tokens.tts.into_iter().collect()
}
}
impl From<Tokens> for proc_macro::TokenStream {
fn from(tokens: Tokens) -> proc_macro::TokenStream {
TokenStream::from(tokens).into()
}
}
/// Allows a `Tokens` to be passed to `Tokens::append_all`.
impl IntoIterator for Tokens {
type Item = TokenTree;
type IntoIter = private::IntoIter;
fn into_iter(self) -> Self::IntoIter {
private::into_iter(self.tts.into_iter())
}
}
mod private {
use std::vec;
use proc_macro2::TokenTree;
pub struct IntoIter(vec::IntoIter<TokenTree>);
pub fn into_iter(tts: vec::IntoIter<TokenTree>) -> IntoIter {
IntoIter(tts)
}
impl Iterator for IntoIter {
type Item = TokenTree;
fn next(&mut self) -> Option<Self::Item> {
self.0.next()
}
}
}
impl Display for Tokens {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
Display::fmt(&TokenStream::from(self.clone()), formatter)
}
}
impl Debug for Tokens {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
struct DebugAsDisplay<'a, T: 'a>(&'a T);
impl<'a, T> Debug for DebugAsDisplay<'a, T>
where
T: Display,
{
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
Display::fmt(self.0, formatter)
}
}
formatter
.debug_tuple("Tokens")
.field(&DebugAsDisplay(self))
.finish()
}
}
fn tt_eq(a: &TokenTree, b: &TokenTree) -> bool {
use proc_macro2::{TokenNode, Delimiter, Spacing};
match (&a.kind, &b.kind) {
(&TokenNode::Group(d1, ref s1), &TokenNode::Group(d2, ref s2)) => {
match (d1, d2) {
(Delimiter::Parenthesis, Delimiter::Parenthesis)
| (Delimiter::Brace, Delimiter::Brace)
| (Delimiter::Bracket, Delimiter::Bracket)
| (Delimiter::None, Delimiter::None) => {}
_ => return false,
}
let s1 = s1.clone().into_iter();
let mut s2 = s2.clone().into_iter();
for item1 in s1 {
let item2 = match s2.next() {
Some(item) => item,
None => return false,
};
if !tt_eq(&item1, &item2) {
return false;
}
}
s2.next().is_none()
}
(&TokenNode::Op(o1, k1), &TokenNode::Op(o2, k2)) => {
o1 == o2 && match (k1, k2) {
(Spacing::Alone, Spacing::Alone) | (Spacing::Joint, Spacing::Joint) => true,
_ => false,
}
}
(&TokenNode::Literal(ref l1), &TokenNode::Literal(ref l2)) => {
l1.to_string() == l2.to_string()
}
(&TokenNode::Term(ref s1), &TokenNode::Term(ref s2)) => s1.as_str() == s2.as_str(),
_ => false,
}
}
impl PartialEq for Tokens {
fn eq(&self, other: &Self) -> bool {
if self.tts.len() != other.tts.len() {
return false;
}
self.tts
.iter()
.zip(other.tts.iter())
.all(|(a, b)| tt_eq(a, b))
}
}
fn tt_hash<H: Hasher>(tt: &TokenTree, h: &mut H) {
use proc_macro2::{TokenNode, Delimiter, Spacing};
match tt.kind {
TokenNode::Group(delim, ref stream) => {
0u8.hash(h);
match delim {
Delimiter::Parenthesis => 0u8.hash(h),
Delimiter::Brace => 1u8.hash(h),
Delimiter::Bracket => 2u8.hash(h),
Delimiter::None => 3u8.hash(h),
}
for item in stream.clone() {
tt_hash(&item, h);
}
0xffu8.hash(h); // terminator w/ a variant we don't normally hash
}
TokenNode::Op(op, kind) => {
1u8.hash(h);
op.hash(h);
match kind {
Spacing::Alone => 0u8.hash(h),
Spacing::Joint => 1u8.hash(h),
}
}
TokenNode::Literal(ref lit) => (2u8, lit.to_string()).hash(h),
TokenNode::Term(ref word) => (3u8, word.as_str()).hash(h),
}
}
impl<'a> Hash for Tokens {
fn hash<H: Hasher>(&self, h: &mut H) {
self.tts.len().hash(h);
for tt in &self.tts {
tt_hash(&tt, h);
}
}
}

292
third_party/rust/quote-0.4.2/tests/test.rs поставляемый
Просмотреть файл

@ -1,292 +0,0 @@
#![cfg_attr(feature = "cargo-clippy", allow(blacklisted_name))]
use std::borrow::Cow;
extern crate proc_macro2;
#[macro_use]
extern crate quote;
use proc_macro2::{Span, Term};
struct X;
impl quote::ToTokens for X {
fn to_tokens(&self, tokens: &mut quote::Tokens) {
tokens.append(proc_macro2::TokenTree {
kind: proc_macro2::TokenNode::Term(Term::intern("X")),
span: Span::def_site(),
});
}
}
#[test]
fn test_quote_impl() {
let tokens = quote!(
impl<'a, T: ToTokens> ToTokens for &'a T {
fn to_tokens(&self, tokens: &mut Tokens) {
(**self).to_tokens(tokens)
}
}
);
let expected = concat!(
"impl < 'a , T : ToTokens > ToTokens for & 'a T { ",
"fn to_tokens ( & self , tokens : & mut Tokens ) { ",
"( * * self ) . to_tokens ( tokens ) ",
"} ",
"}"
);
assert_eq!(expected, tokens.to_string());
}
#[test]
fn test_substitution() {
let x = X;
let tokens = quote!(#x <#x> (#x) [#x] {#x});
let expected = "X < X > ( X ) [ X ] { X }";
assert_eq!(expected, tokens.to_string());
}
#[test]
fn test_iter() {
let primes = &[X, X, X, X];
assert_eq!("X X X X", quote!(#(#primes)*).to_string());
assert_eq!("X , X , X , X ,", quote!(#(#primes,)*).to_string());
assert_eq!("X , X , X , X", quote!(#(#primes),*).to_string());
}
#[test]
fn test_advanced() {
let generics = quote!( <'a, T> );
let where_clause = quote!( where T: Serialize );
let field_ty = quote!(String);
let item_ty = quote!(Cow<'a, str>);
let path = quote!(SomeTrait::serialize_with);
let value = quote!(self.x);
let tokens = quote! {
struct SerializeWith #generics #where_clause {
value: &'a #field_ty,
phantom: ::std::marker::PhantomData<#item_ty>,
}
impl #generics ::serde::Serialize for SerializeWith #generics #where_clause {
fn serialize<S>(&self, s: &mut S) -> Result<(), S::Error>
where S: ::serde::Serializer
{
#path(self.value, s)
}
}
SerializeWith {
value: #value,
phantom: ::std::marker::PhantomData::<#item_ty>,
}
};
let expected = concat!(
"struct SerializeWith < 'a , T > where T : Serialize { ",
"value : & 'a String , ",
"phantom : :: std :: marker :: PhantomData < Cow < 'a , str > > , ",
"} ",
"impl < 'a , T > :: serde :: Serialize for SerializeWith < 'a , T > where T : Serialize { ",
"fn serialize < S > ( & self , s : & mut S ) -> Result < ( ) , S :: Error > ",
"where S : :: serde :: Serializer ",
"{ ",
"SomeTrait :: serialize_with ( self . value , s ) ",
"} ",
"} ",
"SerializeWith { ",
"value : self . x , ",
"phantom : :: std :: marker :: PhantomData :: < Cow < 'a , str > > , ",
"}"
);
assert_eq!(expected, tokens.to_string());
}
#[test]
fn test_integer() {
let ii8 = -1i8;
let ii16 = -1i16;
let ii32 = -1i32;
let ii64 = -1i64;
let iisize = -1isize;
let uu8 = 1u8;
let uu16 = 1u16;
let uu32 = 1u32;
let uu64 = 1u64;
let uusize = 1usize;
let tokens = quote! {
#ii8 #ii16 #ii32 #ii64 #iisize
#uu8 #uu16 #uu32 #uu64 #uusize
};
let expected = "-1i8 -1i16 -1i32 -1i64 -1isize 1u8 1u16 1u32 1u64 1usize";
assert_eq!(expected, tokens.to_string());
}
#[test]
fn test_floating() {
let e32 = 2.345f32;
let e64 = 2.345f64;
let tokens = quote! {
#e32
#e64
};
let expected = concat!("2.345f32 2.345f64");
assert_eq!(expected, tokens.to_string());
}
#[test]
fn test_char() {
let zero = '\0';
let pound = '#';
let quote = '"';
let apost = '\'';
let newline = '\n';
let heart = '\u{2764}';
let tokens = quote! {
#zero #pound #quote #apost #newline #heart
};
let expected = "'\\u{0}' '#' '\\\"' '\\'' '\\n' '\\u{2764}'";
assert_eq!(expected, tokens.to_string());
}
#[test]
fn test_str() {
let s = "\0 a 'b \" c";
let tokens = quote!(#s);
let expected = "\"\\u{0} a \\'b \\\" c\"";
assert_eq!(expected, tokens.to_string());
}
#[test]
fn test_string() {
let s = "\0 a 'b \" c".to_string();
let tokens = quote!(#s);
let expected = "\"\\u{0} a \\'b \\\" c\"";
assert_eq!(expected, tokens.to_string());
}
#[test]
fn test_ident() {
let foo = Term::intern("Foo");
let bar = Term::intern(&format!("Bar{}", 7));
let tokens = quote!(struct #foo; enum #bar {});
let expected = "struct Foo ; enum Bar7 { }";
assert_eq!(expected, tokens.to_string());
}
#[test]
fn test_duplicate() {
let ch = 'x';
let tokens = quote!(#ch #ch);
let expected = "'x' 'x'";
assert_eq!(expected, tokens.to_string());
}
#[test]
fn test_fancy_repetition() {
let foo = vec!["a", "b"];
let bar = vec![true, false];
let tokens = quote! {
#(#foo: #bar),*
};
let expected = r#""a" : true , "b" : false"#;
assert_eq!(expected, tokens.to_string());
}
#[test]
fn test_nested_fancy_repetition() {
let nested = vec![vec!['a', 'b', 'c'], vec!['x', 'y', 'z']];
let tokens = quote! {
#(
#(#nested)*
),*
};
let expected = "'a' 'b' 'c' , 'x' 'y' 'z'";
assert_eq!(expected, tokens.to_string());
}
#[test]
fn test_empty_repetition() {
let tokens = quote!(#(a b)* #(c d),*);
assert_eq!("", tokens.to_string());
}
#[test]
fn test_variable_name_conflict() {
// The implementation of `#(...),*` uses the variable `_i` but it should be
// fine, if a little confusing when debugging.
let _i = vec!['a', 'b'];
let tokens = quote! { #(#_i),* };
let expected = "'a' , 'b'";
assert_eq!(expected, tokens.to_string());
}
#[test]
fn test_empty_quote() {
let tokens = quote!();
assert_eq!("", tokens.to_string());
}
#[test]
fn test_box_str() {
let b = "str".to_owned().into_boxed_str();
let tokens = quote! { #b };
assert_eq!("\"str\"", tokens.to_string());
}
#[test]
fn test_cow() {
let owned: Cow<Term> = Cow::Owned(Term::intern("owned"));
let ident = Term::intern("borrowed");
let borrowed = Cow::Borrowed(&ident);
let tokens = quote! { #owned #borrowed };
assert_eq!("owned borrowed", tokens.to_string());
}
#[test]
fn test_closure() {
fn field_i(i: usize) -> Term {
Term::intern(&format!("__field{}", i))
}
let fields = (0usize..3)
.map(field_i as fn(_) -> _)
.map(|var| quote! { #var });
let tokens = quote! { #(#fields)* };
assert_eq!("__field0 __field1 __field2", tokens.to_string());
}
#[test]
fn test_append_tokens() {
let mut a = quote!(a);
let b = quote!(b);
a.append_all(b);
assert_eq!("a b", a.to_string());
}

Просмотреть файл

@ -1 +0,0 @@
{"files":{"Cargo.toml":"8c4299d297c1f5d0f6afcfedde821e5cdf1893607290af92aebd9b4b86c48386","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"f033c371492a4769d377a8bf1a139adc7bf94ea00595b867a3e234eeab994c8c","README.md":"632c404dd064731af5fd71e643fbba83ced6ac9198497b672139bada194cf41b","src/attr.rs":"35cc8556dd26662d79c3059315de5a5758ce18b697a8ce32361b09ce1a820a0e","src/buffer.rs":"81ca1506c599cfba1136d9fec2d83137f4a4085b341658a4155d978f31b64552","src/data.rs":"d052602339af37df6454a07fd1163e382571d8665529450339cfe55773e308ae","src/derive.rs":"eed8e88cd763e852782b10d2366a81e52d3ec0777676eaa94827ea3b46151134","src/error.rs":"db9f0648e7399cfcaef9c431b452b5012a6056e75c586212e2cfdb1e18b8c69a","src/expr.rs":"274d9f462225432a3ff0c611a1db44b0b3cc6a673b8c8df2a53bd00cf3a5be95","src/file.rs":"43d5b12828a5c2a0bbef7baa56b0b9638575d73d84e0babaf0e85cdc2d573f16","src/gen/fold.rs":"3bf425ce1a461ce527799e2624a311081e35adc133de3acd5195638024a6872f","src/gen/visit.rs":"6f58e84d9b304d8e8493c01ac71d1d36e61c9e18355d60d06e890b94cdb513d0","src/gen/visit_mut.rs":"537dc348ce5a7353d0d0456138043e4ce916c2ca57c90da10500856ac6393dfe","src/gen_helper.rs":"2be46ff201fd53908350bde4d6c8b7dc427dbd156fa538869a9ccbdf6279af04","src/generics.rs":"5dc13558159085b4743d7a0a65f6cfda735c2a23071c588a7dc215834e0c6b44","src/ident.rs":"ea657c29ee5d483ac59664f808fae3e522e18578d86764b7efcceafc6d8236ca","src/item.rs":"f000f831e6f24de60a7ff7e6164f6adc93ae0fb5d0dc5efc6488bb9519f43dd8","src/lib.rs":"dd1212af93788c6a01193d67185b4258a92c7b6c3f34af395f3ed08174117fdd","src/lifetime.rs":"2c92822a1dfc72738b64d787ef9f7ceb1251e689cacb9e780dc784f99b2d9321","src/lit.rs":"7d0eea1b0057d0ae1bce21815b8fe0ee97b0cd287a8d10e4ff74419b7d6fbefe","src/mac.rs":"ec945e4926de028c153c87f1643e8910e53738d8a4b519b74254479e61acabec","src/macros.rs":"b975b110f9b904e5d82bd0222f7cd0398825fcde146c2b98b581daccf2eb8341","src/op.rs":"b9f9ff8027fc65403858d8fed7d0ac3c919d70f97f830fd7532fb22e1dea993b","src/parse_
quote.rs":"b0221332823786d664de3e4976cdd3962ca5fa3c1558bb57f9c8810348ded0b0","src/parsers.rs":"9ef1c5e7760a7a4132fe6834dda5871ff9b6513f51243008f710ee4fe973529d","src/path.rs":"01455551da12e8782b4c97ccb8c670d81ea1db1264393a73577535141d5266a8","src/punctuated.rs":"87312dc0d057935774ac64e2c2fdfdae7f693b128c53415d76d78ca377098ced","src/spanned.rs":"9343c288a6d07a0d306c9bd2f332b8eb91fb657f88ec8fbb336ad1b667b583dd","src/synom.rs":"74d8c022ef216f798268999c9c934dca05ad75914d60fc2b445d3a7201826f0d","src/token.rs":"4f16136315cc9ff1e707e80bde148d9471f7e4708c30e07b9c1d3a0bc98805fd","src/tt.rs":"2e10762c00bce15a8e22125bba26c856112e701a82f849aa0d46701e6228823f","src/ty.rs":"0f73cc4626171be4ab2a1de9b2c0c94229055a762ba50ba9ba41b29908447867"},"package":"9e1c669ed757c0ebd04337f6a5bb972d05e0c08fe2540dd3ee3dd9e4daf1604c"}

55
third_party/rust/syn-0.12.12/Cargo.toml поставляемый
Просмотреть файл

@ -1,55 +0,0 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g. crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package]
name = "syn"
version = "0.12.12"
authors = ["David Tolnay <dtolnay@gmail.com>"]
include = ["/Cargo.toml", "/src/**/*.rs", "/README.md", "/LICENSE-APACHE", "/LICENSE-MIT"]
description = "Nom parser for Rust source code"
documentation = "https://docs.rs/syn"
categories = ["development-tools::procedural-macro-helpers"]
license = "MIT/Apache-2.0"
repository = "https://github.com/dtolnay/syn"
[package.metadata.docs.rs]
all-features = true
[[example]]
name = "dump-syntax"
path = "examples/dump-syntax/main.rs"
required-features = ["full", "parsing", "extra-traits"]
[dependencies.proc-macro2]
version = "0.2"
[dependencies.quote]
version = "0.4"
optional = true
[dependencies.unicode-xid]
version = "0.1"
[dev-dependencies.rayon]
version = "0.9"
[dev-dependencies.walkdir]
version = "1.0.1"
[features]
clone-impls = []
default = ["derive", "parsing", "printing", "clone-impls"]
derive = []
extra-traits = []
fold = []
full = []
parsing = []
printing = ["quote"]
visit = []
visit-mut = []

201
third_party/rust/syn-0.12.12/LICENSE-APACHE поставляемый
Просмотреть файл

@ -1,201 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

25
third_party/rust/syn-0.12.12/LICENSE-MIT поставляемый
Просмотреть файл

@ -1,25 +0,0 @@
Copyright (c) 2018 Syn Developers
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

277
third_party/rust/syn-0.12.12/README.md поставляемый
Просмотреть файл

@ -1,277 +0,0 @@
Nom parser for Rust source code
===============================
[![Build Status](https://api.travis-ci.org/dtolnay/syn.svg?branch=master)](https://travis-ci.org/dtolnay/syn)
[![Latest Version](https://img.shields.io/crates/v/syn.svg)](https://crates.io/crates/syn)
[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/syn/0.12/syn/)
Syn is a parsing library for parsing a stream of Rust tokens into a syntax tree
of Rust source code.
Currently this library is geared toward the [custom derive] use case but
contains some APIs that may be useful for Rust procedural macros more generally.
[custom derive]: https://github.com/rust-lang/rfcs/blob/master/text/1681-macros-1.1.md
- **Data structures** — Syn provides a complete syntax tree that can represent
any valid Rust source code. The syntax tree is rooted at [`syn::File`] which
represents a full source file, but there are other entry points that may be
useful to procedural macros including [`syn::Item`], [`syn::Expr`] and
[`syn::Type`].
- **Custom derives** — Of particular interest to custom derives is
[`syn::DeriveInput`] which is any of the three legal input items to a derive
macro. An example below shows using this type in a library that can derive
implementations of a trait of your own.
- **Parser combinators** — Parsing in Syn is built on a suite of public parser
combinator macros that you can use for parsing any token-based syntax you
dream up within a `functionlike!(...)` procedural macro. Every syntax tree
node defined by Syn is individually parsable and may be used as a building
block for custom syntaxes, or you may do it all yourself working from the most
primitive tokens.
- **Location information** — Every token parsed by Syn is associated with a
`Span` that tracks line and column information back to the source of that
token. These spans allow a procedural macro to display detailed error messages
pointing to all the right places in the user's code. There is an example of
this below.
- **Feature flags** — Functionality is aggressively feature gated so your
procedural macros enable only what they need, and do not pay in compile time
for all the rest.
[`syn::File`]: https://docs.rs/syn/0.12/syn/struct.File.html
[`syn::Item`]: https://docs.rs/syn/0.12/syn/enum.Item.html
[`syn::Expr`]: https://docs.rs/syn/0.12/syn/enum.Expr.html
[`syn::Type`]: https://docs.rs/syn/0.12/syn/enum.Type.html
[`syn::DeriveInput`]: https://docs.rs/syn/0.12/syn/struct.DeriveInput.html
If you get stuck with anything involving procedural macros in Rust I am happy to
provide help even if the issue is not related to Syn. Please file a ticket in
this repo.
*Version requirement: Syn supports any compiler version back to Rust's very
first support for procedural macros in Rust 1.15.0. Some features especially
around error reporting are only available in newer compilers or on the nightly
channel.*
## Example of a custom derive
The canonical custom derive using Syn looks like this. We write an ordinary Rust
function tagged with a `proc_macro_derive` attribute and the name of the trait
we are deriving. Any time that derive appears in the user's code, the Rust
compiler passes their data structure as tokens into our macro. We get to execute
arbitrary Rust code to figure out what to do with those tokens, then hand some
tokens back to the compiler to compile into the user's crate.
[`TokenStream`]: https://doc.rust-lang.org/proc_macro/struct.TokenStream.html
```toml
[dependencies]
syn = "0.12"
quote = "0.4"
[lib]
proc-macro = true
```
```rust
extern crate proc_macro;
extern crate syn;
#[macro_use]
extern crate quote;
use proc_macro::TokenStream;
use syn::DeriveInput;
#[proc_macro_derive(MyMacro)]
pub fn my_macro(input: TokenStream) -> TokenStream {
// Parse the input tokens into a syntax tree
let input: DeriveInput = syn::parse(input).unwrap();
// Build the output, possibly using quasi-quotation
let expanded = quote! {
// ...
};
// Hand the output tokens back to the compiler
expanded.into()
}
```
The [`heapsize`] example directory shows a complete working Macros 1.1
implementation of a custom derive. It works on any Rust compiler \>=1.15.0. The
example derives a `HeapSize` trait which computes an estimate of the amount of
heap memory owned by a value.
[`heapsize`]: examples/heapsize
```rust
pub trait HeapSize {
/// Total number of bytes of heap memory owned by `self`.
fn heap_size_of_children(&self) -> usize;
}
```
The custom derive allows users to write `#[derive(HeapSize)]` on data structures
in their program.
```rust
#[derive(HeapSize)]
struct Demo<'a, T: ?Sized> {
a: Box<T>,
b: u8,
c: &'a str,
d: String,
}
```
## Spans and error reporting
The [`heapsize2`] example directory is an extension of the `heapsize` example
that demonstrates some of the hygiene and error reporting properties of Macros
2.0. This example currently requires a nightly Rust compiler \>=1.24.0-nightly
but we are working to stabilize all of the APIs involved.
[`heapsize2`]: examples/heapsize2
The token-based procedural macro API provides great control over where the
compiler's error messages are displayed in user code. Consider the error the
user sees if one of their field types does not implement `HeapSize`.
```rust
#[derive(HeapSize)]
struct Broken {
ok: String,
bad: std::thread::Thread,
}
```
In the Macros 1.1 string-based procedural macro world, the resulting error would
point unhelpfully to the invocation of the derive macro and not to the actual
problematic field.
```
error[E0599]: no method named `heap_size_of_children` found for type `std::thread::Thread` in the current scope
--> src/main.rs:4:10
|
4 | #[derive(HeapSize)]
| ^^^^^^^^
```
By tracking span information all the way through the expansion of a procedural
macro as shown in the `heapsize2` example, token-based macros in Syn are able to
trigger errors that directly pinpoint the source of the problem.
```
error[E0277]: the trait bound `std::thread::Thread: HeapSize` is not satisfied
--> src/main.rs:7:5
|
7 | bad: std::thread::Thread,
| ^^^^^^^^^^^^^^^^^^^^^^^^ the trait `HeapSize` is not implemented for `std::thread::Thread`
```
## Parsing a custom syntax using combinators
The [`lazy-static`] example directory shows the implementation of a
`functionlike!(...)` procedural macro in which the input tokens are parsed using
[`nom`]-style parser combinators.
[`lazy-static`]: examples/lazy-static
[`nom`]: https://github.com/Geal/nom
The example reimplements the popular `lazy_static` crate from crates.io as a
procedural macro.
```
lazy_static! {
static ref USERNAME: Regex = Regex::new("^[a-z0-9_-]{3,16}$").unwrap();
}
```
The implementation shows how to trigger custom warnings and error messages on
the macro input.
```
warning: come on, pick a more creative name
--> src/main.rs:10:16
|
10 | static ref FOO: String = "lazy_static".to_owned();
| ^^^
```
## Debugging
When developing a procedural macro it can be helpful to look at what the
generated code looks like. Use `cargo rustc -- -Zunstable-options
--pretty=expanded` or the [`cargo expand`] subcommand.
[`cargo expand`]: https://github.com/dtolnay/cargo-expand
To show the expanded code for some crate that uses your procedural macro, run
`cargo expand` from that crate. To show the expanded code for one of your own
test cases, run `cargo expand --test the_test_case` where the last argument is
the name of the test file without the `.rs` extension.
This write-up by Brandon W Maister discusses debugging in more detail:
[Debugging Rust's new Custom Derive system][debugging].
[debugging]: https://quodlibetor.github.io/posts/debugging-rusts-new-custom-derive-system/
## Optional features
Syn puts a lot of functionality behind optional features in order to optimize
compile time for the most common use cases. The following features are
available.
- **`derive`** *(enabled by default)* — Data structures for representing the
possible input to a custom derive, including structs and enums and types.
- **`full`** — Data structures for representing the syntax tree of all valid
Rust source code, including items and expressions.
- **`parsing`** *(enabled by default)* — Ability to parse input tokens into a
syntax tree node of a chosen type.
- **`printing`** *(enabled by default)* — Ability to print a syntax tree node as
tokens of Rust source code.
- **`visit`** — Trait for traversing a syntax tree.
- **`visit-mut`** — Trait for traversing and mutating in place a syntax tree.
- **`fold`** — Trait for transforming an owned syntax tree.
- **`clone-impls`** *(enabled by default)* — Clone impls for all syntax tree
types.
- **`extra-traits`** — Debug, Eq, PartialEq, Hash impls for all syntax tree
types.
## Nightly features
By default Syn uses the [`proc-macro2`] crate to emulate the nightly compiler's
procedural macro API in a stable way that works all the way back to Rust 1.15.0.
This shim makes it possible to write code without regard for whether the current
compiler version supports the features we use.
[`proc-macro2`]: https://github.com/alexcrichton/proc-macro2
On a nightly compiler, to eliminate the stable shim and use the compiler's
`proc-macro` directly, add `proc-macro2` to your Cargo.toml and set its
`"nightly"` feature which bypasses the stable shim.
```toml
[dependencies]
syn = "0.12"
proc-macro2 = { version = "0.2", features = ["nightly"] }
```
## License
Licensed under either of
* Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
* MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
at your option.
### Contribution
Unless you explicitly state otherwise, any contribution intentionally submitted
for inclusion in this crate by you, as defined in the Apache-2.0 license, shall
be dual licensed as above, without any additional terms or conditions.

571
third_party/rust/syn-0.12.12/src/attr.rs поставляемый
Просмотреть файл

@ -1,571 +0,0 @@
// Copyright 2018 Syn Developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use super::*;
use punctuated::Punctuated;
use std::iter;
use proc_macro2::{Delimiter, Spacing, TokenNode, TokenStream, TokenTree};
#[cfg(feature = "extra-traits")]
use std::hash::{Hash, Hasher};
#[cfg(feature = "extra-traits")]
use tt::TokenStreamHelper;
ast_struct! {
/// An attribute like `#[repr(transparent)]`.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// feature.*
///
/// # Syntax
///
/// Rust has six types of attributes.
///
/// - Outer attributes like `#[repr(transparent)]`. These appear outside or
/// in front of the item they describe.
/// - Inner attributes like `#![feature(proc_macro)]`. These appear inside
/// of the item they describe, usually a module.
/// - Outer doc comments like `/// # Example`.
/// - Inner doc comments like `//! Please file an issue`.
/// - Outer block comments `/** # Example */`.
/// - Inner block comments `/*! Please file an issue */`.
///
/// The `style` field of type `AttrStyle` distinguishes whether an attribute
/// is outer or inner. Doc comments and block comments are promoted to
/// attributes that have `is_sugared_doc` set to true, as this is how they
/// are processed by the compiler and by `macro_rules!` macros.
///
/// The `path` field gives the possibly colon-delimited path against which
/// the attribute is resolved. It is equal to `"doc"` for desugared doc
/// comments. The `tts` field contains the rest of the attribute body as
/// tokens.
///
/// ```text
/// #[derive(Copy)] #[crate::precondition x < 5]
/// ^^^^^^~~~~~~ ^^^^^^^^^^^^^^^^^^^ ~~~~~
/// path tts path tts
/// ```
///
/// Use the [`interpret_meta`] method to try parsing the tokens of an
/// attribute into the structured representation that is used by convention
/// across most Rust libraries.
///
/// [`interpret_meta`]: #method.interpret_meta
pub struct Attribute #manual_extra_traits {
pub pound_token: Token![#],
pub style: AttrStyle,
pub bracket_token: token::Bracket,
pub path: Path,
pub tts: TokenStream,
pub is_sugared_doc: bool,
}
}
#[cfg(feature = "extra-traits")]
impl Eq for Attribute {}
#[cfg(feature = "extra-traits")]
impl PartialEq for Attribute {
fn eq(&self, other: &Self) -> bool {
self.style == other.style && self.pound_token == other.pound_token
&& self.bracket_token == other.bracket_token && self.path == other.path
&& TokenStreamHelper(&self.tts) == TokenStreamHelper(&other.tts)
&& self.is_sugared_doc == other.is_sugared_doc
}
}
#[cfg(feature = "extra-traits")]
impl Hash for Attribute {
fn hash<H>(&self, state: &mut H)
where
H: Hasher,
{
self.style.hash(state);
self.pound_token.hash(state);
self.bracket_token.hash(state);
self.path.hash(state);
TokenStreamHelper(&self.tts).hash(state);
self.is_sugared_doc.hash(state);
}
}
impl Attribute {
/// Parses the tokens after the path as a [`Meta`](enum.Meta.html) if
/// possible.
pub fn interpret_meta(&self) -> Option<Meta> {
let name = if self.path.segments.len() == 1 {
&self.path.segments.first().unwrap().value().ident
} else {
return None;
};
if self.tts.is_empty() {
return Some(Meta::Word(*name));
}
let tts = self.tts.clone().into_iter().collect::<Vec<_>>();
if tts.len() == 1 {
if let TokenNode::Group(Delimiter::Parenthesis, ref ts) = tts[0].kind {
let tokens = ts.clone().into_iter().collect::<Vec<_>>();
if let Some(nested_meta_items) = list_of_nested_meta_items_from_tokens(&tokens) {
return Some(Meta::List(MetaList {
paren_token: token::Paren(tts[0].span),
ident: *name,
nested: nested_meta_items,
}));
}
}
}
if tts.len() == 2 {
if let TokenNode::Op('=', Spacing::Alone) = tts[0].kind {
if let TokenNode::Literal(ref lit) = tts[1].kind {
if !lit.to_string().starts_with('/') {
return Some(Meta::NameValue(MetaNameValue {
ident: *name,
eq_token: Token![=]([tts[0].span]),
lit: Lit::new(lit.clone(), tts[1].span),
}));
}
} else if let TokenNode::Term(ref term) = tts[1].kind {
match term.as_str() {
v @ "true" | v @ "false" => {
return Some(Meta::NameValue(MetaNameValue {
ident: *name,
eq_token: Token![=]([tts[0].span]),
lit: Lit::Bool(LitBool { value: v == "true", span: tts[1].span }),
}));
},
_ => {}
}
}
}
}
None
}
}
fn nested_meta_item_from_tokens(tts: &[TokenTree]) -> Option<(NestedMeta, &[TokenTree])> {
assert!(!tts.is_empty());
match tts[0].kind {
TokenNode::Literal(ref lit) => {
if lit.to_string().starts_with('/') {
None
} else {
let lit = Lit::new(lit.clone(), tts[0].span);
Some((NestedMeta::Literal(lit), &tts[1..]))
}
}
TokenNode::Term(sym) => {
let ident = Ident::new(sym.as_str(), tts[0].span);
if tts.len() >= 3 {
if let TokenNode::Op('=', Spacing::Alone) = tts[1].kind {
if let TokenNode::Literal(ref lit) = tts[2].kind {
if !lit.to_string().starts_with('/') {
let pair = MetaNameValue {
ident: Ident::new(sym.as_str(), tts[0].span),
eq_token: Token![=]([tts[1].span]),
lit: Lit::new(lit.clone(), tts[2].span),
};
return Some((Meta::NameValue(pair).into(), &tts[3..]));
}
} else if let TokenNode::Term(ref term) = tts[2].kind {
match term.as_str() {
v @ "true" | v @ "false" => {
let pair = MetaNameValue {
ident: Ident::new(sym.as_str(), tts[0].span),
eq_token: Token![=]([tts[1].span]),
lit: Lit::Bool(LitBool { value: v == "true", span: tts[2].span }),
};
return Some((Meta::NameValue(pair).into(), &tts[3..]));
},
_ => {}
}
}
}
}
if tts.len() >= 2 {
if let TokenNode::Group(Delimiter::Parenthesis, ref inner_tts) = tts[1].kind {
let inner_tts = inner_tts.clone().into_iter().collect::<Vec<_>>();
return match list_of_nested_meta_items_from_tokens(&inner_tts) {
Some(nested_meta_items) => {
let list = MetaList {
ident: ident,
paren_token: token::Paren(tts[1].span),
nested: nested_meta_items,
};
Some((Meta::List(list).into(), &tts[2..]))
}
None => None,
};
}
}
Some((Meta::Word(ident).into(), &tts[1..]))
}
_ => None,
}
}
fn list_of_nested_meta_items_from_tokens(
mut tts: &[TokenTree],
) -> Option<Punctuated<NestedMeta, Token![,]>> {
let mut nested_meta_items = Punctuated::new();
let mut first = true;
while !tts.is_empty() {
let prev_comma = if first {
first = false;
None
} else if let TokenNode::Op(',', Spacing::Alone) = tts[0].kind {
let tok = Token![,]([tts[0].span]);
tts = &tts[1..];
if tts.is_empty() {
break;
}
Some(tok)
} else {
return None;
};
let (nested, rest) = match nested_meta_item_from_tokens(tts) {
Some(pair) => pair,
None => return None,
};
if let Some(comma) = prev_comma {
nested_meta_items.push_punct(comma);
}
nested_meta_items.push_value(nested);
tts = rest;
}
Some(nested_meta_items)
}
ast_enum! {
/// Distinguishes between attributes that decorate an item and attributes
/// that are contained within an item.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// feature.*
///
/// # Outer attributes
///
/// - `#[repr(transparent)]`
/// - `/// # Example`
/// - `/** Please file an issue */`
///
/// # Inner attributes
///
/// - `#![feature(proc_macro)]`
/// - `//! # Example`
/// - `/*! Please file an issue */`
#[cfg_attr(feature = "clone-impls", derive(Copy))]
pub enum AttrStyle {
Outer,
Inner(Token![!]),
}
}
ast_enum_of_structs! {
/// Content of a compile-time structured attribute.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// feature.*
///
/// ## Word
///
/// A meta word is like the `test` in `#[test]`.
///
/// ## List
///
/// A meta list is like the `derive(Copy)` in `#[derive(Copy)]`.
///
/// ## NameValue
///
/// A name-value meta is like the `path = "..."` in `#[path =
/// "sys/windows.rs"]`.
///
/// # Syntax tree enum
///
/// This type is a [syntax tree enum].
///
/// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
pub enum Meta {
pub Word(Ident),
/// A structured list within an attribute, like `derive(Copy, Clone)`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub List(MetaList {
pub ident: Ident,
pub paren_token: token::Paren,
pub nested: Punctuated<NestedMeta, Token![,]>,
}),
/// A name-value pair within an attribute, like `feature = "nightly"`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub NameValue(MetaNameValue {
pub ident: Ident,
pub eq_token: Token![=],
pub lit: Lit,
}),
}
}
impl Meta {
/// Returns the identifier that begins this structured meta item.
///
/// For example this would return the `test` in `#[test]`, the `derive` in
/// `#[derive(Copy)]`, and the `path` in `#[path = "sys/windows.rs"]`.
pub fn name(&self) -> Ident {
match *self {
Meta::Word(ref meta) => *meta,
Meta::List(ref meta) => meta.ident,
Meta::NameValue(ref meta) => meta.ident,
}
}
}
ast_enum_of_structs! {
/// Element of a compile-time attribute list.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// feature.*
pub enum NestedMeta {
/// A structured meta item, like the `Copy` in `#[derive(Copy)]` which
/// would be a nested `Meta::Word`.
pub Meta(Meta),
/// A Rust literal, like the `"new_name"` in `#[rename("new_name")]`.
pub Literal(Lit),
}
}
pub trait FilterAttrs<'a> {
type Ret: Iterator<Item = &'a Attribute>;
fn outer(self) -> Self::Ret;
fn inner(self) -> Self::Ret;
}
impl<'a, T> FilterAttrs<'a> for T
where
T: IntoIterator<Item = &'a Attribute>,
{
type Ret = iter::Filter<T::IntoIter, fn(&&Attribute) -> bool>;
fn outer(self) -> Self::Ret {
fn is_outer(attr: &&Attribute) -> bool {
match attr.style {
AttrStyle::Outer => true,
_ => false,
}
}
self.into_iter().filter(is_outer)
}
fn inner(self) -> Self::Ret {
fn is_inner(attr: &&Attribute) -> bool {
match attr.style {
AttrStyle::Inner(_) => true,
_ => false,
}
}
self.into_iter().filter(is_inner)
}
}
#[cfg(feature = "parsing")]
pub mod parsing {
use super::*;
use buffer::Cursor;
use parse_error;
use synom::PResult;
use proc_macro2::{Literal, Spacing, Span, TokenNode, TokenTree};
fn eq(span: Span) -> TokenTree {
TokenTree {
span: span,
kind: TokenNode::Op('=', Spacing::Alone),
}
}
impl Attribute {
named!(pub parse_inner -> Self, alt!(
do_parse!(
pound: punct!(#) >>
bang: punct!(!) >>
path_and_tts: brackets!(tuple!(
call!(Path::parse_mod_style),
syn!(TokenStream)
)) >>
({
let (bracket, (path, tts)) = path_and_tts;
Attribute {
style: AttrStyle::Inner(bang),
path: path,
tts: tts,
is_sugared_doc: false,
pound_token: pound,
bracket_token: bracket,
}
})
)
|
map!(
call!(lit_doc_comment, Comment::Inner),
|lit| {
let span = lit.span;
Attribute {
style: AttrStyle::Inner(<Token![!]>::new(span)),
path: Ident::new("doc", span).into(),
tts: vec![
eq(span),
lit,
].into_iter().collect(),
is_sugared_doc: true,
pound_token: <Token![#]>::new(span),
bracket_token: token::Bracket(span),
}
}
)
));
named!(pub parse_outer -> Self, alt!(
do_parse!(
pound: punct!(#) >>
path_and_tts: brackets!(tuple!(
call!(Path::parse_mod_style),
syn!(TokenStream)
)) >>
({
let (bracket, (path, tts)) = path_and_tts;
Attribute {
style: AttrStyle::Outer,
path: path,
tts: tts,
is_sugared_doc: false,
pound_token: pound,
bracket_token: bracket,
}
})
)
|
map!(
call!(lit_doc_comment, Comment::Outer),
|lit| {
let span = lit.span;
Attribute {
style: AttrStyle::Outer,
path: Ident::new("doc", span).into(),
tts: vec![
eq(span),
lit,
].into_iter().collect(),
is_sugared_doc: true,
pound_token: <Token![#]>::new(span),
bracket_token: token::Bracket(span),
}
}
)
));
}
enum Comment {
Inner,
Outer,
}
fn lit_doc_comment(input: Cursor, style: Comment) -> PResult<TokenTree> {
match input.literal() {
Some((span, lit, rest)) => {
let string = lit.to_string();
let ok = match style {
Comment::Inner => string.starts_with("//!") || string.starts_with("/*!"),
Comment::Outer => string.starts_with("///") || string.starts_with("/**"),
};
if ok {
Ok((
TokenTree {
span: span,
kind: TokenNode::Literal(Literal::string(&string)),
},
rest,
))
} else {
parse_error()
}
}
_ => parse_error(),
}
}
}
#[cfg(feature = "printing")]
mod printing {
use super::*;
use quote::{ToTokens, Tokens};
use proc_macro2::Literal;
impl ToTokens for Attribute {
fn to_tokens(&self, tokens: &mut Tokens) {
// If this was a sugared doc, emit it in its original form instead of `#[doc = "..."]`
if self.is_sugared_doc {
if let Some(Meta::NameValue(ref pair)) = self.interpret_meta() {
if pair.ident == "doc" {
if let Lit::Str(ref comment) = pair.lit {
tokens.append(TokenTree {
span: comment.span,
kind: TokenNode::Literal(Literal::doccomment(&comment.value())),
});
return;
}
}
}
}
self.pound_token.to_tokens(tokens);
if let AttrStyle::Inner(ref b) = self.style {
b.to_tokens(tokens);
}
self.bracket_token.surround(tokens, |tokens| {
self.path.to_tokens(tokens);
self.tts.to_tokens(tokens);
});
}
}
impl ToTokens for MetaList {
fn to_tokens(&self, tokens: &mut Tokens) {
self.ident.to_tokens(tokens);
self.paren_token.surround(tokens, |tokens| {
self.nested.to_tokens(tokens);
})
}
}
impl ToTokens for MetaNameValue {
fn to_tokens(&self, tokens: &mut Tokens) {
self.ident.to_tokens(tokens);
self.eq_token.to_tokens(tokens);
self.lit.to_tokens(tokens);
}
}
}

467
third_party/rust/syn-0.12.12/src/buffer.rs поставляемый
Просмотреть файл

@ -1,467 +0,0 @@
// Copyright 2018 Syn Developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! A stably addressed token buffer supporting efficient traversal based on a
//! cheaply copyable cursor.
//!
//! The [`Synom`] trait is implemented for syntax tree types that can be parsed
//! from one of these token cursors.
//!
//! [`Synom`]: ../synom/trait.Synom.html
//!
//! *This module is available if Syn is built with the `"parsing"` feature.*
//!
//! # Example
//!
//! This example shows a basic token parser for parsing a token stream without
//! using Syn's parser combinator macros.
//!
//! ```
//! #![feature(proc_macro)]
//!
//! extern crate syn;
//! extern crate proc_macro;
//!
//! #[macro_use]
//! extern crate quote;
//!
//! use syn::{token, ExprTuple};
//! use syn::buffer::{Cursor, TokenBuffer};
//! use syn::spanned::Spanned;
//! use syn::synom::Synom;
//! use proc_macro::{Diagnostic, Span, TokenStream};
//!
//! /// A basic token parser for parsing a token stream without using Syn's
//! /// parser combinator macros.
//! pub struct Parser<'a> {
//! cursor: Cursor<'a>,
//! }
//!
//! impl<'a> Parser<'a> {
//! pub fn new(cursor: Cursor<'a>) -> Self {
//! Parser { cursor }
//! }
//!
//! pub fn current_span(&self) -> Span {
//! self.cursor.span().unstable()
//! }
//!
//! pub fn parse<T: Synom>(&mut self) -> Result<T, Diagnostic> {
//! let (val, rest) = T::parse(self.cursor)
//! .map_err(|e| match T::description() {
//! Some(desc) => {
//! self.current_span().error(format!("{}: expected {}", e, desc))
//! }
//! None => {
//! self.current_span().error(e.to_string())
//! }
//! })?;
//!
//! self.cursor = rest;
//! Ok(val)
//! }
//!
//! pub fn expect_eof(&mut self) -> Result<(), Diagnostic> {
//! if !self.cursor.eof() {
//! return Err(self.current_span().error("trailing characters; expected eof"));
//! }
//!
//! Ok(())
//! }
//! }
//!
//! fn eval(input: TokenStream) -> Result<TokenStream, Diagnostic> {
//! let buffer = TokenBuffer::new(input);
//! let mut parser = Parser::new(buffer.begin());
//!
//! // Parse some syntax tree types out of the input tokens. In this case we
//! // expect something like:
//! //
//! // (a, b, c) = (1, 2, 3)
//! let a = parser.parse::<ExprTuple>()?;
//! parser.parse::<token::Eq>()?;
//! let b = parser.parse::<ExprTuple>()?;
//! parser.expect_eof()?;
//!
//! // Perform some validation and report errors.
//! let (a_len, b_len) = (a.elems.len(), b.elems.len());
//! if a_len != b_len {
//! let diag = b.span().unstable()
//! .error(format!("expected {} element(s), got {}", a_len, b_len))
//! .span_note(a.span().unstable(), "because of this");
//!
//! return Err(diag);
//! }
//!
//! // Build the output tokens.
//! let out = quote! {
//! println!("All good! Received two tuples of size {}", #a_len);
//! };
//!
//! Ok(out.into())
//! }
//! #
//! # extern crate proc_macro2;
//! #
//! # // This method exists on proc_macro2::Span but is behind the "nightly"
//! # // feature.
//! # trait ToUnstableSpan {
//! # fn unstable(&self) -> Span;
//! # }
//! #
//! # impl ToUnstableSpan for proc_macro2::Span {
//! # fn unstable(&self) -> Span {
//! # unimplemented!()
//! # }
//! # }
//! #
//! # fn main() {}
//! ```
// This module is heavily commented as it contains the only unsafe code in Syn,
// and caution should be used when editing it. The public-facing interface is
// 100% safe but the implementation is fragile internally.
use proc_macro as pm;
use proc_macro2::{Delimiter, Literal, Spacing, Span, Term, TokenNode, TokenStream, TokenTree};
use std::ptr;
use std::marker::PhantomData;
#[cfg(synom_verbose_trace)]
use std::fmt::{self, Debug};
/// Internal type which is used instead of `TokenTree` to represent a token tree
/// within a `TokenBuffer`.
enum Entry {
// Mimicking types from proc-macro.
Group(Span, Delimiter, TokenBuffer),
Term(Span, Term),
Op(Span, char, Spacing),
Literal(Span, Literal),
// End entries contain a raw pointer to the entry from the containing
// token tree, or null if this is the outermost level.
End(*const Entry),
}
/// A buffer that can be efficiently traversed multiple times, unlike
/// `TokenStream` which requires a deep copy in order to traverse more than
/// once.
///
/// See the [module documentation] for an example of `TokenBuffer` in action.
///
/// [module documentation]: index.html
///
/// *This type is available if Syn is built with the `"parsing"` feature.*
pub struct TokenBuffer {
// NOTE: Do not derive clone on this - there are raw pointers inside which
// will be messed up. Moving the `TokenBuffer` itself is safe as the actual
// backing slices won't be moved.
data: Box<[Entry]>,
}
impl TokenBuffer {
// NOTE: DO NOT MUTATE THE `Vec` RETURNED FROM THIS FUNCTION ONCE IT
// RETURNS, THE ADDRESS OF ITS BACKING MEMORY MUST REMAIN STABLE.
fn inner_new(stream: TokenStream, up: *const Entry) -> TokenBuffer {
// Build up the entries list, recording the locations of any Groups
// in the list to be processed later.
let mut entries = Vec::new();
let mut seqs = Vec::new();
for tt in stream {
match tt.kind {
TokenNode::Term(sym) => {
entries.push(Entry::Term(tt.span, sym));
}
TokenNode::Op(chr, ok) => {
entries.push(Entry::Op(tt.span, chr, ok));
}
TokenNode::Literal(lit) => {
entries.push(Entry::Literal(tt.span, lit));
}
TokenNode::Group(delim, seq_stream) => {
// Record the index of the interesting entry, and store an
// `End(null)` there temporarially.
seqs.push((entries.len(), tt.span, delim, seq_stream));
entries.push(Entry::End(ptr::null()));
}
}
}
// Add an `End` entry to the end with a reference to the enclosing token
// stream which was passed in.
entries.push(Entry::End(up));
// NOTE: This is done to ensure that we don't accidentally modify the
// length of the backing buffer. The backing buffer must remain at a
// constant address after this point, as we are going to store a raw
// pointer into it.
let mut entries = entries.into_boxed_slice();
for (idx, span, delim, seq_stream) in seqs {
// We know that this index refers to one of the temporary
// `End(null)` entries, and we know that the last entry is
// `End(up)`, so the next index is also valid.
let seq_up = &entries[idx + 1] as *const Entry;
// The end entry stored at the end of this Entry::Group should
// point to the Entry which follows the Group in the list.
let inner = Self::inner_new(seq_stream, seq_up);
entries[idx] = Entry::Group(span, delim, inner);
}
TokenBuffer { data: entries }
}
/// Creates a `TokenBuffer` containing all the tokens from the input
/// `TokenStream`.
pub fn new(stream: pm::TokenStream) -> TokenBuffer {
Self::new2(stream.into())
}
/// Creates a `TokenBuffer` containing all the tokens from the input
/// `TokenStream`.
pub fn new2(stream: TokenStream) -> TokenBuffer {
Self::inner_new(stream, ptr::null())
}
/// Creates a cursor referencing the first token in the buffer and able to
/// traverse until the end of the buffer.
pub fn begin(&self) -> Cursor {
unsafe { Cursor::create(&self.data[0], &self.data[self.data.len() - 1]) }
}
}
/// A cheaply copyable cursor into a `TokenBuffer`.
///
/// This cursor holds a shared reference into the immutable data which is used
/// internally to represent a `TokenStream`, and can be efficiently manipulated
/// and copied around.
///
/// An empty `Cursor` can be created directly, or one may create a `TokenBuffer`
/// object and get a cursor to its first token with `begin()`.
///
/// Two cursors are equal if they have the same location in the same input
/// stream, and have the same scope.
///
/// See the [module documentation] for an example of a `Cursor` in action.
///
/// [module documentation]: index.html
///
/// *This type is available if Syn is built with the `"parsing"` feature.*
#[derive(Copy, Clone, Eq, PartialEq)]
pub struct Cursor<'a> {
/// The current entry which the `Cursor` is pointing at.
ptr: *const Entry,
/// This is the only `Entry::End(..)` object which this cursor is allowed to
/// point at. All other `End` objects are skipped over in `Cursor::create`.
scope: *const Entry,
/// This uses the &'a reference which guarantees that these pointers are
/// still valid.
marker: PhantomData<&'a Entry>,
}
impl<'a> Cursor<'a> {
/// Creates a cursor referencing a static empty TokenStream.
pub fn empty() -> Self {
// It's safe in this situation for us to put an `Entry` object in global
// storage, despite it not actually being safe to send across threads
// (`Term` is a reference into a thread-local table). This is because
// this entry never includes a `Term` object.
//
// This wrapper struct allows us to break the rules and put a `Sync`
// object in global storage.
struct UnsafeSyncEntry(Entry);
unsafe impl Sync for UnsafeSyncEntry {}
static EMPTY_ENTRY: UnsafeSyncEntry = UnsafeSyncEntry(Entry::End(0 as *const Entry));
Cursor {
ptr: &EMPTY_ENTRY.0,
scope: &EMPTY_ENTRY.0,
marker: PhantomData,
}
}
/// This create method intelligently exits non-explicitly-entered
/// `None`-delimited scopes when the cursor reaches the end of them,
/// allowing for them to be treated transparently.
unsafe fn create(mut ptr: *const Entry, scope: *const Entry) -> Self {
// NOTE: If we're looking at a `End(..)`, we want to advance the cursor
// past it, unless `ptr == scope`, which means that we're at the edge of
// our cursor's scope. We should only have `ptr != scope` at the exit
// from None-delimited groups entered with `ignore_none`.
while let Entry::End(exit) = *ptr {
if ptr == scope {
break;
}
ptr = exit;
}
Cursor {
ptr: ptr,
scope: scope,
marker: PhantomData,
}
}
/// Get the current entry.
fn entry(self) -> &'a Entry {
unsafe { &*self.ptr }
}
/// Bump the cursor to point at the next token after the current one. This
/// is undefined behavior if the cursor is currently looking at an
/// `Entry::End`.
unsafe fn bump(self) -> Cursor<'a> {
Cursor::create(self.ptr.offset(1), self.scope)
}
/// If the cursor is looking at a `None`-delimited group, move it to look at
/// the first token inside instead. If the group is empty, this will move
/// the cursor past the `None`-delimited group.
///
/// WARNING: This mutates its argument.
fn ignore_none(&mut self) {
if let Entry::Group(_, Delimiter::None, ref buf) = *self.entry() {
// NOTE: We call `Cursor::create` here to make sure that situations
// where we should immediately exit the span after entering it are
// handled correctly.
unsafe {
*self = Cursor::create(&buf.data[0], self.scope);
}
}
}
/// Checks whether the cursor is currently pointing at the end of its valid
/// scope.
#[inline]
pub fn eof(self) -> bool {
// We're at eof if we're at the end of our scope.
self.ptr == self.scope
}
/// If the cursor is pointing at a `Group` with the given delimiter, returns
/// a cursor into that group and one pointing to the next `TokenTree`.
pub fn group(mut self, delim: Delimiter) -> Option<(Cursor<'a>, Span, Cursor<'a>)> {
// If we're not trying to enter a none-delimited group, we want to
// ignore them. We have to make sure to _not_ ignore them when we want
// to enter them, of course. For obvious reasons.
if delim != Delimiter::None {
self.ignore_none();
}
if let Entry::Group(span, group_delim, ref buf) = *self.entry() {
if group_delim == delim {
return Some((buf.begin(), span, unsafe { self.bump() }));
}
}
None
}
/// If the cursor is pointing at a `Term`, returns it along with a cursor
/// pointing at the next `TokenTree`.
pub fn term(mut self) -> Option<(Span, Term, Cursor<'a>)> {
self.ignore_none();
match *self.entry() {
Entry::Term(span, term) => Some((span, term, unsafe { self.bump() })),
_ => None,
}
}
/// If the cursor is pointing at an `Op`, returns it along with a cursor
/// pointing at the next `TokenTree`.
pub fn op(mut self) -> Option<(Span, char, Spacing, Cursor<'a>)> {
self.ignore_none();
match *self.entry() {
Entry::Op(span, op, spacing) => Some((span, op, spacing, unsafe { self.bump() })),
_ => None,
}
}
/// If the cursor is pointing at a `Literal`, return it along with a cursor
/// pointing at the next `TokenTree`.
pub fn literal(mut self) -> Option<(Span, Literal, Cursor<'a>)> {
self.ignore_none();
match *self.entry() {
Entry::Literal(span, ref lit) => Some((span, lit.clone(), unsafe { self.bump() })),
_ => None,
}
}
/// Copies all remaining tokens visible from this cursor into a
/// `TokenStream`.
pub fn token_stream(self) -> TokenStream {
let mut tts = Vec::new();
let mut cursor = self;
while let Some((tt, rest)) = cursor.token_tree() {
tts.push(tt);
cursor = rest;
}
tts.into_iter().collect()
}
/// If the cursor is pointing at a `TokenTree`, returns it along with a
/// cursor pointing at the next `TokenTree`.
///
/// Returns `None` if the cursor has reached the end of its stream.
///
/// This method does not treat `None`-delimited groups as transparent, and
/// will return a `Group(None, ..)` if the cursor is looking at one.
pub fn token_tree(self) -> Option<(TokenTree, Cursor<'a>)> {
let tree = match *self.entry() {
Entry::Group(span, delim, ref buf) => {
let stream = buf.begin().token_stream();
TokenTree {
span: span,
kind: TokenNode::Group(delim, stream),
}
}
Entry::Literal(span, ref lit) => TokenTree {
span: span,
kind: TokenNode::Literal(lit.clone()),
},
Entry::Term(span, sym) => TokenTree {
span: span,
kind: TokenNode::Term(sym),
},
Entry::Op(span, chr, spacing) => TokenTree {
span: span,
kind: TokenNode::Op(chr, spacing),
},
Entry::End(..) => {
return None;
}
};
Some((tree, unsafe { self.bump() }))
}
/// Returns the `Span` of the current token, or `Span::call_site()` if this
/// cursor points to eof.
pub fn span(self) -> Span {
match *self.entry() {
Entry::Group(span, ..)
| Entry::Literal(span, ..)
| Entry::Term(span, ..)
| Entry::Op(span, ..) => span,
Entry::End(..) => Span::call_site(),
}
}
}
// We do a custom implementation for `Debug` as the default implementation is
// pretty useless.
#[cfg(synom_verbose_trace)]
impl<'a> Debug for Cursor<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// Print what the cursor is currently looking at.
// This will look like Cursor("some remaining tokens here")
f.debug_tuple("Cursor")
.field(&self.token_stream().to_string())
.finish()
}
}

388
third_party/rust/syn-0.12.12/src/data.rs поставляемый
Просмотреть файл

@ -1,388 +0,0 @@
// Copyright 2018 Syn Developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use super::*;
use punctuated::Punctuated;
ast_struct! {
/// An enum variant.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// feature.*
pub struct Variant {
/// Attributes tagged on the variant.
pub attrs: Vec<Attribute>,
/// Name of the variant.
pub ident: Ident,
/// Content stored in the variant.
pub fields: Fields,
/// Explicit discriminant: `Variant = 1`
pub discriminant: Option<(Token![=], Expr)>,
}
}
ast_enum_of_structs! {
/// Data stored within an enum variant or struct.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// feature.*
///
/// # Syntax tree enum
///
/// This type is a [syntax tree enum].
///
/// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
pub enum Fields {
/// Named fields of a struct or struct variant such as `Point { x: f64,
/// y: f64 }`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub Named(FieldsNamed {
pub brace_token: token::Brace,
pub named: Punctuated<Field, Token![,]>,
}),
/// Unnamed fields of a tuple struct or tuple variant such as `Some(T)`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub Unnamed(FieldsUnnamed {
pub paren_token: token::Paren,
pub unnamed: Punctuated<Field, Token![,]>,
}),
/// Unit struct or unit variant such as `None`.
pub Unit,
}
}
impl Fields {
/// Get an iterator over the [`Field`] items in this object. This iterator
/// can be used to iterate over a named or unnamed struct or variant's
/// fields uniformly.
///
/// [`Field`]: struct.Field.html
pub fn iter(&self) -> punctuated::Iter<Field, Token![,]> {
match *self {
Fields::Unit => punctuated::Iter::private_empty(),
Fields::Named(ref f) => f.named.iter(),
Fields::Unnamed(ref f) => f.unnamed.iter(),
}
}
}
impl<'a> IntoIterator for &'a Fields {
type Item = &'a Field;
type IntoIter = punctuated::Iter<'a, Field, Token![,]>;
fn into_iter(self) -> Self::IntoIter {
self.iter()
}
}
ast_struct! {
/// A field of a struct or enum variant.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// feature.*
pub struct Field {
/// Attributes tagged on the field.
pub attrs: Vec<Attribute>,
/// Visibility of the field.
pub vis: Visibility,
/// Name of the field, if any.
///
/// Fields of tuple structs have no names.
pub ident: Option<Ident>,
pub colon_token: Option<Token![:]>,
/// Type of the field.
pub ty: Type,
}
}
ast_enum_of_structs! {
/// The visibility level of an item: inherited or `pub` or
/// `pub(restricted)`.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// feature.*
///
/// # Syntax tree enum
///
/// This type is a [syntax tree enum].
///
/// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
pub enum Visibility {
/// A public visibility level: `pub`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub Public(VisPublic {
pub pub_token: Token![pub],
}),
/// A crate-level visibility: `pub(crate)`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub Crate(VisCrate {
pub pub_token: Token![pub],
pub paren_token: token::Paren,
pub crate_token: Token![crate],
}),
/// A visibility level restricted to some path: `pub(self)` or
/// `pub(super)` or `pub(in some::module)`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub Restricted(VisRestricted {
pub pub_token: Token![pub],
pub paren_token: token::Paren,
pub in_token: Option<Token![in]>,
pub path: Box<Path>,
}),
/// An inherited visibility, which usually means private.
pub Inherited,
}
}
#[cfg(feature = "parsing")]
pub mod parsing {
use super::*;
use synom::Synom;
impl Synom for Variant {
named!(parse -> Self, do_parse!(
attrs: many0!(Attribute::parse_outer) >>
id: syn!(Ident) >>
fields: alt!(
syn!(FieldsNamed) => { Fields::Named }
|
syn!(FieldsUnnamed) => { Fields::Unnamed }
|
epsilon!() => { |_| Fields::Unit }
) >>
disr: option!(tuple!(punct!(=), syn!(Expr))) >>
(Variant {
ident: id,
attrs: attrs,
fields: fields,
discriminant: disr,
})
));
fn description() -> Option<&'static str> {
Some("enum variant")
}
}
impl Synom for FieldsNamed {
named!(parse -> Self, map!(
braces!(call!(Punctuated::parse_terminated_with, Field::parse_named)),
|(brace, fields)| FieldsNamed {
brace_token: brace,
named: fields,
}
));
fn description() -> Option<&'static str> {
Some("named fields in a struct or struct variant")
}
}
impl Synom for FieldsUnnamed {
named!(parse -> Self, map!(
parens!(call!(Punctuated::parse_terminated_with, Field::parse_unnamed)),
|(paren, fields)| FieldsUnnamed {
paren_token: paren,
unnamed: fields,
}
));
fn description() -> Option<&'static str> {
Some("unnamed fields in a tuple struct or tuple variant")
}
}
impl Field {
named!(pub parse_named -> Self, do_parse!(
attrs: many0!(Attribute::parse_outer) >>
vis: syn!(Visibility) >>
id: syn!(Ident) >>
colon: punct!(:) >>
ty: syn!(Type) >>
(Field {
ident: Some(id),
vis: vis,
attrs: attrs,
ty: ty,
colon_token: Some(colon),
})
));
named!(pub parse_unnamed -> Self, do_parse!(
attrs: many0!(Attribute::parse_outer) >>
vis: syn!(Visibility) >>
ty: syn!(Type) >>
(Field {
ident: None,
colon_token: None,
vis: vis,
attrs: attrs,
ty: ty,
})
));
}
impl Synom for Visibility {
named!(parse -> Self, alt!(
do_parse!(
pub_token: keyword!(pub) >>
other: parens!(keyword!(crate)) >>
(Visibility::Crate(VisCrate {
pub_token: pub_token,
paren_token: other.0,
crate_token: other.1,
}))
)
|
do_parse!(
pub_token: keyword!(pub) >>
other: parens!(keyword!(self)) >>
(Visibility::Restricted(VisRestricted {
pub_token: pub_token,
paren_token: other.0,
in_token: None,
path: Box::new(other.1.into()),
}))
)
|
do_parse!(
pub_token: keyword!(pub) >>
other: parens!(keyword!(super)) >>
(Visibility::Restricted(VisRestricted {
pub_token: pub_token,
paren_token: other.0,
in_token: None,
path: Box::new(other.1.into()),
}))
)
|
do_parse!(
pub_token: keyword!(pub) >>
other: parens!(do_parse!(
in_tok: keyword!(in) >>
restricted: call!(Path::parse_mod_style) >>
(in_tok, restricted)
)) >>
(Visibility::Restricted(VisRestricted {
pub_token: pub_token,
paren_token: other.0,
in_token: Some((other.1).0),
path: Box::new((other.1).1),
}))
)
|
keyword!(pub) => { |tok| {
Visibility::Public(VisPublic {
pub_token: tok,
})
} }
|
epsilon!() => { |_| Visibility::Inherited }
));
fn description() -> Option<&'static str> {
Some("visibility qualifier such as `pub`")
}
}
}
#[cfg(feature = "printing")]
mod printing {
use super::*;
use quote::{ToTokens, Tokens};
impl ToTokens for Variant {
fn to_tokens(&self, tokens: &mut Tokens) {
tokens.append_all(&self.attrs);
self.ident.to_tokens(tokens);
self.fields.to_tokens(tokens);
if let Some((ref eq_token, ref disc)) = self.discriminant {
eq_token.to_tokens(tokens);
disc.to_tokens(tokens);
}
}
}
impl ToTokens for FieldsNamed {
fn to_tokens(&self, tokens: &mut Tokens) {
self.brace_token.surround(tokens, |tokens| {
self.named.to_tokens(tokens);
});
}
}
impl ToTokens for FieldsUnnamed {
fn to_tokens(&self, tokens: &mut Tokens) {
self.paren_token.surround(tokens, |tokens| {
self.unnamed.to_tokens(tokens);
});
}
}
impl ToTokens for Field {
fn to_tokens(&self, tokens: &mut Tokens) {
tokens.append_all(&self.attrs);
self.vis.to_tokens(tokens);
if let Some(ref ident) = self.ident {
ident.to_tokens(tokens);
TokensOrDefault(&self.colon_token).to_tokens(tokens);
}
self.ty.to_tokens(tokens);
}
}
impl ToTokens for VisPublic {
fn to_tokens(&self, tokens: &mut Tokens) {
self.pub_token.to_tokens(tokens)
}
}
impl ToTokens for VisCrate {
fn to_tokens(&self, tokens: &mut Tokens) {
self.pub_token.to_tokens(tokens);
self.paren_token.surround(tokens, |tokens| {
self.crate_token.to_tokens(tokens);
})
}
}
impl ToTokens for VisRestricted {
fn to_tokens(&self, tokens: &mut Tokens) {
self.pub_token.to_tokens(tokens);
self.paren_token.surround(tokens, |tokens| {
// XXX: If we have a path which is not "self" or "super",
// automatically add the "in" token.
self.in_token.to_tokens(tokens);
self.path.to_tokens(tokens);
});
}
}
}

210
third_party/rust/syn-0.12.12/src/derive.rs поставляемый
Просмотреть файл

@ -1,210 +0,0 @@
// Copyright 2018 Syn Developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use super::*;
use punctuated::Punctuated;
ast_struct! {
/// Data structure sent to a `proc_macro_derive` macro.
///
/// *This type is available if Syn is built with the `"derive"` feature.*
pub struct DeriveInput {
/// Attributes tagged on the whole struct or enum.
pub attrs: Vec<Attribute>,
/// Visibility of the struct or enum.
pub vis: Visibility,
/// Name of the struct or enum.
pub ident: Ident,
/// Generics required to complete the definition.
pub generics: Generics,
/// Data within the struct or enum.
pub data: Data,
}
}
ast_enum_of_structs! {
/// The storage of a struct, enum or union data structure.
///
/// *This type is available if Syn is built with the `"derive"` feature.*
///
/// # Syntax tree enum
///
/// This type is a [syntax tree enum].
///
/// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
pub enum Data {
/// A struct input to a `proc_macro_derive` macro.
///
/// *This type is available if Syn is built with the `"derive"`
/// feature.*
pub Struct(DataStruct {
pub struct_token: Token![struct],
pub fields: Fields,
pub semi_token: Option<Token![;]>,
}),
/// An enum input to a `proc_macro_derive` macro.
///
/// *This type is available if Syn is built with the `"derive"`
/// feature.*
pub Enum(DataEnum {
pub enum_token: Token![enum],
pub brace_token: token::Brace,
pub variants: Punctuated<Variant, Token![,]>,
}),
/// A tagged union input to a `proc_macro_derive` macro.
///
/// *This type is available if Syn is built with the `"derive"`
/// feature.*
pub Union(DataUnion {
pub union_token: Token![union],
pub fields: FieldsNamed,
}),
}
do_not_generate_to_tokens
}
#[cfg(feature = "parsing")]
pub mod parsing {
use super::*;
use synom::Synom;
impl Synom for DeriveInput {
named!(parse -> Self, do_parse!(
attrs: many0!(Attribute::parse_outer) >>
vis: syn!(Visibility) >>
which: alt!(
keyword!(struct) => { Ok }
|
keyword!(enum) => { Err }
) >>
id: syn!(Ident) >>
generics: syn!(Generics) >>
item: switch!(value!(which),
Ok(s) => map!(data_struct, move |(wh, fields, semi)| DeriveInput {
ident: id,
vis: vis,
attrs: attrs,
generics: Generics {
where_clause: wh,
.. generics
},
data: Data::Struct(DataStruct {
struct_token: s,
fields: fields,
semi_token: semi,
}),
})
|
Err(e) => map!(data_enum, move |(wh, brace, variants)| DeriveInput {
ident: id,
vis: vis,
attrs: attrs,
generics: Generics {
where_clause: wh,
.. generics
},
data: Data::Enum(DataEnum {
variants: variants,
brace_token: brace,
enum_token: e,
}),
})
) >>
(item)
));
fn description() -> Option<&'static str> {
Some("derive input")
}
}
named!(data_struct -> (Option<WhereClause>, Fields, Option<Token![;]>), alt!(
do_parse!(
wh: option!(syn!(WhereClause)) >>
fields: syn!(FieldsNamed) >>
(wh, Fields::Named(fields), None)
)
|
do_parse!(
fields: syn!(FieldsUnnamed) >>
wh: option!(syn!(WhereClause)) >>
semi: punct!(;) >>
(wh, Fields::Unnamed(fields), Some(semi))
)
|
do_parse!(
wh: option!(syn!(WhereClause)) >>
semi: punct!(;) >>
(wh, Fields::Unit, Some(semi))
)
));
named!(data_enum -> (Option<WhereClause>, token::Brace, Punctuated<Variant, Token![,]>), do_parse!(
wh: option!(syn!(WhereClause)) >>
data: braces!(Punctuated::parse_terminated) >>
(wh, data.0, data.1)
));
}
#[cfg(feature = "printing")]
mod printing {
use super::*;
use attr::FilterAttrs;
use quote::{ToTokens, Tokens};
impl ToTokens for DeriveInput {
fn to_tokens(&self, tokens: &mut Tokens) {
for attr in self.attrs.outer() {
attr.to_tokens(tokens);
}
self.vis.to_tokens(tokens);
match self.data {
Data::Struct(ref d) => d.struct_token.to_tokens(tokens),
Data::Enum(ref d) => d.enum_token.to_tokens(tokens),
Data::Union(ref d) => d.union_token.to_tokens(tokens),
}
self.ident.to_tokens(tokens);
self.generics.to_tokens(tokens);
match self.data {
Data::Struct(ref data) => match data.fields {
Fields::Named(ref fields) => {
self.generics.where_clause.to_tokens(tokens);
fields.to_tokens(tokens);
}
Fields::Unnamed(ref fields) => {
fields.to_tokens(tokens);
self.generics.where_clause.to_tokens(tokens);
TokensOrDefault(&data.semi_token).to_tokens(tokens);
}
Fields::Unit => {
self.generics.where_clause.to_tokens(tokens);
TokensOrDefault(&data.semi_token).to_tokens(tokens);
}
},
Data::Enum(ref data) => {
self.generics.where_clause.to_tokens(tokens);
data.brace_token.surround(tokens, |tokens| {
data.variants.to_tokens(tokens);
});
}
Data::Union(ref data) => {
self.generics.where_clause.to_tokens(tokens);
data.fields.to_tokens(tokens);
}
}
}
}
}

60
third_party/rust/syn-0.12.12/src/error.rs поставляемый
Просмотреть файл

@ -1,60 +0,0 @@
// Copyright 2018 Syn Developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::error::Error;
use buffer::Cursor;
use std::fmt::{self, Display};
/// The result of a `Synom` parser.
///
/// Refer to the [module documentation] for details about parsing in Syn.
///
/// [module documentation]: index.html
///
/// *This type is available if Syn is built with the `"parsing"` feature.*
pub type PResult<'a, O> = Result<(O, Cursor<'a>), ParseError>;
/// An error with a default error message.
///
/// NOTE: We should provide better error messages in the future.
pub fn parse_error<O>() -> PResult<'static, O> {
Err(ParseError(None))
}
/// Error returned when a `Synom` parser cannot parse the input tokens.
///
/// Refer to the [module documentation] for details about parsing in Syn.
///
/// [module documentation]: index.html
///
/// *This type is available if Syn is built with the `"parsing"` feature.*
#[derive(Debug)]
pub struct ParseError(Option<String>);
impl Error for ParseError {
fn description(&self) -> &str {
match self.0 {
Some(ref desc) => desc,
None => "failed to parse",
}
}
}
impl Display for ParseError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
Display::fmt(self.description(), f)
}
}
impl ParseError {
// For syn use only. Not public API.
#[doc(hidden)]
pub fn new<T: Into<String>>(msg: T) -> Self {
ParseError(Some(msg.into()))
}
}

3573
third_party/rust/syn-0.12.12/src/expr.rs поставляемый

Разница между файлами не показана из-за своего большого размера Загрузить разницу

122
third_party/rust/syn-0.12.12/src/file.rs поставляемый
Просмотреть файл

@ -1,122 +0,0 @@
// Copyright 2018 Syn Developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use super::*;
ast_struct! {
/// A complete file of Rust source code.
///
/// *This type is available if Syn is built with the `"full"` feature.*
///
/// # Example
///
/// Parse a Rust source file into a `syn::File` and print out a debug
/// representation of the syntax tree.
///
/// ```
/// extern crate syn;
///
/// use std::env;
/// use std::fs::File;
/// use std::io::Read;
/// use std::process;
///
/// fn main() {
/// # }
/// #
/// # fn fake_main() {
/// let mut args = env::args();
/// let _ = args.next(); // executable name
///
/// let filename = match (args.next(), args.next()) {
/// (Some(filename), None) => filename,
/// _ => {
/// eprintln!("Usage: dump-syntax path/to/filename.rs");
/// process::exit(1);
/// }
/// };
///
/// let mut file = File::open(&filename).expect("Unable to open file");
///
/// let mut src = String::new();
/// file.read_to_string(&mut src).expect("Unable to read file");
///
/// let syntax = syn::parse_file(&src).expect("Unable to parse file");
/// println!("{:#?}", syntax);
/// }
/// ```
///
/// Running with its own source code as input, this program prints output
/// that begins with:
///
/// ```text
/// File {
/// shebang: None,
/// attrs: [],
/// items: [
/// ExternCrate(
/// ItemExternCrate {
/// attrs: [],
/// vis: Inherited,
/// extern_token: Extern,
/// crate_token: Crate,
/// ident: Ident {
/// term: Term(
/// "syn"
/// ),
/// span: Span
/// },
/// rename: None,
/// semi_token: Semi
/// }
/// ),
/// ...
/// ```
pub struct File {
pub shebang: Option<String>,
pub attrs: Vec<Attribute>,
pub items: Vec<Item>,
}
}
#[cfg(feature = "parsing")]
pub mod parsing {
use super::*;
use synom::Synom;
impl Synom for File {
named!(parse -> Self, do_parse!(
attrs: many0!(Attribute::parse_inner) >>
items: many0!(Item::parse) >>
(File {
shebang: None,
attrs: attrs,
items: items,
})
));
fn description() -> Option<&'static str> {
Some("crate")
}
}
}
#[cfg(feature = "printing")]
mod printing {
use super::*;
use attr::FilterAttrs;
use quote::{ToTokens, Tokens};
impl ToTokens for File {
fn to_tokens(&self, tokens: &mut Tokens) {
tokens.append_all(self.attrs.inner());
tokens.append_all(&self.items);
}
}
}

2946
third_party/rust/syn-0.12.12/src/gen/fold.rs поставляемый

Разница между файлами не показана из-за своего большого размера Загрузить разницу

2278
third_party/rust/syn-0.12.12/src/gen/visit.rs поставляемый

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Разница между файлами не показана из-за своего большого размера Загрузить разницу

165
third_party/rust/syn-0.12.12/src/gen_helper.rs поставляемый
Просмотреть файл

@ -1,165 +0,0 @@
// Copyright 2018 Syn Developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[cfg(feature = "fold")]
pub mod fold {
use punctuated::{Pair, Punctuated};
use fold::Fold;
use proc_macro2::Span;
pub trait FoldHelper {
type Item;
fn lift<F>(self, f: F) -> Self
where
F: FnMut(Self::Item) -> Self::Item;
}
impl<T> FoldHelper for Vec<T> {
type Item = T;
fn lift<F>(self, f: F) -> Self
where
F: FnMut(Self::Item) -> Self::Item,
{
self.into_iter().map(f).collect()
}
}
impl<T, U> FoldHelper for Punctuated<T, U> {
type Item = T;
fn lift<F>(self, mut f: F) -> Self
where
F: FnMut(Self::Item) -> Self::Item,
{
self.into_pairs()
.map(Pair::into_tuple)
.map(|(t, u)| Pair::new(f(t), u))
.collect()
}
}
pub fn tokens_helper<F: Fold + ?Sized, S: Spans>(folder: &mut F, spans: &S) -> S {
spans.fold(folder)
}
pub trait Spans {
fn fold<F: Fold + ?Sized>(&self, folder: &mut F) -> Self;
}
impl Spans for Span {
fn fold<F: Fold + ?Sized>(&self, folder: &mut F) -> Self {
folder.fold_span(*self)
}
}
impl Spans for [Span; 1] {
fn fold<F: Fold + ?Sized>(&self, folder: &mut F) -> Self {
[folder.fold_span(self[0])]
}
}
impl Spans for [Span; 2] {
fn fold<F: Fold + ?Sized>(&self, folder: &mut F) -> Self {
[folder.fold_span(self[0]), folder.fold_span(self[1])]
}
}
impl Spans for [Span; 3] {
fn fold<F: Fold + ?Sized>(&self, folder: &mut F) -> Self {
[
folder.fold_span(self[0]),
folder.fold_span(self[1]),
folder.fold_span(self[2]),
]
}
}
}
#[cfg(feature = "visit")]
pub mod visit {
use proc_macro2::Span;
use visit::Visit;
pub fn tokens_helper<'ast, V: Visit<'ast> + ?Sized, S: Spans>(
visitor: &mut V,
spans: &'ast S,
) {
spans.visit(visitor);
}
pub trait Spans {
fn visit<'ast, V: Visit<'ast> + ?Sized>(&'ast self, visitor: &mut V);
}
impl Spans for Span {
fn visit<'ast, V: Visit<'ast> + ?Sized>(&'ast self, visitor: &mut V) {
visitor.visit_span(self);
}
}
impl Spans for [Span; 1] {
fn visit<'ast, V: Visit<'ast> + ?Sized>(&'ast self, visitor: &mut V) {
visitor.visit_span(&self[0]);
}
}
impl Spans for [Span; 2] {
fn visit<'ast, V: Visit<'ast> + ?Sized>(&'ast self, visitor: &mut V) {
visitor.visit_span(&self[0]);
visitor.visit_span(&self[1]);
}
}
impl Spans for [Span; 3] {
fn visit<'ast, V: Visit<'ast> + ?Sized>(&'ast self, visitor: &mut V) {
visitor.visit_span(&self[0]);
visitor.visit_span(&self[1]);
visitor.visit_span(&self[2]);
}
}
}
#[cfg(feature = "visit-mut")]
pub mod visit_mut {
use proc_macro2::Span;
use visit_mut::VisitMut;
pub fn tokens_helper<V: VisitMut + ?Sized, S: Spans>(visitor: &mut V, spans: &mut S) {
spans.visit_mut(visitor);
}
pub trait Spans {
fn visit_mut<V: VisitMut + ?Sized>(&mut self, visitor: &mut V);
}
impl Spans for Span {
fn visit_mut<V: VisitMut + ?Sized>(&mut self, visitor: &mut V) {
visitor.visit_span_mut(self);
}
}
impl Spans for [Span; 1] {
fn visit_mut<V: VisitMut + ?Sized>(&mut self, visitor: &mut V) {
visitor.visit_span_mut(&mut self[0]);
}
}
impl Spans for [Span; 2] {
fn visit_mut<V: VisitMut + ?Sized>(&mut self, visitor: &mut V) {
visitor.visit_span_mut(&mut self[0]);
visitor.visit_span_mut(&mut self[1]);
}
}
impl Spans for [Span; 3] {
fn visit_mut<V: VisitMut + ?Sized>(&mut self, visitor: &mut V) {
visitor.visit_span_mut(&mut self[0]);
visitor.visit_span_mut(&mut self[1]);
visitor.visit_span_mut(&mut self[2]);
}
}
}

984
third_party/rust/syn-0.12.12/src/generics.rs поставляемый
Просмотреть файл

@ -1,984 +0,0 @@
// Copyright 2018 Syn Developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use super::*;
use punctuated::{Iter, IterMut, Punctuated};
ast_struct! {
/// Lifetimes and type parameters attached to a declaration of a function,
/// enum, trait, etc.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[derive(Default)]
pub struct Generics {
pub lt_token: Option<Token![<]>,
pub params: Punctuated<GenericParam, Token![,]>,
pub gt_token: Option<Token![>]>,
pub where_clause: Option<WhereClause>,
}
}
ast_enum_of_structs! {
/// A generic type parameter, lifetime, or const generic: `T: Into<String>`,
/// `'a: 'b`, `const LEN: usize`.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// feature.*
///
/// # Syntax tree enum
///
/// This type is a [syntax tree enum].
///
/// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
pub enum GenericParam {
/// A generic type parameter: `T: Into<String>`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub Type(TypeParam {
pub attrs: Vec<Attribute>,
pub ident: Ident,
pub colon_token: Option<Token![:]>,
pub bounds: Punctuated<TypeParamBound, Token![+]>,
pub eq_token: Option<Token![=]>,
pub default: Option<Type>,
}),
/// A lifetime definition: `'a: 'b + 'c + 'd`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub Lifetime(LifetimeDef {
pub attrs: Vec<Attribute>,
pub lifetime: Lifetime,
pub colon_token: Option<Token![:]>,
pub bounds: Punctuated<Lifetime, Token![+]>,
}),
/// A const generic parameter: `const LENGTH: usize`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub Const(ConstParam {
pub attrs: Vec<Attribute>,
pub const_token: Token![const],
pub ident: Ident,
pub colon_token: Token![:],
pub ty: Type,
pub eq_token: Option<Token![=]>,
pub default: Option<Expr>,
}),
}
}
impl Generics {
    /// Iterates over the `TypeParam`s among `self.params`, skipping
    /// lifetime and const parameters.
    pub fn type_params(&self) -> TypeParams {
        TypeParams(self.params.iter())
    }

    /// Mutable counterpart of [`type_params`](#method.type_params).
    pub fn type_params_mut(&mut self) -> TypeParamsMut {
        TypeParamsMut(self.params.iter_mut())
    }

    /// Iterates over the `LifetimeDef`s among `self.params`, skipping type
    /// and const parameters.
    pub fn lifetimes(&self) -> Lifetimes {
        Lifetimes(self.params.iter())
    }

    /// Mutable counterpart of [`lifetimes`](#method.lifetimes).
    pub fn lifetimes_mut(&mut self) -> LifetimesMut {
        LifetimesMut(self.params.iter_mut())
    }

    /// Iterates over the `ConstParam`s among `self.params`, skipping
    /// lifetime and type parameters.
    pub fn const_params(&self) -> ConstParams {
        ConstParams(self.params.iter())
    }

    /// Mutable counterpart of [`const_params`](#method.const_params).
    pub fn const_params_mut(&mut self) -> ConstParamsMut {
        ConstParamsMut(self.params.iter_mut())
    }
}
pub struct TypeParams<'a>(Iter<'a, GenericParam, Token![,]>);

impl<'a> Iterator for TypeParams<'a> {
    type Item = &'a TypeParam;

    fn next(&mut self) -> Option<Self::Item> {
        // Skip non-type parameters until a `TypeParam` is found or the
        // underlying iterator is exhausted. (Iterative form of the
        // original tail recursion; behavior is identical.)
        loop {
            match self.0.next() {
                Some(&GenericParam::Type(ref type_param)) => return Some(type_param),
                Some(_) => {}
                None => return None,
            }
        }
    }
}
pub struct TypeParamsMut<'a>(IterMut<'a, GenericParam, Token![,]>);

impl<'a> Iterator for TypeParamsMut<'a> {
    type Item = &'a mut TypeParam;

    fn next(&mut self) -> Option<Self::Item> {
        // Skip non-type parameters until a `TypeParam` is found or the
        // underlying iterator is exhausted. (Iterative form of the
        // original tail recursion; behavior is identical.)
        loop {
            match self.0.next() {
                Some(&mut GenericParam::Type(ref mut type_param)) => return Some(type_param),
                Some(_) => {}
                None => return None,
            }
        }
    }
}
pub struct Lifetimes<'a>(Iter<'a, GenericParam, Token![,]>);

impl<'a> Iterator for Lifetimes<'a> {
    type Item = &'a LifetimeDef;

    fn next(&mut self) -> Option<Self::Item> {
        // Skip non-lifetime parameters until a `LifetimeDef` is found or
        // the underlying iterator is exhausted. (Iterative form of the
        // original tail recursion; behavior is identical.)
        loop {
            match self.0.next() {
                Some(&GenericParam::Lifetime(ref def)) => return Some(def),
                Some(_) => {}
                None => return None,
            }
        }
    }
}
pub struct LifetimesMut<'a>(IterMut<'a, GenericParam, Token![,]>);

impl<'a> Iterator for LifetimesMut<'a> {
    type Item = &'a mut LifetimeDef;

    fn next(&mut self) -> Option<Self::Item> {
        // Skip non-lifetime parameters until a `LifetimeDef` is found or
        // the underlying iterator is exhausted. (Iterative form of the
        // original tail recursion; behavior is identical.)
        loop {
            match self.0.next() {
                Some(&mut GenericParam::Lifetime(ref mut def)) => return Some(def),
                Some(_) => {}
                None => return None,
            }
        }
    }
}
pub struct ConstParams<'a>(Iter<'a, GenericParam, Token![,]>);

impl<'a> Iterator for ConstParams<'a> {
    type Item = &'a ConstParam;

    fn next(&mut self) -> Option<Self::Item> {
        // Skip non-const parameters until a `ConstParam` is found or the
        // underlying iterator is exhausted. (Iterative form of the
        // original tail recursion; behavior is identical.)
        loop {
            match self.0.next() {
                Some(&GenericParam::Const(ref const_param)) => return Some(const_param),
                Some(_) => {}
                None => return None,
            }
        }
    }
}
pub struct ConstParamsMut<'a>(IterMut<'a, GenericParam, Token![,]>);

impl<'a> Iterator for ConstParamsMut<'a> {
    type Item = &'a mut ConstParam;

    fn next(&mut self) -> Option<Self::Item> {
        // Skip non-const parameters until a `ConstParam` is found or the
        // underlying iterator is exhausted. (Iterative form of the
        // original tail recursion; behavior is identical.)
        loop {
            match self.0.next() {
                Some(&mut GenericParam::Const(ref mut const_param)) => return Some(const_param),
                Some(_) => {}
                None => return None,
            }
        }
    }
}
/// Returned by `Generics::split_for_impl`.
///
/// Borrows the `Generics`; when printed, emits the parameter list for the
/// `impl<...>` position (bounds kept, parameter defaults omitted).
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// feature and the `"printing"` feature.*
#[cfg(feature = "printing")]
#[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
#[cfg_attr(feature = "clone-impls", derive(Clone))]
pub struct ImplGenerics<'a>(&'a Generics);

/// Returned by `Generics::split_for_impl`.
///
/// Borrows the `Generics`; when printed, emits only the parameter names and
/// lifetimes (no attributes, bounds, or defaults), as used after the type
/// name.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// feature and the `"printing"` feature.*
#[cfg(feature = "printing")]
#[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
#[cfg_attr(feature = "clone-impls", derive(Clone))]
pub struct TypeGenerics<'a>(&'a Generics);

/// Returned by `TypeGenerics::as_turbofish`.
///
/// Borrows the `Generics`; when printed, emits `::` followed by the type
/// generics, and nothing at all when there are no parameters.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// feature and the `"printing"` feature.*
#[cfg(feature = "printing")]
#[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
#[cfg_attr(feature = "clone-impls", derive(Clone))]
pub struct Turbofish<'a>(&'a Generics);
#[cfg(feature = "printing")]
impl Generics {
    /// Split a type's generics into the pieces required for impl'ing a trait
    /// for that type.
    ///
    /// ```
    /// # extern crate syn;
    /// # #[macro_use]
    /// # extern crate quote;
    /// # fn main() {
    /// # let generics: syn::Generics = Default::default();
    /// # let name = syn::Ident::from("MyType");
    /// let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
    /// quote! {
    ///     impl #impl_generics MyTrait for #name #ty_generics #where_clause {
    ///         // ...
    ///     }
    /// }
    /// # ;
    /// # }
    /// ```
    ///
    /// *This method is available if Syn is built with the `"derive"` or
    /// `"full"` feature and the `"printing"` feature.*
    pub fn split_for_impl(&self) -> (ImplGenerics, TypeGenerics, Option<&WhereClause>) {
        // All three pieces borrow from `self`; the wrapper types only change
        // how the same parameters are printed.
        let impl_generics = ImplGenerics(self);
        let ty_generics = TypeGenerics(self);
        let where_clause = self.where_clause.as_ref();
        (impl_generics, ty_generics, where_clause)
    }
}
#[cfg(feature = "printing")]
impl<'a> TypeGenerics<'a> {
    /// Turn a type's generics like `<X, Y>` into a turbofish like `::<X, Y>`.
    ///
    /// *This method is available if Syn is built with the `"derive"` or
    /// `"full"` feature and the `"printing"` feature.*
    pub fn as_turbofish(&self) -> Turbofish {
        // Re-wrap the same borrowed `Generics`; only the printed form differs.
        let generics = self.0;
        Turbofish(generics)
    }
}
ast_struct! {
    /// A set of bound lifetimes: `for<'a, 'b, 'c>`.
    ///
    /// *This type is available if Syn is built with the `"derive"` or `"full"`
    /// feature.*
    #[derive(Default)]
    pub struct BoundLifetimes {
        pub for_token: Token![for],
        pub lt_token: Token![<],
        // The `'a, 'b, 'c` between the angle brackets.
        pub lifetimes: Punctuated<LifetimeDef, Token![,]>,
        pub gt_token: Token![>],
    }
}
impl LifetimeDef {
    /// Constructs a bare lifetime parameter: no attributes, no colon, and an
    /// empty bound list.
    pub fn new(lifetime: Lifetime) -> Self {
        LifetimeDef {
            attrs: Vec::new(),
            lifetime: lifetime,
            bounds: Punctuated::new(),
            colon_token: None,
        }
    }
}
impl From<Ident> for TypeParam {
    /// Wraps a plain identifier as a type parameter with no attributes, no
    /// bounds, and no default.
    fn from(ident: Ident) -> Self {
        TypeParam {
            attrs: Vec::new(),
            ident: ident,
            bounds: Punctuated::new(),
            colon_token: None,
            eq_token: None,
            default: None,
        }
    }
}
ast_enum_of_structs! {
    /// A trait or lifetime used as a bound on a type parameter.
    ///
    /// *This type is available if Syn is built with the `"derive"` or `"full"`
    /// feature.*
    pub enum TypeParamBound {
        // A trait bound such as `Clone` in `T: Clone`.
        pub Trait(TraitBound),
        // A lifetime bound such as `'a` in `T: 'a`.
        pub Lifetime(Lifetime),
    }
}
ast_struct! {
    /// A trait used as a bound on a type parameter.
    ///
    /// *This type is available if Syn is built with the `"derive"` or `"full"`
    /// feature.*
    pub struct TraitBound {
        // The `?` of a relaxed bound like `?Sized`, if any.
        pub modifier: TraitBoundModifier,
        /// The `for<'a>` in `for<'a> Foo<&'a T>`
        pub lifetimes: Option<BoundLifetimes>,
        /// The `Foo<&'a T>` in `for<'a> Foo<&'a T>`
        pub path: Path,
    }
}
ast_enum! {
    /// A modifier on a trait bound, currently only used for the `?` in
    /// `?Sized`.
    ///
    /// *This type is available if Syn is built with the `"derive"` or `"full"`
    /// feature.*
    #[cfg_attr(feature = "clone-impls", derive(Copy))]
    pub enum TraitBoundModifier {
        // An ordinary bound, e.g. the `Sized` in `T: Sized`.
        None,
        // A relaxed bound; carries the `?` token, e.g. in `T: ?Sized`.
        Maybe(Token![?]),
    }
}
ast_struct! {
    /// A `where` clause in a definition: `where T: Deserialize<'de>, D:
    /// 'static`.
    ///
    /// *This type is available if Syn is built with the `"derive"` or `"full"`
    /// feature.*
    pub struct WhereClause {
        pub where_token: Token![where],
        // The comma-separated predicates following the `where` keyword.
        pub predicates: Punctuated<WherePredicate, Token![,]>,
    }
}
ast_enum_of_structs! {
    /// A single predicate in a `where` clause: `T: Deserialize<'de>`.
    ///
    /// *This type is available if Syn is built with the `"derive"` or `"full"`
    /// feature.*
    ///
    /// # Syntax tree enum
    ///
    /// This type is a [syntax tree enum].
    ///
    /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
    pub enum WherePredicate {
        /// A type predicate in a `where` clause: `for<'c> Foo<'c>: Trait<'c>`.
        ///
        /// *This type is available if Syn is built with the `"derive"` or
        /// `"full"` feature.*
        pub Type(PredicateType {
            /// Any lifetimes from a `for` binding
            pub lifetimes: Option<BoundLifetimes>,
            /// The type being bounded
            pub bounded_ty: Type,
            pub colon_token: Token![:],
            /// Trait and lifetime bounds (`Clone+Send+'static`)
            pub bounds: Punctuated<TypeParamBound, Token![+]>,
        }),
        /// A lifetime predicate in a `where` clause: `'a: 'b + 'c`.
        ///
        /// *This type is available if Syn is built with the `"derive"` or
        /// `"full"` feature.*
        pub Lifetime(PredicateLifetime {
            pub lifetime: Lifetime,
            // `None` when the predicate carries no bounds, as in a bare `'a`.
            pub colon_token: Option<Token![:]>,
            pub bounds: Punctuated<Lifetime, Token![+]>,
        }),
        /// An equality predicate in a `where` clause (unsupported).
        ///
        /// *This type is available if Syn is built with the `"derive"` or
        /// `"full"` feature.*
        pub Eq(PredicateEq {
            pub lhs_ty: Type,
            pub eq_token: Token![=],
            pub rhs_ty: Type,
        }),
    }
}
// Parser implementations for the generics syntax tree, built from synom's
// nom-style combinator macros. Each `Synom` impl parses one node type;
// `description` supplies the name used in error messages.
#[cfg(feature = "parsing")]
pub mod parsing {
    use super::*;

    use synom::Synom;
    use punctuated::Pair;

    impl Synom for Generics {
        // Parses `<...>` with lifetimes first and then type parameters, or
        // matches nothing at all (a type with no generics).
        named!(parse -> Self, map!(
            alt!(
                do_parse!(
                    lt: punct!(<) >>
                    lifetimes: call!(Punctuated::<LifetimeDef, Token![,]>::parse_terminated) >>
                    ty_params: cond!(
                        // Type parameters may follow only if the lifetime
                        // list is empty or ends with a trailing comma.
                        lifetimes.empty_or_trailing(),
                        Punctuated::<TypeParam, Token![,]>::parse_terminated
                    ) >>
                    gt: punct!(>) >>
                    (lifetimes, ty_params, Some(lt), Some(gt))
                )
                |
                epsilon!() => { |_| (Punctuated::new(), None, None, None) }
            ),
            // Merge both lists into a single `params` sequence, preserving
            // the separating commas, with lifetimes before type parameters.
            |(lifetimes, ty_params, lt, gt)| Generics {
                lt_token: lt,
                params: lifetimes.into_pairs()
                    .map(Pair::into_tuple)
                    .map(|(life, comma)| Pair::new(GenericParam::Lifetime(life), comma))
                    .chain(ty_params.unwrap_or_default()
                        .into_pairs()
                        .map(Pair::into_tuple)
                        .map(|(ty, comma)| Pair::new(GenericParam::Type(ty), comma)))
                    .collect(),
                gt_token: gt,
                // Any `where` clause is parsed separately by the caller.
                where_clause: None,
            }
        ));

        fn description() -> Option<&'static str> {
            Some("generic parameters in declaration")
        }
    }

    impl Synom for GenericParam {
        // Tries each parameter kind in turn: type, lifetime, then const.
        named!(parse -> Self, alt!(
            syn!(TypeParam) => { GenericParam::Type }
            |
            syn!(LifetimeDef) => { GenericParam::Lifetime }
            |
            syn!(ConstParam) => { GenericParam::Const }
        ));

        fn description() -> Option<&'static str> {
            Some("generic parameter")
        }
    }

    impl Synom for LifetimeDef {
        // `'a` optionally followed by `: 'b + 'c`; bounds are parsed only
        // when the colon is present.
        named!(parse -> Self, do_parse!(
            attrs: many0!(Attribute::parse_outer) >>
            life: syn!(Lifetime) >>
            colon: option!(punct!(:)) >>
            bounds: cond!(
                colon.is_some(),
                Punctuated::parse_separated_nonempty
            ) >>
            (LifetimeDef {
                attrs: attrs,
                lifetime: life,
                bounds: bounds.unwrap_or_default(),
                colon_token: colon,
            })
        ));

        fn description() -> Option<&'static str> {
            Some("lifetime definition")
        }
    }

    impl Synom for BoundLifetimes {
        // `for<'a, 'b, 'c>`
        named!(parse -> Self, do_parse!(
            for_: keyword!(for) >>
            lt: punct!(<) >>
            lifetimes: call!(Punctuated::parse_terminated) >>
            gt: punct!(>) >>
            (BoundLifetimes {
                for_token: for_,
                lt_token: lt,
                gt_token: gt,
                lifetimes: lifetimes,
            })
        ));

        fn description() -> Option<&'static str> {
            Some("bound lifetimes")
        }
    }

    impl Synom for TypeParam {
        // `T`, optionally `: Bound + ...`, optionally `= Default`.
        named!(parse -> Self, do_parse!(
            attrs: many0!(Attribute::parse_outer) >>
            id: syn!(Ident) >>
            colon: option!(punct!(:)) >>
            bounds: cond!(
                colon.is_some(),
                Punctuated::parse_separated_nonempty
            ) >>
            default: option!(do_parse!(
                eq: punct!(=) >>
                ty: syn!(Type) >>
                (eq, ty)
            )) >>
            (TypeParam {
                attrs: attrs,
                ident: id,
                bounds: bounds.unwrap_or_default(),
                colon_token: colon,
                // Rebuild the `=` token from the span captured above.
                eq_token: default.as_ref().map(|d| Token![=]((d.0).0)),
                default: default.map(|d| d.1),
            })
        ));

        fn description() -> Option<&'static str> {
            Some("type parameter")
        }
    }

    impl Synom for TypeParamBound {
        // A lifetime, a trait bound, or a parenthesized trait bound.
        named!(parse -> Self, alt!(
            syn!(Lifetime) => { TypeParamBound::Lifetime }
            |
            syn!(TraitBound) => { TypeParamBound::Trait }
            |
            parens!(syn!(TraitBound)) => { |bound| TypeParamBound::Trait(bound.1) }
        ));

        fn description() -> Option<&'static str> {
            Some("type parameter bound")
        }
    }

    impl Synom for TraitBound {
        named!(parse -> Self, do_parse!(
            modifier: syn!(TraitBoundModifier) >>
            lifetimes: option!(syn!(BoundLifetimes)) >>
            mut path: syn!(Path) >>
            // `Fn(A, B) -> C` style sugar is only legal when the final path
            // segment has no arguments of its own yet.
            parenthesized: option!(cond_reduce!(
                path.segments.last().unwrap().value().arguments.is_empty(),
                syn!(ParenthesizedGenericArguments)
            )) >>
            ({
                if let Some(parenthesized) = parenthesized {
                    let parenthesized = PathArguments::Parenthesized(parenthesized);
                    path.segments.last_mut().unwrap().value_mut().arguments = parenthesized;
                }
                TraitBound {
                    modifier: modifier,
                    lifetimes: lifetimes,
                    path: path,
                }
            })
        ));

        fn description() -> Option<&'static str> {
            Some("trait bound")
        }
    }

    impl Synom for TraitBoundModifier {
        // A leading `?`, or nothing (always succeeds).
        named!(parse -> Self, alt!(
            punct!(?) => { TraitBoundModifier::Maybe }
            |
            epsilon!() => { |_| TraitBoundModifier::None }
        ));

        fn description() -> Option<&'static str> {
            Some("trait bound modifier")
        }
    }

    impl Synom for ConstParam {
        // `const NAME: Type`, optionally `= expr`.
        named!(parse -> Self, do_parse!(
            attrs: many0!(Attribute::parse_outer) >>
            const_: keyword!(const) >>
            ident: syn!(Ident) >>
            colon: punct!(:) >>
            ty: syn!(Type) >>
            eq_def: option!(tuple!(punct!(=), syn!(Expr))) >>
            ({
                let (eq_token, default) = match eq_def {
                    Some((eq_token, default)) => (Some(eq_token), Some(default)),
                    None => (None, None),
                };
                ConstParam {
                    attrs: attrs,
                    const_token: const_,
                    ident: ident,
                    colon_token: colon,
                    ty: ty,
                    eq_token: eq_token,
                    default: default,
                }
            })
        ));

        fn description() -> Option<&'static str> {
            Some("generic `const` parameter")
        }
    }

    impl Synom for WhereClause {
        named!(parse -> Self, do_parse!(
            where_: keyword!(where) >>
            predicates: call!(Punctuated::parse_terminated) >>
            (WhereClause {
                predicates: predicates,
                where_token: where_,
            })
        ));

        fn description() -> Option<&'static str> {
            Some("where clause")
        }
    }

    impl Synom for WherePredicate {
        named!(parse -> Self, alt!(
            // Lifetime predicate: `'a` optionally followed by `: 'b + 'c`.
            do_parse!(
                ident: syn!(Lifetime) >>
                colon: option!(punct!(:)) >>
                bounds: cond!(
                    colon.is_some(),
                    Punctuated::parse_separated
                ) >>
                (WherePredicate::Lifetime(PredicateLifetime {
                    lifetime: ident,
                    bounds: bounds.unwrap_or_default(),
                    colon_token: colon,
                }))
            )
            |
            // Type predicate: `for<'c> Foo<'c>: Trait<'c>`; the colon and at
            // least one bound are mandatory here.
            do_parse!(
                lifetimes: option!(syn!(BoundLifetimes)) >>
                bounded_ty: syn!(Type) >>
                colon: punct!(:) >>
                bounds: call!(Punctuated::parse_separated_nonempty) >>
                (WherePredicate::Type(PredicateType {
                    lifetimes: lifetimes,
                    bounded_ty: bounded_ty,
                    bounds: bounds,
                    colon_token: colon,
                }))
            )
        ));

        fn description() -> Option<&'static str> {
            Some("predicate in where clause")
        }
    }
}
// `ToTokens` implementations that turn the generics syntax tree back into
// Rust tokens. `TokensOrDefault` prints a stored token if present and a
// default-spanned one otherwise, so nodes built programmatically without
// explicit punctuation still print correctly.
#[cfg(feature = "printing")]
mod printing {
    use super::*;

    use attr::FilterAttrs;
    use quote::{ToTokens, Tokens};

    impl ToTokens for Generics {
        fn to_tokens(&self, tokens: &mut Tokens) {
            // With no parameters print nothing at all -- not even `<>`.
            if self.params.is_empty() {
                return;
            }

            TokensOrDefault(&self.lt_token).to_tokens(tokens);

            // Print lifetimes before types and consts, regardless of their
            // order in self.params.
            //
            // TODO: ordering rules for const parameters vs type parameters have
            // not been settled yet. https://github.com/rust-lang/rust/issues/44580
            let mut trailing_or_empty = true;
            for param in self.params.pairs() {
                if let GenericParam::Lifetime(_) = **param.value() {
                    param.to_tokens(tokens);
                    // Track whether the last printed pair already carried
                    // its trailing comma.
                    trailing_or_empty = param.punct().is_some();
                }
            }
            for param in self.params.pairs() {
                match **param.value() {
                    GenericParam::Type(_) | GenericParam::Const(_) => {
                        // Insert a separating comma if the previous item
                        // did not print one.
                        if !trailing_or_empty {
                            <Token![,]>::default().to_tokens(tokens);
                            trailing_or_empty = true;
                        }
                        param.to_tokens(tokens);
                    }
                    GenericParam::Lifetime(_) => {}
                }
            }

            TokensOrDefault(&self.gt_token).to_tokens(tokens);
        }
    }

    impl<'a> ToTokens for ImplGenerics<'a> {
        // Prints the parameter list for the `impl<...>` position: bounds are
        // kept but parameter defaults are omitted (defaults are not legal in
        // an impl header).
        fn to_tokens(&self, tokens: &mut Tokens) {
            if self.0.params.is_empty() {
                return;
            }

            TokensOrDefault(&self.0.lt_token).to_tokens(tokens);

            // Print lifetimes before types and consts, regardless of their
            // order in self.params.
            //
            // TODO: ordering rules for const parameters vs type parameters have
            // not been settled yet. https://github.com/rust-lang/rust/issues/44580
            let mut trailing_or_empty = true;
            for param in self.0.params.pairs() {
                if let GenericParam::Lifetime(_) = **param.value() {
                    param.to_tokens(tokens);
                    trailing_or_empty = param.punct().is_some();
                }
            }
            for param in self.0.params.pairs() {
                if let GenericParam::Lifetime(_) = **param.value() {
                    continue;
                }
                if !trailing_or_empty {
                    <Token![,]>::default().to_tokens(tokens);
                    trailing_or_empty = true;
                }
                match **param.value() {
                    GenericParam::Lifetime(_) => unreachable!(),
                    GenericParam::Type(ref param) => {
                        // Leave off the type parameter defaults
                        tokens.append_all(param.attrs.outer());
                        param.ident.to_tokens(tokens);
                        if !param.bounds.is_empty() {
                            TokensOrDefault(&param.colon_token).to_tokens(tokens);
                            param.bounds.to_tokens(tokens);
                        }
                    }
                    GenericParam::Const(ref param) => {
                        // Leave off the const parameter defaults
                        tokens.append_all(param.attrs.outer());
                        param.const_token.to_tokens(tokens);
                        param.ident.to_tokens(tokens);
                        param.colon_token.to_tokens(tokens);
                        param.ty.to_tokens(tokens);
                    }
                }
                param.punct().to_tokens(tokens);
            }

            TokensOrDefault(&self.0.gt_token).to_tokens(tokens);
        }
    }

    impl<'a> ToTokens for TypeGenerics<'a> {
        // Prints only the parameter names and lifetimes, as used after the
        // type name: no attributes, no bounds, no defaults.
        fn to_tokens(&self, tokens: &mut Tokens) {
            if self.0.params.is_empty() {
                return;
            }

            TokensOrDefault(&self.0.lt_token).to_tokens(tokens);

            // Print lifetimes before types and consts, regardless of their
            // order in self.params.
            //
            // TODO: ordering rules for const parameters vs type parameters have
            // not been settled yet. https://github.com/rust-lang/rust/issues/44580
            let mut trailing_or_empty = true;
            for param in self.0.params.pairs() {
                if let GenericParam::Lifetime(ref def) = **param.value() {
                    // Leave off the lifetime bounds and attributes
                    def.lifetime.to_tokens(tokens);
                    param.punct().to_tokens(tokens);
                    trailing_or_empty = param.punct().is_some();
                }
            }
            for param in self.0.params.pairs() {
                if let GenericParam::Lifetime(_) = **param.value() {
                    continue;
                }
                if !trailing_or_empty {
                    <Token![,]>::default().to_tokens(tokens);
                    trailing_or_empty = true;
                }
                match **param.value() {
                    GenericParam::Lifetime(_) => unreachable!(),
                    GenericParam::Type(ref param) => {
                        // Leave off the type parameter defaults
                        param.ident.to_tokens(tokens);
                    }
                    GenericParam::Const(ref param) => {
                        // Leave off the const parameter defaults
                        param.ident.to_tokens(tokens);
                    }
                }
                param.punct().to_tokens(tokens);
            }

            TokensOrDefault(&self.0.gt_token).to_tokens(tokens);
        }
    }

    impl<'a> ToTokens for Turbofish<'a> {
        // `::` followed by the type generics; nothing when there are no
        // parameters.
        fn to_tokens(&self, tokens: &mut Tokens) {
            if !self.0.params.is_empty() {
                <Token![::]>::default().to_tokens(tokens);
                TypeGenerics(self.0).to_tokens(tokens);
            }
        }
    }

    impl ToTokens for BoundLifetimes {
        fn to_tokens(&self, tokens: &mut Tokens) {
            self.for_token.to_tokens(tokens);
            self.lt_token.to_tokens(tokens);
            self.lifetimes.to_tokens(tokens);
            self.gt_token.to_tokens(tokens);
        }
    }

    impl ToTokens for LifetimeDef {
        fn to_tokens(&self, tokens: &mut Tokens) {
            tokens.append_all(self.attrs.outer());
            self.lifetime.to_tokens(tokens);
            // The colon is printed only when there are bounds to follow it.
            if !self.bounds.is_empty() {
                TokensOrDefault(&self.colon_token).to_tokens(tokens);
                self.bounds.to_tokens(tokens);
            }
        }
    }

    impl ToTokens for TypeParam {
        fn to_tokens(&self, tokens: &mut Tokens) {
            tokens.append_all(self.attrs.outer());
            self.ident.to_tokens(tokens);
            // The colon is printed only when there are bounds to follow it.
            if !self.bounds.is_empty() {
                TokensOrDefault(&self.colon_token).to_tokens(tokens);
                self.bounds.to_tokens(tokens);
            }
            // Likewise `=` is printed only when a default type is present.
            if self.default.is_some() {
                TokensOrDefault(&self.eq_token).to_tokens(tokens);
                self.default.to_tokens(tokens);
            }
        }
    }

    impl ToTokens for TraitBound {
        fn to_tokens(&self, tokens: &mut Tokens) {
            self.modifier.to_tokens(tokens);
            self.lifetimes.to_tokens(tokens);
            self.path.to_tokens(tokens);
        }
    }

    impl ToTokens for TraitBoundModifier {
        fn to_tokens(&self, tokens: &mut Tokens) {
            match *self {
                // A plain bound prints nothing.
                TraitBoundModifier::None => {}
                TraitBoundModifier::Maybe(ref t) => t.to_tokens(tokens),
            }
        }
    }

    impl ToTokens for ConstParam {
        fn to_tokens(&self, tokens: &mut Tokens) {
            tokens.append_all(self.attrs.outer());
            self.const_token.to_tokens(tokens);
            self.ident.to_tokens(tokens);
            self.colon_token.to_tokens(tokens);
            self.ty.to_tokens(tokens);
            // `=` is printed only when a default value is present.
            if self.default.is_some() {
                TokensOrDefault(&self.eq_token).to_tokens(tokens);
                self.default.to_tokens(tokens);
            }
        }
    }

    impl ToTokens for WhereClause {
        fn to_tokens(&self, tokens: &mut Tokens) {
            self.where_token.to_tokens(tokens);
            self.predicates.to_tokens(tokens);
        }
    }

    impl ToTokens for PredicateType {
        fn to_tokens(&self, tokens: &mut Tokens) {
            self.lifetimes.to_tokens(tokens);
            self.bounded_ty.to_tokens(tokens);
            self.colon_token.to_tokens(tokens);
            self.bounds.to_tokens(tokens);
        }
    }

    impl ToTokens for PredicateLifetime {
        fn to_tokens(&self, tokens: &mut Tokens) {
            self.lifetime.to_tokens(tokens);
            // The colon is printed only when there are bounds to follow it.
            if !self.bounds.is_empty() {
                TokensOrDefault(&self.colon_token).to_tokens(tokens);
                self.bounds.to_tokens(tokens);
            }
        }
    }

    impl ToTokens for PredicateEq {
        fn to_tokens(&self, tokens: &mut Tokens) {
            self.lhs_ty.to_tokens(tokens);
            self.eq_token.to_tokens(tokens);
            self.rhs_ty.to_tokens(tokens);
        }
    }
}

295
third_party/rust/syn-0.12.12/src/ident.rs поставляемый
Просмотреть файл

@@ -1,295 +0,0 @@
// Copyright 2018 Syn Developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::borrow::Cow;
use std::cmp::Ordering;
use std::fmt::{self, Display};
use std::hash::{Hash, Hasher};
use proc_macro2::Term;
use unicode_xid::UnicodeXID;
use proc_macro2::Span;
/// A word of Rust code, which may be a keyword or legal variable name.
///
/// An identifier consists of at least one Unicode code point, the first of
/// which has the XID_Start property and the rest of which have the XID_Continue
/// property. An underscore may be used as the first character as long as it is
/// not the only character.
///
/// - The empty string is not an identifier. Use `Option<Ident>`.
/// - An underscore by itself is not an identifier. Use
/// `Token![_]` instead.
/// - A lifetime is not an identifier. Use `syn::Lifetime` instead.
///
/// An identifier constructed with `Ident::new` is permitted to be a Rust
/// keyword, though parsing one through its [`Synom`] implementation rejects
/// Rust keywords.
///
/// [`Synom`]: synom/trait.Synom.html
///
/// # Examples
///
/// A new ident can be created from a string using the `Ident::from` function.
/// Idents produced by `Ident::from` are set to resolve at the procedural macro
/// *def site* by default. A different span can be provided explicitly by using
/// `Ident::new`.
///
/// ```rust
/// extern crate syn;
/// extern crate proc_macro2;
///
/// use syn::Ident;
/// use proc_macro2::Span;
///
/// fn main() {
/// let def_ident = Ident::from("definitely");
/// let call_ident = Ident::new("calligraphy", Span::call_site());
///
/// println!("{} {}", def_ident, call_ident);
/// }
/// ```
///
/// An ident can be interpolated into a token stream using the `quote!` macro.
///
/// ```rust
/// #[macro_use]
/// extern crate quote;
///
/// extern crate syn;
/// use syn::Ident;
///
/// fn main() {
/// let ident = Ident::from("demo");
///
/// // Create a variable binding whose name is this ident.
/// let expanded = quote! { let #ident = 10; };
///
/// // Create a variable binding with a slightly different name.
/// let temp_ident = Ident::from(format!("new_{}", ident));
/// let expanded = quote! { let #temp_ident = 10; };
/// }
/// ```
///
/// A string representation of the ident is available through the `as_ref()` and
/// `to_string()` methods.
///
/// ```rust
/// # use syn::Ident;
/// # let ident = Ident::from("another_identifier");
/// #
/// // Examine the ident as a &str.
/// let ident_str = ident.as_ref();
/// if ident_str.len() > 60 {
/// println!("Very long identifier: {}", ident_str)
/// }
///
/// // Create a String from the ident.
/// let ident_string = ident.to_string();
/// give_away(ident_string);
///
/// fn give_away(s: String) { /* ... */ }
/// ```
#[derive(Copy, Clone, Debug)]
pub struct Ident {
    // The interned string content of the identifier.
    term: Term,
    // Source location carried along when the ident is emitted back into a
    // token stream.
    pub span: Span,
}
impl Ident {
    /// Creates an ident with the given string representation.
    ///
    /// # Panics
    ///
    /// Panics if the input string is neither a keyword nor a legal variable
    /// name.
    pub fn new(s: &str, span: Span) -> Self {
        // Reject the handful of term-shaped inputs that have dedicated
        // representations elsewhere in Syn, in the same order as always:
        // empty string, lifetime, lone underscore, pure number.
        if s.is_empty() {
            panic!("ident is not allowed to be empty; use Option<Ident>");
        }
        if s.starts_with('\'') {
            panic!("ident is not allowed to be a lifetime; use syn::Lifetime");
        }
        if s == "_" {
            panic!("`_` is not a valid ident; use syn::token::Underscore");
        }
        if s.bytes().all(|b| b'0' <= b && b <= b'9') {
            panic!("ident cannot be a number, use syn::Index instead");
        }

        // The first code point must be XID_Start or `_`, and every
        // remaining code point must be XID_Continue.
        let mut chars = s.chars();
        let first = chars.next().unwrap();
        let valid = (UnicodeXID::is_xid_start(first) || first == '_')
            && chars.all(|ch| UnicodeXID::is_xid_continue(ch));
        if !valid {
            panic!("{:?} is not a valid ident", s);
        }

        Ident {
            term: Term::intern(s),
            span: span,
        }
    }
}
// String conversions resolve the new ident at the macro definition site;
// use `Ident::new` directly to choose a different span.
impl<'a> From<&'a str> for Ident {
    fn from(s: &str) -> Self {
        Ident::new(s, Span::def_site())
    }
}

// Keyword tokens convert to idents of the same text, reusing the span
// stored in the token.
impl From<Token![self]> for Ident {
    fn from(tok: Token![self]) -> Self {
        Ident::new("self", tok.0)
    }
}

impl From<Token![Self]> for Ident {
    fn from(tok: Token![Self]) -> Self {
        Ident::new("Self", tok.0)
    }
}

impl From<Token![super]> for Ident {
    fn from(tok: Token![super]) -> Self {
        Ident::new("super", tok.0)
    }
}

impl From<Token![crate]> for Ident {
    fn from(tok: Token![crate]) -> Self {
        Ident::new("crate", tok.0)
    }
}

impl<'a> From<Cow<'a, str>> for Ident {
    fn from(s: Cow<'a, str>) -> Self {
        Ident::new(&s, Span::def_site())
    }
}

impl From<String> for Ident {
    fn from(s: String) -> Self {
        Ident::new(&s, Span::def_site())
    }
}
impl AsRef<str> for Ident {
fn as_ref(&self) -> &str {
self.term.as_str()
}
}
impl Display for Ident {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
self.term.as_str().fmt(formatter)
}
}
// Comparison and hashing all delegate to the ident's string content, so an
// `Ident` compares equal to any `AsRef<str>` with the same text and hashes
// like that text.
impl<T: ?Sized> PartialEq<T> for Ident
where
    T: AsRef<str>,
{
    fn eq(&self, other: &T) -> bool {
        let lhs: &str = self.as_ref();
        lhs == other.as_ref()
    }
}

impl Eq for Ident {}

impl PartialOrd for Ident {
    fn partial_cmp(&self, other: &Ident) -> Option<Ordering> {
        Some(Ord::cmp(self, other))
    }
}

impl Ord for Ident {
    fn cmp(&self, other: &Ident) -> Ordering {
        let lhs: &str = self.as_ref();
        let rhs: &str = other.as_ref();
        lhs.cmp(rhs)
    }
}

impl Hash for Ident {
    fn hash<H: Hasher>(&self, h: &mut H) {
        let text: &str = self.as_ref();
        text.hash(h);
    }
}
#[cfg(feature = "parsing")]
pub mod parsing {
    use super::*;
    use synom::Synom;
    use buffer::Cursor;
    use parse_error;
    use synom::PResult;

    impl Synom for Ident {
        fn parse(input: Cursor) -> PResult<Self> {
            // Take the next `Term` token from the cursor; any other token
            // kind is not an identifier.
            let (span, term, rest) = match input.term() {
                Some(term) => term,
                _ => return parse_error(),
            };
            // Lifetimes are lexed as terms beginning with `'`; reject them.
            if term.as_str().starts_with('\'') {
                return parse_error();
            }
            // Unlike `Ident::new`, parsing rejects Rust keywords.
            match term.as_str() {
                // From https://doc.rust-lang.org/grammar.html#keywords
                "abstract" | "alignof" | "as" | "become" | "box" | "break" | "const"
                | "continue" | "crate" | "do" | "else" | "enum" | "extern" | "false" | "final"
                | "fn" | "for" | "if" | "impl" | "in" | "let" | "loop" | "macro" | "match"
                | "mod" | "move" | "mut" | "offsetof" | "override" | "priv" | "proc" | "pub"
                | "pure" | "ref" | "return" | "Self" | "self" | "sizeof" | "static" | "struct"
                | "super" | "trait" | "true" | "type" | "typeof" | "unsafe" | "unsized" | "use"
                | "virtual" | "where" | "while" | "yield" => return parse_error(),
                _ => {}
            }
            Ok((
                Ident {
                    span: span,
                    term: term,
                },
                rest,
            ))
        }

        fn description() -> Option<&'static str> {
            Some("identifier")
        }
    }
}
#[cfg(feature = "printing")]
mod printing {
    use super::*;
    use quote::{ToTokens, Tokens};
    use proc_macro2::{TokenNode, TokenTree};

    impl ToTokens for Ident {
        /// Appends this ident to the stream as a single `Term` token
        /// carrying the ident's span.
        fn to_tokens(&self, tokens: &mut Tokens) {
            let tree = TokenTree {
                kind: TokenNode::Term(self.term),
                span: self.span,
            };
            tokens.append(tree)
        }
    }
}

1998
third_party/rust/syn-0.12.12/src/item.rs поставляемый

Разница между файлами не показана из-за своего большого размера Загрузить разницу

723
third_party/rust/syn-0.12.12/src/lib.rs поставляемый
Просмотреть файл

@@ -1,723 +0,0 @@
// Copyright 2018 Syn Developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Syn is a parsing library for parsing a stream of Rust tokens into a syntax
//! tree of Rust source code.
//!
//! Currently this library is geared toward the [custom derive] use case but
//! contains some APIs that may be useful for Rust procedural macros more
//! generally.
//!
//! [custom derive]: https://github.com/rust-lang/rfcs/blob/master/text/1681-macros-1.1.md
//!
//! - **Data structures** — Syn provides a complete syntax tree that can
//! represent any valid Rust source code. The syntax tree is rooted at
//! [`syn::File`] which represents a full source file, but there are other
//! entry points that may be useful to procedural macros including
//! [`syn::Item`], [`syn::Expr`] and [`syn::Type`].
//!
//! - **Custom derives** — Of particular interest to custom derives is
//! [`syn::DeriveInput`] which is any of the three legal input items to a
//! derive macro. An example below shows using this type in a library that can
//! derive implementations of a trait of your own.
//!
//! - **Parser combinators** — Parsing in Syn is built on a suite of public
//! parser combinator macros that you can use for parsing any token-based
//! syntax you dream up within a `functionlike!(...)` procedural macro. Every
//! syntax tree node defined by Syn is individually parsable and may be used
//! as a building block for custom syntaxes, or you may do it all yourself
//! working from the most primitive tokens.
//!
//! - **Location information** — Every token parsed by Syn is associated with a
//! `Span` that tracks line and column information back to the source of that
//! token. These spans allow a procedural macro to display detailed error
//! messages pointing to all the right places in the user's code. There is an
//! example of this below.
//!
//! - **Feature flags** — Functionality is aggressively feature gated so your
//! procedural macros enable only what they need, and do not pay in compile
//! time for all the rest.
//!
//! [`syn::File`]: struct.File.html
//! [`syn::Item`]: enum.Item.html
//! [`syn::Expr`]: enum.Expr.html
//! [`syn::Type`]: enum.Type.html
//! [`syn::DeriveInput`]: struct.DeriveInput.html
//!
//! *Version requirement: Syn supports any compiler version back to Rust's very
//! first support for procedural macros in Rust 1.15.0. Some features especially
//! around error reporting are only available in newer compilers or on the
//! nightly channel.*
//!
//! ## Example of a custom derive
//!
//! The canonical custom derive using Syn looks like this. We write an ordinary
//! Rust function tagged with a `proc_macro_derive` attribute and the name of
//! the trait we are deriving. Any time that derive appears in the user's code,
//! the Rust compiler passes their data structure as tokens into our macro. We
//! get to execute arbitrary Rust code to figure out what to do with those
//! tokens, then hand some tokens back to the compiler to compile into the
//! user's crate.
//!
//! [`TokenStream`]: https://doc.rust-lang.org/proc_macro/struct.TokenStream.html
//!
//! ```toml
//! [dependencies]
//! syn = "0.12"
//! quote = "0.4"
//!
//! [lib]
//! proc-macro = true
//! ```
//!
//! ```rust
//! extern crate proc_macro;
//! extern crate syn;
//!
//! #[macro_use]
//! extern crate quote;
//!
//! use proc_macro::TokenStream;
//! use syn::DeriveInput;
//!
//! # const IGNORE_TOKENS: &str = stringify! {
//! #[proc_macro_derive(MyMacro)]
//! # };
//! pub fn my_macro(input: TokenStream) -> TokenStream {
//! // Parse the input tokens into a syntax tree
//! let input: DeriveInput = syn::parse(input).unwrap();
//!
//! // Build the output, possibly using quasi-quotation
//! let expanded = quote! {
//! // ...
//! };
//!
//! // Hand the output tokens back to the compiler
//! expanded.into()
//! }
//! #
//! # fn main() {}
//! ```
//!
//! The [`heapsize`] example directory shows a complete working Macros 1.1
//! implementation of a custom derive. It works on any Rust compiler \>=1.15.0.
//! The example derives a `HeapSize` trait which computes an estimate of the
//! amount of heap memory owned by a value.
//!
//! [`heapsize`]: https://github.com/dtolnay/syn/tree/master/examples/heapsize
//!
//! ```rust
//! pub trait HeapSize {
//! /// Total number of bytes of heap memory owned by `self`.
//! fn heap_size_of_children(&self) -> usize;
//! }
//! ```
//!
//! The custom derive allows users to write `#[derive(HeapSize)]` on data
//! structures in their program.
//!
//! ```rust
//! # const IGNORE_TOKENS: &str = stringify! {
//! #[derive(HeapSize)]
//! # };
//! struct Demo<'a, T: ?Sized> {
//! a: Box<T>,
//! b: u8,
//! c: &'a str,
//! d: String,
//! }
//! ```
//!
//! ## Spans and error reporting
//!
//! The [`heapsize2`] example directory is an extension of the `heapsize`
//! example that demonstrates some of the hygiene and error reporting properties
//! of Macros 2.0. This example currently requires a nightly Rust compiler
//! \>=1.24.0-nightly but we are working to stabilize all of the APIs involved.
//!
//! [`heapsize2`]: https://github.com/dtolnay/syn/tree/master/examples/heapsize2
//!
//! The token-based procedural macro API provides great control over where the
//! compiler's error messages are displayed in user code. Consider the error the
//! user sees if one of their field types does not implement `HeapSize`.
//!
//! ```rust
//! # const IGNORE_TOKENS: &str = stringify! {
//! #[derive(HeapSize)]
//! # };
//! struct Broken {
//! ok: String,
//! bad: std::thread::Thread,
//! }
//! ```
//!
//! In the Macros 1.1 string-based procedural macro world, the resulting error
//! would point unhelpfully to the invocation of the derive macro and not to the
//! actual problematic field.
//!
//! ```text
//! error[E0599]: no method named `heap_size_of_children` found for type `std::thread::Thread` in the current scope
//! --> src/main.rs:4:10
//! |
//! 4 | #[derive(HeapSize)]
//! | ^^^^^^^^
//! ```
//!
//! By tracking span information all the way through the expansion of a
//! procedural macro as shown in the `heapsize2` example, token-based macros in
//! Syn are able to trigger errors that directly pinpoint the source of the
//! problem.
//!
//! ```text
//! error[E0277]: the trait bound `std::thread::Thread: HeapSize` is not satisfied
//! --> src/main.rs:7:5
//! |
//! 7 | bad: std::thread::Thread,
//! | ^^^^^^^^^^^^^^^^^^^^^^^^ the trait `HeapSize` is not implemented for `Thread`
//! ```
//!
//! ## Parsing a custom syntax using combinators
//!
//! The [`lazy-static`] example directory shows the implementation of a
//! `functionlike!(...)` procedural macro in which the input tokens are parsed
//! using [`nom`]-style parser combinators.
//!
//! [`lazy-static`]: https://github.com/dtolnay/syn/tree/master/examples/lazy-static
//! [`nom`]: https://github.com/Geal/nom
//!
//! The example reimplements the popular `lazy_static` crate from crates.io as a
//! procedural macro.
//!
//! ```
//! # macro_rules! lazy_static {
//! # ($($tt:tt)*) => {}
//! # }
//! #
//! lazy_static! {
//! static ref USERNAME: Regex = Regex::new("^[a-z0-9_-]{3,16}$").unwrap();
//! }
//! ```
//!
//! The implementation shows how to trigger custom warnings and error messages
//! on the macro input.
//!
//! ```text
//! warning: come on, pick a more creative name
//! --> src/main.rs:10:16
//! |
//! 10 | static ref FOO: String = "lazy_static".to_owned();
//! | ^^^
//! ```
//!
//! ## Debugging
//!
//! When developing a procedural macro it can be helpful to look at what the
//! generated code looks like. Use `cargo rustc -- -Zunstable-options
//! --pretty=expanded` or the [`cargo expand`] subcommand.
//!
//! [`cargo expand`]: https://github.com/dtolnay/cargo-expand
//!
//! To show the expanded code for some crate that uses your procedural macro,
//! run `cargo expand` from that crate. To show the expanded code for one of
//! your own test cases, run `cargo expand --test the_test_case` where the last
//! argument is the name of the test file without the `.rs` extension.
//!
//! This write-up by Brandon W Maister discusses debugging in more detail:
//! [Debugging Rust's new Custom Derive system][debugging].
//!
//! [debugging]: https://quodlibetor.github.io/posts/debugging-rusts-new-custom-derive-system/
//!
//! ## Optional features
//!
//! Syn puts a lot of functionality behind optional features in order to
//! optimize compile time for the most common use cases. The following features
//! are available.
//!
//! - **`derive`** *(enabled by default)* — Data structures for representing the
//! possible input to a custom derive, including structs and enums and types.
//! - **`full`** — Data structures for representing the syntax tree of all valid
//! Rust source code, including items and expressions.
//! - **`parsing`** *(enabled by default)* — Ability to parse input tokens into
//! a syntax tree node of a chosen type.
//! - **`printing`** *(enabled by default)* — Ability to print a syntax tree
//! node as tokens of Rust source code.
//! - **`visit`** — Trait for traversing a syntax tree.
//! - **`visit-mut`** — Trait for traversing and mutating in place a syntax
//! tree.
//! - **`fold`** — Trait for transforming an owned syntax tree.
//! - **`clone-impls`** *(enabled by default)* — Clone impls for all syntax tree
//! types.
//! - **`extra-traits`** — Debug, Eq, PartialEq, Hash impls for all syntax tree
//! types.
// Syn types in rustdoc of other crates get linked to here.
#![doc(html_root_url = "https://docs.rs/syn/0.12.12")]
#![cfg_attr(feature = "cargo-clippy",
allow(const_static_lifetime, doc_markdown, large_enum_variant, match_bool,
redundant_closure, needless_pass_by_value))]
extern crate proc_macro2;
extern crate proc_macro;
extern crate unicode_xid;
#[cfg(feature = "printing")]
extern crate quote;
#[cfg(feature = "parsing")]
#[macro_use]
#[doc(hidden)]
pub mod parsers;
#[macro_use]
mod macros;
#[macro_use]
pub mod token;
#[cfg(any(feature = "full", feature = "derive"))]
mod attr;
#[cfg(any(feature = "full", feature = "derive"))]
pub use attr::{AttrStyle, Attribute, Meta, MetaList, MetaNameValue, NestedMeta};
#[cfg(any(feature = "full", feature = "derive"))]
mod data;
#[cfg(any(feature = "full", feature = "derive"))]
pub use data::{Field, Fields, FieldsNamed, FieldsUnnamed, Variant, VisCrate, VisPublic,
VisRestricted, Visibility};
#[cfg(any(feature = "full", feature = "derive"))]
mod expr;
#[cfg(any(feature = "full", feature = "derive"))]
pub use expr::{Expr, ExprAddrOf, ExprArray, ExprAssign, ExprAssignOp, ExprBinary, ExprBlock,
ExprBox, ExprBreak, ExprCall, ExprCast, ExprCatch, ExprClosure, ExprContinue,
ExprField, ExprForLoop, ExprGroup, ExprIf, ExprIfLet, ExprInPlace, ExprIndex,
ExprLit, ExprLoop, ExprMacro, ExprMatch, ExprMethodCall, ExprParen, ExprPath,
ExprRange, ExprRepeat, ExprReturn, ExprStruct, ExprTry, ExprTuple, ExprType,
ExprUnary, ExprUnsafe, ExprVerbatim, ExprWhile, ExprWhileLet, ExprYield, Index,
Member};
#[cfg(feature = "full")]
pub use expr::{Arm, Block, FieldPat, FieldValue, GenericMethodArgument, Label, Local,
MethodTurbofish, Pat, PatBox, PatIdent, PatLit, PatMacro, PatPath, PatRange,
PatRef, PatSlice, PatStruct, PatTuple, PatTupleStruct, PatVerbatim, PatWild,
RangeLimits, Stmt};
#[cfg(any(feature = "full", feature = "derive"))]
mod generics;
#[cfg(any(feature = "full", feature = "derive"))]
pub use generics::{BoundLifetimes, ConstParam, GenericParam, Generics, LifetimeDef, PredicateEq,
PredicateLifetime, PredicateType, TraitBound, TraitBoundModifier, TypeParam,
TypeParamBound, WhereClause, WherePredicate};
#[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))]
pub use generics::{ImplGenerics, Turbofish, TypeGenerics};
mod ident;
pub use ident::Ident;
#[cfg(feature = "full")]
mod item;
#[cfg(feature = "full")]
pub use item::{ArgCaptured, ArgSelf, ArgSelfRef, FnArg, FnDecl, ForeignItem, ForeignItemFn,
ForeignItemStatic, ForeignItemType, ForeignItemVerbatim, ImplItem, ImplItemConst,
ImplItemMacro, ImplItemMethod, ImplItemType, ImplItemVerbatim, Item, ItemConst,
ItemEnum, ItemExternCrate, ItemFn, ItemForeignMod, ItemImpl, ItemMacro, ItemMacro2,
ItemMod, ItemStatic, ItemStruct, ItemTrait, ItemType, ItemUnion, ItemUse,
ItemVerbatim, MethodSig, TraitItem, TraitItemConst, TraitItemMacro,
TraitItemMethod, TraitItemType, TraitItemVerbatim, UseGlob, UseList, UsePath,
UseTree};
#[cfg(feature = "full")]
mod file;
#[cfg(feature = "full")]
pub use file::File;
#[cfg(any(feature = "full", feature = "derive"))]
mod lifetime;
#[cfg(any(feature = "full", feature = "derive"))]
pub use lifetime::Lifetime;
#[cfg(any(feature = "full", feature = "derive"))]
mod lit;
#[cfg(any(feature = "full", feature = "derive"))]
pub use lit::{FloatSuffix, IntSuffix, Lit, LitBool, LitByte, LitByteStr, LitChar, LitFloat,
LitInt, LitStr, LitVerbatim, StrStyle};
#[cfg(any(feature = "full", feature = "derive"))]
mod mac;
#[cfg(any(feature = "full", feature = "derive"))]
pub use mac::{Macro, MacroDelimiter};
#[cfg(any(feature = "full", feature = "derive"))]
mod derive;
#[cfg(feature = "derive")]
pub use derive::{Data, DataEnum, DataStruct, DataUnion, DeriveInput};
#[cfg(any(feature = "full", feature = "derive"))]
mod op;
#[cfg(any(feature = "full", feature = "derive"))]
pub use op::{BinOp, UnOp};
#[cfg(any(feature = "full", feature = "derive"))]
mod ty;
#[cfg(any(feature = "full", feature = "derive"))]
pub use ty::{Abi, BareFnArg, BareFnArgName, ReturnType, Type, TypeArray, TypeBareFn, TypeGroup,
TypeImplTrait, TypeInfer, TypeMacro, TypeNever, TypeParen, TypePath, TypePtr,
TypeReference, TypeSlice, TypeTraitObject, TypeTuple, TypeVerbatim};
#[cfg(any(feature = "full", feature = "derive"))]
mod path;
#[cfg(any(feature = "full", feature = "derive"))]
pub use path::{AngleBracketedGenericArguments, Binding, GenericArgument,
ParenthesizedGenericArguments, Path, PathArguments, PathSegment, QSelf};
#[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))]
pub use path::PathTokens;
#[cfg(feature = "parsing")]
pub mod buffer;
#[cfg(feature = "parsing")]
pub mod synom;
pub mod punctuated;
#[cfg(any(feature = "full", feature = "derive"))]
mod tt;
// Not public API except the `parse_quote!` macro.
#[cfg(feature = "parsing")]
#[doc(hidden)]
pub mod parse_quote;
#[cfg(all(feature = "parsing", feature = "printing"))]
pub mod spanned;
mod gen {
/// Syntax tree traversal to walk a shared borrow of a syntax tree.
///
/// Each method of the [`Visit`] trait is a hook that can be overridden to
/// customize the behavior when visiting the corresponding type of node. By
/// default, every method recursively visits the substructure of the input
/// by invoking the right visitor method of each of its fields.
///
/// [`Visit`]: trait.Visit.html
///
/// ```rust
/// # use syn::{Attribute, BinOp, Expr, ExprBinary};
/// #
/// pub trait Visit<'ast> {
/// /* ... */
///
/// fn visit_expr_binary(&mut self, node: &'ast ExprBinary) {
/// for attr in &node.attrs {
/// self.visit_attribute(attr);
/// }
/// self.visit_expr(&*node.left);
/// self.visit_bin_op(&node.op);
/// self.visit_expr(&*node.right);
/// }
///
/// /* ... */
/// # fn visit_attribute(&mut self, node: &'ast Attribute);
/// # fn visit_expr(&mut self, node: &'ast Expr);
/// # fn visit_bin_op(&mut self, node: &'ast BinOp);
/// }
/// ```
///
/// *This module is available if Syn is built with the `"visit"` feature.*
#[cfg(feature = "visit")]
pub mod visit;
/// Syntax tree traversal to mutate an exclusive borrow of a syntax tree in
/// place.
///
/// Each method of the [`VisitMut`] trait is a hook that can be overridden
/// to customize the behavior when mutating the corresponding type of node.
/// By default, every method recursively visits the substructure of the
/// input by invoking the right visitor method of each of its fields.
///
/// [`VisitMut`]: trait.VisitMut.html
///
/// ```rust
/// # use syn::{Attribute, BinOp, Expr, ExprBinary};
/// #
/// pub trait VisitMut {
/// /* ... */
///
/// fn visit_expr_binary_mut(&mut self, node: &mut ExprBinary) {
/// for attr in &mut node.attrs {
/// self.visit_attribute_mut(attr);
/// }
/// self.visit_expr_mut(&mut *node.left);
/// self.visit_bin_op_mut(&mut node.op);
/// self.visit_expr_mut(&mut *node.right);
/// }
///
/// /* ... */
/// # fn visit_attribute_mut(&mut self, node: &mut Attribute);
/// # fn visit_expr_mut(&mut self, node: &mut Expr);
/// # fn visit_bin_op_mut(&mut self, node: &mut BinOp);
/// }
/// ```
///
/// *This module is available if Syn is built with the `"visit-mut"`
/// feature.*
#[cfg(feature = "visit-mut")]
pub mod visit_mut;
/// Syntax tree traversal to transform the nodes of an owned syntax tree.
///
/// Each method of the [`Fold`] trait is a hook that can be overridden to
/// customize the behavior when transforming the corresponding type of node.
/// By default, every method recursively visits the substructure of the
/// input by invoking the right visitor method of each of its fields.
///
/// [`Fold`]: trait.Fold.html
///
/// ```rust
/// # use syn::{Attribute, BinOp, Expr, ExprBinary};
/// #
/// pub trait Fold {
/// /* ... */
///
/// fn fold_expr_binary(&mut self, node: ExprBinary) -> ExprBinary {
/// ExprBinary {
/// attrs: node.attrs
/// .into_iter()
/// .map(|attr| self.fold_attribute(attr))
/// .collect(),
/// left: Box::new(self.fold_expr(*node.left)),
/// op: self.fold_bin_op(node.op),
/// right: Box::new(self.fold_expr(*node.right)),
/// }
/// }
///
/// /* ... */
/// # fn fold_attribute(&mut self, node: Attribute) -> Attribute;
/// # fn fold_expr(&mut self, node: Expr) -> Expr;
/// # fn fold_bin_op(&mut self, node: BinOp) -> BinOp;
/// }
/// ```
///
/// *This module is available if Syn is built with the `"fold"` feature.*
#[cfg(feature = "fold")]
pub mod fold;
#[cfg(any(feature = "full", feature = "derive"))]
#[path = "../gen_helper.rs"]
mod helper;
}
pub use gen::*;
////////////////////////////////////////////////////////////////////////////////
#[cfg(feature = "parsing")]
use synom::{Synom, Parser};
#[cfg(feature = "parsing")]
mod error;
#[cfg(feature = "parsing")]
use error::ParseError;
// Not public API.
#[cfg(feature = "parsing")]
#[doc(hidden)]
pub use error::parse_error;
/// Parse tokens of source code into the chosen syntax tree node.
///
/// This is preferred over parsing a string because tokens are able to preserve
/// information about where in the user's code they were originally written (the
/// "span" of the token), possibly allowing the compiler to produce better error
/// messages.
///
/// This function parses a `proc_macro::TokenStream` which is the type used for
/// interop with the compiler in a procedural macro. To parse a
/// `proc_macro2::TokenStream`, use [`syn::parse2`] instead.
///
/// [`syn::parse2`]: fn.parse2.html
///
/// *This function is available if Syn is built with the `"parsing"` feature.*
///
/// # Examples
///
/// ```rust
/// extern crate proc_macro;
/// use proc_macro::TokenStream;
///
/// extern crate syn;
///
/// #[macro_use]
/// extern crate quote;
///
/// use syn::DeriveInput;
///
/// # const IGNORE_TOKENS: &str = stringify! {
/// #[proc_macro_derive(MyMacro)]
/// # };
/// pub fn my_macro(input: TokenStream) -> TokenStream {
/// // Parse the tokens into a syntax tree
/// let ast: DeriveInput = syn::parse(input).unwrap();
///
/// // Build the output, possibly using quasi-quotation
/// let expanded = quote! {
/// /* ... */
/// };
///
/// // Convert into a token stream and return it
/// expanded.into()
/// }
/// #
/// # fn main() {}
/// ```
#[cfg(feature = "parsing")]
pub fn parse<T>(tokens: proc_macro::TokenStream) -> Result<T, ParseError>
where
    T: Synom,
{
    // Convert the compiler-provided stream into a proc-macro2 stream and
    // delegate to the proc-macro2 entry point.
    let tokens2: proc_macro2::TokenStream = tokens.into();
    parse2(tokens2)
}
/// Parse a proc-macro2 token stream into the chosen syntax tree node.
///
/// This function parses a `proc_macro2::TokenStream` which is commonly useful
/// when the input comes from a node of the Syn syntax tree, for example the tts
/// of a [`Macro`] node. When in a procedural macro parsing the
/// `proc_macro::TokenStream` provided by the compiler, use [`syn::parse`]
/// instead.
///
/// [`Macro`]: struct.Macro.html
/// [`syn::parse`]: fn.parse.html
///
/// *This function is available if Syn is built with the `"parsing"` feature.*
#[cfg(feature = "parsing")]
pub fn parse2<T>(tokens: proc_macro2::TokenStream) -> Result<T, ParseError>
where
    T: Synom,
{
    // Run the type's parser; on failure, prefix the error with the node's
    // self-reported description when one is available.
    let parser = T::parse;
    match parser.parse2(tokens) {
        Ok(node) => Ok(node),
        Err(err) => match T::description() {
            Some(s) => Err(ParseError::new(format!("failed to parse {}: {}", s, err))),
            None => Err(err),
        },
    }
}
/// Parse a string of Rust code into the chosen syntax tree node.
///
/// *This function is available if Syn is built with the `"parsing"` feature.*
///
/// # Hygiene
///
/// Every span in the resulting syntax tree will be set to resolve at the macro
/// call site.
///
/// # Examples
///
/// ```rust
/// extern crate syn;
/// #
/// #
/// # type Result<T> = std::result::Result<T, Box<std::error::Error>>;
///
/// use syn::Expr;
///
/// fn run() -> Result<()> {
/// let code = "assert_eq!(u8::max_value(), 255)";
/// let expr = syn::parse_str::<Expr>(code)?;
/// println!("{:#?}", expr);
/// Ok(())
/// }
/// #
/// # fn main() { run().unwrap() }
/// ```
#[cfg(feature = "parsing")]
pub fn parse_str<T: Synom>(s: &str) -> Result<T, ParseError> {
    // Lex the source string into a token stream, then parse the tokens.
    if let Ok(tts) = s.parse() {
        parse2(tts)
    } else {
        Err(ParseError::new("error while lexing input string"))
    }
}
// FIXME the name parse_file makes it sound like you might pass in a path to a
// file, rather than the content.
/// Parse the content of a file of Rust code.
///
/// This is different from `syn::parse_str::<File>(content)` in two ways:
///
/// - It discards a leading byte order mark `\u{FEFF}` if the file has one.
/// - It preserves the shebang line of the file, such as `#!/usr/bin/env rustx`.
///
/// If present, either of these would be an error using `from_str`.
///
/// *This function is available if Syn is built with the `"parsing"` and `"full"` features.*
///
/// # Examples
///
/// ```rust,no_run
/// extern crate syn;
/// #
/// #
/// # type Result<T> = std::result::Result<T, Box<std::error::Error>>;
///
/// use std::fs::File;
/// use std::io::Read;
///
/// fn run() -> Result<()> {
/// let mut file = File::open("path/to/code.rs")?;
/// let mut content = String::new();
/// file.read_to_string(&mut content)?;
///
/// let ast = syn::parse_file(&content)?;
/// if let Some(shebang) = ast.shebang {
/// println!("{}", shebang);
/// }
/// println!("{} items", ast.items.len());
///
/// Ok(())
/// }
/// #
/// # fn main() { run().unwrap() }
/// ```
#[cfg(all(feature = "parsing", feature = "full"))]
pub fn parse_file(mut content: &str) -> Result<File, ParseError> {
    // A leading byte order mark would otherwise be rejected by the lexer.
    const BOM: &'static str = "\u{feff}";
    if content.starts_with(BOM) {
        content = &content[BOM.len()..];
    }

    // Split off a shebang line (`#!...`) while keeping inner attributes
    // (`#![...]`) intact. If the file has no newline at all, the entire
    // content is the shebang.
    let mut shebang = None;
    if content.starts_with("#!") && !content.starts_with("#![") {
        let line_end = content.find('\n').unwrap_or(content.len());
        shebang = Some(content[..line_end].to_string());
        content = &content[line_end..];
    }

    let mut file: File = parse_str(content)?;
    file.shebang = shebang;
    Ok(file)
}
#[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))]
/// Prints the wrapped token if present; otherwise prints `T::default()`.
struct TokensOrDefault<'a, T: 'a>(&'a Option<T>);
#[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))]
impl<'a, T> quote::ToTokens for TokensOrDefault<'a, T>
where
    T: quote::ToTokens + Default,
{
    fn to_tokens(&self, tokens: &mut quote::Tokens) {
        if let Some(ref t) = *self.0 {
            t.to_tokens(tokens);
        } else {
            T::default().to_tokens(tokens);
        }
    }
}

161
third_party/rust/syn-0.12.12/src/lifetime.rs поставляемый
Просмотреть файл

@ -1,161 +0,0 @@
// Copyright 2018 Syn Developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::cmp::Ordering;
use std::fmt::{self, Display};
use std::hash::{Hash, Hasher};
use proc_macro2::{Span, Term};
use unicode_xid::UnicodeXID;
/// A Rust lifetime: `'a`.
///
/// Lifetime names must conform to the following rules:
///
/// - Must start with an apostrophe.
/// - Must not consist of just an apostrophe: `'`.
/// - Must not consist of apostrophe + underscore: `'_`.
/// - Character after the apostrophe must be `_` or a Unicode code point with
///   the XID_Start property.
/// - All following characters must be Unicode code points with the XID_Continue
///   property.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[cfg_attr(feature = "extra-traits", derive(Debug))]
#[derive(Copy, Clone)]
pub struct Lifetime {
    // The full name of the lifetime, including the leading apostrophe
    // (validated by `Lifetime::new`).
    term: Term,
    // Location of the lifetime in the source text.
    pub span: Span,
}
impl Lifetime {
    /// Creates a new `Lifetime` from a term and a span.
    ///
    /// Panics unless the term is a syntactically valid lifetime name: it
    /// must begin with an apostrophe, must not be just `'` or `'_`, and the
    /// remainder must start with `_` or an XID_Start character followed only
    /// by XID_Continue characters.
    pub fn new(term: Term, span: Span) -> Self {
        let s = term.as_str();

        if !s.starts_with('\'') {
            panic!(
                "lifetime name must start with apostrophe as in \"'a\", \
                 got {:?}",
                s
            );
        }
        if s == "'" {
            panic!("lifetime name must not be empty");
        }
        if s == "'_" {
            panic!("\"'_\" is not a valid lifetime name");
        }

        // Everything after the apostrophe must form a valid identifier.
        fn ident_ok(ident: &str) -> bool {
            let mut chars = ident.chars();
            match chars.next() {
                Some(first) if first == '_' || UnicodeXID::is_xid_start(first) => {
                    chars.all(|ch| UnicodeXID::is_xid_continue(ch))
                }
                _ => false,
            }
        }
        if !ident_ok(&s[1..]) {
            panic!("{:?} is not a valid lifetime name", s);
        }

        Lifetime {
            term: term,
            span: span,
        }
    }
}
// Formats as the lifetime's name, e.g. `'a`.
impl Display for Lifetime {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        self.term.as_str().fmt(formatter)
    }
}
// Equality, ordering and hashing below are all based on the lifetime's name
// alone; the `span` field is not taken into account, so two lifetimes with
// the same name at different source locations compare equal.
impl PartialEq for Lifetime {
    fn eq(&self, other: &Lifetime) -> bool {
        self.term.as_str() == other.term.as_str()
    }
}
impl Eq for Lifetime {}
impl PartialOrd for Lifetime {
    fn partial_cmp(&self, other: &Lifetime) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl Ord for Lifetime {
    fn cmp(&self, other: &Lifetime) -> Ordering {
        self.term.as_str().cmp(other.term.as_str())
    }
}
impl Hash for Lifetime {
    fn hash<H: Hasher>(&self, h: &mut H) {
        self.term.as_str().hash(h)
    }
}
#[cfg(feature = "parsing")]
pub mod parsing {
    use super::*;
    use synom::Synom;
    use buffer::Cursor;
    use parse_error;
    use synom::PResult;

    impl Synom for Lifetime {
        // A lifetime arrives from the tokenizer as a single term whose
        // string starts with an apostrophe; anything else is a parse error.
        fn parse(input: Cursor) -> PResult<Self> {
            if let Some((span, term, rest)) = input.term() {
                if term.as_str().starts_with('\'') {
                    let lifetime = Lifetime {
                        term: term,
                        span: span,
                    };
                    return Ok((lifetime, rest));
                }
            }
            parse_error()
        }

        fn description() -> Option<&'static str> {
            Some("lifetime")
        }
    }
}
#[cfg(feature = "printing")]
mod printing {
    use super::*;
    use quote::{ToTokens, Tokens};
    use proc_macro2::{TokenNode, TokenTree};

    impl ToTokens for Lifetime {
        // Emitted as a single `Term` token tree carrying the stored span.
        fn to_tokens(&self, tokens: &mut Tokens) {
            let tree = TokenTree {
                span: self.span,
                kind: TokenNode::Term(self.term),
            };
            tokens.append(tree);
        }
    }
}

1028
third_party/rust/syn-0.12.12/src/lit.rs поставляемый

Разница между файлами не показана из-за своего большого размера Загрузить разницу

115
third_party/rust/syn-0.12.12/src/mac.rs поставляемый
Просмотреть файл

@ -1,115 +0,0 @@
// Copyright 2018 Syn Developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use super::*;
use proc_macro2::TokenStream;
use token::{Brace, Bracket, Paren};
#[cfg(feature = "extra-traits")]
use std::hash::{Hash, Hasher};
#[cfg(feature = "extra-traits")]
use tt::TokenStreamHelper;
ast_struct! {
    /// A macro invocation: `println!("{}", mac)`.
    ///
    /// *This type is available if Syn is built with the `"derive"` or `"full"`
    /// feature.*
    pub struct Macro #manual_extra_traits {
        // Path of the macro being invoked, e.g. `println` in `println!(..)`.
        pub path: Path,
        pub bang_token: Token![!],
        // Which of `(...)`, `{...}` or `[...]` surrounds the body.
        pub delimiter: MacroDelimiter,
        // The tokens of the macro body, not including the delimiters; the
        // printing impl re-wraps them in `delimiter` when emitting tokens.
        pub tts: TokenStream,
    }
}
ast_enum! {
    /// A grouping token that surrounds a macro body: `m!(...)` or `m!{...}` or `m![...]`.
    ///
    /// *This type is available if Syn is built with the `"derive"` or `"full"`
    /// feature.*
    pub enum MacroDelimiter {
        Paren(Paren),
        Brace(Brace),
        Bracket(Bracket),
    }
}
// Hand-written impls (the struct opts out via `#manual_extra_traits`): the
// token body is compared and hashed through the `TokenStreamHelper` wrapper
// rather than directly — NOTE(review): presumably because `TokenStream`
// itself does not provide these impls; confirm against tt.rs.
#[cfg(feature = "extra-traits")]
impl Eq for Macro {}
#[cfg(feature = "extra-traits")]
impl PartialEq for Macro {
    fn eq(&self, other: &Self) -> bool {
        self.path == other.path && self.bang_token == other.bang_token
            && self.delimiter == other.delimiter
            && TokenStreamHelper(&self.tts) == TokenStreamHelper(&other.tts)
    }
}
#[cfg(feature = "extra-traits")]
impl Hash for Macro {
    // Hashes the same fields, in the same order, that `eq` compares.
    fn hash<H>(&self, state: &mut H)
    where
        H: Hasher,
    {
        self.path.hash(state);
        self.bang_token.hash(state);
        self.delimiter.hash(state);
        TokenStreamHelper(&self.tts).hash(state);
    }
}
#[cfg(feature = "parsing")]
pub mod parsing {
    use super::*;
    use synom::Synom;
    impl Synom for Macro {
        // Grammar: a mod-style path, a `!`, then a delimited token body.
        // `tt::delimited` yields the delimiter and the enclosed tokens as a
        // pair, which fill `delimiter` and `tts` respectively.
        named!(parse -> Self, do_parse!(
            what: call!(Path::parse_mod_style) >>
            bang: punct!(!) >>
            body: call!(tt::delimited) >>
            (Macro {
                path: what,
                bang_token: bang,
                delimiter: body.0,
                tts: body.1,
            })
        ));
        fn description() -> Option<&'static str> {
            Some("macro invocation")
        }
    }
}
#[cfg(feature = "printing")]
mod printing {
    use super::*;
    use quote::{ToTokens, Tokens};

    impl ToTokens for Macro {
        // Prints `path ! <delimited body>`, re-wrapping the stored token
        // stream in whichever delimiter the invocation originally used.
        fn to_tokens(&self, tokens: &mut Tokens) {
            self.path.to_tokens(tokens);
            self.bang_token.to_tokens(tokens);
            let body = |tokens: &mut Tokens| self.tts.to_tokens(tokens);
            match self.delimiter {
                MacroDelimiter::Paren(ref d) => d.surround(tokens, body),
                MacroDelimiter::Brace(ref d) => d.surround(tokens, body),
                MacroDelimiter::Bracket(ref d) => d.surround(tokens, body),
            }
        }
    }
}

186
third_party/rust/syn-0.12.12/src/macros.rs поставляемый
Просмотреть файл

@ -1,186 +0,0 @@
// Copyright 2018 Syn Developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[cfg(any(feature = "full", feature = "derive"))]
macro_rules! ast_struct {
    // `#full` marker: the real struct only exists when the "full" feature is
    // enabled; otherwise a placeholder with a private `_noconstruct` field is
    // emitted, so the type name still resolves but the struct cannot be
    // constructed outside this crate.
    (
        $(#[$attr:meta])*
        pub struct $name:ident #full $($rest:tt)*
    ) => {
        #[cfg(feature = "full")]
        $(#[$attr])*
        #[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
        #[cfg_attr(feature = "clone-impls", derive(Clone))]
        pub struct $name $($rest)*
        #[cfg(not(feature = "full"))]
        $(#[$attr])*
        #[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
        #[cfg_attr(feature = "clone-impls", derive(Clone))]
        pub struct $name {
            _noconstruct: (),
        }
    };
    // `#manual_extra_traits` marker: Debug is still derived under
    // "extra-traits", but Eq/PartialEq/Hash are left for the caller to
    // implement by hand.
    (
        $(#[$attr:meta])*
        pub struct $name:ident #manual_extra_traits $($rest:tt)*
    ) => {
        $(#[$attr])*
        #[cfg_attr(feature = "extra-traits", derive(Debug))]
        #[cfg_attr(feature = "clone-impls", derive(Clone))]
        pub struct $name $($rest)*
    };
    // Default case: derive the full set of feature-gated trait impls.
    (
        $(#[$attr:meta])*
        pub struct $name:ident $($rest:tt)*
    ) => {
        $(#[$attr])*
        #[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
        #[cfg_attr(feature = "clone-impls", derive(Clone))]
        pub struct $name $($rest)*
    };
}
#[cfg(any(feature = "full", feature = "derive"))]
macro_rules! ast_enum {
    // Declares an AST enum with the standard feature-gated derives. The
    // optional `# tag` markers are matched but currently unused here.
    (
        $(#[$enum_attr:meta])*
        pub enum $name:ident $(# $tags:ident)* { $($variants:tt)* }
    ) => (
        $(#[$enum_attr])*
        #[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
        #[cfg_attr(feature = "clone-impls", derive(Clone))]
        pub enum $name {
            $($variants)*
        }
    )
}
#[cfg(any(feature = "full", feature = "derive"))]
// Defines an enum whose tuple variants each wrap a struct, generating:
// - the enum itself (via `ast_enum!`),
// - one struct definition per variant that declares a body (via
//   `maybe_ast_struct!`),
// - a `From<Member> for Enum` conversion per variant, and
// - a `ToTokens` impl dispatching on the variant (via `generate_to_tokens!`,
//   unless the trailing tokens say `do_not_generate_to_tokens`).
macro_rules! ast_enum_of_structs {
    (
        $(#[$enum_attr:meta])*
        pub enum $name:ident {
            $(
                $(#[$variant_attr:meta])*
                pub $variant:ident $( ($member:ident $($rest:tt)*) )*,
            )*
        }
        $($remaining:tt)*
    ) => (
        ast_enum! {
            $(#[$enum_attr])*
            pub enum $name {
                $(
                    $(#[$variant_attr])*
                    $variant $( ($member) )*,
                )*
            }
        }
        $(
            maybe_ast_struct! {
                $(#[$variant_attr])*
                $(
                    pub struct $member $($rest)*
                )*
            }
            $(
                impl From<$member> for $name {
                    fn from(e: $member) -> $name {
                        $name::$variant(e)
                    }
                }
            )*
        )*
        #[cfg(feature = "printing")]
        generate_to_tokens! {
            $($remaining)*
            ()
            tokens
            $name { $($variant $( [$($rest)*] )*,)* }
        }
    )
}
#[cfg(all(feature = "printing", any(feature = "full", feature = "derive")))]
// Incremental (tt-munching) construction of a `ToTokens` impl: each step
// peels one variant off the list and appends a match arm to the accumulator
// held in the leading `(...)`; the empty-list case emits the finished impl.
macro_rules! generate_to_tokens {
    // Opt-out requested by the caller: expand to nothing.
    (do_not_generate_to_tokens $($foo:tt)*) => ();
    // Unit variant: nothing to print.
    (($($arms:tt)*) $tokens:ident $name:ident { $variant:ident, $($next:tt)*}) => {
        generate_to_tokens!(
            ($($arms)* $name::$variant => {})
            $tokens $name { $($next)* }
        );
    };
    // Tuple variant: delegate printing to the wrapped value through
    // `to_tokens_call!`, forwarding the variant's struct-body tokens.
    (($($arms:tt)*) $tokens:ident $name:ident { $variant:ident [$($rest:tt)*], $($next:tt)*}) => {
        generate_to_tokens!(
            ($($arms)* $name::$variant(ref _e) => to_tokens_call!(_e, $tokens, $($rest)*),)
            $tokens $name { $($next)* }
        );
    };
    // All variants consumed: emit the accumulated impl.
    (($($arms:tt)*) $tokens:ident $name:ident {}) => {
        impl ::quote::ToTokens for $name {
            fn to_tokens(&self, $tokens: &mut ::quote::Tokens) {
                match *self {
                    $($arms)*
                }
            }
        }
    };
}
// With the "full" feature every variant is printable, so always delegate.
#[cfg(all(feature = "printing", feature = "full"))]
macro_rules! to_tokens_call {
    ($e:ident, $tokens:ident, $($rest:tt)*) => {
        $e.to_tokens($tokens)
    };
}
#[cfg(all(feature = "printing", feature = "derive", not(feature = "full")))]
macro_rules! to_tokens_call {
    // If the variant is marked as #full, don't auto-generate to-tokens for it.
    // (In a derive-only build such a variant cannot be constructed, so this
    // arm is unreachable at runtime.)
    ($e:ident, $tokens:ident, #full $($rest:tt)*) => {
        unreachable!()
    };
    ($e:ident, $tokens:ident, $($rest:tt)*) => {
        $e.to_tokens($tokens)
    };
}
#[cfg(any(feature = "full", feature = "derive"))]
// Emits a struct definition only when a body is present: a bare
// `pub struct Name` (no body tokens) expands to nothing, while anything
// else is forwarded to `ast_struct!`.
macro_rules! maybe_ast_struct {
    (
        $(#[$attr:meta])*
        $(
            pub struct $name:ident
        )*
    ) => ();
    ($($rest:tt)*) => (ast_struct! { $($rest)* });
}
#[cfg(all(feature = "parsing", any(feature = "full", feature = "derive")))]
// Boilerplate `Synom` impl: wires the given `named!` parser body and a fixed
// human-readable description onto type `$t`.
macro_rules! impl_synom {
    ($t:ident $description:tt $($parser:tt)+) => {
        impl Synom for $t {
            named!(parse -> Self, $($parser)+);
            fn description() -> Option<&'static str> {
                Some($description)
            }
        }
    }
}

223
third_party/rust/syn-0.12.12/src/op.rs поставляемый
Просмотреть файл

@ -1,223 +0,0 @@
// Copyright 2018 Syn Developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
ast_enum! {
    /// A binary operator: `+`, `+=`, `&`.
    ///
    /// *This type is available if Syn is built with the `"derive"` or `"full"`
    /// feature.*
    #[cfg_attr(feature = "clone-impls", derive(Copy))]
    // Each variant carries the punctuation token it was parsed from —
    // NOTE(review): presumably retained for its source span.
    pub enum BinOp {
        /// The `+` operator (addition)
        Add(Token![+]),
        /// The `-` operator (subtraction)
        Sub(Token![-]),
        /// The `*` operator (multiplication)
        Mul(Token![*]),
        /// The `/` operator (division)
        Div(Token![/]),
        /// The `%` operator (modulus)
        Rem(Token![%]),
        /// The `&&` operator (logical and)
        And(Token![&&]),
        /// The `||` operator (logical or)
        Or(Token![||]),
        /// The `^` operator (bitwise xor)
        BitXor(Token![^]),
        /// The `&` operator (bitwise and)
        BitAnd(Token![&]),
        /// The `|` operator (bitwise or)
        BitOr(Token![|]),
        /// The `<<` operator (shift left)
        Shl(Token![<<]),
        /// The `>>` operator (shift right)
        Shr(Token![>>]),
        /// The `==` operator (equality)
        Eq(Token![==]),
        /// The `<` operator (less than)
        Lt(Token![<]),
        /// The `<=` operator (less than or equal to)
        Le(Token![<=]),
        /// The `!=` operator (not equal to)
        Ne(Token![!=]),
        /// The `>=` operator (greater than or equal to)
        Ge(Token![>=]),
        /// The `>` operator (greater than)
        Gt(Token![>]),
        /// The `+=` operator
        AddEq(Token![+=]),
        /// The `-=` operator
        SubEq(Token![-=]),
        /// The `*=` operator
        MulEq(Token![*=]),
        /// The `/=` operator
        DivEq(Token![/=]),
        /// The `%=` operator
        RemEq(Token![%=]),
        /// The `^=` operator
        BitXorEq(Token![^=]),
        /// The `&=` operator
        BitAndEq(Token![&=]),
        /// The `|=` operator
        BitOrEq(Token![|=]),
        /// The `<<=` operator
        ShlEq(Token![<<=]),
        /// The `>>=` operator
        ShrEq(Token![>>=]),
    }
}
ast_enum! {
    /// A unary operator: `*`, `!`, `-`.
    ///
    /// *This type is available if Syn is built with the `"derive"` or `"full"`
    /// feature.*
    #[cfg_attr(feature = "clone-impls", derive(Copy))]
    // Each variant carries the punctuation token it was parsed from —
    // NOTE(review): presumably retained for its source span.
    pub enum UnOp {
        /// The `*` operator for dereferencing
        Deref(Token![*]),
        /// The `!` operator for logical inversion
        Not(Token![!]),
        /// The `-` operator for negation
        Neg(Token![-]),
    }
}
#[cfg(feature = "parsing")]
pub mod parsing {
    use super::*;
    use synom::Synom;
    impl BinOp {
        // Parses any non-assignment binary operator.
        //
        // The alternatives are deliberately ordered so that multi-character
        // operators come before operators that are a prefix of them (`&&`
        // before `&`, `<<` before `<`, `==`/`<=`/`>=` before `<` and `>`);
        // `alt!` tries alternatives in order, so reversing this would make
        // the shorter operator match first and break parsing.
        named!(pub parse_binop -> Self, alt!(
            punct!(&&) => { BinOp::And }
            |
            punct!(||) => { BinOp::Or }
            |
            punct!(<<) => { BinOp::Shl }
            |
            punct!(>>) => { BinOp::Shr }
            |
            punct!(==) => { BinOp::Eq }
            |
            punct!(<=) => { BinOp::Le }
            |
            punct!(!=) => { BinOp::Ne }
            |
            punct!(>=) => { BinOp::Ge }
            |
            punct!(+) => { BinOp::Add }
            |
            punct!(-) => { BinOp::Sub }
            |
            punct!(*) => { BinOp::Mul }
            |
            punct!(/) => { BinOp::Div }
            |
            punct!(%) => { BinOp::Rem }
            |
            punct!(^) => { BinOp::BitXor }
            |
            punct!(&) => { BinOp::BitAnd }
            |
            punct!(|) => { BinOp::BitOr }
            |
            punct!(<) => { BinOp::Lt }
            |
            punct!(>) => { BinOp::Gt }
        ));
        // Parses a compound-assignment operator (`+=`, `<<=`, ...). Only
        // needed when parsing expressions, hence gated on "full". The same
        // longest-prefix-first ordering applies (`<<=` after the two-char
        // operators is safe because none of them is a prefix of another here).
        #[cfg(feature = "full")]
        named!(pub parse_assign_op -> Self, alt!(
            punct!(+=) => { BinOp::AddEq }
            |
            punct!(-=) => { BinOp::SubEq }
            |
            punct!(*=) => { BinOp::MulEq }
            |
            punct!(/=) => { BinOp::DivEq }
            |
            punct!(%=) => { BinOp::RemEq }
            |
            punct!(^=) => { BinOp::BitXorEq }
            |
            punct!(&=) => { BinOp::BitAndEq }
            |
            punct!(|=) => { BinOp::BitOrEq }
            |
            punct!(<<=) => { BinOp::ShlEq }
            |
            punct!(>>=) => { BinOp::ShrEq }
        ));
    }
    impl Synom for UnOp {
        named!(parse -> Self, alt!(
            punct!(*) => { UnOp::Deref }
            |
            punct!(!) => { UnOp::Not }
            |
            punct!(-) => { UnOp::Neg }
        ));
        fn description() -> Option<&'static str> {
            Some("unary operator: `*`, `!`, or `-`")
        }
    }
}
#[cfg(feature = "printing")]
mod printing {
    use super::*;
    use quote::{ToTokens, Tokens};

    impl ToTokens for BinOp {
        fn to_tokens(&self, tokens: &mut Tokens) {
            // Every variant stores its own operator token, so printing simply
            // forwards to that token. The arms cannot be collapsed because
            // each variant holds a distinct token type.
            match *self {
                BinOp::Add(ref t) => t.to_tokens(tokens),
                BinOp::Sub(ref t) => t.to_tokens(tokens),
                BinOp::Mul(ref t) => t.to_tokens(tokens),
                BinOp::Div(ref t) => t.to_tokens(tokens),
                BinOp::Rem(ref t) => t.to_tokens(tokens),
                BinOp::And(ref t) => t.to_tokens(tokens),
                BinOp::Or(ref t) => t.to_tokens(tokens),
                BinOp::BitXor(ref t) => t.to_tokens(tokens),
                BinOp::BitAnd(ref t) => t.to_tokens(tokens),
                BinOp::BitOr(ref t) => t.to_tokens(tokens),
                BinOp::Shl(ref t) => t.to_tokens(tokens),
                BinOp::Shr(ref t) => t.to_tokens(tokens),
                BinOp::Eq(ref t) => t.to_tokens(tokens),
                BinOp::Lt(ref t) => t.to_tokens(tokens),
                BinOp::Le(ref t) => t.to_tokens(tokens),
                BinOp::Ne(ref t) => t.to_tokens(tokens),
                BinOp::Ge(ref t) => t.to_tokens(tokens),
                BinOp::Gt(ref t) => t.to_tokens(tokens),
                BinOp::AddEq(ref t) => t.to_tokens(tokens),
                BinOp::SubEq(ref t) => t.to_tokens(tokens),
                BinOp::MulEq(ref t) => t.to_tokens(tokens),
                BinOp::DivEq(ref t) => t.to_tokens(tokens),
                BinOp::RemEq(ref t) => t.to_tokens(tokens),
                BinOp::BitXorEq(ref t) => t.to_tokens(tokens),
                BinOp::BitAndEq(ref t) => t.to_tokens(tokens),
                BinOp::BitOrEq(ref t) => t.to_tokens(tokens),
                BinOp::ShlEq(ref t) => t.to_tokens(tokens),
                BinOp::ShrEq(ref t) => t.to_tokens(tokens),
            }
        }
    }

    impl ToTokens for UnOp {
        fn to_tokens(&self, tokens: &mut Tokens) {
            // Same pattern as `BinOp`: emit the stored operator token.
            match *self {
                UnOp::Deref(ref t) => t.to_tokens(tokens),
                UnOp::Not(ref t) => t.to_tokens(tokens),
                UnOp::Neg(ref t) => t.to_tokens(tokens),
            }
        }
    }
}

Просмотреть файл

@ -1,160 +0,0 @@
/// Quasi-quotation macro that accepts input like the [`quote!`] macro but uses
/// type inference to figure out a return type for those tokens.
///
/// [`quote!`]: https://docs.rs/quote/0.4/quote/index.html
///
/// The return type can be any syntax tree node that implements the [`Synom`]
/// trait.
///
/// [`Synom`]: synom/trait.Synom.html
///
/// ```
/// #[macro_use]
/// extern crate syn;
///
/// #[macro_use]
/// extern crate quote;
///
/// use syn::Stmt;
///
/// fn main() {
/// let name = quote!(v);
/// let ty = quote!(u8);
///
/// let stmt: Stmt = parse_quote! {
/// let #name: #ty = Default::default();
/// };
///
/// println!("{:#?}", stmt);
/// }
/// ```
///
/// *This macro is available if Syn is built with the `"parsing"` feature,
/// although interpolation of syntax tree nodes into the quoted tokens is only
/// supported if Syn is built with the `"printing"` feature as well.*
///
/// # Example
///
/// The following helper function adds a bound `T: HeapSize` to every type
/// parameter `T` in the input generics.
///
/// ```
/// # #[macro_use]
/// # extern crate syn;
/// #
/// # #[macro_use]
/// # extern crate quote;
/// #
/// # use syn::{Generics, GenericParam};
/// #
/// // Add a bound `T: HeapSize` to every type parameter T.
/// fn add_trait_bounds(mut generics: Generics) -> Generics {
/// for param in &mut generics.params {
/// if let GenericParam::Type(ref mut type_param) = *param {
/// type_param.bounds.push(parse_quote!(HeapSize));
/// }
/// }
/// generics
/// }
/// #
/// # fn main() {}
/// ```
///
/// # Special cases
///
/// This macro can parse the following additional types as a special case even
/// though they do not implement the `Synom` trait.
///
/// - [`Attribute`] — parses one attribute, allowing either outer like `#[...]`
/// or inner like `#![...]`
/// - [`Punctuated<T, P>`] — parses zero or more `T` separated by punctuation
/// `P` with optional trailing punctuation
///
/// [`Attribute`]: struct.Attribute.html
/// [`Punctuated<T, P>`]: punctuated/struct.Punctuated.html
///
/// # Panics
///
/// Panics if the tokens fail to parse as the expected syntax tree type. The
/// caller is responsible for ensuring that the input tokens are syntactically
/// valid.
#[macro_export]
macro_rules! parse_quote {
    ($($tt:tt)*) => {
        // Expand the interpolated tokens with `quote!`, convert them into a
        // token stream, and let type inference select the `ParseQuote` impl
        // that parses them into the caller's expected syntax tree type.
        $crate::parse_quote::parse($crate::parse_quote::From::from(quote!($($tt)*)))
    };
}
////////////////////////////////////////////////////////////////////////////////
// Can parse any type that implements Synom.
use synom::{Synom, Parser, PResult};
use buffer::Cursor;
use proc_macro2::TokenStream;
// Not public API.
#[doc(hidden)]
pub use std::convert::From;
// Not public API.
#[doc(hidden)]
pub fn parse<T: ParseQuote>(token_stream: TokenStream) -> T {
    // Run the inferred parser over the whole token stream; on failure panic,
    // including the node's description in the message when one is available.
    let parser = T::parse;
    parser.parse2(token_stream).unwrap_or_else(|err| match T::description() {
        Some(s) => panic!("failed to parse {}: {}", s, err),
        None => panic!("{}", err),
    })
}
// Not public API.
#[doc(hidden)]
pub trait ParseQuote: Sized {
    /// Parses `Self` starting at `input`; mirrors `Synom::parse` so the
    /// blanket impl below can delegate directly.
    fn parse(input: Cursor) -> PResult<Self>;
    /// Human-readable name of the node, used in `parse_quote!` panic messages.
    fn description() -> Option<&'static str>;
}
// Blanket impl: any type that implements `Synom` is parseable by
// `parse_quote!`, simply by delegating both trait methods.
impl<T> ParseQuote for T where T: Synom {
    fn parse(input: Cursor) -> PResult<Self> {
        <T as Synom>::parse(input)
    }
    fn description() -> Option<&'static str> {
        <T as Synom>::description()
    }
}
////////////////////////////////////////////////////////////////////////////////
// Any other types that we want `parse_quote!` to be able to parse.
use punctuated::Punctuated;
#[cfg(any(feature = "full", feature = "derive"))]
use Attribute;
// Special case: `Punctuated` does not implement `Synom`, but `parse_quote!`
// supports it by parsing zero or more `T` separated by `P`, with optional
// trailing punctuation.
impl<T, P> ParseQuote for Punctuated<T, P>
where
    T: Synom,
    P: Synom,
{
    named!(parse -> Self, call!(Punctuated::parse_terminated));
    fn description() -> Option<&'static str> {
        Some("punctuated sequence")
    }
}
// Special case: a single attribute, accepting either the outer `#[...]` form
// or the inner `#![...]` form (outer is tried first).
#[cfg(any(feature = "full", feature = "derive"))]
impl ParseQuote for Attribute {
    named!(parse -> Self, alt!(
        call!(Attribute::parse_outer)
        |
        call!(Attribute::parse_inner)
    ));
    fn description() -> Option<&'static str> {
        Some("attribute")
    }
}

1384
third_party/rust/syn-0.12.12/src/parsers.rs поставляемый

Разница между файлами не показана из-за своего большого размера Загрузить разницу

572
third_party/rust/syn-0.12.12/src/path.rs поставляемый
Просмотреть файл

@ -1,572 +0,0 @@
// Copyright 2018 Syn Developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use punctuated::Punctuated;
use super::*;
ast_struct! {
    /// A path at which a named item is exported: `std::collections::HashMap`.
    ///
    /// *This type is available if Syn is built with the `"derive"` or `"full"`
    /// feature.*
    pub struct Path {
        // The leading `::`, present only for fully qualified (global) paths.
        pub leading_colon: Option<Token![::]>,
        // The `::`-separated segments making up the path.
        pub segments: Punctuated<PathSegment, Token![::]>,
    }
}
impl Path {
    /// Returns `true` when the path begins with a leading `::`, i.e. it is a
    /// fully qualified ("global") path.
    pub fn global(&self) -> bool {
        match self.leading_colon {
            Some(_) => true,
            None => false,
        }
    }
}
/// A helper for printing a self-type qualified path as tokens.
///
/// ```rust
/// extern crate syn;
/// extern crate quote;
///
/// use syn::{QSelf, Path, PathTokens};
/// use quote::{Tokens, ToTokens};
///
/// struct MyNode {
/// qself: Option<QSelf>,
/// path: Path,
/// }
///
/// impl ToTokens for MyNode {
/// fn to_tokens(&self, tokens: &mut Tokens) {
/// PathTokens(&self.qself, &self.path).to_tokens(tokens);
/// }
/// }
/// #
/// # fn main() {}
/// ```
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// feature and the `"printing"` feature.*
#[cfg(feature = "printing")]
#[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
#[cfg_attr(feature = "clone-impls", derive(Clone))]
// Pairs the optional qualified-self with the path it qualifies, so the two
// can be printed together as a single `<Ty as Trait>::...` token sequence.
pub struct PathTokens<'a>(pub &'a Option<QSelf>, pub &'a Path);
impl<T> From<T> for Path
where
    T: Into<PathSegment>,
{
    /// Builds a single-segment path with no leading `::` and no trailing
    /// punctuation from anything convertible into a `PathSegment`.
    fn from(segment: T) -> Self {
        let mut segments = Punctuated::new();
        segments.push_value(segment.into());
        Path {
            leading_colon: None,
            segments: segments,
        }
    }
}
ast_struct! {
    /// A segment of a path together with any path arguments on that segment.
    ///
    /// *This type is available if Syn is built with the `"derive"` or `"full"`
    /// feature.*
    pub struct PathSegment {
        // The segment's name, e.g. `collections` in `std::collections::HashMap`.
        pub ident: Ident,
        // Generic arguments attached to this segment, or `PathArguments::None`.
        pub arguments: PathArguments,
    }
}
impl<T> From<T> for PathSegment
where
    T: Into<Ident>,
{
    /// Wraps a bare identifier as a path segment carrying no generic
    /// arguments.
    fn from(ident: T) -> Self {
        let arguments = PathArguments::None;
        PathSegment {
            ident: ident.into(),
            arguments: arguments,
        }
    }
}
ast_enum! {
    /// Angle bracketed or parenthesized arguments of a path segment.
    ///
    /// *This type is available if Syn is built with the `"derive"` or `"full"`
    /// feature.*
    ///
    /// ## Angle bracketed
    ///
    /// The `<'a, T>` in `std::slice::iter<'a, T>`.
    ///
    /// ## Parenthesized
    ///
    /// The `(A, B) -> C` in `Fn(A, B) -> C`.
    pub enum PathArguments {
        /// No arguments attached to this segment.
        None,
        /// The `<'a, T>` in `std::slice::iter<'a, T>`.
        AngleBracketed(AngleBracketedGenericArguments),
        /// The `(A, B) -> C` in `Fn(A, B) -> C`.
        Parenthesized(ParenthesizedGenericArguments),
    }
}
impl Default for PathArguments {
    // A freshly created segment carries no generic arguments.
    fn default() -> Self {
        PathArguments::None
    }
}
impl PathArguments {
    /// Reports whether this segment carries no generic arguments: either no
    /// argument list at all, or an angle-bracketed list containing nothing.
    /// A parenthesized list is never considered empty.
    pub fn is_empty(&self) -> bool {
        if let PathArguments::AngleBracketed(ref bracketed) = *self {
            bracketed.args.is_empty()
        } else {
            match *self {
                PathArguments::None => true,
                _ => false,
            }
        }
    }
}
ast_enum! {
    /// An individual generic argument, like `'a`, `T`, or `Item = T`.
    ///
    /// *This type is available if Syn is built with the `"derive"` or `"full"`
    /// feature.*
    pub enum GenericArgument {
        /// A lifetime argument.
        Lifetime(Lifetime),
        /// A type argument.
        Type(Type),
        /// A binding (equality constraint) on an associated type: the `Item =
        /// u8` in `Iterator<Item = u8>`.
        Binding(Binding),
        /// A const expression. Must be inside of a block.
        ///
        /// NOTE: Identity expressions are represented as Type arguments, as
        /// they are indistinguishable syntactically.
        Const(Expr),
    }
}
ast_struct! {
    /// Angle bracketed arguments of a path segment: the `<K, V>` in `HashMap<K,
    /// V>`.
    ///
    /// *This type is available if Syn is built with the `"derive"` or `"full"`
    /// feature.*
    pub struct AngleBracketedGenericArguments {
        // Optional leading `::` (as in `Vec::<u8>`).
        pub colon2_token: Option<Token![::]>,
        pub lt_token: Token![<],
        // The comma-separated arguments between `<` and `>`.
        pub args: Punctuated<GenericArgument, Token![,]>,
        pub gt_token: Token![>],
    }
}
ast_struct! {
    /// A binding (equality constraint) on an associated type: `Item = u8`.
    ///
    /// *This type is available if Syn is built with the `"derive"` or `"full"`
    /// feature.*
    pub struct Binding {
        // The associated type's name, e.g. `Item`.
        pub ident: Ident,
        pub eq_token: Token![=],
        // The type the associated type is bound to.
        pub ty: Type,
    }
}
ast_struct! {
    /// Arguments of a function path segment: the `(A, B) -> C` in `Fn(A,B) ->
    /// C`.
    ///
    /// *This type is available if Syn is built with the `"derive"` or `"full"`
    /// feature.*
    pub struct ParenthesizedGenericArguments {
        pub paren_token: token::Paren,
        /// `(A, B)`: the comma-separated input types.
        pub inputs: Punctuated<Type, Token![,]>,
        /// `C`: the return type.
        pub output: ReturnType,
    }
}
ast_struct! {
    /// The explicit Self type in a qualified path: the `T` in `<T as
    /// Display>::fmt`.
    ///
    /// The actual path, including the trait and the associated item, is stored
    /// separately. The `position` field represents the index of the associated
    /// item qualified with this Self type.
    ///
    /// ```text
    /// <Vec<T> as a::b::Trait>::AssociatedItem
    ///  ^~~~~~    ~~~~~~~~~~~~~~^
    ///  ty        position = 3
    ///
    /// <Vec<T>>::AssociatedItem
    ///  ^~~~~~   ^
    ///  ty       position = 0
    /// ```
    ///
    /// *This type is available if Syn is built with the `"derive"` or `"full"`
    /// feature.*
    pub struct QSelf {
        pub lt_token: Token![<],
        // The explicit Self type itself.
        pub ty: Box<Type>,
        // Index into the path's segments; see the diagram above.
        pub position: usize,
        // The `as` token, present in the `<T as Trait>` form.
        pub as_token: Option<Token![as]>,
        pub gt_token: Token![>],
    }
}
#[cfg(feature = "parsing")]
pub mod parsing {
    use super::*;
    use synom::Synom;

    impl Synom for Path {
        // A path is an optional leading `::` followed by one or more
        // `::`-separated segments. The `cond_reduce!` rejects any path whose
        // first segment is the `dyn` keyword.
        named!(parse -> Self, do_parse!(
            colon: option!(punct!(::)) >>
            segments: call!(Punctuated::<PathSegment, Token![::]>::parse_separated_nonempty) >>
            cond_reduce!(segments.first().map_or(true, |seg| seg.value().ident != "dyn")) >>
            (Path {
                leading_colon: colon,
                segments: segments,
            })
        ));

        fn description() -> Option<&'static str> {
            Some("path")
        }
    }

    #[cfg(not(feature = "full"))]
    impl Synom for GenericArgument {
        named!(parse -> Self, alt!(
            call!(ty_no_eq_after) => { GenericArgument::Type }
            |
            syn!(Lifetime) => { GenericArgument::Lifetime }
            |
            syn!(Binding) => { GenericArgument::Binding }
        ));
    }

    // With the `"full"` feature, literal and block expressions are also
    // accepted as (const) generic arguments.
    #[cfg(feature = "full")]
    impl Synom for GenericArgument {
        named!(parse -> Self, alt!(
            call!(ty_no_eq_after) => { GenericArgument::Type }
            |
            syn!(Lifetime) => { GenericArgument::Lifetime }
            |
            syn!(Binding) => { GenericArgument::Binding }
            |
            syn!(ExprLit) => { |l| GenericArgument::Const(Expr::Lit(l)) }
            |
            syn!(ExprBlock) => { |b| GenericArgument::Const(Expr::Block(b)) }
        ));

        fn description() -> Option<&'static str> {
            Some("generic argument")
        }
    }

    impl Synom for AngleBracketedGenericArguments {
        named!(parse -> Self, do_parse!(
            colon2: option!(punct!(::)) >>
            lt: punct!(<) >>
            args: call!(Punctuated::parse_terminated) >>
            gt: punct!(>) >>
            (AngleBracketedGenericArguments {
                colon2_token: colon2,
                lt_token: lt,
                args: args,
                gt_token: gt,
            })
        ));

        fn description() -> Option<&'static str> {
            Some("angle bracketed generic arguments")
        }
    }

    impl Synom for ParenthesizedGenericArguments {
        named!(parse -> Self, do_parse!(
            data: parens!(Punctuated::parse_terminated) >>
            output: syn!(ReturnType) >>
            (ParenthesizedGenericArguments {
                paren_token: data.0,
                inputs: data.1,
                output: output,
            })
        ));

        fn description() -> Option<&'static str> {
            Some("parenthesized generic arguments: `Foo(A, B, ..) -> T`")
        }
    }

    impl Synom for PathSegment {
        // Try an identifier with angle-bracketed arguments first; fall back
        // to a plain (mod-style) segment.
        named!(parse -> Self, alt!(
            do_parse!(
                ident: syn!(Ident) >>
                arguments: syn!(AngleBracketedGenericArguments) >>
                (PathSegment {
                    ident: ident,
                    arguments: PathArguments::AngleBracketed(arguments),
                })
            )
            |
            mod_style_path_segment
        ));

        fn description() -> Option<&'static str> {
            Some("path segment")
        }
    }

    impl Synom for Binding {
        named!(parse -> Self, do_parse!(
            id: syn!(Ident) >>
            eq: punct!(=) >>
            ty: syn!(Type) >>
            (Binding {
                ident: id,
                eq_token: eq,
                ty: ty,
            })
        ));

        fn description() -> Option<&'static str> {
            Some("associated type binding")
        }
    }

    impl Path {
        // Parses a path whose segments carry no generic arguments
        // ("mod style"), using `mod_style_path_segment` for every segment.
        named!(pub parse_mod_style -> Self, do_parse!(
            colon: option!(punct!(::)) >>
            segments: call!(Punctuated::parse_separated_nonempty_with,
                            mod_style_path_segment) >>
            (Path {
                leading_colon: colon,
                segments: segments,
            })
        ));
    }

    // A mod-style segment: a plain identifier or one of the path keywords.
    named!(mod_style_path_segment -> PathSegment, alt!(
        syn!(Ident) => { Into::into }
        |
        keyword!(super) => { Into::into }
        |
        keyword!(self) => { Into::into }
        |
        keyword!(Self) => { Into::into }
        |
        keyword!(crate) => { Into::into }
    ));

    // Parses a possibly self-qualified path: either a plain `Path`, the
    // `<Ty as Trait>::rest` / `<Ty>::rest` form (returning the `QSelf`
    // alongside the assembled path), or the bare `self` keyword.
    named!(pub qpath -> (Option<QSelf>, Path), alt!(
        map!(syn!(Path), |p| (None, p))
        |
        do_parse!(
            lt: punct!(<) >>
            this: syn!(Type) >>
            path: option!(tuple!(keyword!(as), syn!(Path))) >>
            gt: punct!(>) >>
            colon2: punct!(::) >>
            rest: call!(Punctuated::parse_separated_nonempty) >>
            ({
                // When an `as Trait` clause is present, the trait's segments
                // come first and `pos` records where the associated items
                // begin; otherwise the whole path is the remainder.
                let (pos, as_, path) = match path {
                    Some((as_, mut path)) => {
                        let pos = path.segments.len();
                        path.segments.push_punct(colon2);
                        path.segments.extend(rest.into_pairs());
                        (pos, Some(as_), path)
                    }
                    None => {
                        (0, None, Path {
                            leading_colon: Some(colon2),
                            segments: rest,
                        })
                    }
                };
                (Some(QSelf {
                    lt_token: lt,
                    ty: Box::new(this),
                    position: pos,
                    as_token: as_,
                    gt_token: gt,
                }), path)
            })
        )
        |
        map!(keyword!(self), |s| (None, s.into()))
    ));

    // A type that is not followed by `=`, keeping a `Binding` (`Item = u8`)
    // from being swallowed as a plain type argument.
    named!(pub ty_no_eq_after -> Type, do_parse!(
        ty: syn!(Type) >>
        not!(punct!(=)) >>
        (ty)
    ));
}
#[cfg(feature = "printing")]
mod printing {
    use super::*;
    use quote::{ToTokens, Tokens};

    impl ToTokens for Path {
        fn to_tokens(&self, tokens: &mut Tokens) {
            self.leading_colon.to_tokens(tokens);
            self.segments.to_tokens(tokens);
        }
    }

    impl ToTokens for PathSegment {
        fn to_tokens(&self, tokens: &mut Tokens) {
            self.ident.to_tokens(tokens);
            self.arguments.to_tokens(tokens);
        }
    }

    impl ToTokens for PathArguments {
        fn to_tokens(&self, tokens: &mut Tokens) {
            // `None` prints nothing at all.
            match *self {
                PathArguments::None => {}
                PathArguments::AngleBracketed(ref arguments) => {
                    arguments.to_tokens(tokens);
                }
                PathArguments::Parenthesized(ref arguments) => {
                    arguments.to_tokens(tokens);
                }
            }
        }
    }

    impl ToTokens for GenericArgument {
        #[cfg_attr(feature = "cargo-clippy", allow(match_same_arms))]
        fn to_tokens(&self, tokens: &mut Tokens) {
            match *self {
                GenericArgument::Lifetime(ref lt) => lt.to_tokens(tokens),
                GenericArgument::Type(ref ty) => ty.to_tokens(tokens),
                GenericArgument::Binding(ref tb) => tb.to_tokens(tokens),
                GenericArgument::Const(ref e) => match *e {
                    Expr::Lit(_) => e.to_tokens(tokens),
                    // NOTE: We should probably support parsing blocks with only
                    // expressions in them without the full feature for const
                    // generics.
                    #[cfg(feature = "full")]
                    Expr::Block(_) => e.to_tokens(tokens),
                    // ERROR CORRECTION: Add braces to make sure that the
                    // generated code is valid.
                    _ => token::Brace::default().surround(tokens, |tokens| {
                        e.to_tokens(tokens);
                    }),
                },
            }
        }
    }

    impl ToTokens for AngleBracketedGenericArguments {
        fn to_tokens(&self, tokens: &mut Tokens) {
            self.colon2_token.to_tokens(tokens);
            self.lt_token.to_tokens(tokens);
            // Print lifetimes before types and consts, all before bindings,
            // regardless of their order in self.args.
            //
            // TODO: ordering rules for const arguments vs type arguments have
            // not been settled yet. https://github.com/rust-lang/rust/issues/44580
            //
            // `trailing_or_empty` tracks whether the previously printed
            // argument already emitted its separator, so a `,` is only
            // inserted between passes when one is missing.
            let mut trailing_or_empty = true;
            for param in self.args.pairs() {
                if let GenericArgument::Lifetime(_) = **param.value() {
                    param.to_tokens(tokens);
                    trailing_or_empty = param.punct().is_some();
                }
            }
            // Second pass: type and const arguments.
            for param in self.args.pairs() {
                match **param.value() {
                    GenericArgument::Type(_) | GenericArgument::Const(_) => {
                        if !trailing_or_empty {
                            <Token![,]>::default().to_tokens(tokens);
                        }
                        param.to_tokens(tokens);
                        trailing_or_empty = param.punct().is_some();
                    }
                    GenericArgument::Lifetime(_) | GenericArgument::Binding(_) => {}
                }
            }
            // Final pass: associated type bindings.
            for param in self.args.pairs() {
                if let GenericArgument::Binding(_) = **param.value() {
                    if !trailing_or_empty {
                        <Token![,]>::default().to_tokens(tokens);
                        trailing_or_empty = true;
                    }
                    param.to_tokens(tokens);
                }
            }
            self.gt_token.to_tokens(tokens);
        }
    }

    impl ToTokens for Binding {
        fn to_tokens(&self, tokens: &mut Tokens) {
            self.ident.to_tokens(tokens);
            self.eq_token.to_tokens(tokens);
            self.ty.to_tokens(tokens);
        }
    }

    impl ToTokens for ParenthesizedGenericArguments {
        fn to_tokens(&self, tokens: &mut Tokens) {
            self.paren_token.surround(tokens, |tokens| {
                self.inputs.to_tokens(tokens);
            });
            self.output.to_tokens(tokens);
        }
    }

    impl<'a> ToTokens for PathTokens<'a> {
        fn to_tokens(&self, tokens: &mut Tokens) {
            // Without a qualified self, this is just an ordinary path.
            let qself = match *self.0 {
                Some(ref qself) => qself,
                None => return self.1.to_tokens(tokens),
            };
            qself.lt_token.to_tokens(tokens);
            qself.ty.to_tokens(tokens);
            // XXX: Gross.
            // Clamp an out-of-range position to the last segment so the loop
            // below never runs past the end of the path.
            let pos = if qself.position > 0 && qself.position >= self.1.segments.len() {
                self.1.segments.len() - 1
            } else {
                qself.position
            };
            let mut segments = self.1.segments.pairs();
            if pos > 0 {
                TokensOrDefault(&qself.as_token).to_tokens(tokens);
                self.1.leading_colon.to_tokens(tokens);
                // Print the first `pos` segments inside the angle brackets,
                // emitting the closing `>` immediately after segment `pos`.
                for (i, segment) in segments.by_ref().take(pos).enumerate() {
                    if i + 1 == pos {
                        segment.value().to_tokens(tokens);
                        qself.gt_token.to_tokens(tokens);
                        segment.punct().to_tokens(tokens);
                    } else {
                        segment.to_tokens(tokens);
                    }
                }
            } else {
                qself.gt_token.to_tokens(tokens);
                self.1.leading_colon.to_tokens(tokens);
            }
            // Remaining segments come after the closing `>`.
            for segment in segments {
                segment.to_tokens(tokens);
            }
        }
    }
}

669
third_party/rust/syn-0.12.12/src/punctuated.rs поставляемый
Просмотреть файл

@ -1,669 +0,0 @@
// Copyright 2018 Syn Developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! A punctuated sequence of syntax tree nodes separated by punctuation.
//!
//! Lots of things in Rust are punctuated sequences.
//!
//! - The fields of a struct are `Punctuated<Field, Token![,]>`.
//! - The segments of a path are `Punctuated<PathSegment, Token![::]>`.
//! - The bounds on a generic parameter are `Punctuated<TypeParamBound, Token![+]>`.
//! - The arguments to a function call are `Punctuated<Expr, Token![,]>`.
//!
//! This module provides a common representation for these punctuated sequences
//! in the form of the [`Punctuated<T, P>`] type. We store a vector of pairs of
//! syntax tree node + punctuation, where every node in the sequence is followed
//! by punctuation except for possibly the final one.
//!
//! [`Punctuated<T, P>`]: struct.Punctuated.html
//!
//! ```text
//! a_function_call(arg1, arg2, arg3);
//! ^^^^^ ~~~~~ ^^^^
//! ```
use std::iter::FromIterator;
use std::ops::{Index, IndexMut};
use std::slice;
use std::vec;
#[cfg(feature = "extra-traits")]
use std::fmt::{self, Debug};
#[cfg(feature = "parsing")]
use synom::{Synom, PResult};
#[cfg(feature = "parsing")]
use buffer::Cursor;
#[cfg(feature = "parsing")]
use parse_error;
/// A punctuated sequence of syntax tree nodes of type `T` separated by
/// punctuation of type `P`.
///
/// Refer to the [module documentation] for details about punctuated sequences.
///
/// [module documentation]: index.html
#[cfg_attr(feature = "extra-traits", derive(Eq, PartialEq, Hash))]
#[cfg_attr(feature = "clone-impls", derive(Clone))]
pub struct Punctuated<T, P> {
    // Each node paired with its optional following punctuation;
    // `push_value`/`push_punct` expect `None` only on the final element.
    inner: Vec<(T, Option<P>)>,
}
impl<T, P> Punctuated<T, P> {
    /// Creates an empty punctuated sequence.
    pub fn new() -> Punctuated<T, P> {
        Punctuated { inner: Vec::new() }
    }

    /// Determines whether this punctuated sequence is empty, meaning it
    /// contains no syntax tree nodes or punctuation.
    pub fn is_empty(&self) -> bool {
        self.inner.len() == 0
    }

    /// Returns the number of syntax tree nodes in this punctuated sequence.
    ///
    /// This is the number of nodes of type `T`, not counting the punctuation of
    /// type `P`.
    pub fn len(&self) -> usize {
        self.inner.len()
    }

    /// Borrows the first punctuated pair in this sequence.
    pub fn first(&self) -> Option<Pair<&T, &P>> {
        self.inner.first().map(|&(ref t, ref d)| match *d {
            Some(ref d) => Pair::Punctuated(t, d),
            None => Pair::End(t),
        })
    }

    /// Borrows the last punctuated pair in this sequence.
    pub fn last(&self) -> Option<Pair<&T, &P>> {
        self.inner.last().map(|&(ref t, ref d)| match *d {
            Some(ref d) => Pair::Punctuated(t, d),
            None => Pair::End(t),
        })
    }

    /// Mutably borrows the last punctuated pair in this sequence.
    pub fn last_mut(&mut self) -> Option<Pair<&mut T, &mut P>> {
        self.inner
            .last_mut()
            .map(|&mut (ref mut t, ref mut d)| match *d {
                Some(ref mut d) => Pair::Punctuated(t, d),
                None => Pair::End(t),
            })
    }

    /// Returns an iterator over borrowed syntax tree nodes of type `&T`.
    pub fn iter(&self) -> Iter<T, P> {
        Iter {
            inner: self.inner.iter(),
        }
    }

    /// Returns an iterator over mutably borrowed syntax tree nodes of type
    /// `&mut T`.
    pub fn iter_mut(&mut self) -> IterMut<T, P> {
        IterMut {
            inner: self.inner.iter_mut(),
        }
    }

    /// Returns an iterator over the contents of this sequence as borrowed
    /// punctuated pairs.
    pub fn pairs(&self) -> Pairs<T, P> {
        Pairs {
            inner: self.inner.iter(),
        }
    }

    /// Returns an iterator over the contents of this sequence as mutably
    /// borrowed punctuated pairs.
    pub fn pairs_mut(&mut self) -> PairsMut<T, P> {
        PairsMut {
            inner: self.inner.iter_mut(),
        }
    }

    /// Returns an iterator over the contents of this sequence as owned
    /// punctuated pairs.
    pub fn into_pairs(self) -> IntoPairs<T, P> {
        IntoPairs {
            inner: self.inner.into_iter(),
        }
    }

    /// Appends a syntax tree node onto the end of this punctuated sequence. The
    /// sequence must previously have a trailing punctuation.
    ///
    /// Use [`push`] instead if the punctuated sequence may or may not already
    /// have trailing punctuation.
    ///
    /// [`push`]: #method.push
    ///
    /// # Panics
    ///
    /// Panics if the sequence does not already have a trailing punctuation when
    /// this method is called.
    pub fn push_value(&mut self, value: T) {
        assert!(self.empty_or_trailing());
        self.inner.push((value, None));
    }

    /// Appends a trailing punctuation onto the end of this punctuated sequence.
    /// The sequence must be non-empty and must not already have trailing
    /// punctuation.
    ///
    /// # Panics
    ///
    /// Panics if the sequence is empty or already has a trailing punctuation.
    pub fn push_punct(&mut self, punctuation: P) {
        assert!(!self.is_empty());
        let last = self.inner.last_mut().unwrap();
        assert!(last.1.is_none());
        last.1 = Some(punctuation);
    }

    /// Removes the last punctuated pair from this sequence, or `None` if the
    /// sequence is empty.
    pub fn pop(&mut self) -> Option<Pair<T, P>> {
        self.inner.pop().map(|(t, d)| Pair::new(t, d))
    }

    /// Determines whether this punctuated sequence ends with a trailing
    /// punctuation.
    pub fn trailing_punct(&self) -> bool {
        self.inner
            .last()
            .map(|last| last.1.is_some())
            .unwrap_or(false)
    }

    /// Returns true if either this `Punctuated` is empty, or it has a trailing
    /// punctuation.
    ///
    /// Equivalent to `punctuated.is_empty() || punctuated.trailing_punct()`.
    pub fn empty_or_trailing(&self) -> bool {
        // Same check as `trailing_punct`, except an empty sequence also
        // qualifies (note the `unwrap_or(true)`).
        self.inner
            .last()
            .map(|last| last.1.is_some())
            .unwrap_or(true)
    }
}
impl<T, P> Punctuated<T, P>
where
    P: Default,
{
    /// Appends a syntax tree node onto the end of this punctuated sequence.
    ///
    /// If there is not a trailing punctuation in this sequence when this
    /// method is called, the default value of punctuation type `P` is
    /// inserted before the given value of type `T`.
    pub fn push(&mut self, value: T) {
        // Supply the separator first whenever one is missing.
        if !self.empty_or_trailing() {
            self.push_punct(P::default());
        }
        self.push_value(value);
    }

    /// Inserts an element at position `index`.
    ///
    /// # Panics
    ///
    /// Panics if `index` is greater than the number of elements previously in
    /// this punctuated sequence.
    pub fn insert(&mut self, index: usize, value: T) {
        assert!(index <= self.len());
        if index == self.len() {
            // Appending at the end reuses `push`, which manages punctuation.
            self.push(value);
        } else {
            // An interior element always needs following punctuation.
            self.inner.insert(index, (value, Some(P::default())));
        }
    }
}
#[cfg(feature = "extra-traits")]
impl<T: Debug, P: Debug> Debug for Punctuated<T, P> {
    // Delegates to the underlying `Vec<(T, Option<P>)>` representation.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.inner.fmt(f)
    }
}
impl<T, P> FromIterator<T> for Punctuated<T, P>
where
    P: Default,
{
    // Collects bare values, separating them with default punctuation.
    fn from_iter<I: IntoIterator<Item = T>>(i: I) -> Self {
        let mut ret = Punctuated::new();
        ret.extend(i);
        ret
    }
}
impl<T, P> Extend<T> for Punctuated<T, P>
where
    P: Default,
{
    /// Appends every value from the iterator, letting `push` insert default
    /// punctuation between consecutive elements.
    fn extend<I: IntoIterator<Item = T>>(&mut self, i: I) {
        i.into_iter().for_each(|value| self.push(value));
    }
}
impl<T, P> FromIterator<Pair<T, P>> for Punctuated<T, P> {
    // Collects pre-punctuated pairs; no `P: Default` bound is needed because
    // each pair already carries its own punctuation.
    fn from_iter<I: IntoIterator<Item = Pair<T, P>>>(i: I) -> Self {
        let mut ret = Punctuated::new();
        ret.extend(i);
        ret
    }
}
impl<T, P> Extend<Pair<T, P>> for Punctuated<T, P> {
fn extend<I: IntoIterator<Item = Pair<T, P>>>(&mut self, i: I) {
for pair in i {
match pair {
Pair::Punctuated(a, b) => self.inner.push((a, Some(b))),
Pair::End(a) => self.inner.push((a, None)),
}
}
}
}
impl<T, P> IntoIterator for Punctuated<T, P> {
    type Item = T;
    type IntoIter = IntoIter<T, P>;
    // Consumes the sequence, yielding owned nodes and dropping punctuation.
    fn into_iter(self) -> Self::IntoIter {
        IntoIter {
            inner: self.inner.into_iter(),
        }
    }
}
impl<'a, T, P> IntoIterator for &'a Punctuated<T, P> {
    type Item = &'a T;
    type IntoIter = Iter<'a, T, P>;
    // Enables `for x in &punctuated` by delegating to `iter`.
    fn into_iter(self) -> Self::IntoIter {
        Punctuated::iter(self)
    }
}
impl<'a, T, P> IntoIterator for &'a mut Punctuated<T, P> {
    type Item = &'a mut T;
    type IntoIter = IterMut<'a, T, P>;
    // Enables `for x in &mut punctuated` by delegating to `iter_mut`.
    fn into_iter(self) -> Self::IntoIter {
        Punctuated::iter_mut(self)
    }
}
impl<T, P> Default for Punctuated<T, P> {
    // An empty sequence; no bounds on `T` or `P` are required.
    fn default() -> Self {
        Punctuated::new()
    }
}
/// An iterator over borrowed pairs of type `Pair<&T, &P>`.
///
/// Refer to the [module documentation] for details about punctuated sequences.
///
/// [module documentation]: index.html
pub struct Pairs<'a, T: 'a, P: 'a> {
    inner: slice::Iter<'a, (T, Option<P>)>,
}

impl<'a, T, P> Iterator for Pairs<'a, T, P> {
    type Item = Pair<&'a T, &'a P>;
    fn next(&mut self) -> Option<Self::Item> {
        // Re-wrap each stored `(T, Option<P>)` entry as a borrowed `Pair`.
        self.inner.next().map(|pair| match pair.1 {
            Some(ref p) => Pair::Punctuated(&pair.0, p),
            None => Pair::End(&pair.0),
        })
    }
}
/// An iterator over mutably borrowed pairs of type `Pair<&mut T, &mut P>`.
///
/// Refer to the [module documentation] for details about punctuated sequences.
///
/// [module documentation]: index.html
pub struct PairsMut<'a, T: 'a, P: 'a> {
    inner: slice::IterMut<'a, (T, Option<P>)>,
}

impl<'a, T, P> Iterator for PairsMut<'a, T, P> {
    type Item = Pair<&'a mut T, &'a mut P>;
    fn next(&mut self) -> Option<Self::Item> {
        // Mutable analogue of `Pairs::next`.
        self.inner.next().map(|pair| match pair.1 {
            Some(ref mut p) => Pair::Punctuated(&mut pair.0, p),
            None => Pair::End(&mut pair.0),
        })
    }
}
/// An iterator over owned pairs of type `Pair<T, P>`.
///
/// Refer to the [module documentation] for details about punctuated sequences.
///
/// [module documentation]: index.html
pub struct IntoPairs<T, P> {
    inner: vec::IntoIter<(T, Option<P>)>,
}

impl<T, P> Iterator for IntoPairs<T, P> {
    type Item = Pair<T, P>;
    fn next(&mut self) -> Option<Self::Item> {
        // Owned analogue of `Pairs::next`.
        self.inner.next().map(|pair| match pair.1 {
            Some(p) => Pair::Punctuated(pair.0, p),
            None => Pair::End(pair.0),
        })
    }
}
/// An iterator over owned values of type `T`.
///
/// Refer to the [module documentation] for details about punctuated sequences.
///
/// [module documentation]: index.html
pub struct IntoIter<T, P> {
    inner: vec::IntoIter<(T, Option<P>)>,
}

impl<T, P> Iterator for IntoIter<T, P> {
    type Item = T;
    fn next(&mut self) -> Option<Self::Item> {
        // Punctuation is dropped; only the node of each pair is yielded.
        self.inner.next().map(|pair| pair.0)
    }
}
/// An iterator over borrowed values of type `&T`.
///
/// Refer to the [module documentation] for details about punctuated sequences.
///
/// [module documentation]: index.html
pub struct Iter<'a, T: 'a, P: 'a> {
    inner: slice::Iter<'a, (T, Option<P>)>,
}

#[cfg(any(feature = "full", feature = "derive"))]
impl<'a, T, P> Iter<'a, T, P> {
    // Not public API.
    #[doc(hidden)]
    // Produces an iterator over an empty slice, yielding nothing.
    pub fn private_empty() -> Self {
        Iter {
            inner: [].iter(),
        }
    }
}

impl<'a, T, P> Iterator for Iter<'a, T, P> {
    type Item = &'a T;
    fn next(&mut self) -> Option<Self::Item> {
        // Punctuation is skipped; only the node of each pair is yielded.
        self.inner.next().map(|pair| &pair.0)
    }
}
/// An iterator over mutably borrowed values of type `&mut T`.
///
/// Refer to the [module documentation] for details about punctuated sequences.
///
/// [module documentation]: index.html
pub struct IterMut<'a, T: 'a, P: 'a> {
    inner: slice::IterMut<'a, (T, Option<P>)>,
}

impl<'a, T, P> Iterator for IterMut<'a, T, P> {
    type Item = &'a mut T;
    fn next(&mut self) -> Option<Self::Item> {
        // Mutable analogue of `Iter::next`; punctuation is skipped.
        self.inner.next().map(|pair| &mut pair.0)
    }
}
/// A single syntax tree node of type `T` followed by its trailing punctuation
/// of type `P` if any.
///
/// Refer to the [module documentation] for details about punctuated sequences.
///
/// [module documentation]: index.html
pub enum Pair<T, P> {
    /// A node followed by its punctuation.
    Punctuated(T, P),
    /// A node with no trailing punctuation.
    End(T),
}
impl<T, P> Pair<T, P> {
    /// Extracts the syntax tree node from this punctuated pair, discarding the
    /// following punctuation.
    pub fn into_value(self) -> T {
        match self {
            Pair::Punctuated(t, _) => t,
            Pair::End(t) => t,
        }
    }

    /// Borrows the syntax tree node from this punctuated pair.
    pub fn value(&self) -> &T {
        match *self {
            Pair::Punctuated(ref t, _) => t,
            Pair::End(ref t) => t,
        }
    }

    /// Mutably borrows the syntax tree node from this punctuated pair.
    pub fn value_mut(&mut self) -> &mut T {
        match *self {
            Pair::Punctuated(ref mut t, _) => t,
            Pair::End(ref mut t) => t,
        }
    }

    /// Borrows the punctuation from this punctuated pair, unless this pair is
    /// the final one and there is no trailing punctuation.
    pub fn punct(&self) -> Option<&P> {
        if let Pair::Punctuated(_, ref d) = *self {
            Some(d)
        } else {
            None
        }
    }

    /// Creates a punctuated pair out of a syntax tree node and an optional
    /// following punctuation.
    pub fn new(t: T, d: Option<P>) -> Self {
        if let Some(d) = d {
            Pair::Punctuated(t, d)
        } else {
            Pair::End(t)
        }
    }

    /// Produces this punctuated pair as a tuple of syntax tree node and
    /// optional following punctuation.
    pub fn into_tuple(self) -> (T, Option<P>) {
        match self {
            Pair::End(t) => (t, None),
            Pair::Punctuated(t, d) => (t, Some(d)),
        }
    }
}
impl<T, P> Index<usize> for Punctuated<T, P> {
    type Output = T;
    fn index(&self, index: usize) -> &Self::Output {
        // Index into the underlying vec of pairs, exposing only the value.
        let pair = &self.inner[index];
        &pair.0
    }
}
impl<T, P> IndexMut<usize> for Punctuated<T, P> {
    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
        // Mutable counterpart of `Index`: borrow the value half of the pair.
        let pair = &mut self.inner[index];
        &mut pair.0
    }
}
#[cfg(feature = "parsing")]
impl<T, P> Punctuated<T, P>
where
    T: Synom,
    P: Synom,
{
    /// Parse **zero or more** syntax tree nodes with punctuation in between and
    /// **no trailing** punctuation.
    pub fn parse_separated(input: Cursor) -> PResult<Self> {
        Self::parse_separated_with(input, T::parse)
    }
    /// Parse **one or more** syntax tree nodes with punctuation in between and
    /// **no trailing** punctuation.
    pub fn parse_separated_nonempty(input: Cursor) -> PResult<Self> {
        Self::parse_separated_nonempty_with(input, T::parse)
    }
    /// Parse **zero or more** syntax tree nodes with punctuation in between and
    /// **optional trailing** punctuation.
    pub fn parse_terminated(input: Cursor) -> PResult<Self> {
        Self::parse_terminated_with(input, T::parse)
    }
    /// Parse **one or more** syntax tree nodes with punctuation in between and
    /// **optional trailing** punctuation.
    pub fn parse_terminated_nonempty(input: Cursor) -> PResult<Self> {
        Self::parse_terminated_nonempty_with(input, T::parse)
    }
}
#[cfg(feature = "parsing")]
impl<T, P> Punctuated<T, P>
where
    P: Synom,
{
    /// Parse **zero or more** syntax tree nodes using the given parser with
    /// punctuation in between and **no trailing** punctuation.
    pub fn parse_separated_with(
        input: Cursor,
        parse: fn(Cursor) -> PResult<T>,
    ) -> PResult<Self> {
        Self::parse(input, parse, false)
    }
    /// Parse **one or more** syntax tree nodes using the given parser with
    /// punctuation in between and **no trailing** punctuation.
    pub fn parse_separated_nonempty_with(
        input: Cursor,
        parse: fn(Cursor) -> PResult<T>,
    ) -> PResult<Self> {
        // Same as `parse_separated_with`, but an empty result is an error.
        match Self::parse(input, parse, false) {
            Ok((ref b, _)) if b.is_empty() => parse_error(),
            other => other,
        }
    }
    /// Parse **zero or more** syntax tree nodes using the given parser with
    /// punctuation in between and **optional trailing** punctuation.
    pub fn parse_terminated_with(
        input: Cursor,
        parse: fn(Cursor) -> PResult<T>,
    ) -> PResult<Self> {
        Self::parse(input, parse, true)
    }
    /// Parse **one or more** syntax tree nodes using the given parser with
    /// punctuation in between and **optional trailing** punctuation.
    pub fn parse_terminated_nonempty_with(
        input: Cursor,
        parse: fn(Cursor) -> PResult<T>,
    ) -> PResult<Self> {
        // Same as `parse_terminated_with`, but an empty result is an error.
        match Self::parse(input, parse, true) {
            Ok((ref b, _)) if b.is_empty() => parse_error(),
            other => other,
        }
    }
    /// Shared worker for the four public parsers above: parses
    /// `value (punct value)*`, and when `terminated` is true additionally
    /// accepts one trailing punctuation token after the last value.
    fn parse(
        mut input: Cursor,
        parse: fn(Cursor) -> PResult<T>,
        terminated: bool,
    ) -> PResult<Self> {
        let mut res = Punctuated::new();
        // get the first element
        match parse(input) {
            Err(_) => Ok((res, input)),
            Ok((o, i)) => {
                if i == input {
                    // The parser succeeded without consuming tokens; treat
                    // this as an error to avoid spinning on the same position.
                    return parse_error();
                }
                input = i;
                res.push_value(o);
                // get the separator first
                while let Ok((s, i2)) = P::parse(input) {
                    if i2 == input {
                        // Zero-width separator; stop to guarantee progress.
                        break;
                    }
                    // get the element next
                    if let Ok((o3, i3)) = parse(i2) {
                        if i3 == i2 {
                            break;
                        }
                        res.push_punct(s);
                        res.push_value(o3);
                        input = i3;
                    } else {
                        // Separator without a following value: leave it
                        // unconsumed here; the `terminated` step below may
                        // pick it up as trailing punctuation.
                        break;
                    }
                }
                if terminated {
                    // Optionally consume one trailing punctuation token.
                    if let Ok((sep, after)) = P::parse(input) {
                        res.push_punct(sep);
                        input = after;
                    }
                }
                Ok((res, input))
            }
        }
    }
}
#[cfg(feature = "printing")]
mod printing {
    use super::*;
    use quote::{ToTokens, Tokens};
    impl<T, P> ToTokens for Punctuated<T, P>
    where
        T: ToTokens,
        P: ToTokens,
    {
        // Print every value together with its trailing punctuation, in order.
        fn to_tokens(&self, tokens: &mut Tokens) {
            for pair in self.pairs() {
                pair.to_tokens(tokens);
            }
        }
    }
    impl<T, P> ToTokens for Pair<T, P>
    where
        T: ToTokens,
        P: ToTokens,
    {
        // The value always prints; the punctuation prints only when present.
        fn to_tokens(&self, tokens: &mut Tokens) {
            match *self {
                Pair::End(ref value) => value.to_tokens(tokens),
                Pair::Punctuated(ref value, ref punct) => {
                    value.to_tokens(tokens);
                    punct.to_tokens(tokens);
                }
            }
        }
    }
}

152
third_party/rust/syn-0.12.12/src/spanned.rs поставляемый
Просмотреть файл

@ -1,152 +0,0 @@
// Copyright 2018 Syn Developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! A trait that can provide the `Span` of the complete contents of a syntax
//! tree node.
//!
//! *This module is available if Syn is built with both the `"parsing"` and
//! `"printing"` features.*
//!
//! # Example
//!
//! Suppose in a procedural macro we have a [`Type`] that we want to assert
//! implements the [`Sync`] trait. Maybe this is the type of one of the fields
//! of a struct for which we are deriving a trait implementation, and we need to
//! be able to pass a reference to one of those fields across threads.
//!
//! [`Type`]: ../enum.Type.html
//! [`Sync`]: https://doc.rust-lang.org/std/marker/trait.Sync.html
//!
//! If the field type does *not* implement `Sync` as required, we want the
//! compiler to report an error pointing out exactly which type it was.
//!
//! The following macro code takes a variable `ty` of type `Type` and produces a
//! static assertion that `Sync` is implemented for that type.
//!
//! ```
//! #[macro_use]
//! extern crate quote;
//!
//! extern crate syn;
//! extern crate proc_macro;
//! extern crate proc_macro2;
//!
//! use syn::Type;
//! use syn::spanned::Spanned;
//! use proc_macro::TokenStream;
//! use proc_macro2::Span;
//!
//! # const IGNORE_TOKENS: &str = stringify! {
//! #[proc_macro_derive(MyMacro)]
//! # };
//! pub fn my_macro(input: TokenStream) -> TokenStream {
//! # let ty = get_a_type();
//! /* ... */
//!
//! let def_site = Span::def_site();
//! let ty_span = ty.span().resolved_at(def_site);
//! let assert_sync = quote_spanned! {ty_span=>
//! struct _AssertSync where #ty: Sync;
//! };
//!
//! /* ... */
//! # input
//! }
//! #
//! # fn get_a_type() -> Type {
//! # unimplemented!()
//! # }
//! #
//! # fn main() {}
//! ```
//!
//! By inserting this `assert_sync` fragment into the output code generated by
//! our macro, the user's code will fail to compile if `ty` does not implement
//! `Sync`. The errors they would see look like the following.
//!
//! ```text
//! error[E0277]: the trait bound `*const i32: std::marker::Sync` is not satisfied
//! --> src/main.rs:10:21
//! |
//! 10 | bad_field: *const i32,
//! | ^^^^^^^^^^ `*const i32` cannot be shared between threads safely
//! ```
//!
//! In this technique, using the `Type`'s span for the error message makes the
//! error appear in the correct place underlining the right type. But it is
//! **incredibly important** that the span for the assertion is **resolved** at
//! the procedural macro definition site rather than at the `Type`'s span. This
//! way we guarantee that it refers to the `Sync` trait that we expect. If the
//! assertion were **resolved** at the same place that `ty` is resolved, the
//! user could circumvent the check by defining their own `Sync` trait that is
//! implemented for their type.
use proc_macro2::{Span, TokenStream};
use quote::{ToTokens, Tokens};
/// A trait that can provide the `Span` of the complete contents of a syntax
/// tree node.
///
/// This trait is automatically implemented for all types that implement
/// [`ToTokens`] from the `quote` crate.
///
/// [`ToTokens`]: https://docs.rs/quote/0.4/quote/trait.ToTokens.html
///
/// See the [module documentation] for an example.
///
/// [module documentation]: index.html
///
/// *This trait is available if Syn is built with both the `"parsing"` and
/// `"printing"` features.*
pub trait Spanned {
    /// Returns a `Span` covering the complete contents of this syntax tree
    /// node, or [`Span::call_site()`] if this node is empty.
    ///
    /// [`Span::call_site()`]: https://docs.rs/proc-macro2/0.2/proc_macro2/struct.Span.html#method.call_site
    fn span(&self) -> Span;
}
impl<T> Spanned for T
where
    T: ToTokens,
{
    #[cfg(procmacro2_semver_exempt)]
    fn span(&self) -> Span {
        // Print this node back to a token stream, then build a span covering
        // all of its tokens by repeatedly joining adjacent token spans.
        let mut tokens = Tokens::new();
        self.to_tokens(&mut tokens);
        let token_stream = TokenStream::from(tokens);
        let mut iter = token_stream.into_iter();
        // Seed with the first token's span; an empty node falls back to the
        // call site span as documented on the trait.
        let mut span = match iter.next() {
            Some(tt) => tt.span,
            None => {
                return Span::call_site();
            }
        };
        for tt in iter {
            // `join` may return None; in that case keep the span accumulated
            // so far rather than discarding it.
            if let Some(joined) = span.join(tt.span) {
                span = joined;
            }
        }
        span
    }
    #[cfg(not(procmacro2_semver_exempt))]
    fn span(&self) -> Span {
        let mut tokens = Tokens::new();
        self.to_tokens(&mut tokens);
        let token_stream = TokenStream::from(tokens);
        let mut iter = token_stream.into_iter();
        // We can't join spans without procmacro2_semver_exempt so just grab the
        // first one.
        match iter.next() {
            Some(tt) => tt.span,
            None => Span::call_site(),
        }
    }
}

232
third_party/rust/syn-0.12.12/src/synom.rs поставляемый
Просмотреть файл

@ -1,232 +0,0 @@
// Copyright 2018 Syn Developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Parsing interface for parsing a token stream into a syntax tree node.
//!
//! Parsing in Syn is built on parser functions that take in a [`Cursor`] and
//! produce a [`PResult<T>`] where `T` is some syntax tree node. `Cursor` is a
//! cheaply copyable cursor over a range of tokens in a token stream, and
//! `PResult` is a result that packages together a parsed syntax tree node `T`
//! with a stream of remaining unparsed tokens after `T` represented as another
//! `Cursor`, or a [`ParseError`] if parsing failed.
//!
//! [`Cursor`]: ../buffer/index.html
//! [`PResult<T>`]: type.PResult.html
//! [`ParseError`]: struct.ParseError.html
//!
//! This `Cursor`- and `PResult`-based interface is convenient for parser
//! combinators and parser implementations, but not necessarily when you just
//! have some tokens that you want to parse. For that we expose the following
//! two entry points.
//!
//! ## The `syn::parse*` functions
//!
//! The [`syn::parse`], [`syn::parse2`], and [`syn::parse_str`] functions serve
//! as an entry point for parsing syntax tree nodes that can be parsed in an
//! obvious default way. These functions can return any syntax tree node that
//! implements the [`Synom`] trait, which includes most types in Syn.
//!
//! [`syn::parse`]: ../fn.parse.html
//! [`syn::parse2`]: ../fn.parse2.html
//! [`syn::parse_str`]: ../fn.parse_str.html
//! [`Synom`]: trait.Synom.html
//!
//! ```
//! use syn::Type;
//!
//! # fn run_parser() -> Result<(), syn::synom::ParseError> {
//! let t: Type = syn::parse_str("std::collections::HashMap<String, Value>")?;
//! # Ok(())
//! # }
//! #
//! # fn main() {
//! # run_parser().unwrap();
//! # }
//! ```
//!
//! The [`parse_quote!`] macro also uses this approach.
//!
//! [`parse_quote!`]: ../macro.parse_quote.html
//!
//! ## The `Parser` trait
//!
//! Some types can be parsed in several ways depending on context. For example
//! an [`Attribute`] can be either "outer" like `#[...]` or "inner" like
//! `#![...]` and parsing the wrong one would be a bug. Similarly [`Punctuated`]
//! may or may not allow trailing punctuation, and parsing it the wrong way
//! would either reject valid input or accept invalid input.
//!
//! [`Attribute`]: ../struct.Attribute.html
//! [`Punctuated`]: ../punctuated/index.html
//!
//! The `Synom` trait is not implemented in these cases because there is no good
//! behavior to consider the default.
//!
//! ```ignore
//! // Can't parse `Punctuated` without knowing whether trailing punctuation
//! // should be allowed in this context.
//! let path: Punctuated<PathSegment, Token![::]> = syn::parse(tokens)?;
//! ```
//!
//! In these cases the types provide a choice of parser functions rather than a
//! single `Synom` implementation, and those parser functions can be invoked
//! through the [`Parser`] trait.
//!
//! [`Parser`]: trait.Parser.html
//!
//! ```
//! # #[macro_use]
//! # extern crate syn;
//! #
//! # extern crate proc_macro2;
//! # use proc_macro2::TokenStream;
//! #
//! use syn::synom::Parser;
//! use syn::punctuated::Punctuated;
//! use syn::{PathSegment, Expr, Attribute};
//!
//! # fn run_parsers() -> Result<(), syn::synom::ParseError> {
//! # let tokens = TokenStream::empty().into();
//! // Parse a nonempty sequence of path segments separated by `::` punctuation
//! // with no trailing punctuation.
//! let parser = Punctuated::<PathSegment, Token![::]>::parse_separated_nonempty;
//! let path = parser.parse(tokens)?;
//!
//! # let tokens = TokenStream::empty().into();
//! // Parse a possibly empty sequence of expressions terminated by commas with
//! // an optional trailing punctuation.
//! let parser = Punctuated::<Expr, Token![,]>::parse_terminated;
//! let args = parser.parse(tokens)?;
//!
//! # let tokens = TokenStream::empty().into();
//! // Parse zero or more outer attributes but not inner attributes.
//! named!(outer_attrs -> Vec<Attribute>, many0!(Attribute::parse_outer));
//! let attrs = outer_attrs.parse(tokens)?;
//! #
//! # Ok(())
//! # }
//! #
//! # fn main() {}
//! ```
//!
//! # Implementing a parser function
//!
//! Parser functions are usually implemented using the [`nom`]-style parser
//! combinator macros provided by Syn, but may also be implemented without
//! macros by using the low-level [`Cursor`] API directly.
//!
//! [`nom`]: https://github.com/Geal/nom
//!
//! The following parser combinator macros are available and a `Synom` parsing
//! example is provided for each one.
//!
//! - [`alt!`](../macro.alt.html)
//! - [`braces!`](../macro.braces.html)
//! - [`brackets!`](../macro.brackets.html)
//! - [`call!`](../macro.call.html)
//! - [`cond!`](../macro.cond.html)
//! - [`cond_reduce!`](../macro.cond_reduce.html)
//! - [`do_parse!`](../macro.do_parse.html)
//! - [`epsilon!`](../macro.epsilon.html)
//! - [`input_end!`](../macro.input_end.html)
//! - [`keyword!`](../macro.keyword.html)
//! - [`many0!`](../macro.many0.html)
//! - [`map!`](../macro.map.html)
//! - [`not!`](../macro.not.html)
//! - [`option!`](../macro.option.html)
//! - [`parens!`](../macro.parens.html)
//! - [`punct!`](../macro.punct.html)
//! - [`reject!`](../macro.reject.html)
//! - [`switch!`](../macro.switch.html)
//! - [`syn!`](../macro.syn.html)
//! - [`tuple!`](../macro.tuple.html)
//! - [`value!`](../macro.value.html)
//!
//! *This module is available if Syn is built with the `"parsing"` feature.*
use proc_macro;
use proc_macro2;
pub use error::{PResult, ParseError};
use buffer::{Cursor, TokenBuffer};
/// Parsing interface implemented by all types that can be parsed in a default
/// way from a token stream.
///
/// Refer to the [module documentation] for details about parsing in Syn.
///
/// [module documentation]: index.html
///
/// *This trait is available if Syn is built with the `"parsing"` feature.*
pub trait Synom: Sized {
    /// Parse a value of this type from the cursor, returning it together with
    /// a cursor positioned after the consumed tokens.
    fn parse(input: Cursor) -> PResult<Self>;
    /// A short human-readable description of what this parser accepts, if
    /// any; defaults to `None`.
    fn description() -> Option<&'static str> {
        None
    }
}
impl Synom for proc_macro2::TokenStream {
    // A token stream accepts anything: consume the entire remaining input and
    // leave an empty cursor behind.
    fn parse(input: Cursor) -> PResult<Self> {
        Ok((input.token_stream(), Cursor::empty()))
    }
    fn description() -> Option<&'static str> {
        Some("arbitrary token stream")
    }
}
/// Parser that can parse Rust tokens into a particular syntax tree node.
///
/// Refer to the [module documentation] for details about parsing in Syn.
///
/// [module documentation]: index.html
///
/// *This trait is available if Syn is built with the `"parsing"` feature.*
pub trait Parser: Sized {
type Output;
/// Parse a proc-macro2 token stream into the chosen syntax tree node.
fn parse2(self, tokens: proc_macro2::TokenStream) -> Result<Self::Output, ParseError>;
/// Parse tokens of source code into the chosen syntax tree node.
fn parse(self, tokens: proc_macro::TokenStream) -> Result<Self::Output, ParseError> {
self.parse2(tokens.into())
}
/// Parse a string of Rust code into the chosen syntax tree node.
///
/// # Hygiene
///
/// Every span in the resulting syntax tree will be set to resolve at the
/// macro call site.
fn parse_str(self, s: &str) -> Result<Self::Output, ParseError> {
match s.parse() {
Ok(tts) => self.parse2(tts),
Err(_) => Err(ParseError::new("error while lexing input string")),
}
}
}
impl<F, T> Parser for F where F: FnOnce(Cursor) -> PResult<T> {
    type Output = T;
    fn parse2(self, tokens: proc_macro2::TokenStream) -> Result<T, ParseError> {
        // Buffer the stream so we can cursor over it, then run the parser
        // function from the beginning.
        let buf = TokenBuffer::new2(tokens);
        let begin = buf.begin();
        let (t, rest) = self(begin)?;
        if rest.eof() {
            // All input consumed: a complete, successful parse.
            Ok(t)
        } else if rest == begin {
            // parsed nothing
            Err(ParseError::new("failed to parse anything"))
        } else {
            Err(ParseError::new("failed to parse all tokens"))
        }
    }
}

779
third_party/rust/syn-0.12.12/src/token.rs поставляемый
Просмотреть файл

@ -1,779 +0,0 @@
// Copyright 2018 Syn Developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Tokens representing Rust punctuation, keywords, and delimiters.
//!
//! The type names in this module can be difficult to keep straight, so we
//! prefer to use the [`Token!`] macro instead. This is a type-macro that
//! expands to the token type of the given token.
//!
//! [`Token!`]: ../macro.Token.html
//!
//! # Example
//!
//! The [`ItemStatic`] syntax tree node is defined like this.
//!
//! [`ItemStatic`]: ../struct.ItemStatic.html
//!
//! ```
//! # #[macro_use]
//! # extern crate syn;
//! #
//! # use syn::{Attribute, Visibility, Ident, Type, Expr};
//! #
//! pub struct ItemStatic {
//! pub attrs: Vec<Attribute>,
//! pub vis: Visibility,
//! pub static_token: Token![static],
//! pub mutability: Option<Token![mut]>,
//! pub ident: Ident,
//! pub colon_token: Token![:],
//! pub ty: Box<Type>,
//! pub eq_token: Token![=],
//! pub expr: Box<Expr>,
//! pub semi_token: Token![;],
//! }
//! #
//! # fn main() {}
//! ```
//!
//! # Parsing
//!
//! These tokens can be parsed using the [`Synom`] trait and the parser
//! combinator macros [`punct!`], [`keyword!`], [`parens!`], [`braces!`], and
//! [`brackets!`].
//!
//! [`Synom`]: ../synom/trait.Synom.html
//! [`punct!`]: ../macro.punct.html
//! [`keyword!`]: ../macro.keyword.html
//! [`parens!`]: ../macro.parens.html
//! [`braces!`]: ../macro.braces.html
//! [`brackets!`]: ../macro.brackets.html
//!
//! ```
//! #[macro_use]
//! extern crate syn;
//!
//! use syn::synom::Synom;
//! use syn::{Attribute, Visibility, Ident, Type, Expr};
//! #
//! # struct ItemStatic;
//! # use syn::ItemStatic as SynItemStatic;
//!
//! // Parse the ItemStatic struct shown above.
//! impl Synom for ItemStatic {
//! named!(parse -> Self, do_parse!(
//! # (ItemStatic)
//! # ));
//! # }
//! #
//! # mod example {
//! # use super::*;
//! # use super::SynItemStatic as ItemStatic;
//! #
//! # named!(parse -> ItemStatic, do_parse!(
//! attrs: many0!(Attribute::parse_outer) >>
//! vis: syn!(Visibility) >>
//! static_token: keyword!(static) >>
//! mutability: option!(keyword!(mut)) >>
//! ident: syn!(Ident) >>
//! colon_token: punct!(:) >>
//! ty: syn!(Type) >>
//! eq_token: punct!(=) >>
//! expr: syn!(Expr) >>
//! semi_token: punct!(;) >>
//! (ItemStatic {
//! attrs, vis, static_token, mutability, ident, colon_token,
//! ty: Box::new(ty), eq_token, expr: Box::new(expr), semi_token,
//! })
//! ));
//! }
//! #
//! # fn main() {}
//! ```
use proc_macro2::Span;
// Entry-point macro: expands a categorized list of token definitions into one
// struct type per token, via the three helper macros below. `$len` on punct
// entries is the number of characters in the token (one `Span` is stored per
// character).
macro_rules! tokens {
    (
        punct: {
            $($punct:tt pub struct $punct_name:ident/$len:tt #[$punct_doc:meta])*
        }
        delimiter: {
            $($delimiter:tt pub struct $delimiter_name:ident #[$delimiter_doc:meta])*
        }
        keyword: {
            $($keyword:tt pub struct $keyword_name:ident #[$keyword_doc:meta])*
        }
    ) => (
        $(token_punct! { #[$punct_doc] $punct pub struct $punct_name/$len })*
        $(token_delimiter! { #[$delimiter_doc] $delimiter pub struct $delimiter_name })*
        $(token_keyword! { #[$keyword_doc] $keyword pub struct $keyword_name })*
    )
}
// Defines one punctuation token type: a tuple struct holding one `Span` per
// character, plus trait impls for defaulting, debugging, comparing, printing,
// and parsing.
macro_rules! token_punct {
    (#[$doc:meta] $s:tt pub struct $name:ident/$len:tt) => {
        #[cfg_attr(feature = "clone-impls", derive(Copy, Clone))]
        #[$doc]
        ///
        /// Don't try to remember the name of this type -- use the [`Token!`]
        /// macro instead.
        ///
        /// [`Token!`]: index.html
        pub struct $name(pub [Span; $len]);
        impl $name {
            pub fn new(span: Span) -> Self {
                // Reuse the same span for every character of the token.
                $name([span; $len])
            }
        }
        impl ::std::default::Default for $name {
            fn default() -> Self {
                $name([Span::def_site(); $len])
            }
        }
        #[cfg(feature = "extra-traits")]
        impl ::std::fmt::Debug for $name {
            fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                f.write_str(stringify!($name))
            }
        }
        #[cfg(feature = "extra-traits")]
        impl ::std::cmp::Eq for $name {}
        // Tokens of the same type always compare equal: only their spans can
        // differ, and spans are deliberately ignored in comparisons.
        #[cfg(feature = "extra-traits")]
        impl ::std::cmp::PartialEq for $name {
            fn eq(&self, _other: &$name) -> bool {
                true
            }
        }
        // Hash is a no-op for the same reason: all values are equal.
        #[cfg(feature = "extra-traits")]
        impl ::std::hash::Hash for $name {
            fn hash<H>(&self, _state: &mut H)
            where H: ::std::hash::Hasher
            {}
        }
        #[cfg(feature = "printing")]
        impl ::quote::ToTokens for $name {
            fn to_tokens(&self, tokens: &mut ::quote::Tokens) {
                printing::punct($s, &self.0, tokens);
            }
        }
        #[cfg(feature = "parsing")]
        impl ::Synom for $name {
            fn parse(tokens: $crate::buffer::Cursor) -> $crate::synom::PResult<$name> {
                parsing::punct($s, tokens, $name)
            }
            fn description() -> Option<&'static str> {
                Some(concat!("`", $s, "`"))
            }
        }
        impl From<Span> for $name {
            fn from(span: Span) -> Self {
                $name([span; $len])
            }
        }
    }
}
// Defines one keyword token type: a tuple struct holding a single `Span`,
// plus the same set of trait impls as punctuation tokens.
macro_rules! token_keyword {
    (#[$doc:meta] $s:tt pub struct $name:ident) => {
        #[cfg_attr(feature = "clone-impls", derive(Copy, Clone))]
        #[$doc]
        ///
        /// Don't try to remember the name of this type -- use the [`Token!`]
        /// macro instead.
        ///
        /// [`Token!`]: index.html
        pub struct $name(pub Span);
        impl ::std::default::Default for $name {
            fn default() -> Self {
                $name(Span::def_site())
            }
        }
        #[cfg(feature = "extra-traits")]
        impl ::std::fmt::Debug for $name {
            fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                f.write_str(stringify!($name))
            }
        }
        #[cfg(feature = "extra-traits")]
        impl ::std::cmp::Eq for $name {}
        // Keyword tokens of the same type always compare equal: spans are
        // deliberately ignored in comparisons.
        #[cfg(feature = "extra-traits")]
        impl ::std::cmp::PartialEq for $name {
            fn eq(&self, _other: &$name) -> bool {
                true
            }
        }
        // Hash is a no-op for the same reason: all values are equal.
        #[cfg(feature = "extra-traits")]
        impl ::std::hash::Hash for $name {
            fn hash<H>(&self, _state: &mut H)
            where H: ::std::hash::Hasher
            {}
        }
        #[cfg(feature = "printing")]
        impl ::quote::ToTokens for $name {
            fn to_tokens(&self, tokens: &mut ::quote::Tokens) {
                printing::keyword($s, &self.0, tokens);
            }
        }
        #[cfg(feature = "parsing")]
        impl ::Synom for $name {
            fn parse(tokens: $crate::buffer::Cursor) -> $crate::synom::PResult<$name> {
                parsing::keyword($s, tokens, $name)
            }
            fn description() -> Option<&'static str> {
                Some(concat!("`", $s, "`"))
            }
        }
        impl From<Span> for $name {
            fn from(span: Span) -> Self {
                $name(span)
            }
        }
    }
}
// Defines one delimiter token type (brace, bracket, paren, none-group).
// Unlike punct/keyword tokens these do not implement Synom/ToTokens directly;
// instead they expose `surround` (printing) and `parse` (parsing) which wrap
// the delimited content.
macro_rules! token_delimiter {
    (#[$doc:meta] $s:tt pub struct $name:ident) => {
        #[cfg_attr(feature = "clone-impls", derive(Copy, Clone))]
        #[$doc]
        pub struct $name(pub Span);
        impl ::std::default::Default for $name {
            fn default() -> Self {
                $name(Span::def_site())
            }
        }
        #[cfg(feature = "extra-traits")]
        impl ::std::fmt::Debug for $name {
            fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                f.write_str(stringify!($name))
            }
        }
        #[cfg(feature = "extra-traits")]
        impl ::std::cmp::Eq for $name {}
        // Delimiter tokens of the same type always compare equal: spans are
        // deliberately ignored in comparisons.
        #[cfg(feature = "extra-traits")]
        impl ::std::cmp::PartialEq for $name {
            fn eq(&self, _other: &$name) -> bool {
                true
            }
        }
        // Hash is a no-op for the same reason: all values are equal.
        #[cfg(feature = "extra-traits")]
        impl ::std::hash::Hash for $name {
            fn hash<H>(&self, _state: &mut H)
            where H: ::std::hash::Hasher
            {}
        }
        impl $name {
            // Print `f`'s output wrapped in this delimiter.
            #[cfg(feature = "printing")]
            pub fn surround<F>(&self,
                               tokens: &mut ::quote::Tokens,
                               f: F)
                where F: FnOnce(&mut ::quote::Tokens)
            {
                printing::delim($s, &self.0, tokens, f);
            }
            // Parse this delimiter and run `f` on the tokens inside it.
            #[cfg(feature = "parsing")]
            pub fn parse<F, R>(tokens: $crate::buffer::Cursor, f: F) -> $crate::synom::PResult<($name, R)>
                where F: FnOnce($crate::buffer::Cursor) -> $crate::synom::PResult<R>
            {
                parsing::delim($s, tokens, $name, f)
            }
        }
        impl From<Span> for $name {
            fn from(span: Span) -> Self {
                $name(span)
            }
        }
    }
}
// The concrete token types. Each line pairs the token's source text with the
// generated struct name; punct entries also carry `/N`, the character count
// (and thus the number of spans stored). The trailing `///` comment on each
// line is macro input and becomes the generated type's doc attribute.
tokens! {
    punct: {
        "+" pub struct Add/1 /// `+`
        "+=" pub struct AddEq/2 /// `+=`
        "&" pub struct And/1 /// `&`
        "&&" pub struct AndAnd/2 /// `&&`
        "&=" pub struct AndEq/2 /// `&=`
        "@" pub struct At/1 /// `@`
        "!" pub struct Bang/1 /// `!`
        "^" pub struct Caret/1 /// `^`
        "^=" pub struct CaretEq/2 /// `^=`
        ":" pub struct Colon/1 /// `:`
        "::" pub struct Colon2/2 /// `::`
        "," pub struct Comma/1 /// `,`
        "/" pub struct Div/1 /// `/`
        "/=" pub struct DivEq/2 /// `/=`
        "." pub struct Dot/1 /// `.`
        ".." pub struct Dot2/2 /// `..`
        "..." pub struct Dot3/3 /// `...`
        "..=" pub struct DotDotEq/3 /// `..=`
        "=" pub struct Eq/1 /// `=`
        "==" pub struct EqEq/2 /// `==`
        ">=" pub struct Ge/2 /// `>=`
        ">" pub struct Gt/1 /// `>`
        "<=" pub struct Le/2 /// `<=`
        "<" pub struct Lt/1 /// `<`
        "*=" pub struct MulEq/2 /// `*=`
        "!=" pub struct Ne/2 /// `!=`
        "|" pub struct Or/1 /// `|`
        "|=" pub struct OrEq/2 /// `|=`
        "||" pub struct OrOr/2 /// `||`
        "#" pub struct Pound/1 /// `#`
        "?" pub struct Question/1 /// `?`
        "->" pub struct RArrow/2 /// `->`
        "<-" pub struct LArrow/2 /// `<-`
        "%" pub struct Rem/1 /// `%`
        "%=" pub struct RemEq/2 /// `%=`
        "=>" pub struct Rocket/2 /// `=>`
        ";" pub struct Semi/1 /// `;`
        "<<" pub struct Shl/2 /// `<<`
        "<<=" pub struct ShlEq/3 /// `<<=`
        ">>" pub struct Shr/2 /// `>>`
        ">>=" pub struct ShrEq/3 /// `>>=`
        "*" pub struct Star/1 /// `*`
        "-" pub struct Sub/1 /// `-`
        "-=" pub struct SubEq/2 /// `-=`
        "_" pub struct Underscore/1 /// `_`
    }
    delimiter: {
        "{" pub struct Brace /// `{...}`
        "[" pub struct Bracket /// `[...]`
        "(" pub struct Paren /// `(...)`
        " " pub struct Group /// None-delimited group
    }
    keyword: {
        "as" pub struct As /// `as`
        "auto" pub struct Auto /// `auto`
        "box" pub struct Box /// `box`
        "break" pub struct Break /// `break`
        "Self" pub struct CapSelf /// `Self`
        "catch" pub struct Catch /// `catch`
        "const" pub struct Const /// `const`
        "continue" pub struct Continue /// `continue`
        "crate" pub struct Crate /// `crate`
        "default" pub struct Default /// `default`
        "do" pub struct Do /// `do`
        "dyn" pub struct Dyn /// `dyn`
        "else" pub struct Else /// `else`
        "enum" pub struct Enum /// `enum`
        "extern" pub struct Extern /// `extern`
        "fn" pub struct Fn /// `fn`
        "for" pub struct For /// `for`
        "if" pub struct If /// `if`
        "impl" pub struct Impl /// `impl`
        "in" pub struct In /// `in`
        "let" pub struct Let /// `let`
        "loop" pub struct Loop /// `loop`
        "macro" pub struct Macro /// `macro`
        "match" pub struct Match /// `match`
        "mod" pub struct Mod /// `mod`
        "move" pub struct Move /// `move`
        "mut" pub struct Mut /// `mut`
        "pub" pub struct Pub /// `pub`
        "ref" pub struct Ref /// `ref`
        "return" pub struct Return /// `return`
        "self" pub struct Self_ /// `self`
        "static" pub struct Static /// `static`
        "struct" pub struct Struct /// `struct`
        "super" pub struct Super /// `super`
        "trait" pub struct Trait /// `trait`
        "type" pub struct Type /// `type`
        "union" pub struct Union /// `union`
        "unsafe" pub struct Unsafe /// `unsafe`
        "use" pub struct Use /// `use`
        "where" pub struct Where /// `where`
        "while" pub struct While /// `while`
        "yield" pub struct Yield /// `yield`
    }
}
/// A type-macro that expands to the name of the Rust type representation of a
/// given token.
///
/// See the [token module] documentation for details and examples.
///
/// [token module]: token/index.html
// Unfortunate duplication due to a rustdoc bug.
// https://github.com/rust-lang/rust/issues/45939
#[macro_export]
macro_rules! Token {
    // Punctuation tokens.
    (+) => { $crate::token::Add };
    (+=) => { $crate::token::AddEq };
    (&) => { $crate::token::And };
    (&&) => { $crate::token::AndAnd };
    (&=) => { $crate::token::AndEq };
    (@) => { $crate::token::At };
    (!) => { $crate::token::Bang };
    (^) => { $crate::token::Caret };
    (^=) => { $crate::token::CaretEq };
    (:) => { $crate::token::Colon };
    (::) => { $crate::token::Colon2 };
    (,) => { $crate::token::Comma };
    (/) => { $crate::token::Div };
    (/=) => { $crate::token::DivEq };
    (.) => { $crate::token::Dot };
    (..) => { $crate::token::Dot2 };
    (...) => { $crate::token::Dot3 };
    (..=) => { $crate::token::DotDotEq };
    (=) => { $crate::token::Eq };
    (==) => { $crate::token::EqEq };
    (>=) => { $crate::token::Ge };
    (>) => { $crate::token::Gt };
    (<=) => { $crate::token::Le };
    (<) => { $crate::token::Lt };
    (*=) => { $crate::token::MulEq };
    (!=) => { $crate::token::Ne };
    (|) => { $crate::token::Or };
    (|=) => { $crate::token::OrEq };
    (||) => { $crate::token::OrOr };
    (#) => { $crate::token::Pound };
    (?) => { $crate::token::Question };
    (->) => { $crate::token::RArrow };
    (<-) => { $crate::token::LArrow };
    (%) => { $crate::token::Rem };
    (%=) => { $crate::token::RemEq };
    (=>) => { $crate::token::Rocket };
    (;) => { $crate::token::Semi };
    (<<) => { $crate::token::Shl };
    (<<=) => { $crate::token::ShlEq };
    (>>) => { $crate::token::Shr };
    (>>=) => { $crate::token::ShrEq };
    (*) => { $crate::token::Star };
    (-) => { $crate::token::Sub };
    (-=) => { $crate::token::SubEq };
    (_) => { $crate::token::Underscore };
    // Keyword tokens.
    (as) => { $crate::token::As };
    (auto) => { $crate::token::Auto };
    (box) => { $crate::token::Box };
    (break) => { $crate::token::Break };
    (Self) => { $crate::token::CapSelf };
    (catch) => { $crate::token::Catch };
    (const) => { $crate::token::Const };
    (continue) => { $crate::token::Continue };
    (crate) => { $crate::token::Crate };
    (default) => { $crate::token::Default };
    (do) => { $crate::token::Do };
    (dyn) => { $crate::token::Dyn };
    (else) => { $crate::token::Else };
    (enum) => { $crate::token::Enum };
    (extern) => { $crate::token::Extern };
    (fn) => { $crate::token::Fn };
    (for) => { $crate::token::For };
    (if) => { $crate::token::If };
    (impl) => { $crate::token::Impl };
    (in) => { $crate::token::In };
    (let) => { $crate::token::Let };
    (loop) => { $crate::token::Loop };
    (macro) => { $crate::token::Macro };
    (match) => { $crate::token::Match };
    (mod) => { $crate::token::Mod };
    (move) => { $crate::token::Move };
    (mut) => { $crate::token::Mut };
    (pub) => { $crate::token::Pub };
    (ref) => { $crate::token::Ref };
    (return) => { $crate::token::Return };
    (self) => { $crate::token::Self_ };
    (static) => { $crate::token::Static };
    (struct) => { $crate::token::Struct };
    (super) => { $crate::token::Super };
    (trait) => { $crate::token::Trait };
    (type) => { $crate::token::Type };
    (union) => { $crate::token::Union };
    (unsafe) => { $crate::token::Unsafe };
    (use) => { $crate::token::Use };
    (where) => { $crate::token::Where };
    (while) => { $crate::token::While };
    (yield) => { $crate::token::Yield };
}
/// Parse a single Rust punctuation token.
///
/// See the [token module] documentation for details and examples.
///
/// [token module]: token/index.html
///
/// *This macro is available if Syn is built with the `"parsing"` feature.*
#[cfg(feature = "parsing")]
#[macro_export]
macro_rules! punct {
($i:expr, +) => { call!($i, <$crate::token::Add as $crate::synom::Synom>::parse) };
($i:expr, +=) => { call!($i, <$crate::token::AddEq as $crate::synom::Synom>::parse) };
($i:expr, &) => { call!($i, <$crate::token::And as $crate::synom::Synom>::parse) };
($i:expr, &&) => { call!($i, <$crate::token::AndAnd as $crate::synom::Synom>::parse) };
($i:expr, &=) => { call!($i, <$crate::token::AndEq as $crate::synom::Synom>::parse) };
($i:expr, @) => { call!($i, <$crate::token::At as $crate::synom::Synom>::parse) };
($i:expr, !) => { call!($i, <$crate::token::Bang as $crate::synom::Synom>::parse) };
($i:expr, ^) => { call!($i, <$crate::token::Caret as $crate::synom::Synom>::parse) };
($i:expr, ^=) => { call!($i, <$crate::token::CaretEq as $crate::synom::Synom>::parse) };
($i:expr, :) => { call!($i, <$crate::token::Colon as $crate::synom::Synom>::parse) };
($i:expr, ::) => { call!($i, <$crate::token::Colon2 as $crate::synom::Synom>::parse) };
($i:expr, ,) => { call!($i, <$crate::token::Comma as $crate::synom::Synom>::parse) };
($i:expr, /) => { call!($i, <$crate::token::Div as $crate::synom::Synom>::parse) };
($i:expr, /=) => { call!($i, <$crate::token::DivEq as $crate::synom::Synom>::parse) };
($i:expr, .) => { call!($i, <$crate::token::Dot as $crate::synom::Synom>::parse) };
($i:expr, ..) => { call!($i, <$crate::token::Dot2 as $crate::synom::Synom>::parse) };
($i:expr, ...) => { call!($i, <$crate::token::Dot3 as $crate::synom::Synom>::parse) };
($i:expr, ..=) => { call!($i, <$crate::token::DotDotEq as $crate::synom::Synom>::parse) };
($i:expr, =) => { call!($i, <$crate::token::Eq as $crate::synom::Synom>::parse) };
($i:expr, ==) => { call!($i, <$crate::token::EqEq as $crate::synom::Synom>::parse) };
($i:expr, >=) => { call!($i, <$crate::token::Ge as $crate::synom::Synom>::parse) };
($i:expr, >) => { call!($i, <$crate::token::Gt as $crate::synom::Synom>::parse) };
($i:expr, <=) => { call!($i, <$crate::token::Le as $crate::synom::Synom>::parse) };
($i:expr, <) => { call!($i, <$crate::token::Lt as $crate::synom::Synom>::parse) };
($i:expr, *=) => { call!($i, <$crate::token::MulEq as $crate::synom::Synom>::parse) };
($i:expr, !=) => { call!($i, <$crate::token::Ne as $crate::synom::Synom>::parse) };
($i:expr, |) => { call!($i, <$crate::token::Or as $crate::synom::Synom>::parse) };
($i:expr, |=) => { call!($i, <$crate::token::OrEq as $crate::synom::Synom>::parse) };
($i:expr, ||) => { call!($i, <$crate::token::OrOr as $crate::synom::Synom>::parse) };
($i:expr, #) => { call!($i, <$crate::token::Pound as $crate::synom::Synom>::parse) };
($i:expr, ?) => { call!($i, <$crate::token::Question as $crate::synom::Synom>::parse) };
($i:expr, ->) => { call!($i, <$crate::token::RArrow as $crate::synom::Synom>::parse) };
($i:expr, <-) => { call!($i, <$crate::token::LArrow as $crate::synom::Synom>::parse) };
($i:expr, %) => { call!($i, <$crate::token::Rem as $crate::synom::Synom>::parse) };
($i:expr, %=) => { call!($i, <$crate::token::RemEq as $crate::synom::Synom>::parse) };
($i:expr, =>) => { call!($i, <$crate::token::Rocket as $crate::synom::Synom>::parse) };
($i:expr, ;) => { call!($i, <$crate::token::Semi as $crate::synom::Synom>::parse) };
($i:expr, <<) => { call!($i, <$crate::token::Shl as $crate::synom::Synom>::parse) };
($i:expr, <<=) => { call!($i, <$crate::token::ShlEq as $crate::synom::Synom>::parse) };
($i:expr, >>) => { call!($i, <$crate::token::Shr as $crate::synom::Synom>::parse) };
($i:expr, >>=) => { call!($i, <$crate::token::ShrEq as $crate::synom::Synom>::parse) };
($i:expr, *) => { call!($i, <$crate::token::Star as $crate::synom::Synom>::parse) };
($i:expr, -) => { call!($i, <$crate::token::Sub as $crate::synom::Synom>::parse) };
($i:expr, -=) => { call!($i, <$crate::token::SubEq as $crate::synom::Synom>::parse) };
($i:expr, _) => { call!($i, <$crate::token::Underscore as $crate::synom::Synom>::parse) };
}
/// Parse a single Rust keyword token.
///
/// See the [token module] documentation for details and examples.
///
/// [token module]: token/index.html
///
/// *This macro is available if Syn is built with the `"parsing"` feature.*
#[cfg(feature = "parsing")]
#[macro_export]
macro_rules! keyword {
($i:expr, as) => { call!($i, <$crate::token::As as $crate::synom::Synom>::parse) };
($i:expr, auto) => { call!($i, <$crate::token::Auto as $crate::synom::Synom>::parse) };
($i:expr, box) => { call!($i, <$crate::token::Box as $crate::synom::Synom>::parse) };
($i:expr, break) => { call!($i, <$crate::token::Break as $crate::synom::Synom>::parse) };
($i:expr, Self) => { call!($i, <$crate::token::CapSelf as $crate::synom::Synom>::parse) };
($i:expr, catch) => { call!($i, <$crate::token::Catch as $crate::synom::Synom>::parse) };
($i:expr, const) => { call!($i, <$crate::token::Const as $crate::synom::Synom>::parse) };
($i:expr, continue) => { call!($i, <$crate::token::Continue as $crate::synom::Synom>::parse) };
($i:expr, crate) => { call!($i, <$crate::token::Crate as $crate::synom::Synom>::parse) };
($i:expr, default) => { call!($i, <$crate::token::Default as $crate::synom::Synom>::parse) };
($i:expr, do) => { call!($i, <$crate::token::Do as $crate::synom::Synom>::parse) };
($i:expr, dyn) => { call!($i, <$crate::token::Dyn as $crate::synom::Synom>::parse) };
($i:expr, else) => { call!($i, <$crate::token::Else as $crate::synom::Synom>::parse) };
($i:expr, enum) => { call!($i, <$crate::token::Enum as $crate::synom::Synom>::parse) };
($i:expr, extern) => { call!($i, <$crate::token::Extern as $crate::synom::Synom>::parse) };
($i:expr, fn) => { call!($i, <$crate::token::Fn as $crate::synom::Synom>::parse) };
($i:expr, for) => { call!($i, <$crate::token::For as $crate::synom::Synom>::parse) };
($i:expr, if) => { call!($i, <$crate::token::If as $crate::synom::Synom>::parse) };
($i:expr, impl) => { call!($i, <$crate::token::Impl as $crate::synom::Synom>::parse) };
($i:expr, in) => { call!($i, <$crate::token::In as $crate::synom::Synom>::parse) };
($i:expr, let) => { call!($i, <$crate::token::Let as $crate::synom::Synom>::parse) };
($i:expr, loop) => { call!($i, <$crate::token::Loop as $crate::synom::Synom>::parse) };
($i:expr, macro) => { call!($i, <$crate::token::Macro as $crate::synom::Synom>::parse) };
($i:expr, match) => { call!($i, <$crate::token::Match as $crate::synom::Synom>::parse) };
($i:expr, mod) => { call!($i, <$crate::token::Mod as $crate::synom::Synom>::parse) };
($i:expr, move) => { call!($i, <$crate::token::Move as $crate::synom::Synom>::parse) };
($i:expr, mut) => { call!($i, <$crate::token::Mut as $crate::synom::Synom>::parse) };
($i:expr, pub) => { call!($i, <$crate::token::Pub as $crate::synom::Synom>::parse) };
($i:expr, ref) => { call!($i, <$crate::token::Ref as $crate::synom::Synom>::parse) };
($i:expr, return) => { call!($i, <$crate::token::Return as $crate::synom::Synom>::parse) };
($i:expr, self) => { call!($i, <$crate::token::Self_ as $crate::synom::Synom>::parse) };
($i:expr, static) => { call!($i, <$crate::token::Static as $crate::synom::Synom>::parse) };
($i:expr, struct) => { call!($i, <$crate::token::Struct as $crate::synom::Synom>::parse) };
($i:expr, super) => { call!($i, <$crate::token::Super as $crate::synom::Synom>::parse) };
($i:expr, trait) => { call!($i, <$crate::token::Trait as $crate::synom::Synom>::parse) };
($i:expr, type) => { call!($i, <$crate::token::Type as $crate::synom::Synom>::parse) };
($i:expr, union) => { call!($i, <$crate::token::Union as $crate::synom::Synom>::parse) };
($i:expr, unsafe) => { call!($i, <$crate::token::Unsafe as $crate::synom::Synom>::parse) };
($i:expr, use) => { call!($i, <$crate::token::Use as $crate::synom::Synom>::parse) };
($i:expr, where) => { call!($i, <$crate::token::Where as $crate::synom::Synom>::parse) };
($i:expr, while) => { call!($i, <$crate::token::While as $crate::synom::Synom>::parse) };
($i:expr, yield) => { call!($i, <$crate::token::Yield as $crate::synom::Synom>::parse) };
}
#[cfg(feature = "parsing")]
mod parsing {
use proc_macro2::{Delimiter, Spacing, Span};
use buffer::Cursor;
use parse_error;
use synom::PResult;
pub trait FromSpans: Sized {
fn from_spans(spans: &[Span]) -> Self;
}
impl FromSpans for [Span; 1] {
fn from_spans(spans: &[Span]) -> Self {
[spans[0]]
}
}
impl FromSpans for [Span; 2] {
fn from_spans(spans: &[Span]) -> Self {
[spans[0], spans[1]]
}
}
impl FromSpans for [Span; 3] {
fn from_spans(spans: &[Span]) -> Self {
[spans[0], spans[1], spans[2]]
}
}
pub fn punct<'a, T, R>(s: &str, mut tokens: Cursor<'a>, new: fn(T) -> R) -> PResult<'a, R>
where
T: FromSpans,
{
let mut spans = [Span::def_site(); 3];
assert!(s.len() <= spans.len());
let chars = s.chars();
for (i, (ch, slot)) in chars.zip(&mut spans).enumerate() {
match tokens.op() {
Some((span, op, kind, rest)) if op == ch => {
if i != s.len() - 1 {
match kind {
Spacing::Joint => {}
_ => return parse_error(),
}
}
*slot = span;
tokens = rest;
}
_ => return parse_error(),
}
}
Ok((new(T::from_spans(&spans)), tokens))
}
pub fn keyword<'a, T>(keyword: &str, tokens: Cursor<'a>, new: fn(Span) -> T) -> PResult<'a, T> {
if let Some((span, term, rest)) = tokens.term() {
if term.as_str() == keyword {
return Ok((new(span), rest));
}
}
parse_error()
}
pub fn delim<'a, F, R, T>(
delim: &str,
tokens: Cursor<'a>,
new: fn(Span) -> T,
f: F,
) -> PResult<'a, (T, R)>
where
F: FnOnce(Cursor) -> PResult<R>,
{
// NOTE: We should support none-delimited sequences here.
let delim = match delim {
"(" => Delimiter::Parenthesis,
"{" => Delimiter::Brace,
"[" => Delimiter::Bracket,
" " => Delimiter::None,
_ => panic!("unknown delimiter: {}", delim),
};
if let Some((inside, span, rest)) = tokens.group(delim) {
match f(inside) {
Ok((ret, remaining)) => {
if remaining.eof() {
return Ok(((new(span), ret), rest));
}
}
Err(err) => return Err(err),
}
}
parse_error()
}
}
#[cfg(feature = "printing")]
mod printing {
use proc_macro2::{Delimiter, Spacing, Span, Term, TokenNode, TokenTree};
use quote::Tokens;
pub fn punct(s: &str, spans: &[Span], tokens: &mut Tokens) {
assert_eq!(s.len(), spans.len());
let mut chars = s.chars();
let mut spans = spans.iter();
let ch = chars.next_back().unwrap();
let span = spans.next_back().unwrap();
for (ch, span) in chars.zip(spans) {
tokens.append(TokenTree {
span: *span,
kind: TokenNode::Op(ch, Spacing::Joint),
});
}
tokens.append(TokenTree {
span: *span,
kind: TokenNode::Op(ch, Spacing::Alone),
});
}
pub fn keyword(s: &str, span: &Span, tokens: &mut Tokens) {
tokens.append(TokenTree {
span: *span,
kind: TokenNode::Term(Term::intern(s)),
});
}
pub fn delim<F>(s: &str, span: &Span, tokens: &mut Tokens, f: F)
where
F: FnOnce(&mut Tokens),
{
let delim = match s {
"(" => Delimiter::Parenthesis,
"[" => Delimiter::Bracket,
"{" => Delimiter::Brace,
" " => Delimiter::None,
_ => panic!("unknown delimiter: {}", s),
};
let mut inner = Tokens::new();
f(&mut inner);
tokens.append(TokenTree {
span: *span,
kind: TokenNode::Group(delim, inner.into()),
});
}
}

184
third_party/rust/syn-0.12.12/src/tt.rs поставляемый
Просмотреть файл

@ -1,184 +0,0 @@
// Copyright 2018 Syn Developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[cfg(feature = "parsing")]
use buffer::Cursor;
#[cfg(feature = "parsing")]
use synom::PResult;
#[cfg(feature = "parsing")]
use token::{Brace, Bracket, Paren};
#[cfg(feature = "parsing")]
use {parse_error, MacroDelimiter};
#[cfg(feature = "extra-traits")]
use std::hash::{Hash, Hasher};
#[cfg(any(feature = "parsing", feature = "extra-traits"))]
use proc_macro2::{Delimiter, TokenNode, TokenStream, TokenTree};
#[cfg(feature = "parsing")]
pub fn delimited(input: Cursor) -> PResult<(MacroDelimiter, TokenStream)> {
match input.token_tree() {
Some((
TokenTree {
span,
kind: TokenNode::Group(delimiter, tts),
},
rest,
)) => {
let delimiter = match delimiter {
Delimiter::Parenthesis => MacroDelimiter::Paren(Paren(span)),
Delimiter::Brace => MacroDelimiter::Brace(Brace(span)),
Delimiter::Bracket => MacroDelimiter::Bracket(Bracket(span)),
Delimiter::None => return parse_error(),
};
Ok(((delimiter, tts), rest))
}
_ => parse_error(),
}
}
#[cfg(all(feature = "full", feature = "parsing"))]
pub fn braced(input: Cursor) -> PResult<(Brace, TokenStream)> {
match input.token_tree() {
Some((
TokenTree {
span,
kind: TokenNode::Group(Delimiter::Brace, tts),
},
rest,
)) => Ok(((Brace(span), tts), rest)),
_ => parse_error(),
}
}
#[cfg(all(feature = "full", feature = "parsing"))]
pub fn parenthesized(input: Cursor) -> PResult<(Paren, TokenStream)> {
match input.token_tree() {
Some((
TokenTree {
span,
kind: TokenNode::Group(Delimiter::Parenthesis, tts),
},
rest,
)) => Ok(((Paren(span), tts), rest)),
_ => parse_error(),
}
}
#[cfg(feature = "extra-traits")]
pub struct TokenTreeHelper<'a>(pub &'a TokenTree);
#[cfg(feature = "extra-traits")]
impl<'a> PartialEq for TokenTreeHelper<'a> {
fn eq(&self, other: &Self) -> bool {
use proc_macro2::Spacing;
match (&self.0.kind, &other.0.kind) {
(&TokenNode::Group(d1, ref s1), &TokenNode::Group(d2, ref s2)) => {
match (d1, d2) {
(Delimiter::Parenthesis, Delimiter::Parenthesis)
| (Delimiter::Brace, Delimiter::Brace)
| (Delimiter::Bracket, Delimiter::Bracket)
| (Delimiter::None, Delimiter::None) => {}
_ => return false,
}
let s1 = s1.clone().into_iter();
let mut s2 = s2.clone().into_iter();
for item1 in s1 {
let item2 = match s2.next() {
Some(item) => item,
None => return false,
};
if TokenTreeHelper(&item1) != TokenTreeHelper(&item2) {
return false;
}
}
s2.next().is_none()
}
(&TokenNode::Op(o1, k1), &TokenNode::Op(o2, k2)) => {
o1 == o2 && match (k1, k2) {
(Spacing::Alone, Spacing::Alone) | (Spacing::Joint, Spacing::Joint) => true,
_ => false,
}
}
(&TokenNode::Literal(ref l1), &TokenNode::Literal(ref l2)) => {
l1.to_string() == l2.to_string()
}
(&TokenNode::Term(ref s1), &TokenNode::Term(ref s2)) => s1.as_str() == s2.as_str(),
_ => false,
}
}
}
#[cfg(feature = "extra-traits")]
impl<'a> Hash for TokenTreeHelper<'a> {
fn hash<H: Hasher>(&self, h: &mut H) {
use proc_macro2::Spacing;
match self.0.kind {
TokenNode::Group(delim, ref stream) => {
0u8.hash(h);
match delim {
Delimiter::Parenthesis => 0u8.hash(h),
Delimiter::Brace => 1u8.hash(h),
Delimiter::Bracket => 2u8.hash(h),
Delimiter::None => 3u8.hash(h),
}
for item in stream.clone() {
TokenTreeHelper(&item).hash(h);
}
0xffu8.hash(h); // terminator w/ a variant we don't normally hash
}
TokenNode::Op(op, kind) => {
1u8.hash(h);
op.hash(h);
match kind {
Spacing::Alone => 0u8.hash(h),
Spacing::Joint => 1u8.hash(h),
}
}
TokenNode::Literal(ref lit) => (2u8, lit.to_string()).hash(h),
TokenNode::Term(ref word) => (3u8, word.as_str()).hash(h),
}
}
}
#[cfg(feature = "extra-traits")]
pub struct TokenStreamHelper<'a>(pub &'a TokenStream);
#[cfg(feature = "extra-traits")]
impl<'a> PartialEq for TokenStreamHelper<'a> {
fn eq(&self, other: &Self) -> bool {
let left = self.0.clone().into_iter().collect::<Vec<_>>();
let right = other.0.clone().into_iter().collect::<Vec<_>>();
if left.len() != right.len() {
return false;
}
for (a, b) in left.into_iter().zip(right) {
if TokenTreeHelper(&a) != TokenTreeHelper(&b) {
return false;
}
}
true
}
}
#[cfg(feature = "extra-traits")]
impl<'a> Hash for TokenStreamHelper<'a> {
fn hash<H: Hasher>(&self, state: &mut H) {
let tts = self.0.clone().into_iter().collect::<Vec<_>>();
tts.len().hash(state);
for tt in tts {
TokenTreeHelper(&tt).hash(state);
}
}
}

811
third_party/rust/syn-0.12.12/src/ty.rs поставляемый
Просмотреть файл

@ -1,811 +0,0 @@
// Copyright 2018 Syn Developers
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use punctuated::Punctuated;
use super::*;
use proc_macro2::TokenStream;
#[cfg(feature = "extra-traits")]
use std::hash::{Hash, Hasher};
#[cfg(feature = "extra-traits")]
use tt::TokenStreamHelper;
ast_enum_of_structs! {
/// The possible types that a Rust value could have.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// feature.*
///
/// # Syntax tree enum
///
/// This type is a [syntax tree enum].
///
/// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
pub enum Type {
/// A dynamically sized slice type: `[T]`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub Slice(TypeSlice {
pub bracket_token: token::Bracket,
pub elem: Box<Type>,
}),
/// A fixed size array type: `[T; n]`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub Array(TypeArray {
pub bracket_token: token::Bracket,
pub elem: Box<Type>,
pub semi_token: Token![;],
pub len: Expr,
}),
/// A raw pointer type: `*const T` or `*mut T`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub Ptr(TypePtr {
pub star_token: Token![*],
pub const_token: Option<Token![const]>,
pub mutability: Option<Token![mut]>,
pub elem: Box<Type>,
}),
/// A reference type: `&'a T` or `&'a mut T`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub Reference(TypeReference {
pub and_token: Token![&],
pub lifetime: Option<Lifetime>,
pub mutability: Option<Token![mut]>,
pub elem: Box<Type>,
}),
/// A bare function type: `fn(usize) -> bool`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub BareFn(TypeBareFn {
pub unsafety: Option<Token![unsafe]>,
pub abi: Option<Abi>,
pub fn_token: Token![fn],
pub lifetimes: Option<BoundLifetimes>,
pub paren_token: token::Paren,
pub inputs: Punctuated<BareFnArg, Token![,]>,
pub variadic: Option<Token![...]>,
pub output: ReturnType,
}),
/// The never type: `!`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub Never(TypeNever {
pub bang_token: Token![!],
}),
/// A tuple type: `(A, B, C, String)`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub Tuple(TypeTuple {
pub paren_token: token::Paren,
pub elems: Punctuated<Type, Token![,]>,
}),
/// A path like `std::slice::Iter`, optionally qualified with a
/// self-type as in `<Vec<T> as SomeTrait>::Associated`.
///
/// Type arguments are stored in the Path itself.
///
/// *This type is available if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub Path(TypePath {
pub qself: Option<QSelf>,
pub path: Path,
}),
/// A trait object type `Bound1 + Bound2 + Bound3` where `Bound` is a
/// trait or a lifetime.
///
/// *This type is available if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub TraitObject(TypeTraitObject {
pub dyn_token: Option<Token![dyn]>,
pub bounds: Punctuated<TypeParamBound, Token![+]>,
}),
/// An `impl Bound1 + Bound2 + Bound3` type where `Bound` is a trait or
/// a lifetime.
///
/// *This type is available if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub ImplTrait(TypeImplTrait {
pub impl_token: Token![impl],
pub bounds: Punctuated<TypeParamBound, Token![+]>,
}),
/// A parenthesized type equivalent to the inner type.
///
/// *This type is available if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub Paren(TypeParen {
pub paren_token: token::Paren,
pub elem: Box<Type>,
}),
/// A type contained within invisible delimiters.
///
/// *This type is available if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub Group(TypeGroup {
pub group_token: token::Group,
pub elem: Box<Type>,
}),
/// Indication that a type should be inferred by the compiler: `_`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub Infer(TypeInfer {
pub underscore_token: Token![_],
}),
/// A macro in the type position.
///
/// *This type is available if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub Macro(TypeMacro {
pub mac: Macro,
}),
/// Tokens in type position not interpreted by Syn.
///
/// *This type is available if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub Verbatim(TypeVerbatim #manual_extra_traits {
pub tts: TokenStream,
}),
}
}
#[cfg(feature = "extra-traits")]
impl Eq for TypeVerbatim {}
#[cfg(feature = "extra-traits")]
impl PartialEq for TypeVerbatim {
fn eq(&self, other: &Self) -> bool {
TokenStreamHelper(&self.tts) == TokenStreamHelper(&other.tts)
}
}
#[cfg(feature = "extra-traits")]
impl Hash for TypeVerbatim {
fn hash<H>(&self, state: &mut H)
where
H: Hasher,
{
TokenStreamHelper(&self.tts).hash(state);
}
}
ast_struct! {
/// The binary interface of a function: `extern "C"`.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// feature.*
pub struct Abi {
pub extern_token: Token![extern],
pub name: Option<LitStr>,
}
}
ast_struct! {
/// An argument in a function type: the `usize` in `fn(usize) -> bool`.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// feature.*
pub struct BareFnArg {
pub name: Option<(BareFnArgName, Token![:])>,
pub ty: Type,
}
}
ast_enum! {
/// Name of an argument in a function type: the `n` in `fn(n: usize)`.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// feature.*
pub enum BareFnArgName {
/// Argument given a name.
Named(Ident),
/// Argument not given a name, matched with `_`.
Wild(Token![_]),
}
}
ast_enum! {
/// Return type of a function signature.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// feature.*
pub enum ReturnType {
/// Return type is not specified.
///
/// Functions default to `()` and closures default to type inference.
Default,
/// A particular type is returned.
Type(Token![->], Box<Type>),
}
}
#[cfg(feature = "parsing")]
pub mod parsing {
use super::*;
use synom::Synom;
use path::parsing::qpath;
impl Synom for Type {
named!(parse -> Self, call!(ambig_ty, true));
fn description() -> Option<&'static str> {
Some("type")
}
}
impl Type {
/// In some positions, types may not contain the `+` character, to
/// disambiguate them. For example in the expression `1 as T`, T may not
/// contain a `+` character.
///
/// This parser does not allow a `+`, while the default parser does.
named!(pub without_plus -> Self, call!(ambig_ty, false));
}
named!(ambig_ty(allow_plus: bool) -> Type, alt!(
syn!(TypeGroup) => { Type::Group }
|
// must be before TypeTuple
call!(TypeParen::parse, allow_plus) => { Type::Paren }
|
// must be before TypePath
syn!(TypeMacro) => { Type::Macro }
|
// must be before TypeTraitObject
call!(TypePath::parse, allow_plus) => { Type::Path }
|
// Don't try parsing more than one trait bound if we aren't allowing it.
// must be before TypeTuple
call!(TypeTraitObject::parse, allow_plus) => { Type::TraitObject }
|
syn!(TypeSlice) => { Type::Slice }
|
syn!(TypeArray) => { Type::Array }
|
syn!(TypePtr) => { Type::Ptr }
|
syn!(TypeReference) => { Type::Reference }
|
syn!(TypeBareFn) => { Type::BareFn }
|
syn!(TypeNever) => { Type::Never }
|
syn!(TypeTuple) => { Type::Tuple }
|
syn!(TypeImplTrait) => { Type::ImplTrait }
|
syn!(TypeInfer) => { Type::Infer }
));
impl Synom for TypeSlice {
named!(parse -> Self, map!(
brackets!(syn!(Type)),
|(b, ty)| TypeSlice {
elem: Box::new(ty),
bracket_token: b,
}
));
fn description() -> Option<&'static str> {
Some("slice type")
}
}
impl Synom for TypeArray {
named!(parse -> Self, map!(
brackets!(do_parse!(
elem: syn!(Type) >>
semi: punct!(;) >>
len: syn!(Expr) >>
(elem, semi, len)
)),
|(brackets, (elem, semi, len))| {
TypeArray {
elem: Box::new(elem),
len: len,
bracket_token: brackets,
semi_token: semi,
}
}
));
fn description() -> Option<&'static str> {
Some("array type")
}
}
impl Synom for TypePtr {
named!(parse -> Self, do_parse!(
star: punct!(*) >>
mutability: alt!(
keyword!(const) => { |c| (None, Some(c)) }
|
keyword!(mut) => { |m| (Some(m), None) }
) >>
target: call!(Type::without_plus) >>
(TypePtr {
const_token: mutability.1,
star_token: star,
mutability: mutability.0,
elem: Box::new(target),
})
));
fn description() -> Option<&'static str> {
Some("raw pointer type")
}
}
impl Synom for TypeReference {
named!(parse -> Self, do_parse!(
amp: punct!(&) >>
life: option!(syn!(Lifetime)) >>
mutability: option!(keyword!(mut)) >>
// & binds tighter than +, so we don't allow + here.
target: call!(Type::without_plus) >>
(TypeReference {
lifetime: life,
mutability: mutability,
elem: Box::new(target),
and_token: amp,
})
));
fn description() -> Option<&'static str> {
Some("reference type")
}
}
impl Synom for TypeBareFn {
named!(parse -> Self, do_parse!(
lifetimes: option!(syn!(BoundLifetimes)) >>
unsafety: option!(keyword!(unsafe)) >>
abi: option!(syn!(Abi)) >>
fn_: keyword!(fn) >>
parens: parens!(do_parse!(
inputs: call!(Punctuated::parse_terminated) >>
variadic: option!(cond_reduce!(inputs.empty_or_trailing(), punct!(...))) >>
(inputs, variadic)
)) >>
output: syn!(ReturnType) >>
(TypeBareFn {
unsafety: unsafety,
abi: abi,
lifetimes: lifetimes,
output: output,
variadic: (parens.1).1,
fn_token: fn_,
paren_token: parens.0,
inputs: (parens.1).0,
})
));
fn description() -> Option<&'static str> {
Some("`fn` type")
}
}
impl Synom for TypeNever {
named!(parse -> Self, map!(
punct!(!),
|b| TypeNever { bang_token: b }
));
fn description() -> Option<&'static str> {
Some("never type: `!`")
}
}
impl Synom for TypeInfer {
named!(parse -> Self, map!(
punct!(_),
|u| TypeInfer { underscore_token: u }
));
fn description() -> Option<&'static str> {
Some("inferred type: `_`")
}
}
impl Synom for TypeTuple {
named!(parse -> Self, do_parse!(
data: parens!(Punctuated::parse_terminated) >>
(TypeTuple {
paren_token: data.0,
elems: data.1,
})
));
fn description() -> Option<&'static str> {
Some("tuple type")
}
}
impl Synom for TypeMacro {
named!(parse -> Self, map!(syn!(Macro), |mac| TypeMacro { mac: mac }));
fn description() -> Option<&'static str> {
Some("macro invocation")
}
}
impl Synom for TypePath {
named!(parse -> Self, call!(Self::parse, false));
fn description() -> Option<&'static str> {
Some("type path")
}
}
impl TypePath {
named!(parse(allow_plus: bool) -> Self, do_parse!(
qpath: qpath >>
parenthesized: option!(cond_reduce!(
qpath.1.segments.last().unwrap().value().arguments.is_empty(),
syn!(ParenthesizedGenericArguments)
)) >>
cond!(allow_plus, not!(punct!(+))) >>
({
let (qself, mut path) = qpath;
if let Some(parenthesized) = parenthesized {
let parenthesized = PathArguments::Parenthesized(parenthesized);
path.segments.last_mut().unwrap().value_mut().arguments = parenthesized;
}
TypePath { qself: qself, path: path }
})
));
}
impl Synom for ReturnType {
named!(parse -> Self, alt!(
do_parse!(
arrow: punct!(->) >>
ty: syn!(Type) >>
(ReturnType::Type(arrow, Box::new(ty)))
)
|
epsilon!() => { |_| ReturnType::Default }
));
fn description() -> Option<&'static str> {
Some("return type")
}
}
impl Synom for TypeTraitObject {
named!(parse -> Self, call!(Self::parse, true));
fn description() -> Option<&'static str> {
Some("trait object type")
}
}
fn at_least_one_type(bounds: &Punctuated<TypeParamBound, Token![+]>) -> bool {
for bound in bounds {
if let TypeParamBound::Trait(_) = *bound {
return true;
}
}
false
}
impl TypeTraitObject {
named!(pub without_plus -> Self, call!(Self::parse, false));
// Only allow multiple trait references if allow_plus is true.
named!(parse(allow_plus: bool) -> Self, do_parse!(
dyn_token: option!(keyword!(dyn)) >>
bounds: alt!(
cond_reduce!(allow_plus, Punctuated::parse_terminated_nonempty)
|
syn!(TypeParamBound) => {|x| {
let mut bounds = Punctuated::new();
bounds.push_value(x);
bounds
}}
) >>
// Just lifetimes like `'a + 'b` is not a TraitObject.
cond_reduce!(at_least_one_type(&bounds)) >>
(TypeTraitObject {
dyn_token: dyn_token,
bounds: bounds,
})
));
}
impl Synom for TypeImplTrait {
named!(parse -> Self, do_parse!(
impl_: keyword!(impl) >>
// NOTE: rust-lang/rust#34511 includes discussion about whether or
// not + should be allowed in ImplTrait directly without ().
elem: call!(Punctuated::parse_terminated_nonempty) >>
(TypeImplTrait {
impl_token: impl_,
bounds: elem,
})
));
fn description() -> Option<&'static str> {
Some("`impl Trait` type")
}
}
impl Synom for TypeGroup {
named!(parse -> Self, do_parse!(
data: grouped!(syn!(Type)) >>
(TypeGroup {
group_token: data.0,
elem: Box::new(data.1),
})
));
fn description() -> Option<&'static str> {
Some("type surrounded by invisible delimiters")
}
}
impl Synom for TypeParen {
named!(parse -> Self, call!(Self::parse, false));
fn description() -> Option<&'static str> {
Some("parenthesized type")
}
}
impl TypeParen {
named!(parse(allow_plus: bool) -> Self, do_parse!(
data: parens!(syn!(Type)) >>
cond!(allow_plus, not!(punct!(+))) >>
(TypeParen {
paren_token: data.0,
elem: Box::new(data.1),
})
));
}
impl Synom for BareFnArg {
named!(parse -> Self, do_parse!(
name: option!(do_parse!(
name: syn!(BareFnArgName) >>
not!(punct!(::)) >>
colon: punct!(:) >>
(name, colon)
)) >>
ty: syn!(Type) >>
(BareFnArg {
name: name,
ty: ty,
})
));
fn description() -> Option<&'static str> {
Some("function type argument")
}
}
impl Synom for BareFnArgName {
named!(parse -> Self, alt!(
map!(syn!(Ident), BareFnArgName::Named)
|
map!(punct!(_), BareFnArgName::Wild)
));
fn description() -> Option<&'static str> {
Some("function argument name")
}
}
impl Synom for Abi {
named!(parse -> Self, do_parse!(
extern_: keyword!(extern) >>
name: option!(syn!(LitStr)) >>
(Abi {
extern_token: extern_,
name: name,
})
));
fn description() -> Option<&'static str> {
Some("`extern` ABI qualifier")
}
}
}
#[cfg(feature = "printing")]
mod printing {
use super::*;
use quote::{ToTokens, Tokens};
impl ToTokens for TypeSlice {
fn to_tokens(&self, tokens: &mut Tokens) {
self.bracket_token.surround(tokens, |tokens| {
self.elem.to_tokens(tokens);
});
}
}
impl ToTokens for TypeArray {
fn to_tokens(&self, tokens: &mut Tokens) {
self.bracket_token.surround(tokens, |tokens| {
self.elem.to_tokens(tokens);
self.semi_token.to_tokens(tokens);
self.len.to_tokens(tokens);
});
}
}
impl ToTokens for TypePtr {
fn to_tokens(&self, tokens: &mut Tokens) {
self.star_token.to_tokens(tokens);
match self.mutability {
Some(ref tok) => tok.to_tokens(tokens),
None => {
TokensOrDefault(&self.const_token).to_tokens(tokens);
}
}
self.elem.to_tokens(tokens);
}
}
impl ToTokens for TypeReference {
fn to_tokens(&self, tokens: &mut Tokens) {
self.and_token.to_tokens(tokens);
self.lifetime.to_tokens(tokens);
self.mutability.to_tokens(tokens);
self.elem.to_tokens(tokens);
}
}
impl ToTokens for TypeBareFn {
fn to_tokens(&self, tokens: &mut Tokens) {
self.lifetimes.to_tokens(tokens);
self.unsafety.to_tokens(tokens);
self.abi.to_tokens(tokens);
self.fn_token.to_tokens(tokens);
self.paren_token.surround(tokens, |tokens| {
self.inputs.to_tokens(tokens);
if let Some(ref variadic) = self.variadic {
if !self.inputs.empty_or_trailing() {
let span = variadic.0[0];
<Token![,]>::new(span).to_tokens(tokens);
}
variadic.to_tokens(tokens);
}
});
self.output.to_tokens(tokens);
}
}
impl ToTokens for TypeNever {
fn to_tokens(&self, tokens: &mut Tokens) {
self.bang_token.to_tokens(tokens);
}
}
impl ToTokens for TypeTuple {
fn to_tokens(&self, tokens: &mut Tokens) {
self.paren_token.surround(tokens, |tokens| {
self.elems.to_tokens(tokens);
})
}
}
impl ToTokens for TypePath {
fn to_tokens(&self, tokens: &mut Tokens) {
PathTokens(&self.qself, &self.path).to_tokens(tokens);
}
}
impl ToTokens for TypeTraitObject {
fn to_tokens(&self, tokens: &mut Tokens) {
self.dyn_token.to_tokens(tokens);
self.bounds.to_tokens(tokens);
}
}
impl ToTokens for TypeImplTrait {
fn to_tokens(&self, tokens: &mut Tokens) {
self.impl_token.to_tokens(tokens);
self.bounds.to_tokens(tokens);
}
}
impl ToTokens for TypeGroup {
    /// Prints the inner type wrapped by the stored group delimiter.
    fn to_tokens(&self, tokens: &mut Tokens) {
        self.group_token
            .surround(tokens, |inner| self.elem.to_tokens(inner));
    }
}
impl ToTokens for TypeParen {
    /// Prints a parenthesized type: the inner type inside the stored parens.
    fn to_tokens(&self, tokens: &mut Tokens) {
        self.paren_token
            .surround(tokens, |inner| self.elem.to_tokens(inner));
    }
}
impl ToTokens for TypeInfer {
    // The inferred type is just its `_` token.
    fn to_tokens(&self, tokens: &mut Tokens) {
        self.underscore_token.to_tokens(tokens);
    }
}
impl ToTokens for TypeMacro {
    // A macro in type position prints as the macro invocation itself.
    fn to_tokens(&self, tokens: &mut Tokens) {
        self.mac.to_tokens(tokens);
    }
}
impl ToTokens for TypeVerbatim {
    // Verbatim types are re-emitted as their stored raw token stream.
    fn to_tokens(&self, tokens: &mut Tokens) {
        self.tts.to_tokens(tokens);
    }
}
impl ToTokens for ReturnType {
    /// Prints a function return type. `Default` (the implicit `()` return)
    /// prints nothing; an explicit return prints `->` and the type.
    fn to_tokens(&self, tokens: &mut Tokens) {
        if let ReturnType::Type(ref arrow, ref ty) = *self {
            arrow.to_tokens(tokens);
            ty.to_tokens(tokens);
        }
    }
}
impl ToTokens for BareFnArg {
    /// Prints one bare-fn argument: an optional `name:` prefix then the type.
    fn to_tokens(&self, tokens: &mut Tokens) {
        if let Some((ref arg_name, ref colon)) = self.name {
            arg_name.to_tokens(tokens);
            colon.to_tokens(tokens);
        }
        self.ty.to_tokens(tokens);
    }
}
impl ToTokens for BareFnArgName {
    /// Prints a bare-fn argument name: either an identifier or `_`.
    fn to_tokens(&self, tokens: &mut Tokens) {
        match *self {
            BareFnArgName::Wild(ref t) => t.to_tokens(tokens),
            BareFnArgName::Named(ref t) => t.to_tokens(tokens),
        }
    }
}
impl ToTokens for Abi {
    // Prints an ABI specifier: `extern` followed by the ABI name literal.
    fn to_tokens(&self, tokens: &mut Tokens) {
        self.extern_token.to_tokens(tokens);
        self.name.to_tokens(tokens);
    }
}
}

Просмотреть файл

@ -1 +1 @@
{"files":{"Cargo.toml":"8eaecd0df348097ca723e305ddf7abc2574ceaf89bed04159a40a1dc9dd7ed29","LICENSE":"219920e865eee70b7dcfc948a86b099e7f4fe2de01bcca2ca9a20c0a033f2b59","README.md":"14d3d8351ab70c08799e1f7ab6c0c1a057572bc7acff641ff139521605a24e82","src/lib.rs":"d3a08476db63312803800c8bb7447f14276d2a4e1f24df69517d757de56f4e9f","src/macros.rs":"98cc6d340238d1364dc08be7bcccafbed711c50fbd0e3f899fdb622b5c79fe19"},"package":"010366096045d8250555904c58da03377289e7f4b2ce7a5b1027e2b532f41000"} {"files":{"Cargo.toml":"936c905633b5e43ea48d8a7f34cfe32741e2cabfc5b71541e1e1fa214c9afbe2","LICENSE":"219920e865eee70b7dcfc948a86b099e7f4fe2de01bcca2ca9a20c0a033f2b59","README.md":"16135f8089978a256141f3ffedfeb60df584bbd3f4dd928437cf839fc6920ff9","src/lib.rs":"7bcbf539b948ebb5a77cd636f496598662ad440416c265be1b6bda43ed3f19fb","src/macros.rs":"d9a58b66620003e7500cf53699410639f104c36146fe612d48128f293210a524"},"package":"98cad891cd238c98e1f0aec9f7c0f620aa696e4e5f7daba56ac67b5e86a6b049"}

8
third_party/rust/synstructure/Cargo.toml поставляемый
Просмотреть файл

@ -12,7 +12,7 @@
[package] [package]
name = "synstructure" name = "synstructure"
version = "0.7.0" version = "0.8.1"
authors = ["Nika Layzell <nika@thelayzells.com>"] authors = ["Nika Layzell <nika@thelayzells.com>"]
include = ["src/**/*", "Cargo.toml", "README.md", "LICENSE"] include = ["src/**/*", "Cargo.toml", "README.md", "LICENSE"]
description = "Helper methods and macros for custom derives" description = "Helper methods and macros for custom derives"
@ -22,13 +22,13 @@ keywords = ["syn", "macros", "derive", "expand_substructure", "enum"]
license = "MIT" license = "MIT"
repository = "https://github.com/mystor/synstructure" repository = "https://github.com/mystor/synstructure"
[dependencies.proc-macro2] [dependencies.proc-macro2]
version = "0.2.1" version = "0.3"
[dependencies.quote] [dependencies.quote]
version = "0.4.2" version = "0.5"
[dependencies.syn] [dependencies.syn]
version = "0.12" version = "0.13"
features = ["visit", "extra-traits"] features = ["visit", "extra-traits"]
[dependencies.unicode-xid] [dependencies.unicode-xid]

5
third_party/rust/synstructure/README.md поставляемый
Просмотреть файл

@ -1,6 +1,9 @@
# synstructure # synstructure
[![Documentation](https://docs.rs/synstructure/badge.svg)](https://docs.rs/synstructure) [![Build Status](https://travis-ci.org/mystor/synstructure.svg?branch=master)](https://travis-ci.org/mystor/synstructure) [![Latest Version](https://img.shields.io/crates/v/synstructure.svg)](https://crates.io/crates/synstructure)
[![Documentation](https://docs.rs/synstructure/badge.svg)](https://docs.rs/synstructure)
[![Build Status](https://travis-ci.org/mystor/synstructure.svg?branch=master)](https://travis-ci.org/mystor/synstructure)
[![Rustc Version 1.15+](https://img.shields.io/badge/rustc-1.15+-lightgray.svg)](https://blog.rust-lang.org/2017/02/02/Rust-1.15.html)
> NOTE: What follows is an exerpt from the module level documentation. For full > NOTE: What follows is an exerpt from the module level documentation. For full
> details read the docs on [docs.rs](https://docs.rs/synstructure/) > details read the docs on [docs.rs](https://docs.rs/synstructure/)

483
third_party/rust/synstructure/src/lib.rs поставляемый
Просмотреть файл

@ -28,9 +28,13 @@
//! walk(#bi) //! walk(#bi)
//! }); //! });
//! //!
//! s.bound_impl(quote!(synstructure_test_traits::WalkFields), quote!{ //! s.gen_impl(quote! {
//! fn walk_fields(&self, walk: &mut FnMut(&synstructure_test_traits::WalkFields)) { //! extern crate synstructure_test_traits;
//! match *self { #body } //!
//! gen impl synstructure_test_traits::WalkFields for @Self {
//! fn walk_fields(&self, walk: &mut FnMut(&synstructure_test_traits::WalkFields)) {
//! match *self { #body }
//! }
//! } //! }
//! }) //! })
//! } //! }
@ -97,10 +101,13 @@
//! #acc || synstructure_test_traits::Interest::interesting(#bi) //! #acc || synstructure_test_traits::Interest::interesting(#bi)
//! }); //! });
//! //!
//! s.bound_impl(quote!(synstructure_test_traits::Interest), quote!{ //! s.gen_impl(quote! {
//! fn interesting(&self) -> bool { //! extern crate synstructure_test_traits;
//! match *self { //! gen impl synstructure_test_traits::Interest for @Self {
//! #body //! fn interesting(&self) -> bool {
//! match *self {
//! #body
//! }
//! } //! }
//! } //! }
//! }) //! })
@ -154,6 +161,7 @@ extern crate proc_macro;
extern crate proc_macro2; extern crate proc_macro2;
#[macro_use] #[macro_use]
extern crate quote; extern crate quote;
#[macro_use]
extern crate syn; extern crate syn;
extern crate unicode_xid; extern crate unicode_xid;
@ -167,7 +175,10 @@ use syn::{
}; };
use syn::visit::{self, Visit}; use syn::visit::{self, Visit};
use quote::{ToTokens, Tokens}; // re-export the quote! macro so we can depend on it being around in our macro's
// implementations.
#[doc(hidden)]
pub use quote::*;
use unicode_xid::UnicodeXID; use unicode_xid::UnicodeXID;
@ -215,7 +226,7 @@ fn generics_fuse(res: &mut Vec<bool>, new: &[bool]) {
} }
} }
// Internal method for extracting the set of generics which have been matched // Internal method for extracting the set of generics which have been matched.
fn fetch_generics<'a>(set: &[bool], generics: &'a Generics) -> Vec<&'a Ident> { fn fetch_generics<'a>(set: &[bool], generics: &'a Generics) -> Vec<&'a Ident> {
let mut tys = vec![]; let mut tys = vec![];
for (&seen, param) in set.iter().zip(generics.params.iter()) { for (&seen, param) in set.iter().zip(generics.params.iter()) {
@ -238,7 +249,40 @@ fn sanitize_ident(s: &str) -> Ident {
if res.ends_with('_') && c == '_' { continue } if res.ends_with('_') && c == '_' { continue }
res.push(c); res.push(c);
} }
Ident::new(&res, Span::def_site()) Ident::from(res)
}
// Internal method to merge two Generics objects together intelligently.
fn merge_generics(into: &mut Generics, from: &Generics) {
// Try to add each param from `from` into `into`, panicking on params with conflicting names.
'outer: for p in &from.params {
for op in &into.params {
match (op, p) {
(&GenericParam::Type(ref otp), &GenericParam::Type(ref tp)) => {
// NOTE: This is only OK because syn ignores the span for equality purposes.
if otp.ident == tp.ident {
panic!("Attempted to merge conflicting generic params: {} and {}", quote!{#op}, quote!{#p});
}
}
(&GenericParam::Lifetime(ref olp), &GenericParam::Lifetime(ref lp)) => {
// NOTE: This is only OK because syn ignores the span for equality purposes.
if olp.lifetime == lp.lifetime {
panic!("Attempted to merge conflicting generic params: {} and {}", quote!{#op}, quote!{#p});
}
}
// We don't support merging Const parameters, because that wouldn't make much sense.
_ => (),
}
}
into.params.push(p.clone());
}
// Add any where clauses from the input generics object.
if let Some(ref from_clause) = from.where_clause {
into.make_where_clause()
.predicates
.extend(from_clause.predicates.iter().cloned());
}
} }
/// Information about a specific binding. This contains both an `Ident` /// Information about a specific binding. This contains both an `Ident`
@ -905,6 +949,7 @@ pub struct Structure<'a> {
variants: Vec<VariantInfo<'a>>, variants: Vec<VariantInfo<'a>>,
omitted_variants: bool, omitted_variants: bool,
ast: &'a DeriveInput, ast: &'a DeriveInput,
extra_impl: Vec<GenericParam>,
} }
impl<'a> Structure<'a> { impl<'a> Structure<'a> {
@ -961,6 +1006,7 @@ impl<'a> Structure<'a> {
variants: variants, variants: variants,
omitted_variants: false, omitted_variants: false,
ast: ast, ast: ast,
extra_impl: vec![],
} }
} }
@ -1396,6 +1442,54 @@ impl<'a> Structure<'a> {
fetch_generics(&flags, &self.ast.generics) fetch_generics(&flags, &self.ast.generics)
} }
/// Adds an `impl<>` generic parameter.
/// This can be used when the trait to be derived needs some extra generic parameters.
///
/// # Example
/// ```
/// # #![recursion_limit="128"]
/// # #[macro_use] extern crate quote;
/// # extern crate synstructure;
/// # #[macro_use] extern crate syn;
/// # use synstructure::*;
/// # fn main() {
/// let di: syn::DeriveInput = parse_quote! {
/// enum A<T, U> {
/// B(T),
/// C(Option<U>),
/// }
/// };
/// let mut s = Structure::new(&di);
/// let generic: syn::GenericParam = parse_quote!(X: krate::AnotherTrait);
///
/// assert_eq!(
/// s.add_impl_generic(generic)
/// .bound_impl(quote!(krate::Trait<X>),
/// quote!{
/// fn a() {}
/// }
/// ),
/// quote!{
/// #[allow(non_upper_case_globals)]
/// const _DERIVE_krate_Trait_X_FOR_A: () = {
/// extern crate krate;
/// impl<T, U, X: krate::AnotherTrait> krate::Trait<X> for A<T, U>
/// where T : krate :: Trait < X >,
/// Option<U>: krate::Trait<X>,
/// U: krate::Trait<X>
/// {
/// fn a() {}
/// }
/// };
/// }
/// );
/// # }
/// ```
pub fn add_impl_generic(&mut self, param: GenericParam) -> &mut Self {
    // Stored separately from the input's own generics; the extra params are
    // spliced into the `impl<...>` list when the impl block is generated.
    self.extra_impl.push(param);
    self
}
/// Add trait bounds for a trait with the given path for each type parmaeter /// Add trait bounds for a trait with the given path for each type parmaeter
/// referenced in the types of non-filtered fields. /// referenced in the types of non-filtered fields.
/// ///
@ -1411,7 +1505,7 @@ impl<'a> Structure<'a> {
// Ensure we have a where clause, because we need to use it. We // Ensure we have a where clause, because we need to use it. We
// can't use `get_or_insert_with`, because it isn't supported on all // can't use `get_or_insert_with`, because it isn't supported on all
// rustc versions we support. // rustc versions we support (this is a Rust 1.20+ feature).
if where_clause.is_none() { if where_clause.is_none() {
*where_clause = Some(WhereClause { *where_clause = Some(WhereClause {
where_token: Default::default(), where_token: Default::default(),
@ -1450,6 +1544,8 @@ impl<'a> Structure<'a> {
} }
} }
/// > NOTE: This method's features are superseded by `Structure::gen_impl`.
///
/// Creates an `impl` block with the required generic type fields filled in /// Creates an `impl` block with the required generic type fields filled in
/// to implement the trait `path`. /// to implement the trait `path`.
/// ///
@ -1522,6 +1618,8 @@ impl<'a> Structure<'a> {
) )
} }
/// > NOTE: This method's features are superseded by `Structure::gen_impl`.
///
/// Creates an `impl` block with the required generic type fields filled in /// Creates an `impl` block with the required generic type fields filled in
/// to implement the unsafe trait `path`. /// to implement the unsafe trait `path`.
/// ///
@ -1594,6 +1692,8 @@ impl<'a> Structure<'a> {
) )
} }
/// > NOTE: This method's features are superseded by `Structure::gen_impl`.
///
/// Creates an `impl` block with the required generic type fields filled in /// Creates an `impl` block with the required generic type fields filled in
/// to implement the trait `path`. /// to implement the trait `path`.
/// ///
@ -1656,6 +1756,8 @@ impl<'a> Structure<'a> {
) )
} }
/// > NOTE: This method's features are superseded by `Structure::gen_impl`.
///
/// Creates an `impl` block with the required generic type fields filled in /// Creates an `impl` block with the required generic type fields filled in
/// to implement the unsafe trait `path`. /// to implement the unsafe trait `path`.
/// ///
@ -1709,6 +1811,7 @@ impl<'a> Structure<'a> {
/// ); /// );
/// # } /// # }
/// ``` /// ```
#[deprecated]
pub fn unsafe_unbound_impl<P: ToTokens, B: ToTokens>(&self, path: P, body: B) -> Tokens { pub fn unsafe_unbound_impl<P: ToTokens, B: ToTokens>(&self, path: P, body: B) -> Tokens {
self.impl_internal( self.impl_internal(
path.into_tokens(), path.into_tokens(),
@ -1726,7 +1829,10 @@ impl<'a> Structure<'a> {
add_bounds: bool, add_bounds: bool,
) -> Tokens { ) -> Tokens {
let name = &self.ast.ident; let name = &self.ast.ident;
let (impl_generics, ty_generics, where_clause) = self.ast.generics.split_for_impl(); let mut gen_clone = self.ast.generics.clone();
gen_clone.params.extend(self.extra_impl.clone().into_iter());
let (impl_generics, _, _) = gen_clone.split_for_impl();
let (_, ty_generics, where_clause) = self.ast.generics.split_for_impl();
let bound = syn::parse2::<TraitBound>(path.into()) let bound = syn::parse2::<TraitBound>(path.into())
.expect("`path` argument must be a valid rust trait bound"); .expect("`path` argument must be a valid rust trait bound");
@ -1764,4 +1870,357 @@ impl<'a> Structure<'a> {
}; };
} }
} }
/// Generate an impl block for the given struct. This impl block will
/// automatically use hygiene tricks to avoid polluting the caller's
/// namespace, and will automatically add trait bounds for generic type
/// parameters.
///
/// # Syntax
///
/// This function accepts its arguments as a `Tokens`. The recommended way
/// to call this function is passing the result of invoking the `quote!`
/// macro to it.
///
/// ```ignore
/// s.gen_impl(quote! {
/// // You can write any items which you want to import into scope here.
/// // For example, you may want to include an `extern crate` for the
/// // crate which implements your trait. These items will only be
/// // visible to the code you generate, and won't be exposed to the
/// // consuming crate
/// extern crate krate;
///
/// // You can also add `use` statements here to bring types or traits
/// // into scope.
/// //
/// // WARNING: Try not to use common names here, because the stable
/// // version of syn does not support hygiene and you could accidentally
/// // shadow types from the caller crate.
/// use krate::Trait as MyTrait;
///
/// // The actual impl block is a `gen impl` or `gen unsafe impl` block.
/// // You can use `@Self` to refer to the structure's type.
/// gen impl MyTrait for @Self {
/// fn f(&self) { ... }
/// }
/// })
/// ```
///
/// The most common usage of this trait involves loading the crate the
/// target trait comes from with `extern crate`, and then invoking a `gen
/// impl` block.
///
/// # Hygiene
///
/// This method tries to handle hygiene intelligently for both stable and
/// unstable proc-macro implementations, however there are visible
/// differences.
///
/// The output of every `gen_impl` function is wrapped in a dummy `const`
/// value, to ensure that it is given its own scope, and any values brought
/// into scope are not leaked to the calling crate. For example, the above
/// invocation may generate an output like the following:
///
/// ```ignore
/// const _DERIVE_krate_Trait_FOR_Struct: () = {
/// extern crate krate;
/// use krate::Trait as MyTrait;
/// impl<T> MyTrait for Struct<T> where T: MyTrait {
/// fn f(&self) { ... }
/// }
/// };
/// ```
///
/// ### Using the `std` crate
///
/// If you are using `quote!()` to implement your trait, with the
/// `proc-macro2/nightly` feature, `std` isn't considered to be in scope for
/// your macro. This means that if you use types from `std` in your
/// procedural macro, you'll want to explicitly load it with an `extern
/// crate std;`.
///
/// ### Absolute paths
///
/// You should generally avoid using absolute paths in your generated code,
/// as they will resolve very differently when using the stable and nightly
/// versions of `proc-macro2`. Instead, load the crates you need to use
/// explicitly with `extern crate` and
///
/// # Trait Bounds
///
/// This method will automatically add trait bounds for any type parameters
/// which are referenced within the types of non-ignored fields.
///
/// Additional type parameters may be added with the generics syntax after
/// the `impl` keyword.
///
/// ### Type Macro Caveat
///
/// If the method contains any macros in type position, all parameters will
/// be considered bound. This is because we cannot determine which type
/// parameters are bound by type macros.
///
/// # Panics
///
/// This function will panic if the input `Tokens` is not well-formed, or
/// if additional type parameters added by `impl<..>` conflict with generic
/// type parameters on the original struct.
///
/// # Example Usage
///
/// ```
/// # #![recursion_limit="128"]
/// # #[macro_use] extern crate quote;
/// # extern crate synstructure;
/// # #[macro_use] extern crate syn;
/// # use synstructure::*;
/// # fn main() {
/// let di: syn::DeriveInput = parse_quote! {
/// enum A<T, U> {
/// B(T),
/// C(Option<U>),
/// }
/// };
/// let mut s = Structure::new(&di);
///
/// s.filter_variants(|v| v.ast().ident != "B");
///
/// assert_eq!(
/// s.gen_impl(quote! {
/// extern crate krate;
/// gen impl krate::Trait for @Self {
/// fn a() {}
/// }
/// }),
/// quote!{
/// #[allow(non_upper_case_globals)]
/// const _DERIVE_krate_Trait_FOR_A: () = {
/// extern crate krate;
/// impl<T, U> krate::Trait for A<T, U>
/// where
/// Option<U>: krate::Trait,
/// U: krate::Trait
/// {
/// fn a() {}
/// }
/// };
/// }
/// );
///
/// // NOTE: You can also add extra generics after the impl
/// assert_eq!(
/// s.gen_impl(quote! {
/// extern crate krate;
/// gen impl<X: krate::OtherTrait> krate::Trait<X> for @Self
/// where
/// X: Send + Sync,
/// {
/// fn a() {}
/// }
/// }),
/// quote!{
/// #[allow(non_upper_case_globals)]
/// const _DERIVE_krate_Trait_X_FOR_A: () = {
/// extern crate krate;
/// impl<X: krate::OtherTrait, T, U> krate::Trait<X> for A<T, U>
/// where
/// X: Send + Sync,
/// Option<U>: krate::Trait<X>,
/// U: krate::Trait<X>
/// {
/// fn a() {}
/// }
/// };
/// }
/// );
/// # }
/// ```
pub fn gen_impl(&self, cfg: Tokens) -> Tokens {
    use syn::buffer::{TokenBuffer, Cursor};
    use syn::synom::PResult;
    use proc_macro2::TokenStream;

    /* Parsing Logic */

    // Tries to parse `gen [unsafe] impl [<...>] Bound for @Self [where ...] { body }`
    // at the cursor, returning the optional `unsafe` token, the trait bound,
    // the raw body token stream, and any generics/where-clause written on the
    // `gen impl` itself.
    fn parse_gen_impl(
        c: Cursor,
    ) -> PResult<
        (
            Option<token::Unsafe>,
            TraitBound,
            TokenStream,
            syn::Generics,
        ),
    > {
        // `gen`
        let (id, c) = syn!(c, Ident)?;
        if id.as_ref() != "gen" {
            // Not a gen-impl block; report a recoverable parse failure so the
            // caller can treat this token as leading "before" content.
            let ((), _) = reject!(c,)?;
            unreachable!()
        }

        // `impl` or `unsafe impl`
        let (unsafe_kw, c) = option!(c, keyword!(unsafe))?;
        let (_, c) = syn!(c, token::Impl)?;

        // NOTE: After this point we assume they meant to write a gen impl,
        // so we panic if we run into an error.

        // optional `<>`
        let (mut generics, c) = syn!(c, Generics)
            .expect("Expected an optional `<>` with generics after `gen impl`");

        // @bound
        let (bound, c) = syn!(c, TraitBound)
            .expect("Expected a trait bound after `gen impl`");

        // `for @Self`
        let (_, c) = keyword!(c, for)
            .expect("Expected `for` after trait bound");
        let (_, c) = do_parse!(c, syn!(Token![@]) >> keyword!(Self) >> (()))
            .expect("Expected `@Self` after `for`");

        // optional `where ...`
        // XXX: We have to do this awkward if let because option!() doesn't
        // provide enough type information to call expect().
        let c = if let Ok((where_clause, c)) = syn!(c, WhereClause) {
            generics.where_clause = Some(where_clause);
            c
        } else { c };

        // The impl body is captured as an opaque token stream inside braces.
        let ((_, body), c) = braces!(c, syn!(TokenStream))
            .expect("Expected an impl body after `@Self`");

        Ok(((unsafe_kw, bound, body, generics), c))
    }

    let buf = TokenBuffer::new2(cfg.into());
    let mut c = buf.begin();
    let mut before = vec![];

    // Scan forward, collecting every token that precedes the gen-impl block
    // (e.g. `extern crate` / `use` items) into `before`; everything after the
    // block ends up in `after`.
    // Use uninitialized variables here to avoid using the "break with value"
    // language feature, which requires Rust 1.19+.
    let ((unsafe_kw, bound, body, mut generics), after) = {
        let gen_impl;
        let cursor;
        loop {
            if let Ok((gi, c2)) = parse_gen_impl(c) {
                gen_impl = gi;
                cursor = c2;
                break;
            } else if let Some((tt, c2)) = c.token_tree() {
                c = c2;
                before.push(tt);
            } else {
                panic!("Expected a gen impl block");
            }
        }
        (gen_impl, cursor.token_stream())
    };

    /* Codegen Logic */
    let name = &self.ast.ident;

    // Add the generics from the original struct in, and then add any
    // additional trait bounds which we need on the type.
    merge_generics(&mut generics, &self.ast.generics);
    self.add_trait_bounds(&bound, &mut generics.where_clause);
    let (impl_generics, _, where_clause) = generics.split_for_impl();
    let (_, ty_generics, _) = self.ast.generics.split_for_impl();

    // Name of the scoping const is derived from the bound and type names,
    // sanitized into a valid identifier.
    let dummy_const: Ident = sanitize_ident(&format!(
        "_DERIVE_{}_FOR_{}",
        (&bound).into_tokens(),
        name.into_tokens(),
    ));

    // Wrap everything in a dummy const so `before`/`after` items don't leak
    // into the caller's namespace.
    quote! {
        #[allow(non_upper_case_globals)]
        const #dummy_const: () = {
            #(#before)*
            #unsafe_kw impl #impl_generics #bound for #name #ty_generics #where_clause {
                #body
            }
            #after
        };
    }
}
}
/// Dumps an unpretty version of a tokenstream. Takes any type which implements
/// `Display`.
///
/// This is mostly useful for visualizing the output of a procedural macro, as
/// it makes it marginally more readable. It is used in the implementation of
/// `test_derive!` to unprettily print the output.
///
/// # Stability
///
/// The stability of the output of this function is not guaranteed. Do not
/// assert that the output of this function does not change between minor
/// versions.
///
/// # Example
///
/// ```
/// # extern crate synstructure;
/// # #[macro_use] extern crate quote;
/// # fn main() {
/// assert_eq!(
/// synstructure::unpretty_print(quote! {
/// #[allow(non_upper_case_globals)]
/// const _DERIVE_krate_Trait_FOR_A: () = {
/// extern crate krate;
/// impl<T, U> krate::Trait for A<T, U>
/// where
/// Option<U>: krate::Trait,
/// U: krate::Trait
/// {
/// fn a() {}
/// }
/// };
/// }),
/// "# [
/// allow (
/// non_upper_case_globals )
/// ]
/// const _DERIVE_krate_Trait_FOR_A : (
/// )
/// = {
/// extern crate krate ;
/// impl < T , U > krate :: Trait for A < T , U > where Option < U > : krate :: Trait , U : krate :: Trait {
/// fn a (
/// )
/// {
/// }
/// }
/// }
/// ;
/// "
/// )
/// # }
/// ```
pub fn unpretty_print<T: std::fmt::Display>(ts: T) -> String {
    let raw = ts.to_string();
    let mut remaining = &raw[..];
    let mut out = String::new();
    let mut depth = 0;

    // Break the line after every bracket or semicolon, indenting the next
    // line by the current nesting depth.
    while let Some(pos) = remaining.find(&['(', '{', '[', ')', '}', ']', ';'][..]) {
        let found = &remaining[pos..pos + 1];
        if found == "(" || found == "{" || found == "[" {
            depth += 1;
        } else if found == ")" || found == "}" || found == "]" {
            depth -= 1;
        }
        out.push_str(&remaining[..pos + 1]);
        out.push('\n');
        for _ in 0..depth {
            out.push_str(" ");
        }
        remaining = remaining[pos + 1..].trim_left_matches(' ');
    }
    out.push_str(remaining);
    out
}

38
third_party/rust/synstructure/src/macros.rs поставляемый
Просмотреть файл

@ -129,7 +129,23 @@ macro_rules! test_derive {
let mut expected_toks = $crate::macros::Tokens::new(); let mut expected_toks = $crate::macros::Tokens::new();
expected_toks.append_all(expected); expected_toks.append_all(expected);
assert_eq!(res, expected_toks) if res != expected_toks {
panic!("\
test_derive failed:
expected:
```
{}
```
got:
```
{}
```\n",
$crate::unpretty_print(&expected_toks),
$crate::unpretty_print(&res),
);
}
// assert_eq!(res, expected_toks)
} }
}; };
} }
@ -237,12 +253,12 @@ macro_rules! simple_derive {
( (
$iname:ident impl $path:path { $($rest:tt)* } $iname:ident impl $path:path { $($rest:tt)* }
) => { ) => {
simple_derive!(@I [$iname, $path] { $($rest)* } [] []); simple_derive!(__I [$iname, $path] { $($rest)* } [] []);
}; };
// Adding a filter block // Adding a filter block
( (
@I $opt:tt { __I $opt:tt {
filter($s:ident) { filter($s:ident) {
$($body:tt)* $($body:tt)*
} }
@ -250,7 +266,7 @@ macro_rules! simple_derive {
} [$($done:tt)*] [$($filter:tt)*] } [$($done:tt)*] [$($filter:tt)*]
) => { ) => {
simple_derive!( simple_derive!(
@I $opt { $($rest)* } [$($done)*] [ __I $opt { $($rest)* } [$($done)*] [
$($filter)* $($filter)*
[ [
st_name = $s, st_name = $s,
@ -264,7 +280,7 @@ macro_rules! simple_derive {
// &self bound method // &self bound method
( (
@I $opt:tt { __I $opt:tt {
fn $fn_name:ident (&self as $s:ident $($params:tt)*) $(-> $t:ty)* { fn $fn_name:ident (&self as $s:ident $($params:tt)*) $(-> $t:ty)* {
$($body:tt)* $($body:tt)*
} }
@ -272,7 +288,7 @@ macro_rules! simple_derive {
} [$($done:tt)*] [$($filter:tt)*] } [$($done:tt)*] [$($filter:tt)*]
) => { ) => {
simple_derive!( simple_derive!(
@I $opt { $($rest)* } [ __I $opt { $($rest)* } [
$($done)* $($done)*
[ [
st_name = $s, st_name = $s,
@ -291,7 +307,7 @@ macro_rules! simple_derive {
// &mut self bound method // &mut self bound method
( (
@I $opt:tt { __I $opt:tt {
fn $fn_name:ident (&mut self as $s:ident $($params:tt)*) $(-> $t:ty)* { fn $fn_name:ident (&mut self as $s:ident $($params:tt)*) $(-> $t:ty)* {
$($body:tt)* $($body:tt)*
} }
@ -299,7 +315,7 @@ macro_rules! simple_derive {
} [$($done:tt)*] [$($filter:tt)*] } [$($done:tt)*] [$($filter:tt)*]
) => { ) => {
simple_derive!( simple_derive!(
@I $opt { $($rest)* } [ __I $opt { $($rest)* } [
$($done)* $($done)*
[ [
st_name = $s, st_name = $s,
@ -318,7 +334,7 @@ macro_rules! simple_derive {
// self bound method // self bound method
( (
@I $opt:tt { __I $opt:tt {
fn $fn_name:ident (self as $s:ident $($params:tt)*) $(-> $t:ty)* { fn $fn_name:ident (self as $s:ident $($params:tt)*) $(-> $t:ty)* {
$($body:tt)* $($body:tt)*
} }
@ -326,7 +342,7 @@ macro_rules! simple_derive {
} [$($done:tt)*] [$($filter:tt)*] } [$($done:tt)*] [$($filter:tt)*]
) => { ) => {
simple_derive!( simple_derive!(
@I $opt { $($rest)* } [ __I $opt { $($rest)* } [
$($done)* $($done)*
[ [
st_name = $s, st_name = $s,
@ -347,7 +363,7 @@ macro_rules! simple_derive {
// codegen after data collection // codegen after data collection
( (
@I [$iname:ident, $path:path] {} [$( __I [$iname:ident, $path:path] {} [$(
[ [
st_name = $st_name:ident, st_name = $st_name:ident,
bind_style = $bind_style:ident, bind_style = $bind_style:ident,