From 5d02e2677d8a178829f1fcbd9eb8b4f0056a02f8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Emilio=20Cobos=20=C3=81lvarez?= Date: Mon, 18 Jun 2018 10:39:42 +0200 Subject: [PATCH] Bug 1469228: Revendor rust dependencies. r=me MozReview-Commit-ID: KEx2kxGH0iO --- .../rust/cstr-macros/.cargo-checksum.json | 2 +- third_party/rust/cstr-macros/Cargo.toml | 6 +- third_party/rust/darling/.cargo-checksum.json | 2 +- third_party/rust/darling/CHANGELOG.md | 15 +- third_party/rust/darling/Cargo.toml | 12 +- .../rust/darling_core/.cargo-checksum.json | 2 +- third_party/rust/darling_core/Cargo.toml | 9 +- .../darling_core/src/codegen/default_expr.rs | 4 +- .../src/codegen/outer_from_impl.rs | 1 + .../rust/darling_core/src/from_meta_item.rs | 2 +- third_party/rust/darling_core/src/lib.rs | 1 + third_party/rust/darling_core/src/macros.rs | 6 + .../darling_core/src/options/input_field.rs | 2 +- .../rust/darling_core/src/util/ignored.rs | 2 +- .../rust/darling_macro/.cargo-checksum.json | 2 +- third_party/rust/darling_macro/Cargo.toml | 8 +- .../proc-macro2-0.2.2/.cargo-checksum.json | 1 - .../rust/proc-macro2-0.2.2/.travis.yml | 31 - third_party/rust/proc-macro2-0.2.2/Cargo.toml | 31 - .../rust/proc-macro2-0.2.2/LICENSE-APACHE | 201 - .../rust/proc-macro2-0.2.2/LICENSE-MIT | 25 - third_party/rust/proc-macro2-0.2.2/README.md | 98 - third_party/rust/proc-macro2-0.2.2/src/lib.rs | 337 -- .../rust/proc-macro2-0.2.2/src/macros.rs | 9 - .../rust/proc-macro2-0.2.2/src/stable.rs | 1206 ------ .../rust/proc-macro2-0.2.2/src/strnom.rs | 447 --- .../rust/proc-macro2-0.2.2/src/unstable.rs | 406 -- .../rust/proc-macro2-0.2.2/tests/test.rs | 179 - .../rust/quote-0.4.2/.cargo-checksum.json | 1 - third_party/rust/quote-0.4.2/Cargo.toml | 24 - third_party/rust/quote-0.4.2/LICENSE-APACHE | 201 - third_party/rust/quote-0.4.2/LICENSE-MIT | 25 - third_party/rust/quote-0.4.2/README.md | 135 - third_party/rust/quote-0.4.2/src/lib.rs | 501 --- third_party/rust/quote-0.4.2/src/to_tokens.rs | 175 - third_party/rust/quote-0.4.2/src/tokens.rs | 264 -- third_party/rust/quote-0.4.2/tests/test.rs | 292 -- .../rust/syn-0.12.12/.cargo-checksum.json | 1 - third_party/rust/syn-0.12.12/Cargo.toml | 55 - third_party/rust/syn-0.12.12/LICENSE-APACHE | 201 - third_party/rust/syn-0.12.12/LICENSE-MIT | 25 - third_party/rust/syn-0.12.12/README.md | 277 -- third_party/rust/syn-0.12.12/src/attr.rs | 571 --- third_party/rust/syn-0.12.12/src/buffer.rs | 467 --- third_party/rust/syn-0.12.12/src/data.rs | 388 -- third_party/rust/syn-0.12.12/src/derive.rs | 210 - third_party/rust/syn-0.12.12/src/error.rs | 60 - third_party/rust/syn-0.12.12/src/expr.rs | 3573 ----------------- third_party/rust/syn-0.12.12/src/file.rs | 122 - third_party/rust/syn-0.12.12/src/gen/fold.rs | 2946 -------------- third_party/rust/syn-0.12.12/src/gen/visit.rs | 2278 ----------- .../rust/syn-0.12.12/src/gen/visit_mut.rs | 2279 ----------- .../rust/syn-0.12.12/src/gen_helper.rs | 165 - third_party/rust/syn-0.12.12/src/generics.rs | 984 ----- third_party/rust/syn-0.12.12/src/ident.rs | 295 -- third_party/rust/syn-0.12.12/src/item.rs | 1998 --------- third_party/rust/syn-0.12.12/src/lib.rs | 723 ---- third_party/rust/syn-0.12.12/src/lifetime.rs | 161 - third_party/rust/syn-0.12.12/src/lit.rs | 1028 ----- third_party/rust/syn-0.12.12/src/mac.rs | 115 - third_party/rust/syn-0.12.12/src/macros.rs | 186 - third_party/rust/syn-0.12.12/src/op.rs | 223 - .../rust/syn-0.12.12/src/parse_quote.rs | 160 - third_party/rust/syn-0.12.12/src/parsers.rs | 1384 ------- 
third_party/rust/syn-0.12.12/src/path.rs | 572 --- .../rust/syn-0.12.12/src/punctuated.rs | 669 --- third_party/rust/syn-0.12.12/src/spanned.rs | 152 - third_party/rust/syn-0.12.12/src/synom.rs | 232 -- third_party/rust/syn-0.12.12/src/token.rs | 779 ---- third_party/rust/syn-0.12.12/src/tt.rs | 184 - third_party/rust/syn-0.12.12/src/ty.rs | 811 ---- .../rust/synstructure/.cargo-checksum.json | 2 +- third_party/rust/synstructure/Cargo.toml | 8 +- third_party/rust/synstructure/README.md | 5 +- third_party/rust/synstructure/src/lib.rs | 483 ++- third_party/rust/synstructure/src/macros.rs | 38 +- 76 files changed, 557 insertions(+), 28918 deletions(-) delete mode 100644 third_party/rust/proc-macro2-0.2.2/.cargo-checksum.json delete mode 100644 third_party/rust/proc-macro2-0.2.2/.travis.yml delete mode 100644 third_party/rust/proc-macro2-0.2.2/Cargo.toml delete mode 100644 third_party/rust/proc-macro2-0.2.2/LICENSE-APACHE delete mode 100644 third_party/rust/proc-macro2-0.2.2/LICENSE-MIT delete mode 100644 third_party/rust/proc-macro2-0.2.2/README.md delete mode 100644 third_party/rust/proc-macro2-0.2.2/src/lib.rs delete mode 100644 third_party/rust/proc-macro2-0.2.2/src/macros.rs delete mode 100644 third_party/rust/proc-macro2-0.2.2/src/stable.rs delete mode 100644 third_party/rust/proc-macro2-0.2.2/src/strnom.rs delete mode 100644 third_party/rust/proc-macro2-0.2.2/src/unstable.rs delete mode 100644 third_party/rust/proc-macro2-0.2.2/tests/test.rs delete mode 100644 third_party/rust/quote-0.4.2/.cargo-checksum.json delete mode 100644 third_party/rust/quote-0.4.2/Cargo.toml delete mode 100644 third_party/rust/quote-0.4.2/LICENSE-APACHE delete mode 100644 third_party/rust/quote-0.4.2/LICENSE-MIT delete mode 100644 third_party/rust/quote-0.4.2/README.md delete mode 100644 third_party/rust/quote-0.4.2/src/lib.rs delete mode 100644 third_party/rust/quote-0.4.2/src/to_tokens.rs delete mode 100644 third_party/rust/quote-0.4.2/src/tokens.rs delete mode 100644 third_party/rust/quote-0.4.2/tests/test.rs delete mode 100644 third_party/rust/syn-0.12.12/.cargo-checksum.json delete mode 100644 third_party/rust/syn-0.12.12/Cargo.toml delete mode 100644 third_party/rust/syn-0.12.12/LICENSE-APACHE delete mode 100644 third_party/rust/syn-0.12.12/LICENSE-MIT delete mode 100644 third_party/rust/syn-0.12.12/README.md delete mode 100644 third_party/rust/syn-0.12.12/src/attr.rs delete mode 100644 third_party/rust/syn-0.12.12/src/buffer.rs delete mode 100644 third_party/rust/syn-0.12.12/src/data.rs delete mode 100644 third_party/rust/syn-0.12.12/src/derive.rs delete mode 100644 third_party/rust/syn-0.12.12/src/error.rs delete mode 100644 third_party/rust/syn-0.12.12/src/expr.rs delete mode 100644 third_party/rust/syn-0.12.12/src/file.rs delete mode 100644 third_party/rust/syn-0.12.12/src/gen/fold.rs delete mode 100644 third_party/rust/syn-0.12.12/src/gen/visit.rs delete mode 100644 third_party/rust/syn-0.12.12/src/gen/visit_mut.rs delete mode 100644 third_party/rust/syn-0.12.12/src/gen_helper.rs delete mode 100644 third_party/rust/syn-0.12.12/src/generics.rs delete mode 100644 third_party/rust/syn-0.12.12/src/ident.rs delete mode 100644 third_party/rust/syn-0.12.12/src/item.rs delete mode 100644 third_party/rust/syn-0.12.12/src/lib.rs delete mode 100644 third_party/rust/syn-0.12.12/src/lifetime.rs delete mode 100644 third_party/rust/syn-0.12.12/src/lit.rs delete mode 100644 third_party/rust/syn-0.12.12/src/mac.rs delete mode 100644 third_party/rust/syn-0.12.12/src/macros.rs delete mode 100644 
third_party/rust/syn-0.12.12/src/op.rs delete mode 100644 third_party/rust/syn-0.12.12/src/parse_quote.rs delete mode 100644 third_party/rust/syn-0.12.12/src/parsers.rs delete mode 100644 third_party/rust/syn-0.12.12/src/path.rs delete mode 100644 third_party/rust/syn-0.12.12/src/punctuated.rs delete mode 100644 third_party/rust/syn-0.12.12/src/spanned.rs delete mode 100644 third_party/rust/syn-0.12.12/src/synom.rs delete mode 100644 third_party/rust/syn-0.12.12/src/token.rs delete mode 100644 third_party/rust/syn-0.12.12/src/tt.rs delete mode 100644 third_party/rust/syn-0.12.12/src/ty.rs diff --git a/third_party/rust/cstr-macros/.cargo-checksum.json b/third_party/rust/cstr-macros/.cargo-checksum.json index ee77d3c94c42..f068dc386f7e 100644 --- a/third_party/rust/cstr-macros/.cargo-checksum.json +++ b/third_party/rust/cstr-macros/.cargo-checksum.json @@ -1 +1 @@ -{"files":{"Cargo.toml":"e173ef38709c8afee2488a6b0acf94c7695c3cebe89b3fe444253808bede8405","LICENSE":"2c6fc9268c3b765da5bf34fe4909425437f61be05674c2516c7f8cf1251c20aa","src/lib.rs":"71e7248b21b5e603e31060ecf241cf204efdfea5a0b400d084601f6c8bdfe11c"},"package":"f9f316203d1ea36f4f18316822806f6999aa3dc5ed1adf51e35b77e3b3933d78"} \ No newline at end of file +{"files":{"Cargo.toml":"517c9ae719b876cf67d9bd8dfec71d0b7a35ffe6e68668c35e21e2f6c3dfdc05","LICENSE":"2c6fc9268c3b765da5bf34fe4909425437f61be05674c2516c7f8cf1251c20aa","src/lib.rs":"71e7248b21b5e603e31060ecf241cf204efdfea5a0b400d084601f6c8bdfe11c"},"package":"0472c17c83d3ec1af32fb6ee2b3ad56ae0b6e69355d63d1d30602055c34324a8"} \ No newline at end of file diff --git a/third_party/rust/cstr-macros/Cargo.toml b/third_party/rust/cstr-macros/Cargo.toml index b125ae4e236c..e61ef2528afb 100644 --- a/third_party/rust/cstr-macros/Cargo.toml +++ b/third_party/rust/cstr-macros/Cargo.toml @@ -12,7 +12,7 @@ [package] name = "cstr-macros" -version = "0.1.2" +version = "0.1.3" authors = ["Xidorn Quan "] description = "Procedural macros for cstr" license = "MIT" @@ -24,8 +24,8 @@ proc-macro = true version = "0.1" [dependencies.syn] -version = "0.12" +version = "0.13" features = ["derive", "parsing"] default-features = false [dev-dependencies.quote] -version = "0.4" +version = "0.5" diff --git a/third_party/rust/darling/.cargo-checksum.json b/third_party/rust/darling/.cargo-checksum.json index b859e1dc4c93..86ae88474762 100644 --- a/third_party/rust/darling/.cargo-checksum.json +++ b/third_party/rust/darling/.cargo-checksum.json @@ -1 +1 @@ 
-{"files":{".travis.yml":"2e3d3211e52ff52d83a0a2a495a28175dbcf2a30ab680d7c8f20622751b04f78","CHANGELOG.md":"fa0ef2d2b2d5382962d371a68e5eba052f5b303d70090e21c2d1f4c5e52b9851","Cargo.toml":"ee3bb349f21a8bf3801692a0af134b3e1071f86bd1d4aa52b47f45c7e9d696eb","LICENSE":"8ea93490d74a5a1b1af3ff71d786271b3f1e5f0bea79ac16e02ec533cef040d6","README.md":"7e05868f02bae698ee3193b86e86f25faa4dbc63661062e1d3f7bff590cfb313","examples/consume_fields.rs":"f32d3873b61e22b1ded500571ec7120420b4825ee7f807d07409e3a257407add","examples/fallible_read.rs":"4e2d558f8a77c4fffa79bde5a6c48df3dbc932822e7bc7cf0a903d1ea38b8d6f","publish.sh":"42795a155247c69402f8c4c917c9874a06dfde5a7606c8b59fc4e9ccd34233dd","src/lib.rs":"c13e19cd0363784d9ec3605bafcaf74648594fb419162143c4ecc3308a8ec695","tests/accrue_errors.rs":"7a995118cfa75ac90accf9a35e17b07a00e8528c8ccc5dd8700ba7b4c59393c1","tests/custom_bound.rs":"cca7c557ac0a2efd9554d31f0df9a98c4f6f01b9f211107846732cc1fe9f7856","tests/enums_newtype.rs":"328ebbbb4aa540f06f13158dff22cf9d98d45dba2331f75e4aa169d348397b76","tests/enums_struct.rs":"560a8bfdea9eca7f8c2024bc8740e71ef1a0681cea36b098ceadba520fad8640","tests/enums_unit.rs":"5e9458af9d695706e181b6622dcbc8b80b9eae33dcc1f15da7eecfd3e7037777","tests/error.rs":"69d12e969555fc1d72990d7596b54adcb469da95f1f257d0273f31bc9c855db5","tests/from_variant.rs":"af60c9dec64e80e2ac3beafb942d8edc1100a1342bb97378e6a35f878dd1fb50","tests/generics.rs":"3d884d65cb6b57d4bc4b3f7c39f321b2df3cd339fa15db5b66dc7c97ef84df17","tests/happy_path.rs":"5143dbf33b59fcab94be61affefddf485857f1a5cb3d45d5583463423d417cdf","tests/multiple.rs":"20e1c5110449db46df68c5a4cdb6e0c4c0e9a6b47451fe73f1523a1cf730196d","tests/newtype.rs":"a8709857e2067bc01b388a11230db5764c9e5fe2341c98d6c819adc01472b988","tests/skip.rs":"e34034c6b5fae80c8cf2caa762a41ab3d971f8af50f1022e68ba299218477892","tests/split_declaration.rs":"d55219ec0dce001ccd1975f0b4fbe0f5e8c5792a1ddf2de5a210d380bc6761e0","tests/supports.rs":"1131c2afd42e20e4a39f922758cbb8d7c5a0167ae41f9cd1cd14b40db055cd10"},"package":"d3effd06d4057f275cb7858889f4952920bab78dd8ff0f6e7dfe0c8d2e67ed89"} \ No newline at end of file 
+{"files":{".travis.yml":"2e3d3211e52ff52d83a0a2a495a28175dbcf2a30ab680d7c8f20622751b04f78","CHANGELOG.md":"d7322023ffc58d041c542f8de0f43167f34ba4fdacc9a5014308d97055f7b729","Cargo.toml":"c1d8f7f99c24eb373e5aefaf3c678eea57d72552fdbb3547872b62b0d28aa07f","LICENSE":"8ea93490d74a5a1b1af3ff71d786271b3f1e5f0bea79ac16e02ec533cef040d6","README.md":"7e05868f02bae698ee3193b86e86f25faa4dbc63661062e1d3f7bff590cfb313","examples/consume_fields.rs":"f32d3873b61e22b1ded500571ec7120420b4825ee7f807d07409e3a257407add","examples/fallible_read.rs":"4e2d558f8a77c4fffa79bde5a6c48df3dbc932822e7bc7cf0a903d1ea38b8d6f","publish.sh":"42795a155247c69402f8c4c917c9874a06dfde5a7606c8b59fc4e9ccd34233dd","src/lib.rs":"c13e19cd0363784d9ec3605bafcaf74648594fb419162143c4ecc3308a8ec695","tests/accrue_errors.rs":"7a995118cfa75ac90accf9a35e17b07a00e8528c8ccc5dd8700ba7b4c59393c1","tests/custom_bound.rs":"cca7c557ac0a2efd9554d31f0df9a98c4f6f01b9f211107846732cc1fe9f7856","tests/enums_newtype.rs":"328ebbbb4aa540f06f13158dff22cf9d98d45dba2331f75e4aa169d348397b76","tests/enums_struct.rs":"560a8bfdea9eca7f8c2024bc8740e71ef1a0681cea36b098ceadba520fad8640","tests/enums_unit.rs":"5e9458af9d695706e181b6622dcbc8b80b9eae33dcc1f15da7eecfd3e7037777","tests/error.rs":"69d12e969555fc1d72990d7596b54adcb469da95f1f257d0273f31bc9c855db5","tests/from_variant.rs":"af60c9dec64e80e2ac3beafb942d8edc1100a1342bb97378e6a35f878dd1fb50","tests/generics.rs":"3d884d65cb6b57d4bc4b3f7c39f321b2df3cd339fa15db5b66dc7c97ef84df17","tests/happy_path.rs":"5143dbf33b59fcab94be61affefddf485857f1a5cb3d45d5583463423d417cdf","tests/multiple.rs":"20e1c5110449db46df68c5a4cdb6e0c4c0e9a6b47451fe73f1523a1cf730196d","tests/newtype.rs":"a8709857e2067bc01b388a11230db5764c9e5fe2341c98d6c819adc01472b988","tests/skip.rs":"e34034c6b5fae80c8cf2caa762a41ab3d971f8af50f1022e68ba299218477892","tests/split_declaration.rs":"d55219ec0dce001ccd1975f0b4fbe0f5e8c5792a1ddf2de5a210d380bc6761e0","tests/supports.rs":"1131c2afd42e20e4a39f922758cbb8d7c5a0167ae41f9cd1cd14b40db055cd10"},"package":"2a78af487e4eb8f4421a1770687b328af6bb4494ca93435210678c6eea875c11"} \ No newline at end of file diff --git a/third_party/rust/darling/CHANGELOG.md b/third_party/rust/darling/CHANGELOG.md index 50298e908981..3cd7c3832a47 100644 --- a/third_party/rust/darling/CHANGELOG.md +++ b/third_party/rust/darling/CHANGELOG.md @@ -1,6 +1,19 @@ # Changelog -## Unreleased Changes +## Unreleased Features +_None_ + +## v0.4.0 (April 5, 2018) +- Update dependencies on `proc-macro`, `quote`, and `syn` [#26](https://github.com/TedDriggs/darling/pull/26). Thanks to @hcpl + +## v0.3.3 (April 2, 2018) +**YANKED** + +## v0.3.2 (March 13, 2018) +- Derive `Default` on `darling::Ignored` (fixes [#25](https://github.com/TedDriggs/darling/issues/25)). + +## v0.3.1 (March 7, 2018) +- Support proc-macro2/nightly [#24](https://github.com/TedDriggs/darling/pull/24). 
Thanks to @kdy1 ## v0.3.0 (January 26, 2018) diff --git a/third_party/rust/darling/Cargo.toml b/third_party/rust/darling/Cargo.toml index 9dbbdea8ca1a..32fd8074e97f 100644 --- a/third_party/rust/darling/Cargo.toml +++ b/third_party/rust/darling/Cargo.toml @@ -12,22 +12,22 @@ [package] name = "darling" -version = "0.3.0" +version = "0.4.0" authors = ["Ted Driggs "] description = "A proc-macro library for reading attributes into structs when\nimplementing custom derives.\n" -documentation = "https://docs.rs/darling/0.3.0" +documentation = "https://docs.rs/darling/0.4.0" readme = "README.md" license = "MIT" repository = "https://github.com/TedDriggs/darling" [dependencies.darling_core] -version = "=0.3.0" +version = "=0.4.0" [dependencies.darling_macro] -version = "=0.3.0" +version = "=0.4.0" [dev-dependencies.quote] -version = "0.4" +version = "0.5" [dev-dependencies.syn] -version = "0.12.10" +version = "0.13" [badges.travis-ci] repository = "TedDriggs/darling" diff --git a/third_party/rust/darling_core/.cargo-checksum.json b/third_party/rust/darling_core/.cargo-checksum.json index 0e4490783ff1..0f9dcee3f5da 100644 --- a/third_party/rust/darling_core/.cargo-checksum.json +++ b/third_party/rust/darling_core/.cargo-checksum.json @@ -1 +1 @@ -{"files":{"Cargo.toml":"64569afb788cd8b08c9d13a994f39b9123138f0391cd2aa989d4949eadaf09ad","src/ast.rs":"2538b41e2a579c0a5a49e02f911120ffff39d48dfc0d80570a5fcfe95c971794","src/codegen/default_expr.rs":"8cea4bf29096ad1add5325430a12865173a13a821e1888bed8120ec6120a7764","src/codegen/error.rs":"2a1bde9a20c664f26c6a0017e35ddf82885a31b8be42a628ea5549013b1eab44","src/codegen/field.rs":"ad8355c7bb87269c7dcc1d27695b0f8de410b546625d33d5a219fbadf85f8230","src/codegen/fmi_impl.rs":"89a66b24d7527989dd90ca71d9409fd8cdcf3a659fa1a670448032a4b384e83c","src/codegen/from_derive_impl.rs":"36507c9eddd354a50f96cd28e737c914be494c83ae61202b533524a9d90a2ca9","src/codegen/from_field.rs":"586866442f6628fd055f139b018a8c5c13e3aea20954ec741517aa9ab731c163","src/codegen/from_variant_impl.rs":"d42ecd82d3159aa7ee89ed81ed355c927dea9df2a298cf1db0c486699b77eac2","src/codegen/mod.rs":"46cdb1b4a76eb2e56f01e2c9e2879aed9b1c21ecbed42575a2eeccabf446a27a","src/codegen/outer_from_impl.rs":"a484fc3faed8a722327df18cb5179812b60ff62795a3b00b2b968a40bddec52a","src/codegen/trait_impl.rs":"715ce9dcb82d855e9dd8f2a70599bc3c5328acde70c92b7db5bd4c276598a7d0","src/codegen/variant.rs":"294045aefcfcb54e8b9536d6d91251a46115271869578b3212ae36ae76883b18","src/codegen/variant_data.rs":"efdee90d7e9acce39672024502ceb0616bc162d11e59d255fcbcd23f00f80806","src/error.rs":"55f33c960123799e1ccb27676d780751c0b62d1f52ccb9a2ac69cea4acfe55db","src/from_derive_input.rs":"ea55cc1b2bc17495d36fb616f7cd7aa78e4b74ea7c8554eed9d392ee853700c3","src/from_field.rs":"b42c2fc037aebdd55f195d722ba20075d3e095f03e89890a0d37d406d6419d87","src/from_meta_item.rs":"c4cf05c3649db57ead2d7fd6ae3973b08e74ad20022ac62a7e5e49aa74a46a57","src/from_variant.rs":"2baeb09c8a95ff606d9d5ca8992f10bbad9c4925590332af1a8b5bdae54ebae8","src/lib.rs":"e8b381a74c9303a620e7f210174bfab4c44e3137cba387a9547376982e3fb10a","src/macros.rs":"ff0c87953220702d8440894a7f0f557b1aae930096663c0c98e7ca686a94f305","src/options/core.rs":"689067ee0901714e53caeef5d5634c4bc02f52ff06e3ff286410eecaca665734","src/options/forward_attrs.rs":"35a83a4ae695872413d964d9050e35a0075c8386c286d291b1ecf1779f9ba8a3","src/options/from_derive.rs":"502e18c3d9f90d7a4cebc8c6b60181ab6068958a0ba2e70fe645528dee34b231","src/options/from_field.rs":"7222be5e62026184169f12adb08403abc89d66c53e678f8d8b43afaeceed9e4f","sr
c/options/from_meta_item.rs":"cbc2d747e9e35e0d68b26c9f1592914bb4924cac01a6cdaf9137f643a72b551a","src/options/from_variant.rs":"6f8538da3fb61e614552839ee32bc479d33b5227d7f9d9b357d8d05146b96dac","src/options/input_field.rs":"364c7a30d0c320197062706ba12507742b97513bb64a644243b03210ef3bb334","src/options/input_variant.rs":"2fc064fb87a73542a012a31aa5fd9702cf58b52a1bf37dabbfa7fb2e758ff9cc","src/options/mod.rs":"ceefde4d1dba9b5f3822f667c34a6eb963e5a899973475456bfe7939177f0e19","src/options/outer_from.rs":"3125aad9f8c033727fd3ef4ef1e1881c77fa52463f78342c408bf135e8533037","src/options/shape.rs":"118af560da80a46d6e3f8980c3d9b4863319d224a8b2985520901bfea0eba531","src/util/ident_list.rs":"11b5008380ace89d5745cdd83b73a2841c5382f05d3a8942ba998a7e4d6abb31","src/util/ignored.rs":"89e0b5bc0f9dd8b77a63d5c1b7b3a7bb4b68d539fb97efe0d86cd1dbb46be1e8","src/util/mod.rs":"0c9ee0ba8ec03ca654fd298bd0d82588f224e3743227e6cba2beba4ab2f4dee4","src/util/over_ride.rs":"f63637ff73b3f377a4b1c38714a0f108b98ff40a96dd3ffbebb1e4ecc7523813"},"package":"167dd3e235c2f1da16a635c282630452cdf49191eb05711de1bcd1d3d5068c00"} \ No newline at end of file +{"files":{"Cargo.toml":"06561e115990be3f278ed12704d0eb575c971242cbdfe8ebb32a8132280e054e","src/ast.rs":"2538b41e2a579c0a5a49e02f911120ffff39d48dfc0d80570a5fcfe95c971794","src/codegen/default_expr.rs":"28d750fb5ed3a6344007bf545c48e4d9a15c175209903d4009efc0de257adf2e","src/codegen/error.rs":"2a1bde9a20c664f26c6a0017e35ddf82885a31b8be42a628ea5549013b1eab44","src/codegen/field.rs":"ad8355c7bb87269c7dcc1d27695b0f8de410b546625d33d5a219fbadf85f8230","src/codegen/fmi_impl.rs":"89a66b24d7527989dd90ca71d9409fd8cdcf3a659fa1a670448032a4b384e83c","src/codegen/from_derive_impl.rs":"36507c9eddd354a50f96cd28e737c914be494c83ae61202b533524a9d90a2ca9","src/codegen/from_field.rs":"586866442f6628fd055f139b018a8c5c13e3aea20954ec741517aa9ab731c163","src/codegen/from_variant_impl.rs":"d42ecd82d3159aa7ee89ed81ed355c927dea9df2a298cf1db0c486699b77eac2","src/codegen/mod.rs":"46cdb1b4a76eb2e56f01e2c9e2879aed9b1c21ecbed42575a2eeccabf446a27a","src/codegen/outer_from_impl.rs":"2314c1594bd63e682ebd4a4b4954b2b9f16aa50b1422c05568bce97ae29f9727","src/codegen/trait_impl.rs":"715ce9dcb82d855e9dd8f2a70599bc3c5328acde70c92b7db5bd4c276598a7d0","src/codegen/variant.rs":"294045aefcfcb54e8b9536d6d91251a46115271869578b3212ae36ae76883b18","src/codegen/variant_data.rs":"efdee90d7e9acce39672024502ceb0616bc162d11e59d255fcbcd23f00f80806","src/error.rs":"55f33c960123799e1ccb27676d780751c0b62d1f52ccb9a2ac69cea4acfe55db","src/from_derive_input.rs":"ea55cc1b2bc17495d36fb616f7cd7aa78e4b74ea7c8554eed9d392ee853700c3","src/from_field.rs":"b42c2fc037aebdd55f195d722ba20075d3e095f03e89890a0d37d406d6419d87","src/from_meta_item.rs":"996ccec9dca998ff41f65bb346e5cc75952af5d61339c6951bebdbf8db1212c5","src/from_variant.rs":"2baeb09c8a95ff606d9d5ca8992f10bbad9c4925590332af1a8b5bdae54ebae8","src/lib.rs":"58b910cecc1f1962c2d6059db384f065099547c34631d9ddcc35099db8e16405","src/macros.rs":"ef249cd9ca593aac423b4242df1c39c31610438da094c21562d74a7e5823c700","src/options/core.rs":"689067ee0901714e53caeef5d5634c4bc02f52ff06e3ff286410eecaca665734","src/options/forward_attrs.rs":"35a83a4ae695872413d964d9050e35a0075c8386c286d291b1ecf1779f9ba8a3","src/options/from_derive.rs":"502e18c3d9f90d7a4cebc8c6b60181ab6068958a0ba2e70fe645528dee34b231","src/options/from_field.rs":"7222be5e62026184169f12adb08403abc89d66c53e678f8d8b43afaeceed9e4f","src/options/from_meta_item.rs":"cbc2d747e9e35e0d68b26c9f1592914bb4924cac01a6cdaf9137f643a72b551a","src/options/from_variant.rs":"6f8538
da3fb61e614552839ee32bc479d33b5227d7f9d9b357d8d05146b96dac","src/options/input_field.rs":"6d43c2907694c4187e9f182f7945fc769ce210cde8eb1b4a336dea2a7fce3710","src/options/input_variant.rs":"2fc064fb87a73542a012a31aa5fd9702cf58b52a1bf37dabbfa7fb2e758ff9cc","src/options/mod.rs":"ceefde4d1dba9b5f3822f667c34a6eb963e5a899973475456bfe7939177f0e19","src/options/outer_from.rs":"3125aad9f8c033727fd3ef4ef1e1881c77fa52463f78342c408bf135e8533037","src/options/shape.rs":"118af560da80a46d6e3f8980c3d9b4863319d224a8b2985520901bfea0eba531","src/util/ident_list.rs":"11b5008380ace89d5745cdd83b73a2841c5382f05d3a8942ba998a7e4d6abb31","src/util/ignored.rs":"66e2e3201e17e8fffe2f249a4327b8178a20304624a47c0149fe8dd5e05d187c","src/util/mod.rs":"0c9ee0ba8ec03ca654fd298bd0d82588f224e3743227e6cba2beba4ab2f4dee4","src/util/over_ride.rs":"f63637ff73b3f377a4b1c38714a0f108b98ff40a96dd3ffbebb1e4ecc7523813"},"package":"b315f49c7b6db3708bca6e6913c194581a44ec619b7a39e131d4dd63733a3698"} \ No newline at end of file diff --git a/third_party/rust/darling_core/Cargo.toml b/third_party/rust/darling_core/Cargo.toml index d728d2c70e93..362e41db0c38 100644 --- a/third_party/rust/darling_core/Cargo.toml +++ b/third_party/rust/darling_core/Cargo.toml @@ -12,7 +12,7 @@ [package] name = "darling_core" -version = "0.3.0" +version = "0.4.0" authors = ["Ted Driggs "] description = "Helper crate for proc-macro library for reading attributes into structs when\nimplementing custom derives. Use https://crates.io/crates/darling in your code.\n" license = "MIT" @@ -20,11 +20,14 @@ repository = "https://github.com/TedDriggs/darling" [dependencies.ident_case] version = "1.0.0" +[dependencies.proc-macro2] +version = "0.3" + [dependencies.quote] -version = "0.4" +version = "0.5" [dependencies.syn] -version = "0.12.10" +version = "0.13" features = ["extra-traits"] [features] diff --git a/third_party/rust/darling_core/src/codegen/default_expr.rs b/third_party/rust/darling_core/src/codegen/default_expr.rs index 404bf45de37f..3f5a92d45d8e 100644 --- a/third_party/rust/darling_core/src/codegen/default_expr.rs +++ b/third_party/rust/darling_core/src/codegen/default_expr.rs @@ -24,7 +24,7 @@ impl<'a> ToTokens for DefaultExpression<'a> { fn to_tokens(&self, tokens: &mut Tokens) { tokens.append_all(match *self { DefaultExpression::Inherit(ident) => { - let dsn = Ident::from(DEFAULT_STRUCT_NAME); + let dsn = Ident::new(DEFAULT_STRUCT_NAME, ::proc_macro2::Span::call_site()); quote!(#dsn.#ident) }, DefaultExpression::Explicit(path) => quote!(#path()), @@ -38,7 +38,7 @@ pub struct DefaultDeclaration<'a>(&'a DefaultExpression<'a>); impl<'a> ToTokens for DefaultDeclaration<'a> { fn to_tokens(&self, tokens: &mut Tokens) { - let name = Ident::from(DEFAULT_STRUCT_NAME); + let name = Ident::new(DEFAULT_STRUCT_NAME, ::proc_macro2::Span::call_site()); let expr = self.0; tokens.append_all(quote!(let #name: Self = #expr;)); } diff --git a/third_party/rust/darling_core/src/codegen/outer_from_impl.rs b/third_party/rust/darling_core/src/codegen/outer_from_impl.rs index 5f1c9cace95c..5717dc652a8f 100644 --- a/third_party/rust/darling_core/src/codegen/outer_from_impl.rs +++ b/third_party/rust/darling_core/src/codegen/outer_from_impl.rs @@ -37,6 +37,7 @@ fn compute_impl_bounds(bound: Path, mut generics: Generics) -> Generics { } let added_bound = TypeParamBound::Trait(TraitBound { + paren_token: None, modifier: TraitBoundModifier::None, lifetimes: None, path: bound, diff --git a/third_party/rust/darling_core/src/from_meta_item.rs 
b/third_party/rust/darling_core/src/from_meta_item.rs index d808a1851b94..0efaa270c914 100644 --- a/third_party/rust/darling_core/src/from_meta_item.rs +++ b/third_party/rust/darling_core/src/from_meta_item.rs @@ -192,7 +192,7 @@ impl FromMetaItem for isize { impl FromMetaItem for syn::Ident { fn from_string(value: &str) -> Result { - Ok(syn::Ident::from(value)) + Ok(syn::Ident::new(value, ::proc_macro2::Span::call_site())) } } diff --git a/third_party/rust/darling_core/src/lib.rs b/third_party/rust/darling_core/src/lib.rs index 790c1b0d86b4..50ac06a31d9f 100644 --- a/third_party/rust/darling_core/src/lib.rs +++ b/third_party/rust/darling_core/src/lib.rs @@ -5,6 +5,7 @@ extern crate quote; #[macro_use] extern crate syn; +extern crate proc_macro2; extern crate ident_case; diff --git a/third_party/rust/darling_core/src/macros.rs b/third_party/rust/darling_core/src/macros.rs index 8a57f088e069..65a3cd5750c9 100644 --- a/third_party/rust/darling_core/src/macros.rs +++ b/third_party/rust/darling_core/src/macros.rs @@ -1,3 +1,9 @@ +macro_rules! quote { + ($($tt:tt)*) => { + quote_spanned!(::proc_macro2::Span::call_site() => $($tt)*) + }; +} + macro_rules! path { ($($path:tt)+) => { parse_quote!($($path)+) diff --git a/third_party/rust/darling_core/src/options/input_field.rs b/third_party/rust/darling_core/src/options/input_field.rs index 3b9ef04557cb..34b7f3adeb9e 100644 --- a/third_party/rust/darling_core/src/options/input_field.rs +++ b/third_party/rust/darling_core/src/options/input_field.rs @@ -61,7 +61,7 @@ impl InputField { } pub fn from_field(f: &syn::Field, parent: Option<&Core>) -> Result { - let ident = f.ident.clone().unwrap_or(syn::Ident::from("__unnamed")); + let ident = f.ident.clone().unwrap_or(syn::Ident::new("__unnamed", ::proc_macro2::Span::call_site())); let ty = f.ty.clone(); let base = Self::new(ident, ty).parse_attributes(&f.attrs)?; diff --git a/third_party/rust/darling_core/src/util/ignored.rs b/third_party/rust/darling_core/src/util/ignored.rs index c097a16fc8c8..8edf58a3d71f 100644 --- a/third_party/rust/darling_core/src/util/ignored.rs +++ b/third_party/rust/darling_core/src/util/ignored.rs @@ -6,7 +6,7 @@ use {FromMetaItem, FromDeriveInput, FromField, FromVariant, Result}; /// /// All meta-items, fields, and variants will be successfully read into /// the `Ignored` struct, with all properties discarded. 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)] pub struct Ignored; impl FromMetaItem for Ignored { diff --git a/third_party/rust/darling_macro/.cargo-checksum.json b/third_party/rust/darling_macro/.cargo-checksum.json index 73e49fe61469..e953c6311ff2 100644 --- a/third_party/rust/darling_macro/.cargo-checksum.json +++ b/third_party/rust/darling_macro/.cargo-checksum.json @@ -1 +1 @@ -{"files":{"Cargo.toml":"24ebe6f6d1e4bd5ca3dd416bda62ab1d12d05f6990d30cff0fd301cc83273ecc","src/lib.rs":"d900da894985945215cb4494ebd4e8b5f697c19bf9e624a1bb03d22a0a5367a5"},"package":"c53edaba455f6073a10c27c72440860eb3f60444f8c8660a391032eeae744d82"} \ No newline at end of file +{"files":{"Cargo.toml":"cf9d579ee6af881a7aa52d43d637b4afa9cf589bfda3fa63159538d681855330","src/lib.rs":"d900da894985945215cb4494ebd4e8b5f697c19bf9e624a1bb03d22a0a5367a5"},"package":"eb69a38fdeaeaf3db712e1df170de67ee9dfc24fb88ca3e9d21e703ec25a4d8e"} \ No newline at end of file diff --git a/third_party/rust/darling_macro/Cargo.toml b/third_party/rust/darling_macro/Cargo.toml index eda5eb56a5bf..1045edebaa48 100644 --- a/third_party/rust/darling_macro/Cargo.toml +++ b/third_party/rust/darling_macro/Cargo.toml @@ -12,7 +12,7 @@ [package] name = "darling_macro" -version = "0.3.0" +version = "0.4.0" authors = ["Ted Driggs "] description = "Internal support for a proc-macro library for reading attributes into structs when\nimplementing custom derives. Use https://crates.io/crates/darling in your code.\n" license = "MIT" @@ -21,10 +21,10 @@ repository = "https://github.com/TedDriggs/darling" [lib] proc-macro = true [dependencies.darling_core] -version = "=0.3.0" +version = "=0.4.0" [dependencies.quote] -version = "0.4" +version = "0.5" [dependencies.syn] -version = "0.12" +version = "0.13" diff --git a/third_party/rust/proc-macro2-0.2.2/.cargo-checksum.json b/third_party/rust/proc-macro2-0.2.2/.cargo-checksum.json deleted file mode 100644 index 6d5e5011ec54..000000000000 --- a/third_party/rust/proc-macro2-0.2.2/.cargo-checksum.json +++ /dev/null @@ -1 +0,0 @@ -{"files":{".travis.yml":"e455a0ed5c3dd056d31f4c7be088bc94f21cab6595a23f2f015b1efc0ac2b55c","Cargo.toml":"0b700f1e7b8ba76ce4678d36b6906d38455e88f51085ea9f120d6ca63f13d5d7","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"adf2e822923878c2ebf4a0a782898c598fc6f57a3af905b85d57fc716c836687","src/lib.rs":"fbae25504264b185d877fb8784d4d88333ea34a7cbeddca3277dc8421f179933","src/macros.rs":"414505e520b8d705b4ce5a64ec2e82d6d1af0b88567454169486a668fbc1e9c8","src/stable.rs":"6363c4c0ef989c2ec81aa75be71c69a103d45a1de439f3f3bcb6806d8a78a172","src/strnom.rs":"1baded8543a9930798fb16092fe51e9074591902e327e0f94eb1c908a6370de9","src/unstable.rs":"110d27103e37427b3d1dcb45b6ba9dc9f5641a255766a43d5db0f4fd10a341ed","tests/test.rs":"9e75d5289abc1dc58c1df00ae051d8c3cd2c0d7830cca5ad689007c05acffe26"},"package":"d1cb7aaaa4bf022ec2b14ff2f2ba1643a22f3cee88df014a85e14b392282c61d"} \ No newline at end of file diff --git a/third_party/rust/proc-macro2-0.2.2/.travis.yml b/third_party/rust/proc-macro2-0.2.2/.travis.yml deleted file mode 100644 index a524c0167e1e..000000000000 --- a/third_party/rust/proc-macro2-0.2.2/.travis.yml +++ /dev/null @@ -1,31 +0,0 @@ -language: rust -sudo: false - -matrix: - include: - - rust: 1.15.0 - - rust: stable - - rust: beta - - rust: nightly - before_script: - - pip install 'travis-cargo<0.2' --user 
&& export PATH=$HOME/.local/bin:$PATH - script: - - cargo test - - cargo build --features nightly - - RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test - - RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo build --features nightly - - RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo doc --no-deps - after_success: - - travis-cargo --only nightly doc-upload - -script: - - cargo test - - RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test -env: - global: - - TRAVIS_CARGO_NIGHTLY_FEATURE="" - - secure: "NAsZghAVTAksrm4WP4I66VmD2wW0eRbwB+ZKHUQfvbgUaCRvVdp4WBbWXGU/f/yHgDFWZwljWR4iPMiBwAK8nZsQFRuLFdHrOOHqbkj639LLdT9A07s1zLMB1GfR1fDttzrGhm903pbT2yxSyqqpahGYM7TaGDYYmKYIk4XyVNA5F5Sk7RI+rCecKraoYDeUEFbjWWYtU2FkEXsELEKj0emX5reWkR+wja3QokFcRZ25+Zd2dRC0K8W5QcY2UokLzKncBMCTC5q70H616S3r/9qW67Si1njsJ7RzP0NlZQUNQ/VCvwr4LCr9w+AD9i1SZtXxuux77tWEWSJvBzUc82dDMUv/floJuF7HTulSxxQoRm+fbzpXj9mgaJNiUHXru6ZRTCRVRUSXpcAco94bVoy/jnjrTe3jgAIZK5w14zA8yLw1Jxof31DlbcWORxgF+6fnY2nKPRN2oiQ50+jm1AuGDZX59/wMiu1QlkjOBHtikHp+u+7mp3SkkM04DvuQ/tWODQQnOOtrA0EB3i5H1zeTSnUcmbJufUljWWOvF1QYII08MccqwfG1KWbpobvdu+cV2iVhkq/lNCEL3Ai101CnmSCnMz+9oK/XxYOrx2TnaD9ootOKgnk7XWxF19GZecQx6O2hHTouxvB/0KcRPGWmMWl0H88f3T/Obql8bG8=" - -notifications: - email: - on_success: never diff --git a/third_party/rust/proc-macro2-0.2.2/Cargo.toml b/third_party/rust/proc-macro2-0.2.2/Cargo.toml deleted file mode 100644 index 17a47c189be7..000000000000 --- a/third_party/rust/proc-macro2-0.2.2/Cargo.toml +++ /dev/null @@ -1,31 +0,0 @@ -# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO -# -# When uploading crates to the registry Cargo will automatically -# "normalize" Cargo.toml files for maximal compatibility -# with all versions of Cargo and also rewrite `path` dependencies -# to registry (e.g. crates.io) dependencies -# -# If you believe there's an error in this file please file an -# issue against the rust-lang/cargo repository. If you're -# editing this file be aware that the upstream Cargo.toml -# will likely look very different (and much more reasonable) - -[package] -name = "proc-macro2" -version = "0.2.2" -authors = ["Alex Crichton "] -description = "A stable implementation of the upcoming new `proc_macro` API. Comes with an\noption, off by default, to also reimplement itself in terms of the upstream\nunstable API.\n" -homepage = "https://github.com/alexcrichton/proc-macro2" -documentation = "https://docs.rs/proc-macro2" -readme = "README.md" -keywords = ["macros"] -license = "MIT/Apache-2.0" -repository = "https://github.com/alexcrichton/proc-macro2" - -[lib] -doctest = false -[dependencies.unicode-xid] -version = "0.1" - -[features] -nightly = [] diff --git a/third_party/rust/proc-macro2-0.2.2/LICENSE-APACHE b/third_party/rust/proc-macro2-0.2.2/LICENSE-APACHE deleted file mode 100644 index 16fe87b06e80..000000000000 --- a/third_party/rust/proc-macro2-0.2.2/LICENSE-APACHE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - -1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. 
For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - -2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - -3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - -4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - -5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - -6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - -7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - -8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - -9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - -END OF TERMS AND CONDITIONS - -APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - -Copyright [yyyy] [name of copyright owner] - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
diff --git a/third_party/rust/proc-macro2-0.2.2/LICENSE-MIT b/third_party/rust/proc-macro2-0.2.2/LICENSE-MIT deleted file mode 100644 index 39e0ed660215..000000000000 --- a/third_party/rust/proc-macro2-0.2.2/LICENSE-MIT +++ /dev/null @@ -1,25 +0,0 @@ -Copyright (c) 2014 Alex Crichton - -Permission is hereby granted, free of charge, to any -person obtaining a copy of this software and associated -documentation files (the "Software"), to deal in the -Software without restriction, including without -limitation the rights to use, copy, modify, merge, -publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software -is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice -shall be included in all copies or substantial portions -of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. diff --git a/third_party/rust/proc-macro2-0.2.2/README.md b/third_party/rust/proc-macro2-0.2.2/README.md deleted file mode 100644 index c0a62a3a184b..000000000000 --- a/third_party/rust/proc-macro2-0.2.2/README.md +++ /dev/null @@ -1,98 +0,0 @@ -# proc-macro2 - -[![Build Status](https://api.travis-ci.org/alexcrichton/proc-macro2.svg?branch=master)](https://travis-ci.org/alexcrichton/proc-macro2) -[![Latest Version](https://img.shields.io/crates/v/proc-macro2.svg)](https://crates.io/crates/proc-macro2) -[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/proc-macro2) - -A small shim over the `proc_macro` crate in the compiler intended to multiplex -the current stable interface (as of 2017-07-05) and the [upcoming richer -interface][upcoming]. - -[upcoming]: https://github.com/rust-lang/rust/pull/40939 - -The upcoming support has features like: - -* Span information on tokens -* No need to go in/out through strings -* Structured input/output - -The hope is that libraries ported to `proc_macro2` will be trivial to port to -the real `proc_macro` crate once the support on nightly is stabilize. - -## Usage - -This crate by default compiles on the stable version of the compiler. It only -uses the stable surface area of the `proc_macro` crate upstream in the compiler -itself. Usage is done via: - -```toml -[dependencies] -proc-macro2 = "0.2" -``` - -followed by - -```rust -extern crate proc_macro; -extern crate proc_macro2; - -#[proc_macro_derive(MyDerive)] -pub fn my_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let input: proc_macro2::TokenStream = input.into(); - - let output: proc_macro2::TokenStream = { - /* transform input */ - }; - - output.into() -} -``` - -If you'd like you can enable the `nightly` feature in this crate. This will -cause it to compile against the **unstable and nightly-only** features of the -`proc_macro` crate. This in turn requires a nightly compiler. This should help -preserve span information, however, coming in from the compiler itself. 
- -You can enable this feature via: - -```toml -[dependencies] -proc-macro2 = { version = "0.2", features = ["nightly"] } -``` - - -## Unstable Features - -`proc-macro2` supports exporting some methods from `proc_macro` which are -currently highly unstable, and may not be stabilized in the first pass of -`proc_macro` stabilizations. These features are not exported by default. Minor -versions of `proc-macro2` may make breaking changes to them at any time. - -To enable these features, the `procmacro2_semver_exempt` config flag must be -passed to rustc. - -``` -RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo build -``` - -Note that this must not only be done for your crate, but for any crate that -depends on your crate. This infectious nature is intentional, as it serves as a -reminder that you are outside of the normal semver guarantees. - - -# License - -This project is licensed under either of - - * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or - http://www.apache.org/licenses/LICENSE-2.0) - * MIT license ([LICENSE-MIT](LICENSE-MIT) or - http://opensource.org/licenses/MIT) - -at your option. - -### Contribution - -Unless you explicitly state otherwise, any contribution intentionally submitted -for inclusion in Serde by you, as defined in the Apache-2.0 license, shall be -dual licensed as above, without any additional terms or conditions. diff --git a/third_party/rust/proc-macro2-0.2.2/src/lib.rs b/third_party/rust/proc-macro2-0.2.2/src/lib.rs deleted file mode 100644 index 373174c5f79d..000000000000 --- a/third_party/rust/proc-macro2-0.2.2/src/lib.rs +++ /dev/null @@ -1,337 +0,0 @@ -//! A "shim crate" intended to multiplex the `proc_macro` API on to stable Rust. -//! -//! Procedural macros in Rust operate over the upstream -//! `proc_macro::TokenStream` type. This type currently is quite conservative -//! and exposed no internal implementation details. Nightly compilers, however, -//! contain a much richer interface. This richer interface allows fine-grained -//! inspection of the token stream which avoids stringification/re-lexing and -//! also preserves span information. -//! -//! The upcoming APIs added to `proc_macro` upstream are the foundation for -//! productive procedural macros in the ecosystem. To help prepare the ecosystem -//! for using them this crate serves to both compile on stable and nightly and -//! mirrors the API-to-be. The intention is that procedural macros which switch -//! to use this crate will be trivially able to switch to the upstream -//! `proc_macro` crate once its API stabilizes. -//! -//! In the meantime this crate also has a `nightly` Cargo feature which -//! enables it to reimplement itself with the unstable API of `proc_macro`. -//! This'll allow immediate usage of the beneficial upstream API, particularly -//! around preserving span information. - -// Proc-macro2 types in rustdoc of other crates get linked to here. 
-#![doc(html_root_url = "https://docs.rs/proc-macro2/0.2.2")] - -#![cfg_attr(feature = "nightly", feature(proc_macro))] - -extern crate proc_macro; - -#[cfg(not(feature = "nightly"))] -extern crate unicode_xid; - -use std::fmt; -use std::str::FromStr; -use std::iter::FromIterator; - -#[macro_use] -#[cfg(not(feature = "nightly"))] -mod strnom; - -#[path = "stable.rs"] -#[cfg(not(feature = "nightly"))] -mod imp; -#[path = "unstable.rs"] -#[cfg(feature = "nightly")] -mod imp; - -#[macro_use] -mod macros; - -#[derive(Clone)] -pub struct TokenStream(imp::TokenStream); - -pub struct LexError(imp::LexError); - -impl FromStr for TokenStream { - type Err = LexError; - - fn from_str(src: &str) -> Result { - match src.parse() { - Ok(e) => Ok(TokenStream(e)), - Err(e) => Err(LexError(e)), - } - } -} - -impl From for TokenStream { - fn from(inner: proc_macro::TokenStream) -> TokenStream { - TokenStream(inner.into()) - } -} - -impl From for proc_macro::TokenStream { - fn from(inner: TokenStream) -> proc_macro::TokenStream { - inner.0.into() - } -} - -impl From for TokenStream { - fn from(tree: TokenTree) -> TokenStream { - TokenStream(tree.into()) - } -} - -impl> FromIterator for TokenStream { - fn from_iter>(streams: I) -> Self { - TokenStream(streams.into_iter().map(|t| t.into().0).collect()) - } -} - -impl IntoIterator for TokenStream { - type Item = TokenTree; - type IntoIter = TokenTreeIter; - - fn into_iter(self) -> TokenTreeIter { - TokenTreeIter(self.0.into_iter()) - } -} - -impl TokenStream { - pub fn empty() -> TokenStream { - TokenStream(imp::TokenStream::empty()) - } - - pub fn is_empty(&self) -> bool { - self.0.is_empty() - } -} - -// Returned by reference, so we can't easily wrap it. -#[cfg(procmacro2_semver_exempt)] -pub use imp::FileName; - -#[cfg(procmacro2_semver_exempt)] -#[derive(Clone, PartialEq, Eq)] -pub struct SourceFile(imp::SourceFile); - -#[cfg(procmacro2_semver_exempt)] -impl SourceFile { - /// Get the path to this source file as a string. - pub fn path(&self) -> &FileName { - self.0.path() - } - - pub fn is_real(&self) -> bool { - self.0.is_real() - } -} - -#[cfg(procmacro2_semver_exempt)] -impl AsRef for SourceFile { - fn as_ref(&self) -> &FileName { - self.0.path() - } -} - -#[cfg(procmacro2_semver_exempt)] -impl fmt::Debug for SourceFile { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.0.fmt(f) - } -} - -#[cfg(procmacro2_semver_exempt)] -pub struct LineColumn { - pub line: usize, - pub column: usize, -} - -#[derive(Copy, Clone)] -pub struct Span(imp::Span); - -impl Span { - pub fn call_site() -> Span { - Span(imp::Span::call_site()) - } - - pub fn def_site() -> Span { - Span(imp::Span::def_site()) - } - - /// Creates a new span with the same line/column information as `self` but - /// that resolves symbols as though it were at `other`. - pub fn resolved_at(&self, other: Span) -> Span { - Span(self.0.resolved_at(other.0)) - } - - /// Creates a new span with the same name resolution behavior as `self` but - /// with the line/column information of `other`. - pub fn located_at(&self, other: Span) -> Span { - Span(self.0.located_at(other.0)) - } - - /// This method is only available when the `"nightly"` feature is enabled. 
- #[cfg(feature = "nightly")] - pub fn unstable(self) -> proc_macro::Span { - self.0.unstable() - } - - #[cfg(procmacro2_semver_exempt)] - pub fn source_file(&self) -> SourceFile { - SourceFile(self.0.source_file()) - } - - #[cfg(procmacro2_semver_exempt)] - pub fn start(&self) -> LineColumn { - let imp::LineColumn{ line, column } = self.0.start(); - LineColumn { line: line, column: column } - } - - #[cfg(procmacro2_semver_exempt)] - pub fn end(&self) -> LineColumn { - let imp::LineColumn{ line, column } = self.0.end(); - LineColumn { line: line, column: column } - } - - #[cfg(procmacro2_semver_exempt)] - pub fn join(&self, other: Span) -> Option { - self.0.join(other.0).map(Span) - } -} - -#[derive(Clone, Debug)] -pub struct TokenTree { - pub span: Span, - pub kind: TokenNode, -} - -impl From for TokenTree { - fn from(kind: TokenNode) -> TokenTree { - TokenTree { span: Span::def_site(), kind: kind } - } -} - -impl fmt::Display for TokenTree { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - TokenStream::from(self.clone()).fmt(f) - } -} - -#[derive(Clone, Debug)] -pub enum TokenNode { - Group(Delimiter, TokenStream), - Term(Term), - Op(char, Spacing), - Literal(Literal), -} - -#[derive(Copy, Clone, Debug, Eq, PartialEq)] -pub enum Delimiter { - Parenthesis, - Brace, - Bracket, - None, -} - -#[derive(Copy, Clone)] -pub struct Term(imp::Term); - -impl Term { - pub fn intern(string: &str) -> Term { - Term(imp::Term::intern(string)) - } - - pub fn as_str(&self) -> &str { - self.0.as_str() - } -} - -#[derive(Copy, Clone, Debug, Eq, PartialEq)] -pub enum Spacing { - Alone, - Joint, -} - -#[derive(Clone)] -pub struct Literal(imp::Literal); - -macro_rules! int_literals { - ($($kind:ident,)*) => ($( - pub fn $kind(n: $kind) -> Literal { - Literal(n.into()) - } - )*) -} - -impl Literal { - pub fn integer(s: i64) -> Literal { - Literal(imp::Literal::integer(s)) - } - - int_literals! 
{ - u8, u16, u32, u64, usize, - i8, i16, i32, i64, isize, - } - - pub fn float(f: f64) -> Literal { - Literal(imp::Literal::float(f)) - } - - pub fn f64(f: f64) -> Literal { - Literal(f.into()) - } - - pub fn f32(f: f32) -> Literal { - Literal(f.into()) - } - - pub fn string(string: &str) -> Literal { - Literal(string.into()) - } - - pub fn character(ch: char) -> Literal { - Literal(ch.into()) - } - - pub fn byte_string(s: &[u8]) -> Literal { - Literal(imp::Literal::byte_string(s)) - } - - // ======================================================================= - // Not present upstream in proc_macro yet - - pub fn byte_char(b: u8) -> Literal { - Literal(imp::Literal::byte_char(b)) - } - - pub fn doccomment(s: &str) -> Literal { - Literal(imp::Literal::doccomment(s)) - } - - pub fn raw_string(s: &str, pounds: usize) -> Literal { - Literal(imp::Literal::raw_string(s, pounds)) - } - - pub fn raw_byte_string(s: &str, pounds: usize) -> Literal { - Literal(imp::Literal::raw_byte_string(s, pounds)) - } -} - -pub struct TokenTreeIter(imp::TokenTreeIter); - -impl Iterator for TokenTreeIter { - type Item = TokenTree; - - fn next(&mut self) -> Option { - self.0.next() - } -} - -forward_fmt!(Debug for LexError); -forward_fmt!(Debug for Literal); -forward_fmt!(Debug for Span); -forward_fmt!(Debug for Term); -forward_fmt!(Debug for TokenTreeIter); -forward_fmt!(Debug for TokenStream); -forward_fmt!(Display for Literal); -forward_fmt!(Display for TokenStream); diff --git a/third_party/rust/proc-macro2-0.2.2/src/macros.rs b/third_party/rust/proc-macro2-0.2.2/src/macros.rs deleted file mode 100644 index 89eb554c6c66..000000000000 --- a/third_party/rust/proc-macro2-0.2.2/src/macros.rs +++ /dev/null @@ -1,9 +0,0 @@ -macro_rules! forward_fmt { - ($tr:ident for $ty:ident) => { - impl ::std::fmt::$tr for $ty { - fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { - ::std::fmt::$tr::fmt(&self.0, f) - } - } - } -} diff --git a/third_party/rust/proc-macro2-0.2.2/src/stable.rs b/third_party/rust/proc-macro2-0.2.2/src/stable.rs deleted file mode 100644 index ffa077a29531..000000000000 --- a/third_party/rust/proc-macro2-0.2.2/src/stable.rs +++ /dev/null @@ -1,1206 +0,0 @@ -use std::ascii; -use std::borrow::Borrow; -use std::cell::RefCell; -#[cfg(procmacro2_semver_exempt)] -use std::cmp; -use std::collections::HashMap; -use std::fmt; -use std::iter; -use std::marker::PhantomData; -use std::rc::Rc; -use std::str::FromStr; -use std::vec; - -use proc_macro; -use unicode_xid::UnicodeXID; -use strnom::{Cursor, PResult, skip_whitespace, block_comment, whitespace, word_break}; - -use {TokenTree, TokenNode, Delimiter, Spacing}; - -#[derive(Clone, Debug)] -pub struct TokenStream { - inner: Vec, -} - -#[derive(Debug)] -pub struct LexError; - -impl TokenStream { - pub fn empty() -> TokenStream { - TokenStream { inner: Vec::new() } - } - - pub fn is_empty(&self) -> bool { - self.inner.len() == 0 - } -} - -#[cfg(procmacro2_semver_exempt)] -fn get_cursor(src: &str) -> Cursor { - // Create a dummy file & add it to the codemap - CODEMAP.with(|cm| { - let mut cm = cm.borrow_mut(); - let name = format!("", cm.files.len()); - let span = cm.add_file(&name, src); - Cursor { - rest: src, - off: span.lo, - } - }) -} - -#[cfg(not(procmacro2_semver_exempt))] -fn get_cursor(src: &str) -> Cursor { - Cursor { - rest: src, - } -} - -impl FromStr for TokenStream { - type Err = LexError; - - fn from_str(src: &str) -> Result { - // Create a dummy file & add it to the codemap - let cursor = get_cursor(src); - - match 
token_stream(cursor) { - Ok((input, output)) => { - if skip_whitespace(input).len() != 0 { - Err(LexError) - } else { - Ok(output.0) - } - } - Err(LexError) => Err(LexError), - } - } -} - -impl fmt::Display for TokenStream { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let mut joint = false; - for (i, tt) in self.inner.iter().enumerate() { - if i != 0 && !joint { - write!(f, " ")?; - } - joint = false; - match tt.kind { - TokenNode::Group(delim, ref stream) => { - let (start, end) = match delim { - Delimiter::Parenthesis => ("(", ")"), - Delimiter::Brace => ("{", "}"), - Delimiter::Bracket => ("[", "]"), - Delimiter::None => ("", ""), - }; - if stream.0.inner.len() == 0 { - write!(f, "{} {}", start, end)? - } else { - write!(f, "{} {} {}", start, stream, end)? - } - } - TokenNode::Term(ref sym) => write!(f, "{}", sym.as_str())?, - TokenNode::Op(ch, ref op) => { - write!(f, "{}", ch)?; - match *op { - Spacing::Alone => {} - Spacing::Joint => joint = true, - } - } - TokenNode::Literal(ref literal) => { - write!(f, "{}", literal)?; - // handle comments - if (literal.0).0.starts_with("/") { - write!(f, "\n")?; - } - } - } - } - - Ok(()) - } -} - -impl From for TokenStream { - fn from(inner: proc_macro::TokenStream) -> TokenStream { - inner.to_string().parse().expect("compiler token stream parse failed") - } -} - -impl From for proc_macro::TokenStream { - fn from(inner: TokenStream) -> proc_macro::TokenStream { - inner.to_string().parse().expect("failed to parse to compiler tokens") - } -} - - -impl From for TokenStream { - fn from(tree: TokenTree) -> TokenStream { - TokenStream { inner: vec![tree] } - } -} - -impl iter::FromIterator for TokenStream { - fn from_iter>(streams: I) -> Self { - let mut v = Vec::new(); - - for stream in streams.into_iter() { - v.extend(stream.inner); - } - - TokenStream { inner: v } - } -} - -pub type TokenTreeIter = vec::IntoIter; - -impl IntoIterator for TokenStream { - type Item = TokenTree; - type IntoIter = TokenTreeIter; - - fn into_iter(self) -> TokenTreeIter { - self.inner.into_iter() - } -} - -#[cfg(procmacro2_semver_exempt)] -#[derive(Clone, PartialEq, Eq, Debug)] -pub struct FileName(String); - -#[cfg(procmacro2_semver_exempt)] -impl fmt::Display for FileName { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.0.fmt(f) - } -} - -#[cfg(procmacro2_semver_exempt)] -#[derive(Clone, PartialEq, Eq)] -pub struct SourceFile { - name: FileName, -} - -#[cfg(procmacro2_semver_exempt)] -impl SourceFile { - /// Get the path to this source file as a string. - pub fn path(&self) -> &FileName { - &self.name - } - - pub fn is_real(&self) -> bool { - // XXX(nika): Support real files in the future? - false - } -} - -#[cfg(procmacro2_semver_exempt)] -impl AsRef for SourceFile { - fn as_ref(&self) -> &FileName { - self.path() - } -} - -#[cfg(procmacro2_semver_exempt)] -impl fmt::Debug for SourceFile { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_struct("SourceFile") - .field("path", &self.path()) - .field("is_real", &self.is_real()) - .finish() - } -} - -#[cfg(procmacro2_semver_exempt)] -#[derive(Clone, Copy, Debug, PartialEq, Eq)] -pub struct LineColumn { - pub line: usize, - pub column: usize, -} - -#[cfg(procmacro2_semver_exempt)] -thread_local! { - static CODEMAP: RefCell = RefCell::new(Codemap { - // NOTE: We start with a single dummy file which all call_site() and - // def_site() spans reference. 
- files: vec![FileInfo { - name: "".to_owned(), - span: Span { lo: 0, hi: 0 }, - lines: vec![0], - }], - }); -} - -#[cfg(procmacro2_semver_exempt)] -struct FileInfo { - name: String, - span: Span, - lines: Vec, -} - -#[cfg(procmacro2_semver_exempt)] -impl FileInfo { - fn offset_line_column(&self, offset: usize) -> LineColumn { - assert!(self.span_within(Span { lo: offset as u32, hi: offset as u32 })); - let offset = offset - self.span.lo as usize; - match self.lines.binary_search(&offset) { - Ok(found) => LineColumn { - line: found + 1, - column: 0 - }, - Err(idx) => LineColumn { - line: idx, - column: offset - self.lines[idx - 1] - }, - } - } - - fn span_within(&self, span: Span) -> bool { - span.lo >= self.span.lo && span.hi <= self.span.hi - } -} - -/// Computes the offsets of each line in the given source string. -#[cfg(procmacro2_semver_exempt)] -fn lines_offsets(s: &str) -> Vec { - let mut lines = vec![0]; - let mut prev = 0; - while let Some(len) = s[prev..].find('\n') { - prev += len + 1; - lines.push(prev); - } - lines -} - -#[cfg(procmacro2_semver_exempt)] -struct Codemap { - files: Vec, -} - -#[cfg(procmacro2_semver_exempt)] -impl Codemap { - fn next_start_pos(&self) -> u32 { - // Add 1 so there's always space between files. - // - // We'll always have at least 1 file, as we initialize our files list - // with a dummy file. - self.files.last().unwrap().span.hi + 1 - } - - fn add_file(&mut self, name: &str, src: &str) -> Span { - let lines = lines_offsets(src); - let lo = self.next_start_pos(); - // XXX(nika): Shouild we bother doing a checked cast or checked add here? - let span = Span { lo: lo, hi: lo + (src.len() as u32) }; - - self.files.push(FileInfo { - name: name.to_owned(), - span: span, - lines: lines, - }); - - span - } - - fn fileinfo(&self, span: Span) -> &FileInfo { - for file in &self.files { - if file.span_within(span) { - return file; - } - } - panic!("Invalid span with no related FileInfo!"); - } -} - -#[derive(Clone, Copy, Debug)] -pub struct Span { - #[cfg(procmacro2_semver_exempt)] - lo: u32, - #[cfg(procmacro2_semver_exempt)] - hi: u32, -} - -impl Span { - #[cfg(not(procmacro2_semver_exempt))] - pub fn call_site() -> Span { - Span {} - } - - #[cfg(procmacro2_semver_exempt)] - pub fn call_site() -> Span { - Span { lo: 0, hi: 0 } - } - - pub fn def_site() -> Span { - Span::call_site() - } - - pub fn resolved_at(&self, _other: Span) -> Span { - // Stable spans consist only of line/column information, so - // `resolved_at` and `located_at` only select which span the - // caller wants line/column information from. - *self - } - - pub fn located_at(&self, other: Span) -> Span { - other - } - - #[cfg(procmacro2_semver_exempt)] - pub fn source_file(&self) -> SourceFile { - CODEMAP.with(|cm| { - let cm = cm.borrow(); - let fi = cm.fileinfo(*self); - SourceFile { - name: FileName(fi.name.clone()), - } - }) - } - - #[cfg(procmacro2_semver_exempt)] - pub fn start(&self) -> LineColumn { - CODEMAP.with(|cm| { - let cm = cm.borrow(); - let fi = cm.fileinfo(*self); - fi.offset_line_column(self.lo as usize) - }) - } - - #[cfg(procmacro2_semver_exempt)] - pub fn end(&self) -> LineColumn { - CODEMAP.with(|cm| { - let cm = cm.borrow(); - let fi = cm.fileinfo(*self); - fi.offset_line_column(self.hi as usize) - }) - } - - #[cfg(procmacro2_semver_exempt)] - pub fn join(&self, other: Span) -> Option { - CODEMAP.with(|cm| { - let cm = cm.borrow(); - // If `other` is not within the same FileInfo as us, return None. 
- if !cm.fileinfo(*self).span_within(other) { - return None; - } - Some(Span { - lo: cmp::min(self.lo, other.lo), - hi: cmp::max(self.hi, other.hi), - }) - }) - } -} - -#[derive(Copy, Clone)] -pub struct Term { - intern: usize, - not_send_sync: PhantomData<*const ()>, -} - -thread_local!(static SYMBOLS: RefCell = RefCell::new(Interner::new())); - -impl Term { - pub fn intern(string: &str) -> Term { - Term { - intern: SYMBOLS.with(|s| s.borrow_mut().intern(string)), - not_send_sync: PhantomData, - } - } - - pub fn as_str(&self) -> &str { - SYMBOLS.with(|interner| { - let interner = interner.borrow(); - let s = interner.get(self.intern); - unsafe { - &*(s as *const str) - } - }) - } -} - -impl fmt::Debug for Term { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_tuple("Term").field(&self.as_str()).finish() - } -} - -struct Interner { - string_to_index: HashMap, - index_to_string: Vec>, -} - -#[derive(Hash, Eq, PartialEq)] -struct MyRc(Rc); - -impl Borrow for MyRc { - fn borrow(&self) -> &str { - &self.0 - } -} - -impl Interner { - fn new() -> Interner { - Interner { - string_to_index: HashMap::new(), - index_to_string: Vec::new(), - } - } - - fn intern(&mut self, s: &str) -> usize { - if let Some(&idx) = self.string_to_index.get(s) { - return idx - } - let s = Rc::new(s.to_string()); - self.index_to_string.push(s.clone()); - self.string_to_index.insert(MyRc(s), self.index_to_string.len() - 1); - self.index_to_string.len() - 1 - } - - fn get(&self, idx: usize) -> &str { - &self.index_to_string[idx] - } -} - -#[derive(Clone, Debug)] -pub struct Literal(String); - -impl Literal { - pub fn byte_char(byte: u8) -> Literal { - match byte { - 0 => Literal(format!("b'\\0'")), - b'\"' => Literal(format!("b'\"'")), - n => { - let mut escaped = "b'".to_string(); - escaped.extend(ascii::escape_default(n).map(|c| c as char)); - escaped.push('\''); - Literal(escaped) - } - } - } - - pub fn byte_string(bytes: &[u8]) -> Literal { - let mut escaped = "b\"".to_string(); - for b in bytes { - match *b { - b'\0' => escaped.push_str(r"\0"), - b'\t' => escaped.push_str(r"\t"), - b'\n' => escaped.push_str(r"\n"), - b'\r' => escaped.push_str(r"\r"), - b'"' => escaped.push_str("\\\""), - b'\\' => escaped.push_str("\\\\"), - b'\x20' ... b'\x7E' => escaped.push(*b as char), - _ => escaped.push_str(&format!("\\x{:02X}", b)), - } - } - escaped.push('"'); - Literal(escaped) - } - - pub fn doccomment(s: &str) -> Literal { - Literal(s.to_string()) - } - - pub fn float(n: f64) -> Literal { - if !n.is_finite() { - panic!("Invalid float literal {}", n); - } - let mut s = n.to_string(); - if !s.contains('.') { - s += ".0"; - } - Literal(s) - } - - pub fn integer(s: i64) -> Literal { - Literal(s.to_string()) - } - - pub fn raw_string(s: &str, pounds: usize) -> Literal { - let mut ret = format!("r"); - ret.extend((0..pounds).map(|_| "#")); - ret.push('"'); - ret.push_str(s); - ret.push('"'); - ret.extend((0..pounds).map(|_| "#")); - Literal(ret) - } - - pub fn raw_byte_string(s: &str, pounds: usize) -> Literal { - let mut ret = format!("br"); - ret.extend((0..pounds).map(|_| "#")); - ret.push('"'); - ret.push_str(s); - ret.push('"'); - ret.extend((0..pounds).map(|_| "#")); - Literal(ret) - } -} - -impl fmt::Display for Literal { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.0.fmt(f) - } -} - -macro_rules! ints { - ($($t:ty,)*) => {$( - impl From<$t> for Literal { - fn from(t: $t) -> Literal { - Literal(format!(concat!("{}", stringify!($t)), t)) - } - } - )*} -} - -ints! 
{ - u8, u16, u32, u64, usize, - i8, i16, i32, i64, isize, -} - -macro_rules! floats { - ($($t:ty,)*) => {$( - impl From<$t> for Literal { - fn from(t: $t) -> Literal { - assert!(!t.is_nan()); - assert!(!t.is_infinite()); - Literal(format!(concat!("{}", stringify!($t)), t)) - } - } - )*} -} - -floats! { - f32, f64, -} - -impl<'a> From<&'a str> for Literal { - fn from(t: &'a str) -> Literal { - let mut s = t.chars().flat_map(|c| c.escape_default()).collect::(); - s.push('"'); - s.insert(0, '"'); - Literal(s) - } -} - -impl From for Literal { - fn from(t: char) -> Literal { - Literal(format!("'{}'", t.escape_default().collect::())) - } -} - -named!(token_stream -> ::TokenStream, map!( - many0!(token_tree), - |trees| ::TokenStream(TokenStream { inner: trees }) -)); - -#[cfg(not(procmacro2_semver_exempt))] -fn token_tree(input: Cursor) -> PResult { - let (input, kind) = token_kind(input)?; - Ok((input, TokenTree { - span: ::Span(Span {}), - kind: kind, - })) -} - -#[cfg(procmacro2_semver_exempt)] -fn token_tree(input: Cursor) -> PResult { - let input = skip_whitespace(input); - let lo = input.off; - let (input, kind) = token_kind(input)?; - let hi = input.off; - Ok((input, TokenTree { - span: ::Span(Span { - lo: lo, - hi: hi, - }), - kind: kind, - })) -} - -named!(token_kind -> TokenNode, alt!( - map!(delimited, |(d, s)| TokenNode::Group(d, s)) - | - map!(literal, TokenNode::Literal) // must be before symbol - | - symbol - | - map!(op, |(op, kind)| TokenNode::Op(op, kind)) -)); - -named!(delimited -> (Delimiter, ::TokenStream), alt!( - delimited!( - punct!("("), - token_stream, - punct!(")") - ) => { |ts| (Delimiter::Parenthesis, ts) } - | - delimited!( - punct!("["), - token_stream, - punct!("]") - ) => { |ts| (Delimiter::Bracket, ts) } - | - delimited!( - punct!("{"), - token_stream, - punct!("}") - ) => { |ts| (Delimiter::Brace, ts) } -)); - -fn symbol(mut input: Cursor) -> PResult { - input = skip_whitespace(input); - - let mut chars = input.char_indices(); - - let lifetime = input.starts_with("'"); - if lifetime { - chars.next(); - } - - match chars.next() { - Some((_, ch)) if UnicodeXID::is_xid_start(ch) || ch == '_' => {} - _ => return Err(LexError), - } - - let mut end = input.len(); - for (i, ch) in chars { - if !UnicodeXID::is_xid_continue(ch) { - end = i; - break; - } - } - - if lifetime && &input.rest[..end] != "'static" && KEYWORDS.contains(&&input.rest[1..end]) { - Err(LexError) - } else { - let a = &input.rest[..end]; - if a == "_" { - Ok((input.advance(end), TokenNode::Op('_', Spacing::Alone))) - } else { - Ok((input.advance(end), TokenNode::Term(::Term::intern(a)))) - } - } -} - -// From https://github.com/rust-lang/rust/blob/master/src/libsyntax_pos/symbol.rs -static KEYWORDS: &'static [&'static str] = &[ - "abstract", "alignof", "as", "become", "box", "break", "const", "continue", - "crate", "do", "else", "enum", "extern", "false", "final", "fn", "for", - "if", "impl", "in", "let", "loop", "macro", "match", "mod", "move", "mut", - "offsetof", "override", "priv", "proc", "pub", "pure", "ref", "return", - "self", "Self", "sizeof", "static", "struct", "super", "trait", "true", - "type", "typeof", "unsafe", "unsized", "use", "virtual", "where", "while", - "yield", -]; - -fn literal(input: Cursor) -> PResult<::Literal> { - let input_no_ws = skip_whitespace(input); - - match literal_nocapture(input_no_ws) { - Ok((a, ())) => { - let start = input.len() - input_no_ws.len(); - let len = input_no_ws.len() - a.len(); - let end = start + len; - Ok((a, 
::Literal(Literal(input.rest[start..end].to_string())))) - } - Err(LexError) => Err(LexError), - } -} - -named!(literal_nocapture -> (), alt!( - string - | - byte_string - | - byte - | - character - | - float - | - int - | - doc_comment -)); - -named!(string -> (), alt!( - quoted_string - | - preceded!( - punct!("r"), - raw_string - ) => { |_| () } -)); - -named!(quoted_string -> (), delimited!( - punct!("\""), - cooked_string, - tag!("\"") -)); - -fn cooked_string(input: Cursor) -> PResult<()> { - let mut chars = input.char_indices().peekable(); - while let Some((byte_offset, ch)) = chars.next() { - match ch { - '"' => { - return Ok((input.advance(byte_offset), ())); - } - '\r' => { - if let Some((_, '\n')) = chars.next() { - // ... - } else { - break; - } - } - '\\' => { - match chars.next() { - Some((_, 'x')) => { - if !backslash_x_char(&mut chars) { - break - } - } - Some((_, 'n')) | - Some((_, 'r')) | - Some((_, 't')) | - Some((_, '\\')) | - Some((_, '\'')) | - Some((_, '"')) | - Some((_, '0')) => {} - Some((_, 'u')) => { - if !backslash_u(&mut chars) { - break - } - } - Some((_, '\n')) | Some((_, '\r')) => { - while let Some(&(_, ch)) = chars.peek() { - if ch.is_whitespace() { - chars.next(); - } else { - break; - } - } - } - _ => break, - } - } - _ch => {} - } - } - Err(LexError) -} - -named!(byte_string -> (), alt!( - delimited!( - punct!("b\""), - cooked_byte_string, - tag!("\"") - ) => { |_| () } - | - preceded!( - punct!("br"), - raw_string - ) => { |_| () } -)); - -fn cooked_byte_string(mut input: Cursor) -> PResult<()> { - let mut bytes = input.bytes().enumerate(); - 'outer: while let Some((offset, b)) = bytes.next() { - match b { - b'"' => { - return Ok((input.advance(offset), ())); - } - b'\r' => { - if let Some((_, b'\n')) = bytes.next() { - // ... 
- } else { - break; - } - } - b'\\' => { - match bytes.next() { - Some((_, b'x')) => { - if !backslash_x_byte(&mut bytes) { - break - } - } - Some((_, b'n')) | - Some((_, b'r')) | - Some((_, b't')) | - Some((_, b'\\')) | - Some((_, b'0')) | - Some((_, b'\'')) | - Some((_, b'"')) => {} - Some((newline, b'\n')) | - Some((newline, b'\r')) => { - let rest = input.advance(newline + 1); - for (offset, ch) in rest.char_indices() { - if !ch.is_whitespace() { - input = rest.advance(offset); - bytes = input.bytes().enumerate(); - continue 'outer; - } - } - break; - } - _ => break, - } - } - b if b < 0x80 => {} - _ => break, - } - } - Err(LexError) -} - -fn raw_string(input: Cursor) -> PResult<()> { - let mut chars = input.char_indices(); - let mut n = 0; - while let Some((byte_offset, ch)) = chars.next() { - match ch { - '"' => { - n = byte_offset; - break; - } - '#' => {} - _ => return Err(LexError), - } - } - for (byte_offset, ch) in chars { - match ch { - '"' if input.advance(byte_offset + 1).starts_with(&input.rest[..n]) => { - let rest = input.advance(byte_offset + 1 + n); - return Ok((rest, ())) - } - '\r' => {} - _ => {} - } - } - Err(LexError) -} - -named!(byte -> (), do_parse!( - punct!("b") >> - tag!("'") >> - cooked_byte >> - tag!("'") >> - (()) -)); - -fn cooked_byte(input: Cursor) -> PResult<()> { - let mut bytes = input.bytes().enumerate(); - let ok = match bytes.next().map(|(_, b)| b) { - Some(b'\\') => { - match bytes.next().map(|(_, b)| b) { - Some(b'x') => backslash_x_byte(&mut bytes), - Some(b'n') | - Some(b'r') | - Some(b't') | - Some(b'\\') | - Some(b'0') | - Some(b'\'') | - Some(b'"') => true, - _ => false, - } - } - b => b.is_some(), - }; - if ok { - match bytes.next() { - Some((offset, _)) => { - if input.chars().as_str().is_char_boundary(offset) { - Ok((input.advance(offset), ())) - } else { - Err(LexError) - } - } - None => Ok((input.advance(input.len()), ())), - } - } else { - Err(LexError) - } -} - -named!(character -> (), do_parse!( - punct!("'") >> - cooked_char >> - tag!("'") >> - (()) -)); - -fn cooked_char(input: Cursor) -> PResult<()> { - let mut chars = input.char_indices(); - let ok = match chars.next().map(|(_, ch)| ch) { - Some('\\') => { - match chars.next().map(|(_, ch)| ch) { - Some('x') => backslash_x_char(&mut chars), - Some('u') => backslash_u(&mut chars), - Some('n') | - Some('r') | - Some('t') | - Some('\\') | - Some('0') | - Some('\'') | - Some('"') => true, - _ => false, - } - } - ch => ch.is_some(), - }; - if ok { - match chars.next() { - Some((idx, _)) => Ok((input.advance(idx), ())), - None => Ok((input.advance(input.len()), ())), - } - } else { - Err(LexError) - } -} - -macro_rules! 
next_ch { - ($chars:ident @ $pat:pat $(| $rest:pat)*) => { - match $chars.next() { - Some((_, ch)) => match ch { - $pat $(| $rest)* => ch, - _ => return false, - }, - None => return false - } - }; -} - -fn backslash_x_char(chars: &mut I) -> bool - where I: Iterator -{ - next_ch!(chars @ '0'...'7'); - next_ch!(chars @ '0'...'9' | 'a'...'f' | 'A'...'F'); - true -} - -fn backslash_x_byte(chars: &mut I) -> bool - where I: Iterator -{ - next_ch!(chars @ b'0'...b'9' | b'a'...b'f' | b'A'...b'F'); - next_ch!(chars @ b'0'...b'9' | b'a'...b'f' | b'A'...b'F'); - true -} - -fn backslash_u(chars: &mut I) -> bool - where I: Iterator -{ - next_ch!(chars @ '{'); - next_ch!(chars @ '0'...'9' | 'a'...'f' | 'A'...'F'); - loop { - let c = next_ch!(chars @ '0'...'9' | 'a'...'f' | 'A'...'F' | '_' | '}'); - if c == '}' { - return true; - } - } -} - -fn float(input: Cursor) -> PResult<()> { - let (rest, ()) = float_digits(input)?; - for suffix in &["f32", "f64"] { - if rest.starts_with(suffix) { - return word_break(rest.advance(suffix.len())); - } - } - word_break(rest) -} - -fn float_digits(input: Cursor) -> PResult<()> { - let mut chars = input.chars().peekable(); - match chars.next() { - Some(ch) if ch >= '0' && ch <= '9' => {} - _ => return Err(LexError), - } - - let mut len = 1; - let mut has_dot = false; - let mut has_exp = false; - while let Some(&ch) = chars.peek() { - match ch { - '0'...'9' | '_' => { - chars.next(); - len += 1; - } - '.' => { - if has_dot { - break; - } - chars.next(); - if chars.peek() - .map(|&ch| ch == '.' || UnicodeXID::is_xid_start(ch)) - .unwrap_or(false) { - return Err(LexError); - } - len += 1; - has_dot = true; - } - 'e' | 'E' => { - chars.next(); - len += 1; - has_exp = true; - break; - } - _ => break, - } - } - - let rest = input.advance(len); - if !(has_dot || has_exp || rest.starts_with("f32") || rest.starts_with("f64")) { - return Err(LexError); - } - - if has_exp { - let mut has_exp_value = false; - while let Some(&ch) = chars.peek() { - match ch { - '+' | '-' => { - if has_exp_value { - break; - } - chars.next(); - len += 1; - } - '0'...'9' => { - chars.next(); - len += 1; - has_exp_value = true; - } - '_' => { - chars.next(); - len += 1; - } - _ => break, - } - } - if !has_exp_value { - return Err(LexError); - } - } - - Ok((input.advance(len), ())) -} - -fn int(input: Cursor) -> PResult<()> { - let (rest, ()) = digits(input)?; - for suffix in &[ - "isize", - "i8", - "i16", - "i32", - "i64", - "i128", - "usize", - "u8", - "u16", - "u32", - "u64", - "u128", - ] { - if rest.starts_with(suffix) { - return word_break(rest.advance(suffix.len())); - } - } - word_break(rest) -} - -fn digits(mut input: Cursor) -> PResult<()> { - let base = if input.starts_with("0x") { - input = input.advance(2); - 16 - } else if input.starts_with("0o") { - input = input.advance(2); - 8 - } else if input.starts_with("0b") { - input = input.advance(2); - 2 - } else { - 10 - }; - - let mut len = 0; - let mut empty = true; - for b in input.bytes() { - let digit = match b { - b'0'...b'9' => (b - b'0') as u64, - b'a'...b'f' => 10 + (b - b'a') as u64, - b'A'...b'F' => 10 + (b - b'A') as u64, - b'_' => { - if empty && base == 10 { - return Err(LexError); - } - len += 1; - continue; - } - _ => break, - }; - if digit >= base { - return Err(LexError); - } - len += 1; - empty = false; - } - if empty { - Err(LexError) - } else { - Ok((input.advance(len), ())) - } -} - -fn op(input: Cursor) -> PResult<(char, Spacing)> { - let input = skip_whitespace(input); - match op_char(input) { - Ok((rest, ch)) => { - let 
kind = match op_char(rest) { - Ok(_) => Spacing::Joint, - Err(LexError) => Spacing::Alone, - }; - Ok((rest, (ch, kind))) - } - Err(LexError) => Err(LexError), - } -} - -fn op_char(input: Cursor) -> PResult { - let mut chars = input.chars(); - let first = match chars.next() { - Some(ch) => ch, - None => { - return Err(LexError); - } - }; - let recognized = "~!@#$%^&*-=+|;:,<.>/?"; - if recognized.contains(first) { - Ok((input.advance(first.len_utf8()), first)) - } else { - Err(LexError) - } -} - -named!(doc_comment -> (), alt!( - do_parse!( - punct!("//!") >> - take_until!("\n") >> - (()) - ) - | - do_parse!( - option!(whitespace) >> - peek!(tag!("/*!")) >> - block_comment >> - (()) - ) - | - do_parse!( - punct!("///") >> - not!(tag!("/")) >> - take_until!("\n") >> - (()) - ) - | - do_parse!( - option!(whitespace) >> - peek!(tuple!(tag!("/**"), not!(tag!("*")))) >> - block_comment >> - (()) - ) -)); diff --git a/third_party/rust/proc-macro2-0.2.2/src/strnom.rs b/third_party/rust/proc-macro2-0.2.2/src/strnom.rs deleted file mode 100644 index 33964f45bda3..000000000000 --- a/third_party/rust/proc-macro2-0.2.2/src/strnom.rs +++ /dev/null @@ -1,447 +0,0 @@ -//! Adapted from [`nom`](https://github.com/Geal/nom). - -use std::str::{Chars, CharIndices, Bytes}; - -use unicode_xid::UnicodeXID; - -use imp::LexError; - -#[derive(Copy, Clone, Eq, PartialEq)] -pub struct Cursor<'a> { - pub rest: &'a str, - #[cfg(procmacro2_semver_exempt)] - pub off: u32, -} - -impl<'a> Cursor<'a> { - #[cfg(not(procmacro2_semver_exempt))] - pub fn advance(&self, amt: usize) -> Cursor<'a> { - Cursor { - rest: &self.rest[amt..], - } - } - #[cfg(procmacro2_semver_exempt)] - pub fn advance(&self, amt: usize) -> Cursor<'a> { - Cursor { - rest: &self.rest[amt..], - off: self.off + (amt as u32), - } - } - - pub fn find(&self, p: char) -> Option { - self.rest.find(p) - } - - pub fn starts_with(&self, s: &str) -> bool { - self.rest.starts_with(s) - } - - pub fn is_empty(&self) -> bool { - self.rest.is_empty() - } - - pub fn len(&self) -> usize { - self.rest.len() - } - - pub fn as_bytes(&self) -> &'a [u8] { - self.rest.as_bytes() - } - - pub fn bytes(&self) -> Bytes<'a> { - self.rest.bytes() - } - - pub fn chars(&self) -> Chars<'a> { - self.rest.chars() - } - - pub fn char_indices(&self) -> CharIndices<'a> { - self.rest.char_indices() - } -} - -pub type PResult<'a, O> = Result<(Cursor<'a>, O), LexError>; - -pub fn whitespace(input: Cursor) -> PResult<()> { - if input.is_empty() { - return Err(LexError); - } - - let bytes = input.as_bytes(); - let mut i = 0; - while i < bytes.len() { - let s = input.advance(i); - if bytes[i] == b'/' { - if s.starts_with("//") && (!s.starts_with("///") || s.starts_with("////")) && - !s.starts_with("//!") { - if let Some(len) = s.find('\n') { - i += len + 1; - continue; - } - break; - } else if s.starts_with("/**/") { - i += 4; - continue - } else if s.starts_with("/*") && (!s.starts_with("/**") || s.starts_with("/***")) && - !s.starts_with("/*!") { - let (_, com) = block_comment(s)?; - i += com.len(); - continue; - } - } - match bytes[i] { - b' ' | 0x09...0x0d => { - i += 1; - continue; - } - b if b <= 0x7f => {} - _ => { - let ch = s.chars().next().unwrap(); - if is_whitespace(ch) { - i += ch.len_utf8(); - continue; - } - } - } - return if i > 0 { - Ok((s, ())) - } else { - Err(LexError) - }; - } - Ok((input.advance(input.len()), ())) -} - -pub fn block_comment(input: Cursor) -> PResult<&str> { - if !input.starts_with("/*") { - return Err(LexError); - } - - let mut depth = 0; - let bytes = 
input.as_bytes(); - let mut i = 0; - let upper = bytes.len() - 1; - while i < upper { - if bytes[i] == b'/' && bytes[i + 1] == b'*' { - depth += 1; - i += 1; // eat '*' - } else if bytes[i] == b'*' && bytes[i + 1] == b'/' { - depth -= 1; - if depth == 0 { - return Ok((input.advance(i + 2), &input.rest[..i + 2])); - } - i += 1; // eat '/' - } - i += 1; - } - Err(LexError) -} - -pub fn skip_whitespace(input: Cursor) -> Cursor { - match whitespace(input) { - Ok((rest, _)) => rest, - Err(LexError) => input, - } -} - -fn is_whitespace(ch: char) -> bool { - // Rust treats left-to-right mark and right-to-left mark as whitespace - ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}' -} - -pub fn word_break(input: Cursor) -> PResult<()> { - match input.chars().next() { - Some(ch) if UnicodeXID::is_xid_continue(ch) => Err(LexError), - Some(_) | None => Ok((input, ())), - } -} - -macro_rules! named { - ($name:ident -> $o:ty, $submac:ident!( $($args:tt)* )) => { - fn $name<'a>(i: Cursor<'a>) -> $crate::strnom::PResult<'a, $o> { - $submac!(i, $($args)*) - } - }; -} - -macro_rules! alt { - ($i:expr, $e:ident | $($rest:tt)*) => { - alt!($i, call!($e) | $($rest)*) - }; - - ($i:expr, $subrule:ident!( $($args:tt)*) | $($rest:tt)*) => { - match $subrule!($i, $($args)*) { - res @ Ok(_) => res, - _ => alt!($i, $($rest)*) - } - }; - - ($i:expr, $subrule:ident!( $($args:tt)* ) => { $gen:expr } | $($rest:tt)+) => { - match $subrule!($i, $($args)*) { - Ok((i, o)) => Ok((i, $gen(o))), - Err(LexError) => alt!($i, $($rest)*) - } - }; - - ($i:expr, $e:ident => { $gen:expr } | $($rest:tt)*) => { - alt!($i, call!($e) => { $gen } | $($rest)*) - }; - - ($i:expr, $e:ident => { $gen:expr }) => { - alt!($i, call!($e) => { $gen }) - }; - - ($i:expr, $subrule:ident!( $($args:tt)* ) => { $gen:expr }) => { - match $subrule!($i, $($args)*) { - Ok((i, o)) => Ok((i, $gen(o))), - Err(LexError) => Err(LexError), - } - }; - - ($i:expr, $e:ident) => { - alt!($i, call!($e)) - }; - - ($i:expr, $subrule:ident!( $($args:tt)*)) => { - $subrule!($i, $($args)*) - }; -} - -macro_rules! do_parse { - ($i:expr, ( $($rest:expr),* )) => { - Ok(($i, ( $($rest),* ))) - }; - - ($i:expr, $e:ident >> $($rest:tt)*) => { - do_parse!($i, call!($e) >> $($rest)*) - }; - - ($i:expr, $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => { - match $submac!($i, $($args)*) { - Err(LexError) => Err(LexError), - Ok((i, _)) => do_parse!(i, $($rest)*), - } - }; - - ($i:expr, $field:ident : $e:ident >> $($rest:tt)*) => { - do_parse!($i, $field: call!($e) >> $($rest)*) - }; - - ($i:expr, $field:ident : $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => { - match $submac!($i, $($args)*) { - Err(LexError) => Err(LexError), - Ok((i, o)) => { - let $field = o; - do_parse!(i, $($rest)*) - }, - } - }; -} - -macro_rules! peek { - ($i:expr, $submac:ident!( $($args:tt)* )) => { - match $submac!($i, $($args)*) { - Ok((_, o)) => Ok(($i, o)), - Err(LexError) => Err(LexError), - } - }; -} - -macro_rules! call { - ($i:expr, $fun:expr $(, $args:expr)*) => { - $fun($i $(, $args)*) - }; -} - -macro_rules! option { - ($i:expr, $f:expr) => { - match $f($i) { - Ok((i, o)) => Ok((i, Some(o))), - Err(LexError) => Ok(($i, None)), - } - }; -} - -macro_rules! 
take_until { - ($i:expr, $substr:expr) => {{ - if $substr.len() > $i.len() { - Err(LexError) - } else { - let substr_vec: Vec = $substr.chars().collect(); - let mut window: Vec = vec![]; - let mut offset = $i.len(); - let mut parsed = false; - for (o, c) in $i.char_indices() { - window.push(c); - if window.len() > substr_vec.len() { - window.remove(0); - } - if window == substr_vec { - parsed = true; - window.pop(); - let window_len: usize = window.iter() - .map(|x| x.len_utf8()) - .fold(0, |x, y| x + y); - offset = o - window_len; - break; - } - } - if parsed { - Ok(($i.advance(offset), &$i.rest[..offset])) - } else { - Err(LexError) - } - } - }}; -} - -macro_rules! tuple { - ($i:expr, $($rest:tt)*) => { - tuple_parser!($i, (), $($rest)*) - }; -} - -/// Do not use directly. Use `tuple!`. -macro_rules! tuple_parser { - ($i:expr, ($($parsed:tt),*), $e:ident, $($rest:tt)*) => { - tuple_parser!($i, ($($parsed),*), call!($e), $($rest)*) - }; - - ($i:expr, (), $submac:ident!( $($args:tt)* ), $($rest:tt)*) => { - match $submac!($i, $($args)*) { - Err(LexError) => Err(LexError), - Ok((i, o)) => tuple_parser!(i, (o), $($rest)*), - } - }; - - ($i:expr, ($($parsed:tt)*), $submac:ident!( $($args:tt)* ), $($rest:tt)*) => { - match $submac!($i, $($args)*) { - Err(LexError) => Err(LexError), - Ok((i, o)) => tuple_parser!(i, ($($parsed)* , o), $($rest)*), - } - }; - - ($i:expr, ($($parsed:tt),*), $e:ident) => { - tuple_parser!($i, ($($parsed),*), call!($e)) - }; - - ($i:expr, (), $submac:ident!( $($args:tt)* )) => { - $submac!($i, $($args)*) - }; - - ($i:expr, ($($parsed:expr),*), $submac:ident!( $($args:tt)* )) => { - match $submac!($i, $($args)*) { - Err(LexError) => Err(LexError), - Ok((i, o)) => Ok((i, ($($parsed),*, o))) - } - }; - - ($i:expr, ($($parsed:expr),*)) => { - Ok(($i, ($($parsed),*))) - }; -} - -macro_rules! not { - ($i:expr, $submac:ident!( $($args:tt)* )) => { - match $submac!($i, $($args)*) { - Ok((_, _)) => Err(LexError), - Err(LexError) => Ok(($i, ())), - } - }; -} - -macro_rules! tag { - ($i:expr, $tag:expr) => { - if $i.starts_with($tag) { - Ok(($i.advance($tag.len()), &$i.rest[..$tag.len()])) - } else { - Err(LexError) - } - }; -} - -macro_rules! punct { - ($i:expr, $punct:expr) => { - $crate::strnom::punct($i, $punct) - }; -} - -/// Do not use directly. Use `punct!`. -pub fn punct<'a>(input: Cursor<'a>, token: &'static str) -> PResult<'a, &'a str> { - let input = skip_whitespace(input); - if input.starts_with(token) { - Ok((input.advance(token.len()), token)) - } else { - Err(LexError) - } -} - -macro_rules! preceded { - ($i:expr, $submac:ident!( $($args:tt)* ), $submac2:ident!( $($args2:tt)* )) => { - match tuple!($i, $submac!($($args)*), $submac2!($($args2)*)) { - Ok((remaining, (_, o))) => Ok((remaining, o)), - Err(LexError) => Err(LexError), - } - }; - - ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => { - preceded!($i, $submac!($($args)*), call!($g)) - }; -} - -macro_rules! delimited { - ($i:expr, $submac:ident!( $($args:tt)* ), $($rest:tt)+) => { - match tuple_parser!($i, (), $submac!($($args)*), $($rest)*) { - Err(LexError) => Err(LexError), - Ok((i1, (_, o, _))) => Ok((i1, o)) - } - }; -} - -macro_rules! map { - ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => { - match $submac!($i, $($args)*) { - Err(LexError) => Err(LexError), - Ok((i, o)) => Ok((i, call!(o, $g))) - } - }; - - ($i:expr, $f:expr, $g:expr) => { - map!($i, call!($f), $g) - }; -} - -macro_rules! 
many0 { - ($i:expr, $f:expr) => {{ - let ret; - let mut res = ::std::vec::Vec::new(); - let mut input = $i; - - loop { - if input.is_empty() { - ret = Ok((input, res)); - break; - } - - match $f(input) { - Err(LexError) => { - ret = Ok((input, res)); - break; - } - Ok((i, o)) => { - // loop trip must always consume (otherwise infinite loops) - if i.len() == input.len() { - ret = Err(LexError); - break; - } - - res.push(o); - input = i; - } - } - } - - ret - }}; -} diff --git a/third_party/rust/proc-macro2-0.2.2/src/unstable.rs b/third_party/rust/proc-macro2-0.2.2/src/unstable.rs deleted file mode 100644 index 1a2508386291..000000000000 --- a/third_party/rust/proc-macro2-0.2.2/src/unstable.rs +++ /dev/null @@ -1,406 +0,0 @@ -use std::ascii; -use std::fmt; -use std::iter; -use std::str::FromStr; - -use proc_macro; - -use {TokenTree, TokenNode, Delimiter, Spacing}; - -#[derive(Clone)] -pub struct TokenStream(proc_macro::TokenStream); - -pub struct LexError(proc_macro::LexError); - -impl TokenStream { - pub fn empty() -> TokenStream { - TokenStream(proc_macro::TokenStream::empty()) - } - - pub fn is_empty(&self) -> bool { - self.0.is_empty() - } -} - -impl FromStr for TokenStream { - type Err = LexError; - - fn from_str(src: &str) -> Result { - Ok(TokenStream(src.parse().map_err(LexError)?)) - } -} - -impl fmt::Display for TokenStream { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.0.fmt(f) - } -} - -impl From for TokenStream { - fn from(inner: proc_macro::TokenStream) -> TokenStream { - TokenStream(inner) - } -} - -impl From for proc_macro::TokenStream { - fn from(inner: TokenStream) -> proc_macro::TokenStream { - inner.0 - } -} - -impl From for TokenStream { - fn from(tree: TokenTree) -> TokenStream { - TokenStream(proc_macro::TokenTree { - span: (tree.span.0).0, - kind: match tree.kind { - TokenNode::Group(delim, s) => { - let delim = match delim { - Delimiter::Parenthesis => proc_macro::Delimiter::Parenthesis, - Delimiter::Bracket => proc_macro::Delimiter::Bracket, - Delimiter::Brace => proc_macro::Delimiter::Brace, - Delimiter::None => proc_macro::Delimiter::None, - }; - proc_macro::TokenNode::Group(delim, (s.0).0) - } - TokenNode::Op(ch, kind) => { - let kind = match kind { - Spacing::Joint => proc_macro::Spacing::Joint, - Spacing::Alone => proc_macro::Spacing::Alone, - }; - proc_macro::TokenNode::Op(ch, kind) - } - TokenNode::Term(s) => { - proc_macro::TokenNode::Term((s.0).0) - } - TokenNode::Literal(l) => { - proc_macro::TokenNode::Literal((l.0).0) - } - }, - }.into()) - } -} - -impl iter::FromIterator for TokenStream { - fn from_iter>(streams: I) -> Self { - let streams = streams.into_iter().map(|s| s.0); - TokenStream(streams.collect::()) - } -} - -impl fmt::Debug for TokenStream { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.0.fmt(f) - } -} - -impl fmt::Debug for LexError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.0.fmt(f) - } -} - -pub struct TokenTreeIter(proc_macro::TokenTreeIter); - -impl IntoIterator for TokenStream { - type Item = TokenTree; - type IntoIter = TokenTreeIter; - - fn into_iter(self) -> TokenTreeIter { - TokenTreeIter(self.0.into_iter()) - } -} - -impl Iterator for TokenTreeIter { - type Item = TokenTree; - - fn next(&mut self) -> Option { - let token = match self.0.next() { - Some(n) => n, - None => return None, - }; - Some(TokenTree { - span: ::Span(Span(token.span)), - kind: match token.kind { - proc_macro::TokenNode::Group(delim, s) => { - let delim = match delim { - 
proc_macro::Delimiter::Parenthesis => Delimiter::Parenthesis, - proc_macro::Delimiter::Bracket => Delimiter::Bracket, - proc_macro::Delimiter::Brace => Delimiter::Brace, - proc_macro::Delimiter::None => Delimiter::None, - }; - TokenNode::Group(delim, ::TokenStream(TokenStream(s))) - } - proc_macro::TokenNode::Op(ch, kind) => { - let kind = match kind { - proc_macro::Spacing::Joint => Spacing::Joint, - proc_macro::Spacing::Alone => Spacing::Alone, - }; - TokenNode::Op(ch, kind) - } - proc_macro::TokenNode::Term(s) => { - TokenNode::Term(::Term(Term(s))) - } - proc_macro::TokenNode::Literal(l) => { - TokenNode::Literal(::Literal(Literal(l))) - } - }, - }) - } - - fn size_hint(&self) -> (usize, Option) { - self.0.size_hint() - } -} - -impl fmt::Debug for TokenTreeIter { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_struct("TokenTreeIter").finish() - } -} - -#[cfg(procmacro2_semver_exempt)] -#[derive(Clone, PartialEq, Eq)] -pub struct FileName(String); - -#[cfg(procmacro2_semver_exempt)] -impl fmt::Display for FileName { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.0.fmt(f) - } -} - -// NOTE: We have to generate our own filename object here because we can't wrap -// the one provided by proc_macro. -#[cfg(procmacro2_semver_exempt)] -#[derive(Clone, PartialEq, Eq)] -pub struct SourceFile(proc_macro::SourceFile, FileName); - -#[cfg(procmacro2_semver_exempt)] -impl SourceFile { - fn new(sf: proc_macro::SourceFile) -> Self { - let filename = FileName(sf.path().to_string()); - SourceFile(sf, filename) - } - - /// Get the path to this source file as a string. - pub fn path(&self) -> &FileName { - &self.1 - } - - pub fn is_real(&self) -> bool { - self.0.is_real() - } -} - -#[cfg(procmacro2_semver_exempt)] -impl AsRef for SourceFile { - fn as_ref(&self) -> &FileName { - self.path() - } -} - -#[cfg(procmacro2_semver_exempt)] -impl fmt::Debug for SourceFile { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.0.fmt(f) - } -} - -#[cfg(procmacro2_semver_exempt)] -pub struct LineColumn { - pub line: usize, - pub column: usize, -} - -#[derive(Copy, Clone)] -pub struct Span(proc_macro::Span); - -impl From for ::Span { - fn from(proc_span: proc_macro::Span) -> ::Span { - ::Span(Span(proc_span)) - } -} - -impl Span { - pub fn call_site() -> Span { - Span(proc_macro::Span::call_site()) - } - - pub fn def_site() -> Span { - Span(proc_macro::Span::def_site()) - } - - pub fn resolved_at(&self, other: Span) -> Span { - Span(self.0.resolved_at(other.0)) - } - - pub fn located_at(&self, other: Span) -> Span { - Span(self.0.located_at(other.0)) - } - - pub fn unstable(self) -> proc_macro::Span { - self.0 - } - - #[cfg(procmacro2_semver_exempt)] - pub fn source_file(&self) -> SourceFile { - SourceFile::new(self.0.source_file()) - } - - #[cfg(procmacro2_semver_exempt)] - pub fn start(&self) -> LineColumn { - let proc_macro::LineColumn{ line, column } = self.0.start(); - LineColumn { line, column } - } - - #[cfg(procmacro2_semver_exempt)] - pub fn end(&self) -> LineColumn { - let proc_macro::LineColumn{ line, column } = self.0.end(); - LineColumn { line, column } - } - - #[cfg(procmacro2_semver_exempt)] - pub fn join(&self, other: Span) -> Option { - self.0.join(other.0).map(Span) - } -} - -impl fmt::Debug for Span { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.0.fmt(f) - } -} - -#[derive(Copy, Clone)] -pub struct Term(proc_macro::Term); - -impl Term { - pub fn intern(string: &str) -> Term { - Term(proc_macro::Term::intern(string)) - } - - pub fn 
as_str(&self) -> &str { - self.0.as_str() - } -} - -impl fmt::Debug for Term { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.0.fmt(f) - } -} - -#[derive(Clone)] -pub struct Literal(proc_macro::Literal); - -impl Literal { - pub fn byte_char(byte: u8) -> Literal { - match byte { - 0 => Literal(to_literal("b'\\0'")), - b'\"' => Literal(to_literal("b'\"'")), - n => { - let mut escaped = "b'".to_string(); - escaped.extend(ascii::escape_default(n).map(|c| c as char)); - escaped.push('\''); - Literal(to_literal(&escaped)) - } - } - } - - pub fn byte_string(bytes: &[u8]) -> Literal { - Literal(proc_macro::Literal::byte_string(bytes)) - } - - pub fn doccomment(s: &str) -> Literal { - Literal(to_literal(s)) - } - - pub fn float(s: f64) -> Literal { - Literal(proc_macro::Literal::float(s)) - } - - pub fn integer(s: i64) -> Literal { - Literal(proc_macro::Literal::integer(s.into())) - } - - pub fn raw_string(s: &str, pounds: usize) -> Literal { - let mut ret = format!("r"); - ret.extend((0..pounds).map(|_| "#")); - ret.push('"'); - ret.push_str(s); - ret.push('"'); - ret.extend((0..pounds).map(|_| "#")); - Literal(to_literal(&ret)) - } - - pub fn raw_byte_string(s: &str, pounds: usize) -> Literal { - let mut ret = format!("br"); - ret.extend((0..pounds).map(|_| "#")); - ret.push('"'); - ret.push_str(s); - ret.push('"'); - ret.extend((0..pounds).map(|_| "#")); - Literal(to_literal(&ret)) - } -} - -impl fmt::Display for Literal { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.0.fmt(f) - } -} - -impl fmt::Debug for Literal { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.0.fmt(f) - } -} - -fn to_literal(s: &str) -> proc_macro::Literal { - let stream = s.parse::().unwrap(); - match stream.into_iter().next().unwrap().kind { - proc_macro::TokenNode::Literal(l) => l, - _ => unreachable!(), - } -} - -macro_rules! ints { - ($($t:ident,)*) => {$( - impl From<$t> for Literal { - fn from(t: $t) -> Literal { - Literal(proc_macro::Literal::$t(t)) - } - } - )*} -} - -ints! { - u8, u16, u32, u64, usize, - i8, i16, i32, i64, isize, -} - -macro_rules! floats { - ($($t:ident,)*) => {$( - impl From<$t> for Literal { - fn from(t: $t) -> Literal { - Literal(proc_macro::Literal::$t(t)) - } - } - )*} -} - -floats! 
{ - f32, f64, -} - -impl<'a> From<&'a str> for Literal { - fn from(t: &'a str) -> Literal { - Literal(proc_macro::Literal::string(t)) - } -} - -impl From for Literal { - fn from(t: char) -> Literal { - Literal(proc_macro::Literal::character(t)) - } -} diff --git a/third_party/rust/proc-macro2-0.2.2/tests/test.rs b/third_party/rust/proc-macro2-0.2.2/tests/test.rs deleted file mode 100644 index ff9c205af9fe..000000000000 --- a/third_party/rust/proc-macro2-0.2.2/tests/test.rs +++ /dev/null @@ -1,179 +0,0 @@ -extern crate proc_macro2; - -use std::str; - -use proc_macro2::{Term, Literal, TokenStream}; - -#[cfg(procmacro2_semver_exempt)] -use proc_macro2::TokenNode; - -#[cfg(procmacro2_semver_exempt)] -#[cfg(not(feature = "nightly"))] -use proc_macro2::Span; - -#[test] -fn symbols() { - assert_eq!(Term::intern("foo").as_str(), "foo"); - assert_eq!(Term::intern("bar").as_str(), "bar"); -} - -#[test] -fn literals() { - assert_eq!(Literal::string("foo").to_string(), "\"foo\""); - assert_eq!(Literal::string("\"").to_string(), "\"\\\"\""); - assert_eq!(Literal::float(10.0).to_string(), "10.0"); -} - -#[test] -fn roundtrip() { - fn roundtrip(p: &str) { - println!("parse: {}", p); - let s = p.parse::().unwrap().to_string(); - println!("first: {}", s); - let s2 = s.to_string().parse::().unwrap().to_string(); - assert_eq!(s, s2); - } - roundtrip("a"); - roundtrip("<<"); - roundtrip("<<="); - roundtrip(" - /// a - wut - "); - roundtrip(" - 1 - 1.0 - 1f32 - 2f64 - 1usize - 4isize - 4e10 - 1_000 - 1_0i32 - 8u8 - 9 - 0 - 0xffffffffffffffffffffffffffffffff - "); - roundtrip("'a"); - roundtrip("'static"); - roundtrip("'\\u{10__FFFF}'"); - roundtrip("\"\\u{10_F0FF__}foo\\u{1_0_0_0__}\""); -} - -#[test] -fn fail() { - fn fail(p: &str) { - if p.parse::().is_ok() { - panic!("should have failed to parse: {}", p); - } - } - fail("1x"); - fail("1u80"); - fail("1f320"); - fail("' static"); - fail("'mut"); -} - -#[cfg(procmacro2_semver_exempt)] -#[test] -fn span_test() { - fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) { - let ts = p.parse::().unwrap(); - check_spans_internal(ts, &mut lines); - } - - fn check_spans_internal( - ts: TokenStream, - lines: &mut &[(usize, usize, usize, usize)], - ) { - for i in ts { - if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() { - *lines = rest; - - let start = i.span.start(); - assert_eq!(start.line, sline, "sline did not match for {}", i); - assert_eq!(start.column, scol, "scol did not match for {}", i); - - let end = i.span.end(); - assert_eq!(end.line, eline, "eline did not match for {}", i); - assert_eq!(end.column, ecol, "ecol did not match for {}", i); - - match i.kind { - TokenNode::Group(_, stream) => - check_spans_internal(stream, lines), - _ => {} - } - } - } - } - - check_spans("\ -/// This is a document comment -testing 123 -{ - testing 234 -}", &[ - (1, 0, 1, 30), - (2, 0, 2, 7), - (2, 8, 2, 11), - (3, 0, 5, 1), - (4, 2, 4, 9), - (4, 10, 4, 13), -]); -} - -#[cfg(procmacro2_semver_exempt)] -#[cfg(not(feature = "nightly"))] -#[test] -fn default_span() { - let start = Span::call_site().start(); - assert_eq!(start.line, 1); - assert_eq!(start.column, 0); - let end = Span::call_site().end(); - assert_eq!(end.line, 1); - assert_eq!(end.column, 0); - let source_file = Span::call_site().source_file(); - assert_eq!(source_file.path().to_string(), ""); - assert!(!source_file.is_real()); -} - -#[cfg(procmacro2_semver_exempt)] -#[test] -fn span_join() { - let source1 = - "aaa\nbbb".parse::().unwrap().into_iter().collect::>(); - let source2 
= - "ccc\nddd".parse::().unwrap().into_iter().collect::>(); - - assert!(source1[0].span.source_file() != source2[0].span.source_file()); - assert_eq!(source1[0].span.source_file(), source1[1].span.source_file()); - - let joined1 = source1[0].span.join(source1[1].span); - let joined2 = source1[0].span.join(source2[0].span); - assert!(joined1.is_some()); - assert!(joined2.is_none()); - - let start = joined1.unwrap().start(); - let end = joined1.unwrap().end(); - assert_eq!(start.line, 1); - assert_eq!(start.column, 0); - assert_eq!(end.line, 2); - assert_eq!(end.column, 3); - - assert_eq!(joined1.unwrap().source_file(), source1[0].span.source_file()); -} - -#[test] -fn no_panic() { - let s = str::from_utf8(b"b\'\xc2\x86 \x00\x00\x00^\"").unwrap(); - assert!(s.parse::().is_err()); -} - -#[test] -fn tricky_doc_commaent() { - let stream = "/**/".parse::().unwrap(); - let tokens = stream.into_iter().collect::>(); - assert!(tokens.is_empty(), "not empty -- {:?}", tokens); -} - diff --git a/third_party/rust/quote-0.4.2/.cargo-checksum.json b/third_party/rust/quote-0.4.2/.cargo-checksum.json deleted file mode 100644 index d2174ba2dcf3..000000000000 --- a/third_party/rust/quote-0.4.2/.cargo-checksum.json +++ /dev/null @@ -1 +0,0 @@ -{"files":{"Cargo.toml":"c8e98953df6fdcc4bdf6a1b7d970c214e8f5eb0f21da327d1c0916735303cd3a","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","README.md":"f1812dcc3e666d6bebca97f3739058e1bd8de1a2542c9a8cb258d0a259bd59e1","src/lib.rs":"b63a044edeff7ae12d0733e0a7fe64babf9b593b624fa753639ad3f340f24031","src/to_tokens.rs":"3b7fe0934ce2d9c23d9851ec624349cfa6e9d5cd9ed31c67f25cecce50dc218f","src/tokens.rs":"963474535197c1a79bf60af570470e7a89dce43546ee3186920197fdb40bdd9b","tests/test.rs":"8db237707035f55af7c7ef82c2c3892a048411963dccd019da0148bacae8c3d2"},"package":"1eca14c727ad12702eb4b6bfb5a232287dcf8385cb8ca83a3eeaf6519c44c408"} \ No newline at end of file diff --git a/third_party/rust/quote-0.4.2/Cargo.toml b/third_party/rust/quote-0.4.2/Cargo.toml deleted file mode 100644 index a5d4b7d8f6a9..000000000000 --- a/third_party/rust/quote-0.4.2/Cargo.toml +++ /dev/null @@ -1,24 +0,0 @@ -# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO -# -# When uploading crates to the registry Cargo will automatically -# "normalize" Cargo.toml files for maximal compatibility -# with all versions of Cargo and also rewrite `path` dependencies -# to registry (e.g. crates.io) dependencies -# -# If you believe there's an error in this file please file an -# issue against the rust-lang/cargo repository. 
If you're -# editing this file be aware that the upstream Cargo.toml -# will likely look very different (and much more reasonable) - -[package] -name = "quote" -version = "0.4.2" -authors = ["David Tolnay "] -include = ["Cargo.toml", "src/**/*.rs", "tests/**/*.rs", "README.md", "LICENSE-APACHE", "LICENSE-MIT"] -description = "Quasi-quoting macro quote!(...)" -documentation = "https://docs.rs/quote/" -keywords = ["syn"] -license = "MIT/Apache-2.0" -repository = "https://github.com/dtolnay/quote" -[dependencies.proc-macro2] -version = "0.2" diff --git a/third_party/rust/quote-0.4.2/LICENSE-APACHE b/third_party/rust/quote-0.4.2/LICENSE-APACHE deleted file mode 100644 index 16fe87b06e80..000000000000 --- a/third_party/rust/quote-0.4.2/LICENSE-APACHE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - -1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - -2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - -3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - -4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. 
The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - -5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - -6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - -7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - -8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - -9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- -END OF TERMS AND CONDITIONS - -APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - -Copyright [yyyy] [name of copyright owner] - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. diff --git a/third_party/rust/quote-0.4.2/LICENSE-MIT b/third_party/rust/quote-0.4.2/LICENSE-MIT deleted file mode 100644 index 40b8817a47be..000000000000 --- a/third_party/rust/quote-0.4.2/LICENSE-MIT +++ /dev/null @@ -1,25 +0,0 @@ -Copyright (c) 2016 The Rust Project Developers - -Permission is hereby granted, free of charge, to any -person obtaining a copy of this software and associated -documentation files (the "Software"), to deal in the -Software without restriction, including without -limitation the rights to use, copy, modify, merge, -publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software -is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice -shall be included in all copies or substantial portions -of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. diff --git a/third_party/rust/quote-0.4.2/README.md b/third_party/rust/quote-0.4.2/README.md deleted file mode 100644 index 90db71948200..000000000000 --- a/third_party/rust/quote-0.4.2/README.md +++ /dev/null @@ -1,135 +0,0 @@ -Rust Quasi-Quoting -================== - -[![Build Status](https://api.travis-ci.org/dtolnay/quote.svg?branch=master)](https://travis-ci.org/dtolnay/quote) -[![Latest Version](https://img.shields.io/crates/v/quote.svg)](https://crates.io/crates/quote) -[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/quote/) - -This crate provides the [`quote!`] macro for turning Rust syntax tree data -structures into tokens of source code. - -[`quote!`]: https://docs.rs/quote/0.4/quote/macro.quote.html - -Procedural macros in Rust receive a stream of tokens as input, execute arbitrary -Rust code to determine how to manipulate those tokens, and produce a stream of -tokens to hand back to the compiler to compile into the caller's crate. 
-Quasi-quoting is a solution to one piece of that -- producing tokens to return -to the compiler. - -The idea of quasi-quoting is that we write *code* that we treat as *data*. -Within the `quote!` macro, we can write what looks like code to our text editor -or IDE. We get all the benefits of the editor's brace matching, syntax -highlighting, indentation, and maybe autocompletion. But rather than compiling -that as code into the current crate, we can treat it as data, pass it around, -mutate it, and eventually hand it back to the compiler as tokens to compile into -the macro caller's crate. - -This crate is motivated by the procedural macro use case, but is a -general-purpose Rust quasi-quoting library and is not specific to procedural -macros. - -*Version requirement: Quote supports any compiler version back to Rust's very -first support for procedural macros in Rust 1.15.0.* - -```toml -[dependencies] -quote = "0.4" -``` - -```rust -#[macro_use] -extern crate quote; -``` - -## Syntax - -The quote crate provides a [`quote!`] macro within which you can write Rust code -that gets packaged into a [`quote::Tokens`] and can be treated as data. You -should think of `Tokens` as representing a fragment of Rust source code. Call -`to_string()` on a `Tokens` to get back the fragment of source code as a string, -or call `into()` to stream them as a `TokenStream` back to the compiler in a -procedural macro. - -[`quote::Tokens`]: https://docs.rs/quote/0.4/quote/struct.Tokens.html - -Within the `quote!` macro, interpolation is done with `#var`. Any type -implementing the [`quote::ToTokens`] trait can be interpolated. This includes -most Rust primitive types as well as most of the syntax tree types from [`syn`]. - -[`quote::ToTokens`]: https://docs.rs/quote/0.4/quote/trait.ToTokens.html -[`syn`]: https://github.com/dtolnay/syn - -```rust -let tokens = quote! { - struct SerializeWith #generics #where_clause { - value: &'a #field_ty, - phantom: ::std::marker::PhantomData<#item_ty>, - } - - impl #generics serde::Serialize for SerializeWith #generics #where_clause { - fn serialize(&self, s: &mut S) -> Result<(), S::Error> - where S: serde::Serializer - { - #path(self.value, s) - } - } - - SerializeWith { - value: #value, - phantom: ::std::marker::PhantomData::<#item_ty>, - } -}; -``` - -## Repetition - -Repetition is done using `#(...)*` or `#(...),*` similar to `macro_rules!`. This -iterates through the elements of any variable interpolated within the repetition -and inserts a copy of the repetition body for each one. The variables in an -interpolation may be anything that implements `IntoIterator`, including `Vec` or -a pre-existing iterator. - -- `#(#var)*` — no separators -- `#(#var),*` — the character before the asterisk is used as a separator -- `#( struct #var; )*` — the repetition can contain other things -- `#( #k => println!("{}", #v), )*` — even multiple interpolations - -Note that there is a difference between `#(#var ,)*` and `#(#var),*`—the latter -does not produce a trailing comma. This matches the behavior of delimiters in -`macro_rules!`. - -## Hygiene - -Any interpolated tokens preserve the `Span` information provided by their -`ToTokens` implementation. Tokens that originate within a `quote!` invocation -are spanned with [`Span::def_site()`]. - -[`Span::def_site()`]: https://docs.rs/proc-macro2/0.2/proc_macro2/struct.Span.html#method.def_site - -A different span can be provided explicitly through the [`quote_spanned!`] -macro. 
- -[`quote_spanned!`]: https://docs.rs/quote/0.4/quote/macro.quote_spanned.html - -### Recursion limit - -The `quote!` macro relies on deep recursion so some large invocations may fail -with "recursion limit reached" when you compile. If it fails, bump up the -recursion limit by adding `#![recursion_limit = "128"]` to your crate. An even -higher limit may be necessary for especially large invocations. You don't need -this unless the compiler tells you that you need it. - -## License - -Licensed under either of - - * Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0) - * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) - -at your option. - -### Contribution - -Unless you explicitly state otherwise, any contribution intentionally submitted -for inclusion in this crate by you, as defined in the Apache-2.0 license, shall -be dual licensed as above, without any additional terms or conditions. diff --git a/third_party/rust/quote-0.4.2/src/lib.rs b/third_party/rust/quote-0.4.2/src/lib.rs deleted file mode 100644 index 7d174343feaf..000000000000 --- a/third_party/rust/quote-0.4.2/src/lib.rs +++ /dev/null @@ -1,501 +0,0 @@ -//! This crate provides the [`quote!`] macro for turning Rust syntax tree data -//! structures into tokens of source code. -//! -//! [`quote!`]: macro.quote.html -//! -//! Procedural macros in Rust receive a stream of tokens as input, execute -//! arbitrary Rust code to determine how to manipulate those tokens, and produce -//! a stream of tokens to hand back to the compiler to compile into the caller's -//! crate. Quasi-quoting is a solution to one piece of that -- producing tokens -//! to return to the compiler. -//! -//! The idea of quasi-quoting is that we write *code* that we treat as *data*. -//! Within the `quote!` macro, we can write what looks like code to our text -//! editor or IDE. We get all the benefits of the editor's brace matching, -//! syntax highlighting, indentation, and maybe autocompletion. But rather than -//! compiling that as code into the current crate, we can treat it as data, pass -//! it around, mutate it, and eventually hand it back to the compiler as tokens -//! to compile into the macro caller's crate. -//! -//! This crate is motivated by the procedural macro use case, but is a -//! general-purpose Rust quasi-quoting library and is not specific to procedural -//! macros. -//! -//! *Version requirement: Quote supports any compiler version back to Rust's -//! very first support for procedural macros in Rust 1.15.0.* -//! -//! ```toml -//! [dependencies] -//! quote = "0.4" -//! ``` -//! -//! ``` -//! #[macro_use] -//! extern crate quote; -//! # -//! # fn main() {} -//! ``` -//! -//! # Example -//! -//! The following quasi-quoted block of code is something you might find in [a] -//! procedural macro having to do with data structure serialization. The `#var` -//! syntax performs interpolation of runtime variables into the quoted tokens. -//! Check out the documentation of the [`quote!`] macro for more detail about -//! the syntax. See also the [`quote_spanned!`] macro which is important for -//! implementing hygienic procedural macros. -//! -//! [a]: https://serde.rs/ -//! [`quote_spanned!`]: macro.quote_spanned.html -//! -//! ``` -//! # #[macro_use] -//! # extern crate quote; -//! # -//! # fn main() { -//! # let generics = ""; -//! # let where_clause = ""; -//! # let field_ty = ""; -//! # let item_ty = ""; -//! # let path = ""; -//! # let value = ""; -//! # -//! 
let tokens = quote! { -//! struct SerializeWith #generics #where_clause { -//! value: &'a #field_ty, -//! phantom: ::std::marker::PhantomData<#item_ty>, -//! } -//! -//! impl #generics serde::Serialize for SerializeWith #generics #where_clause { -//! fn serialize(&self, s: &mut S) -> Result<(), S::Error> -//! where S: serde::Serializer -//! { -//! #path(self.value, s) -//! } -//! } -//! -//! SerializeWith { -//! value: #value, -//! phantom: ::std::marker::PhantomData::<#item_ty>, -//! } -//! }; -//! # -//! # } -//! ``` -//! -//! ## Recursion limit -//! -//! The `quote!` macro relies on deep recursion so some large invocations may -//! fail with "recursion limit reached" when you compile. If it fails, bump up -//! the recursion limit by adding `#![recursion_limit = "128"]` to your crate. -//! An even higher limit may be necessary for especially large invocations. - -// Quote types in rustdoc of other crates get linked to here. -#![doc(html_root_url = "https://docs.rs/quote/0.4.2")] - -extern crate proc_macro2; -extern crate proc_macro; - -mod tokens; -pub use tokens::Tokens; - -mod to_tokens; -pub use to_tokens::ToTokens; - -// Not public API. -#[doc(hidden)] -pub mod __rt { - // Not public API. - pub use proc_macro2::*; - - // Not public API. - pub fn parse(tokens: &mut ::Tokens, span: Span, s: &str) { - let s: TokenStream = s.parse().expect("invalid token stream"); - tokens.append_all(s.into_iter().map(|mut t| { - t.span = span; - t - })); - } - - // Not public API. - pub fn append_kind(tokens: &mut ::Tokens, span: Span, kind: TokenNode) { - tokens.append(TokenTree { - span: span, - kind: kind, - }) - } -} - -/// The whole point. -/// -/// Performs variable interpolation against the input and produces it as -/// [`Tokens`]. For returning tokens to the compiler in a procedural macro, use -/// `into()` to build a `TokenStream`. -/// -/// [`Tokens`]: struct.Tokens.html -/// -/// # Interpolation -/// -/// Variable interpolation is done with `#var` (similar to `$var` in -/// `macro_rules!` macros). This grabs the `var` variable that is currently in -/// scope and inserts it in that location in the output tokens. The variable -/// must implement the [`ToTokens`] trait. -/// -/// [`ToTokens`]: trait.ToTokens.html -/// -/// Repetition is done using `#(...)*` or `#(...),*` again similar to -/// `macro_rules!`. This iterates through the elements of any variable -/// interpolated within the repetition and inserts a copy of the repetition body -/// for each one. The variables in an interpolation may be anything that -/// implements `IntoIterator`, including `Vec` or a pre-existing iterator. -/// -/// - `#(#var)*` — no separators -/// - `#(#var),*` — the character before the asterisk is used as a separator -/// - `#( struct #var; )*` — the repetition can contain other tokens -/// - `#( #k => println!("{}", #v), )*` — even multiple interpolations -/// -/// # Hygiene -/// -/// Any interpolated tokens preserve the `Span` information provided by their -/// `ToTokens` implementation. Tokens that originate within the `quote!` -/// invocation are spanned with [`Span::def_site()`]. -/// -/// [`Span::def_site()`]: https://docs.rs/proc-macro2/0.2/proc_macro2/struct.Span.html#method.def_site -/// -/// A different span can be provided through the [`quote_spanned!`] macro. 
-/// -/// [`quote_spanned!`]: macro.quote_spanned.html -/// -/// # Example -/// -/// ``` -/// extern crate proc_macro; -/// -/// #[macro_use] -/// extern crate quote; -/// -/// use proc_macro::TokenStream; -/// -/// # const IGNORE_TOKENS: &'static str = stringify! { -/// #[proc_macro_derive(HeapSize)] -/// # }; -/// pub fn derive_heap_size(input: TokenStream) -> TokenStream { -/// // Parse the input and figure out what implementation to generate... -/// # const IGNORE_TOKENS: &'static str = stringify! { -/// let name = /* ... */; -/// let expr = /* ... */; -/// # }; -/// # -/// # let name = 0; -/// # let expr = 0; -/// -/// let expanded = quote! { -/// // The generated impl. -/// impl ::heapsize::HeapSize for #name { -/// fn heap_size_of_children(&self) -> usize { -/// #expr -/// } -/// } -/// }; -/// -/// // Hand the output tokens back to the compiler. -/// expanded.into() -/// } -/// # -/// # fn main() {} -/// ``` -#[macro_export] -macro_rules! quote { - ($($tt:tt)*) => (quote_spanned!($crate::__rt::Span::def_site()=> $($tt)*)); -} - -/// Same as `quote!`, but applies a given span to all tokens originating within -/// the macro invocation. -/// -/// # Syntax -/// -/// A span expression of type [`Span`], followed by `=>`, followed by the tokens -/// to quote. The span expression should be brief -- use a variable for anything -/// more than a few characters. There should be no space before the `=>` token. -/// -/// [`Span`]: https://docs.rs/proc-macro2/0.2/proc_macro2/struct.Span.html -/// -/// ``` -/// # #[macro_use] -/// # extern crate quote; -/// # extern crate proc_macro2; -/// # -/// # use proc_macro2::Span; -/// # -/// # fn main() { -/// # const IGNORE_TOKENS: &'static str = stringify! { -/// let span = /* ... */; -/// # }; -/// # let span = Span::call_site(); -/// # let init = 0; -/// -/// // On one line, use parentheses. -/// let tokens = quote_spanned!(span=> Box::into_raw(Box::new(#init))); -/// -/// // On multiple lines, place the span at the top and use braces. -/// let tokens = quote_spanned! {span=> -/// Box::into_raw(Box::new(#init)) -/// }; -/// # } -/// ``` -/// -/// # Hygiene -/// -/// Any interpolated tokens preserve the `Span` information provided by their -/// `ToTokens` implementation. Tokens that originate within the `quote_spanned!` -/// invocation are spanned with the given span argument. -/// -/// # Example -/// -/// The following procedural macro code uses `quote_spanned!` to assert that a -/// particular Rust type implements the [`Sync`] trait so that references can be -/// safely shared between threads. -/// -/// [`Sync`]: https://doc.rust-lang.org/std/marker/trait.Sync.html -/// -/// ``` -/// # #[macro_use] -/// # extern crate quote; -/// # extern crate proc_macro2; -/// # -/// # use quote::{Tokens, ToTokens}; -/// # use proc_macro2::Span; -/// # -/// # struct Type; -/// # -/// # impl Type { -/// # fn span(&self) -> Span { -/// # Span::call_site() -/// # } -/// # } -/// # -/// # impl ToTokens for Type { -/// # fn to_tokens(&self, _tokens: &mut Tokens) {} -/// # } -/// # -/// # fn main() { -/// # let ty = Type; -/// # let def_site = Span::def_site(); -/// # -/// let ty_span = ty.span().resolved_at(def_site); -/// let assert_sync = quote_spanned! {ty_span=> -/// struct _AssertSync where #ty: Sync; -/// }; -/// # } -/// ``` -/// -/// If the assertion fails, the user will see an error like the following. The -/// input span of their type is hightlighted in the error. 
-/// -/// ```text -/// error[E0277]: the trait bound `*const (): std::marker::Sync` is not satisfied -/// --> src/main.rs:10:21 -/// | -/// 10 | static ref PTR: *const () = &(); -/// | ^^^^^^^^^ `*const ()` cannot be shared between threads safely -/// ``` -/// -/// In this example it is important for the where-clause to be spanned with the -/// line/column information of the user's input type so that error messages are -/// placed appropriately by the compiler. But it is also incredibly important -/// that `Sync` resolves at the macro definition site and not the macro call -/// site. If we resolve `Sync` at the same span that the user's type is going to -/// be resolved, then they could bypass our check by defining their own trait -/// named `Sync` that is implemented for their type. -#[macro_export] -macro_rules! quote_spanned { - ($span:expr=> $($tt:tt)*) => { - { - let mut _s = $crate::Tokens::new(); - let _span = $span; - quote_each_token!(_s _span $($tt)*); - _s - } - }; -} - -// Extract the names of all #metavariables and pass them to the $finish macro. -// -// in: pounded_var_names!(then () a #b c #( #d )* #e) -// out: then!(() b d e) -#[macro_export] -#[doc(hidden)] -macro_rules! pounded_var_names { - ($finish:ident ($($found:ident)*) # ( $($inner:tt)* ) $($rest:tt)*) => { - pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*) - }; - - ($finish:ident ($($found:ident)*) # [ $($inner:tt)* ] $($rest:tt)*) => { - pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*) - }; - - ($finish:ident ($($found:ident)*) # { $($inner:tt)* } $($rest:tt)*) => { - pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*) - }; - - ($finish:ident ($($found:ident)*) # $first:ident $($rest:tt)*) => { - pounded_var_names!($finish ($($found)* $first) $($rest)*) - }; - - ($finish:ident ($($found:ident)*) ( $($inner:tt)* ) $($rest:tt)*) => { - pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*) - }; - - ($finish:ident ($($found:ident)*) [ $($inner:tt)* ] $($rest:tt)*) => { - pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*) - }; - - ($finish:ident ($($found:ident)*) { $($inner:tt)* } $($rest:tt)*) => { - pounded_var_names!($finish ($($found)*) $($inner)* $($rest)*) - }; - - ($finish:ident ($($found:ident)*) $ignore:tt $($rest:tt)*) => { - pounded_var_names!($finish ($($found)*) $($rest)*) - }; - - ($finish:ident ($($found:ident)*)) => { - $finish!(() $($found)*) - }; -} - -// in: nested_tuples_pat!(() a b c d e) -// out: ((((a b) c) d) e) -// -// in: nested_tuples_pat!(() a) -// out: a -#[macro_export] -#[doc(hidden)] -macro_rules! nested_tuples_pat { - (()) => { - &() - }; - - (() $first:ident $($rest:ident)*) => { - nested_tuples_pat!(($first) $($rest)*) - }; - - (($pat:pat) $first:ident $($rest:ident)*) => { - nested_tuples_pat!((($pat, $first)) $($rest)*) - }; - - (($done:pat)) => { - $done - }; -} - -// in: multi_zip_expr!(() a b c d e) -// out: a.into_iter().zip(b).zip(c).zip(d).zip(e) -// -// in: multi_zip_iter!(() a) -// out: a -#[macro_export] -#[doc(hidden)] -macro_rules! multi_zip_expr { - (()) => { - &[] - }; - - (() $single:ident) => { - $single - }; - - (() $first:ident $($rest:ident)*) => { - multi_zip_expr!(($first.into_iter()) $($rest)*) - }; - - (($zips:expr) $first:ident $($rest:ident)*) => { - multi_zip_expr!(($zips.zip($first)) $($rest)*) - }; - - (($done:expr)) => { - $done - }; -} - -#[macro_export] -#[doc(hidden)] -macro_rules! quote_each_token { - ($tokens:ident $span:ident) => {}; - - ($tokens:ident $span:ident # ! 
$($rest:tt)*) => { - quote_each_token!($tokens $span #); - quote_each_token!($tokens $span !); - quote_each_token!($tokens $span $($rest)*); - }; - - ($tokens:ident $span:ident # ( $($inner:tt)* ) * $($rest:tt)*) => { - for pounded_var_names!(nested_tuples_pat () $($inner)*) - in pounded_var_names!(multi_zip_expr () $($inner)*) { - quote_each_token!($tokens $span $($inner)*); - } - quote_each_token!($tokens $span $($rest)*); - }; - - ($tokens:ident $span:ident # ( $($inner:tt)* ) $sep:tt * $($rest:tt)*) => { - for (_i, pounded_var_names!(nested_tuples_pat () $($inner)*)) - in pounded_var_names!(multi_zip_expr () $($inner)*).into_iter().enumerate() { - if _i > 0 { - quote_each_token!($tokens $span $sep); - } - quote_each_token!($tokens $span $($inner)*); - } - quote_each_token!($tokens $span $($rest)*); - }; - - ($tokens:ident $span:ident # [ $($inner:tt)* ] $($rest:tt)*) => { - quote_each_token!($tokens $span #); - $crate::__rt::append_kind(&mut $tokens, - $span, - $crate::__rt::TokenNode::Group( - $crate::__rt::Delimiter::Bracket, - quote_spanned!($span=> $($inner)*).into() - )); - quote_each_token!($tokens $span $($rest)*); - }; - - ($tokens:ident $span:ident # $first:ident $($rest:tt)*) => { - $crate::ToTokens::to_tokens(&$first, &mut $tokens); - quote_each_token!($tokens $span $($rest)*); - }; - - ($tokens:ident $span:ident ( $($first:tt)* ) $($rest:tt)*) => { - $crate::__rt::append_kind(&mut $tokens, - $span, - $crate::__rt::TokenNode::Group( - $crate::__rt::Delimiter::Parenthesis, - quote_spanned!($span=> $($first)*).into() - )); - quote_each_token!($tokens $span $($rest)*); - }; - - ($tokens:ident $span:ident [ $($first:tt)* ] $($rest:tt)*) => { - $crate::__rt::append_kind(&mut $tokens, - $span, - $crate::__rt::TokenNode::Group( - $crate::__rt::Delimiter::Bracket, - quote_spanned!($span=> $($first)*).into() - )); - quote_each_token!($tokens $span $($rest)*); - }; - - ($tokens:ident $span:ident { $($first:tt)* } $($rest:tt)*) => { - $crate::__rt::append_kind(&mut $tokens, - $span, - $crate::__rt::TokenNode::Group( - $crate::__rt::Delimiter::Brace, - quote_spanned!($span=> $($first)*).into() - )); - quote_each_token!($tokens $span $($rest)*); - }; - - ($tokens:ident $span:ident $first:tt $($rest:tt)*) => { - // TODO: this seems slow... special case some `:tt` arguments? - $crate::__rt::parse(&mut $tokens, $span, stringify!($first)); - quote_each_token!($tokens $span $($rest)*); - }; -} diff --git a/third_party/rust/quote-0.4.2/src/to_tokens.rs b/third_party/rust/quote-0.4.2/src/to_tokens.rs deleted file mode 100644 index 277d0a509898..000000000000 --- a/third_party/rust/quote-0.4.2/src/to_tokens.rs +++ /dev/null @@ -1,175 +0,0 @@ -use super::Tokens; - -use std::borrow::Cow; - -use proc_macro2::{Literal, Span, Term, TokenNode, TokenTree, TokenStream}; - -fn tt(kind: TokenNode) -> TokenTree { - TokenTree { - span: Span::def_site(), - kind: kind, - } -} - -/// Types that can be interpolated inside a [`quote!`] invocation. -/// -/// [`quote!`]: macro.quote.html -pub trait ToTokens { - /// Write `self` to the given `Tokens`. 
- /// - /// Example implementation for a struct representing Rust paths like - /// `std::cmp::PartialEq`: - /// - /// ``` - /// extern crate quote; - /// use quote::{Tokens, ToTokens}; - /// - /// extern crate proc_macro2; - /// use proc_macro2::{TokenTree, TokenNode, Spacing, Span}; - /// - /// pub struct Path { - /// pub global: bool, - /// pub segments: Vec, - /// } - /// - /// impl ToTokens for Path { - /// fn to_tokens(&self, tokens: &mut Tokens) { - /// for (i, segment) in self.segments.iter().enumerate() { - /// if i > 0 || self.global { - /// // Double colon `::` - /// tokens.append(TokenTree { - /// span: Span::def_site(), - /// kind: TokenNode::Op(':', Spacing::Joint), - /// }); - /// tokens.append(TokenTree { - /// span: Span::def_site(), - /// kind: TokenNode::Op(':', Spacing::Alone), - /// }); - /// } - /// segment.to_tokens(tokens); - /// } - /// } - /// } - /// # - /// # pub struct PathSegment; - /// # - /// # impl ToTokens for PathSegment { - /// # fn to_tokens(&self, tokens: &mut Tokens) { - /// # unimplemented!() - /// # } - /// # } - /// # - /// # fn main() {} - /// ``` - fn to_tokens(&self, tokens: &mut Tokens); - - /// Convert `self` directly into a `Tokens` object. - /// - /// This method is implicitly implemented using `to_tokens`, and acts as a - /// convenience method for consumers of the `ToTokens` trait. - fn into_tokens(self) -> Tokens - where - Self: Sized, - { - let mut tokens = Tokens::new(); - self.to_tokens(&mut tokens); - tokens - } -} - -impl<'a, T: ?Sized + ToTokens> ToTokens for &'a T { - fn to_tokens(&self, tokens: &mut Tokens) { - (**self).to_tokens(tokens); - } -} - -impl<'a, T: ?Sized + ToOwned + ToTokens> ToTokens for Cow<'a, T> { - fn to_tokens(&self, tokens: &mut Tokens) { - (**self).to_tokens(tokens); - } -} - -impl ToTokens for Box { - fn to_tokens(&self, tokens: &mut Tokens) { - (**self).to_tokens(tokens); - } -} - -impl ToTokens for Option { - fn to_tokens(&self, tokens: &mut Tokens) { - if let Some(ref t) = *self { - t.to_tokens(tokens); - } - } -} - -impl ToTokens for str { - fn to_tokens(&self, tokens: &mut Tokens) { - tokens.append(tt(TokenNode::Literal(Literal::string(self)))); - } -} - -impl ToTokens for String { - fn to_tokens(&self, tokens: &mut Tokens) { - self.as_str().to_tokens(tokens); - } -} - -macro_rules! primitive { - ($($t:ident)*) => ($( - impl ToTokens for $t { - fn to_tokens(&self, tokens: &mut Tokens) { - tokens.append(tt(TokenNode::Literal(Literal::$t(*self)))); - } - } - )*) -} - -primitive! 
{ - i8 i16 i32 i64 isize - u8 u16 u32 u64 usize - f32 f64 -} - -impl ToTokens for char { - fn to_tokens(&self, tokens: &mut Tokens) { - tokens.append(tt(TokenNode::Literal(Literal::character(*self)))); - } -} - -impl ToTokens for bool { - fn to_tokens(&self, tokens: &mut Tokens) { - let word = if *self { "true" } else { "false" }; - tokens.append(tt(TokenNode::Term(Term::intern(word)))); - } -} - -impl ToTokens for Term { - fn to_tokens(&self, tokens: &mut Tokens) { - tokens.append(tt(TokenNode::Term(*self))); - } -} - -impl ToTokens for Literal { - fn to_tokens(&self, tokens: &mut Tokens) { - tokens.append(tt(TokenNode::Literal(self.clone()))); - } -} - -impl ToTokens for TokenNode { - fn to_tokens(&self, tokens: &mut Tokens) { - tokens.append(tt(self.clone())); - } -} - -impl ToTokens for TokenTree { - fn to_tokens(&self, dst: &mut Tokens) { - dst.append(self.clone()); - } -} - -impl ToTokens for TokenStream { - fn to_tokens(&self, dst: &mut Tokens) { - dst.append_all(self.clone().into_iter()); - } -} diff --git a/third_party/rust/quote-0.4.2/src/tokens.rs b/third_party/rust/quote-0.4.2/src/tokens.rs deleted file mode 100644 index 8fa2e44318e1..000000000000 --- a/third_party/rust/quote-0.4.2/src/tokens.rs +++ /dev/null @@ -1,264 +0,0 @@ -use super::ToTokens; -use std::fmt::{self, Debug, Display}; -use std::hash::{Hash, Hasher}; - -use proc_macro; -use proc_macro2::{TokenStream, TokenTree}; - -/// Tokens produced by a [`quote!`] invocation. -/// -/// [`quote!`]: macro.quote.html -#[derive(Clone, Default)] -pub struct Tokens { - tts: Vec, -} - -impl Tokens { - /// Empty tokens. - pub fn new() -> Self { - Tokens { tts: Vec::new() } - } - - /// For use by `ToTokens` implementations. - /// - /// Appends the token specified to this list of tokens. - pub fn append(&mut self, token: U) - where - U: Into, - { - self.tts.push(token.into()); - } - - /// For use by `ToTokens` implementations. - /// - /// ``` - /// # #[macro_use] extern crate quote; - /// # use quote::{Tokens, ToTokens}; - /// # fn main() { - /// struct X; - /// - /// impl ToTokens for X { - /// fn to_tokens(&self, tokens: &mut Tokens) { - /// tokens.append_all(&[true, false]); - /// } - /// } - /// - /// let tokens = quote!(#X); - /// assert_eq!(tokens.to_string(), "true false"); - /// # } - /// ``` - pub fn append_all(&mut self, iter: I) - where - T: ToTokens, - I: IntoIterator, - { - for token in iter { - token.to_tokens(self); - } - } - - /// For use by `ToTokens` implementations. - /// - /// Appends all of the items in the iterator `I`, separated by the tokens - /// `U`. - pub fn append_separated(&mut self, iter: I, op: U) - where - T: ToTokens, - I: IntoIterator, - U: ToTokens, - { - for (i, token) in iter.into_iter().enumerate() { - if i > 0 { - op.to_tokens(self); - } - token.to_tokens(self); - } - } - - /// For use by `ToTokens` implementations. - /// - /// Appends all tokens in the iterator `I`, appending `U` after each - /// element, including after the last element of the iterator. 
- pub fn append_terminated(&mut self, iter: I, term: U) - where - T: ToTokens, - I: IntoIterator, - U: ToTokens, - { - for token in iter { - token.to_tokens(self); - term.to_tokens(self); - } - } -} - -impl ToTokens for Tokens { - fn to_tokens(&self, dst: &mut Tokens) { - dst.tts.extend(self.tts.iter().cloned()); - } - - fn into_tokens(self) -> Tokens { - self - } -} - -impl From for TokenStream { - fn from(tokens: Tokens) -> TokenStream { - tokens.tts.into_iter().collect() - } -} - -impl From for proc_macro::TokenStream { - fn from(tokens: Tokens) -> proc_macro::TokenStream { - TokenStream::from(tokens).into() - } -} - -/// Allows a `Tokens` to be passed to `Tokens::append_all`. -impl IntoIterator for Tokens { - type Item = TokenTree; - type IntoIter = private::IntoIter; - - fn into_iter(self) -> Self::IntoIter { - private::into_iter(self.tts.into_iter()) - } -} - -mod private { - use std::vec; - use proc_macro2::TokenTree; - - pub struct IntoIter(vec::IntoIter); - - pub fn into_iter(tts: vec::IntoIter) -> IntoIter { - IntoIter(tts) - } - - impl Iterator for IntoIter { - type Item = TokenTree; - - fn next(&mut self) -> Option { - self.0.next() - } - } -} - -impl Display for Tokens { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - Display::fmt(&TokenStream::from(self.clone()), formatter) - } -} - -impl Debug for Tokens { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - struct DebugAsDisplay<'a, T: 'a>(&'a T); - - impl<'a, T> Debug for DebugAsDisplay<'a, T> - where - T: Display, - { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - Display::fmt(self.0, formatter) - } - } - - formatter - .debug_tuple("Tokens") - .field(&DebugAsDisplay(self)) - .finish() - } -} - -fn tt_eq(a: &TokenTree, b: &TokenTree) -> bool { - use proc_macro2::{TokenNode, Delimiter, Spacing}; - - match (&a.kind, &b.kind) { - (&TokenNode::Group(d1, ref s1), &TokenNode::Group(d2, ref s2)) => { - match (d1, d2) { - (Delimiter::Parenthesis, Delimiter::Parenthesis) - | (Delimiter::Brace, Delimiter::Brace) - | (Delimiter::Bracket, Delimiter::Bracket) - | (Delimiter::None, Delimiter::None) => {} - _ => return false, - } - - let s1 = s1.clone().into_iter(); - let mut s2 = s2.clone().into_iter(); - - for item1 in s1 { - let item2 = match s2.next() { - Some(item) => item, - None => return false, - }; - if !tt_eq(&item1, &item2) { - return false; - } - } - s2.next().is_none() - } - (&TokenNode::Op(o1, k1), &TokenNode::Op(o2, k2)) => { - o1 == o2 && match (k1, k2) { - (Spacing::Alone, Spacing::Alone) | (Spacing::Joint, Spacing::Joint) => true, - _ => false, - } - } - (&TokenNode::Literal(ref l1), &TokenNode::Literal(ref l2)) => { - l1.to_string() == l2.to_string() - } - (&TokenNode::Term(ref s1), &TokenNode::Term(ref s2)) => s1.as_str() == s2.as_str(), - _ => false, - } -} - -impl PartialEq for Tokens { - fn eq(&self, other: &Self) -> bool { - if self.tts.len() != other.tts.len() { - return false; - } - - self.tts - .iter() - .zip(other.tts.iter()) - .all(|(a, b)| tt_eq(a, b)) - } -} - -fn tt_hash(tt: &TokenTree, h: &mut H) { - use proc_macro2::{TokenNode, Delimiter, Spacing}; - - match tt.kind { - TokenNode::Group(delim, ref stream) => { - 0u8.hash(h); - match delim { - Delimiter::Parenthesis => 0u8.hash(h), - Delimiter::Brace => 1u8.hash(h), - Delimiter::Bracket => 2u8.hash(h), - Delimiter::None => 3u8.hash(h), - } - - for item in stream.clone() { - tt_hash(&item, h); - } - 0xffu8.hash(h); // terminator w/ a variant we don't normally hash - } - TokenNode::Op(op, kind) => { - 
1u8.hash(h); - op.hash(h); - match kind { - Spacing::Alone => 0u8.hash(h), - Spacing::Joint => 1u8.hash(h), - } - } - TokenNode::Literal(ref lit) => (2u8, lit.to_string()).hash(h), - TokenNode::Term(ref word) => (3u8, word.as_str()).hash(h), - } -} - -impl<'a> Hash for Tokens { - fn hash(&self, h: &mut H) { - self.tts.len().hash(h); - for tt in &self.tts { - tt_hash(&tt, h); - } - } -} diff --git a/third_party/rust/quote-0.4.2/tests/test.rs b/third_party/rust/quote-0.4.2/tests/test.rs deleted file mode 100644 index 2a48f1f59355..000000000000 --- a/third_party/rust/quote-0.4.2/tests/test.rs +++ /dev/null @@ -1,292 +0,0 @@ -#![cfg_attr(feature = "cargo-clippy", allow(blacklisted_name))] - -use std::borrow::Cow; - -extern crate proc_macro2; -#[macro_use] -extern crate quote; - -use proc_macro2::{Span, Term}; - -struct X; - -impl quote::ToTokens for X { - fn to_tokens(&self, tokens: &mut quote::Tokens) { - tokens.append(proc_macro2::TokenTree { - kind: proc_macro2::TokenNode::Term(Term::intern("X")), - span: Span::def_site(), - }); - } -} - -#[test] -fn test_quote_impl() { - let tokens = quote!( - impl<'a, T: ToTokens> ToTokens for &'a T { - fn to_tokens(&self, tokens: &mut Tokens) { - (**self).to_tokens(tokens) - } - } - ); - - let expected = concat!( - "impl < 'a , T : ToTokens > ToTokens for & 'a T { ", - "fn to_tokens ( & self , tokens : & mut Tokens ) { ", - "( * * self ) . to_tokens ( tokens ) ", - "} ", - "}" - ); - - assert_eq!(expected, tokens.to_string()); -} - -#[test] -fn test_substitution() { - let x = X; - let tokens = quote!(#x <#x> (#x) [#x] {#x}); - - let expected = "X < X > ( X ) [ X ] { X }"; - - assert_eq!(expected, tokens.to_string()); -} - -#[test] -fn test_iter() { - let primes = &[X, X, X, X]; - - assert_eq!("X X X X", quote!(#(#primes)*).to_string()); - - assert_eq!("X , X , X , X ,", quote!(#(#primes,)*).to_string()); - - assert_eq!("X , X , X , X", quote!(#(#primes),*).to_string()); -} - -#[test] -fn test_advanced() { - let generics = quote!( <'a, T> ); - - let where_clause = quote!( where T: Serialize ); - - let field_ty = quote!(String); - - let item_ty = quote!(Cow<'a, str>); - - let path = quote!(SomeTrait::serialize_with); - - let value = quote!(self.x); - - let tokens = quote! { - struct SerializeWith #generics #where_clause { - value: &'a #field_ty, - phantom: ::std::marker::PhantomData<#item_ty>, - } - - impl #generics ::serde::Serialize for SerializeWith #generics #where_clause { - fn serialize(&self, s: &mut S) -> Result<(), S::Error> - where S: ::serde::Serializer - { - #path(self.value, s) - } - } - - SerializeWith { - value: #value, - phantom: ::std::marker::PhantomData::<#item_ty>, - } - }; - - let expected = concat!( - "struct SerializeWith < 'a , T > where T : Serialize { ", - "value : & 'a String , ", - "phantom : :: std :: marker :: PhantomData < Cow < 'a , str > > , ", - "} ", - "impl < 'a , T > :: serde :: Serialize for SerializeWith < 'a , T > where T : Serialize { ", - "fn serialize < S > ( & self , s : & mut S ) -> Result < ( ) , S :: Error > ", - "where S : :: serde :: Serializer ", - "{ ", - "SomeTrait :: serialize_with ( self . value , s ) ", - "} ", - "} ", - "SerializeWith { ", - "value : self . 
x , ", - "phantom : :: std :: marker :: PhantomData :: < Cow < 'a , str > > , ", - "}" - ); - - assert_eq!(expected, tokens.to_string()); -} - -#[test] -fn test_integer() { - let ii8 = -1i8; - let ii16 = -1i16; - let ii32 = -1i32; - let ii64 = -1i64; - let iisize = -1isize; - let uu8 = 1u8; - let uu16 = 1u16; - let uu32 = 1u32; - let uu64 = 1u64; - let uusize = 1usize; - - let tokens = quote! { - #ii8 #ii16 #ii32 #ii64 #iisize - #uu8 #uu16 #uu32 #uu64 #uusize - }; - let expected = "-1i8 -1i16 -1i32 -1i64 -1isize 1u8 1u16 1u32 1u64 1usize"; - assert_eq!(expected, tokens.to_string()); -} - -#[test] -fn test_floating() { - let e32 = 2.345f32; - - let e64 = 2.345f64; - - let tokens = quote! { - #e32 - #e64 - }; - let expected = concat!("2.345f32 2.345f64"); - assert_eq!(expected, tokens.to_string()); -} - -#[test] -fn test_char() { - let zero = '\0'; - let pound = '#'; - let quote = '"'; - let apost = '\''; - let newline = '\n'; - let heart = '\u{2764}'; - - let tokens = quote! { - #zero #pound #quote #apost #newline #heart - }; - let expected = "'\\u{0}' '#' '\\\"' '\\'' '\\n' '\\u{2764}'"; - assert_eq!(expected, tokens.to_string()); -} - -#[test] -fn test_str() { - let s = "\0 a 'b \" c"; - let tokens = quote!(#s); - let expected = "\"\\u{0} a \\'b \\\" c\""; - assert_eq!(expected, tokens.to_string()); -} - -#[test] -fn test_string() { - let s = "\0 a 'b \" c".to_string(); - let tokens = quote!(#s); - let expected = "\"\\u{0} a \\'b \\\" c\""; - assert_eq!(expected, tokens.to_string()); -} - -#[test] -fn test_ident() { - let foo = Term::intern("Foo"); - let bar = Term::intern(&format!("Bar{}", 7)); - let tokens = quote!(struct #foo; enum #bar {}); - let expected = "struct Foo ; enum Bar7 { }"; - assert_eq!(expected, tokens.to_string()); -} - -#[test] -fn test_duplicate() { - let ch = 'x'; - - let tokens = quote!(#ch #ch); - - let expected = "'x' 'x'"; - assert_eq!(expected, tokens.to_string()); -} - -#[test] -fn test_fancy_repetition() { - let foo = vec!["a", "b"]; - let bar = vec![true, false]; - - let tokens = quote! { - #(#foo: #bar),* - }; - - let expected = r#""a" : true , "b" : false"#; - assert_eq!(expected, tokens.to_string()); -} - -#[test] -fn test_nested_fancy_repetition() { - let nested = vec![vec!['a', 'b', 'c'], vec!['x', 'y', 'z']]; - - let tokens = quote! { - #( - #(#nested)* - ),* - }; - - let expected = "'a' 'b' 'c' , 'x' 'y' 'z'"; - assert_eq!(expected, tokens.to_string()); -} - -#[test] -fn test_empty_repetition() { - let tokens = quote!(#(a b)* #(c d),*); - assert_eq!("", tokens.to_string()); -} - -#[test] -fn test_variable_name_conflict() { - // The implementation of `#(...),*` uses the variable `_i` but it should be - // fine, if a little confusing when debugging. - let _i = vec!['a', 'b']; - let tokens = quote! { #(#_i),* }; - let expected = "'a' , 'b'"; - assert_eq!(expected, tokens.to_string()); -} - -#[test] -fn test_empty_quote() { - let tokens = quote!(); - assert_eq!("", tokens.to_string()); -} - -#[test] -fn test_box_str() { - let b = "str".to_owned().into_boxed_str(); - let tokens = quote! { #b }; - assert_eq!("\"str\"", tokens.to_string()); -} - -#[test] -fn test_cow() { - let owned: Cow = Cow::Owned(Term::intern("owned")); - - let ident = Term::intern("borrowed"); - let borrowed = Cow::Borrowed(&ident); - - let tokens = quote! 
{ #owned #borrowed }; - assert_eq!("owned borrowed", tokens.to_string()); -} - -#[test] -fn test_closure() { - fn field_i(i: usize) -> Term { - Term::intern(&format!("__field{}", i)) - } - - let fields = (0usize..3) - .map(field_i as fn(_) -> _) - .map(|var| quote! { #var }); - - let tokens = quote! { #(#fields)* }; - assert_eq!("__field0 __field1 __field2", tokens.to_string()); -} - -#[test] -fn test_append_tokens() { - let mut a = quote!(a); - let b = quote!(b); - a.append_all(b); - assert_eq!("a b", a.to_string()); -} diff --git a/third_party/rust/syn-0.12.12/.cargo-checksum.json b/third_party/rust/syn-0.12.12/.cargo-checksum.json deleted file mode 100644 index 1db087bec244..000000000000 --- a/third_party/rust/syn-0.12.12/.cargo-checksum.json +++ /dev/null @@ -1 +0,0 @@ -{"files":{"Cargo.toml":"8c4299d297c1f5d0f6afcfedde821e5cdf1893607290af92aebd9b4b86c48386","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"f033c371492a4769d377a8bf1a139adc7bf94ea00595b867a3e234eeab994c8c","README.md":"632c404dd064731af5fd71e643fbba83ced6ac9198497b672139bada194cf41b","src/attr.rs":"35cc8556dd26662d79c3059315de5a5758ce18b697a8ce32361b09ce1a820a0e","src/buffer.rs":"81ca1506c599cfba1136d9fec2d83137f4a4085b341658a4155d978f31b64552","src/data.rs":"d052602339af37df6454a07fd1163e382571d8665529450339cfe55773e308ae","src/derive.rs":"eed8e88cd763e852782b10d2366a81e52d3ec0777676eaa94827ea3b46151134","src/error.rs":"db9f0648e7399cfcaef9c431b452b5012a6056e75c586212e2cfdb1e18b8c69a","src/expr.rs":"274d9f462225432a3ff0c611a1db44b0b3cc6a673b8c8df2a53bd00cf3a5be95","src/file.rs":"43d5b12828a5c2a0bbef7baa56b0b9638575d73d84e0babaf0e85cdc2d573f16","src/gen/fold.rs":"3bf425ce1a461ce527799e2624a311081e35adc133de3acd5195638024a6872f","src/gen/visit.rs":"6f58e84d9b304d8e8493c01ac71d1d36e61c9e18355d60d06e890b94cdb513d0","src/gen/visit_mut.rs":"537dc348ce5a7353d0d0456138043e4ce916c2ca57c90da10500856ac6393dfe","src/gen_helper.rs":"2be46ff201fd53908350bde4d6c8b7dc427dbd156fa538869a9ccbdf6279af04","src/generics.rs":"5dc13558159085b4743d7a0a65f6cfda735c2a23071c588a7dc215834e0c6b44","src/ident.rs":"ea657c29ee5d483ac59664f808fae3e522e18578d86764b7efcceafc6d8236ca","src/item.rs":"f000f831e6f24de60a7ff7e6164f6adc93ae0fb5d0dc5efc6488bb9519f43dd8","src/lib.rs":"dd1212af93788c6a01193d67185b4258a92c7b6c3f34af395f3ed08174117fdd","src/lifetime.rs":"2c92822a1dfc72738b64d787ef9f7ceb1251e689cacb9e780dc784f99b2d9321","src/lit.rs":"7d0eea1b0057d0ae1bce21815b8fe0ee97b0cd287a8d10e4ff74419b7d6fbefe","src/mac.rs":"ec945e4926de028c153c87f1643e8910e53738d8a4b519b74254479e61acabec","src/macros.rs":"b975b110f9b904e5d82bd0222f7cd0398825fcde146c2b98b581daccf2eb8341","src/op.rs":"b9f9ff8027fc65403858d8fed7d0ac3c919d70f97f830fd7532fb22e1dea993b","src/parse_quote.rs":"b0221332823786d664de3e4976cdd3962ca5fa3c1558bb57f9c8810348ded0b0","src/parsers.rs":"9ef1c5e7760a7a4132fe6834dda5871ff9b6513f51243008f710ee4fe973529d","src/path.rs":"01455551da12e8782b4c97ccb8c670d81ea1db1264393a73577535141d5266a8","src/punctuated.rs":"87312dc0d057935774ac64e2c2fdfdae7f693b128c53415d76d78ca377098ced","src/spanned.rs":"9343c288a6d07a0d306c9bd2f332b8eb91fb657f88ec8fbb336ad1b667b583dd","src/synom.rs":"74d8c022ef216f798268999c9c934dca05ad75914d60fc2b445d3a7201826f0d","src/token.rs":"4f16136315cc9ff1e707e80bde148d9471f7e4708c30e07b9c1d3a0bc98805fd","src/tt.rs":"2e10762c00bce15a8e22125bba26c856112e701a82f849aa0d46701e6228823f","src/ty.rs":"0f73cc4626171be4ab2a1de9b2c0c94229055a762ba50ba9ba41b29908447867"},"package":"9e1c669ed757c0ebd04337f
6a5bb972d05e0c08fe2540dd3ee3dd9e4daf1604c"} \ No newline at end of file diff --git a/third_party/rust/syn-0.12.12/Cargo.toml b/third_party/rust/syn-0.12.12/Cargo.toml deleted file mode 100644 index 4dbdedb961d1..000000000000 --- a/third_party/rust/syn-0.12.12/Cargo.toml +++ /dev/null @@ -1,55 +0,0 @@ -# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO -# -# When uploading crates to the registry Cargo will automatically -# "normalize" Cargo.toml files for maximal compatibility -# with all versions of Cargo and also rewrite `path` dependencies -# to registry (e.g. crates.io) dependencies -# -# If you believe there's an error in this file please file an -# issue against the rust-lang/cargo repository. If you're -# editing this file be aware that the upstream Cargo.toml -# will likely look very different (and much more reasonable) - -[package] -name = "syn" -version = "0.12.12" -authors = ["David Tolnay "] -include = ["/Cargo.toml", "/src/**/*.rs", "/README.md", "/LICENSE-APACHE", "/LICENSE-MIT"] -description = "Nom parser for Rust source code" -documentation = "https://docs.rs/syn" -categories = ["development-tools::procedural-macro-helpers"] -license = "MIT/Apache-2.0" -repository = "https://github.com/dtolnay/syn" -[package.metadata.docs.rs] -all-features = true - -[[example]] -name = "dump-syntax" -path = "examples/dump-syntax/main.rs" -required-features = ["full", "parsing", "extra-traits"] -[dependencies.proc-macro2] -version = "0.2" - -[dependencies.quote] -version = "0.4" -optional = true - -[dependencies.unicode-xid] -version = "0.1" -[dev-dependencies.rayon] -version = "0.9" - -[dev-dependencies.walkdir] -version = "1.0.1" - -[features] -clone-impls = [] -default = ["derive", "parsing", "printing", "clone-impls"] -derive = [] -extra-traits = [] -fold = [] -full = [] -parsing = [] -printing = ["quote"] -visit = [] -visit-mut = [] diff --git a/third_party/rust/syn-0.12.12/LICENSE-APACHE b/third_party/rust/syn-0.12.12/LICENSE-APACHE deleted file mode 100644 index 16fe87b06e80..000000000000 --- a/third_party/rust/syn-0.12.12/LICENSE-APACHE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - -1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. 
- - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - -2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - -3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - -4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - -5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - -6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - -7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - -8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - -9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - -END OF TERMS AND CONDITIONS - -APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - -Copyright [yyyy] [name of copyright owner] - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. diff --git a/third_party/rust/syn-0.12.12/LICENSE-MIT b/third_party/rust/syn-0.12.12/LICENSE-MIT deleted file mode 100644 index 5767dea4b109..000000000000 --- a/third_party/rust/syn-0.12.12/LICENSE-MIT +++ /dev/null @@ -1,25 +0,0 @@ -Copyright (c) 2018 Syn Developers - -Permission is hereby granted, free of charge, to any -person obtaining a copy of this software and associated -documentation files (the "Software"), to deal in the -Software without restriction, including without -limitation the rights to use, copy, modify, merge, -publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software -is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice -shall be included in all copies or substantial portions -of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. diff --git a/third_party/rust/syn-0.12.12/README.md b/third_party/rust/syn-0.12.12/README.md deleted file mode 100644 index 6cf18befeb8d..000000000000 --- a/third_party/rust/syn-0.12.12/README.md +++ /dev/null @@ -1,277 +0,0 @@ -Nom parser for Rust source code -=============================== - -[![Build Status](https://api.travis-ci.org/dtolnay/syn.svg?branch=master)](https://travis-ci.org/dtolnay/syn) -[![Latest Version](https://img.shields.io/crates/v/syn.svg)](https://crates.io/crates/syn) -[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/syn/0.12/syn/) - -Syn is a parsing library for parsing a stream of Rust tokens into a syntax tree -of Rust source code. - -Currently this library is geared toward the [custom derive] use case but -contains some APIs that may be useful for Rust procedural macros more generally. - -[custom derive]: https://github.com/rust-lang/rfcs/blob/master/text/1681-macros-1.1.md - -- **Data structures** — Syn provides a complete syntax tree that can represent - any valid Rust source code. The syntax tree is rooted at [`syn::File`] which - represents a full source file, but there are other entry points that may be - useful to procedural macros including [`syn::Item`], [`syn::Expr`] and - [`syn::Type`]. - -- **Custom derives** — Of particular interest to custom derives is - [`syn::DeriveInput`] which is any of the three legal input items to a derive - macro. An example below shows using this type in a library that can derive - implementations of a trait of your own. - -- **Parser combinators** — Parsing in Syn is built on a suite of public parser - combinator macros that you can use for parsing any token-based syntax you - dream up within a `functionlike!(...)` procedural macro. Every syntax tree - node defined by Syn is individually parsable and may be used as a building - block for custom syntaxes, or you may do it all yourself working from the most - primitive tokens. - -- **Location information** — Every token parsed by Syn is associated with a - `Span` that tracks line and column information back to the source of that - token. These spans allow a procedural macro to display detailed error messages - pointing to all the right places in the user's code. There is an example of - this below. - -- **Feature flags** — Functionality is aggressively feature gated so your - procedural macros enable only what they need, and do not pay in compile time - for all the rest. - -[`syn::File`]: https://docs.rs/syn/0.12/syn/struct.File.html -[`syn::Item`]: https://docs.rs/syn/0.12/syn/enum.Item.html -[`syn::Expr`]: https://docs.rs/syn/0.12/syn/enum.Expr.html -[`syn::Type`]: https://docs.rs/syn/0.12/syn/enum.Type.html -[`syn::DeriveInput`]: https://docs.rs/syn/0.12/syn/struct.DeriveInput.html - -If you get stuck with anything involving procedural macros in Rust I am happy to -provide help even if the issue is not related to Syn. Please file a ticket in -this repo. 
- -*Version requirement: Syn supports any compiler version back to Rust's very -first support for procedural macros in Rust 1.15.0. Some features especially -around error reporting are only available in newer compilers or on the nightly -channel.* - -## Example of a custom derive - -The canonical custom derive using Syn looks like this. We write an ordinary Rust -function tagged with a `proc_macro_derive` attribute and the name of the trait -we are deriving. Any time that derive appears in the user's code, the Rust -compiler passes their data structure as tokens into our macro. We get to execute -arbitrary Rust code to figure out what to do with those tokens, then hand some -tokens back to the compiler to compile into the user's crate. - -[`TokenStream`]: https://doc.rust-lang.org/proc_macro/struct.TokenStream.html - -```toml -[dependencies] -syn = "0.12" -quote = "0.4" - -[lib] -proc-macro = true -``` - -```rust -extern crate proc_macro; -extern crate syn; - -#[macro_use] -extern crate quote; - -use proc_macro::TokenStream; -use syn::DeriveInput; - -#[proc_macro_derive(MyMacro)] -pub fn my_macro(input: TokenStream) -> TokenStream { - // Parse the input tokens into a syntax tree - let input: DeriveInput = syn::parse(input).unwrap(); - - // Build the output, possibly using quasi-quotation - let expanded = quote! { - // ... - }; - - // Hand the output tokens back to the compiler - expanded.into() -} -``` - -The [`heapsize`] example directory shows a complete working Macros 1.1 -implementation of a custom derive. It works on any Rust compiler \>=1.15.0. The -example derives a `HeapSize` trait which computes an estimate of the amount of -heap memory owned by a value. - -[`heapsize`]: examples/heapsize - -```rust -pub trait HeapSize { - /// Total number of bytes of heap memory owned by `self`. - fn heap_size_of_children(&self) -> usize; -} -``` - -The custom derive allows users to write `#[derive(HeapSize)]` on data structures -in their program. - -```rust -#[derive(HeapSize)] -struct Demo<'a, T: ?Sized> { - a: Box, - b: u8, - c: &'a str, - d: String, -} -``` - -## Spans and error reporting - -The [`heapsize2`] example directory is an extension of the `heapsize` example -that demonstrates some of the hygiene and error reporting properties of Macros -2.0. This example currently requires a nightly Rust compiler \>=1.24.0-nightly -but we are working to stabilize all of the APIs involved. - -[`heapsize2`]: examples/heapsize2 - -The token-based procedural macro API provides great control over where the -compiler's error messages are displayed in user code. Consider the error the -user sees if one of their field types does not implement `HeapSize`. - -```rust -#[derive(HeapSize)] -struct Broken { - ok: String, - bad: std::thread::Thread, -} -``` - -In the Macros 1.1 string-based procedural macro world, the resulting error would -point unhelpfully to the invocation of the derive macro and not to the actual -problematic field. - -``` -error[E0599]: no method named `heap_size_of_children` found for type `std::thread::Thread` in the current scope - --> src/main.rs:4:10 - | -4 | #[derive(HeapSize)] - | ^^^^^^^^ -``` - -By tracking span information all the way through the expansion of a procedural -macro as shown in the `heapsize2` example, token-based macros in Syn are able to -trigger errors that directly pinpoint the source of the problem. 
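Before the compiler output reproduced next, here is a hedged sketch of the span-forwarding technique just described, written against newer `syn`/`quote` releases (`syn::spanned::Spanned`, `quote_spanned!`) rather than the vendored 0.12/0.4 crates: each generated call is spanned with the field type's span, so a missing `HeapSize` impl is reported on that field rather than on the derive invocation. The `sum_heap_size` helper is illustrative, not taken from the heapsize2 example verbatim.

```rust
// Hedged sketch against newer syn/quote (not the vendored 0.12/0.4 API):
// spanning each generated call with the field type's span makes the
// "trait bound not satisfied" error point at that field.
use proc_macro2::TokenStream;
use quote::quote_spanned;
use syn::{spanned::Spanned, Data, DeriveInput, Fields};

fn sum_heap_size(input: &DeriveInput) -> TokenStream {
    let mut terms = Vec::new();
    if let Data::Struct(data) = &input.data {
        if let Fields::Named(fields) = &data.fields {
            for field in &fields.named {
                let name = &field.ident;
                // The span of the field's *type* is what the error points at.
                terms.push(quote_spanned! {field.ty.span()=>
                    HeapSize::heap_size_of_children(&self.#name)
                });
            }
        }
    }
    quote_spanned! {input.span()=> 0 #(+ #terms)* }
}
```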
- -``` -error[E0277]: the trait bound `std::thread::Thread: HeapSize` is not satisfied - --> src/main.rs:7:5 - | -7 | bad: std::thread::Thread, - | ^^^^^^^^^^^^^^^^^^^^^^^^ the trait `HeapSize` is not implemented for `std::thread::Thread` -``` - -## Parsing a custom syntax using combinators - -The [`lazy-static`] example directory shows the implementation of a -`functionlike!(...)` procedural macro in which the input tokens are parsed using -[`nom`]-style parser combinators. - -[`lazy-static`]: examples/lazy-static -[`nom`]: https://github.com/Geal/nom - -The example reimplements the popular `lazy_static` crate from crates.io as a -procedural macro. - -``` -lazy_static! { - static ref USERNAME: Regex = Regex::new("^[a-z0-9_-]{3,16}$").unwrap(); -} -``` - -The implementation shows how to trigger custom warnings and error messages on -the macro input. - -``` -warning: come on, pick a more creative name - --> src/main.rs:10:16 - | -10 | static ref FOO: String = "lazy_static".to_owned(); - | ^^^ -``` - -## Debugging - -When developing a procedural macro it can be helpful to look at what the -generated code looks like. Use `cargo rustc -- -Zunstable-options ---pretty=expanded` or the [`cargo expand`] subcommand. - -[`cargo expand`]: https://github.com/dtolnay/cargo-expand - -To show the expanded code for some crate that uses your procedural macro, run -`cargo expand` from that crate. To show the expanded code for one of your own -test cases, run `cargo expand --test the_test_case` where the last argument is -the name of the test file without the `.rs` extension. - -This write-up by Brandon W Maister discusses debugging in more detail: -[Debugging Rust's new Custom Derive system][debugging]. - -[debugging]: https://quodlibetor.github.io/posts/debugging-rusts-new-custom-derive-system/ - -## Optional features - -Syn puts a lot of functionality behind optional features in order to optimize -compile time for the most common use cases. The following features are -available. - -- **`derive`** *(enabled by default)* — Data structures for representing the - possible input to a custom derive, including structs and enums and types. -- **`full`** — Data structures for representing the syntax tree of all valid - Rust source code, including items and expressions. -- **`parsing`** *(enabled by default)* — Ability to parse input tokens into a - syntax tree node of a chosen type. -- **`printing`** *(enabled by default)* — Ability to print a syntax tree node as - tokens of Rust source code. -- **`visit`** — Trait for traversing a syntax tree. -- **`visit-mut`** — Trait for traversing and mutating in place a syntax tree. -- **`fold`** — Trait for transforming an owned syntax tree. -- **`clone-impls`** *(enabled by default)* — Clone impls for all syntax tree - types. -- **`extra-traits`** — Debug, Eq, PartialEq, Hash impls for all syntax tree - types. - -## Nightly features - -By default Syn uses the [`proc-macro2`] crate to emulate the nightly compiler's -procedural macro API in a stable way that works all the way back to Rust 1.15.0. -This shim makes it possible to write code without regard for whether the current -compiler version supports the features we use. - -[`proc-macro2`]: https://github.com/alexcrichton/proc-macro2 - -On a nightly compiler, to eliminate the stable shim and use the compiler's -`proc-macro` directly, add `proc-macro2` to your Cargo.toml and set its -`"nightly"` feature which bypasses the stable shim. 
- -```toml -[dependencies] -syn = "0.12" -proc-macro2 = { version = "0.2", features = ["nightly"] } -``` - -## License - -Licensed under either of - - * Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0) - * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) - -at your option. - -### Contribution - -Unless you explicitly state otherwise, any contribution intentionally submitted -for inclusion in this crate by you, as defined in the Apache-2.0 license, shall -be dual licensed as above, without any additional terms or conditions. diff --git a/third_party/rust/syn-0.12.12/src/attr.rs b/third_party/rust/syn-0.12.12/src/attr.rs deleted file mode 100644 index 57accef6de30..000000000000 --- a/third_party/rust/syn-0.12.12/src/attr.rs +++ /dev/null @@ -1,571 +0,0 @@ -// Copyright 2018 Syn Developers -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -use super::*; -use punctuated::Punctuated; - -use std::iter; - -use proc_macro2::{Delimiter, Spacing, TokenNode, TokenStream, TokenTree}; - -#[cfg(feature = "extra-traits")] -use std::hash::{Hash, Hasher}; -#[cfg(feature = "extra-traits")] -use tt::TokenStreamHelper; - -ast_struct! { - /// An attribute like `#[repr(transparent)]`. - /// - /// *This type is available if Syn is built with the `"derive"` or `"full"` - /// feature.* - /// - /// # Syntax - /// - /// Rust has six types of attributes. - /// - /// - Outer attributes like `#[repr(transparent)]`. These appear outside or - /// in front of the item they describe. - /// - Inner attributes like `#![feature(proc_macro)]`. These appear inside - /// of the item they describe, usually a module. - /// - Outer doc comments like `/// # Example`. - /// - Inner doc comments like `//! Please file an issue`. - /// - Outer block comments `/** # Example */`. - /// - Inner block comments `/*! Please file an issue */`. - /// - /// The `style` field of type `AttrStyle` distinguishes whether an attribute - /// is outer or inner. Doc comments and block comments are promoted to - /// attributes that have `is_sugared_doc` set to true, as this is how they - /// are processed by the compiler and by `macro_rules!` macros. - /// - /// The `path` field gives the possibly colon-delimited path against which - /// the attribute is resolved. It is equal to `"doc"` for desugared doc - /// comments. The `tts` field contains the rest of the attribute body as - /// tokens. - /// - /// ```text - /// #[derive(Copy)] #[crate::precondition x < 5] - /// ^^^^^^~~~~~~ ^^^^^^^^^^^^^^^^^^^ ~~~~~ - /// path tts path tts - /// ``` - /// - /// Use the [`interpret_meta`] method to try parsing the tokens of an - /// attribute into the structured representation that is used by convention - /// across most Rust libraries. 
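To make the `interpret_meta` doc comment above concrete, here is a hedged usage sketch following the 0.12-era API documented in this file (later syn releases replace `interpret_meta` with a fallible `parse_meta`); the `idents_in_list` helper is illustrative, not part of syn.

```rust
// Hedged sketch against the syn 0.12-era API documented above
// (newer syn replaces `interpret_meta` with `parse_meta`).
use syn::{Attribute, Ident, Meta, NestedMeta};

/// Collect the identifiers listed in e.g. `#[my_attr(a, b, c)]`.
/// (`idents_in_list` is an illustrative helper, not part of syn.)
fn idents_in_list(attr: &Attribute) -> Vec<Ident> {
    let mut out = Vec::new();
    if let Some(Meta::List(list)) = attr.interpret_meta() {
        for nested in &list.nested {
            if let NestedMeta::Meta(Meta::Word(ref ident)) = *nested {
                out.push(*ident);
            }
        }
    }
    out
}
```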
- /// - /// [`interpret_meta`]: #method.interpret_meta - pub struct Attribute #manual_extra_traits { - pub pound_token: Token![#], - pub style: AttrStyle, - pub bracket_token: token::Bracket, - pub path: Path, - pub tts: TokenStream, - pub is_sugared_doc: bool, - } -} - -#[cfg(feature = "extra-traits")] -impl Eq for Attribute {} - -#[cfg(feature = "extra-traits")] -impl PartialEq for Attribute { - fn eq(&self, other: &Self) -> bool { - self.style == other.style && self.pound_token == other.pound_token - && self.bracket_token == other.bracket_token && self.path == other.path - && TokenStreamHelper(&self.tts) == TokenStreamHelper(&other.tts) - && self.is_sugared_doc == other.is_sugared_doc - } -} - -#[cfg(feature = "extra-traits")] -impl Hash for Attribute { - fn hash(&self, state: &mut H) - where - H: Hasher, - { - self.style.hash(state); - self.pound_token.hash(state); - self.bracket_token.hash(state); - self.path.hash(state); - TokenStreamHelper(&self.tts).hash(state); - self.is_sugared_doc.hash(state); - } -} - -impl Attribute { - /// Parses the tokens after the path as a [`Meta`](enum.Meta.html) if - /// possible. - pub fn interpret_meta(&self) -> Option { - let name = if self.path.segments.len() == 1 { - &self.path.segments.first().unwrap().value().ident - } else { - return None; - }; - - if self.tts.is_empty() { - return Some(Meta::Word(*name)); - } - - let tts = self.tts.clone().into_iter().collect::>(); - - if tts.len() == 1 { - if let TokenNode::Group(Delimiter::Parenthesis, ref ts) = tts[0].kind { - let tokens = ts.clone().into_iter().collect::>(); - if let Some(nested_meta_items) = list_of_nested_meta_items_from_tokens(&tokens) { - return Some(Meta::List(MetaList { - paren_token: token::Paren(tts[0].span), - ident: *name, - nested: nested_meta_items, - })); - } - } - } - - if tts.len() == 2 { - if let TokenNode::Op('=', Spacing::Alone) = tts[0].kind { - if let TokenNode::Literal(ref lit) = tts[1].kind { - if !lit.to_string().starts_with('/') { - return Some(Meta::NameValue(MetaNameValue { - ident: *name, - eq_token: Token![=]([tts[0].span]), - lit: Lit::new(lit.clone(), tts[1].span), - })); - } - } else if let TokenNode::Term(ref term) = tts[1].kind { - match term.as_str() { - v @ "true" | v @ "false" => { - return Some(Meta::NameValue(MetaNameValue { - ident: *name, - eq_token: Token![=]([tts[0].span]), - lit: Lit::Bool(LitBool { value: v == "true", span: tts[1].span }), - })); - }, - _ => {} - } - } - } - } - - None - } -} - -fn nested_meta_item_from_tokens(tts: &[TokenTree]) -> Option<(NestedMeta, &[TokenTree])> { - assert!(!tts.is_empty()); - - match tts[0].kind { - TokenNode::Literal(ref lit) => { - if lit.to_string().starts_with('/') { - None - } else { - let lit = Lit::new(lit.clone(), tts[0].span); - Some((NestedMeta::Literal(lit), &tts[1..])) - } - } - - TokenNode::Term(sym) => { - let ident = Ident::new(sym.as_str(), tts[0].span); - if tts.len() >= 3 { - if let TokenNode::Op('=', Spacing::Alone) = tts[1].kind { - if let TokenNode::Literal(ref lit) = tts[2].kind { - if !lit.to_string().starts_with('/') { - let pair = MetaNameValue { - ident: Ident::new(sym.as_str(), tts[0].span), - eq_token: Token![=]([tts[1].span]), - lit: Lit::new(lit.clone(), tts[2].span), - }; - return Some((Meta::NameValue(pair).into(), &tts[3..])); - } - } else if let TokenNode::Term(ref term) = tts[2].kind { - match term.as_str() { - v @ "true" | v @ "false" => { - let pair = MetaNameValue { - ident: Ident::new(sym.as_str(), tts[0].span), - eq_token: Token![=]([tts[1].span]), - lit: 
Lit::Bool(LitBool { value: v == "true", span: tts[2].span }), - }; - return Some((Meta::NameValue(pair).into(), &tts[3..])); - }, - _ => {} - } - } - } - } - - if tts.len() >= 2 { - if let TokenNode::Group(Delimiter::Parenthesis, ref inner_tts) = tts[1].kind { - let inner_tts = inner_tts.clone().into_iter().collect::>(); - return match list_of_nested_meta_items_from_tokens(&inner_tts) { - Some(nested_meta_items) => { - let list = MetaList { - ident: ident, - paren_token: token::Paren(tts[1].span), - nested: nested_meta_items, - }; - Some((Meta::List(list).into(), &tts[2..])) - } - - None => None, - }; - } - } - - Some((Meta::Word(ident).into(), &tts[1..])) - } - - _ => None, - } -} - -fn list_of_nested_meta_items_from_tokens( - mut tts: &[TokenTree], -) -> Option> { - let mut nested_meta_items = Punctuated::new(); - let mut first = true; - - while !tts.is_empty() { - let prev_comma = if first { - first = false; - None - } else if let TokenNode::Op(',', Spacing::Alone) = tts[0].kind { - let tok = Token![,]([tts[0].span]); - tts = &tts[1..]; - if tts.is_empty() { - break; - } - Some(tok) - } else { - return None; - }; - let (nested, rest) = match nested_meta_item_from_tokens(tts) { - Some(pair) => pair, - None => return None, - }; - if let Some(comma) = prev_comma { - nested_meta_items.push_punct(comma); - } - nested_meta_items.push_value(nested); - tts = rest; - } - - Some(nested_meta_items) -} - -ast_enum! { - /// Distinguishes between attributes that decorate an item and attributes - /// that are contained within an item. - /// - /// *This type is available if Syn is built with the `"derive"` or `"full"` - /// feature.* - /// - /// # Outer attributes - /// - /// - `#[repr(transparent)]` - /// - `/// # Example` - /// - `/** Please file an issue */` - /// - /// # Inner attributes - /// - /// - `#![feature(proc_macro)]` - /// - `//! # Example` - /// - `/*! Please file an issue */` - #[cfg_attr(feature = "clone-impls", derive(Copy))] - pub enum AttrStyle { - Outer, - Inner(Token![!]), - } -} - -ast_enum_of_structs! { - /// Content of a compile-time structured attribute. - /// - /// *This type is available if Syn is built with the `"derive"` or `"full"` - /// feature.* - /// - /// ## Word - /// - /// A meta word is like the `test` in `#[test]`. - /// - /// ## List - /// - /// A meta list is like the `derive(Copy)` in `#[derive(Copy)]`. - /// - /// ## NameValue - /// - /// A name-value meta is like the `path = "..."` in `#[path = - /// "sys/windows.rs"]`. - /// - /// # Syntax tree enum - /// - /// This type is a [syntax tree enum]. - /// - /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums - pub enum Meta { - pub Word(Ident), - /// A structured list within an attribute, like `derive(Copy, Clone)`. - /// - /// *This type is available if Syn is built with the `"derive"` or - /// `"full"` feature.* - pub List(MetaList { - pub ident: Ident, - pub paren_token: token::Paren, - pub nested: Punctuated, - }), - /// A name-value pair within an attribute, like `feature = "nightly"`. - /// - /// *This type is available if Syn is built with the `"derive"` or - /// `"full"` feature.* - pub NameValue(MetaNameValue { - pub ident: Ident, - pub eq_token: Token![=], - pub lit: Lit, - }), - } -} - -impl Meta { - /// Returns the identifier that begins this structured meta item. - /// - /// For example this would return the `test` in `#[test]`, the `derive` in - /// `#[derive(Copy)]`, and the `path` in `#[path = "sys/windows.rs"]`. 
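Just before the `name` accessor whose doc comment appears above, a hedged usage sketch of the dispatch it enables: scan a slice of attributes for, say, `#[path = "sys/windows.rs"]` and pull out the string value. The `find_attr_value` helper is illustrative, not part of syn.

```rust
// Hedged sketch against the syn 0.12-era API in this file; `find_attr_value`
// is an illustrative helper. `Meta::name()` yields the leading identifier
// (`doc`, `derive`, `path`, ...), which can be compared against a &str.
use syn::{Attribute, Lit, Meta};

fn find_attr_value(attrs: &[Attribute], key: &str) -> Option<String> {
    for attr in attrs {
        if let Some(meta) = attr.interpret_meta() {
            if meta.name() == key {
                if let Meta::NameValue(nv) = meta {
                    if let Lit::Str(s) = nv.lit {
                        return Some(s.value());
                    }
                }
            }
        }
    }
    None
}
```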
- pub fn name(&self) -> Ident { - match *self { - Meta::Word(ref meta) => *meta, - Meta::List(ref meta) => meta.ident, - Meta::NameValue(ref meta) => meta.ident, - } - } -} - -ast_enum_of_structs! { - /// Element of a compile-time attribute list. - /// - /// *This type is available if Syn is built with the `"derive"` or `"full"` - /// feature.* - pub enum NestedMeta { - /// A structured meta item, like the `Copy` in `#[derive(Copy)]` which - /// would be a nested `Meta::Word`. - pub Meta(Meta), - - /// A Rust literal, like the `"new_name"` in `#[rename("new_name")]`. - pub Literal(Lit), - } -} - -pub trait FilterAttrs<'a> { - type Ret: Iterator; - - fn outer(self) -> Self::Ret; - fn inner(self) -> Self::Ret; -} - -impl<'a, T> FilterAttrs<'a> for T -where - T: IntoIterator, -{ - type Ret = iter::Filter bool>; - - fn outer(self) -> Self::Ret { - fn is_outer(attr: &&Attribute) -> bool { - match attr.style { - AttrStyle::Outer => true, - _ => false, - } - } - self.into_iter().filter(is_outer) - } - - fn inner(self) -> Self::Ret { - fn is_inner(attr: &&Attribute) -> bool { - match attr.style { - AttrStyle::Inner(_) => true, - _ => false, - } - } - self.into_iter().filter(is_inner) - } -} - -#[cfg(feature = "parsing")] -pub mod parsing { - use super::*; - use buffer::Cursor; - use parse_error; - use synom::PResult; - use proc_macro2::{Literal, Spacing, Span, TokenNode, TokenTree}; - - fn eq(span: Span) -> TokenTree { - TokenTree { - span: span, - kind: TokenNode::Op('=', Spacing::Alone), - } - } - - impl Attribute { - named!(pub parse_inner -> Self, alt!( - do_parse!( - pound: punct!(#) >> - bang: punct!(!) >> - path_and_tts: brackets!(tuple!( - call!(Path::parse_mod_style), - syn!(TokenStream) - )) >> - ({ - let (bracket, (path, tts)) = path_and_tts; - - Attribute { - style: AttrStyle::Inner(bang), - path: path, - tts: tts, - is_sugared_doc: false, - pound_token: pound, - bracket_token: bracket, - } - }) - ) - | - map!( - call!(lit_doc_comment, Comment::Inner), - |lit| { - let span = lit.span; - Attribute { - style: AttrStyle::Inner(::new(span)), - path: Ident::new("doc", span).into(), - tts: vec![ - eq(span), - lit, - ].into_iter().collect(), - is_sugared_doc: true, - pound_token: ::new(span), - bracket_token: token::Bracket(span), - } - } - ) - )); - - named!(pub parse_outer -> Self, alt!( - do_parse!( - pound: punct!(#) >> - path_and_tts: brackets!(tuple!( - call!(Path::parse_mod_style), - syn!(TokenStream) - )) >> - ({ - let (bracket, (path, tts)) = path_and_tts; - - Attribute { - style: AttrStyle::Outer, - path: path, - tts: tts, - is_sugared_doc: false, - pound_token: pound, - bracket_token: bracket, - } - }) - ) - | - map!( - call!(lit_doc_comment, Comment::Outer), - |lit| { - let span = lit.span; - Attribute { - style: AttrStyle::Outer, - path: Ident::new("doc", span).into(), - tts: vec![ - eq(span), - lit, - ].into_iter().collect(), - is_sugared_doc: true, - pound_token: ::new(span), - bracket_token: token::Bracket(span), - } - } - ) - )); - } - - enum Comment { - Inner, - Outer, - } - - fn lit_doc_comment(input: Cursor, style: Comment) -> PResult { - match input.literal() { - Some((span, lit, rest)) => { - let string = lit.to_string(); - let ok = match style { - Comment::Inner => string.starts_with("//!") || string.starts_with("/*!"), - Comment::Outer => string.starts_with("///") || string.starts_with("/**"), - }; - if ok { - Ok(( - TokenTree { - span: span, - kind: TokenNode::Literal(Literal::string(&string)), - }, - rest, - )) - } else { - parse_error() - } - } - _ => parse_error(), 
- } - } -} - -#[cfg(feature = "printing")] -mod printing { - use super::*; - use quote::{ToTokens, Tokens}; - use proc_macro2::Literal; - - impl ToTokens for Attribute { - fn to_tokens(&self, tokens: &mut Tokens) { - // If this was a sugared doc, emit it in its original form instead of `#[doc = "..."]` - if self.is_sugared_doc { - if let Some(Meta::NameValue(ref pair)) = self.interpret_meta() { - if pair.ident == "doc" { - if let Lit::Str(ref comment) = pair.lit { - tokens.append(TokenTree { - span: comment.span, - kind: TokenNode::Literal(Literal::doccomment(&comment.value())), - }); - return; - } - } - } - } - - self.pound_token.to_tokens(tokens); - if let AttrStyle::Inner(ref b) = self.style { - b.to_tokens(tokens); - } - self.bracket_token.surround(tokens, |tokens| { - self.path.to_tokens(tokens); - self.tts.to_tokens(tokens); - }); - } - } - - impl ToTokens for MetaList { - fn to_tokens(&self, tokens: &mut Tokens) { - self.ident.to_tokens(tokens); - self.paren_token.surround(tokens, |tokens| { - self.nested.to_tokens(tokens); - }) - } - } - - impl ToTokens for MetaNameValue { - fn to_tokens(&self, tokens: &mut Tokens) { - self.ident.to_tokens(tokens); - self.eq_token.to_tokens(tokens); - self.lit.to_tokens(tokens); - } - } -} diff --git a/third_party/rust/syn-0.12.12/src/buffer.rs b/third_party/rust/syn-0.12.12/src/buffer.rs deleted file mode 100644 index 6b05d5010d06..000000000000 --- a/third_party/rust/syn-0.12.12/src/buffer.rs +++ /dev/null @@ -1,467 +0,0 @@ -// Copyright 2018 Syn Developers -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! A stably addressed token buffer supporting efficient traversal based on a -//! cheaply copyable cursor. -//! -//! The [`Synom`] trait is implemented for syntax tree types that can be parsed -//! from one of these token cursors. -//! -//! [`Synom`]: ../synom/trait.Synom.html -//! -//! *This module is available if Syn is built with the `"parsing"` feature.* -//! -//! # Example -//! -//! This example shows a basic token parser for parsing a token stream without -//! using Syn's parser combinator macros. -//! -//! ``` -//! #![feature(proc_macro)] -//! -//! extern crate syn; -//! extern crate proc_macro; -//! -//! #[macro_use] -//! extern crate quote; -//! -//! use syn::{token, ExprTuple}; -//! use syn::buffer::{Cursor, TokenBuffer}; -//! use syn::spanned::Spanned; -//! use syn::synom::Synom; -//! use proc_macro::{Diagnostic, Span, TokenStream}; -//! -//! /// A basic token parser for parsing a token stream without using Syn's -//! /// parser combinator macros. -//! pub struct Parser<'a> { -//! cursor: Cursor<'a>, -//! } -//! -//! impl<'a> Parser<'a> { -//! pub fn new(cursor: Cursor<'a>) -> Self { -//! Parser { cursor } -//! } -//! -//! pub fn current_span(&self) -> Span { -//! self.cursor.span().unstable() -//! } -//! -//! pub fn parse(&mut self) -> Result { -//! let (val, rest) = T::parse(self.cursor) -//! .map_err(|e| match T::description() { -//! Some(desc) => { -//! self.current_span().error(format!("{}: expected {}", e, desc)) -//! } -//! None => { -//! self.current_span().error(e.to_string()) -//! } -//! })?; -//! -//! self.cursor = rest; -//! Ok(val) -//! } -//! -//! pub fn expect_eof(&mut self) -> Result<(), Diagnostic> { -//! if !self.cursor.eof() { -//! return Err(self.current_span().error("trailing characters; expected eof")); -//! } -//! -//! Ok(()) -//! } -//! } -//! -//! 
fn eval(input: TokenStream) -> Result { -//! let buffer = TokenBuffer::new(input); -//! let mut parser = Parser::new(buffer.begin()); -//! -//! // Parse some syntax tree types out of the input tokens. In this case we -//! // expect something like: -//! // -//! // (a, b, c) = (1, 2, 3) -//! let a = parser.parse::()?; -//! parser.parse::()?; -//! let b = parser.parse::()?; -//! parser.expect_eof()?; -//! -//! // Perform some validation and report errors. -//! let (a_len, b_len) = (a.elems.len(), b.elems.len()); -//! if a_len != b_len { -//! let diag = b.span().unstable() -//! .error(format!("expected {} element(s), got {}", a_len, b_len)) -//! .span_note(a.span().unstable(), "because of this"); -//! -//! return Err(diag); -//! } -//! -//! // Build the output tokens. -//! let out = quote! { -//! println!("All good! Received two tuples of size {}", #a_len); -//! }; -//! -//! Ok(out.into()) -//! } -//! # -//! # extern crate proc_macro2; -//! # -//! # // This method exists on proc_macro2::Span but is behind the "nightly" -//! # // feature. -//! # trait ToUnstableSpan { -//! # fn unstable(&self) -> Span; -//! # } -//! # -//! # impl ToUnstableSpan for proc_macro2::Span { -//! # fn unstable(&self) -> Span { -//! # unimplemented!() -//! # } -//! # } -//! # -//! # fn main() {} -//! ``` - -// This module is heavily commented as it contains the only unsafe code in Syn, -// and caution should be used when editing it. The public-facing interface is -// 100% safe but the implementation is fragile internally. - -use proc_macro as pm; -use proc_macro2::{Delimiter, Literal, Spacing, Span, Term, TokenNode, TokenStream, TokenTree}; - -use std::ptr; -use std::marker::PhantomData; - -#[cfg(synom_verbose_trace)] -use std::fmt::{self, Debug}; - -/// Internal type which is used instead of `TokenTree` to represent a token tree -/// within a `TokenBuffer`. -enum Entry { - // Mimicking types from proc-macro. - Group(Span, Delimiter, TokenBuffer), - Term(Span, Term), - Op(Span, char, Spacing), - Literal(Span, Literal), - // End entries contain a raw pointer to the entry from the containing - // token tree, or null if this is the outermost level. - End(*const Entry), -} - -/// A buffer that can be efficiently traversed multiple times, unlike -/// `TokenStream` which requires a deep copy in order to traverse more than -/// once. -/// -/// See the [module documentation] for an example of `TokenBuffer` in action. -/// -/// [module documentation]: index.html -/// -/// *This type is available if Syn is built with the `"parsing"` feature.* -pub struct TokenBuffer { - // NOTE: Do not derive clone on this - there are raw pointers inside which - // will be messed up. Moving the `TokenBuffer` itself is safe as the actual - // backing slices won't be moved. - data: Box<[Entry]>, -} - -impl TokenBuffer { - // NOTE: DO NOT MUTATE THE `Vec` RETURNED FROM THIS FUNCTION ONCE IT - // RETURNS, THE ADDRESS OF ITS BACKING MEMORY MUST REMAIN STABLE. - fn inner_new(stream: TokenStream, up: *const Entry) -> TokenBuffer { - // Build up the entries list, recording the locations of any Groups - // in the list to be processed later. 
- let mut entries = Vec::new(); - let mut seqs = Vec::new(); - for tt in stream { - match tt.kind { - TokenNode::Term(sym) => { - entries.push(Entry::Term(tt.span, sym)); - } - TokenNode::Op(chr, ok) => { - entries.push(Entry::Op(tt.span, chr, ok)); - } - TokenNode::Literal(lit) => { - entries.push(Entry::Literal(tt.span, lit)); - } - TokenNode::Group(delim, seq_stream) => { - // Record the index of the interesting entry, and store an - // `End(null)` there temporarially. - seqs.push((entries.len(), tt.span, delim, seq_stream)); - entries.push(Entry::End(ptr::null())); - } - } - } - // Add an `End` entry to the end with a reference to the enclosing token - // stream which was passed in. - entries.push(Entry::End(up)); - - // NOTE: This is done to ensure that we don't accidentally modify the - // length of the backing buffer. The backing buffer must remain at a - // constant address after this point, as we are going to store a raw - // pointer into it. - let mut entries = entries.into_boxed_slice(); - for (idx, span, delim, seq_stream) in seqs { - // We know that this index refers to one of the temporary - // `End(null)` entries, and we know that the last entry is - // `End(up)`, so the next index is also valid. - let seq_up = &entries[idx + 1] as *const Entry; - - // The end entry stored at the end of this Entry::Group should - // point to the Entry which follows the Group in the list. - let inner = Self::inner_new(seq_stream, seq_up); - entries[idx] = Entry::Group(span, delim, inner); - } - - TokenBuffer { data: entries } - } - - /// Creates a `TokenBuffer` containing all the tokens from the input - /// `TokenStream`. - pub fn new(stream: pm::TokenStream) -> TokenBuffer { - Self::new2(stream.into()) - } - - /// Creates a `TokenBuffer` containing all the tokens from the input - /// `TokenStream`. - pub fn new2(stream: TokenStream) -> TokenBuffer { - Self::inner_new(stream, ptr::null()) - } - - /// Creates a cursor referencing the first token in the buffer and able to - /// traverse until the end of the buffer. - pub fn begin(&self) -> Cursor { - unsafe { Cursor::create(&self.data[0], &self.data[self.data.len() - 1]) } - } -} - -/// A cheaply copyable cursor into a `TokenBuffer`. -/// -/// This cursor holds a shared reference into the immutable data which is used -/// internally to represent a `TokenStream`, and can be efficiently manipulated -/// and copied around. -/// -/// An empty `Cursor` can be created directly, or one may create a `TokenBuffer` -/// object and get a cursor to its first token with `begin()`. -/// -/// Two cursors are equal if they have the same location in the same input -/// stream, and have the same scope. -/// -/// See the [module documentation] for an example of a `Cursor` in action. -/// -/// [module documentation]: index.html -/// -/// *This type is available if Syn is built with the `"parsing"` feature.* -#[derive(Copy, Clone, Eq, PartialEq)] -pub struct Cursor<'a> { - /// The current entry which the `Cursor` is pointing at. - ptr: *const Entry, - /// This is the only `Entry::End(..)` object which this cursor is allowed to - /// point at. All other `End` objects are skipped over in `Cursor::create`. - scope: *const Entry, - /// This uses the &'a reference which guarantees that these pointers are - /// still valid. - marker: PhantomData<&'a Entry>, -} - -impl<'a> Cursor<'a> { - /// Creates a cursor referencing a static empty TokenStream. 
- pub fn empty() -> Self { - // It's safe in this situation for us to put an `Entry` object in global - // storage, despite it not actually being safe to send across threads - // (`Term` is a reference into a thread-local table). This is because - // this entry never includes a `Term` object. - // - // This wrapper struct allows us to break the rules and put a `Sync` - // object in global storage. - struct UnsafeSyncEntry(Entry); - unsafe impl Sync for UnsafeSyncEntry {} - static EMPTY_ENTRY: UnsafeSyncEntry = UnsafeSyncEntry(Entry::End(0 as *const Entry)); - - Cursor { - ptr: &EMPTY_ENTRY.0, - scope: &EMPTY_ENTRY.0, - marker: PhantomData, - } - } - - /// This create method intelligently exits non-explicitly-entered - /// `None`-delimited scopes when the cursor reaches the end of them, - /// allowing for them to be treated transparently. - unsafe fn create(mut ptr: *const Entry, scope: *const Entry) -> Self { - // NOTE: If we're looking at a `End(..)`, we want to advance the cursor - // past it, unless `ptr == scope`, which means that we're at the edge of - // our cursor's scope. We should only have `ptr != scope` at the exit - // from None-delimited groups entered with `ignore_none`. - while let Entry::End(exit) = *ptr { - if ptr == scope { - break; - } - ptr = exit; - } - - Cursor { - ptr: ptr, - scope: scope, - marker: PhantomData, - } - } - - /// Get the current entry. - fn entry(self) -> &'a Entry { - unsafe { &*self.ptr } - } - - /// Bump the cursor to point at the next token after the current one. This - /// is undefined behavior if the cursor is currently looking at an - /// `Entry::End`. - unsafe fn bump(self) -> Cursor<'a> { - Cursor::create(self.ptr.offset(1), self.scope) - } - - /// If the cursor is looking at a `None`-delimited group, move it to look at - /// the first token inside instead. If the group is empty, this will move - /// the cursor past the `None`-delimited group. - /// - /// WARNING: This mutates its argument. - fn ignore_none(&mut self) { - if let Entry::Group(_, Delimiter::None, ref buf) = *self.entry() { - // NOTE: We call `Cursor::create` here to make sure that situations - // where we should immediately exit the span after entering it are - // handled correctly. - unsafe { - *self = Cursor::create(&buf.data[0], self.scope); - } - } - } - - /// Checks whether the cursor is currently pointing at the end of its valid - /// scope. - #[inline] - pub fn eof(self) -> bool { - // We're at eof if we're at the end of our scope. - self.ptr == self.scope - } - - /// If the cursor is pointing at a `Group` with the given delimiter, returns - /// a cursor into that group and one pointing to the next `TokenTree`. - pub fn group(mut self, delim: Delimiter) -> Option<(Cursor<'a>, Span, Cursor<'a>)> { - // If we're not trying to enter a none-delimited group, we want to - // ignore them. We have to make sure to _not_ ignore them when we want - // to enter them, of course. For obvious reasons. - if delim != Delimiter::None { - self.ignore_none(); - } - - if let Entry::Group(span, group_delim, ref buf) = *self.entry() { - if group_delim == delim { - return Some((buf.begin(), span, unsafe { self.bump() })); - } - } - - None - } - - /// If the cursor is pointing at a `Term`, returns it along with a cursor - /// pointing at the next `TokenTree`. 
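A hedged sketch of driving the `TokenBuffer`/`Cursor` API defined in this file from ordinary code (syn 0.12 with proc-macro2 0.2): build a buffer from a `proc_macro2::TokenStream`, then step a cursor over every top-level token tree until `eof`. The `main` wrapper is illustrative.

```rust
// Hedged sketch of the TokenBuffer/Cursor API defined in this file
// (syn 0.12 with proc-macro2 0.2). The `main` wrapper is illustrative.
extern crate proc_macro2;
extern crate syn;

use syn::buffer::TokenBuffer;

fn main() {
    let stream: proc_macro2::TokenStream = "a + (b * c)".parse().unwrap();
    let buffer = TokenBuffer::new2(stream);

    // Cursors are cheap to copy; stepping one does not consume the buffer.
    let mut cursor = buffer.begin();
    let mut count = 0;
    while let Some((_tt, next)) = cursor.token_tree() {
        count += 1;
        cursor = next;
    }
    assert!(cursor.eof());
    println!("top-level token trees: {}", count); // `a`, `+`, `(b * c)` => 3
}
```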
- pub fn term(mut self) -> Option<(Span, Term, Cursor<'a>)> { - self.ignore_none(); - match *self.entry() { - Entry::Term(span, term) => Some((span, term, unsafe { self.bump() })), - _ => None, - } - } - - /// If the cursor is pointing at an `Op`, returns it along with a cursor - /// pointing at the next `TokenTree`. - pub fn op(mut self) -> Option<(Span, char, Spacing, Cursor<'a>)> { - self.ignore_none(); - match *self.entry() { - Entry::Op(span, op, spacing) => Some((span, op, spacing, unsafe { self.bump() })), - _ => None, - } - } - - /// If the cursor is pointing at a `Literal`, return it along with a cursor - /// pointing at the next `TokenTree`. - pub fn literal(mut self) -> Option<(Span, Literal, Cursor<'a>)> { - self.ignore_none(); - match *self.entry() { - Entry::Literal(span, ref lit) => Some((span, lit.clone(), unsafe { self.bump() })), - _ => None, - } - } - - /// Copies all remaining tokens visible from this cursor into a - /// `TokenStream`. - pub fn token_stream(self) -> TokenStream { - let mut tts = Vec::new(); - let mut cursor = self; - while let Some((tt, rest)) = cursor.token_tree() { - tts.push(tt); - cursor = rest; - } - tts.into_iter().collect() - } - - /// If the cursor is pointing at a `TokenTree`, returns it along with a - /// cursor pointing at the next `TokenTree`. - /// - /// Returns `None` if the cursor has reached the end of its stream. - /// - /// This method does not treat `None`-delimited groups as transparent, and - /// will return a `Group(None, ..)` if the cursor is looking at one. - pub fn token_tree(self) -> Option<(TokenTree, Cursor<'a>)> { - let tree = match *self.entry() { - Entry::Group(span, delim, ref buf) => { - let stream = buf.begin().token_stream(); - TokenTree { - span: span, - kind: TokenNode::Group(delim, stream), - } - } - Entry::Literal(span, ref lit) => TokenTree { - span: span, - kind: TokenNode::Literal(lit.clone()), - }, - Entry::Term(span, sym) => TokenTree { - span: span, - kind: TokenNode::Term(sym), - }, - Entry::Op(span, chr, spacing) => TokenTree { - span: span, - kind: TokenNode::Op(chr, spacing), - }, - Entry::End(..) => { - return None; - } - }; - - Some((tree, unsafe { self.bump() })) - } - - /// Returns the `Span` of the current token, or `Span::call_site()` if this - /// cursor points to eof. - pub fn span(self) -> Span { - match *self.entry() { - Entry::Group(span, ..) - | Entry::Literal(span, ..) - | Entry::Term(span, ..) - | Entry::Op(span, ..) => span, - Entry::End(..) => Span::call_site(), - } - } -} - -// We do a custom implementation for `Debug` as the default implementation is -// pretty useless. -#[cfg(synom_verbose_trace)] -impl<'a> Debug for Cursor<'a> { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - // Print what the cursor is currently looking at. - // This will look like Cursor("some remaining tokens here") - f.debug_tuple("Cursor") - .field(&self.token_stream().to_string()) - .finish() - } -} diff --git a/third_party/rust/syn-0.12.12/src/data.rs b/third_party/rust/syn-0.12.12/src/data.rs deleted file mode 100644 index 4f5b4cb041c4..000000000000 --- a/third_party/rust/syn-0.12.12/src/data.rs +++ /dev/null @@ -1,388 +0,0 @@ -// Copyright 2018 Syn Developers -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -use super::*; -use punctuated::Punctuated; - -ast_struct! { - /// An enum variant. 
- /// - /// *This type is available if Syn is built with the `"derive"` or `"full"` - /// feature.* - pub struct Variant { - /// Attributes tagged on the variant. - pub attrs: Vec, - - /// Name of the variant. - pub ident: Ident, - - /// Content stored in the variant. - pub fields: Fields, - - /// Explicit discriminant: `Variant = 1` - pub discriminant: Option<(Token![=], Expr)>, - } -} - -ast_enum_of_structs! { - /// Data stored within an enum variant or struct. - /// - /// *This type is available if Syn is built with the `"derive"` or `"full"` - /// feature.* - /// - /// # Syntax tree enum - /// - /// This type is a [syntax tree enum]. - /// - /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums - pub enum Fields { - /// Named fields of a struct or struct variant such as `Point { x: f64, - /// y: f64 }`. - /// - /// *This type is available if Syn is built with the `"derive"` or - /// `"full"` feature.* - pub Named(FieldsNamed { - pub brace_token: token::Brace, - pub named: Punctuated, - }), - - /// Unnamed fields of a tuple struct or tuple variant such as `Some(T)`. - /// - /// *This type is available if Syn is built with the `"derive"` or - /// `"full"` feature.* - pub Unnamed(FieldsUnnamed { - pub paren_token: token::Paren, - pub unnamed: Punctuated, - }), - - /// Unit struct or unit variant such as `None`. - pub Unit, - } -} - -impl Fields { - /// Get an iterator over the [`Field`] items in this object. This iterator - /// can be used to iterate over a named or unnamed struct or variant's - /// fields uniformly. - /// - /// [`Field`]: struct.Field.html - pub fn iter(&self) -> punctuated::Iter { - match *self { - Fields::Unit => punctuated::Iter::private_empty(), - Fields::Named(ref f) => f.named.iter(), - Fields::Unnamed(ref f) => f.unnamed.iter(), - } - } -} - -impl<'a> IntoIterator for &'a Fields { - type Item = &'a Field; - type IntoIter = punctuated::Iter<'a, Field, Token![,]>; - - fn into_iter(self) -> Self::IntoIter { - self.iter() - } -} - -ast_struct! { - /// A field of a struct or enum variant. - /// - /// *This type is available if Syn is built with the `"derive"` or `"full"` - /// feature.* - pub struct Field { - /// Attributes tagged on the field. - pub attrs: Vec, - - /// Visibility of the field. - pub vis: Visibility, - - /// Name of the field, if any. - /// - /// Fields of tuple structs have no names. - pub ident: Option, - - pub colon_token: Option, - - /// Type of the field. - pub ty: Type, - } -} - -ast_enum_of_structs! { - /// The visibility level of an item: inherited or `pub` or - /// `pub(restricted)`. - /// - /// *This type is available if Syn is built with the `"derive"` or `"full"` - /// feature.* - /// - /// # Syntax tree enum - /// - /// This type is a [syntax tree enum]. - /// - /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums - pub enum Visibility { - /// A public visibility level: `pub`. - /// - /// *This type is available if Syn is built with the `"derive"` or - /// `"full"` feature.* - pub Public(VisPublic { - pub pub_token: Token![pub], - }), - - /// A crate-level visibility: `pub(crate)`. - /// - /// *This type is available if Syn is built with the `"derive"` or - /// `"full"` feature.* - pub Crate(VisCrate { - pub pub_token: Token![pub], - pub paren_token: token::Paren, - pub crate_token: Token![crate], - }), - - /// A visibility level restricted to some path: `pub(self)` or - /// `pub(super)` or `pub(in some::module)`. 
- /// - /// *This type is available if Syn is built with the `"derive"` or - /// `"full"` feature.* - pub Restricted(VisRestricted { - pub pub_token: Token![pub], - pub paren_token: token::Paren, - pub in_token: Option, - pub path: Box, - }), - - /// An inherited visibility, which usually means private. - pub Inherited, - } -} - -#[cfg(feature = "parsing")] -pub mod parsing { - use super::*; - - use synom::Synom; - - impl Synom for Variant { - named!(parse -> Self, do_parse!( - attrs: many0!(Attribute::parse_outer) >> - id: syn!(Ident) >> - fields: alt!( - syn!(FieldsNamed) => { Fields::Named } - | - syn!(FieldsUnnamed) => { Fields::Unnamed } - | - epsilon!() => { |_| Fields::Unit } - ) >> - disr: option!(tuple!(punct!(=), syn!(Expr))) >> - (Variant { - ident: id, - attrs: attrs, - fields: fields, - discriminant: disr, - }) - )); - - fn description() -> Option<&'static str> { - Some("enum variant") - } - } - - impl Synom for FieldsNamed { - named!(parse -> Self, map!( - braces!(call!(Punctuated::parse_terminated_with, Field::parse_named)), - |(brace, fields)| FieldsNamed { - brace_token: brace, - named: fields, - } - )); - - fn description() -> Option<&'static str> { - Some("named fields in a struct or struct variant") - } - } - - impl Synom for FieldsUnnamed { - named!(parse -> Self, map!( - parens!(call!(Punctuated::parse_terminated_with, Field::parse_unnamed)), - |(paren, fields)| FieldsUnnamed { - paren_token: paren, - unnamed: fields, - } - )); - - fn description() -> Option<&'static str> { - Some("unnamed fields in a tuple struct or tuple variant") - } - } - - impl Field { - named!(pub parse_named -> Self, do_parse!( - attrs: many0!(Attribute::parse_outer) >> - vis: syn!(Visibility) >> - id: syn!(Ident) >> - colon: punct!(:) >> - ty: syn!(Type) >> - (Field { - ident: Some(id), - vis: vis, - attrs: attrs, - ty: ty, - colon_token: Some(colon), - }) - )); - - named!(pub parse_unnamed -> Self, do_parse!( - attrs: many0!(Attribute::parse_outer) >> - vis: syn!(Visibility) >> - ty: syn!(Type) >> - (Field { - ident: None, - colon_token: None, - vis: vis, - attrs: attrs, - ty: ty, - }) - )); - } - - impl Synom for Visibility { - named!(parse -> Self, alt!( - do_parse!( - pub_token: keyword!(pub) >> - other: parens!(keyword!(crate)) >> - (Visibility::Crate(VisCrate { - pub_token: pub_token, - paren_token: other.0, - crate_token: other.1, - })) - ) - | - do_parse!( - pub_token: keyword!(pub) >> - other: parens!(keyword!(self)) >> - (Visibility::Restricted(VisRestricted { - pub_token: pub_token, - paren_token: other.0, - in_token: None, - path: Box::new(other.1.into()), - })) - ) - | - do_parse!( - pub_token: keyword!(pub) >> - other: parens!(keyword!(super)) >> - (Visibility::Restricted(VisRestricted { - pub_token: pub_token, - paren_token: other.0, - in_token: None, - path: Box::new(other.1.into()), - })) - ) - | - do_parse!( - pub_token: keyword!(pub) >> - other: parens!(do_parse!( - in_tok: keyword!(in) >> - restricted: call!(Path::parse_mod_style) >> - (in_tok, restricted) - )) >> - (Visibility::Restricted(VisRestricted { - pub_token: pub_token, - paren_token: other.0, - in_token: Some((other.1).0), - path: Box::new((other.1).1), - })) - ) - | - keyword!(pub) => { |tok| { - Visibility::Public(VisPublic { - pub_token: tok, - }) - } } - | - epsilon!() => { |_| Visibility::Inherited } - )); - - fn description() -> Option<&'static str> { - Some("visibility qualifier such as `pub`") - } - } -} - -#[cfg(feature = "printing")] -mod printing { - use super::*; - use quote::{ToTokens, Tokens}; - - 
impl ToTokens for Variant { - fn to_tokens(&self, tokens: &mut Tokens) { - tokens.append_all(&self.attrs); - self.ident.to_tokens(tokens); - self.fields.to_tokens(tokens); - if let Some((ref eq_token, ref disc)) = self.discriminant { - eq_token.to_tokens(tokens); - disc.to_tokens(tokens); - } - } - } - - impl ToTokens for FieldsNamed { - fn to_tokens(&self, tokens: &mut Tokens) { - self.brace_token.surround(tokens, |tokens| { - self.named.to_tokens(tokens); - }); - } - } - - impl ToTokens for FieldsUnnamed { - fn to_tokens(&self, tokens: &mut Tokens) { - self.paren_token.surround(tokens, |tokens| { - self.unnamed.to_tokens(tokens); - }); - } - } - - impl ToTokens for Field { - fn to_tokens(&self, tokens: &mut Tokens) { - tokens.append_all(&self.attrs); - self.vis.to_tokens(tokens); - if let Some(ref ident) = self.ident { - ident.to_tokens(tokens); - TokensOrDefault(&self.colon_token).to_tokens(tokens); - } - self.ty.to_tokens(tokens); - } - } - - impl ToTokens for VisPublic { - fn to_tokens(&self, tokens: &mut Tokens) { - self.pub_token.to_tokens(tokens) - } - } - - impl ToTokens for VisCrate { - fn to_tokens(&self, tokens: &mut Tokens) { - self.pub_token.to_tokens(tokens); - self.paren_token.surround(tokens, |tokens| { - self.crate_token.to_tokens(tokens); - }) - } - } - - impl ToTokens for VisRestricted { - fn to_tokens(&self, tokens: &mut Tokens) { - self.pub_token.to_tokens(tokens); - self.paren_token.surround(tokens, |tokens| { - // XXX: If we have a path which is not "self" or "super", - // automatically add the "in" token. - self.in_token.to_tokens(tokens); - self.path.to_tokens(tokens); - }); - } - } -} diff --git a/third_party/rust/syn-0.12.12/src/derive.rs b/third_party/rust/syn-0.12.12/src/derive.rs deleted file mode 100644 index 3ad893d8bb39..000000000000 --- a/third_party/rust/syn-0.12.12/src/derive.rs +++ /dev/null @@ -1,210 +0,0 @@ -// Copyright 2018 Syn Developers -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -use super::*; -use punctuated::Punctuated; - -ast_struct! { - /// Data structure sent to a `proc_macro_derive` macro. - /// - /// *This type is available if Syn is built with the `"derive"` feature.* - pub struct DeriveInput { - /// Attributes tagged on the whole struct or enum. - pub attrs: Vec, - - /// Visibility of the struct or enum. - pub vis: Visibility, - - /// Name of the struct or enum. - pub ident: Ident, - - /// Generics required to complete the definition. - pub generics: Generics, - - /// Data within the struct or enum. - pub data: Data, - } -} - -ast_enum_of_structs! { - /// The storage of a struct, enum or union data structure. - /// - /// *This type is available if Syn is built with the `"derive"` feature.* - /// - /// # Syntax tree enum - /// - /// This type is a [syntax tree enum]. - /// - /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums - pub enum Data { - /// A struct input to a `proc_macro_derive` macro. - /// - /// *This type is available if Syn is built with the `"derive"` - /// feature.* - pub Struct(DataStruct { - pub struct_token: Token![struct], - pub fields: Fields, - pub semi_token: Option, - }), - - /// An enum input to a `proc_macro_derive` macro. 
- /// - /// *This type is available if Syn is built with the `"derive"` - /// feature.* - pub Enum(DataEnum { - pub enum_token: Token![enum], - pub brace_token: token::Brace, - pub variants: Punctuated, - }), - - /// A tagged union input to a `proc_macro_derive` macro. - /// - /// *This type is available if Syn is built with the `"derive"` - /// feature.* - pub Union(DataUnion { - pub union_token: Token![union], - pub fields: FieldsNamed, - }), - } - - do_not_generate_to_tokens -} - -#[cfg(feature = "parsing")] -pub mod parsing { - use super::*; - - use synom::Synom; - - impl Synom for DeriveInput { - named!(parse -> Self, do_parse!( - attrs: many0!(Attribute::parse_outer) >> - vis: syn!(Visibility) >> - which: alt!( - keyword!(struct) => { Ok } - | - keyword!(enum) => { Err } - ) >> - id: syn!(Ident) >> - generics: syn!(Generics) >> - item: switch!(value!(which), - Ok(s) => map!(data_struct, move |(wh, fields, semi)| DeriveInput { - ident: id, - vis: vis, - attrs: attrs, - generics: Generics { - where_clause: wh, - .. generics - }, - data: Data::Struct(DataStruct { - struct_token: s, - fields: fields, - semi_token: semi, - }), - }) - | - Err(e) => map!(data_enum, move |(wh, brace, variants)| DeriveInput { - ident: id, - vis: vis, - attrs: attrs, - generics: Generics { - where_clause: wh, - .. generics - }, - data: Data::Enum(DataEnum { - variants: variants, - brace_token: brace, - enum_token: e, - }), - }) - ) >> - (item) - )); - - fn description() -> Option<&'static str> { - Some("derive input") - } - } - - named!(data_struct -> (Option, Fields, Option), alt!( - do_parse!( - wh: option!(syn!(WhereClause)) >> - fields: syn!(FieldsNamed) >> - (wh, Fields::Named(fields), None) - ) - | - do_parse!( - fields: syn!(FieldsUnnamed) >> - wh: option!(syn!(WhereClause)) >> - semi: punct!(;) >> - (wh, Fields::Unnamed(fields), Some(semi)) - ) - | - do_parse!( - wh: option!(syn!(WhereClause)) >> - semi: punct!(;) >> - (wh, Fields::Unit, Some(semi)) - ) - )); - - named!(data_enum -> (Option, token::Brace, Punctuated), do_parse!( - wh: option!(syn!(WhereClause)) >> - data: braces!(Punctuated::parse_terminated) >> - (wh, data.0, data.1) - )); -} - -#[cfg(feature = "printing")] -mod printing { - use super::*; - use attr::FilterAttrs; - use quote::{ToTokens, Tokens}; - - impl ToTokens for DeriveInput { - fn to_tokens(&self, tokens: &mut Tokens) { - for attr in self.attrs.outer() { - attr.to_tokens(tokens); - } - self.vis.to_tokens(tokens); - match self.data { - Data::Struct(ref d) => d.struct_token.to_tokens(tokens), - Data::Enum(ref d) => d.enum_token.to_tokens(tokens), - Data::Union(ref d) => d.union_token.to_tokens(tokens), - } - self.ident.to_tokens(tokens); - self.generics.to_tokens(tokens); - match self.data { - Data::Struct(ref data) => match data.fields { - Fields::Named(ref fields) => { - self.generics.where_clause.to_tokens(tokens); - fields.to_tokens(tokens); - } - Fields::Unnamed(ref fields) => { - fields.to_tokens(tokens); - self.generics.where_clause.to_tokens(tokens); - TokensOrDefault(&data.semi_token).to_tokens(tokens); - } - Fields::Unit => { - self.generics.where_clause.to_tokens(tokens); - TokensOrDefault(&data.semi_token).to_tokens(tokens); - } - }, - Data::Enum(ref data) => { - self.generics.where_clause.to_tokens(tokens); - data.brace_token.surround(tokens, |tokens| { - data.variants.to_tokens(tokens); - }); - } - Data::Union(ref data) => { - self.generics.where_clause.to_tokens(tokens); - data.fields.to_tokens(tokens); - } - } - } - } -} diff --git 
a/third_party/rust/syn-0.12.12/src/error.rs b/third_party/rust/syn-0.12.12/src/error.rs deleted file mode 100644 index 23d2a1aa0821..000000000000 --- a/third_party/rust/syn-0.12.12/src/error.rs +++ /dev/null @@ -1,60 +0,0 @@ -// Copyright 2018 Syn Developers -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -use std::error::Error; -use buffer::Cursor; -use std::fmt::{self, Display}; - -/// The result of a `Synom` parser. -/// -/// Refer to the [module documentation] for details about parsing in Syn. -/// -/// [module documentation]: index.html -/// -/// *This type is available if Syn is built with the `"parsing"` feature.* -pub type PResult<'a, O> = Result<(O, Cursor<'a>), ParseError>; - -/// An error with a default error message. -/// -/// NOTE: We should provide better error messages in the future. -pub fn parse_error() -> PResult<'static, O> { - Err(ParseError(None)) -} - -/// Error returned when a `Synom` parser cannot parse the input tokens. -/// -/// Refer to the [module documentation] for details about parsing in Syn. -/// -/// [module documentation]: index.html -/// -/// *This type is available if Syn is built with the `"parsing"` feature.* -#[derive(Debug)] -pub struct ParseError(Option); - -impl Error for ParseError { - fn description(&self) -> &str { - match self.0 { - Some(ref desc) => desc, - None => "failed to parse", - } - } -} - -impl Display for ParseError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - Display::fmt(self.description(), f) - } -} - -impl ParseError { - // For syn use only. Not public API. - #[doc(hidden)] - pub fn new>(msg: T) -> Self { - ParseError(Some(msg.into())) - } -} diff --git a/third_party/rust/syn-0.12.12/src/expr.rs b/third_party/rust/syn-0.12.12/src/expr.rs deleted file mode 100644 index d83167028a10..000000000000 --- a/third_party/rust/syn-0.12.12/src/expr.rs +++ /dev/null @@ -1,3573 +0,0 @@ -// Copyright 2018 Syn Developers -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -use super::*; -use punctuated::Punctuated; -use proc_macro2::{Span, TokenStream}; -#[cfg(feature = "extra-traits")] -use std::hash::{Hash, Hasher}; -#[cfg(feature = "extra-traits")] -use tt::TokenStreamHelper; -#[cfg(feature = "full")] -use std::mem; - -ast_enum_of_structs! { - /// A Rust expression. - /// - /// *This type is available if Syn is built with the `"derive"` or `"full"` - /// feature.* - /// - /// # Syntax tree enums - /// - /// This type is a syntax tree enum. In Syn this and other syntax tree enums - /// are designed to be traversed using the following rebinding idiom. - /// - /// ``` - /// # use syn::Expr; - /// # - /// # fn example(expr: Expr) { - /// # const IGNORE: &str = stringify! { - /// let expr: Expr = /* ... */; - /// # }; - /// match expr { - /// Expr::MethodCall(expr) => { - /// /* ... */ - /// } - /// Expr::Cast(expr) => { - /// /* ... */ - /// } - /// Expr::IfLet(expr) => { - /// /* ... */ - /// } - /// /* ... 
*/ - /// # _ => {} - /// } - /// # } - /// ``` - /// - /// We begin with a variable `expr` of type `Expr` that has no fields - /// (because it is an enum), and by matching on it and rebinding a variable - /// with the same name `expr` we effectively imbue our variable with all of - /// the data fields provided by the variant that it turned out to be. So for - /// example above if we ended up in the `MethodCall` case then we get to use - /// `expr.receiver`, `expr.args` etc; if we ended up in the `IfLet` case we - /// get to use `expr.pat`, `expr.then_branch`, `expr.else_branch`. - /// - /// The pattern is similar if the input expression is borrowed: - /// - /// ``` - /// # use syn::Expr; - /// # - /// # fn example(expr: &Expr) { - /// match *expr { - /// Expr::MethodCall(ref expr) => { - /// # } - /// # _ => {} - /// # } - /// # } - /// ``` - /// - /// This approach avoids repeating the variant names twice on every line. - /// - /// ``` - /// # use syn::{Expr, ExprMethodCall}; - /// # - /// # fn example(expr: Expr) { - /// # match expr { - /// Expr::MethodCall(ExprMethodCall { method, args, .. }) => { // repetitive - /// # } - /// # _ => {} - /// # } - /// # } - /// ``` - /// - /// In general, the name to which a syntax tree enum variant is bound should - /// be a suitable name for the complete syntax tree enum type. - /// - /// ``` - /// # use syn::{Expr, ExprField}; - /// # - /// # fn example(discriminant: &ExprField) { - /// // Binding is called `base` which is the name I would use if I were - /// // assigning `*discriminant.base` without an `if let`. - /// if let Expr::Tuple(ref base) = *discriminant.base { - /// # } - /// # } - /// ``` - /// - /// A sign that you may not be choosing the right variable names is if you - /// see names getting repeated in your code, like accessing - /// `receiver.receiver` or `pat.pat` or `cond.cond`. - pub enum Expr { - /// A box expression: `box f`. - /// - /// *This type is available if Syn is built with the `"full"` feature.* - pub Box(ExprBox #full { - pub attrs: Vec, - pub box_token: Token![box], - pub expr: Box, - }), - - /// A placement expression: `place <- value`. - /// - /// *This type is available if Syn is built with the `"full"` feature.* - pub InPlace(ExprInPlace #full { - pub attrs: Vec, - pub place: Box, - pub arrow_token: Token![<-], - pub value: Box, - }), - - /// A slice literal expression: `[a, b, c, d]`. - /// - /// *This type is available if Syn is built with the `"full"` feature.* - pub Array(ExprArray #full { - pub attrs: Vec, - pub bracket_token: token::Bracket, - pub elems: Punctuated, - }), - - /// A function call expression: `invoke(a, b)`. - /// - /// *This type is available if Syn is built with the `"derive"` or - /// `"full"` feature.* - pub Call(ExprCall { - pub attrs: Vec, - pub func: Box, - pub paren_token: token::Paren, - pub args: Punctuated, - }), - - /// A method call expression: `x.foo::(a, b)`. - /// - /// *This type is available if Syn is built with the `"full"` feature.* - pub MethodCall(ExprMethodCall #full { - pub attrs: Vec, - pub receiver: Box, - pub dot_token: Token![.], - pub method: Ident, - pub turbofish: Option, - pub paren_token: token::Paren, - pub args: Punctuated, - }), - - /// A tuple expression: `(a, b, c, d)`. - /// - /// *This type is available if Syn is built with the `"full"` feature.* - pub Tuple(ExprTuple #full { - pub attrs: Vec, - pub paren_token: token::Paren, - pub elems: Punctuated, - }), - - /// A binary operation: `a + b`, `a * b`. 
- /// - /// *This type is available if Syn is built with the `"derive"` or - /// `"full"` feature.* - pub Binary(ExprBinary { - pub attrs: Vec, - pub left: Box, - pub op: BinOp, - pub right: Box, - }), - - /// A unary operation: `!x`, `*x`. - /// - /// *This type is available if Syn is built with the `"derive"` or - /// `"full"` feature.* - pub Unary(ExprUnary { - pub attrs: Vec, - pub op: UnOp, - pub expr: Box, - }), - - /// A literal in place of an expression: `1`, `"foo"`. - /// - /// *This type is available if Syn is built with the `"derive"` or - /// `"full"` feature.* - pub Lit(ExprLit { - pub attrs: Vec, - pub lit: Lit, - }), - - /// A cast expression: `foo as f64`. - /// - /// *This type is available if Syn is built with the `"derive"` or - /// `"full"` feature.* - pub Cast(ExprCast { - pub attrs: Vec, - pub expr: Box, - pub as_token: Token![as], - pub ty: Box, - }), - - /// A type ascription expression: `foo: f64`. - /// - /// *This type is available if Syn is built with the `"full"` feature.* - pub Type(ExprType #full { - pub attrs: Vec, - pub expr: Box, - pub colon_token: Token![:], - pub ty: Box, - }), - - /// An `if` expression with an optional `else` block: `if expr { ... } - /// else { ... }`. - /// - /// The `else` branch expression may only be an `If`, `IfLet`, or - /// `Block` expression, not any of the other types of expression. - /// - /// *This type is available if Syn is built with the `"full"` feature.* - pub If(ExprIf #full { - pub attrs: Vec, - pub if_token: Token![if], - pub cond: Box, - pub then_branch: Block, - pub else_branch: Option<(Token![else], Box)>, - }), - - /// An `if let` expression with an optional `else` block: `if let pat = - /// expr { ... } else { ... }`. - /// - /// The `else` branch expression may only be an `If`, `IfLet`, or - /// `Block` expression, not any of the other types of expression. - /// - /// *This type is available if Syn is built with the `"full"` feature.* - pub IfLet(ExprIfLet #full { - pub attrs: Vec, - pub if_token: Token![if], - pub let_token: Token![let], - pub pat: Box, - pub eq_token: Token![=], - pub expr: Box, - pub then_branch: Block, - pub else_branch: Option<(Token![else], Box)>, - }), - - /// A while loop: `while expr { ... }`. - /// - /// *This type is available if Syn is built with the `"full"` feature.* - pub While(ExprWhile #full { - pub attrs: Vec, - pub label: Option