зеркало из https://github.com/mozilla/gecko-dev.git
Bug 1836219 - Upgrade serde to 1.0.163. r=emilio,supply-chain-reviewers
Differential Revision: https://phabricator.services.mozilla.com/D179644
This commit is contained in:
Родитель
7d27eea89e
Коммит
f98d016db4
|
@ -176,7 +176,7 @@ checksum = "87bf87e6e8b47264efa9bde63d6225c6276a52e05e91bf37eaa8afd0032d6b71"
|
|||
dependencies = [
|
||||
"askama_shared",
|
||||
"proc-macro2",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -198,7 +198,7 @@ dependencies = [
|
|||
"proc-macro2",
|
||||
"quote",
|
||||
"serde",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
"toml",
|
||||
]
|
||||
|
||||
|
@ -216,7 +216,7 @@ checksum = "1cd7fce9ba8c3c042128ce72d8b2ddbf3a05747efb67ea0313c635e10bda47a2"
|
|||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -428,7 +428,7 @@ dependencies = [
|
|||
"regex",
|
||||
"rustc-hash",
|
||||
"shlex",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -756,7 +756,7 @@ dependencies = [
|
|||
"proc-macro-error",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -1012,7 +1012,7 @@ dependencies = [
|
|||
"proc-macro2",
|
||||
"quote",
|
||||
"smallvec",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -1020,7 +1020,7 @@ name = "cssparser-macros"
|
|||
version = "0.6.0"
|
||||
dependencies = [
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -1166,7 +1166,7 @@ dependencies = [
|
|||
"proc-macro2",
|
||||
"quote",
|
||||
"strsim",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -1177,7 +1177,7 @@ checksum = "b36230598a2d5de7ec1c6f51f72d8a99a9208daff41de2084d06e3fd3ea56685"
|
|||
dependencies = [
|
||||
"darling_core",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -1237,7 +1237,7 @@ checksum = "8beee4701e2e229e8098bbdecdca12449bc3e322f137d269182fa1291e20bd00"
|
|||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -1247,7 +1247,7 @@ dependencies = [
|
|||
"darling 0.14.3",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
"synstructure",
|
||||
]
|
||||
|
||||
|
@ -1259,7 +1259,7 @@ checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321"
|
|||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -1319,7 +1319,7 @@ checksum = "3bf95dc3f046b9da4f2d51833c0d3547d8564ef6910f5c1ed130306a75b92886"
|
|||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -1437,7 +1437,7 @@ checksum = "c375b9c5eadb68d0a6efee2999fef292f45854c3444c86f09d8ab086ba942b0e"
|
|||
dependencies = [
|
||||
"num-traits",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -1458,7 +1458,7 @@ dependencies = [
|
|||
"darling 0.14.3",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -1506,7 +1506,7 @@ source = "git+https://github.com/mozilla/application-services?rev=86c84c217036c1
|
|||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -1539,7 +1539,7 @@ dependencies = [
|
|||
"proc-macro-error",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -1885,7 +1885,7 @@ checksum = "95a73af87da33b5acf53acfebdc339fe592ecf5357ac7c0a7734ab9d8c876a70"
|
|||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -2575,7 +2575,7 @@ checksum = "cb659d59c4af6c9dc568b13db431174ab5fa961aa53f5aad7f42fb710c06bc46"
|
|||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -3068,7 +3068,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "632647502a8bfa82458c07134791fffa7a719f00427d1afd79c3cb6d4960a982"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
"synstructure",
|
||||
]
|
||||
|
||||
|
@ -3378,7 +3378,7 @@ dependencies = [
|
|||
"quote",
|
||||
"serde",
|
||||
"serde_derive",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
"void",
|
||||
"winapi",
|
||||
]
|
||||
|
@ -3704,7 +3704,7 @@ checksum = "876a53fff98e03a936a674b29568b0e605f06b29372c2489ff4de23f1949743d"
|
|||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -3924,7 +3924,7 @@ version = "0.3.0"
|
|||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
"synstructure",
|
||||
"unicode-xid",
|
||||
]
|
||||
|
@ -3983,7 +3983,7 @@ dependencies = [
|
|||
"proc-macro-hack",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -4012,7 +4012,7 @@ checksum = "069bdb1e05adc7a8990dce9cc75370895fbe4e3d58b9b73bf1aee56359344a55"
|
|||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -4112,7 +4112,7 @@ dependencies = [
|
|||
"proc-macro-error-attr",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
"version_check",
|
||||
]
|
||||
|
||||
|
@ -4135,9 +4135,9 @@ checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068"
|
|||
|
||||
[[package]]
|
||||
name = "proc-macro2"
|
||||
version = "1.0.51"
|
||||
version = "1.0.59"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5d727cae5b39d21da60fa540906919ad737832fe0b1c165da3a34d6548c849d6"
|
||||
checksum = "6aeca18b86b413c660b781aa319e4e2648a3e6f9eadc9b47e9038e6fe9f3451b"
|
||||
dependencies = [
|
||||
"unicode-ident",
|
||||
]
|
||||
|
@ -4157,7 +4157,7 @@ name = "profiler-macros"
|
|||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -4197,7 +4197,7 @@ dependencies = [
|
|||
"itertools",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -4244,9 +4244,9 @@ checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
|
|||
|
||||
[[package]]
|
||||
name = "quote"
|
||||
version = "1.0.23"
|
||||
version = "1.0.28"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b"
|
||||
checksum = "1b9ab9c7eadfd8df19006f1cf1a4aed13540ed5cbc047010ece5826e10825488"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
]
|
||||
|
@ -4572,7 +4572,7 @@ checksum = "bdbda6ac5cd1321e724fa9cee216f3a61885889b896f073b8f82322789c5250e"
|
|||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -4611,9 +4611,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "serde"
|
||||
version = "1.0.152"
|
||||
version = "1.0.163"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb"
|
||||
checksum = "2113ab51b87a539ae008b5c6c02dc020ffa39afd2d83cffcb3f4eb2722cebec2"
|
||||
dependencies = [
|
||||
"serde_derive",
|
||||
]
|
||||
|
@ -4639,13 +4639,13 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "serde_derive"
|
||||
version = "1.0.152"
|
||||
version = "1.0.163"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e"
|
||||
checksum = "8c805777e3930c8883389c602315a24224bcc738b63905ef87cd1420353ea93e"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 2.0.18",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -4668,7 +4668,7 @@ checksum = "9a5ec9fa74a20ebbe5d9ac23dac1fc96ba0ecfe9f50f2843b52e537b10fbcb4e"
|
|||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -4702,7 +4702,7 @@ dependencies = [
|
|||
"darling 0.13.99",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -4810,7 +4810,7 @@ checksum = "133659a15339456eeeb07572eb02a91c91e9815e9cbc89566944d2c8d3efdbf6"
|
|||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -4970,7 +4970,7 @@ dependencies = [
|
|||
"derive_common",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
"synstructure",
|
||||
]
|
||||
|
||||
|
@ -5027,6 +5027,17 @@ dependencies = [
|
|||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "2.0.18"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "32d41677bcbe24c20c52e7c70b0d8db04134c5d1066bf98662e2871ad200ea3e"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sync-guid"
|
||||
version = "0.1.0"
|
||||
|
@ -5064,7 +5075,7 @@ checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f"
|
|||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
"unicode-xid",
|
||||
]
|
||||
|
||||
|
@ -5155,7 +5166,7 @@ checksum = "1fb327af4685e4d03fa8cbcf1716380da910eeb2bb8be417e7f9fd3fb164f36f"
|
|||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -5237,7 +5248,7 @@ dependencies = [
|
|||
"derive_common",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
"synstructure",
|
||||
]
|
||||
|
||||
|
@ -5269,7 +5280,7 @@ checksum = "d266c00fde287f55d3f1c3e96c500c362a2b8c695076ec180f27918820bc6df8"
|
|||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -5339,7 +5350,7 @@ checksum = "4017f8f45139870ca7e672686113917c71c7a6e02d4924eda67186083c03081a"
|
|||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -5450,7 +5461,7 @@ checksum = "1f5cdec05b907f4e2f6843f4354f4ce6a5bebe1a56df320a49134944477ce4d8"
|
|||
dependencies = [
|
||||
"proc-macro-hack",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
"unic-langid-impl",
|
||||
]
|
||||
|
||||
|
@ -5643,7 +5654,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "03de61393a42b4ad4984a3763c0600594ac3e57e5aaa1d05cede933958987c03"
|
||||
dependencies = [
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -5675,7 +5686,7 @@ dependencies = [
|
|||
"proc-macro2",
|
||||
"quote",
|
||||
"serde",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
"toml",
|
||||
"uniffi_build",
|
||||
"uniffi_meta",
|
||||
|
@ -6313,7 +6324,7 @@ dependencies = [
|
|||
"mozbuild",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn 1.0.107",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
|
@ -107,6 +107,8 @@ TOLERATED_DUPES = {
|
|||
# and hasn't been updated in 1.5 years (an hypothetical update is
|
||||
# expected to remove the dependency on time altogether).
|
||||
"time": 2,
|
||||
# Transition is underway from syn 1.x to 2.x. (bug 1835053)
|
||||
"syn": 2,
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -3688,6 +3688,18 @@ user-id = 3618 # David Tolnay (dtolnay)
|
|||
start = "2019-04-16"
|
||||
end = "2024-04-25"
|
||||
|
||||
[[trusted.proc-macro2]]
|
||||
criteria = "safe-to-deploy"
|
||||
user-id = 3618 # David Tolnay (dtolnay)
|
||||
start = "2019-04-23"
|
||||
end = "2024-05-30"
|
||||
|
||||
[[trusted.quote]]
|
||||
criteria = "safe-to-deploy"
|
||||
user-id = 3618 # David Tolnay (dtolnay)
|
||||
start = "2019-04-09"
|
||||
end = "2024-05-30"
|
||||
|
||||
[[trusted.regex]]
|
||||
criteria = "safe-to-deploy"
|
||||
user-id = 189 # Andrew Gallant (BurntSushi)
|
||||
|
|
|
@ -302,6 +302,20 @@ user-id = 3618
|
|||
user-login = "dtolnay"
|
||||
user-name = "David Tolnay"
|
||||
|
||||
[[publisher.proc-macro2]]
|
||||
version = "1.0.59"
|
||||
when = "2023-05-25"
|
||||
user-id = 3618
|
||||
user-login = "dtolnay"
|
||||
user-name = "David Tolnay"
|
||||
|
||||
[[publisher.quote]]
|
||||
version = "1.0.28"
|
||||
when = "2023-05-25"
|
||||
user-id = 3618
|
||||
user-login = "dtolnay"
|
||||
user-name = "David Tolnay"
|
||||
|
||||
[[publisher.regex]]
|
||||
version = "1.7.1"
|
||||
when = "2023-01-09"
|
||||
|
@ -345,8 +359,8 @@ user-login = "Amanieu"
|
|||
user-name = "Amanieu d'Antras"
|
||||
|
||||
[[publisher.serde]]
|
||||
version = "1.0.152"
|
||||
when = "2022-12-26"
|
||||
version = "1.0.163"
|
||||
when = "2023-05-11"
|
||||
user-id = 3618
|
||||
user-login = "dtolnay"
|
||||
user-name = "David Tolnay"
|
||||
|
@ -359,8 +373,8 @@ user-login = "dtolnay"
|
|||
user-name = "David Tolnay"
|
||||
|
||||
[[publisher.serde_derive]]
|
||||
version = "1.0.152"
|
||||
when = "2022-12-26"
|
||||
version = "1.0.163"
|
||||
when = "2023-05-10"
|
||||
user-id = 3618
|
||||
user-login = "dtolnay"
|
||||
user-name = "David Tolnay"
|
||||
|
@ -400,6 +414,13 @@ user-id = 3618
|
|||
user-login = "dtolnay"
|
||||
user-name = "David Tolnay"
|
||||
|
||||
[[publisher.syn]]
|
||||
version = "2.0.18"
|
||||
when = "2023-05-26"
|
||||
user-id = 3618
|
||||
user-login = "dtolnay"
|
||||
user-name = "David Tolnay"
|
||||
|
||||
[[publisher.termcolor]]
|
||||
version = "1.2.0"
|
||||
when = "2023-01-15"
|
||||
|
|
|
@ -1 +1 @@
|
|||
{"files":{"Cargo.toml":"7d4723ca4eea6d781e7e67c85a4f3537723b89e6c8e1b843e9f3a090d6c02d00","LICENSE-APACHE":"62c7a1e35f56406896d7aa7ca52d0cc0d272ac022b5d2796e7d6905db8a3636a","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"32cbd395594db59ecc43d7866cfa2663f3687bb7df631781d60ae83200dae8a8","build.rs":"275f7a9ee0b9eff972124951de544ae17ee3e698a4e89b0f0393b334344f5e30","src/detection.rs":"ed9a5f9a979ab01247d7a68eeb1afa3c13209334c5bfff0f9289cb07e5bb4e8b","src/fallback.rs":"b0b98566421529c309f4310565f24fa27608078c03c84e07a65f5aa1c0df2ae8","src/lib.rs":"4ba3c39bb516c6acbcfc5cfb45888ca79c93953768ac6a45bb2fb6f342f24874","src/location.rs":"f55d2e61f1bb1af65e14ed04c9e91eb1ddbf8430e8c05f2048d1cd538d27368e","src/marker.rs":"344a8394f06a1d43355b514920e7e3c0c6dce507be767e3a590bbe3552edd110","src/parse.rs":"06bd29cf594bb5d5cfff9b3371ce6a3367a13788354135e51e8b5ff195d06481","src/rcvec.rs":"49b6784c6ca5f32573cd8a83758b485d8acbfa126e5fb516ae439e429ef4c144","src/wrapper.rs":"6932058819a5c31c1765e6294f1a8279ab4ea1807de21a488c869fdfe13bf9d5","tests/comments.rs":"31115b3a56c83d93eef2fb4c9566bf4543e302560732986161b98aef504785ed","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"cb6d776eba6a238d726b0f531883adf41957e06f2717ee8a069821c81e7081d6","tests/test.rs":"cf3c944f1c4a09c326b1e639f70c173f0d93d916fb50c085542e44fad691eea7","tests/test_fmt.rs":"9357769945784354909259084ec8b34d2aa52081dd3967cac6dae3a5e3df3bc0"},"package":"5d727cae5b39d21da60fa540906919ad737832fe0b1c165da3a34d6548c849d6"}
|
||||
{"files":{"Cargo.toml":"dcdd2b69dae96b5536340b3c2d8f0c1bcd1c2d0b8f15be935bdffd6a3c5603bd","LICENSE-APACHE":"62c7a1e35f56406896d7aa7ca52d0cc0d272ac022b5d2796e7d6905db8a3636a","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"32cbd395594db59ecc43d7866cfa2663f3687bb7df631781d60ae83200dae8a8","build.rs":"50edda08f36b17b0313aa92dd275d4d6954ab3d1837d637d4e0168a4cc2cb869","rust-toolchain.toml":"6bbb61302978c736b2da03e4fb40e3beab908f85d533ab46fd541e637b5f3e0f","src/convert.rs":"0f8e0f472e49e0be79e65654065a752df1ac9ad55da43952ee7c86cb56940171","src/detection.rs":"ed9a5f9a979ab01247d7a68eeb1afa3c13209334c5bfff0f9289cb07e5bb4e8b","src/extra.rs":"3447c89e4d83a94ebdf3599adb64050b92502da2a1f99a5cf36706e52d2c56dc","src/fallback.rs":"2b16b67a19d42bf8abc2e004cc8edb1d02b9cbd01a5b45e2fb2387a0ea32dc64","src/lib.rs":"8ea28369b33e44b9180f42e473367006459204f1501ffa35280e1b093107c2b1","src/location.rs":"f55d2e61f1bb1af65e14ed04c9e91eb1ddbf8430e8c05f2048d1cd538d27368e","src/marker.rs":"344a8394f06a1d43355b514920e7e3c0c6dce507be767e3a590bbe3552edd110","src/parse.rs":"5b0171c73228f4daa350af678c3e593e08207fd989ebd4f1c77fca097f87e76b","src/rcvec.rs":"6233164ae0afc5c74ddc9e27c7869ec523385a3e5bdb83c3662841e78af14982","src/wrapper.rs":"7313d0fa3d5c20a660f82a829a394bf1ec9592d7c2f3fe8d666453465a539010","tests/comments.rs":"31115b3a56c83d93eef2fb4c9566bf4543e302560732986161b98aef504785ed","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"bc86b7260e29dfc8cd3e01b0d3fb9e88f17442dc83235f264e8cacc5ab4fe23d","tests/test.rs":"0445ac5c5993b5195c2bcba766984349e5b0bc69f180f45562411bb5cd6bd03b","tests/test_fmt.rs":"9357769945784354909259084ec8b34d2aa52081dd3967cac6dae3a5e3df3bc0","tests/test_size.rs":"acf05963c1e62052d769d237b50844a2c59b4182b491231b099a4f74e5456ab0"},"package":"6aeca18b86b413c660b781aa319e4e2648a3e6f9eadc9b47e9038e6fe9f3451b"}
|
|
@ -13,7 +13,7 @@
|
|||
edition = "2018"
|
||||
rust-version = "1.31"
|
||||
name = "proc-macro2"
|
||||
version = "1.0.51"
|
||||
version = "1.0.59"
|
||||
authors = [
|
||||
"David Tolnay <dtolnay@gmail.com>",
|
||||
"Alex Crichton <alex@alexcrichton.com>",
|
||||
|
@ -56,6 +56,9 @@ version = "1.0"
|
|||
version = "1.0"
|
||||
default_features = false
|
||||
|
||||
[dev-dependencies.rustversion]
|
||||
version = "1"
|
||||
|
||||
[features]
|
||||
default = ["proc-macro"]
|
||||
nightly = []
|
||||
|
|
|
@ -1,11 +1,5 @@
|
|||
// rustc-cfg emitted by the build script:
|
||||
//
|
||||
// "use_proc_macro"
|
||||
// Link to extern crate proc_macro. Available on any compiler and any target
|
||||
// except wasm32. Requires "proc-macro" Cargo cfg to be enabled (default is
|
||||
// enabled). On wasm32 we never link to proc_macro even if "proc-macro" cfg
|
||||
// is enabled.
|
||||
//
|
||||
// "wrap_proc_macro"
|
||||
// Wrap types from libproc_macro rather than polyfilling the whole API.
|
||||
// Enabled on rustc 1.29+ as long as procmacro2_semver_exempt is not set,
|
||||
|
@ -43,14 +37,15 @@
|
|||
use std::env;
|
||||
use std::process::{self, Command};
|
||||
use std::str;
|
||||
use std::u32;
|
||||
|
||||
fn main() {
|
||||
println!("cargo:rerun-if-changed=build.rs");
|
||||
|
||||
let version = match rustc_version() {
|
||||
Some(version) => version,
|
||||
None => return,
|
||||
};
|
||||
let version = rustc_version().unwrap_or(RustcVersion {
|
||||
minor: u32::MAX,
|
||||
nightly: false,
|
||||
});
|
||||
|
||||
if version.minor < 31 {
|
||||
eprintln!("Minimum supported rustc version is 1.31");
|
||||
|
@ -72,6 +67,10 @@ fn main() {
|
|||
println!("cargo:rustc-cfg=no_libprocmacro_unwind_safe");
|
||||
}
|
||||
|
||||
if version.minor < 34 {
|
||||
println!("cargo:rustc-cfg=no_try_from");
|
||||
}
|
||||
|
||||
if version.minor < 39 {
|
||||
println!("cargo:rustc-cfg=no_bind_by_move_pattern_guard");
|
||||
}
|
||||
|
@ -100,12 +99,13 @@ fn main() {
|
|||
println!("cargo:rustc-cfg=no_is_available");
|
||||
}
|
||||
|
||||
let target = env::var("TARGET").unwrap();
|
||||
if !enable_use_proc_macro(&target) {
|
||||
return;
|
||||
if version.minor < 66 {
|
||||
println!("cargo:rustc-cfg=no_source_text");
|
||||
}
|
||||
|
||||
println!("cargo:rustc-cfg=use_proc_macro");
|
||||
if !cfg!(feature = "proc-macro") {
|
||||
return;
|
||||
}
|
||||
|
||||
if version.nightly || !semver_exempt {
|
||||
println!("cargo:rustc-cfg=wrap_proc_macro");
|
||||
|
@ -123,16 +123,6 @@ fn main() {
|
|||
}
|
||||
}
|
||||
|
||||
fn enable_use_proc_macro(target: &str) -> bool {
|
||||
// wasm targets don't have the `proc_macro` crate, disable this feature.
|
||||
if target.contains("wasm32") {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Otherwise, only enable it if our feature is actually enabled.
|
||||
cfg!(feature = "proc-macro")
|
||||
}
|
||||
|
||||
struct RustcVersion {
|
||||
minor: u32,
|
||||
nightly: bool,
|
||||
|
|
|
@ -0,0 +1,2 @@
|
|||
[toolchain]
|
||||
components = ["rust-src"]
|
|
@ -0,0 +1,19 @@
|
|||
pub(crate) fn usize_to_u32(u: usize) -> Option<u32> {
|
||||
#[cfg(not(no_try_from))]
|
||||
{
|
||||
use core::convert::TryFrom;
|
||||
|
||||
u32::try_from(u).ok()
|
||||
}
|
||||
|
||||
#[cfg(no_try_from)]
|
||||
{
|
||||
use core::mem;
|
||||
|
||||
if mem::size_of::<usize>() <= mem::size_of::<u32>() || u <= u32::max_value() as usize {
|
||||
Some(u as u32)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,100 @@
|
|||
//! Items which do not have a correspondence to any API in the proc_macro crate,
|
||||
//! but are necessary to include in proc-macro2.
|
||||
|
||||
use crate::fallback;
|
||||
use crate::imp;
|
||||
use crate::marker::Marker;
|
||||
use crate::Span;
|
||||
use core::fmt::{self, Debug};
|
||||
|
||||
/// An object that holds a [`Group`]'s `span_open()` and `span_close()` together
|
||||
/// (in a more compact representation than holding those 2 spans individually.
|
||||
///
|
||||
/// [`Group`]: crate::Group
|
||||
#[derive(Copy, Clone)]
|
||||
pub struct DelimSpan {
|
||||
inner: DelimSpanEnum,
|
||||
_marker: Marker,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
enum DelimSpanEnum {
|
||||
#[cfg(wrap_proc_macro)]
|
||||
Compiler {
|
||||
join: proc_macro::Span,
|
||||
#[cfg(not(no_group_open_close))]
|
||||
open: proc_macro::Span,
|
||||
#[cfg(not(no_group_open_close))]
|
||||
close: proc_macro::Span,
|
||||
},
|
||||
Fallback(fallback::Span),
|
||||
}
|
||||
|
||||
impl DelimSpan {
|
||||
pub(crate) fn new(group: &imp::Group) -> Self {
|
||||
#[cfg(wrap_proc_macro)]
|
||||
let inner = match group {
|
||||
imp::Group::Compiler(group) => DelimSpanEnum::Compiler {
|
||||
join: group.span(),
|
||||
#[cfg(not(no_group_open_close))]
|
||||
open: group.span_open(),
|
||||
#[cfg(not(no_group_open_close))]
|
||||
close: group.span_close(),
|
||||
},
|
||||
imp::Group::Fallback(group) => DelimSpanEnum::Fallback(group.span()),
|
||||
};
|
||||
|
||||
#[cfg(not(wrap_proc_macro))]
|
||||
let inner = DelimSpanEnum::Fallback(group.span());
|
||||
|
||||
DelimSpan {
|
||||
inner,
|
||||
_marker: Marker,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns a span covering the entire delimited group.
|
||||
pub fn join(&self) -> Span {
|
||||
match &self.inner {
|
||||
#[cfg(wrap_proc_macro)]
|
||||
DelimSpanEnum::Compiler { join, .. } => Span::_new(imp::Span::Compiler(*join)),
|
||||
DelimSpanEnum::Fallback(span) => Span::_new_fallback(*span),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns a span for the opening punctuation of the group only.
|
||||
pub fn open(&self) -> Span {
|
||||
match &self.inner {
|
||||
#[cfg(wrap_proc_macro)]
|
||||
DelimSpanEnum::Compiler {
|
||||
#[cfg(not(no_group_open_close))]
|
||||
open,
|
||||
#[cfg(no_group_open_close)]
|
||||
join: open,
|
||||
..
|
||||
} => Span::_new(imp::Span::Compiler(*open)),
|
||||
DelimSpanEnum::Fallback(span) => Span::_new_fallback(span.first_byte()),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns a span for the closing punctuation of the group only.
|
||||
pub fn close(&self) -> Span {
|
||||
match &self.inner {
|
||||
#[cfg(wrap_proc_macro)]
|
||||
DelimSpanEnum::Compiler {
|
||||
#[cfg(not(no_group_open_close))]
|
||||
close,
|
||||
#[cfg(no_group_open_close)]
|
||||
join: close,
|
||||
..
|
||||
} => Span::_new(imp::Span::Compiler(*close)),
|
||||
DelimSpanEnum::Fallback(span) => Span::_new_fallback(span.last_byte()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for DelimSpan {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
Debug::fmt(&self.join(), f)
|
||||
}
|
||||
}
|
|
@ -3,7 +3,7 @@ use crate::location::LineColumn;
|
|||
use crate::parse::{self, Cursor};
|
||||
use crate::rcvec::{RcVec, RcVecBuilder, RcVecIntoIter, RcVecMut};
|
||||
use crate::{Delimiter, Spacing, TokenTree};
|
||||
#[cfg(span_locations)]
|
||||
#[cfg(all(span_locations, not(fuzzing)))]
|
||||
use core::cell::RefCell;
|
||||
#[cfg(span_locations)]
|
||||
use core::cmp;
|
||||
|
@ -13,8 +13,6 @@ use core::mem::ManuallyDrop;
|
|||
use core::ops::RangeBounds;
|
||||
use core::ptr;
|
||||
use core::str::FromStr;
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
||||
/// Force use of proc-macro2's fallback implementation of the API for now, even
|
||||
|
@ -94,7 +92,7 @@ fn push_token_from_proc_macro(mut vec: RcVecMut<TokenTree>, token: TokenTree) {
|
|||
if literal.repr.starts_with('-') {
|
||||
push_negative_literal(vec, literal);
|
||||
} else {
|
||||
vec.push(TokenTree::Literal(crate::Literal::_new_stable(literal)));
|
||||
vec.push(TokenTree::Literal(crate::Literal::_new_fallback(literal)));
|
||||
}
|
||||
}
|
||||
_ => vec.push(token),
|
||||
|
@ -104,9 +102,9 @@ fn push_token_from_proc_macro(mut vec: RcVecMut<TokenTree>, token: TokenTree) {
|
|||
fn push_negative_literal(mut vec: RcVecMut<TokenTree>, mut literal: Literal) {
|
||||
literal.repr.remove(0);
|
||||
let mut punct = crate::Punct::new('-', Spacing::Alone);
|
||||
punct.set_span(crate::Span::_new_stable(literal.span));
|
||||
punct.set_span(crate::Span::_new_fallback(literal.span));
|
||||
vec.push(TokenTree::Punct(punct));
|
||||
vec.push(TokenTree::Literal(crate::Literal::_new_stable(literal)));
|
||||
vec.push(TokenTree::Literal(crate::Literal::_new_fallback(literal)));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -162,11 +160,14 @@ impl TokenStreamBuilder {
|
|||
|
||||
#[cfg(span_locations)]
|
||||
fn get_cursor(src: &str) -> Cursor {
|
||||
#[cfg(fuzzing)]
|
||||
return Cursor { rest: src, off: 1 };
|
||||
|
||||
// Create a dummy file & add it to the source map
|
||||
#[cfg(not(fuzzing))]
|
||||
SOURCE_MAP.with(|cm| {
|
||||
let mut cm = cm.borrow_mut();
|
||||
let name = format!("<parsed string {}>", cm.files.len());
|
||||
let span = cm.add_file(&name, src);
|
||||
let span = cm.add_file(src);
|
||||
Cursor {
|
||||
rest: src,
|
||||
off: span.lo,
|
||||
|
@ -232,7 +233,7 @@ impl Debug for TokenStream {
|
|||
}
|
||||
}
|
||||
|
||||
#[cfg(use_proc_macro)]
|
||||
#[cfg(feature = "proc-macro")]
|
||||
impl From<proc_macro::TokenStream> for TokenStream {
|
||||
fn from(inner: proc_macro::TokenStream) -> Self {
|
||||
inner
|
||||
|
@ -242,7 +243,7 @@ impl From<proc_macro::TokenStream> for TokenStream {
|
|||
}
|
||||
}
|
||||
|
||||
#[cfg(use_proc_macro)]
|
||||
#[cfg(feature = "proc-macro")]
|
||||
impl From<TokenStream> for proc_macro::TokenStream {
|
||||
fn from(inner: TokenStream) -> Self {
|
||||
inner
|
||||
|
@ -334,29 +335,27 @@ impl Debug for SourceFile {
|
|||
}
|
||||
}
|
||||
|
||||
#[cfg(span_locations)]
|
||||
#[cfg(all(span_locations, not(fuzzing)))]
|
||||
thread_local! {
|
||||
static SOURCE_MAP: RefCell<SourceMap> = RefCell::new(SourceMap {
|
||||
// NOTE: We start with a single dummy file which all call_site() and
|
||||
// def_site() spans reference.
|
||||
files: vec![FileInfo {
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
name: "<unspecified>".to_owned(),
|
||||
source_text: String::new(),
|
||||
span: Span { lo: 0, hi: 0 },
|
||||
lines: vec![0],
|
||||
}],
|
||||
});
|
||||
}
|
||||
|
||||
#[cfg(span_locations)]
|
||||
#[cfg(all(span_locations, not(fuzzing)))]
|
||||
struct FileInfo {
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
name: String,
|
||||
source_text: String,
|
||||
span: Span,
|
||||
lines: Vec<usize>,
|
||||
}
|
||||
|
||||
#[cfg(span_locations)]
|
||||
#[cfg(all(span_locations, not(fuzzing)))]
|
||||
impl FileInfo {
|
||||
fn offset_line_column(&self, offset: usize) -> LineColumn {
|
||||
assert!(self.span_within(Span {
|
||||
|
@ -379,11 +378,17 @@ impl FileInfo {
|
|||
fn span_within(&self, span: Span) -> bool {
|
||||
span.lo >= self.span.lo && span.hi <= self.span.hi
|
||||
}
|
||||
|
||||
fn source_text(&self, span: Span) -> String {
|
||||
let lo = (span.lo - self.span.lo) as usize;
|
||||
let hi = (span.hi - self.span.lo) as usize;
|
||||
self.source_text[lo..hi].to_owned()
|
||||
}
|
||||
}
|
||||
|
||||
/// Computes the offsets of each line in the given source string
|
||||
/// and the total number of characters
|
||||
#[cfg(span_locations)]
|
||||
#[cfg(all(span_locations, not(fuzzing)))]
|
||||
fn lines_offsets(s: &str) -> (usize, Vec<usize>) {
|
||||
let mut lines = vec![0];
|
||||
let mut total = 0;
|
||||
|
@ -398,12 +403,12 @@ fn lines_offsets(s: &str) -> (usize, Vec<usize>) {
|
|||
(total, lines)
|
||||
}
|
||||
|
||||
#[cfg(span_locations)]
|
||||
#[cfg(all(span_locations, not(fuzzing)))]
|
||||
struct SourceMap {
|
||||
files: Vec<FileInfo>,
|
||||
}
|
||||
|
||||
#[cfg(span_locations)]
|
||||
#[cfg(all(span_locations, not(fuzzing)))]
|
||||
impl SourceMap {
|
||||
fn next_start_pos(&self) -> u32 {
|
||||
// Add 1 so there's always space between files.
|
||||
|
@ -413,7 +418,7 @@ impl SourceMap {
|
|||
self.files.last().unwrap().span.hi + 1
|
||||
}
|
||||
|
||||
fn add_file(&mut self, name: &str, src: &str) -> Span {
|
||||
fn add_file(&mut self, src: &str) -> Span {
|
||||
let (len, lines) = lines_offsets(src);
|
||||
let lo = self.next_start_pos();
|
||||
// XXX(nika): Should we bother doing a checked cast or checked add here?
|
||||
|
@ -423,25 +428,35 @@ impl SourceMap {
|
|||
};
|
||||
|
||||
self.files.push(FileInfo {
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
name: name.to_owned(),
|
||||
source_text: src.to_owned(),
|
||||
span,
|
||||
lines,
|
||||
});
|
||||
|
||||
#[cfg(not(procmacro2_semver_exempt))]
|
||||
let _ = name;
|
||||
|
||||
span
|
||||
}
|
||||
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
fn filepath(&self, span: Span) -> PathBuf {
|
||||
for (i, file) in self.files.iter().enumerate() {
|
||||
if file.span_within(span) {
|
||||
return PathBuf::from(if i == 0 {
|
||||
"<unspecified>".to_owned()
|
||||
} else {
|
||||
format!("<parsed string {}>", i)
|
||||
});
|
||||
}
|
||||
}
|
||||
unreachable!("Invalid span with no related FileInfo!");
|
||||
}
|
||||
|
||||
fn fileinfo(&self, span: Span) -> &FileInfo {
|
||||
for file in &self.files {
|
||||
if file.span_within(span) {
|
||||
return file;
|
||||
}
|
||||
}
|
||||
panic!("Invalid span with no related FileInfo!");
|
||||
unreachable!("Invalid span with no related FileInfo!");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -487,17 +502,25 @@ impl Span {
|
|||
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
pub fn source_file(&self) -> SourceFile {
|
||||
#[cfg(fuzzing)]
|
||||
return SourceFile {
|
||||
path: PathBuf::from("<unspecified>"),
|
||||
};
|
||||
|
||||
#[cfg(not(fuzzing))]
|
||||
SOURCE_MAP.with(|cm| {
|
||||
let cm = cm.borrow();
|
||||
let fi = cm.fileinfo(*self);
|
||||
SourceFile {
|
||||
path: Path::new(&fi.name).to_owned(),
|
||||
}
|
||||
let path = cm.filepath(*self);
|
||||
SourceFile { path }
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(span_locations)]
|
||||
pub fn start(&self) -> LineColumn {
|
||||
#[cfg(fuzzing)]
|
||||
return LineColumn { line: 0, column: 0 };
|
||||
|
||||
#[cfg(not(fuzzing))]
|
||||
SOURCE_MAP.with(|cm| {
|
||||
let cm = cm.borrow();
|
||||
let fi = cm.fileinfo(*self);
|
||||
|
@ -507,6 +530,10 @@ impl Span {
|
|||
|
||||
#[cfg(span_locations)]
|
||||
pub fn end(&self) -> LineColumn {
|
||||
#[cfg(fuzzing)]
|
||||
return LineColumn { line: 0, column: 0 };
|
||||
|
||||
#[cfg(not(fuzzing))]
|
||||
SOURCE_MAP.with(|cm| {
|
||||
let cm = cm.borrow();
|
||||
let fi = cm.fileinfo(*self);
|
||||
|
@ -541,6 +568,13 @@ impl Span {
|
|||
|
||||
#[cfg(span_locations)]
|
||||
pub fn join(&self, other: Span) -> Option<Span> {
|
||||
#[cfg(fuzzing)]
|
||||
return {
|
||||
let _ = other;
|
||||
None
|
||||
};
|
||||
|
||||
#[cfg(not(fuzzing))]
|
||||
SOURCE_MAP.with(|cm| {
|
||||
let cm = cm.borrow();
|
||||
// If `other` is not within the same FileInfo as us, return None.
|
||||
|
@ -555,12 +589,32 @@ impl Span {
|
|||
}
|
||||
|
||||
#[cfg(not(span_locations))]
|
||||
fn first_byte(self) -> Self {
|
||||
pub fn source_text(&self) -> Option<String> {
|
||||
None
|
||||
}
|
||||
|
||||
#[cfg(span_locations)]
|
||||
pub fn source_text(&self) -> Option<String> {
|
||||
#[cfg(fuzzing)]
|
||||
return None;
|
||||
|
||||
#[cfg(not(fuzzing))]
|
||||
{
|
||||
if self.is_call_site() {
|
||||
None
|
||||
} else {
|
||||
Some(SOURCE_MAP.with(|cm| cm.borrow().fileinfo(*self).source_text(*self)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(span_locations))]
|
||||
pub(crate) fn first_byte(self) -> Self {
|
||||
self
|
||||
}
|
||||
|
||||
#[cfg(span_locations)]
|
||||
fn first_byte(self) -> Self {
|
||||
pub(crate) fn first_byte(self) -> Self {
|
||||
Span {
|
||||
lo: self.lo,
|
||||
hi: cmp::min(self.lo.saturating_add(1), self.hi),
|
||||
|
@ -568,17 +622,22 @@ impl Span {
|
|||
}
|
||||
|
||||
#[cfg(not(span_locations))]
|
||||
fn last_byte(self) -> Self {
|
||||
pub(crate) fn last_byte(self) -> Self {
|
||||
self
|
||||
}
|
||||
|
||||
#[cfg(span_locations)]
|
||||
fn last_byte(self) -> Self {
|
||||
pub(crate) fn last_byte(self) -> Self {
|
||||
Span {
|
||||
lo: cmp::max(self.hi.saturating_sub(1), self.lo),
|
||||
hi: self.hi,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(span_locations)]
|
||||
fn is_call_site(&self) -> bool {
|
||||
self.lo == 0 && self.hi == 0
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for Span {
|
||||
|
@ -594,7 +653,7 @@ impl Debug for Span {
|
|||
pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
|
||||
#[cfg(span_locations)]
|
||||
{
|
||||
if span.lo == 0 && span.hi == 0 {
|
||||
if span.is_call_site() {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
@ -899,12 +958,25 @@ impl Literal {
|
|||
pub fn string(t: &str) -> Literal {
|
||||
let mut repr = String::with_capacity(t.len() + 2);
|
||||
repr.push('"');
|
||||
for c in t.chars() {
|
||||
if c == '\'' {
|
||||
let mut chars = t.chars();
|
||||
while let Some(ch) = chars.next() {
|
||||
if ch == '\0' {
|
||||
repr.push_str(
|
||||
if chars
|
||||
.as_str()
|
||||
.starts_with(|next| '0' <= next && next <= '7')
|
||||
{
|
||||
// circumvent clippy::octal_escapes lint
|
||||
"\\x00"
|
||||
} else {
|
||||
"\\0"
|
||||
},
|
||||
);
|
||||
} else if ch == '\'' {
|
||||
// escape_debug turns this into "\'" which is unnecessary.
|
||||
repr.push(c);
|
||||
repr.push(ch);
|
||||
} else {
|
||||
repr.extend(c.escape_debug());
|
||||
repr.extend(ch.escape_debug());
|
||||
}
|
||||
}
|
||||
repr.push('"');
|
||||
|
@ -926,16 +998,21 @@ impl Literal {
|
|||
|
||||
pub fn byte_string(bytes: &[u8]) -> Literal {
|
||||
let mut escaped = "b\"".to_string();
|
||||
for b in bytes {
|
||||
let mut bytes = bytes.iter();
|
||||
while let Some(&b) = bytes.next() {
|
||||
#[allow(clippy::match_overlapping_arm)]
|
||||
match *b {
|
||||
b'\0' => escaped.push_str(r"\0"),
|
||||
match b {
|
||||
b'\0' => escaped.push_str(match bytes.as_slice().first() {
|
||||
// circumvent clippy::octal_escapes lint
|
||||
Some(b'0'..=b'7') => r"\x00",
|
||||
_ => r"\0",
|
||||
}),
|
||||
b'\t' => escaped.push_str(r"\t"),
|
||||
b'\n' => escaped.push_str(r"\n"),
|
||||
b'\r' => escaped.push_str(r"\r"),
|
||||
b'"' => escaped.push_str("\\\""),
|
||||
b'\\' => escaped.push_str("\\\\"),
|
||||
b'\x20'..=b'\x7E' => escaped.push(*b as char),
|
||||
b'\x20'..=b'\x7E' => escaped.push(b as char),
|
||||
_ => {
|
||||
let _ = write!(escaped, "\\x{:02X}", b);
|
||||
}
|
||||
|
@ -953,28 +1030,76 @@ impl Literal {
|
|||
self.span = span;
|
||||
}
|
||||
|
||||
pub fn subspan<R: RangeBounds<usize>>(&self, _range: R) -> Option<Span> {
|
||||
None
|
||||
pub fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> {
|
||||
#[cfg(not(span_locations))]
|
||||
{
|
||||
let _ = range;
|
||||
None
|
||||
}
|
||||
|
||||
#[cfg(span_locations)]
|
||||
{
|
||||
use crate::convert::usize_to_u32;
|
||||
use core::ops::Bound;
|
||||
|
||||
let lo = match range.start_bound() {
|
||||
Bound::Included(start) => {
|
||||
let start = usize_to_u32(*start)?;
|
||||
self.span.lo.checked_add(start)?
|
||||
}
|
||||
Bound::Excluded(start) => {
|
||||
let start = usize_to_u32(*start)?;
|
||||
self.span.lo.checked_add(start)?.checked_add(1)?
|
||||
}
|
||||
Bound::Unbounded => self.span.lo,
|
||||
};
|
||||
let hi = match range.end_bound() {
|
||||
Bound::Included(end) => {
|
||||
let end = usize_to_u32(*end)?;
|
||||
self.span.lo.checked_add(end)?.checked_add(1)?
|
||||
}
|
||||
Bound::Excluded(end) => {
|
||||
let end = usize_to_u32(*end)?;
|
||||
self.span.lo.checked_add(end)?
|
||||
}
|
||||
Bound::Unbounded => self.span.hi,
|
||||
};
|
||||
if lo <= hi && hi <= self.span.hi {
|
||||
Some(Span { lo, hi })
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for Literal {
|
||||
type Err = LexError;
|
||||
|
||||
fn from_str(mut repr: &str) -> Result<Self, Self::Err> {
|
||||
let negative = repr.starts_with('-');
|
||||
fn from_str(repr: &str) -> Result<Self, Self::Err> {
|
||||
let mut cursor = get_cursor(repr);
|
||||
#[cfg(span_locations)]
|
||||
let lo = cursor.off;
|
||||
|
||||
let negative = cursor.starts_with_char('-');
|
||||
if negative {
|
||||
repr = &repr[1..];
|
||||
if !repr.starts_with(|ch: char| ch.is_ascii_digit()) {
|
||||
cursor = cursor.advance(1);
|
||||
if !cursor.starts_with_fn(|ch| ch.is_ascii_digit()) {
|
||||
return Err(LexError::call_site());
|
||||
}
|
||||
}
|
||||
let cursor = get_cursor(repr);
|
||||
if let Ok((_rest, mut literal)) = parse::literal(cursor) {
|
||||
if literal.repr.len() == repr.len() {
|
||||
|
||||
if let Ok((rest, mut literal)) = parse::literal(cursor) {
|
||||
if rest.is_empty() {
|
||||
if negative {
|
||||
literal.repr.insert(0, '-');
|
||||
}
|
||||
literal.span = Span {
|
||||
#[cfg(span_locations)]
|
||||
lo,
|
||||
#[cfg(span_locations)]
|
||||
hi: rest.off,
|
||||
};
|
||||
return Ok(literal);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -86,7 +86,7 @@
|
|||
//! a different thread.
|
||||
|
||||
// Proc-macro2 types in rustdoc of other crates get linked to here.
|
||||
#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.51")]
|
||||
#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.59")]
|
||||
#![cfg_attr(
|
||||
any(proc_macro_span, super_unstable),
|
||||
feature(proc_macro_span, proc_macro_span_shrink)
|
||||
|
@ -98,9 +98,11 @@
|
|||
clippy::cast_possible_truncation,
|
||||
clippy::doc_markdown,
|
||||
clippy::items_after_statements,
|
||||
clippy::let_underscore_untyped,
|
||||
clippy::manual_assert,
|
||||
clippy::must_use_candidate,
|
||||
clippy::needless_doctest_main,
|
||||
clippy::new_without_default,
|
||||
clippy::return_self_not_must_use,
|
||||
clippy::shadow_unrelated,
|
||||
clippy::trivially_copy_pass_by_ref,
|
||||
|
@ -118,7 +120,7 @@ compile_error! {"\
|
|||
build script as well.
|
||||
"}
|
||||
|
||||
#[cfg(use_proc_macro)]
|
||||
#[cfg(feature = "proc-macro")]
|
||||
extern crate proc_macro;
|
||||
|
||||
mod marker;
|
||||
|
@ -133,15 +135,20 @@ mod detection;
|
|||
#[doc(hidden)]
|
||||
pub mod fallback;
|
||||
|
||||
pub mod extra;
|
||||
|
||||
#[cfg(not(wrap_proc_macro))]
|
||||
use crate::fallback as imp;
|
||||
#[path = "wrapper.rs"]
|
||||
#[cfg(wrap_proc_macro)]
|
||||
mod imp;
|
||||
|
||||
#[cfg(span_locations)]
|
||||
mod convert;
|
||||
#[cfg(span_locations)]
|
||||
mod location;
|
||||
|
||||
use crate::extra::DelimSpan;
|
||||
use crate::marker::Marker;
|
||||
use core::cmp::Ordering;
|
||||
use core::fmt::{self, Debug, Display};
|
||||
|
@ -183,7 +190,7 @@ impl TokenStream {
|
|||
}
|
||||
}
|
||||
|
||||
fn _new_stable(inner: fallback::TokenStream) -> Self {
|
||||
fn _new_fallback(inner: fallback::TokenStream) -> Self {
|
||||
TokenStream {
|
||||
inner: inner.into(),
|
||||
_marker: Marker,
|
||||
|
@ -229,14 +236,16 @@ impl FromStr for TokenStream {
|
|||
}
|
||||
}
|
||||
|
||||
#[cfg(use_proc_macro)]
|
||||
#[cfg(feature = "proc-macro")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "proc-macro")))]
|
||||
impl From<proc_macro::TokenStream> for TokenStream {
|
||||
fn from(inner: proc_macro::TokenStream) -> Self {
|
||||
TokenStream::_new(inner.into())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(use_proc_macro)]
|
||||
#[cfg(feature = "proc-macro")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "proc-macro")))]
|
||||
impl From<TokenStream> for proc_macro::TokenStream {
|
||||
fn from(inner: TokenStream) -> Self {
|
||||
inner.inner.into()
|
||||
|
@ -377,7 +386,7 @@ impl Span {
|
|||
}
|
||||
}
|
||||
|
||||
fn _new_stable(inner: fallback::Span) -> Self {
|
||||
fn _new_fallback(inner: fallback::Span) -> Self {
|
||||
Span {
|
||||
inner: inner.into(),
|
||||
_marker: Marker,
|
||||
|
@ -524,6 +533,17 @@ impl Span {
|
|||
pub fn eq(&self, other: &Span) -> bool {
|
||||
self.inner.eq(&other.inner)
|
||||
}
|
||||
|
||||
/// Returns the source text behind a span. This preserves the original
|
||||
/// source code, including spaces and comments. It only returns a result if
|
||||
/// the span corresponds to real source code.
|
||||
///
|
||||
/// Note: The observable result of a macro should only rely on the tokens
|
||||
/// and not on this source text. The result of this function is a best
|
||||
/// effort to be used for diagnostics only.
|
||||
pub fn source_text(&self) -> Option<String> {
|
||||
self.inner.source_text()
|
||||
}
|
||||
}
|
||||
|
||||
/// Prints a span in a form convenient for debugging.
|
||||
|
@ -664,7 +684,7 @@ impl Group {
|
|||
Group { inner }
|
||||
}
|
||||
|
||||
fn _new_stable(inner: fallback::Group) -> Self {
|
||||
fn _new_fallback(inner: fallback::Group) -> Self {
|
||||
Group {
|
||||
inner: inner.into(),
|
||||
}
|
||||
|
@ -681,7 +701,8 @@ impl Group {
|
|||
}
|
||||
}
|
||||
|
||||
/// Returns the delimiter of this `Group`
|
||||
/// Returns the punctuation used as the delimiter for this group: a set of
|
||||
/// parentheses, square brackets, or curly braces.
|
||||
pub fn delimiter(&self) -> Delimiter {
|
||||
self.inner.delimiter()
|
||||
}
|
||||
|
@ -725,6 +746,13 @@ impl Group {
|
|||
Span::_new(self.inner.span_close())
|
||||
}
|
||||
|
||||
/// Returns an object that holds this group's `span_open()` and
|
||||
/// `span_close()` together (in a more compact representation than holding
|
||||
/// those 2 spans individually).
|
||||
pub fn delim_span(&self) -> DelimSpan {
|
||||
DelimSpan::new(&self.inner)
|
||||
}
|
||||
|
||||
/// Configures the span for this `Group`'s delimiters, but not its internal
|
||||
/// tokens.
|
||||
///
|
||||
|
@ -1081,7 +1109,7 @@ impl Literal {
|
|||
}
|
||||
}
|
||||
|
||||
fn _new_stable(inner: fallback::Literal) -> Self {
|
||||
fn _new_fallback(inner: fallback::Literal) -> Self {
|
||||
Literal {
|
||||
inner: inner.into(),
|
||||
_marker: Marker,
|
||||
|
|
|
@ -27,7 +27,18 @@ impl<'a> Cursor<'a> {
|
|||
self.rest.starts_with(s)
|
||||
}
|
||||
|
||||
fn is_empty(&self) -> bool {
|
||||
pub fn starts_with_char(&self, ch: char) -> bool {
|
||||
self.rest.starts_with(ch)
|
||||
}
|
||||
|
||||
pub fn starts_with_fn<Pattern>(&self, f: Pattern) -> bool
|
||||
where
|
||||
Pattern: FnMut(char) -> bool,
|
||||
{
|
||||
self.rest.starts_with(f)
|
||||
}
|
||||
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.rest.is_empty()
|
||||
}
|
||||
|
||||
|
@ -217,13 +228,13 @@ pub(crate) fn token_stream(mut input: Cursor) -> Result<TokenStream, LexError> {
|
|||
hi: input.off,
|
||||
});
|
||||
trees = outer;
|
||||
trees.push_token_from_parser(TokenTree::Group(crate::Group::_new_stable(g)));
|
||||
trees.push_token_from_parser(TokenTree::Group(crate::Group::_new_fallback(g)));
|
||||
} else {
|
||||
let (rest, mut tt) = match leaf_token(input) {
|
||||
Ok((rest, tt)) => (rest, tt),
|
||||
Err(Reject) => return Err(lex_error(input)),
|
||||
};
|
||||
tt.set_span(crate::Span::_new_stable(Span {
|
||||
tt.set_span(crate::Span::_new_fallback(Span {
|
||||
#[cfg(span_locations)]
|
||||
lo,
|
||||
#[cfg(span_locations)]
|
||||
|
@ -251,7 +262,7 @@ fn lex_error(cursor: Cursor) -> LexError {
|
|||
fn leaf_token(input: Cursor) -> PResult<TokenTree> {
|
||||
if let Ok((input, l)) = literal(input) {
|
||||
// must be parsed before ident
|
||||
Ok((input, TokenTree::Literal(crate::Literal::_new_stable(l))))
|
||||
Ok((input, TokenTree::Literal(crate::Literal::_new_fallback(l))))
|
||||
} else if let Ok((input, p)) = punct(input) {
|
||||
Ok((input, TokenTree::Punct(p)))
|
||||
} else if let Ok((input, i)) = ident(input) {
|
||||
|
@ -756,7 +767,7 @@ fn digits(mut input: Cursor) -> Result<Cursor, Reject> {
|
|||
fn punct(input: Cursor) -> PResult<Punct> {
|
||||
let (rest, ch) = punct_char(input)?;
|
||||
if ch == '\'' {
|
||||
if ident_any(rest)?.0.starts_with("'") {
|
||||
if ident_any(rest)?.0.starts_with_char('\'') {
|
||||
Err(Reject)
|
||||
} else {
|
||||
Ok((rest, Punct::new('\'', Spacing::Joint)))
|
||||
|
@ -795,7 +806,7 @@ fn doc_comment<'a>(input: Cursor<'a>, trees: &mut TokenStreamBuilder) -> PResult
|
|||
#[cfg(span_locations)]
|
||||
let lo = input.off;
|
||||
let (rest, (comment, inner)) = doc_comment_contents(input)?;
|
||||
let span = crate::Span::_new_stable(Span {
|
||||
let span = crate::Span::_new_fallback(Span {
|
||||
#[cfg(span_locations)]
|
||||
lo,
|
||||
#[cfg(span_locations)]
|
||||
|
@ -831,7 +842,7 @@ fn doc_comment<'a>(input: Cursor<'a>, trees: &mut TokenStreamBuilder) -> PResult
|
|||
bracketed.push_token_from_parser(TokenTree::Punct(equal));
|
||||
bracketed.push_token_from_parser(TokenTree::Literal(literal));
|
||||
let group = Group::new(Delimiter::Bracket, bracketed.build());
|
||||
let mut group = crate::Group::_new_stable(group);
|
||||
let mut group = crate::Group::_new_fallback(group);
|
||||
group.set_span(span);
|
||||
trees.push_token_from_parser(TokenTree::Group(group));
|
||||
|
||||
|
@ -848,7 +859,7 @@ fn doc_comment_contents(input: Cursor) -> PResult<(&str, bool)> {
|
|||
Ok((input, (&s[3..s.len() - 2], true)))
|
||||
} else if input.starts_with("///") {
|
||||
let input = input.advance(3);
|
||||
if input.starts_with("/") {
|
||||
if input.starts_with_char('/') {
|
||||
return Err(Reject);
|
||||
}
|
||||
let (input, s) = take_until_newline_or_eof(input);
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
use core::mem;
|
||||
use core::slice;
|
||||
use std::panic::RefUnwindSafe;
|
||||
use std::rc::Rc;
|
||||
use std::vec;
|
||||
|
||||
|
@ -140,3 +141,5 @@ impl<T> Iterator for RcVecIntoIter<T> {
|
|||
self.inner.size_hint()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> RefUnwindSafe for RcVec<T> where T: RefUnwindSafe {}
|
||||
|
|
|
@ -40,7 +40,7 @@ impl LexError {
|
|||
}
|
||||
|
||||
fn mismatch() -> ! {
|
||||
panic!("stable/nightly mismatch")
|
||||
panic!("compiler/fallback mismatch")
|
||||
}
|
||||
|
||||
impl DeferredTokenStream {
|
||||
|
@ -470,12 +470,6 @@ impl Span {
|
|||
#[cfg(span_locations)]
|
||||
pub fn start(&self) -> LineColumn {
|
||||
match self {
|
||||
#[cfg(proc_macro_span)]
|
||||
Span::Compiler(s) => {
|
||||
let proc_macro::LineColumn { line, column } = s.start();
|
||||
LineColumn { line, column }
|
||||
}
|
||||
#[cfg(not(proc_macro_span))]
|
||||
Span::Compiler(_) => LineColumn { line: 0, column: 0 },
|
||||
Span::Fallback(s) => s.start(),
|
||||
}
|
||||
|
@ -484,12 +478,6 @@ impl Span {
|
|||
#[cfg(span_locations)]
|
||||
pub fn end(&self) -> LineColumn {
|
||||
match self {
|
||||
#[cfg(proc_macro_span)]
|
||||
Span::Compiler(s) => {
|
||||
let proc_macro::LineColumn { line, column } = s.end();
|
||||
LineColumn { line, column }
|
||||
}
|
||||
#[cfg(not(proc_macro_span))]
|
||||
Span::Compiler(_) => LineColumn { line: 0, column: 0 },
|
||||
Span::Fallback(s) => s.end(),
|
||||
}
|
||||
|
@ -530,6 +518,16 @@ impl Span {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn source_text(&self) -> Option<String> {
|
||||
match self {
|
||||
#[cfg(not(no_source_text))]
|
||||
Span::Compiler(s) => s.source_text(),
|
||||
#[cfg(no_source_text)]
|
||||
Span::Compiler(_) => None,
|
||||
Span::Fallback(s) => s.source_text(),
|
||||
}
|
||||
}
|
||||
|
||||
fn unwrap_nightly(self) -> proc_macro::Span {
|
||||
match self {
|
||||
Span::Compiler(s) => s,
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
#![allow(clippy::extra_unused_type_parameters)]
|
||||
|
||||
use proc_macro2::{
|
||||
Delimiter, Group, Ident, LexError, Literal, Punct, Spacing, Span, TokenStream, TokenTree,
|
||||
};
|
||||
|
|
|
@ -1,7 +1,8 @@
|
|||
#![allow(
|
||||
clippy::assertions_on_result_states,
|
||||
clippy::items_after_statements,
|
||||
clippy::non_ascii_literal
|
||||
clippy::non_ascii_literal,
|
||||
clippy::octal_escapes
|
||||
)]
|
||||
|
||||
use proc_macro2::{Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
|
||||
|
@ -114,6 +115,10 @@ fn literal_string() {
|
|||
assert_eq!(Literal::string("foo").to_string(), "\"foo\"");
|
||||
assert_eq!(Literal::string("\"").to_string(), "\"\\\"\"");
|
||||
assert_eq!(Literal::string("didn't").to_string(), "\"didn't\"");
|
||||
assert_eq!(
|
||||
Literal::string("a\00b\07c\08d\0e\0").to_string(),
|
||||
"\"a\\x000b\\x007c\\08d\\0e\\0\"",
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -147,6 +152,10 @@ fn literal_byte_string() {
|
|||
Literal::byte_string(b"\0\t\n\r\"\\2\x10").to_string(),
|
||||
"b\"\\0\\t\\n\\r\\\"\\\\2\\x10\"",
|
||||
);
|
||||
assert_eq!(
|
||||
Literal::byte_string(b"a\00b\07c\08d\0e\0").to_string(),
|
||||
"b\"a\\x000b\\x007c\\08d\\0e\\0\"",
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -264,6 +273,30 @@ fn literal_parse() {
|
|||
assert!("-\"\"".parse::<Literal>().is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn literal_span() {
|
||||
let positive = "0.1".parse::<Literal>().unwrap();
|
||||
let negative = "-0.1".parse::<Literal>().unwrap();
|
||||
let subspan = positive.subspan(1..2);
|
||||
|
||||
#[cfg(not(span_locations))]
|
||||
{
|
||||
let _ = negative;
|
||||
assert!(subspan.is_none());
|
||||
}
|
||||
|
||||
#[cfg(span_locations)]
|
||||
{
|
||||
assert_eq!(positive.span().start().column, 0);
|
||||
assert_eq!(positive.span().end().column, 3);
|
||||
assert_eq!(negative.span().start().column, 0);
|
||||
assert_eq!(negative.span().end().column, 4);
|
||||
assert_eq!(subspan.unwrap().source_text().unwrap(), ".");
|
||||
}
|
||||
|
||||
assert!(positive.subspan(1..4).is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn roundtrip() {
|
||||
fn roundtrip(p: &str) {
|
||||
|
|
|
@ -0,0 +1,42 @@
|
|||
extern crate proc_macro;
|
||||
|
||||
use std::mem;
|
||||
|
||||
#[rustversion::attr(before(1.32), ignore)]
|
||||
#[test]
|
||||
fn test_proc_macro_span_size() {
|
||||
assert_eq!(mem::size_of::<proc_macro::Span>(), 4);
|
||||
assert_eq!(mem::size_of::<Option<proc_macro::Span>>(), 4);
|
||||
}
|
||||
|
||||
#[cfg_attr(not(all(not(wrap_proc_macro), not(span_locations))), ignore)]
|
||||
#[test]
|
||||
fn test_proc_macro2_fallback_span_size_without_locations() {
|
||||
assert_eq!(mem::size_of::<proc_macro2::Span>(), 0);
|
||||
assert_eq!(mem::size_of::<Option<proc_macro2::Span>>(), 1);
|
||||
}
|
||||
|
||||
#[cfg_attr(not(all(not(wrap_proc_macro), span_locations)), ignore)]
|
||||
#[test]
|
||||
fn test_proc_macro2_fallback_span_size_with_locations() {
|
||||
assert_eq!(mem::size_of::<proc_macro2::Span>(), 8);
|
||||
assert_eq!(mem::size_of::<Option<proc_macro2::Span>>(), 12);
|
||||
}
|
||||
|
||||
#[rustversion::attr(before(1.32), ignore)]
|
||||
#[rustversion::attr(
|
||||
since(1.32),
|
||||
cfg_attr(not(all(wrap_proc_macro, not(span_locations))), ignore)
|
||||
)]
|
||||
#[test]
|
||||
fn test_proc_macro2_wrapper_span_size_without_locations() {
|
||||
assert_eq!(mem::size_of::<proc_macro2::Span>(), 4);
|
||||
assert_eq!(mem::size_of::<Option<proc_macro2::Span>>(), 8);
|
||||
}
|
||||
|
||||
#[cfg_attr(not(all(wrap_proc_macro, span_locations)), ignore)]
|
||||
#[test]
|
||||
fn test_proc_macro2_wrapper_span_size_with_locations() {
|
||||
assert_eq!(mem::size_of::<proc_macro2::Span>(), 12);
|
||||
assert_eq!(mem::size_of::<Option<proc_macro2::Span>>(), 12);
|
||||
}
|
|
@ -1 +1 @@
|
|||
{"files":{"Cargo.toml":"b31678b5e9696b0320493f7120e873490183308fc5afb052dc23a265048b8e16","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"66f3cf08338e47618fd23d810355b075da573815d9c1e158a7f7ab140decc16d","build.rs":"3733c86ae2733629f873f93c2f45da30164beee8de9ee0833099fac6a05a3e6b","rust-toolchain.toml":"6bbb61302978c736b2da03e4fb40e3beab908f85d533ab46fd541e637b5f3e0f","src/ext.rs":"9881576cac3e476a4bf04f9b601cf9a53b79399fb0ca9634e8b861ac91709843","src/format.rs":"c595015418f35e6992e710441b9999f09b2afe4678b138039d670d100c0bdd86","src/ident_fragment.rs":"66788c5f57681547d936a9bcf51873b658630c76b2e690df4b3158edf573384a","src/lib.rs":"5f0dac39c736d01c698745909c93efb7e701aed4493c488a32239a7efd7d4469","src/runtime.rs":"79bbb2fe5b18bc3ec9f8f8143bd120b45680a3027c89f37b0a6a6b97bdaadb21","src/spanned.rs":"43ff919f1d2d27dff6b2db409539b1c697e913eb8c3131cf5de45a845752b7b5","src/to_tokens.rs":"99bb6f467289c32af6c1f7af0d45cc6ac7b31e2436774e616770152a49e6ac0f","tests/compiletest.rs":"022a8e400ef813d7ea1875b944549cee5125f6a995dc33e93b48cba3e1b57bd1","tests/test.rs":"c4967a33fcf7c2effd1979bcb4c03ae797359eeab92c627ab4b609cd8678ff78","tests/ui/does-not-have-iter-interpolated-dup.rs":"ad13eea21d4cdd2ab6c082f633392e1ff20fb0d1af5f2177041e0bf7f30da695","tests/ui/does-not-have-iter-interpolated-dup.stderr":"be67a6c99eed689aa08b46afd0ab3ed4e71fde42e5efed41ab05741710f42fe5","tests/ui/does-not-have-iter-interpolated.rs":"83a5b3f240651adcbe4b6e51076d76d653ad439b37442cf4054f1fd3c073f3b7","tests/ui/does-not-have-iter-interpolated.stderr":"ed05bc229abf5a267ea3d194336a3a845a061bd10c1be7020b9351f81e737946","tests/ui/does-not-have-iter-separated.rs":"fe413c48331d5e3a7ae5fef6a5892a90c72f610d54595879eb49d0a94154ba3f","tests/ui/does-not-have-iter-separated.stderr":"873f4db0ec63606d64d46790f3ee24bdb4dd04379b8e57dc5ac1114cc3775fb3","tests/ui/does-not-have-iter.rs":"09dc9499d861
b63cebb0848b855b78e2dc9497bfde37ba6339f3625ae009a62f","tests/ui/does-not-have-iter.stderr":"0e3de2635a79cce9226113fa8cb6bdbdc0ffcd487d7537d4dd0dc8222adf4a8a","tests/ui/not-quotable.rs":"d630ed8e5fe16f125015999d068569cc3fe5dc1033a56e622690ec2c080c13f4","tests/ui/not-quotable.stderr":"4b81ec7bb82ba197ede6d47c1b6e5cacc0999cc8c9e2fa77a46db5e29397153c","tests/ui/not-repeatable.rs":"dbfedcad67b57543aa7d6684b6549db90fbdb74ffebcae42323d31eb88e59c87","tests/ui/not-repeatable.stderr":"a578a6293fef33c54f8e8114bf72a933a1315b45e866e4bcef1e31ce2ce55dcd","tests/ui/wrong-type-span.rs":"6195e35ea844c0c52ba1cff5d790c3a371af6915d137d377834ad984229ef9ea","tests/ui/wrong-type-span.stderr":"c986de5cb858272636c9e36ae5f57e5ee13589d4f1a73a050b21824010314f8d"},"package":"8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b"}
|
||||
{"files":{"Cargo.toml":"04fa432ffe813738cb59dc66248022406bbff02ad50a6ec5445e0cfa89e7c5cb","LICENSE-APACHE":"62c7a1e35f56406896d7aa7ca52d0cc0d272ac022b5d2796e7d6905db8a3636a","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"15d3ee300a69f4eec5f81b74fce4652fa22ce84d44413379bc756e4aaca23300","build.rs":"3733c86ae2733629f873f93c2f45da30164beee8de9ee0833099fac6a05a3e6b","rust-toolchain.toml":"6bbb61302978c736b2da03e4fb40e3beab908f85d533ab46fd541e637b5f3e0f","src/ext.rs":"9881576cac3e476a4bf04f9b601cf9a53b79399fb0ca9634e8b861ac91709843","src/format.rs":"c595015418f35e6992e710441b9999f09b2afe4678b138039d670d100c0bdd86","src/ident_fragment.rs":"6b3b6621cd20bae776d2455f2b05f04433d8ce617c113e83a3683edef0667d1f","src/lib.rs":"71df353876cc945acc3e8cabfb640c1c691601dfb8aeb0470fb1b8556a2abaea","src/runtime.rs":"31b2159986c68dc1c78801a92f795435dbc0bcea859ca342df280889e82c6c4d","src/spanned.rs":"0ccaae1137af5f3e54eae75c3bdc637be74cfa56a857f2c0f85a041c9ba26838","src/to_tokens.rs":"99bb6f467289c32af6c1f7af0d45cc6ac7b31e2436774e616770152a49e6ac0f","tests/compiletest.rs":"022a8e400ef813d7ea1875b944549cee5125f6a995dc33e93b48cba3e1b57bd1","tests/test.rs":"3be80741f84a707376c230d9cf70ce9537caa359691d8d4c34968e28175e4ad7","tests/ui/does-not-have-iter-interpolated-dup.rs":"ad13eea21d4cdd2ab6c082f633392e1ff20fb0d1af5f2177041e0bf7f30da695","tests/ui/does-not-have-iter-interpolated-dup.stderr":"90a4bdb9267535f5d2785940148338d6b7d905548051d2c9c5dcbd58f2c11d8e","tests/ui/does-not-have-iter-interpolated.rs":"83a5b3f240651adcbe4b6e51076d76d653ad439b37442cf4054f1fd3c073f3b7","tests/ui/does-not-have-iter-interpolated.stderr":"ae7c2739554c862b331705e82781aa4687a4375210cef6ae899a4be4a4ec2d97","tests/ui/does-not-have-iter-separated.rs":"fe413c48331d5e3a7ae5fef6a5892a90c72f610d54595879eb49d0a94154ba3f","tests/ui/does-not-have-iter-separated.stderr":"03fd560979ebcd5aa6f83858bc2c3c01ba6546c16335101275505304895c1ae9","tests/ui/does-not-have-iter.rs":"09dc9499d861
b63cebb0848b855b78e2dc9497bfde37ba6339f3625ae009a62f","tests/ui/does-not-have-iter.stderr":"d6da483c29e232ced72059bbdf05d31afb1df9e02954edaa9cfaea1ec6df72dc","tests/ui/not-quotable.rs":"5759d0884943417609f28faadc70254a3e2fd3d9bd6ff7297a3fb70a77fafd8a","tests/ui/not-quotable.stderr":"efcace9419fdf64d6beca7e135c3b7daff74038d4449475896cbe8cbf2566ade","tests/ui/not-repeatable.rs":"a4b115c04e4e41049a05f5b69450503fbffeba031218b4189cb931839f7f9a9c","tests/ui/not-repeatable.stderr":"594249d59d16f039c16816f1aaf9933176994e296fcf81d1b8b24d5b66ae0d0a","tests/ui/wrong-type-span.rs":"6195e35ea844c0c52ba1cff5d790c3a371af6915d137d377834ad984229ef9ea","tests/ui/wrong-type-span.stderr":"cad072e40e0ecc04f375122ae41aede2f0da2a9244492b3fcf70249e59d1b128"},"package":"1b9ab9c7eadfd8df19006f1cf1a4aed13540ed5cbc047010ece5826e10825488"}
|
|
@ -13,7 +13,7 @@
|
|||
edition = "2018"
|
||||
rust-version = "1.31"
|
||||
name = "quote"
|
||||
version = "1.0.23"
|
||||
version = "1.0.28"
|
||||
authors = ["David Tolnay <dtolnay@gmail.com>"]
|
||||
autobenches = false
|
||||
description = "Quasi-quoting macro quote!(...)"
|
||||
|
@ -34,7 +34,7 @@ targets = ["x86_64-unknown-linux-gnu"]
|
|||
doc-scrape-examples = false
|
||||
|
||||
[dependencies.proc-macro2]
|
||||
version = "1.0.40"
|
||||
version = "1.0.52"
|
||||
default-features = false
|
||||
|
||||
[dev-dependencies.rustversion]
|
||||
|
|
|
@ -174,28 +174,3 @@ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
|||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
|
|
@ -233,15 +233,14 @@ macro.
|
|||
## Non-macro code generators
|
||||
|
||||
When using `quote` in a build.rs or main.rs and writing the output out to a
|
||||
file, consider having the code generator pass the tokens through [rustfmt]
|
||||
before writing (either by shelling out to the `rustfmt` binary or by pulling in
|
||||
the `rustfmt` library as a dependency). This way if an error occurs in the
|
||||
generated code it is convenient for a human to read and debug.
|
||||
file, consider having the code generator pass the tokens through [prettyplease]
|
||||
before writing. This way if an error occurs in the generated code it is
|
||||
convenient for a human to read and debug.
|
||||
|
||||
Be aware that no kind of hygiene or span information is retained when tokens are
|
||||
written to a file; the conversion from tokens to source code is lossy.
|
||||
|
||||
[rustfmt]: https://github.com/rust-lang/rustfmt
|
||||
[prettyplease]: https://github.com/dtolnay/prettyplease
|
||||
|
||||
<br>
|
||||
|
||||
|
|
|
@ -8,6 +8,8 @@ use std::borrow::Cow;
|
|||
/// stripped, if present.
|
||||
///
|
||||
/// See [`format_ident!`] for more information.
|
||||
///
|
||||
/// [`format_ident!`]: crate::format_ident
|
||||
pub trait IdentFragment {
|
||||
/// Format this value as an identifier fragment.
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result;
|
||||
|
|
|
@ -79,9 +79,20 @@
|
|||
//! }
|
||||
//! };
|
||||
//! ```
|
||||
//!
|
||||
//! <br>
|
||||
//!
|
||||
//! # Non-macro code generators
|
||||
//!
|
||||
//! When using `quote` in a build.rs or main.rs and writing the output out to a
|
||||
//! file, consider having the code generator pass the tokens through
|
||||
//! [prettyplease] before writing. This way if an error occurs in the generated
|
||||
//! code it is convenient for a human to read and debug.
|
||||
//!
|
||||
//! [prettyplease]: https://github.com/dtolnay/prettyplease
|
||||
|
||||
// Quote types in rustdoc of other crates get linked to here.
|
||||
#![doc(html_root_url = "https://docs.rs/quote/1.0.23")]
|
||||
#![doc(html_root_url = "https://docs.rs/quote/1.0.28")]
|
||||
#![allow(
|
||||
clippy::doc_markdown,
|
||||
clippy::missing_errors_doc,
|
||||
|
@ -91,10 +102,7 @@
|
|||
clippy::wrong_self_convention,
|
||||
)]
|
||||
|
||||
#[cfg(all(
|
||||
not(all(target_arch = "wasm32", target_os = "unknown")),
|
||||
feature = "proc-macro"
|
||||
))]
|
||||
#[cfg(feature = "proc-macro")]
|
||||
extern crate proc_macro;
|
||||
|
||||
mod ext;
|
||||
|
@ -619,14 +627,14 @@ macro_rules! quote_spanned {
|
|||
#[macro_export]
|
||||
macro_rules! quote_spanned {
|
||||
($span:expr=>) => {{
|
||||
let _: $crate::__private::Span = $span;
|
||||
let _: $crate::__private::Span = $crate::__private::get_span($span).__into_span();
|
||||
$crate::__private::TokenStream::new()
|
||||
}};
|
||||
|
||||
// Special case rule for a single tt, for performance.
|
||||
($span:expr=> $tt:tt) => {{
|
||||
let mut _s = $crate::__private::TokenStream::new();
|
||||
let _span: $crate::__private::Span = $span;
|
||||
let _span: $crate::__private::Span = $crate::__private::get_span($span).__into_span();
|
||||
$crate::quote_token_spanned!{$tt _s _span}
|
||||
_s
|
||||
}};
|
||||
|
@ -634,13 +642,13 @@ macro_rules! quote_spanned {
|
|||
// Special case rules for two tts, for performance.
|
||||
($span:expr=> # $var:ident) => {{
|
||||
let mut _s = $crate::__private::TokenStream::new();
|
||||
let _: $crate::__private::Span = $span;
|
||||
let _: $crate::__private::Span = $crate::__private::get_span($span).__into_span();
|
||||
$crate::ToTokens::to_tokens(&$var, &mut _s);
|
||||
_s
|
||||
}};
|
||||
($span:expr=> $tt1:tt $tt2:tt) => {{
|
||||
let mut _s = $crate::__private::TokenStream::new();
|
||||
let _span: $crate::__private::Span = $span;
|
||||
let _span: $crate::__private::Span = $crate::__private::get_span($span).__into_span();
|
||||
$crate::quote_token_spanned!{$tt1 _s _span}
|
||||
$crate::quote_token_spanned!{$tt2 _s _span}
|
||||
_s
|
||||
|
@ -649,7 +657,7 @@ macro_rules! quote_spanned {
|
|||
// Rule for any other number of tokens.
|
||||
($span:expr=> $($tt:tt)*) => {{
|
||||
let mut _s = $crate::__private::TokenStream::new();
|
||||
let _span: $crate::__private::Span = $span;
|
||||
let _span: $crate::__private::Span = $crate::__private::get_span($span).__into_span();
|
||||
$crate::quote_each_token_spanned!{_s _span $($tt)*}
|
||||
_s
|
||||
}};
|
||||
|
|
|
@ -1,10 +1,12 @@
|
|||
use self::get_span::{GetSpan, GetSpanBase, GetSpanInner};
|
||||
use crate::{IdentFragment, ToTokens, TokenStreamExt};
|
||||
use core::fmt;
|
||||
use core::iter;
|
||||
use core::ops::BitOr;
|
||||
use proc_macro2::{Group, Ident, Punct, Spacing, TokenTree};
|
||||
|
||||
pub use core::option::Option;
|
||||
pub use proc_macro2::*;
|
||||
pub use proc_macro2::{Delimiter, Span, TokenStream};
|
||||
pub use std::format;
|
||||
|
||||
pub struct HasIterator; // True
|
||||
|
@ -164,6 +166,62 @@ impl<T: ToTokens> ToTokens for RepInterp<T> {
|
|||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn get_span<T>(span: T) -> GetSpan<T> {
|
||||
GetSpan(GetSpanInner(GetSpanBase(span)))
|
||||
}
|
||||
|
||||
mod get_span {
|
||||
use core::ops::Deref;
|
||||
use proc_macro2::extra::DelimSpan;
|
||||
use proc_macro2::Span;
|
||||
|
||||
pub struct GetSpan<T>(pub(crate) GetSpanInner<T>);
|
||||
|
||||
pub struct GetSpanInner<T>(pub(crate) GetSpanBase<T>);
|
||||
|
||||
pub struct GetSpanBase<T>(pub(crate) T);
|
||||
|
||||
impl GetSpan<Span> {
|
||||
#[inline]
|
||||
pub fn __into_span(self) -> Span {
|
||||
((self.0).0).0
|
||||
}
|
||||
}
|
||||
|
||||
impl GetSpanInner<DelimSpan> {
|
||||
#[inline]
|
||||
pub fn __into_span(&self) -> Span {
|
||||
(self.0).0.join()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> GetSpanBase<T> {
|
||||
#[allow(clippy::unused_self)]
|
||||
pub fn __into_span(&self) -> T {
|
||||
unreachable!()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> Deref for GetSpan<T> {
|
||||
type Target = GetSpanInner<T>;
|
||||
|
||||
#[inline]
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> Deref for GetSpanInner<T> {
|
||||
type Target = GetSpanBase<T>;
|
||||
|
||||
#[inline]
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn push_group(tokens: &mut TokenStream, delimiter: Delimiter, inner: TokenStream) {
|
||||
tokens.append(Group::new(delimiter, inner));
|
||||
}
|
||||
|
|
|
@ -1,7 +1,9 @@
|
|||
use crate::ToTokens;
|
||||
use proc_macro2::extra::DelimSpan;
|
||||
use proc_macro2::{Span, TokenStream};
|
||||
|
||||
pub trait Spanned {
|
||||
// Not public API other than via the syn crate. Use syn::spanned::Spanned.
|
||||
pub trait Spanned: private::Sealed {
|
||||
fn __span(&self) -> Span;
|
||||
}
|
||||
|
||||
|
@ -11,6 +13,12 @@ impl Spanned for Span {
|
|||
}
|
||||
}
|
||||
|
||||
impl Spanned for DelimSpan {
|
||||
fn __span(&self) -> Span {
|
||||
self.join()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + ToTokens> Spanned for T {
|
||||
fn __span(&self) -> Span {
|
||||
join_spans(self.into_token_stream())
|
||||
|
@ -41,3 +49,14 @@ fn join_spans(tokens: TokenStream) -> Span {
|
|||
.and_then(|last| first.join(last))
|
||||
.unwrap_or(first)
|
||||
}
|
||||
|
||||
mod private {
|
||||
use crate::ToTokens;
|
||||
use proc_macro2::extra::DelimSpan;
|
||||
use proc_macro2::Span;
|
||||
|
||||
pub trait Sealed {}
|
||||
impl Sealed for Span {}
|
||||
impl Sealed for DelimSpan {}
|
||||
impl<T: ?Sized + ToTokens> Sealed for T {}
|
||||
}
|
||||
|
|
|
@ -1,14 +1,17 @@
|
|||
#![allow(
|
||||
clippy::disallowed_names,
|
||||
clippy::let_underscore_untyped,
|
||||
clippy::shadow_unrelated,
|
||||
clippy::unseparated_literal_suffix,
|
||||
clippy::used_underscore_binding
|
||||
)]
|
||||
|
||||
extern crate proc_macro;
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::collections::BTreeSet;
|
||||
|
||||
use proc_macro2::{Ident, Span, TokenStream};
|
||||
use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream};
|
||||
use quote::{format_ident, quote, quote_spanned, TokenStreamExt};
|
||||
|
||||
struct X;
|
||||
|
@ -517,3 +520,30 @@ fn test_quote_raw_id() {
|
|||
let id = quote!(r#raw_id);
|
||||
assert_eq!(id.to_string(), "r#raw_id");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_type_inference_for_span() {
|
||||
trait CallSite {
|
||||
fn get() -> Self;
|
||||
}
|
||||
|
||||
impl CallSite for Span {
|
||||
fn get() -> Self {
|
||||
Span::call_site()
|
||||
}
|
||||
}
|
||||
|
||||
let span = Span::call_site();
|
||||
let _ = quote_spanned!(span=> ...);
|
||||
|
||||
let delim_span = Group::new(Delimiter::Parenthesis, TokenStream::new()).delim_span();
|
||||
let _ = quote_spanned!(delim_span=> ...);
|
||||
|
||||
let inferred = CallSite::get();
|
||||
let _ = quote_spanned!(inferred=> ...);
|
||||
|
||||
if false {
|
||||
let proc_macro_span = proc_macro::Span::call_site();
|
||||
let _ = quote_spanned!(proc_macro_span.into()=> ...);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -4,7 +4,8 @@ error[E0308]: mismatched types
|
|||
8 | quote!(#(#nonrep #nonrep)*);
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
| |
|
||||
| expected struct `HasIterator`, found struct `ThereIsNoIteratorInRepetition`
|
||||
| expected `HasIterator`, found `ThereIsNoIteratorInRepetition`
|
||||
| expected due to this
|
||||
| here the type of `has_iter` is inferred to be `ThereIsNoIteratorInRepetition`
|
||||
|
|
||||
= note: this error originates in the macro `$crate::quote_token_with_context` which comes from the expansion of the macro `quote` (in Nightly builds, run with -Z macro-backtrace for more info)
|
||||
|
|
|
@ -4,7 +4,8 @@ error[E0308]: mismatched types
|
|||
8 | quote!(#(#nonrep)*);
|
||||
| ^^^^^^^^^^^^^^^^^^^
|
||||
| |
|
||||
| expected struct `HasIterator`, found struct `ThereIsNoIteratorInRepetition`
|
||||
| expected `HasIterator`, found `ThereIsNoIteratorInRepetition`
|
||||
| expected due to this
|
||||
| here the type of `has_iter` is inferred to be `ThereIsNoIteratorInRepetition`
|
||||
|
|
||||
= note: this error originates in the macro `$crate::quote_token_with_context` which comes from the expansion of the macro `quote` (in Nightly builds, run with -Z macro-backtrace for more info)
|
||||
|
|
|
@ -4,7 +4,7 @@ error[E0308]: mismatched types
|
|||
4 | quote!(#(a b),*);
|
||||
| ^^^^^^^^^^^^^^^^
|
||||
| |
|
||||
| expected struct `HasIterator`, found struct `ThereIsNoIteratorInRepetition`
|
||||
| expected `HasIterator`, found `ThereIsNoIteratorInRepetition`
|
||||
| expected due to this
|
||||
|
|
||||
= note: this error originates in the macro `$crate::quote_token_with_context` which comes from the expansion of the macro `quote` (in Nightly builds, run with -Z macro-backtrace for more info)
|
||||
|
|
|
@ -4,7 +4,7 @@ error[E0308]: mismatched types
|
|||
4 | quote!(#(a b)*);
|
||||
| ^^^^^^^^^^^^^^^
|
||||
| |
|
||||
| expected struct `HasIterator`, found struct `ThereIsNoIteratorInRepetition`
|
||||
| expected `HasIterator`, found `ThereIsNoIteratorInRepetition`
|
||||
| expected due to this
|
||||
|
|
||||
= note: this error originates in the macro `$crate::quote_token_with_context` which comes from the expansion of the macro `quote` (in Nightly builds, run with -Z macro-backtrace for more info)
|
||||
|
|
|
@ -3,5 +3,5 @@ use std::net::Ipv4Addr;
|
|||
|
||||
fn main() {
|
||||
let ip = Ipv4Addr::LOCALHOST;
|
||||
_ = quote! { #ip };
|
||||
let _ = quote! { #ip };
|
||||
}
|
||||
|
|
|
@ -1,11 +1,11 @@
|
|||
error[E0277]: the trait bound `Ipv4Addr: ToTokens` is not satisfied
|
||||
--> tests/ui/not-quotable.rs:6:9
|
||||
--> tests/ui/not-quotable.rs:6:13
|
||||
|
|
||||
6 | _ = quote! { #ip };
|
||||
| ^^^^^^^^^^^^^^
|
||||
| |
|
||||
| the trait `ToTokens` is not implemented for `Ipv4Addr`
|
||||
| required by a bound introduced by this call
|
||||
6 | let _ = quote! { #ip };
|
||||
| ^^^^^^^^^^^^^^
|
||||
| |
|
||||
| the trait `ToTokens` is not implemented for `Ipv4Addr`
|
||||
| required by a bound introduced by this call
|
||||
|
|
||||
= help: the following other types implement trait `ToTokens`:
|
||||
&'a T
|
||||
|
|
|
@ -4,5 +4,5 @@ struct Ipv4Addr;
|
|||
|
||||
fn main() {
|
||||
let ip = Ipv4Addr;
|
||||
_ = quote! { #(#ip)* };
|
||||
let _ = quote! { #(#ip)* };
|
||||
}
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
error[E0599]: the method `quote_into_iter` exists for struct `Ipv4Addr`, but its trait bounds were not satisfied
|
||||
--> tests/ui/not-repeatable.rs:7:9
|
||||
--> tests/ui/not-repeatable.rs:7:13
|
||||
|
|
||||
3 | struct Ipv4Addr;
|
||||
| ---------------
|
||||
|
@ -10,8 +10,8 @@ error[E0599]: the method `quote_into_iter` exists for struct `Ipv4Addr`, but its
|
|||
| doesn't satisfy `Ipv4Addr: ext::RepIteratorExt`
|
||||
| doesn't satisfy `Ipv4Addr: ext::RepToTokensExt`
|
||||
...
|
||||
7 | _ = quote! { #(#ip)* };
|
||||
| ^^^^^^^^^^^^^^^^^^ method cannot be called on `Ipv4Addr` due to unsatisfied trait bounds
|
||||
7 | let _ = quote! { #(#ip)* };
|
||||
| ^^^^^^^^^^^^^^^^^^ method cannot be called on `Ipv4Addr` due to unsatisfied trait bounds
|
||||
|
|
||||
= note: the following trait bounds were not satisfied:
|
||||
`Ipv4Addr: Iterator`
|
||||
|
|
|
@ -1,8 +1,10 @@
|
|||
error[E0308]: mismatched types
|
||||
--> tests/ui/wrong-type-span.rs:6:20
|
||||
--> tests/ui/wrong-type-span.rs:6:5
|
||||
|
|
||||
6 | quote_spanned!(span=> #x);
|
||||
| ---------------^^^^------
|
||||
| | |
|
||||
| | expected struct `Span`, found `&str`
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
| |
|
||||
| expected `Span`, found `&str`
|
||||
| expected due to this
|
||||
|
|
||||
= note: this error originates in the macro `quote_spanned` (in Nightly builds, run with -Z macro-backtrace for more info)
|
||||
|
|
|
@ -1 +1 @@
|
|||
{"files":{"Cargo.toml":"e390e019c701323f7a6f3b42dc1242445a0ea6c1188d91b1d3513fcebc7afe1d","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"3c12b60b6c77a201665344a7612b42392e77ddc3e907f5a14e3f3a4bb6c4692e","build.rs":"bf6f7f791517c2d583457e74452c68a7308c5e5106d1a2cebcfa19c55a9c1a42","crates-io.md":"ee22254ee64c3189eef3e707c8d75dc66a8df2a7ee9e518d95238950780ec387","src/de/format.rs":"84f902fd4c3be66e81ac01d5b21cd876113c16f9890ff8bab5faa0d085386294","src/de/ignored_any.rs":"967184c86707c99b77a1cfb218dfc823f560fae227b6635aee6af19ee82962f5","src/de/impls.rs":"2d4d9985b1048f5f6371984c9bc8f273ad685901ba22ad27483dfb7cec65898c","src/de/mod.rs":"71198e80e4c64aa686b5ceb6e8bce10db20845a87a30fa14227ecbe365a046d5","src/de/seed.rs":"e8cf0233afe0af5b8fb9e4c94f301c92729c5ba417280af9e2201b732e374a72","src/de/utf8.rs":"f17524ee0af98ec3abcfd7d0b812fbd1033263bd8e2ce2f57c1e1999ce153558","src/de/value.rs":"aa5055923e2c3fd1c1f1abdfb380a1d63d07cf4d602ef62d2df2b7da33dd8c81","src/integer128.rs":"ca49591abde2d8c4f582174533fee28f0fa9139e5d71bf22b25a6b175f8abccc","src/lib.rs":"a359fe67a2d3a8bfe27219b35992c0ed390a65a409c69db75e3bf7e63fd16dde","src/macros.rs":"3d695a51f0a07f9f719dcb5620012c21a1b084c06a6283349cabf574ceba8123","src/private/de.rs":"a85efe9af4f5629ac7d946af56e20fbc184df6ac40a6cfe47bf3997a95b2ea20","src/private/doc.rs":"e9801a43c3088fccd5f1fac76416698f948e65b647024aa9da17d673e1e8c217","src/private/mod.rs":"37b204775e572396515477b393ce793b2579de48e5971e6f596ba3723c489fd6","src/private/ser.rs":"57fbff98429e870da86edcf61c0831caaa3b708c0c32e3038c4b2179e8dff73e","src/private/size_hint.rs":"605521227e9ba3100fbb9d5ea7fd5853385097c35015ce6908bd5f1ea20d59ad","src/ser/fmt.rs":"7827ed07fd8897e6324f75625ba0c926a4c4e7ec2914cd067391ce54d942ac7b","src/ser/impls.rs":"8cbe2b66ae950cbc5223e41ac82194cccfc2c26300acfe6328e5f20081f23af3","src/ser/impossible.rs":"db17913522c1c27389
c5a085113911353b9813c1b116518681362e7c8b692c3a","src/ser/mod.rs":"e1e6c764837c70b6410dcf1949a0dae1b4b4ffce65b87607d3d173b612e9bccf","src/std_error.rs":"3aac687856c035517fae44ed2906dd4a1e3184bae4bf613adcdeb73f74126c57"},"package":"bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb"}
|
||||
{"files":{"Cargo.toml":"e2c66f13b0b543c7d4e1e3ff6aeca3e26d0ddc25a0a13202c989a3d75c1e1efb","LICENSE-APACHE":"62c7a1e35f56406896d7aa7ca52d0cc0d272ac022b5d2796e7d6905db8a3636a","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"13c66875efb67f64fdec817725f34ceb07913e1ebea4adc240868d2ed581d3da","build.rs":"976e089a5f74fc03e23465744bcc02b3b600a59f1c098da60c29d3979c5b35df","crates-io.md":"ee22254ee64c3189eef3e707c8d75dc66a8df2a7ee9e518d95238950780ec387","src/de/format.rs":"84f902fd4c3be66e81ac01d5b21cd876113c16f9890ff8bab5faa0d085386294","src/de/ignored_any.rs":"967184c86707c99b77a1cfb218dfc823f560fae227b6635aee6af19ee82962f5","src/de/impls.rs":"e8440409d11b73433b7ad8496cebf8be8f56160e90cf8328bb1d33318db40dea","src/de/mod.rs":"71198e80e4c64aa686b5ceb6e8bce10db20845a87a30fa14227ecbe365a046d5","src/de/seed.rs":"e8cf0233afe0af5b8fb9e4c94f301c92729c5ba417280af9e2201b732e374a72","src/de/utf8.rs":"f17524ee0af98ec3abcfd7d0b812fbd1033263bd8e2ce2f57c1e1999ce153558","src/de/value.rs":"aa5055923e2c3fd1c1f1abdfb380a1d63d07cf4d602ef62d2df2b7da33dd8c81","src/integer128.rs":"ca49591abde2d8c4f582174533fee28f0fa9139e5d71bf22b25a6b175f8abccc","src/lib.rs":"c6b7f850eb432d070f23c7c882ad5c5352ba62962407bdfbfbf8904b99bbb2ef","src/macros.rs":"3d695a51f0a07f9f719dcb5620012c21a1b084c06a6283349cabf574ceba8123","src/private/de.rs":"c448b4351aa73afd8c9b9629e4f48bfee8dc59de9e07764be1d17d3c12f5974f","src/private/doc.rs":"e9801a43c3088fccd5f1fac76416698f948e65b647024aa9da17d673e1e8c217","src/private/mod.rs":"37b204775e572396515477b393ce793b2579de48e5971e6f596ba3723c489fd6","src/private/ser.rs":"57fbff98429e870da86edcf61c0831caaa3b708c0c32e3038c4b2179e8dff73e","src/private/size_hint.rs":"605521227e9ba3100fbb9d5ea7fd5853385097c35015ce6908bd5f1ea20d59ad","src/ser/fmt.rs":"7827ed07fd8897e6324f75625ba0c926a4c4e7ec2914cd067391ce54d942ac7b","src/ser/impls.rs":"46229722b7f0d8c4f01c43567c765608bf2c1974a5f24ce2525815c5bfd42ff5","src/ser/impossible.rs":"db17913522c1c27389
c5a085113911353b9813c1b116518681362e7c8b692c3a","src/ser/mod.rs":"e1e6c764837c70b6410dcf1949a0dae1b4b4ffce65b87607d3d173b612e9bccf","src/std_error.rs":"3aac687856c035517fae44ed2906dd4a1e3184bae4bf613adcdeb73f74126c57"},"package":"2113ab51b87a539ae008b5c6c02dc020ffa39afd2d83cffcb3f4eb2722cebec2"}
|
|
@ -10,9 +10,9 @@
|
|||
# See Cargo.toml.orig for the original contents.
|
||||
|
||||
[package]
|
||||
rust-version = "1.13"
|
||||
rust-version = "1.19"
|
||||
name = "serde"
|
||||
version = "1.0.152"
|
||||
version = "1.0.163"
|
||||
authors = [
|
||||
"Erick Tryzelaar <erick.tryzelaar@gmail.com>",
|
||||
"David Tolnay <dtolnay@gmail.com>",
|
||||
|
@ -42,20 +42,21 @@ categories = [
|
|||
license = "MIT OR Apache-2.0"
|
||||
repository = "https://github.com/serde-rs/serde"
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
features = ["derive"]
|
||||
targets = ["x86_64-unknown-linux-gnu"]
|
||||
|
||||
[package.metadata.playground]
|
||||
features = [
|
||||
"derive",
|
||||
"rc",
|
||||
]
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
targets = ["x86_64-unknown-linux-gnu"]
|
||||
|
||||
[lib]
|
||||
doc-scrape-examples = false
|
||||
|
||||
[dependencies.serde_derive]
|
||||
version = "=1.0.152"
|
||||
version = "=1.0.163"
|
||||
optional = true
|
||||
|
||||
[dev-dependencies.serde_derive]
|
||||
|
|
|
@ -174,28 +174,3 @@ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
|||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
|
|
@ -1,13 +1,13 @@
|
|||
# Serde   [![Build Status]][actions] [![Latest Version]][crates.io] [![serde: rustc 1.13+]][Rust 1.13] [![serde_derive: rustc 1.31+]][Rust 1.31]
|
||||
# Serde   [![Build Status]][actions] [![Latest Version]][crates.io] [![serde: rustc 1.19+]][Rust 1.19] [![serde_derive: rustc 1.56+]][Rust 1.56]
|
||||
|
||||
[Build Status]: https://img.shields.io/github/actions/workflow/status/serde-rs/serde/ci.yml?branch=master
|
||||
[actions]: https://github.com/serde-rs/serde/actions?query=branch%3Amaster
|
||||
[Latest Version]: https://img.shields.io/crates/v/serde.svg
|
||||
[crates.io]: https://crates.io/crates/serde
|
||||
[serde: rustc 1.13+]: https://img.shields.io/badge/serde-rustc_1.13+-lightgray.svg
|
||||
[serde_derive: rustc 1.31+]: https://img.shields.io/badge/serde_derive-rustc_1.31+-lightgray.svg
|
||||
[Rust 1.13]: https://blog.rust-lang.org/2016/11/10/Rust-1.13.html
|
||||
[Rust 1.31]: https://blog.rust-lang.org/2018/12/06/Rust-1.31-and-rust-2018.html
|
||||
[serde: rustc 1.19+]: https://img.shields.io/badge/serde-rustc_1.19+-lightgray.svg
|
||||
[serde_derive: rustc 1.56+]: https://img.shields.io/badge/serde_derive-rustc_1.56+-lightgray.svg
|
||||
[Rust 1.19]: https://blog.rust-lang.org/2017/07/20/Rust-1.19.html
|
||||
[Rust 1.56]: https://blog.rust-lang.org/2021/10/21/Rust-1.56.0.html
|
||||
|
||||
**Serde is a framework for *ser*ializing and *de*serializing Rust data structures efficiently and generically.**
|
||||
|
||||
|
|
|
@ -78,11 +78,6 @@ fn main() {
|
|||
println!("cargo:rustc-cfg=no_num_nonzero");
|
||||
}
|
||||
|
||||
// Current minimum supported version of serde_derive crate is Rust 1.31.
|
||||
if minor < 31 {
|
||||
println!("cargo:rustc-cfg=no_serde_derive");
|
||||
}
|
||||
|
||||
// TryFrom, Atomic types, non-zero signed integers, and SystemTime::checked_add
|
||||
// stabilized in Rust 1.34:
|
||||
// https://blog.rust-lang.org/2019/04/11/Rust-1.34.0.html#tryfrom-and-tryinto
|
||||
|
@ -94,6 +89,11 @@ fn main() {
|
|||
println!("cargo:rustc-cfg=no_relaxed_trait_bounds");
|
||||
}
|
||||
|
||||
// Current minimum supported version of serde_derive crate is Rust 1.56.
|
||||
if minor < 56 {
|
||||
println!("cargo:rustc-cfg=no_serde_derive");
|
||||
}
|
||||
|
||||
// Support for #[cfg(target_has_atomic = "...")] stabilized in Rust 1.60.
|
||||
if minor < 60 {
|
||||
println!("cargo:rustc-cfg=no_target_has_atomic");
|
||||
|
@ -114,6 +114,12 @@ fn main() {
|
|||
println!("cargo:rustc-cfg=no_std_atomic");
|
||||
}
|
||||
}
|
||||
|
||||
// Support for core::ffi::CStr and alloc::ffi::CString stabilized in Rust 1.64.
|
||||
// https://blog.rust-lang.org/2022/09/22/Rust-1.64.0.html#c-compatible-ffi-types-in-core-and-alloc
|
||||
if minor < 64 {
|
||||
println!("cargo:rustc-cfg=no_core_cstr");
|
||||
}
|
||||
}
|
||||
|
||||
fn rustc_minor_version() -> Option<u32> {
|
||||
|
|
|
@ -666,10 +666,10 @@ impl<'de: 'a, 'a> Deserialize<'de> for &'a [u8] {
|
|||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
#[cfg(any(feature = "std", all(not(no_core_cstr), feature = "alloc")))]
|
||||
struct CStringVisitor;
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
#[cfg(any(feature = "std", all(not(no_core_cstr), feature = "alloc")))]
|
||||
impl<'de> Visitor<'de> for CStringVisitor {
|
||||
type Value = CString;
|
||||
|
||||
|
@ -720,7 +720,7 @@ impl<'de> Visitor<'de> for CStringVisitor {
|
|||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
#[cfg(any(feature = "std", all(not(no_core_cstr), feature = "alloc")))]
|
||||
impl<'de> Deserialize<'de> for CString {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
|
@ -747,7 +747,10 @@ macro_rules! forwarded_impl {
|
|||
}
|
||||
}
|
||||
|
||||
#[cfg(all(feature = "std", not(no_de_boxed_c_str)))]
|
||||
#[cfg(all(
|
||||
any(feature = "std", all(not(no_core_cstr), feature = "alloc")),
|
||||
not(no_de_boxed_c_str)
|
||||
))]
|
||||
forwarded_impl!((), Box<CStr>, CString::into_boxed_c_str);
|
||||
|
||||
#[cfg(not(no_core_reverse))]
|
||||
|
@ -991,7 +994,8 @@ seq_impl!(
|
|||
HashSet::clear,
|
||||
HashSet::with_capacity_and_hasher(size_hint::cautious(seq.size_hint()), S::default()),
|
||||
HashSet::reserve,
|
||||
HashSet::insert);
|
||||
HashSet::insert
|
||||
);
|
||||
|
||||
#[cfg(any(feature = "std", feature = "alloc"))]
|
||||
seq_impl!(
|
||||
|
@ -1406,16 +1410,14 @@ macro_rules! map_impl {
|
|||
}
|
||||
|
||||
#[cfg(any(feature = "std", feature = "alloc"))]
|
||||
map_impl!(
|
||||
BTreeMap<K: Ord, V>,
|
||||
map,
|
||||
BTreeMap::new());
|
||||
map_impl!(BTreeMap<K: Ord, V>, map, BTreeMap::new());
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
map_impl!(
|
||||
HashMap<K: Eq + Hash, V, S: BuildHasher + Default>,
|
||||
map,
|
||||
HashMap::with_capacity_and_hasher(size_hint::cautious(map.size_hint()), S::default()));
|
||||
HashMap::with_capacity_and_hasher(size_hint::cautious(map.size_hint()), S::default())
|
||||
);
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
|
|
|
@ -45,6 +45,8 @@
|
|||
//! definition.
|
||||
//! - [JSON5], a superset of JSON including some productions from ES5.
|
||||
//! - [URL] query strings, in the x-www-form-urlencoded format.
|
||||
//! - [Starlark], the format used for describing build targets by the Bazel and
|
||||
//! Buck build systems. *(serialization only)*
|
||||
//! - [Envy], a way to deserialize environment variables into Rust structs.
|
||||
//! *(deserialization only)*
|
||||
//! - [Envy Store], a way to deserialize [AWS Parameter Store] parameters into
|
||||
|
@ -74,6 +76,7 @@
|
|||
//! [Avro]: https://docs.rs/apache-avro
|
||||
//! [JSON5]: https://github.com/callum-oakley/json5-rs
|
||||
//! [URL]: https://docs.rs/serde_qs
|
||||
//! [Starlark]: https://github.com/dtolnay/serde-starlark
|
||||
//! [Envy]: https://github.com/softprops/envy
|
||||
//! [Envy Store]: https://github.com/softprops/envy-store
|
||||
//! [Cargo]: https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
@ -90,7 +93,7 @@
|
|||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
// Serde types in rustdoc of other crates get linked to here.
|
||||
#![doc(html_root_url = "https://docs.rs/serde/1.0.152")]
|
||||
#![doc(html_root_url = "https://docs.rs/serde/1.0.163")]
|
||||
// Support using Serde without the standard library!
|
||||
#![cfg_attr(not(feature = "std"), no_std)]
|
||||
// Unstable functionality only if the user asks for it. For tracking and
|
||||
|
@ -127,6 +130,7 @@
|
|||
derive_partial_eq_without_eq,
|
||||
enum_glob_use,
|
||||
explicit_auto_deref,
|
||||
let_underscore_untyped,
|
||||
map_err_ignore,
|
||||
new_without_default,
|
||||
result_unit_err,
|
||||
|
@ -215,13 +219,23 @@ mod lib {
|
|||
#[cfg(feature = "std")]
|
||||
pub use std::collections::{BTreeMap, BTreeSet, BinaryHeap, LinkedList, VecDeque};
|
||||
|
||||
#[cfg(all(not(no_core_cstr), not(feature = "std")))]
|
||||
pub use core::ffi::CStr;
|
||||
#[cfg(feature = "std")]
|
||||
pub use std::ffi::CStr;
|
||||
|
||||
#[cfg(all(not(no_core_cstr), feature = "alloc", not(feature = "std")))]
|
||||
pub use alloc::ffi::CString;
|
||||
#[cfg(feature = "std")]
|
||||
pub use std::ffi::CString;
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
pub use std::{error, net};
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
pub use std::collections::{HashMap, HashSet};
|
||||
#[cfg(feature = "std")]
|
||||
pub use std::ffi::{CStr, CString, OsStr, OsString};
|
||||
pub use std::ffi::{OsStr, OsString};
|
||||
#[cfg(feature = "std")]
|
||||
pub use std::hash::{BuildHasher, Hash};
|
||||
#[cfg(feature = "std")]
|
||||
|
@ -324,9 +338,10 @@ mod std_error;
|
|||
#[allow(unused_imports)]
|
||||
#[macro_use]
|
||||
extern crate serde_derive;
|
||||
|
||||
/// Derive macro available if serde is built with `features = ["derive"]`.
|
||||
#[cfg(feature = "serde_derive")]
|
||||
#[doc(hidden)]
|
||||
pub use serde_derive::*;
|
||||
pub use serde_derive::{Deserialize, Serialize};
|
||||
|
||||
#[cfg(all(not(no_serde_derive), any(feature = "std", feature = "alloc")))]
|
||||
mod actually_private {
|
||||
|
|
|
@ -982,9 +982,16 @@ mod content {
|
|||
where
|
||||
E: de::Error,
|
||||
{
|
||||
if field == self.tag {
|
||||
self.visit_bytes(field.as_bytes())
|
||||
}
|
||||
|
||||
fn visit_bytes<E>(self, field: &[u8]) -> Result<Self::Value, E>
|
||||
where
|
||||
E: de::Error,
|
||||
{
|
||||
if field == self.tag.as_bytes() {
|
||||
Ok(TagContentOtherField::Tag)
|
||||
} else if field == self.content {
|
||||
} else if field == self.content.as_bytes() {
|
||||
Ok(TagContentOtherField::Content)
|
||||
} else {
|
||||
Ok(TagContentOtherField::Other)
|
||||
|
@ -2731,11 +2738,7 @@ where
|
|||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
visitor.visit_map(FlatInternallyTaggedAccess {
|
||||
iter: self.0.iter_mut(),
|
||||
pending: None,
|
||||
_marker: PhantomData,
|
||||
})
|
||||
self.deserialize_map(visitor)
|
||||
}
|
||||
|
||||
fn deserialize_enum<V>(
|
||||
|
@ -2747,17 +2750,8 @@ where
|
|||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
for item in self.0.iter_mut() {
|
||||
// items in the vector are nulled out when used. So we can only use
|
||||
// an item if it's still filled in and if the field is one we care
|
||||
// about.
|
||||
let use_item = match *item {
|
||||
None => false,
|
||||
Some((ref c, _)) => c.as_str().map_or(false, |x| variants.contains(&x)),
|
||||
};
|
||||
|
||||
if use_item {
|
||||
let (key, value) = item.take().unwrap();
|
||||
for entry in self.0.iter_mut() {
|
||||
if let Some((key, value)) = flat_map_take_entry(entry, variants) {
|
||||
return visitor.visit_enum(EnumDeserializer::new(key, Some(value)));
|
||||
}
|
||||
}
|
||||
|
@ -2772,7 +2766,11 @@ where
|
|||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
visitor.visit_map(FlatMapAccess::new(self.0.iter()))
|
||||
visitor.visit_map(FlatMapAccess {
|
||||
iter: self.0.iter(),
|
||||
pending_content: None,
|
||||
_marker: PhantomData,
|
||||
})
|
||||
}
|
||||
|
||||
fn deserialize_struct<V>(
|
||||
|
@ -2784,7 +2782,12 @@ where
|
|||
where
|
||||
V: Visitor<'de>,
|
||||
{
|
||||
visitor.visit_map(FlatStructAccess::new(self.0.iter_mut(), fields))
|
||||
visitor.visit_map(FlatStructAccess {
|
||||
iter: self.0.iter_mut(),
|
||||
pending_content: None,
|
||||
fields: fields,
|
||||
_marker: PhantomData,
|
||||
})
|
||||
}
|
||||
|
||||
fn deserialize_newtype_struct<V>(self, _name: &str, visitor: V) -> Result<V::Value, Self::Error>
|
||||
|
@ -2838,25 +2841,12 @@ where
|
|||
}
|
||||
|
||||
#[cfg(any(feature = "std", feature = "alloc"))]
|
||||
pub struct FlatMapAccess<'a, 'de: 'a, E> {
|
||||
struct FlatMapAccess<'a, 'de: 'a, E> {
|
||||
iter: slice::Iter<'a, Option<(Content<'de>, Content<'de>)>>,
|
||||
pending_content: Option<&'a Content<'de>>,
|
||||
_marker: PhantomData<E>,
|
||||
}
|
||||
|
||||
#[cfg(any(feature = "std", feature = "alloc"))]
|
||||
impl<'a, 'de, E> FlatMapAccess<'a, 'de, E> {
|
||||
fn new(
|
||||
iter: slice::Iter<'a, Option<(Content<'de>, Content<'de>)>>,
|
||||
) -> FlatMapAccess<'a, 'de, E> {
|
||||
FlatMapAccess {
|
||||
iter: iter,
|
||||
pending_content: None,
|
||||
_marker: PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(any(feature = "std", feature = "alloc"))]
|
||||
impl<'a, 'de, E> MapAccess<'de> for FlatMapAccess<'a, 'de, E>
|
||||
where
|
||||
|
@ -2871,6 +2861,10 @@ where
|
|||
for item in &mut self.iter {
|
||||
// Items in the vector are nulled out when used by a struct.
|
||||
if let Some((ref key, ref content)) = *item {
|
||||
// Do not take(), instead borrow this entry. The internally tagged
|
||||
// enum does its own buffering so we can't tell whether this entry
|
||||
// is going to be consumed. Borrowing here leaves the entry
|
||||
// available for later flattened fields.
|
||||
self.pending_content = Some(content);
|
||||
return seed.deserialize(ContentRefDeserializer::new(key)).map(Some);
|
||||
}
|
||||
|
@ -2890,28 +2884,13 @@ where
|
|||
}
|
||||
|
||||
#[cfg(any(feature = "std", feature = "alloc"))]
|
||||
pub struct FlatStructAccess<'a, 'de: 'a, E> {
|
||||
struct FlatStructAccess<'a, 'de: 'a, E> {
|
||||
iter: slice::IterMut<'a, Option<(Content<'de>, Content<'de>)>>,
|
||||
pending_content: Option<Content<'de>>,
|
||||
fields: &'static [&'static str],
|
||||
_marker: PhantomData<E>,
|
||||
}
|
||||
|
||||
#[cfg(any(feature = "std", feature = "alloc"))]
|
||||
impl<'a, 'de, E> FlatStructAccess<'a, 'de, E> {
|
||||
fn new(
|
||||
iter: slice::IterMut<'a, Option<(Content<'de>, Content<'de>)>>,
|
||||
fields: &'static [&'static str],
|
||||
) -> FlatStructAccess<'a, 'de, E> {
|
||||
FlatStructAccess {
|
||||
iter: iter,
|
||||
pending_content: None,
|
||||
fields: fields,
|
||||
_marker: PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(any(feature = "std", feature = "alloc"))]
|
||||
impl<'a, 'de, E> MapAccess<'de> for FlatStructAccess<'a, 'de, E>
|
||||
where
|
||||
|
@ -2923,17 +2902,8 @@ where
|
|||
where
|
||||
T: DeserializeSeed<'de>,
|
||||
{
|
||||
while let Some(item) = self.iter.next() {
|
||||
// items in the vector are nulled out when used. So we can only use
|
||||
// an item if it's still filled in and if the field is one we care
|
||||
// about. In case we do not know which fields we want, we take them all.
|
||||
let use_item = match *item {
|
||||
None => false,
|
||||
Some((ref c, _)) => c.as_str().map_or(false, |key| self.fields.contains(&key)),
|
||||
};
|
||||
|
||||
if use_item {
|
||||
let (key, content) = item.take().unwrap();
|
||||
for entry in self.iter.by_ref() {
|
||||
if let Some((key, content)) = flat_map_take_entry(entry, self.fields) {
|
||||
self.pending_content = Some(content);
|
||||
return seed.deserialize(ContentDeserializer::new(key)).map(Some);
|
||||
}
|
||||
|
@ -2952,44 +2922,24 @@ where
|
|||
}
|
||||
}
|
||||
|
||||
/// Claims one key-value pair from a FlatMapDeserializer's field buffer if the
|
||||
/// field name matches any of the recognized ones.
|
||||
#[cfg(any(feature = "std", feature = "alloc"))]
|
||||
pub struct FlatInternallyTaggedAccess<'a, 'de: 'a, E> {
|
||||
iter: slice::IterMut<'a, Option<(Content<'de>, Content<'de>)>>,
|
||||
pending: Option<&'a Content<'de>>,
|
||||
_marker: PhantomData<E>,
|
||||
}
|
||||
fn flat_map_take_entry<'de>(
|
||||
entry: &mut Option<(Content<'de>, Content<'de>)>,
|
||||
recognized: &[&str],
|
||||
) -> Option<(Content<'de>, Content<'de>)> {
|
||||
// Entries in the FlatMapDeserializer buffer are nulled out as they get
|
||||
// claimed for deserialization. We only use an entry if it is still present
|
||||
// and if the field is one recognized by the current data structure.
|
||||
let is_recognized = match entry {
|
||||
None => false,
|
||||
Some((k, _v)) => k.as_str().map_or(false, |name| recognized.contains(&name)),
|
||||
};
|
||||
|
||||
#[cfg(any(feature = "std", feature = "alloc"))]
|
||||
impl<'a, 'de, E> MapAccess<'de> for FlatInternallyTaggedAccess<'a, 'de, E>
|
||||
where
|
||||
E: Error,
|
||||
{
|
||||
type Error = E;
|
||||
|
||||
fn next_key_seed<T>(&mut self, seed: T) -> Result<Option<T::Value>, Self::Error>
|
||||
where
|
||||
T: DeserializeSeed<'de>,
|
||||
{
|
||||
for item in &mut self.iter {
|
||||
if let Some((ref key, ref content)) = *item {
|
||||
// Do not take(), instead borrow this entry. The internally tagged
|
||||
// enum does its own buffering so we can't tell whether this entry
|
||||
// is going to be consumed. Borrowing here leaves the entry
|
||||
// available for later flattened fields.
|
||||
self.pending = Some(content);
|
||||
return seed.deserialize(ContentRefDeserializer::new(key)).map(Some);
|
||||
}
|
||||
}
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
fn next_value_seed<T>(&mut self, seed: T) -> Result<T::Value, Self::Error>
|
||||
where
|
||||
T: DeserializeSeed<'de>,
|
||||
{
|
||||
match self.pending.take() {
|
||||
Some(value) => seed.deserialize(ContentRefDeserializer::new(value)),
|
||||
None => panic!("value is missing"),
|
||||
}
|
||||
if is_recognized {
|
||||
entry.take()
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
|
|
@ -72,7 +72,7 @@ impl<'a> Serialize for fmt::Arguments<'a> {
|
|||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
#[cfg(any(feature = "std", not(no_core_cstr)))]
|
||||
impl Serialize for CStr {
|
||||
#[inline]
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
|
@ -83,7 +83,7 @@ impl Serialize for CStr {
|
|||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "std")]
|
||||
#[cfg(any(feature = "std", all(not(no_core_cstr), feature = "alloc")))]
|
||||
impl Serialize for CString {
|
||||
#[inline]
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
|
|
|
@ -1 +1 @@
|
|||
{"files":{"Cargo.toml":"72ed2b0578c6c4fbbd14ab2062502092990c48f4687a01a4a07d7fdbb6330756","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"3c12b60b6c77a201665344a7612b42392e77ddc3e907f5a14e3f3a4bb6c4692e","build.rs":"d9a0df0c4dd552ff7fd0c3b3828cb1fff4fc4ab15bd98539881929b76b98003b","crates-io.md":"ee22254ee64c3189eef3e707c8d75dc66a8df2a7ee9e518d95238950780ec387","src/bound.rs":"268b4995a5d0a129dcbd6e32ef11f587bd271df3f6c4f7230ed54bc99f5ce871","src/de.rs":"b28c2fcf5214d33ba1dc855b60634db18608a26f39f9f92bbedf62c456fa8d10","src/dummy.rs":"cb154465020973be8ab6079ab8574df46f38fbe028a5561cd6b1a8bfa1a35478","src/fragment.rs":"5548ba65a53d90a296f60c1328a7a7fb040db467f59c2f5210b2fb320457145d","src/internals/ast.rs":"b019865eef92c1ddbb9029423ac22179f132dc655a51c09fb2a42f4aaef172fd","src/internals/attr.rs":"778074380c4e353b77e03aff9edf15fda9e15a0e7ec25cdfc51d79a26636ddef","src/internals/case.rs":"9492f0c5142d7b7e8cd39c86d13a855e5ce4489425adb2b96aed89e1b7851ac0","src/internals/check.rs":"6b84278b034a156784fc56153df3def1660bcfcfde0cd59f8facce1750717c7d","src/internals/ctxt.rs":"6fa544ae52914498a62a395818ebdc1b36ac2fb5903c60afb741a864ad559f1c","src/internals/mod.rs":"f32138ff19d57eb00f88ba11f6b015efab2102657804f71ebbf386a3698dad91","src/internals/receiver.rs":"cd125ba4a3dd6250ed4737555c58627bffd630a536cd7223068eed7c10a170d8","src/internals/respan.rs":"899753859c58ce5f532a3ec4584796a52f13ed5a0533191e48c953ba5c1b52ff","src/internals/symbol.rs":"3c9ce461773b7df3bb64d82aa5a0d93052c3bb0e60209db6c0b5c10ee9cfc9cf","src/lib.rs":"6a80c0114dcf9924cbbbc03f443cfd0d299be9f89ba6c4fdc2867d990aba5063","src/pretend.rs":"4aa53bf6c1350fbcfc8c4997f720cde61a8eb3aab73bb8c101b0f0a74901892b","src/ser.rs":"8f9ffe1d8bcd28bd40e8d94d688547fa1d518cc722d0292f47d951152c406dd9","src/this.rs":"a2c128955324c2994ed7cdc3fe4eeceb7ad8a0f9d071665a8378c85c8df64ce2","src/try.rs":"b171b0088c23e
bf4bfa07ba457881b41ac5e547d55dd16f737ea988d34badf61"},"package":"af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e"}
|
||||
{"files":{"Cargo.toml":"104314bef2b50d7b381bce3e998a0e05ea5c32f5c5f571727d9264cb32daa051","LICENSE-APACHE":"62c7a1e35f56406896d7aa7ca52d0cc0d272ac022b5d2796e7d6905db8a3636a","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"13c66875efb67f64fdec817725f34ceb07913e1ebea4adc240868d2ed581d3da","crates-io.md":"ee22254ee64c3189eef3e707c8d75dc66a8df2a7ee9e518d95238950780ec387","src/bound.rs":"9211d852730380be8e0af9ed5daa52e61563e598eef458739025551ba76aa7c6","src/de.rs":"adfef386e8bee44ac7c1a58480011094eb0626ffb9f9e7cc8605b168ecf805db","src/dummy.rs":"1b7de5bfe1158ea7e70d668d4f76fdccf7f63144ac7869e82e8bf1e7ea0db13c","src/fragment.rs":"5548ba65a53d90a296f60c1328a7a7fb040db467f59c2f5210b2fb320457145d","src/internals/ast.rs":"b019865eef92c1ddbb9029423ac22179f132dc655a51c09fb2a42f4aaef172fd","src/internals/attr.rs":"7407c6e85afd197bdbf715bd681bd171db37b2264f617e148ca340817f56e684","src/internals/case.rs":"9492f0c5142d7b7e8cd39c86d13a855e5ce4489425adb2b96aed89e1b7851ac0","src/internals/check.rs":"0449cc7653fc9e596f65028835bbb7d1545c10002c79c7608547f45a722c0040","src/internals/ctxt.rs":"6fa544ae52914498a62a395818ebdc1b36ac2fb5903c60afb741a864ad559f1c","src/internals/mod.rs":"f32138ff19d57eb00f88ba11f6b015efab2102657804f71ebbf386a3698dad91","src/internals/receiver.rs":"6b016351b8294539039095863d8c99e81dd4530d7f769003d12d4ca73cca172c","src/internals/respan.rs":"899753859c58ce5f532a3ec4584796a52f13ed5a0533191e48c953ba5c1b52ff","src/internals/symbol.rs":"2bf0287da64d28da7e8673af60f66aaf6b29efe33131e56b24d6fa55edb533ad","src/lib.rs":"d4eff14b2dc5821721442293283e639fbed58071739c5bb45c5fbdd883ba556b","src/pretend.rs":"0e570faf787015535ea6b6683ebc271633c1ca945d3ee1d072c497a1a920c380","src/ser.rs":"565191e9ffdc3455472e11a624e45cfdb74b1e6947d3e39c7c5249932a27497b","src/this.rs":"a2c128955324c2994ed7cdc3fe4eeceb7ad8a0f9d071665a8378c85c8df64ce2","src/try.rs":"b171b0088c23ebf4bfa07ba457881b41ac5e547d55dd16f737ea988d34badf61"},"package":"8c805777e3930
c8883389c602315a24224bcc738b63905ef87cd1420353ea93e"}
|
|
@ -10,9 +10,9 @@
|
|||
# See Cargo.toml.orig for the original contents.
|
||||
|
||||
[package]
|
||||
rust-version = "1.31"
|
||||
rust-version = "1.56"
|
||||
name = "serde_derive"
|
||||
version = "1.0.152"
|
||||
version = "1.0.163"
|
||||
authors = [
|
||||
"Erick Tryzelaar <erick.tryzelaar@gmail.com>",
|
||||
"David Tolnay <dtolnay@gmail.com>",
|
||||
|
@ -53,7 +53,7 @@ version = "1.0"
|
|||
version = "1.0"
|
||||
|
||||
[dependencies.syn]
|
||||
version = "1.0.104"
|
||||
version = "2.0.3"
|
||||
|
||||
[dev-dependencies.serde]
|
||||
version = "1.0"
|
||||
|
|
|
@ -174,28 +174,3 @@ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
|||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
|
|
@ -1,13 +1,13 @@
|
|||
# Serde   [![Build Status]][actions] [![Latest Version]][crates.io] [![serde: rustc 1.13+]][Rust 1.13] [![serde_derive: rustc 1.31+]][Rust 1.31]
|
||||
# Serde   [![Build Status]][actions] [![Latest Version]][crates.io] [![serde: rustc 1.19+]][Rust 1.19] [![serde_derive: rustc 1.56+]][Rust 1.56]
|
||||
|
||||
[Build Status]: https://img.shields.io/github/actions/workflow/status/serde-rs/serde/ci.yml?branch=master
|
||||
[actions]: https://github.com/serde-rs/serde/actions?query=branch%3Amaster
|
||||
[Latest Version]: https://img.shields.io/crates/v/serde.svg
|
||||
[crates.io]: https://crates.io/crates/serde
|
||||
[serde: rustc 1.13+]: https://img.shields.io/badge/serde-rustc_1.13+-lightgray.svg
|
||||
[serde_derive: rustc 1.31+]: https://img.shields.io/badge/serde_derive-rustc_1.31+-lightgray.svg
|
||||
[Rust 1.13]: https://blog.rust-lang.org/2016/11/10/Rust-1.13.html
|
||||
[Rust 1.31]: https://blog.rust-lang.org/2018/12/06/Rust-1.31-and-rust-2018.html
|
||||
[serde: rustc 1.19+]: https://img.shields.io/badge/serde-rustc_1.19+-lightgray.svg
|
||||
[serde_derive: rustc 1.56+]: https://img.shields.io/badge/serde_derive-rustc_1.56+-lightgray.svg
|
||||
[Rust 1.19]: https://blog.rust-lang.org/2017/07/20/Rust-1.19.html
|
||||
[Rust 1.56]: https://blog.rust-lang.org/2021/10/21/Rust-1.56.0.html
|
||||
|
||||
**Serde is a framework for *ser*ializing and *de*serializing Rust data structures efficiently and generically.**
|
||||
|
||||
|
|
|
@ -1,38 +0,0 @@
|
|||
use std::env;
|
||||
use std::process::Command;
|
||||
use std::str;
|
||||
|
||||
// The rustc-cfg strings below are *not* public API. Please let us know by
|
||||
// opening a GitHub issue if your build environment requires some way to enable
|
||||
// these cfgs other than by executing our build script.
|
||||
fn main() {
|
||||
println!("cargo:rerun-if-changed=build.rs");
|
||||
|
||||
let minor = match rustc_minor_version() {
|
||||
Some(minor) => minor,
|
||||
None => return,
|
||||
};
|
||||
|
||||
// Underscore const names stabilized in Rust 1.37:
|
||||
// https://blog.rust-lang.org/2019/08/15/Rust-1.37.0.html#using-unnamed-const-items-for-macros
|
||||
if minor < 37 {
|
||||
println!("cargo:rustc-cfg=no_underscore_consts");
|
||||
}
|
||||
|
||||
// The ptr::addr_of! macro stabilized in Rust 1.51:
|
||||
// https://blog.rust-lang.org/2021/03/25/Rust-1.51.0.html#stabilized-apis
|
||||
if minor < 51 {
|
||||
println!("cargo:rustc-cfg=no_ptr_addr_of");
|
||||
}
|
||||
}
|
||||
|
||||
fn rustc_minor_version() -> Option<u32> {
|
||||
let rustc = env::var_os("RUSTC")?;
|
||||
let output = Command::new(rustc).arg("--version").output().ok()?;
|
||||
let version = str::from_utf8(&output.stdout).ok()?;
|
||||
let mut pieces = version.split('.');
|
||||
if pieces.next() != Some("rustc 1") {
|
||||
return None;
|
||||
}
|
||||
pieces.next()?.parse().ok()
|
||||
}
|
|
@ -200,10 +200,16 @@ pub fn with_bound(
|
|||
for arg in &arguments.args {
|
||||
match arg {
|
||||
syn::GenericArgument::Type(arg) => self.visit_type(arg),
|
||||
syn::GenericArgument::Binding(arg) => self.visit_type(&arg.ty),
|
||||
syn::GenericArgument::AssocType(arg) => self.visit_type(&arg.ty),
|
||||
syn::GenericArgument::Lifetime(_)
|
||||
| syn::GenericArgument::Constraint(_)
|
||||
| syn::GenericArgument::Const(_) => {}
|
||||
| syn::GenericArgument::Const(_)
|
||||
| syn::GenericArgument::AssocConst(_)
|
||||
| syn::GenericArgument::Constraint(_) => {}
|
||||
#[cfg_attr(
|
||||
all(test, exhaustive),
|
||||
deny(non_exhaustive_omitted_patterns)
|
||||
)]
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -226,7 +232,9 @@ pub fn with_bound(
|
|||
fn visit_type_param_bound(&mut self, bound: &'ast syn::TypeParamBound) {
|
||||
match bound {
|
||||
syn::TypeParamBound::Trait(bound) => self.visit_path(&bound.path),
|
||||
syn::TypeParamBound::Lifetime(_) => {}
|
||||
syn::TypeParamBound::Lifetime(_) | syn::TypeParamBound::Verbatim(_) => {}
|
||||
#[cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -334,7 +342,7 @@ pub fn with_self_bound(
|
|||
|
||||
pub fn with_lifetime_bound(generics: &syn::Generics, lifetime: &str) -> syn::Generics {
|
||||
let bound = syn::Lifetime::new(lifetime, Span::call_site());
|
||||
let def = syn::LifetimeDef {
|
||||
let def = syn::LifetimeParam {
|
||||
attrs: Vec::new(),
|
||||
lifetime: bound.clone(),
|
||||
colon_token: None,
|
||||
|
|
|
@ -69,8 +69,6 @@ pub fn expand_derive_deserialize(
|
|||
|
||||
Ok(dummy::wrap_in_const(
|
||||
cont.attrs.custom_serde_path(),
|
||||
"DESERIALIZE",
|
||||
ident,
|
||||
impl_block,
|
||||
))
|
||||
}
|
||||
|
@ -244,9 +242,9 @@ impl BorrowedLifetimes {
|
|||
}
|
||||
}
|
||||
|
||||
fn de_lifetime_def(&self) -> Option<syn::LifetimeDef> {
|
||||
fn de_lifetime_param(&self) -> Option<syn::LifetimeParam> {
|
||||
match self {
|
||||
BorrowedLifetimes::Borrowed(bounds) => Some(syn::LifetimeDef {
|
||||
BorrowedLifetimes::Borrowed(bounds) => Some(syn::LifetimeParam {
|
||||
attrs: Vec::new(),
|
||||
lifetime: syn::Lifetime::new("'de", Span::call_site()),
|
||||
colon_token: None,
|
||||
|
@ -419,6 +417,7 @@ fn deserialize_unit_struct(params: &Parameters, cattrs: &attr::Container) -> Fra
|
|||
let expecting = cattrs.expecting().unwrap_or(&expecting);
|
||||
|
||||
quote_block! {
|
||||
#[doc(hidden)]
|
||||
struct __Visitor;
|
||||
|
||||
impl<'de> _serde::de::Visitor<'de> for __Visitor {
|
||||
|
@ -515,6 +514,7 @@ fn deserialize_tuple(
|
|||
};
|
||||
|
||||
quote_block! {
|
||||
#[doc(hidden)]
|
||||
struct __Visitor #de_impl_generics #where_clause {
|
||||
marker: _serde::__private::PhantomData<#this_type #ty_generics>,
|
||||
lifetime: _serde::__private::PhantomData<&#delife ()>,
|
||||
|
@ -605,6 +605,7 @@ fn deserialize_tuple_in_place(
|
|||
let place_life = place_lifetime();
|
||||
|
||||
quote_block! {
|
||||
#[doc(hidden)]
|
||||
struct __Visitor #in_place_impl_generics #where_clause {
|
||||
place: &#place_life mut #this_type #ty_generics,
|
||||
lifetime: _serde::__private::PhantomData<&#delife ()>,
|
||||
|
@ -954,6 +955,7 @@ fn deserialize_struct(
|
|||
lifetime: _serde::__private::PhantomData,
|
||||
}
|
||||
};
|
||||
let need_seed = deserializer.is_none();
|
||||
let dispatch = if let Some(deserializer) = deserializer {
|
||||
quote! {
|
||||
_serde::Deserializer::deserialize_any(#deserializer, #visitor_expr)
|
||||
|
@ -999,14 +1001,14 @@ fn deserialize_struct(
|
|||
_ => None,
|
||||
};
|
||||
|
||||
let visitor_seed = if is_enum && cattrs.has_flatten() {
|
||||
let visitor_seed = if need_seed && is_enum && cattrs.has_flatten() {
|
||||
Some(quote! {
|
||||
impl #de_impl_generics _serde::de::DeserializeSeed<#delife> for __Visitor #de_ty_generics #where_clause {
|
||||
type Value = #this_type #ty_generics;
|
||||
|
||||
fn deserialize<__D>(self, __deserializer: __D) -> _serde::__private::Result<Self::Value, __D::Error>
|
||||
where
|
||||
__D: _serde::Deserializer<'de>,
|
||||
__D: _serde::Deserializer<#delife>,
|
||||
{
|
||||
_serde::Deserializer::deserialize_map(__deserializer, self)
|
||||
}
|
||||
|
@ -1019,6 +1021,7 @@ fn deserialize_struct(
|
|||
quote_block! {
|
||||
#field_visitor
|
||||
|
||||
#[doc(hidden)]
|
||||
struct __Visitor #de_impl_generics #where_clause {
|
||||
marker: _serde::__private::PhantomData<#this_type #ty_generics>,
|
||||
lifetime: _serde::__private::PhantomData<&#delife ()>,
|
||||
|
@ -1131,6 +1134,7 @@ fn deserialize_struct_in_place(
|
|||
Some(quote_block! {
|
||||
#field_visitor
|
||||
|
||||
#[doc(hidden)]
|
||||
struct __Visitor #in_place_impl_generics #where_clause {
|
||||
place: &#place_life mut #this_type #ty_generics,
|
||||
lifetime: _serde::__private::PhantomData<&#delife ()>,
|
||||
|
@ -1202,6 +1206,7 @@ fn prepare_enum_variant_enum(
|
|||
let variants_stmt = {
|
||||
let variant_names = variant_names_idents.iter().map(|(name, _, _)| name);
|
||||
quote! {
|
||||
#[doc(hidden)]
|
||||
const VARIANTS: &'static [&'static str] = &[ #(#variant_names),* ];
|
||||
}
|
||||
};
|
||||
|
@ -1256,7 +1261,7 @@ fn deserialize_externally_tagged_enum(
|
|||
// This is an empty enum like `enum Impossible {}` or an enum in which
|
||||
// all variants have `#[serde(skip_deserializing)]`.
|
||||
quote! {
|
||||
// FIXME: Once we drop support for Rust 1.15:
|
||||
// FIXME: Once feature(exhaustive_patterns) is stable:
|
||||
// let _serde::__private::Err(__err) = _serde::de::EnumAccess::variant::<__Field>(__data);
|
||||
// _serde::__private::Err(__err)
|
||||
_serde::__private::Result::map(
|
||||
|
@ -1274,6 +1279,7 @@ fn deserialize_externally_tagged_enum(
|
|||
quote_block! {
|
||||
#variant_visitor
|
||||
|
||||
#[doc(hidden)]
|
||||
struct __Visitor #de_impl_generics #where_clause {
|
||||
marker: _serde::__private::PhantomData<#this_type #ty_generics>,
|
||||
lifetime: _serde::__private::PhantomData<&#delife ()>,
|
||||
|
@ -1521,6 +1527,7 @@ fn deserialize_adjacently_tagged_enum(
|
|||
|
||||
#variants_stmt
|
||||
|
||||
#[doc(hidden)]
|
||||
struct __Seed #de_impl_generics #where_clause {
|
||||
field: __Field,
|
||||
marker: _serde::__private::PhantomData<#this_type #ty_generics>,
|
||||
|
@ -1540,6 +1547,7 @@ fn deserialize_adjacently_tagged_enum(
|
|||
}
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
struct __Visitor #de_impl_generics #where_clause {
|
||||
marker: _serde::__private::PhantomData<#this_type #ty_generics>,
|
||||
lifetime: _serde::__private::PhantomData<&#delife ()>,
|
||||
|
@ -1642,6 +1650,7 @@ fn deserialize_adjacently_tagged_enum(
|
|||
}
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
const FIELDS: &'static [&'static str] = &[#tag, #content];
|
||||
_serde::Deserializer::deserialize_struct(
|
||||
__deserializer,
|
||||
|
@ -1953,11 +1962,13 @@ fn deserialize_generated_identifier(
|
|||
|
||||
quote_block! {
|
||||
#[allow(non_camel_case_types)]
|
||||
#[doc(hidden)]
|
||||
enum __Field #lifetime {
|
||||
#(#field_idents,)*
|
||||
#ignore_variant
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
struct __FieldVisitor;
|
||||
|
||||
impl<'de> _serde::de::Visitor<'de> for __FieldVisitor {
|
||||
|
@ -2045,11 +2056,13 @@ fn deserialize_custom_identifier(
|
|||
None
|
||||
} else if is_variant {
|
||||
let variants = quote! {
|
||||
#[doc(hidden)]
|
||||
const VARIANTS: &'static [&'static str] = &[ #(#names),* ];
|
||||
};
|
||||
Some(variants)
|
||||
} else {
|
||||
let fields = quote! {
|
||||
#[doc(hidden)]
|
||||
const FIELDS: &'static [&'static str] = &[ #(#names),* ];
|
||||
};
|
||||
Some(fields)
|
||||
|
@ -2071,6 +2084,7 @@ fn deserialize_custom_identifier(
|
|||
quote_block! {
|
||||
#names_const
|
||||
|
||||
#[doc(hidden)]
|
||||
struct __FieldVisitor #de_impl_generics #where_clause {
|
||||
marker: _serde::__private::PhantomData<#this_type #ty_generics>,
|
||||
lifetime: _serde::__private::PhantomData<&#delife ()>,
|
||||
|
@ -2400,8 +2414,12 @@ fn deserialize_struct_as_struct_visitor(
|
|||
.collect();
|
||||
|
||||
let fields_stmt = {
|
||||
let field_names = field_names_idents.iter().map(|(name, _, _)| name);
|
||||
let field_names = field_names_idents
|
||||
.iter()
|
||||
.flat_map(|(_, _, aliases)| aliases);
|
||||
|
||||
quote_block! {
|
||||
#[doc(hidden)]
|
||||
const FIELDS: &'static [&'static str] = &[ #(#field_names),* ];
|
||||
}
|
||||
};
|
||||
|
@ -2535,7 +2553,7 @@ fn deserialize_map(
|
|||
let all_skipped = fields.iter().all(|field| field.attrs.skip_deserializing());
|
||||
let match_keys = if cattrs.deny_unknown_fields() && all_skipped {
|
||||
quote! {
|
||||
// FIXME: Once we drop support for Rust 1.15:
|
||||
// FIXME: Once feature(exhaustive_patterns) is stable:
|
||||
// let _serde::__private::None::<__Field> = try!(_serde::de::MapAccess::next_key(&mut __map));
|
||||
_serde::__private::Option::map(
|
||||
try!(_serde::de::MapAccess::next_key::<__Field>(&mut __map)),
|
||||
|
@ -2680,6 +2698,7 @@ fn deserialize_struct_as_struct_in_place_visitor(
|
|||
let fields_stmt = {
|
||||
let field_names = field_names_idents.iter().map(|(name, _, _)| name);
|
||||
quote_block! {
|
||||
#[doc(hidden)]
|
||||
const FIELDS: &'static [&'static str] = &[ #(#field_names),* ];
|
||||
}
|
||||
};
|
||||
|
@ -2768,7 +2787,7 @@ fn deserialize_map_in_place(
|
|||
|
||||
let match_keys = if cattrs.deny_unknown_fields() && all_skipped {
|
||||
quote! {
|
||||
// FIXME: Once we drop support for Rust 1.15:
|
||||
// FIXME: Once feature(exhaustive_patterns) is stable:
|
||||
// let _serde::__private::None::<__Field> = try!(_serde::de::MapAccess::next_key(&mut __map));
|
||||
_serde::__private::Option::map(
|
||||
try!(_serde::de::MapAccess::next_key::<__Field>(&mut __map)),
|
||||
|
@ -2860,6 +2879,7 @@ fn wrap_deserialize_with(
|
|||
let delife = params.borrowed.de_lifetime();
|
||||
|
||||
let wrapper = quote! {
|
||||
#[doc(hidden)]
|
||||
struct __DeserializeWith #de_impl_generics #where_clause {
|
||||
value: #value_ty,
|
||||
phantom: _serde::__private::PhantomData<#this_type #ty_generics>,
|
||||
|
@ -3007,7 +3027,7 @@ struct InPlaceImplGenerics<'a>(&'a Parameters);
|
|||
impl<'a> ToTokens for DeImplGenerics<'a> {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let mut generics = self.0.generics.clone();
|
||||
if let Some(de_lifetime) = self.0.borrowed.de_lifetime_def() {
|
||||
if let Some(de_lifetime) = self.0.borrowed.de_lifetime_param() {
|
||||
generics.params = Some(syn::GenericParam::Lifetime(de_lifetime))
|
||||
.into_iter()
|
||||
.chain(generics.params)
|
||||
|
@ -3042,7 +3062,7 @@ impl<'a> ToTokens for InPlaceImplGenerics<'a> {
|
|||
.into_iter()
|
||||
.chain(generics.params)
|
||||
.collect();
|
||||
if let Some(de_lifetime) = self.0.borrowed.de_lifetime_def() {
|
||||
if let Some(de_lifetime) = self.0.borrowed.de_lifetime_param() {
|
||||
generics.params = Some(syn::GenericParam::Lifetime(de_lifetime))
|
||||
.into_iter()
|
||||
.chain(generics.params)
|
||||
|
@ -3064,23 +3084,31 @@ struct DeTypeGenerics<'a>(&'a Parameters);
|
|||
#[cfg(feature = "deserialize_in_place")]
|
||||
struct InPlaceTypeGenerics<'a>(&'a Parameters);
|
||||
|
||||
fn de_type_generics_to_tokens(
|
||||
mut generics: syn::Generics,
|
||||
borrowed: &BorrowedLifetimes,
|
||||
tokens: &mut TokenStream,
|
||||
) {
|
||||
if borrowed.de_lifetime_param().is_some() {
|
||||
let def = syn::LifetimeParam {
|
||||
attrs: Vec::new(),
|
||||
lifetime: syn::Lifetime::new("'de", Span::call_site()),
|
||||
colon_token: None,
|
||||
bounds: Punctuated::new(),
|
||||
};
|
||||
// Prepend 'de lifetime to list of generics
|
||||
generics.params = Some(syn::GenericParam::Lifetime(def))
|
||||
.into_iter()
|
||||
.chain(generics.params)
|
||||
.collect();
|
||||
}
|
||||
let (_, ty_generics, _) = generics.split_for_impl();
|
||||
ty_generics.to_tokens(tokens);
|
||||
}
|
||||
|
||||
impl<'a> ToTokens for DeTypeGenerics<'a> {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let mut generics = self.0.generics.clone();
|
||||
if self.0.borrowed.de_lifetime_def().is_some() {
|
||||
let def = syn::LifetimeDef {
|
||||
attrs: Vec::new(),
|
||||
lifetime: syn::Lifetime::new("'de", Span::call_site()),
|
||||
colon_token: None,
|
||||
bounds: Punctuated::new(),
|
||||
};
|
||||
generics.params = Some(syn::GenericParam::Lifetime(def))
|
||||
.into_iter()
|
||||
.chain(generics.params)
|
||||
.collect();
|
||||
}
|
||||
let (_, ty_generics, _) = generics.split_for_impl();
|
||||
ty_generics.to_tokens(tokens);
|
||||
de_type_generics_to_tokens(self.0.generics.clone(), &self.0.borrowed, tokens);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -3093,20 +3121,7 @@ impl<'a> ToTokens for InPlaceTypeGenerics<'a> {
|
|||
.chain(generics.params)
|
||||
.collect();
|
||||
|
||||
if self.0.borrowed.de_lifetime_def().is_some() {
|
||||
let def = syn::LifetimeDef {
|
||||
attrs: Vec::new(),
|
||||
lifetime: syn::Lifetime::new("'de", Span::call_site()),
|
||||
colon_token: None,
|
||||
bounds: Punctuated::new(),
|
||||
};
|
||||
generics.params = Some(syn::GenericParam::Lifetime(def))
|
||||
.into_iter()
|
||||
.chain(generics.params)
|
||||
.collect();
|
||||
}
|
||||
let (_, ty_generics, _) = generics.split_for_impl();
|
||||
ty_generics.to_tokens(tokens);
|
||||
de_type_generics_to_tokens(generics, &self.0.borrowed, tokens);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -3118,8 +3133,8 @@ impl<'a> DeTypeGenerics<'a> {
|
|||
}
|
||||
|
||||
#[cfg(feature = "deserialize_in_place")]
|
||||
fn place_lifetime() -> syn::LifetimeDef {
|
||||
syn::LifetimeDef {
|
||||
fn place_lifetime() -> syn::LifetimeParam {
|
||||
syn::LifetimeParam {
|
||||
attrs: Vec::new(),
|
||||
lifetime: syn::Lifetime::new("'place", Span::call_site()),
|
||||
colon_token: None,
|
||||
|
|
|
@ -1,23 +1,11 @@
|
|||
use proc_macro2::{Ident, TokenStream};
|
||||
use quote::format_ident;
|
||||
use proc_macro2::TokenStream;
|
||||
|
||||
use syn;
|
||||
use try;
|
||||
|
||||
pub fn wrap_in_const(
|
||||
serde_path: Option<&syn::Path>,
|
||||
trait_: &str,
|
||||
ty: &Ident,
|
||||
code: TokenStream,
|
||||
) -> TokenStream {
|
||||
pub fn wrap_in_const(serde_path: Option<&syn::Path>, code: TokenStream) -> TokenStream {
|
||||
let try_replacement = try::replacement();
|
||||
|
||||
let dummy_const = if cfg!(no_underscore_consts) {
|
||||
format_ident!("_IMPL_{}_FOR_{}", trait_, unraw(ty))
|
||||
} else {
|
||||
format_ident!("_")
|
||||
};
|
||||
|
||||
let use_serde = match serde_path {
|
||||
Some(path) => quote! {
|
||||
use #path as _serde;
|
||||
|
@ -31,18 +19,10 @@ pub fn wrap_in_const(
|
|||
quote! {
|
||||
#[doc(hidden)]
|
||||
#[allow(non_upper_case_globals, unused_attributes, unused_qualifications)]
|
||||
const #dummy_const: () = {
|
||||
const _: () = {
|
||||
#use_serde
|
||||
#try_replacement
|
||||
#code
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(deprecated)]
|
||||
fn unraw(ident: &Ident) -> String {
|
||||
// str::trim_start_matches was added in 1.30, trim_left_matches deprecated
|
||||
// in 1.33. We currently support rustc back to 1.15 so we need to continue
|
||||
// to use the deprecated one.
|
||||
ident.to_string().trim_left_matches("r#").to_owned()
|
||||
}
|
||||
|
|
Разница между файлами не показана из-за своего большого размера
Загрузить разницу
|
@ -3,8 +3,8 @@ use internals::attr::{Identifier, TagType};
|
|||
use internals::{ungroup, Ctxt, Derive};
|
||||
use syn::{Member, Type};
|
||||
|
||||
/// Cross-cutting checks that require looking at more than a single attrs
|
||||
/// object. Simpler checks should happen when parsing and building the attrs.
|
||||
// Cross-cutting checks that require looking at more than a single attrs object.
|
||||
// Simpler checks should happen when parsing and building the attrs.
|
||||
pub fn check(cx: &Ctxt, cont: &mut Container, derive: Derive) {
|
||||
check_remote_generic(cx, cont);
|
||||
check_getter(cx, cont);
|
||||
|
@ -17,18 +17,18 @@ pub fn check(cx: &Ctxt, cont: &mut Container, derive: Derive) {
|
|||
check_from_and_try_from(cx, cont);
|
||||
}
|
||||
|
||||
/// Remote derive definition type must have either all of the generics of the
|
||||
/// remote type:
|
||||
///
|
||||
/// #[serde(remote = "Generic")]
|
||||
/// struct Generic<T> {…}
|
||||
///
|
||||
/// or none of them, i.e. defining impls for one concrete instantiation of the
|
||||
/// remote type only:
|
||||
///
|
||||
/// #[serde(remote = "Generic<T>")]
|
||||
/// struct ConcreteDef {…}
|
||||
///
|
||||
// Remote derive definition type must have either all of the generics of the
|
||||
// remote type:
|
||||
//
|
||||
// #[serde(remote = "Generic")]
|
||||
// struct Generic<T> {…}
|
||||
//
|
||||
// or none of them, i.e. defining impls for one concrete instantiation of the
|
||||
// remote type only:
|
||||
//
|
||||
// #[serde(remote = "Generic<T>")]
|
||||
// struct ConcreteDef {…}
|
||||
//
|
||||
fn check_remote_generic(cx: &Ctxt, cont: &Container) {
|
||||
if let Some(remote) = cont.attrs.remote() {
|
||||
let local_has_generic = !cont.generics.params.is_empty();
|
||||
|
@ -39,8 +39,8 @@ fn check_remote_generic(cx: &Ctxt, cont: &Container) {
|
|||
}
|
||||
}
|
||||
|
||||
/// Getters are only allowed inside structs (not enums) with the `remote`
|
||||
/// attribute.
|
||||
// Getters are only allowed inside structs (not enums) with the `remote`
|
||||
// attribute.
|
||||
fn check_getter(cx: &Ctxt, cont: &Container) {
|
||||
match cont.data {
|
||||
Data::Enum(_) => {
|
||||
|
@ -62,7 +62,7 @@ fn check_getter(cx: &Ctxt, cont: &Container) {
|
|||
}
|
||||
}
|
||||
|
||||
/// Flattening has some restrictions we can test.
|
||||
// Flattening has some restrictions we can test.
|
||||
fn check_flatten(cx: &Ctxt, cont: &Container) {
|
||||
match &cont.data {
|
||||
Data::Enum(variants) => {
|
||||
|
@ -101,12 +101,12 @@ fn check_flatten_field(cx: &Ctxt, style: Style, field: &Field) {
|
|||
}
|
||||
}
|
||||
|
||||
/// The `other` attribute must be used at most once and it must be the last
|
||||
/// variant of an enum.
|
||||
///
|
||||
/// Inside a `variant_identifier` all variants must be unit variants. Inside a
|
||||
/// `field_identifier` all but possibly one variant must be unit variants. The
|
||||
/// last variant may be a newtype variant which is an implicit "other" case.
|
||||
// The `other` attribute must be used at most once and it must be the last
|
||||
// variant of an enum.
|
||||
//
|
||||
// Inside a `variant_identifier` all variants must be unit variants. Inside a
|
||||
// `field_identifier` all but possibly one variant must be unit variants. The
|
||||
// last variant may be a newtype variant which is an implicit "other" case.
|
||||
fn check_identifier(cx: &Ctxt, cont: &Container) {
|
||||
let variants = match &cont.data {
|
||||
Data::Enum(variants) => variants,
|
||||
|
@ -189,8 +189,8 @@ fn check_identifier(cx: &Ctxt, cont: &Container) {
|
|||
}
|
||||
}
|
||||
|
||||
/// Skip-(de)serializing attributes are not allowed on variants marked
|
||||
/// (de)serialize_with.
|
||||
// Skip-(de)serializing attributes are not allowed on variants marked
|
||||
// (de)serialize_with.
|
||||
fn check_variant_skip_attrs(cx: &Ctxt, cont: &Container) {
|
||||
let variants = match &cont.data {
|
||||
Data::Enum(variants) => variants,
|
||||
|
@ -264,10 +264,9 @@ fn check_variant_skip_attrs(cx: &Ctxt, cont: &Container) {
|
|||
}
|
||||
}
|
||||
|
||||
/// The tag of an internally-tagged struct variant must not be
|
||||
/// the same as either one of its fields, as this would result in
|
||||
/// duplicate keys in the serialized output and/or ambiguity in
|
||||
/// the to-be-deserialized input.
|
||||
// The tag of an internally-tagged struct variant must not be the same as either
|
||||
// one of its fields, as this would result in duplicate keys in the serialized
|
||||
// output and/or ambiguity in the to-be-deserialized input.
|
||||
fn check_internal_tag_field_name_conflict(cx: &Ctxt, cont: &Container) {
|
||||
let variants = match &cont.data {
|
||||
Data::Enum(variants) => variants,
|
||||
|
@ -313,8 +312,8 @@ fn check_internal_tag_field_name_conflict(cx: &Ctxt, cont: &Container) {
|
|||
}
|
||||
}
|
||||
|
||||
/// In the case of adjacently-tagged enums, the type and the
|
||||
/// contents tag must differ, for the same reason.
|
||||
// In the case of adjacently-tagged enums, the type and the contents tag must
|
||||
// differ, for the same reason.
|
||||
fn check_adjacent_tag_conflict(cx: &Ctxt, cont: &Container) {
|
||||
let (type_tag, content_tag) = match cont.attrs.tag() {
|
||||
TagType::Adjacent { tag, content } => (tag, content),
|
||||
|
@ -332,7 +331,7 @@ fn check_adjacent_tag_conflict(cx: &Ctxt, cont: &Container) {
|
|||
}
|
||||
}
|
||||
|
||||
/// Enums and unit structs cannot be transparent.
|
||||
// Enums and unit structs cannot be transparent.
|
||||
fn check_transparent(cx: &Ctxt, cont: &mut Container, derive: Derive) {
|
||||
if !cont.attrs.transparent() {
|
||||
return;
|
||||
|
|
|
@ -179,10 +179,13 @@ impl ReplaceReceiver<'_> {
|
|||
for arg in &mut arguments.args {
|
||||
match arg {
|
||||
GenericArgument::Type(arg) => self.visit_type_mut(arg),
|
||||
GenericArgument::Binding(arg) => self.visit_type_mut(&mut arg.ty),
|
||||
GenericArgument::AssocType(arg) => self.visit_type_mut(&mut arg.ty),
|
||||
GenericArgument::Lifetime(_)
|
||||
| GenericArgument::Constraint(_)
|
||||
| GenericArgument::Const(_) => {}
|
||||
| GenericArgument::Const(_)
|
||||
| GenericArgument::AssocConst(_)
|
||||
| GenericArgument::Constraint(_) => {}
|
||||
#[cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -205,7 +208,9 @@ impl ReplaceReceiver<'_> {
|
|||
fn visit_type_param_bound_mut(&mut self, bound: &mut TypeParamBound) {
|
||||
match bound {
|
||||
TypeParamBound::Trait(bound) => self.visit_path_mut(&mut bound.path),
|
||||
TypeParamBound::Lifetime(_) => {}
|
||||
TypeParamBound::Lifetime(_) | TypeParamBound::Verbatim(_) => {}
|
||||
#[cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -229,7 +234,9 @@ impl ReplaceReceiver<'_> {
|
|||
self.visit_type_param_bound_mut(bound);
|
||||
}
|
||||
}
|
||||
WherePredicate::Lifetime(_) | WherePredicate::Eq(_) => {}
|
||||
WherePredicate::Lifetime(_) => {}
|
||||
#[cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -13,6 +13,7 @@ pub const DEFAULT: Symbol = Symbol("default");
|
|||
pub const DENY_UNKNOWN_FIELDS: Symbol = Symbol("deny_unknown_fields");
|
||||
pub const DESERIALIZE: Symbol = Symbol("deserialize");
|
||||
pub const DESERIALIZE_WITH: Symbol = Symbol("deserialize_with");
|
||||
pub const EXPECTING: Symbol = Symbol("expecting");
|
||||
pub const FIELD_IDENTIFIER: Symbol = Symbol("field_identifier");
|
||||
pub const FLATTEN: Symbol = Symbol("flatten");
|
||||
pub const FROM: Symbol = Symbol("from");
|
||||
|
@ -22,6 +23,7 @@ pub const OTHER: Symbol = Symbol("other");
|
|||
pub const REMOTE: Symbol = Symbol("remote");
|
||||
pub const RENAME: Symbol = Symbol("rename");
|
||||
pub const RENAME_ALL: Symbol = Symbol("rename_all");
|
||||
pub const REPR: Symbol = Symbol("repr");
|
||||
pub const SERDE: Symbol = Symbol("serde");
|
||||
pub const SERIALIZE: Symbol = Symbol("serialize");
|
||||
pub const SERIALIZE_WITH: Symbol = Symbol("serialize_with");
|
||||
|
@ -35,7 +37,6 @@ pub const TRY_FROM: Symbol = Symbol("try_from");
|
|||
pub const UNTAGGED: Symbol = Symbol("untagged");
|
||||
pub const VARIANT_IDENTIFIER: Symbol = Symbol("variant_identifier");
|
||||
pub const WITH: Symbol = Symbol("with");
|
||||
pub const EXPECTING: Symbol = Symbol("expecting");
|
||||
|
||||
impl PartialEq<Symbol> for Ident {
|
||||
fn eq(&self, word: &Symbol) -> bool {
|
||||
|
|
|
@ -13,7 +13,7 @@
|
|||
//!
|
||||
//! [https://serde.rs/derive.html]: https://serde.rs/derive.html
|
||||
|
||||
#![doc(html_root_url = "https://docs.rs/serde_derive/1.0.152")]
|
||||
#![doc(html_root_url = "https://docs.rs/serde_derive/1.0.163")]
|
||||
#![allow(unknown_lints, bare_trait_objects)]
|
||||
// Ignored clippy lints
|
||||
#![allow(
|
||||
|
@ -43,6 +43,7 @@
|
|||
clippy::enum_glob_use,
|
||||
clippy::indexing_slicing,
|
||||
clippy::items_after_statements,
|
||||
clippy::let_underscore_untyped,
|
||||
clippy::manual_assert,
|
||||
clippy::map_err_ignore,
|
||||
clippy::match_same_arms,
|
||||
|
|
|
@ -97,29 +97,14 @@ fn pretend_fields_used_struct_packed(cont: &Container, fields: &[Field]) -> Toke
|
|||
|
||||
let members = fields.iter().map(|field| &field.member).collect::<Vec<_>>();
|
||||
|
||||
#[cfg(not(no_ptr_addr_of))]
|
||||
{
|
||||
quote! {
|
||||
match _serde::__private::None::<&#type_ident #ty_generics> {
|
||||
_serde::__private::Some(__v @ #type_ident { #(#members: _),* }) => {
|
||||
#(
|
||||
let _ = _serde::__private::ptr::addr_of!(__v.#members);
|
||||
)*
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(no_ptr_addr_of)]
|
||||
{
|
||||
let placeholders = (0usize..).map(|i| format_ident!("__v{}", i));
|
||||
|
||||
quote! {
|
||||
match _serde::__private::None::<#type_ident #ty_generics> {
|
||||
_serde::__private::Some(#type_ident { #(#members: #placeholders),* }) => {}
|
||||
_ => {}
|
||||
quote! {
|
||||
match _serde::__private::None::<&#type_ident #ty_generics> {
|
||||
_serde::__private::Some(__v @ #type_ident { #(#members: _),* }) => {
|
||||
#(
|
||||
let _ = _serde::__private::ptr::addr_of!(__v.#members);
|
||||
)*
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -59,8 +59,6 @@ pub fn expand_derive_serialize(
|
|||
|
||||
Ok(dummy::wrap_in_const(
|
||||
cont.attrs.custom_serde_path(),
|
||||
"SERIALIZE",
|
||||
ident,
|
||||
impl_block,
|
||||
))
|
||||
}
|
||||
|
@ -719,6 +717,7 @@ fn serialize_adjacently_tagged_variant(
|
|||
let (wrapper_impl_generics, wrapper_ty_generics, _) = wrapper_generics.split_for_impl();
|
||||
|
||||
quote_block! {
|
||||
#[doc(hidden)]
|
||||
struct __AdjacentlyTagged #wrapper_generics #where_clause {
|
||||
data: (#(&'__a #fields_ty,)*),
|
||||
phantom: _serde::__private::PhantomData<#this_type #ty_generics>,
|
||||
|
@ -982,6 +981,7 @@ fn serialize_struct_variant_with_flatten(
|
|||
let (wrapper_impl_generics, wrapper_ty_generics, _) = wrapper_generics.split_for_impl();
|
||||
|
||||
quote_block! {
|
||||
#[doc(hidden)]
|
||||
struct __EnumFlatten #wrapper_generics #where_clause {
|
||||
data: (#(&'__a #fields_ty,)*),
|
||||
phantom: _serde::__private::PhantomData<#this_type #ty_generics>,
|
||||
|
@ -1212,6 +1212,7 @@ fn wrap_serialize_with(
|
|||
});
|
||||
|
||||
quote!({
|
||||
#[doc(hidden)]
|
||||
struct __SerializeWith #wrapper_impl_generics #where_clause {
|
||||
values: (#(&'__a #field_tys, )*),
|
||||
phantom: _serde::__private::PhantomData<#this_type #ty_generics>,
|
||||
|
|
Различия файлов скрыты, потому что одна или несколько строк слишком длинны
|
@ -0,0 +1,147 @@
|
|||
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
|
||||
#
|
||||
# When uploading crates to the registry Cargo will automatically
|
||||
# "normalize" Cargo.toml files for maximal compatibility
|
||||
# with all versions of Cargo and also rewrite `path` dependencies
|
||||
# to registry (e.g., crates.io) dependencies.
|
||||
#
|
||||
# If you are reading this file be aware that the original Cargo.toml
|
||||
# will likely look very different (and much more reasonable).
|
||||
# See Cargo.toml.orig for the original contents.
|
||||
|
||||
[package]
|
||||
edition = "2018"
|
||||
rust-version = "1.31"
|
||||
name = "syn"
|
||||
version = "1.0.107"
|
||||
authors = ["David Tolnay <dtolnay@gmail.com>"]
|
||||
include = [
|
||||
"/benches/**",
|
||||
"/build.rs",
|
||||
"/Cargo.toml",
|
||||
"/LICENSE-APACHE",
|
||||
"/LICENSE-MIT",
|
||||
"/README.md",
|
||||
"/src/**",
|
||||
"/tests/**",
|
||||
]
|
||||
description = "Parser for Rust source code"
|
||||
documentation = "https://docs.rs/syn"
|
||||
readme = "README.md"
|
||||
keywords = [
|
||||
"macros",
|
||||
"syn",
|
||||
]
|
||||
categories = [
|
||||
"development-tools::procedural-macro-helpers",
|
||||
"parser-implementations",
|
||||
]
|
||||
license = "MIT OR Apache-2.0"
|
||||
repository = "https://github.com/dtolnay/syn"
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
all-features = true
|
||||
targets = ["x86_64-unknown-linux-gnu"]
|
||||
rustdoc-args = [
|
||||
"--cfg",
|
||||
"doc_cfg",
|
||||
]
|
||||
|
||||
[package.metadata.playground]
|
||||
features = [
|
||||
"full",
|
||||
"visit",
|
||||
"visit-mut",
|
||||
"fold",
|
||||
"extra-traits",
|
||||
]
|
||||
|
||||
[lib]
|
||||
doc-scrape-examples = false
|
||||
|
||||
[[bench]]
|
||||
name = "rust"
|
||||
harness = false
|
||||
required-features = [
|
||||
"full",
|
||||
"parsing",
|
||||
]
|
||||
|
||||
[[bench]]
|
||||
name = "file"
|
||||
required-features = [
|
||||
"full",
|
||||
"parsing",
|
||||
]
|
||||
|
||||
[dependencies.proc-macro2]
|
||||
version = "1.0.46"
|
||||
default-features = false
|
||||
|
||||
[dependencies.quote]
|
||||
version = "1.0"
|
||||
optional = true
|
||||
default-features = false
|
||||
|
||||
[dependencies.unicode-ident]
|
||||
version = "1.0"
|
||||
|
||||
[dev-dependencies.anyhow]
|
||||
version = "1.0"
|
||||
|
||||
[dev-dependencies.automod]
|
||||
version = "1.0"
|
||||
|
||||
[dev-dependencies.flate2]
|
||||
version = "1.0"
|
||||
|
||||
[dev-dependencies.insta]
|
||||
version = "1.0"
|
||||
|
||||
[dev-dependencies.rayon]
|
||||
version = "1.0"
|
||||
|
||||
[dev-dependencies.ref-cast]
|
||||
version = "1.0"
|
||||
|
||||
[dev-dependencies.regex]
|
||||
version = "1.0"
|
||||
|
||||
[dev-dependencies.reqwest]
|
||||
version = "0.11"
|
||||
features = ["blocking"]
|
||||
|
||||
[dev-dependencies.syn-test-suite]
|
||||
version = "0"
|
||||
|
||||
[dev-dependencies.tar]
|
||||
version = "0.4.16"
|
||||
|
||||
[dev-dependencies.termcolor]
|
||||
version = "1.0"
|
||||
|
||||
[dev-dependencies.walkdir]
|
||||
version = "2.1"
|
||||
|
||||
[features]
|
||||
clone-impls = []
|
||||
default = [
|
||||
"derive",
|
||||
"parsing",
|
||||
"printing",
|
||||
"clone-impls",
|
||||
"proc-macro",
|
||||
]
|
||||
derive = []
|
||||
extra-traits = []
|
||||
fold = []
|
||||
full = []
|
||||
parsing = []
|
||||
printing = ["quote"]
|
||||
proc-macro = [
|
||||
"proc-macro2/proc-macro",
|
||||
"quote/proc-macro",
|
||||
]
|
||||
test = ["syn-test-suite/all-features"]
|
||||
visit = []
|
||||
visit-mut = []
|
|
@ -0,0 +1,201 @@
|
|||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
|
@ -0,0 +1,23 @@
|
|||
Permission is hereby granted, free of charge, to any
|
||||
person obtaining a copy of this software and associated
|
||||
documentation files (the "Software"), to deal in the
|
||||
Software without restriction, including without
|
||||
limitation the rights to use, copy, modify, merge,
|
||||
publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software
|
||||
is furnished to do so, subject to the following
|
||||
conditions:
|
||||
|
||||
The above copyright notice and this permission notice
|
||||
shall be included in all copies or substantial portions
|
||||
of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
|
||||
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
|
||||
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
|
||||
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
|
||||
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
||||
DEALINGS IN THE SOFTWARE.
|
|
@ -0,0 +1,285 @@
|
|||
Parser for Rust source code
|
||||
===========================
|
||||
|
||||
[<img alt="github" src="https://img.shields.io/badge/github-dtolnay/syn-8da0cb?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/dtolnay/syn)
|
||||
[<img alt="crates.io" src="https://img.shields.io/crates/v/syn.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/syn)
|
||||
[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-syn-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs" height="20">](https://docs.rs/syn)
|
||||
[<img alt="build status" src="https://img.shields.io/github/actions/workflow/status/dtolnay/syn/ci.yml?branch=master&style=for-the-badge" height="20">](https://github.com/dtolnay/syn/actions?query=branch%3Amaster)
|
||||
|
||||
Syn is a parsing library for parsing a stream of Rust tokens into a syntax tree
|
||||
of Rust source code.
|
||||
|
||||
Currently this library is geared toward use in Rust procedural macros, but
|
||||
contains some APIs that may be useful more generally.
|
||||
|
||||
- **Data structures** — Syn provides a complete syntax tree that can represent
|
||||
any valid Rust source code. The syntax tree is rooted at [`syn::File`] which
|
||||
represents a full source file, but there are other entry points that may be
|
||||
useful to procedural macros including [`syn::Item`], [`syn::Expr`] and
|
||||
[`syn::Type`].
|
||||
|
||||
- **Derives** — Of particular interest to derive macros is [`syn::DeriveInput`]
|
||||
which is any of the three legal input items to a derive macro. An example
|
||||
below shows using this type in a library that can derive implementations of a
|
||||
user-defined trait.
|
||||
|
||||
- **Parsing** — Parsing in Syn is built around [parser functions] with the
|
||||
signature `fn(ParseStream) -> Result<T>`. Every syntax tree node defined by
|
||||
Syn is individually parsable and may be used as a building block for custom
|
||||
syntaxes, or you may dream up your own brand new syntax without involving any
|
||||
of our syntax tree types.
|
||||
|
||||
- **Location information** — Every token parsed by Syn is associated with a
|
||||
`Span` that tracks line and column information back to the source of that
|
||||
token. These spans allow a procedural macro to display detailed error messages
|
||||
pointing to all the right places in the user's code. There is an example of
|
||||
this below.
|
||||
|
||||
- **Feature flags** — Functionality is aggressively feature gated so your
|
||||
procedural macros enable only what they need, and do not pay in compile time
|
||||
for all the rest.
|
||||
|
||||
[`syn::File`]: https://docs.rs/syn/1.0/syn/struct.File.html
|
||||
[`syn::Item`]: https://docs.rs/syn/1.0/syn/enum.Item.html
|
||||
[`syn::Expr`]: https://docs.rs/syn/1.0/syn/enum.Expr.html
|
||||
[`syn::Type`]: https://docs.rs/syn/1.0/syn/enum.Type.html
|
||||
[`syn::DeriveInput`]: https://docs.rs/syn/1.0/syn/struct.DeriveInput.html
|
||||
[parser functions]: https://docs.rs/syn/1.0/syn/parse/index.html
|
||||
|
||||
*Version requirement: Syn supports rustc 1.31 and up.*
|
||||
|
||||
[*Release notes*](https://github.com/dtolnay/syn/releases)
|
||||
|
||||
<br>
|
||||
|
||||
## Resources
|
||||
|
||||
The best way to learn about procedural macros is by writing some. Consider
|
||||
working through [this procedural macro workshop][workshop] to get familiar with
|
||||
the different types of procedural macros. The workshop contains relevant links
|
||||
into the Syn documentation as you work through each project.
|
||||
|
||||
[workshop]: https://github.com/dtolnay/proc-macro-workshop
|
||||
|
||||
<br>
|
||||
|
||||
## Example of a derive macro
|
||||
|
||||
The canonical derive macro using Syn looks like this. We write an ordinary Rust
|
||||
function tagged with a `proc_macro_derive` attribute and the name of the trait
|
||||
we are deriving. Any time that derive appears in the user's code, the Rust
|
||||
compiler passes their data structure as tokens into our macro. We get to execute
|
||||
arbitrary Rust code to figure out what to do with those tokens, then hand some
|
||||
tokens back to the compiler to compile into the user's crate.
|
||||
|
||||
[`TokenStream`]: https://doc.rust-lang.org/proc_macro/struct.TokenStream.html
|
||||
|
||||
```toml
|
||||
[dependencies]
|
||||
syn = "1.0"
|
||||
quote = "1.0"
|
||||
|
||||
[lib]
|
||||
proc-macro = true
|
||||
```
|
||||
|
||||
```rust
|
||||
use proc_macro::TokenStream;
|
||||
use quote::quote;
|
||||
use syn::{parse_macro_input, DeriveInput};
|
||||
|
||||
#[proc_macro_derive(MyMacro)]
|
||||
pub fn my_macro(input: TokenStream) -> TokenStream {
|
||||
// Parse the input tokens into a syntax tree
|
||||
let input = parse_macro_input!(input as DeriveInput);
|
||||
|
||||
// Build the output, possibly using quasi-quotation
|
||||
let expanded = quote! {
|
||||
// ...
|
||||
};
|
||||
|
||||
// Hand the output tokens back to the compiler
|
||||
TokenStream::from(expanded)
|
||||
}
|
||||
```
|
||||
|
||||
The [`heapsize`] example directory shows a complete working implementation of a
|
||||
derive macro. It works on any Rust compiler 1.31+. The example derives a
|
||||
`HeapSize` trait which computes an estimate of the amount of heap memory owned
|
||||
by a value.
|
||||
|
||||
[`heapsize`]: examples/heapsize
|
||||
|
||||
```rust
|
||||
pub trait HeapSize {
|
||||
/// Total number of bytes of heap memory owned by `self`.
|
||||
fn heap_size_of_children(&self) -> usize;
|
||||
}
|
||||
```
|
||||
|
||||
The derive macro allows users to write `#[derive(HeapSize)]` on data structures
|
||||
in their program.
|
||||
|
||||
```rust
|
||||
#[derive(HeapSize)]
|
||||
struct Demo<'a, T: ?Sized> {
|
||||
a: Box<T>,
|
||||
b: u8,
|
||||
c: &'a str,
|
||||
d: String,
|
||||
}
|
||||
```
|
||||
|
||||
<br>
|
||||
|
||||
## Spans and error reporting
|
||||
|
||||
The token-based procedural macro API provides great control over where the
|
||||
compiler's error messages are displayed in user code. Consider the error the
|
||||
user sees if one of their field types does not implement `HeapSize`.
|
||||
|
||||
```rust
|
||||
#[derive(HeapSize)]
|
||||
struct Broken {
|
||||
ok: String,
|
||||
bad: std::thread::Thread,
|
||||
}
|
||||
```
|
||||
|
||||
By tracking span information all the way through the expansion of a procedural
|
||||
macro as shown in the `heapsize` example, token-based macros in Syn are able to
|
||||
trigger errors that directly pinpoint the source of the problem.
|
||||
|
||||
```console
|
||||
error[E0277]: the trait bound `std::thread::Thread: HeapSize` is not satisfied
|
||||
--> src/main.rs:7:5
|
||||
|
|
||||
7 | bad: std::thread::Thread,
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^ the trait `HeapSize` is not implemented for `std::thread::Thread`
|
||||
```
|
||||
|
||||
<br>
|
||||
|
||||
## Parsing a custom syntax
|
||||
|
||||
The [`lazy-static`] example directory shows the implementation of a
|
||||
`functionlike!(...)` procedural macro in which the input tokens are parsed using
|
||||
Syn's parsing API.
|
||||
|
||||
[`lazy-static`]: examples/lazy-static
|
||||
|
||||
The example reimplements the popular `lazy_static` crate from crates.io as a
|
||||
procedural macro.
|
||||
|
||||
```rust
|
||||
lazy_static! {
|
||||
static ref USERNAME: Regex = Regex::new("^[a-z0-9_-]{3,16}$").unwrap();
|
||||
}
|
||||
```
|
||||
|
||||
The implementation shows how to trigger custom warnings and error messages on
|
||||
the macro input.
|
||||
|
||||
```console
|
||||
warning: come on, pick a more creative name
|
||||
--> src/main.rs:10:16
|
||||
|
|
||||
10 | static ref FOO: String = "lazy_static".to_owned();
|
||||
| ^^^
|
||||
```
|
||||
|
||||
<br>
|
||||
|
||||
## Testing
|
||||
|
||||
When testing macros, we often care not just that the macro can be used
|
||||
successfully but also that when the macro is provided with invalid input it
|
||||
produces maximally helpful error messages. Consider using the [`trybuild`] crate
|
||||
to write tests for errors that are emitted by your macro or errors detected by
|
||||
the Rust compiler in the expanded code following misuse of the macro. Such tests
|
||||
help avoid regressions from later refactors that mistakenly make an error no
|
||||
longer trigger or be less helpful than it used to be.
|
||||
|
||||
[`trybuild`]: https://github.com/dtolnay/trybuild
|
||||
|
||||
<br>
|
||||
|
||||
## Debugging
|
||||
|
||||
When developing a procedural macro it can be helpful to look at what the
|
||||
generated code looks like. Use `cargo rustc -- -Zunstable-options
|
||||
--pretty=expanded` or the [`cargo expand`] subcommand.
|
||||
|
||||
[`cargo expand`]: https://github.com/dtolnay/cargo-expand
|
||||
|
||||
To show the expanded code for some crate that uses your procedural macro, run
|
||||
`cargo expand` from that crate. To show the expanded code for one of your own
|
||||
test cases, run `cargo expand --test the_test_case` where the last argument is
|
||||
the name of the test file without the `.rs` extension.
|
||||
|
||||
This write-up by Brandon W Maister discusses debugging in more detail:
|
||||
[Debugging Rust's new Custom Derive system][debugging].
|
||||
|
||||
[debugging]: https://quodlibetor.github.io/posts/debugging-rusts-new-custom-derive-system/
|
||||
|
||||
<br>
|
||||
|
||||
## Optional features
|
||||
|
||||
Syn puts a lot of functionality behind optional features in order to optimize
|
||||
compile time for the most common use cases. The following features are
|
||||
available.
|
||||
|
||||
- **`derive`** *(enabled by default)* — Data structures for representing the
|
||||
possible input to a derive macro, including structs and enums and types.
|
||||
- **`full`** — Data structures for representing the syntax tree of all valid
|
||||
Rust source code, including items and expressions.
|
||||
- **`parsing`** *(enabled by default)* — Ability to parse input tokens into a
|
||||
syntax tree node of a chosen type.
|
||||
- **`printing`** *(enabled by default)* — Ability to print a syntax tree node as
|
||||
tokens of Rust source code.
|
||||
- **`visit`** — Trait for traversing a syntax tree.
|
||||
- **`visit-mut`** — Trait for traversing and mutating in place a syntax tree.
|
||||
- **`fold`** — Trait for transforming an owned syntax tree.
|
||||
- **`clone-impls`** *(enabled by default)* — Clone impls for all syntax tree
|
||||
types.
|
||||
- **`extra-traits`** — Debug, Eq, PartialEq, Hash impls for all syntax tree
|
||||
types.
|
||||
- **`proc-macro`** *(enabled by default)* — Runtime dependency on the dynamic
|
||||
library libproc_macro from rustc toolchain.
|
||||
|
||||
<br>
|
||||
|
||||
## Proc macro shim
|
||||
|
||||
Syn operates on the token representation provided by the [proc-macro2] crate
|
||||
from crates.io rather than using the compiler's built in proc-macro crate
|
||||
directly. This enables code using Syn to execute outside of the context of a
|
||||
procedural macro, such as in unit tests or build.rs, and we avoid needing
|
||||
incompatible ecosystems for proc macros vs non-macro use cases.
|
||||
|
||||
In general all of your code should be written against proc-macro2 rather than
|
||||
proc-macro. The one exception is in the signatures of procedural macro entry
|
||||
points, which are required by the language to use `proc_macro::TokenStream`.
|
||||
|
||||
The proc-macro2 crate will automatically detect and use the compiler's data
|
||||
structures when a procedural macro is active.
|
||||
|
||||
[proc-macro2]: https://docs.rs/proc-macro2/1.0/proc_macro2/
|
||||
|
||||
<br>
|
||||
|
||||
#### License
|
||||
|
||||
<sup>
|
||||
Licensed under either of <a href="LICENSE-APACHE">Apache License, Version
|
||||
2.0</a> or <a href="LICENSE-MIT">MIT license</a> at your option.
|
||||
</sup>
|
||||
|
||||
<br>
|
||||
|
||||
<sub>
|
||||
Unless you explicitly state otherwise, any contribution intentionally submitted
|
||||
for inclusion in this crate by you, as defined in the Apache-2.0 license, shall
|
||||
be dual licensed as above, without any additional terms or conditions.
|
||||
</sub>
|
|
@ -0,0 +1,55 @@
|
|||
// $ cargo bench --features full,test --bench file
|
||||
|
||||
#![feature(rustc_private, test)]
|
||||
#![recursion_limit = "1024"]
|
||||
#![allow(
|
||||
clippy::items_after_statements,
|
||||
clippy::missing_panics_doc,
|
||||
clippy::must_use_candidate
|
||||
)]
|
||||
|
||||
extern crate test;
|
||||
|
||||
#[macro_use]
|
||||
#[path = "../tests/macros/mod.rs"]
|
||||
mod macros;
|
||||
|
||||
#[path = "../tests/common/mod.rs"]
|
||||
mod common;
|
||||
#[path = "../tests/repo/mod.rs"]
|
||||
pub mod repo;
|
||||
|
||||
use proc_macro2::{Span, TokenStream};
|
||||
use std::fs;
|
||||
use std::str::FromStr;
|
||||
use syn::parse::{ParseStream, Parser};
|
||||
use test::Bencher;
|
||||
|
||||
const FILE: &str = "tests/rust/library/core/src/str/mod.rs";
|
||||
|
||||
fn get_tokens() -> TokenStream {
|
||||
repo::clone_rust();
|
||||
let content = fs::read_to_string(FILE).unwrap();
|
||||
TokenStream::from_str(&content).unwrap()
|
||||
}
|
||||
|
||||
#[bench]
|
||||
fn baseline(b: &mut Bencher) {
|
||||
let tokens = get_tokens();
|
||||
b.iter(|| drop(tokens.clone()));
|
||||
}
|
||||
|
||||
#[bench]
|
||||
fn create_token_buffer(b: &mut Bencher) {
|
||||
let tokens = get_tokens();
|
||||
fn immediate_fail(_input: ParseStream) -> syn::Result<()> {
|
||||
Err(syn::Error::new(Span::call_site(), ""))
|
||||
}
|
||||
b.iter(|| immediate_fail.parse2(tokens.clone()));
|
||||
}
|
||||
|
||||
#[bench]
|
||||
fn parse_file(b: &mut Bencher) {
|
||||
let tokens = get_tokens();
|
||||
b.iter(|| syn::parse2::<syn::File>(tokens.clone()));
|
||||
}
|
|
@ -0,0 +1,170 @@
|
|||
// $ cargo bench --features full,test --bench rust
|
||||
//
|
||||
// Syn only, useful for profiling:
|
||||
// $ RUSTFLAGS='--cfg syn_only' cargo build --release --features full,test --bench rust
|
||||
|
||||
#![cfg_attr(not(syn_only), feature(rustc_private))]
|
||||
#![recursion_limit = "1024"]
|
||||
#![allow(clippy::cast_lossless, clippy::unnecessary_wraps)]
|
||||
|
||||
#[macro_use]
|
||||
#[path = "../tests/macros/mod.rs"]
|
||||
mod macros;
|
||||
|
||||
#[path = "../tests/common/mod.rs"]
|
||||
mod common;
|
||||
#[path = "../tests/repo/mod.rs"]
|
||||
mod repo;
|
||||
|
||||
use std::fs;
|
||||
use std::time::{Duration, Instant};
|
||||
|
||||
#[cfg(not(syn_only))]
|
||||
mod tokenstream_parse {
|
||||
use proc_macro2::TokenStream;
|
||||
use std::str::FromStr;
|
||||
|
||||
pub fn bench(content: &str) -> Result<(), ()> {
|
||||
TokenStream::from_str(content).map(drop).map_err(drop)
|
||||
}
|
||||
}
|
||||
|
||||
mod syn_parse {
|
||||
pub fn bench(content: &str) -> Result<(), ()> {
|
||||
syn::parse_file(content).map(drop).map_err(drop)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(syn_only))]
|
||||
mod librustc_parse {
|
||||
extern crate rustc_data_structures;
|
||||
extern crate rustc_error_messages;
|
||||
extern crate rustc_errors;
|
||||
extern crate rustc_parse;
|
||||
extern crate rustc_session;
|
||||
extern crate rustc_span;
|
||||
|
||||
use rustc_data_structures::sync::Lrc;
|
||||
use rustc_error_messages::FluentBundle;
|
||||
use rustc_errors::{emitter::Emitter, translation::Translate, Diagnostic, Handler};
|
||||
use rustc_session::parse::ParseSess;
|
||||
use rustc_span::source_map::{FilePathMapping, SourceMap};
|
||||
use rustc_span::{edition::Edition, FileName};
|
||||
|
||||
pub fn bench(content: &str) -> Result<(), ()> {
|
||||
struct SilentEmitter;
|
||||
|
||||
impl Emitter for SilentEmitter {
|
||||
fn emit_diagnostic(&mut self, _diag: &Diagnostic) {}
|
||||
fn source_map(&self) -> Option<&Lrc<SourceMap>> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
impl Translate for SilentEmitter {
|
||||
fn fluent_bundle(&self) -> Option<&Lrc<FluentBundle>> {
|
||||
None
|
||||
}
|
||||
fn fallback_fluent_bundle(&self) -> &FluentBundle {
|
||||
panic!("silent emitter attempted to translate a diagnostic");
|
||||
}
|
||||
}
|
||||
|
||||
rustc_span::create_session_if_not_set_then(Edition::Edition2018, |_| {
|
||||
let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
|
||||
let emitter = Box::new(SilentEmitter);
|
||||
let handler = Handler::with_emitter(false, None, emitter);
|
||||
let sess = ParseSess::with_span_handler(handler, cm);
|
||||
if let Err(diagnostic) = rustc_parse::parse_crate_from_source_str(
|
||||
FileName::Custom("bench".to_owned()),
|
||||
content.to_owned(),
|
||||
&sess,
|
||||
) {
|
||||
diagnostic.cancel();
|
||||
return Err(());
|
||||
};
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(syn_only))]
|
||||
mod read_from_disk {
|
||||
pub fn bench(content: &str) -> Result<(), ()> {
|
||||
_ = content;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
fn exec(mut codepath: impl FnMut(&str) -> Result<(), ()>) -> Duration {
|
||||
let begin = Instant::now();
|
||||
let mut success = 0;
|
||||
let mut total = 0;
|
||||
|
||||
walkdir::WalkDir::new("tests/rust/src")
|
||||
.into_iter()
|
||||
.filter_entry(repo::base_dir_filter)
|
||||
.for_each(|entry| {
|
||||
let entry = entry.unwrap();
|
||||
let path = entry.path();
|
||||
if path.is_dir() {
|
||||
return;
|
||||
}
|
||||
let content = fs::read_to_string(path).unwrap();
|
||||
let ok = codepath(&content).is_ok();
|
||||
success += ok as usize;
|
||||
total += 1;
|
||||
if !ok {
|
||||
eprintln!("FAIL {}", path.display());
|
||||
}
|
||||
});
|
||||
|
||||
assert_eq!(success, total);
|
||||
begin.elapsed()
|
||||
}
|
||||
|
||||
fn main() {
|
||||
repo::clone_rust();
|
||||
|
||||
macro_rules! testcases {
|
||||
($($(#[$cfg:meta])* $name:ident,)*) => {
|
||||
[
|
||||
$(
|
||||
$(#[$cfg])*
|
||||
(stringify!($name), $name::bench as fn(&str) -> Result<(), ()>),
|
||||
)*
|
||||
]
|
||||
};
|
||||
}
|
||||
|
||||
#[cfg(not(syn_only))]
|
||||
{
|
||||
let mut lines = 0;
|
||||
let mut files = 0;
|
||||
exec(|content| {
|
||||
lines += content.lines().count();
|
||||
files += 1;
|
||||
Ok(())
|
||||
});
|
||||
eprintln!("\n{} lines in {} files", lines, files);
|
||||
}
|
||||
|
||||
for (name, f) in testcases!(
|
||||
#[cfg(not(syn_only))]
|
||||
read_from_disk,
|
||||
#[cfg(not(syn_only))]
|
||||
tokenstream_parse,
|
||||
syn_parse,
|
||||
#[cfg(not(syn_only))]
|
||||
librustc_parse,
|
||||
) {
|
||||
eprint!("{:20}", format!("{}:", name));
|
||||
let elapsed = exec(f);
|
||||
eprintln!(
|
||||
"elapsed={}.{:03}s",
|
||||
elapsed.as_secs(),
|
||||
elapsed.subsec_millis(),
|
||||
);
|
||||
}
|
||||
eprintln!();
|
||||
}
|
|
@ -0,0 +1,662 @@
|
|||
use super::*;
|
||||
use crate::punctuated::Punctuated;
|
||||
use proc_macro2::TokenStream;
|
||||
use std::iter;
|
||||
use std::slice;
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
use crate::parse::{Parse, ParseBuffer, ParseStream, Parser, Result};
|
||||
#[cfg(feature = "parsing")]
|
||||
use crate::punctuated::Pair;
|
||||
|
||||
ast_struct! {
|
||||
/// An attribute like `#[repr(transparent)]`.
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
||||
/// feature.*
|
||||
///
|
||||
/// <br>
|
||||
///
|
||||
/// # Syntax
|
||||
///
|
||||
/// Rust has six types of attributes.
|
||||
///
|
||||
/// - Outer attributes like `#[repr(transparent)]`. These appear outside or
|
||||
/// in front of the item they describe.
|
||||
/// - Inner attributes like `#![feature(proc_macro)]`. These appear inside
|
||||
/// of the item they describe, usually a module.
|
||||
/// - Outer doc comments like `/// # Example`.
|
||||
/// - Inner doc comments like `//! Please file an issue`.
|
||||
/// - Outer block comments `/** # Example */`.
|
||||
/// - Inner block comments `/*! Please file an issue */`.
|
||||
///
|
||||
/// The `style` field of type `AttrStyle` distinguishes whether an attribute
|
||||
/// is outer or inner. Doc comments and block comments are promoted to
|
||||
/// attributes, as this is how they are processed by the compiler and by
|
||||
/// `macro_rules!` macros.
|
||||
///
|
||||
/// The `path` field gives the possibly colon-delimited path against which
|
||||
/// the attribute is resolved. It is equal to `"doc"` for desugared doc
|
||||
/// comments. The `tokens` field contains the rest of the attribute body as
|
||||
/// tokens.
|
||||
///
|
||||
/// ```text
|
||||
/// #[derive(Copy)] #[crate::precondition x < 5]
|
||||
/// ^^^^^^~~~~~~ ^^^^^^^^^^^^^^^^^^^ ~~~~~
|
||||
/// path tokens path tokens
|
||||
/// ```
|
||||
///
|
||||
/// <br>
|
||||
///
|
||||
/// # Parsing from tokens to Attribute
|
||||
///
|
||||
/// This type does not implement the [`Parse`] trait and thus cannot be
|
||||
/// parsed directly by [`ParseStream::parse`]. Instead use
|
||||
/// [`ParseStream::call`] with one of the two parser functions
|
||||
/// [`Attribute::parse_outer`] or [`Attribute::parse_inner`] depending on
|
||||
/// which you intend to parse.
|
||||
///
|
||||
/// [`Parse`]: parse::Parse
|
||||
/// [`ParseStream::parse`]: parse::ParseBuffer::parse
|
||||
/// [`ParseStream::call`]: parse::ParseBuffer::call
|
||||
///
|
||||
/// ```
|
||||
/// use syn::{Attribute, Ident, Result, Token};
|
||||
/// use syn::parse::{Parse, ParseStream};
|
||||
///
|
||||
/// // Parses a unit struct with attributes.
|
||||
/// //
|
||||
/// // #[path = "s.tmpl"]
|
||||
/// // struct S;
|
||||
/// struct UnitStruct {
|
||||
/// attrs: Vec<Attribute>,
|
||||
/// struct_token: Token![struct],
|
||||
/// name: Ident,
|
||||
/// semi_token: Token![;],
|
||||
/// }
|
||||
///
|
||||
/// impl Parse for UnitStruct {
|
||||
/// fn parse(input: ParseStream) -> Result<Self> {
|
||||
/// Ok(UnitStruct {
|
||||
/// attrs: input.call(Attribute::parse_outer)?,
|
||||
/// struct_token: input.parse()?,
|
||||
/// name: input.parse()?,
|
||||
/// semi_token: input.parse()?,
|
||||
/// })
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// <p><br></p>
|
||||
///
|
||||
/// # Parsing from Attribute to structured arguments
|
||||
///
|
||||
/// The grammar of attributes in Rust is very flexible, which makes the
|
||||
/// syntax tree not that useful on its own. In particular, arguments of the
|
||||
/// attribute are held in an arbitrary `tokens: TokenStream`. Macros are
|
||||
/// expected to check the `path` of the attribute, decide whether they
|
||||
/// recognize it, and then parse the remaining tokens according to whatever
|
||||
/// grammar they wish to require for that kind of attribute.
|
||||
///
|
||||
/// If the attribute you are parsing is expected to conform to the
|
||||
/// conventional structured form of attribute, use [`parse_meta()`] to
|
||||
/// obtain that structured representation. If the attribute follows some
|
||||
/// other grammar of its own, use [`parse_args()`] to parse that into the
|
||||
/// expected data structure.
|
||||
///
|
||||
/// [`parse_meta()`]: Attribute::parse_meta
|
||||
/// [`parse_args()`]: Attribute::parse_args
|
||||
///
|
||||
/// <p><br></p>
|
||||
///
|
||||
/// # Doc comments
|
||||
///
|
||||
/// The compiler transforms doc comments, such as `/// comment` and `/*!
|
||||
/// comment */`, into attributes before macros are expanded. Each comment is
|
||||
/// expanded into an attribute of the form `#[doc = r"comment"]`.
|
||||
///
|
||||
/// As an example, the following `mod` items are expanded identically:
|
||||
///
|
||||
/// ```
|
||||
/// # use syn::{ItemMod, parse_quote};
|
||||
/// let doc: ItemMod = parse_quote! {
|
||||
/// /// Single line doc comments
|
||||
/// /// We write so many!
|
||||
/// /**
|
||||
/// * Multi-line comments...
|
||||
/// * May span many lines
|
||||
/// */
|
||||
/// mod example {
|
||||
/// //! Of course, they can be inner too
|
||||
/// /*! And fit in a single line */
|
||||
/// }
|
||||
/// };
|
||||
/// let attr: ItemMod = parse_quote! {
|
||||
/// #[doc = r" Single line doc comments"]
|
||||
/// #[doc = r" We write so many!"]
|
||||
/// #[doc = r"
|
||||
/// * Multi-line comments...
|
||||
/// * May span many lines
|
||||
/// "]
|
||||
/// mod example {
|
||||
/// #![doc = r" Of course, they can be inner too"]
|
||||
/// #![doc = r" And fit in a single line "]
|
||||
/// }
|
||||
/// };
|
||||
/// assert_eq!(doc, attr);
|
||||
/// ```
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub struct Attribute {
|
||||
pub pound_token: Token![#],
|
||||
pub style: AttrStyle,
|
||||
pub bracket_token: token::Bracket,
|
||||
pub path: Path,
|
||||
pub tokens: TokenStream,
|
||||
}
|
||||
}
|
||||
|
||||
impl Attribute {
|
||||
/// Parses the content of the attribute, consisting of the path and tokens,
|
||||
/// as a [`Meta`] if possible.
|
||||
///
|
||||
/// *This function is available only if Syn is built with the `"parsing"`
|
||||
/// feature.*
|
||||
#[cfg(feature = "parsing")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
pub fn parse_meta(&self) -> Result<Meta> {
|
||||
fn clone_ident_segment(segment: &PathSegment) -> PathSegment {
|
||||
PathSegment {
|
||||
ident: segment.ident.clone(),
|
||||
arguments: PathArguments::None,
|
||||
}
|
||||
}
|
||||
|
||||
let path = Path {
|
||||
leading_colon: self
|
||||
.path
|
||||
.leading_colon
|
||||
.as_ref()
|
||||
.map(|colon| Token![::](colon.spans)),
|
||||
segments: self
|
||||
.path
|
||||
.segments
|
||||
.pairs()
|
||||
.map(|pair| match pair {
|
||||
Pair::Punctuated(seg, punct) => {
|
||||
Pair::Punctuated(clone_ident_segment(seg), Token![::](punct.spans))
|
||||
}
|
||||
Pair::End(seg) => Pair::End(clone_ident_segment(seg)),
|
||||
})
|
||||
.collect(),
|
||||
};
|
||||
|
||||
let parser = |input: ParseStream| parsing::parse_meta_after_path(path, input);
|
||||
parse::Parser::parse2(parser, self.tokens.clone())
|
||||
}
|
||||
|
||||
/// Parse the arguments to the attribute as a syntax tree.
|
||||
///
|
||||
/// This is similar to `syn::parse2::<T>(attr.tokens)` except that:
|
||||
///
|
||||
/// - the surrounding delimiters are *not* included in the input to the
|
||||
/// parser; and
|
||||
/// - the error message has a more useful span when `tokens` is empty.
|
||||
///
|
||||
/// ```text
|
||||
/// #[my_attr(value < 5)]
|
||||
/// ^^^^^^^^^ what gets parsed
|
||||
/// ```
|
||||
///
|
||||
/// *This function is available only if Syn is built with the `"parsing"`
|
||||
/// feature.*
|
||||
#[cfg(feature = "parsing")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
pub fn parse_args<T: Parse>(&self) -> Result<T> {
|
||||
self.parse_args_with(T::parse)
|
||||
}
|
||||
|
||||
/// Parse the arguments to the attribute using the given parser.
|
||||
///
|
||||
/// *This function is available only if Syn is built with the `"parsing"`
|
||||
/// feature.*
|
||||
#[cfg(feature = "parsing")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
pub fn parse_args_with<F: Parser>(&self, parser: F) -> Result<F::Output> {
|
||||
let parser = |input: ParseStream| {
|
||||
let args = enter_args(self, input)?;
|
||||
parse::parse_stream(parser, &args)
|
||||
};
|
||||
parser.parse2(self.tokens.clone())
|
||||
}
|
||||
|
||||
/// Parses zero or more outer attributes from the stream.
|
||||
///
|
||||
/// *This function is available only if Syn is built with the `"parsing"`
|
||||
/// feature.*
|
||||
#[cfg(feature = "parsing")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
pub fn parse_outer(input: ParseStream) -> Result<Vec<Self>> {
|
||||
let mut attrs = Vec::new();
|
||||
while input.peek(Token![#]) {
|
||||
attrs.push(input.call(parsing::single_parse_outer)?);
|
||||
}
|
||||
Ok(attrs)
|
||||
}
|
||||
|
||||
/// Parses zero or more inner attributes from the stream.
|
||||
///
|
||||
/// *This function is available only if Syn is built with the `"parsing"`
|
||||
/// feature.*
|
||||
#[cfg(feature = "parsing")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
pub fn parse_inner(input: ParseStream) -> Result<Vec<Self>> {
|
||||
let mut attrs = Vec::new();
|
||||
parsing::parse_inner(input, &mut attrs)?;
|
||||
Ok(attrs)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
fn expected_parentheses(attr: &Attribute) -> String {
|
||||
let style = match attr.style {
|
||||
AttrStyle::Outer => "#",
|
||||
AttrStyle::Inner(_) => "#!",
|
||||
};
|
||||
|
||||
let mut path = String::new();
|
||||
for segment in &attr.path.segments {
|
||||
if !path.is_empty() || attr.path.leading_colon.is_some() {
|
||||
path += "::";
|
||||
}
|
||||
path += &segment.ident.to_string();
|
||||
}
|
||||
|
||||
format!("{}[{}(...)]", style, path)
|
||||
}
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
fn enter_args<'a>(attr: &Attribute, input: ParseStream<'a>) -> Result<ParseBuffer<'a>> {
|
||||
if input.is_empty() {
|
||||
let expected = expected_parentheses(attr);
|
||||
let msg = format!("expected attribute arguments in parentheses: {}", expected);
|
||||
return Err(crate::error::new2(
|
||||
attr.pound_token.span,
|
||||
attr.bracket_token.span,
|
||||
msg,
|
||||
));
|
||||
} else if input.peek(Token![=]) {
|
||||
let expected = expected_parentheses(attr);
|
||||
let msg = format!("expected parentheses: {}", expected);
|
||||
return Err(input.error(msg));
|
||||
};
|
||||
|
||||
let content;
|
||||
if input.peek(token::Paren) {
|
||||
parenthesized!(content in input);
|
||||
} else if input.peek(token::Bracket) {
|
||||
bracketed!(content in input);
|
||||
} else if input.peek(token::Brace) {
|
||||
braced!(content in input);
|
||||
} else {
|
||||
return Err(input.error("unexpected token in attribute arguments"));
|
||||
}
|
||||
|
||||
if input.is_empty() {
|
||||
Ok(content)
|
||||
} else {
|
||||
Err(input.error("unexpected token in attribute arguments"))
|
||||
}
|
||||
}
|
||||
|
||||
ast_enum! {
|
||||
/// Distinguishes between attributes that decorate an item and attributes
|
||||
/// that are contained within an item.
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
||||
/// feature.*
|
||||
///
|
||||
/// # Outer attributes
|
||||
///
|
||||
/// - `#[repr(transparent)]`
|
||||
/// - `/// # Example`
|
||||
/// - `/** Please file an issue */`
|
||||
///
|
||||
/// # Inner attributes
|
||||
///
|
||||
/// - `#![feature(proc_macro)]`
|
||||
/// - `//! # Example`
|
||||
/// - `/*! Please file an issue */`
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub enum AttrStyle {
|
||||
Outer,
|
||||
Inner(Token![!]),
|
||||
}
|
||||
}
|
||||
|
||||
ast_enum_of_structs! {
|
||||
/// Content of a compile-time structured attribute.
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
||||
/// feature.*
|
||||
///
|
||||
/// ## Path
|
||||
///
|
||||
/// A meta path is like the `test` in `#[test]`.
|
||||
///
|
||||
/// ## List
|
||||
///
|
||||
/// A meta list is like the `derive(Copy)` in `#[derive(Copy)]`.
|
||||
///
|
||||
/// ## NameValue
|
||||
///
|
||||
/// A name-value meta is like the `path = "..."` in `#[path =
|
||||
/// "sys/windows.rs"]`.
|
||||
///
|
||||
/// # Syntax tree enum
|
||||
///
|
||||
/// This type is a [syntax tree enum].
|
||||
///
|
||||
/// [syntax tree enum]: Expr#syntax-tree-enums
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub enum Meta {
|
||||
Path(Path),
|
||||
|
||||
/// A structured list within an attribute, like `derive(Copy, Clone)`.
|
||||
List(MetaList),
|
||||
|
||||
/// A name-value pair within an attribute, like `feature = "nightly"`.
|
||||
NameValue(MetaNameValue),
|
||||
}
|
||||
}
|
||||
|
||||
ast_struct! {
|
||||
/// A structured list within an attribute, like `derive(Copy, Clone)`.
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or
|
||||
/// `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub struct MetaList {
|
||||
pub path: Path,
|
||||
pub paren_token: token::Paren,
|
||||
pub nested: Punctuated<NestedMeta, Token![,]>,
|
||||
}
|
||||
}
|
||||
|
||||
ast_struct! {
|
||||
/// A name-value pair within an attribute, like `feature = "nightly"`.
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or
|
||||
/// `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub struct MetaNameValue {
|
||||
pub path: Path,
|
||||
pub eq_token: Token![=],
|
||||
pub lit: Lit,
|
||||
}
|
||||
}
|
||||
|
||||
impl Meta {
|
||||
/// Returns the identifier that begins this structured meta item.
|
||||
///
|
||||
/// For example this would return the `test` in `#[test]`, the `derive` in
|
||||
/// `#[derive(Copy)]`, and the `path` in `#[path = "sys/windows.rs"]`.
|
||||
pub fn path(&self) -> &Path {
|
||||
match self {
|
||||
Meta::Path(path) => path,
|
||||
Meta::List(meta) => &meta.path,
|
||||
Meta::NameValue(meta) => &meta.path,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ast_enum_of_structs! {
|
||||
/// Element of a compile-time attribute list.
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
||||
/// feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub enum NestedMeta {
|
||||
/// A structured meta item, like the `Copy` in `#[derive(Copy)]` which
|
||||
/// would be a nested `Meta::Path`.
|
||||
Meta(Meta),
|
||||
|
||||
/// A Rust literal, like the `"new_name"` in `#[rename("new_name")]`.
|
||||
Lit(Lit),
|
||||
}
|
||||
}
|
||||
|
||||
/// Conventional argument type associated with an invocation of an attribute
|
||||
/// macro.
|
||||
///
|
||||
/// For example if we are developing an attribute macro that is intended to be
|
||||
/// invoked on function items as follows:
|
||||
///
|
||||
/// ```
|
||||
/// # const IGNORE: &str = stringify! {
|
||||
/// #[my_attribute(path = "/v1/refresh")]
|
||||
/// # };
|
||||
/// pub fn refresh() {
|
||||
/// /* ... */
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// The implementation of this macro would want to parse its attribute arguments
|
||||
/// as type `AttributeArgs`.
|
||||
///
|
||||
/// ```
|
||||
/// # extern crate proc_macro;
|
||||
/// #
|
||||
/// use proc_macro::TokenStream;
|
||||
/// use syn::{parse_macro_input, AttributeArgs, ItemFn};
|
||||
///
|
||||
/// # const IGNORE: &str = stringify! {
|
||||
/// #[proc_macro_attribute]
|
||||
/// # };
|
||||
/// pub fn my_attribute(args: TokenStream, input: TokenStream) -> TokenStream {
|
||||
/// let args = parse_macro_input!(args as AttributeArgs);
|
||||
/// let input = parse_macro_input!(input as ItemFn);
|
||||
///
|
||||
/// /* ... */
|
||||
/// # "".parse().unwrap()
|
||||
/// }
|
||||
/// ```
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub type AttributeArgs = Vec<NestedMeta>;
|
||||
|
||||
pub trait FilterAttrs<'a> {
|
||||
type Ret: Iterator<Item = &'a Attribute>;
|
||||
|
||||
fn outer(self) -> Self::Ret;
|
||||
fn inner(self) -> Self::Ret;
|
||||
}
|
||||
|
||||
impl<'a> FilterAttrs<'a> for &'a [Attribute] {
|
||||
type Ret = iter::Filter<slice::Iter<'a, Attribute>, fn(&&Attribute) -> bool>;
|
||||
|
||||
fn outer(self) -> Self::Ret {
|
||||
fn is_outer(attr: &&Attribute) -> bool {
|
||||
match attr.style {
|
||||
AttrStyle::Outer => true,
|
||||
AttrStyle::Inner(_) => false,
|
||||
}
|
||||
}
|
||||
self.iter().filter(is_outer)
|
||||
}
|
||||
|
||||
fn inner(self) -> Self::Ret {
|
||||
fn is_inner(attr: &&Attribute) -> bool {
|
||||
match attr.style {
|
||||
AttrStyle::Inner(_) => true,
|
||||
AttrStyle::Outer => false,
|
||||
}
|
||||
}
|
||||
self.iter().filter(is_inner)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
pub mod parsing {
|
||||
use super::*;
|
||||
use crate::ext::IdentExt;
|
||||
use crate::parse::{Parse, ParseStream, Result};
|
||||
|
||||
pub fn parse_inner(input: ParseStream, attrs: &mut Vec<Attribute>) -> Result<()> {
|
||||
while input.peek(Token![#]) && input.peek2(Token![!]) {
|
||||
attrs.push(input.call(parsing::single_parse_inner)?);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn single_parse_inner(input: ParseStream) -> Result<Attribute> {
|
||||
let content;
|
||||
Ok(Attribute {
|
||||
pound_token: input.parse()?,
|
||||
style: AttrStyle::Inner(input.parse()?),
|
||||
bracket_token: bracketed!(content in input),
|
||||
path: content.call(Path::parse_mod_style)?,
|
||||
tokens: content.parse()?,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn single_parse_outer(input: ParseStream) -> Result<Attribute> {
|
||||
let content;
|
||||
Ok(Attribute {
|
||||
pound_token: input.parse()?,
|
||||
style: AttrStyle::Outer,
|
||||
bracket_token: bracketed!(content in input),
|
||||
path: content.call(Path::parse_mod_style)?,
|
||||
tokens: content.parse()?,
|
||||
})
|
||||
}
|
||||
|
||||
// Like Path::parse_mod_style but accepts keywords in the path.
|
||||
fn parse_meta_path(input: ParseStream) -> Result<Path> {
|
||||
Ok(Path {
|
||||
leading_colon: input.parse()?,
|
||||
segments: {
|
||||
let mut segments = Punctuated::new();
|
||||
while input.peek(Ident::peek_any) {
|
||||
let ident = Ident::parse_any(input)?;
|
||||
segments.push_value(PathSegment::from(ident));
|
||||
if !input.peek(Token![::]) {
|
||||
break;
|
||||
}
|
||||
let punct = input.parse()?;
|
||||
segments.push_punct(punct);
|
||||
}
|
||||
if segments.is_empty() {
|
||||
return Err(input.error("expected path"));
|
||||
} else if segments.trailing_punct() {
|
||||
return Err(input.error("expected path segment"));
|
||||
}
|
||||
segments
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for Meta {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
let path = input.call(parse_meta_path)?;
|
||||
parse_meta_after_path(path, input)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for MetaList {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
let path = input.call(parse_meta_path)?;
|
||||
parse_meta_list_after_path(path, input)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for MetaNameValue {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
let path = input.call(parse_meta_path)?;
|
||||
parse_meta_name_value_after_path(path, input)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for NestedMeta {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
if input.peek(Lit) && !(input.peek(LitBool) && input.peek2(Token![=])) {
|
||||
input.parse().map(NestedMeta::Lit)
|
||||
} else if input.peek(Ident::peek_any)
|
||||
|| input.peek(Token![::]) && input.peek3(Ident::peek_any)
|
||||
{
|
||||
input.parse().map(NestedMeta::Meta)
|
||||
} else {
|
||||
Err(input.error("expected identifier or literal"))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn parse_meta_after_path(path: Path, input: ParseStream) -> Result<Meta> {
|
||||
if input.peek(token::Paren) {
|
||||
parse_meta_list_after_path(path, input).map(Meta::List)
|
||||
} else if input.peek(Token![=]) {
|
||||
parse_meta_name_value_after_path(path, input).map(Meta::NameValue)
|
||||
} else {
|
||||
Ok(Meta::Path(path))
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_meta_list_after_path(path: Path, input: ParseStream) -> Result<MetaList> {
|
||||
let content;
|
||||
Ok(MetaList {
|
||||
path,
|
||||
paren_token: parenthesized!(content in input),
|
||||
nested: content.parse_terminated(NestedMeta::parse)?,
|
||||
})
|
||||
}
|
||||
|
||||
fn parse_meta_name_value_after_path(path: Path, input: ParseStream) -> Result<MetaNameValue> {
|
||||
Ok(MetaNameValue {
|
||||
path,
|
||||
eq_token: input.parse()?,
|
||||
lit: input.parse()?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "printing")]
|
||||
mod printing {
|
||||
use super::*;
|
||||
use proc_macro2::TokenStream;
|
||||
use quote::ToTokens;
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for Attribute {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.pound_token.to_tokens(tokens);
|
||||
if let AttrStyle::Inner(b) = &self.style {
|
||||
b.to_tokens(tokens);
|
||||
}
|
||||
self.bracket_token.surround(tokens, |tokens| {
|
||||
self.path.to_tokens(tokens);
|
||||
self.tokens.to_tokens(tokens);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for MetaList {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.path.to_tokens(tokens);
|
||||
self.paren_token.surround(tokens, |tokens| {
|
||||
self.nested.to_tokens(tokens);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for MetaNameValue {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.path.to_tokens(tokens);
|
||||
self.eq_token.to_tokens(tokens);
|
||||
self.lit.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,66 @@
|
|||
use std::ops::{AddAssign, MulAssign};
|
||||
|
||||
// For implementing base10_digits() accessor on LitInt.
|
||||
pub struct BigInt {
|
||||
digits: Vec<u8>,
|
||||
}
|
||||
|
||||
impl BigInt {
|
||||
pub fn new() -> Self {
|
||||
BigInt { digits: Vec::new() }
|
||||
}
|
||||
|
||||
pub fn to_string(&self) -> String {
|
||||
let mut repr = String::with_capacity(self.digits.len());
|
||||
|
||||
let mut has_nonzero = false;
|
||||
for digit in self.digits.iter().rev() {
|
||||
has_nonzero |= *digit != 0;
|
||||
if has_nonzero {
|
||||
repr.push((*digit + b'0') as char);
|
||||
}
|
||||
}
|
||||
|
||||
if repr.is_empty() {
|
||||
repr.push('0');
|
||||
}
|
||||
|
||||
repr
|
||||
}
|
||||
|
||||
fn reserve_two_digits(&mut self) {
|
||||
let len = self.digits.len();
|
||||
let desired =
|
||||
len + !self.digits.ends_with(&[0, 0]) as usize + !self.digits.ends_with(&[0]) as usize;
|
||||
self.digits.resize(desired, 0);
|
||||
}
|
||||
}
|
||||
|
||||
impl AddAssign<u8> for BigInt {
|
||||
// Assumes increment <16.
|
||||
fn add_assign(&mut self, mut increment: u8) {
|
||||
self.reserve_two_digits();
|
||||
|
||||
let mut i = 0;
|
||||
while increment > 0 {
|
||||
let sum = self.digits[i] + increment;
|
||||
self.digits[i] = sum % 10;
|
||||
increment = sum / 10;
|
||||
i += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl MulAssign<u8> for BigInt {
|
||||
// Assumes base <=16.
|
||||
fn mul_assign(&mut self, base: u8) {
|
||||
self.reserve_two_digits();
|
||||
|
||||
let mut carry = 0;
|
||||
for digit in &mut self.digits {
|
||||
let prod = *digit * base + carry;
|
||||
*digit = prod % 10;
|
||||
carry = prod / 10;
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,398 @@
|
|||
//! A stably addressed token buffer supporting efficient traversal based on a
|
||||
//! cheaply copyable cursor.
|
||||
//!
|
||||
//! *This module is available only if Syn is built with the `"parsing"` feature.*
|
||||
|
||||
// This module is heavily commented as it contains most of the unsafe code in
|
||||
// Syn, and caution should be used when editing it. The public-facing interface
|
||||
// is 100% safe but the implementation is fragile internally.
|
||||
|
||||
#[cfg(all(
|
||||
not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
|
||||
feature = "proc-macro"
|
||||
))]
|
||||
use crate::proc_macro as pm;
|
||||
use crate::Lifetime;
|
||||
use proc_macro2::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
|
||||
use std::cmp::Ordering;
|
||||
use std::marker::PhantomData;
|
||||
|
||||
/// Internal type which is used instead of `TokenTree` to represent a token tree
/// within a `TokenBuffer`.
enum Entry {
    // Mimicking types from proc-macro.
    // Group entries contain the offset to the matching End entry. Storing the
    // offset (instead of nesting buffers) keeps the buffer flat, so a cursor
    // can step over an entire group in O(1).
    Group(Group, usize),
    Ident(Ident),
    Punct(Punct),
    Literal(Literal),
    // End entries contain the offset (negative) to the start of the buffer.
    End(isize),
}
|
||||
|
||||
/// A buffer that can be efficiently traversed multiple times, unlike
/// `TokenStream` which requires a deep copy in order to traverse more than
/// once.
///
/// *This type is available only if Syn is built with the `"parsing"` feature.*
pub struct TokenBuffer {
    // NOTE: Do not implement clone on this - while the current design could be
    // cloned, other designs which could be desirable may not be cloneable.
    //
    // Flattened token trees. A boxed slice never grows or reallocates, so the
    // raw pointers that `Cursor` holds into it remain stable.
    entries: Box<[Entry]>,
}
|
||||
|
||||
impl TokenBuffer {
    // Flattens `stream` into `entries` depth-first. Each `Group` token is
    // encoded as a `Group` entry carrying the forward offset to its matching
    // `End` entry, and each group's contents are followed by an `End` whose
    // payload rewinds to the buffer start.
    fn recursive_new(entries: &mut Vec<Entry>, stream: TokenStream) {
        for tt in stream {
            match tt {
                TokenTree::Ident(ident) => entries.push(Entry::Ident(ident)),
                TokenTree::Punct(punct) => entries.push(Entry::Punct(punct)),
                TokenTree::Literal(literal) => entries.push(Entry::Literal(literal)),
                TokenTree::Group(group) => {
                    let group_start_index = entries.len();
                    // Placeholder: the real `Group` entry needs the end offset,
                    // which is only known after recursing.
                    entries.push(Entry::End(0)); // we replace this below
                    Self::recursive_new(entries, group.stream());
                    let group_end_index = entries.len();
                    entries.push(Entry::End(-(group_end_index as isize)));
                    let group_end_offset = group_end_index - group_start_index;
                    entries[group_start_index] = Entry::Group(group, group_end_offset);
                }
            }
        }
    }

    /// Creates a `TokenBuffer` containing all the tokens from the input
    /// `proc_macro::TokenStream`.
    ///
    /// *This method is available only if Syn is built with both the `"parsing"` and
    /// `"proc-macro"` features.*
    #[cfg(all(
        not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
        feature = "proc-macro"
    ))]
    pub fn new(stream: pm::TokenStream) -> Self {
        Self::new2(stream.into())
    }

    /// Creates a `TokenBuffer` containing all the tokens from the input
    /// `proc_macro2::TokenStream`.
    pub fn new2(stream: TokenStream) -> Self {
        let mut entries = Vec::new();
        Self::recursive_new(&mut entries, stream);
        // Trailing sentinel `End` marking the end of the whole buffer; its
        // payload is the negative offset back to the buffer start, which
        // `same_buffer` relies on.
        entries.push(Entry::End(-(entries.len() as isize)));
        Self {
            entries: entries.into_boxed_slice(),
        }
    }

    /// Creates a cursor referencing the first token in the buffer and able to
    /// traverse until the end of the buffer.
    pub fn begin(&self) -> Cursor {
        let ptr = self.entries.as_ptr();
        // SAFETY: `new2` always pushes a trailing `End`, so `entries` is
        // non-empty and the last entry is a valid scope sentinel. Both
        // pointers stay within this buffer, which the returned cursor's
        // lifetime keeps borrowed.
        unsafe { Cursor::create(ptr, ptr.add(self.entries.len() - 1)) }
    }
}
|
||||
|
||||
/// A cheaply copyable cursor into a `TokenBuffer`.
///
/// This cursor holds a shared reference into the immutable data which is used
/// internally to represent a `TokenStream`, and can be efficiently manipulated
/// and copied around.
///
/// An empty `Cursor` can be created directly, or one may create a `TokenBuffer`
/// object and get a cursor to its first token with `begin()`.
///
/// Two cursors are equal if they have the same location in the same input
/// stream, and have the same scope.
///
/// *This type is available only if Syn is built with the `"parsing"` feature.*
pub struct Cursor<'a> {
    // The current entry which the `Cursor` is pointing at.
    ptr: *const Entry,
    // This is the only `Entry::End` object which this cursor is allowed to
    // point at. All other `End` objects are skipped over in `Cursor::create`.
    scope: *const Entry,
    // Cursor is covariant in 'a. This field ensures that our pointers are still
    // valid. Raw pointers are used (rather than &'a Entry) so the two fields
    // can be compared and offset freely; the PhantomData carries the borrow.
    marker: PhantomData<&'a Entry>,
}
|
||||
|
||||
impl<'a> Cursor<'a> {
    /// Creates a cursor referencing a static empty TokenStream.
    pub fn empty() -> Self {
        // It's safe in this situation for us to put an `Entry` object in global
        // storage, despite it not actually being safe to send across threads
        // (`Ident` is a reference into a thread-local table). This is because
        // this entry never includes a `Ident` object.
        //
        // This wrapper struct allows us to break the rules and put a `Sync`
        // object in global storage.
        struct UnsafeSyncEntry(Entry);
        unsafe impl Sync for UnsafeSyncEntry {}
        static EMPTY_ENTRY: UnsafeSyncEntry = UnsafeSyncEntry(Entry::End(0));

        Cursor {
            ptr: &EMPTY_ENTRY.0,
            scope: &EMPTY_ENTRY.0,
            marker: PhantomData,
        }
    }

    /// This create method intelligently exits non-explicitly-entered
    /// `None`-delimited scopes when the cursor reaches the end of them,
    /// allowing for them to be treated transparently.
    ///
    /// SAFETY (caller contract): `ptr` and `scope` must point into the same
    /// live buffer, with `scope` referring to an `Entry::End` at or after
    /// `ptr`, for the duration of `'a`.
    unsafe fn create(mut ptr: *const Entry, scope: *const Entry) -> Self {
        // NOTE: If we're looking at a `End`, we want to advance the cursor
        // past it, unless `ptr == scope`, which means that we're at the edge of
        // our cursor's scope. We should only have `ptr != scope` at the exit
        // from None-delimited groups entered with `ignore_none`.
        while let Entry::End(_) = *ptr {
            if ptr == scope {
                break;
            }
            ptr = ptr.add(1);
        }

        Cursor {
            ptr,
            scope,
            marker: PhantomData,
        }
    }

    /// Get the current entry.
    fn entry(self) -> &'a Entry {
        // SAFETY: `ptr` was valid for the buffer when this cursor was
        // constructed, and `marker` ties the returned reference to that
        // borrow.
        unsafe { &*self.ptr }
    }

    /// Bump the cursor to point at the next token after the current one. This
    /// is undefined behavior if the cursor is currently looking at an
    /// `Entry::End`.
    ///
    /// If the cursor is looking at an `Entry::Group`, the bumped cursor will
    /// point at the first token in the group (with the same scope end).
    unsafe fn bump_ignore_group(self) -> Cursor<'a> {
        Cursor::create(self.ptr.offset(1), self.scope)
    }

    /// While the cursor is looking at a `None`-delimited group, move it to look
    /// at the first token inside instead. If the group is empty, this will move
    /// the cursor past the `None`-delimited group.
    ///
    /// WARNING: This mutates its argument.
    fn ignore_none(&mut self) {
        while let Entry::Group(group, _) = self.entry() {
            if group.delimiter() == Delimiter::None {
                unsafe { *self = self.bump_ignore_group() };
            } else {
                break;
            }
        }
    }

    /// Checks whether the cursor is currently pointing at the end of its valid
    /// scope.
    pub fn eof(self) -> bool {
        // We're at eof if we're at the end of our scope.
        self.ptr == self.scope
    }

    /// If the cursor is pointing at a `Group` with the given delimiter, returns
    /// a cursor into that group and one pointing to the next `TokenTree`.
    pub fn group(mut self, delim: Delimiter) -> Option<(Cursor<'a>, Span, Cursor<'a>)> {
        // If we're not trying to enter a none-delimited group, we want to
        // ignore them. We have to make sure to _not_ ignore them when we want
        // to enter them, of course. For obvious reasons.
        if delim != Delimiter::None {
            self.ignore_none();
        }

        if let Entry::Group(group, end_offset) = self.entry() {
            if group.delimiter() == delim {
                // The group's scope ends at its matching `End` entry, whose
                // distance was recorded in the `Group` entry at construction.
                let end_of_group = unsafe { self.ptr.add(*end_offset) };
                let inside_of_group = unsafe { Cursor::create(self.ptr.add(1), end_of_group) };
                let after_group = unsafe { Cursor::create(end_of_group, self.scope) };
                return Some((inside_of_group, group.span(), after_group));
            }
        }

        None
    }

    /// If the cursor is pointing at a `Ident`, returns it along with a cursor
    /// pointing at the next `TokenTree`.
    pub fn ident(mut self) -> Option<(Ident, Cursor<'a>)> {
        self.ignore_none();
        match self.entry() {
            Entry::Ident(ident) => Some((ident.clone(), unsafe { self.bump_ignore_group() })),
            _ => None,
        }
    }

    /// If the cursor is pointing at a `Punct`, returns it along with a cursor
    /// pointing at the next `TokenTree`.
    pub fn punct(mut self) -> Option<(Punct, Cursor<'a>)> {
        self.ignore_none();
        match self.entry() {
            // A `'` is excluded here because it begins a lifetime, which is
            // handled as a unit by `lifetime` below.
            Entry::Punct(punct) if punct.as_char() != '\'' => {
                Some((punct.clone(), unsafe { self.bump_ignore_group() }))
            }
            _ => None,
        }
    }

    /// If the cursor is pointing at a `Literal`, return it along with a cursor
    /// pointing at the next `TokenTree`.
    pub fn literal(mut self) -> Option<(Literal, Cursor<'a>)> {
        self.ignore_none();
        match self.entry() {
            Entry::Literal(literal) => Some((literal.clone(), unsafe { self.bump_ignore_group() })),
            _ => None,
        }
    }

    /// If the cursor is pointing at a `Lifetime`, returns it along with a
    /// cursor pointing at the next `TokenTree`.
    pub fn lifetime(mut self) -> Option<(Lifetime, Cursor<'a>)> {
        self.ignore_none();
        match self.entry() {
            // A lifetime is an apostrophe `Punct` joined to the following
            // ident with no space between them.
            Entry::Punct(punct) if punct.as_char() == '\'' && punct.spacing() == Spacing::Joint => {
                let next = unsafe { self.bump_ignore_group() };
                let (ident, rest) = next.ident()?;
                let lifetime = Lifetime {
                    apostrophe: punct.span(),
                    ident,
                };
                Some((lifetime, rest))
            }
            _ => None,
        }
    }

    /// Copies all remaining tokens visible from this cursor into a
    /// `TokenStream`.
    pub fn token_stream(self) -> TokenStream {
        let mut tts = Vec::new();
        let mut cursor = self;
        while let Some((tt, rest)) = cursor.token_tree() {
            tts.push(tt);
            cursor = rest;
        }
        tts.into_iter().collect()
    }

    /// If the cursor is pointing at a `TokenTree`, returns it along with a
    /// cursor pointing at the next `TokenTree`.
    ///
    /// Returns `None` if the cursor has reached the end of its stream.
    ///
    /// This method does not treat `None`-delimited groups as transparent, and
    /// will return a `Group(None, ..)` if the cursor is looking at one.
    pub fn token_tree(self) -> Option<(TokenTree, Cursor<'a>)> {
        // `len` is how far to advance: the full group width for a group,
        // a single entry otherwise.
        let (tree, len) = match self.entry() {
            Entry::Group(group, end_offset) => (group.clone().into(), *end_offset),
            Entry::Literal(literal) => (literal.clone().into(), 1),
            Entry::Ident(ident) => (ident.clone().into(), 1),
            Entry::Punct(punct) => (punct.clone().into(), 1),
            Entry::End(_) => return None,
        };

        let rest = unsafe { Cursor::create(self.ptr.add(len), self.scope) };
        Some((tree, rest))
    }

    /// Returns the `Span` of the current token, or `Span::call_site()` if this
    /// cursor points to eof.
    pub fn span(self) -> Span {
        match self.entry() {
            Entry::Group(group, _) => group.span(),
            Entry::Literal(literal) => literal.span(),
            Entry::Ident(ident) => ident.span(),
            Entry::Punct(punct) => punct.span(),
            Entry::End(_) => Span::call_site(),
        }
    }

    /// Skip over the next token without cloning it. Returns `None` if this
    /// cursor points to eof.
    ///
    /// This method treats `'lifetimes` as a single token.
    pub(crate) fn skip(self) -> Option<Cursor<'a>> {
        let len = match self.entry() {
            Entry::End(_) => return None,

            // Treat lifetimes as a single tt for the purposes of 'skip'.
            Entry::Punct(punct) if punct.as_char() == '\'' && punct.spacing() == Spacing::Joint => {
                match unsafe { &*self.ptr.add(1) } {
                    Entry::Ident(_) => 2,
                    _ => 1,
                }
            }

            Entry::Group(_, end_offset) => *end_offset,
            _ => 1,
        };

        Some(unsafe { Cursor::create(self.ptr.add(len), self.scope) })
    }
}
|
||||
|
||||
impl<'a> Copy for Cursor<'a> {}

impl<'a> Clone for Cursor<'a> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<'a> Eq for Cursor<'a> {}

impl<'a> PartialEq for Cursor<'a> {
    // Two cursors are equal when they point at the same entry; `scope` is
    // deliberately not part of the comparison.
    fn eq(&self, other: &Self) -> bool {
        self.ptr == other.ptr
    }
}

impl<'a> PartialOrd for Cursor<'a> {
    // Ordering is only meaningful within one buffer; cursors into different
    // buffers are unordered (`None`).
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        if same_buffer(*self, *other) {
            Some(self.ptr.cmp(&other.ptr))
        } else {
            None
        }
    }
}
|
||||
|
||||
// Whether two cursors share the same scope-end sentinel.
pub(crate) fn same_scope(a: Cursor, b: Cursor) -> bool {
    a.scope == b.scope
}

// Whether two cursors point into the same underlying buffer. Every `scope`
// pointer refers to an `Entry::End` whose payload is the (negative) offset
// back to the start of its buffer, so rewinding both scopes and comparing
// the resulting start pointers identifies the buffer.
pub(crate) fn same_buffer(a: Cursor, b: Cursor) -> bool {
    unsafe {
        match (&*a.scope, &*b.scope) {
            (Entry::End(a_offset), Entry::End(b_offset)) => {
                a.scope.offset(*a_offset) == b.scope.offset(*b_offset)
            }
            // `scope` is only ever constructed pointing at an `End` entry.
            _ => unreachable!(),
        }
    }
}

#[cfg(any(feature = "full", feature = "derive"))]
pub(crate) fn cmp_assuming_same_buffer(a: Cursor, b: Cursor) -> Ordering {
    a.ptr.cmp(&b.ptr)
}

// Span of the opening delimiter if the cursor is at a group, otherwise the
// span of the current token.
pub(crate) fn open_span_of_group(cursor: Cursor) -> Span {
    match cursor.entry() {
        Entry::Group(group, _) => group.span_open(),
        _ => cursor.span(),
    }
}

// Span of the closing delimiter if the cursor is at a group, otherwise the
// span of the current token.
pub(crate) fn close_span_of_group(cursor: Cursor) -> Span {
    match cursor.entry() {
        Entry::Group(group, _) => group.span_close(),
        _ => cursor.span(),
    }
}
|
|
@ -0,0 +1,253 @@
|
|||
/// Define a type that supports parsing and printing a given identifier as if it
|
||||
/// were a keyword.
|
||||
///
|
||||
/// # Usage
|
||||
///
|
||||
/// As a convention, it is recommended that this macro be invoked within a
|
||||
/// module called `kw` or `keyword` and that the resulting parser be invoked
|
||||
/// with a `kw::` or `keyword::` prefix.
|
||||
///
|
||||
/// ```
|
||||
/// mod kw {
|
||||
/// syn::custom_keyword!(whatever);
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// The generated syntax tree node supports the following operations just like
|
||||
/// any built-in keyword token.
|
||||
///
|
||||
/// - [Peeking] — `input.peek(kw::whatever)`
|
||||
///
|
||||
/// - [Parsing] — `input.parse::<kw::whatever>()?`
|
||||
///
|
||||
/// - [Printing] — `quote!( ... #whatever_token ... )`
|
||||
///
|
||||
/// - Construction from a [`Span`] — `let whatever_token = kw::whatever(sp)`
|
||||
///
|
||||
/// - Field access to its span — `let sp = whatever_token.span`
|
||||
///
|
||||
/// [Peeking]: crate::parse::ParseBuffer::peek
|
||||
/// [Parsing]: crate::parse::ParseBuffer::parse
|
||||
/// [Printing]: quote::ToTokens
|
||||
/// [`Span`]: proc_macro2::Span
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// This example parses input that looks like `bool = true` or `str = "value"`.
|
||||
/// The key must be either the identifier `bool` or the identifier `str`. If
|
||||
/// `bool`, the value may be either `true` or `false`. If `str`, the value may
|
||||
/// be any string literal.
|
||||
///
|
||||
/// The symbols `bool` and `str` are not reserved keywords in Rust so these are
|
||||
/// not considered keywords in the `syn::token` module. Like any other
|
||||
/// identifier that is not a keyword, these can be declared as custom keywords
|
||||
/// by crates that need to use them as such.
|
||||
///
|
||||
/// ```
|
||||
/// use syn::{LitBool, LitStr, Result, Token};
|
||||
/// use syn::parse::{Parse, ParseStream};
|
||||
///
|
||||
/// mod kw {
|
||||
/// syn::custom_keyword!(bool);
|
||||
/// syn::custom_keyword!(str);
|
||||
/// }
|
||||
///
|
||||
/// enum Argument {
|
||||
/// Bool {
|
||||
/// bool_token: kw::bool,
|
||||
/// eq_token: Token![=],
|
||||
/// value: LitBool,
|
||||
/// },
|
||||
/// Str {
|
||||
/// str_token: kw::str,
|
||||
/// eq_token: Token![=],
|
||||
/// value: LitStr,
|
||||
/// },
|
||||
/// }
|
||||
///
|
||||
/// impl Parse for Argument {
|
||||
/// fn parse(input: ParseStream) -> Result<Self> {
|
||||
/// let lookahead = input.lookahead1();
|
||||
/// if lookahead.peek(kw::bool) {
|
||||
/// Ok(Argument::Bool {
|
||||
/// bool_token: input.parse::<kw::bool>()?,
|
||||
/// eq_token: input.parse()?,
|
||||
/// value: input.parse()?,
|
||||
/// })
|
||||
/// } else if lookahead.peek(kw::str) {
|
||||
/// Ok(Argument::Str {
|
||||
/// str_token: input.parse::<kw::str>()?,
|
||||
/// eq_token: input.parse()?,
|
||||
/// value: input.parse()?,
|
||||
/// })
|
||||
/// } else {
|
||||
/// Err(lookahead.error())
|
||||
/// }
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
#[macro_export]
macro_rules! custom_keyword {
    ($ident:ident) => {
        // The keyword type itself: a struct carrying only the span at which
        // the keyword was written.
        #[allow(non_camel_case_types)]
        pub struct $ident {
            pub span: $crate::__private::Span,
        }

        // Constructor function with the same name as the type, so
        // `kw::whatever(span)` works like the built-in token constructors.
        #[doc(hidden)]
        #[allow(dead_code, non_snake_case)]
        pub fn $ident<__S: $crate::__private::IntoSpans<[$crate::__private::Span; 1]>>(
            span: __S,
        ) -> $ident {
            $ident {
                span: $crate::__private::IntoSpans::into_spans(span)[0],
            }
        }

        impl $crate::__private::Default for $ident {
            fn default() -> Self {
                $ident {
                    span: $crate::__private::Span::call_site(),
                }
            }
        }

        // Trait impls are delegated to helper macros so that each one can be
        // compiled out when its cargo feature is disabled.
        $crate::impl_parse_for_custom_keyword!($ident);
        $crate::impl_to_tokens_for_custom_keyword!($ident);
        $crate::impl_clone_for_custom_keyword!($ident);
        $crate::impl_extra_traits_for_custom_keyword!($ident);
    };
}
|
||||
|
||||
// Not public API.
#[cfg(feature = "parsing")]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_parse_for_custom_keyword {
    ($ident:ident) => {
        // For peek.
        impl $crate::token::CustomToken for $ident {
            fn peek(cursor: $crate::buffer::Cursor) -> $crate::__private::bool {
                if let $crate::__private::Some((ident, _rest)) = cursor.ident() {
                    ident == stringify!($ident)
                } else {
                    false
                }
            }

            fn display() -> &'static $crate::__private::str {
                concat!("`", stringify!($ident), "`")
            }
        }

        impl $crate::parse::Parse for $ident {
            fn parse(input: $crate::parse::ParseStream) -> $crate::parse::Result<$ident> {
                // Consume the next ident only if it spells this keyword;
                // otherwise report the expected keyword without advancing.
                input.step(|cursor| {
                    if let $crate::__private::Some((ident, rest)) = cursor.ident() {
                        if ident == stringify!($ident) {
                            return $crate::__private::Ok(($ident { span: ident.span() }, rest));
                        }
                    }
                    $crate::__private::Err(cursor.error(concat!(
                        "expected `",
                        stringify!($ident),
                        "`"
                    )))
                })
            }
        }
    };
}
|
||||
|
||||
// Not public API.
#[cfg(not(feature = "parsing"))]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_parse_for_custom_keyword {
    // No-op when the "parsing" feature is disabled.
    ($ident:ident) => {};
}
|
||||
|
||||
// Not public API.
#[cfg(feature = "printing")]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_to_tokens_for_custom_keyword {
    ($ident:ident) => {
        impl $crate::__private::ToTokens for $ident {
            fn to_tokens(&self, tokens: &mut $crate::__private::TokenStream2) {
                // Emit the keyword as a plain ident carrying the stored span.
                let ident = $crate::Ident::new(stringify!($ident), self.span);
                $crate::__private::TokenStreamExt::append(tokens, ident);
            }
        }
    };
}

// Not public API.
#[cfg(not(feature = "printing"))]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_to_tokens_for_custom_keyword {
    // No-op when the "printing" feature is disabled.
    ($ident:ident) => {};
}
|
||||
|
||||
// Not public API.
#[cfg(feature = "clone-impls")]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_clone_for_custom_keyword {
    ($ident:ident) => {
        impl $crate::__private::Copy for $ident {}

        #[allow(clippy::expl_impl_clone_on_copy)]
        impl $crate::__private::Clone for $ident {
            fn clone(&self) -> Self {
                *self
            }
        }
    };
}

// Not public API.
#[cfg(not(feature = "clone-impls"))]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_clone_for_custom_keyword {
    // No-op when the "clone-impls" feature is disabled.
    ($ident:ident) => {};
}
|
||||
|
||||
// Not public API.
#[cfg(feature = "extra-traits")]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_extra_traits_for_custom_keyword {
    ($ident:ident) => {
        impl $crate::__private::Debug for $ident {
            fn fmt(&self, f: &mut $crate::__private::Formatter) -> $crate::__private::fmt::Result {
                $crate::__private::Formatter::write_str(
                    f,
                    concat!("Keyword [", stringify!($ident), "]"),
                )
            }
        }

        impl $crate::__private::Eq for $ident {}

        // All values of a keyword type are interchangeable: only the span
        // differs, and spans are ignored for comparison and hashing.
        impl $crate::__private::PartialEq for $ident {
            fn eq(&self, _other: &Self) -> $crate::__private::bool {
                true
            }
        }

        impl $crate::__private::Hash for $ident {
            fn hash<__H: $crate::__private::Hasher>(&self, _state: &mut __H) {}
        }
    };
}

// Not public API.
#[cfg(not(feature = "extra-traits"))]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_extra_traits_for_custom_keyword {
    // No-op when the "extra-traits" feature is disabled.
    ($ident:ident) => {};
}
|
|
@ -0,0 +1,300 @@
|
|||
/// Define a type that supports parsing and printing a multi-character symbol
|
||||
/// as if it were a punctuation token.
|
||||
///
|
||||
/// # Usage
|
||||
///
|
||||
/// ```
|
||||
/// syn::custom_punctuation!(LeftRightArrow, <=>);
|
||||
/// ```
|
||||
///
|
||||
/// The generated syntax tree node supports the following operations just like
|
||||
/// any built-in punctuation token.
|
||||
///
|
||||
/// - [Peeking] — `input.peek(LeftRightArrow)`
|
||||
///
|
||||
/// - [Parsing] — `input.parse::<LeftRightArrow>()?`
|
||||
///
|
||||
/// - [Printing] — `quote!( ... #lrarrow ... )`
|
||||
///
|
||||
/// - Construction from a [`Span`] — `let lrarrow = LeftRightArrow(sp)`
|
||||
///
|
||||
/// - Construction from multiple [`Span`] — `let lrarrow = LeftRightArrow([sp, sp, sp])`
|
||||
///
|
||||
/// - Field access to its spans — `let spans = lrarrow.spans`
|
||||
///
|
||||
/// [Peeking]: crate::parse::ParseBuffer::peek
|
||||
/// [Parsing]: crate::parse::ParseBuffer::parse
|
||||
/// [Printing]: quote::ToTokens
|
||||
/// [`Span`]: proc_macro2::Span
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```
|
||||
/// use proc_macro2::{TokenStream, TokenTree};
|
||||
/// use syn::parse::{Parse, ParseStream, Peek, Result};
|
||||
/// use syn::punctuated::Punctuated;
|
||||
/// use syn::Expr;
|
||||
///
|
||||
/// syn::custom_punctuation!(PathSeparator, </>);
|
||||
///
|
||||
/// // expr </> expr </> expr ...
|
||||
/// struct PathSegments {
|
||||
/// segments: Punctuated<Expr, PathSeparator>,
|
||||
/// }
|
||||
///
|
||||
/// impl Parse for PathSegments {
|
||||
/// fn parse(input: ParseStream) -> Result<Self> {
|
||||
/// let mut segments = Punctuated::new();
|
||||
///
|
||||
/// let first = parse_until(input, PathSeparator)?;
|
||||
/// segments.push_value(syn::parse2(first)?);
|
||||
///
|
||||
/// while input.peek(PathSeparator) {
|
||||
/// segments.push_punct(input.parse()?);
|
||||
///
|
||||
/// let next = parse_until(input, PathSeparator)?;
|
||||
/// segments.push_value(syn::parse2(next)?);
|
||||
/// }
|
||||
///
|
||||
/// Ok(PathSegments { segments })
|
||||
/// }
|
||||
/// }
|
||||
///
|
||||
/// fn parse_until<E: Peek>(input: ParseStream, end: E) -> Result<TokenStream> {
|
||||
/// let mut tokens = TokenStream::new();
|
||||
/// while !input.is_empty() && !input.peek(end) {
|
||||
/// let next: TokenTree = input.parse()?;
|
||||
/// tokens.extend(Some(next));
|
||||
/// }
|
||||
/// Ok(tokens)
|
||||
/// }
|
||||
///
|
||||
/// fn main() {
|
||||
/// let input = r#" a::b </> c::d::e "#;
|
||||
/// let _: PathSegments = syn::parse_str(input).unwrap();
|
||||
/// }
|
||||
/// ```
|
||||
#[macro_export]
macro_rules! custom_punctuation {
    ($ident:ident, $($tt:tt)+) => {
        pub struct $ident {
            pub spans: $crate::custom_punctuation_repr!($($tt)+),
        }

        #[doc(hidden)]
        #[allow(dead_code, non_snake_case)]
        pub fn $ident<__S: $crate::__private::IntoSpans<$crate::custom_punctuation_repr!($($tt)+)>>(
            spans: __S,
        ) -> $ident {
            // In `strict` mode custom_punctuation_len! rejects any token that
            // is not a recognized punctuation symbol, producing a compile
            // error at the macro call site.
            let _validate_len = 0 $(+ $crate::custom_punctuation_len!(strict, $tt))*;
            $ident {
                spans: $crate::__private::IntoSpans::into_spans(spans)
            }
        }

        impl $crate::__private::Default for $ident {
            fn default() -> Self {
                $ident($crate::__private::Span::call_site())
            }
        }

        // Trait impls are delegated to helper macros so that each one can be
        // compiled out when its cargo feature is disabled.
        $crate::impl_parse_for_custom_punctuation!($ident, $($tt)+);
        $crate::impl_to_tokens_for_custom_punctuation!($ident, $($tt)+);
        $crate::impl_clone_for_custom_punctuation!($ident, $($tt)+);
        $crate::impl_extra_traits_for_custom_punctuation!($ident, $($tt)+);
    };
}
|
||||
|
||||
// Not public API.
#[cfg(feature = "parsing")]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_parse_for_custom_punctuation {
    ($ident:ident, $($tt:tt)+) => {
        impl $crate::token::CustomToken for $ident {
            fn peek(cursor: $crate::buffer::Cursor) -> bool {
                $crate::token::parsing::peek_punct(cursor, $crate::stringify_punct!($($tt)+))
            }

            fn display() -> &'static $crate::__private::str {
                concat!("`", $crate::stringify_punct!($($tt)+), "`")
            }
        }

        impl $crate::parse::Parse for $ident {
            fn parse(input: $crate::parse::ParseStream) -> $crate::parse::Result<$ident> {
                // One span per character of the symbol.
                let spans: $crate::custom_punctuation_repr!($($tt)+) =
                    $crate::token::parsing::punct(input, $crate::stringify_punct!($($tt)+))?;
                Ok($ident(spans))
            }
        }
    };
}

// Not public API.
#[cfg(not(feature = "parsing"))]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_parse_for_custom_punctuation {
    // No-op when the "parsing" feature is disabled.
    ($ident:ident, $($tt:tt)+) => {};
}
|
||||
|
||||
// Not public API.
#[cfg(feature = "printing")]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_to_tokens_for_custom_punctuation {
    ($ident:ident, $($tt:tt)+) => {
        impl $crate::__private::ToTokens for $ident {
            fn to_tokens(&self, tokens: &mut $crate::__private::TokenStream2) {
                // Emit the multi-character symbol with one stored span per char.
                $crate::token::printing::punct($crate::stringify_punct!($($tt)+), &self.spans, tokens)
            }
        }
    };
}

// Not public API.
#[cfg(not(feature = "printing"))]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_to_tokens_for_custom_punctuation {
    // No-op when the "printing" feature is disabled.
    ($ident:ident, $($tt:tt)+) => {};
}
|
||||
|
||||
// Not public API.
#[cfg(feature = "clone-impls")]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_clone_for_custom_punctuation {
    ($ident:ident, $($tt:tt)+) => {
        impl $crate::__private::Copy for $ident {}

        #[allow(clippy::expl_impl_clone_on_copy)]
        impl $crate::__private::Clone for $ident {
            fn clone(&self) -> Self {
                *self
            }
        }
    };
}

// Not public API.
#[cfg(not(feature = "clone-impls"))]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_clone_for_custom_punctuation {
    // No-op when the "clone-impls" feature is disabled.
    ($ident:ident, $($tt:tt)+) => {};
}
|
||||
|
||||
// Not public API.
#[cfg(feature = "extra-traits")]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_extra_traits_for_custom_punctuation {
    ($ident:ident, $($tt:tt)+) => {
        impl $crate::__private::Debug for $ident {
            fn fmt(&self, f: &mut $crate::__private::Formatter) -> $crate::__private::fmt::Result {
                $crate::__private::Formatter::write_str(f, stringify!($ident))
            }
        }

        impl $crate::__private::Eq for $ident {}

        // All values of a punctuation type are interchangeable: only the
        // spans differ, and spans are ignored for comparison and hashing.
        impl $crate::__private::PartialEq for $ident {
            fn eq(&self, _other: &Self) -> $crate::__private::bool {
                true
            }
        }

        impl $crate::__private::Hash for $ident {
            fn hash<__H: $crate::__private::Hasher>(&self, _state: &mut __H) {}
        }
    };
}

// Not public API.
#[cfg(not(feature = "extra-traits"))]
#[doc(hidden)]
#[macro_export]
macro_rules! impl_extra_traits_for_custom_punctuation {
    // No-op when the "extra-traits" feature is disabled.
    ($ident:ident, $($tt:tt)+) => {};
}
|
||||
|
||||
// Not public API.
#[doc(hidden)]
#[macro_export]
macro_rules! custom_punctuation_repr {
    // The span storage type: one `Span` per character of the symbol, counted
    // by summing custom_punctuation_len! over the input tokens.
    ($($tt:tt)+) => {
        [$crate::__private::Span; 0 $(+ $crate::custom_punctuation_len!(lenient, $tt))+]
    };
}
|
||||
|
||||
// Not public API.
#[doc(hidden)]
#[macro_export]
#[rustfmt::skip]
macro_rules! custom_punctuation_len {
    // Maps each recognized punctuation token to its length in characters.
    // `lenient` mode yields 0 for an unrecognized token (so the repr type
    // still resolves); `strict` mode forces a compile error for it via
    // custom_punctuation_unexpected!.
    ($mode:ident, +) => { 1 };
    ($mode:ident, +=) => { 2 };
    ($mode:ident, &) => { 1 };
    ($mode:ident, &&) => { 2 };
    ($mode:ident, &=) => { 2 };
    ($mode:ident, @) => { 1 };
    ($mode:ident, !) => { 1 };
    ($mode:ident, ^) => { 1 };
    ($mode:ident, ^=) => { 2 };
    ($mode:ident, :) => { 1 };
    ($mode:ident, ::) => { 2 };
    ($mode:ident, ,) => { 1 };
    ($mode:ident, /) => { 1 };
    ($mode:ident, /=) => { 2 };
    ($mode:ident, .) => { 1 };
    ($mode:ident, ..) => { 2 };
    ($mode:ident, ...) => { 3 };
    ($mode:ident, ..=) => { 3 };
    ($mode:ident, =) => { 1 };
    ($mode:ident, ==) => { 2 };
    ($mode:ident, >=) => { 2 };
    ($mode:ident, >) => { 1 };
    ($mode:ident, <=) => { 2 };
    ($mode:ident, <) => { 1 };
    ($mode:ident, *=) => { 2 };
    ($mode:ident, !=) => { 2 };
    ($mode:ident, |) => { 1 };
    ($mode:ident, |=) => { 2 };
    ($mode:ident, ||) => { 2 };
    ($mode:ident, #) => { 1 };
    ($mode:ident, ?) => { 1 };
    ($mode:ident, ->) => { 2 };
    ($mode:ident, <-) => { 2 };
    ($mode:ident, %) => { 1 };
    ($mode:ident, %=) => { 2 };
    ($mode:ident, =>) => { 2 };
    ($mode:ident, ;) => { 1 };
    ($mode:ident, <<) => { 2 };
    ($mode:ident, <<=) => { 3 };
    ($mode:ident, >>) => { 2 };
    ($mode:ident, >>=) => { 3 };
    ($mode:ident, *) => { 1 };
    ($mode:ident, -) => { 1 };
    ($mode:ident, -=) => { 2 };
    ($mode:ident, ~) => { 1 };
    (lenient, $tt:tt) => { 0 };
    (strict, $tt:tt) => {{ $crate::custom_punctuation_unexpected!($tt); 0 }};
}
|
||||
|
||||
// Not public API.
//
// Deliberately matches only the empty input. Invoking it with any token
// (as `custom_punctuation_len!(strict, ...)` does for unsupported
// punctuation) produces a "no rules expected this token" compile error at
// the offending token's span.
#[doc(hidden)]
#[macro_export]
macro_rules! custom_punctuation_unexpected {
    () => {};
}
|
||||
|
||||
// Not public API.
//
// Concatenates the stringified form of each token tree into a single string
// literal, e.g. `stringify_punct!(< =)` becomes `"<="` -- used to recover
// the textual spelling of a multi-character punctuation.
#[doc(hidden)]
#[macro_export]
macro_rules! stringify_punct {
    ($($tt:tt)+) => {
        concat!($(stringify!($tt)),+)
    };
}
|
|
@ -0,0 +1,493 @@
|
|||
use super::*;
|
||||
use crate::punctuated::Punctuated;
|
||||
|
||||
// Declared through the project-local ast_struct! macro (expansion not
// visible here -- presumably generates feature-gated trait impls; confirm in
// the macros module).
ast_struct! {
    /// An enum variant.
    ///
    /// *This type is available only if Syn is built with the `"derive"` or `"full"`
    /// feature.*
    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct Variant {
        /// Attributes tagged on the variant.
        pub attrs: Vec<Attribute>,

        /// Name of the variant.
        pub ident: Ident,

        /// Content stored in the variant.
        pub fields: Fields,

        /// Explicit discriminant: `Variant = 1`
        pub discriminant: Option<(Token![=], Expr)>,
    }
}
|
||||
|
||||
// Declared through the project-local ast_enum_of_structs! macro.
ast_enum_of_structs! {
    /// Data stored within an enum variant or struct.
    ///
    /// *This type is available only if Syn is built with the `"derive"` or `"full"`
    /// feature.*
    ///
    /// # Syntax tree enum
    ///
    /// This type is a [syntax tree enum].
    ///
    /// [syntax tree enum]: Expr#syntax-tree-enums
    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub enum Fields {
        /// Named fields of a struct or struct variant such as `Point { x: f64,
        /// y: f64 }`.
        Named(FieldsNamed),

        /// Unnamed fields of a tuple struct or tuple variant such as `Some(T)`.
        Unnamed(FieldsUnnamed),

        /// Unit struct or unit variant such as `None`.
        Unit,
    }
}
|
||||
|
||||
// Brace-delimited, comma-punctuated named fields.
ast_struct! {
    /// Named fields of a struct or struct variant such as `Point { x: f64,
    /// y: f64 }`.
    ///
    /// *This type is available only if Syn is built with the `"derive"` or
    /// `"full"` feature.*
    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct FieldsNamed {
        pub brace_token: token::Brace,
        pub named: Punctuated<Field, Token![,]>,
    }
}
|
||||
|
||||
// Paren-delimited, comma-punctuated tuple fields.
ast_struct! {
    /// Unnamed fields of a tuple struct or tuple variant such as `Some(T)`.
    ///
    /// *This type is available only if Syn is built with the `"derive"` or
    /// `"full"` feature.*
    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct FieldsUnnamed {
        pub paren_token: token::Paren,
        pub unnamed: Punctuated<Field, Token![,]>,
    }
}
|
||||
|
||||
impl Fields {
|
||||
/// Get an iterator over the borrowed [`Field`] items in this object. This
|
||||
/// iterator can be used to iterate over a named or unnamed struct or
|
||||
/// variant's fields uniformly.
|
||||
pub fn iter(&self) -> punctuated::Iter<Field> {
|
||||
match self {
|
||||
Fields::Unit => crate::punctuated::empty_punctuated_iter(),
|
||||
Fields::Named(f) => f.named.iter(),
|
||||
Fields::Unnamed(f) => f.unnamed.iter(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Get an iterator over the mutably borrowed [`Field`] items in this
|
||||
/// object. This iterator can be used to iterate over a named or unnamed
|
||||
/// struct or variant's fields uniformly.
|
||||
pub fn iter_mut(&mut self) -> punctuated::IterMut<Field> {
|
||||
match self {
|
||||
Fields::Unit => crate::punctuated::empty_punctuated_iter_mut(),
|
||||
Fields::Named(f) => f.named.iter_mut(),
|
||||
Fields::Unnamed(f) => f.unnamed.iter_mut(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the number of fields.
|
||||
pub fn len(&self) -> usize {
|
||||
match self {
|
||||
Fields::Unit => 0,
|
||||
Fields::Named(f) => f.named.len(),
|
||||
Fields::Unnamed(f) => f.unnamed.len(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if there are zero fields.
|
||||
pub fn is_empty(&self) -> bool {
|
||||
match self {
|
||||
Fields::Unit => true,
|
||||
Fields::Named(f) => f.named.is_empty(),
|
||||
Fields::Unnamed(f) => f.unnamed.is_empty(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoIterator for Fields {
|
||||
type Item = Field;
|
||||
type IntoIter = punctuated::IntoIter<Field>;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
match self {
|
||||
Fields::Unit => Punctuated::<Field, ()>::new().into_iter(),
|
||||
Fields::Named(f) => f.named.into_iter(),
|
||||
Fields::Unnamed(f) => f.unnamed.into_iter(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Borrowing iteration: delegates to `Fields::iter`.
impl<'a> IntoIterator for &'a Fields {
    type Item = &'a Field;
    type IntoIter = punctuated::Iter<'a, Field>;

    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}
|
||||
|
||||
// Mutably borrowing iteration: delegates to `Fields::iter_mut`.
impl<'a> IntoIterator for &'a mut Fields {
    type Item = &'a mut Field;
    type IntoIter = punctuated::IterMut<'a, Field>;

    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}
|
||||
|
||||
// A single field; `ident`/`colon_token` are `None` for tuple-struct fields.
ast_struct! {
    /// A field of a struct or enum variant.
    ///
    /// *This type is available only if Syn is built with the `"derive"` or `"full"`
    /// feature.*
    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct Field {
        /// Attributes tagged on the field.
        pub attrs: Vec<Attribute>,

        /// Visibility of the field.
        pub vis: Visibility,

        /// Name of the field, if any.
        ///
        /// Fields of tuple structs have no names.
        pub ident: Option<Ident>,

        /// `:` between the name and the type; absent for tuple-struct fields.
        pub colon_token: Option<Token![:]>,

        /// Type of the field.
        pub ty: Type,
    }
}
|
||||
|
||||
// Declared through the project-local ast_enum_of_structs! macro.
ast_enum_of_structs! {
    /// The visibility level of an item: inherited or `pub` or
    /// `pub(restricted)`.
    ///
    /// *This type is available only if Syn is built with the `"derive"` or `"full"`
    /// feature.*
    ///
    /// # Syntax tree enum
    ///
    /// This type is a [syntax tree enum].
    ///
    /// [syntax tree enum]: Expr#syntax-tree-enums
    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub enum Visibility {
        /// A public visibility level: `pub`.
        Public(VisPublic),

        /// A crate-level visibility: `crate`.
        Crate(VisCrate),

        /// A visibility level restricted to some path: `pub(self)` or
        /// `pub(super)` or `pub(crate)` or `pub(in some::module)`.
        Restricted(VisRestricted),

        /// An inherited visibility, which usually means private.
        Inherited,
    }
}
|
||||
|
||||
// Carrier for the bare `pub` keyword token.
ast_struct! {
    /// A public visibility level: `pub`.
    ///
    /// *This type is available only if Syn is built with the `"derive"` or
    /// `"full"` feature.*
    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct VisPublic {
        pub pub_token: Token![pub],
    }
}
|
||||
|
||||
// Carrier for the bare `crate` visibility keyword token.
ast_struct! {
    /// A crate-level visibility: `crate`.
    ///
    /// *This type is available only if Syn is built with the `"derive"` or
    /// `"full"` feature.*
    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct VisCrate {
        pub crate_token: Token![crate],
    }
}
|
||||
|
||||
// `pub(...)` with an optional `in` and a restriction path.
ast_struct! {
    /// A visibility level restricted to some path: `pub(self)` or
    /// `pub(super)` or `pub(crate)` or `pub(in some::module)`.
    ///
    /// *This type is available only if Syn is built with the `"derive"` or
    /// `"full"` feature.*
    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct VisRestricted {
        pub pub_token: Token![pub],
        pub paren_token: token::Paren,
        /// Present only for the `pub(in path)` form.
        pub in_token: Option<Token![in]>,
        pub path: Box<Path>,
    }
}
|
||||
|
||||
#[cfg(feature = "parsing")]
pub mod parsing {
    use super::*;
    use crate::ext::IdentExt;
    use crate::parse::discouraged::Speculative;
    use crate::parse::{Parse, ParseStream, Result};

    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    impl Parse for Variant {
        fn parse(input: ParseStream) -> Result<Self> {
            let attrs = input.call(Attribute::parse_outer)?;
            // A leading visibility is parsed and then discarded; `Variant`
            // has no field for it.
            let _visibility: Visibility = input.parse()?;
            let ident: Ident = input.parse()?;
            // Braces => named fields, parens => tuple fields, neither => unit.
            let fields = if input.peek(token::Brace) {
                Fields::Named(input.parse()?)
            } else if input.peek(token::Paren) {
                Fields::Unnamed(input.parse()?)
            } else {
                Fields::Unit
            };
            // Optional explicit discriminant, e.g. `Variant = 1`.
            let discriminant = if input.peek(Token![=]) {
                let eq_token: Token![=] = input.parse()?;
                let discriminant: Expr = input.parse()?;
                Some((eq_token, discriminant))
            } else {
                None
            };
            Ok(Variant {
                attrs,
                ident,
                fields,
                discriminant,
            })
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    impl Parse for FieldsNamed {
        fn parse(input: ParseStream) -> Result<Self> {
            let content;
            Ok(FieldsNamed {
                brace_token: braced!(content in input),
                named: content.parse_terminated(Field::parse_named)?,
            })
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    impl Parse for FieldsUnnamed {
        fn parse(input: ParseStream) -> Result<Self> {
            let content;
            Ok(FieldsUnnamed {
                paren_token: parenthesized!(content in input),
                unnamed: content.parse_terminated(Field::parse_unnamed)?,
            })
        }
    }

    impl Field {
        /// Parses a named (braced struct) field.
        #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
        pub fn parse_named(input: ParseStream) -> Result<Self> {
            Ok(Field {
                attrs: input.call(Attribute::parse_outer)?,
                vis: input.parse()?,
                // `_` as a field name must go through `parse_any`; the plain
                // `Ident` parser rejects it.
                ident: Some(if input.peek(Token![_]) {
                    input.call(Ident::parse_any)
                } else {
                    input.parse()
                }?),
                colon_token: Some(input.parse()?),
                ty: input.parse()?,
            })
        }

        /// Parses an unnamed (tuple struct) field.
        #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
        pub fn parse_unnamed(input: ParseStream) -> Result<Self> {
            Ok(Field {
                attrs: input.call(Attribute::parse_outer)?,
                vis: input.parse()?,
                ident: None,
                colon_token: None,
                ty: input.parse()?,
            })
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    impl Parse for Visibility {
        fn parse(input: ParseStream) -> Result<Self> {
            // Recognize an empty None-delimited group, as produced by a $:vis
            // matcher that matched no tokens.
            if input.peek(token::Group) {
                let ahead = input.fork();
                let group = crate::group::parse_group(&ahead)?;
                if group.content.is_empty() {
                    input.advance_to(&ahead);
                    return Ok(Visibility::Inherited);
                }
            }

            if input.peek(Token![pub]) {
                Self::parse_pub(input)
            } else if input.peek(Token![crate]) {
                Self::parse_crate(input)
            } else {
                Ok(Visibility::Inherited)
            }
        }
    }

    impl Visibility {
        // Parses `pub`, `pub(crate|self|super)`, and `pub(in path)`.
        // Parenthesized content is parsed speculatively on a fork and only
        // committed via `advance_to` once it is definitely a restriction.
        fn parse_pub(input: ParseStream) -> Result<Self> {
            let pub_token = input.parse::<Token![pub]>()?;

            if input.peek(token::Paren) {
                let ahead = input.fork();

                let content;
                let paren_token = parenthesized!(content in ahead);
                if content.peek(Token![crate])
                    || content.peek(Token![self])
                    || content.peek(Token![super])
                {
                    let path = content.call(Ident::parse_any)?;

                    // Ensure there are no additional tokens within `content`.
                    // Without explicitly checking, we may misinterpret a tuple
                    // field as a restricted visibility, causing a parse error.
                    // e.g. `pub (crate::A, crate::B)` (Issue #720).
                    if content.is_empty() {
                        input.advance_to(&ahead);
                        return Ok(Visibility::Restricted(VisRestricted {
                            pub_token,
                            paren_token,
                            in_token: None,
                            path: Box::new(Path::from(path)),
                        }));
                    }
                } else if content.peek(Token![in]) {
                    let in_token: Token![in] = content.parse()?;
                    let path = content.call(Path::parse_mod_style)?;

                    input.advance_to(&ahead);
                    return Ok(Visibility::Restricted(VisRestricted {
                        pub_token,
                        paren_token,
                        in_token: Some(in_token),
                        path: Box::new(path),
                    }));
                }
            }

            // Plain `pub`; if parens followed but were not a restriction,
            // they were only consumed on the (abandoned) fork.
            Ok(Visibility::Public(VisPublic { pub_token }))
        }

        fn parse_crate(input: ParseStream) -> Result<Self> {
            if input.peek2(Token![::]) {
                // `crate` followed by `::` starts a path like `crate::foo`,
                // not a crate visibility; nothing is consumed.
                Ok(Visibility::Inherited)
            } else {
                Ok(Visibility::Crate(VisCrate {
                    crate_token: input.parse()?,
                }))
            }
        }

        // True for any explicit visibility (pub / crate / restricted).
        #[cfg(feature = "full")]
        pub(crate) fn is_some(&self) -> bool {
            match self {
                Visibility::Inherited => false,
                _ => true,
            }
        }
    }
}
|
||||
|
||||
#[cfg(feature = "printing")]
mod printing {
    use super::*;
    use crate::print::TokensOrDefault;
    use proc_macro2::TokenStream;
    use quote::{ToTokens, TokenStreamExt};

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for Variant {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            tokens.append_all(&self.attrs);
            self.ident.to_tokens(tokens);
            self.fields.to_tokens(tokens);
            // Discriminant prints as `= expr` after the fields.
            if let Some((eq_token, disc)) = &self.discriminant {
                eq_token.to_tokens(tokens);
                disc.to_tokens(tokens);
            }
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for FieldsNamed {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            self.brace_token.surround(tokens, |tokens| {
                self.named.to_tokens(tokens);
            });
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for FieldsUnnamed {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            self.paren_token.surround(tokens, |tokens| {
                self.unnamed.to_tokens(tokens);
            });
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for Field {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            tokens.append_all(&self.attrs);
            self.vis.to_tokens(tokens);
            // `name:` prints only for named fields; `colon_token` falls back
            // to a default token if the Option is empty.
            if let Some(ident) = &self.ident {
                ident.to_tokens(tokens);
                TokensOrDefault(&self.colon_token).to_tokens(tokens);
            }
            self.ty.to_tokens(tokens);
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for VisPublic {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            self.pub_token.to_tokens(tokens);
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for VisCrate {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            self.crate_token.to_tokens(tokens);
        }
    }

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for VisRestricted {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            self.pub_token.to_tokens(tokens);
            self.paren_token.surround(tokens, |tokens| {
                // TODO: If we have a path which is not "self" or "super" or
                // "crate", automatically add the "in" token.
                self.in_token.to_tokens(tokens);
                self.path.to_tokens(tokens);
            });
        }
    }
}
|
|
@ -0,0 +1,274 @@
|
|||
use super::*;
|
||||
use crate::punctuated::Punctuated;
|
||||
|
||||
// Top-level input handed to a derive macro: attrs + vis + keyword item.
ast_struct! {
    /// Data structure sent to a `proc_macro_derive` macro.
    ///
    /// *This type is available only if Syn is built with the `"derive"` feature.*
    #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
    pub struct DeriveInput {
        /// Attributes tagged on the whole struct or enum.
        pub attrs: Vec<Attribute>,

        /// Visibility of the struct or enum.
        pub vis: Visibility,

        /// Name of the struct or enum.
        pub ident: Ident,

        /// Generics required to complete the definition.
        pub generics: Generics,

        /// Data within the struct or enum.
        pub data: Data,
    }
}
|
||||
|
||||
// `do_not_generate_to_tokens`: printing for the whole item is handled by
// the ToTokens impl on DeriveInput rather than per-variant here.
ast_enum_of_structs! {
    /// The storage of a struct, enum or union data structure.
    ///
    /// *This type is available only if Syn is built with the `"derive"` feature.*
    ///
    /// # Syntax tree enum
    ///
    /// This type is a [syntax tree enum].
    ///
    /// [syntax tree enum]: Expr#syntax-tree-enums
    #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
    pub enum Data {
        /// A struct input to a `proc_macro_derive` macro.
        Struct(DataStruct),

        /// An enum input to a `proc_macro_derive` macro.
        Enum(DataEnum),

        /// An untagged union input to a `proc_macro_derive` macro.
        Union(DataUnion),
    }

    do_not_generate_to_tokens
}
|
||||
|
||||
// `semi_token` is present for unit and tuple structs (`struct S;`,
// `struct S(T);`), absent for braced structs.
ast_struct! {
    /// A struct input to a `proc_macro_derive` macro.
    ///
    /// *This type is available only if Syn is built with the `"derive"`
    /// feature.*
    #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
    pub struct DataStruct {
        pub struct_token: Token![struct],
        pub fields: Fields,
        pub semi_token: Option<Token![;]>,
    }
}
|
||||
|
||||
// `enum` keyword plus the brace-delimited, comma-punctuated variants.
ast_struct! {
    /// An enum input to a `proc_macro_derive` macro.
    ///
    /// *This type is available only if Syn is built with the `"derive"`
    /// feature.*
    #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
    pub struct DataEnum {
        pub enum_token: Token![enum],
        pub brace_token: token::Brace,
        pub variants: Punctuated<Variant, Token![,]>,
    }
}
|
||||
|
||||
// Unions always have named (braced) fields.
ast_struct! {
    /// An untagged union input to a `proc_macro_derive` macro.
    ///
    /// *This type is available only if Syn is built with the `"derive"`
    /// feature.*
    #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
    pub struct DataUnion {
        pub union_token: Token![union],
        pub fields: FieldsNamed,
    }
}
|
||||
|
||||
#[cfg(feature = "parsing")]
pub mod parsing {
    use super::*;
    use crate::parse::{Parse, ParseStream, Result};

    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    impl Parse for DeriveInput {
        fn parse(input: ParseStream) -> Result<Self> {
            let attrs = input.call(Attribute::parse_outer)?;
            let vis = input.parse::<Visibility>()?;

            // Dispatch on the item keyword: struct / enum / union. Any other
            // token yields the lookahead's "expected one of" error.
            let lookahead = input.lookahead1();
            if lookahead.peek(Token![struct]) {
                let struct_token = input.parse::<Token![struct]>()?;
                let ident = input.parse::<Ident>()?;
                let generics = input.parse::<Generics>()?;
                // The where-clause is parsed by the data helper (its position
                // depends on the struct form) and spliced into `generics`.
                let (where_clause, fields, semi) = data_struct(input)?;
                Ok(DeriveInput {
                    attrs,
                    vis,
                    ident,
                    generics: Generics {
                        where_clause,
                        ..generics
                    },
                    data: Data::Struct(DataStruct {
                        struct_token,
                        fields,
                        semi_token: semi,
                    }),
                })
            } else if lookahead.peek(Token![enum]) {
                let enum_token = input.parse::<Token![enum]>()?;
                let ident = input.parse::<Ident>()?;
                let generics = input.parse::<Generics>()?;
                let (where_clause, brace, variants) = data_enum(input)?;
                Ok(DeriveInput {
                    attrs,
                    vis,
                    ident,
                    generics: Generics {
                        where_clause,
                        ..generics
                    },
                    data: Data::Enum(DataEnum {
                        enum_token,
                        brace_token: brace,
                        variants,
                    }),
                })
            } else if lookahead.peek(Token![union]) {
                let union_token = input.parse::<Token![union]>()?;
                let ident = input.parse::<Ident>()?;
                let generics = input.parse::<Generics>()?;
                let (where_clause, fields) = data_union(input)?;
                Ok(DeriveInput {
                    attrs,
                    vis,
                    ident,
                    generics: Generics {
                        where_clause,
                        ..generics
                    },
                    data: Data::Union(DataUnion {
                        union_token,
                        fields,
                    }),
                })
            } else {
                Err(lookahead.error())
            }
        }
    }

    // Parses the body of a struct after its generics. Handles the three
    // forms and their differing where-clause placement:
    //   unit:  `struct S where ...;`   (where-clause before `;`)
    //   tuple: `struct S(...) where ...;` (where-clause after the parens)
    //   named: `struct S where ... { ... }` (where-clause before the braces)
    pub fn data_struct(
        input: ParseStream,
    ) -> Result<(Option<WhereClause>, Fields, Option<Token![;]>)> {
        let mut lookahead = input.lookahead1();
        let mut where_clause = None;
        if lookahead.peek(Token![where]) {
            where_clause = Some(input.parse()?);
            lookahead = input.lookahead1();
        }

        // A where-clause already seen rules out the tuple form (its clause
        // comes after the parens), hence the `is_none` guard.
        if where_clause.is_none() && lookahead.peek(token::Paren) {
            let fields = input.parse()?;

            lookahead = input.lookahead1();
            if lookahead.peek(Token![where]) {
                where_clause = Some(input.parse()?);
                lookahead = input.lookahead1();
            }

            if lookahead.peek(Token![;]) {
                let semi = input.parse()?;
                Ok((where_clause, Fields::Unnamed(fields), Some(semi)))
            } else {
                Err(lookahead.error())
            }
        } else if lookahead.peek(token::Brace) {
            let fields = input.parse()?;
            Ok((where_clause, Fields::Named(fields), None))
        } else if lookahead.peek(Token![;]) {
            let semi = input.parse()?;
            Ok((where_clause, Fields::Unit, Some(semi)))
        } else {
            Err(lookahead.error())
        }
    }

    // Parses the body of an enum after its generics: optional where-clause,
    // then the brace-delimited variant list.
    pub fn data_enum(
        input: ParseStream,
    ) -> Result<(
        Option<WhereClause>,
        token::Brace,
        Punctuated<Variant, Token![,]>,
    )> {
        let where_clause = input.parse()?;

        let content;
        let brace = braced!(content in input);
        let variants = content.parse_terminated(Variant::parse)?;

        Ok((where_clause, brace, variants))
    }

    // Parses the body of a union after its generics: optional where-clause,
    // then the braced named fields.
    pub fn data_union(input: ParseStream) -> Result<(Option<WhereClause>, FieldsNamed)> {
        let where_clause = input.parse()?;
        let fields = input.parse()?;
        Ok((where_clause, fields))
    }
}
|
||||
|
||||
#[cfg(feature = "printing")]
mod printing {
    use super::*;
    use crate::attr::FilterAttrs;
    use crate::print::TokensOrDefault;
    use proc_macro2::TokenStream;
    use quote::ToTokens;

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for DeriveInput {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            // Only outer attributes are reprinted.
            for attr in self.attrs.outer() {
                attr.to_tokens(tokens);
            }
            self.vis.to_tokens(tokens);
            // Item keyword first ...
            match &self.data {
                Data::Struct(d) => d.struct_token.to_tokens(tokens),
                Data::Enum(d) => d.enum_token.to_tokens(tokens),
                Data::Union(d) => d.union_token.to_tokens(tokens),
            }
            self.ident.to_tokens(tokens);
            self.generics.to_tokens(tokens);
            // ... then the body, with where-clause placement mirroring the
            // grammar handled in `parsing::data_struct`: before braces for
            // named/unit forms, after the parens for tuple structs.
            match &self.data {
                Data::Struct(data) => match &data.fields {
                    Fields::Named(fields) => {
                        self.generics.where_clause.to_tokens(tokens);
                        fields.to_tokens(tokens);
                    }
                    Fields::Unnamed(fields) => {
                        fields.to_tokens(tokens);
                        self.generics.where_clause.to_tokens(tokens);
                        TokensOrDefault(&data.semi_token).to_tokens(tokens);
                    }
                    Fields::Unit => {
                        self.generics.where_clause.to_tokens(tokens);
                        TokensOrDefault(&data.semi_token).to_tokens(tokens);
                    }
                },
                Data::Enum(data) => {
                    self.generics.where_clause.to_tokens(tokens);
                    data.brace_token.surround(tokens, |tokens| {
                        data.variants.to_tokens(tokens);
                    });
                }
                Data::Union(data) => {
                    self.generics.where_clause.to_tokens(tokens);
                    data.fields.to_tokens(tokens);
                }
            }
        }
    }
}
|
|
@ -0,0 +1,194 @@
|
|||
//! Extensions to the parsing API with niche applicability.
|
||||
|
||||
use super::*;
|
||||
|
||||
// Opt-in "fork, try, commit" support; implemented for ParseBuffer below.
/// Extensions to the `ParseStream` API to support speculative parsing.
pub trait Speculative {
    /// Advance this parse stream to the position of a forked parse stream.
    ///
    /// This is the opposite operation to [`ParseStream::fork`]. You can fork a
    /// parse stream, perform some speculative parsing, then join the original
    /// stream to the fork to "commit" the parsing from the fork to the main
    /// stream.
    ///
    /// If you can avoid doing this, you should, as it limits the ability to
    /// generate useful errors. That said, it is often the only way to parse
    /// syntax of the form `A* B*` for arbitrary syntax `A` and `B`. The problem
    /// is that when the fork fails to parse an `A`, it's impossible to tell
    /// whether that was because of a syntax error and the user meant to provide
    /// an `A`, or that the `A`s are finished and it's time to start parsing
    /// `B`s. Use with care.
    ///
    /// Also note that if `A` is a subset of `B`, `A* B*` can be parsed by
    /// parsing `B*` and removing the leading members of `A` from the
    /// repetition, bypassing the need to involve the downsides associated with
    /// speculative parsing.
    ///
    /// [`ParseStream::fork`]: ParseBuffer::fork
    ///
    /// # Example
    ///
    /// There has been chatter about the possibility of making the colons in the
    /// turbofish syntax like `path::to::<T>` no longer required by accepting
    /// `path::to<T>` in expression position. Specifically, according to [RFC
    /// 2544], [`PathSegment`] parsing should always try to consume a following
    /// `<` token as the start of generic arguments, and reset to the `<` if
    /// that fails (e.g. the token is acting as a less-than operator).
    ///
    /// This is the exact kind of parsing behavior which requires the "fork,
    /// try, commit" behavior that [`ParseStream::fork`] discourages. With
    /// `advance_to`, we can avoid having to parse the speculatively parsed
    /// content a second time.
    ///
    /// This change in behavior can be implemented in syn by replacing just the
    /// `Parse` implementation for `PathSegment`:
    ///
    /// ```
    /// # use syn::ext::IdentExt;
    /// use syn::parse::discouraged::Speculative;
    /// # use syn::parse::{Parse, ParseStream};
    /// # use syn::{Ident, PathArguments, Result, Token};
    ///
    /// pub struct PathSegment {
    ///     pub ident: Ident,
    ///     pub arguments: PathArguments,
    /// }
    /// #
    /// # impl<T> From<T> for PathSegment
    /// # where
    /// #     T: Into<Ident>,
    /// # {
    /// #     fn from(ident: T) -> Self {
    /// #         PathSegment {
    /// #             ident: ident.into(),
    /// #             arguments: PathArguments::None,
    /// #         }
    /// #     }
    /// # }
    ///
    /// impl Parse for PathSegment {
    ///     fn parse(input: ParseStream) -> Result<Self> {
    ///         if input.peek(Token![super])
    ///             || input.peek(Token![self])
    ///             || input.peek(Token![Self])
    ///             || input.peek(Token![crate])
    ///         {
    ///             let ident = input.call(Ident::parse_any)?;
    ///             return Ok(PathSegment::from(ident));
    ///         }
    ///
    ///         let ident = input.parse()?;
    ///         if input.peek(Token![::]) && input.peek3(Token![<]) {
    ///             return Ok(PathSegment {
    ///                 ident,
    ///                 arguments: PathArguments::AngleBracketed(input.parse()?),
    ///             });
    ///         }
    ///         if input.peek(Token![<]) && !input.peek(Token![<=]) {
    ///             let fork = input.fork();
    ///             if let Ok(arguments) = fork.parse() {
    ///                 input.advance_to(&fork);
    ///                 return Ok(PathSegment {
    ///                     ident,
    ///                     arguments: PathArguments::AngleBracketed(arguments),
    ///                 });
    ///             }
    ///         }
    ///         Ok(PathSegment::from(ident))
    ///     }
    /// }
    ///
    /// # syn::parse_str::<PathSegment>("a<b,c>").unwrap();
    /// ```
    ///
    /// # Drawbacks
    ///
    /// The main drawback of this style of speculative parsing is in error
    /// presentation. Even if the lookahead is the "correct" parse, the error
    /// that is shown is that of the "fallback" parse. To use the same example
    /// as the turbofish above, take the following unfinished "turbofish":
    ///
    /// ```text
    /// let _ = f<&'a fn(), for<'a> serde::>();
    /// ```
    ///
    /// If this is parsed as generic arguments, we can provide the error message
    ///
    /// ```text
    /// error: expected identifier
    ///  --> src.rs:L:C
    ///   |
    /// L | let _ = f<&'a fn(), for<'a> serde::>();
    ///   |                                     ^
    /// ```
    ///
    /// but if parsed using the above speculative parsing, it falls back to
    /// assuming that the `<` is a less-than when it fails to parse the generic
    /// arguments, and tries to interpret the `&'a` as the start of a labelled
    /// loop, resulting in the much less helpful error
    ///
    /// ```text
    /// error: expected `:`
    ///  --> src.rs:L:C
    ///   |
    /// L | let _ = f<&'a fn(), for<'a> serde::>();
    ///   |            ^^
    /// ```
    ///
    /// This can be mitigated with various heuristics (two examples: show both
    /// forks' parse errors, or show the one that consumed more tokens), but
    /// when you can control the grammar, sticking to something that can be
    /// parsed LL(3) and without the LL(*) speculative parsing this makes
    /// possible, displaying reasonable errors becomes much more simple.
    ///
    /// [RFC 2544]: https://github.com/rust-lang/rfcs/pull/2544
    /// [`PathSegment`]: crate::PathSegment
    ///
    /// # Performance
    ///
    /// This method performs a cheap fixed amount of work that does not depend
    /// on how far apart the two streams are positioned.
    ///
    /// # Panics
    ///
    /// The forked stream in the argument of `advance_to` must have been
    /// obtained by forking `self`. Attempting to advance to any other stream
    /// will cause a panic.
    fn advance_to(&self, fork: &Self);
}
|
||||
|
||||
impl<'a> Speculative for ParseBuffer<'a> {
    // Commits a fork: moves `self`'s cursor to `fork`'s position and
    // reconciles the two streams' "unexpected token" state.
    fn advance_to(&self, fork: &Self) {
        // Documented panic: the fork must originate from this stream.
        if !crate::buffer::same_scope(self.cursor(), fork.cursor()) {
            panic!("Fork was not derived from the advancing parse stream");
        }

        let (self_unexp, self_sp) = inner_unexpected(self);
        let (fork_unexp, fork_sp) = inner_unexpected(fork);
        if !Rc::ptr_eq(&self_unexp, &fork_unexp) {
            match (fork_sp, self_sp) {
                // Unexpected set on the fork, but not on `self`, copy it over.
                (Some(span), None) => {
                    self_unexp.set(Unexpected::Some(span));
                }
                // Unexpected unset. Use chain to propagate errors from fork.
                (None, None) => {
                    fork_unexp.set(Unexpected::Chain(self_unexp));

                    // Ensure toplevel 'unexpected' tokens from the fork don't
                    // bubble up the chain by replacing the root `unexpected`
                    // pointer, only 'unexpected' tokens from existing group
                    // parsers should bubble.
                    fork.unexpected
                        .set(Some(Rc::new(Cell::new(Unexpected::None))));
                }
                // Unexpected has been set on `self`. No changes needed.
                (_, Some(_)) => {}
            }
        }

        // See comment on `cell` in the struct definition.
        // NOTE(review): the transmute extends the cursor lifetime to
        // 'static; soundness rests on the invariant documented there.
        self.cell
            .set(unsafe { mem::transmute::<Cursor, Cursor<'static>>(fork.cursor()) });
    }
}
|
|
@ -0,0 +1,58 @@
|
|||
use std::iter;
|
||||
use std::mem::ManuallyDrop;
|
||||
use std::ops::{Deref, DerefMut};
|
||||
use std::option;
|
||||
use std::slice;
|
||||
|
||||
// Wrapper that statically guarantees its contents are never dropped: the
// payload lives in a `ManuallyDrop`, and construction is restricted (via the
// `TrivialDrop` bound) to types whose drop would have been a no-op anyway.
#[repr(transparent)]
pub(crate) struct NoDrop<T: ?Sized>(ManuallyDrop<T>);

impl<T> NoDrop<T> {
    /// Wraps `value`; only available for types with trivially skippable drop.
    pub(crate) fn new(value: T) -> Self
    where
        T: TrivialDrop,
    {
        Self(ManuallyDrop::new(value))
    }
}

impl<T: ?Sized> Deref for NoDrop<T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        &*self.0
    }
}

impl<T: ?Sized> DerefMut for NoDrop<T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut *self.0
    }
}

/// Marker for types whose `Drop` does nothing, making it harmless to skip.
pub(crate) trait TrivialDrop {}

impl<T> TrivialDrop for iter::Empty<T> {}
impl<'a, T> TrivialDrop for slice::Iter<'a, T> {}
impl<'a, T> TrivialDrop for slice::IterMut<'a, T> {}
impl<'a, T> TrivialDrop for option::IntoIter<&'a T> {}
impl<'a, T> TrivialDrop for option::IntoIter<&'a mut T> {}
|
||||
|
||||
#[test]
fn test_needs_drop() {
    use std::mem::needs_drop;

    // A type with observable drop glue, to sanity-check `needs_drop` itself.
    struct NeedsDrop;

    impl Drop for NeedsDrop {
        fn drop(&mut self) {}
    }

    assert!(needs_drop::<NeedsDrop>());

    // Test each of the types with a handwritten TrivialDrop impl above.
    // Even when parameterized by a dropping type, these iterators hold only
    // references (or nothing), so they must have no drop glue themselves.
    assert!(!needs_drop::<iter::Empty<NeedsDrop>>());
    assert!(!needs_drop::<slice::Iter<NeedsDrop>>());
    assert!(!needs_drop::<slice::IterMut<NeedsDrop>>());
    assert!(!needs_drop::<option::IntoIter<&NeedsDrop>>());
    assert!(!needs_drop::<option::IntoIter<&mut NeedsDrop>>());
}
|
|
@ -0,0 +1,428 @@
|
|||
#[cfg(feature = "parsing")]
|
||||
use crate::buffer::Cursor;
|
||||
use crate::thread::ThreadBound;
|
||||
use proc_macro2::{
|
||||
Delimiter, Group, Ident, LexError, Literal, Punct, Spacing, Span, TokenStream, TokenTree,
|
||||
};
|
||||
#[cfg(feature = "printing")]
|
||||
use quote::ToTokens;
|
||||
use std::fmt::{self, Debug, Display};
|
||||
use std::iter::FromIterator;
|
||||
use std::slice;
|
||||
use std::vec;
|
||||
|
||||
/// The result of a Syn parser.
///
/// Alias of `std::result::Result` with Syn's [`Error`] as the error type.
pub type Result<T> = std::result::Result<T, Error>;
|
||||
|
||||
/// Error returned when a Syn parser cannot parse the input tokens.
|
||||
///
|
||||
/// # Error reporting in proc macros
|
||||
///
|
||||
/// The correct way to report errors back to the compiler from a procedural
|
||||
/// macro is by emitting an appropriately spanned invocation of
|
||||
/// [`compile_error!`] in the generated code. This produces a better diagnostic
|
||||
/// message than simply panicking the macro.
|
||||
///
|
||||
/// [`compile_error!`]: std::compile_error!
|
||||
///
|
||||
/// When parsing macro input, the [`parse_macro_input!`] macro handles the
|
||||
/// conversion to `compile_error!` automatically.
|
||||
///
|
||||
/// [`parse_macro_input!`]: crate::parse_macro_input!
|
||||
///
|
||||
/// ```
|
||||
/// # extern crate proc_macro;
|
||||
/// #
|
||||
/// use proc_macro::TokenStream;
|
||||
/// use syn::{parse_macro_input, AttributeArgs, ItemFn};
|
||||
///
|
||||
/// # const IGNORE: &str = stringify! {
|
||||
/// #[proc_macro_attribute]
|
||||
/// # };
|
||||
/// pub fn my_attr(args: TokenStream, input: TokenStream) -> TokenStream {
|
||||
/// let args = parse_macro_input!(args as AttributeArgs);
|
||||
/// let input = parse_macro_input!(input as ItemFn);
|
||||
///
|
||||
/// /* ... */
|
||||
/// # TokenStream::new()
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// For errors that arise later than the initial parsing stage, the
|
||||
/// [`.to_compile_error()`] or [`.into_compile_error()`] methods can be used to
|
||||
/// perform an explicit conversion to `compile_error!`.
|
||||
///
|
||||
/// [`.to_compile_error()`]: Error::to_compile_error
|
||||
/// [`.into_compile_error()`]: Error::into_compile_error
|
||||
///
|
||||
/// ```
|
||||
/// # extern crate proc_macro;
|
||||
/// #
|
||||
/// # use proc_macro::TokenStream;
|
||||
/// # use syn::{parse_macro_input, DeriveInput};
|
||||
/// #
|
||||
/// # const IGNORE: &str = stringify! {
|
||||
/// #[proc_macro_derive(MyDerive)]
|
||||
/// # };
|
||||
/// pub fn my_derive(input: TokenStream) -> TokenStream {
|
||||
/// let input = parse_macro_input!(input as DeriveInput);
|
||||
///
|
||||
/// // fn(DeriveInput) -> syn::Result<proc_macro2::TokenStream>
|
||||
/// expand::my_derive(input)
|
||||
/// .unwrap_or_else(syn::Error::into_compile_error)
|
||||
/// .into()
|
||||
/// }
|
||||
/// #
|
||||
/// # mod expand {
|
||||
/// # use proc_macro2::TokenStream;
|
||||
/// # use syn::{DeriveInput, Result};
|
||||
/// #
|
||||
/// # pub fn my_derive(input: DeriveInput) -> Result<TokenStream> {
|
||||
/// # unimplemented!()
|
||||
/// # }
|
||||
/// # }
|
||||
/// ```
|
||||
pub struct Error {
    // Non-empty by construction: every constructor pushes exactly one
    // message and `combine` only appends. `Display` and `span()` rely on
    // this when indexing `messages[0]`.
    messages: Vec<ErrorMessage>,
}
|
||||
|
||||
// One diagnostic: a message plus the span range it should be reported on.
struct ErrorMessage {
    // Span is implemented as an index into a thread-local interner to keep the
    // size small. It is not safe to access from a different thread. We want
    // errors to be Send and Sync to play nicely with the Failure crate, so pin
    // the span we're given to its original thread and assume it is
    // Span::call_site if accessed from any other thread.
    start_span: ThreadBound<Span>,
    end_span: ThreadBound<Span>,
    message: String,
}
|
||||
|
||||
// Compile-time assertion that `Error` is Send + Sync (this is what the
// ThreadBound wrappers above are for); fails to compile if the bound breaks.
#[cfg(test)]
struct _Test
where
    Error: Send + Sync;
|
||||
|
||||
impl Error {
    /// Usually the [`ParseStream::error`] method will be used instead, which
    /// automatically uses the correct span from the current position of the
    /// parse stream.
    ///
    /// Use `Error::new` when the error needs to be triggered on some span other
    /// than where the parse stream is currently positioned.
    ///
    /// [`ParseStream::error`]: crate::parse::ParseBuffer::error
    ///
    /// # Example
    ///
    /// ```
    /// use syn::{Error, Ident, LitStr, Result, Token};
    /// use syn::parse::ParseStream;
    ///
    /// // Parses input that looks like `name = "string"` where the key must be
    /// // the identifier `name` and the value may be any string literal.
    /// // Returns the string literal.
    /// fn parse_name(input: ParseStream) -> Result<LitStr> {
    ///     let name_token: Ident = input.parse()?;
    ///     if name_token != "name" {
    ///         // Trigger an error not on the current position of the stream,
    ///         // but on the position of the unexpected identifier.
    ///         return Err(Error::new(name_token.span(), "expected `name`"));
    ///     }
    ///     input.parse::<Token![=]>()?;
    ///     let s: LitStr = input.parse()?;
    ///     Ok(s)
    /// }
    /// ```
    pub fn new<T: Display>(span: Span, message: T) -> Self {
        // Non-generic inner function: the generic shim only stringifies the
        // message, keeping codegen per `T: Display` instantiation small.
        return new(span, message.to_string());

        fn new(span: Span, message: String) -> Error {
            Error {
                messages: vec![ErrorMessage {
                    start_span: ThreadBound::new(span),
                    end_span: ThreadBound::new(span),
                    message,
                }],
            }
        }
    }

    /// Creates an error with the specified message spanning the given syntax
    /// tree node.
    ///
    /// Unlike the `Error::new` constructor, this constructor takes an argument
    /// `tokens` which is a syntax tree node. This allows the resulting `Error`
    /// to attempt to span all tokens inside of `tokens`. While you would
    /// typically be able to use the `Spanned` trait with the above `Error::new`
    /// constructor, implementation limitations today mean that
    /// `Error::new_spanned` may provide a higher-quality error message on
    /// stable Rust.
    ///
    /// When in doubt it's recommended to stick to `Error::new` (or
    /// `ParseStream::error`)!
    #[cfg(feature = "printing")]
    pub fn new_spanned<T: ToTokens, U: Display>(tokens: T, message: U) -> Self {
        return new_spanned(tokens.into_token_stream(), message.to_string());

        fn new_spanned(tokens: TokenStream, message: String) -> Error {
            // Record the span of the node's first token and its last token;
            // an empty stream falls back to call_site for both.
            let mut iter = tokens.into_iter();
            let start = iter.next().map_or_else(Span::call_site, |t| t.span());
            let end = iter.last().map_or(start, |t| t.span());
            Error {
                messages: vec![ErrorMessage {
                    start_span: ThreadBound::new(start),
                    end_span: ThreadBound::new(end),
                    message,
                }],
            }
        }
    }

    /// The source location of the error.
    ///
    /// Spans are not thread-safe so this function returns `Span::call_site()`
    /// if called from a different thread than the one on which the `Error` was
    /// originally created.
    pub fn span(&self) -> Span {
        let start = match self.messages[0].start_span.get() {
            Some(span) => *span,
            None => return Span::call_site(),
        };
        let end = match self.messages[0].end_span.get() {
            Some(span) => *span,
            None => return Span::call_site(),
        };
        // `join` may return None (depending on the span implementation);
        // fall back to just the start span in that case.
        start.join(end).unwrap_or(start)
    }

    /// Render the error as an invocation of [`compile_error!`].
    ///
    /// The [`parse_macro_input!`] macro provides a convenient way to invoke
    /// this method correctly in a procedural macro.
    ///
    /// [`compile_error!`]: std::compile_error!
    /// [`parse_macro_input!`]: crate::parse_macro_input!
    pub fn to_compile_error(&self) -> TokenStream {
        // One compile_error! invocation per recorded message.
        self.messages
            .iter()
            .map(ErrorMessage::to_compile_error)
            .collect()
    }

    /// Render the error as an invocation of [`compile_error!`].
    ///
    /// [`compile_error!`]: std::compile_error!
    ///
    /// # Example
    ///
    /// ```
    /// # extern crate proc_macro;
    /// #
    /// use proc_macro::TokenStream;
    /// use syn::{parse_macro_input, DeriveInput, Error};
    ///
    /// # const _: &str = stringify! {
    /// #[proc_macro_derive(MyTrait)]
    /// # };
    /// pub fn derive_my_trait(input: TokenStream) -> TokenStream {
    ///     let input = parse_macro_input!(input as DeriveInput);
    ///     my_trait::expand(input)
    ///         .unwrap_or_else(Error::into_compile_error)
    ///         .into()
    /// }
    ///
    /// mod my_trait {
    ///     use proc_macro2::TokenStream;
    ///     use syn::{DeriveInput, Result};
    ///
    ///     pub(crate) fn expand(input: DeriveInput) -> Result<TokenStream> {
    ///         /* ... */
    ///         # unimplemented!()
    ///     }
    /// }
    /// ```
    pub fn into_compile_error(self) -> TokenStream {
        self.to_compile_error()
    }

    /// Add another error message to self such that when `to_compile_error()` is
    /// called, both errors will be emitted together.
    pub fn combine(&mut self, another: Error) {
        self.messages.extend(another.messages);
    }
}
|
||||
|
||||
impl ErrorMessage {
    // Build `compile_error! { "message" }` with the ident/bang spanned to the
    // start of the range and the braced message spanned to the end, so the
    // compiler underlines the whole region.
    fn to_compile_error(&self) -> TokenStream {
        // Fall back to call_site if the spans were created on another thread.
        let start = self
            .start_span
            .get()
            .cloned()
            .unwrap_or_else(Span::call_site);
        let end = self.end_span.get().cloned().unwrap_or_else(Span::call_site);

        // compile_error!($message)
        TokenStream::from_iter(vec![
            TokenTree::Ident(Ident::new("compile_error", start)),
            TokenTree::Punct({
                let mut punct = Punct::new('!', Spacing::Alone);
                punct.set_span(start);
                punct
            }),
            TokenTree::Group({
                let mut group = Group::new(Delimiter::Brace, {
                    TokenStream::from_iter(vec![TokenTree::Literal({
                        let mut string = Literal::string(&self.message);
                        string.set_span(end);
                        string
                    })])
                });
                group.set_span(end);
                group
            }),
        ])
    }
}
|
||||
|
||||
// Crate-internal constructor: error at a cursor position within `scope`.
// At end of input the message is prefixed accordingly and reported on the
// whole scope; otherwise it is reported at the group opening the cursor.
#[cfg(feature = "parsing")]
pub fn new_at<T: Display>(scope: Span, cursor: Cursor, message: T) -> Error {
    if cursor.eof() {
        Error::new(scope, format!("unexpected end of input, {}", message))
    } else {
        let span = crate::buffer::open_span_of_group(cursor);
        Error::new(span, message)
    }
}

// Crate-internal constructor for an error spanning an explicit start..end
// range (unlike `Error::new`, which uses one span for both endpoints).
#[cfg(all(feature = "parsing", any(feature = "full", feature = "derive")))]
pub fn new2<T: Display>(start: Span, end: Span, message: T) -> Error {
    // Non-generic inner function keeps per-`T` codegen minimal.
    return new2(start, end, message.to_string());

    fn new2(start: Span, end: Span, message: String) -> Error {
        Error {
            messages: vec![ErrorMessage {
                start_span: ThreadBound::new(start),
                end_span: ThreadBound::new(end),
                message,
            }],
        }
    }
}
|
||||
|
||||
impl Debug for Error {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
if self.messages.len() == 1 {
|
||||
formatter
|
||||
.debug_tuple("Error")
|
||||
.field(&self.messages[0])
|
||||
.finish()
|
||||
} else {
|
||||
formatter
|
||||
.debug_tuple("Error")
|
||||
.field(&self.messages)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for ErrorMessage {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
Debug::fmt(&self.message, formatter)
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for Error {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
formatter.write_str(&self.messages[0].message)
|
||||
}
|
||||
}
|
||||
|
||||
impl Clone for Error {
|
||||
fn clone(&self) -> Self {
|
||||
Error {
|
||||
messages: self.messages.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Clone for ErrorMessage {
|
||||
fn clone(&self) -> Self {
|
||||
let start = self
|
||||
.start_span
|
||||
.get()
|
||||
.cloned()
|
||||
.unwrap_or_else(Span::call_site);
|
||||
let end = self.end_span.get().cloned().unwrap_or_else(Span::call_site);
|
||||
ErrorMessage {
|
||||
start_span: ThreadBound::new(start),
|
||||
end_span: ThreadBound::new(end),
|
||||
message: self.message.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::error::Error for Error {}
|
||||
|
||||
impl From<LexError> for Error {
|
||||
fn from(err: LexError) -> Self {
|
||||
Error::new(err.span(), "lex error")
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoIterator for Error {
|
||||
type Item = Error;
|
||||
type IntoIter = IntoIter;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
IntoIter {
|
||||
messages: self.messages.into_iter(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct IntoIter {
|
||||
messages: vec::IntoIter<ErrorMessage>,
|
||||
}
|
||||
|
||||
impl Iterator for IntoIter {
|
||||
type Item = Error;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
Some(Error {
|
||||
messages: vec![self.messages.next()?],
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> IntoIterator for &'a Error {
|
||||
type Item = Error;
|
||||
type IntoIter = Iter<'a>;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
Iter {
|
||||
messages: self.messages.iter(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Iter<'a> {
|
||||
messages: slice::Iter<'a, ErrorMessage>,
|
||||
}
|
||||
|
||||
impl<'a> Iterator for Iter<'a> {
|
||||
type Item = Error;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
Some(Error {
|
||||
messages: vec![self.messages.next()?.clone()],
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl Extend<Error> for Error {
|
||||
fn extend<T: IntoIterator<Item = Error>>(&mut self, iter: T) {
|
||||
for err in iter {
|
||||
self.combine(err);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,39 @@
|
|||
// Re-exports consumed by code that syn's macros generate. NOTE(review):
// these paths are implementation details, not user-facing API — confirm
// against the crate's `__private` conventions before relying on them.
pub use std::clone::Clone;
pub use std::cmp::{Eq, PartialEq};
pub use std::default::Default;
pub use std::fmt::{self, Debug, Formatter};
pub use std::hash::{Hash, Hasher};
pub use std::marker::Copy;
pub use std::option::Option::{None, Some};
pub use std::result::Result::{Err, Ok};

#[cfg(feature = "printing")]
pub extern crate quote;

pub use proc_macro2::{Span, TokenStream as TokenStream2};

#[cfg(feature = "parsing")]
pub use crate::group::{parse_braces, parse_brackets, parse_parens};

pub use crate::span::IntoSpans;

// `proc_macro` itself is unavailable on wasm targets without a host.
#[cfg(all(
    not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
    feature = "proc-macro"
))]
pub use proc_macro::TokenStream;

#[cfg(feature = "printing")]
pub use quote::{ToTokens, TokenStreamExt};

// Aliases so generated code can name the primitive types even if the caller
// shadowed `bool`/`str`; the `help` indirection avoids the aliases being
// self-referential.
#[allow(non_camel_case_types)]
pub type bool = help::Bool;
#[allow(non_camel_case_types)]
pub type str = help::Str;

mod help {
    pub type Bool = bool;
    pub type Str = str;
}

// Unconstructible outside the crate: used to seal public traits/macros.
pub struct private(pub(crate) ());
|
Разница между файлами не показана из-за своего большого размера
Загрузить разницу
|
@ -0,0 +1,139 @@
|
|||
//! Extension traits to provide parsing methods on foreign types.
|
||||
//!
|
||||
//! *This module is available only if Syn is built with the `"parsing"` feature.*
|
||||
|
||||
use crate::buffer::Cursor;
|
||||
use crate::parse::Peek;
|
||||
use crate::parse::{ParseStream, Result};
|
||||
use crate::sealed::lookahead;
|
||||
use crate::token::CustomToken;
|
||||
use proc_macro2::Ident;
|
||||
|
||||
/// Additional methods for `Ident` not provided by proc-macro2 or libproc_macro.
///
/// This trait is sealed and cannot be implemented for types outside of Syn. It
/// is implemented only for `proc_macro2::Ident`.
///
/// *This trait is available only if Syn is built with the `"parsing"` feature.*
pub trait IdentExt: Sized + private::Sealed {
    /// Parses any identifier including keywords.
    ///
    /// This is useful when parsing macro input which allows Rust keywords as
    /// identifiers.
    ///
    /// # Example
    ///
    /// ```
    /// use syn::{Error, Ident, Result, Token};
    /// use syn::ext::IdentExt;
    /// use syn::parse::ParseStream;
    ///
    /// mod kw {
    ///     syn::custom_keyword!(name);
    /// }
    ///
    /// // Parses input that looks like `name = NAME` where `NAME` can be
    /// // any identifier.
    /// //
    /// // Examples:
    /// //
    /// //     name = anything
    /// //     name = impl
    /// fn parse_dsl(input: ParseStream) -> Result<Ident> {
    ///     input.parse::<kw::name>()?;
    ///     input.parse::<Token![=]>()?;
    ///     let name = input.call(Ident::parse_any)?;
    ///     Ok(name)
    /// }
    /// ```
    fn parse_any(input: ParseStream) -> Result<Self>;

    /// Peeks any identifier including keywords. Usage:
    /// `input.peek(Ident::peek_any)`
    ///
    /// This is different from `input.peek(Ident)` which only returns true in
    /// the case of an ident which is not a Rust keyword.
    // A const rather than a fn so it can be passed where `Peek` is expected.
    #[allow(non_upper_case_globals)]
    const peek_any: private::PeekFn = private::PeekFn;

    /// Strips the raw marker `r#`, if any, from the beginning of an ident.
    ///
    ///   - unraw(`x`) = `x`
    ///   - unraw(`move`) = `move`
    ///   - unraw(`r#move`) = `move`
    ///
    /// # Example
    ///
    /// In the case of interop with other languages like Python that have a
    /// different set of keywords than Rust, we might come across macro input
    /// that involves raw identifiers to refer to ordinary variables in the
    /// other language with a name that happens to be a Rust keyword.
    ///
    /// The function below appends an identifier from the caller's input onto a
    /// fixed prefix. Without using `unraw()`, this would tend to produce
    /// invalid identifiers like `__pyo3_get_r#move`.
    ///
    /// ```
    /// use proc_macro2::Span;
    /// use syn::Ident;
    /// use syn::ext::IdentExt;
    ///
    /// fn ident_for_getter(variable: &Ident) -> Ident {
    ///     let getter = format!("__pyo3_get_{}", variable.unraw());
    ///     Ident::new(&getter, Span::call_site())
    /// }
    /// ```
    fn unraw(&self) -> Ident;
}
|
||||
|
||||
impl IdentExt for Ident {
|
||||
fn parse_any(input: ParseStream) -> Result<Self> {
|
||||
input.step(|cursor| match cursor.ident() {
|
||||
Some((ident, rest)) => Ok((ident, rest)),
|
||||
None => Err(cursor.error("expected ident")),
|
||||
})
|
||||
}
|
||||
|
||||
fn unraw(&self) -> Ident {
|
||||
let string = self.to_string();
|
||||
if string.starts_with("r#") {
|
||||
Ident::new(&string[2..], self.span())
|
||||
} else {
|
||||
self.clone()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Wires `Ident::peek_any` into the lookahead machinery: peeking with the
// PeekFn token type matches any identifier, keywords included.
impl Peek for private::PeekFn {
    type Token = private::IdentAny;
}

impl CustomToken for private::IdentAny {
    fn peek(cursor: Cursor) -> bool {
        cursor.ident().is_some()
    }

    fn display() -> &'static str {
        "identifier"
    }
}

impl lookahead::Sealed for private::PeekFn {}

// Helper types kept private so the trait stays sealed.
mod private {
    use proc_macro2::Ident;

    pub trait Sealed {}

    impl Sealed for Ident {}

    pub struct PeekFn;
    pub struct IdentAny;

    impl Copy for PeekFn {}
    impl Clone for PeekFn {
        fn clone(&self) -> Self {
            *self
        }
    }
}
|
|
@ -0,0 +1,125 @@
|
|||
use super::*;
|
||||
|
||||
ast_struct! {
    /// A complete file of Rust source code.
    ///
    /// *This type is available only if Syn is built with the `"full"` feature.*
    ///
    /// # Example
    ///
    /// Parse a Rust source file into a `syn::File` and print out a debug
    /// representation of the syntax tree.
    ///
    /// ```
    /// use std::env;
    /// use std::fs::File;
    /// use std::io::Read;
    /// use std::process;
    ///
    /// fn main() {
    /// # }
    /// #
    /// # fn fake_main() {
    ///     let mut args = env::args();
    ///     let _ = args.next(); // executable name
    ///
    ///     let filename = match (args.next(), args.next()) {
    ///         (Some(filename), None) => filename,
    ///         _ => {
    ///             eprintln!("Usage: dump-syntax path/to/filename.rs");
    ///             process::exit(1);
    ///         }
    ///     };
    ///
    ///     let mut file = File::open(&filename).expect("Unable to open file");
    ///
    ///     let mut src = String::new();
    ///     file.read_to_string(&mut src).expect("Unable to read file");
    ///
    ///     let syntax = syn::parse_file(&src).expect("Unable to parse file");
    ///
    ///     // Debug impl is available if Syn is built with "extra-traits" feature.
    ///     println!("{:#?}", syntax);
    /// }
    /// ```
    ///
    /// Running with its own source code as input, this program prints output
    /// that begins with:
    ///
    /// ```text
    /// File {
    ///     shebang: None,
    ///     attrs: [],
    ///     items: [
    ///         Use(
    ///             ItemUse {
    ///                 attrs: [],
    ///                 vis: Inherited,
    ///                 use_token: Use,
    ///                 leading_colon: None,
    ///                 tree: Path(
    ///                     UsePath {
    ///                         ident: Ident(
    ///                             std,
    ///                         ),
    ///                         colon2_token: Colon2,
    ///                         tree: Name(
    ///                             UseName {
    ///                                 ident: Ident(
    ///                                     env,
    ///                                 ),
    ///                             },
    ///                         ),
    ///                     },
    ///                 ),
    ///                 semi_token: Semi,
    ///             },
    ///         ),
    ///         ...
    /// ```
    #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
    pub struct File {
        // e.g. `#!/usr/bin/env rust`. The `Parse` impl below always leaves
        // this None; presumably set by `syn::parse_file` — confirm there.
        pub shebang: Option<String>,
        // Inner attributes (`#![...]`) of the file.
        pub attrs: Vec<Attribute>,
        // Top-level items, in source order.
        pub items: Vec<Item>,
    }
}
|
||||
|
||||
#[cfg(feature = "parsing")]
pub mod parsing {
    use super::*;
    use crate::parse::{Parse, ParseStream, Result};

    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    impl Parse for File {
        fn parse(input: ParseStream) -> Result<Self> {
            // Inner attributes come first, then items until the stream is
            // exhausted. `shebang` is always None here — a shebang never
            // appears in the token stream this parser sees.
            let attrs = input.call(Attribute::parse_inner)?;
            let mut items = Vec::new();
            while !input.is_empty() {
                items.push(input.parse()?);
            }
            Ok(File {
                shebang: None,
                attrs,
                items,
            })
        }
    }
}
|
||||
|
||||
#[cfg(feature = "printing")]
mod printing {
    use super::*;
    use crate::attr::FilterAttrs;
    use proc_macro2::TokenStream;
    use quote::{ToTokens, TokenStreamExt};

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for File {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            // Inner attributes first, then every item in order. A shebang,
            // if present, has no token representation and is not emitted.
            tokens.append_all(self.attrs.inner());
            tokens.append_all(self.items.iter());
        }
    }
}
|
Разница между файлами не показана из-за своего большого размера
Загрузить разницу
Разница между файлами не показана из-за своего большого размера
Загрузить разницу
Разница между файлами не показана из-за своего большого размера
Загрузить разницу
Разница между файлами не показана из-за своего большого размера
Загрузить разницу
Разница между файлами не показана из-за своего большого размера
Загрузить разницу
Разница между файлами не показана из-за своего большого размера
Загрузить разницу
Разница между файлами не показана из-за своего большого размера
Загрузить разницу
|
@ -0,0 +1,154 @@
|
|||
#[cfg(feature = "fold")]
pub mod fold {
    use crate::fold::Fold;
    use crate::punctuated::{Pair, Punctuated};
    use proc_macro2::Span;

    // Maps a function over every element of a container while preserving
    // its shape (a Vec stays a Vec, a Punctuated keeps its punctuation).
    pub trait FoldHelper {
        type Item;
        fn lift<F>(self, f: F) -> Self
        where
            F: FnMut(Self::Item) -> Self::Item;
    }

    impl<T> FoldHelper for Vec<T> {
        type Item = T;
        fn lift<F>(self, f: F) -> Self
        where
            F: FnMut(Self::Item) -> Self::Item,
        {
            self.into_iter().map(f).collect()
        }
    }

    impl<T, U> FoldHelper for Punctuated<T, U> {
        type Item = T;
        fn lift<F>(self, mut f: F) -> Self
        where
            F: FnMut(Self::Item) -> Self::Item,
        {
            // Fold the values but carry the separator tokens through intact.
            self.into_pairs()
                .map(Pair::into_tuple)
                .map(|(t, u)| Pair::new(f(t), u))
                .collect()
        }
    }

    // Entry point used with the span-group impls below; token types carry
    // 1–3 spans, hence the fixed-size array impls.
    pub fn tokens_helper<F: Fold + ?Sized, S: Spans>(folder: &mut F, spans: &S) -> S {
        spans.fold(folder)
    }

    pub trait Spans {
        fn fold<F: Fold + ?Sized>(&self, folder: &mut F) -> Self;
    }

    impl Spans for Span {
        fn fold<F: Fold + ?Sized>(&self, folder: &mut F) -> Self {
            folder.fold_span(*self)
        }
    }

    impl Spans for [Span; 1] {
        fn fold<F: Fold + ?Sized>(&self, folder: &mut F) -> Self {
            [folder.fold_span(self[0])]
        }
    }

    impl Spans for [Span; 2] {
        fn fold<F: Fold + ?Sized>(&self, folder: &mut F) -> Self {
            [folder.fold_span(self[0]), folder.fold_span(self[1])]
        }
    }

    impl Spans for [Span; 3] {
        fn fold<F: Fold + ?Sized>(&self, folder: &mut F) -> Self {
            [
                folder.fold_span(self[0]),
                folder.fold_span(self[1]),
                folder.fold_span(self[2]),
            ]
        }
    }
}
|
||||
|
||||
#[cfg(feature = "visit")]
pub mod visit {
    use crate::visit::Visit;
    use proc_macro2::Span;

    // Read-only counterpart of fold::tokens_helper: visit each span a token
    // type carries (token types hold 1–3 spans).
    pub fn tokens_helper<'ast, V: Visit<'ast> + ?Sized, S: Spans>(visitor: &mut V, spans: &S) {
        spans.visit(visitor);
    }

    pub trait Spans {
        fn visit<'ast, V: Visit<'ast> + ?Sized>(&self, visitor: &mut V);
    }

    impl Spans for Span {
        fn visit<'ast, V: Visit<'ast> + ?Sized>(&self, visitor: &mut V) {
            visitor.visit_span(self);
        }
    }

    impl Spans for [Span; 1] {
        fn visit<'ast, V: Visit<'ast> + ?Sized>(&self, visitor: &mut V) {
            visitor.visit_span(&self[0]);
        }
    }

    impl Spans for [Span; 2] {
        fn visit<'ast, V: Visit<'ast> + ?Sized>(&self, visitor: &mut V) {
            visitor.visit_span(&self[0]);
            visitor.visit_span(&self[1]);
        }
    }

    impl Spans for [Span; 3] {
        fn visit<'ast, V: Visit<'ast> + ?Sized>(&self, visitor: &mut V) {
            visitor.visit_span(&self[0]);
            visitor.visit_span(&self[1]);
            visitor.visit_span(&self[2]);
        }
    }
}
|
||||
|
||||
#[cfg(feature = "visit-mut")]
pub mod visit_mut {
    use crate::visit_mut::VisitMut;
    use proc_macro2::Span;

    // Mutable counterpart of visit::tokens_helper: visit each span a token
    // type carries in place (token types hold 1–3 spans).
    pub fn tokens_helper<V: VisitMut + ?Sized, S: Spans>(visitor: &mut V, spans: &mut S) {
        spans.visit_mut(visitor);
    }

    pub trait Spans {
        fn visit_mut<V: VisitMut + ?Sized>(&mut self, visitor: &mut V);
    }

    impl Spans for Span {
        fn visit_mut<V: VisitMut + ?Sized>(&mut self, visitor: &mut V) {
            visitor.visit_span_mut(self);
        }
    }

    impl Spans for [Span; 1] {
        fn visit_mut<V: VisitMut + ?Sized>(&mut self, visitor: &mut V) {
            visitor.visit_span_mut(&mut self[0]);
        }
    }

    impl Spans for [Span; 2] {
        fn visit_mut<V: VisitMut + ?Sized>(&mut self, visitor: &mut V) {
            visitor.visit_span_mut(&mut self[0]);
            visitor.visit_span_mut(&mut self[1]);
        }
    }

    impl Spans for [Span; 3] {
        fn visit_mut<V: VisitMut + ?Sized>(&mut self, visitor: &mut V) {
            visitor.visit_span_mut(&mut self[0]);
            visitor.visit_span_mut(&mut self[1]);
            visitor.visit_span_mut(&mut self[2]);
        }
    }
}
|
Разница между файлами не показана из-за своего большого размера
Загрузить разницу
|
@ -0,0 +1,282 @@
|
|||
use crate::error::Result;
|
||||
use crate::parse::ParseBuffer;
|
||||
use crate::token;
|
||||
use proc_macro2::{Delimiter, Span};
|
||||
|
||||
// Not public API.
// Result of parsing a parenthesized group: the delimiter token (carrying the
// group's span) plus a parse buffer positioned over the group's interior.
#[doc(hidden)]
pub struct Parens<'a> {
    pub token: token::Paren,
    pub content: ParseBuffer<'a>,
}

// Not public API.
// Same shape for a `{ ... }` group.
#[doc(hidden)]
pub struct Braces<'a> {
    pub token: token::Brace,
    pub content: ParseBuffer<'a>,
}

// Not public API.
// Same shape for a `[ ... ]` group.
#[doc(hidden)]
pub struct Brackets<'a> {
    pub token: token::Bracket,
    pub content: ParseBuffer<'a>,
}

// Not public API.
// Same shape for an invisible (None-delimited) group.
#[cfg(any(feature = "full", feature = "derive"))]
#[doc(hidden)]
pub struct Group<'a> {
    pub token: token::Group,
    pub content: ParseBuffer<'a>,
}
|
||||
|
||||
// Not public API.
|
||||
#[doc(hidden)]
|
||||
pub fn parse_parens<'a>(input: &ParseBuffer<'a>) -> Result<Parens<'a>> {
|
||||
parse_delimited(input, Delimiter::Parenthesis).map(|(span, content)| Parens {
|
||||
token: token::Paren(span),
|
||||
content,
|
||||
})
|
||||
}
|
||||
|
||||
// Not public API.
|
||||
#[doc(hidden)]
|
||||
pub fn parse_braces<'a>(input: &ParseBuffer<'a>) -> Result<Braces<'a>> {
|
||||
parse_delimited(input, Delimiter::Brace).map(|(span, content)| Braces {
|
||||
token: token::Brace(span),
|
||||
content,
|
||||
})
|
||||
}
|
||||
|
||||
// Not public API.
|
||||
#[doc(hidden)]
|
||||
pub fn parse_brackets<'a>(input: &ParseBuffer<'a>) -> Result<Brackets<'a>> {
|
||||
parse_delimited(input, Delimiter::Bracket).map(|(span, content)| Brackets {
|
||||
token: token::Bracket(span),
|
||||
content,
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(any(feature = "full", feature = "derive"))]
|
||||
pub(crate) fn parse_group<'a>(input: &ParseBuffer<'a>) -> Result<Group<'a>> {
|
||||
parse_delimited(input, Delimiter::None).map(|(span, content)| Group {
|
||||
token: token::Group(span),
|
||||
content,
|
||||
})
|
||||
}
|
||||
|
||||
fn parse_delimited<'a>(
|
||||
input: &ParseBuffer<'a>,
|
||||
delimiter: Delimiter,
|
||||
) -> Result<(Span, ParseBuffer<'a>)> {
|
||||
input.step(|cursor| {
|
||||
if let Some((content, span, rest)) = cursor.group(delimiter) {
|
||||
let scope = crate::buffer::close_span_of_group(*cursor);
|
||||
let nested = crate::parse::advance_step_cursor(cursor, content);
|
||||
let unexpected = crate::parse::get_unexpected(input);
|
||||
let content = crate::parse::new_parse_buffer(scope, nested, unexpected);
|
||||
Ok(((span, content), rest))
|
||||
} else {
|
||||
let message = match delimiter {
|
||||
Delimiter::Parenthesis => "expected parentheses",
|
||||
Delimiter::Brace => "expected curly braces",
|
||||
Delimiter::Bracket => "expected square brackets",
|
||||
Delimiter::None => "expected invisible group",
|
||||
};
|
||||
Err(cursor.error(message))
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/// Parse a set of parentheses and expose their content to subsequent parsers.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```
|
||||
/// # use quote::quote;
|
||||
/// #
|
||||
/// use syn::{parenthesized, token, Ident, Result, Token, Type};
|
||||
/// use syn::parse::{Parse, ParseStream};
|
||||
/// use syn::punctuated::Punctuated;
|
||||
///
|
||||
/// // Parse a simplified tuple struct syntax like:
|
||||
/// //
|
||||
/// // struct S(A, B);
|
||||
/// struct TupleStruct {
|
||||
/// struct_token: Token![struct],
|
||||
/// ident: Ident,
|
||||
/// paren_token: token::Paren,
|
||||
/// fields: Punctuated<Type, Token![,]>,
|
||||
/// semi_token: Token![;],
|
||||
/// }
|
||||
///
|
||||
/// impl Parse for TupleStruct {
|
||||
/// fn parse(input: ParseStream) -> Result<Self> {
|
||||
/// let content;
|
||||
/// Ok(TupleStruct {
|
||||
/// struct_token: input.parse()?,
|
||||
/// ident: input.parse()?,
|
||||
/// paren_token: parenthesized!(content in input),
|
||||
/// fields: content.parse_terminated(Type::parse)?,
|
||||
/// semi_token: input.parse()?,
|
||||
/// })
|
||||
/// }
|
||||
/// }
|
||||
/// #
|
||||
/// # fn main() {
|
||||
/// # let input = quote! {
|
||||
/// # struct S(A, B);
|
||||
/// # };
|
||||
/// # syn::parse2::<TupleStruct>(input).unwrap();
|
||||
/// # }
|
||||
/// ```
|
||||
#[macro_export]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
macro_rules! parenthesized {
|
||||
($content:ident in $cursor:expr) => {
|
||||
match $crate::__private::parse_parens(&$cursor) {
|
||||
$crate::__private::Ok(parens) => {
|
||||
$content = parens.content;
|
||||
parens.token
|
||||
}
|
||||
$crate::__private::Err(error) => {
|
||||
return $crate::__private::Err(error);
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/// Parse a set of curly braces and expose their content to subsequent parsers.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```
|
||||
/// # use quote::quote;
|
||||
/// #
|
||||
/// use syn::{braced, token, Ident, Result, Token, Type};
|
||||
/// use syn::parse::{Parse, ParseStream};
|
||||
/// use syn::punctuated::Punctuated;
|
||||
///
|
||||
/// // Parse a simplified struct syntax like:
|
||||
/// //
|
||||
/// // struct S {
|
||||
/// // a: A,
|
||||
/// // b: B,
|
||||
/// // }
|
||||
/// struct Struct {
|
||||
/// struct_token: Token![struct],
|
||||
/// ident: Ident,
|
||||
/// brace_token: token::Brace,
|
||||
/// fields: Punctuated<Field, Token![,]>,
|
||||
/// }
|
||||
///
|
||||
/// struct Field {
|
||||
/// name: Ident,
|
||||
/// colon_token: Token![:],
|
||||
/// ty: Type,
|
||||
/// }
|
||||
///
|
||||
/// impl Parse for Struct {
|
||||
/// fn parse(input: ParseStream) -> Result<Self> {
|
||||
/// let content;
|
||||
/// Ok(Struct {
|
||||
/// struct_token: input.parse()?,
|
||||
/// ident: input.parse()?,
|
||||
/// brace_token: braced!(content in input),
|
||||
/// fields: content.parse_terminated(Field::parse)?,
|
||||
/// })
|
||||
/// }
|
||||
/// }
|
||||
///
|
||||
/// impl Parse for Field {
|
||||
/// fn parse(input: ParseStream) -> Result<Self> {
|
||||
/// Ok(Field {
|
||||
/// name: input.parse()?,
|
||||
/// colon_token: input.parse()?,
|
||||
/// ty: input.parse()?,
|
||||
/// })
|
||||
/// }
|
||||
/// }
|
||||
/// #
|
||||
/// # fn main() {
|
||||
/// # let input = quote! {
|
||||
/// # struct S {
|
||||
/// # a: A,
|
||||
/// # b: B,
|
||||
/// # }
|
||||
/// # };
|
||||
/// # syn::parse2::<Struct>(input).unwrap();
|
||||
/// # }
|
||||
/// ```
|
||||
#[macro_export]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
macro_rules! braced {
|
||||
($content:ident in $cursor:expr) => {
|
||||
match $crate::__private::parse_braces(&$cursor) {
|
||||
$crate::__private::Ok(braces) => {
|
||||
$content = braces.content;
|
||||
braces.token
|
||||
}
|
||||
$crate::__private::Err(error) => {
|
||||
return $crate::__private::Err(error);
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/// Parse a set of square brackets and expose their content to subsequent
|
||||
/// parsers.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```
|
||||
/// # use quote::quote;
|
||||
/// #
|
||||
/// use proc_macro2::TokenStream;
|
||||
/// use syn::{bracketed, token, Result, Token};
|
||||
/// use syn::parse::{Parse, ParseStream};
|
||||
///
|
||||
/// // Parse an outer attribute like:
|
||||
/// //
|
||||
/// // #[repr(C, packed)]
|
||||
/// struct OuterAttribute {
|
||||
/// pound_token: Token![#],
|
||||
/// bracket_token: token::Bracket,
|
||||
/// content: TokenStream,
|
||||
/// }
|
||||
///
|
||||
/// impl Parse for OuterAttribute {
|
||||
/// fn parse(input: ParseStream) -> Result<Self> {
|
||||
/// let content;
|
||||
/// Ok(OuterAttribute {
|
||||
/// pound_token: input.parse()?,
|
||||
/// bracket_token: bracketed!(content in input),
|
||||
/// content: content.parse()?,
|
||||
/// })
|
||||
/// }
|
||||
/// }
|
||||
/// #
|
||||
/// # fn main() {
|
||||
/// # let input = quote! {
|
||||
/// # #[repr(C, packed)]
|
||||
/// # };
|
||||
/// # syn::parse2::<OuterAttribute>(input).unwrap();
|
||||
/// # }
|
||||
/// ```
|
||||
#[macro_export]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
macro_rules! bracketed {
|
||||
($content:ident in $cursor:expr) => {
|
||||
match $crate::__private::parse_brackets(&$cursor) {
|
||||
$crate::__private::Ok(brackets) => {
|
||||
$content = brackets.content;
|
||||
brackets.token
|
||||
}
|
||||
$crate::__private::Err(error) => {
|
||||
return $crate::__private::Err(error);
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
|
@ -0,0 +1,101 @@
|
|||
#[cfg(feature = "parsing")]
|
||||
use crate::buffer::Cursor;
|
||||
#[cfg(feature = "parsing")]
|
||||
use crate::lookahead;
|
||||
#[cfg(feature = "parsing")]
|
||||
use crate::parse::{Parse, ParseStream, Result};
|
||||
#[cfg(feature = "parsing")]
|
||||
use crate::token::Token;
|
||||
|
||||
pub use proc_macro2::Ident;
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
#[doc(hidden)]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn Ident(marker: lookahead::TokenMarker) -> Ident {
|
||||
match marker {}
|
||||
}
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
fn accept_as_ident(ident: &Ident) -> bool {
|
||||
match ident.to_string().as_str() {
|
||||
"_" |
|
||||
// Based on https://doc.rust-lang.org/grammar.html#keywords
|
||||
// and https://github.com/rust-lang/rfcs/blob/master/text/2421-unreservations-2018.md
|
||||
// and https://github.com/rust-lang/rfcs/blob/master/text/2420-unreserve-proc.md
|
||||
"abstract" | "as" | "become" | "box" | "break" | "const" | "continue" |
|
||||
"crate" | "do" | "else" | "enum" | "extern" | "false" | "final" | "fn" |
|
||||
"for" | "if" | "impl" | "in" | "let" | "loop" | "macro" | "match" |
|
||||
"mod" | "move" | "mut" | "override" | "priv" | "pub" | "ref" |
|
||||
"return" | "Self" | "self" | "static" | "struct" | "super" | "trait" |
|
||||
"true" | "type" | "typeof" | "unsafe" | "unsized" | "use" | "virtual" |
|
||||
"where" | "while" | "yield" => false,
|
||||
_ => true,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for Ident {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
input.step(|cursor| {
|
||||
if let Some((ident, rest)) = cursor.ident() {
|
||||
if accept_as_ident(&ident) {
|
||||
return Ok((ident, rest));
|
||||
}
|
||||
}
|
||||
Err(cursor.error("expected identifier"))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
impl Token for Ident {
|
||||
fn peek(cursor: Cursor) -> bool {
|
||||
if let Some((ident, _rest)) = cursor.ident() {
|
||||
accept_as_ident(&ident)
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
fn display() -> &'static str {
|
||||
"identifier"
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! ident_from_token {
|
||||
($token:ident) => {
|
||||
impl From<Token![$token]> for Ident {
|
||||
fn from(token: Token![$token]) -> Ident {
|
||||
Ident::new(stringify!($token), token.span)
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
ident_from_token!(self);
|
||||
ident_from_token!(Self);
|
||||
ident_from_token!(super);
|
||||
ident_from_token!(crate);
|
||||
ident_from_token!(extern);
|
||||
|
||||
impl From<Token![_]> for Ident {
|
||||
fn from(token: Token![_]) -> Ident {
|
||||
Ident::new("_", token.span)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn xid_ok(symbol: &str) -> bool {
|
||||
let mut chars = symbol.chars();
|
||||
let first = chars.next().unwrap();
|
||||
if !(first == '_' || unicode_ident::is_xid_start(first)) {
|
||||
return false;
|
||||
}
|
||||
for ch in chars {
|
||||
if !unicode_ident::is_xid_continue(ch) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
true
|
||||
}
|
Разница между файлами не показана из-за своего большого размера
Загрузить разницу
|
@ -0,0 +1,984 @@
|
|||
//! [![github]](https://github.com/dtolnay/syn) [![crates-io]](https://crates.io/crates/syn) [![docs-rs]](crate)
|
||||
//!
|
||||
//! [github]: https://img.shields.io/badge/github-8da0cb?style=for-the-badge&labelColor=555555&logo=github
|
||||
//! [crates-io]: https://img.shields.io/badge/crates.io-fc8d62?style=for-the-badge&labelColor=555555&logo=rust
|
||||
//! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs
|
||||
//!
|
||||
//! <br>
|
||||
//!
|
||||
//! Syn is a parsing library for parsing a stream of Rust tokens into a syntax
|
||||
//! tree of Rust source code.
|
||||
//!
|
||||
//! Currently this library is geared toward use in Rust procedural macros, but
|
||||
//! contains some APIs that may be useful more generally.
|
||||
//!
|
||||
//! - **Data structures** — Syn provides a complete syntax tree that can
|
||||
//! represent any valid Rust source code. The syntax tree is rooted at
|
||||
//! [`syn::File`] which represents a full source file, but there are other
|
||||
//! entry points that may be useful to procedural macros including
|
||||
//! [`syn::Item`], [`syn::Expr`] and [`syn::Type`].
|
||||
//!
|
||||
//! - **Derives** — Of particular interest to derive macros is
|
||||
//! [`syn::DeriveInput`] which is any of the three legal input items to a
|
||||
//! derive macro. An example below shows using this type in a library that can
|
||||
//! derive implementations of a user-defined trait.
|
||||
//!
|
||||
//! - **Parsing** — Parsing in Syn is built around [parser functions] with the
|
||||
//! signature `fn(ParseStream) -> Result<T>`. Every syntax tree node defined
|
||||
//! by Syn is individually parsable and may be used as a building block for
|
||||
//! custom syntaxes, or you may dream up your own brand new syntax without
|
||||
//! involving any of our syntax tree types.
|
||||
//!
|
||||
//! - **Location information** — Every token parsed by Syn is associated with a
|
||||
//! `Span` that tracks line and column information back to the source of that
|
||||
//! token. These spans allow a procedural macro to display detailed error
|
||||
//! messages pointing to all the right places in the user's code. There is an
|
||||
//! example of this below.
|
||||
//!
|
||||
//! - **Feature flags** — Functionality is aggressively feature gated so your
|
||||
//! procedural macros enable only what they need, and do not pay in compile
|
||||
//! time for all the rest.
|
||||
//!
|
||||
//! [`syn::File`]: File
|
||||
//! [`syn::Item`]: Item
|
||||
//! [`syn::Expr`]: Expr
|
||||
//! [`syn::Type`]: Type
|
||||
//! [`syn::DeriveInput`]: DeriveInput
|
||||
//! [parser functions]: mod@parse
|
||||
//!
|
||||
//! <br>
|
||||
//!
|
||||
//! # Example of a derive macro
|
||||
//!
|
||||
//! The canonical derive macro using Syn looks like this. We write an ordinary
|
||||
//! Rust function tagged with a `proc_macro_derive` attribute and the name of
|
||||
//! the trait we are deriving. Any time that derive appears in the user's code,
|
||||
//! the Rust compiler passes their data structure as tokens into our macro. We
|
||||
//! get to execute arbitrary Rust code to figure out what to do with those
|
||||
//! tokens, then hand some tokens back to the compiler to compile into the
|
||||
//! user's crate.
|
||||
//!
|
||||
//! [`TokenStream`]: proc_macro::TokenStream
|
||||
//!
|
||||
//! ```toml
|
||||
//! [dependencies]
|
||||
//! syn = "1.0"
|
||||
//! quote = "1.0"
|
||||
//!
|
||||
//! [lib]
|
||||
//! proc-macro = true
|
||||
//! ```
|
||||
//!
|
||||
//! ```
|
||||
//! # extern crate proc_macro;
|
||||
//! #
|
||||
//! use proc_macro::TokenStream;
|
||||
//! use quote::quote;
|
||||
//! use syn::{parse_macro_input, DeriveInput};
|
||||
//!
|
||||
//! # const IGNORE_TOKENS: &str = stringify! {
|
||||
//! #[proc_macro_derive(MyMacro)]
|
||||
//! # };
|
||||
//! pub fn my_macro(input: TokenStream) -> TokenStream {
|
||||
//! // Parse the input tokens into a syntax tree
|
||||
//! let input = parse_macro_input!(input as DeriveInput);
|
||||
//!
|
||||
//! // Build the output, possibly using quasi-quotation
|
||||
//! let expanded = quote! {
|
||||
//! // ...
|
||||
//! };
|
||||
//!
|
||||
//! // Hand the output tokens back to the compiler
|
||||
//! TokenStream::from(expanded)
|
||||
//! }
|
||||
//! ```
|
||||
//!
|
||||
//! The [`heapsize`] example directory shows a complete working implementation
|
||||
//! of a derive macro. It works on any Rust compiler 1.31+. The example derives
|
||||
//! a `HeapSize` trait which computes an estimate of the amount of heap memory
|
||||
//! owned by a value.
|
||||
//!
|
||||
//! [`heapsize`]: https://github.com/dtolnay/syn/tree/master/examples/heapsize
|
||||
//!
|
||||
//! ```
|
||||
//! pub trait HeapSize {
|
||||
//! /// Total number of bytes of heap memory owned by `self`.
|
||||
//! fn heap_size_of_children(&self) -> usize;
|
||||
//! }
|
||||
//! ```
|
||||
//!
|
||||
//! The derive macro allows users to write `#[derive(HeapSize)]` on data
|
||||
//! structures in their program.
|
||||
//!
|
||||
//! ```
|
||||
//! # const IGNORE_TOKENS: &str = stringify! {
|
||||
//! #[derive(HeapSize)]
|
||||
//! # };
|
||||
//! struct Demo<'a, T: ?Sized> {
|
||||
//! a: Box<T>,
|
||||
//! b: u8,
|
||||
//! c: &'a str,
|
||||
//! d: String,
|
||||
//! }
|
||||
//! ```
|
||||
//!
|
||||
//! <p><br></p>
|
||||
//!
|
||||
//! # Spans and error reporting
|
||||
//!
|
||||
//! The token-based procedural macro API provides great control over where the
|
||||
//! compiler's error messages are displayed in user code. Consider the error the
|
||||
//! user sees if one of their field types does not implement `HeapSize`.
|
||||
//!
|
||||
//! ```
|
||||
//! # const IGNORE_TOKENS: &str = stringify! {
|
||||
//! #[derive(HeapSize)]
|
||||
//! # };
|
||||
//! struct Broken {
|
||||
//! ok: String,
|
||||
//! bad: std::thread::Thread,
|
||||
//! }
|
||||
//! ```
|
||||
//!
|
||||
//! By tracking span information all the way through the expansion of a
|
||||
//! procedural macro as shown in the `heapsize` example, token-based macros in
|
||||
//! Syn are able to trigger errors that directly pinpoint the source of the
|
||||
//! problem.
|
||||
//!
|
||||
//! ```text
|
||||
//! error[E0277]: the trait bound `std::thread::Thread: HeapSize` is not satisfied
|
||||
//! --> src/main.rs:7:5
|
||||
//! |
|
||||
//! 7 | bad: std::thread::Thread,
|
||||
//! | ^^^^^^^^^^^^^^^^^^^^^^^^ the trait `HeapSize` is not implemented for `Thread`
|
||||
//! ```
|
||||
//!
|
||||
//! <br>
|
||||
//!
|
||||
//! # Parsing a custom syntax
|
||||
//!
|
||||
//! The [`lazy-static`] example directory shows the implementation of a
|
||||
//! `functionlike!(...)` procedural macro in which the input tokens are parsed
|
||||
//! using Syn's parsing API.
|
||||
//!
|
||||
//! [`lazy-static`]: https://github.com/dtolnay/syn/tree/master/examples/lazy-static
|
||||
//!
|
||||
//! The example reimplements the popular `lazy_static` crate from crates.io as a
|
||||
//! procedural macro.
|
||||
//!
|
||||
//! ```
|
||||
//! # macro_rules! lazy_static {
|
||||
//! # ($($tt:tt)*) => {}
|
||||
//! # }
|
||||
//! #
|
||||
//! lazy_static! {
|
||||
//! static ref USERNAME: Regex = Regex::new("^[a-z0-9_-]{3,16}$").unwrap();
|
||||
//! }
|
||||
//! ```
|
||||
//!
|
||||
//! The implementation shows how to trigger custom warnings and error messages
|
||||
//! on the macro input.
|
||||
//!
|
||||
//! ```text
|
||||
//! warning: come on, pick a more creative name
|
||||
//! --> src/main.rs:10:16
|
||||
//! |
|
||||
//! 10 | static ref FOO: String = "lazy_static".to_owned();
|
||||
//! | ^^^
|
||||
//! ```
|
||||
//!
|
||||
//! <br>
|
||||
//!
|
||||
//! # Testing
|
||||
//!
|
||||
//! When testing macros, we often care not just that the macro can be used
|
||||
//! successfully but also that when the macro is provided with invalid input it
|
||||
//! produces maximally helpful error messages. Consider using the [`trybuild`]
|
||||
//! crate to write tests for errors that are emitted by your macro or errors
|
||||
//! detected by the Rust compiler in the expanded code following misuse of the
|
||||
//! macro. Such tests help avoid regressions from later refactors that
|
||||
//! mistakenly make an error no longer trigger or be less helpful than it used
|
||||
//! to be.
|
||||
//!
|
||||
//! [`trybuild`]: https://github.com/dtolnay/trybuild
|
||||
//!
|
||||
//! <br>
|
||||
//!
|
||||
//! # Debugging
|
||||
//!
|
||||
//! When developing a procedural macro it can be helpful to look at what the
|
||||
//! generated code looks like. Use `cargo rustc -- -Zunstable-options
|
||||
//! --pretty=expanded` or the [`cargo expand`] subcommand.
|
||||
//!
|
||||
//! [`cargo expand`]: https://github.com/dtolnay/cargo-expand
|
||||
//!
|
||||
//! To show the expanded code for some crate that uses your procedural macro,
|
||||
//! run `cargo expand` from that crate. To show the expanded code for one of
|
||||
//! your own test cases, run `cargo expand --test the_test_case` where the last
|
||||
//! argument is the name of the test file without the `.rs` extension.
|
||||
//!
|
||||
//! This write-up by Brandon W Maister discusses debugging in more detail:
|
||||
//! [Debugging Rust's new Custom Derive system][debugging].
|
||||
//!
|
||||
//! [debugging]: https://quodlibetor.github.io/posts/debugging-rusts-new-custom-derive-system/
|
||||
//!
|
||||
//! <br>
|
||||
//!
|
||||
//! # Optional features
|
||||
//!
|
||||
//! Syn puts a lot of functionality behind optional features in order to
|
||||
//! optimize compile time for the most common use cases. The following features
|
||||
//! are available.
|
||||
//!
|
||||
//! - **`derive`** *(enabled by default)* — Data structures for representing the
|
||||
//! possible input to a derive macro, including structs and enums and types.
|
||||
//! - **`full`** — Data structures for representing the syntax tree of all valid
|
||||
//! Rust source code, including items and expressions.
|
||||
//! - **`parsing`** *(enabled by default)* — Ability to parse input tokens into
|
||||
//! a syntax tree node of a chosen type.
|
||||
//! - **`printing`** *(enabled by default)* — Ability to print a syntax tree
|
||||
//! node as tokens of Rust source code.
|
||||
//! - **`visit`** — Trait for traversing a syntax tree.
|
||||
//! - **`visit-mut`** — Trait for traversing and mutating in place a syntax
|
||||
//! tree.
|
||||
//! - **`fold`** — Trait for transforming an owned syntax tree.
|
||||
//! - **`clone-impls`** *(enabled by default)* — Clone impls for all syntax tree
|
||||
//! types.
|
||||
//! - **`extra-traits`** — Debug, Eq, PartialEq, Hash impls for all syntax tree
|
||||
//! types.
|
||||
//! - **`proc-macro`** *(enabled by default)* — Runtime dependency on the
|
||||
//! dynamic library libproc_macro from rustc toolchain.
|
||||
|
||||
// Syn types in rustdoc of other crates get linked to here.
|
||||
#![doc(html_root_url = "https://docs.rs/syn/1.0.107")]
|
||||
#![cfg_attr(doc_cfg, feature(doc_cfg))]
|
||||
#![allow(non_camel_case_types)]
|
||||
#![allow(
|
||||
clippy::bool_to_int_with_if,
|
||||
clippy::cast_lossless,
|
||||
clippy::cast_possible_truncation,
|
||||
clippy::cast_possible_wrap,
|
||||
clippy::cast_ptr_alignment,
|
||||
clippy::default_trait_access,
|
||||
clippy::doc_markdown,
|
||||
clippy::expl_impl_clone_on_copy,
|
||||
clippy::explicit_auto_deref,
|
||||
clippy::if_not_else,
|
||||
clippy::inherent_to_string,
|
||||
clippy::items_after_statements,
|
||||
clippy::large_enum_variant,
|
||||
clippy::manual_assert,
|
||||
clippy::match_on_vec_items,
|
||||
clippy::match_same_arms,
|
||||
clippy::match_wildcard_for_single_variants, // clippy bug: https://github.com/rust-lang/rust-clippy/issues/6984
|
||||
clippy::missing_errors_doc,
|
||||
clippy::missing_panics_doc,
|
||||
clippy::module_name_repetitions,
|
||||
clippy::must_use_candidate,
|
||||
clippy::needless_doctest_main,
|
||||
clippy::needless_pass_by_value,
|
||||
clippy::never_loop,
|
||||
clippy::redundant_else,
|
||||
clippy::return_self_not_must_use,
|
||||
clippy::similar_names,
|
||||
clippy::single_match_else,
|
||||
clippy::too_many_arguments,
|
||||
clippy::too_many_lines,
|
||||
clippy::trivially_copy_pass_by_ref,
|
||||
clippy::unnecessary_unwrap,
|
||||
clippy::used_underscore_binding,
|
||||
clippy::wildcard_imports
|
||||
)]
|
||||
|
||||
#[cfg(all(
|
||||
not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
|
||||
feature = "proc-macro"
|
||||
))]
|
||||
extern crate proc_macro;
|
||||
extern crate proc_macro2;
|
||||
|
||||
#[cfg(feature = "printing")]
|
||||
extern crate quote;
|
||||
|
||||
#[macro_use]
|
||||
mod macros;
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
#[macro_use]
|
||||
mod group;
|
||||
|
||||
#[macro_use]
|
||||
pub mod token;
|
||||
|
||||
mod ident;
|
||||
pub use crate::ident::Ident;
|
||||
|
||||
#[cfg(any(feature = "full", feature = "derive"))]
|
||||
mod attr;
|
||||
#[cfg(any(feature = "full", feature = "derive"))]
|
||||
pub use crate::attr::{
|
||||
AttrStyle, Attribute, AttributeArgs, Meta, MetaList, MetaNameValue, NestedMeta,
|
||||
};
|
||||
|
||||
mod bigint;
|
||||
|
||||
#[cfg(any(feature = "full", feature = "derive"))]
|
||||
mod data;
|
||||
#[cfg(any(feature = "full", feature = "derive"))]
|
||||
pub use crate::data::{
|
||||
Field, Fields, FieldsNamed, FieldsUnnamed, Variant, VisCrate, VisPublic, VisRestricted,
|
||||
Visibility,
|
||||
};
|
||||
|
||||
#[cfg(any(feature = "full", feature = "derive"))]
|
||||
mod expr;
|
||||
#[cfg(feature = "full")]
|
||||
pub use crate::expr::{
|
||||
Arm, FieldValue, GenericMethodArgument, Label, MethodTurbofish, RangeLimits,
|
||||
};
|
||||
#[cfg(any(feature = "full", feature = "derive"))]
|
||||
pub use crate::expr::{
|
||||
Expr, ExprArray, ExprAssign, ExprAssignOp, ExprAsync, ExprAwait, ExprBinary, ExprBlock,
|
||||
ExprBox, ExprBreak, ExprCall, ExprCast, ExprClosure, ExprContinue, ExprField, ExprForLoop,
|
||||
ExprGroup, ExprIf, ExprIndex, ExprLet, ExprLit, ExprLoop, ExprMacro, ExprMatch, ExprMethodCall,
|
||||
ExprParen, ExprPath, ExprRange, ExprReference, ExprRepeat, ExprReturn, ExprStruct, ExprTry,
|
||||
ExprTryBlock, ExprTuple, ExprType, ExprUnary, ExprUnsafe, ExprWhile, ExprYield, Index, Member,
|
||||
};
|
||||
|
||||
#[cfg(any(feature = "full", feature = "derive"))]
|
||||
mod generics;
|
||||
#[cfg(any(feature = "full", feature = "derive"))]
|
||||
pub use crate::generics::{
|
||||
BoundLifetimes, ConstParam, GenericParam, Generics, LifetimeDef, PredicateEq,
|
||||
PredicateLifetime, PredicateType, TraitBound, TraitBoundModifier, TypeParam, TypeParamBound,
|
||||
WhereClause, WherePredicate,
|
||||
};
|
||||
#[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))]
|
||||
pub use crate::generics::{ImplGenerics, Turbofish, TypeGenerics};
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
mod item;
|
||||
#[cfg(feature = "full")]
|
||||
pub use crate::item::{
|
||||
FnArg, ForeignItem, ForeignItemFn, ForeignItemMacro, ForeignItemStatic, ForeignItemType,
|
||||
ImplItem, ImplItemConst, ImplItemMacro, ImplItemMethod, ImplItemType, Item, ItemConst,
|
||||
ItemEnum, ItemExternCrate, ItemFn, ItemForeignMod, ItemImpl, ItemMacro, ItemMacro2, ItemMod,
|
||||
ItemStatic, ItemStruct, ItemTrait, ItemTraitAlias, ItemType, ItemUnion, ItemUse, Receiver,
|
||||
Signature, TraitItem, TraitItemConst, TraitItemMacro, TraitItemMethod, TraitItemType, UseGlob,
|
||||
UseGroup, UseName, UsePath, UseRename, UseTree,
|
||||
};
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
mod file;
|
||||
#[cfg(feature = "full")]
|
||||
pub use crate::file::File;
|
||||
|
||||
mod lifetime;
|
||||
pub use crate::lifetime::Lifetime;
|
||||
|
||||
mod lit;
|
||||
pub use crate::lit::{
|
||||
Lit, LitBool, LitByte, LitByteStr, LitChar, LitFloat, LitInt, LitStr, StrStyle,
|
||||
};
|
||||
|
||||
#[cfg(any(feature = "full", feature = "derive"))]
|
||||
mod mac;
|
||||
#[cfg(any(feature = "full", feature = "derive"))]
|
||||
pub use crate::mac::{Macro, MacroDelimiter};
|
||||
|
||||
#[cfg(any(feature = "full", feature = "derive"))]
|
||||
mod derive;
|
||||
#[cfg(feature = "derive")]
|
||||
pub use crate::derive::{Data, DataEnum, DataStruct, DataUnion, DeriveInput};
|
||||
|
||||
#[cfg(any(feature = "full", feature = "derive"))]
|
||||
mod op;
|
||||
#[cfg(any(feature = "full", feature = "derive"))]
|
||||
pub use crate::op::{BinOp, UnOp};
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
mod stmt;
|
||||
#[cfg(feature = "full")]
|
||||
pub use crate::stmt::{Block, Local, Stmt};
|
||||
|
||||
#[cfg(any(feature = "full", feature = "derive"))]
|
||||
mod ty;
|
||||
#[cfg(any(feature = "full", feature = "derive"))]
|
||||
pub use crate::ty::{
|
||||
Abi, BareFnArg, ReturnType, Type, TypeArray, TypeBareFn, TypeGroup, TypeImplTrait, TypeInfer,
|
||||
TypeMacro, TypeNever, TypeParen, TypePath, TypePtr, TypeReference, TypeSlice, TypeTraitObject,
|
||||
TypeTuple, Variadic,
|
||||
};
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
mod pat;
|
||||
#[cfg(feature = "full")]
|
||||
pub use crate::pat::{
|
||||
FieldPat, Pat, PatBox, PatIdent, PatLit, PatMacro, PatOr, PatPath, PatRange, PatReference,
|
||||
PatRest, PatSlice, PatStruct, PatTuple, PatTupleStruct, PatType, PatWild,
|
||||
};
|
||||
|
||||
#[cfg(any(feature = "full", feature = "derive"))]
|
||||
mod path;
|
||||
#[cfg(any(feature = "full", feature = "derive"))]
|
||||
pub use crate::path::{
|
||||
AngleBracketedGenericArguments, Binding, Constraint, GenericArgument,
|
||||
ParenthesizedGenericArguments, Path, PathArguments, PathSegment, QSelf,
|
||||
};
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
pub mod buffer;
|
||||
mod drops;
|
||||
#[cfg(feature = "parsing")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
pub mod ext;
|
||||
pub mod punctuated;
|
||||
#[cfg(all(any(feature = "full", feature = "derive"), feature = "extra-traits"))]
|
||||
mod tt;
|
||||
|
||||
// Not public API except the `parse_quote!` macro.
|
||||
#[cfg(feature = "parsing")]
|
||||
#[doc(hidden)]
|
||||
pub mod parse_quote;
|
||||
|
||||
// Not public API except the `parse_macro_input!` macro.
|
||||
#[cfg(all(
|
||||
not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
|
||||
feature = "parsing",
|
||||
feature = "proc-macro"
|
||||
))]
|
||||
#[doc(hidden)]
|
||||
pub mod parse_macro_input;
|
||||
|
||||
#[cfg(all(feature = "parsing", feature = "printing"))]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(all(feature = "parsing", feature = "printing"))))]
|
||||
pub mod spanned;
|
||||
|
||||
#[cfg(all(feature = "parsing", feature = "full"))]
|
||||
mod whitespace;
|
||||
|
||||
mod gen {
|
||||
/// Syntax tree traversal to walk a shared borrow of a syntax tree.
|
||||
///
|
||||
/// Each method of the [`Visit`] trait is a hook that can be overridden to
|
||||
/// customize the behavior when visiting the corresponding type of node. By
|
||||
/// default, every method recursively visits the substructure of the input
|
||||
/// by invoking the right visitor method of each of its fields.
|
||||
///
|
||||
/// [`Visit`]: visit::Visit
|
||||
///
|
||||
/// ```
|
||||
/// # use syn::{Attribute, BinOp, Expr, ExprBinary};
|
||||
/// #
|
||||
/// pub trait Visit<'ast> {
|
||||
/// /* ... */
|
||||
///
|
||||
/// fn visit_expr_binary(&mut self, node: &'ast ExprBinary) {
|
||||
/// visit_expr_binary(self, node);
|
||||
/// }
|
||||
///
|
||||
/// /* ... */
|
||||
/// # fn visit_attribute(&mut self, node: &'ast Attribute);
|
||||
/// # fn visit_expr(&mut self, node: &'ast Expr);
|
||||
/// # fn visit_bin_op(&mut self, node: &'ast BinOp);
|
||||
/// }
|
||||
///
|
||||
/// pub fn visit_expr_binary<'ast, V>(v: &mut V, node: &'ast ExprBinary)
|
||||
/// where
|
||||
/// V: Visit<'ast> + ?Sized,
|
||||
/// {
|
||||
/// for attr in &node.attrs {
|
||||
/// v.visit_attribute(attr);
|
||||
/// }
|
||||
/// v.visit_expr(&*node.left);
|
||||
/// v.visit_bin_op(&node.op);
|
||||
/// v.visit_expr(&*node.right);
|
||||
/// }
|
||||
///
|
||||
/// /* ... */
|
||||
/// ```
|
||||
///
|
||||
/// *This module is available only if Syn is built with the `"visit"` feature.*
|
||||
///
|
||||
/// <br>
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// This visitor will print the name of every freestanding function in the
|
||||
/// syntax tree, including nested functions.
|
||||
///
|
||||
/// ```
|
||||
/// // [dependencies]
|
||||
/// // quote = "1.0"
|
||||
/// // syn = { version = "1.0", features = ["full", "visit"] }
|
||||
///
|
||||
/// use quote::quote;
|
||||
/// use syn::visit::{self, Visit};
|
||||
/// use syn::{File, ItemFn};
|
||||
///
|
||||
/// struct FnVisitor;
|
||||
///
|
||||
/// impl<'ast> Visit<'ast> for FnVisitor {
|
||||
/// fn visit_item_fn(&mut self, node: &'ast ItemFn) {
|
||||
/// println!("Function with name={}", node.sig.ident);
|
||||
///
|
||||
/// // Delegate to the default impl to visit any nested functions.
|
||||
/// visit::visit_item_fn(self, node);
|
||||
/// }
|
||||
/// }
|
||||
///
|
||||
/// fn main() {
|
||||
/// let code = quote! {
|
||||
/// pub fn f() {
|
||||
/// fn g() {}
|
||||
/// }
|
||||
/// };
|
||||
///
|
||||
/// let syntax_tree: File = syn::parse2(code).unwrap();
|
||||
/// FnVisitor.visit_file(&syntax_tree);
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// The `'ast` lifetime on the input references means that the syntax tree
|
||||
/// outlives the complete recursive visit call, so the visitor is allowed to
|
||||
/// hold on to references into the syntax tree.
|
||||
///
|
||||
/// ```
|
||||
/// use quote::quote;
|
||||
/// use syn::visit::{self, Visit};
|
||||
/// use syn::{File, ItemFn};
|
||||
///
|
||||
/// struct FnVisitor<'ast> {
|
||||
/// functions: Vec<&'ast ItemFn>,
|
||||
/// }
|
||||
///
|
||||
/// impl<'ast> Visit<'ast> for FnVisitor<'ast> {
|
||||
/// fn visit_item_fn(&mut self, node: &'ast ItemFn) {
|
||||
/// self.functions.push(node);
|
||||
/// visit::visit_item_fn(self, node);
|
||||
/// }
|
||||
/// }
|
||||
///
|
||||
/// fn main() {
|
||||
/// let code = quote! {
|
||||
/// pub fn f() {
|
||||
/// fn g() {}
|
||||
/// }
|
||||
/// };
|
||||
///
|
||||
/// let syntax_tree: File = syn::parse2(code).unwrap();
|
||||
/// let mut visitor = FnVisitor { functions: Vec::new() };
|
||||
/// visitor.visit_file(&syntax_tree);
|
||||
/// for f in visitor.functions {
|
||||
/// println!("Function with name={}", f.sig.ident);
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
#[cfg(feature = "visit")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "visit")))]
|
||||
#[rustfmt::skip]
|
||||
pub mod visit;
|
||||
|
||||
/// Syntax tree traversal to mutate an exclusive borrow of a syntax tree in
|
||||
/// place.
|
||||
///
|
||||
/// Each method of the [`VisitMut`] trait is a hook that can be overridden
|
||||
/// to customize the behavior when mutating the corresponding type of node.
|
||||
/// By default, every method recursively visits the substructure of the
|
||||
/// input by invoking the right visitor method of each of its fields.
|
||||
///
|
||||
/// [`VisitMut`]: visit_mut::VisitMut
|
||||
///
|
||||
/// ```
|
||||
/// # use syn::{Attribute, BinOp, Expr, ExprBinary};
|
||||
/// #
|
||||
/// pub trait VisitMut {
|
||||
/// /* ... */
|
||||
///
|
||||
/// fn visit_expr_binary_mut(&mut self, node: &mut ExprBinary) {
|
||||
/// visit_expr_binary_mut(self, node);
|
||||
/// }
|
||||
///
|
||||
/// /* ... */
|
||||
/// # fn visit_attribute_mut(&mut self, node: &mut Attribute);
|
||||
/// # fn visit_expr_mut(&mut self, node: &mut Expr);
|
||||
/// # fn visit_bin_op_mut(&mut self, node: &mut BinOp);
|
||||
/// }
|
||||
///
|
||||
/// pub fn visit_expr_binary_mut<V>(v: &mut V, node: &mut ExprBinary)
|
||||
/// where
|
||||
/// V: VisitMut + ?Sized,
|
||||
/// {
|
||||
/// for attr in &mut node.attrs {
|
||||
/// v.visit_attribute_mut(attr);
|
||||
/// }
|
||||
/// v.visit_expr_mut(&mut *node.left);
|
||||
/// v.visit_bin_op_mut(&mut node.op);
|
||||
/// v.visit_expr_mut(&mut *node.right);
|
||||
/// }
|
||||
///
|
||||
/// /* ... */
|
||||
/// ```
|
||||
///
|
||||
/// *This module is available only if Syn is built with the `"visit-mut"`
|
||||
/// feature.*
|
||||
///
|
||||
/// <br>
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// This mut visitor replaces occurrences of u256-suffixed integer literals
/// like `999u256` with a macro invocation `bigint::u256!(999)`.
|
||||
///
|
||||
/// ```
|
||||
/// // [dependencies]
|
||||
/// // quote = "1.0"
|
||||
/// // syn = { version = "1.0", features = ["full", "visit-mut"] }
|
||||
///
|
||||
/// use quote::quote;
|
||||
/// use syn::visit_mut::{self, VisitMut};
|
||||
/// use syn::{parse_quote, Expr, File, Lit, LitInt};
|
||||
///
|
||||
/// struct BigintReplace;
|
||||
///
|
||||
/// impl VisitMut for BigintReplace {
|
||||
/// fn visit_expr_mut(&mut self, node: &mut Expr) {
|
||||
/// if let Expr::Lit(expr) = &node {
|
||||
/// if let Lit::Int(int) = &expr.lit {
|
||||
/// if int.suffix() == "u256" {
|
||||
/// let digits = int.base10_digits();
|
||||
/// let unsuffixed: LitInt = syn::parse_str(digits).unwrap();
|
||||
/// *node = parse_quote!(bigint::u256!(#unsuffixed));
|
||||
/// return;
|
||||
/// }
|
||||
/// }
|
||||
/// }
|
||||
///
|
||||
/// // Delegate to the default impl to visit nested expressions.
|
||||
/// visit_mut::visit_expr_mut(self, node);
|
||||
/// }
|
||||
/// }
|
||||
///
|
||||
/// fn main() {
|
||||
/// let code = quote! {
|
||||
/// fn main() {
|
||||
/// let _ = 999u256;
|
||||
/// }
|
||||
/// };
|
||||
///
|
||||
/// let mut syntax_tree: File = syn::parse2(code).unwrap();
|
||||
/// BigintReplace.visit_file_mut(&mut syntax_tree);
|
||||
/// println!("{}", quote!(#syntax_tree));
|
||||
/// }
|
||||
/// ```
|
||||
#[cfg(feature = "visit-mut")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "visit-mut")))]
|
||||
#[rustfmt::skip]
|
||||
pub mod visit_mut;
|
||||
|
||||
/// Syntax tree traversal to transform the nodes of an owned syntax tree.
|
||||
///
|
||||
/// Each method of the [`Fold`] trait is a hook that can be overridden to
|
||||
/// customize the behavior when transforming the corresponding type of node.
|
||||
/// By default, every method recursively visits the substructure of the
|
||||
/// input by invoking the right visitor method of each of its fields.
|
||||
///
|
||||
/// [`Fold`]: fold::Fold
|
||||
///
|
||||
/// ```
|
||||
/// # use syn::{Attribute, BinOp, Expr, ExprBinary};
|
||||
/// #
|
||||
/// pub trait Fold {
|
||||
/// /* ... */
|
||||
///
|
||||
/// fn fold_expr_binary(&mut self, node: ExprBinary) -> ExprBinary {
|
||||
/// fold_expr_binary(self, node)
|
||||
/// }
|
||||
///
|
||||
/// /* ... */
|
||||
/// # fn fold_attribute(&mut self, node: Attribute) -> Attribute;
|
||||
/// # fn fold_expr(&mut self, node: Expr) -> Expr;
|
||||
/// # fn fold_bin_op(&mut self, node: BinOp) -> BinOp;
|
||||
/// }
|
||||
///
|
||||
/// pub fn fold_expr_binary<V>(v: &mut V, node: ExprBinary) -> ExprBinary
|
||||
/// where
|
||||
/// V: Fold + ?Sized,
|
||||
/// {
|
||||
/// ExprBinary {
|
||||
/// attrs: node
|
||||
/// .attrs
|
||||
/// .into_iter()
|
||||
/// .map(|attr| v.fold_attribute(attr))
|
||||
/// .collect(),
|
||||
/// left: Box::new(v.fold_expr(*node.left)),
|
||||
/// op: v.fold_bin_op(node.op),
|
||||
/// right: Box::new(v.fold_expr(*node.right)),
|
||||
/// }
|
||||
/// }
|
||||
///
|
||||
/// /* ... */
|
||||
/// ```
|
||||
///
|
||||
/// *This module is available only if Syn is built with the `"fold"` feature.*
|
||||
///
|
||||
/// <br>
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// This fold inserts parentheses to fully parenthesize any expression.
|
||||
///
|
||||
/// ```
|
||||
/// // [dependencies]
|
||||
/// // quote = "1.0"
|
||||
/// // syn = { version = "1.0", features = ["fold", "full"] }
|
||||
///
|
||||
/// use quote::quote;
|
||||
/// use syn::fold::{fold_expr, Fold};
|
||||
/// use syn::{token, Expr, ExprParen};
|
||||
///
|
||||
/// struct ParenthesizeEveryExpr;
|
||||
///
|
||||
/// impl Fold for ParenthesizeEveryExpr {
|
||||
/// fn fold_expr(&mut self, expr: Expr) -> Expr {
|
||||
/// Expr::Paren(ExprParen {
|
||||
/// attrs: Vec::new(),
|
||||
/// expr: Box::new(fold_expr(self, expr)),
|
||||
/// paren_token: token::Paren::default(),
|
||||
/// })
|
||||
/// }
|
||||
/// }
|
||||
///
|
||||
/// fn main() {
|
||||
/// let code = quote! { a() + b(1) * c.d };
|
||||
/// let expr: Expr = syn::parse2(code).unwrap();
|
||||
/// let parenthesized = ParenthesizeEveryExpr.fold_expr(expr);
|
||||
/// println!("{}", quote!(#parenthesized));
|
||||
///
|
||||
/// // Output: (((a)()) + (((b)((1))) * ((c).d)))
|
||||
/// }
|
||||
/// ```
|
||||
#[cfg(feature = "fold")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "fold")))]
|
||||
#[rustfmt::skip]
|
||||
pub mod fold;
|
||||
|
||||
#[cfg(feature = "clone-impls")]
|
||||
#[rustfmt::skip]
|
||||
mod clone;
|
||||
|
||||
#[cfg(feature = "extra-traits")]
|
||||
#[rustfmt::skip]
|
||||
mod eq;
|
||||
|
||||
#[cfg(feature = "extra-traits")]
|
||||
#[rustfmt::skip]
|
||||
mod hash;
|
||||
|
||||
#[cfg(feature = "extra-traits")]
|
||||
#[rustfmt::skip]
|
||||
mod debug;
|
||||
|
||||
#[cfg(any(feature = "full", feature = "derive"))]
|
||||
#[path = "../gen_helper.rs"]
|
||||
mod helper;
|
||||
}
|
||||
pub use crate::gen::*;
|
||||
|
||||
// Not public API.
|
||||
#[doc(hidden)]
|
||||
#[path = "export.rs"]
|
||||
pub mod __private;
|
||||
|
||||
mod custom_keyword;
|
||||
mod custom_punctuation;
|
||||
mod sealed;
|
||||
mod span;
|
||||
mod thread;
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
mod lookahead;
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
pub mod parse;
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
mod reserved;
|
||||
|
||||
#[cfg(all(any(feature = "full", feature = "derive"), feature = "parsing"))]
|
||||
mod verbatim;
|
||||
|
||||
#[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))]
|
||||
mod print;
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
mod error;
|
||||
pub use crate::error::{Error, Result};
|
||||
|
||||
/// Parse tokens of source code into the chosen syntax tree node.
|
||||
///
|
||||
/// This is preferred over parsing a string because tokens are able to preserve
|
||||
/// information about where in the user's code they were originally written (the
|
||||
/// "span" of the token), possibly allowing the compiler to produce better error
|
||||
/// messages.
|
||||
///
|
||||
/// This function parses a `proc_macro::TokenStream` which is the type used for
|
||||
/// interop with the compiler in a procedural macro. To parse a
|
||||
/// `proc_macro2::TokenStream`, use [`syn::parse2`] instead.
|
||||
///
|
||||
/// [`syn::parse2`]: parse2
|
||||
///
|
||||
/// *This function is available only if Syn is built with both the `"parsing"` and
|
||||
/// `"proc-macro"` features.*
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// # extern crate proc_macro;
|
||||
/// #
|
||||
/// use proc_macro::TokenStream;
|
||||
/// use quote::quote;
|
||||
/// use syn::DeriveInput;
|
||||
///
|
||||
/// # const IGNORE_TOKENS: &str = stringify! {
|
||||
/// #[proc_macro_derive(MyMacro)]
|
||||
/// # };
|
||||
/// pub fn my_macro(input: TokenStream) -> TokenStream {
|
||||
/// // Parse the tokens into a syntax tree
|
||||
/// let ast: DeriveInput = syn::parse(input).unwrap();
|
||||
///
|
||||
/// // Build the output, possibly using quasi-quotation
|
||||
/// let expanded = quote! {
|
||||
/// /* ... */
|
||||
/// };
|
||||
///
|
||||
/// // Convert into a token stream and return it
|
||||
/// expanded.into()
|
||||
/// }
|
||||
/// ```
|
||||
#[cfg(all(
    not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
    feature = "parsing",
    feature = "proc-macro"
))]
#[cfg_attr(doc_cfg, doc(cfg(all(feature = "parsing", feature = "proc-macro"))))]
pub fn parse<T: parse::Parse>(tokens: proc_macro::TokenStream) -> Result<T> {
    // Delegate to the shared Parser entry point with `T`'s own parse function.
    parse::Parser::parse(T::parse, tokens)
}
|
||||
|
||||
/// Parse a proc-macro2 token stream into the chosen syntax tree node.
|
||||
///
|
||||
/// This function will check that the input is fully parsed. If there are
|
||||
/// any unparsed tokens at the end of the stream, an error is returned.
|
||||
///
|
||||
/// This function parses a `proc_macro2::TokenStream` which is commonly useful
|
||||
/// when the input comes from a node of the Syn syntax tree, for example the
|
||||
/// body tokens of a [`Macro`] node. When in a procedural macro parsing the
|
||||
/// `proc_macro::TokenStream` provided by the compiler, use [`syn::parse`]
|
||||
/// instead.
|
||||
///
|
||||
/// [`syn::parse`]: parse()
|
||||
///
|
||||
/// *This function is available only if Syn is built with the `"parsing"` feature.*
|
||||
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn parse2<T: parse::Parse>(tokens: proc_macro2::TokenStream) -> Result<T> {
    // Same as `parse` but for proc-macro2 streams; per the doc comment above,
    // any tokens left unparsed at the end of the stream produce an error.
    parse::Parser::parse2(T::parse, tokens)
}
|
||||
|
||||
/// Parse a string of Rust code into the chosen syntax tree node.
|
||||
///
|
||||
/// *This function is available only if Syn is built with the `"parsing"` feature.*
|
||||
///
|
||||
/// # Hygiene
|
||||
///
|
||||
/// Every span in the resulting syntax tree will be set to resolve at the macro
|
||||
/// call site.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use syn::{Expr, Result};
|
||||
///
|
||||
/// fn run() -> Result<()> {
|
||||
/// let code = "assert_eq!(u8::max_value(), 255)";
|
||||
/// let expr = syn::parse_str::<Expr>(code)?;
|
||||
/// println!("{:#?}", expr);
|
||||
/// Ok(())
|
||||
/// }
|
||||
/// #
|
||||
/// # run().unwrap();
|
||||
/// ```
|
||||
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn parse_str<T: parse::Parse>(s: &str) -> Result<T> {
    // Tokenize the string, then parse; every resulting span resolves at the
    // macro call site (see the Hygiene section in the doc comment above).
    parse::Parser::parse_str(T::parse, s)
}
|
||||
|
||||
// FIXME the name parse_file makes it sound like you might pass in a path to a
|
||||
// file, rather than the content.
|
||||
/// Parse the content of a file of Rust code.
|
||||
///
|
||||
/// This is different from `syn::parse_str::<File>(content)` in two ways:
|
||||
///
|
||||
/// - It discards a leading byte order mark `\u{FEFF}` if the file has one.
|
||||
/// - It preserves the shebang line of the file, such as `#!/usr/bin/env rustx`.
|
||||
///
|
||||
/// If present, either of these would be an error using `from_str`.
|
||||
///
|
||||
/// *This function is available only if Syn is built with the `"parsing"` and
|
||||
/// `"full"` features.*
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```no_run
|
||||
/// use std::error::Error;
|
||||
/// use std::fs::File;
|
||||
/// use std::io::Read;
|
||||
///
|
||||
/// fn run() -> Result<(), Box<Error>> {
|
||||
/// let mut file = File::open("path/to/code.rs")?;
|
||||
/// let mut content = String::new();
|
||||
/// file.read_to_string(&mut content)?;
|
||||
///
|
||||
/// let ast = syn::parse_file(&content)?;
|
||||
/// if let Some(shebang) = ast.shebang {
|
||||
/// println!("{}", shebang);
|
||||
/// }
|
||||
/// println!("{} items", ast.items.len());
|
||||
///
|
||||
/// Ok(())
|
||||
/// }
|
||||
/// #
|
||||
/// # run().unwrap();
|
||||
/// ```
|
||||
#[cfg(all(feature = "parsing", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(all(feature = "parsing", feature = "full"))))]
pub fn parse_file(mut content: &str) -> Result<File> {
    // Strip the BOM if it is present
    const BOM: &str = "\u{feff}";
    if content.starts_with(BOM) {
        content = &content[BOM.len()..];
    }

    let mut shebang = None;
    if content.starts_with("#!") {
        // `#!` can also begin an inner attribute `#![...]`. Skip whitespace
        // after the `#!` and only treat the line as a shebang when the next
        // character is not `[`.
        let rest = whitespace::skip(&content[2..]);
        if !rest.starts_with('[') {
            if let Some(idx) = content.find('\n') {
                // The stored shebang excludes the trailing newline; the
                // newline itself is left in `content` for the parser.
                shebang = Some(content[..idx].to_string());
                content = &content[idx..];
            } else {
                // The entire file is a single shebang line with no newline.
                shebang = Some(content.to_string());
                content = "";
            }
        }
    }

    // Parse the remaining text as items, then attach the captured shebang.
    let mut file: File = parse_str(content)?;
    file.shebang = shebang;
    Ok(file)
}
|
|
@ -0,0 +1,154 @@
|
|||
use proc_macro2::{Ident, Span};
|
||||
use std::cmp::Ordering;
|
||||
use std::fmt::{self, Display};
|
||||
use std::hash::{Hash, Hasher};
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
use crate::lookahead;
|
||||
|
||||
/// A Rust lifetime: `'a`.
///
/// Lifetime names must conform to the following rules:
///
/// - Must start with an apostrophe.
/// - Must not consist of just an apostrophe: `'`.
/// - Character after the apostrophe must be `_` or a Unicode code point with
///   the XID_Start property.
/// - All following characters must be Unicode code points with the XID_Continue
///   property.
pub struct Lifetime {
    // Span of the leading `'` character, kept separately from the ident so the
    // two pieces can carry distinct spans when printed back out.
    pub apostrophe: Span,
    // The name following the apostrophe, stored without the `'` itself.
    pub ident: Ident,
}
|
||||
|
||||
impl Lifetime {
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if the lifetime does not conform to the bulleted rules above.
|
||||
///
|
||||
/// # Invocation
|
||||
///
|
||||
/// ```
|
||||
/// # use proc_macro2::Span;
|
||||
/// # use syn::Lifetime;
|
||||
/// #
|
||||
/// # fn f() -> Lifetime {
|
||||
/// Lifetime::new("'a", Span::call_site())
|
||||
/// # }
|
||||
/// ```
|
||||
pub fn new(symbol: &str, span: Span) -> Self {
|
||||
if !symbol.starts_with('\'') {
|
||||
panic!(
|
||||
"lifetime name must start with apostrophe as in \"'a\", got {:?}",
|
||||
symbol
|
||||
);
|
||||
}
|
||||
|
||||
if symbol == "'" {
|
||||
panic!("lifetime name must not be empty");
|
||||
}
|
||||
|
||||
if !crate::ident::xid_ok(&symbol[1..]) {
|
||||
panic!("{:?} is not a valid lifetime name", symbol);
|
||||
}
|
||||
|
||||
Lifetime {
|
||||
apostrophe: span,
|
||||
ident: Ident::new(&symbol[1..], span),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn span(&self) -> Span {
|
||||
self.apostrophe
|
||||
.join(self.ident.span())
|
||||
.unwrap_or(self.apostrophe)
|
||||
}
|
||||
|
||||
pub fn set_span(&mut self, span: Span) {
|
||||
self.apostrophe = span;
|
||||
self.ident.set_span(span);
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for Lifetime {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
"'".fmt(formatter)?;
|
||||
self.ident.fmt(formatter)
|
||||
}
|
||||
}
|
||||
|
||||
// Written by hand rather than derived, matching the manually written trait
// impls below. `Span` is copied; only the ident needs an actual clone.
impl Clone for Lifetime {
    fn clone(&self) -> Self {
        Lifetime {
            apostrophe: self.apostrophe,
            ident: self.ident.clone(),
        }
    }
}
|
||||
|
||||
// The comparison and hashing impls below deliberately look only at the ident
// and ignore `apostrophe`: spans carry no semantic identity, and skipping them
// keeps Eq, Ord, and Hash mutually consistent.

impl PartialEq for Lifetime {
    fn eq(&self, other: &Lifetime) -> bool {
        self.ident.eq(&other.ident)
    }
}

impl Eq for Lifetime {}

impl PartialOrd for Lifetime {
    fn partial_cmp(&self, other: &Lifetime) -> Option<Ordering> {
        // Defer to the total order defined by `Ord` below.
        Some(self.cmp(other))
    }
}

impl Ord for Lifetime {
    fn cmp(&self, other: &Lifetime) -> Ordering {
        self.ident.cmp(&other.ident)
    }
}

impl Hash for Lifetime {
    fn hash<H: Hasher>(&self, h: &mut H) {
        // Hash only the ident, consistent with the Eq impl above.
        self.ident.hash(h);
    }
}
|
||||
|
||||
#[cfg(feature = "parsing")]
#[doc(hidden)]
#[allow(non_snake_case)]
pub fn Lifetime(marker: lookahead::TokenMarker) -> Lifetime {
    // `TokenMarker` is an uninhabited enum, so this function can never be
    // called. It exists only so that `input.peek(Lifetime)` type-checks: the
    // lookahead machinery implements `Peek` for `fn(TokenMarker) -> T`.
    match marker {}
}
|
||||
|
||||
#[cfg(feature = "parsing")]
pub mod parsing {
    use super::*;
    use crate::parse::{Parse, ParseStream, Result};

    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    impl Parse for Lifetime {
        fn parse(input: ParseStream) -> Result<Self> {
            input.step(|cursor| {
                // `Cursor::lifetime` yields the lifetime along with the cursor
                // advanced past it; any other token is reported as an error at
                // the current position.
                cursor
                    .lifetime()
                    .ok_or_else(|| cursor.error("expected lifetime"))
            })
        }
    }
}
|
||||
|
||||
#[cfg(feature = "printing")]
mod printing {
    use super::*;
    use proc_macro2::{Punct, Spacing, TokenStream};
    use quote::{ToTokens, TokenStreamExt};

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for Lifetime {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            // `Spacing::Joint` glues the apostrophe to the following ident so
            // the pair is treated as a single lifetime token downstream.
            let mut apostrophe = Punct::new('\'', Spacing::Joint);
            apostrophe.set_span(self.apostrophe);
            tokens.append(apostrophe);
            self.ident.to_tokens(tokens);
        }
    }
}
|
Разница между файлами не показана из-за своего большого размера
Загрузить разницу
|
@ -0,0 +1,169 @@
|
|||
use crate::buffer::Cursor;
|
||||
use crate::error::{self, Error};
|
||||
use crate::sealed::lookahead::Sealed;
|
||||
use crate::span::IntoSpans;
|
||||
use crate::token::Token;
|
||||
use proc_macro2::{Delimiter, Span};
|
||||
use std::cell::RefCell;
|
||||
|
||||
/// Support for checking the next token in a stream to decide how to parse.
|
||||
///
|
||||
/// An important advantage over [`ParseStream::peek`] is that here we
|
||||
/// automatically construct an appropriate error message based on the token
|
||||
/// alternatives that get peeked. If you are producing your own error message,
|
||||
/// go ahead and use `ParseStream::peek` instead.
|
||||
///
|
||||
/// Use [`ParseStream::lookahead1`] to construct this object.
|
||||
///
|
||||
/// [`ParseStream::peek`]: crate::parse::ParseBuffer::peek
|
||||
/// [`ParseStream::lookahead1`]: crate::parse::ParseBuffer::lookahead1
|
||||
///
|
||||
/// Consuming tokens from the source stream after constructing a lookahead
|
||||
/// object does not also advance the lookahead object.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```
|
||||
/// use syn::{ConstParam, Ident, Lifetime, LifetimeDef, Result, Token, TypeParam};
|
||||
/// use syn::parse::{Parse, ParseStream};
|
||||
///
|
||||
/// // A generic parameter, a single one of the comma-separated elements inside
|
||||
/// // angle brackets in:
|
||||
/// //
|
||||
/// // fn f<T: Clone, 'a, 'b: 'a, const N: usize>() { ... }
|
||||
/// //
|
||||
/// // On invalid input, lookahead gives us a reasonable error message.
|
||||
/// //
|
||||
/// // error: expected one of: identifier, lifetime, `const`
|
||||
/// // |
|
||||
/// // 5 | fn f<!Sized>() {}
|
||||
/// // | ^
|
||||
/// enum GenericParam {
|
||||
/// Type(TypeParam),
|
||||
/// Lifetime(LifetimeDef),
|
||||
/// Const(ConstParam),
|
||||
/// }
|
||||
///
|
||||
/// impl Parse for GenericParam {
|
||||
/// fn parse(input: ParseStream) -> Result<Self> {
|
||||
/// let lookahead = input.lookahead1();
|
||||
/// if lookahead.peek(Ident) {
|
||||
/// input.parse().map(GenericParam::Type)
|
||||
/// } else if lookahead.peek(Lifetime) {
|
||||
/// input.parse().map(GenericParam::Lifetime)
|
||||
/// } else if lookahead.peek(Token![const]) {
|
||||
/// input.parse().map(GenericParam::Const)
|
||||
/// } else {
|
||||
/// Err(lookahead.error())
|
||||
/// }
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
pub struct Lookahead1<'a> {
    // Span used for the "unexpected end of input" error when the cursor is
    // already at end of stream.
    scope: Span,
    // Position in the token buffer being peeked; peeking does not advance it.
    cursor: Cursor<'a>,
    // Display names of token types whose peeks failed, accumulated so that
    // `error()` can build a combined "expected ..." message.
    comparisons: RefCell<Vec<&'static str>>,
}
|
||||
|
||||
// Crate-internal constructor (reached via `ParseStream::lookahead1`, per the
// doc comment above); a free function so it is not part of `Lookahead1`'s
// public method surface.
pub fn new(scope: Span, cursor: Cursor) -> Lookahead1 {
    Lookahead1 {
        scope,
        cursor,
        comparisons: RefCell::new(Vec::new()),
    }
}
|
||||
|
||||
fn peek_impl(
|
||||
lookahead: &Lookahead1,
|
||||
peek: fn(Cursor) -> bool,
|
||||
display: fn() -> &'static str,
|
||||
) -> bool {
|
||||
if peek(lookahead.cursor) {
|
||||
return true;
|
||||
}
|
||||
lookahead.comparisons.borrow_mut().push(display());
|
||||
false
|
||||
}
|
||||
|
||||
impl<'a> Lookahead1<'a> {
    /// Looks at the next token in the parse stream to determine whether it
    /// matches the requested type of token.
    ///
    /// # Syntax
    ///
    /// Note that this method does not use turbofish syntax. Pass the peek type
    /// inside of parentheses.
    ///
    /// - `input.peek(Token![struct])`
    /// - `input.peek(Token![==])`
    /// - `input.peek(Ident)` *(does not accept keywords)*
    /// - `input.peek(Ident::peek_any)`
    /// - `input.peek(Lifetime)`
    /// - `input.peek(token::Brace)`
    pub fn peek<T: Peek>(&self, token: T) -> bool {
        // The token value itself is never used; only its type matters, via
        // the associated `Token` type's `peek` and `display` functions.
        let _ = token;
        peek_impl(self, T::Token::peek, T::Token::display)
    }

    /// Triggers an error at the current position of the parse stream.
    ///
    /// The error message will identify all of the expected token types that
    /// have been peeked against this lookahead instance.
    pub fn error(self) -> Error {
        let comparisons = self.comparisons.borrow();
        match comparisons.len() {
            // Nothing was peeked against this lookahead: fall back to a
            // generic message.
            0 => {
                if self.cursor.eof() {
                    Error::new(self.scope, "unexpected end of input")
                } else {
                    Error::new(self.cursor.span(), "unexpected token")
                }
            }
            // One or two alternatives read naturally inline...
            1 => {
                let message = format!("expected {}", comparisons[0]);
                error::new_at(self.scope, self.cursor, message)
            }
            2 => {
                let message = format!("expected {} or {}", comparisons[0], comparisons[1]);
                error::new_at(self.scope, self.cursor, message)
            }
            // ...three or more are joined into a comma-separated list.
            _ => {
                let join = comparisons.join(", ");
                let message = format!("expected one of: {}", join);
                error::new_at(self.scope, self.cursor, message)
            }
        }
    }
}
|
||||
|
||||
/// Types that can be parsed by looking at just one token.
///
/// Use [`ParseStream::peek`] to peek one of these types in a parse stream
/// without consuming it from the stream.
///
/// This trait is sealed and cannot be implemented for types outside of Syn.
///
/// [`ParseStream::peek`]: crate::parse::ParseBuffer::peek
pub trait Peek: Sealed {
    // Not public API.
    //
    // The concrete token type this peekable value stands for; supplies the
    // `peek`/`display` functions used by `Lookahead1::peek`.
    #[doc(hidden)]
    type Token: Token;
}
|
||||
|
||||
// Any function pointer `fn(TokenMarker) -> T` acts as a peekable stand-in for
// the token type `T`. Since `TokenMarker` is uninhabited, such function items
// (for example the `Lifetime` function) can never actually be called.
impl<F: Copy + FnOnce(TokenMarker) -> T, T: Token> Peek for F {
    type Token = T;
}

// Uninhabited type: exists only at the type level to drive the `Peek` impl
// above; no value of it can ever be constructed.
pub enum TokenMarker {}

impl<S> IntoSpans<S> for TokenMarker {
    fn into_spans(self) -> S {
        // Unreachable: there are no values of `TokenMarker`.
        match self {}
    }
}

// Whether the next token tree is a group with the requested delimiter.
pub fn is_delimiter(cursor: Cursor, delimiter: Delimiter) -> bool {
    cursor.group(delimiter).is_some()
}

impl<F: Copy + FnOnce(TokenMarker) -> T, T: Token> Sealed for F {}
|
|
@ -0,0 +1,219 @@
|
|||
use super::*;
|
||||
use crate::token::{Brace, Bracket, Paren};
|
||||
use proc_macro2::TokenStream;
|
||||
#[cfg(feature = "parsing")]
|
||||
use proc_macro2::{Delimiter, Group, Span, TokenTree};
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
use crate::parse::{Parse, ParseStream, Parser, Result};
|
||||
|
||||
ast_struct! {
    /// A macro invocation: `println!("{}", mac)`.
    ///
    /// *This type is available only if Syn is built with the `"derive"` or `"full"`
    /// feature.*
    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub struct Macro {
        // The macro path, e.g. `println` or `std::println`.
        pub path: Path,
        // The `!` between the path and the delimited body.
        pub bang_token: Token![!],
        // Which bracketing surrounds the body: `( )`, `{ }`, or `[ ]`.
        pub delimiter: MacroDelimiter,
        // The raw tokens between the delimiters, not yet parsed into a
        // syntax tree (see `Macro::parse_body`).
        pub tokens: TokenStream,
    }
}
|
||||
|
||||
ast_enum! {
    /// A grouping token that surrounds a macro body: `m!(...)` or `m!{...}` or `m![...]`.
    ///
    /// *This type is available only if Syn is built with the `"derive"` or `"full"`
    /// feature.*
    #[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
    pub enum MacroDelimiter {
        // `m!(...)`
        Paren(Paren),
        // `m!{...}`
        Brace(Brace),
        // `m![...]`
        Bracket(Bracket),
    }
}
|
||||
|
||||
#[cfg(feature = "parsing")]
fn delimiter_span_close(macro_delimiter: &MacroDelimiter) -> Span {
    // Pull out the proc-macro delimiter kind and the delimiter tokens' span
    // in a single pass over the enum.
    let (delimiter, span) = match macro_delimiter {
        MacroDelimiter::Paren(token) => (Delimiter::Parenthesis, token.span),
        MacroDelimiter::Brace(token) => (Delimiter::Brace, token.span),
        MacroDelimiter::Bracket(token) => (Delimiter::Bracket, token.span),
    };

    // Build a throwaway group with the same delimiter and span purely so that
    // proc-macro2 can report the span of the closing delimiter character.
    let mut group = Group::new(delimiter, TokenStream::new());
    group.set_span(span);
    group.span_close()
}
|
||||
|
||||
impl Macro {
    /// Parse the tokens within the macro invocation's delimiters into a syntax
    /// tree.
    ///
    /// This is equivalent to `syn::parse2::<T>(mac.tokens)` except that it
    /// produces a more useful span when `tokens` is empty.
    ///
    /// # Example
    ///
    /// ```
    /// use syn::{parse_quote, Expr, ExprLit, Ident, Lit, LitStr, Macro, Token};
    /// use syn::ext::IdentExt;
    /// use syn::parse::{Error, Parse, ParseStream, Result};
    /// use syn::punctuated::Punctuated;
    ///
    /// // The arguments expected by libcore's format_args macro, and as a
    /// // result most other formatting and printing macros like println.
    /// //
    /// //     println!("{} is {number:.prec$}", "x", prec=5, number=0.01)
    /// struct FormatArgs {
    ///     format_string: Expr,
    ///     positional_args: Vec<Expr>,
    ///     named_args: Vec<(Ident, Expr)>,
    /// }
    ///
    /// impl Parse for FormatArgs {
    ///     fn parse(input: ParseStream) -> Result<Self> {
    ///         let format_string: Expr;
    ///         let mut positional_args = Vec::new();
    ///         let mut named_args = Vec::new();
    ///
    ///         format_string = input.parse()?;
    ///         while !input.is_empty() {
    ///             input.parse::<Token![,]>()?;
    ///             if input.is_empty() {
    ///                 break;
    ///             }
    ///             if input.peek(Ident::peek_any) && input.peek2(Token![=]) {
    ///                 while !input.is_empty() {
    ///                     let name: Ident = input.call(Ident::parse_any)?;
    ///                     input.parse::<Token![=]>()?;
    ///                     let value: Expr = input.parse()?;
    ///                     named_args.push((name, value));
    ///                     if input.is_empty() {
    ///                         break;
    ///                     }
    ///                     input.parse::<Token![,]>()?;
    ///                 }
    ///                 break;
    ///             }
    ///             positional_args.push(input.parse()?);
    ///         }
    ///
    ///         Ok(FormatArgs {
    ///             format_string,
    ///             positional_args,
    ///             named_args,
    ///         })
    ///     }
    /// }
    ///
    /// // Extract the first argument, the format string literal, from an
    /// // invocation of a formatting or printing macro.
    /// fn get_format_string(m: &Macro) -> Result<LitStr> {
    ///     let args: FormatArgs = m.parse_body()?;
    ///     match args.format_string {
    ///         Expr::Lit(ExprLit { lit: Lit::Str(lit), .. }) => Ok(lit),
    ///         other => {
    ///             // First argument was not a string literal expression.
    ///             // Maybe something like: println!(concat!(...), ...)
    ///             Err(Error::new_spanned(other, "format string must be a string literal"))
    ///         }
    ///     }
    /// }
    ///
    /// fn main() {
    ///     let invocation = parse_quote! {
    ///         println!("{:?}", Instant::now())
    ///     };
    ///     let lit = get_format_string(&invocation).unwrap();
    ///     assert_eq!(lit.value(), "{:?}");
    /// }
    /// ```
    #[cfg(feature = "parsing")]
    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    pub fn parse_body<T: Parse>(&self) -> Result<T> {
        self.parse_body_with(T::parse)
    }

    /// Parse the tokens within the macro invocation's delimiters using the
    /// given parser.
    #[cfg(feature = "parsing")]
    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    pub fn parse_body_with<F: Parser>(&self, parser: F) -> Result<F::Output> {
        // Errors are scoped to the span of the closing delimiter, which gives
        // a more useful location than the call site when `tokens` is empty
        // (see the `parse_body` doc comment above).
        let scope = delimiter_span_close(&self.delimiter);
        crate::parse::parse_scoped(parser, scope, self.tokens.clone())
    }
}
|
||||
|
||||
#[cfg(feature = "parsing")]
pub fn parse_delimiter(input: ParseStream) -> Result<(MacroDelimiter, TokenStream)> {
    input.step(|cursor| {
        // The macro body must be a single explicitly delimited group; any
        // other token — including an invisible `None`-delimited group — is
        // rejected with the same error.
        match cursor.token_tree() {
            Some((TokenTree::Group(g), rest)) => {
                let span = g.span();
                let delimiter = match g.delimiter() {
                    Delimiter::Parenthesis => MacroDelimiter::Paren(Paren(span)),
                    Delimiter::Brace => MacroDelimiter::Brace(Brace(span)),
                    Delimiter::Bracket => MacroDelimiter::Bracket(Bracket(span)),
                    Delimiter::None => return Err(cursor.error("expected delimiter")),
                };
                Ok(((delimiter, g.stream()), rest))
            }
            _ => Err(cursor.error("expected delimiter")),
        }
    })
}
|
||||
|
||||
#[cfg(feature = "parsing")]
pub mod parsing {
    use super::*;
    use crate::parse::{Parse, ParseStream, Result};

    #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
    impl Parse for Macro {
        fn parse(input: ParseStream) -> Result<Self> {
            // `parse_delimiter` produces both the delimiter kind and the
            // enclosed token stream in one step, so `tokens` is assigned from
            // inside the `delimiter` field's initializer block below.
            let tokens;
            Ok(Macro {
                path: input.call(Path::parse_mod_style)?,
                bang_token: input.parse()?,
                delimiter: {
                    let (delimiter, content) = parse_delimiter(input)?;
                    tokens = content;
                    delimiter
                },
                tokens,
            })
        }
    }
}
|
||||
|
||||
#[cfg(feature = "printing")]
mod printing {
    use super::*;
    use proc_macro2::TokenStream;
    use quote::ToTokens;

    #[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
    impl ToTokens for Macro {
        /// Print the invocation as `path`, `!`, then the body wrapped in
        /// whichever delimiter the invocation originally used.
        fn to_tokens(&self, tokens: &mut TokenStream) {
            self.path.to_tokens(tokens);
            self.bang_token.to_tokens(tokens);
            // The body printing logic is shared by every delimiter arm; only
            // the arm that matches actually consumes the closure.
            let body = |inner: &mut TokenStream| self.tokens.to_tokens(inner);
            match &self.delimiter {
                MacroDelimiter::Paren(paren) => paren.surround(tokens, body),
                MacroDelimiter::Brace(brace) => brace.surround(tokens, body),
                MacroDelimiter::Bracket(bracket) => bracket.surround(tokens, body),
            }
        }
    }
}
|
Некоторые файлы не были показаны из-за слишком большого количества измененных файлов Показать больше
Загрузка…
Ссылка в новой задаче