Mirror of https://github.com/mozilla/gecko-dev.git

Bug 1772068 - Force update paste to 1.0. r=emilio

Differential Revision: https://phabricator.services.mozilla.com/D147838

Parent: 37111304e0
Commit: 9d7c444899
@@ -3845,22 +3845,16 @@ dependencies = [

 [[package]]
 name = "paste"
-version = "0.1.18"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "45ca20c77d80be666aef2b45486da86238fabe33e38306bd3118fe4af33fa880"
+version = "0.1.999"
 dependencies = [
- "paste-impl",
- "proc-macro-hack",
+ "paste 1.0.7",
 ]

 [[package]]
-name = "paste-impl"
-version = "0.1.18"
+name = "paste"
+version = "1.0.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d95a7db200b97ef370c8e6de0088252f7e0dfff7d047a28528e47456c0fc98b6"
-dependencies = [
- "proc-macro-hack",
-]
+checksum = "0c520e05135d6e763148b6426a837e239041653ba7becd2e538c076c738025fc"

 [[package]]
 name = "peek-poke"

@@ -4341,7 +4335,7 @@ dependencies = [
  "lmdb-rkv",
  "log",
  "ordered-float",
- "paste",
+ "paste 0.1.999",
  "serde",
  "serde_derive",
  "thiserror",
@@ -123,6 +123,9 @@ tokio-util = { path = "build/rust/tokio-util" }
 # Patch env_logger 0.8 to 0.9
 env_logger = { path = "build/rust/env_logger" }

+# Patch paste 0.1 to 1.0
+paste = { path = "build/rust/paste" }
+
 # Patch autocfg to hide rustc output. Workaround for https://github.com/cuviper/autocfg/issues/30
 autocfg = { path = "third_party/rust/autocfg" }

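(Note, not part of the diff: judging from the surrounding entries, these lines sit in the crates.io override section of the top-level Cargo.toml — in Cargo terms a `[patch.crates-io]` table, although the hunk header does not show the table name, so that name is an assumption here. The general shape of such an override is:)

```toml
# Illustrative only; the table name is assumed, the entry matches the hunk above.
[patch.crates-io]
paste = { path = "build/rust/paste" }  # build/rust/paste declares itself as paste 0.1.999
```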
@@ -0,0 +1,11 @@
+[package]
+name = "paste"
+version = "0.1.999"
+edition = "2018"
+license = "MPL-2.0"
+
+[lib]
+path = "lib.rs"
+
+[dependencies]
+paste = "1.0"
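(Note, not part of the diff: the shim's version number 0.1.999 is what lets the override satisfy every existing `paste = "0.1"` requirement in the tree without editing each consumer — under Cargo's caret semantics any 0.1.x requirement accepts 0.1.999, and the unusual patch number makes the shim easy to spot in Cargo.lock. A hypothetical consumer, for illustration:)

```toml
# Hypothetical downstream crate, not taken from the tree.
[dependencies]
paste = "0.1"  # resolves to the local 0.1.999 shim, which in turn depends on the real paste 1.0
```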
@@ -0,0 +1,5 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+pub use paste::*;
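(Note, not part of the diff: because the shim simply re-exports everything from paste 1.0, crates that asked for paste 0.1 now see the 1.0 API, including the unified `paste!` macro. A minimal sketch of what a caller can do through the shim, adapted from the README example later in this diff:)

```rust
use paste::paste;

paste! {
    // Item position: defines a const named QRST.
    const [<Q R S T>]: &str = "success!";
}

fn main() {
    // Expression position works with the same macro.
    assert_eq!(paste! { [<Q R S T>].len() }, 8);
}
```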
@@ -1 +0,0 @@
(deleted: the machine-generated .cargo-checksum.json manifest for the vendored paste-impl 0.1.18 crate; per-file hashes omitted)
@@ -1,27 +0,0 @@
-# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
-#
-# When uploading crates to the registry Cargo will automatically
-# "normalize" Cargo.toml files for maximal compatibility
-# with all versions of Cargo and also rewrite `path` dependencies
-# to registry (e.g., crates.io) dependencies
-#
-# If you believe there's an error in this file please file an
-# issue against the rust-lang/cargo repository. If you're
-# editing this file be aware that the upstream Cargo.toml
-# will likely look very different (and much more reasonable)
-
-[package]
-edition = "2018"
-name = "paste-impl"
-version = "0.1.18"
-authors = ["David Tolnay <dtolnay@gmail.com>"]
-description = "Implementation detail of the `paste` crate"
-license = "MIT OR Apache-2.0"
-repository = "https://github.com/dtolnay/paste"
-[package.metadata.docs.rs]
-targets = ["x86_64-unknown-linux-gnu"]
-
-[lib]
-proc-macro = true
-[dependencies.proc-macro-hack]
-version = "0.5"
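(Note, not part of the diff: the two-crate setup above is why paste 0.1 can be dropped wholesale — 0.1 had to ship its proc macros in the separate paste-impl crate and reach expression position through proc-macro-hack, while paste 1.0 is a single proc-macro crate. Old 0.1-style call sites still build, because 1.0 keeps `item!` and a hidden `expr!` as thin wrappers around `paste!`, as the updated src/lib.rs later in this diff shows. Illustrative only, assuming a toolchain new enough to allow function-like proc macros in expression position:)

```rust
// 0.1-style invocations, compiled against the 1.0 crate via the shim.
paste::item! {
    const [<ROUTE_ A>]: &str = "/a";
}

fn main() {
    assert_eq!(paste::expr! { [<ROUTE_ A>] }, "/a");
}
```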
@@ -1,201 +0,0 @@
(deleted: LICENSE-APACHE for the vendored paste-impl crate; the standard, unmodified Apache License 2.0 text is omitted here)
@@ -1,25 +0,0 @@
(deleted: LICENSE-MIT for the vendored paste-impl crate; the standard MIT license text, "Copyright (c) 2018", is omitted here)
@ -1,86 +0,0 @@
|
|||
use proc_macro::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
|
||||
use std::collections::hash_map::DefaultHasher;
|
||||
use std::hash::{Hash, Hasher};
|
||||
use std::iter::FromIterator;
|
||||
|
||||
pub fn wrap(output: TokenStream) -> TokenStream {
|
||||
let mut hasher = DefaultHasher::default();
|
||||
output.to_string().hash(&mut hasher);
|
||||
let mangled_name = format!("_paste_{}", hasher.finish());
|
||||
let ident = Ident::new(&mangled_name, Span::call_site());
|
||||
|
||||
// #[derive(paste::EnumHack)]
|
||||
// enum #ident {
|
||||
// Value = (stringify! {
|
||||
// #output
|
||||
// }, 0).1,
|
||||
// }
|
||||
TokenStream::from_iter(vec![
|
||||
TokenTree::Punct(Punct::new('#', Spacing::Alone)),
|
||||
TokenTree::Group(Group::new(
|
||||
Delimiter::Bracket,
|
||||
TokenStream::from_iter(vec![
|
||||
TokenTree::Ident(Ident::new("derive", Span::call_site())),
|
||||
TokenTree::Group(Group::new(
|
||||
Delimiter::Parenthesis,
|
||||
TokenStream::from_iter(vec![
|
||||
TokenTree::Ident(Ident::new("paste", Span::call_site())),
|
||||
TokenTree::Punct(Punct::new(':', Spacing::Joint)),
|
||||
TokenTree::Punct(Punct::new(':', Spacing::Alone)),
|
||||
TokenTree::Ident(Ident::new("EnumHack", Span::call_site())),
|
||||
]),
|
||||
)),
|
||||
]),
|
||||
)),
|
||||
TokenTree::Ident(Ident::new("enum", Span::call_site())),
|
||||
TokenTree::Ident(ident),
|
||||
TokenTree::Group(Group::new(
|
||||
Delimiter::Brace,
|
||||
TokenStream::from_iter(vec![
|
||||
TokenTree::Ident(Ident::new("Value", Span::call_site())),
|
||||
TokenTree::Punct(Punct::new('=', Spacing::Alone)),
|
||||
TokenTree::Group(Group::new(
|
||||
Delimiter::Parenthesis,
|
||||
TokenStream::from_iter(vec![
|
||||
TokenTree::Ident(Ident::new("stringify", Span::call_site())),
|
||||
TokenTree::Punct(Punct::new('!', Spacing::Alone)),
|
||||
TokenTree::Group(Group::new(Delimiter::Brace, output)),
|
||||
TokenTree::Punct(Punct::new(',', Spacing::Alone)),
|
||||
TokenTree::Literal(Literal::usize_unsuffixed(0)),
|
||||
]),
|
||||
)),
|
||||
TokenTree::Punct(Punct::new('.', Spacing::Alone)),
|
||||
TokenTree::Literal(Literal::usize_unsuffixed(1)),
|
||||
TokenTree::Punct(Punct::new(',', Spacing::Alone)),
|
||||
]),
|
||||
)),
|
||||
])
|
||||
}
|
||||
|
||||
pub fn extract(input: TokenStream) -> TokenStream {
|
||||
let mut tokens = input.into_iter();
|
||||
let _ = tokens.next().expect("enum");
|
||||
let _ = tokens.next().expect("#ident");
|
||||
let mut braces = match tokens.next().expect("{...}") {
|
||||
TokenTree::Group(group) => group.stream().into_iter(),
|
||||
_ => unreachable!("{...}"),
|
||||
};
|
||||
let _ = braces.next().expect("Value");
|
||||
let _ = braces.next().expect("=");
|
||||
let mut parens = match braces.next().expect("(...)") {
|
||||
TokenTree::Group(group) => group.stream().into_iter(),
|
||||
_ => unreachable!("(...)"),
|
||||
};
|
||||
let _ = parens.next().expect("stringify");
|
||||
let _ = parens.next().expect("!");
|
||||
let token_stream = match parens.next().expect("{...}") {
|
||||
TokenTree::Group(group) => group.stream(),
|
||||
_ => unreachable!("{...}"),
|
||||
};
|
||||
let _ = parens.next().expect(",");
|
||||
let _ = parens.next().expect("0");
|
||||
let _ = braces.next().expect(".");
|
||||
let _ = braces.next().expect("1");
|
||||
let _ = braces.next().expect(",");
|
||||
token_stream
|
||||
}
|
|
@ -1,443 +0,0 @@
|
|||
extern crate proc_macro;
|
||||
|
||||
mod enum_hack;
|
||||
mod error;
|
||||
|
||||
use crate::error::{Error, Result};
|
||||
use proc_macro::{
|
||||
token_stream, Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree,
|
||||
};
|
||||
use proc_macro_hack::proc_macro_hack;
|
||||
use std::iter::{self, FromIterator, Peekable};
|
||||
use std::panic;
|
||||
|
||||
#[proc_macro]
|
||||
pub fn item(input: TokenStream) -> TokenStream {
|
||||
expand_paste(input)
|
||||
}
|
||||
|
||||
#[proc_macro]
|
||||
pub fn item_with_macros(input: TokenStream) -> TokenStream {
|
||||
enum_hack::wrap(expand_paste(input))
|
||||
}
|
||||
|
||||
#[proc_macro_hack]
|
||||
pub fn expr(input: TokenStream) -> TokenStream {
|
||||
TokenStream::from(TokenTree::Group(Group::new(
|
||||
Delimiter::Brace,
|
||||
expand_paste(input),
|
||||
)))
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
#[proc_macro_derive(EnumHack)]
|
||||
pub fn enum_hack(input: TokenStream) -> TokenStream {
|
||||
enum_hack::extract(input)
|
||||
}
|
||||
|
||||
fn expand_paste(input: TokenStream) -> TokenStream {
|
||||
let mut contains_paste = false;
|
||||
match expand(input, &mut contains_paste) {
|
||||
Ok(expanded) => expanded,
|
||||
Err(err) => err.to_compile_error(),
|
||||
}
|
||||
}
|
||||
|
||||
fn expand(input: TokenStream, contains_paste: &mut bool) -> Result<TokenStream> {
|
||||
let mut expanded = TokenStream::new();
|
||||
let (mut prev_colon, mut colon) = (false, false);
|
||||
let mut prev_none_group = None::<Group>;
|
||||
let mut tokens = input.into_iter().peekable();
|
||||
loop {
|
||||
let token = tokens.next();
|
||||
if let Some(group) = prev_none_group.take() {
|
||||
if match (&token, tokens.peek()) {
|
||||
(Some(TokenTree::Punct(fst)), Some(TokenTree::Punct(snd))) => {
|
||||
fst.as_char() == ':' && snd.as_char() == ':' && fst.spacing() == Spacing::Joint
|
||||
}
|
||||
_ => false,
|
||||
} {
|
||||
expanded.extend(group.stream());
|
||||
*contains_paste = true;
|
||||
} else {
|
||||
expanded.extend(iter::once(TokenTree::Group(group)));
|
||||
}
|
||||
}
|
||||
match token {
|
||||
Some(TokenTree::Group(group)) => {
|
||||
let delimiter = group.delimiter();
|
||||
let content = group.stream();
|
||||
let span = group.span();
|
||||
if delimiter == Delimiter::Bracket && is_paste_operation(&content) {
|
||||
let segments = parse_bracket_as_segments(content, span)?;
|
||||
let pasted = paste_segments(span, &segments)?;
|
||||
expanded.extend(pasted);
|
||||
*contains_paste = true;
|
||||
} else if is_none_delimited_flat_group(delimiter, &content) {
|
||||
expanded.extend(content);
|
||||
*contains_paste = true;
|
||||
} else {
|
||||
let mut group_contains_paste = false;
|
||||
let nested = expand(content, &mut group_contains_paste)?;
|
||||
let group = if group_contains_paste {
|
||||
let mut group = Group::new(delimiter, nested);
|
||||
group.set_span(span);
|
||||
*contains_paste = true;
|
||||
group
|
||||
} else {
|
||||
group.clone()
|
||||
};
|
||||
if delimiter != Delimiter::None {
|
||||
expanded.extend(iter::once(TokenTree::Group(group)));
|
||||
} else if prev_colon {
|
||||
expanded.extend(group.stream());
|
||||
*contains_paste = true;
|
||||
} else {
|
||||
prev_none_group = Some(group);
|
||||
}
|
||||
}
|
||||
prev_colon = false;
|
||||
colon = false;
|
||||
}
|
||||
Some(other) => {
|
||||
match &other {
|
||||
TokenTree::Punct(punct) if punct.as_char() == ':' => {
|
||||
prev_colon = colon;
|
||||
colon = punct.spacing() == Spacing::Joint;
|
||||
}
|
||||
_ => {
|
||||
prev_colon = false;
|
||||
colon = false;
|
||||
}
|
||||
}
|
||||
expanded.extend(iter::once(other));
|
||||
}
|
||||
None => return Ok(expanded),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// https://github.com/dtolnay/paste/issues/26
|
||||
fn is_none_delimited_flat_group(delimiter: Delimiter, input: &TokenStream) -> bool {
|
||||
if delimiter != Delimiter::None {
|
||||
return false;
|
||||
}
|
||||
|
||||
#[derive(PartialEq)]
|
||||
enum State {
|
||||
Init,
|
||||
Ident,
|
||||
Literal,
|
||||
Apostrophe,
|
||||
Lifetime,
|
||||
Colon1,
|
||||
Colon2,
|
||||
}
|
||||
|
||||
let mut state = State::Init;
|
||||
for tt in input.clone() {
|
||||
state = match (state, &tt) {
|
||||
(State::Init, TokenTree::Ident(_)) => State::Ident,
|
||||
(State::Init, TokenTree::Literal(_)) => State::Literal,
|
||||
(State::Init, TokenTree::Punct(punct)) if punct.as_char() == '\'' => State::Apostrophe,
|
||||
(State::Apostrophe, TokenTree::Ident(_)) => State::Lifetime,
|
||||
(State::Ident, TokenTree::Punct(punct))
|
||||
if punct.as_char() == ':' && punct.spacing() == Spacing::Joint =>
|
||||
{
|
||||
State::Colon1
|
||||
}
|
||||
(State::Colon1, TokenTree::Punct(punct))
|
||||
if punct.as_char() == ':' && punct.spacing() == Spacing::Alone =>
|
||||
{
|
||||
State::Colon2
|
||||
}
|
||||
(State::Colon2, TokenTree::Ident(_)) => State::Ident,
|
||||
_ => return false,
|
||||
};
|
||||
}
|
||||
|
||||
state == State::Ident || state == State::Literal || state == State::Lifetime
|
||||
}
|
||||
|
||||
struct LitStr {
|
||||
value: String,
|
||||
span: Span,
|
||||
}
|
||||
|
||||
struct Colon {
|
||||
span: Span,
|
||||
}
|
||||
|
||||
enum Segment {
|
||||
String(String),
|
||||
Apostrophe(Span),
|
||||
Env(LitStr),
|
||||
Modifier(Colon, Ident),
|
||||
}
|
||||
|
||||
fn is_paste_operation(input: &TokenStream) -> bool {
|
||||
let mut tokens = input.clone().into_iter();
|
||||
|
||||
match &tokens.next() {
|
||||
Some(TokenTree::Punct(punct)) if punct.as_char() == '<' => {}
|
||||
_ => return false,
|
||||
}
|
||||
|
||||
let mut has_token = false;
|
||||
loop {
|
||||
match &tokens.next() {
|
||||
Some(TokenTree::Punct(punct)) if punct.as_char() == '>' => {
|
||||
return has_token && tokens.next().is_none();
|
||||
}
|
||||
Some(_) => has_token = true,
|
||||
None => return false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_bracket_as_segments(input: TokenStream, scope: Span) -> Result<Vec<Segment>> {
|
||||
let mut tokens = input.into_iter().peekable();
|
||||
|
||||
match &tokens.next() {
|
||||
Some(TokenTree::Punct(punct)) if punct.as_char() == '<' => {}
|
||||
Some(wrong) => return Err(Error::new(wrong.span(), "expected `<`")),
|
||||
None => return Err(Error::new(scope, "expected `[< ... >]`")),
|
||||
}
|
||||
|
||||
let segments = parse_segments(&mut tokens, scope)?;
|
||||
|
||||
match &tokens.next() {
|
||||
Some(TokenTree::Punct(punct)) if punct.as_char() == '>' => {}
|
||||
Some(wrong) => return Err(Error::new(wrong.span(), "expected `>`")),
|
||||
None => return Err(Error::new(scope, "expected `[< ... >]`")),
|
||||
}
|
||||
|
||||
match tokens.next() {
|
||||
Some(unexpected) => Err(Error::new(
|
||||
unexpected.span(),
|
||||
"unexpected input, expected `[< ... >]`",
|
||||
)),
|
||||
None => Ok(segments),
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_segments(
|
||||
tokens: &mut Peekable<token_stream::IntoIter>,
|
||||
scope: Span,
|
||||
) -> Result<Vec<Segment>> {
|
||||
let mut segments = Vec::new();
|
||||
while match tokens.peek() {
|
||||
None => false,
|
||||
Some(TokenTree::Punct(punct)) => punct.as_char() != '>',
|
||||
Some(_) => true,
|
||||
} {
|
||||
match tokens.next().unwrap() {
|
||||
TokenTree::Ident(ident) => {
|
||||
let mut fragment = ident.to_string();
|
||||
if fragment.starts_with("r#") {
|
||||
fragment = fragment.split_off(2);
|
||||
}
|
||||
if fragment == "env"
|
||||
&& match tokens.peek() {
|
||||
Some(TokenTree::Punct(punct)) => punct.as_char() == '!',
|
||||
_ => false,
|
||||
}
|
||||
{
|
||||
tokens.next().unwrap(); // `!`
|
||||
let expect_group = tokens.next();
|
||||
let parenthesized = match &expect_group {
|
||||
Some(TokenTree::Group(group))
|
||||
if group.delimiter() == Delimiter::Parenthesis =>
|
||||
{
|
||||
group
|
||||
}
|
||||
Some(wrong) => return Err(Error::new(wrong.span(), "expected `(`")),
|
||||
None => return Err(Error::new(scope, "expected `(` after `env!`")),
|
||||
};
|
||||
let mut inner = parenthesized.stream().into_iter();
|
||||
let lit = match inner.next() {
|
||||
Some(TokenTree::Literal(lit)) => lit,
|
||||
Some(wrong) => {
|
||||
return Err(Error::new(wrong.span(), "expected string literal"))
|
||||
}
|
||||
None => {
|
||||
return Err(Error::new2(
|
||||
ident.span(),
|
||||
parenthesized.span(),
|
||||
"expected string literal as argument to env! macro",
|
||||
))
|
||||
}
|
||||
};
|
||||
let lit_string = lit.to_string();
|
||||
if lit_string.starts_with('"')
|
||||
&& lit_string.ends_with('"')
|
||||
&& lit_string.len() >= 2
|
||||
{
|
||||
// TODO: maybe handle escape sequences in the string if
|
||||
// someone has a use case.
|
||||
segments.push(Segment::Env(LitStr {
|
||||
value: lit_string[1..lit_string.len() - 1].to_owned(),
|
||||
span: lit.span(),
|
||||
}));
|
||||
} else {
|
||||
return Err(Error::new(lit.span(), "expected string literal"));
|
||||
}
|
||||
if let Some(unexpected) = inner.next() {
|
||||
return Err(Error::new(
|
||||
unexpected.span(),
|
||||
"unexpected token in env! macro",
|
||||
));
|
||||
}
|
||||
} else {
|
||||
segments.push(Segment::String(fragment));
|
||||
}
|
||||
}
|
||||
TokenTree::Literal(lit) => {
|
||||
let mut lit_string = lit.to_string();
|
||||
if lit_string.contains(&['#', '\\', '.', '+'][..]) {
|
||||
return Err(Error::new(lit.span(), "unsupported literal"));
|
||||
}
|
||||
lit_string = lit_string
|
||||
.replace('"', "")
|
||||
.replace('\'', "")
|
||||
.replace('-', "_");
|
||||
segments.push(Segment::String(lit_string));
|
||||
}
|
||||
TokenTree::Punct(punct) => match punct.as_char() {
|
||||
'_' => segments.push(Segment::String("_".to_owned())),
|
||||
'\'' => segments.push(Segment::Apostrophe(punct.span())),
|
||||
':' => {
|
||||
let colon = Colon { span: punct.span() };
|
||||
let ident = match tokens.next() {
|
||||
Some(TokenTree::Ident(ident)) => ident,
|
||||
wrong => {
|
||||
let span = wrong.as_ref().map_or(scope, TokenTree::span);
|
||||
return Err(Error::new(span, "expected identifier after `:`"));
|
||||
}
|
||||
};
|
||||
segments.push(Segment::Modifier(colon, ident));
|
||||
}
|
||||
_ => return Err(Error::new(punct.span(), "unexpected punct")),
|
||||
},
|
||||
TokenTree::Group(group) => {
|
||||
if group.delimiter() == Delimiter::None {
|
||||
let mut inner = group.stream().into_iter().peekable();
|
||||
let nested = parse_segments(&mut inner, group.span())?;
|
||||
if let Some(unexpected) = inner.next() {
|
||||
return Err(Error::new(unexpected.span(), "unexpected token"));
|
||||
}
|
||||
segments.extend(nested);
|
||||
} else {
|
||||
return Err(Error::new(group.span(), "unexpected token"));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(segments)
|
||||
}
|
||||
|
||||
fn paste_segments(span: Span, segments: &[Segment]) -> Result<TokenStream> {
|
||||
let mut evaluated = Vec::new();
|
||||
let mut is_lifetime = false;
|
||||
|
||||
for segment in segments {
|
||||
match segment {
|
||||
Segment::String(segment) => {
|
||||
evaluated.push(segment.clone());
|
||||
}
|
||||
Segment::Apostrophe(span) => {
|
||||
if is_lifetime {
|
||||
return Err(Error::new(*span, "unexpected lifetime"));
|
||||
}
|
||||
is_lifetime = true;
|
||||
}
|
||||
Segment::Env(var) => {
|
||||
let resolved = match std::env::var(&var.value) {
|
||||
Ok(resolved) => resolved,
|
||||
Err(_) => {
|
||||
return Err(Error::new(
|
||||
var.span,
|
||||
&format!("no such env var: {:?}", var.value),
|
||||
));
|
||||
}
|
||||
};
|
||||
let resolved = resolved.replace('-', "_");
|
||||
evaluated.push(resolved);
|
||||
}
|
||||
Segment::Modifier(colon, ident) => {
|
||||
let last = match evaluated.pop() {
|
||||
Some(last) => last,
|
||||
None => {
|
||||
return Err(Error::new2(colon.span, ident.span(), "unexpected modifier"))
|
||||
}
|
||||
};
|
||||
match ident.to_string().as_str() {
|
||||
"lower" => {
|
||||
evaluated.push(last.to_lowercase());
|
||||
}
|
||||
"upper" => {
|
||||
evaluated.push(last.to_uppercase());
|
||||
}
|
||||
"snake" => {
|
||||
let mut acc = String::new();
|
||||
let mut prev = '_';
|
||||
for ch in last.chars() {
|
||||
if ch.is_uppercase() && prev != '_' {
|
||||
acc.push('_');
|
||||
}
|
||||
acc.push(ch);
|
||||
prev = ch;
|
||||
}
|
||||
evaluated.push(acc.to_lowercase());
|
||||
}
|
||||
"camel" => {
|
||||
let mut acc = String::new();
|
||||
let mut prev = '_';
|
||||
for ch in last.chars() {
|
||||
if ch != '_' {
|
||||
if prev == '_' {
|
||||
for chu in ch.to_uppercase() {
|
||||
acc.push(chu);
|
||||
}
|
||||
} else if prev.is_uppercase() {
|
||||
for chl in ch.to_lowercase() {
|
||||
acc.push(chl);
|
||||
}
|
||||
} else {
|
||||
acc.push(ch);
|
||||
}
|
||||
}
|
||||
prev = ch;
|
||||
}
|
||||
evaluated.push(acc);
|
||||
}
|
||||
_ => {
|
||||
return Err(Error::new2(
|
||||
colon.span,
|
||||
ident.span(),
|
||||
"unsupported modifier",
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let pasted = evaluated.into_iter().collect::<String>();
|
||||
let ident = match panic::catch_unwind(|| Ident::new(&pasted, span)) {
|
||||
Ok(ident) => TokenTree::Ident(ident),
|
||||
Err(_) => {
|
||||
return Err(Error::new(
|
||||
span,
|
||||
&format!("`{:?}` is not a valid identifier", pasted),
|
||||
));
|
||||
}
|
||||
};
|
||||
let tokens = if is_lifetime {
|
||||
let apostrophe = TokenTree::Punct(Punct::new('\'', Spacing::Joint));
|
||||
vec![apostrophe, ident]
|
||||
} else {
|
||||
vec![ident]
|
||||
};
|
||||
Ok(TokenStream::from_iter(tokens))
|
||||
}
|
|
@@ -1 +1 @@
(updated: the machine-generated .cargo-checksum.json manifest for the vendored paste crate, regenerated for the 1.0.7 package; per-file hashes omitted)
@ -3,31 +3,36 @@
|
|||
# When uploading crates to the registry Cargo will automatically
|
||||
# "normalize" Cargo.toml files for maximal compatibility
|
||||
# with all versions of Cargo and also rewrite `path` dependencies
|
||||
# to registry (e.g., crates.io) dependencies
|
||||
# to registry (e.g., crates.io) dependencies.
|
||||
#
|
||||
# If you believe there's an error in this file please file an
|
||||
# issue against the rust-lang/cargo repository. If you're
|
||||
# editing this file be aware that the upstream Cargo.toml
|
||||
# will likely look very different (and much more reasonable)
|
||||
# If you are reading this file be aware that the original Cargo.toml
|
||||
# will likely look very different (and much more reasonable).
|
||||
# See Cargo.toml.orig for the original contents.
|
||||
|
||||
[package]
|
||||
edition = "2018"
|
||||
rust-version = "1.31"
|
||||
name = "paste"
|
||||
version = "0.1.18"
|
||||
version = "1.0.7"
|
||||
authors = ["David Tolnay <dtolnay@gmail.com>"]
|
||||
description = "Macros for all your token pasting needs"
|
||||
readme = "README.md"
|
||||
categories = ["no-std"]
|
||||
license = "MIT OR Apache-2.0"
|
||||
repository = "https://github.com/dtolnay/paste"
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
targets = ["x86_64-unknown-linux-gnu"]
|
||||
[dependencies.paste-impl]
|
||||
version = "=0.1.18"
|
||||
|
||||
[dependencies.proc-macro-hack]
|
||||
version = "0.5.9"
|
||||
[lib]
|
||||
proc-macro = true
|
||||
|
||||
[dev-dependencies.paste-test-suite]
|
||||
version = "0"
|
||||
|
||||
[dev-dependencies.rustversion]
|
||||
version = "1.0"
|
||||
|
||||
[dev-dependencies.trybuild]
|
||||
version = "1.0"
|
||||
version = "1.0.49"
|
||||
features = ["diff"]
|
||||
|
|
|
@ -17,32 +17,29 @@ including using pasted identifiers to define new items.
|
|||
|
||||
```toml
|
||||
[dependencies]
|
||||
paste = "0.1"
|
||||
paste = "1.0"
|
||||
```
|
||||
|
||||
This approach works with any stable or nightly Rust compiler 1.30+.
|
||||
This approach works with any Rust compiler 1.31+.
|
||||
|
||||
<br>
|
||||
|
||||
## Pasting identifiers
|
||||
|
||||
There are two entry points, `paste::expr!` for macros in expression position and
|
||||
`paste::item!` for macros in item position.
|
||||
|
||||
Within either one, identifiers inside `[<`...`>]` are pasted together to form a
|
||||
single identifier.
|
||||
Within the `paste!` macro, identifiers inside `[<`...`>]` are pasted together to
|
||||
form a single identifier.
|
||||
|
||||
```rust
|
||||
// Macro in item position: at module scope or inside of an impl block.
|
||||
paste::item! {
|
||||
use paste::paste;
|
||||
|
||||
paste! {
|
||||
// Defines a const called `QRST`.
|
||||
const [<Q R S T>]: &str = "success!";
|
||||
}
|
||||
|
||||
fn main() {
|
||||
// Macro in expression position: inside a function body.
|
||||
assert_eq!(
|
||||
paste::expr! { [<Q R S T>].len() },
|
||||
paste! { [<Q R S T>].len() },
|
||||
8,
|
||||
);
|
||||
}
|
||||
|
@ -50,34 +47,15 @@ fn main() {
|
|||
|
||||
<br>
|
||||
|
||||
## More elaborate examples
|
||||
|
||||
This program demonstrates how you may want to bundle a paste invocation inside
|
||||
of a more convenient user-facing macro of your own. Here the `routes!(A, B)`
|
||||
macro expands to a vector containing `ROUTE_A` and `ROUTE_B`.
|
||||
|
||||
```rust
|
||||
const ROUTE_A: &str = "/a";
|
||||
const ROUTE_B: &str = "/b";
|
||||
|
||||
macro_rules! routes {
|
||||
($($route:ident),*) => {{
|
||||
paste::expr! {
|
||||
vec![$( [<ROUTE_ $route>] ),*]
|
||||
}
|
||||
}}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
let routes = routes!(A, B);
|
||||
assert_eq!(routes, vec!["/a", "/b"]);
|
||||
}
|
||||
```
|
||||
## More elaborate example
|
||||
|
||||
The next example shows a macro that generates accessor methods for some struct
|
||||
fields.
|
||||
fields. It demonstrates how you might find it useful to bundle a paste
|
||||
invocation inside of a macro\_rules macro.
|
||||
|
||||
```rust
|
||||
use paste::paste;
|
||||
|
||||
macro_rules! make_a_struct_and_getters {
|
||||
($name:ident { $($field:ident),* }) => {
|
||||
// Define a struct. This expands to:
|
||||
|
@ -100,7 +78,7 @@ macro_rules! make_a_struct_and_getters {
|
|||
// pub fn get_b(&self) -> &str { &self.b }
|
||||
// pub fn get_c(&self) -> &str { &self.c }
|
||||
// }
|
||||
paste::item! {
|
||||
paste! {
|
||||
impl $name {
|
||||
$(
|
||||
pub fn [<get_ $field>](&self) -> &str {
|
||||
|
@ -139,6 +117,30 @@ The precise Unicode conversions are as defined by [`str::to_lowercase`] and
|
|||
|
||||
<br>
|
||||
|
||||
## Pasting documentation strings
|
||||
|
||||
Within the `paste!` macro, arguments to a #\[doc ...\] attribute are implicitly
|
||||
concatenated together to form a coherent documentation string.
|
||||
|
||||
```rust
|
||||
use paste::paste;
|
||||
|
||||
macro_rules! method_new {
|
||||
($ret:ident) => {
|
||||
paste! {
|
||||
#[doc = "Create a new `" $ret "` object."]
|
||||
pub fn new() -> $ret { todo!() }
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
pub struct Paste {}
|
||||
|
||||
method_new!(Paste); // expands to #[doc = "Create a new `Paste` object"]
|
||||
```
|
||||
|
||||
<br>
|
||||
|
||||
#### License
|
||||
|
||||
<sup>
|
||||
|
|
|
@ -0,0 +1,164 @@
|
|||
use crate::error::Result;
|
||||
use crate::segment::{self, Segment};
|
||||
use proc_macro::{Delimiter, Group, Spacing, Span, TokenStream, TokenTree};
|
||||
use std::iter;
|
||||
use std::mem;
|
||||
use std::str::FromStr;
|
||||
|
||||
pub fn expand_attr(
|
||||
attr: TokenStream,
|
||||
span: Span,
|
||||
contains_paste: &mut bool,
|
||||
) -> Result<TokenStream> {
|
||||
let mut tokens = attr.clone().into_iter();
|
||||
let mut leading_colons = 0; // $(::)?
|
||||
let mut leading_path = 0; // $($ident)::+
|
||||
|
||||
let mut token;
|
||||
let group = loop {
|
||||
token = tokens.next();
|
||||
match token {
|
||||
// colon after `$(:)?`
|
||||
Some(TokenTree::Punct(ref punct))
|
||||
if punct.as_char() == ':' && leading_colons < 2 && leading_path == 0 =>
|
||||
{
|
||||
leading_colons += 1;
|
||||
}
|
||||
// ident after `$(::)? $($ident ::)*`
|
||||
Some(TokenTree::Ident(_)) if leading_colons != 1 && leading_path % 3 == 0 => {
|
||||
leading_path += 1;
|
||||
}
|
||||
// colon after `$(::)? $($ident ::)* $ident $(:)?`
|
||||
Some(TokenTree::Punct(ref punct)) if punct.as_char() == ':' && leading_path % 3 > 0 => {
|
||||
leading_path += 1;
|
||||
}
|
||||
// eq+value after `$(::)? $($ident)::+`
|
||||
Some(TokenTree::Punct(ref punct))
|
||||
if punct.as_char() == '=' && leading_path % 3 == 1 =>
|
||||
{
|
||||
let mut count = 0;
|
||||
if tokens.inspect(|_| count += 1).all(|tt| is_stringlike(&tt)) && count > 1 {
|
||||
*contains_paste = true;
|
||||
let leading = leading_colons + leading_path;
|
||||
return do_paste_name_value_attr(attr, span, leading);
|
||||
}
|
||||
return Ok(attr);
|
||||
}
|
||||
// parens after `$(::)? $($ident)::+`
|
||||
Some(TokenTree::Group(ref group))
|
||||
if group.delimiter() == Delimiter::Parenthesis && leading_path % 3 == 1 =>
|
||||
{
|
||||
break group;
|
||||
}
|
||||
// bail out
|
||||
_ => return Ok(attr),
|
||||
}
|
||||
};
|
||||
|
||||
// There can't be anything else after the first group in a valid attribute.
|
||||
if tokens.next().is_some() {
|
||||
return Ok(attr);
|
||||
}
|
||||
|
||||
let mut group_contains_paste = false;
|
||||
let mut expanded = TokenStream::new();
|
||||
let mut nested_attr = TokenStream::new();
|
||||
for tt in group.stream() {
|
||||
match &tt {
|
||||
TokenTree::Punct(punct) if punct.as_char() == ',' => {
|
||||
expanded.extend(expand_attr(
|
||||
nested_attr,
|
||||
group.span(),
|
||||
&mut group_contains_paste,
|
||||
)?);
|
||||
expanded.extend(iter::once(tt));
|
||||
nested_attr = TokenStream::new();
|
||||
}
|
||||
_ => nested_attr.extend(iter::once(tt)),
|
||||
}
|
||||
}
|
||||
|
||||
if !nested_attr.is_empty() {
|
||||
expanded.extend(expand_attr(
|
||||
nested_attr,
|
||||
group.span(),
|
||||
&mut group_contains_paste,
|
||||
)?);
|
||||
}
|
||||
|
||||
if group_contains_paste {
|
||||
*contains_paste = true;
|
||||
let mut group = Group::new(Delimiter::Parenthesis, expanded);
|
||||
group.set_span(span);
|
||||
Ok(attr
|
||||
.into_iter()
|
||||
// Just keep the initial ident in `#[ident(...)]`.
|
||||
.take(leading_colons + leading_path)
|
||||
.chain(iter::once(TokenTree::Group(group)))
|
||||
.collect())
|
||||
} else {
|
||||
Ok(attr)
|
||||
}
|
||||
}
|
||||
|
||||
fn do_paste_name_value_attr(attr: TokenStream, span: Span, leading: usize) -> Result<TokenStream> {
|
||||
let mut expanded = TokenStream::new();
|
||||
let mut tokens = attr.into_iter().peekable();
|
||||
expanded.extend(tokens.by_ref().take(leading + 1)); // `doc =`
|
||||
|
||||
let mut segments = segment::parse(&mut tokens)?;
|
||||
|
||||
for segment in &mut segments {
|
||||
if let Segment::String(string) = segment {
|
||||
if let Some(open_quote) = string.value.find('"') {
|
||||
if open_quote == 0 {
|
||||
string.value.truncate(string.value.len() - 1);
|
||||
string.value.remove(0);
|
||||
} else {
|
||||
let begin = open_quote + 1;
|
||||
let end = string.value.rfind('"').unwrap();
|
||||
let raw_string = mem::replace(&mut string.value, String::new());
|
||||
for ch in raw_string[begin..end].chars() {
|
||||
string.value.extend(ch.escape_default());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut lit = segment::paste(&segments)?;
|
||||
lit.insert(0, '"');
|
||||
lit.push('"');
|
||||
|
||||
let mut lit = TokenStream::from_str(&lit)
|
||||
.unwrap()
|
||||
.into_iter()
|
||||
.next()
|
||||
.unwrap();
|
||||
lit.set_span(span);
|
||||
expanded.extend(iter::once(lit));
|
||||
Ok(expanded)
|
||||
}
|
||||
|
||||
fn is_stringlike(token: &TokenTree) -> bool {
|
||||
match token {
|
||||
TokenTree::Ident(_) => true,
|
||||
TokenTree::Literal(literal) => {
|
||||
let repr = literal.to_string();
|
||||
!repr.starts_with('b') && !repr.starts_with('\'')
|
||||
}
|
||||
TokenTree::Group(group) => {
|
||||
if group.delimiter() != Delimiter::None {
|
||||
return false;
|
||||
}
|
||||
let mut inner = group.stream().into_iter();
|
||||
match inner.next() {
|
||||
Some(first) => inner.next().is_none() && is_stringlike(&first),
|
||||
None => false,
|
||||
}
|
||||
}
|
||||
TokenTree::Punct(punct) => {
|
||||
punct.as_char() == '\'' || punct.as_char() == ':' && punct.spacing() == Spacing::Alone
|
||||
}
|
||||
}
|
||||
}
|
|
@ -15,29 +15,26 @@
|
|||
//! This crate provides a flexible way to paste together identifiers in a macro,
|
||||
//! including using pasted identifiers to define new items.
|
||||
//!
|
||||
//! This approach works with any stable or nightly Rust compiler 1.30+.
|
||||
//! This approach works with any Rust compiler 1.31+.
|
||||
//!
|
||||
//! <br>
|
||||
//!
|
||||
//! # Pasting identifiers
|
||||
//!
|
||||
//! There are two entry points, `paste::expr!` for macros in expression position and
|
||||
//! `paste::item!` for macros in item position.
|
||||
//!
|
||||
//! Within either one, identifiers inside `[<`...`>]` are pasted together to form a
|
||||
//! single identifier.
|
||||
//! Within the `paste!` macro, identifiers inside `[<`...`>]` are pasted
|
||||
//! together to form a single identifier.
|
||||
//!
|
||||
//! ```
|
||||
//! // Macro in item position: at module scope or inside of an impl block.
|
||||
//! paste::item! {
|
||||
//! use paste::paste;
|
||||
//!
|
||||
//! paste! {
|
||||
//! // Defines a const called `QRST`.
|
||||
//! const [<Q R S T>]: &str = "success!";
|
||||
//! }
|
||||
//!
|
||||
//! fn main() {
|
||||
//! // Macro in expression position: inside a function body.
|
||||
//! assert_eq!(
|
||||
//! paste::expr! { [<Q R S T>].len() },
|
||||
//! paste! { [<Q R S T>].len() },
|
||||
//! 8,
|
||||
//! );
|
||||
//! }
|
||||
|
@ -45,34 +42,15 @@
|
|||
//!
|
||||
//! <br><br>
|
||||
//!
|
||||
//! # More elaborate examples
|
||||
//! # More elaborate example
|
||||
//!
|
||||
//! This program demonstrates how you may want to bundle a paste invocation inside
|
||||
//! of a more convenient user-facing macro of your own. Here the `routes!(A, B)`
|
||||
//! macro expands to a vector containing `ROUTE_A` and `ROUTE_B`.
|
||||
//! The next example shows a macro that generates accessor methods for some
|
||||
//! struct fields. It demonstrates how you might find it useful to bundle a
|
||||
//! paste invocation inside of a macro\_rules macro.
|
||||
//!
|
||||
//! ```
|
||||
//! const ROUTE_A: &str = "/a";
|
||||
//! const ROUTE_B: &str = "/b";
|
||||
//! use paste::paste;
|
||||
//!
|
||||
//! macro_rules! routes {
|
||||
//! ($($route:ident),*) => {{
|
||||
//! paste::expr! {
|
||||
//! vec![$( [<ROUTE_ $route>] ),*]
|
||||
//! }
|
||||
//! }}
|
||||
//! }
|
||||
//!
|
||||
//! fn main() {
|
||||
//! let routes = routes!(A, B);
|
||||
//! assert_eq!(routes, vec!["/a", "/b"]);
|
||||
//! }
|
||||
//! ```
|
||||
//!
|
||||
//! The next example shows a macro that generates accessor methods for some struct
|
||||
//! fields.
|
||||
//!
|
||||
//! ```
|
||||
//! macro_rules! make_a_struct_and_getters {
|
||||
//! ($name:ident { $($field:ident),* }) => {
|
||||
//! // Define a struct. This expands to:
|
||||
|
@ -95,7 +73,7 @@
|
|||
//! // pub fn get_b(&self) -> &str { &self.b }
|
||||
//! // pub fn get_c(&self) -> &str { &self.c }
|
||||
//! // }
|
||||
//! paste::item! {
|
||||
//! paste! {
|
||||
//! impl $name {
|
||||
//! $(
|
||||
//! pub fn [<get_ $field>](&self) -> &str {
|
||||
|
@ -134,25 +112,311 @@
|
|||
//!
|
||||
//! [`str::to_lowercase`]: https://doc.rust-lang.org/std/primitive.str.html#method.to_lowercase
|
||||
//! [`str::to_uppercase`]: https://doc.rust-lang.org/std/primitive.str.html#method.to_uppercase
|
||||
//!
|
||||
//! <br>
|
||||
//!
|
||||
//! # Pasting documentation strings
|
||||
//!
|
||||
//! Within the `paste!` macro, arguments to a #\[doc ...\] attribute are
|
||||
//! implicitly concatenated together to form a coherent documentation string.
|
||||
//!
|
||||
//! ```
|
||||
//! use paste::paste;
|
||||
//!
|
||||
//! macro_rules! method_new {
|
||||
//! ($ret:ident) => {
|
||||
//! paste! {
|
||||
//! #[doc = "Create a new `" $ret "` object."]
|
||||
//! pub fn new() -> $ret { todo!() }
|
||||
//! }
|
||||
//! };
|
||||
//! }
|
||||
//!
|
||||
//! pub struct Paste {}
|
||||
//!
|
||||
//! method_new!(Paste); // expands to #[doc = "Create a new `Paste` object"]
|
||||
//! ```
|
||||
|
||||
#![no_std]
|
||||
#![allow(
|
||||
clippy::doc_markdown,
|
||||
clippy::match_same_arms,
|
||||
clippy::module_name_repetitions,
|
||||
clippy::needless_doctest_main,
|
||||
clippy::too_many_lines
|
||||
)]
|
||||
|
||||
use proc_macro_hack::proc_macro_hack;
|
||||
extern crate proc_macro;
|
||||
|
||||
/// Paste identifiers within a macro invocation that expands to an expression.
|
||||
#[proc_macro_hack]
|
||||
pub use paste_impl::expr;
|
||||
mod attr;
|
||||
mod error;
|
||||
mod segment;
|
||||
|
||||
/// Paste identifiers within a macro invocation that expands to one or more
|
||||
/// items.
|
||||
///
|
||||
/// An item is like a struct definition, function, impl block, or anything else
|
||||
/// that can appear at the top level of a module scope.
|
||||
pub use paste_impl::item;
use crate::attr::expand_attr;
use crate::error::{Error, Result};
use crate::segment::Segment;
use proc_macro::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
use std::char;
use std::iter;
use std::panic;

/// Paste identifiers within a macro invocation that expands to one or more
/// macro_rules macros or items containing macros.
pub use paste_impl::item_with_macros;
#[proc_macro]
pub fn paste(input: TokenStream) -> TokenStream {
    let mut contains_paste = false;
    let flatten_single_interpolation = true;
    match expand(input, &mut contains_paste, flatten_single_interpolation) {
        Ok(expanded) => expanded,
        Err(err) => err.to_compile_error(),
    }
}

#[doc(hidden)]
|
||||
pub use paste_impl::EnumHack;
|
||||
#[proc_macro]
|
||||
pub fn item(input: TokenStream) -> TokenStream {
|
||||
paste(input)
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
#[proc_macro]
|
||||
pub fn expr(input: TokenStream) -> TokenStream {
|
||||
paste(input)
|
||||
}
|
||||
|
||||
fn expand(
|
||||
input: TokenStream,
|
||||
contains_paste: &mut bool,
|
||||
flatten_single_interpolation: bool,
|
||||
) -> Result<TokenStream> {
|
||||
let mut expanded = TokenStream::new();
|
||||
let mut lookbehind = Lookbehind::Other;
|
||||
let mut prev_none_group = None::<Group>;
|
||||
let mut tokens = input.into_iter().peekable();
|
||||
loop {
|
||||
let token = tokens.next();
|
||||
if let Some(group) = prev_none_group.take() {
|
||||
if match (&token, tokens.peek()) {
|
||||
(Some(TokenTree::Punct(fst)), Some(TokenTree::Punct(snd))) => {
|
||||
fst.as_char() == ':' && snd.as_char() == ':' && fst.spacing() == Spacing::Joint
|
||||
}
|
||||
_ => false,
|
||||
} {
|
||||
expanded.extend(group.stream());
|
||||
*contains_paste = true;
|
||||
} else {
|
||||
expanded.extend(iter::once(TokenTree::Group(group)));
|
||||
}
|
||||
}
|
||||
match token {
|
||||
Some(TokenTree::Group(group)) => {
|
||||
let delimiter = group.delimiter();
|
||||
let content = group.stream();
|
||||
let span = group.span();
|
||||
if delimiter == Delimiter::Bracket && is_paste_operation(&content) {
|
||||
let segments = parse_bracket_as_segments(content, span)?;
|
||||
let pasted = segment::paste(&segments)?;
|
||||
let tokens = pasted_to_tokens(pasted, span)?;
|
||||
expanded.extend(tokens);
|
||||
*contains_paste = true;
|
||||
} else if flatten_single_interpolation
|
||||
&& delimiter == Delimiter::None
|
||||
&& is_single_interpolation_group(&content)
|
||||
{
|
||||
expanded.extend(content);
|
||||
*contains_paste = true;
|
||||
} else {
|
||||
let mut group_contains_paste = false;
|
||||
let is_attribute = delimiter == Delimiter::Bracket
|
||||
&& (lookbehind == Lookbehind::Pound || lookbehind == Lookbehind::PoundBang);
|
||||
let mut nested = expand(
|
||||
content,
|
||||
&mut group_contains_paste,
|
||||
flatten_single_interpolation && !is_attribute,
|
||||
)?;
|
||||
if is_attribute {
|
||||
nested = expand_attr(nested, span, &mut group_contains_paste)?;
|
||||
}
|
||||
let group = if group_contains_paste {
|
||||
let mut group = Group::new(delimiter, nested);
|
||||
group.set_span(span);
|
||||
*contains_paste = true;
|
||||
group
|
||||
} else {
|
||||
group.clone()
|
||||
};
|
||||
if delimiter != Delimiter::None {
|
||||
expanded.extend(iter::once(TokenTree::Group(group)));
|
||||
} else if lookbehind == Lookbehind::DoubleColon {
|
||||
expanded.extend(group.stream());
|
||||
*contains_paste = true;
|
||||
} else {
|
||||
prev_none_group = Some(group);
|
||||
}
|
||||
}
|
||||
lookbehind = Lookbehind::Other;
|
||||
}
|
||||
Some(TokenTree::Punct(punct)) => {
|
||||
lookbehind = match punct.as_char() {
|
||||
':' if lookbehind == Lookbehind::JointColon => Lookbehind::DoubleColon,
|
||||
':' if punct.spacing() == Spacing::Joint => Lookbehind::JointColon,
|
||||
'#' => Lookbehind::Pound,
|
||||
'!' if lookbehind == Lookbehind::Pound => Lookbehind::PoundBang,
|
||||
_ => Lookbehind::Other,
|
||||
};
|
||||
expanded.extend(iter::once(TokenTree::Punct(punct)));
|
||||
}
|
||||
Some(other) => {
|
||||
lookbehind = Lookbehind::Other;
|
||||
expanded.extend(iter::once(other));
|
||||
}
|
||||
None => return Ok(expanded),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(PartialEq)]
|
||||
enum Lookbehind {
|
||||
JointColon,
|
||||
DoubleColon,
|
||||
Pound,
|
||||
PoundBang,
|
||||
Other,
|
||||
}
|
||||
|
||||
// https://github.com/dtolnay/paste/issues/26
|
||||
fn is_single_interpolation_group(input: &TokenStream) -> bool {
|
||||
#[derive(PartialEq)]
|
||||
enum State {
|
||||
Init,
|
||||
Ident,
|
||||
Literal,
|
||||
Apostrophe,
|
||||
Lifetime,
|
||||
Colon1,
|
||||
Colon2,
|
||||
}
|
||||
|
||||
let mut state = State::Init;
|
||||
for tt in input.clone() {
|
||||
state = match (state, &tt) {
|
||||
(State::Init, TokenTree::Ident(_)) => State::Ident,
|
||||
(State::Init, TokenTree::Literal(_)) => State::Literal,
|
||||
(State::Init, TokenTree::Punct(punct)) if punct.as_char() == '\'' => State::Apostrophe,
|
||||
(State::Apostrophe, TokenTree::Ident(_)) => State::Lifetime,
|
||||
(State::Ident, TokenTree::Punct(punct))
|
||||
if punct.as_char() == ':' && punct.spacing() == Spacing::Joint =>
|
||||
{
|
||||
State::Colon1
|
||||
}
|
||||
(State::Colon1, TokenTree::Punct(punct))
|
||||
if punct.as_char() == ':' && punct.spacing() == Spacing::Alone =>
|
||||
{
|
||||
State::Colon2
|
||||
}
|
||||
(State::Colon2, TokenTree::Ident(_)) => State::Ident,
|
||||
_ => return false,
|
||||
};
|
||||
}
|
||||
|
||||
state == State::Ident || state == State::Literal || state == State::Lifetime
|
||||
}
|
||||
|
||||
fn is_paste_operation(input: &TokenStream) -> bool {
|
||||
let mut tokens = input.clone().into_iter();
|
||||
|
||||
match &tokens.next() {
|
||||
Some(TokenTree::Punct(punct)) if punct.as_char() == '<' => {}
|
||||
_ => return false,
|
||||
}
|
||||
|
||||
let mut has_token = false;
|
||||
loop {
|
||||
match &tokens.next() {
|
||||
Some(TokenTree::Punct(punct)) if punct.as_char() == '>' => {
|
||||
return has_token && tokens.next().is_none();
|
||||
}
|
||||
Some(_) => has_token = true,
|
||||
None => return false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_bracket_as_segments(input: TokenStream, scope: Span) -> Result<Vec<Segment>> {
|
||||
let mut tokens = input.into_iter().peekable();
|
||||
|
||||
match &tokens.next() {
|
||||
Some(TokenTree::Punct(punct)) if punct.as_char() == '<' => {}
|
||||
Some(wrong) => return Err(Error::new(wrong.span(), "expected `<`")),
|
||||
None => return Err(Error::new(scope, "expected `[< ... >]`")),
|
||||
}
|
||||
|
||||
let mut segments = segment::parse(&mut tokens)?;
|
||||
|
||||
match &tokens.next() {
|
||||
Some(TokenTree::Punct(punct)) if punct.as_char() == '>' => {}
|
||||
Some(wrong) => return Err(Error::new(wrong.span(), "expected `>`")),
|
||||
None => return Err(Error::new(scope, "expected `[< ... >]`")),
|
||||
}
|
||||
|
||||
if let Some(unexpected) = tokens.next() {
|
||||
return Err(Error::new(
|
||||
unexpected.span(),
|
||||
"unexpected input, expected `[< ... >]`",
|
||||
));
|
||||
}
|
||||
|
||||
for segment in &mut segments {
|
||||
if let Segment::String(string) = segment {
|
||||
if string.value.starts_with("'\\u{") {
|
||||
let hex = &string.value[4..string.value.len() - 2];
|
||||
if let Ok(unsigned) = u32::from_str_radix(hex, 16) {
|
||||
if let Some(ch) = char::from_u32(unsigned) {
|
||||
string.value.clear();
|
||||
string.value.push(ch);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
if string.value.contains(&['#', '\\', '.', '+'][..])
|
||||
|| string.value.starts_with("b'")
|
||||
|| string.value.starts_with("b\"")
|
||||
|| string.value.starts_with("br\"")
|
||||
{
|
||||
return Err(Error::new(string.span, "unsupported literal"));
|
||||
}
|
||||
let mut range = 0..string.value.len();
|
||||
if string.value.starts_with("r\"") {
|
||||
range.start += 2;
|
||||
range.end -= 1;
|
||||
} else if string.value.starts_with(&['"', '\''][..]) {
|
||||
range.start += 1;
|
||||
range.end -= 1;
|
||||
}
|
||||
string.value = string.value[range].replace('-', "_");
|
||||
}
|
||||
}
|
||||
|
||||
Ok(segments)
|
||||
}
|
||||
|
||||
fn pasted_to_tokens(mut pasted: String, span: Span) -> Result<TokenStream> {
|
||||
let mut tokens = TokenStream::new();
|
||||
|
||||
if pasted.starts_with('\'') {
|
||||
let mut apostrophe = TokenTree::Punct(Punct::new('\'', Spacing::Joint));
|
||||
apostrophe.set_span(span);
|
||||
tokens.extend(iter::once(apostrophe));
|
||||
pasted.remove(0);
|
||||
}
|
||||
|
||||
let ident = match panic::catch_unwind(|| Ident::new(&pasted, span)) {
|
||||
Ok(ident) => TokenTree::Ident(ident),
|
||||
Err(_) => {
|
||||
return Err(Error::new(
|
||||
span,
|
||||
&format!("`{:?}` is not a valid identifier", pasted),
|
||||
));
|
||||
}
|
||||
};
|
||||
|
||||
tokens.extend(iter::once(ident));
|
||||
Ok(tokens)
|
||||
}
|
||||
|
|
|
@ -0,0 +1,233 @@
|
|||
use crate::error::{Error, Result};
|
||||
use proc_macro::{token_stream, Delimiter, Ident, Span, TokenTree};
|
||||
use std::iter::Peekable;
|
||||
|
||||
pub(crate) enum Segment {
|
||||
String(LitStr),
|
||||
Apostrophe(Span),
|
||||
Env(LitStr),
|
||||
Modifier(Colon, Ident),
|
||||
}
|
||||
|
||||
pub(crate) struct LitStr {
|
||||
pub value: String,
|
||||
pub span: Span,
|
||||
}
|
||||
|
||||
pub(crate) struct Colon {
|
||||
pub span: Span,
|
||||
}
|
||||
|
||||
pub(crate) fn parse(tokens: &mut Peekable<token_stream::IntoIter>) -> Result<Vec<Segment>> {
|
||||
let mut segments = Vec::new();
|
||||
while match tokens.peek() {
|
||||
None => false,
|
||||
Some(TokenTree::Punct(punct)) => punct.as_char() != '>',
|
||||
Some(_) => true,
|
||||
} {
|
||||
match tokens.next().unwrap() {
|
||||
TokenTree::Ident(ident) => {
|
||||
let mut fragment = ident.to_string();
|
||||
if fragment.starts_with("r#") {
|
||||
fragment = fragment.split_off(2);
|
||||
}
|
||||
if fragment == "env"
|
||||
&& match tokens.peek() {
|
||||
Some(TokenTree::Punct(punct)) => punct.as_char() == '!',
|
||||
_ => false,
|
||||
}
|
||||
{
|
||||
let bang = tokens.next().unwrap(); // `!`
|
||||
let expect_group = tokens.next();
|
||||
let parenthesized = match &expect_group {
|
||||
Some(TokenTree::Group(group))
|
||||
if group.delimiter() == Delimiter::Parenthesis =>
|
||||
{
|
||||
group
|
||||
}
|
||||
Some(wrong) => return Err(Error::new(wrong.span(), "expected `(`")),
|
||||
None => {
|
||||
return Err(Error::new2(
|
||||
ident.span(),
|
||||
bang.span(),
|
||||
"expected `(` after `env!`",
|
||||
));
|
||||
}
|
||||
};
|
||||
let mut inner = parenthesized.stream().into_iter();
|
||||
let lit = match inner.next() {
|
||||
Some(TokenTree::Literal(lit)) => lit,
|
||||
Some(wrong) => {
|
||||
return Err(Error::new(wrong.span(), "expected string literal"))
|
||||
}
|
||||
None => {
|
||||
return Err(Error::new2(
|
||||
ident.span(),
|
||||
parenthesized.span(),
|
||||
"expected string literal as argument to env! macro",
|
||||
))
|
||||
}
|
||||
};
|
||||
let lit_string = lit.to_string();
|
||||
if lit_string.starts_with('"')
|
||||
&& lit_string.ends_with('"')
|
||||
&& lit_string.len() >= 2
|
||||
{
|
||||
// TODO: maybe handle escape sequences in the string if
|
||||
// someone has a use case.
|
||||
segments.push(Segment::Env(LitStr {
|
||||
value: lit_string[1..lit_string.len() - 1].to_owned(),
|
||||
span: lit.span(),
|
||||
}));
|
||||
} else {
|
||||
return Err(Error::new(lit.span(), "expected string literal"));
|
||||
}
|
||||
if let Some(unexpected) = inner.next() {
|
||||
return Err(Error::new(
|
||||
unexpected.span(),
|
||||
"unexpected token in env! macro",
|
||||
));
|
||||
}
|
||||
} else {
|
||||
segments.push(Segment::String(LitStr {
|
||||
value: fragment,
|
||||
span: ident.span(),
|
||||
}));
|
||||
}
|
||||
}
|
||||
TokenTree::Literal(lit) => {
|
||||
segments.push(Segment::String(LitStr {
|
||||
value: lit.to_string(),
|
||||
span: lit.span(),
|
||||
}));
|
||||
}
|
||||
TokenTree::Punct(punct) => match punct.as_char() {
|
||||
'_' => segments.push(Segment::String(LitStr {
|
||||
value: "_".to_owned(),
|
||||
span: punct.span(),
|
||||
})),
|
||||
'\'' => segments.push(Segment::Apostrophe(punct.span())),
|
||||
':' => {
|
||||
let colon_span = punct.span();
|
||||
let colon = Colon { span: colon_span };
|
||||
let ident = match tokens.next() {
|
||||
Some(TokenTree::Ident(ident)) => ident,
|
||||
wrong => {
|
||||
let span = wrong.as_ref().map_or(colon_span, TokenTree::span);
|
||||
return Err(Error::new(span, "expected identifier after `:`"));
|
||||
}
|
||||
};
|
||||
segments.push(Segment::Modifier(colon, ident));
|
||||
}
|
||||
_ => return Err(Error::new(punct.span(), "unexpected punct")),
|
||||
},
|
||||
TokenTree::Group(group) => {
|
||||
if group.delimiter() == Delimiter::None {
|
||||
let mut inner = group.stream().into_iter().peekable();
|
||||
let nested = parse(&mut inner)?;
|
||||
if let Some(unexpected) = inner.next() {
|
||||
return Err(Error::new(unexpected.span(), "unexpected token"));
|
||||
}
|
||||
segments.extend(nested);
|
||||
} else {
|
||||
return Err(Error::new(group.span(), "unexpected token"));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(segments)
|
||||
}
|
||||
|
||||
pub(crate) fn paste(segments: &[Segment]) -> Result<String> {
|
||||
let mut evaluated = Vec::new();
|
||||
let mut is_lifetime = false;
|
||||
|
||||
for segment in segments {
|
||||
match segment {
|
||||
Segment::String(segment) => {
|
||||
evaluated.push(segment.value.clone());
|
||||
}
|
||||
Segment::Apostrophe(span) => {
|
||||
if is_lifetime {
|
||||
return Err(Error::new(*span, "unexpected lifetime"));
|
||||
}
|
||||
is_lifetime = true;
|
||||
}
|
||||
Segment::Env(var) => {
|
||||
let resolved = match std::env::var(&var.value) {
|
||||
Ok(resolved) => resolved,
|
||||
Err(_) => {
|
||||
return Err(Error::new(
|
||||
var.span,
|
||||
&format!("no such env var: {:?}", var.value),
|
||||
));
|
||||
}
|
||||
};
|
||||
let resolved = resolved.replace('-', "_");
|
||||
evaluated.push(resolved);
|
||||
}
|
||||
Segment::Modifier(colon, ident) => {
|
||||
let last = match evaluated.pop() {
|
||||
Some(last) => last,
|
||||
None => {
|
||||
return Err(Error::new2(colon.span, ident.span(), "unexpected modifier"))
|
||||
}
|
||||
};
|
||||
match ident.to_string().as_str() {
|
||||
"lower" => {
|
||||
evaluated.push(last.to_lowercase());
|
||||
}
|
||||
"upper" => {
|
||||
evaluated.push(last.to_uppercase());
|
||||
}
|
||||
"snake" => {
|
||||
let mut acc = String::new();
|
||||
let mut prev = '_';
|
||||
for ch in last.chars() {
|
||||
if ch.is_uppercase() && prev != '_' {
|
||||
acc.push('_');
|
||||
}
|
||||
acc.push(ch);
|
||||
prev = ch;
|
||||
}
|
||||
evaluated.push(acc.to_lowercase());
|
||||
}
|
||||
"camel" => {
|
||||
let mut acc = String::new();
|
||||
let mut prev = '_';
|
||||
for ch in last.chars() {
|
||||
if ch != '_' {
|
||||
if prev == '_' {
|
||||
for chu in ch.to_uppercase() {
|
||||
acc.push(chu);
|
||||
}
|
||||
} else if prev.is_uppercase() {
|
||||
for chl in ch.to_lowercase() {
|
||||
acc.push(chl);
|
||||
}
|
||||
} else {
|
||||
acc.push(ch);
|
||||
}
|
||||
}
|
||||
prev = ch;
|
||||
}
|
||||
evaluated.push(acc);
|
||||
}
|
||||
_ => {
|
||||
return Err(Error::new2(
|
||||
colon.span,
|
||||
ident.span(),
|
||||
"unsupported modifier",
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut pasted = evaluated.into_iter().collect::<String>();
|
||||
if is_lifetime {
|
||||
pasted.insert(0, '\'');
|
||||
}
|
||||
Ok(pasted)
|
||||
}
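
// Editorial sketch, not part of the vendored sources: worked examples of the
// modifier handling above, consistent with the test suite later in this diff.
//
//     [<$id:snake>]        with $id = FooBar   pastes to  foo_bar
//     [<$id:camel>]        with $id = foo_bar  pastes to  FooBar
//     [<$id:snake:upper>]  with $id = FooBar   pastes to  FOO_BAR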
|
|
@ -0,0 +1,60 @@
|
|||
use paste::paste;
|
||||
use paste_test_suite::paste_test;
|
||||
|
||||
#[test]
|
||||
fn test_attr() {
|
||||
paste! {
|
||||
#[paste_test(k = "val" "ue")]
|
||||
struct A;
|
||||
|
||||
#[paste_test_suite::paste_test(k = "val" "ue")]
|
||||
struct B;
|
||||
|
||||
#[::paste_test_suite::paste_test(k = "val" "ue")]
|
||||
struct C;
|
||||
|
||||
#[paste_test(k = "va" [<l u>] e)]
|
||||
struct D;
|
||||
}
|
||||
|
||||
let _ = A;
|
||||
let _ = B;
|
||||
let _ = C;
|
||||
let _ = D;
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_paste_cfg() {
|
||||
macro_rules! m {
|
||||
($ret:ident, $width:expr) => {
|
||||
paste! {
|
||||
#[cfg(any(feature = "protocol_feature_" $ret:snake, target_pointer_width = "" $width))]
|
||||
fn new() -> $ret { todo!() }
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
struct Paste;
|
||||
|
||||
#[cfg(target_pointer_width = "64")]
|
||||
m!(Paste, 64);
|
||||
#[cfg(target_pointer_width = "32")]
|
||||
m!(Paste, 32);
|
||||
|
||||
let _ = new;
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_path_in_attr() {
|
||||
macro_rules! m {
|
||||
(#[x = $x:ty]) => {
|
||||
stringify!($x)
|
||||
};
|
||||
}
|
||||
|
||||
let ty = paste! {
|
||||
m!(#[x = foo::Bar])
|
||||
};
|
||||
|
||||
assert_eq!("foo::Bar", ty);
|
||||
}
|
|
@ -0,0 +1,77 @@
|
|||
use paste::paste;
|
||||
|
||||
#[test]
|
||||
fn test_paste_doc() {
|
||||
macro_rules! m {
|
||||
($ret:ident) => {
|
||||
paste! {
|
||||
#[doc = "Create a new [`" $ret "`] object."]
|
||||
fn new() -> $ret { todo!() }
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
struct Paste;
|
||||
m!(Paste);
|
||||
|
||||
let _ = new;
|
||||
}
|
||||
|
||||
macro_rules! get_doc {
|
||||
(#[doc = $literal:tt]) => {
|
||||
$literal
|
||||
};
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_escaping() {
|
||||
let doc = paste! {
|
||||
get_doc!(#[doc = "s\"" r#"r#""#])
|
||||
};
|
||||
|
||||
let expected = "s\"r#\"";
|
||||
assert_eq!(doc, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_literals() {
|
||||
let doc = paste! {
|
||||
get_doc!(#[doc = "int=" 0x1 " bool=" true " float=" 0.01])
|
||||
};
|
||||
|
||||
let expected = "int=0x1 bool=true float=0.01";
|
||||
assert_eq!(doc, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_case() {
|
||||
let doc = paste! {
|
||||
get_doc!(#[doc = "HTTP " get:upper "!"])
|
||||
};
|
||||
|
||||
let expected = "HTTP GET!";
|
||||
assert_eq!(doc, expected);
|
||||
}
|
||||
|
||||
// https://github.com/dtolnay/paste/issues/63
|
||||
#[test]
|
||||
fn test_stringify() {
|
||||
macro_rules! create {
|
||||
($doc:expr) => {
|
||||
paste! {
|
||||
#[doc = $doc]
|
||||
pub struct Struct;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! forward {
|
||||
($name:ident) => {
|
||||
create!(stringify!($name));
|
||||
};
|
||||
}
|
||||
|
||||
forward!(documentation);
|
||||
|
||||
let _ = Struct;
|
||||
}
|
|
@ -0,0 +1,249 @@
|
|||
use paste::paste;
|
||||
|
||||
#[test]
|
||||
fn test_shared_hygiene() {
|
||||
paste! {
|
||||
let [<a a>] = 1;
|
||||
assert_eq!([<a a>], 1);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_repeat() {
|
||||
const ROCKET_A: &str = "/a";
|
||||
const ROCKET_B: &str = "/b";
|
||||
|
||||
macro_rules! routes {
|
||||
($($route:ident),*) => {{
|
||||
paste! {
|
||||
vec![$( [<ROCKET_ $route>] ),*]
|
||||
}
|
||||
}}
|
||||
}
|
||||
|
||||
let routes = routes!(A, B);
|
||||
assert_eq!(routes, vec!["/a", "/b"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_literals() {
|
||||
const CONST0: &str = "const0";
|
||||
|
||||
let pasted = paste!([<CONST 0>]);
|
||||
assert_eq!(pasted, CONST0);
|
||||
|
||||
let pasted = paste!([<CONST '0'>]);
|
||||
assert_eq!(pasted, CONST0);
|
||||
|
||||
let pasted = paste!([<CONST "0">]);
|
||||
assert_eq!(pasted, CONST0);
|
||||
|
||||
let pasted = paste!([<CONST r"0">]);
|
||||
assert_eq!(pasted, CONST0);
|
||||
|
||||
let pasted = paste!([<CONST '\u{30}'>]);
|
||||
assert_eq!(pasted, CONST0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_underscore() {
|
||||
paste! {
|
||||
const A_B: usize = 0;
|
||||
assert_eq!([<A _ B>], 0);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_lifetime() {
|
||||
paste! {
|
||||
#[allow(dead_code)]
|
||||
struct S<[<'d e>]> {
|
||||
q: &[<'d e>] str,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_keyword() {
|
||||
paste! {
|
||||
struct [<F move>];
|
||||
|
||||
let _ = Fmove;
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_literal_str() {
|
||||
paste! {
|
||||
#[allow(non_camel_case_types)]
|
||||
struct [<Foo "Bar-Baz">];
|
||||
|
||||
let _ = FooBar_Baz;
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_env_literal() {
|
||||
paste! {
|
||||
struct [<Lib env bar>];
|
||||
|
||||
let _ = Libenvbar;
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_env_present() {
|
||||
paste! {
|
||||
struct [<Lib env!("CARGO_PKG_NAME")>];
|
||||
|
||||
let _ = Libpaste;
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_raw_identifier() {
|
||||
paste! {
|
||||
struct [<F r#move>];
|
||||
|
||||
let _ = Fmove;
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_false_start() {
|
||||
trait Trait {
|
||||
fn f() -> usize;
|
||||
}
|
||||
|
||||
struct S;
|
||||
|
||||
impl Trait for S {
|
||||
fn f() -> usize {
|
||||
0
|
||||
}
|
||||
}
|
||||
|
||||
paste! {
|
||||
let x = [<S as Trait>::f()];
|
||||
assert_eq!(x[0], 0);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_local_variable() {
|
||||
let yy = 0;
|
||||
|
||||
paste! {
|
||||
assert_eq!([<y y>], 0);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_empty() {
|
||||
paste! {
|
||||
assert_eq!(stringify!([<y y>]), "yy");
|
||||
assert_eq!(stringify!([<>]).replace(' ', ""), "[<>]");
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_env_to_lower() {
|
||||
paste! {
|
||||
struct [<Lib env!("CARGO_PKG_NAME"):lower>];
|
||||
|
||||
let _ = Libpaste;
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_env_to_upper() {
|
||||
paste! {
|
||||
const [<LIB env!("CARGO_PKG_NAME"):upper>]: &str = "libpaste";
|
||||
|
||||
let _ = LIBPASTE;
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_env_to_snake() {
|
||||
paste! {
|
||||
const [<LIB env!("CARGO_PKG_NAME"):snake:upper>]: &str = "libpaste";
|
||||
|
||||
let _ = LIBPASTE;
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_env_to_camel() {
|
||||
paste! {
|
||||
#[allow(non_upper_case_globals)]
|
||||
const [<LIB env!("CARGO_PKG_NAME"):camel>]: &str = "libpaste";
|
||||
|
||||
let _ = LIBPaste;
|
||||
}
|
||||
}
|
||||
|
||||
mod test_x86_feature_literal {
|
||||
// work around https://github.com/rust-lang/rust/issues/72726
|
||||
|
||||
use paste::paste;
|
||||
|
||||
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
|
||||
macro_rules! my_is_x86_feature_detected {
|
||||
($feat:literal) => {
|
||||
paste! {
|
||||
#[test]
|
||||
fn test() {
|
||||
let _ = is_x86_feature_detected!($feat);
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
|
||||
macro_rules! my_is_x86_feature_detected {
|
||||
($feat:literal) => {
|
||||
#[ignore]
|
||||
#[test]
|
||||
fn test() {}
|
||||
};
|
||||
}
|
||||
|
||||
my_is_x86_feature_detected!("mmx");
|
||||
}
|
||||
|
||||
#[rustversion::since(1.46)]
|
||||
mod test_local_setter {
|
||||
// https://github.com/dtolnay/paste/issues/7
|
||||
|
||||
use paste::paste;
|
||||
|
||||
#[derive(Default)]
|
||||
struct Test {
|
||||
val: i32,
|
||||
}
|
||||
|
||||
impl Test {
|
||||
fn set_val(&mut self, arg: i32) {
|
||||
self.val = arg;
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! setter {
|
||||
($obj:expr, $field:ident, $value:expr) => {
|
||||
paste! { $obj.[<set_ $field>]($value); }
|
||||
};
|
||||
|
||||
($field:ident, $value:expr) => {{
|
||||
let mut new = Test::default();
|
||||
setter!(new, val, $value);
|
||||
new
|
||||
}};
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_local_setter() {
|
||||
let a = setter!(val, 42);
|
||||
assert_eq!(a.val, 42);
|
||||
}
|
||||
}
|
|
@ -1,7 +1,9 @@
|
|||
mod test_basic {
|
||||
use paste::paste;
|
||||
|
||||
struct Struct;
|
||||
|
||||
paste::item! {
|
||||
paste! {
|
||||
impl Struct {
|
||||
fn [<a b c>]() {}
|
||||
}
|
||||
|
@ -14,10 +16,12 @@ mod test_basic {
|
|||
}
|
||||
|
||||
mod test_in_impl {
|
||||
use paste::paste;
|
||||
|
||||
struct Struct;
|
||||
|
||||
impl Struct {
|
||||
paste::item! {
|
||||
paste! {
|
||||
fn [<a b c>]() {}
|
||||
}
|
||||
}
|
||||
|
@ -28,143 +32,12 @@ mod test_in_impl {
|
|||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_shared_hygiene() {
|
||||
paste::expr! {
|
||||
let [<a a>] = 1;
|
||||
assert_eq!([<a a>], 1);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_repeat() {
|
||||
const ROCKET_A: &'static str = "/a";
|
||||
const ROCKET_B: &'static str = "/b";
|
||||
|
||||
macro_rules! routes {
|
||||
($($route:ident),*) => {{
|
||||
paste::expr! {
|
||||
vec![$( [<ROCKET_ $route>] ),*]
|
||||
}
|
||||
}}
|
||||
}
|
||||
|
||||
let routes = routes!(A, B);
|
||||
assert_eq!(routes, vec!["/a", "/b"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_integer() {
|
||||
const CONST0: &'static str = "const0";
|
||||
|
||||
let pasted = paste::expr!([<CONST 0>]);
|
||||
assert_eq!(pasted, CONST0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_underscore() {
|
||||
paste::expr! {
|
||||
const A_B: usize = 0;
|
||||
assert_eq!([<A _ B>], 0);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_lifetime() {
|
||||
paste::expr! {
|
||||
#[allow(dead_code)]
|
||||
struct S<[<'d e>]> {
|
||||
q: &[<'d e>] str,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_keyword() {
|
||||
paste::expr! {
|
||||
struct [<F move>];
|
||||
|
||||
let _ = Fmove;
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_literal_str() {
|
||||
paste::expr! {
|
||||
struct [<Foo "Bar-Baz">];
|
||||
|
||||
let _ = FooBar_Baz;
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_env_literal() {
|
||||
paste::expr! {
|
||||
struct [<Lib env bar>];
|
||||
|
||||
let _ = Libenvbar;
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_env_present() {
|
||||
paste::expr! {
|
||||
struct [<Lib env!("CARGO_PKG_NAME")>];
|
||||
|
||||
let _ = Libpaste;
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_raw_identifier() {
|
||||
paste::expr! {
|
||||
struct [<F r#move>];
|
||||
|
||||
let _ = Fmove;
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_false_start() {
|
||||
trait Trait {
|
||||
fn f() -> usize;
|
||||
}
|
||||
|
||||
struct S;
|
||||
|
||||
impl Trait for S {
|
||||
fn f() -> usize {
|
||||
0
|
||||
}
|
||||
}
|
||||
|
||||
paste::expr! {
|
||||
let x = [<S as Trait>::f()];
|
||||
assert_eq!(x[0], 0);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_local_variable() {
|
||||
let yy = 0;
|
||||
|
||||
paste::expr! {
|
||||
assert_eq!([<y y>], 0);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_empty() {
|
||||
paste::expr! {
|
||||
assert_eq!(stringify!([<y y>]), "yy");
|
||||
assert_eq!(stringify!([<>]).replace(' ', ""), "[<>]");
|
||||
}
|
||||
}
|
||||
|
||||
mod test_none_delimited_single_ident {
|
||||
use paste::paste;
|
||||
|
||||
macro_rules! m {
|
||||
($id:ident) => {
|
||||
paste::item! {
|
||||
paste! {
|
||||
fn f() -> &'static str {
|
||||
stringify!($id)
|
||||
}
|
||||
|
@ -181,9 +54,11 @@ mod test_none_delimited_single_ident {
|
|||
}
|
||||
|
||||
mod test_none_delimited_single_lifetime {
|
||||
use paste::paste;
|
||||
|
||||
macro_rules! m {
|
||||
($life:lifetime) => {
|
||||
paste::item! {
|
||||
paste! {
|
||||
pub struct S;
|
||||
impl<$life> S {
|
||||
fn f() {}
|
||||
|
@ -201,9 +76,11 @@ mod test_none_delimited_single_lifetime {
|
|||
}
|
||||
|
||||
mod test_to_lower {
|
||||
use paste::paste;
|
||||
|
||||
macro_rules! m {
|
||||
($id:ident) => {
|
||||
paste::item! {
|
||||
paste! {
|
||||
fn [<my_ $id:lower _here>](_arg: u8) -> &'static str {
|
||||
stringify!([<$id:lower>])
|
||||
}
|
||||
|
@ -219,19 +96,12 @@ mod test_to_lower {
|
|||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_env_to_lower() {
|
||||
paste::expr! {
|
||||
struct [<Lib env!("CARGO_PKG_NAME"):lower>];
|
||||
|
||||
let _ = Libpaste;
|
||||
}
|
||||
}
|
||||
|
||||
mod test_to_upper {
|
||||
use paste::paste;
|
||||
|
||||
macro_rules! m {
|
||||
($id:ident) => {
|
||||
paste::item! {
|
||||
paste! {
|
||||
const [<MY_ $id:upper _HERE>]: &str = stringify!([<$id:upper>]);
|
||||
}
|
||||
};
|
||||
|
@ -245,19 +115,12 @@ mod test_to_upper {
|
|||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_env_to_upper() {
|
||||
paste::expr! {
|
||||
const [<LIB env!("CARGO_PKG_NAME"):upper>]: &str = "libpaste";
|
||||
|
||||
let _ = LIBPASTE;
|
||||
}
|
||||
}
|
||||
|
||||
mod test_to_snake {
|
||||
use paste::paste;
|
||||
|
||||
macro_rules! m {
|
||||
($id:ident) => {
|
||||
paste::item! {
|
||||
paste! {
|
||||
const DEFAULT_SNAKE: &str = stringify!([<$id:snake>]);
|
||||
const LOWER_SNAKE: &str = stringify!([<$id:snake:lower>]);
|
||||
const UPPER_SNAKE: &str = stringify!([<$id:snake:upper>]);
|
||||
|
@ -275,19 +138,12 @@ mod test_to_snake {
|
|||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_env_to_snake() {
|
||||
paste::expr! {
|
||||
const [<LIB env!("CARGO_PKG_NAME"):snake:upper>]: &str = "libpaste";
|
||||
|
||||
let _ = LIBPASTE;
|
||||
}
|
||||
}
|
||||
|
||||
mod test_to_camel {
|
||||
use paste::paste;
|
||||
|
||||
macro_rules! m {
|
||||
($id:ident) => {
|
||||
paste::item! {
|
||||
paste! {
|
||||
const DEFAULT_CAMEL: &str = stringify!([<$id:camel>]);
|
||||
const LOWER_CAMEL: &str = stringify!([<$id:camel:lower>]);
|
||||
const UPPER_CAMEL: &str = stringify!([<$id:camel:upper>]);
|
||||
|
@ -305,21 +161,14 @@ mod test_to_camel {
|
|||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_env_to_camel() {
|
||||
paste::expr! {
|
||||
const [<LIB env!("CARGO_PKG_NAME"):camel>]: &str = "libpaste";
|
||||
|
||||
let _ = LIBPaste;
|
||||
}
|
||||
}
|
||||
|
||||
mod test_doc_expr {
|
||||
// https://github.com/dtolnay/paste/issues/29
|
||||
|
||||
use paste::paste;
|
||||
|
||||
macro_rules! doc_expr {
|
||||
($doc:expr) => {
|
||||
paste::item! {
|
||||
paste! {
|
||||
#[doc = $doc]
|
||||
pub struct S;
|
||||
}
|
||||
|
@ -337,6 +186,8 @@ mod test_doc_expr {
|
|||
mod test_type_in_path {
|
||||
// https://github.com/dtolnay/paste/issues/31
|
||||
|
||||
use paste::paste;
|
||||
|
||||
mod keys {
|
||||
#[derive(Default)]
|
||||
pub struct Mib<T = ()>(std::marker::PhantomData<T>);
|
||||
|
@ -344,7 +195,7 @@ mod test_type_in_path {
|
|||
|
||||
macro_rules! types {
|
||||
($mib:ty) => {
|
||||
paste::item! {
|
||||
paste! {
|
||||
#[derive(Default)]
|
||||
pub struct S(pub keys::$mib);
|
||||
}
|
||||
|
@ -353,7 +204,7 @@ mod test_type_in_path {
|
|||
|
||||
macro_rules! write {
|
||||
($fn:ident, $field:ty) => {
|
||||
paste::item! {
|
||||
paste! {
|
||||
pub fn $fn() -> $field {
|
||||
$field::default()
|
||||
}
|
||||
|
@ -376,11 +227,13 @@ mod test_type_in_path {
|
|||
mod test_type_in_fn_arg {
|
||||
// https://github.com/dtolnay/paste/issues/38
|
||||
|
||||
use paste::paste;
|
||||
|
||||
fn _jit_address(_node: ()) {}
|
||||
|
||||
macro_rules! jit_reexport {
|
||||
($fn:ident, $arg:ident : $typ:ty) => {
|
||||
paste::item! {
|
||||
paste! {
|
||||
pub fn $fn($arg: $typ) {
|
||||
[<_jit_ $fn>]($arg);
|
||||
}
|
||||
|
@ -399,9 +252,11 @@ mod test_type_in_fn_arg {
|
|||
mod test_pat_in_expr_position {
|
||||
// https://github.com/xiph/rav1e/pull/2324/files
|
||||
|
||||
use paste::paste;
|
||||
|
||||
macro_rules! rav1e_bad {
|
||||
($e:pat) => {
|
||||
paste::item! {
|
||||
paste! {
|
||||
#[test]
|
||||
fn test() {
|
||||
let _ = $e;
|
||||
|
@ -412,31 +267,3 @@ mod test_pat_in_expr_position {
|
|||
|
||||
rav1e_bad!(std::fmt::Error);
|
||||
}
|
||||
|
||||
#[cfg(not(no_literal_matcher))]
|
||||
mod test_x86_feature_literal {
|
||||
// work around https://github.com/rust-lang/rust/issues/72726
|
||||
|
||||
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
|
||||
macro_rules! my_is_x86_feature_detected {
|
||||
($feat:literal) => {
|
||||
paste::item! {
|
||||
#[test]
|
||||
fn test() {
|
||||
let _ = is_x86_feature_detected!($feat);
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
|
||||
macro_rules! my_is_x86_feature_detected {
|
||||
($feat:literal) => {
|
||||
#[ignore]
|
||||
#[test]
|
||||
fn test() {}
|
||||
};
|
||||
}
|
||||
|
||||
my_is_x86_feature_detected!("mmx");
|
||||
}
|
|
@ -1,8 +1,10 @@
|
|||
#![deny(warnings)]
|
||||
|
||||
use paste::paste;
|
||||
|
||||
macro_rules! m {
|
||||
($i:ident) => {
|
||||
paste::item! {
|
||||
paste! {
|
||||
pub fn [<foo $i>]() {}
|
||||
}
|
||||
};
|
||||
|
|
|
@ -1,16 +1,16 @@
|
|||
error: function `fooBar` should have a snake case name
|
||||
--> $DIR/case-warning.rs:6:20
|
||||
--> tests/ui/case-warning.rs:8:20
|
||||
|
|
||||
6 | pub fn [<foo $i>]() {}
|
||||
8 | pub fn [<foo $i>]() {}
|
||||
| ^^^^^^^^^^ help: convert the identifier to snake case: `foo_bar`
|
||||
...
|
||||
11 | m!(Bar);
|
||||
| -------- in this macro invocation
|
||||
13 | m!(Bar);
|
||||
| ------- in this macro invocation
|
||||
|
|
||||
note: the lint level is defined here
|
||||
--> $DIR/case-warning.rs:1:9
|
||||
--> tests/ui/case-warning.rs:1:9
|
||||
|
|
||||
1 | #![deny(warnings)]
|
||||
| ^^^^^^^^
|
||||
= note: `#[deny(non_snake_case)]` implied by `#[deny(warnings)]`
|
||||
= note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info)
|
||||
= note: this error originates in the macro `m` (in Nightly builds, run with -Z macro-backtrace for more info)
|
||||
|
|
|
@ -1,4 +1,6 @@
|
|||
paste::item! {
|
||||
use paste::paste;
|
||||
|
||||
paste! {
|
||||
fn [<env!()>]() {}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
error: expected string literal as argument to env! macro
|
||||
--> $DIR/env-empty.rs:2:10
|
||||
--> tests/ui/env-empty.rs:4:10
|
||||
|
|
||||
2 | fn [<env!()>]() {}
|
||||
4 | fn [<env!()>]() {}
|
||||
| ^^^^^^
|
||||
|
|
|
@ -1,4 +1,6 @@
|
|||
paste::item! {
|
||||
use paste::paste;
|
||||
|
||||
paste! {
|
||||
fn [<env!(1.31)>]() {}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
error: expected string literal
|
||||
--> $DIR/env-non-string.rs:2:15
|
||||
--> tests/ui/env-non-string.rs:4:15
|
||||
|
|
||||
2 | fn [<env!(1.31)>]() {}
|
||||
4 | fn [<env!(1.31)>]() {}
|
||||
| ^^^^
|
||||
|
|
|
@ -1,4 +1,6 @@
|
|||
paste::item! {
|
||||
use paste::paste;
|
||||
|
||||
paste! {
|
||||
fn [<env!("VAR"suffix)>]() {}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
error: expected string literal
|
||||
--> $DIR/env-suffix.rs:2:15
|
||||
--> tests/ui/env-suffix.rs:4:15
|
||||
|
|
||||
2 | fn [<env!("VAR"suffix)>]() {}
|
||||
4 | fn [<env!("VAR"suffix)>]() {}
|
||||
| ^^^^^^^^^^^
|
||||
|
|
|
@ -1,4 +1,6 @@
|
|||
paste::item! {
|
||||
use paste::paste;
|
||||
|
||||
paste! {
|
||||
fn [<env!("VAR" "VAR")>]() {}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
error: unexpected token in env! macro
|
||||
--> $DIR/env-unexpected.rs:2:21
|
||||
--> tests/ui/env-unexpected.rs:4:21
|
||||
|
|
||||
2 | fn [<env!("VAR" "VAR")>]() {}
|
||||
4 | fn [<env!("VAR" "VAR")>]() {}
|
||||
| ^^^^^
|
||||
|
|
|
@ -1,5 +1,15 @@
|
|||
paste::item! {
|
||||
use paste::paste;
|
||||
|
||||
paste! {
|
||||
fn [<0 f>]() {}
|
||||
}
|
||||
|
||||
paste! {
|
||||
fn [<f '"'>]() {}
|
||||
}
|
||||
|
||||
paste! {
|
||||
fn [<f "'">]() {}
|
||||
}
|
||||
|
||||
fn main() {}
|
||||
|
|
|
@ -1,5 +1,17 @@
|
|||
error: `"0f"` is not a valid identifier
|
||||
--> $DIR/invalid-ident.rs:2:8
|
||||
--> tests/ui/invalid-ident.rs:4:8
|
||||
|
|
||||
2 | fn [<0 f>]() {}
|
||||
4 | fn [<0 f>]() {}
|
||||
| ^^^^^^^
|
||||
|
||||
error: `"f\""` is not a valid identifier
|
||||
--> tests/ui/invalid-ident.rs:8:8
|
||||
|
|
||||
8 | fn [<f '"'>]() {}
|
||||
| ^^^^^^^^^
|
||||
|
||||
error: `"f'"` is not a valid identifier
|
||||
--> tests/ui/invalid-ident.rs:12:8
|
||||
|
|
||||
12 | fn [<f "'">]() {}
|
||||
| ^^^^^^^^^
|
||||
|
|
|
@ -1,4 +1,6 @@
|
|||
paste::item! {
|
||||
use paste::paste;
|
||||
|
||||
paste! {
|
||||
fn [<env! huh>]() {}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
error: expected `(`
|
||||
--> $DIR/missing-paren-on-env.rs:2:15
|
||||
--> tests/ui/missing-paren-on-env.rs:4:15
|
||||
|
|
||||
2 | fn [<env! huh>]() {}
|
||||
4 | fn [<env! huh>]() {}
|
||||
| ^^^
|
||||
|
|
|
@ -1,4 +1,6 @@
|
|||
paste::item! {
|
||||
use paste::paste;
|
||||
|
||||
paste! {
|
||||
fn [<a env!("PASTE_UNKNOWN") b>]() {}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
error: no such env var: "PASTE_UNKNOWN"
|
||||
--> $DIR/no-env-var.rs:2:17
|
||||
--> tests/ui/no-env-var.rs:4:17
|
||||
|
|
||||
2 | fn [<a env!("PASTE_UNKNOWN") b>]() {}
|
||||
4 | fn [<a env!("PASTE_UNKNOWN") b>]() {}
|
||||
| ^^^^^^^^^^^^^^^
|
||||
|
|
|
@ -1,4 +1,6 @@
|
|||
paste::item! {
|
||||
use paste::paste;
|
||||
|
||||
paste! {
|
||||
fn [<name:0>]() {}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
error: expected identifier after `:`
|
||||
--> $DIR/no-ident-after-colon.rs:2:15
|
||||
--> tests/ui/no-ident-after-colon.rs:4:15
|
||||
|
|
||||
2 | fn [<name:0>]() {}
|
||||
4 | fn [<name:0>]() {}
|
||||
| ^
|
||||
|
|
|
@ -1,4 +1,6 @@
|
|||
paste::item! {
|
||||
use paste::paste;
|
||||
|
||||
paste! {
|
||||
fn [<a {} b>]() {}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
error: unexpected token
|
||||
--> $DIR/unexpected-group.rs:2:12
|
||||
--> tests/ui/unexpected-group.rs:4:12
|
||||
|
|
||||
2 | fn [<a {} b>]() {}
|
||||
4 | fn [<a {} b>]() {}
|
||||
| ^^
|
||||
|
|
|
@ -1,4 +1,6 @@
|
|||
paste::item! {
|
||||
use paste::paste;
|
||||
|
||||
paste! {
|
||||
fn [<:lower x>]() {}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
error: unexpected modifier
|
||||
--> $DIR/unexpected-modifier.rs:2:10
|
||||
--> tests/ui/unexpected-modifier.rs:4:10
|
||||
|
|
||||
2 | fn [<:lower x>]() {}
|
||||
4 | fn [<:lower x>]() {}
|
||||
| ^^^^^^
|
||||
|
|
|
@ -1,4 +1,6 @@
|
|||
paste::item! {
|
||||
use paste::paste;
|
||||
|
||||
paste! {
|
||||
fn [<a + b>]() {}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
error: unexpected punct
|
||||
--> $DIR/unexpected-punct.rs:2:12
|
||||
--> tests/ui/unexpected-punct.rs:4:12
|
||||
|
|
||||
2 | fn [<a + b>]() {}
|
||||
4 | fn [<a + b>]() {}
|
||||
| ^
|
||||
|
|
|
@ -1,5 +1,21 @@
|
|||
paste::item! {
|
||||
fn [<1e+100>]() {}
|
||||
use paste::paste;
|
||||
|
||||
paste! {
|
||||
fn [<x 1e+100 z>]() {}
|
||||
}
|
||||
|
||||
paste! {
|
||||
// `xyz` is not correct. `xbyz` is certainly not correct. Maybe `x121z`
|
||||
// would be justifiable but for now don't accept this.
|
||||
fn [<x b'y' z>]() {}
|
||||
}
|
||||
|
||||
paste! {
|
||||
fn [<x b"y" z>]() {}
|
||||
}
|
||||
|
||||
paste! {
|
||||
fn [<x br"y" z>]() {}
|
||||
}
|
||||
|
||||
fn main() {}
|
||||
|
|
|
@ -1,5 +1,23 @@
|
|||
error: unsupported literal
|
||||
--> $DIR/unsupported-literal.rs:2:10
|
||||
--> tests/ui/unsupported-literal.rs:4:12
|
||||
|
|
||||
2 | fn [<1e+100>]() {}
|
||||
| ^^^^^^
|
||||
4 | fn [<x 1e+100 z>]() {}
|
||||
| ^^^^^^
|
||||
|
||||
error: unsupported literal
|
||||
--> tests/ui/unsupported-literal.rs:10:12
|
||||
|
|
||||
10 | fn [<x b'y' z>]() {}
|
||||
| ^^^^
|
||||
|
||||
error: unsupported literal
|
||||
--> tests/ui/unsupported-literal.rs:14:12
|
||||
|
|
||||
14 | fn [<x b"y" z>]() {}
|
||||
| ^^^^
|
||||
|
||||
error: unsupported literal
|
||||
--> tests/ui/unsupported-literal.rs:18:12
|
||||
|
|
||||
18 | fn [<x br"y" z>]() {}
|
||||
| ^^^^^
|
||||
|
|
|
@ -1,4 +1,6 @@
|
|||
paste::item! {
|
||||
use paste::paste;
|
||||
|
||||
paste! {
|
||||
fn [<a:pillow>]() {}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
error: unsupported modifier
|
||||
--> $DIR/unsupported-modifier.rs:2:11
|
||||
--> tests/ui/unsupported-modifier.rs:4:11
|
||||
|
|
||||
2 | fn [<a:pillow>]() {}
|
||||
4 | fn [<a:pillow>]() {}
|
||||
| ^^^^^^^
|
||||
|
|