Bug 1663715 - Update syn and proc-macro2 so that Firefox can build on Rust nightly again. r=froydnj

Generated with:

  cargo update -p syn --precise 1.0.40
  ./mach vendor rust

Rust issue: https://github.com/rust-lang/rust/issues/76482

Differential Revision: https://phabricator.services.mozilla.com/D89473
This commit is contained in:
Emilio Cobos Álvarez 2020-09-08 17:00:48 +00:00
Родитель b51ef49138
Коммит 188dcede21
99 изменённых файлов: 17100 добавлений и 5789 удалений

8
Cargo.lock сгенерированный
Просмотреть файл

@ -3821,9 +3821,9 @@ dependencies = [
[[package]]
name = "proc-macro2"
version = "1.0.5"
version = "1.0.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "90cf5f418035b98e655e9cdb225047638296b862b42411c4e45bb88d700f7fc0"
checksum = "175c513d55719db99da20232b06cda8bab6b83ec2d04e3283edf0213c37c1a29"
dependencies = [
"unicode-xid",
]
@ -4817,9 +4817,9 @@ dependencies = [
[[package]]
name = "syn"
version = "1.0.5"
version = "1.0.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "66850e97125af79138385e9b88339cbcd037e3f28ceab8c5ad98e64f0f1f80bf"
checksum = "963f7d3cc59b59b9325165add223142bbf1df27655d07789f109896d353d8350"
dependencies = [
"proc-macro2",
"quote",

Просмотреть файл

@ -1 +1 @@
{"files":{"Cargo.toml":"e2c1fc6ed317eeef8462fcd192f6b6389e1d84f0d7afeac78f12c23903deddf8","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"362a2156f7645528061b6e8487a2eb0f32f1693012ed82ee57afa05c039bba0d","build.rs":"0cc6e2cb919ddbff59cf1d810283939f97a59f0037540c0f2ee3453237635ff8","src/fallback.rs":"5c6379a90735e27abcc40253b223158c6b1e5784f3850bc423335363e87ef038","src/lib.rs":"ae5251296ad3fcd8b600919a993fec0afd8b56da3e11fef6bc7265b273129936","src/strnom.rs":"37f7791f73f123817ad5403af1d4e2a0714be27401729a2d451bc80b1f26bac9","src/wrapper.rs":"81372e910604217a625aa71c47d43e65f4e008456eae93ac39325c9abf10701a","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"c2652e3ae1dfcb94d2e6313b29712c5dcbd0fe62026913e67bb7cebd7560aade","tests/test.rs":"8c427be9cba1fa8d4a16647e53e3545e5863e29e2c0b311c93c9dd1399abf6a1"},"package":"90cf5f418035b98e655e9cdb225047638296b862b42411c4e45bb88d700f7fc0"}
{"files":{"Cargo.toml":"c20c4c52342e65ea11ad8382edc636e628e8f8c5ab7cffddc32426b2fe8fe4cd","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"e1f9d4fc22cff2c049f166a403b41458632a94357890d31cf0e3ad83807fb430","build.rs":"332185d7ad4c859210f5edd7a76bc95146c8277726a2f81417f34927c4424d68","src/detection.rs":"9d25d896889e65330858f2d6f6223c1b98cd1dad189813ad4161ff189fbda2b8","src/fallback.rs":"239f9a25c0f2ab57592288d944c7f1a0f887536b6d4dc2428a17640af8d10a41","src/lib.rs":"2b1d98424c9b23b547dabf85554120e5e65472026a0f3f711b3a097bca7c32fe","src/parse.rs":"500edee9773132e27e44d0fdaa042b1cb9451e29e65124493986f51710c0664c","src/wrapper.rs":"d36c0dced7ec0e7585c1f935cda836080bcae6de1de3d7851d962e9e11a3ac48","tests/comments.rs":"ea6cbe6f4c8852e6a0612893c7d4f2c144a2e6a134a6c3db641a320cbfc3c800","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"c2652e3ae1dfcb94d2e6313b29712c5dcbd0fe62026913e67bb7cebd7560aade","tests/test.rs":"310c856e27ff61c9ec7f0a5cd96031aac02971557b1621f5e17b089d58e79bcd","tests/test_fmt.rs":"745dfdc41d09c5308c221395eb43f2041f0a1413d2927a813bc2ad4554438fe2"},"package":"175c513d55719db99da20232b06cda8bab6b83ec2d04e3283edf0213c37c1a29"}

15
third_party/rust/proc-macro2/Cargo.toml поставляемый
Просмотреть файл

@ -13,21 +13,22 @@
[package]
edition = "2018"
name = "proc-macro2"
version = "1.0.5"
authors = ["Alex Crichton <alex@alexcrichton.com>"]
description = "A stable implementation of the upcoming new `proc_macro` API. Comes with an\noption, off by default, to also reimplement itself in terms of the upstream\nunstable API.\n"
homepage = "https://github.com/alexcrichton/proc-macro2"
version = "1.0.20"
authors = ["Alex Crichton <alex@alexcrichton.com>", "David Tolnay <dtolnay@gmail.com>"]
description = "A substitute implementation of the compiler's `proc_macro` API to decouple\ntoken-based libraries from the procedural macro use case.\n"
documentation = "https://docs.rs/proc-macro2"
readme = "README.md"
keywords = ["macros"]
categories = ["development-tools::procedural-macro-helpers"]
license = "MIT OR Apache-2.0"
repository = "https://github.com/alexcrichton/proc-macro2"
[package.metadata.docs.rs]
rustc-args = ["--cfg", "procmacro2_semver_exempt"]
rustdoc-args = ["--cfg", "procmacro2_semver_exempt"]
targets = ["x86_64-unknown-linux-gnu"]
[lib]
name = "proc_macro2"
[package.metadata.playground]
features = ["span-locations"]
[dependencies.unicode-xid]
version = "0.2"
[dev-dependencies.quote]
@ -39,5 +40,3 @@ default = ["proc-macro"]
nightly = []
proc-macro = []
span-locations = []
[badges.travis-ci]
repository = "alexcrichton/proc-macro2"

2
third_party/rust/proc-macro2/README.md поставляемый
Просмотреть файл

@ -1,6 +1,6 @@
# proc-macro2
[![Build Status](https://api.travis-ci.com/alexcrichton/proc-macro2.svg?branch=master)](https://travis-ci.com/alexcrichton/proc-macro2)
[![Build Status](https://img.shields.io/github/workflow/status/alexcrichton/proc-macro2/build%20and%20test)](https://github.com/alexcrichton/proc-macro2/actions)
[![Latest Version](https://img.shields.io/crates/v/proc-macro2.svg)](https://crates.io/crates/proc-macro2)
[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/proc-macro2)

12
third_party/rust/proc-macro2/build.rs поставляемый
Просмотреть файл

@ -14,6 +14,10 @@
// procmacro2_semver_exempt surface area is implemented by using the
// nightly-only proc_macro API.
//
// "hygiene"
// Enable Span::mixed_site() and non-dummy behavior of Span::resolved_at
// and Span::located_at. Enabled on Rust 1.45+.
//
// "proc_macro_span"
// Enable non-dummy behavior of Span::start and Span::end methods which
// requires an unstable compiler feature. Enabled when building with
@ -57,6 +61,14 @@ fn main() {
println!("cargo:rustc-cfg=span_locations");
}
if version.minor < 39 {
println!("cargo:rustc-cfg=no_bind_by_move_pattern_guard");
}
if version.minor >= 45 {
println!("cargo:rustc-cfg=hygiene");
}
let target = env::var("TARGET").unwrap();
if !enable_use_proc_macro(&target) {
return;

67
third_party/rust/proc-macro2/src/detection.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,67 @@
use std::panic::{self, PanicInfo};
use std::sync::atomic::*;
use std::sync::Once;

// Cached result of probing proc_macro availability:
//   0 = not yet determined
//   1 = proc_macro does NOT work (use the fallback implementation)
//   2 = proc_macro works (we are inside a procedural macro invocation)
static WORKS: AtomicUsize = AtomicUsize::new(0);
// Guards the one-time panic-hook dance in `initialize` so concurrent callers
// cannot clobber each other's hook swaps (see comment above `initialize`).
static INIT: Once = Once::new();

// Returns whether the compiler's real proc_macro API is usable right now.
// Fast path is a single atomic load; the first call (or a call racing the
// first call) runs `initialize` exactly once and then re-reads the cache.
pub(crate) fn inside_proc_macro() -> bool {
    match WORKS.load(Ordering::SeqCst) {
        1 => return false,
        2 => return true,
        _ => {}
    }
    INIT.call_once(initialize);
    // WORKS is guaranteed nonzero after call_once returns, so this recursion
    // takes one of the early returns above.
    inside_proc_macro()
}

// Force use of the fallback implementation regardless of the probe result.
pub(crate) fn force_fallback() {
    WORKS.store(1, Ordering::SeqCst);
}

// Undo `force_fallback` by re-probing proc_macro availability.
// Deliberately bypasses INIT: the probe is re-run even if it ran before.
pub(crate) fn unforce_fallback() {
    initialize();
}
// Swap in a null panic hook to avoid printing "thread panicked" to stderr,
// then use catch_unwind to determine whether the compiler's proc_macro is
// working. When proc-macro2 is used from outside of a procedural macro all
// of the proc_macro crate's APIs currently panic.
//
// The Once is to prevent the possibility of this ordering:
//
// thread 1 calls take_hook, gets the user's original hook
// thread 1 calls set_hook with the null hook
// thread 2 calls take_hook, thinks null hook is the original hook
// thread 2 calls set_hook with the null hook
// thread 1 calls set_hook with the actual original hook
// thread 2 calls set_hook with what it thinks is the original hook
//
// in which the user's hook has been lost.
//
// There is still a race condition where a panic in a different thread can
// happen during the interval that the user's original panic hook is
// unregistered such that their hook is incorrectly not called. This is
// sufficiently unlikely and less bad than printing panic messages to stderr
// on correct use of this crate. Maybe there is a libstd feature request
// here. For now, if a user needs to guarantee that this failure mode does
// not occur, they need to call e.g. `proc_macro2::Span::call_site()` from
// the main thread before launching any other threads.
// Probe whether proc_macro works by calling `Span::call_site` under
// `catch_unwind`, with a null panic hook temporarily installed so the probe
// panic is not printed to stderr. Stores the result (1 or 2) into WORKS.
// See the long comment above for the race-condition reasoning.
fn initialize() {
    type PanicHook = dyn Fn(&PanicInfo) + Sync + Send + 'static;

    let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ });
    // Remember the null hook's address so we can verify we got it back.
    let sanity_check = &*null_hook as *const PanicHook;
    let original_hook = panic::take_hook();
    panic::set_hook(null_hook);

    // Outside of a proc macro invocation this call panics; inside one it works.
    let works = panic::catch_unwind(proc_macro::Span::call_site).is_ok();
    // Map false -> 1 (fallback), true -> 2 (proc_macro works).
    WORKS.store(works as usize + 1, Ordering::SeqCst);

    let hopefully_null_hook = panic::take_hook();
    panic::set_hook(original_hook);
    // If another thread swapped hooks between our set_hook and take_hook, we
    // cannot safely restore state; fail loudly rather than lose a user hook.
    if sanity_check != &*hopefully_null_hook {
        panic!("observed race condition in proc_macro2::inside_proc_macro");
    }
}

1004
third_party/rust/proc-macro2/src/fallback.rs поставляемый

Разница между файлами не показана из-за своего большого размера Загрузить разницу

151
third_party/rust/proc-macro2/src/lib.rs поставляемый
Просмотреть файл

@ -78,15 +78,16 @@
//! a different thread.
// Proc-macro2 types in rustdoc of other crates get linked to here.
#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.5")]
#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.20")]
#![cfg_attr(any(proc_macro_span, super_unstable), feature(proc_macro_span))]
#![cfg_attr(super_unstable, feature(proc_macro_raw_ident, proc_macro_def_site))]
#![allow(clippy::needless_doctest_main)]
#[cfg(use_proc_macro)]
extern crate proc_macro;
use std::cmp::Ordering;
use std::fmt;
use std::fmt::{self, Debug, Display};
use std::hash::{Hash, Hasher};
use std::iter::FromIterator;
use std::marker;
@ -96,9 +97,15 @@ use std::path::PathBuf;
use std::rc::Rc;
use std::str::FromStr;
#[macro_use]
mod strnom;
mod fallback;
mod parse;
#[cfg(wrap_proc_macro)]
mod detection;
// Public for proc_macro2::fallback::force() and unforce(), but those are quite
// a niche use case so we omit it from rustdoc.
#[doc(hidden)]
pub mod fallback;
#[cfg(not(wrap_proc_macro))]
use crate::fallback as imp;
@ -228,22 +235,22 @@ impl FromIterator<TokenStream> for TokenStream {
/// convertible back into the same token stream (modulo spans), except for
/// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative
/// numeric literals.
impl fmt::Display for TokenStream {
impl Display for TokenStream {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.inner.fmt(f)
Display::fmt(&self.inner, f)
}
}
/// Prints token in a form convenient for debugging.
impl fmt::Debug for TokenStream {
impl Debug for TokenStream {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.inner.fmt(f)
Debug::fmt(&self.inner, f)
}
}
impl fmt::Debug for LexError {
impl Debug for LexError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.inner.fmt(f)
Debug::fmt(&self.inner, f)
}
}
@ -291,9 +298,9 @@ impl SourceFile {
}
#[cfg(procmacro2_semver_exempt)]
impl fmt::Debug for SourceFile {
impl Debug for SourceFile {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.inner.fmt(f)
Debug::fmt(&self.inner, f)
}
}
@ -311,6 +318,22 @@ pub struct LineColumn {
pub column: usize,
}
#[cfg(span_locations)]
impl Ord for LineColumn {
fn cmp(&self, other: &Self) -> Ordering {
self.line
.cmp(&other.line)
.then(self.column.cmp(&other.column))
}
}
#[cfg(span_locations)]
impl PartialOrd for LineColumn {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
/// A region of source code, along with macro expansion information.
#[derive(Copy, Clone)]
pub struct Span {
@ -342,6 +365,16 @@ impl Span {
Span::_new(imp::Span::call_site())
}
/// The span located at the invocation of the procedural macro, but with
/// local variables, labels, and `$crate` resolved at the definition site
/// of the macro. This is the same hygiene behavior as `macro_rules`.
///
/// This function requires Rust 1.45 or later.
#[cfg(hygiene)]
pub fn mixed_site() -> Span {
Span::_new(imp::Span::mixed_site())
}
/// A span that resolves at the macro definition site.
///
/// This method is semver exempt and not exposed by default.
@ -352,18 +385,12 @@ impl Span {
/// Creates a new span with the same line/column information as `self` but
/// that resolves symbols as though it were at `other`.
///
/// This method is semver exempt and not exposed by default.
#[cfg(procmacro2_semver_exempt)]
pub fn resolved_at(&self, other: Span) -> Span {
Span::_new(self.inner.resolved_at(other.inner))
}
/// Creates a new span with the same name resolution behavior as `self` but
/// with the line/column information of `other`.
///
/// This method is semver exempt and not exposed by default.
#[cfg(procmacro2_semver_exempt)]
pub fn located_at(&self, other: Span) -> Span {
Span::_new(self.inner.located_at(other.inner))
}
@ -439,9 +466,9 @@ impl Span {
}
/// Prints a span in a form convenient for debugging.
impl fmt::Debug for Span {
impl Debug for Span {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.inner.fmt(f)
Debug::fmt(&self.inner, f)
}
}
@ -462,11 +489,11 @@ impl TokenTree {
/// Returns the span of this tree, delegating to the `span` method of
/// the contained token or a delimited stream.
pub fn span(&self) -> Span {
match *self {
TokenTree::Group(ref t) => t.span(),
TokenTree::Ident(ref t) => t.span(),
TokenTree::Punct(ref t) => t.span(),
TokenTree::Literal(ref t) => t.span(),
match self {
TokenTree::Group(t) => t.span(),
TokenTree::Ident(t) => t.span(),
TokenTree::Punct(t) => t.span(),
TokenTree::Literal(t) => t.span(),
}
}
@ -476,11 +503,11 @@ impl TokenTree {
/// the span of each of the internal tokens, this will simply delegate to
/// the `set_span` method of each variant.
pub fn set_span(&mut self, span: Span) {
match *self {
TokenTree::Group(ref mut t) => t.set_span(span),
TokenTree::Ident(ref mut t) => t.set_span(span),
TokenTree::Punct(ref mut t) => t.set_span(span),
TokenTree::Literal(ref mut t) => t.set_span(span),
match self {
TokenTree::Group(t) => t.set_span(span),
TokenTree::Ident(t) => t.set_span(span),
TokenTree::Punct(t) => t.set_span(span),
TokenTree::Literal(t) => t.set_span(span),
}
}
}
@ -513,32 +540,32 @@ impl From<Literal> for TokenTree {
/// convertible back into the same token tree (modulo spans), except for
/// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative
/// numeric literals.
impl fmt::Display for TokenTree {
impl Display for TokenTree {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
TokenTree::Group(ref t) => t.fmt(f),
TokenTree::Ident(ref t) => t.fmt(f),
TokenTree::Punct(ref t) => t.fmt(f),
TokenTree::Literal(ref t) => t.fmt(f),
match self {
TokenTree::Group(t) => Display::fmt(t, f),
TokenTree::Ident(t) => Display::fmt(t, f),
TokenTree::Punct(t) => Display::fmt(t, f),
TokenTree::Literal(t) => Display::fmt(t, f),
}
}
}
/// Prints token tree in a form convenient for debugging.
impl fmt::Debug for TokenTree {
impl Debug for TokenTree {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// Each of these has the name in the struct type in the derived debug,
// so don't bother with an extra layer of indirection
match *self {
TokenTree::Group(ref t) => t.fmt(f),
TokenTree::Ident(ref t) => {
match self {
TokenTree::Group(t) => Debug::fmt(t, f),
TokenTree::Ident(t) => {
let mut debug = f.debug_struct("Ident");
debug.field("sym", &format_args!("{}", t));
imp::debug_span_field_if_nontrivial(&mut debug, t.span().inner);
debug.finish()
}
TokenTree::Punct(ref t) => t.fmt(f),
TokenTree::Literal(ref t) => t.fmt(f),
TokenTree::Punct(t) => Debug::fmt(t, f),
TokenTree::Literal(t) => Debug::fmt(t, f),
}
}
}
@ -651,15 +678,15 @@ impl Group {
/// Prints the group as a string that should be losslessly convertible back
/// into the same group (modulo spans), except for possibly `TokenTree::Group`s
/// with `Delimiter::None` delimiters.
impl fmt::Display for Group {
impl Display for Group {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(&self.inner, formatter)
Display::fmt(&self.inner, formatter)
}
}
impl fmt::Debug for Group {
impl Debug for Group {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(&self.inner, formatter)
Debug::fmt(&self.inner, formatter)
}
}
@ -730,13 +757,13 @@ impl Punct {
/// Prints the punctuation character as a string that should be losslessly
/// convertible back into the same character.
impl fmt::Display for Punct {
impl Display for Punct {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.op.fmt(f)
Display::fmt(&self.op, f)
}
}
impl fmt::Debug for Punct {
impl Debug for Punct {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
let mut debug = fmt.debug_struct("Punct");
debug.field("op", &self.op);
@ -920,15 +947,15 @@ impl Hash for Ident {
/// Prints the identifier as a string that should be losslessly convertible back
/// into the same identifier.
impl fmt::Display for Ident {
impl Display for Ident {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.inner.fmt(f)
Display::fmt(&self.inner, f)
}
}
impl fmt::Debug for Ident {
impl Debug for Ident {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.inner.fmt(f)
Debug::fmt(&self.inner, f)
}
}
@ -1140,26 +1167,26 @@ impl Literal {
}
}
impl fmt::Debug for Literal {
impl Debug for Literal {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.inner.fmt(f)
Debug::fmt(&self.inner, f)
}
}
impl fmt::Display for Literal {
impl Display for Literal {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.inner.fmt(f)
Display::fmt(&self.inner, f)
}
}
/// Public implementation details for the `TokenStream` type, such as iterators.
pub mod token_stream {
use std::fmt;
use crate::{imp, TokenTree};
use std::fmt::{self, Debug};
use std::marker;
use std::rc::Rc;
pub use crate::TokenStream;
use crate::{imp, TokenTree};
/// An iterator over `TokenStream`'s `TokenTree`s.
///
@ -1179,9 +1206,9 @@ pub mod token_stream {
}
}
impl fmt::Debug for IntoIter {
impl Debug for IntoIter {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.inner.fmt(f)
Debug::fmt(&self.inner, f)
}
}

791
third_party/rust/proc-macro2/src/parse.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,791 @@
use crate::fallback::{
is_ident_continue, is_ident_start, Group, LexError, Literal, Span, TokenStream,
};
use crate::{Delimiter, Punct, Spacing, TokenTree};
use std::str::{Bytes, CharIndices, Chars};
use unicode_xid::UnicodeXID;
// A lightweight view into the remaining unparsed input. Copyable so parser
// functions can take it by value and return an advanced copy on success.
#[derive(Copy, Clone, Eq, PartialEq)]
pub(crate) struct Cursor<'a> {
    // The not-yet-consumed tail of the source string.
    pub rest: &'a str,
    // Offset of `rest` from the start of the input, counted in chars (see
    // `advance`), used for span line/column bookkeeping.
    #[cfg(span_locations)]
    pub off: u32,
}
impl<'a> Cursor<'a> {
    // Returns a cursor advanced past the first `bytes` bytes of `rest`.
    // Panics (via split_at) if `bytes` is not a char boundary — callers are
    // responsible for passing boundary-aligned lengths.
    fn advance(&self, bytes: usize) -> Cursor<'a> {
        let (_front, rest) = self.rest.split_at(bytes);
        Cursor {
            rest,
            // Note: `off` advances by the number of *chars* consumed, not
            // bytes, so span offsets are char-based.
            #[cfg(span_locations)]
            off: self.off + _front.chars().count() as u32,
        }
    }

    fn starts_with(&self, s: &str) -> bool {
        self.rest.starts_with(s)
    }

    pub(crate) fn is_empty(&self) -> bool {
        self.rest.is_empty()
    }

    // Remaining length in bytes.
    fn len(&self) -> usize {
        self.rest.len()
    }

    fn as_bytes(&self) -> &'a [u8] {
        self.rest.as_bytes()
    }

    fn bytes(&self) -> Bytes<'a> {
        self.rest.bytes()
    }

    fn chars(&self) -> Chars<'a> {
        self.rest.chars()
    }

    fn char_indices(&self) -> CharIndices<'a> {
        self.rest.char_indices()
    }

    // Consume the literal `tag` from the front, or fail with LexError.
    fn parse(&self, tag: &str) -> Result<Cursor<'a>, LexError> {
        if self.starts_with(tag) {
            Ok(self.advance(tag.len()))
        } else {
            Err(LexError)
        }
    }
}
// Parser result: on success, the cursor past the consumed input plus the
// parsed value; on failure, LexError.
type PResult<'a, O> = Result<(Cursor<'a>, O), LexError>;

// Skip whitespace and non-doc comments. Doc comments (`///`, `//!`, `/**`,
// `/*!`) are NOT skipped here — they are real tokens handled by
// `doc_comment`, which is why the `//`/`/*` guards below carve them out.
fn skip_whitespace(input: Cursor) -> Cursor {
    let mut s = input;
    while !s.is_empty() {
        let byte = s.as_bytes()[0];
        if byte == b'/' {
            // `//` line comment, but not `///` (outer doc) or `//!` (inner
            // doc); `////...` degrades back to a plain comment.
            if s.starts_with("//")
                && (!s.starts_with("///") || s.starts_with("////"))
                && !s.starts_with("//!")
            {
                let (cursor, _) = take_until_newline_or_eof(s);
                s = cursor;
                continue;
            } else if s.starts_with("/**/") {
                // Empty block comment: `/**/` is not a doc comment.
                s = s.advance(4);
                continue;
            } else if s.starts_with("/*")
                && (!s.starts_with("/**") || s.starts_with("/***"))
                && !s.starts_with("/*!")
            {
                // Plain (possibly nested) block comment.
                match block_comment(s) {
                    Ok((rest, _)) => {
                        s = rest;
                        continue;
                    }
                    // Unterminated block comment: stop skipping and let the
                    // token parser report the error.
                    Err(LexError) => return s,
                }
            }
        }
        match byte {
            // ASCII fast path: space, tab, LF, VT, FF, CR.
            b' ' | 0x09..=0x0d => {
                s = s.advance(1);
                continue;
            }
            // Other ASCII is never whitespace.
            b if b <= 0x7f => {}
            _ => {
                // Multibyte char: fall back to the full whitespace check.
                let ch = s.chars().next().unwrap();
                if is_whitespace(ch) {
                    s = s.advance(ch.len_utf8());
                    continue;
                }
            }
        }
        return s;
    }
    s
}

// Consume a (possibly nested) `/* ... */` block comment, returning the
// comment text including its delimiters.
fn block_comment(input: Cursor) -> PResult<&str> {
    if !input.starts_with("/*") {
        return Err(LexError);
    }

    let mut depth = 0;
    let bytes = input.as_bytes();
    let mut i = 0;
    // Stop one byte early so bytes[i + 1] below is always in bounds.
    let upper = bytes.len() - 1;

    while i < upper {
        if bytes[i] == b'/' && bytes[i + 1] == b'*' {
            depth += 1;
            i += 1; // eat '*'
        } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
            depth -= 1;
            if depth == 0 {
                return Ok((input.advance(i + 2), &input.rest[..i + 2]));
            }
            i += 1; // eat '/'
        }
        i += 1;
    }

    // Ran out of input before the matching `*/`.
    Err(LexError)
}
// Whether `ch` counts as whitespace for the lexer. Matches Rust's own rule,
// which additionally treats the Unicode left-to-right mark (U+200E) and
// right-to-left mark (U+200F) as whitespace.
fn is_whitespace(ch: char) -> bool {
    match ch {
        '\u{200e}' | '\u{200f}' => true,
        other => other.is_whitespace(),
    }
}
// Succeeds only if the cursor sits at a word boundary: either end of input,
// or a next char that could not continue an identifier. Used after numeric
// literals so e.g. `123abc` is rejected rather than lexed as `123` + `abc`.
fn word_break(input: Cursor) -> Result<Cursor, LexError> {
    if let Some(ch) = input.chars().next() {
        if UnicodeXID::is_xid_continue(ch) {
            return Err(LexError);
        }
    }
    Ok(input)
}
// Entry point of the fallback lexer: parse the entire input into a
// TokenStream. Delimited groups are handled iteratively with an explicit
// stack of open delimiters (avoids recursion on deeply nested input).
pub(crate) fn token_stream(mut input: Cursor) -> PResult<TokenStream> {
    // Trees collected at the current nesting level.
    let mut trees = Vec::new();
    // For each currently-open group: its delimiter and the trees of the
    // enclosing level (plus the group's start offset under span_locations).
    let mut stack = Vec::new();

    loop {
        input = skip_whitespace(input);

        // Doc comments expand to `#[doc = "..."]` attribute tokens.
        if let Ok((rest, tt)) = doc_comment(input) {
            trees.extend(tt);
            input = rest;
            continue;
        }

        #[cfg(span_locations)]
        let lo = input.off;

        let first = match input.bytes().next() {
            Some(first) => first,
            None => break,
        };

        if let Some(open_delimiter) = match first {
            b'(' => Some(Delimiter::Parenthesis),
            b'[' => Some(Delimiter::Bracket),
            b'{' => Some(Delimiter::Brace),
            _ => None,
        } {
            // Open a new group: push the current level and start fresh.
            input = input.advance(1);
            let frame = (open_delimiter, trees);
            #[cfg(span_locations)]
            let frame = (lo, frame);
            stack.push(frame);
            trees = Vec::new();
        } else if let Some(close_delimiter) = match first {
            b')' => Some(Delimiter::Parenthesis),
            b']' => Some(Delimiter::Bracket),
            b'}' => Some(Delimiter::Brace),
            _ => None,
        } {
            // Close the innermost group; the closer must match its opener.
            input = input.advance(1);
            let frame = stack.pop().ok_or(LexError)?;
            #[cfg(span_locations)]
            let (lo, frame) = frame;
            let (open_delimiter, outer) = frame;
            if open_delimiter != close_delimiter {
                return Err(LexError);
            }
            let mut g = Group::new(open_delimiter, TokenStream { inner: trees });
            g.set_span(Span {
                #[cfg(span_locations)]
                lo,
                #[cfg(span_locations)]
                hi: input.off,
            });
            trees = outer;
            trees.push(TokenTree::Group(crate::Group::_new_stable(g)));
        } else {
            // Not a delimiter: lex a single leaf token (literal/punct/ident).
            let (rest, mut tt) = leaf_token(input)?;
            tt.set_span(crate::Span::_new_stable(Span {
                #[cfg(span_locations)]
                lo,
                #[cfg(span_locations)]
                hi: rest.off,
            }));
            trees.push(tt);
            input = rest;
        }
    }

    // Any group still open at end of input is an error.
    if stack.is_empty() {
        Ok((input, TokenStream { inner: trees }))
    } else {
        Err(LexError)
    }
}
// Lex one non-group token. Literals are tried before idents so that
// number-like and prefixed-string forms are not mis-lexed as identifiers.
fn leaf_token(input: Cursor) -> PResult<TokenTree> {
    if let Ok((rest, l)) = literal(input) {
        // must be parsed before ident
        return Ok((rest, TokenTree::Literal(crate::Literal::_new_stable(l))));
    }
    if let Ok((rest, p)) = op(input) {
        return Ok((rest, TokenTree::Punct(p)));
    }
    if let Ok((rest, i)) = ident(input) {
        return Ok((rest, TokenTree::Ident(i)));
    }
    Err(LexError)
}
// Lex an identifier, including raw identifiers (`r#foo`). `r#_` is rejected
// because `_` alone is not a valid raw identifier.
fn ident(input: Cursor) -> PResult<crate::Ident> {
    let raw = input.starts_with("r#");
    // Skip the `r#` prefix: (raw as usize) << 1 is 2 when raw, else 0.
    let rest = input.advance((raw as usize) << 1);
    let (rest, sym) = ident_not_raw(rest)?;

    if !raw {
        let ident = crate::Ident::new(sym, crate::Span::call_site());
        return Ok((rest, ident));
    }

    if sym == "_" {
        return Err(LexError);
    }

    let ident = crate::Ident::_new_raw(sym, crate::Span::call_site());
    Ok((rest, ident))
}

// Lex a plain (non-raw) identifier: an XID-start-ish char (see
// `is_ident_start`) followed by any number of continue chars. Returns the
// cursor past the identifier together with its text.
fn ident_not_raw(input: Cursor) -> PResult<&str> {
    let mut chars = input.char_indices();

    match chars.next() {
        Some((_, ch)) if is_ident_start(ch) => {}
        _ => return Err(LexError),
    }

    // If the loop never breaks, the identifier runs to end of input.
    let mut end = input.len();
    for (i, ch) in chars {
        if !is_ident_continue(ch) {
            end = i;
            break;
        }
    }

    Ok((input.advance(end), &input.rest[..end]))
}
// Lex a literal, capturing its source text into a fallback Literal.
fn literal(input: Cursor) -> PResult<Literal> {
    match literal_nocapture(input) {
        Ok(a) => {
            // Length consumed = original remaining length minus what's left.
            let end = input.len() - a.len();
            Ok((a, Literal::_new(input.rest[..end].to_string())))
        }
        Err(LexError) => Err(LexError),
    }
}

// Try each literal form in order, returning only the advanced cursor.
// Order matters: e.g. byte strings (`b"`) must be tried before bytes (`b'`)
// would never conflict, but strings/floats/ints are ordered so prefixes are
// resolved before more general forms.
fn literal_nocapture(input: Cursor) -> Result<Cursor, LexError> {
    if let Ok(ok) = string(input) {
        Ok(ok)
    } else if let Ok(ok) = byte_string(input) {
        Ok(ok)
    } else if let Ok(ok) = byte(input) {
        Ok(ok)
    } else if let Ok(ok) = character(input) {
        Ok(ok)
    } else if let Ok(ok) = float(input) {
        Ok(ok)
    } else if let Ok(ok) = int(input) {
        Ok(ok)
    } else {
        Err(LexError)
    }
}

// Consume an optional literal suffix (e.g. the `f32` in `1.0f32`); a missing
// or invalid suffix simply leaves the cursor unchanged.
fn literal_suffix(input: Cursor) -> Cursor {
    match ident_not_raw(input) {
        Ok((input, _)) => input,
        Err(LexError) => input,
    }
}

// Lex a string literal: either cooked (`"..."`) or raw (`r"..."`/`r#"..."#`).
fn string(input: Cursor) -> Result<Cursor, LexError> {
    if let Ok(input) = input.parse("\"") {
        cooked_string(input)
    } else if let Ok(input) = input.parse("r") {
        raw_string(input)
    } else {
        Err(LexError)
    }
}
// Lex the body of a cooked string literal, starting just after the opening
// quote. Validates escapes; does not build the unescaped value.
fn cooked_string(input: Cursor) -> Result<Cursor, LexError> {
    let mut chars = input.char_indices().peekable();

    while let Some((i, ch)) = chars.next() {
        match ch {
            '"' => {
                let input = input.advance(i + 1);
                return Ok(literal_suffix(input));
            }
            '\r' => {
                // A bare CR is not allowed in a string; it must be CRLF.
                if let Some((_, '\n')) = chars.next() {
                    // ...
                } else {
                    break;
                }
            }
            '\\' => match chars.next() {
                Some((_, 'x')) => {
                    if !backslash_x_char(&mut chars) {
                        break;
                    }
                }
                Some((_, 'n')) | Some((_, 'r')) | Some((_, 't')) | Some((_, '\\'))
                | Some((_, '\'')) | Some((_, '"')) | Some((_, '0')) => {}
                Some((_, 'u')) => {
                    if !backslash_u(&mut chars) {
                        break;
                    }
                }
                Some((_, '\n')) | Some((_, '\r')) => {
                    // Line continuation: a backslash before a newline skips
                    // the newline and all following whitespace.
                    while let Some(&(_, ch)) = chars.peek() {
                        if ch.is_whitespace() {
                            chars.next();
                        } else {
                            break;
                        }
                    }
                }
                _ => break,
            },
            _ch => {}
        }
    }
    // Unterminated string or invalid escape.
    Err(LexError)
}

// Lex a byte-string literal: cooked (`b"..."`) or raw (`br"..."`).
fn byte_string(input: Cursor) -> Result<Cursor, LexError> {
    if let Ok(input) = input.parse("b\"") {
        cooked_byte_string(input)
    } else if let Ok(input) = input.parse("br") {
        raw_string(input)
    } else {
        Err(LexError)
    }
}

// Lex the body of a cooked byte string, starting just after `b"`. Same escape
// validation as `cooked_string` but over bytes; contents must be ASCII.
fn cooked_byte_string(mut input: Cursor) -> Result<Cursor, LexError> {
    let mut bytes = input.bytes().enumerate();
    'outer: while let Some((offset, b)) = bytes.next() {
        match b {
            b'"' => {
                let input = input.advance(offset + 1);
                return Ok(literal_suffix(input));
            }
            b'\r' => {
                // Bare CR without LF is rejected, as in cooked_string.
                if let Some((_, b'\n')) = bytes.next() {
                    // ...
                } else {
                    break;
                }
            }
            b'\\' => match bytes.next() {
                Some((_, b'x')) => {
                    if !backslash_x_byte(&mut bytes) {
                        break;
                    }
                }
                Some((_, b'n')) | Some((_, b'r')) | Some((_, b't')) | Some((_, b'\\'))
                | Some((_, b'0')) | Some((_, b'\'')) | Some((_, b'"')) => {}
                Some((newline, b'\n')) | Some((newline, b'\r')) => {
                    // Line continuation: skip whitespace after the newline.
                    // Re-anchor the cursor and byte iterator past the skipped
                    // run, since whitespace here may be multibyte.
                    let rest = input.advance(newline + 1);
                    for (offset, ch) in rest.char_indices() {
                        if !ch.is_whitespace() {
                            input = rest.advance(offset);
                            bytes = input.bytes().enumerate();
                            continue 'outer;
                        }
                    }
                    break;
                }
                _ => break,
            },
            // Any other ASCII byte is fine; non-ASCII is not allowed in a
            // byte string.
            b if b < 0x80 => {}
            _ => break,
        }
    }
    Err(LexError)
}
// Lex the body of a raw (byte) string, starting just after the `r`/`br`
// prefix: `n` leading `#`s, a quote, arbitrary content, then a quote followed
// by the same `n` `#`s.
fn raw_string(input: Cursor) -> Result<Cursor, LexError> {
    let mut chars = input.char_indices();
    // Number of `#`s before the opening quote.
    let mut n = 0;
    while let Some((i, ch)) = chars.next() {
        match ch {
            '"' => {
                n = i;
                break;
            }
            '#' => {}
            _ => return Err(LexError),
        }
    }
    for (i, ch) in chars {
        match ch {
            // Closing quote: the next `n` chars must be the matching `#`s.
            // `input.rest[..n]` is exactly the run of `#`s seen above.
            '"' if input.rest[i + 1..].starts_with(&input.rest[..n]) => {
                let rest = input.advance(i + 1 + n);
                return Ok(literal_suffix(rest));
            }
            // NOTE(review): bare CR appears to be accepted inside raw strings
            // here (both arms are no-ops) — confirm against upstream intent.
            '\r' => {}
            _ => {}
        }
    }
    Err(LexError)
}
// Lex a byte literal `b'...'`: one byte or escape, then a closing quote.
fn byte(input: Cursor) -> Result<Cursor, LexError> {
    let input = input.parse("b'")?;
    let mut bytes = input.bytes().enumerate();

    let ok = match bytes.next().map(|(_, b)| b) {
        Some(b'\\') => match bytes.next().map(|(_, b)| b) {
            Some(b'x') => backslash_x_byte(&mut bytes),
            Some(b'n') | Some(b'r') | Some(b't') | Some(b'\\') | Some(b'0') | Some(b'\'')
            | Some(b'"') => true,
            _ => false,
        },
        b => b.is_some(),
    };
    if !ok {
        return Err(LexError);
    }

    // `offset` is the byte index just past the literal's content.
    let (offset, _) = bytes.next().ok_or(LexError)?;
    // The content must have been a full char (it was consumed bytewise), so
    // check we landed on a char boundary before slicing.
    if !input.chars().as_str().is_char_boundary(offset) {
        return Err(LexError);
    }
    let input = input.advance(offset).parse("'")?;
    Ok(literal_suffix(input))
}

// Lex a character literal `'...'`: one char or escape, then a closing quote.
fn character(input: Cursor) -> Result<Cursor, LexError> {
    let input = input.parse("'")?;
    let mut chars = input.char_indices();

    let ok = match chars.next().map(|(_, ch)| ch) {
        Some('\\') => match chars.next().map(|(_, ch)| ch) {
            Some('x') => backslash_x_char(&mut chars),
            Some('u') => backslash_u(&mut chars),
            Some('n') | Some('r') | Some('t') | Some('\\') | Some('0') | Some('\'') | Some('"') => {
                true
            }
            _ => false,
        },
        ch => ch.is_some(),
    };
    if !ok {
        return Err(LexError);
    }

    // `idx` is the char-indices position just past the literal's content.
    let (idx, _) = chars.next().ok_or(LexError)?;
    let input = input.advance(idx).parse("'")?;
    Ok(literal_suffix(input))
}
// Pull the next element from an (index, char/byte) iterator and require it to
// match one of the given patterns; otherwise make the enclosing function
// return false. Evaluates to the matched char/byte.
macro_rules! next_ch {
    ($chars:ident @ $pat:pat $(| $rest:pat)*) => {
        match $chars.next() {
            Some((_, ch)) => match ch {
                $pat $(| $rest)* => ch,
                _ => return false,
            },
            None => return false,
        }
    };
}

// Validate the two hex digits of a `\xNN` escape in a char/string context.
// The first digit is restricted to 0-7 because `\x` escapes in (non-byte)
// literals must encode a value <= 0x7F.
fn backslash_x_char<I>(chars: &mut I) -> bool
where
    I: Iterator<Item = (usize, char)>,
{
    next_ch!(chars @ '0'..='7');
    next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
    true
}

// Validate the two hex digits of a `\xNN` escape in a byte context, where the
// full 0x00-0xFF range is permitted.
fn backslash_x_byte<I>(chars: &mut I) -> bool
where
    I: Iterator<Item = (usize, u8)>,
{
    next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
    next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
    true
}

// Validate a `\u{...}` escape: an opening brace, at least one hex digit, then
// hex digits and underscores up to the closing brace. Does not check that the
// value is a valid Unicode scalar.
fn backslash_u<I>(chars: &mut I) -> bool
where
    I: Iterator<Item = (usize, char)>,
{
    next_ch!(chars @ '{');
    next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
    loop {
        let c = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F' | '_' | '}');
        if c == '}' {
            return true;
        }
    }
}
// Lex a float literal including any suffix, then require a word break so
// trailing identifier chars are not silently split off.
fn float(input: Cursor) -> Result<Cursor, LexError> {
    let mut rest = float_digits(input)?;
    if let Some(ch) = rest.chars().next() {
        if is_ident_start(ch) {
            // Consume the suffix (e.g. `f32`).
            rest = ident_not_raw(rest)?.0;
        }
    }
    word_break(rest)
}

// Lex the digit portion of a float: integer digits, optional `.` fraction,
// optional `e`/`E` exponent. To actually be a float (rather than an int) it
// must have a dot, an exponent, or an f32/f64 suffix.
fn float_digits(input: Cursor) -> Result<Cursor, LexError> {
    let mut chars = input.chars().peekable();
    match chars.next() {
        Some(ch) if ch >= '0' && ch <= '9' => {}
        _ => return Err(LexError),
    }

    let mut len = 1;
    let mut has_dot = false;
    let mut has_exp = false;
    while let Some(&ch) = chars.peek() {
        match ch {
            '0'..='9' | '_' => {
                chars.next();
                len += 1;
            }
            '.' => {
                if has_dot {
                    break;
                }
                chars.next();
                // `1..2` (range) and `1.foo` (method call) are not floats:
                // reject a dot followed by another dot or an ident start.
                if chars
                    .peek()
                    .map(|&ch| ch == '.' || is_ident_start(ch))
                    .unwrap_or(false)
                {
                    return Err(LexError);
                }
                len += 1;
                has_dot = true;
            }
            'e' | 'E' => {
                chars.next();
                len += 1;
                has_exp = true;
                break;
            }
            _ => break,
        }
    }

    let rest = input.advance(len);
    if !(has_dot || has_exp || rest.starts_with("f32") || rest.starts_with("f64")) {
        return Err(LexError);
    }

    if has_exp {
        // Exponent: optional sign (before any digit), then at least one
        // digit; underscores are allowed throughout.
        let mut has_exp_value = false;
        while let Some(&ch) = chars.peek() {
            match ch {
                '+' | '-' => {
                    if has_exp_value {
                        break;
                    }
                    chars.next();
                    len += 1;
                }
                '0'..='9' => {
                    chars.next();
                    len += 1;
                    has_exp_value = true;
                }
                '_' => {
                    chars.next();
                    len += 1;
                }
                _ => break,
            }
        }
        if !has_exp_value {
            return Err(LexError);
        }
    }

    Ok(input.advance(len))
}
// Lex an integer literal including any suffix, then require a word break.
fn int(input: Cursor) -> Result<Cursor, LexError> {
    let mut rest = digits(input)?;
    if let Some(ch) = rest.chars().next() {
        if is_ident_start(ch) {
            // Consume the suffix (e.g. `u8`, `i64`, `usize`).
            rest = ident_not_raw(rest)?.0;
        }
    }
    word_break(rest)
}

// Lex the digits of an integer in base 10, or 16/8/2 with a `0x`/`0o`/`0b`
// prefix. Underscore separators are allowed, but a base-10 literal may not
// begin with one. Digits out of range for the base are rejected.
fn digits(mut input: Cursor) -> Result<Cursor, LexError> {
    let base = if input.starts_with("0x") {
        input = input.advance(2);
        16
    } else if input.starts_with("0o") {
        input = input.advance(2);
        8
    } else if input.starts_with("0b") {
        input = input.advance(2);
        2
    } else {
        10
    };

    let mut len = 0;
    let mut empty = true;
    for b in input.bytes() {
        let digit = match b {
            b'0'..=b'9' => (b - b'0') as u64,
            b'a'..=b'f' => 10 + (b - b'a') as u64,
            b'A'..=b'F' => 10 + (b - b'A') as u64,
            b'_' => {
                // `_1` would be an identifier, not a number.
                if empty && base == 10 {
                    return Err(LexError);
                }
                len += 1;
                continue;
            }
            _ => break,
        };
        // e.g. `9` in an octal literal, `2` in a binary literal.
        if digit >= base {
            return Err(LexError);
        }
        len += 1;
        empty = false;
    }
    if empty {
        Err(LexError)
    } else {
        Ok(input.advance(len))
    }
}
// Lex a single punctuation token. Spacing is Joint when another op char
// immediately follows (so multi-char ops like `=>` round-trip), else Alone.
fn op(input: Cursor) -> PResult<Punct> {
    match op_char(input) {
        Ok((rest, '\'')) => {
            // A `'` must begin a lifetime here (character literals were
            // already tried by `literal`), so an ident must follow. The
            // quote is emitted Joint, attached to the following ident.
            ident(rest)?;
            Ok((rest, Punct::new('\'', Spacing::Joint)))
        }
        Ok((rest, ch)) => {
            let kind = match op_char(rest) {
                Ok(_) => Spacing::Joint,
                Err(LexError) => Spacing::Alone,
            };
            Ok((rest, Punct::new(ch, kind)))
        }
        Err(LexError) => Err(LexError),
    }
}

// Consume one recognized punctuation character, refusing to treat the `/` of
// a comment opener as an op.
fn op_char(input: Cursor) -> PResult<char> {
    if input.starts_with("//") || input.starts_with("/*") {
        // Do not accept `/` of a comment as an op.
        return Err(LexError);
    }

    let mut chars = input.chars();
    let first = match chars.next() {
        Some(ch) => ch,
        None => {
            return Err(LexError);
        }
    };
    let recognized = "~!@#$%^&*-=+|;:,<.>/?'";
    if recognized.contains(first) {
        Ok((input.advance(first.len_utf8()), first))
    } else {
        Err(LexError)
    }
}
/// Lex a doc comment and desugar it into the token trees of the
/// equivalent attribute: `#[doc = "..."]` (or `#![doc = "..."]` for an
/// inner comment), with every token carrying the comment's span.
fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
    #[cfg(span_locations)]
    let lo = input.off;
    let (rest, (comment, inner)) = doc_comment_contents(input)?;
    let span = crate::Span::_new_stable(Span {
        #[cfg(span_locations)]
        lo,
        #[cfg(span_locations)]
        hi: rest.off,
    });

    // Reject bare carriage returns: `\r` inside a doc comment is legal
    // only as part of a `\r\n` pair.
    let mut scan_for_bare_cr = comment;
    while let Some(cr) = scan_for_bare_cr.find('\r') {
        let rest = &scan_for_bare_cr[cr + 1..];
        if !rest.starts_with('\n') {
            return Err(LexError);
        }
        scan_for_bare_cr = rest;
    }

    let mut trees = Vec::new();
    trees.push(TokenTree::Punct(Punct::new('#', Spacing::Alone)));
    if inner {
        // Inner doc comments (`//!`, `/*! */`) become `#![doc = ...]`.
        trees.push(Punct::new('!', Spacing::Alone).into());
    }
    let mut stream = vec![
        TokenTree::Ident(crate::Ident::new("doc", span)),
        TokenTree::Punct(Punct::new('=', Spacing::Alone)),
        TokenTree::Literal(crate::Literal::string(comment)),
    ];
    // Spans on the tokens inside the bracket group...
    for tt in stream.iter_mut() {
        tt.set_span(span);
    }
    let group = Group::new(Delimiter::Bracket, stream.into_iter().collect());
    trees.push(crate::Group::_new_stable(group).into());
    // ...and on the outer `#`, `!`, and group tokens themselves.
    for tt in trees.iter_mut() {
        tt.set_span(span);
    }
    Ok((rest, trees))
}
/// Split off the text of a doc comment.
///
/// Returns the comment body plus a flag: `true` for inner doc comments
/// (`//!`, `/*! */`), `false` for outer ones (`///`, `/** */`).
/// `////...` and `/***...` forms are plain comments and are rejected here.
fn doc_comment_contents(input: Cursor) -> PResult<(&str, bool)> {
    if input.starts_with("//!") {
        let input = input.advance(3);
        let (input, s) = take_until_newline_or_eof(input);
        Ok((input, (s, true)))
    } else if input.starts_with("/*!") {
        let (input, s) = block_comment(input)?;
        // Strip the `/*!` opener and `*/` closer.
        Ok((input, (&s[3..s.len() - 2], true)))
    } else if input.starts_with("///") {
        let input = input.advance(3);
        // `////...` is an ordinary comment, not a doc comment.
        if input.starts_with("/") {
            return Err(LexError);
        }
        let (input, s) = take_until_newline_or_eof(input);
        Ok((input, (s, false)))
    } else if input.starts_with("/**") && !input.rest[3..].starts_with('*') {
        let (input, s) = block_comment(input)?;
        // Strip the `/**` opener and `*/` closer.
        Ok((input, (&s[3..s.len() - 2], false)))
    } else {
        Err(LexError)
    }
}
/// Consume up to (but not including) the next newline. A `\r\n` pair is
/// treated as a newline whose `\r` is excluded from the returned text.
/// At EOF the entire remaining input is returned.
fn take_until_newline_or_eof(input: Cursor) -> (Cursor, &str) {
    for (i, ch) in input.char_indices() {
        match ch {
            '\n' => return (input.advance(i), &input.rest[..i]),
            '\r' if input.rest[i + 1..].starts_with('\n') => {
                // Leave the cursor on the `\n`; drop the `\r` from the text.
                return (input.advance(i + 1), &input.rest[..i]);
            }
            _ => {}
        }
    }
    (input.advance(input.len()), input.rest)
}

391
third_party/rust/proc-macro2/src/strnom.rs поставляемый
Просмотреть файл

@ -1,391 +0,0 @@
//! Adapted from [`nom`](https://github.com/Geal/nom).
use crate::fallback::LexError;
use std::str::{Bytes, CharIndices, Chars};
use unicode_xid::UnicodeXID;
/// A cheap, copyable view into the remaining source text being lexed.
#[derive(Copy, Clone, Eq, PartialEq)]
pub struct Cursor<'a> {
    /// The not-yet-consumed tail of the input.
    pub rest: &'a str,
    /// Byte offset of `rest` within the original input; tracked only when
    /// span locations are enabled.
    #[cfg(span_locations)]
    pub off: u32,
}
impl<'a> Cursor<'a> {
    /// Advance past `amt` bytes of the input.
    #[cfg(not(span_locations))]
    pub fn advance(&self, amt: usize) -> Cursor<'a> {
        Cursor {
            rest: &self.rest[amt..],
        }
    }
    /// Advance past `amt` bytes of the input, keeping the byte offset
    /// in sync for span tracking.
    #[cfg(span_locations)]
    pub fn advance(&self, amt: usize) -> Cursor<'a> {
        Cursor {
            rest: &self.rest[amt..],
            off: self.off + (amt as u32),
        }
    }
    // The remaining methods delegate directly to the underlying `&str`.
    pub fn find(&self, p: char) -> Option<usize> {
        self.rest.find(p)
    }
    pub fn starts_with(&self, s: &str) -> bool {
        self.rest.starts_with(s)
    }
    pub fn is_empty(&self) -> bool {
        self.rest.is_empty()
    }
    pub fn len(&self) -> usize {
        self.rest.len()
    }
    pub fn as_bytes(&self) -> &'a [u8] {
        self.rest.as_bytes()
    }
    pub fn bytes(&self) -> Bytes<'a> {
        self.rest.bytes()
    }
    pub fn chars(&self) -> Chars<'a> {
        self.rest.chars()
    }
    pub fn char_indices(&self) -> CharIndices<'a> {
        self.rest.char_indices()
    }
}
/// Parser result: the remaining input paired with the parsed value, or a
/// lex error.
pub type PResult<'a, O> = Result<(Cursor<'a>, O), LexError>;
/// Consume whitespace and non-doc comments.
///
/// Errors on empty input or when nothing skippable is present; otherwise
/// returns the cursor positioned after the last skipped byte.
pub fn whitespace(input: Cursor) -> PResult<()> {
    if input.is_empty() {
        return Err(LexError);
    }
    let bytes = input.as_bytes();
    let mut i = 0;
    while i < bytes.len() {
        let s = input.advance(i);
        if bytes[i] == b'/' {
            // Skip line comments, but not `///`/`//!` doc comments
            // (`////...` is a plain comment again).
            if s.starts_with("//")
                && (!s.starts_with("///") || s.starts_with("////"))
                && !s.starts_with("//!")
            {
                if let Some(len) = s.find('\n') {
                    i += len + 1;
                    continue;
                }
                break;
            } else if s.starts_with("/**/") {
                // The empty block comment is too short to be a doc comment.
                i += 4;
                continue;
            } else if s.starts_with("/*")
                && (!s.starts_with("/**") || s.starts_with("/***"))
                && !s.starts_with("/*!")
            {
                // Skip block comments, but not `/** */`/`/*! */` doc
                // comments (`/***...` is a plain comment again).
                let (_, com) = block_comment(s)?;
                i += com.len();
                continue;
            }
        }
        match bytes[i] {
            b' ' | 0x09..=0x0d => {
                i += 1;
                continue;
            }
            b if b <= 0x7f => {}
            _ => {
                // Multi-byte character: fall back to a char-level check.
                let ch = s.chars().next().unwrap();
                if is_whitespace(ch) {
                    i += ch.len_utf8();
                    continue;
                }
            }
        }
        // Hit a non-skippable byte: success iff anything was consumed.
        return if i > 0 { Ok((s, ())) } else { Err(LexError) };
    }
    Ok((input.advance(input.len()), ()))
}
/// Consume a (possibly nested) `/* ... */` block comment, returning the
/// comment text including its delimiters. Errors if the input does not
/// start with `/*` or the outermost comment is unterminated.
pub fn block_comment(input: Cursor) -> PResult<&str> {
    if !input.starts_with("/*") {
        return Err(LexError);
    }
    let mut depth = 0;
    let bytes = input.as_bytes();
    let mut i = 0;
    // Stop one byte early: each iteration inspects bytes[i] and bytes[i+1].
    let upper = bytes.len() - 1;
    while i < upper {
        if bytes[i] == b'/' && bytes[i + 1] == b'*' {
            depth += 1;
            i += 1; // eat '*'
        } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
            depth -= 1;
            if depth == 0 {
                return Ok((input.advance(i + 2), &input.rest[..i + 2]));
            }
            i += 1; // eat '/'
        }
        i += 1;
    }
    // Ran off the end without closing the outermost `/*`.
    Err(LexError)
}
/// Like `whitespace`, but infallible: on error the input is returned
/// unchanged.
pub fn skip_whitespace(input: Cursor) -> Cursor {
    whitespace(input).map_or(input, |(rest, _)| rest)
}
/// Rust's notion of whitespace: Unicode whitespace plus the left-to-right
/// and right-to-left marks, which the reference treats as whitespace.
fn is_whitespace(ch: char) -> bool {
    matches!(ch, '\u{200e}' | '\u{200f}') || ch.is_whitespace()
}
/// Succeed (consuming nothing) only if the next character, when present,
/// cannot continue an identifier — i.e. the previous token ends at a
/// word boundary.
pub fn word_break(input: Cursor) -> PResult<()> {
    let continues_word = input
        .chars()
        .next()
        .map_or(false, UnicodeXID::is_xid_continue);
    if continues_word {
        Err(LexError)
    } else {
        Ok((input, ()))
    }
}
/// Define a parser function `$name` whose body is the given submacro call.
macro_rules! named {
    ($name:ident -> $o:ty, $submac:ident!( $($args:tt)* )) => {
        fn $name<'a>(i: Cursor<'a>) -> $crate::strnom::PResult<'a, $o> {
            $submac!(i, $($args)*)
        }
    };
}
/// Try each `|`-separated alternative in order on the same input,
/// returning the first success; `=> { f }` maps the result through `f`.
macro_rules! alt {
    ($i:expr, $e:ident | $($rest:tt)*) => {
        alt!($i, call!($e) | $($rest)*)
    };
    ($i:expr, $subrule:ident!( $($args:tt)*) | $($rest:tt)*) => {
        match $subrule!($i, $($args)*) {
            res @ Ok(_) => res,
            _ => alt!($i, $($rest)*)
        }
    };
    ($i:expr, $subrule:ident!( $($args:tt)* ) => { $gen:expr } | $($rest:tt)+) => {
        match $subrule!($i, $($args)*) {
            Ok((i, o)) => Ok((i, $gen(o))),
            Err(LexError) => alt!($i, $($rest)*)
        }
    };
    ($i:expr, $e:ident => { $gen:expr } | $($rest:tt)*) => {
        alt!($i, call!($e) => { $gen } | $($rest)*)
    };
    ($i:expr, $e:ident => { $gen:expr }) => {
        alt!($i, call!($e) => { $gen })
    };
    ($i:expr, $subrule:ident!( $($args:tt)* ) => { $gen:expr }) => {
        match $subrule!($i, $($args)*) {
            Ok((i, o)) => Ok((i, $gen(o))),
            Err(LexError) => Err(LexError),
        }
    };
    ($i:expr, $e:ident) => {
        alt!($i, call!($e))
    };
    ($i:expr, $subrule:ident!( $($args:tt)*)) => {
        $subrule!($i, $($args)*)
    };
}
/// Run parsers in sequence with `>>`, optionally binding intermediate
/// results via `name: parser`, and finish with a tuple of the bindings.
macro_rules! do_parse {
    ($i:expr, ( $($rest:expr),* )) => {
        Ok(($i, ( $($rest),* )))
    };
    ($i:expr, $e:ident >> $($rest:tt)*) => {
        do_parse!($i, call!($e) >> $($rest)*)
    };
    ($i:expr, $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => {
        match $submac!($i, $($args)*) {
            Err(LexError) => Err(LexError),
            Ok((i, _)) => do_parse!(i, $($rest)*),
        }
    };
    ($i:expr, $field:ident : $e:ident >> $($rest:tt)*) => {
        do_parse!($i, $field: call!($e) >> $($rest)*)
    };
    ($i:expr, $field:ident : $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => {
        match $submac!($i, $($args)*) {
            Err(LexError) => Err(LexError),
            Ok((i, o)) => {
                let $field = o;
                do_parse!(i, $($rest)*)
            },
        }
    };
}
/// Run a parser but do not consume input: on success the original input
/// is returned alongside the parsed value.
macro_rules! peek {
    ($i:expr, $submac:ident!( $($args:tt)* )) => {
        match $submac!($i, $($args)*) {
            Ok((_, o)) => Ok(($i, o)),
            Err(LexError) => Err(LexError),
        }
    };
}
/// Invoke a plain parser function with the input (plus any extra args).
macro_rules! call {
    ($i:expr, $fun:expr $(, $args:expr)*) => {
        $fun($i $(, $args)*)
    };
}
/// Make a parser optional: `Some(o)` on success, `None` (consuming
/// nothing) on failure.
macro_rules! option {
    ($i:expr, $f:expr) => {
        match $f($i) {
            Ok((i, o)) => Ok((i, Some(o))),
            Err(LexError) => Ok(($i, None)),
        }
    };
}
/// Take everything up to (but not including) the next `\n`, or the whole
/// remaining input at EOF. Never fails.
macro_rules! take_until_newline_or_eof {
    ($i:expr,) => {{
        if $i.len() == 0 {
            Ok(($i, ""))
        } else {
            match $i.find('\n') {
                Some(i) => Ok(($i.advance(i), &$i.rest[..i])),
                None => Ok(($i.advance($i.len()), &$i.rest[..$i.len()])),
            }
        }
    }};
}
/// Apply parsers in sequence and collect their outputs into a tuple.
macro_rules! tuple {
    ($i:expr, $($rest:tt)*) => {
        tuple_parser!($i, (), $($rest)*)
    };
}
/// Do not use directly. Use `tuple!`.
///
/// Threads the input through each parser in turn, accumulating the
/// already-parsed outputs in the `($($parsed),*)` position.
macro_rules! tuple_parser {
    ($i:expr, ($($parsed:tt),*), $e:ident, $($rest:tt)*) => {
        tuple_parser!($i, ($($parsed),*), call!($e), $($rest)*)
    };
    ($i:expr, (), $submac:ident!( $($args:tt)* ), $($rest:tt)*) => {
        match $submac!($i, $($args)*) {
            Err(LexError) => Err(LexError),
            Ok((i, o)) => tuple_parser!(i, (o), $($rest)*),
        }
    };
    ($i:expr, ($($parsed:tt)*), $submac:ident!( $($args:tt)* ), $($rest:tt)*) => {
        match $submac!($i, $($args)*) {
            Err(LexError) => Err(LexError),
            Ok((i, o)) => tuple_parser!(i, ($($parsed)* , o), $($rest)*),
        }
    };
    ($i:expr, ($($parsed:tt),*), $e:ident) => {
        tuple_parser!($i, ($($parsed),*), call!($e))
    };
    ($i:expr, (), $submac:ident!( $($args:tt)* )) => {
        $submac!($i, $($args)*)
    };
    ($i:expr, ($($parsed:expr),*), $submac:ident!( $($args:tt)* )) => {
        match $submac!($i, $($args)*) {
            Err(LexError) => Err(LexError),
            Ok((i, o)) => Ok((i, ($($parsed),*, o)))
        }
    };
    ($i:expr, ($($parsed:expr),*)) => {
        Ok(($i, ($($parsed),*)))
    };
}
/// Negative lookahead: succeed (consuming nothing) only if the inner
/// parser fails.
macro_rules! not {
    ($i:expr, $submac:ident!( $($args:tt)* )) => {
        match $submac!($i, $($args)*) {
            Ok((_, _)) => Err(LexError),
            Err(LexError) => Ok(($i, ())),
        }
    };
}
/// Match a literal string prefix, returning the matched slice.
macro_rules! tag {
    ($i:expr, $tag:expr) => {
        if $i.starts_with($tag) {
            Ok(($i.advance($tag.len()), &$i.rest[..$tag.len()]))
        } else {
            Err(LexError)
        }
    };
}
/// Match a punctuation token, skipping any leading whitespace first.
macro_rules! punct {
    ($i:expr, $punct:expr) => {
        $crate::strnom::punct($i, $punct)
    };
}
/// Do not use directly. Use `punct!`.
///
/// Skips leading whitespace, then matches `token` exactly.
pub fn punct<'a>(input: Cursor<'a>, token: &'static str) -> PResult<'a, &'a str> {
    let input = skip_whitespace(input);
    if input.starts_with(token) {
        Ok((input.advance(token.len()), token))
    } else {
        Err(LexError)
    }
}
/// Run two parsers in sequence, keeping only the second result.
macro_rules! preceded {
    ($i:expr, $submac:ident!( $($args:tt)* ), $submac2:ident!( $($args2:tt)* )) => {
        match tuple!($i, $submac!($($args)*), $submac2!($($args2)*)) {
            Ok((remaining, (_, o))) => Ok((remaining, o)),
            Err(LexError) => Err(LexError),
        }
    };
    ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => {
        preceded!($i, $submac!($($args)*), call!($g))
    };
}
/// Parse `open, value, close` in sequence and keep only the middle result.
macro_rules! delimited {
    ($i:expr, $submac:ident!( $($args:tt)* ), $($rest:tt)+) => {
        match tuple_parser!($i, (), $submac!($($args)*), $($rest)*) {
            Err(LexError) => Err(LexError),
            Ok((i1, (_, o, _))) => Ok((i1, o))
        }
    };
}
/// Apply a function to a parser's output on success.
macro_rules! map {
    ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => {
        match $submac!($i, $($args)*) {
            Err(LexError) => Err(LexError),
            Ok((i, o)) => Ok((i, call!(o, $g)))
        }
    };
    ($i:expr, $f:expr, $g:expr) => {
        map!($i, call!($f), $g)
    };
}

240
third_party/rust/proc-macro2/src/wrapper.rs поставляемый
Просмотреть файл

@ -1,15 +1,15 @@
use std::fmt;
use std::iter;
use crate::detection::inside_proc_macro;
use crate::{fallback, Delimiter, Punct, Spacing, TokenTree};
use std::fmt::{self, Debug, Display};
use std::iter::FromIterator;
use std::ops::RangeBounds;
use std::panic::{self, PanicInfo};
use std::panic;
#[cfg(super_unstable)]
use std::path::PathBuf;
use std::str::FromStr;
use crate::{fallback, Delimiter, Punct, Spacing, TokenTree};
#[derive(Clone)]
pub enum TokenStream {
pub(crate) enum TokenStream {
Compiler(DeferredTokenStream),
Fallback(fallback::TokenStream),
}
@ -19,73 +19,16 @@ pub enum TokenStream {
// we hold on to the appended tokens and do proc_macro::TokenStream::extend as
// late as possible to batch together consecutive uses of the Extend impl.
#[derive(Clone)]
pub struct DeferredTokenStream {
pub(crate) struct DeferredTokenStream {
stream: proc_macro::TokenStream,
extra: Vec<proc_macro::TokenTree>,
}
pub enum LexError {
pub(crate) enum LexError {
Compiler(proc_macro::LexError),
Fallback(fallback::LexError),
}
/// Probe whether the compiler's `proc_macro` API is usable from the
/// current context, caching the answer in a process-wide atomic.
///
/// `WORKS` encodes three states: 0 = not yet determined, 1 = unavailable,
/// 2 = available. The tail call re-reads the cache after `call_once` has
/// populated it.
fn nightly_works() -> bool {
    use std::sync::atomic::*;
    use std::sync::Once;
    static WORKS: AtomicUsize = AtomicUsize::new(0);
    static INIT: Once = Once::new();
    match WORKS.load(Ordering::SeqCst) {
        1 => return false,
        2 => return true,
        _ => {}
    }
    // Swap in a null panic hook to avoid printing "thread panicked" to stderr,
    // then use catch_unwind to determine whether the compiler's proc_macro is
    // working. When proc-macro2 is used from outside of a procedural macro all
    // of the proc_macro crate's APIs currently panic.
    //
    // The Once is to prevent the possibility of this ordering:
    //
    //     thread 1 calls take_hook, gets the user's original hook
    //     thread 1 calls set_hook with the null hook
    //     thread 2 calls take_hook, thinks null hook is the original hook
    //     thread 2 calls set_hook with the null hook
    //     thread 1 calls set_hook with the actual original hook
    //     thread 2 calls set_hook with what it thinks is the original hook
    //
    // in which the user's hook has been lost.
    //
    // There is still a race condition where a panic in a different thread can
    // happen during the interval that the user's original panic hook is
    // unregistered such that their hook is incorrectly not called. This is
    // sufficiently unlikely and less bad than printing panic messages to stderr
    // on correct use of this crate. Maybe there is a libstd feature request
    // here. For now, if a user needs to guarantee that this failure mode does
    // not occur, they need to call e.g. `proc_macro2::Span::call_site()` from
    // the main thread before launching any other threads.
    INIT.call_once(|| {
        type PanicHook = dyn Fn(&PanicInfo) + Sync + Send + 'static;
        let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ });
        // Raw-pointer identity check used below to detect a hook race.
        let sanity_check = &*null_hook as *const PanicHook;
        let original_hook = panic::take_hook();
        panic::set_hook(null_hook);
        let works = panic::catch_unwind(|| proc_macro::Span::call_site()).is_ok();
        WORKS.store(works as usize + 1, Ordering::SeqCst);
        let hopefully_null_hook = panic::take_hook();
        panic::set_hook(original_hook);
        if sanity_check != &*hopefully_null_hook {
            panic!("observed race condition in proc_macro2::nightly_works");
        }
    });
    nightly_works()
}
/// Panic on an impossible pairing of a compiler-backed value with a
/// fallback value; values from the two backends must never be mixed.
fn mismatch() -> ! {
    panic!("stable/nightly mismatch")
}
@ -103,7 +46,12 @@ impl DeferredTokenStream {
}
fn evaluate_now(&mut self) {
self.stream.extend(self.extra.drain(..));
// If-check provides a fast short circuit for the common case of `extra`
// being empty, which saves a round trip over the proc macro bridge.
// Improves macro expansion time in winrt by 6% in debug mode.
if !self.extra.is_empty() {
self.stream.extend(self.extra.drain(..));
}
}
fn into_token_stream(mut self) -> proc_macro::TokenStream {
@ -114,7 +62,7 @@ impl DeferredTokenStream {
impl TokenStream {
pub fn new() -> TokenStream {
if nightly_works() {
if inside_proc_macro() {
TokenStream::Compiler(DeferredTokenStream::new(proc_macro::TokenStream::new()))
} else {
TokenStream::Fallback(fallback::TokenStream::new())
@ -147,9 +95,9 @@ impl FromStr for TokenStream {
type Err = LexError;
fn from_str(src: &str) -> Result<TokenStream, LexError> {
if nightly_works() {
if inside_proc_macro() {
Ok(TokenStream::Compiler(DeferredTokenStream::new(
src.parse()?,
proc_macro_parse(src)?,
)))
} else {
Ok(TokenStream::Fallback(src.parse()?))
@ -157,11 +105,17 @@ impl FromStr for TokenStream {
}
}
impl fmt::Display for TokenStream {
// Work around https://github.com/rust-lang/rust/issues/58736.
fn proc_macro_parse(src: &str) -> Result<proc_macro::TokenStream, LexError> {
panic::catch_unwind(|| src.parse().map_err(LexError::Compiler))
.unwrap_or(Err(LexError::Fallback(fallback::LexError)))
}
impl Display for TokenStream {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
TokenStream::Compiler(tts) => tts.clone().into_token_stream().fmt(f),
TokenStream::Fallback(tts) => tts.fmt(f),
TokenStream::Compiler(tts) => Display::fmt(&tts.clone().into_token_stream(), f),
TokenStream::Fallback(tts) => Display::fmt(tts, f),
}
}
}
@ -187,7 +141,7 @@ impl From<fallback::TokenStream> for TokenStream {
}
}
// Assumes nightly_works().
// Assumes inside_proc_macro().
fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree {
match token {
TokenTree::Group(tt) => tt.inner.unwrap_nightly().into(),
@ -207,7 +161,7 @@ fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree {
impl From<TokenTree> for TokenStream {
fn from(token: TokenTree) -> TokenStream {
if nightly_works() {
if inside_proc_macro() {
TokenStream::Compiler(DeferredTokenStream::new(into_compiler_token(token).into()))
} else {
TokenStream::Fallback(token.into())
@ -215,9 +169,9 @@ impl From<TokenTree> for TokenStream {
}
}
impl iter::FromIterator<TokenTree> for TokenStream {
impl FromIterator<TokenTree> for TokenStream {
fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
if nightly_works() {
if inside_proc_macro() {
TokenStream::Compiler(DeferredTokenStream::new(
trees.into_iter().map(into_compiler_token).collect(),
))
@ -227,7 +181,7 @@ impl iter::FromIterator<TokenTree> for TokenStream {
}
}
impl iter::FromIterator<TokenStream> for TokenStream {
impl FromIterator<TokenStream> for TokenStream {
fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
let mut streams = streams.into_iter();
match streams.next() {
@ -252,14 +206,15 @@ impl iter::FromIterator<TokenStream> for TokenStream {
}
impl Extend<TokenTree> for TokenStream {
fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, stream: I) {
match self {
TokenStream::Compiler(tts) => {
// Here is the reason for DeferredTokenStream.
tts.extra
.extend(streams.into_iter().map(into_compiler_token));
for token in stream {
tts.extra.push(into_compiler_token(token));
}
}
TokenStream::Fallback(tts) => tts.extend(streams),
TokenStream::Fallback(tts) => tts.extend(stream),
}
}
}
@ -270,20 +225,20 @@ impl Extend<TokenStream> for TokenStream {
TokenStream::Compiler(tts) => {
tts.evaluate_now();
tts.stream
.extend(streams.into_iter().map(|stream| stream.unwrap_nightly()));
.extend(streams.into_iter().map(TokenStream::unwrap_nightly));
}
TokenStream::Fallback(tts) => {
tts.extend(streams.into_iter().map(|stream| stream.unwrap_stable()));
tts.extend(streams.into_iter().map(TokenStream::unwrap_stable));
}
}
}
}
impl fmt::Debug for TokenStream {
impl Debug for TokenStream {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
TokenStream::Compiler(tts) => tts.clone().into_token_stream().fmt(f),
TokenStream::Fallback(tts) => tts.fmt(f),
TokenStream::Compiler(tts) => Debug::fmt(&tts.clone().into_token_stream(), f),
TokenStream::Fallback(tts) => Debug::fmt(tts, f),
}
}
}
@ -300,17 +255,17 @@ impl From<fallback::LexError> for LexError {
}
}
impl fmt::Debug for LexError {
impl Debug for LexError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
LexError::Compiler(e) => e.fmt(f),
LexError::Fallback(e) => e.fmt(f),
LexError::Compiler(e) => Debug::fmt(e, f),
LexError::Fallback(e) => Debug::fmt(e, f),
}
}
}
#[derive(Clone)]
pub enum TokenTreeIter {
pub(crate) enum TokenTreeIter {
Compiler(proc_macro::token_stream::IntoIter),
Fallback(fallback::TokenTreeIter),
}
@ -361,7 +316,7 @@ impl Iterator for TokenTreeIter {
}
}
impl fmt::Debug for TokenTreeIter {
impl Debug for TokenTreeIter {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("TokenTreeIter").finish()
}
@ -369,7 +324,7 @@ impl fmt::Debug for TokenTreeIter {
#[derive(Clone, PartialEq, Eq)]
#[cfg(super_unstable)]
pub enum SourceFile {
pub(crate) enum SourceFile {
Compiler(proc_macro::SourceFile),
Fallback(fallback::SourceFile),
}
@ -397,58 +352,77 @@ impl SourceFile {
}
#[cfg(super_unstable)]
impl fmt::Debug for SourceFile {
impl Debug for SourceFile {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
SourceFile::Compiler(a) => a.fmt(f),
SourceFile::Fallback(a) => a.fmt(f),
SourceFile::Compiler(a) => Debug::fmt(a, f),
SourceFile::Fallback(a) => Debug::fmt(a, f),
}
}
}
#[cfg(any(super_unstable, feature = "span-locations"))]
pub struct LineColumn {
pub(crate) struct LineColumn {
pub line: usize,
pub column: usize,
}
#[derive(Copy, Clone)]
pub enum Span {
pub(crate) enum Span {
Compiler(proc_macro::Span),
Fallback(fallback::Span),
}
impl Span {
pub fn call_site() -> Span {
if nightly_works() {
if inside_proc_macro() {
Span::Compiler(proc_macro::Span::call_site())
} else {
Span::Fallback(fallback::Span::call_site())
}
}
#[cfg(hygiene)]
pub fn mixed_site() -> Span {
if inside_proc_macro() {
Span::Compiler(proc_macro::Span::mixed_site())
} else {
Span::Fallback(fallback::Span::mixed_site())
}
}
#[cfg(super_unstable)]
pub fn def_site() -> Span {
if nightly_works() {
if inside_proc_macro() {
Span::Compiler(proc_macro::Span::def_site())
} else {
Span::Fallback(fallback::Span::def_site())
}
}
#[cfg(super_unstable)]
pub fn resolved_at(&self, other: Span) -> Span {
match (self, other) {
#[cfg(hygiene)]
(Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.resolved_at(b)),
// Name resolution affects semantics, but location is only cosmetic
#[cfg(not(hygiene))]
(Span::Compiler(_), Span::Compiler(_)) => other,
(Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.resolved_at(b)),
_ => mismatch(),
}
}
#[cfg(super_unstable)]
pub fn located_at(&self, other: Span) -> Span {
match (self, other) {
#[cfg(hygiene)]
(Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.located_at(b)),
// Name resolution affects semantics, but location is only cosmetic
#[cfg(not(hygiene))]
(Span::Compiler(_), Span::Compiler(_)) => *self,
(Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.located_at(b)),
_ => mismatch(),
}
@ -542,16 +516,16 @@ impl From<fallback::Span> for Span {
}
}
impl fmt::Debug for Span {
impl Debug for Span {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Span::Compiler(s) => s.fmt(f),
Span::Fallback(s) => s.fmt(f),
Span::Compiler(s) => Debug::fmt(s, f),
Span::Fallback(s) => Debug::fmt(s, f),
}
}
}
pub fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
match span {
Span::Compiler(s) => {
debug.field("span", &s);
@ -561,7 +535,7 @@ pub fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span)
}
#[derive(Clone)]
pub enum Group {
pub(crate) enum Group {
Compiler(proc_macro::Group),
Fallback(fallback::Group),
}
@ -652,26 +626,26 @@ impl From<fallback::Group> for Group {
}
}
impl fmt::Display for Group {
impl Display for Group {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
match self {
Group::Compiler(group) => group.fmt(formatter),
Group::Fallback(group) => group.fmt(formatter),
Group::Compiler(group) => Display::fmt(group, formatter),
Group::Fallback(group) => Display::fmt(group, formatter),
}
}
}
impl fmt::Debug for Group {
impl Debug for Group {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
match self {
Group::Compiler(group) => group.fmt(formatter),
Group::Fallback(group) => group.fmt(formatter),
Group::Compiler(group) => Debug::fmt(group, formatter),
Group::Fallback(group) => Debug::fmt(group, formatter),
}
}
}
#[derive(Clone)]
pub enum Ident {
pub(crate) enum Ident {
Compiler(proc_macro::Ident),
Fallback(fallback::Ident),
}
@ -747,26 +721,26 @@ where
}
}
impl fmt::Display for Ident {
impl Display for Ident {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Ident::Compiler(t) => t.fmt(f),
Ident::Fallback(t) => t.fmt(f),
Ident::Compiler(t) => Display::fmt(t, f),
Ident::Fallback(t) => Display::fmt(t, f),
}
}
}
impl fmt::Debug for Ident {
impl Debug for Ident {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Ident::Compiler(t) => t.fmt(f),
Ident::Fallback(t) => t.fmt(f),
Ident::Compiler(t) => Debug::fmt(t, f),
Ident::Fallback(t) => Debug::fmt(t, f),
}
}
}
#[derive(Clone)]
pub enum Literal {
pub(crate) enum Literal {
Compiler(proc_macro::Literal),
Fallback(fallback::Literal),
}
@ -774,7 +748,7 @@ pub enum Literal {
macro_rules! suffixed_numbers {
($($name:ident => $kind:ident,)*) => ($(
pub fn $name(n: $kind) -> Literal {
if nightly_works() {
if inside_proc_macro() {
Literal::Compiler(proc_macro::Literal::$name(n))
} else {
Literal::Fallback(fallback::Literal::$name(n))
@ -786,7 +760,7 @@ macro_rules! suffixed_numbers {
macro_rules! unsuffixed_integers {
($($name:ident => $kind:ident,)*) => ($(
pub fn $name(n: $kind) -> Literal {
if nightly_works() {
if inside_proc_macro() {
Literal::Compiler(proc_macro::Literal::$name(n))
} else {
Literal::Fallback(fallback::Literal::$name(n))
@ -830,7 +804,7 @@ impl Literal {
}
pub fn f32_unsuffixed(f: f32) -> Literal {
if nightly_works() {
if inside_proc_macro() {
Literal::Compiler(proc_macro::Literal::f32_unsuffixed(f))
} else {
Literal::Fallback(fallback::Literal::f32_unsuffixed(f))
@ -838,7 +812,7 @@ impl Literal {
}
pub fn f64_unsuffixed(f: f64) -> Literal {
if nightly_works() {
if inside_proc_macro() {
Literal::Compiler(proc_macro::Literal::f64_unsuffixed(f))
} else {
Literal::Fallback(fallback::Literal::f64_unsuffixed(f))
@ -846,7 +820,7 @@ impl Literal {
}
pub fn string(t: &str) -> Literal {
if nightly_works() {
if inside_proc_macro() {
Literal::Compiler(proc_macro::Literal::string(t))
} else {
Literal::Fallback(fallback::Literal::string(t))
@ -854,7 +828,7 @@ impl Literal {
}
pub fn character(t: char) -> Literal {
if nightly_works() {
if inside_proc_macro() {
Literal::Compiler(proc_macro::Literal::character(t))
} else {
Literal::Fallback(fallback::Literal::character(t))
@ -862,7 +836,7 @@ impl Literal {
}
pub fn byte_string(bytes: &[u8]) -> Literal {
if nightly_works() {
if inside_proc_macro() {
Literal::Compiler(proc_macro::Literal::byte_string(bytes))
} else {
Literal::Fallback(fallback::Literal::byte_string(bytes))
@ -908,20 +882,20 @@ impl From<fallback::Literal> for Literal {
}
}
impl fmt::Display for Literal {
impl Display for Literal {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Literal::Compiler(t) => t.fmt(f),
Literal::Fallback(t) => t.fmt(f),
Literal::Compiler(t) => Display::fmt(t, f),
Literal::Fallback(t) => Display::fmt(t, f),
}
}
}
impl fmt::Debug for Literal {
impl Debug for Literal {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Literal::Compiler(t) => t.fmt(f),
Literal::Fallback(t) => t.fmt(f),
Literal::Compiler(t) => Debug::fmt(t, f),
Literal::Fallback(t) => Debug::fmt(t, f),
}
}
}

103
third_party/rust/proc-macro2/tests/comments.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,103 @@
use proc_macro2::{Delimiter, Literal, Spacing, TokenStream, TokenTree};
// #[doc = "..."] -> "..."
/// Extract the string literal from an outer doc-comment attribute.
fn lit_of_outer_doc_comment(tokens: TokenStream) -> Literal {
    lit_of_doc_comment(tokens, false)
}
// #![doc = "..."] -> "..."
/// Extract the string literal from an inner doc-comment attribute.
fn lit_of_inner_doc_comment(tokens: TokenStream) -> Literal {
    lit_of_doc_comment(tokens, true)
}
/// Assert that `tokens` has exactly the shape of a desugared doc comment —
/// `#` (plus `!` when `inner`) followed by `[doc = "..."]` — and return
/// the string literal. Panics with a descriptive message otherwise.
fn lit_of_doc_comment(tokens: TokenStream, inner: bool) -> Literal {
    let mut iter = tokens.clone().into_iter();
    // Leading `#`.
    match iter.next().unwrap() {
        TokenTree::Punct(punct) => {
            assert_eq!(punct.as_char(), '#');
            assert_eq!(punct.spacing(), Spacing::Alone);
        }
        _ => panic!("wrong token {:?}", tokens),
    }
    // Inner attributes carry an additional `!`.
    if inner {
        match iter.next().unwrap() {
            TokenTree::Punct(punct) => {
                assert_eq!(punct.as_char(), '!');
                assert_eq!(punct.spacing(), Spacing::Alone);
            }
            _ => panic!("wrong token {:?}", tokens),
        }
    }
    // Descend into the bracket group; nothing may follow it.
    iter = match iter.next().unwrap() {
        TokenTree::Group(group) => {
            assert_eq!(group.delimiter(), Delimiter::Bracket);
            assert!(iter.next().is_none(), "unexpected token {:?}", tokens);
            group.stream().into_iter()
        }
        _ => panic!("wrong token {:?}", tokens),
    };
    // `doc` `=` `"..."`, with the literal as the final token.
    match iter.next().unwrap() {
        TokenTree::Ident(ident) => assert_eq!(ident.to_string(), "doc"),
        _ => panic!("wrong token {:?}", tokens),
    }
    match iter.next().unwrap() {
        TokenTree::Punct(punct) => {
            assert_eq!(punct.as_char(), '=');
            assert_eq!(punct.spacing(), Spacing::Alone);
        }
        _ => panic!("wrong token {:?}", tokens),
    }
    match iter.next().unwrap() {
        TokenTree::Literal(literal) => {
            assert!(iter.next().is_none(), "unexpected token {:?}", tokens);
            literal
        }
        _ => panic!("wrong token {:?}", tokens),
    }
}
#[test]
fn closed_immediately() {
    // `/**/` is an empty block comment, not a doc comment: zero tokens.
    let parsed = "/**/".parse::<TokenStream>().unwrap();
    let tokens: Vec<_> = parsed.into_iter().collect();
    assert!(tokens.is_empty(), "not empty -- {:?}", tokens);
}
#[test]
fn incomplete() {
    // An unterminated block comment must fail to lex.
    let result = "/*/".parse::<TokenStream>();
    assert!(result.is_err());
}
#[test]
fn lit() {
    // Outer doc comments: line and block forms.
    for (source, expected) in [("/// doc", "\" doc\""), ("/** doc */", "\" doc \"")].iter() {
        let stream = source.parse::<TokenStream>().unwrap();
        let lit = lit_of_outer_doc_comment(stream);
        assert_eq!(lit.to_string(), *expected);
    }
    // Inner doc comments: line and block forms.
    for (source, expected) in [("//! doc", "\" doc\""), ("/*! doc */", "\" doc \"")].iter() {
        let stream = source.parse::<TokenStream>().unwrap();
        let lit = lit_of_inner_doc_comment(stream);
        assert_eq!(lit.to_string(), *expected);
    }
}
#[test]
fn carriage_return() {
    // A `\r` inside a doc comment is accepted only as part of `\r\n`.
    let stream = "///\r\n".parse::<TokenStream>().unwrap();
    let lit = lit_of_outer_doc_comment(stream);
    assert_eq!(lit.to_string(), "\"\"");
    let stream = "/**\r\n*/".parse::<TokenStream>().unwrap();
    let lit = lit_of_outer_doc_comment(stream);
    assert_eq!(lit.to_string(), "\"\\r\\n\"");
    // Bare carriage returns must be rejected by the lexer.
    "///\r".parse::<TokenStream>().unwrap_err();
    "///\r \n".parse::<TokenStream>().unwrap_err();
    "/**\r \n*/".parse::<TokenStream>().unwrap_err();
}

195
third_party/rust/proc-macro2/tests/test.rs поставляемый
Просмотреть файл

@ -1,6 +1,5 @@
use std::str::{self, FromStr};
use proc_macro2::{Ident, Literal, Spacing, Span, TokenStream, TokenTree};
use std::str::{self, FromStr};
#[test]
fn idents() {
@ -110,6 +109,33 @@ fn literal_suffix() {
assert_eq!(token_count("1._0"), 3);
assert_eq!(token_count("1._m"), 3);
assert_eq!(token_count("\"\"s"), 1);
assert_eq!(token_count("r\"\"r"), 1);
assert_eq!(token_count("b\"\"b"), 1);
assert_eq!(token_count("br\"\"br"), 1);
assert_eq!(token_count("r#\"\"#r"), 1);
assert_eq!(token_count("'c'c"), 1);
assert_eq!(token_count("b'b'b"), 1);
}
#[test]
fn literal_iter_negative() {
    // A negative suffixed literal round-trips through a TokenStream as a
    // `-` punct followed by the positive literal, not as a single token.
    let negative_literal = Literal::i32_suffixed(-3);
    let tokens = TokenStream::from(TokenTree::Literal(negative_literal));
    let mut iter = tokens.into_iter();
    match iter.next().unwrap() {
        TokenTree::Punct(punct) => {
            assert_eq!(punct.as_char(), '-');
            assert_eq!(punct.spacing(), Spacing::Alone);
        }
        unexpected => panic!("unexpected token {:?}", unexpected),
    }
    match iter.next().unwrap() {
        TokenTree::Literal(literal) => {
            assert_eq!(literal.to_string(), "3i32");
        }
        unexpected => panic!("unexpected token {:?}", unexpected),
    }
    assert!(iter.next().is_none());
}
#[test]
@ -166,36 +192,6 @@ fn fail() {
#[cfg(span_locations)]
#[test]
fn span_test() {
use proc_macro2::TokenTree;
fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
let ts = p.parse::<TokenStream>().unwrap();
check_spans_internal(ts, &mut lines);
}
fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
for i in ts {
if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
*lines = rest;
let start = i.span().start();
assert_eq!(start.line, sline, "sline did not match for {}", i);
assert_eq!(start.column, scol, "scol did not match for {}", i);
let end = i.span().end();
assert_eq!(end.line, eline, "eline did not match for {}", i);
assert_eq!(end.column, ecol, "ecol did not match for {}", i);
match i {
TokenTree::Group(ref g) => {
check_spans_internal(g.stream().clone(), lines);
}
_ => {}
}
}
}
}
check_spans(
"\
/// This is a document comment
@ -274,49 +270,7 @@ fn span_join() {
#[test]
fn no_panic() {
let s = str::from_utf8(b"b\'\xc2\x86 \x00\x00\x00^\"").unwrap();
assert!(s.parse::<proc_macro2::TokenStream>().is_err());
}
#[test]
fn tricky_doc_comment() {
let stream = "/**/".parse::<proc_macro2::TokenStream>().unwrap();
let tokens = stream.into_iter().collect::<Vec<_>>();
assert!(tokens.is_empty(), "not empty -- {:?}", tokens);
let stream = "/// doc".parse::<proc_macro2::TokenStream>().unwrap();
let tokens = stream.into_iter().collect::<Vec<_>>();
assert!(tokens.len() == 2, "not length 2 -- {:?}", tokens);
match tokens[0] {
proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '#'),
_ => panic!("wrong token {:?}", tokens[0]),
}
let mut tokens = match tokens[1] {
proc_macro2::TokenTree::Group(ref tt) => {
assert_eq!(tt.delimiter(), proc_macro2::Delimiter::Bracket);
tt.stream().into_iter()
}
_ => panic!("wrong token {:?}", tokens[0]),
};
match tokens.next().unwrap() {
proc_macro2::TokenTree::Ident(ref tt) => assert_eq!(tt.to_string(), "doc"),
t => panic!("wrong token {:?}", t),
}
match tokens.next().unwrap() {
proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '='),
t => panic!("wrong token {:?}", t),
}
match tokens.next().unwrap() {
proc_macro2::TokenTree::Literal(ref tt) => {
assert_eq!(tt.to_string(), "\" doc\"");
}
t => panic!("wrong token {:?}", t),
}
assert!(tokens.next().is_none());
let stream = "//! doc".parse::<proc_macro2::TokenStream>().unwrap();
let tokens = stream.into_iter().collect::<Vec<_>>();
assert!(tokens.len() == 3, "not length 3 -- {:?}", tokens);
assert!(s.parse::<TokenStream>().is_err());
}
#[test]
@ -345,11 +299,11 @@ fn raw_identifier() {
fn test_debug_ident() {
let ident = Ident::new("proc_macro", Span::call_site());
#[cfg(not(procmacro2_semver_exempt))]
#[cfg(not(span_locations))]
let expected = "Ident(proc_macro)";
#[cfg(procmacro2_semver_exempt)]
let expected = "Ident { sym: proc_macro, span: bytes(0..0) }";
#[cfg(span_locations)]
let expected = "Ident { sym: proc_macro }";
assert_eq!(expected, format!("{:?}", ident));
}
@ -358,7 +312,7 @@ fn test_debug_ident() {
fn test_debug_tokenstream() {
let tts = TokenStream::from_str("[a + 1]").unwrap();
#[cfg(not(procmacro2_semver_exempt))]
#[cfg(not(span_locations))]
let expected = "\
TokenStream [
Group {
@ -379,7 +333,7 @@ TokenStream [
]\
";
#[cfg(not(procmacro2_semver_exempt))]
#[cfg(not(span_locations))]
let expected_before_trailing_commas = "\
TokenStream [
Group {
@ -400,7 +354,7 @@ TokenStream [
]\
";
#[cfg(procmacro2_semver_exempt)]
#[cfg(span_locations)]
let expected = "\
TokenStream [
Group {
@ -425,7 +379,7 @@ TokenStream [
]\
";
#[cfg(procmacro2_semver_exempt)]
#[cfg(span_locations)]
let expected_before_trailing_commas = "\
TokenStream [
Group {
@ -464,3 +418,80 @@ fn default_tokenstream_is_empty() {
assert!(default_token_stream.is_empty());
}
#[test]
fn tuple_indexing() {
// This behavior may change depending on https://github.com/rust-lang/rust/pull/71322
let mut tokens = "tuple.0.0".parse::<TokenStream>().unwrap().into_iter();
assert_eq!("tuple", tokens.next().unwrap().to_string());
assert_eq!(".", tokens.next().unwrap().to_string());
assert_eq!("0.0", tokens.next().unwrap().to_string());
assert!(tokens.next().is_none());
}
#[cfg(span_locations)]
#[test]
fn non_ascii_tokens() {
check_spans("// abc", &[]);
check_spans("// ábc", &[]);
check_spans("// abc x", &[]);
check_spans("// ábc x", &[]);
check_spans("/* abc */ x", &[(1, 10, 1, 11)]);
check_spans("/* ábc */ x", &[(1, 10, 1, 11)]);
check_spans("/* ab\nc */ x", &[(2, 5, 2, 6)]);
check_spans("/* áb\nc */ x", &[(2, 5, 2, 6)]);
check_spans("/*** abc */ x", &[(1, 12, 1, 13)]);
check_spans("/*** ábc */ x", &[(1, 12, 1, 13)]);
check_spans(r#""abc""#, &[(1, 0, 1, 5)]);
check_spans(r#""ábc""#, &[(1, 0, 1, 5)]);
check_spans(r###"r#"abc"#"###, &[(1, 0, 1, 8)]);
check_spans(r###"r#"ábc"#"###, &[(1, 0, 1, 8)]);
check_spans("r#\"a\nc\"#", &[(1, 0, 2, 3)]);
check_spans("r#\"á\nc\"#", &[(1, 0, 2, 3)]);
check_spans("'a'", &[(1, 0, 1, 3)]);
check_spans("'á'", &[(1, 0, 1, 3)]);
check_spans("//! abc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
check_spans("//! ábc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
check_spans("//! abc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
check_spans("//! ábc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
check_spans("/*! abc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
check_spans("/*! ábc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
check_spans("/*! a\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
check_spans("/*! á\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
check_spans("abc", &[(1, 0, 1, 3)]);
check_spans("ábc", &[(1, 0, 1, 3)]);
check_spans("ábć", &[(1, 0, 1, 3)]);
check_spans("abc// foo", &[(1, 0, 1, 3)]);
check_spans("ábc// foo", &[(1, 0, 1, 3)]);
check_spans("ábć// foo", &[(1, 0, 1, 3)]);
check_spans("b\"a\\\n c\"", &[(1, 0, 2, 3)]);
check_spans("b\"a\\\n\u{00a0}c\"", &[(1, 0, 2, 3)]);
}
#[cfg(span_locations)]
fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
let ts = p.parse::<TokenStream>().unwrap();
check_spans_internal(ts, &mut lines);
assert!(lines.is_empty(), "leftover ranges: {:?}", lines);
}
#[cfg(span_locations)]
fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
for i in ts {
if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
*lines = rest;
let start = i.span().start();
assert_eq!(start.line, sline, "sline did not match for {}", i);
assert_eq!(start.column, scol, "scol did not match for {}", i);
let end = i.span().end();
assert_eq!(end.line, eline, "eline did not match for {}", i);
assert_eq!(end.column, ecol, "ecol did not match for {}", i);
if let TokenTree::Group(g) = i {
check_spans_internal(g.stream().clone(), lines);
}
}
}
}

26
third_party/rust/proc-macro2/tests/test_fmt.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,26 @@
use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
use std::iter::{self, FromIterator};
#[test]
fn test_fmt_group() {
let ident = Ident::new("x", Span::call_site());
let inner = TokenStream::from_iter(iter::once(TokenTree::Ident(ident)));
let parens_empty = Group::new(Delimiter::Parenthesis, TokenStream::new());
let parens_nonempty = Group::new(Delimiter::Parenthesis, inner.clone());
let brackets_empty = Group::new(Delimiter::Bracket, TokenStream::new());
let brackets_nonempty = Group::new(Delimiter::Bracket, inner.clone());
let braces_empty = Group::new(Delimiter::Brace, TokenStream::new());
let braces_nonempty = Group::new(Delimiter::Brace, inner.clone());
let none_empty = Group::new(Delimiter::None, TokenStream::new());
let none_nonempty = Group::new(Delimiter::None, inner.clone());
// Matches libproc_macro.
assert_eq!("()", parens_empty.to_string());
assert_eq!("(x)", parens_nonempty.to_string());
assert_eq!("[]", brackets_empty.to_string());
assert_eq!("[x]", brackets_nonempty.to_string());
assert_eq!("{ }", braces_empty.to_string());
assert_eq!("{ x }", braces_nonempty.to_string());
assert_eq!("", none_empty.to_string());
assert_eq!("x", none_nonempty.to_string());
}

2
third_party/rust/syn/.cargo-checksum.json поставляемый

Различия файлов скрыты, потому что одна или несколько строк слишком длинны

35
third_party/rust/syn/Cargo.toml поставляемый
Просмотреть файл

@ -13,7 +13,7 @@
[package]
edition = "2018"
name = "syn"
version = "1.0.5"
version = "1.0.40"
authors = ["David Tolnay <dtolnay@gmail.com>"]
include = ["/benches/**", "/build.rs", "/Cargo.toml", "/LICENSE-APACHE", "/LICENSE-MIT", "/README.md", "/src/**", "/tests/**"]
description = "Parser for Rust source code"
@ -24,25 +24,21 @@ license = "MIT OR Apache-2.0"
repository = "https://github.com/dtolnay/syn"
[package.metadata.docs.rs]
all-features = true
targets = ["x86_64-unknown-linux-gnu"]
[package.metadata.playground]
all-features = true
[lib]
name = "syn"
features = ["full", "visit", "visit-mut", "fold", "extra-traits"]
[[bench]]
name = "rust"
harness = false
required-features = ["full", "parsing"]
edition = "2018"
[[bench]]
name = "file"
required-features = ["full", "parsing"]
edition = "2018"
[dependencies.proc-macro2]
version = "1.0"
version = "1.0.13"
default-features = false
[dependencies.quote]
@ -52,18 +48,34 @@ default-features = false
[dependencies.unicode-xid]
version = "0.2"
[dev-dependencies.anyhow]
version = "1.0"
[dev-dependencies.flate2]
version = "1.0"
[dev-dependencies.insta]
version = "0.9"
version = "0.16"
[dev-dependencies.rayon]
version = "1.0"
[dev-dependencies.ref-cast]
version = "0.2"
version = "1.0"
[dev-dependencies.regex]
version = "1.0"
[dev-dependencies.reqwest]
version = "0.10"
features = ["blocking"]
[dev-dependencies.syn-test-suite]
version = "0"
[dev-dependencies.tar]
version = "0.4"
[dev-dependencies.termcolor]
version = "1.0"
@ -80,7 +92,6 @@ full = []
parsing = []
printing = ["quote"]
proc-macro = ["proc-macro2/proc-macro", "quote/proc-macro"]
test = ["syn-test-suite/all-features"]
visit = []
visit-mut = []
[badges.travis-ci]
repository = "dtolnay/syn"

16
third_party/rust/syn/README.md поставляемый
Просмотреть файл

@ -1,10 +1,10 @@
Parser for Rust source code
===========================
[![Build Status](https://api.travis-ci.org/dtolnay/syn.svg?branch=master)](https://travis-ci.org/dtolnay/syn)
[![Latest Version](https://img.shields.io/crates/v/syn.svg)](https://crates.io/crates/syn)
[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/syn/1.0/syn/)
[![Rustc Version 1.31+](https://img.shields.io/badge/rustc-1.31+-lightgray.svg)](https://blog.rust-lang.org/2018/12/06/Rust-1.31-and-rust-2018.html)
[<img alt="github" src="https://img.shields.io/badge/github-dtolnay/syn-8da0cb?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/dtolnay/syn)
[<img alt="crates.io" src="https://img.shields.io/crates/v/syn.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/syn)
[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-syn-66c2a5?style=for-the-badge&labelColor=555555&logoColor=white&logo=data:image/svg+xml;base64,PHN2ZyByb2xlPSJpbWciIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDUxMiA1MTIiPjxwYXRoIGZpbGw9IiNmNWY1ZjUiIGQ9Ik00ODguNiAyNTAuMkwzOTIgMjE0VjEwNS41YzAtMTUtOS4zLTI4LjQtMjMuNC0zMy43bC0xMDAtMzcuNWMtOC4xLTMuMS0xNy4xLTMuMS0yNS4zIDBsLTEwMCAzNy41Yy0xNC4xIDUuMy0yMy40IDE4LjctMjMuNCAzMy43VjIxNGwtOTYuNiAzNi4yQzkuMyAyNTUuNSAwIDI2OC45IDAgMjgzLjlWMzk0YzAgMTMuNiA3LjcgMjYuMSAxOS45IDMyLjJsMTAwIDUwYzEwLjEgNS4xIDIyLjEgNS4xIDMyLjIgMGwxMDMuOS01MiAxMDMuOSA1MmMxMC4xIDUuMSAyMi4xIDUuMSAzMi4yIDBsMTAwLTUwYzEyLjItNi4xIDE5LjktMTguNiAxOS45LTMyLjJWMjgzLjljMC0xNS05LjMtMjguNC0yMy40LTMzLjd6TTM1OCAyMTQuOGwtODUgMzEuOXYtNjguMmw4NS0zN3Y3My4zek0xNTQgMTA0LjFsMTAyLTM4LjIgMTAyIDM4LjJ2LjZsLTEwMiA0MS40LTEwMi00MS40di0uNnptODQgMjkxLjFsLTg1IDQyLjV2LTc5LjFsODUtMzguOHY3NS40em0wLTExMmwtMTAyIDQxLjQtMTAyLTQxLjR2LS42bDEwMi0zOC4yIDEwMiAzOC4ydi42em0yNDAgMTEybC04NSA0Mi41di03OS4xbDg1LTM4Ljh2NzUuNHptMC0xMTJsLTEwMiA0MS40LTEwMi00MS40di0uNmwxMDItMzguMiAxMDIgMzguMnYuNnoiPjwvcGF0aD48L3N2Zz4K" height="20">](https://docs.rs/syn)
[<img alt="build status" src="https://img.shields.io/github/workflow/status/dtolnay/syn/CI/master?style=for-the-badge" height="20">](https://github.com/dtolnay/syn/actions?query=branch%3Amaster)
Syn is a parsing library for parsing a stream of Rust tokens into a syntax tree
of Rust source code.
@ -46,10 +46,6 @@ contains some APIs that may be useful more generally.
[`syn::DeriveInput`]: https://docs.rs/syn/1.0/syn/struct.DeriveInput.html
[parser functions]: https://docs.rs/syn/1.0/syn/parse/index.html
If you get stuck with anything involving procedural macros in Rust I am happy to
provide help even if the issue is not related to Syn. Please file a ticket in
this repo.
*Version requirement: Syn supports rustc 1.31 and up.*
[*Release notes*](https://github.com/dtolnay/syn/releases)
@ -88,8 +84,6 @@ proc-macro = true
```
```rust
extern crate proc_macro;
use proc_macro::TokenStream;
use quote::quote;
use syn::{parse_macro_input, DeriveInput};
@ -271,7 +265,7 @@ points, which are required by the language to use `proc_macro::TokenStream`.
The proc-macro2 crate will automatically detect and use the compiler's data
structures when a procedural macro is active.
[proc-macro2]: https://docs.rs/proc-macro2/1.0.0/proc_macro2/
[proc-macro2]: https://docs.rs/proc-macro2/1.0/proc_macro2/
<br>

7
third_party/rust/syn/benches/file.rs поставляемый
Просмотреть файл

@ -1,9 +1,16 @@
// $ cargo bench --features full --bench file
#![feature(rustc_private, test)]
#![recursion_limit = "1024"]
extern crate test;
#[macro_use]
#[path = "../tests/macros/mod.rs"]
mod macros;
#[path = "../tests/common/mod.rs"]
mod common;
#[path = "../tests/repo/mod.rs"]
pub mod repo;

45
third_party/rust/syn/benches/rust.rs поставляемый
Просмотреть файл

@ -4,7 +4,14 @@
// $ RUSTFLAGS='--cfg syn_only' cargo build --release --features full --bench rust
#![cfg_attr(not(syn_only), feature(rustc_private))]
#![recursion_limit = "1024"]
#[macro_use]
#[path = "../tests/macros/mod.rs"]
mod macros;
#[path = "../tests/common/mod.rs"]
mod common;
#[path = "../tests/repo/mod.rs"]
mod repo;
@ -28,31 +35,35 @@ mod syn_parse {
}
#[cfg(not(syn_only))]
mod libsyntax_parse {
mod librustc_parse {
extern crate rustc_data_structures;
extern crate syntax;
extern crate syntax_pos;
extern crate rustc_errors;
extern crate rustc_parse;
extern crate rustc_session;
extern crate rustc_span;
use rustc_data_structures::sync::Lrc;
use syntax::edition::Edition;
use syntax::errors::{emitter::Emitter, DiagnosticBuilder, Handler};
use syntax::parse::ParseSess;
use syntax::source_map::{FilePathMapping, SourceMap};
use syntax_pos::FileName;
use rustc_errors::{emitter::Emitter, Diagnostic, Handler};
use rustc_session::parse::ParseSess;
use rustc_span::source_map::{FilePathMapping, SourceMap};
use rustc_span::{edition::Edition, FileName};
pub fn bench(content: &str) -> Result<(), ()> {
struct SilentEmitter;
impl Emitter for SilentEmitter {
fn emit_diagnostic(&mut self, _db: &DiagnosticBuilder) {}
fn emit_diagnostic(&mut self, _diag: &Diagnostic) {}
fn source_map(&self) -> Option<&Lrc<SourceMap>> {
None
}
}
syntax::with_globals(Edition::Edition2018, || {
rustc_span::with_session_globals(Edition::Edition2018, || {
let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let emitter = Box::new(SilentEmitter);
let handler = Handler::with_emitter(false, None, emitter);
let sess = ParseSess::with_span_handler(handler, cm);
if let Err(mut diagnostic) = syntax::parse::parse_crate_from_source_str(
if let Err(mut diagnostic) = rustc_parse::parse_crate_from_source_str(
FileName::Custom("bench".to_owned()),
content.to_owned(),
&sess,
@ -104,11 +115,11 @@ fn main() {
repo::clone_rust();
macro_rules! testcases {
($($(#[$cfg:meta])* $name:path,)*) => {
($($(#[$cfg:meta])* $name:ident,)*) => {
vec![
$(
$(#[$cfg])*
(stringify!($name), $name as fn(&str) -> Result<(), ()>),
(stringify!($name), $name::bench as fn(&str) -> Result<(), ()>),
)*
]
};
@ -128,12 +139,12 @@ fn main() {
for (name, f) in testcases!(
#[cfg(not(syn_only))]
read_from_disk::bench,
read_from_disk,
#[cfg(not(syn_only))]
tokenstream_parse::bench,
syn_parse::bench,
tokenstream_parse,
syn_parse,
#[cfg(not(syn_only))]
libsyntax_parse::bench,
librustc_parse,
) {
eprint!("{:20}", format!("{}:", name));
let elapsed = exec(f);

38
third_party/rust/syn/build.rs поставляемый
Просмотреть файл

@ -1,6 +1,6 @@
use std::env;
use std::process::Command;
use std::str::{self, FromStr};
use std::str;
// The rustc-cfg strings below are *not* public API. Please let us know by
// opening a GitHub issue if your build environment requires some way to enable
@ -26,38 +26,14 @@ struct Compiler {
}
fn rustc_version() -> Option<Compiler> {
let rustc = match env::var_os("RUSTC") {
Some(rustc) => rustc,
None => return None,
};
let output = match Command::new(rustc).arg("--version").output() {
Ok(output) => output,
Err(_) => return None,
};
let version = match str::from_utf8(&output.stdout) {
Ok(version) => version,
Err(_) => return None,
};
let rustc = env::var_os("RUSTC")?;
let output = Command::new(rustc).arg("--version").output().ok()?;
let version = str::from_utf8(&output.stdout).ok()?;
let mut pieces = version.split('.');
if pieces.next() != Some("rustc 1") {
return None;
}
let next = match pieces.next() {
Some(next) => next,
None => return None,
};
let minor = match u32::from_str(next) {
Ok(minor) => minor,
Err(_) => return None,
};
Some(Compiler {
minor: minor,
nightly: version.contains("nightly"),
})
let minor = pieces.next()?.parse().ok()?;
let nightly = version.contains("nightly");
Some(Compiler { minor, nightly })
}

126
third_party/rust/syn/src/attr.rs поставляемый
Просмотреть файл

@ -9,15 +9,11 @@ use proc_macro2::TokenStream;
use crate::parse::{Parse, ParseBuffer, ParseStream, Parser, Result};
#[cfg(feature = "parsing")]
use crate::punctuated::Pair;
#[cfg(feature = "extra-traits")]
use crate::tt::TokenStreamHelper;
#[cfg(feature = "extra-traits")]
use std::hash::{Hash, Hasher};
ast_struct! {
/// An attribute like `#[repr(transparent)]`.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
///
/// <br>
@ -111,7 +107,46 @@ ast_struct! {
///
/// [`parse_meta()`]: Attribute::parse_meta
/// [`parse_args()`]: Attribute::parse_args
pub struct Attribute #manual_extra_traits {
///
/// <p><br></p>
///
/// # Doc comments
///
/// The compiler transforms doc comments, such as `/// comment` and `/*!
/// comment */`, into attributes before macros are expanded. Each comment is
/// expanded into an attribute of the form `#[doc = r"comment"]`.
///
/// As an example, the following `mod` items are expanded identically:
///
/// ```
/// # use syn::{ItemMod, parse_quote};
/// let doc: ItemMod = parse_quote! {
/// /// Single line doc comments
/// /// We write so many!
/// /**
/// * Multi-line comments...
/// * May span many lines
/// */
/// mod example {
/// //! Of course, they can be inner too
/// /*! And fit in a single line */
/// }
/// };
/// let attr: ItemMod = parse_quote! {
/// #[doc = r" Single line doc comments"]
/// #[doc = r" We write so many!"]
/// #[doc = r"
/// * Multi-line comments...
/// * May span many lines
/// "]
/// mod example {
/// #![doc = r" Of course, they can be inner too"]
/// #![doc = r" And fit in a single line "]
/// }
/// };
/// assert_eq!(doc, attr);
/// ```
pub struct Attribute {
pub pound_token: Token![#],
pub style: AttrStyle,
pub bracket_token: token::Bracket,
@ -120,39 +155,11 @@ ast_struct! {
}
}
#[cfg(feature = "extra-traits")]
impl Eq for Attribute {}
#[cfg(feature = "extra-traits")]
impl PartialEq for Attribute {
fn eq(&self, other: &Self) -> bool {
self.style == other.style
&& self.pound_token == other.pound_token
&& self.bracket_token == other.bracket_token
&& self.path == other.path
&& TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens)
}
}
#[cfg(feature = "extra-traits")]
impl Hash for Attribute {
fn hash<H>(&self, state: &mut H)
where
H: Hasher,
{
self.style.hash(state);
self.pound_token.hash(state);
self.bracket_token.hash(state);
self.path.hash(state);
TokenStreamHelper(&self.tokens).hash(state);
}
}
impl Attribute {
/// Parses the content of the attribute, consisting of the path and tokens,
/// as a [`Meta`] if possible.
///
/// *This function is available if Syn is built with the `"parsing"`
/// *This function is available only if Syn is built with the `"parsing"`
/// feature.*
#[cfg(feature = "parsing")]
pub fn parse_meta(&self) -> Result<Meta> {
@ -199,7 +206,7 @@ impl Attribute {
/// ^^^^^^^^^ what gets parsed
/// ```
///
/// *This function is available if Syn is built with the `"parsing"`
/// *This function is available only if Syn is built with the `"parsing"`
/// feature.*
#[cfg(feature = "parsing")]
pub fn parse_args<T: Parse>(&self) -> Result<T> {
@ -208,7 +215,7 @@ impl Attribute {
/// Parse the arguments to the attribute using the given parser.
///
/// *This function is available if Syn is built with the `"parsing"`
/// *This function is available only if Syn is built with the `"parsing"`
/// feature.*
#[cfg(feature = "parsing")]
pub fn parse_args_with<F: Parser>(&self, parser: F) -> Result<F::Output> {
@ -221,7 +228,7 @@ impl Attribute {
/// Parses zero or more outer attributes from the stream.
///
/// *This function is available if Syn is built with the `"parsing"`
/// *This function is available only if Syn is built with the `"parsing"`
/// feature.*
#[cfg(feature = "parsing")]
pub fn parse_outer(input: ParseStream) -> Result<Vec<Self>> {
@ -234,7 +241,7 @@ impl Attribute {
/// Parses zero or more inner attributes from the stream.
///
/// *This function is available if Syn is built with the `"parsing"`
/// *This function is available only if Syn is built with the `"parsing"`
/// feature.*
#[cfg(feature = "parsing")]
pub fn parse_inner(input: ParseStream) -> Result<Vec<Self>> {
@ -247,7 +254,7 @@ impl Attribute {
}
#[cfg(feature = "parsing")]
fn error_expected_args(attr: &Attribute) -> Error {
fn expected_parentheses(attr: &Attribute) -> String {
let style = match attr.style {
AttrStyle::Outer => "#",
AttrStyle::Inner(_) => "#!",
@ -261,19 +268,23 @@ fn error_expected_args(attr: &Attribute) -> Error {
path += &segment.ident.to_string();
}
let msg = format!("expected attribute arguments: {}[{}(...)]", style, path);
#[cfg(feature = "printing")]
return Error::new_spanned(attr, msg);
#[cfg(not(feature = "printing"))]
return Error::new(attr.bracket_token.span, msg);
format!("{}[{}(...)]", style, path)
}
#[cfg(feature = "parsing")]
fn enter_args<'a>(attr: &Attribute, input: ParseStream<'a>) -> Result<ParseBuffer<'a>> {
if input.is_empty() {
return Err(error_expected_args(attr));
let expected = expected_parentheses(attr);
let msg = format!("expected attribute arguments in parentheses: {}", expected);
return Err(crate::error::new2(
attr.pound_token.span,
attr.bracket_token.span,
msg,
));
} else if input.peek(Token![=]) {
let expected = expected_parentheses(attr);
let msg = format!("expected parentheses: {}", expected);
return Err(input.error(msg));
};
let content;
@ -298,7 +309,7 @@ ast_enum! {
/// Distinguishes between attributes that decorate an item and attributes
/// that are contained within an item.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
///
/// # Outer attributes
@ -312,7 +323,6 @@ ast_enum! {
/// - `#![feature(proc_macro)]`
/// - `//! # Example`
/// - `/*! Please file an issue */`
#[cfg_attr(feature = "clone-impls", derive(Copy))]
pub enum AttrStyle {
Outer,
Inner(Token![!]),
@ -322,7 +332,7 @@ ast_enum! {
ast_enum_of_structs! {
/// Content of a compile-time structured attribute.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
///
/// ## Path
@ -360,7 +370,7 @@ ast_enum_of_structs! {
ast_struct! {
/// A structured list within an attribute, like `derive(Copy, Clone)`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub struct MetaList {
pub path: Path,
@ -372,7 +382,7 @@ ast_struct! {
ast_struct! {
/// A name-value pair within an attribute, like `feature = "nightly"`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub struct MetaNameValue {
pub path: Path,
@ -398,7 +408,7 @@ impl Meta {
ast_enum_of_structs! {
/// Element of a compile-time attribute list.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
pub enum NestedMeta {
/// A structured meta item, like the `Copy` in `#[derive(Copy)]` which
@ -429,8 +439,8 @@ ast_enum_of_structs! {
/// as type `AttributeArgs`.
///
/// ```
/// extern crate proc_macro;
///
/// # extern crate proc_macro;
/// #
/// use proc_macro::TokenStream;
/// use syn::{parse_macro_input, AttributeArgs, ItemFn};
///
@ -464,7 +474,7 @@ where
fn is_outer(attr: &&Attribute) -> bool {
match attr.style {
AttrStyle::Outer => true,
_ => false,
AttrStyle::Inner(_) => false,
}
}
self.into_iter().filter(is_outer)
@ -474,7 +484,7 @@ where
fn is_inner(attr: &&Attribute) -> bool {
match attr.style {
AttrStyle::Inner(_) => true,
_ => false,
AttrStyle::Outer => false,
}
}
self.into_iter().filter(is_inner)

56
third_party/rust/syn/src/buffer.rs поставляемый
Просмотреть файл

@ -1,7 +1,7 @@
//! A stably addressed token buffer supporting efficient traversal based on a
//! cheaply copyable cursor.
//!
//! *This module is available if Syn is built with the `"parsing"` feature.*
//! *This module is available only if Syn is built with the `"parsing"` feature.*
// This module is heavily commented as it contains most of the unsafe code in
// Syn, and caution should be used when editing it. The public-facing interface
@ -36,7 +36,7 @@ enum Entry {
/// `TokenStream` which requires a deep copy in order to traverse more than
/// once.
///
/// *This type is available if Syn is built with the `"parsing"` feature.*
/// *This type is available only if Syn is built with the `"parsing"` feature.*
pub struct TokenBuffer {
// NOTE: Do not derive clone on this - there are raw pointers inside which
// will be messed up. Moving the `TokenBuffer` itself is safe as the actual
@ -98,7 +98,7 @@ impl TokenBuffer {
/// Creates a `TokenBuffer` containing all the tokens from the input
/// `TokenStream`.
///
/// *This method is available if Syn is built with both the `"parsing"` and
/// *This method is available only if Syn is built with both the `"parsing"` and
/// `"proc-macro"` features.*
#[cfg(all(
not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
@ -133,8 +133,7 @@ impl TokenBuffer {
/// Two cursors are equal if they have the same location in the same input
/// stream, and have the same scope.
///
/// *This type is available if Syn is built with the `"parsing"` feature.*
#[derive(Copy, Clone, Eq, PartialEq)]
/// *This type is available only if Syn is built with the `"parsing"` feature.*
pub struct Cursor<'a> {
// The current entry which the `Cursor` is pointing at.
ptr: *const Entry,
@ -201,13 +200,13 @@ impl<'a> Cursor<'a> {
Cursor::create(self.ptr.offset(1), self.scope)
}
/// If the cursor is looking at a `None`-delimited group, move it to look at
/// the first token inside instead. If the group is empty, this will move
/// While the cursor is looking at a `None`-delimited group, move it to look
/// at the first token inside instead. If the group is empty, this will move
/// the cursor past the `None`-delimited group.
///
/// WARNING: This mutates its argument.
fn ignore_none(&mut self) {
if let Entry::Group(group, buf) = self.entry() {
while let Entry::Group(group, buf) = self.entry() {
if group.delimiter() == Delimiter::None {
// NOTE: We call `Cursor::create` here to make sure that
// situations where we should immediately exit the span after
@ -215,13 +214,14 @@ impl<'a> Cursor<'a> {
unsafe {
*self = Cursor::create(&buf.data[0], self.scope);
}
} else {
break;
}
}
}
/// Checks whether the cursor is currently pointing at the end of its valid
/// scope.
#[inline]
pub fn eof(self) -> bool {
// We're at eof if we're at the end of our scope.
self.ptr == self.scope
@ -342,6 +342,44 @@ impl<'a> Cursor<'a> {
Entry::End(..) => Span::call_site(),
}
}
/// Skip over the next token without cloning it. Returns `None` if this
/// cursor points to eof.
///
/// This method treats `'lifetimes` as a single token.
pub(crate) fn skip(self) -> Option<Cursor<'a>> {
match self.entry() {
Entry::End(..) => None,
// Treat lifetimes as a single tt for the purposes of 'skip'.
Entry::Punct(op) if op.as_char() == '\'' && op.spacing() == Spacing::Joint => {
let next = unsafe { self.bump() };
match next.entry() {
Entry::Ident(_) => Some(unsafe { next.bump() }),
_ => Some(next),
}
}
_ => Some(unsafe { self.bump() }),
}
}
}
impl<'a> Copy for Cursor<'a> {}
impl<'a> Clone for Cursor<'a> {
fn clone(&self) -> Self {
*self
}
}
impl<'a> Eq for Cursor<'a> {}
impl<'a> PartialEq for Cursor<'a> {
fn eq(&self, other: &Self) -> bool {
let Cursor { ptr, scope, marker } = self;
let _ = marker;
*ptr == other.ptr && *scope == other.scope
}
}
pub(crate) fn same_scope(a: Cursor, b: Cursor) -> bool {

12
third_party/rust/syn/src/custom_keyword.rs поставляемый
Просмотреть файл

@ -86,7 +86,7 @@
/// }
/// }
/// ```
#[macro_export(local_inner_macros)]
#[macro_export]
macro_rules! custom_keyword {
($ident:ident) => {
#[allow(non_camel_case_types)]
@ -95,7 +95,7 @@ macro_rules! custom_keyword {
}
#[doc(hidden)]
#[allow(non_snake_case)]
#[allow(dead_code, non_snake_case)]
pub fn $ident<__S: $crate::export::IntoSpans<[$crate::export::Span; 1]>>(
span: __S,
) -> $ident {
@ -112,10 +112,10 @@ macro_rules! custom_keyword {
}
}
impl_parse_for_custom_keyword!($ident);
impl_to_tokens_for_custom_keyword!($ident);
impl_clone_for_custom_keyword!($ident);
impl_extra_traits_for_custom_keyword!($ident);
$crate::impl_parse_for_custom_keyword!($ident);
$crate::impl_to_tokens_for_custom_keyword!($ident);
$crate::impl_clone_for_custom_keyword!($ident);
$crate::impl_extra_traits_for_custom_keyword!($ident);
};
}

Просмотреть файл

@ -74,19 +74,19 @@
/// let _: PathSegments = syn::parse_str(input).unwrap();
/// }
/// ```
#[macro_export(local_inner_macros)]
#[macro_export]
macro_rules! custom_punctuation {
($ident:ident, $($tt:tt)+) => {
pub struct $ident {
pub spans: custom_punctuation_repr!($($tt)+),
pub spans: $crate::custom_punctuation_repr!($($tt)+),
}
#[doc(hidden)]
#[allow(non_snake_case)]
pub fn $ident<__S: $crate::export::IntoSpans<custom_punctuation_repr!($($tt)+)>>(
#[allow(dead_code, non_snake_case)]
pub fn $ident<__S: $crate::export::IntoSpans<$crate::custom_punctuation_repr!($($tt)+)>>(
spans: __S,
) -> $ident {
let _validate_len = 0 $(+ custom_punctuation_len!(strict, $tt))*;
let _validate_len = 0 $(+ $crate::custom_punctuation_len!(strict, $tt))*;
$ident {
spans: $crate::export::IntoSpans::into_spans(spans)
}
@ -98,33 +98,33 @@ macro_rules! custom_punctuation {
}
}
impl_parse_for_custom_punctuation!($ident, $($tt)+);
impl_to_tokens_for_custom_punctuation!($ident, $($tt)+);
impl_clone_for_custom_punctuation!($ident, $($tt)+);
impl_extra_traits_for_custom_punctuation!($ident, $($tt)+);
$crate::impl_parse_for_custom_punctuation!($ident, $($tt)+);
$crate::impl_to_tokens_for_custom_punctuation!($ident, $($tt)+);
$crate::impl_clone_for_custom_punctuation!($ident, $($tt)+);
$crate::impl_extra_traits_for_custom_punctuation!($ident, $($tt)+);
};
}
// Not public API.
#[cfg(feature = "parsing")]
#[doc(hidden)]
#[macro_export(local_inner_macros)]
#[macro_export]
macro_rules! impl_parse_for_custom_punctuation {
($ident:ident, $($tt:tt)+) => {
impl $crate::token::CustomToken for $ident {
fn peek(cursor: $crate::buffer::Cursor) -> bool {
$crate::token::parsing::peek_punct(cursor, stringify_punct!($($tt)+))
$crate::token::parsing::peek_punct(cursor, $crate::stringify_punct!($($tt)+))
}
fn display() -> &'static $crate::export::str {
custom_punctuation_concat!("`", stringify_punct!($($tt)+), "`")
concat!("`", $crate::stringify_punct!($($tt)+), "`")
}
}
impl $crate::parse::Parse for $ident {
fn parse(input: $crate::parse::ParseStream) -> $crate::parse::Result<$ident> {
let spans: custom_punctuation_repr!($($tt)+) =
$crate::token::parsing::punct(input, stringify_punct!($($tt)+))?;
let spans: $crate::custom_punctuation_repr!($($tt)+) =
$crate::token::parsing::punct(input, $crate::stringify_punct!($($tt)+))?;
Ok($ident(spans))
}
}
@ -142,12 +142,12 @@ macro_rules! impl_parse_for_custom_punctuation {
// Not public API.
#[cfg(feature = "printing")]
#[doc(hidden)]
#[macro_export(local_inner_macros)]
#[macro_export]
macro_rules! impl_to_tokens_for_custom_punctuation {
($ident:ident, $($tt:tt)+) => {
impl $crate::export::ToTokens for $ident {
fn to_tokens(&self, tokens: &mut $crate::export::TokenStream2) {
$crate::token::printing::punct(stringify_punct!($($tt)+), &self.spans, tokens)
$crate::token::printing::punct($crate::stringify_punct!($($tt)+), &self.spans, tokens)
}
}
};
@ -221,16 +221,16 @@ macro_rules! impl_extra_traits_for_custom_punctuation {
// Not public API.
#[doc(hidden)]
#[macro_export(local_inner_macros)]
#[macro_export]
macro_rules! custom_punctuation_repr {
($($tt:tt)+) => {
[$crate::export::Span; 0 $(+ custom_punctuation_len!(lenient, $tt))+]
[$crate::export::Span; 0 $(+ $crate::custom_punctuation_len!(lenient, $tt))+]
};
}
// Not public API.
#[doc(hidden)]
#[macro_export(local_inner_macros)]
#[macro_export]
#[rustfmt::skip]
macro_rules! custom_punctuation_len {
($mode:ident, +) => { 1 };
@ -279,7 +279,7 @@ macro_rules! custom_punctuation_len {
($mode:ident, -=) => { 2 };
($mode:ident, ~) => { 1 };
(lenient, $tt:tt) => { 0 };
(strict, $tt:tt) => {{ custom_punctuation_unexpected!($tt); 0 }};
(strict, $tt:tt) => {{ $crate::custom_punctuation_unexpected!($tt); 0 }};
}
// Not public API.
@ -297,13 +297,3 @@ macro_rules! stringify_punct {
concat!($(stringify!($tt)),+)
};
}
// Not public API.
// Without this, local_inner_macros breaks when looking for concat!
#[doc(hidden)]
#[macro_export]
macro_rules! custom_punctuation_concat {
($($tt:tt)*) => {
concat!($($tt)*)
};
}

96
third_party/rust/syn/src/data.rs поставляемый
Просмотреть файл

@ -4,7 +4,7 @@ use crate::punctuated::Punctuated;
ast_struct! {
/// An enum variant.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
pub struct Variant {
/// Attributes tagged on the variant.
@ -24,7 +24,7 @@ ast_struct! {
ast_enum_of_structs! {
/// Data stored within an enum variant or struct.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
///
/// # Syntax tree enum
@ -52,7 +52,7 @@ ast_struct! {
/// Named fields of a struct or struct variant such as `Point { x: f64,
/// y: f64 }`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub struct FieldsNamed {
pub brace_token: token::Brace,
@ -63,7 +63,7 @@ ast_struct! {
ast_struct! {
/// Unnamed fields of a tuple struct or tuple variant such as `Some(T)`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub struct FieldsUnnamed {
pub paren_token: token::Paren,
@ -93,6 +93,24 @@ impl Fields {
Fields::Unnamed(f) => f.unnamed.iter_mut(),
}
}
/// Returns the number of fields.
pub fn len(&self) -> usize {
match self {
Fields::Unit => 0,
Fields::Named(f) => f.named.len(),
Fields::Unnamed(f) => f.unnamed.len(),
}
}
/// Returns `true` if there are zero fields.
pub fn is_empty(&self) -> bool {
match self {
Fields::Unit => true,
Fields::Named(f) => f.named.is_empty(),
Fields::Unnamed(f) => f.unnamed.is_empty(),
}
}
}
impl IntoIterator for Fields {
@ -129,7 +147,7 @@ impl<'a> IntoIterator for &'a mut Fields {
ast_struct! {
/// A field of a struct or enum variant.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
pub struct Field {
/// Attributes tagged on the field.
@ -154,7 +172,7 @@ ast_enum_of_structs! {
/// The visibility level of an item: inherited or `pub` or
/// `pub(restricted)`.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
///
/// # Syntax tree enum
@ -184,7 +202,7 @@ ast_enum_of_structs! {
ast_struct! {
/// A public visibility level: `pub`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub struct VisPublic {
pub pub_token: Token![pub],
@ -194,7 +212,7 @@ ast_struct! {
ast_struct! {
/// A crate-level visibility: `crate`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub struct VisCrate {
pub crate_token: Token![crate],
@ -205,7 +223,7 @@ ast_struct! {
/// A visibility level restricted to some path: `pub(self)` or
/// `pub(super)` or `pub(crate)` or `pub(in some::module)`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub struct VisRestricted {
pub pub_token: Token![pub],
@ -220,12 +238,15 @@ pub mod parsing {
use super::*;
use crate::ext::IdentExt;
use crate::parse::discouraged::Speculative;
use crate::parse::{Parse, ParseStream, Result};
impl Parse for Variant {
fn parse(input: ParseStream) -> Result<Self> {
let attrs = input.call(Attribute::parse_outer)?;
let _visibility: Visibility = input.parse()?;
Ok(Variant {
attrs: input.call(Attribute::parse_outer)?,
attrs,
ident: input.parse()?,
fields: {
if input.peek(token::Brace) {
@ -295,6 +316,17 @@ pub mod parsing {
impl Parse for Visibility {
fn parse(input: ParseStream) -> Result<Self> {
// Recognize an empty None-delimited group, as produced by a $:vis
// matcher that matched no tokens.
if input.peek(token::Group) {
let ahead = input.fork();
let group = crate::group::parse_group(&ahead)?;
if group.content.is_empty() {
input.advance_to(&ahead);
return Ok(Visibility::Inherited);
}
}
if input.peek(Token![pub]) {
Self::parse_pub(input)
} else if input.peek(Token![crate]) {
@ -310,27 +342,39 @@ pub mod parsing {
let pub_token = input.parse::<Token![pub]>()?;
if input.peek(token::Paren) {
// TODO: optimize using advance_to
let ahead = input.fork();
let mut content;
parenthesized!(content in ahead);
let content;
let paren_token = parenthesized!(content in ahead);
if content.peek(Token![crate])
|| content.peek(Token![self])
|| content.peek(Token![super])
{
return Ok(Visibility::Restricted(VisRestricted {
pub_token,
paren_token: parenthesized!(content in input),
in_token: None,
path: Box::new(Path::from(content.call(Ident::parse_any)?)),
}));
let path = content.call(Ident::parse_any)?;
// Ensure there are no additional tokens within `content`.
// Without explicitly checking, we may misinterpret a tuple
// field as a restricted visibility, causing a parse error.
// e.g. `pub (crate::A, crate::B)` (Issue #720).
if content.is_empty() {
input.advance_to(&ahead);
return Ok(Visibility::Restricted(VisRestricted {
pub_token,
paren_token,
in_token: None,
path: Box::new(Path::from(path)),
}));
}
} else if content.peek(Token![in]) {
let in_token: Token![in] = content.parse()?;
let path = content.call(Path::parse_mod_style)?;
input.advance_to(&ahead);
return Ok(Visibility::Restricted(VisRestricted {
pub_token,
paren_token: parenthesized!(content in input),
in_token: Some(content.parse()?),
path: Box::new(content.call(Path::parse_mod_style)?),
paren_token,
in_token: Some(in_token),
path: Box::new(path),
}));
}
}
@ -347,6 +391,14 @@ pub mod parsing {
}))
}
}
#[cfg(feature = "full")]
pub(crate) fn is_some(&self) -> bool {
match self {
Visibility::Inherited => false,
_ => true,
}
}
}
}

10
third_party/rust/syn/src/derive.rs поставляемый
Просмотреть файл

@ -4,7 +4,7 @@ use crate::punctuated::Punctuated;
ast_struct! {
/// Data structure sent to a `proc_macro_derive` macro.
///
/// *This type is available if Syn is built with the `"derive"` feature.*
/// *This type is available only if Syn is built with the `"derive"` feature.*
pub struct DeriveInput {
/// Attributes tagged on the whole struct or enum.
pub attrs: Vec<Attribute>,
@ -26,7 +26,7 @@ ast_struct! {
ast_enum_of_structs! {
/// The storage of a struct, enum or union data structure.
///
/// *This type is available if Syn is built with the `"derive"` feature.*
/// *This type is available only if Syn is built with the `"derive"` feature.*
///
/// # Syntax tree enum
///
@ -53,7 +53,7 @@ ast_enum_of_structs! {
ast_struct! {
/// A struct input to a `proc_macro_derive` macro.
///
/// *This type is available if Syn is built with the `"derive"`
/// *This type is available only if Syn is built with the `"derive"`
/// feature.*
pub struct DataStruct {
pub struct_token: Token![struct],
@ -65,7 +65,7 @@ ast_struct! {
ast_struct! {
/// An enum input to a `proc_macro_derive` macro.
///
/// *This type is available if Syn is built with the `"derive"`
/// *This type is available only if Syn is built with the `"derive"`
/// feature.*
pub struct DataEnum {
pub enum_token: Token![enum],
@ -77,7 +77,7 @@ ast_struct! {
ast_struct! {
/// An untagged union input to a `proc_macro_derive` macro.
///
/// *This type is available if Syn is built with the `"derive"`
/// *This type is available only if Syn is built with the `"derive"`
/// feature.*
pub struct DataUnion {
pub union_token: Token![union],

27
third_party/rust/syn/src/discouraged.rs поставляемый
Просмотреть файл

@ -16,7 +16,7 @@ pub trait Speculative {
/// syntax of the form `A* B*` for arbitrary syntax `A` and `B`. The problem
/// is that when the fork fails to parse an `A`, it's impossible to tell
/// whether that was because of a syntax error and the user meant to provide
/// an `A`, or that the `A`s are finished and its time to start parsing
/// an `A`, or that the `A`s are finished and it's time to start parsing
/// `B`s. Use with care.
///
/// Also note that if `A` is a subset of `B`, `A* B*` can be parsed by
@ -72,7 +72,6 @@ pub trait Speculative {
/// || input.peek(Token![self])
/// || input.peek(Token![Self])
/// || input.peek(Token![crate])
/// || input.peek(Token![extern])
/// {
/// let ident = input.call(Ident::parse_any)?;
/// return Ok(PathSegment::from(ident));
@ -164,6 +163,30 @@ impl<'a> Speculative for ParseBuffer<'a> {
panic!("Fork was not derived from the advancing parse stream");
}
let (self_unexp, self_sp) = inner_unexpected(self);
let (fork_unexp, fork_sp) = inner_unexpected(fork);
if !Rc::ptr_eq(&self_unexp, &fork_unexp) {
match (fork_sp, self_sp) {
// Unexpected set on the fork, but not on `self`, copy it over.
(Some(span), None) => {
self_unexp.set(Unexpected::Some(span));
}
// Unexpected unset. Use chain to propagate errors from fork.
(None, None) => {
fork_unexp.set(Unexpected::Chain(self_unexp));
// Ensure toplevel 'unexpected' tokens from the fork don't
// bubble up the chain by replacing the root `unexpected`
// pointer, only 'unexpected' tokens from existing group
// parsers should bubble.
fork.unexpected
.set(Some(Rc::new(Cell::new(Unexpected::None))));
}
// Unexpected has been set on `self`. No changes needed.
(_, Some(_)) => {}
}
}
// See comment on `cell` in the struct definition.
self.cell
.set(unsafe { mem::transmute::<Cursor, Cursor<'static>>(fork.cursor()) })

33
third_party/rust/syn/src/error.rs поставляемый
Просмотреть файл

@ -1,4 +1,3 @@
use std;
use std::fmt::{self, Debug, Display};
use std::iter::FromIterator;
use std::slice;
@ -32,8 +31,8 @@ pub type Result<T> = std::result::Result<T, Error>;
/// conversion to `compile_error!` automatically.
///
/// ```
/// extern crate proc_macro;
///
/// # extern crate proc_macro;
/// #
/// use proc_macro::TokenStream;
/// use syn::{parse_macro_input, AttributeArgs, ItemFn};
///
@ -82,7 +81,6 @@ pub type Result<T> = std::result::Result<T, Error>;
/// # }
/// # }
/// ```
#[derive(Clone)]
pub struct Error {
messages: Vec<ErrorMessage>,
}
@ -250,6 +248,17 @@ pub fn new_at<T: Display>(scope: Span, cursor: Cursor, message: T) -> Error {
}
}
#[cfg(all(feature = "parsing", any(feature = "full", feature = "derive")))]
pub fn new2<T: Display>(start: Span, end: Span, message: T) -> Error {
Error {
messages: vec![ErrorMessage {
start_span: ThreadBound::new(start),
end_span: ThreadBound::new(end),
message: message.to_string(),
}],
}
}
impl Debug for Error {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
if self.messages.len() == 1 {
@ -278,6 +287,14 @@ impl Display for Error {
}
}
impl Clone for Error {
fn clone(&self) -> Self {
Error {
messages: self.messages.clone(),
}
}
}
impl Clone for ErrorMessage {
fn clone(&self) -> Self {
let start = self
@ -355,3 +372,11 @@ impl<'a> Iterator for Iter<'a> {
})
}
}
impl Extend<Error> for Error {
fn extend<T: IntoIterator<Item = Error>>(&mut self, iter: T) {
for err in iter {
self.combine(err);
}
}
}

826
third_party/rust/syn/src/expr.rs поставляемый

Разница между файлами не показана из-за своего большого размера Загрузить разницу

12
third_party/rust/syn/src/ext.rs поставляемый
Просмотреть файл

@ -1,6 +1,6 @@
//! Extension traits to provide parsing methods on foreign types.
//!
//! *This module is available if Syn is built with the `"parsing"` feature.*
//! *This module is available only if Syn is built with the `"parsing"` feature.*
use proc_macro2::Ident;
@ -16,7 +16,7 @@ use crate::token::CustomToken;
/// This trait is sealed and cannot be implemented for types outside of Syn. It
/// is implemented only for `proc_macro2::Ident`.
///
/// *This trait is available if Syn is built with the `"parsing"` feature.*
/// *This trait is available only if Syn is built with the `"parsing"` feature.*
pub trait IdentExt: Sized + private::Sealed {
/// Parses any identifier including keywords.
///
@ -129,7 +129,13 @@ mod private {
impl Sealed for Ident {}
#[derive(Copy, Clone)]
pub struct PeekFn;
pub struct IdentAny;
impl Copy for PeekFn {}
impl Clone for PeekFn {
fn clone(&self) -> Self {
*self
}
}
}

4
third_party/rust/syn/src/file.rs поставляемый
Просмотреть файл

@ -3,7 +3,7 @@ use super::*;
ast_struct! {
/// A complete file of Rust source code.
///
/// *This type is available if Syn is built with the `"full"` feature.*
/// *This type is available only if Syn is built with the `"full"` feature.*
///
/// # Example
///
@ -37,6 +37,8 @@ ast_struct! {
/// file.read_to_string(&mut src).expect("Unable to read file");
///
/// let syntax = syn::parse_file(&src).expect("Unable to parse file");
///
/// // Debug impl is available if Syn is built with "extra-traits" feature.
/// println!("{:#?}", syntax);
/// }
/// ```

2051
third_party/rust/syn/src/gen/clone.rs поставляемый Normal file

Разница между файлами не показана из-за своего большого размера Загрузить разницу

2857
third_party/rust/syn/src/gen/debug.rs поставляемый Normal file

Разница между файлами не показана из-за своего большого размера Загрузить разницу

1930
third_party/rust/syn/src/gen/eq.rs поставляемый Normal file

Разница между файлами не показана из-за своего большого размера Загрузить разницу

287
third_party/rust/syn/src/gen/fold.rs поставляемый
Просмотреть файл

@ -2,6 +2,7 @@
// It is not intended for manual editing.
#![allow(unreachable_code, unused_variables)]
#![allow(clippy::match_wildcard_for_single_variants)]
#[cfg(any(feature = "full", feature = "derive"))]
use crate::gen::helper::fold::*;
#[cfg(any(feature = "full", feature = "derive"))]
@ -26,7 +27,7 @@ macro_rules! full {
///
/// [module documentation]: self
///
/// *This trait is available if Syn is built with the `"fold"` feature.*
/// *This trait is available only if Syn is built with the `"fold"` feature.*
pub trait Fold {
#[cfg(any(feature = "derive", feature = "full"))]
fn fold_abi(&mut self, i: Abi) -> Abi {
@ -433,35 +434,27 @@ pub trait Fold {
fn fold_lifetime_def(&mut self, i: LifetimeDef) -> LifetimeDef {
fold_lifetime_def(self, i)
}
#[cfg(any(feature = "derive", feature = "full"))]
fn fold_lit(&mut self, i: Lit) -> Lit {
fold_lit(self, i)
}
#[cfg(any(feature = "derive", feature = "full"))]
fn fold_lit_bool(&mut self, i: LitBool) -> LitBool {
fold_lit_bool(self, i)
}
#[cfg(any(feature = "derive", feature = "full"))]
fn fold_lit_byte(&mut self, i: LitByte) -> LitByte {
fold_lit_byte(self, i)
}
#[cfg(any(feature = "derive", feature = "full"))]
fn fold_lit_byte_str(&mut self, i: LitByteStr) -> LitByteStr {
fold_lit_byte_str(self, i)
}
#[cfg(any(feature = "derive", feature = "full"))]
fn fold_lit_char(&mut self, i: LitChar) -> LitChar {
fold_lit_char(self, i)
}
#[cfg(any(feature = "derive", feature = "full"))]
fn fold_lit_float(&mut self, i: LitFloat) -> LitFloat {
fold_lit_float(self, i)
}
#[cfg(any(feature = "derive", feature = "full"))]
fn fold_lit_int(&mut self, i: LitInt) -> LitInt {
fold_lit_int(self, i)
}
#[cfg(any(feature = "derive", feature = "full"))]
fn fold_lit_str(&mut self, i: LitStr) -> LitStr {
fold_lit_str(self, i)
}
@ -799,10 +792,10 @@ where
F: Fold + ?Sized,
{
AngleBracketedGenericArguments {
colon2_token: (node.colon2_token).map(|it| Token ! [ :: ](tokens_helper(f, &it.spans))),
lt_token: Token ! [ < ](tokens_helper(f, &node.lt_token.spans)),
colon2_token: (node.colon2_token).map(|it| Token ! [::](tokens_helper(f, &it.spans))),
lt_token: Token ! [<](tokens_helper(f, &node.lt_token.spans)),
args: FoldHelper::lift(node.args, |it| f.fold_generic_argument(it)),
gt_token: Token ! [ > ](tokens_helper(f, &node.gt_token.spans)),
gt_token: Token ! [>](tokens_helper(f, &node.gt_token.spans)),
}
}
#[cfg(feature = "full")]
@ -819,9 +812,9 @@ where
Box::new(f.fold_expr(*(it).1)),
)
}),
fat_arrow_token: Token ! [ => ](tokens_helper(f, &node.fat_arrow_token.spans)),
fat_arrow_token: Token ! [=>](tokens_helper(f, &node.fat_arrow_token.spans)),
body: Box::new(f.fold_expr(*node.body)),
comma: (node.comma).map(|it| Token ! [ , ](tokens_helper(f, &it.spans))),
comma: (node.comma).map(|it| Token ! [,](tokens_helper(f, &it.spans))),
}
}
#[cfg(any(feature = "derive", feature = "full"))]
@ -842,7 +835,7 @@ where
F: Fold + ?Sized,
{
Attribute {
pound_token: Token ! [ # ](tokens_helper(f, &node.pound_token.spans)),
pound_token: Token ! [#](tokens_helper(f, &node.pound_token.spans)),
style: f.fold_attr_style(node.style),
bracket_token: Bracket(tokens_helper(f, &node.bracket_token.span)),
path: f.fold_path(node.path),
@ -859,7 +852,7 @@ where
name: (node.name).map(|it| {
(
f.fold_ident((it).0),
Token ! [ : ](tokens_helper(f, &(it).1.spans)),
Token ! [:](tokens_helper(f, &(it).1.spans)),
)
}),
ty: f.fold_type(node.ty),
@ -871,59 +864,47 @@ where
F: Fold + ?Sized,
{
match node {
BinOp::Add(_binding_0) => BinOp::Add(Token ! [ + ](tokens_helper(f, &_binding_0.spans))),
BinOp::Sub(_binding_0) => BinOp::Sub(Token ! [ - ](tokens_helper(f, &_binding_0.spans))),
BinOp::Mul(_binding_0) => BinOp::Mul(Token ! [ * ](tokens_helper(f, &_binding_0.spans))),
BinOp::Div(_binding_0) => BinOp::Div(Token ! [ / ](tokens_helper(f, &_binding_0.spans))),
BinOp::Rem(_binding_0) => BinOp::Rem(Token ! [ % ](tokens_helper(f, &_binding_0.spans))),
BinOp::And(_binding_0) => BinOp::And(Token ! [ && ](tokens_helper(f, &_binding_0.spans))),
BinOp::Or(_binding_0) => BinOp::Or(Token ! [ || ](tokens_helper(f, &_binding_0.spans))),
BinOp::Add(_binding_0) => BinOp::Add(Token ! [+](tokens_helper(f, &_binding_0.spans))),
BinOp::Sub(_binding_0) => BinOp::Sub(Token ! [-](tokens_helper(f, &_binding_0.spans))),
BinOp::Mul(_binding_0) => BinOp::Mul(Token ! [*](tokens_helper(f, &_binding_0.spans))),
BinOp::Div(_binding_0) => BinOp::Div(Token ! [/](tokens_helper(f, &_binding_0.spans))),
BinOp::Rem(_binding_0) => BinOp::Rem(Token ! [%](tokens_helper(f, &_binding_0.spans))),
BinOp::And(_binding_0) => BinOp::And(Token ! [&&](tokens_helper(f, &_binding_0.spans))),
BinOp::Or(_binding_0) => BinOp::Or(Token ! [||](tokens_helper(f, &_binding_0.spans))),
BinOp::BitXor(_binding_0) => {
BinOp::BitXor(Token ! [ ^ ](tokens_helper(f, &_binding_0.spans)))
BinOp::BitXor(Token ! [^](tokens_helper(f, &_binding_0.spans)))
}
BinOp::BitAnd(_binding_0) => {
BinOp::BitAnd(Token ! [ & ](tokens_helper(f, &_binding_0.spans)))
}
BinOp::BitOr(_binding_0) => {
BinOp::BitOr(Token ! [ | ](tokens_helper(f, &_binding_0.spans)))
}
BinOp::Shl(_binding_0) => BinOp::Shl(Token ! [ << ](tokens_helper(f, &_binding_0.spans))),
BinOp::Shr(_binding_0) => BinOp::Shr(Token ! [ >> ](tokens_helper(f, &_binding_0.spans))),
BinOp::Eq(_binding_0) => BinOp::Eq(Token ! [ == ](tokens_helper(f, &_binding_0.spans))),
BinOp::Lt(_binding_0) => BinOp::Lt(Token ! [ < ](tokens_helper(f, &_binding_0.spans))),
BinOp::Le(_binding_0) => BinOp::Le(Token ! [ <= ](tokens_helper(f, &_binding_0.spans))),
BinOp::Ne(_binding_0) => BinOp::Ne(Token ! [ != ](tokens_helper(f, &_binding_0.spans))),
BinOp::Ge(_binding_0) => BinOp::Ge(Token ! [ >= ](tokens_helper(f, &_binding_0.spans))),
BinOp::Gt(_binding_0) => BinOp::Gt(Token ! [ > ](tokens_helper(f, &_binding_0.spans))),
BinOp::AddEq(_binding_0) => {
BinOp::AddEq(Token ! [ += ](tokens_helper(f, &_binding_0.spans)))
}
BinOp::SubEq(_binding_0) => {
BinOp::SubEq(Token ! [ -= ](tokens_helper(f, &_binding_0.spans)))
}
BinOp::MulEq(_binding_0) => {
BinOp::MulEq(Token ! [ *= ](tokens_helper(f, &_binding_0.spans)))
}
BinOp::DivEq(_binding_0) => {
BinOp::DivEq(Token ! [ /= ](tokens_helper(f, &_binding_0.spans)))
}
BinOp::RemEq(_binding_0) => {
BinOp::RemEq(Token ! [ %= ](tokens_helper(f, &_binding_0.spans)))
BinOp::BitAnd(Token ! [&](tokens_helper(f, &_binding_0.spans)))
}
BinOp::BitOr(_binding_0) => BinOp::BitOr(Token ! [|](tokens_helper(f, &_binding_0.spans))),
BinOp::Shl(_binding_0) => BinOp::Shl(Token ! [<<](tokens_helper(f, &_binding_0.spans))),
BinOp::Shr(_binding_0) => BinOp::Shr(Token ! [>>](tokens_helper(f, &_binding_0.spans))),
BinOp::Eq(_binding_0) => BinOp::Eq(Token ! [==](tokens_helper(f, &_binding_0.spans))),
BinOp::Lt(_binding_0) => BinOp::Lt(Token ! [<](tokens_helper(f, &_binding_0.spans))),
BinOp::Le(_binding_0) => BinOp::Le(Token ! [<=](tokens_helper(f, &_binding_0.spans))),
BinOp::Ne(_binding_0) => BinOp::Ne(Token ! [!=](tokens_helper(f, &_binding_0.spans))),
BinOp::Ge(_binding_0) => BinOp::Ge(Token ! [>=](tokens_helper(f, &_binding_0.spans))),
BinOp::Gt(_binding_0) => BinOp::Gt(Token ! [>](tokens_helper(f, &_binding_0.spans))),
BinOp::AddEq(_binding_0) => BinOp::AddEq(Token ! [+=](tokens_helper(f, &_binding_0.spans))),
BinOp::SubEq(_binding_0) => BinOp::SubEq(Token ! [-=](tokens_helper(f, &_binding_0.spans))),
BinOp::MulEq(_binding_0) => BinOp::MulEq(Token ! [*=](tokens_helper(f, &_binding_0.spans))),
BinOp::DivEq(_binding_0) => BinOp::DivEq(Token ! [/=](tokens_helper(f, &_binding_0.spans))),
BinOp::RemEq(_binding_0) => BinOp::RemEq(Token ! [%=](tokens_helper(f, &_binding_0.spans))),
BinOp::BitXorEq(_binding_0) => {
BinOp::BitXorEq(Token ! [ ^= ](tokens_helper(f, &_binding_0.spans)))
BinOp::BitXorEq(Token ! [^=](tokens_helper(f, &_binding_0.spans)))
}
BinOp::BitAndEq(_binding_0) => {
BinOp::BitAndEq(Token ! [ &= ](tokens_helper(f, &_binding_0.spans)))
BinOp::BitAndEq(Token ! [&=](tokens_helper(f, &_binding_0.spans)))
}
BinOp::BitOrEq(_binding_0) => {
BinOp::BitOrEq(Token ! [ |= ](tokens_helper(f, &_binding_0.spans)))
BinOp::BitOrEq(Token ! [|=](tokens_helper(f, &_binding_0.spans)))
}
BinOp::ShlEq(_binding_0) => {
BinOp::ShlEq(Token ! [ <<= ](tokens_helper(f, &_binding_0.spans)))
BinOp::ShlEq(Token ! [<<=](tokens_helper(f, &_binding_0.spans)))
}
BinOp::ShrEq(_binding_0) => {
BinOp::ShrEq(Token ! [ >>= ](tokens_helper(f, &_binding_0.spans)))
BinOp::ShrEq(Token ! [>>=](tokens_helper(f, &_binding_0.spans)))
}
}
}
@ -934,7 +915,7 @@ where
{
Binding {
ident: f.fold_ident(node.ident),
eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
ty: f.fold_type(node.ty),
}
}
@ -955,9 +936,9 @@ where
{
BoundLifetimes {
for_token: Token![for](tokens_helper(f, &node.for_token.span)),
lt_token: Token ! [ < ](tokens_helper(f, &node.lt_token.spans)),
lt_token: Token ! [<](tokens_helper(f, &node.lt_token.spans)),
lifetimes: FoldHelper::lift(node.lifetimes, |it| f.fold_lifetime_def(it)),
gt_token: Token ! [ > ](tokens_helper(f, &node.gt_token.spans)),
gt_token: Token ! [>](tokens_helper(f, &node.gt_token.spans)),
}
}
#[cfg(any(feature = "derive", feature = "full"))]
@ -969,9 +950,9 @@ where
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
const_token: Token![const](tokens_helper(f, &node.const_token.span)),
ident: f.fold_ident(node.ident),
colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
ty: f.fold_type(node.ty),
eq_token: (node.eq_token).map(|it| Token ! [ = ](tokens_helper(f, &it.spans))),
eq_token: (node.eq_token).map(|it| Token ! [=](tokens_helper(f, &it.spans))),
default: (node.default).map(|it| f.fold_expr(it)),
}
}
@ -982,7 +963,7 @@ where
{
Constraint {
ident: f.fold_ident(node.ident),
colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
}
}
@ -1016,7 +997,7 @@ where
DataStruct {
struct_token: Token![struct](tokens_helper(f, &node.struct_token.span)),
fields: f.fold_fields(node.fields),
semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
}
}
#[cfg(feature = "derive")]
@ -1112,7 +1093,7 @@ where
ExprAssign {
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
left: Box::new(f.fold_expr(*node.left)),
eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
right: Box::new(f.fold_expr(*node.right)),
}
}
@ -1148,7 +1129,7 @@ where
ExprAwait {
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
base: Box::new(f.fold_expr(*node.base)),
dot_token: Token ! [ . ](tokens_helper(f, &node.dot_token.spans)),
dot_token: Token ! [.](tokens_helper(f, &node.dot_token.spans)),
await_token: crate::token::Await(tokens_helper(f, &node.await_token.span)),
}
}
@ -1232,9 +1213,9 @@ where
asyncness: (node.asyncness).map(|it| Token![async](tokens_helper(f, &it.span))),
movability: (node.movability).map(|it| Token![static](tokens_helper(f, &it.span))),
capture: (node.capture).map(|it| Token![move](tokens_helper(f, &it.span))),
or1_token: Token ! [ | ](tokens_helper(f, &node.or1_token.spans)),
or1_token: Token ! [|](tokens_helper(f, &node.or1_token.spans)),
inputs: FoldHelper::lift(node.inputs, |it| f.fold_pat(it)),
or2_token: Token ! [ | ](tokens_helper(f, &node.or2_token.spans)),
or2_token: Token ! [|](tokens_helper(f, &node.or2_token.spans)),
output: f.fold_return_type(node.output),
body: Box::new(f.fold_expr(*node.body)),
}
@ -1258,7 +1239,7 @@ where
ExprField {
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
base: Box::new(f.fold_expr(*node.base)),
dot_token: Token ! [ . ](tokens_helper(f, &node.dot_token.spans)),
dot_token: Token ! [.](tokens_helper(f, &node.dot_token.spans)),
member: f.fold_member(node.member),
}
}
@ -1327,7 +1308,7 @@ where
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
let_token: Token![let](tokens_helper(f, &node.let_token.span)),
pat: f.fold_pat(node.pat),
eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
expr: Box::new(f.fold_expr(*node.expr)),
}
}
@ -1384,7 +1365,7 @@ where
ExprMethodCall {
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
receiver: Box::new(f.fold_expr(*node.receiver)),
dot_token: Token ! [ . ](tokens_helper(f, &node.dot_token.spans)),
dot_token: Token ! [.](tokens_helper(f, &node.dot_token.spans)),
method: f.fold_ident(node.method),
turbofish: (node.turbofish).map(|it| f.fold_method_turbofish(it)),
paren_token: Paren(tokens_helper(f, &node.paren_token.span)),
@ -1432,7 +1413,7 @@ where
{
ExprReference {
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
and_token: Token ! [ & ](tokens_helper(f, &node.and_token.spans)),
and_token: Token ! [&](tokens_helper(f, &node.and_token.spans)),
raw: node.raw,
mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
expr: Box::new(f.fold_expr(*node.expr)),
@ -1447,7 +1428,7 @@ where
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
bracket_token: Bracket(tokens_helper(f, &node.bracket_token.span)),
expr: Box::new(f.fold_expr(*node.expr)),
semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
len: Box::new(f.fold_expr(*node.len)),
}
}
@ -1484,7 +1465,7 @@ where
ExprTry {
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
expr: Box::new(f.fold_expr(*node.expr)),
question_token: Token ! [ ? ](tokens_helper(f, &node.question_token.spans)),
question_token: Token ! [?](tokens_helper(f, &node.question_token.spans)),
}
}
#[cfg(feature = "full")]
@ -1517,7 +1498,7 @@ where
ExprType {
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
expr: Box::new(f.fold_expr(*node.expr)),
colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
ty: Box::new(f.fold_type(*node.ty)),
}
}
@ -1576,7 +1557,7 @@ where
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
vis: f.fold_visibility(node.vis),
ident: (node.ident).map(|it| f.fold_ident(it)),
colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
ty: f.fold_type(node.ty),
}
}
@ -1588,7 +1569,7 @@ where
FieldPat {
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
member: f.fold_member(node.member),
colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
pat: Box::new(f.fold_pat(*node.pat)),
}
}
@ -1600,7 +1581,7 @@ where
FieldValue {
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
member: f.fold_member(node.member),
colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
expr: f.fold_expr(node.expr),
}
}
@ -1681,7 +1662,7 @@ where
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
vis: f.fold_visibility(node.vis),
sig: f.fold_signature(node.sig),
semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
}
}
#[cfg(feature = "full")]
@ -1692,7 +1673,7 @@ where
ForeignItemMacro {
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
mac: f.fold_macro(node.mac),
semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
}
}
#[cfg(feature = "full")]
@ -1706,9 +1687,9 @@ where
static_token: Token![static](tokens_helper(f, &node.static_token.span)),
mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
ident: f.fold_ident(node.ident),
colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
ty: Box::new(f.fold_type(*node.ty)),
semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
}
}
#[cfg(feature = "full")]
@ -1721,7 +1702,7 @@ where
vis: f.fold_visibility(node.vis),
type_token: Token![type](tokens_helper(f, &node.type_token.span)),
ident: f.fold_ident(node.ident),
semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
}
}
#[cfg(any(feature = "derive", feature = "full"))]
@ -1779,9 +1760,9 @@ where
F: Fold + ?Sized,
{
Generics {
lt_token: (node.lt_token).map(|it| Token ! [ < ](tokens_helper(f, &it.spans))),
lt_token: (node.lt_token).map(|it| Token ! [<](tokens_helper(f, &it.spans))),
params: FoldHelper::lift(node.params, |it| f.fold_generic_param(it)),
gt_token: (node.gt_token).map(|it| Token ! [ > ](tokens_helper(f, &it.spans))),
gt_token: (node.gt_token).map(|it| Token ! [>](tokens_helper(f, &it.spans))),
where_clause: (node.where_clause).map(|it| f.fold_where_clause(it)),
}
}
@ -1819,11 +1800,11 @@ where
defaultness: (node.defaultness).map(|it| Token![default](tokens_helper(f, &it.span))),
const_token: Token![const](tokens_helper(f, &node.const_token.span)),
ident: f.fold_ident(node.ident),
colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
ty: f.fold_type(node.ty),
eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
expr: f.fold_expr(node.expr),
semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
}
}
#[cfg(feature = "full")]
@ -1834,7 +1815,7 @@ where
ImplItemMacro {
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
mac: f.fold_macro(node.mac),
semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
}
}
#[cfg(feature = "full")]
@ -1862,9 +1843,9 @@ where
type_token: Token![type](tokens_helper(f, &node.type_token.span)),
ident: f.fold_ident(node.ident),
generics: f.fold_generics(node.generics),
eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
ty: f.fold_type(node.ty),
semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
}
}
#[cfg(any(feature = "derive", feature = "full"))]
@ -1913,11 +1894,11 @@ where
vis: f.fold_visibility(node.vis),
const_token: Token![const](tokens_helper(f, &node.const_token.span)),
ident: f.fold_ident(node.ident),
colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
ty: Box::new(f.fold_type(*node.ty)),
eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
expr: Box::new(f.fold_expr(*node.expr)),
semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
}
}
#[cfg(feature = "full")]
@ -1952,7 +1933,7 @@ where
f.fold_ident((it).1),
)
}),
semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
}
}
#[cfg(feature = "full")]
@ -2011,7 +1992,7 @@ where
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
ident: (node.ident).map(|it| f.fold_ident(it)),
mac: f.fold_macro(node.mac),
semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
}
}
#[cfg(feature = "full")]
@ -2043,7 +2024,7 @@ where
FoldHelper::lift((it).1, |it| f.fold_item(it)),
)
}),
semi: (node.semi).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
semi: (node.semi).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
}
}
#[cfg(feature = "full")]
@ -2057,11 +2038,11 @@ where
static_token: Token![static](tokens_helper(f, &node.static_token.span)),
mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
ident: f.fold_ident(node.ident),
colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
ty: Box::new(f.fold_type(*node.ty)),
eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
expr: Box::new(f.fold_expr(*node.expr)),
semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
}
}
#[cfg(feature = "full")]
@ -2076,7 +2057,7 @@ where
ident: f.fold_ident(node.ident),
generics: f.fold_generics(node.generics),
fields: f.fold_fields(node.fields),
semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
}
}
#[cfg(feature = "full")]
@ -2092,7 +2073,7 @@ where
trait_token: Token![trait](tokens_helper(f, &node.trait_token.span)),
ident: f.fold_ident(node.ident),
generics: f.fold_generics(node.generics),
colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
supertraits: FoldHelper::lift(node.supertraits, |it| f.fold_type_param_bound(it)),
brace_token: Brace(tokens_helper(f, &node.brace_token.span)),
items: FoldHelper::lift(node.items, |it| f.fold_trait_item(it)),
@ -2109,9 +2090,9 @@ where
trait_token: Token![trait](tokens_helper(f, &node.trait_token.span)),
ident: f.fold_ident(node.ident),
generics: f.fold_generics(node.generics),
eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
}
}
#[cfg(feature = "full")]
@ -2125,9 +2106,9 @@ where
type_token: Token![type](tokens_helper(f, &node.type_token.span)),
ident: f.fold_ident(node.ident),
generics: f.fold_generics(node.generics),
eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
ty: Box::new(f.fold_type(*node.ty)),
semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
}
}
#[cfg(feature = "full")]
@ -2153,9 +2134,9 @@ where
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
vis: f.fold_visibility(node.vis),
use_token: Token![use](tokens_helper(f, &node.use_token.span)),
leading_colon: (node.leading_colon).map(|it| Token ! [ :: ](tokens_helper(f, &it.spans))),
leading_colon: (node.leading_colon).map(|it| Token ! [::](tokens_helper(f, &it.spans))),
tree: f.fold_use_tree(node.tree),
semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
}
}
#[cfg(feature = "full")]
@ -2165,7 +2146,7 @@ where
{
Label {
name: f.fold_lifetime(node.name),
colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
}
}
pub fn fold_lifetime<F>(f: &mut F, node: Lifetime) -> Lifetime
@ -2185,11 +2166,10 @@ where
LifetimeDef {
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
lifetime: f.fold_lifetime(node.lifetime),
colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
bounds: FoldHelper::lift(node.bounds, |it| f.fold_lifetime(it)),
}
}
#[cfg(any(feature = "derive", feature = "full"))]
pub fn fold_lit<F>(f: &mut F, node: Lit) -> Lit
where
F: Fold + ?Sized,
@ -2205,7 +2185,6 @@ where
Lit::Verbatim(_binding_0) => Lit::Verbatim(_binding_0),
}
}
#[cfg(any(feature = "derive", feature = "full"))]
pub fn fold_lit_bool<F>(f: &mut F, node: LitBool) -> LitBool
where
F: Fold + ?Sized,
@ -2215,7 +2194,6 @@ where
span: f.fold_span(node.span),
}
}
#[cfg(any(feature = "derive", feature = "full"))]
pub fn fold_lit_byte<F>(f: &mut F, node: LitByte) -> LitByte
where
F: Fold + ?Sized,
@ -2225,7 +2203,6 @@ where
node.set_span(span);
node
}
#[cfg(any(feature = "derive", feature = "full"))]
pub fn fold_lit_byte_str<F>(f: &mut F, node: LitByteStr) -> LitByteStr
where
F: Fold + ?Sized,
@ -2235,7 +2212,6 @@ where
node.set_span(span);
node
}
#[cfg(any(feature = "derive", feature = "full"))]
pub fn fold_lit_char<F>(f: &mut F, node: LitChar) -> LitChar
where
F: Fold + ?Sized,
@ -2245,7 +2221,6 @@ where
node.set_span(span);
node
}
#[cfg(any(feature = "derive", feature = "full"))]
pub fn fold_lit_float<F>(f: &mut F, node: LitFloat) -> LitFloat
where
F: Fold + ?Sized,
@ -2255,7 +2230,6 @@ where
node.set_span(span);
node
}
#[cfg(any(feature = "derive", feature = "full"))]
pub fn fold_lit_int<F>(f: &mut F, node: LitInt) -> LitInt
where
F: Fold + ?Sized,
@ -2265,7 +2239,6 @@ where
node.set_span(span);
node
}
#[cfg(any(feature = "derive", feature = "full"))]
pub fn fold_lit_str<F>(f: &mut F, node: LitStr) -> LitStr
where
F: Fold + ?Sized,
@ -2286,11 +2259,11 @@ where
pat: f.fold_pat(node.pat),
init: (node.init).map(|it| {
(
Token ! [ = ](tokens_helper(f, &(it).0.spans)),
Token ! [=](tokens_helper(f, &(it).0.spans)),
Box::new(f.fold_expr(*(it).1)),
)
}),
semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
}
}
#[cfg(any(feature = "derive", feature = "full"))]
@ -2361,7 +2334,7 @@ where
{
MetaNameValue {
path: f.fold_path(node.path),
eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
lit: f.fold_lit(node.lit),
}
}
@ -2371,10 +2344,10 @@ where
F: Fold + ?Sized,
{
MethodTurbofish {
colon2_token: Token ! [ :: ](tokens_helper(f, &node.colon2_token.spans)),
lt_token: Token ! [ < ](tokens_helper(f, &node.lt_token.spans)),
colon2_token: Token ! [::](tokens_helper(f, &node.colon2_token.spans)),
lt_token: Token ! [<](tokens_helper(f, &node.lt_token.spans)),
args: FoldHelper::lift(node.args, |it| f.fold_generic_method_argument(it)),
gt_token: Token ! [ > ](tokens_helper(f, &node.gt_token.spans)),
gt_token: Token ! [>](tokens_helper(f, &node.gt_token.spans)),
}
}
#[cfg(any(feature = "derive", feature = "full"))]
@ -2449,7 +2422,7 @@ where
ident: f.fold_ident(node.ident),
subpat: (node.subpat).map(|it| {
(
Token ! [ @ ](tokens_helper(f, &(it).0.spans)),
Token ! [@](tokens_helper(f, &(it).0.spans)),
Box::new(f.fold_pat(*(it).1)),
)
}),
@ -2482,7 +2455,7 @@ where
{
PatOr {
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
leading_vert: (node.leading_vert).map(|it| Token ! [ | ](tokens_helper(f, &it.spans))),
leading_vert: (node.leading_vert).map(|it| Token ! [|](tokens_helper(f, &it.spans))),
cases: FoldHelper::lift(node.cases, |it| f.fold_pat(it)),
}
}
@ -2516,7 +2489,7 @@ where
{
PatReference {
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
and_token: Token ! [ & ](tokens_helper(f, &node.and_token.spans)),
and_token: Token ! [&](tokens_helper(f, &node.and_token.spans)),
mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
pat: Box::new(f.fold_pat(*node.pat)),
}
@ -2585,7 +2558,7 @@ where
PatType {
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
pat: Box::new(f.fold_pat(*node.pat)),
colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
ty: Box::new(f.fold_type(*node.ty)),
}
}
@ -2605,7 +2578,7 @@ where
F: Fold + ?Sized,
{
Path {
leading_colon: (node.leading_colon).map(|it| Token ! [ :: ](tokens_helper(f, &it.spans))),
leading_colon: (node.leading_colon).map(|it| Token ! [::](tokens_helper(f, &it.spans))),
segments: FoldHelper::lift(node.segments, |it| f.fold_path_segment(it)),
}
}
@ -2641,7 +2614,7 @@ where
{
PredicateEq {
lhs_ty: f.fold_type(node.lhs_ty),
eq_token: Token ! [ = ](tokens_helper(f, &node.eq_token.spans)),
eq_token: Token ! [=](tokens_helper(f, &node.eq_token.spans)),
rhs_ty: f.fold_type(node.rhs_ty),
}
}
@ -2652,7 +2625,7 @@ where
{
PredicateLifetime {
lifetime: f.fold_lifetime(node.lifetime),
colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
bounds: FoldHelper::lift(node.bounds, |it| f.fold_lifetime(it)),
}
}
@ -2664,7 +2637,7 @@ where
PredicateType {
lifetimes: (node.lifetimes).map(|it| f.fold_bound_lifetimes(it)),
bounded_ty: f.fold_type(node.bounded_ty),
colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
}
}
@ -2674,11 +2647,11 @@ where
F: Fold + ?Sized,
{
QSelf {
lt_token: Token ! [ < ](tokens_helper(f, &node.lt_token.spans)),
lt_token: Token ! [<](tokens_helper(f, &node.lt_token.spans)),
ty: Box::new(f.fold_type(*node.ty)),
position: node.position,
as_token: (node.as_token).map(|it| Token![as](tokens_helper(f, &it.span))),
gt_token: Token ! [ > ](tokens_helper(f, &node.gt_token.spans)),
gt_token: Token ! [>](tokens_helper(f, &node.gt_token.spans)),
}
}
#[cfg(feature = "full")]
@ -2691,7 +2664,7 @@ where
RangeLimits::HalfOpen(Token![..](tokens_helper(f, &_binding_0.spans)))
}
RangeLimits::Closed(_binding_0) => {
RangeLimits::Closed(Token ! [ ..= ](tokens_helper(f, &_binding_0.spans)))
RangeLimits::Closed(Token ! [..=](tokens_helper(f, &_binding_0.spans)))
}
}
}
@ -2704,7 +2677,7 @@ where
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
reference: (node.reference).map(|it| {
(
Token ! [ & ](tokens_helper(f, &(it).0.spans)),
Token ! [&](tokens_helper(f, &(it).0.spans)),
((it).1).map(|it| f.fold_lifetime(it)),
)
}),
@ -2720,7 +2693,7 @@ where
match node {
ReturnType::Default => ReturnType::Default,
ReturnType::Type(_binding_0, _binding_1) => ReturnType::Type(
Token ! [ -> ](tokens_helper(f, &_binding_0.spans)),
Token ! [->](tokens_helper(f, &_binding_0.spans)),
Box::new(f.fold_type(*_binding_1)),
),
}
@ -2761,7 +2734,7 @@ where
Stmt::Expr(_binding_0) => Stmt::Expr(f.fold_expr(_binding_0)),
Stmt::Semi(_binding_0, _binding_1) => Stmt::Semi(
f.fold_expr(_binding_0),
Token ! [ ; ](tokens_helper(f, &_binding_1.spans)),
Token ! [;](tokens_helper(f, &_binding_1.spans)),
),
}
}
@ -2785,7 +2758,7 @@ where
match node {
TraitBoundModifier::None => TraitBoundModifier::None,
TraitBoundModifier::Maybe(_binding_0) => {
TraitBoundModifier::Maybe(Token ! [ ? ](tokens_helper(f, &_binding_0.spans)))
TraitBoundModifier::Maybe(Token ! [?](tokens_helper(f, &_binding_0.spans)))
}
}
}
@ -2812,15 +2785,15 @@ where
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
const_token: Token![const](tokens_helper(f, &node.const_token.span)),
ident: f.fold_ident(node.ident),
colon_token: Token ! [ : ](tokens_helper(f, &node.colon_token.spans)),
colon_token: Token ! [:](tokens_helper(f, &node.colon_token.spans)),
ty: f.fold_type(node.ty),
default: (node.default).map(|it| {
(
Token ! [ = ](tokens_helper(f, &(it).0.spans)),
Token ! [=](tokens_helper(f, &(it).0.spans)),
f.fold_expr((it).1),
)
}),
semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
}
}
#[cfg(feature = "full")]
@ -2831,7 +2804,7 @@ where
TraitItemMacro {
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
mac: f.fold_macro(node.mac),
semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
}
}
#[cfg(feature = "full")]
@ -2843,7 +2816,7 @@ where
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
sig: f.fold_signature(node.sig),
default: (node.default).map(|it| f.fold_block(it)),
semi_token: (node.semi_token).map(|it| Token ! [ ; ](tokens_helper(f, &it.spans))),
semi_token: (node.semi_token).map(|it| Token ! [;](tokens_helper(f, &it.spans))),
}
}
#[cfg(feature = "full")]
@ -2856,15 +2829,15 @@ where
type_token: Token![type](tokens_helper(f, &node.type_token.span)),
ident: f.fold_ident(node.ident),
generics: f.fold_generics(node.generics),
colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
default: (node.default).map(|it| {
(
Token ! [ = ](tokens_helper(f, &(it).0.spans)),
Token ! [=](tokens_helper(f, &(it).0.spans)),
f.fold_type((it).1),
)
}),
semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
}
}
#[cfg(any(feature = "derive", feature = "full"))]
@ -2899,7 +2872,7 @@ where
TypeArray {
bracket_token: Bracket(tokens_helper(f, &node.bracket_token.span)),
elem: Box::new(f.fold_type(*node.elem)),
semi_token: Token ! [ ; ](tokens_helper(f, &node.semi_token.spans)),
semi_token: Token ! [;](tokens_helper(f, &node.semi_token.spans)),
len: f.fold_expr(node.len),
}
}
@ -2974,9 +2947,9 @@ where
TypeParam {
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
ident: f.fold_ident(node.ident),
colon_token: (node.colon_token).map(|it| Token ! [ : ](tokens_helper(f, &it.spans))),
colon_token: (node.colon_token).map(|it| Token ! [:](tokens_helper(f, &it.spans))),
bounds: FoldHelper::lift(node.bounds, |it| f.fold_type_param_bound(it)),
eq_token: (node.eq_token).map(|it| Token ! [ = ](tokens_helper(f, &it.spans))),
eq_token: (node.eq_token).map(|it| Token ! [=](tokens_helper(f, &it.spans))),
default: (node.default).map(|it| f.fold_type(it)),
}
}
@ -3018,7 +2991,7 @@ where
F: Fold + ?Sized,
{
TypePtr {
star_token: Token ! [ * ](tokens_helper(f, &node.star_token.spans)),
star_token: Token ! [*](tokens_helper(f, &node.star_token.spans)),
const_token: (node.const_token).map(|it| Token![const](tokens_helper(f, &it.span))),
mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
elem: Box::new(f.fold_type(*node.elem)),
@ -3030,7 +3003,7 @@ where
F: Fold + ?Sized,
{
TypeReference {
and_token: Token ! [ & ](tokens_helper(f, &node.and_token.spans)),
and_token: Token ! [&](tokens_helper(f, &node.and_token.spans)),
lifetime: (node.lifetime).map(|it| f.fold_lifetime(it)),
mutability: (node.mutability).map(|it| Token![mut](tokens_helper(f, &it.span))),
elem: Box::new(f.fold_type(*node.elem)),
@ -3072,9 +3045,9 @@ where
F: Fold + ?Sized,
{
match node {
UnOp::Deref(_binding_0) => UnOp::Deref(Token ! [ * ](tokens_helper(f, &_binding_0.spans))),
UnOp::Deref(_binding_0) => UnOp::Deref(Token ! [*](tokens_helper(f, &_binding_0.spans))),
UnOp::Not(_binding_0) => UnOp::Not(Token![!](tokens_helper(f, &_binding_0.spans))),
UnOp::Neg(_binding_0) => UnOp::Neg(Token ! [ - ](tokens_helper(f, &_binding_0.spans))),
UnOp::Neg(_binding_0) => UnOp::Neg(Token ! [-](tokens_helper(f, &_binding_0.spans))),
}
}
#[cfg(feature = "full")]
@ -3083,7 +3056,7 @@ where
F: Fold + ?Sized,
{
UseGlob {
star_token: Token ! [ * ](tokens_helper(f, &node.star_token.spans)),
star_token: Token ! [*](tokens_helper(f, &node.star_token.spans)),
}
}
#[cfg(feature = "full")]
@ -3112,7 +3085,7 @@ where
{
UsePath {
ident: f.fold_ident(node.ident),
colon2_token: Token ! [ :: ](tokens_helper(f, &node.colon2_token.spans)),
colon2_token: Token ! [::](tokens_helper(f, &node.colon2_token.spans)),
tree: Box::new(f.fold_use_tree(*node.tree)),
}
}
@ -3147,7 +3120,7 @@ where
{
Variadic {
attrs: FoldHelper::lift(node.attrs, |it| f.fold_attribute(it)),
dots: Token ! [ ... ](tokens_helper(f, &node.dots.spans)),
dots: Token ! [...](tokens_helper(f, &node.dots.spans)),
}
}
#[cfg(any(feature = "derive", feature = "full"))]
@ -3161,7 +3134,7 @@ where
fields: f.fold_fields(node.fields),
discriminant: (node.discriminant).map(|it| {
(
Token ! [ = ](tokens_helper(f, &(it).0.spans)),
Token ! [=](tokens_helper(f, &(it).0.spans)),
f.fold_expr((it).1),
)
}),

2691
third_party/rust/syn/src/gen/hash.rs поставляемый Normal file

Разница между файлами не показана из-за своего большого размера Загрузить разницу

19
third_party/rust/syn/src/gen/visit.rs поставляемый
Просмотреть файл

@ -20,7 +20,6 @@ macro_rules! full {
unreachable!()
};
}
#[cfg(any(feature = "full", feature = "derive"))]
macro_rules! skip {
($($tt:tt)*) => {};
}
@ -30,7 +29,7 @@ macro_rules! skip {
///
/// [module documentation]: self
///
/// *This trait is available if Syn is built with the `"visit"` feature.*
/// *This trait is available only if Syn is built with the `"visit"` feature.*
pub trait Visit<'ast> {
#[cfg(any(feature = "derive", feature = "full"))]
fn visit_abi(&mut self, i: &'ast Abi) {
@ -434,35 +433,27 @@ pub trait Visit<'ast> {
fn visit_lifetime_def(&mut self, i: &'ast LifetimeDef) {
visit_lifetime_def(self, i)
}
#[cfg(any(feature = "derive", feature = "full"))]
fn visit_lit(&mut self, i: &'ast Lit) {
visit_lit(self, i)
}
#[cfg(any(feature = "derive", feature = "full"))]
fn visit_lit_bool(&mut self, i: &'ast LitBool) {
visit_lit_bool(self, i)
}
#[cfg(any(feature = "derive", feature = "full"))]
fn visit_lit_byte(&mut self, i: &'ast LitByte) {
visit_lit_byte(self, i)
}
#[cfg(any(feature = "derive", feature = "full"))]
fn visit_lit_byte_str(&mut self, i: &'ast LitByteStr) {
visit_lit_byte_str(self, i)
}
#[cfg(any(feature = "derive", feature = "full"))]
fn visit_lit_char(&mut self, i: &'ast LitChar) {
visit_lit_char(self, i)
}
#[cfg(any(feature = "derive", feature = "full"))]
fn visit_lit_float(&mut self, i: &'ast LitFloat) {
visit_lit_float(self, i)
}
#[cfg(any(feature = "derive", feature = "full"))]
fn visit_lit_int(&mut self, i: &'ast LitInt) {
visit_lit_int(self, i)
}
#[cfg(any(feature = "derive", feature = "full"))]
fn visit_lit_str(&mut self, i: &'ast LitStr) {
visit_lit_str(self, i)
}
@ -2537,7 +2528,6 @@ where
}
}
}
#[cfg(any(feature = "derive", feature = "full"))]
pub fn visit_lit<'ast, V>(v: &mut V, node: &'ast Lit)
where
V: Visit<'ast> + ?Sized,
@ -2569,7 +2559,6 @@ where
}
}
}
#[cfg(any(feature = "derive", feature = "full"))]
pub fn visit_lit_bool<'ast, V>(v: &mut V, node: &'ast LitBool)
where
V: Visit<'ast> + ?Sized,
@ -2577,37 +2566,31 @@ where
skip!(node.value);
v.visit_span(&node.span);
}
#[cfg(any(feature = "derive", feature = "full"))]
pub fn visit_lit_byte<'ast, V>(v: &mut V, node: &'ast LitByte)
where
V: Visit<'ast> + ?Sized,
{
}
#[cfg(any(feature = "derive", feature = "full"))]
pub fn visit_lit_byte_str<'ast, V>(v: &mut V, node: &'ast LitByteStr)
where
V: Visit<'ast> + ?Sized,
{
}
#[cfg(any(feature = "derive", feature = "full"))]
pub fn visit_lit_char<'ast, V>(v: &mut V, node: &'ast LitChar)
where
V: Visit<'ast> + ?Sized,
{
}
#[cfg(any(feature = "derive", feature = "full"))]
pub fn visit_lit_float<'ast, V>(v: &mut V, node: &'ast LitFloat)
where
V: Visit<'ast> + ?Sized,
{
}
#[cfg(any(feature = "derive", feature = "full"))]
pub fn visit_lit_int<'ast, V>(v: &mut V, node: &'ast LitInt)
where
V: Visit<'ast> + ?Sized,
{
}
#[cfg(any(feature = "derive", feature = "full"))]
pub fn visit_lit_str<'ast, V>(v: &mut V, node: &'ast LitStr)
where
V: Visit<'ast> + ?Sized,

19
third_party/rust/syn/src/gen/visit_mut.rs поставляемый
Просмотреть файл

@ -20,7 +20,6 @@ macro_rules! full {
unreachable!()
};
}
#[cfg(any(feature = "full", feature = "derive"))]
macro_rules! skip {
($($tt:tt)*) => {};
}
@ -31,7 +30,7 @@ macro_rules! skip {
///
/// [module documentation]: self
///
/// *This trait is available if Syn is built with the `"visit-mut"` feature.*
/// *This trait is available only if Syn is built with the `"visit-mut"` feature.*
pub trait VisitMut {
#[cfg(any(feature = "derive", feature = "full"))]
fn visit_abi_mut(&mut self, i: &mut Abi) {
@ -438,35 +437,27 @@ pub trait VisitMut {
fn visit_lifetime_def_mut(&mut self, i: &mut LifetimeDef) {
visit_lifetime_def_mut(self, i)
}
#[cfg(any(feature = "derive", feature = "full"))]
fn visit_lit_mut(&mut self, i: &mut Lit) {
visit_lit_mut(self, i)
}
#[cfg(any(feature = "derive", feature = "full"))]
fn visit_lit_bool_mut(&mut self, i: &mut LitBool) {
visit_lit_bool_mut(self, i)
}
#[cfg(any(feature = "derive", feature = "full"))]
fn visit_lit_byte_mut(&mut self, i: &mut LitByte) {
visit_lit_byte_mut(self, i)
}
#[cfg(any(feature = "derive", feature = "full"))]
fn visit_lit_byte_str_mut(&mut self, i: &mut LitByteStr) {
visit_lit_byte_str_mut(self, i)
}
#[cfg(any(feature = "derive", feature = "full"))]
fn visit_lit_char_mut(&mut self, i: &mut LitChar) {
visit_lit_char_mut(self, i)
}
#[cfg(any(feature = "derive", feature = "full"))]
fn visit_lit_float_mut(&mut self, i: &mut LitFloat) {
visit_lit_float_mut(self, i)
}
#[cfg(any(feature = "derive", feature = "full"))]
fn visit_lit_int_mut(&mut self, i: &mut LitInt) {
visit_lit_int_mut(self, i)
}
#[cfg(any(feature = "derive", feature = "full"))]
fn visit_lit_str_mut(&mut self, i: &mut LitStr) {
visit_lit_str_mut(self, i)
}
@ -2543,7 +2534,6 @@ where
}
}
}
#[cfg(any(feature = "derive", feature = "full"))]
pub fn visit_lit_mut<V>(v: &mut V, node: &mut Lit)
where
V: VisitMut + ?Sized,
@ -2575,7 +2565,6 @@ where
}
}
}
#[cfg(any(feature = "derive", feature = "full"))]
pub fn visit_lit_bool_mut<V>(v: &mut V, node: &mut LitBool)
where
V: VisitMut + ?Sized,
@ -2583,37 +2572,31 @@ where
skip!(node.value);
v.visit_span_mut(&mut node.span);
}
#[cfg(any(feature = "derive", feature = "full"))]
pub fn visit_lit_byte_mut<V>(v: &mut V, node: &mut LitByte)
where
V: VisitMut + ?Sized,
{
}
#[cfg(any(feature = "derive", feature = "full"))]
pub fn visit_lit_byte_str_mut<V>(v: &mut V, node: &mut LitByteStr)
where
V: VisitMut + ?Sized,
{
}
#[cfg(any(feature = "derive", feature = "full"))]
pub fn visit_lit_char_mut<V>(v: &mut V, node: &mut LitChar)
where
V: VisitMut + ?Sized,
{
}
#[cfg(any(feature = "derive", feature = "full"))]
pub fn visit_lit_float_mut<V>(v: &mut V, node: &mut LitFloat)
where
V: VisitMut + ?Sized,
{
}
#[cfg(any(feature = "derive", feature = "full"))]
pub fn visit_lit_int_mut<V>(v: &mut V, node: &mut LitInt)
where
V: VisitMut + ?Sized,
{
}
#[cfg(any(feature = "derive", feature = "full"))]
pub fn visit_lit_str_mut<V>(v: &mut V, node: &mut LitStr)
where
V: VisitMut + ?Sized,

259
third_party/rust/syn/src/generics.rs поставляемый
Просмотреть файл

@ -1,13 +1,16 @@
use super::*;
use crate::punctuated::{Iter, IterMut, Punctuated};
#[cfg(all(feature = "printing", feature = "extra-traits"))]
use std::fmt::{self, Debug};
#[cfg(all(feature = "printing", feature = "extra-traits"))]
use std::hash::{Hash, Hasher};
ast_struct! {
/// Lifetimes and type parameters attached to a declaration of a function,
/// enum, trait, etc.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[derive(Default)]
pub struct Generics {
pub lt_token: Option<Token![<]>,
pub params: Punctuated<GenericParam, Token![,]>,
@ -20,7 +23,7 @@ ast_enum_of_structs! {
/// A generic type parameter, lifetime, or const generic: `T: Into<String>`,
/// `'a: 'b`, `const LEN: usize`.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
///
/// # Syntax tree enum
@ -28,9 +31,6 @@ ast_enum_of_structs! {
/// This type is a [syntax tree enum].
///
/// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
//
// TODO: change syntax-tree-enum link to an intra rustdoc link, currently
// blocked on https://github.com/rust-lang/rust/issues/62833
pub enum GenericParam {
/// A generic type parameter: `T: Into<String>`.
Type(TypeParam),
@ -46,7 +46,7 @@ ast_enum_of_structs! {
ast_struct! {
/// A generic type parameter: `T: Into<String>`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub struct TypeParam {
pub attrs: Vec<Attribute>,
@ -61,7 +61,7 @@ ast_struct! {
ast_struct! {
/// A lifetime definition: `'a: 'b + 'c + 'd`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub struct LifetimeDef {
pub attrs: Vec<Attribute>,
@ -74,7 +74,7 @@ ast_struct! {
ast_struct! {
/// A const generic parameter: `const LENGTH: usize`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub struct ConstParam {
pub attrs: Vec<Attribute>,
@ -87,6 +87,17 @@ ast_struct! {
}
}
impl Default for Generics {
fn default() -> Self {
Generics {
lt_token: None,
params: Punctuated::new(),
gt_token: None,
where_clause: None,
}
}
}
impl Generics {
/// Returns an
/// <code
@ -280,29 +291,23 @@ impl<'a> Iterator for ConstParamsMut<'a> {
/// Returned by `Generics::split_for_impl`.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature and the `"printing"` feature.*
#[cfg(feature = "printing")]
#[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
#[cfg_attr(feature = "clone-impls", derive(Clone))]
pub struct ImplGenerics<'a>(&'a Generics);
/// Returned by `Generics::split_for_impl`.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature and the `"printing"` feature.*
#[cfg(feature = "printing")]
#[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
#[cfg_attr(feature = "clone-impls", derive(Clone))]
pub struct TypeGenerics<'a>(&'a Generics);
/// Returned by `TypeGenerics::as_turbofish`.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature and the `"printing"` feature.*
#[cfg(feature = "printing")]
#[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
#[cfg_attr(feature = "clone-impls", derive(Clone))]
pub struct Turbofish<'a>(&'a Generics);
#[cfg(feature = "printing")]
@ -314,9 +319,8 @@ impl Generics {
/// # use proc_macro2::{Span, Ident};
/// # use quote::quote;
/// #
/// # fn main() {
/// # let generics: syn::Generics = Default::default();
/// # let name = Ident::new("MyType", Span::call_site());
/// # let generics: syn::Generics = Default::default();
/// # let name = Ident::new("MyType", Span::call_site());
/// #
/// let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
/// quote! {
@ -324,11 +328,10 @@ impl Generics {
/// // ...
/// }
/// }
/// # ;
/// # }
/// # ;
/// ```
///
/// *This method is available if Syn is built with the `"derive"` or
/// *This method is available only if Syn is built with the `"derive"` or
/// `"full"` feature and the `"printing"` feature.*
pub fn split_for_impl(&self) -> (ImplGenerics, TypeGenerics, Option<&WhereClause>) {
(
@ -339,11 +342,57 @@ impl Generics {
}
}
#[cfg(feature = "printing")]
macro_rules! generics_wrapper_impls {
($ty:ident) => {
#[cfg(feature = "clone-impls")]
impl<'a> Clone for $ty<'a> {
fn clone(&self) -> Self {
$ty(self.0)
}
}
#[cfg(feature = "extra-traits")]
impl<'a> Debug for $ty<'a> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter
.debug_tuple(stringify!($ty))
.field(self.0)
.finish()
}
}
#[cfg(feature = "extra-traits")]
impl<'a> Eq for $ty<'a> {}
#[cfg(feature = "extra-traits")]
impl<'a> PartialEq for $ty<'a> {
fn eq(&self, other: &Self) -> bool {
self.0 == other.0
}
}
#[cfg(feature = "extra-traits")]
impl<'a> Hash for $ty<'a> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.0.hash(state);
}
}
};
}
#[cfg(feature = "printing")]
generics_wrapper_impls!(ImplGenerics);
#[cfg(feature = "printing")]
generics_wrapper_impls!(TypeGenerics);
#[cfg(feature = "printing")]
generics_wrapper_impls!(Turbofish);
#[cfg(feature = "printing")]
impl<'a> TypeGenerics<'a> {
/// Turn a type's generics like `<X, Y>` into a turbofish like `::<X, Y>`.
///
/// *This method is available if Syn is built with the `"derive"` or
/// *This method is available only if Syn is built with the `"derive"` or
/// `"full"` feature and the `"printing"` feature.*
pub fn as_turbofish(&self) -> Turbofish {
Turbofish(self.0)
@ -353,9 +402,8 @@ impl<'a> TypeGenerics<'a> {
ast_struct! {
/// A set of bound lifetimes: `for<'a, 'b, 'c>`.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[derive(Default)]
pub struct BoundLifetimes {
pub for_token: Token![for],
pub lt_token: Token![<],
@ -364,6 +412,17 @@ ast_struct! {
}
}
impl Default for BoundLifetimes {
fn default() -> Self {
BoundLifetimes {
for_token: Default::default(),
lt_token: Default::default(),
lifetimes: Punctuated::new(),
gt_token: Default::default(),
}
}
}
impl LifetimeDef {
pub fn new(lifetime: Lifetime) -> Self {
LifetimeDef {
@ -391,7 +450,7 @@ impl From<Ident> for TypeParam {
ast_enum_of_structs! {
/// A trait or lifetime used as a bound on a type parameter.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
pub enum TypeParamBound {
Trait(TraitBound),
@ -402,7 +461,7 @@ ast_enum_of_structs! {
ast_struct! {
/// A trait used as a bound on a type parameter.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
pub struct TraitBound {
pub paren_token: Option<token::Paren>,
@ -418,9 +477,8 @@ ast_enum! {
/// A modifier on a trait bound, currently only used for the `?` in
/// `?Sized`.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[cfg_attr(feature = "clone-impls", derive(Copy))]
pub enum TraitBoundModifier {
None,
Maybe(Token![?]),
@ -431,7 +489,7 @@ ast_struct! {
/// A `where` clause in a definition: `where T: Deserialize<'de>, D:
/// 'static`.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
pub struct WhereClause {
pub where_token: Token![where],
@ -442,7 +500,7 @@ ast_struct! {
ast_enum_of_structs! {
/// A single predicate in a `where` clause: `T: Deserialize<'de>`.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
///
/// # Syntax tree enum
@ -450,9 +508,6 @@ ast_enum_of_structs! {
/// This type is a [syntax tree enum].
///
/// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
//
// TODO: change syntax-tree-enum link to an intra rustdoc link, currently
// blocked on https://github.com/rust-lang/rust/issues/62833
pub enum WherePredicate {
/// A type predicate in a `where` clause: `for<'c> Foo<'c>: Trait<'c>`.
Type(PredicateType),
@ -468,7 +523,7 @@ ast_enum_of_structs! {
ast_struct! {
/// A type predicate in a `where` clause: `for<'c> Foo<'c>: Trait<'c>`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub struct PredicateType {
/// Any lifetimes from a `for` binding
@ -484,7 +539,7 @@ ast_struct! {
ast_struct! {
/// A lifetime predicate in a `where` clause: `'a: 'b + 'c`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub struct PredicateLifetime {
pub lifetime: Lifetime,
@ -496,7 +551,7 @@ ast_struct! {
ast_struct! {
/// An equality predicate in a `where` clause (unsupported).
///
/// *This type is available if Syn is built with the `"derive"` or
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub struct PredicateEq {
pub lhs_ty: Type,
@ -521,7 +576,6 @@ pub mod parsing {
let mut params = Punctuated::new();
let mut allow_lifetime_param = true;
let mut allow_type_param = true;
loop {
if input.peek(Token![>]) {
break;
@ -534,7 +588,7 @@ pub mod parsing {
attrs,
..input.parse()?
}));
} else if allow_type_param && lookahead.peek(Ident) {
} else if lookahead.peek(Ident) {
allow_lifetime_param = false;
params.push_value(GenericParam::Type(TypeParam {
attrs,
@ -542,7 +596,6 @@ pub mod parsing {
}));
} else if lookahead.peek(Token![const]) {
allow_lifetime_param = false;
allow_type_param = false;
params.push_value(GenericParam::Const(ConstParam {
attrs,
..input.parse()?
@ -665,57 +718,53 @@ pub mod parsing {
impl Parse for TypeParam {
fn parse(input: ParseStream) -> Result<Self> {
let has_colon;
let has_default;
let attrs = input.call(Attribute::parse_outer)?;
let ident: Ident = input.parse()?;
let colon_token: Option<Token![:]> = input.parse()?;
let begin_bound = input.fork();
let mut is_maybe_const = false;
let mut bounds = Punctuated::new();
if colon_token.is_some() {
loop {
if input.peek(Token![,]) || input.peek(Token![>]) || input.peek(Token![=]) {
break;
}
if input.peek(Token![?]) && input.peek2(Token![const]) {
input.parse::<Token![?]>()?;
input.parse::<Token![const]>()?;
is_maybe_const = true;
}
let value: TypeParamBound = input.parse()?;
bounds.push_value(value);
if !input.peek(Token![+]) {
break;
}
let punct: Token![+] = input.parse()?;
bounds.push_punct(punct);
}
}
let mut eq_token: Option<Token![=]> = input.parse()?;
let mut default = if eq_token.is_some() {
Some(input.parse::<Type>()?)
} else {
None
};
if is_maybe_const {
bounds.clear();
eq_token = None;
default = Some(Type::Verbatim(verbatim::between(begin_bound, input)));
}
Ok(TypeParam {
attrs: input.call(Attribute::parse_outer)?,
ident: input.parse()?,
colon_token: {
if input.peek(Token![:]) {
has_colon = true;
Some(input.parse()?)
} else {
has_colon = false;
None
}
},
bounds: {
let mut bounds = Punctuated::new();
if has_colon {
loop {
if input.peek(Token![,])
|| input.peek(Token![>])
|| input.peek(Token![=])
{
break;
}
let value = input.parse()?;
bounds.push_value(value);
if !input.peek(Token![+]) {
break;
}
let punct = input.parse()?;
bounds.push_punct(punct);
}
}
bounds
},
eq_token: {
if input.peek(Token![=]) {
has_default = true;
Some(input.parse()?)
} else {
has_default = false;
None
}
},
default: {
if has_default {
Some(input.parse()?)
} else {
None
}
},
attrs,
ident,
colon_token,
bounds,
eq_token,
default,
})
}
}
@ -898,6 +947,8 @@ mod printing {
use super::*;
use proc_macro2::TokenStream;
#[cfg(feature = "full")]
use proc_macro2::TokenTree;
use quote::{ToTokens, TokenStreamExt};
use crate::attr::FilterAttrs;
@ -1080,9 +1131,25 @@ mod printing {
TokensOrDefault(&self.colon_token).to_tokens(tokens);
self.bounds.to_tokens(tokens);
}
if self.default.is_some() {
if let Some(default) = &self.default {
#[cfg(feature = "full")]
{
if self.eq_token.is_none() {
if let Type::Verbatim(default) = default {
let mut iter = default.clone().into_iter();
match (iter.next(), iter.next()) {
(Some(TokenTree::Punct(ref q)), Some(TokenTree::Ident(ref c)))
if q.as_char() == '?' && c == "const" =>
{
return default.to_tokens(tokens);
}
_ => {}
}
}
}
}
TokensOrDefault(&self.eq_token).to_tokens(tokens);
self.default.to_tokens(tokens);
default.to_tokens(tokens);
}
}
}
@ -1117,9 +1184,9 @@ mod printing {
self.ident.to_tokens(tokens);
self.colon_token.to_tokens(tokens);
self.ty.to_tokens(tokens);
if self.default.is_some() {
if let Some(default) = &self.default {
TokensOrDefault(&self.eq_token).to_tokens(tokens);
self.default.to_tokens(tokens);
default.to_tokens(tokens);
}
}
}

1513
third_party/rust/syn/src/item.rs поставляемый

Разница между файлами не показана из-за своего большого размера Загрузить разницу

0
third_party/rust/syn/src/keyword.rs поставляемый
Просмотреть файл

109
third_party/rust/syn/src/lib.rs поставляемый
Просмотреть файл

@ -1,3 +1,11 @@
//! [![github]](https://github.com/dtolnay/syn)&ensp;[![crates-io]](https://crates.io/crates/syn)&ensp;[![docs-rs]](https://docs.rs/syn)
//!
//! [github]: https://img.shields.io/badge/github-8da0cb?style=for-the-badge&labelColor=555555&logo=github
//! [crates-io]: https://img.shields.io/badge/crates.io-fc8d62?style=for-the-badge&labelColor=555555&logo=rust
//! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logoColor=white&logo=data:image/svg+xml;base64,PHN2ZyByb2xlPSJpbWciIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDUxMiA1MTIiPjxwYXRoIGZpbGw9IiNmNWY1ZjUiIGQ9Ik00ODguNiAyNTAuMkwzOTIgMjE0VjEwNS41YzAtMTUtOS4zLTI4LjQtMjMuNC0zMy43bC0xMDAtMzcuNWMtOC4xLTMuMS0xNy4xLTMuMS0yNS4zIDBsLTEwMCAzNy41Yy0xNC4xIDUuMy0yMy40IDE4LjctMjMuNCAzMy43VjIxNGwtOTYuNiAzNi4yQzkuMyAyNTUuNSAwIDI2OC45IDAgMjgzLjlWMzk0YzAgMTMuNiA3LjcgMjYuMSAxOS45IDMyLjJsMTAwIDUwYzEwLjEgNS4xIDIyLjEgNS4xIDMyLjIgMGwxMDMuOS01MiAxMDMuOSA1MmMxMC4xIDUuMSAyMi4xIDUuMSAzMi4yIDBsMTAwLTUwYzEyLjItNi4xIDE5LjktMTguNiAxOS45LTMyLjJWMjgzLjljMC0xNS05LjMtMjguNC0yMy40LTMzLjd6TTM1OCAyMTQuOGwtODUgMzEuOXYtNjguMmw4NS0zN3Y3My4zek0xNTQgMTA0LjFsMTAyLTM4LjIgMTAyIDM4LjJ2LjZsLTEwMiA0MS40LTEwMi00MS40di0uNnptODQgMjkxLjFsLTg1IDQyLjV2LTc5LjFsODUtMzguOHY3NS40em0wLTExMmwtMTAyIDQxLjQtMTAyLTQxLjR2LS42bDEwMi0zOC4yIDEwMiAzOC4ydi42em0yNDAgMTEybC04NSA0Mi41di03OS4xbDg1LTM4Ljh2NzUuNHptMC0xMTJsLTEwMiA0MS40LTEwMi00MS40di0uNmwxMDItMzguMiAxMDIgMzguMnYuNnoiPjwvcGF0aD48L3N2Zz4K
//!
//! <br>
//!
//! Syn is a parsing library for parsing a stream of Rust tokens into a syntax
//! tree of Rust source code.
//!
@ -62,8 +70,8 @@
//! ```
//!
//! ```
//! extern crate proc_macro;
//!
//! # extern crate proc_macro;
//! #
//! use proc_macro::TokenStream;
//! use quote::quote;
//! use syn::{parse_macro_input, DeriveInput};
@ -242,35 +250,48 @@
//! dynamic library libproc_macro from rustc toolchain.
// Syn types in rustdoc of other crates get linked to here.
#![doc(html_root_url = "https://docs.rs/syn/1.0.5")]
#![doc(html_root_url = "https://docs.rs/syn/1.0.40")]
#![deny(clippy::all, clippy::pedantic)]
// Ignored clippy lints.
#![allow(
clippy::block_in_if_condition_stmt,
clippy::blocks_in_if_conditions,
clippy::cognitive_complexity,
clippy::doc_markdown,
clippy::eval_order_dependence,
clippy::inherent_to_string,
clippy::large_enum_variant,
clippy::manual_non_exhaustive,
clippy::match_like_matches_macro,
clippy::match_on_vec_items,
clippy::needless_doctest_main,
clippy::needless_pass_by_value,
clippy::never_loop,
clippy::suspicious_op_assign_impl,
clippy::too_many_arguments,
clippy::trivially_copy_pass_by_ref
clippy::trivially_copy_pass_by_ref,
clippy::unnecessary_unwrap
)]
// Ignored clippy_pedantic lints.
#![allow(
clippy::cast_possible_truncation,
clippy::default_trait_access,
clippy::empty_enum,
clippy::expl_impl_clone_on_copy,
clippy::if_not_else,
clippy::items_after_statements,
clippy::match_same_arms,
clippy::missing_errors_doc,
clippy::module_name_repetitions,
clippy::must_use_candidate,
clippy::option_if_let_else,
clippy::shadow_unrelated,
clippy::similar_names,
clippy::single_match_else,
clippy::too_many_lines,
clippy::unseparated_literal_suffix,
clippy::use_self,
clippy::used_underscore_binding
clippy::used_underscore_binding,
clippy::wildcard_imports
)]
#[cfg(all(
@ -284,7 +305,6 @@ extern crate unicode_xid;
#[cfg(feature = "printing")]
extern crate quote;
#[cfg(any(feature = "full", feature = "derive"))]
#[macro_use]
mod macros;
@ -307,7 +327,6 @@ pub use crate::attr::{
AttrStyle, Attribute, AttributeArgs, Meta, MetaList, MetaNameValue, NestedMeta,
};
#[cfg(any(feature = "full", feature = "derive"))]
mod bigint;
#[cfg(any(feature = "full", feature = "derive"))]
@ -364,9 +383,7 @@ pub use crate::file::File;
mod lifetime;
pub use crate::lifetime::Lifetime;
#[cfg(any(feature = "full", feature = "derive"))]
mod lit;
#[cfg(any(feature = "full", feature = "derive"))]
pub use crate::lit::{
Lit, LitBool, LitByte, LitByteStr, LitChar, LitFloat, LitInt, LitStr, StrStyle,
};
@ -441,6 +458,9 @@ pub mod parse_macro_input;
#[cfg(all(feature = "parsing", feature = "printing"))]
pub mod spanned;
#[cfg(all(feature = "parsing", feature = "full"))]
mod whitespace;
mod gen {
/// Syntax tree traversal to walk a shared borrow of a syntax tree.
///
@ -482,7 +502,7 @@ mod gen {
/// /* ... */
/// ```
///
/// *This module is available if Syn is built with the `"visit"` feature.*
/// *This module is available only if Syn is built with the `"visit"` feature.*
///
/// <br>
///
@ -603,7 +623,7 @@ mod gen {
/// /* ... */
/// ```
///
/// *This module is available if Syn is built with the `"visit-mut"`
/// *This module is available only if Syn is built with the `"visit-mut"`
/// feature.*
///
/// <br>
@ -702,7 +722,7 @@ mod gen {
/// /* ... */
/// ```
///
/// *This module is available if Syn is built with the `"fold"` feature.*
/// *This module is available only if Syn is built with the `"fold"` feature.*
///
/// <br>
///
@ -744,6 +764,22 @@ mod gen {
#[rustfmt::skip]
pub mod fold;
#[cfg(feature = "clone-impls")]
#[rustfmt::skip]
mod clone;
#[cfg(feature = "extra-traits")]
#[rustfmt::skip]
mod eq;
#[cfg(feature = "extra-traits")]
#[rustfmt::skip]
mod hash;
#[cfg(feature = "extra-traits")]
#[rustfmt::skip]
mod debug;
#[cfg(any(feature = "full", feature = "derive"))]
#[path = "../gen_helper.rs"]
mod helper;
@ -757,6 +793,8 @@ pub mod export;
mod custom_keyword;
mod custom_punctuation;
mod sealed;
mod span;
mod thread;
#[cfg(feature = "parsing")]
mod lookahead;
@ -764,13 +802,15 @@ mod lookahead;
#[cfg(feature = "parsing")]
pub mod parse;
mod span;
#[cfg(feature = "full")]
mod reserved;
#[cfg(all(any(feature = "full", feature = "derive"), feature = "parsing"))]
mod verbatim;
#[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))]
mod print;
mod thread;
////////////////////////////////////////////////////////////////////////////////
#[allow(dead_code, non_camel_case_types)]
@ -800,14 +840,14 @@ pub use crate::error::{Error, Result};
///
/// [`syn::parse2`]: parse2
///
/// *This function is available if Syn is built with both the `"parsing"` and
/// *This function is available only if Syn is built with both the `"parsing"` and
/// `"proc-macro"` features.*
///
/// # Examples
///
/// ```
/// extern crate proc_macro;
///
/// # extern crate proc_macro;
/// #
/// use proc_macro::TokenStream;
/// use quote::quote;
/// use syn::DeriveInput;
@ -847,7 +887,7 @@ pub fn parse<T: parse::Parse>(tokens: proc_macro::TokenStream) -> Result<T> {
///
/// [`syn::parse`]: parse()
///
/// *This function is available if Syn is built with the `"parsing"` feature.*
/// *This function is available only if Syn is built with the `"parsing"` feature.*
#[cfg(feature = "parsing")]
pub fn parse2<T: parse::Parse>(tokens: proc_macro2::TokenStream) -> Result<T> {
parse::Parser::parse2(T::parse, tokens)
@ -855,7 +895,7 @@ pub fn parse2<T: parse::Parse>(tokens: proc_macro2::TokenStream) -> Result<T> {
/// Parse a string of Rust code into the chosen syntax tree node.
///
/// *This function is available if Syn is built with the `"parsing"` feature.*
/// *This function is available only if Syn is built with the `"parsing"` feature.*
///
/// # Hygiene
///
@ -874,9 +914,7 @@ pub fn parse2<T: parse::Parse>(tokens: proc_macro2::TokenStream) -> Result<T> {
/// Ok(())
/// }
/// #
/// # fn main() {
/// # run().unwrap();
/// # }
/// # run().unwrap();
/// ```
#[cfg(feature = "parsing")]
pub fn parse_str<T: parse::Parse>(s: &str) -> Result<T> {
@ -894,7 +932,7 @@ pub fn parse_str<T: parse::Parse>(s: &str) -> Result<T> {
///
/// If present, either of these would be an error using `from_str`.
///
/// *This function is available if Syn is built with the `"parsing"` and
/// *This function is available only if Syn is built with the `"parsing"` and
/// `"full"` features.*
///
/// # Examples
@ -918,9 +956,7 @@ pub fn parse_str<T: parse::Parse>(s: &str) -> Result<T> {
/// Ok(())
/// }
/// #
/// # fn main() {
/// # run().unwrap();
/// # }
/// # run().unwrap();
/// ```
#[cfg(all(feature = "parsing", feature = "full"))]
pub fn parse_file(mut content: &str) -> Result<File> {
@ -931,13 +967,16 @@ pub fn parse_file(mut content: &str) -> Result<File> {
}
let mut shebang = None;
if content.starts_with("#!") && !content.starts_with("#![") {
if let Some(idx) = content.find('\n') {
shebang = Some(content[..idx].to_string());
content = &content[idx..];
} else {
shebang = Some(content.to_string());
content = "";
if content.starts_with("#!") {
let rest = whitespace::skip(&content[2..]);
if !rest.starts_with('[') {
if let Some(idx) = content.find('\n') {
shebang = Some(content[..idx].to_string());
content = &content[idx..];
} else {
shebang = Some(content.to_string());
content = "";
}
}
}

13
third_party/rust/syn/src/lifetime.rs поставляемый
Просмотреть файл

@ -18,10 +18,8 @@ use crate::lookahead;
/// - All following characters must be Unicode code points with the XID_Continue
/// property.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[cfg_attr(feature = "extra-traits", derive(Debug))]
#[derive(Clone)]
pub struct Lifetime {
pub apostrophe: Span,
pub ident: Ident,
@ -72,6 +70,15 @@ impl Display for Lifetime {
}
}
impl Clone for Lifetime {
fn clone(&self) -> Self {
Lifetime {
apostrophe: self.apostrophe,
ident: self.ident.clone(),
}
}
}
impl PartialEq for Lifetime {
fn eq(&self, other: &Lifetime) -> bool {
self.ident.eq(&other.ident)

581
third_party/rust/syn/src/lit.rs поставляемый
Просмотреть файл

@ -22,9 +22,6 @@ use crate::{Error, Result};
ast_enum_of_structs! {
/// A Rust literal such as a string or integer or boolean.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// feature.*
///
/// # Syntax tree enum
///
/// This type is a [syntax tree enum].
@ -33,7 +30,7 @@ ast_enum_of_structs! {
//
// TODO: change syntax-tree-enum link to an intra rustdoc link, currently
// blocked on https://github.com/rust-lang/rust/issues/62833
pub enum Lit #manual_extra_traits {
pub enum Lit {
/// A UTF-8 string literal: `"foo"`.
Str(LitStr),
@ -64,61 +61,44 @@ ast_enum_of_structs! {
ast_struct! {
/// A UTF-8 string literal: `"foo"`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub struct LitStr #manual_extra_traits_debug {
repr: Box<LitStrRepr>,
pub struct LitStr {
repr: Box<LitRepr>,
}
}
#[cfg_attr(feature = "clone-impls", derive(Clone))]
struct LitStrRepr {
token: Literal,
suffix: Box<str>,
}
ast_struct! {
/// A byte string literal: `b"foo"`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub struct LitByteStr #manual_extra_traits_debug {
token: Literal,
pub struct LitByteStr {
repr: Box<LitRepr>,
}
}
ast_struct! {
/// A byte literal: `b'f'`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub struct LitByte #manual_extra_traits_debug {
token: Literal,
pub struct LitByte {
repr: Box<LitRepr>,
}
}
ast_struct! {
/// A character literal: `'a'`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub struct LitChar #manual_extra_traits_debug {
token: Literal,
pub struct LitChar {
repr: Box<LitRepr>,
}
}
struct LitRepr {
token: Literal,
suffix: Box<str>,
}
ast_struct! {
/// An integer literal: `1` or `1u16`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub struct LitInt #manual_extra_traits_debug {
pub struct LitInt {
repr: Box<LitIntRepr>,
}
}
#[cfg_attr(feature = "clone-impls", derive(Clone))]
struct LitIntRepr {
token: Literal,
digits: Box<str>,
@ -129,15 +109,11 @@ ast_struct! {
/// A floating point literal: `1f64` or `1.0e10f64`.
///
/// Must be finite. May not be infinte or NaN.
///
/// *This type is available if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub struct LitFloat #manual_extra_traits_debug {
pub struct LitFloat {
repr: Box<LitFloatRepr>,
}
}
#[cfg_attr(feature = "clone-impls", derive(Clone))]
struct LitFloatRepr {
token: Literal,
digits: Box<str>,
@ -146,92 +122,27 @@ struct LitFloatRepr {
ast_struct! {
/// A boolean literal: `true` or `false`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub struct LitBool #manual_extra_traits_debug {
pub struct LitBool {
pub value: bool,
pub span: Span,
}
}
#[cfg(feature = "extra-traits")]
impl Eq for Lit {}
#[cfg(feature = "extra-traits")]
impl PartialEq for Lit {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(Lit::Str(this), Lit::Str(other)) => this == other,
(Lit::ByteStr(this), Lit::ByteStr(other)) => this == other,
(Lit::Byte(this), Lit::Byte(other)) => this == other,
(Lit::Char(this), Lit::Char(other)) => this == other,
(Lit::Int(this), Lit::Int(other)) => this == other,
(Lit::Float(this), Lit::Float(other)) => this == other,
(Lit::Bool(this), Lit::Bool(other)) => this == other,
(Lit::Verbatim(this), Lit::Verbatim(other)) => this.to_string() == other.to_string(),
_ => false,
}
}
}
#[cfg(feature = "extra-traits")]
impl Hash for Lit {
fn hash<H>(&self, hash: &mut H)
where
H: Hasher,
{
match self {
Lit::Str(lit) => {
hash.write_u8(0);
lit.hash(hash);
}
Lit::ByteStr(lit) => {
hash.write_u8(1);
lit.hash(hash);
}
Lit::Byte(lit) => {
hash.write_u8(2);
lit.hash(hash);
}
Lit::Char(lit) => {
hash.write_u8(3);
lit.hash(hash);
}
Lit::Int(lit) => {
hash.write_u8(4);
lit.hash(hash);
}
Lit::Float(lit) => {
hash.write_u8(5);
lit.hash(hash);
}
Lit::Bool(lit) => {
hash.write_u8(6);
lit.hash(hash);
}
Lit::Verbatim(lit) => {
hash.write_u8(7);
lit.to_string().hash(hash);
}
}
}
}
impl LitStr {
pub fn new(value: &str, span: Span) -> Self {
let mut lit = Literal::string(value);
lit.set_span(span);
let mut token = Literal::string(value);
token.set_span(span);
LitStr {
repr: Box::new(LitStrRepr {
token: lit,
repr: Box::new(LitRepr {
token,
suffix: Box::<str>::default(),
}),
}
}
pub fn value(&self) -> String {
let (value, _) = value::parse_lit_str(&self.repr.token.to_string());
let repr = self.repr.token.to_string();
let (value, _suffix) = value::parse_lit_str(&repr);
String::from(value)
}
@ -311,7 +222,7 @@ impl LitStr {
fn respan_token_tree(mut token: TokenTree, span: Span) -> TokenTree {
match &mut token {
TokenTree::Group(g) => {
let stream = respan_token_stream(g.stream().clone(), span);
let stream = respan_token_stream(g.stream(), span);
*g = Group::new(g.delimiter(), stream);
g.set_span(span);
}
@ -345,19 +256,30 @@ impl LitByteStr {
pub fn new(value: &[u8], span: Span) -> Self {
let mut token = Literal::byte_string(value);
token.set_span(span);
LitByteStr { token }
LitByteStr {
repr: Box::new(LitRepr {
token,
suffix: Box::<str>::default(),
}),
}
}
pub fn value(&self) -> Vec<u8> {
value::parse_lit_byte_str(&self.token.to_string())
let repr = self.repr.token.to_string();
let (value, _suffix) = value::parse_lit_byte_str(&repr);
value
}
pub fn span(&self) -> Span {
self.token.span()
self.repr.token.span()
}
pub fn set_span(&mut self, span: Span) {
self.token.set_span(span)
self.repr.token.set_span(span)
}
pub fn suffix(&self) -> &str {
&self.repr.suffix
}
}
@ -365,19 +287,30 @@ impl LitByte {
pub fn new(value: u8, span: Span) -> Self {
let mut token = Literal::u8_suffixed(value);
token.set_span(span);
LitByte { token }
LitByte {
repr: Box::new(LitRepr {
token,
suffix: Box::<str>::default(),
}),
}
}
pub fn value(&self) -> u8 {
value::parse_lit_byte(&self.token.to_string())
let repr = self.repr.token.to_string();
let (value, _suffix) = value::parse_lit_byte(&repr);
value
}
pub fn span(&self) -> Span {
self.token.span()
self.repr.token.span()
}
pub fn set_span(&mut self, span: Span) {
self.token.set_span(span)
self.repr.token.set_span(span)
}
pub fn suffix(&self) -> &str {
&self.repr.suffix
}
}
@ -385,36 +318,52 @@ impl LitChar {
pub fn new(value: char, span: Span) -> Self {
let mut token = Literal::character(value);
token.set_span(span);
LitChar { token }
LitChar {
repr: Box::new(LitRepr {
token,
suffix: Box::<str>::default(),
}),
}
}
pub fn value(&self) -> char {
value::parse_lit_char(&self.token.to_string())
let repr = self.repr.token.to_string();
let (value, _suffix) = value::parse_lit_char(&repr);
value
}
pub fn span(&self) -> Span {
self.token.span()
self.repr.token.span()
}
pub fn set_span(&mut self, span: Span) {
self.token.set_span(span)
self.repr.token.set_span(span)
}
pub fn suffix(&self) -> &str {
&self.repr.suffix
}
}
impl LitInt {
pub fn new(repr: &str, span: Span) -> Self {
if let Some((digits, suffix)) = value::parse_lit_int(repr) {
let mut token = value::to_literal(repr);
token.set_span(span);
LitInt {
repr: Box::new(LitIntRepr {
token,
digits,
suffix,
}),
}
} else {
panic!("Not an integer literal: `{}`", repr);
let (digits, suffix) = match value::parse_lit_int(repr) {
Some(parse) => parse,
None => panic!("Not an integer literal: `{}`", repr),
};
let mut token = match value::to_literal(repr, &digits, &suffix) {
Some(token) => token,
None => panic!("Unsupported integer literal: `{}`", repr),
};
token.set_span(span);
LitInt {
repr: Box::new(LitIntRepr {
token,
digits,
suffix,
}),
}
}
@ -492,18 +441,23 @@ impl Display for LitInt {
impl LitFloat {
pub fn new(repr: &str, span: Span) -> Self {
if let Some((digits, suffix)) = value::parse_lit_float(repr) {
let mut token = value::to_literal(repr);
token.set_span(span);
LitFloat {
repr: Box::new(LitFloatRepr {
token,
digits,
suffix,
}),
}
} else {
panic!("Not a float literal: `{}`", repr);
let (digits, suffix) = match value::parse_lit_float(repr) {
Some(parse) => parse,
None => panic!("Not a float literal: `{}`", repr),
};
let mut token = match value::to_literal(repr, &digits, &suffix) {
Some(token) => token,
None => panic!("Unsupported float literal: `{}`", repr),
};
token.set_span(span);
LitFloat {
repr: Box::new(LitFloatRepr {
token,
digits,
suffix,
}),
}
}
@ -575,7 +529,7 @@ mod debug_impls {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter
.debug_struct("LitByteStr")
.field("token", &format_args!("{}", self.token))
.field("token", &format_args!("{}", self.repr.token))
.finish()
}
}
@ -584,7 +538,7 @@ mod debug_impls {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter
.debug_struct("LitByte")
.field("token", &format_args!("{}", self.token))
.field("token", &format_args!("{}", self.repr.token))
.finish()
}
}
@ -593,7 +547,7 @@ mod debug_impls {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter
.debug_struct("LitChar")
.field("token", &format_args!("{}", self.token))
.field("token", &format_args!("{}", self.repr.token))
.finish()
}
}
@ -626,15 +580,53 @@ mod debug_impls {
}
}
#[cfg(feature = "clone-impls")]
impl Clone for LitRepr {
fn clone(&self) -> Self {
LitRepr {
token: self.token.clone(),
suffix: self.suffix.clone(),
}
}
}
#[cfg(feature = "clone-impls")]
impl Clone for LitIntRepr {
fn clone(&self) -> Self {
LitIntRepr {
token: self.token.clone(),
digits: self.digits.clone(),
suffix: self.suffix.clone(),
}
}
}
#[cfg(feature = "clone-impls")]
impl Clone for LitFloatRepr {
fn clone(&self) -> Self {
LitFloatRepr {
token: self.token.clone(),
digits: self.digits.clone(),
suffix: self.suffix.clone(),
}
}
}
macro_rules! lit_extra_traits {
($ty:ident, $($field:ident).+) => {
#[cfg(feature = "extra-traits")]
impl Eq for $ty {}
($ty:ident) => {
#[cfg(feature = "clone-impls")]
impl Clone for $ty {
fn clone(&self) -> Self {
$ty {
repr: self.repr.clone(),
}
}
}
#[cfg(feature = "extra-traits")]
impl PartialEq for $ty {
fn eq(&self, other: &Self) -> bool {
self.$($field).+.to_string() == other.$($field).+.to_string()
self.repr.token.to_string() == other.repr.token.to_string()
}
}
@ -644,7 +636,7 @@ macro_rules! lit_extra_traits {
where
H: Hasher,
{
self.$($field).+.to_string().hash(state);
self.repr.token.to_string().hash(state);
}
}
@ -657,20 +649,23 @@ macro_rules! lit_extra_traits {
};
}
lit_extra_traits!(LitStr, repr.token);
lit_extra_traits!(LitByteStr, token);
lit_extra_traits!(LitByte, token);
lit_extra_traits!(LitChar, token);
lit_extra_traits!(LitInt, repr.token);
lit_extra_traits!(LitFloat, repr.token);
lit_extra_traits!(LitBool, value);
lit_extra_traits!(LitStr);
lit_extra_traits!(LitByteStr);
lit_extra_traits!(LitByte);
lit_extra_traits!(LitChar);
lit_extra_traits!(LitInt);
lit_extra_traits!(LitFloat);
#[cfg(feature = "parsing")]
#[doc(hidden)]
#[allow(non_snake_case)]
pub fn LitBool(marker: lookahead::TokenMarker) -> LitBool {
match marker {}
}
ast_enum! {
/// The style of a string literal, either plain quoted or a raw string like
/// `r##"data"##`.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// feature.*
pub enum StrStyle #no_visit {
/// An ordinary string like `"data"`.
Cooked,
@ -691,7 +686,9 @@ pub fn Lit(marker: lookahead::TokenMarker) -> Lit {
#[cfg(feature = "parsing")]
pub mod parsing {
use super::*;
use crate::buffer::Cursor;
use crate::parse::{Parse, ParseStream, Result};
use proc_macro2::Punct;
impl Parse for Lit {
fn parse(input: ParseStream) -> Result<Self> {
@ -699,25 +696,73 @@ pub mod parsing {
if let Some((lit, rest)) = cursor.literal() {
return Ok((Lit::new(lit), rest));
}
while let Some((ident, rest)) = cursor.ident() {
let value = if ident == "true" {
true
} else if ident == "false" {
false
} else {
break;
};
let lit_bool = LitBool {
value,
span: ident.span(),
};
return Ok((Lit::Bool(lit_bool), rest));
if let Some((ident, rest)) = cursor.ident() {
let value = ident == "true";
if value || ident == "false" {
let lit_bool = LitBool {
value,
span: ident.span(),
};
return Ok((Lit::Bool(lit_bool), rest));
}
}
if let Some((punct, rest)) = cursor.punct() {
if punct.as_char() == '-' {
if let Some((lit, rest)) = parse_negative_lit(punct, rest) {
return Ok((lit, rest));
}
}
}
Err(cursor.error("expected literal"))
})
}
}
fn parse_negative_lit(neg: Punct, cursor: Cursor) -> Option<(Lit, Cursor)> {
let (lit, rest) = cursor.literal()?;
let mut span = neg.span();
span = span.join(lit.span()).unwrap_or(span);
let mut repr = lit.to_string();
repr.insert(0, '-');
if !(repr.ends_with("f32") || repr.ends_with("f64")) {
if let Some((digits, suffix)) = value::parse_lit_int(&repr) {
if let Some(mut token) = value::to_literal(&repr, &digits, &suffix) {
token.set_span(span);
return Some((
Lit::Int(LitInt {
repr: Box::new(LitIntRepr {
token,
digits,
suffix,
}),
}),
rest,
));
}
}
}
let (digits, suffix) = value::parse_lit_float(&repr)?;
let mut token = value::to_literal(&repr, &digits, &suffix)?;
token.set_span(span);
Some((
Lit::Float(LitFloat {
repr: Box::new(LitFloatRepr {
token,
digits,
suffix,
}),
}),
rest,
))
}
impl Parse for LitStr {
fn parse(input: ParseStream) -> Result<Self> {
let head = input.fork();
@ -803,19 +848,19 @@ mod printing {
impl ToTokens for LitByteStr {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.token.to_tokens(tokens);
self.repr.token.to_tokens(tokens);
}
}
impl ToTokens for LitByte {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.token.to_tokens(tokens);
self.repr.token.to_tokens(tokens);
}
}
impl ToTokens for LitChar {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.token.to_tokens(tokens);
self.repr.token.to_tokens(tokens);
}
}
@ -855,20 +900,29 @@ mod value {
b'"' | b'r' => {
let (_, suffix) = parse_lit_str(&repr);
return Lit::Str(LitStr {
repr: Box::new(LitStrRepr { token, suffix }),
repr: Box::new(LitRepr { token, suffix }),
});
}
b'b' => match byte(&repr, 1) {
b'"' | b'r' => {
return Lit::ByteStr(LitByteStr { token });
let (_, suffix) = parse_lit_byte_str(&repr);
return Lit::ByteStr(LitByteStr {
repr: Box::new(LitRepr { token, suffix }),
});
}
b'\'' => {
return Lit::Byte(LitByte { token });
let (_, suffix) = parse_lit_byte(&repr);
return Lit::Byte(LitByte {
repr: Box::new(LitRepr { token, suffix }),
});
}
_ => {}
},
b'\'' => {
return Lit::Char(LitChar { token });
let (_, suffix) = parse_lit_char(&repr);
return Lit::Char(LitChar {
repr: Box::new(LitRepr { token, suffix }),
});
}
b'0'..=b'9' | b'-' => {
if !(repr.ends_with("f32") || repr.ends_with("f64")) {
@ -905,6 +959,44 @@ mod value {
panic!("Unrecognized literal: `{}`", repr);
}
pub fn suffix(&self) -> &str {
match self {
Lit::Str(lit) => lit.suffix(),
Lit::ByteStr(lit) => lit.suffix(),
Lit::Byte(lit) => lit.suffix(),
Lit::Char(lit) => lit.suffix(),
Lit::Int(lit) => lit.suffix(),
Lit::Float(lit) => lit.suffix(),
Lit::Bool(_) | Lit::Verbatim(_) => "",
}
}
pub fn span(&self) -> Span {
match self {
Lit::Str(lit) => lit.span(),
Lit::ByteStr(lit) => lit.span(),
Lit::Byte(lit) => lit.span(),
Lit::Char(lit) => lit.span(),
Lit::Int(lit) => lit.span(),
Lit::Float(lit) => lit.span(),
Lit::Bool(lit) => lit.span,
Lit::Verbatim(lit) => lit.span(),
}
}
pub fn set_span(&mut self, span: Span) {
match self {
Lit::Str(lit) => lit.set_span(span),
Lit::ByteStr(lit) => lit.set_span(span),
Lit::Byte(lit) => lit.set_span(span),
Lit::Char(lit) => lit.set_span(span),
Lit::Int(lit) => lit.set_span(span),
Lit::Float(lit) => lit.set_span(span),
Lit::Bool(lit) => lit.span = span,
Lit::Verbatim(lit) => lit.set_span(span),
}
}
}
/// Get the byte at offset idx, or a default of `b'\0'` if we're looking
@ -1004,19 +1096,18 @@ mod value {
pounds += 1;
}
assert_eq!(byte(s, pounds), b'"');
assert_eq!(byte(s, s.len() - pounds - 1), b'"');
for end in s[s.len() - pounds..].bytes() {
let close = s.rfind('"').unwrap();
for end in s[close + 1..close + 1 + pounds].bytes() {
assert_eq!(end, b'#');
}
let content = s[pounds + 1..s.len() - pounds - 1]
.to_owned()
.into_boxed_str();
let suffix = Box::<str>::default(); // todo
let content = s[pounds + 1..close].to_owned().into_boxed_str();
let suffix = s[close + 1 + pounds..].to_owned().into_boxed_str();
(content, suffix)
}
pub fn parse_lit_byte_str(s: &str) -> Vec<u8> {
// Returns (content, suffix).
pub fn parse_lit_byte_str(s: &str) -> (Vec<u8>, Box<str>) {
assert_eq!(byte(s, 0), b'b');
match byte(s, 1) {
b'"' => parse_lit_byte_str_cooked(s),
@ -1028,25 +1119,25 @@ mod value {
// Clippy false positive
// https://github.com/rust-lang-nursery/rust-clippy/issues/2329
#[allow(clippy::needless_continue)]
fn parse_lit_byte_str_cooked(mut s: &str) -> Vec<u8> {
fn parse_lit_byte_str_cooked(mut s: &str) -> (Vec<u8>, Box<str>) {
assert_eq!(byte(s, 0), b'b');
assert_eq!(byte(s, 1), b'"');
s = &s[2..];
// We're going to want to have slices which don't respect codepoint boundaries.
let mut s = s.as_bytes();
let mut v = s.as_bytes();
let mut out = Vec::new();
'outer: loop {
let byte = match byte(s, 0) {
let byte = match byte(v, 0) {
b'"' => break,
b'\\' => {
let b = byte(s, 1);
s = &s[2..];
let b = byte(v, 1);
v = &v[2..];
match b {
b'x' => {
let (b, rest) = backslash_x(s);
s = rest;
let (b, rest) = backslash_x(v);
v = rest;
b
}
b'n' => b'\n',
@ -1057,10 +1148,10 @@ mod value {
b'\'' => b'\'',
b'"' => b'"',
b'\r' | b'\n' => loop {
let byte = byte(s, 0);
let byte = byte(v, 0);
let ch = char::from_u32(u32::from(byte)).unwrap();
if ch.is_whitespace() {
s = &s[1..];
v = &v[1..];
} else {
continue 'outer;
}
@ -1069,42 +1160,45 @@ mod value {
}
}
b'\r' => {
assert_eq!(byte(s, 1), b'\n', "Bare CR not allowed in string");
s = &s[2..];
assert_eq!(byte(v, 1), b'\n', "Bare CR not allowed in string");
v = &v[2..];
b'\n'
}
b => {
s = &s[1..];
v = &v[1..];
b
}
};
out.push(byte);
}
assert_eq!(s, b"\"");
out
assert_eq!(byte(v, 0), b'"');
let suffix = s[s.len() - v.len() + 1..].to_owned().into_boxed_str();
(out, suffix)
}
fn parse_lit_byte_str_raw(s: &str) -> Vec<u8> {
fn parse_lit_byte_str_raw(s: &str) -> (Vec<u8>, Box<str>) {
assert_eq!(byte(s, 0), b'b');
String::from(parse_lit_str_raw(&s[1..]).0).into_bytes()
let (value, suffix) = parse_lit_str_raw(&s[1..]);
(String::from(value).into_bytes(), suffix)
}
pub fn parse_lit_byte(s: &str) -> u8 {
// Returns (value, suffix).
pub fn parse_lit_byte(s: &str) -> (u8, Box<str>) {
assert_eq!(byte(s, 0), b'b');
assert_eq!(byte(s, 1), b'\'');
// We're going to want to have slices which don't respect codepoint boundaries.
let mut s = s[2..].as_bytes();
let mut v = s[2..].as_bytes();
let b = match byte(s, 0) {
let b = match byte(v, 0) {
b'\\' => {
let b = byte(s, 1);
s = &s[2..];
let b = byte(v, 1);
v = &v[2..];
match b {
b'x' => {
let (b, rest) = backslash_x(s);
s = rest;
let (b, rest) = backslash_x(v);
v = rest;
b
}
b'n' => b'\n',
@ -1118,16 +1212,18 @@ mod value {
}
}
b => {
s = &s[1..];
v = &v[1..];
b
}
};
assert_eq!(byte(s, 0), b'\'');
b
assert_eq!(byte(v, 0), b'\'');
let suffix = s[s.len() - v.len() + 1..].to_owned().into_boxed_str();
(b, suffix)
}
pub fn parse_lit_char(mut s: &str) -> char {
// Returns (value, suffix).
pub fn parse_lit_char(mut s: &str) -> (char, Box<str>) {
assert_eq!(byte(s, 0), b'\'');
s = &s[1..];
@ -1163,8 +1259,9 @@ mod value {
ch
}
};
assert_eq!(s, "\'", "Expected end of char literal");
ch
assert_eq!(byte(s, 0), b'\'');
let suffix = s[1..].to_owned().into_boxed_str();
(ch, suffix)
}
fn backslash_x<S>(s: &S) -> (u8, &S)
@ -1334,7 +1431,11 @@ mod value {
}
b'e' | b'E' => {
if has_e {
return None;
if has_exponent {
break;
} else {
return None;
}
}
has_e = true;
bytes[write] = b'e';
@ -1372,11 +1473,33 @@ mod value {
}
}
pub fn to_literal(s: &str) -> Literal {
let stream = s.parse::<TokenStream>().unwrap();
match stream.into_iter().next().unwrap() {
TokenTree::Literal(l) => l,
_ => unreachable!(),
pub fn to_literal(repr: &str, digits: &str, suffix: &str) -> Option<Literal> {
if repr.starts_with('-') {
if suffix == "f64" {
digits.parse().ok().map(Literal::f64_suffixed)
} else if suffix == "f32" {
digits.parse().ok().map(Literal::f32_suffixed)
} else if suffix == "i64" {
digits.parse().ok().map(Literal::i64_suffixed)
} else if suffix == "i32" {
digits.parse().ok().map(Literal::i32_suffixed)
} else if suffix == "i16" {
digits.parse().ok().map(Literal::i16_suffixed)
} else if suffix == "i8" {
digits.parse().ok().map(Literal::i8_suffixed)
} else if !suffix.is_empty() {
None
} else if digits.contains('.') {
digits.parse().ok().map(Literal::f64_unsuffixed)
} else {
digits.parse().ok().map(Literal::i64_unsuffixed)
}
} else {
let stream = repr.parse::<TokenStream>().unwrap();
match stream.into_iter().next().unwrap() {
TokenTree::Literal(l) => Some(l),
_ => unreachable!(),
}
}
}
}

55
third_party/rust/syn/src/mac.rs поставляемый
Просмотреть файл

@ -2,21 +2,17 @@ use super::*;
use crate::token::{Brace, Bracket, Paren};
use proc_macro2::TokenStream;
#[cfg(feature = "parsing")]
use proc_macro2::{Delimiter, Span, TokenTree};
use proc_macro2::{Delimiter, Group, Span, TokenTree};
#[cfg(feature = "parsing")]
use crate::parse::{Parse, ParseStream, Parser, Result};
#[cfg(feature = "extra-traits")]
use crate::tt::TokenStreamHelper;
#[cfg(feature = "extra-traits")]
use std::hash::{Hash, Hasher};
ast_struct! {
/// A macro invocation: `println!("{}", mac)`.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
pub struct Macro #manual_extra_traits {
pub struct Macro {
pub path: Path,
pub bang_token: Token![!],
pub delimiter: MacroDelimiter,
@ -27,7 +23,7 @@ ast_struct! {
ast_enum! {
/// A grouping token that surrounds a macro body: `m!(...)` or `m!{...}` or `m![...]`.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
pub enum MacroDelimiter {
Paren(Paren),
@ -36,39 +32,20 @@ ast_enum! {
}
}
#[cfg(feature = "extra-traits")]
impl Eq for Macro {}
#[cfg(feature = "extra-traits")]
impl PartialEq for Macro {
fn eq(&self, other: &Self) -> bool {
self.path == other.path
&& self.bang_token == other.bang_token
&& self.delimiter == other.delimiter
&& TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens)
}
}
#[cfg(feature = "extra-traits")]
impl Hash for Macro {
fn hash<H>(&self, state: &mut H)
where
H: Hasher,
{
self.path.hash(state);
self.bang_token.hash(state);
self.delimiter.hash(state);
TokenStreamHelper(&self.tokens).hash(state);
}
}
#[cfg(feature = "parsing")]
fn delimiter_span(delimiter: &MacroDelimiter) -> Span {
match delimiter {
fn delimiter_span_close(macro_delimiter: &MacroDelimiter) -> Span {
let delimiter = match macro_delimiter {
MacroDelimiter::Paren(_) => Delimiter::Parenthesis,
MacroDelimiter::Brace(_) => Delimiter::Brace,
MacroDelimiter::Bracket(_) => Delimiter::Bracket,
};
let mut group = Group::new(delimiter, TokenStream::new());
group.set_span(match macro_delimiter {
MacroDelimiter::Paren(token) => token.span,
MacroDelimiter::Brace(token) => token.span,
MacroDelimiter::Bracket(token) => token.span,
}
});
group.span_close()
}
impl Macro {
@ -163,9 +140,7 @@ impl Macro {
/// given parser.
#[cfg(feature = "parsing")]
pub fn parse_body_with<F: Parser>(&self, parser: F) -> Result<F::Output> {
// TODO: see if we can get a group.span_close() span in here as the
// scope, rather than the span of the whole group.
let scope = delimiter_span(&self.delimiter);
let scope = delimiter_span_close(&self.delimiter);
crate::parse::parse_scoped(parser, scope, self.tokens.clone())
}
}

61
third_party/rust/syn/src/macros.rs поставляемый
Просмотреть файл

@ -4,15 +4,11 @@ macro_rules! ast_struct {
struct $name:ident #full $($rest:tt)*
) => {
#[cfg(feature = "full")]
#[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
#[cfg_attr(feature = "clone-impls", derive(Clone))]
$($attrs_pub)* struct $name $($rest)*
#[cfg(not(feature = "full"))]
#[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
#[cfg_attr(feature = "clone-impls", derive(Clone))]
$($attrs_pub)* struct $name {
_noconstruct: (),
_noconstruct: ::std::marker::PhantomData<::proc_macro2::Span>,
}
#[cfg(all(not(feature = "full"), feature = "printing"))]
@ -23,29 +19,10 @@ macro_rules! ast_struct {
}
};
(
[$($attrs_pub:tt)*]
struct $name:ident #manual_extra_traits $($rest:tt)*
) => {
#[cfg_attr(feature = "extra-traits", derive(Debug))]
#[cfg_attr(feature = "clone-impls", derive(Clone))]
$($attrs_pub)* struct $name $($rest)*
};
(
[$($attrs_pub:tt)*]
struct $name:ident #manual_extra_traits_debug $($rest:tt)*
) => {
#[cfg_attr(feature = "clone-impls", derive(Clone))]
$($attrs_pub)* struct $name $($rest)*
};
(
[$($attrs_pub:tt)*]
struct $name:ident $($rest:tt)*
) => {
#[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
#[cfg_attr(feature = "clone-impls", derive(Clone))]
$($attrs_pub)* struct $name $($rest)*
};
@ -63,21 +40,10 @@ macro_rules! ast_enum {
ast_enum!([$($attrs_pub)*] enum $name $($rest)*);
);
(
[$($attrs_pub:tt)*]
enum $name:ident #manual_extra_traits $($rest:tt)*
) => (
#[cfg_attr(feature = "extra-traits", derive(Debug))]
#[cfg_attr(feature = "clone-impls", derive(Clone))]
$($attrs_pub)* enum $name $($rest)*
);
(
[$($attrs_pub:tt)*]
enum $name:ident $($rest:tt)*
) => (
#[cfg_attr(feature = "extra-traits", derive(Debug, Eq, PartialEq, Hash))]
#[cfg_attr(feature = "clone-impls", derive(Clone))]
$($attrs_pub)* enum $name $($rest)*
);
@ -120,15 +86,9 @@ macro_rules! ast_enum_of_structs_impl {
check_keyword_matches!(pub $pub);
check_keyword_matches!(enum $enum);
$(
$(
impl From<$member> for $name {
fn from(e: $member) -> $name {
$name::$variant(e)
}
}
)*
)*
$($(
ast_enum_from_struct!($name::$variant, $member);
)*)*
#[cfg(feature = "printing")]
generate_to_tokens! {
@ -140,6 +100,19 @@ macro_rules! ast_enum_of_structs_impl {
};
}
macro_rules! ast_enum_from_struct {
// No From<TokenStream> for verbatim variants.
($name:ident::Verbatim, $member:ident) => {};
($name:ident::$variant:ident, $member:ident) => {
impl From<$member> for $name {
fn from(e: $member) -> $name {
$name::$variant(e)
}
}
};
}
#[cfg(feature = "printing")]
macro_rules! generate_to_tokens {
(do_not_generate_to_tokens $($foo:tt)*) => ();

6
third_party/rust/syn/src/op.rs поставляемый
Просмотреть файл

@ -1,9 +1,8 @@
ast_enum! {
/// A binary operator: `+`, `+=`, `&`.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[cfg_attr(feature = "clone-impls", derive(Copy))]
pub enum BinOp {
/// The `+` operator (addition)
Add(Token![+]),
@ -67,9 +66,8 @@ ast_enum! {
ast_enum! {
/// A unary operator: `*`, `!`, `-`.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[cfg_attr(feature = "clone-impls", derive(Copy))]
pub enum UnOp {
/// The `*` operator for dereferencing
Deref(Token![*]),

211
third_party/rust/syn/src/parse.rs поставляемый
Просмотреть файл

@ -26,8 +26,8 @@
//! [`parse_macro_input!`]: ../macro.parse_macro_input.html
//!
//! ```
//! extern crate proc_macro;
//!
//! # extern crate proc_macro;
//! #
//! use proc_macro::TokenStream;
//! use syn::{braced, parse_macro_input, token, Field, Ident, Result, Token};
//! use syn::parse::{Parse, ParseStream};
@ -109,9 +109,7 @@
//! # Ok(())
//! # }
//! #
//! # fn main() {
//! # run_parser().unwrap();
//! # }
//! # run_parser().unwrap();
//! ```
//!
//! The [`parse_quote!`] macro also uses this approach.
@ -155,8 +153,8 @@
//! [`Parser`]: trait.Parser.html
//!
//! ```
//! extern crate proc_macro;
//!
//! # extern crate proc_macro;
//! #
//! use proc_macro::TokenStream;
//! use syn::parse::Parser;
//! use syn::punctuated::Punctuated;
@ -186,7 +184,7 @@
//!
//! ---
//!
//! *This module is available if Syn is built with the `"parsing"` feature.*
//! *This module is available only if Syn is built with the `"parsing"` feature.*
#[path = "discouraged.rs"]
pub mod discouraged;
@ -217,6 +215,11 @@ pub use crate::lookahead::{Lookahead1, Peek};
/// Parsing interface implemented by all types that can be parsed in a default
/// way from a token stream.
///
/// Refer to the [module documentation] for details about implementing and using
/// the `Parse` trait.
///
/// [module documentation]: self
pub trait Parse: Sized {
fn parse(input: ParseStream) -> Result<Self>;
}
@ -263,13 +266,16 @@ pub struct ParseBuffer<'a> {
// the cell.
cell: Cell<Cursor<'static>>,
marker: PhantomData<Cursor<'a>>,
unexpected: Rc<Cell<Option<Span>>>,
unexpected: Cell<Option<Rc<Cell<Unexpected>>>>,
}
impl<'a> Drop for ParseBuffer<'a> {
fn drop(&mut self) {
if !self.is_empty() && self.unexpected.get().is_none() {
self.unexpected.set(Some(self.cursor().span()));
if let Some(unexpected_span) = span_of_unexpected_ignoring_nones(self.cursor()) {
let (inner, old_span) = inner_unexpected(self);
if old_span.is_none() {
inner.set(Unexpected::Some(unexpected_span));
}
}
}
}
@ -324,15 +330,12 @@ impl<'a> Debug for ParseBuffer<'a> {
/// # input.parse()
/// # }
/// #
/// # fn main() {
/// # use syn::parse::Parser;
/// # let remainder = remainder_after_skipping_past_next_at
/// # .parse_str("a @ b c")
/// # .unwrap();
/// # assert_eq!(remainder.to_string(), "b c");
/// # }
/// # use syn::parse::Parser;
/// # let remainder = remainder_after_skipping_past_next_at
/// # .parse_str("a @ b c")
/// # .unwrap();
/// # assert_eq!(remainder.to_string(), "b c");
/// ```
#[derive(Copy, Clone)]
pub struct StepCursor<'c, 'a> {
scope: Span,
// This field is covariant in 'c.
@ -356,6 +359,14 @@ impl<'c, 'a> Deref for StepCursor<'c, 'a> {
}
}
impl<'c, 'a> Copy for StepCursor<'c, 'a> {}
impl<'c, 'a> Clone for StepCursor<'c, 'a> {
fn clone(&self) -> Self {
*self
}
}
impl<'c, 'a> StepCursor<'c, 'a> {
/// Triggers an error at the current position of the parse stream.
///
@ -375,36 +386,81 @@ pub(crate) fn advance_step_cursor<'c, 'a>(proof: StepCursor<'c, 'a>, to: Cursor<
unsafe { mem::transmute::<Cursor<'c>, Cursor<'a>>(to) }
}
fn skip(input: ParseStream) -> bool {
input
.step(|cursor| {
if let Some((_lifetime, rest)) = cursor.lifetime() {
Ok((true, rest))
} else if let Some((_token, rest)) = cursor.token_tree() {
Ok((true, rest))
} else {
Ok((false, *cursor))
}
})
.unwrap()
}
pub(crate) fn new_parse_buffer(
scope: Span,
cursor: Cursor,
unexpected: Rc<Cell<Option<Span>>>,
unexpected: Rc<Cell<Unexpected>>,
) -> ParseBuffer {
ParseBuffer {
scope,
// See comment on `cell` in the struct definition.
cell: Cell::new(unsafe { mem::transmute::<Cursor, Cursor<'static>>(cursor) }),
marker: PhantomData,
unexpected,
unexpected: Cell::new(Some(unexpected)),
}
}
pub(crate) fn get_unexpected(buffer: &ParseBuffer) -> Rc<Cell<Option<Span>>> {
buffer.unexpected.clone()
pub(crate) enum Unexpected {
None,
Some(Span),
Chain(Rc<Cell<Unexpected>>),
}
impl Default for Unexpected {
fn default() -> Self {
Unexpected::None
}
}
impl Clone for Unexpected {
fn clone(&self) -> Self {
match self {
Unexpected::None => Unexpected::None,
Unexpected::Some(span) => Unexpected::Some(*span),
Unexpected::Chain(next) => Unexpected::Chain(next.clone()),
}
}
}
// We call this on Cell<Unexpected> and Cell<Option<T>> where temporarily
// swapping in a None is cheap.
fn cell_clone<T: Default + Clone>(cell: &Cell<T>) -> T {
let prev = cell.take();
let ret = prev.clone();
cell.set(prev);
ret
}
fn inner_unexpected(buffer: &ParseBuffer) -> (Rc<Cell<Unexpected>>, Option<Span>) {
let mut unexpected = get_unexpected(buffer);
loop {
match cell_clone(&unexpected) {
Unexpected::None => return (unexpected, None),
Unexpected::Some(span) => return (unexpected, Some(span)),
Unexpected::Chain(next) => unexpected = next,
}
}
}
pub(crate) fn get_unexpected(buffer: &ParseBuffer) -> Rc<Cell<Unexpected>> {
cell_clone(&buffer.unexpected).unwrap()
}
fn span_of_unexpected_ignoring_nones(mut cursor: Cursor) -> Option<Span> {
if cursor.eof() {
return None;
}
while let Some((inner, _span, rest)) = cursor.group(Delimiter::None) {
if let Some(unexpected) = span_of_unexpected_ignoring_nones(inner) {
return Some(unexpected);
}
cursor = rest;
}
if cursor.eof() {
None
} else {
Some(cursor.span())
}
}
impl<'a> ParseBuffer<'a> {
@ -566,14 +622,17 @@ impl<'a> ParseBuffer<'a> {
/// }
/// ```
pub fn peek2<T: Peek>(&self, token: T) -> bool {
let ahead = self.fork();
skip(&ahead) && ahead.peek(token)
let _ = token;
self.cursor().skip().map_or(false, T::Token::peek)
}
/// Looks at the third-next token in the parse stream.
pub fn peek3<T: Peek>(&self, token: T) -> bool {
let ahead = self.fork();
skip(&ahead) && skip(&ahead) && ahead.peek(token)
let _ = token;
self.cursor()
.skip()
.and_then(Cursor::skip)
.map_or(false, T::Token::peek)
}
/// Parses zero or more occurrences of `T` separated by punctuation of type
@ -615,12 +674,10 @@ impl<'a> ParseBuffer<'a> {
/// }
/// }
/// #
/// # fn main() {
/// # let input = quote! {
/// # struct S(A, B);
/// # };
/// # syn::parse2::<TupleStruct>(input).unwrap();
/// # }
/// # let input = quote! {
/// # struct S(A, B);
/// # };
/// # syn::parse2::<TupleStruct>(input).unwrap();
/// ```
pub fn parse_terminated<T, P: Parse>(
&self,
@ -847,8 +904,8 @@ impl<'a> ParseBuffer<'a> {
cell: self.cell.clone(),
marker: PhantomData,
// Not the parent's unexpected. Nothing cares whether the clone
// parses all the way.
unexpected: Rc::new(Cell::new(None)),
// parses all the way unless we `advance_to`.
unexpected: Cell::new(Some(Rc::new(Cell::new(Unexpected::None)))),
}
}
@ -923,13 +980,11 @@ impl<'a> ParseBuffer<'a> {
/// # input.parse()
/// # }
/// #
/// # fn main() {
/// # use syn::parse::Parser;
/// # let remainder = remainder_after_skipping_past_next_at
/// # .parse_str("a @ b c")
/// # .unwrap();
/// # assert_eq!(remainder.to_string(), "b c");
/// # }
/// # use syn::parse::Parser;
/// # let remainder = remainder_after_skipping_past_next_at
/// # .parse_str("a @ b c")
/// # .unwrap();
/// # assert_eq!(remainder.to_string(), "b c");
/// ```
pub fn step<F, R>(&self, function: F) -> Result<R>
where
@ -961,6 +1016,18 @@ impl<'a> ParseBuffer<'a> {
Ok(node)
}
/// Returns the `Span` of the next token in the parse stream, or
/// `Span::call_site()` if this parse stream has completely exhausted its
/// input `TokenStream`.
pub fn span(&self) -> Span {
let cursor = self.cursor();
if cursor.eof() {
self.scope
} else {
crate::buffer::open_span_of_group(cursor)
}
}
/// Provides low-level access to the token representation underlying this
/// parse stream.
///
@ -971,7 +1038,7 @@ impl<'a> ParseBuffer<'a> {
}
fn check_unexpected(&self) -> Result<()> {
match self.unexpected.get() {
match inner_unexpected(self).1 {
Some(span) => Err(Error::new(span, "unexpected token")),
None => Ok(()),
}
@ -1048,7 +1115,7 @@ impl Parse for Literal {
///
/// [module documentation]: self
///
/// *This trait is available if Syn is built with the `"parsing"` feature.*
/// *This trait is available only if Syn is built with the `"parsing"` feature.*
pub trait Parser: Sized {
type Output;
@ -1063,7 +1130,7 @@ pub trait Parser: Sized {
/// This function will check that the input is fully parsed. If there are
/// any unparsed tokens at the end of the stream, an error is returned.
///
/// *This method is available if Syn is built with both the `"parsing"` and
/// *This method is available only if Syn is built with both the `"parsing"` and
/// `"proc-macro"` features.*
#[cfg(all(
not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
@ -1088,6 +1155,7 @@ pub trait Parser: Sized {
// Not public API.
#[doc(hidden)]
#[cfg(any(feature = "full", feature = "derive"))]
fn __parse_scoped(self, scope: Span, tokens: TokenStream) -> Result<Self::Output> {
let _ = scope;
self.parse2(tokens)
@ -1095,6 +1163,7 @@ pub trait Parser: Sized {
// Not public API.
#[doc(hidden)]
#[cfg(any(feature = "full", feature = "derive"))]
fn __parse_stream(self, input: ParseStream) -> Result<Self::Output> {
input.parse().and_then(|tokens| self.parse2(tokens))
}
@ -1103,7 +1172,7 @@ pub trait Parser: Sized {
fn tokens_to_parse_buffer(tokens: &TokenBuffer) -> ParseBuffer {
let scope = Span::call_site();
let cursor = tokens.begin();
let unexpected = Rc::new(Cell::new(None));
let unexpected = Rc::new(Cell::new(Unexpected::None));
new_parse_buffer(scope, cursor, unexpected)
}
@ -1118,38 +1187,42 @@ where
let state = tokens_to_parse_buffer(&buf);
let node = self(&state)?;
state.check_unexpected()?;
if state.is_empty() {
Ok(node)
if let Some(unexpected_span) = span_of_unexpected_ignoring_nones(state.cursor()) {
Err(Error::new(unexpected_span, "unexpected token"))
} else {
Err(state.error("unexpected token"))
Ok(node)
}
}
#[doc(hidden)]
#[cfg(any(feature = "full", feature = "derive"))]
fn __parse_scoped(self, scope: Span, tokens: TokenStream) -> Result<Self::Output> {
let buf = TokenBuffer::new2(tokens);
let cursor = buf.begin();
let unexpected = Rc::new(Cell::new(None));
let unexpected = Rc::new(Cell::new(Unexpected::None));
let state = new_parse_buffer(scope, cursor, unexpected);
let node = self(&state)?;
state.check_unexpected()?;
if state.is_empty() {
Ok(node)
if let Some(unexpected_span) = span_of_unexpected_ignoring_nones(state.cursor()) {
Err(Error::new(unexpected_span, "unexpected token"))
} else {
Err(state.error("unexpected token"))
Ok(node)
}
}
#[doc(hidden)]
#[cfg(any(feature = "full", feature = "derive"))]
fn __parse_stream(self, input: ParseStream) -> Result<Self::Output> {
self(input)
}
}
#[cfg(any(feature = "full", feature = "derive"))]
pub(crate) fn parse_scoped<F: Parser>(f: F, scope: Span, tokens: TokenStream) -> Result<F::Output> {
f.__parse_scoped(scope, tokens)
}
#[cfg(any(feature = "full", feature = "derive"))]
pub(crate) fn parse_stream<F: Parser>(f: F, input: ParseStream) -> Result<F::Output> {
f.__parse_stream(input)
}
@ -1160,8 +1233,8 @@ pub(crate) fn parse_stream<F: Parser>(f: F, input: ParseStream) -> Result<F::Out
/// provided any attribute args.
///
/// ```
/// extern crate proc_macro;
///
/// # extern crate proc_macro;
/// #
/// use proc_macro::TokenStream;
/// use syn::parse_macro_input;
/// use syn::parse::Nothing;

32
third_party/rust/syn/src/parse_macro_input.rs поставляемый
Просмотреть файл

@ -16,8 +16,8 @@
/// #\[proc_macro_attribute\] attribute.
///
/// ```
/// extern crate proc_macro;
///
/// # extern crate proc_macro;
/// #
/// use proc_macro::TokenStream;
/// use syn::{parse_macro_input, Result};
/// use syn::parse::{Parse, ParseStream};
@ -43,7 +43,31 @@
/// # "".parse().unwrap()
/// }
/// ```
#[macro_export(local_inner_macros)]
///
/// <br>
///
/// # Expansion
///
/// `parse_macro_input!($variable as $Type)` expands to something like:
///
/// ```no_run
/// # extern crate proc_macro;
/// #
/// # macro_rules! doc_test {
/// # ($variable:ident as $Type:ty) => {
/// match syn::parse::<$Type>($variable) {
/// Ok(syntax_tree) => syntax_tree,
/// Err(err) => return proc_macro::TokenStream::from(err.to_compile_error()),
/// }
/// # };
/// # }
/// #
/// # fn test(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
/// # let _ = doc_test!(input as syn::Ident);
/// # proc_macro::TokenStream::new()
/// # }
/// ```
#[macro_export]
macro_rules! parse_macro_input {
($tokenstream:ident as $ty:ty) => {
match $crate::parse_macro_input::parse::<$ty>($tokenstream) {
@ -54,7 +78,7 @@ macro_rules! parse_macro_input {
}
};
($tokenstream:ident) => {
parse_macro_input!($tokenstream as _)
$crate::parse_macro_input!($tokenstream as _)
};
}

15
third_party/rust/syn/src/parse_quote.rs поставляемый
Просмотреть файл

@ -24,7 +24,7 @@
/// }
/// ```
///
/// *This macro is available if Syn is built with the `"parsing"` feature,
/// *This macro is available only if Syn is built with the `"parsing"` feature,
/// although interpolation of syntax tree nodes into the quoted tokens is only
/// supported if Syn is built with the `"printing"` feature as well.*
///
@ -56,8 +56,10 @@
/// or inner like `#![...]`
/// - [`Punctuated<T, P>`] — parses zero or more `T` separated by punctuation
/// `P` with optional trailing punctuation
/// - [`Vec<Stmt>`] — parses the same as `Block::parse_within`
///
/// [`Punctuated<T, P>`]: punctuated::Punctuated
/// [`Vec<Stmt>`]: Block::parse_within
///
/// # Panics
///
@ -67,7 +69,7 @@
//
// TODO: allow Punctuated to be inferred as intra doc link, currently blocked on
// https://github.com/rust-lang/rust/issues/62834
#[macro_export(local_inner_macros)]
#[macro_export]
macro_rules! parse_quote {
($($tt:tt)*) => {
$crate::parse_quote::parse(
@ -112,6 +114,8 @@ impl<T: Parse> ParseQuote for T {
use crate::punctuated::Punctuated;
#[cfg(any(feature = "full", feature = "derive"))]
use crate::{attr, Attribute};
#[cfg(feature = "full")]
use crate::{Block, Stmt};
#[cfg(any(feature = "full", feature = "derive"))]
impl ParseQuote for Attribute {
@ -129,3 +133,10 @@ impl<T: Parse, P: Parse> ParseQuote for Punctuated<T, P> {
Self::parse_terminated(input)
}
}
#[cfg(feature = "full")]
impl ParseQuote for Vec<Stmt> {
fn parse(input: ParseStream) -> Result<Self> {
Block::parse_within(input)
}
}

313
third_party/rust/syn/src/pat.rs поставляемый
Просмотреть файл

@ -1,16 +1,12 @@
use super::*;
use crate::punctuated::Punctuated;
#[cfg(feature = "extra-traits")]
use crate::tt::TokenStreamHelper;
use proc_macro2::TokenStream;
#[cfg(feature = "extra-traits")]
use std::hash::{Hash, Hasher};
ast_enum_of_structs! {
/// A pattern in a local binding, function signature, match expression, or
/// various other places.
///
/// *This type is available if Syn is built with the `"full"` feature.*
/// *This type is available only if Syn is built with the `"full"` feature.*
///
/// # Syntax tree enum
///
@ -20,7 +16,7 @@ ast_enum_of_structs! {
//
// TODO: change syntax-tree-enum link to an intra rustdoc link, currently
// blocked on https://github.com/rust-lang/rust/issues/62833
pub enum Pat #manual_extra_traits {
pub enum Pat {
/// A box pattern: `box v`.
Box(PatBox),
@ -86,7 +82,7 @@ ast_enum_of_structs! {
ast_struct! {
/// A box pattern: `box v`.
///
/// *This type is available if Syn is built with the `"full"` feature.*
/// *This type is available only if Syn is built with the `"full"` feature.*
pub struct PatBox {
pub attrs: Vec<Attribute>,
pub box_token: Token![box],
@ -97,7 +93,10 @@ ast_struct! {
ast_struct! {
/// A pattern that binds a new variable: `ref mut binding @ SUBPATTERN`.
///
/// *This type is available if Syn is built with the `"full"` feature.*
/// It may also be a unit struct or struct variant (e.g. `None`), or a
/// constant; these cannot be distinguished syntactically.
///
/// *This type is available only if Syn is built with the `"full"` feature.*
pub struct PatIdent {
pub attrs: Vec<Attribute>,
pub by_ref: Option<Token![ref]>,
@ -113,7 +112,7 @@ ast_struct! {
/// This holds an `Expr` rather than a `Lit` because negative numbers
/// are represented as an `Expr::Unary`.
///
/// *This type is available if Syn is built with the `"full"` feature.*
/// *This type is available only if Syn is built with the `"full"` feature.*
pub struct PatLit {
pub attrs: Vec<Attribute>,
pub expr: Box<Expr>,
@ -123,7 +122,7 @@ ast_struct! {
ast_struct! {
/// A macro in pattern position.
///
/// *This type is available if Syn is built with the `"full"` feature.*
/// *This type is available only if Syn is built with the `"full"` feature.*
pub struct PatMacro {
pub attrs: Vec<Attribute>,
pub mac: Macro,
@ -133,7 +132,7 @@ ast_struct! {
ast_struct! {
/// A pattern that matches any one of a set of cases.
///
/// *This type is available if Syn is built with the `"full"` feature.*
/// *This type is available only if Syn is built with the `"full"` feature.*
pub struct PatOr {
pub attrs: Vec<Attribute>,
pub leading_vert: Option<Token![|]>,
@ -150,7 +149,7 @@ ast_struct! {
/// `<A>::B::C` and `<A as Trait>::B::C` can only legally refer to
/// associated constants.
///
/// *This type is available if Syn is built with the `"full"` feature.*
/// *This type is available only if Syn is built with the `"full"` feature.*
pub struct PatPath {
pub attrs: Vec<Attribute>,
pub qself: Option<QSelf>,
@ -161,7 +160,7 @@ ast_struct! {
ast_struct! {
/// A range pattern: `1..=2`.
///
/// *This type is available if Syn is built with the `"full"` feature.*
/// *This type is available only if Syn is built with the `"full"` feature.*
pub struct PatRange {
pub attrs: Vec<Attribute>,
pub lo: Box<Expr>,
@ -173,7 +172,7 @@ ast_struct! {
ast_struct! {
/// A reference pattern: `&mut var`.
///
/// *This type is available if Syn is built with the `"full"` feature.*
/// *This type is available only if Syn is built with the `"full"` feature.*
pub struct PatReference {
pub attrs: Vec<Attribute>,
pub and_token: Token![&],
@ -185,7 +184,7 @@ ast_struct! {
ast_struct! {
/// The dots in a tuple or slice pattern: `[0, 1, ..]`
///
/// *This type is available if Syn is built with the `"full"` feature.*
/// *This type is available only if Syn is built with the `"full"` feature.*
pub struct PatRest {
pub attrs: Vec<Attribute>,
pub dot2_token: Token![..],
@ -195,7 +194,7 @@ ast_struct! {
ast_struct! {
/// A dynamically sized slice pattern: `[a, b, ref i @ .., y, z]`.
///
/// *This type is available if Syn is built with the `"full"` feature.*
/// *This type is available only if Syn is built with the `"full"` feature.*
pub struct PatSlice {
pub attrs: Vec<Attribute>,
pub bracket_token: token::Bracket,
@ -206,7 +205,7 @@ ast_struct! {
ast_struct! {
/// A struct or struct variant pattern: `Variant { x, y, .. }`.
///
/// *This type is available if Syn is built with the `"full"` feature.*
/// *This type is available only if Syn is built with the `"full"` feature.*
pub struct PatStruct {
pub attrs: Vec<Attribute>,
pub path: Path,
@ -219,7 +218,7 @@ ast_struct! {
ast_struct! {
/// A tuple pattern: `(a, b)`.
///
/// *This type is available if Syn is built with the `"full"` feature.*
/// *This type is available only if Syn is built with the `"full"` feature.*
pub struct PatTuple {
pub attrs: Vec<Attribute>,
pub paren_token: token::Paren,
@ -230,7 +229,7 @@ ast_struct! {
ast_struct! {
/// A tuple struct or tuple variant pattern: `Variant(x, y, .., z)`.
///
/// *This type is available if Syn is built with the `"full"` feature.*
/// *This type is available only if Syn is built with the `"full"` feature.*
pub struct PatTupleStruct {
pub attrs: Vec<Attribute>,
pub path: Path,
@ -241,7 +240,7 @@ ast_struct! {
ast_struct! {
/// A type ascription pattern: `foo: f64`.
///
/// *This type is available if Syn is built with the `"full"` feature.*
/// *This type is available only if Syn is built with the `"full"` feature.*
pub struct PatType {
pub attrs: Vec<Attribute>,
pub pat: Box<Pat>,
@ -253,7 +252,7 @@ ast_struct! {
ast_struct! {
/// A pattern that matches any value: `_`.
///
/// *This type is available if Syn is built with the `"full"` feature.*
/// *This type is available only if Syn is built with the `"full"` feature.*
pub struct PatWild {
pub attrs: Vec<Attribute>,
pub underscore_token: Token![_],
@ -266,7 +265,7 @@ ast_struct! {
/// Patterns like the fields of Foo `{ x, ref y, ref mut z }` are treated
/// the same as `x: x, y: ref y, z: ref mut z` but there is no colon token.
///
/// *This type is available if Syn is built with the `"full"` feature.*
/// *This type is available only if Syn is built with the `"full"` feature.*
pub struct FieldPat {
pub attrs: Vec<Attribute>,
pub member: Member,
@ -275,122 +274,17 @@ ast_struct! {
}
}
#[cfg(feature = "extra-traits")]
impl Eq for Pat {}
#[cfg(feature = "extra-traits")]
impl PartialEq for Pat {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(Pat::Box(this), Pat::Box(other)) => this == other,
(Pat::Ident(this), Pat::Ident(other)) => this == other,
(Pat::Lit(this), Pat::Lit(other)) => this == other,
(Pat::Macro(this), Pat::Macro(other)) => this == other,
(Pat::Or(this), Pat::Or(other)) => this == other,
(Pat::Path(this), Pat::Path(other)) => this == other,
(Pat::Range(this), Pat::Range(other)) => this == other,
(Pat::Reference(this), Pat::Reference(other)) => this == other,
(Pat::Rest(this), Pat::Rest(other)) => this == other,
(Pat::Slice(this), Pat::Slice(other)) => this == other,
(Pat::Struct(this), Pat::Struct(other)) => this == other,
(Pat::Tuple(this), Pat::Tuple(other)) => this == other,
(Pat::TupleStruct(this), Pat::TupleStruct(other)) => this == other,
(Pat::Type(this), Pat::Type(other)) => this == other,
(Pat::Verbatim(this), Pat::Verbatim(other)) => {
TokenStreamHelper(this) == TokenStreamHelper(other)
}
(Pat::Wild(this), Pat::Wild(other)) => this == other,
_ => false,
}
}
}
#[cfg(feature = "extra-traits")]
impl Hash for Pat {
fn hash<H>(&self, hash: &mut H)
where
H: Hasher,
{
match self {
Pat::Box(pat) => {
hash.write_u8(0);
pat.hash(hash);
}
Pat::Ident(pat) => {
hash.write_u8(1);
pat.hash(hash);
}
Pat::Lit(pat) => {
hash.write_u8(2);
pat.hash(hash);
}
Pat::Macro(pat) => {
hash.write_u8(3);
pat.hash(hash);
}
Pat::Or(pat) => {
hash.write_u8(4);
pat.hash(hash);
}
Pat::Path(pat) => {
hash.write_u8(5);
pat.hash(hash);
}
Pat::Range(pat) => {
hash.write_u8(6);
pat.hash(hash);
}
Pat::Reference(pat) => {
hash.write_u8(7);
pat.hash(hash);
}
Pat::Rest(pat) => {
hash.write_u8(8);
pat.hash(hash);
}
Pat::Slice(pat) => {
hash.write_u8(9);
pat.hash(hash);
}
Pat::Struct(pat) => {
hash.write_u8(10);
pat.hash(hash);
}
Pat::Tuple(pat) => {
hash.write_u8(11);
pat.hash(hash);
}
Pat::TupleStruct(pat) => {
hash.write_u8(12);
pat.hash(hash);
}
Pat::Type(pat) => {
hash.write_u8(13);
pat.hash(hash);
}
Pat::Verbatim(pat) => {
hash.write_u8(14);
TokenStreamHelper(pat).hash(hash);
}
Pat::Wild(pat) => {
hash.write_u8(15);
pat.hash(hash);
}
Pat::__Nonexhaustive => unreachable!(),
}
}
}
#[cfg(feature = "parsing")]
mod parsing {
pub mod parsing {
use super::*;
use crate::ext::IdentExt;
use crate::parse::{Parse, ParseStream, Result};
use crate::parse::{Parse, ParseBuffer, ParseStream, Result};
use crate::path;
impl Parse for Pat {
fn parse(input: ParseStream) -> Result<Self> {
let begin = input.fork();
let lookahead = input.lookahead1();
if lookahead.peek(Ident)
&& ({
@ -411,7 +305,6 @@ mod parsing {
|| lookahead.peek(Token![<])
|| input.peek(Token![Self])
|| input.peek(Token![super])
|| input.peek(Token![extern])
|| input.peek(Token![crate])
{
pat_path_or_macro_or_struct_or_range(input)
@ -434,7 +327,7 @@ mod parsing {
} else if lookahead.peek(token::Bracket) {
input.call(pat_slice).map(Pat::Slice)
} else if lookahead.peek(Token![..]) && !input.peek(Token![...]) {
input.call(pat_rest).map(Pat::Rest)
pat_range_half_open(input, begin)
} else {
Err(lookahead.error())
}
@ -442,10 +335,11 @@ mod parsing {
}
fn pat_path_or_macro_or_struct_or_range(input: ParseStream) -> Result<Pat> {
let begin = input.fork();
let (qself, path) = path::parsing::qpath(input, true)?;
if input.peek(Token![..]) {
return pat_range(input, qself, path).map(Pat::Range);
return pat_range(input, begin, qself, path);
}
if qself.is_some() {
@ -487,7 +381,7 @@ mod parsing {
} else if input.peek(token::Paren) {
pat_tuple_struct(input, path).map(Pat::TupleStruct)
} else if input.peek(Token![..]) {
pat_range(input, qself, path).map(Pat::Range)
pat_range(input, begin, qself, path)
} else {
Ok(Pat::Path(PatPath {
attrs: Vec::new(),
@ -546,7 +440,7 @@ mod parsing {
while !content.is_empty() && !content.peek(Token![..]) {
let value = content.call(field_pat)?;
fields.push_value(value);
if !content.peek(Token![,]) {
if content.is_empty() {
break;
}
let punct: Token![,] = content.parse()?;
@ -578,6 +472,7 @@ mod parsing {
}
fn field_pat(input: ParseStream) -> Result<FieldPat> {
let attrs = input.call(Attribute::parse_outer)?;
let boxed: Option<Token![box]> = input.parse()?;
let by_ref: Option<Token![ref]> = input.parse()?;
let mutability: Option<Token![mut]> = input.parse()?;
@ -587,10 +482,10 @@ mod parsing {
|| member.is_unnamed()
{
return Ok(FieldPat {
attrs: Vec::new(),
attrs,
member,
colon_token: input.parse()?,
pat: input.parse()?,
pat: Box::new(multi_pat(input)?),
});
}
@ -610,30 +505,57 @@ mod parsing {
if let Some(boxed) = boxed {
pat = Pat::Box(PatBox {
attrs: Vec::new(),
pat: Box::new(pat),
box_token: boxed,
pat: Box::new(pat),
});
}
Ok(FieldPat {
attrs,
member: Member::Named(ident),
pat: Box::new(pat),
attrs: Vec::new(),
colon_token: None,
pat: Box::new(pat),
})
}
fn pat_range(input: ParseStream, qself: Option<QSelf>, path: Path) -> Result<PatRange> {
Ok(PatRange {
attrs: Vec::new(),
lo: Box::new(Expr::Path(ExprPath {
fn pat_range(
input: ParseStream,
begin: ParseBuffer,
qself: Option<QSelf>,
path: Path,
) -> Result<Pat> {
let limits: RangeLimits = input.parse()?;
let hi = input.call(pat_lit_expr)?;
if let Some(hi) = hi {
Ok(Pat::Range(PatRange {
attrs: Vec::new(),
qself,
path,
})),
limits: input.parse()?,
hi: input.call(pat_lit_expr)?,
})
lo: Box::new(Expr::Path(ExprPath {
attrs: Vec::new(),
qself,
path,
})),
limits,
hi,
}))
} else {
Ok(Pat::Verbatim(verbatim::between(begin, input)))
}
}
fn pat_range_half_open(input: ParseStream, begin: ParseBuffer) -> Result<Pat> {
let limits: RangeLimits = input.parse()?;
let hi = input.call(pat_lit_expr)?;
if hi.is_some() {
Ok(Pat::Verbatim(verbatim::between(begin, input)))
} else {
match limits {
RangeLimits::HalfOpen(dot2_token) => Ok(Pat::Rest(PatRest {
attrs: Vec::new(),
dot2_token,
})),
RangeLimits::Closed(_) => Err(input.error("expected range upper bound")),
}
}
}
fn pat_tuple(input: ParseStream) -> Result<PatTuple> {
@ -642,7 +564,7 @@ mod parsing {
let mut elems = Punctuated::new();
while !content.is_empty() {
let value: Pat = content.parse()?;
let value = multi_pat(&content)?;
elems.push_value(value);
if content.is_empty() {
break;
@ -668,14 +590,21 @@ mod parsing {
}
fn pat_lit_or_range(input: ParseStream) -> Result<Pat> {
let lo = input.call(pat_lit_expr)?;
let begin = input.fork();
let lo = input.call(pat_lit_expr)?.unwrap();
if input.peek(Token![..]) {
Ok(Pat::Range(PatRange {
attrs: Vec::new(),
lo,
limits: input.parse()?,
hi: input.call(pat_lit_expr)?,
}))
let limits: RangeLimits = input.parse()?;
let hi = input.call(pat_lit_expr)?;
if let Some(hi) = hi {
Ok(Pat::Range(PatRange {
attrs: Vec::new(),
lo,
limits,
hi,
}))
} else {
Ok(Pat::Verbatim(verbatim::between(begin, input)))
}
} else {
Ok(Pat::Lit(PatLit {
attrs: Vec::new(),
@ -684,7 +613,17 @@ mod parsing {
}
}
fn pat_lit_expr(input: ParseStream) -> Result<Box<Expr>> {
fn pat_lit_expr(input: ParseStream) -> Result<Option<Box<Expr>>> {
if input.is_empty()
|| input.peek(Token![|])
|| input.peek(Token![=>])
|| input.peek(Token![:]) && !input.peek(Token![::])
|| input.peek(Token![,])
|| input.peek(Token![;])
{
return Ok(None);
}
let neg: Option<Token![-]> = input.parse()?;
let lookahead = input.lookahead1();
@ -696,7 +635,6 @@ mod parsing {
|| lookahead.peek(Token![self])
|| lookahead.peek(Token![Self])
|| lookahead.peek(Token![super])
|| lookahead.peek(Token![extern])
|| lookahead.peek(Token![crate])
{
Expr::Path(input.parse()?)
@ -704,7 +642,7 @@ mod parsing {
return Err(lookahead.error());
};
Ok(Box::new(if let Some(neg) = neg {
Ok(Some(Box::new(if let Some(neg) = neg {
Expr::Unary(ExprUnary {
attrs: Vec::new(),
op: UnOp::Neg(neg),
@ -712,7 +650,7 @@ mod parsing {
})
} else {
expr
}))
})))
}
fn pat_slice(input: ParseStream) -> Result<PatSlice> {
@ -721,7 +659,7 @@ mod parsing {
let mut elems = Punctuated::new();
while !content.is_empty() {
let value: Pat = content.parse()?;
let value = multi_pat(&content)?;
elems.push_value(value);
if content.is_empty() {
break;
@ -737,11 +675,35 @@ mod parsing {
})
}
fn pat_rest(input: ParseStream) -> Result<PatRest> {
Ok(PatRest {
attrs: Vec::new(),
dot2_token: input.parse()?,
})
pub fn multi_pat(input: ParseStream) -> Result<Pat> {
multi_pat_impl(input, None)
}
pub fn multi_pat_with_leading_vert(input: ParseStream) -> Result<Pat> {
let leading_vert: Option<Token![|]> = input.parse()?;
multi_pat_impl(input, leading_vert)
}
fn multi_pat_impl(input: ParseStream, leading_vert: Option<Token![|]>) -> Result<Pat> {
let mut pat: Pat = input.parse()?;
if leading_vert.is_some()
|| input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=])
{
let mut cases = Punctuated::new();
cases.push_value(pat);
while input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=]) {
let punct = input.parse()?;
cases.push_punct(punct);
let pat: Pat = input.parse()?;
cases.push_value(pat);
}
pat = Pat::Or(PatOr {
attrs: Vec::new(),
leading_vert,
cases,
});
}
Ok(pat)
}
}
@ -756,12 +718,14 @@ mod printing {
impl ToTokens for PatWild {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.underscore_token.to_tokens(tokens);
}
}
impl ToTokens for PatIdent {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.by_ref.to_tokens(tokens);
self.mutability.to_tokens(tokens);
self.ident.to_tokens(tokens);
@ -774,6 +738,7 @@ mod printing {
impl ToTokens for PatStruct {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.path.to_tokens(tokens);
self.brace_token.surround(tokens, |tokens| {
self.fields.to_tokens(tokens);
@ -788,6 +753,7 @@ mod printing {
impl ToTokens for PatTupleStruct {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.path.to_tokens(tokens);
self.pat.to_tokens(tokens);
}
@ -804,12 +770,14 @@ mod printing {
impl ToTokens for PatPath {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
private::print_path(tokens, &self.qself, &self.path);
}
}
impl ToTokens for PatTuple {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.paren_token.surround(tokens, |tokens| {
self.elems.to_tokens(tokens);
});
@ -818,6 +786,7 @@ mod printing {
impl ToTokens for PatBox {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.box_token.to_tokens(tokens);
self.pat.to_tokens(tokens);
}
@ -825,6 +794,7 @@ mod printing {
impl ToTokens for PatReference {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.and_token.to_tokens(tokens);
self.mutability.to_tokens(tokens);
self.pat.to_tokens(tokens);
@ -833,18 +803,21 @@ mod printing {
impl ToTokens for PatRest {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.dot2_token.to_tokens(tokens);
}
}
impl ToTokens for PatLit {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.expr.to_tokens(tokens);
}
}
impl ToTokens for PatRange {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.lo.to_tokens(tokens);
match &self.limits {
RangeLimits::HalfOpen(t) => t.to_tokens(tokens),
@ -856,6 +829,7 @@ mod printing {
impl ToTokens for PatSlice {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.bracket_token.surround(tokens, |tokens| {
self.elems.to_tokens(tokens);
});
@ -864,12 +838,14 @@ mod printing {
impl ToTokens for PatMacro {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.mac.to_tokens(tokens);
}
}
impl ToTokens for PatOr {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.leading_vert.to_tokens(tokens);
self.cases.to_tokens(tokens);
}
@ -877,6 +853,7 @@ mod printing {
impl ToTokens for FieldPat {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
if let Some(colon_token) = &self.colon_token {
self.member.to_tokens(tokens);
colon_token.to_tokens(tokens);

33
third_party/rust/syn/src/path.rs поставляемый
Просмотреть файл

@ -2,9 +2,9 @@ use super::*;
use crate::punctuated::Punctuated;
ast_struct! {
/// A path at which a named item is exported: `std::collections::HashMap`.
/// A path at which a named item is exported (e.g. `std::collections::HashMap`).
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
pub struct Path {
pub leading_colon: Option<Token![::]>,
@ -29,7 +29,7 @@ where
ast_struct! {
/// A segment of a path together with any path arguments on that segment.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
pub struct PathSegment {
pub ident: Ident,
@ -52,7 +52,7 @@ where
ast_enum! {
/// Angle bracketed or parenthesized arguments of a path segment.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
///
/// ## Angle bracketed
@ -98,7 +98,7 @@ impl PathArguments {
ast_enum! {
/// An individual generic argument, like `'a`, `T`, or `Item = T`.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
pub enum GenericArgument {
/// A lifetime argument.
@ -122,7 +122,7 @@ ast_struct! {
/// Angle bracketed arguments of a path segment: the `<K, V>` in `HashMap<K,
/// V>`.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
pub struct AngleBracketedGenericArguments {
pub colon2_token: Option<Token![::]>,
@ -135,7 +135,7 @@ ast_struct! {
ast_struct! {
/// A binding (equality constraint) on an associated type: `Item = u8`.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
pub struct Binding {
pub ident: Ident,
@ -147,7 +147,7 @@ ast_struct! {
ast_struct! {
/// An associated type bound: `Iterator<Item: Display>`.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
pub struct Constraint {
pub ident: Ident,
@ -160,7 +160,7 @@ ast_struct! {
/// Arguments of a function path segment: the `(A, B) -> C` in `Fn(A,B) ->
/// C`.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
pub struct ParenthesizedGenericArguments {
pub paren_token: token::Paren,
@ -189,7 +189,7 @@ ast_struct! {
/// ty position = 0
/// ```
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
pub struct QSelf {
pub lt_token: Token![<],
@ -291,11 +291,7 @@ pub mod parsing {
impl PathSegment {
fn parse_helper(input: ParseStream, expr_style: bool) -> Result<Self> {
if input.peek(Token![super])
|| input.peek(Token![self])
|| input.peek(Token![crate])
|| input.peek(Token![extern])
{
if input.peek(Token![super]) || input.peek(Token![self]) || input.peek(Token![crate]) {
let ident = input.call(Ident::parse_any)?;
return Ok(PathSegment::from(ident));
}
@ -358,7 +354,7 @@ pub mod parsing {
impl Path {
/// Parse a `Path` containing no path arguments on any of its segments.
///
/// *This function is available if Syn is built with the `"parsing"`
/// *This function is available only if Syn is built with the `"parsing"`
/// feature.*
///
/// # Example
@ -400,7 +396,6 @@ pub mod parsing {
&& !input.peek(Token![self])
&& !input.peek(Token![Self])
&& !input.peek(Token![crate])
&& !input.peek(Token![extern])
{
break;
}
@ -433,7 +428,7 @@ pub mod parsing {
/// path arguments, and
/// - the ident of the first path segment is equal to the given one.
///
/// *This function is available if Syn is built with the `"parsing"`
/// *This function is available only if Syn is built with the `"parsing"`
/// feature.*
///
/// # Example
@ -472,7 +467,7 @@ pub mod parsing {
/// - the first path segment has no angle bracketed or parenthesized
/// path arguments.
///
/// *This function is available if Syn is built with the `"parsing"`
/// *This function is available only if Syn is built with the `"parsing"`
/// feature.*
pub fn get_ident(&self) -> Option<&Ident> {
if self.leading_colon.is_none()

123
third_party/rust/syn/src/punctuated.rs поставляемый
Просмотреть файл

@ -22,6 +22,8 @@
#[cfg(feature = "extra-traits")]
use std::fmt::{self, Debug};
#[cfg(feature = "extra-traits")]
use std::hash::{Hash, Hasher};
#[cfg(any(feature = "full", feature = "derive"))]
use std::iter;
use std::iter::FromIterator;
@ -41,8 +43,6 @@ use crate::token::Token;
/// Refer to the [module documentation] for details about punctuated sequences.
///
/// [module documentation]: self
#[cfg_attr(feature = "extra-traits", derive(Eq, PartialEq, Hash))]
#[cfg_attr(feature = "clone-impls", derive(Clone))]
pub struct Punctuated<T, P> {
inner: Vec<(T, P)>,
last: Option<Box<T>>,
@ -76,22 +76,19 @@ impl<T, P> Punctuated<T, P> {
self.iter().next()
}
/// Mutably borrows the first element in this sequence.
pub fn first_mut(&mut self) -> Option<&mut T> {
self.iter_mut().next()
}
/// Borrows the last element in this sequence.
pub fn last(&self) -> Option<&T> {
if self.last.is_some() {
self.last.as_ref().map(Box::as_ref)
} else {
self.inner.last().map(|pair| &pair.0)
}
self.iter().next_back()
}
/// Mutably borrows the last element in this sequence.
pub fn last_mut(&mut self) -> Option<&mut T> {
if self.last.is_some() {
self.last.as_mut().map(Box::as_mut)
} else {
self.inner.last_mut().map(|pair| &mut pair.0)
}
self.iter_mut().next_back()
}
/// Returns an iterator over borrowed syntax tree nodes of type `&T`.
@ -230,13 +227,19 @@ impl<T, P> Punctuated<T, P> {
}
}
/// Clears the sequence of all values and punctuation, making it empty.
pub fn clear(&mut self) {
self.inner.clear();
self.last = None;
}
/// Parses zero or more occurrences of `T` separated by punctuation of type
/// `P`, with optional trailing punctuation.
///
/// Parsing continues until the end of this parse stream. The entire content
/// of this parse stream must consist of `T` and `P`.
///
/// *This function is available if Syn is built with the `"parsing"`
/// *This function is available only if Syn is built with the `"parsing"`
/// feature.*
#[cfg(feature = "parsing")]
pub fn parse_terminated(input: ParseStream) -> Result<Self>
@ -256,7 +259,7 @@ impl<T, P> Punctuated<T, P> {
///
/// [`parse_terminated`]: Punctuated::parse_terminated
///
/// *This function is available if Syn is built with the `"parsing"`
/// *This function is available only if Syn is built with the `"parsing"`
/// feature.*
#[cfg(feature = "parsing")]
pub fn parse_terminated_with(
@ -292,7 +295,7 @@ impl<T, P> Punctuated<T, P> {
/// is not followed by a `P`, even if there are remaining tokens in the
/// stream.
///
/// *This function is available if Syn is built with the `"parsing"`
/// *This function is available only if Syn is built with the `"parsing"`
/// feature.*
#[cfg(feature = "parsing")]
pub fn parse_separated_nonempty(input: ParseStream) -> Result<Self>
@ -312,7 +315,7 @@ impl<T, P> Punctuated<T, P> {
///
/// [`parse_separated_nonempty`]: Punctuated::parse_separated_nonempty
///
/// *This function is available if Syn is built with the `"parsing"`
/// *This function is available only if Syn is built with the `"parsing"`
/// feature.*
#[cfg(feature = "parsing")]
pub fn parse_separated_nonempty_with(
@ -338,6 +341,53 @@ impl<T, P> Punctuated<T, P> {
}
}
#[cfg(feature = "clone-impls")]
impl<T, P> Clone for Punctuated<T, P>
where
T: Clone,
P: Clone,
{
fn clone(&self) -> Self {
Punctuated {
inner: self.inner.clone(),
last: self.last.clone(),
}
}
}
#[cfg(feature = "extra-traits")]
impl<T, P> Eq for Punctuated<T, P>
where
T: Eq,
P: Eq,
{
}
#[cfg(feature = "extra-traits")]
impl<T, P> PartialEq for Punctuated<T, P>
where
T: PartialEq,
P: PartialEq,
{
fn eq(&self, other: &Self) -> bool {
let Punctuated { inner, last } = self;
*inner == other.inner && *last == other.last
}
}
#[cfg(feature = "extra-traits")]
impl<T, P> Hash for Punctuated<T, P>
where
T: Hash,
P: Hash,
{
fn hash<H: Hasher>(&self, state: &mut H) {
let Punctuated { inner, last } = self;
inner.hash(state);
last.hash(state);
}
}
#[cfg(feature = "extra-traits")]
impl<T: Debug, P: Debug> Debug for Punctuated<T, P> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
@ -536,7 +586,6 @@ impl<'a, T, P> ExactSizeIterator for PairsMut<'a, T, P> {
/// Refer to the [module documentation] for details about punctuated sequences.
///
/// [module documentation]: self
#[derive(Clone)]
pub struct IntoPairs<T, P> {
inner: vec::IntoIter<(T, P)>,
last: option::IntoIter<T>,
@ -572,12 +621,24 @@ impl<T, P> ExactSizeIterator for IntoPairs<T, P> {
}
}
impl<T, P> Clone for IntoPairs<T, P>
where
T: Clone,
P: Clone,
{
fn clone(&self) -> Self {
IntoPairs {
inner: self.inner.clone(),
last: self.last.clone(),
}
}
}
/// An iterator over owned values of type `T`.
///
/// Refer to the [module documentation] for details about punctuated sequences.
///
/// [module documentation]: self
#[derive(Clone)]
pub struct IntoIter<T> {
inner: vec::IntoIter<T>,
}
@ -606,6 +667,17 @@ impl<T> ExactSizeIterator for IntoIter<T> {
}
}
impl<T> Clone for IntoIter<T>
where
T: Clone,
{
fn clone(&self) -> Self {
IntoIter {
inner: self.inner.clone(),
}
}
}
/// An iterator over borrowed values of type `&T`.
///
/// Refer to the [module documentation] for details about punctuated sequences.
@ -799,7 +871,6 @@ impl<'a, T: 'a, I: 'a> IterMutTrait<'a, T> for I where
/// Refer to the [module documentation] for details about punctuated sequences.
///
/// [module documentation]: self
#[cfg_attr(feature = "clone-impls", derive(Clone))]
pub enum Pair<T, P> {
Punctuated(T, P),
End(T),
@ -856,6 +927,20 @@ impl<T, P> Pair<T, P> {
}
}
#[cfg(feature = "clone-impls")]
impl<T, P> Clone for Pair<T, P>
where
T: Clone,
P: Clone,
{
fn clone(&self) -> Self {
match self {
Pair::Punctuated(t, p) => Pair::Punctuated(t.clone(), p.clone()),
Pair::End(t) => Pair::End(t.clone()),
}
}
}
impl<T, P> Index<usize> for Punctuated<T, P> {
type Output = T;

42
third_party/rust/syn/src/reserved.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,42 @@
// Type for a syntax tree node that is reserved for future use.
//
// For example ExprReference contains a field `raw` of type Reserved. If `&raw
// place` syntax becomes a thing as per https://github.com/rust-lang/rfcs/pull/2582,
// we can backward compatibly change `raw`'s type to Option<Token![raw]> without
// the possibility of breaking any code.
use proc_macro2::Span;
use std::marker::PhantomData;
#[cfg(feature = "extra-traits")]
use std::fmt::{self, Debug};
ast_struct! {
pub struct Reserved {
_private: PhantomData<Span>,
}
}
impl Default for Reserved {
fn default() -> Self {
Reserved {
_private: PhantomData,
}
}
}
#[cfg(feature = "clone-impls")]
impl Clone for Reserved {
fn clone(&self) -> Self {
Reserved {
_private: self._private,
}
}
}
#[cfg(feature = "extra-traits")]
impl Debug for Reserved {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.debug_struct("Reserved").finish()
}
}

4
third_party/rust/syn/src/spanned.rs поставляемый
Просмотреть файл

@ -1,7 +1,7 @@
//! A trait that can provide the `Span` of the complete contents of a syntax
//! tree node.
//!
//! *This module is available if Syn is built with both the `"parsing"` and
//! *This module is available only if Syn is built with both the `"parsing"` and
//! `"printing"` features.*
//!
//! <br>
@ -97,7 +97,7 @@ use quote::spanned::Spanned as ToTokens;
///
/// [module documentation]: self
///
/// *This trait is available if Syn is built with both the `"parsing"` and
/// *This trait is available only if Syn is built with both the `"parsing"` and
/// `"printing"` features.*
pub trait Spanned {
/// Returns a `Span` covering the complete contents of this syntax tree

143
third_party/rust/syn/src/stmt.rs поставляемый
Просмотреть файл

@ -3,7 +3,7 @@ use super::*;
ast_struct! {
/// A braced block containing Rust statements.
///
/// *This type is available if Syn is built with the `"full"` feature.*
/// *This type is available only if Syn is built with the `"full"` feature.*
pub struct Block {
pub brace_token: token::Brace,
/// Statements in a block
@ -14,7 +14,7 @@ ast_struct! {
ast_enum! {
/// A statement, usually ending in a semicolon.
///
/// *This type is available if Syn is built with the `"full"` feature.*
/// *This type is available only if Syn is built with the `"full"` feature.*
pub enum Stmt {
/// A local (let) binding.
Local(Local),
@ -33,7 +33,7 @@ ast_enum! {
ast_struct! {
/// A local `let` binding: `let x: u64 = s.parse()?`.
///
/// *This type is available if Syn is built with the `"full"` feature.*
/// *This type is available only if Syn is built with the `"full"` feature.*
pub struct Local {
pub attrs: Vec<Attribute>,
pub let_token: Token![let],
@ -47,14 +47,15 @@ ast_struct! {
pub mod parsing {
use super::*;
use crate::parse::discouraged::Speculative;
use crate::parse::{Parse, ParseStream, Result};
use crate::punctuated::Punctuated;
use proc_macro2::TokenStream;
impl Block {
/// Parse the body of a block as zero or more statements, possibly
/// including one trailing expression.
///
/// *This function is available if Syn is built with the `"parsing"`
/// *This function is available only if Syn is built with the `"parsing"`
/// feature.*
///
/// # Example
@ -106,8 +107,8 @@ pub mod parsing {
pub fn parse_within(input: ParseStream) -> Result<Vec<Stmt>> {
let mut stmts = Vec::new();
loop {
while input.peek(Token![;]) {
input.parse::<Token![;]>()?;
while let Some(semi) = input.parse::<Option<Token![;]>>()? {
stmts.push(Stmt::Semi(Expr::Verbatim(TokenStream::new()), semi));
}
if input.is_empty() {
break;
@ -146,55 +147,55 @@ pub mod parsing {
}
fn parse_stmt(input: ParseStream, allow_nosemi: bool) -> Result<Stmt> {
// TODO: optimize using advance_to
let ahead = input.fork();
ahead.call(Attribute::parse_outer)?;
let mut attrs = input.call(Attribute::parse_outer)?;
if {
let ahead = ahead.fork();
// Only parse braces here; paren and bracket will get parsed as
// expression statements
ahead.call(Path::parse_mod_style).is_ok()
&& ahead.parse::<Token![!]>().is_ok()
&& (ahead.peek(token::Brace) || ahead.peek(Ident))
} {
stmt_mac(input)
} else if ahead.peek(Token![let]) {
stmt_local(input).map(Stmt::Local)
} else if ahead.peek(Token![pub])
|| ahead.peek(Token![crate]) && !ahead.peek2(Token![::])
|| ahead.peek(Token![extern]) && !ahead.peek2(Token![::])
|| ahead.peek(Token![use])
|| ahead.peek(Token![static]) && (ahead.peek2(Token![mut]) || ahead.peek2(Ident))
|| ahead.peek(Token![const])
|| ahead.peek(Token![unsafe]) && !ahead.peek2(token::Brace)
|| ahead.peek(Token![async])
&& (ahead.peek2(Token![unsafe])
|| ahead.peek2(Token![extern])
|| ahead.peek2(Token![fn]))
|| ahead.peek(Token![fn])
|| ahead.peek(Token![mod])
|| ahead.peek(Token![type])
|| ahead.peek(item::parsing::existential) && ahead.peek2(Token![type])
|| ahead.peek(Token![struct])
|| ahead.peek(Token![enum])
|| ahead.peek(Token![union]) && ahead.peek2(Ident)
|| ahead.peek(Token![auto]) && ahead.peek2(Token![trait])
|| ahead.peek(Token![trait])
|| ahead.peek(Token![default])
&& (ahead.peek2(Token![unsafe]) || ahead.peek2(Token![impl]))
|| ahead.peek(Token![impl])
|| ahead.peek(Token![macro])
// brace-style macros; paren and bracket macros get parsed as
// expression statements.
let ahead = input.fork();
if let Ok(path) = ahead.call(Path::parse_mod_style) {
if ahead.peek(Token![!]) && (ahead.peek2(token::Brace) || ahead.peek2(Ident)) {
input.advance_to(&ahead);
return stmt_mac(input, attrs, path);
}
}
if input.peek(Token![let]) {
stmt_local(input, attrs).map(Stmt::Local)
} else if input.peek(Token![pub])
|| input.peek(Token![crate]) && !input.peek2(Token![::])
|| input.peek(Token![extern])
|| input.peek(Token![use])
|| input.peek(Token![static]) && (input.peek2(Token![mut]) || input.peek2(Ident))
|| input.peek(Token![const])
|| input.peek(Token![unsafe]) && !input.peek2(token::Brace)
|| input.peek(Token![async])
&& (input.peek2(Token![unsafe])
|| input.peek2(Token![extern])
|| input.peek2(Token![fn]))
|| input.peek(Token![fn])
|| input.peek(Token![mod])
|| input.peek(Token![type])
|| input.peek(item::parsing::existential) && input.peek2(Token![type])
|| input.peek(Token![struct])
|| input.peek(Token![enum])
|| input.peek(Token![union]) && input.peek2(Ident)
|| input.peek(Token![auto]) && input.peek2(Token![trait])
|| input.peek(Token![trait])
|| input.peek(Token![default])
&& (input.peek2(Token![unsafe]) || input.peek2(Token![impl]))
|| input.peek(Token![impl])
|| input.peek(Token![macro])
{
input.parse().map(Stmt::Item)
let mut item: Item = input.parse()?;
attrs.extend(item.replace_attrs(Vec::new()));
item.replace_attrs(attrs);
Ok(Stmt::Item(item))
} else {
stmt_expr(input, allow_nosemi)
stmt_expr(input, allow_nosemi, attrs)
}
}
fn stmt_mac(input: ParseStream) -> Result<Stmt> {
let attrs = input.call(Attribute::parse_outer)?;
let path = input.call(Path::parse_mod_style)?;
fn stmt_mac(input: ParseStream, attrs: Vec<Attribute>, path: Path) -> Result<Stmt> {
let bang_token: Token![!] = input.parse()?;
let ident: Option<Ident> = input.parse()?;
let (delimiter, tokens) = mac::parse_delimiter(input)?;
@ -213,33 +214,12 @@ pub mod parsing {
})))
}
fn stmt_local(input: ParseStream) -> Result<Local> {
fn stmt_local(input: ParseStream, attrs: Vec<Attribute>) -> Result<Local> {
Ok(Local {
attrs: input.call(Attribute::parse_outer)?,
attrs,
let_token: input.parse()?,
pat: {
let leading_vert: Option<Token![|]> = input.parse()?;
let mut pat: Pat = input.parse()?;
if leading_vert.is_some()
|| input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=])
{
let mut cases = Punctuated::new();
cases.push_value(pat);
while input.peek(Token![|])
&& !input.peek(Token![||])
&& !input.peek(Token![|=])
{
let punct = input.parse()?;
cases.push_punct(punct);
let pat: Pat = input.parse()?;
cases.push_value(pat);
}
pat = Pat::Or(PatOr {
attrs: Vec::new(),
leading_vert,
cases,
});
}
let mut pat: Pat = pat::parsing::multi_pat_with_leading_vert(input)?;
if input.peek(Token![:]) {
let colon_token: Token![:] = input.parse()?;
let ty: Type = input.parse()?;
@ -265,12 +245,19 @@ pub mod parsing {
})
}
fn stmt_expr(input: ParseStream, allow_nosemi: bool) -> Result<Stmt> {
let mut attrs = input.call(Attribute::parse_outer)?;
fn stmt_expr(
input: ParseStream,
allow_nosemi: bool,
mut attrs: Vec<Attribute>,
) -> Result<Stmt> {
let mut e = expr::parsing::expr_early(input)?;
attrs.extend(e.replace_attrs(Vec::new()));
e.replace_attrs(attrs);
let mut attr_target = &mut e;
while let Expr::Binary(e) = attr_target {
attr_target = &mut e.left;
}
attrs.extend(attr_target.replace_attrs(Vec::new()));
attr_target.replace_attrs(attrs);
if input.peek(Token![;]) {
return Ok(Stmt::Semi(e, input.parse()?));

99
third_party/rust/syn/src/token.rs поставляемый
Просмотреть файл

@ -88,7 +88,6 @@
//! [Printing]: https://docs.rs/quote/1.0/quote/trait.ToTokens.html
//! [`Span`]: https://docs.rs/proc-macro2/1.0/proc_macro2/struct.Span.html
use std;
#[cfg(feature = "extra-traits")]
use std::cmp;
#[cfg(feature = "extra-traits")]
@ -97,13 +96,13 @@ use std::fmt::{self, Debug};
use std::hash::{Hash, Hasher};
use std::ops::{Deref, DerefMut};
#[cfg(feature = "parsing")]
use proc_macro2::Delimiter;
#[cfg(any(feature = "parsing", feature = "printing"))]
use proc_macro2::Ident;
use proc_macro2::Span;
#[cfg(feature = "printing")]
use proc_macro2::TokenStream;
#[cfg(feature = "parsing")]
use proc_macro2::{Delimiter, Literal, Punct, TokenTree};
#[cfg(feature = "printing")]
use quote::{ToTokens, TokenStreamExt};
@ -112,10 +111,8 @@ use self::private::WithSpan;
use crate::buffer::Cursor;
#[cfg(feature = "parsing")]
use crate::error::Result;
#[cfg(any(feature = "full", feature = "derive"))]
#[cfg(feature = "parsing")]
use crate::lifetime::Lifetime;
#[cfg(any(feature = "full", feature = "derive"))]
#[cfg(feature = "parsing")]
use crate::lit::{Lit, LitBool, LitByte, LitByteStr, LitChar, LitFloat, LitInt, LitStr};
#[cfg(feature = "parsing")]
@ -155,21 +152,20 @@ mod private {
#[cfg(feature = "parsing")]
impl private::Sealed for Ident {}
#[cfg(any(feature = "full", feature = "derive"))]
#[cfg(feature = "parsing")]
fn peek_impl(cursor: Cursor, peek: fn(ParseStream) -> bool) -> bool {
use crate::parse::Unexpected;
use std::cell::Cell;
use std::rc::Rc;
let scope = Span::call_site();
let unexpected = Rc::new(Cell::new(None));
let unexpected = Rc::new(Cell::new(Unexpected::None));
let buffer = crate::parse::new_parse_buffer(scope, cursor, unexpected);
peek(&buffer)
}
#[cfg(any(feature = "full", feature = "derive"))]
macro_rules! impl_token {
($name:ident $display:expr) => {
($display:tt $name:ty) => {
#[cfg(feature = "parsing")]
impl Token for $name {
fn peek(cursor: Cursor) -> bool {
@ -189,24 +185,38 @@ macro_rules! impl_token {
};
}
#[cfg(any(feature = "full", feature = "derive"))]
impl_token!(Lifetime "lifetime");
#[cfg(any(feature = "full", feature = "derive"))]
impl_token!(Lit "literal");
#[cfg(any(feature = "full", feature = "derive"))]
impl_token!(LitStr "string literal");
#[cfg(any(feature = "full", feature = "derive"))]
impl_token!(LitByteStr "byte string literal");
#[cfg(any(feature = "full", feature = "derive"))]
impl_token!(LitByte "byte literal");
#[cfg(any(feature = "full", feature = "derive"))]
impl_token!(LitChar "character literal");
#[cfg(any(feature = "full", feature = "derive"))]
impl_token!(LitInt "integer literal");
#[cfg(any(feature = "full", feature = "derive"))]
impl_token!(LitFloat "floating point literal");
#[cfg(any(feature = "full", feature = "derive"))]
impl_token!(LitBool "boolean literal");
impl_token!("lifetime" Lifetime);
impl_token!("literal" Lit);
impl_token!("string literal" LitStr);
impl_token!("byte string literal" LitByteStr);
impl_token!("byte literal" LitByte);
impl_token!("character literal" LitChar);
impl_token!("integer literal" LitInt);
impl_token!("floating point literal" LitFloat);
impl_token!("boolean literal" LitBool);
impl_token!("group token" proc_macro2::Group);
macro_rules! impl_low_level_token {
($display:tt $ty:ident $get:ident) => {
#[cfg(feature = "parsing")]
impl Token for $ty {
fn peek(cursor: Cursor) -> bool {
cursor.$get().is_some()
}
fn display() -> &'static str {
$display
}
}
#[cfg(feature = "parsing")]
impl private::Sealed for $ty {}
};
}
impl_low_level_token!("punctuation token" Punct punct);
impl_low_level_token!("literal" Literal literal);
impl_low_level_token!("token" TokenTree token_tree);
// Not public API.
#[doc(hidden)]
@ -233,7 +243,6 @@ impl<T: CustomToken> Token for T {
macro_rules! define_keywords {
($($token:tt pub struct $name:ident #[$doc:meta])*) => {
$(
#[cfg_attr(feature = "clone-impls", derive(Copy, Clone))]
#[$doc]
///
/// Don't try to remember the name of this type &mdash; use the
@ -260,6 +269,16 @@ macro_rules! define_keywords {
}
}
#[cfg(feature = "clone-impls")]
impl Copy for $name {}
#[cfg(feature = "clone-impls")]
impl Clone for $name {
fn clone(&self) -> Self {
*self
}
}
#[cfg(feature = "extra-traits")]
impl Debug for $name {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
@ -338,7 +357,6 @@ macro_rules! impl_deref_if_len_is_1 {
macro_rules! define_punctuation_structs {
($($token:tt pub struct $name:ident/$len:tt #[$doc:meta])*) => {
$(
#[cfg_attr(feature = "clone-impls", derive(Copy, Clone))]
#[repr(C)]
#[$doc]
///
@ -366,6 +384,16 @@ macro_rules! define_punctuation_structs {
}
}
#[cfg(feature = "clone-impls")]
impl Copy for $name {}
#[cfg(feature = "clone-impls")]
impl Clone for $name {
fn clone(&self) -> Self {
*self
}
}
#[cfg(feature = "extra-traits")]
impl Debug for $name {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
@ -436,7 +464,6 @@ macro_rules! define_punctuation {
macro_rules! define_delimiters {
($($token:tt pub struct $name:ident #[$doc:meta])*) => {
$(
#[cfg_attr(feature = "clone-impls", derive(Copy, Clone))]
#[$doc]
pub struct $name {
pub span: Span,
@ -458,6 +485,16 @@ macro_rules! define_delimiters {
}
}
#[cfg(feature = "clone-impls")]
impl Copy for $name {}
#[cfg(feature = "clone-impls")]
impl Clone for $name {
fn clone(&self) -> Self {
*self
}
}
#[cfg(feature = "extra-traits")]
impl Debug for $name {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
@ -855,7 +892,7 @@ pub mod parsing {
}
pub fn punct<S: FromSpans>(input: ParseStream, token: &str) -> Result<S> {
let mut spans = [input.cursor().span(); 3];
let mut spans = [input.span(); 3];
punct_helper(input, token, &mut spans)?;
Ok(S::from_spans(&spans))
}

6
third_party/rust/syn/src/tt.rs поставляемый
Просмотреть файл

@ -18,8 +18,8 @@ impl<'a> PartialEq for TokenTreeHelper<'a> {
_ => return false,
}
let s1 = g1.stream().clone().into_iter();
let mut s2 = g2.stream().clone().into_iter();
let s1 = g1.stream().into_iter();
let mut s2 = g2.stream().into_iter();
for item1 in s1 {
let item2 = match s2.next() {
@ -60,7 +60,7 @@ impl<'a> Hash for TokenTreeHelper<'a> {
Delimiter::None => 3u8.hash(h),
}
for item in g.stream().clone() {
for item in g.stream() {
TokenTreeHelper(&item).hash(h);
}
0xffu8.hash(h); // terminator w/ a variant we don't normally hash

366
third_party/rust/syn/src/ty.rs поставляемый
Просмотреть файл

@ -1,15 +1,11 @@
use super::*;
use crate::punctuated::Punctuated;
#[cfg(feature = "extra-traits")]
use crate::tt::TokenStreamHelper;
use proc_macro2::TokenStream;
#[cfg(feature = "extra-traits")]
use std::hash::{Hash, Hasher};
ast_enum_of_structs! {
/// The possible types that a Rust value could have.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
///
/// # Syntax tree enum
@ -20,7 +16,7 @@ ast_enum_of_structs! {
//
// TODO: change syntax-tree-enum link to an intra rustdoc link, currently
// blocked on https://github.com/rust-lang/rust/issues/62833
pub enum Type #manual_extra_traits {
pub enum Type {
/// A fixed size array type: `[T; n]`.
Array(TypeArray),
@ -77,7 +73,7 @@ ast_enum_of_structs! {
ast_struct! {
/// A fixed size array type: `[T; n]`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub struct TypeArray {
pub bracket_token: token::Bracket,
@ -90,7 +86,7 @@ ast_struct! {
ast_struct! {
/// A bare function type: `fn(usize) -> bool`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub struct TypeBareFn {
pub lifetimes: Option<BoundLifetimes>,
@ -107,7 +103,7 @@ ast_struct! {
ast_struct! {
/// A type contained within invisible delimiters.
///
/// *This type is available if Syn is built with the `"derive"` or
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub struct TypeGroup {
pub group_token: token::Group,
@ -119,7 +115,7 @@ ast_struct! {
/// An `impl Bound1 + Bound2 + Bound3` type where `Bound` is a trait or
/// a lifetime.
///
/// *This type is available if Syn is built with the `"derive"` or
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub struct TypeImplTrait {
pub impl_token: Token![impl],
@ -130,7 +126,7 @@ ast_struct! {
ast_struct! {
/// Indication that a type should be inferred by the compiler: `_`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub struct TypeInfer {
pub underscore_token: Token![_],
@ -140,7 +136,7 @@ ast_struct! {
ast_struct! {
/// A macro in the type position.
///
/// *This type is available if Syn is built with the `"derive"` or
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub struct TypeMacro {
pub mac: Macro,
@ -150,7 +146,7 @@ ast_struct! {
ast_struct! {
/// The never type: `!`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub struct TypeNever {
pub bang_token: Token![!],
@ -160,7 +156,7 @@ ast_struct! {
ast_struct! {
/// A parenthesized type equivalent to the inner type.
///
/// *This type is available if Syn is built with the `"derive"` or
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub struct TypeParen {
pub paren_token: token::Paren,
@ -172,7 +168,7 @@ ast_struct! {
/// A path like `std::slice::Iter`, optionally qualified with a
/// self-type as in `<Vec<T> as SomeTrait>::Associated`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub struct TypePath {
pub qself: Option<QSelf>,
@ -183,7 +179,7 @@ ast_struct! {
ast_struct! {
/// A raw pointer type: `*const T` or `*mut T`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub struct TypePtr {
pub star_token: Token![*],
@ -196,7 +192,7 @@ ast_struct! {
ast_struct! {
/// A reference type: `&'a T` or `&'a mut T`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub struct TypeReference {
pub and_token: Token![&],
@ -209,7 +205,7 @@ ast_struct! {
ast_struct! {
/// A dynamically sized slice type: `[T]`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub struct TypeSlice {
pub bracket_token: token::Bracket,
@ -221,7 +217,7 @@ ast_struct! {
/// A trait object type `Bound1 + Bound2 + Bound3` where `Bound` is a
/// trait or a lifetime.
///
/// *This type is available if Syn is built with the `"derive"` or
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub struct TypeTraitObject {
pub dyn_token: Option<Token![dyn]>,
@ -232,7 +228,7 @@ ast_struct! {
ast_struct! {
/// A tuple type: `(A, B, C, String)`.
///
/// *This type is available if Syn is built with the `"derive"` or
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
pub struct TypeTuple {
pub paren_token: token::Paren,
@ -240,111 +236,10 @@ ast_struct! {
}
}
#[cfg(feature = "extra-traits")]
impl Eq for Type {}
#[cfg(feature = "extra-traits")]
impl PartialEq for Type {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(Type::Array(this), Type::Array(other)) => this == other,
(Type::BareFn(this), Type::BareFn(other)) => this == other,
(Type::Group(this), Type::Group(other)) => this == other,
(Type::ImplTrait(this), Type::ImplTrait(other)) => this == other,
(Type::Infer(this), Type::Infer(other)) => this == other,
(Type::Macro(this), Type::Macro(other)) => this == other,
(Type::Never(this), Type::Never(other)) => this == other,
(Type::Paren(this), Type::Paren(other)) => this == other,
(Type::Path(this), Type::Path(other)) => this == other,
(Type::Ptr(this), Type::Ptr(other)) => this == other,
(Type::Reference(this), Type::Reference(other)) => this == other,
(Type::Slice(this), Type::Slice(other)) => this == other,
(Type::TraitObject(this), Type::TraitObject(other)) => this == other,
(Type::Tuple(this), Type::Tuple(other)) => this == other,
(Type::Verbatim(this), Type::Verbatim(other)) => {
TokenStreamHelper(this) == TokenStreamHelper(other)
}
_ => false,
}
}
}
#[cfg(feature = "extra-traits")]
impl Hash for Type {
fn hash<H>(&self, hash: &mut H)
where
H: Hasher,
{
match self {
Type::Array(ty) => {
hash.write_u8(0);
ty.hash(hash);
}
Type::BareFn(ty) => {
hash.write_u8(1);
ty.hash(hash);
}
Type::Group(ty) => {
hash.write_u8(2);
ty.hash(hash);
}
Type::ImplTrait(ty) => {
hash.write_u8(3);
ty.hash(hash);
}
Type::Infer(ty) => {
hash.write_u8(4);
ty.hash(hash);
}
Type::Macro(ty) => {
hash.write_u8(5);
ty.hash(hash);
}
Type::Never(ty) => {
hash.write_u8(6);
ty.hash(hash);
}
Type::Paren(ty) => {
hash.write_u8(7);
ty.hash(hash);
}
Type::Path(ty) => {
hash.write_u8(8);
ty.hash(hash);
}
Type::Ptr(ty) => {
hash.write_u8(9);
ty.hash(hash);
}
Type::Reference(ty) => {
hash.write_u8(10);
ty.hash(hash);
}
Type::Slice(ty) => {
hash.write_u8(11);
ty.hash(hash);
}
Type::TraitObject(ty) => {
hash.write_u8(12);
ty.hash(hash);
}
Type::Tuple(ty) => {
hash.write_u8(13);
ty.hash(hash);
}
Type::Verbatim(ty) => {
hash.write_u8(14);
TokenStreamHelper(ty).hash(hash);
}
Type::__Nonexhaustive => unreachable!(),
}
}
}
ast_struct! {
/// The binary interface of a function: `extern "C"`.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
pub struct Abi {
pub extern_token: Token![extern],
@ -355,7 +250,7 @@ ast_struct! {
ast_struct! {
/// An argument in a function type: the `usize` in `fn(usize) -> bool`.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
pub struct BareFnArg {
pub attrs: Vec<Attribute>,
@ -377,7 +272,7 @@ ast_struct! {
/// }
/// ```
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
pub struct Variadic {
pub attrs: Vec<Attribute>,
@ -388,7 +283,7 @@ ast_struct! {
ast_enum! {
/// Return type of a function signature.
///
/// *This type is available if Syn is built with the `"derive"` or `"full"`
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
pub enum ReturnType {
/// Return type is not specified.
@ -407,10 +302,13 @@ pub mod parsing {
use crate::ext::IdentExt;
use crate::parse::{Parse, ParseStream, Result};
use crate::path;
use proc_macro2::{Punct, Spacing, TokenTree};
use std::iter::FromIterator;
impl Parse for Type {
fn parse(input: ParseStream) -> Result<Self> {
ambig_ty(input, true)
let allow_plus = true;
ambig_ty(input, allow_plus)
}
}
@ -421,15 +319,17 @@ pub mod parsing {
///
/// This parser does not allow a `+`, while the default parser does.
pub fn without_plus(input: ParseStream) -> Result<Self> {
ambig_ty(input, false)
let allow_plus = false;
ambig_ty(input, allow_plus)
}
}
fn ambig_ty(input: ParseStream, allow_plus: bool) -> Result<Type> {
if input.peek(token::Group) {
if input.peek(token::Group) && !input.peek2(Token![::]) && !input.peek2(Token![<]) {
return input.parse().map(Type::Group);
}
let begin = input.fork();
let mut lifetimes = None::<BoundLifetimes>;
let mut lookahead = input.lookahead1();
if lookahead.peek(Token![for]) {
@ -524,7 +424,7 @@ pub mod parsing {
..trait_bound
})
}
other => other,
other @ TypeParamBound::Lifetime(_) => other,
}
}
_ => break,
@ -549,17 +449,20 @@ pub mod parsing {
}))
} else if lookahead.peek(Token![fn])
|| lookahead.peek(Token![unsafe])
|| lookahead.peek(Token![extern]) && !input.peek2(Token![::])
|| lookahead.peek(Token![extern])
{
let mut bare_fn: TypeBareFn = input.parse()?;
bare_fn.lifetimes = lifetimes;
Ok(Type::BareFn(bare_fn))
let allow_mut_self = true;
if let Some(mut bare_fn) = parse_bare_fn(input, allow_mut_self)? {
bare_fn.lifetimes = lifetimes;
Ok(Type::BareFn(bare_fn))
} else {
Ok(Type::Verbatim(verbatim::between(begin, input)))
}
} else if lookahead.peek(Ident)
|| input.peek(Token![super])
|| input.peek(Token![self])
|| input.peek(Token![Self])
|| input.peek(Token![crate])
|| input.peek(Token![extern])
|| lookahead.peek(Token![::])
|| lookahead.peek(Token![<])
{
@ -722,38 +625,58 @@ pub mod parsing {
impl Parse for TypeBareFn {
fn parse(input: ParseStream) -> Result<Self> {
let args;
let allow_variadic;
Ok(TypeBareFn {
lifetimes: input.parse()?,
unsafety: input.parse()?,
abi: input.parse()?,
fn_token: input.parse()?,
paren_token: parenthesized!(args in input),
inputs: {
let mut inputs = Punctuated::new();
while !args.is_empty() && !args.peek(Token![...]) {
inputs.push_value(args.parse()?);
if args.is_empty() {
break;
}
inputs.push_punct(args.parse()?);
}
allow_variadic = inputs.empty_or_trailing();
inputs
},
variadic: {
if allow_variadic && args.peek(Token![...]) {
Some(Variadic {
attrs: Vec::new(),
let allow_mut_self = false;
parse_bare_fn(input, allow_mut_self).map(Option::unwrap)
}
}
fn parse_bare_fn(input: ParseStream, allow_mut_self: bool) -> Result<Option<TypeBareFn>> {
let args;
let mut variadic = None;
let mut has_mut_self = false;
let bare_fn = TypeBareFn {
lifetimes: input.parse()?,
unsafety: input.parse()?,
abi: input.parse()?,
fn_token: input.parse()?,
paren_token: parenthesized!(args in input),
inputs: {
let mut inputs = Punctuated::new();
while !args.is_empty() {
let attrs = args.call(Attribute::parse_outer)?;
if inputs.empty_or_trailing() && args.peek(Token![...]) {
variadic = Some(Variadic {
attrs,
dots: args.parse()?,
})
} else {
None
});
break;
}
},
output: input.call(ReturnType::without_plus)?,
})
if let Some(arg) = parse_bare_fn_arg(&args, allow_mut_self)? {
inputs.push_value(BareFnArg { attrs, ..arg });
} else {
has_mut_self = true;
}
if args.is_empty() {
break;
}
inputs.push_punct(args.parse()?);
}
inputs
},
variadic,
output: input.call(ReturnType::without_plus)?,
};
if has_mut_self {
Ok(None)
} else {
Ok(Some(bare_fn))
}
}
@ -776,9 +699,27 @@ pub mod parsing {
impl Parse for TypeTuple {
fn parse(input: ParseStream) -> Result<Self> {
let content;
let paren_token = parenthesized!(content in input);
if content.is_empty() {
return Ok(TypeTuple {
paren_token,
elems: Punctuated::new(),
});
}
let first: Type = content.parse()?;
Ok(TypeTuple {
paren_token: parenthesized!(content in input),
elems: content.parse_terminated(Type::parse)?,
paren_token,
elems: {
let mut elems = Punctuated::new();
elems.push_value(first);
elems.push_punct(content.parse()?);
let rest: Punctuated<Type, Token![,]> =
content.parse_terminated(Parse::parse)?;
elems.extend(rest);
elems
},
})
}
}
@ -807,9 +748,11 @@ pub mod parsing {
impl ReturnType {
pub fn without_plus(input: ParseStream) -> Result<Self> {
Self::parse(input, false)
let allow_plus = false;
Self::parse(input, allow_plus)
}
#[doc(hidden)]
pub fn parse(input: ParseStream, allow_plus: bool) -> Result<Self> {
if input.peek(Token![->]) {
let arrow = input.parse()?;
@ -844,10 +787,12 @@ pub mod parsing {
impl TypeTraitObject {
pub fn without_plus(input: ParseStream) -> Result<Self> {
Self::parse(input, false)
let allow_plus = false;
Self::parse(input, allow_plus)
}
// Only allow multiple trait references if allow_plus is true.
#[doc(hidden)]
pub fn parse(input: ParseStream, allow_plus: bool) -> Result<Self> {
Ok(TypeTraitObject {
dyn_token: input.parse()?,
@ -910,7 +855,8 @@ pub mod parsing {
impl Parse for TypeParen {
fn parse(input: ParseStream) -> Result<Self> {
Self::parse(input, false)
let allow_plus = false;
Self::parse(input, allow_plus)
}
}
@ -926,22 +872,72 @@ pub mod parsing {
impl Parse for BareFnArg {
fn parse(input: ParseStream) -> Result<Self> {
Ok(BareFnArg {
attrs: input.call(Attribute::parse_outer)?,
name: {
if (input.peek(Ident) || input.peek(Token![_]))
&& input.peek2(Token![:])
&& !input.peek2(Token![::])
{
let name = input.call(Ident::parse_any)?;
let colon: Token![:] = input.parse()?;
Some((name, colon))
} else {
None
}
},
ty: input.parse()?,
})
let allow_mut_self = false;
parse_bare_fn_arg(input, allow_mut_self).map(Option::unwrap)
}
}
fn parse_bare_fn_arg(
input: ParseStream,
mut allow_mut_self: bool,
) -> Result<Option<BareFnArg>> {
let mut has_mut_self = false;
let arg = BareFnArg {
attrs: input.call(Attribute::parse_outer)?,
name: {
if (input.peek(Ident) || input.peek(Token![_]) || input.peek(Token![self]))
&& input.peek2(Token![:])
&& !input.peek2(Token![::])
{
let name = input.call(Ident::parse_any)?;
let colon: Token![:] = input.parse()?;
Some((name, colon))
} else if allow_mut_self
&& input.peek(Token![mut])
&& input.peek2(Token![self])
&& input.peek3(Token![:])
&& !input.peek3(Token![::])
{
has_mut_self = true;
allow_mut_self = false;
input.parse::<Token![mut]>()?;
input.parse::<Token![self]>()?;
input.parse::<Token![:]>()?;
None
} else {
None
}
},
ty: if !has_mut_self && input.peek(Token![...]) {
let dot3 = input.parse::<Token![...]>()?;
let args = vec![
TokenTree::Punct(Punct::new('.', Spacing::Joint)),
TokenTree::Punct(Punct::new('.', Spacing::Joint)),
TokenTree::Punct(Punct::new('.', Spacing::Alone)),
];
let tokens = TokenStream::from_iter(args.into_iter().zip(&dot3.spans).map(
|(mut arg, span)| {
arg.set_span(*span);
arg
},
));
Type::Verbatim(tokens)
} else if allow_mut_self && input.peek(Token![mut]) && input.peek2(Token![self]) {
has_mut_self = true;
input.parse::<Token![mut]>()?;
Type::Path(TypePath {
qself: None,
path: input.parse::<Token![self]>()?.into(),
})
} else {
input.parse()?
},
};
if has_mut_self {
Ok(None)
} else {
Ok(Some(arg))
}
}

15
third_party/rust/syn/src/verbatim.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,15 @@
use crate::parse::{ParseBuffer, ParseStream};
use proc_macro2::TokenStream;
use std::iter;
pub fn between<'a>(begin: ParseBuffer<'a>, end: ParseStream<'a>) -> TokenStream {
let end = end.cursor();
let mut cursor = begin.cursor();
let mut tokens = TokenStream::new();
while cursor != end {
let (tt, next) = cursor.token_tree().unwrap();
tokens.extend(iter::once(tt));
cursor = next;
}
tokens
}

65
third_party/rust/syn/src/whitespace.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,65 @@
pub fn skip(mut s: &str) -> &str {
'skip: while !s.is_empty() {
let byte = s.as_bytes()[0];
if byte == b'/' {
if s.starts_with("//")
&& (!s.starts_with("///") || s.starts_with("////"))
&& !s.starts_with("//!")
{
if let Some(i) = s.find('\n') {
s = &s[i + 1..];
continue;
} else {
return "";
}
} else if s.starts_with("/**/") {
s = &s[4..];
continue;
} else if s.starts_with("/*")
&& (!s.starts_with("/**") || s.starts_with("/***"))
&& !s.starts_with("/*!")
{
let mut depth = 0;
let bytes = s.as_bytes();
let mut i = 0;
let upper = bytes.len() - 1;
while i < upper {
if bytes[i] == b'/' && bytes[i + 1] == b'*' {
depth += 1;
i += 1; // eat '*'
} else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
depth -= 1;
if depth == 0 {
s = &s[i + 2..];
continue 'skip;
}
i += 1; // eat '/'
}
i += 1;
}
return s;
}
}
match byte {
b' ' | 0x09..=0x0d => {
s = &s[1..];
continue;
}
b if b <= 0x7f => {}
_ => {
let ch = s.chars().next().unwrap();
if is_whitespace(ch) {
s = &s[ch.len_utf8()..];
continue;
}
}
}
return s;
}
s
}
fn is_whitespace(ch: char) -> bool {
// Rust treats left-to-right mark and right-to-left mark as whitespace
ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
}

1
third_party/rust/syn/tests/.gitignore поставляемый Normal file
Просмотреть файл

@ -0,0 +1 @@
/*.pending-snap

16
third_party/rust/syn/tests/clone.sh поставляемый
Просмотреть файл

@ -1,16 +0,0 @@
#!/bin/bash
REV=4560cb830fce63fcffdc4558f4281aaac6a3a1ba
set -euo pipefail
cd "$(dirname "${BASH_SOURCE[0]}")"
mkdir -p rust
touch rust/COMMIT
if [ "$(cat rust/COMMIT)" != "$REV" ]; then
rm -rf rust
mkdir rust
curl -L "https://github.com/rust-lang/rust/archive/${REV}.tar.gz" \
| tar xz --directory rust --strip-components 1
echo "$REV" > rust/COMMIT
fi

247
third_party/rust/syn/tests/common/eq.rs поставляемый
Просмотреть файл

@ -1,36 +1,35 @@
extern crate rustc_ast;
extern crate rustc_data_structures;
extern crate rustc_span;
extern crate rustc_target;
extern crate syntax;
extern crate syntax_pos;
use std::mem;
use self::rustc_data_structures::sync::Lrc;
use self::rustc_data_structures::thin_vec::ThinVec;
use self::rustc_target::abi::FloatTy;
use self::rustc_target::spec::abi::Abi;
use self::syntax::ast::{
AngleBracketedArgs, AnonConst, Arg, Arm, AsmDialect, AssocTyConstraint, AssocTyConstraintKind,
AttrId, AttrStyle, Attribute, BareFnTy, BinOpKind, BindingMode, Block, BlockCheckMode,
CaptureBy, Constness, Crate, CrateSugar, Defaultness, EnumDef, Expr, ExprKind, Field, FieldPat,
FnDecl, FnHeader, ForeignItem, ForeignItemKind, ForeignMod, FunctionRetTy, GenericArg,
GenericArgs, GenericBound, GenericParam, GenericParamKind, Generics, GlobalAsm, Ident,
ImplItem, ImplItemKind, ImplPolarity, InlineAsm, InlineAsmOutput, IntTy, IsAsync, IsAuto, Item,
ItemKind, Label, Lifetime, Lit, LitIntType, LitKind, Local, Mac, MacDelimiter, MacStmtStyle,
MacroDef, MethodSig, Mod, Movability, MutTy, Mutability, NodeId, ParenthesizedArgs, Pat,
PatKind, Path, PathSegment, PolyTraitRef, QSelf, RangeEnd, RangeLimits, RangeSyntax, Stmt,
StmtKind, StrStyle, StructField, TraitBoundModifier, TraitItem, TraitItemKind,
TraitObjectSyntax, TraitRef, Ty, TyKind, UintTy, UnOp, UnsafeSource, Unsafety, UseTree,
UseTreeKind, Variant, VariantData, VisibilityKind, WhereBoundPredicate, WhereClause,
WhereEqPredicate, WherePredicate, WhereRegionPredicate,
use rustc_ast::ast::{
AngleBracketedArg, AngleBracketedArgs, AnonConst, Arm, AssocItemKind, AssocTyConstraint,
AssocTyConstraintKind, Async, AttrId, AttrItem, AttrKind, AttrStyle, Attribute, BareFnTy,
BinOpKind, BindingMode, Block, BlockCheckMode, BorrowKind, CaptureBy, Const, Crate, CrateSugar,
Defaultness, EnumDef, Expr, ExprKind, Extern, Field, FieldPat, FloatTy, FnDecl, FnHeader,
FnRetTy, FnSig, ForeignItemKind, ForeignMod, GenericArg, GenericArgs, GenericBound,
GenericParam, GenericParamKind, Generics, GlobalAsm, ImplPolarity, InlineAsm, InlineAsmOperand,
InlineAsmOptions, InlineAsmRegOrRegClass, InlineAsmTemplatePiece, IntTy, IsAuto, Item,
ItemKind, Label, Lifetime, Lit, LitFloatType, LitIntType, LitKind, LlvmAsmDialect,
LlvmInlineAsm, LlvmInlineAsmOutput, Local, MacArgs, MacCall, MacCallStmt, MacDelimiter,
MacStmtStyle, MacroDef, Mod, Movability, MutTy, Mutability, NodeId, Param, ParenthesizedArgs,
Pat, PatKind, Path, PathSegment, PolyTraitRef, QSelf, RangeEnd, RangeLimits, RangeSyntax, Stmt,
StmtKind, StrLit, StrStyle, StructField, TraitBoundModifier, TraitObjectSyntax, TraitRef, Ty,
TyKind, UintTy, UnOp, Unsafe, UnsafeSource, UseTree, UseTreeKind, Variant, VariantData,
VisibilityKind, WhereBoundPredicate, WhereClause, WhereEqPredicate, WherePredicate,
WhereRegionPredicate,
};
use self::syntax::parse::lexer::comments;
use self::syntax::parse::token::{self, DelimToken, Token, TokenKind};
use self::syntax::ptr::P;
use self::syntax::source_map::Spanned;
use self::syntax::symbol::{sym, Symbol};
use self::syntax::tokenstream::{DelimSpan, TokenStream, TokenTree};
use self::syntax_pos::{Span, SyntaxContext, DUMMY_SP};
use rustc_ast::ptr::P;
use rustc_ast::token::{self, CommentKind, DelimToken, Token, TokenKind};
use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
use rustc_data_structures::sync::Lrc;
use rustc_data_structures::thin_vec::ThinVec;
use rustc_span::source_map::Spanned;
use rustc_span::symbol::Ident;
use rustc_span::{Span, Symbol, SyntaxContext};
pub trait SpanlessEq {
fn eq(&self, other: &Self) -> bool;
@ -86,14 +85,6 @@ impl<A: SpanlessEq, B: SpanlessEq> SpanlessEq for (A, B) {
}
}
impl<A: SpanlessEq, B: SpanlessEq, C: SpanlessEq> SpanlessEq for (A, B, C) {
fn eq(&self, other: &Self) -> bool {
SpanlessEq::eq(&self.0, &other.0)
&& SpanlessEq::eq(&self.1, &other.1)
&& SpanlessEq::eq(&self.2, &other.2)
}
}
macro_rules! spanless_eq_true {
($name:ident) => {
impl SpanlessEq for $name {
@ -126,17 +117,19 @@ spanless_eq_partial_eq!(u16);
spanless_eq_partial_eq!(u128);
spanless_eq_partial_eq!(usize);
spanless_eq_partial_eq!(char);
spanless_eq_partial_eq!(String);
spanless_eq_partial_eq!(Symbol);
spanless_eq_partial_eq!(Abi);
spanless_eq_partial_eq!(CommentKind);
spanless_eq_partial_eq!(DelimToken);
spanless_eq_partial_eq!(InlineAsmOptions);
macro_rules! spanless_eq_struct {
{
$name:ident;
$name:ident $(<$param:ident>)?;
$([$field:ident $other:ident])*
$(![$ignore:ident])*
} => {
impl SpanlessEq for $name {
impl $(<$param: SpanlessEq>)* SpanlessEq for $name $(<$param>)* {
fn eq(&self, other: &Self) -> bool {
let $name { $($field,)* $($ignore: _,)* } = self;
let $name { $($field: $other,)* $($ignore: _,)* } = other;
@ -146,14 +139,14 @@ macro_rules! spanless_eq_struct {
};
{
$name:ident;
$name:ident $(<$param:ident>)?;
$([$field:ident $other:ident])*
$next:ident
$($rest:ident)*
$(!$ignore:ident)*
} => {
spanless_eq_struct! {
$name;
$name $(<$param>)*;
$([$field $other])*
[$next other]
$($rest)*
@ -162,14 +155,14 @@ macro_rules! spanless_eq_struct {
};
{
$name:ident;
$name:ident $(<$param:ident>)?;
$([$field:ident $other:ident])*
$(![$ignore:ident])*
!$next:ident
$(!$rest:ident)*
} => {
spanless_eq_struct! {
$name;
$name $(<$param>)*;
$([$field $other])*
$(![$ignore])*
![$next]
@ -263,119 +256,131 @@ macro_rules! spanless_eq_enum {
};
}
spanless_eq_struct!(AngleBracketedArgs; span args constraints);
spanless_eq_struct!(AngleBracketedArgs; span args);
spanless_eq_struct!(AnonConst; id value);
spanless_eq_struct!(Arg; attrs ty pat id span);
spanless_eq_struct!(Arm; attrs pats guard body span id);
spanless_eq_struct!(Arm; attrs pat guard body span id is_placeholder);
spanless_eq_struct!(AssocTyConstraint; id ident kind span);
spanless_eq_struct!(Attribute; id style path tokens span !is_sugared_doc);
spanless_eq_struct!(BareFnTy; unsafety abi generic_params decl);
spanless_eq_struct!(AttrItem; path args);
spanless_eq_struct!(Attribute; kind id style span);
spanless_eq_struct!(BareFnTy; unsafety ext generic_params decl);
spanless_eq_struct!(Block; stmts id rules span);
spanless_eq_struct!(Crate; module attrs span);
spanless_eq_struct!(Crate; module attrs span proc_macros);
spanless_eq_struct!(EnumDef; variants);
spanless_eq_struct!(Expr; id node span attrs);
spanless_eq_struct!(Field; ident expr span is_shorthand attrs id);
spanless_eq_struct!(FieldPat; ident pat is_shorthand attrs id span);
spanless_eq_struct!(FnDecl; inputs output c_variadic);
spanless_eq_struct!(FnHeader; constness asyncness unsafety abi);
spanless_eq_struct!(ForeignItem; ident attrs node id span vis);
spanless_eq_struct!(Expr; id kind span attrs !tokens);
spanless_eq_struct!(Field; attrs id span ident expr is_shorthand is_placeholder);
spanless_eq_struct!(FieldPat; ident pat is_shorthand attrs id span is_placeholder);
spanless_eq_struct!(FnDecl; inputs output);
spanless_eq_struct!(FnHeader; constness asyncness unsafety ext);
spanless_eq_struct!(FnSig; header decl span);
spanless_eq_struct!(ForeignMod; abi items);
spanless_eq_struct!(GenericParam; id ident attrs bounds kind);
spanless_eq_struct!(GenericParam; id ident attrs bounds is_placeholder kind);
spanless_eq_struct!(Generics; params where_clause span);
spanless_eq_struct!(GlobalAsm; asm);
spanless_eq_struct!(ImplItem; id ident vis defaultness attrs generics node span !tokens);
spanless_eq_struct!(InlineAsm; asm asm_str_style outputs inputs clobbers volatile alignstack dialect);
spanless_eq_struct!(InlineAsmOutput; constraint expr is_rw is_indirect);
spanless_eq_struct!(Item; ident attrs id node vis span !tokens);
spanless_eq_struct!(InlineAsm; template operands options line_spans);
spanless_eq_struct!(Item<K>; attrs id span vis ident kind !tokens);
spanless_eq_struct!(Label; ident);
spanless_eq_struct!(Lifetime; id ident);
spanless_eq_struct!(Lit; token node span);
spanless_eq_struct!(Lit; token kind span);
spanless_eq_struct!(LlvmInlineAsm; asm asm_str_style outputs inputs clobbers volatile alignstack dialect);
spanless_eq_struct!(LlvmInlineAsmOutput; constraint expr is_rw is_indirect);
spanless_eq_struct!(Local; pat ty init id span attrs);
spanless_eq_struct!(Mac; path delim tts span prior_type_ascription);
spanless_eq_struct!(MacroDef; tokens legacy);
spanless_eq_struct!(MethodSig; header decl);
spanless_eq_struct!(MacCall; path args prior_type_ascription);
spanless_eq_struct!(MacCallStmt; mac style attrs);
spanless_eq_struct!(MacroDef; body macro_rules);
spanless_eq_struct!(Mod; inner items inline);
spanless_eq_struct!(MutTy; ty mutbl);
spanless_eq_struct!(Param; attrs ty pat id span is_placeholder);
spanless_eq_struct!(ParenthesizedArgs; span inputs output);
spanless_eq_struct!(Pat; id node span);
spanless_eq_struct!(Pat; id kind span tokens);
spanless_eq_struct!(Path; span segments);
spanless_eq_struct!(PathSegment; ident id args);
spanless_eq_struct!(PolyTraitRef; bound_generic_params trait_ref span);
spanless_eq_struct!(QSelf; ty path_span position);
spanless_eq_struct!(Stmt; id node span);
spanless_eq_struct!(StructField; span ident vis id ty attrs);
spanless_eq_struct!(Stmt; id kind span);
spanless_eq_struct!(StrLit; style symbol suffix span symbol_unescaped);
spanless_eq_struct!(StructField; attrs id span vis ident ty is_placeholder);
spanless_eq_struct!(Token; kind span);
spanless_eq_struct!(TraitItem; id ident attrs generics node span !tokens);
spanless_eq_struct!(TraitRef; path ref_id);
spanless_eq_struct!(Ty; id node span);
spanless_eq_struct!(Ty; id kind span);
spanless_eq_struct!(UseTree; prefix kind span);
spanless_eq_struct!(Variant; ident attrs id data disr_expr span);
spanless_eq_struct!(Variant; attrs id span vis ident data disr_expr is_placeholder);
spanless_eq_struct!(WhereBoundPredicate; span bound_generic_params bounded_ty bounds);
spanless_eq_struct!(WhereClause; predicates span);
spanless_eq_struct!(WhereClause; has_where_token predicates span);
spanless_eq_struct!(WhereEqPredicate; id span lhs_ty rhs_ty);
spanless_eq_struct!(WhereRegionPredicate; span lifetime bounds);
spanless_eq_enum!(AsmDialect; Att Intel);
spanless_eq_enum!(AngleBracketedArg; Arg(0) Constraint(0));
spanless_eq_enum!(AssocItemKind; Const(0 1 2) Fn(0 1 2 3) TyAlias(0 1 2 3) MacCall(0));
spanless_eq_enum!(AssocTyConstraintKind; Equality(ty) Bound(bounds));
spanless_eq_enum!(Async; Yes(span closure_id return_impl_trait_id) No);
spanless_eq_enum!(AttrKind; Normal(0) DocComment(0 1));
spanless_eq_enum!(AttrStyle; Outer Inner);
spanless_eq_enum!(BinOpKind; Add Sub Mul Div Rem And Or BitXor BitAnd BitOr Shl Shr Eq Lt Le Ne Ge Gt);
spanless_eq_enum!(BindingMode; ByRef(0) ByValue(0));
spanless_eq_enum!(BlockCheckMode; Default Unsafe(0));
spanless_eq_enum!(BorrowKind; Ref Raw);
spanless_eq_enum!(CaptureBy; Value Ref);
spanless_eq_enum!(Constness; Const NotConst);
spanless_eq_enum!(Const; Yes(0) No);
spanless_eq_enum!(CrateSugar; PubCrate JustCrate);
spanless_eq_enum!(Defaultness; Default Final);
spanless_eq_enum!(Defaultness; Default(0) Final);
spanless_eq_enum!(Extern; None Implicit Explicit(0));
spanless_eq_enum!(FloatTy; F32 F64);
spanless_eq_enum!(ForeignItemKind; Fn(0 1) Static(0 1) Ty Macro(0));
spanless_eq_enum!(FunctionRetTy; Default(0) Ty(0));
spanless_eq_enum!(FnRetTy; Default(0) Ty(0));
spanless_eq_enum!(ForeignItemKind; Static(0 1 2) Fn(0 1 2 3) TyAlias(0 1 2 3) MacCall(0));
spanless_eq_enum!(GenericArg; Lifetime(0) Type(0) Const(0));
spanless_eq_enum!(GenericArgs; AngleBracketed(0) Parenthesized(0));
spanless_eq_enum!(GenericBound; Trait(0 1) Outlives(0));
spanless_eq_enum!(GenericParamKind; Lifetime Type(default) Const(ty));
spanless_eq_enum!(ImplItemKind; Const(0 1) Method(0 1) TyAlias(0) OpaqueTy(0) Macro(0));
spanless_eq_enum!(ImplPolarity; Positive Negative);
spanless_eq_enum!(GenericParamKind; Lifetime Type(default) Const(ty kw_span));
spanless_eq_enum!(ImplPolarity; Positive Negative(0));
spanless_eq_enum!(InlineAsmRegOrRegClass; Reg(0) RegClass(0));
spanless_eq_enum!(InlineAsmTemplatePiece; String(0) Placeholder(operand_idx modifier span));
spanless_eq_enum!(IntTy; Isize I8 I16 I32 I64 I128);
spanless_eq_enum!(IsAsync; Async(closure_id return_impl_trait_id) NotAsync);
spanless_eq_enum!(IsAuto; Yes No);
spanless_eq_enum!(LitFloatType; Suffixed(0) Unsuffixed);
spanless_eq_enum!(LitIntType; Signed(0) Unsigned(0) Unsuffixed);
spanless_eq_enum!(LlvmAsmDialect; Att Intel);
spanless_eq_enum!(MacArgs; Empty Delimited(0 1 2) Eq(0 1));
spanless_eq_enum!(MacDelimiter; Parenthesis Bracket Brace);
spanless_eq_enum!(MacStmtStyle; Semicolon Braces NoBraces);
spanless_eq_enum!(Movability; Static Movable);
spanless_eq_enum!(Mutability; Mutable Immutable);
spanless_eq_enum!(Mutability; Mut Not);
spanless_eq_enum!(RangeEnd; Included(0) Excluded);
spanless_eq_enum!(RangeLimits; HalfOpen Closed);
spanless_eq_enum!(StmtKind; Local(0) Item(0) Expr(0) Semi(0) Mac(0));
spanless_eq_enum!(StmtKind; Local(0) Item(0) Expr(0) Semi(0) Empty MacCall(0));
spanless_eq_enum!(StrStyle; Cooked Raw(0));
spanless_eq_enum!(TokenTree; Token(0) Delimited(0 1 2));
spanless_eq_enum!(TraitBoundModifier; None Maybe);
spanless_eq_enum!(TraitItemKind; Const(0 1) Method(0 1) Type(0 1) Macro(0));
spanless_eq_enum!(TraitBoundModifier; None Maybe MaybeConst MaybeConstMaybe);
spanless_eq_enum!(TraitObjectSyntax; Dyn None);
spanless_eq_enum!(UintTy; Usize U8 U16 U32 U64 U128);
spanless_eq_enum!(UnOp; Deref Not Neg);
spanless_eq_enum!(Unsafe; Yes(0) No);
spanless_eq_enum!(UnsafeSource; CompilerGenerated UserProvided);
spanless_eq_enum!(Unsafety; Unsafe Normal);
spanless_eq_enum!(UseTreeKind; Simple(0 1 2) Nested(0) Glob);
spanless_eq_enum!(VariantData; Struct(0 1) Tuple(0 1) Unit(0));
spanless_eq_enum!(VisibilityKind; Public Crate(0) Restricted(path id) Inherited);
spanless_eq_enum!(WherePredicate; BoundPredicate(0) RegionPredicate(0) EqPredicate(0));
spanless_eq_enum!(ExprKind; Box(0) Array(0) Call(0 1) MethodCall(0 1) Tup(0)
spanless_eq_enum!(ExprKind; Box(0) Array(0) Call(0 1) MethodCall(0 1 2) Tup(0)
Binary(0 1 2) Unary(0 1) Lit(0) Cast(0 1) Type(0 1) Let(0 1) If(0 1 2)
While(0 1 2) ForLoop(0 1 2 3) Loop(0 1) Match(0 1) Closure(0 1 2 3 4 5)
Block(0 1) Async(0 1 2) Await(0) TryBlock(0) Assign(0 1) AssignOp(0 1 2)
Field(0 1) Index(0 1) Range(0 1 2) Path(0 1) AddrOf(0 1) Break(0 1)
Continue(0) Ret(0) InlineAsm(0) Mac(0) Struct(0 1 2) Repeat(0 1) Paren(0)
Try(0) Yield(0) Err);
spanless_eq_enum!(ItemKind; ExternCrate(0) Use(0) Static(0 1 2) Const(0 1)
Fn(0 1 2 3) Mod(0) ForeignMod(0) GlobalAsm(0) TyAlias(0 1) OpaqueTy(0 1)
Enum(0 1) Struct(0 1) Union(0 1) Trait(0 1 2 3 4) TraitAlias(0 1)
Impl(0 1 2 3 4 5 6) Mac(0) MacroDef(0));
Block(0 1) Async(0 1 2) Await(0) TryBlock(0) Assign(0 1 2) AssignOp(0 1 2)
Field(0 1) Index(0 1) Range(0 1 2) Path(0 1) AddrOf(0 1 2) Break(0 1)
Continue(0) Ret(0) InlineAsm(0) LlvmInlineAsm(0) MacCall(0) Struct(0 1 2)
Repeat(0 1) Paren(0) Try(0) Yield(0) Err);
spanless_eq_enum!(InlineAsmOperand; In(reg expr) Out(reg late expr)
InOut(reg late expr) SplitInOut(reg late in_expr out_expr) Const(expr)
Sym(expr));
spanless_eq_enum!(ItemKind; ExternCrate(0) Use(0) Static(0 1 2) Const(0 1 2)
Fn(0 1 2 3) Mod(0) ForeignMod(0) GlobalAsm(0) TyAlias(0 1 2 3) Enum(0 1)
Struct(0 1) Union(0 1) Trait(0 1 2 3 4) TraitAlias(0 1)
Impl(unsafety polarity defaultness constness generics of_trait self_ty items)
MacCall(0) MacroDef(0));
spanless_eq_enum!(LitKind; Str(0 1) ByteStr(0) Byte(0) Char(0) Int(0 1)
Float(0 1) FloatUnsuffixed(0) Bool(0) Err(0));
Float(0 1) Bool(0) Err(0));
spanless_eq_enum!(PatKind; Wild Ident(0 1 2) Struct(0 1 2) TupleStruct(0 1)
Or(0) Path(0 1) Tuple(0) Box(0) Ref(0 1) Lit(0) Range(0 1 2) Slice(0) Rest
Paren(0) Mac(0));
Paren(0) MacCall(0));
spanless_eq_enum!(TyKind; Slice(0) Array(0 1) Ptr(0) Rptr(0 1) BareFn(0) Never
Tup(0) Path(0 1) TraitObject(0 1) ImplTrait(0 1) Paren(0) Typeof(0) Infer
ImplicitSelf Mac(0) Err CVarArgs);
ImplicitSelf MacCall(0) Err CVarArgs);
impl SpanlessEq for Ident {
fn eq(&self, other: &Self) -> bool {
@ -414,44 +419,20 @@ impl SpanlessEq for TokenKind {
impl SpanlessEq for TokenStream {
fn eq(&self, other: &Self) -> bool {
SpanlessEq::eq(&expand_tts(self), &expand_tts(other))
}
}
fn expand_tts(tts: &TokenStream) -> Vec<TokenTree> {
let mut tokens = Vec::new();
for tt in tts.clone().into_trees() {
let c = match tt {
TokenTree::Token(Token {
kind: TokenKind::DocComment(c),
..
}) => c,
_ => {
tokens.push(tt);
continue;
let mut this = self.clone().into_trees();
let mut other = other.clone().into_trees();
loop {
let this = match this.next() {
None => return other.next().is_none(),
Some(val) => val,
};
let other = match other.next() {
None => return false,
Some(val) => val,
};
if !SpanlessEq::eq(&this, &other) {
return false;
}
};
let contents = comments::strip_doc_comment_decoration(&c.as_str());
let style = comments::doc_comment_style(&c.as_str());
tokens.push(TokenTree::token(TokenKind::Pound, DUMMY_SP));
if style == AttrStyle::Inner {
tokens.push(TokenTree::token(TokenKind::Not, DUMMY_SP));
}
let lit = token::Lit {
kind: token::LitKind::Str,
symbol: Symbol::intern(&contents),
suffix: None,
};
let tts = vec![
TokenTree::token(TokenKind::Ident(sym::doc, false), DUMMY_SP),
TokenTree::token(TokenKind::Eq, DUMMY_SP),
TokenTree::token(TokenKind::Literal(lit), DUMMY_SP),
];
tokens.push(TokenTree::Delimited(
DelimSpan::dummy(),
DelimToken::Bracket,
tts.into_iter().collect::<TokenStream>().into(),
));
}
tokens
}

13
third_party/rust/syn/tests/common/mod.rs поставляемый
Просмотреть файл

@ -1,5 +1,6 @@
#![allow(dead_code)]
use rayon::ThreadPoolBuilder;
use std::env;
pub mod eq;
@ -12,3 +13,15 @@ pub fn abort_after() -> usize {
Err(_) => usize::max_value(),
}
}
/// Configure Rayon threadpool.
pub fn rayon_init() {
let stack_size = match env::var("RUST_MIN_STACK") {
Ok(s) => s.parse().expect("failed to parse RUST_MIN_STACK"),
Err(_) => 20 * 1024 * 1024,
};
ThreadPoolBuilder::new()
.stack_size(stack_size)
.build_global()
.unwrap();
}

24
third_party/rust/syn/tests/common/parse.rs поставляемый
Просмотреть файл

@ -1,20 +1,20 @@
extern crate proc_macro2;
extern crate syn;
extern crate syntax;
extern crate syntax_pos;
extern crate rustc_ast;
extern crate rustc_expand;
extern crate rustc_parse as parse;
extern crate rustc_session;
extern crate rustc_span;
use self::syntax::ast;
use self::syntax::parse::{self, ParseSess};
use self::syntax::ptr::P;
use self::syntax::source_map::FilePathMapping;
use self::syntax_pos::FileName;
use rustc_ast::ast;
use rustc_ast::ptr::P;
use rustc_session::parse::ParseSess;
use rustc_span::source_map::FilePathMapping;
use rustc_span::FileName;
use std::panic;
pub fn libsyntax_expr(input: &str) -> Option<P<ast::Expr>> {
pub fn librustc_expr(input: &str) -> Option<P<ast::Expr>> {
match panic::catch_unwind(|| {
let sess = ParseSess::new(FilePathMapping::empty());
sess.span_diagnostic.set_continue_after_error(false);
let e = parse::new_parser_from_source_str(
&sess,
FileName::Custom("test_precedence".to_string()),
@ -32,7 +32,7 @@ pub fn libsyntax_expr(input: &str) -> Option<P<ast::Expr>> {
Ok(Some(e)) => Some(e),
Ok(None) => None,
Err(_) => {
errorf!("libsyntax panicked\n");
errorf!("librustc panicked\n");
None
}
}

50
third_party/rust/syn/tests/debug/gen.rs поставляемый
Просмотреть файл

@ -2,7 +2,7 @@
// It is not intended for manual editing.
use super::{Lite, RefCast};
use std::fmt::{self, Debug};
use std::fmt::{self, Debug, Display};
impl Debug for Lite<syn::Abi> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
let _val = &self.value;
@ -1039,9 +1039,9 @@ impl Debug for Lite<syn::Expr> {
}
syn::Expr::Verbatim(_val) => {
formatter.write_str("Verbatim")?;
formatter.write_str("(")?;
Debug::fmt(Lite(_val), formatter)?;
formatter.write_str(")")?;
formatter.write_str("(`")?;
Display::fmt(_val, formatter)?;
formatter.write_str("`)")?;
Ok(())
}
syn::Expr::While(_val) => {
@ -2116,9 +2116,9 @@ impl Debug for Lite<syn::ForeignItem> {
}
syn::ForeignItem::Verbatim(_val) => {
formatter.write_str("Verbatim")?;
formatter.write_str("(")?;
Debug::fmt(Lite(_val), formatter)?;
formatter.write_str(")")?;
formatter.write_str("(`")?;
Display::fmt(_val, formatter)?;
formatter.write_str("`)")?;
Ok(())
}
_ => unreachable!(),
@ -2432,9 +2432,9 @@ impl Debug for Lite<syn::ImplItem> {
}
syn::ImplItem::Verbatim(_val) => {
formatter.write_str("Verbatim")?;
formatter.write_str("(")?;
Debug::fmt(Lite(_val), formatter)?;
formatter.write_str(")")?;
formatter.write_str("(`")?;
Display::fmt(_val, formatter)?;
formatter.write_str("`)")?;
Ok(())
}
_ => unreachable!(),
@ -2940,9 +2940,9 @@ impl Debug for Lite<syn::Item> {
}
syn::Item::Verbatim(_val) => {
formatter.write_str("Verbatim")?;
formatter.write_str("(")?;
Debug::fmt(Lite(_val), formatter)?;
formatter.write_str(")")?;
formatter.write_str("(`")?;
Display::fmt(_val, formatter)?;
formatter.write_str("`)")?;
Ok(())
}
_ => unreachable!(),
@ -3437,9 +3437,9 @@ impl Debug for Lite<syn::Lit> {
}
syn::Lit::Verbatim(_val) => {
formatter.write_str("Verbatim")?;
formatter.write_str("(")?;
Debug::fmt(Lite(_val), formatter)?;
formatter.write_str(")")?;
formatter.write_str("(`")?;
Display::fmt(_val, formatter)?;
formatter.write_str("`)")?;
Ok(())
}
}
@ -3878,9 +3878,9 @@ impl Debug for Lite<syn::Pat> {
}
syn::Pat::Verbatim(_val) => {
formatter.write_str("Verbatim")?;
formatter.write_str("(")?;
Debug::fmt(Lite(_val), formatter)?;
formatter.write_str(")")?;
formatter.write_str("(`")?;
Display::fmt(_val, formatter)?;
formatter.write_str("`)")?;
Ok(())
}
syn::Pat::Wild(_val) => {
@ -4674,9 +4674,9 @@ impl Debug for Lite<syn::TraitItem> {
}
syn::TraitItem::Verbatim(_val) => {
formatter.write_str("Verbatim")?;
formatter.write_str("(")?;
Debug::fmt(Lite(_val), formatter)?;
formatter.write_str(")")?;
formatter.write_str("(`")?;
Display::fmt(_val, formatter)?;
formatter.write_str("`)")?;
Ok(())
}
_ => unreachable!(),
@ -5040,9 +5040,9 @@ impl Debug for Lite<syn::Type> {
}
syn::Type::Verbatim(_val) => {
formatter.write_str("Verbatim")?;
formatter.write_str("(")?;
Debug::fmt(Lite(_val), formatter)?;
formatter.write_str(")")?;
formatter.write_str("(`")?;
Display::fmt(_val, formatter)?;
formatter.write_str("`)")?;
Ok(())
}
_ => unreachable!(),

17
third_party/rust/syn/tests/debug/mod.rs поставляемый
Просмотреть файл

@ -1,10 +1,7 @@
extern crate proc_macro2;
extern crate ref_cast;
mod gen;
use self::proc_macro2::{Ident, Literal, TokenStream};
use self::ref_cast::RefCast;
use proc_macro2::{Ident, Literal, TokenStream};
use ref_cast::RefCast;
use std::fmt::{self, Debug};
use std::ops::Deref;
use syn::punctuated::Punctuated;
@ -66,7 +63,15 @@ impl Debug for Lite<Literal> {
impl Debug for Lite<TokenStream> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(formatter, "`{}`", self.value)
let string = self.value.to_string();
if string.len() <= 80 {
write!(formatter, "TokenStream(`{}`)", self.value)
} else {
formatter
.debug_tuple("TokenStream")
.field(&format_args!("`{}`", string))
.finish()
}
}
}

Просмотреть файл

@ -1 +0,0 @@
"Hello! You want: cargo test --release --all-features"

22
third_party/rust/syn/tests/features/mod.rs поставляемый
Просмотреть файл

@ -1,22 +0,0 @@
#[allow(unused_macros)]
macro_rules! hide_from_rustfmt {
($mod:item) => {
$mod
};
}
#[cfg(not(all(
feature = "derive",
feature = "full",
feature = "parsing",
feature = "printing",
feature = "visit",
feature = "visit-mut",
feature = "fold",
feature = "clone-impls",
feature = "extra-traits",
feature = "proc-macro",
)))]
hide_from_rustfmt! {
mod error;
}

8
third_party/rust/syn/tests/macros/mod.rs поставляемый
Просмотреть файл

@ -1,5 +1,3 @@
extern crate proc_macro2;
#[path = "../debug/mod.rs"]
pub mod debug;
@ -42,18 +40,18 @@ macro_rules! snapshot_impl {
(($expr:ident) as $t:ty, @$snapshot:literal) => {
let $expr = crate::macros::Tokens::parse::<$t>($expr).unwrap();
let debug = crate::macros::debug::Lite(&$expr);
insta::assert_debug_snapshot_matches!(debug, @$snapshot);
insta::assert_debug_snapshot!(debug, @$snapshot);
};
(($($expr:tt)*) as $t:ty, @$snapshot:literal) => {{
let syntax_tree = crate::macros::Tokens::parse::<$t>($($expr)*).unwrap();
let debug = crate::macros::debug::Lite(&syntax_tree);
insta::assert_debug_snapshot_matches!(debug, @$snapshot);
insta::assert_debug_snapshot!(debug, @$snapshot);
syntax_tree
}};
(($($expr:tt)*) , @$snapshot:literal) => {{
let syntax_tree = $($expr)*;
let debug = crate::macros::debug::Lite(&syntax_tree);
insta::assert_debug_snapshot_matches!(debug, @$snapshot);
insta::assert_debug_snapshot!(debug, @$snapshot);
syntax_tree
}};
(($($expr:tt)*) $next:tt $($rest:tt)*) => {

137
third_party/rust/syn/tests/repo/mod.rs поставляемый
Просмотреть файл

@ -1,8 +1,37 @@
extern crate walkdir;
mod progress;
use std::process::Command;
use self::progress::Progress;
use anyhow::Result;
use flate2::read::GzDecoder;
use std::fs;
use std::path::Path;
use tar::Archive;
use walkdir::DirEntry;
use self::walkdir::DirEntry;
const REVISION: &str = "792c645ca7d11a8d254df307d019c5bf01445c37";
#[rustfmt::skip]
static EXCLUDE: &[&str] = &[
// Compile-fail expr parameter in const generic position: f::<1 + 2>()
"test/ui/const-generics/const-expression-parameter.rs",
// Deprecated anonymous parameter syntax in traits
"test/ui/issues/issue-13105.rs",
"test/ui/issues/issue-13775.rs",
"test/ui/issues/issue-34074.rs",
"test/ui/proc-macro/trait-fn-args-2015.rs",
// Not actually test cases
"test/rustdoc-ui/test-compile-fail2.rs",
"test/rustdoc-ui/test-compile-fail3.rs",
"test/ui/include-single-expr-helper.rs",
"test/ui/include-single-expr-helper-1.rs",
"test/ui/issues/auxiliary/issue-21146-inc.rs",
"test/ui/json-bom-plus-crlf-multifile-aux.rs",
"test/ui/lint/expansion-time-include.rs",
"test/ui/macros/auxiliary/macro-comma-support.rs",
"test/ui/macros/auxiliary/macro-include-items-expr.rs",
];
pub fn base_dir_filter(entry: &DirEntry) -> bool {
let path = entry.path();
@ -12,49 +41,95 @@ pub fn base_dir_filter(entry: &DirEntry) -> bool {
if path.extension().map(|e| e != "rs").unwrap_or(true) {
return false;
}
let path_string = path.to_string_lossy();
let path_string = if cfg!(windows) {
path_string.replace('\\', "/").into()
let mut path_string = path.to_string_lossy();
if cfg!(windows) {
path_string = path_string.replace('\\', "/").into();
}
let path = if let Some(path) = path_string.strip_prefix("tests/rust/src/") {
path
} else if let Some(path) = path_string.strip_prefix("tests/rust/library/") {
path
} else {
path_string
panic!("unexpected path in Rust dist: {}", path_string);
};
// TODO assert that parsing fails on the parse-fail cases
if path_string.starts_with("tests/rust/src/test/parse-fail")
|| path_string.starts_with("tests/rust/src/test/compile-fail")
|| path_string.starts_with("tests/rust/src/test/rustfix")
if path.starts_with("test/parse-fail")
|| path.starts_with("test/compile-fail")
|| path.starts_with("test/rustfix")
{
return false;
}
if path_string.starts_with("tests/rust/src/test/ui") {
let stderr_path = path.with_extension("stderr");
if path.starts_with("test/ui") {
let stderr_path = entry.path().with_extension("stderr");
if stderr_path.exists() {
// Expected to fail in some way
return false;
}
}
match path_string.as_ref() {
// Deprecated placement syntax
"tests/rust/src/test/ui/obsolete-in-place/bad.rs" |
// Deprecated anonymous parameter syntax in traits
"tests/rust/src/test/ui/error-codes/e0119/auxiliary/issue-23563-a.rs" |
"tests/rust/src/test/ui/issues/issue-13105.rs" |
"tests/rust/src/test/ui/issues/issue-13775.rs" |
"tests/rust/src/test/ui/issues/issue-34074.rs" |
// Deprecated await macro syntax
"tests/rust/src/test/ui/async-await/await-macro.rs" |
// 2015-style dyn that libsyntax rejects
"tests/rust/src/test/ui/dyn-keyword/dyn-2015-no-warnings-without-lints.rs" |
// not actually test cases
"tests/rust/src/test/ui/macros/auxiliary/macro-comma-support.rs" |
"tests/rust/src/test/ui/macros/auxiliary/macro-include-items-expr.rs" |
"tests/rust/src/test/ui/issues/auxiliary/issue-21146-inc.rs" => false,
_ => true,
!EXCLUDE.contains(&path)
}
#[allow(dead_code)]
pub fn edition(path: &Path) -> &'static str {
if path.ends_with("dyn-2015-no-warnings-without-lints.rs") {
"2015"
} else {
"2018"
}
}
pub fn clone_rust() {
let result = Command::new("tests/clone.sh").status().unwrap();
assert!(result.success());
let needs_clone = match fs::read_to_string("tests/rust/COMMIT") {
Err(_) => true,
Ok(contents) => contents.trim() != REVISION,
};
if needs_clone {
download_and_unpack().unwrap();
}
let mut missing = String::new();
let test_src = Path::new("tests/rust/src");
for exclude in EXCLUDE {
if !test_src.join(exclude).exists() {
missing += "\ntests/rust/src/";
missing += exclude;
}
}
if !missing.is_empty() {
panic!("excluded test file does not exist:{}\n", missing);
}
}
fn download_and_unpack() -> Result<()> {
let url = format!(
"https://github.com/rust-lang/rust/archive/{}.tar.gz",
REVISION
);
let response = reqwest::blocking::get(&url)?.error_for_status()?;
let progress = Progress::new(response);
let decoder = GzDecoder::new(progress);
let mut archive = Archive::new(decoder);
let prefix = format!("rust-{}", REVISION);
let tests_rust = Path::new("tests/rust");
if tests_rust.exists() {
fs::remove_dir_all(tests_rust)?;
}
for entry in archive.entries()? {
let mut entry = entry?;
let path = entry.path()?;
if path == Path::new("pax_global_header") {
continue;
}
let relative = path.strip_prefix(&prefix)?;
let out = tests_rust.join(relative);
entry.unpack(&out)?;
}
fs::write("tests/rust/COMMIT", REVISION)?;
Ok(())
}

37
third_party/rust/syn/tests/repo/progress.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,37 @@
use std::io::{Read, Result};
use std::time::{Duration, Instant};
pub struct Progress<R> {
bytes: usize,
tick: Instant,
stream: R,
}
impl<R> Progress<R> {
pub fn new(stream: R) -> Self {
Progress {
bytes: 0,
tick: Instant::now() + Duration::from_millis(2000),
stream,
}
}
}
impl<R: Read> Read for Progress<R> {
fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
let num = self.stream.read(buf)?;
self.bytes += num;
let now = Instant::now();
if now > self.tick {
self.tick = now + Duration::from_millis(500);
errorf!("downloading... {} bytes\n", self.bytes);
}
Ok(num)
}
}
impl<R> Drop for Progress<R> {
fn drop(&mut self) {
errorf!("done ({} bytes)\n", self.bytes);
}
}

38
third_party/rust/syn/tests/test_asyncness.rs поставляемый
Просмотреть файл

@ -1,7 +1,3 @@
extern crate syn;
mod features;
#[macro_use]
mod macros;
@ -12,16 +8,16 @@ fn test_async_fn() {
let input = "async fn process() {}";
snapshot!(input as Item, @r###"
Item::Fn {
vis: Inherited,
sig: Signature {
asyncness: Some,
ident: "process",
generics: Generics,
output: Default,
},
block: Block,
}
Item::Fn {
vis: Inherited,
sig: Signature {
asyncness: Some,
ident: "process",
generics: Generics,
output: Default,
},
block: Block,
}
"###);
}
@ -30,12 +26,12 @@ fn test_async_closure() {
let input = "async || {}";
snapshot!(input as Expr, @r###"
Expr::Closure {
asyncness: Some,
output: Default,
body: Expr::Block {
block: Block,
},
}
Expr::Closure {
asyncness: Some,
output: Default,
body: Expr::Block {
block: Block,
},
}
"###);
}

452
third_party/rust/syn/tests/test_attribute.rs поставляемый
Просмотреть файл

@ -1,7 +1,3 @@
extern crate syn;
mod features;
#[macro_use]
mod macros;
@ -13,14 +9,14 @@ fn test_meta_item_word() {
let meta = test("#[foo]");
snapshot!(meta, @r###"
Path(Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
})
Path(Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
})
"###);
}
@ -29,17 +25,17 @@ fn test_meta_item_name_value() {
let meta = test("#[foo = 5]");
snapshot!(meta, @r###"
Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
lit: 5,
}
Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
lit: 5,
}
"###);
}
@ -48,37 +44,37 @@ fn test_meta_item_bool_value() {
let meta = test("#[foo = true]");
snapshot!(meta, @r###"
Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
lit: Lit::Bool {
value: true,
},
}
Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
lit: Lit::Bool {
value: true,
},
}
"###);
let meta = test("#[foo = false]");
snapshot!(meta, @r###"
Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
lit: Lit::Bool {
value: false,
},
}
Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
lit: Lit::Bool {
value: false,
},
}
"###);
}
@ -87,19 +83,19 @@ fn test_meta_item_list_lit() {
let meta = test("#[foo(5)]");
snapshot!(meta, @r###"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
nested: [
Lit(5),
],
}
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
nested: [
Lit(5),
],
}
"###);
}
@ -108,26 +104,26 @@ fn test_meta_item_list_word() {
let meta = test("#[foo(bar)]");
snapshot!(meta, @r###"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
nested: [
Meta(Path(Path {
segments: [
PathSegment {
ident: "bar",
arguments: None,
},
],
})),
],
}
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
nested: [
Meta(Path(Path {
segments: [
PathSegment {
ident: "bar",
arguments: None,
},
],
})),
],
}
"###);
}
@ -136,29 +132,29 @@ fn test_meta_item_list_name_value() {
let meta = test("#[foo(bar = 5)]");
snapshot!(meta, @r###"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
nested: [
Meta(Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "bar",
arguments: None,
},
],
},
lit: 5,
}),
],
}
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
nested: [
Meta(Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "bar",
arguments: None,
},
],
},
lit: 5,
}),
],
}
"###);
}
@ -167,31 +163,31 @@ fn test_meta_item_list_bool_value() {
let meta = test("#[foo(bar = true)]");
snapshot!(meta, @r###"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
nested: [
Meta(Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "bar",
arguments: None,
},
],
},
lit: Lit::Bool {
value: true,
},
}),
],
}
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
nested: [
Meta(Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "bar",
arguments: None,
},
],
},
lit: Lit::Bool {
value: true,
},
}),
],
}
"###);
}
@ -200,68 +196,68 @@ fn test_meta_item_multiple() {
let meta = test("#[foo(word, name = 5, list(name2 = 6), word2)]");
snapshot!(meta, @r###"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
nested: [
Meta(Path(Path {
segments: [
PathSegment {
ident: "word",
arguments: None,
},
],
})),
Meta(Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "name",
arguments: None,
},
],
},
lit: 5,
}),
Meta(Meta::List {
path: Path {
segments: [
PathSegment {
ident: "list",
arguments: None,
},
],
},
nested: [
Meta(Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "name2",
arguments: None,
},
],
},
lit: 6,
}),
],
}),
Meta(Path(Path {
segments: [
PathSegment {
ident: "word2",
arguments: None,
},
],
})),
],
}
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
nested: [
Meta(Path(Path {
segments: [
PathSegment {
ident: "word",
arguments: None,
},
],
})),
Meta(Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "name",
arguments: None,
},
],
},
lit: 5,
}),
Meta(Meta::List {
path: Path {
segments: [
PathSegment {
ident: "list",
arguments: None,
},
],
},
nested: [
Meta(Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "name2",
arguments: None,
},
],
},
lit: 6,
}),
],
}),
Meta(Path(Path {
segments: [
PathSegment {
ident: "word2",
arguments: None,
},
],
})),
],
}
"###);
}
@ -270,21 +266,63 @@ fn test_bool_lit() {
let meta = test("#[foo(true)]");
snapshot!(meta, @r###"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
nested: [
Lit(Lit::Bool {
value: true,
}),
],
}
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
nested: [
Lit(Lit::Bool {
value: true,
}),
],
}
"###);
}
#[test]
fn test_negative_lit() {
let meta = test("#[form(min = -1, max = 200)]");
snapshot!(meta, @r###"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "form",
arguments: None,
},
],
},
nested: [
Meta(Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "min",
arguments: None,
},
],
},
lit: -1,
}),
Meta(Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "max",
arguments: None,
},
],
},
lit: 200,
}),
],
}
"###);
}

1321
third_party/rust/syn/tests/test_derive_input.rs поставляемый

Разница между файлами не показана из-за своего большого размера Загрузить разницу

312
third_party/rust/syn/tests/test_expr.rs поставляемый
Просмотреть файл

@ -1,40 +1,302 @@
extern crate proc_macro2;
extern crate syn;
#[macro_use]
mod macros;
use std::str::FromStr;
use proc_macro2::TokenStream;
use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
use quote::quote;
use std::iter::FromIterator;
use syn::{Expr, ExprRange};
#[test]
fn test_expr_parse() {
let code = "..100u32";
let tt = TokenStream::from_str(code).unwrap();
let expr: Expr = syn::parse2(tt.clone()).unwrap();
let expr_range: ExprRange = syn::parse2(tt).unwrap();
assert_eq!(expr, Expr::Range(expr_range));
let tokens = quote!(..100u32);
snapshot!(tokens as Expr, @r###"
Expr::Range {
limits: HalfOpen,
to: Some(Expr::Lit {
lit: 100u32,
}),
}
"###);
let tokens = quote!(..100u32);
snapshot!(tokens as ExprRange, @r###"
ExprRange {
limits: HalfOpen,
to: Some(Expr::Lit {
lit: 100u32,
}),
}
"###);
}
#[test]
fn test_await() {
// Must not parse as Expr::Field.
let expr = syn::parse_str::<Expr>("fut.await").unwrap();
let tokens = quote!(fut.await);
snapshot!(expr, @r###"
Expr::Await {
base: Expr::Path {
path: Path {
segments: [
PathSegment {
ident: "fut",
arguments: None,
},
],
},
},
}
snapshot!(tokens as Expr, @r###"
Expr::Await {
base: Expr::Path {
path: Path {
segments: [
PathSegment {
ident: "fut",
arguments: None,
},
],
},
},
}
"###);
}
#[rustfmt::skip]
#[test]
fn test_tuple_multi_index() {
for &input in &[
"tuple.0.0",
"tuple .0.0",
"tuple. 0.0",
"tuple.0 .0",
"tuple.0. 0",
"tuple . 0 . 0",
] {
snapshot!(input as Expr, @r###"
Expr::Field {
base: Expr::Field {
base: Expr::Path {
path: Path {
segments: [
PathSegment {
ident: "tuple",
arguments: None,
},
],
},
},
member: Unnamed(Index {
index: 0,
}),
},
member: Unnamed(Index {
index: 0,
}),
}
"###);
}
for tokens in vec![
quote!(tuple.0.0),
quote!(tuple .0.0),
quote!(tuple. 0.0),
quote!(tuple.0 .0),
quote!(tuple.0. 0),
quote!(tuple . 0 . 0),
] {
snapshot!(tokens as Expr, @r###"
Expr::Field {
base: Expr::Field {
base: Expr::Path {
path: Path {
segments: [
PathSegment {
ident: "tuple",
arguments: None,
},
],
},
},
member: Unnamed(Index {
index: 0,
}),
},
member: Unnamed(Index {
index: 0,
}),
}
"###);
}
}
#[test]
fn test_macro_variable_func() {
// mimics the token stream corresponding to `$fn()`
let tokens = TokenStream::from_iter(vec![
TokenTree::Group(Group::new(Delimiter::None, quote! { f })),
TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
]);
snapshot!(tokens as Expr, @r###"
Expr::Call {
func: Expr::Group {
expr: Expr::Path {
path: Path {
segments: [
PathSegment {
ident: "f",
arguments: None,
},
],
},
},
},
}
"###);
let tokens = TokenStream::from_iter(vec![
TokenTree::Punct(Punct::new('#', Spacing::Alone)),
TokenTree::Group(Group::new(Delimiter::Bracket, quote! { outside })),
TokenTree::Group(Group::new(Delimiter::None, quote! { #[inside] f })),
TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
]);
snapshot!(tokens as Expr, @r###"
Expr::Call {
attrs: [
Attribute {
style: Outer,
path: Path {
segments: [
PathSegment {
ident: "outside",
arguments: None,
},
],
},
tokens: TokenStream(``),
},
],
func: Expr::Group {
expr: Expr::Path {
attrs: [
Attribute {
style: Outer,
path: Path {
segments: [
PathSegment {
ident: "inside",
arguments: None,
},
],
},
tokens: TokenStream(``),
},
],
path: Path {
segments: [
PathSegment {
ident: "f",
arguments: None,
},
],
},
},
},
}
"###);
}
#[test]
fn test_macro_variable_macro() {
// mimics the token stream corresponding to `$macro!()`
let tokens = TokenStream::from_iter(vec![
TokenTree::Group(Group::new(Delimiter::None, quote! { m })),
TokenTree::Punct(Punct::new('!', Spacing::Alone)),
TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
]);
snapshot!(tokens as Expr, @r###"
Expr::Macro {
mac: Macro {
path: Path {
segments: [
PathSegment {
ident: "m",
arguments: None,
},
],
},
delimiter: Paren,
tokens: TokenStream(``),
},
}
"###);
}
#[test]
fn test_macro_variable_struct() {
// mimics the token stream corresponding to `$struct {}`
let tokens = TokenStream::from_iter(vec![
TokenTree::Group(Group::new(Delimiter::None, quote! { S })),
TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())),
]);
snapshot!(tokens as Expr, @r###"
Expr::Struct {
path: Path {
segments: [
PathSegment {
ident: "S",
arguments: None,
},
],
},
}
"###);
}
#[test]
fn test_macro_variable_match_arm() {
// mimics the token stream corresponding to `match v { _ => $expr }`
let tokens = TokenStream::from_iter(vec![
TokenTree::Ident(Ident::new("match", Span::call_site())),
TokenTree::Ident(Ident::new("v", Span::call_site())),
TokenTree::Group(Group::new(
Delimiter::Brace,
TokenStream::from_iter(vec![
TokenTree::Punct(Punct::new('_', Spacing::Alone)),
TokenTree::Punct(Punct::new('=', Spacing::Joint)),
TokenTree::Punct(Punct::new('>', Spacing::Alone)),
TokenTree::Group(Group::new(Delimiter::None, quote! { #[a] () })),
]),
)),
]);
snapshot!(tokens as Expr, @r###"
Expr::Match {
expr: Expr::Path {
path: Path {
segments: [
PathSegment {
ident: "v",
arguments: None,
},
],
},
},
arms: [
Arm {
pat: Pat::Wild,
body: Expr::Group {
expr: Expr::Tuple {
attrs: [
Attribute {
style: Outer,
path: Path {
segments: [
PathSegment {
ident: "a",
arguments: None,
},
],
},
tokens: TokenStream(``),
},
],
},
},
},
],
}
"###);
}

371
third_party/rust/syn/tests/test_generics.rs поставляемый
Просмотреть файл

@ -1,8 +1,3 @@
extern crate quote;
extern crate syn;
mod features;
#[macro_use]
mod macros;
@ -16,90 +11,90 @@ fn test_split_for_impl() {
};
snapshot!(input as DeriveInput, @r###"
DeriveInput {
vis: Inherited,
ident: "S",
generics: Generics {
lt_token: Some,
params: [
Lifetime(LifetimeDef {
lifetime: Lifetime {
ident: "a",
},
}),
Lifetime(LifetimeDef {
lifetime: Lifetime {
ident: "b",
},
colon_token: Some,
bounds: [
Lifetime {
ident: "a",
},
],
}),
Type(TypeParam {
attrs: [
Attribute {
style: Outer,
path: Path {
segments: [
PathSegment {
ident: "may_dangle",
arguments: None,
},
],
},
tokens: ``,
},
],
ident: "T",
colon_token: Some,
bounds: [
Lifetime(Lifetime {
ident: "a",
}),
],
eq_token: Some,
default: Some(Type::Tuple),
}),
],
gt_token: Some,
where_clause: Some(WhereClause {
predicates: [
Type(PredicateType {
bounded_ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "T",
arguments: None,
},
],
},
},
bounds: [
Trait(TraitBound {
modifier: None,
path: Path {
segments: [
PathSegment {
ident: "Debug",
arguments: None,
},
],
},
}),
],
}),
],
}),
},
data: Data::Struct {
fields: Unit,
semi_token: Some,
},
}
DeriveInput {
vis: Inherited,
ident: "S",
generics: Generics {
lt_token: Some,
params: [
Lifetime(LifetimeDef {
lifetime: Lifetime {
ident: "a",
},
}),
Lifetime(LifetimeDef {
lifetime: Lifetime {
ident: "b",
},
colon_token: Some,
bounds: [
Lifetime {
ident: "a",
},
],
}),
Type(TypeParam {
attrs: [
Attribute {
style: Outer,
path: Path {
segments: [
PathSegment {
ident: "may_dangle",
arguments: None,
},
],
},
tokens: TokenStream(``),
},
],
ident: "T",
colon_token: Some,
bounds: [
Lifetime(Lifetime {
ident: "a",
}),
],
eq_token: Some,
default: Some(Type::Tuple),
}),
],
gt_token: Some,
where_clause: Some(WhereClause {
predicates: [
Type(PredicateType {
bounded_ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "T",
arguments: None,
},
],
},
},
bounds: [
Trait(TraitBound {
modifier: None,
path: Path {
segments: [
PathSegment {
ident: "Debug",
arguments: None,
},
],
},
}),
],
}),
],
}),
},
data: Data::Struct {
fields: Unit,
semi_token: Some,
},
}
"###);
let generics = input.generics;
@ -131,46 +126,46 @@ fn test_split_for_impl() {
fn test_ty_param_bound() {
let tokens = quote!('a);
snapshot!(tokens as TypeParamBound, @r###"
Lifetime(Lifetime {
ident: "a",
})
Lifetime(Lifetime {
ident: "a",
})
"###);
let tokens = quote!('_);
snapshot!(tokens as TypeParamBound, @r###"
Lifetime(Lifetime {
ident: "_",
})
Lifetime(Lifetime {
ident: "_",
})
"###);
let tokens = quote!(Debug);
snapshot!(tokens as TypeParamBound, @r###"
Trait(TraitBound {
modifier: None,
path: Path {
segments: [
PathSegment {
ident: "Debug",
arguments: None,
},
],
},
})
Trait(TraitBound {
modifier: None,
path: Path {
segments: [
PathSegment {
ident: "Debug",
arguments: None,
},
],
},
})
"###);
let tokens = quote!(?Sized);
snapshot!(tokens as TypeParamBound, @r###"
Trait(TraitBound {
modifier: Maybe,
path: Path {
segments: [
PathSegment {
ident: "Sized",
arguments: None,
},
],
},
})
Trait(TraitBound {
modifier: Maybe,
path: Path {
segments: [
PathSegment {
ident: "Sized",
arguments: None,
},
],
},
})
"###);
}
@ -187,76 +182,76 @@ fn test_fn_precedence_in_where_clause() {
};
snapshot!(input as ItemFn, @r###"
ItemFn {
vis: Inherited,
sig: Signature {
ident: "f",
generics: Generics {
lt_token: Some,
params: [
Type(TypeParam {
ident: "G",
}),
],
gt_token: Some,
where_clause: Some(WhereClause {
predicates: [
Type(PredicateType {
bounded_ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "G",
arguments: None,
},
],
},
},
bounds: [
Trait(TraitBound {
modifier: None,
path: Path {
segments: [
PathSegment {
ident: "FnOnce",
arguments: PathArguments::Parenthesized {
output: Type(
Type::Path {
path: Path {
segments: [
PathSegment {
ident: "i32",
arguments: None,
},
],
},
},
),
},
},
],
},
}),
Trait(TraitBound {
modifier: None,
path: Path {
segments: [
PathSegment {
ident: "Send",
arguments: None,
},
],
},
}),
],
}),
],
}),
},
output: Default,
},
block: Block,
}
ItemFn {
vis: Inherited,
sig: Signature {
ident: "f",
generics: Generics {
lt_token: Some,
params: [
Type(TypeParam {
ident: "G",
}),
],
gt_token: Some,
where_clause: Some(WhereClause {
predicates: [
Type(PredicateType {
bounded_ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "G",
arguments: None,
},
],
},
},
bounds: [
Trait(TraitBound {
modifier: None,
path: Path {
segments: [
PathSegment {
ident: "FnOnce",
arguments: PathArguments::Parenthesized {
output: Type(
Type::Path {
path: Path {
segments: [
PathSegment {
ident: "i32",
arguments: None,
},
],
},
},
),
},
},
],
},
}),
Trait(TraitBound {
modifier: None,
path: Path {
segments: [
PathSegment {
ident: "Send",
arguments: None,
},
],
},
}),
],
}),
],
}),
},
output: Default,
},
block: Block,
}
"###);
let where_clause = input.sig.generics.where_clause.as_ref().unwrap();
@ -270,7 +265,7 @@ fn test_fn_precedence_in_where_clause() {
assert_eq!(predicate.bounds.len(), 2, "{:#?}", predicate.bounds);
let first_bound = &predicate.bounds[0];
assert_eq!(quote!(#first_bound).to_string(), "FnOnce ( ) -> i32");
assert_eq!(quote!(#first_bound).to_string(), "FnOnce () -> i32");
let second_bound = &predicate.bounds[1];
assert_eq!(quote!(#second_bound).to_string(), "Send");

53
third_party/rust/syn/tests/test_grouping.rs поставляемый
Просмотреть файл

@ -1,8 +1,3 @@
extern crate proc_macro2;
extern crate syn;
mod features;
#[macro_use]
mod macros;
@ -28,31 +23,31 @@ fn test_grouping() {
TokenTree::Literal(Literal::i32_suffixed(4)),
]);
assert_eq!(tokens.to_string(), "1i32 + 2i32 + 3i32 * 4i32");
assert_eq!(tokens.to_string(), "1i32 + 2i32 + 3i32 * 4i32");
snapshot!(tokens as Expr, @r###"
Expr::Binary {
left: Expr::Lit {
lit: 1i32,
},
op: Add,
right: Expr::Binary {
left: Expr::Group {
expr: Expr::Binary {
left: Expr::Lit {
lit: 2i32,
},
op: Add,
right: Expr::Lit {
lit: 3i32,
},
},
},
op: Mul,
right: Expr::Lit {
lit: 4i32,
},
},
}
Expr::Binary {
left: Expr::Lit {
lit: 1i32,
},
op: Add,
right: Expr::Binary {
left: Expr::Group {
expr: Expr::Binary {
left: Expr::Lit {
lit: 2i32,
},
op: Add,
right: Expr::Lit {
lit: 3i32,
},
},
},
op: Mul,
right: Expr::Lit {
lit: 4i32,
},
},
}
"###);
}

5
third_party/rust/syn/tests/test_ident.rs поставляемый
Просмотреть файл

@ -1,8 +1,3 @@
extern crate proc_macro2;
extern crate syn;
mod features;
use proc_macro2::{Ident, Span, TokenStream};
use std::str::FromStr;
use syn::Result;

45
third_party/rust/syn/tests/test_item.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,45 @@
#[macro_use]
mod macros;
use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
use quote::quote;
use std::iter::FromIterator;
use syn::Item;
#[test]
fn test_macro_variable_attr() {
// mimics the token stream corresponding to `$attr fn f() {}`
let tokens = TokenStream::from_iter(vec![
TokenTree::Group(Group::new(Delimiter::None, quote! { #[test] })),
TokenTree::Ident(Ident::new("fn", Span::call_site())),
TokenTree::Ident(Ident::new("f", Span::call_site())),
TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())),
]);
snapshot!(tokens as Item, @r###"
Item::Fn {
attrs: [
Attribute {
style: Outer,
path: Path {
segments: [
PathSegment {
ident: "test",
arguments: None,
},
],
},
tokens: TokenStream(``),
},
],
vis: Inherited,
sig: Signature {
ident: "f",
generics: Generics,
output: Default,
},
block: Block,
}
"###);
}

Просмотреть файл

@ -1,10 +1,5 @@
use syn::punctuated::{Pair, Punctuated};
extern crate quote;
#[macro_use]
extern crate syn;
mod features;
use syn::Token;
#[macro_use]
mod macros;

75
third_party/rust/syn/tests/test_lit.rs поставляемый
Просмотреть файл

@ -1,13 +1,11 @@
extern crate proc_macro2;
extern crate quote;
extern crate syn;
#[macro_use]
mod macros;
mod features;
use proc_macro2::{TokenStream, TokenTree};
use proc_macro2::{Delimiter, Group, Literal, Span, TokenStream, TokenTree};
use quote::ToTokens;
use std::iter::FromIterator;
use std::str::FromStr;
use syn::Lit;
use syn::{Lit, LitFloat, LitInt};
fn lit(s: &str) -> Lit {
match TokenStream::from_str(s)
@ -50,6 +48,9 @@ fn strings() {
"contains\nnewlinesescaped newlines",
);
test_string("r\"raw\nstring\\\nhere\"", "raw\nstring\\\nhere");
test_string("\"...\"q", "...");
test_string("r\"...\"q", "...");
test_string("r##\"...\"##q", "...");
}
#[test]
@ -79,6 +80,9 @@ fn byte_strings() {
b"contains\nnewlinesescaped newlines",
);
test_byte_string("br\"raw\nstring\\\nhere\"", b"raw\nstring\\\nhere");
test_byte_string("b\"...\"q", b"...");
test_byte_string("br\"...\"q", b"...");
test_byte_string("br##\"...\"##q", b"...");
}
#[test]
@ -100,6 +104,7 @@ fn bytes() {
test_byte("b'\\t'", b'\t');
test_byte("b'\\''", b'\'');
test_byte("b'\"'", b'"');
test_byte("b'a'q", b'a');
}
#[test]
@ -125,6 +130,7 @@ fn chars() {
test_char("'\\''", '\'');
test_char("'\"'", '"');
test_char("'\\u{1F415}'", '\u{1F415}');
test_char("'a'q", 'a');
}
#[test]
@ -185,4 +191,59 @@ fn floats() {
test_float("5.5e12", 5.5e12, "");
test_float("1.0__3e-12", 1.03e-12, "");
test_float("1.03e+12", 1.03e12, "");
test_float("9e99e99", 9e99, "e99");
}
#[test]
fn negative() {
let span = Span::call_site();
assert_eq!("-1", LitInt::new("-1", span).to_string());
assert_eq!("-1i8", LitInt::new("-1i8", span).to_string());
assert_eq!("-1i16", LitInt::new("-1i16", span).to_string());
assert_eq!("-1i32", LitInt::new("-1i32", span).to_string());
assert_eq!("-1i64", LitInt::new("-1i64", span).to_string());
assert_eq!("-1.5", LitFloat::new("-1.5", span).to_string());
assert_eq!("-1.5f32", LitFloat::new("-1.5f32", span).to_string());
assert_eq!("-1.5f64", LitFloat::new("-1.5f64", span).to_string());
}
#[test]
fn suffix() {
fn get_suffix(token: &str) -> String {
let lit = syn::parse_str::<Lit>(token).unwrap();
match lit {
Lit::Str(lit) => lit.suffix().to_owned(),
Lit::ByteStr(lit) => lit.suffix().to_owned(),
Lit::Byte(lit) => lit.suffix().to_owned(),
Lit::Char(lit) => lit.suffix().to_owned(),
Lit::Int(lit) => lit.suffix().to_owned(),
Lit::Float(lit) => lit.suffix().to_owned(),
_ => unimplemented!(),
}
}
assert_eq!(get_suffix("\"\"s"), "s");
assert_eq!(get_suffix("r\"\"r"), "r");
assert_eq!(get_suffix("b\"\"b"), "b");
assert_eq!(get_suffix("br\"\"br"), "br");
assert_eq!(get_suffix("r#\"\"#r"), "r");
assert_eq!(get_suffix("'c'c"), "c");
assert_eq!(get_suffix("b'b'b"), "b");
assert_eq!(get_suffix("1i32"), "i32");
assert_eq!(get_suffix("1_i32"), "i32");
assert_eq!(get_suffix("1.0f32"), "f32");
assert_eq!(get_suffix("1.0_f32"), "f32");
}
#[test]
fn test_deep_group_empty() {
let tokens = TokenStream::from_iter(vec![TokenTree::Group(Group::new(
Delimiter::None,
TokenStream::from_iter(vec![TokenTree::Group(Group::new(
Delimiter::None,
TokenStream::from_iter(vec![TokenTree::Literal(Literal::string("hi"))]),
))]),
))]);
snapshot!(tokens as Lit, @r#""hi""# );
}

498
third_party/rust/syn/tests/test_meta.rs поставляемый
Просмотреть файл

@ -1,7 +1,3 @@
extern crate syn;
mod features;
#[macro_use]
mod macros;
@ -12,14 +8,14 @@ fn test_parse_meta_item_word() {
let input = "hello";
snapshot!(input as Meta, @r###"
Path(Path {
segments: [
PathSegment {
ident: "hello",
arguments: None,
},
],
})
Path(Path {
segments: [
PathSegment {
ident: "hello",
arguments: None,
},
],
})
"###);
}
@ -29,31 +25,31 @@ fn test_parse_meta_name_value() {
let (inner, meta) = (input, input);
snapshot!(inner as MetaNameValue, @r###"
MetaNameValue {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
lit: 5,
}
MetaNameValue {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
lit: 5,
}
"###);
snapshot!(meta as Meta, @r###"
Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
lit: 5,
}
Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
lit: 5,
}
"###);
assert_eq!(meta, inner.into());
@ -65,31 +61,31 @@ fn test_parse_meta_name_value_with_keyword() {
let (inner, meta) = (input, input);
snapshot!(inner as MetaNameValue, @r###"
MetaNameValue {
path: Path {
segments: [
PathSegment {
ident: "static",
arguments: None,
},
],
},
lit: 5,
}
MetaNameValue {
path: Path {
segments: [
PathSegment {
ident: "static",
arguments: None,
},
],
},
lit: 5,
}
"###);
snapshot!(meta as Meta, @r###"
Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "static",
arguments: None,
},
],
},
lit: 5,
}
Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "static",
arguments: None,
},
],
},
lit: 5,
}
"###);
assert_eq!(meta, inner.into());
@ -101,31 +97,31 @@ fn test_parse_meta_name_value_with_bool() {
let (inner, meta) = (input, input);
snapshot!(inner as MetaNameValue, @r###"
MetaNameValue {
path: Path {
segments: [
PathSegment {
ident: "true",
arguments: None,
},
],
},
lit: 5,
}
MetaNameValue {
path: Path {
segments: [
PathSegment {
ident: "true",
arguments: None,
},
],
},
lit: 5,
}
"###);
snapshot!(meta as Meta, @r###"
Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "true",
arguments: None,
},
],
},
lit: 5,
}
Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "true",
arguments: None,
},
],
},
lit: 5,
}
"###);
assert_eq!(meta, inner.into());
@ -137,35 +133,35 @@ fn test_parse_meta_item_list_lit() {
let (inner, meta) = (input, input);
snapshot!(inner as MetaList, @r###"
MetaList {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
nested: [
Lit(5),
],
}
MetaList {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
nested: [
Lit(5),
],
}
"###);
snapshot!(meta as Meta, @r###"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
nested: [
Lit(5),
],
}
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
nested: [
Lit(5),
],
}
"###);
assert_eq!(meta, inner.into());
@ -177,133 +173,133 @@ fn test_parse_meta_item_multiple() {
let (inner, meta) = (input, input);
snapshot!(inner as MetaList, @r###"
MetaList {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
nested: [
Meta(Path(Path {
segments: [
PathSegment {
ident: "word",
arguments: None,
},
],
})),
Meta(Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "name",
arguments: None,
},
],
},
lit: 5,
}),
Meta(Meta::List {
path: Path {
segments: [
PathSegment {
ident: "list",
arguments: None,
},
],
},
nested: [
Meta(Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "name2",
arguments: None,
},
],
},
lit: 6,
}),
],
}),
Meta(Path(Path {
segments: [
PathSegment {
ident: "word2",
arguments: None,
},
],
})),
],
}
MetaList {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
nested: [
Meta(Path(Path {
segments: [
PathSegment {
ident: "word",
arguments: None,
},
],
})),
Meta(Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "name",
arguments: None,
},
],
},
lit: 5,
}),
Meta(Meta::List {
path: Path {
segments: [
PathSegment {
ident: "list",
arguments: None,
},
],
},
nested: [
Meta(Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "name2",
arguments: None,
},
],
},
lit: 6,
}),
],
}),
Meta(Path(Path {
segments: [
PathSegment {
ident: "word2",
arguments: None,
},
],
})),
],
}
"###);
snapshot!(meta as Meta, @r###"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
nested: [
Meta(Path(Path {
segments: [
PathSegment {
ident: "word",
arguments: None,
},
],
})),
Meta(Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "name",
arguments: None,
},
],
},
lit: 5,
}),
Meta(Meta::List {
path: Path {
segments: [
PathSegment {
ident: "list",
arguments: None,
},
],
},
nested: [
Meta(Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "name2",
arguments: None,
},
],
},
lit: 6,
}),
],
}),
Meta(Path(Path {
segments: [
PathSegment {
ident: "word2",
arguments: None,
},
],
})),
],
}
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
arguments: None,
},
],
},
nested: [
Meta(Path(Path {
segments: [
PathSegment {
ident: "word",
arguments: None,
},
],
})),
Meta(Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "name",
arguments: None,
},
],
},
lit: 5,
}),
Meta(Meta::List {
path: Path {
segments: [
PathSegment {
ident: "list",
arguments: None,
},
],
},
nested: [
Meta(Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "name2",
arguments: None,
},
],
},
lit: 6,
}),
],
}),
Meta(Path(Path {
segments: [
PathSegment {
ident: "word2",
arguments: None,
},
],
})),
],
}
"###);
assert_eq!(meta, inner.into());
@ -316,28 +312,28 @@ fn test_parse_nested_meta() {
let input = "list(name2 = 6)";
snapshot!(input as NestedMeta, @r###"
Meta(Meta::List {
path: Path {
segments: [
PathSegment {
ident: "list",
arguments: None,
},
],
},
nested: [
Meta(Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "name2",
arguments: None,
},
],
},
lit: 6,
}),
],
})
Meta(Meta::List {
path: Path {
segments: [
PathSegment {
ident: "list",
arguments: None,
},
],
},
nested: [
Meta(Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "name2",
arguments: None,
},
],
},
lit: 6,
}),
],
})
"###);
}

Просмотреть файл

@ -1,7 +1,7 @@
#[macro_use]
extern crate syn;
use proc_macro2::{Delimiter, Group, Punct, Spacing, TokenStream, TokenTree};
use std::iter::FromIterator;
use syn::parse::{discouraged::Speculative, Parse, ParseStream, Parser, Result};
use syn::{parenthesized, Token};
#[test]
#[should_panic(expected = "Fork was not derived from the advancing parse stream")]
@ -53,3 +53,38 @@ fn smuggled_speculative_cursor_into_brackets() {
syn::parse_str::<BreakRules>("()").unwrap();
}
#[test]
fn trailing_empty_none_group() {
fn parse(input: ParseStream) -> Result<()> {
input.parse::<Token![+]>()?;
let content;
parenthesized!(content in input);
content.parse::<Token![+]>()?;
Ok(())
}
// `+ ( + <Ø Ø> ) <Ø <Ø Ø> Ø>`
let tokens = TokenStream::from_iter(vec![
TokenTree::Punct(Punct::new('+', Spacing::Alone)),
TokenTree::Group(Group::new(
Delimiter::Parenthesis,
TokenStream::from_iter(vec![
TokenTree::Punct(Punct::new('+', Spacing::Alone)),
TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
]),
)),
TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
TokenTree::Group(Group::new(
Delimiter::None,
TokenStream::from_iter(vec![TokenTree::Group(Group::new(
Delimiter::None,
TokenStream::new(),
))]),
)),
]);
parse.parse2(tokens).unwrap();
}

12
third_party/rust/syn/tests/test_parse_stream.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,12 @@
use syn::ext::IdentExt;
use syn::parse::ParseStream;
use syn::{Ident, Token};
#[test]
fn test_peek() {
let _ = |input: ParseStream| {
let _ = input.peek(Ident);
let _ = input.peek(Ident::peek_any);
let _ = input.peek(Token![::]);
};
}

27
third_party/rust/syn/tests/test_pat.rs поставляемый
Просмотреть файл

@ -1,10 +1,5 @@
extern crate quote;
extern crate syn;
mod features;
use quote::quote;
use syn::Pat;
use syn::{Item, Pat, Stmt};
#[test]
fn test_pat_ident() {
@ -21,3 +16,23 @@ fn test_pat_path() {
value => panic!("expected PatPath, got {:?}", value),
}
}
#[test]
fn test_leading_vert() {
// https://github.com/rust-lang/rust/blob/1.43.0/src/test/ui/or-patterns/remove-leading-vert.rs
syn::parse_str::<Item>("fn f() {}").unwrap();
syn::parse_str::<Item>("fn fun1(| A: E) {}").unwrap_err();
syn::parse_str::<Item>("fn fun2(|| A: E) {}").unwrap_err();
syn::parse_str::<Stmt>("let | () = ();").unwrap();
syn::parse_str::<Stmt>("let (| A): E;").unwrap_err();
syn::parse_str::<Stmt>("let (|| A): (E);").unwrap_err();
syn::parse_str::<Stmt>("let (| A,): (E,);").unwrap_err();
syn::parse_str::<Stmt>("let [| A]: [E; 1];").unwrap_err();
syn::parse_str::<Stmt>("let [|| A]: [E; 1];").unwrap_err();
syn::parse_str::<Stmt>("let TS(| A): TS;").unwrap_err();
syn::parse_str::<Stmt>("let TS(|| A): TS;").unwrap_err();
syn::parse_str::<Stmt>("let NS { f: | A }: NS;").unwrap_err();
syn::parse_str::<Stmt>("let NS { f: || A }: NS;").unwrap_err();
}

52
third_party/rust/syn/tests/test_path.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,52 @@
#[macro_use]
mod macros;
use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
use quote::quote;
use std::iter::FromIterator;
use syn::{Expr, Type};
#[test]
fn parse_interpolated_leading_component() {
// mimics the token stream corresponding to `$mod::rest`
let tokens = TokenStream::from_iter(vec![
TokenTree::Group(Group::new(Delimiter::None, quote! { first })),
TokenTree::Punct(Punct::new(':', Spacing::Joint)),
TokenTree::Punct(Punct::new(':', Spacing::Alone)),
TokenTree::Ident(Ident::new("rest", Span::call_site())),
]);
snapshot!(tokens.clone() as Expr, @r###"
Expr::Path {
path: Path {
segments: [
PathSegment {
ident: "first",
arguments: None,
},
PathSegment {
ident: "rest",
arguments: None,
},
],
},
}
"###);
snapshot!(tokens as Type, @r###"
Type::Path {
path: Path {
segments: [
PathSegment {
ident: "first",
arguments: None,
},
PathSegment {
ident: "rest",
arguments: None,
},
],
},
}
"###);
}

196
third_party/rust/syn/tests/test_precedence.rs поставляемый
Просмотреть файл

@ -4,35 +4,26 @@
//! The tests in this module do the following:
//!
//! 1. Parse a given expression in both `syn` and `libsyntax`.
//! 1. Parse a given expression in both `syn` and `librustc`.
//! 2. Fold over the expression adding brackets around each subexpression (with
//! some complications - see the `syn_brackets` and `libsyntax_brackets`
//! some complications - see the `syn_brackets` and `librustc_brackets`
//! methods).
//! 3. Serialize the `syn` expression back into a string, and re-parse it with
//! `libsyntax`.
//! `librustc`.
//! 4. Respan all of the expressions, replacing the spans with the default
//! spans.
//! 5. Compare the expressions with one another, if they are not equal fail.
extern crate quote;
extern crate rayon;
extern crate regex;
extern crate rustc_ast;
extern crate rustc_data_structures;
extern crate smallvec;
extern crate syn;
extern crate syntax;
extern crate syntax_pos;
extern crate walkdir;
mod features;
extern crate rustc_span;
use quote::quote;
use rayon::iter::{IntoParallelIterator, ParallelIterator};
use regex::Regex;
use smallvec::smallvec;
use syntax::ast;
use syntax::ptr::P;
use syntax_pos::edition::Edition;
use rustc_ast::ast;
use rustc_ast::ptr::P;
use rustc_span::edition::Edition;
use walkdir::{DirEntry, WalkDir};
use std::fs::File;
@ -73,7 +64,7 @@ fn test_simple_precedence() {
continue;
};
let pf = match test_expressions(vec![expr]) {
let pf = match test_expressions(Edition::Edition2018, vec![expr]) {
(1, 0) => "passed",
(0, 1) => {
failed += 1;
@ -91,8 +82,8 @@ fn test_simple_precedence() {
/// Test expressions from rustc, like in `test_round_trip`.
#[test]
#[cfg_attr(target_os = "windows", ignore = "requires nix .sh")]
fn test_rustc_precedence() {
common::rayon_init();
repo::clone_rust();
let abort_after = common::abort_after();
if abort_after == 0 {
@ -118,15 +109,6 @@ fn test_rustc_precedence() {
return;
}
// Our version of `libsyntax` can't parse this tests
if path
.to_str()
.unwrap()
.ends_with("optional_comma_in_match_arm.rs")
{
return;
}
let mut file = File::open(path).unwrap();
let mut content = String::new();
file.read_to_string(&mut content).unwrap();
@ -134,8 +116,9 @@ fn test_rustc_precedence() {
let (l_passed, l_failed) = match syn::parse_file(&content) {
Ok(file) => {
let edition = repo::edition(path).parse().unwrap();
let exprs = collect_exprs(file);
test_expressions(exprs)
test_expressions(edition, exprs)
}
Err(msg) => {
errorf!("syn failed to parse\n{:?}\n", msg);
@ -169,36 +152,36 @@ fn test_rustc_precedence() {
}
}
fn test_expressions(exprs: Vec<syn::Expr>) -> (usize, usize) {
fn test_expressions(edition: Edition, exprs: Vec<syn::Expr>) -> (usize, usize) {
let mut passed = 0;
let mut failed = 0;
syntax::with_globals(Edition::Edition2018, || {
rustc_span::with_session_globals(edition, || {
for expr in exprs {
let raw = quote!(#expr).to_string();
let libsyntax_ast = if let Some(e) = libsyntax_parse_and_rewrite(&raw) {
let librustc_ast = if let Some(e) = librustc_parse_and_rewrite(&raw) {
e
} else {
failed += 1;
errorf!("\nFAIL - libsyntax failed to parse raw\n");
errorf!("\nFAIL - librustc failed to parse raw\n");
continue;
};
let syn_expr = syn_brackets(expr);
let syn_ast = if let Some(e) = parse::libsyntax_expr(&quote!(#syn_expr).to_string()) {
let syn_ast = if let Some(e) = parse::librustc_expr(&quote!(#syn_expr).to_string()) {
e
} else {
failed += 1;
errorf!("\nFAIL - libsyntax failed to parse bracketed\n");
errorf!("\nFAIL - librustc failed to parse bracketed\n");
continue;
};
if SpanlessEq::eq(&syn_ast, &libsyntax_ast) {
if SpanlessEq::eq(&syn_ast, &librustc_ast) {
passed += 1;
} else {
failed += 1;
errorf!("\nFAIL\n{:?}\n!=\n{:?}\n", syn_ast, libsyntax_ast);
errorf!("\nFAIL\n{:?}\n!=\n{:?}\n", syn_ast, librustc_ast);
}
}
});
@ -206,54 +189,106 @@ fn test_expressions(exprs: Vec<syn::Expr>) -> (usize, usize) {
(passed, failed)
}
fn libsyntax_parse_and_rewrite(input: &str) -> Option<P<ast::Expr>> {
parse::libsyntax_expr(input).and_then(libsyntax_brackets)
fn librustc_parse_and_rewrite(input: &str) -> Option<P<ast::Expr>> {
parse::librustc_expr(input).and_then(librustc_brackets)
}
/// Wrap every expression which is not already wrapped in parens with parens, to
/// reveal the precidence of the parsed expressions, and produce a stringified
/// form of the resulting expression.
///
/// This method operates on libsyntax objects.
fn libsyntax_brackets(mut libsyntax_expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
/// This method operates on librustc objects.
fn librustc_brackets(mut librustc_expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
use rustc_ast::ast::{
Block, BorrowKind, Expr, ExprKind, Field, GenericArg, MacCall, Pat, Stmt, StmtKind, Ty,
};
use rustc_ast::mut_visit::{noop_visit_generic_arg, MutVisitor};
use rustc_data_structures::map_in_place::MapInPlace;
use rustc_data_structures::thin_vec::ThinVec;
use smallvec::SmallVec;
use rustc_span::DUMMY_SP;
use std::mem;
use syntax::ast::{Expr, ExprKind, Field, Mac, Pat, Stmt, StmtKind, Ty};
use syntax::mut_visit::{noop_visit_expr, MutVisitor};
use syntax_pos::DUMMY_SP;
struct BracketsVisitor {
failed: bool,
};
fn flat_map_field<T: MutVisitor>(mut f: Field, vis: &mut T) -> Vec<Field> {
if f.is_shorthand {
noop_visit_expr(&mut f.expr, vis);
} else {
vis.visit_expr(&mut f.expr);
}
vec![f]
}
fn flat_map_stmt<T: MutVisitor>(stmt: Stmt, vis: &mut T) -> Vec<Stmt> {
let kind = match stmt.kind {
// Don't wrap toplevel expressions in statements.
StmtKind::Expr(mut e) => {
noop_visit_expr(&mut e, vis);
StmtKind::Expr(e)
}
StmtKind::Semi(mut e) => {
noop_visit_expr(&mut e, vis);
StmtKind::Semi(e)
}
s => s,
};
vec![Stmt { kind, ..stmt }]
}
fn noop_visit_expr<T: MutVisitor>(e: &mut Expr, vis: &mut T) {
use rustc_ast::mut_visit::{noop_visit_expr, visit_opt, visit_thin_attrs};
match &mut e.kind {
ExprKind::AddrOf(BorrowKind::Raw, ..) => {}
ExprKind::Struct(path, fields, expr) => {
vis.visit_path(path);
fields.flat_map_in_place(|field| flat_map_field(field, vis));
visit_opt(expr, |expr| vis.visit_expr(expr));
vis.visit_id(&mut e.id);
vis.visit_span(&mut e.span);
visit_thin_attrs(&mut e.attrs, vis);
}
_ => noop_visit_expr(e, vis),
}
}
impl MutVisitor for BracketsVisitor {
fn visit_expr(&mut self, e: &mut P<Expr>) {
noop_visit_expr(e, self);
match e.node {
match e.kind {
ExprKind::If(..) | ExprKind::Block(..) | ExprKind::Let(..) => {}
_ => {
let inner = mem::replace(
e,
P(Expr {
id: ast::DUMMY_NODE_ID,
node: ExprKind::Err,
kind: ExprKind::Err,
span: DUMMY_SP,
attrs: ThinVec::new(),
tokens: None,
}),
);
e.node = ExprKind::Paren(inner);
e.kind = ExprKind::Paren(inner);
}
}
}
fn flat_map_field(&mut self, mut f: Field) -> SmallVec<[Field; 1]> {
if f.is_shorthand {
noop_visit_expr(&mut f.expr, self);
} else {
self.visit_expr(&mut f.expr);
fn visit_generic_arg(&mut self, arg: &mut GenericArg) {
match arg {
// Don't wrap const generic arg as that's invalid syntax.
GenericArg::Const(arg) => noop_visit_expr(&mut arg.value, self),
_ => noop_visit_generic_arg(arg, self),
}
SmallVec::from([f])
}
fn visit_block(&mut self, block: &mut P<Block>) {
self.visit_id(&mut block.id);
block
.stmts
.flat_map_in_place(|stmt| flat_map_stmt(stmt, self));
self.visit_span(&mut block.span);
}
// We don't want to look at expressions that might appear in patterns or
@ -267,25 +302,8 @@ fn libsyntax_brackets(mut libsyntax_expr: P<ast::Expr>) -> Option<P<ast::Expr>>
let _ = ty;
}
fn flat_map_stmt(&mut self, stmt: Stmt) -> SmallVec<[Stmt; 1]> {
let node = match stmt.node {
// Don't wrap toplevel expressions in statements.
StmtKind::Expr(mut e) => {
noop_visit_expr(&mut e, self);
StmtKind::Expr(e)
}
StmtKind::Semi(mut e) => {
noop_visit_expr(&mut e, self);
StmtKind::Semi(e)
}
s => s,
};
smallvec![Stmt { node, ..stmt }]
}
fn visit_mac(&mut self, mac: &mut Mac) {
// By default when folding over macros, libsyntax panics. This is
fn visit_mac(&mut self, mac: &mut MacCall) {
// By default when folding over macros, librustc panics. This is
// because it's usually not what you want, you want to run after
// macro expansion. We do want to do that (syn doesn't do macro
// expansion), so we implement visit_mac to just return the macro
@ -295,11 +313,11 @@ fn libsyntax_brackets(mut libsyntax_expr: P<ast::Expr>) -> Option<P<ast::Expr>>
}
let mut folder = BracketsVisitor { failed: false };
folder.visit_expr(&mut libsyntax_expr);
folder.visit_expr(&mut librustc_expr);
if folder.failed {
None
} else {
Some(libsyntax_expr)
Some(librustc_expr)
}
}
@ -318,14 +336,33 @@ fn syn_brackets(syn_expr: syn::Expr) -> syn::Expr {
Expr::If(..) | Expr::Unsafe(..) | Expr::Block(..) | Expr::Let(..) => {
fold_expr(self, expr)
}
node => Expr::Paren(ExprParen {
_ => Expr::Paren(ExprParen {
attrs: Vec::new(),
expr: Box::new(fold_expr(self, node)),
expr: Box::new(fold_expr(self, expr)),
paren_token: token::Paren::default(),
}),
}
}
fn fold_generic_argument(&mut self, arg: GenericArgument) -> GenericArgument {
match arg {
// Don't wrap const generic arg as that's invalid syntax.
GenericArgument::Const(a) => GenericArgument::Const(fold_expr(self, a)),
_ => fold_generic_argument(self, arg),
}
}
fn fold_generic_method_argument(
&mut self,
arg: GenericMethodArgument,
) -> GenericMethodArgument {
match arg {
// Don't wrap const generic arg as that's invalid syntax.
GenericMethodArgument::Const(a) => GenericMethodArgument::Const(fold_expr(self, a)),
_ => fold_generic_method_argument(self, arg),
}
}
fn fold_stmt(&mut self, stmt: Stmt) -> Stmt {
match stmt {
// Don't wrap toplevel expressions in statements.
@ -360,7 +397,10 @@ fn collect_exprs(file: syn::File) -> Vec<syn::Expr> {
struct CollectExprs(Vec<Expr>);
impl Fold for CollectExprs {
fn fold_expr(&mut self, expr: Expr) -> Expr {
self.0.push(expr);
match expr {
Expr::Verbatim(tokens) if tokens.is_empty() => {}
_ => self.0.push(expr),
}
Expr::Tuple(ExprTuple {
attrs: vec![],

127
third_party/rust/syn/tests/test_receiver.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,127 @@
use syn::{parse_quote, FnArg, Receiver, TraitItemMethod};
#[test]
fn test_by_value() {
let TraitItemMethod { sig, .. } = parse_quote! {
fn by_value(self: Self);
};
match sig.receiver() {
Some(FnArg::Typed(_)) => (),
value => panic!("expected FnArg::Typed, got {:?}", value),
}
}
#[test]
fn test_by_mut_value() {
let TraitItemMethod { sig, .. } = parse_quote! {
fn by_mut(mut self: Self);
};
match sig.receiver() {
Some(FnArg::Typed(_)) => (),
value => panic!("expected FnArg::Typed, got {:?}", value),
}
}
#[test]
fn test_by_ref() {
let TraitItemMethod { sig, .. } = parse_quote! {
fn by_ref(self: &Self);
};
match sig.receiver() {
Some(FnArg::Typed(_)) => (),
value => panic!("expected FnArg::Typed, got {:?}", value),
}
}
#[test]
fn test_by_box() {
let TraitItemMethod { sig, .. } = parse_quote! {
fn by_box(self: Box<Self>);
};
match sig.receiver() {
Some(FnArg::Typed(_)) => (),
value => panic!("expected FnArg::Typed, got {:?}", value),
}
}
#[test]
fn test_by_pin() {
let TraitItemMethod { sig, .. } = parse_quote! {
fn by_pin(self: Pin<Self>);
};
match sig.receiver() {
Some(FnArg::Typed(_)) => (),
value => panic!("expected FnArg::Typed, got {:?}", value),
}
}
#[test]
fn test_explicit_type() {
let TraitItemMethod { sig, .. } = parse_quote! {
fn explicit_type(self: Pin<MyType>);
};
match sig.receiver() {
Some(FnArg::Typed(_)) => (),
value => panic!("expected FnArg::Typed, got {:?}", value),
}
}
#[test]
fn test_value_shorthand() {
let TraitItemMethod { sig, .. } = parse_quote! {
fn value_shorthand(self);
};
match sig.receiver() {
Some(FnArg::Receiver(Receiver {
reference: None,
mutability: None,
..
})) => (),
value => panic!("expected FnArg::Receiver without ref/mut, got {:?}", value),
}
}
#[test]
fn test_mut_value_shorthand() {
let TraitItemMethod { sig, .. } = parse_quote! {
fn mut_value_shorthand(mut self);
};
match sig.receiver() {
Some(FnArg::Receiver(Receiver {
reference: None,
mutability: Some(_),
..
})) => (),
value => panic!("expected FnArg::Receiver with mut, got {:?}", value),
}
}
#[test]
fn test_ref_shorthand() {
let TraitItemMethod { sig, .. } = parse_quote! {
fn ref_shorthand(&self);
};
match sig.receiver() {
Some(FnArg::Receiver(Receiver {
reference: Some(_),
mutability: None,
..
})) => (),
value => panic!("expected FnArg::Receiver with ref, got {:?}", value),
}
}
#[test]
fn test_ref_mut_shorthand() {
let TraitItemMethod { sig, .. } = parse_quote! {
fn ref_mut_shorthand(&mut self);
};
match sig.receiver() {
Some(FnArg::Receiver(Receiver {
reference: Some(_),
mutability: Some(_),
..
})) => (),
value => panic!("expected FnArg::Receiver with ref+mut, got {:?}", value),
}
}

41
third_party/rust/syn/tests/test_round_trip.rs поставляемый
Просмотреть файл

@ -2,22 +2,20 @@
#![recursion_limit = "1024"]
#![feature(rustc_private)]
extern crate quote;
extern crate rayon;
extern crate syn;
extern crate syntax;
extern crate syntax_pos;
extern crate walkdir;
mod features;
extern crate rustc_ast;
extern crate rustc_errors;
extern crate rustc_expand;
extern crate rustc_parse as parse;
extern crate rustc_session;
extern crate rustc_span;
use quote::quote;
use rayon::iter::{IntoParallelIterator, ParallelIterator};
use syntax::ast;
use syntax::parse::{self, PResult, ParseSess};
use syntax::source_map::FilePathMapping;
use syntax_pos::edition::Edition;
use syntax_pos::FileName;
use rustc_ast::ast;
use rustc_errors::PResult;
use rustc_session::parse::ParseSess;
use rustc_span::source_map::FilePathMapping;
use rustc_span::FileName;
use walkdir::{DirEntry, WalkDir};
use std::fs::File;
@ -38,8 +36,8 @@ mod repo;
use common::eq::SpanlessEq;
#[test]
#[cfg_attr(target_os = "windows", ignore = "requires nix .sh")]
fn test_round_trip() {
common::rayon_init();
repo::clone_rust();
let abort_after = common::abort_after();
if abort_after == 0 {
@ -78,11 +76,12 @@ fn test_round_trip() {
}
};
let back = quote!(#krate).to_string();
let edition = repo::edition(path).parse().unwrap();
let equal = panic::catch_unwind(|| {
syntax::with_globals(Edition::Edition2018, || {
rustc_span::with_session_globals(edition, || {
let sess = ParseSess::new(FilePathMapping::empty());
let before = match libsyntax_parse(content, &sess) {
let before = match librustc_parse(content, &sess) {
Ok(before) => before,
Err(mut diagnostic) => {
diagnostic.cancel();
@ -93,7 +92,7 @@ fn test_round_trip() {
errorf!("=== {}: ignore\n", path.display());
} else {
errorf!(
"=== {}: ignore - libsyntax failed to parse original content: {}\n",
"=== {}: ignore - librustc failed to parse original content: {}\n",
path.display(),
diagnostic.message()
);
@ -101,10 +100,10 @@ fn test_round_trip() {
return true;
}
};
let after = match libsyntax_parse(back, &sess) {
let after = match librustc_parse(back, &sess) {
Ok(after) => after,
Err(mut diagnostic) => {
errorf!("=== {}: libsyntax failed to parse", path.display());
errorf!("=== {}: librustc failed to parse", path.display());
diagnostic.emit();
return false;
}
@ -130,7 +129,7 @@ fn test_round_trip() {
})
});
match equal {
Err(_) => errorf!("=== {}: ignoring libsyntax panic\n", path.display()),
Err(_) => errorf!("=== {}: ignoring librustc panic\n", path.display()),
Ok(true) => {}
Ok(false) => {
let prev_failed = failed.fetch_add(1, Ordering::SeqCst);
@ -147,7 +146,7 @@ fn test_round_trip() {
}
}
fn libsyntax_parse(content: String, sess: &ParseSess) -> PResult<ast::Crate> {
fn librustc_parse(content: String, sess: &ParseSess) -> PResult<ast::Crate> {
let name = FileName::Custom("test_round_trip".to_string());
parse::parse_crate_from_source_str(name, content, sess)
}

59
third_party/rust/syn/tests/test_shebang.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,59 @@
#[macro_use]
mod macros;
#[test]
fn test_basic() {
let content = "#!/usr/bin/env rustx\nfn main() {}";
let file = syn::parse_file(content).unwrap();
snapshot!(file, @r###"
File {
shebang: Some("#!/usr/bin/env rustx"),
items: [
Item::Fn {
vis: Inherited,
sig: Signature {
ident: "main",
generics: Generics,
output: Default,
},
block: Block,
},
],
}
"###);
}
#[test]
fn test_comment() {
let content = "#!//am/i/a/comment\n[allow(dead_code)] fn main() {}";
let file = syn::parse_file(content).unwrap();
snapshot!(file, @r###"
File {
attrs: [
Attribute {
style: Inner,
path: Path {
segments: [
PathSegment {
ident: "allow",
arguments: None,
},
],
},
tokens: TokenStream(`(dead_code)`),
},
],
items: [
Item::Fn {
vis: Inherited,
sig: Signature {
ident: "main",
generics: Generics,
output: Default,
},
block: Block,
},
],
}
"###);
}

Просмотреть файл

@ -1,7 +1,3 @@
extern crate syn;
mod features;
macro_rules! should_parse {
($name:ident, { $($in:tt)* }) => {
#[test]

2
third_party/rust/syn/tests/test_size.rs поставляемый
Просмотреть файл

@ -1,7 +1,5 @@
#![cfg(target_pointer_width = "64")]
mod features;
use std::mem;
use syn::*;

44
third_party/rust/syn/tests/test_stmt.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,44 @@
#[macro_use]
mod macros;
use syn::Stmt;
#[test]
fn test_raw_operator() {
let stmt = syn::parse_str::<Stmt>("let _ = &raw const x;").unwrap();
snapshot!(stmt, @r###"
Local(Local {
pat: Pat::Wild,
init: Some(Verbatim(`& raw const x`)),
})
"###);
}
#[test]
fn test_raw_variable() {
let stmt = syn::parse_str::<Stmt>("let _ = &raw;").unwrap();
snapshot!(stmt, @r###"
Local(Local {
pat: Pat::Wild,
init: Some(Expr::Reference {
expr: Expr::Path {
path: Path {
segments: [
PathSegment {
ident: "raw",
arguments: None,
},
],
},
},
}),
})
"###);
}
#[test]
fn test_raw_invalid() {
assert!(syn::parse_str::<Stmt>("let _ = &raw x;").is_err());
}

Просмотреть файл

@ -1,9 +1,3 @@
extern crate proc_macro2;
extern crate quote;
extern crate syn;
mod features;
#[macro_use]
mod macros;
@ -21,7 +15,11 @@ fn test_struct() {
}
";
snapshot!(input as TokenStream, @"`# [ derive ( Debug , Clone ) ] pub struct Item { pub ident : Ident , pub attrs : Vec < Attribute >, }`");
snapshot!(input as TokenStream, @r###"
TokenStream(
`# [derive (Debug , Clone)] pub struct Item { pub ident : Ident , pub attrs : Vec < Attribute >, }`,
)
"###);
}
#[test]

53
third_party/rust/syn/tests/test_ty.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,53 @@
#[macro_use]
mod macros;
use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
use quote::quote;
use std::iter::FromIterator;
use syn::Type;
#[test]
fn test_mut_self() {
syn::parse_str::<Type>("fn(mut self)").unwrap();
syn::parse_str::<Type>("fn(mut self: ())").unwrap();
syn::parse_str::<Type>("fn(mut self: ...)").unwrap_err();
syn::parse_str::<Type>("fn(mut self: mut self)").unwrap_err();
syn::parse_str::<Type>("fn(mut self::T)").unwrap_err();
}
#[test]
fn test_macro_variable_type() {
// mimics the token stream corresponding to `$ty<T>`
let tokens = TokenStream::from_iter(vec![
TokenTree::Group(Group::new(Delimiter::None, quote! { ty })),
TokenTree::Punct(Punct::new('<', Spacing::Alone)),
TokenTree::Ident(Ident::new("T", Span::call_site())),
TokenTree::Punct(Punct::new('>', Spacing::Alone)),
]);
snapshot!(tokens as Type, @r###"
Type::Path {
path: Path {
segments: [
PathSegment {
ident: "ty",
arguments: PathArguments::AngleBracketed {
args: [
Type(Type::Path {
path: Path {
segments: [
PathSegment {
ident: "T",
arguments: None,
},
],
},
}),
],
},
},
],
},
}
"###);
}

145
third_party/rust/syn/tests/test_visibility.rs поставляемый Normal file
Просмотреть файл

@ -0,0 +1,145 @@
#[macro_use]
mod macros;
use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
use std::iter::FromIterator;
use syn::parse::{Parse, ParseStream};
use syn::{DeriveInput, Result, Visibility};
#[derive(Debug)]
struct VisRest {
vis: Visibility,
rest: TokenStream,
}
impl Parse for VisRest {
fn parse(input: ParseStream) -> Result<Self> {
Ok(VisRest {
vis: input.parse()?,
rest: input.parse()?,
})
}
}
macro_rules! assert_vis_parse {
($input:expr, Ok($p:pat)) => {
assert_vis_parse!($input, Ok($p) + "");
};
($input:expr, Ok($p:pat) + $rest:expr) => {
let expected = $rest.parse::<TokenStream>().unwrap();
let parse: VisRest = syn::parse_str($input).unwrap();
match parse.vis {
$p => {}
_ => panic!("Expected {}, got {:?}", stringify!($p), parse.vis),
}
// NOTE: Round-trips through `to_string` to avoid potential whitespace
// diffs.
assert_eq!(parse.rest.to_string(), expected.to_string());
};
($input:expr, Err) => {
syn::parse2::<VisRest>($input.parse().unwrap()).unwrap_err();
};
}
#[test]
fn test_pub() {
assert_vis_parse!("pub", Ok(Visibility::Public(_)));
}
#[test]
fn test_crate() {
assert_vis_parse!("crate", Ok(Visibility::Crate(_)));
}
#[test]
fn test_inherited() {
assert_vis_parse!("", Ok(Visibility::Inherited));
}
#[test]
fn test_in() {
assert_vis_parse!("pub(in foo::bar)", Ok(Visibility::Restricted(_)));
}
#[test]
fn test_pub_crate() {
assert_vis_parse!("pub(crate)", Ok(Visibility::Restricted(_)));
}
#[test]
fn test_pub_self() {
assert_vis_parse!("pub(self)", Ok(Visibility::Restricted(_)));
}
#[test]
fn test_pub_super() {
assert_vis_parse!("pub(super)", Ok(Visibility::Restricted(_)));
}
#[test]
fn test_missing_in() {
assert_vis_parse!("pub(foo::bar)", Ok(Visibility::Public(_)) + "(foo::bar)");
}
#[test]
fn test_missing_in_path() {
assert_vis_parse!("pub(in)", Err);
}
#[test]
fn test_crate_path() {
assert_vis_parse!("pub(crate::A, crate::B)", Ok(Visibility::Public(_)) + "(crate::A, crate::B)");
}
#[test]
fn test_junk_after_in() {
assert_vis_parse!("pub(in some::path @@garbage)", Err);
}
#[test]
fn test_empty_group_vis() {
// mimics `struct S { $vis $field: () }` where $vis is empty
let tokens = TokenStream::from_iter(vec![
TokenTree::Ident(Ident::new("struct", Span::call_site())),
TokenTree::Ident(Ident::new("S", Span::call_site())),
TokenTree::Group(Group::new(
Delimiter::Brace,
TokenStream::from_iter(vec![
TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
TokenTree::Group(Group::new(
Delimiter::None,
TokenStream::from_iter(vec![TokenTree::Ident(Ident::new(
"f",
Span::call_site(),
))]),
)),
TokenTree::Punct(Punct::new(':', Spacing::Alone)),
TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
]),
)),
]);
snapshot!(tokens as DeriveInput, @r###"
DeriveInput {
vis: Inherited,
ident: "S",
generics: Generics,
data: Data::Struct {
fields: Fields::Named {
named: [
Field {
vis: Inherited,
ident: Some("f"),
colon_token: Some,
ty: Type::Tuple,
},
],
},
},
}
"###);
}

4
third_party/rust/syn/tests/zzz_stable.rs поставляемый
Просмотреть файл

@ -1,7 +1,5 @@
#![cfg(syn_disable_nightly_tests)]
extern crate termcolor;
use std::io::{self, Write};
use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};
@ -10,7 +8,7 @@ const MSG: &str = "\
WARNING:
This is not a nightly compiler so not all tests were able to
run. Syn includes tests that compare Syn's parser against the
compiler's parser, which requires access to unstable libsyntax
compiler's parser, which requires access to unstable librustc
data structures and a nightly compiler.
";