Bug 1785002 - Update syn to 1.0.99. r=emilio,supply-chain-reviewers

Differential Revision: https://phabricator.services.mozilla.com/D154753
Mike Hommey 2022-08-16 21:38:24 +00:00
Parent ca3229307b
Commit 23df2d1569
15 changed files with 156 additions and 76 deletions

4 Cargo.lock (generated)

@@ -5057,9 +5057,9 @@ dependencies = [
[[package]]
name = "syn"
version = "1.0.96"
version = "1.0.99"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0748dd251e24453cb8717f0354206b91557e4ec8703673a4b30208f2abaf1ebf"
checksum = "58dbef6ec655055e20b86b15a8cc6d439cca19b667537ac6a1369572d151ab13"
dependencies = [
"proc-macro2",
"quote",


@@ -613,6 +613,11 @@ criteria = "safe-to-deploy"
version = "0.4.1"
notes = "Simple string processing with no unsafe code or ambient capability usage."
[[audits.syn]]
who = "Mike Hommey <mh+mozilla@glandium.org>"
criteria = "safe-to-deploy"
delta = "1.0.96 -> 1.0.99"
[[audits.thin-vec]]
who = "Aria Beingessner <a.beingessner@gmail.com>"
criteria = "safe-to-deploy"

2 third_party/rust/syn/.cargo-checksum.json (vendored)

File diff suppressed because one or more lines are too long

11 third_party/rust/syn/Cargo.toml (vendored)

@@ -13,7 +13,7 @@
edition = "2018"
rust-version = "1.31"
name = "syn"
version = "1.0.96"
version = "1.0.99"
authors = ["David Tolnay <dtolnay@gmail.com>"]
include = [
"/benches/**",
@@ -28,7 +28,14 @@ include = [
description = "Parser for Rust source code"
documentation = "https://docs.rs/syn"
readme = "README.md"
categories = ["development-tools::procedural-macro-helpers"]
keywords = [
"macros",
"syn",
]
categories = [
"development-tools::procedural-macro-helpers",
"parser-implementations",
]
license = "MIT OR Apache-2.0"
repository = "https://github.com/dtolnay/syn"

2 third_party/rust/syn/README.md (vendored)

@@ -3,7 +3,7 @@ Parser for Rust source code
[<img alt="github" src="https://img.shields.io/badge/github-dtolnay/syn-8da0cb?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/dtolnay/syn)
[<img alt="crates.io" src="https://img.shields.io/crates/v/syn.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/syn)
[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-syn-66c2a5?style=for-the-badge&labelColor=555555&logoColor=white&logo=data:image/svg+xml;base64,PHN2ZyByb2xlPSJpbWciIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDUxMiA1MTIiPjxwYXRoIGZpbGw9IiNmNWY1ZjUiIGQ9Ik00ODguNiAyNTAuMkwzOTIgMjE0VjEwNS41YzAtMTUtOS4zLTI4LjQtMjMuNC0zMy43bC0xMDAtMzcuNWMtOC4xLTMuMS0xNy4xLTMuMS0yNS4zIDBsLTEwMCAzNy41Yy0xNC4xIDUuMy0yMy40IDE4LjctMjMuNCAzMy43VjIxNGwtOTYuNiAzNi4yQzkuMyAyNTUuNSAwIDI2OC45IDAgMjgzLjlWMzk0YzAgMTMuNiA3LjcgMjYuMSAxOS45IDMyLjJsMTAwIDUwYzEwLjEgNS4xIDIyLjEgNS4xIDMyLjIgMGwxMDMuOS01MiAxMDMuOSA1MmMxMC4xIDUuMSAyMi4xIDUuMSAzMi4yIDBsMTAwLTUwYzEyLjItNi4xIDE5LjktMTguNiAxOS45LTMyLjJWMjgzLjljMC0xNS05LjMtMjguNC0yMy40LTMzLjd6TTM1OCAyMTQuOGwtODUgMzEuOXYtNjguMmw4NS0zN3Y3My4zek0xNTQgMTA0LjFsMTAyLTM4LjIgMTAyIDM4LjJ2LjZsLTEwMiA0MS40LTEwMi00MS40di0uNnptODQgMjkxLjFsLTg1IDQyLjV2LTc5LjFsODUtMzguOHY3NS40em0wLTExMmwtMTAyIDQxLjQtMTAyLTQxLjR2LS42bDEwMi0zOC4yIDEwMiAzOC4ydi42em0yNDAgMTEybC04NSA0Mi41di03OS4xbDg1LTM4Ljh2NzUuNHptMC0xMTJsLTEwMiA0MS40LTEwMi00MS40di0uNmwxMDItMzguMiAxMDIgMzguMnYuNnoiPjwvcGF0aD48L3N2Zz4K" height="20">](https://docs.rs/syn)
[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-syn-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs" height="20">](https://docs.rs/syn)
[<img alt="build status" src="https://img.shields.io/github/workflow/status/dtolnay/syn/CI/master?style=for-the-badge" height="20">](https://github.com/dtolnay/syn/actions?query=branch%3Amaster)
Syn is a parsing library for parsing a stream of Rust tokens into a syntax tree

34 third_party/rust/syn/benches/file.rs (vendored)

@@ -2,7 +2,11 @@
#![feature(rustc_private, test)]
#![recursion_limit = "1024"]
#![allow(clippy::missing_panics_doc, clippy::must_use_candidate)]
#![allow(
clippy::items_after_statements,
clippy::missing_panics_doc,
clippy::must_use_candidate
)]
extern crate test;
@@ -15,17 +15,37 @@ mod common;
#[path = "../tests/repo/mod.rs"]
pub mod repo;
use proc_macro2::TokenStream;
use proc_macro2::{Span, TokenStream};
use std::fs;
use std::str::FromStr;
use syn::parse::{ParseStream, Parser};
use test::Bencher;
const FILE: &str = "tests/rust/library/core/src/str/mod.rs";
#[bench]
fn parse_file(b: &mut Bencher) {
fn get_tokens() -> TokenStream {
repo::clone_rust();
let content = fs::read_to_string(FILE).unwrap();
let tokens = TokenStream::from_str(&content).unwrap();
TokenStream::from_str(&content).unwrap()
}
#[bench]
fn baseline(b: &mut Bencher) {
let tokens = get_tokens();
b.iter(|| drop(tokens.clone()));
}
#[bench]
fn create_token_buffer(b: &mut Bencher) {
let tokens = get_tokens();
fn immediate_fail(_input: ParseStream) -> syn::Result<()> {
Err(syn::Error::new(Span::call_site(), ""))
}
b.iter(|| immediate_fail.parse2(tokens.clone()));
}
#[bench]
fn parse_file(b: &mut Bencher) {
let tokens = get_tokens();
b.iter(|| syn::parse2::<syn::File>(tokens.clone()));
}
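For context on the new benchmarks: syn implements its `Parser` trait for any plain `fn(ParseStream) -> syn::Result<T>`, and `Parser::parse2` constructs the internal `TokenBuffer` before it ever invokes the function. That is what lets `create_token_buffer` above measure buffer construction on its own by handing `parse2` the immediately failing `immediate_fail`; subtracting the `baseline` numbers (pure `TokenStream` cloning) then gives a rough estimate of buffer-construction cost. A minimal standalone sketch of the same pattern (the `first_ident` function and the "foo" input are illustrative, not part of the diff):

```rust
use proc_macro2::TokenStream;
use syn::parse::{ParseStream, Parser};
use syn::{Ident, Result};

// Any plain `fn(ParseStream) -> syn::Result<T>` implements `syn::parse::Parser`,
// so it can be driven over a raw `TokenStream` with `.parse2(...)`. `parse2`
// builds the token buffer first and only then calls the function.
fn first_ident(input: ParseStream) -> Result<Ident> {
    input.parse()
}

fn main() -> Result<()> {
    let tokens: TokenStream = "foo".parse().unwrap();
    let ident = first_ident.parse2(tokens)?;
    assert_eq!(ident, "foo");
    Ok(())
}
```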

100 third_party/rust/syn/src/buffer.rs (vendored)

@@ -14,7 +14,9 @@
use crate::proc_macro as pm;
use crate::Lifetime;
use proc_macro2::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
use std::hint;
use std::marker::PhantomData;
use std::mem;
use std::ptr;
use std::slice;
@@ -57,29 +59,39 @@ impl TokenBuffer {
// NOTE: Do not mutate the Vec returned from this function once it returns;
// the address of its backing memory must remain stable.
fn inner_new(stream: TokenStream, up: *const Entry) -> TokenBuffer {
// Build up the entries list, recording the locations of any Groups
// in the list to be processed later.
let mut entries = Vec::new();
let mut groups = Vec::new();
for tt in stream {
let iterator = stream.into_iter();
let mut entries = Vec::with_capacity(iterator.size_hint().0 + 1);
let mut next_index_after_last_group = 0;
for tt in iterator {
match tt {
TokenTree::Ident(sym) => {
entries.push(Entry::Ident(sym));
TokenTree::Ident(ident) => {
entries.push(Entry::Ident(ident));
}
TokenTree::Punct(op) => {
entries.push(Entry::Punct(op));
TokenTree::Punct(punct) => {
entries.push(Entry::Punct(punct));
}
TokenTree::Literal(l) => {
entries.push(Entry::Literal(l));
TokenTree::Literal(literal) => {
entries.push(Entry::Literal(literal));
}
TokenTree::Group(g) => {
// Record the index of the interesting entry, and store an
// `End(null)` there temporarily.
groups.push((entries.len(), g));
entries.push(Entry::End(ptr::null()));
TokenTree::Group(group) => {
// We cannot fill in a real `End` pointer until `entries` is
// finished growing and getting potentially reallocated.
// Instead, we temporarily coopt the spot where the end
// pointer would go, and use it to string together an
// intrusive linked list of all the Entry::Group entries in
// the vector. Later after `entries` is done growing, we'll
// traverse the linked list and fill in all the end
// pointers with a correct value.
let group_up =
ptr::null::<u8>().wrapping_add(next_index_after_last_group) as *const Entry;
let inner = Self::inner_new(group.stream(), group_up);
entries.push(Entry::Group(group, inner));
next_index_after_last_group = entries.len();
}
}
}
// Add an `End` entry to the end with a reference to the enclosing token
// stream which was passed in.
entries.push(Entry::End(up));
@@ -90,20 +102,36 @@ impl TokenBuffer {
// pointer into it.
let entries = entries.into_boxed_slice();
let len = entries.len();
// Convert boxed slice into a pointer to the first element early, to
// avoid invalidating pointers into this slice when we move the Box.
// See https://github.com/rust-lang/unsafe-code-guidelines/issues/326
let entries = Box::into_raw(entries) as *mut Entry;
for (idx, group) in groups {
// We know that this index refers to one of the temporary
// `End(null)` entries, and we know that the last entry is
// `End(up)`, so the next index is also valid.
let group_up = unsafe { entries.add(idx + 1) };
// The end entry stored at the end of this Entry::Group should
// point to the Entry which follows the Group in the list.
let inner = Self::inner_new(group.stream(), group_up);
unsafe { *entries.add(idx) = Entry::Group(group, inner) };
// Traverse intrusive linked list of Entry::Group entries and fill in
// correct End pointers.
while let Some(idx) = next_index_after_last_group.checked_sub(1) {
// We know that idx refers to one of the Entry::Group entries, and
// that the very last entry is an Entry::End, so the next index
// after any group entry is a valid index.
let group_up = unsafe { entries.add(next_index_after_last_group) };
// Linked list only takes us to entries which are of type Group.
let token_buffer = match unsafe { &*entries.add(idx) } {
Entry::Group(_group, token_buffer) => token_buffer,
_ => unsafe { hint::unreachable_unchecked() },
};
// Last entry in any TokenBuffer is of type End.
let buffer_ptr = token_buffer.ptr as *mut Entry;
let last_entry = unsafe { &mut *buffer_ptr.add(token_buffer.len - 1) };
let end_ptr_slot = match last_entry {
Entry::End(end_ptr_slot) => end_ptr_slot,
_ => unsafe { hint::unreachable_unchecked() },
};
// Step to next element in linked list.
next_index_after_last_group = mem::replace(end_ptr_slot, group_up) as usize;
}
TokenBuffer { ptr: entries, len }
@@ -275,7 +303,9 @@ impl<'a> Cursor<'a> {
pub fn punct(mut self) -> Option<(Punct, Cursor<'a>)> {
self.ignore_none();
match self.entry() {
Entry::Punct(op) if op.as_char() != '\'' => Some((op.clone(), unsafe { self.bump() })),
Entry::Punct(punct) if punct.as_char() != '\'' => {
Some((punct.clone(), unsafe { self.bump() }))
}
_ => None,
}
}
@@ -285,7 +315,7 @@ impl<'a> Cursor<'a> {
pub fn literal(mut self) -> Option<(Literal, Cursor<'a>)> {
self.ignore_none();
match self.entry() {
Entry::Literal(lit) => Some((lit.clone(), unsafe { self.bump() })),
Entry::Literal(literal) => Some((literal.clone(), unsafe { self.bump() })),
_ => None,
}
}
@@ -295,12 +325,12 @@ impl<'a> Cursor<'a> {
pub fn lifetime(mut self) -> Option<(Lifetime, Cursor<'a>)> {
self.ignore_none();
match self.entry() {
Entry::Punct(op) if op.as_char() == '\'' && op.spacing() == Spacing::Joint => {
Entry::Punct(punct) if punct.as_char() == '\'' && punct.spacing() == Spacing::Joint => {
let next = unsafe { self.bump() };
match next.ident() {
Some((ident, rest)) => {
let lifetime = Lifetime {
apostrophe: op.span(),
apostrophe: punct.span(),
ident,
};
Some((lifetime, rest))
@@ -334,9 +364,9 @@ impl<'a> Cursor<'a> {
pub fn token_tree(self) -> Option<(TokenTree, Cursor<'a>)> {
let tree = match self.entry() {
Entry::Group(group, _) => group.clone().into(),
Entry::Literal(lit) => lit.clone().into(),
Entry::Literal(literal) => literal.clone().into(),
Entry::Ident(ident) => ident.clone().into(),
Entry::Punct(op) => op.clone().into(),
Entry::Punct(punct) => punct.clone().into(),
Entry::End(..) => return None,
};
@@ -348,9 +378,9 @@ impl<'a> Cursor<'a> {
pub fn span(self) -> Span {
match self.entry() {
Entry::Group(group, _) => group.span(),
Entry::Literal(l) => l.span(),
Entry::Ident(t) => t.span(),
Entry::Punct(o) => o.span(),
Entry::Literal(literal) => literal.span(),
Entry::Ident(ident) => ident.span(),
Entry::Punct(punct) => punct.span(),
Entry::End(..) => Span::call_site(),
}
}
@@ -364,7 +394,7 @@ impl<'a> Cursor<'a> {
Entry::End(..) => None,
// Treat lifetimes as a single tt for the purposes of 'skip'.
Entry::Punct(op) if op.as_char() == '\'' && op.spacing() == Spacing::Joint => {
Entry::Punct(punct) if punct.as_char() == '\'' && punct.spacing() == Spacing::Joint => {
let next = unsafe { self.bump() };
match next.entry() {
Entry::Ident(_) => Some(unsafe { next.bump() }),
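The rewritten `inner_new` above drops the old side `Vec` of group positions: the real `End` pointers cannot be computed while `entries` may still reallocate, so the placeholder slots themselves are chained into an intrusive linked list of group entries, which a second pass walks backwards once the allocation is frozen. A simplified, safe sketch of that deferred-fixup idea, using made-up `Node` and `build` names rather than syn's actual `Entry`/`TokenBuffer` types:

```rust
use std::mem;

// Each "group" node needs to record the index of the entry that follows it,
// but that is only known once the vector has stopped growing. Instead of a
// side table of group positions, chain the groups together through the very
// field that will eventually hold the answer, then patch it in a second pass.
#[derive(Debug, PartialEq)]
enum Node {
    Item(char),
    // During construction the usize temporarily stores the intrusive link to
    // the previously pushed Group (0 = none); afterwards it holds the index
    // of the entry immediately after this group.
    Group(char, usize),
}

fn build(items: &[(char, bool)]) -> Vec<Node> {
    let mut nodes = Vec::with_capacity(items.len());
    // One past the most recently pushed Group; also the link value stashed
    // inside each newly pushed Group.
    let mut next_index_after_last_group = 0;
    for &(c, is_group) in items {
        if is_group {
            nodes.push(Node::Group(c, next_index_after_last_group));
            next_index_after_last_group = nodes.len();
        } else {
            nodes.push(Node::Item(c));
        }
    }
    // Walk the intrusive chain backwards, swapping each temporary link for
    // the real "index after this group". In syn the patched value is a raw
    // `*const Entry`, which only becomes stable after the entries have been
    // frozen into a boxed slice.
    while let Some(idx) = next_index_after_last_group.checked_sub(1) {
        let after_group = next_index_after_last_group;
        match &mut nodes[idx] {
            Node::Group(_, link) => {
                next_index_after_last_group = mem::replace(link, after_group);
            }
            Node::Item(_) => unreachable!("the chain only visits Group nodes"),
        }
    }
    nodes
}

fn main() {
    let nodes = build(&[('a', false), ('b', true), ('c', false), ('d', true)]);
    assert_eq!(nodes[1], Node::Group('b', 2));
    assert_eq!(nodes[3], Node::Group('d', 4));
}
```

The trade-off mirrors the real change: no temporary `groups` vector and no second allocation, at the cost of a slightly trickier fixup pass over entries that are known, by construction, to be groups.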

3 third_party/rust/syn/src/export.rs (vendored)

@@ -12,6 +12,9 @@ pub extern crate quote;
pub use proc_macro2::{Span, TokenStream as TokenStream2};
#[cfg(feature = "parsing")]
pub use crate::group::{parse_braces, parse_brackets, parse_parens};
pub use crate::span::IntoSpans;
#[cfg(all(

6 third_party/rust/syn/src/group.rs (vendored)

@@ -136,7 +136,7 @@ fn parse_delimited<'a>(
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
macro_rules! parenthesized {
($content:ident in $cursor:expr) => {
match $crate::group::parse_parens(&$cursor) {
match $crate::__private::parse_parens(&$cursor) {
$crate::__private::Ok(parens) => {
$content = parens.content;
parens.token
@@ -214,7 +214,7 @@ macro_rules! parenthesized {
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
macro_rules! braced {
($content:ident in $cursor:expr) => {
match $crate::group::parse_braces(&$cursor) {
match $crate::__private::parse_braces(&$cursor) {
$crate::__private::Ok(braces) => {
$content = braces.content;
braces.token
@@ -269,7 +269,7 @@ macro_rules! braced {
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
macro_rules! bracketed {
($content:ident in $cursor:expr) => {
match $crate::group::parse_brackets(&$cursor) {
match $crate::__private::parse_brackets(&$cursor) {
$crate::__private::Ok(brackets) => {
$content = brackets.content;
brackets.token
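These macro changes only swap the expansion path from `$crate::group::*` to `$crate::__private::*` (backed by the new re-export in src/export.rs above); user code that invokes `parenthesized!`, `braced!`, or `bracketed!` is unaffected because it only ever touches the macro surface. For reference, typical usage of one of these macros in a `Parse` impl (the `TupleLike` type and the sample input are an illustrative example, not code from this commit):

```rust
use syn::parse::{Parse, ParseStream};
use syn::{parenthesized, token, Ident, Result};

// Parses input of the form `name(field)`.
struct TupleLike {
    name: Ident,
    paren_token: token::Paren,
    field: Ident,
}

impl Parse for TupleLike {
    fn parse(input: ParseStream) -> Result<Self> {
        let name: Ident = input.parse()?;
        let content;
        // After this change the macro expands to a call through
        // `syn::__private::parse_parens`, with identical behavior.
        let paren_token = parenthesized!(content in input);
        let field: Ident = content.parse()?;
        Ok(TupleLike { name, paren_token, field })
    }
}

fn main() -> Result<()> {
    let parsed: TupleLike = syn::parse_str("Wrapper(inner)")?;
    assert_eq!(parsed.name, "Wrapper");
    assert_eq!(parsed.field, "inner");
    Ok(())
}
```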

10 third_party/rust/syn/src/lib.rs (vendored)

@@ -2,7 +2,7 @@
//!
//! [github]: https://img.shields.io/badge/github-8da0cb?style=for-the-badge&labelColor=555555&logo=github
//! [crates-io]: https://img.shields.io/badge/crates.io-fc8d62?style=for-the-badge&labelColor=555555&logo=rust
//! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logoColor=white&logo=data:image/svg+xml;base64,PHN2ZyByb2xlPSJpbWciIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDUxMiA1MTIiPjxwYXRoIGZpbGw9IiNmNWY1ZjUiIGQ9Ik00ODguNiAyNTAuMkwzOTIgMjE0VjEwNS41YzAtMTUtOS4zLTI4LjQtMjMuNC0zMy43bC0xMDAtMzcuNWMtOC4xLTMuMS0xNy4xLTMuMS0yNS4zIDBsLTEwMCAzNy41Yy0xNC4xIDUuMy0yMy40IDE4LjctMjMuNCAzMy43VjIxNGwtOTYuNiAzNi4yQzkuMyAyNTUuNSAwIDI2OC45IDAgMjgzLjlWMzk0YzAgMTMuNiA3LjcgMjYuMSAxOS45IDMyLjJsMTAwIDUwYzEwLjEgNS4xIDIyLjEgNS4xIDMyLjIgMGwxMDMuOS01MiAxMDMuOSA1MmMxMC4xIDUuMSAyMi4xIDUuMSAzMi4yIDBsMTAwLTUwYzEyLjItNi4xIDE5LjktMTguNiAxOS45LTMyLjJWMjgzLjljMC0xNS05LjMtMjguNC0yMy40LTMzLjd6TTM1OCAyMTQuOGwtODUgMzEuOXYtNjguMmw4NS0zN3Y3My4zek0xNTQgMTA0LjFsMTAyLTM4LjIgMTAyIDM4LjJ2LjZsLTEwMiA0MS40LTEwMi00MS40di0uNnptODQgMjkxLjFsLTg1IDQyLjV2LTc5LjFsODUtMzguOHY3NS40em0wLTExMmwtMTAyIDQxLjQtMTAyLTQxLjR2LS42bDEwMi0zOC4yIDEwMiAzOC4ydi42em0yNDAgMTEybC04NSA0Mi41di03OS4xbDg1LTM4Ljh2NzUuNHptMC0xMTJsLTEwMiA0MS40LTEwMi00MS40di0uNmwxMDItMzguMiAxMDIgMzguMnYuNnoiPjwvcGF0aD48L3N2Zz4K
//! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs
//!
//! <br>
//!
@@ -250,15 +250,17 @@
//! dynamic library libproc_macro from rustc toolchain.
// Syn types in rustdoc of other crates get linked to here.
#![doc(html_root_url = "https://docs.rs/syn/1.0.96")]
#![doc(html_root_url = "https://docs.rs/syn/1.0.99")]
#![cfg_attr(doc_cfg, feature(doc_cfg))]
#![allow(non_camel_case_types)]
#![allow(
clippy::cast_lossless,
clippy::cast_possible_truncation,
clippy::cast_ptr_alignment,
clippy::default_trait_access,
clippy::doc_markdown,
clippy::expl_impl_clone_on_copy,
clippy::explicit_auto_deref,
clippy::if_not_else,
clippy::inherent_to_string,
clippy::large_enum_variant,
@@ -299,11 +301,9 @@ extern crate quote;
#[macro_use]
mod macros;
// Not public API.
#[cfg(feature = "parsing")]
#[doc(hidden)]
#[macro_use]
pub mod group;
mod group;
#[macro_use]
pub mod token;

2 third_party/rust/syn/src/lit.rs (vendored)

@@ -1480,7 +1480,7 @@ mod value {
let mut bytes = input.to_owned().into_bytes();
let start = (*bytes.get(0)? == b'-') as usize;
let start = (*bytes.first()? == b'-') as usize;
match bytes.get(start)? {
b'0'..=b'9' => {}
_ => return None,
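The lit.rs hunk is a pure style fix: `bytes.first()` returns the same value as `bytes.get(0)` and simply states the intent more directly, as clippy's `get_first` lint suggests. A tiny standalone check of the equivalence, with an illustrative byte string and `unwrap()` in place of the original `?`:

```rust
fn main() {
    let bytes = b"-42";
    // `first()` and `get(0)` return the same Option<&u8>.
    assert_eq!(bytes.first(), bytes.get(0));
    // The lit.rs computation: 1 if the literal starts with '-', else 0.
    let start = (*bytes.first().unwrap() == b'-') as usize;
    assert_eq!(start, 1);
}
```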

47 third_party/rust/syn/tests/common/eq.rs (vendored)

@@ -24,6 +24,7 @@ use rustc_ast::ast::Block;
use rustc_ast::ast::BlockCheckMode;
use rustc_ast::ast::BorrowKind;
use rustc_ast::ast::CaptureBy;
use rustc_ast::ast::ClosureBinder;
use rustc_ast::ast::Const;
use rustc_ast::ast::Crate;
use rustc_ast::ast::Defaultness;
@@ -145,7 +146,7 @@ impl<T: ?Sized + SpanlessEq> SpanlessEq for Box<T> {
}
}
impl<T: SpanlessEq> SpanlessEq for P<T> {
impl<T: ?Sized + SpanlessEq> SpanlessEq for P<T> {
fn eq(&self, other: &Self) -> bool {
SpanlessEq::eq(&**self, &**other)
}
@@ -466,9 +467,10 @@ spanless_eq_enum!(BindingMode; ByRef(0) ByValue(0));
spanless_eq_enum!(BlockCheckMode; Default Unsafe(0));
spanless_eq_enum!(BorrowKind; Ref Raw);
spanless_eq_enum!(CaptureBy; Value Ref);
spanless_eq_enum!(ClosureBinder; NotPresent For(span generic_params));
spanless_eq_enum!(Const; Yes(0) No);
spanless_eq_enum!(Defaultness; Default(0) Final);
spanless_eq_enum!(Extern; None Implicit Explicit(0));
spanless_eq_enum!(Extern; None Implicit(0) Explicit(0 1));
spanless_eq_enum!(FloatTy; F32 F64);
spanless_eq_enum!(FnRetTy; Default(0) Ty(0));
spanless_eq_enum!(ForeignItemKind; Static(0 1 2) Fn(0) TyAlias(0) MacCall(0));
@@ -498,7 +500,7 @@ spanless_eq_enum!(StmtKind; Local(0) Item(0) Expr(0) Semi(0) Empty MacCall(0));
spanless_eq_enum!(StrStyle; Cooked Raw(0));
spanless_eq_enum!(StructRest; Base(0) Rest(0) None);
spanless_eq_enum!(Term; Ty(0) Const(0));
spanless_eq_enum!(TokenTree; Token(0) Delimited(0 1 2));
spanless_eq_enum!(TokenTree; Token(0 1) Delimited(0 1 2));
spanless_eq_enum!(TraitBoundModifier; None Maybe MaybeConst MaybeConstMaybe);
spanless_eq_enum!(TraitObjectSyntax; Dyn None);
spanless_eq_enum!(UintTy; Usize U8 U16 U32 U64 U128);
@@ -512,7 +514,7 @@ spanless_eq_enum!(WherePredicate; BoundPredicate(0) RegionPredicate(0) EqPredica
spanless_eq_enum!(ExprKind; Box(0) Array(0) ConstBlock(0) Call(0 1)
MethodCall(0 1 2) Tup(0) Binary(0 1 2) Unary(0 1) Lit(0) Cast(0 1) Type(0 1)
Let(0 1 2) If(0 1 2) While(0 1 2) ForLoop(0 1 2 3) Loop(0 1) Match(0 1)
Closure(0 1 2 3 4 5) Block(0 1) Async(0 1 2) Await(0) TryBlock(0)
Closure(0 1 2 3 4 5 6) Block(0 1) Async(0 1 2) Await(0) TryBlock(0)
Assign(0 1 2) AssignOp(0 1 2) Field(0 1) Index(0 1) Underscore Range(0 1 2)
Path(0 1) AddrOf(0 1 2) Break(0 1) Continue(0) Ret(0) InlineAsm(0)
MacCall(0) Struct(0) Repeat(0 1) Paren(0) Try(0) Yield(0) Yeet(0) Err);
@@ -610,7 +612,7 @@ impl SpanlessEq for TokenStream {
if SpanlessEq::eq(this, other) {
continue;
}
if let (TokenTree::Token(this), TokenTree::Token(other)) = (this, other) {
if let (TokenTree::Token(this, _), TokenTree::Token(other, _)) = (this, other) {
if match (&this.kind, &other.kind) {
(TokenKind::Literal(this), TokenKind::Literal(other)) => {
SpanlessEq::eq(this, other)
@@ -641,10 +643,13 @@ fn doc_comment<'a>(
AttrStyle::Inner => true,
} {
match trees.next() {
Some(TokenTree::Token(Token {
kind: TokenKind::Not,
span: _,
})) => {}
Some(TokenTree::Token(
Token {
kind: TokenKind::Not,
span: _,
},
_spacing,
)) => {}
_ => return false,
}
}
@@ -654,21 +659,27 @@
};
let mut trees = stream.trees();
match trees.next() {
Some(TokenTree::Token(Token {
kind: TokenKind::Ident(symbol, false),
span: _,
})) if *symbol == sym::doc => {}
Some(TokenTree::Token(
Token {
kind: TokenKind::Ident(symbol, false),
span: _,
},
_spacing,
)) if *symbol == sym::doc => {}
_ => return false,
}
match trees.next() {
Some(TokenTree::Token(Token {
kind: TokenKind::Eq,
span: _,
})) => {}
Some(TokenTree::Token(
Token {
kind: TokenKind::Eq,
span: _,
},
_spacing,
)) => {}
_ => return false,
}
match trees.next() {
Some(TokenTree::Token(token)) => {
Some(TokenTree::Token(token, _spacing)) => {
is_escaped_literal_token(token, unescaped) && trees.next().is_none()
}
_ => false,

2 third_party/rust/syn/tests/debug/mod.rs (vendored)

@@ -87,7 +87,7 @@ where
Lite<T>: Debug,
{
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
Debug::fmt(Lite(&*self.value), formatter)
Debug::fmt(Lite(self.value), formatter)
}
}


@@ -1,4 +1,4 @@
#![allow(clippy::too_many_lines)]
#![allow(clippy::assertions_on_result_states, clippy::too_many_lines)]
#[macro_use]
mod macros;

2 third_party/rust/syn/tests/test_stmt.rs (vendored)

@@ -1,4 +1,4 @@
#![allow(clippy::non_ascii_literal)]
#![allow(clippy::assertions_on_result_states, clippy::non_ascii_literal)]
#[macro_use]
mod macros;