author     Charisee <chiw@google.com>  2023-10-16 19:52:37 +0000
committer  Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>  2023-10-16 19:52:37 +0000
commit     48ffa176f56b98071a4e113af8d2ff5fbf1339e5 (patch)
tree       bd07790c7f7cdc04de900e11f71fdd3e66f508df
parent     5faa65f4be8cda06c688adbd7fca4d0a01ce7b02 (diff)
parent     6b8de5fd4fb87a3d57140905a83a404340433d2c (diff)
download   syn-48ffa176f56b98071a4e113af8d2ff5fbf1339e5.tar.gz
Update syn crate to 2.0.38 am: 2c9458570a am: 6b8de5fd4f
Original change: https://android-review.googlesource.com/c/platform/external/rust/crates/syn/+/2787035

Change-Id: I551cdf3da041937c36e0c45e78f47a9b6c0d5fad
Signed-off-by: Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>
-rw-r--r--  Cargo.toml                 |   7
-rw-r--r--  Cargo.toml.orig            |  10
-rw-r--r--  README.md                  |  12
-rw-r--r--  benches/rust.rs            |   2
-rw-r--r--  src/buffer.rs              |  12
-rw-r--r--  src/custom_keyword.rs      |   2
-rw-r--r--  src/custom_punctuation.rs  |   4
-rw-r--r--  src/data.rs                |  38
-rw-r--r--  src/error.rs               |   2
-rw-r--r--  src/export.rs              |  53
-rw-r--r--  src/expr.rs                |  18
-rw-r--r--  src/gen/fold.rs            |   6
-rw-r--r--  src/gen/visit.rs           |   1
-rw-r--r--  src/gen/visit_mut.rs       |   1
-rw-r--r--  src/generics.rs            |   2
-rw-r--r--  src/group.rs               |   8
-rw-r--r--  src/ident.rs               |  11
-rw-r--r--  src/item.rs                | 521
-rw-r--r--  src/lib.rs                 |  22
-rw-r--r--  src/lifetime.rs            |  10
-rw-r--r--  src/lit.rs                 |  48
-rw-r--r--  src/macros.rs              |  17
-rw-r--r--  src/meta.rs                |   8
-rw-r--r--  src/parse.rs               |  10
-rw-r--r--  src/parse_quote.rs         |   1
-rw-r--r--  src/pat.rs                 |   6
-rw-r--r--  src/path.rs                |  55
-rw-r--r--  src/punctuated.rs          |   5
-rw-r--r--  src/span.rs                |   1
-rw-r--r--  src/spanned.rs             |   3
-rw-r--r--  src/stmt.rs                |   7
-rw-r--r--  src/thread.rs              |  20
-rw-r--r--  src/token.rs               |  66
-rw-r--r--  src/ty.rs                  |   4
-rw-r--r--  src/verbatim.rs            |   4
-rw-r--r--  tests/common/eq.rs         |  19
-rw-r--r--  tests/debug/gen.rs         |   1
-rw-r--r--  tests/macros/mod.rs        |  15
-rw-r--r--  tests/repo/mod.rs          |  45
-rw-r--r--  tests/test_expr.rs         |   2
-rw-r--r--  tests/test_lit.rs          |  16
-rw-r--r--  tests/test_precedence.rs   |  98
-rw-r--r--  tests/test_round_trip.rs   |   5
43 files changed, 795 insertions, 403 deletions
diff --git a/Cargo.toml b/Cargo.toml
index b0d283bd..a0e56c52 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -13,7 +13,7 @@
edition = "2021"
rust-version = "1.56"
name = "syn"
-version = "2.0.16"
+version = "2.0.38"
authors = ["David Tolnay <dtolnay@gmail.com>"]
include = [
"/benches/**",
@@ -43,6 +43,7 @@ all-features = true
rustdoc-args = [
"--cfg",
"doc_cfg",
+ "--generate-link-to-definition",
]
targets = ["x86_64-unknown-linux-gnu"]
@@ -74,11 +75,11 @@ required-features = [
]
[dependencies.proc-macro2]
-version = "1.0.55"
+version = "1.0.67"
default-features = false
[dependencies.quote]
-version = "1.0.25"
+version = "1.0.28"
optional = true
default-features = false
diff --git a/Cargo.toml.orig b/Cargo.toml.orig
index 9dfab95a..1d4231c5 100644
--- a/Cargo.toml.orig
+++ b/Cargo.toml.orig
@@ -1,6 +1,6 @@
[package]
name = "syn"
-version = "2.0.16" # don't forget to update html_root_url and syn.json
+version = "2.0.38" # don't forget to update html_root_url and syn.json
authors = ["David Tolnay <dtolnay@gmail.com>"]
categories = ["development-tools::procedural-macro-helpers", "parser-implementations"]
description = "Parser for Rust source code"
@@ -35,8 +35,8 @@ proc-macro = ["proc-macro2/proc-macro", "quote/proc-macro"]
test = ["syn-test-suite/all-features"]
[dependencies]
-proc-macro2 = { version = "1.0.55", default-features = false }
-quote = { version = "1.0.25", optional = true, default-features = false }
+proc-macro2 = { version = "1.0.67", default-features = false }
+quote = { version = "1.0.28", optional = true, default-features = false }
unicode-ident = "1"
[dev-dependencies]
@@ -69,7 +69,7 @@ required-features = ["full", "parsing"]
[package.metadata.docs.rs]
all-features = true
targets = ["x86_64-unknown-linux-gnu"]
-rustdoc-args = ["--cfg", "doc_cfg"]
+rustdoc-args = ["--cfg", "doc_cfg", "--generate-link-to-definition"]
[package.metadata.playground]
features = ["full", "visit", "visit-mut", "fold", "extra-traits"]
@@ -85,7 +85,5 @@ members = [
"examples/lazy-static/lazy-static",
"examples/trace-var/example",
"examples/trace-var/trace-var",
- "json",
- "tests/crates",
"tests/features",
]
diff --git a/README.md b/README.md
index 24aea170..e8d99abc 100644
--- a/README.md
+++ b/README.md
@@ -39,12 +39,12 @@ contains some APIs that may be useful more generally.
procedural macros enable only what they need, and do not pay in compile time
for all the rest.
-[`syn::File`]: https://docs.rs/syn/1.0/syn/struct.File.html
-[`syn::Item`]: https://docs.rs/syn/1.0/syn/enum.Item.html
-[`syn::Expr`]: https://docs.rs/syn/1.0/syn/enum.Expr.html
-[`syn::Type`]: https://docs.rs/syn/1.0/syn/enum.Type.html
-[`syn::DeriveInput`]: https://docs.rs/syn/1.0/syn/struct.DeriveInput.html
-[parser functions]: https://docs.rs/syn/1.0/syn/parse/index.html
+[`syn::File`]: https://docs.rs/syn/2.0/syn/struct.File.html
+[`syn::Item`]: https://docs.rs/syn/2.0/syn/enum.Item.html
+[`syn::Expr`]: https://docs.rs/syn/2.0/syn/enum.Expr.html
+[`syn::Type`]: https://docs.rs/syn/2.0/syn/enum.Type.html
+[`syn::DeriveInput`]: https://docs.rs/syn/2.0/syn/struct.DeriveInput.html
+[parser functions]: https://docs.rs/syn/2.0/syn/parse/index.html
*Version requirement: Syn supports rustc 1.56 and up.*
diff --git a/benches/rust.rs b/benches/rust.rs
index ce6cfde2..64397618 100644
--- a/benches/rust.rs
+++ b/benches/rust.rs
@@ -80,7 +80,7 @@ mod librustc_parse {
rustc_span::create_session_if_not_set_then(Edition::Edition2018, |_| {
let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let emitter = Box::new(SilentEmitter);
- let handler = Handler::with_emitter(false, None, emitter);
+ let handler = Handler::with_emitter(emitter);
let sess = ParseSess::with_span_handler(handler, cm);
if let Err(diagnostic) = rustc_parse::parse_crate_from_source_str(
FileName::Custom("bench".to_owned()),
diff --git a/src/buffer.rs b/src/buffer.rs
index e16f2ade..564ccc75 100644
--- a/src/buffer.rs
+++ b/src/buffer.rs
@@ -5,11 +5,6 @@
// Syn, and caution should be used when editing it. The public-facing interface
// is 100% safe but the implementation is fragile internally.
-#[cfg(all(
- not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
- feature = "proc-macro"
-))]
-use crate::proc_macro as pm;
use crate::Lifetime;
use proc_macro2::extra::DelimSpan;
use proc_macro2::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
@@ -60,12 +55,9 @@ impl TokenBuffer {
/// Creates a `TokenBuffer` containing all the tokens from the input
/// `proc_macro::TokenStream`.
- #[cfg(all(
- not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
- feature = "proc-macro"
- ))]
+ #[cfg(feature = "proc-macro")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "proc-macro")))]
- pub fn new(stream: pm::TokenStream) -> Self {
+ pub fn new(stream: proc_macro::TokenStream) -> Self {
Self::new2(stream.into())
}
diff --git a/src/custom_keyword.rs b/src/custom_keyword.rs
index 379d159e..9f3ad870 100644
--- a/src/custom_keyword.rs
+++ b/src/custom_keyword.rs
@@ -224,7 +224,7 @@ macro_rules! impl_clone_for_custom_keyword {
macro_rules! impl_extra_traits_for_custom_keyword {
($ident:ident) => {
impl $crate::__private::Debug for $ident {
- fn fmt(&self, f: &mut $crate::__private::Formatter) -> $crate::__private::fmt::Result {
+ fn fmt(&self, f: &mut $crate::__private::Formatter) -> $crate::__private::FmtResult {
$crate::__private::Formatter::write_str(
f,
$crate::__private::concat!(
diff --git a/src/custom_punctuation.rs b/src/custom_punctuation.rs
index e8cbcd2f..062fe516 100644
--- a/src/custom_punctuation.rs
+++ b/src/custom_punctuation.rs
@@ -114,7 +114,7 @@ macro_rules! custom_punctuation {
macro_rules! impl_parse_for_custom_punctuation {
($ident:ident, $($tt:tt)+) => {
impl $crate::token::CustomToken for $ident {
- fn peek(cursor: $crate::buffer::Cursor) -> bool {
+ fn peek(cursor: $crate::buffer::Cursor) -> $crate::__private::bool {
$crate::__private::peek_punct(cursor, $crate::stringify_punct!($($tt)+))
}
@@ -195,7 +195,7 @@ macro_rules! impl_clone_for_custom_punctuation {
macro_rules! impl_extra_traits_for_custom_punctuation {
($ident:ident, $($tt:tt)+) => {
impl $crate::__private::Debug for $ident {
- fn fmt(&self, f: &mut $crate::__private::Formatter) -> $crate::__private::fmt::Result {
+ fn fmt(&self, f: &mut $crate::__private::Formatter) -> $crate::__private::FmtResult {
$crate::__private::Formatter::write_str(f, $crate::__private::stringify!($ident))
}
}
diff --git a/src/data.rs b/src/data.rs
index 185f88ba..431c0857 100644
--- a/src/data.rs
+++ b/src/data.rs
@@ -214,17 +214,37 @@ pub(crate) mod parsing {
/// Parses a named (braced struct) field.
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn parse_named(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let vis: Visibility = input.parse()?;
+
+ let unnamed_field = cfg!(feature = "full") && input.peek(Token![_]);
+ let ident = if unnamed_field {
+ input.call(Ident::parse_any)
+ } else {
+ input.parse()
+ }?;
+
+ let colon_token: Token![:] = input.parse()?;
+
+ let ty: Type = if unnamed_field
+ && (input.peek(Token![struct])
+ || input.peek(Token![union]) && input.peek2(token::Brace))
+ {
+ let begin = input.fork();
+ input.call(Ident::parse_any)?;
+ input.parse::<FieldsNamed>()?;
+ Type::Verbatim(verbatim::between(&begin, input))
+ } else {
+ input.parse()?
+ };
+
Ok(Field {
- attrs: input.call(Attribute::parse_outer)?,
- vis: input.parse()?,
+ attrs,
+ vis,
mutability: FieldMutability::None,
- ident: Some(if input.peek(Token![_]) {
- input.call(Ident::parse_any)
- } else {
- input.parse()
- }?),
- colon_token: Some(input.parse()?),
- ty: input.parse()?,
+ ident: Some(ident),
+ colon_token: Some(colon_token),
+ ty,
})
}
diff --git a/src/error.rs b/src/error.rs
index 93f20f42..3fe31d5c 100644
--- a/src/error.rs
+++ b/src/error.rs
@@ -385,7 +385,7 @@ impl Clone for Error {
impl Clone for ErrorMessage {
fn clone(&self) -> Self {
ErrorMessage {
- span: self.span.clone(),
+ span: self.span,
message: self.message.clone(),
}
}
diff --git a/src/export.rs b/src/export.rs
index c1c16f9e..febd322e 100644
--- a/src/export.rs
+++ b/src/export.rs
@@ -1,50 +1,69 @@
+#[doc(hidden)]
pub use std::clone::Clone;
+#[doc(hidden)]
pub use std::cmp::{Eq, PartialEq};
+#[doc(hidden)]
pub use std::concat;
+#[doc(hidden)]
pub use std::default::Default;
-pub use std::fmt::{self, Debug, Formatter};
+#[doc(hidden)]
+pub use std::fmt::Debug;
+#[doc(hidden)]
pub use std::hash::{Hash, Hasher};
+#[doc(hidden)]
pub use std::marker::Copy;
+#[doc(hidden)]
pub use std::option::Option::{None, Some};
+#[doc(hidden)]
pub use std::result::Result::{Err, Ok};
+#[doc(hidden)]
pub use std::stringify;
+#[doc(hidden)]
+pub type Formatter<'a> = std::fmt::Formatter<'a>;
+#[doc(hidden)]
+pub type FmtResult = std::fmt::Result;
+
+#[doc(hidden)]
+pub type bool = std::primitive::bool;
+#[doc(hidden)]
+pub type str = std::primitive::str;
+
#[cfg(feature = "printing")]
+#[doc(hidden)]
pub use quote;
-pub use proc_macro2::{Span, TokenStream as TokenStream2};
+#[doc(hidden)]
+pub type Span = proc_macro2::Span;
+#[doc(hidden)]
+pub type TokenStream2 = proc_macro2::TokenStream;
#[cfg(feature = "parsing")]
+#[doc(hidden)]
pub use crate::group::{parse_braces, parse_brackets, parse_parens};
+#[doc(hidden)]
pub use crate::span::IntoSpans;
#[cfg(all(feature = "parsing", feature = "printing"))]
+#[doc(hidden)]
pub use crate::parse_quote::parse as parse_quote;
#[cfg(feature = "parsing")]
+#[doc(hidden)]
pub use crate::token::parsing::{peek_punct, punct as parse_punct};
#[cfg(feature = "printing")]
+#[doc(hidden)]
pub use crate::token::printing::punct as print_punct;
-#[cfg(all(
- not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
- feature = "proc-macro"
-))]
-pub use proc_macro::TokenStream;
+#[cfg(feature = "proc-macro")]
+#[doc(hidden)]
+pub type TokenStream = proc_macro::TokenStream;
#[cfg(feature = "printing")]
+#[doc(hidden)]
pub use quote::{ToTokens, TokenStreamExt};
-#[allow(non_camel_case_types)]
-pub type bool = help::Bool;
-#[allow(non_camel_case_types)]
-pub type str = help::Str;
-
-mod help {
- pub type Bool = bool;
- pub type Str = str;
-}
-
+#[doc(hidden)]
pub struct private(pub(crate) ());
diff --git a/src/expr.rs b/src/expr.rs
index 0f1a6953..ae723242 100644
--- a/src/expr.rs
+++ b/src/expr.rs
@@ -1377,7 +1377,7 @@ pub(crate) mod parsing {
}
let expr = Box::new(unary_expr(input, allow_struct)?);
if raw.is_some() {
- Ok(Expr::Verbatim(verbatim::between(begin, input)))
+ Ok(Expr::Verbatim(verbatim::between(&begin, input)))
} else {
Ok(Expr::Reference(ExprReference {
attrs,
@@ -1423,7 +1423,7 @@ pub(crate) mod parsing {
let mut e = trailer_helper(input, atom)?;
if let Expr::Verbatim(tokens) = &mut e {
- *tokens = verbatim::between(begin, input);
+ *tokens = verbatim::between(&begin, input);
} else {
let inner_attrs = e.replace_attrs(Vec::new());
attrs.extend(inner_attrs);
@@ -1663,7 +1663,7 @@ pub(crate) mod parsing {
}
Ok(expr)
} else {
- Err(input.error("expected expression"))
+ Err(input.error("expected an expression"))
}
}
@@ -1691,6 +1691,16 @@ pub(crate) mod parsing {
} else if input.is_empty() {
Err(input.error("expected an expression"))
} else {
+ if input.peek(token::Brace) {
+ let scan = input.fork();
+ let content;
+ braced!(content in scan);
+ if content.parse::<Expr>().is_ok() && content.is_empty() {
+ let expr_block = verbatim::between(input, &scan);
+ input.advance_to(&scan);
+ return Ok(Expr::Verbatim(expr_block));
+ }
+ }
Err(input.error("unsupported expression; enable syn's features=[\"full\"]"))
}
}
@@ -1707,7 +1717,7 @@ pub(crate) mod parsing {
parenthesized!(args in input);
args.parse::<TokenStream>()?;
- Ok(Expr::Verbatim(verbatim::between(begin, input)))
+ Ok(Expr::Verbatim(verbatim::between(&begin, input)))
}
fn path_or_macro_or_struct(
diff --git a/src/gen/fold.rs b/src/gen/fold.rs
index 624c15b1..8ea6c75f 100644
--- a/src/gen/fold.rs
+++ b/src/gen/fold.rs
@@ -2,7 +2,11 @@
// It is not intended for manual editing.
#![allow(unreachable_code, unused_variables)]
-#![allow(clippy::match_wildcard_for_single_variants, clippy::needless_match)]
+#![allow(
+ clippy::match_wildcard_for_single_variants,
+ clippy::needless_match,
+ clippy::needless_pass_by_ref_mut,
+)]
#[cfg(any(feature = "full", feature = "derive"))]
use crate::gen::helper::fold::*;
use crate::*;
diff --git a/src/gen/visit.rs b/src/gen/visit.rs
index 9eaa24f0..fe81fb63 100644
--- a/src/gen/visit.rs
+++ b/src/gen/visit.rs
@@ -2,6 +2,7 @@
// It is not intended for manual editing.
#![allow(unused_variables)]
+#![allow(clippy::needless_pass_by_ref_mut)]
#[cfg(any(feature = "full", feature = "derive"))]
use crate::punctuated::Punctuated;
use crate::*;
diff --git a/src/gen/visit_mut.rs b/src/gen/visit_mut.rs
index 83bd1ccf..9e7d16ff 100644
--- a/src/gen/visit_mut.rs
+++ b/src/gen/visit_mut.rs
@@ -2,6 +2,7 @@
// It is not intended for manual editing.
#![allow(unused_variables)]
+#![allow(clippy::needless_pass_by_ref_mut)]
#[cfg(any(feature = "full", feature = "derive"))]
use crate::punctuated::Punctuated;
use crate::*;
diff --git a/src/generics.rs b/src/generics.rs
index 44a10da7..2ad913d1 100644
--- a/src/generics.rs
+++ b/src/generics.rs
@@ -771,7 +771,7 @@ pub(crate) mod parsing {
bound.paren_token = paren_token;
if is_tilde_const {
- Ok(TypeParamBound::Verbatim(verbatim::between(begin, input)))
+ Ok(TypeParamBound::Verbatim(verbatim::between(&begin, input)))
} else {
Ok(TypeParamBound::Trait(bound))
}
diff --git a/src/group.rs b/src/group.rs
index cccbc467..27302331 100644
--- a/src/group.rs
+++ b/src/group.rs
@@ -7,21 +7,27 @@ use proc_macro2::Delimiter;
// Not public API.
#[doc(hidden)]
pub struct Parens<'a> {
+ #[doc(hidden)]
pub token: token::Paren,
+ #[doc(hidden)]
pub content: ParseBuffer<'a>,
}
// Not public API.
#[doc(hidden)]
pub struct Braces<'a> {
+ #[doc(hidden)]
pub token: token::Brace,
+ #[doc(hidden)]
pub content: ParseBuffer<'a>,
}
// Not public API.
#[doc(hidden)]
pub struct Brackets<'a> {
+ #[doc(hidden)]
pub token: token::Bracket,
+ #[doc(hidden)]
pub content: ParseBuffer<'a>,
}
@@ -29,7 +35,9 @@ pub struct Brackets<'a> {
#[cfg(any(feature = "full", feature = "derive"))]
#[doc(hidden)]
pub struct Group<'a> {
+ #[doc(hidden)]
pub token: token::Group,
+ #[doc(hidden)]
pub content: ParseBuffer<'a>,
}
diff --git a/src/ident.rs b/src/ident.rs
index bd6f3f9f..d0f4ba08 100644
--- a/src/ident.rs
+++ b/src/ident.rs
@@ -3,12 +3,13 @@ use crate::lookahead;
pub use proc_macro2::Ident;
-#[cfg(not(doc))] // rustdoc bug: https://github.com/rust-lang/rust/issues/105735
#[cfg(feature = "parsing")]
-#[doc(hidden)]
-#[allow(non_snake_case)]
-pub fn Ident(marker: lookahead::TokenMarker) -> Ident {
- match marker {}
+pub_if_not_doc! {
+ #[doc(hidden)]
+ #[allow(non_snake_case)]
+ pub fn Ident(marker: lookahead::TokenMarker) -> Ident {
+ match marker {}
+ }
}
macro_rules! ident_from_token {
diff --git a/src/item.rs b/src/item.rs
index 46ccd73f..ee91f591 100644
--- a/src/item.rs
+++ b/src/item.rs
@@ -898,96 +898,76 @@ pub(crate) mod parsing {
impl Parse for Item {
fn parse(input: ParseStream) -> Result<Self> {
let begin = input.fork();
- let mut attrs = input.call(Attribute::parse_outer)?;
- let ahead = input.fork();
- let vis: Visibility = ahead.parse()?;
+ let attrs = input.call(Attribute::parse_outer)?;
+ parse_rest_of_item(begin, attrs, input)
+ }
+ }
+
+ pub(crate) fn parse_rest_of_item(
+ begin: ParseBuffer,
+ mut attrs: Vec<Attribute>,
+ input: ParseStream,
+ ) -> Result<Item> {
+ let ahead = input.fork();
+ let vis: Visibility = ahead.parse()?;
+ let lookahead = ahead.lookahead1();
+ let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) {
+ let vis: Visibility = input.parse()?;
+ let sig: Signature = input.parse()?;
+ if input.peek(Token![;]) {
+ input.parse::<Token![;]>()?;
+ Ok(Item::Verbatim(verbatim::between(&begin, input)))
+ } else {
+ parse_rest_of_fn(input, Vec::new(), vis, sig).map(Item::Fn)
+ }
+ } else if lookahead.peek(Token![extern]) {
+ ahead.parse::<Token![extern]>()?;
let lookahead = ahead.lookahead1();
- let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) {
- let vis: Visibility = input.parse()?;
- let sig: Signature = input.parse()?;
- if input.peek(Token![;]) {
- input.parse::<Token![;]>()?;
- Ok(Item::Verbatim(verbatim::between(begin, input)))
- } else {
- parse_rest_of_fn(input, Vec::new(), vis, sig).map(Item::Fn)
- }
- } else if lookahead.peek(Token![extern]) {
- ahead.parse::<Token![extern]>()?;
+ if lookahead.peek(Token![crate]) {
+ input.parse().map(Item::ExternCrate)
+ } else if lookahead.peek(token::Brace) {
+ input.parse().map(Item::ForeignMod)
+ } else if lookahead.peek(LitStr) {
+ ahead.parse::<LitStr>()?;
let lookahead = ahead.lookahead1();
- if lookahead.peek(Token![crate]) {
- input.parse().map(Item::ExternCrate)
- } else if lookahead.peek(token::Brace) {
+ if lookahead.peek(token::Brace) {
input.parse().map(Item::ForeignMod)
- } else if lookahead.peek(LitStr) {
- ahead.parse::<LitStr>()?;
- let lookahead = ahead.lookahead1();
- if lookahead.peek(token::Brace) {
- input.parse().map(Item::ForeignMod)
- } else {
- Err(lookahead.error())
- }
} else {
Err(lookahead.error())
}
- } else if lookahead.peek(Token![use]) {
- let allow_crate_root_in_path = true;
- match parse_item_use(input, allow_crate_root_in_path)? {
- Some(item_use) => Ok(Item::Use(item_use)),
- None => Ok(Item::Verbatim(verbatim::between(begin, input))),
- }
- } else if lookahead.peek(Token![static]) {
- let vis = input.parse()?;
- let static_token = input.parse()?;
- let mutability = input.parse()?;
- let ident = input.parse()?;
- if input.peek(Token![=]) {
- input.parse::<Token![=]>()?;
- input.parse::<Expr>()?;
- input.parse::<Token![;]>()?;
- Ok(Item::Verbatim(verbatim::between(begin, input)))
- } else {
- let colon_token = input.parse()?;
- let ty = input.parse()?;
- if input.peek(Token![;]) {
- input.parse::<Token![;]>()?;
- Ok(Item::Verbatim(verbatim::between(begin, input)))
- } else {
- Ok(Item::Static(ItemStatic {
- attrs: Vec::new(),
- vis,
- static_token,
- mutability,
- ident,
- colon_token,
- ty,
- eq_token: input.parse()?,
- expr: input.parse()?,
- semi_token: input.parse()?,
- }))
- }
- }
- } else if lookahead.peek(Token![const]) {
- let vis = input.parse()?;
- let const_token: Token![const] = input.parse()?;
- let lookahead = input.lookahead1();
- let ident = if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
- input.call(Ident::parse_any)?
- } else {
- return Err(lookahead.error());
- };
+ } else {
+ Err(lookahead.error())
+ }
+ } else if lookahead.peek(Token![use]) {
+ let allow_crate_root_in_path = true;
+ match parse_item_use(input, allow_crate_root_in_path)? {
+ Some(item_use) => Ok(Item::Use(item_use)),
+ None => Ok(Item::Verbatim(verbatim::between(&begin, input))),
+ }
+ } else if lookahead.peek(Token![static]) {
+ let vis = input.parse()?;
+ let static_token = input.parse()?;
+ let mutability = input.parse()?;
+ let ident = input.parse()?;
+ if input.peek(Token![=]) {
+ input.parse::<Token![=]>()?;
+ input.parse::<Expr>()?;
+ input.parse::<Token![;]>()?;
+ Ok(Item::Verbatim(verbatim::between(&begin, input)))
+ } else {
let colon_token = input.parse()?;
let ty = input.parse()?;
if input.peek(Token![;]) {
input.parse::<Token![;]>()?;
- Ok(Item::Verbatim(verbatim::between(begin, input)))
+ Ok(Item::Verbatim(verbatim::between(&begin, input)))
} else {
- Ok(Item::Const(ItemConst {
+ Ok(Item::Static(ItemStatic {
attrs: Vec::new(),
vis,
- const_token,
+ static_token,
+ mutability,
ident,
- generics: Generics::default(),
colon_token,
ty,
eq_token: input.parse()?,
@@ -995,69 +975,108 @@ pub(crate) mod parsing {
semi_token: input.parse()?,
}))
}
- } else if lookahead.peek(Token![unsafe]) {
- ahead.parse::<Token![unsafe]>()?;
- let lookahead = ahead.lookahead1();
- if lookahead.peek(Token![trait])
- || lookahead.peek(Token![auto]) && ahead.peek2(Token![trait])
+ }
+ } else if lookahead.peek(Token![const]) {
+ let vis = input.parse()?;
+ let const_token: Token![const] = input.parse()?;
+ let lookahead = input.lookahead1();
+ let ident = if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
+ input.call(Ident::parse_any)?
+ } else {
+ return Err(lookahead.error());
+ };
+ let mut generics: Generics = input.parse()?;
+ let colon_token = input.parse()?;
+ let ty = input.parse()?;
+ let value = if let Some(eq_token) = input.parse::<Option<Token![=]>>()? {
+ let expr: Expr = input.parse()?;
+ Some((eq_token, expr))
+ } else {
+ None
+ };
+ generics.where_clause = input.parse()?;
+ let semi_token: Token![;] = input.parse()?;
+ match value {
+ Some((eq_token, expr))
+ if generics.lt_token.is_none() && generics.where_clause.is_none() =>
{
- input.parse().map(Item::Trait)
- } else if lookahead.peek(Token![impl]) {
- let allow_verbatim_impl = true;
- if let Some(item) = parse_impl(input, allow_verbatim_impl)? {
- Ok(Item::Impl(item))
- } else {
- Ok(Item::Verbatim(verbatim::between(begin, input)))
- }
- } else if lookahead.peek(Token![extern]) {
- input.parse().map(Item::ForeignMod)
- } else if lookahead.peek(Token![mod]) {
- input.parse().map(Item::Mod)
- } else {
- Err(lookahead.error())
+ Ok(Item::Const(ItemConst {
+ attrs: Vec::new(),
+ vis,
+ const_token,
+ ident,
+ generics,
+ colon_token,
+ ty,
+ eq_token,
+ expr: Box::new(expr),
+ semi_token,
+ }))
}
- } else if lookahead.peek(Token![mod]) {
- input.parse().map(Item::Mod)
- } else if lookahead.peek(Token![type]) {
- parse_item_type(begin, input)
- } else if lookahead.peek(Token![struct]) {
- input.parse().map(Item::Struct)
- } else if lookahead.peek(Token![enum]) {
- input.parse().map(Item::Enum)
- } else if lookahead.peek(Token![union]) && ahead.peek2(Ident) {
- input.parse().map(Item::Union)
- } else if lookahead.peek(Token![trait]) {
- input.call(parse_trait_or_trait_alias)
- } else if lookahead.peek(Token![auto]) && ahead.peek2(Token![trait]) {
- input.parse().map(Item::Trait)
- } else if lookahead.peek(Token![impl])
- || lookahead.peek(Token![default]) && !ahead.peek2(Token![!])
+ _ => Ok(Item::Verbatim(verbatim::between(&begin, input))),
+ }
+ } else if lookahead.peek(Token![unsafe]) {
+ ahead.parse::<Token![unsafe]>()?;
+ let lookahead = ahead.lookahead1();
+ if lookahead.peek(Token![trait])
+ || lookahead.peek(Token![auto]) && ahead.peek2(Token![trait])
{
+ input.parse().map(Item::Trait)
+ } else if lookahead.peek(Token![impl]) {
let allow_verbatim_impl = true;
if let Some(item) = parse_impl(input, allow_verbatim_impl)? {
Ok(Item::Impl(item))
} else {
- Ok(Item::Verbatim(verbatim::between(begin, input)))
+ Ok(Item::Verbatim(verbatim::between(&begin, input)))
}
- } else if lookahead.peek(Token![macro]) {
- input.advance_to(&ahead);
- parse_macro2(begin, vis, input)
- } else if vis.is_inherited()
- && (lookahead.peek(Ident)
- || lookahead.peek(Token![self])
- || lookahead.peek(Token![super])
- || lookahead.peek(Token![crate])
- || lookahead.peek(Token![::]))
- {
- input.parse().map(Item::Macro)
+ } else if lookahead.peek(Token![extern]) {
+ input.parse().map(Item::ForeignMod)
+ } else if lookahead.peek(Token![mod]) {
+ input.parse().map(Item::Mod)
} else {
Err(lookahead.error())
- }?;
+ }
+ } else if lookahead.peek(Token![mod]) {
+ input.parse().map(Item::Mod)
+ } else if lookahead.peek(Token![type]) {
+ parse_item_type(begin, input)
+ } else if lookahead.peek(Token![struct]) {
+ input.parse().map(Item::Struct)
+ } else if lookahead.peek(Token![enum]) {
+ input.parse().map(Item::Enum)
+ } else if lookahead.peek(Token![union]) && ahead.peek2(Ident) {
+ input.parse().map(Item::Union)
+ } else if lookahead.peek(Token![trait]) {
+ input.call(parse_trait_or_trait_alias)
+ } else if lookahead.peek(Token![auto]) && ahead.peek2(Token![trait]) {
+ input.parse().map(Item::Trait)
+ } else if lookahead.peek(Token![impl])
+ || lookahead.peek(Token![default]) && !ahead.peek2(Token![!])
+ {
+ let allow_verbatim_impl = true;
+ if let Some(item) = parse_impl(input, allow_verbatim_impl)? {
+ Ok(Item::Impl(item))
+ } else {
+ Ok(Item::Verbatim(verbatim::between(&begin, input)))
+ }
+ } else if lookahead.peek(Token![macro]) {
+ input.advance_to(&ahead);
+ parse_macro2(begin, vis, input)
+ } else if vis.is_inherited()
+ && (lookahead.peek(Ident)
+ || lookahead.peek(Token![self])
+ || lookahead.peek(Token![super])
+ || lookahead.peek(Token![crate])
+ || lookahead.peek(Token![::]))
+ {
+ input.parse().map(Item::Macro)
+ } else {
+ Err(lookahead.error())
+ }?;
- attrs.extend(item.replace_attrs(Vec::new()));
- item.replace_attrs(attrs);
- Ok(item)
- }
+ attrs.extend(item.replace_attrs(Vec::new()));
+ item.replace_attrs(attrs);
+ Ok(item)
}
struct FlexibleItemType {
@@ -1219,7 +1238,7 @@ pub(crate) mod parsing {
return Err(lookahead.error());
}
- Ok(Item::Verbatim(verbatim::between(begin, input)))
+ Ok(Item::Verbatim(verbatim::between(&begin, input)))
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
@@ -1340,22 +1359,28 @@ pub(crate) mod parsing {
let content;
let brace_token = braced!(content in input);
let mut items = Punctuated::new();
- let mut has_crate_root_in_path = false;
+ let mut has_any_crate_root_in_path = false;
loop {
if content.is_empty() {
break;
}
- has_crate_root_in_path |=
+ let this_tree_starts_with_crate_root =
allow_crate_root_in_path && content.parse::<Option<Token![::]>>()?.is_some();
- let tree: UseTree = content.parse()?;
- items.push_value(tree);
+ has_any_crate_root_in_path |= this_tree_starts_with_crate_root;
+ match parse_use_tree(
+ &content,
+ allow_crate_root_in_path && !this_tree_starts_with_crate_root,
+ )? {
+ Some(tree) => items.push_value(tree),
+ None => has_any_crate_root_in_path = true,
+ }
if content.is_empty() {
break;
}
let comma: Token![,] = content.parse()?;
items.push_punct(comma);
}
- if has_crate_root_in_path {
+ if has_any_crate_root_in_path {
Ok(None)
} else {
Ok(Some(UseTree::Group(UseGroup { brace_token, items })))
@@ -1386,24 +1411,34 @@ pub(crate) mod parsing {
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for ItemConst {
fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let vis: Visibility = input.parse()?;
+ let const_token: Token![const] = input.parse()?;
+
+ let lookahead = input.lookahead1();
+ let ident = if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
+ input.call(Ident::parse_any)?
+ } else {
+ return Err(lookahead.error());
+ };
+
+ let colon_token: Token![:] = input.parse()?;
+ let ty: Type = input.parse()?;
+ let eq_token: Token![=] = input.parse()?;
+ let expr: Expr = input.parse()?;
+ let semi_token: Token![;] = input.parse()?;
+
Ok(ItemConst {
- attrs: input.call(Attribute::parse_outer)?,
- vis: input.parse()?,
- const_token: input.parse()?,
- ident: {
- let lookahead = input.lookahead1();
- if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
- input.call(Ident::parse_any)?
- } else {
- return Err(lookahead.error());
- }
- },
+ attrs,
+ vis,
+ const_token,
+ ident,
generics: Generics::default(),
- colon_token: input.parse()?,
- ty: input.parse()?,
- eq_token: input.parse()?,
- expr: input.parse()?,
- semi_token: input.parse()?,
+ colon_token,
+ ty: Box::new(ty),
+ eq_token,
+ expr: Box::new(expr),
+ semi_token,
})
}
}
@@ -1753,7 +1788,7 @@ pub(crate) mod parsing {
content.call(Attribute::parse_inner)?;
content.call(Block::parse_within)?;
- Ok(ForeignItem::Verbatim(verbatim::between(begin, input)))
+ Ok(ForeignItem::Verbatim(verbatim::between(&begin, input)))
} else {
Ok(ForeignItem::Fn(ForeignItemFn {
attrs: Vec::new(),
@@ -1773,7 +1808,7 @@ pub(crate) mod parsing {
input.parse::<Token![=]>()?;
input.parse::<Expr>()?;
input.parse::<Token![;]>()?;
- Ok(ForeignItem::Verbatim(verbatim::between(begin, input)))
+ Ok(ForeignItem::Verbatim(verbatim::between(&begin, input)))
} else {
Ok(ForeignItem::Static(ForeignItemStatic {
attrs: Vec::new(),
@@ -1882,7 +1917,7 @@ pub(crate) mod parsing {
)?;
if colon_token.is_some() || ty.is_some() {
- Ok(ForeignItem::Verbatim(verbatim::between(begin, input)))
+ Ok(ForeignItem::Verbatim(verbatim::between(&begin, input)))
} else {
Ok(ForeignItem::Type(ForeignItemType {
attrs: Vec::new(),
@@ -1952,7 +1987,7 @@ pub(crate) mod parsing {
let (eq_token, ty) = match ty {
Some(ty) if colon_token.is_none() => ty,
- _ => return Ok(Item::Verbatim(verbatim::between(begin, input))),
+ _ => return Ok(Item::Verbatim(verbatim::between(&begin, input))),
};
Ok(Item::Type(ItemType {
@@ -2210,10 +2245,36 @@ pub(crate) mod parsing {
let mut item = if lookahead.peek(Token![fn]) || peek_signature(&ahead) {
input.parse().map(TraitItem::Fn)
} else if lookahead.peek(Token![const]) {
- ahead.parse::<Token![const]>()?;
+ let const_token: Token![const] = ahead.parse()?;
let lookahead = ahead.lookahead1();
if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
- input.parse().map(TraitItem::Const)
+ input.advance_to(&ahead);
+ let ident = input.call(Ident::parse_any)?;
+ let mut generics: Generics = input.parse()?;
+ let colon_token: Token![:] = input.parse()?;
+ let ty: Type = input.parse()?;
+ let default = if let Some(eq_token) = input.parse::<Option<Token![=]>>()? {
+ let expr: Expr = input.parse()?;
+ Some((eq_token, expr))
+ } else {
+ None
+ };
+ generics.where_clause = input.parse()?;
+ let semi_token: Token![;] = input.parse()?;
+ if generics.lt_token.is_none() && generics.where_clause.is_none() {
+ Ok(TraitItem::Const(TraitItemConst {
+ attrs: Vec::new(),
+ const_token,
+ ident,
+ generics,
+ colon_token,
+ ty,
+ default,
+ semi_token,
+ }))
+ } else {
+ return Ok(TraitItem::Verbatim(verbatim::between(&begin, input)));
+ }
} else if lookahead.peek(Token![async])
|| lookahead.peek(Token![unsafe])
|| lookahead.peek(Token![extern])
@@ -2240,7 +2301,7 @@ pub(crate) mod parsing {
match (vis, defaultness) {
(Visibility::Inherited, None) => {}
- _ => return Ok(TraitItem::Verbatim(verbatim::between(begin, input))),
+ _ => return Ok(TraitItem::Verbatim(verbatim::between(&begin, input))),
}
let item_attrs = match &mut item {
@@ -2259,30 +2320,36 @@ pub(crate) mod parsing {
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for TraitItemConst {
fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let const_token: Token![const] = input.parse()?;
+
+ let lookahead = input.lookahead1();
+ let ident = if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
+ input.call(Ident::parse_any)?
+ } else {
+ return Err(lookahead.error());
+ };
+
+ let colon_token: Token![:] = input.parse()?;
+ let ty: Type = input.parse()?;
+ let default = if input.peek(Token![=]) {
+ let eq_token: Token![=] = input.parse()?;
+ let default: Expr = input.parse()?;
+ Some((eq_token, default))
+ } else {
+ None
+ };
+ let semi_token: Token![;] = input.parse()?;
+
Ok(TraitItemConst {
- attrs: input.call(Attribute::parse_outer)?,
- const_token: input.parse()?,
- ident: {
- let lookahead = input.lookahead1();
- if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
- input.call(Ident::parse_any)?
- } else {
- return Err(lookahead.error());
- }
- },
+ attrs,
+ const_token,
+ ident,
generics: Generics::default(),
- colon_token: input.parse()?,
- ty: input.parse()?,
- default: {
- if input.peek(Token![=]) {
- let eq_token: Token![=] = input.parse()?;
- let default: Expr = input.parse()?;
- Some((eq_token, default))
- } else {
- None
- }
- },
- semi_token: input.parse()?,
+ colon_token,
+ ty,
+ default,
+ semi_token,
})
}
}
@@ -2358,7 +2425,7 @@ pub(crate) mod parsing {
)?;
if vis.is_some() {
- Ok(TraitItem::Verbatim(verbatim::between(begin, input)))
+ Ok(TraitItem::Verbatim(verbatim::between(&begin, input)))
} else {
Ok(TraitItem::Type(TraitItemType {
attrs: Vec::new(),
@@ -2471,7 +2538,7 @@ pub(crate) mod parsing {
self_ty = if polarity.is_none() {
first_ty
} else {
- Type::Verbatim(verbatim::between(begin, input))
+ Type::Verbatim(verbatim::between(&begin, input))
};
}
@@ -2525,7 +2592,7 @@ pub(crate) mod parsing {
if let Some(item) = parse_impl_item_fn(input, allow_omitted_body)? {
Ok(ImplItem::Fn(item))
} else {
- Ok(ImplItem::Verbatim(verbatim::between(begin, input)))
+ Ok(ImplItem::Verbatim(verbatim::between(&begin, input)))
}
} else if lookahead.peek(Token![const]) {
input.advance_to(&ahead);
@@ -2536,26 +2603,37 @@ pub(crate) mod parsing {
} else {
return Err(lookahead.error());
};
+ let mut generics: Generics = input.parse()?;
let colon_token: Token![:] = input.parse()?;
let ty: Type = input.parse()?;
- if let Some(eq_token) = input.parse()? {
- return Ok(ImplItem::Const(ImplItemConst {
- attrs,
- vis,
- defaultness,
- const_token,
- ident,
- generics: Generics::default(),
- colon_token,
- ty,
- eq_token,
- expr: input.parse()?,
- semi_token: input.parse()?,
- }));
+ let value = if let Some(eq_token) = input.parse::<Option<Token![=]>>()? {
+ let expr: Expr = input.parse()?;
+ Some((eq_token, expr))
} else {
- input.parse::<Token![;]>()?;
- return Ok(ImplItem::Verbatim(verbatim::between(begin, input)));
- }
+ None
+ };
+ generics.where_clause = input.parse()?;
+ let semi_token: Token![;] = input.parse()?;
+ return match value {
+ Some((eq_token, expr))
+ if generics.lt_token.is_none() && generics.where_clause.is_none() =>
+ {
+ Ok(ImplItem::Const(ImplItemConst {
+ attrs,
+ vis,
+ defaultness,
+ const_token,
+ ident,
+ generics,
+ colon_token,
+ ty,
+ eq_token,
+ expr,
+ semi_token,
+ }))
+ }
+ _ => Ok(ImplItem::Verbatim(verbatim::between(&begin, input))),
+ };
} else if lookahead.peek(Token![type]) {
parse_impl_item_type(begin, input)
} else if vis.is_inherited()
@@ -2590,25 +2668,36 @@ pub(crate) mod parsing {
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for ImplItemConst {
fn parse(input: ParseStream) -> Result<Self> {
+ let attrs = input.call(Attribute::parse_outer)?;
+ let vis: Visibility = input.parse()?;
+ let defaultness: Option<Token![default]> = input.parse()?;
+ let const_token: Token![const] = input.parse()?;
+
+ let lookahead = input.lookahead1();
+ let ident = if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
+ input.call(Ident::parse_any)?
+ } else {
+ return Err(lookahead.error());
+ };
+
+ let colon_token: Token![:] = input.parse()?;
+ let ty: Type = input.parse()?;
+ let eq_token: Token![=] = input.parse()?;
+ let expr: Expr = input.parse()?;
+ let semi_token: Token![;] = input.parse()?;
+
Ok(ImplItemConst {
- attrs: input.call(Attribute::parse_outer)?,
- vis: input.parse()?,
- defaultness: input.parse()?,
- const_token: input.parse()?,
- ident: {
- let lookahead = input.lookahead1();
- if lookahead.peek(Ident) || lookahead.peek(Token![_]) {
- input.call(Ident::parse_any)?
- } else {
- return Err(lookahead.error());
- }
- },
+ attrs,
+ vis,
+ defaultness,
+ const_token,
+ ident,
generics: Generics::default(),
- colon_token: input.parse()?,
- ty: input.parse()?,
- eq_token: input.parse()?,
- expr: input.parse()?,
- semi_token: input.parse()?,
+ colon_token,
+ ty,
+ eq_token,
+ expr,
+ semi_token,
})
}
}
@@ -2700,7 +2789,7 @@ pub(crate) mod parsing {
let (eq_token, ty) = match ty {
Some(ty) if colon_token.is_none() => ty,
- _ => return Ok(ImplItem::Verbatim(verbatim::between(begin, input))),
+ _ => return Ok(ImplItem::Verbatim(verbatim::between(&begin, input))),
};
Ok(ImplItem::Type(ImplItemType {
diff --git a/src/lib.rs b/src/lib.rs
index 91837a34..a74d4b11 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -249,7 +249,7 @@
//! dynamic library libproc_macro from rustc toolchain.
// Syn types in rustdoc of other crates get linked to here.
-#![doc(html_root_url = "https://docs.rs/syn/2.0.16")]
+#![doc(html_root_url = "https://docs.rs/syn/2.0.38")]
#![cfg_attr(doc_cfg, feature(doc_cfg))]
#![allow(non_camel_case_types)]
#![allow(
@@ -296,10 +296,7 @@
clippy::wildcard_imports,
)]
-#[cfg(all(
- not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
- feature = "proc-macro"
-))]
+#[cfg(feature = "proc-macro")]
extern crate proc_macro;
#[macro_use]
@@ -376,6 +373,7 @@ pub use crate::generics::{
pub use crate::generics::{ImplGenerics, Turbofish, TypeGenerics};
mod ident;
+#[doc(inline)]
pub use crate::ident::Ident;
#[cfg(feature = "full")]
@@ -391,9 +389,11 @@ pub use crate::item::{
};
mod lifetime;
+#[doc(inline)]
pub use crate::lifetime::Lifetime;
mod lit;
+#[doc(inline)]
pub use crate::lit::{
Lit, LitBool, LitByte, LitByteStr, LitChar, LitFloat, LitInt, LitStr, StrStyle,
};
@@ -422,11 +422,7 @@ pub use crate::op::{BinOp, UnOp};
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub mod parse;
-#[cfg(all(
- not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
- feature = "parsing",
- feature = "proc-macro"
-))]
+#[cfg(all(feature = "parsing", feature = "proc-macro"))]
mod parse_macro_input;
#[cfg(all(feature = "parsing", feature = "printing"))]
@@ -860,11 +856,7 @@ pub mod __private;
/// expanded.into()
/// }
/// ```
-#[cfg(all(
- not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
- feature = "parsing",
- feature = "proc-macro"
-))]
+#[cfg(all(feature = "parsing", feature = "proc-macro"))]
#[cfg_attr(doc_cfg, doc(cfg(all(feature = "parsing", feature = "proc-macro"))))]
pub fn parse<T: parse::Parse>(tokens: proc_macro::TokenStream) -> Result<T> {
parse::Parser::parse(T::parse, tokens)
diff --git a/src/lifetime.rs b/src/lifetime.rs
index 96920ad0..29f4cfdb 100644
--- a/src/lifetime.rs
+++ b/src/lifetime.rs
@@ -113,10 +113,12 @@ impl Hash for Lifetime {
}
#[cfg(feature = "parsing")]
-#[doc(hidden)]
-#[allow(non_snake_case)]
-pub fn Lifetime(marker: lookahead::TokenMarker) -> Lifetime {
- match marker {}
+pub_if_not_doc! {
+ #[doc(hidden)]
+ #[allow(non_snake_case)]
+ pub fn Lifetime(marker: lookahead::TokenMarker) -> Lifetime {
+ match marker {}
+ }
}
#[cfg(feature = "parsing")]
diff --git a/src/lit.rs b/src/lit.rs
index 662ef8b2..f7426ce8 100644
--- a/src/lit.rs
+++ b/src/lit.rs
@@ -228,7 +228,17 @@ impl LitStr {
let mut tokens = TokenStream::from_str(&self.value())?;
tokens = respan_token_stream(tokens, self.span());
- parser.parse2(tokens)
+ let result = parser.parse2(tokens)?;
+
+ let suffix = self.suffix();
+ if !suffix.is_empty() {
+ return Err(Error::new(
+ self.span(),
+ format!("unexpected suffix `{}` on string literal", suffix),
+ ));
+ }
+
+ Ok(result)
}
pub fn span(&self) -> Span {
@@ -748,10 +758,12 @@ macro_rules! lit_extra_traits {
}
#[cfg(feature = "parsing")]
- #[doc(hidden)]
- #[allow(non_snake_case)]
- pub fn $ty(marker: lookahead::TokenMarker) -> $ty {
- match marker {}
+ pub_if_not_doc! {
+ #[doc(hidden)]
+ #[allow(non_snake_case)]
+ pub fn $ty(marker: lookahead::TokenMarker) -> $ty {
+ match marker {}
+ }
}
};
}
@@ -764,10 +776,12 @@ lit_extra_traits!(LitInt);
lit_extra_traits!(LitFloat);
#[cfg(feature = "parsing")]
-#[doc(hidden)]
-#[allow(non_snake_case)]
-pub fn LitBool(marker: lookahead::TokenMarker) -> LitBool {
- match marker {}
+pub_if_not_doc! {
+ #[doc(hidden)]
+ #[allow(non_snake_case)]
+ pub fn LitBool(marker: lookahead::TokenMarker) -> LitBool {
+ match marker {}
+ }
}
ast_enum! {
@@ -784,10 +798,12 @@ ast_enum! {
}
#[cfg(feature = "parsing")]
-#[doc(hidden)]
-#[allow(non_snake_case)]
-pub fn Lit(marker: lookahead::TokenMarker) -> Lit {
- match marker {}
+pub_if_not_doc! {
+ #[doc(hidden)]
+ #[allow(non_snake_case)]
+ pub fn Lit(marker: lookahead::TokenMarker) -> Lit {
+ match marker {}
+ }
}
#[cfg(feature = "parsing")]
@@ -1079,6 +1095,7 @@ mod value {
// c"...", cr"...", cr#"..."#
// TODO: add a Lit::CStr variant?
b'c' => return Lit::Verbatim(token),
+ b'(' if repr == "(/*ERROR*/)" => return Lit::Verbatim(token),
_ => {}
}
@@ -1166,7 +1183,7 @@ mod value {
b'x' => {
let (byte, rest) = backslash_x(s);
s = rest;
- assert!(byte <= 0x80, "Invalid \\x byte in string literal");
+ assert!(byte <= 0x7F, "Invalid \\x byte in string literal");
char::from_u32(u32::from(byte)).unwrap()
}
b'u' => {
@@ -1273,8 +1290,7 @@ mod value {
b'"' => b'"',
b'\r' | b'\n' => loop {
let byte = byte(v, 0);
- let ch = char::from_u32(u32::from(byte)).unwrap();
- if ch.is_whitespace() {
+ if matches!(byte, b' ' | b'\t' | b'\n' | b'\r') {
v = &v[1..];
} else {
continue 'outer;
diff --git a/src/macros.rs b/src/macros.rs
index 953841b6..06ceb542 100644
--- a/src/macros.rs
+++ b/src/macros.rs
@@ -166,3 +166,20 @@ macro_rules! check_keyword_matches {
(enum enum) => {};
(pub pub) => {};
}
+
+// Rustdoc bug: does not respect the doc(hidden) on some items.
+#[cfg(all(doc, feature = "parsing"))]
+macro_rules! pub_if_not_doc {
+ ($(#[$m:meta])* pub $($item:tt)*) => {
+ $(#[$m])*
+ pub(crate) $($item)*
+ };
+}
+
+#[cfg(all(not(doc), feature = "parsing"))]
+macro_rules! pub_if_not_doc {
+ ($(#[$m:meta])* pub $($item:tt)*) => {
+ $(#[$m])*
+ pub $($item)*
+ };
+}
diff --git a/src/meta.rs b/src/meta.rs
index b6bcf983..f17b2802 100644
--- a/src/meta.rs
+++ b/src/meta.rs
@@ -129,7 +129,13 @@ use std::fmt::Display;
/// }
/// ```
pub fn parser(logic: impl FnMut(ParseNestedMeta) -> Result<()>) -> impl Parser<Output = ()> {
- |input: ParseStream| parse_nested_meta(input, logic)
+ |input: ParseStream| {
+ if input.is_empty() {
+ Ok(())
+ } else {
+ parse_nested_meta(input, logic)
+ }
+ }
}
/// Context for parsing a single property in the conventional syntax for
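The meta.rs change above makes the parser returned by syn::meta::parser accept an empty argument list instead of rejecting it. A minimal sketch of the effect, assuming syn 2.0.38 with default features; the attribute name my_attr and the `level` property are illustrative only:

    use syn::parse::Parser;

    fn main() -> syn::Result<()> {
        let mut level: Option<syn::LitInt> = None;
        let attr_args = syn::meta::parser(|meta| {
            if meta.path.is_ident("level") {
                level = Some(meta.value()?.parse()?);
                Ok(())
            } else {
                Err(meta.error("unsupported property"))
            }
        });
        // With the patch above, an empty argument list (as for a bare `#[my_attr]`)
        // now parses successfully; previously it was rejected.
        attr_args.parse_str("")?;
        assert!(level.is_none());
        Ok(())
    }

This matters mainly for attribute macros invoked without arguments, where the args token stream is empty.
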
diff --git a/src/parse.rs b/src/parse.rs
index 61a10d2b..5a2aeb62 100644
--- a/src/parse.rs
+++ b/src/parse.rs
@@ -185,10 +185,7 @@ pub mod discouraged;
use crate::buffer::{Cursor, TokenBuffer};
use crate::error;
use crate::lookahead;
-#[cfg(all(
- not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
- feature = "proc-macro"
-))]
+#[cfg(feature = "proc-macro")]
use crate::proc_macro;
use crate::punctuated::Punctuated;
use crate::token::Token;
@@ -1198,10 +1195,7 @@ pub trait Parser: Sized {
///
/// This function will check that the input is fully parsed. If there are
/// any unparsed tokens at the end of the stream, an error is returned.
- #[cfg(all(
- not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
- feature = "proc-macro"
- ))]
+ #[cfg(feature = "proc-macro")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "proc-macro")))]
fn parse(self, tokens: proc_macro::TokenStream) -> Result<Self::Output> {
self.parse2(proc_macro2::TokenStream::from(tokens))
diff --git a/src/parse_quote.rs b/src/parse_quote.rs
index f5129439..59e51b41 100644
--- a/src/parse_quote.rs
+++ b/src/parse_quote.rs
@@ -120,6 +120,7 @@ pub fn parse<T: ParseQuote>(token_stream: TokenStream) -> T {
}
}
+#[doc(hidden)]
pub trait ParseQuote: Sized {
fn parse(input: ParseStream) -> Result<Self>;
}
diff --git a/src/pat.rs b/src/pat.rs
index 2e6376b2..df7da5bb 100644
--- a/src/pat.rs
+++ b/src/pat.rs
@@ -422,7 +422,7 @@ pub(crate) mod parsing {
fn pat_box(begin: ParseBuffer, input: ParseStream) -> Result<Pat> {
input.parse::<Token![box]>()?;
Pat::parse_single(input)?;
- Ok(Pat::Verbatim(verbatim::between(begin, input)))
+ Ok(Pat::Verbatim(verbatim::between(&begin, input)))
}
fn pat_ident(input: ParseStream) -> Result<PatIdent> {
@@ -544,7 +544,7 @@ pub(crate) mod parsing {
};
let pat = if boxed.is_some() {
- Pat::Verbatim(verbatim::between(begin, input))
+ Pat::Verbatim(verbatim::between(&begin, input))
} else {
Pat::Ident(PatIdent {
attrs: Vec::new(),
@@ -762,7 +762,7 @@ pub(crate) mod parsing {
content.call(Attribute::parse_inner)?;
content.call(Block::parse_within)?;
- Ok(verbatim::between(begin, input))
+ Ok(verbatim::between(&begin, input))
}
}
diff --git a/src/path.rs b/src/path.rs
index e99a3f87..b9d96e66 100644
--- a/src/path.rs
+++ b/src/path.rs
@@ -53,8 +53,9 @@ impl Path {
/// }
/// }
/// ```
- pub fn is_ident<I: ?Sized>(&self, ident: &I) -> bool
+ pub fn is_ident<I>(&self, ident: &I) -> bool
where
+ I: ?Sized,
Ident: PartialEq<I>,
{
match self.get_ident() {
@@ -81,6 +82,19 @@ impl Path {
None
}
}
+
+ /// An error if this path is not a single ident, as defined in `get_ident`.
+ #[cfg(feature = "parsing")]
+ #[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
+ pub fn require_ident(&self) -> Result<&Ident> {
+ self.get_ident().ok_or_else(|| {
+ crate::error::new2(
+ self.segments.first().unwrap().ident.span(),
+ self.segments.last().unwrap().ident.span(),
+ "expected this path to be an identifier",
+ )
+ })
+ }
}
ast_struct! {
@@ -368,7 +382,6 @@ pub(crate) mod parsing {
return Ok(Expr::Lit(lit));
}
- #[cfg(feature = "full")]
if input.peek(Ident) {
let ident: Ident = input.parse()?;
return Ok(Expr::Path(ExprPath {
@@ -391,7 +404,7 @@ pub(crate) mod parsing {
let content;
braced!(content in input);
content.parse::<Expr>()?;
- let verbatim = verbatim::between(begin, input);
+ let verbatim = verbatim::between(&begin, input);
return Ok(Expr::Verbatim(verbatim));
}
}
@@ -649,6 +662,10 @@ pub(crate) mod parsing {
pub(crate) mod printing {
use super::*;
use crate::print::TokensOrDefault;
+ #[cfg(feature = "parsing")]
+ use crate::spanned::Spanned;
+ #[cfg(feature = "parsing")]
+ use proc_macro2::Span;
use proc_macro2::TokenStream;
use quote::ToTokens;
use std::cmp;
@@ -692,10 +709,21 @@ pub(crate) mod printing {
GenericArgument::Lifetime(lt) => lt.to_tokens(tokens),
GenericArgument::Type(ty) => ty.to_tokens(tokens),
GenericArgument::Const(expr) => match expr {
- Expr::Lit(_) => expr.to_tokens(tokens),
+ Expr::Lit(expr) => expr.to_tokens(tokens),
+
+ Expr::Path(expr)
+ if expr.attrs.is_empty()
+ && expr.qself.is_none()
+ && expr.path.get_ident().is_some() =>
+ {
+ expr.to_tokens(tokens);
+ }
#[cfg(feature = "full")]
- Expr::Block(_) => expr.to_tokens(tokens),
+ Expr::Block(expr) => expr.to_tokens(tokens),
+
+ #[cfg(not(feature = "full"))]
+ Expr::Verbatim(expr) => expr.to_tokens(tokens),
// ERROR CORRECTION: Add braces to make sure that the
// generated code is valid.
@@ -826,4 +854,21 @@ pub(crate) mod printing {
segment.to_tokens(tokens);
}
}
+
+ #[cfg(feature = "parsing")]
+ #[cfg_attr(doc_cfg, doc(cfg(all(feature = "parsing", feature = "printing"))))]
+ impl Spanned for QSelf {
+ fn span(&self) -> Span {
+ struct QSelfDelimiters<'a>(&'a QSelf);
+
+ impl<'a> ToTokens for QSelfDelimiters<'a> {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.0.lt_token.to_tokens(tokens);
+ self.0.gt_token.to_tokens(tokens);
+ }
+ }
+
+ QSelfDelimiters(self).span()
+ }
+ }
}
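The path.rs hunk above adds Path::require_ident, which returns the identifier, or an error spanning the whole path when the path is not a single ident. A minimal usage sketch, assuming syn 2.0.38 with the default "parsing" and "printing" features; the paths serde and std::fmt::Debug are illustrative only:

    use syn::{parse_quote, Path};

    fn main() -> syn::Result<()> {
        let single: Path = parse_quote!(serde);
        // A one-segment path with no leading `::` and no arguments is an identifier.
        assert_eq!(single.require_ident()?, "serde");

        let qualified: Path = parse_quote!(std::fmt::Debug);
        // Multi-segment paths are rejected ("expected this path to be an identifier").
        assert!(qualified.require_ident().is_err());
        Ok(())
    }

This saves callers the usual get_ident().ok_or_else(..) boilerplate when a plain identifier is required.
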
diff --git a/src/punctuated.rs b/src/punctuated.rs
index a4278081..3ea8a1d4 100644
--- a/src/punctuated.rs
+++ b/src/punctuated.rs
@@ -369,6 +369,11 @@ where
last: self.last.clone(),
}
}
+
+ fn clone_from(&mut self, other: &Self) {
+ self.inner.clone_from(&other.inner);
+ self.last.clone_from(&other.last);
+ }
}
#[cfg(feature = "extra-traits")]
diff --git a/src/span.rs b/src/span.rs
index 50a26b83..eb277947 100644
--- a/src/span.rs
+++ b/src/span.rs
@@ -1,6 +1,7 @@
use proc_macro2::extra::DelimSpan;
use proc_macro2::{Delimiter, Group, Span, TokenStream};
+#[doc(hidden)]
pub trait IntoSpans<S> {
fn into_spans(self) -> S;
}
diff --git a/src/spanned.rs b/src/spanned.rs
index 7e101d26..98aa0aa1 100644
--- a/src/spanned.rs
+++ b/src/spanned.rs
@@ -112,4 +112,7 @@ mod private {
pub trait Sealed {}
impl<T: ?Sized + ToTokens> Sealed for T {}
+
+ #[cfg(any(feature = "full", feature = "derive"))]
+ impl Sealed for crate::QSelf {}
}
diff --git a/src/stmt.rs b/src/stmt.rs
index b5434f7c..fb67fecc 100644
--- a/src/stmt.rs
+++ b/src/stmt.rs
@@ -180,7 +180,8 @@ pub(crate) mod parsing {
}
fn parse_stmt(input: ParseStream, allow_nosemi: AllowNoSemi) -> Result<Stmt> {
- let mut attrs = input.call(Attribute::parse_outer)?;
+ let begin = input.fork();
+ let attrs = input.call(Attribute::parse_outer)?;
// brace-style macros; paren and bracket macros get parsed as
// expression statements.
@@ -238,9 +239,7 @@ pub(crate) mod parsing {
|| input.peek(Token![macro])
|| is_item_macro
{
- let mut item: Item = input.parse()?;
- attrs.extend(item.replace_attrs(Vec::new()));
- item.replace_attrs(attrs);
+ let item = item::parsing::parse_rest_of_item(begin, attrs, input)?;
Ok(Stmt::Item(item))
} else {
stmt_expr(input, allow_nosemi, attrs)
diff --git a/src/thread.rs b/src/thread.rs
index 63fdea83..b33d248a 100644
--- a/src/thread.rs
+++ b/src/thread.rs
@@ -12,6 +12,9 @@ pub(crate) struct ThreadBound<T> {
unsafe impl<T> Sync for ThreadBound<T> {}
// Send bound requires Copy, as otherwise Drop could run in the wrong place.
+//
+// Today Copy and Drop are mutually exclusive so `T: Copy` implies `T: !Drop`.
+// This impl needs to be revisited if that restriction is relaxed in the future.
unsafe impl<T: Copy> Send for ThreadBound<T> {}
impl<T> ThreadBound<T> {
@@ -40,11 +43,18 @@ impl<T: Debug> Debug for ThreadBound<T> {
}
}
-impl<T: Clone> Clone for ThreadBound<T> {
+// Copy the bytes of T, even if the currently running thread is the "wrong"
+// thread. This is fine as long as the original thread is not simultaneously
+// mutating this value via interior mutability, which would be a data race.
+//
+// Currently `T: Copy` is sufficient to guarantee that T contains no interior
+// mutability, because _all_ interior mutability in Rust is built on
+// std::cell::UnsafeCell, which has no Copy impl. This impl needs to be
+// revisited if that restriction is relaxed in the future.
+impl<T: Copy> Copy for ThreadBound<T> {}
+
+impl<T: Copy> Clone for ThreadBound<T> {
fn clone(&self) -> Self {
- ThreadBound {
- value: self.value.clone(),
- thread_id: self.thread_id,
- }
+ *self
}
}
diff --git a/src/token.rs b/src/token.rs
index c140571a..af7f25c4 100644
--- a/src/token.rs
+++ b/src/token.rs
@@ -143,6 +143,7 @@ mod private {
/// Support writing `token.span` rather than `token.spans[0]` on tokens that
/// hold a single span.
#[repr(transparent)]
+ #[allow(unknown_lints, repr_transparent_external_private_fields)] // False positive: https://github.com/rust-lang/rust/issues/78586#issuecomment-1722680482
pub struct WithSpan {
pub span: Span,
}
@@ -365,6 +366,7 @@ macro_rules! define_punctuation_structs {
($($token:literal pub struct $name:ident/$len:tt #[doc = $usage:literal])*) => {
$(
#[cfg_attr(not(doc), repr(transparent))]
+ #[allow(unknown_lints, repr_transparent_external_private_fields)] // False positive: https://github.com/rust-lang/rust/issues/78586#issuecomment-1722680482
#[doc = concat!('`', $token, '`')]
///
/// Usage:
@@ -840,6 +842,67 @@ define_delimiters! {
/// A type-macro that expands to the name of the Rust type representation of a
/// given token.
///
+/// As a type, `Token!` is commonly used in the type of struct fields, the type
+/// of a `let` statement, or in turbofish for a `parse` function.
+///
+/// ```
+/// use syn::{Ident, Token};
+/// use syn::parse::{Parse, ParseStream, Result};
+///
+/// // `struct Foo;`
+/// pub struct UnitStruct {
+/// struct_token: Token![struct],
+/// ident: Ident,
+/// semi_token: Token![;],
+/// }
+///
+/// impl Parse for UnitStruct {
+/// fn parse(input: ParseStream) -> Result<Self> {
+/// let struct_token: Token![struct] = input.parse()?;
+/// let ident: Ident = input.parse()?;
+/// let semi_token = input.parse::<Token![;]>()?;
+/// Ok(UnitStruct { struct_token, ident, semi_token })
+/// }
+/// }
+/// ```
+///
+/// As an expression, `Token!` is used for peeking tokens or instantiating
+/// tokens from a span.
+///
+/// ```
+/// # use syn::{Ident, Token};
+/// # use syn::parse::{Parse, ParseStream, Result};
+/// #
+/// # struct UnitStruct {
+/// # struct_token: Token![struct],
+/// # ident: Ident,
+/// # semi_token: Token![;],
+/// # }
+/// #
+/// # impl Parse for UnitStruct {
+/// # fn parse(input: ParseStream) -> Result<Self> {
+/// # unimplemented!()
+/// # }
+/// # }
+/// #
+/// fn make_unit_struct(name: Ident) -> UnitStruct {
+/// let span = name.span();
+/// UnitStruct {
+/// struct_token: Token![struct](span),
+/// ident: name,
+/// semi_token: Token![;](span),
+/// }
+/// }
+///
+/// # fn parse(input: ParseStream) -> Result<()> {
+/// if input.peek(Token![struct]) {
+/// let unit_struct: UnitStruct = input.parse()?;
+/// /* ... */
+/// }
+/// # Ok(())
+/// # }
+/// ```
+///
/// See the [token module] documentation for details and examples.
///
/// [token module]: crate::token
@@ -974,6 +1037,7 @@ pub(crate) mod parsing {
}
}
+ #[doc(hidden)]
pub fn punct<const N: usize>(input: ParseStream, token: &str) -> Result<[Span; N]> {
let mut spans = [input.span(); N];
punct_helper(input, token, &mut spans)?;
@@ -1006,6 +1070,7 @@ pub(crate) mod parsing {
})
}
+ #[doc(hidden)]
pub fn peek_punct(mut cursor: Cursor, token: &str) -> bool {
for (i, ch) in token.chars().enumerate() {
match cursor.punct() {
@@ -1033,6 +1098,7 @@ pub(crate) mod printing {
use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream};
use quote::TokenStreamExt;
+ #[doc(hidden)]
pub fn punct(s: &str, spans: &[Span], tokens: &mut TokenStream) {
assert_eq!(s.len(), spans.len());
diff --git a/src/ty.rs b/src/ty.rs
index 9282ba4e..0f41fe4f 100644
--- a/src/ty.rs
+++ b/src/ty.rs
@@ -525,7 +525,7 @@ pub(crate) mod parsing {
let star_token: Option<Token![*]> = input.parse()?;
let bounds = TypeTraitObject::parse_bounds(dyn_span, input, allow_plus)?;
return Ok(if star_token.is_some() {
- Type::Verbatim(verbatim::between(begin, input))
+ Type::Verbatim(verbatim::between(&begin, input))
} else {
Type::TraitObject(TypeTraitObject {
dyn_token: Some(dyn_token),
@@ -947,7 +947,7 @@ pub(crate) mod parsing {
Some(ty) if !has_mut_self => ty,
_ => {
name = None;
- Type::Verbatim(verbatim::between(begin, input))
+ Type::Verbatim(verbatim::between(&begin, input))
}
};
diff --git a/src/verbatim.rs b/src/verbatim.rs
index 436d8734..54dc1cfa 100644
--- a/src/verbatim.rs
+++ b/src/verbatim.rs
@@ -1,9 +1,9 @@
-use crate::parse::{ParseBuffer, ParseStream};
+use crate::parse::ParseStream;
use proc_macro2::{Delimiter, TokenStream};
use std::cmp::Ordering;
use std::iter;
-pub(crate) fn between<'a>(begin: ParseBuffer<'a>, end: ParseStream<'a>) -> TokenStream {
+pub(crate) fn between<'a>(begin: ParseStream<'a>, end: ParseStream<'a>) -> TokenStream {
let end = end.cursor();
let mut cursor = begin.cursor();
assert!(crate::buffer::same_buffer(end, cursor));
diff --git a/tests/common/eq.rs b/tests/common/eq.rs
index 8ca04b6a..3c2b1c12 100644
--- a/tests/common/eq.rs
+++ b/tests/common/eq.rs
@@ -93,7 +93,6 @@ use rustc_ast::ast::Local;
use rustc_ast::ast::LocalKind;
use rustc_ast::ast::MacCall;
use rustc_ast::ast::MacCallStmt;
-use rustc_ast::ast::MacDelimiter;
use rustc_ast::ast::MacStmtStyle;
use rustc_ast::ast::MacroDef;
use rustc_ast::ast::MetaItemLit;
@@ -157,7 +156,7 @@ use rustc_ast::tokenstream::{
use rustc_data_structures::sync::Lrc;
use rustc_span::source_map::Spanned;
use rustc_span::symbol::{sym, Ident};
-use rustc_span::{Span, Symbol, SyntaxContext, DUMMY_SP};
+use rustc_span::{ErrorGuaranteed, Span, Symbol, SyntaxContext, DUMMY_SP};
use std::collections::HashMap;
use std::hash::{BuildHasher, Hash};
use thin_vec::ThinVec;
@@ -297,6 +296,7 @@ spanless_eq_partial_eq!(CommentKind);
spanless_eq_partial_eq!(Delimiter);
spanless_eq_partial_eq!(InlineAsmOptions);
spanless_eq_partial_eq!(token::LitKind);
+spanless_eq_partial_eq!(ErrorGuaranteed);
macro_rules! spanless_eq_struct {
{
@@ -457,7 +457,7 @@ spanless_eq_struct!(BareFnTy; unsafety ext generic_params decl decl_span);
spanless_eq_struct!(BindingAnnotation; 0 1);
spanless_eq_struct!(Block; stmts id rules span tokens could_be_bare_literal);
spanless_eq_struct!(Closure; binder capture_clause constness asyncness movability fn_decl body !fn_decl_span !fn_arg_span);
-spanless_eq_struct!(ConstItem; defaultness ty expr);
+spanless_eq_struct!(ConstItem; defaultness generics ty expr);
spanless_eq_struct!(Crate; attrs items spans id is_placeholder);
spanless_eq_struct!(DelimArgs; dspan delim tokens);
spanless_eq_struct!(EnumDef; variants);
@@ -558,7 +558,6 @@ spanless_eq_enum!(IsAuto; Yes No);
spanless_eq_enum!(LitFloatType; Suffixed(0) Unsuffixed);
spanless_eq_enum!(LitIntType; Signed(0) Unsigned(0) Unsuffixed);
spanless_eq_enum!(LocalKind; Decl Init(0) InitElse(0 1));
-spanless_eq_enum!(MacDelimiter; Parenthesis Bracket Brace);
spanless_eq_enum!(MacStmtStyle; Semicolon Braces NoBraces);
spanless_eq_enum!(ModKind; Loaded(0 1 2) Unloaded);
spanless_eq_enum!(Movability; Static Movable);
@@ -581,13 +580,13 @@ spanless_eq_enum!(VariantData; Struct(0 1) Tuple(0 1) Unit(0));
spanless_eq_enum!(VisibilityKind; Public Restricted(path id shorthand) Inherited);
spanless_eq_enum!(WherePredicate; BoundPredicate(0) RegionPredicate(0) EqPredicate(0));
spanless_eq_enum!(ExprKind; Array(0) ConstBlock(0) Call(0 1) MethodCall(0)
- Tup(0) Binary(0 1 2) Unary(0 1) Lit(0) Cast(0 1) Type(0 1) Let(0 1 2)
+ Tup(0) Binary(0 1 2) Unary(0 1) Lit(0) Cast(0 1) Type(0 1) Let(0 1 2 3)
If(0 1 2) While(0 1 2) ForLoop(0 1 2 3) Loop(0 1 2) Match(0 1) Closure(0)
Block(0 1) Async(0 1) Await(0 1) TryBlock(0) Assign(0 1 2) AssignOp(0 1 2)
- Field(0 1) Index(0 1) Underscore Range(0 1 2) Path(0 1) AddrOf(0 1 2)
+ Field(0 1) Index(0 1 2) Underscore Range(0 1 2) Path(0 1) AddrOf(0 1 2)
Break(0 1) Continue(0) Ret(0) InlineAsm(0) OffsetOf(0 1) MacCall(0)
- Struct(0) Repeat(0 1) Paren(0) Try(0) Yield(0) Yeet(0) IncludedBytes(0)
- FormatArgs(0) Err);
+ Struct(0) Repeat(0 1) Paren(0) Try(0) Yield(0) Yeet(0) Become(0)
+ IncludedBytes(0) FormatArgs(0) Err);
spanless_eq_enum!(InlineAsmOperand; In(reg expr) Out(reg late expr)
InOut(reg late expr) SplitInOut(reg late in_expr out_expr) Const(anon_const)
Sym(sym));
@@ -600,8 +599,8 @@ spanless_eq_enum!(PatKind; Wild Ident(0 1 2) Struct(0 1 2 3) TupleStruct(0 1 2)
Or(0) Path(0 1) Tuple(0) Box(0) Ref(0 1) Lit(0) Range(0 1 2) Slice(0) Rest
Paren(0) MacCall(0));
spanless_eq_enum!(TyKind; Slice(0) Array(0 1) Ptr(0) Ref(0 1) BareFn(0) Never
- Tup(0) Path(0 1) TraitObject(0 1) ImplTrait(0 1) Paren(0) Typeof(0) Infer
- ImplicitSelf MacCall(0) Err CVarArgs);
+ Tup(0) AnonStruct(0) AnonUnion(0) Path(0 1) TraitObject(0 1) ImplTrait(0 1)
+ Paren(0) Typeof(0) Infer ImplicitSelf MacCall(0) Err CVarArgs);
impl SpanlessEq for Ident {
fn eq(&self, other: &Self) -> bool {
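For readers unfamiliar with this test file: the `spanless_eq_*` registrations compare rustc AST values while ignoring `Span` information, and the edits above track upstream AST changes (`ConstItem` gaining `generics`, `ExprKind::Become`, an extra field on `Index` and `Let`, anonymous struct/union types, and the removal of `MacDelimiter`). A rough sketch of the idea only; the suite's real macro definitions may differ:

```rust
// Types that carry no span data can implement span-insensitive equality by
// delegating to PartialEq, which is what spanless_eq_partial_eq! expresses.
trait SpanlessEq {
    fn eq(&self, other: &Self) -> bool;
}

macro_rules! spanless_eq_partial_eq {
    ($ty:ty) => {
        impl SpanlessEq for $ty {
            fn eq(&self, other: &Self) -> bool {
                self == other
            }
        }
    };
}

spanless_eq_partial_eq!(u32);
spanless_eq_partial_eq!(bool);

fn main() {
    assert!(SpanlessEq::eq(&1u32, &1u32));
}
```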
diff --git a/tests/debug/gen.rs b/tests/debug/gen.rs
index b64cc3e7..3f92598d 100644
--- a/tests/debug/gen.rs
+++ b/tests/debug/gen.rs
@@ -1,6 +1,7 @@
// This file is @generated by syn-internal-codegen.
// It is not intended for manual editing.
+#![allow(repr_transparent_external_private_fields)]
#![allow(clippy::match_wildcard_for_single_variants)]
use super::{Lite, Present};
use ref_cast::RefCast;
diff --git a/tests/macros/mod.rs b/tests/macros/mod.rs
index 5ca88b08..3bfbe038 100644
--- a/tests/macros/mod.rs
+++ b/tests/macros/mod.rs
@@ -38,14 +38,20 @@ macro_rules! snapshot_impl {
let $expr = crate::macros::Tokens::parse::<$t>($expr).unwrap();
let debug = crate::macros::debug::Lite(&$expr);
if !cfg!(miri) {
- insta::assert_debug_snapshot!(debug, @$snapshot);
+ #[allow(clippy::needless_raw_string_hashes)] // https://github.com/mitsuhiko/insta/issues/389
+ {
+ insta::assert_debug_snapshot!(debug, @$snapshot);
+ }
}
};
(($($expr:tt)*) as $t:ty, @$snapshot:literal) => {{
let syntax_tree = crate::macros::Tokens::parse::<$t>($($expr)*).unwrap();
let debug = crate::macros::debug::Lite(&syntax_tree);
if !cfg!(miri) {
- insta::assert_debug_snapshot!(debug, @$snapshot);
+ #[allow(clippy::needless_raw_string_hashes)]
+ {
+ insta::assert_debug_snapshot!(debug, @$snapshot);
+ }
}
syntax_tree
}};
@@ -53,7 +59,10 @@ macro_rules! snapshot_impl {
let syntax_tree = $($expr)*;
let debug = crate::macros::debug::Lite(&syntax_tree);
if !cfg!(miri) {
- insta::assert_debug_snapshot!(debug, @$snapshot);
+ #[allow(clippy::needless_raw_string_hashes)]
+ {
+ insta::assert_debug_snapshot!(debug, @$snapshot);
+ }
}
syntax_tree
}};
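Each hunk here scopes an `#[allow(clippy::needless_raw_string_hashes)]` to a block around the `insta::assert_debug_snapshot!` call; that lint flags raw string literals whose `#` hashes are unnecessary, which can trip on insta's inline-snapshot literals (per the linked issue). The general pattern, as a standalone sketch:

```rust
// Scope a lint allow to exactly one statement by wrapping it in a block,
// instead of allowing the lint for the whole function or module.
fn demo() {
    #[allow(clippy::needless_raw_string_hashes)]
    {
        let _snapshot = r#"raw string that does not actually need hashes"#;
    }
}

fn main() {
    demo();
}
```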
diff --git a/tests/repo/mod.rs b/tests/repo/mod.rs
index cec42a6c..61d5ff35 100644
--- a/tests/repo/mod.rs
+++ b/tests/repo/mod.rs
@@ -13,10 +13,18 @@ use std::path::{Path, PathBuf};
use tar::Archive;
use walkdir::{DirEntry, WalkDir};
-const REVISION: &str = "5e1d3299a290026b85787bc9c7e72bcc53ac283f";
+const REVISION: &str = "9f5fc1bd443f59583e7af0d94d289f95fe1e20c4";
#[rustfmt::skip]
static EXCLUDE_FILES: &[&str] = &[
+ // TODO: CStr literals: c"…", cr"…"
+ // https://github.com/dtolnay/syn/issues/1502
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0085_expr_literals.rs",
+
+ // TODO: explicit tail calls: `become _g()`
+ // https://github.com/dtolnay/syn/issues/1501
+ "tests/ui/explicit-tail-calls/return-lifetime-sub.rs",
+
// TODO: non-lifetime binders: `where for<'a, T> &'a Struct<T>: Trait`
// https://github.com/dtolnay/syn/issues/1435
"tests/rustdoc-json/non_lifetime_binders.rs",
@@ -24,6 +32,7 @@ static EXCLUDE_FILES: &[&str] = &[
// TODO: return type notation: `where T: Trait<method(): Send>`
// https://github.com/dtolnay/syn/issues/1434
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0208_associated_return_type_bounds.rs",
"tests/ui/associated-type-bounds/return-type-notation/basic.rs",
"tests/ui/feature-gates/feature-gate-return_type_notation.rs",
@@ -37,11 +46,14 @@ static EXCLUDE_FILES: &[&str] = &[
// Need at least one trait in impl Trait, no such type as impl 'static
"tests/ui/type-alias-impl-trait/generic_type_does_not_live_long_enough.rs",
+ // Negative polarity trait bound: `where T: !Copy`
+ "src/tools/rustfmt/tests/target/negative-bounds.rs",
+
// Lifetime bound inside for<>: `T: ~const ?for<'a: 'b> Trait<'a>`
- "tests/ui/rfc-2632-const-trait-impl/tilde-const-syntax.rs",
+ "tests/ui/rfcs/rfc-2632-const-trait-impl/tilde-const-syntax.rs",
// Const impl that is not a trait impl: `impl ~const T {}`
- "tests/ui/rfc-2632-const-trait-impl/syntax.rs",
+ "tests/ui/rfcs/rfc-2632-const-trait-impl/syntax.rs",
// Deprecated anonymous parameter syntax in traits
"src/tools/rustfmt/tests/source/trait.rs",
@@ -63,6 +75,7 @@ static EXCLUDE_FILES: &[&str] = &[
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0004_value_parameters_no_patterns.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0104_path_fn_trait_args.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0202_typepathfn_with_coloncolon.rs",
+ "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0209_bare_dyn_types_with_paren_as_generic_args.rs",
"src/tools/rustfmt/tests/source/attrib.rs",
"src/tools/rustfmt/tests/source/closure.rs",
"src/tools/rustfmt/tests/source/existential_type.rs",
@@ -85,8 +98,8 @@ static EXCLUDE_FILES: &[&str] = &[
"tests/codegen-units/item-collection/non-generic-closures.rs",
"tests/debuginfo/recursive-enum.rs",
"tests/pretty/closure-reform-pretty.rs",
- "tests/run-make-fulldeps/reproducible-build-2/reproducible-build.rs",
- "tests/run-make-fulldeps/reproducible-build/reproducible-build.rs",
+ "tests/run-make/reproducible-build-2/reproducible-build.rs",
+ "tests/run-make/reproducible-build/reproducible-build.rs",
"tests/ui/auxiliary/typeid-intrinsic-aux1.rs",
"tests/ui/auxiliary/typeid-intrinsic-aux2.rs",
"tests/ui/impl-trait/generic-with-implicit-hrtb-without-dyn.rs",
@@ -95,10 +108,6 @@ static EXCLUDE_FILES: &[&str] = &[
"tests/ui/lifetimes/bare-trait-object.rs",
"tests/ui/parser/bounds-obj-parens.rs",
- // Old type ascription expression syntax
- "src/tools/rustfmt/tests/source/type-ascription.rs",
- "src/tools/rustfmt/tests/target/type-ascription.rs",
-
// Obsolete box syntax
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0132_box_expr.rs",
@@ -107,7 +116,6 @@ static EXCLUDE_FILES: &[&str] = &[
// Various extensions to Rust syntax made up by rust-analyzer
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0012_type_item_where_clause.rs",
- "src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0040_crate_keyword_vis.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0058_range_pat.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0123_param_list_vararg.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rs",
@@ -280,21 +288,38 @@ pub fn clone_rust() {
if needs_clone {
download_and_unpack().unwrap();
}
+
let mut missing = String::new();
let test_src = Path::new("tests/rust");
+
+ let mut exclude_files_set = BTreeSet::new();
for exclude in EXCLUDE_FILES {
+ if !exclude_files_set.insert(exclude) {
+ panic!("duplicate path in EXCLUDE_FILES: {}", exclude);
+ }
+ for dir in EXCLUDE_DIRS {
+ if Path::new(exclude).starts_with(dir) {
+ panic!("excluded file {} is inside an excluded dir", exclude);
+ }
+ }
if !test_src.join(exclude).is_file() {
missing += "\ntests/rust/";
missing += exclude;
}
}
+
+ let mut exclude_dirs_set = BTreeSet::new();
for exclude in EXCLUDE_DIRS {
+ if !exclude_dirs_set.insert(exclude) {
+ panic!("duplicate path in EXCLUDE_DIRS: {}", exclude);
+ }
if !test_src.join(exclude).is_dir() {
missing += "\ntests/rust/";
missing += exclude;
missing += "/";
}
}
+
if !missing.is_empty() {
panic!("excluded test file does not exist:{}\n", missing);
}
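The new validation in `clone_rust` catches maintenance mistakes in the exclude lists: duplicate entries and files that are already covered by an excluded directory. The same checks in isolation, as a sketch:

```rust
use std::collections::BTreeSet;
use std::path::Path;

// Standalone version of the checks added above: every entry must be unique,
// and no excluded file may live under an excluded directory.
fn validate(exclude_files: &[&str], exclude_dirs: &[&str]) {
    let mut seen = BTreeSet::new();
    for file in exclude_files {
        assert!(seen.insert(*file), "duplicate path in EXCLUDE_FILES: {file}");
        for dir in exclude_dirs {
            assert!(
                !Path::new(file).starts_with(dir),
                "excluded file {file} is inside an excluded dir"
            );
        }
    }
}

fn main() {
    validate(&["tests/ui/a.rs"], &["src/tools/rustfmt"]);
}
```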
diff --git a/tests/test_expr.rs b/tests/test_expr.rs
index c7230c6d..5d529bf1 100644
--- a/tests/test_expr.rs
+++ b/tests/test_expr.rs
@@ -85,7 +85,7 @@ fn test_tuple_multi_index() {
assert_eq!(expected, syn::parse_str(input).unwrap());
}
- for tokens in vec![
+ for tokens in [
quote!(tuple.0.0),
quote!(tuple .0.0),
quote!(tuple. 0.0),
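Replacing `vec![...]` with a plain array works because arrays implement `IntoIterator` by value since Rust 1.53, so the `for` loop can consume each `TokenStream` without a heap allocation. In miniature:

```rust
fn main() {
    // Arrays implement IntoIterator by value (Rust 1.53+), so the loop
    // consumes each element directly with no Vec allocation.
    for tokens in [String::from("tuple.0.0"), String::from("tuple .0.0")] {
        println!("{}", tokens.len());
    }
}
```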
diff --git a/tests/test_lit.rs b/tests/test_lit.rs
index 82d22900..bc50136f 100644
--- a/tests/test_lit.rs
+++ b/tests/test_lit.rs
@@ -1,6 +1,7 @@
#![allow(
clippy::float_cmp,
clippy::non_ascii_literal,
+ clippy::single_match_else,
clippy::uninlined_format_args
)]
@@ -13,14 +14,13 @@ use std::str::FromStr;
use syn::{Lit, LitFloat, LitInt, LitStr};
fn lit(s: &str) -> Lit {
- match TokenStream::from_str(s)
- .unwrap()
- .into_iter()
- .next()
- .unwrap()
- {
- TokenTree::Literal(lit) => Lit::new(lit),
- _ => panic!(),
+ let mut tokens = TokenStream::from_str(s).unwrap().into_iter();
+ match tokens.next().unwrap() {
+ TokenTree::Literal(lit) => {
+ assert!(tokens.next().is_none());
+ Lit::new(lit)
+ }
+ wrong => panic!("{:?}", wrong),
}
}
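The rewritten helper additionally asserts that the input tokenizes to exactly one literal, so a test string with trailing tokens now fails loudly instead of being silently truncated to its first token. For instance:

```rust
use proc_macro2::TokenStream;
use std::str::FromStr;

fn main() {
    // "1.0 2.0" tokenizes to two literals; under the stricter helper the
    // `tokens.next().is_none()` assertion would reject it.
    let stream = TokenStream::from_str("1.0 2.0").unwrap();
    assert_eq!(stream.into_iter().count(), 2);
}
```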
diff --git a/tests/test_precedence.rs b/tests/test_precedence.rs
index b49577f0..bf0510bb 100644
--- a/tests/test_precedence.rs
+++ b/tests/test_precedence.rs
@@ -30,6 +30,7 @@ extern crate rustc_ast_pretty;
extern crate rustc_data_structures;
extern crate rustc_driver;
extern crate rustc_span;
+extern crate smallvec;
extern crate thin_vec;
use crate::common::eq::SpanlessEq;
@@ -168,15 +169,17 @@ fn librustc_parse_and_rewrite(input: &str) -> Option<P<ast::Expr>> {
/// This method operates on librustc objects.
fn librustc_brackets(mut librustc_expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
use rustc_ast::ast::{
- Attribute, BinOpKind, Block, BorrowKind, Expr, ExprField, ExprKind, GenericArg,
- GenericBound, Local, LocalKind, Pat, Stmt, StmtKind, StructExpr, StructRest,
- TraitBoundModifier, Ty,
+ AssocItem, AssocItemKind, Attribute, BinOpKind, Block, BorrowKind, Expr, ExprField,
+ ExprKind, GenericArg, GenericBound, ItemKind, Local, LocalKind, Pat, Stmt, StmtKind,
+ StructExpr, StructRest, TraitBoundModifier, Ty,
};
use rustc_ast::mut_visit::{
- noop_visit_generic_arg, noop_visit_local, noop_visit_param_bound, MutVisitor,
+ noop_flat_map_assoc_item, noop_visit_generic_arg, noop_visit_item_kind, noop_visit_local,
+ noop_visit_param_bound, MutVisitor,
};
use rustc_data_structures::flat_map_in_place::FlatMapInPlace;
use rustc_span::DUMMY_SP;
+ use smallvec::SmallVec;
use std::mem;
use std::ops::DerefMut;
use thin_vec::ThinVec;
@@ -185,6 +188,17 @@ fn librustc_brackets(mut librustc_expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
failed: bool,
}
+ fn contains_let_chain(expr: &Expr) -> bool {
+ match &expr.kind {
+ ExprKind::Let(..) => true,
+ ExprKind::Binary(binop, left, right) => {
+ binop.node == BinOpKind::And
+ && (contains_let_chain(left) || contains_let_chain(right))
+ }
+ _ => false,
+ }
+ }
+
fn flat_map_field<T: MutVisitor>(mut f: ExprField, vis: &mut T) -> Vec<ExprField> {
if f.is_shorthand {
noop_visit_expr(&mut f.expr, vis);
@@ -241,12 +255,7 @@ fn librustc_brackets(mut librustc_expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
noop_visit_expr(e, self);
match e.kind {
ExprKind::Block(..) | ExprKind::If(..) | ExprKind::Let(..) => {}
- ExprKind::Binary(binop, ref left, ref right)
- if match (&left.kind, binop.node, &right.kind) {
- (ExprKind::Let(..), BinOpKind::And, _)
- | (_, BinOpKind::And, ExprKind::Let(..)) => true,
- _ => false,
- } => {}
+ ExprKind::Binary(..) if contains_let_chain(e) => {}
_ => {
let inner = mem::replace(
e,
@@ -300,6 +309,39 @@ fn librustc_brackets(mut librustc_expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
}
}
+ fn visit_item_kind(&mut self, item: &mut ItemKind) {
+ match item {
+ ItemKind::Const(const_item)
+ if !const_item.generics.params.is_empty()
+ || !const_item.generics.where_clause.predicates.is_empty() => {}
+ _ => noop_visit_item_kind(item, self),
+ }
+ }
+
+ fn flat_map_trait_item(&mut self, item: P<AssocItem>) -> SmallVec<[P<AssocItem>; 1]> {
+ match &item.kind {
+ AssocItemKind::Const(const_item)
+ if !const_item.generics.params.is_empty()
+ || !const_item.generics.where_clause.predicates.is_empty() =>
+ {
+ SmallVec::from([item])
+ }
+ _ => noop_flat_map_assoc_item(item, self),
+ }
+ }
+
+ fn flat_map_impl_item(&mut self, item: P<AssocItem>) -> SmallVec<[P<AssocItem>; 1]> {
+ match &item.kind {
+ AssocItemKind::Const(const_item)
+ if !const_item.generics.params.is_empty()
+ || !const_item.generics.where_clause.predicates.is_empty() =>
+ {
+ SmallVec::from([item])
+ }
+ _ => noop_flat_map_assoc_item(item, self),
+ }
+ }
+
// We don't want to look at expressions that might appear in patterns or
// types yet. We'll look into comparing those in the future. For now
// focus on expressions appearing in other places.
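All three new visitor overrides skip const items that carry generics or a where-clause, i.e. the unstable `generic_const_items` syntax, presumably to avoid inserting parentheses inside items that syn and rustc do not yet round-trip identically. The shape being skipped looks like this (shown as a string, since stable Rust rejects the item itself):

```rust
// Hypothetical example of the generic_const_items shape that the visitors
// above leave untouched.
const GENERIC_CONST_ITEM: &str = r#"
trait Trait {
    const DEFAULT<T>: Option<T>
    where
        T: Copy;
}
"#;

fn main() {
    println!("{GENERIC_CONST_ITEM}");
}
```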
@@ -334,28 +376,42 @@ fn syn_brackets(syn_expr: syn::Expr) -> syn::Expr {
struct ParenthesizeEveryExpr;
+ fn parenthesize(expr: Expr) -> Expr {
+ Expr::Paren(ExprParen {
+ attrs: Vec::new(),
+ expr: Box::new(expr),
+ paren_token: token::Paren::default(),
+ })
+ }
+
fn needs_paren(expr: &Expr) -> bool {
match expr {
Expr::Group(_) => unreachable!(),
Expr::If(_) | Expr::Unsafe(_) | Expr::Block(_) | Expr::Let(_) => false,
- Expr::Binary(bin) => match (&*bin.left, bin.op, &*bin.right) {
- (Expr::Let(_), BinOp::And(_), _) | (_, BinOp::And(_), Expr::Let(_)) => false,
- _ => true,
- },
+ Expr::Binary(_) => !contains_let_chain(expr),
_ => true,
}
}
+ fn contains_let_chain(expr: &Expr) -> bool {
+ match expr {
+ Expr::Let(_) => true,
+ Expr::Binary(expr) => {
+ matches!(expr.op, BinOp::And(_))
+ && (contains_let_chain(&expr.left) || contains_let_chain(&expr.right))
+ }
+ _ => false,
+ }
+ }
+
impl Fold for ParenthesizeEveryExpr {
fn fold_expr(&mut self, expr: Expr) -> Expr {
- if needs_paren(&expr) {
- Expr::Paren(ExprParen {
- attrs: Vec::new(),
- expr: Box::new(fold_expr(self, expr)),
- paren_token: token::Paren::default(),
- })
+ let needs_paren = needs_paren(&expr);
+ let folded = fold_expr(self, expr);
+ if needs_paren {
+ parenthesize(folded)
} else {
- fold_expr(self, expr)
+ folded
}
}
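Both halves of the test (the rustc-side visitor and the syn-side fold) now share the same rule: any `&&` chain that contains a `let` anywhere must be left unparenthesized, because wrapping the `let` operand in parentheses is a syntax error. The fold is also restructured so `fold_expr` runs exactly once before deciding whether to wrap. A stable-Rust illustration of why the exemption exists:

```rust
fn main() {
    // A let-chain condition such as
    //     if a && let Some(x) = b && x > 0 { ... }
    // cannot have its `let` operand parenthesized: `(let Some(x) = b)` is
    // rejected by the parser, so the precedence test skips parenthesizing
    // any `&&` chain that contains a `let`.
    let a = true;
    let b = Some(1);
    if a && matches!(b, Some(x) if x > 0) {
        println!("equivalent condition written without a let-chain");
    }
}
```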
diff --git a/tests/test_round_trip.rs b/tests/test_round_trip.rs
index 0ef47b20..c0af30d2 100644
--- a/tests/test_round_trip.rs
+++ b/tests/test_round_trip.rs
@@ -33,6 +33,7 @@ use rustc_errors::{translation, Diagnostic, PResult};
use rustc_session::parse::ParseSess;
use rustc_span::source_map::FilePathMapping;
use rustc_span::FileName;
+use std::borrow::Cow;
use std::fs;
use std::panic;
use std::path::Path;
@@ -154,7 +155,7 @@ fn librustc_parse(content: String, sess: &ParseSess) -> PResult<Crate> {
parse::parse_crate_from_source_str(name, content, sess)
}
-fn translate_message(diagnostic: &Diagnostic) -> String {
+fn translate_message(diagnostic: &Diagnostic) -> Cow<'static, str> {
thread_local! {
static FLUENT_BUNDLE: LazyFallbackBundle = {
let locale_resources = rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec();
@@ -186,7 +187,7 @@ fn translate_message(diagnostic: &Diagnostic) -> String {
let mut err = Vec::new();
let translated = fluent_bundle.format_pattern(value, Some(&args), &mut err);
assert!(err.is_empty());
- translated.into_owned()
+ Cow::Owned(translated.into_owned())
})
}
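Returning `Cow<'static, str>` from `translate_message` presumably lets other paths hand back borrowed text without allocating, while this fluent-formatting path still returns an owned string. The same shape in miniature (names are illustrative only):

```rust
use std::borrow::Cow;

// Borrow when the text is already static, allocate only when formatting is
// actually required.
fn message(code: Option<u32>) -> Cow<'static, str> {
    match code {
        Some(code) => Cow::Owned(format!("error code {code}")),
        None => Cow::Borrowed("unknown error"),
    }
}

fn main() {
    assert_eq!(message(None), "unknown error");
    assert_eq!(message(Some(7)), "error code 7");
}
```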