Backed out 3 changesets (bug 1929483) for causing build bustages @wgpu_ffi_generated.h.stub. CLOSED TREE

Backed out changeset d7135f84903e (bug 1929483)
Backed out changeset c0018aee81d9 (bug 1929483)
Backed out changeset fc17d046ad1e (bug 1929483)
commit 02a59fe25e (parent 4dac557ada)
Author: Goloman Adrian
Date:   2024-11-06 23:07:07 +02:00

23 changed files with 197 additions and 803 deletions

Cargo.lock (generated)

@@ -5996,9 +5996,9 @@ dependencies = [
[[package]]
name = "syn"
version = "2.0.87"
version = "2.0.86"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d"
checksum = "e89275301d38033efb81a6e60e3497e734dfcc62571f2854bf4b16690398824c"
dependencies = [
"proc-macro2",
"quote",
@@ -6134,18 +6134,18 @@ checksum = "aac81b6fd6beb5884b0cf3321b8117e6e5d47ecb6fc89f414cfdcca8b2fe2dd8"
[[package]]
name = "thiserror"
version = "1.0.68"
version = "1.0.66"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "02dd99dc800bbb97186339685293e1cc5d9df1f8fae2d0aecd9ff1c77efea892"
checksum = "5d171f59dbaa811dbbb1aee1e73db92ec2b122911a48e1390dfe327a821ddede"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "1.0.68"
version = "1.0.66"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a7c61ec9a6f64d2793d8a45faba21efbe3ced62a886d44c36a009b2b519b4c7e"
checksum = "b08be0f17bd307950653ce45db00cd31200d82b624b36e181337d9c7d92765b5"
dependencies = [
"proc-macro2",
"quote",


@@ -593,17 +593,10 @@ mod foreign {
impl HasErrorBufferType for CommandEncoderError {
fn error_type(&self) -> ErrorBufferType {
match self {
CommandEncoderError::Device(e) => e.error_type(),
CommandEncoderError::Invalid
| CommandEncoderError::NotRecording
| CommandEncoderError::Locked
| CommandEncoderError::InvalidColorAttachment(..)
| CommandEncoderError::InvalidResource(..)
// N.B: forced non-exhaustiveness
_ => ErrorBufferType::Validation,
}
// We can't classify this ourselves, because inner error classification is private. We
// may need some upstream work to do this properly. For now, we trust that this opaque
// type only ever represents `Validation`.
ErrorBufferType::Validation
}
}
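For context on the hunk above: the backout restores a blanket classification in which every `CommandEncoderError` is reported as a validation error, because the inner error classification is private upstream. A minimal, self-contained sketch of that pattern follows; only `Validation` and the trait/type names appear in the diff, the other enum variants and the `main` driver are assumptions.

```rust
// Trimmed-down model of the restored classification pattern (illustrative only).
#[derive(Debug, PartialEq)]
#[allow(dead_code)]
enum ErrorBufferType {
    Validation,
    OutOfMemory, // assumed variant, not taken from the hunk
    Internal,    // assumed variant, not taken from the hunk
}

trait HasErrorBufferType {
    fn error_type(&self) -> ErrorBufferType;
}

// Opaque stand-in for wgpu-core's CommandEncoderError.
struct CommandEncoderError;

impl HasErrorBufferType for CommandEncoderError {
    fn error_type(&self) -> ErrorBufferType {
        // The inner classification is private upstream, so the restored code
        // conservatively reports everything as a validation error.
        ErrorBufferType::Validation
    }
}

fn main() {
    assert_eq!(CommandEncoderError.error_type(), ErrorBufferType::Validation);
}
```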


@@ -613,8 +613,8 @@ user-login = "mbrubeck"
user-name = "Matt Brubeck"
[[publisher.syn]]
version = "2.0.87"
when = "2024-11-02"
version = "2.0.86"
when = "2024-10-31"
user-id = 3618
user-login = "dtolnay"
user-name = "David Tolnay"
@@ -627,15 +627,15 @@ user-login = "BurntSushi"
user-name = "Andrew Gallant"
[[publisher.thiserror]]
version = "1.0.68"
when = "2024-11-04"
version = "1.0.66"
when = "2024-10-31"
user-id = 3618
user-login = "dtolnay"
user-name = "David Tolnay"
[[publisher.thiserror-impl]]
version = "1.0.68"
when = "2024-11-04"
version = "1.0.66"
when = "2024-10-31"
user-id = 3618
user-login = "dtolnay"
user-name = "David Tolnay"

File diff suppressed because one or more lines are too long


@@ -13,7 +13,7 @@
edition = "2021"
rust-version = "1.61"
name = "syn"
version = "2.0.87"
version = "2.0.86"
authors = ["David Tolnay <dtolnay@gmail.com>"]
build = false
include = [


@@ -183,6 +183,52 @@ impl<'a> Cursor<'a> {
self.ptr == self.scope
}
/// If the cursor is pointing at a `Group` with the given delimiter, returns
/// a cursor into that group and one pointing to the next `TokenTree`.
pub fn group(mut self, delim: Delimiter) -> Option<(Cursor<'a>, DelimSpan, Cursor<'a>)> {
// If we're not trying to enter a none-delimited group, we want to
// ignore them. We have to make sure to _not_ ignore them when we want
// to enter them, of course. For obvious reasons.
if delim != Delimiter::None {
self.ignore_none();
}
if let Entry::Group(group, end_offset) = self.entry() {
if group.delimiter() == delim {
let span = group.delim_span();
let end_of_group = unsafe { self.ptr.add(*end_offset) };
let inside_of_group = unsafe { Cursor::create(self.ptr.add(1), end_of_group) };
let after_group = unsafe { Cursor::create(end_of_group, self.scope) };
return Some((inside_of_group, span, after_group));
}
}
None
}
pub(crate) fn any_group(self) -> Option<(Cursor<'a>, Delimiter, DelimSpan, Cursor<'a>)> {
if let Entry::Group(group, end_offset) = self.entry() {
let delimiter = group.delimiter();
let span = group.delim_span();
let end_of_group = unsafe { self.ptr.add(*end_offset) };
let inside_of_group = unsafe { Cursor::create(self.ptr.add(1), end_of_group) };
let after_group = unsafe { Cursor::create(end_of_group, self.scope) };
return Some((inside_of_group, delimiter, span, after_group));
}
None
}
pub(crate) fn any_group_token(self) -> Option<(Group, Cursor<'a>)> {
if let Entry::Group(group, end_offset) = self.entry() {
let end_of_group = unsafe { self.ptr.add(*end_offset) };
let after_group = unsafe { Cursor::create(end_of_group, self.scope) };
return Some((group.clone(), after_group));
}
None
}
/// If the cursor is pointing at a `Ident`, returns it along with a cursor
/// pointing at the next `TokenTree`.
pub fn ident(mut self) -> Option<(Ident, Cursor<'a>)> {
@@ -233,54 +279,6 @@ impl<'a> Cursor<'a> {
}
}
/// If the cursor is pointing at a `Group` with the given delimiter, returns
/// a cursor into that group and one pointing to the next `TokenTree`.
pub fn group(mut self, delim: Delimiter) -> Option<(Cursor<'a>, DelimSpan, Cursor<'a>)> {
// If we're not trying to enter a none-delimited group, we want to
// ignore them. We have to make sure to _not_ ignore them when we want
// to enter them, of course. For obvious reasons.
if delim != Delimiter::None {
self.ignore_none();
}
if let Entry::Group(group, end_offset) = self.entry() {
if group.delimiter() == delim {
let span = group.delim_span();
let end_of_group = unsafe { self.ptr.add(*end_offset) };
let inside_of_group = unsafe { Cursor::create(self.ptr.add(1), end_of_group) };
let after_group = unsafe { Cursor::create(end_of_group, self.scope) };
return Some((inside_of_group, span, after_group));
}
}
None
}
/// If the cursor is pointing at a `Group`, returns a cursor into the group
/// and one pointing to the next `TokenTree`.
pub fn any_group(self) -> Option<(Cursor<'a>, Delimiter, DelimSpan, Cursor<'a>)> {
if let Entry::Group(group, end_offset) = self.entry() {
let delimiter = group.delimiter();
let span = group.delim_span();
let end_of_group = unsafe { self.ptr.add(*end_offset) };
let inside_of_group = unsafe { Cursor::create(self.ptr.add(1), end_of_group) };
let after_group = unsafe { Cursor::create(end_of_group, self.scope) };
return Some((inside_of_group, delimiter, span, after_group));
}
None
}
pub(crate) fn any_group_token(self) -> Option<(Group, Cursor<'a>)> {
if let Entry::Group(group, end_offset) = self.entry() {
let end_of_group = unsafe { self.ptr.add(*end_offset) };
let after_group = unsafe { Cursor::create(end_of_group, self.scope) };
return Some((group.clone(), after_group));
}
None
}
/// Copies all remaining tokens visible from this cursor into a
/// `TokenStream`.
pub fn token_stream(self) -> TokenStream {
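The `group` accessors being moved around in this hunk are normally reached through `ParseStream::step`, the same way the `consume` closure later in this commit uses them. A small usage sketch against syn's public API; the helper name is hypothetical.

```rust
// Skip one brace-delimited group without parsing its contents, using
// Cursor::group from the hunk above. Returns whether a group was found.
use proc_macro2::Delimiter;
use syn::parse::{ParseStream, Parser, Result};

fn skip_braced_group(input: ParseStream) -> Result<bool> {
    input.step(|cursor| match cursor.group(Delimiter::Brace) {
        // `rest` points just past the group; returning it advances the stream.
        Some((_inside, _delim_span, rest)) => Ok((true, rest)),
        None => Ok((false, *cursor)),
    })
}

fn main() {
    // Parser is implemented for any fn(ParseStream) -> Result<T>.
    let consumed = skip_braced_group.parse_str("{ a b c }").unwrap();
    assert!(consumed);
}
```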


@@ -248,8 +248,6 @@ pub(crate) mod parsing {
use crate::parse::discouraged::Speculative as _;
use crate::parse::{Parse, ParseStream};
use crate::restriction::{FieldMutability, Visibility};
#[cfg(not(feature = "full"))]
use crate::scan_expr::scan_expr;
use crate::token;
use crate::ty::Type;
use crate::verbatim;
@@ -278,7 +276,7 @@ pub(crate) mod parsing {
let mut discriminant: Result<Expr> = ahead.parse();
if discriminant.is_ok() {
input.advance_to(&ahead);
} else if scan_expr(input).is_ok() {
} else if scan_lenient_discriminant(input).is_ok() {
discriminant = Ok(Expr::Verbatim(verbatim::between(&begin, input)));
}
discriminant?
@@ -296,6 +294,85 @@ pub(crate) mod parsing {
}
}
#[cfg(not(feature = "full"))]
pub(crate) fn scan_lenient_discriminant(input: ParseStream) -> Result<()> {
use crate::expr::Member;
use crate::lifetime::Lifetime;
use crate::lit::Lit;
use crate::lit::LitFloat;
use crate::op::{BinOp, UnOp};
use crate::path::{self, AngleBracketedGenericArguments};
use proc_macro2::Delimiter::{self, Brace, Bracket, Parenthesis};
let consume = |delimiter: Delimiter| {
Result::unwrap(input.step(|cursor| match cursor.group(delimiter) {
Some((_inside, _span, rest)) => Ok((true, rest)),
None => Ok((false, *cursor)),
}))
};
macro_rules! consume {
[$token:tt] => {
input.parse::<Option<Token![$token]>>().unwrap().is_some()
};
}
let mut initial = true;
let mut depth = 0usize;
loop {
if initial {
if consume![&] {
input.parse::<Option<Token![mut]>>()?;
} else if consume![if] || consume![match] || consume![while] {
depth += 1;
} else if input.parse::<Option<Lit>>()?.is_some()
|| (consume(Brace) || consume(Bracket) || consume(Parenthesis))
|| (consume![async] || consume![const] || consume![loop] || consume![unsafe])
&& (consume(Brace) || break)
{
initial = false;
} else if consume![let] {
while !consume![=] {
if !((consume![|] || consume![ref] || consume![mut] || consume![@])
|| (consume![!] || input.parse::<Option<Lit>>()?.is_some())
|| (consume![..=] || consume![..] || consume![&] || consume![_])
|| (consume(Brace) || consume(Bracket) || consume(Parenthesis)))
{
path::parsing::qpath(input, true)?;
}
}
} else if input.parse::<Option<Lifetime>>()?.is_some() && !consume![:] {
break;
} else if input.parse::<UnOp>().is_err() {
path::parsing::qpath(input, true)?;
initial = consume![!] || depth == 0 && input.peek(token::Brace);
}
} else if input.is_empty() || input.peek(Token![,]) {
return Ok(());
} else if depth > 0 && consume(Brace) {
if consume![else] && !consume(Brace) {
initial = consume![if] || break;
} else {
depth -= 1;
}
} else if input.parse::<BinOp>().is_ok() || (consume![..] | consume![=]) {
initial = true;
} else if consume![.] {
if input.parse::<Option<LitFloat>>()?.is_none()
&& (input.parse::<Member>()?.is_named() && consume![::])
{
AngleBracketedGenericArguments::do_parse(None, input)?;
}
} else if consume![as] {
input.parse::<Type>()?;
} else if !(consume(Brace) || consume(Bracket) || consume(Parenthesis)) {
break;
}
}
Err(input.error("unsupported expression"))
}
#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
impl Parse for FieldsNamed {
fn parse(input: ParseStream) -> Result<Self> {
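The restored `scan_lenient_discriminant` above lets non-"full" builds of syn skip over arbitrary enum discriminant expressions and keep them as `Expr::Verbatim`. For reference, the kind of input it has to tolerate is any const expression in discriminant position; the enum below is illustrative and not taken from the diff.

```rust
// Discriminants exercising the lenient scanner: literals, bit ops, casts, arithmetic.
#[allow(dead_code)]
enum Status {
    Ok = 0,
    Limit = (1 << 4) | 3,
    Overflow = u8::MAX as isize + 1,
}

fn main() {
    assert_eq!(Status::Limit as isize, 19);
    assert_eq!(Status::Overflow as isize, 256);
}
```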


@@ -1,17 +1,15 @@
use crate::attr::Attribute;
#[cfg(all(feature = "parsing", feature = "full"))]
use crate::error::Result;
#[cfg(feature = "parsing")]
use crate::ext::IdentExt as _;
#[cfg(feature = "full")]
use crate::generics::BoundLifetimes;
use crate::ident::Ident;
#[cfg(any(feature = "parsing", feature = "full"))]
#[cfg(feature = "full")]
use crate::lifetime::Lifetime;
use crate::lit::Lit;
use crate::mac::Macro;
use crate::op::{BinOp, UnOp};
#[cfg(feature = "parsing")]
#[cfg(all(feature = "parsing", feature = "full"))]
use crate::parse::ParseStream;
#[cfg(feature = "full")]
use crate::pat::Pat;
@@ -891,36 +889,6 @@ impl Expr {
parsing::parse_with_earlier_boundary_rule(input)
}
/// Returns whether the next token in the parse stream is one that might
/// possibly form the beginning of an expr.
///
/// This classification is a load-bearing part of the grammar of some Rust
/// expressions, notably `return` and `break`. For example `return < …` will
/// never parse `<` as a binary operator regardless of what comes after,
/// because `<` is a legal starting token for an expression and so it's
/// required to be continued as a return value, such as `return <Struct as
/// Trait>::CONST`. Meanwhile `return > …` treats the `>` as a binary
/// operator because it cannot be a starting token for any Rust expression.
#[cfg(feature = "parsing")]
#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
pub fn peek(input: ParseStream) -> bool {
input.peek(Ident::peek_any) // value name or keyword
|| input.peek(token::Paren) // tuple
|| input.peek(token::Bracket) // array
|| input.peek(token::Brace) // block
|| input.peek(Lit) // literal
|| input.peek(Token![!]) && !input.peek(Token![!=]) // operator not
|| input.peek(Token![-]) && !input.peek(Token![-=]) && !input.peek(Token![->]) // unary minus
|| input.peek(Token![*]) && !input.peek(Token![*=]) // dereference
|| input.peek(Token![|]) && !input.peek(Token![|=]) // closure
|| input.peek(Token![&]) && !input.peek(Token![&=]) // reference
|| input.peek(Token![..]) // range
|| input.peek(Token![<]) && !input.peek(Token![<=]) && !input.peek(Token![<<=]) // associated path
|| input.peek(Token![::]) // absolute path
|| input.peek(Lifetime) // labeled loop
|| input.peek(Token![#]) // expression attributes
}
#[cfg(all(feature = "parsing", feature = "full"))]
pub(crate) fn replace_attrs(&mut self, new: Vec<Attribute>) -> Vec<Attribute> {
match self {
@@ -1179,6 +1147,8 @@ pub(crate) mod parsing {
FieldValue, Index, Member,
};
#[cfg(feature = "full")]
use crate::ext::IdentExt as _;
#[cfg(feature = "full")]
use crate::generics::BoundLifetimes;
use crate::ident::Ident;
#[cfg(feature = "full")]
@@ -1296,6 +1266,25 @@ pub(crate) mod parsing {
}
}
#[cfg(feature = "full")]
fn can_begin_expr(input: ParseStream) -> bool {
input.peek(Ident::peek_any) // value name or keyword
|| input.peek(token::Paren) // tuple
|| input.peek(token::Bracket) // array
|| input.peek(token::Brace) // block
|| input.peek(Lit) // literal
|| input.peek(Token![!]) && !input.peek(Token![!=]) // operator not
|| input.peek(Token![-]) && !input.peek(Token![-=]) && !input.peek(Token![->]) // unary minus
|| input.peek(Token![*]) && !input.peek(Token![*=]) // dereference
|| input.peek(Token![|]) && !input.peek(Token![|=]) // closure
|| input.peek(Token![&]) && !input.peek(Token![&=]) // reference
|| input.peek(Token![..]) // range notation
|| input.peek(Token![<]) && !input.peek(Token![<=]) && !input.peek(Token![<<=]) // associated path
|| input.peek(Token![::]) // global path
|| input.peek(Lifetime) // labeled loop
|| input.peek(Token![#]) // expression attributes
}
#[cfg(feature = "full")]
fn parse_expr(
input: ParseStream,
@@ -2450,7 +2439,7 @@ pub(crate) mod parsing {
attrs: Vec::new(),
return_token: input.parse()?,
expr: {
if Expr::peek(input) {
if can_begin_expr(input) {
Some(input.parse()?)
} else {
None
@@ -2488,7 +2477,7 @@ pub(crate) mod parsing {
attrs: Vec::new(),
yield_token: input.parse()?,
expr: {
if Expr::peek(input) {
if can_begin_expr(input) {
Some(input.parse()?)
} else {
None
@@ -2701,7 +2690,7 @@ pub(crate) mod parsing {
}
input.advance_to(&ahead);
let expr = if Expr::peek(input) && (allow_struct.0 || !input.peek(token::Brace)) {
let expr = if can_begin_expr(input) && (allow_struct.0 || !input.peek(token::Brace)) {
Some(input.parse()?)
} else {
None
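The removed `Expr::peek` doc comment above explains why this classification matters after `return`: `<` can begin an expression, so a value is parsed, while a token that cannot begin an expression leaves the return value empty. A small check of that behaviour against syn's public API; it assumes syn is built with the `full` feature, which is not part of the default feature set.

```rust
// Requires syn with the "full" feature so that `return` expressions parse.
use quote::quote;
use syn::{Expr, ExprReturn};

fn main() {
    // `<` may begin an expression, so it is parsed as the return value.
    let with_value: Expr =
        syn::parse2(quote!(return <Vec<u8> as Default>::default())).unwrap();
    let Expr::Return(ExprReturn { expr, .. }) = with_value else {
        panic!("expected a return expression");
    };
    assert!(expr.is_some());

    // A bare `return` has no value to parse.
    let bare: Expr = syn::parse2(quote!(return)).unwrap();
    let Expr::Return(ExprReturn { expr, .. }) = bare else {
        panic!("expected a return expression");
    };
    assert!(expr.is_none());
}
```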


@@ -249,7 +249,7 @@
//! dynamic library libproc_macro from rustc toolchain.
// Syn types in rustdoc of other crates get linked to here.
#![doc(html_root_url = "https://docs.rs/syn/2.0.87")]
#![doc(html_root_url = "https://docs.rs/syn/2.0.86")]
#![cfg_attr(docsrs, feature(doc_cfg))]
#![deny(unsafe_op_in_unsafe_fn)]
#![allow(non_camel_case_types)]
@@ -264,7 +264,6 @@
clippy::derivable_impls,
clippy::diverging_sub_expression,
clippy::doc_markdown,
clippy::enum_glob_use,
clippy::expl_impl_clone_on_copy,
clippy::explicit_auto_deref,
clippy::if_not_else,
@@ -308,8 +307,6 @@
clippy::wildcard_imports,
)]
extern crate self as syn;
#[cfg(feature = "proc-macro")]
extern crate proc_macro;
@@ -512,9 +509,6 @@ pub use crate::restriction::{FieldMutability, VisRestricted, Visibility};
mod sealed;
#[cfg(all(feature = "parsing", feature = "derive", not(feature = "full")))]
mod scan_expr;
mod span;
#[cfg(all(feature = "parsing", feature = "printing"))]


@@ -1,264 +0,0 @@
use self::{Action::*, Input::*};
use proc_macro2::{Delimiter, Ident, Spacing, TokenTree};
use syn::parse::{ParseStream, Result};
use syn::{AngleBracketedGenericArguments, BinOp, Expr, ExprPath, Lifetime, Lit, Token, Type};
enum Input {
Keyword(&'static str),
Punct(&'static str),
ConsumeAny,
ConsumeBinOp,
ConsumeBrace,
ConsumeDelimiter,
ConsumeIdent,
ConsumeLifetime,
ConsumeLiteral,
ConsumeNestedBrace,
ExpectPath,
ExpectTurbofish,
ExpectType,
CanBeginExpr,
Otherwise,
Empty,
}
enum Action {
SetState(&'static [(Input, Action)]),
IncDepth,
DecDepth,
Finish,
}
static INIT: [(Input, Action); 28] = [
(ConsumeDelimiter, SetState(&POSTFIX)),
(Keyword("async"), SetState(&ASYNC)),
(Keyword("break"), SetState(&BREAK_LABEL)),
(Keyword("const"), SetState(&CONST)),
(Keyword("continue"), SetState(&CONTINUE)),
(Keyword("for"), SetState(&FOR)),
(Keyword("if"), IncDepth),
(Keyword("let"), SetState(&PATTERN)),
(Keyword("loop"), SetState(&BLOCK)),
(Keyword("match"), IncDepth),
(Keyword("move"), SetState(&CLOSURE)),
(Keyword("return"), SetState(&RETURN)),
(Keyword("static"), SetState(&CLOSURE)),
(Keyword("unsafe"), SetState(&BLOCK)),
(Keyword("while"), IncDepth),
(Keyword("yield"), SetState(&RETURN)),
(Keyword("_"), SetState(&POSTFIX)),
(Punct("!"), SetState(&INIT)),
(Punct("#"), SetState(&[(ConsumeDelimiter, SetState(&INIT))])),
(Punct("&"), SetState(&REFERENCE)),
(Punct("*"), SetState(&INIT)),
(Punct("-"), SetState(&INIT)),
(Punct("..="), SetState(&INIT)),
(Punct(".."), SetState(&RANGE)),
(Punct("|"), SetState(&CLOSURE_ARGS)),
(ConsumeLifetime, SetState(&[(Punct(":"), SetState(&INIT))])),
(ConsumeLiteral, SetState(&POSTFIX)),
(ExpectPath, SetState(&PATH)),
];
static POSTFIX: [(Input, Action); 10] = [
(Keyword("as"), SetState(&[(ExpectType, SetState(&POSTFIX))])),
(Punct("..="), SetState(&INIT)),
(Punct(".."), SetState(&RANGE)),
(Punct("."), SetState(&DOT)),
(Punct("?"), SetState(&POSTFIX)),
(ConsumeBinOp, SetState(&INIT)),
(Punct("="), SetState(&INIT)),
(ConsumeNestedBrace, SetState(&IF_THEN)),
(ConsumeDelimiter, SetState(&POSTFIX)),
(Empty, Finish),
];
static ASYNC: [(Input, Action); 3] = [
(Keyword("move"), SetState(&ASYNC)),
(Punct("|"), SetState(&CLOSURE_ARGS)),
(ConsumeBrace, SetState(&POSTFIX)),
];
static BLOCK: [(Input, Action); 1] = [(ConsumeBrace, SetState(&POSTFIX))];
static BREAK_LABEL: [(Input, Action); 2] = [
(ConsumeLifetime, SetState(&BREAK_VALUE)),
(Otherwise, SetState(&BREAK_VALUE)),
];
static BREAK_VALUE: [(Input, Action); 3] = [
(ConsumeNestedBrace, SetState(&IF_THEN)),
(CanBeginExpr, SetState(&INIT)),
(Otherwise, SetState(&POSTFIX)),
];
static CLOSURE: [(Input, Action); 6] = [
(Keyword("async"), SetState(&CLOSURE)),
(Keyword("move"), SetState(&CLOSURE)),
(Punct(","), SetState(&CLOSURE)),
(Punct(">"), SetState(&CLOSURE)),
(Punct("|"), SetState(&CLOSURE_ARGS)),
(ConsumeLifetime, SetState(&CLOSURE)),
];
static CLOSURE_ARGS: [(Input, Action); 2] = [
(Punct("|"), SetState(&CLOSURE_RET)),
(ConsumeAny, SetState(&CLOSURE_ARGS)),
];
static CLOSURE_RET: [(Input, Action); 2] = [
(Punct("->"), SetState(&[(ExpectType, SetState(&BLOCK))])),
(Otherwise, SetState(&INIT)),
];
static CONST: [(Input, Action); 2] = [
(Punct("|"), SetState(&CLOSURE_ARGS)),
(ConsumeBrace, SetState(&POSTFIX)),
];
static CONTINUE: [(Input, Action); 2] = [
(ConsumeLifetime, SetState(&POSTFIX)),
(Otherwise, SetState(&POSTFIX)),
];
static DOT: [(Input, Action); 3] = [
(Keyword("await"), SetState(&POSTFIX)),
(ConsumeIdent, SetState(&METHOD)),
(ConsumeLiteral, SetState(&POSTFIX)),
];
static FOR: [(Input, Action); 2] = [
(Punct("<"), SetState(&CLOSURE)),
(Otherwise, SetState(&PATTERN)),
];
static IF_ELSE: [(Input, Action); 2] = [(Keyword("if"), SetState(&INIT)), (ConsumeBrace, DecDepth)];
static IF_THEN: [(Input, Action); 2] =
[(Keyword("else"), SetState(&IF_ELSE)), (Otherwise, DecDepth)];
static METHOD: [(Input, Action); 1] = [(ExpectTurbofish, SetState(&POSTFIX))];
static PATH: [(Input, Action); 4] = [
(Punct("!="), SetState(&INIT)),
(Punct("!"), SetState(&INIT)),
(ConsumeNestedBrace, SetState(&IF_THEN)),
(Otherwise, SetState(&POSTFIX)),
];
static PATTERN: [(Input, Action); 15] = [
(ConsumeDelimiter, SetState(&PATTERN)),
(Keyword("box"), SetState(&PATTERN)),
(Keyword("in"), IncDepth),
(Keyword("mut"), SetState(&PATTERN)),
(Keyword("ref"), SetState(&PATTERN)),
(Keyword("_"), SetState(&PATTERN)),
(Punct("!"), SetState(&PATTERN)),
(Punct("&"), SetState(&PATTERN)),
(Punct("..="), SetState(&PATTERN)),
(Punct(".."), SetState(&PATTERN)),
(Punct("="), SetState(&INIT)),
(Punct("@"), SetState(&PATTERN)),
(Punct("|"), SetState(&PATTERN)),
(ConsumeLiteral, SetState(&PATTERN)),
(ExpectPath, SetState(&PATTERN)),
];
static RANGE: [(Input, Action); 6] = [
(Punct("..="), SetState(&INIT)),
(Punct(".."), SetState(&RANGE)),
(Punct("."), SetState(&DOT)),
(ConsumeNestedBrace, SetState(&IF_THEN)),
(Empty, Finish),
(Otherwise, SetState(&INIT)),
];
static RAW: [(Input, Action); 3] = [
(Keyword("const"), SetState(&INIT)),
(Keyword("mut"), SetState(&INIT)),
(Otherwise, SetState(&POSTFIX)),
];
static REFERENCE: [(Input, Action); 3] = [
(Keyword("mut"), SetState(&INIT)),
(Keyword("raw"), SetState(&RAW)),
(Otherwise, SetState(&INIT)),
];
static RETURN: [(Input, Action); 2] = [
(CanBeginExpr, SetState(&INIT)),
(Otherwise, SetState(&POSTFIX)),
];
pub(crate) fn scan_expr(input: ParseStream) -> Result<()> {
let mut state = INIT.as_slice();
let mut depth = 0usize;
'table: loop {
for rule in state {
if match rule.0 {
Input::Keyword(expected) => input.step(|cursor| match cursor.ident() {
Some((ident, rest)) if ident == expected => Ok((true, rest)),
_ => Ok((false, *cursor)),
})?,
Input::Punct(expected) => input.step(|cursor| {
let begin = *cursor;
let mut cursor = begin;
for (i, ch) in expected.chars().enumerate() {
match cursor.punct() {
Some((punct, _)) if punct.as_char() != ch => break,
Some((_, rest)) if i == expected.len() - 1 => {
return Ok((true, rest));
}
Some((punct, rest)) if punct.spacing() == Spacing::Joint => {
cursor = rest;
}
_ => break,
}
}
Ok((false, begin))
})?,
Input::ConsumeAny => input.parse::<Option<TokenTree>>()?.is_some(),
Input::ConsumeBinOp => input.parse::<BinOp>().is_ok(),
Input::ConsumeBrace | Input::ConsumeNestedBrace => {
(matches!(rule.0, Input::ConsumeBrace) || depth > 0)
&& input.step(|cursor| match cursor.group(Delimiter::Brace) {
Some((_inside, _span, rest)) => Ok((true, rest)),
None => Ok((false, *cursor)),
})?
}
Input::ConsumeDelimiter => input.step(|cursor| match cursor.any_group() {
Some((_inside, _delimiter, _span, rest)) => Ok((true, rest)),
None => Ok((false, *cursor)),
})?,
Input::ConsumeIdent => input.parse::<Option<Ident>>()?.is_some(),
Input::ConsumeLifetime => input.parse::<Option<Lifetime>>()?.is_some(),
Input::ConsumeLiteral => input.parse::<Option<Lit>>()?.is_some(),
Input::ExpectPath => {
input.parse::<ExprPath>()?;
true
}
Input::ExpectTurbofish => {
if input.peek(Token![::]) {
input.parse::<AngleBracketedGenericArguments>()?;
}
true
}
Input::ExpectType => {
Type::without_plus(input)?;
true
}
Input::CanBeginExpr => Expr::peek(input),
Input::Otherwise => true,
Input::Empty => input.is_empty() || input.peek(Token![,]),
} {
state = match rule.1 {
Action::SetState(next) => next,
Action::IncDepth => (depth += 1, &INIT).1,
Action::DecDepth => (depth -= 1, &POSTFIX).1,
Action::Finish => return if depth == 0 { Ok(()) } else { break },
};
continue 'table;
}
}
return Err(input.error("unsupported expression"));
}
}


@@ -498,7 +498,7 @@ spanless_eq_struct!(Fn; defaultness generics sig body);
spanless_eq_struct!(FnDecl; inputs output);
spanless_eq_struct!(FnHeader; constness coroutine_kind safety ext);
spanless_eq_struct!(FnSig; header decl span);
spanless_eq_struct!(ForeignMod; extern_span safety abi items);
spanless_eq_struct!(ForeignMod; safety abi items);
spanless_eq_struct!(FormatArgPosition; index kind span);
spanless_eq_struct!(FormatArgs; span template arguments);
spanless_eq_struct!(FormatArgument; kind expr);


@@ -49,7 +49,6 @@ use std::fs;
use std::path::Path;
use std::process;
use std::sync::atomic::{AtomicUsize, Ordering};
use syn::parse::Parser as _;
#[macro_use]
mod macros;
@@ -57,9 +56,6 @@ mod macros;
mod common;
mod repo;
#[path = "../src/scan_expr.rs"]
mod scan_expr;
#[test]
fn test_rustc_precedence() {
repo::rayon_init();
@@ -119,8 +115,7 @@ fn test_expressions(path: &Path, edition: Edition, exprs: Vec<syn::Expr>) -> (us
rustc_span::create_session_if_not_set_then(edition, |_| {
for expr in exprs {
let expr_tokens = expr.to_token_stream();
let source_code = expr_tokens.to_string();
let source_code = expr.to_token_stream().to_string();
let librustc_ast = if let Some(e) = librustc_parse_and_rewrite(&source_code) {
e
} else {
@@ -178,16 +173,6 @@ fn test_expressions(path: &Path, edition: Edition, exprs: Vec<syn::Expr>) -> (us
continue;
}
if scan_expr::scan_expr.parse2(expr_tokens).is_err() {
failed += 1;
errorf!(
"\nFAIL {} - failed to scan expr\n{}\n",
path.display(),
source_code,
);
continue;
}
passed += 1;
}
});
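The removed test code above applies `scan_expr` to a `TokenStream` through `syn::parse::Parser`: any `fn(ParseStream) -> Result<T>` gets that trait for free. A self-contained illustration with a trivial stand-in scanner; `scan_bang` is hypothetical and not part of syn.

```rust
use proc_macro2::TokenStream;
use quote::quote;
use syn::parse::{ParseStream, Parser as _, Result};
use syn::Token;

// Stand-in scanner: succeeds only on a single `!` token.
fn scan_bang(input: ParseStream) -> Result<()> {
    input.parse::<Token![!]>()?;
    Ok(())
}

fn main() {
    let ok: TokenStream = quote!(!);
    let not_ok: TokenStream = quote!(?);
    // `parse2` comes from the blanket `Parser` impl for parser functions,
    // the same mechanism the deleted test used for `scan_expr`.
    assert!(scan_bang.parse2(ok).is_ok());
    assert!(scan_bang.parse2(not_ok).is_err());
}
```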


@@ -1 +1 @@
{"files":{"Cargo.toml":"d180d6115d56268eafb6cdfb2a6eb59e0ab11447024232bb644798012c53dc23","LICENSE-APACHE":"62c7a1e35f56406896d7aa7ca52d0cc0d272ac022b5d2796e7d6905db8a3636a","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","src/ast.rs":"9b6cd6b1553483c99cd7e36aa422d37f4353c99b15da55534d28822f7fa7fd08","src/attr.rs":"1201dee8b1da10c4dcf5a673412bbd77cda31776deb70b3a423354eca83b917f","src/expand.rs":"50c30146e65a28ac4f6768e5e9d173bde0162b7ad7c5adc39e4eab6e69650371","src/fmt.rs":"63b7d8184308cb1ae1ed0f96980f086a4b255928b05ad9fb44ddcd9ee54c1250","src/generics.rs":"ac493703c9955400d4fab22cbbdbbc4bf4f6f72c112b34be8b784142142ff74f","src/lib.rs":"e114c846bdae34674d3921a344316b33934c467713f593d943c119e5ce73dd9a","src/prop.rs":"5ba613e38430831259f20b258f33d57dcb783fbaeeb49e5faffa7b2a7be99e67","src/scan_expr.rs":"d46ae7a3eaaa6476553db3164676ec71aa82bcd8a2e4cad59deb07893c79f1c8","src/span.rs":"430460a4fa0d1fa9c627c1ddd575d2b101778fea84217591e1a93a5f6a2a0132","src/valid.rs":"ac95253944fd360d3578d0643a7baabb2cfa6bf9fbced7a6ce1f7b0529a3bb98"},"package":"a7c61ec9a6f64d2793d8a45faba21efbe3ced62a886d44c36a009b2b519b4c7e"}
{"files":{"Cargo.toml":"9c8007030ac9efcfa08f9d45013fc1d4e9144f1bc7723121c2cd6b486102199e","LICENSE-APACHE":"62c7a1e35f56406896d7aa7ca52d0cc0d272ac022b5d2796e7d6905db8a3636a","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","src/ast.rs":"9b6cd6b1553483c99cd7e36aa422d37f4353c99b15da55534d28822f7fa7fd08","src/attr.rs":"96a4e4fb288556da13b4fc97f240b45aa2765499fb6a71247127e568f0e5ac21","src/expand.rs":"50c30146e65a28ac4f6768e5e9d173bde0162b7ad7c5adc39e4eab6e69650371","src/fmt.rs":"5d1cefc012403c2d4ff7ab2513c0ec559166df4271d5983a6463939b5ec8c3e1","src/generics.rs":"2076cde22271be355a8131a77add4b93f83ab0af4317cd2df5471fffa4f95c66","src/lib.rs":"5eea86c771e643328ad9bc3b881cce4bf9d50adae1b33e0d07645bdd9044003d","src/prop.rs":"5ba613e38430831259f20b258f33d57dcb783fbaeeb49e5faffa7b2a7be99e67","src/span.rs":"430460a4fa0d1fa9c627c1ddd575d2b101778fea84217591e1a93a5f6a2a0132","src/valid.rs":"ac95253944fd360d3578d0643a7baabb2cfa6bf9fbced7a6ce1f7b0529a3bb98"},"package":"b08be0f17bd307950653ce45db00cd31200d82b624b36e181337d9c7d92765b5"}


@@ -13,7 +13,7 @@
edition = "2021"
rust-version = "1.61"
name = "thiserror-impl"
version = "1.0.68"
version = "1.0.66"
authors = ["David Tolnay <dtolnay@gmail.com>"]
build = false
autolib = false
@@ -42,4 +42,4 @@ version = "1.0.74"
version = "1.0.35"
[dependencies.syn]
version = "2.0.87"
version = "2.0.86"


@@ -142,13 +142,6 @@ fn parse_error_attribute<'a>(attrs: &mut Attrs<'a>, attr: &'a Attribute) -> Resu
fn parse_token_expr(input: ParseStream, mut begin_expr: bool) -> Result<TokenStream> {
let mut tokens = Vec::new();
while !input.is_empty() {
if input.peek(token::Group) {
let group: TokenTree = input.parse()?;
tokens.push(group);
begin_expr = false;
continue;
}
if begin_expr && input.peek(Token![.]) {
if input.peek2(Ident) {
input.parse::<Token![.]>()?;


@@ -1,19 +1,17 @@
use crate::ast::Field;
use crate::attr::{Display, Trait};
use crate::scan_expr::scan_expr;
use proc_macro2::{TokenStream, TokenTree};
use quote::{format_ident, quote, quote_spanned};
use proc_macro2::TokenTree;
use quote::{format_ident, quote_spanned};
use std::collections::{BTreeSet as Set, HashMap as Map};
use syn::ext::IdentExt;
use syn::parse::discouraged::Speculative;
use syn::parse::{ParseStream, Parser};
use syn::{Expr, Ident, Index, LitStr, Member, Result, Token};
use syn::{Ident, Index, LitStr, Member, Result, Token};
impl Display<'_> {
// Transform `"error {var}"` to `"error {}", var`.
pub fn expand_shorthand(&mut self, fields: &[Field]) {
let raw_args = self.args.clone();
let mut named_args = explicit_named_args.parse2(raw_args).unwrap().named;
let mut named_args = explicit_named_args.parse2(raw_args).unwrap();
let mut member_index = Map::new();
for (i, field) in fields.iter().enumerate() {
member_index.insert(&field.member, i);
@@ -95,6 +93,11 @@ impl Display<'_> {
if formatvar.to_string().starts_with("r#") {
formatvar = format_ident!("r_{}", formatvar);
}
if formatvar.to_string().starts_with('_') {
// Work around leading underscore being rejected by 1.40 and
// older compilers. https://github.com/rust-lang/rust/pull/66847
formatvar = format_ident!("field_{}", formatvar);
}
out += &formatvar.to_string();
if !named_args.insert(formatvar.clone()) {
// Already specified in the format argument list.
@@ -119,102 +122,21 @@ impl Display<'_> {
}
}
struct FmtArguments {
named: Set<Ident>,
unnamed: bool,
}
#[allow(clippy::unnecessary_wraps)]
fn explicit_named_args(input: ParseStream) -> Result<FmtArguments> {
let ahead = input.fork();
if let Ok(set) = try_explicit_named_args(&ahead) {
input.advance_to(&ahead);
return Ok(set);
}
let ahead = input.fork();
if let Ok(set) = fallback_explicit_named_args(&ahead) {
input.advance_to(&ahead);
return Ok(set);
}
input.parse::<TokenStream>().unwrap();
Ok(FmtArguments {
named: Set::new(),
unnamed: false,
})
}
fn try_explicit_named_args(input: ParseStream) -> Result<FmtArguments> {
let mut syn_full = None;
let mut args = FmtArguments {
named: Set::new(),
unnamed: false,
};
fn explicit_named_args(input: ParseStream) -> Result<Set<Ident>> {
let mut named_args = Set::new();
while !input.is_empty() {
if input.peek(Token![,]) && input.peek2(Ident::peek_any) && input.peek3(Token![=]) {
input.parse::<Token![,]>()?;
if input.is_empty() {
break;
}
if input.peek(Ident::peek_any) && input.peek2(Token![=]) && !input.peek2(Token![==]) {
let ident = input.call(Ident::parse_any)?;
input.parse::<Token![=]>()?;
args.named.insert(ident);
named_args.insert(ident);
} else {
args.unnamed = true;
}
if *syn_full.get_or_insert_with(is_syn_full) {
let ahead = input.fork();
if ahead.parse::<Expr>().is_ok() {
input.advance_to(&ahead);
continue;
}
}
scan_expr(input)?;
}
Ok(args)
}
fn fallback_explicit_named_args(input: ParseStream) -> Result<FmtArguments> {
let mut args = FmtArguments {
named: Set::new(),
unnamed: false,
};
while !input.is_empty() {
if input.peek(Token![,])
&& input.peek2(Ident::peek_any)
&& input.peek3(Token![=])
&& !input.peek3(Token![==])
{
input.parse::<Token![,]>()?;
let ident = input.call(Ident::parse_any)?;
input.parse::<Token![=]>()?;
args.named.insert(ident);
input.parse::<TokenTree>()?;
}
}
Ok(args)
}
fn is_syn_full() -> bool {
// Expr::Block contains syn::Block which contains Vec<syn::Stmt>. In the
// current version of Syn, syn::Stmt is exhaustive and could only plausibly
// represent `trait Trait {}` in Stmt::Item which contains syn::Item. Most
// of the point of syn's non-"full" mode is to avoid compiling Item and the
// entire expansive syntax tree it comprises. So the following expression
// being parsed to Expr::Block is a reliable indication that "full" is
// enabled.
let test = quote!({
trait Trait {}
});
match syn::parse2(test) {
Ok(Expr::Verbatim(_)) | Err(_) => false,
Ok(Expr::Block(_)) => true,
Ok(_) => unreachable!(),
}
Ok(named_args)
}
fn take_int(read: &mut &str) -> String {
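The removed `is_syn_full` helper above relies on the detection trick spelled out in its comment: the tokens `{ trait Trait {} }` only parse to `Expr::Block` when syn's `full` feature is enabled, and come back as `Expr::Verbatim` (or an error) otherwise. The same probe, repackaged as a stand-alone sketch:

```rust
use quote::quote;
use syn::Expr;

fn main() {
    // Mirrors the probe in the removed helper: an item inside a block is only
    // representable when syn is compiled with the "full" feature.
    let probe = quote!({
        trait Trait {}
    });
    let full_enabled = match syn::parse2::<Expr>(probe) {
        Ok(Expr::Block(_)) => true,
        Ok(Expr::Verbatim(_)) | Err(_) => false,
        Ok(_) => unreachable!("the probe can only be a block or verbatim"),
    };
    println!("syn \"full\" feature enabled: {full_enabled}");
}
```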


@@ -57,6 +57,7 @@ impl InferredBounds {
}
}
#[allow(clippy::type_repetition_in_bounds, clippy::trait_duplication_in_bounds)] // clippy bug: https://github.com/rust-lang/rust-clippy/issues/8771
pub fn insert(&mut self, ty: impl ToTokens, bound: impl ToTokens) {
let ty = ty.to_token_stream();
let bound = bound.to_token_stream();


@@ -2,7 +2,6 @@
clippy::blocks_in_conditions,
clippy::cast_lossless,
clippy::cast_possible_truncation,
clippy::enum_glob_use,
clippy::manual_find,
clippy::manual_let_else,
clippy::manual_map,
@@ -24,7 +23,6 @@ mod expand;
mod fmt;
mod generics;
mod prop;
mod scan_expr;
mod span;
mod valid;


@@ -1,264 +0,0 @@
use self::{Action::*, Input::*};
use proc_macro2::{Delimiter, Ident, Spacing, TokenTree};
use syn::parse::{ParseStream, Result};
use syn::{AngleBracketedGenericArguments, BinOp, Expr, ExprPath, Lifetime, Lit, Token, Type};
enum Input {
Keyword(&'static str),
Punct(&'static str),
ConsumeAny,
ConsumeBinOp,
ConsumeBrace,
ConsumeDelimiter,
ConsumeIdent,
ConsumeLifetime,
ConsumeLiteral,
ConsumeNestedBrace,
ExpectPath,
ExpectTurbofish,
ExpectType,
CanBeginExpr,
Otherwise,
Empty,
}
enum Action {
SetState(&'static [(Input, Action)]),
IncDepth,
DecDepth,
Finish,
}
static INIT: [(Input, Action); 28] = [
(ConsumeDelimiter, SetState(&POSTFIX)),
(Keyword("async"), SetState(&ASYNC)),
(Keyword("break"), SetState(&BREAK_LABEL)),
(Keyword("const"), SetState(&CONST)),
(Keyword("continue"), SetState(&CONTINUE)),
(Keyword("for"), SetState(&FOR)),
(Keyword("if"), IncDepth),
(Keyword("let"), SetState(&PATTERN)),
(Keyword("loop"), SetState(&BLOCK)),
(Keyword("match"), IncDepth),
(Keyword("move"), SetState(&CLOSURE)),
(Keyword("return"), SetState(&RETURN)),
(Keyword("static"), SetState(&CLOSURE)),
(Keyword("unsafe"), SetState(&BLOCK)),
(Keyword("while"), IncDepth),
(Keyword("yield"), SetState(&RETURN)),
(Keyword("_"), SetState(&POSTFIX)),
(Punct("!"), SetState(&INIT)),
(Punct("#"), SetState(&[(ConsumeDelimiter, SetState(&INIT))])),
(Punct("&"), SetState(&REFERENCE)),
(Punct("*"), SetState(&INIT)),
(Punct("-"), SetState(&INIT)),
(Punct("..="), SetState(&INIT)),
(Punct(".."), SetState(&RANGE)),
(Punct("|"), SetState(&CLOSURE_ARGS)),
(ConsumeLifetime, SetState(&[(Punct(":"), SetState(&INIT))])),
(ConsumeLiteral, SetState(&POSTFIX)),
(ExpectPath, SetState(&PATH)),
];
static POSTFIX: [(Input, Action); 10] = [
(Keyword("as"), SetState(&[(ExpectType, SetState(&POSTFIX))])),
(Punct("..="), SetState(&INIT)),
(Punct(".."), SetState(&RANGE)),
(Punct("."), SetState(&DOT)),
(Punct("?"), SetState(&POSTFIX)),
(ConsumeBinOp, SetState(&INIT)),
(Punct("="), SetState(&INIT)),
(ConsumeNestedBrace, SetState(&IF_THEN)),
(ConsumeDelimiter, SetState(&POSTFIX)),
(Empty, Finish),
];
static ASYNC: [(Input, Action); 3] = [
(Keyword("move"), SetState(&ASYNC)),
(Punct("|"), SetState(&CLOSURE_ARGS)),
(ConsumeBrace, SetState(&POSTFIX)),
];
static BLOCK: [(Input, Action); 1] = [(ConsumeBrace, SetState(&POSTFIX))];
static BREAK_LABEL: [(Input, Action); 2] = [
(ConsumeLifetime, SetState(&BREAK_VALUE)),
(Otherwise, SetState(&BREAK_VALUE)),
];
static BREAK_VALUE: [(Input, Action); 3] = [
(ConsumeNestedBrace, SetState(&IF_THEN)),
(CanBeginExpr, SetState(&INIT)),
(Otherwise, SetState(&POSTFIX)),
];
static CLOSURE: [(Input, Action); 6] = [
(Keyword("async"), SetState(&CLOSURE)),
(Keyword("move"), SetState(&CLOSURE)),
(Punct(","), SetState(&CLOSURE)),
(Punct(">"), SetState(&CLOSURE)),
(Punct("|"), SetState(&CLOSURE_ARGS)),
(ConsumeLifetime, SetState(&CLOSURE)),
];
static CLOSURE_ARGS: [(Input, Action); 2] = [
(Punct("|"), SetState(&CLOSURE_RET)),
(ConsumeAny, SetState(&CLOSURE_ARGS)),
];
static CLOSURE_RET: [(Input, Action); 2] = [
(Punct("->"), SetState(&[(ExpectType, SetState(&BLOCK))])),
(Otherwise, SetState(&INIT)),
];
static CONST: [(Input, Action); 2] = [
(Punct("|"), SetState(&CLOSURE_ARGS)),
(ConsumeBrace, SetState(&POSTFIX)),
];
static CONTINUE: [(Input, Action); 2] = [
(ConsumeLifetime, SetState(&POSTFIX)),
(Otherwise, SetState(&POSTFIX)),
];
static DOT: [(Input, Action); 3] = [
(Keyword("await"), SetState(&POSTFIX)),
(ConsumeIdent, SetState(&METHOD)),
(ConsumeLiteral, SetState(&POSTFIX)),
];
static FOR: [(Input, Action); 2] = [
(Punct("<"), SetState(&CLOSURE)),
(Otherwise, SetState(&PATTERN)),
];
static IF_ELSE: [(Input, Action); 2] = [(Keyword("if"), SetState(&INIT)), (ConsumeBrace, DecDepth)];
static IF_THEN: [(Input, Action); 2] =
[(Keyword("else"), SetState(&IF_ELSE)), (Otherwise, DecDepth)];
static METHOD: [(Input, Action); 1] = [(ExpectTurbofish, SetState(&POSTFIX))];
static PATH: [(Input, Action); 4] = [
(Punct("!="), SetState(&INIT)),
(Punct("!"), SetState(&INIT)),
(ConsumeNestedBrace, SetState(&IF_THEN)),
(Otherwise, SetState(&POSTFIX)),
];
static PATTERN: [(Input, Action); 15] = [
(ConsumeDelimiter, SetState(&PATTERN)),
(Keyword("box"), SetState(&PATTERN)),
(Keyword("in"), IncDepth),
(Keyword("mut"), SetState(&PATTERN)),
(Keyword("ref"), SetState(&PATTERN)),
(Keyword("_"), SetState(&PATTERN)),
(Punct("!"), SetState(&PATTERN)),
(Punct("&"), SetState(&PATTERN)),
(Punct("..="), SetState(&PATTERN)),
(Punct(".."), SetState(&PATTERN)),
(Punct("="), SetState(&INIT)),
(Punct("@"), SetState(&PATTERN)),
(Punct("|"), SetState(&PATTERN)),
(ConsumeLiteral, SetState(&PATTERN)),
(ExpectPath, SetState(&PATTERN)),
];
static RANGE: [(Input, Action); 6] = [
(Punct("..="), SetState(&INIT)),
(Punct(".."), SetState(&RANGE)),
(Punct("."), SetState(&DOT)),
(ConsumeNestedBrace, SetState(&IF_THEN)),
(Empty, Finish),
(Otherwise, SetState(&INIT)),
];
static RAW: [(Input, Action); 3] = [
(Keyword("const"), SetState(&INIT)),
(Keyword("mut"), SetState(&INIT)),
(Otherwise, SetState(&POSTFIX)),
];
static REFERENCE: [(Input, Action); 3] = [
(Keyword("mut"), SetState(&INIT)),
(Keyword("raw"), SetState(&RAW)),
(Otherwise, SetState(&INIT)),
];
static RETURN: [(Input, Action); 2] = [
(CanBeginExpr, SetState(&INIT)),
(Otherwise, SetState(&POSTFIX)),
];
pub(crate) fn scan_expr(input: ParseStream) -> Result<()> {
let mut state = INIT.as_slice();
let mut depth = 0usize;
'table: loop {
for rule in state {
if match rule.0 {
Input::Keyword(expected) => input.step(|cursor| match cursor.ident() {
Some((ident, rest)) if ident == expected => Ok((true, rest)),
_ => Ok((false, *cursor)),
})?,
Input::Punct(expected) => input.step(|cursor| {
let begin = *cursor;
let mut cursor = begin;
for (i, ch) in expected.chars().enumerate() {
match cursor.punct() {
Some((punct, _)) if punct.as_char() != ch => break,
Some((_, rest)) if i == expected.len() - 1 => {
return Ok((true, rest));
}
Some((punct, rest)) if punct.spacing() == Spacing::Joint => {
cursor = rest;
}
_ => break,
}
}
Ok((false, begin))
})?,
Input::ConsumeAny => input.parse::<Option<TokenTree>>()?.is_some(),
Input::ConsumeBinOp => input.parse::<BinOp>().is_ok(),
Input::ConsumeBrace | Input::ConsumeNestedBrace => {
(matches!(rule.0, Input::ConsumeBrace) || depth > 0)
&& input.step(|cursor| match cursor.group(Delimiter::Brace) {
Some((_inside, _span, rest)) => Ok((true, rest)),
None => Ok((false, *cursor)),
})?
}
Input::ConsumeDelimiter => input.step(|cursor| match cursor.any_group() {
Some((_inside, _delimiter, _span, rest)) => Ok((true, rest)),
None => Ok((false, *cursor)),
})?,
Input::ConsumeIdent => input.parse::<Option<Ident>>()?.is_some(),
Input::ConsumeLifetime => input.parse::<Option<Lifetime>>()?.is_some(),
Input::ConsumeLiteral => input.parse::<Option<Lit>>()?.is_some(),
Input::ExpectPath => {
input.parse::<ExprPath>()?;
true
}
Input::ExpectTurbofish => {
if input.peek(Token![::]) {
input.parse::<AngleBracketedGenericArguments>()?;
}
true
}
Input::ExpectType => {
Type::without_plus(input)?;
true
}
Input::CanBeginExpr => Expr::peek(input),
Input::Otherwise => true,
Input::Empty => input.is_empty() || input.peek(Token![,]),
} {
state = match rule.1 {
Action::SetState(next) => next,
Action::IncDepth => (depth += 1, &INIT).1,
Action::DecDepth => (depth -= 1, &POSTFIX).1,
Action::Finish => return if depth == 0 { Ok(()) } else { break },
};
continue 'table;
}
}
return Err(input.error("unsupported expression"));
}
}

File diff suppressed because one or more lines are too long


@@ -13,7 +13,7 @@
edition = "2021"
rust-version = "1.61"
name = "thiserror"
version = "1.0.68"
version = "1.0.66"
authors = ["David Tolnay <dtolnay@gmail.com>"]
build = "build.rs"
autolib = false
@@ -94,7 +94,7 @@ name = "test_transparent"
path = "tests/test_transparent.rs"
[dependencies.thiserror-impl]
version = "=1.0.68"
version = "=1.0.66"
[dev-dependencies.anyhow]
version = "1.0.73"


@@ -258,7 +258,7 @@
//!
//! [`anyhow`]: https://github.com/dtolnay/anyhow
#![doc(html_root_url = "https://docs.rs/thiserror/1.0.68")]
#![doc(html_root_url = "https://docs.rs/thiserror/1.0.66")]
#![allow(
clippy::module_name_repetitions,
clippy::needless_lifetimes,


@@ -1,7 +1,6 @@
#![allow(clippy::iter_cloned_collect, clippy::uninlined_format_args)]
use core::fmt::Display;
use std::path::PathBuf;
use thiserror::Error;
// Some of the elaborate cases from the rcc codebase, which is a C compiler in
@@ -51,7 +50,6 @@ pub enum RustupError {
},
}
#[track_caller]
fn assert<T: Display>(expected: &str, value: T) {
assert_eq!(expected, value.to_string());
}
@@ -88,29 +86,3 @@ fn test_rustup() {
},
);
}
// Regression test for https://github.com/dtolnay/thiserror/issues/335
#[test]
#[allow(non_snake_case)]
fn test_assoc_type_equality_constraint() {
pub trait Trait<T>: Display {
type A;
}
impl<T> Trait<T> for i32 {
type A = i32;
}
#[derive(Error, Debug)]
#[error("{A} {b}", b = &0 as &dyn Trait<i32, A = i32>)]
pub struct Error {
pub A: PathBuf,
}
assert(
"... 0",
Error {
A: PathBuf::from("..."),
},
);
}