Overhaul token collection.

This commit does the following.

- Renames `collect_tokens_trailing_token` as `collect_tokens`, because
  (a) it's annoyingly long, and (b) the `_trailing_token` bit is less
  accurate now that its types have changed.

- In `collect_tokens`, adds an `Option<CollectPos>` argument and a
  `UsePreAttrPos` in the return type of `f`. These are used in
  `parse_expr_force_collect` (for vanilla expressions) and in
  `parse_stmt_without_recovery` (for two different cases of expression
  statements). Together these are enough to fix all the problems with
  token collection and assoc expressions (a simplified sketch of the
  idea follows this message). The changes to the `stringify.rs` test
  demonstrate some of these.

- Adds a new test. The code in this test was causing an assertion
  failure prior to this commit, due to an invalid `NodeRange`.

The extra complexity is annoying, but necessary to fix the existing
problems.
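
Below is a minimal, self-contained sketch of the idea, not the rustc code
itself: the names mirror the diff, but the types and function body are
simplified stand-ins (the real `CollectPos` also snapshots the token cursor
and start token, and the real `collect_tokens` does far more).

struct CollectPos {
    start_pos: u32, // stand-in for the token-cursor snapshot kept by the real parser
}

enum UsePreAttrPos {
    No,
    Yes,
}

fn collect_tokens<R>(
    pre_attr_pos: Option<CollectPos>,
    post_attr_pos: CollectPos,
    f: impl FnOnce() -> (R, UsePreAttrPos),
) -> (R, u32) {
    let (ret, use_pre_attr_pos) = f();
    // For an assoc expression such as `#[attr] 1 + 2`, the outer attribute
    // belongs to the first sub-expression, so collection must start at the
    // pre-attribute position to cover the whole expression.
    let start = match use_pre_attr_pos {
        UsePreAttrPos::Yes => pre_attr_pos.expect("caller must supply pre_attr_pos").start_pos,
        UsePreAttrPos::No => post_attr_pos.start_pos,
    };
    (ret, start)
}

fn main() {
    let pre = CollectPos { start_pos: 0 }; // just before `#[attr]`
    let post = CollectPos { start_pos: 3 }; // just after `#[attr]`, at `1`
    let (_expr, start) = collect_tokens(Some(pre), post, || ("1 + 2", UsePreAttrPos::Yes));
    assert_eq!(start, 0); // tokens are collected for the full `#[attr] 1 + 2` range
}

In the real parser the callback also reports `Trailing`, and `CollectPos` is
produced by the new `Parser::collect_pos` method; see the `collect_tokens` and
`parse_expr_force_collect` hunks below.
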
Nicholas Nethercote 2024-08-06 17:16:40 +10:00
parent fe460ac28b
commit 9d31f86f0d
12 changed files with 414 additions and 292 deletions


@ -10,6 +10,7 @@ use tracing::debug;
use super::{
AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, ParserRange, PathStyle, Trailing,
UsePreAttrPos,
};
use crate::{errors, fluent_generated as fluent, maybe_whole};
@ -259,7 +260,8 @@ impl<'a> Parser<'a> {
pub fn parse_attr_item(&mut self, force_collect: ForceCollect) -> PResult<'a, ast::AttrItem> {
maybe_whole!(self, NtMeta, |attr| attr.into_inner());
let do_parse = |this: &mut Self, _empty_attrs| {
// Attr items don't have attributes.
self.collect_tokens(None, AttrWrapper::empty(), force_collect, |this, _empty_attrs| {
let is_unsafe = this.eat_keyword(kw::Unsafe);
let unsafety = if is_unsafe {
let unsafe_span = this.prev_token.span;
@ -275,10 +277,12 @@ impl<'a> Parser<'a> {
if is_unsafe {
this.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
}
Ok((ast::AttrItem { unsafety, path, args, tokens: None }, Trailing::No))
};
// Attr items don't have attributes.
self.collect_tokens_trailing_token(AttrWrapper::empty(), force_collect, do_parse)
Ok((
ast::AttrItem { unsafety, path, args, tokens: None },
Trailing::No,
UsePreAttrPos::No,
))
})
}
/// Parses attributes that appear after the opening of an item. These should
@ -311,8 +315,8 @@ impl<'a> Parser<'a> {
};
if let Some(attr) = attr {
// If we are currently capturing tokens (i.e. we are within a call to
// `Parser::collect_tokens_trailing_tokens`) record the token positions of this
// inner attribute, for possible later processing in a `LazyAttrTokenStream`.
// `Parser::collect_tokens`) record the token positions of this inner attribute,
// for possible later processing in a `LazyAttrTokenStream`.
if let Capturing::Yes = self.capture_state.capturing {
let end_pos = self.num_bump_calls;
let parser_range = ParserRange(start_pos..end_pos);


@ -15,6 +15,20 @@ use super::{
TokenCursor, Trailing,
};
// When collecting tokens, this fully captures the start point. Usually it's
// just after outer attributes, but occasionally it's before.
#[derive(Clone, Debug)]
pub(super) struct CollectPos {
start_token: (Token, Spacing),
cursor_snapshot: TokenCursor,
start_pos: u32,
}
pub(super) enum UsePreAttrPos {
No,
Yes,
}
/// A wrapper type to ensure that the parser handles outer attributes correctly.
/// When we parse outer attributes, we need to ensure that we capture tokens
/// for the attribute target. This allows us to perform cfg-expansion on
@ -22,7 +36,7 @@ use super::{
///
/// This wrapper prevents direct access to the underlying `ast::AttrVec`.
/// Parsing code can only get access to the underlying attributes
/// by passing an `AttrWrapper` to `collect_tokens_trailing_token`.
/// by passing an `AttrWrapper` to `collect_tokens`.
/// This makes it difficult to accidentally construct an AST node
/// (which stores an `ast::AttrVec`) without first collecting tokens.
///
@ -33,16 +47,18 @@ pub(super) struct AttrWrapper {
attrs: AttrVec,
// The start of the outer attributes in the parser's token stream.
// This lets us create a `NodeReplacement` for the entire attribute
// target, including outer attributes.
start_pos: u32,
// target, including outer attributes. `None` if there are no outer
// attributes.
start_pos: Option<u32>,
}
impl AttrWrapper {
pub(super) fn new(attrs: AttrVec, start_pos: u32) -> AttrWrapper {
AttrWrapper { attrs, start_pos }
AttrWrapper { attrs, start_pos: Some(start_pos) }
}
pub(super) fn empty() -> AttrWrapper {
AttrWrapper { attrs: AttrVec::new(), start_pos: u32::MAX }
AttrWrapper { attrs: AttrVec::new(), start_pos: None }
}
pub(super) fn take_for_recovery(self, psess: &ParseSess) -> AttrVec {
@ -77,7 +93,7 @@ fn has_cfg_or_cfg_attr(attrs: &[Attribute]) -> bool {
}
// From a value of this type we can reconstruct the `TokenStream` seen by the
// `f` callback passed to a call to `Parser::collect_tokens_trailing_token`, by
// `f` callback passed to a call to `Parser::collect_tokens`, by
// replaying the getting of the tokens. This saves us producing a `TokenStream`
// if it is never needed, e.g. a captured `macro_rules!` argument that is never
// passed to a proc macro. In practice, token stream creation happens rarely
@ -166,16 +182,30 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
}
impl<'a> Parser<'a> {
pub(super) fn collect_pos(&self) -> CollectPos {
CollectPos {
start_token: (self.token.clone(), self.token_spacing),
cursor_snapshot: self.token_cursor.clone(),
start_pos: self.num_bump_calls,
}
}
/// Parses code with `f`. If appropriate, it records the tokens (in
/// `LazyAttrTokenStream` form) that were parsed in the result, accessible
/// via the `HasTokens` trait. The `Trailing` part of the callback's
/// result indicates if an extra token should be captured, e.g. a comma or
/// semicolon.
/// semicolon. The `UsePreAttrPos` part of the callback's result indicates
/// if we should use `pre_attr_pos` as the collection start position (only
/// required in a few cases).
///
/// The `attrs` passed in are in `AttrWrapper` form, which is opaque. The
/// `AttrVec` within is passed to `f`. See the comment on `AttrWrapper` for
/// details.
///
/// `pre_attr_pos` is the position before the outer attributes (or the node
/// itself, if no outer attributes are present). It is only needed if `f`
/// can return `UsePreAttrPos::Yes`.
///
/// Note: If your callback consumes an opening delimiter (including the
/// case where `self.token` is an opening delimiter on entry to this
/// function), you must also consume the corresponding closing delimiter.
@ -197,11 +227,12 @@ impl<'a> Parser<'a> {
/// } // 32..33
/// } // 33..34
/// ```
pub(super) fn collect_tokens_trailing_token<R: HasAttrs + HasTokens>(
pub(super) fn collect_tokens<R: HasAttrs + HasTokens>(
&mut self,
pre_attr_pos: Option<CollectPos>,
attrs: AttrWrapper,
force_collect: ForceCollect,
f: impl FnOnce(&mut Self, ast::AttrVec) -> PResult<'a, (R, Trailing)>,
f: impl FnOnce(&mut Self, AttrVec) -> PResult<'a, (R, Trailing, UsePreAttrPos)>,
) -> PResult<'a, R> {
// We must collect if anything could observe the collected tokens, i.e.
// if any of the following conditions hold.
@ -220,23 +251,20 @@ impl<'a> Parser<'a> {
return Ok(f(self, attrs.attrs)?.0);
}
let start_token = (self.token.clone(), self.token_spacing);
let cursor_snapshot = self.token_cursor.clone();
let start_pos = self.num_bump_calls;
let mut collect_pos = self.collect_pos();
let has_outer_attrs = !attrs.attrs.is_empty();
let parser_replacements_start = self.capture_state.parser_replacements.len();
// We set and restore `Capturing::Yes` on either side of the call to
// `f`, so we can distinguish the outermost call to
// `collect_tokens_trailing_token` (e.g. parsing `m` in the example
// above) from any inner (indirectly recursive) calls (e.g. parsing `g`
// in the example above). This distinction is used below and in
// `Parser::parse_inner_attributes`.
let (mut ret, capture_trailing) = {
// `f`, so we can distinguish the outermost call to `collect_tokens`
// (e.g. parsing `m` in the example above) from any inner (indirectly
// recursive) calls (e.g. parsing `g` in the example above). This
// distinction is used below and in `Parser::parse_inner_attributes`.
let (mut ret, capture_trailing, use_pre_attr_pos) = {
let prev_capturing = mem::replace(&mut self.capture_state.capturing, Capturing::Yes);
let f_res = f(self, attrs.attrs);
let res = f(self, attrs.attrs);
self.capture_state.capturing = prev_capturing;
f_res?
res?
};
// When we're not in `capture_cfg` mode, then skip collecting and
@ -279,6 +307,14 @@ impl<'a> Parser<'a> {
return Ok(ret);
}
// Replace the post-attribute collection start position with the
// pre-attribute position supplied, if `f` indicated it is necessary.
// (The caller is responsible for providing a non-`None` `pre_attr_pos`
// if this is a possibility.)
if matches!(use_pre_attr_pos, UsePreAttrPos::Yes) {
collect_pos = pre_attr_pos.unwrap();
}
let parser_replacements_end = self.capture_state.parser_replacements.len();
assert!(
@ -294,7 +330,7 @@ impl<'a> Parser<'a> {
// `AttrTokenStream`, we will create the proper token.
+ self.break_last_token as u32;
let num_calls = end_pos - start_pos;
let num_calls = end_pos - collect_pos.start_pos;
// Take the captured `ParserRange`s for any inner attributes that we parsed in
// `Parser::parse_inner_attributes`, and pair them in a `ParserReplacement` with `None`,
@ -328,7 +364,9 @@ impl<'a> Parser<'a> {
.iter()
.cloned()
.chain(inner_attr_parser_replacements.iter().cloned())
.map(|(parser_range, data)| (NodeRange::new(parser_range, start_pos), data))
.map(|(parser_range, data)| {
(NodeRange::new(parser_range, collect_pos.start_pos), data)
})
.collect()
};
@ -355,9 +393,9 @@ impl<'a> Parser<'a> {
// - `tokens`: lazy tokens for `g` (with its inner attr deleted).
let tokens = LazyAttrTokenStream::new(LazyAttrTokenStreamImpl {
start_token,
start_token: collect_pos.start_token,
cursor_snapshot: collect_pos.cursor_snapshot,
num_calls,
cursor_snapshot,
break_last_token: self.break_last_token,
node_replacements,
});
@ -368,9 +406,9 @@ impl<'a> Parser<'a> {
}
// If `capture_cfg` is set and we're inside a recursive call to
// `collect_tokens_trailing_token`, then we need to register a replace range
// if we have `#[cfg]` or `#[cfg_attr]`. This allows us to run eager cfg-expansion
// on the captured token stream.
// `collect_tokens`, then we need to register a replace range if we
// have `#[cfg]` or `#[cfg_attr]`. This allows us to run eager
// cfg-expansion on the captured token stream.
if self.capture_cfg
&& matches!(self.capture_state.capturing, Capturing::Yes)
&& has_cfg_or_cfg_attr(ret.attrs())
@ -389,7 +427,8 @@ impl<'a> Parser<'a> {
// Set things up so that the entire AST node that we just parsed, including attributes,
// will be replaced with `target` in the lazy token stream. This will allow us to
// cfg-expand this AST node.
let start_pos = if has_outer_attrs { attrs.start_pos } else { start_pos };
let start_pos =
if has_outer_attrs { attrs.start_pos.unwrap() } else { collect_pos.start_pos };
let target = AttrsTarget { attrs: ret.attrs().iter().cloned().collect(), tokens };
self.capture_state
.parser_replacements


@ -2487,13 +2487,14 @@ impl<'a> Parser<'a> {
pub(super) fn handle_unambiguous_unbraced_const_arg(&mut self) -> PResult<'a, P<Expr>> {
let start = self.token.span;
let attrs = self.parse_outer_attributes()?;
let expr = self.parse_expr_res(Restrictions::CONST_EXPR, attrs).map_err(|mut err| {
err.span_label(
start.shrink_to_lo(),
"while parsing a const generic argument starting here",
);
err
})?;
let (expr, _) =
self.parse_expr_res(Restrictions::CONST_EXPR, attrs).map_err(|mut err| {
err.span_label(
start.shrink_to_lo(),
"while parsing a const generic argument starting here",
);
err
})?;
if !self.expr_is_valid_const_arg(&expr) {
self.dcx().emit_err(ConstGenericWithoutBraces {
span: expr.span,
@ -2613,7 +2614,7 @@ impl<'a> Parser<'a> {
let attrs = self.parse_outer_attributes()?;
self.parse_expr_res(Restrictions::CONST_EXPR, attrs)
})() {
Ok(expr) => {
Ok((expr, _)) => {
// Find a mistake like `MyTrait<Assoc == S::Assoc>`.
if snapshot.token == token::EqEq {
err.span_suggestion(
@ -2671,7 +2672,7 @@ impl<'a> Parser<'a> {
})() {
// Since we don't know the exact reason why we failed to parse the type or the
// expression, employ a simple heuristic to weed out some pathological cases.
Ok(expr) if let token::Comma | token::Gt = snapshot.token.kind => {
Ok((expr, _)) if let token::Comma | token::Gt = snapshot.token.kind => {
self.restore_snapshot(snapshot);
Some(expr)
}


@ -36,7 +36,7 @@ use super::pat::{CommaRecoveryMode, Expected, RecoverColon, RecoverComma};
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
use super::{
AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Restrictions,
SemiColonMode, SeqSep, TokenType, Trailing,
SemiColonMode, SeqSep, TokenType, Trailing, UsePreAttrPos,
};
use crate::{errors, maybe_recover_from_interpolated_ty_qpath};
@ -59,15 +59,30 @@ impl<'a> Parser<'a> {
self.current_closure.take();
let attrs = self.parse_outer_attributes()?;
self.parse_expr_res(Restrictions::empty(), attrs)
self.parse_expr_res(Restrictions::empty(), attrs).map(|res| res.0)
}
/// Parses an expression, forcing tokens to be collected.
pub fn parse_expr_force_collect(&mut self) -> PResult<'a, P<Expr>> {
self.current_closure.take();
// If the expression is associative (e.g. `1 + 2`), then any preceding
// outer attribute actually belongs to the first inner sub-expression.
// In which case we must use the pre-attr pos to include the attribute
// in the collected tokens for the outer expression.
let pre_attr_pos = self.collect_pos();
let attrs = self.parse_outer_attributes()?;
self.collect_tokens_no_attrs(|this| this.parse_expr_res(Restrictions::empty(), attrs))
self.collect_tokens(
Some(pre_attr_pos),
AttrWrapper::empty(),
ForceCollect::Yes,
|this, _empty_attrs| {
let (expr, is_assoc) = this.parse_expr_res(Restrictions::empty(), attrs)?;
let use_pre_attr_pos =
if is_assoc { UsePreAttrPos::Yes } else { UsePreAttrPos::No };
Ok((expr, Trailing::No, use_pre_attr_pos))
},
)
}
pub fn parse_expr_anon_const(&mut self) -> PResult<'a, AnonConst> {
@ -77,7 +92,7 @@ impl<'a> Parser<'a> {
fn parse_expr_catch_underscore(&mut self, restrictions: Restrictions) -> PResult<'a, P<Expr>> {
let attrs = self.parse_outer_attributes()?;
match self.parse_expr_res(restrictions, attrs) {
Ok(expr) => Ok(expr),
Ok((expr, _)) => Ok(expr),
Err(err) => match self.token.ident() {
Some((Ident { name: kw::Underscore, .. }, IdentIsRaw::No))
if self.may_recover() && self.look_ahead(1, |t| t == &token::Comma) =>
@ -104,18 +119,20 @@ impl<'a> Parser<'a> {
&mut self,
r: Restrictions,
attrs: AttrWrapper,
) -> PResult<'a, P<Expr>> {
) -> PResult<'a, (P<Expr>, bool)> {
self.with_res(r, |this| this.parse_expr_assoc_with(0, attrs))
}
/// Parses an associative expression with operators of at least `min_prec` precedence.
/// The `bool` in the return value indicates if it was an assoc expr, i.e. with an operator
/// followed by a subexpression (e.g. `1 + 2`).
pub(super) fn parse_expr_assoc_with(
&mut self,
min_prec: usize,
attrs: AttrWrapper,
) -> PResult<'a, P<Expr>> {
) -> PResult<'a, (P<Expr>, bool)> {
let lhs = if self.token.is_range_separator() {
return self.parse_expr_prefix_range(attrs);
return self.parse_expr_prefix_range(attrs).map(|res| (res, false));
} else {
self.parse_expr_prefix(attrs)?
};
@ -123,15 +140,17 @@ impl<'a> Parser<'a> {
}
/// Parses the rest of an associative expression (i.e. the part after the lhs) with operators
/// of at least `min_prec` precedence.
/// of at least `min_prec` precedence. The `bool` in the return value indicates if something
/// was actually parsed.
pub(super) fn parse_expr_assoc_rest_with(
&mut self,
min_prec: usize,
starts_stmt: bool,
mut lhs: P<Expr>,
) -> PResult<'a, P<Expr>> {
) -> PResult<'a, (P<Expr>, bool)> {
let mut parsed_something = false;
if !self.should_continue_as_assoc_expr(&lhs) {
return Ok(lhs);
return Ok((lhs, parsed_something));
}
self.expected_tokens.push(TokenType::Operator);
@ -156,10 +175,11 @@ impl<'a> Parser<'a> {
self.err_larrow_operator(self.token.span);
}
parsed_something = true;
self.bump();
if op.node.is_comparison() {
if let Some(expr) = self.check_no_chained_comparison(&lhs, &op)? {
return Ok(expr);
return Ok((expr, parsed_something));
}
}
@ -263,7 +283,7 @@ impl<'a> Parser<'a> {
// the special cases. The code is here only for future convenience.
Fixity::None => 1,
};
let rhs = self.with_res(restrictions - Restrictions::STMT_EXPR, |this| {
let (rhs, _) = self.with_res(restrictions - Restrictions::STMT_EXPR, |this| {
let attrs = this.parse_outer_attributes()?;
this.parse_expr_assoc_with(prec + prec_adjustment, attrs)
})?;
@ -319,7 +339,7 @@ impl<'a> Parser<'a> {
}
}
Ok(lhs)
Ok((lhs, parsed_something))
}
fn should_continue_as_assoc_expr(&mut self, lhs: &Expr) -> bool {
@ -441,7 +461,8 @@ impl<'a> Parser<'a> {
let attrs = self.parse_outer_attributes()?;
Some(
self.parse_expr_assoc_with(prec + 1, attrs)
.map_err(|err| self.maybe_err_dotdotlt_syntax(maybe_lt, err))?,
.map_err(|err| self.maybe_err_dotdotlt_syntax(maybe_lt, err))?
.0,
)
} else {
None
@ -498,7 +519,7 @@ impl<'a> Parser<'a> {
// RHS must be parsed with more associativity than the dots.
let attrs = this.parse_outer_attributes()?;
this.parse_expr_assoc_with(op.unwrap().precedence() + 1, attrs)
.map(|x| (lo.to(x.span), Some(x)))
.map(|(x, _)| (lo.to(x.span), Some(x)))
.map_err(|err| this.maybe_err_dotdotlt_syntax(maybe_lt, err))?
} else {
(lo, None)
@ -2335,7 +2356,7 @@ impl<'a> Parser<'a> {
let token = self.token.clone();
let attrs = self.parse_outer_attributes()?;
match self.parse_expr_res(restrictions, attrs) {
Ok(expr) => expr,
Ok((expr, _)) => expr,
Err(err) => self.recover_closure_body(err, before, prev, token, lo, decl_hi)?,
}
}
@ -2445,7 +2466,7 @@ impl<'a> Parser<'a> {
fn parse_fn_block_param(&mut self) -> PResult<'a, Param> {
let lo = self.token.span;
let attrs = self.parse_outer_attributes()?;
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
let pat = this.parse_pat_no_top_alt(Some(Expected::ParameterName), None)?;
let ty = if this.eat(&token::Colon) {
this.parse_ty()?
@ -2463,6 +2484,7 @@ impl<'a> Parser<'a> {
is_placeholder: false,
},
Trailing::from(this.token == token::Comma),
UsePreAttrPos::No,
))
})
}
@ -2583,7 +2605,7 @@ impl<'a> Parser<'a> {
/// Parses the condition of a `if` or `while` expression.
fn parse_expr_cond(&mut self) -> PResult<'a, P<Expr>> {
let attrs = self.parse_outer_attributes()?;
let mut cond =
let (mut cond, _) =
self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL | Restrictions::ALLOW_LET, attrs)?;
CondChecker::new(self).visit_expr(&mut cond);
@ -2632,7 +2654,7 @@ impl<'a> Parser<'a> {
self.expect(&token::Eq)?;
}
let attrs = self.parse_outer_attributes()?;
let expr = self.parse_expr_assoc_with(1 + prec_let_scrutinee_needs_par(), attrs)?;
let (expr, _) = self.parse_expr_assoc_with(1 + prec_let_scrutinee_needs_par(), attrs)?;
let span = lo.to(expr.span);
Ok(self.mk_expr(span, ExprKind::Let(pat, expr, span, recovered)))
}
@ -2766,7 +2788,7 @@ impl<'a> Parser<'a> {
// We know for sure we have seen `for ($SOMETHING in`. In the happy path this would
// happen right before the return of this method.
let attrs = self.parse_outer_attributes()?;
let expr = match self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs) {
let (expr, _) = match self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs) {
Ok(expr) => expr,
Err(expr_err) => {
// We don't know what followed the `in`, so cancel and bubble up the
@ -2801,7 +2823,7 @@ impl<'a> Parser<'a> {
}
self.check_for_for_in_in_typo(self.prev_token.span);
let attrs = self.parse_outer_attributes()?;
let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs)?;
let (expr, _) = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs)?;
Ok((pat, expr))
}
@ -2921,7 +2943,7 @@ impl<'a> Parser<'a> {
fn parse_expr_match(&mut self) -> PResult<'a, P<Expr>> {
let match_span = self.prev_token.span;
let attrs = self.parse_outer_attributes()?;
let scrutinee = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs)?;
let (scrutinee, _) = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs)?;
self.parse_match_block(match_span, match_span, scrutinee, MatchKind::Prefix)
}
@ -3069,7 +3091,7 @@ impl<'a> Parser<'a> {
pub(super) fn parse_arm(&mut self) -> PResult<'a, Arm> {
let attrs = self.parse_outer_attributes()?;
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
let lo = this.token.span;
let (pat, guard) = this.parse_match_arm_pat_and_guard()?;
@ -3126,7 +3148,7 @@ impl<'a> Parser<'a> {
let arm_start_span = this.token.span;
let attrs = this.parse_outer_attributes()?;
let expr =
let (expr, _) =
this.parse_expr_res(Restrictions::STMT_EXPR, attrs).map_err(|mut err| {
err.span_label(arrow_span, "while parsing the `match` arm starting here");
err
@ -3244,6 +3266,7 @@ impl<'a> Parser<'a> {
is_placeholder: false,
},
Trailing::No,
UsePreAttrPos::No,
))
})
}
@ -3334,8 +3357,9 @@ impl<'a> Parser<'a> {
fn parse_match_guard_condition(&mut self) -> PResult<'a, P<Expr>> {
let attrs = self.parse_outer_attributes()?;
self.parse_expr_res(Restrictions::ALLOW_LET | Restrictions::IN_IF_GUARD, attrs).map_err(
|mut err| {
match self.parse_expr_res(Restrictions::ALLOW_LET | Restrictions::IN_IF_GUARD, attrs) {
Ok((expr, _)) => Ok(expr),
Err(mut err) => {
if self.prev_token == token::OpenDelim(Delimiter::Brace) {
let sugg_sp = self.prev_token.span.shrink_to_lo();
// Consume everything within the braces, let's avoid further parse
@ -3355,9 +3379,9 @@ impl<'a> Parser<'a> {
err.span_suggestion_verbose(sugg_sp, msg, "=> ", applicability);
}
}
err
},
)
Err(err)
}
}
}
pub(crate) fn is_builtin(&self) -> bool {
@ -3708,7 +3732,7 @@ impl<'a> Parser<'a> {
fn parse_expr_field(&mut self) -> PResult<'a, ExprField> {
let attrs = self.parse_outer_attributes()?;
self.recover_vcs_conflict_marker();
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
let lo = this.token.span;
// Check if a colon exists one ahead. This means we're parsing a fieldname.
@ -3753,6 +3777,7 @@ impl<'a> Parser<'a> {
is_placeholder: false,
},
Trailing::from(this.token == token::Comma),
UsePreAttrPos::No,
))
})
}
@ -3846,7 +3871,7 @@ impl<'a> Parser<'a> {
attrs: AttrWrapper,
f: impl FnOnce(&mut Self, ast::AttrVec) -> PResult<'a, P<Expr>>,
) -> PResult<'a, P<Expr>> {
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
let res = f(this, attrs)?;
let trailing = Trailing::from(
this.restrictions.contains(Restrictions::STMT_EXPR)
@ -3856,7 +3881,7 @@ impl<'a> Parser<'a> {
// `#[attr] expr,` always captures a trailing comma.
|| this.token == token::Comma,
);
Ok((res, trailing))
Ok((res, trailing, UsePreAttrPos::No))
})
}
}


@ -7,7 +7,7 @@ use rustc_span::symbol::{kw, Ident};
use rustc_span::Span;
use thin_vec::ThinVec;
use super::{ForceCollect, Parser, Trailing};
use super::{ForceCollect, Parser, Trailing, UsePreAttrPos};
use crate::errors::{
self, MultipleWhereClauses, UnexpectedDefaultValueForLifetimeInGenericParameters,
UnexpectedSelfInGenericParameters, WhereClauseBeforeTupleStructBody,
@ -169,94 +169,88 @@ impl<'a> Parser<'a> {
let mut done = false;
while !done {
let attrs = self.parse_outer_attributes()?;
let param =
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
if this.eat_keyword_noexpect(kw::SelfUpper) {
// `Self` as a generic param is invalid. Here we emit the diagnostic and continue parsing
// as if `Self` never existed.
this.dcx().emit_err(UnexpectedSelfInGenericParameters {
span: this.prev_token.span,
});
let param = self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
if this.eat_keyword_noexpect(kw::SelfUpper) {
// `Self` as a generic param is invalid. Here we emit the diagnostic and continue parsing
// as if `Self` never existed.
this.dcx()
.emit_err(UnexpectedSelfInGenericParameters { span: this.prev_token.span });
// Eat a trailing comma, if it exists.
let _ = this.eat(&token::Comma);
}
// Eat a trailing comma, if it exists.
let _ = this.eat(&token::Comma);
}
let param = if this.check_lifetime() {
let lifetime = this.expect_lifetime();
// Parse lifetime parameter.
let (colon_span, bounds) = if this.eat(&token::Colon) {
(Some(this.prev_token.span), this.parse_lt_param_bounds())
} else {
(None, Vec::new())
};
if this.check_noexpect(&token::Eq)
&& this.look_ahead(1, |t| t.is_lifetime())
{
let lo = this.token.span;
// Parse `= 'lifetime`.
this.bump(); // `=`
this.bump(); // `'lifetime`
let span = lo.to(this.prev_token.span);
this.dcx().emit_err(
UnexpectedDefaultValueForLifetimeInGenericParameters { span },
);
}
Some(ast::GenericParam {
ident: lifetime.ident,
id: lifetime.id,
attrs,
bounds,
kind: ast::GenericParamKind::Lifetime,
is_placeholder: false,
colon_span,
})
} else if this.check_keyword(kw::Const) {
// Parse const parameter.
Some(this.parse_const_param(attrs)?)
} else if this.check_ident() {
// Parse type parameter.
Some(this.parse_ty_param(attrs)?)
} else if this.token.can_begin_type() {
// Trying to write an associated type bound? (#26271)
let snapshot = this.create_snapshot_for_diagnostic();
match this.parse_ty_where_predicate() {
Ok(where_predicate) => {
this.dcx().emit_err(errors::BadAssocTypeBounds {
span: where_predicate.span(),
});
// FIXME - try to continue parsing other generics?
return Ok((None, Trailing::No));
}
Err(err) => {
err.cancel();
// FIXME - maybe we should overwrite 'self' outside of `collect_tokens`?
this.restore_snapshot(snapshot);
return Ok((None, Trailing::No));
}
}
let param = if this.check_lifetime() {
let lifetime = this.expect_lifetime();
// Parse lifetime parameter.
let (colon_span, bounds) = if this.eat(&token::Colon) {
(Some(this.prev_token.span), this.parse_lt_param_bounds())
} else {
// Check for trailing attributes and stop parsing.
if !attrs.is_empty() {
if !params.is_empty() {
this.dcx()
.emit_err(errors::AttrAfterGeneric { span: attrs[0].span });
} else {
this.dcx()
.emit_err(errors::AttrWithoutGenerics { span: attrs[0].span });
}
}
return Ok((None, Trailing::No));
(None, Vec::new())
};
if !this.eat(&token::Comma) {
done = true;
if this.check_noexpect(&token::Eq) && this.look_ahead(1, |t| t.is_lifetime()) {
let lo = this.token.span;
// Parse `= 'lifetime`.
this.bump(); // `=`
this.bump(); // `'lifetime`
let span = lo.to(this.prev_token.span);
this.dcx().emit_err(UnexpectedDefaultValueForLifetimeInGenericParameters {
span,
});
}
// We just ate the comma, so no need to capture the trailing token.
Ok((param, Trailing::No))
})?;
Some(ast::GenericParam {
ident: lifetime.ident,
id: lifetime.id,
attrs,
bounds,
kind: ast::GenericParamKind::Lifetime,
is_placeholder: false,
colon_span,
})
} else if this.check_keyword(kw::Const) {
// Parse const parameter.
Some(this.parse_const_param(attrs)?)
} else if this.check_ident() {
// Parse type parameter.
Some(this.parse_ty_param(attrs)?)
} else if this.token.can_begin_type() {
// Trying to write an associated type bound? (#26271)
let snapshot = this.create_snapshot_for_diagnostic();
match this.parse_ty_where_predicate() {
Ok(where_predicate) => {
this.dcx().emit_err(errors::BadAssocTypeBounds {
span: where_predicate.span(),
});
// FIXME - try to continue parsing other generics?
}
Err(err) => {
err.cancel();
// FIXME - maybe we should overwrite 'self' outside of `collect_tokens`?
this.restore_snapshot(snapshot);
}
}
return Ok((None, Trailing::No, UsePreAttrPos::No));
} else {
// Check for trailing attributes and stop parsing.
if !attrs.is_empty() {
if !params.is_empty() {
this.dcx().emit_err(errors::AttrAfterGeneric { span: attrs[0].span });
} else {
this.dcx()
.emit_err(errors::AttrWithoutGenerics { span: attrs[0].span });
}
}
return Ok((None, Trailing::No, UsePreAttrPos::No));
};
if !this.eat(&token::Comma) {
done = true;
}
// We just ate the comma, so no need to capture the trailing token.
Ok((param, Trailing::No, UsePreAttrPos::No))
})?;
if let Some(param) = param {
params.push(param);


@ -20,7 +20,9 @@ use tracing::debug;
use super::diagnostics::{dummy_arg, ConsumeClosingDelim};
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
use super::{AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, Trailing};
use super::{
AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, Trailing, UsePreAttrPos,
};
use crate::errors::{self, MacroExpandsToAdtField};
use crate::{fluent_generated as fluent, maybe_whole};
@ -127,7 +129,7 @@ impl<'a> Parser<'a> {
Some(item.into_inner())
});
self.collect_tokens_trailing_token(attrs, force_collect, |this, mut attrs| {
self.collect_tokens(None, attrs, force_collect, |this, mut attrs| {
let lo = this.token.span;
let vis = this.parse_visibility(FollowedByType::No)?;
let mut def = this.parse_defaultness();
@ -145,7 +147,7 @@ impl<'a> Parser<'a> {
let span = lo.to(this.prev_token.span);
let id = DUMMY_NODE_ID;
let item = Item { ident, attrs, id, kind, vis, span, tokens: None };
return Ok((Some(item), Trailing::No));
return Ok((Some(item), Trailing::No, UsePreAttrPos::No));
}
// At this point, we have failed to parse an item.
@ -160,7 +162,7 @@ impl<'a> Parser<'a> {
if !attrs_allowed {
this.recover_attrs_no_item(&attrs)?;
}
Ok((None, Trailing::No))
Ok((None, Trailing::No, UsePreAttrPos::No))
})
}
@ -1546,86 +1548,82 @@ impl<'a> Parser<'a> {
self.recover_vcs_conflict_marker();
let help = "enum variants can be `Variant`, `Variant = <integer>`, \
`Variant(Type, ..., TypeN)` or `Variant { fields: Types }`";
self.collect_tokens_trailing_token(
variant_attrs,
ForceCollect::No,
|this, variant_attrs| {
let vlo = this.token.span;
self.collect_tokens(None, variant_attrs, ForceCollect::No, |this, variant_attrs| {
let vlo = this.token.span;
let vis = this.parse_visibility(FollowedByType::No)?;
if !this.recover_nested_adt_item(kw::Enum)? {
return Ok((None, Trailing::No));
}
let ident = this.parse_field_ident("enum", vlo)?;
let vis = this.parse_visibility(FollowedByType::No)?;
if !this.recover_nested_adt_item(kw::Enum)? {
return Ok((None, Trailing::No, UsePreAttrPos::No));
}
let ident = this.parse_field_ident("enum", vlo)?;
if this.token == token::Not {
if let Err(err) = this.unexpected() {
err.with_note(fluent::parse_macro_expands_to_enum_variant).emit();
}
this.bump();
this.parse_delim_args()?;
return Ok((None, Trailing::from(this.token == token::Comma)));
if this.token == token::Not {
if let Err(err) = this.unexpected() {
err.with_note(fluent::parse_macro_expands_to_enum_variant).emit();
}
let struct_def = if this.check(&token::OpenDelim(Delimiter::Brace)) {
// Parse a struct variant.
let (fields, recovered) =
match this.parse_record_struct_body("struct", ident.span, false) {
Ok((fields, recovered)) => (fields, recovered),
Err(mut err) => {
if this.token == token::Colon {
// We handle `enum` to `struct` suggestion in the caller.
return Err(err);
}
this.eat_to_tokens(&[&token::CloseDelim(Delimiter::Brace)]);
this.bump(); // }
err.span_label(span, "while parsing this enum");
err.help(help);
let guar = err.emit();
(thin_vec![], Recovered::Yes(guar))
}
};
VariantData::Struct { fields, recovered: recovered.into() }
} else if this.check(&token::OpenDelim(Delimiter::Parenthesis)) {
let body = match this.parse_tuple_struct_body() {
Ok(body) => body,
this.bump();
this.parse_delim_args()?;
return Ok((None, Trailing::from(this.token == token::Comma), UsePreAttrPos::No));
}
let struct_def = if this.check(&token::OpenDelim(Delimiter::Brace)) {
// Parse a struct variant.
let (fields, recovered) =
match this.parse_record_struct_body("struct", ident.span, false) {
Ok((fields, recovered)) => (fields, recovered),
Err(mut err) => {
if this.token == token::Colon {
// We handle `enum` to `struct` suggestion in the caller.
return Err(err);
}
this.eat_to_tokens(&[&token::CloseDelim(Delimiter::Parenthesis)]);
this.bump(); // )
this.eat_to_tokens(&[&token::CloseDelim(Delimiter::Brace)]);
this.bump(); // }
err.span_label(span, "while parsing this enum");
err.help(help);
err.emit();
thin_vec![]
let guar = err.emit();
(thin_vec![], Recovered::Yes(guar))
}
};
VariantData::Tuple(body, DUMMY_NODE_ID)
} else {
VariantData::Unit(DUMMY_NODE_ID)
VariantData::Struct { fields, recovered: recovered.into() }
} else if this.check(&token::OpenDelim(Delimiter::Parenthesis)) {
let body = match this.parse_tuple_struct_body() {
Ok(body) => body,
Err(mut err) => {
if this.token == token::Colon {
// We handle `enum` to `struct` suggestion in the caller.
return Err(err);
}
this.eat_to_tokens(&[&token::CloseDelim(Delimiter::Parenthesis)]);
this.bump(); // )
err.span_label(span, "while parsing this enum");
err.help(help);
err.emit();
thin_vec![]
}
};
VariantData::Tuple(body, DUMMY_NODE_ID)
} else {
VariantData::Unit(DUMMY_NODE_ID)
};
let disr_expr =
if this.eat(&token::Eq) { Some(this.parse_expr_anon_const()?) } else { None };
let disr_expr =
if this.eat(&token::Eq) { Some(this.parse_expr_anon_const()?) } else { None };
let vr = ast::Variant {
ident,
vis,
id: DUMMY_NODE_ID,
attrs: variant_attrs,
data: struct_def,
disr_expr,
span: vlo.to(this.prev_token.span),
is_placeholder: false,
};
let vr = ast::Variant {
ident,
vis,
id: DUMMY_NODE_ID,
attrs: variant_attrs,
data: struct_def,
disr_expr,
span: vlo.to(this.prev_token.span),
is_placeholder: false,
};
Ok((Some(vr), Trailing::from(this.token == token::Comma)))
},
)
Ok((Some(vr), Trailing::from(this.token == token::Comma), UsePreAttrPos::No))
})
.map_err(|mut err| {
err.help(help);
err
@ -1777,7 +1775,7 @@ impl<'a> Parser<'a> {
// Unit like structs are handled in parse_item_struct function
self.parse_paren_comma_seq(|p| {
let attrs = p.parse_outer_attributes()?;
p.collect_tokens_trailing_token(attrs, ForceCollect::No, |p, attrs| {
p.collect_tokens(None, attrs, ForceCollect::No, |p, attrs| {
let mut snapshot = None;
if p.is_vcs_conflict_marker(&TokenKind::BinOp(token::Shl), &TokenKind::Lt) {
// Account for `<<<<<<<` diff markers. We can't proactively error here because
@ -1816,6 +1814,7 @@ impl<'a> Parser<'a> {
is_placeholder: false,
},
Trailing::from(p.token == token::Comma),
UsePreAttrPos::No,
))
})
})
@ -1827,11 +1826,11 @@ impl<'a> Parser<'a> {
self.recover_vcs_conflict_marker();
let attrs = self.parse_outer_attributes()?;
self.recover_vcs_conflict_marker();
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
let lo = this.token.span;
let vis = this.parse_visibility(FollowedByType::No)?;
this.parse_single_struct_field(adt_ty, lo, vis, attrs)
.map(|field| (field, Trailing::No))
.map(|field| (field, Trailing::No, UsePreAttrPos::No))
})
}
@ -2806,12 +2805,12 @@ impl<'a> Parser<'a> {
fn parse_param_general(&mut self, req_name: ReqName, first_param: bool) -> PResult<'a, Param> {
let lo = self.token.span;
let attrs = self.parse_outer_attributes()?;
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
// Possibly parse `self`. Recover if we parsed it and it wasn't allowed here.
if let Some(mut param) = this.parse_self_param()? {
param.attrs = attrs;
let res = if first_param { Ok(param) } else { this.recover_bad_self_param(param) };
return Ok((res?, Trailing::No));
return Ok((res?, Trailing::No, UsePreAttrPos::No));
}
let is_name_required = match this.token.kind {
@ -2827,7 +2826,7 @@ impl<'a> Parser<'a> {
this.parameter_without_type(&mut err, pat, is_name_required, first_param)
{
let guar = err.emit();
Ok((dummy_arg(ident, guar), Trailing::No))
Ok((dummy_arg(ident, guar), Trailing::No, UsePreAttrPos::No))
} else {
Err(err)
};
@ -2871,6 +2870,7 @@ impl<'a> Parser<'a> {
Ok((
Param { attrs, id: ast::DUMMY_NODE_ID, is_placeholder: false, pat, span, ty },
Trailing::No,
UsePreAttrPos::No,
))
})
}


@ -14,7 +14,7 @@ use std::assert_matches::debug_assert_matches;
use std::ops::Range;
use std::{fmt, mem, slice};
use attr_wrapper::AttrWrapper;
use attr_wrapper::{AttrWrapper, UsePreAttrPos};
pub use diagnostics::AttemptLocalParseRecovery;
pub(crate) use expr::ForbiddenLetReason;
pub(crate) use item::FnParseMode;
@ -254,7 +254,7 @@ enum Capturing {
Yes,
}
// This state is used by `Parser::collect_tokens_trailing_token`.
// This state is used by `Parser::collect_tokens`.
#[derive(Clone, Debug)]
struct CaptureState {
capturing: Capturing,
@ -466,8 +466,8 @@ impl<'a> Parser<'a> {
parser.bump();
// Change this from 1 back to 0 after the bump. This eases debugging of
// `Parser::collect_tokens_trailing_token` nicer because it makes the
// token positions 0-indexed which is nicer than 1-indexed.
// `Parser::collect_tokens` because 0-indexed token positions are nicer
// than 1-indexed token positions.
parser.num_bump_calls = 0;
parser
@ -1553,11 +1553,9 @@ impl<'a> Parser<'a> {
) -> PResult<'a, R> {
// The only reason to call `collect_tokens_no_attrs` is if you want tokens, so use
// `ForceCollect::Yes`
self.collect_tokens_trailing_token(
AttrWrapper::empty(),
ForceCollect::Yes,
|this, _attrs| Ok((f(this)?, Trailing::No)),
)
self.collect_tokens(None, AttrWrapper::empty(), ForceCollect::Yes, |this, _attrs| {
Ok((f(this)?, Trailing::No, UsePreAttrPos::No))
})
}
/// `::{` or `::*`


@ -13,7 +13,7 @@ use rustc_span::symbol::{kw, sym, Ident};
use rustc_span::{BytePos, ErrorGuaranteed, Span};
use thin_vec::{thin_vec, ThinVec};
use super::{ForceCollect, Parser, PathStyle, Restrictions, Trailing};
use super::{ForceCollect, Parser, PathStyle, Restrictions, Trailing, UsePreAttrPos};
use crate::errors::{
self, AmbiguousRangePattern, DotDotDotForRemainingFields, DotDotDotRangeToPatternNotAllowed,
DotDotDotRestPattern, EnumPatternInsteadOfIdentifier, ExpectedBindingLeftOfAt,
@ -403,7 +403,7 @@ impl<'a> Parser<'a> {
// Parse an associative expression such as `+ expr`, `% expr`, ...
// Assignments, ranges and `|` are disabled by [`Restrictions::IS_PAT`].
if let Ok(expr) =
if let Ok((expr, _)) =
snapshot.parse_expr_assoc_rest_with(0, false, expr).map_err(|err| err.cancel())
{
// We got a valid expression.
@ -1302,24 +1302,23 @@ impl<'a> Parser<'a> {
}
}
let field =
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
let field = match this.parse_pat_field(lo, attrs) {
Ok(field) => Ok(field),
Err(err) => {
if let Some(delayed_err) = delayed_err.take() {
delayed_err.emit();
}
return Err(err);
let field = self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
let field = match this.parse_pat_field(lo, attrs) {
Ok(field) => Ok(field),
Err(err) => {
if let Some(delayed_err) = delayed_err.take() {
delayed_err.emit();
}
}?;
ate_comma = this.eat(&token::Comma);
return Err(err);
}
}?;
ate_comma = this.eat(&token::Comma);
last_non_comma_dotdot_span = Some(this.prev_token.span);
last_non_comma_dotdot_span = Some(this.prev_token.span);
// We just ate a comma, so there's no need to capture a trailing token.
Ok((field, Trailing::No))
})?;
// We just ate a comma, so there's no need to capture a trailing token.
Ok((field, Trailing::No, UsePreAttrPos::No))
})?;
fields.push(field)
}


@ -913,7 +913,7 @@ impl<'a> Parser<'a> {
let snapshot = self.create_snapshot_for_diagnostic();
let attrs = self.parse_outer_attributes()?;
match self.parse_expr_res(Restrictions::CONST_EXPR, attrs) {
Ok(expr) => {
Ok((expr, _)) => {
return Ok(Some(self.dummy_const_arg_needs_braces(
self.dcx().struct_span_err(expr.span, "invalid const generic expression"),
expr.span,


@ -21,7 +21,7 @@ use super::pat::{PatternLocation, RecoverComma};
use super::path::PathStyle;
use super::{
AttrWrapper, BlockMode, FnParseMode, ForceCollect, Parser, Restrictions, SemiColonMode,
Trailing,
Trailing, UsePreAttrPos,
};
use crate::errors::MalformedLoopLabel;
use crate::{errors, maybe_whole};
@ -46,6 +46,7 @@ impl<'a> Parser<'a> {
capture_semi: bool,
force_collect: ForceCollect,
) -> PResult<'a, Option<Stmt>> {
let pre_attr_pos = self.collect_pos();
let attrs = self.parse_outer_attributes()?;
let lo = self.token.span;
@ -66,11 +67,15 @@ impl<'a> Parser<'a> {
}
Ok(Some(if self.token.is_keyword(kw::Let) {
self.collect_tokens_trailing_token(attrs, force_collect, |this, attrs| {
self.collect_tokens(None, attrs, force_collect, |this, attrs| {
this.expect_keyword(kw::Let)?;
let local = this.parse_local(attrs)?;
let trailing = Trailing::from(capture_semi && this.token == token::Semi);
Ok((this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)), trailing))
Ok((
this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)),
trailing,
UsePreAttrPos::No,
))
})?
} else if self.is_kw_followed_by_ident(kw::Mut) && self.may_recover() {
self.recover_stmt_local_after_let(
@ -104,10 +109,18 @@ impl<'a> Parser<'a> {
// or `auto trait` items. We aim to parse an arbitrary path `a::b` but not something
// that starts like a path (1 token), but is in fact not a path.
// Also, we avoid stealing syntax from `parse_item_`.
let stmt = self.collect_tokens_trailing_token(
//
// `UsePreAttrPos::Yes` here means the attribute belongs unconditionally to the
// expression, not the statement. (But the statement attributes/tokens are obtained
// from the expression anyway, because `Stmt` delegates `HasAttrs`/`HasTokens` to
// the things within `StmtKind`.)
let stmt = self.collect_tokens(
Some(pre_attr_pos),
AttrWrapper::empty(),
force_collect,
|this, _empty_attrs| Ok((this.parse_stmt_path_start(lo, attrs)?, Trailing::No)),
|this, _empty_attrs| {
Ok((this.parse_stmt_path_start(lo, attrs)?, Trailing::No, UsePreAttrPos::Yes))
},
);
match stmt {
Ok(stmt) => stmt,
@ -129,12 +142,15 @@ impl<'a> Parser<'a> {
self.error_outer_attrs(attrs);
self.mk_stmt(lo, StmtKind::Empty)
} else if self.token != token::CloseDelim(Delimiter::Brace) {
// Remainder are line-expr stmts.
let e = self.collect_tokens_trailing_token(
// Remainder are line-expr stmts. This is similar to the `parse_stmt_path_start` case
// above.
let e = self.collect_tokens(
Some(pre_attr_pos),
AttrWrapper::empty(),
force_collect,
|this, _empty_attrs| {
Ok((this.parse_expr_res(Restrictions::STMT_EXPR, attrs)?, Trailing::No))
let (expr, _) = this.parse_expr_res(Restrictions::STMT_EXPR, attrs)?;
Ok((expr, Trailing::No, UsePreAttrPos::Yes))
},
)?;
if matches!(e.kind, ExprKind::Assign(..)) && self.eat_keyword(kw::Else) {
@ -151,12 +167,16 @@ impl<'a> Parser<'a> {
}
fn parse_stmt_path_start(&mut self, lo: Span, attrs: AttrWrapper) -> PResult<'a, Stmt> {
let stmt = self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
let stmt = self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
let path = this.parse_path(PathStyle::Expr)?;
if this.eat(&token::Not) {
let stmt_mac = this.parse_stmt_mac(lo, attrs, path)?;
return Ok((stmt_mac, Trailing::from(this.token == token::Semi)));
return Ok((
stmt_mac,
Trailing::from(this.token == token::Semi),
UsePreAttrPos::No,
));
}
let expr = if this.eat(&token::OpenDelim(Delimiter::Brace)) {
@ -170,13 +190,17 @@ impl<'a> Parser<'a> {
this.parse_expr_dot_or_call_with(attrs, expr, lo)
})?;
// `DUMMY_SP` will get overwritten later in this function
Ok((this.mk_stmt(rustc_span::DUMMY_SP, StmtKind::Expr(expr)), Trailing::No))
Ok((
this.mk_stmt(rustc_span::DUMMY_SP, StmtKind::Expr(expr)),
Trailing::No,
UsePreAttrPos::No,
))
})?;
if let StmtKind::Expr(expr) = stmt.kind {
// Perform this outside of the `collect_tokens_trailing_token` closure,
// since our outer attributes do not apply to this part of the expression
let expr = self.with_res(Restrictions::STMT_EXPR, |this| {
// Perform this outside of the `collect_tokens` closure, since our
// outer attributes do not apply to this part of the expression.
let (expr, _) = self.with_res(Restrictions::STMT_EXPR, |this| {
this.parse_expr_assoc_rest_with(0, true, expr)
})?;
Ok(self.mk_stmt(lo.to(self.prev_token.span), StmtKind::Expr(expr)))
@ -210,7 +234,7 @@ impl<'a> Parser<'a> {
let e = self.mk_expr(lo.to(hi), ExprKind::MacCall(mac));
let e = self.maybe_recover_from_bad_qpath(e)?;
let e = self.parse_expr_dot_or_call_with(attrs, e, lo)?;
let e = self.parse_expr_assoc_rest_with(0, false, e)?;
let (e, _) = self.parse_expr_assoc_rest_with(0, false, e)?;
StmtKind::Expr(e)
};
Ok(self.mk_stmt(lo.to(hi), kind))
@ -240,10 +264,14 @@ impl<'a> Parser<'a> {
subdiagnostic: fn(Span) -> errors::InvalidVariableDeclarationSub,
force_collect: ForceCollect,
) -> PResult<'a, Stmt> {
let stmt = self.collect_tokens_trailing_token(attrs, force_collect, |this, attrs| {
let stmt = self.collect_tokens(None, attrs, force_collect, |this, attrs| {
let local = this.parse_local(attrs)?;
// FIXME - maybe capture semicolon in recovery?
Ok((this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)), Trailing::No))
Ok((
this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)),
Trailing::No,
UsePreAttrPos::No,
))
})?;
self.dcx()
.emit_err(errors::InvalidVariableDeclaration { span: lo, sub: subdiagnostic(lo) });


@ -0,0 +1,42 @@
//@ check-pass
// This test triggered an assertion failure in token collection due to
// mishandling of attributes on associative expressions.
#![feature(cfg_eval)]
#![feature(rustc_attrs)]
#![feature(stmt_expr_attributes)]
#![allow(internal_features)]
fn main() {}
#[cfg_eval]
struct Foo1(
[ bool; {
let _x = 30;
#[cfg_attr(unix, rustc_dummy(aa))] 1
} ]
);
#[cfg_eval]
struct Foo12(
[ bool; {
let _x = 30;
#[cfg_attr(unix, rustc_dummy(bb))] 1 + 2
} ]
);
#[cfg_eval]
struct Foox(
[ bool; {
let _x = 30;
#[cfg_attr(unix, rustc_dummy(cc))] _x
} ]
);
#[cfg_eval]
struct Foox2(
[ bool; {
let _x = 30;
#[cfg_attr(unix, rustc_dummy(dd))] _x + 2
} ]
);


@ -51,14 +51,6 @@ macro_rules! c1 {
};
}
// FIXME: temporary
macro_rules! c2 {
($frag:ident, [$($tt:tt)*], $s1:literal, $s2:literal) => {
assert_eq!($frag!($($tt)*), $s1);
assert_eq!(stringify!($($tt)*), $s2);
};
}
#[test]
fn test_block() {
c1!(block, [ {} ], "{}");
@ -344,10 +336,10 @@ fn test_expr() {
// Ones involving attributes.
c1!(expr, [ #[aa] 1 ], "#[aa] 1");
c1!(expr, [ #[aa] #[bb] x ], "#[aa] #[bb] x");
c2!(expr, [ #[aa] 1 + 2 ], "1 + 2", "#[aa] 1 + 2"); // FIXME
c2!(expr, [ #[aa] x + 2 ], "x + 2", "#[aa] x + 2"); // FIXME
c2!(expr, [ #[aa] 1 / #[bb] 2 ], "1 / #[bb] 2", "#[aa] 1 / #[bb] 2"); // FIXME
c2!(expr, [ #[aa] x / #[bb] 2 ], "x / #[bb] 2", "#[aa] x / #[bb] 2"); // FIXME
c1!(expr, [ #[aa] 1 + 2 ], "#[aa] 1 + 2");
c1!(expr, [ #[aa] x + 2 ], "#[aa] x + 2");
c1!(expr, [ #[aa] 1 / #[bb] 2 ], "#[aa] 1 / #[bb] 2");
c1!(expr, [ #[aa] x / #[bb] 2 ], "#[aa] x / #[bb] 2");
c1!(expr, [ 1 << #[bb] 2 ], "1 << #[bb] 2");
c1!(expr, [ x << #[bb] 2 ], "x << #[bb] 2");
c1!(expr, [ #[aa] (1 + 2) ], "#[aa] (1 + 2)");
@ -659,10 +651,10 @@ fn test_stmt() {
// Ones involving attributes.
c1!(stmt, [ #[aa] 1 ], "#[aa] 1");
c1!(stmt, [ #[aa] #[bb] x ], "#[aa] #[bb] x");
c2!(stmt, [ #[aa] 1 as u32 ], "1 as u32", "#[aa] 1 as u32"); // FIXME
c2!(stmt, [ #[aa] x as u32 ], "x as u32", "#[aa] x as u32"); // FIXME
c2!(stmt, [ #[aa] 1 .. #[bb] 2 ], "1 .. #[bb] 2", "#[aa] 1 .. #[bb] 2"); // FIXME
c2!(stmt, [ #[aa] x .. #[bb] 2 ], "x .. #[bb] 2", "#[aa] x .. #[bb] 2"); // FIXME
c1!(stmt, [ #[aa] 1 as u32 ], "#[aa] 1 as u32");
c1!(stmt, [ #[aa] x as u32 ], "#[aa] x as u32");
c1!(stmt, [ #[aa] 1 .. #[bb] 2 ], "#[aa] 1 .. #[bb] 2");
c1!(stmt, [ #[aa] x .. #[bb] 2 ], "#[aa] x .. #[bb] 2");
c1!(stmt, [ 1 || #[bb] 2 ], "1 || #[bb] 2");
c1!(stmt, [ x || #[bb] 2 ], "x || #[bb] 2");
c1!(stmt, [ #[aa] (1 + 2) ], "#[aa] (1 + 2)");