Rename some attribute types for consistency.

- `AttributesData` -> `AttrsTarget`
- `AttrTokenTree::Attributes` -> `AttrTokenTree::AttrsTarget`
- `FlatToken::AttrTarget` -> `FlatToken::AttrsTarget`
Nicholas Nethercote 2024-07-07 16:14:30 +10:00
parent 9d33a8fe51
commit 3a5c4b6e4e
6 changed files with 42 additions and 44 deletions
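
For readers unfamiliar with these internals, the following minimal, self-contained sketch (simplified stand-in types, not the real `rustc_ast` definitions) shows how the renamed pieces relate: an `AttrsTarget` pairs an attribute target's attributes with its captured tokens, and the `AttrTokenTree::AttrsTarget` and `FlatToken::AttrsTarget` variants wrap it.

    // Simplified model of the renamed types; the real definitions live in
    // rustc_ast::tokenstream and carry more fields.
    #[derive(Clone, Debug)]
    struct Attribute(String); // stand-in for rustc_ast::Attribute

    // Formerly `AttributesData`: one attribute target's attributes plus the
    // (lazily captured) tokens of that target.
    #[derive(Clone, Debug)]
    struct AttrsTarget {
        attrs: Vec<Attribute>,
        tokens: Vec<String>, // stand-in for LazyAttrTokenStream
    }

    // Formerly `AttrTokenTree::Attributes(AttributesData)`.
    #[derive(Clone, Debug)]
    enum AttrTokenTree {
        Token(String),
        AttrsTarget(AttrsTarget),
    }

    // Formerly `FlatToken::AttrTarget(AttributesData)`.
    #[derive(Clone, Debug)]
    enum FlatToken {
        Token(String),
        AttrsTarget(AttrsTarget),
        Empty,
    }

    fn main() {
        let target = AttrsTarget {
            attrs: vec![Attribute("#[cfg(feature = \"serde\")]".to_string())],
            tokens: vec!["struct Foo {}".to_string()],
        };
        let tree = AttrTokenTree::AttrsTarget(target.clone());
        let flat = FlatToken::AttrsTarget(target);
        println!("{tree:?}");
        println!("{flat:?}");
    }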

View File

@@ -704,7 +704,7 @@ fn visit_attr_tt<T: MutVisitor>(tt: &mut AttrTokenTree, vis: &mut T) {
 visit_attr_tts(tts, vis);
 visit_delim_span(dspan, vis);
 }
-AttrTokenTree::Attributes(AttributesData { attrs, tokens }) => {
+AttrTokenTree::AttrsTarget(AttrsTarget { attrs, tokens }) => {
 visit_attrs(attrs, vis);
 visit_lazy_tts_opt_mut(Some(tokens), vis);
 }

View File

@@ -170,8 +170,8 @@ pub enum AttrTokenTree {
 Delimited(DelimSpan, DelimSpacing, Delimiter, AttrTokenStream),
 /// Stores the attributes for an attribute target,
 /// along with the tokens for that attribute target.
-/// See `AttributesData` for more information
-Attributes(AttributesData),
+/// See `AttrsTarget` for more information
+AttrsTarget(AttrsTarget),
 }
 impl AttrTokenStream {
@@ -180,7 +180,7 @@ impl AttrTokenStream {
 }
 /// Converts this `AttrTokenStream` to a plain `Vec<TokenTree>`.
-/// During conversion, `AttrTokenTree::Attributes` get 'flattened'
+/// During conversion, `AttrTokenTree::AttrsTarget` get 'flattened'
 /// back to a `TokenStream` of the form `outer_attr attr_target`.
 /// If there are inner attributes, they are inserted into the proper
 /// place in the attribute target tokens.
@@ -199,13 +199,13 @@ impl AttrTokenStream {
 TokenStream::new(stream.to_token_trees()),
 ))
 }
-AttrTokenTree::Attributes(data) => {
-let idx = data
+AttrTokenTree::AttrsTarget(target) => {
+let idx = target
 .attrs
 .partition_point(|attr| matches!(attr.style, crate::AttrStyle::Outer));
-let (outer_attrs, inner_attrs) = data.attrs.split_at(idx);
+let (outer_attrs, inner_attrs) = target.attrs.split_at(idx);
-let mut target_tokens = data.tokens.to_attr_token_stream().to_token_trees();
+let mut target_tokens = target.tokens.to_attr_token_stream().to_token_trees();
 if !inner_attrs.is_empty() {
 let mut found = false;
 // Check the last two trees (to account for a trailing semi)
@@ -262,7 +262,7 @@ impl AttrTokenStream {
 /// have an `attrs` field containing the `#[cfg(FALSE)]` attr,
 /// and a `tokens` field storing the (unparsed) tokens `struct Foo {}`
 #[derive(Clone, Debug, Encodable, Decodable)]
-pub struct AttributesData {
+pub struct AttrsTarget {
 /// Attributes, both outer and inner.
 /// These are stored in the original order that they were parsed in.
 pub attrs: AttrVec,
@@ -444,9 +444,9 @@ impl TokenStream {
 let attr_stream = if attrs.is_empty() {
 tokens.to_attr_token_stream()
 } else {
-let attr_data =
-AttributesData { attrs: attrs.iter().cloned().collect(), tokens: tokens.clone() };
-AttrTokenStream::new(vec![AttrTokenTree::Attributes(attr_data)])
+let target =
+AttrsTarget { attrs: attrs.iter().cloned().collect(), tokens: tokens.clone() };
+AttrTokenStream::new(vec![AttrTokenTree::AttrsTarget(target)])
 };
 TokenStream::new(attr_stream.to_token_trees())
 }
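
To make the flattening described in the doc comment above concrete, here is a rough standalone sketch: plain strings stand in for tokens, `Attr` and `flatten_target` are invented names, and the placement of inner attributes is simplified to "right after the opening brace" rather than the last-two-trees search the real code performs.

    // Illustrative only: outer attributes come before the target's tokens,
    // inner attributes are spliced into the target itself.
    #[derive(Clone)]
    struct Attr {
        outer: bool,
        text: String,
    }

    fn flatten_target(attrs: &[Attr], target_tokens: &[String]) -> Vec<String> {
        // Outer attributes are stored before inner ones, which is what the
        // `partition_point` call in `to_token_trees` relies on.
        let idx = attrs.partition_point(|a| a.outer);
        let (outer, inner) = attrs.split_at(idx);

        let mut out: Vec<String> = outer.iter().map(|a| a.text.clone()).collect();
        let mut body = target_tokens.to_vec();
        if let Some(pos) = body.iter().position(|t| t.as_str() == "{") {
            // Insert in reverse so the inner attributes keep their order.
            for a in inner.iter().rev() {
                body.insert(pos + 1, a.text.clone());
            }
        }
        out.extend(body);
        out
    }

    fn main() {
        let attrs = vec![
            Attr { outer: true, text: "#[outer]".to_string() },
            Attr { outer: false, text: "#![inner]".to_string() },
        ];
        let tokens = vec!["mod".to_string(), "m".to_string(), "{".to_string(), "}".to_string()];
        // Prints ["#[outer]", "mod", "m", "{", "#![inner]", "}"]
        println!("{:?}", flatten_target(&attrs, &tokens));
    }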

View File

@@ -193,7 +193,7 @@ impl CfgEval<'_> {
 // Re-parse the tokens, setting the `capture_cfg` flag to save extra information
 // to the captured `AttrTokenStream` (specifically, we capture
-// `AttrTokenTree::AttributesData` for all occurrences of `#[cfg]` and `#[cfg_attr]`)
+// `AttrTokenTree::AttrsTarget` for all occurrences of `#[cfg]` and `#[cfg_attr]`)
 let mut parser = Parser::new(&self.0.sess.psess, orig_tokens, None);
 parser.capture_cfg = true;
 match parse_annotatable_with(&mut parser) {

View File

@@ -172,7 +172,7 @@ impl<'a> StripUnconfigured<'a> {
 fn configure_tokens(&self, stream: &AttrTokenStream) -> AttrTokenStream {
 fn can_skip(stream: &AttrTokenStream) -> bool {
 stream.0.iter().all(|tree| match tree {
-AttrTokenTree::Attributes(_) => false,
+AttrTokenTree::AttrsTarget(_) => false,
 AttrTokenTree::Token(..) => true,
 AttrTokenTree::Delimited(.., inner) => can_skip(inner),
 })
@@ -186,14 +186,14 @@ impl<'a> StripUnconfigured<'a> {
 .0
 .iter()
 .flat_map(|tree| match tree.clone() {
-AttrTokenTree::Attributes(mut data) => {
-data.attrs.flat_map_in_place(|attr| self.process_cfg_attr(&attr));
+AttrTokenTree::AttrsTarget(mut target) => {
+target.attrs.flat_map_in_place(|attr| self.process_cfg_attr(&attr));
-if self.in_cfg(&data.attrs) {
-data.tokens = LazyAttrTokenStream::new(
-self.configure_tokens(&data.tokens.to_attr_token_stream()),
+if self.in_cfg(&target.attrs) {
+target.tokens = LazyAttrTokenStream::new(
+self.configure_tokens(&target.tokens.to_attr_token_stream()),
 );
-Some(AttrTokenTree::Attributes(data)).into_iter()
+Some(AttrTokenTree::AttrsTarget(target)).into_iter()
 } else {
 None.into_iter()
 }
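
The keep-or-drop pattern above can be sketched in isolation as follows; the types are simplified, `in_cfg` is modeled as a caller-supplied predicate rather than the real `StripUnconfigured` method, and the `process_cfg_attr`/token-reconfiguration steps are omitted.

    #[derive(Clone, Debug)]
    struct Target {
        attrs: Vec<String>,
        tokens: Vec<String>,
    }

    #[derive(Clone, Debug)]
    enum Tree {
        Token(String),
        AttrsTarget(Target),
    }

    // An `AttrsTarget` tree survives only if its attributes pass the cfg
    // predicate; everything else is passed through unchanged. Using
    // `flat_map` over an `Option` mirrors the `Some(..).into_iter()` /
    // `None.into_iter()` pattern in the diff above.
    fn configure(trees: Vec<Tree>, in_cfg: impl Fn(&[String]) -> bool) -> Vec<Tree> {
        trees
            .into_iter()
            .flat_map(|tree| match tree {
                Tree::AttrsTarget(target) if !in_cfg(&target.attrs) => None,
                other => Some(other),
            })
            .collect()
    }

    fn main() {
        let trees = vec![
            Tree::Token("fn".to_string()),
            Tree::AttrsTarget(Target {
                attrs: vec!["#[cfg(FALSE)]".to_string()],
                tokens: vec!["struct Foo {}".to_string()],
            }),
        ];
        // Only the plain token survives with this predicate.
        let kept = configure(trees, |attrs| !attrs.contains(&"#[cfg(FALSE)]".to_string()));
        println!("{kept:?}");
    }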

View File

@@ -1,6 +1,6 @@
 use super::{Capturing, FlatToken, ForceCollect, Parser, ReplaceRange, TokenCursor, TrailingToken};
 use rustc_ast::token::{self, Delimiter, Token, TokenKind};
-use rustc_ast::tokenstream::{AttrTokenStream, AttrTokenTree, AttributesData, DelimSpacing};
+use rustc_ast::tokenstream::{AttrTokenStream, AttrTokenTree, AttrsTarget, DelimSpacing};
 use rustc_ast::tokenstream::{DelimSpan, LazyAttrTokenStream, Spacing, ToAttrTokenStream};
 use rustc_ast::{self as ast};
 use rustc_ast::{AttrVec, Attribute, HasAttrs, HasTokens};
@@ -145,22 +145,22 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
 // start position, we ensure that any replace range which encloses
 // another replace range will capture the *replaced* tokens for the inner
 // range, not the original tokens.
-for (range, attr_data) in replace_ranges.into_iter().rev() {
+for (range, target) in replace_ranges.into_iter().rev() {
 assert!(!range.is_empty(), "Cannot replace an empty range: {range:?}");
-// Replace the tokens in range with zero or one `FlatToken::AttrTarget`s, plus
+// Replace the tokens in range with zero or one `FlatToken::AttrsTarget`s, plus
 // enough `FlatToken::Empty`s to fill up the rest of the range. This keeps the
 // total length of `tokens` constant throughout the replacement process, allowing
 // us to use all of the `ReplaceRanges` entries without adjusting indices.
-let attr_data_len = attr_data.is_some() as usize;
+let target_len = target.is_some() as usize;
 tokens.splice(
 (range.start as usize)..(range.end as usize),
-attr_data
+target
 .into_iter()
-.map(|attr_data| (FlatToken::AttrTarget(attr_data), Spacing::Alone))
+.map(|target| (FlatToken::AttrsTarget(target), Spacing::Alone))
 .chain(
 iter::repeat((FlatToken::Empty, Spacing::Alone))
-.take(range.len() - attr_data_len),
+.take(range.len() - target_len),
 ),
 );
 }
@@ -346,13 +346,12 @@ impl<'a> Parser<'a> {
 {
 assert!(!self.break_last_token, "Should not have unglued last token with cfg attr");
-// Replace the entire AST node that we just parsed, including attributes,
-// with `attr_data`. If this AST node is inside an item
-// that has `#[derive]`, then this will allow us to cfg-expand this
-// AST node.
+// Replace the entire AST node that we just parsed, including attributes, with
+// `target`. If this AST node is inside an item that has `#[derive]`, then this will
+// allow us to cfg-expand this AST node.
 let start_pos = if has_outer_attrs { attrs.start_pos } else { start_pos };
-let attr_data = AttributesData { attrs: final_attrs.iter().cloned().collect(), tokens };
-self.capture_state.replace_ranges.push((start_pos..end_pos, Some(attr_data)));
+let target = AttrsTarget { attrs: final_attrs.iter().cloned().collect(), tokens };
+self.capture_state.replace_ranges.push((start_pos..end_pos, Some(target)));
 self.capture_state.replace_ranges.extend(inner_attr_replace_ranges);
 }
@@ -414,11 +413,11 @@ fn make_attr_token_stream(
 .expect("Bottom token frame is missing!")
 .inner
 .push(AttrTokenTree::Token(token, spacing)),
-FlatToken::AttrTarget(data) => stack
+FlatToken::AttrsTarget(target) => stack
 .last_mut()
 .expect("Bottom token frame is missing!")
 .inner
-.push(AttrTokenTree::Attributes(data)),
+.push(AttrTokenTree::AttrsTarget(target)),
 FlatToken::Empty => {}
 }
 token_and_spacing = iter.next();
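
For illustration, here is a rough, self-contained sketch of the length-preserving replacement performed in the replace-range loop above; a simplified `FlatTok` enum stands in for `FlatToken`, and plain `&str` values stand in for `AttrsTarget`. Each range is overwritten with at most one target marker and padded with `Empty`, so the overall token count, and therefore the indices of the remaining ranges, never changes.

    use std::iter;
    use std::ops::Range;

    #[derive(Clone, Debug)]
    enum FlatTok {
        Token(char),
        AttrsTarget(&'static str),
        Empty,
    }

    fn apply_replace_ranges(
        tokens: &mut Vec<FlatTok>,
        replace_ranges: Vec<(Range<usize>, Option<&'static str>)>,
    ) {
        // Process ranges in reverse so that an enclosing range sees the
        // already-replaced tokens of any nested range, as in the loop above.
        for (range, target) in replace_ranges.into_iter().rev() {
            assert!(!range.is_empty());
            let target_len = target.is_some() as usize;
            // Zero or one `AttrsTarget`, then `Empty` padding.
            tokens.splice(
                range.clone(),
                target
                    .into_iter()
                    .map(FlatTok::AttrsTarget)
                    .chain(iter::repeat(FlatTok::Empty).take(range.len() - target_len)),
            );
        }
    }

    fn main() {
        let mut tokens: Vec<FlatTok> = "abcdef".chars().map(FlatTok::Token).collect();
        // An outer range enclosing a smaller, already-recorded inner range.
        apply_replace_ranges(&mut tokens, vec![(1..4, Some("target")), (2..3, None)]);
        assert_eq!(tokens.len(), 6); // length is preserved
        println!("{tokens:?}");
    }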

View File

@@ -20,7 +20,7 @@ use path::PathStyle;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, IdentIsRaw, Nonterminal, Token, TokenKind};
-use rustc_ast::tokenstream::{AttributesData, DelimSpacing, DelimSpan, Spacing};
+use rustc_ast::tokenstream::{AttrsTarget, DelimSpacing, DelimSpan, Spacing};
 use rustc_ast::tokenstream::{TokenStream, TokenTree, TokenTreeCursor};
 use rustc_ast::util::case::Case;
 use rustc_ast::{
@@ -203,13 +203,13 @@ struct ClosureSpans {
 }
 /// Indicates a range of tokens that should be replaced by
-/// the tokens in the provided `AttributesData`. This is used in two
+/// the tokens in the provided `AttrsTarget`. This is used in two
 /// places during token collection:
 ///
 /// 1. During the parsing of an AST node that may have a `#[derive]`
 /// attribute, we parse a nested AST node that has `#[cfg]` or `#[cfg_attr]`
 /// In this case, we use a `ReplaceRange` to replace the entire inner AST node
-/// with `FlatToken::AttrTarget`, allowing us to perform eager cfg-expansion
+/// with `FlatToken::AttrsTarget`, allowing us to perform eager cfg-expansion
 /// on an `AttrTokenStream`.
 ///
 /// 2. When we parse an inner attribute while collecting tokens. We
@@ -219,7 +219,7 @@ struct ClosureSpans {
 /// the first macro inner attribute to invoke a proc-macro).
 /// When create a `TokenStream`, the inner attributes get inserted
 /// into the proper place in the token stream.
-type ReplaceRange = (Range<u32>, Option<AttributesData>);
+type ReplaceRange = (Range<u32>, Option<AttrsTarget>);
 /// Controls how we capture tokens. Capturing can be expensive,
 /// so we try to avoid performing capturing in cases where
@@ -1608,11 +1608,10 @@ enum FlatToken {
 /// A token - this holds both delimiter (e.g. '{' and '}')
 /// and non-delimiter tokens
 Token(Token),
-/// Holds the `AttributesData` for an AST node. The
-/// `AttributesData` is inserted directly into the
-/// constructed `AttrTokenStream` as
-/// an `AttrTokenTree::Attributes`.
-AttrTarget(AttributesData),
+/// Holds the `AttrsTarget` for an AST node. The `AttrsTarget` is inserted
+/// directly into the constructed `AttrTokenStream` as an
+/// `AttrTokenTree::AttrsTarget`.
+AttrsTarget(AttrsTarget),
 /// A special 'empty' token that is ignored during the conversion
 /// to an `AttrTokenStream`. This is used to simplify the
 /// handling of replace ranges.