Rename AttrAnnotatedToken{Stream,Tree}.

These two type names are long and have long matching prefixes. I find
them hard to read, especially in combinations like
`AttrAnnotatedTokenStream::new(vec![AttrAnnotatedTokenTree::Token(..)])`.

This commit renames them as `AttrToken{Stream,Tree}`.
This commit is contained in:
Nicholas Nethercote 2022-09-09 12:44:05 +10:00
parent 890e759ffc
commit a56d345490
8 changed files with 80 additions and 84 deletions

View file

@ -7,7 +7,7 @@ use crate::ast::{MacArgs, MacArgsEq, MacDelimiter, MetaItem, MetaItemKind, Neste
use crate::ast::{Path, PathSegment}; use crate::ast::{Path, PathSegment};
use crate::ptr::P; use crate::ptr::P;
use crate::token::{self, CommentKind, Delimiter, Token}; use crate::token::{self, CommentKind, Delimiter, Token};
use crate::tokenstream::{AttrAnnotatedTokenStream, AttrAnnotatedTokenTree}; use crate::tokenstream::{AttrTokenStream, AttrTokenTree};
use crate::tokenstream::{DelimSpan, Spacing, TokenTree}; use crate::tokenstream::{DelimSpan, Spacing, TokenTree};
use crate::tokenstream::{LazyTokenStream, TokenStream}; use crate::tokenstream::{LazyTokenStream, TokenStream};
use crate::util::comments; use crate::util::comments;
@ -296,7 +296,7 @@ impl Attribute {
} }
} }
pub fn tokens(&self) -> AttrAnnotatedTokenStream { pub fn tokens(&self) -> AttrTokenStream {
match self.kind { match self.kind {
AttrKind::Normal(ref normal) => normal AttrKind::Normal(ref normal) => normal
.tokens .tokens
@ -304,7 +304,7 @@ impl Attribute {
.unwrap_or_else(|| panic!("attribute is missing tokens: {:?}", self)) .unwrap_or_else(|| panic!("attribute is missing tokens: {:?}", self))
.create_token_stream(), .create_token_stream(),
AttrKind::DocComment(comment_kind, data) => { AttrKind::DocComment(comment_kind, data) => {
AttrAnnotatedTokenStream::new(vec![AttrAnnotatedTokenTree::Token( AttrTokenStream::new(vec![AttrTokenTree::Token(
Token::new(token::DocComment(comment_kind, self.style, data), self.span), Token::new(token::DocComment(comment_kind, self.style, data), self.span),
Spacing::Alone, Spacing::Alone,
)]) )])

View file

@ -642,17 +642,17 @@ pub fn noop_flat_map_param<T: MutVisitor>(mut param: Param, vis: &mut T) -> Smal
} }
// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`. // No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
pub fn visit_attr_annotated_tt<T: MutVisitor>(tt: &mut AttrAnnotatedTokenTree, vis: &mut T) { pub fn visit_attr_tt<T: MutVisitor>(tt: &mut AttrTokenTree, vis: &mut T) {
match tt { match tt {
AttrAnnotatedTokenTree::Token(token, _) => { AttrTokenTree::Token(token, _) => {
visit_token(token, vis); visit_token(token, vis);
} }
AttrAnnotatedTokenTree::Delimited(DelimSpan { open, close }, _delim, tts) => { AttrTokenTree::Delimited(DelimSpan { open, close }, _delim, tts) => {
vis.visit_span(open); vis.visit_span(open);
vis.visit_span(close); vis.visit_span(close);
visit_attr_annotated_tts(tts, vis); visit_attr_tts(tts, vis);
} }
AttrAnnotatedTokenTree::Attributes(data) => { AttrTokenTree::Attributes(data) => {
for attr in &mut *data.attrs { for attr in &mut *data.attrs {
match &mut attr.kind { match &mut attr.kind {
AttrKind::Normal(normal) => { AttrKind::Normal(normal) => {
@ -690,13 +690,10 @@ pub fn visit_tts<T: MutVisitor>(TokenStream(tts): &mut TokenStream, vis: &mut T)
} }
} }
pub fn visit_attr_annotated_tts<T: MutVisitor>( pub fn visit_attr_tts<T: MutVisitor>(AttrTokenStream(tts): &mut AttrTokenStream, vis: &mut T) {
AttrAnnotatedTokenStream(tts): &mut AttrAnnotatedTokenStream,
vis: &mut T,
) {
if T::VISIT_TOKENS && !tts.is_empty() { if T::VISIT_TOKENS && !tts.is_empty() {
let tts = Lrc::make_mut(tts); let tts = Lrc::make_mut(tts);
visit_vec(tts, |tree| visit_attr_annotated_tt(tree, vis)); visit_vec(tts, |tree| visit_attr_tt(tree, vis));
} }
} }
@ -704,7 +701,7 @@ pub fn visit_lazy_tts_opt_mut<T: MutVisitor>(lazy_tts: Option<&mut LazyTokenStre
if T::VISIT_TOKENS { if T::VISIT_TOKENS {
if let Some(lazy_tts) = lazy_tts { if let Some(lazy_tts) = lazy_tts {
let mut tts = lazy_tts.create_token_stream(); let mut tts = lazy_tts.create_token_stream();
visit_attr_annotated_tts(&mut tts, vis); visit_attr_tts(&mut tts, vis);
*lazy_tts = LazyTokenStream::new(tts); *lazy_tts = LazyTokenStream::new(tts);
} }
} }

View file

@ -122,11 +122,11 @@ where
} }
pub trait CreateTokenStream: sync::Send + sync::Sync { pub trait CreateTokenStream: sync::Send + sync::Sync {
fn create_token_stream(&self) -> AttrAnnotatedTokenStream; fn create_token_stream(&self) -> AttrTokenStream;
} }
impl CreateTokenStream for AttrAnnotatedTokenStream { impl CreateTokenStream for AttrTokenStream {
fn create_token_stream(&self) -> AttrAnnotatedTokenStream { fn create_token_stream(&self) -> AttrTokenStream {
self.clone() self.clone()
} }
} }
@ -142,7 +142,7 @@ impl LazyTokenStream {
LazyTokenStream(Lrc::new(Box::new(inner))) LazyTokenStream(Lrc::new(Box::new(inner)))
} }
pub fn create_token_stream(&self) -> AttrAnnotatedTokenStream { pub fn create_token_stream(&self) -> AttrTokenStream {
self.0.create_token_stream() self.0.create_token_stream()
} }
} }
@ -172,31 +172,31 @@ impl<CTX> HashStable<CTX> for LazyTokenStream {
} }
} }
/// A `AttrAnnotatedTokenStream` is similar to a `TokenStream`, but with extra /// An `AttrTokenStream` is similar to a `TokenStream`, but with extra
/// information about the tokens for attribute targets. This is used /// information about the tokens for attribute targets. This is used
/// during expansion to perform early cfg-expansion, and to process attributes /// during expansion to perform early cfg-expansion, and to process attributes
/// during proc-macro invocations. /// during proc-macro invocations.
#[derive(Clone, Debug, Default, Encodable, Decodable)] #[derive(Clone, Debug, Default, Encodable, Decodable)]
pub struct AttrAnnotatedTokenStream(pub Lrc<Vec<AttrAnnotatedTokenTree>>); pub struct AttrTokenStream(pub Lrc<Vec<AttrTokenTree>>);
/// Like `TokenTree`, but for `AttrAnnotatedTokenStream` /// Like `TokenTree`, but for `AttrTokenStream`.
#[derive(Clone, Debug, Encodable, Decodable)] #[derive(Clone, Debug, Encodable, Decodable)]
pub enum AttrAnnotatedTokenTree { pub enum AttrTokenTree {
Token(Token, Spacing), Token(Token, Spacing),
Delimited(DelimSpan, Delimiter, AttrAnnotatedTokenStream), Delimited(DelimSpan, Delimiter, AttrTokenStream),
/// Stores the attributes for an attribute target, /// Stores the attributes for an attribute target,
/// along with the tokens for that attribute target. /// along with the tokens for that attribute target.
/// See `AttributesData` for more information /// See `AttributesData` for more information
Attributes(AttributesData), Attributes(AttributesData),
} }
impl AttrAnnotatedTokenStream { impl AttrTokenStream {
pub fn new(tokens: Vec<AttrAnnotatedTokenTree>) -> AttrAnnotatedTokenStream { pub fn new(tokens: Vec<AttrTokenTree>) -> AttrTokenStream {
AttrAnnotatedTokenStream(Lrc::new(tokens)) AttrTokenStream(Lrc::new(tokens))
} }
/// Converts this `AttrAnnotatedTokenStream` to a plain `TokenStream /// Converts this `AttrTokenStream` to a plain `TokenStream`.
/// During conversion, `AttrAnnotatedTokenTree::Attributes` get 'flattened' /// During conversion, `AttrTokenTree::Attributes` get 'flattened'
/// back to a `TokenStream` of the form `outer_attr attr_target`. /// back to a `TokenStream` of the form `outer_attr attr_target`.
/// If there are inner attributes, they are inserted into the proper /// If there are inner attributes, they are inserted into the proper
/// place in the attribute target tokens. /// place in the attribute target tokens.
@ -205,14 +205,14 @@ impl AttrAnnotatedTokenStream {
.0 .0
.iter() .iter()
.flat_map(|tree| match &tree { .flat_map(|tree| match &tree {
AttrAnnotatedTokenTree::Token(inner, spacing) => { AttrTokenTree::Token(inner, spacing) => {
smallvec![TokenTree::Token(inner.clone(), *spacing)].into_iter() smallvec![TokenTree::Token(inner.clone(), *spacing)].into_iter()
} }
AttrAnnotatedTokenTree::Delimited(span, delim, stream) => { AttrTokenTree::Delimited(span, delim, stream) => {
smallvec![TokenTree::Delimited(*span, *delim, stream.to_tokenstream()),] smallvec![TokenTree::Delimited(*span, *delim, stream.to_tokenstream()),]
.into_iter() .into_iter()
} }
AttrAnnotatedTokenTree::Attributes(data) => { AttrTokenTree::Attributes(data) => {
let mut outer_attrs = Vec::new(); let mut outer_attrs = Vec::new();
let mut inner_attrs = Vec::new(); let mut inner_attrs = Vec::new();
for attr in &data.attrs { for attr in &data.attrs {
@ -417,14 +417,14 @@ impl TokenStream {
fn opt_from_ast(node: &(impl HasAttrs + HasTokens)) -> Option<TokenStream> { fn opt_from_ast(node: &(impl HasAttrs + HasTokens)) -> Option<TokenStream> {
let tokens = node.tokens()?; let tokens = node.tokens()?;
let attrs = node.attrs(); let attrs = node.attrs();
let attr_annotated = if attrs.is_empty() { let attr_stream = if attrs.is_empty() {
tokens.create_token_stream() tokens.create_token_stream()
} else { } else {
let attr_data = let attr_data =
AttributesData { attrs: attrs.iter().cloned().collect(), tokens: tokens.clone() }; AttributesData { attrs: attrs.iter().cloned().collect(), tokens: tokens.clone() };
AttrAnnotatedTokenStream::new(vec![AttrAnnotatedTokenTree::Attributes(attr_data)]) AttrTokenStream::new(vec![AttrTokenTree::Attributes(attr_data)])
}; };
Some(attr_annotated.to_tokenstream()) Some(attr_stream.to_tokenstream())
} }
// Create a token stream containing a single token with alone spacing. // Create a token stream containing a single token with alone spacing.

View file

@ -188,14 +188,14 @@ impl CfgEval<'_, '_> {
let orig_tokens = annotatable.to_tokens().flattened(); let orig_tokens = annotatable.to_tokens().flattened();
// Re-parse the tokens, setting the `capture_cfg` flag to save extra information // Re-parse the tokens, setting the `capture_cfg` flag to save extra information
// to the captured `AttrAnnotatedTokenStream` (specifically, we capture // to the captured `AttrTokenStream` (specifically, we capture
// `AttrAnnotatedTokenTree::AttributesData` for all occurrences of `#[cfg]` and `#[cfg_attr]`) // `AttrTokenTree::AttributesData` for all occurrences of `#[cfg]` and `#[cfg_attr]`)
let mut parser = let mut parser =
rustc_parse::stream_to_parser(&self.cfg.sess.parse_sess, orig_tokens, None); rustc_parse::stream_to_parser(&self.cfg.sess.parse_sess, orig_tokens, None);
parser.capture_cfg = true; parser.capture_cfg = true;
annotatable = parse_annotatable_with(&mut parser); annotatable = parse_annotatable_with(&mut parser);
// Now that we have our re-parsed `AttrAnnotatedTokenStream`, recursively configuring // Now that we have our re-parsed `AttrTokenStream`, recursively configuring
// our attribute target will correctly configure the tokens as well. // our attribute target will correctly configure the tokens as well.
flat_map_annotatable(self, annotatable) flat_map_annotatable(self, annotatable)
} }

View file

@ -2,7 +2,7 @@
use rustc_ast::ptr::P; use rustc_ast::ptr::P;
use rustc_ast::token::{Delimiter, Token, TokenKind}; use rustc_ast::token::{Delimiter, Token, TokenKind};
use rustc_ast::tokenstream::{AttrAnnotatedTokenStream, AttrAnnotatedTokenTree}; use rustc_ast::tokenstream::{AttrTokenStream, AttrTokenTree};
use rustc_ast::tokenstream::{DelimSpan, Spacing}; use rustc_ast::tokenstream::{DelimSpan, Spacing};
use rustc_ast::tokenstream::{LazyTokenStream, TokenTree}; use rustc_ast::tokenstream::{LazyTokenStream, TokenTree};
use rustc_ast::NodeId; use rustc_ast::NodeId;
@ -259,8 +259,8 @@ impl<'a> StripUnconfigured<'a> {
fn try_configure_tokens<T: HasTokens>(&self, node: &mut T) { fn try_configure_tokens<T: HasTokens>(&self, node: &mut T) {
if self.config_tokens { if self.config_tokens {
if let Some(Some(tokens)) = node.tokens_mut() { if let Some(Some(tokens)) = node.tokens_mut() {
let attr_annotated_tokens = tokens.create_token_stream(); let attr_stream = tokens.create_token_stream();
*tokens = LazyTokenStream::new(self.configure_tokens(&attr_annotated_tokens)); *tokens = LazyTokenStream::new(self.configure_tokens(&attr_stream));
} }
} }
} }
@ -270,16 +270,16 @@ impl<'a> StripUnconfigured<'a> {
if self.in_cfg(&attrs) { Some(attrs) } else { None } if self.in_cfg(&attrs) { Some(attrs) } else { None }
} }
/// Performs cfg-expansion on `stream`, producing a new `AttrAnnotatedTokenStream`. /// Performs cfg-expansion on `stream`, producing a new `AttrTokenStream`.
/// This is only used during the invocation of `derive` proc-macros, /// This is only used during the invocation of `derive` proc-macros,
/// which require that we cfg-expand their entire input. /// which require that we cfg-expand their entire input.
/// Normal cfg-expansion operates on parsed AST nodes via the `configure` method /// Normal cfg-expansion operates on parsed AST nodes via the `configure` method
fn configure_tokens(&self, stream: &AttrAnnotatedTokenStream) -> AttrAnnotatedTokenStream { fn configure_tokens(&self, stream: &AttrTokenStream) -> AttrTokenStream {
fn can_skip(stream: &AttrAnnotatedTokenStream) -> bool { fn can_skip(stream: &AttrTokenStream) -> bool {
stream.0.iter().all(|tree| match tree { stream.0.iter().all(|tree| match tree {
AttrAnnotatedTokenTree::Attributes(_) => false, AttrTokenTree::Attributes(_) => false,
AttrAnnotatedTokenTree::Token(..) => true, AttrTokenTree::Token(..) => true,
AttrAnnotatedTokenTree::Delimited(_, _, inner) => can_skip(inner), AttrTokenTree::Delimited(_, _, inner) => can_skip(inner),
}) })
} }
@ -291,35 +291,35 @@ impl<'a> StripUnconfigured<'a> {
.0 .0
.iter() .iter()
.flat_map(|tree| match tree.clone() { .flat_map(|tree| match tree.clone() {
AttrAnnotatedTokenTree::Attributes(mut data) => { AttrTokenTree::Attributes(mut data) => {
data.attrs.flat_map_in_place(|attr| self.process_cfg_attr(attr)); data.attrs.flat_map_in_place(|attr| self.process_cfg_attr(attr));
if self.in_cfg(&data.attrs) { if self.in_cfg(&data.attrs) {
data.tokens = LazyTokenStream::new( data.tokens = LazyTokenStream::new(
self.configure_tokens(&data.tokens.create_token_stream()), self.configure_tokens(&data.tokens.create_token_stream()),
); );
Some(AttrAnnotatedTokenTree::Attributes(data)).into_iter() Some(AttrTokenTree::Attributes(data)).into_iter()
} else { } else {
None.into_iter() None.into_iter()
} }
} }
AttrAnnotatedTokenTree::Delimited(sp, delim, mut inner) => { AttrTokenTree::Delimited(sp, delim, mut inner) => {
inner = self.configure_tokens(&inner); inner = self.configure_tokens(&inner);
Some(AttrAnnotatedTokenTree::Delimited(sp, delim, inner)) Some(AttrTokenTree::Delimited(sp, delim, inner))
.into_iter() .into_iter()
} }
AttrAnnotatedTokenTree::Token(ref token, _) if let TokenKind::Interpolated(ref nt) = token.kind => { AttrTokenTree::Token(ref token, _) if let TokenKind::Interpolated(ref nt) = token.kind => {
panic!( panic!(
"Nonterminal should have been flattened at {:?}: {:?}", "Nonterminal should have been flattened at {:?}: {:?}",
token.span, nt token.span, nt
); );
} }
AttrAnnotatedTokenTree::Token(token, spacing) => { AttrTokenTree::Token(token, spacing) => {
Some(AttrAnnotatedTokenTree::Token(token, spacing)).into_iter() Some(AttrTokenTree::Token(token, spacing)).into_iter()
} }
}) })
.collect(); .collect();
AttrAnnotatedTokenStream::new(trees) AttrTokenStream::new(trees)
} }
/// Parse and expand all `cfg_attr` attributes into a list of attributes /// Parse and expand all `cfg_attr` attributes into a list of attributes
@ -404,17 +404,17 @@ impl<'a> StripUnconfigured<'a> {
}; };
let pound_span = pound_token.span; let pound_span = pound_token.span;
let mut trees = vec![AttrAnnotatedTokenTree::Token(pound_token, Spacing::Alone)]; let mut trees = vec![AttrTokenTree::Token(pound_token, Spacing::Alone)];
if attr.style == AttrStyle::Inner { if attr.style == AttrStyle::Inner {
// For inner attributes, we do the same thing for the `!` in `#![some_attr]` // For inner attributes, we do the same thing for the `!` in `#![some_attr]`
let TokenTree::Token(bang_token @ Token { kind: TokenKind::Not, .. }, _) = orig_trees.next().unwrap() else { let TokenTree::Token(bang_token @ Token { kind: TokenKind::Not, .. }, _) = orig_trees.next().unwrap() else {
panic!("Bad tokens for attribute {:?}", attr); panic!("Bad tokens for attribute {:?}", attr);
}; };
trees.push(AttrAnnotatedTokenTree::Token(bang_token, Spacing::Alone)); trees.push(AttrTokenTree::Token(bang_token, Spacing::Alone));
} }
// We don't really have a good span to use for the synthesized `[]` // We don't really have a good span to use for the synthesized `[]`
// in `#[attr]`, so just use the span of the `#` token. // in `#[attr]`, so just use the span of the `#` token.
let bracket_group = AttrAnnotatedTokenTree::Delimited( let bracket_group = AttrTokenTree::Delimited(
DelimSpan::from_single(pound_span), DelimSpan::from_single(pound_span),
Delimiter::Bracket, Delimiter::Bracket,
item.tokens item.tokens
@ -423,7 +423,7 @@ impl<'a> StripUnconfigured<'a> {
.create_token_stream(), .create_token_stream(),
); );
trees.push(bracket_group); trees.push(bracket_group);
let tokens = Some(LazyTokenStream::new(AttrAnnotatedTokenStream::new(trees))); let tokens = Some(LazyTokenStream::new(AttrTokenStream::new(trees)));
let attr = attr::mk_attr_from_item(item, tokens, attr.style, item_span); let attr = attr::mk_attr_from_item(item, tokens, attr.style, item_span);
if attr.has_name(sym::crate_type) { if attr.has_name(sym::crate_type) {
self.sess.parse_sess.buffer_lint( self.sess.parse_sess.buffer_lint(

View file

@ -303,7 +303,7 @@ impl<'a> Parser<'a> {
// If we are currently capturing tokens, mark the location of this inner attribute. // If we are currently capturing tokens, mark the location of this inner attribute.
// If capturing ends up creating a `LazyTokenStream`, we will include // If capturing ends up creating a `LazyTokenStream`, we will include
// this replace range with it, removing the inner attribute from the final // this replace range with it, removing the inner attribute from the final
// `AttrAnnotatedTokenStream`. Inner attributes are stored in the parsed AST node. // `AttrTokenStream`. Inner attributes are stored in the parsed AST node.
// During macro expansion, they are selectively inserted back into the // During macro expansion, they are selectively inserted back into the
// token stream (the first inner attribute is removed each time we invoke the // token stream (the first inner attribute is removed each time we invoke the
// corresponding macro). // corresponding macro).

View file

@ -1,7 +1,7 @@
use super::{Capturing, FlatToken, ForceCollect, Parser, ReplaceRange, TokenCursor, TrailingToken}; use super::{Capturing, FlatToken, ForceCollect, Parser, ReplaceRange, TokenCursor, TrailingToken};
use rustc_ast::token::{self, Delimiter, Token, TokenKind}; use rustc_ast::token::{self, Delimiter, Token, TokenKind};
use rustc_ast::tokenstream::{AttrAnnotatedTokenStream, AttributesData, CreateTokenStream}; use rustc_ast::tokenstream::{AttrTokenStream, AttributesData, CreateTokenStream};
use rustc_ast::tokenstream::{AttrAnnotatedTokenTree, DelimSpan, LazyTokenStream, Spacing}; use rustc_ast::tokenstream::{AttrTokenTree, DelimSpan, LazyTokenStream, Spacing};
use rustc_ast::{self as ast}; use rustc_ast::{self as ast};
use rustc_ast::{AttrVec, Attribute, HasAttrs, HasTokens}; use rustc_ast::{AttrVec, Attribute, HasAttrs, HasTokens};
use rustc_errors::PResult; use rustc_errors::PResult;
@ -100,7 +100,7 @@ struct LazyTokenStreamImpl {
rustc_data_structures::static_assert_size!(LazyTokenStreamImpl, 144); rustc_data_structures::static_assert_size!(LazyTokenStreamImpl, 144);
impl CreateTokenStream for LazyTokenStreamImpl { impl CreateTokenStream for LazyTokenStreamImpl {
fn create_token_stream(&self) -> AttrAnnotatedTokenStream { fn create_token_stream(&self) -> AttrTokenStream {
// The token produced by the final call to `{,inlined_}next` was not // The token produced by the final call to `{,inlined_}next` was not
// actually consumed by the callback. The combination of chaining the // actually consumed by the callback. The combination of chaining the
// initial token and using `take` produces the desired result - we // initial token and using `take` produces the desired result - we
@ -298,7 +298,7 @@ impl<'a> Parser<'a> {
// If we 'broke' the last token (e.g. breaking a '>>' token to two '>' tokens), // If we 'broke' the last token (e.g. breaking a '>>' token to two '>' tokens),
// then extend the range of captured tokens to include it, since the parser // then extend the range of captured tokens to include it, since the parser
// was not actually bumped past it. When the `LazyTokenStream` gets converted // was not actually bumped past it. When the `LazyTokenStream` gets converted
// into an `AttrAnnotatedTokenStream`, we will create the proper token. // into an `AttrTokenStream`, we will create the proper token.
if self.token_cursor.break_last_token { if self.token_cursor.break_last_token {
assert_eq!( assert_eq!(
trailing, trailing,
@ -317,7 +317,7 @@ impl<'a> Parser<'a> {
} else { } else {
// Grab any replace ranges that occur *inside* the current AST node. // Grab any replace ranges that occur *inside* the current AST node.
// We will perform the actual replacement when we convert the `LazyTokenStream` // We will perform the actual replacement when we convert the `LazyTokenStream`
// to an `AttrAnnotatedTokenStream` // to an `AttrTokenStream`.
let start_calls: u32 = cursor_snapshot_next_calls.try_into().unwrap(); let start_calls: u32 = cursor_snapshot_next_calls.try_into().unwrap();
self.capture_state.replace_ranges[replace_ranges_start..replace_ranges_end] self.capture_state.replace_ranges[replace_ranges_start..replace_ranges_end]
.iter() .iter()
@ -392,12 +392,12 @@ impl<'a> Parser<'a> {
fn make_token_stream( fn make_token_stream(
mut iter: impl Iterator<Item = (FlatToken, Spacing)>, mut iter: impl Iterator<Item = (FlatToken, Spacing)>,
break_last_token: bool, break_last_token: bool,
) -> AttrAnnotatedTokenStream { ) -> AttrTokenStream {
#[derive(Debug)] #[derive(Debug)]
struct FrameData { struct FrameData {
// This is `None` for the first frame, `Some` for all others. // This is `None` for the first frame, `Some` for all others.
open_delim_sp: Option<(Delimiter, Span)>, open_delim_sp: Option<(Delimiter, Span)>,
inner: Vec<AttrAnnotatedTokenTree>, inner: Vec<AttrTokenTree>,
} }
let mut stack = vec![FrameData { open_delim_sp: None, inner: vec![] }]; let mut stack = vec![FrameData { open_delim_sp: None, inner: vec![] }];
let mut token_and_spacing = iter.next(); let mut token_and_spacing = iter.next();
@ -418,8 +418,8 @@ fn make_token_stream(
open_delim, span open_delim, span
); );
let dspan = DelimSpan::from_pair(open_sp, span); let dspan = DelimSpan::from_pair(open_sp, span);
let stream = AttrAnnotatedTokenStream::new(frame_data.inner); let stream = AttrTokenStream::new(frame_data.inner);
let delimited = AttrAnnotatedTokenTree::Delimited(dspan, delim, stream); let delimited = AttrTokenTree::Delimited(dspan, delim, stream);
stack stack
.last_mut() .last_mut()
.unwrap_or_else(|| { .unwrap_or_else(|| {
@ -432,12 +432,12 @@ fn make_token_stream(
.last_mut() .last_mut()
.expect("Bottom token frame is missing!") .expect("Bottom token frame is missing!")
.inner .inner
.push(AttrAnnotatedTokenTree::Token(token, spacing)), .push(AttrTokenTree::Token(token, spacing)),
FlatToken::AttrTarget(data) => stack FlatToken::AttrTarget(data) => stack
.last_mut() .last_mut()
.expect("Bottom token frame is missing!") .expect("Bottom token frame is missing!")
.inner .inner
.push(AttrAnnotatedTokenTree::Attributes(data)), .push(AttrTokenTree::Attributes(data)),
FlatToken::Empty => {} FlatToken::Empty => {}
} }
token_and_spacing = iter.next(); token_and_spacing = iter.next();
@ -445,21 +445,20 @@ fn make_token_stream(
let mut final_buf = stack.pop().expect("Missing final buf!"); let mut final_buf = stack.pop().expect("Missing final buf!");
if break_last_token { if break_last_token {
let last_token = final_buf.inner.pop().unwrap(); let last_token = final_buf.inner.pop().unwrap();
if let AttrAnnotatedTokenTree::Token(last_token, spacing) = last_token { if let AttrTokenTree::Token(last_token, spacing) = last_token {
let unglued_first = last_token.kind.break_two_token_op().unwrap().0; let unglued_first = last_token.kind.break_two_token_op().unwrap().0;
// An 'unglued' token is always two ASCII characters // An 'unglued' token is always two ASCII characters
let mut first_span = last_token.span.shrink_to_lo(); let mut first_span = last_token.span.shrink_to_lo();
first_span = first_span.with_hi(first_span.lo() + rustc_span::BytePos(1)); first_span = first_span.with_hi(first_span.lo() + rustc_span::BytePos(1));
final_buf.inner.push(AttrAnnotatedTokenTree::Token( final_buf
Token::new(unglued_first, first_span), .inner
spacing, .push(AttrTokenTree::Token(Token::new(unglued_first, first_span), spacing));
));
} else { } else {
panic!("Unexpected last token {:?}", last_token) panic!("Unexpected last token {:?}", last_token)
} }
} }
assert!(stack.is_empty(), "Stack should be empty: final_buf={:?} stack={:?}", final_buf, stack); assert!(stack.is_empty(), "Stack should be empty: final_buf={:?} stack={:?}", final_buf, stack);
AttrAnnotatedTokenStream::new(final_buf.inner) AttrTokenStream::new(final_buf.inner)
} }

View file

@ -170,7 +170,7 @@ pub struct ClosureSpans {
/// attribute, we parse a nested AST node that has `#[cfg]` or `#[cfg_attr]` /// attribute, we parse a nested AST node that has `#[cfg]` or `#[cfg_attr]`
/// In this case, we use a `ReplaceRange` to replace the entire inner AST node /// In this case, we use a `ReplaceRange` to replace the entire inner AST node
/// with `FlatToken::AttrTarget`, allowing us to perform eager cfg-expansion /// with `FlatToken::AttrTarget`, allowing us to perform eager cfg-expansion
/// on an `AttrAnnotatedTokenStream` /// on an `AttrTokenStream`.
/// ///
/// 2. When we parse an inner attribute while collecting tokens. We /// 2. When we parse an inner attribute while collecting tokens. We
/// remove inner attributes from the token stream entirely, and /// remove inner attributes from the token stream entirely, and
@ -183,7 +183,7 @@ pub type ReplaceRange = (Range<u32>, Vec<(FlatToken, Spacing)>);
/// Controls how we capture tokens. Capturing can be expensive, /// Controls how we capture tokens. Capturing can be expensive,
/// so we try to avoid performing capturing in cases where /// so we try to avoid performing capturing in cases where
/// we will never need an `AttrAnnotatedTokenStream` /// we will never need an `AttrTokenStream`.
#[derive(Copy, Clone)] #[derive(Copy, Clone)]
pub enum Capturing { pub enum Capturing {
/// We aren't performing any capturing - this is the default mode. /// We aren't performing any capturing - this is the default mode.
@ -1464,11 +1464,11 @@ pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedBrace>, sess: &Pa
} }
} }
/// A helper struct used when building an `AttrAnnotatedTokenStream` from /// A helper struct used when building an `AttrTokenStream` from
/// a `LazyTokenStream`. Both delimiter and non-delimited tokens /// a `LazyTokenStream`. Both delimiter and non-delimited tokens
/// are stored as `FlatToken::Token`. A vector of `FlatToken`s /// are stored as `FlatToken::Token`. A vector of `FlatToken`s
/// is then 'parsed' to build up an `AttrAnnotatedTokenStream` with nested /// is then 'parsed' to build up an `AttrTokenStream` with nested
/// `AttrAnnotatedTokenTree::Delimited` tokens /// `AttrTokenTree::Delimited` tokens.
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub enum FlatToken { pub enum FlatToken {
/// A token - this holds both delimiter (e.g. '{' and '}') /// A token - this holds both delimiter (e.g. '{' and '}')
@ -1476,11 +1476,11 @@ pub enum FlatToken {
Token(Token), Token(Token),
/// Holds the `AttributesData` for an AST node. The /// Holds the `AttributesData` for an AST node. The
/// `AttributesData` is inserted directly into the /// `AttributesData` is inserted directly into the
/// constructed `AttrAnnotatedTokenStream` as /// constructed `AttrTokenStream` as
/// an `AttrAnnotatedTokenTree::Attributes` /// an `AttrTokenTree::Attributes`.
AttrTarget(AttributesData), AttrTarget(AttributesData),
/// A special 'empty' token that is ignored during the conversion /// A special 'empty' token that is ignored during the conversion
/// to an `AttrAnnotatedTokenStream`. This is used to simplify the /// to an `AttrTokenStream`. This is used to simplify the
/// handling of replace ranges. /// handling of replace ranges.
Empty, Empty,
} }