Rename AttrAnnotatedToken{Stream,Tree}
These two type names are long and have long matching prefixes. I find them hard to read, especially in combinations like `AttrAnnotatedTokenStream::new(vec![AttrAnnotatedTokenTree::Token(..)])`. This commit renames them as `AttrToken{Stream,Tree}`.
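For a concrete sense of the difference, here is a minimal sketch of the kind of call site the rename shortens. It is not code from this commit: the helper name and signature are invented for illustration, and the `crate::…` paths assume it lives inside `rustc_ast`. It mirrors the doc-comment arm of `Attribute::tokens` in the first hunk below.

```rust
use crate::ast::AttrStyle;
use crate::token::{self, CommentKind, Token};
use crate::tokenstream::{AttrTokenStream, AttrTokenTree, Spacing};
use rustc_span::{Span, Symbol};

// Hypothetical helper: build the single-token stream for a doc comment.
// Before this commit the same expression read
// `AttrAnnotatedTokenStream::new(vec![AttrAnnotatedTokenTree::Token(..)])`.
fn doc_comment_stream(
    kind: CommentKind,
    style: AttrStyle,
    data: Symbol,
    span: Span,
) -> AttrTokenStream {
    AttrTokenStream::new(vec![AttrTokenTree::Token(
        Token::new(token::DocComment(kind, style, data), span),
        Spacing::Alone,
    )])
}
```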
parent 890e759ffc
commit a56d345490
8 changed files with 80 additions and 84 deletions
@@ -7,7 +7,7 @@ use crate::ast::{MacArgs, MacArgsEq, MacDelimiter, MetaItem, MetaItemKind, Neste
 use crate::ast::{Path, PathSegment};
 use crate::ptr::P;
 use crate::token::{self, CommentKind, Delimiter, Token};
-use crate::tokenstream::{AttrAnnotatedTokenStream, AttrAnnotatedTokenTree};
+use crate::tokenstream::{AttrTokenStream, AttrTokenTree};
 use crate::tokenstream::{DelimSpan, Spacing, TokenTree};
 use crate::tokenstream::{LazyTokenStream, TokenStream};
 use crate::util::comments;
@@ -296,7 +296,7 @@ impl Attribute {
         }
     }

-    pub fn tokens(&self) -> AttrAnnotatedTokenStream {
+    pub fn tokens(&self) -> AttrTokenStream {
         match self.kind {
             AttrKind::Normal(ref normal) => normal
                 .tokens
@@ -304,7 +304,7 @@ impl Attribute {
                 .unwrap_or_else(|| panic!("attribute is missing tokens: {:?}", self))
                 .create_token_stream(),
             AttrKind::DocComment(comment_kind, data) => {
-                AttrAnnotatedTokenStream::new(vec![AttrAnnotatedTokenTree::Token(
+                AttrTokenStream::new(vec![AttrTokenTree::Token(
                     Token::new(token::DocComment(comment_kind, self.style, data), self.span),
                     Spacing::Alone,
                 )])
@@ -642,17 +642,17 @@ pub fn noop_flat_map_param<T: MutVisitor>(mut param: Param, vis: &mut T) -> Smal
 }

 // No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
-pub fn visit_attr_annotated_tt<T: MutVisitor>(tt: &mut AttrAnnotatedTokenTree, vis: &mut T) {
+pub fn visit_attr_tt<T: MutVisitor>(tt: &mut AttrTokenTree, vis: &mut T) {
     match tt {
-        AttrAnnotatedTokenTree::Token(token, _) => {
+        AttrTokenTree::Token(token, _) => {
             visit_token(token, vis);
         }
-        AttrAnnotatedTokenTree::Delimited(DelimSpan { open, close }, _delim, tts) => {
+        AttrTokenTree::Delimited(DelimSpan { open, close }, _delim, tts) => {
             vis.visit_span(open);
             vis.visit_span(close);
-            visit_attr_annotated_tts(tts, vis);
+            visit_attr_tts(tts, vis);
         }
-        AttrAnnotatedTokenTree::Attributes(data) => {
+        AttrTokenTree::Attributes(data) => {
             for attr in &mut *data.attrs {
                 match &mut attr.kind {
                     AttrKind::Normal(normal) => {
@@ -690,13 +690,10 @@ pub fn visit_tts<T: MutVisitor>(TokenStream(tts): &mut TokenStream, vis: &mut T)
     }
 }

-pub fn visit_attr_annotated_tts<T: MutVisitor>(
-    AttrAnnotatedTokenStream(tts): &mut AttrAnnotatedTokenStream,
-    vis: &mut T,
-) {
+pub fn visit_attr_tts<T: MutVisitor>(AttrTokenStream(tts): &mut AttrTokenStream, vis: &mut T) {
     if T::VISIT_TOKENS && !tts.is_empty() {
         let tts = Lrc::make_mut(tts);
-        visit_vec(tts, |tree| visit_attr_annotated_tt(tree, vis));
+        visit_vec(tts, |tree| visit_attr_tt(tree, vis));
     }
 }

@@ -704,7 +701,7 @@ pub fn visit_lazy_tts_opt_mut<T: MutVisitor>(lazy_tts: Option<&mut LazyTokenStre
     if T::VISIT_TOKENS {
         if let Some(lazy_tts) = lazy_tts {
             let mut tts = lazy_tts.create_token_stream();
-            visit_attr_annotated_tts(&mut tts, vis);
+            visit_attr_tts(&mut tts, vis);
             *lazy_tts = LazyTokenStream::new(tts);
         }
     }
@@ -122,11 +122,11 @@ where
 }

 pub trait CreateTokenStream: sync::Send + sync::Sync {
-    fn create_token_stream(&self) -> AttrAnnotatedTokenStream;
+    fn create_token_stream(&self) -> AttrTokenStream;
 }

-impl CreateTokenStream for AttrAnnotatedTokenStream {
-    fn create_token_stream(&self) -> AttrAnnotatedTokenStream {
+impl CreateTokenStream for AttrTokenStream {
+    fn create_token_stream(&self) -> AttrTokenStream {
         self.clone()
     }
 }
@@ -142,7 +142,7 @@ impl LazyTokenStream {
         LazyTokenStream(Lrc::new(Box::new(inner)))
     }

-    pub fn create_token_stream(&self) -> AttrAnnotatedTokenStream {
+    pub fn create_token_stream(&self) -> AttrTokenStream {
         self.0.create_token_stream()
     }
 }
@@ -172,31 +172,31 @@ impl<CTX> HashStable<CTX> for LazyTokenStream {
     }
 }

-/// A `AttrAnnotatedTokenStream` is similar to a `TokenStream`, but with extra
+/// An `AttrTokenStream` is similar to a `TokenStream`, but with extra
 /// information about the tokens for attribute targets. This is used
 /// during expansion to perform early cfg-expansion, and to process attributes
 /// during proc-macro invocations.
 #[derive(Clone, Debug, Default, Encodable, Decodable)]
-pub struct AttrAnnotatedTokenStream(pub Lrc<Vec<AttrAnnotatedTokenTree>>);
+pub struct AttrTokenStream(pub Lrc<Vec<AttrTokenTree>>);

-/// Like `TokenTree`, but for `AttrAnnotatedTokenStream`
+/// Like `TokenTree`, but for `AttrTokenStream`.
 #[derive(Clone, Debug, Encodable, Decodable)]
-pub enum AttrAnnotatedTokenTree {
+pub enum AttrTokenTree {
     Token(Token, Spacing),
-    Delimited(DelimSpan, Delimiter, AttrAnnotatedTokenStream),
+    Delimited(DelimSpan, Delimiter, AttrTokenStream),
     /// Stores the attributes for an attribute target,
     /// along with the tokens for that attribute target.
     /// See `AttributesData` for more information
     Attributes(AttributesData),
 }

-impl AttrAnnotatedTokenStream {
-    pub fn new(tokens: Vec<AttrAnnotatedTokenTree>) -> AttrAnnotatedTokenStream {
-        AttrAnnotatedTokenStream(Lrc::new(tokens))
+impl AttrTokenStream {
+    pub fn new(tokens: Vec<AttrTokenTree>) -> AttrTokenStream {
+        AttrTokenStream(Lrc::new(tokens))
     }

-    /// Converts this `AttrAnnotatedTokenStream` to a plain `TokenStream
-    /// During conversion, `AttrAnnotatedTokenTree::Attributes` get 'flattened'
+    /// Converts this `AttrTokenStream` to a plain `TokenStream`.
+    /// During conversion, `AttrTokenTree::Attributes` get 'flattened'
     /// back to a `TokenStream` of the form `outer_attr attr_target`.
     /// If there are inner attributes, they are inserted into the proper
     /// place in the attribute target tokens.
@@ -205,14 +205,14 @@ impl AttrAnnotatedTokenStream {
             .0
             .iter()
             .flat_map(|tree| match &tree {
-                AttrAnnotatedTokenTree::Token(inner, spacing) => {
+                AttrTokenTree::Token(inner, spacing) => {
                     smallvec![TokenTree::Token(inner.clone(), *spacing)].into_iter()
                 }
-                AttrAnnotatedTokenTree::Delimited(span, delim, stream) => {
+                AttrTokenTree::Delimited(span, delim, stream) => {
                     smallvec![TokenTree::Delimited(*span, *delim, stream.to_tokenstream()),]
                         .into_iter()
                 }
-                AttrAnnotatedTokenTree::Attributes(data) => {
+                AttrTokenTree::Attributes(data) => {
                     let mut outer_attrs = Vec::new();
                     let mut inner_attrs = Vec::new();
                     for attr in &data.attrs {
@@ -417,14 +417,14 @@ impl TokenStream {
     fn opt_from_ast(node: &(impl HasAttrs + HasTokens)) -> Option<TokenStream> {
         let tokens = node.tokens()?;
         let attrs = node.attrs();
-        let attr_annotated = if attrs.is_empty() {
+        let attr_stream = if attrs.is_empty() {
             tokens.create_token_stream()
         } else {
             let attr_data =
                 AttributesData { attrs: attrs.iter().cloned().collect(), tokens: tokens.clone() };
-            AttrAnnotatedTokenStream::new(vec![AttrAnnotatedTokenTree::Attributes(attr_data)])
+            AttrTokenStream::new(vec![AttrTokenTree::Attributes(attr_data)])
         };
-        Some(attr_annotated.to_tokenstream())
+        Some(attr_stream.to_tokenstream())
     }

     // Create a token stream containing a single token with alone spacing.
@@ -188,14 +188,14 @@ impl CfgEval<'_, '_> {
         let orig_tokens = annotatable.to_tokens().flattened();

         // Re-parse the tokens, setting the `capture_cfg` flag to save extra information
-        // to the captured `AttrAnnotatedTokenStream` (specifically, we capture
-        // `AttrAnnotatedTokenTree::AttributesData` for all occurrences of `#[cfg]` and `#[cfg_attr]`)
+        // to the captured `AttrTokenStream` (specifically, we capture
+        // `AttrTokenTree::AttributesData` for all occurrences of `#[cfg]` and `#[cfg_attr]`)
         let mut parser =
             rustc_parse::stream_to_parser(&self.cfg.sess.parse_sess, orig_tokens, None);
         parser.capture_cfg = true;
         annotatable = parse_annotatable_with(&mut parser);

-        // Now that we have our re-parsed `AttrAnnotatedTokenStream`, recursively configuring
+        // Now that we have our re-parsed `AttrTokenStream`, recursively configuring
         // our attribute target will correctly the tokens as well.
         flat_map_annotatable(self, annotatable)
     }
@@ -2,7 +2,7 @@

 use rustc_ast::ptr::P;
 use rustc_ast::token::{Delimiter, Token, TokenKind};
-use rustc_ast::tokenstream::{AttrAnnotatedTokenStream, AttrAnnotatedTokenTree};
+use rustc_ast::tokenstream::{AttrTokenStream, AttrTokenTree};
 use rustc_ast::tokenstream::{DelimSpan, Spacing};
 use rustc_ast::tokenstream::{LazyTokenStream, TokenTree};
 use rustc_ast::NodeId;
@@ -259,8 +259,8 @@ impl<'a> StripUnconfigured<'a> {
     fn try_configure_tokens<T: HasTokens>(&self, node: &mut T) {
         if self.config_tokens {
             if let Some(Some(tokens)) = node.tokens_mut() {
-                let attr_annotated_tokens = tokens.create_token_stream();
-                *tokens = LazyTokenStream::new(self.configure_tokens(&attr_annotated_tokens));
+                let attr_stream = tokens.create_token_stream();
+                *tokens = LazyTokenStream::new(self.configure_tokens(&attr_stream));
             }
         }
     }
@@ -270,16 +270,16 @@ impl<'a> StripUnconfigured<'a> {
         if self.in_cfg(&attrs) { Some(attrs) } else { None }
     }

-    /// Performs cfg-expansion on `stream`, producing a new `AttrAnnotatedTokenStream`.
+    /// Performs cfg-expansion on `stream`, producing a new `AttrTokenStream`.
     /// This is only used during the invocation of `derive` proc-macros,
     /// which require that we cfg-expand their entire input.
     /// Normal cfg-expansion operates on parsed AST nodes via the `configure` method
-    fn configure_tokens(&self, stream: &AttrAnnotatedTokenStream) -> AttrAnnotatedTokenStream {
-        fn can_skip(stream: &AttrAnnotatedTokenStream) -> bool {
+    fn configure_tokens(&self, stream: &AttrTokenStream) -> AttrTokenStream {
+        fn can_skip(stream: &AttrTokenStream) -> bool {
             stream.0.iter().all(|tree| match tree {
-                AttrAnnotatedTokenTree::Attributes(_) => false,
-                AttrAnnotatedTokenTree::Token(..) => true,
-                AttrAnnotatedTokenTree::Delimited(_, _, inner) => can_skip(inner),
+                AttrTokenTree::Attributes(_) => false,
+                AttrTokenTree::Token(..) => true,
+                AttrTokenTree::Delimited(_, _, inner) => can_skip(inner),
             })
         }

@@ -291,35 +291,35 @@ impl<'a> StripUnconfigured<'a> {
             .0
             .iter()
             .flat_map(|tree| match tree.clone() {
-                AttrAnnotatedTokenTree::Attributes(mut data) => {
+                AttrTokenTree::Attributes(mut data) => {
                     data.attrs.flat_map_in_place(|attr| self.process_cfg_attr(attr));

                     if self.in_cfg(&data.attrs) {
                         data.tokens = LazyTokenStream::new(
                             self.configure_tokens(&data.tokens.create_token_stream()),
                         );
-                        Some(AttrAnnotatedTokenTree::Attributes(data)).into_iter()
+                        Some(AttrTokenTree::Attributes(data)).into_iter()
                     } else {
                         None.into_iter()
                     }
                 }
-                AttrAnnotatedTokenTree::Delimited(sp, delim, mut inner) => {
+                AttrTokenTree::Delimited(sp, delim, mut inner) => {
                     inner = self.configure_tokens(&inner);
-                    Some(AttrAnnotatedTokenTree::Delimited(sp, delim, inner))
+                    Some(AttrTokenTree::Delimited(sp, delim, inner))
                         .into_iter()
                 }
-                AttrAnnotatedTokenTree::Token(ref token, _) if let TokenKind::Interpolated(ref nt) = token.kind => {
+                AttrTokenTree::Token(ref token, _) if let TokenKind::Interpolated(ref nt) = token.kind => {
                     panic!(
                         "Nonterminal should have been flattened at {:?}: {:?}",
                         token.span, nt
                     );
                 }
-                AttrAnnotatedTokenTree::Token(token, spacing) => {
-                    Some(AttrAnnotatedTokenTree::Token(token, spacing)).into_iter()
+                AttrTokenTree::Token(token, spacing) => {
+                    Some(AttrTokenTree::Token(token, spacing)).into_iter()
                 }
             })
             .collect();
-        AttrAnnotatedTokenStream::new(trees)
+        AttrTokenStream::new(trees)
     }

     /// Parse and expand all `cfg_attr` attributes into a list of attributes
@@ -404,17 +404,17 @@ impl<'a> StripUnconfigured<'a> {
         };
         let pound_span = pound_token.span;

-        let mut trees = vec![AttrAnnotatedTokenTree::Token(pound_token, Spacing::Alone)];
+        let mut trees = vec![AttrTokenTree::Token(pound_token, Spacing::Alone)];
         if attr.style == AttrStyle::Inner {
             // For inner attributes, we do the same thing for the `!` in `#![some_attr]`
             let TokenTree::Token(bang_token @ Token { kind: TokenKind::Not, .. }, _) = orig_trees.next().unwrap() else {
                 panic!("Bad tokens for attribute {:?}", attr);
             };
-            trees.push(AttrAnnotatedTokenTree::Token(bang_token, Spacing::Alone));
+            trees.push(AttrTokenTree::Token(bang_token, Spacing::Alone));
         }
         // We don't really have a good span to use for the synthesized `[]`
         // in `#[attr]`, so just use the span of the `#` token.
-        let bracket_group = AttrAnnotatedTokenTree::Delimited(
+        let bracket_group = AttrTokenTree::Delimited(
             DelimSpan::from_single(pound_span),
             Delimiter::Bracket,
             item.tokens
@@ -423,7 +423,7 @@ impl<'a> StripUnconfigured<'a> {
                 .create_token_stream(),
         );
         trees.push(bracket_group);
-        let tokens = Some(LazyTokenStream::new(AttrAnnotatedTokenStream::new(trees)));
+        let tokens = Some(LazyTokenStream::new(AttrTokenStream::new(trees)));
         let attr = attr::mk_attr_from_item(item, tokens, attr.style, item_span);
         if attr.has_name(sym::crate_type) {
             self.sess.parse_sess.buffer_lint(
@@ -303,7 +303,7 @@ impl<'a> Parser<'a> {
                 // If we are currently capturing tokens, mark the location of this inner attribute.
                 // If capturing ends up creating a `LazyTokenStream`, we will include
                 // this replace range with it, removing the inner attribute from the final
-                // `AttrAnnotatedTokenStream`. Inner attributes are stored in the parsed AST note.
+                // `AttrTokenStream`. Inner attributes are stored in the parsed AST note.
                 // During macro expansion, they are selectively inserted back into the
                 // token stream (the first inner attribute is removed each time we invoke the
                 // corresponding macro).
@ -1,7 +1,7 @@
|
|||
use super::{Capturing, FlatToken, ForceCollect, Parser, ReplaceRange, TokenCursor, TrailingToken};
|
||||
use rustc_ast::token::{self, Delimiter, Token, TokenKind};
|
||||
use rustc_ast::tokenstream::{AttrAnnotatedTokenStream, AttributesData, CreateTokenStream};
|
||||
use rustc_ast::tokenstream::{AttrAnnotatedTokenTree, DelimSpan, LazyTokenStream, Spacing};
|
||||
use rustc_ast::tokenstream::{AttrTokenStream, AttributesData, CreateTokenStream};
|
||||
use rustc_ast::tokenstream::{AttrTokenTree, DelimSpan, LazyTokenStream, Spacing};
|
||||
use rustc_ast::{self as ast};
|
||||
use rustc_ast::{AttrVec, Attribute, HasAttrs, HasTokens};
|
||||
use rustc_errors::PResult;
|
||||
|
@@ -100,7 +100,7 @@ struct LazyTokenStreamImpl {
 rustc_data_structures::static_assert_size!(LazyTokenStreamImpl, 144);

 impl CreateTokenStream for LazyTokenStreamImpl {
-    fn create_token_stream(&self) -> AttrAnnotatedTokenStream {
+    fn create_token_stream(&self) -> AttrTokenStream {
         // The token produced by the final call to `{,inlined_}next` was not
         // actually consumed by the callback. The combination of chaining the
         // initial token and using `take` produces the desired result - we
@@ -298,7 +298,7 @@ impl<'a> Parser<'a> {
         // If we 'broke' the last token (e.g. breaking a '>>' token to two '>' tokens),
         // then extend the range of captured tokens to include it, since the parser
         // was not actually bumped past it. When the `LazyTokenStream` gets converted
-        // into an `AttrAnnotatedTokenStream`, we will create the proper token.
+        // into an `AttrTokenStream`, we will create the proper token.
         if self.token_cursor.break_last_token {
             assert_eq!(
                 trailing,
@@ -317,7 +317,7 @@ impl<'a> Parser<'a> {
         } else {
             // Grab any replace ranges that occur *inside* the current AST node.
             // We will perform the actual replacement when we convert the `LazyTokenStream`
-            // to an `AttrAnnotatedTokenStream`
+            // to an `AttrTokenStream`.
             let start_calls: u32 = cursor_snapshot_next_calls.try_into().unwrap();
             self.capture_state.replace_ranges[replace_ranges_start..replace_ranges_end]
                 .iter()
@@ -392,12 +392,12 @@ impl<'a> Parser<'a> {
 fn make_token_stream(
     mut iter: impl Iterator<Item = (FlatToken, Spacing)>,
     break_last_token: bool,
-) -> AttrAnnotatedTokenStream {
+) -> AttrTokenStream {
     #[derive(Debug)]
     struct FrameData {
         // This is `None` for the first frame, `Some` for all others.
         open_delim_sp: Option<(Delimiter, Span)>,
-        inner: Vec<AttrAnnotatedTokenTree>,
+        inner: Vec<AttrTokenTree>,
     }
     let mut stack = vec![FrameData { open_delim_sp: None, inner: vec![] }];
     let mut token_and_spacing = iter.next();
@@ -418,8 +418,8 @@ fn make_token_stream(
                     open_delim, span
                 );
                 let dspan = DelimSpan::from_pair(open_sp, span);
-                let stream = AttrAnnotatedTokenStream::new(frame_data.inner);
-                let delimited = AttrAnnotatedTokenTree::Delimited(dspan, delim, stream);
+                let stream = AttrTokenStream::new(frame_data.inner);
+                let delimited = AttrTokenTree::Delimited(dspan, delim, stream);
                 stack
                     .last_mut()
                     .unwrap_or_else(|| {
@@ -432,12 +432,12 @@ fn make_token_stream(
                 .last_mut()
                 .expect("Bottom token frame is missing!")
                 .inner
-                .push(AttrAnnotatedTokenTree::Token(token, spacing)),
+                .push(AttrTokenTree::Token(token, spacing)),
             FlatToken::AttrTarget(data) => stack
                 .last_mut()
                 .expect("Bottom token frame is missing!")
                 .inner
-                .push(AttrAnnotatedTokenTree::Attributes(data)),
+                .push(AttrTokenTree::Attributes(data)),
             FlatToken::Empty => {}
         }
         token_and_spacing = iter.next();
@@ -445,21 +445,20 @@ fn make_token_stream(
     let mut final_buf = stack.pop().expect("Missing final buf!");
     if break_last_token {
         let last_token = final_buf.inner.pop().unwrap();
-        if let AttrAnnotatedTokenTree::Token(last_token, spacing) = last_token {
+        if let AttrTokenTree::Token(last_token, spacing) = last_token {
             let unglued_first = last_token.kind.break_two_token_op().unwrap().0;

             // An 'unglued' token is always two ASCII characters
             let mut first_span = last_token.span.shrink_to_lo();
             first_span = first_span.with_hi(first_span.lo() + rustc_span::BytePos(1));

-            final_buf.inner.push(AttrAnnotatedTokenTree::Token(
-                Token::new(unglued_first, first_span),
-                spacing,
-            ));
+            final_buf
+                .inner
+                .push(AttrTokenTree::Token(Token::new(unglued_first, first_span), spacing));
         } else {
             panic!("Unexpected last token {:?}", last_token)
         }
     }
     assert!(stack.is_empty(), "Stack should be empty: final_buf={:?} stack={:?}", final_buf, stack);
-    AttrAnnotatedTokenStream::new(final_buf.inner)
+    AttrTokenStream::new(final_buf.inner)
 }
@@ -170,7 +170,7 @@ pub struct ClosureSpans {
 /// attribute, we parse a nested AST node that has `#[cfg]` or `#[cfg_attr]`
 /// In this case, we use a `ReplaceRange` to replace the entire inner AST node
 /// with `FlatToken::AttrTarget`, allowing us to perform eager cfg-expansion
-/// on an `AttrAnnotatedTokenStream`
+/// on an `AttrTokenStream`.
 ///
 /// 2. When we parse an inner attribute while collecting tokens. We
 /// remove inner attributes from the token stream entirely, and
@@ -183,7 +183,7 @@ pub type ReplaceRange = (Range<u32>, Vec<(FlatToken, Spacing)>);

 /// Controls how we capture tokens. Capturing can be expensive,
 /// so we try to avoid performing capturing in cases where
-/// we will never need an `AttrAnnotatedTokenStream`
+/// we will never need an `AttrTokenStream`.
 #[derive(Copy, Clone)]
 pub enum Capturing {
     /// We aren't performing any capturing - this is the default mode.
@@ -1464,11 +1464,11 @@ pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedBrace>, sess: &Pa
     }
 }

-/// A helper struct used when building an `AttrAnnotatedTokenStream` from
+/// A helper struct used when building an `AttrTokenStream` from
 /// a `LazyTokenStream`. Both delimiter and non-delimited tokens
 /// are stored as `FlatToken::Token`. A vector of `FlatToken`s
-/// is then 'parsed' to build up an `AttrAnnotatedTokenStream` with nested
-/// `AttrAnnotatedTokenTree::Delimited` tokens
+/// is then 'parsed' to build up an `AttrTokenStream` with nested
+/// `AttrTokenTree::Delimited` tokens.
 #[derive(Debug, Clone)]
 pub enum FlatToken {
     /// A token - this holds both delimiter (e.g. '{' and '}')
@@ -1476,11 +1476,11 @@ pub enum FlatToken {
     Token(Token),
     /// Holds the `AttributesData` for an AST node. The
     /// `AttributesData` is inserted directly into the
-    /// constructed `AttrAnnotatedTokenStream` as
-    /// an `AttrAnnotatedTokenTree::Attributes`
+    /// constructed `AttrTokenStream` as
+    /// an `AttrTokenTree::Attributes`.
     AttrTarget(AttributesData),
     /// A special 'empty' token that is ignored during the conversion
-    /// to an `AttrAnnotatedTokenStream`. This is used to simplify the
+    /// to an `AttrTokenStream`. This is used to simplify the
     /// handling of replace ranges.
     Empty,
 }