Split TokenCursor::{next,next_desugared} into inlined and non-inlined halves.

Author: Nicholas Nethercote, 2022-03-07 15:55:39 +11:00
Parent: 4e700a023c
Commit: f8f1d3f00b
2 changed files with 24 additions and 10 deletions
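
The change uses the common inline/outline split: the existing method keeps its name and stays non-inlined for ordinary call sites, while an `#[inline(always)]` twin is added for callers on hot paths (the diff's own doc comment says the twin "should only be used on hot code paths"). A minimal standalone sketch of the shape, with illustrative names rather than rustc code:

struct Cursor {
    items: Vec<u32>,
    pos: usize,
}

impl Cursor {
    // Ordinary entry point: stays non-inlined so cold call sites don't grow.
    fn next(&mut self) -> Option<u32> {
        self.inlined_next()
    }

    /// Always-inlined twin, intended only for hot code paths.
    #[inline(always)]
    fn inlined_next(&mut self) -> Option<u32> {
        let item = self.items.get(self.pos).copied();
        if item.is_some() {
            self.pos += 1;
        }
        item
    }
}

fn main() {
    let mut c = Cursor { items: vec![1, 2, 3], pos: 0 };
    // A cold call site uses the ordinary wrapper...
    let first = c.next();
    println!("{first:?}");
    // ...while a hot loop calls the always-inlined twin directly.
    while let Some(x) = c.inlined_next() {
        println!("{x}");
    }
}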


@@ -100,11 +100,12 @@ rustc_data_structures::static_assert_size!(LazyTokenStreamImpl, 144);
 impl CreateTokenStream for LazyTokenStreamImpl {
     fn create_token_stream(&self) -> AttrAnnotatedTokenStream {
-        // The token produced by the final call to `next` or `next_desugared`
-        // was not actually consumed by the callback. The combination
-        // of chaining the initial token and using `take` produces the desired
-        // result - we produce an empty `TokenStream` if no calls were made,
-        // and omit the final token otherwise.
+        // The token produced by the final call to `{,inlined_}next` or
+        // `{,inlined_}next_desugared` was not actually consumed by the
+        // callback. The combination of chaining the initial token and using
+        // `take` produces the desired result - we produce an empty
+        // `TokenStream` if no calls were made, and omit the final token
+        // otherwise.
         let mut cursor_snapshot = self.cursor_snapshot.clone();
         let tokens =
             std::iter::once((FlatToken::Token(self.start_token.0.clone()), self.start_token.1))
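
For readers unfamiliar with the trick the rewritten comment describes, here is a standalone sketch (illustrative code, not the rustc implementation): chaining the start token in front of the replayed tokens and then taking `num_calls` items yields an empty result when no calls were made, and otherwise drops the final, unconsumed token.

// `start` stands in for the saved start token, `replayed` for the tokens the
// cursor snapshot would produce again, and `num_calls` for `num_next_calls`.
fn collect_tokens(start: u32, replayed: &[u32], num_calls: usize) -> Vec<u32> {
    std::iter::once(start)
        .chain(replayed.iter().copied())
        .take(num_calls)
        .collect()
}

fn main() {
    // No calls were made: the result is empty.
    assert_eq!(collect_tokens(10, &[20, 30], 0), Vec::<u32>::new());
    // Two calls were made: the token from the final call (30) is omitted.
    assert_eq!(collect_tokens(10, &[20, 30], 2), vec![10, 20]);
}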


@@ -206,8 +206,9 @@ struct TokenCursor {
     frame: TokenCursorFrame,
     stack: Vec<TokenCursorFrame>,
     desugar_doc_comments: bool,
-    // Counts the number of calls to `next` or `next_desugared`,
-    // depending on whether `desugar_doc_comments` is set.
+    // Counts the number of calls to `{,inlined_}next` or
+    // `{,inlined_}next_desugared`, depending on whether
+    // `desugar_doc_comments` is set.
     num_next_calls: usize,
     // During parsing, we may sometimes need to 'unglue' a
     // glued token into two component tokens
@@ -256,6 +257,12 @@ impl TokenCursorFrame {
 
 impl TokenCursor {
     fn next(&mut self) -> (Token, Spacing) {
+        self.inlined_next()
+    }
+
+    /// This always-inlined version should only be used on hot code paths.
+    #[inline(always)]
+    fn inlined_next(&mut self) -> (Token, Spacing) {
         loop {
             let (tree, spacing) = if !self.frame.open_delim {
                 self.frame.open_delim = true;
@@ -285,7 +292,13 @@ impl TokenCursor {
     }
 
     fn next_desugared(&mut self) -> (Token, Spacing) {
-        let (data, attr_style, sp) = match self.next() {
+        self.inlined_next_desugared()
+    }
+
+    /// This always-inlined version should only be used on hot code paths.
+    #[inline(always)]
+    fn inlined_next_desugared(&mut self) -> (Token, Spacing) {
+        let (data, attr_style, sp) = match self.inlined_next() {
             (Token { kind: token::DocComment(_, attr_style, data), span }, _) => {
                 (data, attr_style, span)
             }
@@ -467,9 +480,9 @@ impl<'a> Parser<'a> {
     fn next_tok(&mut self, fallback_span: Span) -> (Token, Spacing) {
         loop {
             let (mut next, spacing) = if self.desugar_doc_comments {
-                self.token_cursor.next_desugared()
+                self.token_cursor.inlined_next_desugared()
             } else {
-                self.token_cursor.next()
+                self.token_cursor.inlined_next()
             };
             self.token_cursor.num_next_calls += 1;
             // We've retrieved an token from the underlying