Remove the `Option` in `TokenStream`.

This means an allocation is now required to create an empty `TokenStream`,
but all other operations are simpler and marginally faster because they no
longer have to check for `None`. Overall it simplifies the code for a
negligible performance effect.

The commit also removes `TokenStream::empty` in favour of implementing
`Default`, which is now possible.
Nicholas Nethercote 2019-10-10 07:29:02 +11:00
parent 20cc752726
commit 5c93492da9
9 changed files with 97 additions and 141 deletions
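
To make the shape of the change concrete before the diff, here is a minimal, self-contained sketch; `OldTokenStream`, `NewTokenStream`, and the use of `Arc` in place of `Lrc` are illustrative stand-ins rather than code from the commit:

// Sketch only: simplified stand-ins for rustc's types.
use std::mem;
use std::sync::Arc; // rustc uses `Lrc`, which is `Rc` or `Arc` depending on the build

type TreeAndJoint = u32; // placeholder for the real `(TokenTree, IsJoint)` pair

// Before: `None` let an empty stream avoid an allocation, but every
// operation had to match on the `Option` first.
#[allow(dead_code)]
pub struct OldTokenStream(pub Option<Arc<Vec<TreeAndJoint>>>);

// After: the field is always present, so `Default` can be derived.
// `TokenStream::empty()` becomes `TokenStream::default()`, at the cost of
// one `Arc` allocation per empty stream.
#[derive(Clone, Debug, Default)]
pub struct NewTokenStream(pub Arc<Vec<TreeAndJoint>>);

fn main() {
    let mut s = NewTokenStream::default();
    assert!(s.0.is_empty());
    // `Default` is also what lets the `Cursor` change below use `mem::take`
    // instead of `mem::replace(&mut self.stream, TokenStream(None))`.
    let _taken = mem::take(&mut s);
}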

@@ -551,7 +551,7 @@ impl MetaItem {
impl MetaItemKind {
pub fn tokens(&self, span: Span) -> TokenStream {
match *self {
MetaItemKind::Word => TokenStream::empty(),
MetaItemKind::Word => TokenStream::default(),
MetaItemKind::NameValue(ref lit) => {
let mut vec = vec![TokenTree::token(token::Eq, span).into()];
lit.tokens().append_to_tree_and_joint_vec(&mut vec);

@@ -671,12 +671,12 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
}
}
Some(TokenTree::Token(..)) => {}
None => return TokenStream::empty(),
None => return TokenStream::default(),
}
self.cx.span_err(span, "custom attribute invocations must be \
of the form `#[foo]` or `#[foo(..)]`, the macro name must only be \
followed by a delimiter token");
TokenStream::empty()
TokenStream::default()
}
fn gate_proc_macro_attr_item(&self, span: Span, item: &Annotatable) {

@@ -95,7 +95,7 @@ pub(super) fn transcribe(
) -> TokenStream {
// Nothing for us to transcribe...
if src.is_empty() {
return TokenStream::empty();
return TokenStream::default();
}
// We descend into the RHS (`src`), expanding things as we go. This stack contains the things

@@ -15,7 +15,7 @@ pub fn placeholder(kind: AstFragmentKind, id: ast::NodeId) -> AstFragment {
fn mac_placeholder() -> ast::Mac {
ast::Mac {
path: ast::Path { span: DUMMY_SP, segments: Vec::new() },
tts: TokenStream::empty().into(),
tts: TokenStream::default().into(),
delim: ast::MacDelimiter::Brace,
span: DUMMY_SP,
prior_type_ascription: None,

@@ -393,7 +393,7 @@ impl server::Types for Rustc<'_> {
impl server::TokenStream for Rustc<'_> {
fn new(&mut self) -> Self::TokenStream {
TokenStream::empty()
TokenStream::default()
}
fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
stream.is_empty()

@@ -610,10 +610,8 @@ pub fn noop_visit_tt<T: MutVisitor>(tt: &mut TokenTree, vis: &mut T) {
}
pub fn noop_visit_tts<T: MutVisitor>(TokenStream(tts): &mut TokenStream, vis: &mut T) {
visit_opt(tts, |tts| {
let tts = Lrc::make_mut(tts);
visit_vec(tts, |(tree, _is_joint)| vis.visit_tt(tree));
})
let tts = Lrc::make_mut(tts);
visit_vec(tts, |(tree, _is_joint)| vis.visit_tt(tree));
}
// Applies ident visitor if it's an ident; applies other visits to interpolated nodes.

@@ -203,7 +203,7 @@ impl<'a> Parser<'a> {
};
TokenStream::from_streams(smallvec![eq.into(), tokens])
} else {
TokenStream::empty()
TokenStream::default()
};
ast::AttrItem { path, tokens }
})

@@ -137,13 +137,8 @@ impl TokenTree {
/// The goal is for procedural macros to work with `TokenStream`s and `TokenTree`s
/// instead of a representation of the abstract syntax tree.
/// Today's `TokenTree`s can still contain AST via `token::Interpolated` for back-compat.
///
/// The use of `Option` is an optimization that avoids the need for an
/// allocation when the stream is empty. However, it is not guaranteed that an
/// empty stream is represented with `None`; it may be represented as a `Some`
/// around an empty `Vec`.
#[derive(Clone, Debug)]
pub struct TokenStream(pub Option<Lrc<Vec<TreeAndJoint>>>);
#[derive(Clone, Debug, Default)]
pub struct TokenStream(pub Lrc<Vec<TreeAndJoint>>);
pub type TreeAndJoint = (TokenTree, IsJoint);
@@ -164,36 +159,34 @@ impl TokenStream {
/// separating the two arguments with a comma for diagnostic suggestions.
pub(crate) fn add_comma(&self) -> Option<(TokenStream, Span)> {
// Used to suggest if a user writes `foo!(a b);`
if let Some(ref stream) = self.0 {
let mut suggestion = None;
let mut iter = stream.iter().enumerate().peekable();
while let Some((pos, ts)) = iter.next() {
if let Some((_, next)) = iter.peek() {
let sp = match (&ts, &next) {
(_, (TokenTree::Token(Token { kind: token::Comma, .. }), _)) => continue,
((TokenTree::Token(token_left), NonJoint),
(TokenTree::Token(token_right), _))
if ((token_left.is_ident() && !token_left.is_reserved_ident())
|| token_left.is_lit()) &&
((token_right.is_ident() && !token_right.is_reserved_ident())
|| token_right.is_lit()) => token_left.span,
((TokenTree::Delimited(sp, ..), NonJoint), _) => sp.entire(),
_ => continue,
};
let sp = sp.shrink_to_hi();
let comma = (TokenTree::token(token::Comma, sp), NonJoint);
suggestion = Some((pos, comma, sp));
}
}
if let Some((pos, comma, sp)) = suggestion {
let mut new_stream = vec![];
let parts = stream.split_at(pos + 1);
new_stream.extend_from_slice(parts.0);
new_stream.push(comma);
new_stream.extend_from_slice(parts.1);
return Some((TokenStream::new(new_stream), sp));
let mut suggestion = None;
let mut iter = self.0.iter().enumerate().peekable();
while let Some((pos, ts)) = iter.next() {
if let Some((_, next)) = iter.peek() {
let sp = match (&ts, &next) {
(_, (TokenTree::Token(Token { kind: token::Comma, .. }), _)) => continue,
((TokenTree::Token(token_left), NonJoint),
(TokenTree::Token(token_right), _))
if ((token_left.is_ident() && !token_left.is_reserved_ident())
|| token_left.is_lit()) &&
((token_right.is_ident() && !token_right.is_reserved_ident())
|| token_right.is_lit()) => token_left.span,
((TokenTree::Delimited(sp, ..), NonJoint), _) => sp.entire(),
_ => continue,
};
let sp = sp.shrink_to_hi();
let comma = (TokenTree::token(token::Comma, sp), NonJoint);
suggestion = Some((pos, comma, sp));
}
}
if let Some((pos, comma, sp)) = suggestion {
let mut new_stream = vec![];
let parts = self.0.split_at(pos + 1);
new_stream.extend_from_slice(parts.0);
new_stream.push(comma);
new_stream.extend_from_slice(parts.1);
return Some((TokenStream::new(new_stream), sp));
}
None
}
}
@@ -225,28 +218,21 @@ impl PartialEq<TokenStream> for TokenStream {
}
impl TokenStream {
pub fn len(&self) -> usize {
if let Some(ref slice) = self.0 {
slice.len()
} else {
0
}
}
pub fn empty() -> TokenStream {
TokenStream(None)
pub fn new(streams: Vec<TreeAndJoint>) -> TokenStream {
TokenStream(Lrc::new(streams))
}
pub fn is_empty(&self) -> bool {
match self.0 {
None => true,
Some(ref stream) => stream.is_empty(),
}
self.0.is_empty()
}
pub fn len(&self) -> usize {
self.0.len()
}
pub(crate) fn from_streams(mut streams: SmallVec<[TokenStream; 2]>) -> TokenStream {
match streams.len() {
0 => TokenStream::empty(),
0 => TokenStream::default(),
1 => streams.pop().unwrap(),
_ => {
// We are going to extend the first stream in `streams` with
@@ -270,41 +256,24 @@ impl TokenStream {
// Get the first stream. If it's `None`, create an empty
// stream.
let mut iter = streams.drain();
let mut first_stream_lrc = match iter.next().unwrap().0 {
Some(first_stream_lrc) => first_stream_lrc,
None => Lrc::new(vec![]),
};
let mut first_stream_lrc = iter.next().unwrap().0;
// Append the elements to the first stream, after reserving
// space for them.
let first_vec_mut = Lrc::make_mut(&mut first_stream_lrc);
first_vec_mut.reserve(num_appends);
for stream in iter {
if let Some(stream) = stream.0 {
first_vec_mut.extend(stream.iter().cloned());
}
first_vec_mut.extend(stream.0.iter().cloned());
}
// Create the final `TokenStream`.
match first_vec_mut.len() {
0 => TokenStream(None),
_ => TokenStream(Some(first_stream_lrc)),
}
TokenStream(first_stream_lrc)
}
}
}
pub fn new(streams: Vec<TreeAndJoint>) -> TokenStream {
match streams.len() {
0 => TokenStream(None),
_ => TokenStream(Some(Lrc::new(streams))),
}
}
pub fn append_to_tree_and_joint_vec(self, vec: &mut Vec<TreeAndJoint>) {
if let Some(stream) = self.0 {
vec.extend(stream.iter().cloned());
}
vec.extend(self.0.iter().cloned());
}
pub fn trees(&self) -> Cursor {
@@ -371,24 +340,22 @@ impl TokenStream {
}
pub fn map_enumerated<F: FnMut(usize, TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream {
TokenStream(self.0.map(|stream| {
Lrc::new(
stream
.iter()
.enumerate()
.map(|(i, (tree, is_joint))| (f(i, tree.clone()), *is_joint))
.collect())
}))
TokenStream(Lrc::new(
self.0
.iter()
.enumerate()
.map(|(i, (tree, is_joint))| (f(i, tree.clone()), *is_joint))
.collect()
))
}
pub fn map<F: FnMut(TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream {
TokenStream(self.0.map(|stream| {
Lrc::new(
stream
.iter()
.map(|(tree, is_joint)| (f(tree.clone()), *is_joint))
.collect())
}))
TokenStream(Lrc::new(
self.0
.iter()
.map(|(tree, is_joint)| (f(tree.clone()), *is_joint))
.collect()
))
}
}
@@ -406,44 +373,43 @@ impl TokenStreamBuilder {
// If `self` is not empty and the last tree within the last stream is a
// token tree marked with `Joint`...
if let Some(TokenStream(Some(ref mut last_stream_lrc))) = self.0.last_mut() {
if let Some(TokenStream(ref mut last_stream_lrc)) = self.0.last_mut() {
if let Some((TokenTree::Token(last_token), Joint)) = last_stream_lrc.last() {
// ...and `stream` is not empty and the first tree within it is
// a token tree...
if let TokenStream(Some(ref mut stream_lrc)) = stream {
if let Some((TokenTree::Token(token), is_joint)) = stream_lrc.first() {
let TokenStream(ref mut stream_lrc) = stream;
if let Some((TokenTree::Token(token), is_joint)) = stream_lrc.first() {
// ...and the two tokens can be glued together...
if let Some(glued_tok) = last_token.glue(&token) {
// ...and the two tokens can be glued together...
if let Some(glued_tok) = last_token.glue(&token) {
// ...then do so, by overwriting the last token
// tree in `self` and removing the first token tree
// from `stream`. This requires using `make_mut()`
// on the last stream in `self` and on `stream`,
// and in practice this doesn't cause cloning 99.9%
// of the time.
// ...then do so, by overwriting the last token
// tree in `self` and removing the first token tree
// from `stream`. This requires using `make_mut()`
// on the last stream in `self` and on `stream`,
// and in practice this doesn't cause cloning 99.9%
// of the time.
// Overwrite the last token tree with the merged
// token.
let last_vec_mut = Lrc::make_mut(last_stream_lrc);
*last_vec_mut.last_mut().unwrap() =
(TokenTree::Token(glued_tok), *is_joint);
// Overwrite the last token tree with the merged
// token.
let last_vec_mut = Lrc::make_mut(last_stream_lrc);
*last_vec_mut.last_mut().unwrap() =
(TokenTree::Token(glued_tok), *is_joint);
// Remove the first token tree from `stream`. (This
// is almost always the only tree in `stream`.)
let stream_vec_mut = Lrc::make_mut(stream_lrc);
stream_vec_mut.remove(0);
// Remove the first token tree from `stream`. (This
// is almost always the only tree in `stream`.)
let stream_vec_mut = Lrc::make_mut(stream_lrc);
stream_vec_mut.remove(0);
// Don't push `stream` if it's empty -- that could
// block subsequent token gluing, by getting
// between two token trees that should be glued
// together.
if !stream.is_empty() {
self.0.push(stream);
}
return;
// Don't push `stream` if it's empty -- that could
// block subsequent token gluing, by getting
// between two token trees that should be glued
// together.
if !stream.is_empty() {
self.0.push(stream);
}
return;
}
}
}
@@ -476,16 +442,11 @@ impl Cursor {
}
pub fn next_with_joint(&mut self) -> Option<TreeAndJoint> {
match self.stream.0 {
None => None,
Some(ref stream) => {
if self.index < stream.len() {
self.index += 1;
Some(stream[self.index - 1].clone())
} else {
None
}
}
if self.index < self.stream.len() {
self.index += 1;
Some(self.stream.0[self.index - 1].clone())
} else {
None
}
}
@@ -494,16 +455,13 @@ impl Cursor {
return;
}
let index = self.index;
let stream = mem::replace(&mut self.stream, TokenStream(None));
let stream = mem::take(&mut self.stream);
*self = TokenStream::from_streams(smallvec![stream, new_stream]).into_trees();
self.index = index;
}
pub fn look_ahead(&self, n: usize) -> Option<TokenTree> {
match self.stream.0 {
None => None,
Some(ref stream) => stream[self.index ..].get(n).map(|(tree, _)| tree.clone()),
}
self.stream.0[self.index ..].get(n).map(|(tree, _)| tree.clone())
}
}

@@ -20,7 +20,7 @@ fn plugin_macro_def(name: Name, span: Span) -> P<Item> {
attr::mk_word_item(Ident::new(sym::rustc_builtin_macro, span)));
let parens: TreeAndJoint = TokenTree::Delimited(
DelimSpan::from_single(span), token::Paren, TokenStream::empty()
DelimSpan::from_single(span), token::Paren, TokenStream::default()
).into();
let trees = vec![parens.clone(), TokenTree::token(token::FatArrow, span).into(), parens];