Implement slow-path for FirstSets::first
When 2 or more sequences share the same span, we can't use the precomputed map for their first set. So we compute it recursively. Fixes #62831.
This commit is contained in:
parent
eedf6ce4ef
commit
df4b23e721
4 changed files with 119 additions and 27 deletions
|
@@ -625,38 +625,37 @@ impl FirstSets {
|
|||
return first;
|
||||
}
|
||||
TokenTree::Sequence(sp, ref seq_rep) => {
|
||||
match self.first.get(&sp.entire()) {
|
||||
Some(&Some(ref subfirst)) => {
|
||||
// If the sequence contents can be empty, then the first
|
||||
// token could be the separator token itself.
|
||||
|
||||
if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
|
||||
first.add_one_maybe(TokenTree::Token(sep.clone()));
|
||||
}
|
||||
|
||||
assert!(first.maybe_empty);
|
||||
first.add_all(subfirst);
|
||||
if subfirst.maybe_empty
|
||||
|| seq_rep.kleene.op == quoted::KleeneOp::ZeroOrMore
|
||||
|| seq_rep.kleene.op == quoted::KleeneOp::ZeroOrOne
|
||||
{
|
||||
// continue scanning for more first
|
||||
// tokens, but also make sure we
|
||||
// restore empty-tracking state
|
||||
first.maybe_empty = true;
|
||||
continue;
|
||||
} else {
|
||||
return first;
|
||||
}
|
||||
}
|
||||
|
||||
let subfirst_owned;
|
||||
let subfirst = match self.first.get(&sp.entire()) {
|
||||
Some(&Some(ref subfirst)) => subfirst,
|
||||
Some(&None) => {
|
||||
panic!("assume all sequences have (unique) spans for now");
|
||||
subfirst_owned = self.first(&seq_rep.tts[..]);
|
||||
&subfirst_owned
|
||||
}
|
||||
|
||||
None => {
|
||||
panic!("We missed a sequence during FirstSets construction");
|
||||
}
|
||||
};
|
||||
|
||||
// If the sequence contents can be empty, then the first
|
||||
// token could be the separator token itself.
|
||||
if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
|
||||
first.add_one_maybe(TokenTree::Token(sep.clone()));
|
||||
}
|
||||
|
||||
assert!(first.maybe_empty);
|
||||
first.add_all(subfirst);
|
||||
if subfirst.maybe_empty
|
||||
|| seq_rep.kleene.op == quoted::KleeneOp::ZeroOrMore
|
||||
|| seq_rep.kleene.op == quoted::KleeneOp::ZeroOrOne
|
||||
{
|
||||
// Continue scanning for more first
|
||||
// tokens, but also make sure we
|
||||
// restore empty-tracking state.
|
||||
first.maybe_empty = true;
|
||||
continue;
|
||||
} else {
|
||||
return first;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
36
src/test/ui/macros/auxiliary/proc_macro_sequence.rs
Normal file
36
src/test/ui/macros/auxiliary/proc_macro_sequence.rs
Normal file
|
@@ -0,0 +1,36 @@
|
|||
// force-host
|
||||
// no-prefer-dynamic
|
||||
|
||||
#![crate_type = "proc-macro"]
|
||||
#![feature(proc_macro_span, proc_macro_hygiene, proc_macro_quote)]
|
||||
|
||||
extern crate proc_macro;
|
||||
|
||||
use proc_macro::{quote, Span, TokenStream};
|
||||
|
||||
fn assert_same_span(a: Span, b: Span) {
|
||||
assert_eq!(a.start(), b.start());
|
||||
assert_eq!(a.end(), b.end());
|
||||
}
|
||||
|
||||
// This macro generates a macro with the same macro definition as `manual_foo` in
|
||||
// `same-sequence-span.rs` but with the same span for all sequences.
|
||||
#[proc_macro]
|
||||
pub fn make_foo(_: TokenStream) -> TokenStream {
|
||||
let result = quote! {
|
||||
macro_rules! generated_foo {
|
||||
(1 $$x:expr $$($$y:tt,)* $$(= $$z:tt)*) => {};
|
||||
}
|
||||
};
|
||||
|
||||
// Check that all spans are equal.
|
||||
let mut span = None;
|
||||
for tt in result.clone() {
|
||||
match span {
|
||||
None => span = Some(tt.span()),
|
||||
Some(span) => assert_same_span(tt.span(), span),
|
||||
}
|
||||
}
|
||||
|
||||
result
|
||||
}
|
23
src/test/ui/macros/same-sequence-span.rs
Normal file
23
src/test/ui/macros/same-sequence-span.rs
Normal file
|
@@ -0,0 +1,23 @@
|
|||
// aux-build:proc_macro_sequence.rs
|
||||
|
||||
// Regression test for issue #62831: Check that multiple sequences with the same span in the
|
||||
// left-hand side of a macro definition behave as if they had unique spans, and in particular that
|
||||
// they don't crash the compiler.
|
||||
|
||||
#![feature(proc_macro_hygiene)]
|
||||
#![allow(unused_macros)]
|
||||
|
||||
extern crate proc_macro_sequence;
|
||||
|
||||
// When ignoring spans, this macro has the same macro definition as `generated_foo` in
|
||||
// `proc_macro_sequence.rs`.
|
||||
macro_rules! manual_foo {
|
||||
(1 $x:expr $($y:tt,)* //~ERROR `$x:expr` may be followed by `$y:tt`
|
||||
$(= $z:tt)* //~ERROR `$x:expr` may be followed by `=`
|
||||
) => {};
|
||||
}
|
||||
|
||||
proc_macro_sequence::make_foo!(); //~ERROR `$x:expr` may be followed by `$y:tt`
|
||||
//~^ERROR `$x:expr` may be followed by `=`
|
||||
|
||||
fn main() {}
|
34
src/test/ui/macros/same-sequence-span.stderr
Normal file
34
src/test/ui/macros/same-sequence-span.stderr
Normal file
|
@@ -0,0 +1,34 @@
|
|||
error: `$x:expr` may be followed by `$y:tt`, which is not allowed for `expr` fragments
  --> $DIR/same-sequence-span.rs:15:18
   |
LL |     (1 $x:expr $($y:tt,)*
   |                  ^^^^^ not allowed after `expr` fragments
   |
   = note: allowed there are: `=>`, `,` or `;`

error: `$x:expr` may be followed by `=`, which is not allowed for `expr` fragments
  --> $DIR/same-sequence-span.rs:16:18
   |
LL |        $(= $z:tt)*
   |          ^ not allowed after `expr` fragments
   |
   = note: allowed there are: `=>`, `,` or `;`

error: `$x:expr` may be followed by `$y:tt`, which is not allowed for `expr` fragments
  --> $DIR/same-sequence-span.rs:20:1
   |
LL | proc_macro_sequence::make_foo!();
   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ not allowed after `expr` fragments
   |
   = note: allowed there are: `=>`, `,` or `;`

error: `$x:expr` may be followed by `=`, which is not allowed for `expr` fragments
  --> $DIR/same-sequence-span.rs:20:1
   |
LL | proc_macro_sequence::make_foo!();
   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ not allowed after `expr` fragments
   |
   = note: allowed there are: `=>`, `,` or `;`

error: aborting due to 4 previous errors
|
||||
|
Loading…
Reference in a new issue