Refactor how spans are combined in the parser.

This commit is contained in:
Jeffrey Seyfried 2017-03-15 00:22:48 +00:00
parent ec7c0aece1
commit f08d5ad4c5
22 changed files with 363 additions and 438 deletions

View file

@@ -29,7 +29,7 @@ use syntax::ast;
use syntax::attr;
use syntax::ptr::P;
use syntax::symbol::keywords;
use syntax_pos::{mk_sp, Span};
use syntax_pos::Span;
use errors::DiagnosticBuilder;
use util::nodemap::{NodeMap, NodeSet, FxHashSet, FxHashMap, DefIdMap};
use rustc_back::slice;
@@ -1468,8 +1468,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> {
self.resolve_lifetime_ref(bound);
} else {
self.insert_lifetime(bound, Region::Static);
let full_span = mk_sp(lifetime_i.lifetime.span.lo, bound.span.hi);
self.sess.struct_span_warn(full_span,
self.sess.struct_span_warn(lifetime_i.lifetime.span.to(bound.span),
&format!("unnecessary lifetime parameter `{}`", lifetime_i.lifetime.name))
.help(&format!("you can use the `'static` lifetime directly, in place \
of `{}`", lifetime_i.lifetime.name))

View file

@@ -36,7 +36,7 @@ use syntax::ast;
use syntax::attr;
use syntax::parse::filemap_to_stream;
use syntax::symbol::Symbol;
use syntax_pos::{mk_sp, Span};
use syntax_pos::{Span, NO_EXPANSION};
use rustc::hir::svh::Svh;
use rustc_back::target::Target;
use rustc::hir;
@@ -395,7 +395,7 @@ impl CrateStore for cstore::CStore {
let source_name = format!("<{} macros>", name);
let filemap = sess.parse_sess.codemap().new_filemap(source_name, None, def.body);
let local_span = mk_sp(filemap.start_pos, filemap.end_pos);
let local_span = Span { lo: filemap.start_pos, hi: filemap.end_pos, ctxt: NO_EXPANSION };
let body = filemap_to_stream(&sess.parse_sess, filemap);
// Mark the attrs as used

View file

@@ -39,7 +39,7 @@ use syntax::attr;
use syntax::ast;
use syntax::codemap;
use syntax::ext::base::MacroKind;
use syntax_pos::{self, Span, BytePos, Pos, DUMMY_SP};
use syntax_pos::{self, Span, BytePos, Pos, DUMMY_SP, NO_EXPANSION};
pub struct DecodeContext<'a, 'tcx: 'a> {
opaque: opaque::Decoder<'a>,
@@ -243,7 +243,7 @@ impl<'a, 'tcx> SpecializedDecoder<Span> for DecodeContext<'a, 'tcx> {
let sess = if let Some(sess) = self.sess {
sess
} else {
return Ok(syntax_pos::mk_sp(lo, hi));
return Ok(Span { lo: lo, hi: hi, ctxt: NO_EXPANSION });
};
let (lo, hi) = if lo > hi {
@@ -290,7 +290,7 @@ impl<'a, 'tcx> SpecializedDecoder<Span> for DecodeContext<'a, 'tcx> {
let lo = (lo - filemap.original_start_pos) + filemap.translated_filemap.start_pos;
let hi = (hi - filemap.original_start_pos) + filemap.translated_filemap.start_pos;
Ok(syntax_pos::mk_sp(lo, hi))
Ok(Span { lo: lo, hi: hi, ctxt: NO_EXPANSION })
}
}

View file

@@ -742,7 +742,7 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> {
let ident_start = text.find(&name).expect("Name not in signature?");
let ident_end = ident_start + name.len();
Signature {
span: mk_sp(item.span.lo, item.span.lo + BytePos(text.len() as u32)),
span: Span { hi: item.span.lo + BytePos(text.len() as u32), ..item.span },
text: text,
ident_start: ident_start,
ident_end: ident_end,

View file

@@ -305,10 +305,10 @@ impl<'a> SpanUtils<'a> {
continue;
}
if let TokenTree::Token(_, token::Semi) = tok {
return self.snippet(mk_sp(first_span.lo, prev.span().hi));
return self.snippet(first_span.to(prev.span()));
} else if let TokenTree::Delimited(_, ref d) = tok {
if d.delim == token::Brace {
return self.snippet(mk_sp(first_span.lo, prev.span().hi));
return self.snippet(first_span.to(prev.span()));
}
}
prev = tok;

View file

@@ -17,7 +17,7 @@ pub use self::PathParameters::*;
pub use symbol::{Ident, Symbol as Name};
pub use util::ThinVec;
use syntax_pos::{mk_sp, BytePos, Span, DUMMY_SP};
use syntax_pos::{Span, DUMMY_SP};
use codemap::{respan, Spanned};
use abi::Abi;
use ext::hygiene::{Mark, SyntaxContext};
@@ -1433,7 +1433,7 @@ impl Arg {
TyKind::Rptr(lt, MutTy{ref ty, mutbl}) if ty.node == TyKind::ImplicitSelf => {
Some(respan(self.pat.span, SelfKind::Region(lt, mutbl)))
}
_ => Some(respan(mk_sp(self.pat.span.lo, self.ty.span.hi),
_ => Some(respan(self.pat.span.to(self.ty.span),
SelfKind::Explicit(self.ty.clone(), mutbl))),
}
}
@@ -1450,7 +1450,7 @@ impl Arg {
}
pub fn from_self(eself: ExplicitSelf, eself_ident: SpannedIdent) -> Arg {
let span = mk_sp(eself.span.lo, eself_ident.span.hi);
let span = eself.span.to(eself_ident.span);
let infer_ty = P(Ty {
id: DUMMY_NODE_ID,
node: TyKind::ImplicitSelf,
@@ -1687,11 +1687,11 @@ pub struct PolyTraitRef {
}
impl PolyTraitRef {
pub fn new(lifetimes: Vec<LifetimeDef>, path: Path, lo: BytePos, hi: BytePos) -> Self {
pub fn new(lifetimes: Vec<LifetimeDef>, path: Path, span: Span) -> Self {
PolyTraitRef {
bound_lifetimes: lifetimes,
trait_ref: TraitRef { path: path, ref_id: DUMMY_NODE_ID },
span: mk_sp(lo, hi),
span: span,
}
}
}

View file

@@ -18,8 +18,8 @@ use ast;
use ast::{AttrId, Attribute, Name, Ident};
use ast::{MetaItem, MetaItemKind, NestedMetaItem, NestedMetaItemKind};
use ast::{Lit, LitKind, Expr, ExprKind, Item, Local, Stmt, StmtKind};
use codemap::{Spanned, spanned, dummy_spanned, mk_sp};
use syntax_pos::{Span, BytePos, DUMMY_SP};
use codemap::{Spanned, respan, dummy_spanned};
use syntax_pos::{Span, DUMMY_SP};
use errors::Handler;
use feature_gate::{Features, GatedCfg};
use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
@@ -447,17 +447,16 @@ pub fn mk_spanned_attr_outer(sp: Span, id: AttrId, item: MetaItem) -> Attribute
}
}
pub fn mk_sugared_doc_attr(id: AttrId, text: Symbol, lo: BytePos, hi: BytePos)
-> Attribute {
pub fn mk_sugared_doc_attr(id: AttrId, text: Symbol, span: Span) -> Attribute {
let style = doc_comment_style(&text.as_str());
let lit = spanned(lo, hi, LitKind::Str(text, ast::StrStyle::Cooked));
let lit = respan(span, LitKind::Str(text, ast::StrStyle::Cooked));
Attribute {
id: id,
style: style,
path: ast::Path::from_ident(mk_sp(lo, hi), ast::Ident::from_str("doc")),
tokens: MetaItemKind::NameValue(lit).tokens(mk_sp(lo, hi)),
path: ast::Path::from_ident(span, ast::Ident::from_str("doc")),
tokens: MetaItemKind::NameValue(lit).tokens(span),
is_sugared_doc: true,
span: mk_sp(lo, hi),
span: span,
}
}

View file

@@ -49,10 +49,6 @@ pub struct Spanned<T> {
pub span: Span,
}
pub fn spanned<T>(lo: BytePos, hi: BytePos, t: T) -> Spanned<T> {
respan(mk_sp(lo, hi), t)
}
pub fn respan<T>(sp: Span, t: T) -> Spanned<T> {
Spanned {node: t, span: sp}
}

View file

@@ -79,7 +79,7 @@ pub use self::ParseResult::*;
use self::TokenTreeOrTokenTreeVec::*;
use ast::Ident;
use syntax_pos::{self, BytePos, mk_sp, Span};
use syntax_pos::{self, BytePos, Span};
use codemap::Spanned;
use errors::FatalError;
use ext::tt::quoted::{self, TokenTree};
@@ -285,7 +285,7 @@ fn inner_parse_loop(sess: &ParseSess,
eof_eis: &mut SmallVector<Box<MatcherPos>>,
bb_eis: &mut SmallVector<Box<MatcherPos>>,
token: &Token,
span: &syntax_pos::Span)
span: syntax_pos::Span)
-> ParseResult<()> {
while let Some(mut ei) = cur_eis.pop() {
// When unzipped trees end, remove them
@@ -323,8 +323,7 @@ fn inner_parse_loop(sess: &ParseSess,
for idx in ei.match_lo..ei.match_hi {
let sub = ei.matches[idx].clone();
new_pos.matches[idx]
.push(Rc::new(MatchedSeq(sub, mk_sp(ei.sp_lo,
span.hi))));
.push(Rc::new(MatchedSeq(sub, Span { lo: ei.sp_lo, ..span })));
}
new_pos.match_cur = ei.match_hi;
@@ -426,7 +425,7 @@ pub fn parse(sess: &ParseSess, tts: TokenStream, ms: &[TokenTree], directory: Op
assert!(next_eis.is_empty());
match inner_parse_loop(sess, &mut cur_eis, &mut next_eis, &mut eof_eis, &mut bb_eis,
&parser.token, &parser.span) {
&parser.token, parser.span) {
Success(_) => {},
Failure(sp, tok) => return Failure(sp, tok),
Error(sp, msg) => return Error(sp, msg),

View file

@@ -10,8 +10,7 @@
use attr;
use ast;
use syntax_pos::{mk_sp, Span};
use codemap::spanned;
use codemap::respan;
use parse::common::SeqSep;
use parse::PResult;
use parse::token::{self, Nonterminal};
@@ -49,8 +48,7 @@ impl<'a> Parser<'a> {
just_parsed_doc_comment = false;
}
token::DocComment(s) => {
let Span { lo, hi, .. } = self.span;
let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, lo, hi);
let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, self.span);
if attr.style != ast::AttrStyle::Outer {
let mut err = self.fatal("expected outer doc comment");
err.note("inner doc comments like this (starting with \
@@ -94,7 +92,7 @@ impl<'a> Parser<'a> {
self.token);
let (span, path, tokens, mut style) = match self.token {
token::Pound => {
let lo = self.span.lo;
let lo = self.span;
self.bump();
if inner_parse_policy == InnerAttributeParsePolicy::Permitted {
@@ -122,9 +120,9 @@ impl<'a> Parser<'a> {
self.expect(&token::OpenDelim(token::Bracket))?;
let (path, tokens) = self.parse_path_and_tokens()?;
self.expect(&token::CloseDelim(token::Bracket))?;
let hi = self.prev_span.hi;
let hi = self.prev_span;
(mk_sp(lo, hi), path, tokens, style)
(lo.to(hi), path, tokens, style)
}
_ => {
let token_str = self.this_token_to_string();
@@ -189,8 +187,7 @@ impl<'a> Parser<'a> {
}
token::DocComment(s) => {
// we need to get the position of this token before we bump.
let Span { lo, hi, .. } = self.span;
let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, lo, hi);
let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, self.span);
if attr.style == ast::AttrStyle::Inner {
attrs.push(attr);
self.bump();
@@ -238,11 +235,10 @@ impl<'a> Parser<'a> {
return Ok(meta);
}
let lo = self.span.lo;
let lo = self.span;
let ident = self.parse_ident()?;
let node = self.parse_meta_item_kind()?;
let hi = self.prev_span.hi;
Ok(ast::MetaItem { name: ident.name, node: node, span: mk_sp(lo, hi) })
Ok(ast::MetaItem { name: ident.name, node: node, span: lo.to(self.prev_span) })
}
pub fn parse_meta_item_kind(&mut self) -> PResult<'a, ast::MetaItemKind> {
@@ -258,26 +254,25 @@ impl<'a> Parser<'a> {
/// matches meta_item_inner : (meta_item | UNSUFFIXED_LIT) ;
fn parse_meta_item_inner(&mut self) -> PResult<'a, ast::NestedMetaItem> {
let sp = self.span;
let lo = self.span.lo;
let lo = self.span;
match self.parse_unsuffixed_lit() {
Ok(lit) => {
return Ok(spanned(lo, self.prev_span.hi, ast::NestedMetaItemKind::Literal(lit)))
return Ok(respan(lo.to(self.prev_span), ast::NestedMetaItemKind::Literal(lit)))
}
Err(ref mut err) => self.diagnostic().cancel(err)
}
match self.parse_meta_item() {
Ok(mi) => {
return Ok(spanned(lo, self.prev_span.hi, ast::NestedMetaItemKind::MetaItem(mi)))
return Ok(respan(lo.to(self.prev_span), ast::NestedMetaItemKind::MetaItem(mi)))
}
Err(ref mut err) => self.diagnostic().cancel(err)
}
let found = self.this_token_to_string();
let msg = format!("expected unsuffixed literal or identifier, found {}", found);
Err(self.diagnostic().struct_span_err(sp, &msg))
Err(self.diagnostic().struct_span_err(lo, &msg))
}
/// matches meta_seq = ( COMMASEP(meta_item_inner) )

View file

@@ -9,7 +9,7 @@
// except according to those terms.
use ast::{self, Ident};
use syntax_pos::{self, BytePos, CharPos, Pos, Span};
use syntax_pos::{self, BytePos, CharPos, Pos, Span, NO_EXPANSION};
use codemap::CodeMap;
use errors::{FatalError, DiagnosticBuilder};
use parse::{token, ParseSess};
@@ -68,6 +68,10 @@ pub struct StringReader<'a> {
open_braces: Vec<(token::DelimToken, Span)>,
}
fn mk_sp(lo: BytePos, hi: BytePos) -> Span {
Span { lo: lo, hi: hi, ctxt: NO_EXPANSION }
}
impl<'a> StringReader<'a> {
fn next_token(&mut self) -> TokenAndSpan where Self: Sized {
let res = self.try_next_token();
@@ -225,12 +229,12 @@ impl<'a> StringReader<'a> {
/// Report a fatal error spanning [`from_pos`, `to_pos`).
fn fatal_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) -> FatalError {
self.fatal_span(syntax_pos::mk_sp(from_pos, to_pos), m)
self.fatal_span(mk_sp(from_pos, to_pos), m)
}
/// Report a lexical error spanning [`from_pos`, `to_pos`).
fn err_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) {
self.err_span(syntax_pos::mk_sp(from_pos, to_pos), m)
self.err_span(mk_sp(from_pos, to_pos), m)
}
/// Report a lexical error spanning [`from_pos`, `to_pos`), appending an
@@ -254,7 +258,7 @@ impl<'a> StringReader<'a> {
for c in c.escape_default() {
m.push(c)
}
self.sess.span_diagnostic.struct_span_fatal(syntax_pos::mk_sp(from_pos, to_pos), &m[..])
self.sess.span_diagnostic.struct_span_fatal(mk_sp(from_pos, to_pos), &m[..])
}
/// Report a lexical error spanning [`from_pos`, `to_pos`), appending an
@@ -278,7 +282,7 @@ impl<'a> StringReader<'a> {
for c in c.escape_default() {
m.push(c)
}
self.sess.span_diagnostic.struct_span_err(syntax_pos::mk_sp(from_pos, to_pos), &m[..])
self.sess.span_diagnostic.struct_span_err(mk_sp(from_pos, to_pos), &m[..])
}
/// Report a lexical error spanning [`from_pos`, `to_pos`), appending the
@@ -302,11 +306,11 @@ impl<'a> StringReader<'a> {
None => {
if self.is_eof() {
self.peek_tok = token::Eof;
self.peek_span = syntax_pos::mk_sp(self.filemap.end_pos, self.filemap.end_pos);
self.peek_span = mk_sp(self.filemap.end_pos, self.filemap.end_pos);
} else {
let start_bytepos = self.pos;
self.peek_tok = self.next_token_inner()?;
self.peek_span = syntax_pos::mk_sp(start_bytepos, self.pos);
self.peek_span = mk_sp(start_bytepos, self.pos);
};
}
}
@@ -489,7 +493,7 @@ impl<'a> StringReader<'a> {
if let Some(c) = self.ch {
if c.is_whitespace() {
let msg = "called consume_any_line_comment, but there was whitespace";
self.sess.span_diagnostic.span_err(syntax_pos::mk_sp(self.pos, self.pos), msg);
self.sess.span_diagnostic.span_err(mk_sp(self.pos, self.pos), msg);
}
}
@@ -532,13 +536,13 @@ impl<'a> StringReader<'a> {
Some(TokenAndSpan {
tok: tok,
sp: syntax_pos::mk_sp(start_bpos, self.pos),
sp: mk_sp(start_bpos, self.pos),
})
})
} else {
Some(TokenAndSpan {
tok: token::Comment,
sp: syntax_pos::mk_sp(start_bpos, self.pos),
sp: mk_sp(start_bpos, self.pos),
})
};
}
@@ -571,7 +575,7 @@ impl<'a> StringReader<'a> {
}
return Some(TokenAndSpan {
tok: token::Shebang(self.name_from(start)),
sp: syntax_pos::mk_sp(start, self.pos),
sp: mk_sp(start, self.pos),
});
}
}
@@ -599,7 +603,7 @@ impl<'a> StringReader<'a> {
}
let c = Some(TokenAndSpan {
tok: token::Whitespace,
sp: syntax_pos::mk_sp(start_bpos, self.pos),
sp: mk_sp(start_bpos, self.pos),
});
debug!("scanning whitespace: {:?}", c);
c
@@ -661,7 +665,7 @@ impl<'a> StringReader<'a> {
Some(TokenAndSpan {
tok: tok,
sp: syntax_pos::mk_sp(start_bpos, self.pos),
sp: mk_sp(start_bpos, self.pos),
})
})
}
@@ -858,7 +862,7 @@ impl<'a> StringReader<'a> {
let valid = if self.ch_is('{') {
self.scan_unicode_escape(delim) && !ascii_only
} else {
let span = syntax_pos::mk_sp(start, self.pos);
let span = mk_sp(start, self.pos);
self.sess.span_diagnostic
.struct_span_err(span, "incorrect unicode escape sequence")
.span_help(span,
@@ -896,13 +900,13 @@ impl<'a> StringReader<'a> {
},
c);
if e == '\r' {
err.span_help(syntax_pos::mk_sp(escaped_pos, pos),
err.span_help(mk_sp(escaped_pos, pos),
"this is an isolated carriage return; consider \
checking your editor and version control \
settings");
}
if (e == '{' || e == '}') && !ascii_only {
err.span_help(syntax_pos::mk_sp(escaped_pos, pos),
err.span_help(mk_sp(escaped_pos, pos),
"if used in a formatting string, curly braces \
are escaped with `{{` and `}}`");
}
@@ -1735,7 +1739,7 @@ mod tests {
sp: Span {
lo: BytePos(21),
hi: BytePos(23),
expn_id: NO_EXPANSION,
ctxt: NO_EXPANSION,
},
};
assert_eq!(tok1, tok2);
@@ -1749,7 +1753,7 @@ mod tests {
sp: Span {
lo: BytePos(24),
hi: BytePos(28),
expn_id: NO_EXPANSION,
ctxt: NO_EXPANSION,
},
};
assert_eq!(tok3, tok4);
@@ -1908,7 +1912,7 @@ mod tests {
let mut lexer = setup(&cm, &sh, "// test\r\n/// test\r\n".to_string());
let comment = lexer.next_token();
assert_eq!(comment.tok, token::Comment);
assert_eq!(comment.sp, ::syntax_pos::mk_sp(BytePos(0), BytePos(7)));
assert_eq!((comment.sp.lo, comment.sp.hi), (BytePos(0), BytePos(7)));
assert_eq!(lexer.next_token().tok, token::Whitespace);
assert_eq!(lexer.next_token().tok,
token::DocComment(Symbol::intern("/// test")));

View file

@@ -11,7 +11,7 @@
// Characters and their corresponding confusables were collected from
// http://www.unicode.org/Public/security/revision-06/confusables.txt
use syntax_pos::mk_sp as make_span;
use syntax_pos::{Span, NO_EXPANSION};
use errors::DiagnosticBuilder;
use super::StringReader;
@@ -234,7 +234,7 @@ pub fn check_for_substitution<'a>(reader: &StringReader<'a>,
.iter()
.find(|&&(c, _, _)| c == ch)
.map(|&(_, u_name, ascii_char)| {
let span = make_span(reader.pos, reader.next_pos);
let span = Span { lo: reader.pos, hi: reader.next_pos, ctxt: NO_EXPANSION };
match ASCII_ARRAY.iter().find(|&&(c, _)| c == ascii_char) {
Some(&(ascii_char, ascii_name)) => {
let msg =

View file

@@ -12,7 +12,7 @@
use ast::{self, CrateConfig};
use codemap::CodeMap;
use syntax_pos::{self, Span, FileMap};
use syntax_pos::{self, Span, FileMap, NO_EXPANSION};
use errors::{Handler, ColorConfig, DiagnosticBuilder};
use feature_gate::UnstableFeatures;
use parse::parser::Parser;
@@ -178,7 +178,7 @@ pub fn filemap_to_parser<'a>(sess: &'a ParseSess, filemap: Rc<FileMap>, ) -> Par
let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap));
if parser.token == token::Eof && parser.span == syntax_pos::DUMMY_SP {
parser.span = syntax_pos::mk_sp(end_pos, end_pos);
parser.span = Span { lo: end_pos, hi: end_pos, ctxt: NO_EXPANSION };
}
parser
@@ -665,7 +665,7 @@ mod tests {
// produce a syntax_pos::span
fn sp(a: u32, b: u32) -> Span {
Span {lo: BytePos(a), hi: BytePos(b), expn_id: NO_EXPANSION}
Span {lo: BytePos(a), hi: BytePos(b), ctxt: NO_EXPANSION}
}
fn str2seg(s: &str, lo: u32, hi: u32) -> ast::PathSegment {

File diff suppressed because it is too large Load diff

View file

@@ -559,11 +559,7 @@ impl<'a, 'b> Context<'a, 'b> {
let name = self.ecx.ident_of(&format!("__arg{}", i));
pats.push(self.ecx.pat_ident(DUMMY_SP, name));
for ref arg_ty in self.arg_unique_types[i].iter() {
locals.push(Context::format_arg(self.ecx,
self.macsp,
e.span,
arg_ty,
self.ecx.expr_ident(e.span, name)));
locals.push(Context::format_arg(self.ecx, self.macsp, e.span, arg_ty, name));
}
heads.push(self.ecx.expr_addr_of(e.span, e));
}
@@ -576,11 +572,7 @@ impl<'a, 'b> Context<'a, 'b> {
Exact(i) => spans_pos[i],
_ => panic!("should never happen"),
};
counts.push(Context::format_arg(self.ecx,
self.macsp,
span,
&Count,
self.ecx.expr_ident(span, name)));
counts.push(Context::format_arg(self.ecx, self.macsp, span, &Count, name));
}
// Now create a vector containing all the arguments
@@ -643,9 +635,10 @@ impl<'a, 'b> Context<'a, 'b> {
macsp: Span,
mut sp: Span,
ty: &ArgumentType,
arg: P<ast::Expr>)
arg: ast::Ident)
-> P<ast::Expr> {
sp.ctxt = sp.ctxt.apply_mark(ecx.current_expansion.mark);
let arg = ecx.expr_ident(sp, arg);
let trait_ = match *ty {
Placeholder(ref tyname) => {
match &tyname[..] {

View file

@@ -174,6 +174,15 @@ impl Span {
}
result
}
pub fn to(self, end: Span) -> Span {
// FIXME(jseyfried): self.ctxt should always equal end.ctxt here (c.f. issue #23480)
if end.ctxt == SyntaxContext::empty() {
Span { lo: self.lo, ..end }
} else {
Span { hi: end.hi, ..self }
}
}
}
#[derive(Clone, Debug)]
@@ -208,7 +217,7 @@ impl serialize::UseSpecializedDecodable for Span {
d.read_struct("Span", 2, |d| {
let lo = d.read_struct_field("lo", 0, Decodable::decode)?;
let hi = d.read_struct_field("hi", 1, Decodable::decode)?;
Ok(mk_sp(lo, hi))
Ok(Span { lo: lo, hi: hi, ctxt: NO_EXPANSION })
})
}
}
@@ -696,11 +705,6 @@ pub struct FileLines {
thread_local!(pub static SPAN_DEBUG: Cell<fn(Span, &mut fmt::Formatter) -> fmt::Result> =
Cell::new(default_span_debug));
/* assuming that we're not in macro expansion */
pub fn mk_sp(lo: BytePos, hi: BytePos) -> Span {
Span {lo: lo, hi: hi, ctxt: NO_EXPANSION}
}
pub struct MacroBacktrace {
/// span where macro was applied to generate this code
pub call_site: Span,

View file

@@ -25,7 +25,6 @@ fn f() {
bar::m! { //~ ERROR ambiguous
//~| NOTE macro-expanded items do not shadow when used in a macro invocation path
mod bar { pub use two_macros::m; } //~ NOTE could refer to the name defined here
//~^^^ NOTE in this expansion
}
}
@@ -37,6 +36,5 @@ fn g() {
baz::m! { //~ ERROR ambiguous
//~| NOTE macro-expanded items do not shadow when used in a macro invocation path
mod baz { pub use two_macros::m; } //~ NOTE could refer to the name defined here
//~^^^ NOTE in this expansion
}
}

View file

@@ -28,7 +28,6 @@ mod m2 {
m! { //~ ERROR ambiguous
//~| NOTE macro-expanded macro imports do not shadow
use foo::m; //~ NOTE could refer to the name imported here
//~^^^ NOTE in this expansion
}
}
@@ -43,7 +42,6 @@ mod m3 {
m! { //~ ERROR ambiguous
//~| NOTE macro-expanded macro imports do not shadow
use two_macros::n as m; //~ NOTE could refer to the name imported here
//~^^^ NOTE in this expansion
}
}
}

View file

@@ -31,7 +31,6 @@ mod m2 {
mod m3 {
::two_macros::m!(use foo::panic;); //~ NOTE `panic` could refer to the name imported here
//~| NOTE in this expansion
fn f() { panic!(); } //~ ERROR ambiguous
//~| NOTE `panic` is also a builtin macro
//~| NOTE macro-expanded macro imports do not shadow

View file

@@ -21,5 +21,4 @@ fn main() {
foo!(1i32.foo());
//~^ ERROR no method named `foo` found for type `i32` in the current scope
//~^^ NOTE in this expansion of foo!
}

View file

@@ -22,7 +22,7 @@ macro_rules! indirect_line { () => ( line!() ) }
pub fn main() {
assert_eq!(line!(), 24);
assert_eq!(column!(), 4);
assert_eq!(column!(), 15);
assert_eq!(indirect_line!(), 26);
assert!((file!().ends_with("syntax-extension-source-utils.rs")));
assert_eq!(stringify!((2*3) + 5).to_string(), "( 2 * 3 ) + 5".to_string());

View file

@@ -2,10 +2,7 @@ error[E0433]: failed to resolve. Use of undeclared type or module `m`
--> $DIR/macro_path_as_generic_bound.rs:17:6
|
17 | foo!(m::m2::A);
| -----^^^^^^^^--
| | |
| | Use of undeclared type or module `m`
| in this macro invocation
| ^^^^^^^^ Use of undeclared type or module `m`
error: cannot continue compilation due to previous error