Auto merge of #23857 - phildawes:libsyntax_nopanic, r=nikomatsakis

Hello! 

I've been working towards a libsyntax without panics. See:
http://internals.rust-lang.org/t/changing-libsyntax-to-use-result-instead-of-panic/1670

This patch changes the internals of parser.rs to use Result<> rather than panicking. It keeps the following old-style panicking functions as a facade:
parse_expr, parse_item, parse_pat, parse_arm, parse_ty, parse_stmt

I left these functions because I wasn't sure what to do about the quote_* macros or how many syntax-extensions would break if these and quoting macros returned Result.

The gist of the rest of the patch is:

 - Functions in parse/parser.rs return PResult<> rather than panicking
 - Other functions in libsyntax call panic! explicitly if they rely on panicking behaviour.
 - I added a macro 'panictry!()' to act as scaffolding for callers while converting panicking functions. (This does the same as 'unwrap()' but is easier to grep for and turn into try!()).

Am I on the right track?  I'd quite like to get something merged soon as keeping this rebased in the face of libsyntax changes is a lot of work. Please let me know what changes you'd like to see to make this happen.

Thanks!, Phil
This commit is contained in:
bors 2015-04-06 22:08:01 +00:00
commit b49a5ef003
23 changed files with 1397 additions and 1318 deletions

View file

@ -528,7 +528,10 @@ impl<'a> CrateReader<'a> {
source_name.clone(),
body);
let lo = p.span.lo;
let body = p.parse_all_token_trees();
let body = match p.parse_all_token_trees() {
Ok(body) => body,
Err(err) => panic!(err),
};
let span = mk_sp(lo, p.last_span.hi);
p.abort_if_errors();
macros.push(ast::MacroDef {

View file

@ -68,13 +68,13 @@ impl Session {
if self.opts.treat_err_as_bug {
self.span_bug(sp, msg);
}
self.diagnostic().span_fatal(sp, msg)
panic!(self.diagnostic().span_fatal(sp, msg))
}
pub fn span_fatal_with_code(&self, sp: Span, msg: &str, code: &str) -> ! {
if self.opts.treat_err_as_bug {
self.span_bug(sp, msg);
}
self.diagnostic().span_fatal_with_code(sp, msg, code)
panic!(self.diagnostic().span_fatal_with_code(sp, msg, code))
}
pub fn fatal(&self, msg: &str) -> ! {
if self.opts.treat_err_as_bug {

View file

@ -503,8 +503,8 @@ pub fn require_unique_names(diagnostic: &SpanHandler, metas: &[P<MetaItem>]) {
let name = meta.name();
if !set.insert(name.clone()) {
diagnostic.span_fatal(meta.span,
&format!("duplicate meta item `{}`", name));
panic!(diagnostic.span_fatal(meta.span,
&format!("duplicate meta item `{}`", name)));
}
}
}

View file

@ -68,10 +68,11 @@ pub trait Emitter {
sp: RenderSpan, msg: &str, lvl: Level);
}
/// This structure is used to signify that a task has panicked with a fatal error
/// from the diagnostics. You can use this with the `Any` trait to figure out
/// how a rustc task died (if so desired).
/// Used as a return value to signify a fatal error occurred. (It is also
/// used as the argument to panic at the moment, but that will eventually
/// not be true.)
#[derive(Copy, Clone)]
#[must_use]
pub struct FatalError;
/// Signifies that the compiler died with an explicit call to `.bug`
@ -88,13 +89,13 @@ pub struct SpanHandler {
}
impl SpanHandler {
pub fn span_fatal(&self, sp: Span, msg: &str) -> ! {
pub fn span_fatal(&self, sp: Span, msg: &str) -> FatalError {
self.handler.emit(Some((&self.cm, sp)), msg, Fatal);
panic!(FatalError);
return FatalError;
}
pub fn span_fatal_with_code(&self, sp: Span, msg: &str, code: &str) -> ! {
pub fn span_fatal_with_code(&self, sp: Span, msg: &str, code: &str) -> FatalError {
self.handler.emit_with_code(Some((&self.cm, sp)), msg, code, Fatal);
panic!(FatalError);
return FatalError;
}
pub fn span_err(&self, sp: Span, msg: &str) {
self.handler.emit(Some((&self.cm, sp)), msg, Error);

View file

@ -91,16 +91,16 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
p.token != token::ModSep {
if outputs.len() != 0 {
p.eat(&token::Comma);
panictry!(p.eat(&token::Comma));
}
let (constraint, _str_style) = p.parse_str();
let (constraint, _str_style) = panictry!(p.parse_str());
let span = p.last_span;
p.expect(&token::OpenDelim(token::Paren));
panictry!(p.expect(&token::OpenDelim(token::Paren)));
let out = p.parse_expr();
p.expect(&token::CloseDelim(token::Paren));
panictry!(p.expect(&token::CloseDelim(token::Paren)));
// Expands a read+write operand into two operands.
//
@ -131,10 +131,10 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
p.token != token::ModSep {
if inputs.len() != 0 {
p.eat(&token::Comma);
panictry!(p.eat(&token::Comma));
}
let (constraint, _str_style) = p.parse_str();
let (constraint, _str_style) = panictry!(p.parse_str());
if constraint.starts_with("=") {
cx.span_err(p.last_span, "input operand constraint contains '='");
@ -142,9 +142,9 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
cx.span_err(p.last_span, "input operand constraint contains '+'");
}
p.expect(&token::OpenDelim(token::Paren));
panictry!(p.expect(&token::OpenDelim(token::Paren)));
let input = p.parse_expr();
p.expect(&token::CloseDelim(token::Paren));
panictry!(p.expect(&token::CloseDelim(token::Paren)));
inputs.push((constraint, input));
}
@ -155,10 +155,10 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
p.token != token::ModSep {
if clobs.len() != 0 {
p.eat(&token::Comma);
panictry!(p.eat(&token::Comma));
}
let (s, _str_style) = p.parse_str();
let (s, _str_style) = panictry!(p.parse_str());
if OPTIONS.iter().any(|&opt| s == opt) {
cx.span_warn(p.last_span, "expected a clobber, found an option");
@ -167,7 +167,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
}
}
Options => {
let (option, _str_style) = p.parse_str();
let (option, _str_style) = panictry!(p.parse_str());
if option == "volatile" {
// Indicates that the inline assembly has side effects
@ -182,7 +182,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
}
if p.token == token::Comma {
p.eat(&token::Comma);
panictry!(p.eat(&token::Comma));
}
}
StateNone => ()
@ -194,12 +194,12 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
match (&p.token, state.next(), state.next().next()) {
(&token::Colon, StateNone, _) |
(&token::ModSep, _, StateNone) => {
p.bump();
panictry!(p.bump());
break 'statement;
}
(&token::Colon, st, _) |
(&token::ModSep, _, st) => {
p.bump();
panictry!(p.bump());
state = st;
}
(&token::Eof, _, _) => break 'statement,

View file

@ -652,9 +652,9 @@ impl<'a> ExtCtxt<'a> {
pub fn bt_push(&mut self, ei: ExpnInfo) {
self.recursion_count += 1;
if self.recursion_count > self.ecfg.recursion_limit {
self.span_fatal(ei.call_site,
panic!(self.span_fatal(ei.call_site,
&format!("recursion limit reached while expanding the macro `{}`",
ei.callee.name));
ei.callee.name)));
}
let mut call_site = ei.call_site;
@ -699,7 +699,7 @@ impl<'a> ExtCtxt<'a> {
/// value doesn't have to match anything)
pub fn span_fatal(&self, sp: Span, msg: &str) -> ! {
self.print_backtrace();
self.parse_sess.span_diagnostic.span_fatal(sp, msg);
panic!(self.parse_sess.span_diagnostic.span_fatal(sp, msg));
}
/// Emit `msg` attached to `sp`, without immediately stopping
@ -817,7 +817,7 @@ pub fn get_exprs_from_tts(cx: &mut ExtCtxt,
let mut es = Vec::new();
while p.token != token::Eof {
es.push(cx.expander().fold_expr(p.parse_expr()));
if p.eat(&token::Comma) {
if panictry!(p.eat(&token::Comma)){
continue;
}
if p.token != token::Eof {

View file

@ -29,7 +29,7 @@ pub fn expand_cfg<'cx>(cx: &mut ExtCtxt,
let mut p = cx.new_parser_from_tts(tts);
let cfg = p.parse_meta_item();
if !p.eat(&token::Eof) {
if !panictry!(p.eat(&token::Eof)){
cx.span_err(sp, "expected 1 cfg-pattern");
return DummyResult::expr(sp);
}

View file

@ -1684,7 +1684,7 @@ mod test {
fn expand_crate_str(crate_str: String) -> ast::Crate {
let ps = parse::new_parse_sess();
let crate_ast = string_to_parser(&ps, crate_str).parse_crate_mod();
let crate_ast = panictry!(string_to_parser(&ps, crate_str).parse_crate_mod());
// the cfg argument actually does matter, here...
expand_crate(&ps,test_ecfg(),vec!(),vec!(),crate_ast)
}

View file

@ -92,7 +92,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
let fmtstr = p.parse_expr();
let mut named = false;
while p.token != token::Eof {
if !p.eat(&token::Comma) {
if !panictry!(p.eat(&token::Comma)) {
ecx.span_err(sp, "expected token: `,`");
return None;
}
@ -101,7 +101,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
named = true;
let ident = match p.token {
token::Ident(i, _) => {
p.bump();
panictry!(p.bump());
i
}
_ if named => {
@ -120,7 +120,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
let interned_name = token::get_ident(ident);
let name = &interned_name[..];
p.expect(&token::Eq);
panictry!(p.expect(&token::Eq));
let e = p.parse_expr();
match names.get(name) {
None => {}

View file

@ -781,11 +781,11 @@ fn parse_arguments_to_quote(cx: &ExtCtxt, tts: &[ast::TokenTree])
p.quote_depth += 1;
let cx_expr = p.parse_expr();
if !p.eat(&token::Comma) {
p.fatal("expected token `,`");
if !panictry!(p.eat(&token::Comma)) {
panic!(p.fatal("expected token `,`"));
}
let tts = p.parse_all_token_trees();
let tts = panictry!(p.parse_all_token_trees());
p.abort_if_errors();
(cx_expr, tts)

View file

@ -117,11 +117,11 @@ pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree
while self.p.token != token::Eof {
match self.p.parse_item() {
Some(item) => ret.push(item),
None => self.p.span_fatal(
None => panic!(self.p.span_fatal(
self.p.span,
&format!("expected item, found `{}`",
self.p.this_token_to_string())
)
))
}
}
Some(ret)

View file

@ -226,10 +226,10 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
}
Occupied(..) => {
let string = token::get_ident(bind_name);
p_s.span_diagnostic
panic!(p_s.span_diagnostic
.span_fatal(sp,
&format!("duplicated bind name: {}",
&string))
&string)))
}
}
}
@ -260,10 +260,10 @@ pub fn parse_or_else(sess: &ParseSess,
match parse(sess, cfg, rdr, &ms[..]) {
Success(m) => m,
Failure(sp, str) => {
sess.span_diagnostic.span_fatal(sp, &str[..])
panic!(sess.span_diagnostic.span_fatal(sp, &str[..]))
}
Error(sp, str) => {
sess.span_diagnostic.span_fatal(sp, &str[..])
panic!(sess.span_diagnostic.span_fatal(sp, &str[..]))
}
}
}
@ -512,46 +512,46 @@ pub fn parse_nt(p: &mut Parser, sp: Span, name: &str) -> Nonterminal {
match name {
"tt" => {
p.quote_depth += 1; //but in theory, non-quoted tts might be useful
let res = token::NtTT(P(p.parse_token_tree()));
let res = token::NtTT(P(panictry!(p.parse_token_tree())));
p.quote_depth -= 1;
return res;
}
_ => {}
}
// check at the beginning and the parser checks after each bump
p.check_unknown_macro_variable();
panictry!(p.check_unknown_macro_variable());
match name {
"item" => match p.parse_item() {
Some(i) => token::NtItem(i),
None => p.fatal("expected an item keyword")
None => panic!(p.fatal("expected an item keyword"))
},
"block" => token::NtBlock(p.parse_block()),
"block" => token::NtBlock(panictry!(p.parse_block())),
"stmt" => match p.parse_stmt() {
Some(s) => token::NtStmt(s),
None => p.fatal("expected a statement")
None => panic!(p.fatal("expected a statement"))
},
"pat" => token::NtPat(p.parse_pat()),
"expr" => token::NtExpr(p.parse_expr()),
"ty" => token::NtTy(p.parse_ty()),
// this could be handled like a token, since it is one
"ident" => match p.token {
token::Ident(sn,b) => { p.bump(); token::NtIdent(box sn,b) }
token::Ident(sn,b) => { panictry!(p.bump()); token::NtIdent(box sn,b) }
_ => {
let token_str = pprust::token_to_string(&p.token);
p.fatal(&format!("expected ident, found {}",
&token_str[..]))
panic!(p.fatal(&format!("expected ident, found {}",
&token_str[..])))
}
},
"path" => {
token::NtPath(box p.parse_path(LifetimeAndTypesWithoutColons))
token::NtPath(box panictry!(p.parse_path(LifetimeAndTypesWithoutColons)))
}
"meta" => token::NtMeta(p.parse_meta_item()),
_ => {
p.span_fatal_help(sp,
panic!(p.span_fatal_help(sp,
&format!("invalid fragment specifier `{}`", name),
"valid fragment specifiers are `ident`, `block`, \
`stmt`, `expr`, `pat`, `ty`, `path`, `meta`, `tt` \
and `item`")
and `item`"))
}
}
}

View file

@ -41,7 +41,7 @@ impl<'a> ParserAnyMacro<'a> {
fn ensure_complete_parse(&self, allow_semi: bool) {
let mut parser = self.parser.borrow_mut();
if allow_semi && parser.token == token::Semi {
parser.bump()
panictry!(parser.bump())
}
if parser.token != token::Eof {
let token_str = parser.this_token_to_string();
@ -81,7 +81,7 @@ impl<'a> MacResult for ParserAnyMacro<'a> {
let mut parser = self.parser.borrow_mut();
match parser.token {
token::Eof => break,
_ => ret.push(parser.parse_impl_item())
_ => ret.push(panictry!(parser.parse_impl_item()))
}
}
self.ensure_complete_parse(false);
@ -142,7 +142,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
MatchedNonterminal(NtTT(ref lhs_tt)) => {
let lhs_tt = match **lhs_tt {
TtDelimited(_, ref delim) => &delim.tts[..],
_ => cx.span_fatal(sp, "malformed macro lhs")
_ => panic!(cx.span_fatal(sp, "malformed macro lhs"))
};
match TokenTree::parse(cx, lhs_tt, arg) {
@ -153,7 +153,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
match **tt {
// ignore delimiters
TtDelimited(_, ref delimed) => delimed.tts.clone(),
_ => cx.span_fatal(sp, "macro rhs must be delimited"),
_ => panic!(cx.span_fatal(sp, "macro rhs must be delimited")),
}
},
_ => cx.span_bug(sp, "bad thing in rhs")
@ -164,7 +164,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
imported_from,
rhs);
let mut p = Parser::new(cx.parse_sess(), cx.cfg(), Box::new(trncbr));
p.check_unknown_macro_variable();
panictry!(p.check_unknown_macro_variable());
// Let the context choose how to interpret the result.
// Weird, but useful for X-macros.
return box ParserAnyMacro {
@ -175,13 +175,13 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
best_fail_spot = sp;
best_fail_msg = (*msg).clone();
},
Error(sp, ref msg) => cx.span_fatal(sp, &msg[..])
Error(sp, ref msg) => panic!(cx.span_fatal(sp, &msg[..]))
}
}
_ => cx.bug("non-matcher found in parsed lhses")
}
}
cx.span_fatal(best_fail_spot, &best_fail_msg[..]);
panic!(cx.span_fatal(best_fail_spot, &best_fail_msg[..]));
}
// Note that macro-by-example's input is also matched against a token tree:

View file

@ -247,22 +247,22 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
match lockstep_iter_size(&TtSequence(sp, seq.clone()),
r) {
LisUnconstrained => {
r.sp_diag.span_fatal(
panic!(r.sp_diag.span_fatal(
sp.clone(), /* blame macro writer */
"attempted to repeat an expression \
containing no syntax \
variables matched as repeating at this depth");
variables matched as repeating at this depth"));
}
LisContradiction(ref msg) => {
// FIXME #2887 blame macro invoker instead
r.sp_diag.span_fatal(sp.clone(), &msg[..]);
panic!(r.sp_diag.span_fatal(sp.clone(), &msg[..]));
}
LisConstraint(len, _) => {
if len == 0 {
if seq.op == ast::OneOrMore {
// FIXME #2887 blame invoker
r.sp_diag.span_fatal(sp.clone(),
"this must repeat at least once");
panic!(r.sp_diag.span_fatal(sp.clone(),
"this must repeat at least once"));
}
r.stack.last_mut().unwrap().idx += 1;
@ -306,10 +306,10 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
return ret_val;
}
MatchedSeq(..) => {
r.sp_diag.span_fatal(
panic!(r.sp_diag.span_fatal(
r.cur_span, /* blame the macro writer */
&format!("variable '{:?}' is still repeating at this depth",
token::get_ident(ident)));
token::get_ident(ident))));
}
}
}

View file

@ -50,6 +50,21 @@ extern crate libc;
extern crate serialize as rustc_serialize; // used by deriving
// A variant of 'try!' that panics on Err(FatalError). This is used as a
// crutch on the way towards a non-panic!-prone parser. It should be used
// for fatal parsing errors; eventually we plan to convert all code using
// panictry to just use normal try
macro_rules! panictry {
($e:expr) => ({
use std::result::Result::{Ok, Err};
use diagnostic::FatalError;
match $e {
Ok(e) => e,
Err(FatalError) => panic!(FatalError)
}
})
}
pub mod util {
pub mod interner;
#[cfg(test)]

View file

@ -45,10 +45,10 @@ impl<'a> ParserAttr for Parser<'a> {
self.span.hi
);
if attr.node.style != ast::AttrOuter {
self.fatal("expected outer comment");
panic!(self.fatal("expected outer comment"));
}
attrs.push(attr);
self.bump();
panictry!(self.bump());
}
_ => break
}
@ -66,11 +66,11 @@ impl<'a> ParserAttr for Parser<'a> {
let (span, value, mut style) = match self.token {
token::Pound => {
let lo = self.span.lo;
self.bump();
panictry!(self.bump());
if permit_inner { self.expected_tokens.push(TokenType::Token(token::Not)); }
let style = if self.token == token::Not {
self.bump();
panictry!(self.bump());
if !permit_inner {
let span = self.span;
self.span_err(span,
@ -84,21 +84,21 @@ impl<'a> ParserAttr for Parser<'a> {
ast::AttrOuter
};
self.expect(&token::OpenDelim(token::Bracket));
panictry!(self.expect(&token::OpenDelim(token::Bracket)));
let meta_item = self.parse_meta_item();
let hi = self.span.hi;
self.expect(&token::CloseDelim(token::Bracket));
panictry!(self.expect(&token::CloseDelim(token::Bracket)));
(mk_sp(lo, hi), meta_item, style)
}
_ => {
let token_str = self.this_token_to_string();
self.fatal(&format!("expected `#`, found `{}`", token_str));
panic!(self.fatal(&format!("expected `#`, found `{}`", token_str)));
}
};
if permit_inner && self.token == token::Semi {
self.bump();
panictry!(self.bump());
self.span_warn(span, "this inner attribute syntax is deprecated. \
The new syntax is `#![foo]`, with a bang and no semicolon");
style = ast::AttrInner;
@ -142,7 +142,7 @@ impl<'a> ParserAttr for Parser<'a> {
lo, hi);
if attr.node.style == ast::AttrInner {
attrs.push(attr);
self.bump();
panictry!(self.bump());
} else {
break;
}
@ -166,19 +166,19 @@ impl<'a> ParserAttr for Parser<'a> {
match nt_meta {
Some(meta) => {
self.bump();
panictry!(self.bump());
return meta;
}
None => {}
}
let lo = self.span.lo;
let ident = self.parse_ident();
let ident = panictry!(self.parse_ident());
let name = self.id_to_interned_str(ident);
match self.token {
token::Eq => {
self.bump();
let lit = self.parse_lit();
panictry!(self.bump());
let lit = panictry!(self.parse_lit());
// FIXME #623 Non-string meta items are not serialized correctly;
// just forbid them for now
match lit.node {
@ -206,10 +206,10 @@ impl<'a> ParserAttr for Parser<'a> {
/// matches meta_seq = ( COMMASEP(meta_item) )
fn parse_meta_seq(&mut self) -> Vec<P<ast::MetaItem>> {
self.parse_seq(&token::OpenDelim(token::Paren),
panictry!(self.parse_seq(&token::OpenDelim(token::Paren),
&token::CloseDelim(token::Paren),
seq_sep_trailing_allowed(token::Comma),
|p| p.parse_meta_item()).node
|p| Ok(p.parse_meta_item()))).node
}
fn parse_optional_meta(&mut self) -> Vec<P<ast::MetaItem>> {

View file

@ -116,7 +116,7 @@ impl<'a> Reader for TtReader<'a> {
r
}
fn fatal(&self, m: &str) -> ! {
self.sp_diag.span_fatal(self.cur_span, m);
panic!(self.sp_diag.span_fatal(self.cur_span, m));
}
fn err(&self, m: &str) {
self.sp_diag.span_err(self.cur_span, m);
@ -181,7 +181,7 @@ impl<'a> StringReader<'a> {
/// Report a fatal lexical error with a given span.
pub fn fatal_span(&self, sp: Span, m: &str) -> ! {
self.span_diagnostic.span_fatal(sp, m)
panic!(self.span_diagnostic.span_fatal(sp, m))
}
/// Report a lexical error with a given span.

View file

@ -12,11 +12,12 @@
use ast;
use codemap::{Span, CodeMap, FileMap};
use diagnostic::{SpanHandler, mk_span_handler, default_handler, Auto};
use diagnostic::{SpanHandler, mk_span_handler, default_handler, Auto, FatalError};
use parse::attr::ParserAttr;
use parse::parser::Parser;
use ptr::P;
use std::cell::{Cell, RefCell};
use std::fs::File;
use std::io::Read;
@ -27,6 +28,8 @@ use std::path::{Path, PathBuf};
use std::rc::Rc;
use std::str;
pub type PResult<T> = Result<T, FatalError>;
#[macro_use]
pub mod parser;
@ -88,7 +91,7 @@ pub fn parse_crate_from_file(
cfg: ast::CrateConfig,
sess: &ParseSess
) -> ast::Crate {
new_parser_from_file(sess, cfg, input).parse_crate_mod()
panictry!(new_parser_from_file(sess, cfg, input).parse_crate_mod())
// why is there no p.abort_if_errors here?
}
@ -109,7 +112,7 @@ pub fn parse_crate_from_source_str(name: String,
cfg,
name,
source);
maybe_aborted(p.parse_crate_mod(),p)
maybe_aborted(panictry!(p.parse_crate_mod()),p)
}
pub fn parse_crate_attrs_from_source_str(name: String,
@ -182,7 +185,7 @@ pub fn parse_tts_from_source_str(name: String,
);
p.quote_depth += 1;
// right now this is re-creating the token trees from ... token trees.
maybe_aborted(p.parse_all_token_trees(),p)
maybe_aborted(panictry!(p.parse_all_token_trees()),p)
}
// Note: keep in sync with `with_hygiene::new_parser_from_source_str`
@ -245,7 +248,7 @@ pub fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
-> Rc<FileMap> {
let err = |msg: &str| {
match spanopt {
Some(sp) => sess.span_diagnostic.span_fatal(sp, msg),
Some(sp) => panic!(sess.span_diagnostic.span_fatal(sp, msg)),
None => sess.span_diagnostic.handler().fatal(msg),
}
};
@ -286,7 +289,7 @@ pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc<FileMap>)
let cfg = Vec::new();
let srdr = lexer::StringReader::new(&sess.span_diagnostic, filemap);
let mut p1 = Parser::new(sess, cfg, box srdr);
p1.parse_all_token_trees()
panictry!(p1.parse_all_token_trees())
}
/// Given tts and cfg, produce a parser
@ -295,7 +298,7 @@ pub fn tts_to_parser<'a>(sess: &'a ParseSess,
cfg: ast::CrateConfig) -> Parser<'a> {
let trdr = lexer::new_tt_reader(&sess.span_diagnostic, None, None, tts);
let mut p = Parser::new(sess, cfg, box trdr);
p.check_unknown_macro_variable();
panictry!(p.check_unknown_macro_variable());
p
}
@ -325,7 +328,7 @@ pub mod with_hygiene {
);
p.quote_depth += 1;
// right now this is re-creating the token trees from ... token trees.
maybe_aborted(p.parse_all_token_trees(),p)
maybe_aborted(panictry!(p.parse_all_token_trees()),p)
}
// Note: keep this in sync with `super::new_parser_from_source_str` until
@ -358,7 +361,7 @@ pub mod with_hygiene {
let cfg = Vec::new();
let srdr = make_reader(&sess.span_diagnostic, filemap);
let mut p1 = Parser::new(sess, cfg, box srdr);
p1.parse_all_token_trees()
panictry!(p1.parse_all_token_trees())
}
}
@ -964,7 +967,7 @@ mod test {
#[test] fn parse_ident_pat () {
let sess = new_parse_sess();
let mut parser = string_to_parser(&sess, "b".to_string());
assert!(parser.parse_pat()
assert!(panictry!(parser.parse_pat_nopanic())
== P(ast::Pat{
id: ast::DUMMY_NODE_ID,
node: ast::PatIdent(ast::BindByValue(ast::MutImmutable),

View file

@ -100,7 +100,7 @@ impl<'a> ParserObsoleteMethods for parser::Parser<'a> {
fn eat_obsolete_ident(&mut self, ident: &str) -> bool {
if self.is_obsolete_ident(ident) {
self.bump();
panictry!(self.bump());
true
} else {
false

File diff suppressed because it is too large Load diff

View file

@ -125,7 +125,7 @@ impl<'a> fold::Folder for TestHarnessGenerator<'a> {
match i.node {
ast::ItemFn(_, ast::Unsafety::Unsafe, _, _, _) => {
let diag = self.cx.span_diagnostic;
diag.span_fatal(i.span, "unsafe functions cannot be used for tests");
panic!(diag.span_fatal(i.span, "unsafe functions cannot be used for tests"));
}
_ => {
debug!("this is a test function");

View file

@ -44,7 +44,7 @@ fn with_error_checking_parse<T, F>(s: String, f: F) -> T where
/// Parse a string, return a crate.
pub fn string_to_crate (source_str : String) -> ast::Crate {
with_error_checking_parse(source_str, |p| {
p.parse_crate_mod()
panictry!(p.parse_crate_mod())
})
}

View file

@ -17,6 +17,7 @@ extern crate syntax;
use syntax::ext::base::ExtCtxt;
use syntax::ptr::P;
use syntax::parse::PResult;
fn syntax_extension(cx: &ExtCtxt) {
let e_toks : Vec<syntax::ast::TokenTree> = quote_tokens!(cx, 1 + 2);