auto merge of #4922 : jbclements/rust/add-deriving-eq-to-asts, r=catamorphism

r?

Apply deriving_eq to the data structures in ast.rs, and get rid of the custom definitions of eq that were everywhere. The resulting ast.rs is about 400 lines shorter.

Also: add a few test cases and a bunch of comments.

Also: change ast_ty_to_ty_cache to use node ids rather than ast::ty's. I believe this was a suggestion related to my changes, and it appears to pass all tests.

Also: tiny doc fix, remove references to crate keywords.
This commit is contained in:
bors 2013-02-13 18:49:29 -08:00
commit 6efa3543a8
10 changed files with 242 additions and 556 deletions

View file

@ -202,13 +202,7 @@ grammar as double-quoted strings. Other tokens have exact rules given.
### Keywords
The keywords in [crate files](#crate-files) are the following strings:
~~~~~~~~ {.keyword}
mod priv pub use
~~~~~~~~
The keywords in [source files](#source-files) are the following strings:
The keywords are the following strings:
~~~~~~~~ {.keyword}
as assert

View file

@ -262,7 +262,7 @@ struct ctxt_ {
needs_drop_cache: HashMap<t, bool>,
needs_unwind_cleanup_cache: HashMap<t, bool>,
mut tc_cache: LinearMap<uint, TypeContents>,
ast_ty_to_ty_cache: HashMap<@ast::Ty, ast_ty_to_ty_cache_entry>,
ast_ty_to_ty_cache: HashMap<node_id, ast_ty_to_ty_cache_entry>,
enum_var_cache: HashMap<def_id, @~[VariantInfo]>,
trait_method_cache: HashMap<def_id, @~[method]>,
ty_param_bounds: HashMap<ast::node_id, param_bounds>,

View file

@ -277,7 +277,7 @@ pub fn ast_ty_to_ty<AC: AstConv, RS: region_scope Copy Durable>(
let tcx = self.tcx();
match tcx.ast_ty_to_ty_cache.find(&ast_ty) {
match tcx.ast_ty_to_ty_cache.find(&ast_ty.id) {
Some(ty::atttce_resolved(ty)) => return ty,
Some(ty::atttce_unresolved) => {
tcx.sess.span_fatal(ast_ty.span, ~"illegal recursive type; \
@ -287,7 +287,7 @@ pub fn ast_ty_to_ty<AC: AstConv, RS: region_scope Copy Durable>(
None => { /* go on */ }
}
tcx.ast_ty_to_ty_cache.insert(ast_ty, ty::atttce_unresolved);
tcx.ast_ty_to_ty_cache.insert(ast_ty.id, ty::atttce_unresolved);
let typ = match /*bad*/copy ast_ty.node {
ast::ty_nil => ty::mk_nil(tcx),
ast::ty_bot => ty::mk_bot(tcx),
@ -409,7 +409,7 @@ pub fn ast_ty_to_ty<AC: AstConv, RS: region_scope Copy Durable>(
}
};
tcx.ast_ty_to_ty_cache.insert(ast_ty, ty::atttce_resolved(typ));
tcx.ast_ty_to_ty_cache.insert(ast_ty.id, ty::atttce_resolved(typ));
return typ;
}

File diff suppressed because it is too large Load diff

View file

@ -130,6 +130,7 @@ pub struct span {
#[auto_encode]
#[auto_decode]
#[deriving_eq]
pub struct spanned<T> { node: T, span: span }
pub impl span : cmp::Eq {

View file

@ -44,8 +44,8 @@ pub struct SyntaxExpanderTT {
span: Option<span>
}
pub type SyntaxExpanderTTFun = fn@(ext_ctxt, span, ~[ast::token_tree])
-> MacResult;
pub type SyntaxExpanderTTFun
= fn@(ext_ctxt, span, ~[ast::token_tree]) -> MacResult;
pub struct SyntaxExpanderTTItem {
expander: SyntaxExpanderTTItemFun,
@ -78,9 +78,11 @@ pub enum SyntaxExtension {
// A temporary hard-coded map of methods for expanding syntax extension
// AST nodes into full ASTs
pub fn syntax_expander_table() -> HashMap<~str, SyntaxExtension> {
// utility function to simplify creating NormalTT syntax extensions
fn builtin_normal_tt(f: SyntaxExpanderTTFun) -> SyntaxExtension {
NormalTT(SyntaxExpanderTT{expander: f, span: None})
}
// utility function to simplify creating ItemTT syntax extensions
fn builtin_item_tt(f: SyntaxExpanderTTItemFun) -> SyntaxExtension {
ItemTT(SyntaxExpanderTTItem{expander: f, span: None})
}
@ -112,8 +114,8 @@ pub fn syntax_expander_table() -> HashMap<~str, SyntaxExtension> {
ext::deriving::expand_deriving_iter_bytes));
// Quasi-quoting expanders
syntax_expanders.insert(
~"quote_tokens", builtin_normal_tt(ext::quote::expand_quote_tokens));
syntax_expanders.insert(~"quote_tokens",
builtin_normal_tt(ext::quote::expand_quote_tokens));
syntax_expanders.insert(~"quote_expr",
builtin_normal_tt(ext::quote::expand_quote_expr));
syntax_expanders.insert(~"quote_ty",

View file

@ -20,6 +20,8 @@ use core::option::{None, Option, Some};
use core::option;
use std::oldmap::HashMap;
// seq_sep : a sequence separator (token)
// and whether a trailing separator is allowed.
pub type seq_sep = {
sep: Option<token::Token>,
trailing_sep_allowed: bool
@ -51,6 +53,8 @@ pub impl Parser {
+ token_to_str(self.reader, self.token) + ~"`");
}
// expect and consume the token t. Signal an error if
// the next token is not t.
fn expect(t: token::Token) {
if self.token == t {
self.bump();
@ -88,6 +92,8 @@ pub impl Parser {
return self.parse_ident();
}
// consume token 'tok' if it exists. Returns true if the given
// token was present, false otherwise.
fn eat(tok: token::Token) -> bool {
return if self.token == tok { self.bump(); true } else { false };
}
@ -185,6 +191,8 @@ pub impl Parser {
}
}
// expect and consume a GT. if a >> is seen, replace it
// with a single > and continue.
fn expect_gt() {
if self.token == token::GT {
self.bump();
@ -202,6 +210,8 @@ pub impl Parser {
}
}
// parse a sequence bracketed by '<' and '>', stopping
// before the '>'.
fn parse_seq_to_before_gt<T: Copy>(sep: Option<token::Token>,
f: fn(Parser) -> T) -> ~[T] {
let mut first = true;
@ -211,7 +221,7 @@ pub impl Parser {
match sep {
Some(ref t) => {
if first { first = false; }
else { self.expect((*t)); }
else { self.expect(*t); }
}
_ => ()
}
@ -229,6 +239,7 @@ pub impl Parser {
return v;
}
// parse a sequence bracketed by '<' and '>'
fn parse_seq_lt_gt<T: Copy>(sep: Option<token::Token>,
f: fn(Parser) -> T) -> spanned<~[T]> {
let lo = self.span.lo;
@ -239,6 +250,9 @@ pub impl Parser {
return spanned(lo, hi, result);
}
// parse a sequence, including the closing delimiter. The function
// f must consume tokens until reaching the next separator or
// closing bracket.
fn parse_seq_to_end<T: Copy>(ket: token::Token, sep: seq_sep,
f: fn(Parser) -> T) -> ~[T] {
let val = self.parse_seq_to_before_end(ket, sep, f);
@ -246,7 +260,9 @@ pub impl Parser {
return val;
}
// parse a sequence, not including the closing delimiter. The function
// f must consume tokens until reaching the next separator or
// closing bracket.
fn parse_seq_to_before_end<T: Copy>(ket: token::Token, sep: seq_sep,
f: fn(Parser) -> T) -> ~[T] {
let mut first: bool = true;
@ -255,7 +271,7 @@ pub impl Parser {
match sep.sep {
Some(ref t) => {
if first { first = false; }
else { self.expect((*t)); }
else { self.expect(*t); }
}
_ => ()
}
@ -265,6 +281,9 @@ pub impl Parser {
return v;
}
// parse a sequence, including the closing delimiter. The function
// f must consume tokens until reaching the next separator or
// closing bracket.
fn parse_unspanned_seq<T: Copy>(bra: token::Token,
ket: token::Token,
sep: seq_sep,

View file

@ -183,7 +183,6 @@ pub fn new_parser_from_file(sess: parse_sess,
let srdr = lexer::new_string_reader(sess.span_diagnostic,
filemap,
sess.interner);
Ok(Parser(sess, cfg, srdr as reader))
}
@ -222,3 +221,58 @@ pub fn new_parser_from_tts(sess: parse_sess, cfg: ast::crate_cfg,
return Parser(sess, cfg, trdr as reader)
}
// Unit tests for the parser entry points: round-trip a small fn item
// through the token-tree path and compare against direct item parsing.
#[cfg(test)]
mod test {
use super::*;
use std::serialize::Encodable;
use std;
use core::dvec;
use core::str;
use util::testing::*;
// Serialize any Encodable value to a JSON string via an in-memory
// BytesWriter; used below to snapshot-compare token trees.
// NOTE(review): this helper takes a parameter, so the #[test] attribute
// here looks like a mistake — test fns take no arguments. Confirm
// whether the attribute should be removed.
#[test] fn to_json_str (val: Encodable<std::json::Encoder>) -> ~str {
let bw = @io::BytesWriter {bytes: dvec::DVec(), pos: 0};
val.encode(~std::json::Encoder(bw as io::Writer));
str::from_bytes(bw.bytes.data)
}
// Parse `fn foo (x : int) { x; }` into token trees, check the exact
// JSON snapshot of those trees, then verify that re-parsing the trees
// yields the same AST item as parsing the source string directly.
#[test] fn alltts () {
let tts = parse_tts_from_source_str(
~"bogofile",
@~"fn foo (x : int) { x; }",
~[],
new_parse_sess(None));
// snapshot of the expected token-tree JSON encoding
check_equal(to_json_str(tts as Encodable::<std::json::Encoder>),
~"[[\"tt_tok\",[,[\"IDENT\",[\"fn\",false]]]],\
[\"tt_tok\",[,[\"IDENT\",[\"foo\",false]]]],\
[\"tt_delim\",[[[\"tt_tok\",[,[\"LPAREN\",[]]]],\
[\"tt_tok\",[,[\"IDENT\",[\"x\",false]]]],\
[\"tt_tok\",[,[\"COLON\",[]]]],\
[\"tt_tok\",[,[\"IDENT\",[\"int\",false]]]],\
[\"tt_tok\",[,[\"RPAREN\",[]]]]]]],\
[\"tt_delim\",[[[\"tt_tok\",[,[\"LBRACE\",[]]]],\
[\"tt_tok\",[,[\"IDENT\",[\"x\",false]]]],\
[\"tt_tok\",[,[\"SEMI\",[]]]],\
[\"tt_tok\",[,[\"RBRACE\",[]]]]]]]]"
);
// AST built from the token trees ...
let ast1 = new_parser_from_tts(new_parse_sess(None),~[],tts)
.parse_item(~[]);
// ... must equal the AST parsed straight from source. This relies on
// the deriving_eq impls this commit adds to the AST types.
let ast2 = parse_item_from_source_str(
~"bogofile",
@~"fn foo (x : int) { x; }",
~[],~[],
new_parse_sess(None));
check_equal(ast1,ast2);
}
}
//
// Local Variables:
// mode: rust
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// End:
//

View file

@ -182,7 +182,8 @@ pure fn maybe_append(+lhs: ~[attribute], rhs: Option<~[attribute]>)
/* ident is handled by common.rs */
pub fn Parser(sess: parse_sess,
pub fn Parser(sess: parse_sess
,
cfg: ast::crate_cfg,
+rdr: reader) -> Parser {
@ -1238,6 +1239,8 @@ pub impl Parser {
return e;
}
// parse an optional separator followed by a kleene-style
// repetition token (+ or *).
fn parse_sep_and_zerok() -> (Option<token::Token>, bool) {
if self.token == token::BINOP(token::STAR)
|| self.token == token::BINOP(token::PLUS) {
@ -1258,20 +1261,18 @@ pub impl Parser {
}
}
// parse a single token tree from the input.
fn parse_token_tree() -> token_tree {
maybe_whole!(deref self, nt_tt);
fn parse_tt_tok(p: Parser, delim_ok: bool) -> token_tree {
fn parse_non_delim_tt_tok(p: Parser) -> token_tree {
maybe_whole!(deref p, nt_tt);
match p.token {
token::RPAREN | token::RBRACE | token::RBRACKET
if !delim_ok => {
=> {
p.fatal(~"incorrect close delimiter: `"
+ token_to_str(p.reader, p.token) + ~"`");
}
token::EOF => {
p.fatal(~"file ended in the middle of a macro invocation");
}
/* we ought to allow different depths of unquotation */
token::DOLLAR if p.quote_depth > 0u => {
p.bump();
@ -1282,32 +1283,43 @@ pub impl Parser {
seq_sep_none(),
|p| p.parse_token_tree());
let (s, z) = p.parse_sep_and_zerok();
return tt_seq(mk_sp(sp.lo ,p.span.hi), seq.node, s, z);
tt_seq(mk_sp(sp.lo ,p.span.hi), seq.node, s, z)
} else {
return tt_nonterminal(sp, p.parse_ident());
tt_nonterminal(sp, p.parse_ident())
}
}
_ => { /* ok */ }
_ => {
parse_any_tt_tok(p)
}
}
let res = tt_tok(p.span, p.token);
p.bump();
return res;
}
return match self.token {
// turn the next token into a tt_tok:
fn parse_any_tt_tok(p: Parser) -> token_tree{
let res = tt_tok(p.span, p.token);
p.bump();
res
}
match self.token {
token::EOF => {
self.fatal(~"file ended in the middle of a macro invocation");
}
token::LPAREN | token::LBRACE | token::LBRACKET => {
// tjc: ??????
let ket = token::flip_delimiter(copy self.token);
tt_delim(vec::append(
~[parse_tt_tok(self, true)],
// the open delimiter:
~[parse_any_tt_tok(self)],
vec::append(
self.parse_seq_to_before_end(
ket, seq_sep_none(),
|p| p.parse_token_tree()),
~[parse_tt_tok(self, true)])))
// the close delimiter:
~[parse_any_tt_tok(self)])))
}
_ => parse_tt_tok(self, false)
};
_ => parse_non_delim_tt_tok(self)
}
}
fn parse_all_token_trees() -> ~[token_tree] {
@ -3999,6 +4011,7 @@ pub impl Parser {
}
}
//
// Local Variables:
// mode: rust

View file

@ -25,6 +25,7 @@ use std::oldmap::HashMap;
#[auto_encode]
#[auto_decode]
#[deriving_eq]
pub enum binop {
PLUS,
MINUS,
@ -86,6 +87,7 @@ pub enum Token {
LIT_STR(ast::ident),
/* Name components */
// an identifier contains an "is_mod_name" boolean.
IDENT(ast::ident, bool),
UNDERSCORE,
LIFETIME(ast::ident),
@ -517,12 +519,6 @@ pub fn reserved_keyword_table() -> HashMap<~str, ()> {
words
}
impl binop : cmp::Eq {
pure fn eq(&self, other: &binop) -> bool {
((*self) as uint) == ((*other) as uint)
}
pure fn ne(&self, other: &binop) -> bool { !(*self).eq(other) }
}
impl Token : cmp::Eq {
pure fn eq(&self, other: &Token) -> bool {