Switch from booleans to symbolic tags in a few places.

Graydon Hoare 2011-01-24 15:26:10 -08:00
parent 65da18faf8
commit 2d7c2acf09
2 changed files with 64 additions and 46 deletions
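The commit replaces two-valued booleans with named tag values so that call sites read as symbolic constants (UNRESTRICTED, GREEDY, INIT) instead of bare true/false. The sketch below is not part of the commit; it restates the parser half of the pattern in present-day Rust, where enum stands in for the old tag construct and the Restriction, Parser, and Expr names are illustrative assumptions rather than the original 2011 code.

// Modern-Rust sketch (hypothetical names) of the restriction tag and the
// save/restore discipline in parse_expr_res.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Restriction {
    Unrestricted,
    RestrictNoCallExprs,
}

struct Expr; // placeholder for the parsed expression

struct Parser {
    restriction: Restriction,
}

impl Parser {
    fn get_restriction(&self) -> Restriction {
        self.restriction
    }

    fn restrict(&mut self, r: Restriction) {
        self.restriction = r;
    }

    // Mirrors parse_expr_res: remember the old restriction, parse under the
    // new one, then restore the old value.
    fn parse_expr_res(&mut self, r: Restriction) -> Expr {
        let old = self.get_restriction();
        self.restrict(r);
        let e = self.parse_expr_inner();
        self.restrict(old);
        e
    }

    fn parse_expr_inner(&mut self) -> Expr {
        Expr // stub; the real parser recurses here
    }
}

fn main() {
    let mut p = Parser { restriction: Restriction::Unrestricted };
    let _e = p.parse_expr_res(Restriction::RestrictNoCallExprs);
    assert_eq!(p.get_restriction(), Restriction::Unrestricted); // restored
}

The save/restore discipline itself is unchanged by the commit; only the type of the flag becomes self-describing.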

Changed file 1 of 2 (the parser)

@@ -12,13 +12,18 @@ import util.common.append;
 import util.common.span;
 import util.common.new_str_hash;
 
+tag restriction {
+    UNRESTRICTED;
+    RESTRICT_NO_CALL_EXPRS;
+}
+
 state type parser =
     state obj {
        fn peek() -> token.token;
        impure fn bump();
        impure fn err(str s);
-       impure fn restrict(bool r);
-       fn is_restricted() -> bool;
+       impure fn restrict(restriction r);
+       fn get_restriction() -> restriction;
        fn get_session() -> session.session;
        fn get_span() -> common.span;
        fn next_def_id() -> ast.def_id;

@@ -31,7 +36,7 @@ impure fn new_parser(session.session sess,
                      mutable common.pos lo,
                      mutable common.pos hi,
                      mutable ast.def_num def,
-                     mutable bool restricted,
+                     mutable restriction res,
                      ast.crate_num crate,
                      lexer.reader rdr)
     {

@@ -52,12 +57,12 @@ impure fn new_parser(session.session sess,
            sess.span_err(span, m);
        }
 
-       impure fn restrict(bool r) {
-           restricted = r;
+       impure fn restrict(restriction r) {
+           res = r;
        }
 
-       fn is_restricted() -> bool {
-           ret restricted;
+       fn get_restriction() -> restriction {
+           ret res;
        }
 
        fn get_session() -> session.session {

@@ -78,7 +83,7 @@ impure fn new_parser(session.session sess,
     auto rdr = lexer.new_reader(srdr, path);
     auto npos = rdr.get_curr_pos();
     ret stdio_parser(sess, lexer.next_token(rdr),
-                     npos, npos, 0, false, crate, rdr);
+                     npos, npos, 0, UNRESTRICTED, crate, rdr);
 }
 
 impure fn unexpected(parser p, token.token t) {

@@ -283,7 +288,7 @@ impure fn parse_ty(parser p) -> @ast.ty {
         }
         case (token.IDENT(_)) {
-            t = ast.ty_path(parse_path(p, true), none[ast.def]);
+            t = ast.ty_path(parse_path(p, GREEDY), none[ast.def]);
         }
         case (_) {

@@ -380,7 +385,12 @@ fn is_ident(token.token t) -> bool {
     ret false;
 }
 
-impure fn parse_path(parser p, bool greedy) -> ast.path {
+tag greed {
+    GREEDY;
+    MINIMAL;
+}
+
+impure fn parse_path(parser p, greed g) -> ast.path {
     auto lo = p.get_span();
     auto hi = lo;

@@ -394,7 +404,7 @@ impure fn parse_path(parser p, bool greedy) -> ast.path {
         ids += i;
         p.bump();
         if (p.peek() == token.DOT) {
-            if (greedy) {
+            if (g == GREEDY) {
                 p.bump();
                 check (is_ident(p.peek()));
             } else {

@@ -456,7 +466,7 @@ impure fn parse_bottom_expr(parser p) -> @ast.expr {
     alt (p.peek()) {
         case (token.IDENT(_)) {
-            auto pth = parse_path(p, false);
+            auto pth = parse_path(p, MINIMAL);
             hi = pth.span;
             ex = ast.expr_path(pth, none[ast.def], ast.ann_none);
         }

@@ -521,7 +531,7 @@ impure fn parse_bottom_expr(parser p) -> @ast.expr {
         case (token.BIND) {
             p.bump();
-            auto e = parse_restricted_expr(p);
+            auto e = parse_expr_res(p, RESTRICT_NO_CALL_EXPRS);
 
 impure fn parse_expr_opt(parser p) -> option.t[@ast.expr] {
     alt (p.peek()) {
         case (token.UNDERSCORE) {

@@ -589,7 +599,7 @@ impure fn parse_dot_or_call_expr(parser p) -> @ast.expr {
     alt (p.peek()) {
         case (token.LPAREN) {
-            if (p.is_restricted()) {
+            if (p.get_restriction() == RESTRICT_NO_CALL_EXPRS) {
                 ret e;
             } else {
                 // Call expr.

@@ -983,18 +993,13 @@ impure fn parse_alt_expr(parser p) -> @ast.expr {
     ret @spanned(lo, hi, expr);
 }
 
-impure fn parse_restricted_expr(parser p) -> @ast.expr {
-    ret parse_expr_res(p, true);
-}
-
 impure fn parse_expr(parser p) -> @ast.expr {
-    ret parse_expr_res(p, false);
+    ret parse_expr_res(p, UNRESTRICTED);
 }
 
-impure fn parse_expr_res(parser p, bool restrict) -> @ast.expr {
-    auto old = p.is_restricted();
-    p.restrict(restrict);
+impure fn parse_expr_res(parser p, restriction r) -> @ast.expr {
+    auto old = p.get_restriction();
+    p.restrict(r);
     auto e = parse_expr_inner(p);
     p.restrict(old);
     ret e;
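The same treatment applies to parse_path, whose bool greedy parameter becomes the two-value greed tag. Below is a minimal sketch, separate from the commit, of the behavior visible in the hunk above, written in modern Rust and assuming the MINIMAL case simply stops at the first dot; the token handling and names are hypothetical simplifications, not the original parser.

// Modern-Rust sketch (hypothetical names) of the GREEDY/MINIMAL split.
#[derive(Clone, Copy, PartialEq, Eq)]
enum Greed {
    Greedy,
    Minimal,
}

// A greedy parse keeps consuming `.`-separated identifiers; a minimal parse
// stops after the first identifier so the caller can handle the dot itself.
fn parse_path(tokens: &[&str], g: Greed) -> Vec<String> {
    let mut ids = Vec::new();
    let mut i = 0;
    while i < tokens.len() {
        ids.push(tokens[i].to_string());
        i += 1;
        if i < tokens.len() && tokens[i] == "." && g == Greed::Greedy {
            i += 1; // consume the dot and expect another identifier
        } else {
            break;
        }
    }
    ids
}

fn main() {
    assert_eq!(parse_path(&["a", ".", "b", ".", "c"], Greed::Greedy).len(), 3);
    assert_eq!(parse_path(&["a", ".", "b"], Greed::Minimal).len(), 1);
}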

Changed file 2 of 2 (the translation pass)

@@ -93,10 +93,16 @@ tag cleanup {
     clean(fn(@block_ctxt cx) -> result);
 }
 
+tag block_kind {
+    SCOPE_BLOCK;
+    NON_SCOPE_BLOCK;
+}
+
 state type block_ctxt = rec(BasicBlockRef llbb,
                             builder build,
                             block_parent parent,
-                            bool is_scope,
+                            block_kind kind,
                             mutable vec[cleanup] cleanups,
                             @fn_ctxt fcx);

@@ -640,7 +646,7 @@ fn trans_non_gc_free(@block_ctxt cx, ValueRef v) -> result {
 }
 
 fn find_scope_cx(@block_ctxt cx) -> @block_ctxt {
-    if (cx.is_scope) {
+    if (cx.kind == SCOPE_BLOCK) {
         ret cx;
     }
     alt (cx.parent) {

@@ -1492,8 +1498,13 @@ fn memcpy_ty(@block_ctxt cx,
     }
 }
 
+tag copy_action {
+    INIT;
+    DROP_EXISTING;
+}
+
 fn copy_ty(@block_ctxt cx,
-           bool is_init,
+           copy_action action,
            ValueRef dst,
            ValueRef src,
            @ty.t t) -> result {

@@ -1505,7 +1516,7 @@ fn copy_ty(@block_ctxt cx,
     } else if (ty.type_is_boxed(t)) {
         auto r = incr_all_refcnts(cx, src, t);
-        if (! is_init) {
+        if (action == DROP_EXISTING) {
             r = drop_ty(r.bcx, r.bcx.build.Load(dst), t);
         }
         ret res(r.bcx, r.bcx.build.Store(src, dst));

@@ -1513,7 +1524,7 @@ fn copy_ty(@block_ctxt cx,
     } else if (ty.type_is_structural(t) ||
                ty.type_has_dynamic_size(t)) {
         auto r = incr_all_refcnts(cx, src, t);
-        if (! is_init) {
+        if (action == DROP_EXISTING) {
             r = drop_ty(r.bcx, dst, t);
         }
         ret memcpy_ty(r.bcx, dst, src, t);

@@ -1623,7 +1634,7 @@ fn trans_unary(@block_ctxt cx, ast.unop op,
                                vec(C_int(0),
                                    C_int(abi.box_rc_field_body)));
             sub.bcx.build.Store(C_int(1), rc);
-            sub = copy_ty(sub.bcx, true, body, e_val, e_ty);
+            sub = copy_ty(sub.bcx, INIT, body, e_val, e_ty);
             ret res(sub.bcx, box);
         }
         case (ast.deref) {

@@ -1815,7 +1826,7 @@ fn trans_for(@block_ctxt cx,
     cx.build.Br(scope_cx.llbb);
     auto local_res = alloc_local(scope_cx, local);
-    auto bcx = copy_ty(local_res.bcx, INIT, local_res.val, curr, t).bcx;
+    auto bcx = copy_ty(local_res.bcx, INIT, local_res.val, curr, t).bcx;
     bcx = trans_block(bcx, body).bcx;
     bcx.build.Br(next_cx.llbb);
     ret res(next_cx, C_nil());

@@ -1955,7 +1966,7 @@ fn trans_pat_binding(@block_ctxt cx, @ast.pat pat, ValueRef llval)
             cx.fcx.lllocals.insert(def_id, dst);
             cx.cleanups += clean(bind drop_slot(_, dst, ty));
-            ret copy_ty(cx, true, dst, llval, ty);
+            ret copy_ty(cx, INIT, dst, llval, ty);
         }
         case (ast.pat_tag(_, ?subpats, _, _)) {
             if (_vec.len[@ast.pat](subpats) == 0u) { ret res(cx, llval); }

@@ -2496,7 +2507,7 @@ fn trans_bind(@block_ctxt cx, @ast.expr f,
             for (ValueRef v in bound_vals) {
                 auto bound = bcx.build.GEP(bindings,
                                            vec(C_int(0),C_int(i)));
-                bcx = copy_ty(r.bcx, true, bound, v, bound_tys.(i)).bcx;
+                bcx = copy_ty(r.bcx, INIT, bound, v, bound_tys.(i)).bcx;
                 i += 1;
             }

@@ -2608,7 +2619,7 @@ fn trans_tup(@block_ctxt cx, vec[ast.elt] elts,
         auto t = ty.expr_ty(e.expr);
         auto src_res = trans_expr(r.bcx, e.expr);
         auto dst_elt = r.bcx.build.GEP(tup_val, vec(C_int(0), C_int(i)));
-        r = copy_ty(src_res.bcx, true, dst_elt, src_res.val, t);
+        r = copy_ty(src_res.bcx, INIT, dst_elt, src_res.val, t);
         i += 1;
     }
     ret res(r.bcx, tup_val);

@@ -2645,7 +2656,7 @@ fn trans_vec(@block_ctxt cx, vec[@ast.expr] args,
     for (@ast.expr e in args) {
         auto src_res = trans_expr(sub.bcx, e);
         auto dst_elt = sub.bcx.build.GEP(body, vec(C_int(0), C_int(i)));
-        sub = copy_ty(src_res.bcx, true, dst_elt, src_res.val, unit_ty);
+        sub = copy_ty(src_res.bcx, INIT, dst_elt, src_res.val, unit_ty);
         i += 1;
     }
     auto fill = sub.bcx.build.GEP(vec_val,

@@ -2668,7 +2679,7 @@ fn trans_rec(@block_ctxt cx, vec[ast.field] fields,
         auto src_res = trans_expr(r.bcx, f.expr);
         auto dst_elt = r.bcx.build.GEP(rec_val, vec(C_int(0), C_int(i)));
         // FIXME: calculate copy init-ness in typestate.
-        r = copy_ty(src_res.bcx, true, dst_elt, src_res.val, t);
+        r = copy_ty(src_res.bcx, INIT, dst_elt, src_res.val, t);
         i += 1;
     }
     ret res(r.bcx, rec_val);

@@ -2727,7 +2738,8 @@ fn trans_expr(@block_ctxt cx, @ast.expr e) -> result {
             auto rhs_res = trans_expr(lhs_res.res.bcx, src);
             auto t = node_ann_type(cx.fcx.ccx, ann);
             // FIXME: calculate copy init-ness in typestate.
-            ret copy_ty(rhs_res.bcx, false, lhs_res.res.val, rhs_res.val, t);
+            ret copy_ty(rhs_res.bcx, DROP_EXISTING,
+                        lhs_res.res.val, rhs_res.val, t);
         }
         case (ast.expr_assign_op(?op, ?dst, ?src, ?ann)) {

@@ -2739,7 +2751,8 @@ fn trans_expr(@block_ctxt cx, @ast.expr e) -> result {
             auto rhs_res = trans_expr(lhs_res.res.bcx, src);
             auto v = trans_eager_binop(rhs_res.bcx, op, lhs_val, rhs_res.val);
             // FIXME: calculate copy init-ness in typestate.
-            ret copy_ty(rhs_res.bcx, false, lhs_res.res.val, v, t);
+            ret copy_ty(rhs_res.bcx, DROP_EXISTING,
+                        lhs_res.res.val, v, t);
         }
         case (ast.expr_bind(?f, ?args, ?ann)) {

@@ -2889,7 +2902,7 @@ fn trans_ret(@block_ctxt cx, &option.t[@ast.expr] e) -> result {
     alt (cx.fcx.llretptr) {
         case (some[ValueRef](?llptr)) {
             // Generic return via tydesc + retptr.
-            bcx = copy_ty(bcx, true, llptr, val, t).bcx;
+            bcx = copy_ty(bcx, INIT, llptr, val, t).bcx;
             bcx.build.RetVoid();
         }
         case (none[ValueRef]) {

@@ -2921,7 +2934,7 @@ fn init_local(@block_ctxt cx, @ast.local local) -> result {
     alt (local.init) {
         case (some[@ast.expr](?e)) {
             auto sub = trans_expr(bcx, e);
-            bcx = copy_ty(sub.bcx, true, llptr, sub.val, ty).bcx;
+            bcx = copy_ty(sub.bcx, INIT, llptr, sub.val, ty).bcx;
         }
         case (_) {
             if (middle.ty.type_has_dynamic_size(ty)) {

@@ -2983,7 +2996,7 @@ fn new_builder(BasicBlockRef llbb) -> builder {
 // You probably don't want to use this one. See the
 // next three functions instead.
 fn new_block_ctxt(@fn_ctxt cx, block_parent parent,
-                  bool is_scope,
+                  block_kind kind,
                   str name) -> @block_ctxt {
     let vec[cleanup] cleanups = vec();
     let BasicBlockRef llbb =

@@ -2993,24 +3006,24 @@ fn new_block_ctxt(@fn_ctxt cx, block_parent parent,
     ret @rec(llbb=llbb,
              build=new_builder(llbb),
              parent=parent,
-             is_scope=is_scope,
+             kind=kind,
              mutable cleanups=cleanups,
              fcx=cx);
 }
 
 // Use this when you're at the top block of a function or the like.
 fn new_top_block_ctxt(@fn_ctxt fcx) -> @block_ctxt {
-    ret new_block_ctxt(fcx, parent_none, true, "function top level");
+    ret new_block_ctxt(fcx, parent_none, SCOPE_BLOCK, "function top level");
 }
 
 // Use this when you're at a curly-brace or similar lexical scope.
 fn new_scope_block_ctxt(@block_ctxt bcx, str n) -> @block_ctxt {
-    ret new_block_ctxt(bcx.fcx, parent_some(bcx), true, n);
+    ret new_block_ctxt(bcx.fcx, parent_some(bcx), SCOPE_BLOCK, n);
 }
 
 // Use this when you're making a general CFG BB within a scope.
 fn new_sub_block_ctxt(@block_ctxt bcx, str n) -> @block_ctxt {
-    ret new_block_ctxt(bcx.fcx, parent_some(bcx), false, n);
+    ret new_block_ctxt(bcx.fcx, parent_some(bcx), NON_SCOPE_BLOCK, n);
 }

@@ -3018,7 +3031,7 @@ fn trans_block_cleanups(@block_ctxt cx,
                         @block_ctxt cleanup_cx) -> @block_ctxt {
     auto bcx = cx;
 
-    if (!cleanup_cx.is_scope) {
+    if (cleanup_cx.kind != SCOPE_BLOCK) {
         check (_vec.len[cleanup](cleanup_cx.cleanups) == 0u);
     }

@@ -3430,7 +3443,7 @@ fn trans_obj(@crate_ctxt cx, &ast._obj ob, ast.def_id oid,
         arg = load_scalar_or_boxed(r.bcx, arg, arg_tys.(i).ty);
         auto field = r.bcx.build.GEP(body_fields,
                                      vec(C_int(0),C_int(i)));
-        r = copy_ty(r.bcx, true, field, arg, arg_tys.(i).ty);
+        r = copy_ty(r.bcx, INIT, field, arg, arg_tys.(i).ty);
         i += 1;
     }
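In the translation pass, the is_scope and is_init booleans become the block_kind and copy_action tags. The following sketch, again separate from the commit and in modern Rust, restates the find_scope_cx walk under the new block_kind tag; the BlockCtxt shape and the Option-returning signature are assumptions for illustration, since the original compiler presumably treats a missing scope block as an internal error.

// Modern-Rust sketch (hypothetical types) of find_scope_cx under the new
// block_kind tag: walk the parent chain until a scope block is found.
#[derive(Clone, Copy, PartialEq, Eq)]
enum BlockKind {
    ScopeBlock,
    NonScopeBlock,
}

struct BlockCtxt {
    kind: BlockKind,
    parent: Option<Box<BlockCtxt>>,
}

// Returns None when no scope block encloses `cx`.
fn find_scope_cx(cx: &BlockCtxt) -> Option<&BlockCtxt> {
    if cx.kind == BlockKind::ScopeBlock {
        return Some(cx);
    }
    match &cx.parent {
        Some(p) => find_scope_cx(p),
        None => None,
    }
}

fn main() {
    let top = BlockCtxt { kind: BlockKind::ScopeBlock, parent: None };
    let sub = BlockCtxt { kind: BlockKind::NonScopeBlock, parent: Some(Box::new(top)) };
    assert!(find_scope_cx(&sub).is_some());
}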