Move map iface over to more for-friendly iteration methods

This commit is contained in:
Marijn Haverbeke 2012-04-23 13:42:15 +02:00
parent a872a99bfe
commit 9053f54498
18 changed files with 74 additions and 83 deletions

View file

@@ -222,7 +222,7 @@ fn try_parse_sources(filename: str, sources: map::hashmap<str, source>) {
let c = io::read_whole_file_str(filename);
alt json::from_str(result::get(c)) {
ok(json::dict(j)) {
j.items { |k, v|
for j.each { |k, v|
sources.insert(k, parse_source(k, v));
#debug("source: %s", k);
}
@@ -404,11 +404,11 @@ fn configure(opts: options) -> cargo {
need_dir(c.libdir);
need_dir(c.bindir);
sources.keys { |k|
for sources.each_key { |k|
let mut s = sources.get(k);
load_source_packages(c, s);
sources.insert(k, s);
};
}
if c.pgp {
pgp::init(c.root);
@@ -422,11 +422,11 @@ fn configure(opts: options) -> cargo {
}
fn for_each_package(c: cargo, b: fn(source, package)) {
c.sources.values({ |v|
for c.sources.each_value {|v|
for vec::each(copy v.packages) {|p|
b(v, p);
}
})
}
}
// Runs all programs in directory <buildpath>
@@ -592,7 +592,7 @@ fn cargo_suggestion(c: cargo, syncing: bool, fallback: fn())
}
if !syncing {
let mut npkg = 0u;
c.sources.values({ |v| npkg += vec::len(v.packages) });
for c.sources.each_value { |v| npkg += vec::len(v.packages) }
if npkg == 0u {
error("No packages known. You may wish to run " +
"\"cargo sync\".");
@@ -776,7 +776,7 @@ fn cmd_sync(c: cargo) {
sync_one(c, c.opts.free[2], c.sources.get(c.opts.free[2]));
} else {
cargo_suggestion(c, true, { || } );
c.sources.items { |k, v|
for c.sources.each { |k, v|
sync_one(c, k, v);
}
}

View file

@@ -167,7 +167,7 @@ fn use_selectors_to_bind(b: binders, e: @expr) -> option<bindings> {
alt sel(match_expr(e)) { none { ret none; } _ { } }
}
let mut never_mind: bool = false;
b.real_binders.items {|key, val|
for b.real_binders.each {|key, val|
alt val(match_expr(e)) {
none { never_mind = true; }
some(mtc) { res.insert(key, mtc); }
@@ -251,7 +251,7 @@ fn free_vars(b: bindings, e: @expr, it: fn(ident)) {
with *default_ast_fold()};
let f = make_fold(f_pre);
f.fold_expr(e); // ignore result
idents.keys {|x| it(x); };
for idents.each_key {|x| it(x); };
}

View file

@@ -217,7 +217,7 @@ nonetheless valid as identifiers because they are unambiguous.
"]
fn keyword_table() -> hashmap<str, ()> {
let keywords = str_hash();
bad_expr_word_table().keys() {|word|
for bad_expr_word_table().each_key {|word|
keywords.insert(word, ());
}
let other_keywords = [

View file

@@ -85,7 +85,7 @@ fn to_writer(wr: io::writer, j: json) {
wr.write_str("{ ");
let mut first = true;
d.items { |key, value|
for d.each { |key, value|
if !first {
wr.write_str(", ");
}
@@ -481,7 +481,7 @@ fn eq(value0: json, value1: json) -> bool {
(dict(d0), dict(d1)) {
if d0.size() == d1.size() {
let mut equal = true;
d0.items { |k, v0|
for d0.each { |k, v0|
alt d1.find(k) {
some(v1) {
if !eq(v0, v1) { equal = false; } }

View file

@@ -56,13 +56,13 @@ iface map<K: copy, V: copy> {
fn remove(K) -> option<V>;
#[doc = "Iterate over all the key/value pairs in the map"]
fn items(fn(K, V));
fn each(fn(K, V) -> bool);
#[doc = "Iterate over all the keys in the map"]
fn keys(fn(K));
fn each_key(fn(K) -> bool);
#[doc = "Iterate over all the values in the map"]
fn values(fn(V));
fn each_value(fn(V) -> bool);
}
// FIXME: package this up and export it as a datatype usable for
@@ -207,49 +207,40 @@ mod chained {
ret vec::to_mut(vec::from_elem(nchains, absent));
}
// Walk one hash-bucket collision chain, invoking `blk` on every entry.
// NOTE(review): this is the pre-change helper deleted by this commit; its
// replacement (`each_entry`) folds the chain walk into a bool-returning,
// early-exit iteration over the whole table.
fn foreach_entry<K: copy, V: copy>(chain0: chain<K,V>,
blk: fn(@entry<K,V>)) {
let mut chain = chain0;
loop {
alt chain {
absent { ret; }
present(entry) {
// Capture the successor before calling blk, since the
// callback is allowed to relink the entry (see rehash).
let next = entry.next;
blk(entry); // may modify entry.next!
chain = next;
}
}
}
}
fn foreach_chain<K: copy, V: copy>(chains: [const chain<K,V>],
blk: fn(@entry<K,V>)) {
let mut i = 0u;
let n = vec::len(chains);
fn each_entry<K: copy, V: copy>(tbl: t<K, V>,
blk: fn(@entry<K,V>) -> bool) {
let mut i = 0u, n = vec::len(tbl.chains);
while i < n {
foreach_entry(chains[i], blk);
let mut chain = tbl.chains[i];
loop {
alt chain {
absent { break; }
present(entry) {
let next = entry.next;
if !blk(entry) { ret; }
chain = next;
}
}
}
i += 1u;
}
}
fn rehash<K: copy, V: copy>(tbl: t<K,V>) {
let old_chains = tbl.chains;
let n_old_chains = vec::len(old_chains);
let n_old_chains = vec::len(tbl.chains);
let n_new_chains: uint = uint::next_power_of_two(n_old_chains + 1u);
tbl.chains = chains(n_new_chains);
foreach_chain(old_chains) { |entry|
let new_chains = chains(n_new_chains);
for each_entry(tbl) {|entry|
let idx = entry.hash % n_new_chains;
entry.next = tbl.chains[idx];
tbl.chains[idx] = present(entry);
entry.next = new_chains[idx];
new_chains[idx] = present(entry);
}
tbl.chains = new_chains;
}
fn items<K: copy, V: copy>(tbl: t<K,V>, blk: fn(K,V)) {
let tbl_chains = tbl.chains; // Satisfy alias checker.
foreach_chain(tbl_chains) { |entry|
let key = entry.key;
let value = entry.value;
blk(key, value);
fn each<K: copy, V: copy>(tbl: t<K,V>, blk: fn(K,V) -> bool) {
for each_entry(tbl) {|entry|
if !blk(copy entry.key, copy entry.value) { break; }
}
}
@@ -277,11 +268,11 @@ mod chained {
fn remove(k: K) -> option<V> { remove(self, k) }
fn items(blk: fn(K, V)) { items(self, blk); }
fn each(blk: fn(K, V) -> bool) { each(self, blk); }
fn keys(blk: fn(K)) { items(self) { |k, _v| blk(k) } }
fn each_key(blk: fn(K) -> bool) { each(self) { |k, _v| blk(k)} }
fn values(blk: fn(V)) { items(self) { |_k, v| blk(v) } }
fn each_value(blk: fn(V) -> bool) { each(self) { |_k, v| blk(v)} }
}
fn mk<K: copy, V: copy>(hasher: hashfn<K>, eqer: eqfn<K>) -> t<K,V> {

View file

@@ -89,27 +89,27 @@ impl <V: copy> of map::map<uint, V> for smallintmap<V> {
fn get(&&key: uint) -> V { get(self, key) }
fn find(&&key: uint) -> option<V> { find(self, key) }
fn rehash() { fail }
fn items(it: fn(&&uint, V)) {
fn each(it: fn(&&uint, V) -> bool) {
let mut idx = 0u, l = self.v.len();
while idx < l {
alt self.v[idx] {
some(elt) {
it(idx, copy elt);
if !it(idx, copy elt) { break; }
}
none { }
}
idx += 1u;
}
}
fn keys(it: fn(&&uint)) {
fn each_key(it: fn(&&uint) -> bool) {
let mut idx = 0u, l = self.v.len();
while idx < l {
if self.v[idx] != none { it(idx); }
if self.v[idx] != none && !it(idx) { ret; }
idx += 1u;
}
}
fn values(it: fn(V)) {
self.items({|_i, v| it(v)});
fn each_value(it: fn(V) -> bool) {
self.each {|_i, v| it(v)}
}
}

View file

@@ -106,7 +106,7 @@ fn have_crate_data(cstore: cstore, cnum: ast::crate_num) -> bool {
}
fn iter_crate_data(cstore: cstore, i: fn(ast::crate_num, crate_metadata)) {
p(cstore).metas.items {|k,v| i(k, v);};
for p(cstore).metas.each {|k,v| i(k, v);};
}
fn add_used_crate_file(cstore: cstore, lib: str) {
@@ -155,7 +155,7 @@ fn get_dep_hashes(cstore: cstore) -> [str] {
type crate_hash = {name: str, hash: str};
let mut result = [];
p(cstore).use_crate_map.values {|cnum|
for p(cstore).use_crate_map.each_value {|cnum|
let cdata = cstore::get_crate_data(cstore, cnum);
let hash = decoder::get_crate_hash(cdata.data);
#debug("Add hash[%s]: %s", cdata.name, hash);

View file

@@ -206,7 +206,7 @@ fn encode_item_paths(ebml_w: ebml::writer, ecx: @encode_ctxt, crate: @crate)
fn encode_reexport_paths(ebml_w: ebml::writer,
ecx: @encode_ctxt, &index: [entry<str>]) {
let tcx = ecx.ccx.tcx;
ecx.ccx.exp_map.items {|exp_id, defs|
for ecx.ccx.exp_map.each {|exp_id, defs|
for defs.each {|def|
if !def.reexp { cont; }
let path = alt check tcx.items.get(exp_id) {

View file

@@ -129,6 +129,6 @@ fn compute_capture_vars(tcx: ty::ctxt,
}
let mut result = [];
cap_map.values { |cap_var| result += [cap_var]; }
for cap_map.each_value { |cap_var| result += [cap_var]; }
ret result;
}

View file

@@ -70,7 +70,7 @@ fn find_last_uses(c: @crate, def_map: resolve::def_map,
mut blocks: nil};
visit::visit_crate(*c, cx, v);
let mini_table = std::map::int_hash();
cx.last_uses.items {|key, val|
for cx.last_uses.each {|key, val|
if val {
alt key {
path(id) {

View file

@@ -172,7 +172,7 @@ fn time(do_it: bool, what: str, thunk: fn()) {
fn check_item(cx: ctxt, i: @ast::item) {
cx.with_warn_attrs(i.attrs) {|cx|
cx.curr.items {|lint, level|
for cx.curr.each {|lint, level|
alt lint {
ctypes { check_item_ctypes(cx, level, i); }
unused_imports { check_item_unused_imports(cx, level, i); }
@@ -265,7 +265,7 @@ fn check_crate(tcx: ty::ctxt, crate: @ast::crate,
tcx: tcx};
// Install defaults.
cx.dict.items {|_k, spec| cx.set_level(spec.lint, spec.default); }
for cx.dict.each {|_k, spec| cx.set_level(spec.lint, spec.default); }
// Install command-line options, overriding defaults.
for lint_opts.each {|pair|

View file

@@ -352,7 +352,7 @@ fn map_crate(e: @env, c: @ast::crate) {
fn resolve_imports(e: env) {
e.used_imports.track = true;
e.imports.items {|id, v|
for e.imports.each {|id, v|
alt check v {
todo(name, path, span, scopes) {
resolve_import(e, id, name, *path, span, scopes);
@@ -368,7 +368,7 @@ fn resolve_imports(e: env) {
// using lint-specific control flags presently but resolve-specific data
// structures. Should use the general lint framework (with scopes, attrs).
fn check_unused_imports(e: @env, level: lint::level) {
e.imports.items {|k, v|
for e.imports.each {|k, v|
alt v {
resolved(_, _, _, _, name, sp) {
if !vec::contains(e.used_imports.data, k) {
@@ -1673,8 +1673,8 @@ fn lookup_external(e: env, cnum: int, ids: [ident], ns: namespace) ->
fn check_for_collisions(e: @env, c: ast::crate) {
// Module indices make checking those relatively simple -- just check each
// name for multiple entities in the same namespace.
e.mod_map.values {|val|
val.index.items {|k, v| check_mod_name(*e, k, v); };
for e.mod_map.each_value {|val|
for val.index.each {|k, v| check_mod_name(*e, k, v); };
};
// Other scopes have to be checked the hard way.
let v =
@@ -1912,7 +1912,7 @@ fn check_exports(e: @env) {
assert mid.crate == ast::local_crate;
let ixm = e.mod_map.get(mid.node);
ixm.index.items() {|ident, mies|
for ixm.index.each {|ident, mies|
list::iter(mies) {|mie|
alt mie {
mie_item(item) {
@@ -2055,7 +2055,7 @@ fn check_exports(e: @env) {
}
}
e.mod_map.values {|_mod|
for e.mod_map.each_value {|_mod|
alt _mod.m {
some(m) {
let glob_is_re_exported = int_hash();

View file

@@ -365,7 +365,7 @@ fn compile_submatch(bcx: block, m: match, vals: [ValueRef],
some(e) {
// Temporarily set bindings. They'll be rewritten to PHI nodes
// for the actual arm block.
data.id_map.items {|key, val|
for data.id_map.each {|key, val|
let loc = local_mem(option::get(assoc(key, m[0].bound)));
bcx.fcx.lllocals.insert(val, loc);
};
@@ -565,7 +565,7 @@ fn make_phi_bindings(bcx: block, map: [exit_node],
let _icx = bcx.insn_ctxt("alt::make_phi_bindings");
let our_block = bcx.llbb as uint;
let mut success = true, bcx = bcx;
ids.items {|name, node_id|
for ids.each {|name, node_id|
let mut llbbs = [];
let mut vals = [];
for vec::each(map) {|ex|
@@ -583,7 +583,7 @@ fn make_phi_bindings(bcx: block, map: [exit_node],
};
if success {
// Copy references that the alias analysis considered unsafe
ids.values {|node_id|
for ids.each_value {|node_id|
if bcx.ccx().maps.copy_map.contains_key(node_id) {
let local = alt bcx.fcx.lllocals.find(node_id) {
some(local_mem(x)) { x }

View file

@@ -526,7 +526,7 @@ fn make_generic_glue(ccx: @crate_ctxt, t: ty::t, llfn: ValueRef,
fn emit_tydescs(ccx: @crate_ctxt) {
let _icx = ccx.insn_ctxt("emit_tydescs");
ccx.tydescs.items {|key, val|
for ccx.tydescs.each {|key, val|
let glue_fn_ty = T_ptr(T_glue_fn(ccx));
let ti = val;
let take_glue =
@@ -4871,7 +4871,7 @@ fn create_module_map(ccx: @crate_ctxt) -> ValueRef {
});
lib::llvm::SetLinkage(map, lib::llvm::InternalLinkage);
let mut elts: [ValueRef] = [];
ccx.module_data.items {|key, val|
for ccx.module_data.each {|key, val|
let elt = C_struct([p2i(ccx, C_cstr(ccx, key)),
p2i(ccx, val)]);
elts += [elt];
@@ -5091,7 +5091,7 @@ fn trans_crate(sess: session::session, crate: @ast::crate, tcx: ty::ctxt,
}
if ccx.sess.opts.count_llvm_insns {
ccx.stats.llvm_insns.items() { |k, v|
for ccx.stats.llvm_insns.each { |k, v|
io::println(#fmt("%-7u %s", v, k));
}
}

View file

@@ -555,7 +555,7 @@ fn norm_a_constraint(id: def_id, c: constraint) -> [norm_constraint] {
// non-exhaustive match in trans.
fn constraints(fcx: fn_ctxt) -> [norm_constraint] {
let mut rslt: [norm_constraint] = [];
fcx.enclosing.constrs.items {|key, val|
for fcx.enclosing.constrs.each {|key, val|
rslt += norm_a_constraint(key, val);
};
ret rslt;
@@ -875,7 +875,7 @@ fn copy_in_poststate_two(fcx: fn_ctxt, src_post: poststate,
}
fcx.enclosing.constrs.values {|val|
for fcx.enclosing.constrs.each_value {|val|
// replace any occurrences of the src def_id with the
// dest def_id
let insts = find_instances(fcx, subst, val);

View file

@@ -47,7 +47,7 @@ fn to_assoc_list<K:copy, V:copy>(
) -> [(K, V)] {
let mut vec = [];
map.items {|k, v|
for map.each {|k, v|
vec += [(k, v)];
}
ret vec;
@@ -81,7 +81,7 @@ fn from_str_assoc_list<V:copy>(
fn build_reexport_def_set(srv: astsrv::srv) -> def_set {
let assoc_list = astsrv::exec(srv) {|ctxt|
let def_set = common::new_def_hash();
ctxt.exp_map.items {|_id, defs|
for ctxt.exp_map.each {|_id, defs|
for defs.each {|def|
if def.reexp {
def_set.insert(def.id, ());
@@ -172,7 +172,7 @@ fn build_reexport_path_map(srv: astsrv::srv, -def_map: def_map) -> path_map {
let def_map = from_def_assoc_list(def_assoc_list);
let path_map = map::str_hash::<[(str,doc::itemtag)]>();
ctxt.exp_map.items {|exp_id, defs|
for ctxt.exp_map.each {|exp_id, defs|
let path = alt check ctxt.ast_map.get(exp_id) {
ast_map::node_export(_, path) { path }
};

View file

@@ -93,7 +93,7 @@ mod map_reduce {
map(input, bind emit(intermediates, ctrl, _, _));
intermediates.values {|v| send(v, release); }
for intermediates.each_value {|v| send(v, release); }
send(ctrl, mapper_done);
}
@@ -172,7 +172,7 @@ mod map_reduce {
}
}
reducers.values {|v| send(v, done); }
for reducers.each_value {|v| send(v, done); }
for results.each {|r| future::get(r); }
}

View file

@@ -10,7 +10,7 @@ fn bitv_to_str(enclosing: fn_info, v: bitv::bitv) -> str {
let s = "";
// error is that the value type in the hash map is var_info, not a box
enclosing.vars.values {|val|
for enclosing.vars.each_value {|val|
if bitv::get(v, val) { s += "foo"; }
}
ret s;