Skip to content

Commit

Permalink
Move map iface over to more for-friendly iteration methods
Browse files Browse the repository at this point in the history
  • Loading branch information
marijnh committed Apr 23, 2012
1 parent a872a99 commit 9053f54
Show file tree
Hide file tree
Showing 18 changed files with 74 additions and 83 deletions.
14 changes: 7 additions & 7 deletions src/cargo/cargo.rs
Expand Up @@ -222,7 +222,7 @@ fn try_parse_sources(filename: str, sources: map::hashmap<str, source>) {
let c = io::read_whole_file_str(filename);
alt json::from_str(result::get(c)) {
ok(json::dict(j)) {
j.items { |k, v|
for j.each { |k, v|
sources.insert(k, parse_source(k, v));
#debug("source: %s", k);
}
Expand Down Expand Up @@ -404,11 +404,11 @@ fn configure(opts: options) -> cargo {
need_dir(c.libdir);
need_dir(c.bindir);

sources.keys { |k|
for sources.each_key { |k|
let mut s = sources.get(k);
load_source_packages(c, s);
sources.insert(k, s);
};
}

if c.pgp {
pgp::init(c.root);
Expand All @@ -422,11 +422,11 @@ fn configure(opts: options) -> cargo {
}

fn for_each_package(c: cargo, b: fn(source, package)) {
c.sources.values({ |v|
for c.sources.each_value {|v|
for vec::each(copy v.packages) {|p|
b(v, p);
}
})
}
}

// Runs all programs in directory <buildpath>
Expand Down Expand Up @@ -592,7 +592,7 @@ fn cargo_suggestion(c: cargo, syncing: bool, fallback: fn())
}
if !syncing {
let mut npkg = 0u;
c.sources.values({ |v| npkg += vec::len(v.packages) });
for c.sources.each_value { |v| npkg += vec::len(v.packages) }
if npkg == 0u {
error("No packages known. You may wish to run " +
"\"cargo sync\".");
Expand Down Expand Up @@ -776,7 +776,7 @@ fn cmd_sync(c: cargo) {
sync_one(c, c.opts.free[2], c.sources.get(c.opts.free[2]));
} else {
cargo_suggestion(c, true, { || } );
c.sources.items { |k, v|
for c.sources.each { |k, v|
sync_one(c, k, v);
}
}
Expand Down
4 changes: 2 additions & 2 deletions src/librustsyntax/ext/simplext.rs
Expand Up @@ -167,7 +167,7 @@ fn use_selectors_to_bind(b: binders, e: @expr) -> option<bindings> {
alt sel(match_expr(e)) { none { ret none; } _ { } }
}
let mut never_mind: bool = false;
b.real_binders.items {|key, val|
for b.real_binders.each {|key, val|
alt val(match_expr(e)) {
none { never_mind = true; }
some(mtc) { res.insert(key, mtc); }
Expand Down Expand Up @@ -251,7 +251,7 @@ fn free_vars(b: bindings, e: @expr, it: fn(ident)) {
with *default_ast_fold()};
let f = make_fold(f_pre);
f.fold_expr(e); // ignore result
idents.keys {|x| it(x); };
for idents.each_key {|x| it(x); };
}


Expand Down
2 changes: 1 addition & 1 deletion src/librustsyntax/parse/token.rs
Expand Up @@ -217,7 +217,7 @@ nonetheless valid as identifiers because they are unambiguous.
"]
fn keyword_table() -> hashmap<str, ()> {
let keywords = str_hash();
bad_expr_word_table().keys() {|word|
for bad_expr_word_table().each_key {|word|
keywords.insert(word, ());
}
let other_keywords = [
Expand Down
4 changes: 2 additions & 2 deletions src/libstd/json.rs
Expand Up @@ -85,7 +85,7 @@ fn to_writer(wr: io::writer, j: json) {

wr.write_str("{ ");
let mut first = true;
d.items { |key, value|
for d.each { |key, value|
if !first {
wr.write_str(", ");
}
Expand Down Expand Up @@ -481,7 +481,7 @@ fn eq(value0: json, value1: json) -> bool {
(dict(d0), dict(d1)) {
if d0.size() == d1.size() {
let mut equal = true;
d0.items { |k, v0|
for d0.each { |k, v0|
alt d1.find(k) {
some(v1) {
if !eq(v0, v1) { equal = false; } }
Expand Down
67 changes: 29 additions & 38 deletions src/libstd/map.rs
Expand Up @@ -56,13 +56,13 @@ iface map<K: copy, V: copy> {
fn remove(K) -> option<V>;

#[doc = "Iterate over all the key/value pairs in the map"]
fn items(fn(K, V));
fn each(fn(K, V) -> bool);

#[doc = "Iterate over all the keys in the map"]
fn keys(fn(K));
fn each_key(fn(K) -> bool);

#[doc = "Iterate over all the values in the map"]
fn values(fn(V));
fn each_value(fn(V) -> bool);
}

// FIXME: package this up and export it as a datatype usable for
Expand Down Expand Up @@ -207,49 +207,40 @@ mod chained {
ret vec::to_mut(vec::from_elem(nchains, absent));
}

fn foreach_entry<K: copy, V: copy>(chain0: chain<K,V>,
blk: fn(@entry<K,V>)) {
let mut chain = chain0;
loop {
alt chain {
absent { ret; }
present(entry) {
let next = entry.next;
blk(entry); // may modify entry.next!
chain = next;
}
}
}
}

fn foreach_chain<K: copy, V: copy>(chains: [const chain<K,V>],
blk: fn(@entry<K,V>)) {
let mut i = 0u;
let n = vec::len(chains);
fn each_entry<K: copy, V: copy>(tbl: t<K, V>,
blk: fn(@entry<K,V>) -> bool) {
let mut i = 0u, n = vec::len(tbl.chains);
while i < n {
foreach_entry(chains[i], blk);
let mut chain = tbl.chains[i];
loop {
alt chain {
absent { break; }
present(entry) {
let next = entry.next;
if !blk(entry) { ret; }
chain = next;
}
}
}
i += 1u;
}
}

fn rehash<K: copy, V: copy>(tbl: t<K,V>) {
let old_chains = tbl.chains;
let n_old_chains = vec::len(old_chains);
let n_old_chains = vec::len(tbl.chains);
let n_new_chains: uint = uint::next_power_of_two(n_old_chains + 1u);
tbl.chains = chains(n_new_chains);
foreach_chain(old_chains) { |entry|
let new_chains = chains(n_new_chains);
for each_entry(tbl) {|entry|
let idx = entry.hash % n_new_chains;
entry.next = tbl.chains[idx];
tbl.chains[idx] = present(entry);
entry.next = new_chains[idx];
new_chains[idx] = present(entry);
}
tbl.chains = new_chains;
}

fn items<K: copy, V: copy>(tbl: t<K,V>, blk: fn(K,V)) {
let tbl_chains = tbl.chains; // Satisfy alias checker.
foreach_chain(tbl_chains) { |entry|
let key = entry.key;
let value = entry.value;
blk(key, value);
fn each<K: copy, V: copy>(tbl: t<K,V>, blk: fn(K,V) -> bool) {
for each_entry(tbl) {|entry|
if !blk(copy entry.key, copy entry.value) { break; }
}
}

Expand Down Expand Up @@ -277,11 +268,11 @@ mod chained {

fn remove(k: K) -> option<V> { remove(self, k) }

fn items(blk: fn(K, V)) { items(self, blk); }
fn each(blk: fn(K, V) -> bool) { each(self, blk); }

fn keys(blk: fn(K)) { items(self) { |k, _v| blk(k) } }
fn each_key(blk: fn(K) -> bool) { each(self) { |k, _v| blk(k)} }

fn values(blk: fn(V)) { items(self) { |_k, v| blk(v) } }
fn each_value(blk: fn(V) -> bool) { each(self) { |_k, v| blk(v)} }
}

fn mk<K: copy, V: copy>(hasher: hashfn<K>, eqer: eqfn<K>) -> t<K,V> {
Expand Down
12 changes: 6 additions & 6 deletions src/libstd/smallintmap.rs
Expand Up @@ -89,27 +89,27 @@ impl <V: copy> of map::map<uint, V> for smallintmap<V> {
fn get(&&key: uint) -> V { get(self, key) }
fn find(&&key: uint) -> option<V> { find(self, key) }
fn rehash() { fail }
fn items(it: fn(&&uint, V)) {
fn each(it: fn(&&uint, V) -> bool) {
let mut idx = 0u, l = self.v.len();
while idx < l {
alt self.v[idx] {
some(elt) {
it(idx, copy elt);
if !it(idx, copy elt) { break; }
}
none { }
}
idx += 1u;
}
}
fn keys(it: fn(&&uint)) {
fn each_key(it: fn(&&uint) -> bool) {
let mut idx = 0u, l = self.v.len();
while idx < l {
if self.v[idx] != none { it(idx); }
if self.v[idx] != none && !it(idx) { ret; }
idx += 1u;
}
}
fn values(it: fn(V)) {
self.items({|_i, v| it(v)});
fn each_value(it: fn(V) -> bool) {
self.each {|_i, v| it(v)}
}
}

Expand Down
4 changes: 2 additions & 2 deletions src/rustc/metadata/cstore.rs
Expand Up @@ -106,7 +106,7 @@ fn have_crate_data(cstore: cstore, cnum: ast::crate_num) -> bool {
}

fn iter_crate_data(cstore: cstore, i: fn(ast::crate_num, crate_metadata)) {
p(cstore).metas.items {|k,v| i(k, v);};
for p(cstore).metas.each {|k,v| i(k, v);};
}

fn add_used_crate_file(cstore: cstore, lib: str) {
Expand Down Expand Up @@ -155,7 +155,7 @@ fn get_dep_hashes(cstore: cstore) -> [str] {
type crate_hash = {name: str, hash: str};
let mut result = [];

p(cstore).use_crate_map.values {|cnum|
for p(cstore).use_crate_map.each_value {|cnum|
let cdata = cstore::get_crate_data(cstore, cnum);
let hash = decoder::get_crate_hash(cdata.data);
#debug("Add hash[%s]: %s", cdata.name, hash);
Expand Down
2 changes: 1 addition & 1 deletion src/rustc/metadata/encoder.rs
Expand Up @@ -206,7 +206,7 @@ fn encode_item_paths(ebml_w: ebml::writer, ecx: @encode_ctxt, crate: @crate)
fn encode_reexport_paths(ebml_w: ebml::writer,
ecx: @encode_ctxt, &index: [entry<str>]) {
let tcx = ecx.ccx.tcx;
ecx.ccx.exp_map.items {|exp_id, defs|
for ecx.ccx.exp_map.each {|exp_id, defs|
for defs.each {|def|
if !def.reexp { cont; }
let path = alt check tcx.items.get(exp_id) {
Expand Down
2 changes: 1 addition & 1 deletion src/rustc/middle/capture.rs
Expand Up @@ -129,6 +129,6 @@ fn compute_capture_vars(tcx: ty::ctxt,
}

let mut result = [];
cap_map.values { |cap_var| result += [cap_var]; }
for cap_map.each_value { |cap_var| result += [cap_var]; }
ret result;
}
2 changes: 1 addition & 1 deletion src/rustc/middle/last_use.rs
Expand Up @@ -70,7 +70,7 @@ fn find_last_uses(c: @crate, def_map: resolve::def_map,
mut blocks: nil};
visit::visit_crate(*c, cx, v);
let mini_table = std::map::int_hash();
cx.last_uses.items {|key, val|
for cx.last_uses.each {|key, val|
if val {
alt key {
path(id) {
Expand Down
4 changes: 2 additions & 2 deletions src/rustc/middle/lint.rs
Expand Up @@ -172,7 +172,7 @@ fn time(do_it: bool, what: str, thunk: fn()) {

fn check_item(cx: ctxt, i: @ast::item) {
cx.with_warn_attrs(i.attrs) {|cx|
cx.curr.items {|lint, level|
for cx.curr.each {|lint, level|
alt lint {
ctypes { check_item_ctypes(cx, level, i); }
unused_imports { check_item_unused_imports(cx, level, i); }
Expand Down Expand Up @@ -265,7 +265,7 @@ fn check_crate(tcx: ty::ctxt, crate: @ast::crate,
tcx: tcx};

// Install defaults.
cx.dict.items {|_k, spec| cx.set_level(spec.lint, spec.default); }
for cx.dict.each {|_k, spec| cx.set_level(spec.lint, spec.default); }

// Install command-line options, overriding defaults.
for lint_opts.each {|pair|
Expand Down
12 changes: 6 additions & 6 deletions src/rustc/middle/resolve.rs
Expand Up @@ -352,7 +352,7 @@ fn map_crate(e: @env, c: @ast::crate) {

fn resolve_imports(e: env) {
e.used_imports.track = true;
e.imports.items {|id, v|
for e.imports.each {|id, v|
alt check v {
todo(name, path, span, scopes) {
resolve_import(e, id, name, *path, span, scopes);
Expand All @@ -368,7 +368,7 @@ fn resolve_imports(e: env) {
// using lint-specific control flags presently but resolve-specific data
// structures. Should use the general lint framework (with scopes, attrs).
fn check_unused_imports(e: @env, level: lint::level) {
e.imports.items {|k, v|
for e.imports.each {|k, v|
alt v {
resolved(_, _, _, _, name, sp) {
if !vec::contains(e.used_imports.data, k) {
Expand Down Expand Up @@ -1673,8 +1673,8 @@ fn lookup_external(e: env, cnum: int, ids: [ident], ns: namespace) ->
fn check_for_collisions(e: @env, c: ast::crate) {
// Module indices make checking those relatively simple -- just check each
// name for multiple entities in the same namespace.
e.mod_map.values {|val|
val.index.items {|k, v| check_mod_name(*e, k, v); };
for e.mod_map.each_value {|val|
for val.index.each {|k, v| check_mod_name(*e, k, v); };
};
// Other scopes have to be checked the hard way.
let v =
Expand Down Expand Up @@ -1912,7 +1912,7 @@ fn check_exports(e: @env) {
assert mid.crate == ast::local_crate;
let ixm = e.mod_map.get(mid.node);

ixm.index.items() {|ident, mies|
for ixm.index.each {|ident, mies|
list::iter(mies) {|mie|
alt mie {
mie_item(item) {
Expand Down Expand Up @@ -2055,7 +2055,7 @@ fn check_exports(e: @env) {
}
}

e.mod_map.values {|_mod|
for e.mod_map.each_value {|_mod|
alt _mod.m {
some(m) {
let glob_is_re_exported = int_hash();
Expand Down
6 changes: 3 additions & 3 deletions src/rustc/middle/trans/alt.rs
Expand Up @@ -365,7 +365,7 @@ fn compile_submatch(bcx: block, m: match, vals: [ValueRef],
some(e) {
// Temporarily set bindings. They'll be rewritten to PHI nodes
// for the actual arm block.
data.id_map.items {|key, val|
for data.id_map.each {|key, val|
let loc = local_mem(option::get(assoc(key, m[0].bound)));
bcx.fcx.lllocals.insert(val, loc);
};
Expand Down Expand Up @@ -565,7 +565,7 @@ fn make_phi_bindings(bcx: block, map: [exit_node],
let _icx = bcx.insn_ctxt("alt::make_phi_bindings");
let our_block = bcx.llbb as uint;
let mut success = true, bcx = bcx;
ids.items {|name, node_id|
for ids.each {|name, node_id|
let mut llbbs = [];
let mut vals = [];
for vec::each(map) {|ex|
Expand All @@ -583,7 +583,7 @@ fn make_phi_bindings(bcx: block, map: [exit_node],
};
if success {
// Copy references that the alias analysis considered unsafe
ids.values {|node_id|
for ids.each_value {|node_id|
if bcx.ccx().maps.copy_map.contains_key(node_id) {
let local = alt bcx.fcx.lllocals.find(node_id) {
some(local_mem(x)) { x }
Expand Down

0 comments on commit 9053f54

Please sign in to comment.