Skip to content

Commit

Permalink
hash_map: adding a rehash() method
Browse files Browse the repository at this point in the history
This allows a highly fragmented hash_map to have its tombstones removed
as all of the entries are rehashed in place.

It would be nice to trigger this rehash() automatically, but that
currently presents a challenge: it does not work with adapted contexts,
since the keys are not preserved in the map for re-hashing and the hash
values are not currently stored; and the non-adapted contexts would
require a bit of additional book-keeping to decide when to call rehash().
  • Loading branch information
mrjbq7 committed Nov 9, 2023
1 parent 6b9f7e2 commit 93072d5
Showing 1 changed file with 123 additions and 0 deletions.
123 changes: 123 additions & 0 deletions lib/std/hash_map.zig
Original file line number Diff line number Diff line change
Expand Up @@ -681,6 +681,11 @@ pub fn HashMap(
self.unmanaged = .{};
return result;
}

/// Rebuild the table in place, removing accumulated tombstones.
/// Thin wrapper that forwards to the unmanaged implementation,
/// supplying the context stored on this managed map.
pub fn rehash(self: *Self) void {
    const ctx = self.ctx;
    self.unmanaged.rehash(ctx);
}
};
}

Expand Down Expand Up @@ -1322,6 +1327,7 @@ pub fn HashMapUnmanaged(
if (@TypeOf(hash) != Hash) {
@compileError("Context " ++ @typeName(@TypeOf(ctx)) ++ " has a generic hash function that returns the wrong type! " ++ @typeName(Hash) ++ " was expected, but found " ++ @typeName(@TypeOf(hash)));
}

const mask = self.capacity() - 1;
const fingerprint = Metadata.takeFingerprint(hash);
var limit = self.capacity();
Expand Down Expand Up @@ -1505,6 +1511,92 @@ pub fn HashMapUnmanaged(
return result;
}

/// Rehash the map, in-place.
///
/// Re-inserts every live entry at the slot its hash currently maps to,
/// eliminating tombstones left behind by removals so that probe chains
/// shorten. Capacity is unchanged; the pass is O(capacity). `ctx` must
/// hash keys identically to the context used for prior insertions.
pub fn rehash(self: *Self, ctx: anytype) void {
    // Capacity is a power of two, so `hash & mask` is the home index.
    const mask = self.capacity() - 1;

    var metadata = self.metadata.?;
    var keys_ptr = self.keys();
    var values_ptr = self.values();
    var curr: Size = 0;

    // While we are re-hashing every slot, we will use the
    // fingerprint to mark used buckets as being used and either free
    // (needing to be rehashed) or tombstone (already rehashed).
    //
    // NOTE(review): this overloads `fingerprint` as a progress marker
    // while `used` stays 1. If Metadata.takeFingerprint can produce the
    // same values as the `free`/`tombstone` markers (nothing visible
    // here prevents it), a slot finalized in the `idx < curr` branch
    // below could later be misread as "already rehashed" by the probe
    // loop — confirm Metadata reserves those values before relying on
    // this. TODO verify against the Metadata definition.

    // Pass 1: wipe every slot's fingerprint. This also clears the
    // tombstone state of unused slots, turning them into free slots.
    while (curr < self.capacity()) : (curr += 1) {
        metadata[curr].fingerprint = Metadata.free;
    }

    // Now iterate over all the buckets, rehashing them

    curr = 0;
    while (curr < self.capacity()) {
        // Skip slots with no live entry; pass 1 made them free.
        if (!metadata[curr].isUsed()) {
            assert(metadata[curr].isFree());
            curr += 1;
            continue;
        }

        // Recompute the hash from the stored key (this is why adapted
        // contexts, which don't store keys, can't use this routine).
        var hash = ctx.hash(keys_ptr[curr]);
        var fingerprint = Metadata.takeFingerprint(hash);
        var idx = @as(usize, @truncate(hash & mask));

        // For each bucket, rehash to an index:
        // 1) before the cursor, probed into a free slot, or
        // 2) equal to the cursor, no need to move, or
        // 3) ahead of the cursor, probing over already rehashed

        // Linear probe: slots below the cursor are final, so step over
        // occupied ones; slots above the cursor marked tombstone are
        // already rehashed, so step over those too. Stop at the first
        // slot this entry may claim (or swap with).
        while ((idx < curr and metadata[idx].isUsed()) or
            (idx > curr and metadata[idx].fingerprint == Metadata.tombstone))
        {
            idx = (idx + 1) & mask;
        }

        if (idx < curr) {
            // Case 1: destination is behind the cursor and free — move
            // the entry there (with its real fingerprint) and clear the
            // slot at the cursor.
            assert(metadata[idx].isFree());
            metadata[idx].fingerprint = fingerprint;
            metadata[idx].used = 1;
            keys_ptr[idx] = keys_ptr[curr];
            values_ptr[idx] = values_ptr[curr];

            metadata[curr].used = 0;
            assert(metadata[curr].isFree());
            // `undefined` scrubs the vacated slot; it must never be read.
            keys_ptr[curr] = undefined;
            values_ptr[curr] = undefined;

            curr += 1;
        } else if (idx == curr) {
            // Case 2: entry already sits at its probe position — just
            // restore its real fingerprint in place.
            metadata[idx].fingerprint = fingerprint;
            curr += 1;
        } else {
            // Case 3: destination is ahead of the cursor. Mark it as
            // rehashed; the probe loop guaranteed it isn't already.
            assert(metadata[idx].fingerprint != Metadata.tombstone);
            metadata[idx].fingerprint = Metadata.tombstone;
            if (metadata[idx].isUsed()) {
                // Destination holds a not-yet-rehashed entry: swap it
                // back to the cursor. curr is NOT advanced, so the
                // displaced entry is rehashed on the next iteration.
                // NOTE(review): the entry parked at `idx` loses its real
                // fingerprint here (left as tombstone) — presumably it is
                // restored when... no later write to metadata[idx] is
                // visible in this block; confirm entries placed via this
                // branch end up with their true fingerprint.
                var tmpkey = keys_ptr[idx];
                var tmpvalue = values_ptr[idx];

                keys_ptr[idx] = keys_ptr[curr];
                values_ptr[idx] = values_ptr[curr];

                keys_ptr[curr] = tmpkey;
                values_ptr[curr] = tmpvalue;
            } else {
                // Destination was empty: move the entry forward and
                // clear the cursor slot, then advance.
                metadata[idx].used = 1;
                keys_ptr[idx] = keys_ptr[curr];
                values_ptr[idx] = values_ptr[curr];

                metadata[curr].fingerprint = Metadata.free;
                metadata[curr].used = 0;
                keys_ptr[curr] = undefined;
                values_ptr[curr] = undefined;

                curr += 1;
            }
        }
    }
}

fn grow(self: *Self, allocator: Allocator, new_capacity: Size, ctx: Context) Allocator.Error!void {
@setCold(true);
const new_cap = @max(new_capacity, minimal_capacity);
Expand Down Expand Up @@ -2218,3 +2310,34 @@ test "std.hash_map repeat fetchRemove" {
try testing.expect(map.get(2) != null);
try testing.expect(map.get(3) != null);
}

test "std.hash_map rehash" {
    var map = AutoHashMap(u32, u32).init(std.testing.allocator);
    defer map.deinit();

    // Seeded PRNG keeps the test deterministic across runs.
    var prng = std.rand.DefaultPrng.init(0);
    const random = prng.random();

    // Multiple of 6 so `count * 2 / 3` below is exact. The range is kept
    // small (tens of thousands of entries) so the test stays fast while
    // still producing plenty of tombstones for rehash() to clean up.
    const count = 6 * random.intRangeLessThan(u32, 10_000, 50_000);

    // Insert all keys, immediately deleting every third one to leave
    // tombstones scattered through the table.
    var i: u32 = 0;
    while (i < count) : (i += 1) {
        try map.put(i, i);
        if (i % 3 == 0) {
            // expectEqual takes the expected value first.
            try expectEqual(true, map.remove(i));
        }
    }

    map.rehash();

    // Two thirds of the keys survive; rehash must not change the count.
    try expectEqual(count * 2 / 3, map.count());

    // Every surviving key must still be findable after the rehash, and
    // every removed key must stay absent. Comparing optionals directly
    // (rather than unwrapping with `.?`) gives a clean test failure
    // instead of a panic on a missing key.
    i = 0;
    while (i < count) : (i += 1) {
        if (i % 3 == 0) {
            try expectEqual(@as(?u32, null), map.get(i));
        } else {
            try expectEqual(@as(?u32, i), map.get(i));
        }
    }
}

0 comments on commit 93072d5

Please sign in to comment.