improv(gossip,accountsdb): improve test coverage on important methods #195

Merged: 7 commits, Jul 15, 2024
14 changes: 14 additions & 0 deletions .vscode/launch.json
@@ -0,0 +1,14 @@
{
"version": "0.2.0",
"configurations": [
{
"type": "lldb",
"request": "launch",
"name": "Debug Tests",
"program": "${workspaceFolder}/zig-out/bin/test",
"args": [],
"cwd": "${workspaceFolder}",
"preLaunchTask": "zig build"
}
]
}
13 changes: 13 additions & 0 deletions .vscode/tasks.json
@@ -0,0 +1,13 @@
{
"version": "2.0.0",
"tasks": [
{
"label": "zig build",
"type": "shell",
"command": "zig",
"args": [
"build"
],
}
]
}
47 changes: 47 additions & 0 deletions docs/CONTRIBUTING.md
@@ -10,6 +10,53 @@ See the [readme](../readme.md#-setup).

## Style Guide

### Imports

#### Sig Dependencies
By convention, all internal dependencies should be defined as aliases of fully qualified paths from the root module. For example, within `src/gossip/message.zig`, types from `src/gossip/data.zig` should be imported in the following manner:
```zig
const sig = @import("../lib.zig");

const GossipData = sig.gossip.data.GossipData;
```

#### Grouping
Group import statements and alias definitions into the following categories, separated by a blank line:

1. @import statements
2. namespace aliases
3. struct aliases
4. function aliases
5. constant aliases

If it improves clarity, split groups into external and sig imports/aliases; otherwise, list external imports/aliases before sig imports/aliases. Within groups, try to follow alphabetical order with respect to the fully qualified namespace, e.g.:
```zig
// Import statements
const std = @import("std");
const sig = @import("../lib.zig");

// Namespace aliases
const bincode = sig.bincode;
const pull_request = sig.gossip.pull_request;
const pull_response = sig.gossip.pull_response;

// External aliases
const EndPoint = network.EndPoint;
const UdpSocket = network.Socket;
const ArrayList = std.ArrayList;
const AtomicBool = std.atomic.Value(bool);
const KeyPair = std.crypto.sign.Ed25519.KeyPair;
const Thread = std.Thread;

// Sig aliases
const Hash = sig.core.Hash;
const Pubkey = sig.core.Pubkey;
const Entry = sig.trace.entry.Entry;
const Logger = sig.trace.log.Logger;
const EchoServer = sig.net.echo.Server;
const Packet = sig.net.Packet;
```

### Optional Values

- optional values should be prefixed with `maybe_`, and unwrapping should follow the `if (maybe_x) |x| {}` format, as in the sketch below
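
For example, a minimal sketch of this pattern (the function and parameter names here are hypothetical, not taken from the codebase):

```zig
const std = @import("std");

/// Hypothetical helper illustrating the `maybe_` naming and unwrap convention.
fn printName(maybe_name: ?[]const u8) void {
    if (maybe_name) |name| {
        // `name` is the unwrapped, non-null payload
        std.debug.print("name: {s}\n", .{name});
    } else {
        std.debug.print("no name provided\n", .{});
    }
}
```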
2 changes: 1 addition & 1 deletion scripts/kcov_fuzz_gossip.sh
@@ -23,7 +23,7 @@ kcov \
--include-pattern=src/gossip/ \
--exclude-pattern=$HOME/.cache \
kcov-output/ \
./zig-out/bin/fuzz gossip 19 4_000
./zig-out/bin/fuzz gossip 19 50_000

# open report
echo "=> Opening kcov-output/index.html"
45 changes: 33 additions & 12 deletions src/accountsdb/db.zig
@@ -428,8 +428,13 @@ pub const AccountsDB = struct {
}

const counting_alloc_ptr = try self.allocator.create(CountingAllocator);
defer {
if (counting_alloc_ptr.alloc_count == 0) {
self.allocator.destroy(counting_alloc_ptr);
}
}
counting_alloc_ptr.* = .{
.deinit_self_allocator = self.allocator,
.self_allocator = self.allocator,
.references = references,
.alloc_count = 0,
};
@@ -2074,7 +2079,7 @@ pub const AccountsDB = struct {
/// multiple different ArrayLists -- alloc and resize are not implemented.
const CountingAllocator = struct {
/// optional heap allocator to deinit the ptr on deinit
deinit_self_allocator: std.mem.Allocator,
self_allocator: std.mem.Allocator,
references: ArrayList(AccountRef),
alloc_count: usize,

@@ -2099,7 +2104,7 @@ const CountingAllocator = struct {
}
self.references.deinit();
// free pointer
self.deinit_self_allocator.destroy(self);
self.self_allocator.destroy(self);
}

pub fn alloc(ctx: *anyopaque, n: usize, log2_align: u8, return_address: usize) ?[*]u8 {
@@ -2132,7 +2137,7 @@ const CountingAllocator = struct {
}
};

fn loadTestAccountsDB(allocator: std.mem.Allocator, use_disk: bool) !struct { AccountsDB, AllSnapshotFields } {
fn loadTestAccountsDB(allocator: std.mem.Allocator, use_disk: bool, n_threads: usize) !struct { AccountsDB, AllSnapshotFields } {
const dir_path = "test_data";
const dir = try std.fs.cwd().openDir(dir_path, .{ .iterate = true });

@@ -2142,15 +2147,15 @@ fn loadTestAccountsDB(allocator: std.mem.Allocator, use_disk: bool) !struct { Ac
.noop,
"test_data/snapshot-10-6ExseAZAVJsAZjhimxHTR7N8p6VGXiDNdsajYh1ipjAD.tar.zst",
dir,
1,
n_threads,
true,
);
try parallelUnpackZstdTarBall(
allocator,
.noop,
"test_data/incremental-snapshot-10-25-GXgKvm3NMAPgGdv2verVaNXmKTHQgfy2TAxLVEfAvdCS.tar.zst",
dir,
1,
n_threads,
true,
);

@@ -2178,7 +2183,7 @@ fn loadTestAccountsDB(allocator: std.mem.Allocator, use_disk: bool) !struct { Ac
try accounts_db.loadFromSnapshot(
snapshot.accounts_db_fields,
accounts_path,
1,
@intCast(n_threads),
allocator,
);

@@ -2191,7 +2196,7 @@ fn loadTestAccountsDB(allocator: std.mem.Allocator, use_disk: bool) !struct { Ac
test "write and read an account" {
const allocator = std.testing.allocator;

var accounts_db, var snapshots = try loadTestAccountsDB(std.testing.allocator, false);
var accounts_db, var snapshots = try loadTestAccountsDB(std.testing.allocator, false, 1);
defer {
accounts_db.deinit(true);
snapshots.deinit(allocator);
@@ -2228,7 +2233,23 @@ test "write and read an account" {
test "load and validate from test snapshot using disk index" {
const allocator = std.testing.allocator;

var accounts_db, var snapshots = try loadTestAccountsDB(std.testing.allocator, false);
var accounts_db, var snapshots = try loadTestAccountsDB(std.testing.allocator, false, 1);
defer {
accounts_db.deinit(true);
snapshots.deinit(allocator);
}

try accounts_db.validateLoadFromSnapshot(
snapshots.incremental.?.bank_fields.incremental_snapshot_persistence,
snapshots.full.bank_fields.slot,
snapshots.full.bank_fields.capitalization,
);
}

test "load and validate from test snapshot parallel" {
const allocator = std.testing.allocator;

var accounts_db, var snapshots = try loadTestAccountsDB(std.testing.allocator, false, 2);
defer {
accounts_db.deinit(true);
snapshots.deinit(allocator);
@@ -2244,7 +2265,7 @@ test "load and validate from test snapshot using disk index" {
test "load and validate from test snapshot" {
const allocator = std.testing.allocator;

var accounts_db, var snapshots = try loadTestAccountsDB(std.testing.allocator, false);
var accounts_db, var snapshots = try loadTestAccountsDB(std.testing.allocator, false, 1);
defer {
accounts_db.deinit(true);
snapshots.deinit(allocator);
@@ -2260,7 +2281,7 @@ test "load and validate from test snapshot" {
test "load clock sysvar" {
const allocator = std.testing.allocator;

var accounts_db, var snapshots = try loadTestAccountsDB(std.testing.allocator, false);
var accounts_db, var snapshots = try loadTestAccountsDB(std.testing.allocator, false, 1);
defer {
accounts_db.deinit(true);
snapshots.deinit(allocator);
@@ -2280,7 +2301,7 @@ test "load clock sysvar" {
test "load other sysvars" {
const allocator = std.testing.allocator;

var accounts_db, var snapshots = try loadTestAccountsDB(std.testing.allocator, false);
var accounts_db, var snapshots = try loadTestAccountsDB(std.testing.allocator, false, 1);
defer {
accounts_db.deinit(true);
snapshots.deinit(allocator);
24 changes: 13 additions & 11 deletions src/accountsdb/download.zig
@@ -2,14 +2,15 @@

const std = @import("std");
const curl = @import("curl");
const Pubkey = @import("../core/pubkey.zig").Pubkey;
const gossip = @import("../gossip/service.zig");
const GossipService = gossip.GossipService;
const ContactInfo = @import("../gossip/data.zig").ContactInfo;
const GossipTable = @import("../gossip/table.zig").GossipTable;
const SlotAndHash = @import("./snapshots.zig").SlotAndHash;
const Logger = @import("../trace/log.zig").Logger;
const Hash = @import("../core/hash.zig").Hash;
const sig = @import("../lib.zig");

const Pubkey = sig.core.pubkey.Pubkey;
const GossipService = sig.gossip.GossipService;
const ContactInfo = sig.gossip.data.ContactInfo;
const GossipTable = sig.gossip.table.GossipTable;
const SlotAndHash = sig.accounts_db.snapshots.SlotAndHash;
const Logger = sig.trace.log.Logger;
const Hash = sig.core.hash.Hash;

const DOWNLOAD_PROGRESS_UPDATES_NS = 30 * std.time.ns_per_s;

@@ -463,9 +464,10 @@ pub fn downloadFile(
}
}

const ThreadPool = @import("../sync/thread_pool.zig").ThreadPool;
const LegacyContactInfo = @import("../gossip/data.zig").LegacyContactInfo;
const SignedGossipData = @import("../gossip/data.zig").SignedGossipData;
const ThreadPool = sig.sync.thread_pool.ThreadPool;
const LegacyContactInfo = sig.gossip.data.LegacyContactInfo;
const SignedGossipData = sig.gossip.data.SignedGossipData;

const KeyPair = std.crypto.sign.Ed25519.KeyPair;

test "accounts_db.download: test remove untrusted peers" {
84 changes: 80 additions & 4 deletions src/accountsdb/index.zig
@@ -227,6 +227,8 @@ pub const AccountIndex = struct {
}

/// adds a reference to the index
/// NOTE: this should only be used when you know the reference does not exist
/// because we never want duplicate state references in the index
pub fn indexRef(self: *Self, account_ref: *AccountRef) void {
const bin_rw = self.getBinFromPubkey(&account_ref.pubkey);

@@ -1059,7 +1061,81 @@ pub const DiskMemoryAllocator = struct {
}
};

test "tests disk allocator on hashmaps" {
test "swissmap resize" {
var map = SwissMap(Pubkey, AccountRef, pubkey_hash, pubkey_eql).init(std.testing.allocator);
defer map.deinit();

try map.ensureTotalCapacity(100);

const ref = AccountRef.default();
map.putAssumeCapacity(Pubkey.default(), ref);

// this will resize the map with the key still in there
try map.ensureTotalCapacity(200);
const get_ref = map.get(Pubkey.default()) orelse return error.MissingAccount;
try std.testing.expect(std.meta.eql(get_ref, ref));
}

test "account index update/remove reference" {
const allocator = std.testing.allocator;

var index = try AccountIndex.init(allocator, allocator, 8);
defer index.deinit(true);
try index.ensureTotalCapacity(100);

// pubkey -> a
var ref_a = AccountRef.default();
index.indexRef(&ref_a);

var ref_b = AccountRef.default();
ref_b.slot = 1;
index.indexRef(&ref_b);

// make sure indexRef works
{
var ref_head_rw = index.getReference(&ref_a.pubkey).?;
const ref_head, var ref_head_lg = ref_head_rw.writeWithLock();
ref_head_lg.unlock();
_, const ref_max = ref_head.highestRootedSlot(10);
try std.testing.expect(ref_max == 1);
}

// update the tail
try std.testing.expect(ref_b.location == .Cache);
var ref_b2 = ref_b;
ref_b2.location = .{ .File = .{
.file_id = FileId.fromInt(@intCast(1)),
.offset = 10,
} };
try index.updateReference(&ref_b.pubkey, 1, &ref_b2);
{
const ref = index.getReferenceSlot(&ref_a.pubkey, 1).?;
try std.testing.expect(ref.location == .File);
}

// update the head
var ref_a2 = ref_a;
ref_a2.location = .{ .File = .{
.file_id = FileId.fromInt(@intCast(1)),
.offset = 20,
} };
try index.updateReference(&ref_a.pubkey, 0, &ref_a2);
{
const ref = index.getReferenceSlot(&ref_a.pubkey, 0).?;
try std.testing.expect(ref.location == .File);
}

// remove the head
try index.removeReference(&ref_a2.pubkey, 0);
try std.testing.expect(!index.exists(&ref_a2.pubkey, 0));
try std.testing.expect(index.exists(&ref_b2.pubkey, 1));

// remove the tail
try index.removeReference(&ref_b2.pubkey, 1);
try std.testing.expect(!index.exists(&ref_b2.pubkey, 1));
}

test "disk allocator on hashmaps" {
var allocator = DiskMemoryAllocator.init("test_data/tmp");
defer allocator.deinit(null);

@@ -1076,7 +1152,7 @@ test "tests disk allocator on hashmaps" {
try std.testing.expect(std.meta.eql(r, ref));
}

test "tests disk allocator" {
test "disk allocator" {
var allocator = DiskMemoryAllocator.init("test_data/tmp");

var disk_account_refs = try ArrayList(AccountRef).initCapacity(
@@ -1121,7 +1197,7 @@ test "tests disk allocator" {
try std.testing.expect(did_error);
}

test "tests swissmap read/write/delete" {
test "swissmap read/write/delete" {
const allocator = std.testing.allocator;

const n_accounts = 10_000;
Expand Down Expand Up @@ -1172,7 +1248,7 @@ test "tests swissmap read/write/delete" {
}
}

test "tests swissmap read/write" {
test "swissmap read/write" {
const allocator = std.testing.allocator;

const n_accounts = 10_000;