Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add std.valgrind module #1863

Merged
merged 3 commits on Mar 11, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
3 changes: 3 additions & 0 deletions CMakeLists.txt
Expand Up @@ -667,6 +667,9 @@ set(ZIG_STD_FILES
"statically_initialized_mutex.zig"
"testing.zig"
"unicode.zig"
"valgrind/callgrind.zig"
"valgrind/index.zig"
"valgrind/memcheck.zig"
"zig/ast.zig"
"zig/index.zig"
"zig/parse.zig"
Expand Down
4 changes: 4 additions & 0 deletions std/heap.zig
Expand Up @@ -273,6 +273,10 @@ pub const FixedBufferAllocator = struct {
buffer: []u8,

pub fn init(buffer: []u8) FixedBufferAllocator {
// This loop gets optimized out in ReleaseFast mode
for (buffer) |*byte| {
byte.* = undefined;
}
return FixedBufferAllocator{
.allocator = Allocator{
.allocFn = alloc,
Expand Down
1 change: 1 addition & 0 deletions std/index.zig
Expand Up @@ -44,6 +44,7 @@ pub const rb = @import("rb.zig");
pub const sort = @import("sort.zig");
pub const testing = @import("testing.zig");
pub const unicode = @import("unicode.zig");
pub const valgrind = @import("valgrind/index.zig");
pub const zig = @import("zig/index.zig");

test "std" {
Expand Down
18 changes: 17 additions & 1 deletion std/mem.zig
Expand Up @@ -49,6 +49,7 @@ pub const Allocator = struct {
/// Free a single item previously created with `create`/`createOne`.
pub fn destroy(self: *Allocator, ptr: var) void {
    // Strip const qualifiers via an int round-trip so freeFn receives a []u8.
    const raw_bytes = @intToPtr([*]u8, @ptrToInt(ptr));
    self.freeFn(self, raw_bytes[0..@sizeOf(@typeOf(ptr).Child)]);
    // Tell Valgrind the block is gone (no-op when not running under Valgrind).
    _ = std.valgrind.freeLikeBlock(raw_bytes, 0);
}

pub fn alloc(self: *Allocator, comptime T: type, n: usize) ![]T {
Expand All @@ -62,6 +63,7 @@ pub const Allocator = struct {
const byte_count = math.mul(usize, @sizeOf(T), n) catch return Error.OutOfMemory;
const byte_slice = try self.allocFn(self, byte_count, alignment);
assert(byte_slice.len == byte_count);
_ = std.valgrind.mallocLikeBlock(byte_slice, 0, false);
// This loop gets optimized out in ReleaseFast mode
for (byte_slice) |*byte| {
byte.* = undefined;
Expand All @@ -86,6 +88,12 @@ pub const Allocator = struct {
const byte_count = math.mul(usize, @sizeOf(T), n) catch return Error.OutOfMemory;
const byte_slice = try self.reallocFn(self, old_byte_slice, byte_count, alignment);
assert(byte_slice.len == byte_count);
if (byte_slice.ptr == old_byte_slice.ptr) {
_ = std.valgrind.resizeInPlaceBlock(old_byte_slice, byte_count, 0);
} else {
_ = std.valgrind.freeLikeBlock(old_byte_slice.ptr, 0);
_ = std.valgrind.mallocLikeBlock(byte_slice, 0, false);
}
if (n > old_mem.len) {
// This loop gets optimized out in ReleaseFast mode
for (byte_slice[old_byte_slice.len..]) |*byte| {
Expand Down Expand Up @@ -114,8 +122,15 @@ pub const Allocator = struct {
// n <= old_mem.len and the multiplication didn't overflow for that operation.
const byte_count = @sizeOf(T) * n;

const byte_slice = self.reallocFn(self, @sliceToBytes(old_mem), byte_count, alignment) catch unreachable;
const old_byte_slice = @sliceToBytes(old_mem);
const byte_slice = self.reallocFn(self, old_byte_slice, byte_count, alignment) catch unreachable;
assert(byte_slice.len == byte_count);
if (byte_slice.ptr == old_byte_slice.ptr) {
_ = std.valgrind.resizeInPlaceBlock(old_byte_slice, byte_count, 0);
} else {
_ = std.valgrind.freeLikeBlock(old_byte_slice.ptr, 0);
_ = std.valgrind.mallocLikeBlock(byte_slice, 0, false);
}
return @bytesToSlice(T, @alignCast(alignment, byte_slice));
}

Expand All @@ -124,6 +139,7 @@ pub const Allocator = struct {
if (bytes.len == 0) return;
const non_const_ptr = @intToPtr([*]u8, @ptrToInt(bytes.ptr));
self.freeFn(self, non_const_ptr[0..bytes.len]);
_ = std.valgrind.freeLikeBlock(non_const_ptr, 0);
}
};

Expand Down
87 changes: 87 additions & 0 deletions std/valgrind/callgrind.zig
@@ -0,0 +1,87 @@
const std = @import("../index.zig");
const valgrind = std.valgrind;

/// Client request codes understood by the Callgrind tool.
/// The first member is offset by `valgrind.ToolBase("CT")` — presumably a
/// base value derived from the two-letter tool code, mirroring the request
/// numbering in valgrind's callgrind.h (TODO confirm against that header).
/// The remaining members take sequential values after it, so their order
/// must not be changed.
pub const CallgrindClientRequest = extern enum {
    DumpStats = valgrind.ToolBase("CT"),
    ZeroStats,
    ToggleCollect,
    DumpStatsAt,
    StartInstrumentation,
    StopInstrumentation,
};

/// Forward a Callgrind request to the generic Valgrind client-request
/// mechanism, yielding `default` when not running under Valgrind.
fn doCallgrindClientRequestExpr(default: usize, request: CallgrindClientRequest, a1: usize, a2: usize, a3: usize, a4: usize, a5: usize) usize {
    const request_code = @intCast(usize, @enumToInt(request));
    return valgrind.doClientRequest(default, request_code, a1, a2, a3, a4, a5);
}

/// Statement form of doCallgrindClientRequestExpr: issue the request and
/// discard the result.
fn doCallgrindClientRequestStmt(request: CallgrindClientRequest, a1: usize, a2: usize, a3: usize, a4: usize, a5: usize) void {
    _ = doCallgrindClientRequestExpr(0, request, a1, a2, a3, a4, a5);
}



/// Dump the current state of cost centers, and zero them afterwards.
pub fn dumpStats() void {
    doCallgrindClientRequestStmt(CallgrindClientRequest.DumpStats, 0, 0, 0, 0, 0);
}


/// Dump current state of cost centers, and zero them afterwards.
/// The argument is appended to a string stating the reason which triggered
/// the dump. This string is written as a description field into the
/// profile data dump.
/// The string is only read, so a const pointer is accepted; `[*]u8`
/// arguments coerce, keeping existing callers working.
pub fn dumpStatsAt(pos_str: [*]const u8) void {
    doCallgrindClientRequestStmt(CallgrindClientRequest.DumpStatsAt,
        @ptrToInt(pos_str),
        0, 0, 0, 0);
}


/// Reset all cost centers to zero.
pub fn zeroStats() void {
    doCallgrindClientRequestStmt(CallgrindClientRequest.ZeroStats, 0, 0, 0, 0, 0);
}


/// Toggle the collection state.
/// The collection state specifies whether occurring events should be
/// counted (by incrementing cost-center counters) or ignored.
pub fn toggleCollect() void {
    doCallgrindClientRequestStmt(CallgrindClientRequest.ToggleCollect, 0, 0, 0, 0, 0);
}


/// Start full callgrind instrumentation if not already switched on.
/// When cache simulation is done, this flushes the simulated cache,
/// leading to an artificial cache-warmup phase afterwards, with cache
/// misses which would not have happened in reality.
pub fn startInstrumentation() void {
    doCallgrindClientRequestStmt(CallgrindClientRequest.StartInstrumentation, 0, 0, 0, 0, 0);
}


/// Stop full callgrind instrumentation if not already switched off.
/// This flushes Valgrind's translation cache and performs no additional
/// instrumentation afterwards, effectively running at the same speed as
/// the "none" tool (i.e. at minimal slowdown).
/// Use this to bypass Callgrind aggregation for uninteresting code parts.
/// To start Callgrind in this mode (ignoring the setup phase), use the
/// option "--instr-atstart=no".
pub fn stopInstrumentation() void {
    doCallgrindClientRequestStmt(CallgrindClientRequest.StopInstrumentation, 0, 0, 0, 0, 0);
}