Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Introduce profiler interface to benchmark framework. #1533

Open
wants to merge 2 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
24 changes: 24 additions & 0 deletions include/benchmark/benchmark.h
Original file line number Diff line number Diff line change
Expand Up @@ -391,6 +391,30 @@ class MemoryManager {
BENCHMARK_EXPORT
void RegisterMemoryManager(MemoryManager* memory_manager);

// Extension point for hooking an external profiler into the benchmark
// lifecycle. Register a long-lived instance via 'RegisterProfiler'; the
// framework stores only a raw pointer and invokes the callbacks below.
class BENCHMARK_EXPORT Profiler {
 public:
  // Called once in each 'RunSpecifiedBenchmarks'.
  // Implement this to reset internal states (e.g., counters).
  virtual void Init() = 0;

  virtual ~Profiler() = default;

  // Called after each 'BenchmarkInstance::Setup' to start capturing states.
  // Start and Stop are called in pair and injected to skip measurement in
  // test set-up and tear-down.
  virtual void Start() = 0;

  // Called before each 'BenchmarkInstance::TearDown' to pause capturing
  // states.
  virtual void Stop() = 0;

  // Called once after benchmark instances complete.
  // Implement this to do post-processing if necessary and save results.
  virtual void Finalize() = 0;
};

BENCHMARK_EXPORT void RegisterProfiler(Profiler* profiler);

// Add a key-value pair to output as part of the context stanza in the report.
BENCHMARK_EXPORT
void AddCustomContext(const std::string& key, const std::string& value);
Expand Down
7 changes: 7 additions & 0 deletions src/benchmark.cc
Original file line number Diff line number Diff line change
Expand Up @@ -371,6 +371,8 @@ void RunBenchmarks(const std::vector<BenchmarkInstance>& benchmarks,
std::shuffle(repetition_indices.begin(), repetition_indices.end(), g);
}

if (profiler != nullptr) profiler->Init();

for (size_t repetition_index : repetition_indices) {
internal::BenchmarkRunner& runner = runners[repetition_index];
runner.DoOneRepetition();
Expand All @@ -395,6 +397,9 @@ void RunBenchmarks(const std::vector<BenchmarkInstance>& benchmarks,
Report(display_reporter, file_reporter, run_results);
}
}

if (profiler != nullptr) profiler->Finalize();

display_reporter->Finalize();
if (file_reporter) file_reporter->Finalize();
FlushStreams(display_reporter);
Expand Down Expand Up @@ -565,6 +570,8 @@ void RegisterMemoryManager(MemoryManager* manager) {
internal::memory_manager = manager;
}

// Installs 'profiler' as the process-wide profiler hook; pass nullptr to
// clear it. Only a non-owning pointer is stored, so the instance must
// outlive all benchmark runs.
void RegisterProfiler(Profiler* profiler) {
  internal::profiler = profiler;
}

void AddCustomContext(const std::string& key, const std::string& value) {
if (internal::global_context == nullptr) {
internal::global_context = new std::map<std::string, std::string>();
Expand Down
3 changes: 3 additions & 0 deletions src/benchmark_runner.cc
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,7 @@ namespace benchmark {
namespace internal {

MemoryManager* memory_manager = nullptr;
Profiler* profiler = nullptr;

namespace {

Expand Down Expand Up @@ -343,7 +344,9 @@ void BenchmarkRunner::DoOneRepetition() {
// simply use that precomputed iteration count.
for (;;) {
b.Setup();
if (profiler != nullptr) profiler->Start();
i = DoNIterations();
if (profiler != nullptr) profiler->Stop();
b.Teardown();

// Do we consider the results to be significant?
Expand Down
1 change: 1 addition & 0 deletions src/benchmark_runner.h
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@ BM_DECLARE_string(benchmark_perf_counters);
namespace internal {

extern MemoryManager* memory_manager;
extern Profiler* profiler;

struct RunResults {
std::vector<BenchmarkReporter::Run> non_aggregates;
Expand Down
1 change: 1 addition & 0 deletions test/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -226,6 +226,7 @@ if (BENCHMARK_ENABLE_GTEST_TESTS)
add_gtest(statistics_gtest)
add_gtest(string_util_gtest)
add_gtest(perf_counters_gtest)
add_gtest(profiler_gtest)
add_gtest(time_unit_gtest)
endif(BENCHMARK_ENABLE_GTEST_TESTS)

Expand Down
100 changes: 100 additions & 0 deletions test/profiler_gtest.cc
Original file line number Diff line number Diff line change
@@ -0,0 +1,100 @@
#include <memory>
#include <queue>
#include <string>

#include "../src/check.h"
#include "../src/commandlineflags.h"
#include "../src/string_util.h"
#include "benchmark/benchmark.h"
#include "gmock/gmock.h"
#include "gtest/gtest.h"

namespace benchmark {

constexpr int repetitions = 2;

BM_DECLARE_string(benchmark_filter);
BM_DECLARE_string(benchmark_format);

// FIFO log of event-name strings recorded by the test profiler and hooks.
// Wraps std::queue by composition rather than public inheritance (standard
// containers have no virtual destructor and inheriting exposes their whole
// interface accidentally), while keeping the members this file relies on:
// Put, Clear, HasNext, Next, and empty.
class EventQueue {
 public:
  // Appends 'event' at the back of the queue.
  void Put(const std::string& event) { events_.push(event); }

  // Discards all pending events.
  void Clear() {
    while (!events_.empty()) {
      events_.pop();
    }
  }

  // True iff at least one event is pending.
  bool HasNext() { return !events_.empty(); }

  // True iff no event is pending (std::queue-style spelling, kept for
  // existing callers).
  bool empty() const { return events_.empty(); }

  // Pops and returns the oldest pending event. Precondition: !empty().
  std::string Next() {
    std::string event = events_.front();
    events_.pop();
    return event;
  }

 private:
  std::queue<std::string> events_;
};

// Shared event log written by TestProfiler and the Setup/Teardown hooks and
// drained by the test body. Heap-allocated and never freed — NOTE(review):
// presumably leaked on purpose to sidestep static-destruction-order issues
// with the registered profiler; confirm before "fixing".
EventQueue* queue = new EventQueue();

// Profiler implementation that records each lifecycle callback into the
// global 'queue' so the test can assert the exact call order.
class TestProfiler : public benchmark::Profiler {
 public:
  // Recorded once per 'RunSpecifiedBenchmarks'.
  void Init() BENCHMARK_OVERRIDE { queue->Put("Init"); }

  // Recorded around each repetition's measured region.
  void Start() BENCHMARK_OVERRIDE { queue->Put("Start"); }
  void Stop() BENCHMARK_OVERRIDE { queue->Put("Stop"); }
  // Recorded once after all benchmark instances complete.
  void Finalize() BENCHMARK_OVERRIDE { queue->Put("Finalize"); }
};

class BenchmarkTest : public testing::Test {
public:
static void SetUpTestSuite() { RegisterProfiler(InitTestProfiler()); }

static void TearDownTestSuite() { RegisterProfiler(nullptr); }
static void SetupHook(const benchmark::State& state) { queue->Put("Setup"); }

static void TeardownHook(const benchmark::State& state) {
queue->Put("Teardown");
}

void Execute(const std::string& benchmark_filter) {
queue->Clear();

FLAGS_benchmark_filter = benchmark_filter;
FLAGS_benchmark_format = "console";
RunSpecifiedBenchmarks();
}

private:
static TestProfiler* InitTestProfiler() {
static std::unique_ptr<benchmark::TestProfiler> mm(
new benchmark::TestProfiler());
return mm.get();
}
};

// Minimal benchmark body: spins the measured loop without real work so the
// test exercises only the profiler/hook sequencing.
void BM_empty(benchmark::State& state) {
  for (auto _ : state) {
    // Keep the loop from being optimized away.
    benchmark::DoNotOptimize(state.iterations());
  }
}
// Fixed repetition and iteration counts plus Setup/Teardown hooks make the
// expected event sequence fully deterministic for the Match test.
BENCHMARK(BM_empty)
    ->Repetitions(repetitions)
    ->Iterations(34)
    ->Setup(BenchmarkTest::SetupHook)
    ->Teardown(BenchmarkTest::TeardownHook);

// Verifies the exact event ordering: one Init, then per repetition the
// quadruple {Setup, Start, Stop, Teardown}, then a single Finalize, with
// nothing left over afterwards.
TEST_F(BenchmarkTest, Match) {
  Execute("BM_empty");

  ASSERT_EQ("Init", queue->Next());
  for (int rep = 0; rep < repetitions; ++rep) {
    ASSERT_EQ("Setup", queue->Next());
    ASSERT_EQ("Start", queue->Next());
    ASSERT_EQ("Stop", queue->Next());
    ASSERT_EQ("Teardown", queue->Next());
  }
  ASSERT_EQ("Finalize", queue->Next());
  ASSERT_FALSE(queue->HasNext());
}

} // namespace benchmark