Skip to content

Commit

Permalink
Refactor where we're running things in parallel
Browse files Browse the repository at this point in the history
The original fix I had for this was kind of hacky, but this moves the
parallelization down to just where we're measuring runtimes. This
should actually be slightly more efficient (and less buggy) when we're
running benchmarks in parallel, since previously we ran the
`before_scenario` and `after_scenario` hooks before each _instance_
of the benchmark. Now we run them only once per scenario, and each
scenario itself is run in parallel.
  • Loading branch information
devonestes committed Jan 9, 2018
1 parent 7cbc91f commit eaba176
Show file tree
Hide file tree
Showing 2 changed files with 10 additions and 24 deletions.
32 changes: 9 additions & 23 deletions lib/benchee/benchmark/runner.ex
Original file line number Diff line number Diff line change
Expand Up @@ -25,29 +25,13 @@ defmodule Benchee.Benchmark.Runner do
"""
@spec run_scenarios([Scenario.t], ScenarioContext.t) :: [Scenario.t]
def run_scenarios(scenarios, scenario_context) do
Enum.flat_map(scenarios, fn(scenario) ->
parallel_benchmark(scenario, scenario_context)
end)
end
%ScenarioContext{printer: printer, config: config} = scenario_context

defp parallel_benchmark(
scenario = %Scenario{job_name: job_name, input_name: input_name},
scenario_context = %ScenarioContext{
printer: printer,
config: config
}) do
printer.benchmarking(job_name, input_name, config)
1..config.parallel
|> Parallel.map(fn(_task_number) ->
Enum.map(scenarios, fn scenario ->
%Scenario{job_name: job_name, input_name: input_name} = scenario
printer.benchmarking(job_name, input_name, config)
run_scenario(scenario, scenario_context)
end)
|> Enum.group_by(fn scenario -> {scenario.function, scenario.input} end)
|> Enum.map(fn {_, like_results} ->
consolidated_results =
Enum.flat_map(like_results, fn scenario -> scenario.run_times end)
[scenario | _] = like_results
%Benchee.Benchmark.Scenario{scenario | run_times: consolidated_results}
end)
end

defp run_scenario(scenario, scenario_context) do
Expand Down Expand Up @@ -154,12 +138,14 @@ defmodule Benchee.Benchmark.Runner do
current_time: current_time, end_time: end_time
}) when current_time > end_time do
# restore correct order - important for graphing
%Scenario{scenario | run_times: Enum.reverse(run_times)}
%Scenario{scenario | run_times: run_times |> List.flatten |> Enum.sort}
end
defp do_benchmark(scenario = %Scenario{run_times: run_times},
scenario_context) do
run_time = iteration_time(scenario, scenario_context)
updated_scenario = %Scenario{scenario | run_times: [run_time | run_times]}
new_run_times = Parallel.map(0..scenario_context.config.parallel, fn _ ->
iteration_time(scenario, scenario_context)
end)
updated_scenario = %Scenario{scenario | run_times: [new_run_times | run_times]}
updated_context =
%ScenarioContext{scenario_context | current_time: current_time()}
do_benchmark(updated_scenario, updated_context)
Expand Down
2 changes: 1 addition & 1 deletion mix.exs
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
defmodule Benchee.Mixfile do
use Mix.Project

@version "0.11.1"
@version "0.12.0"

def project do
[
Expand Down

0 comments on commit eaba176

Please sign in to comment.