
Commit f6c85bf

Merge f51de39 into 3a648f6
tkf committed Feb 24, 2020
2 parents 3a648f6 + f51de39 commit f6c85bf
Showing 5 changed files with 101 additions and 68 deletions.
Manifest.toml (12 changes: 6 additions & 6 deletions)
@@ -61,12 +61,6 @@ uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
deps = ["Unicode"]
uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7"

[[ProgressMeter]]
deps = ["Distributed", "Printf"]
git-tree-sha1 = "ea1f4fa0ff5e8b771bf130d87af5b7ef400760bd"
uuid = "92933f4c-e287-5a05-a399-4b506db050ca"
version = "1.2.0"

[[REPL]]
deps = ["InteractiveUtils", "Markdown", "Sockets"]
uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb"
@@ -92,6 +86,12 @@ uuid = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
deps = ["LinearAlgebra", "SparseArrays"]
uuid = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"

[[TerminalLoggers]]
deps = ["Logging", "Printf"]
git-tree-sha1 = "987a3ebb20307530775f4def7eb9109cfa881748"
uuid = "5d786b92-1e48-4d6f-9151-6b4477ca9bed"
version = "0.1.0"

[[Test]]
deps = ["Distributed", "InteractiveUtils", "Logging", "Random"]
uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
Project.toml (9 changes: 6 additions & 3 deletions)
@@ -8,23 +8,26 @@ Dates = "ade2ca70-3891-5945-98fb-dc099432e06a"
InteractiveUtils = "b77e0a4c-d291-57a0-90e8-8db25a27a240"
JSON = "682c06a0-de6a-54ab-a142-c8b1cf79cde6"
LibGit2 = "76f85450-5226-5b5a-8eaa-529ad045b433"
Logging = "56ddb016-857b-54e1-b83d-db4d58db5568"
Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7"
ProgressMeter = "92933f4c-e287-5a05-a399-4b506db050ca"
TerminalLoggers = "5d786b92-1e48-4d6f-9151-6b4477ca9bed"
UUIDs = "cf7118a7-6976-5b1a-9a39-7adc72f591a4"

[compat]
BenchmarkTools = "^0.4.3, 0.5"
JSON = "0.21"
ProgressMeter = "1"
TerminalLoggers = "0.1"
julia = "1"

[extras]
Dates = "ade2ca70-3891-5945-98fb-dc099432e06a"
Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
LibGit2 = "76f85450-5226-5b5a-8eaa-529ad045b433"
Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"

[targets]
test = ["Dates", "Pkg", "LibGit2", "Statistics", "Test", "Random"]
test = ["Dates", "Documenter", "Pkg", "LibGit2", "Statistics", "Test", "Random"]
src/PkgBenchmark.jl (4 changes: 3 additions & 1 deletion)
@@ -4,12 +4,14 @@ module PkgBenchmark

using BenchmarkTools
using JSON
using ProgressMeter
using Pkg
using LibGit2
using Dates
using InteractiveUtils
using Printf
using Logging: with_logger
using TerminalLoggers: TerminalLogger
using UUIDs: UUID

export benchmarkpkg, judge, writeresults, readresults, export_markdown, memory
export BenchmarkConfig, BenchmarkResults, BenchmarkJudgement
src/runbenchmark.jl (139 changes: 81 additions & 58 deletions)
@@ -13,8 +13,10 @@ The argument `pkg` can be a name of a package or a path to a directory to a package
* `postprocess` - A function to post-process results. Will be passed the `BenchmarkGroup`, which it can modify, or return a new one.
* `resultfile` - If set, saves the output to `resultfile`
* `retune` - Force a re-tune, saving the new tuning to the tune file.
* `progressoptions` - Options (a `NamedTuple`) to be passed as keyword arguments to
`ProgressMeter.Progress`.
* `logger_factory` - Specifies the logger used while the benchmarks run. It must be a callable
  object (typically a type) that takes no arguments and returns a logger, and it must be
  accessible as a constant in some package (e.g., an anonymous function does not work); see the
  usage sketch below.
* `progressoptions` - Deprecated.
The result can be used by functions such as [`judge`](@ref). If you choose to, you can save the results manually using
[`writeresults`](@ref) where `results` is the return value of this function. It can be read back with [`readresults`](@ref).
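
As a rough usage sketch of the new keyword (assuming a hypothetical package `MyPkg` with a standard `benchmark/benchmarks.jl` suite), any logger type that is a package-level constant and callable with no arguments should work:

```julia
using PkgBenchmark
using Logging: ConsoleLogger

# Benchmark the (hypothetical) package MyPkg, sending the log records produced
# while tuning and running the suite to a plain ConsoleLogger instead of the
# default TerminalLoggers.TerminalLogger.
results = benchmarkpkg("MyPkg"; logger_factory = ConsoleLogger)

# An anonymous function, e.g. `() -> ConsoleLogger(stderr)`, does not work here:
# the factory must be resolvable as `SomePackage.some_constant` in the spawned
# benchmark process.
```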
@@ -42,9 +44,17 @@ function benchmarkpkg(
postprocess=nothing,
resultfile=nothing,
retune=false,
progressoptions=NamedTuple(),
logger_factory=nothing,
progressoptions=nothing,
custom_loadpath="" #= used in tests =#
)
if progressoptions !== nothing
Base.depwarn(
"Keyword argument `progressoptions` is ignored. Please use `logger_factory`.",
:benchmarkpkg,
)
end

target = BenchmarkConfig(target)

pkgid = Base.identify_package(pkg)
Expand Down Expand Up @@ -96,7 +106,7 @@ function benchmarkpkg(
_runbenchmark(script, f, target, tunefile;
retune = retune,
custom_loadpath = custom_loadpath,
progressoptions = progressoptions)
logger_factory = logger_factory)
end
io = IOBuffer(results_local["results"])
seek(io, 0)
@@ -138,8 +148,57 @@ function benchmarkpkg(
return results
end

"""
objectpath(x) -> (pkg_uuid::String, pkg_name::String, name::Symbol...)
Get the "fullname" of object, prefixed by package ID.
# Examples
```jldoctest
julia> using PkgBenchmark: objectpath
julia> using Logging
julia> objectpath(ConsoleLogger)
("56ddb016-857b-54e1-b83d-db4d58db5568", "Logging", :ConsoleLogger)
```
"""
function objectpath(x)
m = parentmodule(x)
if x === m
pkg = Base.PkgId(x)
return (string(pkg.uuid), pkg.name)
else
n = nameof(x)
if !isdefined(m, n)
error("Object `$x` is not accessible as `$m.$n`.")
end
return (objectpath(m)..., n)
end
end

"""
loadobject((pkg_uuid, pkg_name, name...))
Inverse of `objectpath`.
# Examples
```jldoctest
julia> using PkgBenchmark: loadobject
julia> using Logging
julia> loadobject(("56ddb016-857b-54e1-b83d-db4d58db5568", "Logging", :ConsoleLogger)) ===
ConsoleLogger
true
```
"""
loadobject(path) = _loadobject(path...)
_loadobject(pkg_uuid, pkg_name, fullname...) =
foldl(getproperty, fullname, init=Base.require(Base.PkgId(UUID(pkg_uuid), pkg_name)))

function _runbenchmark(file::String, output::String, benchmarkconfig::BenchmarkConfig, tunefile::String;
retune = false, custom_loadpath = nothing, progressoptions = NamedTuple())
retune = false, custom_loadpath = nothing, logger_factory = nothing)
color = Base.have_color ? "--color=yes" : "--color=no"
compilecache = "--compiled-modules=" * (Bool(Base.JLOptions().use_compiled_modules) ? "yes" : "no")
_file, _output, _tunefile, _custom_loadpath = map(escape_string, (file, output, tunefile, custom_loadpath))
@@ -151,11 +210,19 @@ function _runbenchmark(file::String, output::String, benchmarkconfig::BenchmarkConfig
else
"all"
end
logger_factory_path = if logger_factory === nothing
# Default to `TerminalLoggers.TerminalLogger`; load via
# `PkgBenchmark` namespace so that users don't have to add it
# separately.
(objectpath(@__MODULE__)..., :TerminalLogger)
else
objectpath(logger_factory)
end
exec_str = isempty(_custom_loadpath) ? "" : "push!(LOAD_PATH, \"$(_custom_loadpath)\")\n"
exec_str *=
"""
using PkgBenchmark
PkgBenchmark._runbenchmark_local($(repr(_file)), $(repr(_output)), $(repr(_tunefile)), $(repr(retune)), $(repr(progressoptions)))
PkgBenchmark._runbenchmark_local($(repr(_file)), $(repr(_output)), $(repr(_tunefile)), $(repr(retune)), $(repr(logger_factory_path)))
"""

target_env = [k => v for (k, v) in benchmarkconfig.env]
@@ -166,8 +233,13 @@ function _runbenchmark(file::String, output::String, benchmarkconfig::BenchmarkConfig
return JSON.parsefile(output)
end

function _runbenchmark_local(file, output, tunefile, retune, logger_factory_path)
with_logger(loadobject(logger_factory_path)()) do
__runbenchmark_local(file, output, tunefile, retune)
end
end
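
The wrapper above ties the pieces together: the parent process turns the factory into a plain `(uuid, package, names...)` tuple with `objectpath`, `_runbenchmark` interpolates that tuple into the spawned command, and the benchmark process rebuilds and calls it with `loadobject`. A minimal sketch of that round trip, assuming TerminalLoggers is installed:

```julia
using PkgBenchmark: objectpath, loadobject
using TerminalLoggers: TerminalLogger

# Parent process: turn the factory into a plain, serializable tuple
# (this is what gets interpolated into the spawned julia command).
path = objectpath(TerminalLogger)
# -> ("5d786b92-1e48-4d6f-9151-6b4477ca9bed", "TerminalLoggers", :TerminalLogger)

# Benchmark process: load the package by UUID and name, walk the remaining
# names back to the factory, then call it to obtain the logger.
logger = loadobject(path)()
logger isa TerminalLogger  # true
```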

function _runbenchmark_local(file, output, tunefile, retune, progressoptions)
function __runbenchmark_local(file, output, tunefile, retune)
# Loading
Base.include(Main, file)
if !isdefined(Main, :SUITE)
@@ -182,12 +254,12 @@ function _runbenchmark_local(file, output, tunefile, retune, progressoptions)
else
_benchinfo("creating benchmark tuning file $(abspath(tunefile))...")
mkpath(dirname(tunefile))
_tune!(suite, progressoptions = progressoptions)
BenchmarkTools.tune!(suite)
BenchmarkTools.save(tunefile, params(suite));
end

# Running
results = _run(suite, progressoptions = progressoptions)
results = run(suite)

# Output
vinfo = first(split(sprint((io) -> versioninfo(io; verbose=true)), "Environment"))
@@ -202,52 +274,3 @@ function _runbenchmark_local(file, output, tunefile, retune, progressoptions)
end
return nothing
end


function _tune!(group::BenchmarkTools.BenchmarkGroup; verbose::Bool = false, root = true,
progressoptions = NamedTuple(),
prog = Progress(length(BenchmarkTools.leaves(group)); desc = "Tuning: ", progressoptions...),
hierarchy = [], kwargs...)
BenchmarkTools.gcscrub() # run GC before running group, even if individual benchmarks don't manually GC
i = 1
for id in keys(group)
_tune!(group[id]; verbose = verbose, prog = prog, hierarchy = push!(copy(hierarchy), (repr(id), i, length(keys(group)))), kwargs...)
i += 1
end
return group
end

function _tune!(b::BenchmarkTools.Benchmark, p::BenchmarkTools.Parameters = b.params;
prog = nothing, verbose::Bool = false, pad = "", hierarchy = [], kwargs...)
BenchmarkTools.warmup(b, verbose=false)
estimate = ceil(Int, minimum(BenchmarkTools.lineartrial(b, p; kwargs...)))
b.params.evals = BenchmarkTools.guessevals(estimate)
if prog != nothing
indent = 0
ProgressMeter.next!(prog; showvalues = [map(id -> (" "^(indent += 1) * "[$(id[2])/$(id[3])]", id[1]), hierarchy)...])
end
return b
end

function _run(group::BenchmarkTools.BenchmarkGroup, args...;
progressoptions = NamedTuple(),
prog = Progress(length(BenchmarkTools.leaves(group)); desc = "Benchmarking: ", progressoptions...), hierarchy = [], kwargs...)
result = similar(group)
BenchmarkTools.gcscrub() # run GC before running group, even if individual benchmarks don't manually GC
i = 1
for id in keys(group)
result[id] = _run(group[id], args...; prog = prog, hierarchy = push!(copy(hierarchy), (repr(id), i, length(keys(group)))), kwargs...)
i += 1
end
return result
end

function _run(b::BenchmarkTools.Benchmark, p::BenchmarkTools.Parameters = b.params;
prog = nothing, verbose::Bool = false, pad = "", hierarchy = [], kwargs...)
res = BenchmarkTools.run_result(b, p; kwargs...)[1]
if prog != nothing
indent = 0
ProgressMeter.next!(prog; showvalues = [map(id -> (" "^(indent += 1) * "[$(id[2])/$(id[3])]", id[1]), hierarchy)...])
end
return res
end
test/runtests.jl (5 changes: 5 additions & 0 deletions)
@@ -3,6 +3,7 @@ using BenchmarkTools
using Statistics
using Test
using Dates
using Documenter: doctest
using LibGit2
using Random
using Pkg
@@ -204,3 +205,7 @@ temp_pkg_dir(;tmp_dir = tmp_dir) do
@test BenchmarkTools.improvements(memory, judgement) == BenchmarkTools.improvements(memory, judgement.benchmarkgroup)
end
end

@testset "doctest" begin
doctest(PkgBenchmark)
end
