add learning_curve (no exclamation point) #377
ablaom committed Dec 9, 2019
1 parent 17661f5 commit da7e5a9
Showing 3 changed files with 19 additions and 2 deletions.
1 change: 1 addition & 0 deletions src/MLJ.jl
@@ -13,6 +13,7 @@ export @curve, @pcurve, pretty,    # utilities.jl
     Params, params, set_params!,       # parameters.jl
     strange, iterator,                 # parameters.jl
     Grid, TunedModel, learning_curve!, # tuning.jl
+    learning_curve,                    # tuning.jl
     EnsembleModel,                     # ensembles.jl
     rebind!,                           # networks.jl
     machines, sources, anonymize!,     # composites.jl
13 changes: 13 additions & 0 deletions src/tuning.jl
@@ -469,3 +469,16 @@ function learning_curve!(mach::Machine{<:Supervised};
                 parameter_values=parameter_values,
                 measurements = measurements_)
 end
+
+"""
+    learning_curve(model::Supervised, args...; kwargs...)
+
+Plot a learning curve (or curves) without first constructing a
+machine. Equivalent to `learning_curve!(machine(model, args...);
+kwargs...)`.
+
+See also [`learning_curve!`](@ref).
+
+"""
+learning_curve(model::Supervised, args...; kwargs...) =
+    learning_curve!(machine(model, args...); kwargs...)
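
For orientation, here is a minimal sketch of how the new convenience function compares with the existing mutating form. It mirrors the test change below and assumes `FooBarRegressor`, `X`, and `y` are defined as in the package's test fixtures:

```julia
using MLJ

# Assumed setup, mirroring test/tuning.jl: FooBarRegressor is a test
# fixture model, and X, y are the usual features and target.
atom = FooBarRegressor()
ensemble = EnsembleModel(atom=atom, n=50, rng=1)
r_n = range(ensemble, :n, lower=10, upper=100)

# Existing form: construct a machine first, then call the mutating version.
mach = machine(ensemble, X, y)
curve = learning_curve!(mach; range=r_n)

# New form added by this commit: the machine is constructed internally.
curve2 = learning_curve(ensemble, X, y; range=r_n)

# With the seeded rng above, the two curves agree (see the test below).
curve.measurements ≈ curve2.measurements
```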
7 changes: 5 additions & 2 deletions test/tuning.jl
@@ -191,13 +191,16 @@ end
 
 @testset "learning curves" begin
     atom = FooBarRegressor()
-    ensemble = EnsembleModel(atom=atom, n=500)
+    ensemble = EnsembleModel(atom=atom, n=50, rng=1)
     mach = machine(ensemble, X, y)
-    r_lambda = range(ensemble, :(atom.lambda), lower=0.0001, upper=0.1, scale=:log10)
+    r_lambda = range(ensemble, :(atom.lambda),
+                     lower=0.0001, upper=0.1, scale=:log10)
     curve = MLJ.learning_curve!(mach; range=r_lambda)
     atom.lambda=0.3
     r_n = range(ensemble, :n, lower=10, upper=100)
     curve2 = MLJ.learning_curve!(mach; range=r_n)
+    curve3 = learning_curve(ensemble, X, y; range=r_n)
+    @test curve2.measurements ≈ curve3.measurements
 end
 end # module
 true
