diff --git a/src/MLJ.jl b/src/MLJ.jl index 540310f97..e6ed00429 100644 --- a/src/MLJ.jl +++ b/src/MLJ.jl @@ -13,6 +13,7 @@ export @curve, @pcurve, pretty, # utilities.jl Params, params, set_params!, # parameters.jl strange, iterator, # parameters.jl Grid, TunedModel, learning_curve!, # tuning.jl + learning_curve, # tuning.jl EnsembleModel, # ensembles.jl rebind!, # networks.jl machines, sources, anonymize!, # composites.jl diff --git a/src/tuning.jl b/src/tuning.jl index 6e4eae464..844c7c43f 100644 --- a/src/tuning.jl +++ b/src/tuning.jl @@ -469,3 +469,16 @@ function learning_curve!(mach::Machine{<:Supervised}; parameter_values=parameter_values, measurements = measurements_) end + +""" + learning_curve(model::Supervised, args...; kwargs...) + +Plot a learning curve (or curves) without first constructing a +machine. Equivalent to `learning_curve!(machine(model, args...); +kwargs...)`. + +See [learning_curve!](@ref) + +""" +learning_curve(model::Supervised, args...; kwargs...) = +    learning_curve!(machine(model, args...); kwargs...) diff --git a/test/tuning.jl b/test/tuning.jl index fe759aac2..71d98f034 100644 --- a/test/tuning.jl +++ b/test/tuning.jl @@ -191,13 +191,16 @@ end @testset "learning curves" begin atom = FooBarRegressor() - ensemble = EnsembleModel(atom=atom, n=500) + ensemble = EnsembleModel(atom=atom, n=50, rng=1) mach = machine(ensemble, X, y) - r_lambda = range(ensemble, :(atom.lambda), lower=0.0001, upper=0.1, scale=:log10) + r_lambda = range(ensemble, :(atom.lambda), + lower=0.0001, upper=0.1, scale=:log10) curve = MLJ.learning_curve!(mach; range=r_lambda) atom.lambda=0.3 r_n = range(ensemble, :n, lower=10, upper=100) curve2 = MLJ.learning_curve!(mach; range=r_n) + curve3 = learning_curve(ensemble, X, y; range=r_n) + @test curve2.measurements ≈ curve3.measurements end end # module true