Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Generalize KNNRegressor to multitarget case #328

Merged
merged 23 commits into from
Oct 27, 2020
Merged
Show file tree
Hide file tree
Changes from 13 commits
Commits
Show all changes
23 commits
Select commit Hold shift + click to select a range
63004f7
Merge pull request #313 from alan-turing-institute/dev
ablaom Oct 12, 2020
67e775a
Merge pull request #316 from alan-turing-institute/dev
ablaom Oct 12, 2020
eb59c99
Merge pull request #320 from alan-turing-institute/dev
ablaom Oct 13, 2020
e07b3d5
Merge pull request #324 from alan-turing-institute/dev
ablaom Oct 16, 2020
b02cede
Merge pull request #326 from alan-turing-institute/dev
ablaom Oct 19, 2020
a4c8610
support multivariate kNN regression
mateuszbaran Oct 19, 2020
0b8c3bd
updated target of kNN regressor
mateuszbaran Oct 20, 2020
865a352
changing target of KNNRegressor
mateuszbaran Oct 20, 2020
3062a05
target of kNN regressor again
mateuszbaran Oct 20, 2020
e52ea0a
trying to make the multi-target kNN regressor work with tables
mateuszbaran Oct 21, 2020
a794ade
fixing kNN regressor
mateuszbaran Oct 21, 2020
6f28040
code review fixes
mateuszbaran Oct 22, 2020
9bc0dfc
update model registry
ablaom Oct 22, 2020
16ac6bf
update registry again
ablaom Oct 22, 2020
e7d5853
fix check_registry issue
ablaom Oct 22, 2020
46fea19
Update NearestNeighbors.jl
OkonSamuel Oct 22, 2020
d36948e
fix wrong call signature
OkonSamuel Oct 22, 2020
5325d5b
Update NearestNeighbors.jl
OkonSamuel Oct 22, 2020
a229988
replace `Tables.schema` with `MMI.schema`
OkonSamuel Oct 22, 2020
a1a9ca2
Update NearestNeighbors.jl
OkonSamuel Oct 22, 2020
ea2d9de
Update NearestNeighbors.jl
OkonSamuel Oct 22, 2020
dedcaee
Merge branch 'dev' of https://github.com/alan-turing-institute/MLJMod…
ablaom Oct 26, 2020
c89f593
Merge branch 'dev' into multiple-regression-knn2
ablaom Oct 27, 2020
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion src/MLJModels.jl
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
module MLJModels
module MLJModels

import MLJModelInterface
import MLJModelInterface: MODEL_TRAITS
Expand Down
25 changes: 19 additions & 6 deletions src/NearestNeighbors.jl
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ const MMI = MLJModelInterface
using Distances

import ..NearestNeighbors
import ..Tables

const NN = NearestNeighbors

Expand Down Expand Up @@ -128,26 +129,38 @@ end
"""
    MMI.predict(m::KNNRegressor, (tree, y, w), X)

Predict targets for the table `X` using the fitted KD-tree `tree`, training
target `y` and (optional) observation weights `w`.

`y` is either an `AbstractVector` (single-target regression) or a table
(multi-target regression). In the multi-target case the result is returned as
a table with the same column names and prototype as `y`.
"""
function MMI.predict(m::KNNRegressor, (tree, y, w), X)
    Xmatrix = MMI.matrix(X, transpose=true) # NOTE: copies the data
    idxs, dists = NN.knn(tree, Xmatrix, m.K)

    # View the target as a matrix so single- and multi-target prediction
    # share one code path (single target becomes an n×1 matrix).
    single_target = y isa AbstractVector
    ymat = single_target ? reshape(y, length(y), 1) : MMI.matrix(y)
    preds = similar(ymat, length(idxs), size(ymat, 2))

    w_ = ones(m.K) # uniform observation weights unless `w` was supplied

    for i in eachindex(idxs)
        idxs_ = idxs[i]
        dists_ = dists[i]
        # target rows of the K nearest neighbours of observation i
        values = [ymat[j, :] for j in idxs_]
        if w !== nothing
            w_ = w[idxs_]
        end
        if m.weights == :uniform
            preds[i, :] .= sum(values .* w_) / sum(w_)
        else
            # distance weighting: nearer neighbours receive larger weights
            preds[i, :] .= sum(values .* w_ .* (1.0 .- dists_ ./ sum(dists_))) / (sum(w_) - 1)
        end
    end

    if single_target
        # NOTE(review): this returns an n×1 matrix rather than a vector in
        # the single-target case — confirm downstream callers accept this.
        return preds
    else
        return MMI.table(preds, names=Tables.schema(y).names, prototype=y)
    end
end


# ====

metadata_pkg.((KNNRegressor, KNNClassifier),
Expand All @@ -161,7 +174,7 @@ metadata_pkg.((KNNRegressor, KNNClassifier),

# Register KNNRegressor model traits. The target scitype is a union so the
# regressor accepts both a continuous vector (single target) and a table of
# continuous columns (multi-target).
metadata_model(KNNRegressor,
               input   = Table(Continuous),
               target  = Union{AbstractVector{Continuous}, Table(Continuous)},
               weights = true,
               descr   = KNNRegressorDescription
               )
Expand Down
108 changes: 54 additions & 54 deletions src/registry/Metadata.toml
Original file line number Diff line number Diff line change
@@ -1,52 +1,8 @@

[PartialLeastSquaresRegressor.KPLSRegressor]
":input_scitype" = "`ScientificTypes.Table{_s24} where _s24<:(AbstractArray{_s23,1} where _s23<:ScientificTypes.Continuous)`"
":output_scitype" = "`ScientificTypes.Unknown`"
":target_scitype" = "`Union{AbstractArray{_s57,1} where _s57<:ScientificTypes.Continuous, ScientificTypes.Table{_s24} where _s24<:(AbstractArray{_s23,1} where _s23<:ScientificTypes.Continuous)}`"
":is_pure_julia" = "`true`"
":package_name" = "PartialLeastSquaresRegressor"
":package_license" = "MIT"
":load_path" = "PartialLeastSquaresRegressor.KPLSRegressor"
":package_uuid" = "f4b1acfe-f311-436c-bb79-8483f53c17d5"
":package_url" = "https://github.com/lalvim/PartialLeastSquaresRegressor.jl"
":is_wrapper" = "`false`"
":supports_weights" = "`false`"
":supports_online" = "`false`"
":docstring" = "A Kernel Partial Least Squares Regressor. A Kernel PLS2 NIPALS algorithms. Can be used mainly for regression.\n→ based on [PartialLeastSquaresRegressor](https://github.com/lalvim/PartialLeastSquaresRegressor.jl).\n→ do `@load KPLSRegressor pkg=\"PartialLeastSquaresRegressor\"` to use the model.\n→ do `?KPLSRegressor` for documentation."
":name" = "KPLSRegressor"
":is_supervised" = "`true`"
":prediction_type" = ":deterministic"
":implemented_methods" = [":predict", ":clean!", ":fit"]
":hyperparameters" = "`(:n_factors, :kernel, :width)`"
":hyperparameter_types" = "`(\"Integer\", \"String\", \"Real\")`"
":hyperparameter_ranges" = "`(nothing, nothing, nothing)`"

[PartialLeastSquaresRegressor.PLSRegressor]
":input_scitype" = "`ScientificTypes.Table{_s24} where _s24<:(AbstractArray{_s23,1} where _s23<:ScientificTypes.Continuous)`"
":output_scitype" = "`ScientificTypes.Unknown`"
":target_scitype" = "`Union{AbstractArray{_s57,1} where _s57<:ScientificTypes.Continuous, ScientificTypes.Table{_s24} where _s24<:(AbstractArray{_s23,1} where _s23<:ScientificTypes.Continuous)}`"
":is_pure_julia" = "`true`"
":package_name" = "PartialLeastSquaresRegressor"
":package_license" = "MIT"
":load_path" = "PartialLeastSquaresRegressor.PLSRegressor"
":package_uuid" = "f4b1acfe-f311-436c-bb79-8483f53c17d5"
":package_url" = "https://github.com/lalvim/PartialLeastSquaresRegressor.jl"
":is_wrapper" = "`false`"
":supports_weights" = "`false`"
":supports_online" = "`false`"
":docstring" = "A Partial Least Squares Regressor. Contains PLS1, PLS2 (multi target) algorithms. Can be used mainly for regression.\n→ based on [PartialLeastSquaresRegressor](https://github.com/lalvim/PartialLeastSquaresRegressor.jl).\n→ do `@load PLSRegressor pkg=\"PartialLeastSquaresRegressor\"` to use the model.\n→ do `?PLSRegressor` for documentation."
":name" = "PLSRegressor"
":is_supervised" = "`true`"
":prediction_type" = ":deterministic"
":implemented_methods" = [":predict", ":clean!", ":fit"]
":hyperparameters" = "`(:n_factors,)`"
":hyperparameter_types" = "`(\"Int64\",)`"
":hyperparameter_ranges" = "`(nothing,)`"

[NearestNeighbors.KNNClassifier]
":input_scitype" = "`ScientificTypes.Table{_s24} where _s24<:(AbstractArray{_s23,1} where _s23<:ScientificTypes.Continuous)`"
":output_scitype" = "`ScientificTypes.Unknown`"
":target_scitype" = "`AbstractArray{_s112,1} where _s112<:ScientificTypes.Finite`"
":target_scitype" = "`AbstractArray{_s97,1} where _s97<:ScientificTypes.Finite`"
":is_pure_julia" = "`true`"
":package_name" = "NearestNeighbors"
":package_license" = "MIT"
Expand All @@ -68,7 +24,7 @@
[NearestNeighbors.KNNRegressor]
":input_scitype" = "`ScientificTypes.Table{_s24} where _s24<:(AbstractArray{_s23,1} where _s23<:ScientificTypes.Continuous)`"
":output_scitype" = "`ScientificTypes.Unknown`"
":target_scitype" = "`AbstractArray{ScientificTypes.Continuous,1}`"
":target_scitype" = "`Union{AbstractArray{ScientificTypes.Continuous,1}, ScientificTypes.Table{_s24} where _s24<:(AbstractArray{_s23,1} where _s23<:ScientificTypes.Continuous)}`"
":is_pure_julia" = "`true`"
":package_name" = "NearestNeighbors"
":package_license" = "MIT"
Expand Down Expand Up @@ -1869,6 +1825,50 @@
":hyperparameter_types" = "`(\"Float64\", \"Union{Function, String}\", \"Int64\", \"Union{Float64, String}\", \"Float64\", \"Bool\", \"Float64\", \"Int64\", \"Int64\", \"String\", \"Any\")`"
":hyperparameter_ranges" = "`(nothing, nothing, nothing, nothing, nothing, nothing, nothing, nothing, nothing, nothing, nothing)`"

["Partial Least Squares Regressor".KPLSRegressor]
":input_scitype" = "`ScientificTypes.Table{_s24} where _s24<:(AbstractArray{_s23,1} where _s23<:ScientificTypes.Continuous)`"
":output_scitype" = "`ScientificTypes.Unknown`"
":target_scitype" = "`Union{AbstractArray{_s57,1} where _s57<:ScientificTypes.Continuous, ScientificTypes.Table{_s24} where _s24<:(AbstractArray{_s23,1} where _s23<:ScientificTypes.Continuous)}`"
":is_pure_julia" = "`true`"
":package_name" = "Partial Least Squares Regressor"
":package_license" = "MIT"
":load_path" = "MLJModels.Partial Least Squares Regressor_.KPLSRegressor"
":package_uuid" = "f4b1acfe-f311-436c-bb79-8483f53c17d5"
":package_url" = "https://github.com/lalvim/PartialLeastSquaresRegressor.jl"
":is_wrapper" = "`false`"
":supports_weights" = "`false`"
":supports_online" = "`false`"
":docstring" = "A Kernel Partial Least Squares Regressor. A Kernel PLS2 NIPALS algorithms. Can be used mainly for regression.\n→ based on [Partial Least Squares Regressor](https://github.com/lalvim/PartialLeastSquaresRegressor.jl).\n→ do `@load KPLSRegressor pkg=\"Partial Least Squares Regressor\"` to use the model.\n→ do `?KPLSRegressor` for documentation."
":name" = "KPLSRegressor"
":is_supervised" = "`true`"
":prediction_type" = ":deterministic"
":implemented_methods" = [":predict", ":clean!", ":fit"]
":hyperparameters" = "`(:n_factors, :kernel, :width)`"
":hyperparameter_types" = "`(\"Integer\", \"String\", \"Real\")`"
":hyperparameter_ranges" = "`(nothing, nothing, nothing)`"

["Partial Least Squares Regressor".PLSRegressor]
":input_scitype" = "`ScientificTypes.Table{_s24} where _s24<:(AbstractArray{_s23,1} where _s23<:ScientificTypes.Continuous)`"
":output_scitype" = "`ScientificTypes.Unknown`"
":target_scitype" = "`Union{AbstractArray{_s57,1} where _s57<:ScientificTypes.Continuous, ScientificTypes.Table{_s24} where _s24<:(AbstractArray{_s23,1} where _s23<:ScientificTypes.Continuous)}`"
":is_pure_julia" = "`true`"
":package_name" = "Partial Least Squares Regressor"
":package_license" = "MIT"
":load_path" = "MLJModels.Partial Least Squares Regressor_.PLSRegressor"
":package_uuid" = "f4b1acfe-f311-436c-bb79-8483f53c17d5"
":package_url" = "https://github.com/lalvim/PartialLeastSquaresRegressor.jl"
":is_wrapper" = "`false`"
":supports_weights" = "`false`"
":supports_online" = "`false`"
":docstring" = "A Partial Least Squares Regressor. Contains PLS1, PLS2 (multi target) algorithms. Can be used mainly for regression.\n→ based on [Partial Least Squares Regressor](https://github.com/lalvim/PartialLeastSquaresRegressor.jl).\n→ do `@load PLSRegressor pkg=\"Partial Least Squares Regressor\"` to use the model.\n→ do `?PLSRegressor` for documentation."
":name" = "PLSRegressor"
":is_supervised" = "`true`"
":prediction_type" = ":deterministic"
":implemented_methods" = [":predict", ":clean!", ":fit"]
":hyperparameters" = "`(:n_factors,)`"
":hyperparameter_types" = "`(\"Int64\",)`"
":hyperparameter_ranges" = "`(nothing,)`"

[ParallelKMeans.KMeans]
":input_scitype" = "`ScientificTypes.Table{_s24} where _s24<:(AbstractArray{_s23,1} where _s23<:ScientificTypes.Continuous)`"
":output_scitype" = "`ScientificTypes.Table{_s24} where _s24<:(AbstractArray{_s23,1} where _s23<:ScientificTypes.Continuous)`"
Expand Down Expand Up @@ -2356,7 +2356,7 @@
[DecisionTree.AdaBoostStumpClassifier]
":input_scitype" = "`ScientificTypes.Table{_s24} where _s24<:Union{AbstractArray{_s23,1} where _s23<:ScientificTypes.Continuous, AbstractArray{_s23,1} where _s23<:ScientificTypes.Count, AbstractArray{_s23,1} where _s23<:ScientificTypes.OrderedFactor}`"
":output_scitype" = "`ScientificTypes.Unknown`"
":target_scitype" = "`AbstractArray{_s112,1} where _s112<:ScientificTypes.Finite`"
":target_scitype" = "`AbstractArray{_s113,1} where _s113<:ScientificTypes.Finite`"
":is_pure_julia" = "`true`"
":package_name" = "DecisionTree"
":package_license" = "MIT"
Expand Down Expand Up @@ -2400,7 +2400,7 @@
[DecisionTree.DecisionTreeClassifier]
":input_scitype" = "`ScientificTypes.Table{_s24} where _s24<:Union{AbstractArray{_s23,1} where _s23<:ScientificTypes.Continuous, AbstractArray{_s23,1} where _s23<:ScientificTypes.Count, AbstractArray{_s23,1} where _s23<:ScientificTypes.OrderedFactor}`"
":output_scitype" = "`ScientificTypes.Unknown`"
":target_scitype" = "`AbstractArray{_s112,1} where _s112<:ScientificTypes.Finite`"
":target_scitype" = "`AbstractArray{_s113,1} where _s113<:ScientificTypes.Finite`"
":is_pure_julia" = "`true`"
":package_name" = "DecisionTree"
":package_license" = "MIT"
Expand Down Expand Up @@ -2444,7 +2444,7 @@
[DecisionTree.RandomForestClassifier]
":input_scitype" = "`ScientificTypes.Table{_s24} where _s24<:Union{AbstractArray{_s23,1} where _s23<:ScientificTypes.Continuous, AbstractArray{_s23,1} where _s23<:ScientificTypes.Count, AbstractArray{_s23,1} where _s23<:ScientificTypes.OrderedFactor}`"
":output_scitype" = "`ScientificTypes.Unknown`"
":target_scitype" = "`AbstractArray{_s112,1} where _s112<:ScientificTypes.Finite`"
":target_scitype" = "`AbstractArray{_s113,1} where _s113<:ScientificTypes.Finite`"
":is_pure_julia" = "`true`"
":package_name" = "DecisionTree"
":package_license" = "MIT"
Expand Down Expand Up @@ -3060,7 +3060,7 @@
[LIBSVM.LinearSVC]
":input_scitype" = "`ScientificTypes.Table{_s24} where _s24<:(AbstractArray{_s23,1} where _s23<:ScientificTypes.Continuous)`"
":output_scitype" = "`ScientificTypes.Unknown`"
":target_scitype" = "`AbstractArray{_s111,1} where _s111<:ScientificTypes.Finite`"
":target_scitype" = "`AbstractArray{_s112,1} where _s112<:ScientificTypes.Finite`"
":is_pure_julia" = "`false`"
":package_name" = "LIBSVM"
":package_license" = "unknown"
Expand Down Expand Up @@ -3104,7 +3104,7 @@
[LIBSVM.NuSVC]
":input_scitype" = "`ScientificTypes.Table{_s24} where _s24<:(AbstractArray{_s23,1} where _s23<:ScientificTypes.Continuous)`"
":output_scitype" = "`ScientificTypes.Unknown`"
":target_scitype" = "`AbstractArray{_s111,1} where _s111<:ScientificTypes.Finite`"
":target_scitype" = "`AbstractArray{_s112,1} where _s112<:ScientificTypes.Finite`"
":is_pure_julia" = "`false`"
":package_name" = "LIBSVM"
":package_license" = "unknown"
Expand All @@ -3126,7 +3126,7 @@
[LIBSVM.SVC]
":input_scitype" = "`ScientificTypes.Table{_s24} where _s24<:(AbstractArray{_s23,1} where _s23<:ScientificTypes.Continuous)`"
":output_scitype" = "`ScientificTypes.Unknown`"
":target_scitype" = "`AbstractArray{_s111,1} where _s111<:ScientificTypes.Finite`"
":target_scitype" = "`AbstractArray{_s112,1} where _s112<:ScientificTypes.Finite`"
":is_pure_julia" = "`false`"
":package_name" = "LIBSVM"
":package_license" = "unknown"
Expand All @@ -3147,7 +3147,7 @@

[LIBSVM.OneClassSVM]
":input_scitype" = "`ScientificTypes.Table{_s24} where _s24<:(AbstractArray{_s23,1} where _s23<:ScientificTypes.Continuous)`"
":output_scitype" = "`AbstractArray{_s111,1} where _s111<:ScientificTypes.Finite{2}`"
":output_scitype" = "`AbstractArray{_s112,1} where _s112<:ScientificTypes.Finite{2}`"
":target_scitype" = "`ScientificTypes.Unknown`"
":is_pure_julia" = "`false`"
":package_name" = "LIBSVM"
Expand All @@ -3170,7 +3170,7 @@
[GLM.LinearBinaryClassifier]
":input_scitype" = "`ScientificTypes.Table{_s24} where _s24<:(AbstractArray{_s23,1} where _s23<:ScientificTypes.Continuous)`"
":output_scitype" = "`ScientificTypes.Unknown`"
":target_scitype" = "`AbstractArray{_s112,1} where _s112<:ScientificTypes.Finite{2}`"
":target_scitype" = "`AbstractArray{_s113,1} where _s113<:ScientificTypes.Finite{2}`"
":is_pure_julia" = "`true`"
":package_name" = "GLM"
":package_license" = "MIT"
Expand Down
2 changes: 1 addition & 1 deletion src/registry/Models.toml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
PartialLeastSquaresRegressor = ["KPLSRegressor", "PLSRegressor"]
NearestNeighbors = ["KNNClassifier", "KNNRegressor"]
MLJLinearModels = ["QuantileRegressor", "LogisticClassifier", "MultinomialClassifier", "LADRegressor", "RidgeRegressor", "RobustRegressor", "ElasticNetRegressor", "LinearRegressor", "LassoRegressor", "HuberRegressor"]
ScikitLearn = ["ProbabilisticSGDClassifier", "RidgeCVClassifier", "LogisticClassifier", "RandomForestRegressor", "ElasticNetCVRegressor", "PerceptronClassifier", "MultiTaskLassoRegressor", "LinearRegressor", "DBSCAN", "RidgeRegressor", "LassoLarsICRegressor", "ARDRegressor", "SVMNuRegressor", "RidgeClassifier", "SGDRegressor", "ComplementNBClassifier", "HuberRegressor", "SVMNuClassifier", "GradientBoostingClassifier", "GaussianProcessRegressor", "SVMLinearRegressor", "LarsRegressor", "MeanShift", "AdaBoostRegressor", "AffinityPropagation", "MultiTaskLassoCVRegressor", "OrthogonalMatchingPursuitRegressor", "RidgeCVRegressor", "PassiveAggressiveClassifier", "SVMRegressor", "BernoulliNBClassifier", "GaussianNBClassifier", "ExtraTreesClassifier", "KMeans", "MultiTaskElasticNetCVRegressor", "LassoLarsCVRegressor", "OrthogonalMatchingPursuitCVRegressor", "AdaBoostClassifier", "PassiveAggressiveRegressor", "BayesianRidgeRegressor", "RANSACRegressor", "BaggingClassifier", "GaussianProcessClassifier", "OPTICS", "KNeighborsRegressor", "MiniBatchKMeans", "LassoCVRegressor", "DummyRegressor", "LassoLarsRegressor", "LarsCVRegressor", "KNeighborsClassifier", "SVMLinearClassifier", "FeatureAgglomeration", "DummyClassifier", "BaggingRegressor", "BayesianQDA", "BayesianLDA", "SGDClassifier", "TheilSenRegressor", "SpectralClustering", "Birch", "AgglomerativeClustering", "ElasticNetRegressor", "RandomForestClassifier", "LogisticCVClassifier", "MultiTaskElasticNetRegressor", "ExtraTreesRegressor", "LassoRegressor", "MultinomialNBClassifier", "GradientBoostingRegressor", "SVMClassifier"]
"Partial Least Squares Regressor" = ["KPLSRegressor", "PLSRegressor"]
ParallelKMeans = ["KMeans"]
NaiveBayes = ["GaussianNBClassifier", "MultinomialNBClassifier"]
MLJBase = ["DeterministicSurrogate", "WrappedFunction", "IntervalSurrogate", "UnsupervisedSurrogate", "JointProbabilisticSurrogate", "Resampler", "StaticSurrogate", "ProbabilisticSurrogate"]
Expand Down
16 changes: 14 additions & 2 deletions test/NearestNeighbors.jl
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ using MLJModels.NearestNeighbors_
using CategoricalArrays
using MLJBase
using Random
using Tables

Random.seed!(5151)

Expand Down Expand Up @@ -109,7 +110,19 @@ p2 = predict(knnr, f2, xtest)
@test all(p[ntest+1:2*ntest] .≈ 2.0)
@test all(p[2*ntest+1:end] .≈ -2.0)

ymat = vcat(fill( 0.0, n, 2), fill(2.0, n, 2), fill(-2.0, n, 2))
yv = Tables.table(ymat; header = [:a, :b])

fv,_,_ = fit(knnr, 1, x, yv)
f2v,_,_ = fit(knnr, 1, x, yv, w)

pv = predict(knnr, fv, xtest)

for col in [:a, :b]
@test all(pv[col][1:ntest] .≈ [0.0])
@test all(pv[col][ntest+1:2*ntest] .≈ [2.0])
@test all(pv[col][2*ntest+1:end] .≈ [-2.0])
end



Expand All @@ -128,8 +141,7 @@ infos[:docstring]
infos = info_dict(knnr)

@test infos[:input_scitype] == Table(Continuous)
@test infos[:target_scitype] == AbstractVector{Continuous}

@test infos[:target_scitype] == Union{AbstractVector{Continuous}, Table(Continuous)}
infos[:docstring]

end
Expand Down