Skip to content

Commit

Permalink
Fix bugs in the ordinal models and remove mxnet from the docs
Browse files Browse the repository at this point in the history
  • Loading branch information
topepo committed Jun 10, 2016
1 parent 88765e9 commit d639b3e
Show file tree
Hide file tree
Showing 6 changed files with 4 additions and 31 deletions.
2 changes: 1 addition & 1 deletion models/files/vglmAdjCat.R
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ modelInfo <- list(
predict(modelFit, newdata = newdata, type = "response")
},
varImp = NULL,
predictors = function(x, ...) predictors(terms(x)),
predictors = function(x, ...) caret:::predictors.terms(x@terms$terms),
levels = function(x)
if (any(names(x) == "obsLevels")) x$obsLevels else NULL,
tags = c("Logistic Regression", "Linear Classifier", "Accepts Case Weights", "Ordinal Outcomes"),
Expand Down
2 changes: 1 addition & 1 deletion models/files/vglmContRatio.R
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ modelInfo <- list(
out
},
varImp = NULL,
predictors = function(x, ...) predictors(terms(x)),
predictors = function(x, ...) caret:::predictors.terms(x@terms$terms),
levels = function(x)
if (any(names(x) == "obsLevels")) x$obsLevels else NULL,
tags = c("Logistic Regression", "Linear Classifier", "Accepts Case Weights", "Ordinal Outcomes"),
Expand Down
2 changes: 1 addition & 1 deletion models/files/vglmCumulative.R
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ modelInfo <- list(
predict(modelFit, newdata = newdata, type = "response")
},
varImp = NULL,
predictors = function(x, ...) predictors(terms(x)),
predictors = function(x, ...) caret:::predictors.terms(x@terms$terms),
levels = function(x)
if (any(names(x) == "obsLevels")) x$obsLevels else NULL,
tags = c("Logistic Regression", "Linear Classifier", "Accepts Case Weights", "Ordinal Outcomes"),
Expand Down
2 changes: 1 addition & 1 deletion models/parseModels.R
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
setwd("~/Code/github/caret/models/files")
modelFiles <- list.files(pattern = "\\.R$")
modelFiles <- modelFiles[!grepl("(rknn)|(mxnet)", modelFiles)]
modelFiles <- modelFiles[!grepl("(rknn)|([mM]xnet)", modelFiles)]


models <- vector(mode = "list", length = length(modelFiles))
Expand Down
Binary file modified pkg/caret/inst/models/models.RData
Binary file not shown.
27 changes: 0 additions & 27 deletions pkg/caret/man/models.Rd
Original file line number Diff line number Diff line change
Expand Up @@ -926,20 +926,6 @@ For classification using package \pkg{bnclassify} with tuning parameters:
}
\strong{Model Averaged Neural Network} (\code{method = 'avMxnet'})
For classification using package \pkg{mxnet} with tuning parameters:
\itemize{
\item Number of Hidden Units in Layer 1 (\code{layer1}, numeric)
\item Number of Hidden Units in Layer 2 (\code{layer2}, numeric)
\item Number of Hidden Units in Layer 3 (\code{layer3}, numeric)
\item Learning Rate (\code{learning.rate}, numeric)
\item Momentum (\code{momentum}, numeric)
\item Dropout Rate (\code{dropout}, numeric)
\item Number of Models (\code{repeats}, numeric)
}
\strong{Model Averaged Neural Network} (\code{method = 'avNNet'})
For classification and regression using package \pkg{nnet} with tuning parameters:
Expand Down Expand Up @@ -1073,19 +1059,6 @@ For classification using package \pkg{pamr} with tuning parameters:
}
\strong{Neural Network} (\code{method = 'mxnet'})
For classification using package \pkg{mxnet} with tuning parameters:
\itemize{
\item Number of Hidden Units in Layer 1 (\code{layer1}, numeric)
\item Number of Hidden Units in Layer 2 (\code{layer2}, numeric)
\item Number of Hidden Units in Layer 3 (\code{layer3}, numeric)
\item Learning Rate (\code{learning.rate}, numeric)
\item Momentum (\code{momentum}, numeric)
\item Dropout Rate (\code{dropout}, numeric)
}
\strong{Neural Network} (\code{method = 'neuralnet'})
For regression using package \pkg{neuralnet} with tuning parameters:
Expand Down

0 comments on commit d639b3e

Please sign in to comment.