Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fixes for random forest minNode args. #765

Merged
merged 5 commits into from Nov 21, 2017
Merged
Changes from all commits
Commits
File filter...
Filter file types
Jump to…
Jump to file
Failed to load files.

Always

Just for now

@@ -13,7 +13,7 @@ modelInfo <- list(label = "Random Forest",
minNode = ifelse(is.factor(y), 2, 3))
} else {
out <- data.frame(predFixed = sample(1:ncol(x), size = len, replace = TRUE), #removed unique
minNode = sample(1:(min(20,nrow(trainX))), size = len, replace = TRUE)) # might cause warning for very small samples < 20
minNode = sample(1:(min(20,nrow(x))), size = len, replace = TRUE)) # might cause warning for very small samples < 20
}
out
},
@@ -46,4 +46,4 @@ modelInfo <- list(label = "Random Forest",
e1071::classAgreement(x$validation$confusion)[["kappa"]]))
names(out) <- if(x$problemType == "Regression") c("RMSE", "Rsquared") else c("Accuracy", "Kappa")
out
})
})
@@ -53,6 +53,7 @@ modelInfo <- list(label = "Multilayer Perceptron Network with Weight Decay",
units = param$size,
activation = as.character(param$activation),
input_shape = ncol(x),
kernel_initializer = keras::initializer_glorot_uniform(),
kernel_regularizer = keras::regularizer_l2(param$lambda)
)
if(is.factor(y)) {
@@ -56,6 +56,7 @@ modelInfo <- list(label = "Multilayer Perceptron Network with Weight Decay",
units = param$size,
activation = as.character(param$activation),
input_shape = ncol(x),
kernel_initializer = keras::initializer_glorot_uniform(),
kernel_regularizer = keras::regularizer_l2(param$lambda)
)
y <- class2ind(y)
@@ -53,6 +53,7 @@ modelInfo <- list(label = "Multilayer Perceptron Network with Dropout",
keras::layer_dense(
units = param$size,
activation = as.character(param$activation),
kernel_initializer = keras::initializer_glorot_uniform(),
input_shape = ncol(x)
) %>%
keras::layer_dropout(rate = param$dropout,
@@ -55,6 +55,7 @@ modelInfo <- list(label = "Multilayer Perceptron Network with Dropout",
keras::layer_dense(
units = param$size,
activation = as.character(param$activation),
kernel_initializer = keras::initializer_glorot_uniform(),
input_shape = ncol(x)
) %>%
keras::layer_dropout(rate = param$dropout,
@@ -46,7 +46,7 @@ modelInfo <- list(label = "Neural Network",
array.layout = "rowmajor",
activation = rep( as.character(param$activation), length(num_units)),
# Use He/MSRA when available in R
initializer = mx.init.Xavier(factor_type = "avg", magnitude = 3, rnd_type = 'uniform'),
initializer = mxnet::mx.init.Xavier(factor_type = "avg", magnitude = 3, rnd_type = 'uniform'),
...)
} else {
y <- as.numeric(y) - 1
@@ -60,7 +60,7 @@ modelInfo <- list(label = "Neural Network",
eval.metric = mx.metric.accuracy,
array.layout = "rowmajor",
activation = rep( as.character(param$activation), length(num_units)),
initializer = mx.init.Xavier(factor_type = "avg", magnitude = 3, rnd_type = 'uniform'),
initializer = mxnet::mx.init.Xavier(factor_type = "avg", magnitude = 3, rnd_type = 'uniform'),
...)
}
if(last)
@@ -41,7 +41,7 @@ modelInfo <- list(label = "Neural Network",
hidden_node = num_units,
activation = rep( as.character(param$activation), length(num_units)),
# Consider using He/MSRA paper when available in R
initializer = mx.init.Xavier(factor_type = "avg", magnitude = 3, rnd_type = 'uniform'),
initializer = mxnet::mx.init.Xavier(factor_type = "avg", magnitude = 3, rnd_type = 'uniform'),
...)
} else {
y <- as.numeric(y) - 1
@@ -54,7 +54,7 @@ modelInfo <- list(label = "Neural Network",
dropout = param$dropout,
hidden_node = num_units,
activation = rep( as.character(param$activation), length(num_units)),
initializer = mx.init.Xavier(factor_type = "avg", magnitude = 3, rnd_type = 'uniform'),
initializer = mxnet::mx.init.Xavier(factor_type = "avg", magnitude = 3, rnd_type = 'uniform'),
...)
}
if(last)
@@ -35,7 +35,7 @@ modelInfo <- list(label = "Random Forest",
c("variance", "extratrees", "maxstat")
out <-
data.frame(
min.node.size= sample(0:20, size = len, replace = TRUE),
min.node.size= sample(1:(min(20,nrow(x))), size = len, replace = TRUE),
mtry = sample(1:ncol(x), size = len, replace = TRUE),
splitrule = sample(srules, size = len, replace = TRUE)
)
@@ -38,5 +38,14 @@ modelInfo <- list(label = "Robust Linear Model",
predict(modelFit, newdata)
},
prob = NULL,
varImp = function(object, ...) {
values <- summary(object)$coef
varImps <- abs(values[ !grepl( rownames(values), pattern = 'Intercept' ),
grep("value$", colnames(values)), drop = FALSE])
out <- data.frame(varImps)
colnames(out) <- "Overall"
if(!is.null(names(varImps))) rownames(out) <- names(varImps)
out
},
tags = c("Linear Regression", "Robust Model", "Accepts Case Weights"),
sort = function(x) x)
Binary file not shown.
ProTip! Use n and p to navigate between commits in a pull request.
You can’t perform that action at this time.