/
predict.R
133 lines (101 loc) · 3.43 KB
/
predict.R
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
## Matt Galloway
#' @title Predict Logistic Regression
#' @description Generates prediction for logistic regression. Note that one can either input a 'logisticr' object or a matrix of beta coefficients.
#'
#' @param object 'logisticr' object or matrix of betas
#' @param X matrix or data frame of (new) observations
#' @param y optional, matrix or vector of response values 0,1
#' @param ... additional arguments
#' @return predictions and loss metrics
#' @export
#' @examples
#' library(dplyr)
#' X = dplyr::select(iris, -Species)
#' y = dplyr::select(iris, Species)
#' y$Species = ifelse(y$Species == 'setosa', 1, 0)
#' logisticr(X, y)
#'
#' fitted = logisticr(X, y, lam = 0.1, penalty = 'ridge', method = 'MM')
#' predict(fitted, X)
predict.logisticr = function(object, X, y = NULL, ...) {
    # input checks
    X = as.matrix(X)
    if (!is.null(y)) {
        y = as.matrix(y)
        if (nrow(X) != nrow(y))
            stop("X and y must have equal observations!")
        # %in% is NA-safe and clearer than all(y == 1 | y == 0) == FALSE
        if (!all(y %in% c(0, 1)))
            stop("y must be binary!")
    }
    # if object is a fitted 'logisticr' model, extract betas;
    # inherits() is robust when class(object) has length > 1
    if (inherits(object, "logisticr")) {
        object = object$coefficients
    }
    # add intercept column, if needed
    if (ncol(X) != nrow(object)) {
        X = cbind(1, X)
    }
    # fitted probabilities via the logistic link (logitc defined elsewhere)
    fitted = logitc(X %*% object)
    # predicted class labels obtained by rounding the probabilities;
    # renamed from 'class' to avoid shadowing base::class
    pred.class = round(fitted)
    # if y provided, compute MSE, log-loss, misclassification
    MSE = NULL
    log.loss = NULL
    misclassification = NULL
    if (!is.null(y)) {
        # calculate metrics
        MSE = mean((y - fitted)^2)
        log.losses = -y * log(fitted) - (1 - y) * log(1 -
            fitted)
        # NaN arises when fitted is exactly 0 or 1; count those terms as 0
        log.loss = sum(ifelse(is.nan(log.losses), 0, log.losses))
        misclassification = mean(y != pred.class)
    }
    returns = list(fitted.values = fitted, class = pred.class,
        MSE = MSE, log.loss = log.loss, misclassification = misclassification)
    return(returns)
}
##------------------------------------------------------------------------------------
#' @title Predict Linear Regression
#' @description Generates prediction for linear regression. Note that one can either input a 'linearr' object or a matrix of beta coefficients.
#'
#' @param object 'linearr' object or matrix of betas
#' @param X matrix or data frame of (new) observations
#' @param y optional, matrix or vector of response values
#' @param ... additional arguments
#' @return predictions and loss metrics
#' @export
#' @examples
#' library(dplyr)
#' X = dplyr::select(iris, -c(Species, Sepal.Length))
#' y = dplyr::select(iris, Sepal.Length)
#' fitted = linearr(X, y, lam = 0.1)
#' predict(fitted, X)
predict.linearr = function(object, X, y = NULL, ...) {
    # input checks
    X = as.matrix(X)
    if (!is.null(y)) {
        y = as.matrix(y)
        if (nrow(X) != nrow(y))
            stop("X and y must have equal observations!")
    }
    # if object is a fitted 'linearr' model, extract betas;
    # inherits() is robust when class(object) has length > 1
    if (inherits(object, "linearr")) {
        object = object$coefficients
    }
    # add intercept column, if needed
    if (ncol(X) != nrow(object)) {
        X = cbind(1, X)
    }
    # fitted values from the linear predictor
    fitted = X %*% object
    # if y provided, compute RSS and MSE; otherwise both stay NULL
    RSS = NULL
    MSE = NULL
    if (!is.null(y)) {
        # calculate metrics
        RSS = sum((y - fitted)^2)
        MSE = mean((y - fitted)^2)
    }
    returns = list(fitted.values = fitted, RSS = RSS, MSE = MSE)
    return(returns)
}