-
Notifications
You must be signed in to change notification settings - Fork 111
/
relu.R
197 lines (186 loc) · 5.57 KB
/
relu.R
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
#' Apply (smoothed) rectified linear transformation
#'
#' `step_relu()` creates a *specification* of a recipe step that will add the
#' rectified linear or softplus transformations of a variable to the data set.
#'
#' @inheritParams step_pca
#' @inheritParams step_center
#' @param shift A numeric value dictating a translation to apply to the data.
#' @param reverse A logical to indicate if the left hinge should be used as
#' opposed to the right hinge.
#' @param smooth A logical indicating if the softplus function, a smooth
#' approximation to the rectified linear transformation, should be used.
#' @param prefix A prefix for generated column names, defaults to "right_relu_"
#' for right hinge transformation and "left_relu_" for reversed/left hinge
#' transformations.
#' @template step-return
#' @family individual transformation steps
#' @export
#' @rdname step_relu
#'
#' @details
#'
#' The rectified linear transformation is calculated as
#' \deqn{max(0, x - c)} and is also known as the ReLu or right hinge function.
#' If `reverse` is true, then the transformation is reflected about the
#' y-axis, like so: \deqn{max(0, c - x)} Setting the `smooth` option
#' to true will instead calculate a smooth approximation to ReLu
#' according to \deqn{ln(1 + e^(x - c)} The `reverse` argument may
#' also be applied to this transformation.
#'
#' # Connection to MARS:
#'
#' The rectified linear transformation is used in Multivariate Adaptive
#' Regression Splines as a basis function to fit piecewise linear functions to
#' data in a strategy similar to that employed in tree based models. The
#' transformation is a popular choice as an activation function in many
#' neural networks, which could then be seen as a stacked generalization of
#' MARS when making use of ReLu activations. The hinge function also appears
#' in the loss function of Support Vector Machines, where it penalizes
#' residuals only if they are within a certain margin of the decision boundary.
#'
#' # Tidying
#'
#' When you [`tidy()`][tidy.recipe()] this step, a tibble is returned with
#' columns `terms`, `shift`, `reverse`, and `id`:
#'
#' \describe{
#' \item{terms}{character, the selectors or variables selected}
#' \item{shift}{numeric, location of hinge}
#' \item{reverse}{logical, whether left hinge is used}
#' \item{id}{character, id of this step}
#' }
#'
#' @template case-weights-not-supported
#'
#' @examplesIf rlang::is_installed("modeldata")
#' data(biomass, package = "modeldata")
#'
#' biomass_tr <- biomass[biomass$dataset == "Training", ]
#' biomass_te <- biomass[biomass$dataset == "Testing", ]
#'
#' rec <- recipe(
#' HHV ~ carbon + hydrogen + oxygen + nitrogen + sulfur,
#' data = biomass_tr
#' )
#'
#' transformed_te <- rec %>%
#' step_relu(carbon, shift = 40) %>%
#' prep(biomass_tr) %>%
#' bake(biomass_te)
#'
#' transformed_te
step_relu <-
  function(recipe,
           ...,
           role = "predictor",
           trained = FALSE,
           shift = 0,
           reverse = FALSE,
           smooth = FALSE,
           prefix = "right_relu_",
           columns = NULL,
           skip = FALSE,
           id = rand_id("relu")) {
    # Validate arguments only when they are concrete values; `tune()`
    # placeholders are resolved later during tuning.
    if (!is_tune(shift)) {
      check_number_decimal(shift)
    }
    if (!is_tune(reverse)) {
      check_bool(reverse)
      # Swap in the left-hinge prefix only when `reverse` is a real logical
      # and the user kept the default prefix, so an explicit `prefix` is
      # never overridden. Using `&&` (scalar, short-circuiting) and keeping
      # this inside the `!is_tune()` guard avoids evaluating a bare `tune()`
      # call in a condition, which would error.
      if (reverse && prefix == "right_relu_") {
        prefix <- "left_relu_"
      }
    }
    if (!is_tune(smooth)) {
      check_bool(smooth)
    }
    add_step(
      recipe,
      step_relu_new(
        terms = enquos(...),
        role = role,
        trained = trained,
        shift = shift,
        reverse = reverse,
        smooth = smooth,
        prefix = prefix,
        columns = columns,
        skip = skip,
        id = id
      )
    )
  }
# Internal constructor for the "relu" step object; not user-facing.
# Collects the step's state into a named list and dispatches to `step()`.
step_relu_new <-
  function(terms, role, trained, shift, reverse, smooth, prefix, columns, skip, id) {
    fields <- list(
      subclass = "relu",
      terms = terms,
      role = role,
      trained = trained,
      shift = shift,
      reverse = reverse,
      smooth = smooth,
      prefix = prefix,
      columns = columns,
      skip = skip,
      id = id
    )
    do.call(step, fields)
  }
#' @export
prep.step_relu <- function(x, training, info = NULL, ...) {
columns <- recipes_eval_select(x$terms, training, info)
check_type(training[, columns], types = c("double", "integer"))
step_relu_new(
terms = x$terms,
role = x$role,
trained = TRUE,
shift = x$shift,
reverse = x$reverse,
smooth = x$smooth,
prefix = x$prefix,
columns = columns,
skip = x$skip,
id = x$id
)
}
#' @export
bake.step_relu <- function(object, new_data, ...) {
col_names <- names(object$columns)
check_new_data(col_names, object, new_data)
make_relu_call <- function(col) {
call2("relu", sym(col), object$shift, object$reverse, object$smooth)
}
exprs <- purrr::map(col_names, make_relu_call)
newname <- glue::glue("{object$prefix}{col_names}")
exprs <- check_name(exprs, new_data, object, newname, TRUE)
dplyr::mutate(new_data, !!!exprs)
}
#' @export
print.step_relu <-
function(x, width = max(20, options()$width - 30), ...) {
title <- "Adding relu transform for "
print_step(x$columns, x$terms, x$trained, title, width)
invisible(x)
}
#' Rectified linear (or softplus) transformation
#'
#' Internal helper used by `bake.step_relu()`.
#'
#' @param x A numeric vector.
#' @param shift Numeric translation applied before the hinge.
#' @param reverse If `TRUE`, compute the left hinge `max(0, shift - x)`
#'   instead of the right hinge `max(0, x - shift)`.
#' @param smooth If `TRUE`, use the softplus `log(1 + exp(x - shift))`,
#'   a smooth approximation to the hinge.
#' @return A numeric vector the same length as `x`.
relu <- function(x, shift = 0, reverse = FALSE, smooth = FALSE) {
  shifted <- if (reverse) shift - x else x - shift
  if (smooth) {
    # Numerically stable softplus: log1p(exp(t)) overflows to Inf for
    # t > ~709 (double overflow in exp), while
    # max(t, 0) + log1p(exp(-|t|)) is algebraically identical and stays
    # finite for all finite t.
    out <- pmax(shifted, 0) + log1p(exp(-abs(shifted)))
  } else {
    # pmax() recycles the scalar 0, so no zero vector is needed.
    out <- pmax(shifted, 0)
  }
  out
}
#' @rdname tidy.recipe
#' @export
tidy.step_relu <- function(x, ...) {
  # Start from the shared terms tibble, then append this step's
  # hinge location, direction, and id columns.
  res <- simple_terms(x, ...)
  res[["shift"]] <- x$shift
  res[["reverse"]] <- x$reverse
  res[["id"]] <- x$id
  res
}