-
Notifications
You must be signed in to change notification settings - Fork 11
/
regtest-noncyclic_fitting.R
125 lines (88 loc) · 3.56 KB
/
regtest-noncyclic_fitting.R
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
## Load gamboostLSS. library() errors immediately if the package is missing,
## whereas require() only warns and returns FALSE, letting the script fail
## later with a confusing message.
library("gamboostLSS")
### Negative binomial distribution, linear predictors ###
## Simulate n observations with 6 standard-normal covariates;
## mu depends on x1-x4, sigma (overdispersion parameter) on x4-x6.
set.seed(2611)
n <- 1000
x1 <- rnorm(n)
x2 <- rnorm(n)
x3 <- rnorm(n)
x4 <- rnorm(n)
x5 <- rnorm(n)
x6 <- rnorm(n)
mu <- exp(1.5 + x1^2 + 0.5 * x2 - 3 * sin(x3) - 1 * x4)
sigma <- exp(-0.2 * x4 + 0.2 * x5 + 0.4 * x6)
## rnbinom() is vectorized over size/mu and consumes the RNG stream in the
## same per-element order as the former scalar loop, so the simulated data
## are unchanged.  as.numeric() keeps y a double, matching the original
## numeric(1000) vector that the loop filled in.
y <- as.numeric(rnbinom(n, size = sigma, mu = mu))
dat <- data.frame(x1, x2, x3, x4, x5, x6, y)
## Fit each model variant with mstop = number of distribution parameters + 1.
ctrl3 <- boost_control(mstop = 3)
## Linear base-learners via glmboost
model <- glmboostLSS(y ~ ., data = dat, families = NBinomialLSS(),
                     method = "noncyclic", control = ctrl3)
## Linear base-learners via gamboost with bols
model <- gamboostLSS(y ~ ., data = dat, families = NBinomialLSS(),
                     method = "noncyclic", control = ctrl3,
                     baselearner = "bols")
## Nonlinear bbs base-learners
model <- gamboostLSS(y ~ ., data = dat, families = NBinomialLSS(),
                     method = "noncyclic", control = ctrl3,
                     baselearner = "bbs")
## Reducing mstop and then increasing it again must reproduce the same fit.
## Helper: record coefficients at mstop = 50, shrink the model to 5 steps,
## grow it back to 50, and check that the coefficients are recovered.
check_mstop_restore <- function(model) {
  m_co <- coef(model)
  mstop(model) <- 5
  mstop(model) <- 50
  stopifnot(all.equal(m_co, coef(model)))
  invisible(NULL)
}
ctrl50 <- boost_control(mstop = 50)
check_mstop_restore(glmboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
                                control = ctrl50, method = "noncyclic"))
check_mstop_restore(gamboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
                                control = ctrl50, method = "noncyclic",
                                baselearner = "bols"))
## NOTE(review): the original script ran the identical "bbs" check twice
## (copy-paste duplication); the duplicate run has been removed.
check_mstop_restore(gamboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
                                control = ctrl50, method = "noncyclic",
                                baselearner = "bbs"))
## Check cvrisk() for noncyclic models.
model <- glmboostLSS(y ~ ., families = NBinomialLSS(), data = dat,
                     control = boost_control(mstop = 3), method = "noncyclic")
folds <- cv(model.weights(model), B = 5)
cvr1 <- cvrisk(model, grid = 1:50, folds)
print(cvr1)
plot(cvr1)
## Risk paths, both merged across distribution parameters and per parameter.
print(risk(model, merge = TRUE))
print(risk(model, merge = FALSE))
## test that mstop = 0 is possible
## Check that two fitted models are equivalent: equal coefficients,
## predictions, fitted values, selection paths, and risk sequences, and —
## after stripping the components that legitimately differ (call, control) —
## equal objects overall.  Errors via stop()/stopifnot() on any mismatch;
## returns NULL invisibly on success.
compare_models <- function(m1, m2) {
  stopifnot(all.equal(coef(m1), coef(m2)))
  stopifnot(all.equal(predict(m1), predict(m2)))
  stopifnot(all.equal(fitted(m1), fitted(m2)))
  stopifnot(all.equal(selected(m1), selected(m2)))
  stopifnot(all.equal(risk(m1), risk(m2)))
  ## remove obvious differences from objects
  m1$control <- m2$control <- NULL
  m1$call <- m2$call <- NULL
  ## BUG FIX: all.equal() returns a character vector describing the
  ## differences (not FALSE) on mismatch, so `!all.equal(...)` raised
  ## "invalid argument type" instead of the intended message.  isTRUE()
  ## makes the condition well-defined in both cases.
  if (!isTRUE(all.equal(m1, m2)))
    stop("Objects of offset model + 1 step and model with 1 step not identical")
  invisible(NULL)
}
## Set up models: an offset-only model (mstop = 0) and two 1-step models.
fit_glm <- function(steps) {
  glmboostLSS(y ~ ., data = dat, method = "noncyclic",
              control = boost_control(mstop = steps))
}
mod <- fit_glm(0)
mod2 <- fit_glm(1)
mod3 <- fit_glm(1)
## At mstop = 0, no coefficients may have been selected for any parameter.
for (cf in coef(mod)) stopifnot(is.null(cf))
## Shrinking mod3 to 0 steps must reproduce the untouched 0-step model ...
mstop(mod3) <- 0
mapply(compare_models, m1 = mod, m2 = mod3)
## ... growing mod to 1 step must reproduce a directly fitted 1-step model ...
mstop(mod) <- 1
mapply(compare_models, m1 = mod, m2 = mod2)
## ... and growing mod3 back to 1 step must match as well.
mstop(mod3) <- 1
mapply(compare_models, m1 = mod, m2 = mod3)