---
title: "NN_demo"
output: html_document
date: "2024-03-14"
---
```{r keras, eval=FALSE}
# One-time setup: install the R keras package and its Python backend.
# eval=FALSE so knitting the document does not re-run the installation
# on every render (install.packages() in an evaluated chunk is a defect).
install.packages("keras")
keras::install_keras()
```
```{r sim_lin}
library(tidyverse)

# Simulation parameters for y = b + a * x + noise
n <- 1000  # sample size
b <- 30    # intercept
a <- 5     # slope

# Seed fixed so the draws below are reproducible.
# Note: runif() is drawn before rnorm(), preserving the RNG stream order.
set.seed(123)
sim <- tibble(
  x = runif(n, min = -1, max = 1),
  y = b + a * x + rnorm(n)
)
```
```{r plot_lin}
# Scatter of the simulated data with an OLS fit overlaid.
ggplot(sim, aes(x, y)) +
  geom_point() +
  geom_smooth(method = lm)
```
```{r fit_lin}
# Baseline: ordinary least squares fit of the same linear model.
ols_model <- lm(y ~ x, data = sim)

# Residual standard deviation, then the fitted coefficients.
sigma(ols_model)
ols_model
```
```{r mod}
library(keras)

# Predictor matrix: keras expects a matrix, one column per feature.
x <- matrix(sim$x, ncol = 1)

# A single dense unit with a linear activation is exactly the OLS model:
# one weight (slope) plus one bias (intercept).
mod <- keras_model_sequential() %>%
  layer_dense(
    units = 1,
    activation = "linear",
    input_shape = ncol(x)
  )
```
```{r mod_compile}
# Configure training: stochastic gradient descent minimizing
# mean squared error (the OLS loss).
mod %>%
  compile(
    optimizer = "sgd",
    loss = "mse"
  )
```
```{r mod_hist}
# Train and keep the loss history.
# NOTE(review): the name `hist` masks graphics::hist() for the rest of
# the session; kept as-is because later chunks reference it.
hist <- mod %>%
  fit(
    x, sim$y,              # predictors and response
    batch_size = 16,       # mini-batch size per gradient step
    epochs = 20,           # passes through the full training data
    validation_split = .2  # hold out 20% for validation loss
  )
```
```{r compare_lin}
# Compare the trained network against the OLS baseline.
plot(hist)                # training/validation loss per epoch
keras::get_weights(mod)   # learned weight (slope) and bias (intercept)
coef(ols_model)           # OLS coefficients for comparison
hist                      # fit history summary (was `history`, which is
                          # the unrelated utils::history function)
sigma(ols_model)          # OLS residual SD ~ final training loss^0.5
```
```{r sim_sine}
# Second simulation: a nonlinear (sine) relationship the linear
# model cannot capture.
set.seed(321)
df <- tibble(
  x = seq(from = -1, to = 2 * pi, length = n),
  e = rnorm(n, sd = 0.2),
  y = sin(x) + e
)

# Predictor matrix for keras — built from the NEW data `df`
# (the original used sim$x, the old linear data, by mistake).
x <- matrix(df$x, ncol = 1)
```