import torch
import torch.nn as nn
import torch.nn.functional as F

class MiniConv(nn.Module):
    """
    One convolution, one linear layer.
    """

    def __init__(self, state_dim, action_dim):
        super(MiniConv, self).__init__()
        self.filter = 10
        # A 3x3 kernel with padding=1 preserves spatial size, so a feature
        # size of filter * 25 implies 5x5 input planes.
        self.feature_size = self.filter * 25
        self.conv1 = nn.Conv2d(state_dim, self.filter, padding=1, kernel_size=3)
        self.lin1 = nn.Linear(self.feature_size, 16)
        self.lin_final = nn.Linear(16, action_dim)

    def extract_features(self, x):
        x = F.relu(self.conv1(x))
        x = x.view(-1, self.feature_size)
        return F.relu(self.lin1(x))

    def forward(self, x):
        x = self.extract_features(x)
        # torch.sigmoid replaces the deprecated F.sigmoid
        return torch.sigmoid(self.lin_final(x))

class SmallConv(nn.Module):
    """
    One convolution (batch-normalized), two linear layers.
    """

    def __init__(self, state_dim, action_dim, n_filter, n_hidden):
        super(SmallConv, self).__init__()
        # 5x5 spatial input assumed, as in MiniConv.
        self.feature_size = n_filter * 25
        self.conv1 = nn.Conv2d(state_dim, n_filter, padding=1, kernel_size=3)
        self.conv1_bn = nn.BatchNorm2d(n_filter)
        self.lin1 = nn.Linear(self.feature_size, n_hidden)
        self.lin_final = nn.Linear(n_hidden, action_dim)

    def extract_features(self, x):
        # torch.tanh replaces the deprecated F.tanh
        x = torch.tanh(self.conv1_bn(self.conv1(x)))
        x = x.view(-1, self.feature_size)
        return F.relu(self.lin1(x))

    def forward(self, x):
        x = self.extract_features(x)
        return torch.sigmoid(self.lin_final(x))

class MoreLayer(nn.Module):
    """
    One convolution (batch-normalized), three linear layers.
    """

    def __init__(self, state_dim, action_dim, n_filter, n_hidden):
        super(MoreLayer, self).__init__()
        self.feature_size = n_filter * 25
        self.conv1 = nn.Conv2d(state_dim, n_filter, padding=1, kernel_size=3)
        self.conv1_bn = nn.BatchNorm2d(n_filter)
        self.lin1 = nn.Linear(self.feature_size, n_hidden)
        self.lin2 = nn.Linear(n_hidden, n_hidden)
        self.lin_final = nn.Linear(n_hidden, action_dim)

    def extract_features(self, x):
        x = torch.tanh(self.conv1_bn(self.conv1(x)))
        x = x.view(-1, self.feature_size)
        x = F.relu(self.lin1(x))
        return F.relu(self.lin2(x))

    def forward(self, x):
        x = self.extract_features(x)
        return torch.sigmoid(self.lin_final(x))

class FullConnect(nn.Module):
    """
    Fully connected baseline: no convolution, one hidden linear layer.
    """

    def __init__(self, state_dim, action_dim, n_hidden=256):
        super(FullConnect, self).__init__()
        # Flattens the raw state planes directly (5x5 spatial input assumed).
        self.feature_size = state_dim * 25
        self.lin1 = nn.Linear(self.feature_size, n_hidden)
        self.lin_final = nn.Linear(n_hidden, action_dim)

    def extract_features(self, x):
        x = x.view(-1, self.feature_size)
        return F.relu(self.lin1(x))

    def forward(self, x):
        x = self.extract_features(x)
        return torch.sigmoid(self.lin_final(x))
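
# A minimal smoke test: a sketch assuming 5x5 board-like inputs, which the
# feature_size = n_filter * 25 layout implies given the size-preserving
# 3x3/padding-1 convolutions. The batch, state_dim, action_dim, n_filter,
# and n_hidden values below are illustrative, not taken from the original.
if __name__ == "__main__":
    batch, state_dim, action_dim = 4, 3, 6
    x = torch.randn(batch, state_dim, 5, 5)
    nets = [
        MiniConv(state_dim, action_dim),
        SmallConv(state_dim, action_dim, n_filter=10, n_hidden=16),
        MoreLayer(state_dim, action_dim, n_filter=10, n_hidden=16),
        FullConnect(state_dim, action_dim),
    ]
    for net in nets:
        net.eval()  # put BatchNorm layers in eval mode for a single pass
        out = net(x)
        assert out.shape == (batch, action_dim), out.shape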