# _classifier.py

from torch import nn
from scvi.nn import FCLayers


class Classifier(nn.Module):
    """Basic fully-connected NN classifier.

    Parameters
    ----------
    n_input
        Number of input dimensions.
    n_hidden
        Number of nodes in the hidden layer(s). If `0`, the classifier consists of
        a single linear layer.
    n_labels
        Number of output dimensions.
    n_layers
        Number of hidden layers. If `0`, the classifier consists of a single
        linear layer.
    dropout_rate
        Dropout rate applied to the hidden layers.
    logits
        Whether to return logits instead of softmax probabilities.
    use_batch_norm
        Whether to use batch norm in the hidden layers.
    use_layer_norm
        Whether to use layer norm in the hidden layers.
    activation_fn
        A valid activation function from :mod:`torch.nn`.
    **kwargs
        Keyword arguments passed into :class:`~scvi.nn.FCLayers`.
    """

    def __init__(
        self,
        n_input: int,
        n_hidden: int = 128,
        n_labels: int = 5,
        n_layers: int = 1,
        dropout_rate: float = 0.1,
        logits: bool = False,
        use_batch_norm: bool = True,
        use_layer_norm: bool = False,
        activation_fn: nn.Module = nn.ReLU,
        **kwargs,
    ):
        super().__init__()
        self.logits = logits
        layers = []

        # Build the hidden stack only when both a hidden width and a hidden
        # depth are requested; otherwise fall back to a single linear layer.
        if n_hidden > 0 and n_layers > 0:
            layers.append(
                FCLayers(
                    n_in=n_input,
                    n_out=n_hidden,
                    n_layers=n_layers,
                    n_hidden=n_hidden,
                    dropout_rate=dropout_rate,
                    use_batch_norm=use_batch_norm,
                    use_layer_norm=use_layer_norm,
                    activation_fn=activation_fn,
                    **kwargs,
                )
            )
        else:
            # No hidden layers: the output head maps the inputs directly.
            n_hidden = n_input

        # Output head: linear projection to `n_labels`, optionally followed by
        # a softmax when probabilities rather than logits are requested.
        layers.append(nn.Linear(n_hidden, n_labels))
        if not logits:
            layers.append(nn.Softmax(dim=-1))
        self.classifier = nn.Sequential(*layers)

    def forward(self, x):
        """Forward computation."""
        return self.classifier(x)
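
# A minimal usage sketch (not part of the original module): the dimensions and
# hyperparameters below are illustrative assumptions. With `logits=False` the
# classifier ends in a softmax, so each output row sums to one; with
# `n_layers=0` it reduces to a single linear layer over the raw inputs.
if __name__ == "__main__":
    import torch

    clf = Classifier(n_input=100, n_hidden=128, n_labels=5, n_layers=2)
    x = torch.randn(16, 100)  # assumed batch of 16 samples with 100 features
    probs = clf(x)  # softmax probabilities, shape (16, 5)
    print(probs.shape, probs.sum(dim=-1))  # each row sums to ~1

    # Logit-only variant with no hidden layers: a single nn.Linear(100, 5).
    linear_clf = Classifier(n_input=100, n_labels=5, n_layers=0, logits=True)
    print(linear_clf(x).shape)  # raw logits, shape (16, 5)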