binarized_modules.py
"""
PyTorch-XNOR-GEMM-Extention
Authors: Tairen (tairenpiao@gmail.com)
This code can be used only for research purposes.
For other purposes (e.g., commercial), please contact me.
"""
import torch
import math
import torch.nn as nn
# Compiled XNOR GEMM extensions; xnor_linear() below requires the CUDA backend to be built and imported.
# import xnor_cpu
# import xnor_cuda

def Binarize(tensor):
    # Map every element to +1 if it is positive and to -1 otherwise.
    binarized = torch.where(tensor > 0,
                            torch.ones_like(tensor, dtype=torch.float32),
                            torch.full_like(tensor, -1, dtype=torch.float32))
    return binarized

def xnor_linear(input, weight, bias=None):
    # XNOR GEMM on binarized input and packed weight; requires the xnor_cuda
    # extension (see the commented import above) to be built and available.
    output = xnor_cuda.xnor_gemm(input, weight)
    if bias is not None:
        output += bias
    return output

class BinarizeLinear_training(nn.Linear):

    def __init__(self, *kargs, **kwargs):
        super(BinarizeLinear_training, self).__init__(*kargs, **kwargs)

    def forward(self, input):
        # Binarize the activations in place.
        input.data = Binarize(input.data)
        # Keep a full-precision copy of the weights in `org`; the layer itself
        # always multiplies with the binarized weights.
        if not hasattr(self.weight, 'org'):
            self.weight.org = self.weight.data.clone()
        if self.weight.data.dtype == torch.float:
            self.weight.data = Binarize(self.weight.org)
        out = nn.functional.linear(input, self.weight, self.bias)
        return out
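

# A minimal sketch (not part of the original module) of how BinarizeLinear_training
# is typically driven. The helper name and the clamp range are assumptions based on
# common BinaryNet-style training loops: gradients flow through the binarized
# weights, but the optimizer update is applied to the full-precision copy in `org`.
def binarynet_training_step(model, optimizer, loss):
    optimizer.zero_grad()
    loss.backward()
    # Restore the full-precision weights before the update.
    for p in model.parameters():
        if hasattr(p, 'org'):
            p.data.copy_(p.org)
    optimizer.step()
    # Save the updated full-precision weights, clamped to [-1, 1], for the next forward pass.
    for p in model.parameters():
        if hasattr(p, 'org'):
            p.org.copy_(p.data.clamp_(-1, 1))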

class BinarizeLinear_inference(nn.Module):
    """
    BinarizeLinear_inference class.
    A replacement for nn.Linear used at inference time: the forward pass runs the
    custom XNOR GEMM kernel on binarized inputs instead of a floating-point matmul.
    """

    def __init__(self, in_features, out_features, bias=True):
        super(BinarizeLinear_inference, self).__init__()
        self.in_features = in_features
        self.out_features = out_features
        # Note: stored as (in_features, out_features), the transpose of nn.Linear's layout.
        self.weight = nn.Parameter(torch.Tensor(in_features, out_features))
        if bias:
            self.bias = nn.Parameter(torch.Tensor(out_features))
        else:
            self.register_parameter('bias', None)

    def forward(self, input):
        input.data = Binarize(input.data)
        # `quantized_weight` is not created in __init__; it is expected to be
        # attached to the module (the binarized/packed weights) before inference.
        out = xnor_linear(input, self.quantized_weight, self.bias)
        # out = nn.functional.linear(input, self.quantized_weight, self.bias)
        return out
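

# A minimal usage sketch. Assumptions: the layer sizes below are illustrative, and
# the inference path additionally needs the xnor_cuda extension built plus a packed
# `quantized_weight` attached to the module, so only the training layer is exercised.
if __name__ == "__main__":
    layer = BinarizeLinear_training(784, 512)
    x = torch.randn(4, 784)
    y = layer(x)       # binarized activations/weights, float GEMM via nn.functional.linear
    print(y.shape)     # expected: torch.Size([4, 512])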