forked from yulonglong/SentenceAnswerCNN
-
Notifications
You must be signed in to change notification settings - Fork 0
/
my_layers.py
90 lines (72 loc) · 2.92 KB
/
my_layers.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
import keras.backend as K
from keras.engine.topology import Layer
from keras.layers.convolutional import Convolution1D
class MeanOverTime(Layer):
    """Average a (batch, timesteps, features) tensor over the timestep axis.

    Output shape is (batch, features). With ``mask_zero=True`` the divisor is
    the number of timesteps whose feature vector is not entirely zero, so
    zero-padded timesteps do not dilute the mean (they contribute zero to the
    numerator and are excluded from the denominator).

    NOTE(review): the masked branch uses Theano-style tensor attributes
    (``x.sum``, ``x.shape``, ``K.equal(...).all``) rather than backend
    functions — it presumably assumes the Theano backend; confirm before
    running under TensorFlow.
    """

    def __init__(self, mask_zero=True, **kwargs):
        # mask_zero: if True, exclude all-zero timesteps from the divisor.
        self.mask_zero = mask_zero
        # Accept masks from upstream layers (e.g. an Embedding with mask_zero).
        self.supports_masking = True
        super(MeanOverTime, self).__init__(**kwargs)

    def call(self, x, mask=None):
        if self.mask_zero:
            # Divisor = total timesteps minus the count of timesteps whose
            # entire feature vector equals zero (keepdims keeps it broadcastable).
            return K.cast((x.sum(axis=1) / (x.shape[1] - K.equal(x, 0).all(axis=2).sum(axis=1, keepdims=True))), K.floatx())
        else:
            # Plain mean: every timestep counts, padding included.
            return K.mean(x, axis=1)

    def get_output_shape_for(self, input_shape):
        # (batch, timesteps, features) -> (batch, features).
        return (input_shape[0], input_shape[2])

    def compute_mask(self, x, mask):
        # The time axis is collapsed, so no mask propagates downstream.
        return None

    def get_config(self):
        # Serialize mask_zero so the layer can be rebuilt from its config.
        config = {'mask_zero': self.mask_zero}
        base_config = super(MeanOverTime, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
class MulConstant(Layer):
    """Multiply every element of the input tensor by a fixed scalar ``coef``.

    The input shape is preserved; ``coef`` defaults to 1 (identity).
    """

    def __init__(self, coef=1, **kwargs):
        self.coef = coef
        super(MulConstant, self).__init__(**kwargs)

    def call(self, x, mask=None):
        # Elementwise scaling; broadcasting covers any input shape.
        return self.coef * x

    def get_output_shape_for(self, input_shape):
        # Scaling leaves the shape untouched.
        return input_shape

    def get_config(self):
        # Merge coef into the base config so the layer round-trips through
        # get_config()/from_config().
        merged = dict(super(MulConstant, self).get_config())
        merged['coef'] = self.coef
        return merged
class Conv1DWithMasking(Convolution1D):
    """A Convolution1D that participates in Keras masking.

    The stock Convolution1D does not declare mask support, so an incoming
    timestep mask is lost. This subclass advertises mask support and forwards
    the mask unchanged, letting downstream mask-aware layers (e.g.
    MeanOverTime) still receive it.
    """

    def __init__(self, **kwargs):
        # Declare mask support before the base class configures itself.
        self.supports_masking = True
        super(Conv1DWithMasking, self).__init__(**kwargs)

    def compute_mask(self, x, mask):
        # Pass the incoming mask through as-is. NOTE(review): this assumes
        # the convolution preserves sequence length (e.g. 'same' border
        # mode) so the mask stays aligned — confirm at the call sites.
        return mask
################################################################################################################################################
## Deprecated functions
#
from keras.layers.core import Lambda
def MeanOverTime_depricated(mask_zero=True):
    """Deprecated Lambda-based mean-over-time; superseded by MeanOverTime.

    Returns a Lambda layer mapping (batch, timesteps, features) to
    (batch, features). With ``mask_zero=True`` all-zero timesteps are
    excluded from the divisor.
    """
    to_pooled_shape = lambda s: (s[0], s[2])
    if mask_zero:
        # Divide by the count of timesteps that are not entirely zero, so
        # zero-padded steps do not dilute the average.
        def masked_mean(x):
            return K.cast((x.sum(axis=1) / (x.shape[1] - K.equal(x, 0).all(axis=2).sum(axis=1, keepdims=True))), K.floatx())
        layer = Lambda(masked_mean, output_shape=to_pooled_shape)
    else:
        # Every timestep (padding included) contributes, preserving a notion
        # of sequence length in the average.
        layer = Lambda(lambda x: K.mean(x, axis=1), output_shape=to_pooled_shape)
    layer.supports_masking = True

    def compute_mask(input, mask):
        # Time axis is collapsed, so no mask flows downstream.
        return None

    layer.compute_mask = compute_mask
    return layer
def MulConstant_depricated(coef):
    """Deprecated Lambda-based scalar multiplier; superseded by MulConstant.

    Returns a shape-preserving Lambda layer computing ``coef * x``.
    """
    scale = lambda x: coef * x
    same_shape = lambda s: s
    return Lambda(scale, output_shape=same_shape)