-
Notifications
You must be signed in to change notification settings - Fork 0
/
cifar10_3_2_1_polynom_relu_trainable.py
31 lines (26 loc) · 1.25 KB
/
cifar10_3_2_1_polynom_relu_trainable.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
import cifar10_base
import keras
class FusedActivation(keras.layers.Layer):
    """Trainable polynomial activation followed by ReLU.

    Computes ``relu(c3*x^3 + c2*x^2 + c1*x)`` where ``c1``, ``c2`` and
    ``c3`` are scalar weights learned during training (each initialised
    to 1.0).
    """

    def __init__(self, **kwargs):
        # BUG FIX: the original defined `_init_` (single underscores), which
        # Python treats as an ordinary method, never called on construction.
        # It must be the `__init__` dunder so kwargs (e.g. `name`) reach the
        # base Layer.
        super(FusedActivation, self).__init__(**kwargs)

    def build(self, input_shape):
        # One trainable scalar per polynomial term; shape (1,) broadcasts
        # over the input tensor in call().
        self.coeff1 = self.add_weight(name="coeff1",
                                      shape=(1,),
                                      initializer=keras.initializers.Constant(value=1),
                                      trainable=True)
        self.coeff2 = self.add_weight(name="coeff2",
                                      shape=(1,),
                                      initializer=keras.initializers.Constant(value=1),
                                      trainable=True)
        self.coeff3 = self.add_weight(name="coeff3",
                                      shape=(1,),
                                      initializer=keras.initializers.Constant(value=1),
                                      trainable=True)
        super(FusedActivation, self).build(input_shape)

    def call(self, x):
        # Element-wise cubic polynomial: c3*x^3 + c2*x^2 + c1*x, then ReLU.
        # keras.layers.multiply performs the element-wise product of its
        # inputs, with the (1,) coefficients broadcast over x.
        pol = (keras.layers.multiply([x, x, x, self.coeff3])
               + keras.layers.multiply([x, x, self.coeff2])
               + keras.layers.multiply([x, self.coeff1]))
        return keras.activations.relu(pol)

    def compute_output_shape(self, input_shape):
        # Activation is element-wise, so the shape is unchanged.
        return input_shape
if __name__ == "__main__":
    # Presumably builds/trains the CIFAR-10 model from cifar10_base with
    # FusedActivation as its activation layer — cifar10_base is not visible
    # here, so confirm its contract before relying on this description.
    cifar10_base.compute_with_activation(FusedActivation)