Skip to content

Commit

Permalink
changed weighted add
Browse files Browse the repository at this point in the history
  • Loading branch information
haifeng-jin committed Jun 27, 2018
1 parent 729b8e5 commit b098b08
Show file tree
Hide file tree
Showing 4 changed files with 13 additions and 46 deletions.
6 changes: 3 additions & 3 deletions autokeras/graph.py
Expand Up @@ -10,7 +10,7 @@
from autokeras import constant
from autokeras.layer_transformer import wider_bn, wider_next_conv, wider_next_dense, wider_pre_dense, wider_pre_conv, \
deeper_conv_block, dense_to_deeper_block, add_noise
from autokeras.layers import StubConcatenate, StubWeightedAdd, StubConv, is_layer, layer_width, \
from autokeras.layers import StubConcatenate, StubAdd, StubConv, is_layer, layer_width, \
to_real_layer
from autokeras.stub import to_stub_model

Expand Down Expand Up @@ -410,7 +410,7 @@ def to_add_skip_model(self, start_id, end_id):

# Add the weighted add layer.
new_node_id = self._add_new_node()
layer = StubWeightedAdd()
layer = StubAdd()
if self.weighted:
layer.set_weights([np.float32(1.0)])

Expand Down Expand Up @@ -526,7 +526,7 @@ def produce_model(self):
for u, layer_id in self.reverse_adj_list[v]:
layer = self.layer_list[layer_id]

if isinstance(layer, (StubWeightedAdd, StubConcatenate)):
if isinstance(layer, (StubAdd, StubConcatenate)):
edge_input_tensor = list(map(lambda x: node_list[x],
self.layer_id_to_input_node_ids[layer_id]))
else:
Expand Down
6 changes: 3 additions & 3 deletions autokeras/layer_transformer.py
Expand Up @@ -2,7 +2,7 @@

from autokeras import constant
from autokeras.layers import StubConv, StubBatchNormalization, StubActivation, StubDropout, StubDense, \
StubWeightedAdd
StubAdd

NOISE_RATIO = 1e-4

Expand Down Expand Up @@ -237,10 +237,10 @@ def wider_weighted_add(layer, n_add, weighted=True):
The wider weighted add layer
"""
if not weighted:
return StubWeightedAdd()
return StubAdd()

n_add += 0
new_layer = StubWeightedAdd()
new_layer = StubAdd()
new_layer.set_weights(layer.get_weights())
return new_layer

Expand Down
43 changes: 5 additions & 38 deletions autokeras/layers.py
@@ -1,43 +1,10 @@
from keras import backend
from keras.engine import InputLayer
from keras.layers import Add, Conv2D, Conv3D, Conv1D, Dense, BatchNormalization, Concatenate, Dropout, Activation, \
Flatten, MaxPooling1D, MaxPooling2D, MaxPooling3D, GlobalAveragePooling1D, GlobalAveragePooling2D, \
GlobalAveragePooling3D
from keras.regularizers import l2


class WeightedAdd(Add):
    """Add layer that blends exactly two inputs with one learnable scalar.

    Overrides Keras's ``Add`` merge so the output becomes
    ``w * x[0] + (1 - w) * x[1]`` where ``w`` is a trainable scalar.

    Attributes:
        weight: Trainable scalar backend variable, initialized to 1.0.
        one: Constant 1.0, used to form ``(1 - weight)``.
        kernel: Always ``None``; kept for Keras layer-introspection compatibility.
    """

    def __init__(self, **kwargs):
        """Create the trainable scalar and register it with the layer."""
        super(WeightedAdd, self).__init__(**kwargs)
        self.weight = backend.variable(1.0)
        self.one = backend.constant(1.0)
        self.kernel = None
        # Registered manually so the optimizer updates `weight` during training.
        self._trainable_weights.append(self.weight)

    def call(self, x, **kwargs):
        """Return ``w*x[0] + (1-w)*x[1]``; assumes exactly two inputs in ``x``."""
        # NOTE(review): reaches into `backend.tf` directly, so this layer is
        # TensorFlow-backend-only — confirm no other backend is supported here.
        a = backend.tf.scalar_mul(self.weight, x[0])
        b = backend.tf.scalar_mul(backend.tf.subtract(self.one, self.weight), x[1])
        c = backend.tf.add(a, b)
        return c

    def compute_output_shape(self, input_shape):
        """Output shape equals the (shared) input shape — element-wise merge."""
        return input_shape


class StubLayer:
def __init__(self, input_node=None, output_node=None):
self.input = input_node
Expand Down Expand Up @@ -94,7 +61,7 @@ class StubConcatenate(StubAggregateLayer):
pass


class StubWeightedAdd(StubAggregateLayer):
class StubAdd(StubAggregateLayer):
pass


Expand Down Expand Up @@ -142,8 +109,8 @@ def is_layer(layer, layer_type):
return isinstance(layer, (StubBatchNormalization, BatchNormalization))
if layer_type == 'Concatenate':
return isinstance(layer, (StubConcatenate, Concatenate))
if layer_type == 'WeightedAdd':
return isinstance(layer, (StubWeightedAdd, WeightedAdd))
if layer_type == 'Add':
return isinstance(layer, (StubAdd, Add))
if layer_type == 'Pooling':
return isinstance(layer, StubPooling) or is_pooling_layer(layer)
if layer_type == 'Dropout':
Expand Down Expand Up @@ -217,7 +184,7 @@ def to_real_layer(layer):
if is_layer(layer, 'Concatenate'):
return Concatenate()
if is_layer(layer, 'WeightedAdd'):
return WeightedAdd()
return Add()
if is_layer(layer, 'Dropout'):
return Dropout(layer.rate)
if is_layer(layer, 'Activation'):
Expand All @@ -234,7 +201,7 @@ def to_stub_layer(layer, input_id, output_id):
elif is_layer(layer, 'Dense'):
temp_stub_layer = StubDense(layer.units, layer.activation, input_id, output_id)
elif is_layer(layer, 'WeightedAdd'):
temp_stub_layer = StubWeightedAdd(input_id, output_id)
temp_stub_layer = StubAdd(input_id, output_id)
elif is_layer(layer, 'Concatenate'):
temp_stub_layer = StubConcatenate(input_id, output_id)
elif is_layer(layer, 'BatchNormalization'):
Expand Down
4 changes: 2 additions & 2 deletions tests/test_layer_transformer.py
Expand Up @@ -35,10 +35,10 @@ def test_wider_bn():


def test_wider_weighted_add():
layer = StubWeightedAdd()
layer = StubAdd()
layer.set_weights(get_add_skip_model().layers[13].get_weights())
new_layer = wider_weighted_add(layer, 4)
assert isinstance(new_layer, StubWeightedAdd)
assert isinstance(new_layer, StubAdd)


def test_wider_next_dense():
Expand Down

0 comments on commit b098b08

Please sign in to comment.