
Commit

Added support for missing activations
* Added ThresholdedReLU, selu, softplus, softsign and hard sigmoid
utsavgarg committed Jul 17, 2017
1 parent 10d0e2b commit 7bbda56
Showing 5 changed files with 188 additions and 3 deletions.
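
For orientation, here is a minimal, hedged sketch of a Keras 2 model that exercises the newly supported activations (layer sizes are illustrative only): ThresholdedReLU is an advanced-activation layer with a theta parameter, while selu, softplus, softsign and hard_sigmoid are plain string activations, and model.to_json() produces the kind of JSON the import path (model_from_json in import_json.py below) works from.

from keras.models import Sequential
from keras.layers import Dense, Activation
from keras.layers.advanced_activations import ThresholdedReLU

# Illustrative model only; shapes and widths are arbitrary.
model = Sequential()
model.add(Dense(64, input_shape=(100,)))
model.add(ThresholdedReLU(theta=1.0))   # advanced activation with a parameter
model.add(Dense(64))
model.add(Activation('selu'))           # the remaining four are plain string activations
model.add(Dense(64))
model.add(Activation('softplus'))
model.add(Dense(64))
model.add(Activation('softsign'))
model.add(Dense(10))
model.add(Activation('hard_sigmoid'))

model_json = model.to_json()            # JSON in this format is what the importer consumes
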
156 changes: 156 additions & 0 deletions ide/static/js/data.js
@@ -2314,6 +2314,42 @@ export default {
    },
    learn: false
  },
  ThresholdedReLU: {
    name: 'Thresholded ReLU',
    color: '#009688',
    endpoint: {
      src: ['Bottom'],
      trg: ['Top']
    },
    params: {
      inplace: {
        name: 'Inplace operation',
        value: true,
        type: 'checkbox',
        required: false
      },
      theta: {
        name: 'Theta',
        value: 1,
        type: 'float',
        required: false
      },
      caffe: {
        name: 'Available Caffe',
        value: false,
        type: 'checkbox',
        required: false
      }
    },
    props: {
      name: {
        name: 'Name',
        value: '',
        type: 'text'
      }
    },
    learn: false
  },
  Sigmoid: {
    name: 'sigmoid',
    color: '#009688',
@@ -2374,6 +2410,126 @@ export default {
    },
    learn: false
  },
  SELU: {
    name: 'selu',
    color: '#009688',
    endpoint: {
      src: ['Bottom'],
      trg: ['Top']
    },
    params: {
      inplace: {
        name: 'Inplace operation',
        value: true,
        type: 'checkbox',
        required: false
      },
      caffe: {
        name: 'Available Caffe',
        value: false,
        type: 'checkbox',
        required: false
      }
    },
    props: {
      name: {
        name: 'Name',
        value: '',
        type: 'text'
      }
    },
    learn: false
  },
  Softplus: {
    name: 'softplus',
    color: '#009688',
    endpoint: {
      src: ['Bottom'],
      trg: ['Top']
    },
    params: {
      inplace: {
        name: 'Inplace operation',
        value: true,
        type: 'checkbox',
        required: false
      },
      caffe: {
        name: 'Available Caffe',
        value: false,
        type: 'checkbox',
        required: false
      }
    },
    props: {
      name: {
        name: 'Name',
        value: '',
        type: 'text'
      }
    },
    learn: false
  },
  Softsign: {
    name: 'softsign',
    color: '#009688',
    endpoint: {
      src: ['Bottom'],
      trg: ['Top']
    },
    params: {
      inplace: {
        name: 'Inplace operation',
        value: true,
        type: 'checkbox',
        required: false
      },
      caffe: {
        name: 'Available Caffe',
        value: false,
        type: 'checkbox',
        required: false
      }
    },
    props: {
      name: {
        name: 'Name',
        value: '',
        type: 'text'
      }
    },
    learn: false
  },
  HardSigmoid: {
    name: 'hard sigmoid',
    color: '#009688',
    endpoint: {
      src: ['Bottom'],
      trg: ['Top']
    },
    params: {
      inplace: {
        name: 'Inplace operation',
        value: true,
        type: 'checkbox',
        required: false
      },
      caffe: {
        name: 'Available Caffe',
        value: false,
        type: 'checkbox',
        required: false
      }
    },
    props: {
      name: {
        name: 'Name',
        value: '',
        type: 'text'
      }
    },
    learn: false
  },
  AbsVal: {
    name: 'absval',
    color: '#009688',
5 changes: 5 additions & 0 deletions keras_app/views/export_json.py
@@ -58,6 +58,11 @@ def exportJson(request):
        'ELU': activation,
        'Sigmoid': activation,
        'TanH': activation,
        'ThresholdedReLU': activation,
        'SELU': activation,
        'Softplus': activation,
        'Softsign': activation,
        'HardSigmoid': activation,
        'Flatten': flatten,
        'Reshape': reshape,
        'Softmax': activation,
7 changes: 6 additions & 1 deletion keras_app/views/import_json.py
@@ -4,7 +4,7 @@
from django.views.decorators.csrf import csrf_exempt
from layers_import import Input, Convolution, Deconvolution, Pooling, Dense, Dropout, Embed,\
    Recurrent, BatchNorm, Activation, LeakyReLU, PReLU, ELU, Scale, Flatten, Reshape, Concat, \
-    Eltwise, Padding, Upsample, LocallyConnected, DepthwiseConv
+    Eltwise, Padding, Upsample, LocallyConnected, DepthwiseConv, ThresholdedReLU
from keras.models import model_from_json, Sequential


@@ -56,9 +56,14 @@ def importJson(request):
        'relu': Activation,
        'softmax': Activation,
        'elu': ELU,
        'selu': Activation,
        'softplus': Activation,
        'softsign': Activation,
        'hard_sigmoid': Activation,
        'tanh': Activation,
        'sigmoid': Activation,
        'LeakyReLU': LeakyReLU,
        'ThresholdedReLU': ThresholdedReLU,
        'PReLU': PReLU,
        'Flatten': Flatten,
        'Reshape': Reshape,
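
The new map entries mix two kinds of keys: 'selu', 'softplus', 'softsign' and 'hard_sigmoid' name activation functions and route to the generic Activation importer, while 'ThresholdedReLU' (like 'LeakyReLU' and 'PReLU') names an advanced-activation layer class with its own importer. A hedged sketch of that distinction at the Keras 2 level, independent of Fabrik's dispatch code:

from keras.layers import Activation
from keras.layers.advanced_activations import ThresholdedReLU

plain = Activation('softsign')
advanced = ThresholdedReLU(theta=1.0)

# For plain activations the layer class is always 'Activation' and the function
# name carries the information; advanced activations are their own layer classes.
print(plain.__class__.__name__, plain.activation.__name__)  # Activation softsign
print(advanced.__class__.__name__)                          # ThresholdedReLU
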
12 changes: 11 additions & 1 deletion keras_app/views/layers_export.py
@@ -10,7 +10,7 @@
from keras.layers import SimpleRNN, LSTM, GRU
from keras.layers import Embedding
from keras.layers import add, multiply, maximum, concatenate, average, dot
-from keras.layers.advanced_activations import LeakyReLU, PReLU, ELU
+from keras.layers.advanced_activations import LeakyReLU, PReLU, ELU, ThresholdedReLU
from keras.layers import BatchNormalization
from keras.layers import Input
from keras import regularizers
@@ -472,12 +472,22 @@ def activation(layer, layer_in, layerId):
        out[layerId] = PReLU()(*layer_in)
    elif (layer['info']['type'] == 'ELU'):
        out[layerId] = ELU(alpha=layer['params']['alpha'])(*layer_in)
    elif (layer['info']['type'] == 'ThresholdedReLU'):
        out[layerId] = ThresholdedReLU(theta=layer['params']['theta'])(*layer_in)
    elif (layer['info']['type'] == 'Sigmoid'):
        out[layerId] = Activation('sigmoid')(*layer_in)
    elif (layer['info']['type'] == 'TanH'):
        out[layerId] = Activation('tanh')(*layer_in)
    elif (layer['info']['type'] == 'Softmax'):
        out[layerId] = Activation('softmax')(*layer_in)
    elif (layer['info']['type'] == 'SELU'):
        out[layerId] = Activation('selu')(*layer_in)
    elif (layer['info']['type'] == 'Softplus'):
        out[layerId] = Activation('softplus')(*layer_in)
    elif (layer['info']['type'] == 'Softsign'):
        out[layerId] = Activation('softsign')(*layer_in)
    elif (layer['info']['type'] == 'HardSigmoid'):
        out[layerId] = Activation('hard_sigmoid')(*layer_in)
    return out


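
As a standalone, hedged illustration of what the new branches build (Keras 2 functional API; the layer dict shape is inferred from the diff rather than the full Fabrik layer schema):

from keras.layers import Input, Activation
from keras.layers.advanced_activations import ThresholdedReLU

layer_in = [Input(shape=(64,))]                  # stand-in for the real layer_in list

# 'ThresholdedReLU' branch: theta comes from layer['params']['theta']
out_trelu = ThresholdedReLU(theta=1.0)(*layer_in)

# 'SELU' branch; Softplus, Softsign and HardSigmoid differ only in the string passed
out_selu = Activation('selu')(*layer_in)
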
11 changes: 10 additions & 1 deletion keras_app/views/layers_import.py
@@ -319,7 +319,11 @@ def Activation(layer):
        'softmax': 'Softmax',
        'relu': 'ReLU',
        'tanh': 'TanH',
-        'sigmoid': 'Sigmoid'
+        'sigmoid': 'Sigmoid',
        'selu': 'SELU',
        'softplus': 'Softplus',
        'softsign': 'Softsign',
        'hard_sigmoid': 'HardSigmoid'
    }
    if (layer.__class__.__name__ == 'Activation'):
        return jsonLayer(activationMap[layer.activation.func_name], {}, layer)
@@ -338,6 +342,11 @@ def PReLU(layer):
    return jsonLayer('PReLU', {}, layer)


def ThresholdedReLU(layer):
    params = {'theta': layer.theta.tolist()}
    return jsonLayer('ThresholdedReLU', params, layer)


def ELU(layer):
    params = {'alpha': layer.alpha}
    return jsonLayer('ELU', params, layer)
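
A hedged sketch of what the new ThresholdedReLU importer reads off a Keras layer instance (jsonLayer itself is defined elsewhere in layers_import.py and is not reproduced here). In Keras 2.0.x theta is stored as a numpy scalar, which is why the diff calls .tolist():

from keras.layers.advanced_activations import ThresholdedReLU

keras_layer = ThresholdedReLU(theta=0.5)
theta = keras_layer.theta
# tolist() on a numpy scalar yields a plain Python float; fall back to float()
# in case a Keras version stores theta as a plain number already.
theta_value = theta.tolist() if hasattr(theta, 'tolist') else float(theta)
print(theta_value)  # 0.5
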
