This repository has been archived by the owner on Jan 3, 2024. It is now read-only.

Merge pull request #45 from and1can/activations-should-be-case-insensitive

[🔶 Change request]: Activations should be case-insensitive
matiasvlevi committed Dec 6, 2021
2 parents 7b14202 + 40dab3c commit 859b0a9
Showing 11 changed files with 74 additions and 55 deletions.
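In short, every path that accepts an activation name now lowercases it (via toLocaleLowerCase) before the lookup in the activations object, so built-in names match regardless of casing. A minimal sketch of the behavior this merge enables, assuming the usual dannjs exports (the network shape here is illustrative):

```js
const Dann = require('dannjs').dann;

const nn = new Dann(4, 2);
// All of these now resolve to the same lowercase keys ('leakyrelu', 'tanh'):
nn.addHiddenLayer(8, 'leakyReLU');
nn.addHiddenLayer(8, 'LEAKYRELU');
nn.outputActivation('TanH');
nn.makeWeights();
```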
42 changes: 24 additions & 18 deletions build/dann.js
@@ -206,38 +206,38 @@ let activations = {
     let x1 = 1 / (1 + Math.exp(-x));
     return x1 * (1 - x1);
   },
-  siLU(x) {
+  silu(x) {
     return x / (1 + Math.exp(-x));
   },
-  siLU_d(x) {
+  silu_d(x) {
     let top = 1 + Math.exp(-x) + x * Math.exp(-x);
     let down = Math.pow(1 + Math.exp(-x), 2);
     return top / down;
   },
-  tanH(x) {
+  tanh(x) {
     let top = Math.exp(x) - Math.exp(-x);
     let down = Math.exp(x) + Math.exp(-x);
     return top / down;
   },
-  tanH_d(x) {
+  tanh_d(x) {
     let numer = Math.pow(Math.exp(2 * x) - 1, 2);
     let denom = Math.pow(Math.exp(2 * x) + 1, 2);
     return 1 - numer / denom;
   },
-  leakyReLU(x) {
+  leakyrelu(x) {
     return Math.max(x, x * 0.01);
   },
-  leakyReLU_d(x) {
+  leakyrelu_d(x) {
     if (x >= 0) {
       return 1;
     } else {
       return 0.01;
     }
   },
-  reLU(x) {
+  relu(x) {
     return Math.max(x, 0);
   },
-  reLU_d(x) {
+  relu_d(x) {
     if (x >= 0) {
       return 1;
     } else {
@@ -282,7 +282,7 @@ let activations = {
     return 1 / (1 + Math.exp(-x));
   },
   // Experimental
-  leakyReLUCapped(x) {
+  leakyrelucapped(x) {
     if (x >= 0 && x <= 6) {
       return x;
     } else if (x < 0) {
@@ -291,7 +291,7 @@ let activations = {
       return 6;
     }
   },
-  leakyReLUCapped_d(x) {
+  leakyrelucapped_d(x) {
     if (x >= 0 && x <= 6) {
       return 1;
     } else if (x < 0) {
@@ -300,10 +300,10 @@ let activations = {
       return 0;
     }
   },
-  leakySigmoid(x) {
+  leakysigmoid(x) {
     return 1 / (1 + Math.exp(-x)) + x / 100;
   },
-  leakySigmoid_d(x) {
+  leakysigmoid_d(x) {
     return Math.exp(-x) / Math.pow(Math.exp(-x) + 1, 2) + 1 / 100;
   },
 };
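The renames above change only the keys; the math is untouched. For instance, silu_d is the closed-form derivative of silu(x) = x / (1 + e^-x), which a quick finite-difference check confirms (standalone sketch, not part of the diff):

```js
const silu = (x) => x / (1 + Math.exp(-x));
const silu_d = (x) => {
  const top = 1 + Math.exp(-x) + x * Math.exp(-x);
  const down = Math.pow(1 + Math.exp(-x), 2);
  return top / down;
};

// Central difference (f(x+h) - f(x-h)) / 2h should track silu_d closely.
const h = 1e-5;
for (const x of [-2, -0.5, 0, 1, 3]) {
  const numeric = (silu(x + h) - silu(x - h)) / (2 * h);
  console.log(x, silu_d(x), numeric); // agree to roughly 1e-9 here
}
```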
@@ -1574,7 +1574,8 @@ Layer.selectPools = function selectPools(arr, f, s, w, h) {
  * @param {String} act The activation function name
  */
 Layer.prototype.setFunc = function setFunc(act) {
-  let obj = Layer.stringTofunc(act);
+  const lowerCaseAct = act.toLocaleLowerCase();
+  let obj = Layer.stringTofunc(lowerCaseAct);
   if (obj !== undefined) {
     this.actname = obj.name;
     this.actname_d = obj.name_d;
@@ -1594,7 +1595,7 @@ Layer.prototype.setFunc = function setFunc(act) {
  * @return {Object} Object containing information about an activation function.
  */
 Layer.stringTofunc = function stringTofunc(str) {
-  let act = str;
+  let act = str.toLocaleLowerCase();
   let der = act + '_d';
   let func;
   let func_d;
@@ -1933,7 +1934,8 @@ Dann.prototype.addDropout = function addDropout(rate) {
  */
 Dann.prototype.addHiddenLayer = function addHiddenLayer(size, act) {
   if (act !== undefined) {
-    if (activations[act] === undefined) {
+    const lowerCaseAct = act.toLocaleLowerCase();
+    if (activations[lowerCaseAct] === undefined) {
       if (typeof act === 'string') {
         DannError.error(
           "'" +
@@ -1947,6 +1949,7 @@ Dann.prototype.addHiddenLayer = function addHiddenLayer(size, act) {
   } else {
     act = 'sigmoid';
   }
+
   this.arch.splice(this.arch.length - 1, 0, size);
   let layer = new Layer('hidden', size, act);
   this.Layers.splice(this.Layers.length - 1, 0, layer);
@@ -2076,7 +2079,8 @@ Dann.prototype.makeWeights = function makeWeights(arg1, arg2) {
  * </code>
  */
 Dann.prototype.outputActivation = function outputActivation(act) {
-  if (activations[act] === undefined && !isBrowser) {
+  const lowerCaseAct = act.toLocaleLowerCase();
+  if (activations[lowerCaseAct] === undefined && !isBrowser) {
     if (typeof act === 'string') {
       DannError.error(
         "'" +
@@ -2093,6 +2097,7 @@ Dann.prototype.outputActivation = function outputActivation(act) {
       return;
     }
   }
+
   this.Layers[this.Layers.length - 1].setFunc(act);
 };

@@ -3271,8 +3276,9 @@ Add.activation = function (name, activation, derivative) {
     );
     return;
   } else {
-    activations[name] = activation;
-    activations[name + '_d'] = derivative;
+    const lowerCaseAct = name.toLocaleLowerCase();
+    activations[lowerCaseAct] = activation;
+    activations[lowerCaseAct + '_d'] = derivative;
     return;
   }
 };
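One caveat about the normalization choice itself: toLocaleLowerCase applies the host's locale rules, which differ from toLowerCase for a handful of characters, the classic case being the dotted/dotless I in Turkish locales. None of the built-in names here are affected, so the two methods behave identically for this library, but the difference is easy to demonstrate (assuming a runtime with ECMA-402 locale support):

```js
'SIGMOID'.toLowerCase();              // 'sigmoid'
'SIGMOID'.toLocaleLowerCase('tr-TR'); // 'sıgmoıd' — dotless ı, no longer a valid key
```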
2 changes: 1 addition & 1 deletion build/dann.min.js

Large diffs are not rendered by default.

5 changes: 3 additions & 2 deletions src/classes/add/methods/activation.js
@@ -35,8 +35,9 @@ Add.activation = function (name, activation, derivative) {
     );
     return;
   } else {
-    activations[name] = activation;
-    activations[name + '_d'] = derivative;
+    const lowerCaseAct = name.toLocaleLowerCase();
+    activations[lowerCaseAct] = activation;
+    activations[lowerCaseAct + '_d'] = derivative;
     return;
   }
 };
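Since registration now lowercases the stored key and every lookup site lowercases the query, custom activations round-trip in any casing. A usage sketch, assuming Add is reachable as in this build and using a made-up 'sqrtAct' function:

```js
// Stored as activations['sqrtact'] and activations['sqrtact_d'].
Add.activation(
  'sqrtAct',
  (x) => (x >= 0 ? Math.sqrt(x) : 0),
  (x) => (x > 0 ? 1 / (2 * Math.sqrt(x)) : 0)
);

const nn = new Dann(4, 2);
nn.addHiddenLayer(6, 'SQRTACT'); // found via the lowercased lookup
```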
4 changes: 3 additions & 1 deletion src/classes/dann/methods/Create/addHiddenLayer.js
@@ -71,7 +71,8 @@
  */
 Dann.prototype.addHiddenLayer = function addHiddenLayer(size, act) {
   if (act !== undefined) {
-    if (activations[act] === undefined) {
+    const lowerCaseAct = act.toLocaleLowerCase();
+    if (activations[lowerCaseAct] === undefined) {
       if (typeof act === 'string') {
         DannError.error(
           "'" +
@@ -85,6 +86,7 @@ Dann.prototype.addHiddenLayer = function addHiddenLayer(size, act) {
   } else {
     act = 'sigmoid';
   }
+
   this.arch.splice(this.arch.length - 1, 0, size);
   let layer = new Layer('hidden', size, act);
   this.Layers.splice(this.Layers.length - 1, 0, layer);
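Note that validation happens on the lowercased name while the error message still echoes the caller's original string, so a genuine typo is reported as typed. Illustrative sketch (the exact error wording is truncated in this diff):

```js
const nn = new Dann(4, 2);
nn.addHiddenLayer(8, 'TANH');  // ok: 'TANH' lowercases to the defined 'tanh'
nn.addHiddenLayer(8, 'tanhh'); // DannError mentioning 'tanhh' — no such key in any casing
```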
4 changes: 3 additions & 1 deletion src/classes/dann/methods/Create/outputActivation.js
@@ -66,7 +66,8 @@
  * </code>
  */
 Dann.prototype.outputActivation = function outputActivation(act) {
-  if (activations[act] === undefined && !isBrowser) {
+  const lowerCaseAct = act.toLocaleLowerCase();
+  if (activations[lowerCaseAct] === undefined && !isBrowser) {
     if (typeof act === 'string') {
       DannError.error(
         "'" +
@@ -83,5 +84,6 @@ Dann.prototype.outputActivation = function outputActivation(act) {
       return;
     }
   }
+
   this.Layers[this.Layers.length - 1].setFunc(act);
 };
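A small design wrinkle: outputActivation validates against lowerCaseAct but still forwards the original act to setFunc, which lowercases again via stringTofunc, so the layer's stored names end up lowercase either way. Sketch:

```js
const nn = new Dann(4, 2);
nn.addHiddenLayer(8);
nn.outputActivation('LeakySigmoid'); // validated as 'leakysigmoid'
nn.makeWeights();
console.log(nn.Layers[nn.Layers.length - 1].actname); // 'leakysigmoid'
```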
3 changes: 2 additions & 1 deletion src/classes/layer/methods/setFunc.js
@@ -5,7 +5,8 @@
  * @param {String} act The activation function name
  */
 Layer.prototype.setFunc = function setFunc(act) {
-  let obj = Layer.stringTofunc(act);
+  const lowerCaseAct = act.toLocaleLowerCase();
+  let obj = Layer.stringTofunc(lowerCaseAct);
   if (obj !== undefined) {
     this.actname = obj.name;
     this.actname_d = obj.name_d;
2 changes: 1 addition & 1 deletion src/classes/layer/methods/stringTofunc.js
@@ -6,7 +6,7 @@
  * @return {Object} Object containing information about an activation function.
  */
 Layer.stringTofunc = function stringTofunc(str) {
-  let act = str;
+  let act = str.toLocaleLowerCase();
   let der = act + '_d';
   let func;
   let func_d;
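stringTofunc is the single point where a user-facing name becomes canonical: lowercase the string, derive the '_d' key, and resolve both against activations. Inferred from how setFunc consumes the result (actname, actname_d, actfunc, actfunc_d in the tests below), the returned shape is roughly:

```js
const obj = Layer.stringTofunc('TanH');
// obj.name   === 'tanh'
// obj.name_d === 'tanh_d'
// obj.func / obj.func_d are activations['tanh'] / activations['tanh_d']
```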
24 changes: 12 additions & 12 deletions src/core/functions/actfuncs.js
@@ -10,38 +10,38 @@ let activations = {
     let x1 = 1 / (1 + Math.exp(-x));
     return x1 * (1 - x1);
   },
-  siLU(x) {
+  silu(x) {
     return x / (1 + Math.exp(-x));
   },
-  siLU_d(x) {
+  silu_d(x) {
     let top = 1 + Math.exp(-x) + x * Math.exp(-x);
     let down = Math.pow(1 + Math.exp(-x), 2);
     return top / down;
   },
-  tanH(x) {
+  tanh(x) {
     let top = Math.exp(x) - Math.exp(-x);
     let down = Math.exp(x) + Math.exp(-x);
     return top / down;
   },
-  tanH_d(x) {
+  tanh_d(x) {
     let numer = Math.pow(Math.exp(2 * x) - 1, 2);
     let denom = Math.pow(Math.exp(2 * x) + 1, 2);
     return 1 - numer / denom;
   },
-  leakyReLU(x) {
+  leakyrelu(x) {
     return Math.max(x, x * 0.01);
   },
-  leakyReLU_d(x) {
+  leakyrelu_d(x) {
     if (x >= 0) {
       return 1;
     } else {
       return 0.01;
     }
   },
-  reLU(x) {
+  relu(x) {
     return Math.max(x, 0);
   },
-  reLU_d(x) {
+  relu_d(x) {
     if (x >= 0) {
       return 1;
     } else {
@@ -86,7 +86,7 @@ let activations = {
     return 1 / (1 + Math.exp(-x));
   },
   // Experimental
-  leakyReLUCapped(x) {
+  leakyrelucapped(x) {
     if (x >= 0 && x <= 6) {
       return x;
     } else if (x < 0) {
@@ -95,7 +95,7 @@ let activations = {
       return 6;
     }
   },
-  leakyReLUCapped_d(x) {
+  leakyrelucapped_d(x) {
     if (x >= 0 && x <= 6) {
       return 1;
     } else if (x < 0) {
@@ -104,10 +104,10 @@ let activations = {
       return 0;
     }
   },
-  leakySigmoid(x) {
+  leakysigmoid(x) {
     return 1 / (1 + Math.exp(-x)) + x / 100;
   },
-  leakySigmoid_d(x) {
+  leakysigmoid_d(x) {
     return Math.exp(-x) / Math.pow(Math.exp(-x) + 1, 2) + 1 / 100;
   },
 };
16 changes: 8 additions & 8 deletions test/unit/classes/dann.js
@@ -179,7 +179,7 @@ suite('Dann Object', function () {
       }
     });
     test('Should have initiated an output layer with sigmoid', function () {
-      let acts = ['leakyReLU', 'leakyReLU', 'leakyReLU', 'tanH'];
+      let acts = ['leakyrelu', 'leakyrelu', 'leakyrelu', 'tanh'];
       //starts at one to ignore input
       for (let i = 1; i < acts.length; i++) {
         let name = acts[i - 1];
@@ -317,10 +317,10 @@ suite('Dann Object', function () {
       assert.equal(nn.Layers[1].size, 16);
     });
     test('Should have initiated a tanH activation', function () {
-      assert.equal('tanH', nn.Layers[1].actname);
-      assert.equal('tanH_d', nn.Layers[1].actname_d);
-      assert.equal(activations.tanH, nn.Layers[1].actfunc);
-      assert.equal(activations.tanH_d, nn.Layers[1].actfunc_d);
+      assert.equal('tanh', nn.Layers[1].actname);
+      assert.equal('tanh_d', nn.Layers[1].actname_d);
+      assert.equal(activations.tanh, nn.Layers[1].actfunc);
+      assert.equal(activations.tanh_d, nn.Layers[1].actfunc_d);
     });
     test('Should have initiated matrix', function () {
       assert.instanceOf(nn.Layers[1].layer, Matrix);
@@ -436,7 +436,7 @@ suite('Dann Object', function () {
       nn.outputActivation('reLU');
     });
     test('Should have initiated a reLU activation function for the output', function () {
-      let name = 'reLU';
+      let name = 'relu';
       let derivative = name + '_d';
       assert.equal(name, nn.Layers[2].actname);
       assert.equal(derivative, nn.Layers[2].actname_d);
@@ -531,7 +531,7 @@ suite('Dann Object', function () {
      assert.typeOf(nn.epoch, 'Number');
    });
    test('Should have loaded layers activations', function () {
-      let acts = ['siLU', 'tanH'];
+      let acts = ['silu', 'tanh'];
      //starts at one to ignore input
      for (let i = 1; i < acts.length; i++) {
        let name = acts[i - 1];
@@ -689,7 +689,7 @@ suite('Dann Object', function () {
      assert.typeOf(nn.epoch, 'Number');
    });
    test('Should have loaded layers activations', function () {
-      let acts = ['leakyReLU', 'tanH'];
+      let acts = ['leakyrelu', 'tanh'];
      //starts at one to ignore input
      for (let i = 1; i < acts.length; i++) {
        let name = acts[i - 1];
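The updated assertions pin the canonical lowercase names. A natural companion test (not part of this diff) would feed a deliberately mixed-case name and assert that it lands on the lowercase entry, in the same mocha/chai style:

```js
test('Should resolve mixed-case activation names', function () {
  const nn = new Dann(2, 1);
  nn.addHiddenLayer(4, 'LeAkYrElU');
  nn.makeWeights();
  assert.equal(nn.Layers[1].actname, 'leakyrelu');
  assert.equal(nn.Layers[1].actfunc, activations.leakyrelu);
});
```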
