From 63f82093058dd59f16dee102e911be734fb0e62b Mon Sep 17 00:00:00 2001 From: origo Date: Tue, 13 Nov 2018 15:20:08 +0100 Subject: [PATCH 1/3] Added leakyReluAlpha option --- src/neural-network.js | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/neural-network.js b/src/neural-network.js index 4ce3bfa11..c26ad2046 100644 --- a/src/neural-network.js +++ b/src/neural-network.js @@ -32,6 +32,7 @@ export default class NeuralNetwork { static get defaults() { return { + leakyReluAlpha: 0.01, binaryThresh: 0.5, hiddenLayers: [3], // array of ints for the sizes of the hidden layers in the network activation: 'sigmoid' // Supported activation types ['sigmoid', 'relu', 'leaky-relu', 'tanh'] @@ -249,7 +250,7 @@ export default class NeuralNetwork { _runInputLeakyRelu(input) { this.outputs[0] = input; // set output state of input layer - + let alpha = this.leakyReluAlpha; let output = null; for (let layer = 1; layer <= this.outputLayer; layer++) { for (let node = 0; node < this.sizes[layer]; node++) { let weights = this.weights[layer][node]; let sum = this.biases[layer][node]; for (let k = 0; k < weights.length; k++) { sum += weights[k] * input[k]; } //leaky relu - this.outputs[layer][node] = (sum < 0 ? 0 : 0.01 * sum); + this.outputs[layer][node] = (sum < 0 ? alpha * sum : sum); } output = input = this.outputs[layer]; } @@ -557,6 +558,7 @@ export default class NeuralNetwork { * @param target */ _calculateDeltasLeakyRelu(target) { + let alpha = this.leakyReluAlpha; for (let layer = this.outputLayer; layer >= 0; layer--) { for (let node = 0; node < this.sizes[layer]; node++) { let output = this.outputs[layer][node]; @@ -572,7 +574,7 @@ } } this.errors[layer][node] = error; - this.deltas[layer][node] = output > 0 ? error : 0.01 * error; + this.deltas[layer][node] = output > 0 ? 
error : alpha * error; } } } From 3d1534fbc882f161e6cc34adaa21daaaad77a0ff Mon Sep 17 00:00:00 2001 From: origo Date: Tue, 13 Nov 2018 15:28:19 +0100 Subject: [PATCH 2/3] Added basic test for leakyReluAlpha option --- test/base/options.js | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/test/base/options.js b/test/base/options.js index 18a62fee7..c4d5ab3ee 100644 --- a/test/base/options.js +++ b/test/base/options.js @@ -108,4 +108,10 @@ describe ('neural network constructor values', () => { var net = new brain.NeuralNetwork(opts); assert.equal(opts.activation, net.activation, `activation => ${net.activation} but should be ${opts.activation}`); }) + + it('leakyReluAlpha should be settable in the constructor', () => { + let opts = { leakyReluAlpha: 0.1337 }; + var net = new brain.NeuralNetwork(opts); + assert.equal(opts.leakyReluAlpha, net.leakyReluAlpha, `leakyReluAlpha => ${net.leakyReluAlpha} but should be ${opts.leakyReluAlpha}`); + }) }); \ No newline at end of file From 7f48c0374ac5d49e6c72bda4edfef31105cb23d6 Mon Sep 17 00:00:00 2001 From: origo Date: Tue, 13 Nov 2018 15:39:40 +0100 Subject: [PATCH 3/3] Added leakyReluAlpha in toFunction --- src/neural-network.js | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/neural-network.js b/src/neural-network.js index c26ad2046..c28075d70 100644 --- a/src/neural-network.js +++ b/src/neural-network.js @@ -935,6 +935,7 @@ export default class NeuralNetwork { */ toFunction() { const activation = this.activation; + const leakyReluAlpha = this.leakyReluAlpha; let needsVar = false; function nodeHandle(layers, layerNumber, nodeKey) { if (layerNumber === 0) { @@ -964,7 +965,7 @@ } case 'leaky-relu': { needsVar = true; - return `((v=${result.join('')})<0?0:0.01*v)`; + return `((v=${result.join('')})<0?${leakyReluAlpha}*v:v)`; } case 'tanh': return `Math.tanh(${result.join('')})`; @@ -990,4 +991,4 @@ return new 
Function('input', `${ needsVar ? 'var v;' : '' }return ${result};`); } -} \ No newline at end of file +}