/
rrelu.js
52 lines (46 loc) · 1 KB
/
rrelu.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
import Layer from './base.js'
import Tensor from '../../../util/tensor.js'
/**
* Randomized ReLU layer
*/
export default class RandomizedReLULayer extends Layer {
	/**
	 * @param {object} [config] config
	 * @param {number} [config.l] Lower bound of the random negative slope (default 1/8)
	 * @param {number} [config.u] Upper bound of the random negative slope (default 1/3)
	 */
	constructor({ l = 1.0 / 8, u = 1.0 / 3, ...rest } = {}) {
		// `= {}` lets the layer be constructed without a config object;
		// the original destructuring threw a TypeError on `undefined`.
		super(rest)
		this._l = l
		this._u = u
		this._r = null
		this._training = false
	}

	/**
	 * Bind the layer to the current pass mode.
	 * @param {object} status status
	 * @param {boolean} status.training Whether the network is in training mode
	 */
	bind({ training }) {
		this._training = training
	}

	/**
	 * Forward pass.
	 * Training: each non-batch position gets a slope sampled uniformly from [l, u].
	 * Inference: every position uses the deterministic mean slope (l + u) / 2.
	 * Negative inputs are scaled by the slope; non-negative inputs pass through.
	 * @param {Tensor} x Input tensor; first axis is assumed to be the batch axis
	 * @returns {Tensor} Output tensor with the same sizes as `x`
	 */
	calc(x) {
		const perUnitSizes = x.sizes.slice(1)
		this._r = this._training
			? Tensor.random(perUnitSizes, this._l, this._u)
			: new Tensor(perUnitSizes, (this._l + this._u) / 2)
		this._i = x // kept for the backward pass sign test
		const o = x.copy()
		o.map((v, i) => (v > 0 ? v : v * this._r.at(i.slice(1))))
		return o
	}

	/**
	 * Backward pass.
	 * Gradient is passed through unchanged where the input was positive,
	 * and scaled by the same slope used in `calc` where it was not.
	 * @param {Tensor} bo Gradient flowing in from the next layer
	 * @returns {Tensor} Gradient with respect to the input
	 */
	grad(bo) {
		const bi = bo.copy()
		bi.map((v, i) => (this._i.at(i) > 0 ? v : v * this._r.at(i.slice(1))))
		return bi
	}

	/**
	 * Serialize the layer configuration.
	 * @returns {{type: string, l: number, u: number}} Plain-object representation
	 */
	toObject() {
		return {
			type: 'rrelu',
			l: this._l,
			u: this._u,
		}
	}
}
RandomizedReLULayer.registLayer('rrelu')