diff --git a/js/neuralnetwork_builder.js b/js/neuralnetwork_builder.js
index 925ba638..27e2fc2e 100644
--- a/js/neuralnetwork_builder.js
+++ b/js/neuralnetwork_builder.js
@@ -2,31 +2,89 @@ import * as opt from '../../lib/model/nns/optimizer.js'
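+ // Each layer attribute is now described by a JSON-Schema-like descriptor
+ // ({ type, default, minimum, maximum, multipleOf, enum, label }) so the
+ // template can render the right input control from this metadata instead
+ // of hard-coding a form for every layer type.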
const layerTypes = {
abs: {},
- clip: { min: 0, max: 1 },
- conv: { kernel: 5, channel: 16 },
- dropout: { drop_rate: 0.5 },
+ acos: {},
+ acosh: {},
+ asin: {},
+ asinh: {},
+ atan: {},
+ atanh: {},
+ bdaa: { alpha: { type: 'number', default: 1, multipleOf: 0.1 } },
+ bent_identity: {},
+ blu: { beta: { type: 'number', default: 0.1, multipleOf: 0.1 } },
+ brelu: { a: { type: 'number', default: 1, multipleOf: 0.1 } },
+ ceil: {},
+ celu: { a: { type: 'number', default: 1, multipleOf: 0.1 } },
+ clip: {
+ min: { type: 'number', default: 0, multipleOf: 0.1 },
+ max: { type: 'number', default: 1, multipleOf: 0.1 },
+ },
+ cloglog: {},
+ cloglogm: {},
+ conv: { kernel: { type: 'number', default: 5 }, channel: { type: 'number', default: 16 } },
+ cos: {},
+ cosh: {},
+ crelu: {},
+ dropout: {
+ drop_rate: { type: 'number', label: 'Drop rate', default: 0.5, multipleOf: 0.1, minimum: 0, maximum: 1 },
+ },
+ eelu: {
+ k: { type: 'number', default: 1, multipleOf: 0.1 },
+ alpha: { type: 'number', default: 1, multipleOf: 0.1 },
+ beta: { type: 'number', default: 1, multipleOf: 0.1 },
+ },
+ elish: {},
+ elliott: {},
+ elu: { a: { type: 'number', default: 1, multipleOf: 0.1 } },
+ erelu: {},
+ erf: {},
+ eswish: { beta: { type: 'number', default: 1, multipleOf: 0.1 } },
exp: {},
+ felu: { alpha: { type: 'number', default: 1, multipleOf: 0.1 } },
flatten: {},
- full: { size: 10, a: 'sigmoid' },
+ floor: {},
+ frelu: { b: { type: 'number', default: 0, multipleOf: 0.1 } },
+ full: {
+ out_size: { type: 'number', label: 'Output size', default: 10, minimum: 1, maximum: 100 },
+ activation: {
+ type: 'string',
+ label: 'Activation',
+ default: 'sigmoid',
+ enum: [
+ 'sigmoid',
+ 'tanh',
+ 'relu',
+ 'leaky_relu',
+ 'softsign',
+ 'softplus',
+ 'identity',
+ 'polynomial',
+ 'abs',
+ 'gaussian',
+ 'softmax',
+ ],
+ },
+ },
+ function: { func: { type: 'string', default: '2*x' } },
gaussian: {},
- leaky_relu: { a: 0.1 },
+ gelu: {},
+ leaky_relu: { a: { type: 'number', default: 0.1, multipleOf: 0.1, minimum: 0, maximum: 1 } },
identity: {},
log: {},
- mean: { axis: 0 },
+ mean: { axis: { type: 'number', default: 0, minimum: 0, maximum: 10 } },
negative: {},
relu: {},
- reshape: { size: [1, 1] },
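+ // 'array'-typed attributes (reshape, transpose) take a list of numbers; see arrayAttrDefinition below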
+ reshape: { size: { type: 'array', default: [1, 1] } },
sigmoid: {},
softmax: {},
softplus: {},
softsign: {},
- sparsity: { rho: 0.02 },
+ sparsity: { rho: { type: 'number', default: 0.02, multipleOf: 0.01 } },
square: {},
sqrt: {},
- sum: { axis: 0 },
+ sum: { axis: { type: 'number', default: 0, minimum: 0, maximum: 10 } },
tanh: {},
- transpose: { axis: [1, 0] },
- variance: { axis: 0 },
+ transpose: { axis: { type: 'array', default: [1, 0] } },
+ variance: { axis: { type: 'number', default: 0, minimum: 0, maximum: 10 } },
}
const arrayAttrDefinition = {
@@ -49,22 +107,23 @@ const nnModelDefinition = {
const layers = Vue.ref([
{
type: 'full',
- size: 10,
- a: 'sigmoid',
- poly_pow: 2,
+ out_size: 10,
+ activation: 'sigmoid',
},
])
const changeType = function (idx) {
- const layer = { type: layers.value[idx].type, ...layerTypes[layers.value[idx].type] }
+ const layer = { type: layers.value[idx].type }
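+ // Rebuild the layer object, seeding every attribute with its declared default value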
+ for (const [k, v] of Object.entries(layerTypes[layers.value[idx].type])) {
+ layer[k] = v.default
+ }
layers.value.splice(idx, 1, layer)
}
const addLayer = function () {
layers.value.push({
type: 'full',
- size: 10,
- a: 'sigmoid',
- poly_pow: 2,
+ out_size: 10,
+ activation: 'sigmoid',
})
}
@@ -77,19 +136,7 @@ const nnModelDefinition = {
data: function () {
return {
layerTypeNames: Object.keys(layerTypes),
- activations: [
- 'sigmoid',
- 'tanh',
- 'relu',
- 'leaky_relu',
- 'softsign',
- 'softplus',
- 'identity',
- 'polynomial',
- 'abs',
- 'gaussian',
- 'softmax',
- ],
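+ // The activation choices now live in layerTypes.full.activation.enum; expose the metadata to the template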
+ layerTypes: layerTypes,
}
},
template: `
@@ -101,47 +148,24 @@ const nnModelDefinition = {
- <template v-if="layer.type === 'clip'">
- Min: <input v-model.number="layer.min" type="number" step="0.1" />
- Max: <input v-model.number="layer.max" type="number" step="0.1" />
- </template>
- <template v-else-if="layer.type === 'conv'">
- Kernel: <input v-model.number="layer.kernel" type="number" min="1" />
- Channel: <input v-model.number="layer.channel" type="number" min="1" />
- </template>
- <template v-else-if="layer.type === 'dropout'">
- Drop Rate: <input v-model.number="layer.drop_rate" type="number" min="0" max="1" step="0.1" />
- </template>
- <template v-else-if="layer.type === 'full'">
- Size: <input v-model.number="layer.size" type="number" min="1" max="100" />
- Activation: <select v-model="layer.a">
- <option v-for="a in activations" :key="a" :value="a">{{ a }}</option>
- </select>
- </template>
- <template v-else-if="layer.type === 'leaky_relu'">
- Alpha: <input v-model.number="layer.a" type="number" min="0" max="1" step="0.1" />
- </template>
- <template v-else-if="layer.type === 'mean'">
- Axis: <input v-model.number="layer.axis" type="number" min="0" />
- </template>
- <template v-if="layer.a === 'polynomial'">
- n: <input v-model.number="layer.poly_pow" type="number" min="1" />
- </template>
- <template v-if="layer.type === 'reshape'">
- Sizes: <array-attr v-model="layer.size"></array-attr>
- </template>
- <template v-else-if="layer.type === 'sparsity'">
- Rho: <input v-model.number="layer.rho" type="number" step="0.01" />
- </template>
- <template v-else-if="layer.type === 'sum'">
- Axis: <input v-model.number="layer.axis" type="number" min="0" />
- </template>
- <template v-else-if="layer.type === 'transpose'">
- Axis: <array-attr v-model="layer.axis"></array-attr>
- </template>
- <template v-else-if="layer.type === 'variance'">
- Axis: <input v-model.number="layer.axis" type="number" min="0" />
- </template>
+ <template v-for="(aobj, attr) in layerTypes[layer.type]" :key="attr">
+ {{ aobj.label ?? attr }}:
+ <select v-if="aobj.enum" v-model="layer[attr]">
+ <option v-for="a in aobj.enum" :key="a" :value="a">{{ a }}</option>
+ </select>
+ <array-attr v-else-if="aobj.type === 'array'" v-model="layer[attr]"></array-attr>
+ <input v-else-if="aobj.type === 'number'" v-model.number="layer[attr]" type="number" :min="aobj.minimum" :max="aobj.maximum" :step="aobj.multipleOf" />
+ <input v-else v-model="layer[attr]" type="text" />
+ </template>
@@ -160,26 +184,12 @@ export default class NeuralNetworkBuilder {
}
get layers() {
- const l = this._vue ? this._vue.$refs.layerselm.layers : [{ type: 'full', size: 10, a: 'sigmoid' }]
- const r = []
- for (let i = 0; i < l.length; i++) {
- if (l[i].type === 'full') {
- r.push({ type: 'full', out_size: l[i].size })
- r.push({ type: l[i].a, n: l[i].poly_pow })
- } else {
- r.push(l[i])
- }
- }
- return r
+ const l = this._vue ? this._vue.$refs.layerselm.layers : [{ type: 'full', out_size: 10, activation: 'sigmoid' }]
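+ // Return shallow copies so callers cannot mutate the Vue component's layer state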
+ return l.map(v => ({ ...v }))
}
get invlayers() {
- const l = this.layers
- const r = []
- for (let i = l.length - 1; i >= 0; i -= 2) {
- r.push(l[i - 1], l[i])
- }
- return r
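+ // Layers are no longer expanded into (full, activation) pairs, so a plain reverse suffices;
+ // concat() clones the array first so reverse() does not mutate this.layers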
+ return this.layers.concat().reverse()
}
get optimizer() {