-
Notifications
You must be signed in to change notification settings - Fork 2
/
index.js
169 lines (158 loc) · 5.67 KB
/
index.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
/**
* Tensorflow.js Examples for Node.js
 * Script adapted from
* https://github.com/tensorflow/tfjs-examples
* https://groups.google.com/a/tensorflow.org/forum/#!forum/tfjs
* @author Loreto Parisi (loretoparisi@gmail.com)
* @copyright 2018 Loreto Parisi (loretoparisi@gmail.com)
*/
const tf = require('@tensorflow/tfjs-node');
var fs = require('fs');
var performance = require('perf_hooks').performance;
//
// Script adapted from: https://codepen.io/caisq/pen/vrxOvy
//
// TensorFlow.js example: Trains LSTM model to perform the following sequence task:
//
// Given a sequence of 0s and 1s of fixed length (10), output a single binary number (0 or 1).
//
// The training data has the following pattern:
//
// The output (i.e., label) is 1 if there are four or more consecutive and identical
// items (either 0s or 1s) in the input sequence. Otherwise, the output is 0. For example:
// Sequence [0, 1, 0, 1, 0, 1, 0, 0, 1, 0] --> Label: 0.
// Sequence [0, 1, 1, 1, 1, 0, 1, 0, 0, 1] --> Label: 1.
// Sequence [0, 0, 0, 0, 0, 0, 1, 0, 0, 1] --> Label: 1.
const sequenceLength = 10;
const stretchLengthThreshold = 4;
// Builds one random example for the "stretch detection" task.
//
// Args:
//   len: Length of the sequence to generate.
//
// Returns a two-element array:
//   1. An Array of `len` randomly drawn 0s and 1s.
//   2. The label: 1 if the sequence contains a run of at least
//      `stretchLengthThreshold` identical items, otherwise 0.
function generateSequenceAndLabel(len) {
  const items = [];
  let runValue = -1;   // value of the current run (-1 = no run yet)
  let runLength = 0;   // length of the current run of identical items
  let hasLongRun = false;
  for (let i = 0; i < len; ++i) {
    const bit = Math.random() > 0.5 ? 1 : 0;
    items.push(bit);
    // Extend the current run or start a new one.
    runLength = bit === runValue ? runLength + 1 : 1;
    runValue = bit;
    if (runLength >= stretchLengthThreshold) {
      hasLongRun = true;
    }
  }
  return [items, hasLongRun ? 1 : 0];
}
// Generates a dataset consisting of sequences and their corresponding labels.
//
// Args:
// numExamples: Number of examples to generate.
// sequenceLength: Length of each individual sequence.
//
// Returns:
// 1. Sequence Tensor: a Tensor of shape [numExamples, sequenceLength, 2].
// The first dimension is the batch examples.
// The second dimension is the time axis (sequence items).
// The third dimension is the one-hot encoding of the 0/1 items.
// 2. Label Tensor: a Tensor of shape [numExamples, 1].
// Each element of this Tensor is 0 or 1.
// Builds a one-hot encoded dataset of random 0/1 sequences with labels.
//
// Args:
//   numExamples: Number of examples to generate.
//   sequenceLength: Length of each individual sequence.
//
// Returns a two-element array:
//   1. Sequences: Tensor of shape [numExamples, sequenceLength, 2], whose
//      last axis one-hot encodes each 0/1 item.
//   2. Labels: Tensor of shape [numExamples, 1] holding the 0/1 labels.
function generateDataset(numExamples, sequenceLength) {
  const sequences = tf.buffer([numExamples, sequenceLength, 2]);
  const labels = tf.buffer([numExamples, 1]);
  for (let example = 0; example < numExamples; ++example) {
    const [sequence, label] = generateSequenceAndLabel(sequenceLength);
    sequence.forEach((item, step) => {
      // One-hot: write a 1 at the channel matching the 0/1 item value.
      sequences.set(1, example, step, item);
    });
    labels.set(label, example, 0);
  }
  return [sequences.toTensor(), labels.toTensor()];
}
// Node.js shim for the browser-only tf.nextFrame(): resolves after yielding
// control to the event loop so long training runs do not block other work.
// setImmediate (rather than process.nextTick) is used because nextTick
// callbacks run *before* pending I/O events and can starve the event loop
// when awaited repeatedly in a tight training loop.
tf.nextFrame = function () {
  return new Promise((resolve) => {
    setImmediate(() => resolve(true));
  });
};
// Train a model to predict the label based on the sequence.
// Trains an LSTM model to predict whether a 0/1 sequence contains a run of
// `stretchLengthThreshold` or more identical items.
//
// Returns:
//   A Promise resolving to the output of tf.Model.fit (the training
//   history), or rejecting with the training error.
async function train() {
  // Define the topology of the model: a single LSTM layer feeding one
  // sigmoid unit that emits the probability of label 1.
  const model = tf.sequential();
  model.add(tf.layers.lstm({ units: 8, inputShape: [sequenceLength, 2] }));
  model.add(tf.layers.dense({ units: 1, activation: 'sigmoid' }));

  // Compile model to prepare for training.
  const learningRate = 4e-3;
  model.compile({
    loss: 'binaryCrossentropy',
    optimizer: tf.train.rmsprop(learningRate),
    metrics: ['acc']
  });

  // Generate training examples. Use the module-level sequenceLength (the
  // original hard-coded 10) so the data always matches the model's
  // inputShape above.
  const numTrainExamples = 500;
  console.log('Generating training data...');
  const [trainSequences, trainLabels] =
      generateDataset(numTrainExamples, sequenceLength);

  const status = {
    train_epoch: 0,
    train_loss: 0,
    train_acc: 0,
    val_loss: 0,
    val_acc: 0
  };
  console.log('Training model...');
  try {
    // Return model.fit's own promise instead of wrapping it in a new one
    // (avoids the explicit-Promise-construction anti-pattern).
    return await model.fit(trainSequences, trainLabels, {
      epochs: 5,
      validationSplit: 0.15,
      callbacks: {
        onBatchEnd: async (batch, logs) => {
          console.log(batch, logs);
          // Yield to the event loop between batches.
          await tf.nextFrame();
        },
        onEpochEnd: async (epoch, logs) => {
          // Report the current loss and accuracy values.
          status.train_epoch = epoch + 1;
          status.train_loss = logs.loss;
          status.train_acc = logs.acc;
          status.val_loss = logs.val_loss;
          status.val_acc = logs.val_acc;
          console.log(status);
          await tf.nextFrame();
        }
      }
    });
  } finally {
    // Memory clean up: dispose the training data whether or not fit
    // succeeded (the original leaked both tensors on failure).
    trainSequences.dispose();
    trainLabels.dispose();
  }
}
// Kick off training; print the fit output on success, the error otherwise.
(async () => {
  try {
    const fitOutput = await train();
    console.log(fitOutput);
  } catch (error) {
    console.error(error);
  }
})();