// neural_net.cc
#include "neural_net.h"
// Constructs an untrained network: no layers wired yet, with a single
// (empty) neuron bank reserved for the future input units.
NeuralNet::NeuralNet() : layer_connected_(false) {
  all_neurons_.emplace_back();  // member starts empty, so this == resize(1)
}
// Declares how many input units the network has, by sizing the first
// neuron bank. Only legal before ConnectLayers() freezes the topology.
void NeuralNet::SetInputSize(int size) {
  assert(!layer_connected_);
  assert(!all_neurons_.empty());
  all_neurons_.front().resize(size);
}
// Appends a layer to the back of the pipeline. The pointer is borrowed,
// not owned. Only legal before ConnectLayers() freezes the topology.
void NeuralNet::AppendLayer(Layer *layer) {
  assert(!layer_connected_);
  layers_.emplace_back(layer);
}
void NeuralNet::ConnectLayers() {
assert(!layer_connected_);
assert(layers_.size() > 0);
all_neurons_.resize(layers_.size()+1);
for (int i=0; i<layers_.size(); i++) {
layers_[i]->CheckInputUnits(all_neurons_[i]);
layers_[i]->ArrangeOutputUnits(all_neurons_[i+1]);
layers_[i]->ConnectNeurons(all_neurons_[i], all_neurons_[i+1]);
}
layer_connected_ = true;
}
// Runs a forward pass: loads `input` into the first neuron bank,
// propagates through every layer in order, then copies the final
// bank's activations (`z`) into `output`.
// Preconditions: ConnectLayers() has run, and input/output are already
// sized to match the first/last neuron banks (asserted below).
void NeuralNet::PropagateLayers(
    vector<double> &input,
    vector<double> &output) {
  assert(layer_connected_);
  assert(all_neurons_.size() == layers_.size() + 1);
  vector<struct Neuron> &first_neurons = all_neurons_[0];
  vector<struct Neuron> &last_neurons = all_neurons_[layers_.size()];
  assert(first_neurons.size() == input.size());
  assert(last_neurons.size() == output.size());
  // Load input activations. size_t indices throughout: the original
  // loops compared a signed int against size_t bounds.
  for (size_t i = 0; i < input.size(); ++i) {
    first_neurons[i].z = input[i];
  }
  // Feed forward, one layer at a time.
  for (size_t i = 0; i < layers_.size(); ++i) {
    layers_[i]->Propagate(all_neurons_[i], all_neurons_[i + 1]);
    layers_[i]->CalculateOutputUnits(all_neurons_[i + 1]);
  }
  // Export the final activations.
  for (size_t i = 0; i < output.size(); ++i) {
    output[i] = last_neurons[i].z;
  }
}
// Runs one backward pass after PropagateLayers(): seeds the output-layer
// delta from (actual - expected) and pushes it back through the layers,
// accumulating lazy weight updates that are applied later by
// ApplyLazySubtrahend() (see TrainNetwork).
void NeuralNet::BackPropagateLayers(vector<double> &expected) {
assert(layer_connected_);
vector<double> delta;
vector<double> prev_delta;
int last_idx = layers_.size();
assert(last_idx+1 == all_neurons_.size());
vector<struct Neuron> &last_neurons = all_neurons_[last_idx];
assert(last_neurons.size() == expected.size());
// Output error signal: gradient of the form (z - expected).
// NOTE(review): this matches MSE loss, or cross-entropy paired with
// softmax output — confirm against the Layer implementations.
delta.resize(last_neurons.size());
for (int i=0; i<last_neurons.size(); i++) {
delta[i] = last_neurons[i].z - expected[i];
}
// Walk layers back to front (stopping before layer 0). Each layer first
// records its lazy weight update from the incoming delta, then computes
// prev_delta for the layer below. `layers_[i-1]->f_` is presumably the
// previous layer's activation function, needed for its derivative —
// verify against Layer::BackPropagate.
for (int i=last_idx-1; i>=1; i--) {
layers_[i]->UpdateLazySubtrahend(all_neurons_[i], delta);
layers_[i]->BackPropagate(all_neurons_[i], delta, layers_[i-1]->f_, prev_delta);
delta = prev_delta;
}
// The first layer only accumulates its update; no delta is propagated
// into the input units themselves.
layers_[0]->UpdateLazySubtrahend(all_neurons_[0], delta);
}
// Trains the network on one batch: runs a forward + backward pass for
// every (input, expected output) pair, then applies the accumulated
// lazy weight updates once at the end (batch-style update).
void NeuralNet::TrainNetwork(DoubleVector2d &inputs, DoubleVector2d &expected_outputs) {
  assert(layer_connected_);
  // Validate pairing before doing any work (original asserted this only
  // after sizing scratch buffers).
  assert(inputs.size() == expected_outputs.size());
  int last_idx = layers_.size();
  assert(last_idx + 1 == all_neurons_.size());
  // Scratch buffer for forward-pass outputs; the values are unused here,
  // but PropagateLayers requires it sized to the last neuron bank.
  // Constructed at the right size directly instead of resize-after-default.
  vector<double> tmp(all_neurons_[last_idx].size());
  // size_t index: the original compared a signed int against size().
  for (size_t i = 0; i < inputs.size(); ++i) {
    PropagateLayers(inputs[i], tmp);
    BackPropagateLayers(expected_outputs[i]);
  }
  // Flush the accumulated updates into the weights, back to front.
  for (int i = last_idx - 1; i >= 0; --i) {
    layers_[i]->ApplyLazySubtrahend();
  }
}