-
Notifications
You must be signed in to change notification settings - Fork 2
/
MultiLayerPerceptron.cs
219 lines (180 loc) · 7.79 KB
/
MultiLayerPerceptron.cs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
#region License
/*
* This file is subject to the terms and conditions defined in
* file 'LICENSE.txt', which is part of this source code package.
*/
#endregion
using System;
using System.Collections.Generic;
using System.Linq;
using MathNet.Numerics.LinearAlgebra;
using NeuralNetworks.EventsArgs;
using NeuralNetworks.MultiLayerPerceptronHelpers;
using NeuralNetworks.Utility;
namespace NeuralNetworks.Models
{
/// <summary>
/// A feed-forward multi-layer perceptron trained with stochastic online
/// back-propagation. Layers are built from <see cref="Layer"/> instances and
/// activation derivatives are selected to match the supplied activation function.
/// </summary>
public class MultiLayerPerceptron
{
    private readonly Randomizer _randomizer;
    private readonly List<Layer> _layers;
    private readonly DataReader _dataReader;
    private bool _abortTeaching;
    private Delegates.DerivativeFunction _derivativeFunction;

    /// <summary>
    /// Builds the network layer by layer.
    /// </summary>
    /// <param name="numberOfLayers">Total number of layers to create.</param>
    /// <param name="inputsPerLayer">Input count for each layer, indexed by layer.</param>
    /// <param name="neuronsPerLayer">Neuron count for each layer, indexed by layer.</param>
    /// <param name="activationFunction">Activation shared by every neuron; also determines the derivative used in back-propagation.</param>
    public MultiLayerPerceptron(int numberOfLayers, List<int> inputsPerLayer, List<int> neuronsPerLayer, Delegates.ActivationFunction activationFunction)
    {
        _randomizer = new Randomizer();
        _layers = new List<Layer>();
        _dataReader = new DataReader();
        for (int index = 0; index < numberOfLayers; index++)
        {
            _layers.Add(new Layer(neuronsPerLayer[index], inputsPerLayer[index], activationFunction, _randomizer));
        }
        SetDerivativeFunctionType(activationFunction);
    }

    // Pairs the activation with its analytic derivative; anything that is not
    // the bipolar sigmoid falls back to the unipolar sigmoid derivative.
    private void SetDerivativeFunctionType(Delegates.ActivationFunction activationFunction)
    {
        if (activationFunction == Functions.SigmoidBipolar)
        {
            _derivativeFunction = Functions.SigmoidBipolarDerivative;
        }
        else
        {
            _derivativeFunction = Functions.SigmoidUnipolarDerivative;
        }
    }

    /// <summary>
    /// Trains the network online: each iteration picks one random sample,
    /// runs a forward pass and back-propagates the error.
    /// Raises <see cref="TeachingProgressChanged"/> roughly once per percent of progress.
    /// </summary>
    /// <param name="datafilePath">Path of the text file with training vectors.</param>
    /// <param name="separator">Column separator used in the data file.</param>
    /// <param name="decimalPoint">Decimal separator used in the data file.</param>
    /// <param name="iterations">Number of single-sample training steps.</param>
    /// <param name="learningRate">Step size applied to every weight update.</param>
    public void Teach(string datafilePath, char separator, char decimalPoint, int iterations, double learningRate)
    {
        _abortTeaching = false;
        var outputs = new List<Vector<double>>();
        var teachingPercentsProgress = 0;
        var inputVectors = _dataReader.ReadPerceptronDataFromTextFile(datafilePath, separator, decimalPoint);
        // BUG FIX: for iterations < 100 the integer division yielded 0 and
        // `iteration % progressMod` threw DivideByZeroException. Clamp to 1 so
        // short runs simply report progress every iteration instead.
        var progressMod = Math.Max(1, iterations / 100);
        for (int iteration = 0; iteration < iterations; iteration++)
        {
            var selectedVector = _randomizer.SelectRandomVector(inputVectors);
            var expectedClass = selectedVector.Class;
            // outputs[0] is the raw input; CalculateOutputs appends one vector per layer.
            outputs.Clear();
            outputs.Add(selectedVector.Data);
            CalculateOutputs(outputs);
            DoErrorsBackwardPropagation(outputs, learningRate, expectedClass);
            if (iteration % progressMod == 0)
            {
                teachingPercentsProgress++;
                OnTeachingProgressChanged(teachingPercentsProgress);
            }
            if (_abortTeaching)
            {
                return;
            }
        }
    }

    // One back-propagation step: output-layer deltas, hidden-layer deltas,
    // then the weight update. Order matters — weights must not change before
    // all RoFactors (deltas) have been computed from the current weights.
    private void DoErrorsBackwardPropagation(List<Vector<double>> outputs, double learningRate, int expectedClass)
    {
        PropagateLastLayer(outputs, expectedClass);
        PropagateHiddenLayers(outputs);
        AdjustWeights(outputs, learningRate);
    }

    // Gradient-descent update: w += eta * delta * input, applied to every
    // weight and to the bias weight of every neuron.
    private void AdjustWeights(List<Vector<double>> outputs, double learningRate)
    {
        var offset = 1;
        for (int layerId = _layers.Count - offset; layerId >= 0; layerId--)
        {
            var lastNeuronId = _layers[layerId].Neurons.Count - offset;
            for (int neuronId = lastNeuronId; neuronId >= 0; neuronId--)
            {
                var neuron = _layers[layerId].Neurons[neuronId];
                for (int connectedNeuronId = 0; connectedNeuronId < neuron.Weights.Count; connectedNeuronId++)
                {
                    // outputs[layerId] holds the inputs feeding this layer
                    // (outputs[0] is the sample itself).
                    neuron.Weights[connectedNeuronId] += learningRate * neuron.RoFactor * outputs[layerId][connectedNeuronId];
                }
                neuron.BiasWeight += learningRate * neuron.RoFactor * neuron.Bias;
            }
        }
    }

    // Computes hidden-layer deltas from the already-computed deltas of the
    // next layer: delta_j = f'(out_j) * sum_k(w_kj * delta_k).
    private void PropagateHiddenLayers(List<Vector<double>> outputs)
    {
        const int layersOffset = 2; // One for last layer and one because its indexed from 0.
        const int neuronsOffset = 1; // Indexed from 0.
        var lastLayerId = _layers.Count - layersOffset;
        for (int layerId = lastLayerId; layerId >= 0; layerId--)
        {
            var lastNeuronId = _layers[layerId].Neurons.Count - neuronsOffset;
            for (int neuronId = lastNeuronId; neuronId >= 0; neuronId--)
            {
                const int nextLayerOffset = 1;
                double partOfRoFactor = 0.0;
                // outputs is offset by one relative to _layers (outputs[0] is the input),
                // so outputs[layerId + 1] is this layer's own output vector.
                var derivative = CalculateDerivative(outputs[layerId + nextLayerOffset][neuronId]);
                foreach (var connectedNeuron in _layers[layerId + nextLayerOffset].Neurons)
                {
                    partOfRoFactor += connectedNeuron.Weights[neuronId] * connectedNeuron.RoFactor;
                }
                _layers[layerId].Neurons[neuronId].RoFactor = partOfRoFactor * derivative;
            }
        }
    }

    // Output-layer delta: (target - actual) * f'(actual).
    // NOTE(review): every output neuron receives the delta of the LAST neuron's
    // output (outputs.Last().Last()) against the single scalar class label —
    // this assumes a single-output-neuron classifier; confirm before using
    // networks with a wider output layer.
    private void PropagateLastLayer(List<Vector<double>> outputs, int expectedClass)
    {
        // Loop-invariant values hoisted out of the loop; identical result,
        // avoids recomputing Last().Last() and the derivative per neuron.
        var lastOutput = outputs.Last().Last();
        var roFactor = (expectedClass - lastOutput) * CalculateDerivative(lastOutput);
        foreach (var neuron in _layers.Last().Neurons)
        {
            neuron.RoFactor = roFactor;
        }
    }

    // Applies the derivative selected in SetDerivativeFunctionType.
    private double CalculateDerivative(double output)
    {
        return _derivativeFunction(output);
    }

    // Forward pass. allOutputs must contain the input vector at index 0 on
    // entry; on exit it additionally holds one output vector per layer, so its
    // final element is the network output.
    private void CalculateOutputs(List<Vector<double>> allOutputs)
    {
        var prevOutputId = 0;
        foreach (var layer in _layers)
        {
            var layerOutputs = new List<double>();
            foreach (var neuron in layer.Neurons)
            {
                var inputData = allOutputs[prevOutputId];
                layerOutputs.Add(neuron.CalculateOutput(inputData));
            }
            allOutputs.Add(Vector<double>.Build.DenseOfEnumerable(layerOutputs));
            prevOutputId++;
        }
    }

    /// <summary>
    /// Requests a cooperative stop of an in-progress <see cref="Teach"/> call;
    /// the current iteration finishes before the loop exits.
    /// </summary>
    public void AbortTeaching()
    {
        _abortTeaching = true;
    }

    /// <summary>Raised about once per percent of completed training iterations.</summary>
    public event Delegates.TeachingProgressChangedEventHandler TeachingProgressChanged;

    /// <summary>Raises <see cref="TeachingProgressChanged"/> with the current percentage.</summary>
    protected virtual void OnTeachingProgressChanged(int progress)
    {
        TeachingProgressChanged?.Invoke(this, new ProgressEventArgs(progress));
    }

    /// <summary>
    /// Re-randomizes every weight and bias weight and clears all deltas,
    /// returning the network to an untrained state.
    /// </summary>
    public void ResetWeights()
    {
        foreach (var layer in _layers)
        {
            foreach (var neuron in layer.Neurons)
            {
                neuron.RoFactor = 0.0;
                neuron.Weights = _randomizer.RandomizeWeights(neuron.Weights.Count);
                neuron.BiasWeight = _randomizer.RandomizeBias();
            }
        }
    }

    /// <summary>
    /// Runs a forward pass over every vector in the given data file.
    /// </summary>
    /// <param name="datafilePath">Path of the text file with test vectors.</param>
    /// <param name="separator">Column separator used in the data file.</param>
    /// <param name="decimalPoint">Decimal separator used in the data file.</param>
    /// <returns>One (expected, actual) pair per test vector, where actual is the network's final scalar output.</returns>
    public List<Tuple<double, double>> Test(string datafilePath, char separator, char decimalPoint)
    {
        var testingData = _dataReader.ReadPerceptronDataFromTextFile(datafilePath, separator, decimalPoint);
        var resultOutputs = new List<Tuple<double, double>>();
        foreach (var neuralVector in testingData)
        {
            var testingOutputs = new List<Vector<double>> { neuralVector.Data };
            CalculateOutputs(testingOutputs);
            var expectedOutput = neuralVector.Class;
            var calculatedOutput = testingOutputs.Last().Last();
            resultOutputs.Add(new Tuple<double, double>(expectedOutput, calculatedOutput));
        }
        return resultOutputs;
    }
}
}