//****************************************************************************
//**
//** NLayer.cpp
//**
//** Copyright (c) 2010 Matthew Robbins
//**
//** Author: Matthew Robbins
//** Created: 04/2010
//**
//****************************************************************************
#include <iostream>
#include "NLayer.h"
#include "MemoryLeak.h"
namespace CarDemo
{
// Default constructor; the layer starts empty and is configured later
// via PopulateLayer, LoadLayer, SetWeights, or SetNeurons.
NLayer::NLayer()
{
}

// Destructor; the neuron vector cleans itself up, so there is nothing to release here.
NLayer::~NLayer()
{
}
// Feeds 'input' forward through the layer: each neuron forms a weighted
// sum of the inputs plus a bias term, squashed through Sigmoid, and the
// result is appended to 'output'.
// NOTE(review): output is appended to, not cleared first — callers appear
// to rely on push_back semantics, so that behavior is preserved.
void NLayer::Evaluate(std::vector<float> input, std::vector<float> &output)
{
	int inputIndex = 0;

	// Cycle over all the neurons and sum their weights against the inputs.
	for (unsigned int i = 0; i < totalNeurons; i++)
	{
		float activation = 0.0f;

		// The last slot of the weight array holds the bias weight, so only
		// the first (numInputs - 1) weights are applied to actual inputs.
		for (unsigned int j = 0; j < neurons[i].numInputs - 1; j++)
		{
			activation += input[inputIndex] * neurons[i].weights[j];
			inputIndex++;
		}

		// Add the bias contribution. The bias weight lives at index
		// (numInputs - 1); the original read weights[numInputs], which is
		// one element past the end of the weight array (out of bounds).
		activation += neurons[i].weights[neurons[i].numInputs - 1] * BIAS;

		output.push_back(Sigmoid(activation, 1.0f));

		// Every neuron in the layer sees the full input vector, so the
		// cursor rewinds for the next neuron.
		inputIndex = 0;
	}
}
void NLayer::SaveLayer(std::ofstream &fileOut, char* layerType)
{
fileOut << "<NLayer>" << std::endl;
fileOut << "Type=" << layerType << std::endl;
fileOut << "Inputs=" << this->totalInputs << std::endl;
fileOut << "Neurons=" << this->neurons.size() << std::endl;
fileOut << "-Build-" << std::endl;
for (unsigned int i = 0; i < this->neurons.size(); i++)
{
fileOut << "<Neuron>" << std::endl;
fileOut << "Weights=" << this->neurons[i].weights.size() << std::endl;
for (unsigned int j = 0; j < neurons[i].weights.size(); j++)
{
fileOut << "W=" << neurons[i].weights[j] << std::endl;
}
fileOut << "</Neuron>" << std::endl;
}
fileOut << "</NLayer>" << std::endl;
}
// Replaces this layer's neurons with the supplied set and records the
// new neuron count. 'in' is already a by-value copy owned by this call,
// so swapping it into the member avoids a second full vector copy
// (the original assigned, copying every Neuron again).
void NLayer::LoadLayer(std::vector<Neuron> in)
{
	totalNeurons = in.size();
	neurons.swap(in);
}
void NLayer::PopulateLayer(int numOfNeurons, int numOfInputs)
{
totalInputs = numOfInputs;
totalNeurons = numOfNeurons;
this->neurons.resize(numOfNeurons);
for (unsigned int i = 0; i < neurons.size(); i++)
{
neurons[i].Populate(numOfInputs);
}
}
// Rebuilds the layer as numOfNeurons neurons of numOfInputs weights each,
// copying values from the flat 'weights' vector in neuron order.
// NOTE(review): assumes weights.size() >= numOfNeurons * numOfInputs;
// a shorter vector would read past the end — confirm against callers.
void NLayer::SetWeights(std::vector<float> weights, int numOfNeurons, int numOfInputs)
{
	totalInputs = numOfInputs;
	totalNeurons = numOfNeurons;
	this->neurons.resize(numOfNeurons);

	// Copy the weights into the neurons. Using an int index fixes the
	// original signed/unsigned loop comparison, and assign() sizes and
	// fills each weight vector in a single step.
	std::vector<float>::size_type index = 0;
	for (int i = 0; i < numOfNeurons; i++)
	{
		neurons[i].weights.assign(weights.begin() + index,
		                          weights.begin() + index + numOfInputs);
		index += numOfInputs;
	}
}
// Flattens every neuron's weight vector into 'out', neuron by neuron.
void NLayer::GetWeights(std::vector<float> &out)
{
	// Pre-size the output by summing each neuron's weight count so the
	// copy below can write by index instead of repeated push_backs.
	size_t size = 0;
	for (unsigned int i = 0; i < this->totalNeurons; i++)
	{
		size += neurons[i].weights.size();
	}
	out.resize(size);

	// Bug fix: the original wrote to out[totalNeurons * i + j], which is
	// only correct when every neuron has exactly 'totalNeurons' weights;
	// otherwise values scatter and the index can run past out's end.
	// A running cursor places each weight at the next free slot.
	size_t index = 0;
	for (unsigned int i = 0; i < this->totalNeurons; i++)
	{
		for (unsigned int j = 0; j < neurons[i].weights.size(); j++)
		{
			out[index++] = neurons[i].weights[j];
		}
	}
}
// Installs a prebuilt neuron set and records the layer dimensions.
// The parameter is already a by-value copy, so swapping it into the
// member avoids the second full copy the original assignment performed.
// NOTE(review): numOfNeurons is trusted rather than derived from
// neurons.size() — confirm callers always pass a matching count.
void NLayer::SetNeurons(std::vector<Neuron> neurons, int numOfNeurons, int numOfInputs)
{
	totalInputs = numOfInputs;
	totalNeurons = numOfNeurons;
	this->neurons.swap(neurons);
}
}; // End namespace CarDemo.