-
-
Notifications
You must be signed in to change notification settings - Fork 1k
/
Perceptron.cpp
126 lines (107 loc) · 2.91 KB
/
Perceptron.cpp
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
/*
* This software is distributed under BSD 3-clause license (see LICENSE file).
*
* Authors: Soeren Sonnenburg, Sergey Lisitsyn, Giovanni De Toni,
* Michele Mazzoni, Heiko Strathmann, Fernando Iglesias
*/
#include <shogun/base/range.h>
#include <shogun/classifier/Perceptron.h>
#include <shogun/features/iterators/DotIterator.h>
#include <shogun/labels/BinaryLabels.h>
#include <shogun/labels/Labels.h>
#include <shogun/lib/Signal.h>
#include <shogun/mathematics/Math.h>
#include <shogun/mathematics/linalg/LinalgNamespace.h>
using namespace shogun;
/** Default constructor.
 *
 * Sets up the default hyperparameters and registers the model
 * parameters with the parameter framework (see init()).
 */
CPerceptron::CPerceptron()
{
	// Base class CLinearMachine is default-constructed implicitly.
	init();
}
/** Constructor that also attaches training data.
 *
 * @param traindat dot features to train on
 * @param trainlab labels for the training data
 */
CPerceptron::CPerceptron(CDotFeatures* traindat, CLabels* trainlab)
{
	// Base class CLinearMachine is default-constructed implicitly.
	init();

	// Store the supplied training data on the machine.
	set_features(traindat);
	set_labels(trainlab);
}
/** Sets default hyperparameter values and registers the parameters
 *  with Shogun's parameter framework (serialization / model selection).
 */
void CPerceptron::init()
{
	// Default hyperparameters.
	max_iter = 1000;
	learn_rate = 0.1;
	m_initialize_hyperplane = true;

	// Register parameters so they are serialized and exposed for
	// model selection (MS_AVAILABLE). The string names form part of
	// the public parameter interface — do not change them.
	SG_ADD(
	    &m_initialize_hyperplane, "initialize_hyperplane",
	    "Whether to initialize hyperplane.", MS_AVAILABLE);
	SG_ADD(&max_iter, "max_iter", "Maximum number of iterations.", MS_AVAILABLE);
	SG_ADD(&learn_rate, "learn_rate", "Learning rate.", MS_AVAILABLE);
}
/** Destructor — no resources owned beyond what the base class manages. */
CPerceptron::~CPerceptron() = default;
/** Trains the perceptron with the classical mistake-driven update rule.
 *
 * Iterates over the training vectors; whenever the sign of the current
 * prediction disagrees with the true +1/-1 label, the weight vector and
 * bias are nudged by learn_rate * label. Training stops after a full
 * pass with no mistakes (converged) or after max_iter passes.
 *
 * @param data optional features to train on; if given they replace the
 *             machine's current features and must have the FP_DOT
 *             property (i.e. be CDotFeatures)
 * @return true if the algorithm converged within max_iter iterations
 */
bool CPerceptron::train_machine(CFeatures* data)
{
	ASSERT(m_labels)
	if (data)
	{
		// Training requires dot-product capable features.
		if (!data->has_property(FP_DOT))
			SG_ERROR("Specified features are not of type CDotFeatures\n")
		set_features((CDotFeatures*) data);
	}
	ASSERT(features)

	bool converged=false;
	int32_t iter=0;
	// Labels as +1/-1 integers; m_labels must be (convertible to) binary.
	SGVector<int32_t> train_labels = binary_labels(m_labels)->get_int_labels();
	int32_t num_feat=features->get_dim_feature_space();
	int32_t num_vec=features->get_num_vectors();
	// One label per training vector.
	ASSERT(num_vec==train_labels.vlen)

	// Per-vector raw outputs of the last pass.
	SGVector<float64_t> output(num_vec);
	SGVector<float64_t> w;
	if (m_initialize_hyperplane)
	{
		w = SGVector<float64_t>(num_feat);
		// NOTE(review): w appears to share its underlying buffer with the
		// vector stored via set_w(), so the in-place updates below also
		// update the machine's weights — confirm SGVector's ref-counting
		// semantics before reordering these statements.
		set_w(w);
		//start with uniform w, bias=0
		w.set_const(1.0 / num_feat);
		bias=0;
	}
	else
	{
		// Warm start: continue from the machine's current weights/bias.
		w = get_w();
	}

	//loop till we either get everything classified right or reach max_iter
	while (!converged && iter < max_iter)
	{
		// Project macro: honors premature-stop / progress signals.
		COMPUTATION_CONTROLLERS
		// Assume convergence; any mistake in this pass clears the flag.
		converged=true;
		auto iter_train_labels = train_labels.begin();
		auto iter_output = output.begin();

		for (const auto& v : DotIterator(features))
		{
			const auto true_label = *(iter_train_labels++);
			auto& predicted_label = *(iter_output++);

			predicted_label = v.dot(w) + bias;

			// Mistake-driven update: move hyperplane toward the true label.
			if (CMath::sign<float64_t>(predicted_label) != true_label)
			{
				converged = false;
				const auto gradient = learn_rate * true_label;
				bias += gradient;
				// w += gradient * v (in place)
				v.add(gradient, w);
			}
		}

		iter++;
	}

	if (converged)
		SG_INFO("Perceptron algorithm converged after %d iterations.\n", iter)
	else
		SG_WARNING("Perceptron algorithm did not converge after %d iterations.\n", max_iter)

	return converged;
}
/** Controls whether train_machine() re-initializes the hyperplane
 *  (uniform weights, zero bias) or warm-starts from the current weights.
 *
 * @param initialize_hyperplane true to re-initialize before training
 */
void CPerceptron::set_initialize_hyperplane(bool initialize_hyperplane)
{
	m_initialize_hyperplane = initialize_hyperplane;
}
/** @return whether train_machine() will re-initialize the hyperplane
 *          before training (see set_initialize_hyperplane()) */
bool CPerceptron::get_initialize_hyperplane()
{
	return m_initialize_hyperplane;
}