-
Notifications
You must be signed in to change notification settings - Fork 182
/
perceptron.py
174 lines (146 loc) · 5.7 KB
/
perceptron.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
import numpy as np
from skmultiflow.core import BaseSKMObject, ClassifierMixin
from sklearn.linear_model import Perceptron
class PerceptronMask(BaseSKMObject, ClassifierMixin):
    """ Streaming-friendly wrapper ("mask") around ``sklearn.linear_model.Perceptron``.

    scikit-multiflow expects estimators to expose interfaces that are not
    all present in scikit-learn; this class adapts the Perceptron
    classifier so it can be used on data streams.

    Examples
    --------
    >>> # Imports
    >>> from skmultiflow.neural_networks import PerceptronMask
    >>> from skmultiflow.data import SEAGenerator
    >>>
    >>> # Setup a data stream
    >>> stream = SEAGenerator(random_state=1)
    >>>
    >>> # Setup the Perceptron Mask
    >>> perceptron = PerceptronMask()
    >>>
    >>> n_samples = 0
    >>> correct_cnt = 0
    >>> while n_samples < 5000 and stream.has_more_samples():
    >>>     X, y = stream.next_sample()
    >>>     my_pred = perceptron.predict(X)
    >>>     if y[0] == my_pred[0]:
    >>>         correct_cnt += 1
    >>>     perceptron.partial_fit(X, y, classes=stream.target_values)
    >>>     n_samples += 1
    >>>
    >>> # Display the results
    >>> print('Perceptron Mask usage example')
    >>> print('{} samples analyzed'.format(n_samples))
    >>> print("Perceptron's performance: {}".format(correct_cnt / n_samples))
    """
    def __init__(self,
                 penalty=None,
                 alpha=0.0001,
                 fit_intercept=True,
                 max_iter=1000,
                 tol=0.001,
                 shuffle=True,
                 verbose=0,
                 eta0=1.0,
                 n_jobs=None,
                 random_state=0,
                 early_stopping=False,
                 validation_fraction=0.1,
                 n_iter_no_change=5,
                 class_weight=None,
                 warm_start=False):
        # Collect every hyper-parameter once; the same mapping feeds both
        # the public attributes (needed for sklearn-style get_params /
        # set_params introspection) and the wrapped estimator.
        hyperparams = {
            'penalty': penalty,
            'alpha': alpha,
            'fit_intercept': fit_intercept,
            'max_iter': max_iter,
            'tol': tol,
            'shuffle': shuffle,
            'verbose': verbose,
            'eta0': eta0,
            'n_jobs': n_jobs,
            'random_state': random_state,
            'early_stopping': early_stopping,
            'validation_fraction': validation_fraction,
            'n_iter_no_change': n_iter_no_change,
            'class_weight': class_weight,
            'warm_start': warm_start,
        }
        for name, value in hyperparams.items():
            setattr(self, name, value)
        super().__init__()
        # The adapted scikit-learn estimator that does the actual work.
        self.classifier = Perceptron(**hyperparams)

    def fit(self, X, y, classes=None, sample_weight=None):
        """ Fit the wrapped Perceptron on the full batch ``(X, y)``.

        Parameters
        ----------
        X: numpy.ndarray of shape (n_samples, n_features)
            The feature matrix.
        y: Array-like
            The class labels for all samples in X.
        classes: Not used by this method; kept for interface compatibility.
        sample_weight:
            Per-sample weights. Uniform weights are assumed when omitted.

        Returns
        -------
        PerceptronMask
            self
        """
        self.classifier.fit(X, y, sample_weight=sample_weight)
        return self

    def partial_fit(self, X, y, classes=None, sample_weight=None):
        """ Incrementally fit the wrapped Perceptron on one batch.

        Delegates to ``sklearn.linear_model.Perceptron.partial_fit``.

        Parameters
        ----------
        X: numpy.ndarray of shape (n_samples, n_features)
            The feature matrix.
        y: Array-like
            The class labels for all samples in X.
        classes: Array-like, optional
            All possible target labels; forwarded to the underlying
            estimator, which requires it on the first call.
        sample_weight:
            Per-sample weights. Uniform weights are assumed when omitted.

        Returns
        -------
        PerceptronMask
            self
        """
        self.classifier.partial_fit(X, y, classes, sample_weight)
        return self

    def predict(self, X):
        """ Predict class labels for the samples in X.

        Parameters
        ----------
        X: numpy.ndarray of shape (n_samples, n_features)
            The feature matrix.

        Returns
        -------
        numpy.ndarray
            The predicted label for each instance in X.
        """
        predictions = self.classifier.predict(X)
        return np.asarray(predictions)

    def predict_proba(self, X):
        """ Estimate the probability of each sample belonging to each known class.

        Perceptron exposes no native ``predict_proba``; the probabilities
        are obtained by applying the logistic transform to the decision
        scores of the underlying linear model.

        Parameters
        ----------
        X: numpy.ndarray of shape (n_samples, n_features)
            The samples to score.

        Returns
        -------
        numpy.ndarray
            Array of shape (n_samples, n_classes); entry [i, j] is the
            estimated probability that sample i belongs to class j.
        """
        # NOTE: relies on a private sklearn helper — may break across
        # sklearn versions; confirm on upgrade.
        return self.classifier._predict_proba_lr(X)