-
Notifications
You must be signed in to change notification settings - Fork 0
/
training.py
39 lines (31 loc) · 1.61 KB
/
training.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
import numpy as np
class TrainingMethod(object):
    """Namespace of weight-update rules for reward-modulated Hebbian learning.

    Each rule reads per-layer state off a duck-typed ``layer`` object
    (``layer.network.activations``, ``layer.weights``, ``layer.learning_rate``,
    ``layer.shape``, ...) and rewrites ``layer.weights`` in place, clipped to
    ``layer.network.weight_cap``.
    """

    # Activations are assumed to lie in [0, 1]; scale/offset maps the
    # post-synaptic term to [-1, 1] so updates can be anti-Hebbian too.
    HEBBIAN_SCALE = 2.0
    HEBBIAN_OFFSET = HEBBIAN_SCALE / 2.0
    NO_ERROR = 0.0

    @staticmethod
    def _hebbian_update(layer, modulation, raw_input, raw_output):
        """Apply one modulated Hebbian outer-product update to ``layer``.

        Args:
            layer: layer object whose ``weights`` are updated in place.
            modulation: scalar reward/error signal scaling the update.
            raw_input: pre-synaptic activation vector (used as-is).
            raw_output: post-synaptic activation vector; rescaled from
                [0, 1] to [-1, 1] via HEBBIAN_SCALE / HEBBIAN_OFFSET.
        """
        pre = np.reshape(raw_input, (-1, 1))
        post = np.reshape(
            raw_output * TrainingMethod.HEBBIAN_SCALE - TrainingMethod.HEBBIAN_OFFSET,
            (-1, 1),
        )
        # Fresh exploration noise for every update, one entry per weight.
        noise_mag = layer.network.weight_noise_mag
        noise = np.random.uniform(-noise_mag, noise_mag, layer.shape)
        weight_delta = modulation * layer.learning_rate * (post @ pre.T) + noise
        cap = layer.network.weight_cap
        # Clip keeps weights bounded in [-cap, cap] regardless of update size.
        layer.weights = np.clip(layer.weights + weight_delta, -cap, cap)

    @staticmethod
    def hebbian_learning(layer, modulation):
        """Update ``layer.weights`` from the current network activations.

        Uses the activations on either side of the layer
        (``activations[current_layer]`` / ``activations[current_layer + 1]``)
        as the pre-/post-synaptic terms.
        """
        TrainingMethod._hebbian_update(
            layer,
            modulation,
            layer.network.activations[layer.current_layer],
            layer.network.activations[layer.current_layer + 1],
        )

    @staticmethod
    def hebbian_history_learning(layer, modulation):
        """Replay the layer's history buffer, applying one update per sample.

        Each sample is an ``(input_activations, output_activations)`` pair.
        No-op until ``layer.history_buffer.isfull`` — presumably so updates
        only start once a full window of experience exists (TODO confirm
        against the buffer implementation).
        """
        if layer.history_buffer.isfull:
            for pre_acts, post_acts in layer.history_buffer:
                TrainingMethod._hebbian_update(layer, modulation, pre_acts, post_acts)