-
Notifications
You must be signed in to change notification settings - Fork 0
/
optimization_utilities.py
109 lines (78 loc) · 3.77 KB
/
optimization_utilities.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
#!python3
# Copyright (C) 2020 Victor O. Costa
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import numpy as np
import data_handling
import laguerre_volterra_network_structure
# Normalized mean squared error
def NMSE(y, y_pred, alpha):
    """Compute the normalized mean squared error between actual and predicted signals.

    The first M samples are discarded before comparison, where M is the
    Laguerre filter memory implied by the alpha parameter.

    Parameters:
        y      : actual output signal (list-like of floats)
        y_pred : predicted output signal (list-like of floats), same length as y
        alpha  : Laguerre smoothing parameter in (0, 1); determines system memory

    Returns:
        float, sum(error^2) / sum(y[M:]^2) over the post-transient samples.

    Raises:
        ValueError if the signals differ in length or are not longer than M.
    """
    if len(y) != len(y_pred):
        # Raise instead of print + exit(-1): library code should not kill the process
        raise ValueError("Actual and predicted y have different lengths")

    # Laguerre alpha parameter determines the system memory (find reference for formula)
    M = laguerre_volterra_network_structure.laguerre_filter_memory(alpha)
    if len(y) <= M:
        raise ValueError("Data length is less than required by the alpha parameter")

    y = np.array(y)
    y_pred = np.array(y_pred)
    # Skip the first M samples (filter transient) when accumulating the error
    error = y[M:] - y_pred[M:]
    # np.sum instead of builtin sum: vectorized reduction over ndarrays
    nmse = np.sum(error ** 2) / np.sum(y[M:] ** 2)

    return nmse
# Normalized mean squared error
def NMSE_explicit_memory(y, y_pred, M):
    """Compute the normalized mean squared error with an explicitly given memory M.

    The first M samples are discarded before comparison (filter transient).

    Parameters:
        y      : actual output signal (list-like of floats)
        y_pred : predicted output signal (list-like of floats), same length as y
        M      : number of initial samples to discard (non-negative int)

    Returns:
        float, sum(error^2) / sum(y[M:]^2) over the post-transient samples.

    Raises:
        ValueError if the signals differ in length or are not longer than M.
    """
    if len(y) != len(y_pred):
        # Raise instead of print + exit(-1): library code should not kill the process
        raise ValueError("Actual and predicted y have different lengths")
    if len(y) <= M:
        raise ValueError("Data length is less than required by the memory parameter")

    y = np.array(y)
    y_pred = np.array(y_pred)
    error = y[M:] - y_pred[M:]
    # np.sum instead of builtin sum: vectorized reduction over ndarrays
    nmse = np.sum(error ** 2) / np.sum(y[M:] ** 2)

    return nmse
# Break flat list-like solution into [alpha, W, C, offset] for a given LVN structure
def decode_solution(candidate_solution, L, H, Q):
    """Split a flat candidate solution into its structured parameters.

    Layout of the flat vector: [alpha, W flattened row-wise (H rows of L),
    C flattened row-wise (H rows of Q), offset].

    Parameters:
        candidate_solution : flat list-like of length 1 + H*L + H*Q + 1
        L, H, Q            : LVN structure (filters, hidden units, polynomial order)

    Returns:
        tuple (alpha, W, C, offset) where W is H rows of L weights and
        C is H rows of Q coefficients.
    """
    num_weights = H * L
    num_coefficients = H * Q

    # Slice boundaries of each segment in the flat vector
    alpha = candidate_solution[0]
    flat_W = candidate_solution[1 : 1 + num_weights]
    flat_C = candidate_solution[1 + num_weights : 1 + num_weights + num_coefficients]
    offset = candidate_solution[1 + num_weights + num_coefficients]

    # Rebuild one row per hidden unit from the flat segments
    W = [flat_W[unit * L : (unit + 1) * L] for unit in range(H)]
    C = [flat_C[unit * Q : (unit + 1) * Q] for unit in range(H)]

    return alpha, W, C, offset
# Compute cost of candidate solution, which is encoded as a flat array: alpha, W(0,0) ... W(L-1,H-1), C(0,0) ... C(Q-1,H-1), offset
def define_cost(L, H, Q, Fs, train_filename):
    """Build and return a cost function for a fixed LVN structure and training set.

    Parameters:
        L, H, Q        : LVN structure (filters, hidden units, polynomial order)
        Fs             : sampling frequency; the LVN uses a 1/Fs sampling period
        train_filename : path of the training IO data file read on every evaluation

    Returns:
        compute_cost(candidate_solution, modified_variable) -> float NMSE cost.
    """
    # Cost computation parameterized by the nesting function (define_cost)
    # modified_variable indicates which parameters were modified in the solution. -1 if all of them were.
    def compute_cost(candidate_solution, modified_variable):
        # IO
        train_input, train_output = data_handling.read_io(train_filename)

        # Get parameters from candidate solution
        alpha, W, C, offset = decode_solution(candidate_solution, L, H, Q)

        # If the weights were modified, set flag so LVN normalizes weights and scales coefficients before output computation
        # (indices 1 .. L*H of the flat solution hold the weights; -1 means everything changed)
        weights_modified = (modified_variable == -1) or (1 <= modified_variable <= L * H)

        # Generate output and compute cost
        lvn = laguerre_volterra_network_structure.LVN()
        lvn.define_structure(L, H, Q, 1 / Fs)
        predicted_output = lvn.compute_output(train_input, alpha, W, C, offset, weights_modified)

        return NMSE(train_output, predicted_output, alpha)

    return compute_cost