-
Notifications
You must be signed in to change notification settings - Fork 3
/
board_test.ino
87 lines (77 loc) · 6.38 KB
/
board_test.ino
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
#include <MicroFlow.h>
// Uncomment/comment if needed
#define TEST_XOR
#define TEST_SIN
#if defined(TEST_XOR)
// Runs a pre-trained 2-2-2-1 MLP on all four XOR truth-table inputs and
// prints each input pair alongside the network's prediction over Serial.
// The original code repeated the feedforward/print sequence four times;
// it is now driven by a table, in the exact same input order.
void XORtest(){
  // Network architecture: 2 inputs, two hidden layers of 2 neurons, 1 output.
  int topology[] = {2, 2, 2, 1};
  int layers = 4;
  // Weights and biases obtained offline by training; flattened layer by layer
  // in the order MicroFlow expects.
  double weights[] = {6.5388827, 2.3116155, 6.5393276, 2.311627, -2.8204367, -2.5849876, 3.4741454, -1.7074409, -2.5904362, -0.8814233};
  double biases[] = {-1.4674287, -3.13011, 0.36903697, -0.27291444, 1.5541532};
  // The four XOR inputs, in the same order the original test exercised them:
  // (0,0), (1,0), (1,1), (0,1).
  double testInputs[4][2] = {{0, 0}, {1, 0}, {1, 1}, {0, 1}};
  double output[1] = {0};
  MicroMLP mlp(layers, topology, weights, biases, SIGMOID);
  for (int i = 0; i < 4; i++) {
    // Feedforward pass; output[0] should approximate XOR of the two inputs.
    mlp.feedforward(testInputs[i], output);
    Serial.print("Inputs: "); Serial.print(testInputs[i][0]); Serial.print(", "); Serial.println(testInputs[i][1]);
    Serial.print("Neural network output: "); Serial.println(output[0]);
    Serial.println();
  }
}
#endif
#if defined(TEST_SIN)
// Evaluates a pre-trained 1-16-16-1 MLP as an approximator of sin(x) over
// [0, pi), printing the network's prediction next to the true sin() value
// for each of 180 one-degree steps.
void SINtest(){
//Neural network architecture: 1 input, two hidden layers of 16, 1 output
int topology[] = {1, 16, 16, 1};
//Total number of layers in the network
int layers = 4;
//The weights obtained by training (flattened layer by layer; 1*16 + 16*16 + 16*1 = 288 values)
double weights[] = {-0.18020691, -0.7092932, 0.53228974, -0.40353614, -0.114111565, -0.2179853, -0.34155884, 0.34106994, -0.76267976, -0.7380614, 0.21569583, -0.2139194, 0.5167245, -0.17401153, -0.20049067, 0.31810948, -0.17641671, -0.11120919, -0.16200458, -0.3888606, -0.6602876, -0.061384033, 0.09223559, 0.37525204, 0.54230714, -0.5420377, 0.40341657, -0.0478172, 0.28266862, -0.16159488, -0.59365463, 0.35424334, 0.3267206, -0.26376727, 0.47587544, -0.33641315, 0.16030453, -0.1666632, 0.40059495, -0.04251305, 0.28301397, 0.5904949, -0.053780366, 0.45721573, 0.25754863, -0.259782, 0.5664863, 0.17266153, 0.25059226, -0.22529407, 0.42905506, -0.33436966, -0.31328633, 0.24930298, 0.44240353, -0.09201871, -0.43963534, -0.5105577, 0.27138227, -0.14273775, -0.34870732, -0.25906843, -0.06970604, 0.18929191, -0.2530467, -0.104917064, 0.0068679363, 0.21536273, -0.30234316, -0.28891304, -0.29332885, 0.32503358, 0.3919649, -0.14170253, 0.11798309, 0.17980376, -0.27192476, 0.4303235, -0.02947342, 0.15107746, -0.2598052, -0.3284807, 0.27634418, -0.57950014, -0.08253329, -0.1491105, 0.010676943, 0.3210985, 0.086009555, 0.023121554, 0.17110659, 0.2914402, -0.5632771, -0.48083237, -0.1702254, -0.6491043, -0.1612504, -0.35910985, -0.43094563, 0.23703146, -0.41070735, 0.05850029, 0.33020738, 0.15090854, 0.25278658, -0.40165377, 0.18157364, 0.18397272, -0.067259625, -0.030623805, -0.38906488, 0.12120787, -0.35681978, -0.015840149, -0.39917877, -0.1926287, 0.15764198, -0.2787829, -0.106904484, -0.1643053, -0.22026277, -0.23380375, -0.26905668, 0.4879554, 0.3085039, 0.056550965, -0.47284326, 0.39882433, -0.3420191, -0.006475827, 0.105744, -0.3470057, 0.30272853, 0.48758823, 0.22344781, -0.043175608, -0.5166243, 0.44461092, -0.30219212, -0.18622203, -0.24710214, -0.27750847, 0.4833279, -0.12194787, -0.27154568, -0.30576727, -0.06120761, 0.11141779, -0.0645274, -0.34420022, 0.096229844, 0.4865377, -0.1540132, 0.23028192, -0.26451805, -0.09253174, -0.18938746, -0.4590808, 0.31901625, 
-0.24549583, -0.085119374, 0.012193947, 0.49030626, 0.070009544, 0.023369858, 0.116173156, 0.40282694, -0.1602629, 0.044649825, 0.0009477779, 0.46261105, 0.3733308, -0.09214332, -0.19407125, 0.5048038, -0.35704875, 0.261917, 0.2097674, 0.26111174, -0.333162, -0.23447216, -0.2689979, -0.3026904, 0.37091032, -0.47135738, 0.2387071, 0.18147138, -0.42423972, 0.28588268, -0.24024953, -0.01607273, 0.1431519, 0.34595066, -0.3051451, 0.32938224, -0.5095388, -0.02898916, 0.14163107, -0.21729809, -0.10920483, 0.39760444, -0.27399564, -0.10921634, 0.32889658, -0.1374087, 0.40601486, -0.44281253, 0.10765017, -0.49679792, -0.118123636, 0.08053128, -0.39885703, 0.36950678, 0.60096014, 0.12667942, -0.20069866, 0.11185153, 0.061183166, 0.53899825, -0.5582451, -0.10679583, -0.1396875, 0.037886318, -0.26643562, -0.16470054, -0.48525092, 0.18584907, -0.8386027, -0.7082003, -0.14894205, 0.2340593, -0.23536055, 0.589809, -1.0950022, 0.111260764, 0.61106294, -0.6224934, -0.28117228, -0.41649967, -0.8452682, -0.5354197, -0.40446323, 0.58809686, -0.93311673, -0.2974728, 0.09037152, 0.65899205, 0.093838304, 0.0036487002, -1.1248379, 0.47060364, 0.9855872, 0.041951336, -0.10897197, -0.9551882, -0.66548353, -0.49177283, 0.20674603, -0.13553445, -0.19582628, 0.648527, 0.39960903, 0.39197195, -0.20611076, -0.28021467, 0.31050676, -0.1316053, -0.52917576, 0.07111866, 0.23193023, 0.6888892, -0.074324824, 0.5296529, -0.29145825, -0.8676659, 0.86751336, -0.19699064, 0.27004054, -1.145208, -0.43273923, -0.09138022, -0.7838448, -0.4405369, -0.30264875, 0.79583466, 0.7128926, -0.820885, 0.80208766};
//The biases obtained by training (one per non-input neuron: 16 + 16 + 1 = 33 values)
double biases[] = {0.38599014, -0.04392276, -0.2874584, 0.3675327, 0.047080062, 0.20073166, 0.31219038, -0.2136711, 0.05555728, -0.05662671, -0.19972037, 0.40990645, -0.79461235, 0.7073865, 0.91056496, -0.48865843, -0.00048907846, 0.0021766948, -0.07139468, 0.039431103, -0.046511196, -0.09260542, -0.028098255, -0.0614371, 0.0024643205, -0.1337608, -0.060558215, 0.13885157, 0.049416065, 0.047094665, -0.12406271, -0.1402871, 0.049416292};
//Inputs and outputs
double inputs[] = {0};
double output[1] = {};
// TANH activation here, versus SIGMOID in the XOR test.
MicroMLP mlp(layers, topology, weights, biases, TANH);
// Sweep 180 one-degree steps from 0 up to (but not including) pi.
for (int i = 0; i < 180; i++) {
// Degrees-to-radians conversion; 3.14 is a coarse pi — NOTE(review): possibly
// the same scaling used during training, so left as-is.
inputs[0] = i * (3.14/180);
//Feedforward pass through the network
mlp.feedforward(inputs, output);
Serial.print("Inputs: "); Serial.println(inputs[0]);
Serial.print("Neural network output: "); Serial.println(output[0]);
// Ground truth for visual comparison against the network's output.
Serial.print("Actual: "); Serial.println(sin(inputs[0]));
Serial.println();
}
}
#endif
// Arduino entry point: bring up the serial link, then run whichever
// self-tests are enabled via the TEST_* defines at the top of the file.
void setup() {
  Serial.begin(9600);
  // Block until the serial port is ready (matters on native-USB boards).
  while (!Serial) {
  }
#if defined(TEST_XOR)
  Serial.println("XOR test\n");
  XORtest();
#endif
#if defined(TEST_SIN)
  Serial.println("SIN test\n");
  SINtest();
#endif
}
// Nothing to do after setup(): the tests run exactly once.
void loop() {
}