Commit 69eb133

Make training test expectations more robust (less precision)
Signed-off-by: Lukasz Stafiniak <lukstafi@gmail.com>
1 parent: 8c221e5

File tree

8 files changed: +357 -514 lines changed

test/training/bigram.expected

Lines changed: 124 additions & 124 deletions
@@ -2,130 +2,130 @@ Retrieving commandline, environment, or config file variable ocannl_log_level
 Found 0, in the config file
 bigrams: 228146
 input_size: 229000
-Epoch 0, batch 0, loss=3.384207
-Epoch 0, batch 100, loss=2.709038
-Epoch 0, batch 200, loss=2.945771
-Epoch 0, epoch loss=680.202888
-Epoch 1, batch 0, loss=2.788706
-Epoch 1, batch 100, loss=2.425731
-Epoch 1, batch 200, loss=2.814660
-Epoch 1, epoch loss=616.822069
-Epoch 2, batch 0, loss=2.649548
-Epoch 2, batch 100, loss=2.358366
-Epoch 2, batch 200, loss=2.755350
-Epoch 2, epoch loss=597.546664
-Epoch 3, batch 0, loss=2.588842
-Epoch 3, batch 100, loss=2.329691
-Epoch 3, batch 200, loss=2.721706
-Epoch 3, epoch loss=588.281096
-Epoch 4, batch 0, loss=2.554565
-Epoch 4, batch 100, loss=2.313664
-Epoch 4, batch 200, loss=2.700217
-Epoch 4, epoch loss=582.759826
-Epoch 5, batch 0, loss=2.532644
-Epoch 5, batch 100, loss=2.303379
-Epoch 5, batch 200, loss=2.685341
-Epoch 5, epoch loss=579.075711
-Epoch 6, batch 0, loss=2.517525
-Epoch 6, batch 100, loss=2.296246
-Epoch 6, batch 200, loss=2.674406
-Epoch 6, epoch loss=576.441672
-Epoch 7, batch 0, loss=2.506522
-Epoch 7, batch 100, loss=2.291044
-Epoch 7, batch 200, loss=2.666012
-Epoch 7, epoch loss=574.466705
-Epoch 8, batch 0, loss=2.498178
-Epoch 8, batch 100, loss=2.287141
-Epoch 8, batch 200, loss=2.659344
-Epoch 8, epoch loss=572.932843
-Epoch 9, batch 0, loss=2.491635
-Epoch 9, batch 100, loss=2.284144
-Epoch 9, batch 200, loss=2.653916
-Epoch 9, epoch loss=571.708377
-Epoch 10, batch 0, loss=2.486362
-Epoch 10, batch 100, loss=2.281802
-Epoch 10, batch 200, loss=2.649396
-Epoch 10, epoch loss=570.709282
-Epoch 11, batch 0, loss=2.482021
-Epoch 11, batch 100, loss=2.279949
-Epoch 11, batch 200, loss=2.645595
-Epoch 11, epoch loss=569.879609
-Epoch 12, batch 0, loss=2.478378
-Epoch 12, batch 100, loss=2.278475
-Epoch 12, batch 200, loss=2.642342
-Epoch 12, epoch loss=569.180244
-Epoch 13, batch 0, loss=2.475274
-Epoch 13, batch 100, loss=2.277280
-Epoch 13, batch 200, loss=2.639534
-Epoch 13, epoch loss=568.582958
-Epoch 14, batch 0, loss=2.472606
-Epoch 14, batch 100, loss=2.276310
-Epoch 14, batch 200, loss=2.637096
-Epoch 14, epoch loss=568.067485
-Epoch 15, batch 0, loss=2.470286
-Epoch 15, batch 100, loss=2.275532
-Epoch 15, batch 200, loss=2.634959
-Epoch 15, epoch loss=567.618418
-Epoch 16, batch 0, loss=2.468248
-Epoch 16, batch 100, loss=2.274891
-Epoch 16, batch 200, loss=2.633074
-Epoch 16, epoch loss=567.223836
-Epoch 17, batch 0, loss=2.466446
-Epoch 17, batch 100, loss=2.274361
-Epoch 17, batch 200, loss=2.631402
-Epoch 17, epoch loss=566.874570
-Epoch 18, batch 0, loss=2.464850
-Epoch 18, batch 100, loss=2.273928
-Epoch 18, batch 200, loss=2.629910
-Epoch 18, epoch loss=566.563456
-Epoch 19, batch 0, loss=2.463427
-Epoch 19, batch 100, loss=2.273579
-Epoch 19, batch 200, loss=2.628594
-Epoch 19, epoch loss=566.284741
-Epoch 20, batch 0, loss=2.462142
-Epoch 20, batch 100, loss=2.273286
-Epoch 20, batch 200, loss=2.627399
-Epoch 20, epoch loss=566.033497
-Epoch 21, batch 0, loss=2.460989
-Epoch 21, batch 100, loss=2.273043
-Epoch 21, batch 200, loss=2.626322
-Epoch 21, epoch loss=565.806087
-Epoch 22, batch 0, loss=2.459947
-Epoch 22, batch 100, loss=2.272847
-Epoch 22, batch 200, loss=2.625357
-Epoch 22, epoch loss=565.599298
-Epoch 23, batch 0, loss=2.459003
-Epoch 23, batch 100, loss=2.272677
-Epoch 23, batch 200, loss=2.624472
-Epoch 23, epoch loss=565.410496
-Epoch 24, batch 0, loss=2.458137
-Epoch 24, batch 100, loss=2.272539
-Epoch 24, batch 200, loss=2.623674
-Epoch 24, epoch loss=565.237560
-Epoch 25, batch 0, loss=2.457346
-Epoch 25, batch 100, loss=2.272424
-Epoch 25, batch 200, loss=2.622964
-Epoch 25, epoch loss=565.078496
-Epoch 26, batch 0, loss=2.456622
-Epoch 26, batch 100, loss=2.272328
-Epoch 26, batch 200, loss=2.622294
-Epoch 26, epoch loss=564.931741
-Epoch 27, batch 0, loss=2.455961
-Epoch 27, batch 100, loss=2.272249
-Epoch 27, batch 200, loss=2.621692
-Epoch 27, epoch loss=564.795990
-Epoch 28, batch 0, loss=2.455342
-Epoch 28, batch 100, loss=2.272188
-Epoch 28, batch 200, loss=2.621143
-Epoch 28, epoch loss=564.670042
-Epoch 29, batch 0, loss=2.454777
-Epoch 29, batch 100, loss=2.272128
-Epoch 29, batch 200, loss=2.620651
-Epoch 29, epoch loss=564.552950
-Epoch 30, batch 0, loss=2.454257
-Epoch 30, batch 100, loss=2.272082
-Epoch 30, batch 200, loss=2.620183
-Epoch 30, epoch loss=564.443695
+Epoch 0, batch 0, loss=3.38421
+Epoch 0, batch 100, loss=2.70904
+Epoch 0, batch 200, loss=2.94577
+Epoch 0, epoch loss=680.203
+Epoch 1, batch 0, loss=2.78871
+Epoch 1, batch 100, loss=2.42573
+Epoch 1, batch 200, loss=2.81466
+Epoch 1, epoch loss=616.822
+Epoch 2, batch 0, loss=2.64955
+Epoch 2, batch 100, loss=2.35837
+Epoch 2, batch 200, loss=2.75535
+Epoch 2, epoch loss=597.547
+Epoch 3, batch 0, loss=2.58884
+Epoch 3, batch 100, loss=2.32969
+Epoch 3, batch 200, loss=2.72171
+Epoch 3, epoch loss=588.281
+Epoch 4, batch 0, loss=2.55456
+Epoch 4, batch 100, loss=2.31366
+Epoch 4, batch 200, loss=2.70022
+Epoch 4, epoch loss=582.76
+Epoch 5, batch 0, loss=2.53264
+Epoch 5, batch 100, loss=2.30338
+Epoch 5, batch 200, loss=2.68534
+Epoch 5, epoch loss=579.076
+Epoch 6, batch 0, loss=2.51753
+Epoch 6, batch 100, loss=2.29625
+Epoch 6, batch 200, loss=2.67441
+Epoch 6, epoch loss=576.442
+Epoch 7, batch 0, loss=2.50652
+Epoch 7, batch 100, loss=2.29104
+Epoch 7, batch 200, loss=2.66601
+Epoch 7, epoch loss=574.467
+Epoch 8, batch 0, loss=2.49818
+Epoch 8, batch 100, loss=2.28714
+Epoch 8, batch 200, loss=2.65934
+Epoch 8, epoch loss=572.933
+Epoch 9, batch 0, loss=2.49164
+Epoch 9, batch 100, loss=2.28414
+Epoch 9, batch 200, loss=2.65392
+Epoch 9, epoch loss=571.708
+Epoch 10, batch 0, loss=2.48636
+Epoch 10, batch 100, loss=2.2818
+Epoch 10, batch 200, loss=2.6494
+Epoch 10, epoch loss=570.709
+Epoch 11, batch 0, loss=2.48202
+Epoch 11, batch 100, loss=2.27995
+Epoch 11, batch 200, loss=2.6456
+Epoch 11, epoch loss=569.88
+Epoch 12, batch 0, loss=2.47838
+Epoch 12, batch 100, loss=2.27847
+Epoch 12, batch 200, loss=2.64234
+Epoch 12, epoch loss=569.18
+Epoch 13, batch 0, loss=2.47527
+Epoch 13, batch 100, loss=2.27728
+Epoch 13, batch 200, loss=2.63953
+Epoch 13, epoch loss=568.583
+Epoch 14, batch 0, loss=2.47261
+Epoch 14, batch 100, loss=2.27631
+Epoch 14, batch 200, loss=2.6371
+Epoch 14, epoch loss=568.067
+Epoch 15, batch 0, loss=2.47029
+Epoch 15, batch 100, loss=2.27553
+Epoch 15, batch 200, loss=2.63496
+Epoch 15, epoch loss=567.618
+Epoch 16, batch 0, loss=2.46825
+Epoch 16, batch 100, loss=2.27489
+Epoch 16, batch 200, loss=2.63307
+Epoch 16, epoch loss=567.224
+Epoch 17, batch 0, loss=2.46645
+Epoch 17, batch 100, loss=2.27436
+Epoch 17, batch 200, loss=2.6314
+Epoch 17, epoch loss=566.875
+Epoch 18, batch 0, loss=2.46485
+Epoch 18, batch 100, loss=2.27393
+Epoch 18, batch 200, loss=2.62991
+Epoch 18, epoch loss=566.563
+Epoch 19, batch 0, loss=2.46343
+Epoch 19, batch 100, loss=2.27358
+Epoch 19, batch 200, loss=2.62859
+Epoch 19, epoch loss=566.285
+Epoch 20, batch 0, loss=2.46214
+Epoch 20, batch 100, loss=2.27329
+Epoch 20, batch 200, loss=2.6274
+Epoch 20, epoch loss=566.033
+Epoch 21, batch 0, loss=2.46099
+Epoch 21, batch 100, loss=2.27304
+Epoch 21, batch 200, loss=2.62632
+Epoch 21, epoch loss=565.806
+Epoch 22, batch 0, loss=2.45995
+Epoch 22, batch 100, loss=2.27285
+Epoch 22, batch 200, loss=2.62536
+Epoch 22, epoch loss=565.599
+Epoch 23, batch 0, loss=2.459
+Epoch 23, batch 100, loss=2.27268
+Epoch 23, batch 200, loss=2.62447
+Epoch 23, epoch loss=565.41
+Epoch 24, batch 0, loss=2.45814
+Epoch 24, batch 100, loss=2.27254
+Epoch 24, batch 200, loss=2.62367
+Epoch 24, epoch loss=565.238
+Epoch 25, batch 0, loss=2.45735
+Epoch 25, batch 100, loss=2.27242
+Epoch 25, batch 200, loss=2.62296
+Epoch 25, epoch loss=565.078
+Epoch 26, batch 0, loss=2.45662
+Epoch 26, batch 100, loss=2.27233
+Epoch 26, batch 200, loss=2.62229
+Epoch 26, epoch loss=564.932
+Epoch 27, batch 0, loss=2.45596
+Epoch 27, batch 100, loss=2.27225
+Epoch 27, batch 200, loss=2.62169
+Epoch 27, epoch loss=564.796
+Epoch 28, batch 0, loss=2.45534
+Epoch 28, batch 100, loss=2.27219
+Epoch 28, batch 200, loss=2.62114
+Epoch 28, epoch loss=564.67
+Epoch 29, batch 0, loss=2.45478
+Epoch 29, batch 100, loss=2.27213
+Epoch 29, batch 200, loss=2.62065
+Epoch 29, epoch loss=564.553
+Epoch 30, batch 0, loss=2.45426
+Epoch 30, batch 100, loss=2.27208
+Epoch 30, batch 200, loss=2.62018
+Epoch 30, epoch loss=564.444
 vax
 ah
 prerofaers

test/training/bigram.ml

Lines changed: 2 additions & 2 deletions
@@ -79,9 +79,9 @@ let () =
       Train.run sgd_step;
       let loss = batch_loss.@[0] in
      epoch_loss := !epoch_loss +. loss;
-      if batch % 100 = 0 then Stdio.printf "Epoch %d, batch %d, loss=%f\n%!" epoch batch loss;
+      if batch % 100 = 0 then Stdio.printf "Epoch %d, batch %d, loss=%.6g\n%!" epoch batch loss;
     done;
-    Stdio.printf "Epoch %d, epoch loss=%f\n%!" epoch !epoch_loss
+    Stdio.printf "Epoch %d, epoch loss=%.6g\n%!" epoch !epoch_loss
   done;
   (* Train.printf_tree batch_loss; *)
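For context, the whole change is in the print precision: %f (always six digits after the decimal point) becomes %.6g (at most six significant digits), and the expected outputs above shrink accordingly. A minimal sketch of the difference, using the stdlib's Printf rather than the Stdio library the test uses (both follow C-style format semantics), with loss values copied from the expected-output diff above:

let () =
  (* Values taken from the expected-output diff above. *)
  let losses = [ 3.384207; 680.202888; 564.443695 ] in
  List.iter
    (fun loss ->
      (* %f: always six digits after the decimal point. *)
      Printf.printf "%%f   -> %f\n" loss;
      (* %.6g: at most six significant digits, trailing zeros dropped. *)
      Printf.printf "%%.6g -> %.6g\n" loss)
    losses

This prints 3.384207 / 3.38421, 680.202888 / 680.203, and 564.443695 / 564.444: for epoch losses in the hundreds, %.6g keeps only three decimal places, which is exactly the slack that makes the expectations less sensitive to floating-point drift.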
