
Commit 1606b94

Further reduce the bigram examples
1 parent 4ad35fa commit 1606b94

4 files changed: +9 -94 lines changed

test/training/bigram.expected

Lines changed: 7 additions & 87 deletions
@@ -46,94 +46,13 @@ Epoch 10, batch 0, loss=2.4864
 Epoch 10, batch 100, loss=2.2818
 Epoch 10, batch 200, loss=2.6494
 Epoch 10, epoch loss=570.71
-Epoch 11, batch 0, loss=2.482
-Epoch 11, batch 100, loss=2.2799
-Epoch 11, batch 200, loss=2.6456
-Epoch 11, epoch loss=569.88
-Epoch 12, batch 0, loss=2.4784
-Epoch 12, batch 100, loss=2.2785
-Epoch 12, batch 200, loss=2.6423
-Epoch 12, epoch loss=569.18
-Epoch 13, batch 0, loss=2.4753
-Epoch 13, batch 100, loss=2.2773
-Epoch 13, batch 200, loss=2.6395
-Epoch 13, epoch loss=568.58
-Epoch 14, batch 0, loss=2.4726
-Epoch 14, batch 100, loss=2.2763
-Epoch 14, batch 200, loss=2.6371
-Epoch 14, epoch loss=568.07
-Epoch 15, batch 0, loss=2.4703
-Epoch 15, batch 100, loss=2.2755
-Epoch 15, batch 200, loss=2.635
-Epoch 15, epoch loss=567.62
-Epoch 16, batch 0, loss=2.4682
-Epoch 16, batch 100, loss=2.2749
-Epoch 16, batch 200, loss=2.6331
-Epoch 16, epoch loss=567.22
-Epoch 17, batch 0, loss=2.4664
-Epoch 17, batch 100, loss=2.2744
-Epoch 17, batch 200, loss=2.6314
-Epoch 17, epoch loss=566.87
-Epoch 18, batch 0, loss=2.4648
-Epoch 18, batch 100, loss=2.2739
-Epoch 18, batch 200, loss=2.6299
-Epoch 18, epoch loss=566.56
-Epoch 19, batch 0, loss=2.4634
-Epoch 19, batch 100, loss=2.2736
-Epoch 19, batch 200, loss=2.6286
-Epoch 19, epoch loss=566.28
-Epoch 20, batch 0, loss=2.4621
-Epoch 20, batch 100, loss=2.2733
-Epoch 20, batch 200, loss=2.6274
-Epoch 20, epoch loss=566.03
-Epoch 21, batch 0, loss=2.461
-Epoch 21, batch 100, loss=2.273
-Epoch 21, batch 200, loss=2.6263
-Epoch 21, epoch loss=565.81
-Epoch 22, batch 0, loss=2.4599
-Epoch 22, batch 100, loss=2.2728
-Epoch 22, batch 200, loss=2.6254
-Epoch 22, epoch loss=565.6
-Epoch 23, batch 0, loss=2.459
-Epoch 23, batch 100, loss=2.2727
-Epoch 23, batch 200, loss=2.6245
-Epoch 23, epoch loss=565.41
-Epoch 24, batch 0, loss=2.4581
-Epoch 24, batch 100, loss=2.2725
-Epoch 24, batch 200, loss=2.6237
-Epoch 24, epoch loss=565.24
-Epoch 25, batch 0, loss=2.4573
-Epoch 25, batch 100, loss=2.2724
-Epoch 25, batch 200, loss=2.623
-Epoch 25, epoch loss=565.08
-Epoch 26, batch 0, loss=2.4566
-Epoch 26, batch 100, loss=2.2723
-Epoch 26, batch 200, loss=2.6223
-Epoch 26, epoch loss=564.93
-Epoch 27, batch 0, loss=2.456
-Epoch 27, batch 100, loss=2.2722
-Epoch 27, batch 200, loss=2.6217
-Epoch 27, epoch loss=564.8
-Epoch 28, batch 0, loss=2.4553
-Epoch 28, batch 100, loss=2.2722
-Epoch 28, batch 200, loss=2.6211
-Epoch 28, epoch loss=564.67
-Epoch 29, batch 0, loss=2.4548
-Epoch 29, batch 100, loss=2.2721
-Epoch 29, batch 200, loss=2.6207
-Epoch 29, epoch loss=564.55
-Epoch 30, batch 0, loss=2.4543
-Epoch 30, batch 100, loss=2.2721
-Epoch 30, batch 200, loss=2.6202
-Epoch 30, epoch loss=564.44
 vax
 ah
-prerofaers
+pterofaers
 rmanad
-toniorelilievon
-tpdeee
+tonioreliliewwfctpdeee
 tole
-bieynel
+bkaynel
 n
 ahi
 oni
@@ -142,7 +61,8 @@ a
 krn
 c
 r
-labha
+labik
 a
-calopbrilaeiery
-jthpamahie
+calopconaiajary
+jthqamahie
+hler

test/training/bigram.ml

Lines changed: 1 addition & 1 deletion
@@ -72,7 +72,7 @@ let () =
 
 let open Operation.At in
 let batch_ref = IDX.find_exn sgd_step.bindings batch_n in
-for epoch = 0 to 30 do
+for epoch = 0 to 10 do
 let epoch_loss = ref 0. in
 for batch = 0 to n_batches - 1 do
 batch_ref := batch;
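For context, the loop this commit shortens follows the usual epoch-over-batches shape. The following is a minimal, self-contained OCaml sketch of that shape only: the n_batches value and the train_batch stub are placeholders rather than anything from the repository, and the point is simply that lowering the epoch bound from 30 to 10 proportionally shrinks the number of SGD steps and hence the log lines that bigram.expected records.

    (* Sketch of the epoch-over-batches training loop shape used in
       test/training/bigram.ml; [train_batch] stands in for running one
       compiled SGD step and returning its loss. *)
    let () =
      let n_batches = 250 in          (* placeholder; the test derives this from the data *)
      let train_batch ~epoch ~batch = (* placeholder loss, slowly decaying with progress *)
        2.5 -. (0.001 *. float_of_int (epoch * n_batches + batch)) /. float_of_int n_batches
      in
      for epoch = 0 to 10 do          (* was [0 to 30] before this commit *)
        let epoch_loss = ref 0. in
        for batch = 0 to n_batches - 1 do
          let loss = train_batch ~epoch ~batch in
          epoch_loss := !epoch_loss +. loss;
          if batch mod 100 = 0 then
            Printf.printf "Epoch %d, batch %d, loss=%.4g\n" epoch batch loss
        done;
        Printf.printf "Epoch %d, epoch loss=%.2f\n" epoch !epoch_loss
      done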

test/training/bigram_mlp.expected

Lines changed: 0 additions & 5 deletions
@@ -7,11 +7,6 @@ Epoch 1, epoch loss=565
 Epoch 2, epoch loss=562
 Epoch 3, epoch loss=561
 Epoch 4, epoch loss=560
-Epoch 5, epoch loss=560
-Epoch 6, epoch loss=559
-Epoch 7, epoch loss=559
-Epoch 8, epoch loss=558
-Epoch 9, epoch loss=558
 nnou
 kyns
 hain

test/training/bigram_mlp.ml

Lines changed: 1 addition & 1 deletion
@@ -44,7 +44,7 @@ let () =
 let n_batches = input_size / batch_size in
 let batch_n, bindings = IDX.get_static_symbol ~static_range:n_batches IDX.empty in
 let step_n, bindings = IDX.get_static_symbol bindings in
-let epochs = 10 in
+let epochs = 5 in
 let steps = epochs * n_batches in
 
 let%op input_gram = inputs @| batch_n in
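The corresponding knob in the MLP test is the steps = epochs * n_batches product, which sizes the static step_n range. A tiny self-contained sketch of that arithmetic, with placeholder data sizes (the real input_size and batch_size come from the test's setup, not shown here):

    (* Halving [epochs] from 10 to 5 halves the number of SGD steps
       the expected output reflects; the sizes below are illustrative only. *)
    let () =
      let input_size = 32000 and batch_size = 128 in (* placeholders *)
      let n_batches = input_size / batch_size in
      let epochs = 5 in
      let steps = epochs * n_batches in
      Printf.printf "n_batches=%d, steps=%d\n" n_batches steps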
