Commit 72d12ba

Prepare for pre-release 0.4.1~beta2

1 parent 3e0596a

8 files changed: +104 -18 lines

arrayjit.opam

Lines changed: 1 addition & 1 deletion
@@ -1,6 +1,6 @@
 # This file is generated by dune, edit dune-project instead
 opam-version: "2.0"
-version: "0.4.1~beta"
+version: "0.4.1~beta2"
 synopsis:
   "An array language compiler with multiple backends (CPU, CUDA), staged compilation"
 description:

bin/moons_benchmark.ml

Lines changed: 2 additions & 2 deletions
@@ -46,8 +46,8 @@ let classify_moons ~seed ~on_device ~inlining_cutoff ~num_devices ~batch_size ~b
   (* let data_len = 3 * 4 in *)
   let flat_len = data_len / 2 in
   (* Note: [minibatch_size = batch_size / num_devices] is the actual per-device batch used. *)
-  let epochs = 200 in
-  (* let epochs = 50 in *)
+  (* let epochs = 200 in *)
+  let epochs = 100 in
   (* TINY for debugging: *)
   (* let epochs = 2 in *)
   (* let epochs = 1 in *)
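As an aside on the per-device batch note kept in the context above, here is a minimal sketch of the arithmetic; the helper name and the sample values are illustrative, not part of this commit, and it assumes batch_size divides evenly among devices:

(* Illustrative only: the actual per-device batch used, per the note above. *)
let minibatch_size ~batch_size ~num_devices = batch_size / num_devices

let () =
  (* e.g. a global batch of 20 split over 4 devices is 5 samples per device *)
  assert (minibatch_size ~batch_size:20 ~num_devices:4 = 5)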

dune-project

Lines changed: 1 addition & 1 deletion
@@ -4,7 +4,7 @@
 
 (name ocannl)
 
-(version 0.4.1~beta)
+(version 0.4.1~beta2)
 
 (generate_opam_files true)
 

lib/tensor.mli

Lines changed: 6 additions & 6 deletions
@@ -44,16 +44,16 @@ val with_unchanged_roots : f:(unit -> 'a) -> 'a
 val default_value_prec : Arrayjit.Ops.prec ref
 (** The default precision for the value node of terminal (i.e. non-composite) tensors.
 
-    Note: the precision of a node can be set arbitrarily via {!Tnode.update_precision}. The default
-    precision for value nodes of composite tensors is the maximum of precisions of the value nodes
-    of sub-tensors. *)
+    Note: the precision of a node can be set arbitrarily via {!Arrayjit.Tnode.update_precision}. The
+    default precision for value nodes of composite tensors is the maximum of precisions of the value
+    nodes of sub-tensors. *)
 
 val default_grad_prec : Arrayjit.Ops.prec ref
 (** The default precision for the gradient node of terminal (i.e. non-composite) tensors.
 
-    Note: the precision of a node can be set arbitrarily via {!Tnode.update_precision}. The default
-    precision for gradient nodes of composite tensors is the maximum of precisions of the gradient
-    nodes of sub-tensors. *)
+    Note: the precision of a node can be set arbitrarily via {!Arrayjit.Tnode.update_precision}. The
+    default precision for gradient nodes of composite tensors is the maximum of precisions of the
+    gradient nodes of sub-tensors. *)
 
 exception Session_error of string * t option
 
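The doc comments above describe the precision-default machinery. As a hedged illustration of how a caller might use these refs (the values Arrayjit.Ops.single and Arrayjit.Ops.half are assumptions about the Ops API, not confirmed by this diff; only the two refs and Arrayjit.Tnode.update_precision appear in the interface):

(* Sketch: lower the default precisions before constructing terminal tensors.
   [Arrayjit.Ops.single] / [Arrayjit.Ops.half] are assumed values of type
   [Arrayjit.Ops.prec]. Composite tensors then take the maximum precision of
   their sub-tensors, per the doc comments above. *)
let () =
  Tensor.default_value_prec := Arrayjit.Ops.single;
  Tensor.default_grad_prec := Arrayjit.Ops.half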

neural_nets_lib.opam

Lines changed: 1 addition & 1 deletion
@@ -1,6 +1,6 @@
 # This file is generated by dune, edit dune-project instead
 opam-version: "2.0"
-version: "0.4.1~beta"
+version: "0.4.1~beta2"
 synopsis:
   "A from-scratch Deep Learning framework with an optimizing compiler, shape inference, concise syntax"
 description:

ocannl_npy.opam

Lines changed: 1 addition & 1 deletion
@@ -1,6 +1,6 @@
 # This file is generated by dune, edit dune-project instead
 opam-version: "2.0"
-version: "0.4.1~beta"
+version: "0.4.1~beta2"
 synopsis: "Numpy file format support for ocaml"
 maintainer: ["Lukasz Stafiniak <lukstafi@gmail.com>"]
 authors: ["Laurent Mazare"]

test/micrograd_demo.ml

Lines changed: 5 additions & 4 deletions
@@ -304,8 +304,8 @@ let%expect_test "Micrograd half-moons example" =
 - - - - - -
 - -
 -
-- - - -
-
+- - -
+-
 -
 
 
@@ -326,7 +326,7 @@ let%expect_test "Micrograd half-moons example" =
 [%expect
   {|
 Learning rate:
--1.002e-1-
+-1.003e-1-
 -----
 -----
 -----
@@ -358,7 +358,8 @@ let%expect_test "Micrograd half-moons example" =
 -2.000e-1----
 ──────────┼────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────
 0.000e+0 3.990e+2
-step |}];
+step
+|}];
 
 (* Testing how the syntax extension %op creates labels for the resulting tensors: *)
 Stdio.printf "mlp_result's name: %s\n%!" @@ Tensor.debug_name mlp_result;

test/moons_demo_parallel.ml

Lines changed: 87 additions & 2 deletions
@@ -168,6 +168,91 @@ Half-moons scatterplot and decision boundary:
 ***********.................................................%%%%%%%%%%%%%%%%%.%%%%%%%%%%%%%%%%%%%.......................
 **********......................................................%%%%%%.%%%%%%%%%%%%%%%%%%%%%%%..........................
 *********..........................................................%....%%%%%.%%..%%%%...%..............................|}
+  in
+  let new_typical_target =
+    {|
+Batch=19, step=20, lr=0.195250, batch loss=0.263683, epoch loss=45.768524
+Epoch=0, step=20, lr=0.195250, epoch loss=45.768524
+Batch=19, step=40, lr=0.190250, batch loss=0.220793, epoch loss=5.662821
+Epoch=1, step=40, lr=0.190250, epoch loss=5.662821
+Batch=19, step=60, lr=0.185250, batch loss=0.197911, epoch loss=5.259723
+Epoch=2, step=60, lr=0.185250, epoch loss=5.259723
+Batch=19, step=80, lr=0.180250, batch loss=0.191768, epoch loss=5.281074
+Epoch=3, step=80, lr=0.180250, epoch loss=5.281074
+Batch=19, step=100, lr=0.175250, batch loss=0.186862, epoch loss=5.097180
+Epoch=4, step=100, lr=0.175250, epoch loss=5.097180
+Batch=19, step=120, lr=0.170250, batch loss=0.181911, epoch loss=4.987223
+Epoch=5, step=120, lr=0.170250, epoch loss=4.987223
+Batch=19, step=140, lr=0.165250, batch loss=0.178275, epoch loss=4.835103
+Epoch=6, step=140, lr=0.165250, epoch loss=4.835103
+Batch=19, step=160, lr=0.160250, batch loss=0.165620, epoch loss=4.702625
+Epoch=7, step=160, lr=0.160250, epoch loss=4.702625
+Batch=19, step=180, lr=0.155250, batch loss=0.156137, epoch loss=4.458982
+Epoch=8, step=180, lr=0.155250, epoch loss=4.458982
+Batch=19, step=200, lr=0.150250, batch loss=0.139483, epoch loss=4.074086
+Epoch=9, step=200, lr=0.150250, epoch loss=4.074086
+Batch=19, step=220, lr=0.145250, batch loss=0.118495, epoch loss=3.605957
+Epoch=10, step=220, lr=0.145250, epoch loss=3.605957
+Batch=19, step=240, lr=0.140250, batch loss=0.091701, epoch loss=3.061533
+Epoch=11, step=240, lr=0.140250, epoch loss=3.061533
+Batch=19, step=260, lr=0.135250, batch loss=0.062137, epoch loss=2.342423
+Epoch=12, step=260, lr=0.135250, epoch loss=2.342423
+Batch=19, step=280, lr=0.130250, batch loss=0.030009, epoch loss=1.588885
+Epoch=13, step=280, lr=0.130250, epoch loss=1.588885
+Batch=19, step=300, lr=0.125250, batch loss=0.016336, epoch loss=0.904919
+Epoch=14, step=300, lr=0.125250, epoch loss=0.904919
+Batch=19, step=320, lr=0.120250, batch loss=0.009264, epoch loss=0.579104
+Epoch=15, step=320, lr=0.120250, epoch loss=0.579104
+Batch=19, step=340, lr=0.115250, batch loss=0.007289, epoch loss=0.451346
+Epoch=16, step=340, lr=0.115250, epoch loss=0.451346
+Batch=19, step=360, lr=0.110250, batch loss=0.005304, epoch loss=0.342835
+Epoch=17, step=360, lr=0.110250, epoch loss=0.342835
+Batch=19, step=380, lr=0.105250, batch loss=0.004483, epoch loss=0.259518
+Epoch=18, step=380, lr=0.105250, epoch loss=0.259518
+Batch=19, step=400, lr=0.100250, batch loss=0.004777, epoch loss=0.212706
+Epoch=19, step=400, lr=0.100250, epoch loss=0.212706
+
+Half-moons scatterplot and decision boundary:
+***************************************#********************************************************************************
+***************************#*#*#########*###**######********************************************************************
+***************************######*####*#*#####*########*#***************************************************************
+*********************#**#########**#######*###############*###**********************************************************
+******************####*####################################*###*********************************************************
+***************#*#*###*###*###########*#*##*#####################*******************************************************
+************#*######**#########*##*****************##*##*########*#*****************************************************
+*************########*#*###*#**********************#******####*######*************************************************..
+**************#######*#*##******************************#########*##*##********************************************.....
+**********#######*###*#****************************************###**###*#***************************************........
+********#*######**##****************.....*********************#*##*####*#************************************...........
+********###*#*#**##************..............******************###########*#*******************************.............
+******########**************.........%....%.%...*******************##########*****************************............%.
+*******#######*************...........%...........******************##*######***************************.........%.%..%.
+****##########************............%%%.%%%......*****************##########*************************........%..%%%%%.
+*****######*#************............%%%.%...........**************#*#########***********************..........%.%.%%..%
+**######*#***************............%%%%%%%%..........****************#*##*###*********************............%%%%%%%.
+**##*#####**************..............%%%%%%%...........**************#########*******************..............%%.%%%..
+**########*************..............%%%%%%%%.............**************##*######****************...............%%%%%%%.
+*########**************..............%%%.%%%.%%.............*************#####*****************...............%%%%%%%%%.
+*########*************................%%%%%%%%%..............************###*##*#*************.................%%%%%%%..
+##*######*************................%%%%%%%.%................***********######*#**********..................%%%%%%%%..
+######*##************.................%%.%%%%%%..................*********########*********...................%%%%.%%.%.
+###*##**#***********...................%.%%%%%%%%.................*********#####*#********...................%%%%%%%%...
+##*#####************....................%%%%%%.%.%..................******#*#*####******....................%%.%%%%%....
+#####*##***********.....................%.%%%%%%%%...................*****##**##*******...................%%%%%%%%%%%...
+**#*##*#**********.......................%%%.%%%%%.%...................***#####*#****......................%%%%%%%......
+##****##**********........................%%.%%%%%%%%....................***###*##**....................%%%%%%%%%%......
+*****************.........................%%.%%%%%%%......................********......................%..%%.%%%.......
+****************............................%...%%%%%.%%....................*****..................%.%%%%%%%%%%.........
+****************..............................%.%%%%%.%%%%....................*....................%%%%%%%%.%.%%........
+***************.................................%..%%%%%...%......................................%%%%%%%%%%............
+**************....................................%%%.%%%%%%%%..............................%%..%%%%.%%%%%.%............
+**************...................................%%%.%%%%%%.%%...%.........................%.%%%%%%%.%%%.%..............
+*************........................................%.%%%.%%%%%%%%%...................%.%%%%%%%%%%%%%.%.%..............
+************..........................................%.%%%%.%%%%%%%%%.%%%%%%%%%.%.%%%%%%%%%%%%%%%%%%%.%................
+************............................................%%%%%%%%%%%%%%%%%%%%%.%%%%%%%.%%%.%%%%%%%%%%....................
+***********.................................................%%%%%%%%%%%%%%%%%.%%%%%%%%%%%%%%%%%%%.......................
+**********......................................................%%%%%%.%%%%%%%%%%%%%%%%%%%%%%%..........................
+**********.........................................................%....%%%%%.%%..%%%%...%..............................|}
   in
   let arm64_and_s390x_target =
     {|
@@ -340,8 +340,8 @@ Half-moons scatterplot and decision boundary:
 **********.........................................................%....%%%%%.%%..%%%%...%..............................|}
   in
   let result_deltas =
-    List.map [ typical_target; arm64_and_s390x_target; ppc64_target ] ~f:(fun target ->
-        Expect_test_patdiff.patdiff target result)
+    List.map [ new_typical_target; typical_target; arm64_and_s390x_target; ppc64_target ]
+      ~f:(fun target -> Expect_test_patdiff.patdiff target result)
   in
   (if List.exists ~f:String.is_empty result_deltas then
      Stdio.print_string "moons_demo_parallel result is as expected"
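The result_deltas change above is the test's multi-target acceptance pattern: the captured output is diffed against every known-good target (one per platform family, and now an extra one for the new epoch count), and the test passes when any diff comes back empty. A minimal self-contained sketch of the same idea; the target list here is a placeholder, while Expect_test_patdiff.patdiff and the Base helpers are the calls the test itself uses:

open Base

(* Accept [result] if it reproduces any of several acceptable targets:
   an empty patdiff means the two strings match. *)
let matches_any_target ~result ~targets =
  List.exists targets ~f:(fun target ->
      String.is_empty (Expect_test_patdiff.patdiff target result))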
