@@ -12,47 +12,49 @@ module type Backend = Ir.Backend_intf.Backend
 let hello1 () =
   Rand.init 0;
   let module Backend = (val Backends.fresh_backend ()) in
-  let stream = Backend.(new_stream @@ get_device ~ordinal:0) in
-  let ctx = Backend.make_context stream in
+
+
   let open Operation.TDSL in
   (* Hey is inferred to be a matrix. *)
   let hey = range_of_shape ~batch_dims:[ 7 ] ~input_dims:[ 9; 10; 11 ] ~output_dims:[ 13; 14 ] () in
   let%op hoo = ((1 + 1) * hey) - 10 in
   (* For convenience, Train.forward will set hoo.value as fully on host. *)
-  Train.forward_and_force (module Backend) ctx hoo;
+  ignore (Train.forward_once (module Backend) hoo);
   (* Disable line wrapping for viewing the output. In VSCode: `View: Toggle Word Wrap`. *)
-  Tensor.print_tree ~with_grad:false ~depth:99 hoo;
-  Tensor.print ~here:[%here] ~with_code:false ~with_grad:false `Default hoo
+  Train.printf_tree ~with_grad:false ~depth:99 hoo;
+  Train.printf ~here:[%here] ~with_code:false ~with_grad:false hoo
 
 let hello2 () =
   Rand.init 0;
   let module Backend = (val Backends.fresh_backend ()) in
-  let stream = Backend.(new_stream @@ get_device ~ordinal:0) in
-  let ctx = Backend.make_context stream in
+
+
   (* Hey is inferred to be a matrix. *)
   let%op y = ("hey" * 'q' 2.0) + 'p' 1.0 in
   (* Punning for ["hey"] above introduced the [hey] identifier. *)
   Train.every_non_literal_on_host y;
-  Train.forward_and_force (module Backend) ctx y;
-  Tensor.print ~here:[%here] ~with_code:false ~with_grad:false `Default @@ hey;
-  Tensor.print ~here:[%here] ~with_code:false ~with_grad:false `Default @@ y
+  ignore (Train.forward_once (module Backend) y);
+  Train.printf ~here:[%here] ~with_code:false ~with_grad:false hey;
+  Train.printf ~here:[%here] ~with_code:false ~with_grad:false y
 
 let hello3 () =
   Rand.init 0;
   let module Backend = (val Backends.fresh_backend ()) in
-  let stream = Backend.(new_stream @@ get_device ~ordinal:0) in
-  let ctx = Backend.make_context stream in
+
+
   (* Hey is inferred to be a matrix. *)
   let hey = TDSL.param "hey" in
   let zero_to_twenty = TDSL.range 20 in
   let y = TDSL.O.(( + ) ~label:[ "y" ] (hey * zero_to_twenty) zero_to_twenty) in
   Train.set_hosted hey.value;
+  let stream = Backend.(new_stream @@ get_device ~ordinal:0) in
+  let ctx = Backend.make_context stream in
   let routine = Train.to_routine (module Backend) ctx IDX.empty @@ Train.forward y in
   Stdio.printf "\n%!";
   Train.run routine;
-  Tensor.print ~here:[%here] ~with_code:true ~with_grad:false `Default y;
+  Train.printf ~here:[%here] ~with_code:true ~with_grad:false y;
   Stdio.printf "\n%!";
-  Tensor.print_tree ~with_grad:false ~depth:9 y;
+  Train.printf_tree ~with_grad:false ~depth:9 y;
   Stdio.printf "\n%!"
 
 let hello4 () =
@@ -65,8 +67,8 @@ let hello4 () =
        and type event = Backend.event
        and type optimize_ctx = Backend.optimize_ctx)
   in
-  let stream = Backend.(new_stream @@ get_device ~ordinal:0) in
-  let ctx = Backend.make_context stream in
+
+
   Rand.init 0;
   let ri = TDSL.range 3 in
   let%op ti = ri ++ "i=>i0" in
@@ -79,13 +81,13 @@ let hello4 () =
   let positions = TDSL.outer_sum "ijl;kl=>ijkl" (TDSL.outer_sum "il;jl=>ijl" ti tj) tk in
   Train.set_hosted ti.value;
   Train.set_hosted tk.value;
-  Train.forward_and_force backend ctx positions;
+  ignore (Train.forward_once backend positions);
   Stdio.print_endline "positions:";
-  Tensor.print ~here:[%here] ~with_code:false ~with_grad:false `Default @@ positions;
+  Train.printf ~here:[%here] ~with_code:false ~with_grad:false positions;
   Stdio.print_endline "tk:";
-  Tensor.print ~here:[%here] ~with_code:false ~with_grad:false `Default @@ tk;
+  Train.printf ~here:[%here] ~with_code:false ~with_grad:false tk;
   Stdio.print_endline "ti:";
-  Tensor.print ~here:[%here] ~with_code:false ~with_grad:false `Default @@ ti;
+  Train.printf ~here:[%here] ~with_code:false ~with_grad:false ti;
   Stdio.printf "\n%!"
 
 let hello5 () =
@@ -98,13 +100,13 @@ let hello5 () =
        and type event = Backend.event
        and type optimize_ctx = Backend.optimize_ctx)
   in
-  let stream = Backend.(new_stream @@ get_device ~ordinal:0) in
-  let ctx = Backend.make_context stream in
+
+
   Rand.init 0;
   let hey = TDSL.range_of_shape ~batch_dims:[ 2 ] ~input_dims:[ 3 ] ~output_dims:[ 4 ] () in
   let%op ho = hey ++ "...|1->... => ...|..." in
-  Train.forward_and_force backend ctx ho;
-  Tensor.print ~here:[%here] ~with_code:false ~with_grad:false `Default @@ ho
+  ignore (Train.forward_once backend ho);
+  Train.printf ~here:[%here] ~with_code:false ~with_grad:false ho
 
 let hello6 () =
   let module Backend = (val Backends.fresh_backend ()) in
@@ -116,14 +118,14 @@ let hello6 () =
        and type event = Backend.event
        and type optimize_ctx = Backend.optimize_ctx)
   in
-  let stream = Backend.(new_stream @@ get_device ~ordinal:0) in
-  let ctx = Backend.make_context stream in
+
+
   Rand.init 0;
   (* "Hey" is inferred to be a scalar. *)
   let%op y = 2 *. "hey" in
-  Train.forward_and_force backend ctx y;
-  (* Tensor.print ~here:[%here] ~with_code:false ~with_grad:false `Default @@ hey; *)
-  Tensor.print ~here:[%here] ~with_code:false ~with_grad:false `Default @@ y
+  ignore (Train.forward_once backend y);
+  (* Train.printf ~here:[%here] ~with_code:false ~with_grad:false hey; *)
+  Train.printf ~here:[%here] ~with_code:false ~with_grad:false y
 
 let () =
   ignore (hello1, hello2, hello3, hello4, hello5, hello6);
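
A minimal sketch of the new calling convention, assuming the module aliases used above (`Rand`, `Backends`, `Train`, `Operation.TDSL`) are in scope and that `Train.forward_once` and `Train.printf` keep the signatures shown in this diff; it mirrors `hello1`, with the explicit stream/context setup dropped:

```ocaml
(* Sketch only: forward_once runs a one-off forward pass without a
   hand-built stream or context, then the hosted values can be printed. *)
let hello_sketch () =
  Rand.init 0;
  let module Backend = (val Backends.fresh_backend ()) in
  let open Operation.TDSL in
  (* Shape is inferred from the labeled dimensions, as in hello1. *)
  let hey = range_of_shape ~batch_dims:[ 7 ] ~input_dims:[ 9; 10; 11 ] ~output_dims:[ 13; 14 ] () in
  let%op hoo = ((1 + 1) * hey) - 10 in
  ignore (Train.forward_once (module Backend) hoo);
  Train.printf_tree ~with_grad:false ~depth:99 hoo;
  Train.printf ~here:[%here] ~with_code:false ~with_grad:false hoo
```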