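(* Apparently machine-generated by the TRX parser generator: note the
   Trx_runtime calls and the #N "tokenizer.trx" line directives below,
   which point back into the source grammar. For each of the three rules,
   Tokenizer_id, Tokenizer_rest and Tokenizer_tokens, the file defines a
   fast parser (suffix _noerr) and an error-collecting one, both memoized
   on the input offset in packrat style. *)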
open Base
let _grammar_rules = [ ("Tokenizer_tokens", None) ; ("Tokenizer_rest", None) ; ("Tokenizer_id", None) ]
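(* Two-pass driver: run the fast parser f_noerr first; only if it fails,
   re-run the error-collecting parser f_err on the same input to build a
   syntax error carrying position information. *)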
let parse_with f_noerr f_err _text _start = let _len = ((String.length) (_text))
and _get_char = ((String.unsafe_get) (_text))
and _get_sub = ((String.sub) (_text)) in
(match (f_noerr (_text) (_start)) with
Some ( ( pos, res ) ) -> (pos, res)
| None -> (match (f_err (_text) (_start)) with
Trx_runtime.Ok ( ( ( _, _ ), _ ) ) -> (assert false)
| Trx_runtime.Fail ( ( err ) ) -> (Trx_runtime.gen_syntax_error (((FilePos.get_pos_no_cache) (_text))) (err))))
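(* One memoization table per rule and per variant (fast / error-collecting),
   keyed by input offset; prepare_cache resets them all before each parse. *)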
let memo_Tokenizer_id = ((Hashtbl.create) (128))
let memo_Tokenizer_id_err = ((Hashtbl.create) (128))
let memo_Tokenizer_rest = ((Hashtbl.create) (128))
let memo_Tokenizer_rest_err = ((Hashtbl.create) (128))
let memo_Tokenizer_tokens = ((Hashtbl.create) (128))
let memo_Tokenizer_tokens_err = ((Hashtbl.create) (128))
let prepare_cache () = ((Hashtbl.clear) (memo_Tokenizer_id)) ; ((Hashtbl.clear) (memo_Tokenizer_id_err)) ; ((Hashtbl.clear) (memo_Tokenizer_rest)) ; ((Hashtbl.clear) (memo_Tokenizer_rest_err)) ; ((Hashtbl.clear) (memo_Tokenizer_tokens)) ; ((Hashtbl.clear) (memo_Tokenizer_tokens_err))
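(* Tokenizer_id, fast variant: an identifier matching
   [a-zA-Z_][a-zA-Z0-9_]*, returned as a string. *)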
let rec try_Tokenizer_id_noerr =
#18 "tokenizer.trx"
(fun _filename _text input -> let _len = ((String.length) (_text))
and _get_char = ((String.unsafe_get) (_text))
and _get_sub = ((String.sub) (_text)) in
(try (
(Hashtbl.find (memo_Tokenizer_id) (input))
) with
Not_found -> let res = (match if ( (((<)) (input) (_len)) ) then ( let c = ((_get_char) (input)) in
if ( (((||)) ((((&&)) ((((>=)) (c) ('a'))) ((((<=)) (c) ('z'))))) ((((||)) ((((&&)) ((((>=)) (c) ('A'))) ((((<=)) (c) ('Z'))))) ((((=)) (c) ('_')))))) ) then ( Some((((succ) (input)), c)) ) else ( None ) ) else ( None ) with
( None ) as __pat_var -> __pat_var
| Some ( ( input_9, __1 ) ) -> (match (Trx_runtime.while_primary_noerr (false) ((fun input_11 -> if ( (((<)) (input_11) (_len)) ) then ( let c = ((_get_char) (input_11)) in
if ( (((||)) ((((&&)) ((((>=)) (c) ('a'))) ((((<=)) (c) ('z'))))) ((((||)) ((((&&)) ((((>=)) (c) ('A'))) ((((<=)) (c) ('Z'))))) ((((||)) ((((=)) (c) ('_'))) ((((&&)) ((((>=)) (c) ('0'))) ((((<=)) (c) ('9')))))))))) ) then ( Some((((succ) (input_11)), c)) ) else ( None ) ) else ( None ))) (input_9)) with
( None ) as __pat_var -> __pat_var
| Some ( ( input_10, __2 ) ) -> Some((input_10,
#18 "tokenizer.trx"
( Tgrammar.string_of_chars (__1::__2) ))))) in
(Hashtbl.add (memo_Tokenizer_id) (input) (res)) ; res)
)
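(* Tokenizer_rest, fast variant: consumes characters as long as
   Tokenizer_id does not match at the current position (a negative
   lookahead on id, then any character), returned as a string. *)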
let rec try_Tokenizer_rest_noerr =
#20 "tokenizer.trx"
(fun _filename _text input -> let _len = ((String.length) (_text))
and _get_char = ((String.unsafe_get) (_text))
and _get_sub = ((String.sub) (_text)) in
(try (
(Hashtbl.find (memo_Tokenizer_rest) (input))
) with
Not_found -> let res = (match (Trx_runtime.while_primary_noerr (false) ((fun input_6 -> (match (try_Tokenizer_id_noerr (_filename) (_text) (input_6)) with
None -> let input_7 = input_6 in
let __1 = () in
if ( (((<)) (input_7) (_len)) ) then ( let c = ((_get_char) (input_7)) in
Some((((succ) (input_7)), c)) ) else ( None )
| Some ( ( _, _ ) ) -> None))) (input)) with
( None ) as __pat_var -> __pat_var
| Some ( ( input_5, __1 ) ) -> Some((input_5,
#20 "tokenizer.trx"
( Tgrammar.string_of_chars __1 )))) in
(Hashtbl.add (memo_Tokenizer_rest) (input) (res)) ; res)
)
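(* Tokenizer_tokens, fast variant: rest followed by zero or more id-rest
   pairs; the pieces are flattened into one string list via
   __1 :: List.concat __2. *)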
let rec try_Tokenizer_tokens_noerr =
#22 "tokenizer.trx"
(fun _filename _text input -> let _len = ((String.length) (_text))
and _get_char = ((String.unsafe_get) (_text))
and _get_sub = ((String.sub) (_text)) in
(try (
(Hashtbl.find (memo_Tokenizer_tokens) (input))
) with
Not_found -> let res = (match (try_Tokenizer_rest_noerr (_filename) (_text) (input)) with
( None ) as __pat_var -> __pat_var
| Some ( ( input_0, __1 ) ) -> (match (Trx_runtime.while_primary_noerr (false) ((fun input_2 -> (match (try_Tokenizer_id_noerr (_filename) (_text) (input_2)) with
( None ) as __pat_var -> __pat_var
| Some ( ( input_3, __1 ) ) -> (match (try_Tokenizer_rest_noerr (_filename) (_text) (input_3)) with
( None ) as __pat_var -> __pat_var
| Some ( ( input_4, __2 ) ) -> Some((input_4,
#22 "tokenizer.trx"
( [__1; __2] ))))))) (input_0)) with
( None ) as __pat_var -> __pat_var
| Some ( ( input_1, __2 ) ) -> Some((input_1,
#22 "tokenizer.trx"
( __1::List.concat __2 ))))) in
(Hashtbl.add (memo_Tokenizer_tokens) (input) (res)) ; res)
)
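(* Tokenizer_id, error-collecting variant: same recognizer as above, but
   it threads Trx_runtime error state and records the expected character
   classes for use in error messages. *)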
let rec try_Tokenizer_id =
#18 "tokenizer.trx"
(fun _filename _text input -> let _len = ((String.length) (_text))
and _get_char = ((String.unsafe_get) (_text))
and _get_sub = ((String.sub) (_text)) in
(try (
(Hashtbl.find (memo_Tokenizer_id_err) (input))
) with
Not_found -> let res = (match (Trx_runtime.option_to_res_err (if ( (((<)) (input) (_len)) ) then ( let c = ((_get_char) (input)) in
if ( (((||)) ((((&&)) ((((>=)) (c) ('a'))) ((((<=)) (c) ('z'))))) ((((||)) ((((&&)) ((((>=)) (c) ('A'))) ((((<=)) (c) ('Z'))))) ((((=)) (c) ('_')))))) ) then ( Some((((succ) (input)), c)) ) else ( None ) ) else ( None )) (input) ([ Trx_runtime.Expected(("'_'")) ; Trx_runtime.Expected(("['A'-'Z']")) ; Trx_runtime.Expected(("['a'-'z']")) ])) with
( Trx_runtime.Fail ( ( _err ) ) ) as __pat_var -> __pat_var
| Trx_runtime.Ok ( ( ( input_21, __1 ), err ) ) -> (Trx_runtime.addErrorInfo (err) ((match (Trx_runtime.while_primary (false) ((fun input_23 -> (Trx_runtime.option_to_res_err (if ( (((<)) (input_23) (_len)) ) then ( let c = ((_get_char) (input_23)) in
if ( (((||)) ((((&&)) ((((>=)) (c) ('a'))) ((((<=)) (c) ('z'))))) ((((||)) ((((&&)) ((((>=)) (c) ('A'))) ((((<=)) (c) ('Z'))))) ((((||)) ((((=)) (c) ('_'))) ((((&&)) ((((>=)) (c) ('0'))) ((((<=)) (c) ('9')))))))))) ) then ( Some((((succ) (input_23)), c)) ) else ( None ) ) else ( None )) (input_23) ([ Trx_runtime.Expected(("'_'")) ; Trx_runtime.Expected(("['0'-'9']")) ; Trx_runtime.Expected(("['A'-'Z']")) ; Trx_runtime.Expected(("['a'-'z']")) ])))) (input_21)) with
( Trx_runtime.Fail ( ( _err ) ) ) as __pat_var -> __pat_var
| Trx_runtime.Ok ( ( ( input_22, __2 ), err ) ) -> (Trx_runtime.addErrorInfo (err) (Trx_runtime.Ok(((input_22,
#18 "tokenizer.trx"
( Tgrammar.string_of_chars (__1::__2) )), ((Trx_runtime.emptyError) (input_22)))))))))) in
(Hashtbl.add (memo_Tokenizer_id_err) (input) (res)) ; res)
)
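(* Tokenizer_rest, error-collecting variant of the rule above. *)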
let rec try_Tokenizer_rest =
#20 "tokenizer.trx"
(fun _filename _text input -> let _len = ((String.length) (_text))
and _get_char = ((String.unsafe_get) (_text))
and _get_sub = ((String.sub) (_text)) in
(try (
(Hashtbl.find (memo_Tokenizer_rest_err) (input))
) with
Not_found -> let res = (match (Trx_runtime.while_primary (false) ((fun input_18 -> (match (try_Tokenizer_id (_filename) (_text) (input_18)) with
Trx_runtime.Fail ( ( err ) ) -> let input_19 = input_18 in
let __1 = () in
(Trx_runtime.addErrorInfo (err) ((match (Trx_runtime.option_to_res_err (if ( (((<)) (input_19) (_len)) ) then ( let c = ((_get_char) (input_19)) in
Some((((succ) (input_19)), c)) ) else ( None )) (input_19) (((Trx_runtime.Expected(("any character")))::([])))) with
( Trx_runtime.Fail ( ( _err ) ) ) as __pat_var -> __pat_var
| Trx_runtime.Ok ( ( ( input_20, __2 ), err ) ) -> (Trx_runtime.addErrorInfo (err) (Trx_runtime.Ok(((input_20, __2), ((Trx_runtime.emptyError) (input_20)))))))))
| Trx_runtime.Ok ( ( ( _, _ ), err ) ) -> let err = err in
Trx_runtime.Fail((err))))) (input)) with
( Trx_runtime.Fail ( ( _err ) ) ) as __pat_var -> __pat_var
| Trx_runtime.Ok ( ( ( input_17, __1 ), err ) ) -> (Trx_runtime.addErrorInfo (err) (Trx_runtime.Ok(((input_17,
#20 "tokenizer.trx"
( Tgrammar.string_of_chars __1 )), ((Trx_runtime.emptyError) (input_17))))))) in
(Hashtbl.add (memo_Tokenizer_rest_err) (input) (res)) ; res)
)
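(* Tokenizer_tokens, error-collecting variant of the rule above. *)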
let rec try_Tokenizer_tokens =
#22 "tokenizer.trx"
(fun _filename _text input -> let _len = ((String.length) (_text))
and _get_char = ((String.unsafe_get) (_text))
and _get_sub = ((String.sub) (_text)) in
(try (
(Hashtbl.find (memo_Tokenizer_tokens_err) (input))
) with
Not_found -> let res = (match (try_Tokenizer_rest (_filename) (_text) (input)) with
( Trx_runtime.Fail ( ( _err ) ) ) as __pat_var -> __pat_var
| Trx_runtime.Ok ( ( ( input_12, __1 ), err ) ) -> (Trx_runtime.addErrorInfo (err) ((match (Trx_runtime.while_primary (false) ((fun input_14 -> (match (try_Tokenizer_id (_filename) (_text) (input_14)) with
( Trx_runtime.Fail ( ( _err ) ) ) as __pat_var -> __pat_var
| Trx_runtime.Ok ( ( ( input_15, __1 ), err ) ) -> (Trx_runtime.addErrorInfo (err) ((match (try_Tokenizer_rest (_filename) (_text) (input_15)) with
( Trx_runtime.Fail ( ( _err ) ) ) as __pat_var -> __pat_var
| Trx_runtime.Ok ( ( ( input_16, __2 ), err ) ) -> (Trx_runtime.addErrorInfo (err) (Trx_runtime.Ok(((input_16,
#22 "tokenizer.trx"
( [__1; __2] )), ((Trx_runtime.emptyError) (input_16)))))))))))) (input_12)) with
( Trx_runtime.Fail ( ( _err ) ) ) as __pat_var -> __pat_var
| Trx_runtime.Ok ( ( ( input_13, __2 ), err ) ) -> (Trx_runtime.addErrorInfo (err) (Trx_runtime.Ok(((input_13,
#22 "tokenizer.trx"
( __1::List.concat __2 )), ((Trx_runtime.emptyError) (input_13)))))))))) in
(Hashtbl.add (memo_Tokenizer_tokens_err) (input) (res)) ; res)
)
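(* Entry point: reset the memo tables, then parse Tokenizer_tokens from
   _start with the fast parser, falling back to the error-reporting one. *)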
let parse_tokenizer_tokens ?(_filename = "") ?(_start = 0) _text = ((prepare_cache) (())) ; (parse_with (((try_Tokenizer_tokens_noerr) (_filename))) (((try_Tokenizer_tokens) (_filename))) (_text) (_start))
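(* A minimal usage sketch with a hypothetical input string; it assumes
   Trx_runtime, Tgrammar and FilePos are linked, as in the rest of this
   module, and the result shape follows parse_with above:

     let _pos, tokens = parse_tokenizer_tokens "foo, bar" in
     (* tokens : string list, alternating rest and identifier chunks,
        e.g. [""; "foo"; ", "; "bar"; ""] *)
     List.iter print_endline tokens
*)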