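(* tokenizer.ml — parser for the Tokenizer grammar, apparently generated by
   the TRX parser generator from tokenizer.trx (the #N "tokenizer.trx"
   directives below map generated code back to the grammar source).  The
   grammar has three rules: Tokenizer_tokens, Tokenizer_rest and
   Tokenizer_id.  Each rule is compiled twice: a fast "_noerr" variant
   returning a plain option, and an error-reporting variant that threads
   Trx_runtime error information for syntax-error messages. *)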
open Base
let _grammar_rules =
  [ ("Tokenizer_tokens", None); ("Tokenizer_rest", None); ("Tokenizer_id", None) ]
let parse_with f_noerr f_err _text _start =
  let _len = String.length _text
  and _get_char = String.unsafe_get _text
  and _get_sub = String.sub _text in
  (* Run the fast parser first; only on failure re-run the error-collecting
     parser, purely to build a good syntax-error message. *)
  match f_noerr _text _start with
  | Some (pos, res) -> (pos, res)
  | None ->
    (match f_err _text _start with
     | Trx_runtime.Ok ((_, _), _) ->
       (* The fast parser failed, so the error parser cannot succeed. *)
       assert false
     | Trx_runtime.Fail err ->
       Trx_runtime.gen_syntax_error (FilePos.get_pos_no_cache _text) err)
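
(* Packrat-style memoization: one table per rule and per variant, keyed by
   input position.  prepare_cache is called before each parse so results
   from a previous input are never reused. *)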
let memo_Tokenizer_id = Hashtbl.create 128
let memo_Tokenizer_id_err = Hashtbl.create 128
let memo_Tokenizer_rest = Hashtbl.create 128
let memo_Tokenizer_rest_err = Hashtbl.create 128
let memo_Tokenizer_tokens = Hashtbl.create 128
let memo_Tokenizer_tokens_err = Hashtbl.create 128

let prepare_cache () =
  Hashtbl.clear memo_Tokenizer_id;
  Hashtbl.clear memo_Tokenizer_id_err;
  Hashtbl.clear memo_Tokenizer_rest;
  Hashtbl.clear memo_Tokenizer_rest_err;
  Hashtbl.clear memo_Tokenizer_tokens;
  Hashtbl.clear memo_Tokenizer_tokens_err
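
(* Tokenizer_id: an identifier — one character in ['a'-'z' 'A'-'Z' '_']
   followed by any number of characters in ['a'-'z' 'A'-'Z' '0'-'9' '_'];
   the semantic action turns the matched characters into a string. *)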
let rec try_Tokenizer_id_noerr =
#18 "tokenizer.trx"
  (fun _filename _text input ->
     let _len = String.length _text
     and _get_char = String.unsafe_get _text
     and _get_sub = String.sub _text in
     try Hashtbl.find memo_Tokenizer_id input
     with Not_found ->
       let res =
         match
           if input < _len then (
             let c = _get_char input in
             if (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || c = '_'
             then Some (succ input, c)
             else None
           ) else None
         with
         | None as __pat_var -> __pat_var
         | Some (input_9, __1) ->
           (match
              Trx_runtime.while_primary_noerr false
                (fun input_11 ->
                   if input_11 < _len then (
                     let c = _get_char input_11 in
                     if (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z')
                        || c = '_' || (c >= '0' && c <= '9')
                     then Some (succ input_11, c)
                     else None
                   ) else None)
                input_9
            with
            | None as __pat_var -> __pat_var
            | Some (input_10, __2) ->
              Some (input_10,
#18 "tokenizer.trx"
                    Tgrammar.string_of_chars (__1 :: __2)))
       in
       Hashtbl.add memo_Tokenizer_id input res;
       res)
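
(* Tokenizer_rest: the (possibly empty) run of characters up to the next
   identifier — each loop step succeeds only where Tokenizer_id fails (a
   negative lookahead), then consumes a single character. *)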
let rec try_Tokenizer_rest_noerr =
#20 "tokenizer.trx"
  (fun _filename _text input ->
     let _len = String.length _text
     and _get_char = String.unsafe_get _text
     and _get_sub = String.sub _text in
     try Hashtbl.find memo_Tokenizer_rest input
     with Not_found ->
       let res =
         match
           Trx_runtime.while_primary_noerr false
             (fun input_6 ->
                match try_Tokenizer_id_noerr _filename _text input_6 with
                | None ->
                  let input_7 = input_6 in
                  let __1 = () in
                  if input_7 < _len then (
                    let c = _get_char input_7 in
                    Some (succ input_7, c)
                  ) else None
                | Some (_, _) -> None)
             input
         with
         | None as __pat_var -> __pat_var
         | Some (input_5, __1) ->
           Some (input_5,
#20 "tokenizer.trx"
                 Tgrammar.string_of_chars __1)
       in
       Hashtbl.add memo_Tokenizer_rest input res;
       res)
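
(* Tokenizer_tokens: the whole input as an alternating list — a leading
   "rest" chunk, then (identifier, rest) pairs; each pair is built as a
   two-element list and the result is flattened with List.concat into a
   single string list. *)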
let rec try_Tokenizer_tokens_noerr =
#22 "tokenizer.trx"
  (fun _filename _text input ->
     let _len = String.length _text
     and _get_char = String.unsafe_get _text
     and _get_sub = String.sub _text in
     try Hashtbl.find memo_Tokenizer_tokens input
     with Not_found ->
       let res =
         match try_Tokenizer_rest_noerr _filename _text input with
         | None as __pat_var -> __pat_var
         | Some (input_0, __1) ->
           (match
              Trx_runtime.while_primary_noerr false
                (fun input_2 ->
                   match try_Tokenizer_id_noerr _filename _text input_2 with
                   | None as __pat_var -> __pat_var
                   | Some (input_3, __1) ->
                     (match try_Tokenizer_rest_noerr _filename _text input_3 with
                      | None as __pat_var -> __pat_var
                      | Some (input_4, __2) ->
                        Some (input_4,
#22 "tokenizer.trx"
                              [__1; __2])))
                input_0
            with
            | None as __pat_var -> __pat_var
            | Some (input_1, __2) ->
              Some (input_1,
#22 "tokenizer.trx"
                    __1 :: List.concat __2))
       in
       Hashtbl.add memo_Tokenizer_tokens input res;
       res)
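
(* The three parsers below mirror the *_noerr variants above, but wrap
   every step in Trx_runtime.Ok/Fail and accumulate Expected items via
   addErrorInfo, so that parse_with can report what was expected at the
   failure position. *)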
let rec try_Tokenizer_id =
#18 "tokenizer.trx"
  (fun _filename _text input ->
     let _len = String.length _text
     and _get_char = String.unsafe_get _text
     and _get_sub = String.sub _text in
     try Hashtbl.find memo_Tokenizer_id_err input
     with Not_found ->
       let res =
         match
           Trx_runtime.option_to_res_err
             (if input < _len then (
                let c = _get_char input in
                if (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || c = '_'
                then Some (succ input, c)
                else None
              ) else None)
             input
             [ Trx_runtime.Expected "'_'";
               Trx_runtime.Expected "['A'-'Z']";
               Trx_runtime.Expected "['a'-'z']" ]
         with
         | Trx_runtime.Fail _err as __pat_var -> __pat_var
         | Trx_runtime.Ok ((input_21, __1), err) ->
           Trx_runtime.addErrorInfo err
             (match
                Trx_runtime.while_primary false
                  (fun input_23 ->
                     Trx_runtime.option_to_res_err
                       (if input_23 < _len then (
                          let c = _get_char input_23 in
                          if (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z')
                             || c = '_' || (c >= '0' && c <= '9')
                          then Some (succ input_23, c)
                          else None
                        ) else None)
                       input_23
                       [ Trx_runtime.Expected "'_'";
                         Trx_runtime.Expected "['0'-'9']";
                         Trx_runtime.Expected "['A'-'Z']";
                         Trx_runtime.Expected "['a'-'z']" ])
                  input_21
              with
              | Trx_runtime.Fail _err as __pat_var -> __pat_var
              | Trx_runtime.Ok ((input_22, __2), err) ->
                Trx_runtime.addErrorInfo err
                  (Trx_runtime.Ok
                     ((input_22,
#18 "tokenizer.trx"
                       Tgrammar.string_of_chars (__1 :: __2)),
                      Trx_runtime.emptyError input_22)))
       in
       Hashtbl.add memo_Tokenizer_id_err input res;
       res)
let rec try_Tokenizer_rest =
#20 "tokenizer.trx"
  (fun _filename _text input ->
     let _len = String.length _text
     and _get_char = String.unsafe_get _text
     and _get_sub = String.sub _text in
     try Hashtbl.find memo_Tokenizer_rest_err input
     with Not_found ->
       let res =
         match
           Trx_runtime.while_primary false
             (fun input_18 ->
                match try_Tokenizer_id _filename _text input_18 with
                | Trx_runtime.Fail err ->
                  let input_19 = input_18 in
                  let __1 = () in
                  Trx_runtime.addErrorInfo err
                    (match
                       Trx_runtime.option_to_res_err
                         (if input_19 < _len then (
                            let c = _get_char input_19 in
                            Some (succ input_19, c)
                          ) else None)
                         input_19
                         [Trx_runtime.Expected "any character"]
                     with
                     | Trx_runtime.Fail _err as __pat_var -> __pat_var
                     | Trx_runtime.Ok ((input_20, __2), err) ->
                       Trx_runtime.addErrorInfo err
                         (Trx_runtime.Ok
                            ((input_20, __2), Trx_runtime.emptyError input_20)))
                | Trx_runtime.Ok ((_, _), err) -> Trx_runtime.Fail err)
             input
         with
         | Trx_runtime.Fail _err as __pat_var -> __pat_var
         | Trx_runtime.Ok ((input_17, __1), err) ->
           Trx_runtime.addErrorInfo err
             (Trx_runtime.Ok
                ((input_17,
#20 "tokenizer.trx"
                  Tgrammar.string_of_chars __1),
                 Trx_runtime.emptyError input_17))
       in
       Hashtbl.add memo_Tokenizer_rest_err input res;
       res)
let rec try_Tokenizer_tokens =
#22 "tokenizer.trx"
  (fun _filename _text input ->
     let _len = String.length _text
     and _get_char = String.unsafe_get _text
     and _get_sub = String.sub _text in
     try Hashtbl.find memo_Tokenizer_tokens_err input
     with Not_found ->
       let res =
         match try_Tokenizer_rest _filename _text input with
         | Trx_runtime.Fail _err as __pat_var -> __pat_var
         | Trx_runtime.Ok ((input_12, __1), err) ->
           Trx_runtime.addErrorInfo err
             (match
                Trx_runtime.while_primary false
                  (fun input_14 ->
                     match try_Tokenizer_id _filename _text input_14 with
                     | Trx_runtime.Fail _err as __pat_var -> __pat_var
                     | Trx_runtime.Ok ((input_15, __1), err) ->
                       Trx_runtime.addErrorInfo err
                         (match try_Tokenizer_rest _filename _text input_15 with
                          | Trx_runtime.Fail _err as __pat_var -> __pat_var
                          | Trx_runtime.Ok ((input_16, __2), err) ->
                            Trx_runtime.addErrorInfo err
                              (Trx_runtime.Ok
                                 ((input_16,
#22 "tokenizer.trx"
                                   [__1; __2]),
                                  Trx_runtime.emptyError input_16))))
                  input_12
              with
              | Trx_runtime.Fail _err as __pat_var -> __pat_var
              | Trx_runtime.Ok ((input_13, __2), err) ->
                Trx_runtime.addErrorInfo err
                  (Trx_runtime.Ok
                     ((input_13,
#22 "tokenizer.trx"
                       __1 :: List.concat __2),
                      Trx_runtime.emptyError input_13)))
       in
       Hashtbl.add memo_Tokenizer_tokens_err input res;
       res)
let parse_tokenizer_tokens ?(_filename = "") ?(_start = 0) _text =
  prepare_cache ();
  parse_with (try_Tokenizer_tokens_noerr _filename) (try_Tokenizer_tokens _filename)
    _text _start
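
(* Usage sketch (the input string is hypothetical, shown only for
   illustration; assumes Trx_runtime, FilePos and Tgrammar from this
   repository are linked in):

     let () =
       let _pos, chunks = parse_tokenizer_tokens "foo + bar_1;" in
       List.iter print_endline chunks

   [chunks] alternates non-identifier and identifier substrings, starting
   with the (possibly empty) text before the first identifier.  On failure,
   parse_with re-runs the error-reporting parser and passes the failure
   position and accumulated Expected items to Trx_runtime.gen_syntax_error. *)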