# 118 "./lpsrc/flx_tokeniser.ipk"
open Flx_parse
open Flx_prelex
open List

(** [pre_tokens_of_lexbuf buf state] drains [buf] through the Felix
    pre-lexer until a batch consisting of exactly [[ENDMARKER]] is
    produced.  Tokens are accumulated in reverse (for tail recursion)
    and returned in source order, prefixed with a [HASH_INCLUDE_FILES]
    token carrying the files the lexer state recorded as #included.
    Batches produced while [state#get_condition] is not [`Processing]
    are dropped — presumably inactive conditional-compilation regions
    (NOTE(review): semantics of the condition flag not visible here;
    confirm against Flx_lexstate). *)
let pre_tokens_of_lexbuf buf state =
  let rec get lst =
    let t = Flx_lex.pre_flx_lex state buf in
    match t with
    | [ENDMARKER] ->
      (* was [ENDMARKER] @ lst: an append of a singleton is just a cons.
         Note recursion only terminates on a batch that is exactly the
         singleton [ENDMARKER]. *)
      ENDMARKER :: lst
    | _ ->
      match state#get_condition with
      | `Processing ->
        (* accumulate in reverse; reversed once after the loop *)
        get (List.rev_append t lst)
      | _ ->
        (* condition says "skip": discard this batch entirely *)
        get lst
  in
  let tks = get [] in
  (*
  print_endline
  (
    "#included files are " ^
    String.concat ", " state#get_include_files
  )
  ;
  *)
  let toks = List.rev tks in
  let includes = state#get_include_files in
  HASH_INCLUDE_FILES includes :: toks

(** [pre_tokens_of_filename filename dirname incdirs expand_expr]
    opens [filename] and lexes its contents into a pre-token stream.
    The input channel is closed on all paths — the original leaked it
    when [pre_tokens_of_lexbuf] raised. *)
let pre_tokens_of_filename filename dirname incdirs expand_expr =
  let state = new Flx_lexstate.lexer_state filename dirname incdirs expand_expr in
  let infile = open_in filename in
  let toks =
    try pre_tokens_of_lexbuf (Lexing.from_channel infile) state
    with e ->
      (* fix: don't leak the channel when the lexer fails *)
      close_in_noerr infile;
      raise e
  in
  close_in infile;
  toks

(** [pre_tokens_of_string s filename expand_expr] lexes the in-memory
    source text [s].  [filename] is used only to initialise the lexer
    state (diagnostics); no include directories are supplied. *)
let pre_tokens_of_string s filename expand_expr =
  let state = new Flx_lexstate.lexer_state filename "" [] expand_expr in
  pre_tokens_of_lexbuf (Lexing.from_string s) state
# 163 "./lpsrc/flx_tokeniser.ipk"
open Flx_parse
open Flx_ast

(** [pre_tokens_of_filename filename dirname incdirs expand_expr]
    lexes the contents of the file [filename] into a pre-token stream.
    [dirname] and [incdirs] configure the lexer state (presumably the
    source directory and #include search path — confirm against
    Flx_lexstate); [expand_expr] is a macro-expansion hook applied by
    the lexer. *)
val pre_tokens_of_filename :
  string -> string -> string list ->
  (string -> expr_t -> expr_t) ->
  token list

(** [pre_tokens_of_string src filename expand_expr] lexes the in-memory
    source text [src] into a pre-token stream; [filename] is used only
    to initialise the lexer state.  No include directories are
    supplied. *)
val pre_tokens_of_string :
  string -> string ->
  (string -> expr_t -> expr_t) ->
  token list