Version française
Home     About     Download     Resources     Contact us    

This site is updated infrequently. For up-to-date information, please visit the new OCaml website at https://ocaml.org/.

Browse thread
[Caml-list] parsing included files recursively using ocamllex and ocamlyacc
[ Home ] [ Index: by date | by threads ]
[ Search: ]

[ Message by date: previous | next ] [ Message in thread: previous | next ] [ Thread: previous | next ]
Date: 2003-09-22 (08:41)
From: Hendrik Tews <tews@t...>
Subject: Re: [Caml-list] parsing included files recursively using ocamllex and ocamlyacc

Benjamin Geer writes:
   Subject: [Caml-list] parsing included files recursively using ocamllex and ocamlyacc
   I'm writing an interpreter for a small language (to be released as an 
   open source library), using ocamllex and ocamlyacc.  I'd like this 
   language to support an instruction that, at compile time, recursively 
   includes source code from another file.
I use the following approach:
- recognize the include directive in the lexer
- built an abstract lexer that wraps the original lexer and that
  + maintains a stack of lexers with some additional state
  + treats INCLUDE and EOF tokens
- the parser is called on this abstract lexer

This is all straightforward, the only subtle thing is that the
abstract lexer has to substitute the lexbuf argument coming from
the parser with the top of its internal lexer stack. Except for
the first call: In this case the lexbuf argument describes the
top-level input stream. You use this first lexbuf to initialize
the lexer stack.

The disadvantage of this approach is that you cannot have tokens
that span over several files.

In the following I give a few code samples. 

The lexer contains the following rule:

     (* ocamllex rule: on seeing  #include "<file>  hand the parser an
        INCLUDE token carrying the file name and the current location.
        The archive dropped the `in` and the closing `}` of the action;
        restored here. `string` is the sub-lexer that consumes up to the
        closing double quote. *)
     | "#include" [' ' '\t'] '"'
				{ let f = string lexbuf in
				    INCLUDE( f, get_loc lexbuf ) }

(get_loc is not relevant; it tracks line and character numbers.
 string matches everything until the next ``"'')

The grammar does not mention the INCLUDE token, instead the
parser is called as 

  Grammar.file Abstract_lexer.token

The Abstract_lexer module contains the following:

    exception Include_error
(* to signal an error with the include directive — raised after the
   error has already been reported via error_message, so callers only
   need to abort, not re-report *)

    (* [empty s] tests whether stack [s] is empty.
       Stack.is_empty was missing in earlier OCaml releases, so this
       probes the stack with Stack.top: if Stack.top raises Stack.Empty
       the stack is empty, otherwise the result is discarded and the
       stack is non-empty. (The archive dropped the `try`/`with` and
       the Stack.top call; restored here — the surviving
       `| Stack.Empty -> true` arm requires them.) *)
    let empty s =
      try ignore (Stack.top s); false
      with Stack.Empty -> true
(* Stack.empty, which was missing in earlier releases *)

    (* [d s] prints debug line [s] on stderr, but only when the lexer
       debug level is enabled; flushes so output interleaves correctly
       with other diagnostics. (The archive dropped the `end` that
       closes the `begin`; restored here.) *)
    let d s =
      if debug_level _DEBUG_LEXER
      then begin
	prerr_endline s;
	flush stderr
      end
(* give diagnostic output *)

    (* Base directory for resolving relative include paths: the
       directory of the current toplevel input file. It is a
       programming error to call this before [initialize] has set
       [current_top_level_input]. *)
    let get_current_directory () =
      match !current_top_level_input with
      | Some f -> Filename.dirname f
      | None -> assert false
(* compute the base directory for relative includes *)

    (* One entry of the lexer stack: the suspended lexbuf, the saved
       line/character-number state, and an action to run when this
       input is exhausted (typically close_in on the include channel;
       a closure is used because the toplevel input might be a string,
       which needs no closing). The archive dropped the record braces
       and the comment closer; restored here. *)
    type lexing_pos =
	{ lexbuf : Lexing.lexbuf;
	  util_state : Parser_util.state_type;
	  closing_action : unit -> unit
	}
(* the record I save on the lexer stack. The state field saves
   line and character numbers. The closing action is for
   close_in <include file. I prefer a closure here because the
   toplevel input might be a string. *)

    (* The stack of currently open lexers; the top entry is the one
       tokens are read from. *)
    let lexer_stack : lexing_pos Stack.t = Stack.create ()
(* this is the stack of open lexers *)

    (* Reset this module for a fresh toplevel input: discard any
       leftover lexers, restart line counting, and remember the file
       name so relative includes can be resolved against it. *)
    let initialize top_file_name =
      Stack.clear lexer_stack;
      Parser_util.reset_line top_file_name;
      current_top_level_input := Some top_file_name
(* initialize this module for the next toplevel input *)

    (* Read one token from the lexer currently on top of the stack.
       Raises Stack.Empty when no lexer is active, which happens on the
       very first token of a toplevel input. (The archive dropped the
       Stack.top call — `( lexer_stack).lexbuf` is ill-typed on its
       own — and the comment closer; restored here.) *)
    let token_from_top () =
      Lexer.token (Stack.top lexer_stack).lexbuf
(* read a token from the current lexer;
   might raise Stack.Empty if there is no current lexer, which
   happens on the first token of a toplevel input *)

(* divert into the next include file *)

    (* [divert lexbuf file closing_action] pushes a new lexer for
       [file] on top of the stack, after saving the line/character
       state of the input stream being suspended (if any).
       [closing_action] is run when [file] is exhausted.
       (The archive dropped the record braces, the `in`s and the
       comment closers; restored here.) *)
    let divert lexbuf file closing_action =
      (* give some diagnostics *)
      let _ = d ("Diverting into " ^ file ) in
      let _ =
	(* if we are leaving an input stream to process an include, we
	   have to save line and character numbers *)
	if not (empty lexer_stack) then
	  let including_lex_pos =
	    { (Stack.pop lexer_stack) with
		util_state = Parser_util.get_state()
	    } in
	    Stack.push including_lex_pos lexer_stack
      in
      (* reset line and character numbers *)
      let _ = Parser_util.reset_line file in
      (* and put the new lexer on top of the stack *)
      let included_lex_pos =
	{ lexbuf = lexbuf;
	  util_state = Parser_util.get_state();
	  closing_action = closing_action
	} in
	Stack.push included_lex_pos lexer_stack

(* The main function we provide: Read the next token, but treat
   EOF's and INCLUDE directives magically. This function is
   recursive on INCLUDE's, on EOF's of include files, and on the
   first token. The [lexbuf] argument from the parser is ignored
   except on the very first call, where it seeds the lexer stack.
   @raise Include_error if an included file cannot be opened
   (after reporting the error via error_message).
   (The archive dropped the outer try/with — required by the
   surviving `| Stack.Empty` arm — the inner try/with around
   open_in, the `else`, a Stack.top, a string_of_int, and several
   comment closers; all restored here.) *)
    let rec token lexbuf =
      (* ignore the lexbuf argument, use the lexer on top of the stack *)
      try
	(match token_from_top() with

	 (* process an INCLUDE *)
	 | INCLUDE(filename, loc) ->
	     (* resolve the include relative to the toplevel input's
		directory *)
	     let relocated_name =
	       Filename.concat (get_current_directory()) filename in
	     let included_channel =
	       try
		 open_in relocated_name
	       with
	       | Sys_error msg ->
		   error_message loc msg;
		   raise Include_error
	     in
	       divert (Lexing.from_channel included_channel)
		 relocated_name (fun () -> close_in included_channel);
	       token lexbuf

	 (* process EOF's *)
	 | EOF ->
	     (* the finished input's closing action typically closes
		its channel *)
	     (Stack.pop lexer_stack).closing_action ();
	     if empty lexer_stack
	     (* This was EOF on the top level! *)
	     then EOF
	     else
	       (* It's an EOF in an include file: restore the saved
		  position of the including file and keep lexing there *)
	       let top = Stack.top lexer_stack in
	       let top_state = top.util_state in
	       let (line, line_start, file) = top_state
	       in begin
		 Parser_util.set_state top_state;
		 d ("Continuing lexing in file " ^ file ^
		    " at line " ^
		    (string_of_int line) ^
		    " char " ^
		    (string_of_int
		       ((Lexing.lexeme_end top.lexbuf) - line_start)));
		 token lexbuf
	       end

	 (* no include, no EOF -> pass it on *)
	 | othertoken -> othertoken)
      with
      (* catch the empty stack exception on the first token of the
	 toplevel input; initialize the stack and retry *)
      | Stack.Empty ->
	  divert lexbuf
	    (remove_option !current_top_level_input)
	    (* the toplevel is not closed here *)
	    (fun () -> ());
	  token lexbuf



To unsubscribe, mail Archives:
Bug reports: FAQ:
Beginner's list: