/* line_reader.anubis (3.91 KB) — stray export artifact, kept as a comment */
/*
 * Created by PyramIDE.
 * User: ricard
 * Date: 09/09/2008
 * Time: 20:32
 * 
 */



 // A buffered line reader over a raw Data_IO channel.
 // NOTE(review): a second, incompatible `LineReader` type is declared later
 // in this file (lexer-based); only one of the two can be live — confirm
 // which half of this file is the active implementation.
 public type LineReader:
  line_reader(Data_IO       data_io,    // underlying I/O channel
              Var(String)   buffer,     // text buffered so far (scanned for crlf)
              Var(Int)      read_pos).  // index of the next unread position in buffer

 // Outcome of a read operation: hard error, timeout, or a successful line.
 // NOTE(review): this type is not referenced anywhere in the visible code
 // (read_line returns Maybe(String) instead) — possibly dead; confirm.
 public type ReadResult:
  error,
  timeout,
  ok(String).

 // Reads one CRLF-terminated line from the reader's buffer.
 // Returns success(line) with the terminator stripped, or failure when the
 // buffer holds no complete line after read_pos.  `timeout` is accepted for
 // interface compatibility but unused here (no blocking refill is done).
 // NOTE(review): the original body was truncated (empty failure branch,
 // unterminated success branch, missing final '.').  This completion also
 // advances read_pos past the terminator so successive calls return
 // successive lines — confirm this matches the intended Var semantics.
 public define Maybe(String)
  read_line
  (
    LineReader  lr,
    Int         timeout
  ) = 
  if lr is line_reader(data_io, buffer, read_pos) then
  if find_string(*buffer, crlf, *read_pos) is
  {
    failure then 
      // No terminator after read_pos: no complete line is available yet.
      failure,
    success(end_pos) then
      if sub_string(*buffer, *read_pos, end_pos - *read_pos) is
      {
        failure then failure,
        success(line) then
          // Move the cursor past the line and its CRLF terminator.
          read_pos <- end_pos + length(crlf);
          success(line)
      }
  }.

//---------------------------------------------------------------------------

read lexical_analysis/fast_lexer_4.anubis   


// Tokens produced by the line-splitting lexer:
//   line(s) — the text of one line, terminator excluded (rule "[^\r\n]*"),
//   eol     — one end-of-line marker.
type Token:
   line(String),
   eol.

// Wrapper around the compiled lexer-building function, kept so the same
// compiled automaton can be attached to a fresh LexingStream later
// (see make_line_reader and reset_line_reader below).
public type LineReaderLexer:
  line_reader_lexer((LexingStream, One) -> One -> LexerOutput(Token) /*lexer_base*/).

// A line reader driven by the Token lexer from fast_lexer_4.
// NOTE(review): an earlier, incompatible `LineReader` type is declared near
// the top of this file; only one of the two can be live — confirm.
public type LineReader:
  line_reader(One -> LexerOutput(Token)   /*lexer*/,        // pulls the next token
              One -> Int                  /*offset*/,       // current stream offset
              LineReaderLexer             /*lexer_base*/,   // kept for resetting
              LexingStream           lexing_stream).        // underlying stream

// Returns the reader's current position in its lexing stream, obtained by
// evaluating the offset thunk stored inside the reader.
public define Int
  current_offset
  (
    LineReader   lr,
  ) =
  if lr is line_reader(_, get_offset, _, _) then get_offset(unique).

// Returns the next line of the stream, or failure at end of input.
// A line(l) token is followed by one extra lexer step that consumes the
// matching eol token; a bare eol token (empty line) yields success("").
public define Maybe(String)
  read_line
  (
    LineReader   lr,
  ) = 
  if lr is line_reader(lexer, offset, _, _) then
  if lexer(unique) is 
  {
    end_of_input   then /* no more token: exit the main loop */ 
      //print("End of input\n");
      failure,  
    
    error(b, l, c)       then
      /* should never happen with this lexer (see the above comment) */
      print("Lexer error: ["+to_string(b)+"]\n"); failure,
    
    token(t)       then 
      /* a token has been recognized */
      if t is 
      {
        line(l) then
          //print("tk: line("+l+") @ "+abs_to_decimal(offset(unique))+"\n");
          // NOTE(review): if the last line has no terminator, this discards
          // end_of_input instead of an eol token — confirm that is benign.
          forget(lexer(unique)); // reading EOL
          success(l),
        eol then 
          //print("tk: eol\n");
          success("")
      }
  }. 



// Compiles the Token lexer shared by every LineReader:
//   rule 1: "#r?#n"    — optional CR followed by LF      -> eol
//   rule 2: "#r"       — lone CR                         -> eol
//   rule 3: "[^\r\n]*" — longest run of non-terminators  -> line(text)
// '#' is declared as the escape character of the regex syntax.
// NOTE(review): rule 3 writes the terminators as "\r\n" while rules 1-2
// write them "#r"/"#n"; unless string literals interpret backslash escapes,
// the character class would exclude the wrong characters — confirm.
public define Maybe(LineReaderLexer)
  make_line_reader_lexer
  =
  if make_lexer([
                  lexer_item("#r?#n", return((ByteArray b, LexingTools t, One aux) |-> token(eol))),
                  lexer_item("#r", return((ByteArray b, LexingTools t, One aux) |-> token(eol))),
                  lexer_item("[^\r\n]*", return((ByteArray b, LexingTools t, One aux) |-> token(line(to_string(b))))),
                ],
                              '#') is
  {
    error(msg) then print("Syntax error in regular expression: "+to_English(msg)+"\n"); failure,
    ok(lexer)  then success(line_reader_lexer(lexer))
  }.

// Builds a LineReader by attaching an already-compiled lexer base to the
// given lexing stream.  The base itself is stored in the reader so it can
// be re-attached to another stream later (see reset_line_reader).
public define LineReader
  make_line_reader
  (
    LexingStream    ls,
    LineReaderLexer make_lexer
  ) =
  if make_lexer is line_reader_lexer(mk) then
  line_reader(mk(ls, unique),           // token puller bound to ls
              (One u) |-> offset(ls),   // thunk reporting the stream position
              make_lexer,               // kept for later resets
              ls).

// Builds a LineReader on the given stream, first compiling the shared
// lexer; a lexer-compilation failure is propagated as failure.
public define Maybe(LineReader)
  make_line_reader
  (
    LexingStream  ls,
  ) = 
  if make_line_reader_lexer is
  {
    failure       then failure,
    success(base) then success(make_line_reader(ls, base))
  }.


// Convenience overload: read lines from an in-memory string, via a
// string-backed lexing stream with an empty preamble.
public define Maybe(LineReader)
  make_line_reader
  (
    String  s,
  ) =
  make_line_reader(make_lexing_stream("", s)).
  
// Convenience overload: read lines from an opened stream with the given
// timeout, using a 64 KiB lexing-stream buffer.  Fails (with a message
// printed) when the lexing stream cannot be created.
public define Maybe(LineReader)
  make_line_reader
  (
    RStream f,
    Int     timeout
  ) =
  if make_lexing_stream("",       /* preamble */
                        f,        /* the opened file */
                        65536,    /* size of buffer for the lexing stream */
                        timeout)  /* timeout (seconds) */
  is
  {
    failure     then print("cannot make lexing stream.\n"); failure, 
    success(ls) then make_line_reader(ls)
  }.

   
// Rebuilds a reader on a new stream, reusing the already-compiled lexer
// base carried by `lr`; the old token puller, offset thunk and stream are
// simply dropped.
public define LineReader
  reset_line_reader
  (
    LineReader    lr,
    LexingStream  ls,
  ) =
  if lr is line_reader(_, _, base, _) then
  make_line_reader(ls, base).