X-Git-Url: http://nitlanguage.org

diff --git a/src/parser/xss/lexer.xss b/src/parser/xss/lexer.xss
index a8fa706..6a4d1d0 100644
--- a/src/parser/xss/lexer.xss
+++ b/src/parser/xss/lexer.xss
@@ -20,24 +20,18 @@ $ template make_lexer()
 # The lexer extract NIT tokens from an input stream.
 # It is better user with the Parser
 class Lexer
-special TablesCapable
+	super TablesCapable
 	# Last peeked token
 	var _token: nullable Token

 	# Lexer current state
 	var _state: Int = 0

-	# Name of the stream (as given to tokens)
-	readable var _filename: String
+	# The source file
+	readable var _file: SourceFile

-	# Input stream where character are read
-	var _stream: IStream
-
-	# Pushback buffer to store unread character
-	var _stream_buf: Buffer
-
-	# Number of character stored in the pushback buffer
-	var _stream_pos: Int
+	# Current character in the stream
+	var _stream_pos: Int = 0

 	# Current line number in the input stream
 	var _line: Int = 0
@@ -48,54 +42,60 @@ special TablesCapable
 	# Was the last character a cariage-return?
 	var _cr: Bool = false

-	# If the end of stream?
-	var _eof: Bool = false
-
-	# Current working text read from the input stream
-	var _text: Buffer
-
 $ foreach {lexer_data/state}
 	# Constante state values
 	private fun state_${translate(@name,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}: Int do return @id end
 $ end foreach

 	# Create a new lexer for a stream (and a name)
-	init(stream: IStream, fname: String)
+	init(file: SourceFile)
 	do
-		_filename = fname
-		_text = new Buffer
-		_stream = stream
-		_stream_pos = -1
-		_stream_buf = new Buffer
+		_file = file
 	end

+	# The last peeked token to chain them
+	private var last_token: nullable Token = null
+
 	# Give the next token (but do not consume it)
 	fun peek: Token
 	do
-		while _token == null do
-			_token = get_token
+		var t = _token
+		if t != null then return t
+
+		t = get_token
+		while t == null do t = get_token
+
+		var l = last_token
+		if l != null then
+			l.next_token = t
+			t.prev_token = l
 		end
-		return _token.as(not null)
+
+		last_token = t
+		_token = t
+		return t
 	end

 	# Give and consume the next token
 	fun next: Token
 	do
-		var result = _token
-		while result == null do
-			result = get_token
-		end
+		var result = peek
 		_token = null
-		return result.as(not null)
+		return result
 	end

-	# Get a token, or null if it is discarded
-	private fun get_token: nullable Token
+	# Primitive method to return a token, or return null if it is discarded
+	# Is used to implement `peek` and `next`
+	protected fun get_token: nullable Token
 	do
 		var dfa_state = 0

+		var sp = _stream_pos
+		var start_stream_pos = sp
 		var start_pos = _pos
 		var start_line = _line
+		var string = _file.string
+		var string_len = string.length

 		var accept_state = -1
 		var accept_token = -1
@@ -103,34 +103,35 @@ $ end foreach
 		var accept_pos = -1
 		var accept_line = -1
-		var text = _text
-		text.clear
-
 		loop
-			var c = get_char
+			if sp >= string_len then
+				dfa_state = -1
+			else
+				var c = string[sp].ascii
+				sp += 1

-			if c != -1 then
 				var cr = _cr
 				var line = _line
 				var pos = _pos

 				if c == 10 then
 					if cr then
 						cr = false
+						_file.line_starts[line] = sp
 					else
 						line = line + 1
 						pos = 0
+						_file.line_starts[line] = sp
 					end
 				else if c == 13 then
 					line = line + 1
 					pos = 0
 					cr = true
+					_file.line_starts[line] = sp
 				else
 					pos = pos + 1
 					cr = false
 				end

-				text.add(c.ascii)
-
 				loop
 					var old_state = dfa_state
 					if dfa_state < -1 then
@@ -163,8 +164,6 @@ $ end foreach
 				_cr = cr
 				_line = line
 				_pos = pos
-			else
-				dfa_state = -1
 			end

 			if dfa_state >= 0 then
@@ -172,16 +171,16 @@ $ end foreach
 				if tok != -1 then
 					accept_state = dfa_state
 					accept_token = tok
-					accept_length = text.length
+					accept_length = sp - start_stream_pos
 					accept_pos = _pos
 					accept_line = _line
 				end
 			else
 				if accept_state != -1 then
-					var location = new Location(_filename, start_line + 1, accept_line + 1, start_pos + 1, accept_pos)
+					var location = new Location(_file, start_line + 1, accept_line + 1, start_pos + 1, accept_pos)
 					_pos = accept_pos
 					_line = accept_line
-					push_back(accept_length)
+					_stream_pos = start_stream_pos + accept_length
 $ foreach {//token}
 					if accept_token == ${position()-1} then
 $ if {count(transition[@from!=@to])!=0}
@@ -193,70 +192,25 @@ $ foreach transition in {transition[@from!=@to]}
 $ end
 $ end if
 $ if {@parser_index}
-$ if {not(@text)}
-						var token_text = text.substring(0, accept_length)
-						return new @ename.init_tk(token_text, location)
-$ else
 						return new @ename.init_tk(location)
-$ end
 $ else
 						return null
 $ end
 					end
 $ end foreach
 				else
-					var location = new Location(_filename, start_line + 1, start_line + 1, start_pos + 1, start_pos + 1)
-					if text.length > 0 then
-						var token = new PError.init_error("Syntax error: unknown token {text}.", location)
+					_stream_pos = sp
+					var location = new Location(_file, start_line + 1, start_line + 1, start_pos + 1, start_pos + 1)
+					if sp > start_stream_pos then
+						var text = string.substring(start_stream_pos, sp-start_stream_pos)
+						var token = new PLexerError.init_lexer_error("Syntax error: unknown token {text}.", location, text)
 						return token
 					else
-						var token = new EOF(location)
+						var token = new EOF.init_tk(location)
 						return token
 					end
 				end
 			end
-			if false then break # FIXME remove once unreach loop exits are in c_src
-		end
-		return null # FIXME remove once unreach loop exits are in c_src
-	end
-
-	# Read the next character.
-	# The character is read from the stream of from the pushback buffer.
-	private fun get_char: Int
-	do
-		if _eof then
-			return -1
-		end
-
-		var result: Int
-
-		var sp = _stream_pos
-		if sp >= 0 then
-			var res = _stream_buf[_stream_pos]
-			_stream_pos = sp - 1
-			result = res.ascii
-		else
-			result = _stream.read_char
-		end
-
-		if result == -1 then
-			_eof = true
-		end
-
-		return result
-	end
-
-	# Unread some characters.
-	# Unread characters are stored in the pushback buffer.
-	private fun push_back(accept_length: Int)
-	do
-		var length = _text.length
-		var i = length - 1
-		while i >= accept_length do
-			_eof = false
-			_stream_pos = _stream_pos + 1
-			_stream_buf[_stream_pos] = _text[i]
-			i = i - 1
 		end
 	end
 end
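The patch above drops the IStream/pushback-buffer machinery: the lexer now walks the whole text of a SourceFile with a plain integer cursor (_stream_pos), records line starts as it scans, and chains peeked tokens through prev_token/next_token. The Python sketch below illustrates only that control flow, under simplified assumptions: the names SourceFile, Token, peek, next, get_token and stream_pos mirror the diff, while the one-rule longest-match scanner stands in for the generated DFA and the tuple locations are invented for the example; it is not the Nit implementation.

# Minimal sketch of the two ideas in this patch:
# (1) lex directly over the in-memory source text with an integer cursor
#     instead of a character stream plus pushback buffer, and
# (2) memoize the peeked token and chain tokens with prev/next links.

class SourceFile:
    def __init__(self, filename, string):
        self.filename = filename
        self.string = string          # whole file kept in memory
        self.line_starts = [0]        # the real lexer updates this as it scans; unused here

class Token:
    def __init__(self, text, location):
        self.text = text
        self.location = location
        self.prev_token = None
        self.next_token = None

class Lexer:
    def __init__(self, file):
        self.file = file
        self.stream_pos = 0           # cursor into file.string (replaces the pushback buffer)
        self.token = None             # last peeked, not yet consumed token
        self.last_token = None        # previously produced token, for chaining

    def peek(self):
        if self.token is not None:
            return self.token
        t = self.get_token()
        while t is None:              # get_token may discard blanks/comments
            t = self.get_token()
        if self.last_token is not None:
            self.last_token.next_token = t
            t.prev_token = self.last_token
        self.last_token = t
        self.token = t
        return t

    def next(self):
        result = self.peek()
        self.token = None             # consume the peeked token
        return result

    def get_token(self):
        string = self.file.string
        sp = self.stream_pos
        start = sp
        if sp >= len(string):
            return Token("", (start, start))      # EOF token
        # Toy "DFA": accept the longest run of letters, else a single character.
        accept_length = 0
        while sp < len(string) and string[sp].isalpha():
            sp += 1
            accept_length = sp - start            # remember the last accepting position
        if accept_length == 0:
            sp += 1
            accept_length = 1
        # Rewinding after an overshoot is a single cursor assignment,
        # no push_back() into a buffer is needed.
        self.stream_pos = start + accept_length
        text = string[start:start + accept_length]
        if text.isspace():
            return None                           # discarded token
        return Token(text, (start, start + accept_length))

lexer = Lexer(SourceFile("example.nit", "var x"))
print([lexer.next().text for _ in range(3)])      # ['var', 'x', '']

Because the whole file stays in memory, undoing the scanner's overshoot past the last accepting state is one cursor assignment (the patch's _stream_pos = start_stream_pos + accept_length) rather than copying characters back into a pushback buffer, and peek/next only need to memoize and link the tokens they hand out.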