# The lexer extracts NIT tokens from an input stream.
# It is better used with the Parser
class Lexer
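+	# Gives access to the generated C tables via lexer_goto and lexer_accept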
+ super TablesCapable
# Last peeked token
var _token: nullable Token
# Lexer current state
var _state: Int = 0
- # Name of the stream (as given to tokens)
- readable var _filename: String
+ # The source file
+ readable var _file: SourceFile
- # Input stream where character are read
- var _stream: IStream
-
- # Pushback buffer to store unread character
- var _stream_buf: Buffer
-
- # Number of character stored in the pushback buffer
- var _stream_pos: Int
+	# Current position in the stream
+ var _stream_pos: Int = 0
# Current line number in the input stream
var _line: Int = 0
	# Was the last character a carriage return?
var _cr: Bool = false
- # If the end of stream?
- var _eof: Bool = false
-
- # Current working text read from the input stream
- var _text: Buffer
-
$ foreach {lexer_data/state}
	# Constant state values
private fun state_${translate(@name,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}: Int do return @id end
$ end foreach
-	# Create a new lexer for a stream (and a name)
+	# Create a new lexer for a source file
- init(stream: IStream, fname: String)
+ init(file: SourceFile)
do
- _filename = fname
- _text = new Buffer
- _stream = stream
- _stream_pos = -1
- _stream_buf = new Buffer
- build_goto_table
- build_accept_table
+ _file = file
end
# Give the next token (but do not consume it)
result = get_token
end
_token = null
- return result.as(not null)
+ return result
end
# Get a token, or null if it is discarded
do
var dfa_state = 0
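+		# Scan the source string directly; sp is the current offset in it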
+ var sp = _stream_pos
+ var start_stream_pos = sp
var start_pos = _pos
var start_line = _line
+ var string = _file.string
+ var string_len = string.length
var accept_state = -1
var accept_token = -1
var accept_pos = -1
var accept_line = -1
- var goto_table = _goto_table[_state]
- var accept = _accept_table[_state]
- var text = _text
- text.clear
-
- while true do
- var c = get_char
+ loop
+ if sp >= string_len then
+ dfa_state = -1
+ else
+ var c = string[sp].ascii
+ sp += 1
- if c != -1 then
var cr = _cr
var line = _line
var pos = _pos
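+				# Maintain line/column and record each line start in the SourceFile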
if c == 10 then
if cr then
cr = false
+ _file.line_starts[line] = sp
else
line = line + 1
pos = 0
+ _file.line_starts[line] = sp
end
else if c == 13 then
line = line + 1
pos = 0
cr = true
+ _file.line_starts[line] = sp
else
pos = pos + 1
cr = false
end
- text.add(c.ascii)
-
- var first_loop = true # aka until
- while dfa_state < -1 or first_loop do
+ loop
var old_state = dfa_state
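+					# A dfa_state below -1 encodes a pending lookup from state -2 - dfa_state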
if dfa_state < -1 then
old_state = -2 - dfa_state
dfa_state = -1
- var tmp0 = goto_table[old_state]
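+					# Binary search in the packed row: index 0 holds the
+					# number of (low, high, target) triples that follow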
var low = 0
- var high = tmp0.length - 1
+ var high = lexer_goto(old_state, 0) - 1
if high >= 0 then
- var tmp1 = tmp0.intern_items
while low <= high do
var middle = (low + high) / 2
- var tmp2 = tmp1[middle].intern_items
+ var offset = middle * 3 + 1 # +1 because length is at 0
- if c < tmp2[0] then
+ if c < lexer_goto(old_state, offset) then
high = middle - 1
- else if c > tmp2[1] then
+ else if c > lexer_goto(old_state, offset+1) then
low = middle + 1
else
- dfa_state = tmp2[2]
- low = high + 1 # aka break
+ dfa_state = lexer_goto(old_state, offset+2)
+ break
end
end
end
- first_loop = false # aka until
+ if dfa_state > -2 then break
end
_cr = cr
_line = line
_pos = pos
- else
- dfa_state = -1
end
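+			# Remember the last accepting state so that the longest match wins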
if dfa_state >= 0 then
- if accept[dfa_state] != -1 then
+ var tok = lexer_accept(dfa_state)
+ if tok != -1 then
accept_state = dfa_state
- accept_token = accept[dfa_state]
-					accept_length = text.length
+ accept_token = tok
+ accept_length = sp - start_stream_pos
accept_pos = _pos
accept_line = _line
end
else
if accept_state != -1 then
+ var location = new Location(_file, start_line + 1, accept_line + 1, start_pos + 1, accept_pos)
+ _pos = accept_pos
+ _line = accept_line
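+					# Rewinding the index replaces the old push_back buffer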
+ _stream_pos = start_stream_pos + accept_length
$ foreach {//token}
if accept_token == ${position()-1} then
- var location = new Location(_filename, start_line + 1, accept_line + 1, start_pos + 1, accept_pos)
-$ if {not(@text)}
-$ if {@parser_index}
- var token_text = text.substring(0, accept_length)
- var token = new @ename.init_tk(token_text, location)
-$ end
-$ else
- var token = new @ename.init_tk(location)
-$ end
- push_back(accept_length)
- _pos = accept_pos
- _line = accept_line
$ if {count(transition[@from!=@to])!=0}
var state_id = _state
$ foreach transition in {transition[@from!=@to]}
$ end
$ end if
$ if {@parser_index}
- return token
+$ if {not(@text)}
+ var token_text = string.substring(start_stream_pos, accept_length)
+ return new @ename.init_tk(token_text, location)
+$ else
+ return new @ename.init_tk(location)
+$ end
$ else
return null
$ end
end
$ end foreach
else
- var location = new Location(_filename, start_line + 1, accept_line + 1, start_pos + 1, accept_pos)
- if text.length > 0 then
- var token = new PError.init_error("Unknown token: {text}", location)
+ _stream_pos = sp
+ var location = new Location(_file, start_line + 1, start_line + 1, start_pos + 1, start_pos + 1)
+ if sp > start_stream_pos then
+ var text = string.substring(start_stream_pos, sp-start_stream_pos)
+ var token = new PError.init_error("Syntax error: unknown token {text}.", location)
return token
else
					var token = new EOF(location)
					return token
				end
end
end
- return null
end
+end
- # Read the next character.
- # The character is read from the stream of from the pushback buffer.
- private fun get_char: Int
- do
- if _eof then
- return -1
- end
-
- var result: Int
-
- var sp = _stream_pos
- if sp >= 0 then
- var res = _stream_buf[_stream_pos]
- _stream_pos = sp - 1
- result = res.ascii
- else
- result = _stream.read_char
- end
-
- if result == -1 then
- _eof = true
- end
+$ end template
- return result
- end
- # Unread some characters.
- # Unread characters are stored in the pushback buffer.
- private fun push_back(accept_length: Int)
- do
- var length = _text.length
- var i = length - 1
- while i >= accept_length do
- _eof = false
- _stream_pos = _stream_pos + 1
- _stream_buf[_stream_pos] = _text[i]
- i = i - 1
- end
- end
- var _goto_table: Array[Array[Array[Array[Int]]]]
- private fun build_goto_table
- do
- _goto_table = once [
+$ template make_lexer_table()
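+/* Compact lexer tables, generated as flat C arrays.
+   Each goto row stores its triple count at index 0, then
+   (low, high, target) triples sorted by character range. */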
$ foreach {lexer_data/goto_table/state}
- [
$ foreach {row}
$ if {count(goto)!=0}
- [
+static const int lexer_goto_row${position()}[] = {
+ ${count(goto)},
$ foreach {goto}
- [@low, @high, @state][-sep ','-]
+ @low, @high, @state[-sep ','-]
$ end foreach
- ][-sep ','-]
+};
+$ end
+$ end foreach
+static const int lexer_goto_row_null[] = {0};
+const int* const lexer_goto_table[] = {
+$ foreach {row}
+$ if {count(goto)!=0}
+ lexer_goto_row${position()}[-sep ','-]
$ else
- nil_array[-sep ','-]
+ lexer_goto_row_null[-sep ','-]
$ end
$ end foreach
- ][-sep ','-]
+};
$ end foreach
- ]
- end
- private fun nil_array: Array[Array[Int]]
- do
- return once new Array[Array[Int]]
- end
-
- var _accept_table: Array[Array[Int]]
- private fun build_accept_table do
- _accept_table = once [
$ foreach {lexer_data/accept_table/state}
- [
- [-foreach {i}-]${.}[-sep ','-][-end foreach-]
-
- ][-sep ','-]
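+/* lexer_accept_table[state] is the token id accepted in that DFA state, or -1 */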
+const int lexer_accept_table[] = {
+ [-foreach {i}-]${.}[-sep ','-][-end foreach-]
+};
$ end foreach
- ]
- end
-end
$ end template