# It is better to use it with the Parser
class Lexer
# Last peeked token
- attr _token: Token
+ attr _token: nullable Token
# Lexer current state
- attr _state: Int
+ attr _state: Int = 0
# Name of the stream (as given to tokens)
readable attr _filename: String
attr _stream: IStream
# Pushback buffer to store unread characters
- attr _stream_buf: String
+ attr _stream_buf: Buffer
# Number of characters stored in the pushback buffer
attr _stream_pos: Int
# Current line number in the input stream
- attr _line: Int
+ attr _line: Int = 0
# Current column in the input stream
- attr _pos: Int
+ attr _pos: Int = 0
# Was the last character a carriage-return?
- attr _cr: Bool
+ attr _cr: Bool = false
# Is it the end of the stream?
- attr _eof: Bool
+ attr _eof: Bool = false
# Current working text read from the input stream
- attr _text: String
+ attr _text: Buffer
$ foreach {lexer_data/state}
# Constant state values
init(stream: IStream, fname: String)
do
_filename = fname
- _text = new String
+ _text = new Buffer
_stream = stream
_stream_pos = -1
- _stream_buf = new String
+ _stream_buf = new Buffer
build_goto_table
build_accept_table
end
while _token == null do
_token = get_token
end
- return _token
+ return _token.as(not null)
end
# Give and consume the next token
result = get_token
end
_token = null
- return result
+ return result.as(not null)
end
# Get a token, or null if it is discarded
- private meth get_token: Token
+ private meth get_token: nullable Token
do
var dfa_state = 0