# It is better used with the Parser
class Lexer
super TablesCapable
+
# Last peeked token
- var token: nullable Token
+ var token: nullable Token = null
# Lexer current state
private var state: Int = 0
# Current column in the input stream
var pos: Int = 0
- # Was the last character a cariage-return?
+ # Was the last character a carriage-return?
var cr: Bool = false
# Constant state values
private fun state_initial: Int do return 0 end
- # Create a new lexer for a stream (and a name)
- init(file: SourceFile)
- do
- self.file = file
- end
-
# The last peeked token to chain them
private var last_token: nullable Token = null
end
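A minimal usage sketch (an assumption, not part of this patch): the Lexer is normally driven through the Parser. `SourceFile.from_string`, `Parser`, and `parse` come from the surrounding nitc API and are assumed here, as is `file` remaining an autoinit attribute once the explicit `init` above is removed.

var source = new SourceFile.from_string("example.nit", "var x = 1\n")
var lexer = new Lexer(source)   # the class patched above
var parser = new Parser(lexer)  # the Parser pulls tokens from this Lexer
var tree = parser.parse         # root node of the resulting AST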
else
if accept_state != -1 then
- var location = new Location(file, start_line + 1, accept_line + 1, start_pos + 1, accept_pos)
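# Rewind the input to the end of the last accepted token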
_pos = accept_pos
_line = accept_line
_stream_pos = start_stream_pos + accept_length
if accept_token == 0 then
+ # Ignored token (whitespace)
return null
end
+ var location = new Location(file, start_line + 1, accept_line + 1, start_pos + 1, accept_pos)
return make_token(accept_token, location)
else
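# No accepting state was reached: reset the stream position (sp was presumably saved on entry)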
_stream_pos = sp