# It is better used with the Parser
class Lexer
super TablesCapable
+
# Last peeked token
- var token: nullable Token
+ var token: nullable Token = null
# Lexer current state
private var state: Int = 0
# Constant state values
private fun state_initial: Int do return 0 end
- # Create a new lexer for a stream (and a name)
- init(file: SourceFile)
- do
- self.file = file
- end
-
# The last peeked token to chain them
private var last_token: nullable Token = null
end
else
if accept_state != -1 then
- var location = new Location(file, start_line + 1, accept_line + 1, start_pos + 1, accept_pos)
_pos = accept_pos
_line = accept_line
_stream_pos = start_stream_pos + accept_length
if accept_token == 0 then
+ # Ignored token (whitespaces)
return null
end
+ var location = new Location(file, start_line + 1, accept_line + 1, start_pos + 1, accept_pos)
return make_token(accept_token, location)
else
_stream_pos = sp
var location = new Location(file, start_line + 1, start_line + 1, start_pos + 1, start_pos + 1)
if sp > start_stream_pos then
var text = string.substring(start_stream_pos, sp-start_stream_pos)
- var token = new ALexerError.init_lexer_error("Syntax error: unknown token {text}.", location, text)
+ var token = new ALexerError.init_lexer_error("Syntax Error: unknown token `{text}`.", location, text)
file.last_token = token
return token
else