_file = file
end
+ # The last peeked token; used to chain tokens together
+ private var last_token: nullable Token = null
+
# Give the next token (but do not consume it)
fun peek: Token
do
- while _token == null do
- _token = get_token
+ var t = _token
+ if t != null then return t
+
+ t = get_token
+ while t == null do t = get_token
+
+ var l = last_token
+ if l != null then
+ l.next_token = t
+ t.prev_token = l
end
- return _token.as(not null)
+
+ last_token = t
+ _token = t
+ return t
end
# Give and consume the next token
fun next: Token
do
- var result = _token
- while result == null do
- result = get_token
- end
+ var result = peek
_token = null
return result
end
- # Get a token, or null if it is discarded
- private fun get_token: nullable Token
+ # Primitive method to return a token, or return null if it is discarded
+ # Used to implement `peek` and `next`
+ protected fun get_token: nullable Token
do
var dfa_state = 0
if c == 10 then
if cr then
cr = false
+ _file.line_starts[line] = sp
else
line = line + 1
pos = 0
+ _file.line_starts[line] = sp
end
else if c == 13 then
line = line + 1
pos = 0
cr = true
+ _file.line_starts[line] = sp
else
pos = pos + 1
cr = false
$ end
$ end if
$ if {@parser_index}
-$ if {not(@text)}
- var token_text = string.substring(start_stream_pos, accept_length)
- return new @ename.init_tk(token_text, location)
-$ else
return new @ename.init_tk(location)
-$ end
$ else
return null
$ end
var location = new Location(_file, start_line + 1, start_line + 1, start_pos + 1, start_pos + 1)
if sp > start_stream_pos then
var text = string.substring(start_stream_pos, sp-start_stream_pos)
- var token = new PError.init_error("Syntax error: unknown token {text}.", location)
+ var token = new PLexerError.init_lexer_error("Syntax error: unknown token {text}.", location, text)
return token
else
- var token = new EOF(location)
+ var token = new EOF.init_tk(location)
return token
end
end