-/* This file is part of NIT ( http://www.nitlanguage.org ).
- *
- * Copyright 2008 Jean Privat <jean@pryen.org>
- * Based on algorithms developped for ( http://www.sablecc.org/ ).
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
+$ // This file is part of NIT ( http://www.nitlanguage.org ).
+$ //
+$ // Copyright 2008 Jean Privat <jean@pryen.org>
+$ // Based on algorithms developed for ( http://www.sablecc.org/ ).
+$ //
+$ // Licensed under the Apache License, Version 2.0 (the "License");
+$ // you may not use this file except in compliance with the License.
+$ // You may obtain a copy of the License at
+$ //
+$ // http://www.apache.org/licenses/LICENSE-2.0
+$ //
+$ // Unless required by applicable law or agreed to in writing, software
+$ // distributed under the License is distributed on an "AS IS" BASIS,
+$ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+$ // See the License for the specific language governing permissions and
+$ // limitations under the License.
$ template make_lexer()
# The lexer extract NIT tokens from an input stream.
# It is better user with the Parser
class Lexer
+ super TablesCapable
# Last peeked token
var _token: nullable Token
# Lexer current state
var _state: Int = 0
- # Name of the stream (as given to tokens)
- readable var _filename: String
+ # The source file
+ readable var _file: SourceFile
- # Input stream where character are read
- var _stream: IStream
-
- # Pushback buffer to store unread character
- var _stream_buf: Buffer
-
- # Number of character stored in the pushback buffer
- var _stream_pos: Int
+ # Current position in the stream (index of the next character to read)
+ var _stream_pos: Int = 0
# Current line number in the input stream
var _line: Int = 0
# Was the last character a cariage-return?
var _cr: Bool = false
- # If the end of stream?
- var _eof: Bool = false
-
- # Current working text read from the input stream
- var _text: Buffer
-
$ foreach {lexer_data/state}
# Constante state values
private fun state_${translate(@name,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}: Int do return @id end
$ end foreach
# Create a new lexer for a stream (and a name)
- init(stream: IStream, fname: String)
+ init(file: SourceFile)
do
- _filename = fname
- _text = new Buffer
- _stream = stream
- _stream_pos = -1
- _stream_buf = new Buffer
- build_goto_table
- build_accept_table
+ _file = file
end
+ # The last peeked token, kept so consecutive tokens can be chained via their prev/next links
+ private var last_token: nullable Token = null
+
# Give the next token (but do not consume it)
fun peek: Token
do
- while _token == null do
- _token = get_token
+ var t = _token
+ if t != null then return t
+
+ t = get_token
+ while t == null do t = get_token
+
+ var l = last_token
+ if l != null then
+ l.next_token = t
+ t.prev_token = l
end
- return _token.as(not null)
+
+ last_token = t
+ _token = t
+ return t
end
# Give and consume the next token
fun next: Token
do
- var result = _token
- while result == null do
- result = get_token
- end
+ var result = peek
_token = null
- return result.as(not null)
+ return result
end
- # Get a token, or null if it is discarded
- private fun get_token: nullable Token
+ # Primitive method to return a token, or return null if it is discarded
+ # It is used to implement `peek` and `next`
+ protected fun get_token: nullable Token
do
var dfa_state = 0
+ var sp = _stream_pos
+ var start_stream_pos = sp
var start_pos = _pos
var start_line = _line
+ var string = _file.string
+ var string_len = string.length
var accept_state = -1
var accept_token = -1
var accept_pos = -1
var accept_line = -1
- var goto_table = _goto_table[_state]
- var accept = _accept_table[_state]
- _text.clear
-
- while true do
- var c = get_char
+ loop
+ if sp >= string_len then
+ dfa_state = -1
+ else
+ var c = string[sp].ascii
+ sp += 1
- if c != -1 then
+ var cr = _cr
+ var line = _line
+ var pos = _pos
if c == 10 then
- if _cr then
- _cr = false
+ if cr then
+ cr = false
+ _file.line_starts[line] = sp
else
- _line = _line + 1
- _pos = 0
+ line = line + 1
+ pos = 0
+ _file.line_starts[line] = sp
end
else if c == 13 then
- _line = _line + 1
- _pos = 0
- _cr = true
+ line = line + 1
+ pos = 0
+ cr = true
+ _file.line_starts[line] = sp
else
- _pos = _pos + 1
- _cr = false
+ pos = pos + 1
+ cr = false
end
- _text.add(c.ascii)
-
- var first_loop = true # aka until
- while dfa_state < -1 or first_loop do
+ loop
var old_state = dfa_state
if dfa_state < -1 then
old_state = -2 - dfa_state
dfa_state = -1
- var tmp1 = goto_table[old_state]
var low = 0
- var high = tmp1.length - 1
-
- while low <= high do
- var middle = (low + high) / 2
- var tmp2 = tmp1[middle]
-
- if c < tmp2[0] then
- high = middle - 1
- else if c > tmp2[1] then
- low = middle + 1
- else
- dfa_state = tmp2[2]
- low = high + 1 # aka break
+ var high = lexer_goto(old_state, 0) - 1
+
+ if high >= 0 then
+ while low <= high do
+ var middle = (low + high) / 2
+ var offset = middle * 3 + 1 # +1 because length is at 0
+
+ if c < lexer_goto(old_state, offset) then
+ high = middle - 1
+ else if c > lexer_goto(old_state, offset+1) then
+ low = middle + 1
+ else
+ dfa_state = lexer_goto(old_state, offset+2)
+ break
+ end
end
end
- first_loop = false # aka until
+ if dfa_state > -2 then break
end
- else
- dfa_state = -1
+
+ _cr = cr
+ _line = line
+ _pos = pos
end
if dfa_state >= 0 then
- if accept[dfa_state] != -1 then
+ var tok = lexer_accept(dfa_state)
+ if tok != -1 then
accept_state = dfa_state
- accept_token = accept[dfa_state]
- accept_length = _text.length
+ accept_token = tok
+ accept_length = sp - start_stream_pos
accept_pos = _pos
accept_line = _line
end
else
if accept_state != -1 then
+ var location = new Location(_file, start_line + 1, accept_line + 1, start_pos + 1, accept_pos)
+ _pos = accept_pos
+ _line = accept_line
+ _stream_pos = start_stream_pos + accept_length
$ foreach {//token}
if accept_token == ${position()-1} then
- var location = new Location(_filename, start_line + 1, accept_line + 1, start_pos + 1, accept_pos)
-$ if {not(@text)}
-$ if {@parser_index}
- var token_text = _text.substring(0, accept_length)
- var token = new @ename.init_tk(token_text, location)
-$ end
-$ else
- var token = new @ename.init_tk(location)
-$ end
- push_back(accept_length)
- _pos = accept_pos
- _line = accept_line
$ if {count(transition[@from!=@to])!=0}
var state_id = _state
$ foreach transition in {transition[@from!=@to]}
$ end
$ end if
$ if {@parser_index}
- return token
+ return new @ename.init_tk(location)
$ else
return null
$ end
end
$ end foreach
else
- var location = new Location(_filename, start_line + 1, accept_line + 1, start_pos + 1, accept_pos)
- if _text.length > 0 then
- var token = new PError.init_error("Unknown token: {_text}", location)
+ _stream_pos = sp
+ var location = new Location(_file, start_line + 1, start_line + 1, start_pos + 1, start_pos + 1)
+ if sp > start_stream_pos then
+ var text = string.substring(start_stream_pos, sp-start_stream_pos)
+ var token = new PLexerError.init_lexer_error("Syntax error: unknown token {text}.", location, text)
return token
else
- var token = new EOF(location)
+ var token = new EOF.init_tk(location)
return token
end
end
end
end
- return null
end
+end
- # Read the next character.
- # The character is read from the stream of from the pushback buffer.
- private fun get_char: Int
- do
- if _eof then
- return -1
- end
-
- var result: Int
-
- var sp = _stream_pos
- if sp >= 0 then
- var res = _stream_buf[_stream_pos]
- _stream_pos = sp - 1
- result = res.ascii
- else
- result = _stream.read_char
- end
-
- if result == -1 then
- _eof = true
- end
+$ end template
- return result
- end
- # Unread some characters.
- # Unread characters are stored in the pushback buffer.
- private fun push_back(accept_length: Int)
- do
- var length = _text.length
- var i = length - 1
- while i >= accept_length do
- _eof = false
- _stream_pos = _stream_pos + 1
- _stream_buf[_stream_pos] = _text[i]
- i = i - 1
- end
- end
- var _goto_table: Array[Array[Array[Array[Int]]]]
- private fun build_goto_table
- do
- _goto_table = once [
+$ template make_lexer_table()
$ foreach {lexer_data/goto_table/state}
- [
$ foreach {row}
$ if {count(goto)!=0}
- [
+static const int lexer_goto_row${position()}[] = {
+ ${count(goto)},
$ foreach {goto}
- [@low, @high, @state] [-sep ','-]
+ @low, @high, @state[-sep ','-]
$ end foreach
- ] [-sep ','-]
+};
+$ end
+$ end foreach
+static const int lexer_goto_row_null[] = {0};
+const int* const lexer_goto_table[] = {
+$ foreach {row}
+$ if {count(goto)!=0}
+ lexer_goto_row${position()}[-sep ','-]
$ else
- nil_array [-sep ','-]
+ lexer_goto_row_null[-sep ','-]
$ end
$ end foreach
- ] [-sep ','-]
+};
$ end foreach
- ]
- end
-
- private fun nil_array: Array[Array[Int]]
- do
- return once new Array[Array[Int]]
- end
- var _accept_table: Array[Array[Int]]
- private fun build_accept_table do
- _accept_table = once [
$ foreach {lexer_data/accept_table/state}
- [
- [-foreach {i}-]${.} [-sep ','-] [-end foreach-]
-
- ] [-sep ','-]
+const int lexer_accept_table[] = {
+ [-foreach {i}-]${.}[-sep ','-][-end foreach-]
+};
$ end foreach
- ]
- end
-end
$ end template