parser: the parent of EOF is Start
diff --git a/src/parser/xss/lexer.xss b/src/parser/xss/lexer.xss
index 2eb31da..affaa11 100644
--- a/src/parser/xss/lexer.xss
+++ b/src/parser/xss/lexer.xss
@@ -16,246 +16,25 @@ $ // See the License for the specific language governing permissions and
 $ // limitations under the License.
 
 $ template make_lexer()
-
-# The lexer extracts NIT tokens from an input stream.
-# It is best used with the Parser
-class Lexer
-special TablesCapable
-       # Last peeked token
-       var _token: nullable Token
-
-       # Lexer current state
-       var _state: Int = 0
-
-       # Name of the stream (as given to tokens)
-       readable var _filename: String
-
-	# Input stream where characters are read
-       var _stream: IStream
-
-	# Pushback buffer to store unread characters
-       var _stream_buf: Buffer
-
-	# Number of characters stored in the pushback buffer
-       var _stream_pos: Int
-
-       # Current line number in the input stream
-       var _line: Int = 0
-
-       # Current column in the input stream
-       var _pos: Int = 0
-
-	# Was the last character a carriage return?
-       var _cr: Bool = false
-
-	# Is the end of the stream reached?
-       var _eof: Bool = false
-
-       # Current working text read from the input stream
-       var _text: Buffer
-
-$ foreach {lexer_data/state}
-	# Constant state values
-       private fun state_${translate(@name,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}: Int do return @id end
-$ end foreach
-
-       # Create a new lexer for a stream (and a name)
-       init(stream: IStream, fname: String)
+redef class Lexer
+       redef fun make_token(accept_token, location)
        do
-               _filename = fname
-               _text = new Buffer
-               _stream = stream
-               _stream_pos = -1
-               _stream_buf = new Buffer
-       end
-
-       # Give the next token (but do not consume it)
-       fun peek: Token
-       do
-               while _token == null do
-                       _token = get_token
-               end
-               return _token.as(not null)
-       end
-
-       # Give and consume the next token
-       fun next: Token
-       do
-               var result = _token
-               while result == null do
-                       result = get_token
-               end
-               _token = null
-               return result
-       end
-
-       # Get a token, or null if it is discarded
-       private fun get_token: nullable Token
-       do
-               var dfa_state = 0
-
-               var start_pos = _pos
-               var start_line = _line
-
-               var accept_state = -1
-               var accept_token = -1
-               var accept_length = -1
-               var accept_pos = -1
-               var accept_line = -1
-
-               var text = _text
-               text.clear
-
-               loop
-                       var c = get_char
-
-                       if c != -1 then
-                               var cr = _cr
-                               var line = _line
-                               var pos = _pos
-                               if c == 10 then
-                                       if cr then
-                                               cr = false
-                                       else
-                                               line = line + 1
-                                               pos = 0
-                                       end
-                               else if c == 13 then
-                                       line = line + 1
-                                       pos = 0
-                                       cr = true
-                               else
-                                       pos = pos + 1
-                                       cr = false
-                               end
-
-                               text.add(c.ascii)
-
-                               loop
-                                       var old_state = dfa_state
-                                       if dfa_state < -1 then
-                                               old_state = -2 - dfa_state
-                                       end
-
-                                       dfa_state = -1
-
-                                       var low = 0
-                                       var high = lexer_goto(old_state, 0) - 1
-
-                                       if high >= 0 then
-                                               while low <= high do
-                                                       var middle = (low + high) / 2
-                                                       var offset = middle * 3 + 1 # +1 because length is at 0
-
-                                                       if c < lexer_goto(old_state, offset) then
-                                                               high = middle - 1
-                                                       else if c > lexer_goto(old_state, offset+1) then
-                                                               low = middle + 1
-                                                       else
-                                                               dfa_state = lexer_goto(old_state, offset+2)
-                                                               break
-                                                       end
-                                               end
-                                       end
-                                       if dfa_state > -2 then break
-                               end
-
-                               _cr = cr
-                               _line = line
-                               _pos = pos
-                       else
-                               dfa_state = -1
-                       end
-
-                       if dfa_state >= 0 then
-                               var tok = lexer_accept(dfa_state)
-                               if tok != -1 then
-                                       accept_state = dfa_state
-                                       accept_token = tok
-                                       accept_length = text.length
-                                       accept_pos = _pos
-                                       accept_line = _line
-                               end
-                       else
-                               if accept_state != -1 then
-                                       var location = new Location(_filename, start_line + 1, accept_line + 1, start_pos + 1, accept_pos)
-                                       _pos = accept_pos
-                                       _line = accept_line
-                                       push_back(accept_length)
 $ foreach {//token}
-                                       if accept_token == ${position()-1} then
-$    if {count(transition[@from!=@to])!=0}
-                                               var state_id = _state
-$        foreach transition in {transition[@from!=@to]}
-                                               if state_id == ${/parser/lexer_data/state[@name=$transition/@from]/@id} then
-                                                       _state = state_${translate(@to,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}
-                                               end
-$        end
-$    end if
 $    if {@parser_index}
-$        if {not(@text)}
-                                               var token_text = text.substring(0, accept_length)
-                                               return new @ename.init_tk(token_text, location)
-$        else
-                                               return new @ename.init_tk(location)
-$        end
-$    else
-                                               return null
-$    end
-                                       end
-$ end foreach
-                               else
-                                       var location = new Location(_filename, start_line + 1, start_line + 1, start_pos + 1, start_pos + 1)
-                                       if text.length > 0 then
-                                               var token = new PError.init_error("Syntax error: unknown token {text}.", location)
-                                               return token
-                                       else
-                                               var token = new EOF(location)
-                                               return token
-                                       end
-                               end
+               if accept_token == ${position()-1} then
+$        if {count(transition[@from!=@to])!=0}
+                       var state_id = _state
+$            foreach transition in {transition[@from!=@to]}
+                       if state_id == ${/parser/lexer_data/state[@name=$transition/@from]/@id} then
+                               _state = state_${translate(@to,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}
                        end
+$            end
+$        end if
+                       return new @ename.init_tk(location)
                end
-       end
-
-       # Read the next character.
-	# The character is read from the stream or from the pushback buffer.
-       private fun get_char: Int
-       do
-               if _eof then
-                       return -1
-               end
-
-               var result: Int
-
-               var sp = _stream_pos
-               if sp >= 0 then
-                       var res = _stream_buf[_stream_pos]
-                       _stream_pos = sp - 1
-                       result = res.ascii
-               else
-                       result = _stream.read_char
-               end
-
-               if result == -1 then
-                       _eof = true
-               end
-
-               return result
-       end
-
-       # Unread some characters.
-       # Unread characters are stored in the pushback buffer.
-       private fun push_back(accept_length: Int)
-       do
-               var length = _text.length
-               var i = length - 1
-               while i >= accept_length do
-                       _eof = false
-                       _stream_pos = _stream_pos + 1
-                       _stream_buf[_stream_pos] = _text[i]
-                       i = i - 1
-               end
+$    end
+$ end foreach
+               abort # unknown token index `accept_token`
        end
 end
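
Note: the `$ foreach {//token}` block above is expanded once per token class when the parser is generated. As a rough sketch of the result (the token classes `TEol` and `TKwend` and their indices are illustrative, not taken from the real generated tables), the expanded `make_token` looks roughly like:

	redef class Lexer
		redef fun make_token(accept_token, location)
		do
			if accept_token == 0 then
				return new TEol.init_tk(location)
			end
			if accept_token == 1 then
				return new TKwend.init_tk(location)
			end
			# tokens whose rule carries a state transition also assign `_state`
			# before returning, as emitted by the `$ foreach transition` block above
			# ... one `if` per token class ...
			abort # unknown token index `accept_token`
		end
	end

The stream handling, DFA walking and pushback logic deleted above is no longer generated by the template; `redef class Lexer` implies it is now provided by a hand-written Lexer class elsewhere in the parser, so the template only emits this token-construction dispatch.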