Merge: more features for the AST
[nit.git] / src / parser / xss / lexer.xss
index 7784714..affaa11 100644
@@ -16,202 +16,25 @@ $ // See the License for the specific language governing permissions and
 $ // limitations under the License.
 
 $ template make_lexer()
-
-# The lexer extracts NIT tokens from an input stream.
-# It is better used with the Parser.
-class Lexer
-       super TablesCapable
-       # Last peeked token
-       var _token: nullable Token
-
-       # Lexer current state
-       var _state: Int = 0
-
-       # The source file
-       readable var _file: SourceFile
-
-       # Current character in the stream
-       var _stream_pos: Int = 0
-
-       # Current line number in the input stream
-       var _line: Int = 0
-
-       # Current column in the input stream
-       var _pos: Int = 0
-
-       # Was the last character a carriage return?
-       var _cr: Bool = false
-
-$ foreach {lexer_data/state}
-       # Constant state values
-       private fun state_${translate(@name,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}: Int do return @id end
-$ end foreach
-
-       # Create a new lexer for a stream (and a name)
-       init(file: SourceFile)
-       do
-               _file = file
-       end
-
-       # The last peeked token to chain them
-       private var last_token: nullable Token = null
-
-       # Give the next token (but do not consume it)
-       fun peek: Token
-       do
-               var t = _token
-               if t != null then return t
-
-               t = get_token
-               while t == null do t = get_token
-
-               var l = last_token
-               if l != null then
-                       l.next_token = t
-                       t.prev_token = l
-               end
-
-               last_token = t
-               _token = t
-               return t
-       end
-
-       # Give and consume the next token
-       fun next: Token
+redef class Lexer
+       redef fun make_token(accept_token, location)
        do
-               var result = peek
-               _token = null
-               return result
-       end
-
-       # Primitive method to return a token, or return null if it is discarded
-       # Is used to implement `peek` and `next`
-       protected fun get_token: nullable Token
-       do
-               var dfa_state = 0
-
-               var sp = _stream_pos
-               var start_stream_pos = sp
-               var start_pos = _pos
-               var start_line = _line
-               var string = _file.string
-               var string_len = string.length
-
-               var accept_state = -1
-               var accept_token = -1
-               var accept_length = -1
-               var accept_pos = -1
-               var accept_line = -1
-
-               loop
-                       if sp >= string_len then
-                               dfa_state = -1
-                       else
-                               var c = string.chars[sp].ascii
-                               sp += 1
-
-                               var cr = _cr
-                               var line = _line
-                               var pos = _pos
-                               if c == 10 then
-                                       if cr then
-                                               cr = false
-                                               _file.line_starts[line] = sp
-                                       else
-                                               line = line + 1
-                                               pos = 0
-                                               _file.line_starts[line] = sp
-                                       end
-                               else if c == 13 then
-                                       line = line + 1
-                                       pos = 0
-                                       cr = true
-                                       _file.line_starts[line] = sp
-                               else
-                                       pos = pos + 1
-                                       cr = false
-                               end
-
-                               loop
-                                       var old_state = dfa_state
-                                       if dfa_state < -1 then
-                                               old_state = -2 - dfa_state
-                                       end
-
-                                       dfa_state = -1
-
-                                       var low = 0
-                                       var high = lexer_goto(old_state, 0) - 1
-
-                                       if high >= 0 then
-                                               while low <= high do
-                                                       var middle = (low + high) / 2
-                                                       var offset = middle * 3 + 1 # +1 because length is at 0
-
-                                                       if c < lexer_goto(old_state, offset) then
-                                                               high = middle - 1
-                                                       else if c > lexer_goto(old_state, offset+1) then
-                                                               low = middle + 1
-                                                       else
-                                                               dfa_state = lexer_goto(old_state, offset+2)
-                                                               break
-                                                       end
-                                               end
-                                       end
-                                       if dfa_state > -2 then break
-                               end
-
-                               _cr = cr
-                               _line = line
-                               _pos = pos
-                       end
-
-                       if dfa_state >= 0 then
-                               var tok = lexer_accept(dfa_state)
-                               if tok != -1 then
-                                       accept_state = dfa_state
-                                       accept_token = tok
-                                       accept_length = sp - start_stream_pos
-                                       accept_pos = _pos
-                                       accept_line = _line
-                               end
-                       else
-                               if accept_state != -1 then
-                                       var location = new Location(_file, start_line + 1, accept_line + 1, start_pos + 1, accept_pos)
-                                       _pos = accept_pos
-                                       _line = accept_line
-                                       _stream_pos = start_stream_pos + accept_length
 $ foreach {//token}
-                                       if accept_token == ${position()-1} then
-$    if {count(transition[@from!=@to])!=0}
-                                               var state_id = _state
-$        foreach transition in {transition[@from!=@to]}
-                                               if state_id == ${/parser/lexer_data/state[@name=$transition/@from]/@id} then
-                                                       _state = state_${translate(@to,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}
-                                               end
-$        end
-$    end if
 $    if {@parser_index}
-                                               return new @ename.init_tk(location)
-$    else
-                                               return null
-$    end
-                                       end
-$ end foreach
-                               else
-                                       _stream_pos = sp
-                                       var location = new Location(_file, start_line + 1, start_line + 1, start_pos + 1, start_pos + 1)
-                                       if sp > start_stream_pos then
-                                               var text = string.substring(start_stream_pos, sp-start_stream_pos)
-                                               var token = new PLexerError.init_lexer_error("Syntax error: unknown token {text}.", location, text)
-                                               return token
-                                       else
-                                               var token = new EOF.init_tk(location)
-                                               return token
-                                       end
-                               end
+               if accept_token == ${position()-1} then
+$        if {count(transition[@from!=@to])!=0}
+                       var state_id = _state
+$            foreach transition in {transition[@from!=@to]}
+                       if state_id == ${/parser/lexer_data/state[@name=$transition/@from]/@id} then
+                               _state = state_${translate(@to,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}
                        end
+$            end
+$        end if
+                       return new @ename.init_tk(location)
                end
+$    end
+$ end foreach
+               abort # unknown token index `accept_token`
        end
 end
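
To make the new template above concrete, here is a rough sketch of what the expanded `make_token` could look like for a hypothetical grammar slice with only two tokens and no state-changing transitions. The accept indices (0, 1) and the chosen token classes (`TKwvar`, `TId`) are illustrative assumptions, not the real generated values.

redef class Lexer
	redef fun make_token(accept_token, location)
	do
		# one branch per token with a parser index, in grammar order
		if accept_token == 0 then
			# hypothetical index 0: the "var" keyword token
			return new TKwvar.init_tk(location)
		end
		if accept_token == 1 then
			# hypothetical index 1: an identifier token
			return new TId.init_tk(location)
		end
		abort # no generated branch matched this index
	end
end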
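The removed `get_token` resolves DFA transitions by binary-searching a flattened range table (`lexer_goto`): slot 0 of a row holds the number of ranges, followed by (first char, last char, next state) triples. The following self-contained sketch reproduces that lookup on a hand-written row; the `lookup` function and the sample data are hypothetical stand-ins, not the generated tables.

# Hypothetical stand-in for one row of the generated goto table:
# row[0] is the number of ranges, then each range is a
# (first char, last char, next state) triple.
fun lookup(row: Array[Int], c: Int): Int
do
	var low = 0
	var high = row[0] - 1
	while low <= high do
		var middle = (low + high) / 2
		var offset = middle * 3 + 1 # +1 because the range count sits at index 0
		if c < row[offset] then
			high = middle - 1
		else if c > row[offset+1] then
			low = middle + 1
		else
			return row[offset+2] # c falls inside this range
		end
	end
	return -1 # no transition: the automaton is dead
end

# Two ranges: '0'..'9' goes to state 5, 'a'..'z' goes to state 3.
var row = [2, 48, 57, 5, 97, 122, 3]
print(lookup(row, 'b'.ascii)) # 3
print(lookup(row, '4'.ascii)) # 5
print(lookup(row, '!'.ascii)) # -1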
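The `accept_state`/`accept_length` bookkeeping in the removed `get_token` implements maximal munch: the lexer remembers the last point where the DFA was in an accepting state and rolls back to it when the automaton dies. A minimal self-contained sketch of the same idea, using a hypothetical two-state automaton that recognizes `=` and `==` (the `scan` helper is illustrative only):

# Returns the length of the longest token recognized at `start`, or -1.
fun scan(text: String, start: Int): Int
do
	var sp = start
	var state = 0
	var accept_length = -1 # last accepting length, as in `get_token`
	loop
		if sp >= text.length then break
		var c = text.chars[sp]
		sp += 1
		# hard-coded transitions: 0 --'='--> 1 (accepts "="),
		#                         1 --'='--> 2 (accepts "==")
		if state == 0 and c == '=' then
			state = 1
		else if state == 1 and c == '=' then
			state = 2
		else
			break # dead: fall back to the last accepting length
		end
		accept_length = sp - start
	end
	return accept_length
end

print(scan("==x", 0)) # 2: the longer token "==" wins
print(scan("=x", 0))  # 1
print(scan("x", 0))   # -1: no token starts here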