X-Git-Url: http://nitlanguage.org

diff --git a/src/parser/xss/lexer.xss b/src/parser/xss/lexer.xss
index 843a52b..780a118 100644
--- a/src/parser/xss/lexer.xss
+++ b/src/parser/xss/lexer.xss
@@ -1,87 +1,85 @@
-/* This file is part of NIT ( http://www.nitlanguage.org ).
- *
- * Copyright 2008 Jean Privat
- * Based on algorithms developped for ( http://www.sablecc.org/ ).
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
+$ // This file is part of NIT ( http://www.nitlanguage.org ).
+$ //
+$ // Copyright 2008 Jean Privat
+$ // Based on algorithms developped for ( http://www.sablecc.org/ ).
+$ //
+$ // Licensed under the Apache License, Version 2.0 (the "License");
+$ // you may not use this file except in compliance with the License.
+$ // You may obtain a copy of the License at
+$ //
+$ // http://www.apache.org/licenses/LICENSE-2.0
+$ //
+$ // Unless required by applicable law or agreed to in writing, software
+$ // distributed under the License is distributed on an "AS IS" BASIS,
+$ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+$ // See the License for the specific language governing permissions and
+$ // limitations under the License.
 
 $ template make_lexer()
 
 # The lexer extract NIT tokens from an input stream.
 # It is better user with the Parser
 class Lexer
+	super TablesCapable
 	# Last peeked token
-	attr _token: Token
+	var _token: nullable Token
 
 	# Lexer current state
-	attr _state: Int
+	var _state: Int = 0
 
 	# Name of the stream (as given to tokens)
-	readable attr _filename: String
+	readable var _filename: String
 
 	# Input stream where character are read
-	attr _stream: IStream
+	var _stream: IStream
 
 	# Pushback buffer to store unread character
-	attr _stream_buf: String
+	var _stream_buf: Buffer
 
 	# Number of character stored in the pushback buffer
-	attr _stream_pos: Int
+	var _stream_pos: Int
 
 	# Current line number in the input stream
-	attr _line: Int
+	var _line: Int = 0
 
 	# Current column in the input stream
-	attr _pos: Int
+	var _pos: Int = 0
 
 	# Was the last character a cariage-return?
-	attr _cr: Bool
+	var _cr: Bool = false
 
 	# If the end of stream?
-	attr _eof: Bool
+	var _eof: Bool = false
 
 	# Current working text read from the input stream
-	attr _text: String
+	var _text: Buffer
 
 $ foreach {lexer_data/state}
 	# Constante state values
-	private meth state_${translate(@name,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}: Int do return @id end
+	private fun state_${translate(@name,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}: Int do return @id end
 $ end foreach
 
 	# Create a new lexer for a stream (and a name)
 	init(stream: IStream, fname: String)
 	do
 		_filename = fname
-		_text = new String
+		_text = new Buffer
 		_stream = stream
 		_stream_pos = -1
-		_stream_buf = new String
-		build_goto_table
-		build_accept_table
+		_stream_buf = new Buffer
 	end
 
 	# Give the next token (but do not consume it)
-	meth peek: Token
+	fun peek: Token
 	do
 		while _token == null do
 			_token = get_token
 		end
-		return _token
+		return _token.as(not null)
 	end
 
 	# Give and consume the next token
-	meth next: Token
+	fun next: Token
 	do
 		var result = _token
 		while result == null do
@@ -92,7 +90,7 @@ $ end foreach
 	end
 
 	# Get a token, or null if it is discarded
-	private meth get_token: Token
+	private fun get_token: nullable Token
 	do
 		var dfa_state = 0
 
@@ -105,34 +103,35 @@ $ end foreach
 		var accept_pos = -1
 		var accept_line = -1
 
-		var goto_table = _goto_table[_state]
-		var accept = _accept_table[_state]
-		_text.clear
+		var text = _text
+		text.clear
 
-		while true do
+		loop
 			var c = get_char
 			if c != -1 then
+				var cr = _cr
+				var line = _line
+				var pos = _pos
 				if c == 10 then
-					if _cr then
-						_cr = false
+					if cr then
+						cr = false
 					else
-						_line = _line + 1
-						_pos = 0
+						line = line + 1
+						pos = 0
 					end
 				else if c == 13 then
-					_line = _line + 1
-					_pos = 0
-					_cr = true
+					line = line + 1
+					pos = 0
+					cr = true
 				else
-					_pos = _pos + 1
-					_cr = false
+					pos = pos + 1
+					cr = false
 				end
 
-				_text.add(c.ascii)
+				text.add(c.ascii)
 
-				var first_loop = true # aka until
-				while dfa_state < -1 or first_loop do
+				loop
 					var old_state = dfa_state
 					if dfa_state < -1 then
 						old_state = -2 - dfa_state
@@ -140,52 +139,51 @@ $ end foreach
 
 					dfa_state = -1
 
-					var tmp1 = goto_table[old_state]
 					var low = 0
-					var high = tmp1.length - 1
-
-					while low <= high do
-						var middle = (low + high) / 2
-						var tmp2 = tmp1[middle]
-
-						if c < tmp2[0] then
-							high = middle - 1
-						else if c > tmp2[1] then
-							low = middle + 1
-						else
-							dfa_state = tmp2[2]
-							low = high + 1 # aka break
+					var high = lexer_goto(old_state, 0) - 1
+
+					if high >= 0 then
+						while low <= high do
+							var middle = (low + high) / 2
+							var offset = middle * 3 + 1 # +1 because length is at 0
+
+							if c < lexer_goto(old_state, offset) then
+								high = middle - 1
+							else if c > lexer_goto(old_state, offset+1) then
+								low = middle + 1
+							else
+								dfa_state = lexer_goto(old_state, offset+2)
+								break
+							end
 						end
 					end
-					first_loop = false # aka until
+					if dfa_state > -2 then break
 				end
+
+				_cr = cr
+				_line = line
+				_pos = pos
 			else
 				dfa_state = -1
 			end
 
 			if dfa_state >= 0 then
-				if accept[dfa_state] != -1 then
+				var tok = lexer_accept(dfa_state)
+				if tok != -1 then
 					accept_state = dfa_state
-					accept_token = accept[dfa_state]
-					accept_length = _text.length
+					accept_token = tok
+					accept_length = text.length
 					accept_pos = _pos
 					accept_line = _line
 				end
 			else
 				if accept_state != -1 then
+					var location = new Location(_filename, start_line + 1, accept_line + 1, start_pos + 1, accept_pos)
+					_pos = accept_pos
+					_line = accept_line
+					push_back(accept_length)
 $ foreach {//token}
 					if accept_token == ${position()-1} then
-$ if {not(@text)}
-$ if {@parser_index}
-						var token_text = _text.substring(0, accept_length)
-						var token = new @ename(token_text, _filename, start_line + 1, start_pos + 1)
-$ end
-$ else
-						var token = new @ename(_filename, start_line + 1, start_pos + 1)
-$ end
-						push_back(accept_length)
-						_pos = accept_pos
-						_line = accept_line
 $ if {count(transition[@from!=@to])!=0}
 						var state_id = _state
 $ foreach transition in {transition[@from!=@to]}
@@ -195,29 +193,34 @@ $ foreach transition in {transition[@from!=@to]}
 $ end
 $ end if
 $ if {@parser_index}
-						return token
+$ if {not(@text)}
+						var token_text = text.substring(0, accept_length)
+						return new @ename.init_tk(token_text, location)
+$ else
+						return new @ename.init_tk(location)
+$ end
 $ else
 						return null
 $ end
 					end
 $ end foreach
 				else
-					if _text.length > 0 then
-						var token = new PError.init_error(_filename, start_line + 1, start_pos + 1, "Unknown token: {_text}")
+					var location = new Location(_filename, start_line + 1, start_line + 1, start_pos + 1, start_pos + 1)
+					if text.length > 0 then
+						var token = new PError.init_error("Syntax error: unknown token {text}.", location)
 						return token
 					else
-						var token = new EOF(_filename, start_line + 1, start_pos + 1)
+						var token = new EOF(location)
 						return token
 					end
 				end
 			end
 		end
-		return null
 	end
 
 	# Read the next character.
 	# The character is read from the stream of from the pushback buffer.
-	private meth get_char: Int
+	private fun get_char: Int
 	do
 		if _eof then
 			return -1
@@ -243,7 +246,7 @@ $ end foreach
 
 	# Unread some characters.
 	# Unread characters are stored in the pushback buffer.
-	private meth push_back(accept_length: Int)
+	private fun push_back(accept_length: Int)
 	do
 		var length = _text.length
 		var i = length - 1
@@ -254,45 +257,40 @@ $ end foreach
 			i = i - 1
 		end
 	end
+end
 
-	attr _goto_table: Array[Array[Array[Array[Int]]]]
-	private meth build_goto_table
-	do
-		_goto_table = once [
+$ end template
+
+
+
+$ template make_lexer_table()
 $ foreach {lexer_data/goto_table/state}
-			[
 $ foreach {row}
$ if {count(goto)!=0}
-				[
+static const int lexer_goto_row${position()}[] = {
+	${count(goto)},
 $ foreach {goto}
-					[@low, @high, @state] [-sep ','-]
+	@low, @high, @state[-sep ','-]
 $ end foreach
-				] [-sep ','-]
+};
+$ end
+$ end foreach
+static const int lexer_goto_row_null[] = {0};
+const int* const lexer_goto_table[] = {
+$ foreach {row}
+$ if {count(goto)!=0}
+	lexer_goto_row${position()}[-sep ','-]
 $ else
-					nil_array [-sep ','-]
+	lexer_goto_row_null[-sep ','-]
 $ end
 $ end foreach
-			] [-sep ','-]
+};
 $ end foreach
-		]
-	end
-
-	private meth nil_array: Array[Array[Int]]
-	do
-		return once new Array[Array[Int]]
-	end
 
-	attr _accept_table: Array[Array[Int]]
-	private meth build_accept_table
-	do
-		_accept_table = once [
 $ foreach {lexer_data/accept_table/state}
-			[
-				[-foreach {i}-]${.} [-sep ','-] [-end foreach-]
-
-			] [-sep ','-]
+const int lexer_accept_table[] = {
+	[-foreach {i}-]${.}[-sep ','-][-end foreach-]
+};
 $ end foreach
-		]
-	end
-end
 
 $ end template
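
The new make_lexer_table() template emits the DFA as flat C arrays instead of nested Nit arrays: each lexer_goto_row stores its triple count in cell 0 followed by the sorted (low, high, next-state) triples, which is why the Nit side computes middle * 3 + 1 when it binary-searches a row through lexer_goto(). The following standalone C sketch illustrates that packed-row lookup; the sample rows and the next_state() helper are hypothetical stand-ins for the generated lexer_goto_row* data, not part of the patch.

#include <stdio.h>

/* Hand-written rows in the packed format emitted by make_lexer_table():
 * cell 0 is the number of (low, high, next) triples; the sorted triples
 * start at cell 1. These values are illustrative only. */
static const int lexer_goto_row1[] = {
	2,               /* two transition ranges in this row */
	'0', '9', 1,     /* digits go to state 1 */
	'a', 'z', 2      /* lowercase letters go to state 2 */
};
static const int lexer_goto_row_null[] = {0}; /* row with no transitions */

static const int *const lexer_goto_table[] = {
	lexer_goto_row1,     /* state 0 */
	lexer_goto_row_null, /* state 1 */
	lexer_goto_row_null  /* state 2 */
};

/* Plays the role of the lexer_goto(state, index) accessor used by get_token. */
static int lexer_goto(int state, int index)
{
	return lexer_goto_table[state][index];
}

/* Binary search over one row's sorted ranges, mirroring the inner loop of
 * get_token; returns the next DFA state, or -1 when no range matches c. */
static int next_state(int state, int c)
{
	int low = 0;
	int high = lexer_goto(state, 0) - 1; /* number of triples - 1 */

	while (low <= high) {
		int middle = (low + high) / 2;
		int offset = middle * 3 + 1; /* +1 because the count sits at cell 0 */

		if (c < lexer_goto(state, offset))
			high = middle - 1;
		else if (c > lexer_goto(state, offset + 1))
			low = middle + 1;
		else
			return lexer_goto(state, offset + 2);
	}
	return -1;
}

int main(void)
{
	printf("'7' -> %d\n", next_state(0, '7')); /* 1: falls in '0'..'9' */
	printf("'k' -> %d\n", next_state(0, 'k')); /* 2: falls in 'a'..'z' */
	printf("'+' -> %d\n", next_state(0, '+')); /* -1: no matching range */
	return 0;
}

Packing the count into cell 0 keeps every row self-describing, so a single pair of constant tables (lexer_goto_table and lexer_accept_table) can replace the per-instance Array construction that build_goto_table and build_accept_table previously performed at lexer creation time.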