lexer: `get_token` is now protected
[nit.git] / src / parser / xss / lexer.xss
1 $ // This file is part of NIT ( http://www.nitlanguage.org ).
2 $ //
3 $ // Copyright 2008 Jean Privat <jean@pryen.org>
4 $ // Based on algorithms developed for ( http://www.sablecc.org/ ).
5 $ //
6 $ // Licensed under the Apache License, Version 2.0 (the "License");
7 $ // you may not use this file except in compliance with the License.
8 $ // You may obtain a copy of the License at
9 $ //
10 $ //     http://www.apache.org/licenses/LICENSE-2.0
11 $ //
12 $ // Unless required by applicable law or agreed to in writing, software
13 $ // distributed under the License is distributed on an "AS IS" BASIS,
14 $ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 $ // See the License for the specific language governing permissions and
16 $ // limitations under the License.
17
18 $ template make_lexer()
19
20 # The lexer extracts NIT tokens from an input stream.
21 # It is better used with the Parser
22 class Lexer
23         super TablesCapable
24         # Last peeked token
25         var _token: nullable Token
26
27         # Lexer current state
28         var _state: Int = 0
29
30         # The source file
31         readable var _file: SourceFile
32
33         # Current character in the stream
34         var _stream_pos: Int = 0
35
36         # Current line number in the input stream
37         var _line: Int = 0
38
39         # Current column in the input stream
40         var _pos: Int = 0
41
42         # Was the last character a carriage-return?
43         var _cr: Bool = false
44
45 $ foreach {lexer_data/state}
46         # Constant state values
47         private fun state_${translate(@name,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}: Int do return @id end
48 $ end foreach
49
50         # Create a new lexer for a stream (and a name)
51         init(file: SourceFile)
52         do
53                 _file = file
54         end
55
56         # Give the next token (but do not consume it)
57         fun peek: Token
58         do
59                 while _token == null do
60                         _token = get_token
61                 end
62                 return _token.as(not null)
63         end
64
65         # Give and consume the next token
66         fun next: Token
67         do
68                 var result = _token
69                 while result == null do
70                         result = get_token
71                 end
72                 _token = null
73                 return result
74         end
75
76         # Primitive method to return a token, or return null if it is discarded
77         # Is used to implement `peek` and `next`
78         protected fun get_token: nullable Token
79         do
80                 var dfa_state = 0
81
82                 var sp = _stream_pos
83                 var start_stream_pos = sp
84                 var start_pos = _pos
85                 var start_line = _line
86                 var string = _file.string
87                 var string_len = string.length
88                 # Longest-match bookkeeping: last accepting state/token seen so far (-1 while none)
89                 var accept_state = -1
90                 var accept_token = -1
91                 var accept_length = -1
92                 var accept_pos = -1
93                 var accept_line = -1
94
95                 loop
96                         if sp >= string_len then
97                                 dfa_state = -1
98                         else
99                                 var c = string[sp].ascii
100                                 sp += 1
101                                 # Track line/column; a CR followed by LF counts as a single newline
102                                 var cr = _cr
103                                 var line = _line
104                                 var pos = _pos
105                                 if c == 10 then
106                                         if cr then
107                                                 cr = false
108                                                 _file.line_starts[line] = sp
109                                         else
110                                                 line = line + 1
111                                                 pos = 0
112                                                 _file.line_starts[line] = sp
113                                         end
114                                 else if c == 13 then
115                                         line = line + 1
116                                         pos = 0
117                                         cr = true
118                                         _file.line_starts[line] = sp
119                                 else
120                                         pos = pos + 1
121                                         cr = false
122                                 end
123                                 # Advance the DFA: binary-search this state's (low, high, target) character ranges
124                                 loop
125                                         var old_state = dfa_state
126                                         if dfa_state < -1 then
127                                                 old_state = -2 - dfa_state
128                                         end
129                                         # (states below -1 encode -2 - state and are re-dispatched through the loop)
130                                         dfa_state = -1
131
132                                         var low = 0
133                                         var high = lexer_goto(old_state, 0) - 1
134
135                                         if high >= 0 then
136                                                 while low <= high do
137                                                         var middle = (low + high) / 2
138                                                         var offset = middle * 3 + 1 # +1 because length is at 0
139
140                                                         if c < lexer_goto(old_state, offset) then
141                                                                 high = middle - 1
142                                                         else if c > lexer_goto(old_state, offset+1) then
143                                                                 low = middle + 1
144                                                         else
145                                                                 dfa_state = lexer_goto(old_state, offset+2)
146                                                                 break
147                                                         end
148                                                 end
149                                         end
150                                         if dfa_state > -2 then break
151                                 end
152
153                                 _cr = cr
154                                 _line = line
155                                 _pos = pos
156                         end
157                         # Alive state (>= 0): record any acceptance; dead state (-1): emit the longest match
158                         if dfa_state >= 0 then
159                                 var tok = lexer_accept(dfa_state)
160                                 if tok != -1 then
161                                         accept_state = dfa_state
162                                         accept_token = tok
163                                         accept_length = sp - start_stream_pos
164                                         accept_pos = _pos
165                                         accept_line = _line
166                                 end
167                         else
168                                 if accept_state != -1 then
169                                         var location = new Location(_file, start_line + 1, accept_line + 1, start_pos + 1, accept_pos)
170                                         _pos = accept_pos
171                                         _line = accept_line
172                                         _stream_pos = start_stream_pos + accept_length
173 $ foreach {//token}
174                                         if accept_token == ${position()-1} then
175 $    if {count(transition[@from!=@to])!=0}
176                                                 var state_id = _state
177 $        foreach transition in {transition[@from!=@to]}
178                                                 if state_id == ${/parser/lexer_data/state[@name=$transition/@from]/@id} then
179                                                         _state = state_${translate(@to,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}
180                                                 end
181 $        end
182 $    end if
183 $    if {@parser_index}
184                                                 return new @ename.init_tk(location)
185 $    else
186                                                 return null
187 $    end
188                                         end
189 $ end foreach
190                                 else
191                                         _stream_pos = sp
192                                         var location = new Location(_file, start_line + 1, start_line + 1, start_pos + 1, start_pos + 1)
193                                         if sp > start_stream_pos then
194                                                 var text = string.substring(start_stream_pos, sp-start_stream_pos)
195                                                 var token = new PLexerError.init_lexer_error("Syntax error: unknown token {text}.", location, text)
196                                                 return token
197                                         else
198                                                 var token = new EOF.init_tk(location)
199                                                 return token
200                                         end
201                                 end
202                         end
203                 end
204         end
205 end
206
207 $ end template
208
209
210
211 $ template make_lexer_table()
$ // Emits the C tables consulted by the lexer through `lexer_goto` and `lexer_accept`.
$ // Each non-empty goto row is {count, low1, high1, target1, low2, ...}: the count
$ // sits at index 0 and the (low, high, target) triples are binary-searched on the
$ // input character by `get_token`.
212 $ foreach {lexer_data/goto_table/state}
213 $     foreach {row}
214 $         if {count(goto)!=0}
215 static const int lexer_goto_row${position()}[] = {
216         ${count(goto)},
217 $             foreach {goto}
218         @low, @high, @state[-sep ','-]
219 $             end foreach
220 };
221 $         end
222 $     end foreach
223 static const int lexer_goto_row_null[] = {0};
224 const int* const lexer_goto_table[] = {
225 $     foreach {row}
226 $         if {count(goto)!=0}
227         lexer_goto_row${position()}[-sep ','-]
228 $         else
229         lexer_goto_row_null[-sep ','-]
230 $         end
231 $     end foreach
232 };
233 $ end foreach
234 $ // Accept table: one token id per DFA state; `get_token` treats -1 as "not accepting"
235 $ foreach {lexer_data/accept_table/state}
236 const int lexer_accept_table[] = {
237         [-foreach {i}-]${.}[-sep ','-][-end foreach-]
238 };
239 $ end foreach
240
241 $ end template