lexer: implements next with peek
src/parser/xss/lexer.xss
$ // This file is part of NIT ( http://www.nitlanguage.org ).
$ //
$ // Copyright 2008 Jean Privat <jean@pryen.org>
$ // Based on algorithms developed for ( http://www.sablecc.org/ ).
$ //
$ // Licensed under the Apache License, Version 2.0 (the "License");
$ // you may not use this file except in compliance with the License.
$ // You may obtain a copy of the License at
$ //
$ //     http://www.apache.org/licenses/LICENSE-2.0
$ //
$ // Unless required by applicable law or agreed to in writing, software
$ // distributed under the License is distributed on an "AS IS" BASIS,
$ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
$ // See the License for the specific language governing permissions and
$ // limitations under the License.

$ template make_lexer()

# The lexer extracts NIT tokens from an input stream.
# It is better used with the Parser.
class Lexer
        super TablesCapable
        # Last peeked token
        var _token: nullable Token

        # Lexer current state
        var _state: Int = 0

        # The source file
        readable var _file: SourceFile

        # Current character in the stream
        var _stream_pos: Int = 0

        # Current line number in the input stream
        var _line: Int = 0

        # Current column in the input stream
        var _pos: Int = 0

        # Was the last character a carriage return?
        var _cr: Bool = false

$ foreach {lexer_data/state}
        # Constant state values
        private fun state_${translate(@name,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}: Int do return @id end
$ end foreach

        # Create a new lexer for a stream (and a name)
        init(file: SourceFile)
        do
                _file = file
        end

        # Give the next token (but do not consume it)
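        # `get_token` returns null for tokens that are discarded, so loop
        # until a reportable token (or EOF) is produced.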
        fun peek: Token
        do
                while _token == null do
                        _token = get_token
                end
                return _token.as(not null)
        end

        # Give and consume the next token
        fun next: Token
        do
                var result = peek
                _token = null
                return result
        end

        # Primitive method that returns a token, or null if the token is discarded.
        # It is used to implement `peek` and `next`.
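        #
        # The token is recognized by running the generated DFA over the input:
        # the last accepting state seen is remembered so that the longest match
        # is returned, and the stream position is rolled back accordingly.
        # Unknown input yields a `PLexerError` token; the end of the stream
        # yields an `EOF` token.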
        protected fun get_token: nullable Token
        do
                var dfa_state = 0

                var sp = _stream_pos
                var start_stream_pos = sp
                var start_pos = _pos
                var start_line = _line
                var string = _file.string
                var string_len = string.length

                var accept_state = -1
                var accept_token = -1
                var accept_length = -1
                var accept_pos = -1
                var accept_line = -1

                loop
                        if sp >= string_len then
                                dfa_state = -1
                        else
                                var c = string[sp].ascii
                                sp += 1

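                                # Track the line and column: LF, CR and CR+LF are all
                                # accepted as end of line, and the start offset of each
                                # line is recorded in the source file.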
                                var cr = _cr
                                var line = _line
                                var pos = _pos
                                if c == 10 then
                                        if cr then
                                                cr = false
                                                _file.line_starts[line] = sp
                                        else
                                                line = line + 1
                                                pos = 0
                                                _file.line_starts[line] = sp
                                        end
                                else if c == 13 then
                                        line = line + 1
                                        pos = 0
                                        cr = true
                                        _file.line_starts[line] = sp
                                else
                                        pos = pos + 1
                                        cr = false
                                end

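                                # Follow the DFA transitions for `c`. A computed state
                                # below -1 encodes an intermediate state (-2 - value) whose
                                # transitions are followed again with the same character,
                                # hence the inner loop.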
                                loop
                                        var old_state = dfa_state
                                        if dfa_state < -1 then
                                                old_state = -2 - dfa_state
                                        end

                                        dfa_state = -1

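                                        # The goto row of `old_state` stores its number of entries
                                        # at index 0, followed by sorted (low, high, next) triples;
                                        # binary search for the range containing `c`.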
                                        var low = 0
                                        var high = lexer_goto(old_state, 0) - 1

                                        if high >= 0 then
                                                while low <= high do
                                                        var middle = (low + high) / 2
                                                        var offset = middle * 3 + 1 # +1 because length is at 0

                                                        if c < lexer_goto(old_state, offset) then
                                                                high = middle - 1
                                                        else if c > lexer_goto(old_state, offset+1) then
                                                                low = middle + 1
                                                        else
                                                                dfa_state = lexer_goto(old_state, offset+2)
                                                                break
                                                        end
                                                end
                                        end
                                        if dfa_state > -2 then break
                                end

                                _cr = cr
                                _line = line
                                _pos = pos
                        end

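                        # Remember the most recent accepting state so that the longest
                        # match can be emitted once the DFA reaches a dead end.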
                        if dfa_state >= 0 then
                                var tok = lexer_accept(dfa_state)
                                if tok != -1 then
                                        accept_state = dfa_state
                                        accept_token = tok
                                        accept_length = sp - start_stream_pos
                                        accept_pos = _pos
                                        accept_line = _line
                                end
                        else
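                                # The DFA is stuck: backtrack to the last accepted token,
                                # if there is one.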
                                if accept_state != -1 then
                                        var location = new Location(_file, start_line + 1, accept_line + 1, start_pos + 1, accept_pos)
                                        _pos = accept_pos
                                        _line = accept_line
                                        _stream_pos = start_stream_pos + accept_length
$ foreach {//token}
                                        if accept_token == ${position()-1} then
$    if {count(transition[@from!=@to])!=0}
                                                var state_id = _state
$        foreach transition in {transition[@from!=@to]}
                                                if state_id == ${/parser/lexer_data/state[@name=$transition/@from]/@id} then
                                                        _state = state_${translate(@to,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}
                                                end
$        end
$    end if
$    if {@parser_index}
                                                return new @ename.init_tk(location)
$    else
                                                return null
$    end
                                        end
$ end foreach
                                else
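                                        # Nothing was ever accepted: report the unknown text as a
                                        # lexer error, or produce the EOF token when the whole stream
                                        # has been read.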
                                        _stream_pos = sp
                                        var location = new Location(_file, start_line + 1, start_line + 1, start_pos + 1, start_pos + 1)
                                        if sp > start_stream_pos then
                                                var text = string.substring(start_stream_pos, sp-start_stream_pos)
                                                var token = new PLexerError.init_lexer_error("Syntax error: unknown token {text}.", location, text)
                                                return token
                                        else
                                                var token = new EOF.init_tk(location)
                                                return token
                                        end
                                end
                        end
                end
        end
end

$ end template



$ template make_lexer_table()
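/* Tables for the lexer DFA: each goto row stores its number of entries
 * followed by sorted (low, high, next_state) triples over character codes. */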
$ foreach {lexer_data/goto_table/state}
$     foreach {row}
$         if {count(goto)!=0}
static const int lexer_goto_row${position()}[] = {
        ${count(goto)},
$             foreach {goto}
        @low, @high, @state[-sep ','-]
$             end foreach
};
$         end
$     end foreach
static const int lexer_goto_row_null[] = {0};
const int* const lexer_goto_table[] = {
$     foreach {row}
$         if {count(goto)!=0}
        lexer_goto_row${position()}[-sep ','-]
$         else
        lexer_goto_row_null[-sep ','-]
$         end
$     end foreach
};
$ end foreach

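/* lexer_accept_table[state] is the index of the token accepted in that
 * DFA state, or -1 when the state is not accepting. */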
$ foreach {lexer_data/accept_table/state}
const int lexer_accept_table[] = {
        [-foreach {i}-]${.}[-sep ','-][-end foreach-]
};
$ end foreach

$ end template