parser: new class SourceFile
[nit.git] / src / parser / xss / lexer.xss
1 $ // This file is part of NIT ( http://www.nitlanguage.org ).
2 $ //
3 $ // Copyright 2008 Jean Privat <jean@pryen.org>
4 $ // Based on algorithms developed for ( http://www.sablecc.org/ ).
5 $ //
6 $ // Licensed under the Apache License, Version 2.0 (the "License");
7 $ // you may not use this file except in compliance with the License.
8 $ // You may obtain a copy of the License at
9 $ //
10 $ //     http://www.apache.org/licenses/LICENSE-2.0
11 $ //
12 $ // Unless required by applicable law or agreed to in writing, software
13 $ // distributed under the License is distributed on an "AS IS" BASIS,
14 $ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 $ // See the License for the specific language governing permissions and
16 $ // limitations under the License.
17
18 $ template make_lexer()
19
20 # The lexer extracts NIT tokens from an input stream.
21 # It is best used with the Parser
22 class Lexer
23         super TablesCapable
24         # Last peeked token (cached by `peek`, cleared by `next`)
25         var _token: nullable Token
26
27         # Lexer current state
28         var _state: Int = 0
29
30         # The source file
31         readable var _file: SourceFile
32
33         # Current character in the stream
34         var _stream_pos: Int = 0
35
36         # Current line number in the input stream
37         var _line: Int = 0
38
39         # Current column in the input stream
40         var _pos: Int = 0
41
42         # Was the last character a carriage return?
43         var _cr: Bool = false
44
45 $ foreach {lexer_data/state}
46         # Constant state values
47         private fun state_${translate(@name,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}: Int do return @id end
48 $ end foreach
49
50         # Create a new lexer for a stream (and a name)
51         init(file: SourceFile)
52         do
53                 _file = file
54         end
55
56         # Give the next token (but do not consume it)
57         fun peek: Token
58         do
59                 while _token == null do
60                         _token = get_token
61                 end
62                 return _token.as(not null)
63         end
64
65         # Give and consume the next token
66         fun next: Token
67         do
68                 var result = _token
69                 while result == null do
70                         result = get_token
71                 end
72                 _token = null
73                 return result
74         end
75
76         # Get a token, or null if it is discarded
77         private fun get_token: nullable Token
78         do
                # Simulate the DFA over the input characters, remembering the
                # last accepting position so the longest match wins.
79                 var dfa_state = 0
80
81                 var sp = _stream_pos
82                 var start_stream_pos = sp
83                 var start_pos = _pos
84                 var start_line = _line
85                 var string = _file.string
86                 var string_len = string.length
87
                # Longest-match bookkeeping: last accepting DFA state seen,
                # its token id, and the stream/line/column position where it ended.
88                 var accept_state = -1
89                 var accept_token = -1
90                 var accept_length = -1
91                 var accept_pos = -1
92                 var accept_line = -1
93
94                 loop
95                         if sp >= string_len then
96                                 dfa_state = -1
97                         else
98                                 var c = string[sp].ascii
99                                 sp += 1
100
                                # Track line/column; LF after CR does not start a
                                # new line, so CR, LF and CR+LF all count as one line end.
101                                 var cr = _cr
102                                 var line = _line
103                                 var pos = _pos
104                                 if c == 10 then
105                                         if cr then
106                                                 cr = false
107                                         else
108                                                 line = line + 1
109                                                 pos = 0
110                                         end
111                                 else if c == 13 then
112                                         line = line + 1
113                                         pos = 0
114                                         cr = true
115                                 else
116                                         pos = pos + 1
117                                         cr = false
118                                 end
119
                                # Binary search the compressed goto row of `old_state`
                                # for a (low, high, target) range containing `c`.
120                                 loop
121                                         var old_state = dfa_state
                                        # A state <= -2 encodes a chained row: decode
                                        # it to (-2 - value) and search that row instead.
122                                         if dfa_state < -1 then
123                                                 old_state = -2 - dfa_state
124                                         end
125
126                                         dfa_state = -1
127
128                                         var low = 0
129                                         var high = lexer_goto(old_state, 0) - 1
130
131                                         if high >= 0 then
132                                                 while low <= high do
133                                                         var middle = (low + high) / 2
134                                                         var offset = middle * 3 + 1 # +1 because length is at 0
135
136                                                         if c < lexer_goto(old_state, offset) then
137                                                                 high = middle - 1
138                                                         else if c > lexer_goto(old_state, offset+1) then
139                                                                 low = middle + 1
140                                                         else
141                                                                 dfa_state = lexer_goto(old_state, offset+2)
142                                                                 break
143                                                         end
144                                                 end
145                                         end
146                                         if dfa_state > -2 then break
147                                 end
148
149                                 _cr = cr
150                                 _line = line
151                                 _pos = pos
152                         end
153
154                         if dfa_state >= 0 then
                                # Accepting state: record it but keep scanning for a longer match.
155                                 var tok = lexer_accept(dfa_state)
156                                 if tok != -1 then
157                                         accept_state = dfa_state
158                                         accept_token = tok
159                                         accept_length = sp - start_stream_pos
160                                         accept_pos = _pos
161                                         accept_line = _line
162                                 end
163                         else
                                # Dead end: emit the last accepted token, or an error/EOF.
164                                 if accept_state != -1 then
165                                         var location = new Location(_file, start_line + 1, accept_line + 1, start_pos + 1, accept_pos)
166                                         _pos = accept_pos
167                                         _line = accept_line
168                                         _stream_pos = start_stream_pos + accept_length
169 $ foreach {//token}
170                                         if accept_token == ${position()-1} then
171 $    if {count(transition[@from!=@to])!=0}
172                                                 var state_id = _state
173 $        foreach transition in {transition[@from!=@to]}
174                                                 if state_id == ${/parser/lexer_data/state[@name=$transition/@from]/@id} then
175                                                         _state = state_${translate(@to,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}
176                                                 end
177 $        end
178 $    end if
179 $    if {@parser_index}
180 $        if {not(@text)}
181                                                 var token_text = string.substring(start_stream_pos, accept_length)
182                                                 return new @ename.init_tk(token_text, location)
183 $        else
184                                                 return new @ename.init_tk(location)
185 $        end
186 $    else
187                                                 return null
188 $    end
189                                         end
190 $ end foreach
191                                 else
192                                         _stream_pos = sp
193                                         var location = new Location(_file, start_line + 1, start_line + 1, start_pos + 1, start_pos + 1)
194                                         if sp > start_stream_pos then
195                                                 var text = string.substring(start_stream_pos, sp-start_stream_pos)
196                                                 var token = new PError.init_error("Syntax error: unknown token {text}.", location)
197                                                 return token
198                                         else
199                                                 var token = new EOF(location)
200                                                 return token
201                                         end
202                                 end
203                         end
204                 end
205         end
206 end
207
208 $ end template
209
210
211
212 $ template make_lexer_table()
$ // Emit the compressed DFA transition tables as C arrays.
$ // Each non-empty goto row stores its triple count at index 0, followed by
$ // (low, high, target) character-range triples; lexer_goto_table indexes
$ // the rows by DFA state. Empty rows share the single lexer_goto_row_null.
213 $ foreach {lexer_data/goto_table/state}
214 $     foreach {row}
215 $         if {count(goto)!=0}
216 static const int lexer_goto_row${position()}[] = {
217         ${count(goto)},
218 $             foreach {goto}
219         @low, @high, @state[-sep ','-]
220 $             end foreach
221 };
222 $         end
223 $     end foreach
224 static const int lexer_goto_row_null[] = {0};
225 const int* const lexer_goto_table[] = {
226 $     foreach {row}
227 $         if {count(goto)!=0}
228         lexer_goto_row${position()}[-sep ','-]
229 $         else
230         lexer_goto_row_null[-sep ','-]
231 $         end
232 $     end foreach
233 };
234 $ end foreach
235
236 $ foreach {lexer_data/accept_table/state}
$ // Emit the accept table: for each DFA state, the accepted token id, or -1
$ // when the state is not accepting.
237 const int lexer_accept_table[] = {
238         [-foreach {i}-]${.}[-sep ','-][-end foreach-]
239 };
240 $ end foreach
241
242 $ end template