1 /* This file is part of NIT ( http://www.nitlanguage.org ).
3 * Copyright 2008 Jean Privat <jean@pryen.org>
4 * Based on algorithms developed for ( http://www.sablecc.org/ ).
6 * Licensed under the Apache License, Version 2.0 (the "License");
7 * you may not use this file except in compliance with the License.
8 * You may obtain a copy of the License at
10 * http://www.apache.org/licenses/LICENSE-2.0
12 * Unless required by applicable law or agreed to in writing, software
13 * distributed under the License is distributed on an "AS IS" BASIS,
14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 * See the License for the specific language governing permissions and
16 * limitations under the License.
19 $ template make_parser()
21 # State of the parser automata as stored in the parser stack.
# NOTE(review): the `class State` declaration itself is elided in this fragment
# (original line 22); the members below belong to that class.
23 # The internal state number
24 readable writable var _state: Int
26 # The node stored with the state in the stack
# Holds either an AST node or an Array of nodes, depending on the production;
# hence the loose `nullable Object` type.
27 readable writable var _nodes: nullable Object
# Initialize a stack entry from an automaton state and its associated node.
29 init(state: Int, nodes: nullable Object)
41 # Stack of pushed states and productions
# State objects in the stack are recycled by `push` rather than discarded,
# so the array may be longer than the live portion tracked by _stack_pos.
42 var _stack: Array[State]
44 # Position in the stack
# (index of the current top; the declaration of _stack_pos is elided here)
47 # Create a new parser based on a given lexer
# NOTE(review): the constructor header and the rest of its body are elided;
# only the stack initialization is visible.
51 _stack = new Array[State]
58 # Do a transition in the automata
# Look up the target state for reducing to nonterminal `index` from the
# current state, via binary search in a packed goto row of
# (source state, target state) pairs.
59 private fun go_to(index: Int): Int
62 var table = _goto_table[index]
# Pairs are packed flat: table.length/2 entries.
# NOTE(review): the enclosing loop and the initialization of `low` are on
# elided lines; only the search body is visible here.
64 var high = table.length/2 - 1
67 var middle = (low + high) / 2
68 var subindex = middle * 2
70 if state < table[subindex] then
72 else if state > table[subindex] then
# Exact match: the paired element is the target state.
75 return table[subindex + 1]
# No explicit entry for the current state: fall back to the row default.
79 return table[1] # Default value
82 # Push something in the state stack
# Advance the stack top and store (numstate, list_node) there, reusing a
# previously allocated State object when one exists to avoid churn.
83 private fun push(numstate: Int, list_node: nullable Object)
85 var pos = _stack_pos + 1
# NOTE(review): the update of _stack_pos itself is on an elided line.
87 if pos < _stack.length then
# Recycle the dead entry left by an earlier pop.
88 var state = _stack[pos]
89 state.state = numstate
90 state.nodes = list_node
# Stack has never been this deep: allocate a fresh entry.
92 _stack.push(new State(numstate, list_node))
# The current automaton state: the state number stored at the top of the stack.
97 private fun state: Int
99 return _stack[_stack_pos].state
102 # Pop something from the stack state
# Return the node stored at the top of the stack and step the top index back.
# The State object itself stays in the array so `push` can reuse it later.
103 private fun pop: nullable Object
105 var res = _stack[_stack_pos].nodes
106 _stack_pos = _stack_pos -1
110 # Build and return a full AST.
# Standard LR driver loop: peek at the lookahead, find the action for
# (current state, lookahead) in the packed action table, then shift, reduce,
# accept, or report an error.
# NOTE(review): the `fun parse` header and the enclosing loop/`end` lines are
# on elided lines in this fragment.
117 var token = lexer.peek
# Remember the lookahead's position so an error node can still be located
# after the lexer has advanced.
118 var last_pos = token.location.column_start
119 var last_line = token.location.line_start
121 if token isa PError then
# Lexical error: wrap the error token directly in an otherwise empty Start.
122 return new Start(null, token)
125 var index = token.parser_index
126 var table = _action_table[state]
# Entries 1..2 of the row hold the default action; the rest of the row is
# (token index, action type, action value) triples, searched below.
127 var action_type = table[1]
128 var action_value = table[2]
131 var high = table.length/3 - 1
# NOTE(review): the binary-search loop header and `low` initialization are
# elided; `high = low - 1` below is the template's idiom for "break".
134 var middle = (low + high) / 2
135 var subindex = middle * 3
137 if index < table[subindex] then
139 else if index > table[subindex] then
142 action_type = table[subindex + 1]
143 action_value = table[subindex + 2]
144 high = low -1 # break
# Dispatch on the action found for (state, lookahead).
148 if action_type == 0 then # SHIFT
149 push(action_value, lexer.next)
150 else if action_type == 1 then # REDUCE
151 _reduce_table[action_value].action(self)
152 else if action_type == 2 then # ACCEPT
153 var node2 = lexer.next
# `node1` is defined on an elided line — presumably the root production
# popped from the stack; TODO confirm against the full template.
156 assert node1 isa ${/parser/prods/prod/@ename}
157 var node = new Start(node1, node2)
# Attach first/last tokens and locations to every production node.
158 (new SearchTokensVisitor).enter_visit(node)
160 else if action_type == 3 then # ERROR
161 var location = new Location(lexer.filename, last_line, last_line, last_pos, last_pos)
162 var node2 = new PError.init_error(error_messages[errors[action_value]],location)
163 var node = new Start(null, node2)
# Table mapping a rule number to the ReduceAction object that performs it;
# indexed by the `action_value` of REDUCE actions in `parse`.
170 var _reduce_table: Array[ReduceAction]
171 private fun build_reduce_table
# The generator expands one `new ReduceActionN` per grammar rule,
# comma-separated ([-sep ','-]).
173 _reduce_table = new Array[ReduceAction].with_items(
174 $ foreach {rules/rule}
175 new ReduceAction@index[-sep ','-]
181 # Find first and last tokens of production nodes
# Walks the finished AST once. Productions whose first token has not yet been
# seen are queued in _untokenned_nodes; the next Token visited becomes their
# first_token. After a production's children are visited, the most recently
# seen token becomes its last_token, and a Location spanning first..last is
# computed for it.
# NOTE(review): several structural lines (branch headers, assignments such as
# `no.first_token = n`, and `end`s) are elided in this fragment.
182 private class SearchTokensVisitor
184 var _untokenned_nodes: Array[Prod]
185 var _last_token: nullable Token = null
186 redef fun visit(n: nullable PNode)
190 else if n isa Token then
# This token is the first token of every production queued so far.
192 for no in _untokenned_nodes do
195 _untokenned_nodes.clear
# n is a Prod here: queue it until its first token is found.
198 _untokenned_nodes.add(n)
200 n.last_token = _last_token
202 if n.first_token != null then
203 var start_location = n.first_token.location
204 var end_location = _last_token.location
206 if start_location != null and end_location != null then
# Span the production's location from its first token's start to its
# last token's end.
207 var file = end_location.file
208 var line_start = start_location.line_start
209 var line_end = end_location.line_end
210 var column_start = start_location.column_start
211 var column_end = end_location.column_end
212 n.location = new Location(file, line_start, line_end, column_start, column_end)
219 _untokenned_nodes = new Array[Prod]
223 # Each reduce action has its own class, this one is the root of the hierarchy.
224 private abstract class ReduceAction
# Perform the reduction: pop the rule's right-hand side from the parser
# stack, build the resulting node, and push it with the goto state.
225 fun action(p: Parser) is abstract
# One ReduceAction subclass is generated per grammar rule. The body of
# `action` is assembled from the rule's command list: FETCHNODE/FETCHLIST pop
# items from the parser stack, ADDNODE/ADDLIST accumulate children,
# MAKELIST/MAKENODE build new values, RETURNNODE/RETURNLIST select the result.
# The `translate(...)` calls merely lowercase grammar attribute names to form
# Nit local-variable identifiers.
228 $ foreach {rules/rule}
229 private class ReduceAction@index
231 redef fun action(p: Parser)
233 var node_list: nullable Object = null
# FETCHNODE/FETCHLIST: pop one right-hand-side item off the stack.
237 var ${translate(@result,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")} = p.pop
239 $ when {@cmd='FETCHLIST'}
240 var ${translate(@result,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")} = ${translate(@from,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}
241 assert ${translate(@result,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")} isa Array[Object]
243 $ when {@cmd='FETCHNODE'}
244 var ${translate(@result,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")} = ${translate(@from,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}
245 assert ${translate(@result,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")} isa nullable @etype
# ADDNODE: append a child node to a list, skipping null (absent optional).
247 $ when {@cmd='ADDNODE'}
248 if ${translate(@node,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")} != null then
249 ${translate(@tolist,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}.add(${translate(@node,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")})
# ADDLIST: merge one list into another; an empty destination is replaced
# wholesale (cheaper than appending element by element).
252 $ when {@cmd='ADDLIST'}
253 # if ${translate(@fromlist,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")} != null then
254 if ${translate(@tolist,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}.is_empty then
255 ${translate(@tolist,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")} = ${translate(@fromlist,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}
257 ${translate(@tolist,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}.append(${translate(@fromlist,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")})
# MAKELIST: start a fresh child list for the node under construction.
261 $ when {@cmd='MAKELIST'}
262 var ${translate(@result,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")} = new Array[Object]
# MAKENODE: instantiate the rule's AST node from the collected children.
264 $ when {@cmd='MAKENODE'}
265 var ${translate(@result,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}: nullable @etype = new @etype.init_${translate(@etype,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}(
270 ${translate(.,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}[-sep ','-]
# RETURNNODE / RETURNLIST: choose what the reduction leaves on the stack.
275 $ when {@cmd='RETURNNODE'}
279 node_list = ${translate(@node,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}
282 $ when {@cmd='RETURNLIST'}
283 node_list = ${translate(@list,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}
# Push the result with the goto state of the rule's left-hand side.
287 p.push(p.go_to(@leftside), node_list)
294 $ template make_parser_tables()
295 # Parser that build a full AST
# Generated companion of Parser: holds the LR tables (action, goto), the
# error-message strings, and the state->message mapping used by `parse`.
296 abstract class ParserTable
297 var _action_table: Array[Array[Int]]
298 private fun build_action_table
# One row per automaton state; each row is built by its own generated
# method to keep individual method sizes manageable.
300 _action_table = once [
301 $ foreach {parser_data/action_table/row}
302 action_table_row${position()}[-sep ','-]
307 $ foreach {parser_data/action_table/row}
308 private fun action_table_row${position()}: Array[Int]
# Flat triples: (lookahead index, action type, action value).
312 @from, @action, @to[-sep ','-]
# Goto table: for each nonterminal, flat (source state, target state) pairs
# searched by Parser::go_to.
318 var _goto_table: Array[Array[Int]]
319 private fun build_goto_table
322 $ foreach {parser_data/goto_table/row}
325 @from, @to[-sep ','-]
# Human-readable messages for syntax errors, unicode-escaped by the generator.
332 private fun error_messages: Array[String]
335 $ foreach {parser_data/error_messages/msg}
336 "${sablecc:string2escaped_unicode(.)}"[-sep ','-]
# Maps an ERROR action's value to an index into error_messages.
341 private fun errors: Array[Int]
344 [-foreach {parser_data/errors/i}-]${.}[-sep ','-][-end-]