1 $ // This file is part of NIT ( http://www.nitlanguage.org ).
3 $ // Copyright 2008 Jean Privat <jean@pryen.org>
4 $ // Based on algorithms developed for ( http://www.sablecc.org/ ).
6 $ // Licensed under the Apache License, Version 2.0 (the "License");
7 $ // you may not use this file except in compliance with the License.
8 $ // You may obtain a copy of the License at
10 $ // http://www.apache.org/licenses/LICENSE-2.0
12 $ // Unless required by applicable law or agreed to in writing, software
13 $ // distributed under the License is distributed on an "AS IS" BASIS,
14 $ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 $ // See the License for the specific language governing permissions and
16 $ // limitations under the License.
18 $ template make_parser()
20 # State of the parser automata as stored in the parser stack.
22 # The internal state number
23 readable writable var _state: Int
25 # The node stored with the state in the stack
26 readable writable var _nodes: nullable Object
28 init(state: Int, nodes: nullable Object)
40 # Stack of pushed states and productions
41 var _stack: Array[State]
43 # Position in the stack
46 # Create a new parser based on a given lexer
50 _stack = new Array[State]
57 # Do a transition in the automata
58 private fun go_to(index: Int): Int
61 var table = _goto_table[index]
63 var high = table.length/2 - 1
66 var middle = (low + high) / 2
67 var subindex = middle * 2
69 if state < table[subindex] then
71 else if state > table[subindex] then
74 return table[subindex + 1]
78 return table[1] # Default value
81 # Push something in the state stack
82 private fun push(numstate: Int, list_node: nullable Object)
84 var pos = _stack_pos + 1
86 if pos < _stack.length then
87 var state = _stack[pos]
88 state.state = numstate
89 state.nodes = list_node
91 _stack.push(new State(numstate, list_node))
96 private fun state: Int
98 return _stack[_stack_pos].state
101 # Pop something from the state stack
102 private fun pop: nullable Object
104 var res = _stack[_stack_pos].nodes
105 _stack_pos = _stack_pos -1
109 # Build and return a full AST.
116 var token = lexer.peek
117 var last_pos = token.location.column_start
118 var last_line = token.location.line_start
120 if token isa PError then
121 return new Start(null, token)
124 var index = token.parser_index
125 var table = _action_table[state]
126 var action_type = table[1]
127 var action_value = table[2]
130 var high = table.length/3 - 1
133 var middle = (low + high) / 2
134 var subindex = middle * 3
136 if index < table[subindex] then
138 else if index > table[subindex] then
141 action_type = table[subindex + 1]
142 action_value = table[subindex + 2]
143 high = low -1 # break
147 if action_type == 0 then # SHIFT
148 push(action_value, lexer.next)
149 else if action_type == 1 then # REDUCE
150 _reduce_table[action_value].action(self)
151 else if action_type == 2 then # ACCEPT
152 var node2 = lexer.next
155 assert node1 isa ${/parser/prods/prod/@ename}
156 var node = new Start(node1, node2)
157 (new SearchTokensVisitor).enter_visit(node)
159 else if action_type == 3 then # ERROR
160 var location = new Location(lexer.filename, last_line, last_line, last_pos, last_pos)
161 var node2 = new PError.init_error(error_messages[errors[action_value]],location)
162 var node = new Start(null, node2)
169 var _reduce_table: Array[ReduceAction]
170 private fun build_reduce_table
172 _reduce_table = new Array[ReduceAction].with_items(
173 $ foreach {rules/rule}
174 new ReduceAction@index[-sep ','-]
180 # Find first and last tokens of production nodes
181 private class SearchTokensVisitor
183 var _untokenned_nodes: Array[Prod]
184 var _last_token: nullable Token = null
185 redef fun visit(n: nullable PNode)
189 else if n isa Token then
191 for no in _untokenned_nodes do
194 _untokenned_nodes.clear
197 _untokenned_nodes.add(n)
199 n.last_token = _last_token
201 if n.first_token != null then
202 var start_location = n.first_token.location
203 var end_location = _last_token.location
205 if start_location != null and end_location != null then
206 var file = end_location.file
207 var line_start = start_location.line_start
208 var line_end = end_location.line_end
209 var column_start = start_location.column_start
210 var column_end = end_location.column_end
211 n.location = new Location(file, line_start, line_end, column_start, column_end)
218 _untokenned_nodes = new Array[Prod]
222 # Each reduce action has its own class; this one is the root of the hierarchy.
223 private abstract class ReduceAction
224 fun action(p: Parser) is abstract
227 $ foreach {rules/rule}
228 private class ReduceAction@index
230 redef fun action(p: Parser)
232 var node_list: nullable Object = null
236 var ${translate(@result,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")} = p.pop
238 $ when {@cmd='FETCHLIST'}
239 var ${translate(@result,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")} = ${translate(@from,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}
240 assert ${translate(@result,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")} isa Array[Object]
242 $ when {@cmd='FETCHNODE'}
243 var ${translate(@result,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")} = ${translate(@from,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}
244 assert ${translate(@result,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")} isa nullable @etype
246 $ when {@cmd='ADDNODE'}
247 if ${translate(@node,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")} != null then
248 ${translate(@tolist,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}.add(${translate(@node,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")})
251 $ when {@cmd='ADDLIST'}
252 # if ${translate(@fromlist,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")} != null then
253 if ${translate(@tolist,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}.is_empty then
254 ${translate(@tolist,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")} = ${translate(@fromlist,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}
256 ${translate(@tolist,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}.append(${translate(@fromlist,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")})
260 $ when {@cmd='MAKELIST'}
261 var ${translate(@result,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")} = new Array[Object]
263 $ when {@cmd='MAKENODE'}
264 var ${translate(@result,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}: nullable @etype = new @etype.init_${translate(@etype,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}(
269 ${translate(.,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}[-sep ','-]
274 $ when {@cmd='RETURNNODE'}
278 node_list = ${translate(@node,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}
281 $ when {@cmd='RETURNLIST'}
282 node_list = ${translate(@list,"ABCDEFGHIJKLMNOPQRSTUVWXYZ","abcdefghijklmnopqrstuvwxyz")}
286 p.push(p.go_to(@leftside), node_list)
293 $ template make_parser_tables()
294 # Parser that builds a full AST
295 abstract class ParserTable
296 var _action_table: Array[Array[Int]]
297 private fun build_action_table
299 _action_table = once [
300 $ foreach {parser_data/action_table/row}
301 action_table_row${position()}[-sep ','-]
306 $ foreach {parser_data/action_table/row}
307 private fun action_table_row${position()}: Array[Int]
311 @from, @action, @to[-sep ','-]
317 var _goto_table: Array[Array[Int]]
318 private fun build_goto_table
321 $ foreach {parser_data/goto_table/row}
324 @from, @to[-sep ','-]
331 private fun error_messages: Array[String]
334 $ foreach {parser_data/error_messages/msg}
335 "${sablecc:string2escaped_unicode(.)}"[-sep ','-]
340 private fun errors: Array[Int]
343 [-foreach {parser_data/errors/i}-]${.}[-sep ','-][-end-]