private import tables
# NOTE(review): this file is a diff-style changeset of Nit source (the nitc
# lexer/parser module), not Kotlin as the file extension hints. Lines starting
# with `-` are removals, lines starting with `+` are their replacements.
#
# Token: the old-style `_text` attribute is renamed to `cached_text` and made
# explicitly private; the accessors fill the cache lazily from the token's
# source location.
redef class Token
- var _text: nullable String
+ private var cached_text: nullable String
# Lazily compute the token text on first access, then cache it.
redef fun text
do
- var res = _text
+ var res = _cached_text
if res != null then return res
res = location.text
- _text = res
+ _cached_text = res
return res
end
# Setter: overwrite the cached text directly.
redef fun text=(text)
do
- _text = text
+ _cached_text = text
end
# Index of this token class in the generated parser tables.
fun parser_index: Int is abstract
# Initialize the token with an empty cached text at the given location.
# NOTE(review): `_cached_text` in the `+` lines is presumably Nit's
# direct-attribute-access syntax for the `cached_text` attribute — confirm
# against the Nit attribute rules; `_location` below is an ANode attribute
# untouched by this diff.
init init_tk(loc: Location)
do
- _text = ""
+ _cached_text = ""
_location = loc
end
end
# Lexer: attribute declarations migrated from the old `_name` style to plain
# `var name`, with `private` kept only for purely internal state.
class Lexer
super TablesCapable
# Last peeked token
- var _token: nullable Token
+ var token: nullable Token
# Lexer current state
- var _state: Int = 0
+ private var state: Int = 0
# The source file
var file: SourceFile
# Current character in the stream
- var _stream_pos: Int = 0
+ var stream_pos: Int = 0
# Current line number in the input stream
- var _line: Int = 0
+ var line: Int = 0
# Current column in the input stream
- var _pos: Int = 0
+ var pos: Int = 0
# Was the last character a carriage-return?
- var _cr: Bool = false
+ var cr: Bool = false
# Constant state values
private fun state_initial: Int do return 0 end
# NOTE(review): the hunk below belongs to a different class (an ANode
# redefinition) — the diff context between it and Lexer, including Lexer's
# closing `end`, is missing from this chunk.
super ANode
# All the annotations attached directly to the node.
# The hand-written getter/setter pair is collapsed into a single
# `is writable` attribute declaration.
- var _n_annotations: nullable AAnnotations = null
- fun n_annotations: nullable AAnnotations do return _n_annotations
- fun n_annotations=(n_annotations: nullable AAnnotations) do _n_annotations = n_annotations
+ var n_annotations: nullable AAnnotations = null is writable
redef fun replace_with(n: ANode)
do
end
# The current visited node; same getter/setter-to-`is writable` collapse.
- var _current_node: nullable ANode = null
- fun current_node: nullable ANode do return _current_node
- fun current_node=(current_node: nullable ANode) do _current_node = current_node
+ var current_node: nullable ANode = null is writable
end
# Token of end of line (basically `\n`)
# State of the parser automata as stored in the parser stack.
private class State
# The internal state number
- var _state: Int
+ var state: Int
# The node stored with the state in the stack
- var _nodes: nullable Object
+ var nodes: nullable Object
# NOTE(review): the body of this init and State's closing `end` are cut off
# in this chunk; presumably it just assigns both attributes — confirm against
# the full file.
init(state: Int, nodes: nullable Object)
do
# Parser: same attribute migration; the LR stack and its cursor stay private.
class Parser
super TablesCapable
# Associated lexer
- var _lexer: Lexer
+ var lexer: Lexer
# Stack of pushed states and productions
- var _stack: Array[State]
+ private var stack: Array[State]
# Position in the stack
- var _stack_pos: Int
+ private var stack_pos: Int
# Create a new parser based on a given lexer
# NOTE(review): the init body is missing from this chunk, and the
# `reduce_table` hunk below belongs to another class (likely the generated
# parser subclass) whose surrounding context was dropped.
init(lexer: Lexer)
end
end
# Table of reduce actions, built lazily by `build_reduce_table`.
- var _reduce_table: Array[ReduceAction]
+ private var reduce_table: Array[ReduceAction]
private fun build_reduce_table is abstract
end
# Prod: location attribute renamed from `_first_location` to `first_location`.
redef class Prod
# Location on the first token after the start of a production
# So outside the production for epsilon production
- var _first_location: nullable Location
+ var first_location: nullable Location
end
# Find location of production nodes
# Visitor state attributes migrated off the `_name` style; the class body
# continues past this hunk.
private class ComputeProdLocationVisitor
super Visitor
# Currently visited productions that need a first token
- var _need_first_prods: Array[Prod] = new Array[Prod]
+ var need_first_prods: Array[Prod] = new Array[Prod]
# Already visited epsilon productions that wait for something after them
- var _need_after_epsilons: Array[Prod] = new Array[Prod]
+ var need_after_epsilons: Array[Prod] = new Array[Prod]
# Location of the last visited token in the current production
- var _last_location: nullable Location = null
+ var last_location: nullable Location = null
# NOTE(review): the body of `visit` is truncated in this chunk; the
# `l1.append(l2)` / `return l1` lines below belong to a different method
# whose header is missing (l1/l2 are not declared in anything visible here).
redef fun visit(n: ANode)
do
l1.append(l2)
return l1
end
# ReduceAction-style hunk: the `goto` attribute drops its underscore.
# NOTE(review): the init still writes `_goto`, which is presumably Nit's
# direct-attribute-access form of the `goto` attribute (same pattern as
# `_cached_text` above) — TODO confirm, otherwise this line also needs the
# rename.
- var _goto: Int
+ var goto: Int
init(g: Int) do _goto = g
end