end
end
-var t = new MyTest
+var t = new TestParser_calc
var n = t.main
var v = new Calulator
v.enter_visit(n)
end
end
-var t = new MyTest
+var t = new TestParser_minilang
var n = t.main
var v = new Interpretor
# Generate the Nit source code of the lexer
# `filepath` is the name of the output file
# `parser` is the name of the parser module (used to import the token classes)
- fun gen_to_nit(filepath: String, parser: nullable String)
+ fun gen_to_nit(filepath: String, name: String, parser: nullable String)
do
- var gen = new DFAGenerator(filepath, self, parser)
+ var gen = new DFAGenerator(filepath, name, self, parser)
gen.gen_to_nit
end
end
# Generate the Nit source code of the lexer
private class DFAGenerator
var filepath: String
+ var name: String
var automaton: Automaton
var parser: nullable String
var out: OStream
- init(filepath: String, automaton: Automaton, parser: nullable String) do
+ init(filepath: String, name: String, automaton: Automaton, parser: nullable String) do
self.filepath = filepath
+ self.name = name
self.automaton = automaton
self.parser = parser
self.out = new OFStream.open(filepath)
i += 1
end
- add "# Lexer generated by nitcc"
+ add "# Lexer generated by nitcc for the grammar {name}"
add("import nitcc_runtime\n")
var p = parser
if p != null then add("import {p}\n")
- add("class MyLexer\n")
+ add("class Lexer_{name}\n")
add("\tsuper Lexer\n")
add("\tredef fun start_state do return dfastate_{names[automaton.start]}\n")
add("end\n")
end
# Generate the parser of the automaton
- fun gen_to_nit(filepath: String)
+ fun gen_to_nit(filepath: String, name: String)
do
var gen = new Generator
- gen.gen_to_nit(self)
+ gen.gen_to_nit(self, name)
var f = new OFStream.open(filepath)
for s in gen.out do
f.write(s)
private class Generator
var out = new Array[String]
fun add(s: String) do out.add(s)
- fun gen_to_nit(autom: LRAutomaton)
+ fun gen_to_nit(autom: LRAutomaton, name: String)
do
var states = autom.states
var gram = autom.grammar
- add "# Parser generated by nitcc"
+ add "# Parser generated by nitcc for the grammar {name}"
add "import nitcc_runtime"
- add "class MyParser"
+ add "class Parser_{name}"
add "\tsuper Parser"
add "\tredef fun start_state do return state_{states.first.cname}"
add "end"
# Parse the grammar file
-var l = new MyLexer(text)
+var l = new Lexer_nitcc(text)
var ts = l.lex
-var p = new MyParser
+var p = new Parser_nitcc
p.tokens.add_all ts
var node = p.parse
# Generate Nit code
print "Generate {name}_lexer.nit {name}_parser.nit {name}_test_parser.nit"
-dfa.gen_to_nit("{name}_lexer.nit", "{name}_parser")
-lr.gen_to_nit("{name}_parser.nit")
+dfa.gen_to_nit("{name}_lexer.nit", name, "{name}_parser")
+lr.gen_to_nit("{name}_parser.nit", name)
f = new OFStream.open("{name}_test_parser.nit")
f.write """# Generated by nitcc for the language {{{name}}}
import nitcc_runtime
import {{{name}}}_lexer
import {{{name}}}_parser
-class MyTest
+class TestParser_{{{name}}}
super TestParser
redef fun name do return \"{{{name}}}\"
- redef fun new_lexer(text) do return new MyLexer(text)
- redef fun new_parser do return new MyParser
+ redef fun new_lexer(text) do return new Lexer_{{{name}}}(text)
+ redef fun new_parser do return new Parser_{{{name}}}
end
-var t = new MyTest
+var t = new TestParser_{{{name}}}
t.main
"""
f.close
# Hand-written lexer of nitcc
# Used only for the bootstrap of the tool.
-class MyLexer
+class Lexer_nitcc
var text: String
var iter: Iterator[Char] = "".iterator
print "LR automaton: {a.states.length} states (see nitcc0.lr.dot)"
a.to_dot("nitcc0.lr.dot")
-a.gen_to_nit("nitcc_parser.nit")
+a.gen_to_nit("nitcc_parser.nit", "nitcc")
var f = new OFStream.open("nitcc_lexer.nit")
f.write("import nitcc_lexer0\n")