# Parse the grammar file with the nitcc-generated lexer and parser.
var l = new Lexer_nitcc(text)
var ts = l.lex
var p = new Parser_nitcc
p.tokens.add_all ts
var node = p.parse
if not node isa NProd then
	# A failed parse yields an NError node: report its position and message.
	assert node isa NError
	print "{node.position.to_s} Syntax Error: {node.message}"
	exit 1
	abort # NOTE(review): presumably here because `exit` is not known to diverge — confirm
end
# Report the concrete grammar size and dump it to {name}.concrete_grammar.out.
var nbalts = 0
for prod in gram.prods do nbalts += prod.alts.length
print "Concrete grammar: {gram.prods.length} productions, {nbalts} alternatives (see {name}.concrete_grammar.out)"
var pretty = gram.pretty
var f = new OFStream.open("{name}.concrete_grammar.out")
f.write "// Concrete grammar of {name}\n"
f.write pretty
f.close
# Report the LR automaton and dump it both as Graphviz and as pretty text.
print "LR automaton: {lr.states.length} states (see {name}.lr.dot and {name}.lr.out)"
lr.to_dot("{name}.lr.dot")
pretty = lr.pretty
f = new OFStream.open("{name}.lr.out")
f.write "// LR automaton of {name}\n"
f.write pretty
f.close
print "NFA automaton: {nfa.states.length} states (see {name}.nfa.dot)"
nfa.to_dot("{name}.nfa.dot")

# Determinize the lexer NFA, then minimize the resulting DFA.
var dfa = nfa.to_dfa.to_minimal_dfa

dfa.solve_token_inclusion

print "DFA automaton: {dfa.states.length} states (see {name}.dfa.dot)"
dfa.to_dot("{name}.dfa.dot")
# Sanity checks on the token set; each failure stops the generation.
if dfa.tags.has_key(dfa.start) then
	# The start state is accepting: some token matches the empty string.
	print "Error: Empty tokens {dfa.tags[dfa.start].join(" ")}"
	exit(1)
end
for s, tks in dfa.tags do
	if tks.length <= 1 then continue
	# More than one token accepted in the same state: ambiguous lexemes.
	print "Error: Conflicting tokens: {tks.join(" ")}"
	exit(1)
end
for t in gram.tokens do
	if t.name == "Eof" then continue
	if dfa.retrotags.has_key(t) and not dfa.retrotags[t].is_empty then continue
	# No DFA state accepts this token, so it can never be lexed.
	print "Error: Token {t} matches nothing"
	exit(1)
end
# Generate Nit code: the lexer, the parser, and a standalone tester module.
print "Generate {name}_lexer.nit {name}_parser.nit {name}_test_parser.nit"
dfa.gen_to_nit("{name}_lexer.nit", name, "{name}_parser")
lr.gen_to_nit("{name}_parser.nit", name)
f = new OFStream.open("{name}_test_parser.nit")
f.write """# Generated by nitcc for the language {{{name}}}

# Standalone parser tester for the language {{{name}}}
module {{{name}}}_test_parser
import nitcc_runtime
import {{{name}}}_lexer
import {{{name}}}_parser

# Class to test the parser for the language {{{name}}}
class TestParser_{{{name}}}
	super TestParser
	redef fun name do return \"{{{name}}}\"
	redef fun new_lexer(text) do return new Lexer_{{{name}}}(text)
	redef fun new_parser do return new Parser_{{{name}}}
end
var t = new TestParser_{{{name}}}
t.main
"""
f.close