# nitcc generator driver (resolved).
# NOTE(review): the original text was a concatenation of unified-diff hunks —
# '-' lines (old) and '+' lines (new) interleaved with context lines. Resolved
# here to the post-patch image: '+' lines kept (prefix stripped), '-' lines
# dropped, context lines unchanged. Regions between hunks are elided from this
# view, so some control structures (e.g. the `if fi != "-"` below) close
# outside the visible span.
var text
if fi != "-" then
	# Read the whole grammar source from the named file.
	var f = new FileReader.open(fi)
	text = f.read_all
	f.close
else
if not node isa NProd then
	assert node isa NError
	# `position` is nullable; `.as(not null)` asserts it is set before printing.
	print "{node.position.as(not null)} Syntax Error: {node.message}"
	exit 1
	abort
end
var lr = gram.lr0
# NOTE(review): `conflitcs` is a typo for `conflicts`, kept as-is — it may be
# referenced by code past this chunk.
var conflitcs = new ArraySet[Production]
# Collect every production involved in an LR conflict.
for s in lr.states do
	for i in s.conflicting_items do conflitcs.add(i.alt.prod)
end
if not conflitcs.is_empty then
print "Concrete grammar: {gram.prods.length} productions, {nbalts} alternatives (see {name}.concrete_grammar.out)"
var pretty = gram.pretty
# Dump the concrete grammar for inspection.
var f = new FileWriter.open("{name}.concrete_grammar.out")
f.write "// Concrete grammar of {name}\n"
f.write pretty
f.close
print "LR automaton: {lr.states.length} states (see {name}.lr.dot and {name}.lr.out)"
lr.to_dot("{name}.lr.dot")
pretty = lr.pretty
f = new FileWriter.open("{name}.lr.out")
f.write "// LR automaton of {name}\n"
f.write pretty
f.close
var nfa = v2.nfa
print "NFA automaton: {nfa.states.length} states (see {name}.nfa.dot)"
nfa.to_dot.write_to_file("{name}.nfa.dot")
# Epsilon-free NFA, then the raw (non-minimal) DFA, then the minimal DFA —
# each stage is dumped as a .dot file for debugging.
var nfanoe = nfa.to_nfa_noe
nfanoe.to_dot.write_to_file("{name}.nfanoe.dot")
print "NFA automaton without epsilon: {nfanoe.states.length} states (see {name}.nfanoe.dot)"
var dfa = nfa.to_dfa
dfa.to_dot.write_to_file("{name}.dfanomin.dot")
print "DFA automaton (non minimal): {dfa.states.length} states (see {name}.dfanomin.dot)"

dfa = dfa.to_minimal_dfa
dfa.solve_token_inclusion
print "DFA automaton: {dfa.states.length} states (see {name}.dfa.dot)"
dfa.to_dot.write_to_file("{name}.dfa.dot")
# A tag on the start state means some token matches the empty string — an error.
if dfa.tags.has_key(dfa.start) then
print "Error: Empty tokens {dfa.tags[dfa.start].join(" ")}"
dfa.gen_to_nit("{name}_lexer.nit", name, "{name}_parser")
lr.gen_to_nit("{name}_parser.nit", name)
# Emit a standalone tester module for the generated lexer+parser.
# (The triple-quoted template below is runtime string content and continues
# past this view; kept byte-identical.)
f = new FileWriter.open("{name}_test_parser.nit")
f.write """# Generated by nitcc for the language {{{name}}}
# Standalone parser tester for the language {{{name}}}
module {{{name}}}_test_parser is generated
import nitcc_runtime
import {{{name}}}_lexer
import {{{name}}}_parser