// check if all actions can actually be matched in this DFA
public void checkActions(LexScan scanner, LexParse parser) {
  EOFActions eofActions = parser.getEOFActions();

  IEnumerator l = scanner.actions.GetEnumerator();
  while (l.MoveNext()) {
    Object next = l.Current;
    // an action that was never recorded in usedActions during DFA construction
    // and is not an EOF action can never be matched
    if (!next.Equals(usedActions[next]) && !eofActions.isEOFAction(next)) {
      Out.warning(scanner.file, ErrorMessages.NEVER_MATCH,
                  ((Action)next).priority - 1, -1);
    }
  }
}
public NFA(int numInput, LexScan scanner, RegExps regExps,
           Macros macros, CharClasses classes)
  : this(numInput, regExps.NFASize(macros) + 2 * scanner.states.number()) {
  this.scanner = scanner;
  this.regExps = regExps;
  this.macros = macros;
  this.classes = classes;

  numLexStates = scanner.states.number();

  // reserve two start states per lexical state
  // (one for normal matching, one for matching at the beginning of a line)
  ensureCapacity(2 * numLexStates);
  numStates = 2 * numLexStates;
}
public const String version = "1.4"; //$NON-NLS-1$

/**
 * Generates a scanner for the specified input file.
 *
 * @param inputFile  a file containing a lexical specification
 *                   to generate a scanner for.
 */
public static void generate(File inputFile) {
  Out.resetCounters();

  Timer totalTime = new Timer();
  Timer time = new Timer();

  LexScan scanner = null;
  LexParse parser = null;
  TextReader inputReader = null;

  totalTime.start();

  try {
    Out.println(ErrorMessages.READING, inputFile.ToString());
    inputReader = new StreamReader(inputFile);
    scanner = new LexScan(inputReader);
    scanner.setFile(inputFile);
    parser = new LexParse(scanner);
  }
  catch (FileNotFoundException) {
    Out.error(ErrorMessages.CANNOT_OPEN, inputFile.ToString());
    throw new GeneratorException();
  }

  try {
    // parse the specification and build the NFA
    NFA nfa = (NFA)parser.parse().value;

    Out.checkErrors();

    if (Options.dump) {
      Out.dump(ErrorMessages.get(ErrorMessages.NFA_IS) + Out.NL + nfa + Out.NL);
    }

    if (Options.dot) {
      nfa.writeDot(Emitter.normalize("nfa.dot", null)); //$NON-NLS-1$
    }

    Out.println(ErrorMessages.NFA_STATES, nfa.numStates);

    // convert the NFA into an equivalent DFA
    time.start();
    DFA dfa = nfa.getDFA();
    time.stop();
    Out.time(ErrorMessages.DFA_TOOK, time);

    dfa.checkActions(scanner, parser);

    nfa = null;

    if (Options.dump) {
      Out.dump(ErrorMessages.get(ErrorMessages.DFA_IS) + Out.NL + dfa + Out.NL);
    }

    if (Options.dot) {
      dfa.writeDot(Emitter.normalize("dfa-big.dot", null)); //$NON-NLS-1$
    }

    // minimize the DFA before emitting the scanner
    time.start();
    dfa.minimize();
    time.stop();
    Out.time(ErrorMessages.MIN_TOOK, time);

    if (Options.dump) {
      Out.dump(ErrorMessages.get(ErrorMessages.MIN_DFA_IS) + Out.NL + dfa);
    }

    if (Options.dot) {
      dfa.writeDot(Emitter.normalize("dfa-min.dot", null)); //$NON-NLS-1$
    }

    // emit the scanner source code
    time.start();
    Emitter e = new Emitter(inputFile, parser, dfa);
    e.emit();
    time.stop();
    Out.time(ErrorMessages.WRITE_TOOK, time);

    totalTime.stop();
    Out.time(ErrorMessages.TOTAL_TIME, totalTime);
  }
  catch (ScannerException e) {
    Out.error(e.file, e.message, e.line, e.column);
    throw new GeneratorException();
  }
  catch (MacroException e) {
    Out.error(e.Message);
    throw new GeneratorException();
  }
  catch (IOException e) {
    Out.error(ErrorMessages.IO_ERROR, e.ToString());
    throw new GeneratorException();
  }
  catch (OutOfMemoryException) {
    Out.error(ErrorMessages.OUT_OF_MEMORY);
    throw new GeneratorException();
  }
  catch (GeneratorException) {
    throw new GeneratorException();
  }
  catch (Exception e) {
    Out.error(e.ToString());
    throw new GeneratorException();
  }
}
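// Illustrative sketch only, not taken from the original sources: one plausible way a
// driver could invoke generate() for a single specification file. The method name
// "runExample", the file name "Scanner.flex", and the assumption that File has a
// string constructor are all hypothetical; the real command-line entry point also
// configures options such as Options.dump and Options.dot before calling generate().
public static void runExample() {
  File specFile = new File("Scanner.flex"); // hypothetical spec file name

  try {
    generate(specFile);
  }
  catch (GeneratorException) {
    // generate() has already reported the details via Out.error;
    // a driver would typically just signal failure to its caller
    Environment.Exit(1);
  }
}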