/// <summary>
/// Lexes (and optionally parses) a single input on behalf of worker <paramref name="thread"/>,
/// honoring the harness configuration flags (REUSE_*, ENABLE_*_DFA, TWO_STAGE_PARSING, ...).
/// Per-thread lexers/parsers/listeners are cached in the shared arrays indexed by thread id.
/// </summary>
/// <param name="input">Character stream for one source file.</param>
/// <param name="thread">Worker slot index; must be in [0, NUMBER_OF_THREADS).</param>
/// <returns>The CRC32 checksum of the token/parse events (0-ish if COMPUTE_CHECKSUM is off).</returns>
public int parseFile(ICharStream input, int thread)
{
    Checksum checksum = new CRC32();
    Debug.Assert(thread >= 0 && thread < NUMBER_OF_THREADS);

    try
    {
        // Lazily create the per-thread blank listener instance.
        IParseTreeListener listener = sharedListeners[thread];
        if (listener == null)
        {
            listener = (IParseTreeListener)Activator.CreateInstance(listenerClass);
            sharedListeners[thread] = listener;
        }

        // Reuse or (re)create the per-thread lexer per configuration.
        Lexer lexer = sharedLexers[thread];
        if (REUSE_LEXER && lexer != null)
        {
            lexer.SetInputStream(input);
        }
        else
        {
            lexer = (Lexer)lexerCtor.Invoke(new object[] { input });
            sharedLexers[thread] = lexer;
            if (!ENABLE_LEXER_DFA)
            {
                lexer.Interpreter = new NonCachingLexerATNSimulator(lexer, lexer.Atn);
            }
            else if (!REUSE_LEXER_DFA)
            {
                lexer.Interpreter = new LexerATNSimulator(lexer, sharedLexerATNs[thread]);
            }
        }

        lexer.Interpreter.optimize_tail_calls = OPTIMIZE_TAIL_CALLS;
        if (ENABLE_LEXER_DFA && !REUSE_LEXER_DFA)
        {
            // Fresh DFA per file when the lexer DFA is not being reused.
            lexer.Interpreter.atn.ClearDFA();
        }

        CommonTokenStream tokens = new CommonTokenStream(lexer);
        tokens.Fill();
        Interlocked.Add(ref tokenCount, tokens.Size);

        if (COMPUTE_CHECKSUM)
        {
            foreach (IToken token in tokens.GetTokens())
            {
                updateChecksum(checksum, token);
            }
        }

        // Lex-only mode: report the token checksum without parsing.
        if (!RUN_PARSER)
        {
            return (int)checksum.Value;
        }

        // Reuse or (re)create the per-thread parser per configuration.
        Parser parser = sharedParsers[thread];
        if (REUSE_PARSER && parser != null)
        {
            parser.SetInputStream(tokens);
        }
        else
        {
            Parser newParser = (Parser)parserCtor.Invoke(new object[] { tokens });
            parser = newParser;
            sharedParsers[thread] = parser;
        }

        parser.RemoveErrorListeners();
        if (!TWO_STAGE_PARSING)
        {
            // In two-stage mode, stage one runs silently; diagnostics attach only on the retry.
            parser.AddErrorListener(DescriptiveErrorListener.INSTANCE);
            parser.AddErrorListener(new SummarizingDiagnosticErrorListener());
        }

        if (!ENABLE_PARSER_DFA)
        {
            parser.Interpreter = new NonCachingParserATNSimulator(parser, parser.Atn);
        }
        else if (!REUSE_PARSER_DFA)
        {
            parser.Interpreter = new ParserATNSimulator(parser, sharedParserATNs[thread]);
        }

        if (ENABLE_PARSER_DFA && !REUSE_PARSER_DFA)
        {
            parser.Interpreter.atn.ClearDFA();
        }

        // Stage one of two-stage parsing always uses fast SLL prediction with local context.
        parser.Interpreter.PredictionMode = TWO_STAGE_PARSING ? PredictionMode.Sll : PREDICTION_MODE;
        parser.Interpreter.force_global_context = FORCE_GLOBAL_CONTEXT && !TWO_STAGE_PARSING;
        parser.Interpreter.always_try_local_context = TRY_LOCAL_CONTEXT_FIRST || TWO_STAGE_PARSING;
        parser.Interpreter.optimize_ll1 = OPTIMIZE_LL1;
        parser.Interpreter.optimize_unique_closure = OPTIMIZE_UNIQUE_CLOSURE;
        parser.Interpreter.optimize_tail_calls = OPTIMIZE_TAIL_CALLS;
        parser.Interpreter.tail_call_preserves_sll = TAIL_CALL_PRESERVES_SLL;
        parser.Interpreter.treat_sllk1_conflict_as_ambiguity = TREAT_SLLK1_CONFLICT_AS_AMBIGUITY;
        parser.BuildParseTree = BUILD_PARSE_TREES;
        if (!BUILD_PARSE_TREES && BLANK_LISTENER)
        {
            parser.AddParseListener(listener);
        }
        if (BAIL_ON_ERROR || TWO_STAGE_PARSING)
        {
            // BailErrorStrategy aborts stage one on the first syntax error so we can retry with LL.
            parser.ErrorHandler = new BailErrorStrategy();
        }

        MethodInfo parseMethod = parserClass.GetMethod(entryPoint);
        object parseResult;

        IParseTreeListener checksumParserListener = null;
        try
        {
            if (COMPUTE_CHECKSUM)
            {
                checksumParserListener = new ChecksumParseTreeListener(checksum);
                parser.AddParseListener(checksumParserListener);
            }

            parseResult = parseMethod.Invoke(parser, null);
        }
        catch (TargetInvocationException ex)
        {
            if (!TWO_STAGE_PARSING)
            {
                throw;
            }

            // BUGFIX: verify the failure actually came from the bail strategy BEFORE
            // announcing a retry; previously the "Forced to retry" message was printed
            // even for unrelated exceptions that were then rethrown.
            if (!(ex.InnerException is ParseCanceledException))
            {
                throw;
            }

            string sourceName = tokens.SourceName;
            sourceName = !string.IsNullOrEmpty(sourceName) ? sourceName + ": " : "";
            Console.Error.WriteLine(sourceName + "Forced to retry with full context.");

            // Stage two: rewind the tokens and reparse with full LL prediction and diagnostics.
            tokens.Reset();
            if (REUSE_PARSER && sharedParsers[thread] != null)
            {
                parser.SetInputStream(tokens);
            }
            else
            {
                Parser newParser = (Parser)parserCtor.Invoke(new object[] { tokens });
                parser = newParser;
                sharedParsers[thread] = parser;
            }

            parser.RemoveErrorListeners();
            parser.AddErrorListener(DescriptiveErrorListener.INSTANCE);
            parser.AddErrorListener(new SummarizingDiagnosticErrorListener());
            if (!ENABLE_PARSER_DFA)
            {
                parser.Interpreter = new NonCachingParserATNSimulator(parser, parser.Atn);
            }
            parser.Interpreter.PredictionMode = PREDICTION_MODE;
            parser.Interpreter.force_global_context = FORCE_GLOBAL_CONTEXT;
            parser.Interpreter.always_try_local_context = TRY_LOCAL_CONTEXT_FIRST;
            parser.Interpreter.optimize_ll1 = OPTIMIZE_LL1;
            parser.Interpreter.optimize_unique_closure = OPTIMIZE_UNIQUE_CLOSURE;
            parser.Interpreter.optimize_tail_calls = OPTIMIZE_TAIL_CALLS;
            parser.Interpreter.tail_call_preserves_sll = TAIL_CALL_PRESERVES_SLL;
            parser.Interpreter.treat_sllk1_conflict_as_ambiguity = TREAT_SLLK1_CONFLICT_AS_AMBIGUITY;
            parser.BuildParseTree = BUILD_PARSE_TREES;
            if (!BUILD_PARSE_TREES && BLANK_LISTENER)
            {
                parser.AddParseListener(listener);
            }
            if (BAIL_ON_ERROR)
            {
                parser.ErrorHandler = new BailErrorStrategy();
            }

            parseResult = parseMethod.Invoke(parser, null);
        }
        finally
        {
            // The checksum listener must not leak into the next file parsed by this slot.
            if (checksumParserListener != null)
            {
                parser.RemoveParseListener(checksumParserListener);
            }
        }

        Assert.IsInstanceOfType(parseResult, typeof(IParseTree));
        if (BUILD_PARSE_TREES && BLANK_LISTENER)
        {
            ParseTreeWalker.Default.Walk(listener, (ParserRuleContext)parseResult);
        }
    }
    catch (Exception e)
    {
        // Syntax-error cancellations are expected noise unless error reporting is enabled.
        if (!REPORT_SYNTAX_ERRORS && e is ParseCanceledException)
        {
            return (int)checksum.Value;
        }

        throw;
    }

    return (int)checksum.Value;
}
/// <summary>
/// Parses every input in <paramref name="sources"/> (on NET40PLUS via a bounded task
/// scheduler feeding <see cref="parseFile"/> through Callable_1; otherwise synchronously),
/// aggregates a combined checksum, prints timing/size totals, and then dumps lexer/parser
/// DFA and ATN-config statistics gathered from the shared worker-slot instances.
/// </summary>
/// <param name="factory">Factory handed to each Callable_1 worker.</param>
/// <param name="sources">Input descriptors; each is rewound to offset 0 before dispatch.</param>
protected void parseSources(ParserFactory factory, IEnumerable <InputDescriptor> sources)
{
    Stopwatch startTime = Stopwatch.StartNew();
    // Reset the cross-thread token counter for this pass.
    Thread.VolatileWrite(ref tokenCount, 0);
    int sourceCount = 0;
    int inputSize = 0;

#if NET40PLUS
    // Pool of worker-slot ids; each Callable_1 takes one while parsing and returns it after.
    BlockingCollection <int> threadIdentifiers = new BlockingCollection <int>();
    for (int i = 0; i < NUMBER_OF_THREADS; i++)
    {
        threadIdentifiers.Add(i);
    }

    ICollection <Task <int> > results = new List <Task <int> >();
    QueuedTaskScheduler executorServiceHost = new QueuedTaskScheduler(NUMBER_OF_THREADS);
    TaskScheduler executorService = executorServiceHost.ActivateNewQueue();
#else
    // No TPL available: collect deferred delegates and invoke them serially below.
    ICollection <Func <int> > results = new List <Func <int> >();
#endif

    foreach (InputDescriptor inputDescriptor in sources)
    {
        ICharStream input = inputDescriptor.GetInputStream();
        sourceCount++;
        input.Seek(0);
        inputSize += input.Size;
#if NET40PLUS
        Task <int> futureChecksum = Task.Factory.StartNew <int>(new Callable_1(input, factory, threadIdentifiers).call, CancellationToken.None, TaskCreationOptions.None, executorService);
#else
        Func <int> futureChecksum = new Callable_1(input, factory).call;
#endif
        results.Add(futureChecksum);
    }

    // Fold each per-file checksum into the aggregate; Task.Result blocks until completion.
    Checksum checksum = new CRC32();
    foreach (var future in results)
    {
#if NET40PLUS
        int value = future.Result;
#else
        int value = future();
#endif
        if (COMPUTE_CHECKSUM)
        {
            updateChecksum(checksum, value);
        }
    }

#if NET40PLUS
    // NOTE(review): not in a try/finally — an exception above leaks the scheduler host.
    executorServiceHost.Dispose();
#endif

    Console.Out.WriteLine("Total parse time for {0} files ({1} KB, {2} tokens, checksum 0x{3:X8}): {4}ms", sourceCount, inputSize / 1024, Thread.VolatileRead(ref tokenCount), COMPUTE_CHECKSUM ? checksum.Value : 0, startTime.ElapsedMilliseconds);

    if (sharedLexers.Length > 0)
    {
        // Stats are sampled from slot 0 only; with REUSE_LEXER_DFA the DFA is shared anyway.
        // NOTE(review): sharedLexers[0] could still be null if no source was parsed — confirm.
        Lexer lexer = sharedLexers[0];
        LexerATNSimulator lexerInterpreter = lexer.Interpreter;
        DFA[] modeToDFA = lexerInterpreter.atn.modeToDFA;
        if (SHOW_DFA_STATE_STATS)
        {
            int states = 0;
            int configs = 0;
            HashSet <ATNConfig> uniqueConfigs = new HashSet <ATNConfig>();

            for (int i = 0; i < modeToDFA.Length; i++)
            {
                DFA dfa = modeToDFA[i];
                if (dfa == null || dfa.states == null)
                {
                    continue;
                }

                states += dfa.states.Count;
                foreach (DFAState state in dfa.states.Values)
                {
                    configs += state.configs.Count;
                    uniqueConfigs.UnionWith(state.configs);
                }
            }

            Console.Out.WriteLine("There are {0} lexer DFAState instances, {1} configs ({2} unique), {3} prediction contexts.", states, configs, uniqueConfigs.Count, lexerInterpreter.atn.ContextCacheSize);
        }
    }

    if (RUN_PARSER && sharedParsers.Length > 0)
    {
        Parser parser = sharedParsers[0];
        // make sure the individual DFAState objects actually have unique ATNConfig arrays
        ParserATNSimulator interpreter = parser.Interpreter;
        DFA[] decisionToDFA = interpreter.atn.decisionToDFA;

        if (SHOW_DFA_STATE_STATS)
        {
            int states = 0;
            int configs = 0;
            HashSet <ATNConfig> uniqueConfigs = new HashSet <ATNConfig>();

            for (int i = 0; i < decisionToDFA.Length; i++)
            {
                DFA dfa = decisionToDFA[i];
                if (dfa == null || dfa.states == null)
                {
                    continue;
                }

                states += dfa.states.Count;
                foreach (DFAState state in dfa.states.Values)
                {
                    configs += state.configs.Count;
                    uniqueConfigs.UnionWith(state.configs);
                }
            }

            Console.Out.WriteLine("There are {0} parser DFAState instances, {1} configs ({2} unique), {3} prediction contexts.", states, configs, uniqueConfigs.Count, interpreter.atn.ContextCacheSize);
        }

        int localDfaCount = 0;
        int globalDfaCount = 0;
        int localConfigCount = 0;
        int globalConfigCount = 0;
        // Histogram: contextsInDFAState[k] = number of DFA states holding exactly k configs.
        int[] contextsInDFAState = new int[0];

        for (int i = 0; i < decisionToDFA.Length; i++)
        {
            DFA dfa = decisionToDFA[i];
            if (dfa == null || dfa.states == null)
            {
                continue;
            }

            if (SHOW_CONFIG_STATS)
            {
                foreach (DFAState state in dfa.states.Keys)
                {
                    // Grow the histogram on demand to fit the largest config count seen.
                    if (state.configs.Count >= contextsInDFAState.Length)
                    {
                        Array.Resize(ref contextsInDFAState, state.configs.Count + 1);
                    }

                    // Classify accept states by whether any config escapes into the outer context.
                    if (state.IsAcceptState)
                    {
                        bool hasGlobal = false;
                        foreach (ATNConfig config in state.configs)
                        {
                            if (config.ReachesIntoOuterContext)
                            {
                                globalConfigCount++;
                                hasGlobal = true;
                            }
                            else
                            {
                                localConfigCount++;
                            }
                        }

                        if (hasGlobal)
                        {
                            globalDfaCount++;
                        }
                        else
                        {
                            localDfaCount++;
                        }
                    }

                    contextsInDFAState[state.configs.Count]++;
                }
            }

            if (EXPORT_LARGEST_CONFIG_CONTEXTS)
            {
                // Export only configs whose dot rendering exceeds the largest seen so far.
                foreach (DFAState state in dfa.states.Keys)
                {
                    foreach (ATNConfig config in state.configs)
                    {
                        string configOutput = config.ToDotString();
                        if (configOutput.Length <= configOutputSize)
                        {
                            continue;
                        }

                        configOutputSize = configOutput.Length;
                        writeFile(tmpdir, "d" + dfa.decision + ".s" + state.stateNumber + ".a" + config.Alt + ".config.dot", configOutput);
                    }
                }
            }
        }

        // Config statistics are only printed on the first pass to avoid duplicate output.
        if (SHOW_CONFIG_STATS && currentPass == 0)
        {
            Console.Out.WriteLine("  DFA accept states: {0} total, {1} with only local context, {2} with a global context", localDfaCount + globalDfaCount, localDfaCount, globalDfaCount);
            Console.Out.WriteLine("  Config stats: {0} total, {1} local, {2} global", localConfigCount + globalConfigCount, localConfigCount, globalConfigCount);
            if (SHOW_DFA_STATE_STATS)
            {
                for (int i = 0; i < contextsInDFAState.Length; i++)
                {
                    if (contextsInDFAState[i] != 0)
                    {
                        Console.Out.WriteLine("  {0} configs = {1}", i, contextsInDFAState[i]);
                    }
                }
            }
        }
    }
}