public ParserInterpreter(string grammarFileName, IVocabulary vocabulary, IEnumerable<string> ruleNames, ATN atn, ITokenStream input)
    : base(input)
{
    this._grammarFileName = grammarFileName;
    this._atn = atn;
    this._ruleNames = ruleNames.ToArray();
    this.vocabulary = vocabulary;

    // identify the ATN states where pushNewRecursionContext must be called
    this.pushRecursionContextStates = new BitSet(atn.states.Count);
    foreach (ATNState state in atn.states)
    {
        if (!(state is StarLoopEntryState))
        {
            continue;
        }
        if (((StarLoopEntryState)state).isPrecedenceDecision)
        {
            this.pushRecursionContextStates.Set(state.stateNumber);
        }
    }

    // init decision DFA
    int numberOfDecisions = atn.NumberOfDecisions;
    this._decisionToDFA = new Dfa.DFA[numberOfDecisions];
    for (int i = 0; i < numberOfDecisions; i++)
    {
        DecisionState decisionState = atn.GetDecisionState(i);
        _decisionToDFA[i] = new Dfa.DFA(decisionState, i);
    }

    // get atn simulator that knows how to do predictions
    Interpreter = new ParserATNSimulator(this, atn, _decisionToDFA, null);
}
public ParserInterpreter(string grammarFileName, IVocabulary vocabulary, IEnumerable<string> ruleNames, ATN atn, ITokenStream input)
    : base(input)
{
    this.grammarFileName = grammarFileName;
    this.atn = atn;
#pragma warning disable 612 // Type or member is obsolete
    this.tokenNames = new string[atn.maxTokenType];
    for (int i = 0; i < tokenNames.Length; i++)
    {
        tokenNames[i] = vocabulary.GetDisplayName(i);
    }
#pragma warning restore 612 // Type or member is obsolete
    this.ruleNames = ruleNames.ToArray();
    this.vocabulary = vocabulary;

    // identify the ATN states where pushNewRecursionContext() must be called
    this.pushRecursionContextStates = new BitSet(atn.states.Count);
    foreach (ATNState state in atn.states)
    {
        if (!(state is StarLoopEntryState))
        {
            continue;
        }
        if (((StarLoopEntryState)state).precedenceRuleDecision)
        {
            this.pushRecursionContextStates.Set(state.stateNumber);
        }
    }

    // get atn simulator that knows how to do predictions
    Interpreter = new ParserATNSimulator(this, atn);
}
/// <summary>
/// A copy constructor that creates a new parser interpreter by reusing
/// the fields of a previous interpreter.
/// </summary>
/// <remarks>
/// A copy constructor that creates a new parser interpreter by reusing
/// the fields of a previous interpreter.
/// </remarks>
/// <param name="old">The interpreter to copy</param>
/// <since>4.5</since>
public ParserInterpreter(Antlr4.Runtime.ParserInterpreter old)
    : base((ITokenStream)old.InputStream)
{
    this.grammarFileName = old.grammarFileName;
    this.atn = old.atn;
    this.pushRecursionContextStates = old.pushRecursionContextStates;
    this.tokenNames = old.tokenNames;
    this.ruleNames = old.ruleNames;
    this.vocabulary = old.vocabulary;
    Interpreter = new ParserATNSimulator(this, atn);
}
/// <summary>
/// A copy constructor that creates a new parser interpreter by reusing
/// the fields of a previous interpreter.
/// </summary>
/// <param name="old">The interpreter to copy</param>
/// <since>4.5</since>
public ParserInterpreter(Antlr4.Runtime.ParserInterpreter old)
    : base((ITokenStream)old.InputStream)
{
    this.grammarFileName = old.grammarFileName;
    this.atn = old.atn;
    this.pushRecursionContextStates = old.pushRecursionContextStates;
#pragma warning disable 612 // Type or member is obsolete
    this.tokenNames = old.tokenNames;
#pragma warning restore 612 // Type or member is obsolete
    this.ruleNames = old.ruleNames;
    this.vocabulary = old.vocabulary;
    Interpreter = new ParserATNSimulator(this, atn);
}
public ParserInterpreter(string grammarFileName, IVocabulary vocabulary, IEnumerable<string> ruleNames, ATN atn, ITokenStream input)
    : base(input)
{
    this._grammarFileName = grammarFileName;
    this._atn = atn;
    this._ruleNames = ruleNames.ToArray();
    this.vocabulary = vocabulary;

    // identify the ATN states where pushNewRecursionContext must be called
    this.pushRecursionContextStates = new BitSet(atn.states.Count);
    foreach (ATNState state in atn.states)
    {
        if (!(state is StarLoopEntryState))
        {
            continue;
        }
        if (((StarLoopEntryState)state).precedenceRuleDecision)
        {
            this.pushRecursionContextStates.Set(state.stateNumber);
        }
    }

    // get atn simulator that knows how to do predictions
    Interpreter = new ParserATNSimulator(this, atn);
}
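A minimal usage sketch of the interpreter constructors above. It is not taken from any of the snippets here; the names serializedATN, lexer, vocabulary, ruleNames, startRuleIndex, and the grammar file name "Sketch.g4" are assumed inputs, and it targets the Antlr4.Runtime API versions these examples come from.

// Hedged sketch: drive a ParserInterpreter built from a deserialized ATN.
// Assumes the caller supplies serializedATN, lexer, vocabulary, ruleNames,
// and startRuleIndex; "Sketch.g4" is a placeholder grammar file name.
using Antlr4.Runtime;
using Antlr4.Runtime.Atn;
using Antlr4.Runtime.Tree;

public static class ParserInterpreterSketch
{
    public static IParseTree ParseWithInterpreter(
        string serializedATN, Lexer lexer, IVocabulary vocabulary,
        string[] ruleNames, int startRuleIndex)
    {
        // Deserialize the ATN the grammar was compiled into.
        ATN atn = new ATNDeserializer().Deserialize(serializedATN.ToCharArray());

        // Feed the lexer's tokens to an interpreter built from that ATN.
        var tokens = new CommonTokenStream(lexer);
        var parser = new ParserInterpreter("Sketch.g4", vocabulary, ruleNames, atn, tokens);

        // Parse starting at the requested rule and return the resulting tree.
        return parser.Parse(startRuleIndex);
    }
}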
public AbcNotationParser(ITokenStream input) : base(input) { Interpreter = new ParserATNSimulator(this, _ATN); }
public ExpressionParser(ITokenStream input) : base(input) { _interp = new ParserATNSimulator(this, _ATN); }
public GramaticaParser(ITokenStream input) : base(input) { _interp = new ParserATNSimulator(this, _ATN); }
public SymbolicStringsParser(ITokenStream input) : base(input) { Interpreter = new ParserATNSimulator(this, _ATN); }
public Combined1Parser(ITokenStream input) : base(input) { _interp = new ParserATNSimulator(this, _ATN); }
public CalculatprParser(ITokenStream input) : base(input) { _interp = new ParserATNSimulator(this, _ATN); }
public RecogniseFormulasParser(ITokenStream input, List<int> col) : base(input) { _interp = new ParserATNSimulator(this, _ATN); this.col = col; }
public InsurancePolicyRulesParser(ITokenStream input) : base(input) { _interp = new ParserATNSimulator(this, _ATN); }
public JSONParser(ITokenStream input) : base(input) { Interpreter = new ParserATNSimulator(this, _ATN, decisionToDFA, sharedContextCache); }
public CodeQuery(ITokenStream input) : base(input) { Interpreter = new ParserATNSimulator(this, _ATN); }
public LeftRecursionGrammarParser(ITokenStream input) : base(input) { _interp = new ParserATNSimulator(this, _ATN); }
public HelloParser(ITokenStream input) : base(input) { Interpreter = new ParserATNSimulator(this, _ATN); }
public DialogicParser(ITokenStream input) : base(input) { _interp = new ParserATNSimulator(this, _ATN); }
public ITVScriptingParser(ITokenStream input, TextWriter output, TextWriter errorOutput) : base(input, output, errorOutput) { Interpreter = new ParserATNSimulator(this, _ATN, decisionToDFA, new PredictionContextCache()); }
protected void parseSources(ParserFactory factory, IEnumerable<InputDescriptor> sources)
{
    Stopwatch startTime = Stopwatch.StartNew();
    Thread.VolatileWrite(ref tokenCount, 0);
    int sourceCount = 0;
    int inputSize = 0;
#if NET40PLUS
    BlockingCollection<int> threadIdentifiers = new BlockingCollection<int>();
    for (int i = 0; i < NUMBER_OF_THREADS; i++)
    {
        threadIdentifiers.Add(i);
    }
    ICollection<Task<int>> results = new List<Task<int>>();
    QueuedTaskScheduler executorServiceHost = new QueuedTaskScheduler(NUMBER_OF_THREADS);
    TaskScheduler executorService = executorServiceHost.ActivateNewQueue();
#else
    ICollection<Func<int>> results = new List<Func<int>>();
#endif
    foreach (InputDescriptor inputDescriptor in sources)
    {
        ICharStream input = inputDescriptor.GetInputStream();
        sourceCount++;
        input.Seek(0);
        inputSize += input.Size;
#if NET40PLUS
        Task<int> futureChecksum = Task.Factory.StartNew<int>(new Callable_1(input, factory, threadIdentifiers).call, CancellationToken.None, TaskCreationOptions.None, executorService);
#else
        Func<int> futureChecksum = new Callable_1(input, factory).call;
#endif
        results.Add(futureChecksum);
    }
    Checksum checksum = new CRC32();
    foreach (var future in results)
    {
#if NET40PLUS
        int value = future.Result;
#else
        int value = future();
#endif
        if (COMPUTE_CHECKSUM)
        {
            updateChecksum(checksum, value);
        }
    }
#if NET40PLUS
    executorServiceHost.Dispose();
#endif
    Console.Out.WriteLine("Total parse time for {0} files ({1} KB, {2} tokens, checksum 0x{3:X8}): {4}ms", sourceCount, inputSize / 1024, Thread.VolatileRead(ref tokenCount), COMPUTE_CHECKSUM ? checksum.Value : 0, startTime.ElapsedMilliseconds);
    if (sharedLexers.Length > 0)
    {
        Lexer lexer = sharedLexers[0];
        LexerATNSimulator lexerInterpreter = lexer.Interpreter;
        DFA[] modeToDFA = lexerInterpreter.atn.modeToDFA;
        if (SHOW_DFA_STATE_STATS)
        {
            int states = 0;
            int configs = 0;
            HashSet<ATNConfig> uniqueConfigs = new HashSet<ATNConfig>();
            for (int i = 0; i < modeToDFA.Length; i++)
            {
                DFA dfa = modeToDFA[i];
                if (dfa == null || dfa.states == null)
                {
                    continue;
                }
                states += dfa.states.Count;
                foreach (DFAState state in dfa.states.Values)
                {
                    configs += state.configs.Count;
                    uniqueConfigs.UnionWith(state.configs);
                }
            }
            Console.Out.WriteLine("There are {0} lexer DFAState instances, {1} configs ({2} unique), {3} prediction contexts.", states, configs, uniqueConfigs.Count, lexerInterpreter.atn.ContextCacheSize);
        }
    }
    if (RUN_PARSER && sharedParsers.Length > 0)
    {
        Parser parser = sharedParsers[0];
        // make sure the individual DFAState objects actually have unique ATNConfig arrays
        ParserATNSimulator interpreter = parser.Interpreter;
        DFA[] decisionToDFA = interpreter.atn.decisionToDFA;
        if (SHOW_DFA_STATE_STATS)
        {
            int states = 0;
            int configs = 0;
            HashSet<ATNConfig> uniqueConfigs = new HashSet<ATNConfig>();
            for (int i = 0; i < decisionToDFA.Length; i++)
            {
                DFA dfa = decisionToDFA[i];
                if (dfa == null || dfa.states == null)
                {
                    continue;
                }
                states += dfa.states.Count;
                foreach (DFAState state in dfa.states.Values)
                {
                    configs += state.configs.Count;
                    uniqueConfigs.UnionWith(state.configs);
                }
            }
            Console.Out.WriteLine("There are {0} parser DFAState instances, {1} configs ({2} unique), {3} prediction contexts.", states, configs, uniqueConfigs.Count, interpreter.atn.ContextCacheSize);
        }
        int localDfaCount = 0;
        int globalDfaCount = 0;
        int localConfigCount = 0;
        int globalConfigCount = 0;
        int[] contextsInDFAState = new int[0];
        for (int i = 0; i < decisionToDFA.Length; i++)
        {
            DFA dfa = decisionToDFA[i];
            if (dfa == null || dfa.states == null)
            {
                continue;
            }
            if (SHOW_CONFIG_STATS)
            {
                foreach (DFAState state in dfa.states.Keys)
                {
                    if (state.configs.Count >= contextsInDFAState.Length)
                    {
                        Array.Resize(ref contextsInDFAState, state.configs.Count + 1);
                    }
                    if (state.IsAcceptState)
                    {
                        bool hasGlobal = false;
                        foreach (ATNConfig config in state.configs)
                        {
                            if (config.ReachesIntoOuterContext)
                            {
                                globalConfigCount++;
                                hasGlobal = true;
                            }
                            else
                            {
                                localConfigCount++;
                            }
                        }
                        if (hasGlobal)
                        {
                            globalDfaCount++;
                        }
                        else
                        {
                            localDfaCount++;
                        }
                    }
                    contextsInDFAState[state.configs.Count]++;
                }
            }
            if (EXPORT_LARGEST_CONFIG_CONTEXTS)
            {
                foreach (DFAState state in dfa.states.Keys)
                {
                    foreach (ATNConfig config in state.configs)
                    {
                        string configOutput = config.ToDotString();
                        if (configOutput.Length <= configOutputSize)
                        {
                            continue;
                        }
                        configOutputSize = configOutput.Length;
                        writeFile(tmpdir, "d" + dfa.decision + ".s" + state.stateNumber + ".a" + config.Alt + ".config.dot", configOutput);
                    }
                }
            }
        }
        if (SHOW_CONFIG_STATS && currentPass == 0)
        {
            Console.Out.WriteLine(" DFA accept states: {0} total, {1} with only local context, {2} with a global context", localDfaCount + globalDfaCount, localDfaCount, globalDfaCount);
            Console.Out.WriteLine(" Config stats: {0} total, {1} local, {2} global", localConfigCount + globalConfigCount, localConfigCount, globalConfigCount);
            if (SHOW_DFA_STATE_STATS)
            {
                for (int i = 0; i < contextsInDFAState.Length; i++)
                {
                    if (contextsInDFAState[i] != 0)
                    {
                        Console.Out.WriteLine(" {0} configs = {1}", i, contextsInDFAState[i]);
                    }
                }
            }
        }
    }
}
public NumberRangesParser(ITokenStream input) : base(input) { _interp = new ParserATNSimulator(this, _ATN); }
public RecogniseFormulasParser(ITokenStream input) : base(input) { _interp = new ParserATNSimulator(this, _ATN); }
public IrbisQueryParser(ITokenStream input) : base(input) { _interp = new ParserATNSimulator(this, _ATN); }
public DensityExpressionGrammarParser(ITokenStream input) : base(input) { _interp = new ParserATNSimulator(this, _ATN); }
public VBALikeParser(ITokenStream input) : base(input) { _interp = new ParserATNSimulator(this, _ATN); }
public SpeakParser(ITokenStream input, TextWriter output, TextWriter errorOutput) : base(input, output, errorOutput) { Interpreter = new ParserATNSimulator(this, _ATN, decisionToDFA, sharedContextCache); }
public StringGrammarParser(ITokenStream input) : base(input) { _interp = new ParserATNSimulator(this, _ATN); }
public MessageScriptParser(ITokenStream input) : base(input) { _interp = new ParserATNSimulator(this, _ATN); }
public DotNetIlTypeNameParser(ITokenStream input) : base(input) { _interp = new ParserATNSimulator(this, _ATN); }