/// <summary>
/// Builds the rule invocation graph for <paramref name="recognizer"/> by
/// deserializing its ATN and recording every rule-to-rule transition.
/// Returns <c>null</c> when the recognizer exposes no serialized ATN.
/// </summary>
private static RuleDependencyChecker.RuleRelations ExtractRuleRelations(Type recognizer)
{
    string serializedATN = GetSerializedATN(recognizer);
    if (serializedATN == null)
    {
        return null;
    }

    ATN atn = new ATNDeserializer().Deserialize(serializedATN.ToCharArray());
    var relations = new RuleDependencyChecker.RuleRelations(atn.ruleToStartState.Length);
    foreach (ATNState state in atn.states)
    {
        // Rule transitions are epsilon transitions, so states with any
        // non-epsilon transition cannot contain one and are skipped.
        if (!state.epsilonOnlyTransitions)
        {
            continue;
        }

        foreach (Transition transition in state.GetTransitions())
        {
            if (transition.TransitionType == TransitionType.Rule)
            {
                var ruleTransition = (RuleTransition)transition;
                relations.AddRuleInvocation(state.ruleIndex, ruleTransition.target.ruleIndex);
            }
        }
    }

    return relations;
}
/// <summary>
/// Returns the cached ATN for the current <c>Language</c>, deserializing
/// and re-caching it when absent or when <paramref name="clear"/> forces a
/// rebuild. Picks the lexer or parser cache based on <paramref name="lexer"/>.
/// </summary>
private ATN GetOrCreateAtn(bool lexer, bool clear = false)
{
    ConcurrentDictionary<Language, ATN> cache = lexer ? lexerAtns : parserAtns;
    ATN atn;
    bool missing = !cache.TryGetValue(Language, out atn);
    if (missing || clear)
    {
        string serialized = lexer ? LexerSerializedATN : ParserSerializedATN;
        atn = new ATNDeserializer().Deserialize(serialized.ToCharArray());
        // Last write wins; a racing duplicate deserialization is harmless.
        cache[Language] = atn;
    }
    return atn;
}
/// <summary>
/// Returns an ATN specialized to the given open/close delimiter pair,
/// cloning the template ATN and rewriting its transitions on first use.
/// Results are cached per delimiter pair.
/// </summary>
private static ATN GetATNForDelimiters(char openDelimiter, char closeDelimiter)
{
    // Pack both delimiters into one cache key: open char in the high
    // 16 bits, close char in the low 16 bits.
    int key = (openDelimiter << 16) + (closeDelimiter & 0xFFFF);
    ATN cached;
    if (DelimiterToAtn.TryGetValue(key, out cached))
    {
        return cached;
    }

    ATN atn = new ATNDeserializer().Deserialize(_serializedATN.ToCharArray());
    foreach (ATNState state in atn.states)
    {
        if (state == null)
        {
            continue;
        }

        // Rewrite the regular transitions in place where needed.
        for (int i = 0; i < state.NumberOfTransitions; i++)
        {
            Transition replacement = updateTransition(state.Transition(i), openDelimiter, closeDelimiter);
            if (replacement != null)
            {
                state.SetTransition(i, replacement);
            }
        }

        // Optimized transitions live in a separate list and must be
        // rewritten as well, or the optimized path would keep stale targets.
        if (state.IsOptimized)
        {
            for (int i = 0; i < state.NumberOfOptimizedTransitions; i++)
            {
                Transition replacement = updateTransition(state.GetOptimizedTransition(i), openDelimiter, closeDelimiter);
                if (replacement != null)
                {
                    state.SetOptimizedTransition(i, replacement);
                }
            }
        }
    }

    DelimiterToAtn[key] = atn;
    return atn;
}
/// <summary>
/// Looks up — or lazily deserializes and caches — the ATN for the current
/// <c>Language</c>, using the lexer or parser cache depending on whether
/// this instance is an <c>AntlrLexer</c>.
/// </summary>
protected ATN GetOrCreateAtn(string atnText)
{
    bool lexer = this is AntlrLexer;
    var cache = lexer ? AntlrLexer.Atns : AntlrParser.Atns;
    ATN atn;
    // The cache is shared across instances, so guard lookup + insert.
    lock (cache)
    {
        if (!cache.TryGetValue(Language, out atn))
        {
            atn = new ATNDeserializer().Deserialize(atnText.ToCharArray());
            cache.Add(Language, atn);
            Logger.LogDebug($"New ATN initialized for {Language} {(lexer ? "lexer" : "parser")}.");
        }
    }
    return atn;
}
/// <summary>
/// Returns an ATN deserialized with rule-bypass transitions generated,
/// caching the result per serialized-ATN string.
/// </summary>
/// <exception cref="NotSupportedException">
/// Thrown when the parser exposes no serialized ATN.
/// </exception>
public virtual ATN GetATNWithBypassAlts()
{
    string serializedAtn = SerializedAtn;
    if (serializedAtn == null)
    {
        throw new NotSupportedException("The current parser does not support an ATN with bypass alternatives.");
    }

    lock (bypassAltsAtnCache)
    {
        ATN cached = bypassAltsAtnCache.Get(serializedAtn);
        if (cached != null)
        {
            return cached;
        }

        ATNDeserializationOptions options = new ATNDeserializationOptions();
        options.SetGenerateRuleBypassTransitions(true);
        ATN created = new ATNDeserializer(options).Deserialize(serializedAtn.ToCharArray());
        bypassAltsAtnCache.Put(serializedAtn, created);
        return created;
    }
}
/// <summary>
/// Loads the generated lexer/parser assembly from the temp directory,
/// primes their ATNs, optionally pre-deserializes one ATN per worker
/// thread (when DFA reuse is disabled), and returns a factory that
/// constructs recognizer instances reflectively.
/// </summary>
protected ParserFactory getParserFactory(string lexerName, string parserName, string listenerName, string entryPoint)
{
    Assembly loader = Assembly.LoadFile(Path.Combine(tmpdir, "Parser.dll"));
    Type lexerClass = loader.GetType(lexerName);
    Type parserClass = loader.GetType(parserName);
    Type listenerClass = loader.GetType(listenerName);
    ConstructorInfo lexerCtor = lexerClass.GetConstructor(new Type[] { typeof(ICharStream) });
    ConstructorInfo parserCtor = parserClass.GetConstructor(new Type[] { typeof(ITokenStream) });

    // construct initial instances of the lexer and parser to deserialize their ATNs
    ITokenSource tokenSource = (ITokenSource)lexerCtor.Invoke(new object[] { new AntlrInputStream("") });
    parserCtor.Invoke(new object[] { new CommonTokenStream(tokenSource) });

    if (!REUSE_LEXER_DFA)
    {
        // Give each worker thread its own lexer ATN so DFAs are not shared.
        string lexerSerializedATN = (string)lexerClass.GetField("_serializedATN").GetValue(null);
        for (int i = 0; i < NUMBER_OF_THREADS; i++)
        {
            sharedLexerATNs[i] = new ATNDeserializer().Deserialize(lexerSerializedATN.ToCharArray());
        }
    }

    if (RUN_PARSER && !REUSE_PARSER_DFA)
    {
        // Same treatment for the parser ATNs.
        string parserSerializedATN = (string)parserClass.GetField("_serializedATN").GetValue(null);
        for (int i = 0; i < NUMBER_OF_THREADS; i++)
        {
            sharedParserATNs[i] = new ATNDeserializer().Deserialize(parserSerializedATN.ToCharArray());
        }
    }

    return new ParserFactory_1(listenerClass, parserClass, lexerCtor, parserCtor, entryPoint);
}
/// <summary>
/// Derive a new parser from an old one that has knowledge of the grammar.
/// The <see cref="Grammar"/> object is used to correctly compute outer
/// alternative numbers for parse tree nodes. A parser of the same type is
/// created for subclasses of <see cref="ParserInterpreter"/>.
/// </summary>
/// <param name="g">Grammar supplying outer-alternative information.</param>
/// <param name="originalParser">Parser whose ATN/vocabulary is reused.</param>
/// <param name="tokens">Token stream the derived parser will consume.</param>
/// <returns>A throwaway interpreter configured to fail fast and silently.</returns>
/// <exception cref="ArgumentException">
/// Thrown when a ParserInterpreter subclass cannot be instantiated reflectively.
/// </exception>
public static ParserInterpreter DeriveTempParserInterpreter(Grammar g, Parser originalParser, ITokenStream tokens)
{
    ParserInterpreter parser;
    if (originalParser is ParserInterpreter)
    {
        // Preserve the concrete interpreter subclass by instantiating it
        // reflectively; reuse `c` rather than calling GetType() twice.
        Type c = originalParser.GetType();
        try
        {
            parser = (ParserInterpreter)Activator.CreateInstance(c, g, originalParser.Atn, originalParser.InputStream);
        }
        catch (Exception e)
        {
            throw new ArgumentException("can't create parser to match incoming " + c.Name, e);
        }
    }
    else
    {
        // must've been a generated parser: round-trip the ATN through
        // serialization to obtain an independent, interpreter-compatible copy.
        char[] serializedAtn = ATNSerializer.GetSerializedAsChars(originalParser.Atn, originalParser.RuleNames);
        ATN deserialized = new ATNDeserializer().Deserialize(serializedAtn);
        parser = new ParserInterpreter(originalParser.GrammarFileName, originalParser.Vocabulary, originalParser.RuleNames, deserialized, tokens);
    }

    parser.SetInputStream(tokens);

    // Make sure that we don't get any error messages from using this temporary parser
    parser.ErrorHandler = new BailErrorStrategy();
    parser.RemoveErrorListeners();
    parser.RemoveParseListeners();
    parser.Interpreter.PredictionMode = PredictionMode.LlExactAmbigDetection;
    return parser;
}