public static void Main(string[] args)
{
    // Read the polynomial either from the file named on the command line,
    // or from standard input when no argument was given.
    ICharStream charStream = args.Length > 0
        ? (ICharStream)new ANTLRFileStream(args[0])
        : new ANTLRReaderStream(Console.In);

    // BUILD AST
    var lexer = new PolyLexer(charStream);
    var tokenStream = new CommonTokenStream(lexer);
    var polyParser = new PolyParser(tokenStream);
    PolyParser.poly_return parsed = polyParser.poly();
    Console.Out.WriteLine("tree=" + ((ITree)parsed.Tree).ToStringTree());

    // DIFFERENTIATE
    var diffNodes = new CommonTreeNodeStream(parsed.Tree);
    diffNodes.TokenStream = tokenStream;
    var differentiator = new PolyDifferentiator(diffNodes);
    PolyDifferentiator.poly_return derivative = differentiator.poly();
    Console.Out.WriteLine("d/dx=" + ((ITree)derivative.Tree).ToStringTree());

    // SIMPLIFY / NORMALIZE
    var simplifyNodes = new CommonTreeNodeStream(derivative.Tree);
    simplifyNodes.TokenStream = tokenStream;
    var simplifier = new Simplifier(simplifyNodes);
    Simplifier.poly_return simplified = simplifier.poly();
    Console.Out.WriteLine("simplified=" + ((ITree)simplified.Tree).ToStringTree());

    // CONVERT BACK TO POLYNOMIAL
    var printNodes = new CommonTreeNodeStream(simplified.Tree);
    printNodes.TokenStream = tokenStream;
    var polyPrinter = new PolyPrinter(printNodes);
    PolyPrinter.poly_return printed = polyPrinter.poly();
    Console.Out.WriteLine(printed.ST);
}
/// <summary>
/// Lexes and parses <paramref name="document"/>, builds the AST, runs additional error
/// checking, and — when the script is error free — assigns variables, collects user
/// methods, and translates the rules. All diagnostics gathered along the way are
/// returned in the resulting <see cref="ParserData"/>.
/// </summary>
public static ParserData GetParser(string document, Pos documentPos)
{
    AntlrInputStream inputStream = new AntlrInputStream(document);

    // Lexer
    DeltinScriptLexer lexer = new DeltinScriptLexer(inputStream);
    CommonTokenStream commonTokenStream = new CommonTokenStream(lexer);

    // Parse, routing syntax errors through our own listener instead of the console.
    DeltinScriptParser parser = new DeltinScriptParser(commonTokenStream);
    var errorListener = new ErrorListener();
    parser.RemoveErrorListeners();
    parser.AddErrorListener(errorListener);
    DeltinScriptParser.RulesetContext ruleSetContext = parser.ruleset();

    List<Diagnostic> diagnostics = new List<Diagnostic>();
    diagnostics.AddRange(errorListener.Errors);

    // Get the ruleset node — only when the syntax was clean.
    BuildAstVisitor bav = null;
    RulesetNode ruleSetNode = null;
    if (diagnostics.Count == 0)
    {
        bav = new BuildAstVisitor(documentPos, diagnostics);
        ruleSetNode = (RulesetNode)bav.Visit(ruleSetContext);
    }

    VarCollection varCollection = null;
    ScopeGroup root = null;
    List<UserMethod> userMethods = null;
    Rule[] rules = null;
    bool success = false;

    // Extra semantic checks may add further diagnostics before translation.
    AdditionalErrorChecking aec = new AdditionalErrorChecking(parser, diagnostics);
    aec.Visit(ruleSetContext);

    bool parse = diagnostics.Count == 0;
    if (parse)
    {
        varCollection = new VarCollection();
        root = new ScopeGroup();
        userMethods = new List<UserMethod>();

        foreach (var definedVar in ruleSetNode.DefinedVars)
        {
            varCollection.AssignDefinedVar(root, definedVar.IsGlobal, definedVar.VariableName, definedVar.Range);
        }

        // Get the user methods.
        foreach (var userMethod in ruleSetNode.UserMethods)
        {
            userMethods.Add(new UserMethod(userMethod));
        }

        // Parse the rules. Snapshot the user-method array once — the previous code
        // rebuilt it with ToArray() on every loop iteration for no benefit.
        UserMethod[] userMethodArray = userMethods.ToArray();
        rules = new Rule[ruleSetNode.Rules.Length];
        for (int i = 0; i < rules.Length; i++)
        {
            try
            {
                var result = Translate.GetRule(ruleSetNode.Rules[i], root, varCollection, userMethodArray);
                rules[i] = result.Rule;
                diagnostics.AddRange(result.Diagnostics);
            }
            catch (SyntaxErrorException ex)
            {
                // Surface translation failures as diagnostics rather than crashing.
                diagnostics.Add(new Diagnostic(ex.Message, ex.Range) { severity = Diagnostic.Error });
            }
        }
        success = true;
    }

    return new ParserData()
    {
        Parser = parser,
        RulesetContext = ruleSetContext,
        RuleSetNode = ruleSetNode,
        Bav = bav,
        Diagnostics = diagnostics,
        Rules = rules,
        UserMethods = userMethods?.ToArray(),
        Root = root,
        Success = success,
        VarCollection = varCollection
    };
}
/// <summary>
/// Splits the text of an EPL module into individual statement items. The text is lexed
/// (no parsing beyond tokenization happens here) and the token stream is cut at each
/// semicolon that is not on the skipped-semicolon list, recording each statement's
/// starting line and character range. Returns null if the module text cannot be read.
/// </summary>
public static IList <EPLModuleParseItem> Parse(String module)
{
    ICharStream input;
    try {
        input = new NoCaseSensitiveStream(module);
    } catch (IOException ex) {
        Log.Error("Exception reading module expression: " + ex.Message, ex);
        return(null);
    }

    var lex = ParseHelper.NewLexer(input);
    var tokens = new CommonTokenStream(lex);
    try {
        // Force the lexer to tokenize the entire module up front so lexing
        // failures surface here rather than during the statement split below.
        tokens.Fill();
    } catch (Exception ex) {
        // Build the most specific message we can for the failure mode observed.
        String message = "Unexpected exception recognizing module text";
        if (ex is LexerNoViableAltException) {
            if (ParseHelper.HasControlCharacters(module)) {
                message = "Unrecognized control characters found in text, failed to parse text";
            } else {
                message += ", recognition failed for " + ex;
            }
        } else if (ex is RecognitionException) {
            var recog = (RecognitionException)ex;
            message += ", recognition failed for " + recog;
        } else if (!string.IsNullOrWhiteSpace(ex.Message)) {
            message += ": " + ex.Message;
        }
        message += " [" + module + "]";
        Log.Error(message, ex);
        throw new ParseException(message);
    }

    var statements = new List <EPLModuleParseItem>();
    var current = new StringWriter();       // accumulates the text of the statement in progress
    int? lineNum = null;                    // line of the first non-whitespace token of the current statement
    int charPosStart = 0;                   // character offset where the current statement starts
    int charPos = 0;                        // running character offset across the whole module
    var tokenList = tokens.GetTokens();
    // Semicolons at these indexes are part of a statement (e.g. inside a script)
    // and must NOT terminate it — presumably what GetSkippedSemicolons detects.
    var skippedSemicolonIndexes = GetSkippedSemicolons(tokenList);

    int index = -1;
    foreach (var token in tokenList.TakeWhile(t => t.Type != EsperEPL2GrammarParser.Eof)) {
        index++;
        var t = token;
        bool semi = t.Type == EsperEPL2GrammarParser.SEMI && !skippedSemicolonIndexes.Contains(index);
        if (semi) {
            // Statement boundary: emit the accumulated text (if non-blank) and reset.
            if (current.ToString().Trim().Length > 0) {
                statements.Add(
                    new EPLModuleParseItem(
                        current.ToString().Trim(),
                        lineNum ?? 0, charPosStart, charPos));
                lineNum = null;
            }
            current = new StringWriter();
        } else {
            // Latch the start position on the first non-whitespace token.
            if ((lineNum == null) && (t.Type != EsperEPL2GrammarParser.WS)) {
                lineNum = t.Line;
                charPosStart = charPos;
            }
            if (t.Type != EsperEPL2GrammarLexer.Eof) {
                current.Write(t.Text);
                charPos += t.Text.Length;
            }
        }
    }

    // Trailing statement without a terminating semicolon.
    // NOTE(review): char positions are reported as 0,0 here, unlike the loop above — confirm intended.
    if (!string.IsNullOrWhiteSpace(current.ToString())) {
        statements.Add(new EPLModuleParseItem(current.ToString().Trim(), lineNum ??
                                              0, 0, 0));
    }
    return(statements);
}
do_parse ( string Command )
{
    Command_Line_Options parsed_command;

    //
    // Declare the various streams of characters and tokens for parsing the input
    //
    var input_stream = new AntlrInputStream(Command);  // Create a stream that reads from the command line
    var lexer = new CLILexer(input_stream);            // Create a lexer that feeds off of the input stream
    var tokens = new CommonTokenStream(lexer);         // Create a buffer of tokens pulled from the lexer
    var parser = new CLIParser(tokens);                // Create a parser that feeds off of the token buffer
    var tree = parser.start();                         // Call the start rule in the grammar to build a parse tree from the input
    var my_listeners = new CLI_Listener_Overrides();   // Instantiate my listener override functions so they can be used by ParseTreeWalker

    //
    // Walk the parse tree and call all the listeners
    //
    ParseTreeWalker.Default.Walk(my_listeners, tree);

    //
    // Complex command strings may be placed in a script file and referenced using the "@file.txt" syntax. If a script file was specified,
    // then parse the contents of the file (recursively, through this same routine)
    //
    if (my_listeners.parsed_command_line.script_file.Length != 0)
    {
        List<string> script_file = Program.find_files(my_listeners.parsed_command_line.script_file);
        parsed_command = Command_Parser.do_parse(File.ReadAllText(script_file.First()));
    }
    else
    {
        parsed_command = my_listeners.parsed_command_line;
    }

    //
    // /HELP shouldn't be specified with any other qualifiers or input files
    //
    if (parsed_command.help &&
        (parsed_command.all || parsed_command.exclude_dlls.Count != 0 || parsed_command.exports ||
         parsed_command.file_names.Count != 0 || parsed_command.imports || parsed_command.output.Length != 0 ||
         parsed_command.recurse))
    {
        throw new ArgumentException("/HELP is not valid with any other qualifiers or input file(s)");
    }

    //
    // All other command formats require input file(s)
    //
    if (parsed_command.file_names.Count == 0 && !parsed_command.help)
    {
        throw new ArgumentException("Input file(s) not specified; try /HELP for more info");
    }

    //
    // /EXCLUDE_DLLS is not compatible with /INCLUDE_DLLS
    //
    if (parsed_command.exclude_dlls.Count != 0 && parsed_command.include_dlls.Count != 0)
    {
        throw new ArgumentException("/EXCLUDE_DLLS and /INCLUDE_DLLS are mutually exclusive; try /HELP for more info");
    }

    //
    // Ensure that either /EXPORTS or /IMPORTS (or both) was specified, when /HELP isn't specified
    //
    if (!parsed_command.help && (!parsed_command.exports && !parsed_command.imports))
    {
        throw new ArgumentException("/EXPORTS or /IMPORTS (or both) must be specified; try /HELP for more info");
    }

    //
    // Ensure that if /OUTPUT is specified, that /GENERATE is also specified
    //
    if (parsed_command.output.Length != 0 && !parsed_command.generate)
    {
        throw new ArgumentException("/OUTPUT requires that /GENERATE also be specified; try /HELP for more info");
    }

    //
    // If /EXCLUDE_DLLS was specified, then convert the list of files (they may contain wildcards) to regular expressions.
    // The regular expressions are compiled, because the comparison will typically happen frequently
    //
    if (parsed_command.exclude_dlls.Count != 0)
    {
        // BUG FIX: the Distinct() result was previously discarded (LINQ does not modify
        // its source), so duplicates were never actually removed. Assign it back.
        parsed_command.exclude_dlls = parsed_command.exclude_dlls.Distinct().ToList();
        var regex_list = new List<Regex>();

        //
        // Look at each file name specified
        //
        foreach (string name in parsed_command.exclude_dlls)
        {
            var path = Path.GetDirectoryName(name);

            //
            // Ensure that there isn't a path on the file name
            //
            if (path.Length == 0)
            {
                regex_list.Add(new Regex(Program.wildcard_to_regexp(name), RegexOptions.IgnoreCase | RegexOptions.Compiled));
            }
            else
            {
                throw new ArgumentException("Path not allowed on /EXCLUDE_DLLS file names; try /HELP for more info");
            }
        }
        parsed_command.excluded_dlls_regex = regex_list;
    }

    //
    // If /INCLUDE_DLLS was specified, then convert the list of files (they may contain wildcards) to regular expressions.
    // The regular expressions are compiled, because the comparison will typically happen frequently
    //
    if (parsed_command.include_dlls.Count != 0)
    {
        // BUG FIX: same discarded-Distinct() defect as for /EXCLUDE_DLLS above.
        parsed_command.include_dlls = parsed_command.include_dlls.Distinct().ToList();
        var regex_list = new List<Regex>();

        //
        // Look at each file name specified
        //
        foreach (string name in parsed_command.include_dlls)
        {
            var path = Path.GetDirectoryName(name);

            //
            // Ensure that there isn't a path on the file name
            //
            if (path.Length == 0)
            {
                regex_list.Add(new Regex(Program.wildcard_to_regexp(name), RegexOptions.IgnoreCase | RegexOptions.Compiled));
            }
            else
            {
                throw new ArgumentException($"Path not allowed on /INCLUDE_DLLS file name {name}; try /HELP for more info");
            }
        }
        parsed_command.included_dlls_regex = regex_list;
    }

    //
    // If /DATABASE was not specified, then set the default
    //
    if (parsed_command.database.Length == 0)
    {
        var path = Path.GetFullPath(".");
        parsed_command.database = Path.Combine(path, "Win32API.accdb");
    }
    else
    {
        //
        // Take the specified database file/path, which may contain relative paths, and convert it to an absolute path
        //
        var path = Path.GetFullPath(parsed_command.database);

        //
        // If a directory was specified but not a database file, then set the database file name to the default.
        // NOTE(review): File.GetAttributes throws if the path does not exist — confirm callers guarantee existence.
        //
        var file_attrs = File.GetAttributes(path);
        if ((file_attrs & FileAttributes.Directory) != 0)
        {
            path = Path.Combine(path, "Win32API.accdb");
        }
        parsed_command.database = path;
    }

    //
    // If /OUTPUT was not specified, then set the default
    //
    if (parsed_command.output.Length == 0)
    {
        parsed_command.output = "TraceAPI.cpp";
    }
    else
    {
        //
        // Take the specified output file/path, which may contain relative paths, and convert it to an absolute path
        //
        var path = Path.GetFullPath(parsed_command.output);

        //
        // If a directory was specified but not an output file, then set the output file name to the default
        //
        var file_attrs = File.GetAttributes(path);
        if ((file_attrs & FileAttributes.Directory) != 0)
        {
            path = Path.Combine(path, "TraceAPI.cpp");
        }
        parsed_command.output = path;
    }

    //
    // Display the results of the parse
    //
    if (parsed_command.verbose)
    {
        Console.WriteLine($"Command line: {Command}");
        Console.WriteLine($"All:\t\t{parsed_command.all}");
        Console.WriteLine($"Database:\t{parsed_command.database}");
        Console.WriteLine($"Exclude_dlls:\t{string.Join (", ", parsed_command.exclude_dlls)}");
        Console.WriteLine($"Exports:\t{parsed_command.exports}");
        Console.WriteLine($"Generate:\t{parsed_command.generate}");
        Console.WriteLine($"Help:\t\t{parsed_command.help}");
        Console.WriteLine($"Imports:\t{parsed_command.imports}");
        Console.WriteLine($"Include_dlls:\t{string.Join (", ", parsed_command.include_dlls)}");
        Console.WriteLine($"Output:\t\t{parsed_command.output}");
        Console.WriteLine($"Recurse:\t{parsed_command.recurse}");
        Console.WriteLine($"Verbose:\t{parsed_command.verbose}");
        Console.WriteLine($"Webscrape:\t{parsed_command.webscrape}");
        Console.WriteLine($"Input files:\t{string.Join (", ", parsed_command.file_names)}");
        Console.WriteLine();
    }
    return parsed_command;
} // End parse
/// <summary>
/// Computes the set of token types that could validly appear at the given (line, col)
/// position, by replaying the token stream up to that position, enumerating candidate
/// ATN parse paths from state 0, keeping only paths that consume all of the gathered
/// input, and collecting the labels of the non-epsilon transitions reachable from the
/// states on those paths.
/// </summary>
public IntervalSet Compute(Parser parser, CommonTokenStream token_stream, int line, int col)
{
    _input = new List <IToken>();
    _parser = parser;
    _token_stream = token_stream;

    // Cache every rule stop state and rule start state of the ATN for quick membership tests.
    _stop_states = new HashSet <ATNState>();
    foreach (ATNState s in parser.Atn.ruleToStopState.Select(t => parser.Atn.states[t.stateNumber]))
    {
        _stop_states.Add(s);
    }
    _start_states = new HashSet <ATNState>();
    foreach (ATNState s in parser.Atn.ruleToStartState.Select(t => parser.Atn.states[t.stateNumber]))
    {
        _start_states.Add(s);
    }

    // Collect all tokens from the stream start up to the caret position (or EOF),
    // restoring the stream's original position afterwards.
    int currentIndex = _token_stream.Index;
    _token_stream.Seek(0);
    int offset = 1;
    while (true)
    {
        IToken token = _token_stream.LT(offset++);
        _input.Add(token);
        _cursor = token.TokenIndex;
        if (token.Type == TokenConstants.EOF)
        {
            break;
        }
        // NOTE(review): 'Column >= col' only triggers on the same-or-later column even on
        // later lines — confirm this is the intended caret comparison.
        if (token.Line >= line && token.Column >= col)
        {
            break;
        }
    }
    _token_stream.Seek(currentIndex);

    // Enumerate candidate parse paths starting from ATN state 0 via an epsilon edge.
    List <List <Edge> > all_parses = EnterState(new Edge()
    {
        _index = 0, _index_at_transition = 0, _to = _parser.Atn.states[0], _type = TransitionType.EPSILON
    });

    // Remove last token on input.
    _input.RemoveAt(_input.Count - 1);

    // Eliminate all paths that don't consume all input.
    List <List <Edge> > temp = new List <List <Edge> >();
    if (all_parses != null)
    {
        foreach (List <Edge> p in all_parses)
        {
            //System.Console.Error.WriteLine(PrintSingle(p));
            if (Validate(p, _input))
            {
                temp.Add(p);
            }
        }
    }
    all_parses = temp;

    // Optional debug trace of the surviving paths.
    if (all_parses != null && _log_closure)
    {
        foreach (List <Edge> p in all_parses)
        {
            System.Console.Error.WriteLine("Path " + PrintSingle(p));
        }
    }

    // Union the labels of all non-epsilon transitions leaving any state reachable
    // from each surviving path; rule/predicate/wildcard transitions are skipped.
    IntervalSet result = new IntervalSet();
    if (all_parses != null)
    {
        foreach (List <Edge> p in all_parses)
        {
            HashSet <ATNState> set = ComputeSingle(p);
            if (_log_closure)
            {
                System.Console.Error.WriteLine("All states for path " + string.Join(" ", set.ToList()));
            }
            foreach (ATNState s in set)
            {
                foreach (Transition t in s.TransitionsArray)
                {
                    switch (t.TransitionType)
                    {
                    case TransitionType.RULE:
                        break;
                    case TransitionType.PREDICATE:
                        break;
                    case TransitionType.WILDCARD:
                        break;
                    default:
                        if (!t.IsEpsilon)
                        {
                            result.AddAll(t.Label);
                        }
                        break;
                    }
                }
            }
        }
    }
    return(result);
}
/// <summary>
/// Resolves a library-function AST node (e.g. "Xyz(...)", "some.Xyz(...)") into the
/// appropriate expression node and registers it in <paramref name="astExprNodeMap"/>.
/// Resolution is attempted in a fixed order: declared expression behind a class
/// identifier, extended built-in single-row function, plug-in single-row function,
/// the min/max special cases, plug-in aggregation, declared/alias expression,
/// script, table access, and finally a plain dot node (mapped/array property).
/// Each successful branch registers its node and returns immediately.
/// </summary>
public static void HandleLibFunc(
    CommonTokenStream tokenStream,
    EsperEPL2GrammarParser.LibFunctionContext ctx,
    ConfigurationInformation configurationInformation,
    EngineImportService engineImportService,
    IDictionary <ITree, ExprNode> astExprNodeMap,
    LazyAllocatedMap <ConfigurationPlugInAggregationMultiFunction, PlugInAggregationMultiFunctionFactory> plugInAggregations,
    string engineURI,
    ExpressionDeclDesc expressionDeclarations,
    ExprDeclaredService exprDeclaredService,
    IList <ExpressionScriptProvided> scriptExpressions,
    ContextDescriptor contextDescriptor,
    TableService tableService,
    StatementSpecRaw statementSpec,
    VariableService variableService)
{
    var model = GetModel(ctx, tokenStream);
    var duckType = configurationInformation.EngineDefaults.Expression.IsDuckTyping;
    var udfCache = configurationInformation.EngineDefaults.Expression.IsUdfCache;

    // handle "some.Xyz(...)" or "some.other.Xyz(...)" — a single chain element behind a
    // class identifier that is not actually a table name.
    if (model.ChainElements.Count == 1 &&
        model.OptionalClassIdent != null &&
        ASTTableExprHelper.CheckTableNameGetExprForProperty(tableService, model.OptionalClassIdent) == null)
    {
        var chainSpec = GetLibFunctionChainSpec(model.ChainElements[0], astExprNodeMap);

        // If the identifier matches a declared expression, chain off of that declaration.
        var declaredNodeX = ExprDeclaredHelper.GetExistsDeclaredExpr(
            model.OptionalClassIdent,
            Collections.GetEmptyList <ExprNode>(),
            expressionDeclarations.Expressions,
            exprDeclaredService,
            contextDescriptor);
        if (declaredNodeX != null)
        {
            var exprNode = new ExprDotNodeImpl(Collections.SingletonList(chainSpec), duckType, udfCache);
            exprNode.AddChildNode(declaredNodeX);
            ASTExprHelper.ExprCollectAddSubNodesAddParentNode(exprNode, ctx, astExprNodeMap);
            return;
        }

        // Otherwise build a two-element dot chain: identifier followed by the call.
        var chainX = new List <ExprChainedSpec>(2);
        chainX.Add(new ExprChainedSpec(model.OptionalClassIdent, Collections.GetEmptyList <ExprNode>(), true));
        chainX.Add(chainSpec);
        var dotNodeX = new ExprDotNodeImpl(
            chainX,
            configurationInformation.EngineDefaults.Expression.IsDuckTyping,
            configurationInformation.EngineDefaults.Expression.IsUdfCache);
        // Mark the statement as variable-reading if the dot chain resolves to a variable.
        if (dotNodeX.IsVariableOpGetName(variableService) != null)
        {
            statementSpec.HasVariables = true;
        }
        ASTExprHelper.ExprCollectAddSubNodesAddParentNode(dotNodeX, ctx, astExprNodeMap);
        return;
    }

    // try additional built-in single-row function
    var singleRowExtNode = engineImportService.ResolveSingleRowExtendedBuiltin(model.ChainElements[0].FuncName);
    if (singleRowExtNode != null)
    {
        if (model.ChainElements.Count == 1)
        {
            ASTExprHelper.ExprCollectAddSubNodesAddParentNode(singleRowExtNode, ctx, astExprNodeMap);
            return;
        }
        // Built-in call followed by further chained calls: wrap it in a dot node.
        var spec = new List <ExprChainedSpec>();
        var firstArgs = model.ChainElements[0].Args;
        var childExpressions = GetExprNodesLibFunc(firstArgs, astExprNodeMap);
        singleRowExtNode.AddChildNodes(childExpressions);
        AddChainRemainderFromOffset(model.ChainElements, 1, spec, astExprNodeMap);
        var dotNodeX = new ExprDotNodeImpl(
            spec,
            configurationInformation.EngineDefaults.Expression.IsDuckTyping,
            configurationInformation.EngineDefaults.Expression.IsUdfCache);
        dotNodeX.AddChildNode(singleRowExtNode);
        ASTExprHelper.ExprCollectAddSubNodesAddParentNode(dotNodeX, ctx, astExprNodeMap);
        return;
    }

    // try plug-in single-row function
    try
    {
        var firstFunctionX = model.ChainElements[0].FuncName;
        var firstFunctionIsProperty = !model.ChainElements[0].HasLeftParen;
        var classMethodPair = engineImportService.ResolveSingleRow(firstFunctionX);
        var spec = new List <ExprChainedSpec>();
        var firstArgs = model.ChainElements[0].Args;
        var childExpressions = GetExprNodesLibFunc(firstArgs, astExprNodeMap);
        spec.Add(
            new ExprChainedSpec(classMethodPair.Second.MethodName, childExpressions, firstFunctionIsProperty));
        AddChainRemainderFromOffset(model.ChainElements, 1, spec, astExprNodeMap);
        var plugin = new ExprPlugInSingleRowNode(
            firstFunctionX, classMethodPair.First, spec, classMethodPair.Second);
        ASTExprHelper.ExprCollectAddSubNodesAddParentNode(plugin, ctx, astExprNodeMap);
        return;
    }
    catch (EngineImportUndefinedException)
    {
        // Not a single-row function — fall through to the remaining resolution steps.
    }
    catch (EngineImportException e)
    {
        throw new IllegalStateException("Error resolving single-row function: " + e.Message, e);
    }

    // special case for min,max
    var firstFunction = model.ChainElements[0].FuncName;
    if ((firstFunction.ToLowerInvariant().Equals("max")) ||
        (firstFunction.ToLowerInvariant().Equals("min")) ||
        (firstFunction.ToLowerInvariant().Equals("fmax")) ||
        (firstFunction.ToLowerInvariant().Equals("fmin")))
    {
        var firstArgs = model.ChainElements[0].Args;
        HandleMinMax(firstFunction, firstArgs, astExprNodeMap);
        return;
    }

    // obtain chain with actual expressions
    IList <ExprChainedSpec> chain = new List <ExprChainedSpec>();
    AddChainRemainderFromOffset(model.ChainElements, 0, chain, astExprNodeMap);

    // add chain element for class INFO, if any; a leading class identifier disables DISTINCT.
    var distinct = model.ChainElements[0].Args != null && model.ChainElements[0].Args.DISTINCT() != null;
    if (model.OptionalClassIdent != null)
    {
        chain.Insert(
            0, new ExprChainedSpec(model.OptionalClassIdent, Collections.GetEmptyList <ExprNode>(), true));
        distinct = false;
    }
    firstFunction = chain[0].Name;

    // try plug-in aggregation function
    var aggregationNode = ASTAggregationHelper.TryResolveAsAggregation(
        engineImportService, distinct, firstFunction, plugInAggregations, engineURI);
    if (aggregationNode != null)
    {
        // The first chain element becomes the aggregation's arguments; any remainder
        // stays as a dot chain on top of the aggregation node.
        var firstSpec = chain.DeleteAt(0);
        aggregationNode.AddChildNodes(firstSpec.Parameters);
        ExprNode exprNode;
        if (chain.IsEmpty())
        {
            exprNode = aggregationNode;
        }
        else
        {
            exprNode = new ExprDotNodeImpl(chain, duckType, udfCache);
            exprNode.AddChildNode(aggregationNode);
        }
        ASTExprHelper.ExprCollectAddSubNodesAddParentNode(exprNode, ctx, astExprNodeMap);
        return;
    }

    // try declared or alias expression
    var declaredNode = ExprDeclaredHelper.GetExistsDeclaredExpr(
        firstFunction,
        chain[0].Parameters,
        expressionDeclarations.Expressions,
        exprDeclaredService,
        contextDescriptor);
    if (declaredNode != null)
    {
        chain.RemoveAt(0);
        ExprNode exprNode;
        if (chain.IsEmpty())
        {
            exprNode = declaredNode;
        }
        else
        {
            exprNode = new ExprDotNodeImpl(chain, duckType, udfCache);
            exprNode.AddChildNode(declaredNode);
        }
        ASTExprHelper.ExprCollectAddSubNodesAddParentNode(exprNode, ctx, astExprNodeMap);
        return;
    }

    // try script
    var scriptNode = ExprDeclaredHelper.GetExistsScript(
        configurationInformation.EngineDefaults.Scripts.DefaultDialect,
        chain[0].Name,
        chain[0].Parameters,
        scriptExpressions,
        exprDeclaredService);
    if (scriptNode != null)
    {
        chain.RemoveAt(0);
        ExprNode exprNode;
        if (chain.IsEmpty())
        {
            exprNode = scriptNode;
        }
        else
        {
            exprNode = new ExprDotNodeImpl(chain, duckType, udfCache);
            exprNode.AddChildNode(scriptNode);
        }
        ASTExprHelper.ExprCollectAddSubNodesAddParentNode(exprNode, ctx, astExprNodeMap);
        return;
    }

    // try table
    var tableInfo = ASTTableExprHelper.CheckTableNameGetLibFunc(
        tableService,
        engineImportService,
        plugInAggregations,
        engineURI,
        firstFunction,
        chain);
    if (tableInfo != null)
    {
        ASTTableExprHelper.AddTableExpressionReference(statementSpec, tableInfo.First);
        chain = tableInfo.Second;
        ExprNode exprNode;
        if (chain.IsEmpty())
        {
            exprNode = tableInfo.First;
        }
        else
        {
            exprNode = new ExprDotNodeImpl(chain, duckType, udfCache);
            exprNode.AddChildNode(tableInfo.First);
        }
        ASTExprHelper.ExprCollectAddSubNodesAddParentNode(exprNode, ctx, astExprNodeMap);
        return;
    }

    // Could be a mapped property with an expression-parameter "Mapped(expr)" or array property with an expression-parameter "Array(expr)".
    ExprDotNode dotNode;
    if (chain.Count == 1)
    {
        dotNode = new ExprDotNodeImpl(chain, false, false);
    }
    else
    {
        dotNode = new ExprDotNodeImpl(chain, duckType, udfCache);
    }
    ASTExprHelper.ExprCollectAddSubNodesAddParentNode(dotNode, ctx, astExprNodeMap);
}
/// <summary>
/// Compiles LSL source from <paramref name="input"/> to byte code text.
/// Pipeline: parse to AST, define symbols (Def), type-check (Types), analyze control
/// flow (Analyze, reporting functions where not all paths return), then generate code
/// (Gen) via the "ByteCode.stg" template group. Also dumps the AST in DOT form to
/// "ast.txt" as a debugging aid. Returns the byte code string, or null on any error
/// (errors are reported through the listener when one is attached).
/// </summary>
public string Compile(ICharStream input)
{
    try
    {
        // ---- Parse: source text -> AST ----
        LSLTreeAdaptor lslAdaptor = new LSLTreeAdaptor();
        LSLLexer lex = new LSLLexer(input);
        CommonTokenStream tokens = new CommonTokenStream(lex);
        LSLParser p = new LSLParser(tokens);
        p.TreeAdaptor = lslAdaptor;
        p.TraceDestination = _traceDestination;
        lex.TraceDestination = _traceDestination;
        LSLParser.prog_return r = p.prog();
        if (p.NumberOfSyntaxErrors > 0)
        {
            if (_listener != null)
            {
                _listener.Error(Convert.ToString(p.NumberOfSyntaxErrors) + " syntax error(s)");
            }
            return null;
        }
        CommonTree t = (CommonTree)r.Tree;

        // ---- Pass 1: symbol definition ----
        CommonTreeNodeStream nodes = new CommonTreeNodeStream(lslAdaptor, t);
        nodes.TokenStream = tokens;
        SymbolTable symtab = new SymbolTable(tokens, Defaults.SystemMethods.Values, DefaultConstants.Constants.Values);
        if (this.Listener != null)
        {
            symtab.StatusListener = this.Listener;
        }
        Def def = new Def(nodes, symtab);
        def.TraceDestination = _traceDestination;
        def.Downup(t);

        // ---- Pass 2: type checking ----
        CommonTreeNodeStream nodes2 = new CommonTreeNodeStream(lslAdaptor, t);
        nodes2.TokenStream = tokens;
        Types types = new Types(nodes2, symtab);
        types.TraceDestination = _traceDestination;
        types.Downup(t);
        if (_listener != null && _listener.HasErrors())
        {
            return null;
        }

        // Fresh node streams for the analysis and code-generation passes.
        CommonTreeNodeStream nodes4 = new CommonTreeNodeStream(lslAdaptor, t);
        nodes4.TokenStream = tokens;
        CommonTreeNodeStream nodes3 = new CommonTreeNodeStream(lslAdaptor, t);
        nodes3.TokenStream = tokens;

        // Load the code-generation templates; dispose the reader even on failure.
        StringTemplateGroup templates;
        using (TextReader fr = new StreamReader("ByteCode.stg"))
        {
            templates = new StringTemplateGroup(fr);
        }

        // Debug aid: dump the AST in DOT form.
        DotTreeGenerator dotgen = new DotTreeGenerator();
        string dot = dotgen.ToDot(t);
        using (TextWriter tw = new StreamWriter("ast.txt"))
        {
            tw.WriteLine(dot);
        }

        // ---- Pass 3: control-flow analysis ----
        Analyze analyze = new Analyze(nodes4, symtab);
        // BUG FIX: this previously re-assigned types.TraceDestination (copy/paste slip),
        // leaving the Analyze pass without trace output.
        analyze.TraceDestination = _traceDestination;
        analyze.Downup(t);
        foreach (FunctionBranch b in analyze.FunctionBranches.Where(pred => pred.Type != null))
        {
            if (!b.AllCodePathsReturn())
            {
                if (_listener != null)
                {
                    _listener.Error("line: " + b.Node.Line + ":" +
                                    b.Node.CharPositionInLine + " " + b.Node.Text + "(): Not all control paths return a value");
                }
            }
        }

        // ---- Pass 4: code generation ----
        Gen g = new Gen(nodes3, symtab);
        g.TemplateGroup = templates;
        g.TraceDestination = _traceDestination;
        StringTemplate template = g.script().Template;
        if (template != null)
        {
            string bcOut = template.ToString();
            Console.WriteLine("** byte code **\n" + bcOut);
            return bcOut;
        }
    }
    catch (TooManyErrorsException e)
    {
        if (_listener != null)
        {
            _listener.Error(String.Format("Too many errors {0}", e.InnerException.Message));
        }
    }
    catch (RecognitionException e)
    {
        if (_listener != null)
        {
            _listener.Error("line: " + e.Line.ToString() + ":" + e.CharPositionInLine.ToString() + " " + e.Message);
        }
    }
    catch (Exception e)
    {
        if (_listener != null)
        {
            _listener.Error(e.Message);
        }
    }
    return null;
}
protected override void ReParseImpl()
{
    // Diagnostics go to the TVL IntelliSense output pane when one is available.
    var diagnosticsPane = OutputWindowService.TryGetPane(PredefinedOutputWindowPanes.TvlIntellisense);
    try
    {
        var timer = Stopwatch.StartNew();
        var currentSnapshot = TextBuffer.CurrentSnapshot;

        // Lex the full snapshot, inserting Go's implicit semicolons before parsing.
        var charStream = new SnapshotCharStream(currentSnapshot, new Span(0, currentSnapshot.Length));
        var goLexer = new GoLexer(charStream);
        var semicolonSource = new GoSemicolonInsertionTokenSource(goLexer);
        var tokenStream = new CommonTokenStream(semicolonSource);
        var goParser = new GoParser(tokenStream);

        var parseErrors = new List<ParseErrorEventArgs>();
        goParser.ParseError += (sender, args) =>
        {
            parseErrors.Add(args);
            string text = args.Message;

            // Prefix with "file(line,col): " when the buffer maps to a document on disk.
            ITextDocument document;
            if (TextBuffer.Properties.TryGetProperty(typeof(ITextDocument), out document) && document != null)
            {
                var errorLine = currentSnapshot.GetLineFromPosition(args.Span.Start);
                text = string.Format(
                    "{0}({1},{2}): {3}",
                    document.FilePath,
                    errorLine.LineNumber + 1,
                    args.Span.Start - errorLine.Start.Position + 1,
                    text);
            }

            // Keep pane output readable by truncating very long messages.
            if (text.Length > 100)
            {
                text = text.Substring(0, 100) + " ...";
            }
            if (diagnosticsPane != null)
            {
                diagnosticsPane.WriteLine(text);
            }

            // Abandon the parse entirely once the error count becomes unreasonable.
            if (parseErrors.Count > 100)
            {
                throw new OperationCanceledException();
            }
        };

        var compilationUnit = goParser.compilationUnit();
        OnParseComplete(
            new AntlrParseResultEventArgs(
                currentSnapshot, parseErrors, timer.Elapsed, tokenStream.GetTokens(), compilationUnit));
    }
    catch (Exception e) when (!ErrorHandler.IsCriticalException(e))
    {
        try
        {
            if (diagnosticsPane != null)
            {
                diagnosticsPane.WriteLine(e.Message);
            }
        }
        catch (Exception ex2) when (!ErrorHandler.IsCriticalException(ex2))
        {
            // Reporting the failure must never itself take the reparse down.
        }
    }
}
/// <summary>
/// Demo driver for the text-template interpreter.  Builds a series of sample
/// template strings (each assignment to <c>input</c> replaces the previous
/// one, so only the final template is actually interpreted), parses it with
/// the ANTLR-generated TextTemplateParser, and interprets it with a
/// urlCallback that serves canned JSON for '/data/people' and '/data/events'.
/// </summary>
private static void Main(string[] args) { /*try*/ /*{*/ string input; /*input = @"{'{""hello"":""world""}':[{""hello""} {hello}]}";*/ input = @"// data context: {'{""firstName"": ""Robert"", ""lastName"": ""Smith"", ""pets"":[ {""type"":""dog"", ""name"": ""Toto""} ,{""type"":""cat"", ""name"": ""Dolly""} ,{""type"":""zebra"", ""name"": ""Stripes""} ]}': // Template: [{""Hello""} {lastName}, {firstName} with {pets=> [{pets:#formatPet.#anded()}] ,[no pets] }!!]} Subtemplates: {#formatPet:[a {type} named {name}]} {#anded:[{$0.Join(', ',' and ')}]}"; input = @"People:{#data:[ {.} {[{ lastName}{ firstName.#Prepend(', ')}{ middleName.#Prepend(' ') }].ToUpper().#Quoted()} {.} Lastname: {lastName.IfMissing('No Last Name')} {.} FirstName: {firstName} {.} Pets {pets=>[({pets.Count()}): {.} Domesticated ({pets.#Domesticated().Count()}): {.} {pets.#Domesticated():[{#formatPet} #{*.Index()}]} {.} Non-domesticated ({pets.#NonDomesticated().Count()}): {.} {pets.#NonDomesticated():[{#formatPet} #{*.Index()}]} ],[no pets] } // ].@MissingValue('No Information Provided')} Subtemplates: {#formatPet:[ a {type.Case('zebra',[{[{name}y].ToLower()}],'dog','friskie','cat','fishy','nice')} ` {type} named {name} ]} {#Domesticated: [{$0.Where(type.#IsDomesticated())}]} {#NonDomesticated: [{$0.Where(!type.#IsDomesticated())}]} {#IsDomesticated:[{$0.Matches('dog','cat')}]} {#Prepend:[{$0->[{$1}{$0}]}]} {#Surround:[{$0->[{$1}{$0}{$1}]}]} {#Quoted:[{$0->[{$0.#Surround('""')}]}]} {#data:[/data/people]}"; input = @"Shows use of bullet styles as well as modular subtemplates, automatic bulleting,` and indenting only for multiple values. 
You can see different bullet styles ` by changing the parameter to #showStyles from 1 through 5 {''.#showStyles(1)} Subtemplates: {#styleNumber:[1]} {#showStyles:[People:{#data:[ {.}{^.^.$1 = 2->[.0]} {[{ lastName}{ firstName.#Prepend(', ')}{ middleName.#Prepend(' ') }].ToUpper().#Quoted()} {.} Lastname: {lastName.IfMissing('No Last Name')} {.} FirstName: {firstName} {.} Pets {pets=>[({pets.Count()}): {.} Domesticated{#dpetcnt > 1->[ ({#dpetcnt})]}: {pets.#Domesticated():[{#formatPet}]} {.} Non-domesticated{#ndpetcnt > 1->[({#ndpetcnt})]}: {pets.#NonDomesticated():[{#formatPet}]} ],[no pets] } // ].@MissingValue('No Information Provided')}].@Include([{'#style'}{$1}])} {#formatPet:[ a {type.Case('zebra',[{[{name}y].ToLower()}],'dog','friskie','cat','fishy','nice')} ` {type} named {name} ]} {#Domesticated: [{$0.Where(type.#IsDomesticated())}]} {#NonDomesticated: [{$0.Where(!type.#IsDomesticated())}]} {#IsDomesticated:[{$0.Matches('dog','cat')}]} {#Prepend:[{$0->[{$1}{$0}]}]} {#Surround:[{$0->[{$1}{$0}{$1}]}]} {#Quoted:[{$0->[{$0.#Surround('""')}]}]} {#dpetcnt:[{pets.#Domesticated().Count()}]} {#ndpetcnt:[{pets.#NonDomesticated().Count()}]} {#style1:[].@BulletStyle('I.','(a)','•','i.')} {#style2:[].@BulletStyle('1','1.1','1.1.1','1(a)')} {#style3:[].@BulletStyle('-')} {#style4:[].@BulletStyle('')} {#style5:[].@BulletStyle(' I', '•', 'A.', '(1)')} {#data:[/data/people]}"; input = @"{'/data/events'.GroupBy(start.ToDate('YYYY-MM-DDT00:00:00'), 'group', 'start'):[ ` {start.ToUpper()} {group.OrderBy(start.ToDate('HHmm')):[{start.#formatTimes(end)}: {summary} {.} Notes: {description} {.} Location {location}]} ].@DateTest(/(star|end)/i).@BulletStyle('•').@DateFormat('dddd MMM D')} Subtemplates: {#SameDay:[{$0.ToDate('YYYYMMDD') = $1.ToDate('YYYYMMDD')}]} {#SameAMPM:[{$0.ToDate('a') = $1.ToDate('a') & $0.#SameDay($1)}]} {#onHour:[{$0.ToDate('mm') = '00'}]} {#formatTimes:[ { // Don't put am/pm for the start time if on the hour and same as end 
$0.ToDate([h{!$0.#onHour()->[:mm]}{!$0.#SameAMPM($1)->[a]}]) }-{ !$1.#SameDay($0)->[{$1.ToDate().ToUpper()} at ]}{$1.ToDate('h:mma') } ]}"; input = @"{#tests.Assert(#expected,'ALL TESTS PASSED')} Subtemplates: {#test:[{$0.Matches(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16)-> [Test {$0} {$1.Assert($2,'passed')}]}]} {#test1:[ Explicit Single Indent{#data:[ {lastName}]}]} {#test1Result:[ Explicit Single Indent Johnson Smith]} {#test2:[ Implied Single Indent {#data:[{lastName}]}]} {#test2Result:[ Implied Single Indent Johnson Smith]} {#test3:[ Implied Double Indent{#data:[{lastName}{pets:[{name}]}]}]} {#test3Result:[ Implied Double Indent Johnson Buddy Ralph Stripes Smith Toto Dolly Stripes]} {#test4:[ Bulleted Implied Bulleted Indent{#data:[ // {.} {lastName}{pets:[{name}]}]}]} {#test4Result:[ Bulleted Implied Bulleted Indent (0.0) Johnson (1.0) Buddy (1.1) Ralph (1.2) Stripes (0.1) Smith (1.0) Toto (1.1) Dolly (1.2) Stripes].Compose()} {#test5:[ Bulleted Explicit Bulleted Indent{#data:[ // {.} {lastName}{pets:[ // {.} {name}]}]}].Compose()} {#test5Result:[ Bulleted Explicit Bulleted Indent (0.0) Johnson (1.0) Buddy (1.1) Ralph (1.2) Stripes (0.1) Smith (1.0) Toto (1.1) Dolly (1.2) Stripes]} {#test6:[ Hierarchy{#data:[ // {.} {lastName}{pets:[ // {.} {name} who is owned by {.} {^.^:[{firstName} who owns {.} {pets:[{type}]}]}]}]}].Compose()} {#test6Result:[ Hierarchy (0.0) Johnson (1.0) Buddy who is owned by (2.0) Doris who owns (3.0) dog (3.1) lion (3.2) tiger (1.1) Ralph who is owned by (2.0) Doris who owns (3.0) dog (3.1) lion (3.2) tiger (1.2) Stripes who is owned by (2.0) Doris who owns (3.0) dog (3.1) lion (3.2) tiger (0.1) Smith (1.0) Toto who is owned by (2.0) John who owns (3.0) dog (3.1) cat (3.2) zebra (1.1) Dolly who is owned by (2.0) John who owns (3.0) dog (3.1) cat (3.2) zebra (1.2) Stripes who is owned by (2.0) John who owns (3.0) dog (3.1) cat (3.2) zebra]} {#test7:[ Hierarchy that transitions from indent to bullet {#data:[ // {lastName}{pets:[ // {.} 
{name} who is owned by {.} {^.^:[{firstName} who owns {.} {pets:[{type}]}]}]}]}]} {#test7Result:[ Hierarchy that transitions from indent to bullet Johnson (0.0) Buddy who is owned by (1.0) Doris who owns (2.0) dog (2.1) lion (2.2) tiger (0.1) Ralph who is owned by (1.0) Doris who owns (2.0) dog (2.1) lion (2.2) tiger (0.2) Stripes who is owned by (1.0) Doris who owns (2.0) dog (2.1) lion (2.2) tiger Smith (0.0) Toto who is owned by (1.0) John who owns (2.0) dog (2.1) cat (2.2) zebra (0.1) Dolly who is owned by (1.0) John who owns (2.0) dog (2.1) cat (2.2) zebra (0.2) Stripes who is owned by (1.0) John who owns (2.0) dog (2.1) cat (2.2) zebra]} {#test8:[ Hierarchy that also transitions from indent to bullet {#data:[ // {firstName} {lastName} who owns {pets:[{name} who is owned by {.} {^.^:[{firstName} who owns {.} {pets:[{type}]}]}]}]}]} {#test8Result:[ Hierarchy that also transitions from indent to bullet Doris Johnson who owns Buddy who is owned by (0.0) Doris who owns (1.0) dog (1.1) lion (1.2) tiger Ralph who is owned by (0.0) Doris who owns (1.0) dog (1.1) lion (1.2) tiger Stripes who is owned by (0.0) Doris who owns (1.0) dog (1.1) lion (1.2) tiger John Smith who owns Toto who is owned by (0.0) John who owns (1.0) dog (1.1) cat (1.2) zebra Dolly who is owned by (0.0) John who owns (1.0) dog (1.1) cat (1.2) zebra Stripes who is owned by (0.0) John who owns (1.0) dog (1.1) cat (1.2) zebra]} {#test9:[ Continued list {#data:[{pets:[{name}]}]}]} {#test9Result:[ Continued list Buddy Ralph Stripes Toto Dolly Stripes]} {#test10:[ Bulleted Continued list ` {#data:[{pets:[ {.} {name}]}]}]} {#test10Result:[ Bulleted Continued list (0.0) Buddy (0.1) Ralph (0.2) Stripes (0.3) Toto (0.4) Dolly (0.5) Stripes]} {#test11:[ Hierarchy that also transitions from indent to bullet and back {#data:[ // {lastName} who owns {.} {pets:[{name} who is owned by {^.^:[{firstName} who owns {.} {pets:[{type} // ]}]}]}]}]} {#test11Result:[ Hierarchy that also transitions from indent to bullet 
and back Johnson who owns (0.0) Buddy who is owned by Doris who owns (1.0) dog (1.1) lion (1.2) tiger (0.1) Ralph who is owned by Doris who owns (1.0) dog (1.1) lion (1.2) tiger (0.2) Stripes who is owned by Doris who owns (1.0) dog (1.1) lion (1.2) tiger Smith who owns (0.0) Toto who is owned by John who owns (1.0) dog (1.1) cat (1.2) zebra (0.1) Dolly who is owned by John who owns (1.0) dog (1.1) cat (1.2) zebra (0.2) Stripes who is owned by John who owns (1.0) dog (1.1) cat (1.2) zebra]} {#test12:[ Additional variation of hierarchy that also transitions from indent to bullet ` and back {#data:[ // {lastName} who owns {.} {pets:[{name} who is owned by {.} {^.^:[{firstName} who owns {pets:[{type} // ]}]}]}]}]} {#test12Result:[ Additional variation of hierarchy that also transitions from indent to bullet and back Johnson who owns (0.0) Buddy who is owned by (1.0) Doris who owns (2.0) dog (2.1) lion (2.2) tiger (0.1) Ralph who is owned by (1.0) Doris who owns (2.0) dog (2.1) lion (2.2) tiger (0.2) Stripes who is owned by (1.0) Doris who owns (2.0) dog (2.1) lion (2.2) tiger Smith who owns (0.0) Toto who is owned by (1.0) John who owns (2.0) dog (2.1) cat (2.2) zebra (0.1) Dolly who is owned by (1.0) John who owns (2.0) dog (2.1) cat (2.2) zebra (0.2) Stripes who is owned by (1.0) John who owns (2.0) dog (2.1) cat (2.2) zebra]} {#test13:[ Yet another variation of hierarchy that also transitions from indent to bullet ` and back {#data:[ // {lastName} who owns {.} {pets:[{name} who is owned by {^.^:[{firstName} who owns {pets:[{type} // ]}]}]}]}]} {#test13Result:[ Yet another variation of hierarchy that also transitions from indent to bullet and back Johnson who owns (0.0) Buddy who is owned by Doris who owns dog lion tiger (0.1) Ralph who is owned by Doris who owns dog lion tiger (0.2) Stripes who is owned by Doris who owns dog lion tiger Smith who owns (0.0) Toto who is owned by John who owns dog cat zebra (0.1) Dolly who is owned by John who owns dog cat zebra (0.2) 
Stripes who is owned by John who owns dog cat zebra]} {#test14:[ Indent under indent under bullet {#data:[ // {lastName} who owns {.} {pets:[{name} who is owned by {^.^:[{firstName} who owns {pets:[{type} // ]}]}]}]}]} {#test14Result:[ Indent under indent under bullet Johnson who owns (0.0) Buddy who is owned by Doris who owns dog lion tiger (0.1) Ralph who is owned by Doris who owns dog lion tiger (0.2) Stripes who is owned by Doris who owns dog lion tiger Smith who owns (0.0) Toto who is owned by John who owns dog cat zebra (0.1) Dolly who is owned by John who owns dog cat zebra (0.2) Stripes who is owned by John who owns dog cat zebra]} {#test15:[ Automatic bulleting for multi-values{#data:[ // {lastName} who owns {.} {pets:[{name} who is owned by {^.^:[{firstName} who owns {pets:[{type} // ]}]}]}]}]} {#test15Result:[ Automatic bulleting for multi-values Johnson who owns (0.0) Buddy who is owned by Doris who owns dog lion tiger (0.1) Ralph who is owned by Doris who owns dog lion tiger (0.2) Stripes who is owned by Doris who owns dog lion tiger Smith who owns (0.0) Toto who is owned by John who owns dog cat zebra (0.1) Dolly who is owned by John who owns dog cat zebra (0.2) Stripes who is owned by John who owns dog cat zebra]} {#test16:[ {''.Compose() // reset numbering from previous tests }{[ A: People:{'/data/people':[ {.} Pets {[ {.} Domesticated{pets:[{type}]} ] } // ].Compose()} B: People:{'/data/people':[ {.} {lastName}{pets:[{type}]} ].@MissingValue('No Information Provided').Compose()} C: People:{'/data/people':[ {.} {lastName}{pets:[{type}]} ].Compose()} D: People:{'/data/people':[ {lastName}{pets:[{type}]} ].@MissingValue('No Information Provided').Compose()} E: People:{'/data/people':[ {.} {lastName}{pets:[ {.} {type}]} ].@MissingValue('No Information Provided').Compose()}]}]} {#test16Result:[ A: People: (0.0) Pets (1.0) Domesticated (2.0) dog (2.1) lion (2.2) tiger (0.1) Pets (1.0) Domesticated (2.0) dog (2.1) cat (2.2) zebra B: People: (0.0) Johnson 
(1.0) dog (1.1) lion (1.2) tiger (0.1) Smith (1.0) dog (1.1) cat (1.2) zebra C: People: (0.0) Johnson (1.0) dog (1.1) lion (1.2) tiger (0.1) Smith (1.0) dog (1.1) cat (1.2) zebra D: People:Johnson dog lion tiger Smith dog cat zebra E: People: (0.0) Johnson (1.0) dog (1.1) lion (1.2) tiger (0.1) Smith (1.0) dog (1.1) cat (1.2) zebra]} {#tests:[ // {'1'.#test(#test1,#test1Result)} {'2'.#test(#test2,#test2Result)} {'3'.#test(#test3,#test3Result)} {'4'.#test(#test4,#test4Result)} {'5'.#test(#test5,#test5Result)} {'6'.#test(#test6,#test6Result)} {'7'.#test(#test7,#test7Result)} {'8'.#test(#test8,#test8Result)} {'9'.#test(#test9,#test9Result)} {'10'.#test(#test10,#test10Result)} {'11'.#test(#test11,#test11Result)} {'12'.#test(#test12,#test12Result)} {'13'.#test(#test13,#test13Result)} {'14'.#test(#test14,#test14Result)} {'15'.#test(#test15,#test15Result)} {'16'.#test(#test16,#test16Result)} ]} {#expected:[ // Test 1 passed Test 2 passed Test 3 passed Test 4 passed Test 5 passed Test 6 passed Test 7 passed Test 8 passed Test 9 passed Test 10 passed Test 11 passed Test 12 passed Test 13 passed Test 14 passed Test 15 passed Test 16 passed ]} {#data:[/data/people]}"; /* strip CR so the lexer only ever sees LF line endings */ input = input.Replace("\r", ""); AntlrInputStream inputStream = new AntlrInputStream(input); TextTemplateLexer textTemplateLexer = new TextTemplateLexer(inputStream); CommonTokenStream commonTokenStream = new CommonTokenStream(textTemplateLexer); TextTemplateParser textTemplateParser = new TextTemplateParser(commonTokenStream); TextTemplateParser.CompilationUnitContext compilationUnitContext = textTemplateParser.compilationUnit(); /* NOTE(review): compilationUnitContext is not used below; visitor.interpret re-parses the raw input — confirm this double parse is intended */ TextTemplateVisitor visitor = new TextTemplateVisitor(); /* fake data service: returns canned JSON for the known URLs, "Bad Url" otherwise */ Func <string, string> urlCallback = url => { string ret = "Bad Url"; switch (url) { case "/data/people": ret = @"[ { ""firstName"": ""Doris"" ,""lastName"": ""Johnson"" ,""pets"": [ { ""type"": ""dog"" ,""name"": ""Buddy"" },{ ""type"": ""lion"" ,""name"": ""Ralph"" },{ ""type"": ""tiger"" ,""name"": ""Stripes"" } ] }, { 
""firstName"": ""John"" ,""lastName"": ""Smith"" ,""pets"": [ { ""type"": ""dog"" ,""name"": ""Toto"" },{ ""type"": ""cat"" ,""name"": ""Dolly"" },{ ""type"": ""zebra"" ,""name"": ""Stripes"" } ] } ]"; break; case "/data/events": ret = @"[ {""start"": ""2020-05-20T19:00:00"", ""end"": ""2020-05-20T22:30:00"",""summary"": ""Dinner with Mom"", ""description"":""Dresscode: Elegant and ironed!"", ""location"": ""800 Howard St., San Francisco, CA 94103""}, {""start"": ""2020-06-20T15:00:00"", ""end"": ""2020-06-22T15:30:00"",""summary"": ""Hotdog eating competition"", ""location"": ""43 Chapel Ave, Jersey City, NJ 07305""}, {""start"": ""2020-05-28T10:00:00"", ""end"": ""2020-05-28T12:15:00"",""summary"": ""Vet"", ""description"":""Brush the dog's teeth"", ""location"": ""3771 Van Dyke Ave San Diego, CA 92105""}, {""start"": ""2020-05-28T08:30:00"", ""end"": ""2020-05-28T10:00:00"",""summary"": ""Meet with Paul"", ""description"":""Discussion of future plans"", ""location"": ""1200 Railing St., Brunswick, Md.""}, {""start"": ""2020-06-30T10:00:00"", ""end"": ""2020-06-30T11:30:00"",""summary"": ""Jogging class"", ""description"":""Bring your inhaler"", ""location"": ""3014 Rosalinda San Clemente, CA 92673""} ]"; break; } return(ret); }; /* pass the callback to the interpreter via its options bag */ Dictionary <string, object> options = new Dictionary <string, object>(); options["urlCallback"] = urlCallback; string result = visitor.interpret(input, options); Debug.Write(result + "\n"); Console.Write(result + "\n"); }
protected override void ReParseImpl() {
    // Purpose: compute outlining (collapse) regions for the Alloy buffer by
    // locating block-introducing keywords, then simulating the ATN backward
    // and forward from each keyword to locate the matching brace pair.

    // lex the entire document to get the set of identifiers we'll need to classify
    ITextSnapshot snapshot = TextBuffer.CurrentSnapshot;
    var input = new SnapshotCharStream(snapshot, new Span(0, snapshot.Length));
    var lexer = new AlloyLexer(input);
    var tokens = new CommonTokenStream(lexer);
    tokens.Fill();

    /* Want to outline the following blocks:
     *  - assert
     *  - enum
     *  - fact
     *  - fun
     *  - pred
     *  - sig (block and body)
     */

    // Collect the token for every keyword that can introduce an outlinable block.
    List <IToken> outliningKeywords = new List <IToken>();
    while (tokens.LA(1) != CharStreamConstants.EndOfFile) {
        switch (tokens.LA(1)) {
        case AlloyLexer.KW_ASSERT:
        case AlloyLexer.KW_ENUM:
        case AlloyLexer.KW_FACT:
        case AlloyLexer.KW_FUN:
        case AlloyLexer.KW_PRED:
        case AlloyLexer.KW_SIG:
            outliningKeywords.Add(tokens.LT(1));
            break;

        case CharStreamConstants.EndOfFile:
            goto doneLexing;

        default:
            break;
        }

        tokens.Consume();
    }

doneLexing:
    List <ITagSpan <IOutliningRegionTag> > outliningRegions = new List <ITagSpan <IOutliningRegionTag> >();
    foreach (var token in outliningKeywords) {
        // Position the stream just past the keyword, then walk the ATN in both
        // directions until every candidate context is bounded (or none remain).
        tokens.Seek(token.TokenIndex);
        tokens.Consume();
        NetworkInterpreter interpreter = CreateNetworkInterpreter(tokens);
        while (interpreter.TryStepBackward()) {
            if (interpreter.Contexts.Count == 0) {
                break;
            }
            if (interpreter.Contexts.All(context => context.BoundedStart)) {
                break;
            }
        }
        interpreter.CombineBoundedStartContexts();
        while (interpreter.TryStepForward()) {
            if (interpreter.Contexts.Count == 0) {
                break;
            }
            if (interpreter.Contexts.All(context => context.BoundedEnd)) {
                break;
            }
        }
        foreach (var context in interpreter.Contexts) {
            // The outlined region runs from the first '{' transition to the
            // last matched-token transition of the interpreted context.
            InterpretTraceTransition firstBraceTransition = context.Transitions.FirstOrDefault(i => i.Symbol == AlloyLexer.LBRACE);
            InterpretTraceTransition lastBraceTransition = context.Transitions.LastOrDefault(i => i.Transition.IsMatch);
            if (firstBraceTransition == null || lastBraceTransition == null) {
                continue;
            }

            if (token.Type == AlloyLexer.KW_SIG) {
                // A 'sig' can produce two regions: one for the sig body (closed by
                // the RBRACE whose source state belongs to the SigBody rule) and
                // one for the trailing block that follows it.
                InterpretTraceTransition lastBodyBraceTransition = context.Transitions.LastOrDefault(i => i.Symbol == AlloyLexer.RBRACE && interpreter.Network.StateRules[i.Transition.SourceState.Id].Name == AlloyOutliningAtnBuilder.RuleNames.SigBody);
                if (lastBodyBraceTransition != lastBraceTransition && lastBodyBraceTransition != null) {
                    var bodySpan = OutlineBlock(firstBraceTransition.Token, lastBodyBraceTransition.Token, snapshot);
                    if (bodySpan != null) {
                        outliningRegions.Add(bodySpan);
                    }

                    // Re-anchor the block region at the '{' that follows the sig body.
                    firstBraceTransition = context.Transitions.LastOrDefault(i => i.Symbol == AlloyLexer.LBRACE && i.TokenIndex > lastBodyBraceTransition.TokenIndex);
                }
            }

            var blockSpan = OutlineBlock(firstBraceTransition.Token, lastBraceTransition.Token, snapshot);
            if (blockSpan != null) {
                outliningRegions.Add(blockSpan);
                // First context that yields a region wins; skip the remaining contexts.
                break;
            }
        }
    }

    _outliningRegions = outliningRegions;
    // Invalidate tags over the whole snapshot so consumers re-query the regions.
    OnTagsChanged(new SnapshotSpanEventArgs(new SnapshotSpan(snapshot, new Span(0, snapshot.Length))));
}
protected override void ReParseImpl() {
    // Purpose: classify every identifier in the buffer as a definition, a
    // reference, or unknown, by simulating the simplified Go ATN from anchor
    // tokens (declaration keywords and ':='/':' tokens), then publish the
    // resulting classification tags.
    Stopwatch timer = Stopwatch.StartNew();

    // lex the entire document to get the set of identifiers we'll need to classify
    ITextSnapshot snapshot = TextBuffer.CurrentSnapshot;
    var input = new SnapshotCharStream(snapshot, new Span(0, snapshot.Length));
    var lexer = new GoLexer(input);
    var tokenSource = new GoSemicolonInsertionTokenSource(lexer);
    var tokens = new CommonTokenStream(tokenSource);
    tokens.Fill();

    /* easy to handle the following definitions:
     *  - module (name)
     *  - open (external symbol reference) ... as (name)
     *  - fact (name)?
     *  - assert (name)?
     *  - fun (ref.name | name)
     *  - pred (ref.name | name)
     *  - (name): run|check
     *  - sig (namelist)
     *  - enum (name)
     * moderate to handle the following definitions:
     *  - decl name(s)
     * harder to handle the following definitions:
     */

    /* A single name follows the following keywords:
     *  - KW_MODULE
     *  - KW_OPEN
     *  - KW_AS
     *  - KW_ENUM
     *  - KW_FACT (name is optional)
     *  - KW_ASSERT (name is optional)
     */

    // One linear pass over the token stream collecting the anchor tokens.
    List <IToken> nameKeywords = new List <IToken>();
    List <IToken> declColons = new List <IToken>();
    List <IToken> identifiers = new List <IToken>();
    while (tokens.LA(1) != CharStreamConstants.EndOfFile) {
        switch (tokens.LA(1)) {
        case GoLexer.IDENTIFIER:
            identifiers.Add(tokens.LT(1));
            break;

        case GoLexer.KW_PACKAGE:
        case GoLexer.KW_IMPORT:
        case GoLexer.KW_TYPE:
        case GoLexer.KW_VAR:
        case GoLexer.KW_FUNC:
        case GoLexer.KW_CONST:
        //case GoLexer.KW_MODULE:
        //case GoLexer.KW_OPEN:
        //case GoLexer.KW_AS:
        //case GoLexer.KW_ENUM:
        //case GoLexer.KW_FACT:
        //case GoLexer.KW_ASSERT:
        //case GoLexer.KW_RUN:
        //case GoLexer.KW_CHECK:
        //case GoLexer.KW_EXTENDS:
        //case GoLexer.KW_FUN:
        //case GoLexer.KW_PRED:
        //case GoLexer.KW_SIG:
            nameKeywords.Add(tokens.LT(1));
            break;

        case GoLexer.DEFEQ:
        case GoLexer.COLON:
            declColons.Add(tokens.LT(1));
            break;

        case CharStreamConstants.EndOfFile:
            goto doneLexing;

        default:
            break;
        }

        tokens.Consume();
    }

doneLexing:
    HashSet <IToken> definitions = new HashSet <IToken>(TokenIndexEqualityComparer.Default);
    HashSet <IToken> references = new HashSet <IToken>(TokenIndexEqualityComparer.Default);

    // Pass 1: step forward from each declaration keyword; identifiers reached
    // through the SymbolReference/SymbolDefinition rules are classified accordingly.
    foreach (var token in nameKeywords) {
        tokens.Seek(token.TokenIndex);
        NetworkInterpreter interpreter = CreateTopLevelNetworkInterpreter(tokens);
        while (interpreter.TryStepForward()) {
            // The 400-context cap bounds the simulation cost on ambiguous input.
            if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400) {
                break;
            }
            if (interpreter.Contexts.All(context => context.BoundedEnd)) {
                break;
            }
        }
        interpreter.CombineBoundedEndContexts();
        foreach (var context in interpreter.Contexts) {
            foreach (var transition in context.Transitions) {
                if (!transition.Symbol.HasValue) {
                    continue;
                }
                switch (transition.Symbol) {
                case GoLexer.IDENTIFIER:
                //case GoLexer.KW_THIS:
                    RuleBinding rule = interpreter.Network.StateRules[transition.Transition.TargetState.Id];
                    if (rule.Name == GoSimplifiedAtnBuilder.RuleNames.SymbolReferenceIdentifier) {
                        references.Add(tokens.Get(transition.TokenIndex.Value));
                    }
                    else if (rule.Name == GoSimplifiedAtnBuilder.RuleNames.SymbolDefinitionIdentifier) {
                        definitions.Add(tokens.Get(transition.TokenIndex.Value));
                    }
                    break;

                default:
                    continue;
                }
            }
        }
    }

    // Pass 2: work outward from each ':='/':' token (short variable declarations
    // and labels), pruning contexts inconsistent with what pass 1 already decided.
    foreach (var token in declColons) {
        tokens.Seek(token.TokenIndex);
        tokens.Consume();

        if (token.Type == GoLexer.COLON) {
            // A ':' only matters here when it follows an identifier (a label).
            IToken potentialLabel = tokens.LT(-2);
            if (potentialLabel.Type != GoLexer.IDENTIFIER) {
                continue;
            }
        }

        NetworkInterpreter interpreter = CreateVarDeclarationNetworkInterpreter(tokens);
        while (interpreter.TryStepBackward()) {
            if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400) {
                break;
            }
            if (interpreter.Contexts.All(context => context.BoundedStart)) {
                break;
            }
            interpreter.Contexts.RemoveAll(i => !IsConsistentWithPreviousResult(i, true, definitions, references));
        }
        interpreter.CombineBoundedStartContexts();

        // Only step forward when the backward pass left disagreement.
        if (!AllAgree(interpreter.Contexts)) {
            while (interpreter.TryStepForward()) {
                if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400) {
                    break;
                }
                if (interpreter.Contexts.All(context => context.BoundedEnd)) {
                    break;
                }
                interpreter.Contexts.RemoveAll(i => !IsConsistentWithPreviousResult(i, false, definitions, references));
            }
            interpreter.CombineBoundedEndContexts();
        }

        foreach (var context in interpreter.Contexts) {
            foreach (var transition in context.Transitions) {
                if (!transition.Symbol.HasValue) {
                    continue;
                }
                switch (transition.Symbol) {
                case GoLexer.IDENTIFIER:
                //case GoLexer.KW_THIS:
                    RuleBinding rule = interpreter.Network.StateRules[transition.Transition.TargetState.Id];
                    if (rule.Name == GoSimplifiedAtnBuilder.RuleNames.SymbolReferenceIdentifier) {
                        references.Add(tokens.Get(transition.TokenIndex.Value));
                    }
                    else if (rule.Name == GoSimplifiedAtnBuilder.RuleNames.SymbolDefinitionIdentifier) {
                        definitions.Add(tokens.Get(transition.TokenIndex.Value));
                    }
                    break;

                default:
                    continue;
                }
            }
        }
    }

#if false
    // Disabled pass 3: full-network simulation from every not-yet-classified
    // identifier (kept for reference; too expensive to run on every reparse).
    foreach (var token in identifiers) {
        if (definitions.Contains(token) || references.Contains(token)) {
            continue;
        }
        tokens.Seek(token.TokenIndex);
        tokens.Consume();
        NetworkInterpreter interpreter = CreateFullNetworkInterpreter(tokens);
        while (interpreter.TryStepBackward()) {
            if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400) {
                break;
            }
            if (interpreter.Contexts.All(context => context.BoundedStart)) {
                break;
            }
            interpreter.Contexts.RemoveAll(i => !IsConsistentWithPreviousResult(i, true, definitions, references));
            if (AllAgree(interpreter.Contexts)) {
                break;
            }
        }
        interpreter.CombineBoundedStartContexts();
        while (interpreter.TryStepForward()) {
            if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400) {
                break;
            }
            if (interpreter.Contexts.All(context => context.BoundedEnd)) {
                break;
            }
            interpreter.Contexts.RemoveAll(i => !IsConsistentWithPreviousResult(i, false, definitions, references));
            if (AllAgree(interpreter.Contexts)) {
                break;
            }
        }
        interpreter.CombineBoundedEndContexts();
        foreach (var context in interpreter.Contexts) {
            foreach (var transition in context.Transitions) {
                if (!transition.Symbol.HasValue) {
                    continue;
                }
                switch (transition.Symbol) {
                case GoLexer.IDENTIFIER:
                //case GoLexer.KW_THIS:
                    RuleBinding rule = interpreter.Network.StateRules[transition.Transition.TargetState.Id];
                    if (rule.Name == GoSimplifiedAtnBuilder.RuleNames.SymbolReferenceIdentifier) {
                        references.Add(tokens.Get(transition.TokenIndex.Value));
                    }
                    else if (rule.Name == GoSimplifiedAtnBuilder.RuleNames.SymbolDefinitionIdentifier) {
                        definitions.Add(tokens.Get(transition.TokenIndex.Value));
                    }
                    break;

                default:
                    continue;
                }
            }
        }
    }
#endif

    // tokens which are in both the 'definitions' and 'references' sets are actually unknown.
    HashSet <IToken> unknownIdentifiers = new HashSet <IToken>(definitions, TokenIndexEqualityComparer.Default);
    unknownIdentifiers.IntersectWith(references);
    definitions.ExceptWith(unknownIdentifiers);

#if true
    // Treat every identifier not classified as a definition (or conflicted) as a reference.
    references = new HashSet <IToken>(identifiers, TokenIndexEqualityComparer.Default);
    references.ExceptWith(definitions);
    references.ExceptWith(unknownIdentifiers);
#else
    references.ExceptWith(unknownIdentifiers);

    // the full set of unknown identifiers are any that aren't explicitly classified as a definition or a reference
    unknownIdentifiers = new HashSet <IToken>(identifiers, TokenIndexEqualityComparer.Default);
    unknownIdentifiers.ExceptWith(definitions);
    unknownIdentifiers.ExceptWith(references);
#endif

    // Build one classification tag list covering all three categories.
    List <ITagSpan <IClassificationTag> > tags = new List <ITagSpan <IClassificationTag> >();

    IClassificationType definitionClassificationType = _classificationTypeRegistryService.GetClassificationType(GoSymbolTaggerClassificationTypeNames.Definition);
    tags.AddRange(ClassifyTokens(snapshot, definitions, new ClassificationTag(definitionClassificationType)));

    IClassificationType referenceClassificationType = _classificationTypeRegistryService.GetClassificationType(GoSymbolTaggerClassificationTypeNames.Reference);
    tags.AddRange(ClassifyTokens(snapshot, references, new ClassificationTag(referenceClassificationType)));

    IClassificationType unknownClassificationType = _classificationTypeRegistryService.GetClassificationType(GoSymbolTaggerClassificationTypeNames.UnknownIdentifier);
    tags.AddRange(ClassifyTokens(snapshot, unknownIdentifiers, new ClassificationTag(unknownClassificationType)));

    _tags = tags;

    timer.Stop();

    // Diagnostic summary for the IntelliSense output pane.
    IOutputWindowPane pane = OutputWindowService.TryGetPane(PredefinedOutputWindowPanes.TvlIntellisense);
    if (pane != null) {
        pane.WriteLine(string.Format("Finished classifying {0} identifiers in {1}ms: {2} definitions, {3} references, {4} unknown", identifiers.Count, timer.ElapsedMilliseconds, definitions.Count, references.Count, unknownIdentifiers.Count));
    }

    // Invalidate tags over the whole snapshot so consumers re-query.
    OnTagsChanged(new SnapshotSpanEventArgs(new SnapshotSpan(snapshot, new Span(0, snapshot.Length))));
}
/// <summary>
/// Emits the direct ANTLR translation of <paramref name="tree"/> to
/// <paramref name="writer"/> by walking it with the direct-translation visitor.
/// </summary>
void OutputDirectTranslation(TextWriter writer, CommonTokenStream tokens, CommonTree tree, INamedCharacterLookup lookup, RuleStatistics ruleStatistics)
{
    // The visitor does all the work; no state is kept beyond this call.
    new TreeVisitor_OutputTranslation_Direct(tokens, writer, lookup, ruleStatistics).Visit(tree);
}
public int parseFile(ICharStream input, int thread) {
    // Benchmark worker: lexes (and optionally parses) one input on the given
    // worker slot, reusing cached lexers/parsers according to the REUSE_* /
    // ENABLE_* configuration flags, and returns a CRC32 checksum computed over
    // the tokens / parse events (only meaningful when COMPUTE_CHECKSUM is set).
    Checksum checksum = new CRC32();
    Debug.Assert(thread >= 0 && thread < NUMBER_OF_THREADS);
    try {
        // Lazily create the (blank) parse-tree listener for this slot.
        IParseTreeListener listener = sharedListeners[thread];
        if (listener == null) {
            listener = (IParseTreeListener)Activator.CreateInstance(listenerClass);
            sharedListeners[thread] = listener;
        }

        // Reuse or (re)create the lexer; configure its ATN simulator per flags.
        Lexer lexer = sharedLexers[thread];
        if (REUSE_LEXER && lexer != null) {
            lexer.SetInputStream(input);
        }
        else {
            lexer = (Lexer)lexerCtor.Invoke(new object[] { input });
            sharedLexers[thread] = lexer;
            if (!ENABLE_LEXER_DFA) {
                lexer.Interpreter = new NonCachingLexerATNSimulator(lexer, lexer.Atn);
            }
            else if (!REUSE_LEXER_DFA) {
                lexer.Interpreter = new LexerATNSimulator(lexer, sharedLexerATNs[thread]);
            }
        }

        lexer.Interpreter.optimize_tail_calls = OPTIMIZE_TAIL_CALLS;
        if (ENABLE_LEXER_DFA && !REUSE_LEXER_DFA) {
            lexer.Interpreter.atn.ClearDFA();
        }

        // Tokenize the whole input up front; keep a global running token count.
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        tokens.Fill();
        Interlocked.Add(ref tokenCount, tokens.Size);

        if (COMPUTE_CHECKSUM) {
            foreach (IToken token in tokens.GetTokens()) {
                updateChecksum(checksum, token);
            }
        }

        if (!RUN_PARSER) {
            return((int)checksum.Value);
        }

        // Reuse or (re)create the parser for this slot.
        Parser parser = sharedParsers[thread];
        if (REUSE_PARSER && parser != null) {
            parser.SetInputStream(tokens);
        }
        else {
            Parser newParser = (Parser)parserCtor.Invoke(new object[] { tokens });
            parser = newParser;
            sharedParsers[thread] = parser;
        }

        parser.RemoveErrorListeners();
        if (!TWO_STAGE_PARSING) {
            // In single-stage mode, report errors directly; in two-stage mode the
            // first (SLL) attempt bails and the retry below attaches listeners.
            parser.AddErrorListener(DescriptiveErrorListener.INSTANCE);
            parser.AddErrorListener(new SummarizingDiagnosticErrorListener());
        }

        if (!ENABLE_PARSER_DFA) {
            parser.Interpreter = new NonCachingParserATNSimulator(parser, parser.Atn);
        }
        else if (!REUSE_PARSER_DFA) {
            parser.Interpreter = new ParserATNSimulator(parser, sharedParserATNs[thread]);
        }

        if (ENABLE_PARSER_DFA && !REUSE_PARSER_DFA) {
            parser.Interpreter.atn.ClearDFA();
        }

        // First attempt uses SLL prediction when two-stage parsing is enabled.
        parser.Interpreter.PredictionMode = TWO_STAGE_PARSING ? PredictionMode.Sll : PREDICTION_MODE;
        parser.Interpreter.force_global_context = FORCE_GLOBAL_CONTEXT && !TWO_STAGE_PARSING;
        parser.Interpreter.always_try_local_context = TRY_LOCAL_CONTEXT_FIRST || TWO_STAGE_PARSING;
        parser.Interpreter.optimize_ll1 = OPTIMIZE_LL1;
        parser.Interpreter.optimize_unique_closure = OPTIMIZE_UNIQUE_CLOSURE;
        parser.Interpreter.optimize_hidden_conflicted_configs = OPTIMIZE_HIDDEN_CONFLICTED_CONFIGS;
        parser.Interpreter.optimize_tail_calls = OPTIMIZE_TAIL_CALLS;
        parser.Interpreter.tail_call_preserves_sll = TAIL_CALL_PRESERVES_SLL;
        parser.Interpreter.treat_sllk1_conflict_as_ambiguity = TREAT_SLLK1_CONFLICT_AS_AMBIGUITY;
        parser.BuildParseTree = BUILD_PARSE_TREES;
        if (!BUILD_PARSE_TREES && BLANK_LISTENER) {
            parser.AddParseListener(listener);
        }
        if (BAIL_ON_ERROR || TWO_STAGE_PARSING) {
            parser.ErrorHandler = new BailErrorStrategy();
        }

        // The grammar's entry rule is invoked reflectively by name.
        MethodInfo parseMethod = parserClass.GetMethod(entryPoint);
        object parseResult;
        IParseTreeListener checksumParserListener = null;

        try {
            if (COMPUTE_CHECKSUM) {
                checksumParserListener = new ChecksumParseTreeListener(checksum);
                parser.AddParseListener(checksumParserListener);
            }
            parseResult = parseMethod.Invoke(parser, null);
        }
        catch (TargetInvocationException ex) {
            if (!TWO_STAGE_PARSING) {
                throw;
            }

            string sourceName = tokens.SourceName;
            sourceName = !string.IsNullOrEmpty(sourceName) ? sourceName + ": " : "";
            Console.Error.WriteLine(sourceName + "Forced to retry with full context.");

            // Only a bail-out of the SLL stage triggers the full-context retry.
            if (!(ex.InnerException is ParseCanceledException)) {
                throw;
            }

            // Second stage: rewind, rebuild/reconfigure the parser with the full
            // (configured) prediction mode and real error listeners, and re-parse.
            tokens.Reset();
            if (REUSE_PARSER && sharedParsers[thread] != null) {
                parser.SetInputStream(tokens);
            }
            else {
                Parser newParser = (Parser)parserCtor.Invoke(new object[] { tokens });
                parser = newParser;
                sharedParsers[thread] = parser;
            }

            parser.RemoveErrorListeners();
            parser.AddErrorListener(DescriptiveErrorListener.INSTANCE);
            parser.AddErrorListener(new SummarizingDiagnosticErrorListener());
            if (!ENABLE_PARSER_DFA) {
                parser.Interpreter = new NonCachingParserATNSimulator(parser, parser.Atn);
            }
            parser.Interpreter.PredictionMode = PREDICTION_MODE;
            parser.Interpreter.force_global_context = FORCE_GLOBAL_CONTEXT;
            parser.Interpreter.always_try_local_context = TRY_LOCAL_CONTEXT_FIRST;
            parser.Interpreter.optimize_ll1 = OPTIMIZE_LL1;
            parser.Interpreter.optimize_unique_closure = OPTIMIZE_UNIQUE_CLOSURE;
            parser.Interpreter.optimize_hidden_conflicted_configs = OPTIMIZE_HIDDEN_CONFLICTED_CONFIGS;
            parser.Interpreter.optimize_tail_calls = OPTIMIZE_TAIL_CALLS;
            parser.Interpreter.tail_call_preserves_sll = TAIL_CALL_PRESERVES_SLL;
            parser.Interpreter.treat_sllk1_conflict_as_ambiguity = TREAT_SLLK1_CONFLICT_AS_AMBIGUITY;
            parser.BuildParseTree = BUILD_PARSE_TREES;
            if (!BUILD_PARSE_TREES && BLANK_LISTENER) {
                parser.AddParseListener(listener);
            }
            if (BAIL_ON_ERROR) {
                parser.ErrorHandler = new BailErrorStrategy();
            }

            parseResult = parseMethod.Invoke(parser, null);
        }
        finally {
            // Never leave the checksum listener attached to a shared parser.
            if (checksumParserListener != null) {
                parser.RemoveParseListener(checksumParserListener);
            }
        }

        Assert.IsInstanceOfType(parseResult, typeof(IParseTree));
        if (BUILD_PARSE_TREES && BLANK_LISTENER) {
            ParseTreeWalker.Default.Walk(listener, (ParserRuleContext)parseResult);
        }
    }
    catch (Exception e) {
        // When syntax errors are not being reported, a bail-out is an expected
        // outcome — return the checksum accumulated so far instead of failing.
        if (!REPORT_SYNTAX_ERRORS && e is ParseCanceledException) {
            return((int)checksum.Value);
        }

        throw;
    }

    return((int)checksum.Value);
}
/// <summary>
/// Assembler pass 1: runs the ANTLR lexer/parser over every source line, builds the
/// intermediate listing in <paramref name="dataGridViewIntermedio"/>, fills the symbol
/// table (TabSim) and records per-line error flags/messages.
/// </summary>
/// <param name="dataGridViewIntermedio">Grid that receives the intermediate listing rows.</param>
public override void paso1(DataGridView dataGridViewIntermedio)
{
    string[] codigo;
    MyErrorListener errorListener;
    // Reset all pass-1 state before processing the file.
    this.lineasError = new List<bool>();
    this.errores = new List<string>();
    this.intermedio = new List<string>();
    this.tabSim = new Dictionary<string, long>();
    for (int i = 0; i < this.archivo.Count; i++)
    {
        // Fresh lexer/parser per line; the listener tags errors with the 1-based line number.
        lexer = new SICExtendidaLexer(new AntlrInputStream(this.Archivo[i]));
        tokens = new CommonTokenStream(lexer);
        parser = new SICExtendidaParser(tokens);
        errorListener = new MyErrorListener(i + 1);
        parser.AddErrorListener(errorListener);
        parser.prog();
        // Lines are tab-separated: [0]=label, [1]=mnemonic/directive, [2]=operand.
        codigo = this.Archivo[i].Split('\t');
        this.lineasError.Add(errorListener.ExisteError);
        if (!errorListener.ExisteError)
        {
            try
            {
                // NOTE(review): non-short-circuiting '&' is used here; operands are
                // side-effect-free Equals calls so behavior matches '&&'.
                if (!codigo[1].Equals("START") & !codigo[1].Equals("END") & !codigo[1].Equals("BASE"))
                {
                    this.ensamblaIntermedio(dataGridViewIntermedio, codigo, i, "No");
                    if (!string.IsNullOrEmpty(codigo[0]))
                    {
                        // Record the label at the current location counter; a duplicate
                        // key throws ArgumentException, handled below.
                        this.TabSim.Add(codigo[0], this.cp);
                    }
                    this.incrementaInstruccionDirectiva(codigo);
                }
                else if (codigo[1].Equals("START"))
                {
                    // START operand sets the initial location counter; "H" suffix means hex.
                    codigo[2] = codigo[2].ToUpper();
                    if (codigo[2].Contains("H"))
                    {
                        this.cp = MetodosAuxiliares.hexadecimalADecimal(codigo[2].Replace("H", ""));
                        this.ensamblaIntermedio(dataGridViewIntermedio, codigo, i, "No");
                    }
                    else
                    {
                        this.cp = long.Parse(codigo[2]);
                        this.ensamblaIntermedio(dataGridViewIntermedio, codigo, i, "no");
                    }
                }
                else if (codigo[1].Equals("END") | codigo[1].Equals("BASE"))
                {
                    //this.intermedio.Add(this.cp.ToString() + "\t" + this.archivo[i]);
                    this.ensamblaIntermedio(dataGridViewIntermedio, codigo, i, "no");
                }
            }
            catch (ArgumentException)
            {
                // Duplicate symbol: report it, roll back the just-added grid row and
                // intermediate entry, then re-emit the line flagged as "Simbolo".
                this.errores.Add("Linea" + (i + 1).ToString() + ": Error Simbolo repetido");
                dataGridViewIntermedio.Rows.Remove(dataGridViewIntermedio.Rows[dataGridViewIntermedio.Rows.Count - 1]);
                this.intermedio.Remove(this.intermedio.Last());
                this.ensamblaIntermedio(dataGridViewIntermedio, codigo, i, "Simbolo");
                this.incrementaInstruccionDirectiva(codigo);
            }
        }
        else
        {
            if (!string.IsNullOrEmpty(this.Archivo[i]))
            {
                // Distinguish "reserved word used as label" from a malformed operand.
                if (this.instruccionesFormato1.Keys.Contains(codigo[1]) || this.instruccionesFormato2.Keys.Contains(codigo[1]) || this.instruccionesFormato3.Keys.Contains(codigo[1]) || this.Directivas.Contains(codigo[1]))
                {
                    this.errores.Add("Linea" + (i + 1).ToString() + ": Error de sintaxis la etiqueta no puede ser la palabra reservada \"" + codigo[0] + "\"");
                    this.ensamblaIntermedio(dataGridViewIntermedio, codigo, i, "Syntax");
                }
                else
                {
                    this.errores.Add("Linea" + (i + 1).ToString() + ": Error de sintaxis el operando: \"" + codigo[2] + "\" Esta mal escrito");
                    this.ensamblaIntermedio(dataGridViewIntermedio, codigo, i, "Syntax");
                }
            }
            else
            {
                // Empty lines are illegal; emit a placeholder row so listing stays aligned.
                this.errores.Add("Linea" + (i + 1).ToString() + ": Error de sintaxis no debe haber lineas vacias");
                codigo = new string[] { "\t", "\t", "\t" };
                this.ensamblaIntermedio(dataGridViewIntermedio, codigo, i, "Vacia");
            }
        }
    }
    this.paso1Logrado = true;
}
/// <summary>
/// Walks the parser's ATN over the buffered token stream and collects the labels of all
/// non-epsilon transitions reachable from the parse states at the end of each candidate
/// parse — presumably the set of token types acceptable at the cursor; confirm against callers.
/// </summary>
/// <param name="parser">Parser whose ATN is traversed.</param>
/// <param name="token_stream">Token stream to buffer into <c>_input</c>.</param>
/// <returns>Interval set of collected transition labels.</returns>
public IntervalSet Compute(Parser parser, CommonTokenStream token_stream)
{
    _input = new List<IToken>();
    _parser = parser;
    _token_stream = token_stream;
    //_cursor = _token_stream.GetTokens().Select(t => t.Text == "." ? t.TokenIndex : 0).Max();
    // Cache all rule stop states by mapping each rule's stop-state number into the states table.
    _stop_states = new HashSet<ATNState>();
    foreach (ATNState s in parser.Atn.ruleToStopState.Select(t => parser.Atn.states[t.stateNumber]))
    {
        _stop_states.Add(s);
    }
    // Same for rule start states.
    _start_states = new HashSet<ATNState>();
    foreach (ATNState s in parser.Atn.ruleToStartState.Select(t => parser.Atn.states[t.stateNumber]))
    {
        _start_states.Add(s);
    }
    // NOTE(review): the current stream position is saved here but never restored after
    // Seek(0) — verify callers do not rely on the stream position being preserved.
    int currentIndex = _token_stream.Index;
    _token_stream.Seek(0);
    int offset = 1;
    // Buffer every token up to EOF; _cursor ends at the last non-EOF token's index.
    while (true)
    {
        IToken token = _token_stream.LT(offset++);
        _input.Add(token);
        if (token.Type == TokenConstants.EOF)
        {
            break;
        }
        _cursor = token.TokenIndex;
    }
    List<List<Edge>> all_parses = EnterState(null);
    IntervalSet result = new IntervalSet();
    foreach (List<Edge> p in all_parses)
    {
        HashSet<ATNState> set = ComputeSingle(p);
        foreach (ATNState s in set)
        {
            foreach (Transition t in s.TransitionsArray)
            {
                switch (t.TransitionType)
                {
                    // Rule/predicate/wildcard transitions carry no concrete token label.
                    case TransitionType.RULE:
                        break;
                    case TransitionType.PREDICATE:
                        break;
                    case TransitionType.WILDCARD:
                        break;
                    default:
                        if (!t.IsEpsilon)
                        {
                            result.AddAll(t.Label);
                        }
                        break;
                }
            }
        }
    }
    return (result);
}
/// <summary>
/// Parses the source code passed to the compiler
/// </summary>
/// <param name="fileIndex">Source file index</param>
/// <param name="sourceItem">Source file item</param>
/// <param name="sourceText">Source text to parse</param>
/// <param name="parsedLines">Receives the processed source lines (includes expanded includes)</param>
/// <returns>True, if parsing was successful</returns>
private bool ExecuteParse(int fileIndex, SourceFileItem sourceItem, string sourceText, out List<SourceLineBase> parsedLines)
{
    // --- No lines has been parsed yet
    parsedLines = new List<SourceLineBase>();

    // --- Parse all source code lines
    var inputStream = new AntlrInputStream(sourceText);
    var lexer = new Z80AsmLexer(inputStream);
    var tokenStream = new CommonTokenStream(lexer);
    var parser = new Z80AsmParser(tokenStream);
    var context = parser.compileUnit();
    var visitor = new Z80AsmVisitor();
    visitor.Visit(context);
    var visitedLines = visitor.Compilation;

    // --- Store any tasks defined by the user
    StoreTasks(sourceItem, visitedLines.Lines);

    // --- Collect syntax errors
    foreach (var error in parser.SyntaxErrors)
    {
        ReportError(sourceItem, error);
    }

    // --- Exit if there are any errors
    if (Output.ErrorCount != 0)
    {
        return (false);
    }

    // --- Now, process directives and the .model pragma
    var currentLineIndex = 0;
    var ifdefStack = new Stack<bool?>();
    // processOps tracks whether the current #if/#ifdef branch emits its lines.
    var processOps = true;
    parsedLines = new List<SourceLineBase>();

    // --- Traverse through parsed lines
    while (currentLineIndex < visitedLines.Lines.Count)
    {
        var line = visitedLines.Lines[currentLineIndex];
        if (line is ModelPragma modelPragma)
        {
            ProcessModelPragma(modelPragma);
        }
        else if (line is IncludeDirective incDirective)
        {
            // --- Parse the included file
            if (ApplyIncludeDirective(incDirective, sourceItem, out var includedLines))
            {
                // --- Add the parse result of the include file to the result
                parsedLines.AddRange(includedLines);
            }
        }
        else if (line is Directive preProc)
        {
            ApplyDirective(preProc, ifdefStack, ref processOps);
        }
        else if (processOps)
        {
            // --- Ordinary line in an active branch: tag it with its file and raw text.
            line.FileIndex = fileIndex;
            line.SourceText = sourceText.Substring(line.FirstPosition, line.LastPosition - line.FirstPosition + 1);
            parsedLines.Add(line);
        }
        currentLineIndex++;
    }

    // --- Check if all #if and #ifdef has a closing #endif tag
    if (ifdefStack.Count > 0 && visitedLines.Lines.Count > 0)
    {
        ReportError(Errors.Z0062, visitedLines.Lines.Last());
    }
    return (Output.ErrorCount == 0);
}
/// <summary>
/// Compares the cell values of a worksheet against the corresponding formula strings,
/// trying a cascade of culture/format conversions for doubles and several parse
/// strategies for dates. Cells whose value matches the formula are recorded via
/// addConvertedValue; non-matching cells are lexed/parsed as Excel formulas and,
/// when valid, appended to <c>convertedText</c>.
/// </summary>
/// <param name="valueArray">Cell values. NOTE(review): loops run 1..GetLength — assumes
/// 1-based arrays as returned by Excel interop; confirm at the call site.</param>
/// <param name="formulaArray">Cell formula strings (same dimensions expected).</param>
private void compareArrays(object[,] valueArray, object[,] formulaArray)
{
    var x = valueArray.GetLength(0);
    var y = valueArray.GetLength(1);
    var xF = formulaArray.GetLength(0);
    var yF = formulaArray.GetLength(1);
    // Dimension mismatch is only logged, not fatal — the loops still use the value array's bounds.
    if (x != xF || y != yF)
    {
        Logger.DebugLine("Compare Arrays: x and y dimensions dont match between arrays", 1);
        Logger.DebugLine($"x: {x}, xF: {xF}, y: {y}, yF: {yF}", 1);
    }
    for (int i = 1; i <= x; i++)
    {
        for (int j = 1; j <= y; j++)
        {
            var value = valueArray[i, j];
            var formula = formulaArray[i, j];
            var equals = false;
            if (value != null && formula != null)
            {
                if (value is double)
                {
                    // 1) Invariant-culture "G" round-trip comparison.
                    string valueString = ((double)value).ToString("G", CultureInfo.InvariantCulture);
                    equals = ((string)formula).Equals(valueString);
                    var valueStringComma = valueString.Replace(".", ",");
                    var valueStringDot = valueString.Replace(",", ".");
                    var formulaStringComma = ((string)formula).Replace(".", ",");
                    var formulaStringDot = ((string)formula).Replace(",", ".");
                    Logger.DebugLine($"Value is double, value: {valueString}, formula: {(string)formula}, equals: {equals}", 1);
                    // 2) Decimal-separator swapped variants.
                    if (!equals)
                    {
                        equals = ((string)formula).Equals(valueStringComma) || ((string)formula).Equals(valueStringDot);
                        Logger.DebugLine($"Dot Comma conversion check, valueStringComma: {valueStringComma}, valueStringDot: {valueStringDot}, formula: {(string)formula}, equals: {equals}", 1);
                    }
                    // 3) Installed UI culture formatting.
                    if (!equals)
                    {
                        string valueStringInstalled = ((double)value).ToString("G", CultureInfo.InstalledUICulture);
                        equals = ((string)formula).Equals(valueStringInstalled);
                        Logger.DebugLine($"Installed UI Culture ({CultureInfo.InstalledUICulture}) Equal check, value: {valueStringInstalled}, formula: {(string)formula}, equals: {equals}", 1);
                    }
                    // 4) German culture formatting (comma decimal separator).
                    if (!equals)
                    {
                        var culture = CultureInfo.CreateSpecificCulture("de-DE");
                        string valueStringInstalled = ((double)value).ToString("G", culture);
                        equals = ((string)formula).Equals(valueStringInstalled);
                        Logger.DebugLine($"Specific Culture ({culture}) Equal check, value: {valueStringInstalled}, formula: {(string)formula}, equals: {equals}", 1);
                    }
                    // 5) Fixed-point format without scientific notation.
                    if (!equals)
                    {
                        string valueStringDoubleFixedPoint = ((double)value).ToString(FormatStrings.DoubleFixedPoint, CultureInfo.InvariantCulture);
                        equals = ((string)formula).Equals(valueStringDoubleFixedPoint);
                        Logger.DebugLine($"Format DoubleFixedPoint with Culture ({CultureInfo.InvariantCulture}) Equal check, value: {valueStringDoubleFixedPoint}, formula: {(string)formula}, equals: {equals}", 1);
                    }
                    // 6) Parse the formula string back to double and compare numerically.
                    if (!equals && double.TryParse((string)formula, out double parsedFormula))
                    {
                        equals = IsDoubleEqual((double)value, parsedFormula);
                        Logger.DebugLine($"Parsed formula to double, value: {(double)value}, parsedFormula: {parsedFormula}, equals: {equals}", 1);
                    }
                    // 7) Same numeric comparison with separator-swapped formula strings.
                    if (!equals && (double.TryParse(formulaStringComma, out parsedFormula) || double.TryParse(formulaStringDot, out parsedFormula)))
                    {
                        equals = IsDoubleEqual((double)value, parsedFormula);
                        Logger.DebugLine($"Parsed formula dot comma to double, value: {(double)value}, parsedFormula: {parsedFormula}, equals: {equals}", 1);
                    }
                }
                else if (value is DateTime)
                {
                    Logger.DebugLine($"Value is Datetime, value: {value.ToString()}, formula: {(string)formula}", 1);
                    // NOTE(review): all three successful parse branches set equals = true without
                    // comparing against the actual date value — this treats "formula parses as a
                    // date-like number" as a match; confirm that is intended.
                    if (int.TryParse((string)formula, out int result))
                    {
                        // Excel serial date: days since 1900-01-01.
                        DateTime date = new DateTime(1900, 1, 1).AddDays(result);
                        equals = true;
                        Logger.DebugLine($"Formula is int, date: {date.ToString()}, value date: {(DateTime)value}", 1);
                    }
                    else if (DateTime.TryParse((string)formula, out DateTime resultDate))
                    {
                        equals = true;
                        Logger.DebugLine($"Formula is datetime, resultDate: {resultDate.ToString()}, value date: {(DateTime)value}", 1);
                    }
                    else if (double.TryParse((string)formula, out double resultDouble))
                    {
                        equals = true;
                        //DateTime date = new DateTime(1900, 1, 1).AddDays(resultDouble);
                        Logger.DebugLine($"Formula is double, resultDouble: {resultDouble.ToString()}, value date: {(DateTime)value}", 1);
                    }
                    else
                    {
                        Logger.DebugLine($"Couldnt compare DateTime, value: {value.ToString()}, formula: {(string)formula}", 2);
                        equals = false;
                    }
                }
                else
                {
                    // Fallback: plain string comparison of the value's ToString form.
                    equals = formula.Equals(value.ToString());
                }
            }
            if (equals)
            {
                Logger.DebugLine($"{j}, {i}: Value equals Formula");
                addConvertedValue(value, i, j);
            }
            else if (value == formula)
            {
                // Reference equality (also covers both-null cells).
                Logger.DebugLine($"{j}, {i}: Value == Formula");
                addConvertedValue(value, i, j);
            }
            else if (value == null && string.IsNullOrEmpty((string)formula))
            {
                Logger.DebugLine($"{j}, {i}: Value equals Formula: Both empty");
            }
            else
            {
                // Mismatch: treat the cell as a real formula — lex/parse it and emit converted text.
                printObject(value, "Value");
                Logger.Debug("----");
                printObject(formula, "Formula");
                Logger.DebugLine("");
                Logger.DebugLine($"{j}, {i}: Value does not equal Formula", 1);
                SyntaxErrorListener errorListener = new SyntaxErrorListener();
                AntlrInputStream inputStream = new AntlrInputStream((string)formula);
                ExcelFormulaLexer spreadsheetLexer = new ExcelFormulaLexer(inputStream);
                spreadsheetLexer.RemoveErrorListeners();
                spreadsheetLexer.AddErrorListener(errorListener);
                CommonTokenStream commonTokenStream = new CommonTokenStream(spreadsheetLexer);
                ExcelFormulaParser excelFormulaParser = new ExcelFormulaParser(commonTokenStream);
                ExcelFormulaParser.ExcelExprContext context = excelFormulaParser.excelExpr();
                if (errorListener.HasError)
                {
                    Logger.DebugLine($"Found Lexer Error - Dont process formula at {i}, {j} : {(string)formula}", 5);
                    if (GlobalSettings.ImportStopAtSyntaxError)
                    {
                        Console.WriteLine($"Lexer Error - Enter to continue", 10);
                        Console.ReadLine();
                    }
                    continue;
                }
                if (excelFormulaParser.NumberOfSyntaxErrors > 0)
                {
                    Logger.DebugLine($"Found Syntax Error - Dont process formula at {i}, {j} : {(string)formula}", 5);
                    if (GlobalSettings.ImportStopAtSyntaxError)
                    {
                        Console.WriteLine($"Syntax Error - Enter to continue", 10);
                        Console.ReadLine();
                    }
                    continue;
                }
                ExcelFormulaVisitor visitor = new ExcelFormulaVisitor();
                string formulaText = visitor.Visit(context);
                if (visitor.Error)
                {
                    continue;
                }
                Logger.DebugLine($"FormulaText: {formulaText}", 1);
                convertedText += $"C[{j},{i}] = {{{formulaText}}} ;\n";
            }
        }
    }
}
/// <summary>
/// Lexes and parses the source file, registers its namespace imports (plain or aliased),
/// then defines every type, function, and operator found in it — types first, then
/// functions, then operators.
/// </summary>
public void Compile()
{
    var charStream = new AntlrFileStream(File.FullName);
    var leafLexer = new LeafLexer(charStream);
    var tokenStream = new CommonTokenStream(leafLexer);
    var leafParser = new LeafParser(tokenStream);

    var entry = leafParser.entry_point();
    var nsImports = entry.ns_import();
    var definitions = entry.def();

    // Register imported namespaces; an alias routes to NamespaceAliases instead.
    if (!nsImports.IsNullOrEmpty())
    {
        foreach (var nsImport in nsImports)
        {
            var aliasText = nsImport.alias?.Text;
            var namespaceName = nsImport.@namespace().GetText();
            var resolved = Namespace.Context.GetNamespace(namespaceName);
            if (aliasText != null)
            {
                NamespaceAliases.Add(aliasText, resolved);
            }
            else
            {
                ImportedNamespaces.Add(resolved);
            }
        }
    }

    // Bucket each definition (paired with its attributes) by kind.
    var typeList = new List<(LeafParser.Def_typeContext, LeafParser.Attribute_addContext[])>(definitions.Length);
    var funcList = new List<(LeafParser.Def_funcContext, LeafParser.Attribute_addContext[])>(definitions.Length);
    var operList = new List<(LeafParser.Def_operatorContext, LeafParser.Attribute_addContext[])>(definitions.Length);
    foreach (var definition in definitions)
    {
        var attributes = definition.attribute_add();
        var typeCtx = definition.def_type();
        if (typeCtx != null)
        {
            typeList.Add((typeCtx, attributes));
        }
        var funcCtx = definition.def_func();
        if (funcCtx != null)
        {
            funcList.Add((funcCtx, attributes));
        }
        var operCtx = definition.def_operator();
        if (operCtx != null)
        {
            operList.Add((operCtx, attributes));
        }
    }

    // Define in the same order as collected: all types, then functions, then operators.
    foreach (var (typeCtx, attributes) in typeList)
    {
        DefineType(typeCtx, attributes);
    }
    foreach (var (funcCtx, attributes) in funcList)
    {
        DefineFunction(funcCtx, attributes);
    }
    foreach (var (operCtx, attributes) in operList)
    {
        DefineOperator(operCtx, attributes);
    }
}
/// <summary>
/// Drives a single lex/parse run for the TestRig-style tool: fills the token stream,
/// optionally dumps tokens, configures diagnostics/SLL prediction, then invokes the
/// configured start rule via reflection and optionally prints/inspects/saves the tree.
/// NOTE(review): this is Sharpen-converted Java (Type&lt;_T0&gt;, NoSuchMethodException) —
/// the Java exception/type names come from the converter, not hand-written C#.
/// </summary>
/// <exception cref="System.IO.IOException"></exception>
/// <exception cref="System.MemberAccessException"></exception>
/// <exception cref="System.Reflection.TargetInvocationException"></exception>
/// <exception cref="Javax.Print.PrintException"></exception>
protected internal virtual void Process<_T0>(Lexer lexer, Type<_T0> parserClass, Parser parser, Stream @is, StreamReader r)
    where _T0 : Parser
{
    try
    {
        AntlrInputStream input = new AntlrInputStream(r);
        lexer.SetInputStream(input);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        tokens.Fill();
        if (showTokens)
        {
            foreach (object tok in tokens.GetTokens())
            {
                System.Console.Out.WriteLine(tok);
            }
        }
        // Lexer-only mode: nothing to parse.
        if (startRuleName.Equals(LexerStartRuleName))
        {
            return;
        }
        if (diagnostics)
        {
            parser.AddErrorListener(new DiagnosticErrorListener());
            parser.Interpreter.PredictionMode = PredictionMode.LlExactAmbigDetection;
        }
        if (printTree || gui || psFile != null)
        {
            parser.BuildParseTree = true;
        }
        if (Sll)
        {
            // overrides diagnostics
            parser.Interpreter.PredictionMode = PredictionMode.Sll;
        }
        parser.SetInputStream(tokens);
        parser.Trace = trace;
        try
        {
            // Look up the start rule by name and invoke it reflectively.
            MethodInfo startRule = parserClass.GetMethod(startRuleName, (Type[])null);
            ParserRuleContext tree = (ParserRuleContext)startRule.Invoke(parser, (object[])null);
            if (printTree)
            {
                System.Console.Out.WriteLine(tree.ToStringTree(parser));
            }
            if (gui)
            {
                tree.Inspect(parser);
            }
            if (psFile != null)
            {
                // Generate postscript
                tree.Save(parser, psFile);
            }
        }
        catch (NoSuchMethodException)
        {
            System.Console.Error.WriteLine("No method for rule " + startRuleName + " or it has arguments");
        }
    }
    finally
    {
        // Always release both the reader and the underlying stream.
        if (r != null)
        {
            r.Close();
        }
        if (@is != null)
        {
            @is.Close();
        }
    }
}
/// <summary>
/// Runs the given start rule with the requested prediction mode (LL or SLL) and
/// pairs the resulting parse tree with a rewriter over the token stream.
/// </summary>
private static (IParseTree parseTree, TokenStreamRewriter rewriter) ParseInternal(ParserMode mode, VBAParser parser, CommonTokenStream tokenStream, ParserStartRule startRule)
{
    // Select the prediction mode before invoking the start rule.
    parser.Interpreter.PredictionMode = mode == ParserMode.Ll
        ? PredictionMode.Ll
        : PredictionMode.Sll;
    var parseTree = startRule.Invoke(parser);
    return (parseTree, new TokenStreamRewriter(tokenStream));
}
/// <summary>
/// Logs the warning and the exception (tagged with the parser's current prediction
/// mode), then rewinds the token stream and resets the parser for a retry.
/// </summary>
private static void LogAndReset(string logWarnMessage, Exception exception, VBAParser parser, CommonTokenStream tokenStream)
{
    Logger.Warn(logWarnMessage);

    var predictionMode = parser.Interpreter.PredictionMode;
    string debugMessage;
    if (predictionMode == PredictionMode.Sll)
    {
        debugMessage = "SLL mode exception";
    }
    else if (predictionMode == PredictionMode.Ll)
    {
        debugMessage = "LL mode exception";
    }
    else
    {
        debugMessage = "Unknown exception";
    }
    Logger.Debug(exception, debugMessage);

    tokenStream.Reset();
    parser.Reset();
}
/// <summary>
/// Computes the QuickInfo (hover) content for the trigger point: lexes the whole
/// snapshot, finds the token under the trigger, walks the simplified ATN backwards to
/// locate candidate expression spans, re-parses each span as a primary expression, and
/// finally stores the results and retriggers the session on the UI thread.
/// </summary>
private void UpdateQuickInfoContent(IQuickInfoSession session, SnapshotPoint triggerPoint)
{
    /* use the experimental model to locate and process the expression */
    Stopwatch stopwatch = Stopwatch.StartNew();

    // lex the entire document
    var currentSnapshot = triggerPoint.Snapshot;
    var input = new SnapshotCharStream(currentSnapshot, new Span(0, currentSnapshot.Length));
    var unicodeInput = new JavaUnicodeStream(input);
    var lexer = new Java2Lexer(unicodeInput);
    var tokens = new CommonTokenStream(lexer);
    tokens.Fill();

    // locate the last token before the trigger point
    while (true)
    {
        IToken nextToken = tokens.LT(1);
        if (nextToken.Type == CharStreamConstants.EndOfFile)
        {
            break;
        }
        if (nextToken.StartIndex > triggerPoint.Position)
        {
            break;
        }
        tokens.Consume();
    }
    IToken triggerToken = tokens.LT(-1);
    if (triggerToken == null)
    {
        return;
    }

    // Only show QuickInfo for token kinds that can denote a value or type.
    switch (triggerToken.Type)
    {
    // symbol references
    case Java2Lexer.IDENTIFIER:
    case Java2Lexer.THIS:
    case Java2Lexer.SUPER:
    // primitive types
    case Java2Lexer.BOOLEAN:
    case Java2Lexer.CHAR:
    case Java2Lexer.BYTE:
    case Java2Lexer.SHORT:
    case Java2Lexer.INT:
    case Java2Lexer.LONG:
    case Java2Lexer.FLOAT:
    case Java2Lexer.DOUBLE:
    // literals
    case Java2Lexer.INTLITERAL:
    case Java2Lexer.LONGLITERAL:
    case Java2Lexer.FLOATLITERAL:
    case Java2Lexer.DOUBLELITERAL:
    case Java2Lexer.CHARLITERAL:
    case Java2Lexer.STRINGLITERAL:
    case Java2Lexer.TRUE:
    case Java2Lexer.FALSE:
    case Java2Lexer.NULL:
        break;
    default:
        return;
    }

    // Step the interpreter backwards until all contexts are bounded or the
    // context count explodes (> 400) — a safety valve against runaway search.
    NetworkInterpreter interpreter = CreateNetworkInterpreter(tokens);
    while (interpreter.TryStepBackward())
    {
        if (interpreter.Contexts.Count == 0 || interpreter.Contexts.Count > 400)
        {
            break;
        }
        if (interpreter.Contexts.All(context => context.BoundedStart))
        {
            break;
        }
    }
    interpreter.Contexts.RemoveAll(i => !i.BoundedStart);
    interpreter.CombineBoundedStartContexts();

    IOutputWindowPane pane = Provider.OutputWindowService.TryGetPane(PredefinedOutputWindowPanes.TvlIntellisense);
    if (pane != null)
    {
        pane.WriteLine(string.Format("Located {0} QuickInfo expression(s) in {1}ms.", interpreter.Contexts.Count, stopwatch.ElapsedMilliseconds));
    }

    HashSet<string> intermediateResult = new HashSet<string>();
    HashSet<string> finalResult = new HashSet<string>();
    List<object> quickInfoContent = new List<object>();
    foreach (var context in interpreter.Contexts)
    {
        // Union the spans of all matched tokens in this context.
        Span? span = null;
        foreach (var transition in context.Transitions)
        {
            if (!transition.Transition.IsMatch)
            {
                continue;
            }
            IToken token = transition.Token;
            Span tokenSpan = new Span(token.StartIndex, token.StopIndex - token.StartIndex + 1);
            if (span == null)
            {
                span = tokenSpan;
            }
            else
            {
                span = Span.FromBounds(Math.Min(span.Value.Start, tokenSpan.Start), Math.Max(span.Value.End, tokenSpan.End));
            }
        }
        if (span.HasValue && !span.Value.IsEmpty)
        {
            string text = currentSnapshot.GetText(span.Value);
            // intermediateResult de-duplicates candidate expression texts.
            if (!intermediateResult.Add(text))
            {
                continue;
            }
            AstParserRuleReturnScope<CommonTree, CommonToken> result = null;
            try
            {
                // Re-parse the candidate text as a primary expression.
                var expressionInput = new ANTLRStringStream(text);
                var expressionUnicodeInput = new JavaUnicodeStream(expressionInput);
                var expressionLexer = new Java2Lexer(expressionUnicodeInput);
                var expressionTokens = new CommonTokenStream(expressionLexer);
                var expressionParser = new Java2Parser(expressionTokens);
                result = expressionParser.primary();

                // anchors experiment
                Contract.Assert(TextBuffer.CurrentSnapshot == triggerPoint.Snapshot);
                ClassAnchorTracker tracker = new ClassAnchorTracker(TextBuffer, null);
                SnapshotSpan trackedSpan = new SnapshotSpan(triggerPoint.Snapshot, 0, triggerPoint.Position);
                ITagSpan<ScopeAnchorTag>[] tags = tracker.GetTags(new NormalizedSnapshotSpanCollection(trackedSpan)).ToArray();

                text = result.Tree.ToStringTree();
            }
            catch (RecognitionException)
            {
                text = "Could not parse: " + text;
            }
            text = text.Replace("\n", "\\n").Replace("\r", "\\r");
            finalResult.Add(text);

            //if (Regex.IsMatch(text, @"^[A-Za-z_]+(?:\.\w+)*$"))
            //{
            //    NameResolutionContext resolutionContext = NameResolutionContext.Global(Provider.IntelliSenseCache);
            //    resolutionContext = resolutionContext.Filter(text, null, true);
            //    CodeElement[] matching = resolutionContext.GetMatchingElements();
            //    if (matching.Length > 0)
            //    {
            //        foreach (var element in matching)
            //        {
            //            element.AugmentQuickInfoSession(quickInfoContent);
            //        }
            //    }
            //    else
            //    {
            //        // check if this is a package
            //        CodePhysicalFile[] files = Provider.IntelliSenseCache.GetPackageFiles(text, true);
            //        if (files.Length > 0)
            //        {
            //            finalResult.Add(string.Format("package {0}", text));
            //        }
            //        else
            //        {
            //            // check if this is a type
            //            string typeName = text.Substring(text.LastIndexOf('.') + 1);
            //            CodeType[] types = Provider.IntelliSenseCache.GetTypes(typeName, true);
            //            foreach (var type in types)
            //            {
            //                if (type.FullName == text)
            //                    finalResult.Add(string.Format("{0}: {1}", type.GetType().Name, type.FullName));
            //            }
            //        }
            //    }
            //}
            //else
            //{
            //    finalResult.Add(text);
            //}
        }
    }

    ITrackingSpan applicableToSpan = null;
    foreach (var result in finalResult)
    {
        quickInfoContent.Add(result);
    }
    applicableToSpan = currentSnapshot.CreateTrackingSpan(new Span(triggerToken.StartIndex, triggerToken.StopIndex - triggerToken.StartIndex + 1), SpanTrackingMode.EdgeExclusive);

    //try
    //{
    //    Expression currentExpression = Provider.IntellisenseCache.ParseExpression(selection);
    //    if (currentExpression != null)
    //    {
    //        SnapshotSpan? span = currentExpression.Span;
    //        if (span.HasValue)
    //            applicableToSpan = span.Value.Snapshot.CreateTrackingSpan(span.Value, SpanTrackingMode.EdgeExclusive);
    //        quickInfoContent.Add(currentExpression.ToString());
    //    }
    //    else
    //    {
    //        quickInfoContent.Add("Could not parse expression.");
    //    }
    //}
    //catch (Exception ex)
    //{
    //    if (ErrorHandler.IsCriticalException(ex))
    //        throw;
    //    quickInfoContent.Add(ex.Message);
    //}

    // Publish the results under the lock, then retrigger the session on the UI dispatcher.
    lock (_contentUpdateLock)
    {
        _triggerPoint = triggerPoint;
        _applicableToSpan = applicableToSpan;
        _quickInfoContent = quickInfoContent;
    }

    IWpfTextView wpfTextView = session.TextView as IWpfTextView;
    if (wpfTextView != null && wpfTextView.VisualElement != null)
    {
        ITrackingPoint trackingTriggerPoint = triggerPoint.Snapshot.CreateTrackingPoint(triggerPoint.Position, PointTrackingMode.Negative);
        wpfTextView.VisualElement.Dispatcher.BeginInvoke((Action<IQuickInfoSession, ITrackingPoint, bool>)RetriggerQuickInfo, session, trackingTriggerPoint, true);
    }
}
/// <summary>
/// Builds an ASTLibModel from a lib-function parse context: the root call plus any
/// chained calls. Leading chain elements without parentheses are folded into the class
/// identifier; the rest remain as the invocation chain.
/// </summary>
/// <param name="ctx">Lib-function context to convert.</param>
/// <param name="tokenStream">Token stream, used only for error reporting.</param>
/// <returns>The assembled model; throws ASTWalkException if no callable chain remains.</returns>
private static ASTLibModel GetModel(EsperEPL2GrammarParser.LibFunctionContext ctx, CommonTokenStream tokenStream)
{
    var root = ctx.libFunctionWithClass();
    IList<EsperEPL2GrammarParser.LibFunctionNoClassContext> ctxElements = ctx.libFunctionNoClass();

    // there are no additional methods
    if (ctxElements == null || ctxElements.IsEmpty())
    {
        var classIdent = root.classIdentifier() == null ? null : ASTUtil.UnescapeClassIdent(root.classIdentifier());
        var ele = FromRoot(root);
        return (new ASTLibModel(classIdent, Collections.SingletonList(ele)));
    }

    // add root and chain to just a list of elements
    var chainElements = new List<ASTLibModelChainElement>(ctxElements.Count + 1);
    var rootElement = FromRoot(root);
    chainElements.Add(rootElement);
    foreach (var chainedCtx in ctxElements)
    {
        // 'l' is the left-parenthesis token: present => the element is a real call.
        var chainedElement = new ASTLibModelChainElement(chainedCtx.funcIdentChained().GetText(), chainedCtx.libFunctionArgs(), chainedCtx.l != null);
        chainElements.Add(chainedElement);
    }

    // determine/remove the list of chain elements, from the start and uninterrupted, that don't have parameters (no parenthesis 'l')
    var chainElementsNoArgs = new List<ASTLibModelChainElement>(chainElements.Count);
    for (var ii = 0; ii < chainElements.Count; ii++)
    {
        var element = chainElements[ii];
        if (!element.HasLeftParen)
        {
            // has no parenthesis, therefore part of class identifier
            // (ii-- compensates for RemoveAt shifting subsequent elements left)
            chainElementsNoArgs.Add(element);
            chainElements.RemoveAt(ii);
            ii--;
        }
        else
        {
            // else stop here
            break;
        }
    }

    // write the class identifier including the no-arg chain elements
    var classIdentBuf = new StringWriter();
    var delimiter = "";
    if (root.classIdentifier() != null)
    {
        classIdentBuf.Write(ASTUtil.UnescapeClassIdent(root.classIdentifier()));
        delimiter = ".";
    }
    foreach (var noarg in chainElementsNoArgs)
    {
        classIdentBuf.Write(delimiter);
        classIdentBuf.Write(noarg.FuncName);
        delimiter = ".";
    }

    if (chainElements.IsEmpty())
    {
        // would this be an event property, but then that is handled greedily by parser
        throw ASTWalkException.From("Encountered unrecognized lib function call", tokenStream, ctx);
    }

    // class ident can be null if empty
    var classIdentifierString = classIdentBuf.ToString();
    var classIdentifier = classIdentifierString.Length > 0 ? classIdentifierString : null;
    return (new ASTLibModel(classIdentifier, chainElements));
}
/// <summary>
/// Creates a NetworkInterpreter for backward walking from the token preceding the
/// stream position, seeding its boundary rules according to that token's type.
/// Returns an interpreter over an empty token stream (an effective no-op) when there
/// is no previous token or its type is not handled.
/// </summary>
private NetworkInterpreter CreateNetworkInterpreter(CommonTokenStream tokens)
{
    Network network = NetworkBuilder<JavaSimplifiedAtnBuilder>.GetOrBuildNetwork();
    NetworkInterpreter interpreter = new NetworkInterpreter(network, tokens);
    IToken previousToken = tokens.LT(-1);
    if (previousToken == null)
    {
        // Nothing before the trigger point — return an inert interpreter.
        return (new NetworkInterpreter(network, new CommonTokenStream()));
    }
    switch (previousToken.Type)
    {
    case Java2Lexer.IDENTIFIER:
        // definitions always appear as a single identifier (at least the part of them we care about for Quick Info)
        interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.SymbolDefinitionIdentifier));
        interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.ClassOrInterfaceType));
        interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.QualifiedName));
        interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.ElementValuePair));
        interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.Statement));
        interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.Primary));
        interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.InnerCreator));
        break;
    case Java2Lexer.SUPER:
        interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.Primary));
        interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.TypeArgument));
        interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.ExplicitConstructorInvocation));
        break;
    case Java2Lexer.THIS:
        interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.Primary));
        interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.ExplicitConstructorInvocation));
        break;
    case Java2Lexer.CLASS:
        // 'class' may end a class literal (Primary) but must not start a class header.
        interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.Primary));
        interpreter.ExcludedStartRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.ClassHeader));
        break;
    // Primitive type keywords all bound at the PrimitiveType rule.
    case Java2Lexer.BOOLEAN:
    case Java2Lexer.CHAR:
    case Java2Lexer.BYTE:
    case Java2Lexer.SHORT:
    case Java2Lexer.INT:
    case Java2Lexer.LONG:
    case Java2Lexer.FLOAT:
    case Java2Lexer.DOUBLE:
        interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.PrimitiveType));
        break;
    // All literal tokens bound at the Literal rule.
    case Java2Lexer.INTLITERAL:
    case Java2Lexer.LONGLITERAL:
    case Java2Lexer.FLOATLITERAL:
    case Java2Lexer.DOUBLELITERAL:
    case Java2Lexer.CHARLITERAL:
    case Java2Lexer.STRINGLITERAL:
    case Java2Lexer.TRUE:
    case Java2Lexer.FALSE:
    case Java2Lexer.NULL:
        interpreter.BoundaryRules.Add(network.GetRule(JavaAtnBuilder.RuleNames.Literal));
        break;
    default:
        // Unsupported token kind — inert interpreter.
        return (new NetworkInterpreter(network, new CommonTokenStream()));
    }
    return (interpreter);
}
// This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
// Registers a POST /parse endpoint that lexes/parses the request body as a DOT graph and
// returns a hand-built JSON object with lexer/parser error messages and the names the
// listener collected for hover information.
public void Configure(IApplicationBuilder app, IHostingEnvironment env, ILoggerFactory loggerFactory)
{
    loggerFactory.AddConsole();
    if (env.IsDevelopment())
    {
        app.UseDeveloperExceptionPage();
    }
    var routeBuilder = new RouteBuilder(app);
    routeBuilder.MapPost("parse", context =>
    {
        // Read the whole request body as the DOT source text.
        string text = "";
        using (StreamReader reader = new StreamReader(context.Request.Body))
        {
            text = reader.ReadToEnd();
        }
        AntlrInputStream inputStream = new AntlrInputStream(text);
        DOTLexer lexer = new DOTLexer(inputStream);
        CommonTokenStream commonTokenStream = new CommonTokenStream(lexer);
        DOTParser parser = new DOTParser(commonTokenStream);
        // the listener gathers the names for the hover information
        DOTLanguageListener listener = new DOTLanguageListener();
        DOTErrorListener errorListener = new DOTErrorListener();
        DOTLexerErrorListener lexerErrorListener = new DOTLexerErrorListener();
        // Replace the default console listeners with the collecting ones.
        lexer.RemoveErrorListeners();
        lexer.AddErrorListener(lexerErrorListener);
        parser.RemoveErrorListeners();
        parser.AddErrorListener(errorListener);
        GraphContext graph = parser.graph();
        ParseTreeWalker.Default.Walk(listener, graph);
        // Build the response JSON by hand: {"errors": [...], "names": [...]}.
        StringBuilder json = new StringBuilder();
        json.Append("{");
        json.Append("\"errors\": [");
        json.Append(convertMessagesToJson(lexerErrorListener.Messages));
        json.Append(convertMessagesToJson(errorListener.Messages));
        if (lexerErrorListener.Messages.Count + errorListener.Messages.Count > 0)
        {
            // NOTE(review): removes one char two positions from the end — presumably the
            // trailing comma emitted by convertMessagesToJson; verify against that helper.
            json.Remove(json.Length - 2, 1);
        }
        json.Append("], ");
        json.Append("\"names\": [");
        json.Append(convertNamesToJson(listener.Names));
        json.Append("]");
        json.Append("}");
        return (context.Response.WriteAsync(json.ToString()));
    });
    var routes = routeBuilder.Build();
    app.UseRouter(routes);
}
/// <summary>
/// Parses a C# source fragment through the three-stage cs2j pipeline
/// (csParser -> JavaMaker tree rewrite -> optionally NetMaker) and returns the
/// resulting AST. The stage entry rule is looked up by name via reflection.
/// </summary>
/// <param name="startRule">Name of the grammar rule to invoke on each stage (e.g. "namespace_body").</param>
/// <param name="inStr">The C# fragment to parse.</param>
/// <param name="includeNet">If true, also runs the NetMaker stage and merges its imports.</param>
/// <returns>The resulting AST root.</returns>
public CommonTree parseString(string startRule, string inStr, bool includeNet)
{
    if (Cfg.Verbosity > 5)
    {
        Console.WriteLine("Parsing fragment ");
    }
    ICharStream input = new ANTLRStringStream(inStr);
    // Stage 1: preprocess + parse the C# text.
    PreProcessor lex = new PreProcessor();
    lex.AddDefine(Cfg.MacroDefines);
    lex.CharStream = input;
    lex.TraceDestination = Console.Error;
    CommonTokenStream tokens = new CommonTokenStream(lex);
    csParser p = new csParser(tokens);
    p.TraceDestination = Console.Error;
    p.IsJavaish = true;
    // Try and call a rule like CSParser.namespace_body()
    // Use reflection to find the rule to use.
    MethodInfo mi = p.GetType().GetMethod(startRule);
    if (mi == null)
    {
        throw new Exception("Could not find start rule " + startRule + " in csParser");
    }
    ParserRuleReturnScope csRet = (ParserRuleReturnScope)mi.Invoke(p, new object[0]);
    CommonTreeNodeStream csTreeStream = new CommonTreeNodeStream(csRet.Tree);
    csTreeStream.TokenStream = tokens;
    // Stage 2: rewrite the C# AST to a Java-ish AST.
    JavaMaker javaMaker = new JavaMaker(csTreeStream);
    javaMaker.TraceDestination = Console.Error;
    javaMaker.Cfg = Cfg;
    javaMaker.IsJavaish = true;
    javaMaker.InitParser();
    // Try and call a rule like CSParser.namespace_body()
    // Use reflection to find the rule to use.
    mi = javaMaker.GetType().GetMethod(startRule);
    if (mi == null)
    {
        throw new Exception("Could not find start rule " + startRule + " in javaMaker");
    }
    TreeRuleReturnScope javaSyntaxRet = (TreeRuleReturnScope)mi.Invoke(javaMaker, new object[0]);
    CommonTree retAST = (CommonTree)javaSyntaxRet.Tree;
    if (includeNet)
    {
        // Stage 3: translate .NET references; always enters via class_member_declarations.
        CommonTreeNodeStream javaSyntaxNodes = new CommonTreeNodeStream(retAST);
        javaSyntaxNodes.TokenStream = csTreeStream.TokenStream;
        NetMaker netMaker = new NetMaker(javaSyntaxNodes);
        netMaker.TraceDestination = Console.Error;
        netMaker.Cfg = Cfg;
        netMaker.AppEnv = AppEnv;
        netMaker.IsJavaish = true;
        netMaker.InitParser();
        retAST = (CommonTree)netMaker.class_member_declarations().Tree;
        // snaffle additional imports
        this.AddToImports(netMaker.Imports);
    }
    return (retAST);
}
/// <summary>
/// Classifies an EPL module item by its first meaningful token: "module"/"uses"/"import"
/// produce the corresponding metadata node, anything else is an expression, and
/// whitespace/comment-only input becomes a comment node. For metadata items the
/// remaining tokens are concatenated into the identifier value.
/// </summary>
/// <param name="item">Module item whose expression text is classified.</param>
/// <param name="resourceName">Resource name used in error messages.</param>
/// <returns>The matching ParseNode subtype.</returns>
public static ParseNode GetModule(EPLModuleParseItem item, String resourceName)
{
    var input = new NoCaseSensitiveStream(item.Expression);
    var lex = ParseHelper.NewLexer(input);
    var tokenStream = new CommonTokenStream(lex);
    tokenStream.Fill();
    var tokens = tokenStream.GetTokens();
    var beginIndex = 0;
    var isMeta = false;
    var isModule = false;
    var isUses = false;
    var isExpression = false;
    // Scan past whitespace/comments to the first meaningful token, classify it, then stop.
    while (beginIndex < tokens.Count)
    {
        var t = tokens[beginIndex];
        if (t.Type == EsperEPL2GrammarParser.Eof)
        {
            break;
        }
        if ((t.Type == EsperEPL2GrammarParser.WS) || (t.Type == EsperEPL2GrammarParser.SL_COMMENT) || (t.Type == EsperEPL2GrammarParser.ML_COMMENT))
        {
            beginIndex++;
            continue;
        }
        var tokenText = t.Text.Trim().ToLower();
        switch (tokenText)
        {
            case "module":
                isModule = true;
                isMeta = true;
                break;
            case "uses":
                isUses = true;
                isMeta = true;
                break;
            case "import":
                isMeta = true;
                break;
            default:
                isExpression = true;
                break;
        }
        // Advance past the keyword and the following token, then stop the scan.
        // NOTE(review): the second increment assumes exactly one separator token
        // follows the keyword — verify against the lexer's WS handling.
        beginIndex++;
        beginIndex++; // skip space
        break;
    }
    if (isExpression)
    {
        return (new ParseNodeExpression(item));
    }
    if (!isMeta)
    {
        // Only whitespace/comments were found.
        return (new ParseNodeComment(item));
    }
    // check meta tag (module, uses, import)
    var buffer = new StringWriter();
    for (int i = beginIndex; i < tokens.Count; i++)
    {
        var t = tokens[i];
        if (t.Type == EsperEPL2GrammarParser.Eof)
        {
            break;
        }
        // Only identifier-like tokens may form the module/uses/import value.
        if ((t.Type != EsperEPL2GrammarParser.IDENT) && (t.Type != EsperEPL2GrammarParser.DOT) && (t.Type != EsperEPL2GrammarParser.STAR) && (!t.Text.Matches("[a-zA-Z]*")))
        {
            throw GetMessage(isModule, isUses, resourceName, t.Type);
        }
        buffer.Write(t.Text.Trim());
    }
    String result = buffer.ToString().Trim();
    if (result.Length == 0)
    {
        throw GetMessage(isModule, isUses, resourceName, -1);
    }
    if (isModule)
    {
        return (new ParseNodeModule(item, result));
    }
    else if (isUses)
    {
        return (new ParseNodeUses(item, result));
    }
    return (new ParseNodeImport(item, result));
}
/// <summary>
/// Lexes the sample SQL and inserts a "where 1=0" where-clause so the query
/// can be executed purely to retrieve column metadata without returning rows.
/// Union queries are split and each sub-select is processed recursively.
/// </summary>
/// <param name="querySQL">to inspect using lexer</param>
/// <returns>sample SQL with where-clause inserted</returns>
/// <throws>ExprValidationException to indicate a lexer problem</throws>
public static string LexSampleSQL(string querySQL)
{
    // Collapse runs of whitespace and newlines so token columns map 1:1
    // onto character offsets within a single-line string.
    querySQL = querySQL.RegexReplaceAll("\\s\\s+|\\n|\\r", " ");
    var input = new CaseInsensitiveInputStream(querySQL);
    var whereIndex = -1;
    var groupbyIndex = -1;
    var havingIndex = -1;
    var orderByIndex = -1;
    IList<int> unionIndexes = new List<int>();
    var lex = ParseHelper.NewLexer(input);
    var tokens = new CommonTokenStream(lex);
    tokens.Fill();
    var tokenList = tokens.GetTokens();
    // Record the 1-based character position of each clause keyword.
    // NOTE(review): token.Column appears to be a 0-based character offset,
    // hence the +1 — confirm against the lexer implementation.
    for (var i = 0; i < tokenList.Count; i++)
    {
        var token = tokenList[i];
        if ((token == null) || token.Text == null)
        {
            break;
        }
        var text = token.Text.ToLowerInvariant().Trim();
        if (text.Equals("where"))
        {
            whereIndex = token.Column + 1;
        }
        if (text.Equals("group"))
        {
            groupbyIndex = token.Column + 1;
        }
        if (text.Equals("having"))
        {
            havingIndex = token.Column + 1;
        }
        if (text.Equals("order"))
        {
            orderByIndex = token.Column + 1;
        }
        if (text.Equals("union"))
        {
            unionIndexes.Add(token.Column + 1);
        }
    }
    // If we have a union, break string into subselects and process each
    if (unionIndexes.Count != 0)
    {
        var changedSQL = new StringWriter();
        var lastIndex = 0;
        for (var i = 0; i < unionIndexes.Count; i++)
        {
            var index = unionIndexes[i];
            string fragmentX;
            // The +5 skips over the "union" keyword ("union".Length == 5)
            // of the previous fragment; the first fragment has no keyword
            // before it.
            if (i > 0)
            {
                fragmentX = querySQL.Between(lastIndex + 5, index - 1);
            }
            else
            {
                fragmentX = querySQL.Between(lastIndex, index - 1);
            }
            // Recurse so each sub-select gets its own "where 1=0".
            var lexedFragmentX = LexSampleSQL(fragmentX);
            if (i > 0)
            {
                changedSQL.Write("union ");
            }
            changedSQL.Write(lexedFragmentX);
            lastIndex = index - 1;
        }
        // last part after last union
        var fragment = querySQL.Substring(lastIndex + 5);
        var lexedFragment = LexSampleSQL(fragment);
        changedSQL.Write("union ");
        changedSQL.Write(lexedFragment);
        return changedSQL.ToString();
    }
    // Found a where clause, simplest cases
    if (whereIndex != -1)
    {
        // Splice "1=0 and " directly after the "where" keyword
        // ("where".Length == 5), keeping the existing predicate.
        var changedSQL = new StringWriter();
        var prefix = querySQL.Substring(0, whereIndex + 5);
        var suffix = querySQL.Substring(whereIndex + 5);
        changedSQL.Write(prefix);
        changedSQL.Write("1=0 and ");
        changedSQL.Write(suffix);
        return changedSQL.ToString();
    }
    // No where clause, find group-by
    int insertIndex;
    if (groupbyIndex != -1)
    {
        insertIndex = groupbyIndex;
    }
    else if (havingIndex != -1)
    {
        insertIndex = havingIndex;
    }
    else if (orderByIndex != -1)
    {
        insertIndex = orderByIndex;
    }
    else
    {
        // No trailing clauses at all: append the where-clause at the end.
        var changedSQL = new StringWriter();
        changedSQL.Write(querySQL);
        changedSQL.Write(" where 1=0 ");
        return changedSQL.ToString();
    }
    try
    {
        // Insert "where 1=0 " immediately before the first trailing clause
        // (group by / having / order by).
        var changedSQL = new StringWriter();
        var prefix = querySQL.Substring(0, insertIndex - 1);
        changedSQL.Write(prefix);
        changedSQL.Write("where 1=0 ");
        var suffix = querySQL.Substring(insertIndex - 1);
        changedSQL.Write(suffix);
        return changedSQL.ToString();
    }
    catch (Exception ex)
    {
        var text =
            "Error constructing sample SQL to retrieve metadata for ADO-drivers that don't support metadata, consider using the " +
            SAMPLE_WHERECLAUSE_PLACEHOLDER +
            " placeholder or providing a sample SQL";
        Log.Error(text, ex);
        throw new ExprValidationException(text, ex);
    }
}
/// <summary>
/// Parses and evaluates <paramref name="formula"/> with the calculator
/// grammar. This will attempt to recalculate if an error was encountered:
/// the formula is rebuilt from the lexed tokens (which drops whitespace and
/// comment noise that can break number literals and function qualifiers) and
/// evaluated once more with <paramref name="secondRun"/> set.
/// </summary>
/// <param name="formula">The formula text to evaluate; null/blank yields a valid zero result.</param>
/// <param name="secondRun">True when this call is already the retry, preventing further recursion.</param>
/// <returns>A <see cref="CalculationResult"/> carrying the value or the first-pass error details.</returns>
private static CalculationResult CalculateResult(string formula, bool secondRun)
{
    // Empty input is deliberately valid and evaluates to zero.
    if (string.IsNullOrWhiteSpace(formula))
    {
        return(new CalculationResult { IsValid = true, Result = 0 });
    }

    var inputStream = new AntlrInputStream(formula);
    var lexer = new CalculatorLexer(inputStream);
    var tokenStream = new CommonTokenStream(lexer);
    var parser = new CalculatorParser(tokenStream);

    // Removing default error listeners due to noise in debug
    lexer.RemoveErrorListeners();
    parser.RemoveErrorListeners();

    // But adding the custom one
    var customErrorListener = new CalculatorErrorListener();
    parser.AddErrorListener(customErrorListener);

    var visitor = new CalculatorVisitor();
    var calculatorExpression = parser.calculator().expression();
    var result = visitor.Visit(calculatorExpression);

    var isValid = customErrorListener.IsValid;
    var errorLocation = customErrorListener.ErrorLocation;
    var errorMessage = customErrorListener.ErrorMessage;

    // Overflow to +/-infinity is treated as invalid.
    if (double.IsInfinity(result))
    {
        isValid = false;
    }

    if (!isValid && !secondRun)
    {
        // Rebuild the formula from the token texts, skipping the final token
        // (presumably EOF — matches the original Count - 1 bound; confirm
        // against the grammar). string.Concat replaces the original O(n^2)
        // string += loop.
        var tokenList = tokenStream.GetTokens();
        var cleanedFormula = string.Concat(
            tokenList.Take(tokenList.Count - 1).Select(t => t.Text));

        var originalErrorLocation = errorLocation;
        var retriedResult = CalculateResult(cleanedFormula, true);
        if (!retriedResult.IsValid)
        {
            // Surface the first pass's error position/message, not the
            // retry's — positions in the cleaned formula would mislead.
            retriedResult.ErrorPosition = originalErrorLocation;
            retriedResult.ErrorMessage = errorMessage;
        }
        return(retriedResult);
    }

    return(new CalculationResult
    {
        IsValid = isValid,
        // Infinity is returned as-is (already flagged invalid above);
        // any other invalid evaluation maps to NaN.
        Result = isValid || double.IsInfinity(result) ? result : double.NaN,
        ErrorPosition = errorLocation,
        ErrorMessage = isValid ? null : errorMessage
    });
}
/// <summary>
/// Creates a CST builder over the given ANTLR3 token stream. This is a pure
/// pass-through to the base constructor; the error-memorizing behavior implied
/// by the type name presumably lives in other members — not visible here.
/// </summary>
/// <param name="stream">Token stream produced by the lexer.</param>
/// <param name="tokenNames">Token display names, typically used for error reporting.</param>
public CstBuilderForAntlr3WithMemorizingError(CommonTokenStream stream, string[] tokenNames)
    : base(stream, tokenNames)
{
}
/// <summary>
/// Creates a grammar-specific <see cref="Parser"/> over the given token stream.
/// Implemented by derived classes for each supported language/grammar.
/// </summary>
/// <param name="tokenStream">The token stream the parser should consume.</param>
/// <returns>A parser instance bound to <paramref name="tokenStream"/>.</returns>
public abstract Parser GetParser(CommonTokenStream tokenStream);
/// <summary>
/// Builds a parser for the given source code: wraps the text in a
/// character stream, lexes it into tokens, and hands the resulting
/// token stream to a new parser instance.
/// </summary>
/// <param name="source">The source code</param>
/// <returns>The Psimulex Parser</returns>
private static PsimulexParser CreateParser(string source)
{
    // Character stream -> lexer -> token stream -> parser.
    var lexer = new PsimulexLexer(new ANTLRStringStream(source));
    var tokenStream = new CommonTokenStream(lexer);
    return new PsimulexParser(tokenStream);
}