// Parses a C# file into a compilation unit, handling preprocessor directives
// (#if/#else/#endif ...) by feeding each directive to a secondary preprocessor
// parser and dropping tokens that fall inside inactive conditional regions.
private CSharpParser.Compilation_unitContext ParseFile(string file)
{
    IList <IToken> codeTokens = new List <IToken>();
    string sourceCode = File.ReadAllText(file);
    Lexer preprocessorLexer = new CSharpLexer(new AntlrInputStream(sourceCode));
    // Lex the whole file up front so '#' directives can be scanned manually.
    IList <IToken> tokens = preprocessorLexer.GetAllTokens();
    IList <IToken> directiveTokens = new List <IToken>();
    ListTokenSource directiveTokenSource = new ListTokenSource(directiveTokens);
    CommonTokenStream directiveTokenStream = new CommonTokenStream(directiveTokenSource, CSharpLexer.DIRECTIVE);
    CSharpPreprocessorParser preprocessorParser = new CSharpPreprocessorParser(directiveTokenStream);
    int index = 0;
    // True while the current region is active (i.e. not excluded by a directive).
    bool compiliedTokens = true;
    while (index < tokens.Count)
    {
        IToken token = tokens[index];
        if (token.Type == CSharpLexer.SHARP)
        {
            // Collect this directive's tokens: everything after '#' up to the
            // directive's end-of-line, EOF, or the next '#', skipping tokens on
            // the hidden and comment channels.
            directiveTokens.Clear();
            int directiveTokenIndex = index + 1;
            while (directiveTokenIndex < tokens.Count &&
                   tokens[directiveTokenIndex].Type != CSharpLexer.Eof &&
                   tokens[directiveTokenIndex].Type != CSharpLexer.DIRECTIVE_NEW_LINE &&
                   tokens[directiveTokenIndex].Type != CSharpLexer.SHARP)
            {
                if (tokens[directiveTokenIndex].Channel != Lexer.Hidden &&
                    tokens[directiveTokenIndex].Channel != CSharpLexer.COMMENTS_CHANNEL)
                {
                    directiveTokens.Add(tokens[directiveTokenIndex]);
                }
                directiveTokenIndex++;
            }
            // Re-point the preprocessor parser at the freshly collected directive.
            directiveTokenSource = new ListTokenSource(directiveTokens);
            directiveTokenStream = new CommonTokenStream(directiveTokenSource, CSharpLexer.DIRECTIVE);
            preprocessorParser.TokenStream = directiveTokenStream;
            CSharpPreprocessorParser.Preprocessor_directiveContext directive = preprocessorParser.preprocessor_directive();
            // If true then the code that follows is active and is kept.
            compiliedTokens = directive.value;
            // -1 because the outer loop's index++ advances past the directive's terminator.
            index = directiveTokenIndex - 1;
        }
        else if (token.Channel != Lexer.Hidden &&
                 token.Type != CSharpLexer.DIRECTIVE_NEW_LINE &&
                 token.Channel != CSharpLexer.COMMENTS_CHANNEL &&
                 compiliedTokens)
        {
            codeTokens.Add(token); // Collect code tokens from active regions only.
        }
        index++;
    }
    // Parse only the surviving (active-region) tokens as the real compilation unit.
    ListTokenSource codeTokenSource = new ListTokenSource(codeTokens);
    CommonTokenStream codeTokenStream = new CommonTokenStream(codeTokenSource);
    CSharpParser parser = new CSharpParser(codeTokenStream);
    return(parser.compilation_unit());
}
/// <summary>
/// Matches a run of digit/decimal-point characters, but only when the
/// preceding character is whitespace or an end delimiter.
/// </summary>
public override bool IsImplicitMatch(CSharpLexer lexer)
{
    char previous = lexer.Previous;
    bool validStart = char.IsWhiteSpace(previous) || lexer.IsSpecialSymbol(previous, DelimiterType.End);
    if (!validStart)
    {
        return false;
    }

    bool sawDigit = false;
    while (!lexer.EndOfStream)
    {
        if (!IsNumberOrDecimalPoint(lexer.ReadNext()))
        {
            // Non-numeric character: undo the read and stop scanning.
            lexer.Rollback();
            break;
        }
        sawDigit = true;
        lexer.Commit();
    }
    return sawDigit;
}
/// <summary>
/// Emits a private expression-bodied property declaration for a T4 parameter,
/// escaping keyword-colliding type and property names with '@'.
/// </summary>
protected override void AppendParameterDeclaration(T4ParameterDescription description)
{
    foreach (string inspection in DisabledPropertyInspections)
    {
        AppendDisabledInspections(inspection);
    }

    Result.Append(" private global::");

    var typeToken = description.TypeToken;
    // Escape type names that are C# keywords.
    if (CSharpLexer.IsKeyword(typeToken.GetText()))
    {
        Result.Append("@");
    }
    Result.AppendMapped(typeToken);
    Result.Append(" ");

    var nameToken = description.NameToken;
    // Same escaping for the property name.
    if (CSharpLexer.IsKeyword(nameToken.GetText()))
    {
        Result.Append("@");
    }
    Result.AppendMapped(nameToken);

    Result.Append(" => ");
    Result.Append(description.FieldNameString);
    Result.AppendLine(";");
}
/// <summary>
/// Returns true when the lexer's pending input starts with the given comment
/// marker; on a match, consumes the remainder of the comment line.
/// </summary>
private bool IsMatch(CSharpLexer lexer, string commentType)
{
    if (string.IsNullOrEmpty(commentType))
    {
        return false;
    }

    lexer.Rollback();
    foreach (char expected in commentType)
    {
        if (expected != lexer.ReadNext())
        {
            return false;
        }
    }

    // Read until end of line or file.
    while (!IsEndLineOrEndFile(lexer, lexer.ReadNext()))
    {
    }
    return true;
}
/// <summary>
/// Parses a reference table from the given text. If parsing the table name
/// fails but produced a partial token, the partial table is attached to a new
/// UnexpectedToken so the caller can still recover it.
/// </summary>
public ReferenceTable ParseTable(string text)
{
    lexer = new CSharpLexer(new StringBuffer(text));
    Start();

    XmlToken token;
    try
    {
        token = ParseTableName();
    }
    catch (UnexpectedToken ex)
    {
        var partialToken = ex.ParsingResult as XmlToken;
        if (partialToken == null)
        {
            throw;
        }
        // Re-throw with the partially-built table as the parsing result.
        var wrapped = new UnexpectedToken("Unexpected token");
        wrapped.ParsingResult = CreateReferenceTable(partialToken);
        throw wrapped;
    }
    return CreateReferenceTable(token);
}
/// <summary>
/// Parses the named C# file and returns the walker's results, or null when the
/// name is blank or the file does not exist.
/// </summary>
/// <param name="fileName">Path of the .cs file to parse.</param>
/// <returns>Populated parse results, or null for invalid input.</returns>
public static CsharpParseResults InvokeParse(string fileName)
{
    if (string.IsNullOrWhiteSpace(fileName))
    {
        return(null);
    }
    if (!System.IO.File.Exists(fileName))
    {
        return(null);
    }
    // using guarantees the stream is closed even when parsing throws;
    // the original only closed it on the success path (resource leak).
    using (var tr = System.IO.File.OpenRead(fileName))
    {
        var input = new AntlrInputStream(tr);
        var lexer = new CSharpLexer(input);
        var tokens = new CommonTokenStream(lexer);
        var parser = new CSharpParser(tokens);
        var tree = parser.compilation_unit();
        var walker = new ParseTreeWalker();
        var loader = new CsharpParseTree();
        walker.Walk(loader, tree);
        var results = loader.Results;
        results.SourceFile = fileName;
        return(results);
    }
}
/// <summary>
/// Entry point: parses the C# file named in args[0], prints its parse tree,
/// then builds and prints the AST via a tree walk.
/// </summary>
public static void Main(string[] args)
{
    if (args.Length == 0)
    {
        Console.WriteLine("Please name the C# file that you would like to compile as a program argument.");
        return;
    }

    StreamReader reader = new StreamReader(args[0]);
    ICharStream charStream = CharStreams.fromString(reader.ReadToEnd());

    CSharpLexer lexer = new CSharpLexer(charStream);
    ITokenStream tokenStream = new CommonTokenStream(lexer);
    CSharpParser parser = new CSharpParser(tokenStream) { BuildParseTree = true };

    IParseTree tree = parser.compilation_unit();
    Console.WriteLine(tree.ToStringTree(parser));

    // Build the AST and symbol table by walking the parse tree.
    AST ast = new AST();
    SymbolTable symbolTable = new SymbolTable();
    ASTBuilder astBuilder = new ASTBuilder(ast, symbolTable);
    ParseTreeWalker.Default.Walk(astBuilder, tree);
    ast.Print();
}
/// <summary>
/// Formatting a sample file as HTML and stripping the tags must yield the same
/// text as the null (plain-text) formatter, after HTML escaping.
/// </summary>
public void StrippedHtmlIsSameAsInput()
{
    var source = SampleFile.Load("csharp-sample.txt");
    var lexedTokens = new CSharpLexer().GetTokens(source).ToArray();

    var formatter = new HtmlFormatter(new HtmlFormatterOptions() { NoWrap = true });
    var htmlWriter = new StringWriter();
    formatter.Format(lexedTokens, htmlWriter);
    File.WriteAllText("output.html", htmlWriter.ToString());

    var plainWriter = new StringWriter();
    new NullFormatter().Format(lexedTokens, plainWriter);

    // Remove every HTML tag, then compare against the escaped plain text.
    var strippedHtml = Regex.Replace(htmlWriter.ToString(), @"<.*?>", "").Trim();
    var escapedText = HtmlFormatter.EscapeHtml(plainWriter.ToString()).Trim();

    Check.That(strippedHtml).IsEqualTo(escapedText);
}
/// <summary>
/// Rewrites a .cs file in place by applying the text replacements produced by
/// TestListener while walking the file's parse tree. Non-.cs files are ignored.
/// </summary>
/// <param name="visitor">Currently unused (the visitor call is commented out).</param>
/// <param name="filePath">Path of the file to process and rewrite.</param>
static void HandleFileCs(TestVisitor visitor, string filePath)
{
    FileInfo info = new FileInfo(filePath);
    if (info.Extension != ".cs")
    {
        return;
    }

    AntlrFileStream stream = new AntlrFileStream(filePath);
    CSharpLexer lexer = new CSharpLexer(stream);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    CSharpParser parser = new CSharpParser(tokens);
    // Parse exactly once. The original called compilation_unit() a second time
    // on the already-consumed token stream and never used that result.
    CSharpParser.Compilation_unitContext startContext = parser.compilation_unit();

    TestListener listener = new TestListener(parser);
    ParseTreeWalker walker = new ParseTreeWalker();
    walker.Walk(listener, startContext);

    StringBuilder builder = new StringBuilder(stream.ToString());
    foreach (Tuple <int, string> tup in listener.GetTuples())
    {
        // Same-length splice: remove the original span, insert the replacement.
        builder.Remove(tup.Item1, tup.Item2.Length).Insert(tup.Item1, tup.Item2);
    }

    //visitor.Visit(startContext);
    // using ensures the writer is flushed and closed even if Write throws.
    using (StreamWriter writer = new StreamWriter(filePath))
    {
        writer.Write(builder);
    }
}
// Entry point: parses a hard-coded test file, walks it with an
// UpperCaseClassName listener, then writes the corrected code back.
static void Main(string[] args)
{
    //TreeScope root = new TreeScope(null, "root", "root");
    //TestListener listener = new TestListener(root);
    //List<string> folderPaths = new List<string>();
    ////folderPaths.Add(@"D:\baitap\dau_tieng\QuanLySanLuong\WindowsFormsApplication6\WindowsFormsApplication6");
    //folderPaths.Add(@"C:\Users\HONG PHI\Desktop\check preprocessing directive");
    //while (folderPaths.Count > 0)
    //{
    //    var directories = Directory.GetDirectories(folderPaths[0]);
    //    if (directories.Length > 0)
    //        folderPaths.AddRange(directories);
    //    var files = Directory.GetFiles(folderPaths[0]);
    //    folderPaths.RemoveAt(0);
    //    files.ToList().ForEach(fp =>
    //    {
    //        HandleFileCs(listener, fp);
    //    });
    //}
    //listener.ShowTree(root, Console.Out);
    // NOTE(review): this FileStream takes a single path argument and exposes
    // UpdateFile(), so it appears to be a project-defined type, not
    // System.IO.FileStream — confirm before "fixing" the constructor.
    FileStream stream = new FileStream(@"C:\Users\HONG PHI\source\repos\Caculator\ShowElementOfCSharpFile_InTree\UpperCaseTestFile.cs");
    CSharpLexer lexer = new CSharpLexer(stream);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    CSharpParser parser = new CSharpParser(tokens);
    RuleContext context = parser.compilation_unit();
    UpperCaseClassName listener = new UpperCaseClassName(tokens);
    ParseTreeWalker walker = new ParseTreeWalker();
    walker.Walk(listener, context);
    // Persist the listener's corrected code back to the source file.
    stream.UpdateFile(listener.ValidCode);
}
/// <summary>
/// Runs the preprocessor grammar over the given text and returns the token
/// values collected by the visitor; returns an empty list on any error.
/// </summary>
public List <CSharpTokenValue> ProcessUsings(StringBuilder text)
{
    var result = new List <CSharpTokenValue>();
    try
    {
        ICharStream inputStream = new AntlrInputStream(text.ToString());
        var lexer = new CSharpLexer(inputStream);
        var tokenStream = new CommonTokenStream(lexer);
        var preprocessorParser = new CSharpPreprocessorParser(tokenStream);
        var context = preprocessorParser.directive_new_line_or_sharp();

        var visitor = new CSharpPreprocessorParserVisitor();
        visitor.Visit(context);
        result = visitor.csharpValues;
    }
    catch (Exception ex)
    {
        // Best-effort: log and fall through to the (possibly empty) result.
        Console.WriteLine("Error: " + ex);
    }
    return(result);
}
/// <summary>
/// Parses C# source text and returns the outer-class infos gathered by a
/// CompilationUnitListener. Parse failures are logged and rethrown.
/// </summary>
/// <param name="source">The C# source text.</param>
/// <param name="filePath">Path used to label the listener's results.</param>
/// <returns>The listener's collected outer-class infos.</returns>
public static IEnumerable <ClassInfo> OuterClassInfosFromCSharpSource(
    string source, string filePath)
{
    try
    {
        var codeArray = source.ToCharArray();
        var inputStream = new AntlrInputStream(codeArray, codeArray.Length);
        var lexer = new CSharpLexer(inputStream);
        var commonTokenStream = new CommonTokenStream(lexer);
        var compilationUnitListener = new CompilationUnitListener(filePath);
        var parser = new CSharpParser(commonTokenStream);
        parser.RemoveErrorListeners();
        parser.AddErrorListener(new ErrorListener());
        parser.compilation_unit().EnterRule(compilationUnitListener);
        return(compilationUnitListener.OuterClassInfos);
    }
    catch (Exception e)
    {
        Console.WriteLine(e);
        throw;
    }
    // Removed: a trailing `return null;` that was unreachable (the try block
    // returns and the catch rethrows), which triggered CS0162.
}
// LOG-scanning module: finds methods flagged by FindLoggingInMethod and
// appends each as a FAIL ItemObject to the (possibly new) result list.
private static List <ItemObject> scanLogging(string fileName, List <ItemObject> listResult)
{
    if (listResult == null || listResult.Count == 0)
    {
        listResult = new List <ItemObject>();
    }

    string code = readFile2(fileName);
    var lexer = new CSharpLexer(new AntlrInputStream(code));
    lexer.RemoveErrorListeners();
    var parser = new CSharpParser(new CommonTokenStream(lexer));
    IParseTree tree = parser.compilation_unit();

    var uploadListener = new FindLoggingInMethod(parser);
    new ParseTreeWalker().Walk(uploadListener, tree);

    if (uploadListener.listMethod != null)
    {
        foreach (var item in uploadListener.listMethod)
        {
            listResult.Add(new ItemObject(item.BaselineItem, item.methodName, null, fileName, item.startLine, "FAIL"));
        }
    }
    return(listResult);
}
/// <summary>
/// Prefixes '@' when the string is a C# keyword so it can be used as an
/// identifier; otherwise returns the string unchanged.
/// </summary>
public static string EscapeKeyword([NotNull] this string s)
{
    return CSharpLexer.IsKeyword(s) ? '@' + s : s;
}
/// <summary>
/// Appends this element's name, escaping it with '@' when it collides with a
/// C# keyword.
/// </summary>
public void AppendName([NotNull] T4CSharpCodeGenerationResult result)
{
    bool isKeyword = CSharpLexer.IsKeyword(NameToken.GetText());
    if (isKeyword)
    {
        result.Append("@");
    }
    result.AppendMapped(NameToken);
}
/// <summary>
/// Highlights attribute values whose text is a C# keyword with an
/// escaped-keyword warning.
/// </summary>
protected override void DoRun(IT4AttributeValue element, IHighlightingConsumer consumer)
{
    if (CSharpLexer.IsKeyword(element.GetText()))
    {
        consumer.AddHighlighting(new EscapedKeywordWarning(element));
    }
}
/// <summary>
/// Shared fixture setup: lexes the sample file once so each test can inspect
/// the resulting token array.
/// </summary>
public CSharpExampleFileTests(ITestOutputHelper output)
{
    _output = output;
    var lexer = new CSharpLexer();
    var sample = SampleFile.Load("csharp-sample.txt");
    _results = lexer.GetTokens(sample).ToArray();
}
/// <summary>
/// A source containing a char literal must still tokenize correctly, starting
/// with the 'using' keyword token.
/// </summary>
public void GetsCorrectTokensWhenSourceContainsChar()
{
    const string code = "using System; namespace Foo { class Bar { private char _baz = 'c'; } }";

    var tokens = new CSharpLexer().GetTokens(code).ToArray();

    Check.That(tokens[0]).IsEqualTo(new Token(0, TokenTypes.Keyword, "using"));
}
/// <summary>
/// Builds the lexer/parser pipeline for the given file and enables parse-tree
/// construction.
/// </summary>
public CSharpParserWrapper(string filepath)
{
    var lexer = new CSharpLexer(CharStreams.fromPath(filepath));
    Tokens = new CommonTokenStream(lexer);
    Parser = new CSharpParser(Tokens) { BuildParseTree = true };
}
/// <summary>
/// Returns the type token's text, escaped with '@' when it is a C# keyword.
/// </summary>
private string GetTypeFqnString()
{
    string typeText = TypeToken.GetText();
    return CSharpLexer.IsKeyword(typeText) ? $"@{typeText}" : typeText;
}
/// <summary>
/// Determines if the word is a C# or XmlDoc keyword.
/// </summary>
/// <param name="word">The word to test.</param>
/// <returns>True if a keyword.</returns>
public static bool IsKeyword(string word)
{
    if (string.IsNullOrEmpty(word))
    {
        return(false);
    }
    // Invariant lowercasing: the original ToLower() is culture-sensitive and
    // mis-handles identifiers under e.g. Turkish 'I' casing rules (CA1304).
    var lowerWord = word.ToLowerInvariant();
    return(CSharpLexer.IsKeyword(lowerWord) || XmlDocTagNames.Contains(lowerWord));
}
/// <summary>
/// Delegates to IsImplicitMatch, committing the lexer on success and rolling
/// it back on failure.
/// </summary>
public bool IsMatch(CSharpLexer lexer)
{
    bool matched = IsImplicitMatch(lexer);
    if (matched)
    {
        lexer.Commit();
    }
    else
    {
        lexer.Rollback();
    }
    return matched;
}
/// <summary>
/// Parses the given content and caches both the compilation-unit context and
/// the token stream under the file's path.
/// </summary>
public void InitOrUpdateParserTreeOfFile(string filePath, string content)
{
    var lexer = new CSharpLexer(new AntlrInputStream(content));
    var tokens = new CommonTokenStream(lexer);
    var parser = new CSharpParser(tokens);

    _parserRuleContextOfFile[filePath] = parser.compilation_unit();
    tokenStreams[filePath] = tokens;
}
/// <summary>
/// Matches a double-quoted string: on an opening quote, consumes characters
/// through the closing quote (or end of file) and reports a match.
/// </summary>
public override bool IsImplicitMatch(CSharpLexer lexer)
{
    if (lexer.ReadNext() != '"')
    {
        return false;
    }
    // Consume until the closing quote or end of file.
    while (!IsClosingQuoteOrEndFile(lexer, lexer.ReadNext()))
    {
    }
    return true;
}
// Renders a compile-time constant with the highlighting attribute that matches
// its type: keyword, string/char, number, or plain text.
private void AppendConstantValue([NotNull] ConstantValue constantValue)
{
    if (constantValue.IsBadValue())
    {
        AppendText("bad value", null);
        return;
    }
    // Enum constants get their own rendering; fall through if that fails.
    IEnum enumType = constantValue.Type.GetEnumType();
    if (enumType != null && AppendEnumValue(constantValue, enumType))
    {
        return;
    }
    string presentation = constantValue.GetPresentation(CSharpLanguage.Instance);
    // Presentations that are themselves C# keywords highlight as keywords.
    if (presentation != null && CSharpLexer.IsKeyword(presentation))
    {
        AppendText(presentation, VsHighlightingAttributeIds.Keyword);
        return;
    }
    IType type = constantValue.Type;
    // Unwrap Nullable<T> so highlighting matches the underlying type.
    if (type != null && type.IsNullable())
    {
        type = type.GetNullableUnderlyingType();
    }
    if (type == null)
    {
        // NOTE(review): presentation may be null on this path — assumes
        // AppendText tolerates a null string; confirm.
        AppendText(presentation, null);
        return;
    }
    if (type.IsString())
    {
        AppendText(presentation, VsHighlightingAttributeIds.String);
    }
    else if (type.IsChar())
    {
        // Chars reuse the string highlighting attribute.
        AppendText(presentation, VsHighlightingAttributeIds.String);
    }
    else if (type.IsPredefinedNumeric())
    {
        AppendText(presentation, VsHighlightingAttributeIds.Number);
    }
    else
    {
        AppendText(presentation, null);
    }
}
// SQL-scanning module: for each method found by ExtractClassParser, runs three
// listener passes (query detection, expression-line resolution, used-method
// collection) and records each finding as a FAIL ItemObject.
private static List <ItemObject> scanSQL(string fileName, List <ItemObject> listResult)
{
    if (listResult == null || listResult.Count == 0)
    {
        listResult = new List <ItemObject>();
    }
    string code = readFile2(fileName);
    CSharpLexer lexer = new CSharpLexer(new AntlrInputStream(code));
    lexer.RemoveErrorListeners();
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    CSharpParser parser = new CSharpParser(tokens);
    IParseTree tree = parser.compilation_unit();
    ParseTreeWalker walker = new ParseTreeWalker();
    ExtractClassParser listener = new ExtractClassParser(parser);
    //FindGlobalVariable listener = new FindGlobalVariable(parser);
    walker.Walk(listener, tree);
    //}
    //Main tracer
    //sql
    if (listener.listMethodContext != null)
    {
        //Console.WriteLine(filename);
        List <MethodContext> listMethod = listener.getListMethod();
        foreach (var method in listMethod)
        {
            ParseTreeWalker methodWalker = new ParseTreeWalker();
            // Pass 1: detect query/command expressions within the method.
            FindQueryInMethod queryListener = new FindQueryInMethod(parser, method.lineList);
            methodWalker.Walk(queryListener, method.context);
            // Pass 2: resolve the source lines of those expressions, tracking
            // the command/query variables found in pass 1.
            FindLineOfExpression lineListener = new FindLineOfExpression(parser, method.context, queryListener.listExpressLine, queryListener.commandVar, queryListener.queryVar);
            methodWalker.Walk(lineListener, method.context);
            method.lineList = lineListener.listExpressLine;
            // Pass 3: collect the offending methods used in the class.
            FindUsedMethodInClass methodListener = new FindUsedMethodInClass(parser, method);
            methodWalker.Walk(methodListener, method.context);
            if (methodListener.listResult != null)
            {
                foreach (var item in methodListener.listResult)
                {
                    ItemObject obj = new ItemObject(item.BaselineItem, item.methodName, item.listExp, fileName, item.startLine, "FAIL");
                    listResult.Add(obj);
                }
            }
        }
    }
    return(listResult);
}
/// <summary>
/// Create TreeNode from stream: parses the character stream and walks the
/// resulting compilation unit with a Listener that populates the tree.
/// </summary>
/// <param name="stream">Character stream of C# source.</param>
/// <param name="tree">Tree node the listener populates.</param>
private static void GetTreeFromStream(ICharStream stream, TreeNode tree)
{
    CSharpLexer lexer = new CSharpLexer(stream);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    CSharpParser parser = new CSharpParser(tokens);
    // Parse exactly once. The original called compilation_unit() a second time
    // on the already-consumed token stream and discarded the unused result.
    CSharpParser.Compilation_unitContext startContext = parser.compilation_unit();
    Listener listener = new Listener(tree);
    ParseTreeWalker walker = new ParseTreeWalker();
    walker.Walk(listener, startContext);
}
/// <summary>
/// Lexes the built-in program text and prints each token's text and classified
/// type until EOF.
/// </summary>
static void Main(string[] args)
{
    CSharpLexer lexer = new CSharpLexer(new StringReader(program));
    // Pull tokens one at a time until the lexer reports EOF.
    for (antlr.IToken token = lexer.nextToken(); token.Type != CSharpLexer.EOF; token = lexer.nextToken())
    {
        Console.WriteLine("Token: '{0}', Type: {1}.",
            token.getText(),
            TokenClassification.Instance.getTokenType(token.Type));
    }
}
// Configures the Scintilla editor for the custom container C# lexer: dark
// color scheme, one entry per CSharpLexer style id, keyword lists, and a
// status-bar updater hooked to UpdateUI.
private void Form1_Load(object sender, EventArgs e)
{
    //For SciLexer.dll, seek out my modified Scintilla source on GitHub. I modified BraceMatch() to ignore Styling.
    //The original Scintilla version of BraceMatch() takes Styling into account. Why?
    //At first, in this project, it looks like it doesn't work. But, it does. You have to mess around with it and all the sudden they all properly highlight.
    //I can provide a screenshot of a "real" app, where you can see it works right off the bat. I'll try to figure out the difference between that project and this project.
    Scintilla.SetModulePath("SciLexer.dll"); //Comment this line out
    sc.CaretStyle = CaretStyle.Block;
    sc.Lexer = Lexer.Container;

    // Base colors: dark background, light foreground, propagated to all styles.
    sc.StyleResetDefault();
    sc.Styles[Style.Default].BackColor = IntToColor(0x1E1E1E);
    sc.Styles[Style.Default].ForeColor = IntToColor(0xEFEAEF);
    sc.StyleClearAll();

    //Ugly Test Colors
    sc.Styles[Style.LineNumber].ForeColor = sc.CaretForeColor = IntToColor(0xEFEAEF);
    sc.Styles[CSharpLexer.StyleDefault].ForeColor = IntToColor(0xEFEAEF);
    sc.Styles[CSharpLexer.StyleKeyword].ForeColor = IntToColor(0x35aec6);
    sc.Styles[CSharpLexer.StyleContainerProcedure].ForeColor = Color.HotPink;
    sc.Styles[CSharpLexer.StyleProcedureContainer].ForeColor = sc.Styles[CSharpLexer.StyleContextual].ForeColor = IntToColor(0xb4ceaf);
    sc.Styles[CSharpLexer.StyleIdentifier].ForeColor = IntToColor(0xEFEAEF);
    sc.Styles[CSharpLexer.StyleNumber].ForeColor = Color.Purple;
    sc.Styles[CSharpLexer.StyleString].ForeColor = Color.Red;
    sc.Styles[CSharpLexer.StyleComment].ForeColor = Color.Orange;
    sc.Styles[CSharpLexer.StyleProcedure].ForeColor = IntToColor(0x3ac190);
    sc.Styles[CSharpLexer.StyleVerbatim].ForeColor = Color.YellowGreen;
    sc.Styles[CSharpLexer.StylePreprocessor].ForeColor = Color.DarkSlateGray;
    sc.Styles[CSharpLexer.StyleEscapeSequence].ForeColor = Color.Yellow;
    sc.Styles[CSharpLexer.StyleOperator].ForeColor = Color.HotPink;
    sc.Styles[CSharpLexer.StyleBraces].ForeColor = Color.GreenYellow;
    sc.Styles[CSharpLexer.StyleError].ForeColor = Color.DarkRed;
    sc.Styles[CSharpLexer.StyleUser].ForeColor = Color.Olive;
    sc.Styles[CSharpLexer.StyleMultiIdentifier].ForeColor = Color.DeepPink;
    sc.Styles[CSharpLexer.StyleQuotedString].ForeColor = Color.Yellow;

    CSharpLexer.Init_Lexer(sc);
    // NOTE(review): 'global' and 'goto' each appear twice in this keyword
    // list — looks like a paste error; confirm before cleaning up.
    CSharpLexer.SetKeyWords("abstract add as ascending async await base bool break by byte case catch char checked class const continue decimal default delegate descending do double dynamic else enum equals explicit extern false finally fixed float for foreach from get global global goto goto group if implicit in int interface internal into is join let lock long namespace new null object on operator orderby out override params partial private protected public readonly ref remove return sbyte sealed select set short sizeof stackalloc static string struct switch this throw true try typeof uint ulong unchecked unsafe ushort using value var virtual void volatile where while yield",
        inUserKeywords: "Goblin Hammer",
        inMultiStringKeywords: "New York,New Jersey",
        AutoFillContextual: true
    );

    // Status bar: 1-based line, 1-based caret position, and INS/OVR mode.
    sc.UpdateUI += (s, ue) =>
    {
        label3.Text = $"{sc.CurrentLine + 1}";
        label4.Text = $"{sc.CurrentPosition + 1}";
        label5.Text = $"{(sc.Overtype ? "OVR" : "INS")}";
    };
}
/// <summary>
/// Demo entry point: parses a one-line using directive and hands the resulting
/// compilation unit to a Listener.
/// </summary>
static void Main(string[] args)
{
    String input = "using System;";
    // Fixed: the ANTLR4 C# runtime factory is CharStreams.fromString —
    // `fromstring` (as originally written) does not exist and fails to compile.
    // This also matches the other entry points in this project.
    ICharStream stream = CharStreams.fromString(input);
    ITokenSource lexer = new CSharpLexer(stream);
    ITokenStream tokens = new CommonTokenStream(lexer);
    var parser = new CSharpParser(tokens);
    parser.BuildParseTree = true;
    var tree = parser.compilation_unit();
    var listner = new Listener();
    listner.EnterCompilation_unit(tree);
}
/// <summary>
/// Parses the program text, walks the using directives and namespace member
/// declaration with a naming-rule listener, and returns the collected errors
/// plus the generated interface string.
/// </summary>
public AnalysisResultData RunAnalysis(string programString)
{
    AntlrInputStream inputStream = new AntlrInputStream(programString);
    CSharpLexer lexer = new CSharpLexer(inputStream);
    CommonTokenStream tokenStream = new CommonTokenStream(lexer);
    CSharpParser parser = new CSharpParser(tokenStream);

    CustomCSharpListener listener = new CustomCSharpListener(parser);
    ParseTreeWalker walker = new ParseTreeWalker();

    // Walk the using directives first, then the namespace members.
    walker.Walk(listener, parser.using_directives());
    walker.Walk(listener, parser.namespace_member_declaration());

    AnalysisResultData analysisResData = new AnalysisResultData();
    foreach (var err in listener.GetNameRuleErrorList())
    {
        analysisResData.NameRuleErrors.Add(err);
    }
    analysisResData.GeneratedInterfaceString = listener.GetConvertedInterfaceString();
    return(analysisResData);
}
/// <summary>
/// Parses the named file with the ANTLR C# grammar and returns the resulting
/// AST, tagged with the source file name.
/// </summary>
/// <param name="filename">Path of the file to parse.</param>
/// <returns>The parser's AST for the file.</returns>
private static CSharpAST GetAST(string filename)
{
    // using guarantees the file handle is released even if the parser throws;
    // the original leaked the stream on a parse error.
    using (FileStream s = new FileStream(filename, FileMode.Open, FileAccess.Read))
    {
        CSharpLexer lexer = new CSharpLexer(s);
        lexer.setFilename(filename);
        CSharpParser parser = new CSharpParser(lexer);
        parser.setFilename(filename);
        parser.compilation_unit();
        // getAST reads the parser's in-memory tree, so closing the stream
        // afterwards (at the end of the using block) is safe.
        CSharpAST antlrTree = (CSharpAST)(parser.getAST());
        antlrTree.FileName = filename;
        return antlrTree;
    }
}