/// <summary>
/// Reads the entire text file (Windows-1251 encoded) next to the project,
/// echoes it to the console, then runs the lexical analyzer over it.
/// </summary>
/// <param name="args">Unused command-line arguments.</param>
static void Main(string[] args)
{
    // Fix: the original leaked the StreamReader; a using block guarantees
    // the file handle is released even if ReadToEnd or CheckText throws.
    string text;
    using (StreamReader fileRead = new StreamReader(@"..\..\..\Text.txt", Encoding.GetEncoding(1251)))
    {
        text = fileRead.ReadToEnd();
    }

    Console.WriteLine("Text: " + text);

    LexicalAnalyzer lexicalAnalyzer = new LexicalAnalyzer();
    lexicalAnalyzer.CheckText(text);
}
/// <summary>
/// Entry point: expects an input file path and an output file path, runs the
/// lexical analyzer over the input, and prints/writes every parsed record.
/// </summary>
/// <param name="args">args[0] = input file path, args[1] = output file path.</param>
static void Main(string[] args)
{
    // Fix: use the array's Length property instead of LINQ's Count() extension.
    if (args.Length < 2)
    {
        Console.WriteLine("Error: Not Enough Arguments");
        return;
    }

    // Fix: the original wrapped these indexers in a try/catch, but the guard
    // above already proves both indices exist, so that catch was dead code.
    string inputFile = args[0];
    string outputFile = args[1];

    LexicalAnalyzer lex = new LexicalAnalyzer();
    try
    {
        lex.LoadFile(inputFile);   // Loads text from file into char cache
        lex.ParseRecords();        // Parses all Lexors out of input file and saves them to a vector of Records

        // Exit early if no lexors are parsed
        if (lex.records.Count == 0)
        {
            Console.WriteLine("No Lexors Found In Records");
            return;
        }

        // Loop through Records and print/write Lexors
        lex.PrintHeader();
        lex.WriteHeader(outputFile);
        foreach (Record rec in lex.records)
        {
            lex.PrintLexor(rec);
            lex.WriteLexor(outputFile, rec);
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine("Error: Lexical Analyzer Failed: " + ex.Message);
    }
}
/// <summary>
/// Runs the lexical analyzer on a .diamond source file, dumps the token table
/// to the console and to "ex.txt", builds a POLIZ syntax tree, and prints the
/// identifiers table before and after syntax analysis.
/// </summary>
/// <param name="args">Optional: args[0] overrides the source file path.</param>
static void Main(string[] args)
{
    // Generalization: prefer a path from the command line; fall back to the
    // original hard-coded development path so existing usage still works.
    string sourcePath = args.Length > 0
        ? args[0]
        : @"C:\Users\User\Desktop\6 семестр\САПР-2\LanguagePOLIZParser\LanguagePOLIZParser\example.diamond";

    try
    {
        var tokens = new LexicalAnalyzer.LexicalAnalyzer().Analyze(sourcePath);

        Console.WriteLine("Tokens table");
        using (StreamWriter sw = new StreamWriter("ex.txt"))
        {
            foreach (var token in tokens.TokensList)
            {
                // Same line goes to both the console and the output file.
                string line = $"Line:{token.Line}; <{token.Lexeme}>; #{token.Code}; P:{token.Position}";
                Console.WriteLine(line);
                sw.WriteLine(line);
            }
        }

        // Local helper: the identifiers table is printed twice (before and
        // after POLIZ analysis), so the duplicated loop lives here once.
        void PrintIdentifiersTable()
        {
            Console.WriteLine();
            Console.WriteLine(new String('#', 40));
            foreach (var token in tokens.IdentifiersTable)
            {
                Console.WriteLine($"Key: {token.Key}; Name: <{token.Value}>;");
            }
            Console.WriteLine(new String('#', 40));
            Console.WriteLine();
        }

        PrintIdentifiersTable();

        var tree = new POLIZ(tokens).Analyze();

        PrintIdentifiersTable();

        Console.WriteLine("Syntax tree");
        PrintSyntaxTree(tokens, tree);
    }
    catch (Exception e)
    {
        Console.WriteLine(e.Message);
    }

    Console.ReadKey();
}
/// <summary>
/// Opens the source file named by the first command-line argument and runs
/// the lexical analyzer over it.
/// </summary>
/// <param name="args">args[0] = path of the source file to analyze.</param>
private static void Main(string[] args)
{
    // Fix: the original indexed args[0] unguarded, so running with no
    // arguments crashed with an uncaught IndexOutOfRangeException that the
    // FileNotFoundException handler below could never catch.
    if (args.Length < 1)
    {
        Console.WriteLine("Usage: provide the path of the source file as the first argument.");
        return;
    }

    try
    {
        using (var file = new StreamReader(args[0]))
        {
            var lex = new LexicalAnalyzer(file);
            lex.PerformLex();
        }
    }
    catch (FileNotFoundException ex)
    {
        Console.WriteLine("Could not open file for reading. Check that the file exists and is in the source directory.");
        Console.WriteLine(ex.Message);
    }
}
/// <summary>
/// Analyzes "example.diamond" and prints one line per token: its source line,
/// lexeme, and code. Any failure is reported via the exception message.
/// </summary>
/// <param name="args">Unused command-line arguments.</param>
static void Main(string[] args)
{
    try
    {
        var analyzer = new LexicalAnalyzer();
        var tokensTable = analyzer.Analyze("example.diamond");

        foreach (var token in tokensTable.TokensList)
        {
            // One token per line, trailing newline supplied by WriteLine.
            Console.WriteLine($"{token.Line} {token.Lexeme} {token.Code}");
        }
    }
    catch (Exception e)
    {
        Console.WriteLine(e.Message);
    }
}