// Verifies that parsing the token stream with a known error produces
// exactly one entry in the resulting error list.
public void ParseWithErrorRunsTest()
{
    var result = SyntacticAnalyzer.Parse(tokensToParseWithError);
    var errors = result.errorList;

    // Assert.AreEqual reports the actual count on failure, unlike
    // Assert.IsTrue(errors.Count == 1), which only reports "false".
    Assert.AreEqual(1, errors.Count);
}
/// <summary>
/// Compiles a single search-query token into a predicate expression for <typeparamref name="T"/>.
/// </summary>
/// <param name="token">The root token of the query to compile.</param>
/// <returns>A lambda expression that evaluates the query against an instance of <typeparamref name="T"/>.</returns>
/// <exception cref="InvalidOperationException">
/// Thrown when no default comparative operator has been registered.
/// </exception>
private Expression<Func<T, bool>> Compile(IToken token)
{
    if (defaultComparativeOperator == null)
    {
        // Typos fixed in the log and exception messages ("defualt", "secified").
        Logger.Error("The default comparative operator isn't registered.");
        throw new InvalidOperationException("Failed to compile the specified search query because this compiler doesn't have a comparative operator. You have to register at least one comparative operator.");
    }

    Logger.Info("STAGE 1: Tokenizing the specified query string.");
    var filters = BuildUpTokenFilters();
    IEnumerable<IToken> tokens = new List<IToken> { token };

    // Apply each filter in order, feeding the output of one into the next.
    // A plain foreach avoids the ToList() allocation and the
    // closure-mutating ForEach lambda of the original.
    foreach (var filter in filters)
    {
        tokens = filter.Filter(tokens);
    }

    Logger.Info("STAGE 2: Building an abstract syntax tree.");
    var analyzer = new SyntacticAnalyzer<T>(defaultComparativeOperator);
    var root = analyzer.Parse(tokens);

    Logger.Info("STAGE 3: Compiling an abstract syntax tree.");
    var current = Expression.Parameter(typeof(T), "current");
    return Expression.Lambda<Func<T, bool>>(Compile(current, root).Expression, current);
}
// Constructs the form: creates the analysis pipeline objects,
// then builds the designer-generated UI.
public Form1()
{
    lexAnalyzer = new LexicalAnalyzer();
    synAnalyzer = new SyntacticAnalyzer();
    InitializeComponent();
}
// TryParse parses the source string and returns a list of errors on failure.
// On success it fills the tree out parameter with the parsed tree.
public static List<IError> TryParse(string source, out Tree tree)
{
    // FIXME(an): Add try-catch block & convert to IError,
    // since Try* variants are supposed to be exception safe
    var lexer = new Lexer();
    lexer.Run(source);

    var sa = new SyntacticAnalyzer();
    tree = sa.Run(lexer.GetTokens());

    // Capture once instead of calling GetErrors() twice per analyzer.
    var syntaxErrors = sa.GetErrors();
    if (syntaxErrors.Count > 0)
    {
        return syntaxErrors;
    }

    var treeWalker = new Walker();
    var semanticAnalyzer = new SemanticAnalyzer();
    treeWalker.Walk(semanticAnalyzer, tree);

    var semanticErrors = semanticAnalyzer.GetErrors();
    if (semanticErrors.Count > 0)
    {
        return semanticErrors;
    }

    // No errors at either stage: report success with an empty list.
    return new List<IError>();
}
/// <summary>
/// Entry point: runs lexical analysis, syntactic analysis and code
/// generation for a hard-coded test file, writing the result to an .asm file.
/// </summary>
static void Main(string[] args)
{
    var KeyWords = new ConstTable(@"Tables/KeyWords.txt");
    var Delimiters = new ConstTable(@"Tables/Delimiters.txt");
    var Operations = new ConstTable(@"Tables/Operations.txt");
    var Constants = new VarTable();
    var Identifiers = new VarTable();
    var Goto = new ConstTable(@"Tables/Goto.txt");
    var Labels = new VarTable();

    string TestFile = "test3";

    LexicAnalyzer LA = new LexicAnalyzer(KeyWords, Delimiters, Operations, Constants, Identifiers);
    Token[] LexicAnalysisResult = LA.AnalyzeSource(TestFile);

    SyntacticAnalyzer SA = new SyntacticAnalyzer(KeyWords, Delimiters, Operations, Constants, Identifiers, Goto, Labels);
    try
    {
        SA.AnalyzeTokens(LexicAnalysisResult);
        // Typo fixed: "Синтаксическтй" -> "Синтаксический".
        Console.WriteLine("Синтаксический анализ успешно завершен.");
    }
    catch (Exception Ex)
    {
        Console.WriteLine("Ошибка: " + Ex.Message);
        File.WriteAllText(Directory.GetCurrentDirectory() + @"\" + TestFile + ".syn.out.txt", "Ошибка: " + Ex.Message);
        // Bug fix: previously execution fell through and generated code
        // from the incomplete output of a failed parse.
        return;
    }

    CodeBuilder CB = new CodeBuilder(KeyWords, Operations, Constants, Identifiers, Goto, Labels);
    CB.BuildCode(SA.Output);
    File.WriteAllText(Directory.GetCurrentDirectory() + @"\" + TestFile + ".asm", CB.ToString());
    Console.WriteLine($"Построение кода успешно завершено. Выходной файл: {TestFile}.asm");
}
/// <summary>
/// Handles the Thompson menu item: runs the lexer, the parser and the
/// interpreter over the active tab's text, then renders the resulting
/// NFA images and transition tables.
/// </summary>
private void ThompsonToolStripMenuItem_Click(object sender, EventArgs e)
{
    // Bug fix: Cast<RichTextBox>() throws InvalidCastException as soon as the
    // tab hosts any non-RichTextBox control; OfType<RichTextBox>() filters
    // safely (the original's `x => x is RichTextBox` predicate shows this
    // was the intent).
    RichTextBox richTextBox = tabControl1.SelectedTab.Controls.OfType<RichTextBox>().FirstOrDefault();
    string content = richTextBox.Text;

    // Reset image state from any previous run and release GDI resources.
    indexImage = 0;
    images = new List<string>();
    if (automataImage.Image != null)
    {
        automataImage.Image.Dispose();
    }
    automataImage.Image = null;

    // Reset table state from any previous run.
    indexTable = 0;
    tables = new List<string>();
    if (tableBox.Image != null)
    {
        tableBox.Image.Dispose();
    }
    tableBox.Image = null;

    lexicalAnalyzer = new LexicalAnalyzer();
    lexicalAnalyzer.Scanner(content);
    if (!lexicalAnalyzer.ListError.Any())
    {
        syntacticAnalyzer = new SyntacticAnalyzer(lexicalAnalyzer.ListToken);
        if (!syntacticAnalyzer.ListError.Any())
        {
            interpreter = new Interpreter(lexicalAnalyzer.ListToken);
            images.AddRange(interpreter.RoutesNFA);
            if (images.Count > 0)
            {
                LoadImage(0);
            }
            tables.AddRange(interpreter.RoutesTables);
            if (tables.Count > 0)
            {
                LoadTable(0);
            }
            commandLineTextBox.Text = interpreter.ConsoleMessage.ToString();
        }
        else
        {
            syntacticAnalyzer.GenerateReports();
        }
    }
    // NOTE(review): lexical errors are silently ignored here — no report is
    // generated when the lexer's ListError is non-empty. Confirm intentional.
}
/// <summary>
/// Lexes and parses <paramref name="sourceCode"/> into a program AST,
/// asserting that the parser consumed every token.
/// </summary>
public static ProgramAst GetAst(string sourceCode)
{
    var lexer = new Lexer(sourceCode);
    var reader = new TokenReader(lexer.Parse().ToList());
    var parser = new SyntacticAnalyzer(reader);

    var program = parser.AnalyzeProgram();

    // The whole token stream must have been consumed.
    Assert.True(reader.ReachedEnd);
    return program;
}
/// <summary>
/// Converts an expression from infix notation to postfix notation.
/// </summary>
/// <param name="infixExpression">The infix expression.</param>
/// <returns>The postfix expression, or the error message when the input is invalid.</returns>
public static string ToPostFix(string infixExpression)
{
    try
    {
        var analyzer = new SyntacticAnalyzer();
        return analyzer.Parse(infixExpression).ToPostFix();
    }
    catch (LexicalException ex)
    {
        // Invalid characters/tokens: report instead of throwing.
        return ex.Message;
    }
    catch (SyntaxException ex)
    {
        // Malformed expression structure: report instead of throwing.
        return ex.Message;
    }
}
// internal List<string> Decode(string axPath)
#if AllowDecryption
// internal List<string> DecodeAndDecrypt(BinaryReader reader,int fileSize)
#endif
/// <summary>
/// Decodes compiled HSP data: reads the header and preprocessor sections,
/// tokenizes, parses into logical lines, and renders each visible line as
/// tab-indented source text.
/// </summary>
/// <param name="reader">Reader positioned at the start of the AX data.</param>
/// <returns>The decoded source lines.</returns>
public override List<string> Decode(BinaryReader reader)
{
    var data = new AxData();
    var decodedLines = new List<string>();
    try
    {
        global::KttK.HspDecompiler.HspConsole.Write("ヘッダー解析中...");
        data.LoadStart(reader, dictionary);
        data.ReadHeader();

        global::KttK.HspDecompiler.HspConsole.Write("プリプロセッサ解析中...");
        data.ReadPreprocessor(dictionary);

        global::KttK.HspDecompiler.HspConsole.Write("字句解析中...");
        var lexical = new LexicalAnalyzer(dictionary);
        TokenCollection tokenStream = lexical.Analyze(data);
        data.LoadEnd();

        global::KttK.HspDecompiler.HspConsole.Write("構文解析中...");
        var syntactic = new SyntacticAnalyzer();
        List<LogicalLine> logicalLines = syntactic.Analyze(tokenStream, data);

        global::KttK.HspDecompiler.HspConsole.Write("出力ファイル作成中...");
        foreach (LogicalLine line in logicalLines)
        {
            if (!line.Visible)
            {
                continue;
            }
            // One tab per nesting level, then the rendered line.
            decodedLines.Add(new string('\t', line.TabCount) + line.ToString());
        }
    }
    catch (SystemException e)
    {
        // Wrap any unexpected failure with decoder context.
        throw new HspDecoderException("AxData", "想定外のエラー", e);
    }
    return decodedLines;
}
/// <summary>
/// Parses an operator expression from <paramref name="sourceCode"/> and
/// generates its intermediate-code instruction sequence.
/// </summary>
static List<Instruction> Generate(string sourceCode)
{
    var lexer = new Lexer(sourceCode);
    var reader = new TokenReader(lexer.Parse().ToList());
    var parser = new SyntacticAnalyzer(reader);

    // Parsing must succeed and consume the whole token stream.
    Assert.True(parser.TryOperatorExpression(out OperatorExpressionAst expression));
    Assert.True(reader.ReachedEnd);

    var generator = new IntermediateCodeGenerator(new SymbolScope())
    {
        CodeGenerationEnabled = true,
        ReturnCheckEnabled = true,
    };
    generator.ProcessOperatorExpression(expression);
    return generator.ExpressionBucket.Pop();
}
/// <summary>
/// Runs the syntactic analyzer over the program text and reports the
/// outcome in the status box, highlighting offending lines on failure.
/// </summary>
private void Button_SyntacticalAnalyzer_Click(object sender, EventArgs e)
{
    var analyzer = new SyntacticAnalyzer(RichTextBox_ProgramCode.Text);
    analyzer.Analysis();

    List<string> errors = analyzer.GetError();
    if (errors.Count == 0)
    {
        RichTextBox_AssemblyStatus.Text = "ОШИБОК НЕТ";
        // Clear any highlighting left over from a previous failed run.
        RichTextBox_ProgramCode.SelectAll();
        RichTextBox_ProgramCode.SelectionBackColor = Color.White;
        return;
    }

    ErrorSelect(analyzer.GetErrorNumberLine());
    RichTextBox_AssemblyStatus.Text = "НАЙДЕНЫ ОШИБКИ!\n----------------------------------------------------\n";
    foreach (string message in errors)
    {
        RichTextBox_AssemblyStatus.AppendText(message + "\n\n");
    }
}
// Creates the analyzer under test, configured with a colon-delimited
// partial-match strategy.
public void SetUp()
{
    var matcher = new PartialMatch<CustomEntity>(":");
    testee = new SyntacticAnalyzer<CustomEntity>(matcher);
}
// Entry point of the compiler driver.
// Reads a lexer-output file (args[0]) whose lines are "[kind,lexeme,line,col][...]"
// token records, rebuilds the token list, runs syntactic then semantic analysis,
// and writes derivation / AST / error / symbol-table files next to the input.
static void Main(string[] args)
{
    SystemChecks(args[0]);
    var fileName = Path.GetFileNameWithoutExtension(args[0]);
    try
    {
        // NOTE(review): none of these streams are wrapped in using blocks, so an
        // exception before the explicit Close() calls leaks them and may lose
        // buffered output — confirm whether that is acceptable here.
        var sr = new StreamReader(args[0]);
        var swTokens = new StreamWriter(Path.GetDirectoryName(args[0]) + "\\" + fileName + ".outderivation");
        var swTree = new StreamWriter(Path.GetDirectoryName(args[0]) + "\\" + fileName + ".outast");
        var swErrors = new StreamWriter(Path.GetDirectoryName(args[0]) + "\\" + fileName + ".outsyntaxerrors");
        var tokens = new List<TokenData>();

        //Read the first line of file
        var line = sr.ReadLine();
        //Continue to read until you reach end of file
        while (line != null)
        {
            // Analyze the line
            // Strip all whitespace, then split the "[a][b][c]" record chain on "][".
            var trimmedLine = Regex.Replace(line, @"\s+", "");
            string[] separator = { "][" };
            var words = trimmedLine.Split(separator, StringSplitOptions.None);
            for (int i = 0; i < words.Length; i++)
            {
                if (words[i].Equals(string.Empty))
                {
                    continue;
                }
                // The first record still carries the leading '[' and the last
                // the trailing ']'; trim them off.
                if (i == 0)
                {
                    words[i] = words[i].Substring(1);
                }
                if (i == words.Length - 1)
                {
                    words[i] = words[i].Substring(0, words[i].Length - 1);
                }
                var splitToken = words[i].Trim().Split(',');
                // Comments are not tokens for the parser.
                if (splitToken[0].Equals("inlinecmt") || splitToken[0].Equals("blockcmt"))
                {
                    continue;
                }
                // A comma token's lexeme is itself a ',', so the Split above
                // produced one extra field; shift line/col back into place and
                // restore the lexeme.
                if (splitToken[0].Equals("comma"))
                {
                    splitToken[2] = splitToken[3];
                    splitToken[3] = splitToken[4];
                    splitToken[1] = ",";
                }
                var token = new TokenData(splitToken[0], splitToken[1], int.Parse(splitToken[2]), int.Parse(splitToken[3]));
                tokens.Add(token);
            }
            //Read the next line
            line = sr.ReadLine();
        }

        // Syntactic analysis: emit errors, the parse-tree derivation, and the AST.
        var parsedOutput = SyntacticAnalyzer.Parse(tokens);
        foreach (string errorLine in parsedOutput.errorList)
        {
            swErrors.WriteLine(errorLine);
        }
        parsedOutput.parseTree.TraverseDFSInFile(swTokens);
        var absTree = SyntacticAnalyzer.ParseTreeToAbstractSyntaxTree(parsedOutput.parseTree);
        absTree.TraverseDFSInFile(swTree);
        sr.Close();
        swTokens.Close();
        swTree.Close();
        swErrors.Close();
        Console.WriteLine("Done reading and writing derivation files");

        var swTable = new StreamWriter(Path.GetDirectoryName(args[0]) + "\\" + fileName + ".outsymboltables");
        var swSemErrors = new StreamWriter(Path.GetDirectoryName(args[0]) + "\\" + fileName + ".outsemanticerrors");
        //Start semantic parsing
        // Traverse Ast and create symbol tables.
        var symbolTableResult = SemanticAnalyzer.CreateSymbolTable(absTree);
        swTable.WriteLine(symbolTableResult.symbolTable);
        // Traverse Ast and ensure semantics are good.
        SemanticAnalyzer.SemanticAnalysis(absTree, symbolTableResult.symbolTable, symbolTableResult.errorList);
        foreach (var error in symbolTableResult.errorList)
        {
            swSemErrors.WriteLine(error);
        }
        swTable.Close();
        swSemErrors.Close();
        Console.WriteLine("Done reading and writing semantic files");
        // Environment.Exit terminates the process WITHOUT running the finally
        // block below, so a successful run exits with code 9000, not 2.
        System.Environment.Exit(9000);
    }
    catch (Exception e)
    {
        Console.WriteLine("Exception: " + e.Message);
    }
    finally
    {
        // Only reached on the failure path (see note above): exit code 2.
        System.Environment.Exit(2);
    }
}
// Builds the testee with its partial-match strategy keyed on ":".
public void SetUp()
{
    var partialMatch = new PartialMatch<CustomEntity>(":");
    testee = new SyntacticAnalyzer<CustomEntity>(partialMatch);
}
/// <summary>
/// Compiles the source code currently loaded in the editor.
/// </summary>
/// <returns><c>true</c> when compilation finished without errors; otherwise <c>false</c>.</returns>
private async Task<bool> CompileAsync()
{
    if (string.IsNullOrEmpty(Editor.Text))
    {
        return false;
    }

    // Persist pending edits before compiling.
    if (_modificationsCount > 0)
    {
        if (string.IsNullOrEmpty(_selectedFile))
        {
            await SaveAsFileAsync();
        }
        else
        {
            await SaveFileAsync();
        }
    }

    // Clear the previous run's results from the UI.
    TokensList.ItemsSource = null;
    ErrorListView.ItemsSource = null;

    var compiled = false;
    using (LexicalAnalyzer lexical = new LexicalAnalyzer(_selectedFile))
    {
        string outputFilePath = FileHelper.GetOutputFilePath(Path.GetFileNameWithoutExtension(_selectedFile) + OutputFileExtension);

        SyntacticAnalyzer syntactic = new SyntacticAnalyzer(lexical);
        CompilationResult result = await syntactic.DoAnalysisAsync(outputFilePath);

        if (result.Error != null)
        {
            // Surface the compilation error in the error list.
            ErrorListView.ItemsSource = new List<CompilationResultViewModel>
            {
                new CompilationResultViewModel()
                {
                    Message = result.Error.Message,
                    Position = result.Error.Position
                }
            };
        }
        else
        {
            _currentProgramName = result.ProgramName;
            _outputFile = outputFilePath;
            compiled = true;
            // Show a "no errors" entry so the list is never silently empty.
            ErrorListView.ItemsSource = new List<CompilationResultViewModel>
            {
                new CompilationResultViewModel()
                {
                    Message = "Compilação sem erros.",
                    Position = new CodePosition { Line = 0, Column = 0, Index = 0 }
                }
            };
        }

        TokensList.ItemsSource = lexical.ReadTokens;
    }

    // Update the token-count caption.
    int tokensCount = TokensList.Items.Count;
    if (tokensCount == 0)
    {
        TokenGroupBox.Header = "Nenhum token";
    }
    else if (tokensCount == 1)
    {
        TokenGroupBox.Header = "1 token";
    }
    else
    {
        TokenGroupBox.Header = tokensCount + " tokens";
    }

    return compiled;
}