/// <summary>
/// Verifies that the lexical analyser extracts the expected tokens from a set
/// of representative inputs, including empty and null strings, by approving a
/// formatted report of every analysis.
/// </summary>
public void TokensAreExtracted()
{
    //Arrange
    var inputs = new[]
    {
        "100", "100+", "100 ", "100L", "+-*/", "()", "", null, "(3 *5)+ 49"
    };
    var output = new Output();

    //Act
    foreach (var input in inputs)
    {
        var tokens = LexicalAnalyser.ExtractTokens(input);

        output.WrapLine($@"Analysis of ""{input ?? "NULL"}"":");
        output.FormatTable(tokens.AsReport(rep => rep
            .AddColumn(r => r.TokenType, cc => cc.LeftAlign())
            .AddColumn(r => r.Text, cc => { })));
        output.WriteLine();
        output.WriteLine();
    }

    //Assert
    output.Report.Verify();
}
/// <summary>
/// Verifies that a bare function-call statement parses into a
/// Program / DeclFunc / Block / Drop / Call tree with two constant arguments.
/// </summary>
public void SyntaxAnalyserCall()
{
    const string clCode = @" int main() { toto(1, 2); } ";
    var tokens = new LexicalAnalyser().Convert(clCode);

    var actualTree = new SyntaxAnalyser().Convert(tokens);

    // Build the expected tree bottom-up: the call and its two constant
    // arguments, wrapped in a Drop (value discarded) inside the body block.
    var call = new Node(Nodes.Call, tokens[5],
        new Node(Nodes.Const, tokens[7]),
        new Node(Nodes.Const, tokens[9]));
    var expectedTree = new Node(Nodes.Program, _tokenDefault,
        new Node(Nodes.DeclFunc, tokens[1],
            new Node(Nodes.Block, _tokenDefault,
                new Node(Nodes.Drop, _tokenDefault, call))));

    Assert.AreEqual(expectedTree, actualTree);
}
/// <summary>
/// Initialisation: stores the supplied analysers, loads state, then wires up
/// the analyser events.
/// </summary>
/// <param name="lanalyser">lexical analyser</param>
/// <param name="sanalyser">syntax analyser</param>
public void Initialize(LexicalAnalyser lanalyser, SyntaxisAnalyser sanalyser)
{
    this.lanalyser = lanalyser;
    this.sanalyser = sanalyser;

    // Load() runs before event wiring — presumably so handlers observe
    // loaded state; TODO confirm the ordering requirement.
    Load();
    InitEvents(lanalyser, sanalyser);
}
/// <summary>
/// Verifies parsing of a function with one int parameter: the parameter token
/// is attached to the DeclFunc node's Tokens collection and the body reduces
/// to a single Out of an addition.
/// </summary>
public void SyntaxAnalyserDeclareFunctionIntWithParams()
{
    const string clCode = @" int main(int a) { out a + 5; } ";
    var tokens = new LexicalAnalyser().Convert(clCode);

    var actualTree = new SyntaxAnalyser().Convert(tokens);

    var body = new Node(Nodes.Block, _tokenDefault,
        new Node(Nodes.Out, _tokenDefault,
            new Node(Nodes.Addition, _tokenDefault,
                new Node(Nodes.RefVar, tokens[8]),
                new Node(Nodes.Const, tokens[10]))));
    // tokens[4] is the declared parameter, recorded on the function node.
    var function = new Node(Nodes.DeclFunc, tokens[1], body) { Tokens = { tokens[4] } };
    var expectedTree = new Node(Nodes.Program, _tokenDefault, function);

    Assert.AreEqual(expectedTree, actualTree);
}
/// <summary>
/// Handles the Compile button: lexes the editor contents, parses the token
/// stream, dumps every token to the console, then generates assembly output.
/// async void is acceptable here because this is a top-level event handler.
/// </summary>
private async void Compile_Click(object sender, RoutedEventArgs e)
{
    string code = Code.Text;

    var lexicalAnalyser = new LexicalAnalyser();
    var tokens = await lexicalAnalyser.ExecuteAsync(code);

    var syntaticAnalyser = new SyntacticAnalyser(tokens.ToList());
    var result = await syntaticAnalyser.ExecuteAsync();

    foreach (var token in tokens)
    {
        var tokenName = $"TokenType: {token.TokenType}";
        var tokenValue = $"Lexema: {token.TokenValue}";
        // Single interpolated string with the same alignment specifiers as
        // the previous string.Format("{0,-40} {1,5}\n", ...) call.
        Console.WriteLine($"{tokenName,-40} {tokenValue,5}\n");
    }

    var assemblyGenerator = new AssemblyGenerator(result);
    await assemblyGenerator.GenerateAsync();

    // Keep the console output visible until the user presses Enter.
    Console.ReadLine();
}
/// <summary>
/// Creates an Ogma instance with a fresh lexical analyser and resets the
/// stepping state to its starting values.
/// </summary>
public Ogma()
{
    analyser = new LexicalAnalyser();

    StepPosition = 1;
    CurrentStep = string.Empty;
    CurrentObject = string.Empty;
}
/// <summary>
/// Runs the lexical analyser over a set of representative filter expressions
/// (including null, empty, and whitespace-laden input) and approves the
/// formatted token report.
/// </summary>
public void LexicalAnalysisTests()
{
    //Arrange
    var inputs = new[]
    {
        "var eq 'literal'",
        "(var gt 'literal' and var2 lt 45) or (datevar gt datetime'2017-07-21T22:02:31Z')",
        "var eq guid'9E37E338-27B2-4F32-AFFA-DC74F017AF1D' or (var2 ge 45L and var3 ne 35.4)",
        string.Empty,
        null,
        "\t\tvar eq\r\n'literal'",
    };
    var output = new Output();

    //Act
    foreach (var input in inputs)
    {
        var tokens = LexicalAnalyser.Analyse(input);

        output.WrapLine($@"Analysis of ""{input ?? "NULL"}"":");
        output.FormatTable(tokens.AsReport(rep => rep
            .AddColumn(r => r.TokenType, cc => cc.LeftAlign())
            .AddColumn(r => r.Text, cc => { })));
        output.WriteLine();
        output.WriteLine();
    }

    //Assert
    Approvals.Verify(output.Report);
}
/// <summary>
/// Verifies parsing of an if/else statement: the then-branch is a single
/// assignment, the else-branch a block with its own local declaration.
/// The expected tree is built from the same token list the parser consumed,
/// so token positions stay consistent.
/// </summary>
public void SyntaxAnalyserIfElse()
{
    const string clCode = @" int main() { int i; i = 0; if (i == 0) i = 1; else { int j; j = 1; i = i - j; } } ";
    var tokens = new LexicalAnalyser().Convert(clCode);
    var actualTree = new SyntaxAnalyser().Convert(tokens);
    var expectedTree = new Node(Nodes.Program, _tokenDefault,
        new Node(Nodes.DeclFunc, tokens[1],            // main
            new Node(Nodes.Block, _tokenDefault,
                new Node(Nodes.DeclVar, tokens[6]),    // int i;
                new Node(Nodes.Assign, tokens[8],      // i = 0;
                    new Node(Nodes.Const, tokens[10])
                ),
                // if (i == 0) i = 1; else { ... }
                new Node(Nodes.Condition, _tokenDefault,
                    new Node(Nodes.AreEqual, _tokenDefault,
                        new Node(Nodes.RefVar, tokens[14]),
                        new Node(Nodes.Const, tokens[16])
                    ),
                    // then-branch: i = 1;
                    new Node(Nodes.Assign, tokens[18],
                        new Node(Nodes.Const, tokens[20])
                    ),
                    // else-branch block: int j; j = 1; i = i - j;
                    new Node(Nodes.Block, _tokenDefault,
                        new Node(Nodes.DeclVar, tokens[25]),
                        new Node(Nodes.Assign, tokens[27],
                            new Node(Nodes.Const, tokens[29])
                        ),
                        new Node(Nodes.Assign, tokens[31],
                            new Node(Nodes.Substraction, _tokenDefault,
                                new Node(Nodes.RefVar, tokens[33]),
                                new Node(Nodes.RefVar, tokens[35])
                            )
                        )
                    )
                )
            )));
    Assert.AreEqual(expectedTree, actualTree);
}
/// <summary>
/// Drives one translation pass: parses the lexer's token stream, logs the
/// resulting syntax tree, and emits CIL for it. The lexer is disposed when
/// the pass completes (successfully or not).
/// </summary>
private void PerformTranslation(LexicalAnalyser lexer, CilEmitter emitter, ILogger treeLogger)
{
    using (lexer)
    {
        var tree = new Parser(lexer).Parse();

        tree.Log(treeLogger);
        tree.Generate(emitter);
    }
}
/// <summary>
/// Re-lexes the contents of the source text box and refreshes the grid and
/// output boxes with the resulting tokens. All work runs via Invoke because
/// it reads and writes WinForms controls.
/// </summary>
private Task<bool> ParseStrings()
{
    Invoke(new MethodInvoker(delegate
    {
        // richTextBox1 must be read on the UI thread, so lexing happens
        // inside the invoked delegate too.
        var tokens = new LexicalAnalyser().AnalyseResourceFile(richTextBox1.Lines);

        dataGridView1.Rows.Clear();
        richTextBox2.Clear();
        richTextBox3.Clear();

        CleanUpLabels(ref tokens);
        Display(tokens);
    }));

    return Task.FromResult(true);
}
/// <summary>
/// Tokenises <paramref name="sourceCode"/>, records how long lexing took in
/// <c>ElapsedTime</c>, and returns a printable dump of every token.
/// </summary>
/// <param name="sourceCode">The source text to tokenise.</param>
/// <returns>One "Type:/Value:" paragraph per token.</returns>
static string GetTokens(string sourceCode)
{
    var sw = Stopwatch.StartNew();
    var lexer = new LexicalAnalyser(sourceCode);
    LinkedList<Token> tokens = lexer.Tokenize();
    sw.Stop();
    ElapsedTime = sw.Elapsed;

    // StringBuilder replaces the previous Aggregate over string
    // concatenation, which was O(n^2) in the number of tokens.
    var dump = new StringBuilder();
    foreach (var token in tokens)
    {
        dump.Append($"Type: {token.Type}\n");
        dump.Append($"Value: {token.Value}\n\n");
    }
    return dump.ToString();
}
/// <summary>
/// Lexes the embedded disassembly resource (no UI access required), then
/// updates the grid and output boxes on the UI thread with the cleaned-up
/// token list.
/// </summary>
private Task<bool> ParseDisassembly()
{
    // The resource text is not a control, so it can be lexed up front.
    var tokens = new LexicalAnalyser().AnalyseResourceFile(Resources.s2.Split('\n'));

    Invoke(new MethodInvoker(delegate
    {
        dataGridView1.Rows.Clear();
        richTextBox2.Clear();
        richTextBox3.Clear();

        CleanUpLabels(ref tokens);
        Display(tokens);
    }));

    return Task.FromResult(true);
}
/// <summary>
/// Parses <paramref name="input"/> into an ArithmeticExpression; null input
/// is treated as empty. On failure the returned expression carries an error
/// message quoting the unconsumed remainder of the input.
/// </summary>
public static ArithmeticExpression Parse(string input)
{
    var tokens = LexicalAnalyser.ExtractTokens(input ?? string.Empty);
    var pos = new TokenKeeper(tokens);

    // Guard clause: bail out unless a calculation was taken AND every
    // token was consumed.
    if (!TryTakeCalc(pos, out var calculation) || !pos.Finished)
    {
        var errorMessage = $"Unable to interpret calculation at \"{pos.RemainingData()}\"";
        return new ArithmeticExpression(errorMessage);
    }

    return new ArithmeticExpression(calculation);
}
/// <summary>
/// Verifies that a for-loop is desugared into the expected tree: an outer
/// block holds the initialiser, then a Loop whose Condition guards the body
/// plus increment and otherwise hits a Break node.
/// </summary>
public void SyntaxAnalyserFor()
{
    const string clCode = @" int main() { int i; for (i = 0; i < 10; i = i + 1) { out i; } } ";
    var tokens = new LexicalAnalyser().Convert(clCode);
    var actualTree = new SyntaxAnalyser().Convert(tokens);
    var expectedTree = new Node(Nodes.Program, _tokenDefault,
        new Node(Nodes.DeclFunc, tokens[1],              // main
            new Node(Nodes.Block, _tokenDefault,
                new Node(Nodes.DeclVar, tokens[6]),      // int i;
                // The whole for-statement becomes its own block.
                new Node(Nodes.Block, _tokenDefault,
                    // initialiser: i = 0;
                    new Node(Nodes.Assign, tokens[10],
                        new Node(Nodes.Const, tokens[12])),
                    new Node(Nodes.Loop, _tokenDefault,
                        new Node(Nodes.Condition, _tokenDefault,
                            // guard: i < 10
                            new Node(Nodes.LowerThan, _tokenDefault,
                                new Node(Nodes.RefVar, tokens[14]),
                                new Node(Nodes.Const, tokens[16])
                            ),
                            // condition true: body then increment
                            new Node(Nodes.Block, _tokenDefault,
                                new Node(Nodes.Block, _tokenDefault,
                                    new Node(Nodes.Out, _tokenDefault,
                                        new Node(Nodes.RefVar, tokens[26]))  // out i;
                                ),
                                // increment: i = i + 1
                                new Node(Nodes.Assign, tokens[18],
                                    new Node(Nodes.Addition, _tokenDefault,
                                        new Node(Nodes.RefVar, tokens[20]),
                                        new Node(Nodes.Const, tokens[22])
                                    )
                                )
                            ),
                            // condition false: leave the loop
                            new Node(Nodes.Break, _tokenDefault)
                        )
                    )
                )
            )
        )
    );
    Assert.AreEqual(expectedTree, actualTree);
}
/// <summary>
/// Verifies operator precedence for a mixed logical/arithmetic expression:
/// ! binds tightest, then &lt;, ==/!= and + inside parentheses, then &amp;&amp;,
/// with || at the root of the assigned value.
/// </summary>
public void SyntaxAnalyserArithmeticsAndLogic()
{
    const string clCode = @" int main() { a = !2 && 3 < 5 || 1 == (2 + 3 != 5); } ";
    var tokens = new LexicalAnalyser().Convert(clCode);
    var actualTree = new SyntaxAnalyser().Convert(tokens);
    var expectedTree = new Node(Nodes.Program, _tokenDefault,
        new Node(Nodes.DeclFunc, tokens[1],           // main
            new Node(Nodes.Block, _tokenDefault,
                new Node(Nodes.Assign, tokens[5],     // a = ...
                    // || is the lowest-precedence operator, so it roots
                    // the right-hand side.
                    new Node(Nodes.Or, _tokenDefault,
                        // left: !2 && 3 < 5
                        new Node(Nodes.And, _tokenDefault,
                            new Node(Nodes.Not, _tokenDefault,
                                new Node(Nodes.Const, tokens[8])
                            ),
                            new Node(Nodes.LowerThan, _tokenDefault,
                                new Node(Nodes.Const, tokens[10]),
                                new Node(Nodes.Const, tokens[12])
                            )
                        ),
                        // right: 1 == (2 + 3 != 5)
                        new Node(Nodes.AreEqual, _tokenDefault,
                            new Node(Nodes.Const, tokens[14]),
                            new Node(Nodes.AreNotEqual, _tokenDefault,
                                new Node(Nodes.Addition, _tokenDefault,
                                    new Node(Nodes.Const, tokens[17]),
                                    new Node(Nodes.Const, tokens[19])
                                ),
                                new Node(Nodes.Const, tokens[21])
                            )
                        )
                    )
                )
            )
        )
    );
    Assert.AreEqual(expectedTree, actualTree);
}
/// <summary>
/// Reads the fake input stream line by line, echoing each line followed by
/// the tokens the lexical analyser produces for it, with a blank line
/// between entries.
/// </summary>
static void Main(string[] args)
{
    var lexer = new LexicalAnalyser();

    using (var input = GetFakeInputStream())
    {
        while (!input.EndOfStream)
        {
            var line = input.ReadLine();
            Console.WriteLine($"Line: {line}");

            foreach (var token in lexer.GetTokens(line))
            {
                Console.WriteLine(token);
            }

            Console.WriteLine();
        }
    }
}
/// <summary>
/// Runs a full translation of the source file: lexing, parsing, tree logging
/// and CIL emission. Parse errors are reported to the console; both loggers
/// are flushed whatever the outcome.
/// </summary>
public void Translate()
{
    ILogger lexicalLogger = CreateLogger(LogLexer);
    ILogger treeLogger = CreateLogger(LogSyntaxTree);

    var lexer = new LexicalAnalyser(sourceFilepath, lexicalLogger);
    var emitter = new CilEmitter(programName);

    try
    {
        PerformTranslation(lexer, emitter, treeLogger);
    }
    catch (ParseException exc)
    {
        Console.WriteLine("Error during translation : \n" + exc.Message);
    }
    finally
    {
        // Flush even on failure so partial logs are not lost.
        lexicalLogger.Flush();
        treeLogger.Flush();
    }
}
/// <summary>
/// Parses an "out fields" specification. Semicolon-separated input is run
/// through the lexer and grouped into statements; otherwise the text is a
/// simple comma-separated field list. Null/empty input yields the single
/// wildcard field "*".
/// </summary>
/// <param name="outFieldsText">Raw out-fields text; may be null.</param>
public QueryOutFields(string outFieldsText)
{
    outFieldsText = outFieldsText?.Trim();

    if (string.IsNullOrEmpty(outFieldsText))
    {
        // do nothing — the wildcard default is applied below
    }
    else if (outFieldsText.Contains(";"))
    {
        // Parse, Lexer
        if (!outFieldsText.EndsWith(";"))
        {
            outFieldsText = $"{ outFieldsText };";
        }

        var lexicalAnalyser = new LexicalAnalyser(new LuceneServerOutFieldsSyntax());
        var tokens = lexicalAnalyser.Tokenize(outFieldsText);

        Fields = tokens.GetStatements()
                 .Select(s => new QueryOutField(s))
                 .ToArray();
    }
    else
    {
        // Simple comma-separated list
        Fields = outFieldsText.Split(',')
                 .Select(f => f.Trim())
                 .Select(f => new QueryOutField(f))
                 .ToArray();
    }

    // NOTE(review): _names is captured before the wildcard default below, so
    // empty input leaves _names empty while Fields becomes ["*"] — ordering
    // preserved as-is; confirm this is intended.
    _names = this.Fields?.Select(f => f.Name).ToArray() ?? Array.Empty<string>();

    if (this.Fields == null || !this.Fields.Any())
    {
        this.Fields = new QueryOutField[] { new QueryOutField("*") };
    }
}
/// <summary>
/// Lexes a test file and checks the outcome: valid files must tokenise to
/// the expected text from the matching "ValidCorrect" file, invalid files
/// must raise a LexicalException.
/// </summary>
/// <param name="valid">Whether the file is expected to lex successfully.</param>
/// <param name="filename">Name of the test file to read.</param>
private void TestFile(bool valid, string filename)
{
    var analyser = new LexicalAnalyser();

    if (valid)
    {
        var code = ReadFile("Valid", filename);
        try
        {
            List<Token> tokens = analyser.Convert(code);
            var expected = ReadFile("ValidCorrect", filename);

            // Join token codes with single spaces.
            var given = string.Empty;
            var separator = string.Empty;
            foreach (var token in tokens)
            {
                given += separator + token.ToCode();
                separator = " ";
            }

            Assert.AreEqual(expected, given);
        }
        catch (LexicalException e)
        {
            Assert.Fail(e.Message);
        }
    }
    else
    {
        var code = ReadFile("Invalid", filename);
        try
        {
            analyser.Convert(code);
            Assert.Fail("Invalid/" + filename + " should fail");
        }
        catch (LexicalException)
        {
            // Expected: invalid input must not lex.
        }
    }
}
/// <summary>
/// Lexes the filter text and parses the token stream. Returns a failed
/// ParseResult when lexing produced an error token or when parsing itself
/// fails; otherwise the result carries the parsed root.
/// </summary>
public static ParseResult Parse(string filter)
{
    var tokens = LexicalAnalyser.Analyse(filter);

    // Surface the first lexing error before attempting to parse.
    var firstErr = tokens.FirstOrDefault(t => t.TokenType == TokenType.Error);
    if (firstErr != null)
    {
        return new ParseResult($@"""{firstErr.Text}"" is not a valid token.");
    }

    var result = new ParseResult();
    result.Root = PerformParse(tokens, out var error);
    if (result.Root == null)
    {
        result.FailedParse(error);
    }

    return result;
}
/// <summary>
/// Verifies that a local declaration followed by an assignment parses into
/// DeclVar + Assign nodes inside the function's body block.
/// </summary>
public void SyntaxAnalyserDeclarationAndAssignments()
{
    const string clCode = @" int main() { int i; i = 0; } ";
    var tokens = new LexicalAnalyser().Convert(clCode);

    var actualTree = new SyntaxAnalyser().Convert(tokens);

    var body = new Node(Nodes.Block, _tokenDefault,
        new Node(Nodes.DeclVar, tokens[6]),
        new Node(Nodes.Assign, tokens[8],
            new Node(Nodes.Const, tokens[10])));
    var expectedTree = new Node(Nodes.Program, _tokenDefault,
        new Node(Nodes.DeclFunc, tokens[1], body));

    Assert.AreEqual(expectedTree, actualTree);
}
/// <summary>
/// Test fixture set-up: lexes TestExpression once and wraps the resulting
/// token list in the TokenKeeper under test.
/// </summary>
public TestTokenKeeper()
{
    _tokens = LexicalAnalyser.ExtractTokens(TestExpression).ToList();
    _keeper = new TokenKeeper(_tokens);
}
/// <summary>
/// Event subscription: routes the lexical analyser's DFM-inclusion event to
/// this handler's Analyse method.
/// </summary>
/// <param name="lanalyser">lexical analyser</param>
/// <param name="sanalyser">syntax analyser</param>
protected override void InitEvents(LexicalAnalyser lanalyser, SyntaxisAnalyser sanalyser)
{
    // Method-group conversion replaces the redundant explicit
    // new EventHandler<AnalyserParameters>(...) allocation.
    lanalyser.DfmInclusionEvent += Analyse;
}
/// <summary>
/// Event subscription: routes the syntax analyser's identifier-declaration
/// event to this handler's Analyse method.
/// </summary>
/// <param name="lanalyser">lexical analyser</param>
/// <param name="sanalyser">syntax analyser</param>
protected override void InitEvents(LexicalAnalyser lanalyser, SyntaxisAnalyser sanalyser)
{
    // Method-group conversion replaces the redundant explicit
    // new EventHandler<AnalyserParameters>(...) allocation.
    sanalyser.IdDeclarationEvent += Analyse;
}
private int memoryUsed = 0; // Memory used for declarations

/// <summary>
/// Creates a parser over the given lexer and primes the look-ahead by
/// advancing to the first token via Move().
/// </summary>
/// <param name="lexer">Token source for the parse.</param>
public Parser(LexicalAnalyser lexer)
{
    this.lexer = lexer;
    Move();
}
/// <summary>
/// Echoes each line of the fake input stream together with the tokens the
/// lexical analyser yields for it, separating entries with a blank line.
/// </summary>
static void Main(string[] args)
{
    var lexer = new LexicalAnalyser();

    using (var input = GetFakeInputStream())
    {
        while (!input.EndOfStream)
        {
            var line = input.ReadLine();
            Console.WriteLine($"Line: {line}");
            foreach (var token in lexer.GetTokens(line))
            {
                Console.WriteLine(token);
            }
            Console.WriteLine();
        }
    }
}
/// <summary>
/// Show the tokens that were found: writes a header into the tree text box,
/// then appends the analyser's token dump for the current root.
/// </summary>
private void ShowTokens()
{
    txtTree.Text = "No errors.\nFound tokens: \n\n";
    LexicalAnalyser myLexicalAnalyser = new LexicalAnalyser();
    txtTree.Text += myLexicalAnalyser.ShowTokens(_root);
}
/// <summary>
/// Event subscription hook. The base implementation subscribes to nothing;
/// derived handlers override this to attach to analyser events.
/// </summary>
/// <param name="lanalyser">lexical analyser</param>
/// <param name="sanalyser">syntax analyser</param>
protected virtual void InitEvents(LexicalAnalyser lanalyser, SyntaxisAnalyser sanalyser)
{
}
/// <summary>
/// Event subscription: routes the syntax analyser's body-defined event to
/// this handler's Analyse method.
/// </summary>
/// <param name="lanalyser">lexical analyser</param>
/// <param name="sanalyser">syntax analyser</param>
protected override void InitEvents(LexicalAnalyser lanalyser, SyntaxisAnalyser sanalyser)
{
    // Method-group conversion replaces the redundant explicit
    // new EventHandler<AnalyserParameters>(...) allocation.
    sanalyser.BodyDefinedEvent += Analyse;
}