public void AmbiguousGrammarParseTest()
{
    var E = new NonTerminal("E");
    var L = new NonTerminal("L");
    var R = new NonTerminal("R");

    var tokenizer = new RegExpTokenizer();
    tokenizer.SetTransitionFunction(new TableDrivenTransitionFunction());
    var id = tokenizer.UseTerminal(RegExp.GetNumberRegExp());
    var assign = tokenizer.UseTerminal(RegExp.Literal("="));
    var deref = tokenizer.UseTerminal(RegExp.Literal("*"));
    tokenizer.BuildTransitions();

    // This assignment/dereference grammar is not SLR(1), so constructing the
    // parsing table must fail with a ParserException.
    var grammar = new AugmentedGrammar()
    {
        E --> L & assign & R,
        E --> R,
        L --> deref & R,
        L --> id,
        R --> L
    };

    var ptBuilder = new SLRParsingTableBuilder();
    ptBuilder.SetGrammar(grammar);
    Assert.Throws(typeof(ParserException), ptBuilder.ConstructParsingTable);
}
public ParsingTableGenerationTests()
{
    E = new NonTerminal("E");
    T = new NonTerminal("T");
    F = new NonTerminal("F");

    tokenizer = new RegExpTokenizer();
    tokenizer.SetTransitionFunction(new TableDrivenTransitionFunction());
    id = tokenizer.UseTerminal(RegExp.GetNumberRegExp());
    plus = tokenizer.UseTerminal(RegExp.Literal('+'));
    mult = tokenizer.UseTerminal(RegExp.Literal('*'));
    leftBrace = tokenizer.UseTerminal(RegExp.Literal('('));
    rightBrace = tokenizer.UseTerminal(RegExp.Literal(')'));

    grammar = new AugmentedGrammar()
    {
        E --> E & plus & T,
        E --> T,
        T --> T & mult & F,
        T --> F,
        F --> leftBrace & E & rightBrace,
        F --> id
    };
}
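// A minimal sketch (not part of the original fixture) showing how the grammar built in
// the constructor above is typically turned into a parsing table. The method name is
// hypothetical; the builder calls are the same ones used by the other tests in this suite.
public void ConstructExpressionGrammarTableSketch()
{
    var ptBuilder = new SLRParsingTableBuilder();
    ptBuilder.SetGrammar(grammar);
    ptBuilder.ConstructParsingTable();

    var table = ptBuilder.GetTable();
    Assert.That(table, Is.Not.Null);
}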
private AugmentedGrammar BuildGrammar()
{
    var motekCode = new NonTerminal("motekCode");                     // M
    var expression = new NonTerminal("expression");                   // S
    var statement = new NonTerminal("statement");                     // H
    var addedStatement = new NonTerminal("addedStatement");           // A
    var multipliedStatement = new NonTerminal("multipliedStatement"); // F
    var parameter = new NonTerminal("parameter");                     // p
    var parameters = new NonTerminal("parameters");                   // P
    var functionCall = new NonTerminal("functionCall");               // U

    var motekGrammar = new AugmentedGrammar()
    {
        motekCode --> statement & mSemicolon & motekCode ^ (ProcessTerminals),                      // M --> H ; M
        motekCode --> statement & mSemicolon ^ (ProcessTerminals),                                  // M --> H ;
        statement --> mIdentifier & mEqualSign & expression ^ (ProcessTerminals),                   // H --> id = S
        expression --> expression & mPlusSign & addedStatement ^ (ProcessTerminals),                // S --> S + A
        expression --> expression & mMinusSign & addedStatement ^ (ProcessTerminals),               // S --> S - A
        expression --> addedStatement ^ (ProcessTerminals),                                         // S --> A
        addedStatement --> addedStatement & mAsterisk & multipliedStatement ^ (ProcessTerminals),   // A --> A * F
        addedStatement --> addedStatement & mDevideSign & multipliedStatement ^ (ProcessTerminals), // A --> A / F
        addedStatement --> multipliedStatement ^ (ProcessTerminals),                                // A --> F
        multipliedStatement --> functionCall,                                                       // F --> U
        multipliedStatement --> mLeftBrace & expression & mRightBrace ^ (ProcessTerminals),         // F --> ( S )
        multipliedStatement --> mNumber ^ (ProcessTerminals),                                       // F --> number
        multipliedStatement --> mIdentifier ^ (ProcessTerminals),                                   // F --> id
        functionCall --> mIdentifier & mLeftBrace & parameters & mRightBrace ^ (ProcessTerminals),  // U --> id ( P )
        parameters --> parameters & mComma & parameter ^ (ProcessTerminals),                        // P --> P , p
        parameters --> parameter ^ (ProcessTerminals),                                              // P --> p
        parameter --> expression                                                                    // p --> S
    };
    return motekGrammar;
}
public void AugmentedGrammarInitialProductionTest()
{
    var P = new NonTerminal("P");
    var g = new AugmentedGrammar()
    {
        P --> P
    };
    Assert.That(g.ToString(),
                Is.EqualTo("(0) " + g.InitialProduction.Symbol + " --> P\n(1) P --> P\n"));
}
public static LRParser GetSLRParser(AugmentedGrammar grammar, Lexer lexer)
{
    var parsingTableBuilder = new SLRParsingTableBuilder(lexer.TokensNumber);
    parsingTableBuilder.SetGrammar(grammar);
    parsingTableBuilder.ConstructParsingTable();

    var parser = new LRParser
    {
        ParsingTable = parsingTableBuilder.GetTable(),
        Lexer = lexer,
        Grammar = grammar
    };
    return parser;
}
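// Usage sketch for GetSLRParser (illustration only, not taken from the original tests):
// builds a one-production grammar over a number terminal, points a lexer at a temp file
// and parses it. Every call below already appears elsewhere in this suite; the method
// name and the "42" input are made up for the example.
public static void GetSLRParserUsageSketch()
{
    var tokenizer = new RegExpTokenizer();
    tokenizer.SetTransitionFunction(new TableDrivenTransitionFunction());
    var id = tokenizer.UseTerminal(RegExp.GetNumberRegExp());
    tokenizer.BuildTransitions();

    var E = new NonTerminal("E");
    var grammar = new AugmentedGrammar() { E --> id };

    string fileName = Path.GetTempFileName();
    File.WriteAllText(fileName, "42");
    Stream fileStream = File.OpenRead(fileName);

    var lexer = new Lexer(tokenizer);
    lexer.SetDataSource(fileStream);

    LRParser parser = GetSLRParser(grammar, lexer);
    parser.InputAccepted += (sender, eventArgs) => Console.WriteLine("Accepted!");
    parser.ParseInput();

    fileStream.Close();
    File.Delete(fileName);
}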
public void ProductionWithSemanticActionTest()
{
    NonTerminal E = new NonTerminal("E"), T = new NonTerminal("T"), F = new NonTerminal("F");
    var plus = tokenizer.UseTerminal(RegExp.Literal('+'));
    var mult = tokenizer.UseTerminal(RegExp.Literal('*'));

    int testValue = 0;
    var g = new AugmentedGrammar()
    {
        E --> T & F ^ (v => testValue = 3),          // semantic action of production 1
        T --> plus,                                  // production 2 (no action)
        E --> T & plus ^ (v => testValue = 5),       // production 3
        E --> plus & mult & T ^ (v => testValue = 8) // production 4
    };

    // Actions are not executed while the grammar is built...
    Assert.That(testValue, Is.EqualTo(0));

    // ...but can be invoked explicitly via the production objects
    // (production 0 is the augmented start production).
    g.Productions[1].SemanticAction(null);
    Assert.That(testValue, Is.EqualTo(3));
    g.Productions[3].SemanticAction(null);
    Assert.That(testValue, Is.EqualTo(5));
    g.Productions[4].SemanticAction(null);
    Assert.That(testValue, Is.EqualTo(8));
}
private AugmentedGrammar BuildGrammar()
{
    var misaCode = new NonTerminal("mISA_code");
    var codeLine = new NonTerminal("codeline");
    var code = new NonTerminal("code");
    var instruction = new NonTerminal("instruction");
    var operands = new NonTerminal("operands");
    var operand = new NonTerminal("operand");
    var slashedInstruction = new NonTerminal("slashedInstruction");

    var misaGrammar = new AugmentedGrammar()
    {
        // A program is a sequence of code lines.
        misaCode --> misaCode & codeLine,
        misaCode --> codeLine,
        codeLine --> code & mNewLine,
        codeLine --> mNewLine,

        // A code line is an instruction with operands, a label or a directive.
        code --> instruction & operands,
        code --> slashedInstruction & operands & mSemicolon,
        code --> mIdentifier & mColon,
        code --> mDirective,

        // Instruction mnemonics with their optional dotted/slashed suffixes.
        instruction --> mIdentifier,
        instruction --> mIdentifier & mDot & mNumber,
        instruction --> mIdentifier & mDotC & mDot & mNumber,
        instruction --> mIdentifier & mDotC & mDot & mNumber & mSlash & mNumber,
        slashedInstruction --> mIdentifier & mDot & mNumber & mSlash & mNumber,

        // Operand lists and the individual operand forms.
        operands --> operand & mComma & operands,
        operands --> operand,
        operand --> mNumber,
        operand --> mHexNumber,
        operand --> mRegister,
        operand --> mIdentifier,
        operand --> mIdentifier & mColon & mSquareBraceOpen & mRegister & mSquareBraceClose,
        operand --> mIdentifier & mColon & mSquareBraceOpen & mRegister & mPlus & mNumber & mSquareBraceClose
    };
    return misaGrammar;
}
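// Sketch (an assumption, not from the original file) of how this fixture's BuildGrammar()
// is presumably consumed: fed to an SLR table builder and wrapped in an LRParser, the same
// wiring used by GetSLRParser elsewhere in the suite. The lexer argument and the m*
// terminal fields are assumed to come from this fixture's setup; the method name is made up.
private LRParser CreateMisaParserSketch(ILexer lexer)
{
    var grammar = BuildGrammar();

    var ptBuilder = new SLRParsingTableBuilder();
    ptBuilder.SetGrammar(grammar);
    ptBuilder.ConstructParsingTable();

    return new LRParser
    {
        Grammar = grammar,
        Lexer = lexer,
        ParsingTable = ptBuilder.GetTable()
    };
}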
public void ArithmeticStatementFullyAutonomousParseTest()
{
    const string INPUT = @"2*(3+4)";
    string fileName = Path.GetTempFileName();
    File.WriteAllText(fileName, INPUT);

    RegExpTokenizer tokenizer = new RegExpTokenizer();
    tokenizer.SetTransitionFunction(new TableDrivenTransitionFunction());
    ILexer lexer = new Lexer(tokenizer);
    Stream fileStream = File.OpenRead(fileName);
    lexer.SetDataSource(fileStream);

    var stack = new Stack<int>();
    var parser = new LRParser();

    var E = new NonTerminal("E");
    var T = new NonTerminal("T");
    var F = new NonTerminal("F");

    var id = tokenizer.UseTerminal(RegExp.AtLeastOneOf(RegExp.Choice(
        RegExp.Literal('0'), RegExp.Literal('1'), RegExp.Literal('2'),
        RegExp.Literal('3'), RegExp.Literal('4'), RegExp.Literal('5'),
        RegExp.Literal('6'), RegExp.Literal('7'), RegExp.Literal('8'),
        RegExp.Literal('9'))));
    var plus = tokenizer.UseTerminal(RegExp.Literal('+'));
    var mult = tokenizer.UseTerminal(RegExp.Literal('*'));
    var leftBrace = tokenizer.UseTerminal(RegExp.Literal('('));
    var rightBrace = tokenizer.UseTerminal(RegExp.Literal(')'));
    tokenizer.BuildTransitions();

    // Semantic actions evaluate the expression on a stack while it is parsed.
    var grammar = new AugmentedGrammar()
    {
        E --> E & plus & T ^ (v => stack.Push(stack.Pop() + stack.Pop())),
        E --> T,
        T --> T & mult & F ^ (v => stack.Push(stack.Pop() * stack.Pop())),
        T --> F,
        F --> leftBrace & E & rightBrace,
        F --> id ^ (v => stack.Push(v[id].AsInt()))
    };

    Console.WriteLine("Grammar being tested: \n{0}", grammar);
    Console.WriteLine("Input being parsed: {0}\n", INPUT);
    Console.WriteLine("Parsing process:\n");

    parser.Grammar = grammar;
    parser.Lexer = lexer;

    // Unlike ArithmeticStatementParseTest, the parsing table here is constructed
    // automatically from the grammar instead of being written by hand.
    var ptBuilder = new SLRParsingTableBuilder();
    ptBuilder.SetGrammar(grammar);
    ptBuilder.ConstructParsingTable();
    parser.ParsingTable = ptBuilder.GetTable();

    parser.InputAccepted += (sender, eventArgs) => Console.WriteLine("Accepted!");
    parser.ParseInput();

    Assert.That(stack.Pop(), Is.EqualTo(14));
    Assert.That(stack.Count, Is.EqualTo(0));

    fileStream.Close();
    File.Delete(fileName);
}
public void ArithmeticStatementParseTest()
{
    const string INPUT = @"2*(3+4)";
    string fileName = Path.GetTempFileName();
    File.WriteAllText(fileName, INPUT);

    RegExpTokenizer tokenizer = new RegExpTokenizer();
    tokenizer.SetTransitionFunction(new TableDrivenTransitionFunction());
    ILexer lexer = new Lexer(tokenizer);
    Stream fileStream = File.OpenRead(fileName);
    lexer.SetDataSource(fileStream);

    // Hand-written SLR action table for the arithmetic grammar defined below.
    // Terminal columns: 0 = end-of-input, 1 = id, 2 = '+', 3 = '*', 4 = '(', 5 = ')'.
    var actionTable = new ActionTableEntry[12, 6];

    #region Populating Action Table

    // State 0
    actionTable[0, 1].Action = ParserAction.Shift;
    actionTable[0, 1].Destination = 5;
    actionTable[0, 4].Action = ParserAction.Shift;
    actionTable[0, 4].Destination = 4;

    // State 1 (accept on end-of-input)
    actionTable[1, 0].Action = ParserAction.Accept;
    actionTable[1, 2].Action = ParserAction.Shift;
    actionTable[1, 2].Destination = 6;

    // State 2
    actionTable[2, 0].Action = ParserAction.Reduce;
    actionTable[2, 0].Destination = 2;
    actionTable[2, 2].Action = ParserAction.Reduce;
    actionTable[2, 2].Destination = 2;
    actionTable[2, 3].Action = ParserAction.Shift;
    actionTable[2, 3].Destination = 7;
    actionTable[2, 5].Action = ParserAction.Reduce;
    actionTable[2, 5].Destination = 2;

    // State 3
    actionTable[3, 0].Action = ParserAction.Reduce;
    actionTable[3, 0].Destination = 4;
    actionTable[3, 2].Action = ParserAction.Reduce;
    actionTable[3, 2].Destination = 4;
    actionTable[3, 3].Action = ParserAction.Reduce;
    actionTable[3, 3].Destination = 4;
    actionTable[3, 5].Action = ParserAction.Reduce;
    actionTable[3, 5].Destination = 4;

    // State 4
    actionTable[4, 1].Action = ParserAction.Shift;
    actionTable[4, 1].Destination = 5;
    actionTable[4, 4].Action = ParserAction.Shift;
    actionTable[4, 4].Destination = 4;

    // State 5
    actionTable[5, 0].Action = ParserAction.Reduce;
    actionTable[5, 0].Destination = 6;
    actionTable[5, 2].Action = ParserAction.Reduce;
    actionTable[5, 2].Destination = 6;
    actionTable[5, 3].Action = ParserAction.Reduce;
    actionTable[5, 3].Destination = 6;
    actionTable[5, 5].Action = ParserAction.Reduce;
    actionTable[5, 5].Destination = 6;

    // State 6
    actionTable[6, 1].Action = ParserAction.Shift;
    actionTable[6, 1].Destination = 5;
    actionTable[6, 4].Action = ParserAction.Shift;
    actionTable[6, 4].Destination = 4;

    // State 7
    actionTable[7, 1].Action = ParserAction.Shift;
    actionTable[7, 1].Destination = 5;
    actionTable[7, 4].Action = ParserAction.Shift;
    actionTable[7, 4].Destination = 4;

    // State 8
    actionTable[8, 2].Action = ParserAction.Shift;
    actionTable[8, 2].Destination = 6;
    actionTable[8, 5].Action = ParserAction.Shift;
    actionTable[8, 5].Destination = 11;

    // State 9
    actionTable[9, 0].Action = ParserAction.Reduce;
    actionTable[9, 0].Destination = 1;
    actionTable[9, 2].Action = ParserAction.Reduce;
    actionTable[9, 2].Destination = 1;
    actionTable[9, 3].Action = ParserAction.Shift;
    actionTable[9, 3].Destination = 7;
    actionTable[9, 5].Action = ParserAction.Reduce;
    actionTable[9, 5].Destination = 1;

    // State 10
    actionTable[10, 0].Action = ParserAction.Reduce;
    actionTable[10, 0].Destination = 3;
    actionTable[10, 2].Action = ParserAction.Reduce;
    actionTable[10, 2].Destination = 3;
    actionTable[10, 3].Action = ParserAction.Reduce;
    actionTable[10, 3].Destination = 3;
    actionTable[10, 5].Action = ParserAction.Reduce;
    actionTable[10, 5].Destination = 3;

    // State 11
    actionTable[11, 0].Action = ParserAction.Reduce;
    actionTable[11, 0].Destination = 5;
    actionTable[11, 2].Action = ParserAction.Reduce;
    actionTable[11, 2].Destination = 5;
    actionTable[11, 3].Action = ParserAction.Reduce;
    actionTable[11, 3].Destination = 5;
    actionTable[11, 5].Action = ParserAction.Reduce;
    actionTable[11, 5].Destination = 5;
    #endregion

    // Goto table: non-terminal columns 0 = E, 1 = T, 2 = F.
    var gotoTable = new int[12, 3];

    #region Populating Goto Table

    gotoTable[0, 0] = 1;
    gotoTable[0, 1] = 2;
    gotoTable[0, 2] = 3;
    gotoTable[4, 0] = 8;
    gotoTable[4, 1] = 2;
    gotoTable[4, 2] = 3;
    gotoTable[6, 1] = 9;
    gotoTable[6, 2] = 3;
    gotoTable[7, 2] = 10;

    #endregion

    var stack = new Stack<int>();
    var parser = new LRParser();

    var E = new NonTerminal("E");
    var T = new NonTerminal("T");
    var F = new NonTerminal("F");

    var id = tokenizer.UseTerminal(RegExp.AtLeastOneOf(RegExp.Choice(
        RegExp.Literal('0'), RegExp.Literal('1'), RegExp.Literal('2'),
        RegExp.Literal('3'), RegExp.Literal('4'), RegExp.Literal('5'),
        RegExp.Literal('6'), RegExp.Literal('7'), RegExp.Literal('8'),
        RegExp.Literal('9'))));
    var plus = tokenizer.UseTerminal(RegExp.Literal('+'));
    var mult = tokenizer.UseTerminal(RegExp.Literal('*'));
    var leftBrace = tokenizer.UseTerminal(RegExp.Literal('('));
    var rightBrace = tokenizer.UseTerminal(RegExp.Literal(')'));
    tokenizer.BuildTransitions();

    // Semantic actions evaluate the expression on a stack while it is parsed.
    var grammar = new AugmentedGrammar()
    {
        E --> E & plus & T ^ (v => stack.Push(stack.Pop() + stack.Pop())),
        E --> T,
        T --> T & mult & F ^ (v => stack.Push(stack.Pop() * stack.Pop())),
        T --> F,
        F --> leftBrace & E & rightBrace,
        F --> id ^ (v => stack.Push(v[id].AsInt()))
    };

    Console.WriteLine("Grammar being tested: \n{0}", grammar);
    Console.WriteLine("Input being parsed: {0}\n", INPUT);
    Console.WriteLine("Parsing process:\n");

    parser.Grammar = grammar;
    parser.Lexer = lexer;
    parser.ParsingTable = new LRParsingTable(actionTable, gotoTable);
    parser.InputAccepted += (sender, eventArgs) => Console.WriteLine("Accepted!");
    parser.ParseInput();

    Assert.That(stack.Pop(), Is.EqualTo(14));
    Assert.That(stack.Count, Is.EqualTo(0));

    fileStream.Close();
    File.Delete(fileName);
}