public void parseArrayAndDataTest() {
    // parseArrayElement: identifier node "bla" with a single numeric child "2";
    // the terminating ";" must remain unread for the caller.
    var tokenizer = new Tokenizer.Tokenizer(getTestFilePath("array_and_data_elements.txt"));
    var reader = new TokenReader(tokenizer);
    var node = Parser.Parser.parseArrayElement(reader);
    Assert.AreEqual((Token)node.getValue(), new Token(Token.TokenType.Identifier, "bla"));
    var expected = new ArrayList { new AstNode(new Token(Token.TokenType.Number, "2")) };
    Assert.AreEqual(expected, node.getChilds());
    Assert.AreEqual(reader.readNextToken(), new Token(Token.TokenType.Delimiter, ";"));

    // parseDataElement over the same fixture must produce an identical tree.
    tokenizer = new Tokenizer.Tokenizer(getTestFilePath("array_and_data_elements.txt"));
    reader = new TokenReader(tokenizer);
    node = Parser.Parser.parseDataElement(reader);
    Assert.AreEqual((Token)node.getValue(), new Token(Token.TokenType.Identifier, "bla"));
    expected = new ArrayList { new AstNode(new Token(Token.TokenType.Number, "2")) };
    Assert.AreEqual(expected, node.getChilds());
    Assert.AreEqual(reader.readNextToken(), new Token(Token.TokenType.Delimiter, ";"));
}
public void parseConstantTest() {
    var tokenizer = new Tokenizer.Tokenizer(getTestFilePath("constant.txt"));
    var reader = new TokenReader(tokenizer);

    // Case 1: "const a = 1, b = 2;" — two "=" definition children.
    var node = Parser.Parser.parseConstant(reader);
    Assert.AreEqual((Token)node.getValue(), new Token(Token.TokenType.Keyword, "const"));
    var actual = node.getChilds();
    var expected = new ArrayList();
    var defA = new AstNode(new Token(Token.TokenType.Operator, "="));
    defA.addChild(new AstNode(new Token(Token.TokenType.Identifier, "a")));
    defA.addChild(new AstNode(new Token(Token.TokenType.Number, "1")));
    expected.Add(defA);
    var defB = new AstNode(new Token(Token.TokenType.Operator, "="));
    defB.addChild(new AstNode(new Token(Token.TokenType.Identifier, "b")));
    defB.addChild(new AstNode(new Token(Token.TokenType.Number, "2")));
    expected.Add(defB);
    Assert.AreEqual(actual, expected);
    Assert.AreEqual(reader.readNextToken(), new Token(Token.TokenType.Delimiter, ";"));

    // Case 2: a single definition "const a = 1;".
    node = Parser.Parser.parseConstant(reader);
    Assert.AreEqual((Token)node.getValue(), new Token(Token.TokenType.Keyword, "const"));
    actual = node.getChilds();
    expected = new ArrayList();
    defA = new AstNode(new Token(Token.TokenType.Operator, "="));
    defA.addChild(new AstNode(new Token(Token.TokenType.Identifier, "a")));
    defA.addChild(new AstNode(new Token(Token.TokenType.Number, "1")));
    expected.Add(defA);
    Assert.AreEqual(actual, expected);
    Assert.AreEqual(reader.readNextToken(), new Token(Token.TokenType.Delimiter, ";"));
}
public void parseResultsTest() {
    // parseResults must yield a Results node whose children are the registers
    // listed before each ";". BUG FIX: the original built `expectedChilds` for
    // all three cases, discarded the result of getChilds(), and never compared
    // them — the children were effectively untested.
    var tokenizer = new Tokenizer.Tokenizer(getTestFilePath("results.txt"));
    var reader = new TokenReader(tokenizer);

    // Case 1: three registers.
    var node = Parser.Parser.parseResults(reader);
    Assert.AreEqual(node.getValue(), AstNode.NodeType.Results);
    var curChilds = node.getChilds();
    var expectedChilds = new ArrayList {
        new AstNode(new Token(Token.TokenType.Register, "R1")),
        new AstNode(new Token(Token.TokenType.Register, "R2")),
        new AstNode(new Token(Token.TokenType.Register, "R3"))
    };
    Assert.AreEqual(expectedChilds, curChilds);
    Assert.AreEqual(reader.readNextToken(), new Token(Token.TokenType.Delimiter, ";"));

    // Case 2: a single register.
    node = Parser.Parser.parseResults(reader);
    Assert.AreEqual(node.getValue(), AstNode.NodeType.Results);
    curChilds = node.getChilds();
    expectedChilds = new ArrayList { new AstNode(new Token(Token.TokenType.Register, "R1")) };
    Assert.AreEqual(expectedChilds, curChilds);
    Assert.AreEqual(reader.readNextToken(), new Token(Token.TokenType.Delimiter, ";"));

    // Case 3: two registers.
    node = Parser.Parser.parseResults(reader);
    Assert.AreEqual(node.getValue(), AstNode.NodeType.Results);
    curChilds = node.getChilds();
    expectedChilds = new ArrayList {
        new AstNode(new Token(Token.TokenType.Register, "R1")),
        new AstNode(new Token(Token.TokenType.Register, "R2"))
    };
    Assert.AreEqual(expectedChilds, curChilds);
    Assert.AreEqual(reader.readNextToken(), new Token(Token.TokenType.Delimiter, ";"));
}
public void parseCallTest() {
    var tokenizer = new Tokenizer.Tokenizer(getTestFilePath("call.txt"));
    var reader = new TokenReader(tokenizer);

    // Case 1: plain call — identifier target plus one call parameter.
    var node = Parser.Parser.parseCall(reader);
    Assert.AreEqual(node.getValue(), "Call");
    var expected = new ArrayList { new AstNode(new Token(Token.TokenType.Identifier, "func")) };
    var callParams = new AstNode("CallParameters");
    callParams.addChild(new AstNode(new Token(Token.TokenType.Identifier, "a")));
    expected.Add(callParams);
    Assert.AreEqual(node.getChilds(), expected);

    // Case 2: member call — "." node holding receiver and method, no parameters.
    node = Parser.Parser.parseCall(reader);
    Assert.AreEqual(node.getValue(), "Call");
    expected = new ArrayList();
    var memberAccess = new AstNode(new Token(Token.TokenType.Delimiter, "."));
    memberAccess.addChild(new AstNode(new Token(Token.TokenType.Identifier, "obj")));
    memberAccess.addChild(new AstNode(new Token(Token.TokenType.Identifier, "func")));
    expected.Add(memberAccess);
    callParams = new AstNode("CallParameters");
    expected.Add(callParams);
    Assert.AreEqual(node.getChilds(), expected);
}
public void parseStatementTest() {
    var tokenizer = new Tokenizer.Tokenizer(getTestFilePath("statement.txt"));
    var reader = new TokenReader(tokenizer);

    // Case 1: a bare "goto a" statement.
    var node = Parser.Parser.parseStatement(reader);
    Assert.AreEqual(node.getValue(), AstNode.NodeType.Statement);
    var jump = new AstNode(new Token(Token.TokenType.Keyword, "goto"));
    jump.addChild(new AstNode(new Token(Token.TokenType.Identifier, "a")));
    var expected = new ArrayList();
    expected.Add(jump);
    Assert.AreEqual(node.getChilds(), expected);

    // Case 2: the same jump preceded by a label identifier "id".
    node = Parser.Parser.parseStatement(reader);
    Assert.AreEqual(node.getValue(), AstNode.NodeType.Statement);
    expected = new ArrayList { new AstNode(new Token(Token.TokenType.Identifier, "id")) };
    jump = new AstNode(new Token(Token.TokenType.Keyword, "goto"));
    jump.addChild(new AstNode(new Token(Token.TokenType.Identifier, "a")));
    expected.Add(jump);
    Assert.AreEqual(node.getChilds(), expected);
}
public void parseExpressionTest() {
    var tokenizer = new Tokenizer.Tokenizer(getTestFilePath("expression.txt"));
    var reader = new TokenReader(tokenizer);

    // Case 1: binary "+" with numeric operands 1 and 2.
    var node = Parser.Parser.parseExpression(reader);
    Assert.AreEqual((Token)node.getValue(), new Token(Token.TokenType.Operator, "+"));
    var expected = new ArrayList {
        new AstNode(new Token(Token.TokenType.Number, "1")),
        new AstNode(new Token(Token.TokenType.Number, "2"))
    };
    Assert.AreEqual(expected, node.getChilds());
    Assert.AreEqual(reader.readNextToken(), new Token(Token.TokenType.Delimiter, ";"));

    // Case 2: unary "&" applied to identifier "a".
    node = Parser.Parser.parseExpression(reader);
    Assert.AreEqual((Token)node.getValue(), new Token(Token.TokenType.Operator, "&"));
    expected = new ArrayList { new AstNode(new Token(Token.TokenType.Identifier, "a")) };
    Assert.AreEqual(expected, node.getChilds());
    Assert.AreEqual(reader.readNextToken(), new Token(Token.TokenType.Delimiter, ";"));
}
public void parseOperandTest() {
    // parseOperand must handle all three operand forms; each fixture ends in ";".

    // A numeric literal.
    var tokenizer = new Tokenizer.Tokenizer(getTestFilePath("Literal.txt"));
    var reader = new TokenReader(tokenizer);
    var node = Parser.Parser.parseOperand(reader);
    Assert.AreEqual(node, new AstNode(new Token(Token.TokenType.Number, "123")));
    Assert.AreEqual(reader.readNextToken(), new Token(Token.TokenType.Delimiter, ";"));

    // An address-of expression "&var".
    tokenizer = new Tokenizer.Tokenizer(getTestFilePath("Address.txt"));
    reader = new TokenReader(tokenizer);
    node = Parser.Parser.parseOperand(reader);
    Assert.AreEqual((Token)node.getValue(), new Token(Token.TokenType.Operator, "&"));
    var expected = new ArrayList { new AstNode(new Token(Token.TokenType.Identifier, "var")) };
    Assert.AreEqual(expected, node.getChilds());
    Assert.AreEqual(reader.readNextToken(), new Token(Token.TokenType.Delimiter, ";"));

    // A plain identifier receiver.
    tokenizer = new Tokenizer.Tokenizer(getTestFilePath("receiver.txt"));
    reader = new TokenReader(tokenizer);
    node = Parser.Parser.parseOperand(reader);
    Assert.AreEqual(node, new AstNode(new Token(Token.TokenType.Identifier, "a")));
    Assert.AreEqual(reader.readNextToken(), new Token(Token.TokenType.Delimiter, ";"));
}
public Analysis Analize(string content, int tokenLimit) {
    // Tokenizes `content` and, for each of (at most) the first `tokenLimit`
    // tokens, increments a per-language hit count for every dictionary that
    // contains the token. A negative tokenLimit never reaches zero, which
    // preserves the original "effectively unlimited" behavior.
    ITokenizer tokenizer = new Tokenizer.Tokenizer();
    IEnumerable<string> tokens = tokenizer.Tokenize(content);
    Analysis analysis = new Analysis();
    foreach (string token in tokens) {
        // BUG FIX: the limit was checked *after* processing a token, so
        // tokenLimit + 1 tokens were counted. Check before processing.
        if (tokenLimit-- == 0) {
            break;
        }
        foreach (LanguageDictionary dict in languageDictionaries) {
            if (dict.Internal.ContainsKey(token)) {
                // "Langauge" mirrors the property name declared on
                // LanguageDictionary (misspelled there; not fixable here).
                if (!analysis.analysisMap.ContainsKey(dict.Langauge)) {
                    analysis.analysisMap.Add(dict.Langauge, 0.0d);
                }
                analysis.analysisMap[dict.Langauge] += 1;
            }
        }
    }
    return(analysis);
}
public Analyzer(IoManager.IoManager ioManager, Tokenizer.Tokenizer tokenizer) {
    // Wires up the I/O and tokenizer collaborators, then seeds the scope
    // stack with an outermost scope holding the built-in scalar types so
    // later type lookups resolve them without special-casing.
    IoManager = ioManager;
    Tokenizer = tokenizer;
    Scopes = new List<Scope>();

    var builtinScope = new Scope();
    builtinScope.Types.Add(new Type() { Identifier = "integer", BaseType = BaseType.Scalar, ScalarType = ScalarType.Integer });
    builtinScope.Types.Add(new Type() { Identifier = "real", BaseType = BaseType.Scalar, ScalarType = ScalarType.Real });
    builtinScope.Types.Add(new Type() { Identifier = "char", BaseType = BaseType.Scalar, ScalarType = ScalarType.Char });
    builtinScope.Types.Add(new Type() { Identifier = "string", BaseType = BaseType.Scalar, ScalarType = ScalarType.String });
    builtinScope.Types.Add(new Type() { Identifier = "boolean", BaseType = BaseType.Scalar, ScalarType = ScalarType.Boolean });
    Scopes.Add(builtinScope);
}
public void codeTest4Case() {
    // Smoke test: the fourth sample program must parse into a unit AST.
    var tokenizer = new Tokenizer.Tokenizer(getTestFilePath("codeCode4.txt"));
    var reader = new TokenReader(tokenizer);
    var node = Parser.Parser.ParseUnit(reader);
    // BUG FIX: the test only printed the tree and asserted nothing, so it
    // passed even if ParseUnit returned null.
    Assert.NotNull(node);
    Console.WriteLine(node.ToString());
}
public void parseAttributeTest() {
    // An attribute parses to its keyword token ("start"); ";" stays unread.
    var tokenizer = new Tokenizer.Tokenizer(getTestFilePath("attribute.txt"));
    var reader = new TokenReader(tokenizer);

    var attributeNode = Parser.Parser.parseAttribute(reader);

    Assert.AreEqual((Token)attributeNode.getValue(), new Token(Token.TokenType.Keyword, "start"));
    Assert.AreEqual(reader.readNextToken(), new Token(Token.TokenType.Delimiter, ";"));
}
public void parseRegisterTest() {
    // A register operand parses to its Register token ("R1"); ";" stays unread.
    var tokenizer = new Tokenizer.Tokenizer(getTestFilePath("register.txt"));
    var reader = new TokenReader(tokenizer);

    var registerNode = Parser.Parser.parseRegister(reader);

    Assert.AreEqual((Token)registerNode.getValue(), new Token(Token.TokenType.Register, "R1"));
    Assert.AreEqual(reader.readNextToken(), new Token(Token.TokenType.Delimiter, ";"));
}
public void TokenReadingTestCase() {
    // The reader must yield the fixture's tokens in order: numbers 1, 2, 3.
    // Comparison is on string renderings since Token equality is not relied on here.
    var tokenizer = new Tokenizer.Tokenizer(getTestFilePath("test_reading.txt"));
    var reader = new TokenReader(tokenizer);

    Assert.AreEqual(reader.readNextToken().ToString(), new Token(Token.TokenType.Number, "1").ToString());
    Assert.AreEqual(reader.readNextToken().ToString(), new Token(Token.TokenType.Number, "2").ToString());
    Assert.AreEqual(reader.readNextToken().ToString(), new Token(Token.TokenType.Number, "3").ToString());
}
public void parseLiteralTest() {
    // A literal parses to a leaf node holding the Number token "123"; ";" stays unread.
    var tokenizer = new Tokenizer.Tokenizer(getTestFilePath("Literal.txt"));
    var reader = new TokenReader(tokenizer);

    var literalNode = Parser.Parser.parseLiteral(reader);

    Assert.AreEqual(literalNode, new AstNode(new Token(Token.TokenType.Number, "123")));
    Assert.AreEqual(reader.readNextToken(), new Token(Token.TokenType.Delimiter, ";"));
}
public void parseReceiverTest() {
    // A receiver parses to a leaf node holding the Identifier token "a"; ";" stays unread.
    var tokenizer = new Tokenizer.Tokenizer(getTestFilePath("receiver.txt"));
    var reader = new TokenReader(tokenizer);

    var receiverNode = Parser.Parser.parseReceiver(reader);

    Assert.AreEqual(receiverNode, new AstNode(new Token(Token.TokenType.Identifier, "a")));
    Assert.AreEqual(reader.readNextToken(), new Token(Token.TokenType.Delimiter, ";"));
}
public void negativeLoopVarDefinitionTest() {
    // A variable definition inside a loop body is illegal: parseWhile must
    // throw a SyntaxError with the exact diagnostic below.
    var tokenizer = new Tokenizer.Tokenizer(getTestFilePath("varInLoop.txt"));
    var reader = new TokenReader(tokenizer);

    var thrown = Assert.Throws <SyntaxError>(delegate { Parser.Parser.parseWhile(reader); });

    Assert.That(thrown.Message, Is.EqualTo("Can't parse loop body"));
}
public void parseLabelTest() {
    // A label parses to its Identifier token ("id"); ";" stays unread.
    var tokenizer = new Tokenizer.Tokenizer(getTestFilePath("label.txt"));
    var reader = new TokenReader(tokenizer);

    var labelNode = Parser.Parser.parseLabel(reader);

    Assert.AreEqual((Token)labelNode.getValue(), new Token(Token.TokenType.Identifier, "id"));
    Assert.AreEqual(reader.readNextToken(), new Token(Token.TokenType.Delimiter, ";"));
}
public void parseBreakTest() {
    // "break" parses to a childless node holding the keyword token; ";" stays unread.
    var tokenizer = new Tokenizer.Tokenizer(getTestFilePath("Break.txt"));
    var reader = new TokenReader(tokenizer);

    var node = Parser.Parser.parseBreak(reader);

    Assert.AreEqual(((Token)node.getValue()), new Token(Token.TokenType.Keyword, "break"));
    Assert.AreEqual(new ArrayList(), node.getChilds());
    Assert.AreEqual(reader.readNextToken(), new Token(Token.TokenType.Delimiter, ";"));
}
public void parseIdentifierTest() {
    // An identifier parses to a childless node holding its token ("player"); ";" stays unread.
    var tokenizer = new Tokenizer.Tokenizer(getTestFilePath("Identifier.txt"));
    var reader = new TokenReader(tokenizer);

    var identifierNode = Parser.Parser.parseIdentifier(reader);

    Assert.AreEqual((Token)identifierNode.getValue(), new Token(Token.TokenType.Identifier, "player"));
    Assert.AreEqual(new ArrayList(), identifierNode.getChilds());
    Assert.AreEqual(reader.readNextToken(), new Token(Token.TokenType.Delimiter, ";"));
}
public void CommentsIgnored() {
    // Comments in the fixture must be skipped entirely: the only token the
    // reader ever produces is the "code" keyword, followed by end-of-stream.
    var tokenizer = new Tokenizer.Tokenizer(getTestFilePath("comments.txt"));
    var reader = new TokenReader(tokenizer);

    var first = reader.readNextToken();
    Assert.NotNull(first);
    Assert.AreEqual(new Token(Token.TokenType.Keyword, "code").ToString(), first.ToString());
    Assert.Null(reader.readNextToken());
}
private String Text(Tokenizer.Tokenizer tokenizer) {
    // Consumes the next token and returns its value when it is a Comma or a
    // Text token; otherwise reports a parse error with recent context.
    AbstractToken token = tokenizer.NextToken();
    if (token.GetType() == typeof(Comma) || token.GetType() == typeof(Text)) {
        return(token.GetValue());
    }
    // BUG FIX: the message claimed only Text was expected although Comma is
    // accepted too, which made failures misleading to diagnose.
    throw new ParseException("Expected type Comma or Text but found: " + token.GetType() + " after " + tokenizer.GetPreviousCharacters(25));
}
private AbstractToken ValueStop(Tokenizer.Tokenizer tokenizer) {
    // Consumes the next token and returns it when it terminates a value
    // (a closing brace or a value quote); otherwise raises a parse error
    // carrying the last 25 characters of input for context.
    AbstractToken next = tokenizer.NextToken();
    bool isStop = next.GetType() == typeof(ClosingBrace) || next.GetType() == typeof(ValueQuote);
    if (!isStop) {
        throw new ParseException("Expected type ClosingBrace or ValueQuote but found: " + next.GetType() + " after " + tokenizer.GetPreviousCharacters(25));
    }
    return next;
}
private void Equals(Tokenizer.Tokenizer tokenizer) {
    // Consumes the next token, asserting it is an Equals sign; throws a
    // ParseException (with recent input context) otherwise.
    // NOTE(review): this method name hides Object.Equals — presumably
    // intentional in this parser's naming scheme; confirm before renaming.
    AbstractToken next = tokenizer.NextToken();
    if (next.GetType() != typeof(Equals)) {
        throw new ParseException("Expected type Equals but found: " + next.GetType() + " after " + tokenizer.GetPreviousCharacters(25));
    }
}
public void Tokenize() {
    // Runs the tokenizer over the loaded source and exposes the result as a
    // token stream. Requires SourceCode to have been set beforehand.
    Contract.Requires(SourceCode != null);

    Tokenizer = new Tokenizer.Tokenizer(SourceCode);
    Tokenizer.Process();
    //PrintDebug(Tokenizer);

    TokenStream = new TokenStream(Tokenizer);
    //PrintDebug(TokenStream);
}
public void parseWhileTest() {
    // A while statement parses to a While node whose first child is the
    // condition (">" comparison) and whose second child is the loop body.
    var tokenizer = new Tokenizer.Tokenizer(getTestFilePath("while.txt"));
    var reader = new TokenReader(tokenizer);

    var whileNode = Parser.Parser.parseWhile(reader);

    Assert.AreEqual(whileNode.getValue(), AstNode.NodeType.While);
    var children = whileNode.getChilds();
    Assert.AreEqual(children[0].getValue(), new Token(Token.TokenType.Operator, ">"));
    Assert.AreEqual(children[1].getValue(), AstNode.NodeType.LoopBody);
}
public void parseLoopBodyTest() {
    // A loop body parses to a LoopBody node whose first child is a Statement.
    var tokenizer = new Tokenizer.Tokenizer(getTestFilePath("loopBody.txt"));
    var reader = new TokenReader(tokenizer);

    var bodyNode = Parser.Parser.parseLoopBody(reader);

    Assert.AreEqual(bodyNode.GetNodeType(), AstNode.NodeType.LoopBody);
    var children = bodyNode.getChilds();
    Assert.AreEqual(children[0].GetNodeType(), AstNode.NodeType.Statement);
}
public void parseParametersTest() {
    // A parameter list parses to a Parameters node whose children are the
    // declared registers. BUG FIX: the original discarded getChilds() and
    // never compared it with `expectedChilds`, so the children went untested.
    var tokenizer = new Tokenizer.Tokenizer(getTestFilePath("parameters.txt"));
    var reader = new TokenReader(tokenizer);

    var node = Parser.Parser.parseParameters(reader);

    Assert.AreEqual(node.getValue(), AstNode.NodeType.Parameters);
    var curChilds = node.getChilds();
    var expectedChilds = new ArrayList { new AstNode(new Token(Token.TokenType.Register, "R0")) };
    Assert.AreEqual(expectedChilds, curChilds);
}
public void parseAssignmentTest() {
    // "id := 2" parses to a ":=" node with the receiver and the value as
    // children. BUG FIX: the original discarded getChilds() and never
    // compared it with `expectedChilds`, so the children went untested.
    var tokenizer = new Tokenizer.Tokenizer(getTestFilePath("assignment.txt"));
    var reader = new TokenReader(tokenizer);

    var node = Parser.Parser.parseAssignment(reader);

    Assert.AreEqual((Token)node.getValue(), new Token(Token.TokenType.Operator, ":="));
    var curChilds = node.getChilds();
    var expectedChilds = new ArrayList {
        new AstNode(new Token(Token.TokenType.Identifier, "id")),
        new AstNode(new Token(Token.TokenType.Number, "2"))
    };
    Assert.AreEqual(expectedChilds, curChilds);
}
public void parseDirectiveTest() {
    // A directive parses to its keyword node ("format") with the argument
    // ("8") as a child; ";" stays unread. BUG FIX: the original discarded
    // getChilds() and never compared it with `expectedChilds`.
    var tokenizer = new Tokenizer.Tokenizer(getTestFilePath("directive.txt"));
    var reader = new TokenReader(tokenizer);

    var node = Parser.Parser.parseDirective(reader);

    Assert.AreEqual((Token)node.getValue(), new Token(Token.TokenType.Keyword, "format"));
    var curChilds = node.getChilds();
    var expectedChilds = new ArrayList { new AstNode(new Token(Token.TokenType.Number, "8")) };
    Assert.AreEqual(expectedChilds, curChilds);
    Assert.AreEqual(reader.readNextToken(), new Token(Token.TokenType.Delimiter, ";"));
}
public void negativeConditionVarDefinitionTest() {
    // Variable definitions are illegal inside both branches of a condition:
    // parseIf must throw a SyntaxError with the same diagnostic either way.
    var ifTokenizer = new Tokenizer.Tokenizer(getTestFilePath("varInIf.txt"));
    var ifReader = new TokenReader(ifTokenizer);
    var elseTokenizer = new Tokenizer.Tokenizer(getTestFilePath("varInElse.txt"));
    var elseReader = new TokenReader(elseTokenizer);

    var ifError = Assert.Throws <SyntaxError>(delegate { Parser.Parser.parseIf(ifReader); });
    var elseError = Assert.Throws <SyntaxError>(delegate { Parser.Parser.parseIf(elseReader); });

    Assert.That(ifError.Message, Is.EqualTo("Can't parse if body"));
    Assert.That(elseError.Message, Is.EqualTo("Can't parse if body"));
}
/// <summary>
/// Creates a parser that reads its tokens from the given tokenizer.
/// </summary>
/// <param name="tokenizer">Token source; stored for the parser's lifetime.</param>
public Parser(Tokenizer.Tokenizer tokenizer) { this.tokenizer = tokenizer; }
/// <summary>
/// Creates a BibTeX parser backed by the given tokenizer.
/// </summary>
/// <param name="tokenizer">Token source; stored for the parser's lifetime.</param>
public BibtexParser(Tokenizer.Tokenizer tokenizer) { _tokenizer = tokenizer; }