// Takes a token as input and makes a node out of it
public Node makeExp(Token t) {
    if (t == null)
        return nodeNil;

    TokenType tt = t.getType();

    // If this token is a LPAREN, parse the rest of the list
    if (tt == TokenType.LPAREN) {
        return parseRest();
    }
    // If token is TRUE or FALSE,
    // return pointers to the nodes we initialized at the beginning
    else if (tt == TokenType.FALSE) {
        return nodeFalse;
    }
    else if (tt == TokenType.TRUE) {
        return nodeTrue;
    }
    // If this token is a QUOTE, make a Cons node
    // with ' as its car
    // and the parsing of a node (or node tree) as its cdr,
    // because what follows a QUOTE token should be treated
    // as a regular list to parse.
    else if (tt == TokenType.QUOTE) {
        return new Cons(new Ident("\'"), parseExp());
    }
    // For INT, STRING, and IDENT tokens,
    // just return their respective nodes while keeping node data
    // consistent with the original token values
    else if (tt == TokenType.INT)
        return new IntLit(t.getIntVal());
    else if (tt == TokenType.STRING)
        return new StringLit(t.getStringVal());
    else /* (tt == TokenType.IDENT) */
        return new Ident(t.getName());
}
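A minimal driver sketch, not part of the original snippet, showing how makeExp could be called in a loop; the parseProgram name is an assumption for illustration, and the scanner field is the same one the makeRest snippet below uses.

public void parseProgram() {
    // Pull tokens until the scanner is exhausted, building one tree
    // per top-level expression (assumes a 'scanner' field with getNextToken()).
    Token t = scanner.getNextToken();
    while (t != null) {
        Node tree = makeExp(t);
        // ... use the tree here (print it, evaluate it, etc.) ...
        t = scanner.getNextToken();
    }
}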
private Node parseExp() {
    Token t = Scanner.getNextToken();
    if (t == null)
        return null;
    else if (t.getType() == TokenType.LPAREN)
        return parseRest();
    else if (t.getType() == TokenType.TRUE)
        return new BoolLit(true);
    else if (t.getType() == TokenType.FALSE)
        return new BoolLit(false);
    else if (t.getType() == TokenType.INT)
        return new IntLit(t.getIntVal());
    else if (t.getType() == TokenType.IDENT)
        return new Ident(t.getName());
    else if (t.getType() == TokenType.QUOTE)
        return new Cons(new Ident("quote"), new Cons(parseExp(), null));
    else if (t.getType() == TokenType.STRING)
        return new StrLit(t.getStringVal());
    else if (t.getType() == TokenType.RPAREN || t.getType() == TokenType.DOT)
        Console.Error.WriteLine("Error: unexpected RPAREN or DOT token");
    return null;
    //INCLUDE RPAREN AND DOT ERRORS
}
private void InsertOperator(Token token, TreeNode node) { Insert(token, node); sentinel = node.HasRightChild() ? node.RightChild : node.LeftChild; }
public void UnGet(Token token) { if (token != null) { _tokenStack.Push(token); } }
private void Insert(Token token, TreeNode node) { if (node.HasLeftChild()) { node.SetRightChild(token); } else { node.SetLeftChild(token); } }
private Node parseExp(Token t) {
    if (t == null) {
        //Console.Error.WriteLine("Warning null token!!!");
        return null;
    }
    else if (t.getType() == TokenType.LPAREN) {
        return parseRest();
    }
    else if (t.getType() == TokenType.FALSE) {
        return BoolLit.getFalse();
    }
    else if (t.getType() == TokenType.TRUE) {
        return BoolLit.getTrue();
    }
    else if (t.getType() == TokenType.QUOTE) {
        // quote token does not have a name, so give it name "quote"
        //      cons
        //     /    \
        // quote    cons
        //         /    \
        //       exp    Nil
        return new Cons(new Ident("quote"), new Cons(parseExp(), Nil.getNil()));
    }
    else if (t.getType() == TokenType.INT) {
        return new IntLit(t.getIntVal());
    }
    else if (t.getType() == TokenType.STRING) {
        return new StringLit(t.getStringVal());
    }
    else if (t.getType() == TokenType.IDENT) {
        return new Ident(t.getName());
    }
    else if (t.getType() == TokenType.DOT) {
        Console.Error.WriteLine("Warning illegal DOT token");
        return parseExp();
    }
    else if (t.getType() == TokenType.RPAREN) {
        Console.Error.WriteLine("Warning illegal RPAREN token");
        return parseExp();
    }
    else {
        Console.Error.WriteLine("Warning illegal token");
        return parseExp();
    }
}
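For concreteness, a small hedged sketch of the structure the QUOTE branch above builds for the input 'foo, using the node classes the snippet already assumes:

// 'foo is read as (quote foo):
//   Cons(Ident("quote"), Cons(Ident("foo"), Nil))
Node quoted = new Cons(new Ident("quote"),
                       new Cons(new Ident("foo"), Nil.getNil()));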
public void AddToken(Token token)
{
    if (rootNode == null)
    {
        rootNode = new TreeNode(token);
        sentinel = rootNode;
    }
    else
    {
        while (sentinel.CorrectNumberChildren())
        {
            sentinel = sentinel.Parent;
        }

        if (token.Type != TokenType.Operator)
        {
            Insert(token, sentinel);
        }
        else
        {
            InsertOperator(token, sentinel);
        }
    }
}

private void BeginParsingFile(ref string fileContents)
{
    if (fileContents.Any())
    {
        var firstToken = scanner.GetNextToken(ref fileContents);
        last = firstToken;

        if (firstToken.Type != TokenType.LeftParen)
        {
            Console.WriteLine("\\ Expression: '{0}' does not begin with '(', invalid.", fileContents);
            return;
        }

        ParseAllExpressions(ref fileContents, firstToken);

        if (fileContents.Any())
        {
            Console.WriteLine("\\ Unexpected token \'{0}{1}\', expected end of file.", last.Key, last.Type);
        }
    }
    else
    {
        Console.WriteLine("\\ File contains no tokens.\n\n");
    }
}
// lookahead
public Node parseExp(Token currentToken) {
    var t = currentToken;
    if (t == null) {
        return null;
    } else if (t.getType() == TokenType.LPAREN) {
        return parseRest();
    } else if (t.getType() == TokenType.TRUE) {
        return new BoolLit(true);
    } else if (t.getType() == TokenType.FALSE) {
        return new BoolLit(false);
    } else if (t.getType() == TokenType.QUOTE) {
        return new Cons(new Ident("quote"), new Cons(parseExp(), new Nil()));
    } else if (t.getType() == TokenType.INT) {
        return new IntLit(t.getIntVal());
    } else if (t.getType() == TokenType.STRING) {
        return new StringLit(t.getStringVal());
    } else if (t.getType() == TokenType.IDENT) {
        return new Ident(t.getName());
    }
    return null;
}

// Handling look ahead for expressions
public Node parseExp(Token t) {
    // TODO: write code for parsing an exp
    var tok = t;
    if (tok == null) {
        return null;
    } else if (tok.getType() == TokenType.LPAREN) {
        return parseRest();
    } else if (tok.getType() == TokenType.TRUE) {
        return new BoolLit(true);
    } else if (tok.getType() == TokenType.FALSE) {
        return new BoolLit(false);
    } else if (tok.getType() == TokenType.QUOTE) {
        return new Cons(new Ident("quote"), new Cons(parseExp(), new Nil()));
    } else if (tok.getType() == TokenType.INT) {
        return new IntLit(tok.getIntVal());
    } else if (tok.getType() == TokenType.STRING) {
        return new StringLit(tok.getStringVal());
    } else if (tok.getType() == TokenType.IDENT) {
        return new Ident(tok.getName());
    }
    return null;
}

// Handling look ahead for rest
protected Node parseRest(Token t) {
    // TODO: write code for parsing a rest
    var tok = t;
    if (tok == null) {
        return null;
    } else if (tok.getType() == TokenType.RPAREN) {
        return new Nil();
    } else {
        return new Cons(parseExp(tok), parseMRest());
    }
}
private void ParseAllExpressions(ref string fileContents, Token token)
{
    // If there is anything in fileContents, that means we are parsing the next expression within the file.
    while (fileContents.Any())
    {
        try
        {
            List<Token> tokens = new List<Token>();
            ParseExpression(ref fileContents, tokens);

            if (tokens.Count == 2 && tokens[0].Type == TokenType.LeftParen && tokens[1].Type == TokenType.RightParen)
            {
                throw new InvalidDataException(string.Format("Parse error! Production rules do not allow S->epsilon."));
            }

            allParsedTokens.AddRange(tokens);
            builtExpressions.Add(new Tuple<string, ExpressionParseSucces>(expressionBuilder.ToString(), ExpressionParseSucces.PASS));

            TreeStructure tree = new TreeStructure();
            foreach (var t in tokens
                .Where(tkn => tkn.Type != TokenType.LeftParen)
                .Where(tkn => tkn.Type != TokenType.RightParen))
            {
                tree.AddToken(t);
            }
            tree.PrintTreePostTraversal();
        }
        catch (InvalidDataException ide)
        {
            Console.WriteLine(ide.Message.ToString() == string.Empty
                ? "\\ " + expressionBuilder.ToString()
                : "\\ " + ide.Message.ToString());
            builtExpressions.Add(new Tuple<string, ExpressionParseSucces>(expressionBuilder.ToString(), ExpressionParseSucces.FAIL));
            fileContents = string.Empty;
        }
        catch (InvalidOperationException ioe)
        {
            Console.WriteLine("\\ " + ioe.Message.ToString());
            builtExpressions.Add(new Tuple<string, ExpressionParseSucces>(ioe.Message.ToString(), ExpressionParseSucces.FAIL));
            fileContents = string.Empty;
        }

        expressionBuilder.Clear();
        scanner.ClearScannedTokens();
    }

    //builtExpressions.ForEach(list =>
    //    Console.WriteLine(list.Item1 + string.Format("\t{0}", list.Item2.ToString())));
    //Console.WriteLine("\n");
}
protected Node parseRest(Token t) { return makeRest(t); }
private Node parseR(Token t) {
    if (t.getType() == TokenType.DOT) {
        Node a = parseExp();
        Token lookahead = scanner.getNextToken();
        if (lookahead.getType() == TokenType.RPAREN) {
            return a;
        } else {
            Console.Error.WriteLine("Warning more than one exp token after dot");
            // discard exps between '. exp' and ')',
            // stopping at the closing RPAREN or when the input runs out
            do {
                lookahead = scanner.getNextToken();
            } while (lookahead != null && lookahead.getType() != TokenType.RPAREN);

            if (lookahead == null) {
                return null;
            } else {
                return a;
            }
        }
    }
    return parseRest(t);
}
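A brief illustration of the two paths above; the inputs are assumed examples, not from the original code.

// (a . b)     -> DOT is seen, b is parsed, the next token is RPAREN,
//                and b's node is returned (it becomes the cdr of the enclosing cons)
// (a . b c d) -> the warning is printed, c and d are discarded up to the RPAREN,
//                and b's node is still returned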
public void SetLeftChild(Token token) { LeftChild = new TreeNode(token); LeftChild.SetParent(this); }
public void AnalyzeTrueString_Test()
{
    _testScanner = new Scanner();
    Token expected = new Token("\"This is a string\"", TokenType.String);
    string s = "\"This is a string\"";
    Token actual = _testScanner.GetNextToken(ref s);
    Assert.AreEqual(expected.Key, actual.Key);
    Assert.AreEqual(expected.Type, actual.Type);
}

public void AnalyzeTrueReal_Test()
{
    _testScanner = new Scanner();
    Token expected = new Token("10.0", TokenType.Real);
    string s = "10.0";
    Token actual = _testScanner.GetNextToken(ref s);
    Assert.AreEqual(expected.Key, actual.Key);
    Assert.AreEqual(expected.Type, actual.Type);
}

public void AnalyzeTrueOperator_Test()
{
    _testScanner = new Scanner();
    Token expected = new Token("+", TokenType.Operator);
    string s = "+";
    Token actual = _testScanner.GetNextToken(ref s);
    Assert.AreEqual(expected.Key, actual.Key);
    Assert.AreEqual(expected.Type, actual.Type);
}

public void AnalyzeTrueKeyword_Test()
{
    _testScanner = new Scanner();
    Token expected = new Token("int", TokenType.Keyword);
    string s = "int";
    Token actual = _testScanner.GetNextToken(ref s);
    Assert.AreEqual(expected.Key, actual.Key);
    Assert.AreEqual(expected.Type, actual.Type);
}

public void AnalyzeTrueIdentifier_Test()
{
    _testScanner = new Scanner();
    Token expected = new Token("thisIdentifier", TokenType.Identifier);
    string s = "thisIdentifier";
    Token actual = _testScanner.GetNextToken(ref s);
    Assert.AreEqual(expected.Key, actual.Key);
    Assert.AreEqual(expected.Type, actual.Type);
}

public void AnalyzeTrueBoolean_Test()
{
    _testScanner = new Scanner();
    Token expected = new Token("true", TokenType.Boolean);
    string s = "true";
    Token actual = _testScanner.GetNextToken(ref s);
    Assert.AreEqual(expected.Key, actual.Key);
    Assert.AreEqual(expected.Type, actual.Type);
}
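Following the same pattern, a hedged sketch of one more test for the left-parenthesis case; the test name is made up, and it assumes the scanner tags "(" as TokenType.LeftParen, which the parsing code elsewhere in this section relies on.

public void AnalyzeTrueLeftParen_Test()
{
    _testScanner = new Scanner();
    Token expected = new Token("(", TokenType.LeftParen);
    string s = "(";
    Token actual = _testScanner.GetNextToken(ref s);
    Assert.AreEqual(expected.Key, actual.Key);
    Assert.AreEqual(expected.Type, actual.Type);
}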
public TreeNode(Token token) { Token = token; }
// lookahead
public Node parseRest(Token currentToken) {
    var t = currentToken;
    if (t == null) {
        return null;
    } else if (t.getType() == TokenType.RPAREN) {
        return new Nil();
    } else {
        return new Cons(parseExp(t), parseRest());
    }

    // TODO: dot expressions, leave this for now
    //Console.Error.WriteLine("end of if/elses");
    //Nil error = new Nil();
    //error.print(1);
    //return null;
}
/// <summary>
/// Recursive token parser helper
/// </summary>
/// <param name="fileContents"></param>
/// <param name="tokens"></param>
private void ParseExpression(ref string fileContents, List<Token> tokens)
{
    last = scanner.GetNextToken(ref fileContents);
    expressionBuilder.Append(last.Key + " ");

    if (last.Type != TokenType.LeftParen)
    {
        throw new InvalidDataException(expressionBuilder.ToString().Replace(" ", ""));
    }

    tokens.Add(last);
    scanner.RemoveTokenFromBeginning(ref fileContents, last);
    last = scanner.GetNextToken(ref fileContents);
    ParseExpressionHelper(ref fileContents, tokens);
}
public void SetRightChild(Token token) { RightChild = new TreeNode(token); RightChild.SetParent(this); }
public Node makeRest(Token t1) {
    // We need the type of this token so we can find out
    // what kind of nodes we need to make and where to put them
    TokenType tt1 = t1.getType();

    // If this token is a RPAREN, just return Nil
    if (tt1 == TokenType.RPAREN) {
        return nodeNil;
    }

    // Otherwise, find out what the next token and its type are
    Token t2 = scanner.getNextToken();
    TokenType tt2 = t2.getType();

    // Check whether the current token is a LPAREN, because
    // if it is, that indicates there is an empty or
    // non-empty list within the current list context.
    // Identifying LPARENs when using token look-ahead takes highest
    // priority because it is critical to recognize empty lists, which
    // are LPAREN-RPAREN pairs, as soon as possible
    if (tt1 == TokenType.LPAREN) {
        // If this and the next token are a LPAREN-RPAREN pair, we
        // are seeing an empty list within a list.
        // Return a Cons node whose car is Nil and cdr is the parsing
        // of the rest of the list
        if (tt2 == TokenType.RPAREN) {
            return new Cons(nodeNil, parseRest());
        }
        // If this token is a LPAREN and the next token is anything
        // besides a RPAREN, this LPAREN is starting a
        // new, inner list of exps in the current list,
        // and the next token is the start of that inner list
        else {
            return new Cons(          // Make a Cons node
                new Cons(             // Car
                    parseExp(t2),     // Car's car - the next token
                    parseRest()       // Car's cdr - the parseRest of the
                                      // next-next token
                ),
                parseRest()           // Cdr - the parseRest of the
                                      // next-next-next token
            );
        }
    }

    // If this token is an exp and the next token is a DOT,
    // finish parsing this exp in the car of a new Cons node,
    // and parse the next-next token as an exp,
    // because we know it will be the last exp since it follows a DOT
    if (tt2 == TokenType.DOT) {
        return new Cons(parseExp(t1), parseExp());
    }
    // If the next token is a RPAREN,
    // we can infer that it is the end of the list, and we can
    // return a Cons node that parses the current token in its car
    // and puts Nil in its cdr
    else if (tt2 == TokenType.RPAREN) {
        return new Cons(parseExp(t1), nodeNil);
    }
    // At this point, we know that this token is an exp
    // and the next token is not a DOT or RPAREN or an inner list.
    // Therefore, we can infer that the next token must be an exp.
    // It then follows that we have to make
    // the car of this Cons node the parsing of the current token,
    // the cadr the parsing of the next token,
    // and the cddr the parsing of the rest of the list
    else {
        return new Cons(
            parseExp(t1),
            new Cons(
                parseExp(t2),
                parseRest()
            )
        );
    }
}
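For reference, a hedged sketch (using the same node classes and the nodeNil field from the snippets above) of the shapes makeRest is aiming to build:

// (1 2)   => Cons(IntLit(1), Cons(IntLit(2), Nil))
Node properList = new Cons(new IntLit(1), new Cons(new IntLit(2), nodeNil));

// (1 . 2) => Cons(IntLit(1), IntLit(2))
Node dottedPair = new Cons(new IntLit(1), new IntLit(2));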
private Node parseRest(Token t) {
    if (t == null) {
        //Console.Error.WriteLine("Warning null token!!!");
        return null;
    }
    // list expression ended with RPAREN, return Nil to end the list
    else if (t.getType() == TokenType.RPAREN) {
        return Nil.getNil();
    }
    else if (t.getType() == TokenType.DOT) {
        Console.Error.WriteLine("Warning illegal DOT token");
        return parseRest();
    }

    // The rest of the list becomes a chain of cons cells:
    //
    //    cons
    //   /    \
    // exp    cons
    //       /    \
    //     exp    ...
    //              \
    //              cons
    //             /    \
    //           exp    Nil
    //
    // For a dotted tail (". exp"), the last cons node can instead be
    //    cons
    //   /    \
    // exp    exp
    Node a = parseExp(t);
    Node b = parseR();
    return new Cons(a, b);
}
public Node parseExp(Token t) { return makeExp (t); }
private Node parseExp(Token token) {
    if (token == null) {
        return null;
    } else if (token.getType() == TokenType.LPAREN) {
        return parseRest();
    } else if (token.getType() == TokenType.IDENT) {
        return new Ident(token.getName());
    } else if (token.getType() == TokenType.TRUE) {
        return new BoolLit(true);
    } else if (token.getType() == TokenType.FALSE) {
        return new BoolLit(false);
    } else if (token.getType() == TokenType.INT) {
        return new IntLit(token.getIntVal());
    } else if (token.getType() == TokenType.STRING) {
        return new StringLit(token.getStringVal());
    } else if (token.getType() == TokenType.QUOTE) {
        // return new Cons(new Ident("\'"), new Cons(parseExp(), new Nil()));
        return new Cons(new Ident("\'"), parseExp());
    } else {
        Console.Error.WriteLine("PARSING ERROR in parseExp: ");
        return null;
    }
}
private void ParseExpressionHelper(ref string fileContents, List<Token> tokens)
{
begin:
    if (last.Type == TokenType.LeftParen)
    {
        tokens.Add(last);
        scanner.RemoveTokenFromBeginning(ref fileContents, last);
        expressionBuilder.Append(last.Key + " ");
        last = scanner.GetNextToken(ref fileContents);
        ParseExpressionHelper(ref fileContents, tokens);

        if (fileContents.Any())
        {
            last = scanner.GetNextToken(ref fileContents);
            goto begin;
        }
    }

    while (last.Type != TokenType.LeftParen && last.Type != TokenType.RightParen)
    {
        tokens.Add(last);
        scanner.RemoveTokenFromBeginning(ref fileContents, last);
        expressionBuilder.Append(last.Key + " ");

        if (!fileContents.Any())
        {
            // expected left/right parenthesis
            throw new InvalidDataException(expressionBuilder.ToString());
        }

        last = scanner.GetNextToken(ref fileContents);
    }

    if (last.Type == TokenType.LeftParen)
    {
        tokens.Add(last);
        scanner.RemoveTokenFromBeginning(ref fileContents, last);
        expressionBuilder.Append(last.Key + " ");
        last = scanner.GetNextToken(ref fileContents);
        ParseExpressionHelper(ref fileContents, tokens);

        if (fileContents.Any())
        {
            last = scanner.GetNextToken(ref fileContents);
            goto begin;
        }
    }
    else if (last.Type == TokenType.RightParen)
    {
        tokens.Add(last);

        if (!fileContents.Any())
        {
            throw new InvalidDataException(expressionBuilder.ToString() + "\n");
        }

        scanner.RemoveTokenFromBeginning(ref fileContents, last);
        expressionBuilder.Append(last.Key + " ");
    }
}