//----< extract exactly two characters as one token >-------------------
// NOTE(review): the original header said "until none-alpha", but this
// collector unconditionally consumes exactly two characters — it looks
// like a two-character special-token collector. Confirm against the
// dispatcher that selects this state.
override public Token getTok()
{
    Token tok = new Token();
    tok.Append((char)context_.src.next());
    tok.Append((char)context_.src.next()); // return two chars
    return (tok);
}
//----< extract a fixed pair of special characters >-------------------
override public Token getTok()
{
    Token pair = new Token();
    // this collector always consumes exactly two characters
    for (int i = 0; i < 2; i++)
    {
        pair.Append((char)context_.src.next());
    }
    return pair;
}
//----< keep extracting until get none-in-quote >------------------- override public Token getTok() { Token tok = new Token(); tok.Append((char)context_.src.next()); while ((char)context_.src.peek() != '\r') { tok.Append((char)context_.src.next()); } return tok; }
//----< collect a run of whitespace characters >--------------
override public Token getTok()
{
    // first character was already identified as whitespace by the dispatcher
    Token ws = new Token();
    ws.Append((char)context_.src.next());
    // absorb further whitespace until the lookahead is something else
    while (true)
    {
        if (!isWhiteSpace(context_.src.peek()))
            break;
        ws.Append((char)context_.src.next());
    }
    return ws;
}
//----< collect a run of alphanumeric characters >-------------------
override public Token getTok()
{
    Token alnum = new Token();
    alnum.Append((char)context_.src.next()); // leading character
    // keep consuming while the lookahead is a letter or digit
    while (true)
    {
        if (!isLetterOrDigit(context_.src.peek()))
            return alnum;
        alnum.Append((char)context_.src.next());
    }
}
//----< collect a run of punctuator characters >--------------
override public Token getTok()
{
    Token punct = new Token();
    punct.Append((char)context_.src.next()); // first char is a punctuator
    // continue while the lookahead is still punctuation
    for ( ; isPunctuation(context_.src.peek()); )
    {
        punct.Append((char)context_.src.next());
    }
    return punct;
}
//----< collect comment text through the end of the line >--------------
// Fixes: replaced the magic number 13 with the equivalent '\r' literal;
// added an end-of-stream guard so a comment on the file's last line
// (with no trailing newline) cannot loop forever; corrected the comment
// typos — the first character is '/', not a backslash.
// NOTE(review): stopping on '\r' assumes CRLF input — confirm newline
// convention of the source stream.
public override Token getTok()
{
    Token tok = new Token();
    tok.Append((char)context_.src.next()); // first is '/'
    while (context_.src.peek() != -1 && context_.src.peek() != '\r')
    {
        tok.Append((char)context_.src.next()); // add everything until end of line
    }
    return (tok);
}
//----< collect an identifier: letters, digits, and underscores >-------------------
override public Token getTok()
{
    Token ident = new Token();
    ident.Append((char)context_.src.next()); // leading character is alphabetic
    // identifiers continue with letters, digits, or '_'
    bool more = isLetterOrDigit(context_.src.peek()) || context_.src.peek() == '_';
    while (more)
    {
        ident.Append((char)context_.src.next());
        more = isLetterOrDigit(context_.src.peek()) || context_.src.peek() == '_';
    }
    return ident;
}
//----< collect a double-quoted string, including both quotes >-------------------
// NOTE(review): escaped quotes (\") are NOT handled — the token ends at
// the first '"' regardless of a preceding backslash; and there is no
// end-of-stream guard, so an unterminated string loops until the source
// throws or blocks. Confirm the grammar guarantees termination.
override public Token getTok()
{
    Token tok = new Token();
    tok.Append((char)context_.src.next()); // opening quote
    while (!isDoubleQuote((char)context_.src.peek()))
    {
        tok.Append((char)context_.src.next());
    }
    tok.Append((char)context_.src.next()); // closing quote
    return tok;
}
//----< collect characters up to and including the next '/' >-------------------
// Flattens embedded newlines to spaces so the token reads as one line.
// NOTE(review): terminating on the FIRST '/' means any slash inside the
// body (e.g. a URL inside a comment) ends the token early; if this is
// meant to find a closing "*/" it should also check the preceding char —
// confirm intent. No end-of-stream guard either.
override public Token getTok()
{
    Token tok = new Token();
    tok.Append((char)context_.src.next());
    while (((char)context_.src.peek() != '/'))
    {
        tok.Append((char)context_.src.next());
    }
    tok.Append((char)context_.src.next()); // consume the terminating '/'
    // Token behaves like a StringBuilder here: Replace returns the builder
    tok = tok.Replace(Environment.NewLine, " ");
    return tok;
}
//----< collect consecutive punctuator characters into one token >--------------
override public Token getTok()
{
    Token result = new Token();
    result.Append((char)context_.src.next()); // leading punctuator
    // extend the token while the lookahead remains punctuation
    bool more = isPunctuation(context_.src.peek());
    while (more)
    {
        result.Append((char)context_.src.next());
        more = isPunctuation(context_.src.peek());
    }
    return (result);
}
//----< collect a run of newline characters >--------------
override public Token getTok()
{
    Token run = new Token();
    run.Append((char)context_.src.next()); // first char is a newline
    // absorb any immediately following newline characters
    while (true)
    {
        if (!isNewLine(context_.src.peek()))
            return run;
        run.Append((char)context_.src.next());
    }
}
//----< collect a single-quoted character literal, including both quotes >-------------------
// NOTE(review): escape(tok) is defined elsewhere; it presumably reports
// whether the token currently ends in an escape sequence, in which case
// the character after the backslash is consumed too so an escaped quote
// (\') does not terminate the literal — confirm escape()'s contract.
// No end-of-stream guard: an unterminated literal loops indefinitely.
override public Token getTok()
{
    Token tok = new Token();
    tok.Append((char)context_.src.next()); // first is the opening quote
    while ((char)context_.src.peek() != '\'') // stop when quote appears again
    {
        tok.Append((char)context_.src.next());
        if (tok.Length > 1 && escape(tok))
        {
            // token ends in an escape — pull in the escaped character as well
            tok.Append((char)context_.src.next());
        }
    }
    return (tok.Append((char)context_.src.next())); // append the closing quote
}
// Build a Token carrying the given text and position increment.
private static Token MakeToken(string text, int posIncr)
{
    var token = new Token();
    token.Append(text);
    token.PositionIncrement = posIncr;
    return token;
}
// Render one semi-expression extracted from 'path', padded to a
// 40-character column and followed by a label.
// Fixes: getSemi(path) was invoked twice — doing the extraction work
// twice and risking divergent results if getSemi consumes input — now
// called once; the pad count is clamped at zero so a semi-expression
// longer than 40 characters no longer throws
// ArgumentOutOfRangeException from StringBuilder.Append(char, count).
public static StringBuilder Test_inter(String path)
{
    Token s = new Token();
    SemiExp se = new SemiExp();
    string semi = se.getSemi(path);          // extract exactly once
    int pad = Math.Max(0, 40 - semi.Length); // never negative
    s.Append("\n - Semiexpression : " + semi)
     .Append(' ', pad)
     .Append("One SemiExpression.");
    return (s);
}
// Create a token with the given text, position increment, and
// start/end character offsets.
private Token MakeToken(string text, int posIncr, int startOffset, int endOffset)
{
    var token = new Token();
    token.Append(text);
    token.PositionIncrement = posIncr;
    token.SetOffset(startOffset, endOffset);
    return token;
}
//----< collect a single-line comment through its terminating newline >-------------------
override public Token getTok()
{
    Token line = new Token();
    line.Append((char)context_.src.next()); // leading '/'
    // consume characters until, and including, the line feed
    char ch;
    do
    {
        ch = (char)context_.src.next();
        line.Append(ch);
    } while (ch != '\n');
    return line;
}
// Scans a hexadecimal string literal "<...>" from the EPS stream into
// Token, decoding hex digit pairs to characters; an odd trailing digit
// is padded with '0'. If the decoded text begins with the byte pair
// 0xFE 0xFF (UTF-16 BE byte-order mark), the remaining byte pairs are
// recombined into UTF-16 code units.
// NOTE(review): non-hex, non-'>' characters are silently skipped by the
// outer loop — confirm that is intended rather than an error condition.
private LexerToken ScanHexadecimalString(EpsStreamReader reader)
{
    Debug.Assert(reader.CurrentChar == Chars.Less);
    ClearToken();
    char[] hex = new char[2];
    ScanNextChar(reader); // step past '<'
    while (true)
    {
        MoveToNonWhiteSpace(reader);
        if (reader.CurrentChar == '>')
        {
            ScanNextChar(reader); // consume '>'
            break;
        }
        if (char.IsLetterOrDigit(reader.CurrentChar))
        {
            hex[0] = char.ToUpper(reader.CurrentChar);
            var nextChar = reader.NextChar;
            if (nextChar != '>')
            {
                hex[1] = char.ToUpper(nextChar);
            }
            else
            {
                // odd digit count: pad the final pair with '0'
                hex[1] = '0';
            }
            int ch = int.Parse(new string(hex), NumberStyles.AllowHexSpecifier);
            Token.Append(Convert.ToChar(ch));
            ScanNextChar(reader);
            ScanNextChar(reader);
        }
    }
    string chars = Token.ToString();
    int count = chars.Length;
    // UTF-16 BE BOM: rebuild each big-endian byte pair into one char
    if (count > 2 && chars[0] == (char)0xFE && chars[1] == (char)0xFF)
    {
        Debug.Assert(count % 2 == 0);
        Token.Length = 0;
        for (int idx = 2; idx < count; idx += 2)
        {
            Token.Append((char)(chars[idx] * 256 + chars[idx + 1]));
        }
    }
    return (Symbol = LexerToken.HexString);
}
// Appending null must leave the token's text unchanged and still
// return a non-null token.
public void AppendNull()
{
    const string original = "something";
    var source = new Token(0, 0, 0, original.Length - 1, TokenType.Word, original);

    var result = source.Append(null);

    Assert.NotNull(result);
    Assert.Equal("something", result.Text);
}
// Return a one-character token when the lookahead is a single-character
// punctuator; otherwise return an empty token.
override public Token getTok()
{
    Token tok = new Token();
    if (!isSinglePunct(context_.src.peek()))
        return tok; // lookahead is not a single punctuator: empty token
    tok.Append((char)context_.src.next());
    return tok;
}
//----< collect a C-style block comment through its closing "*/" >-------------------
// Appends the leading '/', then characters until the "*/" pair is seen.
// NOTE(review): pre_ch starts as '/', so the opener's '*' is read inside
// the loop; the sequence "/*/" would therefore terminate immediately even
// though it is not a complete comment — confirm the tokenizer's intent.
// No end-of-stream guard: an unterminated comment loops until next() fails.
override public Token getTok()
{
    Token tok = new Token();
    tok.Append((char)context_.src.next()); // first is '/'
    bool endcomment = false;
    char pre_ch = '/';
    while (!endcomment) // stop when the comment is ended
    {
        char cur_ch = (char)context_.src.next();
        tok.Append(cur_ch);
        if (cur_ch == '/' && pre_ch == '*') // end when "*/" occurs
        {
            endcomment = true;
        }
        pre_ch = cur_ch;
    }
    return (tok);
}
// Appending one token to another must concatenate their texts and
// extend the word-end offset to cover the combined text.
public void Append()
{
    const string left = "something";
    const string right = " other";
    var first = new Token(0, 0, 0, left.Length - 1, TokenType.Word, left);
    var second = new Token(0, 0, 0, right.Length - 1, TokenType.Word, right);

    var combined = first.Append(second);

    Assert.NotNull(combined);
    Assert.Equal("something other", combined.Text);
    Assert.Equal(13, combined.WordEnd);
}
// Render the first eight semi-expressions found in 'path', each padded
// to a 60-character column and annotated with the terminator condition
// it satisfied.
// Fixes: msg.ToString() was re-evaluated up to six times per
// semi-expression — now computed once per iteration; the column padding
// is clamped at zero so a semi-expression longer than 60 characters no
// longer throws ArgumentOutOfRangeException from Append(char, count).
// NOTE(review): in the final condition, && binds tighter than ||, so
// "{" or "}" match regardless of "for" — kept as-is; confirm intent.
public static StringBuilder Test_semi(String path)
{
    StringBuilder str = new StringBuilder();
    SemiExp s = new SemiExp();
    int count = 0;
    List<List<string>> s_ex = s.WholeSemi(path);
    foreach (List<String> t in s_ex)
    {
        if (count > 7) // only the first eight semi-expressions are shown
        {
            continue;
        }
        StringBuilder msg = new Token();
        for (int i = 0; i < t.Count(); i++)
        {
            if (!t[i].Equals("\n")) // skip bare newline tokens
            {
                msg.Append(t[i]);
                msg.Append(" ");
            }
        }
        count++;
        string text = msg.ToString(); // evaluate once per semi-expression
        str.Append("\n - " + text).Append(' ', Math.Max(0, 60 - text.Length));
        if (text.Contains("#"))
        {
            str.Append("| # condition satisfied");
        }
        if (text.Contains("for"))
        {
            str.Append("| for condition satisfied");
        }
        if (text.Contains("{") || text.Contains("}") || text.Contains(";") && !text.Contains("for"))
        {
            str.Append("| end condition satisfied");
        }
    }
    return (str);
}
// Render every semi-expression found in 'path', one per output line,
// with newline tokens filtered out.
// Fix: removed the 'count' local — it was incremented but never read.
public static StringBuilder Test_semi_here(String path)
{
    StringBuilder str = new StringBuilder();
    SemiExp s = new SemiExp();
    List<List<string>> s_ex = s.WholeSemi(path);
    foreach (List<String> t in s_ex)
    {
        StringBuilder msg = new Token();
        for (int i = 0; i < t.Count(); i++)
        {
            if (!t[i].Equals("\n")) // skip bare newline tokens
            {
                msg.Append(t[i]);
                msg.Append(" ");
            }
        }
        str.Append("\n - " + msg);
    }
    return (str);
}
// Split a command line into arguments: unquoted spaces separate tokens;
// double quotes toggle quoted mode (spaces inside quotes are literal)
// and the quote characters themselves are not emitted.
private static List<string> SplitIPPArguments(string arguments)
{
    List<string> args = new List<string>();
    StringBuilder token = null;
    bool inQuote = false;
    for (int index = 0; index < arguments.Length; index++)
    {
        char c = arguments[index];
        if (c == '\"')
        {
            inQuote = !inQuote; // quotes toggle state and are dropped
        }
        else if (c == ' ' && !inQuote)
        {
            if (token != null) // unquoted space ends the current token
            {
                args.Add(token.ToString());
                token = null;
            }
        }
        else
        {
            if (token == null)
            {
                token = new StringBuilder();
            }
            token.Append(c);
        }
    }
    if (token != null) // flush a trailing token
    {
        args.Add(token.ToString());
    }
    return args;
}

// Run the integrated IPP code
// @todo ipp: How to log the output in a nice UAT way?
// Decomposed: the quote-aware argument splitting now lives in
// SplitIPPArguments; behavior is unchanged.
protected static int RunIPP(string IPPArguments)
{
    return (RunIPP(SplitIPPArguments(IPPArguments).ToArray()));
}
// Preprocess 'source': C comments become single spaces, C++ line
// comments are dropped, #-directives (handled by HandleDirective) can
// enable/disable output, and replacement newlines are emitted so the
// output's line numbering stays aligned with the input.
// Returns the accumulated text token, the error token on a lex error,
// or Token.Error if an #if is left unclosed at end of source.
public Token Parse( Token source )
{
    this.Source = source.String;
    int sourceEned = this.Source.Length;
    this.Line = 1;
    this.BOL = true;
    this.EnableOutput = true;
    //Accumulate output into this token
    var output = new Token( Token.Kind.Text );
    int emptyLines = 0;
    //Enable output only if all embedded #if's were true
    bool oldOutputEnabled = true;
    bool outputEnabled = true;
    int outputDisabledLine = 0;
    for ( int i = 0; i < sourceEned; i++ )
    {
        int oldLine = this.Line;
        Token t = this.GetToken( true );
NextToken:
        switch ( t.Type )
        {
        case Token.Kind.Error:
            return t;
        case Token.Kind.EOS:
            return output; //Force termination
        case Token.Kind.Comment:
            //C comments are replaced with single spaces.
            if ( outputEnabled )
            {
                output.Append( " ", 1 );
                // keep line numbering aligned across multi-line comments
                output.AppendNL( this.Line - oldLine );
            }
            break;
        case Token.Kind.Linecomment:
            //C++ comments are ignored
            continue;
        case Token.Kind.Directive:
            t = this.HandleDirective( t, oldLine );
            outputEnabled = this.EnableOutput;
            if ( outputEnabled != oldOutputEnabled )
            {
                if ( outputEnabled )
                {
                    // output re-enabled: emit the newlines skipped while disabled
                    output.AppendNL( oldLine - outputDisabledLine );
                }
                else
                {
                    outputDisabledLine = oldLine;
                }
                oldOutputEnabled = outputEnabled;
            }
            if ( outputEnabled )
            {
                output.AppendNL( this.Line - oldLine - t.CountNL );
            }
            // HandleDirective may have already produced the next token
            goto NextToken;
        case Token.Kind.Linecont:
            //Backslash-Newline sequences are deleted, no matter where.
            emptyLines++;
            break;
        case Token.Kind.Newline:
            if ( emptyLines > 0 )
            {
                // Compensate for the backslash-newline combinations
                // we have encountered, otherwise line numeration is broken
                if ( outputEnabled )
                {
                    output.AppendNL( emptyLines );
                }
                emptyLines = 0;
            }
            goto default;
        case Token.Kind.Whitespace:
        default:
            //Passthrough all other tokens
            if ( outputEnabled )
            {
                output.Append( t );
            }
            break;
        }
    }
    if ( this.EnableOutput == false )
    {
        this.Error( this.Line, "Unclosed #if at end of source", null );
        return Token.Error;
    }
    return output;
}
/**
 * Implementation of the <i>Shunting Yard</i> algorithm to transform an
 * infix expression to a RPN expression.
 */
// NOTE(review): the source text of this method was split by extraction;
// the string literals and comments are reconstructed contiguously here —
// the logic is unchanged.
internal List<Token> ShuntingYard(string expression)
{
    List<Token> outputQueue = new List<Token>();
    Stack<Token> stack = new Stack<Token>();
    Tokenizer tokenizer = new Tokenizer(this, expression);
    Token lastFunction = null;
    Token previousToken = null;
    foreach (Token token in tokenizer)
    {
        switch (token.type)
        {
        case TokenType.STRINGPARAM:
            //stack.Push(token);
            outputQueue.Add(token);
            break;
        case TokenType.LITERAL:
        case TokenType.HEX_LITERAL:
        case TokenType.VARIABLE:
            if (previousToken != null)
            {
                if (previousToken.type == TokenType.LITERAL || previousToken.type == TokenType.CLOSE_PAREN || previousToken.type == TokenType.VARIABLE || previousToken.type == TokenType.HEX_LITERAL)
                {
                    // Implicit multiplication, e.g. 23(a+b) or (a+b)(a-b)
                    Token multiplication = new Token();
                    multiplication.Append("*");
                    multiplication.type = TokenType.OPERATOR;
                    stack.Push(multiplication);
                }
            }
            outputQueue.Add(token);
            break;
        case TokenType.OBJPATH:
            outputQueue.Add(token);
            break;
        case TokenType.FUNCTION:
            stack.Push(token);
            lastFunction = token;
            break;
        case TokenType.COMMA:
            if (previousToken != null && previousToken.type == TokenType.OPERATOR)
            {
                throw new ExpressionException("Missing parameter(s) for operator " + previousToken + " at character position " + previousToken.pos);
            }
            // pop operators down to the function's opening parenthesis
            while (!stack.IsEmpty() && stack.Peek().type != TokenType.OPEN_PAREN)
            {
                outputQueue.Add(stack.Pop());
            }
            if (stack.IsEmpty())
            {
                throw new ExpressionException("Parse error for function '" + lastFunction + "'");
            }
            break;
        case TokenType.OPERATOR:
        {
            if (previousToken != null && (previousToken.type == TokenType.COMMA || previousToken.type == TokenType.OPEN_PAREN))
            {
                throw new ExpressionException("Missing parameter(s) for operator " + token + " at character position " + token.pos);
            }
            if (!operators.TryGetValue(token.surface, out Operator o1))
            {
                throw new ExpressionException("Unknown operator '" + token + "' at position " + (token.pos + 1));
            }
            // move higher-precedence operators to the output first
            ShuntOperators(outputQueue, stack, o1);
            stack.Push(token);
            break;
        }
        case TokenType.UNARY_OPERATOR:
        {
            // a unary operator may only follow another operator, a comma,
            // an opening parenthesis, or start the expression
            if (previousToken != null && previousToken.type != TokenType.OPERATOR && previousToken.type != TokenType.UNARY_OPERATOR && previousToken.type != TokenType.COMMA && previousToken.type != TokenType.OPEN_PAREN)
            {
                throw new ExpressionException("Invalid position for unary operator " + token + " at character position " + token.pos);
            }
            Operator o1;
            if (operators.ContainsKey(token.surface))
            {
                o1 = operators[token.surface];
            }
            else
            {
                throw new ExpressionException("Unknown unary operator '" + token.surface.Substring(0, token.surface.Length - 1) + "' at position " + (token.pos + 1));
            }
            ShuntOperators(outputQueue, stack, o1);
            stack.Push(token);
            break;
        }
        case TokenType.OPEN_PAREN:
            if (previousToken != null)
            {
                if (previousToken.type == TokenType.LITERAL || previousToken.type == TokenType.CLOSE_PAREN || previousToken.type == TokenType.VARIABLE || previousToken.type == TokenType.HEX_LITERAL)
                {
                    // Implicit multiplication, e.g. 23(a+b) or (a+b)(a-b)
                    Token multiplication = new Token();
                    multiplication.Append("*");
                    multiplication.type = TokenType.OPERATOR;
                    stack.Push(multiplication);
                }
                // if the ( is preceded by a valid function, then it
                // denotes the start of a parameter list
                if (previousToken.type == TokenType.FUNCTION)
                {
                    outputQueue.Add(token);
                }
            }
            stack.Push(token);
            break;
        case TokenType.CLOSE_PAREN:
            if (previousToken != null && previousToken.type == TokenType.OPERATOR)
            {
                throw new ExpressionException("Missing parameter(s) for operator " + previousToken + " at character position " + previousToken.pos);
            }
            if (previousToken != null && previousToken.type == TokenType.COMMA)
            {
                throw new ExpressionException("Missing parameter after comma at character position " + previousToken.pos);
            }
            // pop operators back to the matching opening parenthesis
            while (!stack.IsEmpty() && stack.Peek().type != TokenType.OPEN_PAREN)
            {
                outputQueue.Add(stack.Pop());
            }
            if (stack.IsEmpty())
            {
                throw new ExpressionException("Mismatched parentheses");
            }
            stack.Pop(); // discard the opening parenthesis
            // a function on top of the stack owned this parenthesis pair
            if (!stack.IsEmpty() && stack.Peek().type == TokenType.FUNCTION)
            {
                outputQueue.Add(stack.Pop());
            }
            break;
        }
        previousToken = token;
    }
    // drain remaining operators; leftover parentheses are unbalanced
    while (!stack.IsEmpty())
    {
        Token element = stack.Pop();
        if (element.type == TokenType.OPEN_PAREN || element.type == TokenType.CLOSE_PAREN)
        {
            throw new ExpressionException("Mismatched parentheses");
        }
        outputQueue.Add(element);
    }
    return (outputQueue);
}
/// <summary>
/// Appends the current character to the token, then advances the
/// reader and returns the newly current character.
/// </summary>
protected char AppendAndScanNextChar(EpsStreamReader reader)
{
    char current = reader.CurrentChar;
    Token.Append(current);
    return ScanNextChar(reader);
}
// Construct a Token from text, assigning its position increment and
// character offsets before returning it.
private Token MakeToken(string text, int posIncr, int startOffset, int endOffset)
{
    var result = new Token();
    result.Append(text);
    result.PositionIncrement = posIncr;
    result.SetOffset(startOffset, endOffset);
    return (result);
}
// Construct a Token from text with the given position increment.
private static Token MakeToken(string text, int posIncr)
{
    var result = new Token();
    result.Append(text);
    result.PositionIncrement = posIncr;
    return (result);
}
//----< collect a quoted literal (single- or double-quoted) >-------------------
// Consumes the opening quote, then characters until a closing quote
// that is not an escaped character.
// Fix (consistency/dedup): the original duplicated the entire scanning
// loop for ' and " — the two branches were identical except for the
// quote character, so they are merged by parameterizing on 'quote'.
// The escape rule is preserved exactly: a quote preceded by '\\' still
// terminates when the running backslash count is even (distinguishes
// an escaped quote from an escaped backslash).
// NOTE(review): assumes the dispatcher only selects this state when the
// lookahead is ' or " — confirm; any other first char now scans for
// that same char rather than falling into the old double-quote branch.
override public Token getTok()
{
    Token tok = new Token();
    char quote = (char)context_.src.next();
    tok.Append(quote); // first is \" or \'
    bool endquote = false;
    int count = 0;       // backslashes seen so far
    char pre_ch = quote;
    while (!endquote) // stop when the quote completes
    {
        char cur_ch = (char)context_.src.next();
        tok.Append(cur_ch);
        if (cur_ch == '\\')
        {
            count++;
        }
        if (cur_ch == quote)
        {
            if (pre_ch != '\\')
            {
                endquote = true;
            }
            if (pre_ch == '\\' && (count % 2 == 0)) // odd: escaped quote, even: escaped backslash
            {
                endquote = true;
            }
        }
        pre_ch = cur_ch;
    }
    return (tok);
}
//----< collect a punctuator, extending it for strings, comments, and
//      two-character operators >--------------
// Dispatch on the first punctuator character:
//   "    : consume through the closing double quote (inclusive)
//   '    : consume through the closing single quote (inclusive)
//   /    : "//" line comment (up to '\n'), "/*...*/" block comment
//          (inclusive), or the "/=" operator
//   << >> :: == *= || &&  : pair up the matching two-char operator
//   +    : "++" or "+="      -    : "--" or "-="
// NOTE(review): the quoted-string branches do not handle escaped quotes
// (\" or \'), and none of the scanning loops guard against end of
// stream — an unterminated string or comment loops forever. Also note
// '<' pairs only with '<' (no "<=") and '>' only with '>' — confirm
// that asymmetry is intended.
override public Token getTok()
{
    Token tok = new Token();
    tok.Append((char)context_.src.next()); // first is punctuator
    if (tok[0] == '\"')
    {
        while (context_.src.peek() != '\"')
        {
            tok.Append((char)context_.src.next());
        }
        tok.Append((char)context_.src.next()); // closing quote
    }
    else if (tok[0] == '\'')
    {
        while (context_.src.peek() != '\'')
        {
            tok.Append((char)context_.src.next());
        }
        tok.Append((char)context_.src.next()); // closing quote
    }
    else if (tok[0] == '/')
    {
        if (context_.src.peek() == '/') //single line comment
        {
            while (context_.src.peek() != '\n')
            {
                tok.Append((char)context_.src.next());
            }
        }
        else if (context_.src.peek() == '*') //Multi-line comments
        {
            tok.Append((char)context_.src.next());
            // scan until the token ends in '*' and the lookahead is '/'
            while (!(tok[tok.Length - 1] == '*' && context_.src.peek() == '/'))
            {
                tok.Append((char)context_.src.next());
            }
            tok.Append((char)context_.src.next()); // closing '/'
        }
        else if (context_.src.peek() == '=')
        {
            tok.Append((char)context_.src.next()); // "/="
        }
    }
    // special two pairs
    else if (tok[0] == '<')
    {
        if (context_.src.peek() == '<')
        {
            tok.Append((char)context_.src.next());
        }
    }
    else if (tok[0] == '>')
    {
        if (context_.src.peek() == '>')
        {
            tok.Append((char)context_.src.next());
        }
    }
    else if (tok[0] == ':')
    {
        if (context_.src.peek() == ':')
        {
            tok.Append((char)context_.src.next());
        }
    }
    else if (tok[0] == '=')
    {
        if (context_.src.peek() == '=')
        {
            tok.Append((char)context_.src.next());
        }
    }
    else if (tok[0] == '*')
    {
        if (context_.src.peek() == '=')
        {
            tok.Append((char)context_.src.next());
        }
    }
    else if (tok[0] == '|')
    {
        if (context_.src.peek() == '|')
        {
            tok.Append((char)context_.src.next());
        }
    }
    else if (tok[0] == '&')
    {
        if (context_.src.peek() == '&')
        {
            tok.Append((char)context_.src.next());
        }
    }
    else if (tok[0] == '+')
    {
        if (context_.src.peek() == '+' || context_.src.peek() == '=')
        {
            tok.Append((char)context_.src.next());
        }
    }
    else if (tok[0] == '-')
    {
        if (context_.src.peek() == '-' || context_.src.peek() == '=')
        {
            tok.Append((char)context_.src.next());
        }
    }
    return tok;
}
// Copy the current input character into the token buffer, then
// advance; returns whatever Read() reports (false at end of input).
private bool CopyChar()
{
    var current = m_input.Char;
    m_token.Append(current);
    return m_input.Read();
}