/// <summary>
/// Read a single quote or a character literal.  The initial single quote has
/// already been consumed from the stream.
/// </summary>
/// <returns>A CharacterAnsi token when a complete literal is found; otherwise a SingleQuote token</returns>
/// <remarks>On any failure the buffer is rolled back so only the quote itself is consumed</remarks>
private Token ReadSingleQuoteOrCharacter()
{
    if (_buffer.EndOfStream)
    {
        return new Token(TokenType.SingleQuote, "'");
    }

    ScannerMark mark = _buffer.Mark();
    Token result = null;
    try
    {
        char first = _buffer.ReadChar();
        if (first != '\\')
        {
            // Plain character literal: the closing quote must come immediately
            if (_buffer.ReadChar() == '\'')
            {
                result = new Token(TokenType.CharacterAnsi, "'" + first + "'");
            }
        }
        else
        {
            // Escape sequence: accumulate characters up to the closing quote,
            // bailing out if the sequence gets implausibly long
            StringBuilder escape = new StringBuilder();
            escape.Append(first);
            while (true)
            {
                char next = _buffer.ReadChar();
                if (next == '\'')
                {
                    result = new Token(TokenType.CharacterAnsi, "'" + escape.ToString() + "'");
                    break;
                }

                if (_buffer.EndOfStream || escape.Length > 5)
                {
                    break;
                }

                escape.Append(next);
            }
        }
    }
    catch (ScannerInternalException)
    {
        // Swallow the exception; the null result below triggers the rollback
    }

    if (result == null)
    {
        _buffer.RollBack(mark);
        result = new Token(TokenType.SingleQuote, "'");
    }

    return result;
}
/// <summary>
/// A # has already been read; read the text of the pound directive token.
/// </summary>
/// <returns>The matched pound directive token, or a plain Pound token when the word is not recognized</returns>
/// <remarks>The scanner is rolled back past the word when no directive matches</remarks>
private Token ReadPoundToken()
{
    ScannerMark mark = this.Mark();
    string word = ReadWord();

    Token poundToken;
    if (TokenHelper.TryConvertToPoundToken(word, out poundToken))
    {
        return poundToken;
    }

    // The word didn't match any known pound directive; give back just the #
    this.Rollback(mark);
    return new Token(TokenType.Pound, "#");
}
/// <summary>
/// Peek the next token that is not of any of the specified types.
/// </summary>
/// <param name="types">Token types to skip over while peeking</param>
/// <returns>The first upcoming token whose type is not in <paramref name="types"/></returns>
/// <remarks>The buffer position is always restored, even when scanning throws</remarks>
public Token PeekNextTokenNotOfType(params TokenType[] types)
{
    ScannerMark mark = _buffer.Mark();
    try
    {
        return GetNextTokenNotOfType(types);
    }
    finally
    {
        // Peeking must never consume input
        _buffer.RollBack(mark);
    }
}
/// <summary>
/// Peek the next token in the stream without consuming it.
/// </summary>
/// <returns>The next token</returns>
/// <remarks>The buffer position is always restored, even when scanning throws</remarks>
public Token PeekNextToken()
{
    ScannerMark mark = _buffer.Mark();
    try
    {
        return GetNextToken();
    }
    finally
    {
        // Peeking must never consume input
        _buffer.RollBack(mark);
    }
}
/// <summary>
/// Peek at the next preprocessor line without consuming it.
/// </summary>
/// <returns>The next line</returns>
/// <remarks>The scanner position is always restored, even when reading throws</remarks>
private PreprocessorLine PeekNextLine()
{
    ScannerMark mark = _scanner.Mark();
    try
    {
        return GetNextLine();
    }
    finally
    {
        // Peeking must never consume input
        _scanner.Rollback(mark);
    }
}
/// <summary>
/// Read a quote or a string from the stream.  The initial double quote has
/// already been consumed.
/// </summary>
/// <returns>A QuotedStringAnsi token for a complete string; otherwise a DoubleQuote token</returns>
/// <remarks>Backslash escapes are kept verbatim together with the escaped character</remarks>
private Token ReadDoubleQuoteOrString()
{
    ScannerMark mark = _buffer.Mark();
    try
    {
        StringBuilder text = new StringBuilder();
        text.Append('"');
        while (true)
        {
            char current = _buffer.ReadChar();
            if (current == '"')
            {
                // Closing quote: the string literal is complete
                text.Append('"');
                return new Token(TokenType.QuotedStringAnsi, text.ToString());
            }

            if (current == '\\')
            {
                // Keep the escape and the character it escapes together so
                // the closing-quote check above can't be fooled by \"
                text.Append(current);
                text.Append(_buffer.ReadChar());
            }
            else
            {
                text.Append(current);
            }
        }
    }
    catch (ScannerInternalException)
    {
        // Running out of stream mid-string means this was just a bare quote.
        // Roll back and return only the quote token
        _buffer.RollBack(mark);
        return new Token(TokenType.DoubleQuote, "\"");
    }
}
/// <summary>
/// Read a word from the stream and classify it as a keyword, a number
/// (possibly floating point), or a plain word token.
/// </summary>
/// <returns>The classified token</returns>
/// <remarks>A '.' following a numeric word is speculatively consumed to form a
/// floating point literal and rolled back if the result is not a number</remarks>
private Token ReadWordOrNumberToken()
{
    string word = ReadWord();

    // Keywords take priority over everything else
    TokenType keywordType;
    if (TokenHelper.KeywordMap.TryGetValue(word, out keywordType))
    {
        return new Token(keywordType, word);
    }

    TokenType numberType = TokenType.Ampersand;
    if (IsNumber(word, ref numberType))
    {
        // A trailing '.' may extend this into a floating point literal
        if (!_buffer.EndOfStream && _buffer.PeekChar() == '.')
        {
            ScannerMark mark = _buffer.Mark();
            _buffer.ReadChar();
            string floatWord = word + "." + ReadWord();
            TokenType floatType = TokenType.Ampersand;
            if (IsNumber(floatWord, ref floatType))
            {
                return new Token(floatType, floatWord);
            }

            // Not a valid float after all; give back the '.' and what followed
            _buffer.RollBack(mark);
        }

        return new Token(numberType, word);
    }

    // Just a plain word
    return new Token(TokenType.Word, word);
}
/// <summary>
/// Peek a list of tokens from the stream.  End-of-stream does not throw
/// during an extended peek.
/// </summary>
/// <param name="count">Number of tokens to peek</param>
/// <returns>The next <paramref name="count"/> tokens</returns>
/// <remarks>Both the stream position and the ThrowOnEndOfStream option are restored</remarks>
public List<Token> PeekTokenList(int count)
{
    ScannerMark mark = this.Mark();
    bool savedThrow = _options.ThrowOnEndOfStream;
    _options.ThrowOnEndOfStream = false;
    try
    {
        List<Token> peeked = new List<Token>();
        while (peeked.Count < count)
        {
            peeked.Add(this.GetNextToken());
        }
        return peeked;
    }
    finally
    {
        // Restore the option first, then rewind the stream
        _options.ThrowOnEndOfStream = savedThrow;
        this.Rollback(mark);
    }
}
/// <summary>
/// Rollback to the specified mark.
/// </summary>
/// <param name="mark">Mark previously obtained from this scanner</param>
/// <remarks>Thin wrapper that forwards to the underlying buffer's rollback</remarks>
public void Rollback(ScannerMark mark)
{
    _buffer.RollBack(mark);
}
/// <summary>
/// Get the next token from the stream, honoring the scanner options.
/// </summary>
/// <returns>The next visible token</returns>
/// <remarks>
/// Comments, newlines and whitespace are skipped when the corresponding
/// Hide* options are set.  Throws EndOfStreamException at end of stream
/// when ThrowOnEndOfStream is set.  A scan failure is converted into a
/// Text token holding the remainder of the line.
/// </remarks>
public Token GetNextToken()
{
    while (true)
    {
        // Mark the stream so a failed scan can be rolled back and turned
        // into a Text token for the rest of the line
        ScannerMark mark = _buffer.Mark();
        Token token;
        try
        {
            token = GetNextTokenImpl();
        }
        catch (ScannerInternalException ex)
        {
            AddWarning(ex.Message);
            _buffer.RollBack(mark);
            token = new Token(TokenType.Text, SafeReadTillEndOfLine());
        }

        bool hidden = false;
        switch (token.TokenType)
        {
            case TokenType.EndOfStream:
                if (this.Options.ThrowOnEndOfStream)
                {
                    throw new EndOfStreamException("Scanner reached the end of the stream");
                }
                break;
            case TokenType.BlockComment:
            case TokenType.LineComment:
                hidden = this.Options.HideComments;
                break;
            case TokenType.NewLine:
                hidden = this.Options.HideNewLines;
                break;
            case TokenType.WhiteSpace:
                hidden = this.Options.HideWhitespace;
                break;
        }

        if (!hidden)
        {
            return token;
        }
    }
}
/// <summary>
/// Roll the position back to the given mark.
/// </summary>
/// <param name="mark">Mark to restore; must not be null</param>
/// <remarks>Restores both the character index and the line number captured by the mark</remarks>
public void RollBack(ScannerMark mark)
{
    Contract.ThrowIfNull(mark, "Must be passed a valid ScannerMark");
    _index = mark.Index;
    _lineNumber = mark.LineNumber;
}
/// <summary>
/// Core processing loop.  Processes blocks of text.
/// </summary>
/// <remarks>
/// Reads lines until a conditional-branch end (#else / #elif / #endif) or
/// end of stream.  Preprocessor directive lines are dispatched to their
/// handlers; every other line is written to the output stream.  On a
/// preprocessor error the scanner is rolled back to the start of the line
/// and the line is consumed so processing can continue.
/// </remarks>
private void ProcessLoop()
{
    bool done = false;
    while (!done)
    {
        // Mark so the error handler below can rewind to the start of the line
        ScannerMark mark = _scanner.Mark();
        try
        {
            PreprocessorLine line = this.GetNextLine();
            ThrowIfFalse(line.TokenList.Count > 0);
            Token token = line.FirstValidToken;
            if (token == null)
            {
                // No significant token on this line; pass it through as-is
                WriteToStream(line);
                continue;
            }

            switch (token.TokenType)
            {
                case TokenType.PoundIf:
                    ProcessPoundIf(line);
                    break;
                case TokenType.PoundIfndef:
                    ProcessPoundIfndef(line);
                    break;
                case TokenType.PoundElse:
                case TokenType.PoundElseIf:
                    // stop on a conditional branch end
                    ChewThroughConditionalEnd();
                    done = true;
                    break;
                case TokenType.EndOfStream:
                case TokenType.PoundEndIf:
                    done = true;
                    break;
                case TokenType.PoundPragma:
                    ProcessPoundPragma(line);
                    break;
                case TokenType.PoundDefine:
                    ProcessPoundDefine(line);
                    break;
                case TokenType.PoundUnDef:
                    ProcessPoundUndefine(line);
                    break;
                case TokenType.PoundInclude:
                    ProcessPoundInclude(line);
                    break;
                default:
                    // Ordinary line of text
                    WriteToStream(line);
                    break;
            }
        }
        catch (PreProcessorException ex)
        {
            // Report at the severity the exception carries, then recover
            if (ex.IsError)
            {
                _errorProvider.AddError(ex.Message);
            }
            else
            {
                _errorProvider.AddWarning(ex.Message);
            }

            _scanner.Rollback(mark);
            GetNextLine(); // Chew through the line
        }
    }
}