/// <summary>
/// Walks the line's token stream and substitutes every Word token that names a
/// #define'd macro with that macro's replacement tokens.  After the pass, if any
/// replacement introduced further macro names, recurses to expand them as well.
/// </summary>
/// <param name="line">Line whose TokenList is mutated in place; must not be null</param>
private void ReplaceDefinedTokens(PreprocessorLine line)
{
    ThrowIfNull(line);
    int i = 0;
    List<Token> list = line.TokenList;
    while ((i < list.Count))
    {
        Token token = list[i];
        if (token.TokenType != TokenType.Word)
        {
            // Only Word tokens can name macros; skip everything else
            i += 1;
            continue;
        }

        Macro macro = null;
        if (_macroMap.TryGetValue(token.Value, out macro))
        {
            // Remove the original token
            list.RemoveAt(i);
            List<Token> replaceList = null;
            if (macro.IsMethod)
            {
                MethodMacro method = (MethodMacro)macro;
                List<Token> args = ParseAndRemoveMacroMethodArguments(list, i);
                if (args == null)
                {
                    // Parse did not succeed, move to the next token.
                    // NOTE(review): the macro-name token was already removed above, so on
                    // this failure path it is lost, and the i += 1 skips the token that
                    // shifted into its slot — confirm both effects are intended.
                    i += 1;
                }
                else
                {
                    // Insert the tokens
                    replaceList = ReplaceMethodMacro(method, args);
                }
            }
            else
            {
                // Use the scanner to create the replacement tokens
                replaceList = Scanner.TokenizeText(macro.Value, CreateScannerOptions());
            }

            if (replaceList != null)
            {
                CollapseDoublePounds(replaceList);
                // i is deliberately not advanced past the inserted tokens, so they are
                // rescanned on the next iteration.  NOTE(review): a self-referential
                // macro (#define A A) would therefore loop here forever — confirm the
                // macro table cannot contain one.
                list.InsertRange(i, replaceList);
            }
        }
        else
        {
            i += 1;
        }
    }

    // Do one more pass to check and see if we need a recursive replace
    bool needAnotherPass = false;
    foreach (Token cur in line.TokenList)
    {
        if (cur.TokenType == TokenType.Word && _macroMap.ContainsKey(cur.Value))
        {
            needAnotherPass = true;
            break;
        }
    }

    if (needAnotherPass)
    {
        ReplaceDefinedTokens(line);
    }
}
/// <summary>
/// Get the next line of tokens from the scanner.  A "line" runs to the next
/// NewLine token, except that a trailing backslash on a preprocessor-style line
/// continues onto the following physical line.  End-of-stream is converted into
/// a synthetic NewLine so callers always see a terminated line.
/// </summary>
/// <returns>The assembled line; FirstValidToken is always non-null</returns>
/// <remarks></remarks>
private PreprocessorLine GetNextLine()
{
    PreprocessorLine line = new PreprocessorLine();
    line.TokenList = new List<Token>();
    Token lastValidToken = null;
    bool done = false;
    while (!done)
    {
        Token token = _scanner.GetNextToken();
        line.TokenList.Add(token);
        bool isValid = false;
        if (token.TokenType == TokenType.NewLine)
        {
            // Check and see if this is a preprocessor directive token that ends with a
            // backslash.  If so then remove the backslash from the stream and continue
            // processing the line
            if (lastValidToken != null && lastValidToken.TokenType == TokenType.BackSlash)
            {
                isValid = false;
                line.TokenList.Remove(lastValidToken);
                lastValidToken = null;
            }
            else
            {
                done = true;
                isValid = true;
            }
        }
        else if (token.TokenType == TokenType.EndOfStream)
        {
            done = true;
            isValid = true;
            // simulate a newline token in place of the end-of-stream marker
            line.TokenList.RemoveAt(line.TokenList.Count - 1);
            line.TokenList.Add(new Token(TokenType.NewLine, Environment.NewLine));
        }
        else if (token.TokenType != TokenType.WhiteSpace)
        {
            isValid = true;
        }
        else
        {
            isValid = false;
        }

        if (isValid)
        {
            lastValidToken = token;
            if (line.FirstValidToken == null)
            {
                line.FirstValidToken = token;
                // See if this is a preprocessor line
                if (token.IsPreProcessorDirective)
                {
                    line.IsPreProcessorDirectiveLine = true;
                }
            }
        }
    }

    // This should always have at least one valid token
    ThrowIfNull(line.FirstValidToken);

    // Check and see if the line looks like a directive split across tokens, e.g.
    //   # define foo
    // If so convert it to a valid pre-processor line
    CollapseExpandedPreprocessorLines(ref line);

    // If this is not a preprocessor directive line then we need to substitute all of the
    // #define'd tokens in the stream.  #include lines also get substitution so that
    // macro-built paths resolve.
    if (!line.IsPreProcessorDirectiveLine || (line.FirstValidToken != null && line.FirstValidToken.TokenType == TokenType.PoundInclude))
    {
        ReplaceDefinedTokens(line);
    }

    // Collapse quoted strings that are adjacent to each other
    CollapseAdjacentQuoteStrings(line);
    return (line);
}
/// <summary>
/// Process a #define that is actually a function-style macro, e.g.
/// "#define MAX(x,y) body".  Parses the name and parameter list, then captures
/// the body tokens and collapses whitespace around ## (token-paste) entries.
/// Falls back to <see cref="ProcessPoundDefineComplexMacro"/> when the line
/// cannot be treated as a method macro (nested '(' in the parameter list or an
/// empty body).
/// </summary>
/// <param name="line">The tokenized #define line</param>
/// <returns>The parsed macro instance</returns>
/// <remarks></remarks>
private Macro ProcessPoundDefineMethod(PreprocessorLine line)
{
    // First step is to parse out the name and parameters.
    // list[1] is the macro name; the first three valid tokens are
    // "#define", the name, and the opening '('
    List<Token> list = line.GetValidTokens();
    string name = list[1].Value;
    list.RemoveRange(0, 3);

    List<string> paramList = new List<string>();
    while (list[0].TokenType != TokenType.ParenClose)
    {
        if (list[0].TokenType == TokenType.Word)
        {
            paramList.Add(list[0].Value);
        }
        else if (list[0].TokenType == TokenType.ParenOpen)
        {
            // ( is not legal inside a parameter list.  This is a simple macro
            return ProcessPoundDefineComplexMacro(line);
        }
        list.RemoveAt(0);
    }

    // Now get the fullBody.  We need the actual text for the fullBody so search
    // through the true (unfiltered) token list for the closing paren of the
    // parameter list; everything after it is the body
    int index = 0;
    while (line.TokenList[index].TokenType != TokenType.ParenClose)
    {
        index += 1;
    }
    index += 1;
    List<Token> fullBody = line.TokenList.GetRange(index, line.TokenList.Count - index);

    // Strip the leading whitespace/newlines from the fullBody
    // (fixed comment: the original said "trailing and ending")
    while (fullBody.Count > 0 && (fullBody[0].TokenType == TokenType.WhiteSpace || fullBody[0].TokenType == TokenType.NewLine))
    {
        fullBody.RemoveAt(0);
    }

    // An empty body cannot form a method macro; treat it as a simple macro
    if (fullBody.Count == 0)
    {
        return ProcessPoundDefineComplexMacro(line);
    }

    // Strip the trailing whitespace/newlines from the fullBody
    while (fullBody.Count > 0 && (fullBody[fullBody.Count - 1].TokenType == TokenType.WhiteSpace || fullBody[fullBody.Count - 1].TokenType == TokenType.NewLine))
    {
        fullBody.RemoveAt(fullBody.Count - 1);
    }

    // Copy the body token list since we are about to change the data
    // (fixed typo: original said "Coy")
    List<Token> body = new List<Token>(fullBody);

    // Collapse the whitespace around ## entries so token pasting sees its
    // operands adjacent
    int i = 0;
    while (i + 1 < body.Count)
    {
        Token left = body[i];
        Token right = body[i + 1];
        if (left.TokenType == TokenType.Pound && right.TokenType == TokenType.Pound)
        {
            // First look at the right
            if (i + 2 < body.Count && body[i + 2].TokenType == TokenType.WhiteSpace)
            {
                body.RemoveAt(i + 2);
            }
            // Now look at the left
            if (i > 0 && body[i - 1].TokenType == TokenType.WhiteSpace)
            {
                body.RemoveAt(i - 1);
            }
        }
        i += 1;
    }

    // Removed a dead "index += 1;" statement that previously sat here: index is
    // never read again after the body was extracted
    return new MethodMacro(name, paramList, body, fullBody);
}
/// <summary>
/// Parse a #define as a simple (non-method) macro: the first Word token at or
/// after the #define token is the macro name, and the remaining token text
/// (whitespace-trimmed) becomes the macro value.  Emits a warning and returns
/// an anonymous empty macro when the line is malformed.
/// </summary>
/// <param name="line">The tokenized #define line</param>
/// <returns>The parsed macro, or an anonymous placeholder on error</returns>
private Macro ProcessPoundDefineComplexMacro(PreprocessorLine line)
{
    // Work on a private copy of the token list and drop all newlines up front
    List<Token> tokens = new List<Token>(line.TokenList);
    tokens.RemoveAll(t => t.TokenType == TokenType.NewLine);

    // Locate the #define token, then the macro name (first Word at/after it)
    Token defineToken = null;
    Token nameToken = null;
    int namePos = -1;
    int definePos = tokens.FindIndex(t => t.TokenType == TokenType.PoundDefine);
    if (definePos >= 0)
    {
        defineToken = tokens[definePos];
        namePos = tokens.FindIndex(definePos, t => t.TokenType == TokenType.Word);
        if (namePos >= 0)
        {
            nameToken = tokens[namePos];
        }
    }

    if (defineToken == null || nameToken == null)
    {
        _errorProvider.AddWarning("Error processing line: {0}", line.ToString());
        return new Macro(NativeSymbolBag.GenerateAnonymousName(), string.Empty);
    }

    // Drop everything through the macro name, then trim whitespace from both
    // ends of what remains
    tokens.RemoveRange(0, namePos + 1);
    while (tokens.Count > 0 && (tokens[0].TokenType == TokenType.WhiteSpace || tokens[0].TokenType == TokenType.NewLine))
    {
        tokens.RemoveAt(0);
    }
    while (tokens.Count > 0 && (tokens[tokens.Count - 1].TokenType == TokenType.WhiteSpace || tokens[tokens.Count - 1].TokenType == TokenType.NewLine))
    {
        tokens.RemoveAt(tokens.Count - 1);
    }

    // The macro value is simply the concatenated token text
    var builder = new StringBuilder();
    foreach (Token current in tokens)
    {
        builder.Append(current.Value);
    }
    return new Macro(nameToken.Value, builder.ToString());
}
/// <summary>
/// Core processing loop.  Processes blocks of text line by line, dispatching
/// preprocessor directives to their handlers and writing ordinary lines through
/// to the output stream.  Returns when the stream ends or the enclosing
/// conditional block closes (#endif / #else / #elif).
/// </summary>
/// <remarks></remarks>
private void ProcessLoop()
{
    bool done = false;
    while (!done)
    {
        // Mark the scanner position so a failed line can be rewound and skipped
        ScannerMark mark = _scanner.Mark();
        try
        {
            PreprocessorLine line = this.GetNextLine();
            ThrowIfFalse(line.TokenList.Count > 0);
            Token token = line.FirstValidToken;
            if (token == null)
            {
                WriteToStream(line);
                continue;
            }

            switch (token.TokenType)
            {
                case TokenType.PoundIf:
                    ProcessPoundIf(line);
                    break;
                case TokenType.PoundIfndef:
                    ProcessPoundIfndef(line);
                    break;
                case TokenType.PoundElse:
                case TokenType.PoundElseIf:
                    // stop on a conditional branch end: the branch we were
                    // processing is over, so consume the rest of the conditional
                    ChewThroughConditionalEnd();
                    done = true;
                    break;
                case TokenType.EndOfStream:
                case TokenType.PoundEndIf:
                    done = true;
                    break;
                case TokenType.PoundPragma:
                    ProcessPoundPragma(line);
                    break;
                case TokenType.PoundDefine:
                    ProcessPoundDefine(line);
                    break;
                case TokenType.PoundUnDef:
                    ProcessPoundUndefine(line);
                    break;
                case TokenType.PoundInclude:
                    ProcessPoundInclude(line);
                    break;
                default:
                    // Not a directive: pass the line through unchanged
                    WriteToStream(line);
                    break;
            }
        }
        catch (PreProcessorException ex)
        {
            if (ex.IsError)
            {
                _errorProvider.AddError(ex.Message);
            }
            else
            {
                _errorProvider.AddWarning(ex.Message);
            }
            // Rewind to the start of the failed line, then consume it so the
            // loop does not re-process the same line forever
            _scanner.Rollback(mark);
            GetNextLine(); // Chew through the line
        }
    }
}
/// <summary>
/// Parses the parenthesized argument list of a method-macro invocation that
/// begins at index <paramref name="start"/> in <paramref name="list"/>.  On
/// success the consumed tokens (through the closing paren) are removed from the
/// list and the arguments are returned; on failure null is returned and the
/// list is left untouched.
/// </summary>
/// <param name="list">Token stream being expanded; mutated only on success</param>
/// <param name="start">Index where the argument list is expected to begin</param>
/// <returns>The parsed argument tokens, or null if no well-formed list was found</returns>
private List<Token> ParseAndRemoveMacroMethodArguments(List<Token> list, Int32 start)
{
    List<Token> args = new List<Token>();
    Int32 i = start;

    // Search for the start paren, skipping any leading whitespace
    while (i < list.Count && list[i].TokenType == TokenType.WhiteSpace)
    {
        i += 1;
    }
    // Fix: guard against running off the end of the list.  The original indexed
    // list[i] unconditionally, which throws when only whitespace remains
    if (i == list.Count || list[i].TokenType != TokenType.ParenOpen)
    {
        return null;
    }
    i += 1; // Move past the '('

    var depth = 0;
    var curArg = new Token(TokenType.Text, string.Empty);
    var done = false;
    while (i < list.Count && !done)
    {
        Token cur = list[i];
        bool append = false;
        switch (cur.TokenType)
        {
            case TokenType.Comma:
                if (depth == 0)
                {
                    // Top-level comma terminates the current argument
                    args.Add(curArg);
                    curArg = new Token(TokenType.Text, string.Empty);
                }
                else
                {
                    // Fix: a comma inside nested parens is part of the argument
                    // text and must be kept.  The original dropped it, turning
                    // e.g. FOO(bar(a,b)) into the argument "bar(ab)"
                    append = true;
                }
                break;
            case TokenType.ParenOpen:
                depth += 1;
                append = true;
                break;
            case TokenType.ParenClose:
                if (depth == 0)
                {
                    // Closing paren of the argument list itself
                    args.Add(curArg);
                    done = true;
                }
                else
                {
                    depth -= 1;
                    append = true;
                }
                break;
            default:
                append = true;
                break;
        }

        if (done)
        {
            break;
        }

        if (append)
        {
            if (curArg.TokenType == TokenType.Text && string.IsNullOrEmpty(curArg.Value))
            {
                // First token of the argument: keep it as-is
                curArg = cur;
            }
            else
            {
                // Subsequent tokens: accumulate into a single Text token
                curArg = new Token(TokenType.Text, curArg.Value + cur.Value);
            }
        }
        i += 1;
    }

    if (i == list.Count)
    {
        // Ran out of tokens before finding the closing paren
        return null;
    }

    // Success so remove the consumed range.  'i' currently points at ')'
    list.RemoveRange(start, (i - start) + 1);
    return args;
}