        /// <summary>
        /// Returns the list of tokens corresponding to the given include
        /// </summary>
        private List<Token> GetIncludeFileTokens(TokenString previousTokenString, ParsedIncludeFile parsedInclude)
        {
            // Only tokenize the include file if its path could be resolved
            if (!string.IsNullOrEmpty(parsedInclude.IncludeFilePath))
            {
                ProTokenizer proTokenizer;

                // did we already parse this file in a previous parse session?
                if (SavedTokenizerInclude.ContainsKey(parsedInclude.IncludeFilePath))
                {
                    proTokenizer = SavedTokenizerInclude[parsedInclude.IncludeFilePath];
                }
                else
                {
                    // Tokenize it; the two flags tell the tokenizer whether the include
                    // reference was enclosed in double or single quotes
                    if (previousTokenString != null && !string.IsNullOrEmpty(previousTokenString.Value))
                    {
                        proTokenizer = new ProTokenizer(Utils.ReadAllText(parsedInclude.IncludeFilePath), previousTokenString.Value[0] == '"', previousTokenString.Value[0] == '\'');
                    }
                    else
                    {
                        proTokenizer = new ProTokenizer(Utils.ReadAllText(parsedInclude.IncludeFilePath));
                    }
                    // cache the tokenizer so this include is only tokenized once per parse session
                    if (!SavedTokenizerInclude.ContainsKey(parsedInclude.IncludeFilePath))
                    {
                        SavedTokenizerInclude.Add(parsedInclude.IncludeFilePath, proTokenizer);
                    }
                }

                _parsedIncludes.Add(parsedInclude);
                var includeNumber = (ushort)(_parsedIncludes.Count - 1);

                // copy every token except the trailing EOF, tagging each copy with the
                // owner number of this include so it can be traced back to its file
                List<Token> copiedTokens = new List<Token>();
                for (int i = 0; i < proTokenizer.GetTokensList.Count - 1; i++)
                {
                    var token       = proTokenizer.GetTokensList[i];
                    var copiedToken = token.Copy(token.Line, token.Column, token.StartPosition, token.EndPosition);
                    copiedToken.OwnerNumber = includeNumber;
                    copiedTokens.Add(copiedToken);
                }
                return copiedTokens;
            }

            // the include file could not be resolved on disk
            parsedInclude.Flags |= ParseFlag.NotFound;
            return null;
        }
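
A minimal sketch of a call site for the helper above, assuming the parser splices the returned tokens into its own token stream; the method name InsertIncludeTokens, the use of the _tokenList/_tokenPos/_tokenCount fields, and the GapBuffer Insert call are assumptions for illustration, not the project's real API:

        // hypothetical caller (illustration only): splice the tokens of a
        // resolved include into the main token stream
        private void InsertIncludeTokens(TokenString previousTokenString, ParsedIncludeFile parsedInclude)
        {
            var includeTokens = GetIncludeFileTokens(previousTokenString, parsedInclude);
            if (includeTokens == null)
            {
                return; // the include was flagged with ParseFlag.NotFound
            }
            // each copied token carries the include's owner number, so errors and
            // parsed items can later be traced back to the right file
            var insertAt = _tokenPos + 1;
            foreach (var token in includeTokens)
            {
                _tokenList.Insert(insertAt++, token); // assumes GapBuffer exposes Insert
            }
            _tokenCount = _tokenList.Count;
        }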
Example #2
        /// <summary>
        /// Parses a list of tokens into a list of parsedItems
        /// </summary>
        public Parser(GapBuffer<Token> tokens, string filePathBeingParsed, ParsedScopeBlock defaultScope, bool matchKnownWords, StringBuilder debugListOut)
        {
            // process inputs
            _filePathBeingParsed = filePathBeingParsed;
            _matchKnownWords     = matchKnownWords && KnownStaticItems != null;

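            // synthetic end-of-statement token anchoring the root scope; its owner
            // number 0 maps to the first entry of _parsedIncludes (the file itself)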
            var rootToken = new TokenEos(null, 0, 0, 0, 0)
            {
                OwnerNumber = 0
            };

            // the first entry of this list represents the file currently being parsed
            _parsedIncludes.Add(
                new ParsedIncludeFile(
                    "root",
                    rootToken,
                    // the preprocessor variable {0} is equal to the name of the file being parsed...
                    new Dictionary<string, string>(StringComparer.CurrentCultureIgnoreCase)
                    {
                        { "0", Path.GetFileName(FilePathBeingParsed) }
                    },
                    _filePathBeingParsed,
                    null)
                );

            // init context
            _context = new ParseContext {
                BlockStack              = new Stack<ParsedScope>(),
                CurrentStatement        = new ParsedStatement(rootToken),
                CurrentStatementIsEnded = true
            };

            // create root item
            var rootScope = defaultScope ?? new ParsedFile("Root", rootToken);

            _context.BlockStack.Push(rootScope);
            if (defaultScope == null)
            {
                AddParsedItem(rootScope, 0);
            }

            // Analyze
            _tokenList  = tokens;
            _tokenCount = _tokenList.Count;
            _tokenPos   = -1;
            ReplaceIncludeAndPreprocVariablesAhead(1); // replaces an include or a preproc var {&x} at token position 0
            ReplaceIncludeAndPreprocVariablesAhead(2); // same, at token position 1
            while (MoveNext())
            {
                try
                {
                    Analyze();
                }
                catch (Exception e)
                {
                    ErrorHandler.LogError(e, "Error while parsing the following file : " + filePathBeingParsed);
                }
            }
            AddLineInfo(_tokenList[_tokenList.Count - 1]); // add info on last line
            PopOneStatementIndentBlock(0);                 // make sure to pop the final block

            // add missing values to the line dictionary; missing entries correspond to lines
            // inside a multi-line comment/string for which no EOL was matched to add line info
            var currentLineInfo = _lineInfo[_tokenList[_tokenList.Count - 1].Line];

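            // walk the lines backward, back-filling each missing line with the
            // info of the closest following line that has an entry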
            for (int i = PeekAt(-1).Line - 1; i >= 0; i--)
            {
                if (!_lineInfo.ContainsKey(i))
                {
                    _lineInfo.Add(i, currentLineInfo);
                }
                else
                {
                    currentLineInfo = _lineInfo[i];
                }
            }

            // check for parser errors: any scope left on the stack (besides the root) was never closed
            while (_context.BlockStack.Count > 1)
            {
                ParsedScope scope = _context.BlockStack.Pop();
                // check that we matched an &ANALYZE-RESUME for each &ANALYZE-SUSPEND
                if (scope is ParsedScopePreProcBlock)
                {
                    _parserErrors.Add(new ParserError(ParserErrorType.MissingUibBlockEnd, PeekAt(-1), _context.BlockStack.Count, _parsedIncludes));
                }

                // check that we match an &ENDIF for each &IF
                else if (scope is ParsedScopePreProcIfBlock)
                {
                    _parserErrors.Add(new ParserError(ParserErrorType.MissingPreprocEndIf, PeekAt(-1), _context.BlockStack.Count, _parsedIncludes));
                }

                // check that we match an END. for each block
                else
                {
                    _parserErrors.Add(new ParserError(ParserErrorType.MissingBlockEnd, PeekAt(-1), _context.BlockStack.Count, _parsedIncludes));
                }
            }

            // append the concatenation of all the tokens to the debug output once the parsing is done
            if (debugListOut != null)
            {
                foreach (var token in _tokenList)
                {
                    debugListOut.Append(token.Value);
                }
            }

            // release the working state; only the parse results are kept alive
            _context.BlockStack = null;
            _context            = null;
            _tokenList          = null;
            _functionPrototype  = null;
            _parsedIncludes     = null;
            _knownWords         = null;

            // if the file we just parsed had a tokenizer cached for include use, drop the
            // stale entry so the file gets re-tokenized with fresh content next time
            if (SavedTokenizerInclude.ContainsKey(filePathBeingParsed))
            {
                SavedTokenizerInclude.Remove(filePathBeingParsed);
            }
        }
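
A hedged usage sketch for the constructor above; it assumes ProTokenizer.GetTokensList is compatible with the GapBuffer<Token> parameter (as the first example suggests) and that the caller only wants the preprocessed token dump:

        // hypothetical driver (illustration under the assumptions stated above,
        // not the project's real entry point)
        public static string ParseAndDump(string path)
        {
            var tokenizer = new ProTokenizer(Utils.ReadAllText(path));
            var debugListOut = new StringBuilder();
            // passing null for defaultScope makes the parser create its own root ParsedFile scope
            var parser = new Parser(tokenizer.GetTokensList, path, null, false, debugListOut);
            // debugListOut now holds the concatenation of all tokens, with includes
            // and preprocessor variables already expanded
            return debugListOut.ToString();
        }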