Example #1
        /// <summary>
        /// Returns a list of tokens for a given string
        /// </summary>
        public List<Token> TokenizeString(string data)
        {
            var lexer   = new ProLexer(data);
            var outList = lexer.GetTokensList.ToList();

            // drop the last token (the lexer terminates its list with an end-of-stream token)
            outList.RemoveAt(outList.Count - 1);
            return outList;
        
        }
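For context, a minimal usage sketch. The snippet does not show which class declares TokenizeString, so the `tokenizer` instance below is hypothetical, and the ABL source string is illustrative only:

            // assuming 'tokenizer' is an instance of the class declaring TokenizeString
            var tokens = tokenizer.TokenizeString("DEFINE VARIABLE i AS INTEGER NO-UNDO.");
            Console.WriteLine("Token count: " + tokens.Count);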
Example #2
        /// <summary>
        /// Parses a text into a list of parsedItems
        /// </summary>
        public Parser(ProLexer proLexer, string filePathBeingParsed, ParsedScopeItem defaultScope, bool matchKnownWords)
        {
            // process inputs
            _filePathBeingParsed = filePathBeingParsed;
            _matchKnownWords     = matchKnownWords && KnownStaticItems != null;

            // the first item of this list represents the file currently being parsed
            _parsedIncludes.Add(
                new ParsedIncludeFile(
                    "root",
                    new TokenEos(null, 0, 0, 0, 0),
                    // the preprocessed variable {0} equals the file name
                    new Dictionary<string, List<Token>>(StringComparer.CurrentCultureIgnoreCase) {
                        { "0", new List<Token> { new TokenWord(Path.GetFileName(FilePathBeingParsed), 0, 0, 0, 0) } }
                    },
                    _filePathBeingParsed,
                    null)
                );

            // init context
            _context = new ParseContext {
                BlockStack     = new Stack<BlockInfo>(),
                PreProcIfStack = new Stack<ParsedPreProcBlock>(),
                UibBlockStack  = new Stack<ParsedPreProcBlock>()
            };

            // create root item
            if (defaultScope == null)
            {
                var rootToken = new TokenEos(null, 0, 0, 0, 0) { OwnerNumber = 0 };
                _rootScope    = new ParsedFile("Root", rootToken);
                AddParsedItem(_rootScope, rootToken.OwnerNumber);
            }
            else
            {
                _rootScope = defaultScope;
            }
            _context.Scope = _rootScope;

            // Analyze
            _tokenList  = proLexer.GetTokensList;
            _tokenCount = _tokenList.Count;
            ReplacePreProcVariablesAhead(1); // replaces a preproc var {&x} at token position 0
            ReplacePreProcVariablesAhead(2); // replaces a preproc var {&x} at token position 1
            while (MoveNext())
            {
                try
                {
                    Analyze();
                }
                catch (Exception e)
                {
                    ErrorHandler.LogError(e, "Error while parsing the following file: " + filePathBeingParsed);
                }
            }

            // add missing values to the line dictionary: walking backwards, each line without info inherits the info of the nearest following line that has some
            var current = new LineInfo(GetCurrentDepth(), _rootScope);

            for (int i = proLexer.MaxLine; i >= 0; i--)
            {
                if (_lineInfo.ContainsKey(i))
                {
                    current = _lineInfo[i];
                }
                else
                {
                    _lineInfo.Add(i, current);
                }
            }

            // check that we match an &ENDIF for each &IF
            if (_context.PreProcIfStack.Count > 0)
            {
                _parserErrors.Add(new ParserError(ParserErrorType.MismatchNumberOfIfEndIf, PeekAt(0), _context.PreProcIfStack.Count, _parsedIncludes));
            }

            // dispose
            _context.BlockStack.Clear();
            _context.PreProcIfStack.Clear();
            _context.UibBlockStack.Clear();
            _context   = null;
            _tokenList = null;

            // if we just parsed an include file whose lexer was saved for later use, drop the saved copy so it can be refreshed
            if (SavedLexerInclude.ContainsKey(filePathBeingParsed))
            {
                SavedLexerInclude.Remove(filePathBeingParsed);
            }
        }
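For context, a hedged usage sketch of this constructor: ProLexer takes the source text in its constructor (as seen in Example #1), and passing null for defaultScope lets the parser create its own root scope, per the "create root item" branch above. The file path is illustrative only:

            var sourcePath = @"C:\work\procedure.p"; // illustrative path
            var lexer  = new ProLexer(File.ReadAllText(sourcePath));
            // null default scope -> the constructor creates a root ParsedFile scope itself
            var parser = new Parser(lexer, sourcePath, null, false);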