Example #1
        private TokenizeResult TokenizeCpp(string text, TextPosition pos, int length, bool allowDoxy)
        {
            TokenizeResult result = new TokenizeResult();
            Stopwatch      timer  = new Stopwatch();

            timer.Restart();
            List <CppToken> cppTokens = new List <CppToken>();

            using (CppLexer cppLexer = new CppLexer(text, pos, length))
            {
                cppTokens.AddRange(cppLexer.Tokenize());
                result.AddErrors(cppLexer.LexErrors);
            }
            timer.Stop();
            result.Stats.CppDuration += timer.Elapsed;
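            // Documentation comments are re-tokenized by the Doxygen lexer when allowDoxy is set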
            foreach (CppToken token in cppTokens)
            {
                if (allowDoxy && (token.Kind == CppTokenKind.MultiLineCommentDoc || token.Kind == CppTokenKind.SingleLineCommentDoc))
                {
                    result.AddToken(token);
                    using (TokenizeResult doxyRes = TokenizeDoxy(text, token.Position, token.Length))
                    {
                        result.Stats.DoxyDuration += doxyRes.Stats.DoxyDuration;
                        result.Stats.HtmlDuration += doxyRes.Stats.HtmlDuration;
                        result.AddTokens(doxyRes.Tokens);
                        result.AddErrors(doxyRes.Errors);
                    }
                }
                else
                {
                    result.AddToken(token);
                }
            }
            return(result);
        }
Example #3
        private TokenizeResult TokenizeHtml(string text, TextPosition pos, int length)
        {
            TokenizeResult result = new TokenizeResult();
            Stopwatch      timer  = Stopwatch.StartNew();

            using (HtmlLexer htmlLexer = new HtmlLexer(text, pos, length))
            {
                IEnumerable <HtmlToken> htmlTokens = htmlLexer.Tokenize();
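                // Keep the HTML tokens only if the lexer produced anything besides EOF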
                if (htmlTokens.FirstOrDefault(d => !d.IsEOF) != null)
                {
                    result.AddTokens(htmlTokens);
                }
                result.AddErrors(htmlLexer.LexErrors);
            }
            timer.Stop();
            result.Stats.HtmlDuration += timer.Elapsed;
            return(result);
        }
Example #4
        static void Main(string[] args)
        {
            string input = String.Empty;
            string output;

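            // Read lines until the user types "exit": tokenize the input, build an expression, then evaluate it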
            while (input != "exit")
            {
                input = Console.ReadLine();

                Tokenizer      tokenizer      = new Tokenizer();
                TokenizeResult tokenizeResult = tokenizer.TokenizeString(input);

                if (!tokenizeResult.Success)
                {
                    output = tokenizeResult.AddtionalInfo;
                }
                else
                {
                    ExpressionBuilder   expressionBuilder   = new ExpressionBuilder(tokenizeResult.TokenList);
                    ExpressionizeResult expressionizeResult = expressionBuilder.GenerateExpression();

                    if (!expressionizeResult.Success)
                    {
                        output = expressionizeResult.Error.Message;
                    }
                    else
                    {
                        EvaluationResult evaluationResult = expressionizeResult.CalculatedExpression.Evaluate(null);

                        if (!evaluationResult.Success)
                        {
                            output = evaluationResult.AdditionalInfo.Aggregate((item, aggregator) => $"{aggregator}\n{item}");
                        }
                        else
                        {
                            output = evaluationResult.Value.ToString();
                        }
                    }
                }

                Console.WriteLine(output);
            }
        }
Example #5
        private void Tokenize(string text)
        {
            Stopwatch timer = new Stopwatch();

            // Push back all tokens to the pools
            GiveTokensBackToPool();

            // Clear tokens & errors
            _tokens.Clear();
            _errors.Clear();
            _performanceItems.Clear();
            SymbolCache.Clear(this);

            Stopwatch totalLexTimer = Stopwatch.StartNew();

            // C++ lexing -> Doxygen (Code -> Cpp) -> (Text -> Html)
            TokenizerTimingStats totalStats = new TokenizerTimingStats();

            using (TokenizeResult cppRes = TokenizeCpp(text, new TextPosition(0), text.Length, true))
            {
                totalStats += cppRes.Stats;
                _tokens.AddRange(cppRes.Tokens);
                _errors.AddRange(cppRes.Errors);
            }
            totalLexTimer.Stop();
            Debug.WriteLine($"Lexing done (Tokens: {_tokens.Count}, Total: {totalLexTimer.Elapsed.ToMilliseconds()} ms, Insert: {totalStats.InsertDuration.ToMilliseconds()}, C++: {totalStats.CppDuration.ToMilliseconds()} ms, Doxygen: {totalStats.DoxyDuration.ToMilliseconds()} ms, Html: {totalStats.HtmlDuration.ToMilliseconds()} ms)");

            int countCppTokens  = _tokens.Count(t => typeof(CppToken).Equals(t.GetType()));
            int countHtmlTokens = _tokens.Count(t => typeof(HtmlToken).Equals(t.GetType()));
            int countDoxyTokens = _tokens.Count(t => typeof(DoxygenToken).Equals(t.GetType()));

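            // Record one performance entry per lexer (input size, token count, duration)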
            _performanceItems.Add(new PerformanceItemModel(this, TabIndex, $"{text.Length} chars", $"{countCppTokens} tokens", "C++ lexer", totalStats.CppDuration));
            _performanceItems.Add(new PerformanceItemModel(this, TabIndex, $"{text.Length} chars", $"{countDoxyTokens} tokens", "Doxygen lexer", totalStats.DoxyDuration));
            _performanceItems.Add(new PerformanceItemModel(this, TabIndex, $"{text.Length} chars", $"{countHtmlTokens} tokens", "Html lexer", totalStats.HtmlDuration));

            timer.Restart();
            _styler.Refresh(_tokens);
            timer.Stop();
            Debug.WriteLine($"Styler done, took {timer.Elapsed.ToMilliseconds()} ms");
        }
Example #6
        private TokenizeResult TokenizeDoxy(string text, TextPosition pos, int length)
        {
            TokenizeResult result = new TokenizeResult();

            Stopwatch timer = new Stopwatch();

            timer.Restart();
            List <DoxygenToken> doxyTokens = new List <DoxygenToken>();

            using (DoxygenLexer doxyLexer = new DoxygenLexer(text, pos, length))
            {
                doxyTokens.AddRange(doxyLexer.Tokenize());
                result.AddErrors(doxyLexer.LexErrors);
            }
            timer.Stop();
            result.Stats.DoxyDuration += timer.Elapsed;

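            // Second pass: match command start/end pairs and re-tokenize embedded code and HTML content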
            Stack <CommandStartState> startStates     = new Stack <CommandStartState>();
            Stack <DoxygenToken>      textStartTokens = new Stack <DoxygenToken>();
            var doxyTokenList = new LinkedList <DoxygenToken>(doxyTokens);
            var curLink       = doxyTokenList.First;

            while (curLink != null)
            {
                var doxyToken = curLink.Value;
                if (doxyToken.Kind == DoxygenTokenKind.CommandStart)
                {
                    result.AddToken(doxyToken);
                    string commandName            = text.Substring(doxyToken.Index + 1, doxyToken.Length - 1);
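                    // Collect the argument tokens that immediately follow this command start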
                    List <DoxygenToken> argTokens = new List <DoxygenToken>();
                    if (curLink.Next != null)
                    {
                        LinkedListNode <DoxygenToken> nextLink = curLink.Next;
                        while (nextLink != null)
                        {
                            if (nextLink.Value.IsArgument)
                            {
                                argTokens.Add(nextLink.Value);
                            }
                            else
                            {
                                break;
                            }
                            nextLink = nextLink.Next;
                        }
                        curLink = nextLink;
                    }
                    else
                    {
                        curLink = null;
                    }

                    result.AddTokens(argTokens);

                    CommandStartState startState = new CommandStartState(doxyToken, commandName);
                    startState.ArgTokens.AddRange(argTokens);
                    startStates.Push(startState);

                    if (argTokens.Count > 0)
                    {
                        var last = argTokens.Last();
                        startState.StartPosition = new TextPosition(last.End, last.Position.Line, last.Position.Column);
                    }
                    else
                    {
                        startState.StartPosition = new TextPosition(doxyToken.End, doxyToken.Position.Line, doxyToken.Position.Column);
                    }

                    continue;
                }

                if (doxyToken.Kind == DoxygenTokenKind.CommandEnd)
                {
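                    // Pair this end command with the most recent command start on the stack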
                    string            commandName   = text.Substring(doxyToken.Index + 1, doxyToken.Length - 1);
                    CommandStartState topStartState = startStates.Count > 0 ? startStates.Peek() : null;
                    if (topStartState != null)
                    {
                        var rule = DoxygenSyntax.GetCommandRule(commandName);
                        Debug.Assert(rule != null && rule.Kind == DoxygenSyntax.CommandKind.EndCommandBlock);
                        DoxygenSyntax.EndBlockCommandRule endRule = rule as DoxygenSyntax.EndBlockCommandRule;
                        Debug.Assert(endRule != null);
                        if (endRule.StartCommandNames.Contains(topStartState.CommandName))
                        {
                            TextPosition commandContentStart = topStartState.StartPosition;
                            TextPosition commandContentEnd   = doxyToken.Position;
                            Debug.Assert(commandContentEnd.Index >= commandContentStart.Index);
                            int commandContentLength = commandContentEnd.Index - commandContentStart.Index;

                            // Special handling for code block
                            if ("code".Equals(topStartState.CommandName))
                            {
                                string       codeType      = null;
                                DoxygenToken firstArgToken = topStartState.ArgTokens.FirstOrDefault();
                                if (firstArgToken != null && firstArgToken.Kind == DoxygenTokenKind.ArgumentCaption)
                                {
                                    codeType = text.Substring(firstArgToken.Index, firstArgToken.Length);
                                }
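                                // For C/C++ code captions, re-tokenize the block content with the C++ lexer (Doxygen disabled)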
                                if ("{.c}".Equals(codeType, StringComparison.InvariantCultureIgnoreCase) || "{.cpp}".Equals(codeType, StringComparison.InvariantCultureIgnoreCase))
                                {
                                    using (TokenizeResult cppRes = TokenizeCpp(text, commandContentStart, commandContentLength, false))
                                    {
                                        result.AddTokens(cppRes.Tokens);
                                        result.AddErrors(cppRes.Errors);
                                    }
                                }
                            }
                            startStates.Pop();
                        }
                    }
                    else
                    {
                        // @TODO(final): Print error (Command end without command start)
                    }
                    result.AddToken(doxyToken);
                }
                else if (doxyToken.Kind == DoxygenTokenKind.TextStart)
                {
                    textStartTokens.Push(doxyToken);
                    result.AddToken(doxyToken);
                }
                else if (doxyToken.Kind == DoxygenTokenKind.TextEnd)
                {
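                    // The text between TextStart and TextEnd may contain HTML, so run it through the HTML lexer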
                    if (textStartTokens.Count > 0)
                    {
                        DoxygenToken textStartToken = textStartTokens.Pop();
                        Debug.Assert(doxyToken.Index >= textStartToken.Index);
                        int textContentLen = doxyToken.Index - textStartToken.Index;
                        using (TokenizeResult htmlRes = TokenizeHtml(text, textStartToken.Position, textContentLen))
                        {
                            result.AddTokens(htmlRes.Tokens);
                            result.AddErrors(htmlRes.Errors);
                            result.Stats.HtmlDuration += htmlRes.Stats.HtmlDuration;
                        }
                    }
                    result.AddToken(doxyToken);
                }
                else
                {
                    result.AddToken(doxyToken);
                }
                curLink = curLink.Next;
            }
            return(result);
        }
Example #7
 public override bool CanFixup(TokenizeResult <WhoisResponse> result)
 {
     // Templates that this Fixup can work on
     return(result.Template.Name == "whois.isoc.org.il/il/Found");
 }
Example #8