/// <summary>
/// Tokenizes a bracket-delimited compound region (e.g. "( ... )") including any
/// nested bracket pairs, recursively tokenizing the enclosed text and emitting
/// a single <c>CompoundToken</c> that wraps the inner tokens.
/// Fails when a required previous-token type does not match, or when the input
/// ends before the matching closing bracket is found.
/// </summary>
public TokenizerResult Tokenize(ITracker tracker)
        {
            // Lazily fall back to the default tokenizer set when none was configured.
            tokenizers ??= StandardList.List;

            // Fail when a previous-token type is required (PreviousType != NA) and the
            // preceding token shares no flag bits with it. A missing previous token is
            // treated as TokenType.None.
            // NOTE(review): unlike the other Tokenize overloads in this file, this path
            // returns Failure without calling tracker.AddError — confirm that is intended.
            if (PreviousType != Token.TokenType.NA && ((tracker.Before?.Type ?? Token.TokenType.None) & PreviousType) == Token.TokenType.NA)
            {
                return(TokenizerResult.Failure);
            }

            // Current bracket nesting depth; reaches 0 again on the matching closer.
            int level = 0;

            // Start of the compound token's text region (current tracker position).
            int textRegionStart = tracker.Index;

            // Raw text read so far, including both brackets; its length doubles as the
            // read-ahead offset, so nothing is consumed until the region is complete.
            string readString = "";

            while (true)
            {
                // Peek the next character without consuming; running out of input
                // means the bracket opened at some level was never closed.
                if (!tracker.ReadWithOffset(readString.Length, out char c))
                {
                    tracker.AddError("No closing bracket in level: " + level, 2, new TextRegion(tracker.InitialIndex));
                    return(TokenizerResult.Failure);
                }
                readString += c;

                if (c == OpeningBracket)
                {
                    level++;
                }
                else if (c == ClosingBracket)
                {
                    level--;

                    // level == 0: this closer matches the very first opener, so the
                    // whole compound region has been read.
                    if (level == 0)
                    {
                        List <Token> tokens;
                        // ResetPoint restores the tracker after the inner tokenization,
                        // so the outer state is unaffected by the recursive pass.
                        using (var point = new ResetPoint(tracker))
                        {
                            // Usage of PseudoToken to ensure that tracker.Last returns TokenType.None for further checks
                            var pseudoToken = new Token(new TextRegion(tracker.Index, tracker.Index + 1), OpeningBracket.ToString(), "PseudoToken", Token.TokenType.None);
                            tracker.AddToken(pseudoToken, 1);
                            // Tokenize the interior only: end excludes the closing bracket
                            // (readString includes both brackets, hence the -1 twice).
                            tokens = new Tokenizer(tokenizers).TokenizeFrom(tracker, point.resetPoint, tracker.Index - 1 + readString.Length - 1);
                            tokens.Remove(pseudoToken);                             // should equal tokens.RemoveAt(0)
                        }

                        // Emit one compound token covering the whole bracketed span and
                        // consume the entire region in a single step.
                        tracker.AddToken(new CompoundToken(new TextRegion(textRegionStart, textRegionStart + readString.Length), tokens, OpeningBracket, ClosingBracket, Type, Name), readString.Length);
                        return(TokenizerResult.Success);
                    }
                }
            }
        }
        public TokenizerResult Tokenize(ITracker tracker)
        {
            // Fail if the previoustype is not NA and the type of the previous token doesn´t match
            if (PreviousType != Token.TokenType.NA && ((tracker.Before?.Type ?? Token.TokenType.None) & PreviousType) == Token.TokenType.NA)
            {
                tracker.AddError("'" + (tracker.Before?.ToString() ?? "") + "' Can´t appear in front of " + Type, 1);
                return(TokenizerResult.Failure);
            }

            if (PreviousNames != null && PreviousNames.Length != 0 && !PreviousNames.Contains(tracker.Before?.Name))
            {
                tracker.AddError("'" + (tracker.Before?.ToString() ?? "") + "' Can´t appear in front of " + Type, 1);
                return(TokenizerResult.Failure);
            }

            if (!MatchesValue(tracker, out string consumed))
            {
                return(TokenizerResult.Failure);
            }

            Token t = new Token(new TextRegion(tracker.Index, tracker.Index + consumed.Length), consumed, Name, Type);

            tracker.AddToken(t, consumed.Length);

            return(TokenizerResult.Success);
        }
        public TokenizerResult Tokenize(ITracker tracker)
        {
            int count = 0;

            while (tracker.ReadWithOffset(count, out c) && range.Contains(c))
            {
                count++;
            }
            tracker.AddToken(null, count);
            return(TokenizerResult.Success);
        }
Example #4
0
        public TokenizerResult Tokenize(ITracker tracker)
        {
            if (PreviousType != Token.TokenType.NA && ((tracker.Before?.Type ?? Token.TokenType.None) & PreviousType) == Token.TokenType.NA)
            {
                return(TokenizerResult.Failure);
            }

            // TODO don't consume it here
            tracker.Consume(1); // First Char is guaranteed to be Symbol by Selector

            string text    = "";
            bool   escaped = false;

            while (true)
            {
                if (!tracker.Read(out char c))
                {
                    tracker.AddError("String not closed", 2, new TextRegion(tracker.InitialIndex));
                    tracker.Consume(1); // TODO don't consume it here
                    return(TokenizerResult.Failure);
                }
                tracker.Consume(1); // TODO don't consume it here

                if (escaped)
                {
                    text   += c;
                    escaped = false;
                    continue;
                }
                else if (c == Symbol)
                {
                    Token t = new Token(new TextRegion(tracker.InitialIndex, tracker.Index), text, Name, Type);

                    tracker.AddToken(t);

                    return(TokenizerResult.Success);
                }
                else if (c == escapeChar)
                {
                    escaped = true;
                }
                else
                {
                    text += c;
                }
            }
        }
Example #5
0
        public TokenizerResult Tokenize(ITracker tracker)
        {
            ConstructToken token;

            using (var point = new ResetPoint(tracker))
                token = TokenizeConstruct(tracker);

            if (token is null)
            {
                return(TokenizerResult.Failure);
            }
            else
            {
                tracker.AddToken(token, token.Range.End - tracker.Index);
                return(TokenizerResult.Success);
            }
        }
Example #6
0
        public TokenizerResult Tokenize(ITracker tracker)
        {
            int count = 0;

            while (tracker.ReadWithOffset(count, out c) && range.Contains(c))
            {
                if (c == '\n')                  // Newline is used as Seperator
                {
                    break;
                }

                count++;
            }

            tracker.AddToken(null, count);

            return(TokenizerResult.Success);
        }
        public TokenizerResult Tokenize(ITracker tracker)
        {
            // Fail if the previoustype is not NA and the type of the previous token doesn´t match
            if (PreviousType != Token.TokenType.NA && ((tracker.Before?.Type ?? Token.TokenType.None) & PreviousType) == Token.TokenType.NA)
            {
                tracker.AddError("'" + (tracker.Before?.ToString() ?? "") + "' Can´t appear in front of " + Type, 1);
                return(TokenizerResult.Failure);
            }

            if (PreviousNames != null && PreviousNames.Length != 0 && !PreviousNames.Contains(tracker.Before?.Name))
            {
                tracker.AddError("'" + (tracker.Before?.ToString() ?? "") + "' Can´t appear in front of " + Type, 1);
                return(TokenizerResult.Failure);
            }


            Match Match = Regex.Match(tracker.Remaining);

            if (!Match.Success)
            {
                return(TokenizerResult.Failure);
            }

            if (Match.Index != 0)
            {
                return(TokenizerResult.Failure);
            }

            if (Match.Length == 0)
            {
                return(TokenizerResult.Failure);
            }

            string consumed = Match.Value;

            var t = new RegexToken(new TextRegion(tracker.Index, tracker.Index + consumed.Length), consumed, (Match.Groups as IEnumerable <Group>).Select(g => g.Value).ToArray(), Name, Type);

            tracker.AddToken(t, consumed.Length);

            return(TokenizerResult.Success);
        }