Example #1: BlockComment
 private static Token BlockComment(Tokenizer t)
 {
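     // Recognize a "/*...*/" block comment; "nest" tracks nesting depth so nested comments are consumed as one token.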
     int i = 0, nest = 1;
     if (!t.IsReadable(i + 1) || t.Read(i, 2) != "/*")
     {
         return Token.Empty;
     }
     for (i = 2; t.IsReadable(i + 1); i++)
     {
         // The two checks are chained with "else if" so the '/' just consumed as the end of "*/" is not re-read as the start of a new "/*".
         if (t.Read(i, 2) == "*/")
         {
             ++i;
             if (--nest == 0)
             {
                 break;
             }
         }
         else if (t.Read(i, 2) == "/*")
         {
             ++i;
             ++nest;
         }
     }
     return t.TakeToken(++i, TokenType.BlockComment);
 }
Example #2: Lex
 public static TokenCollection Lex(string text, string fileName)
 {
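     // Lex the whole input, collecting regular tokens and error tokens separately; the final position is recorded in the returned TokenCollection.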
     var tokenList = new List<Token>();
     var errorToken = new List<Token>();
     var t = new Tokenizer(text, fileName);
     while (t.IsReadable())
     {
         LexPartion(t, tokenList, errorToken);
     }
     return new TokenCollection(text, fileName, tokenList, errorToken, t.Position);
 }
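A minimal usage sketch for the Lex entry point above, assuming it is exposed on a static class; the containing type is not shown in these examples, so the name "Lexer" below is illustrative only.
 // "Lexer" is an assumed host class name; only the Lex(text, fileName) signature shown above is relied on.
 var source = "total = 1 + 2 // running total";
 TokenCollection tokens = Lexer.Lex(source, "sample.txt");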
Example #3: BuiltInExpression
 private static void BuiltInExpression(Tokenizer t, List<Token> tokenList, List<Token> errorToken)
 {
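     // Consume a "{ ... }" embedded expression, tracking brace depth so nested braces stay inside a single expression.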
     if (!t.IsReadable(0) || !t.MatchAny(0, "{"))
     {
         return;
     }
     int nest = 0;
     while (t.IsReadable())
     {
         var tt = LexPartion(t, tokenList, errorToken);
         if (tt == TokenType.LeftBrace)
         {
             ++nest;
         }
         if (tt == TokenType.RightBrace)
         {
             if (--nest == 0)
             {
                 break;
             }
         }
     }
 }
Example #4: DigitStartString
 private static Token DigitStartString(Tokenizer t)
 {
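     // A run that starts with a digit and may continue with letters, digits, or underscores (e.g. "0x1F" or "1st").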
     int i;
     for (i = 0; t.IsReadable(i); i++)
     {
         if (t.MatchRange(i, '0', '9'))
         {
             continue;
         }
         if (i > 0 && (t.MatchRange(i, 'a', 'z') || t.MatchRange(i, 'A', 'Z') || t.MatchAny(i, "_")))
         {
             continue;
         }
         break;
     }
     return t.TakeToken(i, TokenType.DigitStartString);
 }
Example #5: WhiteSpace
 private static Token WhiteSpace(Tokenizer t)
 {
     int i;
     for (i = 0; t.IsReadable(i); i++)
     {
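         // Any character in 0x00-0x20 except LF (0x0A) and CR (0x0D), plus DEL (0x7F).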
         if (t.MatchRange(i, '\x00', '\x09') || t.MatchRange(i, '\x0B', '\x0C') || t.MatchRange(i, '\x0E', '\x20') || t.MatchAny(i, "\x7F"))
         {
             continue;
         }
         break;
     }
     return t.TakeToken(i, TokenType.WhiteSpace);
 }
Example #6: StringLiteral
 private static bool StringLiteral(Tokenizer t, List<Token> tokenList, List<Token> errorToken)
 {
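     // Split a literal quoted with ', " or ` into quote separators, plain-text runs, and embedded "{...}" expressions.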
     if (!t.IsReadable(0) || !t.MatchAny(0, "\'\"`"))
     {
         return false;
     }
     string quote = t.Read(0, 1);
     bool escape = false;
     tokenList.Add(t.TakeToken(1, TokenType.QuoteSeparator));
     int i;
     for (i = 0; t.IsReadable(i); i++)
     {
         if (!escape && t.MatchAny(i, quote))
         {
             if (i > 0)
             {
                 tokenList.Add(t.TakeToken(i, TokenType.PlainText));
             }
             tokenList.Add(t.TakeToken(1, TokenType.QuoteSeparator));
             return true;
         }
         if (!escape && t.MatchAny(i, "{"))
         {
             if (i > 0)
             {
                 tokenList.Add(t.TakeToken(i, TokenType.PlainText));
             }
             BuiltInExpression(t, tokenList, errorToken);
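             // TakeToken and BuiltInExpression advanced the underlying position, so scanning restarts at offset 0 (i becomes 0 after the loop's i++).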
             i = -1;
         }
         else if (t.MatchAny(i, "\\"))
         {
             escape = !escape;
             continue;
         }
         escape = false;
     }
     tokenList.Add(t.TakeToken(i, TokenType.PlainText));
     return true;
 }
Example #7: OtherString
 private static Token OtherString(Tokenizer t)
 {
     int i;
     for (i = 0; t.IsReadable(i); i++)
     {
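         // Collect a run of characters outside the ASCII range (0x00-0x7F); stop at the first ASCII character.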
         if (!t.MatchRange(i, '\x00', '\x7F'))
         {
             continue;
         }
         break;
     }
     return t.TakeToken(i, TokenType.OtherString);
 }
Example #8: LineTerminator
 private static Token LineTerminator(Tokenizer t)
 {
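     // Accept LF, CR, or the two-character sequences LF+CR and CR+LF as a single line terminator.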
     int i = 0;
     if (t.IsReadable(i) && t.MatchAny(i, "\x0A"))
     {
         i++;
         if (t.IsReadable(i) && t.MatchAny(i, "\x0D"))
         {
             i++;
         }
     }
     else if (t.IsReadable(i) && t.MatchAny(i, "\x0D"))
     {
         i++;
         if (t.IsReadable(i) && t.MatchAny(i, "\x0A"))
         {
             i++;
         }
     }
     return t.TakeToken(i, TokenType.LineTerminator);
 }
Example #9: LineCommnet
 private static Token LineCommnet(Tokenizer t)
 {
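     // A line comment starts with "//" or a shebang-style "#!" and runs up to, but not including, the line break.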
     int i = 0;
     if (!t.IsReadable(i + 1))
     {
         return Token.Empty;
     }
     if (t.Read(i, 2) != "//" && t.Read(i, 2) != "#!")
     {
         return Token.Empty;
     }
     for (i = 2; t.IsReadable(i); i++)
     {
         if (t.MatchAny(i, "\x0A\x0D"))
         {
             break;
         }
     }
     return t.TakeToken(i, TokenType.LineCommnet);
 }
Example #10: LetterStartString
 private static Token LetterStartString(Tokenizer t)
 {
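     // An identifier-like run: letters and underscores, digits after the first character, and backslash-escaped printable characters.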
     int i;
     bool escape = false;
     for (i = 0; t.IsReadable(i); i++)
     {
         if (escape && t.MatchRange(i, '!', '~'))
         {
             escape = false;
             continue;
         }
         if (t.MatchRange(i, 'a', 'z') || t.MatchRange(i, 'A', 'Z') || t.MatchAny(i, "_"))
         {
             escape = false;
             continue;
         }
         if (i > 0 && t.MatchRange(i, '0', '9'))
         {
             escape = false;
             continue;
         }
         if (t.MatchAny(i, "\\"))
         {
             escape = !escape;
             continue;
         }
         break;
     }
     return t.TakeToken(i, TokenType.LetterStartString);
 }
Example #11: IsReadable (test)
 public void IsReadable(string text, int index, bool expected)
 {
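     // Checks that IsReadable(index) reports whether "index" lies within the readable portion of "text".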
     Tokenizer t = new Tokenizer(text, string.Empty);
     Assert.That(t.IsReadable(index), Is.EqualTo(expected));
 }