Example #1
        public TokenSet Lex(string code)
        {
            // Phase 1: split the raw source string into tokens.
            Debug.Log("[Lexer] Starting first phase Tokenization!");
            Stopwatch stopwatch = Stopwatch.StartNew();
            TokenSet set = m_TokenNizer.Tokenize(code);
            stopwatch.Stop();
            Debug.Log($"[Lexer] Done! {stopwatch.ElapsedMilliseconds}ms elapsed!");

            // Phase 2: run the token replacer over the set to refine token types.
            Debug.Log("[Lexer] Starting second phase Tokenization!");
            stopwatch = Stopwatch.StartNew();
            set = m_TokenReplacer.Process(set);
            stopwatch.Stop();
            Debug.Log($"[Lexer] Done! {stopwatch.ElapsedMilliseconds}ms elapsed!");

            // Phase 3: parse the flat token set into structure tokens.
            Debug.Log("[Grammar] Starting first phase structuration!");
            stopwatch = Stopwatch.StartNew();
            StructureToken[] stokens = m_Structurizer.Parse(set);
            stopwatch.Stop();
            Debug.Log($"[Grammar] Done! {stopwatch.ElapsedMilliseconds}ms elapsed!");

            // Dump both results for inspection before returning.
            Debug.Show(set.ToString(), System.ConsoleColor.Cyan);
            for (int i = 0; i < stokens.Length; i++)
            {
                Debug.Show(stokens[i].ToString(), System.ConsoleColor.DarkGreen);
            }
            Debug.Pause();

            return set;
        }
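The method above times each lexing phase with System.Diagnostics.Stopwatch and logs the elapsed milliseconds between phases. Below is a minimal, self-contained sketch of that timing pattern; the Timed helper, the stand-in phases, and the Console logging are illustrative assumptions, not part of the project's Tokenize/Process/Parse API or its Debug wrapper.

using System;
using System.Diagnostics;

static class PhaseTimingSketch
{
    // Runs one phase, logs how long it took, and returns its result.
    static T Timed<T>(string label, Func<T> phase)
    {
        Stopwatch stopwatch = Stopwatch.StartNew();
        T result = phase();
        stopwatch.Stop();
        Console.WriteLine($"[{label}] Done! {stopwatch.ElapsedMilliseconds}ms elapsed!");
        return result;
    }

    static void Main()
    {
        // Hypothetical stand-in phases; the real code calls Tokenize, Process and Parse.
        string[] tokens     = Timed("Lexer",   () => "a b c".Split(' '));
        string[] structured = Timed("Grammar", () => Array.ConvertAll(tokens, t => t.ToUpper()));
        Console.WriteLine(string.Join(" ", structured));
    }
}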
Example #2
 public bool GetAllOfType(string type, out TokenSet tokens)
 {
     tokens = null;
     // The internal dictionary is keyed by the hash code of the token type name.
     if (m_Dictionary.TryGetValue(type.GetHashCode(), out List<Token> list))
     {
         tokens = new TokenSet(list.ToArray());
         return true;
     }
     return false;
 }
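GetAllOfType follows the standard Try pattern: success is reported through the bool return value and the result comes back through an out parameter, so callers can branch without exceptions. A small, self-contained sketch of the same lookup shape is shown below; the string-keyed dictionary and sample data are assumptions standing in for the project's Token and TokenSet types.

using System;
using System.Collections.Generic;

class TryLookupSketch
{
    // Groups of values indexed by a category name, mirroring the token dictionary above.
    readonly Dictionary<string, List<string>> m_Groups = new Dictionary<string, List<string>>
    {
        ["Word"]   = new List<string> { "if", "else" },
        ["Number"] = new List<string> { "42" }
    };

    // Same Try pattern: true + populated array on a hit, false + null on a miss.
    public bool GetAllOfType(string type, out string[] values)
    {
        values = null;
        if (m_Groups.TryGetValue(type, out List<string> list))
        {
            values = list.ToArray();
            return true;
        }
        return false;
    }

    static void Main()
    {
        var sketch = new TryLookupSketch();
        if (sketch.GetAllOfType("Word", out string[] words))
            Console.WriteLine(string.Join(", ", words));   // prints: if, else
    }
}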
Example #3
        public TokenSet Tokenize(string code)
        {
            // A trailing newline ensures the scanner can terminate the final token.
            m_Code = code + "\n";

            // Pull tokens one at a time until the scanner runs out of input.
            while (CreateNextToken(out Token token))
            {
                m_Tokens.Add(token);

                // When a token raises a search flag, locate and append the matching token as well.
                if (!string.IsNullOrEmpty(m_SearchFlag))
                {
                    m_Tokens.Add(FindNextTokenOf(m_SearchFlag, token));
                }
            }

            TokenSet set = new TokenSet(m_Tokens.ToArray());

            // Clear scanner state so the tokenizer instance can be reused.
            Reset();
            return set;
        }
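The overall shape of Tokenize, collect tokens in a loop until the scanner reports no more input, snapshot them into an immutable set, then reset the scanner state, can be reproduced in a few lines. The sketch below is a deliberately simplified stand-in that treats any run of non-whitespace characters as a token; CreateNextToken's real rules, the search flag, and the actual Token type are not modeled.

using System;
using System.Collections.Generic;

class TokenizeLoopSketch
{
    string m_Code;
    int    m_Position;
    readonly List<string> m_Tokens = new List<string>();

    public string[] Tokenize(string code)
    {
        m_Code = code + "\n";           // trailing newline terminates the last token

        while (CreateNextToken(out string token))
            m_Tokens.Add(token);

        string[] set = m_Tokens.ToArray();
        Reset();                        // allow the tokenizer instance to be reused
        return set;
    }

    // Very small stand-in scanner: a "token" is any run of non-whitespace characters.
    bool CreateNextToken(out string token)
    {
        while (m_Position < m_Code.Length && char.IsWhiteSpace(m_Code[m_Position]))
            m_Position++;

        int start = m_Position;
        while (m_Position < m_Code.Length && !char.IsWhiteSpace(m_Code[m_Position]))
            m_Position++;

        token = m_Code.Substring(start, m_Position - start);
        return token.Length > 0;
    }

    void Reset()
    {
        m_Position = 0;
        m_Tokens.Clear();
    }

    static void Main()
    {
        var lexer = new TokenizeLoopSketch();
        Console.WriteLine(string.Join(" | ", lexer.Tokenize("let x = 1")));  // let | x | = | 1
    }
}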
Example #4
 public TokenSet Process(TokenSet set)
 {
     // For every word definition in the lexer config, retype any token whose value matches a defined key.
     DataContainer[] wordDef = lexConfig.WordDefinitions.All;
     for (int i = 0; i < wordDef.Length; i++)
     {
         if (set.GetAllOfType(wordDef[i].Name, out TokenSet tokens))
         {
             for (int j = 0; j < tokens.Length; j++)
             {
                 for (int k = 0; k < wordDef[i].Length; k++)
                 {
                     if (tokens[j].Value == wordDef[i][k].key)
                     {
                         tokens[j].Type = wordDef[i][k].value;
                     }
                 }
             }
         }
     }

     // Refresh the set so its type-based lookups reflect the new token types.
     set.Refresh();
     return set;
 }
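Process compares every token of a given type against every entry of the matching word definition, which is quadratic in the worst case. The same value-to-type remapping is often expressed with a per-category dictionary lookup instead of the inner scan; the sketch below shows that alternative under stated assumptions, with simple stand-in types rather than the project's Token, TokenSet and DataContainer classes.

using System;
using System.Collections.Generic;

class Token
{
    public string Type;
    public string Value;
}

static class TokenReplacerSketch
{
    // Remaps token types using a per-category lookup table instead of nested loops.
    static void Process(IEnumerable<Token> tokens,
                        Dictionary<string, Dictionary<string, string>> wordDefinitions)
    {
        foreach (Token token in tokens)
        {
            if (wordDefinitions.TryGetValue(token.Type, out var mapping) &&
                mapping.TryGetValue(token.Value, out string newType))
            {
                token.Type = newType;
            }
        }
    }

    static void Main()
    {
        var tokens = new List<Token>
        {
            new Token { Type = "Word", Value = "if" },
            new Token { Type = "Word", Value = "banana" }
        };

        // "Word" tokens whose value is "if" become "Keyword" tokens.
        var definitions = new Dictionary<string, Dictionary<string, string>>
        {
            ["Word"] = new Dictionary<string, string> { ["if"] = "Keyword" }
        };

        Process(tokens, definitions);
        Console.WriteLine($"{tokens[0].Value}: {tokens[0].Type}");   // if: Keyword
        Console.WriteLine($"{tokens[1].Value}: {tokens[1].Type}");   // banana: Word
    }
}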