Example #1
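The helper below adds a TokenizerFiber to a run list, doubling the backing array when it is full. _Jmp instructions are expanded immediately by enqueuing a fiber for each jump target, and _Save instructions record the current input position in a copy of the capture slots before the following instruction is enqueued, so only consuming instructions remain queued.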
        static void _EnqueueFiber(ref int lcount, ref TokenizerFiber[] l, TokenizerFiber t, int sp)
        {
            // Grow the fiber list if it is full.
            if (l.Length <= lcount)
            {
                TokenizerFiber[] newarr = new TokenizerFiber[l.Length * 2];
                System.Array.Copy(l, 0, newarr, 0, l.Length);
                l = newarr;
            }
            l[lcount] = t;
            ++lcount;
            int[] pc = t.Program[t.Index];
            int op = pc[0];

            if (TokenizerEnumerator._Jmp == op)
            {
                // Jump: enqueue a fiber for every jump target so all alternatives run in lockstep.
                for (int j = 1; j < pc.Length; ++j)
                {
                    TokenizerEnumerator._EnqueueFiber(ref lcount, ref l, new TokenizerFiber(t.Program, pc[j], t.Saved), sp);
                }
                return;
            }
            if (TokenizerEnumerator._Save == op)
            {
                // Save: copy the capture slots, record the current input position in the
                // requested slot, and continue with the next instruction.
                int slot = pc[1];
                int max = t.Saved.Length;
                if (slot > max)
                {
                    max = slot;
                }
                int[] saved = new int[max];
                for (int i = 0; i < t.Saved.Length; ++i)
                {
                    saved[i] = t.Saved[i];
                }
                saved[slot] = sp;
                TokenizerEnumerator._EnqueueFiber(ref lcount, ref l, new TokenizerFiber(t, t.Index + 1, saved), sp);
                return;
            }
        }
Example #2
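The _Lex method below runs the compiled program over the input in lockstep: it decodes one UTF-32 code point at a time (handling surrogate pairs), steps every live fiber against it, and swaps the current and next fiber lists until no fibers remain, then reports the last recorded match.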
        int _Lex()
        {
            this._capture.Clear();
            // Prime the input source if iteration has not started yet.
            if (this._state == TokenizerEnumerator._BeforeBegin)
            {
                this._MoveNextInput();
            }
            int i;
            int match = -1;

            TokenizerFiber[] tmp;
            int currentFiberCount = 0;
            int nextFiberCount = 0;

            int[] pc;
            int sp = 0;

            // Seed the run list with a single fiber at the start of the program.
            int[] saved = new int[2];
            TokenizerEnumerator._EnqueueFiber(ref currentFiberCount, ref this._currentFibers, new TokenizerFiber(this._program, 0, saved), 0);
            int[] matched = null;
            int cur = -1;

            // Decode the current UTF-32 code point, combining surrogate pairs; -1 marks end of input.
            if (TokenizerEnumerator._EndOfInput != this._ch)
            {
                char ch1 = (char)this._ch;
                if (char.IsHighSurrogate(ch1))
                {
                    if (!this._MoveNextInput())
                    {
                        throw new IOException(string.Format("Expecting low surrogate in Unicode stream. The input source is corrupt or not valid Unicode at line {0}, column {1}, position {2}", this._line, this._column, this._position));
                    }
                    this._column = this._column - 1;
                    char ch2 = (char)this._ch;
                    cur = char.ConvertToUtf32(ch1, ch2);
                }
                else
                {
                    cur = ch1;
                }
            }
            else
            {
                cur = -1;
            }
            // Step the machine one code point at a time until no fibers remain.
            while (0 < currentFiberCount)
            {
                bool passed = false;
                for (i = 0; i < currentFiberCount; ++i)
                {
                    TokenizerFiber t = this._currentFibers[i];
                    pc = t.Program[t.Index];
                    saved = t.Saved;
                    int op = pc[0];
                    if (TokenizerEnumerator._Char == op)
                    {
                        // Match a single literal code point.
                        if (cur == pc[1])
                        {
                            passed = true;
                            TokenizerEnumerator._EnqueueFiber(ref nextFiberCount, ref this._nextFibers, new TokenizerFiber(t, t.Index + 1, saved), sp + 1);
                        }
                    }
                    else if (TokenizerEnumerator._Set == op)
                    {
                        // Match any code point inside the instruction's range set.
                        if (TokenizerEnumerator._InRanges(pc, cur))
                        {
                            passed = true;
                            TokenizerEnumerator._EnqueueFiber(ref nextFiberCount, ref this._nextFibers, new TokenizerFiber(t, t.Index + 1, saved), sp + 1);
                        }
                    }
                    else if (TokenizerEnumerator._NSet == op)
                    {
                        // Match any code point outside the range set, but never past end of input.
                        if (!TokenizerEnumerator._InRanges(pc, cur) && TokenizerEnumerator._EndOfInput != this._ch)
                        {
                            passed = true;
                            TokenizerEnumerator._EnqueueFiber(ref nextFiberCount, ref this._nextFibers, new TokenizerFiber(t, t.Index + 1, saved), sp + 1);
                        }
                    }
                    else if (TokenizerEnumerator._UCode == op)
                    {
                        // Match a code point whose Unicode category equals the operand.
                        string str = char.ConvertFromUtf32(cur);
                        if (((int)char.GetUnicodeCategory(str, 0)) == pc[1])
                        {
                            passed = true;
                            TokenizerEnumerator._EnqueueFiber(ref nextFiberCount, ref this._nextFibers, new TokenizerFiber(t, t.Index + 1, saved), sp + 1);
                        }
                    }
                    else if (TokenizerEnumerator._NUCode == op)
                    {
                        // Match a code point whose Unicode category differs from the operand, but never past end of input.
                        string str = char.ConvertFromUtf32(cur);
                        if (((int)char.GetUnicodeCategory(str, 0)) != pc[1] && TokenizerEnumerator._EndOfInput != this._ch)
                        {
                            passed = true;
                            TokenizerEnumerator._EnqueueFiber(ref nextFiberCount, ref this._nextFibers, new TokenizerFiber(t, t.Index + 1, saved), sp + 1);
                        }
                    }
                    else if (TokenizerEnumerator._Any == op)
                    {
                        // Match any code point except end of input.
                        if (TokenizerEnumerator._EndOfInput != this._ch)
                        {
                            passed = true;
                            TokenizerEnumerator._EnqueueFiber(ref nextFiberCount, ref this._nextFibers, new TokenizerFiber(t, t.Index + 1, saved), sp + 1);
                        }
                    }
                    else if (TokenizerEnumerator._Match == op)
                    {
                        // Record the match and stop scanning lower-priority fibers for this step.
                        matched = saved;
                        match = pc[1];
                        i = currentFiberCount;
                    }
                }
                if (passed)
                {
                    // At least one fiber consumed the code point: capture it, advance the input,
                    // and decode the next code point. sp is advanced once per UTF-16 code unit
                    // consumed, so a surrogate pair advances it by two.
                    this._capture.Append(char.ConvertFromUtf32(cur));
                    this._MoveNextInput();
                    if (TokenizerEnumerator._EndOfInput != this._ch)
                    {
                        char ch1 = (char)this._ch;
                        if (char.IsHighSurrogate(ch1))
                        {
                            if (!this._MoveNextInput())
                            {
                                throw new IOException(string.Format("Expecting low surrogate in Unicode stream. The input source is corrupt or not valid Unicode at line {0}, column {1}, position {2}", this._line, this._column, this._position));
                            }
                            this._column = this._column - 1;
                            sp = sp + 1;
                            char ch2 = (char)this._ch;
                            cur = char.ConvertToUtf32(ch1, ch2);
                        }
                        else
                        {
                            cur = ch1;
                        }
                    }
                    else
                    {
                        cur = -1;
                    }
                    sp = sp + 1;
                }
                // Swap the current and next fiber lists for the next step.
                tmp = this._currentFibers;
                this._currentFibers = this._nextFibers;
                this._nextFibers = tmp;
                currentFiberCount = nextFiberCount;
                nextFiberCount = 0;
            }
            // Report the last recorded match: slots 0 and 1 hold the start and end offsets into the capture buffer.
            if (null != matched)
            {
                int start = matched[0];
                int end = matched[1];
                this._value = this._capture.ToString(start, end - start);
                return match;
            }
            return -1;
        }
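In both examples the saved slots hold offsets into the capture buffer. When a _Match instruction is reached, _Lex stores the text between slots 0 and 1 in this._value and returns the symbol id carried in the instruction's operand, or -1 when no fiber reaches a match.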