예제 #1
0
        /// <summary>
        /// Consumes the optional ABSTRACT / SUPERTYPE OF / SUBTYPE OF clauses
        /// of an entity header: sets IsAbstract when ABSTRACT is present and
        /// delegates the two OF-lists to ProcessSupertypeOf / ProcessSubtypeOf.
        /// </summary>
        /// <param name="oneToken">Current look-ahead token (ABSTRACT, SUPERTYPE or SUBTYPE).</param>
        /// <param name="aEntity">Entity being populated.</param>
        /// <returns>The next token from the stream after the processed clauses.</returns>
        private SToken ProcessSuperSub(SToken oneToken, SEntity aEntity)
        {
            if (oneToken.TokenType == STokenType.ABSTRACT)
            {
                aEntity.IsAbstract = true;
                //get new token for next step
                oneToken = SLexer.Tokenizer(_dataStream);
            }

            if (oneToken.TokenType == STokenType.SUPERTYPE)
            {   // remove OF token after supertype token
                oneToken = SLexer.Tokenizer(_dataStream);
                ProcessSupertypeOf(aEntity);
                //get new token for next step
                oneToken = SLexer.Tokenizer(_dataStream);
            }

            if (oneToken.TokenType == STokenType.SUBTYPE)
            {   // remove of token after subtype token
                oneToken = SLexer.Tokenizer(_dataStream);
                ProcessSubtypeOf(aEntity);
                //get new token for next step
                oneToken = SLexer.Tokenizer(_dataStream);
            }

            // NOTE(review): the token last read above is discarded here and a
            // fresh one is returned — presumably the discarded token is the
            // clause's trailing semicolon; confirm against the grammar.
            return(SLexer.Tokenizer(_dataStream));
        }
예제 #2
0
        /// <summary>
        /// Parses one TYPE declaration: reads the type name, delegates the body
        /// to ProcessTypeSub, skips an optional WHERE clause and any remaining
        /// tokens up to END_TYPE, then registers the type in the schema set.
        /// </summary>
        private void ProcessType()
        {
            SToken token = SLexer.Tokenizer(_dataStream);

            if (token.TokenType != STokenType.SIMPLEID)
            {
                throw new Exception("Syntax error in Type Name Definition");
            }

            SType parsedType = ProcessTypeSub(token);

            token = SLexer.Tokenizer(_dataStream);
            if (token.TokenType == STokenType.WHERE)
            {
                token = DummySubLoop();
            }

            // drain anything else until END_TYPE is reached
            while (token.TokenType != STokenType.END_TYPE)
            {
                IgnoredToken(token);
                token = SLexer.Tokenizer(_dataStream);
            }

            CleanUpSemiColon();
            SchemaSet.TypeList.Add(parsedType.Name, parsedType);
        }
예제 #3
0
        /// <summary>
        /// Parses the parenthesized entity-name list of a "SUBTYPE OF (a, b, ...)"
        /// clause, registering every referenced entity in
        /// <paramref name="aEntity"/>.SubTypesOf.
        /// </summary>
        /// <param name="aEntity">Entity whose SubTypesOf list is populated.</param>
        private void ProcessSubtypeOf(SEntity aEntity)
        {
            SToken oneToken = SLexer.Tokenizer(_dataStream);

            if (oneToken.TokenType != STokenType.LEFTPARENTHESIS)
            {
                throw new Exception("Syntax Error : Subtype Definition (");
            }
            oneToken = SLexer.Tokenizer(_dataStream);
            if (oneToken.TokenType != STokenType.SIMPLEID)
            {
                throw new Exception("Syntax Error : Subtype Definition simpleId");
            }
            aEntity.SubTypesOf.Add(getEntity(oneToken.StringValue));
            oneToken = SLexer.Tokenizer(_dataStream);
            while (oneToken.TokenType != STokenType.RIGHTPARENTHESIS)
            {
                if (oneToken.TokenType != STokenType.COMMA)
                {
                    throw new Exception("Syntax Error : Subtype Definition ,");
                }
                oneToken = SLexer.Tokenizer(_dataStream);
                if (oneToken.TokenType != STokenType.SIMPLEID)
                {
                    throw new Exception("Syntax Error : Subtype Definition simpleid2");
                }
                // BUG FIX: entities after the first comma were added to
                // SuperTypesOf (copy-paste from the supertype parser); a
                // SUBTYPE OF clause must populate SubTypesOf.
                aEntity.SubTypesOf.Add(getEntity(oneToken.StringValue));
                oneToken = SLexer.Tokenizer(_dataStream);
            }
        }
예제 #4
0
        /// <summary>
        /// Skips an entire RULE declaration: reads tokens up to END_RULE and
        /// then consumes one more token following it.
        /// </summary>
        private void ProcessRule()
        {
            SToken current = SLexer.Tokenizer(_dataStream);

            while (current.TokenType != STokenType.END_RULE)
            {
                current = SLexer.Tokenizer(_dataStream);
            }

            // consume the token after END_RULE
            SLexer.Tokenizer(_dataStream);
        }
예제 #5
0
        /// <summary>
        /// Skips an entire FUNCTION declaration: reads tokens up to
        /// END_FUNCTION and then consumes one more token following it.
        /// (Method name kept as-is — it is referenced by the parser loop.)
        /// </summary>
        private void ProcessFuction()
        {
            SToken current = SLexer.Tokenizer(_dataStream);

            while (current.TokenType != STokenType.END_FUNCTION)
            {
                current = SLexer.Tokenizer(_dataStream);
            }

            // consume the token after END_FUNCTION
            SLexer.Tokenizer(_dataStream);
        }
예제 #6
0
        /// <summary>
        /// Reads the next token and verifies it is a semicolon; throws with the
        /// lexer's current row/column otherwise.
        /// </summary>
        private void CleanUpSemiColon()
        {
            SToken terminator = SLexer.Tokenizer(_dataStream);

            if (terminator.TokenType == STokenType.SEMICOLON)
            {
                return;
            }

            throw new Exception("Syntax error : " + SLexer.CurrentRow.ToString() + " : " + SLexer.CurrentColumn.ToString());
        }
예제 #7
0
        /// <summary>
        /// Parses the schema name that follows the SCHEMA keyword, stores it in
        /// SchemaSet.SchemaName, and consumes the trailing token.
        /// </summary>
        private void ProcessSchemaTitle()
        {
            SToken nameToken = SLexer.Tokenizer(_dataStream);

            if (nameToken.TokenType != STokenType.SIMPLEID)
            {
                throw new Exception("Syntax error in Schema Name Definition");
            }

            SchemaSet.SchemaName = nameToken.StringValue;

            // consume the token after the schema name
            SLexer.Tokenizer(_dataStream);
        }
예제 #8
0
        /// <summary>
        /// Skips over a run of clauses that each start with a simple identifier,
        /// consuming tokens through every terminating semicolon.
        /// </summary>
        /// <returns>The first token that does not begin such a clause.</returns>
        private SToken DummySubLoop()
        {
            SToken current = SLexer.Tokenizer(_dataStream);

            while (current.TokenType == STokenType.SIMPLEID)
            {
                // skip to the end of this clause
                do
                {
                    current = SLexer.Tokenizer(_dataStream);
                }
                while (current.TokenType != STokenType.SEMICOLON);

                // first token of the next clause (or the loop-exit token)
                current = SLexer.Tokenizer(_dataStream);
            }

            return current;
        }
예제 #9
0
        /// <summary>
        /// Parses one explicit attribute declaration ("name : type") and adds
        /// it to the entity's parameter list.
        /// </summary>
        /// <param name="oneEntity">Entity receiving the attribute.</param>
        /// <param name="AttributeName">Attribute name already read by the caller.</param>
        private void ProcessAttribute(
            SEntity oneEntity,
            string AttributeName)
        {
            var attribute = new SAttribute(AttributeName);
            var separator = SLexer.Tokenizer(_dataStream);

            if (separator.TokenType != STokenType.COLON)
            {
                throw new Exception("Error in attribute : " + SLexer.CurrentRow + " : " + SLexer.CurrentColumn);
            }

            attribute.Type = ProcessParameter();
            oneEntity.ParameterList.Add(attribute);
        }
예제 #10
0
        /// <summary>
        /// Parses the "[lbound : ubound] OF element" tail of a collection type
        /// declaration and fills in the given STypeCollection.
        /// </summary>
        /// <param name="sTypeCol">Collection type being populated.</param>
        private void collection(STypeCollection sTypeCol)
        {
            SToken Next() => SLexer.Tokenizer(_dataStream);

            Next();                                 // left bracket
            sTypeCol.LBound = getBound(Next());     // lower bound
            Next();                                 // bound separator
            sTypeCol.UBound = getBound(Next());     // upper bound
            Next();                                 // right bracket
            Next();                                 // OF keyword

            SToken     elementToken = Next();       // element type token
            SParamType sp = SParameter.TokenTypeToAttribType(elementToken.TokenType);

            switch (elementToken.TokenType)
            {
            case STokenType.BOOLEAN:
            case STokenType.REAL:
            case STokenType.INTEGER:
            case STokenType.LOGICAL:
            case STokenType.NUMBER:
            case STokenType.STRING:
            case STokenType.ARRAY:
            case STokenType.LIST:
            case STokenType.SET:
            case STokenType.BAG:
                // built-in element type
                sTypeCol.CollectionType = sp;
                sTypeCol.CollectionName = SParameter.TypeToString(sp);
                break;

            case STokenType.SIMPLEID:
                // named (user-defined) element type
                sTypeCol.CollectionName = elementToken.StringValue;
                break;

            default:
                if (_logFile != null)
                {
                    _logFile.WriteLine(elementToken.TokenType.ToString() + " : " + elementToken.StringValue);
                }
                break;
            }
        }
예제 #11
0
        /// <summary>
        /// Reads a parenthesized, comma-separated list of simple identifiers
        /// from the stream — "( id, id, ... )" — and collects the identifier
        /// tokens into <paramref name="tokenList"/> (cleared first).
        /// Throws on any token that breaks the expected "( id , id ... )" shape.
        /// </summary>
        /// <param name="tokenList">Receives the SIMPLEID tokens found inside the parentheses.</param>
        private void GetInsideSingleParenthesis(List <SToken> tokenList)
        {
            tokenList.Clear();
            // dummy token to begin
            SToken oneToken = SLexer.Tokenizer(_dataStream);

            // counter == 0 only on the first iteration, where the current
            // token must be the opening parenthesis; afterwards each pass
            // expects a comma (or the closing parenthesis) before the next id.
            int counter = 0;

            while (oneToken.TokenType != STokenType.RIGHTPARENTHESIS)
            {
                if (counter == 0)
                {
                    if (oneToken.TokenType != STokenType.LEFTPARENTHESIS)
                    {
                        throw new Exception("Syntax error in Single Parenthesis : Type 1");
                    }
                }
                else
                {
                    oneToken = SLexer.Tokenizer(_dataStream);
                    if (oneToken.TokenType == STokenType.COMMA)
                    {
                        // comma between identifiers: fall through to read the next id
                    }
                    else if (oneToken.TokenType == STokenType.RIGHTPARENTHESIS)
                    {
                        break;
                    }
                    else
                    {
                        throw new Exception("Syntax error in Single Parenthesis : Type 2");
                    }
                }

                oneToken = SLexer.Tokenizer(_dataStream);
                if (oneToken.TokenType == STokenType.SIMPLEID)
                {
                    tokenList.Add(oneToken);
                    counter += 1;
                }
                else
                {
                    throw new Exception("Syntax error in Single Parenthesis : Type 3");
                }
            }
        }
예제 #12
0
        /// <summary>
        /// Parses one INVERSE attribute declaration and appends it to the
        /// entity's inverse list.
        /// </summary>
        /// <param name="sEntity">Entity receiving the inverse attribute.</param>
        /// <param name="sToken">Token holding the inverse attribute's name.</param>
        private void ProcessInverse(SEntity sEntity, SToken sToken)
        {
            var inverse = new SAttributeInverse(sToken.StringValue);
            var colon   = SLexer.Tokenizer(_dataStream);

            if (colon.TokenType != STokenType.COLON)
            {
                throw new Exception("Error in inverse attribute : " + SLexer.CurrentRow + " : " + SLexer.CurrentColumn);
            }

            inverse.Type = ProcessInverseParameter();

            // skip one token (keyword between the type and the attribute name)
            SLexer.Tokenizer(_dataStream);

            // referencing entity's attribute name
            var attributeToken = SLexer.Tokenizer(_dataStream);
            inverse.InversingAttributeName = attributeToken.StringValue;
            sEntity.InverseList.Add(inverse);

            // trailing semicolon
            SLexer.Tokenizer(_dataStream);
        }
예제 #13
0
        /// <summary>
        /// Parser main loop: dispatches each top-level schema token to its
        /// handler until EOF is reached. Unrecognized tokens are reported via
        /// IgnoredToken.
        /// </summary>
        private void Parse()
        {
            for (SToken token = SLexer.Tokenizer(_dataStream);
                 token.TokenType != STokenType.EOF;
                 token = SLexer.Tokenizer(_dataStream))
            {
                switch (token.TokenType)
                {
                case STokenType.SCHEMA:
                    ProcessSchemaTitle();
                    break;

                case STokenType.ENTITY:
                    ProcessEntity();
                    break;

                case STokenType.TYPE:
                    ProcessType();
                    break;

                case STokenType.FUNCTION:
                    ProcessFuction();
                    break;

                case STokenType.RULE:
                    ProcessRule();
                    break;

                case STokenType.END_SCHEMA:
                    ProcessSchemaEnd();
                    break;

                default:
                    IgnoredToken(token);
                    break;
                }
            }
        }
예제 #14
0
        /// <summary>
        /// Parses the parenthesized supertype expression of a
        /// "SUPERTYPE OF ( ... )" clause. Every simple identifier found inside
        /// the (possibly nested) parentheses is registered in
        /// <paramref name="aEntity"/>.SuperTypesOf; ONEOF keywords and commas
        /// are structural and skipped; any other token is logged.
        /// </summary>
        /// <param name="aEntity">Entity whose SuperTypesOf list is populated.</param>
        private void ProcessSupertypeOf(SEntity aEntity)
        {
            //319 supertype_constraint = abstract_entity_declaration |
            //    abstract_supertype_declaration | supertype_rule .
            //164 abstract_entity_declaration = ABSTRACT .
            //166 abstract_supertype_declaration = ABSTRACT SUPERTYPE [ subtype_constraint ] .
            //313 subtype_constraint = OF ’(’ supertype_expression ’)’ .
            //320 supertype_expression = supertype_factor { ANDOR supertype_factor } .
            //321 supertype_factor = supertype_term { AND supertype_term } .
            //323 supertype_term = entity_ref | one_of | ’(’ supertype_expression ’)’ .
            //263 one_of = ONEOF ’(’ supertype_expression { ’,’ supertype_expression } ’)’ .
            //322 supertype_rule = SUPERTYPE subtype_constraint .

            int parenthesisCounter = 0;

            SToken oneToken = SLexer.Tokenizer(_dataStream);

            if (oneToken.TokenType != STokenType.LEFTPARENTHESIS)
            {
                throw new Exception("Syntax Error : Supertype Definition (");
            }
            parenthesisCounter += 1;

            // scan until all parentheses are balanced
            while (parenthesisCounter != 0)
            {
                oneToken = SLexer.Tokenizer(_dataStream);

                switch (oneToken.TokenType)
                {
                case STokenType.LEFTPARENTHESIS:
                    parenthesisCounter += 1;
                    break;

                case STokenType.RIGHTPARENTHESIS:
                    parenthesisCounter -= 1;
                    break;

                case STokenType.ONEOF:
                case STokenType.COMMA:
                    // structural tokens — nothing to record
                    break;

                case STokenType.SIMPLEID:
                    aEntity.SuperTypesOf.Add(getEntity(oneToken.StringValue));
                    break;

                case STokenType.EOF:
                    // BUG FIX: a truncated/malformed schema previously spun
                    // forever here — EOF never balanced the parentheses and
                    // the default case only logged. Fail fast instead.
                    throw new Exception("Syntax Error : unexpected end of stream in Supertype Definition");

                default:
                    string logout = String.Format("Undefined supertype definition at row : {0}, column : {1}",
                                                  SLexer.CurrentRow,
                                                  SLexer.CurrentColumn);
                    if (_logFile != null)
                    {
                        _logFile.WriteLine(logout);
                    }
                    break;
                }
            }
        }
예제 #15
0
        /// <summary>
        /// not a complete derive implementation
        /// intended to identify omitted value in instance population
        /// Parses one DERIVE entry. The SELF branch handles the
        /// "SELF\SuperType.attr : type" form (an inherited attribute being
        /// re-derived); the other branch handles a plain "name : type" derived
        /// attribute. In both cases the parsed attribute is added to
        /// oneEntity.DerivedList and the remainder of the statement is skipped
        /// up to its semicolon.
        /// </summary>
        /// <param name="oneEntity">Entity receiving the derived attribute.</param>
        /// <param name="mToken">First token of the entry (SELF or the attribute name).</param>
        private void ProcessDerivedAttribute(
            SEntity oneEntity,
            SToken mToken)
        {
            SAttributeDerived drvAtt;

            //SParam sParam;

            if (mToken.TokenType == STokenType.SELF)
            {
                //drvAtt = new SAttributeDerived(AttribType.DERIVED);
                // defines self drived attribute

                // reverse_solidus
                mToken = SLexer.Tokenizer(_dataStream);

                // super entity
                mToken = SLexer.Tokenizer(_dataStream);
                if (mToken.TokenType != STokenType.SIMPLEID)
                {
                    throw new InvalidDataException(string.Format(
                                                       "schema is not in syntax at {0} : {1}",
                                                       oneEntity.Name, mToken.TokenType.ToString()));
                }

                string orgSuperTypeName = mToken.StringValue;

                // period
                mToken = SLexer.Tokenizer(_dataStream);
                // attribute name
                mToken = SLexer.Tokenizer(_dataStream);

                string sAttName = mToken.StringValue;

                drvAtt = new SAttributeDerived(sAttName);
                drvAtt.OriginatingSuperType = orgSuperTypeName;
                drvAtt.Name        = sAttName;
                drvAtt.isInherited = true;

                // colon
                mToken      = SLexer.Tokenizer(_dataStream);
                drvAtt.Type = ProcessParameter();
                oneEntity.DerivedList.Add(drvAtt);
            }
            else //if (mToken.TokenType == SchemaTokenType.SIMPLEID)
            {
                string sAttName = mToken.StringValue;

                drvAtt             = new SAttributeDerived(sAttName);
                drvAtt.Name        = sAttName;
                drvAtt.isInherited = false;

                // colon between name and type (not validated here)
                mToken = SLexer.Tokenizer(_dataStream);

                drvAtt.Type = ProcessParameter();
                oneEntity.DerivedList.Add(drvAtt);
            }

            //mToken = SLexer.Tokenizer(DataStream);
            //oneEntity.AttributeList.Add(oneAttribute);
            // NOTE(review): mToken here is stale (last read before
            // ProcessParameter); the loop below drains the derive expression
            // up to its semicolon — presumably ProcessParameter never leaves
            // the stream already past it. Confirm against ProcessParameter.
            while (mToken.TokenType != STokenType.SEMICOLON)
            {
                mToken = SLexer.Tokenizer(_dataStream);
            }
        }
예제 #16
0
        /// <summary>
        /// Parses the type portion of an INVERSE attribute declaration and
        /// returns the resulting parameter description. Handles the optional
        /// OPTIONAL / UNIQUE prefixes and recurses for collection element
        /// types ("SET/LIST/ARRAY/BAG [lo:hi] OF ...").
        /// </summary>
        /// <returns>The parsed parameter description.</returns>
        private SParameter ProcessInverseParameter()
        {
            SToken token = SLexer.Tokenizer(_dataStream);

            bool optionalFlag = false;
            if (token.TokenType == STokenType.OPTIONAL)
            {
                token        = SLexer.Tokenizer(_dataStream);
                optionalFlag = true;
            }

            bool uniqueFlag = false;
            if (token.TokenType == STokenType.UNIQUE)
            {
                uniqueFlag = true;
                token      = SLexer.Tokenizer(_dataStream);
            }

            SParamType paramKind = SParameter.TokenTypeToAttribType(token.TokenType);
            SParameter result;

            switch (paramKind)
            {
            case SParamType.SET:
            case SParamType.LIST:
            case SParamType.ARRAY:
            case SParamType.BAG:
            {
                var collectionParam = new SParamCollection(paramKind);

                SLexer.Tokenizer(_dataStream);                                    // left bracket
                collectionParam.LBound = getBound(SLexer.Tokenizer(_dataStream)); // lower bound
                SLexer.Tokenizer(_dataStream);                                    // bound separator
                collectionParam.UBound = getBound(SLexer.Tokenizer(_dataStream)); // upper bound
                SLexer.Tokenizer(_dataStream);                                    // right bracket
                SLexer.Tokenizer(_dataStream);                                    // OF keyword

                // element type, parsed recursively
                collectionParam.ParamReference = ProcessInverseParameter();
                result = collectionParam;
                break;
            }

            case SParamType.NAMED:
                result = new SParamNamed(token.StringValue);
                break;

            default:
                result = new SParameter(paramKind);
                break;
            }

            result.IsUnique   = uniqueFlag;
            result.IsOptional = optionalFlag;

            return result;
        }
예제 #17
0
 /// <summary>
 /// Consumes the single token that follows END_SCHEMA.
 /// </summary>
 private void ProcessSchemaEnd() => SLexer.Tokenizer(_dataStream);
예제 #18
0
        /// <summary>
        /// Parses the body of a TYPE declaration after its name, dispatching on
        /// the defining token: simple built-ins, STRING (with optional width
        /// and FIXED), a named type, a collection, an ENUMERATION, or a SELECT.
        /// </summary>
        /// <param name="oneToken">Token holding the type's name.</param>
        /// <returns>
        /// The constructed SType, or null for an unrecognized defining token
        /// (the token is logged). NOTE(review): the caller dereferences the
        /// result without a null check — a malformed type definition would
        /// NRE there.
        /// </returns>
        private SType ProcessTypeSub(SToken oneToken)
        {
            SType  ret;
            string t_TypeName = oneToken.StringValue;

            // tokenlist for syntax checking
            List <SToken> tokenList = new List <SToken>();

            oneToken = SLexer.Tokenizer(_dataStream);
            #region switch
            switch (oneToken.TokenType)
            {
            // simple built-in types: "TYPE name = REAL;" etc.
            case STokenType.BOOLEAN:
            case STokenType.REAL:
            case STokenType.INTEGER:
            case STokenType.LOGICAL:
            case STokenType.NUMBER:
                ret = new SType(t_TypeName, SParameter.TokenTypeToAttribType(oneToken.TokenType));
                CleanUpSemiColon();
                break;

            // STRING, optionally "STRING (width)" and "FIXED"
            case STokenType.STRING:
                ret      = new STypeString(t_TypeName);
                ret.Kind = SParameter.TokenTypeToAttribType(oneToken.TokenType);
                oneToken = SLexer.Tokenizer(_dataStream);
                if (oneToken.TokenType == STokenType.LEFTPARENTHESIS)
                {
                    // process width
                    oneToken = SLexer.Tokenizer(_dataStream);

                    ((STypeString)ret).StringLength = (short)oneToken.IntegerValue;
                    oneToken = SLexer.Tokenizer(_dataStream);     // right parenthesis
                    oneToken = SLexer.Tokenizer(_dataStream);

                    if (oneToken.TokenType == STokenType.FIXED)
                    {
                        // process fixed
                        ((STypeString)ret).IsFixed = true;
                        // semicolon
                        oneToken = SLexer.Tokenizer(_dataStream);
                    }
                }

                break;

            // alias of another named type
            case STokenType.SIMPLEID:
                ret      = new STypeSimple(t_TypeName, oneToken.StringValue);
                ret.Kind = SParameter.TokenTypeToAttribType(oneToken.TokenType);
                CleanUpSemiColon();
                break;

            // collection types; bounds and element type parsed by collection()
            case STokenType.ARRAY:
            case STokenType.LIST:
            case STokenType.SET:
            case STokenType.BAG:
                ret      = new STypeCollection(t_TypeName, oneToken.TokenType);
                ret.Kind = SParameter.TokenTypeToAttribType(oneToken.TokenType);
                collection((STypeCollection)ret);
                break;

            // "ENUMERATION OF ( id, id, ... )"
            case STokenType.ENUMERATION:
                ret      = new STypeEnum(t_TypeName);
                ret.Kind = SParameter.TokenTypeToAttribType(oneToken.TokenType);
                oneToken = SLexer.Tokenizer(_dataStream);
                if (oneToken.TokenType != STokenType.OF)
                {
                    throw new Exception("Syntax error in Schema Name Definition");
                }

                GetInsideSingleParenthesis(tokenList);
                foreach (SToken aToken in tokenList)
                {
                    if (aToken.TokenType == STokenType.SIMPLEID)
                    {
                        ((STypeEnum)ret).EnumList.Add(aToken.StringValue);
                    }
                    else
                    {
                        throw new Exception("Syntax error in Enumeration Type Definition : " + ret.Name);
                    }
                }
                CleanUpSemiColon();
                break;

            // "SELECT ( id, id, ... )" — referenced types resolved later (null placeholders)
            case STokenType.SELECT:
                ret      = new STypeSelect(t_TypeName);
                ret.Kind = SParameter.TokenTypeToAttribType(oneToken.TokenType);
                GetInsideSingleParenthesis(tokenList);
                foreach (SToken aToken in tokenList)
                {
                    if (aToken.TokenType == STokenType.SIMPLEID)
                    {
                        ((STypeSelect)ret).SelectList.Add(aToken.StringValue, null);
                    }
                    else
                    {
                        throw new Exception("Syntax error in Select Type Definition : " + ret.Name);
                    }
                }

                CleanUpSemiColon();

                break;


            default:
                // unrecognized defining token: log it and return null
                ret = null;
                if (_logFile != null)
                {
                    _logFile.WriteLine(oneToken.TokenType.ToString() + " : " + oneToken.StringValue);
                }
                break;
            }
            #endregion // switch
            return(ret);
        }
예제 #19
0
        /// <summary>
        /// Parses one ENTITY declaration: the entity name, the optional
        /// ABSTRACT/SUPERTYPE/SUBTYPE header, explicit attributes, and the
        /// optional DERIVE / INVERSE / UNIQUE / WHERE sections, through
        /// END_ENTITY and its trailing token.
        /// </summary>
        private void ProcessEntity()
        {
            SToken oneToken = SLexer.Tokenizer(_dataStream);

            if (oneToken.TokenType != STokenType.SIMPLEID)
            {
                throw new Exception("Syntax error in Entity Definition");
            }

            SEntity aEntity = getEntity(oneToken.StringValue);

            oneToken = SLexer.Tokenizer(_dataStream);
            if (oneToken.TokenType == STokenType.SEMICOLON)
            {   //get new token for next step
                oneToken = SLexer.Tokenizer(_dataStream);
            }

            if (oneToken.TokenType == STokenType.ABSTRACT ||
                oneToken.TokenType == STokenType.SUPERTYPE ||
                oneToken.TokenType == STokenType.SUBTYPE)
            {
                oneToken = ProcessSuperSub(oneToken, aEntity);
            }

            // explicit attributes ("name : type;"); the redundant outer
            // "if (SIMPLEID)" wrapper was removed — the while condition
            // already covers it
            while (oneToken.TokenType == STokenType.SIMPLEID)
            {
                ProcessAttribute(aEntity, oneToken.StringValue);
                oneToken = SLexer.Tokenizer(_dataStream);
            }

            if (oneToken.TokenType == STokenType.DERIVE)
            {
                // next token after DERIVE
                oneToken = SLexer.Tokenizer(_dataStream);

                while (oneToken.TokenType == STokenType.SIMPLEID || oneToken.TokenType == STokenType.SELF)
                {
                    ProcessDerivedAttribute(aEntity, oneToken);
                    oneToken = SLexer.Tokenizer(_dataStream);
                }
            }

            if (oneToken.TokenType == STokenType.INVERSE)
            {
                // next token after INVERSE
                oneToken = SLexer.Tokenizer(_dataStream);

                while (oneToken.TokenType == STokenType.SIMPLEID)
                {
                    ProcessInverse(aEntity, oneToken);
                    oneToken = SLexer.Tokenizer(_dataStream);
                }
            }

            // UNIQUE / UNDEFINED / WHERE sections are skipped, not interpreted
            if (oneToken.TokenType == STokenType.UNIQUE)
            {
                oneToken = DummySubLoop();
            }

            if (oneToken.TokenType == STokenType.UNDEFINED)
            {
                oneToken = DummySubLoop();
            }

            if (oneToken.TokenType == STokenType.WHERE)
            {
                oneToken = DummySubLoop();
            }

            if (oneToken.TokenType == STokenType.END_ENTITY)
            {
                SLexer.Tokenizer(_dataStream); // trailing semicolon
            }
        }