Example #1
        InstanceComplex AddComplexInstance(uint EntityInstanceName)
        {
            InstanceComplex aInstance = new InstanceComplex(EntityInstanceName);
            Token           aToken    = Lexer.Tokenizer(DataStream);

            // Each keyword token begins another simple record inside the complex instance.
            while (aToken.TokenTypeValue == TokenType.KEYWORD)
            {
                aInstance.SimpleRecordList.Add(AddSimpleRecord(aInstance, aToken.StringValue));
                aToken = Lexer.Tokenizer(DataStream);
            }
            return aInstance;
        }
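For context: in ISO 10303-21, a complex entity instance is written as "#42=(KEYWORD1(...)KEYWORD2(...));", so the loop above turns each keyword into one SimpleRecord entry. The sketch below shows the shape of the supporting types the method appears to rely on; only the members referenced above are taken from the example, and everything else (the common Instance base class, accessibility, initialisation) is an assumption for illustration. It needs System.Collections.Generic for List<T>.

        // Minimal, hypothetical sketch of the container types assumed by AddComplexInstance.
        // Only the members referenced in the example are known from the excerpt.
        abstract class Instance { }

        class SimpleRecord { /* keyword + parameter list; details not shown in the excerpt */ }

        class InstanceComplex : Instance
        {
            public uint EntityInstanceName { get; private set; }
            public List<SimpleRecord> SimpleRecordList { get; private set; }

            public InstanceComplex(uint entityInstanceName)
            {
                EntityInstanceName = entityInstanceName;
                SimpleRecordList   = new List<SimpleRecord>();
            }
        }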
Example #2
        /// <summary>
        /// Dispatches a single token according to the current parser status.
        /// </summary>
        /// <param name="TokenToAdd">The token to process.</param>
        void ProcessToken(Token TokenToAdd)
        {
            switch (TokenToAdd.TokenTypeValue)
            {
            case TokenType.ISO1030321:
                Lexer.Tokenizer(DataStream);
                currStatus = ParserStatus.ISO;
                break;

            case TokenType.ISO1030321END:
                Lexer.Tokenizer(DataStream);
                currStatus = ParserStatus.READY;
                break;

            case TokenType.HEADER:
                Lexer.Tokenizer(DataStream);
                //InstanceData.aHeaderSection = new HeaderSection();
                currStatus = ParserStatus.HEADER;
                break;

            case TokenType.ENDSEC:
                Lexer.Tokenizer(DataStream);
                currStatus = ParserStatus.ISO;
                break;

            case TokenType.ENTITYINSTANCENAME:
                // An entity instance name ("#123") must be followed by "=" and then either a
                // keyword (simple record) or "(" (start of a complex record).
                switch (currStatus)
                {
                case ParserStatus.DATA:
                    Token oneToken = Lexer.Tokenizer(DataStream);
                    if (oneToken.TokenTypeValue != TokenType.EQUAL)
                    {
                        Console.WriteLine("Syntax Error in Entity Instance : #" + TokenToAdd.IntegerValue.ToString());
                        return;
                    }
                    oneToken = Lexer.Tokenizer(DataStream);
                    if (oneToken.TokenTypeValue == TokenType.KEYWORD)
                    {
                        SortedDictionary <uint, Instance> toAdd = DataList;
                        if (toAdd.ContainsKey((uint)TokenToAdd.IntegerValue))
                        {
                            // the instance was already created by another caller
                            //P21Instance a =   toAdd[(uint)TokenToAdd.IntegerValue];
                            InstanceSimple aSimpleInstance = (InstanceSimple)toAdd[(uint)TokenToAdd.IntegerValue];
                            ModifySimpleInstanceRecord(aSimpleInstance, oneToken.StringValue);
                        }
                        else
                        {
                            // first time this entity instance name is parsed
                            InstanceSimple aSimpleInstance = AddSimpleInstance((uint)TokenToAdd.IntegerValue, oneToken.StringValue);
                            toAdd.Add((uint)TokenToAdd.IntegerValue, aSimpleInstance);
                        }

                        // add the instance to InstanceByType, keyed by its entity type keyword
                        if (InstanceByType.ContainsKey(oneToken.StringValue))
                        {
                            List <uint> existingList = InstanceByType[oneToken.StringValue];
                            existingList.Add((uint)TokenToAdd.IntegerValue);
                        }
                        else
                        {
                            List <uint> newList = new List <uint>();
                            newList.Add((uint)TokenToAdd.IntegerValue);
                            InstanceByType.Add(oneToken.StringValue, newList);
                        }
                    }
                    else if (oneToken.TokenTypeValue == TokenType.LEFTPARENTHESIS)
                    {
                        // need update for complex type
                        SortedDictionary <uint, Instance> toAdd = DataList;
                        InstanceComplex aComplexInstance        = AddComplexInstance((uint)TokenToAdd.IntegerValue);
                        if (toAdd.ContainsKey((uint)TokenToAdd.IntegerValue))
                        {
                            toAdd[(uint)TokenToAdd.IntegerValue] = aComplexInstance;
                        }
                        else
                        {
                            toAdd.Add((uint)TokenToAdd.IntegerValue, aComplexInstance);
                        }
                    }
                    else
                    {
                        Console.WriteLine("Syntax Error at value defintion in Entity Instance : #" + TokenToAdd.IntegerValue.ToString());
                        return;
                    }
                    break;

                default:
                    Console.WriteLine("Parser Error : Not defined parsing condition in Entity Instance Name : " + currStatus);
                    break;
                }
                break;

            case TokenType.KEYWORD:
                switch (currStatus)
                {
                case ParserStatus.HEADER:
                    SimpleRecord aSimpleRecord = AddSimpleRecord(null, TokenToAdd.StringValue);
                    HeaderList.Add(aSimpleRecord);

                    Lexer.Tokenizer(DataStream);         // consumes the trailing semicolon of the header instance
                    break;

                case ParserStatus.DATA:
                    Console.WriteLine("Syntax Error : Instance without Entity Instance Name");
                    break;

                default:
                    Console.WriteLine("Parser Error : Not defined parsing condition in Keyword");
                    break;
                }
                break;

            case TokenType.DATA:
                Lexer.Tokenizer(DataStream);
                currStatus = ParserStatus.DATA;

                //theDataSet.aDataSectionList.Add(new DataSection());
                break;

            default:
                Console.WriteLine("not defined condition : " + TokenToAdd.TokenTypeValue.ToString());
                break;
            }
        }
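For completeness, a sketch of a top-level loop that could drive ProcessToken. The method name Parse and the end-of-stream token type (called TokenType.EOF here) are assumptions, since neither appears in the excerpt.

        // Hypothetical driver loop: pull tokens until the lexer reports end of input.
        void Parse()
        {
            Token aToken = Lexer.Tokenizer(DataStream);
            while (aToken.TokenTypeValue != TokenType.EOF)   // assumed end-of-stream marker
            {
                ProcessToken(aToken);
                aToken = Lexer.Tokenizer(DataStream);
            }
        }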