/// <summary>
/// Converts a single lexer token into a Parameter instance.
/// Recurses to collect nested aggregates when a '(' token starts a list.
/// </summary>
/// <param name="TokenToAdd">Lexer generated token to convert.</param>
/// <returns>The Parameter corresponding to the token's type and value.</returns>
Parameter GetParameter(Token TokenToAdd)
{
    Parameter aParameter;
    switch (TokenToAdd.TokenTypeValue)
    {
        case TokenType.ENTITYINSTANCENAME:
            aParameter = new ParamEntity((uint)TokenToAdd.IntegerValue, this);
            break;
        case TokenType.KEYWORD:
            // Keyword inside a parameter position is a typed/select value.
            aParameter = getSelectParameter(TokenToAdd.StringValue);
            break;
        case TokenType.DOLLAR:
            aParameter = new ParamNullValue();
            break;
        case TokenType.INTEGER:
            aParameter = new ParamInteger(TokenToAdd.IntegerValue);
            break;
        case TokenType.REAL:
            aParameter = new ParamReal(TokenToAdd.RealValue);
            break;
        case TokenType.STRING:
            aParameter = new ParamString(TokenToAdd.StringValue);
            break;
        case TokenType.ENUMERATION:
            aParameter = new ParamEnum(TokenToAdd.StringValue, this);
            break;
        case TokenType.BINARY:
            aParameter = new ParamBinary(SParamType.BINARY); // TODO: carry the actual binary payload, not just the type tag
            break;
        case TokenType.LEFTPARENTHESIS:
            // Aggregate: keep pulling tokens and converting them until the
            // matching ')' closes the list. Typed local avoids a cast per append.
            ParamList aList = new ParamList(new List<Parameter>());
            Token listToken = Lexer.Tokenizer(DataStream);
            while (listToken.TokenTypeValue != TokenType.RIGHTPARENTHESIS)
            {
                aList.ParameterList.Add(GetParameter(listToken));
                listToken = Lexer.Tokenizer(DataStream);
            }
            aParameter = aList;
            break;
        case TokenType.OMITTED:
            aParameter = new ParamOmitted();
            break;
        // Three-valued LOGICAL: TRUE = 1, FALSE = -1, UNKNOWN = 0.
        case TokenType.TRUE:
            aParameter = new ParamBoolean(1);
            break;
        case TokenType.FALSE:
            aParameter = new ParamBoolean(-1);
            break;
        case TokenType.UNKNOWN:
            aParameter = new ParamBoolean(0);
            break;
        default:
            // RIGHTPARENTHESIS, COMMA, and any unexpected token: skip it and
            // convert the next token instead. (The original RIGHTPARENTHESIS/
            // COMMA cases duplicated this default body verbatim; merged here.)
            TokenToAdd = Lexer.Tokenizer(DataStream);
            aParameter = GetParameter(TokenToAdd);
            break;
    }
    return aParameter;
}
/// <summary>
/// Process token according to late bind scheme.
/// Drives the parser state machine (READY / ISO / HEADER / DATA) and
/// creates simple or complex instance records as entity instance names
/// are encountered in the DATA section.
/// </summary>
/// <param name="TokenToAdd">Lexer generated token.</param>
/// <exception cref="FormatException">Wraps any failure during token processing.</exception>
void ProcessTokenLB(Token TokenToAdd)
{
    try
    {
        switch (TokenToAdd.TokenTypeValue)
        {
            case TokenType.ISO1030321:
                Lexer.Tokenizer(DataStream); // consume trailing ';'
                currStatus = ParserStatus.ISO;
                break;
            case TokenType.ISO1030321END:
                Lexer.Tokenizer(DataStream);
                currStatus = ParserStatus.READY;
                break;
            case TokenType.HEADER:
                Lexer.Tokenizer(DataStream);
                currStatus = ParserStatus.HEADER;
                break;
            case TokenType.ENDSEC:
                Lexer.Tokenizer(DataStream);
                currStatus = ParserStatus.ISO;
                break;
            case TokenType.ENTITYINSTANCENAME:
                switch (currStatus)
                {
                    case ParserStatus.DATA:
                        // Expect "#id = KEYWORD(...)" or "#id = ( ... )" for complex.
                        Token oneToken = Lexer.Tokenizer(DataStream);
                        if (oneToken.TokenTypeValue != TokenType.EQUAL)
                        {
                            Console.WriteLine("Syntax Error in Entity Instance : #" + TokenToAdd.IntegerValue.ToString());
                            return;
                        }
                        oneToken = Lexer.Tokenizer(DataStream);
                        if (oneToken.TokenTypeValue == TokenType.KEYWORD)
                        {
                            SortedDictionary<uint, Instance> toAdd = DataList;
                            // TryGetValue avoids the ContainsKey + indexer double lookup.
                            if (toAdd.TryGetValue((uint)TokenToAdd.IntegerValue, out Instance existing))
                            {
                                // Instance was already created by a forward reference;
                                // fill in its record now.
                                InstanceSimple aSimpleInstance = (InstanceSimple)existing;
                                ModifySimpleInstanceRecord(aSimpleInstance, oneToken.StringValue);
                            }
                            else
                            {
                                // First time this entity instance name #??? is parsed.
                                InstanceSimple aSimpleInstance = AddSimpleInstance((uint)TokenToAdd.IntegerValue, oneToken.StringValue);
                                toAdd.Add((uint)TokenToAdd.IntegerValue, aSimpleInstance);
                            }
                            // Index the instance id under its entity type name.
                            if (InstanceByType.TryGetValue(oneToken.StringValue, out List<uint> idList))
                            {
                                idList.Add((uint)TokenToAdd.IntegerValue);
                            }
                            else
                            {
                                InstanceByType.Add(oneToken.StringValue, new List<uint> { (uint)TokenToAdd.IntegerValue });
                            }
                        }
                        else if (oneToken.TokenTypeValue == TokenType.LEFTPARENTHESIS)
                        {
                            // Complex (multi-leaf) instance; needs further update.
                            SortedDictionary<uint, Instance> toAdd = DataList;
                            InstanceComplex aComplexInstance = AddComplexInstance((uint)TokenToAdd.IntegerValue);
                            // Indexer assignment inserts or overwrites in one lookup,
                            // replacing the original ContainsKey/Add branch.
                            toAdd[(uint)TokenToAdd.IntegerValue] = aComplexInstance;
                        }
                        else
                        {
                            Console.WriteLine("Syntax Error at value defintion in Entity Instance : #" + TokenToAdd.IntegerValue.ToString());
                            return;
                        }
                        break;
                    default:
                        Console.WriteLine("Parser Error : Not defined parsing condition in Entity Instance Name : " + currStatus);
                        break;
                }
                break;
            case TokenType.KEYWORD:
                switch (currStatus)
                {
                    case ParserStatus.HEADER:
                        SimpleRecord aSimpleRecord = AddSimpleRecord(null, TokenToAdd.StringValue);
                        HeaderList.Add(aSimpleRecord);
                        Lexer.Tokenizer(DataStream); // cleans semicolon at the end of header instance
                        break;
                    case ParserStatus.DATA:
                        // In DATA, a bare keyword means the "#id =" prefix was missing.
                        Console.WriteLine("Syntax Error : Instance without Entity Instance Name");
                        break;
                    default:
                        Console.WriteLine("Parser Error : Not defined parsing condition in Keyword");
                        break;
                }
                break;
            case TokenType.DATA:
                Lexer.Tokenizer(DataStream);
                currStatus = ParserStatus.DATA;
                break;
            default:
                Console.WriteLine("not defined condition : " + TokenToAdd.TokenTypeValue.ToString());
                break;
        }
    }
    catch (Exception ex)
    {
        // Preserve the original failure as the inner exception; the old code
        // threw a bare FormatException and lost the cause and stack trace.
        throw new FormatException("Parse Error!", ex);
    }
}
/// <summary>
/// Converts a single lexer token into a Parameter owned by the given
/// instance. Mirrors GetParameter but threads the owning instance through
/// so entity references can record their owner. Recurses for aggregates.
/// </summary>
/// <param name="cInstance">Instance that owns the parameter being built.</param>
/// <param name="TokenToAdd">Lexer generated token to convert.</param>
/// <returns>The Parameter corresponding to the token's type and value.</returns>
Parameter AddParameterValue(Instance cInstance, Token TokenToAdd)
{
    Parameter aParameter;
    switch (TokenToAdd.TokenTypeValue)
    {
        case TokenType.ENTITYINSTANCENAME:
            // Entity reference parameter records both the target id and the owner.
            aParameter = new ParamEntity((uint)TokenToAdd.IntegerValue, cInstance, this);
            break;
        case TokenType.KEYWORD:
            // Keyword inside a parameter position is a typed/select value.
            aParameter = getSelectParameter(TokenToAdd.StringValue);
            break;
        case TokenType.DOLLAR:
            aParameter = new ParamNullValue();
            break;
        case TokenType.INTEGER:
            aParameter = new ParamInteger(TokenToAdd.IntegerValue);
            break;
        case TokenType.REAL:
            aParameter = new ParamReal(TokenToAdd.RealValue);
            break;
        case TokenType.STRING:
            aParameter = new ParamString(TokenToAdd.StringValue);
            break;
        case TokenType.ENUMERATION:
            aParameter = new ParamEnum(TokenToAdd.StringValue, this);
            break;
        case TokenType.BINARY:
            aParameter = new ParamBinary(SParamType.BINARY); // TODO: carry the actual binary payload, not just the type tag
            break;
        case TokenType.LEFTPARENTHESIS:
            // Aggregate: keep pulling tokens and converting them until the
            // matching ')' closes the list. Typed local avoids a cast per append.
            ParamList aList = new ParamList(new List<Parameter>());
            Token listToken = Lexer.Tokenizer(DataStream);
            while (listToken.TokenTypeValue != TokenType.RIGHTPARENTHESIS)
            {
                aList.ParameterList.Add(AddParameterValue(cInstance, listToken));
                listToken = Lexer.Tokenizer(DataStream);
            }
            aParameter = aList;
            break;
        case TokenType.OMITTED:
            aParameter = new ParamOmitted();
            break;
        // Three-valued LOGICAL: TRUE = 1, FALSE = -1, UNKNOWN = 0.
        case TokenType.TRUE:
            aParameter = new ParamBoolean(1);
            break;
        case TokenType.FALSE:
            aParameter = new ParamBoolean(-1);
            break;
        case TokenType.UNKNOWN:
            aParameter = new ParamBoolean(0);
            break;
        default:
            // RIGHTPARENTHESIS, COMMA, and any unexpected token: skip it and
            // convert the next token instead. (The original RIGHTPARENTHESIS/
            // COMMA cases duplicated this default body verbatim; merged here.)
            TokenToAdd = Lexer.Tokenizer(DataStream);
            aParameter = AddParameterValue(cInstance, TokenToAdd);
            break;
    }
    return aParameter;
}