/// <summary>
/// Consumes the optional ABSTRACT, SUPERTYPE OF (...) and SUBTYPE OF (...)
/// clauses of an entity header, recording them on <paramref name="aEntity"/>.
/// </summary>
/// <param name="oneToken">Current lookahead token (ABSTRACT, SUPERTYPE or SUBTYPE).</param>
/// <param name="aEntity">Entity being populated.</param>
/// <returns>A freshly read token following the processed clauses.</returns>
private SToken ProcessSuperSub(SToken oneToken, SEntity aEntity)
{
    if (oneToken.TokenType == STokenType.ABSTRACT)
    {
        aEntity.IsAbstract = true;
        //get new token for next step
        oneToken = SLexer.Tokenizer(_dataStream);
    }
    if (oneToken.TokenType == STokenType.SUPERTYPE)
    {
        // remove OF token after supertype token
        oneToken = SLexer.Tokenizer(_dataStream);
        ProcessSupertypeOf(aEntity);
        //get new token for next step
        oneToken = SLexer.Tokenizer(_dataStream);
    }
    if (oneToken.TokenType == STokenType.SUBTYPE)
    {
        // remove of token after supertype token
        oneToken = SLexer.Tokenizer(_dataStream);
        ProcessSubtypeOf(aEntity);
        //get new token for next step
        oneToken = SLexer.Tokenizer(_dataStream);
    }
    // NOTE(review): the token fetched in the branches above is discarded and a
    // fresh one is returned — presumably this skips a trailing semicolon, but
    // it would also drop a meaningful token; confirm against the lexer output.
    return(SLexer.Tokenizer(_dataStream));
}
/// <summary>
/// Parses a TYPE declaration: reads the type name, delegates the definition
/// body to ProcessTypeSub, skips an optional WHERE section, discards any
/// remaining tokens up to END_TYPE, then registers the type in the schema.
/// </summary>
private void ProcessType()
{
    SToken token = SLexer.Tokenizer(_dataStream);
    if (token.TokenType != STokenType.SIMPLEID)
    {
        throw new Exception("Syntax error in Type Name Definition");
    }

    SType typeDefinition = ProcessTypeSub(token);

    token = SLexer.Tokenizer(_dataStream);
    if (token.TokenType == STokenType.WHERE)
    {
        // WHERE rules are not interpreted; skip them wholesale.
        token = DummySubLoop();
    }

    // Drain (and log) anything left before END_TYPE.
    while (token.TokenType != STokenType.END_TYPE)
    {
        IgnoredToken(token);
        token = SLexer.Tokenizer(_dataStream);
    }

    CleanUpSemiColon();
    SchemaSet.TypeList.Add(typeDefinition.Name, typeDefinition);
}
/// <summary>
/// Parses the parenthesised entity-name list of a "SUBTYPE OF ( a, b, ... )"
/// clause and records every referenced entity in
/// <paramref name="aEntity"/>.SubTypesOf.
/// </summary>
/// <param name="aEntity">Entity whose SUBTYPE OF list is being parsed.</param>
private void ProcessSubtypeOf(SEntity aEntity)
{
    SToken oneToken = SLexer.Tokenizer(_dataStream);
    if (oneToken.TokenType != STokenType.LEFTPARENTHESIS)
    {
        throw new Exception("Syntax Error : Subtype Definition (");
    }
    oneToken = SLexer.Tokenizer(_dataStream);
    if (oneToken.TokenType != STokenType.SIMPLEID)
    {
        throw new Exception("Syntax Error : Subtype Definition simpleId");
    }
    aEntity.SubTypesOf.Add(getEntity(oneToken.StringValue));
    oneToken = SLexer.Tokenizer(_dataStream);
    while (oneToken.TokenType != STokenType.RIGHTPARENTHESIS)
    {
        if (oneToken.TokenType != STokenType.COMMA)
        {
            throw new Exception("Syntax Error : Subtype Definition ,");
        }
        oneToken = SLexer.Tokenizer(_dataStream);
        if (oneToken.TokenType != STokenType.SIMPLEID)
        {
            throw new Exception("Syntax Error : Subtype Definition simpleid2");
        }
        // BUG FIX: names after the first were previously added to SuperTypesOf;
        // every name in a SUBTYPE OF list belongs in SubTypesOf, matching the
        // first entry above.
        aEntity.SubTypesOf.Add(getEntity(oneToken.StringValue));
        oneToken = SLexer.Tokenizer(_dataStream);
    }
}
/// <summary>
/// Skips a FUNCTION declaration entirely: functions are not interpreted by
/// this parser. Reads tokens up to END_FUNCTION, then consumes one more token
/// (the trailing semicolon). (Name keeps the historical "Fuction" spelling
/// because callers reference it.)
/// </summary>
private void ProcessFuction()
{
    SToken token = SLexer.Tokenizer(_dataStream);
    while (token.TokenType != STokenType.END_FUNCTION)
    {
        // Guard against malformed input: without this check a schema missing
        // END_FUNCTION would spin forever once the stream is exhausted.
        if (token.TokenType == STokenType.EOF)
        {
            throw new Exception("Syntax error : END_FUNCTION not found before end of file");
        }
        token = SLexer.Tokenizer(_dataStream);
    }
    // Consume the token following END_FUNCTION (its semicolon).
    SLexer.Tokenizer(_dataStream);
}
/// <summary>
/// Reads one token and verifies it is the expected semicolon; otherwise
/// reports a syntax error with the lexer's current position.
/// </summary>
private void CleanUpSemiColon()
{
    SToken token = SLexer.Tokenizer(_dataStream);
    if (token.TokenType == STokenType.SEMICOLON)
    {
        return;
    }
    throw new Exception("Syntax error : " + SLexer.CurrentRow.ToString() + " : " + SLexer.CurrentColumn.ToString());
}
/// <summary>
/// Parses the schema header: stores the schema name and consumes the token
/// that follows it.
/// </summary>
private void ProcessSchemaTitle()
{
    SToken nameToken = SLexer.Tokenizer(_dataStream);
    if (nameToken.TokenType != STokenType.SIMPLEID)
    {
        throw new Exception("Syntax error in Schema Name Definition");
    }
    SchemaSet.SchemaName = nameToken.StringValue;
    // Consume the trailing token after the name.
    SLexer.Tokenizer(_dataStream);
}
/// <summary>
/// Skips over a run of "SIMPLEID ... ;" clauses without interpreting them,
/// returning the first token that does not begin such a clause.
/// </summary>
/// <returns>The first token that is not the start of a skipped clause.</returns>
private SToken DummySubLoop()
{
    SToken token = SLexer.Tokenizer(_dataStream);
    while (token.TokenType == STokenType.SIMPLEID)
    {
        // Discard everything up to and including this clause's semicolon.
        do
        {
            token = SLexer.Tokenizer(_dataStream);
        }
        while (token.TokenType != STokenType.SEMICOLON);
        token = SLexer.Tokenizer(_dataStream);
    }
    return token;
}
/// <summary>
/// Parses one explicit attribute declaration "name : &lt;parameter&gt;" and
/// appends it to the entity's parameter list.
/// </summary>
/// <param name="oneEntity">Entity receiving the attribute.</param>
/// <param name="AttributeName">Name token already read by the caller.</param>
private void ProcessAttribute(SEntity oneEntity, string AttributeName)
{
    var attribute = new SAttribute(AttributeName);
    SToken token = SLexer.Tokenizer(_dataStream);
    if (token.TokenType != STokenType.COLON)
    {
        throw new Exception("Error in attribute : " + SLexer.CurrentRow + " : " + SLexer.CurrentColumn);
    }
    attribute.Type = ProcessParameter();
    oneEntity.ParameterList.Add(attribute);
}
/// <summary>
/// Fills in the bounds and element type of a collection type declaration,
/// e.g. "LIST [0:?] OF foo". Assumes the collection keyword itself has
/// already been consumed by the caller.
/// </summary>
/// <param name="sTypeCol">Collection type object being populated.</param>
private void collection(STypeCollection sTypeCol)
{
    SLexer.Tokenizer(_dataStream);                               // '['
    sTypeCol.LBound = getBound(SLexer.Tokenizer(_dataStream));   // lower bound
    SLexer.Tokenizer(_dataStream);                               // ':'
    sTypeCol.UBound = getBound(SLexer.Tokenizer(_dataStream));   // upper bound
    SLexer.Tokenizer(_dataStream);                               // ']'
    SLexer.Tokenizer(_dataStream);                               // OF
    SToken elementToken = SLexer.Tokenizer(_dataStream);         // element type

    SParamType paramType = SParameter.TokenTypeToAttribType(elementToken.TokenType);
    switch (elementToken.TokenType)
    {
        case STokenType.BOOLEAN:
        case STokenType.REAL:
        case STokenType.INTEGER:
        case STokenType.LOGICAL:
        case STokenType.NUMBER:
        case STokenType.STRING:
        case STokenType.ARRAY:
        case STokenType.LIST:
        case STokenType.SET:
        case STokenType.BAG:
            // Built-in element type: record both the kind and its canonical name.
            sTypeCol.CollectionType = paramType;
            sTypeCol.CollectionName = SParameter.TypeToString(paramType);
            break;
        case STokenType.SIMPLEID:
            // User-declared element type: keep the referenced name.
            sTypeCol.CollectionName = elementToken.StringValue;
            break;
        default:
            if (_logFile != null)
            {
                _logFile.WriteLine(elementToken.TokenType.ToString() + " : " + elementToken.StringValue);
            }
            break;
    }
}
/// <summary>
/// Reads a parenthesised, comma-separated list of identifier tokens from the
/// stream — "( id1, id2, ... )" — and appends each SIMPLEID token to
/// <paramref name="tokenList"/> (which is cleared first).
/// </summary>
/// <param name="tokenList">Receives the SIMPLEID tokens found inside the parentheses.</param>
private void GetInsideSingleParenthesis(List <SToken> tokenList)
{
    tokenList.Clear();
    // dummy token to begin
    SToken oneToken = SLexer.Tokenizer(_dataStream);
    int counter = 0; // number of identifiers collected so far
    while (oneToken.TokenType != STokenType.RIGHTPARENTHESIS)
    {
        if (counter == 0)
        {
            // First iteration: the token just read must be the opening parenthesis.
            if (oneToken.TokenType != STokenType.LEFTPARENTHESIS)
            {
                throw new Exception("Syntax error in Single Parenthesis : Type 1");
            }
        }
        else
        {
            // Later iterations: expect a separator comma, or the closing parenthesis.
            oneToken = SLexer.Tokenizer(_dataStream);
            if (oneToken.TokenType == STokenType.COMMA)
            {
                // comma: fall through and read the next identifier
            }
            else if (oneToken.TokenType == STokenType.RIGHTPARENTHESIS)
            {
                break;
            }
            else
            {
                throw new Exception("Syntax error in Single Parenthesis : Type 2");
            }
        }
        // The token after '(' or ',' must be an identifier.
        oneToken = SLexer.Tokenizer(_dataStream);
        if (oneToken.TokenType == STokenType.SIMPLEID)
        {
            tokenList.Add(oneToken);
            counter += 1;
        }
        else
        {
            throw new Exception("Syntax error in Single Parenthesis : Type 3");
        }
    }
}
/// <summary>
/// Parses one INVERSE clause entry and appends it to the entity's inverse
/// list. The entry name has already been read by the caller.
/// </summary>
/// <param name="sEntity">Entity receiving the inverse attribute.</param>
/// <param name="sToken">Token holding the inverse attribute's name.</param>
private void ProcessInverse(SEntity sEntity, SToken sToken)
{
    var inverse = new SAttributeInverse(sToken.StringValue);
    SToken token = SLexer.Tokenizer(_dataStream);
    if (token.TokenType != STokenType.COLON)
    {
        throw new Exception("Error in inverse attribute : " + SLexer.CurrentRow + " : " + SLexer.CurrentColumn);
    }
    inverse.Type = ProcessInverseParameter();
    // Consume the token between the parameter and the attribute name
    // (presumably the FOR keyword — confirm against the grammar).
    SLexer.Tokenizer(_dataStream);
    // Referencing entity's attribute name.
    token = SLexer.Tokenizer(_dataStream);
    inverse.InversingAttributeName = token.StringValue;
    sEntity.InverseList.Add(inverse);
    // Trailing semicolon.
    SLexer.Tokenizer(_dataStream);
}
/// <summary>
/// Parser main loop: reads top-level tokens until EOF and dispatches each
/// recognised section keyword to its handler; anything else is logged as an
/// ignored token.
/// </summary>
private void Parse()
{
    for (SToken token = SLexer.Tokenizer(_dataStream);
         token.TokenType != STokenType.EOF;
         token = SLexer.Tokenizer(_dataStream))
    {
        switch (token.TokenType)
        {
            case STokenType.SCHEMA:
                ProcessSchemaTitle();
                break;
            case STokenType.ENTITY:
                ProcessEntity();
                break;
            case STokenType.TYPE:
                ProcessType();
                break;
            case STokenType.FUNCTION:
                ProcessFuction();
                break;
            case STokenType.RULE:
                ProcessRule();
                break;
            case STokenType.END_SCHEMA:
                ProcessSchemaEnd();
                break;
            default:
                IgnoredToken(token);
                break;
        }
    }
}
/// <summary>
/// Maps a collection-bound token to a numeric bound: the literal value for an
/// INTEGER token, or a negative sentinel for the other accepted forms.
/// </summary>
/// <param name="token">Token read from the bound position of a collection declaration.</param>
/// <returns>The integer bound, or -2 for '?', -3 for an identifier, -4 otherwise.</returns>
short getBound(SToken token)
{
    short ret;
    if (token.TokenType == STokenType.INTEGER)
    {
        ret = (short)token.IntegerValue;
    }
    else if (token.TokenType == STokenType.QUESTIONMARK)
    {
        // '?' — unbounded upper limit in EXPRESS collection syntax.
        ret = -2;
    }
    else if (token.TokenType == STokenType.SIMPLEID)
    {
        // Bound given by a named value; not resolved here.
        ret = -3;
    }
    else
    {
        // Unrecognised bound form is tolerated rather than rejected.
        ret = -4;
        //throw new Exception("Syntax error");
    }
    return(ret);
}
/// <summary>
/// Records a token the parser did not handle, rewriting its string value to
/// include its type and source position for later diagnostics.
/// </summary>
/// <param name="aToken">The unhandled token.</param>
private void IgnoredToken(SToken aToken)
{
    aToken.StringValue = $"Unprocessed {aToken.TokenType.ToString()} at Row {SLexer.CurrentRow}, Column {SLexer.CurrentColumn} with String Value '{aToken.StringValue}'";
    _mIgnoredToken.Add(aToken);
}
/// <summary>
/// Maps a collection-bound token to a numeric bound: the literal value for an
/// INTEGER token, or a negative sentinel for the other accepted forms.
/// </summary>
/// <param name="token">Token read from the bound position of a collection declaration.</param>
/// <returns>The integer bound, or -2 for '?', -3 for an identifier, -4 otherwise.</returns>
short getBound(SToken token)
{
    switch (token.TokenType)
    {
        case STokenType.INTEGER:
            return (short)token.IntegerValue;
        case STokenType.QUESTIONMARK:
            // '?' — unbounded limit.
            return -2;
        case STokenType.SIMPLEID:
            // Bound given by a named value; not resolved here.
            return -3;
        default:
            // Unrecognised bound form is tolerated rather than rejected.
            return -4;
    }
}
/// <summary>
/// Lexes a word beginning with <paramref name="FirstCharactor"/>: consumes
/// subsequent letters, digits, '-' and '_' characters from the stream, then
/// maps the word to its keyword token, or wraps it in a plain token when it
/// is not a recognised keyword.
/// </summary>
/// <param name="FirstCharactor">The character already read from the stream.</param>
/// <param name="DataStream">Source stream positioned just after the first character.</param>
/// <returns>The keyword token, or an SToken carrying the raw word.</returns>
public static SToken ProcessUpper(int FirstCharactor, StreamReader DataStream)
{
    StringBuilder theString = new StringBuilder();
    theString.Append((char)FirstCharactor);
    int peekedChar;
    // Accept A-Z, 0-9, '-', '_' and a-z. (The ranges were previously written
    // as raw ASCII codes 65-90, 48-57, 45, 95 and 97-122.)
    while (((peekedChar = DataStream.Peek()) >= 'A' && peekedChar <= 'Z')
        || (peekedChar >= '0' && peekedChar <= '9')
        || peekedChar == '-'
        || peekedChar == '_'
        || (peekedChar >= 'a' && peekedChar <= 'z'))
    {
        theString.Append((char)DataStream.Read());
        CurrentColumn += 1;
    }

    SToken aToken;
    switch (theString.ToString())
    {
        case "SCHEMA": aToken = new SToken(STokenType.SCHEMA); break;
        case "ENTITY": aToken = new SToken(STokenType.ENTITY); break;
        case "END_ENTITY": aToken = new SToken(STokenType.END_ENTITY); break;
        case "TYPE": aToken = new SToken(STokenType.TYPE); break;
        case "END_TYPE": aToken = new SToken(STokenType.END_TYPE); break;
        case "ABSTRACT": aToken = new SToken(STokenType.ABSTRACT); break;
        case "SUPERTYPE": aToken = new SToken(STokenType.SUPERTYPE); break;
        case "SUBTYPE": aToken = new SToken(STokenType.SUBTYPE); break;
        case "OF": aToken = new SToken(STokenType.OF); break;
        case "ONEOF": aToken = new SToken(STokenType.ONEOF); break;
        case "OPTIONAL": aToken = new SToken(STokenType.OPTIONAL); break;
        case "INTEGER": aToken = new SToken(STokenType.INTEGER); break;
        case "REAL": aToken = new SToken(STokenType.REAL); break;
        case "STRING": aToken = new SToken(STokenType.STRING); break;
        case "BOOLEAN": aToken = new SToken(STokenType.BOOLEAN); break;
        case "LOGICAL": aToken = new SToken(STokenType.LOGICAL); break;
        case "NUMBER": aToken = new SToken(STokenType.NUMBER); break;
        case "ENUMERATION": aToken = new SToken(STokenType.ENUMERATION); break;
        case "SELECT": aToken = new SToken(STokenType.SELECT); break;
        case "ARRAY": aToken = new SToken(STokenType.ARRAY); break;
        case "LIST": aToken = new SToken(STokenType.LIST); break;
        case "SET": aToken = new SToken(STokenType.SET); break;
        case "BAG": aToken = new SToken(STokenType.BAG); break;
        case "WHERE": aToken = new SToken(STokenType.WHERE); break;
        case "DERIVE": aToken = new SToken(STokenType.DERIVE); break;
        case "INVERSE": aToken = new SToken(STokenType.INVERSE); break;
        case "FIXED": aToken = new SToken(STokenType.FIXED); break;
        case "SELF": aToken = new SToken(STokenType.SELF); break;
        case "UNIQUE": aToken = new SToken(STokenType.UNIQUE); break;
        case "FUNCTION": aToken = new SToken(STokenType.FUNCTION); break;
        case "END_FUNCTION": aToken = new SToken(STokenType.END_FUNCTION); break;
        case "RULE": aToken = new SToken(STokenType.RULE); break;
        case "END_RULE": aToken = new SToken(STokenType.END_RULE); break;
        case "END_SCHEMA": aToken = new SToken(STokenType.END_SCHEMA); break;
        default:
            // Not a keyword: keep the raw word.
            aToken = new SToken(theString.ToString());
            break;
    }
    return aToken;
}
/// <summary>
/// Parses the definition body of a TYPE declaration and builds the matching
/// SType subclass (simple, string, named, collection, enumeration or select).
/// </summary>
/// <param name="oneToken">Token holding the new type's name.</param>
/// <returns>The constructed type object, or null for an unrecognised definition.</returns>
private SType ProcessTypeSub(SToken oneToken)
{
    SType ret;
    string t_TypeName = oneToken.StringValue;
    // tokenlist for syntax checking
    List <SToken> tokenList = new List <SToken>();
    oneToken = SLexer.Tokenizer(_dataStream);
    #region switch
    switch (oneToken.TokenType)
    {
        case STokenType.BOOLEAN:
        case STokenType.REAL:
        case STokenType.INTEGER:
        case STokenType.LOGICAL:
        case STokenType.NUMBER:
            // Plain built-in base type: just wrap it and consume the semicolon.
            ret = new SType(t_TypeName, SParameter.TokenTypeToAttribType(oneToken.TokenType));
            CleanUpSemiColon();
            break;
        case STokenType.STRING:
            // STRING, optionally followed by "(width)" and FIXED.
            ret = new STypeString(t_TypeName);
            ret.Kind = SParameter.TokenTypeToAttribType(oneToken.TokenType);
            oneToken = SLexer.Tokenizer(_dataStream);
            if (oneToken.TokenType == STokenType.LEFTPARENTHESIS)
            {
                // process width
                oneToken = SLexer.Tokenizer(_dataStream);
                ((STypeString)ret).StringLength = (short)oneToken.IntegerValue;
                oneToken = SLexer.Tokenizer(_dataStream);
                // right parenthesis
                oneToken = SLexer.Tokenizer(_dataStream);
                if (oneToken.TokenType == STokenType.FIXED)
                {
                    // process fixed
                    ((STypeString)ret).IsFixed = true;
                    // semicolon
                    oneToken = SLexer.Tokenizer(_dataStream);
                }
            }
            break;
        case STokenType.SIMPLEID:
            // Alias of another declared type.
            ret = new STypeSimple(t_TypeName, oneToken.StringValue);
            ret.Kind = SParameter.TokenTypeToAttribType(oneToken.TokenType);
            CleanUpSemiColon();
            break;
        case STokenType.ARRAY:
        case STokenType.LIST:
        case STokenType.SET:
        case STokenType.BAG:
            // Collection type: bounds and element type parsed by collection().
            ret = new STypeCollection(t_TypeName, oneToken.TokenType);
            ret.Kind = SParameter.TokenTypeToAttribType(oneToken.TokenType);
            collection((STypeCollection)ret);
            break;
        case STokenType.ENUMERATION:
            // ENUMERATION OF ( id, id, ... )
            ret = new STypeEnum(t_TypeName);
            ret.Kind = SParameter.TokenTypeToAttribType(oneToken.TokenType);
            oneToken = SLexer.Tokenizer(_dataStream);
            if (oneToken.TokenType != STokenType.OF)
            {
                throw new Exception("Syntax error in Schema Name Definition");
            }
            GetInsideSingleParenthesis(tokenList);
            foreach (SToken aToken in tokenList)
            {
                if (aToken.TokenType == STokenType.SIMPLEID)
                {
                    ((STypeEnum)ret).EnumList.Add(aToken.StringValue);
                }
                else
                {
                    throw new Exception("Syntax error in Enumeration Type Definition : " + ret.Name);
                }
            }
            CleanUpSemiColon();
            break;
        case STokenType.SELECT:
            // SELECT ( id, id, ... ) — referenced types resolved later (null placeholder).
            ret = new STypeSelect(t_TypeName);
            ret.Kind = SParameter.TokenTypeToAttribType(oneToken.TokenType);
            GetInsideSingleParenthesis(tokenList);
            foreach (SToken aToken in tokenList)
            {
                if (aToken.TokenType == STokenType.SIMPLEID)
                {
                    ((STypeSelect)ret).SelectList.Add(aToken.StringValue, null);
                }
                else
                {
                    throw new Exception("Syntax error in Select Type Definition : " + ret.Name);
                }
            }
            CleanUpSemiColon();
            break;
        default:
            // Unrecognised definition: log and return null.
            ret = null;
            if (_logFile != null)
            {
                _logFile.WriteLine(oneToken.TokenType.ToString() + " : " + oneToken.StringValue);
            }
            break;
    }
    #endregion // switch
    return(ret);
}
/// <summary>
/// Parses the body of a "SUPERTYPE OF ( ... )" constraint. The full
/// expression grammar (ONEOF, ANDOR, nesting) is not interpreted: every
/// entity name found inside the balanced parentheses is simply recorded in
/// <paramref name="aEntity"/>.SuperTypesOf.
/// </summary>
/// <param name="aEntity">Entity whose supertype constraint is being parsed.</param>
//319 supertype_constraint = abstract_entity_declaration |
//    abstract_supertype_declaration | supertype_rule .
//164 abstract_entity_declaration = ABSTRACT .
//166 abstract_supertype_declaration = ABSTRACT SUPERTYPE [ subtype_constraint ] .
//313 subtype_constraint = OF '(' supertype_expression ')' .
//320 supertype_expression = supertype_factor { ANDOR supertype_factor } .
//321 supertype_factor = supertype_term { AND supertype_term } .
//323 supertype_term = entity_ref | one_of | '(' supertype_expression ')' .
//263 one_of = ONEOF '(' supertype_expression { ',' supertype_expression } ')' .
//322 supertype_rule = SUPERTYPE subtype_constraint .
private void ProcessSupertypeOf(SEntity aEntity)
{
    int parenthesisCounter = 0;
    SToken oneToken = SLexer.Tokenizer(_dataStream);
    if (oneToken.TokenType != STokenType.LEFTPARENTHESIS)
    {
        throw new Exception("Syntax Error : Supertype Definition (");
    }
    else
    {
        parenthesisCounter += 1;
    }
    // Scan until the parentheses balance, collecting entity references.
    while (parenthesisCounter != 0)
    {
        oneToken = SLexer.Tokenizer(_dataStream);
        switch (oneToken.TokenType)
        {
            case STokenType.LEFTPARENTHESIS:
                parenthesisCounter += 1;
                break;
            case STokenType.RIGHTPARENTHESIS:
                parenthesisCounter -= 1;
                break;
            case STokenType.ONEOF:
            case STokenType.COMMA:
                // Structural keywords and separators carry no extra information here.
                break;
            case STokenType.SIMPLEID:
                aEntity.SuperTypesOf.Add(getEntity(oneToken.StringValue));
                break;
            default:
                string logout = String.Format("Undefined supertype definition at row : {0}, column : {1}", SLexer.CurrentRow, SLexer.CurrentColumn);
                if (_logFile != null)
                {
                    _logFile.WriteLine(logout);
                }
                break;
        }
    }
}
/// <summary>
/// Parses a complete ENTITY declaration: name, optional
/// ABSTRACT/SUPERTYPE/SUBTYPE header, explicit attributes, then the optional
/// DERIVE, INVERSE, UNIQUE and WHERE sections up to END_ENTITY.
/// </summary>
private void ProcessEntity()
{
    SToken oneToken = SLexer.Tokenizer(_dataStream);
    if (oneToken.TokenType != STokenType.SIMPLEID)
    {
        throw new Exception("Syntax error in Entity Definition");
    }
    SEntity aEntity = getEntity(oneToken.StringValue);
    oneToken = SLexer.Tokenizer(_dataStream);
    if (oneToken.TokenType == STokenType.SEMICOLON)
    {
        //get new token for next step
        oneToken = SLexer.Tokenizer(_dataStream);
    }
    // Optional entity header clauses.
    if (oneToken.TokenType == STokenType.ABSTRACT ||
        oneToken.TokenType == STokenType.SUPERTYPE ||
        oneToken.TokenType == STokenType.SUBTYPE)
    {
        oneToken = ProcessSuperSub(oneToken, aEntity);
    }
    // Explicit attributes: each starts with an identifier.
    if (oneToken.TokenType == STokenType.SIMPLEID)
    {
        while (oneToken.TokenType == STokenType.SIMPLEID)
        {
            ProcessAttribute(aEntity, oneToken.StringValue);
            oneToken = SLexer.Tokenizer(_dataStream);
        }
    }
    if (oneToken.TokenType == STokenType.DERIVE)
    {
        // oneToken = DummySubLoop();
        // next one after derived
        oneToken = SLexer.Tokenizer(_dataStream);
        // Derived attributes may start with an identifier or SELF.
        while (oneToken.TokenType == STokenType.SIMPLEID || oneToken.TokenType == STokenType.SELF)
        {
            ProcessDerivedAttribute(aEntity, oneToken);
            oneToken = SLexer.Tokenizer(_dataStream);
        }
    }
    if (oneToken.TokenType == STokenType.INVERSE)
    {
        //oneToken = DummySubLoop();
        oneToken = SLexer.Tokenizer(_dataStream);
        while (oneToken.TokenType == STokenType.SIMPLEID)
        {
            ProcessInverse(aEntity, oneToken);
            oneToken = SLexer.Tokenizer(_dataStream);
        }
    }
    // UNIQUE, UNDEFINED and WHERE sections are skipped without interpretation.
    if (oneToken.TokenType == STokenType.UNIQUE)
    {
        oneToken = DummySubLoop();
    }
    if (oneToken.TokenType == STokenType.UNDEFINED)
    {
        oneToken = DummySubLoop();
    }
    if (oneToken.TokenType == STokenType.WHERE)
    {
        oneToken = DummySubLoop();
    }
    if (oneToken.TokenType == STokenType.END_ENTITY)
    {
        oneToken = SLexer.Tokenizer(_dataStream); // semi colon
    }
}
/// <summary>
/// Not a complete DERIVE implementation — intended only to identify omitted
/// values in the instance population. Handles two forms: an inherited
/// redeclaration "SELF\Super.attr : type" and a plain "name : type" entry.
/// </summary>
/// <param name="oneEntity">Entity receiving the derived attribute.</param>
/// <param name="mToken">First token of the entry (SELF or the attribute name).</param>
private void ProcessDerivedAttribute(SEntity oneEntity, SToken mToken)
{
    SAttributeDerived drvAtt;
    //SParam sParam;
    if (mToken.TokenType == STokenType.SELF)
    {
        //drvAtt = new SAttributeDerived(AttribType.DERIVED);
        // defines self drived attribute
        // reverse_solidus
        mToken = SLexer.Tokenizer(_dataStream);
        // super entity
        mToken = SLexer.Tokenizer(_dataStream);
        if (mToken.TokenType != STokenType.SIMPLEID)
        {
            throw new InvalidDataException(string.Format("schema is not in syntax at {0} : {1}", oneEntity.Name, mToken.TokenType.ToString()));
        }
        string orgSuperTypeName = mToken.StringValue;
        // period
        mToken = SLexer.Tokenizer(_dataStream);
        // attribute name
        mToken = SLexer.Tokenizer(_dataStream);
        string sAttName = mToken.StringValue;
        drvAtt = new SAttributeDerived(sAttName);
        drvAtt.OriginatingSuperType = orgSuperTypeName;
        drvAtt.Name = sAttName;
        drvAtt.isInherited = true;
        // colon
        mToken = SLexer.Tokenizer(_dataStream);
        drvAtt.Type = ProcessParameter();
        oneEntity.DerivedList.Add(drvAtt);
    }
    else //if (mToken.TokenType == SchemaTokenType.SIMPLEID)
    {
        // Plain derived attribute declared on this entity itself.
        string sAttName = mToken.StringValue;
        drvAtt = new SAttributeDerived(sAttName);
        drvAtt.Name = sAttName;
        drvAtt.isInherited = false;
        mToken = SLexer.Tokenizer(_dataStream);
        drvAtt.Type = ProcessParameter();
        oneEntity.DerivedList.Add(drvAtt);
    }
    //mToken = SLexer.Tokenizer(DataStream);
    //oneEntity.AttributeList.Add(oneAttribute);
    // The deriving expression itself is skipped up to its semicolon.
    while (mToken.TokenType != STokenType.SEMICOLON)
    {
        mToken = SLexer.Tokenizer(_dataStream);
    }
}
/// <summary>
/// Consumes the optional ABSTRACT, SUPERTYPE OF (...) and SUBTYPE OF (...)
/// clauses of an entity header, recording them on <paramref name="aEntity"/>,
/// and returns a freshly read token following the processed clauses.
/// </summary>
/// <param name="oneToken">Current lookahead token (ABSTRACT, SUPERTYPE or SUBTYPE).</param>
/// <param name="aEntity">Entity being populated.</param>
/// <returns>The next token read from the stream.</returns>
private SToken ProcessSuperSub(SToken oneToken, SEntity aEntity)
{
    // ABSTRACT flag, if present.
    if (oneToken.TokenType == STokenType.ABSTRACT)
    {
        aEntity.IsAbstract = true;
        oneToken = SLexer.Tokenizer(_dataStream); // advance past ABSTRACT
    }

    // SUPERTYPE OF ( ... ) clause.
    if (oneToken.TokenType == STokenType.SUPERTYPE)
    {
        oneToken = SLexer.Tokenizer(_dataStream); // consume the OF keyword
        ProcessSupertypeOf(aEntity);
        oneToken = SLexer.Tokenizer(_dataStream); // advance past the clause
    }

    // SUBTYPE OF ( ... ) clause.
    if (oneToken.TokenType == STokenType.SUBTYPE)
    {
        oneToken = SLexer.Tokenizer(_dataStream); // consume the OF keyword
        ProcessSubtypeOf(aEntity);
        oneToken = SLexer.Tokenizer(_dataStream); // advance past the clause
    }

    // Hand the caller the next token from the stream.
    return SLexer.Tokenizer(_dataStream);
}
/// <summary>
/// Not a complete DERIVE implementation — intended only to identify omitted
/// values in the instance population. Handles two forms: an inherited
/// redeclaration "SELF\Super.attr : type" and a plain "name : type" entry.
/// </summary>
/// <param name="oneEntity">Entity receiving the derived attribute.</param>
/// <param name="mToken">First token of the entry (SELF or the attribute name).</param>
private void ProcessDerivedAttribute(SEntity oneEntity, SToken mToken)
{
    SAttributeDerived drvAtt;
    //SParam sParam;
    if (mToken.TokenType == STokenType.SELF)
    {
        //drvAtt = new SAttributeDerived(AttribType.DERIVED);
        // defines self drived attribute
        // reverse_solidus
        mToken = SLexer.Tokenizer(_dataStream);
        // super entity
        mToken = SLexer.Tokenizer(_dataStream);
        if (mToken.TokenType != STokenType.SIMPLEID)
            throw new InvalidDataException(string.Format("schema is not in syntax at {0} : {1}", oneEntity.Name, mToken.TokenType.ToString()));
        string orgSuperTypeName = mToken.StringValue;
        // period
        mToken = SLexer.Tokenizer(_dataStream);
        // attribute name
        mToken = SLexer.Tokenizer(_dataStream);
        string sAttName = mToken.StringValue;
        drvAtt = new SAttributeDerived(sAttName);
        drvAtt.OriginatingSuperType = orgSuperTypeName;
        drvAtt.Name = sAttName;
        drvAtt.isInherited = true;
        // colon
        mToken = SLexer.Tokenizer(_dataStream);
        drvAtt.Type = ProcessParameter();
        oneEntity.DerivedList.Add(drvAtt);
    }
    else //if (mToken.TokenType == SchemaTokenType.SIMPLEID)
    {
        // Plain derived attribute declared on this entity itself.
        string sAttName = mToken.StringValue;
        drvAtt = new SAttributeDerived(sAttName);
        drvAtt.Name = sAttName;
        drvAtt.isInherited = false;
        mToken = SLexer.Tokenizer(_dataStream);
        drvAtt.Type = ProcessParameter();
        oneEntity.DerivedList.Add(drvAtt);
    }
    //mToken = SLexer.Tokenizer(DataStream);
    //oneEntity.AttributeList.Add(oneAttribute);
    // The deriving expression itself is skipped up to its semicolon.
    while (mToken.TokenType != STokenType.SEMICOLON)
    {
        mToken = SLexer.Tokenizer(_dataStream);
    }
}
/// <summary>
/// Parses the type expression of an INVERSE clause: optional OPTIONAL/UNIQUE
/// qualifiers, then either a collection type with bounds — whose element type
/// is parsed by a recursive call — a named type reference, or a built-in type.
/// </summary>
/// <returns>The constructed parameter description.</returns>
private SParameter ProcessInverseParameter()
{
    SToken oneToken = SLexer.Tokenizer(_dataStream);
    bool isOptional = false;
    if (oneToken.TokenType == STokenType.OPTIONAL)
    {
        oneToken = SLexer.Tokenizer(_dataStream);
        isOptional = true;
    }
    bool isUnique = false;
    if (oneToken.TokenType == STokenType.UNIQUE)
    {
        isUnique = true;
        oneToken = SLexer.Tokenizer(_dataStream);
    }
    SParamType stype = SParameter.TokenTypeToAttribType(oneToken.TokenType);
    SParameter sParam;
    if (stype == SParamType.SET || stype == SParamType.LIST || stype == SParamType.ARRAY || stype == SParamType.BAG)
    {
        // Collection form: "[lbound : ubound] OF <element>".
        sParam = new SParamCollection(stype);
        var sParamCol = sParam as SParamCollection;
        // get leftbracket
        oneToken = SLexer.Tokenizer(_dataStream);
        // after adding lower bound, upper bound lexer
        oneToken = SLexer.Tokenizer(_dataStream);
        sParamCol.LBound = getBound(oneToken);
        // get colon
        oneToken = SLexer.Tokenizer(_dataStream);
        // get upper bound
        oneToken = SLexer.Tokenizer(_dataStream);
        sParamCol.UBound = getBound(oneToken);
        // get rightbracket
        oneToken = SLexer.Tokenizer(_dataStream);
        // get of
        oneToken = SLexer.Tokenizer(_dataStream);
        // Element type parsed recursively (handles nested collections).
        sParamCol.ParamReference = ProcessInverseParameter();
    }
    else if (stype == SParamType.NAMED)
    {
        // Reference to a declared type or entity by name.
        sParam = new SParamNamed(oneToken.StringValue);
    }
    else
    {
        // Built-in simple type.
        sParam = new SParameter(stype);
    }
    sParam.IsUnique = isUnique;
    sParam.IsOptional = isOptional;
    // either semi colon or left parenthesis
    //oneToken = SLexer.Tokenizer(DataStream);
    //if (!(sParam is SParamCollection))
    //{
    //    oneToken = SLexer.Tokenizer(_dataStream);
    //    if (oneToken.TokenType == STokenType.LEFTPARENTHESIS)
    //    {
    //        oneToken = SLexer.Tokenizer(_dataStream); // integer
    //        // add integer
    //        sParam.TypeLength = (short)oneToken.IntegerValue;
    //        oneToken = SLexer.Tokenizer(_dataStream); // right parenthesis
    //        oneToken = SLexer.Tokenizer(_dataStream);
    //        if (oneToken.TokenType == STokenType.FIXED)
    //        {
    //            // process fixed
    //            sParam.IsFixed = true;
    //            // semicolon
    //            oneToken = SLexer.Tokenizer(_dataStream);
    //        }
    //    }
    //}
    //SAttribute oneAttribute = new SAttribute(AttributeName, sParam);
    return(sParam);
}
/// <summary>
/// Parses the definition body of a TYPE declaration and builds the matching
/// SType subclass (simple, string, named, collection, enumeration or select).
/// </summary>
/// <param name="oneToken">Token holding the new type's name.</param>
/// <returns>The constructed type object, or null for an unrecognised definition.</returns>
private SType ProcessTypeSub(SToken oneToken)
{
    SType ret;
    string t_TypeName = oneToken.StringValue;
    // tokenlist for syntax checking
    List<SToken> tokenList = new List<SToken>();
    oneToken = SLexer.Tokenizer(_dataStream);
    #region switch
    switch (oneToken.TokenType)
    {
        case STokenType.BOOLEAN:
        case STokenType.REAL:
        case STokenType.INTEGER:
        case STokenType.LOGICAL:
        case STokenType.NUMBER:
            // Plain built-in base type: just wrap it and consume the semicolon.
            ret = new SType(t_TypeName, SParameter.TokenTypeToAttribType(oneToken.TokenType));
            CleanUpSemiColon();
            break;
        case STokenType.STRING:
            // STRING, optionally followed by "(width)" and FIXED.
            ret = new STypeString(t_TypeName);
            ret.Kind = SParameter.TokenTypeToAttribType(oneToken.TokenType);
            oneToken = SLexer.Tokenizer(_dataStream);
            if (oneToken.TokenType == STokenType.LEFTPARENTHESIS)
            {
                // process width
                oneToken = SLexer.Tokenizer(_dataStream);
                ((STypeString)ret).StringLength = (short)oneToken.IntegerValue;
                oneToken = SLexer.Tokenizer(_dataStream);
                // right parenthesis
                oneToken = SLexer.Tokenizer(_dataStream);
                if (oneToken.TokenType == STokenType.FIXED)
                {
                    // process fixed
                    ((STypeString)ret).IsFixed = true;
                    // semicolon
                    oneToken = SLexer.Tokenizer(_dataStream);
                }
            }
            break;
        case STokenType.SIMPLEID:
            // Alias of another declared type.
            ret = new STypeSimple(t_TypeName, oneToken.StringValue);
            ret.Kind = SParameter.TokenTypeToAttribType(oneToken.TokenType);
            CleanUpSemiColon();
            break;
        case STokenType.ARRAY:
        case STokenType.LIST:
        case STokenType.SET:
        case STokenType.BAG:
            // Collection type: bounds and element type parsed by collection().
            ret = new STypeCollection(t_TypeName, oneToken.TokenType);
            ret.Kind = SParameter.TokenTypeToAttribType(oneToken.TokenType);
            collection((STypeCollection)ret);
            break;
        case STokenType.ENUMERATION:
            // ENUMERATION OF ( id, id, ... )
            ret = new STypeEnum(t_TypeName);
            ret.Kind = SParameter.TokenTypeToAttribType(oneToken.TokenType);
            oneToken = SLexer.Tokenizer(_dataStream);
            if (oneToken.TokenType != STokenType.OF)
            {
                throw new Exception("Syntax error in Schema Name Definition");
            }
            GetInsideSingleParenthesis(tokenList);
            foreach (SToken aToken in tokenList)
            {
                if (aToken.TokenType == STokenType.SIMPLEID)
                {
                    ((STypeEnum)ret).EnumList.Add(aToken.StringValue);
                }
                else
                {
                    throw new Exception("Syntax error in Enumeration Type Definition : " + ret.Name);
                }
            }
            CleanUpSemiColon();
            break;
        case STokenType.SELECT:
            // SELECT ( id, id, ... ) — referenced types resolved later (null placeholder).
            ret = new STypeSelect(t_TypeName);
            ret.Kind = SParameter.TokenTypeToAttribType(oneToken.TokenType);
            GetInsideSingleParenthesis(tokenList);
            foreach (SToken aToken in tokenList)
            {
                if (aToken.TokenType == STokenType.SIMPLEID)
                {
                    ((STypeSelect)ret).SelectList.Add(aToken.StringValue, null);
                }
                else
                {
                    throw new Exception("Syntax error in Select Type Definition : " + ret.Name);
                }
            }
            CleanUpSemiColon();
            break;
        default:
            // Unrecognised definition: log and return null.
            ret = null;
            if (_logFile != null)
                _logFile.WriteLine(oneToken.TokenType.ToString() + " : " + oneToken.StringValue);
            break;
    }
    #endregion // switch
    return ret;
}
/// <summary>
/// Lexes a word beginning with <paramref name="FirstCharactor"/>: consumes
/// subsequent characters in the accepted ranges from the stream, then maps the
/// word to its keyword token, or wraps it in a plain token when it is not a
/// recognised keyword.
/// </summary>
/// <param name="FirstCharactor">The character already read from the stream.</param>
/// <param name="DataStream">Source stream positioned just after the first character.</param>
/// <returns>The keyword token, or an SToken carrying the raw word.</returns>
public static SToken ProcessUpper(int FirstCharactor, StreamReader DataStream)
{
    var word = new StringBuilder();
    word.Append((char)FirstCharactor);

    // Keep consuming while the next character falls in the accepted ranges
    // (ASCII 65-90, 48-57, 45, 95, 97-122).
    int next;
    while (((next = DataStream.Peek()) >= 65 && next <= 90)
        || (next >= 48 && next <= 57)
        || next == 45
        || next == 95
        || (next >= 97 && next <= 122))
    {
        word.Append((char)DataStream.Read());
        CurrentColumn += 1;
    }

    SToken token;
    switch (word.ToString())
    {
        case @"SCHEMA":
            token = new SToken(STokenType.SCHEMA);
            break;
        case @"ENTITY":
            token = new SToken(STokenType.ENTITY);
            break;
        case @"END_ENTITY":
            token = new SToken(STokenType.END_ENTITY);
            break;
        case @"TYPE":
            token = new SToken(STokenType.TYPE);
            break;
        case @"END_TYPE":
            token = new SToken(STokenType.END_TYPE);
            break;
        case @"ABSTRACT":
            token = new SToken(STokenType.ABSTRACT);
            break;
        case @"SUPERTYPE":
            token = new SToken(STokenType.SUPERTYPE);
            break;
        case @"SUBTYPE":
            token = new SToken(STokenType.SUBTYPE);
            break;
        case @"OF":
            token = new SToken(STokenType.OF);
            break;
        case @"ONEOF":
            token = new SToken(STokenType.ONEOF);
            break;
        case @"OPTIONAL":
            token = new SToken(STokenType.OPTIONAL);
            break;
        case @"INTEGER":
            token = new SToken(STokenType.INTEGER);
            break;
        case @"REAL":
            token = new SToken(STokenType.REAL);
            break;
        case @"STRING":
            token = new SToken(STokenType.STRING);
            break;
        case @"BOOLEAN":
            token = new SToken(STokenType.BOOLEAN);
            break;
        case @"LOGICAL":
            token = new SToken(STokenType.LOGICAL);
            break;
        case @"NUMBER":
            token = new SToken(STokenType.NUMBER);
            break;
        case @"ENUMERATION":
            token = new SToken(STokenType.ENUMERATION);
            break;
        case @"SELECT":
            token = new SToken(STokenType.SELECT);
            break;
        case @"ARRAY":
            token = new SToken(STokenType.ARRAY);
            break;
        case @"LIST":
            token = new SToken(STokenType.LIST);
            break;
        case @"SET":
            token = new SToken(STokenType.SET);
            break;
        case @"BAG":
            token = new SToken(STokenType.BAG);
            break;
        case @"WHERE":
            token = new SToken(STokenType.WHERE);
            break;
        case @"DERIVE":
            token = new SToken(STokenType.DERIVE);
            break;
        case @"INVERSE":
            token = new SToken(STokenType.INVERSE);
            break;
        case @"FIXED":
            token = new SToken(STokenType.FIXED);
            break;
        case @"SELF":
            token = new SToken(STokenType.SELF);
            break;
        case @"UNIQUE":
            token = new SToken(STokenType.UNIQUE);
            break;
        case @"FUNCTION":
            token = new SToken(STokenType.FUNCTION);
            break;
        case @"END_FUNCTION":
            token = new SToken(STokenType.END_FUNCTION);
            break;
        case @"RULE":
            token = new SToken(STokenType.RULE);
            break;
        case @"END_RULE":
            token = new SToken(STokenType.END_RULE);
            break;
        case @"END_SCHEMA":
            token = new SToken(STokenType.END_SCHEMA);
            break;
        default:
            // Not a keyword: keep the raw word.
            token = new SToken(word.ToString());
            break;
    }
    return(token);
}