Example No. 1
        public static bool ParseBitFlagsData(Tokenizer tokenizer, Type resultType, out object result)
        {
            Tokenizer.Token token;
            if ((token = tokenizer.Current()) == null)
            {
                throw new ArgumentException();
            }

            int start = token.Offset;
            int length;
            if (token.Type == Tokenizer.EXPRESSION_STRING)
            {
                // Quoted value: skip the opening quote and drop both quotes from the length.
                ++start;
                length = token.Value.Length - 2;
            }
            else
            {
                // Unquoted value: take the raw text up to the end of the line.
                token = GoToEndOfLine(tokenizer);
                length = token.Offset + token.Length - start;
            }
            if (length == 0)
            {
                result = null;
                return false;
            }
            result = tokenizer.TheString.Substring(start, length);
            result = resultType.GetConstructor(new[] { typeof(object) }).Invoke(new[] { result });
            return true;
        }
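All of the Parse* handlers shown in these examples share one shape: they read from the tokenizer's current position, return the parsed value through an out parameter, and return false when the current token cannot be interpreted. Example No. 8 invokes them through a TheParseHandler property, so they presumably conform to a common delegate; a minimal sketch of what that delegate could look like (the name ParseHandler is an assumption, not taken from the project):

 // Hypothetical delegate matching the signature shared by ParseBitFlagsData,
 // ParseEnum, ParseUInt32, ParseString and ParseEnumData in these examples;
 // the project's actual declaration may differ.
 public delegate bool ParseHandler(Tokenizer tokenizer, Type resultType, out object result);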
Example No. 2
 public static bool ParseEnum(Tokenizer tokenizer, Type resultType, out object result)
 {
     Tokenizer.Token token;
     if ((token = tokenizer.Current()) == null)
     {
         throw new ArgumentException();
     }
     if (token.Type == Tokenizer.EXPRESSION_NUMBER_HEX)
     {
         result = uint.Parse(token.Value, NumberStyles.HexNumber);
     }
     else if (token.Type == Tokenizer.EXPRESSION_STRING)
     {
         result = token.Value.Substring(1, token.Value.Length - 2);
     }
     else if (token.Type == Tokenizer.EXPRESSION_NUMBER || token.Type == Tokenizer.EXPRESSION_WORD)
     {
         result = token.Value;
     }
     else
     {
         result = null;
         return false;
     }
     result = Enum.Parse(resultType, result.ToString(), true);
     return true;
 }
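The closing Enum.Parse call accepts either a member name or its numeric value as a string, and the trailing true makes the name comparison case-insensitive. A self-contained illustration with a hypothetical enum (not part of the project):

 using System;

 enum SourceImageKind { Standard = 0, Classic = 1 }

 static class EnumParseDemo
 {
     static void Main()
     {
         // Both calls print "Classic": names are matched case-insensitively
         // and numeric strings are accepted as-is.
         Console.WriteLine(Enum.Parse(typeof(SourceImageKind), "classic", true));
         Console.WriteLine(Enum.Parse(typeof(SourceImageKind), "1", true));
     }
 }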
Example No. 3
 public static bool IsEndOfLineFileOrComment(Tokenizer tokenizer)
 {
     Tokenizer.Token token = tokenizer.Current();
     return token == null || token.Type == Tokenizer.EXPRESSION_END_OF_LINE || token.Type == EXPRESSION_COMMENT;
 }
Example No. 4
 public static void GoToValue(Tokenizer tokenizer)
 {
     while (IsEndOfLineFileOrComment(tokenizer))
     {
         if (tokenizer.Current() == null)
         {
             break;
         }
         tokenizer.Next();
     }
 }
Example No. 5
 public static Tokenizer.Token GoToEndOfLine(Tokenizer tokenizer)
 {
     Tokenizer.Token previous = null;
     Tokenizer.Token token = tokenizer.Next();
     while (token != null && token.Type != EXPRESSION_COMMENT && token.Type != Tokenizer.EXPRESSION_END_OF_LINE)
     {
         previous = token;
         token = tokenizer.Next();
     }
     return previous;
 }
Example No. 6
 public unsafe void Load(byte[] buffer)
 {
     if (buffer == null)
     {
         return;
     }
     string theString;
     fixed (byte* pBuffer = &buffer[0])
     {
         theString = BufferReader.ReadString(pBuffer, buffer.Length);
     }
     Tokenizer tokenizer = new Tokenizer(theString, _datExpressions, addNewLine: true);
     tokenizer.First();
     Tokenizer.Token token;
     Character currentCharacter;
     while ((token = tokenizer.Current()) != null)
     {
         if (token.Type == CommonParser.EXPRESSION_COMMENT || token.Type == Tokenizer.EXPRESSION_END_OF_LINE)
         {
             CommonParser.GoToValue(tokenizer);
             continue;
         }
         if (token.Type != Tokenizer.EXPRESSION_NUMBER)
         {
             throw new OpenSAGEException(ErrorCode.AptParser, "Token '$TOKEN$' was not recognized", "token", token);
         }
         currentCharacter = new Character(uint.Parse(token.Value));
         if ((token = tokenizer.Next()).Type != EXPRESSION_IMAGE && token.Type != EXPRESSION_TEXTURE)
         {
             throw new OpenSAGEException(ErrorCode.AptParser, "Token '$TOKEN$' was not recognized", "token", token);
         }
         ISource source = null;
         switch (token.Type)
         {
             case EXPRESSION_IMAGE:
                 SourceImage sourceImage = new SourceImage();
                 token = tokenizer.Next();
                 if (token.Type == EXPRESSION_SOURCE_STANDARD)
                 {
                     sourceImage.Type = SourceImageType.STANDARD;
                 }
                 else if (token.Type == EXPRESSION_SOURCE_CLASSIC)
                 {
                     sourceImage.Type = SourceImageType.CLASSIC;
                 }
                 else
                 {
                     throw new OpenSAGEException(ErrorCode.AptParser, "Token '$TOKEN$' was not recognized", "token", token);
                 }
                 sourceImage.X = int.Parse(token.Match.Groups["X"].Value);
                 sourceImage.Y = int.Parse(token.Match.Groups["Y"].Value);
                 sourceImage.Width = int.Parse(token.Match.Groups["Width"].Value);
                 sourceImage.Height = int.Parse(token.Match.Groups["Height"].Value);
                 source = sourceImage;
                 break;
             case EXPRESSION_TEXTURE:
                 SourceTexture sourceTexture = new SourceTexture();
                 token = tokenizer.Next();
                 if (token.Type != Tokenizer.EXPRESSION_NUMBER)
                 {
                     throw new OpenSAGEException(ErrorCode.AptParser, "Token '$TOKEN$' was not recognized", "token", tokenizer.Current());
                 }
                 sourceTexture.Id = int.Parse(token.Value);
                 source = sourceTexture;
                 break;
         }
         currentCharacter.Source = source;
         _characters.Add(currentCharacter);
         tokenizer.Next();
     }
 }
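The fixed statement pins the managed byte[] so the address of its first element can be handed to BufferReader.ReadString as a raw pointer for the duration of the block. A minimal standalone sketch of the same pinning pattern (summing bytes instead of calling the project's reader; requires compiling with unsafe enabled):

 using System;

 static class FixedDemo
 {
     static unsafe void Main()
     {
         byte[] buffer = { 1, 2, 3, 4 };
         int sum = 0;
         fixed (byte* p = &buffer[0])   // buffer is pinned only inside this block
         {
             for (int i = 0; i < buffer.Length; ++i)
             {
                 sum += p[i];
             }
         }
         Console.WriteLine(sum);        // prints 10
     }
 }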
Example No. 7
 public static unsafe ABaseAssetType[] Load(byte[] buffer)
 {
     if (buffer == null)
     {
         return null;
     }
     string theString;
     fixed (byte* pBuffer = &buffer[0])
     {
         theString = BufferReader.ReadString(pBuffer, buffer.Length);
     }
     Tokenizer tokenizer = new Tokenizer(theString, _iniExpressions, addNewLine: true);
     tokenizer.First();
     List<ABaseAssetType> result = new List<ABaseAssetType>();
     Tokenizer.Token token;
     ABaseAssetType currentObject = null;
     IniTypeDescription type = null;
     Match match;
     TypeHash typeHash;
     uint hash;
     while (tokenizer.Current() != null)
     {
         CommonParser.GoToValue(tokenizer);
         if ((token = tokenizer.Current()).Type != EXPRESSION_OBJECTSTART)
         {
             throw new OpenSAGEException(ErrorCode.IniParser, "Token '$TOKEN$' was not recognized", "token", token);
         }
         hash = GetHash(tokenizer);
         match = _objectTypeAndId.Match(token.Value);
         typeHash = match.Groups["type"].Value;
         type = _descriptions.Find(x => x.Name == typeHash);
         if (type == null)
         {
             throw new OpenSAGEException(ErrorCode.IniParser, "No object of type $TYPE$ can be instanciated.", "type", typeHash.TheString);
         }
         currentObject = Activator.CreateInstance(type.Type, match.Groups["id"].Value, hash) as ABaseAssetType;
         tokenizer.Next();
         Parse(tokenizer, currentObject, type);
         result.Add(currentObject);
     }
     return result.ToArray();
 }
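Activator.CreateInstance with extra arguments selects a constructor whose parameters match those arguments at runtime, which is how the loader builds the asset object for the type description it found. A standalone illustration (AssetStub is a hypothetical type, not from the project):

 using System;

 class AssetStub
 {
     public string Id { get; }
     public uint Hash { get; }
     public AssetStub(string id, uint hash) { Id = id; Hash = hash; }
 }

 static class CreateInstanceDemo
 {
     static void Main()
     {
         // Same pattern as the INI loader: the type is resolved at runtime and
         // the constructor arguments are supplied positionally.
         var asset = (AssetStub)Activator.CreateInstance(typeof(AssetStub), "MyObject", 0xDEADBEEFu);
         Console.WriteLine($"{asset.Id} {asset.Hash:X8}");
     }
 }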
Example No. 8
 private static void Parse(Tokenizer tokenizer, object current, IniTypeDescription type)
 {
     Tokenizer.Token token;
     string name;
     TypeHash subType;
     IniTypeDescription childType = null;
     IniTypeAndAlias[] typeAndAliases;
     IniTypeAndAlias typeAndAlias;
     Match match;
     object subObject;
     CommonParser.GoToValue(tokenizer);
     Dictionary<string, int> attributeSetCheck = new Dictionary<string, int>(type.Items.Length);
     for (int idx = 0; idx < type.Items.Length; ++idx)
     {
         attributeSetCheck.Add(type.Items[idx].Name, 0);
     }
     while ((token = tokenizer.Current()).Type != EXPRESSION_OBJECTEND)
     {
         if (token.Type != EXPRESSION_ATTRIBUTE)
         {
             throw new OpenSAGEException(ErrorCode.IniParser, "Token '$TOKEN$' was not recognized", "token", token);
         }
         name = _attributeId.Match(token.Value).Groups["name"].Value;
         IIniDescription itemDescription = null;
         for (int idx = 0; idx < type.Items.Length; ++idx)
         {
             if (type.Items[idx].Name == name)
             {
                 itemDescription = type.Items[idx];
                 break;
             }
         }
         if (itemDescription == null)
         {
             throw new OpenSAGEException(ErrorCode.IniParser, "No property named $NAME$ can be found.", "name", name);
         }
         token = tokenizer.Next();
         if (itemDescription is IniPropertyDescription)
         {
             object result;
             if (!(itemDescription as IniPropertyDescription).TheParseHandler(tokenizer, (itemDescription as IniPropertyDescription).ItemType, out result))
             {
                 throw new OpenSAGEException(ErrorCode.IniParser, "Could not parse property named $NAME$.", "name", name);
             }
             if (itemDescription.MaxOccurrences == 1)
             {
                 itemDescription.Property.SetValue(current, result);
             }
             else
             {
                 if (itemDescription.Property.PropertyType.IsArray)
                 {
                     Array testArray = itemDescription.Property.GetValue(current) as Array;
                     if (attributeSetCheck[name] == testArray.Length)
                     {
                         throw new OpenSAGEException(ErrorCode.IniParser, "Attribute $NAME$ was set more times than its array length ($LENGTH$).",
                             "name", itemDescription.Name, "length", testArray.Length);
                     }
                     (itemDescription.Property.GetValue(current) as Array).SetValue(result, attributeSetCheck[name]);
                 }
                 else
                 {
                     itemDescription.Property.PropertyType.GetMethod("Add").Invoke(itemDescription.Property.GetValue(current), new[] { result });
                 }
             }
         }
         else
         {
             if (token.Type != EXPRESSION_OBJECTSTART)
             {
                 throw new OpenSAGEException(ErrorCode.IniParser, "Token '$TOKEN$' was not recognized", "token", token);
             }
             match = _objectTypeAndId.Match(token.Value);
             subType = match.Groups["type"].Value;
             typeAndAliases = (itemDescription as IniTypePropertyDescription).TypeOptions;
             typeAndAlias = null;
             for (int idx = 0; idx < typeAndAliases.Length; ++idx)
             {
                 if (typeAndAliases[idx].Alias == subType)
                 {
                     typeAndAlias = typeAndAliases[idx];
                 }
             }
             if (typeAndAlias == null)
             {
                 throw new OpenSAGEException(ErrorCode.IniParser, "No object of type $TYPE$ can be instanciated.", "type", subType.TheString);
             }
             childType = _descriptions.Find(x => x.Name == subType);
             if (typeAndAlias == null)
             {
                 throw new OpenSAGEException(ErrorCode.IniParser, "No object of type $TYPE$ can be instanciated.", "type", subType.TheString);
             }
             subObject = Activator.CreateInstance(typeAndAlias.Type, match.Groups["id"].Value);
             tokenizer.Next();
             Parse(tokenizer, subObject, childType);
             if (itemDescription.MaxOccurrences == 1)
             {
                 itemDescription.Property.SetValue(current, subObject);
             }
             else
             {
                 if (itemDescription.Property.PropertyType.IsArray)
                 {
                     Array testArray = itemDescription.Property.GetValue(current) as Array;
                     if (attributeSetCheck[name] == testArray.Length)
                     {
                         throw new OpenSAGEException(ErrorCode.IniParser, "Attribute $NAME$ was set more times than its array length ($LENGTH$).",
                             "name", itemDescription.Name, "length", testArray.Length);
                     }
                     (itemDescription.Property.GetValue(current) as Array).SetValue(subObject, attributeSetCheck[name]);
                 }
                 else
                 {
                     itemDescription.Property.PropertyType.GetMethod("Add").Invoke(itemDescription.Property.GetValue(current), new[] { subObject });
                 }
             }
         }
         ++attributeSetCheck[name];
         tokenizer.Next();
         CommonParser.GoToValue(tokenizer);
     }
     IIniDescription item;
     for (int idx = 0; idx < type.Items.Length; ++idx)
     {
         if ((item = type.Items[idx]).MinOccurrences != 0 && attributeSetCheck[item.Name] < item.MinOccurrences)
         {
             throw new OpenSAGEException(ErrorCode.IniParser, "Attribute $NAME$ was only set $OCCURRENCIES$ times but needs to be set at least $MIN$ times.",
                 "name", item.Name, "occurrencies", attributeSetCheck[item.Name], "min", item.MinOccurrences);
         }
         if (item.MaxOccurrences != 0 && attributeSetCheck[item.Name] > item.MaxOccurrences)
         {
             throw new OpenSAGEException(ErrorCode.IniParser, "Attribute $NAME$ was set $OCCURRENCIES$ times but needs to be set at most $MAX$ times.",
                 "name", item.Name, "occurrencies", attributeSetCheck[item.Name], "max", item.MaxOccurrences);
         }
     }
     tokenizer.Next();
 }
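For multi-valued properties that are not arrays, the parser falls back to resolving the property type's Add method through reflection, which works for List&lt;T&gt; and any other collection exposing a public Add. A standalone illustration of that call pattern:

 using System;
 using System.Collections.Generic;

 static class ReflectionAddDemo
 {
     static void Main()
     {
         object list = new List<string>();
         // Same idea as the parser: look up Add on the collection type and invoke it
         // with the freshly parsed value.
         typeof(List<string>).GetMethod("Add").Invoke(list, new object[] { "value" });
         Console.WriteLine(((List<string>)list).Count);   // prints 1
     }
 }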
Example No. 9
 private static Tokenizer.Token GoToEnd(Tokenizer tokenizer)
 {
     Tokenizer.Token token = tokenizer.Next();
     while (token.Type != EXPRESSION_OBJECTEND)
     {
         if (token.Type == EXPRESSION_OBJECTSTART)
         {
             for (int idx = 0; idx < _descriptions.Count; ++idx)
             {
                 if (_descriptions[idx].Name == new TypeHash(token.Value))
                 {
                     token = GoToEnd(tokenizer);
                     break;
                 }
             }
         }
         token = tokenizer.Next();
     }
     return token;
 }
Example No. 10
 private static uint GetHash(Tokenizer tokenizer)
 {
     // Hash the raw source text of the whole object: a copy of the tokenizer scans
     // ahead to the object end so the caller's position is left untouched.
     Tokenizer endSearcher = new Tokenizer(tokenizer);
     return Hash.GetHash(tokenizer.GetValue(tokenizer.Current(), GoToEnd(endSearcher)));
 }
Example No. 11
 public static bool ParseUInt32(Tokenizer tokenizer, Type resultType, out object result)
 {
     Tokenizer.Token token;
     if ((token = tokenizer.Current()) == null)
     {
         throw new ArgumentException();
     }
     if (token.Type == Tokenizer.EXPRESSION_NUMBER)
     {
         result = uint.Parse(token.Value);
         return true;
     }
     else if (token.Type == Tokenizer.EXPRESSION_NUMBER_HEX)
     {
         result = uint.Parse(token.Value, NumberStyles.HexNumber);
         return true;
     }
     result = null;
     return false;
 }
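uint.Parse with NumberStyles.HexNumber only accepts the bare hex digits; a leading "0x" makes it throw a FormatException, so the EXPRESSION_NUMBER_HEX token value presumably carries digits only (or the prefix is stripped by the tokenizer expression). A quick standalone check:

 using System;
 using System.Globalization;

 static class HexParseDemo
 {
     static void Main()
     {
         Console.WriteLine(uint.Parse("FF", NumberStyles.HexNumber));   // prints 255

         // Would throw FormatException: HexNumber does not allow a "0x" prefix.
         // Console.WriteLine(uint.Parse("0xFF", NumberStyles.HexNumber));
     }
 }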
Example No. 12
 public static bool ParseString(Tokenizer tokenizer, Type resultType, out object result)
 {
     Tokenizer.Token token;
     if ((token = tokenizer.Current()) == null)
     {
         throw new ArgumentException();
     }
     string value = token.Value;
     if (token.Type == Tokenizer.EXPRESSION_STRING)
     {
         result = value.Substring(1, value.Length - 2);
     }
     else
     {
         result = string.Empty;
         while (!IsEndOfLineFileOrComment(tokenizer))
         {
             result += token.Value;
             token = tokenizer.Next();
         }
         tokenizer.Previous();
     }
     (result as string).Replace("\\n", "\n");
     (result as string).Replace("\\r", "\r");
     return true;
 }
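string.Replace never mutates its receiver; it returns a new string, so the escape-sequence replacements above only take effect once their result is assigned back to result. A minimal demonstration:

 using System;

 static class ReplaceDemo
 {
     static void Main()
     {
         string s = "line1\\nline2";
         s.Replace("\\n", "\n");        // return value discarded, s is unchanged
         s = s.Replace("\\n", "\n");    // assignment is required to keep the change
         Console.WriteLine(s);          // prints two lines
     }
 }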
Example No. 13
 public static bool ParseEnumData(Tokenizer tokenizer, Type resultType, out object result)
 {
     Tokenizer.Token token;
     if ((token = tokenizer.Current()) == null)
     {
         throw new ArgumentException();
     }
     if (token.Type == Tokenizer.EXPRESSION_NUMBER_HEX)
     {
         result = uint.Parse(token.Value, NumberStyles.HexNumber);
     }
     else if (token.Type == Tokenizer.EXPRESSION_STRING)
     {
         result = token.Value.Substring(1, token.Value.Length - 2);
     }
     else if (token.Type == Tokenizer.EXPRESSION_NUMBER || token.Type == Tokenizer.EXPRESSION_WORD)
     {
         result = token.Value;
     }
     else
     {
         result = null;
         return false;
     }
     result = resultType.GetConstructor(new[] { typeof(object) }).Invoke(new[] { result });
     return true;
 }
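Like ParseBitFlagsData in Example No. 1, ParseEnumData wraps the raw value by reflecting over the target type's constructor that takes a single object. A standalone illustration with a hypothetical wrapper type:

 using System;

 class EnumDataStub
 {
     public object Raw { get; }
     public EnumDataStub(object raw) { Raw = raw; }
 }

 static class CtorInvokeDemo
 {
     static void Main()
     {
         // Same pattern as the parsers: find the (object) constructor and invoke it.
         object wrapped = typeof(EnumDataStub)
             .GetConstructor(new[] { typeof(object) })
             .Invoke(new object[] { "SOME_VALUE" });
         Console.WriteLine(((EnumDataStub)wrapped).Raw);
     }
 }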
Example No. 14
 public Token(Tokenizer parent, string type, int offset, int length, Match match)
 {
     _parent = parent;
     Type = type;
     Offset = offset;
     Length = length;
     Match = match;
 }
Example No. 15
 public Tokenizer(Tokenizer source)
 {
     _theString = source._theString;
     _tokens = source._tokens;
     _currentToken = source._currentToken;
 }
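This copy constructor shares the source string and token list but keeps its own current position, which is what lets GetHash in Example No. 10 scan ahead with a throwaway Tokenizer while the caller's position stays put. A hedged sketch of that lookahead pattern (GoToEnd and GetValue are the members used in Example No. 10; their exact semantics are assumed here):

 // Sketch only: read the text up to the matching object end without moving
 // the primary tokenizer.
 static string PeekObjectText(Tokenizer tokenizer)
 {
     Tokenizer lookahead = new Tokenizer(tokenizer);   // independent cursor over the same tokens
     Tokenizer.Token start = tokenizer.Current();
     Tokenizer.Token end = GoToEnd(lookahead);         // advances only the copy
     return tokenizer.GetValue(start, end);
 }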
Example No. 16
 public unsafe void Load(byte[] buffer)
 {
     if (buffer == null)
     {
         return;
     }
     string theString;
     fixed (byte* pBuffer = &buffer[0])
     {
         theString = BufferReader.ReadString(pBuffer, buffer.Length);
     }
     Tokenizer tokenizer = new Tokenizer(theString, _geometryExpressions, addNewLine: true);
     tokenizer.First();
     Tokenizer.Token token;
     Character currentCharacter;
     while (tokenizer.Current() != null)
     {
         if (tokenizer.Current().Type != EXPRESSION_CHARACTER)
         {
             throw new OpenSAGEException(ErrorCode.AptParser, "Token '$TOKEN$' was not recognized", "token", tokenizer.Current());
         }
         currentCharacter = new Character();
         tokenizer.Next();
         CommonParser.GoToValue(tokenizer);
         if (tokenizer.Current().Type != EXPRESSION_SHAPE)
         {
             throw new OpenSAGEException(ErrorCode.AptParser, "Token '$TOKEN$' was not recognized", "token", tokenizer.Current());
         }
         IShape shape = null;
         token = tokenizer.Next();
         switch (token.Type)
         {
             case EXPRESSION_SHAPE_SOLID:
                 ShapeSolid shapeSolid = new ShapeSolid();
                 shapeSolid.Color = new Color(
                     byte.Parse(token.Match.Groups["C"].Captures[0].Value),
                     byte.Parse(token.Match.Groups["C"].Captures[1].Value),
                     byte.Parse(token.Match.Groups["C"].Captures[2].Value),
                     byte.Parse(token.Match.Groups["C"].Captures[3].Value));
                 shape = shapeSolid;
                 break;
             case EXPRESSION_SHAPE_LINE:
                 ShapeLine shapeLine = new ShapeLine();
                 shapeLine.Width = float.Parse(token.Match.Groups["Width"].Value);
                 shapeLine.Color = new Color(
                     byte.Parse(token.Match.Groups["C"].Captures[0].Value),
                     byte.Parse(token.Match.Groups["C"].Captures[1].Value),
                     byte.Parse(token.Match.Groups["C"].Captures[2].Value),
                     byte.Parse(token.Match.Groups["C"].Captures[3].Value));
                 shape = shapeLine;
                 break;
             case EXPRESSION_SHAPE_TEXTURE:
                 ShapeTexture shapeTexture = new ShapeTexture();
                 shapeTexture.Color = new Color(
                     byte.Parse(token.Match.Groups["C"].Captures[0].Value),
                     byte.Parse(token.Match.Groups["C"].Captures[1].Value),
                     byte.Parse(token.Match.Groups["C"].Captures[2].Value),
                     byte.Parse(token.Match.Groups["C"].Captures[3].Value));
                 shapeTexture.TextureId = uint.Parse(token.Match.Groups["TextureId"].Value);
                 shapeTexture.Rotation = new Matrix2x2(
                     float.Parse(token.Match.Groups["R"].Captures[0].Value),
                     float.Parse(token.Match.Groups["R"].Captures[1].Value),
                     float.Parse(token.Match.Groups["R"].Captures[2].Value),
                     float.Parse(token.Match.Groups["R"].Captures[3].Value));
                 shapeTexture.Translation = new Vector2(
                     float.Parse(token.Match.Groups["T"].Captures[0].Value),
                     float.Parse(token.Match.Groups["T"].Captures[1].Value));
                 shape = shapeTexture;
                 break;
         }
         currentCharacter.TheShape.Type = shape;
         tokenizer.Next();
         CommonParser.GoToValue(tokenizer);
         while ((token = tokenizer.Current()) != null && token.Type != EXPRESSION_CHARACTER)
         {
             IShapeItem item = null;
             switch (token.Type)
             {
                 case EXPRESSION_LINE:
                     ItemLine itemLine = new ItemLine();
                     itemLine.Start = new Vector2(
                         float.Parse(token.Match.Groups["V"].Captures[0].Value),
                         float.Parse(token.Match.Groups["V"].Captures[1].Value));
                     itemLine.End = new Vector2(
                         float.Parse(token.Match.Groups["V"].Captures[2].Value),
                         float.Parse(token.Match.Groups["V"].Captures[3].Value));
                     item = itemLine;
                     break;
                 case EXPRESSION_TRIANGLE:
                     ItemTriangle itemTriangle = new ItemTriangle();
                     itemTriangle.V0 = new Vector2(
                         float.Parse(token.Match.Groups["V"].Captures[0].Value),
                         float.Parse(token.Match.Groups["V"].Captures[1].Value));
                     itemTriangle.V1 = new Vector2(
                         float.Parse(token.Match.Groups["V"].Captures[2].Value),
                         float.Parse(token.Match.Groups["V"].Captures[3].Value));
                     itemTriangle.V2 = new Vector2(
                         float.Parse(token.Match.Groups["V"].Captures[4].Value),
                         float.Parse(token.Match.Groups["V"].Captures[5].Value));
                     item = itemTriangle;
                     break;
             }
             currentCharacter.TheShape.Items.Add(item);
             tokenizer.Next();
             CommonParser.GoToValue(tokenizer);
         }
         _characters.Add(currentCharacter);
     }
 }
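The color, rotation and vertex parsing above relies on regex groups that match repeatedly, so each repetition is kept in Match.Groups[...].Captures instead of overwriting the group. A small standalone example of that behaviour (the pattern here is illustrative, not the project's actual expression):

 using System;
 using System.Text.RegularExpressions;

 static class CapturesDemo
 {
     static void Main()
     {
         // A group inside a repeated construct records every repetition in Captures.
         Match m = Regex.Match("c:255 0 128 64", @"c:(?:(?<C>\d+)\s*){4}");
         for (int i = 0; i < m.Groups["C"].Captures.Count; ++i)
         {
             Console.WriteLine(m.Groups["C"].Captures[i].Value);   // 255, 0, 128, 64
         }
     }
 }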