/// <summary>
/// Parses the tokens of the chunk.
/// </summary>
/// <param name="tokens">The tokens.</param>
internal override void ParseTokens(string[] tokens)
{
    // First pass: pick out the name and material sub-chunks.
    foreach (var child in Tokenizer.SplitChunks(tokens))
    {
        int pos = 0;

        if (child.Ident == null)
        {
            // An unnamed chunk carries the object's name.
            this.Name = Tokenizer.ReadString(child.Tokens, ref pos);
        }
        else if (child.Ident == "material")
        {
            var material = new An8Material();
            material.ParseTokens(child.Tokens);
            this.Materials.Add(material);
        }
    }

    // Second pass: components are parsed from the full token stream.
    foreach (var component in An8Component.ParseComponents(tokens))
    {
        this.Components.Add(component);
    }
}
/// <summary>
/// Parses the tokens of the chunk.
/// </summary>
/// <param name="tokens">The tokens.</param>
internal override void ParseTokens(string[] tokens)
{
    base.ParseTokens(tokens);

    foreach (var child in Tokenizer.SplitChunks(tokens))
    {
        int pos = 0;

        if (child.Ident == "material")
        {
            var material = new An8Material();
            material.ParseTokens(child.Tokens);
            this.Material = material;
        }
        else if (child.Ident == "longlat")
        {
            // The two division modes are mutually exclusive:
            // selecting one clears the other.
            this.Geodesic = null;
            this.LongLatDivisions = An8LongLat.ReadTokens(child.Tokens, ref pos);
        }
        else if (child.Ident == "geodesic")
        {
            this.LongLatDivisions = null;
            this.Geodesic = Tokenizer.ReadInt(child.Tokens, ref pos);
        }
    }
}
/// <summary>
/// Parses the tokens of the chunk.
/// </summary>
/// <param name="tokens">The tokens.</param>
internal override void ParseTokens(string[] tokens)
{
    base.ParseTokens(tokens);

    foreach (var child in Tokenizer.SplitChunks(tokens))
    {
        int pos = 0;

        if (child.Ident == null)
        {
            // An unnamed chunk carries the referenced object's name.
            this.ObjectName = Tokenizer.ReadString(child.Tokens, ref pos);
        }
        else if (child.Ident == "material")
        {
            var material = new An8Material();
            material.ParseTokens(child.Tokens);
            this.Material = material;
        }
        else if (child.Ident == "weightedby")
        {
            // Each "weightedby" chunk appends one bone name.
            this.WeightedBy.Add(Tokenizer.ReadString(child.Tokens, ref pos));
        }
    }
}
/// <summary>
/// Parses the tokens of the chunk.
/// </summary>
/// <param name="tokens">The tokens.</param>
internal override void ParseTokens(string[] tokens)
{
    base.ParseTokens(tokens);

    foreach (var child in Tokenizer.SplitChunks(tokens))
    {
        int pos = 0;

        if (child.Ident == "material")
        {
            var material = new An8Material();
            material.ParseTokens(child.Tokens);
            this.Material = material;
        }
        else if (child.Ident == "scale")
        {
            // Three floats read in X, Y, Z order from the same cursor.
            this.ScaleX = Tokenizer.ReadFloat(child.Tokens, ref pos);
            this.ScaleY = Tokenizer.ReadFloat(child.Tokens, ref pos);
            this.ScaleZ = Tokenizer.ReadFloat(child.Tokens, ref pos);
        }
        else if (child.Ident == "divisions")
        {
            // Three ints read in X, Y, Z order from the same cursor.
            this.DivisionsX = Tokenizer.ReadInt(child.Tokens, ref pos);
            this.DivisionsY = Tokenizer.ReadInt(child.Tokens, ref pos);
            this.DivisionsZ = Tokenizer.ReadInt(child.Tokens, ref pos);
        }
    }
}
/// <summary>
/// Parses the tokens of the chunk.
/// </summary>
/// <param name="tokens">The tokens.</param>
internal override void ParseTokens(string[] tokens)
{
    foreach (var child in Tokenizer.SplitChunks(tokens))
    {
        int pos = 0;

        if (child.Ident == null)
        {
            // An unnamed chunk carries the figure's name.
            this.Name = Tokenizer.ReadString(child.Tokens, ref pos);
        }
        else if (child.Ident == "material")
        {
            var material = new An8Material();
            material.ParseTokens(child.Tokens);
            this.Materials.Add(material);
        }
        else if (child.Ident == "bone")
        {
            // If several "bone" chunks appear, the last one wins.
            var bone = new An8Bone();
            bone.ParseTokens(child.Tokens);
            this.RootBone = bone;
        }
    }
}
/// <summary>
/// Parses a file content.
/// </summary>
/// <param name="text">The text.</param>
public void Parse(string text)
{
    foreach (var chunk in Tokenizer.SplitChunks(text))
    {
        int pos = 0;

        switch (chunk.Ident)
        {
            case "header":
                this.Header.ParseTokens(chunk.Tokens);
                break;

            case "description":
                this.Description = Tokenizer.ReadString(chunk.Tokens, ref pos);
                break;

            case "environment":
                this.Environment.ParseTokens(chunk.Tokens);
                break;

            case "texture":
            {
                var texture = new An8Texture();
                texture.ParseTokens(chunk.Tokens);
                this.Textures.Add(texture);
                break;
            }

            case "material":
            {
                var material = new An8Material();
                material.ParseTokens(chunk.Tokens);
                this.Materials.Add(material);
                break;
            }

            case "object":
            {
                var obj = new An8Object();
                obj.ParseTokens(chunk.Tokens);
                this.Objects.Add(obj);
                break;
            }

            case "figure":
            {
                var figure = new An8Figure();
                figure.ParseTokens(chunk.Tokens);
                this.Figures.Add(figure);
                break;
            }
        }
    }
}
/// <summary>
/// Parses the tokens of the chunk.
/// </summary>
/// <param name="tokens">The tokens.</param>
internal override void ParseTokens(string[] tokens)
{
    base.ParseTokens(tokens);

    foreach (var child in Tokenizer.SplitChunks(tokens))
    {
        int pos = 0;

        if (child.Ident == "material")
        {
            var material = new An8Material();
            material.ParseTokens(child.Tokens);
            this.Material = material;
        }
        else if (child.Ident == "length")
        {
            this.Length = Tokenizer.ReadFloat(child.Tokens, ref pos);
        }
        else if (child.Ident == "diameter")
        {
            this.Diameter = Tokenizer.ReadFloat(child.Tokens, ref pos);
        }
        else if (child.Ident == "topdiameter")
        {
            this.TopDiameter = Tokenizer.ReadFloat(child.Tokens, ref pos);
        }
        else if (child.Ident == "longlat")
        {
            this.LongLatDivisions = An8LongLat.ReadTokens(child.Tokens, ref pos);
        }
        else if (child.Ident == "capstart")
        {
            // Flag chunk: its mere presence enables the start cap.
            this.IsStartCapped = true;
        }
        else if (child.Ident == "capend")
        {
            // Flag chunk: its mere presence enables the end cap.
            this.IsEndCapped = true;
        }
    }
}
/// <summary>
/// Parses the tokens of the chunk.
/// </summary>
/// <param name="tokens">The tokens.</param>
internal override void ParseTokens(string[] tokens)
{
    base.ParseTokens(tokens);

    foreach (var child in Tokenizer.SplitChunks(tokens))
    {
        int pos = 0;

        switch (child.Ident)
        {
            case "material":
            {
                var material = new An8Material();
                material.ParseTokens(child.Tokens);
                this.Material = material;
                break;
            }

            case "smoothangle":
                this.SmoothAngleThreshold = Tokenizer.ReadFloat(child.Tokens, ref pos);
                break;

            case "working":
                this.Divisions = Tokenizer.ReadInt(child.Tokens, ref pos);
                break;

            case "divisions":
                this.RenderDivisions = Tokenizer.ReadInt(child.Tokens, ref pos);
                break;

            case "materiallist":
                // Replace the list with the names found in "materialname"
                // sub-chunks, in the order they appear.
                this.MaterialList.Clear();
                foreach (var sub in Tokenizer.SplitChunks(child.Tokens))
                {
                    if (string.Equals(sub.Ident, "materialname", StringComparison.Ordinal))
                    {
                        int subPos = 0;
                        this.MaterialList.Add(Tokenizer.ReadString(sub.Tokens, ref subPos));
                    }
                }

                break;

            case "points":
                this.Points.Clear();
                while (pos < child.Tokens.Length)
                {
                    this.Points.Add(An8Point.ReadTokens(child.Tokens, ref pos));
                }

                break;

            case "normals":
                this.Normals.Clear();
                while (pos < child.Tokens.Length)
                {
                    this.Normals.Add(An8Point.ReadTokens(child.Tokens, ref pos));
                }

                break;

            case "edges":
                this.Edges.Clear();
                while (pos < child.Tokens.Length)
                {
                    this.Edges.Add(An8Edge.ReadTokens(child.Tokens, ref pos));
                }

                break;

            case "texcoords":
                this.TexCoords.Clear();
                while (pos < child.Tokens.Length)
                {
                    this.TexCoords.Add(An8TexCoord.ReadTokens(child.Tokens, ref pos));
                }

                break;

            case "faces":
                this.Faces.Clear();
                while (pos < child.Tokens.Length)
                {
                    this.Faces.Add(An8Face.ReadTokens(child.Tokens, ref pos));
                }

                break;
        }
    }
}