/// <summary>
/// Parses the tokens of this chunk, dispatching each recognized sub-chunk
/// ("material", "smoothangle", "working", "divisions", "materiallist",
/// "points", "normals", "edges", "texcoords", "faces") to the matching property.
/// Unrecognized sub-chunks are ignored.
/// </summary>
/// <param name="tokens">The tokens.</param>
internal override void ParseTokens(string[] tokens)
{
    base.ParseTokens(tokens);

    foreach (var chunk in Tokenizer.SplitChunks(tokens))
    {
        // Each chunk is parsed from its first token; list-style chunks
        // advance this cursor until the chunk's tokens are exhausted.
        int index = 0;

        switch (chunk.Ident)
        {
            case "material":
            {
                var material = new An8Material();
                material.ParseTokens(chunk.Tokens);
                this.Material = material;
                break;
            }

            case "smoothangle":
                this.SmoothAngleThreshold = Tokenizer.ReadFloat(chunk.Tokens, ref index);
                break;

            case "working":
                // "working" holds the working (editor) subdivision count.
                this.Divisions = Tokenizer.ReadInt(chunk.Tokens, ref index);
                break;

            case "divisions":
                // "divisions" holds the render-time subdivision count.
                this.RenderDivisions = Tokenizer.ReadInt(chunk.Tokens, ref index);
                break;

            case "materiallist":
                this.MaterialList.Clear();

                foreach (var sub in Tokenizer.SplitChunks(chunk.Tokens))
                {
                    // Only "materialname" entries contribute to the list.
                    if (string.Equals(sub.Ident, "materialname", StringComparison.Ordinal))
                    {
                        int nameIndex = 0;
                        this.MaterialList.Add(Tokenizer.ReadString(sub.Tokens, ref nameIndex));
                    }
                }

                break;

            case "points":
                this.Points.Clear();

                while (index < chunk.Tokens.Length)
                {
                    this.Points.Add(An8Point.ReadTokens(chunk.Tokens, ref index));
                }

                break;

            case "normals":
                this.Normals.Clear();

                while (index < chunk.Tokens.Length)
                {
                    this.Normals.Add(An8Point.ReadTokens(chunk.Tokens, ref index));
                }

                break;

            case "edges":
                this.Edges.Clear();

                while (index < chunk.Tokens.Length)
                {
                    this.Edges.Add(An8Edge.ReadTokens(chunk.Tokens, ref index));
                }

                break;

            case "texcoords":
                this.TexCoords.Clear();

                while (index < chunk.Tokens.Length)
                {
                    this.TexCoords.Add(An8TexCoord.ReadTokens(chunk.Tokens, ref index));
                }

                break;

            case "faces":
                this.Faces.Clear();

                while (index < chunk.Tokens.Length)
                {
                    this.Faces.Add(An8Face.ReadTokens(chunk.Tokens, ref index));
                }

                break;
        }
    }
}
/// <summary>
/// Reads the tokens of a single face: point count, flag bits
/// (bit 0: back shown, bit 1: per-point normals present, bit 2: per-point
/// texture coordinates present), material index, flat-normal index, then a
/// parenthesized list of per-point index tuples.
/// </summary>
/// <param name="tokens">The tokens.</param>
/// <param name="index">The index; advanced past the face's tokens.</param>
/// <returns>A face.</returns>
internal static An8Face ReadTokens(string[] tokens, ref int index)
{
    var face = new An8Face();

    int pointCount = Tokenizer.ReadInt(tokens, ref index);
    int flags = Tokenizer.ReadInt(tokens, ref index);

    if ((flags & 1) != 0)
    {
        face.IsBackShown = true;
    }

    bool withNormals = (flags & 2) != 0;
    bool withTexCoords = (flags & 4) != 0;

    face.MaterialIndex = Tokenizer.ReadInt(tokens, ref index);
    face.FlatNormalIndex = Tokenizer.ReadInt(tokens, ref index);

    // Optional index arrays exist only when the matching flag bit is set.
    face.PointIndexes = new int[pointCount];
    face.NormalIndexes = withNormals ? new int[pointCount] : null;
    face.TexCoordIndexes = withTexCoords ? new int[pointCount] : null;

    // Outer parenthesis wraps the whole tuple list; each tuple has its own pair.
    Tokenizer.ReadOpenData(tokens, ref index);

    for (int i = 0; i < pointCount; i++)
    {
        Tokenizer.ReadOpenData(tokens, ref index);

        face.PointIndexes[i] = Tokenizer.ReadInt(tokens, ref index);

        if (withNormals)
        {
            face.NormalIndexes[i] = Tokenizer.ReadInt(tokens, ref index);
        }

        if (withTexCoords)
        {
            face.TexCoordIndexes[i] = Tokenizer.ReadInt(tokens, ref index);
        }

        Tokenizer.ReadCloseData(tokens, ref index);
    }

    Tokenizer.ReadCloseData(tokens, ref index);

    return face;
}