private void ParsePolygons(idLexer lexer, CollisionModel model)
{
    idToken token = lexer.CheckTokenType(TokenType.Number, 0);
    float[] tmp;
    Vector3 normal;

    lexer.ExpectTokenString("{");

    while(lexer.CheckTokenString("}") == false)
    {
        // parse polygon
        int edgeCount = lexer.ParseInt();

        CollisionModelPolygon p = new CollisionModelPolygon();
        p.Material = _traceModelMaterial;
        p.Contents = ContentFlags.All;
        p.Edges = new int[edgeCount];

        lexer.ExpectTokenString("(");

        for(int i = 0; i < edgeCount; i++)
        {
            p.Edges[i] = lexer.ParseInt();
        }

        lexer.ExpectTokenString(")");

        tmp = lexer.Parse1DMatrix(3);
        normal = new Vector3(tmp[0], tmp[1], tmp[2]);

        p.Plane.Normal = normal;
        p.Plane.D = lexer.ParseFloat();

        tmp = lexer.Parse1DMatrix(3);
        p.Bounds.Min = new Vector3(tmp[0], tmp[1], tmp[2]);

        tmp = lexer.Parse1DMatrix(3);
        p.Bounds.Max = new Vector3(tmp[0], tmp[1], tmp[2]);

        token = lexer.ExpectTokenType(TokenType.String, 0);

        // get material
        p.Material = idE.DeclManager.FindMaterial(token.ToString());
        p.Contents = p.Material.ContentFlags;
        p.CheckCount = 0;

        // filter polygon into tree
        FilterPolygonIntoTree(model, model.Node, p);
    }
}
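// Illustrative only: the shape of the "polygons" block consumed above, reconstructed
// from the Expect/Parse calls (the optional number before "{" is the polygon memory
// size that CheckTokenType skips over; all values here are invented):
//
//  polygons /* polygonMemory = */ 360 {
//      4 ( 0 1 2 3 ) ( 0 0 1 ) 64 ( -128 -128 64 ) ( 128 128 64 ) "textures/base_floor/a_sfloor_f1"
//      ...
//  }
//
// Per polygon: edge count, edge indices, plane normal and distance, bounds min/max,
// then the material name.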
private void ParseEdges(idLexer lexer, CollisionModel model)
{
    lexer.ExpectTokenString("{");

    int edgeCount = lexer.ParseInt();
    model.Edges = new CollisionModelEdge[edgeCount];

    for(int i = 0; i < edgeCount; i++)
    {
        lexer.ExpectTokenString("(");
        model.Edges[i].VertexCount = new int[] { lexer.ParseInt(), lexer.ParseInt() };
        lexer.ExpectTokenString(")");

        model.Edges[i].Side = 0;
        model.Edges[i].SideSet = 0;
        model.Edges[i].Internal = (ushort) lexer.ParseInt();
        model.Edges[i].UserCount = (ushort) lexer.ParseInt();
        model.Edges[i].Normal = Vector3.Zero;
        model.Edges[i].CheckCount = 0;

        model.InternalEdgeCount += model.Edges[i].Internal;
    }

    lexer.ExpectTokenString("}");
}
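// Illustrative only (values invented): the "edges" block the method above reads.
// The two ints parsed into VertexCount are the edge's two vertex indices, followed
// by the internal-edge flag and the user count:
//
//  edges { /* numEdges = */ 23
//      ( 0 0 ) 0 0
//      ( 1 2 ) 0 2
//      ...
//  }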
private CollisionModelNode ParseNodes(idLexer lexer, CollisionModel model, CollisionModelNode parent)
{
    model.NodeCount++;

    lexer.ExpectTokenString("(");

    CollisionModelNode node = new CollisionModelNode();
    node.Parent = parent;
    node.PlaneType = lexer.ParseInt();
    node.PlaneDistance = lexer.ParseFloat();

    lexer.ExpectTokenString(")");

    if(node.PlaneType != -1)
    {
        node.Children[0] = ParseNodes(lexer, model, node);
        node.Children[1] = ParseNodes(lexer, model, node);
    }

    return node;
}
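// Illustrative only (values invented): nodes are stored pre-order as
// ( planeType planeDistance ) pairs, where planeType -1 marks a leaf and any
// other value is followed by its two child subtrees:
//
//  nodes {
//      ( 0 1024 )
//          ( -1 0 )
//          ( -1 0 )
//  }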
private void ParseBrushes(idLexer lexer, CollisionModel model)
{
    idToken token = lexer.CheckTokenType(TokenType.Number, 0);
    int planeCount;
    CollisionModelBrush b;
    float[] tmp;

    lexer.ExpectTokenString("{");

    while(lexer.CheckTokenString("}") == false)
    {
        // parse brush
        planeCount = lexer.ParseInt();

        b = new CollisionModelBrush();
        b.Contents = ContentFlags.All;
        b.Material = _traceModelMaterial;
        b.Planes = new Plane[planeCount];

        lexer.ExpectTokenString("{");

        for(int i = 0; i < planeCount; i++)
        {
            tmp = lexer.Parse1DMatrix(3);

            b.Planes[i].Normal = new Vector3(tmp[0], tmp[1], tmp[2]);
            b.Planes[i].D = lexer.ParseFloat();
        }

        lexer.ExpectTokenString("}");

        tmp = lexer.Parse1DMatrix(3);
        b.Bounds.Min = new Vector3(tmp[0], tmp[1], tmp[2]);

        tmp = lexer.Parse1DMatrix(3);
        b.Bounds.Max = new Vector3(tmp[0], tmp[1], tmp[2]);

        token = lexer.ReadToken();

        if(token.Type == TokenType.Number)
        {
            b.Contents = (ContentFlags) token.ToInt32(); // old .cm files use a single integer
        }
        else
        {
            b.Contents = ContentsFromString(token.ToString());
        }

        b.CheckCount = 0;
        b.PrimitiveCount = 0;

        // filter brush into tree
        FilterBrushIntoTree(model, model.Node, b);
    }
}
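// Illustrative only (values invented): one brush from a "brushes" block, as read
// above. The number before "{" is the optional brush memory size; contents may be
// a single integer in old files or a flag string in newer ones:
//
//  brushes /* brushMemory = */ 840 {
//      6 {
//          ( 0 0 -1 ) -64
//          ( 0 0 1 ) 64
//          ( 0 -1 0 ) -128
//          ( 0 1 0 ) 128
//          ( -1 0 0 ) -128
//          ( 1 0 0 ) 128
//      } ( -128 -128 -64 ) ( 128 128 64 ) "solid,opaque"
//  }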
private bool ParseCollisionModel(idLexer lexer)
{
    CollisionModel model = new CollisionModel();
    _models[_modelCount++] = model;

    // parse the file
    idToken token = lexer.ExpectTokenType(TokenType.String, 0);
    string tokenLower;

    model.Name = token.ToString();
    lexer.ExpectTokenString("{");

    while(lexer.CheckTokenString("}") == false)
    {
        token = lexer.ReadToken();
        tokenLower = token.ToString().ToLower();

        if(tokenLower == "vertices")
        {
            ParseVertices(lexer, model);
        }
        else if(tokenLower == "edges")
        {
            ParseEdges(lexer, model);
        }
        else if(tokenLower == "nodes")
        {
            lexer.ExpectTokenString("{");
            model.Node = ParseNodes(lexer, model, null);
            lexer.ExpectTokenString("}");
        }
        else if(tokenLower == "polygons")
        {
            ParsePolygons(lexer, model);
        }
        else if(tokenLower == "brushes")
        {
            ParseBrushes(lexer, model);
        }
        else
        {
            lexer.Error("ParseCollisionModel: bad token \"{0}\"", token);
        }
    }

    // calculate edge normals
    _checkCount++;

    idConsole.Warning("TODO: CalculateEdgeNormals(model, model.Node);");

    // get model bounds from brush and polygon bounds
    model.Bounds = GetNodeBounds(model.Node);

    // get model contents
    model.Contents = GetNodeContents(model.Node);

    idConsole.Warning("TODO: used memory");

    // total memory used by this model
    /*model->usedMemory = model->numVertices * sizeof(cm_vertex_t) +
        model->numEdges * sizeof(cm_edge_t) +
        model->polygonMemory +
        model->brushMemory +
        model->numNodes * sizeof(cm_node_t) +
        model->numPolygonRefs * sizeof(cm_polygonRef_t) +
        model->numBrushRefs * sizeof(cm_brushRef_t);*/

    return true;
}
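// Illustrative only: the overall layout of one collisionModel section, tying the
// parsers above together (the "collisionModel" keyword itself is consumed by
// LoadCollisionModelFile):
//
//  collisionModel "models/mapobjects/chairs/kitchenchair.lwo" {
//      vertices { ... }
//      edges { ... }
//      nodes { ... }
//      polygons /* polygonMemory = */ 360 { ... }
//      brushes /* brushMemory = */ 840 { ... }
//  }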
public static idMapBrush Parse(idLexer lexer, Vector3 origin, bool newFormat = true, float version = idMapFile.CurrentMapVersion)
{
    idToken token;
    idMapBrushSide side;
    List<idMapBrushSide> sides = new List<idMapBrushSide>();
    idDict dict = new idDict();
    Vector3[] planePoints = new Vector3[3];

    if(lexer.ExpectTokenString("{") == false)
    {
        return null;
    }

    do
    {
        if((token = lexer.ReadToken()) == null)
        {
            lexer.Error("idMapBrush::Parse: unexpected EOF");
            return null;
        }

        if(token.ToString() == "}")
        {
            break;
        }

        // here we may have to jump over brush epairs (only used in editor)
        do
        {
            // if token is a brace
            if(token.ToString() == "(")
            {
                break;
            }

            // the token should be a key string for a key/value pair
            if(token.Type != TokenType.String)
            {
                lexer.Error("idMapBrush::Parse: unexpected {0}, expected ( or epair key string", token.ToString());
                return null;
            }

            string key = token.ToString();

            if(((token = lexer.ReadTokenOnLine()) == null) || (token.Type != TokenType.String))
            {
                lexer.Error("idMapBrush::Parse: expected epair value string not found");
                return null;
            }

            dict.Set(key, token.ToString());

            // try to read the next key
            if((token = lexer.ReadToken()) == null)
            {
                lexer.Error("idMapBrush::Parse: unexpected EOF");
                return null;
            }
        }
        while(true);

        lexer.UnreadToken = token;

        side = new idMapBrushSide();
        sides.Add(side);

        if(newFormat == true)
        {
            float[] tmp = lexer.Parse1DMatrix(4);

            if(tmp == null)
            {
                lexer.Error("idMapBrush::Parse: unable to read brush side plane definition");
                return null;
            }
            else
            {
                side.Plane = new Plane(tmp[0], tmp[1], tmp[2], tmp[3]);
            }
        }
        else
        {
            // read the three point plane definition
            float[] tmp, tmp2, tmp3;

            if(((tmp = lexer.Parse1DMatrix(3)) == null)
                || ((tmp2 = lexer.Parse1DMatrix(3)) == null)
                || ((tmp3 = lexer.Parse1DMatrix(3)) == null))
            {
                lexer.Error("idMapBrush::Parse: unable to read brush side plane definition");
                return null;
            }

            planePoints[0] = new Vector3(tmp[0], tmp[1], tmp[2]) - origin;
            planePoints[1] = new Vector3(tmp2[0], tmp2[1], tmp2[2]) - origin;
            planePoints[2] = new Vector3(tmp3[0], tmp3[1], tmp3[2]) - origin;

            side.Plane.FromPoints(planePoints[0], planePoints[1], planePoints[2]);
        }

        // read the texture matrix
        // this is odd, because the texmat is 2D relative to default planar texture axis
        float[,] tmp5 = lexer.Parse2DMatrix(2, 3);

        if(tmp5 == null)
        {
            lexer.Error("idMapBrush::Parse: unable to read brush side texture matrix");
            return null;
        }

        side.TextureMatrix[0] = new Vector3(tmp5[0, 0], tmp5[0, 1], tmp5[0, 2]);
        side.TextureMatrix[1] = new Vector3(tmp5[1, 0], tmp5[1, 1], tmp5[1, 2]);
        side.Origin = origin;

        // read the material
        if((token = lexer.ReadTokenOnLine()) == null)
        {
            lexer.Error("idMapBrush::Parse: unable to read brush side material");
            return null;
        }

        // we had an implicit 'textures/' in the old format...
        if(version < 2.0f)
        {
            side.Material = "textures/" + token.ToString();
        }
        else
        {
            side.Material = token.ToString();
        }

        // Q2 allowed override of default flags and values, but we don't any more
        if(lexer.ReadTokenOnLine() != null)
        {
            if(lexer.ReadTokenOnLine() != null)
            {
                if(lexer.ReadTokenOnLine() != null)
                {
                }
            }
        }
    }
    while(true);

    if(lexer.ExpectTokenString("}") == false)
    {
        return null;
    }

    idMapBrush brush = new idMapBrush();

    foreach(idMapBrushSide s in sides)
    {
        brush.AddSide(s);
    }

    brush.Dict = dict;

    return brush;
}
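// Illustrative only (values invented): one brush side in the new (brushDef3)
// format read above, i.e. a plane equation, a 2x3 texture matrix, the material
// name, and up to three legacy Q2 numbers that are read and discarded:
//
//  ( 0 0 1 -64 ) ( ( 0.03125 0 0 ) ( 0 0.03125 0 ) ) "textures/base_wall/lfwall13f3" 0 0 0
//
// In the old format the plane is given as three points instead:
//
//  ( -64 -64 0 ) ( 64 -64 0 ) ( 64 64 0 ) ...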
private void ParseInterAreaPortals(idLexer lexer)
{
    lexer.ExpectTokenString("{");

    _portalAreaCount = lexer.ParseInt();

    if(_portalAreaCount < 0)
    {
        lexer.Error("ParseInterAreaPortals: bad portalAreaCount");
    }

    _portalAreas = new PortalArea[_portalAreaCount];
    _areaScreenRect = new idScreenRect[_portalAreaCount];

    for(int i = 0; i < _portalAreaCount; i++)
    {
        _portalAreas[i] = new PortalArea();
        _areaScreenRect[i] = new idScreenRect();
    }

    // set the doubly linked lists
    SetupAreaReferences();

    _interAreaPortalCount = lexer.ParseInt();

    if(_interAreaPortalCount < 0)
    {
        lexer.Error("ParseInterAreaPortals: bad interAreaPortalCount");
    }

    _doublePortals = new DoublePortal[_interAreaPortalCount];

    for(int i = 0; i < _interAreaPortalCount; i++)
    {
        _doublePortals[i] = new DoublePortal();

        int pointCount = lexer.ParseInt();
        int a1 = lexer.ParseInt();
        int a2 = lexer.ParseInt();

        idWinding w = new idWinding(pointCount);

        for(int j = 0; j < pointCount; j++)
        {
            float[] tmp = lexer.Parse1DMatrix(3);

            w[j, 0] = tmp[0];
            w[j, 1] = tmp[1];
            w[j, 2] = tmp[2];

            // no texture coordinates
            w[j, 3] = 0;
            w[j, 4] = 0;
        }

        // add the portal to a1
        Portal p = new Portal();
        p.IntoArea = a2;
        p.DoublePortal = _doublePortals[i];
        p.Winding = w;
        p.Plane = w.GetPlane();
        p.Next = _portalAreas[a1].Portals;

        _portalAreas[a1].Portals = p;
        _doublePortals[i].Portals[0] = p;

        // reverse it for a2; the plane must come from the reversed winding so it
        // faces the opposite way
        idWinding reversed = w.Reverse();

        p = new Portal();
        p.IntoArea = a1;
        p.DoublePortal = _doublePortals[i];
        p.Winding = reversed;
        p.Plane = reversed.GetPlane();
        p.Next = _portalAreas[a2].Portals;

        _portalAreas[a2].Portals = p;
        _doublePortals[i].Portals[1] = p;
    }

    lexer.ExpectTokenString("}");
}
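// Illustrative only (values invented): an interAreaPortals block from a .proc
// file as consumed above - area count, portal count, then per portal the point
// count, the two area numbers and the winding points:
//
//  interAreaPortals { /* numAreas = */ 2 /* numIAP = */ 1
//      4 0 1 ( 128 0 0 ) ( 128 128 0 ) ( 128 128 128 ) ( 128 0 128 )
//  }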
private void MatchAndAppendToken(StringBuilder b, idLexer lexer, string match)
{
    if(lexer.ExpectTokenString(match) == false)
    {
        return;
    }

    // a matched token won't need a leading space
    b.Append(match);
}
/// <summary>
/// Sets defaultShader and returns false if the next token doesn't match.
/// </summary>
/// <param name="lexer"></param>
/// <param name="match"></param>
/// <returns></returns>
private bool MatchToken(idLexer lexer, string match)
{
    if(lexer.ExpectTokenString(match) == false)
    {
        this.MaterialFlag = MaterialFlags.Defaulted;
        return false;
    }

    return true;
}
/// <summary>
/// Used for initial loads, reloadModel, and reloading the data of purged models.
/// </summary>
/// <remarks>
/// Upon exit, the model will absolutely be valid, but possibly as a default model.
/// </remarks>
public override void Load()
{
    if(this.Disposed == true)
    {
        throw new ObjectDisposedException(this.GetType().Name);
    }

    if(_purged == false)
    {
        Purge();
    }

    _purged = false;

    idLexer lexer = new idLexer(LexerOptions.AllowPathNames | LexerOptions.NoStringEscapeCharacters);

    if(lexer.LoadFile(Name) == false)
    {
        MakeDefault();
        return;
    }

    lexer.ExpectTokenString(VersionString);

    int version = lexer.ParseInt();
    int count = 0;
    idToken token;

    if(version != Version)
    {
        lexer.Error("Invalid version {0}. Should be version {1}", version, Version);
    }

    //
    // skip commandline
    //
    lexer.ExpectTokenString("commandline");
    lexer.ReadToken();

    // parse num joints
    lexer.ExpectTokenString("numJoints");

    count = lexer.ParseInt();

    _joints = new idMD5Joint[count];
    _defaultPose = new idJointQuaternion[count];

    idJointMatrix[] poseMat3 = new idJointMatrix[count];

    // parse num meshes
    lexer.ExpectTokenString("numMeshes");

    count = lexer.ParseInt();

    if(count < 0)
    {
        lexer.Error("Invalid size: {0}", count);
    }

    _meshes = new idMD5Mesh[count];

    //
    // parse joints
    //
    lexer.ExpectTokenString("joints");
    lexer.ExpectTokenString("{");

    int jointCount = _joints.Length;

    for(int i = 0; i < jointCount; i++)
    {
        idMD5Joint joint = _joints[i] = new idMD5Joint();
        idJointQuaternion pose = new idJointQuaternion();

        ParseJoint(lexer, joint, ref pose);

        poseMat3[i] = idJointMatrix.Zero;
        poseMat3[i].Rotation = Matrix.CreateFromQuaternion(pose.Quaternion);
        poseMat3[i].Translation = pose.Translation;

        if(joint.Parent != null)
        {
            int parentIndex = GetJointIndex(joint.Parent);

            pose.Quaternion = Quaternion.CreateFromRotationMatrix(poseMat3[i].ToMatrix()
                * Matrix.Transpose(poseMat3[parentIndex].ToMatrix()));
            pose.Translation = Vector3.Transform(poseMat3[i].ToVector3() - poseMat3[parentIndex].ToVector3(),
                Matrix.Transpose(poseMat3[parentIndex].ToMatrix()));
        }

        _defaultPose[i] = pose;
    }

    lexer.ExpectTokenString("}");

    int meshCount = _meshes.Length;

    for(int i = 0; i < meshCount; i++)
    {
        lexer.ExpectTokenString("mesh");

        _meshes[i] = new idMD5Mesh();
        _meshes[i].Parse(lexer, poseMat3);
    }

    //
    // calculate the bounds of the model
    //
    CalculateBounds(poseMat3);

    // set the timestamp for reloadmodels
    idConsole.Warning("TODO: fileSystem->ReadFile( name, NULL, &timeStamp );");
}
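// Illustrative only (values invented): the md5mesh header consumed above. For
// Doom 3 assets VersionString/Version are "MD5Version" and 10; each joint line
// is read by ParseJoint, and each "mesh" block is handed to idMD5Mesh.Parse:
//
//  MD5Version 10
//  commandline "keepmesh gun"
//  numJoints 3
//  numMeshes 1
//  joints {
//      // one line per joint
//  }
//  mesh { ... }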
public void Parse(idLexer lexer, idJointMatrix[] joints)
{
    lexer.ExpectTokenString("{");

    //
    // parse name
    //
    if(lexer.CheckTokenString("name") == true)
    {
        lexer.ReadToken();
    }

    //
    // parse shader
    //
    lexer.ExpectTokenString("shader");

    idToken token = lexer.ReadToken();
    string materialName = token.ToString();

    _material = idE.DeclManager.FindMaterial(materialName);

    //
    // parse texture coordinates
    //
    lexer.ExpectTokenString("numverts");

    int count = lexer.ParseInt();

    if(count < 0)
    {
        lexer.Error("Invalid size: {0}", count);
    }

    _texCoords = new Vector2[count];

    int[] firstWeightForVertex = new int[count];
    int[] weightCountForVertex = new int[count];
    int maxWeight = 0;
    int coordCount = _texCoords.Length;

    _weightCount = 0;

    for(int i = 0; i < coordCount; i++)
    {
        lexer.ExpectTokenString("vert");
        lexer.ParseInt();

        float[] tmp = lexer.Parse1DMatrix(2);

        _texCoords[i] = new Vector2(tmp[0], tmp[1]);

        firstWeightForVertex[i] = lexer.ParseInt();
        weightCountForVertex[i] = lexer.ParseInt();

        if(weightCountForVertex[i] == 0)
        {
            lexer.Error("Vertex without any joint weights.");
        }

        _weightCount += weightCountForVertex[i];

        if((weightCountForVertex[i] + firstWeightForVertex[i]) > maxWeight)
        {
            maxWeight = weightCountForVertex[i] + firstWeightForVertex[i];
        }
    }

    //
    // parse tris
    //
    lexer.ExpectTokenString("numtris");

    _triangleCount = lexer.ParseInt();

    if(_triangleCount < 0)
    {
        lexer.Error("Invalid size: {0}", _triangleCount);
    }

    int[] tris = new int[_triangleCount * 3];

    for(int i = 0; i < _triangleCount; i++)
    {
        lexer.ExpectTokenString("tri");
        lexer.ParseInt();

        tris[i * 3 + 0] = lexer.ParseInt();
        tris[i * 3 + 1] = lexer.ParseInt();
        tris[i * 3 + 2] = lexer.ParseInt();
    }

    //
    // parse weights
    //
    lexer.ExpectTokenString("numweights");

    count = lexer.ParseInt();

    if(count < 0)
    {
        lexer.Error("Invalid size: {0}", count);
    }

    if(maxWeight > count)
    {
        lexer.Warning("Vertices reference out of range weights in model ({0} of {1} weights).", maxWeight, count);
    }

    VertexWeight[] tempWeights = new VertexWeight[count];

    for(int i = 0; i < count; i++)
    {
        lexer.ExpectTokenString("weight");
        lexer.ParseInt();

        int jointIndex = lexer.ParseInt();

        if((jointIndex < 0) || (jointIndex >= joints.Length))
        {
            lexer.Error("Joint index out of range({0}): {1}", joints.Length, jointIndex);
        }

        tempWeights[i].JointIndex = jointIndex;
        tempWeights[i].JointWeight = lexer.ParseFloat();

        float[] tmp = lexer.Parse1DMatrix(3);

        tempWeights[i].Offset = new Vector3(tmp[0], tmp[1], tmp[2]);
    }

    // create pre-scaled weights and an index for the vertex/joint lookup
    _scaledWeights = new Vector4[_weightCount];
    _weightIndex = new int[_weightCount * 2];

    count = 0;
    coordCount = _texCoords.Length;

    for(int i = 0; i < coordCount; i++)
    {
        int num = firstWeightForVertex[i];
        int weightCount = weightCountForVertex[i];

        for(int j = 0; j < weightCount; j++, num++, count++)
        {
            Vector3 tmp = tempWeights[num].Offset * tempWeights[num].JointWeight;

            _scaledWeights[count].X = tmp.X;
            _scaledWeights[count].Y = tmp.Y;
            _scaledWeights[count].Z = tmp.Z;
            _scaledWeights[count].W = tempWeights[num].JointWeight;

            _weightIndex[count * 2 + 0] = tempWeights[num].JointIndex;
        }

        // mark the last weight of this vertex
        _weightIndex[count * 2 - 1] = 1;
    }

    lexer.ExpectTokenString("}");

    // update counters
    idConsole.Warning("TODO: idRenderModel_MD5 update counters");

    /*c_numVerts += texCoords.Num();
    c_numWeights += numWeights;
    c_numWeightJoints++;
    for ( i = 0; i < numWeights; i++ ) {
        c_numWeightJoints += weightIndex[i*2+1];
    }*/

    //
    // build the information that will be common to all animations of this mesh:
    // silhouette edge connectivity and normal / tangent generation information
    //
    Vertex[] verts = new Vertex[_texCoords.Length];
    int vertCount = verts.Length;

    for(int i = 0; i < vertCount; i++)
    {
        verts[i].TextureCoordinates = _texCoords[i];
    }

    TransformVertices(verts, joints);

    idConsole.Warning("TODO: idMD5Mesh Deform");
    //_deformInfo = idE.RenderSystem.BuildDeformInformation(verts, tris, _material.UseUnsmoothedTangents);
}
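// Illustrative only (values invented): one md5mesh "mesh" block as consumed above.
// Each vert line is: index, ( s t ), first weight, weight count; each weight line
// is: index, joint, weight, ( offset ):
//
//  mesh {
//      shader "models/characters/male_npc/marine/marine"
//      numverts 3
//      vert 0 ( 0.394531 0.513672 ) 0 1
//      vert 1 ( 0.447266 0.478516 ) 1 1
//      vert 2 ( 0.410156 0.532227 ) 2 2
//      numtris 1
//      tri 0 0 2 1
//      numweights 4
//      weight 0 12 1.0 ( 2.6 9.0 -0.8 )
//      ...
//  }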
private idRenderModel ParseShadowModel(idLexer lexer)
{
    lexer.ExpectTokenString("{");

    // parse the name
    idToken token = lexer.ExpectAnyToken();
    idRenderModel model = idE.RenderModelManager.AllocateModel();
    model.InitEmpty(token.ToString());

    RenderModelSurface modelSurface = new RenderModelSurface();
    modelSurface.Material = idE.RenderSystem.DefaultMaterial;

    modelSurface.Geometry = new Surface();
    modelSurface.Geometry.ShadowVertices = new ShadowVertex[lexer.ParseInt()];
    modelSurface.Geometry.ShadowIndexesNoCapsCount = lexer.ParseInt();
    modelSurface.Geometry.ShadowIndexesNoFrontCapsCount = lexer.ParseInt();
    modelSurface.Geometry.Indexes = new int[lexer.ParseInt()];
    modelSurface.Geometry.ShadowCapPlaneBits = lexer.ParseInt();
    modelSurface.Geometry.Bounds.Clear();

    int count = modelSurface.Geometry.ShadowVertices.Length;

    for(int j = 0; j < count; j++)
    {
        float[] vec = lexer.Parse1DMatrix(3);

        modelSurface.Geometry.ShadowVertices[j].Position = new Vector4(vec[0], vec[1], vec[2], 1);
        modelSurface.Geometry.Bounds.AddPoint(modelSurface.Geometry.ShadowVertices[j].Position);
    }

    count = modelSurface.Geometry.Indexes.Length;

    for(int j = 0; j < count; j++)
    {
        modelSurface.Geometry.Indexes[j] = lexer.ParseInt();
    }

    // add the completed surface to the model
    model.AddSurface(modelSurface);

    lexer.ExpectTokenString("}");

    // we do NOT do a model->FinishSurfaces, because we don't need sil edges, planes, tangents, etc.
    // model.FinishSurfaces();

    return model;
}
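// Illustrative only (values invented): a .proc shadowModel block as read above -
// name, then numVerts, noCaps, noFrontCaps, numIndexes and planeBits, followed by
// the shadow volume vertices and indexes:
//
//  shadowModel { "_prelight_light_2"
//      /* numVerts = */ 4 /* noCaps = */ 12 /* noFrontCaps = */ 24 /* numIndexes = */ 36 /* planeBits = */ 5
//      ( -1024 -1024 0 ) ( 1024 -1024 0 ) ( 1024 1024 0 ) ( -1024 1024 0 )
//      0 2 1 0 3 2 ...
//  }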
private void ParseNodes(idLexer lexer)
{
    lexer.ExpectTokenString("{");

    _areaNodeCount = lexer.ParseInt();

    if(_areaNodeCount < 0)
    {
        lexer.Error("ParseNodes: bad areaNodeCount");
    }

    _areaNodes = new AreaNode[_areaNodeCount];

    float[] tmp;
    AreaNode node;

    for(int i = 0; i < _areaNodeCount; i++)
    {
        node = _areaNodes[i] = new AreaNode();

        tmp = lexer.Parse1DMatrix(4);

        node.Plane = new Plane(tmp[0], tmp[1], tmp[2], tmp[3]);
        node.Children[0] = lexer.ParseInt();
        node.Children[1] = lexer.ParseInt();
    }

    lexer.ExpectTokenString("}");
}
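// Illustrative only (values invented): a .proc "nodes" block as read above - the
// node count, then per node a plane equation and two child values. In stock .proc
// files a positive child is a node index, a negative one encodes -1 minus the
// area number, and 0 means solid:
//
//  nodes { /* numNodes = */ 3
//      ( 1 0 0 -1024 ) 1 2
//      ( 0 1 0 0 ) -1 0
//      ( 0 1 0 128 ) -2 -3
//  }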
private idRenderModel ParseModel(idLexer lexer)
{
    if(this.Disposed == true)
    {
        throw new ObjectDisposedException(this.GetType().Name);
    }

    RenderModelSurface modelSurface;

    lexer.ExpectTokenString("{");

    // parse the name
    idToken token = lexer.ExpectAnyToken();
    idRenderModel model = idE.RenderModelManager.AllocateModel();
    model.InitEmpty(token.ToString());

    int surfaceCount = lexer.ParseInt();
    int loopCount = 0;

    if(surfaceCount < 0)
    {
        lexer.Error("ParseModel: bad surfaceCount");
    }

    for(int i = 0; i < surfaceCount; i++)
    {
        lexer.ExpectTokenString("{");

        token = lexer.ExpectAnyToken();

        modelSurface = new RenderModelSurface();
        modelSurface.Material = idE.DeclManager.FindMaterial(token.ToString());
        modelSurface.Material.AddReference();

        modelSurface.Geometry = new Surface();
        modelSurface.Geometry.Vertices = new Vertex[lexer.ParseInt()];
        modelSurface.Geometry.Indexes = new int[lexer.ParseInt()];

        loopCount = modelSurface.Geometry.Vertices.Length;

        for(int j = 0; j < loopCount; j++)
        {
            float[] vec = lexer.Parse1DMatrix(8);

            modelSurface.Geometry.Vertices[j].Position = new Vector3(vec[0], vec[1], vec[2]);
            modelSurface.Geometry.Vertices[j].TextureCoordinates = new Vector2(vec[3], vec[4]);
            modelSurface.Geometry.Vertices[j].Normal = new Vector3(vec[5], vec[6], vec[7]);
        }

        loopCount = modelSurface.Geometry.Indexes.Length;

        for(int j = 0; j < loopCount; j++)
        {
            modelSurface.Geometry.Indexes[j] = lexer.ParseInt();
        }

        lexer.ExpectTokenString("}");

        // add the completed surface to the model
        model.AddSurface(modelSurface);
    }

    lexer.ExpectTokenString("}");

    model.FinishSurfaces();

    return model;
}
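// Illustrative only (values invented): a .proc "model" block as read above - the
// area name, surface count, then per surface the material, vertex and index
// counts, the vertices as ( x y z s t nx ny nz ), and the triangle indexes:
//
//  model { "_area0" /* numSurfaces = */ 1
//      /* surface 0 */ { "textures/base_floor/a_insetstaintiny2"
//          /* numVerts = */ 4 /* numIndexes = */ 6
//          ( 0 0 0 0 0 0 0 1 ) ( 128 0 0 1 0 0 0 1 ) ( 128 128 0 1 1 0 0 1 ) ( 0 128 0 0 1 0 0 1 )
//          0 1 2 0 2 3
//      }
//  }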
private void ParseVertices(idLexer lexer, CollisionModel model)
{
    lexer.ExpectTokenString("{");

    int vertexCount = lexer.ParseInt();
    model.Vertices = new CollisionModelVertex[vertexCount];

    for(int i = 0; i < vertexCount; i++)
    {
        float[] tmp = lexer.Parse1DMatrix(3);

        model.Vertices[i].Point = new Vector3(tmp[0], tmp[1], tmp[2]);
        model.Vertices[i].Side = 0;
        model.Vertices[i].SideSet = 0;
        model.Vertices[i].CheckCount = 0;
    }

    lexer.ExpectTokenString("}");
}
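// Illustrative only (values invented): the "vertices" block consumed above -
// a count followed by one point per vertex:
//
//  vertices { /* numVertices = */ 4
//      ( -128 -128 64 )
//      ( 128 -128 64 )
//      ( 128 128 64 )
//      ( -128 128 64 )
//  }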
public bool Load(string fileName, bool clear)
{
    if(clear == true)
    {
        Clear();
    }

    byte[] data = idE.FileSystem.ReadFile(fileName);

    if(data == null)
    {
        // let whoever called us deal with the failure (so sys_lang can be reset)
        return false;
    }

    idLexer lexer = new idLexer(LexerOptions.NoFatalErrors | LexerOptions.NoStringConcatination
        | LexerOptions.AllowMultiCharacterLiterals | LexerOptions.AllowBackslashStringConcatination);
    lexer.LoadMemory(Encoding.UTF8.GetString(data), fileName);

    if(lexer.IsLoaded == false)
    {
        return false;
    }

    idToken token, token2;

    lexer.ExpectTokenString("{");

    while((token = lexer.ReadToken()) != null)
    {
        if(token.ToString() == "}")
        {
            break;
        }
        else if((token2 = lexer.ReadToken()) != null)
        {
            if(token2.ToString() == "}")
            {
                break;
            }

            _regexReplaceIndex = 0;

            // stock d3 language files contain sprintf formatters, we need to replace them
            string val = token2.ToString();
            val = Regex.Replace(val, "%s|%d|%x", new MatchEvaluator(ReplaceHandler));

            _elements.Add(token.ToString(), val);
        }
    }

    idConsole.WriteLine("{0} strings read from {1}", _elements.Count, fileName);

    return true;
}
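// Illustrative only (values invented): a language file as consumed above - a
// single brace-delimited list of key/value string pairs; %s, %d and %x inside
// values are rewritten by ReplaceHandler:
//
//  {
//      "#str_00100"    "Loading %s"
//      "#str_00101"    "Ammo: %d"
//  }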
public static idMapPatch Parse(idLexer lexer, Vector3 origin, bool patchDef3 = true, float version = idMapFile.CurrentMapVersion)
{
    if(lexer.ExpectTokenString("{") == false)
    {
        return null;
    }

    // read the material (we had an implicit 'textures/' in the old format...)
    idToken token = lexer.ReadToken();

    if(token == null)
    {
        lexer.Error("idMapPatch::Parse: unexpected EOF");
        return null;
    }

    // parse the patch info
    float[] info;

    if(patchDef3 == true)
    {
        info = lexer.Parse1DMatrix(7);

        if(info == null)
        {
            lexer.Error("idMapPatch::Parse: unable to parse patchDef3 info");
            return null;
        }
    }
    else
    {
        info = lexer.Parse1DMatrix(5);

        if(info == null)
        {
            lexer.Error("idMapPatch::Parse: unable to parse patchDef2 info");
            return null;
        }
    }

    idMapPatch patch = new idMapPatch((int) info[0], (int) info[1]);

    if(version < 2.0f)
    {
        patch.Material = "textures/" + token.ToString();
    }
    else
    {
        patch.Material = token.ToString();
    }

    if(patchDef3 == true)
    {
        patch.HorizontalSubdivisions = (int) info[2];
        patch.VerticalSubdivisions = (int) info[3];
        patch.ExplicitlySubdivided = true;
    }

    if((patch.Width < 0) || (patch.Height < 0))
    {
        lexer.Error("idMapPatch::Parse: bad size");
        return null;
    }

    // these were written out in the wrong order, IMHO
    if(lexer.ExpectTokenString("(") == false)
    {
        lexer.Error("idMapPatch::Parse: bad patch vertex data");
        return null;
    }

    for(int j = 0; j < patch.Width; j++)
    {
        if(lexer.ExpectTokenString("(") == false)
        {
            lexer.Error("idMapPatch::Parse: bad vertex row data");
            return null;
        }

        for(int i = 0; i < patch.Height; i++)
        {
            float[] v = lexer.Parse1DMatrix(5);

            if(v == null)
            {
                lexer.Error("idMapPatch::Parse: bad vertex column data");
                return null;
            }

            Vertex vert = new Vertex();
            vert.Position.X = v[0] - origin.X;
            vert.Position.Y = v[1] - origin.Y;
            vert.Position.Z = v[2] - origin.Z;
            vert.TextureCoordinates = new Vector2(v[3], v[4]);

            patch.SetVertex(i * patch.Width + j, vert);
        }

        if(lexer.ExpectTokenString(")") == false)
        {
            lexer.Error("idMapPatch::Parse: unable to parse patch control points");
            return null;
        }
    }

    if(lexer.ExpectTokenString(")") == false)
    {
        lexer.Error("idMapPatch::Parse: unable to parse patch control points, no closure");
        return null;
    }

    // read any key/value pairs
    while((token = lexer.ReadToken()) != null)
    {
        if(token.ToString() == "}")
        {
            lexer.ExpectTokenString("}");
            break;
        }

        if(token.Type == TokenType.String)
        {
            string key = token.ToString();
            token = lexer.ExpectTokenType(TokenType.String, 0);

            patch.Dict.Set(key, token.ToString());
        }
    }

    return patch;
}
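// Illustrative only (values invented): a patchDef2 primitive as consumed above -
// material, an info matrix of width, height and three legacy values, then width
// rows of height control points, each ( x y z s t ):
//
//  {
//      patchDef2
//      {
//          "base_wall/lfwall13f3"
//          ( 3 3 0 0 0 )
//          (
//              ( ( -64 -64 0 0 0 ) ( -64 0 0 0 1 ) ( -64 64 0 0 2 ) )
//              ( ( 0 -64 0 1 0 ) ( 0 0 8 1 1 ) ( 0 64 0 1 2 ) )
//              ( ( 64 -64 0 2 0 ) ( 64 0 0 2 1 ) ( 64 64 0 2 2 ) )
//          )
//      }
//  }
//
// patchDef3 uses a 7-value info matrix that inserts the explicit horizontal and
// vertical subdivisions after the size.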
private bool LoadCollisionModelFile(string name, ulong mapFileCRC)
{
    // load it
    string fileName = Path.Combine(Path.GetDirectoryName(name), Path.GetFileNameWithoutExtension(name) + Extension);

    idLexer lexer = new idLexer(LexerOptions.NoStringConcatination | LexerOptions.NoDollarPrecompilation);

    if(lexer.LoadFile(fileName) == false)
    {
        return false;
    }

    idToken token;

    if(lexer.ExpectTokenString(TokenFileID) == false)
    {
        idConsole.Warning("{0} is not a CM file.", fileName);
    }
    else if(((token = lexer.ReadToken()) == null) || (token.ToString() != FileVersion))
    {
        idConsole.Warning("{0} has version {1} instead of {2}", fileName, token, FileVersion);
    }
    else if((token = lexer.ExpectTokenType(TokenType.Number, TokenSubType.Integer)) == null)
    {
        idConsole.Warning("{0} has no map file CRC", fileName);
    }
    else
    {
        ulong crc = token.ToUInt64();

        if((mapFileCRC != 0) && (crc != mapFileCRC))
        {
            idConsole.WriteLine("{0} is out of date", fileName);
        }
        else
        {
            // parse the file
            while(true)
            {
                if((token = lexer.ReadToken()) == null)
                {
                    break;
                }

                if(token.ToString().ToLower() == "collisionmodel")
                {
                    if(ParseCollisionModel(lexer) == false)
                    {
                        return false;
                    }
                }
                else
                {
                    lexer.Error("idCollisionModelManagerLocal::LoadCollisionModelFile: bad token \"{0}\"", token);
                }
            }

            return true;
        }
    }

    return false;
}
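// Illustrative only (values invented): the .cm file header checked above - the
// file ID token, a version string and the source map's CRC, followed by one or
// more collisionModel sections:
//
//  CM "1.00"
//
//  1297882361
//
//  collisionModel "worldMap" { ... }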
public bool LoadAnimation(string fileName)
{
    idToken token;
    idLexer lexer = new idLexer(LexerOptions.AllowPathNames | LexerOptions.NoStringEscapeCharacters | LexerOptions.NoStringConcatination);

    if(lexer.LoadFile(fileName) == false)
    {
        return false;
    }

    Clear();

    _name = fileName;

    lexer.ExpectTokenString(idRenderModel_MD5.VersionString);

    int version = lexer.ParseInt();

    if(version != idRenderModel_MD5.Version)
    {
        lexer.Error("Invalid version {0}. Should be version {1}", version, idRenderModel_MD5.Version);
    }

    // skip the commandline
    lexer.ExpectTokenString("commandline");
    lexer.ReadToken();

    // parse num frames
    lexer.ExpectTokenString("numFrames");

    int frameCount = lexer.ParseInt();

    if(frameCount <= 0)
    {
        lexer.Error("Invalid number of frames: {0}", frameCount);
    }

    // parse num joints
    lexer.ExpectTokenString("numJoints");

    int jointCount = lexer.ParseInt();

    if(jointCount <= 0)
    {
        lexer.Error("Invalid number of joints: {0}", jointCount);
    }

    // parse frame rate; a zero frame rate would divide by zero when the
    // animation length is calculated below
    lexer.ExpectTokenString("frameRate");

    _frameRate = lexer.ParseInt();

    if(_frameRate <= 0)
    {
        lexer.Error("Invalid frame rate: {0}", _frameRate);
    }

    // parse number of animated components
    lexer.ExpectTokenString("numAnimatedComponents");

    _animatedComponentCount = lexer.ParseInt();

    if((_animatedComponentCount < 0) || (_animatedComponentCount > (jointCount * 6)))
    {
        lexer.Error("Invalid number of animated components: {0}", _animatedComponentCount);
    }

    // parse the hierarchy
    _jointInfo = new JointAnimationInfo[jointCount];

    lexer.ExpectTokenString("hierarchy");
    lexer.ExpectTokenString("{");

    for(int i = 0; i < jointCount; i++)
    {
        token = lexer.ReadToken();

        _jointInfo[i] = new JointAnimationInfo();
        _jointInfo[i].NameIndex = idR.AnimManager.GetJointIndex(token.ToString());

        // parse parent num
        _jointInfo[i].ParentIndex = lexer.ParseInt();

        if(_jointInfo[i].ParentIndex >= i)
        {
            lexer.Error("Invalid parent num: {0}", _jointInfo[i].ParentIndex);
        }

        if((i != 0) && (_jointInfo[i].ParentIndex < 0))
        {
            lexer.Error("Animations may have only one root joint");
        }

        // parse anim bits
        _jointInfo[i].AnimationBits = (AnimationBits) lexer.ParseInt();

        if(((int) _jointInfo[i].AnimationBits & ~63) != 0)
        {
            lexer.Error("Invalid anim bits: {0}", _jointInfo[i].AnimationBits);
        }

        // parse first component
        _jointInfo[i].FirstComponent = lexer.ParseInt();

        if((_animatedComponentCount > 0)
            && ((_jointInfo[i].FirstComponent < 0) || (_jointInfo[i].FirstComponent >= _animatedComponentCount)))
        {
            lexer.Error("Invalid first component: {0}", _jointInfo[i].FirstComponent);
        }
    }

    lexer.ExpectTokenString("}");

    // parse bounds
    lexer.ExpectTokenString("bounds");
    lexer.ExpectTokenString("{");

    _bounds = new idBounds[frameCount];

    for(int i = 0; i < frameCount; i++)
    {
        float[] tmp = lexer.Parse1DMatrix(3);
        float[] tmp2 = lexer.Parse1DMatrix(3);

        _bounds[i] = new idBounds(
            new Vector3(tmp[0], tmp[1], tmp[2]),
            new Vector3(tmp2[0], tmp2[1], tmp2[2])
        );
    }

    lexer.ExpectTokenString("}");

    // parse base frame
    _baseFrame = new idJointQuaternion[jointCount];

    lexer.ExpectTokenString("baseframe");
    lexer.ExpectTokenString("{");

    for(int i = 0; i < jointCount; i++)
    {
        float[] tmp = lexer.Parse1DMatrix(3);
        float[] tmp2 = lexer.Parse1DMatrix(3);

        idCompressedQuaternion q = new idCompressedQuaternion(tmp2[0], tmp2[1], tmp2[2]);

        _baseFrame[i] = new idJointQuaternion();
        _baseFrame[i].Translation = new Vector3(tmp[0], tmp[1], tmp[2]);
        _baseFrame[i].Quaternion = q.ToQuaternion();
    }

    lexer.ExpectTokenString("}");

    // parse frames
    _componentFrames = new float[_animatedComponentCount * frameCount];

    int frameOffset = 0;

    for(int i = 0; i < frameCount; i++)
    {
        lexer.ExpectTokenString("frame");

        int count = lexer.ParseInt();

        if(count != i)
        {
            lexer.Error("Expected frame number {0}", i);
        }

        lexer.ExpectTokenString("{");

        for(int j = 0; j < _animatedComponentCount; j++, frameOffset++)
        {
            _componentFrames[frameOffset] = lexer.ParseFloat();
        }

        lexer.ExpectTokenString("}");
    }

    // get total move delta
    if(_animatedComponentCount == 0)
    {
        _totalDelta = Vector3.Zero;
    }
    else
    {
        int componentOffset = _jointInfo[0].FirstComponent;

        if((_jointInfo[0].AnimationBits & AnimationBits.TranslationX) == AnimationBits.TranslationX)
        {
            for(int i = 0; i < frameCount; i++)
            {
                _componentFrames[componentOffset + (_animatedComponentCount * i)] -= _baseFrame[0].Translation.X;
            }

            _totalDelta.X = _componentFrames[componentOffset + (_animatedComponentCount * (frameCount - 1))];
            componentOffset++;
        }
        else
        {
            _totalDelta.X = 0;
        }

        if((_jointInfo[0].AnimationBits & AnimationBits.TranslationY) == AnimationBits.TranslationY)
        {
            for(int i = 0; i < frameCount; i++)
            {
                _componentFrames[componentOffset + (_animatedComponentCount * i)] -= _baseFrame[0].Translation.Y;
            }

            _totalDelta.Y = _componentFrames[componentOffset + (_animatedComponentCount * (frameCount - 1))];
            componentOffset++;
        }
        else
        {
            _totalDelta.Y = 0;
        }

        if((_jointInfo[0].AnimationBits & AnimationBits.TranslationZ) == AnimationBits.TranslationZ)
        {
            for(int i = 0; i < frameCount; i++)
            {
                _componentFrames[componentOffset + (_animatedComponentCount * i)] -= _baseFrame[0].Translation.Z;
            }

            _totalDelta.Z = _componentFrames[componentOffset + (_animatedComponentCount * (frameCount - 1))];
        }
        else
        {
            _totalDelta.Z = 0;
        }
    }

    _baseFrame[0].Translation = Vector3.Zero;

    // we don't count last frame because it would cause a 1 frame pause at the end
    _animLength = ((frameCount - 1) * 1000 + _frameRate - 1) / _frameRate;

    // done
    return true;
}
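// Illustrative only (values invented): the md5anim layout consumed above. Each
// hierarchy line is: joint name, parent index, anim bits, first component; each
// frame block holds numAnimatedComponents floats:
//
//  MD5Version 10
//  commandline ""
//  numFrames 2
//  numJoints 2
//  frameRate 24
//  numAnimatedComponents 6
//  hierarchy {
//      "origin" -1 63 0
//      "body" 0 0 0
//  }
//  bounds {
//      ( -24 -44 0 ) ( 31 38 71 )
//      ( -24 -44 0 ) ( 31 38 71 )
//  }
//  baseframe {
//      ( 0 0 0 ) ( -0.5 -0.5 -0.5 )
//      ( 0 0 16 ) ( 0 0 0 )
//  }
//  frame 0 {
//      0 0 0 -0.5 -0.5 -0.5
//  }
//  frame 1 {
//      0 0 1 -0.5 -0.5 -0.5
//  }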
private bool ParseMaterial(idLexer lexer)
{
    _parameters.MinDistance = 1;
    _parameters.MaxDistance = 10;
    _parameters.Volume = 1;

    _speakerMask = 0;
    _altSound = null;

    idToken token;
    string tokenValue;
    int sampleCount = 0;

    while(true)
    {
        if((token = lexer.ExpectAnyToken()) == null)
        {
            return false;
        }

        tokenValue = token.ToString().ToLower();

        if(tokenValue == "}")
        {
            break;
        }
        // minimum number of sounds
        else if(tokenValue == "minsamples")
        {
            sampleCount = lexer.ParseInt();
        }
        else if(tokenValue == "description")
        {
            _description = lexer.ReadTokenOnLine().ToString();
        }
        else if(tokenValue == "mindistance")
        {
            _parameters.MinDistance = lexer.ParseFloat();
        }
        else if(tokenValue == "maxdistance")
        {
            _parameters.MaxDistance = lexer.ParseFloat();
        }
        else if(tokenValue == "shakes")
        {
            token = lexer.ExpectAnyToken();

            if(token.Type == TokenType.Number)
            {
                _parameters.Shakes = token.ToFloat();
            }
            else
            {
                lexer.UnreadToken = token;
                _parameters.Shakes = 1.0f;
            }
        }
        else if(tokenValue == "reverb")
        {
            float reg0 = lexer.ParseFloat();

            if(lexer.ExpectTokenString(",") == false)
            {
                return false;
            }

            float reg1 = lexer.ParseFloat();

            // no longer supported
        }
        else if(tokenValue == "volume")
        {
            _parameters.Volume = lexer.ParseFloat();
        }
        // leadinVolume is used to allow light breaking leadin sounds to be much louder than the broken loop
        else if(tokenValue == "leadinvolume")
        {
            _leadInVolume = lexer.ParseFloat();
        }
        else if(tokenValue == "mask_center")
        {
            _speakerMask |= 1 << (int) Speakers.Center;
        }
        else if(tokenValue == "mask_left")
        {
            _speakerMask |= 1 << (int) Speakers.Left;
        }
        else if(tokenValue == "mask_right")
        {
            _speakerMask |= 1 << (int) Speakers.Right;
        }
        else if(tokenValue == "mask_backright")
        {
            _speakerMask |= 1 << (int) Speakers.BackRight;
        }
        else if(tokenValue == "mask_backleft")
        {
            _speakerMask |= 1 << (int) Speakers.BackLeft;
        }
        else if(tokenValue == "mask_lfe")
        {
            _speakerMask |= 1 << (int) Speakers.Lfe;
        }
        else if(tokenValue == "soundclass")
        {
            _parameters.SoundClass = lexer.ParseInt();

            if(_parameters.SoundClass < 0)
            {
                lexer.Warning("SoundClass out of range");
                return false;
            }
        }
        else if(tokenValue == "altsound")
        {
            if((token = lexer.ExpectAnyToken()) == null)
            {
                return false;
            }

            _altSound = idE.DeclManager.FindSound(token.ToString());
        }
        else if(tokenValue == "ordered")
        {
            // no longer supported
        }
        else if(tokenValue == "no_dups")
        {
            _parameters.Flags |= SoundMaterialFlags.NoDuplicates;
        }
        else if(tokenValue == "no_flicker")
        {
            _parameters.Flags |= SoundMaterialFlags.NoFlicker;
        }
        else if(tokenValue == "plain")
        {
            // no longer supported
        }
        else if(tokenValue == "looping")
        {
            _parameters.Flags |= SoundMaterialFlags.Looping;
        }
        else if(tokenValue == "no_occlusion")
        {
            _parameters.Flags |= SoundMaterialFlags.NoOcclusion;
        }
        else if(tokenValue == "private")
        {
            _parameters.Flags |= SoundMaterialFlags.PrivateSound;
        }
        else if(tokenValue == "antiprivate")
        {
            _parameters.Flags |= SoundMaterialFlags.AntiPrivateSound;
        }
        else if(tokenValue == "playonce")
        {
            _parameters.Flags |= SoundMaterialFlags.PlayOnce;
        }
        else if(tokenValue == "global")
        {
            _parameters.Flags |= SoundMaterialFlags.Global;
        }
        else if(tokenValue == "unclamped")
        {
            _parameters.Flags |= SoundMaterialFlags.Unclamped;
        }
        else if(tokenValue == "omnidirectional")
        {
            _parameters.Flags |= SoundMaterialFlags.OmniDirectional;
        }
        // onDemand can't be a parms, because we must track all references and overrides would confuse it
        else if(tokenValue == "ondemand")
        {
            // no longer loading sounds on demand
            // _onDemand = true;
        }
        // the wave files
        else if(tokenValue == "leadin")
        {
            // add to the leadin list
            if((token = lexer.ReadToken()) == null)
            {
                lexer.Warning("Expected sound after leadin");
                return false;
            }

            idConsole.Warning("TODO: leadin");

            /*if(soundSystemLocal.soundCache && numLeadins < maxSamples) {
                leadins[numLeadins] = soundSystemLocal.soundCache->FindSound(token.c_str(), onDemand);
                numLeadins++;
            }*/
        }
        else if((tokenValue.EndsWith(".wav") == true) || (tokenValue.EndsWith(".ogg") == true))
        {
            idConsole.Warning("TODO: .wav|.ogg");

            /*// add to the wav list
            if(soundSystemLocal.soundCache && numEntries < maxSamples) {
                token.BackSlashesToSlashes();
                idStr lang = cvarSystem->GetCVarString("sys_lang");

                if(lang.Icmp("english") != 0 && token.Find("sound/vo/", false) >= 0) {
                    idStr work = token;
                    work.ToLower();
                    work.StripLeading("sound/vo/");
                    work = va("sound/vo/%s/%s", lang.c_str(), work.c_str());

                    if(fileSystem->ReadFile(work, NULL, NULL) > 0) {
                        token = work;
                    } else {
                        // also try to find it with the .ogg extension
                        work.SetFileExtension(".ogg");

                        if(fileSystem->ReadFile(work, NULL, NULL) > 0) {
                            token = work;
                        }
                    }
                }

                entries[numEntries] = soundSystemLocal.soundCache->FindSound(token.c_str(), onDemand);
                numEntries++;
            }*/
        }
        else
        {
            lexer.Warning("unknown token '{0}'", token.ToString());
            return false;
        }
    }

    if(_parameters.Shakes > 0.0f)
    {
        idConsole.Warning("TODO: CheckShakesAndOgg()");
    }

    return true;
}
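// Illustrative only (values invented): the body of a sound shader decl as consumed
// above (the decl name and opening brace are handled before this method runs);
// any token ending in .wav or .ogg is treated as a sample entry:
//
//  minDistance 1
//  maxDistance 30
//  volume 0.8
//  shakes
//  looping
//  sound/ambient/machine_loop.wav
//  sound/ambient/machine_loop2.wav
//  }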