/// <summary>
/// Parses the "edges" section of a .cm file into <paramref name="model"/>.Edges.
/// Token stream consumed: "{", count, then per edge "(" v0 v1 ")" internal userCount, then "}".
/// </summary>
private void ParseEdges(idLexer lexer, CollisionModel model)
{
    lexer.ExpectTokenString("{");

    int count = lexer.ParseInt();
    model.Edges = new CollisionModelEdge[count];

    for(int index = 0; index < count; index++)
    {
        lexer.ExpectTokenString("(");

        // the two vertex indices are read left-to-right, matching file order
        int first = lexer.ParseInt();
        int second = lexer.ParseInt();

        lexer.ExpectTokenString(")");

        model.Edges[index].VertexCount = new int[] { first, second };
        model.Edges[index].Side = 0;
        model.Edges[index].SideSet = 0;
        model.Edges[index].Internal = (ushort) lexer.ParseInt();
        model.Edges[index].UserCount = (ushort) lexer.ParseInt();
        model.Edges[index].Normal = Vector3.Zero;
        model.Edges[index].CheckCount = 0;

        // running total of internal edges for the whole model
        model.InternalEdgeCount += model.Edges[index].Internal;
    }

    lexer.ExpectTokenString("}");
}
/// <summary>
/// Pushes a polygon down the BSP tree, adding a reference to every node whose
/// half-space its bounds straddle, and stores it at the final node reached.
/// </summary>
private void FilterPolygonIntoTree(CollisionModel model, CollisionModelNode node, CollisionModelPolygon p)
{
    // descend until we hit a leaf (PlaneType == -1) or a node whose children
    // all fully contain the polygon's bounds
    while(node.PlaneType != -1)
    {
        if(InsideAllChildren(node, p.Bounds))
        {
            break;
        }

        float lower;
        float upper;

        // select the bounds extents along the node's axial split plane
        switch(node.PlaneType)
        {
            case 0:
                lower = p.Bounds.Min.X;
                upper = p.Bounds.Max.X;
                break;

            case 1:
                lower = p.Bounds.Min.Y;
                upper = p.Bounds.Max.Y;
                break;

            default:
                lower = p.Bounds.Min.Z;
                upper = p.Bounds.Max.Z;
                break;
        }

        if(lower >= node.PlaneDistance)
        {
            // entirely on the front side
            node = node.Children[0];
        }
        else if(upper <= node.PlaneDistance)
        {
            // entirely on the back side
            node = node.Children[1];
        }
        else
        {
            // straddles the plane: recurse into the back child, continue down the front
            FilterPolygonIntoTree(model, node.Children[1], p);
            node = node.Children[0];
        }
    }

    node.Polygons.Add(p);
}
/// <summary>
/// Pushes a brush down the BSP tree, recursing into the back child wherever its
/// bounds straddle a split plane, and registers it at the final node reached.
/// </summary>
private void FilterBrushIntoTree(CollisionModel model, CollisionModelNode node, CollisionModelBrush b)
{
    // walk down until a leaf, or until every child already contains the bounds
    while(node.PlaneType != -1)
    {
        if(InsideAllChildren(node, b.Bounds))
        {
            break;
        }

        float lower;
        float upper;

        // pick the bounds extents along this node's axial split plane
        switch(node.PlaneType)
        {
            case 0:
                lower = b.Bounds.Min.X;
                upper = b.Bounds.Max.X;
                break;

            case 1:
                lower = b.Bounds.Min.Y;
                upper = b.Bounds.Max.Y;
                break;

            default:
                lower = b.Bounds.Min.Z;
                upper = b.Bounds.Max.Z;
                break;
        }

        if(lower >= node.PlaneDistance)
        {
            // fully in front of the plane
            node = node.Children[0];
        }
        else if(upper <= node.PlaneDistance)
        {
            // fully behind the plane
            node = node.Children[1];
        }
        else
        {
            // spans the plane: add to the back subtree too, then keep descending front
            FilterBrushIntoTree(model, node.Children[1], b);
            node = node.Children[0];
        }
    }

    node.Brushes.Add(b);
}
/// <summary>
/// Parses the "brushes" section of a .cm file and filters each brush into the
/// model's BSP tree. Newer files carry a leading number token which is consumed
/// and ignored; each brush is: planeCount "{" planes "}" min max contents.
/// </summary>
private void ParseBrushes(idLexer lexer, CollisionModel model)
{
    // optional count token emitted by newer .cm files — consume if present
    idToken token = lexer.CheckTokenType(TokenType.Number, 0);

    lexer.ExpectTokenString("{");

    while(lexer.CheckTokenString("}") == false)
    {
        // parse brush
        int planeCount = lexer.ParseInt();

        CollisionModelBrush brush = new CollisionModelBrush();
        brush.Contents = ContentFlags.All;
        brush.Material = _traceModelMaterial;
        brush.Planes = new Plane[planeCount];

        lexer.ExpectTokenString("{");

        for(int i = 0; i < planeCount; i++)
        {
            float[] values = lexer.Parse1DMatrix(3);

            brush.Planes[i].Normal = new Vector3(values[0], values[1], values[2]);
            brush.Planes[i].D = lexer.ParseFloat();
        }

        lexer.ExpectTokenString("}");

        float[] bounds = lexer.Parse1DMatrix(3);
        brush.Bounds.Min = new Vector3(bounds[0], bounds[1], bounds[2]);

        bounds = lexer.Parse1DMatrix(3);
        brush.Bounds.Max = new Vector3(bounds[0], bounds[1], bounds[2]);

        token = lexer.ReadToken();

        brush.Contents = (token.Type == TokenType.Number)
            ? (ContentFlags) token.ToInt32() // old .cm files use a single integer
            : ContentsFromString(token.ToString());

        brush.CheckCount = 0;
        brush.PrimitiveCount = 0;

        // filter brush into tree
        FilterBrushIntoTree(model, model.Node, brush);
    }
}
/// <summary>
/// Parses the "polygons" section of a .cm file and filters each polygon into the
/// model's BSP tree. Each polygon is: edgeCount "(" edges ")" normal dist min max material.
/// </summary>
private void ParsePolygons(idLexer lexer, CollisionModel model)
{
    // optional count token emitted by newer .cm files — consume if present
    idToken token = lexer.CheckTokenType(TokenType.Number, 0);

    lexer.ExpectTokenString("{");

    while(lexer.CheckTokenString("}") == false)
    {
        // parse polygon
        int edgeCount = lexer.ParseInt();

        CollisionModelPolygon polygon = new CollisionModelPolygon();
        polygon.Material = _traceModelMaterial;
        polygon.Contents = ContentFlags.All;
        polygon.Edges = new int[edgeCount];

        lexer.ExpectTokenString("(");

        for(int i = 0; i < edgeCount; i++)
        {
            polygon.Edges[i] = lexer.ParseInt();
        }

        lexer.ExpectTokenString(")");

        float[] values = lexer.Parse1DMatrix(3);
        polygon.Plane.Normal = new Vector3(values[0], values[1], values[2]);
        polygon.Plane.D = lexer.ParseFloat();

        values = lexer.Parse1DMatrix(3);
        polygon.Bounds.Min = new Vector3(values[0], values[1], values[2]);

        values = lexer.Parse1DMatrix(3);
        polygon.Bounds.Max = new Vector3(values[0], values[1], values[2]);

        token = lexer.ExpectTokenType(TokenType.String, 0);

        // get material
        polygon.Material = idE.DeclManager.FindMaterial(token.ToString());
        polygon.Contents = polygon.Material.ContentFlags;
        polygon.CheckCount = 0;

        // filter polygon into tree
        FilterPolygonIntoTree(model, model.Node, polygon);
    }
}
/// <summary>
/// Creates the collision model reserved for trace models: a single-leaf node
/// tree stored in the slot past the regular sub models, plus the shared
/// "_tracemodel" material. Fatal-errors if that material cannot be found.
/// </summary>
private void SetupTraceModelStructure()
{
    // setup model
    CollisionModel model = new CollisionModel();

    // the trace model occupies the reserved slot just past the regular sub models
    _models[idE.MaxSubModels] = model;

    // create node to hold the collision data
    CollisionModelNode node = new CollisionModelNode();
    node.PlaneType = -1; // -1 marks a leaf node (no split plane)
    model.Node = node;

    // allocate vertex and edge arrays
    //model.Vertices = new CollisionModelVertex[idE.MaxTraceModelVertices];
    //model->edges = (cm_edge_t *) Mem_ClearedAlloc( model->maxEdges * sizeof(cm_edge_t) );

    // create a material for the trace model polygons
    _traceModelMaterial = idE.DeclManager.FindMaterial("_tracemodel", false);

    if (_traceModelMaterial == null)
    {
        idConsole.FatalError("_tracemodel material not found");
    }

    // allocate polygons
    // NOTE(review): the polygon/brush pre-allocation from the original C++ below
    // has not been ported yet — kept for reference.
    /*for ( i = 0; i < MAX_TRACEMODEL_POLYS; i++ ) {
     * trmPolygons[i] = AllocPolygonReference( model, MAX_TRACEMODEL_POLYS );
     * trmPolygons[i]->p = AllocPolygon( model, MAX_TRACEMODEL_POLYEDGES );
     * trmPolygons[i]->p->bounds.Clear();
     * trmPolygons[i]->p->plane.Zero();
     * trmPolygons[i]->p->checkcount = 0;
     * trmPolygons[i]->p->contents = -1; // all contents
     * trmPolygons[i]->p->material = trmMaterial;
     * trmPolygons[i]->p->numEdges = 0;
     * }
     * // allocate brush for position test
     * trmBrushes[0] = AllocBrushReference( model, 1 );
     * trmBrushes[0]->b = AllocBrush( model, MAX_TRACEMODEL_POLYS );
     * trmBrushes[0]->b->primitiveNum = 0;
     * trmBrushes[0]->b->bounds.Clear();
     * trmBrushes[0]->b->checkcount = 0;
     * trmBrushes[0]->b->contents = -1; // all contents
     * trmBrushes[0]->b->numPlanes = 0;*/
}
/// <summary>
/// Parses the "vertices" section of a .cm file into <paramref name="model"/>.Vertices.
/// Token stream consumed: "{", count, then one 3-component point per vertex, then "}".
/// </summary>
private void ParseVertices(idLexer lexer, CollisionModel model)
{
    lexer.ExpectTokenString("{");

    int count = lexer.ParseInt();
    model.Vertices = new CollisionModelVertex[count];

    for(int index = 0; index < count; index++)
    {
        float[] point = lexer.Parse1DMatrix(3);

        model.Vertices[index].Point = new Vector3(point[0], point[1], point[2]);
        model.Vertices[index].Side = 0;
        model.Vertices[index].SideSet = 0;
        model.Vertices[index].CheckCount = 0;
    }

    lexer.ExpectTokenString("}");
}
/// <summary>
/// Returns the collision model for <paramref name="model"/>, loading it if needed:
/// first from the model cache, then from a .cm file, and finally (not yet ported)
/// by converting a render model. Returns null when only precaching and no .cm exists.
/// </summary>
public CollisionModel LoadModel(string model, bool precache)
{
    // already loaded?
    CollisionModel collisionModel = FindModel(model);

    if(collisionModel != null)
    {
        return collisionModel;
    }

    // try to load a .cm file
    if(LoadCollisionModelFile(model, 0))
    {
        collisionModel = FindModel(model);

        if(collisionModel != null)
        {
            return collisionModel;
        }

        idConsole.Warning("idCollisionModelManager::LoadModel: collision file for '{0}' contains different model", model);
    }

    // if only precaching .cm files do not waste memory converting render models
    if(precache)
    {
        return null;
    }

    // try to load a .ASE or .LWO model and convert it to a collision model
    idConsole.Warning("TODO: collisionModel = LoadRenderModel(model);");

    if(collisionModel != null)
    {
        _models[_modelCount++] = collisionModel;
    }

    return collisionModel;
}
/// <summary>
/// Recursively parses a node record "( planeType planeDistance )" and, for
/// interior nodes (planeType != -1), its two child subtrees in front/back order.
/// Increments <paramref name="model"/>.NodeCount per node created.
/// </summary>
private CollisionModelNode ParseNodes(idLexer lexer, CollisionModel model, CollisionModelNode parent)
{
    model.NodeCount++;

    lexer.ExpectTokenString("(");

    // initializer members are assigned left-to-right, so the lexer reads
    // planeType before planeDistance, matching the file layout
    CollisionModelNode node = new CollisionModelNode
    {
        Parent = parent,
        PlaneType = lexer.ParseInt(),
        PlaneDistance = lexer.ParseFloat()
    };

    lexer.ExpectTokenString(")");

    // a plane type of -1 marks a leaf; anything else has two children
    if(node.PlaneType != -1)
    {
        node.Children[0] = ParseNodes(lexer, model, node);
        node.Children[1] = ParseNodes(lexer, model, node);
    }

    return node;
}
/// <summary>
/// Filters a brush into the BSP tree: descends toward the leaf containing its
/// bounds, recursing into the back child whenever the bounds span a split plane,
/// and adds the brush to the node where descent stops.
/// </summary>
private void FilterBrushIntoTree(CollisionModel model, CollisionModelNode node, CollisionModelBrush b)
{
    while(node.PlaneType != -1)
    {
        // stop early if every child already contains the brush bounds
        if(InsideAllChildren(node, b.Bounds))
        {
            break;
        }

        float boundsMin;
        float boundsMax;

        if(node.PlaneType == 0)
        {
            boundsMin = b.Bounds.Min.X;
            boundsMax = b.Bounds.Max.X;
        }
        else if(node.PlaneType == 1)
        {
            boundsMin = b.Bounds.Min.Y;
            boundsMax = b.Bounds.Max.Y;
        }
        else
        {
            boundsMin = b.Bounds.Min.Z;
            boundsMax = b.Bounds.Max.Z;
        }

        if(boundsMin >= node.PlaneDistance)
        {
            node = node.Children[0]; // entirely in front
        }
        else if(boundsMax <= node.PlaneDistance)
        {
            node = node.Children[1]; // entirely behind
        }
        else
        {
            // spans the plane: filter into the back subtree, keep walking the front
            FilterBrushIntoTree(model, node.Children[1], b);
            node = node.Children[0];
        }
    }

    node.Brushes.Add(b);
}
/// <summary>
/// Parses the "brushes" section of a .cm file, building each brush from its
/// plane list, bounds and contents token, then filtering it into the BSP tree.
/// </summary>
private void ParseBrushes(idLexer lexer, CollisionModel model)
{
    // newer .cm files put a number here; consume it when present
    idToken token = lexer.CheckTokenType(TokenType.Number, 0);

    lexer.ExpectTokenString("{");

    while(lexer.CheckTokenString("}") == false)
    {
        // parse brush
        int planeCount = lexer.ParseInt();

        CollisionModelBrush b = new CollisionModelBrush();
        b.Contents = ContentFlags.All;
        b.Material = _traceModelMaterial;
        b.Planes = new Plane[planeCount];

        lexer.ExpectTokenString("{");

        for(int planeIndex = 0; planeIndex < planeCount; planeIndex++)
        {
            // each plane is a 3-component normal followed by its distance
            float[] components = lexer.Parse1DMatrix(3);

            b.Planes[planeIndex].Normal = new Vector3(components[0], components[1], components[2]);
            b.Planes[planeIndex].D = lexer.ParseFloat();
        }

        lexer.ExpectTokenString("}");

        float[] corner = lexer.Parse1DMatrix(3);
        b.Bounds.Min = new Vector3(corner[0], corner[1], corner[2]);

        corner = lexer.Parse1DMatrix(3);
        b.Bounds.Max = new Vector3(corner[0], corner[1], corner[2]);

        token = lexer.ReadToken();

        if(token.Type == TokenType.Number)
        {
            // old .cm files use a single integer
            b.Contents = (ContentFlags) token.ToInt32();
        }
        else
        {
            b.Contents = ContentsFromString(token.ToString());
        }

        b.CheckCount = 0;
        b.PrimitiveCount = 0;

        // filter brush into tree
        FilterBrushIntoTree(model, model.Node, b);
    }
}
/// <summary>
/// Parses one collision model block from a .cm file: name, then any of the
/// vertices/edges/nodes/polygons/brushes sections, then derives the model's
/// bounds and contents from the built tree. Always returns true.
/// </summary>
private bool ParseCollisionModel(idLexer lexer)
{
    CollisionModel model = new CollisionModel();
    _models[_modelCount++] = model;

    // parse the file
    idToken token = lexer.ExpectTokenType(TokenType.String, 0);
    model.Name = token.ToString();

    lexer.ExpectTokenString("{");

    while(lexer.CheckTokenString("}") == false)
    {
        token = lexer.ReadToken();

        // section keywords are matched case-insensitively
        switch(token.ToString().ToLower())
        {
            case "vertices":
                ParseVertices(lexer, model);
                break;

            case "edges":
                ParseEdges(lexer, model);
                break;

            case "nodes":
                lexer.ExpectTokenString("{");
                model.Node = ParseNodes(lexer, model, null);
                lexer.ExpectTokenString("}");
                break;

            case "polygons":
                ParsePolygons(lexer, model);
                break;

            case "brushes":
                ParseBrushes(lexer, model);
                break;

            default:
                lexer.Error("ParseCollisionModel: bad token \"{0}\"", token);
                break;
        }
    }

    // calculate edge normals
    _checkCount++;

    idConsole.Warning("TODO: CalculateEdgeNormals(model, model.Node);");

    // get model bounds from brush and polygon bounds
    model.Bounds = GetNodeBounds(model.Node);

    // get model contents
    model.Contents = GetNodeContents(model.Node);

    idConsole.Warning("TODO: used memory");

    // total memory used by this model
    /*model->usedMemory = model->numVertices * sizeof(cm_vertex_t) +
        model->numEdges * sizeof(cm_edge_t) +
        model->polygonMemory +
        model->brushMemory +
        model->numNodes * sizeof(cm_node_t) +
        model->numPolygonRefs * sizeof(cm_polygonRef_t) +
        model->numBrushRefs * sizeof(cm_brushRef_t);*/

    return true;
}
/// <summary>
/// Reads a single collision model definition from the lexer — its quoted name
/// followed by a braced body of named sections — then computes the model's
/// overall bounds and contents from the parsed tree. Always returns true.
/// </summary>
private bool ParseCollisionModel(idLexer lexer)
{
    CollisionModel model = new CollisionModel();
    _models[_modelCount++] = model;

    // parse the file
    idToken token = lexer.ExpectTokenType(TokenType.String, 0);
    model.Name = token.ToString();

    lexer.ExpectTokenString("{");

    while(lexer.CheckTokenString("}") == false)
    {
        token = lexer.ReadToken();

        string section = token.ToString().ToLower();

        if(section == "vertices")
        {
            ParseVertices(lexer, model);
        }
        else if(section == "edges")
        {
            ParseEdges(lexer, model);
        }
        else if(section == "nodes")
        {
            lexer.ExpectTokenString("{");
            model.Node = ParseNodes(lexer, model, null);
            lexer.ExpectTokenString("}");
        }
        else if(section == "polygons")
        {
            ParsePolygons(lexer, model);
        }
        else if(section == "brushes")
        {
            ParseBrushes(lexer, model);
        }
        else
        {
            lexer.Error("ParseCollisionModel: bad token \"{0}\"", token);
        }
    }

    // calculate edge normals
    _checkCount++;

    idConsole.Warning("TODO: CalculateEdgeNormals(model, model.Node);");

    // get model bounds from brush and polygon bounds
    model.Bounds = GetNodeBounds(model.Node);

    // get model contents
    model.Contents = GetNodeContents(model.Node);

    idConsole.Warning("TODO: used memory");

    // total memory used by this model
    /*model->usedMemory = model->numVertices * sizeof(cm_vertex_t) +
        model->numEdges * sizeof(cm_edge_t) +
        model->polygonMemory +
        model->brushMemory +
        model->numNodes * sizeof(cm_node_t) +
        model->numPolygonRefs * sizeof(cm_polygonRef_t) +
        model->numBrushRefs * sizeof(cm_brushRef_t);*/

    return true;
}
/// <summary>
/// Reads the "edges" section into <paramref name="model"/>.Edges and
/// accumulates the model's internal-edge total.
/// </summary>
private void ParseEdges(idLexer lexer, CollisionModel model)
{
    lexer.ExpectTokenString("{");

    int edgeCount = lexer.ParseInt();

    model.Edges = new CollisionModelEdge[edgeCount];

    for(int edgeIndex = 0; edgeIndex < edgeCount; edgeIndex++)
    {
        lexer.ExpectTokenString("(");

        // the two vertex indices of this edge, read in file order
        model.Edges[edgeIndex].VertexCount = new int[] { lexer.ParseInt(), lexer.ParseInt() };

        lexer.ExpectTokenString(")");

        model.Edges[edgeIndex].Side = 0;
        model.Edges[edgeIndex].SideSet = 0;
        model.Edges[edgeIndex].Internal = (ushort) lexer.ParseInt();
        model.Edges[edgeIndex].UserCount = (ushort) lexer.ParseInt();
        model.Edges[edgeIndex].Normal = Vector3.Zero;
        model.Edges[edgeIndex].CheckCount = 0;

        model.InternalEdgeCount += model.Edges[edgeIndex].Internal;
    }

    lexer.ExpectTokenString("}");
}
/// <summary>
/// Parses one BSP node "( planeType planeDistance )"; interior nodes
/// (planeType != -1) recursively parse their front child then back child.
/// Bumps the model's node counter for every node created.
/// </summary>
private CollisionModelNode ParseNodes(idLexer lexer, CollisionModel model, CollisionModelNode parent)
{
    model.NodeCount++;

    lexer.ExpectTokenString("(");

    CollisionModelNode result = new CollisionModelNode();
    result.Parent = parent;

    // plane type precedes plane distance in the file
    result.PlaneType = lexer.ParseInt();
    result.PlaneDistance = lexer.ParseFloat();

    lexer.ExpectTokenString(")");

    if(result.PlaneType != -1)
    {
        // front subtree first, then back subtree
        result.Children[0] = ParseNodes(lexer, model, result);
        result.Children[1] = ParseNodes(lexer, model, result);
    }

    return result;
}
/// <summary>
/// Reads the "polygons" section: for each polygon, its edge index list, plane,
/// bounds and material name; the finished polygon is filtered into the tree.
/// </summary>
private void ParsePolygons(idLexer lexer, CollisionModel model)
{
    // newer .cm files put a number here; consume it when present
    idToken token = lexer.CheckTokenType(TokenType.Number, 0);

    float[] components;
    Vector3 normal;

    lexer.ExpectTokenString("{");

    while(lexer.CheckTokenString("}") == false)
    {
        // parse polygon
        int edgeCount = lexer.ParseInt();

        CollisionModelPolygon p = new CollisionModelPolygon();
        p.Material = _traceModelMaterial;
        p.Contents = ContentFlags.All;
        p.Edges = new int[edgeCount];

        lexer.ExpectTokenString("(");

        for(int edgeIndex = 0; edgeIndex < edgeCount; edgeIndex++)
        {
            p.Edges[edgeIndex] = lexer.ParseInt();
        }

        lexer.ExpectTokenString(")");

        // plane: normal followed by distance
        components = lexer.Parse1DMatrix(3);
        normal = new Vector3(components[0], components[1], components[2]);

        p.Plane.Normal = normal;
        p.Plane.D = lexer.ParseFloat();

        components = lexer.Parse1DMatrix(3);
        p.Bounds.Min = new Vector3(components[0], components[1], components[2]);

        components = lexer.Parse1DMatrix(3);
        p.Bounds.Max = new Vector3(components[0], components[1], components[2]);

        // get material
        token = lexer.ExpectTokenType(TokenType.String, 0);

        p.Material = idE.DeclManager.FindMaterial(token.ToString());
        p.Contents = p.Material.ContentFlags;
        p.CheckCount = 0;

        // filter polygon into tree
        FilterPolygonIntoTree(model, model.Node, p);
    }
}
/// <summary>
/// Reads the "vertices" section into <paramref name="model"/>.Vertices: a count
/// followed by one 3-component point per vertex.
/// </summary>
private void ParseVertices(idLexer lexer, CollisionModel model)
{
    lexer.ExpectTokenString("{");

    int vertexCount = lexer.ParseInt();

    model.Vertices = new CollisionModelVertex[vertexCount];

    for(int vertexIndex = 0; vertexIndex < vertexCount; vertexIndex++)
    {
        float[] components = lexer.Parse1DMatrix(3);

        model.Vertices[vertexIndex].Point = new Vector3(components[0], components[1], components[2]);
        model.Vertices[vertexIndex].Side = 0;
        model.Vertices[vertexIndex].SideSet = 0;
        model.Vertices[vertexIndex].CheckCount = 0;
    }

    lexer.ExpectTokenString("}");
}
/// <summary>
/// Walks a polygon down the BSP tree toward the side(s) its bounds occupy,
/// recursing into the back child when the bounds span a split plane, and adds
/// the polygon to the node where the walk terminates.
/// </summary>
private void FilterPolygonIntoTree(CollisionModel model, CollisionModelNode node, CollisionModelPolygon p)
{
    // PlaneType == -1 means we've reached a leaf
    while(node.PlaneType != -1)
    {
        // no need to descend further once all children contain the bounds
        if(InsideAllChildren(node, p.Bounds))
        {
            break;
        }

        // bounds extents along the node's split axis (0 = X, 1 = Y, else Z)
        float nearEdge = (node.PlaneType == 0) ? p.Bounds.Min.X
            : (node.PlaneType == 1) ? p.Bounds.Min.Y
            : p.Bounds.Min.Z;

        float farEdge = (node.PlaneType == 0) ? p.Bounds.Max.X
            : (node.PlaneType == 1) ? p.Bounds.Max.Y
            : p.Bounds.Max.Z;

        if(nearEdge >= node.PlaneDistance)
        {
            node = node.Children[0];
        }
        else if(farEdge <= node.PlaneDistance)
        {
            node = node.Children[1];
        }
        else
        {
            // straddling: cover the back subtree recursively, continue down the front
            FilterPolygonIntoTree(model, node.Children[1], p);
            node = node.Children[0];
        }
    }

    node.Polygons.Add(p);
}
/// <summary>
/// Sets up the collision model used for trace models: a single leaf node stored
/// in the reserved slot past the regular sub models, plus the "_tracemodel"
/// material. Fatal-errors when that material is missing.
/// </summary>
private void SetupTraceModelStructure()
{
    // setup model
    CollisionModel model = new CollisionModel();

    // reserved slot just past the regular sub models holds the trace model
    _models[idE.MaxSubModels] = model;

    // create node to hold the collision data
    CollisionModelNode node = new CollisionModelNode();
    node.PlaneType = -1; // -1 marks a leaf node (no split plane)
    model.Node = node;

    // allocate vertex and edge arrays
    //model.Vertices = new CollisionModelVertex[idE.MaxTraceModelVertices];
    //model->edges = (cm_edge_t *) Mem_ClearedAlloc( model->maxEdges * sizeof(cm_edge_t) );

    // create a material for the trace model polygons
    _traceModelMaterial = idE.DeclManager.FindMaterial("_tracemodel", false);

    if(_traceModelMaterial == null)
    {
        idConsole.FatalError("_tracemodel material not found");
    }

    // allocate polygons
    // NOTE(review): the original C++ pre-allocation below is not yet ported — kept for reference.
    /*for ( i = 0; i < MAX_TRACEMODEL_POLYS; i++ ) {
        trmPolygons[i] = AllocPolygonReference( model, MAX_TRACEMODEL_POLYS );
        trmPolygons[i]->p = AllocPolygon( model, MAX_TRACEMODEL_POLYEDGES );
        trmPolygons[i]->p->bounds.Clear();
        trmPolygons[i]->p->plane.Zero();
        trmPolygons[i]->p->checkcount = 0;
        trmPolygons[i]->p->contents = -1; // all contents
        trmPolygons[i]->p->material = trmMaterial;
        trmPolygons[i]->p->numEdges = 0;
    }
    // allocate brush for position test
    trmBrushes[0] = AllocBrushReference( model, 1 );
    trmBrushes[0]->b = AllocBrush( model, MAX_TRACEMODEL_POLYS );
    trmBrushes[0]->b->primitiveNum = 0;
    trmBrushes[0]->b->bounds.Clear();
    trmBrushes[0]->b->checkcount = 0;
    trmBrushes[0]->b->contents = -1; // all contents
    trmBrushes[0]->b->numPlanes = 0;*/
}