/// <summary>
/// Rebuilds the per-mesh state and the skeleton updater after the model reference changed.
/// Does nothing when no model is assigned.
/// </summary>
private void ModelUpdated()
{
    if (model == null)
        return;

    // Create mesh-per-entity state
    meshInfos.Clear();
    foreach (var mesh in model.Meshes)
    {
        var info = new MeshInfo();
        meshInfos.Add(info);

        // Skinned meshes need a scratch palette with one blend matrix per bone
        if (mesh.Skinning != null)
        {
            info.BlendMatrices = new Matrix[mesh.Skinning.Bones.Length];
        }
    }

    // Reuse previous ModelViewHierarchy when one exists, otherwise build a fresh one
    if (skeleton == null)
    {
        skeleton = new SkeletonUpdater(model.Skeleton);
    }
    else
    {
        skeleton.Initialize(model.Skeleton);
    }
}
/// <summary>
/// Refreshes the skeleton updater after the model reference changed.
/// Does nothing when no model is assigned.
/// </summary>
private void ModelUpdated()
{
    if (model == null)
        return;

    // Reuse previous ModelViewHierarchy when one exists, otherwise build a fresh one
    if (skeleton == null)
    {
        skeleton = new SkeletonUpdater(model.Skeleton);
    }
    else
    {
        skeleton.Initialize(model.Skeleton);
    }
}
/// <inheritdoc/>
/// <remarks>
/// Resolves the world matrix of the skeleton node named <c>nodeName</c>; falls back to the
/// parent entity's TransformComponent matrix when no skeleton or matching node is available.
/// </remarks>
public override void ComputeMatrix(bool recursive, out Matrix matrix)
{
    // If model is not in the parent, we might want to force recursive update
    // (since parentModelComponent might not be updated yet)
    if (forceRecursive || recursive)
    {
        parentModelComponent.Entity.Transform.UpdateWorldMatrix();
    }

    // Updated? (rare slow path): the skeleton instance changed, re-resolve the node index
    if (parentModelComponent.Skeleton != skeleton)
    {
        skeleton = parentModelComponent.Skeleton;
        if (skeleton != null)
        {
            // Find our node index (int.MaxValue marks "not found"; last name match wins)
            nodeIndex = int.MaxValue;
            for (int index = 0; index < skeleton.Nodes.Length; index++)
            {
                var node = skeleton.Nodes[index];
                if (node.Name == nodeName)
                {
                    nodeIndex = index;
                }
            }
        }
    }

    // BUGFIX: previously, when the cached skeleton and the component's skeleton were both
    // null (so the slow path above was skipped), skeleton.Nodes threw NullReferenceException.
    // Guard the null skeleton and fall through to the transform fallback instead.
    if (skeleton != null)
    {
        var nodes = skeleton.Nodes;
        var nodeTransformations = skeleton.NodeTransformations;
        if (nodeIndex < nodes.Length)
        {
            // Compute
            matrix = nodeTransformations[nodeIndex].WorldMatrix;
            return;
        }
    }

    // Fallback to TransformComponent
    matrix = parentModelComponent.Entity.Transform.WorldMatrix;
}
/// <summary>
/// Captures a snapshot of a physics component's relevant state so that later
/// changes can be detected (see <c>HasChanged</c>).
/// </summary>
public PhysicsElementInfo(PhysicsComponent component, SkeletonUpdater skeleton)
{
    this.skeleton = skeleton;
    shape = component.ColliderShape;

    // Kinematic flag only applies to rigidbodies
    var rigidbody = component as RigidbodyComponent;
    isKinematic = rigidbody?.IsKinematic ?? false;

    // Keep our own clone of the shape descriptions so later edits are detectable
    colliderShapes = component.ColliderShapes == null ? null : CloneDescs(component.ColliderShapes);

    // Bone binding only applies to skinned components
    var skinnedBase = component as PhysicsSkinnedComponentBase;
    boneName = skinnedBase?.NodeName;
    boneIndex = skinnedBase?.BoneIndex ?? -1;

    // Trigger flag only applies to trigger-capable components
    var trigger = component as PhysicsTriggerComponentBase;
    isTrigger = trigger?.IsTrigger ?? false;
}
/// <inheritdoc/>
public override void ComputeMatrix(bool recursive, out Matrix matrix)
{
    // If model is not in the parent, we might want to force recursive update
    // (since parentModelComponent might not be updated yet)
    if (forceRecursive || recursive)
    {
        parentModelComponent.Entity.Transform.UpdateWorldMatrix();
    }

    // Rare slow path: the skeleton instance or its node count changed,
    // so the cached node index has to be re-resolved by name.
    var currentSkeleton = parentModelComponent.Skeleton;
    if (currentSkeleton != skeleton || (currentSkeleton != null && currentSkeleton.Nodes.Length != nodesLength))
    {
        skeleton = currentSkeleton;
        if (skeleton != null)
        {
            nodesLength = skeleton.Nodes.Length;

            // Find our node index (int.MaxValue marks "not found"; last name match wins)
            nodeIndex = int.MaxValue;
            var allNodes = skeleton.Nodes;
            for (int i = 0; i < allNodes.Length; i++)
            {
                if (allNodes[i].Name == nodeName)
                {
                    nodeIndex = i;
                }
            }
        }
    }

    // Fast path: read the resolved node's world matrix
    if (skeleton != null && nodeIndex < skeleton.Nodes.Length)
    {
        matrix = skeleton.NodeTransformations[nodeIndex].WorldMatrix;
        return;
    }

    // Fallback to TransformComponent
    matrix = parentModelComponent.Entity.Transform.WorldMatrix;
}
/// <summary>
/// Returns true when the component's current state no longer matches the snapshot
/// captured by this instance (shape, collider descriptions, kinematic/trigger flags,
/// skeleton or bone binding), meaning the debug representation must be recreated.
/// </summary>
public bool HasChanged(PhysicsComponent component, SkeletonUpdater skeletonUpdater)
{
    var skinnedBase = component as PhysicsSkinnedComponentBase;
    var triggerBase = component as PhysicsTriggerComponentBase;
    var newBoneName = skinnedBase?.NodeName;
    var newBoneIndex = skinnedBase?.BoneIndex ?? -1;
    var rigidbody = component as RigidbodyComponent;

    if (shape != component.ColliderShape)
        return true;
    if (colliderShapes == null && component.ColliderShapes != null)
        return true;
    if (colliderShapes != null && component.ColliderShapes == null)
        return true;
    if (DescsAreDifferent(colliderShapes, component.ColliderShapes))
        return true;
    if (component.ColliderShapeChanged)
        return true;
    if (rigidbody != null && isKinematic != rigidbody.IsKinematic)
        return true;
    if (skeleton != skeletonUpdater)
        return true;
    if (boneIndex != newBoneIndex)
        return true;
    // force recreation if we have a skeleton?.. wrong name tho is also possible...
    if (boneIndex == -1 && skeletonUpdater != null && !string.IsNullOrEmpty(boneName))
        return true;
    if (triggerBase != null && triggerBase.IsTrigger != isTrigger)
        return true;
    // force recreation in this case as well (shape exists but its debug entity is gone)
    if (shape != null && component.DebugEntity == null)
        return true;

    return boneName != newBoneName;
}
/// <summary>
/// Builds the skeleton asset from the source model: loads and adjusts the source skeleton,
/// warns about node mismatches between model and asset, removes nodes flagged as not
/// preserved ("optimized"), and re-parents/re-bakes the remaining nodes so they keep their
/// original world positions.
/// </summary>
/// <param name="commandContext">Build command context, used for logging warnings.</param>
/// <param name="contentManager">Content manager used to load the source skeleton.</param>
/// <returns>The filtered <c>Skeleton</c> (returned as <c>object</c> per the command contract).</returns>
private object ExportSkeleton(ICommandContext commandContext, ContentManager contentManager)
{
    var skeleton = LoadSkeleton(commandContext, contentManager);
    AdjustSkeleton(skeleton);

    // Node-name sets on both sides: what the model file contains vs. what the asset declares
    var modelNodes = new HashSet<string>(skeleton.Nodes.Select(x => x.Name));
    var skeletonNodes = new HashSet<string>(SkeletonNodesWithPreserveInfo.Select(x => x.Key));

    // List missing nodes on both sides, to display warnings
    var missingNodesInModel = new HashSet<string>(skeletonNodes);
    missingNodesInModel.ExceptWith(modelNodes);
    var missingNodesInAsset = new HashSet<string>(modelNodes);
    missingNodesInAsset.ExceptWith(skeletonNodes);

    // Output warnings if skeleton was not properly reimported from latest FBX
    if (missingNodesInAsset.Count > 0)
    {
        commandContext.Logger.Warning($"{missingNodesInAsset.Count} node(s) were present in model [{SourcePath}] but not in asset [{Location}], please reimport: {string.Join(", ", missingNodesInAsset)}");
    }
    if (missingNodesInModel.Count > 0)
    {
        commandContext.Logger.Warning($"{missingNodesInModel.Count} node(s) were present in asset [{Location}] but not in model [{SourcePath}], please reimport: {string.Join(", ", missingNodesInModel)}");
    }

    // Build node mapping to expected structure:
    // nodes whose preserve flag (Value) is false are "optimized" and will be removed
    var optimizedNodes = new HashSet<string>(SkeletonNodesWithPreserveInfo.Where(x => !x.Value).Select(x => x.Key));

    // Refresh skeleton updater with loaded skeleton (to be able to compute matrices)
    var hierarchyUpdater = new SkeletonUpdater(skeleton);
    hierarchyUpdater.UpdateMatrices();

    // Removed optimized nodes
    var filteredSkeleton = new Skeleton { Nodes = skeleton.Nodes.Where(x => !optimizedNodes.Contains(x.Name)).ToArray() };

    // Fix parent indices (since we removed some nodes)
    for (int i = 0; i < filteredSkeleton.Nodes.Length; ++i)
    {
        var parentIndex = filteredSkeleton.Nodes[i].ParentIndex;
        if (parentIndex != -1)
        {
            // Find appropriate parent to map to: walk up the ORIGINAL hierarchy
            // until an ancestor that survived the filtering is found (-1 if none)
            var newParentIndex = -1;
            while (newParentIndex == -1 && parentIndex != -1)
            {
                var nodeName = skeleton.Nodes[parentIndex].Name;
                parentIndex = skeleton.Nodes[parentIndex].ParentIndex;
                newParentIndex = filteredSkeleton.Nodes.IndexOf(x => x.Name == nodeName);
            }
            filteredSkeleton.Nodes[i].ParentIndex = newParentIndex;
        }
    }

    // Generate mapping
    var skeletonMapping = new SkeletonMapping(filteredSkeleton, skeleton);

    // Children of remapped nodes need to have their matrices updated
    for (int i = 0; i < skeleton.Nodes.Length; ++i)
    {
        // Skip node if it was itself remapped away (only keep nodes that map to themselves)
        if (skeletonMapping.SourceToSource[i] != i)
        {
            continue;
        }

        var node = skeleton.Nodes[i];
        var filteredIndex = skeletonMapping.SourceToTarget[i];
        var oldParentIndex = node.ParentIndex;
        if (oldParentIndex != -1 && skeletonMapping.SourceToSource[oldParentIndex] != oldParentIndex)
        {
            // Compute matrix for intermediate missing nodes
            var transformMatrix = CombineMatricesFromNodeIndices(hierarchyUpdater.NodeTransformations, skeletonMapping.SourceToSource[oldParentIndex], oldParentIndex);
            var localMatrix = hierarchyUpdater.NodeTransformations[i].LocalMatrix;

            // Combine it with local matrix, and use that instead in the new skeleton;
            // resulting node should be same position as before optimized nodes were removed
            localMatrix = Matrix.Multiply(localMatrix, transformMatrix);
            localMatrix.Decompose(out filteredSkeleton.Nodes[filteredIndex].Transform.Scale, out filteredSkeleton.Nodes[filteredIndex].Transform.Rotation, out filteredSkeleton.Nodes[filteredIndex].Transform.Position);
        }
    }

    return (filteredSkeleton);
}
/// <summary>
/// Builds the runtime model asset: loads model and skeleton, applies materials and import
/// scale, remaps meshes/skinning onto the asset skeleton, merges and splits meshes, computes
/// bounding volumes, and finally packs all vertex/index buffers into single shared buffers.
/// </summary>
/// <param name="commandContext">Build command context, used for logging.</param>
/// <param name="contentManager">Content manager used to load/save referenced assets.</param>
/// <returns>The processed <c>Model</c>, or null when input slot validation fails.</returns>
private object ExportModel(ICommandContext commandContext, ContentManager contentManager)
{
    // Read from model file
    var modelSkeleton = LoadSkeleton(commandContext, contentManager); // we get model skeleton to compare it to real skeleton we need to map to
    AdjustSkeleton(modelSkeleton);
    var model = LoadModel(commandContext, contentManager);
    if (!CheckInputSlots(commandContext, model))
    {
        return (null);
    }

    // Apply materials
    foreach (var modelMaterial in Materials)
    {
        if (modelMaterial.MaterialInstance?.Material == null)
        {
            commandContext.Logger.Verbose($"The material [{modelMaterial.Name}] is null in the list of materials.");
        }
        model.Materials.Add(modelMaterial.MaterialInstance);
    }

    model.BoundingBox = BoundingBox.Empty;

    Skeleton skeleton;
    if (SkeletonUrl != null || !MergeMeshes)
    {
        if (SkeletonUrl != null)
        {
            // Load the skeleton
            skeleton = contentManager.Load<Skeleton>(SkeletonUrl);
        }
        else
        {
            // No explicit skeleton but meshes are kept separate: persist the model's own
            // skeleton under a generated URL so the model can reference it
            skeleton = modelSkeleton;
            SkeletonUrl = Location + "_Skeleton_" + Guid.NewGuid();
            contentManager.Save(SkeletonUrl, skeleton);
        }

        // Assign skeleton to model
        model.Skeleton = AttachedReferenceManager.CreateProxyObject<Skeleton>(AssetId.Empty, SkeletonUrl);
    }
    else
    {
        skeleton = null;
    }

    var skeletonMapping = new SkeletonMapping(skeleton, modelSkeleton);

    // Refresh skeleton updater with model skeleton
    var hierarchyUpdater = new SkeletonUpdater(modelSkeleton);
    hierarchyUpdater.UpdateMatrices();

    // Move meshes in the new nodes
    foreach (var mesh in model.Meshes)
    {
        // Apply scale import on meshes
        if (!MathUtil.NearEqual(ScaleImport, 1.0f))
        {
            var transformationMatrix = Matrix.Scaling(ScaleImport);
            for (int vbIdx = 0; vbIdx < mesh.Draw.VertexBuffers.Length; vbIdx++)
            {
                mesh.Draw.VertexBuffers[vbIdx].TransformBuffer(ref transformationMatrix);
            }
        }

        var skinning = mesh.Skinning;
        if (skinning != null)
        {
            // Update node mapping
            // Note: we only remap skinning matrices, but we could directly remap skinning bones instead
            for (int i = 0; i < skinning.Bones.Length; ++i)
            {
                var linkNodeIndex = skinning.Bones[i].NodeIndex;
                var newLinkNodeIndex = skeletonMapping.SourceToSource[linkNodeIndex];
                var nodeIndex = mesh.NodeIndex;
                var newNodeIndex = skeletonMapping.SourceToSource[mesh.NodeIndex];
                skinning.Bones[i].NodeIndex = skeletonMapping.SourceToTarget[linkNodeIndex];

                // Adjust scale import
                if (!MathUtil.NearEqual(ScaleImport, 1.0f))
                {
                    skinning.Bones[i].LinkToMeshMatrix.TranslationVector = skinning.Bones[i].LinkToMeshMatrix.TranslationVector * ScaleImport;
                }

                // If it was remapped, we also need to update matrix
                if (nodeIndex != newNodeIndex)
                {
                    // Update mesh part
                    var transformMatrix = CombineMatricesFromNodeIndices(hierarchyUpdater.NodeTransformations, newNodeIndex, nodeIndex);
                    transformMatrix.Invert();
                    skinning.Bones[i].LinkToMeshMatrix = Matrix.Multiply(transformMatrix, skinning.Bones[i].LinkToMeshMatrix);
                }

                if (newLinkNodeIndex != linkNodeIndex)
                {
                    // Update link part
                    var transformLinkMatrix = CombineMatricesFromNodeIndices(hierarchyUpdater.NodeTransformations, newLinkNodeIndex, linkNodeIndex);
                    skinning.Bones[i].LinkToMeshMatrix = Matrix.Multiply(skinning.Bones[i].LinkToMeshMatrix, transformLinkMatrix);
                }
            }
        }

        // Check if there was a remap using model skeleton
        if (skeletonMapping.SourceToSource[mesh.NodeIndex] != mesh.NodeIndex)
        {
            // Transform vertices
            var transformationMatrix = CombineMatricesFromNodeIndices(hierarchyUpdater.NodeTransformations, skeletonMapping.SourceToSource[mesh.NodeIndex], mesh.NodeIndex);
            for (int vbIdx = 0; vbIdx < mesh.Draw.VertexBuffers.Length; vbIdx++)
            {
                mesh.Draw.VertexBuffers[vbIdx].TransformBuffer(ref transformationMatrix);
            }

            // Check if geometry is inverted, to know if we need to reverse winding order
            // TODO: What to do if there is no index buffer? We should create one... (not happening yet)
            if (mesh.Draw.IndexBuffer == null)
            {
                throw new InvalidOperationException();
            }

            Matrix rotation;
            Vector3 scale, translation;
            if (transformationMatrix.Decompose(out scale, out rotation, out translation) && scale.X * scale.Y * scale.Z < 0)
            {
                mesh.Draw.ReverseWindingOrder();
            }
        }

        // Update new node index using real asset skeleton
        mesh.NodeIndex = skeletonMapping.SourceToTarget[mesh.NodeIndex];
    }

    // Apply custom model modifiers
    if (ModelModifiers != null)
    {
        foreach (var modifier in ModelModifiers)
        {
            modifier.Apply(commandContext, model);
        }
    }

    // Merge meshes with same parent nodes, material and skinning
    var meshesByNodes = model.Meshes.GroupBy(x => x.NodeIndex).ToList();
    foreach (var meshesByNode in meshesByNodes)
    {
        // This logic to detect similar material is kept from old code; this should be reviewed/improved at some point
        // NOTE(review): the hash function `x => 0` forces GroupBy to compare every pair with
        // the equality lambda — O(n^2) per node group, presumably acceptable at build time
        foreach (var meshesPerDrawCall in meshesByNode.GroupBy(x => x, new AnonymousEqualityComparer<Mesh>((x, y) =>
            x.MaterialIndex == y.MaterialIndex && // Same material
            ArrayExtensions.ArraysEqual(x.Skinning?.Bones, y.Skinning?.Bones) && // Same bones
            CompareParameters(model, x, y) && // Same parameters
            CompareShadowOptions(model, x, y), // Same shadow parameters
            x => 0)).ToList())
        {
            if (meshesPerDrawCall.Count() == 1)
            {
                // Nothing to group, skip to next entry
                continue;
            }

            // Remove old meshes
            foreach (var mesh in meshesPerDrawCall)
            {
                model.Meshes.Remove(mesh);
            }

            // Add new combined mesh(es)
            var baseMesh = meshesPerDrawCall.First();
            var newMeshList = meshesPerDrawCall.Select(x => x.Draw).ToList().GroupDrawData(Allow32BitIndex);
            foreach (var generatedMesh in newMeshList)
            {
                model.Meshes.Add(new Mesh(generatedMesh, baseMesh.Parameters)
                {
                    MaterialIndex = baseMesh.MaterialIndex,
                    Name = baseMesh.Name,
                    Draw = generatedMesh,
                    NodeIndex = baseMesh.NodeIndex,
                    Skinning = baseMesh.Skinning,
                });
            }
        }
    }

    // split the meshes if necessary
    model.Meshes = SplitExtensions.SplitMeshes(model.Meshes, Allow32BitIndex);

    // Refresh skeleton updater with asset skeleton
    hierarchyUpdater = new SkeletonUpdater(skeleton);
    hierarchyUpdater.UpdateMatrices();

    // bounding boxes
    var modelBoundingBox = model.BoundingBox;
    var modelBoundingSphere = model.BoundingSphere;
    foreach (var mesh in model.Meshes)
    {
        var vertexBuffers = mesh.Draw.VertexBuffers;
        for (int vbIdx = 0; vbIdx < vertexBuffers.Length; vbIdx++)
        {
            // Compute local mesh bounding box (no node transformation)
            Matrix matrix = Matrix.Identity;
            mesh.BoundingBox = vertexBuffers[vbIdx].ComputeBounds(ref matrix, out mesh.BoundingSphere);

            // Compute model bounding box (includes node transformation)
            hierarchyUpdater.GetWorldMatrix(mesh.NodeIndex, out matrix);
            BoundingSphere meshBoundingSphere;
            var meshBoundingBox = vertexBuffers[vbIdx].ComputeBounds(ref matrix, out meshBoundingSphere);
            BoundingBox.Merge(ref modelBoundingBox, ref meshBoundingBox, out modelBoundingBox);
            BoundingSphere.Merge(ref modelBoundingSphere, ref meshBoundingSphere, out modelBoundingSphere);
        }

        // TODO: temporary Always try to compact
        mesh.Draw.CompactIndexBuffer();
    }
    model.BoundingBox = modelBoundingBox;
    model.BoundingSphere = modelBoundingSphere;

    // Count unique meshes (they can be shared)
    var uniqueDrawMeshes = model.Meshes.Select(x => x.Draw).Distinct();

    // Count unique vertex buffers and squish them together in a single buffer
    var uniqueVB = uniqueDrawMeshes.SelectMany(x => x.VertexBuffers).Distinct().ToList();

    var vbMap = new Dictionary<VertexBufferBinding, VertexBufferBinding>();
    var sizeVertexBuffer = uniqueVB.Select(x => x.Buffer.GetSerializationData().Content.Length).Sum();
    var vertexBuffer = new BufferData(BufferFlags.VertexBuffer, new byte[sizeVertexBuffer]);
    // Note: same serializable instance reused for every binding so they all share one buffer
    var vertexBufferSerializable = vertexBuffer.ToSerializableVersion();

    var vertexBufferNextIndex = 0;
    foreach (var vbBinding in uniqueVB)
    {
        var oldVertexBuffer = vbBinding.Buffer.GetSerializationData().Content;
        Array.Copy(oldVertexBuffer, 0, vertexBuffer.Content, vertexBufferNextIndex, oldVertexBuffer.Length);
        vbMap.Add(vbBinding, new VertexBufferBinding(vertexBufferSerializable, vbBinding.Declaration, vbBinding.Count, vbBinding.Stride, vertexBufferNextIndex));
        vertexBufferNextIndex += oldVertexBuffer.Length;
    }

    // Count unique index buffers and squish them together in a single buffer
    var uniqueIB = uniqueDrawMeshes.Select(x => x.IndexBuffer).NotNull().Distinct().ToList();
    var sizeIndexBuffer = 0;
    foreach (var ibBinding in uniqueIB)
    {
        // Make sure 32bit indices are properly aligned to 4 bytes in case the last alignment was 2 bytes
        if (ibBinding.Is32Bit && sizeIndexBuffer % 4 != 0)
        {
            sizeIndexBuffer += 2;
        }
        sizeIndexBuffer += ibBinding.Buffer.GetSerializationData().Content.Length;
    }

    var ibMap = new Dictionary<IndexBufferBinding, IndexBufferBinding>();
    if (uniqueIB.Count > 0)
    {
        var indexBuffer = new BufferData(BufferFlags.IndexBuffer, new byte[sizeIndexBuffer]);
        var indexBufferSerializable = indexBuffer.ToSerializableVersion();
        var indexBufferNextIndex = 0;
        foreach (var ibBinding in uniqueIB)
        {
            var oldIndexBuffer = ibBinding.Buffer.GetSerializationData().Content;

            // Make sure 32bit indices are properly aligned to 4 bytes in case the last alignment was 2 bytes
            if (ibBinding.Is32Bit && indexBufferNextIndex % 4 != 0)
            {
                indexBufferNextIndex += 2;
            }

            Array.Copy(oldIndexBuffer, 0, indexBuffer.Content, indexBufferNextIndex, oldIndexBuffer.Length);
            ibMap.Add(ibBinding, new IndexBufferBinding(indexBufferSerializable, ibBinding.Is32Bit, ibBinding.Count, indexBufferNextIndex));
            indexBufferNextIndex += oldIndexBuffer.Length;
        }
    }

    // Assign new vertex and index buffer bindings
    foreach (var drawMesh in uniqueDrawMeshes)
    {
        for (int i = 0; i < drawMesh.VertexBuffers.Length; i++)
        {
            drawMesh.VertexBuffers[i] = vbMap[drawMesh.VertexBuffers[i]];
        }
        if (drawMesh.IndexBuffer != null)
        {
            drawMesh.IndexBuffer = ibMap[drawMesh.IndexBuffer];
        }
    }

    vbMap.Clear();
    ibMap.Clear();

    // Convert to Entity
    return (model);
}
/// <summary>
/// Legacy model export path: loads model and skeleton, applies materials and import scale,
/// remaps meshes/skinning onto the asset skeleton, merges and splits meshes, computes
/// bounding volumes, and merges all Draw vertex/index buffers into one VB and one IB.
/// Unlike the newer variant, this one only transforms the first vertex buffer of each mesh
/// and assumes every mesh has an index buffer.
/// </summary>
/// <param name="commandContext">Build command context, used for logging.</param>
/// <param name="contentManager">Content manager used to load referenced assets.</param>
/// <returns>The processed <c>Model</c>.</returns>
private object ExportModel(ICommandContext commandContext, ContentManager contentManager)
{
    // Read from model file
    var modelSkeleton = LoadSkeleton(commandContext, contentManager); // we get model skeleton to compare it to real skeleton we need to map to
    AdjustSkeleton(modelSkeleton);
    var model = LoadModel(commandContext, contentManager);

    // Apply materials
    foreach (var modelMaterial in Materials)
    {
        if (modelMaterial.MaterialInstance?.Material == null)
        {
            commandContext.Logger.Warning($"The material [{modelMaterial.Name}] is null in the list of materials.");
        }
        model.Materials.Add(modelMaterial.MaterialInstance);
    }

    model.BoundingBox = BoundingBox.Empty;

    // NOTE(review): the condition does not depend on `mesh`, so this logs the error once
    // per mesh rather than once per model — presumably unintended; confirm before changing
    foreach (var mesh in model.Meshes)
    {
        if (TessellationAEN)
        {
            // TODO: Generate AEN model view
            commandContext.Logger.Error("TessellationAEN is not supported in {0}", ContextAsString);
        }
    }

    SkeletonMapping skeletonMapping;
    Skeleton skeleton;
    if (SkeletonUrl != null)
    {
        // Load skeleton and process it
        skeleton = contentManager.Load<Skeleton>(SkeletonUrl);

        // Assign skeleton to model
        model.Skeleton = AttachedReferenceManager.CreateProxyObject<Skeleton>(Guid.Empty, SkeletonUrl);
    }
    else
    {
        skeleton = null;
    }

    skeletonMapping = new SkeletonMapping(skeleton, modelSkeleton);

    // Refresh skeleton updater with model skeleton
    var hierarchyUpdater = new SkeletonUpdater(modelSkeleton);
    hierarchyUpdater.UpdateMatrices();

    // Move meshes in the new nodes
    foreach (var mesh in model.Meshes)
    {
        // Apply scale import on meshes (first vertex buffer only in this legacy path)
        if (!MathUtil.NearEqual(ScaleImport, 1.0f))
        {
            var transformationMatrix = Matrix.Scaling(ScaleImport);
            mesh.Draw.VertexBuffers[0].TransformBuffer(ref transformationMatrix);
        }

        var skinning = mesh.Skinning;
        if (skinning != null)
        {
            // Update node mapping
            // Note: we only remap skinning matrices, but we could directly remap skinning bones instead
            for (int i = 0; i < skinning.Bones.Length; ++i)
            {
                var linkNodeIndex = skinning.Bones[i].NodeIndex;
                var newLinkNodeIndex = skeletonMapping.SourceToSource[linkNodeIndex];
                var nodeIndex = mesh.NodeIndex;
                var newNodeIndex = skeletonMapping.SourceToSource[mesh.NodeIndex];
                skinning.Bones[i].NodeIndex = skeletonMapping.SourceToTarget[linkNodeIndex];

                // Adjust scale import
                if (!MathUtil.NearEqual(ScaleImport, 1.0f))
                {
                    skinning.Bones[i].LinkToMeshMatrix.TranslationVector = skinning.Bones[i].LinkToMeshMatrix.TranslationVector * ScaleImport;
                }

                // If it was remapped, we also need to update matrix
                if (nodeIndex != newNodeIndex)
                {
                    // Update mesh part
                    var transformMatrix = CombineMatricesFromNodeIndices(hierarchyUpdater.NodeTransformations, newNodeIndex, nodeIndex);
                    transformMatrix.Invert();
                    skinning.Bones[i].LinkToMeshMatrix = Matrix.Multiply(transformMatrix, skinning.Bones[i].LinkToMeshMatrix);
                }

                if (newLinkNodeIndex != linkNodeIndex)
                {
                    // Update link part
                    var transformLinkMatrix = CombineMatricesFromNodeIndices(hierarchyUpdater.NodeTransformations, newLinkNodeIndex, linkNodeIndex);
                    skinning.Bones[i].LinkToMeshMatrix = Matrix.Multiply(skinning.Bones[i].LinkToMeshMatrix, transformLinkMatrix);
                }
            }
        }

        // Check if there was a remap using model skeleton
        if (skeletonMapping.SourceToSource[mesh.NodeIndex] != mesh.NodeIndex)
        {
            // Transform vertices
            var transformationMatrix = CombineMatricesFromNodeIndices(hierarchyUpdater.NodeTransformations, skeletonMapping.SourceToSource[mesh.NodeIndex], mesh.NodeIndex);
            mesh.Draw.VertexBuffers[0].TransformBuffer(ref transformationMatrix);

            // Check if geometry is inverted, to know if we need to reverse winding order
            // TODO: What to do if there is no index buffer? We should create one... (not happening yet)
            if (mesh.Draw.IndexBuffer == null)
            {
                throw new InvalidOperationException();
            }

            Matrix rotation;
            Vector3 scale, translation;
            if (transformationMatrix.Decompose(out scale, out rotation, out translation) && scale.X * scale.Y * scale.Z < 0)
            {
                mesh.Draw.ReverseWindingOrder();
            }
        }

        // Update new node index using real asset skeleton
        mesh.NodeIndex = skeletonMapping.SourceToTarget[mesh.NodeIndex];
    }

    // Merge meshes with same parent nodes, material and skinning
    var meshesByNodes = model.Meshes.GroupBy(x => x.NodeIndex).ToList();
    foreach (var meshesByNode in meshesByNodes)
    {
        // This logic to detect similar material is kept from old code; this should be reviewed/improved at some point
        foreach (var meshesPerDrawCall in meshesByNode.GroupBy(x => x, new AnonymousEqualityComparer<Mesh>((x, y) =>
            x.MaterialIndex == y.MaterialIndex && // Same material
            ArrayExtensions.ArraysEqual(x.Skinning?.Bones, y.Skinning?.Bones) && // Same bones
            CompareParameters(model, x, y) && // Same parameters
            CompareShadowOptions(model, x, y), // Same shadow parameters
            x => 0)).ToList())
        {
            if (meshesPerDrawCall.Count() == 1)
            {
                // Nothing to group, skip to next entry
                continue;
            }

            // Remove old meshes
            foreach (var mesh in meshesPerDrawCall)
            {
                model.Meshes.Remove(mesh);
            }

            // Add new combined mesh(es)
            var baseMesh = meshesPerDrawCall.First();
            var newMeshList = meshesPerDrawCall.Select(x => x.Draw).ToList().GroupDrawData(Allow32BitIndex);
            foreach (var generatedMesh in newMeshList)
            {
                model.Meshes.Add(new Mesh(generatedMesh, baseMesh.Parameters)
                {
                    MaterialIndex = baseMesh.MaterialIndex,
                    Name = baseMesh.Name,
                    Draw = generatedMesh,
                    NodeIndex = baseMesh.NodeIndex,
                    Skinning = baseMesh.Skinning,
                });
            }
        }
    }

    // split the meshes if necessary
    model.Meshes = SplitExtensions.SplitMeshes(model.Meshes, Allow32BitIndex);

    // Refresh skeleton updater with asset skeleton
    hierarchyUpdater = new SkeletonUpdater(skeleton);
    hierarchyUpdater.UpdateMatrices();

    // bounding boxes
    var modelBoundingBox = model.BoundingBox;
    var modelBoundingSphere = model.BoundingSphere;
    foreach (var mesh in model.Meshes)
    {
        var vertexBuffers = mesh.Draw.VertexBuffers;
        if (vertexBuffers.Length > 0)
        {
            // Compute local mesh bounding box (no node transformation)
            Matrix matrix = Matrix.Identity;
            mesh.BoundingBox = vertexBuffers[0].ComputeBounds(ref matrix, out mesh.BoundingSphere);

            // Compute model bounding box (includes node transformation)
            hierarchyUpdater.GetWorldMatrix(mesh.NodeIndex, out matrix);
            BoundingSphere meshBoundingSphere;
            var meshBoundingBox = vertexBuffers[0].ComputeBounds(ref matrix, out meshBoundingSphere);
            BoundingBox.Merge(ref modelBoundingBox, ref meshBoundingBox, out modelBoundingBox);
            BoundingSphere.Merge(ref modelBoundingSphere, ref meshBoundingSphere, out modelBoundingSphere);
        }

        // TODO: temporary Always try to compact
        mesh.Draw.CompactIndexBuffer();
    }
    model.BoundingBox = modelBoundingBox;
    model.BoundingSphere = modelBoundingSphere;

    // merges all the Draw VB and IB together to produce one final VB and IB by entity.
    var sizeVertexBuffer = model.Meshes.SelectMany(x => x.Draw.VertexBuffers).Select(x => x.Buffer.GetSerializationData().Content.Length).Sum();
    var sizeIndexBuffer = 0;
    foreach (var x in model.Meshes)
    {
        // Let's be aligned (if there was 16bit indices before, we might be off)
        if (x.Draw.IndexBuffer.Is32Bit && sizeIndexBuffer % 4 != 0)
        {
            sizeIndexBuffer += 2;
        }

        sizeIndexBuffer += x.Draw.IndexBuffer.Buffer.GetSerializationData().Content.Length;
    }
    var vertexBuffer = new BufferData(BufferFlags.VertexBuffer, new byte[sizeVertexBuffer]);
    var indexBuffer = new BufferData(BufferFlags.IndexBuffer, new byte[sizeIndexBuffer]);

    // Note: reusing same instance, to avoid having many VB with same hash but different URL
    var vertexBufferSerializable = vertexBuffer.ToSerializableVersion();
    var indexBufferSerializable = indexBuffer.ToSerializableVersion();

    var vertexBufferNextIndex = 0;
    var indexBufferNextIndex = 0;
    foreach (var drawMesh in model.Meshes.Select(x => x.Draw))
    {
        // the index buffer
        var oldIndexBuffer = drawMesh.IndexBuffer.Buffer.GetSerializationData().Content;

        // Let's be aligned (if there was 16bit indices before, we might be off)
        if (drawMesh.IndexBuffer.Is32Bit && indexBufferNextIndex % 4 != 0)
        {
            indexBufferNextIndex += 2;
        }

        Array.Copy(oldIndexBuffer, 0, indexBuffer.Content, indexBufferNextIndex, oldIndexBuffer.Length);

        drawMesh.IndexBuffer = new IndexBufferBinding(indexBufferSerializable, drawMesh.IndexBuffer.Is32Bit, drawMesh.IndexBuffer.Count, indexBufferNextIndex);
        indexBufferNextIndex += oldIndexBuffer.Length;

        // the vertex buffers
        for (int index = 0; index < drawMesh.VertexBuffers.Length; index++)
        {
            var vertexBufferBinding = drawMesh.VertexBuffers[index];
            var oldVertexBuffer = vertexBufferBinding.Buffer.GetSerializationData().Content;

            Array.Copy(oldVertexBuffer, 0, vertexBuffer.Content, vertexBufferNextIndex, oldVertexBuffer.Length);

            drawMesh.VertexBuffers[index] = new VertexBufferBinding(vertexBufferSerializable, vertexBufferBinding.Declaration, vertexBufferBinding.Count, vertexBufferBinding.Stride, vertexBufferNextIndex);
            vertexBufferNextIndex += oldVertexBuffer.Length;
        }
    }

    // Convert to Entity
    return (model);
}