Example #1
        public static void SelectEntity(IEnumerable<Entity> entities)
        {
            // Update property editor selection.
            if (selectedEntitiesContext != null)
            {
                selectedEntitiesContext.ViewModelByGuid.Clear();
                var viewModels = entities
                                 .Where(entity => entity != null)
                                 .Select(entity => selectedEntitiesContext.GetModelView(entity).Children.First(x => x.PropertyName == "Components"))
                                 .ToArray();

                if (viewModels.Length > 1)
                {
                    selectedEntitiesContext.Root = ViewModelController.Combine(selectedEntitiesContext, viewModels);
                }
                else
                {
                    selectedEntitiesContext.Root = viewModels.FirstOrDefault();
                }
            }

            // Update picking system (gizmo).
            // It will also update the remote selection in entity tree view.
            var entitiesArray = entities.ToArray();

            if (!ArrayExtensions.ArraysEqual(pickingSystem.SelectedEntities, entitiesArray))
            {
                pickingSystem.SelectedEntities = entitiesArray;
            }

            entitiesChangePacketEvent.Set();
        }
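All of these snippets rely on ArrayExtensions.ArraysEqual to compare two arrays element by element. As a rough mental model only (not the actual ArrayExtensions implementation, whose signature and null handling may differ), such a helper can be sketched like this, assuming an optional IEqualityComparer<T> parameter:

 // Illustrative sketch only; requires: using System.Collections.Generic;
 public static bool ArraysEqualSketch<T>(T[] a, T[] b, IEqualityComparer<T> comparer = null)
 {
     if (ReferenceEquals(a, b))
     {
         return true;
     }
     if (a == null || b == null || a.Length != b.Length)
     {
         return false;
     }

     if (comparer == null)
     {
         comparer = EqualityComparer<T>.Default;
     }

     // Compare element by element using the supplied or default comparer.
     for (int i = 0; i < a.Length; i++)
     {
         if (!comparer.Equals(a[i], b[i]))
         {
             return false;
         }
     }
     return true;
 }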
Example #2
 public bool Equals(DiffComponent other)
 {
     if (ReferenceEquals(null, other))
     {
         return false;
     }
     if (ReferenceEquals(this, other))
     {
         return true;
     }
     return string.Equals(Name, other.Name) &&
            Position.Equals(other.Position) &&
            ArrayExtensions.ArraysEqual(Positions, other.Positions);
 }
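When Equals is implemented over a set of fields, as in the DiffComponent example above, a matching GetHashCode override over the same fields is normally expected. The following is only a hypothetical companion sketch reusing the field names from the example; the hash combination shown is illustrative, not the project's actual code:

 public override int GetHashCode()
 {
     unchecked
     {
         // Combine the same fields that participate in Equals.
         var hash = Name != null ? Name.GetHashCode() : 0;
         hash = (hash * 397) ^ Position.GetHashCode();
         // Hashing only the array length keeps GetHashCode cheap and stable;
         // full element-wise comparison still happens in Equals via ArraysEqual.
         hash = (hash * 397) ^ (Positions != null ? Positions.Length : 0);
         return hash;
     }
 }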
Example #3
        public override bool Equals(ShaderSource x, ShaderSource y)
        {
            if (x == null && y == null)
            {
                return true;
            }

            if (x == null || y == null)
            {
                return false;
            }

            if (x.GetType() != y.GetType())
            {
                return false;
            }

            if (x is ShaderClassCode)
            {
                var x1 = (ShaderClassCode)x;
                var y1 = (ShaderClassCode)y;
                return x1.ClassName == y1.ClassName &&
                       ArrayExtensions.ArraysEqual(x1.GenericArguments, y1.GenericArguments);
            }
            if (x is ShaderMixinSource)
            {
                var x1 = (ShaderMixinSource)x;
                var y1 = (ShaderMixinSource)y;
                return ArrayExtensions.ArraysEqual(x1.Mixins, y1.Mixins, this) &&
                       ArrayExtensions.ArraysEqual(x1.Compositions.OrderBy(item => item.Key).ToArray(), y1.Compositions.OrderBy(item => item.Key).ToArray(), compositionComparer);
            }
            if (x is ShaderArraySource)
            {
                var x1 = (ShaderArraySource)x;
                var y1 = (ShaderArraySource)y;
                return ArrayExtensions.ArraysEqual(x1.Values, y1.Values, this);
            }

            throw new InvalidOperationException("Invalid ShaderSource comparison.");
        }
Example #4
 public bool Equals(VertexAttribsKey other)
 {
     return Hash == other.Hash && ArrayExtensions.ArraysEqual(Attribs, other.Attribs);
 }
Example #5
 public bool Equals(ShaderSourceKey other)
 {
     return Equals(other.TypeName, TypeName) && Equals(other.generics, generics) && ArrayExtensions.ArraysEqual(other.shaderMacros, shaderMacros);
 }
Example #6
        private object ExportModel(ICommandContext commandContext, ContentManager contentManager)
        {
            // Read from model file
            var modelSkeleton = LoadSkeleton(commandContext, contentManager); // we get model skeleton to compare it to real skeleton we need to map to

            AdjustSkeleton(modelSkeleton);
            var model = LoadModel(commandContext, contentManager);

            if (!CheckInputSlots(commandContext, model))
            {
                return null;
            }

            // Apply materials
            foreach (var modelMaterial in Materials)
            {
                if (modelMaterial.MaterialInstance?.Material == null)
                {
                    commandContext.Logger.Verbose($"The material [{modelMaterial.Name}] is null in the list of materials.");
                }
                model.Materials.Add(modelMaterial.MaterialInstance);
            }

            model.BoundingBox = BoundingBox.Empty;

            Skeleton skeleton;

            if (SkeletonUrl != null || !MergeMeshes)
            {
                if (SkeletonUrl != null)
                {
                    // Load the skeleton
                    skeleton = contentManager.Load<Skeleton>(SkeletonUrl);
                }
                else
                {
                    skeleton    = modelSkeleton;
                    SkeletonUrl = Location + "_Skeleton_" + Guid.NewGuid();
                    contentManager.Save(SkeletonUrl, skeleton);
                }

                // Assign skeleton to model
                model.Skeleton = AttachedReferenceManager.CreateProxyObject<Skeleton>(AssetId.Empty, SkeletonUrl);
            }
            else
            {
                skeleton = null;
            }

            var skeletonMapping = new SkeletonMapping(skeleton, modelSkeleton);

            // Refresh skeleton updater with model skeleton
            var hierarchyUpdater = new SkeletonUpdater(modelSkeleton);

            hierarchyUpdater.UpdateMatrices();

            // Move meshes in the new nodes
            foreach (var mesh in model.Meshes)
            {
                // Apply scale import on meshes
                if (!MathUtil.NearEqual(ScaleImport, 1.0f))
                {
                    var transformationMatrix = Matrix.Scaling(ScaleImport);
                    for (int vbIdx = 0; vbIdx < mesh.Draw.VertexBuffers.Length; vbIdx++)
                    {
                        mesh.Draw.VertexBuffers[vbIdx].TransformBuffer(ref transformationMatrix);
                    }
                }

                var skinning = mesh.Skinning;
                if (skinning != null)
                {
                    // Update node mapping
                    // Note: we only remap skinning matrices, but we could directly remap skinning bones instead
                    for (int i = 0; i < skinning.Bones.Length; ++i)
                    {
                        var linkNodeIndex    = skinning.Bones[i].NodeIndex;
                        var newLinkNodeIndex = skeletonMapping.SourceToSource[linkNodeIndex];

                        var nodeIndex    = mesh.NodeIndex;
                        var newNodeIndex = skeletonMapping.SourceToSource[mesh.NodeIndex];

                        skinning.Bones[i].NodeIndex = skeletonMapping.SourceToTarget[linkNodeIndex];

                        // Adjust scale import
                        if (!MathUtil.NearEqual(ScaleImport, 1.0f))
                        {
                            skinning.Bones[i].LinkToMeshMatrix.TranslationVector = skinning.Bones[i].LinkToMeshMatrix.TranslationVector * ScaleImport;
                        }

                        // If it was remapped, we also need to update matrix
                        if (nodeIndex != newNodeIndex)
                        {
                            // Update mesh part
                            var transformMatrix = CombineMatricesFromNodeIndices(hierarchyUpdater.NodeTransformations, newNodeIndex, nodeIndex);
                            transformMatrix.Invert();
                            skinning.Bones[i].LinkToMeshMatrix = Matrix.Multiply(transformMatrix, skinning.Bones[i].LinkToMeshMatrix);
                        }

                        if (newLinkNodeIndex != linkNodeIndex)
                        {
                            // Update link part
                            var transformLinkMatrix = CombineMatricesFromNodeIndices(hierarchyUpdater.NodeTransformations, newLinkNodeIndex, linkNodeIndex);
                            skinning.Bones[i].LinkToMeshMatrix = Matrix.Multiply(skinning.Bones[i].LinkToMeshMatrix, transformLinkMatrix);
                        }
                    }
                }

                // Check if there was a remap using model skeleton
                if (skeletonMapping.SourceToSource[mesh.NodeIndex] != mesh.NodeIndex)
                {
                    // Transform vertices
                    var transformationMatrix = CombineMatricesFromNodeIndices(hierarchyUpdater.NodeTransformations, skeletonMapping.SourceToSource[mesh.NodeIndex], mesh.NodeIndex);
                    for (int vbIdx = 0; vbIdx < mesh.Draw.VertexBuffers.Length; vbIdx++)
                    {
                        mesh.Draw.VertexBuffers[vbIdx].TransformBuffer(ref transformationMatrix);
                    }

                    // Check if geometry is inverted, to know if we need to reverse winding order
                    // TODO: What to do if there is no index buffer? We should create one... (not happening yet)
                    if (mesh.Draw.IndexBuffer == null)
                    {
                        throw new InvalidOperationException();
                    }

                    Matrix  rotation;
                    Vector3 scale, translation;
                    if (transformationMatrix.Decompose(out scale, out rotation, out translation) &&
                        scale.X * scale.Y * scale.Z < 0)
                    {
                        mesh.Draw.ReverseWindingOrder();
                    }
                }

                // Update new node index using real asset skeleton
                mesh.NodeIndex = skeletonMapping.SourceToTarget[mesh.NodeIndex];
            }

            // Apply custom model modifiers
            if (ModelModifiers != null)
            {
                foreach (var modifier in ModelModifiers)
                {
                    modifier.Apply(commandContext, model);
                }
            }

            // Merge meshes with same parent nodes, material and skinning
            var meshesByNodes = model.Meshes.GroupBy(x => x.NodeIndex).ToList();

            foreach (var meshesByNode in meshesByNodes)
            {
                // This logic to detect similar material is kept from old code; this should be reviewed/improved at some point
                foreach (var meshesPerDrawCall in meshesByNode.GroupBy(x => x,
                    new AnonymousEqualityComparer<Mesh>((x, y) =>
                        x.MaterialIndex == y.MaterialIndex && // Same material
                        ArrayExtensions.ArraysEqual(x.Skinning?.Bones, y.Skinning?.Bones) && // Same bones
                        CompareParameters(model, x, y) && // Same parameters
                        CompareShadowOptions(model, x, y), // Same shadow parameters
                        x => 0)).ToList())
                {
                    if (meshesPerDrawCall.Count() == 1)
                    {
                        // Nothing to group, skip to next entry
                        continue;
                    }

                    // Remove old meshes
                    foreach (var mesh in meshesPerDrawCall)
                    {
                        model.Meshes.Remove(mesh);
                    }

                    // Add new combined mesh(es)
                    var baseMesh    = meshesPerDrawCall.First();
                    var newMeshList = meshesPerDrawCall.Select(x => x.Draw).ToList().GroupDrawData(Allow32BitIndex);

                    foreach (var generatedMesh in newMeshList)
                    {
                        model.Meshes.Add(new Mesh(generatedMesh, baseMesh.Parameters)
                        {
                            MaterialIndex = baseMesh.MaterialIndex,
                            Name          = baseMesh.Name,
                            Draw          = generatedMesh,
                            NodeIndex     = baseMesh.NodeIndex,
                            Skinning      = baseMesh.Skinning,
                        });
                    }
                }
            }

            // split the meshes if necessary
            model.Meshes = SplitExtensions.SplitMeshes(model.Meshes, Allow32BitIndex);

            // Refresh skeleton updater with asset skeleton
            hierarchyUpdater = new SkeletonUpdater(skeleton);
            hierarchyUpdater.UpdateMatrices();

            // bounding boxes
            var modelBoundingBox    = model.BoundingBox;
            var modelBoundingSphere = model.BoundingSphere;

            foreach (var mesh in model.Meshes)
            {
                var vertexBuffers = mesh.Draw.VertexBuffers;
                for (int vbIdx = 0; vbIdx < vertexBuffers.Length; vbIdx++)
                {
                    // Compute local mesh bounding box (no node transformation)
                    Matrix matrix = Matrix.Identity;
                    mesh.BoundingBox = vertexBuffers[vbIdx].ComputeBounds(ref matrix, out mesh.BoundingSphere);

                    // Compute model bounding box (includes node transformation)
                    hierarchyUpdater.GetWorldMatrix(mesh.NodeIndex, out matrix);
                    BoundingSphere meshBoundingSphere;
                    var            meshBoundingBox = vertexBuffers[vbIdx].ComputeBounds(ref matrix, out meshBoundingSphere);
                    BoundingBox.Merge(ref modelBoundingBox, ref meshBoundingBox, out modelBoundingBox);
                    BoundingSphere.Merge(ref modelBoundingSphere, ref meshBoundingSphere, out modelBoundingSphere);
                }

                // TODO: temporary Always try to compact
                mesh.Draw.CompactIndexBuffer();
            }
            model.BoundingBox    = modelBoundingBox;
            model.BoundingSphere = modelBoundingSphere;

            // Count unique meshes (they can be shared)
            var uniqueDrawMeshes = model.Meshes.Select(x => x.Draw).Distinct();

            // Count unique vertex buffers and squish them together in a single buffer
            var uniqueVB = uniqueDrawMeshes.SelectMany(x => x.VertexBuffers).Distinct().ToList();

            var vbMap                    = new Dictionary<VertexBufferBinding, VertexBufferBinding>();
            var sizeVertexBuffer         = uniqueVB.Select(x => x.Buffer.GetSerializationData().Content.Length).Sum();
            var vertexBuffer             = new BufferData(BufferFlags.VertexBuffer, new byte[sizeVertexBuffer]);
            var vertexBufferSerializable = vertexBuffer.ToSerializableVersion();

            var vertexBufferNextIndex = 0;

            foreach (var vbBinding in uniqueVB)
            {
                var oldVertexBuffer = vbBinding.Buffer.GetSerializationData().Content;
                Array.Copy(oldVertexBuffer, 0, vertexBuffer.Content, vertexBufferNextIndex, oldVertexBuffer.Length);

                vbMap.Add(vbBinding, new VertexBufferBinding(vertexBufferSerializable, vbBinding.Declaration, vbBinding.Count, vbBinding.Stride, vertexBufferNextIndex));

                vertexBufferNextIndex += oldVertexBuffer.Length;
            }

            // Count unique index buffers and squish them together in a single buffer
            var uniqueIB        = uniqueDrawMeshes.Select(x => x.IndexBuffer).NotNull().Distinct().ToList();
            var sizeIndexBuffer = 0;

            foreach (var ibBinding in uniqueIB)
            {
                // Make sure 32bit indices are properly aligned to 4 bytes in case the last alignment was 2 bytes
                if (ibBinding.Is32Bit && sizeIndexBuffer % 4 != 0)
                {
                    sizeIndexBuffer += 2;
                }

                sizeIndexBuffer += ibBinding.Buffer.GetSerializationData().Content.Length;
            }

            var ibMap = new Dictionary<IndexBufferBinding, IndexBufferBinding>();

            if (uniqueIB.Count > 0)
            {
                var indexBuffer             = new BufferData(BufferFlags.IndexBuffer, new byte[sizeIndexBuffer]);
                var indexBufferSerializable = indexBuffer.ToSerializableVersion();
                var indexBufferNextIndex    = 0;

                foreach (var ibBinding in uniqueIB)
                {
                    var oldIndexBuffer = ibBinding.Buffer.GetSerializationData().Content;

                    // Make sure 32bit indices are properly aligned to 4 bytes in case the last alignment was 2 bytes
                    if (ibBinding.Is32Bit && indexBufferNextIndex % 4 != 0)
                    {
                        indexBufferNextIndex += 2;
                    }

                    Array.Copy(oldIndexBuffer, 0, indexBuffer.Content, indexBufferNextIndex, oldIndexBuffer.Length);

                    ibMap.Add(ibBinding, new IndexBufferBinding(indexBufferSerializable, ibBinding.Is32Bit, ibBinding.Count, indexBufferNextIndex));

                    indexBufferNextIndex += oldIndexBuffer.Length;
                }
            }

            // Assign new vertex and index buffer bindings
            foreach (var drawMesh in uniqueDrawMeshes)
            {
                for (int i = 0; i < drawMesh.VertexBuffers.Length; i++)
                {
                    drawMesh.VertexBuffers[i] = vbMap[drawMesh.VertexBuffers[i]];
                }

                if (drawMesh.IndexBuffer != null)
                {
                    drawMesh.IndexBuffer = ibMap[drawMesh.IndexBuffer];
                }
            }

            vbMap.Clear();
            ibMap.Clear();

            // Convert to Entity
            return model;
        }
Example #7
        private object ExportModel(ICommandContext commandContext, ContentManager contentManager)
        {
            // Read from model file
            var modelSkeleton = LoadSkeleton(commandContext, contentManager); // we get model skeleton to compare it to real skeleton we need to map to

            AdjustSkeleton(modelSkeleton);
            var model = LoadModel(commandContext, contentManager);

            // Apply materials
            foreach (var modelMaterial in Materials)
            {
                if (modelMaterial.MaterialInstance?.Material == null)
                {
                    commandContext.Logger.Warning($"The material [{modelMaterial.Name}] is null in the list of materials.");
                }
                model.Materials.Add(modelMaterial.MaterialInstance);
            }

            model.BoundingBox = BoundingBox.Empty;

            foreach (var mesh in model.Meshes)
            {
                if (TessellationAEN)
                {
                    // TODO: Generate AEN model view
                    commandContext.Logger.Error("TessellationAEN is not supported in {0}", ContextAsString);
                }
            }

            SkeletonMapping skeletonMapping;

            Skeleton skeleton;

            if (SkeletonUrl != null)
            {
                // Load skeleton and process it
                skeleton = contentManager.Load<Skeleton>(SkeletonUrl);

                // Assign skeleton to model
                model.Skeleton = AttachedReferenceManager.CreateProxyObject<Skeleton>(Guid.Empty, SkeletonUrl);
            }
            else
            {
                skeleton = null;
            }

            skeletonMapping = new SkeletonMapping(skeleton, modelSkeleton);

            // Refresh skeleton updater with model skeleton
            var hierarchyUpdater = new SkeletonUpdater(modelSkeleton);

            hierarchyUpdater.UpdateMatrices();

            // Move meshes in the new nodes
            foreach (var mesh in model.Meshes)
            {
                // Apply scale import on meshes
                if (!MathUtil.NearEqual(ScaleImport, 1.0f))
                {
                    var transformationMatrix = Matrix.Scaling(ScaleImport);
                    mesh.Draw.VertexBuffers[0].TransformBuffer(ref transformationMatrix);
                }

                var skinning = mesh.Skinning;
                if (skinning != null)
                {
                    // Update node mapping
                    // Note: we only remap skinning matrices, but we could directly remap skinning bones instead
                    for (int i = 0; i < skinning.Bones.Length; ++i)
                    {
                        var linkNodeIndex    = skinning.Bones[i].NodeIndex;
                        var newLinkNodeIndex = skeletonMapping.SourceToSource[linkNodeIndex];

                        var nodeIndex    = mesh.NodeIndex;
                        var newNodeIndex = skeletonMapping.SourceToSource[mesh.NodeIndex];

                        skinning.Bones[i].NodeIndex = skeletonMapping.SourceToTarget[linkNodeIndex];

                        // Adjust scale import
                        if (!MathUtil.NearEqual(ScaleImport, 1.0f))
                        {
                            skinning.Bones[i].LinkToMeshMatrix.TranslationVector = skinning.Bones[i].LinkToMeshMatrix.TranslationVector * ScaleImport;
                        }

                        // If it was remapped, we also need to update matrix
                        if (nodeIndex != newNodeIndex)
                        {
                            // Update mesh part
                            var transformMatrix = CombineMatricesFromNodeIndices(hierarchyUpdater.NodeTransformations, newNodeIndex, nodeIndex);
                            transformMatrix.Invert();
                            skinning.Bones[i].LinkToMeshMatrix = Matrix.Multiply(transformMatrix, skinning.Bones[i].LinkToMeshMatrix);
                        }

                        if (newLinkNodeIndex != linkNodeIndex)
                        {
                            // Update link part
                            var transformLinkMatrix = CombineMatricesFromNodeIndices(hierarchyUpdater.NodeTransformations, newLinkNodeIndex, linkNodeIndex);
                            skinning.Bones[i].LinkToMeshMatrix = Matrix.Multiply(skinning.Bones[i].LinkToMeshMatrix, transformLinkMatrix);
                        }
                    }
                }

                // Check if there was a remap using model skeleton
                if (skeletonMapping.SourceToSource[mesh.NodeIndex] != mesh.NodeIndex)
                {
                    // Transform vertices
                    var transformationMatrix = CombineMatricesFromNodeIndices(hierarchyUpdater.NodeTransformations, skeletonMapping.SourceToSource[mesh.NodeIndex], mesh.NodeIndex);
                    mesh.Draw.VertexBuffers[0].TransformBuffer(ref transformationMatrix);

                    // Check if geometry is inverted, to know if we need to reverse winding order
                    // TODO: What to do if there is no index buffer? We should create one... (not happening yet)
                    if (mesh.Draw.IndexBuffer == null)
                    {
                        throw new InvalidOperationException();
                    }

                    Matrix  rotation;
                    Vector3 scale, translation;
                    if (transformationMatrix.Decompose(out scale, out rotation, out translation) &&
                        scale.X * scale.Y * scale.Z < 0)
                    {
                        mesh.Draw.ReverseWindingOrder();
                    }
                }

                // Update new node index using real asset skeleton
                mesh.NodeIndex = skeletonMapping.SourceToTarget[mesh.NodeIndex];
            }

            // Merge meshes with same parent nodes, material and skinning
            var meshesByNodes = model.Meshes.GroupBy(x => x.NodeIndex).ToList();

            foreach (var meshesByNode in meshesByNodes)
            {
                // This logic to detect similar material is kept from old code; this should be reviewed/improved at some point
                foreach (var meshesPerDrawCall in meshesByNode.GroupBy(x => x,
                    new AnonymousEqualityComparer<Mesh>((x, y) =>
                        x.MaterialIndex == y.MaterialIndex && // Same material
                        ArrayExtensions.ArraysEqual(x.Skinning?.Bones, y.Skinning?.Bones) && // Same bones
                        CompareParameters(model, x, y) && // Same parameters
                        CompareShadowOptions(model, x, y), // Same shadow parameters
                        x => 0)).ToList())
                {
                    if (meshesPerDrawCall.Count() == 1)
                    {
                        // Nothing to group, skip to next entry
                        continue;
                    }

                    // Remove old meshes
                    foreach (var mesh in meshesPerDrawCall)
                    {
                        model.Meshes.Remove(mesh);
                    }

                    // Add new combined mesh(es)
                    var baseMesh    = meshesPerDrawCall.First();
                    var newMeshList = meshesPerDrawCall.Select(x => x.Draw).ToList().GroupDrawData(Allow32BitIndex);

                    foreach (var generatedMesh in newMeshList)
                    {
                        model.Meshes.Add(new Mesh(generatedMesh, baseMesh.Parameters)
                        {
                            MaterialIndex = baseMesh.MaterialIndex,
                            Name          = baseMesh.Name,
                            Draw          = generatedMesh,
                            NodeIndex     = baseMesh.NodeIndex,
                            Skinning      = baseMesh.Skinning,
                        });
                    }
                }
            }

            // split the meshes if necessary
            model.Meshes = SplitExtensions.SplitMeshes(model.Meshes, Allow32BitIndex);

            // Refresh skeleton updater with asset skeleton
            hierarchyUpdater = new SkeletonUpdater(skeleton);
            hierarchyUpdater.UpdateMatrices();

            // bounding boxes
            var modelBoundingBox    = model.BoundingBox;
            var modelBoundingSphere = model.BoundingSphere;

            foreach (var mesh in model.Meshes)
            {
                var vertexBuffers = mesh.Draw.VertexBuffers;
                if (vertexBuffers.Length > 0)
                {
                    // Compute local mesh bounding box (no node transformation)
                    Matrix matrix = Matrix.Identity;
                    mesh.BoundingBox = vertexBuffers[0].ComputeBounds(ref matrix, out mesh.BoundingSphere);

                    // Compute model bounding box (includes node transformation)
                    hierarchyUpdater.GetWorldMatrix(mesh.NodeIndex, out matrix);
                    BoundingSphere meshBoundingSphere;
                    var            meshBoundingBox = vertexBuffers[0].ComputeBounds(ref matrix, out meshBoundingSphere);
                    BoundingBox.Merge(ref modelBoundingBox, ref meshBoundingBox, out modelBoundingBox);
                    BoundingSphere.Merge(ref modelBoundingSphere, ref meshBoundingSphere, out modelBoundingSphere);
                }

                // TODO: temporary Always try to compact
                mesh.Draw.CompactIndexBuffer();
            }
            model.BoundingBox    = modelBoundingBox;
            model.BoundingSphere = modelBoundingSphere;

            // merges all the Draw VB and IB together to produce one final VB and IB by entity.
            var sizeVertexBuffer = model.Meshes.SelectMany(x => x.Draw.VertexBuffers).Select(x => x.Buffer.GetSerializationData().Content.Length).Sum();
            var sizeIndexBuffer  = 0;

            foreach (var x in model.Meshes)
            {
                // Let's be aligned (if there was 16bit indices before, we might be off)
                if (x.Draw.IndexBuffer.Is32Bit && sizeIndexBuffer % 4 != 0)
                {
                    sizeIndexBuffer += 2;
                }

                sizeIndexBuffer += x.Draw.IndexBuffer.Buffer.GetSerializationData().Content.Length;
            }
            var vertexBuffer = new BufferData(BufferFlags.VertexBuffer, new byte[sizeVertexBuffer]);
            var indexBuffer  = new BufferData(BufferFlags.IndexBuffer, new byte[sizeIndexBuffer]);

            // Note: reusing same instance, to avoid having many VB with same hash but different URL
            var vertexBufferSerializable = vertexBuffer.ToSerializableVersion();
            var indexBufferSerializable  = indexBuffer.ToSerializableVersion();

            var vertexBufferNextIndex = 0;
            var indexBufferNextIndex  = 0;

            foreach (var drawMesh in model.Meshes.Select(x => x.Draw))
            {
                // the index buffer
                var oldIndexBuffer = drawMesh.IndexBuffer.Buffer.GetSerializationData().Content;

                // Let's be aligned (if there was 16bit indices before, we might be off)
                if (drawMesh.IndexBuffer.Is32Bit && indexBufferNextIndex % 4 != 0)
                {
                    indexBufferNextIndex += 2;
                }

                Array.Copy(oldIndexBuffer, 0, indexBuffer.Content, indexBufferNextIndex, oldIndexBuffer.Length);

                drawMesh.IndexBuffer = new IndexBufferBinding(indexBufferSerializable, drawMesh.IndexBuffer.Is32Bit, drawMesh.IndexBuffer.Count, indexBufferNextIndex);

                indexBufferNextIndex += oldIndexBuffer.Length;

                // the vertex buffers
                for (int index = 0; index < drawMesh.VertexBuffers.Length; index++)
                {
                    var vertexBufferBinding = drawMesh.VertexBuffers[index];
                    var oldVertexBuffer     = vertexBufferBinding.Buffer.GetSerializationData().Content;

                    Array.Copy(oldVertexBuffer, 0, vertexBuffer.Content, vertexBufferNextIndex, oldVertexBuffer.Length);

                    drawMesh.VertexBuffers[index] = new VertexBufferBinding(vertexBufferSerializable, vertexBufferBinding.Declaration, vertexBufferBinding.Count, vertexBufferBinding.Stride,
                                                                            vertexBufferNextIndex);

                    vertexBufferNextIndex += oldVertexBuffer.Length;
                }
            }

            // Convert to Entity
            return model;
        }
Example #8
 public bool Equals(Description other)
 {
     return Equals(ShaderSignature, other.ShaderSignature) &&
            ArrayExtensions.ArraysEqual(VertexBuffers, other.VertexBuffers, VertexBufferBindingComparer) &&
            Equals(IndexBuffer, other.IndexBuffer);
 }
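Example #8 uses the ArraysEqual overload that takes a custom comparer (VertexBufferBindingComparer). A field like that is typically backed by a small IEqualityComparer<T> implementation. The sketch below is purely illustrative: the members it compares (Buffer, Declaration, Count, Stride) are borrowed from Examples #6/#7 and are an assumption about what should define equality here.

 // Hypothetical comparer shape; requires: using System.Collections.Generic;
 private sealed class VertexBufferBindingEqualityComparer : IEqualityComparer<VertexBufferBinding>
 {
     public bool Equals(VertexBufferBinding x, VertexBufferBinding y)
     {
         // Compare the members assumed to matter for binding equality.
         return object.Equals(x.Buffer, y.Buffer) &&
                object.Equals(x.Declaration, y.Declaration) &&
                x.Count == y.Count &&
                x.Stride == y.Stride;
     }

     public int GetHashCode(VertexBufferBinding obj)
     {
         unchecked
         {
             var hash = obj.Buffer != null ? obj.Buffer.GetHashCode() : 0;
             hash = (hash * 397) ^ (obj.Declaration != null ? obj.Declaration.GetHashCode() : 0);
             hash = (hash * 397) ^ obj.Count;
             hash = (hash * 397) ^ obj.Stride;
             return hash;
         }
     }
 }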
Example #9
        public static void CreateBundle(string bundleUrl, IOdbBackend backend, ObjectId[] objectIds, ISet<ObjectId> disableCompressionIds, Dictionary<string, ObjectId> indexMap, IList<string> dependencies, bool useIncrementalBundle)
        {
            if (objectIds.Length == 0)
            {
                throw new InvalidOperationException("Nothing to pack.");
            }

            var objectsToIndex = new Dictionary<ObjectId, int>(objectIds.Length);

            var objects = new List<KeyValuePair<ObjectId, ObjectInfo>>();

            for (int i = 0; i < objectIds.Length; ++i)
            {
                objectsToIndex.Add(objectIds[i], objects.Count);
                objects.Add(new KeyValuePair<ObjectId, ObjectInfo>(objectIds[i], new ObjectInfo()));
            }

            var incrementalBundles = new List<ObjectId>();

            // If there is a .bundle, add incremental id before it
            var bundleExtensionLength = (bundleUrl.EndsWith(BundleExtension) ? BundleExtension.Length : 0);

            // Early exit if package didn't change (header-check only)
            if (VirtualFileSystem.FileExists(bundleUrl))
            {
                try
                {
                    using (var packStream = VirtualFileSystem.OpenStream(bundleUrl, VirtualFileMode.Open, VirtualFileAccess.Read))
                    {
                        var bundle = ReadBundleDescription(packStream);

                        // If package didn't change since last time, early exit!
                        if (ArrayExtensions.ArraysEqual(bundle.Dependencies, dependencies) &&
                            ArrayExtensions.ArraysEqual(bundle.Assets.OrderBy(x => x.Key).ToList(), indexMap.OrderBy(x => x.Key).ToList()) &&
                            ArrayExtensions.ArraysEqual(bundle.Objects.Select(x => x.Key).OrderBy(x => x).ToList(), objectIds.OrderBy(x => x).ToList()))
                        {
                            // Make sure all incremental bundles exist
                            // Also, if we don't want incremental bundles but we have some (or vice-versa), let's force a regeneration
                            if ((useIncrementalBundle == (bundle.IncrementalBundles.Count > 0)) &&
                                bundle.IncrementalBundles.Select(x => bundleUrl.Insert(bundleUrl.Length - bundleExtensionLength, "." + x)).All(x =>
                            {
                                if (!VirtualFileSystem.FileExists(x))
                                {
                                    return false;
                                }
                                using (var incrementalStream = VirtualFileSystem.OpenStream(x, VirtualFileMode.Open, VirtualFileAccess.Read))
                                    return ValidateHeader(incrementalStream);
                            }))
                            {
                                return;
                            }
                        }
                    }

                    // Process existing incremental bundles one by one
                    // Try to find if there is enough to reuse in each of them
                    var filename  = VirtualFileSystem.GetFileName(bundleUrl);
                    var directory = VirtualFileSystem.GetParentFolder(bundleUrl);

                    foreach (var incrementalBundleUrl in VirtualFileSystem.ListFiles(directory, filename.Insert(filename.Length - bundleExtensionLength, ".*"), VirtualSearchOption.TopDirectoryOnly).Result)
                    {
                        var      incrementalIdString = incrementalBundleUrl.Substring(incrementalBundleUrl.Length - bundleExtensionLength - ObjectId.HashStringLength, ObjectId.HashStringLength);
                        ObjectId incrementalId;
                        if (!ObjectId.TryParse(incrementalIdString, out incrementalId))
                        {
                            continue;
                        }

                        // If we don't want incremental bundles, delete old ones from previous build
                        if (!useIncrementalBundle)
                        {
                            VirtualFileSystem.FileDelete(incrementalBundleUrl);
                            continue;
                        }

                        long sizeNeededItems = 0;
                        long sizeTotal       = 0;

                        BundleDescription incrementalBundle;
                        try
                        {
                            using (var packStream = VirtualFileSystem.OpenStream(incrementalBundleUrl, VirtualFileMode.Open, VirtualFileAccess.Read))
                            {
                                incrementalBundle = ReadBundleDescription(packStream);
                            }

                            // Compute size of objects (needed ones and everything)
                            foreach (var @object in incrementalBundle.Objects)
                            {
                                var objectCompressedSize = @object.Value.EndOffset - @object.Value.StartOffset;

                                // TODO: Detect object that are stored without ObjectId being content hash: we need to check actual content hash is same in this case
                                if (objectsToIndex.ContainsKey(@object.Key))
                                {
                                    sizeNeededItems += objectCompressedSize;
                                }
                                sizeTotal += objectCompressedSize;
                            }

                            // Check if we would reuse at least 50% of the incremental bundle, otherwise let's just get rid of it
                            var reuseRatio = (float)((double)sizeNeededItems / (double)sizeTotal);
                            if (reuseRatio < 0.5f)
                            {
                                VirtualFileSystem.FileDelete(incrementalBundleUrl);
                            }
                            else
                            {
                                // We will reuse this incremental bundle
                                // Let's add ObjectId entries
                                foreach (var @object in incrementalBundle.Objects)
                                {
                                    int objectIndex;
                                    if (objectsToIndex.TryGetValue(@object.Key, out objectIndex))
                                    {
                                        var objectInfo = @object.Value;
                                        objectInfo.IncrementalBundleIndex = incrementalBundles.Count + 1;
                                        objects[objectIndex] = new KeyValuePair<ObjectId, ObjectInfo>(@object.Key, objectInfo);
                                    }
                                }

                                // Add this incremental bundle in the list
                                incrementalBundles.Add(incrementalId);
                            }
                        }
                        catch (Exception)
                        {
                            // Could not read incremental bundle (format changed?)
                            // Let's delete it
                            VirtualFileSystem.FileDelete(incrementalBundleUrl);
                        }
                    }
                }
                catch (Exception)
                {
                    // Could not read previous bundle (format changed?)
                    // Let's just mute this error as new bundle will overwrite it anyway
                }
            }

            // Count objects which needs to be saved
            var incrementalObjects = new List<KeyValuePair<ObjectId, ObjectInfo>>();

            if (useIncrementalBundle)
            {
                for (int i = 0; i < objectIds.Length; ++i)
                {
                    // Skip if already part of an existing incremental package
                    if (objects[i].Value.IncrementalBundleIndex > 0)
                    {
                        continue;
                    }

                    incrementalObjects.Add(new KeyValuePair<ObjectId, ObjectInfo>(objects[i].Key, new ObjectInfo()));
                }
            }

            // Create an incremental package
            var newIncrementalId       = ObjectId.New();
            var incrementalBundleIndex = incrementalBundles.Count;

            if (useIncrementalBundle && incrementalObjects.Count > 0)
            {
                incrementalBundles.Add(newIncrementalId);
            }

            using (var packStream = VirtualFileSystem.OpenStream(bundleUrl, VirtualFileMode.Create, VirtualFileAccess.Write))
            {
                var header = new Header();
                header.MagicHeader = Header.MagicHeaderValid;

                var packBinaryWriter = new BinarySerializationWriter(packStream);
                packBinaryWriter.Write(header);
                // Write dependencies
                packBinaryWriter.Write(dependencies.ToList());
                // Write incremental bundles
                packBinaryWriter.Write(incrementalBundles.ToList());

                // Save location of object ids
                var packObjectIdPosition = packStream.Position;

                // Write empty object ids (reserve space, will be rewritten later)
                packBinaryWriter.Write(objects);

                // Write index
                packBinaryWriter.Write(indexMap.ToList());

                using (var incrementalStream = incrementalObjects.Count > 0 ? VirtualFileSystem.OpenStream(bundleUrl.Insert(bundleUrl.Length - bundleExtensionLength, "." + newIncrementalId), VirtualFileMode.Create, VirtualFileAccess.Write) : null)
                {
                    var  incrementalBinaryWriter     = incrementalStream != null ? new BinarySerializationWriter(incrementalStream) : null;
                    long incrementalObjectIdPosition = 0;
                    if (incrementalStream != null)
                    {
                        incrementalBinaryWriter.Write(header);
                        // Write dependencies
                        incrementalBinaryWriter.Write(new List<string>());
                        // Write incremental bundles
                        incrementalBinaryWriter.Write(new List<ObjectId>());

                        // Save location of object ids
                        incrementalObjectIdPosition = incrementalStream.Position;

                        // Write empty object ids (reserve space, will be rewritten later)
                        incrementalBinaryWriter.Write(incrementalObjects);

                        // Write index
                        incrementalBinaryWriter.Write(new List<KeyValuePair<string, ObjectId>>());
                    }

                    var objectOutputStream     = incrementalStream ?? packStream;
                    int incrementalObjectIndex = 0;
                    for (int i = 0; i < objectIds.Length; ++i)
                    {
                        // Skip if already part of an existing incremental package
                        if (objects[i].Value.IncrementalBundleIndex > 0)
                        {
                            continue;
                        }

                        using (var objectStream = backend.OpenStream(objectIds[i]))
                        {
                            // Prepare object info
                            var objectInfo = new ObjectInfo {
                                StartOffset = objectOutputStream.Position, SizeNotCompressed = objectStream.Length
                            };

                            // re-order the file content so that it is not necessary to seek while reading the input stream (header/object/refs -> header/refs/object)
                            var inputStream          = objectStream;
                            var originalStreamLength = objectStream.Length;
                            var streamReader         = new BinarySerializationReader(inputStream);
                            var chunkHeader          = ChunkHeader.Read(streamReader);
                            if (chunkHeader != null)
                            {
                                // create the reordered stream
                                var reorderedStream = new MemoryStream((int)originalStreamLength);

                                // copy the header
                                var streamWriter = new BinarySerializationWriter(reorderedStream);
                                chunkHeader.Write(streamWriter);

                                // copy the references
                                var newOffsetReferences = reorderedStream.Position;
                                inputStream.Position = chunkHeader.OffsetToReferences;
                                inputStream.CopyTo(reorderedStream);

                                // copy the object
                                var newOffsetObject = reorderedStream.Position;
                                inputStream.Position = chunkHeader.OffsetToObject;
                                inputStream.CopyTo(reorderedStream, chunkHeader.OffsetToReferences - chunkHeader.OffsetToObject);

                                // rewrite the chunk header with correct offsets
                                chunkHeader.OffsetToObject     = (int)newOffsetObject;
                                chunkHeader.OffsetToReferences = (int)newOffsetReferences;
                                reorderedStream.Position       = 0;
                                chunkHeader.Write(streamWriter);

                                // change the input stream to use reordered stream
                                inputStream          = reorderedStream;
                                inputStream.Position = 0;
                            }

                            // compress the stream
                            if (!disableCompressionIds.Contains(objectIds[i]))
                            {
                                objectInfo.IsCompressed = true;

                                var lz4OutputStream = new LZ4Stream(objectOutputStream, CompressionMode.Compress);
                                inputStream.CopyTo(lz4OutputStream);
                                lz4OutputStream.Flush();
                            }
                            // copy the stream "as is"
                            else
                            {
                                // Write stream
                                inputStream.CopyTo(objectOutputStream);
                            }

                            // release the reordered created stream
                            if (chunkHeader != null)
                            {
                                inputStream.Dispose();
                            }

                            // Add updated object info
                            objectInfo.EndOffset = objectOutputStream.Position;
                            // Note: we add 1 because 0 is reserved for self; first incremental bundle starts at 1
                            objectInfo.IncrementalBundleIndex = objectOutputStream == incrementalStream ? incrementalBundleIndex + 1 : 0;
                            objects[i] = new KeyValuePair<ObjectId, ObjectInfo>(objectIds[i], objectInfo);

                            if (useIncrementalBundle)
                            {
                                // Also update incremental bundle object info
                                objectInfo.IncrementalBundleIndex            = 0; // stored in same bundle
                                incrementalObjects[incrementalObjectIndex++] = new KeyValuePair<ObjectId, ObjectInfo>(objectIds[i], objectInfo);
                            }
                        }
                    }

                    // First finish to write incremental package so that main one can't be valid on the HDD without the incremental one being too
                    if (incrementalStream != null)
                    {
                        // Rewrite headers
                        header.Size = incrementalStream.Length;
                        incrementalStream.Position = 0;
                        incrementalBinaryWriter.Write(header);

                        // Rewrite object with updated offsets/size
                        incrementalStream.Position = incrementalObjectIdPosition;
                        incrementalBinaryWriter.Write(incrementalObjects);
                    }
                }

                // Rewrite headers
                header.Size         = packStream.Length;
                packStream.Position = 0;
                packBinaryWriter.Write(header);

                // Rewrite object with updated offsets/size
                packStream.Position = packObjectIdPosition;
                packBinaryWriter.Write(objects);
            }
        }
Example #10
        public static void CreateBundle(string vfsUrl, IOdbBackend backend, ObjectId[] objectIds, ISet<ObjectId> disableCompressionIds, Dictionary<string, ObjectId> indexMap, IList<string> dependencies)
        {
            if (objectIds.Length == 0)
            {
                throw new InvalidOperationException("Nothing to pack.");
            }

            // Early exit if package didn't change (header-check only)
            if (VirtualFileSystem.FileExists(vfsUrl))
            {
                try
                {
                    using (var packStream = VirtualFileSystem.OpenStream(vfsUrl, VirtualFileMode.Open, VirtualFileAccess.Read))
                    {
                        var bundle = ReadBundleDescription(packStream);

                        // If package didn't change since last time, early exit!
                        if (ArrayExtensions.ArraysEqual(bundle.Dependencies, dependencies) &&
                            ArrayExtensions.ArraysEqual(bundle.Assets.OrderBy(x => x.Key).ToList(), indexMap.OrderBy(x => x.Key).ToList()) &&
                            ArrayExtensions.ArraysEqual(bundle.Objects.Select(x => x.Key).OrderBy(x => x).ToList(), objectIds.OrderBy(x => x).ToList()))
                        {
                            return;
                        }
                    }
                }
                catch (Exception)
                {
                    // Could not read previous bundle (format changed?)
                    // Let's just mute this error as new bundle will overwrite it anyway
                }
            }

            using (var packStream = VirtualFileSystem.OpenStream(vfsUrl, VirtualFileMode.Create, VirtualFileAccess.Write))
            {
                var header = new Header();
                header.MagicHeader = Header.MagicHeaderValid;

                var binaryWriter = new BinarySerializationWriter(packStream);
                binaryWriter.Write(header);

                // Write dependencies
                binaryWriter.Write(dependencies.ToList());

                // Save location of object ids
                var objectIdPosition = packStream.Position;

                // Write empty object ids (reserve space, will be rewritten later)
                var objects = new List<KeyValuePair<ObjectId, ObjectInfo>>();
                for (int i = 0; i < objectIds.Length; ++i)
                {
                    objects.Add(new KeyValuePair<ObjectId, ObjectInfo>(objectIds[i], new ObjectInfo()));
                }

                binaryWriter.Write(objects);
                objects.Clear();

                // Write index
                binaryWriter.Write(indexMap.ToList());

                for (int i = 0; i < objectIds.Length; ++i)
                {
                    using (var objectStream = backend.OpenStream(objectIds[i]))
                    {
                        // Prepare object info
                        var objectInfo = new ObjectInfo {
                            StartOffset = packStream.Position, SizeNotCompressed = objectStream.Length
                        };

                        // re-order the file content so that it is not necessary to seek while reading the input stream (header/object/refs -> header/refs/object)
                        var inputStream          = objectStream;
                        var originalStreamLength = objectStream.Length;
                        var streamReader         = new BinarySerializationReader(inputStream);
                        var chunkHeader          = ChunkHeader.Read(streamReader);
                        if (chunkHeader != null)
                        {
                            // create the reordered stream
                            var reorderedStream = new MemoryStream((int)originalStreamLength);

                            // copy the header
                            var streamWriter = new BinarySerializationWriter(reorderedStream);
                            chunkHeader.Write(streamWriter);

                            // copy the references
                            var newOffsetReferences = reorderedStream.Position;
                            inputStream.Position = chunkHeader.OffsetToReferences;
                            inputStream.CopyTo(reorderedStream);

                            // copy the object
                            var newOffsetObject = reorderedStream.Position;
                            inputStream.Position = chunkHeader.OffsetToObject;
                            inputStream.CopyTo(reorderedStream, chunkHeader.OffsetToReferences - chunkHeader.OffsetToObject);

                            // rewrite the chunk header with correct offsets
                            chunkHeader.OffsetToObject     = (int)newOffsetObject;
                            chunkHeader.OffsetToReferences = (int)newOffsetReferences;
                            reorderedStream.Position       = 0;
                            chunkHeader.Write(streamWriter);

                            // change the input stream to use reordered stream
                            inputStream          = reorderedStream;
                            inputStream.Position = 0;
                        }

                        // compress the stream
                        if (!disableCompressionIds.Contains(objectIds[i]))
                        {
                            objectInfo.IsCompressed = true;

                            var lz4OutputStream = new LZ4Stream(packStream, CompressionMode.Compress);
                            inputStream.CopyTo(lz4OutputStream);
                            lz4OutputStream.Flush();
                        }
                        else // copy the stream "as is"
                        {
                            // Write stream
                            inputStream.CopyTo(packStream);
                        }

                        // release the reordered created stream
                        if (chunkHeader != null)
                        {
                            inputStream.Dispose();
                        }

                        // Add updated object info
                        objectInfo.EndOffset = packStream.Position;
                        objects.Add(new KeyValuePair<ObjectId, ObjectInfo>(objectIds[i], objectInfo));
                    }
                }

                // Rewrite header
                header.Size         = packStream.Length;
                packStream.Position = 0;
                binaryWriter.Write(header);

                // Rewrite object locations
                packStream.Position = objectIdPosition;
                binaryWriter.Write(objects);
            }
        }
Example #11
 /// <inheritdoc/>
 public bool Equals(BlendStateDescription other)
 {
     return AlphaToCoverageEnable.Equals(other.AlphaToCoverageEnable) &&
            IndependentBlendEnable.Equals(other.IndependentBlendEnable) &&
            ArrayExtensions.ArraysEqual(RenderTargets, other.RenderTargets);
 }