/// <summary>
/// Converts the source scene into a model, passing the converter a mapping
/// from each material name to its index in the asset's material list.
/// </summary>
/// <param name="commandContext">The command context used to create the mesh converter.</param>
/// <param name="assetManager">The asset manager (unused in the visible code).</param>
/// <returns>The converted scene data.</returns>
protected override Rendering.Model LoadModel(ICommandContext commandContext, AssetManager assetManager)
{
    var converter = CreateMeshConverter(commandContext);

    // Build the material name -> index map; Add() keeps the original behavior of
    // throwing on duplicate material names (same as ToDictionary would).
    var materialNameToIndex = new Dictionary<string, int>();
    var nextIndex = 0;
    foreach (var material in Materials)
    {
        materialNameToIndex.Add(material.Name, nextIndex++);
    }

    return converter.Convert(SourcePath, Location, materialNameToIndex);
}
/// <summary>
/// Splits each alpha-enabled compressed texture referenced by the material into separate color and
/// alpha sub-textures, and rewires the material graph to recombine them through a
/// "ComputeColorSubstituteAlphaWithColor" shader node.
/// </summary>
/// <param name="material">The material to process; it is cloned and the clone is modified.</param>
/// <param name="materialPath">The directory of the material (not used in the visible code).</param>
/// <param name="outputFormat">The compressed pixel format for the generated sub-textures.</param>
/// <returns>A modified clone of <paramref name="material"/>.</returns>
/// <exception cref="ArgumentNullException"><paramref name="material"/> is null.</exception>
/// <exception cref="InvalidOperationException">A referenced texture is not part of the session.</exception>
public MaterialDescription Run(MaterialDescription material, UDirectory materialPath, PixelFormat outputFormat = PixelFormat.ETC1)
{
    if (material == null)
        throw new ArgumentNullException(nameof(material));

    var assetManager = new AssetManager();
    var modifiedMaterial = material.Clone();
    var textureVisitor = new MaterialTextureVisitor(modifiedMaterial);
    var nodeReplacer = new MaterialNodeReplacer(modifiedMaterial);
    var textureNodes = textureVisitor.GetAllModelTextureValues();

    foreach (var textureNode in textureNodes)
    {
        var itemAsset = assetSession.FindAsset(textureNode.TextureReference.Id);
        if (itemAsset == null)
            throw new InvalidOperationException("The referenced texture is not included in the project session.");

        var textureAsset = (TextureAsset)itemAsset.Asset;
        if (textureAsset.Format != TextureFormat.Compressed || textureAsset.Alpha == AlphaFormat.None)
            continue; // the texture has no alpha so there is no need to divide the texture into two sub-textures

        var originalLocation = textureNode.TextureReference.Location;

        // NOTE(review): everything below this throw is currently unreachable dead code, kept as a
        // template for the planned re-implementation after the data-layer removal.
        throw new NotImplementedException("TODO: Need to reimplement this with removed data layer.");

        using (var image = assetManager.Load<Image>(originalLocation))
        {
            CreateAndSaveSeparateTextures(image, originalLocation, textureAsset.GenerateMipmaps, outputFormat);
            assetManager.Unload(image); // matching unload to the previous asset manager load call
        }

        // make new tree
        var colorNode = new MaterialTextureNode(GenerateColorTextureURL(originalLocation), textureNode.TexcoordIndex, Vector2.One, Vector2.Zero);
        var alphaNode = new MaterialTextureNode(GenerateAlphaTextureURL(originalLocation), textureNode.TexcoordIndex, Vector2.One, Vector2.Zero);
        var substituteAlphaNode = new MaterialShaderClassNode { MixinReference = new AssetReference<EffectShaderAsset>(Guid.Empty, "ComputeColorSubstituteAlphaWithColor") };
        substituteAlphaNode.CompositionNodes.Add("color1", colorNode);
        substituteAlphaNode.CompositionNodes.Add("color2", alphaNode);

        // set the parameters of the children so that they match the original texture
        var children = new[] { colorNode, alphaNode };
        foreach (var childTexture in children)
        {
            childTexture.Sampler.AddressModeU = textureNode.Sampler.AddressModeU;
            childTexture.Sampler.AddressModeV = textureNode.Sampler.AddressModeV;
            childTexture.Sampler.Filtering = textureNode.Sampler.Filtering;
            childTexture.Offset = textureNode.Offset;
            childTexture.Sampler.SamplerParameterKey = textureNode.Sampler.SamplerParameterKey;
            childTexture.Scale = textureNode.Scale;
            childTexture.TexcoordIndex = textureNode.TexcoordIndex;
        }

        // copy the parameter key on the color and let the one of the alpha null so that it is set automatically to available value later
        colorNode.Key = textureNode.Key;
        alphaNode.Key = null;

        // update all the material references to the new node
        nodeReplacer.Replace(textureNode, substituteAlphaNode);
    }

    return modifiedMaterial;
}
/// <summary>
/// Converts the source scene into a model using the Assimp-based converter.
/// </summary>
/// <param name="commandContext">The command context used to create the mesh converter.</param>
/// <param name="assetManager">The asset manager (unused in the visible code).</param>
/// <returns>The converted scene data.</returns>
protected override Rendering.Model LoadModel(ICommandContext commandContext, AssetManager assetManager)
{
    // Note: FBX exporter uses Materials for the mapping, but Assimp already uses indices so we can reuse them.
    // We should still unify the behavior to be more consistent at some point (i.e. if model was changed on the HDD but not in the asset).
    // This should probably be better done during a large-scale FBX/Assimp refactoring.
    var meshConverter = CreateMeshConverter(commandContext);
    return meshConverter.Convert(SourcePath, Location);
}
/// <summary>
/// Saves a small parent/child asset chain, then deletes the child's backing
/// file directly from the object database (to simulate a missing asset).
/// </summary>
/// <param name="assetManager">The asset manager used to persist the assets.</param>
private void SaveAssetsAndDeleteAChild(AssetManager assetManager)
{
    // Persist "Pa" with its child "Son".
    var son = new SimpleAsset("Son", null);
    var pa = new SimpleAsset("Pa", son);
    assetManager.Save(pa);

    // Resolve the database provider backing "/db" and wait until its index map is flushed.
    var resolveResult = VirtualFileSystem.ResolveProvider("/db", true);
    var provider = (DatabaseFileProvider)resolveResult.Provider;
    provider.AssetIndexMap.WaitPendingOperations();

    // Look up the child's object id and remove its file from the database directory.
    ObjectId childId;
    provider.AssetIndexMap.TryGetValue("SimpleAssets/Son", out childId);
    var childUrl = FileOdbBackend.BuildUrl(VirtualFileSystem.ApplicationDatabasePath, childId);
    VirtualFileSystem.FileDelete(childUrl);
}
/// <summary>
/// Sets up the minimal service, graphics and effect infrastructure needed to
/// run skybox generation offline (outside a running game).
/// </summary>
public SkyboxGeneratorContext()
{
    Services = new ServiceRegistry();
    Assets = new AssetManager(Services);

    GraphicsDevice = GraphicsDevice.New();
    GraphicsDeviceService = new GraphicsDeviceServiceLocal(Services, GraphicsDevice);

    EffectSystem = new EffectSystem(Services);
    EffectSystem.Initialize();
    ((IContentable)EffectSystem).LoadContent();
    // Compile synchronously so effects are ready as soon as they are requested.
    ((EffectCompilerCache)EffectSystem.Compiler).CompileEffectAsynchronously = false;

    DrawEffectContext = RenderContext.GetShared(Services);
}
/// <summary>
/// Loads the source image file from <c>SourcePath</c> and saves it into the
/// asset database at <c>Location</c>.
/// </summary>
/// <param name="commandContext">The command context (unused here).</param>
/// <returns>Always <c>ResultStatus.Successful</c> unless an exception escapes.</returns>
protected override Task<ResultStatus> DoCommandOverride(ICommandContext commandContext)
{
    var assetManager = new AssetManager();

    Image image;
    using (var fileStream = new FileStream(SourcePath, FileMode.Open, FileAccess.Read))
    {
        image = Image.Load(fileStream);
    }

    // Dispose the image even if Save throws; the original only disposed it
    // on the success path, leaking the image on failure.
    using (image)
    {
        assetManager.Save(Location, image);
    }

    return Task.FromResult(ResultStatus.Successful);
}
/// <summary>
/// Saves an entity with a known transform position, forces a GC, reloads it
/// and verifies the position round-trips through the asset database.
/// </summary>
public void TestSaveAndLoadEntities()
{
    InitializeAssetDatabase();

    // Save an entity with a distinctive position so the reload check is meaningful.
    var assetManager = new AssetManager();
    var savedEntity = new Entity();
    savedEntity.Transform.Position = new Vector3(100.0f, 0.0f, 0.0f);
    assetManager.Save("EntityAssets/Entity", savedEntity);

    // Collect so the load below cannot simply hand back a live in-memory reference.
    GC.Collect();

    var loadedEntity = assetManager.Load<Entity>("EntityAssets/Entity");
    Assert.AreEqual(savedEntity.Transform.Position, loadedEntity.Transform.Position);
}
/// <summary>
/// The method to override containing the actual command code. It is called by the <see cref="DoCommand" /> function.
/// Dispatches to the animation/skeleton/model export helper selected by <c>Mode</c> and saves the result at <c>Location</c>.
/// </summary>
/// <param name="commandContext">The command context.</param>
/// <returns>Task{ResultStatus}.</returns>
protected override async Task<ResultStatus> DoCommandOverride(ICommandContext commandContext)
{
    var assetManager = new AssetManager();

    // Throttle: allow at most one export command at a time. Take a slot by incrementing
    // the shared counter; if another command already holds a slot, undo the increment
    // and retry after a short delay.
    while (Interlocked.Increment(ref spawnedCommands) >= 2)
    {
        Interlocked.Decrement(ref spawnedCommands);
        await Task.Delay(1, CancellationToken);
    }

    try
    {
        object exportedObject;

        switch (Mode)
        {
            case ExportMode.Animation:
                exportedObject = ExportAnimation(commandContext, assetManager);
                break;
            case ExportMode.Skeleton:
                exportedObject = ExportSkeleton(commandContext, assetManager);
                break;
            case ExportMode.Model:
                exportedObject = ExportModel(commandContext, assetManager);
                break;
            default:
                commandContext.Logger.Error("Unknown export type [{0}] {1}", Mode, ContextAsString);
                return ResultStatus.Failed;
        }

        // Export helpers may return null (nothing to export); only save a real result.
        if (exportedObject != null)
            assetManager.Save(Location, exportedObject);

        commandContext.Logger.Verbose("The {0} has been successfully imported.", ContextAsString);

        return ResultStatus.Successful;
    }
    catch (Exception ex)
    {
        commandContext.Logger.Error("Unexpected error while importing {0}", ex, ContextAsString);
        return ResultStatus.Failed;
    }
    finally
    {
        // Always release the concurrency slot taken by the throttle above.
        Interlocked.Decrement(ref spawnedCommands);
    }
}
/// <summary>
/// Loads an image from <c>InputUrl</c>, resizes or rescales it, optionally generates
/// mipmaps, compresses it to the requested format and saves it to <c>OutputUrl</c>.
/// </summary>
/// <param name="commandContext">The command context used for logging.</param>
/// <returns>Always <c>ResultStatus.Successful</c> unless an exception escapes.</returns>
protected override Task<ResultStatus> DoCommandOverride(ICommandContext commandContext)
{
    var assetManager = new AssetManager();

    // Load image
    var image = assetManager.Load<Image>(InputUrl);

    // Initialize TextureTool library
    using (var texTool = new TextureTool())
    using (var texImage = texTool.Load(image))
    {
        // Fall back to the source image's format when no explicit output format was requested.
        var outputFormat = Format ?? image.Description.Format;

        // Apply transformations
        texTool.Decompress(texImage);
        if (IsAbsolute)
        {
            texTool.Resize(texImage, (int)Width, (int)Height, Filter.Rescaling.Lanczos3);
        }
        else
        {
            // In relative mode, Width/Height are percentages of the source size.
            texTool.Rescale(texImage, Width / 100.0f, Height / 100.0f, Filter.Rescaling.Lanczos3);
        }

        // Generate mipmaps
        if (GenerateMipmaps)
        {
            texTool.GenerateMipMaps(texImage, Filter.MipMapGeneration.Box);
        }

        // Convert/Compress to output format
        texTool.Compress(texImage, outputFormat);

        // Save
        using (var outputImage = texTool.ConvertToParadoxImage(texImage))
        {
            assetManager.Save(OutputUrl, outputImage);
            commandContext.Logger.Verbose("Compression successful [{3}] to ({0}x{1},{2})", outputImage.Description.Width, outputImage.Description.Height, outputImage.Description.Format, OutputUrl);
        }
    }

    return Task.FromResult(ResultStatus.Successful);
}
/// <summary>
/// Create an empty register. Preloads the FreeType native library, creates the
/// FreeType library instance and starts the background bitmap-builder worker.
/// </summary>
public FontManager()
{
    assetManager = new AssetManager();

    // Preload proper freetype native library (depending on CPU type)
    Core.NativeLibrary.PreloadLibrary("freetype.dll");

    // create a freetype library used to generate the bitmaps
    freetypeLibrary = new Library();

#if SILICONSTUDIO_PLATFORM_WINDOWS_RUNTIME
    // WinRT has no Thread class; run the bitmap builder on the system thread pool instead.
    Windows.System.Threading.ThreadPool.RunAsync(operation => SafeAction.Wrap(BuildBitmapThread)());
#else
    // launch the thumbnail builder thread (background so it never keeps the process alive)
    bitmapBuilderThread = new Thread(SafeAction.Wrap(BuildBitmapThread)) { IsBackground = true, Name = "Bitmap Builder thread" };
    bitmapBuilderThread.Start();
#endif
}
/// <summary>
/// Loads a <c>DataContainer</c> from the configured source (raw file or content url),
/// saves an altered copy to <c>OutputUrl</c>, spawns and awaits the child commands,
/// then registers the declared input dependencies.
/// </summary>
/// <param name="commandContext">The command context used to spawn child commands and register dependencies.</param>
/// <returns><c>Successful</c>, or <c>Cancelled</c> when <c>WaitDelay()</c> reports cancellation.</returns>
protected override async Task<ResultStatus> DoCommandOverride(ICommandContext commandContext)
{
    var assetManager = new AssetManager();
    DataContainer result = null;

    switch (Source.Type)
    {
        case UrlType.File:
            using (var fileStream = new FileStream(Source.Path, FileMode.Open, FileAccess.Read))
            {
                if (!WaitDelay())
                    return ResultStatus.Cancelled;

                result = DataContainer.Load(fileStream);
            }
            break;
        case UrlType.ContentLink:
        case UrlType.Content:
            var container = assetManager.Load<DataContainer>(Source.Path);

            if (!WaitDelay())
                return ResultStatus.Cancelled;

            result = container.Alterate();
            break;
    }

    // NOTE(review): if Source.Type matches none of the cases above, result stays null and
    // null is saved — confirm this is intended for the remaining UrlType values.
    assetManager.Save(OutputUrl, result);

    var tasksToWait = CommandsToSpawn.Select(commandContext.ScheduleAndExecuteCommand);
    await Task.WhenAll(tasksToWait);

    foreach (ObjectUrl inputDep in InputDependencies)
    {
        commandContext.RegisterInputDependency(inputDep);
    }

    return ResultStatus.Successful;
}
/// <summary>
/// Persists a three-level asset chain (Grandpa -> Pa -> Son), reloads the root
/// after a GC and verifies every level round-tripped correctly.
/// </summary>
public unsafe void TestSaveAndLoadSimpleAssets()
{
    var assetManager = new AssetManager();
    SaveSimpleAssets(assetManager);
    GC.Collect();

    // Walk the chain from the root, checking each level's fields.
    var grandpa = assetManager.Load<SimpleAsset>("SimpleAssets/Grandpa");
    Assert.That(grandpa.Url, Is.EqualTo("SimpleAssets/Grandpa"));
    Assert.That(grandpa.Str, Is.EqualTo("Grandpa"));
    Assert.That(grandpa.Dble, Is.EqualTo(5.0));
    Assert.That(grandpa.Child, Is.Not.Null);

    var pa = grandpa.Child;
    Assert.That(pa.Url, Is.EqualTo("SimpleAssets/Pa"));
    Assert.That(pa.Str, Is.EqualTo("Pa"));
    Assert.That(pa.Dble, Is.EqualTo(5.0));
    Assert.That(pa.Child, Is.Not.Null);

    var son = pa.Child;
    Assert.That(son.Url, Is.EqualTo("SimpleAssets/Son"));
    Assert.That(son.Str, Is.EqualTo("Son"));
    Assert.That(son.Dble, Is.EqualTo(5.0));
    Assert.That(son.Child, Is.Null);
}
/// <summary>
/// Persists three assets whose Child references form a cycle
/// (First -> Second -> Third -> First), reloads them after a GC, and verifies
/// the cycle is restored to the very same root instance.
/// </summary>
public unsafe void TestSaveAndLoadCyclicallyReferencedAssets()
{
    var assetManager = new AssetManager();
    SaveCyclicallyReferencedAssets(assetManager);
    GC.Collect();

    // Walk the chain from the root, checking each level's fields.
    var first = assetManager.Load<SimpleAsset>("SimpleAssets/First");
    Assert.That(first.Url, Is.EqualTo("SimpleAssets/First"));
    Assert.That(first.Str, Is.EqualTo("First"));
    Assert.That(first.Dble, Is.EqualTo(5.0));
    Assert.That(first.Child, Is.Not.Null);

    var second = first.Child;
    Assert.That(second.Url, Is.EqualTo("SimpleAssets/Second"));
    Assert.That(second.Str, Is.EqualTo("Second"));
    Assert.That(second.Dble, Is.EqualTo(5.0));
    Assert.That(second.Child, Is.Not.Null);

    var third = second.Child;
    Assert.That(third.Url, Is.EqualTo("SimpleAssets/Third"));
    Assert.That(third.Str, Is.EqualTo("Third"));
    Assert.That(third.Dble, Is.EqualTo(5.0));

    // Closing the cycle must yield the identical object, not a copy.
    Assert.That(third.Child, Is.SameAs(first));
}
/// <summary>
/// Builds the skeleton to export: loads the model skeleton, warns about nodes that differ between
/// the model file and the asset, removes nodes not marked as preserved, then fixes parent indices
/// and local matrices so the remaining nodes keep their original world positions.
/// </summary>
/// <param name="commandContext">The command context used for logging warnings.</param>
/// <param name="assetManager">The asset manager passed through to <c>LoadSkeleton</c>.</param>
/// <returns>The filtered <c>Skeleton</c> (typed as object for the generic export dispatch).</returns>
private object ExportSkeleton(ICommandContext commandContext, AssetManager assetManager)
{
    var skeleton = LoadSkeleton(commandContext, assetManager);

    var modelNodes = new HashSet<string>(skeleton.Nodes.Select(x => x.Name));
    var skeletonNodes = new HashSet<string>(SkeletonNodesWithPreserveInfo.Select(x => x.Key));

    // List missing nodes on both sides, to display warnings
    var missingNodesInModel = new HashSet<string>(skeletonNodes);
    missingNodesInModel.ExceptWith(modelNodes);
    var missingNodesInAsset = new HashSet<string>(modelNodes);
    missingNodesInAsset.ExceptWith(skeletonNodes);

    // Output warnings if skeleton was not properly reimported from latest FBX
    if (missingNodesInAsset.Count > 0)
        commandContext.Logger.Warning($"{missingNodesInAsset.Count} node(s) were present in model [{SourcePath}] but not in asset [{Location}], please reimport: {string.Join(", ", missingNodesInAsset)}");
    if (missingNodesInModel.Count > 0)
        commandContext.Logger.Warning($"{missingNodesInModel.Count} node(s) were present in asset [{Location}] but not in model [{SourcePath}], please reimport: {string.Join(", ", missingNodesInModel)}");

    // Build node mapping to expected structure: nodes flagged as not preserved get optimized away
    var optimizedNodes = new HashSet<string>(SkeletonNodesWithPreserveInfo.Where(x => !x.Value).Select(x => x.Key));

    // Refresh skeleton updater with loaded skeleton (to be able to compute matrices)
    var hierarchyUpdater = new SkeletonUpdater(skeleton);
    hierarchyUpdater.UpdateMatrices();

    // Removed optimized nodes
    var filteredSkeleton = new Skeleton { Nodes = skeleton.Nodes.Where(x => !optimizedNodes.Contains(x.Name)).ToArray() };

    // Fix parent indices (since we removed some nodes)
    for (int i = 0; i < filteredSkeleton.Nodes.Length; ++i)
    {
        var parentIndex = filteredSkeleton.Nodes[i].ParentIndex;
        if (parentIndex != -1)
        {
            // Find appropriate parent to map to: walk up the original hierarchy until a kept node is found
            var newParentIndex = -1;
            while (newParentIndex == -1 && parentIndex != -1)
            {
                var nodeName = skeleton.Nodes[parentIndex].Name;
                parentIndex = skeleton.Nodes[parentIndex].ParentIndex;
                newParentIndex = filteredSkeleton.Nodes.IndexOf(x => x.Name == nodeName);
            }
            filteredSkeleton.Nodes[i].ParentIndex = newParentIndex;
        }
    }

    // Generate mapping
    var skeletonMapping = new SkeletonMapping(filteredSkeleton, skeleton);

    // Children of remapped nodes need to have their matrices updated
    for (int i = 0; i < skeleton.Nodes.Length; ++i)
    {
        // Skip node if it doesn't exist in source skeleton
        if (skeletonMapping.SourceToSource[i] != i)
            continue;

        var node = skeleton.Nodes[i];
        var filteredIndex = skeletonMapping.SourceToTarget[i];
        var oldParentIndex = node.ParentIndex;

        if (oldParentIndex != -1 && skeletonMapping.SourceToSource[oldParentIndex] != oldParentIndex)
        {
            // Compute matrix for intermediate missing nodes
            var transformMatrix = CombineMatricesFromNodeIndices(hierarchyUpdater.NodeTransformations, skeletonMapping.SourceToSource[oldParentIndex], oldParentIndex);
            var localMatrix = hierarchyUpdater.NodeTransformations[i].LocalMatrix;

            // Combine it with local matrix, and use that instead in the new skeleton; resulting node should be same position as before optimized nodes were removed
            localMatrix = Matrix.Multiply(localMatrix, transformMatrix);
            localMatrix.Decompose(out filteredSkeleton.Nodes[filteredIndex].Transform.Scale, out filteredSkeleton.Nodes[filteredIndex].Transform.Rotation, out filteredSkeleton.Nodes[filteredIndex].Transform.Position);
        }
    }

    return filteredSkeleton;
}
/// <summary>
/// Imports a texture from <paramref name="sourcePath"/>, applies the asset's transformations
/// (resize, color key, premultiplied alpha, mipmaps, compression) and saves the result at
/// <paramref name="outputUrl"/>. Cancellation is checked between every step.
/// </summary>
/// <param name="sourcePath">The path of the source texture file.</param>
/// <param name="outputUrl">The destination url in the asset database.</param>
/// <param name="textureAsset">The texture asset describing the import settings.</param>
/// <param name="parameters">The conversion parameters (platform, graphics profile, quality).</param>
/// <param name="cancellationToken">Token polled between steps to abort early.</param>
/// <param name="logger">The logger for progress and warnings.</param>
/// <returns><c>Successful</c>, or <c>Cancelled</c> when cancellation was requested.</returns>
public static ResultStatus ImportAndSaveTextureImage(UFile sourcePath, string outputUrl, TextureAsset textureAsset, TextureConvertParameters parameters, CancellationToken cancellationToken, Logger logger)
{
    var assetManager = new AssetManager();

    using (var texTool = new TextureTool())
    using (var texImage = texTool.Load(sourcePath, textureAsset.SRgb))
    {
        // Apply transformations
        texTool.Decompress(texImage, textureAsset.SRgb);
        if (cancellationToken.IsCancellationRequested) // abort the process if cancellation is demanded
            return ResultStatus.Cancelled;

        var fromSize = new Size2(texImage.Width, texImage.Height);
        var targetSize = new Size2((int)textureAsset.Width, (int)textureAsset.Height);

        // Resize the image: in percentage mode, Width/Height scale the source dimensions
        if (textureAsset.IsSizeInPercentage)
        {
            targetSize = new Size2((int)(fromSize.Width * (float)textureAsset.Width / 100.0f), (int)(fromSize.Height * (float)textureAsset.Height / 100.0f));
        }

        // Find the target size (clamped/adjusted for the target platform and profile)
        targetSize = FindBestTextureSize(textureAsset.Format, parameters.GraphicsPlatform, parameters.GraphicsProfile, fromSize, targetSize, textureAsset.GenerateMipmaps, logger);

        // Resize the image only if needed
        if (targetSize != fromSize)
        {
            texTool.Resize(texImage, targetSize.Width, targetSize.Height, Filter.Rescaling.Lanczos3);
        }

        if (cancellationToken.IsCancellationRequested) // abort the process if cancellation is demanded
            return ResultStatus.Cancelled;

        // texture size is now determined, we can cache it
        var textureSize = new Int2(texImage.Width, texImage.Height);

        // Apply the color key
        if (textureAsset.ColorKeyEnabled)
            texTool.ColorKey(texImage, textureAsset.ColorKeyColor);

        if (cancellationToken.IsCancellationRequested) // abort the process if cancellation is demanded
            return ResultStatus.Cancelled;

        // Pre-multiply alpha
        if (textureAsset.PremultiplyAlpha)
            texTool.PreMultiplyAlpha(texImage);

        if (cancellationToken.IsCancellationRequested) // abort the process if cancellation is demanded
            return ResultStatus.Cancelled;

        // Generate mipmaps
        if (textureAsset.GenerateMipmaps)
        {
            // Box generation is used unless the format is sRGB BGRA with non-power-of-two dimensions,
            // in which case linear generation is used instead.
            var boxFilteringIsSupported = texImage.Format != PixelFormat.B8G8R8A8_UNorm_SRgb || (IsPowerOfTwo(textureSize.X) && IsPowerOfTwo(textureSize.Y));
            texTool.GenerateMipMaps(texImage, boxFilteringIsSupported ? Filter.MipMapGeneration.Box : Filter.MipMapGeneration.Linear);
        }

        if (cancellationToken.IsCancellationRequested) // abort the process if cancellation is demanded
            return ResultStatus.Cancelled;

        // Convert/Compress to output format
        // TODO: Change alphaFormat depending on actual image content (auto-detection)?
        var outputFormat = DetermineOutputFormat(textureAsset, parameters, textureSize, texImage.Format, parameters.Platform, parameters.GraphicsPlatform, parameters.GraphicsProfile);
        texTool.Compress(texImage, outputFormat, (TextureConverter.Requests.TextureQuality)parameters.TextureQuality);

        if (cancellationToken.IsCancellationRequested) // abort the process if cancellation is demanded
            return ResultStatus.Cancelled;

        // Save the texture
        using (var outputImage = texTool.ConvertToParadoxImage(texImage))
        {
            if (cancellationToken.IsCancellationRequested) // abort the process if cancellation is demanded
                return ResultStatus.Cancelled;

            assetManager.Save(outputUrl, outputImage.ToSerializableVersion());

            logger.Info("Compression successful [{3}] to ({0}x{1},{2})", outputImage.Description.Width, outputImage.Description.Height, outputImage.Description.Format, outputUrl);
        }
    }

    return ResultStatus.Successful;
}
/// <summary>
/// Builds the final model to export: applies materials, remaps meshes onto the asset skeleton,
/// merges compatible meshes, remaps skinning, computes bounding volumes, and finally packs all
/// vertex/index buffers into one shared vertex buffer and one shared index buffer.
/// </summary>
/// <param name="commandContext">The command context used for logging.</param>
/// <param name="assetManager">The asset manager used to load the model and skeleton.</param>
/// <returns>The processed <c>Model</c> (typed as object for the generic export dispatch).</returns>
private object ExportModel(ICommandContext commandContext, AssetManager assetManager)
{
    // Read from model file
    var modelSkeleton = LoadSkeleton(commandContext, assetManager); // we get model skeleton to compare it to real skeleton we need to map to
    var model = LoadModel(commandContext, assetManager);

    // Apply materials
    foreach (var modelMaterial in Materials)
    {
        if (modelMaterial.MaterialInstance?.Material == null)
        {
            commandContext.Logger.Warning($"The material [{modelMaterial.Name}] is null in the list of materials.");
            continue;
        }
        model.Materials.Add(modelMaterial.MaterialInstance);
    }

    model.BoundingBox = BoundingBox.Empty;

    foreach (var mesh in model.Meshes)
    {
        if (TessellationAEN)
        {
            // TODO: Generate AEN model view
            commandContext.Logger.Error("TessellationAEN is not supported in {0}", ContextAsString);
        }
    }

    SkeletonMapping skeletonMapping;
    Skeleton skeleton;
    if (SkeletonUrl != null)
    {
        // Load skeleton and process it
        skeleton = assetManager.Load<Skeleton>(SkeletonUrl);

        // Assign skeleton to model
        model.Skeleton = AttachedReferenceManager.CreateSerializableVersion<Skeleton>(Guid.Empty, SkeletonUrl);
    }
    else
    {
        skeleton = null;
    }

    skeletonMapping = new SkeletonMapping(skeleton, modelSkeleton);

    // Refresh skeleton updater with model skeleton
    var hierarchyUpdater = new SkeletonUpdater(modelSkeleton);
    hierarchyUpdater.UpdateMatrices();

    // Move meshes in the new nodes
    foreach (var mesh in model.Meshes)
    {
        // Check if there was a remap using model skeleton
        if (skeletonMapping.SourceToSource[mesh.NodeIndex] != mesh.NodeIndex)
        {
            // Transform vertices: bake the removed-node transform into the vertex data
            var transformationMatrix = CombineMatricesFromNodeIndices(hierarchyUpdater.NodeTransformations, skeletonMapping.SourceToSource[mesh.NodeIndex], mesh.NodeIndex);
            mesh.Draw.VertexBuffers[0].TransformBuffer(ref transformationMatrix);

            // Check if geometry is inverted, to know if we need to reverse winding order
            // TODO: What to do if there is no index buffer? We should create one... (not happening yet)
            if (mesh.Draw.IndexBuffer == null)
                throw new InvalidOperationException();

            Matrix rotation;
            Vector3 scale, translation;
            // A negative scale determinant mirrors the geometry, so the winding order must be flipped
            if (transformationMatrix.Decompose(out scale, out rotation, out translation) && scale.X * scale.Y * scale.Z < 0)
            {
                mesh.Draw.ReverseWindingOrder();
            }
        }

        // Update new node index using real asset skeleton
        mesh.NodeIndex = skeletonMapping.SourceToTarget[mesh.NodeIndex];
    }

    // Merge meshes with same parent nodes, material and skinning
    var meshesByNodes = model.Meshes.GroupBy(x => x.NodeIndex).ToList();
    foreach (var meshesByNode in meshesByNodes)
    {
        // This logic to detect similar material is kept from old code; this should be reviewed/improved at some point
        foreach (var meshesPerDrawCall in meshesByNode.GroupBy(x => x, new AnonymousEqualityComparer<Mesh>((x, y) =>
            x.MaterialIndex == y.MaterialIndex // Same material
            && ArrayExtensions.ArraysEqual(x.Skinning?.Bones, y.Skinning?.Bones) // Same bones
            && CompareParameters(model, x, y) // Same parameters
            && CompareShadowOptions(model, x, y), // Same shadow parameters
            x => 0)).ToList())
        {
            if (meshesPerDrawCall.Count() == 1)
            {
                // Nothing to group, skip to next entry
                continue;
            }

            // Remove old meshes
            foreach (var mesh in meshesPerDrawCall)
            {
                model.Meshes.Remove(mesh);
            }

            // Add new combined mesh(es)
            var baseMesh = meshesPerDrawCall.First();
            var newMeshList = meshesPerDrawCall.Select(x => x.Draw).ToList().GroupDrawData(Allow32BitIndex);

            foreach (var generatedMesh in newMeshList)
            {
                model.Meshes.Add(new Mesh(generatedMesh, baseMesh.Parameters)
                {
                    MaterialIndex = baseMesh.MaterialIndex,
                    Name = baseMesh.Name,
                    Draw = generatedMesh,
                    NodeIndex = baseMesh.NodeIndex,
                    Skinning = baseMesh.Skinning,
                });
            }
        }
    }

    // Remap skinning
    foreach (var skinning in model.Meshes.Select(x => x.Skinning).Where(x => x != null).Distinct())
    {
        // Update node mapping
        // Note: we only remap skinning matrices, but we could directly remap skinning bones instead
        for (int i = 0; i < skinning.Bones.Length; ++i)
        {
            var nodeIndex = skinning.Bones[i].NodeIndex;
            var newNodeIndex = skeletonMapping.SourceToSource[nodeIndex];
            skinning.Bones[i].NodeIndex = skeletonMapping.SourceToTarget[nodeIndex];

            // If it was remapped, we also need to update matrix
            if (newNodeIndex != nodeIndex)
            {
                var transformationMatrix = CombineMatricesFromNodeIndices(hierarchyUpdater.NodeTransformations, newNodeIndex, nodeIndex);
                skinning.Bones[i].LinkToMeshMatrix = Matrix.Multiply(skinning.Bones[i].LinkToMeshMatrix, transformationMatrix);
            }
        }
    }

    // split the meshes if necessary
    model.Meshes = SplitExtensions.SplitMeshes(model.Meshes, Allow32BitIndex);

    // Refresh skeleton updater with asset skeleton
    hierarchyUpdater = new SkeletonUpdater(skeleton);
    hierarchyUpdater.UpdateMatrices();

    // bounding boxes
    var modelBoundingBox = model.BoundingBox;
    var modelBoundingSphere = model.BoundingSphere;
    foreach (var mesh in model.Meshes)
    {
        var vertexBuffers = mesh.Draw.VertexBuffers;
        if (vertexBuffers.Length > 0)
        {
            // Compute local mesh bounding box (no node transformation)
            Matrix matrix = Matrix.Identity;
            mesh.BoundingBox = vertexBuffers[0].ComputeBounds(ref matrix, out mesh.BoundingSphere);

            // Compute model bounding box (includes node transformation)
            hierarchyUpdater.GetWorldMatrix(mesh.NodeIndex, out matrix);
            BoundingSphere meshBoundingSphere;
            var meshBoundingBox = vertexBuffers[0].ComputeBounds(ref matrix, out meshBoundingSphere);
            BoundingBox.Merge(ref modelBoundingBox, ref meshBoundingBox, out modelBoundingBox);
            BoundingSphere.Merge(ref modelBoundingSphere, ref meshBoundingSphere, out modelBoundingSphere);
        }

        // TODO: temporary Always try to compact
        mesh.Draw.CompactIndexBuffer();
    }
    model.BoundingBox = modelBoundingBox;
    model.BoundingSphere = modelBoundingSphere;

    // merges all the Draw VB and IB together to produce one final VB and IB by entity.
    var sizeVertexBuffer = model.Meshes.SelectMany(x => x.Draw.VertexBuffers).Select(x => x.Buffer.GetSerializationData().Content.Length).Sum();
    var sizeIndexBuffer = 0;
    foreach (var x in model.Meshes)
    {
        // Let's be aligned (if there was 16bit indices before, we might be off)
        if (x.Draw.IndexBuffer.Is32Bit && sizeIndexBuffer % 4 != 0)
            sizeIndexBuffer += 2;

        sizeIndexBuffer += x.Draw.IndexBuffer.Buffer.GetSerializationData().Content.Length;
    }
    var vertexBuffer = new BufferData(BufferFlags.VertexBuffer, new byte[sizeVertexBuffer]);
    var indexBuffer = new BufferData(BufferFlags.IndexBuffer, new byte[sizeIndexBuffer]);

    // Note: reusing same instance, to avoid having many VB with same hash but different URL
    var vertexBufferSerializable = vertexBuffer.ToSerializableVersion();
    var indexBufferSerializable = indexBuffer.ToSerializableVersion();

    var vertexBufferNextIndex = 0;
    var indexBufferNextIndex = 0;
    foreach (var drawMesh in model.Meshes.Select(x => x.Draw))
    {
        // the index buffer
        var oldIndexBuffer = drawMesh.IndexBuffer.Buffer.GetSerializationData().Content;

        // Let's be aligned (if there was 16bit indices before, we might be off)
        if (drawMesh.IndexBuffer.Is32Bit && indexBufferNextIndex % 4 != 0)
            indexBufferNextIndex += 2;

        Array.Copy(oldIndexBuffer, 0, indexBuffer.Content, indexBufferNextIndex, oldIndexBuffer.Length);

        drawMesh.IndexBuffer = new IndexBufferBinding(indexBufferSerializable, drawMesh.IndexBuffer.Is32Bit, drawMesh.IndexBuffer.Count, indexBufferNextIndex);
        indexBufferNextIndex += oldIndexBuffer.Length;

        // the vertex buffers
        for (int index = 0; index < drawMesh.VertexBuffers.Length; index++)
        {
            var vertexBufferBinding = drawMesh.VertexBuffers[index];
            var oldVertexBuffer = vertexBufferBinding.Buffer.GetSerializationData().Content;

            Array.Copy(oldVertexBuffer, 0, vertexBuffer.Content, vertexBufferNextIndex, oldVertexBuffer.Length);

            drawMesh.VertexBuffers[index] = new VertexBufferBinding(vertexBufferSerializable, vertexBufferBinding.Declaration, vertexBufferBinding.Count, vertexBufferBinding.Stride, vertexBufferNextIndex);
            vertexBufferNextIndex += oldVertexBuffer.Length;
        }
    }

    // Convert to Entity
    return model;
}
/// <summary>
/// Imports an FBX model into a package session (add -> stage -> import), then exercises the
/// save/load round-trip of the resulting entity hierarchy and the asset clone/merge pipeline.
/// </summary>
public void TestImportModelSimple()
{
    var file = Path.Combine(Environment.CurrentDirectory, @"scenes\goblin.fbx");

    // Create a project with an asset reference a raw file
    var project = new Package { FullPath = Path.Combine(Environment.CurrentDirectory, "ModelAssets", "ModelAssets" + Package.PackageFileExtension) };
    using (var session = new PackageSession(project))
    {
        var importSession = new AssetImportSession(session);

        // ------------------------------------------------------------------
        // Step 1: Add files to session
        // ------------------------------------------------------------------
        importSession.AddFile(file, project, UDirectory.Empty);

        // ------------------------------------------------------------------
        // Step 2: Stage assets
        // ------------------------------------------------------------------
        var stageResult = importSession.Stage();
        Assert.IsTrue(stageResult);
        Assert.AreEqual(0, project.Assets.Count);

        // ------------------------------------------------------------------
        // Step 3: Import asset directly
        // ------------------------------------------------------------------
        importSession.Import();
        Assert.AreEqual(4, project.Assets.Count);
        var assetItem = project.Assets.FirstOrDefault(item => item.Asset is EntityAsset);
        Assert.NotNull(assetItem);

        EntityAnalysis.UpdateEntityReferences(((EntityAsset)assetItem.Asset).Hierarchy);

        var assetCollection = new AssetItemCollection();
        // Remove directory from the location
        assetCollection.Add(assetItem);

        Console.WriteLine(assetCollection.ToText());

        //session.Save();

        // Create and mount database file system
        var objDatabase = new ObjectDatabase("/data/db", "index", "/local/db");
        var databaseFileProvider = new DatabaseFileProvider(objDatabase);
        AssetManager.GetFileProvider = () => databaseFileProvider;

        // Strip the root entity down to its transform component before saving
        ((EntityAsset)assetItem.Asset).Hierarchy.Entities[0].Components.RemoveWhere(x => x.Key != TransformComponent.Key);
        //((EntityAsset)assetItem.Asset).Data.Entities[1].Components.RemoveWhere(x => x.Key != SiliconStudio.Paradox.Engine.TransformComponent.Key);

        // Save the hierarchy, then reload it through a fresh asset manager and clone the entity
        var assetManager = new AssetManager();
        assetManager.Save("Entity1", ((EntityAsset)assetItem.Asset).Hierarchy);

        assetManager = new AssetManager();
        var entity = assetManager.Load<Entity>("Entity1");

        var entity2 = entity.Clone();

        // Exercise asset cloning and a three-way merge where asset2 becomes the new base of asset1
        var entityAsset = (EntityAsset)assetItem.Asset;
        entityAsset.Hierarchy.Entities[0].Components.Add(TransformComponent.Key, new TransformComponent());

        var entityAsset2 = (EntityAsset)AssetCloner.Clone(entityAsset);
        entityAsset2.Hierarchy.Entities[0].Components.Get(TransformComponent.Key).Position = new Vector3(10.0f, 0.0f, 0.0f);

        AssetMerge.Merge(entityAsset, entityAsset2, null, AssetMergePolicies.MergePolicyAsset2AsNewBaseOfAsset1);
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="GameBase" /> class.
/// Sets up timing, services, the asset manager, game systems and the platform layer.
/// </summary>
protected GameBase()
{
    // Internals
    Log = GlobalLogger.GetLogger(GetType().GetTypeInfo().Name);
    updateTime = new GameTime();
    drawTime = new GameTime();
    playTimer = new TimerTick();
    updateTimer = new TimerTick();
    totalUpdateTime = new TimeSpan();
    timer = new TimerTick();
    IsFixedTimeStep = false;
    maximumElapsedTime = TimeSpan.FromMilliseconds(500.0);
    TargetElapsedTime = TimeSpan.FromTicks(10000000 / 60); // target elapsed time is by default 60Hz
    lastUpdateCount = new int[4];
    nextLastUpdateCountIndex = 0;

    // Calculate the updateCountAverageSlowLimit (assuming moving average is >=3 )
    // Example for a moving average of 4:
    // updateCountAverageSlowLimit = (2 * 2 + (4 - 2)) / 4 = 1.5f
    const int BadUpdateCountTime = 2; // number of bad frame (a bad frame is a frame that has at least 2 updates)
    var maxLastCount = 2 * Math.Min(BadUpdateCountTime, lastUpdateCount.Length);
    updateCountAverageSlowLimit = (float)(maxLastCount + (lastUpdateCount.Length - maxLastCount)) / lastUpdateCount.Length;

    // Externals
    Services = new ServiceRegistry();

    // Asset manager
    Asset = new AssetManager(Services);

    LaunchParameters = new LaunchParameters();
    GameSystems = new GameSystemCollection();

    // Create Platform and hook its lifecycle events
    gamePlatform = GamePlatform.Create(this);
    gamePlatform.Activated += gamePlatform_Activated;
    gamePlatform.Deactivated += gamePlatform_Deactivated;
    gamePlatform.Exiting += gamePlatform_Exiting;
    gamePlatform.WindowCreated += GamePlatformOnWindowCreated;

    // Setup registry
    Services.AddService(typeof(IGame), this);
    Services.AddService(typeof(IGamePlatform), gamePlatform);

    IsActive = true;
}
/// <summary>
/// Loads the animation clips from the source model file, remaps them onto the
/// asset-referenced skeleton (merging curves of nodes that were removed from the
/// skeleton), extracts optional root motion, and returns a single combined
/// <see cref="AnimationClip"/> (declared as <c>object</c>; returns null when the
/// source has no animation).
/// </summary>
/// <param name="commandContext">Build command context used for logging.</param>
/// <param name="assetManager">Asset manager used to load the referenced skeleton.</param>
private unsafe object ExportAnimation(ICommandContext commandContext, AssetManager assetManager)
{
    // Read from model file
    var modelSkeleton = LoadSkeleton(commandContext, assetManager); // we get model skeleton to compare it to real skeleton we need to map to
    var animationClips = LoadAnimation(commandContext, assetManager);
    AnimationClip animationClip = null;
    if (animationClips.Count > 0)
    {
        animationClip = new AnimationClip();
        AnimationClip rootMotionAnimationClip = null;

        // If root motion is explicitely enabled, or if there is no skeleton, try to find root node and apply animation directly on TransformComponent
        if ((AnimationRootMotion || SkeletonUrl == null) && modelSkeleton.Nodes.Length >= 1)
        {
            // No skeleton, map root node only
            // TODO: For now, it seems to be located on node 1 in FBX files. Need to check if always the case, and what happens with Assimp
            var rootNode0 = modelSkeleton.Nodes.Length >= 1 ? modelSkeleton.Nodes[0].Name : null;
            var rootNode1 = modelSkeleton.Nodes.Length >= 2 ? modelSkeleton.Nodes[1].Name : null;
            if ((rootNode0 != null && animationClips.TryGetValue(rootNode0, out rootMotionAnimationClip))
                || (rootNode1 != null && animationClips.TryGetValue(rootNode1, out rootMotionAnimationClip)))
            {
                foreach (var channel in rootMotionAnimationClip.Channels)
                {
                    var curve = rootMotionAnimationClip.Curves[channel.Value.CurveIndex];

                    // Root motion: retarget "Transform.*" curves onto the entity's TransformComponent.
                    // NOTE(review): the $ prefix here is redundant — the string contains no interpolation holes.
                    var channelName = channel.Key;
                    if (channelName.StartsWith("Transform."))
                    {
                        animationClip.AddCurve($"[TransformComponent.Key]." + channelName.Replace("Transform.", string.Empty), curve);
                    }

                    // Also apply Camera curves
                    // TODO: Add some other curves?
                    if (channelName.StartsWith("Camera."))
                    {
                        animationClip.AddCurve($"[CameraComponent.Key]." + channelName.Replace("Camera.", string.Empty), curve);
                    }
                }

                // Take max of durations
                if (animationClip.Duration < rootMotionAnimationClip.Duration)
                    animationClip.Duration = rootMotionAnimationClip.Duration;
            }
        }

        // Load asset reference skeleton
        if (SkeletonUrl != null)
        {
            var skeleton = assetManager.Load<Skeleton>(SkeletonUrl);
            var skeletonMapping = new SkeletonMapping(skeleton, modelSkeleton);

            // Process missing nodes
            foreach (var nodeAnimationClipEntry in animationClips)
            {
                var nodeName = nodeAnimationClipEntry.Key;
                var nodeAnimationClip = nodeAnimationClipEntry.Value;
                var nodeIndex = modelSkeleton.Nodes.IndexOf(x => x.Name == nodeName);

                // Node doesn't exist in skeleton? skip it
                if (nodeIndex == -1 || skeletonMapping.SourceToSource[nodeIndex] != nodeIndex)
                    continue;

                // Skip root motion node (if any)
                if (nodeAnimationClip == rootMotionAnimationClip)
                    continue;

                // Find parent node
                var parentNodeIndex = modelSkeleton.Nodes[nodeIndex].ParentIndex;

                if (parentNodeIndex != -1 && skeletonMapping.SourceToSource[parentNodeIndex] != parentNodeIndex)
                {
                    // Some nodes were removed, we need to concat the anim curves
                    // Walk up from this node to its surviving mapped ancestor, collecting every
                    // intermediate (removed) node together with an evaluator for its clip, if any.
                    var currentNodeIndex = nodeIndex;
                    var nodesToMerge = new List<Tuple<ModelNodeDefinition, AnimationBlender, AnimationClipEvaluator>>();
                    while (currentNodeIndex != -1 && currentNodeIndex != skeletonMapping.SourceToSource[parentNodeIndex])
                    {
                        AnimationClip animationClipToMerge;
                        AnimationClipEvaluator animationClipEvaluator = null;
                        AnimationBlender animationBlender = null;
                        if (animationClips.TryGetValue(modelSkeleton.Nodes[currentNodeIndex].Name, out animationClipToMerge))
                        {
                            animationBlender = new AnimationBlender();
                            animationClipEvaluator = animationBlender.CreateEvaluator(animationClipToMerge);
                        }
                        nodesToMerge.Add(Tuple.Create(modelSkeleton.Nodes[currentNodeIndex], animationBlender, animationClipEvaluator));
                        currentNodeIndex = modelSkeleton.Nodes[currentNodeIndex].ParentIndex;
                    }

                    // Put them in proper parent to children order
                    nodesToMerge.Reverse();

                    // Find all key times
                    // TODO: We should detect discontinuities and keep them
                    // NOTE(review): Item3 (the evaluator) is null for nodes without a clip of their
                    // own — confirm such nodes never reach this loop, otherwise this dereferences null.
                    var animationKeysSet = new HashSet<CompressedTimeSpan>();
                    foreach (var node in nodesToMerge)
                    {
                        foreach (var curve in node.Item3.Clip.Curves)
                        {
                            foreach (CompressedTimeSpan time in curve.Keys)
                            {
                                animationKeysSet.Add(time);
                            }
                        }
                    }

                    // Sort key times
                    var animationKeys = animationKeysSet.ToList();
                    animationKeys.Sort();

                    var animationOperations = new FastList<AnimationOperation>();
                    var combinedAnimationClip = new AnimationClip();
                    var translationCurve = new AnimationCurve<Vector3>();
                    var rotationCurve = new AnimationCurve<Quaternion>();
                    var scaleCurve = new AnimationCurve<Vector3>();

                    // Evaluate at every key frame: compose the local matrices of all merged
                    // nodes into one transform, then record it as a single key frame.
                    foreach (var animationKey in animationKeys)
                    {
                        var matrix = Matrix.Identity;

                        // Evaluate node
                        foreach (var node in nodesToMerge)
                        {
                            // Get default position
                            var modelNodeDefinition = node.Item1;

                            // Compute
                            AnimationClipResult animationClipResult = null;
                            animationOperations.Clear();
                            animationOperations.Add(AnimationOperation.NewPush(node.Item3, animationKey));
                            node.Item2.Compute(animationOperations, ref animationClipResult);

                            var updateMemberInfos = new List<UpdateMemberInfo>();
                            foreach (var channel in animationClipResult.Channels)
                                updateMemberInfos.Add(new UpdateMemberInfo { Name = channel.PropertyName, DataOffset = channel.Offset });

                            // TODO: Cache this
                            var compiledUpdate = UpdateEngine.Compile(typeof(ModelNodeDefinition), updateMemberInfos);

                            // Apply the evaluated channel data directly onto the node definition.
                            unsafe
                            {
                                fixed (byte* data = animationClipResult.Data)
                                    UpdateEngine.Run(modelNodeDefinition, compiledUpdate, (IntPtr)data, null);
                            }

                            Matrix localMatrix;
                            TransformComponent.CreateMatrixTRS(ref modelNodeDefinition.Transform.Position, ref modelNodeDefinition.Transform.Rotation, ref modelNodeDefinition.Transform.Scale, out localMatrix);
                            matrix = Matrix.Multiply(localMatrix, matrix);
                        }

                        // Done evaluating, let's decompose matrix
                        TransformTRS transform;
                        matrix.Decompose(out transform.Scale, out transform.Rotation, out transform.Position);

                        // Create a key
                        translationCurve.KeyFrames.Add(new KeyFrameData<Vector3>(animationKey, transform.Position));
                        rotationCurve.KeyFrames.Add(new KeyFrameData<Quaternion>(animationKey, transform.Rotation));
                        scaleCurve.KeyFrames.Add(new KeyFrameData<Vector3>(animationKey, transform.Scale));
                    }

                    combinedAnimationClip.AddCurve($"{nameof(ModelNodeTransformation.Transform)}.{nameof(TransformTRS.Position)}", translationCurve);
                    combinedAnimationClip.AddCurve($"{nameof(ModelNodeTransformation.Transform)}.{nameof(TransformTRS.Rotation)}", rotationCurve);
                    combinedAnimationClip.AddCurve($"{nameof(ModelNodeTransformation.Transform)}.{nameof(TransformTRS.Scale)}", scaleCurve);
                    nodeAnimationClip = combinedAnimationClip;
                }

                foreach (var channel in nodeAnimationClip.Channels)
                {
                    var curve = nodeAnimationClip.Curves[channel.Value.CurveIndex];

                    // TODO: Root motion
                    // Retarget each "Transform.*" curve onto the target skeleton's node slot.
                    var channelName = channel.Key;
                    if (channelName.StartsWith("Transform."))
                    {
                        animationClip.AddCurve($"[ModelComponent.Key].Skeleton.NodeTransformations[{skeletonMapping.SourceToTarget[nodeIndex]}]." + channelName, curve);
                    }
                }

                // Take max of durations
                if (animationClip.Duration < nodeAnimationClip.Duration)
                    animationClip.Duration = nodeAnimationClip.Duration;
            }
        }
    }

    if (animationClip == null)
    {
        commandContext.Logger.Info("File {0} has an empty animation.", SourcePath);
    }
    else
    {
        if (animationClip.Duration.Ticks == 0)
        {
            commandContext.Logger.Warning("File {0} has a 0 tick long animation.", SourcePath);
        }

        // Optimize and set common parameters
        animationClip.RepeatMode = AnimationRepeatMode;
        animationClip.Optimize();
    }
    return animationClip;
}
public unsafe void TestSaveAndLoadAssetManyTimes()
{
    // Persist one asset, drop every live reference to it, then repeatedly reload,
    // increment its value and re-save; each round trip must observe the previous increment.
    var manager = new AssetManager();

    var asset = new SimpleAsset("Grandpa", null) { Dble = 0.0 };
    manager.SaveSingle(asset);
    manager.Unload(asset);
    asset = null;
    GC.Collect();

    for (double expected = 0; expected < 10.0; ++expected)
    {
        asset = manager.Load<SimpleAsset>("SimpleAssets/Grandpa");
        Assert.That(asset.Dble, Is.EqualTo(expected));

        asset.Dble += 1.0;
        manager.SaveSingle(asset);
        manager.Unload(asset);
        asset = null;
        GC.Collect();
    }
}
private void SaveComplexAssets(AssetManager assetManager)
{
    // Build three assets wired into a cyclic web of child/member references,
    // then persist the whole graph starting from the first one.
    var first = new ComplexAsset("First");
    var second = new ComplexAsset("Second");
    var third = new ComplexAsset("Third");

    first.Children.Add(second);
    first.FirstChild = second;
    first.Data = new MemberData { Asset = second, Num = 1 };

    second.Children.Add(third);
    second.FirstChild = third;

    // Third closes the cycle by referencing both earlier assets.
    third.Children.Add(first);
    third.Children.Add(second);
    third.FirstChild = second;
    third.Data = new MemberData { Asset = first, Num = 2 };

    assetManager.Save(first);
}
public unsafe void TestLoadMissingAsset()
{
    var manager = new AssetManager();

    // Loading a URL that was never saved must yield null and must not register the URL.
    var loaded = manager.Load<SimpleAsset>("inexisting/asset");
    Assert.That(loaded, Is.Null);
    Assert.That(manager.HasAssetWithUrl("inexisting/asset"), Is.False);

    SaveAssetsAndDeleteAChild(manager);
    GC.Collect();

    // The surviving parent loads fine, but its deleted child reference comes back null...
    loaded = manager.Load<SimpleAsset>("SimpleAssets/Pa");
    Assert.That(loaded, Is.Not.Null);
    Assert.That(loaded.Url, Is.EqualTo("SimpleAssets/Pa"));
    Assert.That(loaded.Child, Is.Null);

    // ...and the deleted asset itself cannot be loaded directly either.
    loaded = manager.Load<SimpleAsset>("SimpleAssets/Son");
    Assert.That(loaded, Is.Null);
}
public unsafe void TestComplexAssets()
{
    var manager = new AssetManager();
    SaveComplexAssets(manager);
    GC.Collect();

    var first = manager.Load<ComplexAsset>("ComplexAssets/First");
    var secondViaFirst = first.FirstChild;
    var second = manager.Load<ComplexAsset>("ComplexAssets/Second");
    var thirdViaSecond = second.FirstChild;
    var third = manager.Load<ComplexAsset>("ComplexAssets/Third");
    var secondViaThird = third.FirstChild;

    // First: references reached through the asset must be the exact instances the loader returned.
    Assert.That(first.Url, Is.EqualTo("ComplexAssets/First"));
    Assert.That(secondViaFirst, Is.SameAs(first.FirstChild));
    Assert.That(secondViaFirst, Is.SameAs(second));
    Assert.That(first.Data, Is.Not.Null);
    Assert.That(first.Data.Asset, Is.SameAs(second));
    Assert.That(first.Data.Num, Is.EqualTo(1));
    Assert.That(first.Children.Count, Is.EqualTo(1));
    Assert.That(first.Children[0], Is.SameAs(second));

    // Second: has a single child and no member data.
    Assert.That(second.Url, Is.EqualTo("ComplexAssets/Second"));
    Assert.That(thirdViaSecond, Is.SameAs(second.FirstChild));
    Assert.That(thirdViaSecond, Is.SameAs(third));
    Assert.That(second.Data, Is.Null);
    Assert.That(second.Children.Count, Is.EqualTo(1));
    Assert.That(second.Children[0], Is.SameAs(third));

    // Third: closes the cycle back to the first asset.
    Assert.That(third.Url, Is.EqualTo("ComplexAssets/Third"));
    Assert.That(secondViaThird, Is.SameAs(third.FirstChild));
    Assert.That(secondViaThird, Is.SameAs(second));
    Assert.That(third.Data, Is.Not.Null);
    Assert.That(third.Data.Asset, Is.SameAs(first));
    Assert.That(third.Data.Num, Is.EqualTo(2));
    Assert.That(third.Children.Count, Is.EqualTo(2));
    Assert.That(third.Children[0], Is.SameAs(first));
    Assert.That(third.Children[1], Is.SameAs(second));
}
private static void SaveCyclicallyReferencedAssets(AssetManager assetManager)
{
    // Three-level chain whose deepest child points back at the root, forming a cycle.
    var root = new SimpleAsset("First", new SimpleAsset("Second", new SimpleAsset("Third", null)));
    root.Child.Child.Child = root;
    assetManager.Save(root);
}
private static void SaveSimpleAssets(AssetManager assetManager)
{
    // Straight three-generation chain: Grandpa -> Pa -> Son.
    var grandpa = new SimpleAsset("Grandpa", new SimpleAsset("Pa", new SimpleAsset("Son", null)));
    assetManager.Save(grandpa);
}
public unsafe void TestSaveAndLoadAssetAndIndexFileManyTimes()
{
    var manager = new AssetManager();

    var asset = new SimpleAsset("Grandpa", null) { Dble = 0.0 };
    manager.SaveSingle(asset);
    manager.Unload(asset);

    // Make sure the on-disk index reflects the save before the first reload.
    var databaseFileProvider = (DatabaseFileProvider)VirtualFileSystem.ResolveProvider("/db", true).Provider;
    databaseFileProvider.AssetIndexMap.WaitPendingOperations();

    asset = null;
    GC.Collect();

    for (double expected = 0; expected < 10.0; ++expected)
    {
        // A brand-new manager each round forces the value to come back through
        // the index/database rather than any in-memory cache of a previous manager.
        var freshManager = new AssetManager();
        asset = freshManager.Load<SimpleAsset>("SimpleAssets/Grandpa");
        Assert.That(asset.Dble, Is.EqualTo(expected));

        asset.Dble += 1.0;
        freshManager.SaveSingle(asset);
        freshManager.Unload(asset);
        databaseFileProvider.AssetIndexMap.WaitPendingOperations();

        asset = null;
        GC.Collect();
    }
}
protected override AnimationClip LoadAnimation(ICommandContext commandContext, AssetManager assetManager)
{
    // Delegate parsing of the source file's animation data to the mesh converter
    // configured for this command.
    var converter = CreateMeshConverter(commandContext);
    return converter.ConvertAnimation(SourcePath, Location);
}
/// <summary>
/// Splits <paramref name="texImage"/> into two separately compressed textures — one
/// holding only the alpha component and one holding the color data — and saves both
/// through a new <see cref="AssetManager"/>. Used for output formats such as ETC1
/// that cannot store alpha.
/// </summary>
/// <param name="texTool">Texture tool performing decompression, mip-map generation and compression.</param>
/// <param name="texImage">Source image; mutated in place (decompressed, then recompressed to <paramref name="outputFormat"/>).</param>
/// <param name="originalTextureURL">URL of the original texture, used to derive the color/alpha output URLs.</param>
/// <param name="shouldGenerateMipMaps">True to generate mip-maps for the extracted alpha image.</param>
/// <param name="outputFormat">Compression format of both saved textures (defaults to ETC1).</param>
public static void CreateAndSaveSeparateTextures(TextureTool texTool, TexImage texImage, string originalTextureURL, bool shouldGenerateMipMaps, PixelFormat outputFormat = PixelFormat.ETC1)
{
    var assetManager = new AssetManager();
    var alphaTextureURL = GenerateAlphaTextureURL(originalTextureURL);
    var colorTextureURL = GenerateColorTextureURL(originalTextureURL);

    // create a new image containing only the alpha component
    texTool.Decompress(texImage, texImage.Format.IsSRgb());
    using (var alphaImage = texTool.CreateImageFromAlphaComponent(texImage))
    {
        // generate the mip-maps for the alpha component if required
        if (shouldGenerateMipMaps)
            texTool.GenerateMipMaps(alphaImage, Filter.MipMapGeneration.Box);

        // save the alpha component
        texTool.Compress(alphaImage, outputFormat);
        using (var outputImage = texTool.ConvertToXenkoImage(alphaImage))
            assetManager.Save(alphaTextureURL, outputImage.ToSerializableVersion());
    }

    // save the color component
    // NOTE(review): texImage was already decompressed above and only alphaImage was
    // compressed since; this second Decompress looks redundant — confirm TexImage
    // state semantics before removing it.
    texTool.Decompress(texImage, texImage.Format.IsSRgb());
    texTool.Compress(texImage, outputFormat);
    using (var outputImage = texTool.ConvertToXenkoImage(texImage))
        assetManager.Save(colorTextureURL, outputImage.ToSerializableVersion());
}
/// <summary>
/// Runs the full texture import pipeline on <paramref name="texImage"/> in place:
/// decompress, optional sRGB format fix-up, resize, alpha detection, color-key,
/// alpha pre-multiplication, mip-map generation, compression to the chosen output
/// format, and finally saving the result to <c>parameters.OutputUrl</c>.
/// Checks <paramref name="cancellationToken"/> between each stage.
/// </summary>
/// <param name="textureTool">Texture tool performing every image transformation.</param>
/// <param name="texImage">Image to process; mutated in place by every stage.</param>
/// <param name="parameters">Import settings; note DesiredAlpha is written back when set to Auto.</param>
/// <param name="cancellationToken">Token polled between stages to abort the import.</param>
/// <param name="logger">Logger for size decisions, alpha detection and the final result.</param>
/// <returns>Cancelled if the token fired between stages, otherwise Successful.</returns>
public static ResultStatus ImportTextureImage(TextureTool textureTool, TexImage texImage, ImportParameters parameters, CancellationToken cancellationToken, Logger logger)
{
    var assetManager = new AssetManager();

    // Apply transformations
    textureTool.Decompress(texImage, parameters.IsSRgb);

    // Special case when the input texture is monochromatic but it is supposed to be a color and we are working in SRGB
    // In that case, we need to transform it to a supported SRGB format (R8G8B8A8_UNorm_SRgb)
    // TODO: As part of a conversion phase, this code may be moved to a dedicated method in this class at some point
    if (parameters.TextureHint == TextureHint.Color && parameters.IsSRgb && (texImage.Format == PixelFormat.R8_UNorm || texImage.Format == PixelFormat.A8_UNorm))
    {
        textureTool.Convert(texImage, PixelFormat.R8G8B8A8_UNorm_SRgb);
    }

    if (cancellationToken.IsCancellationRequested) // abort the process if cancellation is demanded
        return ResultStatus.Cancelled;

    var fromSize = new Size2(texImage.Width, texImage.Height);
    var targetSize = parameters.DesiredSize;

    // Resize the image
    if (parameters.IsSizeInPercentage)
    {
        targetSize = new Size2((int)(fromSize.Width * targetSize.Width / 100.0f), (int)(fromSize.Height * targetSize.Height / 100.0f));
    }

    // Find the target size
    targetSize = FindBestTextureSize(parameters, targetSize, logger);

    // Resize the image only if needed
    if (targetSize != fromSize)
    {
        textureTool.Resize(texImage, targetSize.Width, targetSize.Height, Filter.Rescaling.Lanczos3);
    }

    if (cancellationToken.IsCancellationRequested) // abort the process if cancellation is demanded
        return ResultStatus.Cancelled;

    // texture size is now determined, we can cache it
    var textureSize = new Int2(texImage.Width, texImage.Height);

    // determine the alpha format of the texture when set to Auto
    // Note: this has to be done before the ColorKey transformation in order to be able to take advantage of image file AlphaDepth information
    if(parameters.DesiredAlpha == AlphaFormat.Auto)
    {
        var colorKey = parameters.ColorKeyEnabled? (Color?)parameters.ColorKeyColor : null;
        var alphaLevel = textureTool.GetAlphaLevels(texImage, new Rectangle(0, 0, textureSize.X, textureSize.Y), colorKey, logger);
        parameters.DesiredAlpha = alphaLevel.ToAlphaFormat();
    }

    // Apply the color key
    if (parameters.ColorKeyEnabled)
        textureTool.ColorKey(texImage, parameters.ColorKeyColor);

    if (cancellationToken.IsCancellationRequested) // abort the process if cancellation is demanded
        return ResultStatus.Cancelled;

    // Pre-multiply alpha only for relevant formats
    if (parameters.PremultiplyAlpha && texImage.Format.HasAlpha32Bits())
        textureTool.PreMultiplyAlpha(texImage);

    if (cancellationToken.IsCancellationRequested) // abort the process if cancellation is demanded
        return ResultStatus.Cancelled;

    // Generate mipmaps
    if (parameters.GenerateMipmaps)
    {
        // Box filtering on sRGB images requires power-of-two dimensions; fall back to linear otherwise.
        var boxFilteringIsSupported = !texImage.Format.IsSRgb() || (MathUtil.IsPow2(textureSize.X) && MathUtil.IsPow2(textureSize.Y));
        textureTool.GenerateMipMaps(texImage, boxFilteringIsSupported? Filter.MipMapGeneration.Box: Filter.MipMapGeneration.Linear);
    }

    if (cancellationToken.IsCancellationRequested) // abort the process if cancellation is demanded
        return ResultStatus.Cancelled;

    // Convert/Compress to output format
    // TODO: Change alphaFormat depending on actual image content (auto-detection)?
    var outputFormat = DetermineOutputFormat(parameters, textureSize, texImage.Format);
    textureTool.Compress(texImage, outputFormat, (TextureConverter.Requests.TextureQuality)parameters.TextureQuality);

    if (cancellationToken.IsCancellationRequested) // abort the process if cancellation is demanded
        return ResultStatus.Cancelled;

    // Save the texture
    using (var outputImage = textureTool.ConvertToParadoxImage(texImage))
    {
        if (cancellationToken.IsCancellationRequested) // abort the process if cancellation is demanded
            return ResultStatus.Cancelled;

        assetManager.Save(parameters.OutputUrl, outputImage.ToSerializableVersion());

        logger.Info("Compression successful [{3}] to ({0}x{1},{2})", outputImage.Description.Width, outputImage.Description.Height, outputImage.Description.Format, parameters.OutputUrl);
    }

    return ResultStatus.Successful;
}
private void SaveChangeResaveAssets(AssetManager assetManager)
{
    // Persist a three-generation chain, then mutate both the root and its child
    // and re-save only the root via SaveSingle.
    var root = new SimpleAsset("Grandpa", new SimpleAsset("Pa", new SimpleAsset("Son", null)));
    assetManager.Save(root);

    root.Dble = 22.0;
    root.Child.Dble = 42.0;
    assetManager.SaveSingle(root);
}