public Save(string destination, ImageEngineFormat format, MipHandling GenerateMips, int desiredMaxDimension, int mipToSave, bool removeAlpha = true, List customMasks = null)

| Parameter | Type | Description |
| --- | --- | --- |
| destination | string | File to save to. |
| format | ImageEngineFormat | Desired image format. |
| GenerateMips | MipHandling | Determines how mipmaps are handled during saving. |
| desiredMaxDimension | int | Maximum size for saved image. Resizes if required, but uses mipmaps if available. |
| mipToSave | int | Index of mipmap to save as single image. |
| removeAlpha | bool | True = Alpha removed. False = Uses threshold value and alpha values to mask RGB FOR DXT1 ONLY, otherwise removes completely. |
| customMasks | List (element type truncated in source extraction) | Custom user defined masks for colours. |

Returns: System.Threading.Tasks.Task
/// <summary>
/// Generates a thumbnail of image and saves it to a file.
/// </summary>
/// <param name="stream">Fully formatted image stream.</param>
/// <param name="destination">File path to save to.</param>
/// <param name="maxDimension">Maximum value for either image dimension.</param>
/// <param name="mergeAlpha">DXT1 only. True = Flatten alpha into RGB.</param>
/// <returns>True on success.</returns>
public static bool GenerateThumbnailToFile(Stream stream, string destination, int maxDimension, bool mergeAlpha = false)
{
    // Load the source image constrained to maxDimension (presumably resized on load - confirm against ImageEngineImage ctor).
    using (ImageEngineImage thumbnail = new ImageEngineImage(stream, null, maxDimension, true))
    {
        bool saved = false;

        // Write out as JPG keeping only the top mip.
        using (FileStream output = new FileStream(destination, FileMode.Create))
            saved = thumbnail.Save(output, ImageEngineFormat.JPG, MipHandling.KeepTopOnly, mergeAlpha: mergeAlpha, desiredMaxDimension: maxDimension);

        return saved;
    }
}
/// <summary>
/// Performs a bulk conversion of a bunch of images given conversion parameters.
/// </summary>
/// <param name="files">List of supported files to be converted.</param>
/// <param name="saveFolder">Destination folder of all textures. Can be null if <paramref name="useSourceAsDestination"/> is set.</param>
/// <param name="saveMipType">Determines how to handle mipmaps for converted images.</param>
/// <param name="useSourceAsDestination">True = Converted images are saved next to the originals.</param>
/// <param name="removeAlpha">True = Alpha is removed from converted images.</param>
/// <param name="destFormatDetails">Details about destination format.</param>
/// <param name="progressReporter">Progress reporting callback.</param>
/// <param name="useSourceFormat">No format conversion is performed if possible.</param>
/// <returns>Errors</returns>
public static async Task<ConcurrentBag<string>> BulkConvert(IEnumerable<string> files, ImageFormats.ImageEngineFormatDetails destFormatDetails, bool useSourceFormat, string saveFolder, MipHandling saveMipType = MipHandling.Default, bool useSourceAsDestination = false, bool removeAlpha = false, IProgress<int> progressReporter = null)
{
    ConcurrentBag<string> Failures = new ConcurrentBag<string>();

    // Test if we can parallelise uncompressed saving.
    // Only formats that don't support mips, or do but aren't block compressed, can be parallelised.
    // Never parallelise when reusing source formats.
    // (Simplified from the original `x |= !x && c` form: equivalent to !IsMippable || !IsBlockCompressed.)
    bool supportsParallel = false;
    if (!useSourceFormat)
        supportsParallel = !destFormatDetails.IsMippable || !destFormatDetails.IsBlockCompressed;

    if (EnableThreading && supportsParallel)
        Failures = await DoBulkParallel(files, destFormatDetails, saveFolder, saveMipType, useSourceAsDestination, removeAlpha, progressReporter);
    else
    {
        foreach (var file in files)
        {
            using (ImageEngineImage img = new ImageEngineImage(file))
            {
                // Using source format can only come into this leg of the operation.
                var saveFormatDetails = useSourceFormat ? img.FormatDetails : destFormatDetails;

                // This can stay destFormatDetails instead of saveFormatDetails as it shouldn't be able to get here if destFormatDetails not set.
                string filename = useSourceFormat ? Path.GetFileName(file) : Path.GetFileNameWithoutExtension(file) + "." + destFormatDetails.Extension;
                string path = Path.Combine(useSourceAsDestination ? Path.GetDirectoryName(file) : saveFolder, filename);
                path = UsefulThings.General.FindValidNewFileName(path);

                try
                {
                    await img.Save(path, saveFormatDetails, saveMipType, removeAlpha: removeAlpha);
                }
                catch (Exception e)
                {
                    Failures.Add(path + " Reason: " + e.ToString());
                }
            }

            progressReporter?.Report(1);  // Value not relevant.
        }
    }

    return Failures;
}
/// <summary>
/// Converts the queued files using a TPL Dataflow pipeline: images are encoded on
/// up to maxParallelism threads, then written to disk with at most 2 concurrent writes.
/// </summary>
/// <returns>Task that completes when all disk writes have finished.</returns>
Task DoBulkParallel()
{
    BufferBlock<string> fileNameStore = new BufferBlock<string>();
    int maxParallelism = ImageEngine.NumThreads == 1 ? 1 : (ImageEngine.NumThreads == -1 ? Environment.ProcessorCount : ImageEngine.NumThreads);

    // Define block to perform each conversion.
    var encoder = new TransformBlock<string, Tuple<byte[], string>>(file =>
    {
        byte[] data = null;
        string filename = Path.GetFileNameWithoutExtension(file) + "." + ImageFormats.GetExtensionOfFormat(SaveFormat);
        string path = Path.Combine(BulkUseSourceDestination ? Path.GetDirectoryName(file) : BulkSaveFolder, filename);
        path = UsefulThings.General.FindValidNewFileName(path);

        using (ImageEngineImage img = new ImageEngineImage(file))
        {
            try
            {
                data = img.Save(SaveFormat, SaveMipType, removeAlpha: GeneralRemovingAlpha, customMasks: customMasks);
            }
            catch (Exception e)
            {
                BulkConvertFailed.Add(path + " Reason: " + e.ToString());
            }
        }

        BulkProgressValue++;
        BulkStatus = $"Converting {BulkProgressValue}/{BulkProgressMax} images.";
        return new Tuple<byte[], string>(data, path);
    }, new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = maxParallelism, BoundedCapacity = maxParallelism });

    // Define block to write converted data to disk.
    // Limit to 2 disk write operations at a time, but allow many to be stored in its buffer.
    var diskWriter = new ActionBlock<Tuple<byte[], string>>(tuple =>
    {
        // BUGFIX: a failed conversion leaves data null - File.WriteAllBytes(path, null) would throw
        // ArgumentNullException and fault the pipeline. The failure is already recorded above, so skip.
        if (tuple.Item1 == null)
            return;

        // BUGFIX: guard the write so an IO error doesn't fault the whole pipeline
        // (matches the error handling of the static DoBulkParallel overload).
        try
        {
            File.WriteAllBytes(tuple.Item2, tuple.Item1);
        }
        catch (Exception e)
        {
            BulkConvertFailed.Add(tuple.Item2 + " Reason: " + e.ToString());
        }
    }, new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 2, BoundedCapacity = maxParallelism });

    // Link blocks together so completion propagates down the pipeline.
    fileNameStore.LinkTo(encoder, new DataflowLinkOptions { PropagateCompletion = true });
    encoder.LinkTo(diskWriter, new DataflowLinkOptions { PropagateCompletion = true });

    // Begin production. NOTE(review): fire-and-forget async lambda - exceptions from SendAsync are unobserved.
    new Action(async () =>
    {
        foreach (var file in BulkConvertFiles)
            await fileNameStore.SendAsync(file);

        fileNameStore.Complete();
    }).Invoke();

    return diskWriter.Completion;
}
/// <summary>
/// Runs the bulk conversion over the queued files, updating the progress and status
/// properties throughout. Uses the parallel pipeline when the save format allows it,
/// otherwise converts files one at a time on a background task.
/// </summary>
public async Task DoBulkConvert()
{
    // Reset progress/state before starting.
    BulkProgressMax = BulkConvertFiles.Count;
    BulkProgressValue = 0;
    BulkStatus = $"Converting {BulkProgressValue}/{BulkProgressMax} images.";
    BulkConvertFinished = false;
    BulkConvertRunning = true;

    await Task.Run(async () =>
    {
        // Parallel saving is only possible for formats that either have no mips,
        // or have them but aren't block compressed.
        bool supportsParallel = !ImageFormats.IsFormatMippable(SaveFormat);
        supportsParallel |= !supportsParallel && !ImageFormats.IsBlockCompressed(SaveFormat);

        if (supportsParallel)
        {
            await DoBulkParallel();
            return;
        }

        // Sequential leg: convert one file at a time.
        foreach (var sourceFile in BulkConvertFiles)
        {
            using (ImageEngineImage img = new ImageEngineImage(sourceFile))
            {
                string destName = Path.GetFileNameWithoutExtension(sourceFile) + "." + ImageFormats.GetExtensionOfFormat(SaveFormat);
                string destPath = Path.Combine(BulkUseSourceDestination ? Path.GetDirectoryName(sourceFile) : BulkSaveFolder, destName);
                destPath = UsefulThings.General.FindValidNewFileName(destPath);

                try
                {
                    await img.Save(destPath, SaveFormat, SaveMipType, removeAlpha: GeneralRemovingAlpha, customMasks: customMasks);
                }
                catch (Exception e)
                {
                    BulkConvertFailed.Add(destPath + " Reason: " + e.ToString());
                }
            }

            BulkProgressValue++;
            BulkStatus = $"Converting {BulkProgressValue}/{BulkProgressMax} images.";
        }
    });

    // Publish the final status, including a failure count if anything went wrong.
    BulkStatus = "Conversion complete! ";
    if (BulkConvertFailed.Count > 0)
        BulkStatus += $"{BulkConvertFailed.Count} failed to convert.";

    BulkProgressValue = BulkProgressMax;
    BulkConvertFinished = true;
    BulkConvertRunning = false;
}
/// <summary>
/// Bulk conversion via a TPL Dataflow pipeline: files are encoded on up to
/// maxParallelism threads, then written to disk with at most 2 concurrent writes.
/// </summary>
/// <param name="files">Files to be converted.</param>
/// <param name="destFormatDetails">Details about destination format.</param>
/// <param name="saveFolder">Destination folder. Ignored when <paramref name="useSourceAsDestination"/> is set.</param>
/// <param name="saveMipType">Determines how to handle mipmaps for converted images.</param>
/// <param name="useSourceAsDestination">True = Converted images are saved next to the originals.</param>
/// <param name="removeAlpha">True = Alpha is removed from converted images.</param>
/// <param name="progressReporter">Optional progress reporting callback.</param>
/// <returns>Collection of failure messages (path + reason).</returns>
static async Task<ConcurrentBag<string>> DoBulkParallel(IEnumerable<string> files, ImageFormats.ImageEngineFormatDetails destFormatDetails, string saveFolder, MipHandling saveMipType = MipHandling.Default, bool useSourceAsDestination = false, bool removeAlpha = false, IProgress<int> progressReporter = null)
{
    ConcurrentBag<string> failures = new ConcurrentBag<string>();
    BufferBlock<string> fileNameStore = new BufferBlock<string>();
    int maxParallelism = ImageEngine.NumThreads == 1 ? 1 : (ImageEngine.NumThreads == -1 ? Environment.ProcessorCount : ImageEngine.NumThreads);

    // Define block to perform each conversion.
    var encoder = new TransformBlock<string, Tuple<byte[], string>>(file =>
    {
        byte[] data = null;
        string filename = Path.GetFileNameWithoutExtension(file) + "." + destFormatDetails.Extension;
        string path = Path.Combine(useSourceAsDestination ? Path.GetDirectoryName(file) : saveFolder, filename);
        path = UsefulThings.General.FindValidNewFileName(path);

        using (ImageEngineImage img = new ImageEngineImage(file))
        {
            try
            {
                data = img.Save(destFormatDetails, saveMipType, removeAlpha: removeAlpha);
            }
            catch (Exception e)
            {
                failures.Add(path + " Reason: " + e.ToString());
            }
        }

        // BUGFIX: progressReporter is an optional parameter defaulting to null - use ?. to avoid an NRE
        // (the serial path in BulkConvert already guards this call the same way).
        progressReporter?.Report(1);  // Value not relevant.
        return new Tuple<byte[], string>(data, path);
    }, new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = maxParallelism, BoundedCapacity = maxParallelism });

    // Define block to write converted data to disk.
    // Limit to 2 disk write operations at a time, but allow many to be stored in its buffer.
    var diskWriter = new ActionBlock<Tuple<byte[], string>>(tuple =>
    {
        // BUGFIX: a failed conversion leaves data null; previously File.WriteAllBytes(path, null) threw
        // ArgumentNullException, adding a second misleading failure entry. The failure is already recorded.
        if (tuple.Item1 == null)
            return;

        // Re-resolve the name in case a parallel encode claimed the chosen path in the meantime.
        string path = UsefulThings.General.FindValidNewFileName(tuple.Item2);
        try
        {
            File.WriteAllBytes(path, tuple.Item1);
        }
        catch (Exception e)
        {
            failures.Add(path + " Reason: " + e.ToString());
        }
    }, new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 2, BoundedCapacity = maxParallelism });

    // Link blocks together so completion propagates down the pipeline.
    fileNameStore.LinkTo(encoder, new DataflowLinkOptions { PropagateCompletion = true });
    encoder.LinkTo(diskWriter, new DataflowLinkOptions { PropagateCompletion = true });

    // Begin production. NOTE(review): fire-and-forget async lambda - exceptions from SendAsync are unobserved.
    new Action(async () =>
    {
        foreach (var file in files)
        {
            await fileNameStore.SendAsync(file);
        }

        fileNameStore.Complete();
    }).Invoke();

    await diskWriter.Completion;
    return failures;
}