/// <summary>
/// Generates a thumbnail of image and saves it to a file.
/// </summary>
/// <param name="stream">Fully formatted image stream.</param>
/// <param name="destination">File path to save to.</param>
/// <param name="maxDimension">Maximum value for either image dimension.</param>
/// <param name="mergeAlpha">DXT1 only. True = Flatten alpha into RGB.</param>
/// <returns>True on success.</returns>
public static bool GenerateThumbnailToFile(Stream stream, string destination, int maxDimension, bool mergeAlpha = false)
{
    // Load scaled down to maxDimension, then re-encode as a JPG thumbnail (top mip only).
    using (ImageEngineImage image = new ImageEngineImage(stream, null, maxDimension, true))
    using (FileStream output = new FileStream(destination, FileMode.Create))
    {
        return image.Save(output, ImageEngineFormat.JPG, MipHandling.KeepTopOnly, mergeAlpha: mergeAlpha, desiredMaxDimension: maxDimension);
    }
}
/// <summary>
/// Creates a channel from an image. Can merge together with other channels to form a proper image again.
/// MUST BE GRAYSCALE (PixelFormats.Gray8).
/// </summary>
/// <param name="mainPath">Path to channel.</param>
public MergeChannelsImage(string mainPath)
{
    FilePath = mainPath;

    // BUGFIX: ImageEngineImage is IDisposable and was never disposed here (it is
    // disposed everywhere else in this file). Dispose once the dimensions and
    // thumbnail have been copied out.
    using (var img = new ImageEngineImage(mainPath))
    {
        Width = img.Width;
        Height = img.Height;
        Thumbnail = img.GetWPFBitmap(128);
        //Pixels = img.MipMaps[0].Pixels;
    }
}
/// <summary>
/// Performs a bulk conversion of a bunch of images given conversion parameters.
/// </summary>
/// <param name="files">List of supported files to be converted.</param>
/// <param name="destFormatDetails">Details about destination format. Must be set unless <paramref name="useSourceFormat"/> is true.</param>
/// <param name="useSourceFormat">No format conversion is performed if possible.</param>
/// <param name="saveFolder">Destination folder of all textures. Can be null if <paramref name="useSourceAsDestination"/> is set.</param>
/// <param name="saveMipType">Determines how to handle mipmaps for converted images.</param>
/// <param name="useSourceAsDestination">True = Converted images are saved next to the originals.</param>
/// <param name="removeAlpha">True = Alpha is removed from converted images.</param>
/// <param name="progressReporter">Progress reporting callback. Reported once per file.</param>
/// <returns>Errors, one entry per failed file.</returns>
public static async Task<ConcurrentBag<string>> BulkConvert(IEnumerable<string> files, ImageFormats.ImageEngineFormatDetails destFormatDetails, bool useSourceFormat, string saveFolder, MipHandling saveMipType = MipHandling.Default, bool useSourceAsDestination = false, bool removeAlpha = false, IProgress<int> progressReporter = null)
{
    ConcurrentBag<string> failures = new ConcurrentBag<string>();

    // Test if can parallelise uncompressed saving.
    // Only formats that don't support mips, or do but aren't block compressed, can be
    // parallelised. Never parallelise when keeping the source format (destFormatDetails
    // may not be set then). Simplified from the original redundant assign/|= pair,
    // which computed exactly: !useSourceFormat && (!IsMippable || !IsBlockCompressed).
    bool supportsParallel = !useSourceFormat && (!destFormatDetails.IsMippable || !destFormatDetails.IsBlockCompressed);

    if (EnableThreading && supportsParallel)
    {
        failures = await DoBulkParallel(files, destFormatDetails, saveFolder, saveMipType, useSourceAsDestination, removeAlpha, progressReporter);
    }
    else
    {
        foreach (var file in files)
        {
            using (ImageEngineImage img = new ImageEngineImage(file))
            {
                // Using source format can only come into this leg of the operation.
                var saveFormatDetails = useSourceFormat ? img.FormatDetails : destFormatDetails;

                // This can stay destFormatDetails instead of saveFormatDetails as it shouldn't
                // be able to get here if destFormatDetails not set.
                string filename = useSourceFormat ? Path.GetFileName(file) : Path.GetFileNameWithoutExtension(file) + "." + destFormatDetails.Extension;
                string path = Path.Combine(useSourceAsDestination ? Path.GetDirectoryName(file) : saveFolder, filename);
                path = UsefulThings.General.FindValidNewFileName(path);

                try
                {
                    await img.Save(path, saveFormatDetails, saveMipType, removeAlpha: removeAlpha);
                }
                catch (Exception e)
                {
                    failures.Add(path + " Reason: " + e.ToString());
                }
            }
            progressReporter?.Report(1);  // Value not relevant.
        }
    }
    return failures;
}
// Regenerates the two save-preview bitmaps (with/without alpha) by encoding the
// current image with the chosen SaveFormat and reloading the result, so the preview
// shows actual post-save quality.
// NOTE(review): async void — any exception thrown here is unobservable by callers.
// Presumably invoked from UI/property-changed handlers; consider async Task. TODO confirm.
internal async void GenerateSavePreview()
{
    // Nothing to preview without a loaded image and a chosen destination format.
    if (img == null || SaveFormat == ImageEngineFormat.Unknown)
    {
        return;
    }

    // KFreon: TGA saving not supported
    if (img.Format.SurfaceFormat == ImageEngineFormat.TGA)
    {
        SaveFormat = ImageEngineFormat.PNG;
    }

    stopwatch.Start();
    savePreviews = await Task.Run(() =>
    {
        using (MemoryStream stream = new MemoryStream())
        {
            Stopwatch watch = new Stopwatch();
            watch.Start();
            // FlattenBlend only applies to DXT1 alpha merging; other formats pass false.
            img.Save(stream, SaveFormat, MipHandling.KeepTopOnly, 1024, mergeAlpha: (SaveFormat == ImageEngineFormat.DDS_DXT1 ? FlattenBlend : false)); // KFreon: Smaller size for quicker loading
            watch.Stop();
            Debug.WriteLine($"Preview Save took {watch.ElapsedMilliseconds}ms");

            // Reload the just-encoded data so the preview reflects real save output.
            using (ImageEngineImage previewimage = new ImageEngineImage(stream))
            {
                BitmapSource[] tempImgs = new BitmapSource[2];
                tempImgs[0] = previewimage.GeneratePreview(0, true);   // with alpha
                tempImgs[1] = previewimage.GeneratePreview(0, false);  // without alpha
                return (tempImgs);
            }
        }
    });
    stopwatch.Stop();
    Debug.WriteLine($"Preview generation took {stopwatch.ElapsedMilliseconds}ms");
    stopwatch.Reset();

    OnPropertyChanged(nameof(SavePreview));
}
/// <summary>
/// Refreshes the save preview: encodes the loaded image with the current save
/// settings, reloads the result, and redraws the preview from its top mip.
/// </summary>
/// <param name="needRegenerate">False = reuse the previously generated preview image and only redraw.</param>
public async Task UpdateSavePreview(bool needRegenerate = true)
{
    timer.Restart();

    if (needRegenerate)
    {
        await Task.Run(() =>
        {
            // Save and reload to give accurate depiction of what it'll look like when saved.
            byte[] encoded = LoadedImage.Save(SaveFormat, MipHandling.KeepTopOnly, removeAlpha: GeneralRemovingAlpha, customMasks: customMasks);
            SaveCompressedSize = encoded.Length;
            savePreviewIMG = new ImageEngineImage(encoded);
        });
    }

    byte[] previewPixels = GetPixels(savePreviewIMG.MipMaps[0]);
    UpdatePreview(ref savePreview, savePreviewIMG.Width, savePreviewIMG.Height, previewPixels);

    // Update Properties
    OnPropertyChanged(nameof(SavePreview));

    timer.Stop();
    Trace.WriteLine($"Save preview of {SaveFormat} ({Width}x{Height}, No Mips) = {timer.ElapsedMilliseconds}ms.");
}
// Parallel bulk conversion pipeline: filenames -> parallel encoder -> throttled disk writer.
// Returns the disk writer's completion task so the caller can await the whole pipeline.
Task DoBulkParallel()
{
    BufferBlock<string> fileNameStore = new BufferBlock<string>();

    // NumThreads: 1 = no parallelism, -1 = one per core, otherwise explicit count.
    int maxParallelism = ImageEngine.NumThreads == 1 ? 1 : (ImageEngine.NumThreads == -1 ? Environment.ProcessorCount : ImageEngine.NumThreads);

    // Define block to perform each conversion.
    var encoder = new TransformBlock<string, Tuple<byte[], string>>(file =>
    {
        byte[] data = null;
        string filename = Path.GetFileNameWithoutExtension(file) + "." + ImageFormats.GetExtensionOfFormat(SaveFormat);
        string path = Path.Combine(BulkUseSourceDestination ? Path.GetDirectoryName(file) : BulkSaveFolder, filename);
        path = UsefulThings.General.FindValidNewFileName(path);

        using (ImageEngineImage img = new ImageEngineImage(file))
        {
            try
            {
                data = img.Save(SaveFormat, SaveMipType, removeAlpha: GeneralRemovingAlpha, customMasks: customMasks);
            }
            catch (Exception e)
            {
                BulkConvertFailed.Add(path + " Reason: " + e.ToString());
            }
        }

        BulkProgressValue++;
        BulkStatus = $"Converting {BulkProgressValue}/{BulkProgressMax} images.";
        return new Tuple<byte[], string>(data, path);
    }, new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = maxParallelism, BoundedCapacity = maxParallelism });

    // Define block to write converted data to disk.
    // Limit to 2 disk write operations at a time, but allow many to be stored in its buffer.
    var diskWriter = new ActionBlock<Tuple<byte[], string>>(tuple =>
    {
        // BUGFIX: a failed conversion leaves Item1 null (the error was already recorded
        // in BulkConvertFailed). Writing null would throw ArgumentNullException inside
        // the block and fault the pipeline, stalling the whole bulk convert.
        if (tuple.Item1 != null)
            File.WriteAllBytes(tuple.Item2, tuple.Item1);
    }, new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 2, BoundedCapacity = maxParallelism });

    // Link blocks together so completion flows downstream.
    fileNameStore.LinkTo(encoder, new DataflowLinkOptions { PropagateCompletion = true });
    encoder.LinkTo(diskWriter, new DataflowLinkOptions { PropagateCompletion = true });

    // Begin production. Fire-and-forget feeder: SendAsync honours BoundedCapacity
    // back-pressure so the whole file list is never buffered at once.
    new Action(async () =>
    {
        foreach (var file in BulkConvertFiles)
            await fileNameStore.SendAsync(file);
        fileNameStore.Complete();
    }).Invoke();

    return diskWriter.Completion;
}
// Runs the whole bulk conversion: chooses the parallel pipeline when the target
// format allows it, otherwise converts serially, updating progress/status state
// that the UI binds to throughout.
public async Task DoBulkConvert()
{
    // Reset UI-bound progress state before starting.
    BulkProgressMax = BulkConvertFiles.Count;
    BulkProgressValue = 0;
    BulkStatus = $"Converting {BulkProgressValue}/{BulkProgressMax} images.";
    BulkConvertFinished = false;
    BulkConvertRunning = true;

    await Task.Run(async () =>
    {
        // Test if can parallelise uncompressed saving
        // Below says: Only formats that don't support mips or do but aren't block compressed - can be parallised.
        // (x |= !x && y is equivalent to x = x || y.)
        bool supportsParallel = !ImageFormats.IsFormatMippable(SaveFormat);
        supportsParallel |= !supportsParallel && !ImageFormats.IsBlockCompressed(SaveFormat);

        if (supportsParallel)
            await DoBulkParallel();
        else
            foreach (var file in BulkConvertFiles)
            {
                using (ImageEngineImage img = new ImageEngineImage(file))
                {
                    string filename = Path.GetFileNameWithoutExtension(file) + "." + ImageFormats.GetExtensionOfFormat(SaveFormat);
                    string path = Path.Combine(BulkUseSourceDestination ? Path.GetDirectoryName(file) : BulkSaveFolder, filename);
                    // Avoid clobbering existing files by picking a fresh name.
                    path = UsefulThings.General.FindValidNewFileName(path);
                    try
                    {
                        await img.Save(path, SaveFormat, SaveMipType, removeAlpha: GeneralRemovingAlpha, customMasks: customMasks);
                    }
                    catch (Exception e)
                    {
                        // Record failure but keep converting the remaining files.
                        BulkConvertFailed.Add(path + " Reason: " + e.ToString());
                    }
                }
                BulkProgressValue++;
                BulkStatus = $"Converting {BulkProgressValue}/{BulkProgressMax} images.";
            }
    });

    // Final status for the UI, including a failure count when applicable.
    BulkStatus = "Conversion complete! ";
    if (BulkConvertFailed.Count > 0)
        BulkStatus += $"{BulkConvertFailed.Count} failed to convert.";

    BulkProgressValue = BulkProgressMax;
    BulkConvertFinished = true;
    BulkConvertRunning = false;
}
// Parallel bulk conversion pipeline (static variant used by BulkConvert):
// filenames -> parallel encoder -> throttled disk writer. Collects failures
// rather than throwing, so one bad file never aborts the batch.
static async Task<ConcurrentBag<string>> DoBulkParallel(IEnumerable<string> files, ImageFormats.ImageEngineFormatDetails destFormatDetails, string saveFolder, MipHandling saveMipType = MipHandling.Default, bool useSourceAsDestination = false, bool removeAlpha = false, IProgress<int> progressReporter = null)
{
    ConcurrentBag<string> failures = new ConcurrentBag<string>();
    BufferBlock<string> fileNameStore = new BufferBlock<string>();

    // NumThreads: 1 = no parallelism, -1 = one per core, otherwise explicit count.
    int maxParallelism = ImageEngine.NumThreads == 1 ? 1 : (ImageEngine.NumThreads == -1 ? Environment.ProcessorCount : ImageEngine.NumThreads);

    // Define block to perform each conversion.
    var encoder = new TransformBlock<string, Tuple<byte[], string>>(file =>
    {
        byte[] data = null;
        string filename = Path.GetFileNameWithoutExtension(file) + "." + destFormatDetails.Extension;
        string path = Path.Combine(useSourceAsDestination ? Path.GetDirectoryName(file) : saveFolder, filename);
        path = UsefulThings.General.FindValidNewFileName(path);

        using (ImageEngineImage img = new ImageEngineImage(file))
        {
            try
            {
                data = img.Save(destFormatDetails, saveMipType, removeAlpha: removeAlpha);
            }
            catch (Exception e)
            {
                failures.Add(path + " Reason: " + e.ToString());
            }
        }

        // BUGFIX: parameter defaults to null — use null-conditional like the serial
        // path in BulkConvert does, instead of throwing NullReferenceException.
        progressReporter?.Report(1); // Value not relevent.
        return new Tuple<byte[], string>(data, path);
    }, new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = maxParallelism, BoundedCapacity = maxParallelism });

    // Define block to write converted data to disk.
    // Limit to 2 disk write operations at a time, but allow many to be stored in its buffer.
    var diskWriter = new ActionBlock<Tuple<byte[], string>>(tuple =>
    {
        // BUGFIX: a failed conversion leaves Item1 null and was already recorded in
        // failures — skip the write instead of logging a second, misleading
        // ArgumentNullException entry for the same file.
        if (tuple.Item1 == null)
            return;

        // Re-validate the name at write time: two encoders running in parallel can
        // pick the same "valid" name before either file exists on disk.
        string path = UsefulThings.General.FindValidNewFileName(tuple.Item2);
        try
        {
            File.WriteAllBytes(path, tuple.Item1);
        }
        catch (Exception e)
        {
            failures.Add(path + " Reason: " + e.ToString());
        }
    }, new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 2, BoundedCapacity = maxParallelism });

    // Link blocks together so completion flows downstream.
    fileNameStore.LinkTo(encoder, new DataflowLinkOptions { PropagateCompletion = true });
    encoder.LinkTo(diskWriter, new DataflowLinkOptions { PropagateCompletion = true });

    // Begin production. Fire-and-forget feeder: SendAsync honours BoundedCapacity
    // back-pressure so the whole file list is never buffered at once.
    new Action(async () =>
    {
        foreach (var file in files)
        {
            await fileNameStore.SendAsync(file);
        }
        fileNameStore.Complete();
    }).Invoke();

    await diskWriter.Completion;
    return failures;
}
// Two-phase image load: immediately loads a small (256px) version for a fast first
// preview, while the full-size image and all its mip previews load on a background
// task; when that finishes, the full image replaces the small one.
public async Task LoadImage(string path)
{
    bool testing = false; // Set to true to load mips single threaded and only the full image instead of a smaller one first.
    Task<List<object>> fullLoadingTask = null;

    if (!testing)
    {
        // Load full size image
        ////////////////////////////////////////////////////////////////////////////////////////
        fullLoadingTask = Task.Run(() =>
        {
            ImageEngineImage fullimage = new ImageEngineImage(path);
            List<BitmapSource> alphas = new List<BitmapSource>();
            List<BitmapSource> nonalphas = new List<BitmapSource>();

            // Pre-render every mip level, both with and without alpha.
            for (int i = 0; i < fullimage.NumMipMaps; i++)
            {
                alphas.Add(fullimage.GeneratePreview(i, true));
                nonalphas.Add(fullimage.GeneratePreview(i, false));
            }

            // Untyped list: [0] = full image, [1] = alpha previews, [2] = non-alpha previews.
            List<object> bits = new List<object>();
            bits.Add(fullimage);
            bits.Add(alphas);
            bits.Add(nonalphas);
            return (bits);
        });
        ////////////////////////////////////////////////////////////////////////////////////////
    }

    // Reset save-related state from any previously loaded image.
    SaveSuccess = null;
    Previews.Clear();
    savePreviews = new BitmapSource[2];
    SavePath = null;
    SaveFormat = ImageEngineFormat.Unknown;

    stopwatch.Start();
    ////////////////////////////////////////////////////////////////////////////////////////
    // Quick first load: full image when testing, otherwise a 256px version for responsiveness.
    if (testing)
    {
        img = await Task.Run(() => new ImageEngineImage(path));
    }
    else
    {
        img = await Task.Run(() => new ImageEngineImage(path, 256, false));
    }
    ////////////////////////////////////////////////////////////////////////////////////////

    Console.WriteLine("");
    Console.WriteLine($"Format: {img.Format}");
    stopwatch.Stop();
    Console.WriteLine($"Image Loading: {stopwatch.ElapsedMilliseconds}");
    stopwatch.Restart();

    Previews.Add(img.GeneratePreview(0, ShowAlphaPreviews));
    MipIndex = 1;  // 1 based

    stopwatch.Stop();
    Debug.WriteLine($"Image Preview: {stopwatch.ElapsedMilliseconds}");
    stopwatch.Reset();

    OnPropertyChanged(nameof(ImagePath));
    OnPropertyChanged(nameof(Format));
    OnPropertyChanged(nameof(NumMipMaps));
    OnPropertyChanged(nameof(Preview));
    OnPropertyChanged(nameof(MipWidth));
    OnPropertyChanged(nameof(MipHeight));

    // KFreon: Get full image details
    ////////////////////////////////////////////////////////////////////////////////////////
    if (!testing)
    {
        // Swap the quick 256px image for the fully loaded one once it's ready.
        List<object> FullImageObjects = await fullLoadingTask;
        double? oldMipWidth = MipWidth;
        img = (ImageEngineImage)FullImageObjects[0];
        AlphaPreviews = (List<BitmapSource>)FullImageObjects[1];
        NonAlphaPreviews = (List<BitmapSource>)FullImageObjects[2];

        UpdatePreviews();

        // KFreon: Set selected mip index
        /*for (int i = 0; i < Previews.Count; i++)
         * {
         *     if (Previews[i].Width == oldMipWidth)
         *     {
         *         MipIndex = i + 1;  // 1 based
         *         break;
         *     }
         * }*/
        MipIndex = 1;
    }
    ////////////////////////////////////////////////////////////////////////////////////////

    OnPropertyChanged(nameof(NumMipMaps));
    OnPropertyChanged(nameof(Preview));
    OnPropertyChanged(nameof(MipIndex));
    OnPropertyChanged(nameof(MipWidth));
    OnPropertyChanged(nameof(MipHeight));
    OnPropertyChanged(nameof(img));
}
// Extracts image details (format, dimensions, mip count), builds a small JPG
// thumbnail, and computes a CRC32 hash of the raw data for duplicate detection.
internal async Task GetDetails(byte[] imgData)
{
    if (IsDef)
        return;

    try
    {
        // Hash data for duplicate checking purposes
        var hashGetter = Task.Run(() => CRC32.BlockChecksum(imgData)); // Put it off thread

        // Get image details and build thumbnail.
        DDSGeneral.DDS_HEADER header = null;
        using (MemoryStream ms = new MemoryStream(imgData))
        {
            Format = ImageFormats.ParseFormat(ms, null, ref header).SurfaceFormat;

            ImageEngineImage image = null;
            try
            {
                if (header != null)
                {
                    Width = header.dwWidth;
                    Height = header.dwHeight;  // BUGFIX: was header.dwWidth (copy-paste), giving square dims for every DDS.
                    Mips = header.dwMipMapCount;
                    image = new ImageEngineImage(ms, null, 64, true);

                    // Often the header of DDS' are not set properly resulting in Mips = 0 - NOTE can't
                    // just read image.Mips as the image has been shrunk and doesn't have the same
                    // properties as originally.
                    if (Mips == 0)
                    {
                        int tempMips = 0;
                        DDSGeneral.EnsureMipInImage(ms.Length, Width, Height, 4, new CSharpImageLibrary.Format(Format), out tempMips);
                        Mips = tempMips;
                    }
                }
                else
                {
                    // Non-DDS: take the details straight from the decoded image.
                    image = new ImageEngineImage(ms);
                    Width = image.Width;
                    Height = image.Height;
                    Mips = image.NumMipMaps;
                }

                // Thumbnail
                Thumb.StreamThumb = new MemoryStream();
                image.Save(Thumb.StreamThumb, ImageEngineFormat.JPG, MipHandling.Default, 64);
            }
            finally
            {
                // Dispose even when Save throws (original leaked the image on that path).
                image?.Dispose();
            }
        }
        FileHash = await hashGetter;
    }
    catch (Exception e)
    {
        DebugOutput.PrintLn($"Failed to get image information for: {Name}. Reason: {e.ToString()}.");
    }
}