/// <summary>
/// Computes the overall completion percentage for <paramref name="currentOperation"/> out of
/// <see cref="TotalOperations"/> and reports it through <paramref name="progressChange"/>.
/// </summary>
/// <param name="currentOperation">Number of operations completed so far.</param>
/// <param name="progressChange">Callback that receives the percentage (0-100).</param>
protected void UpdateProgressPercentage(long currentOperation, ProgressChange progressChange)
{
    // Guard against division by zero: with TotalOperations == 0 the double division
    // yields Infinity and the (int) cast produces an undefined value.
    if (this.TotalOperations <= 0)
    {
        return;
    }

    int progressPercentage = (int)(((double)currentOperation / (double)this.TotalOperations) * 100);
    progressChange(progressPercentage);
}
/// <summary>
/// Internal implementation of the <see cref="WriteAsync(string, string, MetadataCollection, CancellationToken)"/> method that writes into a <paramref name="writer"/>
/// </summary>
/// <param name="writer">The stream to write to</param>
/// <param name="metadata">The metadata to be written</param>
/// <param name="token">The cancellation token</param>
/// <param name="textureAtlasPath">Path of the texture atlas corresponding to this metadata, LibGDX format requires it to be specified (needed for "page section" within the metadata file)</param>
/// <remarks>When the cancellation is requested, the already written data is left in the stream</remarks>
/// <returns>True on success, false on failure</returns>
private Task<bool> Write(string textureAtlasPath, StreamWriter writer, MetadataCollection metadata, CancellationToken token = default)
{
    return Task.Run(() =>
    {
        Progress = 0;
        int i = 0;

        // Page section starts
        writer.WriteLine(Path.GetFileName(textureAtlasPath));
        writer.WriteLine("size: {0}, {1}", metadata.Width, metadata.Height);
        writer.WriteLine("format: {0}", metadata.Format);
        writer.WriteLine("filter: Linear,Linear");
        writer.WriteLine("repeat: none");

        foreach (var imgMetadata in metadata)
        {
            if (token.IsCancellationRequested)
            {
                Progress = 0;
                return false;
            }

            // One region entry per image.
            writer.WriteLine(Path.GetFileNameWithoutExtension(imgMetadata["path"].ToString()));
            writer.WriteLine(" rotate: {0}", imgMetadata["rotate"]);
            writer.WriteLine(" xy: {0}, {1}", imgMetadata["finalX"].ToString(), imgMetadata["finalY"].ToString());
            writer.WriteLine(" size: {0}, {1}", imgMetadata["finalWidth"].ToString(), imgMetadata["finalHeight"].ToString());
            //writer.WriteLine(" split: {0}, {1}, {2}, {3}", , , , ); //not mandatory, but TODO ?
            //writer.WriteLine(" pad: {0}, {1}, {2}, {3}", , , , );
            writer.WriteLine(" orig: {0}, {1}", imgMetadata["origWidth"], imgMetadata["origHeight"]);
            //LibGDX wants offset from bottom-left instead of a top-left corner
            writer.WriteLine(" offset: {0}, {1}", imgMetadata["offsetX"], (int)imgMetadata["origHeight"] - ((int)imgMetadata["offsetY"] + (int)imgMetadata["height"]));
            writer.WriteLine(" index: -1"); //Would be useful for animations

            // BUGFIX: the old expression "i++ / metadata.Count * 100.0" performed
            // integer division first, so Progress stayed 0 for almost the whole run.
            Progress = (int)(++i / (double)metadata.Count * 100.0);
            ProgressChange?.Invoke(this, Progress);
        }
        return true;
    });
}
/// <summary>
/// Internal implementation of the <see cref="WriteAsync(string, string, MetadataCollection, CancellationToken)"/> method that writes into a <paramref name="writer"/>
/// </summary>
/// <param name="writer">The XmlWriter to write to</param>
/// <param name="metadata">The metadata to be written</param>
/// <param name="token">The cancellation token</param>
/// <param name="textureAtlasPath">Path of the texture atlas corresponding to this metadata, Unity format requires it to be specified</param>
/// <remarks>When the cancellation is requested, the already written data is left in the stream</remarks>
/// <returns>True on success, false on failure</returns>
private Task<bool> Write(string textureAtlasPath, XmlWriter writer, MetadataCollection metadata, CancellationToken token = default)
{
    //Does it make sense to use *Async ???
    return Task.Run(() =>
    {
        Progress = 0;
        int i = 0;

        writer.WriteStartDocument();
        writer.WriteRaw("\n");
        writer.WriteStartElement("TextureAtlas");
        writer.WriteAttributeString("imagePath", textureAtlasPath);
        writer.WriteRaw("\n"); //add new lines for better readability ..

        foreach (var imgMetadata in metadata)
        {
            if (token.IsCancellationRequested)
            {
                Progress = 0;
                return false;
            }

            // One SubTexture element per packed image.
            writer.WriteStartElement("SubTexture");
            writer.WriteAttributeString("name", imgMetadata["name"].ToString());
            writer.WriteAttributeString("x", imgMetadata["finalX"].ToString());
            writer.WriteAttributeString("y", imgMetadata["finalY"].ToString());
            writer.WriteAttributeString("width", imgMetadata["finalWidth"].ToString());
            writer.WriteAttributeString("height", imgMetadata["finalHeight"].ToString());
            writer.WriteEndElement();
            writer.WriteRaw("\n");

            // BUGFIX: the old expression "i++ / metadata.Count * 100.0" performed
            // integer division first, so Progress stayed 0 for almost the whole run.
            Progress = (int)(++i / (double)metadata.Count * 100.0);
            ProgressChange?.Invoke(this, Progress);
        }

        writer.WriteEndElement();
        return true;
    });
}
/// <summary>
/// Initializes the detector: wires up the progress/success/update callbacks,
/// creates the UPnP helper, subscribes to its protocol updates and sets the
/// default port.
/// </summary>
public Detect()
{
    // Method-group conversion is equivalent to the explicit delegate constructors.
    ProgressChanged = OnProgressChanged;
    SuccessChanged = OnSuccessChanged;
    UpdateBase = OnUpdateBase;

    upnp = new UPnP(this);
    upnp.ProtocolUPnP.Update += OnUPnPUpdate;

    Port = 31173;
}
/// <summary>
/// Initializes the detector: wires up the progress/success/update callbacks,
/// creates the UPnP helper restricted to the WAN IP connection service,
/// subscribes to its protocol updates and sets the default port.
/// </summary>
public Detect()
{
    // Method-group conversion is equivalent to the explicit delegate constructors.
    ProgressChanged = OnProgressChanged;
    SuccessChanged = OnSuccessChanged;
    UpdateBase = OnUpdateBase;

    // We are only interested to find the IGD device.
    upnp = new UPnP(this, "urn:schemas-upnp-org:service:WANIPConnection:1");
    upnp.ProtocolUPnP.Update += OnUPnPUpdate;

    Port = 31173;
}
/// <summary>
/// Exports every image of the collection into a folder, one file per image.
/// </summary>
/// <param name="PathOut">Destination folder (created when missing).</param>
/// <param name="OriginalName">Base name used for the generated file names.</param>
/// <param name="FormatoOutput">Output image format (defaults to jpg).</param>
public void Export(String PathOut, String OriginalName, FormatoOut FormatoOutput = FormatoOut.jpg)
{
    StateChange?.Invoke(this, State.InWorking);

    if (!Directory.Exists(PathOut))
    {
        Directory.CreateDirectory(PathOut);
    }

    if (ic.IsEmpty)
    {
        StateChange?.Invoke(this, State.Finish);
        return;
    }

    // Map the requested output format; anything unrecognized falls back to JPEG.
    ImageFormat format;
    switch (FormatoOutput)
    {
        case FormatoOut.png:
            format = ImageFormat.Png;
            break;
        case FormatoOut.bmp:
            format = ImageFormat.Bmp;
            break;
        default:
            format = ImageFormat.Jpeg;
            break;
    }

    String extension = FormatoOutput.ToString();
    int exported = 0;
    int total = ic.Collection.Count;

    foreach (Image image in ic.Collection)
    {
        // Report progress before writing, then bump the counter so the
        // generated file names are 1-based (exactly as before).
        ProgressChange?.Invoke(this, exported, total);
        exported++;
        ExportSingle(image, format, Path.Combine(PathOut, OriginalName + "_" + exported + "." + extension));
    }

    StateChange?.Invoke(this, State.Finish);
}
/// <summary>
/// Creates a downloader for the given resource, storing the target file name,
/// the companion message URL and all completion/failure/progress callbacks.
/// </summary>
/// <param name="url">URL of the file to download.</param>
/// <param name="filename">Local path the download is written to.</param>
/// <param name="messageUrl">URL of the accompanying message resource.</param>
/// <param name="progressCallback">Invoked as the download progresses.</param>
/// <param name="failedCallback">Invoked when the connection fails.</param>
/// <param name="completeCallback">Invoked when the download finishes.</param>
/// <param name="msgDone">Invoked when the message has been downloaded.</param>
public Downloader(string url, string filename, string messageUrl,
    ProgressChange progressCallback, ConnectionFailed failedCallback,
    OperationComplete completeCallback, MessageDownloaded msgDone)
{
    _Url = url;
    _Filename = filename;
    _MsgUrl = messageUrl;

    _ProgressCallback = progressCallback;
    _FailedCallback = failedCallback;
    _CompleteCallback = completeCallback;
    _MessageDownloaded = msgDone;
}
/// <summary>
/// Creates copy arguments with a progress callback, the interval between progress
/// notifications and an optional total length of the transfer.
/// </summary>
/// <param name="progressChangeCallback">Invoked as the copy progresses.</param>
/// <param name="progressChangeCallbackInterval">Minimum time between two notifications; must not be negative.</param>
/// <param name="totalLength">Total number of bytes to copy, or -1 when unknown.</param>
/// <exception cref="ArgumentOutOfRangeException">Thrown when the interval is negative.</exception>
public CopyFromArguments(ProgressChange progressChangeCallback, TimeSpan progressChangeCallbackInterval, long totalLength = -1)
{
    // Guard clause: reject negative intervals up front.
    if (progressChangeCallbackInterval.TotalSeconds < 0)
    {
        throw new ArgumentOutOfRangeException(
            nameof(progressChangeCallbackInterval),
            "ProgressChangeCallbackInterval has to be greater or equal to 0");
    }

    ProgressChangeCallback = progressChangeCallback;
    ProgressChangeCallbackInterval = progressChangeCallbackInterval;
    TotalLength = totalLength;
}
/// <summary>
/// Advances the task by the elapsed time scaled with the assigned desk's skill,
/// clamped to <c>Duration</c>, updates the UI progress bar and raises
/// <c>ProgressChange</c>. Marks the task completed once the full duration is reached.
/// </summary>
/// <param name="deltaTime">Elapsed time since the last update.</param>
public void MakeProgress(float deltaTime)
{
    // Progress can never exceed the task's total duration.
    float advanced = CurrentProgress + deltaTime * AssignedDesk.Skill;
    float clamped = Mathf.Min(Duration, advanced);

    m_UI.SetProgressBar(clamped / Duration);
    CurrentProgress = clamped;
    ProgressChange?.Invoke(CurrentProgress);

    if (CurrentProgress >= Duration)
    {
        IsCompleted = true;
    }
}
/// <inheritdoc />
public PackingResult PlaceRects(int width, int height, IEnumerable<PPRect> rects, CancellationToken token = default)
{
    Progress = 0;
    if (rects == null)
    {
        throw new ArgumentNullException($"The {nameof(rects)} cannot be null");
    }

    var sortedRects = imageSorter.SortImages(rects);

    freeRectanglesList.Clear();
    freeRectanglesList.Add(new PPRect(0, 0, width, height));

    // BUGFIX: the input size must come from the rect sequence itself. The old code
    // read packing.Count immediately after constructing the (still empty) list, so
    // every progress percentage was computed with a divisor of zero.
    int inputSize = sortedRects.Count();
    List<PPRect> packing = new List<PPRect>(inputSize);
    int placedRects = 0;

    foreach (var rectToPlace in sortedRects)
    {
        if (token.IsCancellationRequested)
        {
            Progress = 0;
            return null;
        }

        PPRect? freeRectToUse = freeRectExtractor.ExtractFreeRectangle(freeRectanglesList, rectToPlace); //parametrizable part
        if (freeRectToUse == null) //unable to pack
        {
            Progress = 100;
            return null;
        }

        // NOTE(review): the return value of DecideOrientationOfRect was never used;
        // the call is kept in case it has side effects — confirm and drop if it has none.
        DecideOrientationOfRect(rectToPlace);

        var freeRects = freeRectangleSplitter.SplitFreeRectangle(freeRectToUse.Value, rectToPlace); //parametrizable part
        freeRectangleMerger.MergeFreeRectangles(freeRectanglesList, freeRects); //parametrizable part

        var rectToPlaceOriented = rectOrientationSelector.DetermineAndApplyRectOrientation(rectToPlace); //parametrizable part
        packing.Add(new PPRect(freeRectToUse.Value.Left, freeRectToUse.Value.Top, freeRectToUse.Value.Left + rectToPlace.Width, freeRectToUse.Value.Top + rectToPlace.Height, rectToPlaceOriented.Image));
        freeRectanglePostProcessor?.PostProcess(freeRectanglesList, packing.Last());

        Progress = (int)((++placedRects / (double)inputSize) * 100.0);
        ProgressChange?.Invoke(this, Progress);
    }

    return new PackingResult(width, height, packing);
}
/// <summary>
/// Registers progress of individual inner jobs. Is thread safe.
/// </summary>
/// <param name="progress">New progress to be registered.</param>
/// <param name="cacheIndex">Id of inner job.</param>
private void RegisterProgress(float progress, int cacheIndex)
{
    // Each inner job contributes 1/progressCache.Length of the total, so only the
    // job's delta (scaled down accordingly) is applied to the aggregate.
    var dif = (progress - progressCache[cacheIndex]) / progressCache.Length;
    progressCache[cacheIndex] = progress;

    // Lock-free update of the shared total via compare-and-swap, retried a bounded
    // number of times (10) so heavy contention cannot spin forever.
    // NOTE(review): when the retry budget is exhausted the delta is silently
    // dropped — confirm that losing an update under contention is acceptable.
    int counter = 10;
    float totalProgressLocal = totalProgress;
    float newTotal = totalProgressLocal + dif;
    while (Interlocked.CompareExchange(ref totalProgress, newTotal, totalProgressLocal) != totalProgressLocal && counter > 0)
    {
        totalProgressLocal = totalProgress;
        newTotal = totalProgressLocal + dif;
        counter--;
    }

    // totalProgress is re-read here (not the CAS result), so the value reported to
    // subscribers may already include concurrent updates from other threads.
    ProgressChange?.Invoke(totalProgress);
}
/// <summary>
/// Restores a backup stored as a zipped blob: the archive is unzipped into an XML
/// blob, which is then streamed in batches into the destination. The intermediate
/// XML blob is deleted afterwards.
/// </summary>
/// <param name="destination">Data sink that receives the backup data.</param>
/// <param name="progressChange">Callback used to report progress while batching.</param>
public override void TransferData(BackupDataSource destination, ProgressChange progressChange)
{
    // Get the backup file from blob
    CloudBlockBlob zipBlob = this.container.GetBlockBlobReference(this.filename);
    BlobStream zipStream = zipBlob.OpenRead();

    // Create a temp blob for the xml file
    CloudBlockBlob xmlBlob = container.GetBlockBlobReference(this.xmlFilename);
    BlobStream xmlStream = xmlBlob.OpenWrite();

    // Unzip the backup file
    this.UnzipFile(zipStream, xmlStream);

    // Store the delegates in fields so BlockTranferWithProgressUpdate can use them
    this.blockTransfer = destination.WriteData;
    this.progressChange = progressChange;

    // Open a stream to the xml file
    using (this.inStream = new StreamReader(xmlBlob.OpenRead()))
    {
        // The total number of backup operations is determined by the stream size
        this.TotalOperations = (int)this.inStream.BaseStream.Length;

        // Initialise destination writer
        destination.InitialiseWriting(this.GetTables(this.inStream));

        // Split stream into batches & trigger delegate
        this.BatchStream(this.BlockTranferWithProgressUpdate, this.inStream.BaseStream);

        if (this.Cancelled)
        {
            // Exit the function if the user has cancelled.
            // NOTE(review): on cancellation the temp xml blob is NOT deleted —
            // confirm whether leaving it behind is intended.
            return;
        }

        // Finalise destination writer
        destination.FinaliseWriting();
    }

    // Delete the xml file
    xmlBlob.Delete();
}
/// <summary>
/// Splits an animated GIF into an <c>ImageCollection</c> holding one image per frame.
/// </summary>
/// <param name="Path">Path of the GIF file to split.</param>
/// <returns>The collection of extracted frames.</returns>
public ImageCollection Split(String Path)
{
    StateChange?.Invoke(this, State.InWorking);

    ImageCollection frames = new ImageCollection();

    // BUGFIX: dispose the source image even when an exception is thrown while
    // extracting frames (the original leaked it on failure).
    using (Image gifImg = Image.FromFile(Path))
    {
        FrameDimension dimension = new FrameDimension(gifImg.FrameDimensionsList[0]);
        int frameCount = gifImg.GetFrameCount(dimension);

        for (int i = 0; i < frameCount; i++)
        {
            ProgressChange?.Invoke(this, i, frameCount);
            gifImg.SelectActiveFrame(dimension, i);
            frames.Collection.Add(gifImg.CloneFast());
        }

        // Raised before the source image is disposed, matching the original order.
        StateChange?.Invoke(this, State.Finish);
    }

    return frames;
}
/// <summary>
/// Splits an animated GIF frame by frame, handing each frame to the
/// <c>ElementFinish</c> callback and disposing it immediately afterwards
/// (no collection is built).
/// </summary>
/// <param name="Path">Path of the GIF file to split.</param>
public void SplitOneByOne(String Path)
{
    StateChange?.Invoke(this, State.InWorking);

    // BUGFIX: dispose the source image and each cloned frame even when a callback
    // throws (the original leaked both on exception).
    using (Image gifImg = Image.FromFile(Path))
    {
        FrameDimension dimension = new FrameDimension(gifImg.FrameDimensionsList[0]);
        int frameCount = gifImg.GetFrameCount(dimension);

        for (int i = 0; i < frameCount; i++)
        {
            ProgressChange?.Invoke(this, i, frameCount);
            gifImg.SelectActiveFrame(dimension, i);

            using (Image frame = gifImg.CloneFast())
            {
                ElementFinish?.Invoke(frame);
            }
        }

        // Raised before the source image is disposed, matching the original order.
        StateChange?.Invoke(this, State.Finish);
    }
}
/// <summary>
/// Copies every table from this table-storage source into the destination,
/// reporting per-table progress through <paramref name="progressChange"/>.
/// </summary>
/// <param name="destination">Data sink that receives the table data.</param>
/// <param name="progressChange">Callback invoked after each table completes.</param>
public override void TransferData(BackupDataSource destination, ProgressChange progressChange)
{
    // Get all tables
    var tables = this.cloudTableClient.ListTables().ToArray();

    // The total number of backup operations is determined by the table count
    this.TotalOperations = tables.Length;

    // Continuation tokens for block-wise entity retrieval; GetTableData updates
    // them by ref, and both return to null once a table is exhausted.
    string nextPartitionKey = null;
    string nextRowKey = null;
    int currentTable = 1;

    // Initialise destination writer
    destination.InitialiseWriting(tables);

    // Iterate through each table
    foreach (string table in tables)
    {
        if (this.Cancelled)
        {
            // Exit the function if the user has cancelled
            return;
        }

        // Retrieve all entities for this table in blocks
        do
        {
            // Trigger delegate, passing in stream retrieved from table storage
            this.BatchStream(destination.WriteData, GetTableData(table, ref nextPartitionKey, ref nextRowKey));
        } while (nextPartitionKey != null || nextRowKey != null);

        this.UpdateProgressPercentage(currentTable, progressChange);
        currentTable++;
    }

    // Finalise destination writer
    destination.FinaliseWriting();
}
/// <summary>
/// Copies every table from this table-storage source into the destination,
/// reporting per-table progress through <paramref name="progressChange"/>.
/// </summary>
/// <param name="destination">Data sink that receives the table data.</param>
/// <param name="progressChange">Callback invoked after each table completes.</param>
public override void TransferData(BackupDataSource destination, ProgressChange progressChange)
{
    // Every table found in the account takes part in the backup; one
    // "operation" per table for progress-percentage purposes.
    var tables = this.cloudTableClient.ListTables().ToArray();
    this.TotalOperations = tables.Length;

    // Continuation tokens for block-wise entity retrieval; GetTableData updates
    // them by ref, and both return to null once a table is exhausted.
    string continuationPartitionKey = null;
    string continuationRowKey = null;
    int tableNumber = 0;

    destination.InitialiseWriting(tables);

    foreach (string table in tables)
    {
        // Bail out as soon as the user cancels.
        if (this.Cancelled)
        {
            return;
        }

        // Stream the table's entities out block by block until the continuation
        // tokens signal there is nothing left.
        do
        {
            this.BatchStream(destination.WriteData, GetTableData(table, ref continuationPartitionKey, ref continuationRowKey));
        }
        while (continuationPartitionKey != null || continuationRowKey != null);

        this.UpdateProgressPercentage(++tableNumber, progressChange);
    }

    destination.FinaliseWriting();
}
/// <summary>
/// Applies a list of patches to a mul/idx file pair: the originals are copied to
/// temporary files, each patch is appended to the mul copy (with the idx copy
/// updated to point at it), and the temp files finally replace the originals.
/// </summary>
/// <param name="idxPath">Path of the original idx file.</param>
/// <param name="mulPath">Path of the original mul file.</param>
/// <param name="newIdxPath">Path of the temporary idx file (real target path plus a 4-character suffix).</param>
/// <param name="newMulPath">Path of the temporary mul file (real target path plus a 4-character suffix).</param>
/// <param name="patches">Patches to apply.</param>
private void PatchFile(string idxPath, string mulPath, string newIdxPath, string newMulPath, List<Patch> patches)
{
    if (StatusChange != null)
    {
        StatusChange(this, new StatusChangeEventArgs(String.Format("Creating temp mul/idx file(s)...")));
    }

    // Work on copies so the originals stay intact until the very end.
    File.Copy(idxPath, newIdxPath, true);
    File.Copy(mulPath, newMulPath, true);

    if (StatusChange != null)
    {
        StatusChange(this, new StatusChangeEventArgs(String.Format("Applying {0} patches to {1}...", patches.Count, Path.GetFileName(newMulPath))));
    }

    // Each idx record is 12 bytes, hence length / 12 gives the record count.
    FileInfo idxFileInfo = new FileInfo(newIdxPath);
    FileIndex index = new FileIndex(Path.GetFileName(idxPath), Path.GetFileName(mulPath), (int)(idxFileInfo.Length / 12));

    BinaryWriter idx = new BinaryWriter(new FileStream(newIdxPath, FileMode.Open));
    BinaryWriter mul = new BinaryWriter(new FileStream(newMulPath, FileMode.Open));

    int oldPercent = 0;

    for (int p = 0; p < patches.Count; p++)
    {
        Patch patch = patches[p];

        int a = 0;

        // NOTE(review): 'a' is assigned but never read afterwards — confirm what
        // the 0xEEEE special case was meant to do.
        if (patch.BlockID == 0xEEEE)
        {
            a = 4;
        }

        /*
         * int pos;
         *
         * if (index[patch.BlockID].length > patch.Length)
         *     pos = index[patch.BlockID].lookup;
         * else
         */
        // Patched data is always appended at the current end of the mul file.
        int pos = Convert.ToInt32(mul.BaseStream.Length);

        // Rewrite the 12-byte idx record for this block: lookup, length, extra.
        idx.Seek(patch.BlockID * 12, SeekOrigin.Begin);
        idx.Write(pos);
        idx.Write(patch.Length);
        idx.Write(patch.Extra);

        if (patch.Length >= 0)
        {
            mul.Seek(pos, SeekOrigin.Begin);
            mul.Write(patch.Data, 0, patch.Length);
        }

        // Report progress only when the integer percentage actually changes.
        if (p != 0 && ProgressChange != null)
        {
            int percent = (p * 100) / patches.Count;

            if (percent != oldPercent)
            {
                ProgressChange.Invoke(this, new ProgressChangeEventArgs(percent, p, patches.Count));
                oldPercent = percent;
            }
        }
    }

    index.Close();

    if (idx != null)
    {
        idx.Close();
    }
    if (mul != null)
    {
        mul.Close();
    }

    if (StatusChange != null)
    {
        StatusChange(this, new StatusChangeEventArgs(String.Format("Moving temp mul/idx file(s)...")));
    }

    // Replace the originals: the temp paths minus their last 4 characters are the
    // real file names; delete any existing target first, then move.
    if (File.Exists(newIdxPath.Substring(0, newIdxPath.Length - 4)))
    {
        File.Delete(newIdxPath.Substring(0, newIdxPath.Length - 4));
    }

    File.Move(newIdxPath, newIdxPath.Substring(0, newIdxPath.Length - 4));

    if (File.Exists(newMulPath.Substring(0, newMulPath.Length - 4)))
    {
        File.Delete(newMulPath.Substring(0, newMulPath.Length - 4));
    }

    File.Move(newMulPath, newMulPath.Substring(0, newMulPath.Length - 4));

    if (this.StatusChange != null)
    {
        this.StatusChange(this, new StatusChangeEventArgs(string.Format("Cleaning up...", new object[0])));
    }

    // NOTE(review): the temp files were just moved away, so these deletes act on
    // paths that should no longer exist — confirm they are intentional.
    File.Delete(newIdxPath);
    File.Delete(newMulPath);

    #region Meh for now
    /* (A large block of superseded, commented-out patching code was removed during
       review — it duplicated the logic above using a rebuild-the-whole-file
       strategy; recover it from version history if ever needed.) */
    #endregion
}
/// <inheritdoc />
/// <remarks>Private implementation</remarks>
private IEnumerable<PPRect> PlaceRectsImpl(int width, int height, IEnumerable<PPRect> sortedRects, CancellationToken token = default)
{
    Progress = 0;

    // Group rects sharing the same dimensions so identical rects can be looked up fast.
    var rects = sortedRects.GroupBy(key => key, new PPRectDimensionComparer())
                .Select<IGrouping<PPRect, PPRect>, (PPRect Rect, List<PPRect> Rects)>(groupping => (groupping.Key, groupping.ToList()))
                .ToDictionary(pair => pair.Rect, pair => pair.Rects, new PPRectDimensionComparer());

    var sortedRectsList = sortedRects.ToList();
    int rectCount = sortedRectsList.Count;

    // BUGFIX: capture the total before any rect is removed. The old code divided by
    // sortedRectsList.Count, which shrinks as rects are placed, so the reported
    // progress overshot (the final item divided by an ever-smaller total).
    int totalRects = sortedRectsList.Count;
    int placedRects = 0;

    while (rectCount > 0)
    {
        if (token.IsCancellationRequested)
        {
            Progress = 0;
            yield break;
        }

        ////There is not enough feasible points
        if (feasiblePoints.Count == 0)
        {
            Progress = 100;
            yield break;
        }

        var (Rect, FeasiblePointIndex) = rectAndPointPicker.PickRectAndPoint(sortedRectsList, feasiblePoints.Select(x => (x.Point2D.X, x.Point2D.Y, x.VerticalEdge.Length, x.HorizontalEdge.Length)), width, height);

        //Check if there is even some feasible solution ..
        if (FeasiblePointIndex < 0)
        {
            Progress = 100;
            yield break;
        }

        var feasiblePoint = feasiblePoints.ElementAt(FeasiblePointIndex);

        var rect = Rect;
        var placedImage = PlaceRect(feasiblePoint.Point2D, rect);
        rects[rect].Remove(rect);
        sortedRectsList.Remove(rect);
        rectCount--;

        if (rectCount > 0)
        {
            // Track the smallest remaining dimensions so the envelope can be trimmed
            // of points no remaining rect could ever fit into.
            int minWidthOfRemaining = int.MaxValue;
            int minHeightOfRemaining = int.MaxValue;
            foreach (var keyValue in rects)
            {
                foreach (var rectangle in keyValue.Value)
                {
                    if (rectangle.Width < minWidthOfRemaining)
                    {
                        minWidthOfRemaining = rectangle.Width;
                    }
                    if (rectangle.Height < minHeightOfRemaining)
                    {
                        minHeightOfRemaining = rectangle.Height;
                    }
                }
            }

            AdjustEnvelope(width, height, feasiblePoint, placedImage, minWidthOfRemaining, minHeightOfRemaining);
        }

        Progress = (int)((++placedRects / (double)totalRects) * 100.0);
        ProgressChange?.Invoke(this, Progress);
        yield return placedImage;
    }
}
/// <summary>
/// Raises the <c>ProgressChange</c> event with the given completion percentage.
/// </summary>
/// <param name="percentage">Current progress value.</param>
protected virtual void OnProgressChange(float percentage)
{
    // Copy the delegate first so a concurrent unsubscribe cannot null it
    // between the check and the call (same semantics as the ?. operator).
    var handler = ProgressChange;
    if (handler != null)
    {
        handler(percentage);
    }
}
/// <summary>
/// Notifies subscribers of a new progress value via the <c>ProgressChange</c> event.
/// </summary>
/// <param name="newProgress">The progress value to report.</param>
public void Report(double newProgress)
{
    // Copy the delegate first so a concurrent unsubscribe cannot null it
    // between the check and the call (same semantics as the ?. operator).
    var handler = ProgressChange;
    if (handler != null)
    {
        handler(this, new ProgressEventArgs(newProgress));
    }
}
/// <summary>
/// Transfers all data from this source into <paramref name="destination"/>,
/// reporting progress through <paramref name="progressChange"/>.
/// </summary>
/// <param name="destination">Data sink that receives the transferred data.</param>
/// <param name="progressChange">Callback invoked as the transfer advances.</param>
public abstract void TransferData(BackupDataSource destination, ProgressChange progressChange);
/// <inheritdoc />
public PackingResult PlaceRects(int width, int height, IEnumerable<PPRect> rects, CancellationToken token = default)
{
    Progress = 0;
    if (width < 0 || height < 0)
    {
        throw new ArgumentOutOfRangeException($"The {nameof(width)} and {nameof(height)} should be non-negative");
    }

    var sortedInput = sorter.SortImages(rects);
    int inputSize = rects.Count();
    int placedRects = 0;

    // Tight bounding box of what has actually been placed so far.
    int actualWidth = 0;
    int actualHeight = 0;

    RectComparer rectComparer = new RectComparer();
    PointComparer ptComparer = new PointComparer();

    SortedSet<PPRect> currentPacking = new SortedSet<PPRect>(rectComparer);
    // Candidate anchor points for the next rect (value is unused, -1 is a dummy).
    SortedDictionary<SKPointI, int> pointsToTry = new SortedDictionary<SKPointI, int>(ptComparer)
    {
        { new SKPointI(0, 0), -1 } //the current packing is empty, so only point to try is point [0,0]
    };
    SKPointI[] pointsToAdd = new SKPointI[2];

    foreach (var x in sortedInput)
    {
        if (token.IsCancellationRequested)
        {
            Progress = 0;
            return null;
        }

        SKPointI? pointToRemove = null;
        foreach (var ptToTry in pointsToTry)
        {
            // Rect as it would lie if anchored at this candidate point.
            PPRect tested = new PPRect(ptToTry.Key.X, ptToTry.Key.Y, ptToTry.Key.X + x.Width, ptToTry.Key.Y + x.Height);
            var possibleIntersections = currentPacking.AsEnumerable(); //have to test everything

            // The rect must stay inside the target area and overlap nothing placed so far.
            if (ptToTry.Key.X + x.Width <= width && ptToTry.Key.Y + x.Height <= height && !Intersects(tested, possibleIntersections)) //safe to pack here
            {
                // Grow the occupied bounding box.
                if (ptToTry.Key.X + x.Width > actualWidth)
                {
                    actualWidth = ptToTry.Key.X + x.Width;
                }
                if (ptToTry.Key.Y + x.Height > actualHeight)
                {
                    actualHeight = ptToTry.Key.Y + x.Height;
                }

                int improved = 0;
                if (TryImprove(ref tested, currentPacking, 0)) //Try to position it further to the top / left
                {
                    improved++;
                }

                //Add it to the packing
                tested.Image = x.Image;
                currentPacking.Add(tested);

                // NOTE(review): when TryImprove relocated the rect (improved != 0),
                // pointToRemove stays null and the code below treats the item as
                // unplaceable even though it WAS added to the packing — confirm
                // whether this is the intended behavior.
                if (improved == 0)
                {
                    pointToRemove = ptToTry.Key;
                }

                // New candidate anchors: right of and below the placed rect.
                pointsToAdd[0] = new SKPointI(ptToTry.Key.X + x.Width, ptToTry.Key.Y);
                pointsToAdd[1] = new SKPointI(ptToTry.Key.X, ptToTry.Key.Y + x.Height);
                break;
            }
        }

        if (pointToRemove != null)
        {
            pointsToTry.Remove(pointToRemove.Value);
            pointsToTry[pointsToAdd[0]] = -1;
            pointsToTry[pointsToAdd[1]] = -1;

            Progress = (int)((++placedRects / (double)inputSize) * 100.0);
            ProgressChange?.Invoke(this, Progress);
        }
        else
        {
            Progress = 100;
            return null; //we cannot pack it anywhere
        }
    }

    //var result = new PackingResult(width, height, currentPacking.Select(x => (x.Value, x.Key))); // probably better to return result with actual width & height instead of those needed
    //actual height can be lower than height specified, width also BUT THIS IS NOT DESIRED, BECAUSE THIS CAN BE CALLED FROM FIXEDSIZE..? OR chhange size in FixedSize..
    var result = new PackingResult(actualWidth, actualHeight, currentPacking);
    return result;
}
/// <summary>
/// Exports all images into a single sprite-sheet image and writes it to the given stream.
/// </summary>
/// <param name="sw">Writer whose underlying stream receives the encoded sheet.</param>
/// <param name="FormatoOutput">Output image format (defaults to jpg).</param>
public void Export(StreamWriter sw, FormatoOut FormatoOutput = FormatoOut.jpg)
{
    StateChange?.Invoke(this, State.InWorking);

    if (ic.IsEmpty)
    {
        StateChange?.Invoke(this, State.Finish);
        return;
    }

    // Real size of a single frame (frames are laid out on a uniform grid).
    Size OriginalSize = ic.Collection[0].Size;
    // Number of frames to place.
    int NumeroFrame = ic.Count;
    // Rows needed to lay out all frames over the configured column count.
    int Righe = (int)Math.Ceiling(NumeroFrame / (float)Colonne);
    // Pixel size of the finished sheet.
    Size FinalSize = new Size(Colonne * OriginalSize.Width, Righe * OriginalSize.Height);

    // BUGFIX: Bitmap and Graphics are now disposed even if drawing or saving
    // throws (the original leaked both on exception).
    using (Bitmap b = new Bitmap(FinalSize.Width, FinalSize.Height))
    {
        using (Graphics g = Graphics.FromImage(b))
        {
            int r = 0, c = 0;
            int current = 0;
            foreach (Image i in ic.Collection)
            {
                ProgressChange?.Invoke(this, current++, NumeroFrame);

                // Top-left corner of this frame inside the sheet.
                Point p = new Point(c * OriginalSize.Width, r * OriginalSize.Height);
                g.DrawImageUnscaled(i, p);

                c++;
                if (c >= Colonne)
                {
                    c = 0;
                    r++;
                }
                if (r >= Righe)
                {
                    break;
                }
            }
        }

        if (FormatoOutput == FormatoOut.jpg)
        {
            b.Save(sw.BaseStream, ImageFormat.Jpeg);
        }
        else if (FormatoOutput == FormatoOut.png)
        {
            b.Save(sw.BaseStream, ImageFormat.Png);
        }
        else if (FormatoOutput == FormatoOut.bmp)
        {
            b.Save(sw.BaseStream, ImageFormat.Bmp);
        }
    }

    StateChange?.Invoke(this, State.Finish);
}
/// <summary>
/// Records the new progress value, logs it as a percentage and notifies subscribers.
/// </summary>
/// <param name="progress">Progress in the 0..1 range.</param>
public void Report(float progress)
{
    int percent = (int)(progress * 100);
    Logger.Log<ProgressRepporter>(LoggerLevel.Info, $"{Status}: {percent}%");

    Progress = progress;
    ProgressChange?.Invoke(this, Progress);
}
/// <summary>
/// Creates copy arguments with the given progress callback, a default
/// notification interval of 0.2 seconds and an unknown total length.
/// </summary>
/// <param name="progressChangeCallback">Callback invoked as the copy progresses.</param>
public CopyFromArguments(ProgressChange progressChangeCallback)
    : this(progressChangeCallback, TimeSpan.FromSeconds(0.2))
{
}
/// <summary>
/// Executes the directory-transfer job: recreates the source directory's files and
/// subdirectories at the destination by running one child job per entry, while
/// aggregating their progress. Rolls the destination back when the transfer did
/// not complete and it did not exist beforehand.
/// </summary>
/// <exception cref="DirectoryTransferException">Wraps any I/O, access, security or file-operation failure.</exception>
public void Run()
{
    wholeJob.ct.ThrowIfCancellationRequested();
    wholeJob.Args.To.Refresh();
    wholeJob.Args.From.Refresh();
    bool destinationExisted = wholeJob.Args.To.Exists;
    try
    {
        if (destinationExisted)
        {
            throw new IOException("Destination directory already exists.");
        }
        else if (!wholeJob.Args.From.Exists)
        {
            throw new IOException("Source directory does not exist.");
        }

        var files = wholeJob.Args.From.GetFiles();
        var dirs = wholeJob.Args.From.GetDirectories();
        wholeJob.Args.To.Create();

        // One progress slot per child job; RegisterProgress folds them into the total.
        progressCache = new float[files.Length + dirs.Length];
        int cacheInd = 0;

        foreach (var file in files)
        {
            // Capture a per-job copy of the index so the lambda does not close
            // over the mutating loop counter.
            int cacheIndLocal = cacheInd++;
            wholeJob.ct.ThrowIfCancellationRequested();
            var destFile = new FileInfo(Path.Combine(wholeJob.Args.To.FullName, file.Name));
            var job = new FileTransferJob(new FileTransferArguments(file, destFile, wholeJob.Args.Settings), p => RegisterProgress(p, cacheIndLocal), wholeJob.ct);
            job.Run();
        }

        foreach (var dir in dirs)
        {
            int cacheIndLocal = cacheInd++;
            wholeJob.ct.ThrowIfCancellationRequested();
            var destDir = new DirectoryInfo(Path.Combine(wholeJob.Args.To.FullName, dir.Name));
            var job = new DirectoryTransferJob(new DirectoryTransferArguments(dir, destDir, wholeJob.Args.Settings), p => RegisterProgress(p, cacheIndLocal), wholeJob.ct);
            job.Run();
        }

        // Rounding in the per-job aggregation can leave the total just below 100.
        if (totalProgress < 100)
        {
            totalProgress = 100f;
            ProgressChange?.Invoke(100f);
        }
    }
    // FIX: use short-circuiting '||' instead of the bitwise '|' — same matching
    // semantics, but the remaining type tests are skipped once one matches.
    catch (Exception e) when (e is IOException || e is UnauthorizedAccessException || e is SecurityException || e is FileOperationException)
    {
        throw new DirectoryTransferException(wholeJob.Args, e);
    }
    finally
    {
        if (totalProgress < 100)
        {
            //Rollback: remove a partially created destination, but never one
            //that already existed before this job started.
            wholeJob.Args.To.Refresh();
            if (wholeJob.Args.To.Exists && !destinationExisted)
            {
                wholeJob.Args.To.Delete(true);
            }
        }
    }
}
/// <summary>
/// Stores a progress value clamped to the [0, 1] range and notifies subscribers.
/// </summary>
/// <param name="p">Raw progress value; values outside [0, 1] are clamped.</param>
protected void SetProgress(float p)
{
    // Explicit clamp; NaN passes both comparisons unchanged, matching the
    // original Math.Min/Math.Max behavior.
    float clamped = p;
    if (clamped < 0f)
    {
        clamped = 0f;
    }
    else if (clamped > 1f)
    {
        clamped = 1f;
    }

    Progress = clamped;
    ProgressChange?.Invoke(Progress);
}