/// <summary>
/// Updates the UI with the state carried by a reported worker item: progress bar,
/// current card name, set image count, and the local/web preview images.
/// </summary>
private void ProgressChanged(WorkerItem workerItem)
{
    ProgressBar.Value = workerItem.progress;
    CurrentCard = workerItem.card.Properties[workerItem.alt].Properties
        .First(x => x.Key.Name == "Name").Value.ToString();

    // Increment the set's image count only when no local file existed but a web image was fetched.
    if (workerItem.set != null && workerItem.local == null && workerItem.web != null)
    {
        workerItem.set.ImageCount += 1;
    }

    // Nothing to preview — leave the current images untouched.
    if (workerItem.local == null && workerItem.web == null)
    {
        return;
    }

    LocalImage.Source = null;
    WebImage.Source = null;

    if (workerItem.local != null)
    {
        BitmapImage localBitmap = StreamToBitmapImage(workerItem.local);
        LocalImage.Source = localBitmap;
        LocalDimensions.Text = localBitmap.PixelWidth.ToString() + " x " + localBitmap.PixelHeight.ToString();
    }

    if (workerItem.web != null)
    {
        BitmapImage webBitmap = StreamToBitmapImage(workerItem.web);
        WebImage.Source = webBitmap;
        if (LocalImage.Source == null)
        {
            // No local preview available — show the downloaded image on both sides.
            LocalImage.Source = webBitmap;
        }
    }
}
/// <summary>
/// Updates the progress bar of the worker item with the given id (marshalling to
/// the UI thread when required) and removes the item from the panel once it
/// reaches its maximum value.
/// </summary>
/// <param name="id">Identifier of the worker item to update.</param>
/// <param name="value">New progress value.</param>
/// <param name="max">Maximum value for the progress bar (defaults to 100).</param>
public void UpdateWorkerProgressBar(int id, int value, int max = 100)
{
    WorkerItem item = WorkerItems.Find(e => e.ID == id);
    // Bug fix: List<T>.Find returns null when no item matches, so the original
    // "item.ID > -1" guard itself threw a NullReferenceException for unknown ids.
    if (item == null || item.ID <= -1)
    {
        return;
    }

    if (InvokeRequired)
    {
        item.Invoke((MethodInvoker)delegate
        {
            item.ProgressValue = value;
            item.ProgressBar.Maximum = max;
        });
    }
    else
    {
        item.ProgressValue = value;
        item.ProgressBar.Maximum = max;
    }

    // A finished item is removed from the panel.
    if (value == max && flpItems.Controls.Contains(item))
    {
        flpItems.Invoke((MethodInvoker)delegate { flpItems.Controls.Remove(item); });
    }
}
/// <summary>
/// Executes the callback carried by the worker item according to its Type:
/// 0 = async function with state, 1 = async function, 2 = action with state,
/// 3 = action. Exceptions are routed to the item's callback (or logged) and rethrown.
/// </summary>
private static async Task DoActionAsync(WorkerItem item)
{
    try
    {
        switch (item.Type)
        {
            case 0:
                await item.Function(item.State).ConfigureAwait(false);
                break;
            case 1:
                await item.FunctionAlt().ConfigureAwait(false);
                break;
            case 2:
                item.Action(item.State);
                break;
            case 3:
                item.ActionAlt();
                break;
        }
    }
    catch (Exception ex)
    {
        if (item.OnExceptionCallback != null)
        {
            item.OnExceptionCallback(ex);
        }
        else
        {
            Core.Log.Write(ex);
        }
        throw;
    }
}
/// <summary>
/// Adds a worker item to the tracked list and the panel, then resizes the form
/// to the narrowest item's width plus a 1/16 margin.
/// </summary>
/// <param name="wi">The worker item to add.</param>
public void AddWorkerItem(WorkerItem wi)
{
    WorkerItems.Add(wi);
    flpItems.Controls.Add(wi);
    // Min replaces the previous OrderBy(...).ToList().First() chain: same result
    // (the smallest Width) without sorting or the intermediate list allocation.
    // The old "Count > 0" check was redundant — the list can never be empty here
    // because wi was just added.
    Width = WorkerItems.Min(o => o.Width);
    Width += (Width / 16);
}
/// <summary>
/// Builds a chain of WorkerItems from the unit-of-work items, linking each new
/// item back to the one created before it.
/// </summary>
public void Execute()
{
    WorkerItem previousItem = null;
    foreach (var unitOfWorkItem in _unitOfWorkItems)
    {
        var repositoryEntry = _dataRepository.Get(unitOfWorkItem.Id);
        // Each item points to its predecessor; the first one points to null.
        previousItem = new WorkerItem(unitOfWorkItem.Id, repositoryEntry.Name, previousItem);
    }
}
/// <summary>
/// Removes the WorkerItem wrapping the given worker from WorkerItems,
/// logging an error when the worker is not tracked.
/// </summary>
/// <param name="worker">The worker whose item should be removed.</param>
public void RemoveWorker(Worker worker)
{
    WorkerItem workerItem = WorkerItems.Find(n => n.Worker == worker);
    // Bug fix: the original tested "worker == null" here, so a worker that was
    // not in the list slipped past the guard and Remove(null) silently did nothing.
    if (workerItem == null)
    {
        Debug.LogError("worker can't be removed, isn't in WorkerItems!");
        return;
    }
    WorkerItems.Remove(workerItem);
}
/// <summary>
/// Removes the first list entry whose worker number matches the given number
/// from the lstThreads control.
/// </summary>
private void RemoveWorker(int workerItemNumber)
{
    // Iterate backwards so RemoveAt does not disturb indices still to be visited.
    for (int i = lstThreads.Items.Count - 1; i >= 0; i--)
    {
        var entry = (WorkerItem)lstThreads.Items[i];
        if (entry.WorkerNumber == workerItemNumber)
        {
            lstThreads.Items.RemoveAt(i);
            break;
        }
    }
}
/// <summary>
/// Rewrites the caption of the thread entry matching workerNumber with the
/// updated caption text.
/// </summary>
private void UpdateThreadProgressLogs(int workerNumber, string updatedCaption)
{
    for (int i = 0; i < lstThreads.Items.Count; i++)
    {
        var entry = (WorkerItem)lstThreads.Items[i];
        if (entry.WorkerNumber != workerNumber)
        {
            continue;
        }
        entry.Caption = "Thread " + workerNumber + ": " + updatedCaption;
        // Re-assign the slot so the list control notices the changed item.
        lstThreads.Items[i] = entry;
        break;
    }
}
/// <summary>
/// Enlarge pool and return object. Return null if poolSize exceeds _workerMaximum
/// </summary>
/// <returns>The newly pooled worker item, or null when the pool is already full.</returns>
private WorkerItem<T> TryCreateAndEnlargePool()
{
    lock (_padLock)
    {
        // Guard clause: pool is at capacity, nothing can be created.
        if (_currentWorkers.Count >= WorkerMaximum)
        {
            return null;
        }
        var createdWorker = new WorkerItem<T>(_workerFactory.CreateInstance(_workerIdentifier.ToString()), true);
        _currentWorkers.Add(createdWorker);
        return createdWorker;
    }
}
/// <summary>
/// Verifies that executing DeleteWorkerCommand deletes the underlying worker,
/// removes its item from the Workers collection, and clears the selection.
/// </summary>
public async Task DeletesSelectedWorker()
{
    var workerToDelete = new Mock<IWorker>();
    var deleteCalled = false;
    workerToDelete.Setup(w => w.Delete()).Callback(() => deleteCalled = true);
    var workerItemToDelete = new WorkerItem(workerToDelete.Object);
    _workerList.Workers.Add(workerItemToDelete);
    _workerList.SelectedWorker = workerItemToDelete;

    var task = _workerList.DeleteWorkerCommand.ExecuteAsync(null);
    await task;

    // Bug fix: Workers holds WorkerItem instances, so asserting against the raw
    // IWorker mock could never fail. Check for the item that was actually added.
    CollectionAssert.DoesNotContain(_workerList.Workers, workerItemToDelete);
    Assert.IsTrue(deleteCalled);
    Assert.That(_workerList.SelectedWorker, Is.Null);
}
/// <summary>
/// Updates the count label of the worker item with the given id, marshalling to
/// the UI thread when required.
/// </summary>
/// <param name="id">Identifier of the worker item to update.</param>
/// <param name="val">New label text.</param>
public void UpdateWorkerLabel(int id, string val)
{
    WorkerItem item = WorkerItems.Find(e => e.ID == id);
    // Bug fix: List<T>.Find returns null when no item matches, so the original
    // "item.ID > -1" guard itself threw a NullReferenceException for unknown ids.
    if (item == null || item.ID <= -1)
    {
        return;
    }
    if (InvokeRequired)
    {
        item.Invoke((MethodInvoker)delegate { item.CountText = val; });
    }
    else
    {
        item.CountText = val;
    }
}
/// <summary>
/// Verifies that no PropertyChanged notification is raised for the edited worker
/// item when the modifier finishes with an unsuccessful state flag.
/// </summary>
public void NotRefreshsEditedWorkerIfWorkerModifierReturnsTrue()
{
    var editedWorkerMock = new Mock<IWorker>();
    var editedItem = new WorkerItem(editedWorkerMock.Object);
    _workerList.Workers.Add(editedItem);

    var propertyChangedRaised = false;
    editedItem.PropertyChanged += (s, e) => propertyChangedRaised = true;
    _workerList.SelectedWorker = editedItem;
    _workerList.EditWorkerCommand.Execute(null);

    // Simulate the modifier finishing without success.
    _workerModifier.Raise(
        m => m.ModificationFinished += null,
        new ModificationStateEventArgs(false, editedWorkerMock.Object));

    Assert.IsFalse(propertyChangedRaised);
}
/// <summary>
/// Updates the progress bar of the worker item with the given id, marshalling
/// to the UI thread when required.
/// </summary>
/// <param name="id">Identifier of the worker item to update.</param>
/// <param name="value">New progress value.</param>
/// <param name="max">Maximum value for the progress bar (defaults to 100).</param>
public void UpdateWorkerProgressBar(int id, int value, int max = 100)
{
    WorkerItem item = WorkerItems.Find(e => e.ID == id);
    // Bug fix: List<T>.Find returns null when no item matches, so the original
    // "item.ID > -1" guard itself threw a NullReferenceException for unknown ids.
    if (item == null || item.ID <= -1)
    {
        return;
    }
    if (InvokeRequired)
    {
        item.Invoke((MethodInvoker)delegate
        {
            item.ProgressValue = value;
            item.ProgressBar.Maximum = max;
        });
    }
    else
    {
        item.ProgressValue = value;
        item.ProgressBar.Maximum = max;
    }
}
/// <summary>
/// Creates the minimum number of workers in parallel and adds them to the
/// current pool, guarding the add with a spin lock shared via the closure.
/// </summary>
private void InitializeWorkers(int workerMinimum, IWorkerFactory<T> workerFactory)
{
    var addLock = new SpinLock();
    Parallel.For(0, workerMinimum, index =>
    {
        bool taken = false;
        try
        {
            // Instance creation happens outside the lock; only the Add is guarded.
            var pooledWorker = new WorkerItem<T>(workerFactory.CreateInstance(_workerIdentifier.ToString()), true);
            addLock.Enter(ref taken);
            _currentWorkers.Add(pooledWorker);
        }
        finally
        {
            if (taken)
            {
                addLock.Exit();
            }
        }
    });
}
/// <summary>
/// Creates a worker item and links it to the item created before it.
/// </summary>
/// <param name="id">Identifier of the item.</param>
/// <param name="name">Display name of the item.</param>
/// <param name="previous">The previously created item in the chain, or null.</param>
public WorkerItem(string id, string name, WorkerItem previous)
{
    this.Id = id;
    this.Name = name;
    this.Previous = previous;
}
/// <summary>
/// Runs input.Work over the index range [TasksLow, TasksHigh) split into chunks,
/// one dedicated thread per chunk, firing the chunk start/complete callbacks.
/// A final watcher thread waits for all workers and reports the overall result.
/// </summary>
/// <param name="input">Range, chunking, worker callbacks and executor options.</param>
public static void StaticWorkerFor(ParallelTasksStaticWorkerForInput input)
{
    if (input.TasksLow < 0 || input.TasksHigh < input.TasksLow) return;
    ModuleProc PROC = new ModuleProc("ParallelTasks", "StaticFor");

    // Derive a worker count / chunk size when the caller supplied none.
    if (input.WorkerCount <= 0)
    {
        if (input.Chunk > 0)
        {
            input.WorkerCount = (int)Math.Max(Math.Ceiling(Convert.ToDouble(input.TasksHigh) / Convert.ToDouble(input.Chunk)), 1);
        }
        else
        {
            input.WorkerCount = 1;
        }
    }
    if (input.Chunk <= 0) { input.Chunk = (input.TasksHigh / input.WorkerCount); }
    if (input.Chunk <= 0) input.Chunk = 1;
    if (input.TasksHigh < input.Chunk) input.Chunk = ((input.TasksHigh - input.TasksLow) / input.WorkerCount);

    CountdownEvent cde = new CountdownEvent(input.WorkerCount);
    Thread[] threads = new Thread[input.WorkerCount];
    int currentCount = 0;
    ParallelTaskOptions options = new ParallelTaskOptions();
    WorkerItem item = new WorkerItem()
    {
        ExecutorService = input.Executor,
        Result = new ParallelTaskResult(ParallelTaskResultStatus.Created),
        Completed = input.WorkCompleted,
        EventHandle = cde,
    };

    try
    {
        for (int i = 0; i < input.WorkerCount; i++)
        {
            threads[i] = Extensions.CreateThreadAndStart((o) =>
            {
                // Bug fix: use only the state object "k" inside this lambda. The
                // original also referenced the loop variable "i" for ThreadIndex;
                // that variable is shared across iterations and may already have
                // advanced by the time the thread runs.
                int k = (int)o;
                int start = input.TasksLow + (k * input.Chunk);
                int end = ((k == (input.WorkerCount - 1)) ? input.TasksHigh : (start + input.Chunk));

                // work input.Chunk started
                if (input.WorkChunkStarted != null)
                {
                    try
                    {
                        input.WorkChunkStarted(new ParallelTaskWorkChunkStartArgs()
                        {
                            Options = options,
                            ThreadIndex = k,
                            ChunkStart = start,
                            ChunkEnd = end,
                            SeqStart = 0,
                            SeqEnd = (end - 1 - start),
                        });
                    }
                    catch (Exception ex)
                    {
                        Log.Exception(PROC, ex);
                        item.Result.Exceptions.Add(ex);
                    }
                }

                // work
                int chunkProgress = start;
                for (int j = start, sj = 0; j < end; j++, sj++)
                {
                    if ((input.Executor != null && input.Executor.IsShutdown) || options.IsCancelled) break;
                    chunkProgress = j;
                    try
                    {
                        // Bug fix: currentCount was incremented a second time in a
                        // finally block, so overall progress counted every task twice
                        // (and the percentage could reach 200%).
                        int proIdx = Interlocked.Increment(ref currentCount);
                        int proPerc = (int)(((float)proIdx / (float)input.TasksHigh) * 100.0);
                        string text = string.Format("{0:D} of {1:D} ({2:D} %)", proIdx, input.TasksHigh, proPerc);
                        input.Work(new ParallelTaskWorkArgs()
                        {
                            Options = options,
                            ThreadIndex = k,
                            ChunkProgress = j,
                            ChunkSeqProgress = sj,
                            OverallProgress = proIdx,
                            Total = input.TasksHigh,
                            ProgressText = text,
                        });
                    }
                    catch (Exception ex)
                    {
                        Log.Exception(PROC, ex);
                        item.Result.Exceptions.Add(ex);
                    }
                    Thread.Sleep(input.SleepInMilliseconds);
                }

                // work input.Chunk completed
                if (input.WorkChunkCompleted != null)
                {
                    try
                    {
                        input.WorkChunkCompleted(new ParallelTaskWorkChunkCompletedArgs()
                        {
                            Options = options,
                            ThreadIndex = k,
                            ChunkProgress = chunkProgress,
                            OverallProgress = currentCount,
                        });
                    }
                    catch (Exception ex)
                    {
                        Log.Exception(PROC, ex);
                        item.Result.Exceptions.Add(ex);
                    }
                }

                cde.Signal();
            }, i, "StaticFor_" + i.ToString() + "_");
            Thread.Sleep(10);
        }
    }
    catch (Exception ex)
    {
        Log.Exception(PROC, ex);
    }
    finally
    {
        // Watcher thread: waits for every chunk to signal, then reports the result.
        Extensions.CreateThreadAndStart((o) =>
        {
            WorkerItem wi = o as WorkerItem;
            wi.EventHandle.Wait();
            wi.Result.Status = (((input.Executor != null && input.Executor.IsShutdown) || options.IsCancelled)
                ? ParallelTaskResultStatus.Canceled
                : ParallelTaskResultStatus.Completed);
            if (wi.Completed != null)
            {
                wi.Completed(new ParallelTaskWorkCompletedArgs() { Result = wi.Result, });
            }
        }, item, "StaticFor_Wait_");
    }
}
/// <summary>
/// Recursively walks sourceDirectory, raising ItemAvailable for every file found
/// below the top level. At level 0 only subdirectories are scanned and the
/// backup bookkeeping folders are skipped.
/// </summary>
public void BackupSingleFolder(DirectoryInfo sourceDirectory, int level)
{
    Exception?firstError = null;
    try
    {
        // Files are only published for nested levels; the root's own files are skipped.
        if (level > 0)
        {
            foreach (var sourceFile in sourceDirectory.EnumerateFiles())
            {
                if (_cancellationToken.IsCancellationRequested)
                {
                    break;
                }
                ItemAvailable?.Invoke(new WorkerItem(_parent, sourceFile));
            }
        }
    }
    catch (Exception ex)
    {
        _parent._errorMessages.Enqueue($"Folder {sourceDirectory.FullName}: {ex.Message}");
        firstError = ex;
    }
    if (_cancellationToken.IsCancellationRequested)
    {
        return;
    }
    try
    {
        foreach (var subSourceDirectory in sourceDirectory.EnumerateDirectories())
        {
            if (_cancellationToken.IsCancellationRequested)
            {
                break;
            }
            if (level == 0)
            {
                // Do not descend into the backup's own bookkeeping folders.
                if (0 == string.Compare(Current.BackupContentFolderName, subSourceDirectory.Name)
                    || 0 == string.Compare(Current.BackupNameFolderName, subSourceDirectory.Name))
                {
                    continue;
                }
            }
            BackupSingleFolder(subSourceDirectory, level + 1);
        }
    }
    catch (Exception ex2)
    {
        // Avoid enqueueing the same message twice for the same underlying failure.
        if (ex2.Message != firstError?.Message)
        {
            _parent._errorMessages.Enqueue($"Folder {sourceDirectory.FullName}: {ex2.Message}");
        }
    }
}
/// <summary>
/// Runs input.Work over the index range [TasksLow, TasksHigh) split into chunks,
/// one dedicated thread per chunk, firing the chunk start/complete callbacks.
/// A final watcher thread waits for all workers and reports the overall result.
/// </summary>
/// <param name="input">Range, chunking, worker callbacks and executor options.</param>
public static void StaticWorkerFor(ParallelTasksStaticWorkerForInput input)
{
    if (input.TasksLow < 0 || input.TasksHigh < input.TasksLow)
    {
        return;
    }
    ModuleProc PROC = new ModuleProc("ParallelTasks", "StaticFor");

    // Derive a worker count / chunk size when the caller supplied none.
    if (input.WorkerCount <= 0)
    {
        if (input.Chunk > 0)
        {
            input.WorkerCount = (int)Math.Max(Math.Ceiling(Convert.ToDouble(input.TasksHigh) / Convert.ToDouble(input.Chunk)), 1);
        }
        else
        {
            input.WorkerCount = 1;
        }
    }
    if (input.Chunk <= 0)
    {
        input.Chunk = (input.TasksHigh / input.WorkerCount);
    }
    if (input.Chunk <= 0)
    {
        input.Chunk = 1;
    }
    if (input.TasksHigh < input.Chunk)
    {
        input.Chunk = ((input.TasksHigh - input.TasksLow) / input.WorkerCount);
    }

    CountdownEvent cde = new CountdownEvent(input.WorkerCount);
    Thread[] threads = new Thread[input.WorkerCount];
    int currentCount = 0;
    ParallelTaskOptions options = new ParallelTaskOptions();
    WorkerItem item = new WorkerItem()
    {
        ExecutorService = input.Executor,
        Result = new ParallelTaskResult(ParallelTaskResultStatus.Created),
        Completed = input.WorkCompleted,
        EventHandle = cde,
    };

    try
    {
        for (int i = 0; i < input.WorkerCount; i++)
        {
            threads[i] = Extensions.CreateThreadAndStart((o) =>
            {
                // Bug fix: use only the state object "k" inside this lambda. The
                // original also referenced the loop variable "i" for ThreadIndex;
                // that variable is shared across iterations and may already have
                // advanced by the time the thread runs.
                int k = (int)o;
                int start = input.TasksLow + (k * input.Chunk);
                int end = ((k == (input.WorkerCount - 1)) ? input.TasksHigh : (start + input.Chunk));

                // work input.Chunk started
                if (input.WorkChunkStarted != null)
                {
                    try
                    {
                        input.WorkChunkStarted(new ParallelTaskWorkChunkStartArgs()
                        {
                            Options = options,
                            ThreadIndex = k,
                            ChunkStart = start,
                            ChunkEnd = end,
                            SeqStart = 0,
                            SeqEnd = (end - 1 - start),
                        });
                    }
                    catch (Exception ex)
                    {
                        Log.Exception(PROC, ex);
                        item.Result.Exceptions.Add(ex);
                    }
                }

                // work
                int chunkProgress = start;
                for (int j = start, sj = 0; j < end; j++, sj++)
                {
                    if ((input.Executor != null && input.Executor.IsShutdown) || options.IsCancelled)
                    {
                        break;
                    }
                    chunkProgress = j;
                    try
                    {
                        // Bug fix: currentCount was incremented a second time in a
                        // finally block, so overall progress counted every task twice
                        // (and the percentage could reach 200%).
                        int proIdx = Interlocked.Increment(ref currentCount);
                        int proPerc = (int)(((float)proIdx / (float)input.TasksHigh) * 100.0);
                        string text = string.Format("{0:D} of {1:D} ({2:D} %)", proIdx, input.TasksHigh, proPerc);
                        input.Work(new ParallelTaskWorkArgs()
                        {
                            Options = options,
                            ThreadIndex = k,
                            ChunkProgress = j,
                            ChunkSeqProgress = sj,
                            OverallProgress = proIdx,
                            Total = input.TasksHigh,
                            ProgressText = text,
                        });
                    }
                    catch (Exception ex)
                    {
                        Log.Exception(PROC, ex);
                        item.Result.Exceptions.Add(ex);
                    }
                    Thread.Sleep(input.SleepInMilliseconds);
                }

                // work input.Chunk completed
                if (input.WorkChunkCompleted != null)
                {
                    try
                    {
                        input.WorkChunkCompleted(new ParallelTaskWorkChunkCompletedArgs()
                        {
                            Options = options,
                            ThreadIndex = k,
                            ChunkProgress = chunkProgress,
                            OverallProgress = currentCount,
                        });
                    }
                    catch (Exception ex)
                    {
                        Log.Exception(PROC, ex);
                        item.Result.Exceptions.Add(ex);
                    }
                }

                cde.Signal();
            }, i, "StaticFor_" + i.ToString() + "_");
            Thread.Sleep(10);
        }
    }
    catch (Exception ex)
    {
        Log.Exception(PROC, ex);
    }
    finally
    {
        // Watcher thread: waits for every chunk to signal, then reports the result.
        Extensions.CreateThreadAndStart((o) =>
        {
            WorkerItem wi = o as WorkerItem;
            wi.EventHandle.Wait();
            wi.Result.Status = (((input.Executor != null && input.Executor.IsShutdown) || options.IsCancelled)
                ? ParallelTaskResultStatus.Canceled
                : ParallelTaskResultStatus.Completed);
            if (wi.Completed != null)
            {
                wi.Completed(new ParallelTaskWorkCompletedArgs() { Result = wi.Result, });
            }
        }, item, "StaticFor_Wait_");
    }
}
/// <summary>
/// Recursively backs up sourceDirectory into destinationDirectory, publishing a
/// WorkerItem for every file whose relative path passes the filter. Reparse
/// points (symlinks/junctions) consume one symLinkLevel each; traversal stops
/// when the level is exhausted.
/// </summary>
public void BackupSingleFolder(DirectoryInfo sourceDirectory, DirectoryInfo destinationDirectory, string relativeFolderName, FilterItemCollectionReadonly filter, int symLinkLevel, byte[] destinationNameBuffer)
{
    Exception?ex1 = null;
    try
    {
        foreach (var sourceFile in sourceDirectory.EnumerateFiles())
        {
            if (_cancellationToken.IsCancellationRequested)
            {
                break;
            }
            // Bug fix: a lower-cased "relativeFileName" used to be computed here
            // but was never used — removed as dead code.
            // NOTE(review): the filter receives the original-cased file name,
            // while the directory branch below lower-cases before filtering —
            // confirm which casing the filter expects.
            if (filter.IsPathIncluded(relativeFolderName + sourceFile.Name))
            {
                var readerItem = new WorkerItem(_parent, sourceFile, Path.Combine(destinationDirectory.FullName, sourceFile.Name), destinationNameBuffer);
                OutputAvailable?.Invoke(readerItem);
            }
        }
    }
    catch (Exception ex)
    {
        _parent._errorMessages.Enqueue($"Folder {sourceDirectory.FullName}: {ex.Message}");
        ex1 = ex;
    }
    if (_cancellationToken.IsCancellationRequested)
    {
        return;
    }
    try
    {
        foreach (var subSourceDirectory in sourceDirectory.EnumerateDirectories())
        {
            if (_cancellationToken.IsCancellationRequested)
            {
                break;
            }
            var relativeSubFolderName = string.Concat(relativeFolderName, subSourceDirectory.Name.ToLowerInvariant(), Path.DirectorySeparatorChar);
            if (filter.IsPathIncluded(relativeSubFolderName))
            {
                var subDestinationDirectory = destinationDirectory.CreateSubdirectory(subSourceDirectory.Name);
                int symLinkLevelLocally = symLinkLevel;
                // Each traversed reparse point consumes one symlink level.
                if ((subSourceDirectory.Attributes & FileAttributes.ReparsePoint) == FileAttributes.ReparsePoint)
                {
                    --symLinkLevelLocally;
                }
                if (symLinkLevelLocally >= 0)
                {
                    BackupSingleFolder(subSourceDirectory, subDestinationDirectory, relativeSubFolderName, filter, symLinkLevelLocally, destinationNameBuffer);
                }
                else
                {
                    _parent._errorMessages.Enqueue($"Folder ignored because user symlink limit was reached: {subSourceDirectory.FullName}");
                }
            }
        }
    }
    catch (Exception ex2)
    {
        // Avoid enqueueing the same message twice for the same underlying failure.
        if (ex2.Message != ex1?.Message)
        {
            _parent._errorMessages.Enqueue($"Folder {sourceDirectory.FullName}: {ex2.Message}");
        }
    }
}
/// <summary>
/// Update the library when invoked via the timer
/// </summary>
private void UpdateLibrary()
{
    try
    {
        // get a hook to the DB
        IDBClient db = Database.RetrieveClient();
        // retrieve the initial number of tracks
        int beforeTracks = Database.RetrieveNumberOfTracks(db);
        // get the audio library locations
        IList<AudioLibraryLocation> locations = Database.RetrieveLibraryLocations(db);
        // get the current timestamp, we'll use this in case anything gets modified while this is happening
        DateTime beforeUpdate = DateTime.Now;
        _log.Info("Starting library update at: " + beforeUpdate.ToLocalTime());
        try
        {
            // recurse through each of the library locations
            foreach (AudioLibraryLocation location in locations)
            {
                if (_log.IsDebugEnabled)
                {
                    _log.Debug("Updating library location: " + location.Path);
                }
                // initialise the list of directories to process
                IList<string> directoriesToProcess = new List<string>();
                // start traversing down each directory
                ProcessDirectory(directoriesToProcess, location.Path, location.LastWritten);
                // if there was any processing needed to be done
                if (directoriesToProcess.Count > 0)
                {
                    const int numThreads = 5;
                    // Bug fix: pass the named constant instead of repeating the magic number 5.
                    _libraryThreadPool = new FixedThreadPool(numThreads, ThreadPriority.Lowest);
                    _log.Debug("Created custom thread pool for library with " + numThreads + " threads");
                    // make all the workers
                    for (int i = 0; i < directoriesToProcess.Count; i++)
                    {
                        var worker = new AudioLibraryWorker();
                        worker.Directory = directoriesToProcess[i];
                        // attach it to a worker for the pool
                        var workerItem = new WorkerItem(worker.WorkerMethod);
                        // add it to the pool
                        _libraryThreadPool.QueueWorkerItem(workerItem);
                    }
                    // Bug fix: start the pool once after all workers are queued —
                    // it used to be (re)started on every loop iteration.
                    _libraryThreadPool.Start();
                }
                // reset the reference to when this update run started
                location.LastWritten = beforeUpdate;
                // store this updated location back in the DB
                Database.UpdateAddLibraryLocation(db, location);
                // commit after each location. if something goes wrong, we wont have to redo
                db.Commit();
            }
            // get the number of tracks after
            int afterTracks = Database.RetrieveNumberOfTracks(db);
            TimeSpan elapsedTime = DateTime.Now - beforeUpdate;
            _log.Info("Finished library update at: " + DateTime.Now.ToLocalTime() + ". Took: " + elapsedTime.TotalSeconds + " seconds");
            _log.Info("Imported " + (afterTracks - beforeTracks) + " tracks");
            // close the db
            db.Close();
        }
        catch (DatabaseClosedException)
        {
            _log.Debug("The database has been closed prematurely");
        }
    }
    catch (Db4oException ex)
    {
        _log.Error("Problem occurred when updating library", ex);
    }
    finally
    {
        _libraryUpdateTimer.Start();
    }
}
/// <summary>
/// Downloads card images for a set, comparing local files against the web
/// versions (timestamp and hi-res flag stored in the JPEG property items) and
/// replacing outdated ones. Progress is reported once per card alternative.
/// </summary>
/// <param name="setItem">The set whose card images should be downloaded.</param>
/// <param name="progress">Receives a WorkerItem report for every processed alternative.</param>
private void DownloadSet(SetItem setItem, IProgress<WorkerItem> progress)
{
    var i = 0;
    var set = setItem.set; //TODO: remove this and use setItem
    var setSize = set.Cards.Count();
    foreach (var c in set.Cards)
    {
        i++;
        foreach (var alt in c.Properties)
        {
            if (_cts.IsCancellationRequested)
            {
                break;
            }
            var cardInfo = GetCardInfo(setItem, c, alt.Key);
            var workerItem = new WorkerItem();
            workerItem.set = setItem;
            workerItem.alt = alt.Key;
            workerItem.progress = (double)i / setSize;
            workerItem.card = c;

            // get local image info
            var files = FindLocalCardImages(set, c, workerItem.alt);
            if (cardInfo == null)
            {
                if (set.Id.ToString() != "a584b75b-266f-4378-bed5-9ffa96cd3961")
                {
                    MessageBox.Show(String.Format("Cannot find scryfall data for card {0}.", c.Name));
                }
                progress.Report(workerItem);
                continue;
            }
            workerItem.local = files.Length > 0 ? UriToStream(files.First()) : null;

            // Pick the download url depending on the card layout / requested face.
            var imageDownloadUrl = "";
            var flipCard = false;
            switch (cardInfo.Layout)
            {
                case "transform":
                {
                    if (workerItem.alt == "transform")
                    {
                        imageDownloadUrl = xl ? cardInfo.LargeBackUrl : cardInfo.NormalBackUrl;
                    }
                    else
                    {
                        imageDownloadUrl = xl ? cardInfo.LargeUrl : cardInfo.NormalUrl;
                    }
                    break;
                }
                case "split":
                {
                    if (workerItem.alt == "")
                    {
                        imageDownloadUrl = xl ? cardInfo.LargeUrl : cardInfo.NormalUrl;
                    }
                    break;
                }
                case "flip":
                {
                    if (workerItem.alt == "flip")
                    {
                        flipCard = true;
                    }
                    imageDownloadUrl = xl ? cardInfo.LargeUrl : cardInfo.NormalUrl;
                    break;
                }
                default:
                {
                    imageDownloadUrl = xl ? cardInfo.LargeUrl : cardInfo.NormalUrl;
                    break;
                }
            }

            // if the card has no web image
            if (string.IsNullOrEmpty(imageDownloadUrl))
            {
                progress.Report(workerItem);
                continue;
            }

            // check if the web image has a newer timestamp
            // NOTE(review): assumes the url always carries a "?<timestamp>" query — confirm upstream.
            var webTimestamp = Convert.ToInt32(imageDownloadUrl.Split('?')[1]);
            if (workerItem.local != null)
            {
                using (var image = Image.FromStream(workerItem.local))
                {
                    if ((image.Width > 600 && xl) || (image.Width < 500 && !xl))
                    {
                        // Property id 40094 (keywords) holds the stored hi-res flag.
                        bool hires = (image.PropertyIdList.FirstOrDefault(x => x == 40094) == 0) ?
                            false : Convert.ToBoolean(Encoding.Unicode.GetString(image.GetPropertyItem(40094).Value));
                        if (hires && !update)
                        {
                            progress.Report(workerItem);
                            continue;
                        }
                        // Property id 40092 (comments) holds the stored web timestamp.
                        int localTimestamp = (image.PropertyIdList.FirstOrDefault(x => x == 40092) == 0) ?
                            0 : Convert.ToInt32(Encoding.Unicode.GetString(image.GetPropertyItem(40092).Value));
                        if (webTimestamp <= localTimestamp)
                        {
                            progress.Report(workerItem);
                            continue;
                        }
                    }
                }
            }

            // download image
            workerItem.web = UriToStream(imageDownloadUrl);
            if (workerItem.web == null)
            {
                progress.Report(workerItem);
                continue;
            }

            // Move the old local files into the graveyard folder before saving the new image.
            var garbage = Config.Instance.Paths.GraveyardPath;
            if (!Directory.Exists(garbage))
            {
                Directory.CreateDirectory(garbage);
            }
            foreach (var f in files.Select(x => new FileInfo(x)))
            {
                f.MoveTo(Path.Combine(garbage, f.Name));
            }

            using (var newimg = Image.FromStream(workerItem.web))
            {
                if (flipCard)
                {
                    newimg.RotateFlip(System.Drawing.RotateFlipType.Rotate180FlipNone);
                }
                else if (cardInfo.Layout == "Planar")
                {
                    newimg.RotateFlip(System.Drawing.RotateFlipType.Rotate90FlipNone);
                }

                var commentMetadata = (PropertyItem)FormatterServices.GetUninitializedObject(typeof(PropertyItem));
                commentMetadata.Id = 40092; // this is the comments field
                commentMetadata.Value = Encoding.Unicode.GetBytes(webTimestamp.ToString());
                commentMetadata.Len = commentMetadata.Value.Length;
                commentMetadata.Type = 1;
                newimg.SetPropertyItem(commentMetadata);

                var keywordsMetadata = (PropertyItem)FormatterServices.GetUninitializedObject(typeof(PropertyItem));
                keywordsMetadata.Id = 40094; // this is the keywords field
                keywordsMetadata.Value = Encoding.Unicode.GetBytes(cardInfo.HiRes.ToString());
                // Bug fix: Len was copied from commentMetadata.Value.Length; a
                // PropertyItem's Len must match its own Value buffer length.
                keywordsMetadata.Len = keywordsMetadata.Value.Length;
                keywordsMetadata.Type = 1;
                newimg.SetPropertyItem(keywordsMetadata);

                var imageUri = String.IsNullOrWhiteSpace(workerItem.alt) ? c.ImageUri : c.ImageUri + "." + workerItem.alt;
                var newPath = Path.Combine(set.ImagePackUri, imageUri + ".jpg");
                newimg.Save(newPath, ImageFormat.Jpeg);
            }
            progress.Report(workerItem);
        }
    }
}