/// <summary>
/// Deducts a fully-processed compressed chunk from the remaining-work counters
/// (<c>bytesToProcess</c> and <c>compressedBlocksToProcess</c>) under the importer write lock.
/// </summary>
/// <param name="compressedItems">The chunk that has just finished processing; only its
/// <c>Bytes.Length</c> is read here.</param>
private void Stats_MarkCompleted(BsnesImportStreamProcessor.CompressedWorkItems compressedItems)
{
    // Fix: acquire the lock BEFORE entering the try block. In the original code,
    // EnterWriteLock() was inside the try, so if the acquire itself threw (e.g. a
    // LockRecursionException), the finally would call ExitWriteLock() on a lock we
    // never held, raising SynchronizationLockException and masking the real error.
    importerLock.EnterWriteLock();
    try
    {
        bytesToProcess -= compressedItems.Bytes.Length;
        compressedBlocksToProcess--;
    }
    finally
    {
        importerLock.ExitWriteLock();
    }
}
/// <summary>
/// Expands a compressed chunk into individual work items, dispatches them to worker
/// tasks in fixed-size batches, waits for all batches to finish, then updates the
/// remaining-work stats for this chunk.
/// </summary>
/// <param name="compressedItems">The compressed chunk to expand and process.</param>
/// <remarks>
/// NOTE(review): this is <c>async void</c>, so any exception thrown here is unobservable
/// by callers — presumably invoked fire-and-forget from the import pipeline. Confirm
/// against call sites before changing the signature to <c>async Task</c>.
/// </remarks>
private async void ProcessCompressedWorkItem(BsnesImportStreamProcessor.CompressedWorkItems compressedItems)
{
    // Batch size tuning: we want parallel jobs in flight, but not too many at once.
    // A CompressedWorkItem currently averages ~12K work items, so this yields a
    // couple of tasks per chunk.
    const int batchSize = 6000;

    using var source = BsnesImportStreamProcessor.ProcessCompressedWorkItems(compressedItems).GetEnumerator();

    var dispatched = new List<Task>();
    var batch = new List<BsnesImportStreamProcessor.WorkItem>();
    var slotsLeft = batchSize;
    bool moreToDo;

    do
    {
        var exhausted = !source.MoveNext();
        moreToDo = !exhausted && !streamProcessor.CancelToken.IsCancellationRequested;

        if (!exhausted)
        {
            batch.Add(source.Current);
            slotsLeft--;
        }

        // Keep accumulating while there is more input and the batch isn't full.
        if (moreToDo && slotsLeft != 0)
        {
            continue;
        }

        // Flush: hand a snapshot of the batch to a worker task, then start fresh.
        // (On cancellation or end-of-input this also flushes the final partial —
        // possibly empty — batch, matching the original behavior.)
        var snapshot = new List<BsnesImportStreamProcessor.WorkItem>(batch);
        dispatched.Add(taskManager.Run(() => { ProcessWorkItems(snapshot); }));
        batch.Clear();
        slotsLeft = batchSize;
    } while (moreToDo);

    await Task.WhenAll(dispatched);
    Stats_MarkCompleted(compressedItems);
}