/// <summary>
/// Runs <paramref name="action"/> for every entry of <paramref name="collection"/>,
/// fanning the work out over up to <c>MaxDegreeOfParallelism</c> pooled workers.
/// Each worker receives a per-worker local created by <paramref name="initializeLocal"/>
/// and passed to <paramref name="finalizeLocal"/> (when provided) once its batches are done.
/// The first exception captured by a worker is rethrown on the calling thread.
/// </summary>
/// <param name="collection">Dictionary whose entries are processed; must not be null.</param>
/// <param name="initializeLocal">Factory for the per-worker local state.</param>
/// <param name="action">Callback invoked once per key/value pair with the worker-local state.</param>
/// <param name="finalizeLocal">Optional cleanup invoked with each worker-local state.</param>
public static void ForEach<TKey, TValue, TLocal>([NotNull] Dictionary<TKey, TValue> collection, [Pooled] Func<TLocal> initializeLocal, [Pooled] Action<KeyValuePair<TKey, TValue>, TLocal> action, [Pooled] Action<TLocal> finalizeLocal = null)
{
    // Sequential fast path: parallelism disabled or at most one item.
    if (MaxDegreeOfParallelism <= 1 || collection.Count <= 1)
    {
        ExecuteBatch(collection, 0, collection.Count, initializeLocal, action, finalizeLocal);
    }
    else
    {
        var state = BatchState.Acquire();
        try
        {
            // Ceiling division so all items are covered by batchCount batches.
            var batchCount = Math.Min(MaxDegreeOfParallelism, collection.Count);
            var batchSize = (collection.Count + (batchCount - 1)) / batchCount;

            // Kick off a worker, then perform work synchronously
            state.AddReference();
            Fork(collection, batchSize, MaxDegreeOfParallelism, initializeLocal, action, finalizeLocal, state);

            // Wait for all workers to finish
            state.WaitCompletion(collection.Count);

            // Surface the first worker exception on the calling thread;
            // Exchange clears it so pooled state is clean for reuse.
            var ex = Interlocked.Exchange(ref state.ExceptionThrown, null);
            if (ex != null)
            {
                throw ex;
            }
        }
        finally
        {
            state.Release();
        }
    }
}
/// <inheritdoc/>
public override void Process(BatchState batchState)
{
    // Build a remover from the configured arguments and strip the
    // matching fields/items from the current DAT file.
    var fieldRemover = new Remover();
    fieldRemover.PopulateExclusionsFromList(Arguments);
    fieldRemover.ApplyRemovals(batchState.DatFile);
}
/// <summary>
/// Runs <paramref name="action"/> for every entry of <paramref name="collection"/>,
/// fanning the work out over up to <c>MaxDegreeOfParallelism</c> pooled workers,
/// with per-worker local state from <paramref name="initializeLocal"/> /
/// <paramref name="finalizeLocal"/>.
/// </summary>
/// <remarks>
/// NOTE(review): unlike other variants of this method, worker exceptions are
/// not rethrown here after completion — confirm whether that is intentional.
/// </remarks>
public static void ForEach<TKey, TValue, TLocal>([NotNull] Dictionary<TKey, TValue> collection, [Pooled] Func<TLocal> initializeLocal, [Pooled] Action<KeyValuePair<TKey, TValue>, TLocal> action, [Pooled] Action<TLocal> finalizeLocal = null)
{
    // Sequential fast path: parallelism disabled or at most one item.
    if (MaxDegreeOfParallelism <= 1 || collection.Count <= 1)
    {
        ExecuteBatch(collection, 0, collection.Count, initializeLocal, action, finalizeLocal);
    }
    else
    {
        var state = BatchState.Acquire();
        try
        {
            // Ceiling division so all items are covered by batchCount batches.
            var batchCount = Math.Min(MaxDegreeOfParallelism, collection.Count);
            var batchSize = (collection.Count + (batchCount - 1)) / batchCount;

            // Kick off a worker, then perform work synchronously
            state.AddReference();
            Fork(collection, batchSize, MaxDegreeOfParallelism, initializeLocal, action, finalizeLocal, state);

            // Wait for all workers to finish
            // (only block when some worker is still actively processing).
            if (state.ActiveWorkerCount != 0)
            {
                state.Finished.WaitOne();
            }
        }
        finally
        {
            state.Release();
        }
    }
}
/// <summary>
/// Unity per-frame hook: 0.3 seconds after rendering started, registers this
/// renderer with the <c>BatchRendererManager</c> exactly once and snapshots the
/// transform/texture-offset state used for batching.
/// </summary>
void Update()
{
    // Already handed off to the batching system: nothing more to do.
    if (mInitRender)
    {
        return;
    }

    // Wait for a short grace period after the render start before batching.
    if (Time.time - mRenderStartTime <= 0.3f)
    {
        return;
    }

    mInitRender = true;

    bool readyToBatch = m_meshRenderer != null
        && m_meshFilter != null
        && m_meshFilter.sharedMesh != null
        && BatchRendererManager.Instance != null;
    if (!readyToBatch)
    {
        return;
    }

    // Snapshot the current transform state; the texture offset is fixed at
    // zero (movement/offset-based re-batching was previously implemented
    // here but is disabled).
    m_position = transform.position;
    m_rotation = transform.rotation;
    m_texOffset = Vector2.zero;

    BatchRendererManager.Instance.AddRenderer(this, m_batchStablePeriod);
    mState = BatchState.AddBatch;
}
/// <summary>
/// Cancels the batch
/// </summary>
/// <remarks>
/// When batch is actually cancelled, Execute() will return with the appropriate status
/// </remarks>
public void Cancel()
{
    // NOTE(review): locks on `this`, which external code could also lock on;
    // kept as-is because other members of this class synchronize on the same
    // monitor (see Reset).
    lock (this)
    {
        // Only transition once; repeated Cancel() calls are no-ops.
        if (state != BatchState.Cancelling)
        {
            state = BatchState.Cancelling;
            RaiseCancelling();

            // Forward the cancellation to the in-flight command, if any.
            if (command != null)
            {
                try
                {
                    command.Cancel();
                    Debug.WriteLine("Batch.Cancel: command.Cancel completed");
                }
                catch (SqlException)
                {
                    // eat it
                }
                catch (RetryLimitExceededException)
                {
                    // eat it
                }
            }
        }
    }
}
/// <summary>
/// Persists the new <paramref name="state"/> for the batch identified by
/// <paramref name="batchId"/> using a freshly opened database connection.
/// </summary>
public async Task UpdateState(string batchId, BatchState state)
{
    using (var connection = await DbConnectionFactory.OpenAsync())
    {
        await connection.UpdateBatchState(batchId, state);
    }
}
/// <summary>
/// Runs <paramref name="action"/> for every entry of <paramref name="collection"/>,
/// spreading the work across pooled worker threads when parallelism is enabled.
/// </summary>
/// <remarks>
/// NOTE(review): "MaxDregreeOfParallelism" is misspelled, but it is a member
/// declared elsewhere in this type, so it cannot be renamed from this block alone.
/// Worker exceptions are not rethrown here — confirm whether that is intentional.
/// </remarks>
public static void ForEach<TKey, TValue>(Dictionary<TKey, TValue> collection, [Pooled] Action<KeyValuePair<TKey, TValue>> action)
{
    // Sequential fast path: parallelism disabled or at most one item.
    if (MaxDregreeOfParallelism <= 1 || collection.Count <= 1)
    {
        ExecuteBatch(collection, 0, collection.Count, action);
    }
    else
    {
        var state = BatchState.Acquire();
        try
        {
            // Ceiling division so all items are covered by batchCount batches.
            int batchCount = Math.Min(MaxDregreeOfParallelism, collection.Count);
            int batchSize = (collection.Count + (batchCount - 1)) / batchCount;

            // Kick off a worker, then perform work synchronously
            state.AddReference();
            Fork(collection, batchSize, MaxDregreeOfParallelism, action, state);

            // Wait for all workers to finish
            if (state.ActiveWorkerCount != 0)
            {
                state.Finished.WaitOne();
            }
        }
        finally
        {
            state.Release();
        }
    }
}
/// <inheritdoc/>
public override void Process(BatchState batchState)
{
    // Reduce each game in the current DAT to a single ROM entry.
    var cleaner = new Cleaner
    {
        OneRomPerGame = true,
    };
    cleaner.ApplyCleaning(batchState.DatFile);
}
/// <inheritdoc/>
public override void Process(BatchState batchState)
{
    // Use each machine's description as its name throughout the DAT.
    var cleaner = new Cleaner
    {
        DescriptionAsName = true,
    };
    cleaner.ApplyCleaning(batchState.DatFile);
}
/// <inheritdoc/>
public override void Process(BatchState batchState)
{
    // Strip scene dates from the current DAT file.
    var cleaner = new Cleaner
    {
        SceneDateStrip = true,
    };
    cleaner.ApplyCleaning(batchState.DatFile);
}
/// <inheritdoc/>
public override void Process(BatchState batchState)
{
    // Keep a single game per region, with the argument list supplying
    // the region priority order.
    var cleaner = new Cleaner
    {
        OneGamePerRegion = true,
        RegionList = Arguments,
    };
    cleaner.ApplyCleaning(batchState.DatFile);
}
/// <inheritdoc/>
public override void Process(BatchState batchState)
{
    // Reset the format mask, then OR in every requested output format.
    // Assume there could be multiple.
    batchState.DatFile.Header.DatFormat = 0x00;
    foreach (string formatName in Arguments)
    {
        batchState.DatFile.Header.DatFormat |= GetDatFormat(formatName);
    }
}
/// <inheritdoc/>
public override void Process(BatchState batchState)
{
    // Resolve the argument paths to concrete files only.
    List<ParentablePath> inputFiles = PathTool.GetFilesOnly(Arguments);

    // Parse each input into the shared DAT, bumping the index per file.
    // Assume there could be multiple.
    foreach (ParentablePath inputFile in inputFiles)
    {
        Parser.ParseInto(batchState.DatFile, inputFile, batchState.Index++);
    }
}
/// <summary>
/// Runs a batch scan with the given settings, reporting each scanned image
/// and progress text through the supplied callbacks.
/// </summary>
public void PerformBatchScan(BatchSettings settings, FormBase batchForm, Action<ScannedImage> imageCallback, Func<string, bool> progressCallback)
{
    var batchState = new BatchState(scanPerformer, profileManager, fileNamePlaceholders, pdfExporter, operationFactory, pdfSettingsContainer, userConfigManager, formFactory);
    batchState.Settings = settings;
    batchState.ProgressCallback = progressCallback;
    batchState.BatchForm = batchForm;
    batchState.LoadImageCallback = imageCallback;
    batchState.Do();
}
/// <inheritdoc/>
public override void Process(BatchState batchState)
{
    // The first argument names the merging flag to apply.
    MergingFlag flag = Arguments[0].AsMergingFlag();

    // Re-split the DAT according to the requested merge type.
    var datSplitter = new Filtering.Splitter
    {
        SplitType = flag,
    };
    datSplitter.ApplySplitting(batchState.DatFile, false);
}
/// <summary>
/// Parallel for-loop over [<paramref name="fromInclusive"/>, <paramref name="toExclusive"/>),
/// invoking <paramref name="action"/> for each index with per-worker local state
/// from <paramref name="initializeLocal"/> / <paramref name="finalizeLocal"/>.
/// The first exception captured by a worker is rethrown on the calling thread.
/// </summary>
public static void For<TLocal>(int fromInclusive, int toExclusive, [Pooled] Func<TLocal> initializeLocal, [Pooled] Action<int, TLocal> action, [Pooled] Action<TLocal> finalizeLocal = null)
{
    using (Profile(action))
    {
        // Normalize an inverted range: For(a, b) with a > b becomes the
        // ascending range [b + 1, a + 1), i.e. indices b+1 .. a.
        if (fromInclusive > toExclusive)
        {
            var temp = fromInclusive;
            fromInclusive = toExclusive + 1;
            toExclusive = temp + 1;
        }

        var count = toExclusive - fromInclusive;
        if (count == 0)
        {
            return;
        }

        // Sequential fast path: parallelism disabled or a single index.
        if (MaxDegreeOfParallelism <= 1 || count == 1)
        {
            ExecuteBatch(fromInclusive, toExclusive, initializeLocal, action, finalizeLocal);
        }
        else
        {
            var state = BatchState.Acquire();
            // Both the claim cursor and the done counter start at the range start.
            state.WorkDone = state.StartInclusive = fromInclusive;
            try
            {
                // Ceiling division so all indices are covered by batchCount batches.
                var batchCount = Math.Min(MaxDegreeOfParallelism, count);
                var batchSize = (count + (batchCount - 1)) / batchCount;

                // Kick off a worker, then perform work synchronously
                state.AddReference();
                Fork(toExclusive, batchSize, MaxDegreeOfParallelism, initializeLocal, action, finalizeLocal, state);

                // Wait for all workers to finish
                // (only block while some of the range remains unfinished).
                if (state.WorkDone < toExclusive)
                {
                    state.Finished.WaitOne();
                }

                // Surface the first worker exception on the calling thread.
                var ex = Interlocked.Exchange(ref state.ExceptionThrown, null);
                if (ex != null)
                {
                    throw ex;
                }
            }
            finally
            {
                state.Release();
            }
        }
    }
}
/// <summary>
/// A batch with no operations should still raise Committed when disposed.
/// </summary>
public virtual void Test_OneBatch_NoOperations()
{
    Assert.IsFalse(BatchCommitted, "The BatchCommitted flag should start off false.");

    // Disposing the batch at the end of the using block commits it.
    using (Batch batch = BatchState.StartBatch())
    {
        batch.Committed += batch_Committed;
    }

    Assert.IsTrue(BatchCommitted, "The BatchCommitted flag wasn't changed to true.");
}
/// <inheritdoc/>
public override void Process(BatchState batchState)
{
    // Arguments are a (header field, value) pair.
    DatHeaderField headerField = Arguments[0].AsDatHeaderField();
    string headerValue = Arguments[1];

    // Apply the single field assignment to the DAT header.
    var fieldAssignments = new Dictionary<DatHeaderField, string>
    {
        [headerField] = headerValue,
    };
    batchState.DatFile.Header.SetFields(fieldAssignments);
}
/// <summary>
/// Runs a batch scan asynchronously with the given settings, reporting each
/// scanned image and progress text through the supplied callbacks and
/// honoring <paramref name="cancelToken"/>.
/// </summary>
public async Task PerformBatchScan(BatchSettings settings, FormBase batchForm, Action<ScannedImage> imageCallback, Action<string> progressCallback, CancellationToken cancelToken)
{
    var batchState = new BatchState(scanPerformer, profileManager, fileNamePlaceholders, pdfExporter, operationFactory, pdfSettingsContainer, ocrManager, formFactory);
    batchState.Settings = settings;
    batchState.ProgressCallback = progressCallback;
    batchState.CancelToken = cancelToken;
    batchState.BatchForm = batchForm;
    batchState.LoadImageCallback = imageCallback;
    await batchState.Do();
}
/// <summary>
/// Runs a batch scan with the given settings, reporting each scanned image
/// and progress text through the supplied callbacks.
/// </summary>
public void PerformBatchScan(BatchSettings settings, FormBase batchForm, Action<ScannedImage> imageCallback, Func<string, bool> progressCallback)
{
    var batchState = new BatchState(scanPerformer, profileManager, fileNamePlaceholders, pdfExporter, operationFactory, pdfSettingsContainer, ocrDependencyManager, formFactory);
    batchState.Settings = settings;
    batchState.ProgressCallback = progressCallback;
    batchState.BatchForm = batchForm;
    batchState.LoadImageCallback = imageCallback;
    batchState.Do();
}
/// <summary>
/// Deletes every entity from the data store inside a single batch.
/// </summary>
/// <param name="dataDirectory">Unused by this implementation; kept for interface compatibility.</param>
public void ClearData(string dataDirectory)
{
    // Nothing to clear until the data layer has been initialized.
    if (!DataAccess.IsInitialized)
    {
        return;
    }

    // Wrap all deletions in one batch so the work commits together.
    using (Batch batch = BatchState.StartBatch())
    {
        foreach (IEntity entity in DataAccess.Data.Indexer.GetEntities())
        {
            DataAccess.Data.Deleter.Delete(entity);
        }
    }
}
/// <inheritdoc/>
public override void Process(BatchState batchState)
{
    // Get overwrite value, if possible; defaults to overwriting.
    bool? overwrite = true;
    if (Arguments.Count == 1)
    {
        overwrite = Arguments[0].AsYesNo();
    }

    // Write out the dat with the current state.
    // FIX: AsYesNo presumably returns null for unparseable input — TODO
    // confirm; fall back to the default (true) instead of throwing
    // InvalidOperationException on overwrite.Value.
    Writer.Write(batchState.DatFile, batchState.OutputDirectory, overwrite: overwrite ?? true);
}
/// <summary>
/// Resets the object to its initial state
/// </summary>
public void Reset()
{
    // NOTE(review): locks on `this` — kept because other members of this
    // class (e.g. Cancel) synchronize on the same monitor.
    lock (this)
    {
        // Return every field to its default so the instance can be reused.
        state = BatchState.Initial;
        command = null;
        textSpan = new TextSpan();
        totalAffectedRows = 0;
        hasErrors = false;
        expectedShowPlan = ShowPlanType.None;
        isSuppressProviderMessageHeaders = false;
        scriptTrackingId = 0;
        isScriptExecutionTracked = false;
    }
}
/// <summary>
/// Parallel for-loop over [<paramref name="fromInclusive"/>, <paramref name="toExclusive"/>),
/// invoking <paramref name="action"/> once for each index.
/// </summary>
/// <remarks>
/// NOTE(review): worker exceptions are not rethrown here, unlike the
/// TLocal overload — confirm whether that is intentional.
/// </remarks>
public static void For(int fromInclusive, int toExclusive, [Pooled] Action<int> action)
{
    using (Profile(action))
    {
        // Normalize an inverted range: For(a, b) with a > b becomes the
        // ascending range [b + 1, a + 1), i.e. indices b+1 .. a.
        if (fromInclusive > toExclusive)
        {
            var temp = fromInclusive;
            fromInclusive = toExclusive + 1;
            toExclusive = temp + 1;
        }

        var count = toExclusive - fromInclusive;
        if (count == 0)
        {
            return;
        }

        // Sequential fast path: parallelism disabled or a single index.
        if (MaxDegreeOfParallelism <= 1 || count == 1)
        {
            ExecuteBatch(fromInclusive, toExclusive, action);
        }
        else
        {
            var state = BatchState.Acquire();
            // The claim cursor starts at the beginning of the range.
            state.StartInclusive = fromInclusive;
            try
            {
                // Ceiling division so all indices are covered by batchCount batches.
                var batchCount = Math.Min(MaxDegreeOfParallelism, count);
                var batchSize = (count + (batchCount - 1)) / batchCount;

                // Kick off a worker, then perform work synchronously
                state.AddReference();
                Fork(toExclusive, batchSize, MaxDegreeOfParallelism, action, state);

                // Wait for all workers to finish
                if (state.ActiveWorkerCount != 0)
                {
                    state.Finished.WaitOne();
                }
            }
            finally
            {
                state.Release();
            }
        }
    }
}
/// <inheritdoc/>
/// <remarks>TODO: Should any of the other options be added for D2D?</remarks>
public override void Process(BatchState batchState)
{
    // Each argument is an input to scan into the DAT using standard hashes.
    // Assume there could be multiple.
    foreach (string inputPath in Arguments)
    {
        DatTools.DatFromDir.PopulateFromDir(batchState.DatFile, inputPath, hashes: Hash.Standard);
    }

    // TODO: We might not want to remove dates in the future
    var dateRemover = new Remover();
    dateRemover.PopulateExclusionsFromList(new List<string> { "DatItem.Date" });
    dateRemover.ApplyRemovals(batchState.DatFile);
}
/// <summary>
/// Advances the waiting/executing batch state machine on each heartbeat:
/// waiting → executing once the thread pool is fully idle and large enough
/// for a batch; executing → waiting once the whole batch has run.
/// </summary>
public void HeartBeat(ITestState state)
{
    if (_batchState != BatchState.Waiting)
    {
        // Executing: return to waiting once the full batch has been run.
        if (_executedInBatch >= _batchSize)
        {
            _batchState = BatchState.Waiting;
        }
        return;
    }

    // Waiting: start executing when every created thread is idle and the
    // pool can serve an entire batch at once.
    IThreadPoolStats threadPool = state.ThreadPool;
    bool allThreadsIdle = threadPool.CreatedThreadCount == threadPool.IdleThreadCount;
    if (allThreadsIdle && threadPool.CreatedThreadCount >= _batchSize)
    {
        _executedInBatch = 0;
        _executeAt = state.Timer.Value.Add(ExecuteDelay);
        _batchState = BatchState.Executing;
    }
}
/// <summary>
/// A nested batch must defer committing to its enclosing batch; only the
/// outer batch's disposal should raise Committed.
/// </summary>
public virtual void Test_TwoBatches_Nested()
{
    Assert.IsFalse(BatchCommitted, "The BatchCommitted flag should start off false.");

    using (Batch outerBatch = BatchState.StartBatch())
    {
        outerBatch.Committed += batch_Committed;

        // The inner batch disposes first but must not commit on its own.
        using (Batch innerBatch = BatchState.StartBatch())
        {
            innerBatch.Committed += batch_Committed;
        }

        Assert.IsFalse(BatchCommitted, "The nested batch committed when it shouldn't have. It should leave it for the outer batch.");
    }

    Assert.IsTrue(BatchCommitted, "The BatchCommitted flag wasn't changed to true.");
}
/// <summary>
/// Updates the stored state (and optionally the waiting MT count) for an SMS batch.
/// </summary>
/// <param name="batchId">Primary key of the sms_batch row to update.</param>
/// <param name="batchState">New state, persisted as its integer value.</param>
/// <param name="waitCount">New mtcount value; pass a negative number to leave it unchanged.</param>
public void UpdateState(int batchId, BatchState batchState, int waitCount)
{
    // All interpolated values are ints, so there is no injection risk here;
    // still, prefer parameterized SQL if TradAction supports it. The
    // generated SQL is byte-identical to the previous concatenation.
    string sql = waitCount > -1
        ? $"update sms_batch set batchstate={(int)batchState},mtcount={waitCount} where id={batchId}"
        : $"update sms_batch set batchstate={(int)batchState} where id={batchId}";

    using (TradAction action = new TradAction())
    {
        action.Excute(sql);
    }
}
/// <summary>
/// Worker body for the parallel dictionary ForEach: claims consecutive
/// batches of entries via an atomic cursor (<c>state.StartInclusive</c>),
/// optionally spawns one more worker while work remains, and signals
/// <c>state.Finished</c> when the last active worker drains the queue.
/// </summary>
private static void Fork<TKey, TValue, TLocal>([NotNull] Dictionary<TKey, TValue> collection, int batchSize, int maxDegreeOfParallelism, [Pooled] Func<TLocal> initializeLocal, [Pooled] Action<KeyValuePair<TKey, TValue>, TLocal> action, [Pooled] Action<TLocal> finalizeLocal, [NotNull] BatchState state)
{
    // Other threads already processed all work before this one started. ActiveWorkerCount is already 0
    if (state.StartInclusive >= collection.Count)
    {
        state.Release();
        return;
    }

    // This thread is now actively processing work items, meaning there might be work in progress
    Interlocked.Increment(ref state.ActiveWorkerCount);

    // Kick off another worker if there's any work left
    // (each spawn decrements the remaining parallelism budget by one).
    if (maxDegreeOfParallelism > 1 && state.StartInclusive + batchSize < collection.Count)
    {
        state.AddReference();
        ThreadPool.Instance.QueueWorkItem(() => Fork(collection, batchSize, maxDegreeOfParallelism - 1, initializeLocal, action, finalizeLocal, state));
    }

    try
    {
        // Process batches synchronously as long as there are any
        // (Interlocked.Add atomically claims the next batch; newStart is the
        // exclusive end of the claimed batch, so its start is newStart - batchSize).
        int newStart;
        while ((newStart = Interlocked.Add(ref state.StartInclusive, batchSize)) - batchSize < collection.Count)
        {
            // TODO: Reuse enumerator when processing multiple batches synchronously
            var start = newStart - batchSize;
            // Clamp the final batch so it does not run past the collection end.
            ExecuteBatch(collection, newStart - batchSize, Math.Min(collection.Count, newStart) - start, initializeLocal, action, finalizeLocal);
        }
    }
    finally
    {
        state.Release();

        // If this was the last batch, signal
        if (Interlocked.Decrement(ref state.ActiveWorkerCount) == 0)
        {
            state.Finished.Set();
        }
    }
}
/// <summary>
/// Persists <paramref name="batchState"/> for the given batch without
/// touching its waiting-message count.
/// </summary>
public void UpdateBatchState(BatchState batchState, int batchid)
{
    // -1 tells UpdateState to leave the mtcount column unchanged.
    SmsBatchManage.Instance.UpdateState(batchid, batchState, -1);
}