/// <summary> /// Use a concurrent collection (BlockingCollection) /// </summary> public void Run3() { var products = GetProducts(); BlockingCollection <NewProduct> newProducts = new BlockingCollection <NewProduct>(); Console.WriteLine("Source record count: {0}", products.Count()); Console.WriteLine("Result record count before execution: {0}", newProducts.Count()); var watch = new Stopwatch(); watch.Start(); Parallel.ForEach(products, x => { int id = x.Id; string name = x.Name; int stock = 100 + x.Id; var newProduct = new NewProduct() { Id = id, Name = name, Stock = stock }; newProducts.Add(newProduct); }); watch.Stop(); Console.WriteLine("Consume Time: {0} ms", watch.ElapsedMilliseconds); Console.WriteLine("Result record count after execution: {0}", newProducts.Count()); }
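Most of the examples on this page query the number of queued items with the LINQ Count() extension. BlockingCollection<T> also exposes a Count property that expresses the same thing more directly; either way, the number is only a point-in-time snapshot once producers and consumers run concurrently. A minimal sketch of both calls, with nothing taken from the example above and purely illustrative values:

using System;
using System.Collections.Concurrent;
using System.Linq;

class CountSketch
{
    static void Main()
    {
        var queue = new BlockingCollection<int>();
        for (int i = 0; i < 5; i++)
        {
            queue.Add(i);
        }

        Console.WriteLine(queue.Count);    // Count property on BlockingCollection<T>
        Console.WriteLine(queue.Count());  // LINQ extension used throughout these samples

        // Both print 5 here; with concurrent writers the value can be stale
        // by the time it is read.
    }
}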
private void Timer_BulkAdd_Tick(object sender, EventArgs e) { timer_BulkAdd.Enabled = false; if (LstTime1.Count() > 0) { FlagList1Full = true; var result = AddDataToDbAsync(LstTime1.ToList()); while (LstTime1.Count() > 0) { LstTime1.Take(); } } if (LstTime2.Count() > 0) { FlagList1Full = false; var result = AddDataToDbAsync(LstTime2.ToList()); while (LstTime2.Count() > 0) { LstTime2.Take(); } } timer_BulkAdd.Enabled = true; }
public override GameState GetMessage(MjsonMessageAll msgobj) { if (msgobj.IsNONE()) { getMsgList.Add(msgobj); if (getMsgList.Count < Constants.PLAYER_NUM) { return(this); } // none of the four players took an action in the messages carried over if (prevMsgList.Count(e => e.IsNONE()) == Constants.PLAYER_NUM) { // check whether the hand (kyoku) can end if (serverController.CanFinishKyoku()) { serverController.Ryukyoku(); getMsgList.Dispose(); return(new AfterRyukyokuState(this)); } else { // move to the tsumo state serverController.Tsumo(); return(new AfterTsumoState(this)); } } // daiminkan else if (prevMsgList.Count(e => e.IsDAIMINKAN()) == 1) { var daiminkanObj = prevMsgList.First(e => e.IsDAIMINKAN()); serverController.Daiminkan(daiminkanObj.actor, daiminkanObj.target, daiminkanObj.pai, daiminkanObj.consumed); prevMsgList.Dispose(); return(new AfterKanState(this)); } else if (prevMsgList.Count(e => e.IsPON()) == 1) { var ponObj = prevMsgList.First(e => e.IsPON()); serverController.Pon(ponObj.actor, ponObj.target, ponObj.pai, ponObj.consumed); prevMsgList.Dispose(); return(new AfterTsumoState(this)); } else if (prevMsgList.Count(e => e.IsCHI()) == 1) { var chiObj = prevMsgList.First(e => e.IsCHI()); serverController.Chi(chiObj.actor, chiObj.target, chiObj.pai, chiObj.consumed); prevMsgList.Dispose(); return(new AfterTsumoState(this)); } } else { // error handling serverController.SendErrorToRoomMember(msgobj); } return(this); }
private void RetrieveNewTasks(BlockingCollection <PlotTask> plotTasks) { if (plotTasks.Count() < Settings.QueueMinimumThreshold) { var list = _plotTaskRepository.GetPlotTasks(); PlotTask removed; while (plotTasks.TryTake(out removed)) { } list.ToList().ForEach(x => plotTasks.Add(x)); } Console.WriteLine("Tasks queue length:" + plotTasks.Count()); }
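Draining whatever happens to be queued is usually written as a TryTake loop, because the LINQ TakeWhile operator is lazy and never removes items from a BlockingCollection<T>. A small, self-contained sketch of that drain pattern; the element type and counts below are only illustrative:

using System;
using System.Collections.Concurrent;

class DrainSketch
{
    // Removes everything currently in the queue without blocking;
    // TryTake returns false as soon as the collection is empty.
    static int Drain<T>(BlockingCollection<T> queue)
    {
        int removed = 0;
        while (queue.TryTake(out T _))
        {
            removed++;
        }
        return removed;
    }

    static void Main()
    {
        var queue = new BlockingCollection<int>();
        for (int i = 0; i < 10; i++)
        {
            queue.Add(i);
        }
        Console.WriteLine("Removed {0} items, {1} left.", Drain(queue), queue.Count);
    }
}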
/// <summary> /// Implementation of RBroker Interface 'status' method /// </summary> /// <returns>RBrokerStatus reference</returns> /// <remarks></remarks> public RBrokerStatus status() { /* * Pending tasks include all tasks on * high and low priority queues. */ int pendingTasks = m_pendingHighPriorityQueue.Count() + m_pendingLowPriorityQueue.Count(); int executingTasks = (int)Interlocked.Read(ref m_parallelTaskLimit) - m_resourceTokenPool.Count(); return(new RBrokerStatus(pendingTasks, executingTasks)); }
public void MultiThread_NumbersAreUnique() { var collection = new BlockingCollection <int>(); Parallel.ForEach(Enumerable.Range(0, 1000), i => { var random = GameRandom.Between(int.MinValue, int.MaxValue); collection.Add(random); }); output.WriteLine($"Repeated values: {collection.Count() - collection.Distinct().Count()}"); Assert.True(collection.Distinct().Count() == collection.Count()); }
//------------------------------------------------------------- public void ClientSend(object DTO, string IP = "") //------------------------------------------------------------- { if (playerList == null || playerList.Count == 0) { Log.Get().Write("Communication client no players found", Log.LogType.Info); return; } queueNetwork.Add(new QueueNetwork() { DTO = DTO, IP = IP }); kobberLanGui.UpdateQueueText(queueNetwork.Count()); }
/// <summary> /// Gets a list of pages to crawl from the database and runs tests on the pages. /// </summary> private void CreateTests() { BlockingCollection <CrawlTest> tests = getCrawlTests(); do { this.Processed += tests.Count(); Console.WriteLine($"Crawling {tests.Count()} records."); RunTests(tests); Console.WriteLine($"Saved {this.Succeeded} items out of {this.Processed}."); } while ((tests = getCrawlTests()).Count > 0); FlushDatabaseBuffer(); }
static void Main(string[] args) { var sw = Stopwatch.StartNew(); // Set the maximum number of elements to NUM_SENTENCES / 20 _sentencesBC = new BlockingCollection <string>(NUM_SENTENCES / 20); _capWordsInSentencesBC = new BlockingCollection <string>(NUM_SENTENCES / 20); _finalSentencesBC = new BlockingCollection <string>(NUM_SENTENCES / 20); Parallel.Invoke( () => ProduceSentences(), () => CapitalizeWordsInSentences(), () => RemoveLettersInSentences(), () => { foreach (var sentence in _finalSentencesBC.GetConsumingEnumerable()) { Console.WriteLine(sentence); } }); Console.WriteLine( "Number of sentences with capitalized words in the collection: {0}", _capWordsInSentencesBC.Count()); Console.WriteLine( "Number of sentences with removed letters in the collection: {0}", _finalSentencesBC.Count()); Debug.WriteLine(sw.Elapsed.ToString()); // Display the results and wait for the user to press a key Console.WriteLine("Finished!"); Console.ReadLine(); }
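The pipeline above relies on two BlockingCollection<T> behaviours: a bounded collection makes Add block when a stage gets ahead, and GetConsumingEnumerable() ends the consuming foreach once CompleteAdding() has been called and the queue is empty. A minimal, self-contained sketch of that handshake with one producer and one consumer; the names, capacity and item count are made up for illustration:

using System;
using System.Collections.Concurrent;
using System.Threading.Tasks;

class PipelineSketch
{
    static void Main()
    {
        // Bounded to 10 items: Add blocks whenever the consumer falls behind.
        var sentences = new BlockingCollection<string>(boundedCapacity: 10);

        var producer = Task.Run(() =>
        {
            for (int i = 0; i < 100; i++)
            {
                sentences.Add("sentence " + i);
            }
            sentences.CompleteAdding();   // no more items will ever arrive
        });

        var consumer = Task.Run(() =>
        {
            // Ends cleanly after CompleteAdding once the queue is drained.
            foreach (var sentence in sentences.GetConsumingEnumerable())
            {
                Console.WriteLine(sentence.ToUpperInvariant());
            }
        });

        Task.WaitAll(producer, consumer);
        Console.WriteLine("Items left in the collection: {0}", sentences.Count);
    }
}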
/// <summary> /// No ordering (the result order is not guaranteed) /// </summary> public static List <TDestination> MapToListParallel <TSource, TDestination>(this IEnumerable <TSource> source) { BlockingCollection <TDestination> result = new BlockingCollection <TDestination>(); Mapper.CreateMap <TSource, TDestination>(); CancellationTokenSource cts = new CancellationTokenSource(); ParallelOptions pOption = new ParallelOptions() { CancellationToken = cts.Token }; pOption.MaxDegreeOfParallelism = 50; BlockingCollection <int> excuteCount = new BlockingCollection <int>(); Parallel.ForEach(source, pOption, item => { TDestination destination = System.Activator.CreateInstance <TDestination>(); Mapper.Map(item, destination); result.Add(destination); excuteCount.Add(1); }); while (true) { if (source.Count() == excuteCount.Count()) { break; } else { Thread.Sleep(1000); } } return(result.ToList()); }
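Parallel.ForEach does not return until every iteration has finished (or the loop faults or is cancelled), so a completion counter like excuteCount and the trailing Sleep loop are normally unnecessary. A hedged sketch of the same shape without the counter; the AutoMapper calls are replaced by a caller-supplied delegate so the example stays self-contained:

using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;

static class ParallelMapSketch
{
    // Maps the source items in parallel; the order of the result is not guaranteed.
    public static List<TDestination> MapToListParallel<TSource, TDestination>(
        this IEnumerable<TSource> source,
        Func<TSource, TDestination> map,
        int maxDegreeOfParallelism = 50)
    {
        var result = new BlockingCollection<TDestination>();
        var options = new ParallelOptions { MaxDegreeOfParallelism = maxDegreeOfParallelism };

        // Blocks until all items have been processed, so no extra bookkeeping is needed.
        Parallel.ForEach(source, options, item => result.Add(map(item)));

        return result.ToList();
    }
}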
/// <summary> /// Run a variation of the pipeline that uses a user-specified number of tasks for the filter stage. /// </summary> /// <param name="fileNames">List of image file names in source directory</param> /// <param name="sourceDir">Name of directory of source images</param> /// <param name="queueLength">Length of image queue</param> /// <param name="displayFn">Display action</param> /// <param name="cts">Cancellation token</param> /// <param name="filterTaskCount">Number of filter tasks</param> static void RunLoadBalancedPipeline(IEnumerable <string> fileNames, string sourceDir, int queueLength, Action <ImageInfo> displayFn, CancellationTokenSource cts, int filterTaskCount) { // Create data pipes var originalImages = new BlockingCollection <ImageInfo>(queueLength); var thumbnailImages = new BlockingCollection <ImageInfo>(queueLength); var filteredImageMultiplexer = new BlockingMultiplexer <ImageInfo>(info => info.SequenceNumber, 0, queueLength); var filteredImagesCollections = (BlockingCollection <ImageInfo>[])Array.CreateInstance( typeof(BlockingCollection <ImageInfo>), filterTaskCount); try { // Start pipelined tasks Action <ImageInfo> updateStatisticsFn = info => { info.QueueCount1 = originalImages.Count(); info.QueueCount2 = thumbnailImages.Count(); info.QueueCount3 = filteredImageMultiplexer.Count; }; const TaskCreationOptions options = TaskCreationOptions.LongRunning; var f = new TaskFactory(CancellationToken.None, options, TaskContinuationOptions.None, TaskScheduler.Default); Task[] tasks = (Task[])Array.CreateInstance(typeof(Task), filterTaskCount + 3); int taskId = 0; tasks[taskId++] = f.StartNew(() => LoadPipelinedImages(fileNames, sourceDir, originalImages, cts)); tasks[taskId++] = f.StartNew(() => ScalePipelinedImages(originalImages, thumbnailImages, cts)); for (int i = 0; i < filterTaskCount; i++) { var tmp = i; filteredImagesCollections[tmp] = filteredImageMultiplexer.GetProducerQueue(); tasks[taskId++] = f.StartNew(() => FilterPipelinedImages(thumbnailImages, filteredImagesCollections[tmp], cts)); } tasks[taskId++] = f.StartNew(() => DisplayPipelinedImages(filteredImageMultiplexer.GetConsumingEnumerable(), displayFn, updateStatisticsFn, cts)); Task.WaitAll(tasks); } finally { // there might be cleanup in the case of cancellation or an exception. DisposeImagesInQueue(originalImages); DisposeImagesInQueue(thumbnailImages); foreach (var filteredImages in filteredImagesCollections) { DisposeImagesInQueue(filteredImages); } foreach (var info in filteredImageMultiplexer.GetCleanupEnumerable()) { info.Dispose(); } } }
public void Start() { var tasks = new List <Task>(); var plotTasks = new BlockingCollection <PlotTask>(); var stopwatch = Stopwatch.StartNew(); var cancellationToken = _cancellationTokenSource.Token; tasks.Add( Task.Factory.StartNew( () => { while (!cancellationToken.IsCancellationRequested) { try { _plotTaskRepository.UpdatePlotJobStatusesFromPlotTaskStatuses(); if (plotTasks.Count() < Settings.BatchSize / 2) { RetrieveNewTasks(plotTasks); _plotTaskRepository.ImportNewPlotTasks(); } } catch (Exception ex) { Logger.Error(ex); } Thread.Sleep(Settings.BatchLoadInterval * 1000); } }, cancellationToken)); var count = 0; tasks.AddRange( Enumerable.Range(0, Settings.NumberOfConsoles) .Select( x => Task.Factory.StartNew( () => { while (!cancellationToken.IsCancellationRequested) { try { if (ProcessPlotTasks(plotTasks)) { Logger.Info("PlotTask {0} {1} sec", ++count, Math.Round(stopwatch.Elapsed.TotalSeconds, 0)); } } catch (Exception ex) { Logger.Error(ex); } } }, cancellationToken))); Task.WaitAll(tasks.ToArray()); }
private void MonitorResponder(Socket monitor, BlockingCollection <byte[]> queue, Encoding encoding) { while (_isRunning) { monitor.Recv(); var queueLength = queue.Count(); Console.WriteLine("MONITOR - Received queue length monitor request - currently at {0}", queueLength); monitor.Send(string.Format("{0}", queueLength), encoding); } }
public static async Task GetRandomFights([NotNull] Player player, [NotNull] Player opp, [NotNull] RefreshBB refresh, int builds = 100, int positions = 200, int cores = 8) { if (Running == true) { return; } Running = true; _refreshBB = refresh; _refreshBB.Bplayer = player.GetString(); _refreshBB.Bopp = opp.GetString(); source = new CancellationTokenSource(); token = source.Token; _empty = new ManualResetEvent(false); CORES = cores; MaxValue = 0; BUILDS = builds; POSITIONS = positions; _refreshBB.TOTAL_DONE = 0; _refreshBB.TOTAL = positions; START = DateTime.UtcNow; END = DateTime.MinValue; _jobs_random = new BlockingCollection <int>(); for (int i = 0; i < 40000; i++) { _jobs_random.Add(i); } for (int i = 0; i < CORES; i++) { Thread thread = new Thread(OnHandlerStartRandom) { IsBackground = true }; //Mark 'false' if you want to prevent program exit until jobs finish thread.Start(); } while (!_empty.WaitOne(1000)) { Console.WriteLine(_jobs_random.Count()); _refreshBB.Update = !_refreshBB.Update; if (!_jobs_random.Any()) { break; } } END = DateTime.UtcNow; Running = false; _refreshBB.Update = !_refreshBB.Update; }
static void RunConsumer(Object stateInfo) { // GetConsumingEnumerable returns the enumerator for the // underlying collection. foreach (var item in collection.GetConsumingEnumerable()) { Console.WriteLine("Consuming tick value {0} : item# {1} ", item.ToString("D18"), subtractions++); } Console.WriteLine("Total added: {0} Total consumed: {1} Current count: {2} ", additions, subtractions, collection.Count()); sw.Stop(); Console.WriteLine("Press any key to exit"); }
public void Start() { IsRunning = true; // Producer Task.Run(async() => { while (IsRunning) { if (_queue.Count() < 2) { var imgs = await fetcher.FetchNextBatchAsync(); if (!imgs.Any()) { _queue.CompleteAdding(); IsRunning = false; } else { imgs.ForEach(img => _queue.Add(img)); } } Thread.Sleep(100); } }); Action consumer = () => { while (IsRunning) { foreach (var item in _queue.GetConsumingEnumerable()) { var grayImg = ImageManipulater.ToGrayScale(item.Img); grayImg.Save(item.Filename + ".gray.jpg"); } } }; // Single Process // Parallel.Invoke(consumer); // Concurrent Parallel.Invoke(consumer, consumer, consumer, consumer); Console.ReadKey(); }
static void Main(string[] args) { var sw = Stopwatch.StartNew(); // Set the maximum number of elements to NUM_SENTENCES / 20 _sentencesBC = new BlockingCollection <string>(new ConcurrentStack <string>(), NUM_SENTENCES / 20); _capWordsInSentencesBC = new BlockingCollection <string>(NUM_SENTENCES / 20); _finalSentencesBC = new BlockingCollection <string>(NUM_SENTENCES / 20); var cts = new System.Threading.CancellationTokenSource(); var ct = cts.Token; var deferredCancelTask = Task.Factory.StartNew(() => { // Sleep the thread that runs this task for 500 milliseconds System.Threading.Thread.Sleep(500); // Send the signal to cancel cts.Cancel(); }); Parallel.Invoke( () => ProduceSentences(ct), () => CapitalizeWordsInSentences(), () => RemoveLettersInSentences(), () => { foreach (var sentence in _finalSentencesBC.GetConsumingEnumerable(ct)) { Console.WriteLine(sentence); } }); Console.WriteLine( "Number of sentences with capitalized words in the collection: {0}", _capWordsInSentencesBC.Count()); Console.WriteLine( "Number of sentences with removed letters in the collection: {0}", _finalSentencesBC.Count()); Debug.WriteLine(sw.Elapsed.ToString()); // Display the results and wait for the user to press a key Console.WriteLine("Finished!"); Console.ReadLine(); }
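This variant passes a CancellationToken to GetConsumingEnumerable, so cancelling the token makes the consuming foreach throw OperationCanceledException instead of waiting indefinitely for more sentences. A small sketch of handling that exception, assuming a producer that simply stops once cancellation is requested; every name and delay below is made up for illustration:

using System;
using System.Collections.Concurrent;
using System.Threading;
using System.Threading.Tasks;

class CancellableConsumerSketch
{
    static void Main()
    {
        var queue = new BlockingCollection<string>(boundedCapacity: 100);
        var cts = new CancellationTokenSource(TimeSpan.FromMilliseconds(500));   // cancel after 500 ms

        var producer = Task.Run(() =>
        {
            int i = 0;
            while (!cts.IsCancellationRequested)
            {
                queue.Add("item " + i++);
                Thread.Sleep(10);
            }
        });

        try
        {
            // Throws OperationCanceledException when the token is signalled
            // while the consumer is blocked waiting for the next item.
            foreach (var item in queue.GetConsumingEnumerable(cts.Token))
            {
                Console.WriteLine(item);
            }
        }
        catch (OperationCanceledException)
        {
            Console.WriteLine("Consumer cancelled; {0} items left unread.", queue.Count);
        }

        producer.Wait();
    }
}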
/// <summary> /// Run the image processing pipeline. /// </summary> /// <param name="nomeDosArquivos">List of image file names in source directory</param> /// <param name="diretorioOrigem">Name of directory of source images</param> /// <param name="tamanhoFila">Length of image queue</param> /// <param name="acaoVisualizar">Display action</param> /// <param name="cts">Cancellation token</param> static void RodarPipeline(IEnumerable <string> nomeDosArquivos, string diretorioOrigem, int tamanhoFila, Action <ImagemControle> acaoVisualizar, CancellationTokenSource cts) { // Data pipes var imagensOriginais = new BlockingCollection <ImagemControle>(tamanhoFila); var miniaturaImagens = new BlockingCollection <ImagemControle>(tamanhoFila); var imagensFiltradas = new BlockingCollection <ImagemControle>(tamanhoFila); try { var f = new TaskFactory(TaskCreationOptions.LongRunning, TaskContinuationOptions.None); Action <ImagemControle> atualizarEstatisticas = info => { info.ContagemFila1 = imagensOriginais.Count(); info.ContagemFila2 = miniaturaImagens.Count(); info.ContagemFila3 = imagensFiltradas.Count(); }; // Starting pipelined tasks var tarefaCarregar = f.StartNew(() => CarregarImagensEmPipeline(nomeDosArquivos, diretorioOrigem, imagensOriginais, cts)); var tarefaEscala = f.StartNew(() => EscalarImagensEmPipeline(imagensOriginais, miniaturaImagens, cts)); var tarefaFiltro = f.StartNew(() => FiltrarImagensEmPipeline(miniaturaImagens, imagensFiltradas, cts)); var tarefaVisualizar = f.StartNew(() => VisualizarImagensEmPipeline(imagensFiltradas.GetConsumingEnumerable(), acaoVisualizar, atualizarEstatisticas, cts)); Task.WaitAll(tarefaCarregar, tarefaEscala, tarefaFiltro, tarefaVisualizar); } finally { // in case of exception or cancellation, there might be bitmaps // that need to be disposed. DisposeImagensNaFila(imagensOriginais); DisposeImagensNaFila(miniaturaImagens); DisposeImagensNaFila(imagensFiltradas); } }
/// <summary> /// Run the image processing pipeline. /// </summary> /// <param name="fileNames">List of image file names in source directory</param> /// <param name="sourceDir">Name of directory of source images</param> /// <param name="queueLength">Length of image queue</param> /// <param name="displayFn">Display action</param> /// <param name="cts">Cancellation token</param> static void RunPipelined(IEnumerable <string> fileNames, string sourceDir, int queueLength, Action <ImageInfo> displayFn, CancellationTokenSource cts) { // Data pipes var originalImages = new BlockingCollection <ImageInfo>(queueLength); var thumbnailImages = new BlockingCollection <ImageInfo>(queueLength); var filteredImages = new BlockingCollection <ImageInfo>(queueLength); try { var f = new TaskFactory(TaskCreationOptions.LongRunning, TaskContinuationOptions.None); Action <ImageInfo> updateStatisticsFn = info => { info.QueueCount1 = originalImages.Count(); info.QueueCount2 = thumbnailImages.Count(); info.QueueCount3 = filteredImages.Count(); }; // Start pipelined tasks var loadTask = f.StartNew(() => LoadPipelinedImages(fileNames, sourceDir, originalImages, cts)); var scaleTask = f.StartNew(() => ScalePipelinedImages(originalImages, thumbnailImages, cts)); var filterTask = f.StartNew(() => FilterPipelinedImages(thumbnailImages, filteredImages, cts)); var displayTask = f.StartNew(() => DisplayPipelinedImages(filteredImages.GetConsumingEnumerable(), displayFn, updateStatisticsFn, cts)); Task.WaitAll(loadTask, scaleTask, filterTask, displayTask); } finally { // in case of exception or cancellation, there might be bitmaps // that need to be disposed. DisposeImagesInQueue(originalImages); DisposeImagesInQueue(thumbnailImages); DisposeImagesInQueue(filteredImages); } }
public ModelCharacters() { _Characters = new ConcurrentDictionary <long, Character>(EqualityComparer <long> .Default); BlockingCollection <string> fileCollection = CreateBlockingFileCollection($"{PathingHelper.playerDir}characters{Path.DirectorySeparatorChar}"); int totalAmountOfFiles = fileCollection.Count(); int taskCount; if (totalAmountOfFiles < Environment.ProcessorCount * 2) { taskCount = totalAmountOfFiles; } else { taskCount = Environment.ProcessorCount; } Task[] taskArray = new Task[taskCount]; for (int i = 0; i < taskCount; i++) { taskArray[i] = Task.Factory.StartNew(() => { string fileName; while (!fileCollection.IsCompleted) { if (!fileCollection.TryTake(out fileName)) { continue; } Character Character = new Character().DeserializeFromFile(fileName); _Characters.TryAdd(Character.Id, Character); } }); } Task.WaitAll(taskArray); }
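The worker loop above spins with continue whenever the collection is momentarily empty but not yet completed. The TryTake overload that accepts a timeout lets idle workers block briefly instead. A minimal sketch of that loop shape; the file names and worker count are illustrative only:

using System;
using System.Collections.Concurrent;
using System.Threading.Tasks;

class WorkerLoopSketch
{
    // Waits up to 100 ms per attempt instead of spinning on an empty queue.
    static void Consume(BlockingCollection<string> files)
    {
        string fileName;
        while (!files.IsCompleted)
        {
            if (files.TryTake(out fileName, 100))
            {
                Console.WriteLine("Processing {0}", fileName);
            }
        }
    }

    static void Main()
    {
        var files = new BlockingCollection<string>();
        for (int i = 0; i < 20; i++)
        {
            files.Add("character" + i + ".json");
        }
        files.CompleteAdding();   // lets IsCompleted become true once the queue drains

        var workers = new Task[Environment.ProcessorCount];
        for (int i = 0; i < workers.Length; i++)
        {
            workers[i] = Task.Run(() => Consume(files));
        }
        Task.WaitAll(workers);
    }
}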
/// <summary> /// Read the input file /// </summary> private void Read() { try { long currentPos = 0; long inputFileLenght = GetFileLength(inputFileName); int bytesReadLenght; byte[] currentFileBlock; int blockIdCursor = 0; while (currentPos < inputFileLenght) { bytesReadLenght = GetLengthToRead(inputFileLenght, currentPos); currentFileBlock = GetFileBlock(inputFileName, currentPos, bytesReadLenght); currentPos += bytesReadLenght; blockIdCursor++; ByteBlock readBlock = new ByteBlock(blockIdCursor, currentFileBlock); while (true) { if (readCollection.Count() < Constants.maxProcessedBlocks) { readCollection.Add(readBlock); break; } else { Thread.Sleep(Constants.ThreadDelay); } } } inputLastBlockID = blockIdCursor; readCollection.CompleteAdding(); } catch (Exception ex) { Console.WriteLine(ex.Message); } }
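The inner while (true) loop above sleeps until readCollection.Count() drops below Constants.maxProcessedBlocks before adding the next block. If the collection can be created with a bounded capacity, Add provides the same back-pressure by blocking on its own. A hedged sketch of that alternative; the capacity, block size and block count are invented for the example and the compression stage is only stubbed out:

using System;
using System.Collections.Concurrent;
using System.Threading.Tasks;

class BoundedReaderSketch
{
    static void Main()
    {
        const int maxProcessedBlocks = 16;

        // The bounded capacity replaces the manual Count()/Sleep back-pressure loop:
        // Add blocks until the consumer has taken an item.
        var readCollection = new BlockingCollection<byte[]>(maxProcessedBlocks);

        var reader = Task.Run(() =>
        {
            for (int blockId = 1; blockId <= 100; blockId++)
            {
                readCollection.Add(new byte[4096]);   // blocks while 16 items are queued
            }
            readCollection.CompleteAdding();
        });

        var worker = Task.Run(() =>
        {
            foreach (var block in readCollection.GetConsumingEnumerable())
            {
                // ... compress or otherwise process the block here ...
            }
        });

        Task.WaitAll(reader, worker);
        Console.WriteLine("Blocks left in the queue: {0}", readCollection.Count);
    }
}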
protected async Task OutputToCsvBatchAsync(BlockingCollection <ReportOutputItem> reportOutputItems) { if (reportOutputItems.Count() == 0) { // Nothing to output return; } _logger.Info("Outputting report lines to filename: " + GetCsvReportFilename()); // Write report output using (var w = new StreamWriter(GetCsvReportFilename(), true)) { foreach (ReportOutputItem reportOutputItem in reportOutputItems) { try { _logger.Info(string.Format("Outputting customer: {0} ({1}/{2})", reportOutputItem.Customer.CompanyProfile.CompanyName, reportOutputItems.ToList().IndexOf(reportOutputItem) + 1, reportOutputItems.Count)); await w.WriteLineAsync(GetCsvReportItemFormated(reportOutputItem)); } catch (Exception ex) { _logger.Warn("Error: " + ex.ToString()); w.WriteLine(string.Format("{0}\t{1}\t{2}\t{3}", reportOutputItem.Customer.Id, reportOutputItem.Customer.CompanyProfile.TenantId, reportOutputItem.Customer.CompanyProfile.CompanyName, ex.Message.Replace("\r\n", " ").Replace("\n", " ").Replace("\t", " "))); } } w.Flush(); } }
private bool FindBestRides(Vehicle vehicle) { AvailableRides = AvailableRides.Where(x => !x.IsPickedUp).ToList(); if (AvailableRides.Count() <= 0 || AvailableRides.Min(x => x.GetDistance()) > Math.Abs(vehicle.CurrentSteps - MaxStep)) { return(false); } if (vehicle.CurrentSteps >= MaxStep) { return(true); } var bestRide = FindMin(AvailableRides, vehicle); bestRide.IsPickedUp = true; vehicle.CurrentSteps += (bestRide.EarliestStart - vehicle.CurrentSteps) > 0 ? (bestRide.EarliestStart - vehicle.CurrentSteps) : 0; vehicle.CurrentSteps += bestRide.GetDistance(); vehicle.Route.Push(bestRide); return(true); }
public async Task <IConnection> GetConnection() { Log.V("GetConnection method is dispatched"); await this.Yield(); Log.V("GetConnection method is executing"); var conn = _state.GetConnection(); if (conn != null) { Log.V("GetConnection is completed"); return(conn); } Log.V("GetConnection is pending because client is not connected"); var tcs = new TaskCompletionSource <IConnection>(); if (_pendingRequests == null || !_pendingRequests.TryAdd(tcs)) { throw new QueueFullException($"Too many requests are queued: {_pendingRequests?.Count() ?? 0}"); } return(await tcs.Task); }
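TryAdd on a bounded BlockingCollection<T> returns false immediately when the collection is full, which is what allows the fail-fast QueueFullException path above. A minimal sketch of that admission check; the capacity of 64 and the locally declared exception type are assumptions, not taken from the original code:

using System;
using System.Collections.Concurrent;
using System.Threading.Tasks;

class PendingRequestsSketch
{
    // Stand-in for the QueueFullException type referenced in the example above.
    class QueueFullException : Exception
    {
        public QueueFullException(string message) : base(message) { }
    }

    static void Main()
    {
        // At most 64 requests may wait for a connection at any one time.
        var pendingRequests = new BlockingCollection<TaskCompletionSource<object>>(64);

        var tcs = new TaskCompletionSource<object>();

        // Returns false at once when the bounded collection is full,
        // so the caller can fail fast instead of blocking.
        if (!pendingRequests.TryAdd(tcs))
        {
            throw new QueueFullException("Too many requests are queued: " + pendingRequests.Count);
        }

        Console.WriteLine("Queued requests: {0}", pendingRequests.Count);
    }
}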
void DumpBufToDisk(ParallelOptions po) { Stopwatch sw; long TotalDBWrites = 0; long TotalRequested = 0; long DBPage = 0; SortMask = HDB.DBEntriesMask << HASH_SHIFT; do { var hashArrTpl = ReadyQueue.Take(po.CancellationToken); var hashArr = hashArrTpl.Item2; var Count = hashArrTpl.Item1; ParallelAlgorithms.Sort <HashRec>(hashArr, 0, Count, GetICompareer <HashRec>(SortByDBSizeMask)); TotalRequested += Count; if (Vtero.VerboseLevel >= 1) { WriteColor(ConsoleColor.Cyan, $"Hash entries to store: {Count:N0}"); } using (var fs = new FileStream(DBFile, FileMode.Open, FileAccess.ReadWrite, FileShare.ReadWrite, DB_READ_SIZE)) { // we need 2 pages now since were block reading and we might pick a hash that start's scan // at the very end of a page byte[] buff = new byte[DB_READ_SIZE]; byte[] zero = new byte[HASH_REC_BYTES]; int i = 0, firstIndex = 0, zeroIndex = 0; bool WriteBack = false; sw = Stopwatch.StartNew(); do { var Index = hashArr[i].Index; // convert Index to PageIndex DBPage = (long)((Index & SortMask) & ~DB_PAGE_MASK); // find block offset for this hash fs.Seek(DBPage, SeekOrigin.Begin); fs.Read(buff, 0, DB_READ_SIZE); WriteBack = false; if (po.CancellationToken.IsCancellationRequested) { return; } po.CancellationToken.ThrowIfCancellationRequested(); do { // skip duplicates if (i + 1 < Count && hashArr[i].Index == hashArr[i + 1].Index) //&& UnsafeHelp.UnsafeCompare(hashArr[i].HashData, hashArr[i + 1].HashData)) { i++; continue; } if (i < Count) { // re-read Inxex since we could be on the inner loop Index = hashArr[i].Index; // Index inside of a page var PageIndex = (int)(Index & DB_PAGE_MASK); // Hash to populate the DB with var toWrite = BitConverter.GetBytes(hashArr[i].CompressedHash); // do we already have this hash from disk? firstIndex = buff.SearchBytes(toWrite, PageIndex, HASH_REC_BYTES); if (firstIndex < 0) { zeroIndex = buff.SearchBytes(zero, PageIndex, HASH_REC_BYTES); if (zeroIndex >= 0) { // we want the modified buffer to get written back WriteBack = true; // we requested this to be pre-gen'd for us toWrite = hashArr[i].Serialized; // update buff with new hash entry for write back //Array.Copy(toWrite, 0, buff, zeroIndex, toWrite.Length); for (int j = zeroIndex, k = 0; j < zeroIndex + toWrite.Length; j++, k++) { buff[j] = toWrite[k]; } TotalDBWrites++; // set to the origional index, shift down since were bit aligned HDB.SetIdxBit(Index); } else if (zeroIndex < 0) { var strerr = $"HASH TABLE SATURATED!!! ({DBPage:X}:{PageIndex:X}) YOU NEED TO MAKE THE DB LARGER!!"; WriteColor(ConsoleColor.Red, strerr); source.Cancel(); } } } i++; if (i % 100000 == 0 && sw.Elapsed.TotalSeconds > 0) { WriteColor(ConsoleColor.Cyan, $"DB commit entries: {i:N0} - per second {(i / sw.Elapsed.TotalSeconds):N0}"); } // continue to next entry if it's in the same block } while (i < Count && (((hashArr[i].Index & SortMask) & ~DB_PAGE_MASK) == (ulong)DBPage)); if (WriteBack) { if (po.CancellationToken.IsCancellationRequested) { return; } // reset seek position fs.Seek(DBPage, SeekOrigin.Begin); // only write back 1 page if we can help it fs.Write(buff, 0, DB_READ_SIZE); } } while (i < Count); WriteColor(ConsoleColor.Cyan, $"DB entries: {i:N0} - per second {(i / sw.Elapsed.TotalSeconds):N0}"); //aPool.Return(hashArr); } } while (!DoneHashLoad || ReadyQueue.Count() > 0); WriteColor(ConsoleColor.Cyan, $"Finished DB write {TotalDBWrites:N0} NEW entries. Requsted {TotalRequested:N0} (reduced count reflects de-duplication). Task time: {sw.Elapsed}"); }
public int JobsInQueue() { return(_jobsCollection.Count()); }
public void TestCancellation() { IHostEnvironment env = new MLContext(seed: 42); for (int z = 0; z < 1000; z++) { var mainHost = env.Register("Main"); var children = new ConcurrentDictionary <IHost, List <IHost> >(); var hosts = new BlockingCollection <Tuple <IHost, int> >(); hosts.Add(new Tuple <IHost, int>(mainHost.Register("1"), 1)); hosts.Add(new Tuple <IHost, int>(mainHost.Register("2"), 1)); hosts.Add(new Tuple <IHost, int>(mainHost.Register("3"), 1)); hosts.Add(new Tuple <IHost, int>(mainHost.Register("4"), 1)); hosts.Add(new Tuple <IHost, int>(mainHost.Register("5"), 1)); int iterations = 100; Random rand = new Random(); var addThread = new Thread( () => { for (int i = 0; i < iterations; i++) { var randHostTuple = hosts.ElementAt(rand.Next(hosts.Count - 1)); var newHost = randHostTuple.Item1.Register((randHostTuple.Item2 + 1).ToString()); hosts.Add(new Tuple <IHost, int>(newHost, randHostTuple.Item2 + 1)); if (!children.ContainsKey(randHostTuple.Item1)) { children[randHostTuple.Item1] = new List <IHost>(); } else { children[randHostTuple.Item1].Add(newHost); } } }); addThread.Start(); Queue <IHost> queue = new Queue <IHost>(); for (int i = 0; i < 5; i++) { IHost rootHost = null; var index = 0; do { index = rand.Next(hosts.Count); } while ((hosts.ElementAt(index).Item1 as ICancelable).IsCanceled || // use 2 instead of 3 here as there is no guarantee there is always level 2 children hosts.ElementAt(index).Item2 < 2); (hosts.ElementAt(index).Item1 as ICancelable).CancelExecution(); rootHost = hosts.ElementAt(index).Item1; queue.Enqueue(rootHost); // all children has been canceled, we should stop looking if (hosts.Count(q => (q.Item1 as ICancelable).IsCanceled) == hosts.Count - 5) { break; } } addThread.Join(); while (queue.Count > 0) { var currentHost = queue.Dequeue(); Assert.True((currentHost as ICancelable).IsCanceled); if (children.ContainsKey(currentHost)) { children[currentHost].ForEach(x => queue.Enqueue(x)); } } } }
public static void Doit(DSreplays Data, ScanStateChange stateChange, StartUp _startUp, int cores = 2) { source = new CancellationTokenSource(); token = source.Token; _empty = new ManualResetEvent(false); CORES = cores; Scan.Done = 0; Failed = new List <string>(); Console.WriteLine("Engine start."); s2dec.DEBUG = _startUp.Conf.Debug; s2dec.JsonFile = Program.myJson_file; s2dec.REPID = Data.ID; s2dec.ReplayFolder = Data.ReplayFolder; s2dec.LoadEngine(Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location)); //s2dec.LoadEngine(); s2dec.END = new DateTime(); s2dec.START = DateTime.UtcNow; stateChange.Update = !stateChange.Update; int total = 0; lock (Data.Todo) { _jobs_decode = new BlockingCollection <string>(); foreach (var ent in Data.Todo) { try { _jobs_decode.Add(ent); } catch (OperationCanceledException) { break; } total++; } } s2dec.TOTAL = total; Scan.Total = total; s2dec.TOTAL_DONE = 0; Scan.Info = s2dec.TOTAL_DONE + "/" + s2dec.TOTAL + " done. (0%)"; for (int i = 0; i < CORES; i++) { Thread thread = new Thread(OnHandlerStart) { IsBackground = true }; //Mark 'false' if you want to prevent program exit until jobs finish thread.Start(); } Task tsscan = Task.Factory.StartNew(() => { int i = 0; while (!_empty.WaitOne(1000)) { double twr = 0; if (s2dec.TOTAL > 0) { twr = (double)s2dec.TOTAL_DONE * 100 / (double)s2dec.TOTAL; } Scan.Done = Math.Round(twr, 2); string bab = s2dec.TOTAL_DONE + "/" + s2dec.TOTAL + " done. (" + Scan.Done.ToString() + "%)"; Scan.Info = bab; Console.Write("\r{0} ", bab); if (_jobs_decode.Count() == 0) { i++; if (!s2dec.END.Equals(DateTime.MinValue) || i > 20) { break; } } stateChange.Update = !stateChange.Update; } /* * Console.WriteLine("\r " + s2dec.TOTAL + "/" + s2dec.TOTAL + " done. (100%)"); * Console.WriteLine("Jobs done."); * Scan.Info = s2dec.TOTAL + "/" + s2dec.TOTAL + " done. (100%)"; * Scan.Done = 100; */ double wr = 0; if (s2dec.TOTAL > 0) { wr = (double)s2dec.TOTAL_DONE * 100 / (double)s2dec.TOTAL; } Scan.Done = Math.Round(wr, 2); string info = s2dec.TOTAL_DONE + "/" + s2dec.TOTAL + " done. (" + Scan.Done.ToString() + "%)"; Scan.Info = info; Scan.Running = false; Elapsed = s2dec.END - s2dec.START; if (s2dec.THREADS > 0) { int j = 0; while (s2dec.THREADS > 0 || j > 60) { Thread.Sleep(250); j++; } } Reload(Data); Failed = new List <string>(s2dec.REDO.Keys.ToList()); stateChange.Update = !stateChange.Update; }, TaskCreationOptions.AttachedToParent); }
/// <summary> /// Gets the random item. /// </summary> /// <typeparam name="T"></typeparam> /// <param name="items">The items.</param> /// <returns></returns> public static T GetRandomItem <T>(this BlockingCollection <T> items) where T : class { return(items.ElementAt(DummyDataProvider.Instance.GetRandomNumber(items.Count()))); }
/// <summary> /// /// </summary> /// <param name="validateJsonConfig"> /// </param> public EDSValidationResult ValidateNodes(string validateJsonConfig) { try { IValidateContext context = new ValidateContext(); EDSValidationResult edsValidationResult = new EDSValidationResult(context); //This method will validate tree and see if all nodes are present in domain _logger.LogInformation(string.Format("Starting Validation Nodes of nodes with config: /n/n {0}", validateJsonConfig)); BlockingCollection <IValidationNode> nodes = ValidateAndLoad(validateJsonConfig); _logger.LogInformation(string.Format("Fetched {0} nodes for validation.", nodes.Count)); //Fetch from table storage all codes or cache. Stopwatch sw = new Stopwatch(); sw.Start(); //Read all nodes which Parent Validation code as null, because we can execute them in parallel _logger.LogInformation(string.Format("Executing {0} parent nodes for validation.", nodes.Count(y => string.IsNullOrEmpty(y.ParentValidationCode)))); Parallel.ForEach(nodes.Where(m => string.IsNullOrEmpty(m.ParentValidationCode)).ToList(), vn => { _logger.LogInformation(string.Format("Start Validating {0} node.", vn.ValidationCode)); Stopwatch swNode = new Stopwatch(); swNode.Start(); if (vn.Validate(context)) { //Only if I am passed edsValidationResult.NodeValidateResult.Add(new KeyValuePair <IValidationNode, bool>(vn, true)); edsValidationResult.NodeExecutionTimeResult.Add(new KeyValuePair <IValidationNode, long>(vn, swNode.ElapsedMilliseconds)); _logger.LogInformation(string.Format("Passed Validation of {0} node.", vn.ValidationCode)); ValidateChilds(nodes, vn, context, edsValidationResult); } else { edsValidationResult.NodeValidateResult.Add(new KeyValuePair <IValidationNode, bool>(vn, false)); edsValidationResult.NodeExecutionTimeResult.Add(new KeyValuePair <IValidationNode, long>(vn, swNode.ElapsedMilliseconds)); _logger.LogInformation(string.Format("Failed Validation of {0} node.", vn.ValidationCode)); } _logger.LogInformation(string.Format("Completed Validation of {0} node and its child.", vn.ValidationCode)); }); long timetaken = sw.ElapsedMilliseconds; _logger.LogInformation(string.Format("Completed Validation of {0} node and its child in {1} milliseconds.", nodes.Count, timetaken.ToString())); edsValidationResult.NodeExecutionTimeResult.CompleteAdding(); edsValidationResult.TotalTimeExecution = timetaken; //sw.Stop(); //sw = new Stopwatch(); //sw.Start(); //context = new ValidateContext(); ////Read all nodes which Parent Validation code as null, because we can execute them in parallel //foreach (var n in nodes) //{ // n.Validate(context); //} //string ss = sw.ElapsedMilliseconds.ToString(); //Console.WriteLine("Total time : " + sw.ElapsedMilliseconds); return(edsValidationResult); } catch (Exception ex) { _logger.LogError(ex, "Validation of Nodes failed. Please contact administrator."); //Log the error throw new Exception("Failed to validate nodes.", ex); } }