public static List<Builder> LoadBuildersWithTasks(int numberOfBuilders) { BlockingCollection<Builder> buildersToLoad = new BlockingCollection<Builder>(); BlockingCollection<Builder> loadedBuilders = new BlockingCollection<Builder>(); for (int i = 0; i < numberOfBuilders; i++) { buildersToLoad.Add(new Builder { Name = "Builder" + i, Status = "Status" + i }); } buildersToLoad.CompleteAdding(); Task loader1 = Task.Factory.StartNew(() => { foreach (Builder item in buildersToLoad.GetConsumingEnumerable()) { Thread.Sleep(1000); loadedBuilders.Add(item); } }, TaskCreationOptions.LongRunning); Task loader2 = Task.Factory.StartNew(() => { foreach (Builder item in buildersToLoad.GetConsumingEnumerable()) { Thread.Sleep(1000); loadedBuilders.Add(item); } }, TaskCreationOptions.LongRunning); Task.WaitAll(loader1, loader2); return loadedBuilders.ToList(); }
/// <summary>
/// Stops the consumer, specifying the exact number of the frame that should be consumed last.
/// That frame may arrive in the future, so the consumer will wait for it, within a timeout.
/// </summary>
/// <param name="lastItemToConsume">
/// Number of the last frame that should be consumed. This can be a number in the future;
/// in that case the consumer keeps going until a frame with this number or larger is added.
/// </param>
/// <param name="timeoutMilliseconds">Number of milliseconds the consumer will wait for the last
/// item to consume. If it doesn't arrive, the consumer stops anyway.</param>
public void Stop(long lastItemToConsume, int timeoutMilliseconds = 5000)
{
    // Save the number of the frame that should be the last one consumed. When processing videos
    // this is the last frame of the video or of the range being postprocessed.
    // For cameras this is the current frame at the time the stop recording command is received.
    this.lastItemNumberToConsume = lastItemToConsume;

    // If the last item added is already at or past the desired last item to consume, we can mark
    // the buffer as completed and the consumer loop will stop when it finishes consuming the items.
    if (LastItemAdded >= lastItemToConsume)
    {
        buffer?.CompleteAdding();
    }

    // If the last item to be consumed has not been added yet, we have to wait for it to arrive,
    // and TryAdd will mark the buffer as completed. There is the possibility that it never arrives
    // because it was dropped during processing, so we have a timeout for that case.
    Task.Delay(timeoutMilliseconds).ContinueWith((t) =>
    {
        // On timeout we assume that the last item to be consumed will never arrive, so we mark
        // the buffer as completed and let the consumer loop finish consuming whatever it can.
        buffer?.CompleteAdding();
    });
}
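// The comments above refer to a producer-side TryAdd that completes the buffer once the requested
// last frame has arrived. That method is not shown here; the following is only a minimal sketch of
// what it could look like, assuming the same fields used in Stop (buffer, LastItemAdded,
// lastItemNumberToConsume) and a hypothetical Frame type.
public bool TryAdd(long itemNumber, Frame frame)
{
    if (buffer == null || buffer.IsAddingCompleted)
    {
        return false;
    }

    try
    {
        buffer.Add(frame);
    }
    catch (InvalidOperationException)
    {
        // CompleteAdding raced with this Add; the frame is dropped.
        return false;
    }

    LastItemAdded = itemNumber;

    // Once the frame requested by Stop() has been added, no further items are expected.
    if (lastItemNumberToConsume > 0 && itemNumber >= lastItemNumberToConsume)
    {
        buffer.CompleteAdding();
    }

    return true;
}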
public async Task StopAsync(CancellationToken cancellationToken) { // Stop called without start if (_executingThread is null) { return; } _logger.LogInformation($"{DateTime.Now} Digitalstrom Event Subscriber Service is stopping."); _dssEventSubscriber?.Dispose(); _dssEventSubscriber = null; _persistenceQueue?.CompleteAdding(); _persistenceQueue = null; try { // Signal cancellation to the executing method _cancellationSource?.Cancel(); } finally { // Wait until the thread completes or the stop token triggers await Task.WhenAny(_executingThread, Task.Delay(Timeout.Infinite, cancellationToken)); } }
private static void Decrypt(Func<IBufferedCipher> engine, string encryptedFileName, bool allPaddings)
{
    // Load the encrypted file.
    var encrypted = File.ReadAllBytes(encryptedFileName);

    // IProducerConsumerCollection
    using (var producerConsumerCollection = new BlockingCollection<string>(50000))
    {
        // Consumers.
        var tasks = new List<Task>();
        for (int workingThread = 0; workingThread < Environment.ProcessorCount; workingThread++)
        {
            tasks.Add(Task.Factory.StartNew(() => DecryptThread(engine(), encrypted, producerConsumerCollection, allPaddings)));
        }

        // Producer.
        while (true)
        {
            var line = Console.ReadLine();
            if (line == null)
            {
                producerConsumerCollection.CompleteAdding();
                break;
            }

            producerConsumerCollection.Add(line);
        }

        // Wait until processing is done.
        foreach (Task task in tasks)
        {
            task.Wait();
        }
    }
}
public static void Run() { int size = 10; BlockingCollection<int> col = new BlockingCollection<int>(size/3); Task read = Task.Run(() => { foreach(var item in col.GetConsumingEnumerable()) { Console.WriteLine("Read " + item); } }); Task write = Task.Run(() => { foreach(int i in Enumerable.Range(1, size)) { Console.WriteLine("adding " + i); col.Add(i); } col.CompleteAdding(); }); write.Wait(); read.Wait(); }
/// <summary>
/// Consumes skeletons from the input collection in batches of three, averages the joint positions
/// and orientations of each batch, and adds the smoothed skeleton to the output collection.
/// </summary>
/// <param name="input">Input collection of skeletons.</param>
/// <param name="output">Output collection receiving the smoothed skeletons.</param>
public void Do(BlockingCollection<ISkeleton> input, BlockingCollection<ISkeleton> output)
{
    var skeletons = new List<ISkeleton>();
    try
    {
        foreach (var skeleton in input.GetConsumingEnumerable())
        {
            skeletons.Add(skeleton);
            if (skeletons.Count < 3)
            {
                continue;
            }

            var first = skeletons.First();
            var tail = skeletons.Skip(1);
            foreach (var joint in first.Joints)
            {
                var tailJoints = tail.Select(s => s.GetJoint(joint.JointType));
                joint.Point = Mean(new List<Vector3> { joint.Point }.Concat(tailJoints.Select(j => j.Point)).ToList());
                joint.Orientation = Mean(new List<Vector4> { joint.Orientation }.Concat(tailJoints.Select(j => j.Orientation)).ToList());
                first.UpdateSkeleton(joint.JointType, joint);
            }

            output.Add(first);
            skeletons.Clear();
        }
    }
    finally
    {
        output.CompleteAdding();
    }
}
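// The Mean helpers used above are not shown. A minimal sketch of component-wise averaging,
// assuming System.Numerics.Vector3/Vector4 (the snippet's actual vector types may differ):
private static Vector3 Mean(IReadOnlyList<Vector3> points)
{
    var sum = Vector3.Zero;
    foreach (var p in points)
    {
        sum += p;
    }

    return sum / points.Count;
}

private static Vector4 Mean(IReadOnlyList<Vector4> values)
{
    var sum = Vector4.Zero;
    foreach (var v in values)
    {
        sum += v;
    }

    return sum / values.Count;
}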
public void IterationFinished(int iteration, int totalIterations) { _tripStorageQueue?.CompleteAdding(); _StoreTrips?.Wait(); _tripStorageQueue?.Dispose(); _tripStorageQueue = null; }
private async Task <(long size, long ios)> GenerateInternalAndReturnBytesWritten() { var chunkQueue = new BlockingCollection <WriterWorkItem>(_maxQueuedChunks); var state = new WriterState { RequestedFileSizeInBytes = _targetFileSizeInBytes }; var bufferPool = ArrayPool <byte> .Shared; _logger($"Writing {_targetFileSizeInBytes/Units.BytesPerMiB} MiB of test data..."); try { var cancellationTokenSource = new CancellationTokenSource(); var consumerTasks = LaunchChunkWriters(state, chunkQueue, bufferPool); var producerTasks = LaunchChunkProducers(chunkQueue, bufferPool, cancellationTokenSource.Token); var allTasks = producerTasks.Concat(consumerTasks); var result = await Task.WhenAny(allTasks); cancellationTokenSource.Cancel(); ThrowIfFailed(result); await Task.WhenAll(allTasks); return(GetActualFileSize(), state.WriteRequests); } finally { chunkQueue?.CompleteAdding(); } }
private void ResetConnections() { _queue.DispatchAsync(() => { _client?.Dispose(); _client = null; NetworkStream?.Dispose(); NetworkStream = null; _readWriteCancellationTokenSource?.Cancel(); _readWriteCancellationTokenSource?.Dispose(); _readWriteCancellationTokenSource = null; _receivePause?.Dispose(); _receivePause = null; _writeQueue?.CompleteAdding(); var count = 0; while (count++ < 5 && _writeQueue != null && !_writeQueue.IsCompleted) { Thread.Sleep(500); } if (_writeQueue != null && !_writeQueue.IsCompleted) { Log.To.Sync.W(Tag, "Timed out waiting for _writeQueue to finish, forcing Dispose..."); } _writeQueue?.Dispose(); _writeQueue = null; }); }
static void Main(string[] args) { // create a blocking collection BlockingCollection<int> blockingCollection = new BlockingCollection<int>(); // create and start a producer Task.Factory.StartNew(() => { // put the producer to sleep System.Threading.Thread.Sleep(500); for (int i = 0; i < 100; i++) { // add the item to the collection blockingCollection.Add(i); } // mark the collection as finished blockingCollection.CompleteAdding(); }); // create and start a consumer Task consumer = Task.Factory.StartNew(() => { // use a foreach loop to consume the blocking collection foreach (int i in blockingCollection) { Console.WriteLine("Item {0}", i); } Console.WriteLine("Collection is fully consumed"); }); // wait for the consumer to finish consumer.Wait(); // wait for input before exiting Console.WriteLine("Press enter to finish"); Console.ReadLine(); }
/// <summary> /// Stops polling for messages, waits for current messages to be handled, then /// closes and disposes the consumer. /// </summary> protected override void Dispose(bool disposing) { if (_disposed) { return; } _disposed = true; _stopped = true; _disposeSource.Cancel(); if (_pollingThread.IsValueCreated) { _pollingThread.Value.Join(); } _trackingCollection?.CompleteAdding(); if (_trackingThread?.IsValueCreated is true) { _trackingThread.Value.Join(); } if (_consumer.IsValueCreated) { _consumer.Value.Close(); _consumer.Value.Dispose(); } base.Dispose(disposing); }
private void ReadDT() { BlockingCollection<string> lines = new BlockingCollection<string>(); var stage1 = Task.Run(() => { using (StreamReader sr = new StreamReader("text.txt")) { string s; while ((s = sr.ReadLine()) != null) lines.Add(s); } lines.CompleteAdding(); }); var stage2 = Task.Run(() => { int i = 0; dataGridView1.Invoke((Action)(() => dataGridView1.SuspendLayout())); foreach (string line in lines.GetConsumingEnumerable()) { dataGridView1.Invoke((Action)(() => dataGridView1.Rows.Add(line.Split(';')))); dataGridView1.Invoke((Action)(() => dataGridView1.Rows[i].HeaderCell.Value = i.ToString())); i++; } dataGridView1.Invoke((Action)(() => dataGridView1.ResumeLayout(false))); }); Task.WaitAll(stage1, stage2); }
public void Dispose() { if (_isDisposed) { return; } DisposeTimer(); try { _headersRequests?.CompleteAdding(); } catch (ObjectDisposedException) { } try { _bodiesRequests?.CompleteAdding(); } catch (ObjectDisposedException) { } _isDisposed = true; }
public static void InternalCancellation_WakingUp() { for (int test = 0; test < 2; test++) { BlockingCollection<int> coll1 = new BlockingCollection<int>(1); coll1.Add(1); //fills the collection. Assert.False(coll1.IsAddingCompleted, "InternalCancellation_WakingUp: At this point CompleteAdding should not have occurred."); // This is racy on what we want to test, in that it's possible this queued work could execute // so quickly that CompleteAdding happens before the tested method gets invoked, but the test // should still pass in such cases, we're just testing something other than we'd planned. Task t = Task.Run(() => coll1.CompleteAdding()); // Try different methods that should wake up once CompleteAdding has been called int item = coll1.Take(); // remove the existing item in the collection switch (test) { case 0: Assert.Throws<InvalidOperationException>(() => coll1.Take()); break; case 1: Assert.False(coll1.TryTake(out item)); break; } t.Wait(); Assert.True(coll1.IsAddingCompleted, "InternalCancellation_WakingUp: At this point CompleteAdding should have occurred."); } }
/// <summary>
/// Performs the complete operation. In this method all threads/tasks are synchronized, including
/// final data reporting (via _dataStorage).
/// </summary>
/// <returns></returns>
private async Task InternalProcessAsync()
{
    if (_domainsTasks == null)
    {
        return;
    }

    try
    {
        await _domainsTasks;
        var allIpResolve = WhenAllOrError(_exchangeTasks);
        if (_cancellation.IsCancellationRequested)
        {
            return;
        }

        await allIpResolve;
        _dataStorage?.CompleteAdding();
        _dataTask?.Wait();
    }
    catch (Exception e)
    {
        // After the first exception at this level the whole process is terminated, and we only need
        // the first exception message, so there is no need for AggregateException (InnerExceptions).
        ErrorMessage = e.Message;
    }
}
// Demonstrates: // BlockingCollection<T>.Add() // BlockingCollection<T>.Take() // BlockingCollection<T>.CompleteAdding() public static void BC_AddTakeCompleteAdding() { using (BlockingCollection<int> bc = new BlockingCollection<int>()) { // Spin up a Task to populate the BlockingCollection using (Task t1 = Task.Factory.StartNew(() => { bc.Add(1); bc.Add(2); bc.Add(3); bc.CompleteAdding(); })) { // Spin up a Task to consume the BlockingCollection using (Task t2 = Task.Factory.StartNew(() => { try { // Consume the BlockingCollection while (true) Console.WriteLine(bc.Take()); } catch (InvalidOperationException) { // An InvalidOperationException means that Take() was called on a completed collection Console.WriteLine("That's All!"); } })) Task.WaitAll(t1, t2); } } }
public static void Main(string[] args) { BlockingCollection<string> collection = new BlockingCollection<string>(); Task read = Task.Run(() => { foreach (string v in collection.GetConsumingEnumerable()) { Console.WriteLine(v); } }); Task write = Task.Run(() => { while (true) { string s = Console.ReadLine(); if (string.IsNullOrWhiteSpace(s)) { collection.CompleteAdding(); break; } collection.Add(s); } }); write.Wait(); }
public void Run() { BlockingCollection<string> col = new BlockingCollection<string>(); Task read = Task.Run(() => { foreach (string v in col.GetConsumingEnumerable()) Console.WriteLine(v); Console.WriteLine("End of read task."); }); Task write = Task.Run(() => { while (true) { string s = Console.ReadLine(); if (string.IsNullOrWhiteSpace(s)) { col.CompleteAdding(); break; } col.Add(s); } }); write.Wait(); Thread.Sleep(1000); }
public int ConsumingEnumerableExample()
{
    var collection = new BlockingCollection<int>();

    var taker = Task.Run(() =>
    {
        var take = 0;
        foreach (var aTake in collection.GetConsumingEnumerable())
        {
            take = aTake;
        }

        return take;
    });

    var adder = Task.Run(() =>
    {
        for (int x = 0; x <= 10; x++)
        {
            collection.Add(x);
            System.Threading.Thread.Sleep(1);
        }

        collection.CompleteAdding();
    });

    Task.WaitAll(taker, adder);
    return taker.Result;
}
public override IEnumerable<Row> Execute(IEnumerable<Row> rows)
{
    var blockingCollection = new BlockingCollection<Row>();
    var count = _operations.Count;
    if (count == 0)
    {
        yield break;
    }

    Debug("Creating tasks for {0} operations.", count);

    var tasks = _operations.Select(currentOp => Task.Factory.StartNew(() =>
    {
        try
        {
            foreach (var row in currentOp.Execute(null))
            {
                blockingCollection.Add(row);
            }
        }
        finally
        {
            if (Interlocked.Decrement(ref count) == 0)
            {
                blockingCollection.CompleteAdding();
            }
        }
    })).ToArray();

    foreach (var row in blockingCollection.GetConsumingEnumerable())
    {
        yield return row;
    }

    Task.WaitAll(tasks); // re-raise any exceptions that were raised during execution
}
static void NonBlockingProducer(BlockingCollection<int> bc)
{
    int itemToAdd = 0;
    bool success = false;

    do
    {
        // A shorter timeout causes more failures.
        success = bc.TryAdd(itemToAdd, 2);

        if (success)
        {
            Console.WriteLine(" Add:{0}", itemToAdd);
            itemToAdd++;
        }
        else
        {
            Console.Write(" AddBlocked:{0} Count = {1} ", itemToAdd.ToString(), bc.Count);
            // Don't increment itemToAdd. Try again on the next iteration.
            // Do something else useful instead.
            UpdateProgress(itemToAdd);
        }
    } while (itemToAdd < inputs);

    // No lock required here because there is only one producer.
    bc.CompleteAdding();
}
private static void Decrypt(IHashAlgorithm algo) { // IProducerConsumerCollection using (var producerConsumerCollection = new BlockingCollection<string>(50000)) { // Consumer. var tasks = new List<Task>(); for (int workingThread = 0; workingThread < Environment.ProcessorCount; workingThread++) { tasks.Add(Task.Factory.StartNew(() => HashThread(algo, producerConsumerCollection))); } // Producer. while (true) { var line = Console.ReadLine(); if (line == null) { producerConsumerCollection.CompleteAdding(); break; } producerConsumerCollection.Add(line); } // Wait until processing is done. foreach (Task task in tasks) { task.Wait(); } } }
public object Read(Newtonsoft.Json.JsonReader reader) { if (reader.TokenType != Newtonsoft.Json.JsonToken.StartObject) throw new Exception(); int w = ReadIntProperty(reader, "Width"); int h = ReadIntProperty(reader, "Height"); int d = ReadIntProperty(reader, "Depth"); var grid = new TileData[d, h, w]; reader.Read(); if (reader.TokenType != Newtonsoft.Json.JsonToken.PropertyName || (string)reader.Value != "TileData") throw new Exception(); ReadAndValidate(reader, Newtonsoft.Json.JsonToken.StartArray); var queue = new BlockingCollection<Tuple<int, byte[]>>(); var readerTask = Task.Factory.StartNew(() => { for (int i = 0; i < d; ++i) { reader.Read(); int z = (int)(long)reader.Value; byte[] buf = reader.ReadAsBytes(); queue.Add(new Tuple<int, byte[]>(z, buf)); } queue.CompleteAdding(); }); Parallel.For(0, d, i => { var tuple = queue.Take(); int z = tuple.Item1; byte[] arr = tuple.Item2; using (var memStream = new MemoryStream(arr)) { using (var decompressStream = new DeflateStream(memStream, CompressionMode.Decompress)) using (var streamReader = new BinaryReader(decompressStream)) { for (int y = 0; y < h; ++y) for (int x = 0; x < w; ++x) grid[z, y, x].Raw = streamReader.ReadUInt64(); } } }); readerTask.Wait(); ReadAndValidate(reader, Newtonsoft.Json.JsonToken.EndArray); ReadAndValidate(reader, Newtonsoft.Json.JsonToken.EndObject); return grid; }
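// ReadIntProperty and ReadAndValidate are helpers not included in the snippet above. A minimal
// sketch of what they could look like, assuming they simply advance the Newtonsoft.Json reader and
// validate the token that was read (the original helpers may differ):
private static void ReadAndValidate(Newtonsoft.Json.JsonReader reader, Newtonsoft.Json.JsonToken expected)
{
    if (!reader.Read() || reader.TokenType != expected)
        throw new Exception($"Expected {expected} but found {reader.TokenType}");
}

private static int ReadIntProperty(Newtonsoft.Json.JsonReader reader, string name)
{
    reader.Read();
    if (reader.TokenType != Newtonsoft.Json.JsonToken.PropertyName || (string)reader.Value != name)
        throw new Exception($"Expected property '{name}'");

    // ReadAsInt32 advances to the property value and returns it.
    return reader.ReadAsInt32() ?? throw new Exception($"Property '{name}' has no integer value");
}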
static void Main(string[] args)
{
    // create a blocking collection
    BlockingCollection<int> blockingCollection = new BlockingCollection<int>();

    // create and start a producer
    Task.Factory.StartNew(() =>
    {
        // put items into the collection
        for (int i = 0; i < 1000; i++)
        {
            blockingCollection.Add(i);
        }

        // mark the collection as complete
        blockingCollection.CompleteAdding();
    });

    // create and start a consumer
    Task.Factory.StartNew(() =>
    {
        while (!blockingCollection.IsCompleted)
        {
            // try to take an item from the collection; a plain Take() here could throw
            // InvalidOperationException if the collection completes between the IsCompleted
            // check and the Take() call
            if (blockingCollection.TryTake(out int item))
            {
                // print out the item
                Console.WriteLine("Item {0}", item);
            }
        }
    });

    // wait for input before exiting
    Console.WriteLine("Press enter to finish");
    Console.ReadLine();
}
private static void ReadFileNames(string path, BlockingCollection<string> output) { foreach (string filename in Directory.EnumerateFiles(path, "*.cs")) { output.Add(filename); } output.CompleteAdding(); }
public void Disconnect() { _sendQueue?.CompleteAdding(); _sendQueue = null; client?.Close(); client = null; }
/// <summary> /// stops the session queue worker /// </summary> public void StopRlmDbWorkersSessions() { bcSessionsToCreate?.CompleteAdding(); bcSessionsToUpdate?.CompleteAdding(); sessionsDone = true; totalSessionsCount = Sessions.Count; }
public static void Unregister() { MyLog.Default.WriteLineAndConsole("TORCH MOD: Unregistering mod communication."); MyAPIGateway.Multiplayer?.UnregisterMessageHandler(NET_ID, MessageHandler); _processing?.CompleteAdding(); _closing = true; //_task.Wait(); }
static void UseBlockingCollection()
{
    var count = 0;
    const int countMax = 10;

    var blockingCollection = new BlockingCollection<string>();

    var producer1 = Task.Factory.StartNew(() =>
    {
        while (count <= countMax)
        {
            blockingCollection.Add("value" + count);
            count++;
        }

        blockingCollection.CompleteAdding();
    });

    // Note: both producers share the unsynchronized 'count' variable, and the second producer can
    // throw InvalidOperationException if it calls Add after the first has called CompleteAdding.
    var producer2 = Task.Factory.StartNew(() =>
    {
        while (count <= countMax)
        {
            blockingCollection.Add("value" + count);
            count++;
        }

        blockingCollection.CompleteAdding();
    });

    var consumer1 = Task.Factory.StartNew(() =>
    {
        foreach (var value in blockingCollection.GetConsumingEnumerable())
        {
            Console.WriteLine("Worker 1: " + value);
            Thread.Sleep(1000);
        }
    });

    var consumer2 = Task.Factory.StartNew(() =>
    {
        foreach (var value in blockingCollection.GetConsumingEnumerable())
        {
            Console.WriteLine("Worker 2: " + value);
            Thread.Sleep(1000);
        }
    });

    Task.WaitAll(producer1, producer2, consumer1, consumer2);
}
/// <summary> /// IDisposable Dispose method to dispose IDisposable resources. /// </summary> public void Dispose() { Timer?.Dispose(); LogQueue?.CompleteAdding(); WriteQueueTask?.Wait(); StreamWriter?.Dispose(); FileStream?.Dispose(); LogQueue?.Dispose(); }
private static Trial[] LoadDataParallel(string path) { BlockingCollection<string> inputLines = new BlockingCollection<string>(); ConcurrentBag<Trial> trials = new ConcurrentBag<Trial>(); int itemsAdded = 0; int itemsProcessed = 0; const int outputFrequency = 100000; List<Task> consumerTasks = new List<Task>(); int counter = 0; using (StreamReader file = new StreamReader(path)) { // Skip header file.ReadLine(); string lineRead; while ((lineRead = file.ReadLine()) != null) { inputLines.Add(lineRead); ++itemsAdded; if (++counter >= outputFrequency) { counter = 0; Console.Out.WriteLine("Trial lines Read: {0}", itemsAdded); if (inputLines.Count >= outputFrequency * consumerTasks.Count) { consumerTasks.Add(Task.Run(() => { string line; do { while (inputLines.TryTake(out line)) { string[] fields = line.Trim().Split(new char[] { ',' }); if (fields.Length >= 11) { Trial trial = new Trial(fields); trials.Add(trial); } int numProcessed = Interlocked.Increment(ref itemsProcessed); if (numProcessed % 100000 == 0) Console.Out.WriteLine("Trial lines processed: {0}", numProcessed); } Thread.Sleep(100); } while (!inputLines.IsCompleted); })); } } } inputLines.CompleteAdding(); } Task.WaitAll(consumerTasks.ToArray()); return trials.ToArray(); }
private void DeactivateRecording() { isRecordingPendingFrames = true; videoFramesQueue?.CompleteAdding(); audioFramesQueue?.CompleteAdding(); Debug.WriteLine($"Video: {videoSamples}, Audio: {audioSamples}, Written: {samplesWritten}"); Debug.WriteLine($"Remaining:: Video: {videoFramesQueue?.Count}, Audio: {audioFramesQueue?.Count}"); }
private void Dispose(bool managed) { if (managed) { GC.SuppressFinalize(this); } _stream?.CompleteAdding(); _stream?.Dispose(); _stream = null; }
public void Dispose() { _timer.Elapsed -= TimedFlush; _timer.Dispose(); _queue?.CompleteAdding(); _consumerTask?.Wait(); _sw?.Dispose(); _fs?.Dispose(); _queue?.Dispose(); }
public override void Stop() { #if TRACK_DETAILED_STATS if (StatisticsCollector.CollectThreadTimeTrackingStats) { threadTracking.OnStopExecution(); } #endif requestQueue?.CompleteAdding(); base.Stop(); }
public static Pipeline StartNew(IFilter filter, IEnumerable<object> input, int bufferSize) { var inputEnumerable = input as object[] ?? input.ToArray(); var nextStageInput = new BlockingCollection<object>(inputEnumerable.Count()); foreach (var inputElement in inputEnumerable) { nextStageInput.Add(inputElement); } nextStageInput.CompleteAdding(); return new Pipeline(filter, nextStageInput, new BlockingCollection<object>(bufferSize)); }
public void Dispose() { _messageQueue?.CompleteAdding(); try { _outputThread.Join(1500); // with timeout in case writer is locked } catch (ThreadStateException) { } _messageQueue?.Dispose(); }
static async Task TaskProducer(BlockingCollection<CustomTask> collection) { for (int i = 1; i <= 20; i++) { await Task.Delay(20); var workItem = new CustomTask { Id = i }; collection.Add(workItem); Console.WriteLine("Task {0} has been posted", workItem.Id); } collection.CompleteAdding(); }
static void Main(string[] args) { // create the blocking collection BlockingCollection<Deposit> blockingCollection = new BlockingCollection<Deposit>(); // create and start the producers, which will generate // deposits and place them into the collection Task[] producers = new Task[3]; for (int i = 0; i < 3; i++) { producers[i] = Task.Factory.StartNew(() => { // create a series of deposits for (int j = 0; j < 20; j++) { // create the transfer Deposit deposit = new Deposit { Amount = 100 }; // place the transfer in the collection blockingCollection.Add(deposit); } }); }; // create a many to one continuation that will signal // the end of production to the consumer Task.Factory.ContinueWhenAll(producers, antecedents => { // signal that production has ended Console.WriteLine("Signalling production end"); blockingCollection.CompleteAdding(); }); // create a bank account BankAccount account = new BankAccount(); // create the consumer, which will update // the balance based on the deposits Task consumer = Task.Factory.StartNew(() => { while (!blockingCollection.IsCompleted) { Deposit deposit; // try to take the next item if (blockingCollection.TryTake(out deposit)) { // update the balance with the transfer amount account.Balance += deposit.Amount; } } // print out the final balance Console.WriteLine("Final Balance: {0}", account.Balance); }); // wait for the consumer to finish consumer.Wait(); // wait for input before exiting Console.WriteLine("Press enter to finish"); Console.ReadLine(); }
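// Deposit and BankAccount are not defined in the snippet above. A minimal sketch of what these
// supporting types could look like so the example compiles (the original types may differ):
public class Deposit
{
    public int Amount { get; set; }
}

public class BankAccount
{
    public int Balance { get; set; }
}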
public static Task ReadFilenamesAsync(string path, BlockingCollection<string> output) { return Task.Run(() => { foreach (string filename in Directory.EnumerateFiles(path, "*.cs", SearchOption.AllDirectories)) { output.Add(filename); ConsoleHelper.WriteLine(string.Format("stage 1: added {0}", filename)); } output.CompleteAdding(); }); }
public void Dispose() { try { _tasks?.CompleteAdding(); _worker.Join(); } finally { _tasks?.Dispose(); } }
protected virtual void Dispose(bool disposing) { if (!disposedValue) { if (disposing) { queue?.CompleteAdding(); } disposedValue = true; } }
protected virtual void Dispose(bool disposing) { if (_disposed) { return; } if (disposing) { _messagesCollection?.CompleteAdding(); } _disposed = true; }
/// <summary>
/// This is where the parallel fun starts.
/// The BlockingCollection and Task objects are the parallel objects which get the parallel processing going.
/// </summary>
public void Load()
{
    // "Pipe" which contains the file names to be processed.
    var inputfiles = new BlockingCollection<string>();

    // "Pipe" which contains the results of the loads.
    var processResults = new BlockingCollection<SqlLoadResults>();

    // Start feeding the file name pipe.
    var readfileNames = Task.Factory.StartNew(() =>
    {
        try
        {
            if (loadDir != null)
            {
                foreach (var objectName in loadDir)
                {
                    Console.WriteLine(((FileExcel.FilesCSV)(objectName)).FullName);
                    Console.WriteLine();
                    inputfiles.Add(((FileExcel.FilesCSV)(objectName)).FullName);
                }
            }
        }
        finally
        {
            inputfiles.CompleteAdding();
        }
    });

    // Read the file name pipe, and in parallel call the CSV Reader Load class and the Data Load method.
    var processes = Task.Factory.StartNew(() =>
    {
        try
        {
            // A bit of dirty pool here, calling the constructor then the instance method in one go!
            // This ensures that there are multiple instances of the class being used on multiple threads.
            foreach (var loadResult in inputfiles.GetConsumingEnumerable().AsParallel()
                .Select(fileName => new CSVReaderLoad().LoadData(fileName, rowBufferLength, _dTables, this._countId, this._typeProcess)))
            {
                processResults.Add(loadResult);
            }
        }
        finally
        {
            processResults.CompleteAdding();
        }
    });

    // This "pulls" all of the results back into one IEnumerable;
    // there is some fancy thread synchronisation happening here (all under the covers).
    var summary = (from res in processResults.GetConsumingEnumerable() select res).ToList();

    foreach (var a in summary)
    {
        Console.WriteLine("{0} rows read: {1} rows returned: {2}", a.FileName, a.RowsLoaded, a.RowsReturned);
    }
}
public static void InternalCancellation_CompleteAdding_Negative()
{
    BlockingCollection<int> coll1 = new BlockingCollection<int>();
    Task.Run(() => coll1.CompleteAdding());

    // Call Take... it should wake up and throw an InvalidOperationException when CompleteAdding() is called.
    Assert.Throws<InvalidOperationException>(() => coll1.Take());
    // "InternalCancellation_WakingUpTake: an IOE should be thrown if CompleteAdding occurs during blocking Take()"

    Assert.Throws<InvalidOperationException>(() => coll1.Add(1));
    // "InternalCancellation_WakingUpAdd: an InvalidOpEx should be thrown if CompleteAdding occurs during blocking Add()"

    Assert.Throws<InvalidOperationException>(() => coll1.TryAdd(1, 1000000)); // an indefinite wait to add... 1000 seconds.
    // "InternalCancellation_WakingUpTryAdd: an InvalidOpEx should be thrown if CompleteAdding occurs during blocking Add()"
}
public static Task ReadFilenamesAsync(string path, BlockingCollection<string> output) { return Task.Factory.StartNew(() => { foreach (string filename in Directory.EnumerateFiles(path, "*.cs", SearchOption.AllDirectories)) { output.Add(filename); ColoredConsole.WriteLine($"stage 1: added {filename}"); } output.CompleteAdding(); }, TaskCreationOptions.LongRunning); }
public override void Stop() { Common.logger.Info("Stopping Reconfiguration"); abort = true; token?.SetCompleted(); queueItems?.CompleteAdding(); var nodesToList = new List <byte>(nodesToCheck); nodesToList.Sort(); Common.logger.Info("Nodes left:"); Common.logger.Info(string.Join(",", nodesToList)); }
public void Run(BlockingCollection<object> input, BlockingCollection<object> output) { try { foreach (var item in input.GetConsumingEnumerable()) { Run(new Context(item, output)); } } finally { output.CompleteAdding(); } }
public void Dispose() { _analyzerLogsCollection?.CompleteAdding(); _outputHandlers?.ToList().ForEach(aoh => aoh.Value?.Dispose()); _ruleHandlers?.ToList().ForEach(rh => rh.Value?.Dispose()); _outputHandlers?.Clear(); _ruleHandlers?.Clear(); _scheduleHandlers?.Clear(); }
public static async Task Run([NotNull] Stream stream, [NotNull] Action<string> parser, TimeSpan readTimeout, CancellationToken cancellationToken) { if (stream == null) throw new ArgumentNullException(nameof(stream)); if (parser == null) throw new ArgumentNullException(nameof(parser)); var parseCollection = new BlockingCollection<string>(); // create timeout cancellation token source try { using (var tokenSource = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken)) using (var reader = new CancellableStreamReader(stream)) { var localToken = tokenSource.Token; // start parser worker thread StartParserWorker(parseCollection, parser, localToken); while (true) { // set read timeout(add epsilon time to timeout (100 msec)) tokenSource.CancelAfter(readTimeout + TimeSpan.FromTicks(10000 * 100)); // execute reading next line and await completion var line = await reader.ReadLineAsync(localToken).ConfigureAwait(false); localToken.ThrowIfCancellationRequested(); // disable timer tokenSource.CancelAfter(Timeout.InfiniteTimeSpan); // send previous line to subsequent stage if (!String.IsNullOrWhiteSpace(line)) { parseCollection.Add(line, localToken); } if (line == null) { break; } } } } finally { // mark collection as completed and shutdown parser worker after consuming all items. parseCollection.CompleteAdding(); } }
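// StartParserWorker is referenced above but not shown. A minimal sketch of what such a worker
// could look like, assuming it simply drains the collection on a background task and invokes the
// parser for each line (the real implementation may differ):
private static void StartParserWorker(BlockingCollection<string> parseCollection, Action<string> parser, CancellationToken cancellationToken)
{
    Task.Run(() =>
    {
        // GetConsumingEnumerable ends once CompleteAdding is called in the finally block of Run.
        foreach (var line in parseCollection.GetConsumingEnumerable(cancellationToken))
        {
            parser(line);
        }
    }, cancellationToken);
}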
private void StartThread() { _thread?.Abort(); _messageQueue?.CompleteAdding(); _messageQueue?.Dispose(); _messageQueue = new BlockingCollection <WebLogEntry>(_maxQueuedMessages); _thread = new Thread(ProcessLogQueue) { IsBackground = true, Name = "WebLoggerProcessor.Thread" }; _thread.Start(); }
public void Dispose() { if (IsDisposed) { return; } // Note that This will cause an IOException in the read loop. _stream?.Close(); _outboundMessages?.CompleteAdding(); _outboundMessages?.Dispose(); _outboundMessages = null; IsDisposed = true; }
public static async Task Waiter() { _groupsOutput?.CompleteAdding(); _devicesOutput?.CompleteAdding(); _signInOutput?.CompleteAdding(); _usersOutput?.CompleteAdding(); _domainsOutput?.CompleteAdding(); _applicationsOutput?.CompleteAdding(); await _groupsWriter; await _devicesWriter; await _signInWriter; await _usersWriter; await _domainsWriter; await _applicationsWriter; }
protected virtual void Dispose(bool disposing) { lock (_lockObj) { if (_disposed) { return; } _disposed = true; } if (!disposing) { return; } _messagesQueue?.CompleteAdding(); _messagesQueue?.Dispose(); _stream?.Close(); _stream?.Dispose(); }
public void Dispose() { // // NB: Disposal of the blob logs blocking collection is managed by the continuation of the blob logging task, see WriteBlobLogsAsync. // try { _blobLogs?.CompleteAdding(); } catch (ObjectDisposedException) { // // NB: Any error in the WriteBlobLogsAsyncCore method will trigger disposal, including the case where recovery gets cancelled. // // We shouldn't fail recovery because of this, and a failure to call CompleteAdding is not an issue because the enumeration // in WriteBlobLogsAsyncCore has exited, causing us to observe the disposed exception here. // } }
public void Stop() { try { if (_workThreads == null) { OnShowMessage(new MessageEventArgs(Resources.OperationIsNotStarted)); return; } _queueInput?.CompleteAdding(); _queueOutput?.CompleteAdding(); _whaitMemoryHandle?.Set(); _output?.Flush(); _zip?.Cancel(); } catch (Exception e) { OnShowMessage(new MessageEventArgs($"Error on stop: {e.Message}")); } }
/// <summary> /// stops the cases queue worker /// </summary> public void StopRlmDbWorkersCases() { bcCasesQueue?.CompleteAdding(); if (ConfigFile.DropDb) { Thread.Sleep(5000); rlmDb.DropDB(); RlmDbLogger.Info("\n" + string.Format("[{0:G}]: {1} database successfully dropped...\n*** END ***\n", DateTime.Now, Network.DatabaseName), Network.DatabaseName); } progressUpdater.Stop(); if (rlmDb.CaseWorkerQueues != null) { foreach (var item in rlmDb.CaseWorkerQueues) { item.WorkerQueues.CompleteAdding(); } } }
public override void Dispose() { if (Interlocked.Exchange(ref DisposedValue, 1) == 1) { return; } if (IsConsumer) { _providerService.FinishSessionsAsync(this).ContinueWith(t => { if (t.IsFaulted && Logger.IsError) { Logger.Error("There was an error within NDM subprotocol.", t.Exception); } }); _receiptsRequests?.CompleteAdding(); _receiptsRequests?.Dispose(); } if (!IsProvider) { return; } ConsumerService.FinishSessionsAsync(this).ContinueWith(t => { if (t.IsFaulted && Logger.IsError) { Logger.Error("There was an error within NDM subprotocol.", t.Exception); } }); DepositApprovalsRequests?.CompleteAdding(); DepositApprovalsRequests?.Dispose(); }
public void Spike2()
{
    var queue = new BlockingCollection<int>();
    queue.CompleteAdding();

    // Adding after CompleteAdding has been called throws an InvalidOperationException.
    queue.Add(1);
}
public static void Main()
{
    BlockingCollection<string> col = new BlockingCollection<string>();

    Task read = Task.Run(() =>
    {
        foreach (string v in col.GetConsumingEnumerable())
        {
            Console.WriteLine("Taking");
            Console.WriteLine(v);
        }
    });

    Task write = Task.Run(() =>
    {
        while (true)
        {
            string s = Console.ReadLine();
            if (string.IsNullOrWhiteSpace(s))
                break;

            Console.WriteLine("Adding");
            col.Add(s);
        }

        // Signal that no more items will be added once input ends.
        col.CompleteAdding();
    });

    write.Wait();
}