public void EnqueueBeforeDequeueTest()
{
    var queue = new BlockingQueue<object>();
    var isEnqueued = new ManualResetEvent(false);
    var isDequeued = new ManualResetEvent(false);
    object value = null;

    ThreadPool.QueueUserWorkItem(_ =>
    {
        queue.Enqueue(new object());
        isEnqueued.Set();
    });
    ThreadPool.QueueUserWorkItem(_ =>
    {
        isEnqueued.WaitOne();
        value = queue.Dequeue();
        isDequeued.Set();
    });

    if (!isDequeued.WaitOne(10))
        Assert.Fail("Dequeue after Enqueue failed: Event hasn't been raised");
    if (value == null)
        Assert.Fail("Dequeue after Enqueue failed: Wrong value returned");
}
public void CreateAndUseBlockingQueue()
{
    BlockingQueue<int> queue = new BlockingQueue<int>(1);
    Thread thread = new Thread(new ThreadStart(delegate()
    {
        queue.Enqueue(1);
    }));
    thread.Start();
    int element = queue.Dequeue();
    Assert.AreEqual(1, element);
}
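The snippets in this listing come from different codebases, so the exact BlockingQueue<T> surface varies from example to example (bounded capacity, Close/IsClosed, TryDequeue/TryPeek with timeouts, ReleaseWaiters, and so on). For orientation only, here is a minimal Monitor-based sketch of the kind of queue the simplest producer/consumer examples assume; the type name SimpleBlockingQueue and its members are illustrative assumptions, not any of the original implementations.

using System.Collections.Generic;
using System.Threading;

// Minimal illustrative sketch only: an unbounded, Monitor-based blocking queue
// exposing the Enqueue/Dequeue/Count calls used by the simpler examples.
// It does not implement the Close/TryDequeue/capacity features seen in other snippets.
public class SimpleBlockingQueue<T>
{
    private readonly Queue<T> _items = new Queue<T>();
    private readonly object _sync = new object();

    public int Count
    {
        get { lock (_sync) return _items.Count; }
    }

    public void Enqueue(T item)
    {
        lock (_sync)
        {
            _items.Enqueue(item);
            Monitor.Pulse(_sync); // wake one waiting consumer
        }
    }

    public T Dequeue()
    {
        lock (_sync)
        {
            while (_items.Count == 0)
                Monitor.Wait(_sync); // block until a producer enqueues
            return _items.Dequeue();
        }
    }
}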
public void Dequeue_on_closed_queue_throws()
{
    BlockingQueue<string> q = new BlockingQueue<string>();
    q.Enqueue("foo");
    Assert.IsFalse(q.IsClosed);
    q.Close();
    Assert.IsTrue(q.IsClosed);
    string x = q.Dequeue();
    Assert.AreEqual("foo", x);
    // The queue is now closed and empty, so this Dequeue is expected to throw.
    x = q.Dequeue();
}
public static void Main()
{
    var thread1 = new Thread(Process);
    var thread2 = new Thread(Process);
    thread1.Start();
    thread2.Start();
    while (true)
    {
        string url = GetNextUrl();
        m_Queue.Enqueue(url);
    }
}
public void TestEnqueueDequeueOne()
{
    using (var queue = new BlockingQueue<int>(1))
    {
        Task.Factory.StartNew(() => queue.Enqueue(42)).Wait(500).Should().BeTrue();
        int value = 0;
        Task.Factory.StartNew(() => value = queue.Dequeue()).Wait(500).Should().BeTrue();
        value.Should().Be(42);
        queue.Count.Should().Be(0);
    }
}
private void Writer()
{
    var random = new Random();
    while (!_deadline.IsExceeded)
    {
        var value = new Value(random.Next());
        if (_queue.Enqueue(value, TimeSpan.FromMilliseconds(1)))
        {
            Interlocked.Add(ref _writeCount, value.Get());
        }
    }
}
public void Send(byte[] bytes, int off, int len)
{
    if (len > 0)
    {
        byte[] stack = new byte[len];
        // Copy len bytes starting at the given offset.
        Array.Copy(bytes, off, stack, 0, len);
        bool res = _packetQueueOut.Enqueue(stack);
        if (!res)
        {
            Console.WriteLine("overflowed stack");
        }
    }
}
public void BlockingQueue_TryPeek_Collection_Throw_ArgumentOutOfRangeException_When_Argument_is_Invalid(int value)
{
    //Arrange
    var blockingQueue = new BlockingQueue<int>();
    blockingQueue.Enqueue(value);

    //Act
    blockingQueue.TryPeek(out int item, value);

    //Assert is handled by the ExpectedException
}
public void BlockingQueue_ToArray_Convert_Collection_to_Array_When_Collection_is_not_Empty()
{
    //Arrange
    var blockingQueue = new BlockingQueue<int>();
    blockingQueue.Enqueue(1);
    blockingQueue.Enqueue(2);
    blockingQueue.Enqueue(3);

    //Act
    var array = blockingQueue.ToArray();

    //Assert
    Assert.AreEqual(blockingQueue.Count, array.Length);
    int index = 0;
    foreach (var n in blockingQueue)
    {
        Assert.AreEqual(n, array[index]);
        index++;
    }
}
public void BlockingQueue_Enqueue_Collection_Size_is_Increased_When_Item_Enqueued_to_Collection()
{
    //Arrange
    var blockingQueue = new BlockingQueue<int>();
    var startCount = blockingQueue.Count;

    //Act
    blockingQueue.Enqueue(42);

    //Assert
    Assert.AreEqual(0, startCount);
    Assert.AreEqual(1, blockingQueue.Count);
}
/// <summary>
/// UserTokenPool
/// </summary>
/// <param name="context"></param>
/// <param name="count"></param>
/// <param name="bufferSize"></param>
/// <param name="completed"></param>
public UserTokenPool(IContext<IUnpacker> context, int count, int bufferSize, EventHandler<SocketAsyncEventArgs> completed)
{
    _userTokenFactory = new UserTokenFactory();
    _bufferManager = new BufferManager(bufferSize * count, bufferSize);
    for (int i = 0; i < count; i++)
    {
        IUserToken userToken = _userTokenFactory.Create(context);
        var writeArgs = new SocketAsyncEventArgs();
        writeArgs.Completed += completed;
        userToken.WriteArgs = writeArgs;
        var readArgs = new SocketAsyncEventArgs();
        readArgs.Completed += completed;
        userToken.ReadArgs = readArgs;
        userToken.ReadArgs.UserToken = userToken.WriteArgs.UserToken = userToken;
        _concurrentQueue.Enqueue(userToken);
    }
}
public void BlockingQueue_TryPeek_Collection_Return_Value_When_Collection_is_not_Empty(int value)
{
    //Arrange
    var blockingQueue = new BlockingQueue<int>();
    blockingQueue.Enqueue(value);

    //Act
    blockingQueue.TryPeek(out int item);

    //Assert
    Assert.AreEqual(value, item);
}
// The runnable part of thread
private void Run()
{
    int counter = 0;
    while (true)
    {
        // The producer produces forever, no 'poison pill'
        // Add object to queue
        m_queue.Enqueue(String.Format("Producer {0}, Object {1}", m_id, counter++));
        // Wait a while
        Thread.Sleep(500);
    }
}
public void CreateAndUseBlockingQueueTenTimes()
{
    BlockingQueue<int> queue = new BlockingQueue<int>(5);
    Thread thread = new Thread(new ThreadStart(delegate()
    {
        for (int k = 1; k <= 10; k++)
            queue.Enqueue(k);
    }));
    thread.Start();
    for (int j = 1; j <= 10; j++)
    {
        int element = queue.Dequeue();
        Assert.AreEqual(j, element); // expected value first
    }
}
public void Should_be_full_when_enqueue_max_elements()
{
    blockingQueue.Enqueue(1);
    blockingQueue.Enqueue(2);
    blockingQueue.Enqueue(3);
    blockingQueue.IsFull.Should().BeTrue();
}
public void Can_Use_WorkItemDispatcher_On_STA_Thread()
{
    Exception cex = null;
    Thread t = new Thread(() =>
    {
        try
        {
            var queue = new BlockingQueue<string>();
            int processed = 0;
            var dispatcher = new WorkItemDispatcher<string>(5, (workstring) =>
            {
                Interlocked.Increment(ref processed);
            }, "Tester", queue);
            queue.Enqueue("Work1");
            queue.Enqueue("Work2");
            queue.ReleaseWaiters();
            dispatcher.Dispose();
        }
        catch (Exception ex)
        {
            cex = ex;
        }
    });
    t.SetApartmentState(ApartmentState.STA);
    t.Start();
    t.Join();
    if (cex != null)
    {
        Assert.Fail("Got exception on STA thread: " + cex);
    }
}
void processPacket(byte[] data)
{
    if (data == null)
    {
        data = emptyBuf;
    }
    if (msgQueue == null)
    {
        return;
    }
    msgQueue.Enqueue(new Message(data));
}
private IEnumerator<object> ListenTask()
{
    while (true)
    {
        var f = _Listener.AcceptIncomingConnection();
        yield return f;
        TcpClient tcpClient = f.Result as TcpClient;
        Console.WriteLine("Accepted connection from {0}.", tcpClient.Client.RemoteEndPoint);
        TelnetClient client = new TelnetClient(this, tcpClient);
        _Clients.Add(client);
        _NewClients.Enqueue(client);
    }
}
public void One_producer_many_consumers_loop_with_foreach()
{
    int n = 500;
    var enqueued = new List<string>();
    var dequeued = new List<string>();
    var q = new BlockingQueue<string>();
    var c1 = new Thread(MultiConsumerForeachLoop) { IsBackground = true };
    var c2 = new Thread(MultiConsumerForeachLoop) { IsBackground = true };
    var c3 = new Thread(MultiConsumerForeachLoop) { IsBackground = true };
    var v1 = new Tuplet<BlockingQueue<string>, List<string>, int, ManualResetEvent>(q, dequeued, 0, new ManualResetEvent(false));
    c1.Start(v1);
    var v2 = new Tuplet<BlockingQueue<string>, List<string>, int, ManualResetEvent>(q, dequeued, 0, new ManualResetEvent(false));
    c2.Start(v2);
    var v3 = new Tuplet<BlockingQueue<string>, List<string>, int, ManualResetEvent>(q, dequeued, 0, new ManualResetEvent(false));
    c3.Start(v3);
    Thread.Sleep(1000);
    for (int i = 0; i < n; i++)
    {
        string guid = Guid.NewGuid().ToString();
        q.Enqueue(guid);
        enqueued.Add(guid);
    }
    q.Close();
    Assert.IsTrue(v1.Item4.WaitOne(10000, false), "thread 1 did not finish");
    Assert.IsTrue(v2.Item4.WaitOne(10000, false), "thread 2 did not finish");
    Assert.IsTrue(v3.Item4.WaitOne(10000, false), "thread 3 did not finish");
    _log.DebugFormat("Thread 1 processed {0}", v1.Item3);
    _log.DebugFormat("Thread 2 processed {0}", v2.Item3);
    _log.DebugFormat("Thread 3 processed {0}", v3.Item3);
    Console.WriteLine("Thread 1 processed {0}", v1.Item3);
    Console.WriteLine("Thread 2 processed {0}", v2.Item3);
    Console.WriteLine("Thread 3 processed {0}", v3.Item3);
    Assert.GreaterOrEqual(v1.Item3, n / 4);
    Assert.GreaterOrEqual(v2.Item3, n / 4);
    Assert.GreaterOrEqual(v3.Item3, n / 4);
    Assert.AreEqual(n, dequeued.Count);
    Assert.AreEqual(dequeued.OrderBy(x => x).ToArray(), enqueued.OrderBy(x => x).ToArray());
}
/// <summary>
/// RedisStream
/// </summary>
/// <param name="timeout"></param>
public RedisStream(int timeout = 6 * 1000)
{
    _timeout = timeout;
    TaskHelper.LongRunning(() =>
    {
        while (!IsDisposed)
        {
            if (!_queue.TryTake(out byte[] data, timeout))
            {
                continue;
            }
            if (data == null || data.Length == 0)
            {
                continue;
            }
            _bytes.AddRange(data);
            do
            {
                var index = _bytes.IndexOf(13);
                if (index == -1)
                {
                    break;
                }
                // A line is only complete when the CR is immediately followed by LF
                if (_bytes.IndexOf(10, index) == index + 1)
                {
                    index += 1;
                }
                else
                {
                    break;
                }
                var count = index + 1;
                var str = Encoding.UTF8.GetString(_bytes.Take(count).ToArray());
                _stringQueue.Enqueue(str);
                _bytes.RemoveRange(0, count);
            } while (!IsDisposed);
        }
    });
}
public void BlockingQueue_TryPeek_Collection_Size_is_not_Changed_When_Item_Peeked_from_Collection()
{
    //Arrange
    var blockingQueue = new BlockingQueue<int>();
    blockingQueue.Enqueue(42);
    var startCount = blockingQueue.Count;

    //Act
    blockingQueue.TryPeek(out int item);

    //Assert
    Assert.AreEqual(1, startCount);
    Assert.AreEqual(1, blockingQueue.Count);
}
public KeyBoardInput(BlockingQueue<char> keyQueue)
{
    Thread keyBoardThread = new Thread(delegate()
    {
        while (true)
        {
            char key = System.Char.ToLower(System.Console.ReadKey(true).KeyChar);
            keyQueue.Enqueue(key);
        }
    });
    keyBoardThread.IsBackground = true;
    keyBoardThread.Start();
}
private static async void ProducerLoop()
{
    try
    {
        while (!_shutdown)
        {
            try
            {
                using (_pipeServer = new NamedPipeServerStream(_pipeName, PipeDirection.InOut, 10, PipeTransmissionMode.Message, PipeOptions.Asynchronous))
                {
                    await _pipeServer.WaitForConnectionAsync(_cancellationSource.Token);
                    if (!_pipeServer.IsConnected || _shutdown)
                    {
                        return;
                    }
                    StreamString io = new StreamString(_pipeServer);
                    while (_pipeServer.IsConnected)
                    {
                        string commandLine = io.ReadString();
                        if (commandLine == null || _shutdown)
                        {
                            break;
                        }
                        Debug.WriteLine("Command received via NamedPipe: " + commandLine);
                        if (!string.IsNullOrWhiteSpace(commandLine))
                        {
                            ExternalCommand command = new ExternalCommand(commandLine);
                            CommandLineQueue.Enqueue(command);
                            var result = command.WaitForResult(TimeSpan.FromMilliseconds(2000));
                            io.WriteString((result.Success ? "OK" : "FAIL") + ":" + result.Message);
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                Debug.WriteLine("InstanceHandler.ProducerLoop: " + ex.Message);
            }
        }
    }
    catch (OperationCanceledException) { }
    catch (ThreadAbortException) { }
}
/// <summary>
/// Add a message to the send queue.
/// </summary>
/// <param name="message">The message.</param>
private void EnqueueMessage(LogMessage message)
{
    if (message.IsDispose)
    {
        _queue.Close();
        return;
    }
    _queue.Enqueue(Tuple.Create(GetSubject(message), message.Message));
    lock (_queue.SyncRoot)
    {
        if (_isThreadStarted)
        {
            return;
        }
        _isThreadStarted = true;
        ThreadingHelper.Thread(() =>
        {
            try
            {
                using (var email = CreateClient())
                {
                    while (true)
                    {
                        Tuple<string, string> m;
                        if (!_queue.TryDequeue(out m))
                        {
                            break;
                        }
                        email.Send(From, To, m.Item1, m.Item2);
                    }
                }
                lock (_queue.SyncRoot)
                    _isThreadStarted = false;
            }
            catch (Exception ex)
            {
                Trace.WriteLine(ex);
            }
        }).Name("Email log queue").Launch();
    }
}
private void Read()
{
    try
    {
        while (_reader.CanRead && _error == null)
        {
            _blocksLimiter.WaitOne();
            var readedBlock = _reader.ReadNext();
            _processingBuffer.Enqueue(readedBlock);
        }
    }
    catch (Exception e)
    {
        _error = e;
    }
}
private void ReadPackets()
{
    try
    {
        while (socket.Connected)
        {
            var p = PrivateReadPacket();
            receiveQueue.Enqueue(p);
        }
    }
    catch (ThreadAbortException)
    {
        Debug.WriteLine("Reader thread aborting.");
        Thread.ResetAbort();
    }
}
/// <summary>
/// Requests <paramref name="action"/> to be run on scheduler with <paramref name="state"/> being passed in
/// </summary>
public override void Schedule(Action<object> action, object state)
{
    if (action == null)
    {
        return; // nothing to do
    }
    if (!_disposed && _queue.Count <= WorkerCount)
    {
        _queue.Enqueue(new WorkItem(action, state));
        return;
    }
    // Otherwise fall back to the global ThreadPool
    ThreadPool.Schedule(action, state);
}
public void BlockingQueue_Represent_a_Queue()
{
    //Arrange
    var blockingQueue = new BlockingQueue<int>();
    int[] array = new[] { 0, 1, 2, 3, 4, 6, 7, 8, 9 };

    //Act
    foreach (var n in array)
    {
        blockingQueue.Enqueue(n);
    }

    //Assert
    Assert.IsTrue(blockingQueue.SequenceEqual(array));
}
public void Fetch()
{
    while (true)
    {
        var packet = GetDataPacketFromDatabase();
        if (packet != null)
        {
            var message = new Message();
            message.Packet = packet;
            m_Queue.Enqueue(message);
        }
        else
        {
            break; // Stop if there is nothing left to fetch.
        }
    }
}
public void ShouldEnqueAndDequeTheSameMessage()
{
    BlockingQueue<ThreadMessage> x = new BlockingQueue<ThreadMessage>();
    // ReSharper disable once UseObjectOrCollectionInitializer
    ThreadMessage msg1 = new ThreadMessage(1);
    msg1.Add("Test1", "Some String Value");
    msg1.Add("TEST2", 1);
    msg1.Add("Test3", true);
    x.Enqueue(msg1);
    ThreadMessage msg2 = x.Dequeue();
    Assert.AreEqual(msg1.Cmd, msg2.Cmd);
    Assert.AreEqual(msg1.GetString("Test1"), msg2.GetString("Test1"));
    Assert.AreEqual(msg1.GetInt("Test2"), msg2.GetInt("TEST2"));
    Assert.AreEqual(msg1.GetBool("Test3"), msg2.GetBool("Test3"));
}
static void Main(string[] args)
{
    BlockingQueue<int> blockingQueue = new BlockingQueue<int>();
    Task.Factory.StartNew(() =>
    {
        Thread.Sleep(1000);
        blockingQueue.Enqueue(42);
    });
    if (blockingQueue.TryDequeue(out int item, 3000))
    {
        Console.WriteLine(item);
    }
    Console.ReadLine();
}
public void RethrowException_On_Dispose_By_Default()
{
    BlockingQueue<string> work = new BlockingQueue<string>();
    WorkItemDispatcher<string> dispatcher = new WorkItemDispatcher<string>(
        5, // work in 5 threads
        (data) => { throw new Exception(ExceptionMessage); },
        work
    );
    work.Enqueue("some work");
    work.ReleaseWaiters();
    Assert.Throws<AggregateException>(() => dispatcher.Dispose());
}
public void Simulatanous_Queueing_Enqueuing_Must_Not_Skip_Elements()
{
    BlockingQueue<string> q = new BlockingQueue<string>();
    int dequeueCount = 0;
    int exitCount = 0;
    Action dequeuer = () =>
    {
        while (true)
        {
            string item = q.Dequeue();
            if (item == null)
            {
                // last element reached
                Interlocked.Increment(ref exitCount);
                break;
            }
            Interlocked.Increment(ref dequeueCount);
        }
    };
    const int Threads = 10;
    for (int i = 0; i < Threads; i++)
    {
        dequeuer.BeginInvoke(null, null);
    }
    // wait until some threads are up and running
    Thread.Sleep(300);
    for (int i = 0; i < QueueItems; i++)
    {
        q.Enqueue(i.ToString());
    }
    q.ReleaseWaiters();
    q.WaitUntilEmpty();
    Thread.Sleep(30);
    Assert.AreEqual(QueueItems, dequeueCount, "Number of Enqueue and Dequeue calls must be the same");
    Thread.Sleep(200);
    Assert.AreEqual(Threads, exitCount, "All threads should have exited by now");
}
public void Test()
{
    var q = new BlockingQueue<int>(4);

    // Producer
    new Thread(() =>
    {
        for (var x = 0; ; x++)
        {
            if (!q.Enqueue(x))
                break;
            Trace.WriteLine(x.ToString("0000") + " >");
        }
        Trace.WriteLine("Producer quitting");
    }).Start();

    // Consumers
    for (var i = 0; i < 2; i++)
    {
        new Thread(() =>
        {
            for (;;)
            {
                Thread.Sleep(100);
                int x;
                if (!q.Dequeue(out x))
                    break;
                Trace.WriteLine(" < " + x.ToString("0000"));
            }
            Trace.WriteLine("Consumer quitting");
        }).Start();
    }

    Thread.Sleep(2000);
    Trace.WriteLine("Quitting");
    q.Quit();
}
public static void TestBlockingQueue()
{
    var bq = new BlockingQueue<int>();
    var range = Arrays.Range(0, 1, 5);
    var list = new List<int>();
    for (int i = 0; i < 2 * range.Count; i++)
        ThreadPool.QueueUserWorkItem((o) =>
        {
            int val;
            if (!bq.TryDequeue(500, out val))
                val = -1;
            lock (list)
                list.Add(val);
        });
    Thread.Sleep(10);
    foreach (var i in range)
        bq.Enqueue(i);
    var now = DateTime.Now;
    while ((DateTime.Now - now).TotalSeconds < 5)
    {
        lock (list)
            if (list.Count >= 2 * range.Count)
            {
                if (list.Count > 2 * range.Count)
                    throw new Exception("Too many items");
                if (list.Count(i => i == -1) != range.Count)
                    throw new Exception("Wrong number of -1's");
                if (!list.Where(i => i != -1).InOrder().SequenceEqual(range))
                    throw new Exception("Wrong non-negative elements!");
                return; // success
            }
        Thread.Sleep(10);
    }
    throw new Exception("Failed to complete after 5 seconds!");
}
public void MaxEnqueueTest()
{
    Exception cx = null;
    BlockingQueue q = null;
    try
    {
        q = new BlockingQueue(0);
        Assert.IsFalse(true, "didn't throw exception on create 0");
    }
    catch (ArgumentOutOfRangeException x)
    {
        cx = x;
    }
    Assert.IsNotNull(cx, "max_enqueue 0 should have thrown");
    q = new BlockingQueue(3);
    Assert.AreEqual(1, q.Enqueue("hey"), "enqueue 1");
    Assert.AreEqual(2, q.Enqueue("hey"), "enqueue 2");
    Assert.AreEqual(3, q.Enqueue("hey"), "enqueue 3");
    cx = null;
    try
    {
        q.Enqueue("oh no!");
        Assert.IsFalse(true, "didn't throw exception on enqueue 4");
    }
    catch (InvalidOperationException iox)
    {
        cx = iox;
    }
    Assert.IsNotNull(cx, "4th enqueue should have thrown");
}
IEnumerator<object> PerformSearch(SearchQuery search)
{
    pbProgress.Style = ProgressBarStyle.Marquee;
    lblStatus.Text = String.Format("Starting search...");
    lbResults.Items.Clear();
    var filenames = new BlockingQueue<string>();
    var completionFuture = new Future<object>();
    using (var fileSearch = Program.Scheduler.Start(
        SearchInFiles(search, filenames, completionFuture),
        TaskExecutionPolicy.RunAsBackgroundTask
    ))
    {
        using (var iterator = BuildQuery(search))
        {
            var f = Program.Scheduler.Start(iterator.Fetch());
            yield return f;
            if (!f.Failed)
            {
                txtSearch.BackColor = SystemColors.Window;
                while (!iterator.Disposed)
                {
                    if (PendingSearchQuery != null)
                        break;
                    foreach (var current in iterator)
                        filenames.Enqueue(current.Path);
                    yield return iterator.Fetch();
                }
            }
            else
            {
                txtSearch.BackColor = ErrorColor;
            }
        }
        completionFuture.Complete();
        while (filenames.Count < 0)
            filenames.Enqueue(null);
        yield return fileSearch;
    }
    if (PendingSearchQuery != null)
    {
        yield return BeginSearch();
    }
    else
    {
        pbProgress.Value = 0;
        pbProgress.Style = ProgressBarStyle.Continuous;
    }
}
public static IEnumerator<object> ScanFiles()
{
    var time_start = DateTime.UtcNow.Ticks;
    var completion = new Future<object>();
    var batchQueue = new BlockingQueue<IEnumerable<string>>();
    var changedFiles = new List<string>();
    var deletedFiles = new List<string>();
    for (int i = 0; i < System.Environment.ProcessorCount; i++)
        Scheduler.Start(
            CommitBatches(batchQueue, completion),
            TaskExecutionPolicy.RunAsBackgroundTask
        );
    using (new ActiveWorker("Scanning folders for changes"))
    {
        var changeSet = new BlockingQueue<TagDatabase.Change>();
        var changeGenerator = Scheduler.Start(
            Database.UpdateFileListAndGetChangeSet(changeSet),
            TaskExecutionPolicy.RunAsBackgroundTask
        );
        changeGenerator.RegisterOnComplete((f) => changeSet.Enqueue(new TagDatabase.Change()));
        int numChanges = 0;
        int numDeletes = 0;
        while (!changeGenerator.Completed || (changeSet.Count > 0))
        {
            var f = changeSet.Dequeue();
            yield return f;
            var change = f.Result;
            if (change.Filename == null)
                continue;
            if (change.Deleted)
            {
                deletedFiles.Add(change.Filename);
                numDeletes += 1;
            }
            else
            {
                yield return Database.GetSourceFileID(change.Filename);
                changedFiles.Add(change.Filename);
                numChanges += 1;
            }
            if (deletedFiles.Count >= BatchSize)
            {
                var transaction = Database.Connection.CreateTransaction();
                yield return transaction;
                foreach (string filename in deletedFiles)
                    yield return Database.DeleteSourceFile(filename);
                deletedFiles.Clear();
                yield return transaction.Commit();
            }
            if (changedFiles.Count >= BatchSize)
            {
                string[] batch = changedFiles.ToArray();
                changedFiles.Clear();
                batchQueue.Enqueue(batch);
            }
        }
        if (deletedFiles.Count > 0)
        {
            var transaction = Database.Connection.CreateTransaction();
            yield return transaction;
            foreach (string filename in deletedFiles)
                yield return Database.DeleteSourceFile(filename);
            deletedFiles.Clear();
            yield return transaction.Commit();
        }
        if (changedFiles.Count > 0)
        {
            string[] batch = changedFiles.ToArray();
            batchQueue.Enqueue(batch);
        }
        completion.Complete();
        while (batchQueue.Count < 0)
            batchQueue.Enqueue(null);
        var time_end = DateTime.UtcNow.Ticks;
        var elapsed = TimeSpan.FromTicks(time_end - time_start).TotalSeconds;
        System.Diagnostics.Debug.WriteLine(String.Format("Disk scan complete after {2:00000.00} seconds. {0} change(s), {1} delete(s).", numChanges, numDeletes, elapsed));
    }
}
public static void StartServices()
{
    Stopwatch stopwatch = new Stopwatch();
    stopwatch.Start();
    IBlockingQueue<ServiceBE> services = new BlockingQueue<ServiceBE>();
    List<ServiceBE> servicesToStart = new List<ServiceBE>(DbUtils.CurrentSession.Services_GetAll());

    // extract all auth services and start them synchronously first
    List<ServiceBE> authServices = servicesToStart.FindAll(service => service.Type == ServiceType.AUTH);
    servicesToStart.RemoveAll(service => service.Type == ServiceType.AUTH);
    foreach (ServiceBE authService in authServices)
    {
        try
        {
            StartService(authService, false, false);
        }
        catch
        {
            // Services started on deki startup do not get disabled if they fail to start
        }
    }

    // start remaining services in parallel
    foreach (ServiceBE service in servicesToStart)
    {
        if (service.ServiceEnabled)
        {
            services.Enqueue(service);
        }
    }
    services.Close();
    List<Result> workers = new List<Result>();
    for (int i = 0; i < 10; i++)
    {
        workers.Add(Async.ForkThread(() => StartServices_Helper(services), new Result()));
    }
    workers.Join(new Result()).Wait();
    _log.InfoFormat("Services started for instance '{0}' in {1}ms", DekiContext.Current.Instance.Id, stopwatch.ElapsedMilliseconds);
}
public void Many_consumers_with_timeouts()
{
    BlockingQueue<string> q = new BlockingQueue<string>();
    Thread c1 = new Thread(MultiConsumer);
    Thread c2 = new Thread(MultiConsumer);
    Thread c3 = new Thread(MultiConsumer);
    c1.IsBackground = true;
    c2.IsBackground = true;
    c3.IsBackground = true;
    Tuplet<BlockingQueue<string>, string, TimeSpan, ManualResetEvent> v1 = new Tuplet<BlockingQueue<string>, string, TimeSpan, ManualResetEvent>(q, "x", TimeSpan.FromSeconds(1), new ManualResetEvent(false));
    c1.Start(v1);
    Tuplet<BlockingQueue<string>, string, TimeSpan, ManualResetEvent> v2 = new Tuplet<BlockingQueue<string>, string, TimeSpan, ManualResetEvent>(q, "x", TimeSpan.FromSeconds(1), new ManualResetEvent(false));
    c2.Start(v2);
    Tuplet<BlockingQueue<string>, string, TimeSpan, ManualResetEvent> v3 = new Tuplet<BlockingQueue<string>, string, TimeSpan, ManualResetEvent>(q, "x", TimeSpan.FromSeconds(1), new ManualResetEvent(false));
    c3.Start(v3);
    q.Enqueue("foo");
    Assert.IsTrue(v1.Item4.WaitOne(2000, false), "thread 1 did not finish");
    Assert.IsTrue(v2.Item4.WaitOne(2000, false), "thread 2 did not finish");
    Assert.IsTrue(v3.Item4.WaitOne(2000, false), "thread 3 did not finish");
    bool gotValue = false;
    foreach (Tuplet<BlockingQueue<string>, string, TimeSpan, ManualResetEvent> v in new Tuplet<BlockingQueue<string>, string, TimeSpan, ManualResetEvent>[] { v1, v2, v3 })
    {
        if (v.Item2 == "foo")
        {
            gotValue = true;
            Assert.Less(v.Item3.TotalSeconds, 1);
        }
        else
        {
            Assert.IsNull(v.Item2);
            Assert.GreaterOrEqual(v.Item3.TotalSeconds, 0.95);
        }
    }
    Assert.IsTrue(gotValue);
}
public void One_producer_one_consumer_loop_with_foreach_and_stop()
{
    int n = 10000;
    List<string> enqueued = new List<string>();
    List<string> dequeued = new List<string>();
    BlockingQueue<string> q = new BlockingQueue<string>();
    Thread consumer = new Thread(SingleConsumerForeachLoopAndStop);
    consumer.Start(new Tuplet<IBlockingQueue<string>, List<string>>(q, dequeued));
    for (int i = 0; i < n; i++)
    {
        string guid = Guid.NewGuid().ToString();
        q.Enqueue(guid);
        enqueued.Add(guid);
    }
    q.Close();
    Assert.IsTrue(consumer.Join(1000));
    Assert.AreEqual(n, enqueued.Count);
    Assert.AreEqual(n, dequeued.Count);
    for (int i = 0; i < n; i++)
    {
        Assert.AreEqual(enqueued[i], dequeued[i]);
    }
}
public void Single_threaded_queue_dequeue()
{
    int n = 10000;
    List<string> guids = new List<string>();
    BlockingQueue<string> q = new BlockingQueue<string>();
    for (int i = 0; i < n; i++)
    {
        string guid = Guid.NewGuid().ToString();
        q.Enqueue(guid);
        guids.Add(guid);
    }
    Assert.AreEqual(n, q.Count);
    for (int i = 0; i < n; i++)
    {
        string guid = q.Dequeue();
        Assert.AreEqual(guids[i], guid);
    }
}
public void Queue_on_closed_queue_throws()
{
    BlockingQueue<string> q = new BlockingQueue<string>();
    q.Enqueue("foo");
    Assert.IsFalse(q.IsClosed);
    q.Close();
    Assert.IsTrue(q.IsClosed);
    // Enqueue on a closed queue is expected to throw.
    q.Enqueue("bar");
}
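Several of the tests above (Dequeue_on_closed_queue_throws, Queue_on_closed_queue_throws, and the Close-then-drain loops) rely on a closable queue: items enqueued before Close can still be dequeued, Enqueue after Close throws, and Dequeue throws once the closed queue is empty. The sketch below shows one way such semantics could be layered onto a Monitor-based queue; the class name ClosableBlockingQueue and the exception types are illustrative assumptions, not taken from the libraries under test.

using System;
using System.Collections.Generic;
using System.Threading;

// Illustrative sketch of Close/IsClosed semantics as exercised by the tests:
// drain remaining items after Close, throw on Enqueue-after-Close and on
// Dequeue from a closed, empty queue. Exception types are assumptions.
public class ClosableBlockingQueue<T>
{
    private readonly Queue<T> _items = new Queue<T>();
    private readonly object _sync = new object();
    private bool _closed;

    public bool IsClosed { get { lock (_sync) return _closed; } }

    public void Close()
    {
        lock (_sync)
        {
            _closed = true;
            Monitor.PulseAll(_sync); // release all blocked consumers
        }
    }

    public void Enqueue(T item)
    {
        lock (_sync)
        {
            if (_closed) throw new InvalidOperationException("queue is closed");
            _items.Enqueue(item);
            Monitor.Pulse(_sync);
        }
    }

    public T Dequeue()
    {
        lock (_sync)
        {
            while (_items.Count == 0 && !_closed)
                Monitor.Wait(_sync);
            if (_items.Count > 0) return _items.Dequeue();
            throw new InvalidOperationException("queue is closed and empty");
        }
    }
}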
public void TestBlockingQueue()
{
    Initialize();
    BlockingQueue<int> queue = new BlockingQueue<int>();
    ItemDelegate<int> addToQueue = delegate(int numberToAdd)
    {
        for (int i = 0; i < numberToAdd; ++i)
        {
            queue.Enqueue(1);
        }
        this.IncrementEnqueued(numberToAdd);
        this.IncrementExpectedDequeued(numberToAdd);
    };
    addToQueue(100000);
    ThreadStart start = delegate()
    {
        while (true)
        {
            int next;
            bool queueEmpty = !queue.Dequeue(out next);
            if (queueEmpty)
            {
                if (_stopThreads)
                    break;
            }
            else
            {
                this.IncrementDequeued(1);
            }
            Thread.Sleep(0);
        }
    };
    List<Thread> threads = new List<Thread>();
    for (int i = 0; i < _threadCount; ++i)
    {
        Thread thread = new Thread(start);
        thread.Start();
        threads.Add(thread);
    }

    //continually add to the queue a bit.
    int numberTimesAdded = 0;
    for (int i = 0; i < _threadCount; ++i)
    {
        addToQueue(100000);
        Thread.Sleep(5);
    }

    //'pulse' the queue by letting it go empty, then adding more.
    numberTimesAdded = 0;
    while (true)
    {
        if (queue.Count == 0)
        {
            if (++numberTimesAdded <= _threadCount)
            {
                addToQueue(100000);
            }
            else
            {
                //the real test of exiting the queue is when it's empty, not when it's non-empty.
                queue.ContinueBlocking = false;
                break;
            }
        }
        Thread.Sleep(5);
    }
    _stopThreads = true;
    foreach (Thread thread in threads)
        thread.Join();
    threads.Clear();
    Assert.AreEqual(_expectedDequeued, _dequeued, "expectedValue != numberDequeued");
}
public void A_Flow_with_SelectAsyncUnordered_must_not_run_more_futures_than_configured()
{
    this.AssertAllStagesStopped(() =>
    {
        const int parallelism = 8;
        var counter = new AtomicCounter();
        var queue = new BlockingQueue<Tuple<TaskCompletionSource<int>, long>>();
        var timer = new Thread(() =>
        {
            var delay = 500; // 50000 nanoseconds
            var count = 0;
            var cont = true;
            while (cont)
            {
                try
                {
                    var t = queue.Take(CancellationToken.None);
                    var promise = t.Item1;
                    var enqueued = t.Item2;
                    var wakeup = enqueued + delay;
                    while (DateTime.Now.Ticks < wakeup) { }
                    counter.Decrement();
                    promise.SetResult(count);
                    count++;
                }
                catch
                {
                    cont = false;
                }
            }
        });
        timer.Start();
        Func<Task<int>> deferred = () =>
        {
            var promise = new TaskCompletionSource<int>();
            if (counter.IncrementAndGet() > parallelism)
                promise.SetException(new Exception("parallelism exceeded"));
            else
                queue.Enqueue(Tuple.Create(promise, DateTime.Now.Ticks));
            return promise.Task;
        };
        try
        {
            const int n = 10000;
            var task = Source.From(Enumerable.Range(1, n))
                .SelectAsyncUnordered(parallelism, _ => deferred())
                .RunAggregate(0, (c, _) => c + 1, Materializer);
            task.Wait(TimeSpan.FromSeconds(3)).Should().BeTrue();
            task.Result.Should().Be(n);
        }
        finally
        {
            timer.Interrupt();
        }
    }, Materializer);
}
/// <summary>
/// Starts executing the test suite.
/// </summary>
public void Start()
{
    // Create a ScriptEngine and freeze its state.
    SaveScriptEngineSnapshot();

    // Create a queue to hold the tests.
    var queue = new BlockingQueue<TestExecutionState>(100);

    // Create a thread per processor.
    var threads = new List<Thread>();
    for (int i = 0; i < GetThreadCount(); i++)
    {
        var thread = new Thread(ThreadStart);
        thread.Start(queue);
        threads.Add(thread);
    }

    for (int i = 0; i < this.zipFile.Count; i++)
    {
        var zipEntry = this.zipFile[i];
        if (zipEntry.IsFile && zipEntry.Name.EndsWith(".js"))
        {
            // This is a test file.
            // Read out the contents (assume UTF-8).
            string fileContents;
            using (var entryStream = this.zipFile.GetInputStream(zipEntry))
            using (var reader = new StreamReader(entryStream))
            {
                fileContents = reader.ReadToEnd();
            }

            // Parse out the test metadata.
            var test = new Test(this, zipEntry.Name, fileContents);

            // Check if the test should be skipped.
            if (this.skippedTestNames.Contains(Path.GetFileNameWithoutExtension(zipEntry.Name)))
            {
                this.skippedTestCount++;
                TestFinished(this, new TestEventArgs(TestRunStatus.Skipped, test, false));
                continue;
            }
            if (this.IncludedTests.Count > 0 && this.IncludedTests.Contains(Path.GetFileNameWithoutExtension(zipEntry.Name)) == false)
            {
                this.skippedTestCount++;
                TestFinished(this, new TestEventArgs(TestRunStatus.Skipped, test, false));
                continue;
            }

            // Queue the test.
            if (test.RunInNonStrictMode)
                queue.Enqueue(new TestExecutionState(test, runInStrictMode: false));
            if (test.RunInStrictMode)
                queue.Enqueue(new TestExecutionState(test, runInStrictMode: true));
        }
    }

    // Signal the threads that no more tests will be provided.
    queue.Close();

    // Wait for all threads to exit.
    foreach (var thread in threads)
        thread.Join();
}
public void One_producer_one_consumer_loop_with_foreach()
{
    var n = 10000;
    var enqueued = new List<string>();
    var dequeued = new List<string>();
    var q = new BlockingQueue<string>();
    var consumer = new Thread(SingleConsumerForeachLoop);
    consumer.IsBackground = true;
    var reset = new ManualResetEvent(false);
    consumer.Start(new Tuplet<int, IBlockingQueue<string>, List<string>, ManualResetEvent>(n, q, dequeued, reset));
    for (int i = 0; i < n; i++)
    {
        string guid = Guid.NewGuid().ToString();
        q.Enqueue(guid);
        enqueued.Add(guid);
    }
    Assert.IsTrue(reset.WaitOne(1000, true));
    Assert.AreEqual(n, enqueued.Count);
    Assert.AreEqual(n, dequeued.Count);
    for (int i = 0; i < n; i++)
    {
        Assert.AreEqual(enqueued[i], dequeued[i]);
    }
}
public IEnumerator<object> UpdateFileListAndGetChangeSet(BlockingQueue<Change> changeSet)
{
    string filters;
    Folder[] folders = null;
    string[] exclusionList;
    {
        Future<string[]> f;
        yield return GetFilterPatterns().Run(out f);
        filters = String.Join(";", f.Result);
    }
    {
        var iter = new TaskEnumerator<TagDatabase.Folder>(GetFolders());
        yield return Scheduler.Start(iter.GetArray())
            .Bind(() => folders);
        exclusionList = (from folder in folders where folder.Excluded select folder.Path).ToArray();
    }
    using (var iterator = new TaskEnumerator<SourceFile>(GetSourceFiles()))
        while (!iterator.Disposed)
        {
            yield return iterator.Fetch();
            foreach (var file in iterator)
            {
                bool validFolder = false;
                bool fileExists = false;
                foreach (var folder in folders)
                {
                    if (file.Path.StartsWith(folder.Path))
                    {
                        if (folder.Excluded)
                        {
                            validFolder = false;
                            break;
                        }
                        else
                        {
                            validFolder = true;
                        }
                    }
                }
                if (validFolder)
                    fileExists = System.IO.File.Exists(file.Path);
                if (!validFolder || !fileExists)
                    changeSet.Enqueue(
                        new Change { Filename = file.Path, Deleted = true }
                    );
            }
        }
    foreach (var folder in folders)
    {
        if (folder.Excluded)
            continue;
        var enumerator = Squared.Util.IO.EnumDirectoryEntries(
            folder.Path, filters, true, Squared.Util.IO.IsFile
        );
        using (var dirEntries = TaskEnumerator<IO.DirectoryEntry>.FromEnumerable(enumerator))
            while (!dirEntries.Disposed)
            {
                yield return dirEntries.Fetch();
                foreach (var entry in dirEntries)
                {
                    bool excluded = false;
                    foreach (var exclusion in exclusionList)
                    {
                        if (entry.Name.StartsWith(exclusion))
                        {
                            excluded = true;
                            break;
                        }
                    }
                    if (excluded)
                        continue;
                    long newTimestamp = entry.LastWritten;
                    long oldTimestamp = 0;
                    IFuture f;
                    yield return GetSourceFileTimestamp(entry.Name).Run(out f);
                    if (f.Result is long)
                        oldTimestamp = (long)f.Result;
                    if (newTimestamp > oldTimestamp)
                        changeSet.Enqueue(
                            new Change { Filename = entry.Name, Deleted = false }
                        );
                }
            }
    }
    yield break;
}