/// <summary>
/// Dequeues every task currently held by <paramref name="queue"/> and awaits
/// them one at a time, in dequeue order.
/// </summary>
/// <param name="queue">The queue whose pending tasks should be awaited.</param>
public static async Task AwaitAll(this ConcurrentQueue<Task> queue)
{
    var pendingTasks = queue.DequeueAll();
    foreach (var pendingTask in pendingTasks)
    {
        // Library code: no need to resume on the captured synchronization context.
        await pendingTask.ConfigureAwait(false);
    }
}
// Draining an empty queue must yield an empty result and leave the queue empty.
public void WhenNoElements()
{
    // Arrange
    var queue = new ConcurrentQueue<int>();

    // Act
    var dequeued = queue.DequeueAll();

    // Assert
    dequeued.Should().BeEmpty();
    queue.Should().BeEmpty();
}
// DequeueAll must return the elements in FIFO order and drain the queue.
public void TestDequeueAllElementsInOrder()
{
    // Arrange: seed the queue with a fixed sequence.
    var queue = new ConcurrentQueue<int>(new[] { 1, 42, 9, 8 });

    // Act
    var dequeued = queue.DequeueAll();

    // Assert
    dequeued.Should().NotBeNull();
    dequeued.Should().Equal(new[] { 1, 42, 9, 8 });
    queue.Should().BeEmpty("because the method should have actually dequeued the elements from the queue");
}
// An empty queue must yield the shared empty enumerable (no allocation).
public void TestDequeueAllEmpty()
{
    // Arrange: explicitly empty initial contents.
    var queue = new ConcurrentQueue<int>(new int[0]);

    // Act
    var dequeued = queue.DequeueAll();

    // Assert: non-null, empty, and the cached empty instance is reused.
    dequeued.Should().NotBeNull();
    dequeued.Should().BeEmpty();
    dequeued.Should().BeSameAs(EnumerableExtensions<int>.Empty, "because the method shouldn't allocate memory unless necessary");
}
// A single queued element must come back as the sole result, draining the queue.
public void WhenSingleElement()
{
    // Arrange
    var queue = new ConcurrentQueue<int>();
    queue.Enqueue(2);

    // Act
    var dequeued = queue.DequeueAll();

    // Assert
    dequeued.Should().Equal(2);
    queue.Should().BeEmpty();
}
// Runs one iteration: drains the pending sections and processes them,
// then returns the delay until the next run.
protected override TimeSpan RunOnce(CancellationToken token)
{
    // Take everything queued since the previous pass.
    var pendingSections = _pendingSections.DequeueAll();

    if (Process(pendingSections))
    {
        return _maximumWaitTime;
    }

    // NOTE(review): properties are synchronized and listeners notified only
    // when Process(...) returns false — confirm that is the intended
    // contract of Process.
    SynchronizeProperties();
    Listeners.OnRead(_count);
    return _maximumWaitTime;
}
// Advances the timer by one tick: flushes queued removals, fires every action
// scheduled for the new current tick (re-arming periodic ones), flushes queued
// additions, and recomputes the next due tick.
public void Tick()
{
    // Apply removals requested since the last tick before anything fires.
    if (_toRemove.Count > 0)
    {
        foreach (var rem in _toRemove.DequeueAll())
        {
            RemoveAction(rem);
        }
    }

    tick++;
    bool needRecalc = false;

    // Only consult the schedule when the current tick equals the earliest
    // scheduled tick (minTick); otherwise nothing can be due yet.
    if (_delays.Count > 0 && minTick == tick)
    {
        HashSet<TickTimerCancelToken> actions;
        if (_delays.TryGetValue(tick, out actions))
        {
            foreach (var act in actions)
            {
                act.Action(act);
                // Periodic actions re-arm themselves for Period ticks ahead.
                // NOTE(review): Timer == null appears to mark a cancelled
                // token — confirm against TickTimerCancelToken.
                if (act.Period > 0 && act.Timer != null)
                {
                    act.Timeout = act.Period;
                    AddAction(act);
                }
            }
            _delays.Remove(tick);
        }
        needRecalc = true;
    }

    // Flush actions registered externally since the last tick.
    if (_toAdd.Count > 0)
    {
        needRecalc = true;
        foreach (var add in _toAdd.DequeueAll())
        {
            AddAction(add);
        }
    }

    // Recompute the next due tick. NOTE(review): First().Key is only the
    // minimum if _delays enumerates keys in ascending order (e.g. a
    // SortedDictionary) — confirm the field's declared type.
    if (needRecalc)
    {
        if (_delays.Count > 0)
        {
            minTick = _delays.First().Key;
        }
        else
        {
            minTick = 0;
        }
    }
}
// All enqueued items must be returned and the queue left empty.
public void DequeueAll_ReturnsAllItems()
{
    // Arrange: enqueue a known number of items.
    var queue = new ConcurrentQueue<TestClass>();
    const int expectedCount = 3;
    for (var index = 0; index < expectedCount; index++)
    {
        queue.Enqueue(new TestClass { Value = index });
    }

    // Act
    var actual = queue.DequeueAll();

    // Assert: every item came back and the queue is drained.
    Assert.Equal(expectedCount, actual.Count);
    Assert.True(queue.IsEmpty);
}
// Applies queued network-driven setting changes, prunes dead objects, and
// advances every object by the elapsed time.
internal void Update(TimeSpan timeDelta)
{
    // Flush network updates first so objects tick with current settings.
    foreach (var circleSetting in _netCircleSettings.DequeueAll())
    {
        UpdateCircle(circleSetting);
    }
    foreach (var lazerSetting in _netLazerSettings.DequeueAll())
    {
        UpdateLazer(lazerSetting);
    }

    // Snapshot the collection so removal during iteration is safe.
    // NOTE(review): a dead object is still updated on the tick it is
    // removed — confirm this is intentional.
    foreach (GameObject obj in _objects.ToList())
    {
        if (obj.IsDead)
        {
            _objects.Remove(obj);
        }
        obj.Update(timeDelta);
    }
}
// Drains the fetch queue and retrieves at most _maxNumPages distinct
// sections from the source, newest request first.
private void FetchPagesFromSource()
{
    // Cap the work at what the cache can hold; prefer the sections requested
    // last (hence Reverse) and skip duplicates.
    var sections = _fetchQueue.DequeueAll()
        .Reverse()
        .Take(_maxNumPages)
        .Distinct()
        .ToList();

    if (sections.Count == 0)
    {
        return;
    }

    if (Log.IsDebugEnabled)
    {
        Log.DebugFormat("Fetching data in {0} batches...", sections.Count);
    }

    foreach (var section in sections)
    {
        // Synchronous for now; fetching could be made async/parallel later.
        _source.GetEntries(section, _fetchBuffer);
        AddToCache(_fetchBuffer, 0, section);
    }
}
/// <summary>
/// Dequeues and returns every log message currently queued in
/// <c>missedEvents</c>, leaving the queue empty.
/// </summary>
public IEnumerable<LogMessageEventArgs> ConsumeMissedMessages()
    => missedEvents.DequeueAll();