/// <summary>
/// Attempts to remove a batch from the head of the queue
/// </summary>
/// <param name="segment">Removed batch</param>
/// <returns>True if the batch was removed</returns>
internal bool TryDequeue(out BatchingQueueSegment<T> segment)
{
    SpinWait sw = new SpinWait();

    while (true)
    {
        BatchingQueueSegment<T> head = _head;
        if (head == _tail)
        {
            segment = null;
            return false;
        }

        Debug.Assert(head.Next != null);

        if (Interlocked.CompareExchange(ref _head, head.Next, head) == head)
        {
            // Wait until the dequeued segment is no longer in work by producers
            SpinWait completionSw = new SpinWait();
            while (!head.IsNotInWork)
                completionSw.SpinOnce();

            Interlocked.Add(ref _itemsCount, -head.Count);
            segment = head;
            return true;
        }

        sw.SpinOnceNoSleep();
    }
}
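// Consumer-side usage sketch (illustrative, not part of the library): drain every
// batch that is currently complete. 'queue' is a hypothetical field holding the
// ConcurrentBatchingQueue<T>, 'Process' a hypothetical per-item handler; iterating
// a segment assumes it exposes the Enumerator shown further below via GetEnumerator.
void DrainCompletedBatches()
{
    while (queue.TryDequeue(out BatchingQueueSegment<string> batch))
    {
        foreach (string item in batch)
            Process(item);
    }
}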
/// <summary>
/// Reads 'head' and 'tail' atomically and marks all the segments in between for observation.
/// This ensures that the arrays inside the segments will not be exposed directly to the user
/// </summary>
/// <param name="head">Current head of the queue</param>
/// <param name="tail">Current tail of the queue</param>
/// <returns>True if the queue slice for observation is not empty, otherwise false</returns>
private bool GetHeadTailForObservation(out BatchingQueueSegment<T> head, out BatchingQueueSegment<T> tail)
{
    GetHeadTailAtomic(out head, out tail);

    // Mark for observation
    for (var current = head; current != tail; current = current.Next)
        current.MarkForObservation();
    tail.MarkForObservation();

    // Move head forward to the current head position
    while (head != _head)
    {
        // All segments up to the tail were dequeued => nothing to enumerate
        if (head == tail)
            return false;

        head = head.Next;
    }

    return true;
}
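// Illustrative sketch (an assumption, not part of this excerpt): how a snapshot
// enumeration could be built on top of GetHeadTailForObservation. Every segment in
// the observed slice is marked first, so, as the summary above states, the arrays
// inside them will not be exposed directly to the user while the snapshot is read.
private IEnumerable<T> EnumerateSnapshot()
{
    if (!GetHeadTailForObservation(out var head, out var tail))
        yield break;

    for (var current = head; ; current = current.Next)
    {
        foreach (T item in current)
            yield return item;

        if (current == tail)
            yield break;
    }
}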
/// <summary>
/// Adds the item to the tail of the queue
/// </summary>
/// <param name="item">New item</param>
/// <param name="batchCountIncreased">Number of new batches that appeared during this enqueue</param>
public void Enqueue(T item, out int batchCountIncreased)
{
    batchCountIncreased = 0;

    SpinWait spinWait = new SpinWait();
    bool success = false;
    while (!success)
    {
        BatchingQueueSegment<T> tail = _tail;

        if (tail.TryAdd(item))
        {
            Interlocked.Increment(ref _itemsCount);
            success = true;
        }

        // Help move the tail forward when the segment has grown a successor
        if (tail.Next != null)
        {
            if (Interlocked.CompareExchange(ref _tail, tail.Next, tail) == tail)
                batchCountIncreased++;
        }

        if (!success)
            spinWait.SpinOnce();
    }
}
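// Producer-side usage sketch (illustrative): batchCountIncreased lets the caller
// learn that one or more batches became complete during this enqueue, e.g. to
// release a semaphore that consumers wait on. 'queue' and 'completedBatches'
// (a SemaphoreSlim) are hypothetical fields, not part of this excerpt.
public void Add(string item)
{
    queue.Enqueue(item, out int batchCountIncreased);
    if (batchCountIncreased > 0)
        completedBatches.Release(batchCountIncreased);  // wake up waiting consumers
}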
/// <summary>
/// Enumerator constructor
/// </summary>
/// <param name="source">Source BatchingQueueSegment to enumerate</param>
public Enumerator(BatchingQueueSegment<T> source)
{
    TurboContract.Requires(source != null, "source != null");

    _source = source;
    _index = -1;
}
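// A plausible MoveNext to match the '_index = -1' start state (a sketch under
// assumptions, not the library's actual code): advance the cursor and stop once
// the count of published items in the segment is reached.
public bool MoveNext()
{
    if (_index < _source.Count - 1)
    {
        _index++;
        return true;
    }
    return false;
}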
/// <summary>
/// Preallocates the next segment in advance, so that it is ready by the time the current segment is full
/// </summary>
private void PreallocateNextSegment()
{
    if (_preallocatedNext == null)
        _preallocatedNext = new BatchingQueueSegment<T>(Capacity, unchecked(_batchId + 1));
}
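// Why 'unchecked' matters above (illustrative): batch ids are allowed to wrap
// around after int.MaxValue instead of throwing OverflowException when the code
// is compiled with checked arithmetic.
int next = unchecked(int.MaxValue + 1);  // wraps to int.MinValue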
/// <summary> /// <see cref="ConcurrentBatchingQueue{T}"/> constructor /// </summary> /// <param name="batchSize">Size of the batch</param> public ConcurrentBatchingQueue(int batchSize) { if (batchSize <= 0 || batchSize > int.MaxValue / 2) { throw new ArgumentOutOfRangeException(nameof(batchSize), $"'{nameof(batchSize)}' should be positive and less than {int.MaxValue / 2}"); } _batchSize = batchSize; _head = new BatchingQueueSegment <T>(batchSize); _tail = _head; _itemsCount = 0; }
/// <summary>
/// Reads 'head' and 'tail' atomically
/// </summary>
/// <param name="head">Current head of the queue</param>
/// <param name="tail">Current tail of the queue</param>
private void GetHeadTailAtomic(out BatchingQueueSegment<T> head, out BatchingQueueSegment<T> tail)
{
    head = _head;
    tail = _tail;

    SpinWait sw = new SpinWait();
    // Retry until both fields are observed unchanged between the reads,
    // which guarantees a consistent head/tail pair
    while (head != _head || tail != _tail)
    {
        sw.SpinOnceNoSleep();
        head = _head;
        tail = _tail;
    }
}
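// The same double-read snapshot pattern in isolation (illustrative sketch, using
// System.Threading.Volatile): read two shared references, then re-check both; if
// neither changed in between, the pair is a consistent snapshot of one moment in time.
static (object, object) ReadPairAtomic(ref object a, ref object b)
{
    while (true)
    {
        object a1 = Volatile.Read(ref a);
        object b1 = Volatile.Read(ref b);
        if (a1 == Volatile.Read(ref a) && b1 == Volatile.Read(ref b))
            return (a1, b1);
    }
}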
/// <summary> /// <see cref="BatchingQueueSegment{T}"/> constructor /// </summary> /// <param name="capacity">Capacity of the segment</param> /// <param name="batchId">Incremental identifier of batch</param> public BatchingQueueSegment(int capacity, int batchId) { TurboContract.Requires(capacity > 0 && capacity <= int.MaxValue / 2, "'capacity' should be positive and less than int.MaxValue / 2"); _array = new T[capacity]; _batchId = batchId; _markedForObservation = false; _reservedIndexWithFinalizationMark = 0; _actualCount = 0; _next = null; _preallocatedNext = null; }
/// <summary>
/// Marks the active batch as completed so that it can be removed from the queue even if it is not full
/// </summary>
/// <returns>True when the active batch was not empty, otherwise false</returns>
public bool CompleteCurrentBatch()
{
    BatchingQueueSegment<T> tail = _tail;

    // Use the captured local for a consistent view of the segment
    if (tail.Count == 0)
        return false;

    if (tail.Grow() && tail.Next != null)
    {
        if (Interlocked.CompareExchange(ref _tail, tail.Next, tail) == tail)
            return true;
    }

    return false;
}
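// Usage sketch (illustrative): flush a partially filled batch on a timer so that
// items do not sit in the queue indefinitely under low load. 'queue' and
// 'completedBatches' are the same hypothetical fields as in the Enqueue example above.
void OnFlushTimer(object state)
{
    if (queue.CompleteCurrentBatch())
        completedBatches.Release();  // one new batch is ready for dequeue
}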