/// <summary>
        /// Process a collection of specified items in the most optimal way according to
        /// the bundle settings.
        /// </summary>
        /// <param name="colKeys">
        /// The collection of keys to process.
        /// </param>
        /// <returns>
        /// An execution result according to the caller's contract.
        /// </returns>
        public IDictionary ProcessAll(ICollection colKeys)
        {
            AtomicCounter counter  = m_countThreads;
            int           cThreads = (int)counter.Increment();

            try
            {
                if (cThreads < ThreadThreshold)
                {
                    return Bundling(colKeys);
                }

                Bundle bundle;
                bool   isBurst;
                while (true)
                {
                    bundle = (Bundle)getOpenBundle();
                    lock (bundle)
                    {
                        if (bundle.IsOpen())
                        {
                            bool isFirst = bundle.AddAll(colKeys);

                            isBurst = bundle.WaitForResults(isFirst);
                            break;
                        }
                    }
                }
                return bundle.ProcessAll(isBurst, colKeys);
            }
            finally
            {
                counter.Decrement();
            }
        }
        /// <summary>
        /// Process the specified key in the most optimal way according to the
        /// bundle settings.
        /// </summary>
        /// <param name="key">
        /// The key to process.
        /// </param>
        /// <returns>
        /// An execution result according to the caller's contract.
        /// </returns>
        public Object Process(Object key)
        {
            AtomicCounter counter  = m_countThreads;
            int           cThreads = (int)counter.Increment();

            try
            {
                if (cThreads < ThreadThreshold)
                {
                    return Unbundling(key);
                }

                Bundle bundle;
                bool   isBurst;
                while (true)
                {
                    bundle = (Bundle)getOpenBundle();
                    lock (bundle)
                    {
                        if (bundle.IsOpen())
                        {
                            bool isFirst = bundle.Add(key);

                            isBurst = bundle.WaitForResults(isFirst);
                            break;
                        }
                    }
                }
                return bundle.Process(isBurst, key);
            }
            finally
            {
                counter.Decrement();
            }
        }
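
Both methods above gate on a shared thread counter: each caller increments m_countThreads on entry, decrements it in a finally block, and only switches to bundling once the number of concurrent callers reaches ThreadThreshold. Below is a minimal, self-contained sketch of that gating pattern with hypothetical names; it assumes only an Interlocked-based counter, not the actual bundler.

using System;
using System.Threading;

public class ThresholdGate
{
    private readonly int _threshold;   // switch strategy at this level of concurrency
    private int _activeCallers;        // concurrent callers, maintained with Interlocked

    public ThresholdGate(int threshold) => _threshold = threshold;

    // Runs 'direct' while concurrency is low and 'batched' once the threshold is reached.
    public T Run<T>(Func<T> direct, Func<T> batched)
    {
        int callers = Interlocked.Increment(ref _activeCallers);
        try
        {
            return callers < _threshold ? direct() : batched();
        }
        finally
        {
            Interlocked.Decrement(ref _activeCallers);
        }
    }
}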
Example #3
        public void AtomicCounter_ParallelCounter()
        {
            var k = 0;

            Parallel.For(0, 100, (i, loop) =>
            {
                Interlocked.Increment(ref k);

                var counter = new AtomicCounter();

                Assert.AreEqual(0, counter.Value);

                void Inc()
                {
                    counter.Increment();
                }

                void Dec()
                {
                    counter.Decrement();
                }

                Parallel.Invoke(Inc, Inc, Dec, Dec, Dec, Inc, Dec, Inc, Dec, Inc, Inc, Inc, Dec, Dec, Inc);

                Assert.AreEqual(1, counter.Value);
            });

            Assert.AreEqual(100, k);
        }
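
The test assumes a counter whose Increment and Decrement can be called concurrently from Parallel.Invoke and whose Value reflects the net result. A minimal Interlocked-based sketch of a counter with that surface is shown below; the actual class used by the test may be implemented differently.

using System.Threading;

public class AtomicCounter
{
    private long _value;

    public long Value => Interlocked.Read(ref _value);

    public long Increment() => Interlocked.Increment(ref _value);

    public long Decrement() => Interlocked.Decrement(ref _value);
}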
Example #4
        public void Dispose()
        {
            if (referenceCount.Decrement() != 0)
            {
                return;
            }

            Dispose(true);
            GC.SuppressFinalize(this);
        }
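
Here Dispose decrements a reference counter on every call and releases the underlying resources only in the call that brings it to zero. A hedged sketch of the full pattern, including the matching acquire side, follows; the class and member names are hypothetical.

using System;
using System.Threading;

public sealed class SharedResource : IDisposable
{
    private int _refCount = 1;   // the creator holds the first reference

    // Take an additional reference before handing the instance to another owner.
    public SharedResource AddReference()
    {
        Interlocked.Increment(ref _refCount);
        return this;
    }

    public void Dispose()
    {
        // Only the call that brings the count to zero releases the resources.
        if (Interlocked.Decrement(ref _refCount) != 0)
        {
            return;
        }

        ReleaseUnmanagedResources();
    }

    private void ReleaseUnmanagedResources()
    {
        // free handles, return pooled buffers, etc.
    }
}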
Example #5
        public void Dispose()
        {
            if (referenceCount.Decrement() != 0)
            {
                return;
            }

            GLWrapper.ScheduleDisposal(() => Dispose(true));
            GC.SuppressFinalize(this);
        }
Example #6
        public void Decrement_ThreadSafety()
        {
            const int Iterations = 100_000;

            var counter = new AtomicCounter();

            Parallel.For(0, Iterations, i =>
            {
                counter.Decrement();
            });

            Assert.Equal(-Iterations, counter.Value);
        }
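
The test only ever decrements, which also shows that the counter is signed and may go negative. A mirrored increment test, under the same assumptions and in the same style as the snippet above, might look like this (illustrative only):

        public void Increment_ThreadSafety()
        {
            const int Iterations = 100_000;

            var counter = new AtomicCounter();

            Parallel.For(0, Iterations, i =>
            {
                counter.Increment();
            });

            Assert.Equal(Iterations, counter.Value);
        }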
Example #7
        /// <summary>
        /// Clear the <see cref="Data"/> and mark the Msg as uninitialised.
        /// If this is not a shared-data Msg (MsgFlags.Shared is not set), or it is shared but the reference counter has dropped to zero,
        /// then return the data to the BufferPool.
        /// </summary>
        /// <exception cref="FaultException">The object is not initialised.</exception>
        public void Close()
        {
            if (!IsInitialised)
            {
                throw new FaultException("Cannot close an uninitialised Msg.");
            }

            if (MsgType == MsgType.Pool)
            {
                // if not shared, or the reference counter has dropped to zero
                if (!IsShared || m_refCount.Decrement() == 0)
                {
                    BufferPool.Return(Data);
                }

                m_refCount = null;
            }

            // Uninitialise the frame
            Data    = null;
            MsgType = MsgType.Uninitialised;
        }
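
Close returns the pooled data only when the message is unshared or when it is the last holder of a shared buffer. The complementary operation, not shown here, happens when a pool-backed Msg is shared: the buffer is reused and the reference count is bumped instead of copying the data. Below is a standalone, hedged sketch of that idea, using ArrayPool<byte> in place of NetMQ's BufferPool and hypothetical names.

using System.Buffers;
using System.Threading;

// A pooled buffer shared between message copies through an atomic reference count.
public sealed class PooledBuffer
{
    private byte[] _data;
    private int _refCount = 1;

    public PooledBuffer(int size) => _data = ArrayPool<byte>.Shared.Rent(size);

    public byte[] Data => _data;

    // Called when another message starts sharing this buffer.
    public void AddReference() => Interlocked.Increment(ref _refCount);

    // Called when a sharing message is closed; only the last holder returns the buffer.
    public void Release()
    {
        if (Interlocked.Decrement(ref _refCount) == 0)
        {
            ArrayPool<byte>.Shared.Return(_data);
            _data = null;
        }
    }
}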
Example #8
File: Msg.cs Project: fhchina/netmq
        public void Close()
        {
            if (!Check())
            {
                throw new FaultException();
            }

            if (m_type == MsgType.Pool)
            {
                // if not shared, or the reference counter has dropped to zero
                if ((m_flags & MsgFlags.Shared) == 0 || m_atomicCounter.Decrement() == 0)
                {
                    BufferPool.Return(m_data);
                }

                m_atomicCounter = null;
            }

            m_data = null;

            //  Make the message invalid.
            m_type = MsgType.Invalid;
        }
Example #9
        public void Decrement_ThreadSafety()
        {
            // Arrange
            const int Iterations = 100_000;

            var counter = new AtomicCounter();

            // Act
            Parallel.For(0, Iterations, i =>
            {
                counter.Decrement();
            });

            // Assert
            counter.Value.Should().Be(-Iterations);
        }
Example #10
        public void CouldDispose()
        {
            var counter = 0;

            Assert.IsFalse(AtomicCounter.GetIsDisposed(ref counter));
            Assert.AreEqual(1, AtomicCounter.Increment(ref counter));
            Assert.AreEqual(2, AtomicCounter.Increment(ref counter));

            Assert.Throws<InvalidOperationException>(() => { AtomicCounter.Dispose(ref counter); });

            Assert.AreEqual(1, AtomicCounter.Decrement(ref counter));
            Assert.AreEqual(0, AtomicCounter.DecrementIfOne(ref counter));

            Assert.IsFalse(AtomicCounter.GetIsDisposed(ref counter));

            AtomicCounter.Dispose(ref counter);

            Assert.IsTrue(AtomicCounter.GetIsDisposed(ref counter));
        }
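
Unlike the instance counters in the earlier examples, this AtomicCounter is a set of static methods operating on a caller-owned int field, with a dedicated disposed state and a DecrementIfOne operation that only succeeds when the count is exactly one. The sketch below shows how a DecrementIfOne can be built from a compare-and-swap loop; the RefCount class is hypothetical, and the real Spreads implementation packs additional state into the high bits and differs in detail.

using System.Threading;

public static class RefCount
{
    // Decrements 'counter' only if its current value is exactly 1.
    // Returns 0 on success, or the observed value if it was not 1.
    public static int DecrementIfOne(ref int counter)
    {
        int observed = Volatile.Read(ref counter);
        while (observed == 1)
        {
            int previous = Interlocked.CompareExchange(ref counter, 0, 1);
            if (previous == 1)
            {
                return 0;            // we performed the 1 -> 0 transition
            }
            observed = previous;     // another thread raced us; re-check
        }
        return observed;             // value was not 1, nothing was changed
    }
}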
Example #11
        /// <summary>
        /// Process the specified entry in the most optimal way according
        /// to the bundle settings.
        /// </summary>
        /// <param name="key">
        /// The entry key.
        /// </param>
        /// <param name="value">
        /// The entry value.
        /// </param>
        public void Process(Object key, Object value)
        {
            AtomicCounter counter  = m_countThreads;
            int           cThreads = (int)counter.Increment();

            try
            {
                if (cThreads < ThreadThreshold)
                {
                    IDictionary dictionary = new Hashtable();
                    dictionary.Add(key, value);
                    Bundling(dictionary);
                    return;
                }

                Bundle bundle;
                bool   isBurst;
                while (true)
                {
                    bundle = (Bundle)getOpenBundle();
                    lock (bundle)
                    {
                        if (bundle.IsOpen())
                        {
                            bool isFirst = bundle.Add(key, value);

                            isBurst = bundle.WaitForResults(isFirst);
                            break;
                        }
                    }
                }
                bundle.Process(isBurst, key, value);
            }
            finally
            {
                counter.Decrement();
            }
        }
Example #12
        public void CouldTryDispose()
        {
            var counter = 0;

            Assert.IsFalse(AtomicCounter.GetIsDisposed(ref counter));
            Assert.AreEqual(1, AtomicCounter.Increment(ref counter));
            Assert.AreEqual(2, AtomicCounter.Increment(ref counter));

            Assert.AreEqual(2, AtomicCounter.TryDispose(ref counter));

            Assert.AreEqual(1, AtomicCounter.Decrement(ref counter));
            Assert.AreEqual(0, AtomicCounter.DecrementIfOne(ref counter));

            Assert.IsFalse(AtomicCounter.GetIsDisposed(ref counter));

            Assert.AreEqual(0, AtomicCounter.TryDispose(ref counter));

            Assert.IsTrue(AtomicCounter.GetIsDisposed(ref counter));

            Assert.AreEqual(-1, AtomicCounter.TryDispose(ref counter));

            counter = AtomicCounter.Disposed - 1;

            Assert.Throws<InvalidOperationException>(() => { AtomicCounter.TryDispose(ref counter); });

            counter = AtomicCounter.Disposed | (123 << 24);

            Assert.AreEqual(-1, AtomicCounter.TryDispose(ref counter));

            Assert.AreEqual(AtomicCounter.Disposed | (123 << 24), counter);

            counter = (123 << 24);

            Assert.AreEqual(0, AtomicCounter.TryDispose(ref counter));

            Assert.AreEqual((123 << 24), counter & ~AtomicCounter.Disposed);
        }
Example #13
        /// <summary>
        /// Process a collection of entries in the most optimal way
        /// according to the bundle settings.
        /// </summary>
        /// <param name="dictionary">
        /// The collection of entries to process.
        /// </param>
        public void ProcessAll(IDictionary dictionary)
        {
            AtomicCounter counter  = m_countThreads;
            var           cThreads = (int)counter.Increment();

            try
            {
                if (cThreads < ThreadThreshold)
                {
                    Bundling(dictionary);
                    return;
                }

                Bundle bundle;
                bool   isBurst;
                while (true)
                {
                    bundle = (Bundle)getOpenBundle();
                    lock (bundle)
                    {
                        if (bundle.IsOpen())
                        {
                            bool isFirst = bundle.AddAll(dictionary);

                            isBurst = bundle.WaitForResults(isFirst);
                            break;
                        }
                    }
                }
                bundle.ProcessAll(isBurst, dictionary);
            }
            finally
            {
                counter.Decrement();
            }
        }
 private void ConfirmAndCheckForCompletion()
 {
     AwaitingConfirmation.Decrement();
     CheckForCompletion();
 }
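
This snippet decrements the number of outstanding confirmations and then checks whether the operation has completed. A small hedged sketch of the full round trip follows, with hypothetical names; only the decrement-then-check shape is taken from the snippet above.

using System;
using System.Threading;

public class ConfirmationTracker
{
    private readonly Action _onCompleted;
    private int _awaitingConfirmation;

    public ConfirmationTracker(Action onCompleted) => _onCompleted = onCompleted;

    // Called when a message is sent and a confirmation is expected.
    public void TrackSend() => Interlocked.Increment(ref _awaitingConfirmation);

    // Called when a confirmation arrives; completes once nothing is outstanding.
    public void Confirm()
    {
        if (Interlocked.Decrement(ref _awaitingConfirmation) == 0)
        {
            _onCompleted();
        }
    }
}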
        public void A_Flow_with_SelectAsyncUnordered_must_not_run_more_futures_than_configured()
        {
            this.AssertAllStagesStopped(() =>
            {
                const int parallelism = 8;
                var counter = new AtomicCounter();
                var queue = new BlockingQueue<Tuple<TaskCompletionSource<int>, long>>();

                var timer = new Thread(() =>
                {
                    var delay = 500; // 500 ticks = 50,000 nanoseconds (1 tick = 100 ns)
                    var count = 0;
                    var cont = true;
                    while (cont)
                    {
                        try
                        {
                            var t = queue.Take(CancellationToken.None);
                            var promise = t.Item1;
                            var enqueued = t.Item2;
                            var wakeup = enqueued + delay;
                            while (DateTime.Now.Ticks < wakeup) { }
                            counter.Decrement();
                            promise.SetResult(count);
                            count++;
                        }
                        catch
                        {
                            cont = false;
                        }
                    }
                });

                timer.Start();

                Func<Task<int>> deferred = () =>
                {
                    var promise = new TaskCompletionSource<int>();
                    if (counter.IncrementAndGet() > parallelism)
                        promise.SetException(new Exception("parallelism exceeded"));
                    else
                        queue.Enqueue(Tuple.Create(promise, DateTime.Now.Ticks));
                    return promise.Task;
                };

                try
                {
                    const int n = 10000;
                    var task = Source.From(Enumerable.Range(1, n))
                        .SelectAsyncUnordered(parallelism, _ => deferred())
                        .RunAggregate(0, (c, _) => c + 1, Materializer);

                    task.Wait(TimeSpan.FromSeconds(3)).Should().BeTrue();
                    task.Result.Should().Be(n);
                }
                finally
                {
                    timer.Interrupt();
                }
            }, Materializer);
        }
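
In this test the counter acts as an in-flight gauge: it is incremented when a deferred task starts, decremented by the timer thread when the task completes, and the promise is failed if the gauge ever exceeds the configured parallelism. The same idea in isolation, as a hedged sketch (hypothetical class, not part of the Akka.NET test above):

using System;
using System.Threading;

// Tracks how many operations are currently in flight and enforces an upper bound.
public class InFlightGauge
{
    private readonly int _limit;
    private int _inFlight;

    public InFlightGauge(int limit) => _limit = limit;

    public void Enter()
    {
        if (Interlocked.Increment(ref _inFlight) > _limit)
        {
            Interlocked.Decrement(ref _inFlight);
            throw new InvalidOperationException("parallelism exceeded");
        }
    }

    public void Exit() => Interlocked.Decrement(ref _inFlight);
}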
Example #16
 public void Complete(CompletionContext context)
 {
     _counter.Decrement();
 }
Example #17
 /// <summary>
 /// Decrement the value of the counter by 1
 /// </summary>
 public void Decrement()
 {
     _internalCounter.Decrement();
 }
Example #18
        internal void RemoveCallReference(object call)
        {
            handle.DangerousRelease();

            activeCallCounter.Decrement();
        }
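
The decrement here balances a reference taken when the call was registered, keeping both the SafeHandle reference count and the active-call counter accurate. A self-contained, hedged sketch of the paired add/remove operations follows; the CallRegistry class is hypothetical and is not the actual gRPC C# implementation.

using System.Runtime.InteropServices;
using System.Threading;

// Pairs a SafeHandle reference with an active-call counter.
public class CallRegistry
{
    private readonly SafeHandle _handle;
    private int _activeCalls;

    public CallRegistry(SafeHandle handle) => _handle = handle;

    public void AddCallReference()
    {
        bool refAdded = false;
        _handle.DangerousAddRef(ref refAdded);   // keep the native handle alive
        Interlocked.Increment(ref _activeCalls);
    }

    public void RemoveCallReference()
    {
        _handle.DangerousRelease();              // balance the DangerousAddRef above
        Interlocked.Decrement(ref _activeCalls);
    }
}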
Example #20
            public void Dispose(bool disposing)
            {
                if (!_isSharedMemory)
                {
                    DoDispose();
                }
                else
                {
                    lock (this)
                    {
                        DoDispose();
                    }
                }

                void DoDispose()
                {
                    if (!disposing)
                    {
                        // The cache holds only a weak reference, so the finalizer could start running while a handle is still in the cache.
                        // This behaves as if _rc were 1 and we called DecrementIfOne: if it succeeds, no resurrection is possible
                        // because Retain uses IncrementIfRetained.

                        var current  = Volatile.Read(ref _rc);
                        var existing = Interlocked.CompareExchange(ref _rc, 0, current);
                        if (existing != current)
                        {
                            // Resurrected while we were trying to set rc to zero.
                            // What if rc was wrong and not 1? At some point all new users will
                            // dispose the proxy; it will then sit in the cache with a
                            // positive rc but no GC root, be collected and finalized, and
                            // return to this place, where we will try to set rc to 0 again.
                            // TODO: trace this condition; it indicates dropped proxies.
                            // From user code this is only possible when cursors are used manually
                            // and not disposed, so the blame is on users, but we should not fail.

                            ThrowHelper.AssertFailFast(existing > 1, "existing > 1 when resurrected");
                            return;
                        }
                    }
                    else
                    {
                        var remaining = AtomicCounter.Decrement(ref _rc);
                        if (remaining > 1)
                        {
                            return;
                        }

                        if (AtomicCounter.DecrementIfOne(ref _rc) != 0)
                        {
                            return;
                        }
                    }

                    ThrowHelper.AssertFailFast(_rc == 0, "_rc must be 0 to proceed with proxy disposal");

                    try
                    {
                        // remove self from cache
                        _cache._blocks.TryRemove(_key, out var handle);
                        if (handle.IsAllocated)
                        {
                            handle.Free();
                        }
                    }
                    finally
                    {
                        // If we are shutting down (e.g. an unhandled exception in other threads),
                        // this increases the chances that we release the shared memory ref.

#pragma warning disable 618
                        // ReSharper disable once InconsistentlySynchronizedField
                        Block.DisposeFree();
#pragma warning restore 618
                    }

                    // Do not pool finalized objects.
                    // TODO (review) proxy does not have ref type fields,
                    // probably could add to pool without thinking about GC/finalization order.
                    // However, this case should be very rare (e.g. unhandled exception)
                    // and we care about releasing RC of shared memory above all.
                    if (disposing)
                    {
                        GC.SuppressFinalize(this);
                        // ReSharper disable once InconsistentlySynchronizedField
                        Block = default;
                        _key  = default;
                        AtomicCounter.Dispose(ref _rc);
                        _isSharedMemory = default;
                    }
                }
            }
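
The comments above reference the other half of the protocol: Retain uses IncrementIfRetained, so a reference can only be re-acquired while the counter is still positive and not disposed, which is what makes resurrection from the finalizer path impossible. Below is a hedged sketch of such an increment-if-retained operation; the RetainCounter class is hypothetical, and the real Spreads AtomicCounter packs extra state into the high bits and differs in detail.

using System.Threading;

public static class RetainCounter
{
    // Increments 'counter' only while it is still positive, i.e. the object is
    // retained by at least one owner. Returns the new count on success, or the
    // observed non-positive value without changing anything.
    public static int IncrementIfRetained(ref int counter)
    {
        int observed = Volatile.Read(ref counter);
        while (observed > 0)
        {
            int previous = Interlocked.CompareExchange(ref counter, observed + 1, observed);
            if (previous == observed)
            {
                return observed + 1;   // retained once more
            }
            observed = previous;       // raced with another thread; retry
        }
        return observed;               // already released or disposed
    }
}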