public async Task Process(BaseAction action)
 {
     Batch batch = _batchFactory.Create(new List<BaseAction>()
     {
         action
     });
     await _requestHandler.MakeRequest(batch).ConfigureAwait(false);
 }
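All of the Process/Loop snippets on this page funnel actions through a batch factory and a request handler. A minimal sketch of what those abstractions are assumed to look like, inferred from the calls made in the examples (the real library's definitions may differ):

using System.Collections.Generic;
using System.Threading.Tasks;

// Assumed shapes, inferred from the usage in the examples; not the library's actual definitions.
public abstract class BaseAction
{
    public string MessageId { get; set; }   // used in the debug logging below
    public int Size { get; set; }           // per-action payload size, used for batch size caps
}

public class Batch
{
    public List<BaseAction> Actions { get; }
    public Batch(List<BaseAction> actions) => Actions = actions;
}

public interface IBatchFactory
{
    Batch Create(List<BaseAction> actions);
}

public interface IRequestHandler
{
    Task MakeRequest(Batch batch);
}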
Example #2
        public void Process(BaseAction action)
        {
            Batch batch = _batchFactory.Create(new List<BaseAction>()
            {
                action
            });

            _requestHandler.MakeRequest(batch);
        }
Example #3
        /// <summary>
        /// Loops on the flushing thread and processes the message queue
        /// </summary>
        private void Loop()
        {
            List<BaseAction> current = new List<BaseAction>();

            // keep looping while flushing thread is active
            while (_continue)
            {
                do
                {
                    // the only time we're not actually flushing
                    // is when the queue is empty
                    if (_queue.Count == 0)
                    {
                        _idle.Set();
                    }

                    // blocks and waits for a dequeue
                    BaseAction action = _queue.Dequeue();

                    if (action == null)
                    {
                        // the queue was disposed, so we're done with this batch
                        break;
                    }
                    else
                    {
                        // we are no longer idle since there's messages to be processed
                        _idle.Reset();

                        // add this action to the current batch
                        current.Add(action);
                    }
                }
                // if we can easily see that there's still stuff in the queue
                // we'd prefer to add more to the current batch to send more
                // at once. But only if we're not disposed yet (_continue is true).
                while (_continue && _queue.Count > 0 && current.Count <= Constants.BatchIncrement);

                if (current.Count > 0)
                {
                    // we have a batch that we're trying to send
                    Batch batch = _batchFactory.Create(current);

                    // make the request here
                    _requestHandler.MakeRequest(batch);

                    // start a fresh list for the next batch
                    current = new List<BaseAction>();
                }

                // thread context switch to avoid resource contention
                Thread.Sleep(0);
            }
        }
        public void FlushOne()
        {
            var actionList = new List<BaseAction>();

            for (int i = 0; i < Constants.BatchIncrement; i++)
            {
                if (actionQueue.Count <= 0)
                {
                    break;
                }
                actionList.Add(actionQueue.Dequeue());
            }

            Batch batch = batchFactory.Create(actionList);

            requestHandler.MakeRequest(batch);
        }
Example #5
        private async Task FlushImpl()
        {
            var current     = new List<BaseAction>();
            var currentSize = 0;

            while (!_queue.IsEmpty && !_continue.Token.IsCancellationRequested)
            {
                do
                {
                    if (!_queue.TryDequeue(out var action))
                    {
                        break;
                    }

                    Logger.Debug("Dequeued action in async loop.", new Dict {
                        { "message id", action.MessageId },
                        { "queue size", _queue.Count }
                    });

                    current.Add(action);
                    currentSize += action.Size;
                }
                // keep batching while there is still work, the batch is under the count cap,
                // we haven't been cancelled, and one more action cannot push the payload past BatchMaxSize
                while (!_queue.IsEmpty && current.Count < _maxBatchSize && !_continue.Token.IsCancellationRequested && currentSize < BatchMaxSize - ActionMaxSize);

                if (current.Count > 0)
                {
                    // we have a batch that we're trying to send
                    Batch batch = _batchFactory.Create(current);

                    Logger.Debug("Created flush batch.", new Dict {
                        { "batch size", current.Count }
                    });

                    // make the request here
                    await _requestHandler.MakeRequest(batch);

                    // start a fresh list and size counter for the next batch
                    current     = new List<BaseAction>();
                    currentSize = 0;
                }
            }
        }
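FlushImpl relies on a lock-free queue and a cancellation token rather than a blocking queue and a boolean flag. The fields it touches are assumed to look roughly like this, with names inferred from the calls it makes (ConcurrentQueue lives in System.Collections.Concurrent, CancellationTokenSource in System.Threading); the numeric limits are placeholders, not the library's real values:

private readonly ConcurrentQueue<BaseAction> _queue = new ConcurrentQueue<BaseAction>();
private readonly CancellationTokenSource _continue = new CancellationTokenSource();
private readonly int _maxBatchSize = 20;        // assumed cap on actions per batch
private const int BatchMaxSize  = 500 * 1024;   // assumed cap on total batch payload, in bytes
private const int ActionMaxSize = 32 * 1024;    // assumed cap on a single action's payload, in bytes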
        private Task<IEnumerable<Team>> GetFullTeamsAsync(Competition competition) =>
            _batchFactory
                .Create<int, Team>(10)
                .ForList(competition.Teams.Distinct())
                .Apply(teamId => _footballDataService.GetTeamAsync(teamId))
                .ExecuteAsync();
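This example drives a different kind of batch factory: a fluent, generic one where Create<int, Team>(10) yields an executor that runs one async lookup per team id, at most 10 at a time, and collects the results. A self-contained sketch of that pattern follows; the class name ListBatch and its members are assumptions chosen to match the calls above, not the actual API.

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;

public sealed class ListBatch<TKey, TResult>
{
    private readonly int _batchSize;
    private IReadOnlyList<TKey> _keys = Array.Empty<TKey>();
    private Func<TKey, Task<TResult>> _operation =
        _ => throw new InvalidOperationException("Apply was not called.");

    public ListBatch(int batchSize) => _batchSize = batchSize;

    // remember the keys to look up
    public ListBatch<TKey, TResult> ForList(IEnumerable<TKey> keys)
    {
        _keys = keys.ToList();
        return this;
    }

    // remember the async operation to run for each key
    public ListBatch<TKey, TResult> Apply(Func<TKey, Task<TResult>> operation)
    {
        _operation = operation;
        return this;
    }

    // run the operation for every key, at most _batchSize in flight at a time
    public async Task<IEnumerable<TResult>> ExecuteAsync()
    {
        var results = new List<TResult>(_keys.Count);

        for (int offset = 0; offset < _keys.Count; offset += _batchSize)
        {
            var chunk = _keys.Skip(offset).Take(_batchSize).Select(_operation);
            results.AddRange(await Task.WhenAll(chunk));
        }

        return results;
    }
}

Under these assumptions, the _batchFactory.Create<int, Team>(10) call above would simply return a new ListBatch<int, Team>(10).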
Example #7
        /// <summary>
        /// Loops on the flushing thread and processes the message queue
        /// </summary>
        private void Loop()
        {
            Logger.Debug("Starting async flush thread ..");

            List<BaseAction> current  = new List<BaseAction>();
            DateTime         lastSend = DateTime.Now;
            TimeSpan         elapsedTime;

            // keep looping while flushing thread is active
#if NET_NOTHREAD
            while (!_continue.Token.IsCancellationRequested)
#else
            while (_continue)
#endif
            {
                do
                {
                    elapsedTime = DateTime.Now - lastSend;
                    // the only time we're not actually flushing
                    // is when the queue is empty
                    if (_queue.Count == 0)
                    {
                        if (_forcedFlush)
                        {
                            Logger.Debug("Queue is empty, flushing is finished.");
                            break;
                        }
#if NET_NOTHREAD
                        Task.Delay(100).GetAwaiter().GetResult();
#else
                        Thread.Sleep(100);
#endif
                        continue;
                    }

                    // blocks and waits for a dequeue
                    BaseAction action = _queue.Dequeue();

                    if (action == null)
                    {
                        // the queue was disposed, so we're done with this batch
                        break;
                    }
                    else
                    {
                        // we are no longer idle since there's messages to be processed
                        _idle.Reset();

                        // add this action to the current batch
                        current.Add(action);

                        Logger.Debug("Dequeued action in async loop.", new Dict {
                            { "message id", action.MessageId },
                            { "queue size", _queue.Count }
                        });
                    }
                }
                // keep adding to the current batch so we can send more at once,
                // as long as the flush interval hasn't elapsed and the batch is under the cap.
                // But only if we're not disposed yet (_continue is true).
#if NET_NOTHREAD
                while (!_continue.Token.IsCancellationRequested && elapsedTime.TotalMilliseconds < _flushIntervalInMillis && current.Count <= MaxBatchSize);
#else
                while (_continue && elapsedTime.TotalMilliseconds < _flushIntervalInMillis && current.Count <= MaxBatchSize);
#endif

                if (current.Count > 0)
                {
                    // we have a batch that we're trying to send
                    Batch batch = _batchFactory.Create(current);

                    Logger.Debug("Created flush batch.", new Dict {
                        { "batch size", current.Count }
                    });

                    // make the request here
                    _requestHandler.MakeRequest(batch).GetAwaiter().GetResult();

                    // start a fresh list for the next batch
                    current = new List<BaseAction>();
                }

                if (_queue.Count == 0)
                {
                    _idle.Set();
                }

                lastSend = DateTime.Now;
#if !NET_NOTHREAD
                // thread context switch to avoid resource contention
                Thread.Sleep(0);
#endif
            }
        }
        /// <summary>
        /// Loops on the flushing thread and processes the message queue
        /// </summary>
        private void Loop()
        {
            Logger.Debug("Starting async flush thread ..");

            List<BaseAction> current = new List<BaseAction>();

            // keep looping while flushing thread is active
            while (!_continue.Token.IsCancellationRequested)
            {
                do
                {
                    // the only time we're not actually flushing
                    // is when the queue is empty
                    if (_queue.Count == 0)
                    {
                        _idle.Set();

                        Logger.Debug("Queue is empty, flushing is finished.");
                    }

                    // blocks and waits for a dequeue
                    BaseAction action = _queue.Dequeue();

                    if (action == null)
                    {
                        // the queue was disposed, so we're done with this batch
                        break;
                    }
                    else
                    {
                        // we are no longer idle since there's messages to be processed
                        _idle.Reset();

                        // add this action to the current batch
                        current.Add(action);

                        Logger.Debug("Dequeued action in async loop.", new Dict {
                            { "message id", action.MessageId },
                            { "queue size", _queue.Count }
                        });
                    }
                }
                // if we can easily see that there's still stuff in the queue
                // we'd prefer to add more to the current batch to send more
                // at once. But only if we're not disposed yet (_continue is true).
                while (!_continue.Token.IsCancellationRequested && _queue.Count > 0 && current.Count <= Constants.BatchIncrement);

                if (current.Count > 0)
                {
                    // we have a batch that we're trying to send
                    Batch batch = _batchFactory.Create(current);

                    Logger.Debug("Created flush batch.", new Dict {
                        { "batch size", current.Count }
                    });

                    // make the request here
                    _requestHandler.SendBatch(batch);

                    // start a fresh list for the next batch
                    current = new List<BaseAction>();
                }
            }
        }
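All of the Loop variants above show only the loop body. The plumbing they assume (a dedicated flushing thread, the _idle gate that a blocking Flush call waits on, and a _continue flag that stops the loop) might look roughly like this; _idle, _continue and Loop come from the examples, everything else is an assumption (ManualResetEvent and Thread are from System.Threading):

private readonly ManualResetEvent _idle = new ManualResetEvent(true); // set whenever the queue is empty
private volatile bool _continue = true;                               // cleared to stop Loop()
private Thread _flushingThread;

public void Start()
{
    _flushingThread = new Thread(Loop) { IsBackground = true };
    _flushingThread.Start();
}

public void Flush()
{
    // block the caller until Loop() has drained the queue and called _idle.Set()
    _idle.WaitOne();
}

public void Dispose()
{
    _continue = false;        // Loop() notices this at the top of its next iteration
    _flushingThread?.Join();
}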