Example #1
        public Task<int> ReportExecutionTimeAsync(string payloadType, long durationMilliseconds, int timeoutMilliseconds)
        {
            CheckNotDisposedOrThrow();
            CheckRunningOrThrow();

            if (payloadType == null)
            {
                throw new ArgumentNullException(nameof(payloadType));
            }

            mLogger.DebugFormat("Execution time {0} reported for payload {1}",
                                durationMilliseconds,
                                payloadType);

            //Allocate a unique id and build the write request for this measurement
            long requestId = Interlocked.Increment(ref mLastRequestId);

            StandardExecutionPerformanceMonitorWriteRequest processRequest =
                new StandardExecutionPerformanceMonitorWriteRequest(requestId,
                                                                    payloadType,
                                                                    durationMilliseconds,
                                                                    timeoutMilliseconds: timeoutMilliseconds,
                                                                    maxFailCount: 3);

            //Queue the request for the flush loop and count the post
            mStatsProcessingQueue.Add(processRequest);
            IncrementPerfMonPostCount();

            //Hand back the request's completion task; once it completes,
            //  count a time-out if one occurred and dispose the request
            return processRequest.Task.WithCleanup(prev =>
            {
                if (processRequest.IsTimedOut)
                {
                    IncrementPerfMonWriteRequestTimeoutCount();
                }
                processRequest.Dispose();
            });
        }
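The WithCleanup continuation used in the return statement above is not shown on this page. A minimal sketch of what such an extension method might look like, assuming it simply runs a cleanup callback once the underlying task has completed (in any state) and then propagates the original result or exception; the class name TaskExtensions is an assumption, not taken from the source:

    using System;
    using System.Threading.Tasks;

    public static class TaskExtensions
    {
        //Assumed helper: await the wrapped task, always run the cleanup
        //callback with the completed task, then surface the original
        //result or exception to the caller
        public static async Task<TResult> WithCleanup<TResult>(this Task<TResult> task,
                                                               Action<Task<TResult>> cleanup)
        {
            try
            {
                return await task.ConfigureAwait(false);
            }
            finally
            {
                cleanup(task);
            }
        }
    }

An extension along these lines would guarantee that the time-out accounting and Dispose call in Example #1 run whether the request completes, times out, or faults.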
Example #2
        private async Task RunFlushLoopAsync()
        {
            CancellationToken stopToken = mStatsProcessingStopTokenSource.Token;

            if (stopToken.IsCancellationRequested)
            {
                return;
            }

            while (true)
            {
                List<StandardExecutionPerformanceMonitorWriteRequest> currentBatch =
                    new List<StandardExecutionPerformanceMonitorWriteRequest>();

                try
                {
                    stopToken.ThrowIfCancellationRequested();

                    //Try to dequeue and block if no item is available
                    StandardExecutionPerformanceMonitorWriteRequest processItem =
                        mStatsProcessingQueue.Take(stopToken);

                    currentBatch.Add(processItem);

                    //See if there are other items immediately available
                    //  and add them to the current batch (at most 5 per batch)
                    while (currentBatch.Count < 5 && mStatsProcessingQueue.TryTake(out processItem))
                    {
                        currentBatch.Add(processItem);
                    }

                    //Process the entire batch - don't observe the cancellation
                    //  token here, so requests already taken off the queue
                    //  are not abandoned mid-flush
                    await ProcessStatsBatchAsync(currentBatch);
                }
                catch (OperationCanceledException)
                {
                    //Best effort to cancel all requests still waiting in the queue
                    foreach (StandardExecutionPerformanceMonitorWriteRequest rq in mStatsProcessingQueue.ToArray())
                    {
                        rq.SetCancelled();
                    }

                    mLogger.Debug("Cancellation requested. Breaking stats processing loop...");
                    break;
                }
                catch (Exception exc)
                {
                    //Mark the batch as failed and re-queue the requests that can still be retried
                    foreach (StandardExecutionPerformanceMonitorWriteRequest rq in currentBatch)
                    {
                        rq.SetFailed(exc);
                        if (rq.CanBeRetried)
                        {
                            mStatsProcessingQueue.Add(rq);
                        }
                    }

                    mLogger.Error("Error processing results", exc);
                }
                finally
                {
                    //Clear batch and start over
                    currentBatch.Clear();
                }
            }
        }
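The loop above combines BlockingCollection's blocking Take with the non-blocking TryTake to assemble small batches, and a cancellation token to stop cleanly. A self-contained sketch of that same batching pattern, using plain strings in place of the write requests (all names in it are illustrative, not taken from the source):

    using System;
    using System.Collections.Concurrent;
    using System.Collections.Generic;
    using System.Threading;
    using System.Threading.Tasks;

    public static class BatchingDemo
    {
        public static async Task Main()
        {
            BlockingCollection<string> queue = new BlockingCollection<string>();
            CancellationTokenSource stopSource = new CancellationTokenSource();

            Task consumer = Task.Run(async () =>
            {
                while (true)
                {
                    List<string> batch = new List<string>();
                    try
                    {
                        //Block until at least one item arrives (or cancellation)
                        batch.Add(queue.Take(stopSource.Token));

                        //Opportunistically drain a few more without blocking
                        string item;
                        while (batch.Count < 5 && queue.TryTake(out item))
                        {
                            batch.Add(item);
                        }

                        Console.WriteLine("Flushing batch of {0}", batch.Count);
                        await Task.Delay(10); //stand-in for the real flush
                    }
                    catch (OperationCanceledException)
                    {
                        break;
                    }
                }
            });

            for (int i = 0; i < 12; i++)
            {
                queue.Add("item-" + i);
            }

            await Task.Delay(100);
            stopSource.Cancel();
            await consumer;
        }
    }

The same shape is what keeps Example #2 responsive: the blocking Take yields as soon as work exists, while the bounded TryTake loop amortizes the cost of each flush without letting a batch grow unbounded.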