Example #1
        // The message pump.
        // When we start listening for the next request on one thread, we may need to be sure that the
        // completion continues on another thread so as not to block the current request processing.
        // The awaits will manage stack depth for us.
        private async Task ProcessRequestsWorker()
        {
            Debug.Assert(RequestContextFactory != null);

            // Allocate and accept context per loop and reuse it for all accepts
            using var acceptContext = new AsyncAcceptContext(Listener, RequestContextFactory);

            int workerIndex = Interlocked.Increment(ref _acceptorCounts);

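            // workerIndex is 1-based; if more accept loops are running than _maxAccepts
            // allows, this worker exits immediately via the loop condition below.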
            while (!Stopping && workerIndex <= _maxAccepts)
            {
                // Receive a request
                RequestContext requestContext;
                try
                {
                    requestContext = await Listener.AcceptAsync(acceptContext);

                    if (!Listener.ValidateRequest(requestContext))
                    {
                        // The request was rejected and a response has already been sent to the
                        // client, so dispose this context and accept the next request.
                        requestContext.ReleasePins();
                        requestContext.Dispose();
                        continue;
                    }
                }
                catch (Exception exception)
                {
                    Debug.Assert(Stopping);
                    if (Stopping)
                    {
                        Log.AcceptErrorStopping(_logger, exception);
                    }
                    else
                    {
                        Log.AcceptError(_logger, exception);
                    }
                    continue;
                }
                try
                {
                    if (_options.UnsafePreferInlineScheduling)
                    {
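                        // Inline scheduling awaits the request on this accept loop instead of
                        // dispatching to the thread pool; a long-running request delays the next accept.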
                        await requestContext.ExecuteAsync();
                    }
                    else
                    {
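                        // The request context is queued directly as an IThreadPoolWorkItem, so the
                        // request executes on the thread pool and this loop can accept the next request immediately.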
                        ThreadPool.UnsafeQueueUserWorkItem(requestContext, preferLocal: false);
                    }
                }
                catch (Exception ex)
                {
                    // Request processing failed; log the error and move on to the next accept.
                    Log.RequestListenerProcessError(_logger, ex);
                }
            }
            Interlocked.Decrement(ref _acceptorCounts);
        }
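
One way a pump like this is typically driven (a minimal sketch, not the original source): at startup the server fires off up to _maxAccepts accept loops without awaiting them, and each loop decrements _acceptorCounts when it exits, as in the worker above. The method name StartRequestProcessing is illustrative.

        // Sketch: start accept loops up to the configured limit.
        private void StartRequestProcessing()
        {
            for (int i = _acceptorCounts; i < _maxAccepts; i++)
            {
                // Fire-and-forget: each worker logs its own errors and exits when Stopping is set.
                _ = ProcessRequestsWorker();
            }
        }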