Code example #1
        void Discard(Exception cause = null)
        {
            for (;;)
            {
                PendingWrite current = this.currentWrite;
                if (this.currentWrite == null)
                {
                    current = this.queue.Count > 0 ? this.queue.Dequeue() : null;
                }
                else
                {
                    this.currentWrite = null;
                }

                if (current == null)
                {
                    break;
                }

                object message = current.Message;
                var chunks = message as IChunkedInput<T>;
                if (chunks != null)
                {
                    try
                    {
                        if (!chunks.IsEndOfInput)
                        {
                            if (cause == null)
                            {
                                cause = new ClosedChannelException();
                            }

                            current.Fail(cause);
                        }
                        else
                        {
                            current.Success();
                        }
                    }
                    catch (Exception exception)
                    {
                        current.Fail(exception);
                        Logger.Warn($"{StringUtil.SimpleClassName(typeof(ChunkedWriteHandler<T>))}.IsEndOfInput failed", exception);
                    }
                    finally
                    {
                        CloseInput(chunks);
                    }
                }
                else
                {
                    if (cause == null)
                    {
                        cause = new ClosedChannelException();
                    }

                    current.Fail(cause);
                }
            }
        }
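
All of the examples rely on a PendingWrite helper that pairs the queued message with its completion callbacks, but its definition is not shown. A minimal sketch, assuming a plain TaskCompletionSource-backed promise (the real DotNetty type also supports progress reporting and, in code example #5, exposes the promise directly), could look like this:

        // Hypothetical sketch of the PendingWrite helper used above.
        sealed class PendingWrite
        {
            readonly TaskCompletionSource<int> promise = new TaskCompletionSource<int>();

            public PendingWrite(object message)
            {
                this.Message = message;
            }

            public object Message { get; }

            public Task PendingTask => this.promise.Task;

            // Complete the write successfully.
            public void Success() => this.promise.TrySetResult(0);

            public void Success(long totalBytes) => this.Success();

            // Fail the write and release the pending message.
            public void Fail(Exception cause)
            {
                ReferenceCountUtil.Release(this.Message);
                this.promise.TrySetException(cause);
            }

            // Report transfer progress; a plain promise simply ignores it.
            public void Progress(long progress, long total)
            {
            }
        }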
Code example #2
 private static void HandleFuture(Task task, ChunkedWriteHandler<T> owner, IChannel channel, PendingWrite currentWrite, bool resume)
 {
     if (task.IsSuccess())
     {
         var chunks = (IChunkedInput<T>)currentWrite.Message;
         currentWrite.Progress(chunks.Progress, chunks.Length);
         if (resume && channel.IsWritable)
         {
             owner.ResumeTransfer();
         }
     }
     else
     {
         CloseInput((IChunkedInput<T>)currentWrite.Message);
         currentWrite.Fail(task.Exception);
     }
 }
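
Code example #5 below passes HandleFuture to ContinueWith through a cached LinkOutcomeAction delegate, packing its arguments into a Tuple so no new closure is allocated per chunk. That field is not reproduced here; a sketch matching the Tuple state built at the call site (the field name and shape are assumptions) might be:

 // Hypothetical cached continuation delegate matching the Tuple state used in code example #5.
 private static readonly Action<Task, object> LinkOutcomeAction = (task, state) =>
 {
     var wrapped = (Tuple<ChunkedWriteHandler<T>, IChannel, PendingWrite, bool>)state;
     HandleFuture(task, wrapped.Item1, wrapped.Item2, wrapped.Item3, wrapped.Item4);
 };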
Code example #3
 private static void HandleEndOfInputFuture(Task task, PendingWrite currentWrite)
 {
     if (task.IsSuccess())
     {
         var chunks = (IChunkedInput<T>)currentWrite.Message;
         // read state of the input in local variables before closing it
         long inputProgress = chunks.Progress;
         long inputLength   = chunks.Length;
         CloseInput(chunks);
         currentWrite.Progress(inputProgress, inputLength);
         currentWrite.Success(inputLength);
     }
     else
     {
         CloseInput((IChunkedInput<T>)currentWrite.Message);
         currentWrite.Fail(task.Exception);
     }
 }
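
Similarly, the LinkOutcomeWhenIsEndOfChunkedInputAction delegate referenced in code example #5 presumably just forwards to HandleEndOfInputFuture with the PendingWrite passed as the continuation state; a hypothetical definition:

 // Hypothetical cached delegate for the end-of-input continuation in code example #5.
 private static readonly Action<Task, object> LinkOutcomeWhenIsEndOfChunkedInputAction =
     (task, state) => HandleEndOfInputFuture(task, (PendingWrite)state);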
Code example #4
        void DoFlush(IChannelHandlerContext context)
        {
            IChannel channel = context.Channel;

            if (!channel.Active)
            {
                this.Discard();
                return;
            }

            bool requiresFlush             = true;
            IByteBufferAllocator allocator = context.Allocator;

            while (channel.IsWritable)
            {
                if (this.currentWrite == null)
                {
                    this.currentWrite = this.queue.Count > 0 ? this.queue.Dequeue() : null;
                }

                if (this.currentWrite == null)
                {
                    break;
                }

                PendingWrite current        = this.currentWrite;
                object       pendingMessage = current.Message;

                var chunks = pendingMessage as IChunkedInput<T>;
                if (chunks != null)
                {
                    bool   endOfInput;
                    bool   suspend;
                    object message = null;

                    try
                    {
                        message    = chunks.ReadChunk(allocator);
                        endOfInput = chunks.IsEndOfInput;
                        if (message == null)
                        {
                            // No need to suspend when the end of input has been reached.
                            suspend = !endOfInput;
                        }
                        else
                        {
                            suspend = false;
                        }
                    }
                    catch (Exception exception)
                    {
                        this.currentWrite = null;

                        if (message != null)
                        {
                            ReferenceCountUtil.Release(message);
                        }

                        current.Fail(exception);
                        CloseInput(chunks);

                        break;
                    }

                    if (suspend)
                    {
                        // ChunkedInput.ReadChunk() returned null and the input has not yet
                        // reached the end. Wait until more chunks arrive; there is
                        // nothing to write or notify.
                        break;
                    }

                    if (message == null)
                    {
                        // If message is null, write an empty buffer instead.
                        // See https://github.com/netty/netty/issues/1671
                        message = Unpooled.Empty;
                    }

                    Task future = context.WriteAsync(message);
                    if (endOfInput)
                    {
                        this.currentWrite = null;

                        // Register a listener which will close the input once the write is complete.
                        // This is needed because the chunk may hold resources that cannot be
                        // released before the write has finished.
                        //
                        // See https://github.com/netty/netty/issues/303
                        future.ContinueWith((_, state) =>
                        {
                            var pendingTask = (PendingWrite)state;
                            CloseInput((IChunkedInput<T>)pendingTask.Message);
                            pendingTask.Success();
                        },
                                            current,
                                            TaskContinuationOptions.ExecuteSynchronously);
                    }
                    else if (channel.IsWritable)
                    {
                        future.ContinueWith((task, state) =>
                        {
                            var pendingTask = (PendingWrite)state;
                            if (task.IsFaulted)
                            {
                                CloseInput((IChunkedInput<T>)pendingTask.Message);
                                pendingTask.Fail(task.Exception);
                            }
                            else
                            {
                                pendingTask.Progress(chunks.Progress, chunks.Length);
                            }
                        },
                                            current,
                                            TaskContinuationOptions.ExecuteSynchronously);
                    }
                    else
                    {
                        future.ContinueWith((task, state) =>
                        {
                            var handler = (ChunkedWriteHandler<T>)state;
                            if (task.IsFaulted)
                            {
                                CloseInput((IChunkedInput<T>)handler.currentWrite.Message);
                                handler.currentWrite.Fail(task.Exception);
                            }
                            else
                            {
                                handler.currentWrite.Progress(chunks.Progress, chunks.Length);
                                if (channel.IsWritable)
                                {
                                    handler.ResumeTransfer();
                                }
                            }
                        },
                                            this,
                                            TaskContinuationOptions.ExecuteSynchronously);
                    }

                    // Flush each chunk to conserve memory
                    context.Flush();
                    requiresFlush = false;
                }
                else
                {
                    context.WriteAsync(pendingMessage)
                    .ContinueWith((task, state) =>
                    {
                        var pendingTask = (PendingWrite)state;
                        if (task.IsFaulted)
                        {
                            pendingTask.Fail(task.Exception);
                        }
                        else
                        {
                            pendingTask.Success();
                        }
                    },
                                  current,
                                  TaskContinuationOptions.ExecuteSynchronously);

                    this.currentWrite = null;
                    requiresFlush     = true;
                }

                if (!channel.Active)
                {
                    this.Discard(new ClosedChannelException());
                    break;
                }
            }

            if (requiresFlush)
            {
                context.Flush();
            }
        }
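
Both versions of DoFlush rely on ResumeTransfer (directly, or through HandleFuture) to restart writing once the channel becomes writable again, but that method is not reproduced above. A minimal sketch, assuming the handler caches the IChannelHandlerContext it was added with in a ctx field, could be:

        // Hypothetical sketch of ResumeTransfer; 'ctx' is assumed to be stored in HandlerAdded.
        public void ResumeTransfer()
        {
            IChannelHandlerContext context = this.ctx;
            if (context == null)
            {
                return;
            }

            if (context.Executor.InEventLoop)
            {
                this.DoFlush(context);
            }
            else
            {
                // Hop onto the channel's event loop so DoFlush always runs single-threaded.
                context.Executor.Execute(() => this.DoFlush(context));
            }
        }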
Code example #5
        void DoFlush(IChannelHandlerContext context)
        {
            IChannel channel = context.Channel;

            if (!channel.Active)
            {
                Discard();
                return;
            }

            bool requiresFlush             = true;
            IByteBufferAllocator allocator = context.Allocator;

            while (channel.IsWritable)
            {
                PendingWrite currentWrite = _queue.FirstOrDefault;
                if (currentWrite is null)
                {
                    break;
                }

                if (currentWrite.Promise.IsCompleted)
                {
                    // This might happen, for example, when a write operation
                    // failed but there are still unconsumed chunks left.
                    // Most chunked input sources would stop generating chunks
                    // and report end of input, but this does not work for a
                    // source wrapped in HttpChunkedInput.
                    // Note that we are not trying to release the message/chunks
                    // here, as that must already have been done by whoever
                    // resolved the promise (using the ChunkedInput.Close method).
                    // See https://github.com/netty/netty/issues/8700.
                    _ = _queue.RemoveFromFront();
                    continue;
                }

                object pendingMessage = currentWrite.Message;

                if (pendingMessage is IChunkedInput<T> chunks)
                {
                    bool   endOfInput;
                    bool   suspend;
                    object message = null;

                    try
                    {
                        message    = chunks.ReadChunk(allocator);
                        endOfInput = chunks.IsEndOfInput;
                        if (message is null)
                        {
                            // No need to suspend when the end of input has been reached.
                            suspend = !endOfInput;
                        }
                        else
                        {
                            suspend = false;
                        }
                    }
                    catch (Exception exception)
                    {
                        _ = _queue.RemoveFromFront();

                        if (message is object)
                        {
                            _ = ReferenceCountUtil.Release(message);
                        }

                        CloseInput(chunks);
                        currentWrite.Fail(exception);

                        break;
                    }

                    if (suspend)
                    {
                        // ChunkedInput.ReadChunk() returned null and the input has not yet
                        // reached the end. Wait until more chunks arrive; there is
                        // nothing to write or notify.
                        break;
                    }

                    if (message is null)
                    {
                        // If message is null, write an empty buffer instead.
                        // See https://github.com/netty/netty/issues/1671
                        message = Unpooled.Empty;
                    }

                    // Flush each chunk to conserve memory
                    Task future = context.WriteAndFlushAsync(message);
                    if (endOfInput)
                    {
                        _ = _queue.RemoveFromFront();

                        if (future.IsCompleted)
                        {
                            HandleEndOfInputFuture(future, currentWrite);
                        }
                        else
                        {
                            // Register a listener which will close the input once the write is complete.
                            // This is needed because the chunk may hold resources that cannot be
                            // released before the write has finished.
                            //
                            // See https://github.com/netty/netty/issues/303
                            _ = future.ContinueWith(LinkOutcomeWhenIsEndOfChunkedInputAction, currentWrite, TaskContinuationOptions.ExecuteSynchronously);
                        }
                    }
                    else
                    {
                        var resume = !channel.IsWritable;
                        if (future.IsCompleted)
                        {
                            HandleFuture(future, this, channel, currentWrite, resume);
                        }
                        else
                        {
                            _ = future.ContinueWith(LinkOutcomeAction,
                                                    Tuple.Create(this, channel, currentWrite, resume), TaskContinuationOptions.ExecuteSynchronously);
                        }
                    }

                    requiresFlush = false;
                }
                else
                {
                    _             = _queue.RemoveFromFront();
                    _             = context.WriteAsync(pendingMessage, currentWrite.Promise);
                    requiresFlush = true;
                }

                if (!channel.Active)
                {
                    Discard(new ClosedChannelException());
                    break;
                }
            }

            if (requiresFlush)
            {
                _ = context.Flush();
            }
        }
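
For context, here is an illustrative wiring sketch (not taken from the original source): the handler is added once to the pipeline, and chunked messages such as ChunkedStream are then written through the channel, with DoFlush draining the chunks while the channel stays writable. The SendFileAsync helper and its parameters are invented for this sketch.

        // Illustrative usage sketch; assumes DotNetty.Buffers, DotNetty.Handlers.Streams
        // and DotNetty.Transport.Channels are referenced.
        static Task SendFileAsync(IChannel channel, string path)
        {
            // The handler only needs to be installed once, typically when the pipeline is built.
            if (channel.Pipeline.Get<ChunkedWriteHandler<IByteBuffer>>() == null)
            {
                channel.Pipeline.AddLast("chunkedWriter", new ChunkedWriteHandler<IByteBuffer>());
            }

            // ChunkedStream adapts the file stream into IChunkedInput<IByteBuffer> chunks,
            // which the DoFlush logic above writes out chunk by chunk.
            return channel.WriteAndFlushAsync(new ChunkedStream(File.OpenRead(path)));
        }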