// Parses the trailing-header (footer) section of a chunked message.
// Returns:
//   - null when no complete line is available yet (more data needed),
//   - EmptyLastHttpContent.Default when the trailer section is empty,
//   - a DefaultLastHttpContent carrying the accumulated trailing headers otherwise.
// Uses this.trailer to keep partial state across calls, and this.name/this.value
// as scratch fields filled by SplitHeader.
ILastHttpContent ReadTrailingHeaders(IByteBuffer buffer)
{
    AppendableCharSequence line = this.headerParser.Parse(buffer);
    if (line == null)
    {
        // Header line not complete yet.
        return(null);
    }
    AsciiString lastHeader = null;
    if (line.Count > 0)
    {
        ILastHttpContent trailingHeaders = this.trailer;
        if (trailingHeaders == null)
        {
            // Lazily create the trailer container on the first trailing header seen.
            trailingHeaders = new DefaultLastHttpContent(Unpooled.Empty, this.ValidateHeaders);
            this.trailer = trailingHeaders;
        }
        do
        {
            byte firstChar = line.Bytes[0];
            if (lastHeader != null && (firstChar == ' ' || firstChar == '\t'))
            {
                // Continuation (folded) line: append it to the previously parsed header's value.
                IList<ICharSequence> current = trailingHeaders.TrailingHeaders.GetAll(lastHeader);
                if (current.Count > 0)
                {
                    int lastPos = current.Count - 1;
                    ICharSequence lineTrimmed = CharUtil.Trim(line);
                    // NOTE(review): this variant inserts a space between the existing value and
                    // the folded continuation; another variant of this method in this file
                    // concatenates without a separator — confirm which behavior is intended.
                    current[lastPos] = new AsciiString($"{current[lastPos]} {lineTrimmed}");
                }
            }
            else
            {
                // Regular "Name: value" trailer line; SplitHeader fills this.name/this.value.
                this.SplitHeader(line);
                AsciiString headerName = this.name;
                // Content-Length, Transfer-Encoding and Trailer are silently dropped
                // from the trailing headers.
                if (!HttpHeaderNames.ContentLength.ContentEqualsIgnoreCase(headerName) &&
                    !HttpHeaderNames.TransferEncoding.ContentEqualsIgnoreCase(headerName) &&
                    !HttpHeaderNames.Trailer.ContentEqualsIgnoreCase(headerName))
                {
                    trailingHeaders.TrailingHeaders.Add(headerName, this.value);
                }
                lastHeader = this.name;
                // reset name and value fields
                this.name = null;
                this.value = null;
            }
            line = this.headerParser.Parse(buffer);
            if (line == null)
            {
                // Ran out of data mid-trailer; this.trailer keeps the partial state.
                return(null);
            }
        } while (line.Count > 0);
        // Empty line: trailer section finished.
        this.trailer = null;
        return(trailingHeaders);
    }
    // First line was already empty: no trailing headers at all.
    return(EmptyLastHttpContent.Default);
}
public void TestTransferCodingGZIP()
{
    // "Transfer-Encoding: gzip" alongside a Content-Length: the decompressor
    // must inflate the body and the Content-Length header must be removed.
    string requestStr = "POST / HTTP/1.1\r\n" +
        "Content-Length: " + GzHelloWorld.Length + "\r\n" +
        "Transfer-Encoding: gzip\r\n" +
        "\r\n";
    var requestDecoder = new HttpRequestDecoder();
    HttpContentDecoder inflater = new HttpContentDecompressor();
    var channel = new EmbeddedChannel(requestDecoder, inflater);

    channel.WriteInbound(Unpooled.CopiedBuffer(Encoding.ASCII.GetBytes(requestStr)));
    channel.WriteInbound(Unpooled.CopiedBuffer(GzHelloWorld));

    IHttpRequest decodedRequest = channel.ReadInbound<IHttpRequest>();
    Assert.True(decodedRequest.Result.IsSuccess);
    Assert.False(decodedRequest.Headers.Contains(HttpHeaderNames.ContentLength));

    IHttpContent body = channel.ReadInbound<IHttpContent>();
    Assert.True(body.Result.IsSuccess);
    Assert.Equal(HelloWorld, body.Content.ToString(Encoding.ASCII));
    body.Release();

    ILastHttpContent last = channel.ReadInbound<ILastHttpContent>();
    Assert.True(last.Result.IsSuccess);
    last.Release();

    AssertHasInboundMessages(channel, false);
    AssertHasOutboundMessages(channel, false);
    Assert.False(channel.Finish());
    channel.ReleaseInbound();
}
// Builds a failed last-content chunk for an unparsable chunked message and
// moves the decoder into the BadMessage state so all further bytes are discarded.
IHttpContent InvalidChunk(IByteBuffer buf, Exception cause)
{
    this.currentState = State.BadMessage;

    // Consume everything so ByteToMessageDecoder does not complain about a
    // message being produced without any bytes having been read.
    buf.SkipBytes(buf.ReadableBytes);

    // Drop partially decoded state before handing out the failure chunk.
    this.message = null;
    this.trailer = null;

    IHttpContent invalidContent = new DefaultLastHttpContent(Unpooled.Empty);
    invalidContent.Result = DecoderResult.Failure(cause);
    return invalidContent;
}
public void ChunkedRequestDecompression()
{
    // Chunked + gzip response with a declared trailer; verifies decompression
    // and that the trailing header survives decoding.
    HttpResponseDecoder decoder = new HttpResponseDecoder();
    HttpContentDecoder decompressor = new HttpContentDecompressor();
    EmbeddedChannel channel = new EmbeddedChannel(decoder, decompressor, null);
    string headers = "HTTP/1.1 200 OK\r\n" +
                     "Transfer-Encoding: chunked\r\n" +
                     "Trailer: My-Trailer\r\n" +
                     "Content-Encoding: gzip\r\n\r\n";
    channel.WriteInbound(Unpooled.CopiedBuffer(Encoding.ASCII.GetBytes(headers)));
    string chunkLength = GzHelloWorld.Length.ToString("x2");
    Assert.True(channel.WriteInbound(Unpooled.CopiedBuffer(chunkLength + "\r\n", Encoding.ASCII)));
    Assert.True(channel.WriteInbound(Unpooled.CopiedBuffer(GzHelloWorld)));
    Assert.True(channel.WriteInbound(Unpooled.CopiedBuffer(Encoding.ASCII.GetBytes("\r\n"))));
    Assert.True(channel.WriteInbound(Unpooled.CopiedBuffer("0\r\n", Encoding.ASCII)));
    Assert.True(channel.WriteInbound(Unpooled.CopiedBuffer("My-Trailer: 42\r\n\r\n\r\n", Encoding.ASCII)));

    object ob1 = channel.ReadInbound<object>();
    Assert.True(ob1 is DefaultHttpResponse);

    object ob2 = channel.ReadInbound<object>();
    // FIX: previously re-asserted ob1 (copy/paste); verify the second message is the content chunk.
    Assert.True(ob2 is IHttpContent);
    IHttpContent content = (IHttpContent)ob2;
    Assert.Equal(HelloWorld, content.Content.ToString(Encoding.ASCII));
    content.Release();

    object ob3 = channel.ReadInbound<object>();
    // FIX: previously re-asserted ob1 (copy/paste); verify the third message is the last content.
    Assert.True(ob3 is ILastHttpContent);
    ILastHttpContent lastContent = (ILastHttpContent)ob3;
    Assert.NotNull(lastContent.Result);
    Assert.True(lastContent.Result.IsSuccess);
    Assert.False(lastContent.TrailingHeaders.IsEmpty);
    Assert.Equal("42", lastContent.TrailingHeaders.Get((AsciiString)"My-Trailer", null));

    AssertHasInboundMessages(channel, false);
    AssertHasOutboundMessages(channel, false);
    Assert.False(channel.Finish());
}
public void TestTransferCodingGZIPAndChunked()
{
    // "Transfer-Encoding: gzip, chunked": body arrives as gzip data inside a
    // chunked stream and a trailing header follows the terminating chunk.
    string requestStr = "POST / HTTP/1.1\r\n" +
        "Host: example.com\r\n" +
        "Content-Type: application/x-www-form-urlencoded\r\n" +
        "Trailer: My-Trailer\r\n" +
        "Transfer-Encoding: gzip, chunked\r\n" +
        "\r\n";
    var reqDecoder = new HttpRequestDecoder();
    HttpContentDecoder inflater = new HttpContentDecompressor();
    var channel = new EmbeddedChannel(reqDecoder, inflater);

    Assert.True(channel.WriteInbound(Unpooled.CopiedBuffer(requestStr, Encoding.ASCII)));
    string chunkSizeHex = GzHelloWorld.Length.ToString("x2");
    Assert.True(channel.WriteInbound(Unpooled.CopiedBuffer(chunkSizeHex + "\r\n", Encoding.ASCII)));
    Assert.True(channel.WriteInbound(Unpooled.CopiedBuffer(GzHelloWorld)));
    Assert.True(channel.WriteInbound(Unpooled.CopiedBuffer(Encoding.ASCII.GetBytes("\r\n"))));
    Assert.True(channel.WriteInbound(Unpooled.CopiedBuffer("0\r\n", Encoding.ASCII)));
    Assert.True(channel.WriteInbound(Unpooled.CopiedBuffer("My-Trailer: 42\r\n\r\n", Encoding.ASCII)));

    IHttpRequest decodedRequest = channel.ReadInbound<IHttpRequest>();
    Assert.True(decodedRequest.Result.IsSuccess);
    Assert.True(decodedRequest.Headers.ContainsValue(HttpHeaderNames.TransferEncoding, HttpHeaderValues.Chunked, true));
    Assert.False(decodedRequest.Headers.Contains(HttpHeaderNames.ContentLength));

    IHttpContent firstChunk = channel.ReadInbound<IHttpContent>();
    Assert.True(firstChunk.Result.IsSuccess);
    Assert.Equal(HelloWorld, firstChunk.Content.ToString(Encoding.ASCII));
    firstChunk.Release();

    ILastHttpContent lastChunk = channel.ReadInbound<ILastHttpContent>();
    Assert.True(lastChunk.Result.IsSuccess);
    Assert.Equal("42", lastChunk.TrailingHeaders.Get(AsciiString.Of("My-Trailer"), null));
    lastChunk.Release();

    Assert.False(channel.Finish());
    channel.ReleaseInbound();
}
public void ContentLengthHeaderAndChunked()
{
    // Request illegally carries both Content-Length and Transfer-Encoding: chunked;
    // the decoder keeps chunked encoding and drops the Content-Length header.
    string requestStr = "POST / HTTP/1.1\r\n" +
                        "Host: example.com\r\n" +
                        "Connection: close\r\n" +
                        "Content-Length: 5\r\n" +
                        "Transfer-Encoding: chunked\r\n\r\n" +
                        "0\r\n\r\n";
    EmbeddedChannel channel = new EmbeddedChannel(new HttpRequestDecoder());
    Assert.True(channel.WriteInbound(Unpooled.CopiedBuffer(requestStr, Encoding.ASCII)));
    IHttpRequest request = channel.ReadInbound<IHttpRequest>();
    Assert.False(request.Result.IsFailure);
    Assert.True(request.Headers.Contains((AsciiString)"Transfer-Encoding", (AsciiString)"chunked", false));
    Assert.False(request.Headers.Contains((AsciiString)"Content-Length"));
    // FIX: the last-content message was read but never verified or released.
    ILastHttpContent c = channel.ReadInbound<ILastHttpContent>();
    Assert.NotNull(c);
    c.Release();
    Assert.False(channel.Finish());
}
public void TestEncodeEmptyEndAsClient()
{
    // An empty LAST content written by the client must be encoded as an
    // empty DATA frame carrying the end-of-stream flag.
    var channel = new EmbeddedChannel(new Http2StreamFrameToHttpObjectCodec(false));
    ILastHttpContent end = EmptyLastHttpContent.Default;
    Assert.True(channel.WriteOutbound(end));

    IHttp2DataFrame dataFrame = channel.ReadOutbound<IHttp2DataFrame>();
    try
    {
        Assert.True(dataFrame.IsEndStream);
        Assert.Equal(0, dataFrame.Content.ReadableBytes);
    }
    finally
    {
        dataFrame.Release();
    }

    Assert.Null(channel.ReadOutbound<object>());
    Assert.False(channel.Finish());
}
public void TestDecodeEndDataAsClient()
{
    // A DATA frame with endStream=true must decode to a LAST content that
    // carries the payload and no trailing headers.
    var channel = new EmbeddedChannel(new Http2StreamFrameToHttpObjectCodec(false));
    IByteBuffer payload = Unpooled.CopiedBuffer("hello world", Encoding.UTF8);
    Assert.True(channel.WriteInbound(new DefaultHttp2DataFrame(payload, true)));

    ILastHttpContent last = channel.ReadInbound<ILastHttpContent>();
    try
    {
        Assert.True(last.TrailingHeaders.IsEmpty);
        Assert.Equal("hello world", last.Content.ToString(Encoding.UTF8));
    }
    finally
    {
        last.Release();
    }

    Assert.Null(channel.ReadInbound<object>());
    Assert.False(channel.Finish());
}
public void TestMultipleContentLengthHeadersBehavior(bool allowDuplicateContentLengths, bool sameValue, bool singleField)
{
    // Exercises the decoder's handling of duplicate Content-Length headers,
    // driven by the allowDuplicateContentLengths constructor flag.
    var decoder = new HttpRequestDecoder(
        HttpObjectDecoder.DefaultMaxInitialLineLength,
        HttpObjectDecoder.DefaultMaxHeaderSize,
        HttpObjectDecoder.DefaultMaxChunkSize,
        HttpObjectDecoder.DefaultValidateHeaders,
        HttpObjectDecoder.DefaultInitialBufferSize,
        allowDuplicateContentLengths);
    var channel = new EmbeddedChannel(decoder);
    string requestStr = SetupRequestString(sameValue, singleField);
    Assert.True(channel.WriteInbound(Unpooled.CopiedBuffer(requestStr, Encoding.ASCII)));
    IHttpRequest request = channel.ReadInbound<IHttpRequest>();
    if (allowDuplicateContentLengths)
    {
        if (sameValue)
        {
            AssertValid(request);
            var contentLengths = request.Headers.GetAll(HttpHeaderNames.ContentLength);
            // FIX: the old assertion called ToString() on the list itself, which yields the
            // CLR type name (e.g. "System.Collections.Generic.List`1[...]") — that string
            // always contains "1", so the check was vacuous. Inspect the header values instead.
            Assert.Contains(contentLengths, v => v.ToString() == "1");
            ILastHttpContent body = channel.ReadInbound<ILastHttpContent>();
            Assert.Equal(1, body.Content.ReadableBytes);
            Assert.Equal("a", body.Content.ReadCharSequence(1, Encoding.ASCII).ToString());
            // FIX: release the body to avoid leaking the buffer in the test.
            body.Release();
        }
        else
        {
            AssertInvalid(request);
        }
    }
    else
    {
        AssertInvalid(request);
    }
    Assert.False(channel.Finish());
}
// Resets all per-message decoder state so the next message can be parsed.
// If the just-decoded response switched the connection away from HTTP/1.x
// (e.g. an upgrade), the decoder parks in the Upgraded state instead.
void ResetNow()
{
    IHttpMessage msg = _message;
    _message = null;
    _name = null;
    _value = null;
    // long.MinValue presumably marks "content length not parsed yet" — the
    // reader of _contentLength is outside this view; confirm there.
    _contentLength = long.MinValue;
    _lineParser.Reset();
    _headerParser.Reset();
    _trailer = null;
    if (!IsDecodingRequest())
    {
        // Only responses can switch the connection to a non-HTTP/1 protocol.
        if (msg is IHttpResponse res && IsSwitchingToNonHttp1Protocol(res))
        {
            _currentState = State.Upgraded;
            return;
        }
    }
    // Clear the cross-thread reset flag and go back to the initial state.
    _ = Interlocked.Exchange(ref _resetRequested, SharedConstants.False);
    _currentState = State.SkipControlChars;
}
// Resets all per-message decoder state so the next message can be parsed.
// If the just-decoded response switched the connection away from HTTP/1.x
// (e.g. an upgrade), the decoder parks in the Upgraded state instead.
void ResetNow()
{
    IHttpMessage msg = this.message;
    this.message = null;
    this.name = null;
    this.value = null;
    // long.MinValue presumably marks "content length not parsed yet" — the
    // reader of contentLength is outside this view; confirm there.
    this.contentLength = long.MinValue;
    this.lineParser.Reset();
    this.headerParser.Reset();
    this.trailer = null;
    if (!this.IsDecodingRequest())
    {
        // Only responses can switch the connection to a non-HTTP/1 protocol.
        if (msg is IHttpResponse res && this.IsSwitchingToNonHttp1Protocol(res))
        {
            this.currentState = State.Upgraded;
            return;
        }
    }
    this.resetRequested = false;
    this.currentState = State.SkipControlChars;
}
public void TestDecodeResponseTrailersAsClient()
{
    // A HEADERS frame with endStream=true after the response body must decode
    // to a last-content object carrying the headers as trailers.
    var channel = new EmbeddedChannel(new Http2StreamFrameToHttpObjectCodec(false));
    IHttp2Headers http2Headers = new DefaultHttp2Headers();
    http2Headers.Set((AsciiString)"key", (AsciiString)"value");
    Assert.True(channel.WriteInbound(new DefaultHttp2HeadersFrame(http2Headers, true)));

    ILastHttpContent decodedTrailers = channel.ReadInbound<ILastHttpContent>();
    try
    {
        Assert.False(decodedTrailers is IFullHttpRequest);
        Assert.Equal(0, decodedTrailers.Content.ReadableBytes);
        Assert.Equal("value", decodedTrailers.TrailingHeaders.Get((AsciiString)"key", null));
    }
    finally
    {
        decodedTrailers.Release();
    }

    Assert.Null(channel.ReadInbound<object>());
    Assert.False(channel.Finish());
}
// Parses the trailing-header (footer) section of a chunked message.
// Returns:
//   - null when no complete line is available yet (more data needed),
//   - EmptyLastHttpContent.Default when the trailer section is empty,
//   - a DefaultLastHttpContent carrying the accumulated trailing headers otherwise.
// Uses _trailer to keep partial state across calls, and _name/_value as
// scratch fields filled by SplitHeader.
private ILastHttpContent ReadTrailingHeaders(IByteBuffer buffer)
{
    AppendableCharSequence line = _headerParser.Parse(buffer);
    if (line is null)
    {
        // Header line not complete yet.
        return(null);
    }
    ILastHttpContent trailingHeaders = _trailer;
    if (0u >= (uint)line.Count && trailingHeaders is null)
    {
        // We have received the empty line which signals the trailer is complete and did not parse any trailers
        // before. Just return an empty last content to reduce allocations.
        return(EmptyLastHttpContent.Default);
    }
    AsciiString lastHeader = null;
    if (trailingHeaders is null)
    {
        // Lazily create the trailer container on the first trailing header seen.
        trailingHeaders = new DefaultLastHttpContent(Unpooled.Empty, ValidateHeaders);
        _trailer = trailingHeaders;
    }
    while ((uint)line.Count > 0u)
    {
        byte firstChar = line.Bytes[0];
        if (lastHeader is object && (firstChar == c_space || firstChar == c_tab))
        {
            // Continuation (folded) line: append it to the previously parsed header's value.
            IList<ICharSequence> current = trailingHeaders.TrailingHeaders.GetAll(lastHeader);
            if ((uint)current.Count > 0u)
            {
                int lastPos = current.Count - 1;
                //please do not make one line from below code
                //as it breaks +XX:OptimizeStringConcat optimization
                ICharSequence lineTrimmed = CharUtil.Trim(line);
                current[lastPos] = new AsciiString($"{current[lastPos]}{lineTrimmed}");
            }
        }
        else
        {
            // Regular "Name: value" trailer line; SplitHeader fills _name/_value.
            SplitHeader(line);
            AsciiString headerName = _name;
            // Content-Length, Transfer-Encoding and Trailer are silently dropped
            // from the trailing headers.
            if (!HttpHeaderNames.ContentLength.ContentEqualsIgnoreCase(headerName) &&
                !HttpHeaderNames.TransferEncoding.ContentEqualsIgnoreCase(headerName) &&
                !HttpHeaderNames.Trailer.ContentEqualsIgnoreCase(headerName))
            {
                _ = trailingHeaders.TrailingHeaders.Add(headerName, _value);
            }
            lastHeader = _name;
            // reset name and value fields
            _name = null;
            _value = null;
        }
        line = _headerParser.Parse(buffer);
        if (line is null)
        {
            // Ran out of data mid-trailer; _trailer keeps the partial state.
            return(null);
        }
    }
    // Empty line: trailer section finished.
    _trailer = null;
    return(trailingHeaders);
}
/// <summary>
/// State-machine decode step for HTTP messages: consumes bytes from
/// <paramref name="buffer"/> and appends decoded objects (message, content
/// chunks, last content) to <paramref name="output"/>. Each call advances at
/// most as far as the available data allows; returning early makes the
/// framework call Decode again once more bytes arrive.
/// </summary>
protected override void Decode(IChannelHandlerContext context, IByteBuffer buffer, List<object> output)
{
    // A reset may have been requested from another thread; apply it first.
    if (SharedConstants.False < (uint)Volatile.Read(ref _resetRequested))
    {
        ResetNow();
    }
    switch (_currentState)
    {
        case State.SkipControlChars:
        // Fall through
        case State.ReadInitial:
        {
            try
            {
                AppendableCharSequence line = _lineParser.Parse(buffer);
                if (line is null)
                {
                    // Initial line not complete yet.
                    return;
                }
                AsciiString[] initialLine = SplitInitialLine(line);
                if ((uint)initialLine.Length < 3u)
                {
                    // Invalid initial line - ignore.
                    _currentState = State.SkipControlChars;
                    return;
                }
                _message = CreateMessage(initialLine);
                _currentState = State.ReadHeader;
                goto case State.ReadHeader; // Fall through
            }
            catch (Exception e)
            {
                output.Add(InvalidMessage(buffer, e));
                return;
            }
        }
        case State.ReadHeader:
        {
            try
            {
                State? nextState = ReadHeaders(buffer);
                if (nextState is null)
                {
                    // Headers not complete yet.
                    return;
                }
                _currentState = nextState.Value;
                switch (nextState.Value)
                {
                    case State.SkipControlChars:
                    {
                        // fast-path
                        // No content is expected.
                        output.Add(_message);
                        output.Add(EmptyLastHttpContent.Default);
                        ResetNow();
                        return;
                    }
                    case State.ReadChunkSize:
                    {
                        if (!_chunkedSupported)
                        {
                            ThrowHelper.ThrowArgumentException_ChunkedMsgNotSupported();
                        }
                        // Chunked encoding - generate HttpMessage first. HttpChunks will follow.
                        output.Add(_message);
                        return;
                    }
                    default:
                    {
                        // <a href="https://tools.ietf.org/html/rfc7230#section-3.3.3">RFC 7230, 3.3.3</a> states that if a
                        // request does not have either a transfer-encoding or a content-length header then the message body
                        // length is 0. However for a response the body length is the number of octets received prior to the
                        // server closing the connection. So we treat this as variable length chunked encoding.
                        long length = ContentLength();
                        if (0u >= (uint)length || length == -1 && IsDecodingRequest())
                        {
                            output.Add(_message);
                            output.Add(EmptyLastHttpContent.Default);
                            ResetNow();
                            return;
                        }
                        Debug.Assert(nextState.Value == State.ReadFixedLengthContent || nextState.Value == State.ReadVariableLengthContent);
                        output.Add(_message);
                        if (nextState == State.ReadFixedLengthContent)
                        {
                            // chunkSize will be decreased as the READ_FIXED_LENGTH_CONTENT state reads data chunk by chunk.
                            _chunkSize = length;
                        }
                        // We return here, this forces decode to be called again where we will decode the content
                        return;
                    }
                }
            }
            catch (Exception exception)
            {
                output.Add(InvalidMessage(buffer, exception));
                return;
            }
        }
        case State.ReadVariableLengthContent:
        {
            // Keep reading data as a chunk until the end of connection is reached.
            int toRead = Math.Min(buffer.ReadableBytes, _maxChunkSize);
            if (toRead > 0)
            {
                IByteBuffer content = buffer.ReadRetainedSlice(toRead);
                output.Add(new DefaultHttpContent(content));
            }
            return;
        }
        case State.ReadFixedLengthContent:
        {
            int readLimit = buffer.ReadableBytes;
            // Check if the buffer is readable first as we use the readable byte count
            // to create the HttpChunk. This is needed as otherwise we may end up with
            // create an HttpChunk instance that contains an empty buffer and so is
            // handled like it is the last HttpChunk.
            //
            // See https://github.com/netty/netty/issues/433
            if (0u >= (uint)readLimit)
            {
                return;
            }
            int toRead = Math.Min(readLimit, _maxChunkSize);
            if (toRead > _chunkSize)
            {
                toRead = (int)_chunkSize;
            }
            IByteBuffer content = buffer.ReadRetainedSlice(toRead);
            _chunkSize -= toRead;
            if (0ul >= (ulong)_chunkSize)
            {
                // Read all content.
                output.Add(new DefaultLastHttpContent(content, ValidateHeaders));
                ResetNow();
            }
            else
            {
                output.Add(new DefaultHttpContent(content));
            }
            return;
        }
        // everything else after this point takes care of reading chunked content. basically, read chunk size,
        // read chunk, read and ignore the CRLF and repeat until 0
        case State.ReadChunkSize:
        {
            try
            {
                AppendableCharSequence line = _lineParser.Parse(buffer);
                if (line is null)
                {
                    // Chunk-size line not complete yet.
                    return;
                }
                int size = GetChunkSize(line.ToAsciiString());
                _chunkSize = size;
                if (0u >= (uint)size)
                {
                    // "0" chunk: the trailer (footer) section follows.
                    _currentState = State.ReadChunkFooter;
                    return;
                }
                _currentState = State.ReadChunkedContent;
                goto case State.ReadChunkedContent; // fall-through
            }
            catch (Exception e)
            {
                output.Add(InvalidChunk(buffer, e));
                return;
            }
        }
        case State.ReadChunkedContent:
        {
            Debug.Assert(_chunkSize <= int.MaxValue);
            int toRead = Math.Min((int)_chunkSize, _maxChunkSize);
            toRead = Math.Min(toRead, buffer.ReadableBytes);
            if (0u >= (uint)toRead)
            {
                return;
            }
            IHttpContent chunk = new DefaultHttpContent(buffer.ReadRetainedSlice(toRead));
            _chunkSize -= toRead;
            output.Add(chunk);
            if (_chunkSize != 0)
            {
                // More of this chunk's data still to come.
                return;
            }
            _currentState = State.ReadChunkDelimiter;
            goto case State.ReadChunkDelimiter; // fall-through
        }
        case State.ReadChunkDelimiter:
        {
            // Skip up to and including the LF that terminates the chunk data.
            int wIdx = buffer.WriterIndex;
            int rIdx = buffer.ReaderIndex;
            // TODO ForEachByte
            while (wIdx > rIdx)
            {
                byte next = buffer.GetByte(rIdx++);
                if (next == HttpConstants.LineFeed)
                {
                    _currentState = State.ReadChunkSize;
                    break;
                }
            }
            _ = buffer.SetReaderIndex(rIdx);
            return;
        }
        case State.ReadChunkFooter:
        {
            try
            {
                ILastHttpContent lastTrialer = ReadTrailingHeaders(buffer);
                if (lastTrialer is null)
                {
                    // Trailer section not complete yet.
                    return;
                }
                output.Add(lastTrialer);
                ResetNow();
                return;
            }
            catch (Exception exception)
            {
                output.Add(InvalidChunk(buffer, exception));
                return;
            }
        }
        case State.BadMessage:
        {
            // Keep discarding until disconnection.
            _ = buffer.SkipBytes(buffer.ReadableBytes);
            break;
        }
        case State.Upgraded:
        {
            int readableBytes = buffer.ReadableBytes;
            if (readableBytes > 0)
            {
                // Keep on consuming as otherwise we may trigger an DecoderException,
                // other handler will replace this codec with the upgraded protocol codec to
                // take the traffic over at some point then.
                // See https://github.com/netty/netty/issues/2173
                output.Add(buffer.ReadBytes(readableBytes));
            }
            break;
        }
    }
}
// Wraps a chunked byte-buffer source; the supplied last-content message is
// emitted once the underlying input is exhausted.
public HttpChunkedInput(IChunkedInput<IByteBuffer> input, ILastHttpContent lastHttpContent)
{
    this.lastHttpContent = lastHttpContent;
    this.input = input;
}
/// <summary>
/// State-machine decode step for HTTP messages: consumes bytes from
/// <paramref name="buffer"/> and appends decoded objects (message, content
/// chunks, last content) to <paramref name="output"/>. This variant also
/// mirrors the request line into the custom "pho" sidecar object.
/// </summary>
protected override void Decode(IChannelHandlerContext context, IByteBuffer buffer, List<object> output)
{
    if (this.resetRequested)
    {
        this.ResetNow();
    }
    switch (this.currentState)
    {
        case State.SkipControlChars:
        {
            if (!SkipControlCharacters(buffer))
            {
                // No non-control byte available yet.
                return;
            }
            this.currentState = State.ReadInitial;
            goto case State.ReadInitial; // Fall through
        }
        case State.ReadInitial:
        {
            try
            {
                AppendableCharSequence line = this.lineParser.Parse(buffer);
                if (line == null)
                {
                    // Initial line not complete yet.
                    return;
                }
                AsciiString[] initialLine = SplitInitialLine(line);
                if (initialLine.Length < 3)
                {
                    // Invalid initial line - ignore.
                    this.currentState = State.SkipControlChars;
                    return;
                }
                this.message = this.CreateMessage(initialLine);
                // Custom instrumentation: copy method/URI/protocol into the pho sidecar.
                // NOTE(review): the 'as' cast yields null if the message is not a
                // DefaultFullHttpRequest, which would throw NullReferenceException
                // on the next line — confirm CreateMessage always returns that type here.
                var dfhp = (this.message as DefaultFullHttpRequest);
                this.pho.url = dfhp.Uri;
                this.pho.appKey = getAppKeyFromUri(pho.url);
                this.pho.databuffer.WriteString(dfhp.Method.Name, System.Text.Encoding.UTF8);
                // NOTE(review): TrimStart with a char array strips any leading character
                // from the set {'/' plus appKey's characters}, not the literal prefix
                // "/<appKey>" — verify this is the intended behavior.
                this.pho.databuffer.WriteString(dfhp.Uri.TrimStart(("/" + pho.appKey).ToCharArray()), System.Text.Encoding.UTF8);
                this.pho.databuffer.WriteString(dfhp.ProtocolVersion.ToString(), System.Text.Encoding.UTF8);
                this.currentState = State.ReadHeader;
                goto case State.ReadHeader; // Fall through
            }
            catch (Exception e)
            {
                output.Add(this.InvalidMessage(buffer, e));
                return;
            }
        }
        case State.ReadHeader:
        {
            try
            {
                State? nextState = this.ReadHeaders(buffer);
                if (nextState == null)
                {
                    // Headers not complete yet.
                    return;
                }
                this.currentState = nextState.Value;
                switch (nextState.Value)
                {
                    case State.SkipControlChars:
                    {
                        // fast-path
                        // No content is expected.
                        output.Add(this.message);
                        output.Add(EmptyLastHttpContent.Default);
                        this.ResetNow();
                        return;
                    }
                    case State.ReadChunkSize:
                    {
                        if (!this.chunkedSupported)
                        {
                            throw new ArgumentException("Chunked messages not supported");
                        }
                        // Chunked encoding - generate HttpMessage first. HttpChunks will follow.
                        output.Add(this.message);
                        return;
                    }
                    default:
                    {
                        // <a href="https://tools.ietf.org/html/rfc7230#section-3.3.3">RFC 7230, 3.3.3</a> states that if a
                        // request does not have either a transfer-encoding or a content-length header then the message body
                        // length is 0. However for a response the body length is the number of octets received prior to the
                        // server closing the connection. So we treat this as variable length chunked encoding.
                        long length = this.ContentLength();
                        if (length == 0 || length == -1 && this.IsDecodingRequest())
                        {
                            output.Add(this.message);
                            output.Add(EmptyLastHttpContent.Default);
                            this.ResetNow();
                            return;
                        }
                        Debug.Assert(nextState.Value == State.ReadFixedLengthContent || nextState.Value == State.ReadVariableLengthContent);
                        output.Add(this.message);
                        if (nextState == State.ReadFixedLengthContent)
                        {
                            // chunkSize will be decreased as the READ_FIXED_LENGTH_CONTENT state reads data chunk by chunk.
                            this.chunkSize = length;
                        }
                        // We return here, this forces decode to be called again where we will decode the content
                        return;
                    }
                }
            }
            catch (Exception exception)
            {
                output.Add(this.InvalidMessage(buffer, exception));
                return;
            }
        }
        case State.ReadVariableLengthContent:
        {
            // Keep reading data as a chunk until the end of connection is reached.
            int toRead = Math.Min(buffer.ReadableBytes, this.maxChunkSize);
            if (toRead > 0)
            {
                IByteBuffer content = buffer.ReadRetainedSlice(toRead);
                output.Add(new DefaultHttpContent(content));
            }
            return;
        }
        case State.ReadFixedLengthContent:
        {
            int readLimit = buffer.ReadableBytes;
            // Check if the buffer is readable first as we use the readable byte count
            // to create the HttpChunk. This is needed as otherwise we may end up with
            // create a HttpChunk instance that contains an empty buffer and so is
            // handled like it is the last HttpChunk.
            //
            // See https://github.com/netty/netty/issues/433
            if (readLimit == 0)
            {
                return;
            }
            int toRead = Math.Min(readLimit, this.maxChunkSize);
            if (toRead > this.chunkSize)
            {
                toRead = (int)this.chunkSize;
            }
            IByteBuffer content = buffer.ReadRetainedSlice(toRead);
            this.chunkSize -= toRead;
            if (this.chunkSize == 0)
            {
                // Read all content.
                output.Add(new DefaultLastHttpContent(content, this.ValidateHeaders));
                this.ResetNow();
            }
            else
            {
                output.Add(new DefaultHttpContent(content));
            }
            return;
        }
        // everything else after this point takes care of reading chunked content. basically, read chunk size,
        // read chunk, read and ignore the CRLF and repeat until 0
        case State.ReadChunkSize:
        {
            try
            {
                AppendableCharSequence line = this.lineParser.Parse(buffer);
                if (line == null)
                {
                    // Chunk-size line not complete yet.
                    return;
                }
                int size = GetChunkSize(line.ToAsciiString());
                this.chunkSize = size;
                if (size == 0)
                {
                    // "0" chunk: the trailer (footer) section follows.
                    this.currentState = State.ReadChunkFooter;
                    return;
                }
                this.currentState = State.ReadChunkedContent;
                goto case State.ReadChunkedContent; // fall-through
            }
            catch (Exception e)
            {
                output.Add(this.InvalidChunk(buffer, e));
                return;
            }
        }
        case State.ReadChunkedContent:
        {
            Debug.Assert(this.chunkSize <= int.MaxValue);
            int toRead = Math.Min((int)this.chunkSize, this.maxChunkSize);
            toRead = Math.Min(toRead, buffer.ReadableBytes);
            if (toRead == 0)
            {
                return;
            }
            IHttpContent chunk = new DefaultHttpContent(buffer.ReadRetainedSlice(toRead));
            this.chunkSize -= toRead;
            output.Add(chunk);
            if (this.chunkSize != 0)
            {
                // More of this chunk's data still to come.
                return;
            }
            this.currentState = State.ReadChunkDelimiter;
            goto case State.ReadChunkDelimiter; // fall-through
        }
        case State.ReadChunkDelimiter:
        {
            // Skip up to and including the LF that terminates the chunk data.
            int wIdx = buffer.WriterIndex;
            int rIdx = buffer.ReaderIndex;
            while (wIdx > rIdx)
            {
                byte next = buffer.GetByte(rIdx++);
                if (next == HttpConstants.LineFeed)
                {
                    this.currentState = State.ReadChunkSize;
                    break;
                }
            }
            buffer.SetReaderIndex(rIdx);
            return;
        }
        case State.ReadChunkFooter:
        {
            try
            {
                ILastHttpContent lastTrialer = this.ReadTrailingHeaders(buffer);
                if (lastTrialer == null)
                {
                    // Trailer section not complete yet.
                    return;
                }
                output.Add(lastTrialer);
                this.ResetNow();
                return;
            }
            catch (Exception exception)
            {
                output.Add(this.InvalidChunk(buffer, exception));
                return;
            }
        }
        case State.BadMessage:
        {
            // Keep discarding until disconnection.
            buffer.SkipBytes(buffer.ReadableBytes);
            break;
        }
        case State.Upgraded:
        {
            int readableBytes = buffer.ReadableBytes;
            if (readableBytes > 0)
            {
                // Keep on consuming as otherwise we may trigger an DecoderException,
                // other handler will replace this codec with the upgraded protocol codec to
                // take the traffic over at some point then.
                // See https://github.com/netty/netty/issues/2173
                output.Add(buffer.ReadBytes(readableBytes));
            }
            break;
        }
    }
}
// End-to-end check that the server pipeline (codec -> aggregator -> compressor ->
// two custom outbound handlers) emits response parts to the client in order:
// response head, two gzip content chunks (asserted by hex dump), then an empty
// last content — and nothing more.
public async Task ExecutorPreserveOrdering()
{
    // Server side: local-transport server with the compressing pipeline under test.
    var sb = new ServerBootstrap();
    sb.Group(new DefaultEventLoopGroup(1), new DefaultEventLoopGroup());
    sb.Channel<LocalServerChannel>();
    sb.ChildHandler(new ActionChannelInitializer<IChannel>(ch =>
    {
        ch.Pipeline
            .AddLast(new HttpServerCodec())
            .AddLast(new HttpObjectAggregator(1024))
            .AddLast(/*compressorGroup,*/ new HttpContentCompressor())
            .AddLast(new ChannelOutboundHandlerAdapter0())
            .AddLast(new ChannelOutboundHandlerAdapter1());
    }));

    // Client side: decoded response objects are funneled into this collection.
    var responses = new BlockingCollection<IHttpObject>();
    var bs = new Bootstrap();
    bs.Group(new DefaultEventLoopGroup());
    bs.Channel<LocalChannel>();
    bs.Handler(new ActionChannelInitializer<IChannel>(ch =>
    {
        ch.Pipeline
            .AddLast(new HttpClientCodec())
            .AddLast(new ChannelInboundHandlerAdapter0(responses));
    }));

    IChannel serverChannel = null;
    IChannel clientChannel = null;
    try
    {
        // Random address avoids collisions with other local-transport tests.
        serverChannel = await sb.BindAsync(new LocalAddress(Guid.NewGuid().ToString("N")));
        clientChannel = await bs.ConnectAsync(serverChannel.LocalAddress);
        await clientChannel.WriteAndFlushAsync(NewRequest());

        // 1) Response head.
        var result = responses.TryTake(out var item, TimeSpan.FromSeconds(1));
        Assert.True(result);
        AssertEncodedResponse((IHttpResponse)item);

        // 2) First gzip chunk (header + deflated payload).
        // {Platform} covers the single OS-dependent byte in the gzip header.
        result = responses.TryTake(out item, TimeSpan.FromSeconds(1));
        Assert.True(result);
        IHttpContent c = (IHttpContent)item;
        Assert.NotNull(c);
        Assert.Equal($"1f8b08000000000000{Platform}f248cdc9c9d75108cf2fca4901000000ffff", ByteBufferUtil.HexDump(c.Content));
        c.Release();

        // 3) Second gzip chunk (stream trailer).
        result = responses.TryTake(out item, TimeSpan.FromSeconds(1));
        Assert.True(result);
        c = (IHttpContent)item;
        Assert.NotNull(c);
        Assert.Equal("0300c6865b260c000000", ByteBufferUtil.HexDump(c.Content));
        c.Release();

        // 4) Empty last content terminates the response.
        result = responses.TryTake(out item, TimeSpan.FromSeconds(1));
        Assert.True(result);
        ILastHttpContent last = (ILastHttpContent)item;
        Assert.NotNull(last);
        Assert.Equal(0, last.Content.ReadableBytes);
        last.Release();

        // Nothing further may arrive.
        Assert.False(responses.TryTake(out _, TimeSpan.FromSeconds(1)));
    }
    finally
    {
        // Tear everything down even on assertion failure.
        if (clientChannel != null)
        {
            await clientChannel.CloseAsync();
        }
        if (serverChannel != null)
        {
            await serverChannel.CloseAsync();
        }
        await Task.WhenAll(
            sb.Group().ShutdownGracefullyAsync(TimeSpan.FromMilliseconds(100), TimeSpan.FromSeconds(5)),
            sb.ChildGroup().ShutdownGracefullyAsync(TimeSpan.FromMilliseconds(100), TimeSpan.FromSeconds(5)),
            bs.Group().ShutdownGracefullyAsync(TimeSpan.FromMilliseconds(100), TimeSpan.FromSeconds(5)));
    }
}
// Wraps a chunked byte-buffer source; with no explicit trailer supplied, the
// stream is terminated with the shared empty last-content instance.
public HttpChunkedInput(IChunkedInput<IByteBuffer> input)
{
    this.lastHttpContent = EmptyLastHttpContent.Default;
    this.input = input;
}