/// <summary>
/// Decodes HTTP messages from <paramref name="buffer"/> using an internal state machine
/// (<c>_currentState</c>). Each call consumes as much of the buffer as the current state
/// allows, appends decoded message parts (<c>IHttpMessage</c> / <c>IHttpContent</c>) to
/// <paramref name="output"/>, and returns; the framework re-invokes Decode as more bytes
/// arrive. <c>goto case</c> is used to emulate Java-style switch fall-through.
/// </summary>
/// <param name="context">The channel handler context (unused directly here).</param>
/// <param name="buffer">The cumulated inbound bytes to decode from.</param>
/// <param name="output">Receives decoded message objects.</param>
protected override void Decode(IChannelHandlerContext context, IByteBuffer buffer, List<object> output)
{
    // Branch-free "!= 0" check: honors a reset requested (possibly from another thread,
    // hence Volatile.Read) before decoding. SharedConstants.False is presumably 0 —
    // TODO(review): confirm against SharedConstants.
    if (SharedConstants.False < (uint)Volatile.Read(ref _resetRequested)) { ResetNow(); }
    switch (_currentState)
    {
        case State.SkipControlChars:
        // Fall through — leading control characters are handled by the line parser here.
        case State.ReadInitial:
        {
            try
            {
                // Parse the initial line (request line / status line); null means
                // the line is not yet complete — wait for more bytes.
                AppendableCharSequence line = _lineParser.Parse(buffer);
                if (line is null) { return; }
                AsciiString[] initialLine = SplitInitialLine(line);
                // An initial line must have 3 parts (e.g. METHOD URI VERSION).
                if ((uint)initialLine.Length < 3u)
                {
                    // Invalid initial line - ignore.
                    _currentState = State.SkipControlChars;
                    return;
                }

                _message = CreateMessage(initialLine);
                _currentState = State.ReadHeader;
                goto case State.ReadHeader; // Fall through
            }
            catch (Exception e)
            {
                output.Add(InvalidMessage(buffer, e));
                return;
            }
        }
        case State.ReadHeader:
        {
            try
            {
                // null means the header section is not yet complete — wait for more bytes.
                State? nextState = ReadHeaders(buffer);
                if (nextState is null) { return; }
                _currentState = nextState.Value;
                switch (nextState.Value)
                {
                    case State.SkipControlChars:
                    {
                        // fast-path
                        // No content is expected.
                        output.Add(_message);
                        output.Add(EmptyLastHttpContent.Default);
                        ResetNow();
                        return;
                    }
                    case State.ReadChunkSize:
                    {
                        if (!_chunkedSupported)
                        {
                            ThrowHelper.ThrowArgumentException_ChunkedMsgNotSupported();
                        }
                        // Chunked encoding - generate HttpMessage first. HttpChunks will follow.
                        output.Add(_message);
                        return;
                    }
                    default:
                    {
                        // <a href="https://tools.ietf.org/html/rfc7230#section-3.3.3">RFC 7230, 3.3.3</a> states that if a
                        // request does not have either a transfer-encoding or a content-length header then the message body
                        // length is 0. However for a response the body length is the number of octets received prior to the
                        // server closing the connection. So we treat this as variable length chunked encoding.
                        long length = ContentLength();
                        // "0u >= (uint)length" is a branch-free "length == 0" check.
                        if (0u >= (uint)length || length == -1 && IsDecodingRequest())
                        {
                            output.Add(_message);
                            output.Add(EmptyLastHttpContent.Default);
                            ResetNow();
                            return;
                        }

                        Debug.Assert(nextState.Value == State.ReadFixedLengthContent || nextState.Value == State.ReadVariableLengthContent);

                        output.Add(_message);

                        if (nextState == State.ReadFixedLengthContent)
                        {
                            // chunkSize will be decreased as the READ_FIXED_LENGTH_CONTENT state reads data chunk by chunk.
                            _chunkSize = length;
                        }

                        // We return here, this forces decode to be called again where we will decode the content
                        return;
                    }
                }
            }
            catch (Exception exception)
            {
                output.Add(InvalidMessage(buffer, exception));
                return;
            }
        }
        case State.ReadVariableLengthContent:
        {
            // Keep reading data as a chunk until the end of connection is reached.
            int toRead = Math.Min(buffer.ReadableBytes, _maxChunkSize);
            if (toRead > 0)
            {
                IByteBuffer content = buffer.ReadRetainedSlice(toRead);
                output.Add(new DefaultHttpContent(content));
            }
            return;
        }
        case State.ReadFixedLengthContent:
        {
            int readLimit = buffer.ReadableBytes;

            // Check if the buffer is readable first as we use the readable byte count
            // to create the HttpChunk. This is needed as otherwise we may end up with
            // create an HttpChunk instance that contains an empty buffer and so is
            // handled like it is the last HttpChunk.
            //
            // See https://github.com/netty/netty/issues/433
            if (0u >= (uint)readLimit) { return; }

            // Never read more than remains of the fixed-length body (_chunkSize).
            int toRead = Math.Min(readLimit, _maxChunkSize);
            if (toRead > _chunkSize)
            {
                toRead = (int)_chunkSize;
            }
            IByteBuffer content = buffer.ReadRetainedSlice(toRead);
            _chunkSize -= toRead;

            if (0ul >= (ulong)_chunkSize)
            {
                // Read all content.
                output.Add(new DefaultLastHttpContent(content, ValidateHeaders));
                ResetNow();
            }
            else
            {
                output.Add(new DefaultHttpContent(content));
            }
            return;
        }
        // everything else after this point takes care of reading chunked content.
        // basically: read chunk size, read chunk, read and ignore the CRLF and repeat until 0.
        case State.ReadChunkSize:
        {
            try
            {
                // null means the chunk-size line is not yet complete — wait for more bytes.
                AppendableCharSequence line = _lineParser.Parse(buffer);
                if (line is null) { return; }
                int size = GetChunkSize(line.ToAsciiString());
                _chunkSize = size;
                // Chunk size 0 marks the last chunk; trailing headers follow.
                if (0u >= (uint)size)
                {
                    _currentState = State.ReadChunkFooter;
                    return;
                }
                _currentState = State.ReadChunkedContent;
                goto case State.ReadChunkedContent; // fall-through
            }
            catch (Exception e)
            {
                output.Add(InvalidChunk(buffer, e));
                return;
            }
        }
        case State.ReadChunkedContent:
        {
            Debug.Assert(_chunkSize <= int.MaxValue);

            // Read at most: remaining chunk bytes, configured max chunk size, readable bytes.
            int toRead = Math.Min((int)_chunkSize, _maxChunkSize);
            toRead = Math.Min(toRead, buffer.ReadableBytes);
            if (0u >= (uint)toRead) { return; }
            IHttpContent chunk = new DefaultHttpContent(buffer.ReadRetainedSlice(toRead));
            _chunkSize -= toRead;

            output.Add(chunk);

            // Chunk not fully consumed yet — stay in this state until it is.
            if (_chunkSize != 0) { return; }
            _currentState = State.ReadChunkDelimiter;
            goto case State.ReadChunkDelimiter; // fall-through
        }
        case State.ReadChunkDelimiter:
        {
            // Skip bytes up to and including the LF that terminates the chunk's CRLF delimiter.
            int wIdx = buffer.WriterIndex;
            int rIdx = buffer.ReaderIndex;
            // TODO ForEachByte
            while (wIdx > rIdx)
            {
                byte next = buffer.GetByte(rIdx++);
                if (next == HttpConstants.LineFeed)
                {
                    _currentState = State.ReadChunkSize;
                    break;
                }
            }
            _ = buffer.SetReaderIndex(rIdx);
            return;
        }
        case State.ReadChunkFooter:
        {
            try
            {
                // null means trailing headers are not yet complete — wait for more bytes.
                // NOTE(review): "lastTrialer" looks like a typo for "lastTrailer".
                ILastHttpContent lastTrialer = ReadTrailingHeaders(buffer);
                if (lastTrialer is null) { return; }
                output.Add(lastTrialer);
                ResetNow();
                return;
            }
            catch (Exception exception)
            {
                output.Add(InvalidChunk(buffer, exception));
                return;
            }
        }
        case State.BadMessage:
        {
            // Keep discarding until disconnection.
            _ = buffer.SkipBytes(buffer.ReadableBytes);
            break;
        }
        case State.Upgraded:
        {
            int readableBytes = buffer.ReadableBytes;
            if (readableBytes > 0)
            {
                // Keep on consuming as otherwise we may trigger an DecoderException,
                // other handler will replace this codec with the upgraded protocol codec to
                // take the traffic over at some point then.
                // See https://github.com/netty/netty/issues/2173
                output.Add(buffer.ReadBytes(readableBytes));
            }
            break;
        }
    }
}
/// <summary>
/// Decodes HTTP messages from <paramref name="buffer"/> using an internal state machine
/// (<c>this.currentState</c>), appending decoded parts to <paramref name="output"/>.
/// In addition, when the initial line yields a <c>DefaultFullHttpRequest</c>, this
/// variant mirrors the request line (method, app-relative URI, protocol version) into
/// the <c>this.pho</c> side-channel. <c>goto case</c> emulates switch fall-through.
/// </summary>
/// <param name="context">The channel handler context (unused directly here).</param>
/// <param name="buffer">The cumulated inbound bytes to decode from.</param>
/// <param name="output">Receives decoded message objects.</param>
protected override void Decode(IChannelHandlerContext context, IByteBuffer buffer, List<object> output)
{
    if (this.resetRequested)
    {
        this.ResetNow();
    }

    switch (this.currentState)
    {
        case State.SkipControlChars:
        {
            // Consume leading control characters; false means more bytes are needed.
            if (!SkipControlCharacters(buffer))
            {
                return;
            }
            this.currentState = State.ReadInitial;
            goto case State.ReadInitial; // Fall through
        }
        case State.ReadInitial:
        {
            try
            {
                // null means the initial line is not yet complete — wait for more bytes.
                AppendableCharSequence line = this.lineParser.Parse(buffer);
                if (line == null)
                {
                    return;
                }
                AsciiString[] initialLine = SplitInitialLine(line);
                // An initial line must have 3 parts (e.g. METHOD URI VERSION).
                if (initialLine.Length < 3)
                {
                    // Invalid initial line - ignore.
                    this.currentState = State.SkipControlChars;
                    return;
                }

                this.message = this.CreateMessage(initialLine);

                // Mirror the request line into the pho side-channel.
                // FIX: the original used "this.message as DefaultFullHttpRequest" and then
                // dereferenced the result unconditionally, throwing NullReferenceException
                // whenever the decoded message was not a DefaultFullHttpRequest.
                if (this.message is DefaultFullHttpRequest dfhp)
                {
                    this.pho.url = dfhp.Uri;
                    this.pho.appKey = getAppKeyFromUri(this.pho.url);

                    // FIX: strip the "/{appKey}" PREFIX from the URI. The original called
                    // string.TrimStart(("/" + appKey).ToCharArray()), which removes any
                    // leading characters belonging to that SET rather than the prefix —
                    // e.g. with appKey "abc", "/abc/cab/x" became "x" instead of "/cab/x".
                    string prefix = "/" + this.pho.appKey;
                    string relativeUri = dfhp.Uri.StartsWith(prefix, StringComparison.Ordinal)
                        ? dfhp.Uri.Substring(prefix.Length)
                        : dfhp.Uri;

                    this.pho.databuffer.WriteString(dfhp.Method.Name, System.Text.Encoding.UTF8);
                    this.pho.databuffer.WriteString(relativeUri, System.Text.Encoding.UTF8);
                    this.pho.databuffer.WriteString(dfhp.ProtocolVersion.ToString(), System.Text.Encoding.UTF8);
                }

                this.currentState = State.ReadHeader;
                goto case State.ReadHeader; // Fall through
            }
            catch (Exception e)
            {
                output.Add(this.InvalidMessage(buffer, e));
                return;
            }
        }
        case State.ReadHeader:
        {
            try
            {
                // null means the header section is not yet complete — wait for more bytes.
                State? nextState = this.ReadHeaders(buffer);
                if (nextState == null)
                {
                    return;
                }
                this.currentState = nextState.Value;
                switch (nextState.Value)
                {
                    case State.SkipControlChars:
                    {
                        // fast-path
                        // No content is expected.
                        output.Add(this.message);
                        output.Add(EmptyLastHttpContent.Default);
                        this.ResetNow();
                        return;
                    }
                    case State.ReadChunkSize:
                    {
                        if (!this.chunkedSupported)
                        {
                            throw new ArgumentException("Chunked messages not supported");
                        }
                        // Chunked encoding - generate HttpMessage first. HttpChunks will follow.
                        output.Add(this.message);
                        return;
                    }
                    default:
                    {
                        // <a href="https://tools.ietf.org/html/rfc7230#section-3.3.3">RFC 7230, 3.3.3</a> states that if a
                        // request does not have either a transfer-encoding or a content-length header then the message body
                        // length is 0. However for a response the body length is the number of octets received prior to the
                        // server closing the connection. So we treat this as variable length chunked encoding.
                        long length = this.ContentLength();
                        if (length == 0 || length == -1 && this.IsDecodingRequest())
                        {
                            output.Add(this.message);
                            output.Add(EmptyLastHttpContent.Default);
                            this.ResetNow();
                            return;
                        }

                        Debug.Assert(nextState.Value == State.ReadFixedLengthContent || nextState.Value == State.ReadVariableLengthContent);

                        output.Add(this.message);

                        if (nextState == State.ReadFixedLengthContent)
                        {
                            // chunkSize will be decreased as the READ_FIXED_LENGTH_CONTENT state reads data chunk by chunk.
                            this.chunkSize = length;
                        }

                        // We return here, this forces decode to be called again where we will decode the content
                        return;
                    }
                }
            }
            catch (Exception exception)
            {
                output.Add(this.InvalidMessage(buffer, exception));
                return;
            }
        }
        case State.ReadVariableLengthContent:
        {
            // Keep reading data as a chunk until the end of connection is reached.
            int toRead = Math.Min(buffer.ReadableBytes, this.maxChunkSize);
            if (toRead > 0)
            {
                IByteBuffer content = buffer.ReadRetainedSlice(toRead);
                output.Add(new DefaultHttpContent(content));
            }
            return;
        }
        case State.ReadFixedLengthContent:
        {
            int readLimit = buffer.ReadableBytes;

            // Check if the buffer is readable first as we use the readable byte count
            // to create the HttpChunk. This is needed as otherwise we may end up with
            // create a HttpChunk instance that contains an empty buffer and so is
            // handled like it is the last HttpChunk.
            //
            // See https://github.com/netty/netty/issues/433
            if (readLimit == 0)
            {
                return;
            }

            // Never read more than remains of the fixed-length body (this.chunkSize).
            int toRead = Math.Min(readLimit, this.maxChunkSize);
            if (toRead > this.chunkSize)
            {
                toRead = (int)this.chunkSize;
            }
            IByteBuffer content = buffer.ReadRetainedSlice(toRead);
            this.chunkSize -= toRead;

            if (this.chunkSize == 0)
            {
                // Read all content.
                output.Add(new DefaultLastHttpContent(content, this.ValidateHeaders));
                this.ResetNow();
            }
            else
            {
                output.Add(new DefaultHttpContent(content));
            }
            return;
        }
        // everything else after this point takes care of reading chunked content.
        // basically: read chunk size, read chunk, read and ignore the CRLF and repeat until 0.
        case State.ReadChunkSize:
        {
            try
            {
                // null means the chunk-size line is not yet complete — wait for more bytes.
                AppendableCharSequence line = this.lineParser.Parse(buffer);
                if (line == null)
                {
                    return;
                }
                int size = GetChunkSize(line.ToAsciiString());
                this.chunkSize = size;
                // Chunk size 0 marks the last chunk; trailing headers follow.
                if (size == 0)
                {
                    this.currentState = State.ReadChunkFooter;
                    return;
                }
                this.currentState = State.ReadChunkedContent;
                goto case State.ReadChunkedContent; // fall-through
            }
            catch (Exception e)
            {
                output.Add(this.InvalidChunk(buffer, e));
                return;
            }
        }
        case State.ReadChunkedContent:
        {
            Debug.Assert(this.chunkSize <= int.MaxValue);

            // Read at most: remaining chunk bytes, configured max chunk size, readable bytes.
            int toRead = Math.Min((int)this.chunkSize, this.maxChunkSize);
            toRead = Math.Min(toRead, buffer.ReadableBytes);
            if (toRead == 0)
            {
                return;
            }
            IHttpContent chunk = new DefaultHttpContent(buffer.ReadRetainedSlice(toRead));
            this.chunkSize -= toRead;

            output.Add(chunk);

            // Chunk not fully consumed yet — stay in this state until it is.
            if (this.chunkSize != 0)
            {
                return;
            }
            this.currentState = State.ReadChunkDelimiter;
            goto case State.ReadChunkDelimiter; // fall-through
        }
        case State.ReadChunkDelimiter:
        {
            // Skip bytes up to and including the LF that terminates the chunk's CRLF delimiter.
            int wIdx = buffer.WriterIndex;
            int rIdx = buffer.ReaderIndex;
            while (wIdx > rIdx)
            {
                byte next = buffer.GetByte(rIdx++);
                if (next == HttpConstants.LineFeed)
                {
                    this.currentState = State.ReadChunkSize;
                    break;
                }
            }
            buffer.SetReaderIndex(rIdx);
            return;
        }
        case State.ReadChunkFooter:
        {
            try
            {
                // null means trailing headers are not yet complete — wait for more bytes.
                ILastHttpContent lastTrailer = this.ReadTrailingHeaders(buffer);
                if (lastTrailer == null)
                {
                    return;
                }
                output.Add(lastTrailer);
                this.ResetNow();
                return;
            }
            catch (Exception exception)
            {
                output.Add(this.InvalidChunk(buffer, exception));
                return;
            }
        }
        case State.BadMessage:
        {
            // Keep discarding until disconnection.
            buffer.SkipBytes(buffer.ReadableBytes);
            break;
        }
        case State.Upgraded:
        {
            int readableBytes = buffer.ReadableBytes;
            if (readableBytes > 0)
            {
                // Keep on consuming as otherwise we may trigger an DecoderException,
                // other handler will replace this codec with the upgraded protocol codec to
                // take the traffic over at some point then.
                // See https://github.com/netty/netty/issues/2173
                output.Add(buffer.ReadBytes(readableBytes));
            }
            break;
        }
    }
}