/// <summary>
/// Creates a new instance with the specified parameters.
/// </summary>
/// <param name="maxInitialLineLength">Maximum allowed length of the initial request/status line; must be positive.</param>
/// <param name="maxHeaderSize">Maximum allowed combined size of the headers; must be positive.</param>
/// <param name="maxChunkSize">Maximum size of a single decoded content chunk; must be positive.</param>
/// <param name="chunkedSupported">Whether "Transfer-Encoding: chunked" messages are accepted by this decoder.</param>
/// <param name="validateHeaders">Whether header names/values are validated when headers are added.</param>
/// <param name="initialBufferSize">Initial capacity of the shared line-accumulation buffer.</param>
protected HttpObjectDecoder(
    int maxInitialLineLength, int maxHeaderSize, int maxChunkSize,
    bool chunkedSupported, bool validateHeaders, int initialBufferSize)
{
    // (uint)(x - 1) > TooBigOrNegative is a branch-efficient check for x <= 0.
    if ((uint)(maxInitialLineLength - 1) > SharedConstants.TooBigOrNegative)
    {
        ThrowHelper.ThrowArgumentException_Positive(maxInitialLineLength, ExceptionArgument.maxInitialLineLength);
    }
    if ((uint)(maxHeaderSize - 1) > SharedConstants.TooBigOrNegative)
    {
        ThrowHelper.ThrowArgumentException_Positive(maxHeaderSize, ExceptionArgument.maxHeaderSize);
    }
    if ((uint)(maxChunkSize - 1) > SharedConstants.TooBigOrNegative)
    {
        ThrowHelper.ThrowArgumentException_Positive(maxChunkSize, ExceptionArgument.maxChunkSize);
    }
    // One AppendableCharSequence is shared by both parsers; it is reset per parse.
    var seq = new AppendableCharSequence(initialBufferSize);
    _lineParser = new LineParser(this, seq, maxInitialLineLength);
    _headerParser = new HeaderParser(seq, maxHeaderSize);
    _maxChunkSize = maxChunkSize;
    _chunkedSupported = chunkedSupported;
    ValidateHeaders = validateHeaders;
}
/// <summary>
/// SubSequence over a populated sequence yields exactly the requested prefix.
/// </summary>
public void TestSubSequence()
{
    var source = new AppendableCharSequence(26);
    source.Append((AsciiString)"abcdefghijlkmonpqrstuvwxyz");

    string prefix = source.SubSequence(0, 10).ToString();
    Assert.Equal("abcdefghij", prefix);
}
// Parses the trailer section of a chunked message.
// Returns null when more input is needed, EmptyLastHttpContent.Default when the
// trailer section is empty, or a DefaultLastHttpContent carrying the parsed
// trailing headers. Parsing state is carried across calls in this.trailer.
ILastHttpContent ReadTrailingHeaders(IByteBuffer buffer)
{
    AppendableCharSequence line = this.headerParser.Parse(buffer);
    if (line == null)
    {
        // Not enough data for a complete line yet.
        return (null);
    }
    AsciiString lastHeader = null;
    if (line.Count > 0)
    {
        ILastHttpContent trailingHeaders = this.trailer;
        if (trailingHeaders == null)
        {
            trailingHeaders = new DefaultLastHttpContent(Unpooled.Empty, this.ValidateHeaders);
            this.trailer = trailingHeaders;
        }
        do
        {
            byte firstChar = line.Bytes[0];
            if (lastHeader != null && (firstChar == ' ' || firstChar == '\t'))
            {
                // Obsolete line folding: a line starting with SP/HTAB continues
                // the previous header's value.
                IList<ICharSequence> current = trailingHeaders.TrailingHeaders.GetAll(lastHeader);
                if (current.Count > 0)
                {
                    int lastPos = current.Count - 1;
                    ICharSequence lineTrimmed = CharUtil.Trim(line);
                    current[lastPos] = new AsciiString($"{current[lastPos]} {lineTrimmed}");
                }
            }
            else
            {
                this.SplitHeader(line);
                AsciiString headerName = this.name;
                // Content-Length, Transfer-Encoding and Trailer are not allowed
                // as trailing headers and are silently dropped.
                if (!HttpHeaderNames.ContentLength.ContentEqualsIgnoreCase(headerName) &&
                    !HttpHeaderNames.TransferEncoding.ContentEqualsIgnoreCase(headerName) &&
                    !HttpHeaderNames.Trailer.ContentEqualsIgnoreCase(headerName))
                {
                    trailingHeaders.TrailingHeaders.Add(headerName, this.value);
                }
                lastHeader = this.name;
                // reset name and value fields
                this.name = null;
                this.value = null;
            }
            line = this.headerParser.Parse(buffer);
            if (line == null)
            {
                // Ran out of input mid-trailer; trailer field keeps the partial state.
                return (null);
            }
        } while (line.Count > 0);
        this.trailer = null;
        return (trailingHeaders);
    }
    // Empty line immediately: no trailers at all.
    return (EmptyLastHttpContent.Default);
}
// Splits one raw header line into _name and _value.
// The name runs from the first non-whitespace byte up to the colon (or, when
// decoding a response, up to optional whitespace before the colon); the value
// is the trimmed remainder after the colon, or AsciiString.Empty.
private void SplitHeader(AppendableCharSequence sb)
{
    byte[] chars = sb.Bytes;
    int length = sb.Count;
    int nameEnd;
    int colonEnd;
    int nameStart = FindNonWhitespace(chars, 0, length, false);
    for (nameEnd = nameStart; nameEnd < length; nameEnd++)
    {
        byte ch = chars[nameEnd];
        // https://tools.ietf.org/html/rfc7230#section-3.2.4
        //
        // No whitespace is allowed between the header field-name and colon. In
        // the past, differences in the handling of such whitespace have led to
        // security vulnerabilities in request routing and response handling. A
        // server MUST reject any received request message that contains
        // whitespace between a header field-name and colon with a response code
        // of 400 (Bad Request). A proxy MUST remove any such whitespace from a
        // response message before forwarding the message downstream.
        if (ch == ':' ||
            // In case of decoding a request we will just continue processing and header validation
            // is done in the DefaultHttpHeaders implementation.
            //
            // In the case of decoding a response we will "skip" the whitespace.
            (!IsDecodingRequest() && IsOWS(ch)))
        {
            break;
        }
    }

    // 0u >= (uint)(nameEnd - length) holds only when nameEnd == length here,
    // i.e. the loop ran off the end without finding a colon (or OWS).
    if (0u >= (uint)(nameEnd - length))
    {
        // There was no colon present at all.
        ThrowHelper.ThrowArgumentException_No_colon_found();
    }

    // Advance past any skipped OWS to one position beyond the colon itself.
    for (colonEnd = nameEnd; colonEnd < length; colonEnd++)
    {
        if (chars[colonEnd] == HttpConstants.Colon)
        {
            colonEnd++;
            break;
        }
    }

    _name = sb.SubStringUnsafe(nameStart, nameEnd);
    int valueStart = FindNonWhitespace(chars, colonEnd, length, true);
    if (valueStart == length)
    {
        // Header with an empty value, e.g. "X-Foo:".
        _value = AsciiString.Empty;
    }
    else
    {
        int valueEnd = FindEndOfString(chars, length);
        _value = sb.SubStringUnsafe(valueStart, valueEnd);
    }
}
/// <summary>
/// A zero-length SubSequence is empty but remains a usable, appendable sequence.
/// </summary>
public void TestEmptySubSequence()
{
    var source = new AppendableCharSequence(26);
    source.Append((AsciiString)"abcdefghijlkmonpqrstuvwxyz");

    var empty = (AppendableCharSequence)source.SubSequence(0, 0);
    Assert.Empty(empty);

    // Appending after the empty slice must still work.
    empty.Append('b');
    Assert.Equal('b', empty[0]);
}
/// <summary>
/// Shared driver: appends a whole string, verifies ToString/SubSequence/char
/// contents, then verifies Reset empties the sequence.
/// </summary>
private static void TestAppendString0(AppendableCharSequence seq)
{
    const string text = "testdata";
    seq.Append((AsciiString)text);

    Assert.Equal(text, seq.ToString());
    string inner = text.Substring(1, text.Length - 2);
    Assert.Equal(inner, seq.SubSequence(1, text.Length - 1).ToString());
    AssertEqualsChars((AsciiString)text, seq);

    seq.Reset();
    Assert.Empty(seq);
}
/// <summary>
/// Appending one AppendableCharSequence into another copies its full contents.
/// </summary>
public void TestAppendAppendableCharSequence()
{
    const string text = "testdata";

    var donor = new AppendableCharSequence(128);
    donor.Append((AsciiString)text);

    var target = new AppendableCharSequence(128);
    target.Append(donor);

    Assert.Equal(text, target.ToString());
    string inner = text.Substring(1, text.Length - 2);
    Assert.Equal(inner, target.SubSequence(1, text.Length - 1).ToString());
    AssertEqualsChars((AsciiString)text, target);
}
/// <summary>
/// Creates a new decoder instance.
/// </summary>
/// <param name="maxInitialLineLength">Maximum allowed length of the initial line; must be positive.</param>
/// <param name="maxHeaderSize">Maximum allowed combined header size; must be positive.</param>
/// <param name="maxChunkSize">Maximum size of a single decoded content chunk; must be positive.</param>
/// <param name="chunkedSupported">Whether chunked transfer-encoding is accepted.</param>
/// <param name="validateHeaders">Whether header names/values are validated on add.</param>
/// <param name="initialBufferSize">Initial capacity of the shared line buffer.</param>
protected HttpObjectDecoder(
    int maxInitialLineLength, int maxHeaderSize, int maxChunkSize,
    bool chunkedSupported, bool validateHeaders, int initialBufferSize)
{
    Contract.Requires(maxInitialLineLength > 0);
    Contract.Requires(maxHeaderSize > 0);
    Contract.Requires(maxChunkSize > 0);

    // Both parsers share a single accumulation buffer.
    var sharedBuffer = new AppendableCharSequence(initialBufferSize);
    this.headerParser = new HeaderParser(sharedBuffer, maxHeaderSize);
    this.lineParser = new LineParser(sharedBuffer, maxInitialLineLength);

    this.ValidateHeaders = validateHeaders;
    this.chunkedSupported = chunkedSupported;
    this.maxChunkSize = maxChunkSize;
}
/// <summary>
/// Shared driver: appends a string one character at a time, verifies
/// ToString/SubSequence/char contents, then verifies Reset empties the sequence.
/// </summary>
private static void TestSimpleAppend0(AppendableCharSequence seq)
{
    const string text = "testdata";
    foreach (char ch in text)
    {
        seq.Append(ch);
    }

    Assert.Equal(text, seq.ToString());
    string inner = text.Substring(1, text.Length - 2);
    Assert.Equal(inner, seq.SubSequence(1, text.Length - 1).ToString());
    AssertEqualsChars((AsciiString)text, seq);

    seq.Reset();
    Assert.Empty(seq);
}
/// <summary>
/// Splits the initial HTTP line into its three whitespace-separated parts
/// (lenient about extra separators). The third part may be empty, in which
/// case <see cref="AsciiString.Empty"/> is returned for it.
/// </summary>
private static AsciiString[] SplitInitialLine(AppendableCharSequence sb)
{
    byte[] bytes = sb.Bytes;
    int len = sb.Count;

    int firstStart = FindNonSPLenient(bytes, 0, len);
    int firstEnd = FindSPLenient(bytes, firstStart, len);
    int secondStart = FindNonSPLenient(bytes, firstEnd, len);
    int secondEnd = FindSPLenient(bytes, secondStart, len);
    int thirdStart = FindNonSPLenient(bytes, secondEnd, len);
    int thirdEnd = FindEndOfString(bytes, len);

    AsciiString third = thirdStart < thirdEnd
        ? sb.SubStringUnsafe(thirdStart, thirdEnd)
        : AsciiString.Empty;

    return new[]
    {
        sb.SubStringUnsafe(firstStart, firstEnd),
        sb.SubStringUnsafe(secondStart, secondEnd),
        third
    };
}
// Splits one raw header line into this.name and this.value.
// The name runs from the first non-whitespace byte up to the colon or first
// whitespace byte; the value is the trimmed remainder after the colon, or
// AsciiString.Empty when the line has no value.
void SplitHeader(AppendableCharSequence sb)
{
    byte[] chars = sb.Bytes;
    int length = sb.Count;
    int nameEnd;
    int colonEnd;
    int nameStart = FindNonWhitespace(chars, 0, length);
    // Scan to the end of the field name.
    for (nameEnd = nameStart; nameEnd < length; nameEnd++)
    {
        byte ch = chars[nameEnd];
        if (ch == ':' || IsWhiteSpace(ch))
        {
            break;
        }
    }
    // Advance to one position past the colon (tolerates whitespace before it).
    for (colonEnd = nameEnd; colonEnd < length; colonEnd++)
    {
        if (chars[colonEnd] == ':')
        {
            colonEnd++;
            break;
        }
    }
    this.name = sb.SubStringUnsafe(nameStart, nameEnd);
    int valueStart = FindNonWhitespace(chars, colonEnd, length);
    if (valueStart == length)
    {
        // Header with an empty value, e.g. "X-Foo:".
        this.value = AsciiString.Empty;
    }
    else
    {
        int valueEnd = FindEndOfString(chars, length);
        this.value = sb.SubStringUnsafe(valueStart, valueEnd);
    }
}
/// <summary>
/// Creates a header parser that accumulates bytes into <paramref name="seq"/>,
/// enforcing a maximum header length of <paramref name="maxLength"/>.
/// </summary>
internal HeaderParser(AppendableCharSequence seq, int maxLength)
{
    this.maxLength = maxLength;
    this.seq = seq;
}
// Parses the trailer section of a chunked message.
// Returns null when more input is needed, EmptyLastHttpContent.Default when the
// trailer section is empty, or a DefaultLastHttpContent carrying the parsed
// trailing headers. Partial state is carried across calls in _trailer.
private ILastHttpContent ReadTrailingHeaders(IByteBuffer buffer)
{
    AppendableCharSequence line = _headerParser.Parse(buffer);
    if (line is null)
    {
        // Not enough data for a complete line yet.
        return (null);
    }
    ILastHttpContent trailingHeaders = _trailer;
    if (0u >= (uint)line.Count && trailingHeaders is null)
    {
        // We have received the empty line which signals the trailer is complete and did not parse any trailers
        // before. Just return an empty last content to reduce allocations.
        return (EmptyLastHttpContent.Default);
    }
    AsciiString lastHeader = null;
    if (trailingHeaders is null)
    {
        trailingHeaders = new DefaultLastHttpContent(Unpooled.Empty, ValidateHeaders);
        _trailer = trailingHeaders;
    }
    while ((uint)line.Count > 0u)
    {
        byte firstChar = line.Bytes[0];
        if (lastHeader is object && (firstChar == c_space || firstChar == c_tab))
        {
            // Obsolete line folding: a SP/HTAB-prefixed line continues the
            // previous header's value.
            IList<ICharSequence> current = trailingHeaders.TrailingHeaders.GetAll(lastHeader);
            if ((uint)current.Count > 0u)
            {
                int lastPos = current.Count - 1;
                //please do not make one line from below code
                //as it breaks +XX:OptimizeStringConcat optimization
                ICharSequence lineTrimmed = CharUtil.Trim(line);
                current[lastPos] = new AsciiString($"{current[lastPos]}{lineTrimmed}");
            }
        }
        else
        {
            SplitHeader(line);
            AsciiString headerName = _name;
            // Content-Length, Transfer-Encoding and Trailer are not allowed as
            // trailing headers and are silently dropped.
            if (!HttpHeaderNames.ContentLength.ContentEqualsIgnoreCase(headerName) &&
                !HttpHeaderNames.TransferEncoding.ContentEqualsIgnoreCase(headerName) &&
                !HttpHeaderNames.Trailer.ContentEqualsIgnoreCase(headerName))
            {
                _ = trailingHeaders.TrailingHeaders.Add(headerName, _value);
            }
            lastHeader = _name;
            // reset name and value fields
            _name = null;
            _value = null;
        }
        line = _headerParser.Parse(buffer);
        if (line is null)
        {
            // Ran out of input mid-trailer; _trailer keeps the partial state.
            return (null);
        }
    }
    _trailer = null;
    return (trailingHeaders);
}
// Parses all message headers from the buffer and decides the next decoder state.
// Returns null when more input is needed. On completion the parsed headers have
// been added to the message, _contentLength is set when a Content-Length header
// was present, and the returned state selects how the body will be read.
State? ReadHeaders(IByteBuffer buffer)
{
    IHttpMessage httpMessage = _message;
    HttpHeaders headers = httpMessage.Headers;
    AppendableCharSequence line = _headerParser.Parse(buffer);
    if (line is null)
    {
        // Not enough data for a complete line yet.
        return (null);
    }
    // ReSharper disable once ConvertIfDoToWhile
    if ((uint)line.Count > 0u)
    {
        do
        {
            byte firstChar = line.Bytes[0];
            if (_name is object && (firstChar == c_space || firstChar == c_tab))
            {
                // Obsolete line folding: continuation of the previous value.
                //please do not make one line from below code
                //as it breaks +XX:OptimizeStringConcat optimization
                ICharSequence trimmedLine = CharUtil.Trim(line);
                _value = new AsciiString($"{_value} {trimmedLine}");
            }
            else
            {
                // Commit the previously-split header before parsing the next.
                if (_name is object)
                {
                    _ = headers.Add(_name, _value);
                }
                SplitHeader(line);
            }
            line = _headerParser.Parse(buffer);
            if (line is null)
            {
                return (null);
            }
        } while ((uint)line.Count > 0u);
    }
    // Add the last header.
    if (_name is object)
    {
        _ = headers.Add(_name, _value);
    }
    // reset name and value fields
    _name = null;
    _value = null;
    var values = headers.GetAll(HttpHeaderNames.ContentLength);
    uint contentLengthValuesCount = (uint)values.Count;
    if (contentLengthValuesCount > 0u)
    {
        // Guard against multiple Content-Length headers as stated in
        // https://tools.ietf.org/html/rfc7230#section-3.3.2:
        //
        // If a message is received that has multiple Content-Length header
        // fields with field-values consisting of the same decimal value, or a
        // single Content-Length header field with a field value containing a
        // list of identical decimal values (e.g., "Content-Length: 42, 42"),
        // indicating that duplicate Content-Length header fields have been
        // generated or combined by an upstream message processor, then the
        // recipient MUST either reject the message as invalid or replace the
        // duplicated field-values with a single valid Content-Length field
        // containing that decimal value prior to determining the message body
        // length or forwarding the message.
        if (contentLengthValuesCount > 1u && httpMessage.ProtocolVersion == HttpVersion.Http11)
        {
            ThrowHelper.ThrowArgumentException_Multiple_Content_Length_Headers_Found();
        }
        if (!long.TryParse(values[0].ToString(), out _contentLength))
        {
            ThrowHelper.ThrowArgumentException_Invalid_Content_Length();
        }
    }
    if (IsContentAlwaysEmpty(httpMessage))
    {
        HttpUtil.SetTransferEncodingChunked(httpMessage, false);
        return (State.SkipControlChars);
    }
    else if (HttpUtil.IsTransferEncodingChunked(httpMessage))
    {
        // Transfer-Encoding takes precedence; a conflicting Content-Length on
        // HTTP/1.1 gets special handling (request smuggling mitigation).
        if (contentLengthValuesCount > 0u && httpMessage.ProtocolVersion == HttpVersion.Http11)
        {
            HandleTransferEncodingChunkedWithContentLength(httpMessage);
        }
        return (State.ReadChunkSize);
    }
    else if (ContentLength() >= 0L)
    {
        return (State.ReadFixedLengthContent);
    }
    else
    {
        // Neither chunked nor Content-Length: body length is bounded only by
        // connection close.
        return (State.ReadVariableLengthContent);
    }
}
// Main decoder state machine. Consumes bytes from the buffer and emits
// IHttpMessage / IHttpContent objects into the output list. Each case either
// completes (possibly falling through via goto case) or returns to wait for
// more input.
protected override void Decode(IChannelHandlerContext context, IByteBuffer buffer, List<object> output)
{
    // A reset may have been requested from another thread; honor it first.
    if (SharedConstants.False < (uint)Volatile.Read(ref _resetRequested))
    {
        ResetNow();
    }
    switch (_currentState)
    {
        case State.SkipControlChars:
        // Fall through
        case State.ReadInitial:
        {
            try
            {
                AppendableCharSequence line = _lineParser.Parse(buffer);
                if (line is null)
                {
                    return;
                }
                AsciiString[] initialLine = SplitInitialLine(line);
                if ((uint)initialLine.Length < 3u)
                {
                    // Invalid initial line - ignore.
                    _currentState = State.SkipControlChars;
                    return;
                }
                _message = CreateMessage(initialLine);
                _currentState = State.ReadHeader;
                goto case State.ReadHeader; // Fall through
            }
            catch (Exception e)
            {
                output.Add(InvalidMessage(buffer, e));
                return;
            }
        }
        case State.ReadHeader:
        {
            try
            {
                State? nextState = ReadHeaders(buffer);
                if (nextState is null)
                {
                    return;
                }
                _currentState = nextState.Value;
                switch (nextState.Value)
                {
                    case State.SkipControlChars:
                    {
                        // fast-path
                        // No content is expected.
                        output.Add(_message);
                        output.Add(EmptyLastHttpContent.Default);
                        ResetNow();
                        return;
                    }
                    case State.ReadChunkSize:
                    {
                        if (!_chunkedSupported)
                        {
                            ThrowHelper.ThrowArgumentException_ChunkedMsgNotSupported();
                        }
                        // Chunked encoding - generate HttpMessage first. HttpChunks will follow.
                        output.Add(_message);
                        return;
                    }
                    default:
                    {
                        // <a href="https://tools.ietf.org/html/rfc7230#section-3.3.3">RFC 7230, 3.3.3</a> states that if a
                        // request does not have either a transfer-encoding or a content-length header then the message body
                        // length is 0. However for a response the body length is the number of octets received prior to the
                        // server closing the connection. So we treat this as variable length chunked encoding.
                        long length = ContentLength();
                        if (0u >= (uint)length || length == -1 && IsDecodingRequest())
                        {
                            output.Add(_message);
                            output.Add(EmptyLastHttpContent.Default);
                            ResetNow();
                            return;
                        }
                        Debug.Assert(nextState.Value == State.ReadFixedLengthContent ||
                                     nextState.Value == State.ReadVariableLengthContent);
                        output.Add(_message);
                        if (nextState == State.ReadFixedLengthContent)
                        {
                            // chunkSize will be decreased as the READ_FIXED_LENGTH_CONTENT state reads data chunk by chunk.
                            _chunkSize = length;
                        }
                        // We return here, this forces decode to be called again where we will decode the content
                        return;
                    }
                }
            }
            catch (Exception exception)
            {
                output.Add(InvalidMessage(buffer, exception));
                return;
            }
        }
        case State.ReadVariableLengthContent:
        {
            // Keep reading data as a chunk until the end of connection is reached.
            int toRead = Math.Min(buffer.ReadableBytes, _maxChunkSize);
            if (toRead > 0)
            {
                IByteBuffer content = buffer.ReadRetainedSlice(toRead);
                output.Add(new DefaultHttpContent(content));
            }
            return;
        }
        case State.ReadFixedLengthContent:
        {
            int readLimit = buffer.ReadableBytes;
            // Check if the buffer is readable first as we use the readable byte count
            // to create the HttpChunk. This is needed as otherwise we may end up with
            // create an HttpChunk instance that contains an empty buffer and so is
            // handled like it is the last HttpChunk.
            //
            // See https://github.com/netty/netty/issues/433
            if (0u >= (uint)readLimit)
            {
                return;
            }
            int toRead = Math.Min(readLimit, _maxChunkSize);
            if (toRead > _chunkSize)
            {
                toRead = (int)_chunkSize;
            }
            IByteBuffer content = buffer.ReadRetainedSlice(toRead);
            _chunkSize -= toRead;
            if (0ul >= (ulong)_chunkSize)
            {
                // Read all content.
                output.Add(new DefaultLastHttpContent(content, ValidateHeaders));
                ResetNow();
            }
            else
            {
                output.Add(new DefaultHttpContent(content));
            }
            return;
        }
        // everything else after this point takes care of reading chunked content:
        // basically, read chunk size, read chunk, read and ignore the CRLF and repeat until 0
        case State.ReadChunkSize:
        {
            try
            {
                AppendableCharSequence line = _lineParser.Parse(buffer);
                if (line is null)
                {
                    return;
                }
                int size = GetChunkSize(line.ToAsciiString());
                _chunkSize = size;
                if (0u >= (uint)size)
                {
                    // Zero-size chunk marks the start of the trailer section.
                    _currentState = State.ReadChunkFooter;
                    return;
                }
                _currentState = State.ReadChunkedContent;
                goto case State.ReadChunkedContent; // fall-through
            }
            catch (Exception e)
            {
                output.Add(InvalidChunk(buffer, e));
                return;
            }
        }
        case State.ReadChunkedContent:
        {
            Debug.Assert(_chunkSize <= int.MaxValue);
            int toRead = Math.Min((int)_chunkSize, _maxChunkSize);
            toRead = Math.Min(toRead, buffer.ReadableBytes);
            if (0u >= (uint)toRead)
            {
                return;
            }
            IHttpContent chunk = new DefaultHttpContent(buffer.ReadRetainedSlice(toRead));
            _chunkSize -= toRead;
            output.Add(chunk);
            if (_chunkSize != 0)
            {
                // Current chunk not fully read yet; wait for more input.
                return;
            }
            _currentState = State.ReadChunkDelimiter;
            goto case State.ReadChunkDelimiter; // fall-through
        }
        case State.ReadChunkDelimiter:
        {
            // Skip up to and including the LF that terminates the chunk data.
            int wIdx = buffer.WriterIndex;
            int rIdx = buffer.ReaderIndex;
            // TODO ForEachByte
            while (wIdx > rIdx)
            {
                byte next = buffer.GetByte(rIdx++);
                if (next == HttpConstants.LineFeed)
                {
                    _currentState = State.ReadChunkSize;
                    break;
                }
            }
            _ = buffer.SetReaderIndex(rIdx);
            return;
        }
        case State.ReadChunkFooter:
        {
            try
            {
                // NOTE: "lastTrialer" is a pre-existing misspelling of "lastTrailer".
                ILastHttpContent lastTrialer = ReadTrailingHeaders(buffer);
                if (lastTrialer is null)
                {
                    return;
                }
                output.Add(lastTrialer);
                ResetNow();
                return;
            }
            catch (Exception exception)
            {
                output.Add(InvalidChunk(buffer, exception));
                return;
            }
        }
        case State.BadMessage:
        {
            // Keep discarding until disconnection.
            _ = buffer.SkipBytes(buffer.ReadableBytes);
            break;
        }
        case State.Upgraded:
        {
            int readableBytes = buffer.ReadableBytes;
            if (readableBytes > 0)
            {
                // Keep on consuming as otherwise we may trigger an DecoderException,
                // other handler will replace this codec with the upgraded protocol codec to
                // take the traffic over at some point then.
                // See https://github.com/netty/netty/issues/2173
                output.Add(buffer.ReadBytes(readableBytes));
            }
            break;
        }
    }
}
/// <summary>
/// Creates a line parser bound to its owning decoder, accumulating into
/// <paramref name="seq"/> with a maximum line length of <paramref name="maxLength"/>.
/// </summary>
internal LineParser(HttpObjectDecoder owner, AppendableCharSequence seq, int maxLength)
    : base(seq, maxLength)
    => _owner = owner;
/// <summary>
/// Creates a header parser that accumulates bytes into <paramref name="seq"/>,
/// enforcing a maximum header length of <paramref name="maxLength"/>.
/// </summary>
internal HeaderParser(AppendableCharSequence seq, int maxLength)
{
    _maxLength = maxLength;
    _seq = seq;
}
/// <summary>
/// Creates a line parser that accumulates bytes into <paramref name="seq"/>
/// with a maximum line length of <paramref name="maxLength"/>; all behavior
/// is provided by the base parser.
/// </summary>
internal LineParser(AppendableCharSequence seq, int maxLength)
    : base(seq, maxLength)
{
}
// Main decoder state machine (older variant). Consumes bytes from the buffer
// and emits IHttpMessage / IHttpContent objects into the output list. Each
// case either completes (possibly falling through via goto case) or returns
// to wait for more input.
protected override void Decode(IChannelHandlerContext context, IByteBuffer buffer, List<object> output)
{
    if (this.resetRequested)
    {
        this.ResetNow();
    }
    switch (this.currentState)
    {
        case State.SkipControlChars:
        {
            if (!SkipControlCharacters(buffer))
            {
                return;
            }
            this.currentState = State.ReadInitial;
            goto case State.ReadInitial; // Fall through
        }
        case State.ReadInitial:
        {
            try
            {
                AppendableCharSequence line = this.lineParser.Parse(buffer);
                if (line == null)
                {
                    return;
                }
                AsciiString[] initialLine = SplitInitialLine(line);
                if (initialLine.Length < 3)
                {
                    // Invalid initial line - ignore.
                    this.currentState = State.SkipControlChars;
                    return;
                }
                this.message = this.CreateMessage(initialLine);
                // NOTE(review): `as` cast is dereferenced without a null check below;
                // if CreateMessage returns anything other than a DefaultFullHttpRequest
                // this throws NullReferenceException — confirm that is impossible here.
                var dfhp = (this.message as DefaultFullHttpRequest);
                this.pho.url = dfhp.Uri;
                this.pho.appKey = getAppKeyFromUri(pho.url);
                this.pho.databuffer.WriteString(dfhp.Method.Name, System.Text.Encoding.UTF8);
                // NOTE(review): TrimStart(char[]) strips any leading occurrence of the
                // individual characters of "/" + appKey, not the prefix string — e.g.
                // appKey "abc" would also strip a leading 'a' from an unrelated URI.
                // Verify whether a prefix-removal (Substring/StartsWith) was intended.
                this.pho.databuffer.WriteString(dfhp.Uri.TrimStart(("/" + pho.appKey).ToCharArray()), System.Text.Encoding.UTF8);
                this.pho.databuffer.WriteString(dfhp.ProtocolVersion.ToString(), System.Text.Encoding.UTF8);
                this.currentState = State.ReadHeader;
                goto case State.ReadHeader; // Fall through
            }
            catch (Exception e)
            {
                output.Add(this.InvalidMessage(buffer, e));
                return;
            }
        }
        case State.ReadHeader:
        {
            try
            {
                State? nextState = this.ReadHeaders(buffer);
                if (nextState == null)
                {
                    return;
                }
                this.currentState = nextState.Value;
                switch (nextState.Value)
                {
                    case State.SkipControlChars:
                    {
                        // fast-path
                        // No content is expected.
                        output.Add(this.message);
                        output.Add(EmptyLastHttpContent.Default);
                        this.ResetNow();
                        return;
                    }
                    case State.ReadChunkSize:
                    {
                        if (!this.chunkedSupported)
                        {
                            throw new ArgumentException("Chunked messages not supported");
                        }
                        // Chunked encoding - generate HttpMessage first. HttpChunks will follow.
                        output.Add(this.message);
                        return;
                    }
                    default:
                    {
                        // <a href="https://tools.ietf.org/html/rfc7230#section-3.3.3">RFC 7230, 3.3.3</a> states that if a
                        // request does not have either a transfer-encoding or a content-length header then the message body
                        // length is 0. However for a response the body length is the number of octets received prior to the
                        // server closing the connection. So we treat this as variable length chunked encoding.
                        long length = this.ContentLength();
                        if (length == 0 || length == -1 && this.IsDecodingRequest())
                        {
                            output.Add(this.message);
                            output.Add(EmptyLastHttpContent.Default);
                            this.ResetNow();
                            return;
                        }
                        Debug.Assert(nextState.Value == State.ReadFixedLengthContent ||
                                     nextState.Value == State.ReadVariableLengthContent);
                        output.Add(this.message);
                        if (nextState == State.ReadFixedLengthContent)
                        {
                            // chunkSize will be decreased as the READ_FIXED_LENGTH_CONTENT state reads data chunk by chunk.
                            this.chunkSize = length;
                        }
                        // We return here, this forces decode to be called again where we will decode the content
                        return;
                    }
                }
            }
            catch (Exception exception)
            {
                output.Add(this.InvalidMessage(buffer, exception));
                return;
            }
        }
        case State.ReadVariableLengthContent:
        {
            // Keep reading data as a chunk until the end of connection is reached.
            int toRead = Math.Min(buffer.ReadableBytes, this.maxChunkSize);
            if (toRead > 0)
            {
                IByteBuffer content = buffer.ReadRetainedSlice(toRead);
                output.Add(new DefaultHttpContent(content));
            }
            return;
        }
        case State.ReadFixedLengthContent:
        {
            int readLimit = buffer.ReadableBytes;
            // Check if the buffer is readable first as we use the readable byte count
            // to create the HttpChunk. This is needed as otherwise we may end up with
            // create a HttpChunk instance that contains an empty buffer and so is
            // handled like it is the last HttpChunk.
            //
            // See https://github.com/netty/netty/issues/433
            if (readLimit == 0)
            {
                return;
            }
            int toRead = Math.Min(readLimit, this.maxChunkSize);
            if (toRead > this.chunkSize)
            {
                toRead = (int)this.chunkSize;
            }
            IByteBuffer content = buffer.ReadRetainedSlice(toRead);
            this.chunkSize -= toRead;
            if (this.chunkSize == 0)
            {
                // Read all content.
                output.Add(new DefaultLastHttpContent(content, this.ValidateHeaders));
                this.ResetNow();
            }
            else
            {
                output.Add(new DefaultHttpContent(content));
            }
            return;
        }
        // everything else after this point takes care of reading chunked content. basically, read chunk size,
        // read chunk, read and ignore the CRLF and repeat until 0
        case State.ReadChunkSize:
        {
            try
            {
                AppendableCharSequence line = this.lineParser.Parse(buffer);
                if (line == null)
                {
                    return;
                }
                int size = GetChunkSize(line.ToAsciiString());
                this.chunkSize = size;
                if (size == 0)
                {
                    // Zero-size chunk marks the start of the trailer section.
                    this.currentState = State.ReadChunkFooter;
                    return;
                }
                this.currentState = State.ReadChunkedContent;
                goto case State.ReadChunkedContent; // fall-through
            }
            catch (Exception e)
            {
                output.Add(this.InvalidChunk(buffer, e));
                return;
            }
        }
        case State.ReadChunkedContent:
        {
            Debug.Assert(this.chunkSize <= int.MaxValue);
            int toRead = Math.Min((int)this.chunkSize, this.maxChunkSize);
            toRead = Math.Min(toRead, buffer.ReadableBytes);
            if (toRead == 0)
            {
                return;
            }
            IHttpContent chunk = new DefaultHttpContent(buffer.ReadRetainedSlice(toRead));
            this.chunkSize -= toRead;
            output.Add(chunk);
            if (this.chunkSize != 0)
            {
                // Current chunk not fully read yet; wait for more input.
                return;
            }
            this.currentState = State.ReadChunkDelimiter;
            goto case State.ReadChunkDelimiter; // fall-through
        }
        case State.ReadChunkDelimiter:
        {
            // Skip up to and including the LF that terminates the chunk data.
            int wIdx = buffer.WriterIndex;
            int rIdx = buffer.ReaderIndex;
            while (wIdx > rIdx)
            {
                byte next = buffer.GetByte(rIdx++);
                if (next == HttpConstants.LineFeed)
                {
                    this.currentState = State.ReadChunkSize;
                    break;
                }
            }
            buffer.SetReaderIndex(rIdx);
            return;
        }
        case State.ReadChunkFooter:
        {
            try
            {
                // NOTE: "lastTrialer" is a pre-existing misspelling of "lastTrailer".
                ILastHttpContent lastTrialer = this.ReadTrailingHeaders(buffer);
                if (lastTrialer == null)
                {
                    return;
                }
                output.Add(lastTrialer);
                this.ResetNow();
                return;
            }
            catch (Exception exception)
            {
                output.Add(this.InvalidChunk(buffer, exception));
                return;
            }
        }
        case State.BadMessage:
        {
            // Keep discarding until disconnection.
            buffer.SkipBytes(buffer.ReadableBytes);
            break;
        }
        case State.Upgraded:
        {
            int readableBytes = buffer.ReadableBytes;
            if (readableBytes > 0)
            {
                // Keep on consuming as otherwise we may trigger an DecoderException,
                // other handler will replace this codec with the upgraded protocol codec to
                // take the traffic over at some point then.
                // See https://github.com/netty/netty/issues/2173
                output.Add(buffer.ReadBytes(readableBytes));
            }
            break;
        }
    }
}
// Parses all message headers from the buffer and decides the next decoder state.
// Returns null when more input is needed. On completion the parsed headers have
// been added to the message and the returned state selects how the body is read.
State? ReadHeaders(IByteBuffer buffer)
{
    IHttpMessage httpMessage = this.message;
    HttpHeaders headers = httpMessage.Headers;
    AppendableCharSequence line = this.headerParser.Parse(buffer);
    if (line == null)
    {
        // Not enough data for a complete line yet.
        return (null);
    }
    // ReSharper disable once ConvertIfDoToWhile
    if (line.Count > 0)
    {
        do
        {
            byte firstChar = line.Bytes[0];
            if (this.name != null && (firstChar == ' ' || firstChar == '\t'))
            {
                // Obsolete line folding: continuation of the previous value.
                ICharSequence trimmedLine = CharUtil.Trim(line);
                this.value = new AsciiString($"{this.value} {trimmedLine}");
            }
            else
            {
                // Commit the previously-split header before parsing the next.
                if (this.name != null)
                {
                    headers.Add(this.name, this.value);
                }
                this.SplitHeader(line);
            }
            line = this.headerParser.Parse(buffer);
            if (line == null)
            {
                return (null);
            }
        } while (line.Count > 0);
    }
    // Add the last header.
    if (this.name != null)
    {
        headers.Add(this.name, this.value);
    }
    // reset name and value fields
    this.name = null;
    this.value = null;
    State nextState;
    if (this.IsContentAlwaysEmpty(httpMessage))
    {
        HttpUtil.SetTransferEncodingChunked(httpMessage, false);
        nextState = State.SkipControlChars;
    }
    else if (HttpUtil.IsTransferEncodingChunked(httpMessage))
    {
        nextState = State.ReadChunkSize;
    }
    else if (this.ContentLength() >= 0)
    {
        nextState = State.ReadFixedLengthContent;
    }
    else
    {
        // Neither chunked nor Content-Length: body length is bounded only by
        // connection close.
        nextState = State.ReadVariableLengthContent;
    }
    return (nextState);
}