/// <summary>
/// Scans <paramref name="sb"/> starting at <paramref name="offset"/> and returns the index of
/// the first byte that is not a lenient space separator (see
/// https://tools.ietf.org/html/rfc7230#section-3.5). Throws if any other whitespace byte is
/// encountered, since only SP/HTAB-style separators are tolerated. Returns
/// <paramref name="length"/> (the exclusive end index) when every byte was a lenient separator.
/// </summary>
private static int FindNonSPLenient(byte[] sb, int offset, int length)
{
    int idx = offset;
    while (idx < length)
    {
        byte current = sb[idx];
        if (!IsSPLenient(current))
        {
            if (IsWhiteSpace(current))
            {
                // Any whitespace that is not a lenient separator is an invalid delimiter.
                ThrowHelper.ThrowArgumentException_Invalid_separator();
            }
            return idx;
        }
        idx++;
    }
    return length;
}
/// <summary>
/// Validates an HTTP header name, throwing if it is null, empty, or contains an
/// illegal character.
/// </summary>
/// <param name="name">The header name to validate.</param>
public void ValidateName(ICharSequence name)
{
    if (name == null || name.Count == 0)
    {
        ThrowHelper.ThrowArgumentException_HeaderName();
    }

    if (name is AsciiString ascii)
    {
        // Fast path: walk the raw bytes via the shared processor, avoiding per-char indexing.
        ascii.ForEachByte(ByteProcessor);
    }
    else
    {
        Debug.Assert(name != null);
        // Slow path: validate every character individually. Indexed loop avoids
        // allocating an enumerator instance.
        for (int i = 0; i < name.Count; i++)
        {
            ValidateHeaderNameElement(name[i]);
        }
    }
}
/// <summary>
/// Creates a decoder for the given raw URI string.
/// </summary>
/// <param name="uri">The URI (or bare query string) to decode; must not be null.</param>
/// <param name="charset">Encoding used when percent-decoding; must not be null.</param>
/// <param name="hasPath">Whether <paramref name="uri"/> begins with a path component.</param>
/// <param name="maxParams">Maximum number of parameters to decode; must be positive.</param>
/// <param name="semicolonIsNormalChar">When true, ';' is treated as an ordinary character rather than a parameter separator.</param>
public QueryStringDecoder(string uri, Encoding charset, bool hasPath, int maxParams, bool semicolonIsNormalChar)
{
    if (uri is null)
    {
        ThrowHelper.ThrowArgumentNullException(ExceptionArgument.uri);
    }
    if (charset is null)
    {
        ThrowHelper.ThrowArgumentNullException(ExceptionArgument.charset);
    }
    // Single unsigned comparison rejects every maxParams <= 0 value.
    if ((uint)(maxParams - 1) > SharedConstants.TooBigOrNegative)
    {
        ThrowHelper.ThrowArgumentException_Positive(maxParams, ExceptionArgument.maxParams);
    }

    _uri = uri;
    _charset = charset;
    _maxParams = maxParams;
    _semicolonIsNormalChar = semicolonIsNormalChar;
    // -1 flags that the path end index should be computed lazily on first use.
    _pathEndIdx = hasPath ? -1 : 0;
}
/// <summary>
/// Creates a decoder for the given <see cref="Uri"/>.
/// </summary>
/// <param name="uri">The URI to decode; must not be null.</param>
/// <param name="charset">Encoding used when percent-decoding; must not be null.</param>
/// <param name="maxParams">Maximum number of parameters to decode; must be positive.</param>
/// <param name="semicolonIsNormalChar">When true, ';' is treated as an ordinary character rather than a parameter separator.</param>
public QueryStringDecoder(Uri uri, Encoding charset, int maxParams, bool semicolonIsNormalChar)
{
    if (uri is null)
    {
        ThrowHelper.ThrowArgumentNullException(ExceptionArgument.uri);
    }
    if (charset is null)
    {
        ThrowHelper.ThrowArgumentNullException(ExceptionArgument.charset);
    }
    // Single unsigned comparison rejects every maxParams <= 0 value.
    if ((uint)(maxParams - 1) > SharedConstants.TooBigOrNegative)
    {
        ThrowHelper.ThrowArgumentException_Positive(maxParams, ExceptionArgument.maxParams);
    }

    // Using PathAndQuery also handles truncated forms such as "http://localhost"
    // (no explicit path), while AbsolutePath gives us the path length up front.
    string rawPath = uri.AbsolutePath;
    _uri = uri.PathAndQuery;
    _charset = charset;
    _maxParams = maxParams;
    _semicolonIsNormalChar = semicolonIsNormalChar;
    _pathEndIdx = rawPath.Length;
}
// Consumes complete header lines from the buffer into the current message's header
// collection, then decides how the message body will be read.
// Returns null when the buffer does not yet contain a full header line (the caller
// must wait for more bytes); otherwise returns the next decoder state.
// NOTE: _name/_value carry the partially-committed header across loop iterations so
// that obs-fold continuation lines can be appended before the header is added.
State? ReadHeaders(IByteBuffer buffer)
{
    IHttpMessage httpMessage = _message;
    HttpHeaders headers = httpMessage.Headers;

    AppendableCharSequence line = _headerParser.Parse(buffer);
    if (line is null) { return (null); } // incomplete line — need more data

    // An empty line terminates the header section; a non-empty line enters the loop.
    // ReSharper disable once ConvertIfDoToWhile
    if ((uint)line.Count > 0u)
    {
        do
        {
            byte firstChar = line.Bytes[0];
            if (_name is object && (firstChar == c_space || firstChar == c_tab))
            {
                // Continuation (folded) line: append trimmed text to the value of the
                // header parsed on a previous iteration.
                //please do not make one line from below code
                //as it breaks +XX:OptimizeStringConcat optimization
                ICharSequence trimmedLine = CharUtil.Trim(line);
                _value = new AsciiString($"{_value} {trimmedLine}");
            }
            else
            {
                // Commit the previously parsed header (if any), then split this line
                // into the _name/_value fields.
                if (_name is object)
                {
                    _ = headers.Add(_name, _value);
                }
                SplitHeader(line);
            }

            line = _headerParser.Parse(buffer);
            if (line is null) { return (null); } // incomplete line — need more data
        }
        while ((uint)line.Count > 0u);
    }

    // Add the last header.
    if (_name is object)
    {
        _ = headers.Add(_name, _value);
    }
    // reset name and value fields
    _name = null;
    _value = null;

    var values = headers.GetAll(HttpHeaderNames.ContentLength);
    uint contentLengthValuesCount = (uint)values.Count;
    if (contentLengthValuesCount > 0u)
    {
        // Guard against multiple Content-Length headers as stated in
        // https://tools.ietf.org/html/rfc7230#section-3.3.2: a recipient MUST either
        // reject a message with multiple differing Content-Length field values or
        // collapse duplicates to a single value before determining body length.
        // This implementation rejects duplicates only for HTTP/1.1 messages.
        if (contentLengthValuesCount > 1u && httpMessage.ProtocolVersion == HttpVersion.Http11)
        {
            ThrowHelper.ThrowArgumentException_Multiple_Content_Length_Headers_Found();
        }
        if (!long.TryParse(values[0].ToString(), out _contentLength))
        {
            ThrowHelper.ThrowArgumentException_Invalid_Content_Length();
        }
    }

    if (IsContentAlwaysEmpty(httpMessage))
    {
        // E.g. responses that never carry a body; force non-chunked and skip content.
        HttpUtil.SetTransferEncodingChunked(httpMessage, false);
        return (State.SkipControlChars);
    }
    else if (HttpUtil.IsTransferEncodingChunked(httpMessage))
    {
        // Transfer-Encoding: chunked together with Content-Length is a smuggling
        // vector; resolve the conflict before reading chunks (HTTP/1.1 only here).
        if (contentLengthValuesCount > 0u && httpMessage.ProtocolVersion == HttpVersion.Http11)
        {
            HandleTransferEncodingChunkedWithContentLength(httpMessage);
        }
        return (State.ReadChunkSize);
    }
    else if (ContentLength() >= 0L)
    {
        return (State.ReadFixedLengthContent);
    }
    else
    {
        return (State.ReadVariableLengthContent);
    }
}
// Decodes raw bytes into HTTP objects (message head, content chunks, trailers),
// driven by the _currentState state machine. When input is incomplete the method
// returns with state preserved, so decoding resumes when more bytes arrive.
// `goto case` is used to fall through immediately when a state transition can be
// serviced from bytes already in the buffer.
protected override void Decode(IChannelHandlerContext context, IByteBuffer buffer, List<object> output)
{
    // Honor an asynchronously requested reset before decoding anything.
    if (SharedConstants.False < (uint)Volatile.Read(ref _resetRequested))
    {
        ResetNow();
    }

    switch (_currentState)
    {
        case State.SkipControlChars:
        // Fall through
        case State.ReadInitial:
        {
            try
            {
                AppendableCharSequence line = _lineParser.Parse(buffer);
                if (line is null) { return; } // initial line incomplete — need more data

                AsciiString[] initialLine = SplitInitialLine(line);
                if ((uint)initialLine.Length < 3u)
                {
                    // Invalid initial line - ignore.
                    _currentState = State.SkipControlChars;
                    return;
                }

                _message = CreateMessage(initialLine);
                _currentState = State.ReadHeader;
                goto case State.ReadHeader; // Fall through
            }
            catch (Exception e)
            {
                output.Add(InvalidMessage(buffer, e));
                return;
            }
        }
        case State.ReadHeader:
        {
            try
            {
                State? nextState = ReadHeaders(buffer);
                if (nextState is null) { return; } // header section incomplete

                _currentState = nextState.Value;
                switch (nextState.Value)
                {
                    case State.SkipControlChars:
                    {
                        // fast-path
                        // No content is expected.
                        output.Add(_message);
                        output.Add(EmptyLastHttpContent.Default);
                        ResetNow();
                        return;
                    }
                    case State.ReadChunkSize:
                    {
                        if (!_chunkedSupported)
                        {
                            ThrowHelper.ThrowArgumentException_ChunkedMsgNotSupported();
                        }
                        // Chunked encoding - generate HttpMessage first. HttpChunks will follow.
                        output.Add(_message);
                        return;
                    }
                    default:
                    {
                        // <a href="https://tools.ietf.org/html/rfc7230#section-3.3.3">RFC 7230, 3.3.3</a> states that if a
                        // request does not have either a transfer-encoding or a content-length header then the message body
                        // length is 0. However for a response the body length is the number of octets received prior to the
                        // server closing the connection. So we treat this as variable length chunked encoding.
                        long length = ContentLength();
                        if (0u >= (uint)length || length == -1 && IsDecodingRequest())
                        {
                            output.Add(_message);
                            output.Add(EmptyLastHttpContent.Default);
                            ResetNow();
                            return;
                        }

                        Debug.Assert(nextState.Value == State.ReadFixedLengthContent || nextState.Value == State.ReadVariableLengthContent);

                        output.Add(_message);

                        if (nextState == State.ReadFixedLengthContent)
                        {
                            // chunkSize will be decreased as the READ_FIXED_LENGTH_CONTENT state reads data chunk by chunk.
                            _chunkSize = length;
                        }

                        // We return here, this forces decode to be called again where we will decode the content
                        return;
                    }
                }
            }
            catch (Exception exception)
            {
                output.Add(InvalidMessage(buffer, exception));
                return;
            }
        }
        case State.ReadVariableLengthContent:
        {
            // Keep reading data as a chunk until the end of connection is reached.
            int toRead = Math.Min(buffer.ReadableBytes, _maxChunkSize);
            if (toRead > 0)
            {
                IByteBuffer content = buffer.ReadRetainedSlice(toRead);
                output.Add(new DefaultHttpContent(content));
            }
            return;
        }
        case State.ReadFixedLengthContent:
        {
            int readLimit = buffer.ReadableBytes;

            // Check if the buffer is readable first as we use the readable byte count
            // to create the HttpChunk. This is needed as otherwise we may end up with
            // create an HttpChunk instance that contains an empty buffer and so is
            // handled like it is the last HttpChunk.
            //
            // See https://github.com/netty/netty/issues/433
            if (0u >= (uint)readLimit) { return; }

            int toRead = Math.Min(readLimit, _maxChunkSize);
            if (toRead > _chunkSize)
            {
                // Never read past the declared Content-Length.
                toRead = (int)_chunkSize;
            }
            IByteBuffer content = buffer.ReadRetainedSlice(toRead);
            _chunkSize -= toRead;

            if (0ul >= (ulong)_chunkSize)
            {
                // Read all content.
                output.Add(new DefaultLastHttpContent(content, ValidateHeaders));
                ResetNow();
            }
            else
            {
                output.Add(new DefaultHttpContent(content));
            }
            return;
        }
        // everything else after this point takes care of reading chunked content. basically, read chunk size,
        // read chunk, read and ignore the CRLF and repeat until 0
        case State.ReadChunkSize:
        {
            try
            {
                AppendableCharSequence line = _lineParser.Parse(buffer);
                if (line is null) { return; } // chunk-size line incomplete

                int size = GetChunkSize(line.ToAsciiString());
                _chunkSize = size;
                if (0u >= (uint)size)
                {
                    // chunk-size of zero: last chunk — trailers (footer) follow.
                    _currentState = State.ReadChunkFooter;
                    return;
                }
                _currentState = State.ReadChunkedContent;
                goto case State.ReadChunkedContent; // fall-through
            }
            catch (Exception e)
            {
                output.Add(InvalidChunk(buffer, e));
                return;
            }
        }
        case State.ReadChunkedContent:
        {
            Debug.Assert(_chunkSize <= int.MaxValue);
            int toRead = Math.Min((int)_chunkSize, _maxChunkSize);
            toRead = Math.Min(toRead, buffer.ReadableBytes);
            if (0u >= (uint)toRead) { return; }

            IHttpContent chunk = new DefaultHttpContent(buffer.ReadRetainedSlice(toRead));
            _chunkSize -= toRead;

            output.Add(chunk);

            if (_chunkSize != 0) { return; } // this chunk's data is not fully read yet
            _currentState = State.ReadChunkDelimiter;
            goto case State.ReadChunkDelimiter; // fall-through
        }
        case State.ReadChunkDelimiter:
        {
            // Skip bytes up to and including the LF that terminates the chunk data.
            int wIdx = buffer.WriterIndex;
            int rIdx = buffer.ReaderIndex;
            // TODO ForEachByte
            while (wIdx > rIdx)
            {
                byte next = buffer.GetByte(rIdx++);
                if (next == HttpConstants.LineFeed)
                {
                    _currentState = State.ReadChunkSize;
                    break;
                }
            }
            _ = buffer.SetReaderIndex(rIdx);
            return;
        }
        case State.ReadChunkFooter:
        {
            try
            {
                // NOTE(review): "lastTrialer" appears to be a typo for "lastTrailer";
                // not renamed here to keep the code byte-identical.
                ILastHttpContent lastTrialer = ReadTrailingHeaders(buffer);
                if (lastTrialer is null) { return; } // trailer section incomplete

                output.Add(lastTrialer);
                ResetNow();
                return;
            }
            catch (Exception exception)
            {
                output.Add(InvalidChunk(buffer, exception));
                return;
            }
        }
        case State.BadMessage:
        {
            // Keep discarding until disconnection.
            _ = buffer.SkipBytes(buffer.ReadableBytes);
            break;
        }
        case State.Upgraded:
        {
            int readableBytes = buffer.ReadableBytes;
            if (readableBytes > 0)
            {
                // Keep on consuming as otherwise we may trigger an DecoderException,
                // other handler will replace this codec with the upgraded protocol codec to
                // take the traffic over at some point then.
                // See https://github.com/netty/netty/issues/2173
                output.Add(buffer.ReadBytes(readableBytes));
            }
            break;
        }
    }
}
// Decodes an inbound IHttpObject, decompressing body content according to the
// Content-Encoding header. Message heads and content parts may arrive separately;
// interim 100-continue responses are passed through untouched. Reference counts on
// retained/forwarded content are carefully balanced throughout.
protected override void Decode(IChannelHandlerContext context, IHttpObject message, List<object> output)
{
    try
    {
        // An interim 100-continue response never carries a body and must be passed
        // through as-is; remember that the real response is still to come.
        if (message is IHttpResponse response && response.Status.Code == StatusCodes.Status100Continue)
        {
            if (!(response is ILastHttpContent))
            {
                this.continueResponse = true;
            }
            // 100-continue response must be passed through.
            output.Add(ReferenceCountUtil.Retain(message));
            return;
        }

        // Still draining the content of a prior 100-continue response.
        if (this.continueResponse)
        {
            if (message is ILastHttpContent)
            {
                this.continueResponse = false;
            }
            // 100-continue response must be passed through.
            output.Add(ReferenceCountUtil.Retain(message));
            return;
        }

        // A full message implements both IHttpMessage and IHttpContent, so both of
        // the following branches may apply to the same object.
        var httpContent = message as IHttpContent;
        if (message is IHttpMessage httpMessage)
        {
            this.Cleanup();
            HttpHeaders headers = httpMessage.Headers;

            // Determine the content encoding.
            if (headers.TryGet(HttpHeaderNames.ContentEncoding, out ICharSequence contentEncoding))
            {
                contentEncoding = AsciiString.Trim(contentEncoding);
            }
            else
            {
                contentEncoding = Identity;
            }
            this.decoder = this.NewContentDecoder(contentEncoding);

            if (this.decoder is null)
            {
                // Encoding not handled here: forward the message unchanged. Retain the
                // content since forwarding transfers a reference to the next handler.
                if (httpContent is object)
                {
                    _ = httpContent.Retain();
                }
                output.Add(httpMessage);
                return;
            }

            // Remove content-length header:
            // the correct value can be set only after all chunks are processed/decoded.
            // If buffering is not an issue, add HttpObjectAggregator down the chain, it will set the header.
            // Otherwise, rely on LastHttpContent message.
            if (headers.Contains(HttpHeaderNames.ContentLength))
            {
                _ = headers.Remove(HttpHeaderNames.ContentLength);
                _ = headers.Set(HttpHeaderNames.TransferEncoding, HttpHeaderValues.Chunked);
            }
            // Either it is already chunked or EOF terminated.
            // See https://github.com/netty/netty/issues/5892

            // set new content encoding,
            ICharSequence targetContentEncoding = this.GetTargetContentEncoding(contentEncoding);
            if (HttpHeaderValues.Identity.ContentEquals(targetContentEncoding))
            {
                // Do NOT set the 'Content-Encoding' header if the target encoding is 'identity'
                // as per: http://tools.ietf.org/html/rfc2616#section-14.11
                _ = headers.Remove(HttpHeaderNames.ContentEncoding);
            }
            else
            {
                _ = headers.Set(HttpHeaderNames.ContentEncoding, targetContentEncoding);
            }

            if (httpContent is object)
            {
                // If message is a full request or response object (headers + data), don't copy data part into out.
                // Output headers only; data part will be decoded below.
                // Note: "copy" object must not be an instance of LastHttpContent class,
                // as this would (erroneously) indicate the end of the HttpMessage to other handlers.
                IHttpMessage copy = null;
                switch (httpMessage)
                {
                    case IHttpRequest req:
                        // HttpRequest or FullHttpRequest
                        copy = new DefaultHttpRequest(req.ProtocolVersion, req.Method, req.Uri);
                        break;
                    case IHttpResponse res:
                        // HttpResponse or FullHttpResponse
                        copy = new DefaultHttpResponse(res.ProtocolVersion, res.Status);
                        break;
                    default:
                        ThrowHelper.ThrowCodecException_InvalidHttpMsg(httpMessage);
                        break;
                }
                _ = copy.Headers.Set(httpMessage.Headers);
                copy.Result = httpMessage.Result;
                output.Add(copy);
            }
            else
            {
                output.Add(httpMessage);
            }
        }

        if (httpContent is object)
        {
            if (this.decoder is null)
            {
                // No decoder installed: pass the content through (retained for the next handler).
                output.Add(httpContent.Retain());
            }
            else
            {
                this.DecodeContent(httpContent, output);
            }
        }
    }
    finally
    {
        // Remember whether this pass produced anything, so the channel can request
        // another read when nothing was emitted.
        _needRead = 0u >= (uint)output.Count;
    }
}