/// <summary>
/// Read the response body as a sequence of data chunks, dispatching on
/// connection type, body presence and the streaming configuration.
/// </summary>
/// <param name="config">Parser configuration controlling streaming and buffering</param>
/// <returns>An enumerable list of chunks</returns>
public IEnumerable <HttpResponseDataChunk> ReadChunks(HttpParserConfig config)
{
    if (ConnectRequest)
    {
        // CONNECT tunnels carry raw data with no HTTP framing
        return(ReadChunksConnect(config));
    }
    else if (HasBody && !Is100Continue && ResponseCode != 304)
    {
        // 100 Continue and 304 Not Modified responses never carry a body
        if (config.StreamBody)
        {
            if (!ChunkedEncoding)
            {
                return(ReadChunksStreamedLength(config));
            }
            else
            {
                return(ReadChunksStreamedChunked(config));
            }
        }
        else
        {
            return(ReadChunksBuffered(config));
        }
    }
    else
    {
        // No body, then just return the chunk as headers
        HttpResponseDataChunk chunk = new HttpResponseDataChunk(this);
        chunk.ChunkNumber = 0;
        chunk.FinalChunk = true;
        return(new HttpResponseDataChunk[] { chunk });
    }
}
/// <summary>
/// Read frames from stream
/// </summary>
/// <param name="config">The parser configuration used when reading chunks</param>
/// <returns>The frames</returns>
public IEnumerable <DataFrame> ReadFrames(HttpParserConfig config)
{
    // Each chunk is converted to a node tree rooted at "Root" and wrapped in a frame
    foreach (HttpResponseDataChunk chunk in ReadChunks(config))
    {
        yield return(new DataFrame((DataKey)chunk.ToNode("Root")));
    }
}
/// <summary>
/// Read a CONNECT request's tunnelled data as raw chunks. Loops until the
/// underlying reader returns no data, at which point an EndOfStreamException
/// terminates the (otherwise infinite) sequence.
/// </summary>
/// <param name="config">Parser configuration (not used by this path)</param>
private IEnumerable <HttpRequestDataChunk> ReadChunksConnect(HttpParserConfig config)
{
    int chunkNumber = 0;
    while (true)
    {
        // Partial read: returns whatever is available, up to the default chunk size
        // NOTE(review): this copy uses HttpParserConfig.DEFAULT_CHUNK_SIZE while the
        // sibling implementation uses HttpDataChunk.DEFAULT_CHUNK_SIZE - confirm they agree
        byte[] data = _reader.ReadBytes(HttpParserConfig.DEFAULT_CHUNK_SIZE, false);
        if (data.Length == 0)
        {
            throw new EndOfStreamException();
        }
        HttpRequestDataChunk chunk = new HttpRequestDataChunk(this);
        chunk.ChunkNumber = chunkNumber;
        // Saturate rather than overflow the chunk counter on long-lived tunnels
        if (chunkNumber < int.MaxValue)
        {
            chunkNumber++;
        }
        // Cast to length, really should just ignore any data set greater than a set amount
        chunk.Body = data;
        yield return(chunk);
    }
}
/// <summary>
/// Read the entire request body into a single buffered chunk. When the client
/// sent "Expect: 100-continue" with a non-empty body, a header-only chunk is
/// emitted first so the headers can be forwarded before the blocking body read.
/// </summary>
/// <param name="config">Parser configuration (unused by the buffered path)</param>
private IEnumerable <HttpRequestDataChunk> ReadChunksBuffered(HttpParserConfig config)
{
    long bodyLength = ContentLength;
    int nextChunkNumber = 0;

    HttpRequestDataChunk current = new HttpRequestDataChunk(this);

    // Flush the headers on their own first, otherwise reading the body
    // below would block while the peer waits for the 100 Continue
    bool expectContinue = Headers.HasHeader("Expect", "100-continue");
    if (expectContinue && bodyLength > 0)
    {
        current.ChunkNumber = nextChunkNumber++;
        yield return current;
        current = new HttpRequestDataChunk(this);
    }

    current.ChunkNumber = nextChunkNumber;
    current.FinalChunk = true;
    if (bodyLength > 0)
    {
        // Cast to int; really should just ignore any data set greater than a set amount
        current.Body = _reader.ReadBytes((int)bodyLength);
    }

    yield return current;
}
/// <summary>
/// Read the whole response body into a single final chunk: either by coalescing
/// every chunked-encoding piece, draining the stream to EOF, or reading exactly
/// Content-Length bytes.
/// </summary>
/// <param name="config">Parser configuration (unused by the buffered path)</param>
private IEnumerable <HttpResponseDataChunk> ReadChunksBuffered(HttpParserConfig config)
{
    HttpResponseDataChunk result = new HttpResponseDataChunk(this);
    result.ChunkNumber = 0;
    result.FinalChunk = true;

    if (ChunkedEncoding)
    {
        // Coalesce every transfer chunk into one body; the zero-length
        // terminating chunk ends the loop
        List<byte> accumulated = new List<byte>();
        string extension = null;
        for (byte[] piece = ReadChunkedEncoding(_reader, out extension);
             piece.Length > 0;
             piece = ReadChunkedEncoding(_reader, out extension))
        {
            accumulated.AddRange(piece);
        }
        result.ChunkedEncoding = true;
        result.Body = accumulated.ToArray();
    }
    else if (ReadToEnd)
    {
        if (_initialData == null)
        {
            result.Body = _reader.ReadToEnd();
        }
        else
        {
            // Prepend any data captured before the reader took over
            List<byte> buffer = new List<byte>(BinaryEncoding.Instance.GetBytes(_initialData));
            if (!_reader.Eof)
            {
                buffer.AddRange(_reader.ReadToEnd());
            }
            result.Body = buffer.ToArray();
        }
    }
    else
    {
        long length = ContentLength;
        result.Body = length > 0 ? _reader.ReadBytes((int)length) : new byte[0];
    }

    return new HttpResponseDataChunk[] { result };
}
/// <summary>
/// Stream a length-delimited (or read-to-end) response body as a sequence of
/// chunks of at most the configured stream chunk size.
/// </summary>
/// <param name="config">Parser configuration controlling chunk size and chunked conversion</param>
/// <exception cref="EndOfStreamException">Thrown if the stream ends before Content-Length bytes are read</exception>
private IEnumerable <HttpResponseDataChunk> ReadChunksStreamedLength(HttpParserConfig config)
{
    long length = ContentLength;
    int chunkSize = config.StreamChunkSize;
    bool waitForAll = true;
    int chunkNumber = 0;

    // Can only convert if returning HTTP/1.1
    bool convertToChunked = config.ConvertToChunked && Version.IsVersion11;

    if (chunkSize <= 0)
    {
        // No explicit size configured: take whatever is available per read
        waitForAll = false;
        chunkSize = HttpDataChunk.DEFAULT_CHUNK_SIZE;
    }

    if (ReadToEnd)
    {
        if (_initialData != null)
        {
            // Emit pre-read data first, topped up from the reader.
            // Worst case you will get chunkSize + 4
            List <byte> block = new List <byte>(BinaryEncoding.Instance.GetBytes(_initialData));
            if (!_reader.Eof)
            {
                block.AddRange(waitForAll ? _reader.ReadToEnd(chunkSize) : _reader.ReadBytes(chunkSize, false));
            }
            yield return(CreateChunk(block.ToArray(), chunkNumber++, _reader.Eof, convertToChunked));
        }
        while (!_reader.Eof)
        {
            byte[] block = waitForAll ? _reader.ReadToEnd(chunkSize) : _reader.ReadBytes(chunkSize, false);
            yield return(CreateChunk(block, chunkNumber++, _reader.Eof, convertToChunked));
        }
    }
    else
    {
        while (length > 0)
        {
            int readLength = length > chunkSize ? chunkSize : (int)length;
            byte[] data = _reader.ReadBytes(readLength, waitForAll);
            // While we are reading we wouldn't expect no data to be returned
            if (data.Length == 0)
            {
                throw new EndOfStreamException();
            }
            length -= data.Length;
            // BUGFIX: pass convertToChunked (which includes the HTTP/1.1 version
            // guard computed above) instead of raw config.ConvertToChunked, so the
            // length-delimited path honours the same version restriction as the
            // read-to-end path
            yield return(CreateChunk(data, chunkNumber++, length == 0, convertToChunked));
        }
    }
}
/// <summary>
/// Stream the request body as a sequence of chunks of at most the configured size.
/// </summary>
/// <param name="config">Parser configuration controlling stream chunk size</param>
/// <exception cref="EndOfStreamException">Thrown if the stream ends before Content-Length bytes are read</exception>
private IEnumerable <HttpRequestDataChunk> ReadChunksStreamed(HttpParserConfig config)
{
    long length = ContentLength;
    int chunkSize = config.StreamChunkSize;
    bool waitForAll = true;
    int chunkNumber = 0;
    if (chunkSize <= 0)
    {
        // No explicit size configured: take whatever is available per read
        waitForAll = false;
        chunkSize = HttpParserConfig.DEFAULT_CHUNK_SIZE;
    }
    // If we are expecting 100 continue then we have to
    // send the header first otherwise this will block if we have a length
    // Also send if length is 0 as there will be no following data
    if ((length == 0) || Headers.HasHeader("Expect", "100-continue"))
    {
        HttpRequestDataChunk chunk = new HttpRequestDataChunk(this);
        chunk.ChunkNumber = chunkNumber++;
        yield return(chunk);
    }
    while (length > 0)
    {
        int readLength = length > chunkSize ? chunkSize : (int)length;
        byte[] data = _reader.ReadBytes(readLength, waitForAll);
        // While we are reading we wouldn't expect no data to be returned
        if (data.Length == 0)
        {
            throw new EndOfStreamException();
        }
        length -= data.Length;
        HttpRequestDataChunk chunk = new HttpRequestDataChunk(this);
        chunk.ChunkNumber = chunkNumber++;
        chunk.FinalChunk = length == 0;
        // Cast to length, really should just ignore any data set greater than a set amount
        chunk.Body = data;
        yield return(chunk);
    }
}
/// <summary>
/// Stream a chunked-encoded response body, yielding one data chunk per HTTP
/// transfer chunk. Optionally strips "Transfer-Encoding: chunked" and rewrites
/// the version when downgrading to HTTP/1.0.
/// </summary>
/// <param name="config">Parser configuration (DowngradeChunkedToHttp10 controls rewriting)</param>
private IEnumerable <HttpResponseDataChunk> ReadChunksStreamedChunked(HttpParserConfig config)
{
    int nextNumber = 0;
    List <KeyDataPair <string> > downgradedHeaders = new List <KeyDataPair <string> >(Headers);

    if (config.DowngradeChunkedToHttp10)
    {
        // HTTP/1.0 has no chunked encoding, drop the header we are rewriting away
        downgradedHeaders.RemoveAll(h =>
            h.Name.Equals("transfer-encoding", StringComparison.OrdinalIgnoreCase)
            && h.Value.Equals("chunked", StringComparison.OrdinalIgnoreCase));
    }

    string extension;
    byte[] body = ReadChunkedEncoding(_reader, out extension);

    while (true)
    {
        HttpResponseDataChunk chunk = new HttpResponseDataChunk(this);
        chunk.ChunkNumber = nextNumber++;
        chunk.Body = body;
        if (config.DowngradeChunkedToHttp10)
        {
            chunk.Headers = downgradedHeaders.ToArray();
            chunk.Version = HttpVersion.Version10;
        }
        else
        {
            chunk.ChunkedEncoding = true;
        }
        chunk.ChunkExtension = extension;

        // Read ahead so the current chunk can be flagged final when the
        // zero-length terminating chunk follows it
        body = ReadChunkedEncoding(_reader, out extension);
        chunk.FinalChunk = body.Length == 0;
        yield return chunk;

        if (chunk.FinalChunk)
        {
            break;
        }
    }
}
/// <summary>
/// Read chunks from stream
/// </summary>
/// <param name="config">Parser configuration selecting streamed or buffered reading</param>
/// <returns>An enumerable list of chunks</returns>
public IEnumerable <HttpRequestDataChunk> ReadChunks(HttpParserConfig config)
{
    // CONNECT requests tunnel raw data rather than carrying an HTTP body
    if (IsConnect)
    {
        return ReadChunksConnect(config);
    }

    // Only stream when requested and there is actually a body to stream
    bool streamed = config.StreamBody && ContentLength > 0;
    return streamed ? ReadChunksStreamed(config) : ReadChunksBuffered(config);
}
/// <summary>
/// Read the entire request body into a single buffered chunk. When the client
/// sent "Expect: 100-continue" with a non-empty body, a header-only chunk is
/// emitted first so the headers can be forwarded before the body is read.
/// </summary>
/// <param name="config">Parser configuration (unused by the buffered path)</param>
private IEnumerable<HttpRequestDataChunk> ReadChunksBuffered(HttpParserConfig config)
{
    long length = ContentLength;
    int chunkNumber = 0;
    HttpRequestDataChunk chunk = new HttpRequestDataChunk(this);
    // If we are expecting 100 continue then we have to
    // send the header first otherwise this will block if we have a length
    if (Headers.HasHeader("Expect", "100-continue") && (length > 0))
    {
        chunk.ChunkNumber = chunkNumber++;
        yield return chunk;
        chunk = new HttpRequestDataChunk(this);
    }
    chunk.ChunkNumber = chunkNumber;
    chunk.FinalChunk = true;
    if (length > 0)
    {
        // Cast to length, really should just ignore any data set greater than a set amount
        chunk.Body = _reader.ReadBytes((int)length);
    }
    yield return chunk;
}
/// <summary>
/// Stream a chunked-encoded response body, yielding one data chunk per HTTP
/// transfer chunk. When downgrading to HTTP/1.0 the "Transfer-Encoding: chunked"
/// header is removed and the chunk version rewritten.
/// </summary>
/// <param name="config">Parser configuration (DowngradeChunkedToHttp10 controls rewriting)</param>
private IEnumerable<HttpResponseDataChunk> ReadChunksStreamedChunked(HttpParserConfig config)
{
    string extension;
    int chunkNumber = 0;
    List<KeyDataPair<string>> headers = new List<KeyDataPair<string>>(Headers);
    if (config.DowngradeChunkedToHttp10)
    {
        // Strip every "Transfer-Encoding: chunked" header; HTTP/1.0 has no chunked encoding
        int i = 0;
        while(i < headers.Count)
        {
            if (headers[i].Name.Equals("transfer-encoding", StringComparison.OrdinalIgnoreCase)
                && headers[i].Value.Equals("chunked", StringComparison.OrdinalIgnoreCase))
            {
                headers.RemoveAt(i);
            }
            else
            {
                i++;
            }
        }
    }
    byte[] ret = ReadChunkedEncoding(_reader, out extension);
    do
    {
        HttpResponseDataChunk chunk = new HttpResponseDataChunk(this);
        chunk.ChunkNumber = chunkNumber++;
        chunk.Body = ret;
        if (config.DowngradeChunkedToHttp10)
        {
            chunk.Headers = headers.ToArray();
            chunk.Version = HttpVersion.Version10;
        }
        else
        {
            chunk.ChunkedEncoding = true;
        }
        chunk.ChunkExtension = extension;
        // Read ahead so the current chunk can be marked final when the
        // zero-length terminating chunk follows it
        ret = ReadChunkedEncoding(_reader, out extension);
        chunk.FinalChunk = ret.Length == 0;
        yield return chunk;
    } while (ret.Length > 0);
}
/// <summary>
/// Stream a length-delimited (or read-to-end) response body as a sequence of
/// chunks of at most the configured stream chunk size.
/// </summary>
/// <param name="config">Parser configuration controlling chunk size and chunked conversion</param>
/// <exception cref="EndOfStreamException">Thrown if the stream ends before Content-Length bytes are read</exception>
private IEnumerable<HttpResponseDataChunk> ReadChunksStreamedLength(HttpParserConfig config)
{
    long length = ContentLength;
    int chunkSize = config.StreamChunkSize;
    bool waitForAll = true;
    int chunkNumber = 0;

    // Can only convert if returning HTTP/1.1
    bool convertToChunked = config.ConvertToChunked && Version.IsVersion11;

    if (chunkSize <= 0)
    {
        // No explicit size configured: take whatever is available per read
        waitForAll = false;
        chunkSize = HttpDataChunk.DEFAULT_CHUNK_SIZE;
    }

    if (ReadToEnd)
    {
        if (_initialData != null)
        {
            // Emit pre-read data first, topped up from the reader.
            // Worst case you will get chunkSize + 4
            List<byte> block = new List<byte>(BinaryEncoding.Instance.GetBytes(_initialData));
            if (!_reader.Eof)
            {
                block.AddRange(waitForAll ? _reader.ReadToEnd(chunkSize) : _reader.ReadBytes(chunkSize, false));
            }
            yield return CreateChunk(block.ToArray(), chunkNumber++, _reader.Eof, convertToChunked);
        }
        while (!_reader.Eof)
        {
            byte[] block = waitForAll ? _reader.ReadToEnd(chunkSize) : _reader.ReadBytes(chunkSize, false);
            yield return CreateChunk(block, chunkNumber++, _reader.Eof, convertToChunked);
        }
    }
    else
    {
        while (length > 0)
        {
            int readLength = length > chunkSize ? chunkSize : (int)length;
            byte[] data = _reader.ReadBytes(readLength, waitForAll);
            // While we are reading we wouldn't expect no data to be returned
            if (data.Length == 0)
            {
                throw new EndOfStreamException();
            }
            length -= data.Length;
            // BUGFIX: pass convertToChunked (which includes the HTTP/1.1 version
            // guard computed above) instead of raw config.ConvertToChunked, so the
            // length-delimited path honours the same version restriction as the
            // read-to-end path
            yield return CreateChunk(data, chunkNumber++, length == 0, convertToChunked);
        }
    }
}
/// <summary>
/// Stream the proxied server's HTTP response back as data chunks, forwarding
/// any interim 100 Continue response before re-reading the real header.
/// The connection coupling is always closed and the graph deregistered in the
/// finally block (which also runs if enumeration is abandoned early); if the
/// graceful close throws, the service is asked to force-close the connection.
/// </summary>
/// <param name="conn">The proxy connection to read the response from</param>
/// <param name="url">The request URL, used for logging only</param>
/// <param name="headRequest">True if the original request was a HEAD, so the response body is suppressed</param>
/// <returns>The response chunks</returns>
private IEnumerable<HttpResponseDataChunk> GetResponse(ProxyConnection conn, Uri url, bool headRequest)
{
    try
    {
        DataReader reader = new DataReader(conn.DataAdapter.Coupling);
        HttpParserConfig config = new HttpParserConfig();
        config.StreamBody = true;
        if (_server._config.Version10Proxy)
        {
            config.DowngradeChunkedToHttp10 = true;
        }
        _logger.LogVerbose("Starting processing of {0}", url);
        HttpResponseHeader response = HttpParser.ReadResponseHeader(reader, false, _logger);
        // If 100 continue then read out just that response then restart read
        if (response.Is100Continue)
        {
            foreach (HttpResponseDataChunk chunk in response.ReadChunks(config))
            {
                _logger.LogVerbose("Read 100 continue chunk for {0} {1} {2}", url, chunk.Body.Length, chunk.FinalChunk);
                yield return chunk;
            }
            response = HttpParser.ReadResponseHeader(reader, false, _logger);
        }
        _logger.LogVerbose("Read response header {0}", response.ResponseCode);
        response.SetHeadRequest(headRequest);
        foreach (HttpResponseDataChunk newChunk in response.ReadChunks(config))
        {
            _logger.LogVerbose("Read chunk for {0} {1} {2}", url, newChunk.Body.Length, newChunk.FinalChunk);
            yield return newChunk;
        }
    }
    finally
    {
        // Best-effort cleanup: try the graceful close first, fall back to a
        // forced close so the graph entry never leaks
        bool closeSuccess = false;
        try
        {
            conn.DataAdapter.Coupling.Close();
            lock (_graphs)
            {
                _graphs.Remove(conn.Graph);
            }
            closeSuccess = true;
        }
        catch (OperationCanceledException)
        {
        }
        catch (ObjectDisposedException)
        {
        }
        if (!closeSuccess)
        {
            lock (_graphs)
            {
                // Force close
                _service.CloseConnection(conn.Graph);
                _graphs.Remove(conn.Graph);
            }
        }
    }
}
/// <summary>
/// Construct an adapter that services HTTP proxy requests over the supplied stream.
/// </summary>
/// <param name="server">The owning proxy server</param>
/// <param name="stm">The stream wrapping the client connection</param>
/// <param name="initialRequest">The first request already read from the client</param>
/// <param name="logger">Logger for diagnostics</param>
public HttpProxyServerAdapter(FullHttpProxyServer server, DataAdapterToStream stm, HttpRequestHeader initialRequest, Logger logger)
{
    _server = server;
    _stm = stm;
    _logger = logger;
    _writer = new DataWriter(_stm);

    _request = initialRequest;
    ProcessProxyRequestHeaders(_request);

    // Always stream request bodies through the proxy
    _config = new HttpParserConfig { StreamBody = true };

    _requestQueue = new Queue<HttpRequestHeader>();
    _requestQueue.Enqueue(_request);

    Description = stm.Description;
}
/// <summary>
/// Build the parser configuration for reading a response, derived from the
/// layer configuration entry matching this request/response pair.
/// </summary>
/// <param name="response">The response being read</param>
/// <param name="request">The request that produced the response</param>
/// <returns>The populated parser configuration</returns>
private HttpParserConfig CreateConfig(HttpResponseHeader response, HttpRequestHeader request)
{
    HttpLayerConfigEntry entry = _layer._config.GetEntry(request, response);

    HttpParserConfig config = new HttpParserConfig
    {
        ConvertToChunked = entry.ConvertToChunked,
        StreamBody = entry.ResponseStreamBody,
        StreamChunkSize = _layer._config.ResponseStreamChunkSize,
        StrictParsing = _layer._config.ResponseStrictParsing,
    };

    // Oversized bodies are always streamed regardless of the entry setting
    long maxBuffered = _layer._config.BufferedResponseMaxLength;
    if (maxBuffered != 0 && response.ContentLength > maxBuffered)
    {
        config.StreamBody = true;
    }

    return config;
}
/// <summary>
/// Build the parser configuration for reading a request, derived from the
/// layer configuration entry matching this request.
/// </summary>
/// <param name="header">The request header being read</param>
/// <returns>The populated parser configuration</returns>
private HttpParserConfig CreateConfig(HttpRequestHeader header)
{
    HttpLayerConfigEntry entry = _layer._config.GetEntry(header);

    HttpParserConfig config = new HttpParserConfig
    {
        StreamBody = entry.RequestStreamBody,
        StreamChunkSize = _layer._config.RequestStreamChunkSize,
        StrictParsing = _layer._config.RequestStrictParsing,
    };

    // Oversized bodies are always streamed regardless of the entry setting
    long maxBuffered = _layer._config.BufferedRequestMaxLength;
    if (maxBuffered != 0 && header.ContentLength > maxBuffered)
    {
        config.StreamBody = true;
    }

    return config;
}
/// <summary>
/// Read the whole response body into a single final chunk: exactly Content-Length
/// bytes, everything to end-of-stream (prepending any pre-read initial data), or
/// all chunked-encoding pieces coalesced into one buffer.
/// </summary>
/// <param name="config">Parser configuration (unused by the buffered path)</param>
private IEnumerable<HttpResponseDataChunk> ReadChunksBuffered(HttpParserConfig config)
{
    HttpResponseDataChunk chunk = new HttpResponseDataChunk(this);
    chunk.ChunkNumber = 0;
    chunk.FinalChunk = true;
    if (!ChunkedEncoding)
    {
        if (ReadToEnd)
        {
            if (_initialData != null)
            {
                // Prepend any data captured before the reader took over
                List<byte> data = new List<byte>(BinaryEncoding.Instance.GetBytes(_initialData));
                if (!_reader.Eof)
                {
                    data.AddRange(_reader.ReadToEnd());
                }
                chunk.Body = data.ToArray();
            }
            else
            {
                chunk.Body = _reader.ReadToEnd();
            }
        }
        else
        {
            long length = ContentLength;
            if (length > 0)
            {
                chunk.Body = _reader.ReadBytes((int)length);
            }
            else
            {
                chunk.Body = new byte[0];
            }
        }
    }
    else
    {
        // Coalesce every transfer chunk into one body; the zero-length
        // terminating chunk ends the loop
        List<byte> body = new List<byte>();
        string extension = null;
        byte[] ret = ReadChunkedEncoding(_reader, out extension);
        while (ret.Length > 0)
        {
            body.AddRange(ret);
            ret = ReadChunkedEncoding(_reader, out extension);
        }
        chunk.ChunkedEncoding = true;
        chunk.Body = body.ToArray();
    }
    return new HttpResponseDataChunk[] { chunk };
}
/// <summary>
/// Drain and discard the body of a request so the underlying stream is
/// positioned at the next request. CONNECT requests have no HTTP-framed body
/// and are skipped.
/// </summary>
/// <param name="request">The request whose body should be consumed</param>
private void FlushRequest(HttpRequestHeader request)
{
    if (!request.IsConnect)
    {
        HttpParserConfig config = new HttpParserConfig();
        config.StreamBody = true;
        // BUGFIX: foreach disposes the iterator when done; the previous manual
        // GetEnumerator()/MoveNext() loop never called Dispose(), so any
        // cleanup in the iterator's finally blocks could be skipped
        foreach (HttpRequestDataChunk chunk in request.ReadChunks(config))
        {
            // Intentionally discard every chunk
        }
    }
}
/// <summary>
/// Read frames from stream
/// </summary>
/// <param name="config">The parser configuration used when reading chunks</param>
/// <returns>The frames</returns>
public IEnumerable<DataFrame> ReadFrames(HttpParserConfig config)
{
    foreach (HttpRequestDataChunk dataChunk in ReadChunks(config))
    {
        // Wrap each chunk's node tree, rooted at "Root", in a data frame
        DataKey rootKey = (DataKey)dataChunk.ToNode("Root");
        yield return new DataFrame(rootKey);
    }
}
/// <summary>
/// Read chunks from stream
/// </summary>
/// <param name="config">Parser configuration selecting streamed or buffered reading</param>
/// <returns>An enumerable list of chunks</returns>
public IEnumerable<HttpRequestDataChunk> ReadChunks(HttpParserConfig config)
{
    if (IsConnect)
    {
        // CONNECT requests tunnel raw data rather than carrying an HTTP body
        return ReadChunksConnect(config);
    }
    else if (config.StreamBody && ContentLength > 0)
    {
        return ReadChunksStreamed(config);
    }
    else
    {
        return ReadChunksBuffered(config);
    }
}
/// <summary>
/// Construct an adapter around the supplied stream and the request already read from it.
/// </summary>
/// <param name="stm">The stream wrapping the client connection</param>
/// <param name="initialRequest">The first request already read from the client</param>
/// <param name="logger">Logger for diagnostics</param>
public HttpProxyServerAdapter(DataAdapterToStream stm, HttpRequestHeader initialRequest, Logger logger)
{
    _stm = stm;
    _logger = logger;
    _writer = new DataWriter(_stm);
    _request = initialRequest;

    // Always stream request bodies through the proxy
    _config = new HttpParserConfig { StreamBody = true };

    Description = stm.Description;
}
/// <summary>
/// Read a CONNECT request's tunnelled data as raw chunks until the stream
/// ends, at which point an EndOfStreamException terminates the sequence.
/// </summary>
/// <param name="config">Parser configuration (not used by this path)</param>
private IEnumerable<HttpRequestDataChunk> ReadChunksConnect(HttpParserConfig config)
{
    for (int nextNumber = 0; ; )
    {
        // Partial read: take whatever is available, up to the default chunk size
        byte[] available = _reader.ReadBytes(HttpDataChunk.DEFAULT_CHUNK_SIZE, false);
        if (available.Length == 0)
        {
            throw new EndOfStreamException();
        }

        HttpRequestDataChunk chunk = new HttpRequestDataChunk(this);
        chunk.ChunkNumber = nextNumber;
        chunk.Body = available;

        // Saturate rather than overflow the chunk counter on long-lived tunnels
        if (nextNumber < int.MaxValue)
        {
            nextNumber++;
        }

        yield return chunk;
    }
}
/// <summary>
/// Stream the request body as a sequence of chunks of at most the configured size.
/// </summary>
/// <param name="config">Parser configuration controlling stream chunk size</param>
/// <exception cref="EndOfStreamException">Thrown if the stream ends before Content-Length bytes are read</exception>
private IEnumerable<HttpRequestDataChunk> ReadChunksStreamed(HttpParserConfig config)
{
    long length = ContentLength;
    int chunkSize = config.StreamChunkSize;
    bool waitForAll = true;
    int chunkNumber = 0;
    if (chunkSize <= 0)
    {
        // No explicit size configured: take whatever is available per read
        waitForAll = false;
        chunkSize = HttpDataChunk.DEFAULT_CHUNK_SIZE;
    }
    // If we are expecting 100 continue then we have to
    // send the header first otherwise this will block if we have a length
    // Also send if length is 0 as there will be no following data
    if ((length == 0) || Headers.HasHeader("Expect", "100-continue"))
    {
        HttpRequestDataChunk chunk = new HttpRequestDataChunk(this);
        chunk.ChunkNumber = chunkNumber++;
        yield return chunk;
    }
    while (length > 0)
    {
        int readLength = length > chunkSize ? chunkSize : (int)length;
        byte[] data = _reader.ReadBytes(readLength, waitForAll);
        // While we are reading we wouldn't expect no data to be returned
        if (data.Length == 0)
        {
            throw new EndOfStreamException();
        }
        length -= data.Length;
        HttpRequestDataChunk chunk = new HttpRequestDataChunk(this);
        chunk.ChunkNumber = chunkNumber++;
        chunk.FinalChunk = length == 0;
        // Cast to length, really should just ignore any data set greater than a set amount
        chunk.Body = data;
        yield return chunk;
    }
}
/// <summary>
/// Read frames from stream
/// </summary>
/// <param name="config">The parser configuration used when reading chunks</param>
/// <returns>The frames</returns>
public IEnumerable <DataFrame> ReadFrames(HttpParserConfig config)
{
    // Lazily project each chunk to its data frame representation
    return(ReadChunks(config).Select(c => c.ToDataFrame()));
}
/// <summary>
/// Read the response body as a sequence of data chunks, dispatching on
/// connection type, body presence and the streaming configuration.
/// </summary>
/// <param name="config">Parser configuration controlling streaming and buffering</param>
/// <returns>An enumerable list of chunks</returns>
public IEnumerable<HttpResponseDataChunk> ReadChunks(HttpParserConfig config)
{
    if (ConnectRequest)
    {
        // CONNECT tunnels carry raw data with no HTTP framing
        return ReadChunksConnect(config);
    }
    else if (HasBody && !Is100Continue && ResponseCode != 304)
    {
        // 100 Continue and 304 Not Modified responses never carry a body
        if (config.StreamBody)
        {
            if (!ChunkedEncoding)
            {
                return ReadChunksStreamedLength(config);
            }
            else
            {
                return ReadChunksStreamedChunked(config);
            }
        }
        else
        {
            return ReadChunksBuffered(config);
        }
    }
    else
    {
        // No body, then just return the chunk as headers
        HttpResponseDataChunk chunk = new HttpResponseDataChunk(this);
        chunk.ChunkNumber = 0;
        chunk.FinalChunk = true;
        return new HttpResponseDataChunk[] { chunk };
    }
}