/// <summary>
/// Runs one chunk of input through the inflate filter and returns the decompressed result.
/// </summary>
/// <param name="enc">Provides the encoding used to convert a textual chunk to raw bytes.</param>
/// <param name="input">The incoming (compressed) chunk.</param>
/// <param name="closing"><c>true</c> when the stream is being closed and the filter should flush.</param>
/// <returns>The decompressed data, or <c>TextElement.Null</c> when the chunk is not binary-convertible.</returns>
public override TextElement Filter(IEncodingProvider enc, TextElement input, bool closing)
{
    // The inflate filter only operates on raw bytes; bail out on non-binary chunks.
    var bytes = input.AsBytes(enc.StringEncoding);
    if (bytes == null)
    {
        Debug.Fail("InflateFilter expects chunks to be convertible to PhpBytes.");
        return TextElement.Null;
    }

    int position = 0;
    return new TextElement(FilterInner(bytes, ref position, closing));
}
/// <summary>
/// Passes the data through output filter-chain to the output buffer.
/// When the buffer is full or buffering is disabled, passes the data to the low-level stream.
/// </summary>
/// <param name="data">The data to store (filters will handle the type themselves).</param>
/// <param name="closing"><c>true</c> when this method is called from <c>close()</c>
/// to prune all the pending filters with closing set to <c>true</c>.</param>
/// <returns>Number of character entities successfully written or <c>-1</c> on an error.</returns>
protected int WriteData(TextElement data, bool closing = false)
{
    // Set file access to writing
    CurrentAccess = FileAccess.Write;
    if (!CanWrite) return -1;

    Debug.Assert(!data.IsNull);

    // NOTE: 'consumed' is measured on the raw (pre-filter) data; filters may
    // change the length of what is ultimately written to the stream.
    int consumed = data.Length;
    writeFilteredCount += consumed;

    if (writeFilters != null)
    {
        // Process the data through the custom write filters first.
        foreach (IFilter f in writeFilters)
        {
            if (data.IsNull)
            {
                // When closing, feed all the filters with data
                // (an empty chunk lets each remaining filter flush its state).
                if (closing) data = TextElement.Empty;
                else return consumed; // Eaten all
            }

            data = f.Filter(_ctx, data, closing);
            if (closing) f.OnClose();
        }
    }

    if (textWriteFilter != null)
    {
        // Then pass it through the text-conversion filter if any.
        data = textWriteFilter.Filter(_ctx, data, closing);
    }

    // From now on, the data is treated just as binary
    byte[] bin = data.AsBytes(_ctx.StringEncoding);
    if (bin.Length == 0)
    {
        // Filters consumed everything; still report the input as written.
        return consumed;
    }

    // Append the resulting data to the output buffer if any.
    if (IsWriteBuffered)
    {
        // Is this the first access? (the write buffer is allocated lazily)
        if (writeBuffer == null)
        {
            writeBuffer = new byte[writeBufferSize];
            writePosition = 0;
        }

        // The whole binary data fits in the buffer, great!
        if (writeBufferSize - writePosition > bin.Length)
        {
            Array.Copy(bin, 0, writeBuffer, writePosition, bin.Length);
            writePosition += bin.Length;
            return consumed;
        }

        int copied = 0;

        // Use the buffer for small data only
        if (writeBufferSize > bin.Length)
        {
            // Otherwise fill the buffer and flush it.
            copied = writeBufferSize - writePosition;
            Array.Copy(bin, 0, writeBuffer, writePosition, copied);
            writePosition += copied;
        }

        // Flush the buffer
        if ((writePosition > 0) && (!FlushWriteBuffer()))
            return (copied > 0) ? copied : -1; // It is an error but still some output was written.

        if (bin.Length - copied >= writeBufferSize)
        {
            // If the binary data is really big, write it directly to stream.
            while (copied < bin.Length)
            {
                int written = RawWrite(bin, copied, bin.Length - copied);
                if (written <= 0)
                {
                    PhpException.Throw(PhpError.Warning, ErrResources.stream_write_failed, copied.ToString(), bin.Length.ToString());
                    return (copied > 0) ? copied : -1; // It is an error but still some output was written.
                }
                copied += written;
                writeOffset += written;
            }
        }
        else
        {
            // Otherwise just start a new buffer with the rest of the data.
            Array.Copy(bin, copied, writeBuffer, 0, bin.Length - copied);
            writePosition = bin.Length - copied;
        }

        return consumed;
    }
    else
    {
        // No write buffer. Write the data directly.
        int copied = 0;
        while (copied < bin.Length)
        {
            int written = RawWrite(bin, copied, bin.Length - copied);
            if (written <= 0)
            {
                PhpException.Throw(PhpError.Warning, ErrResources.stream_write_failed, copied.ToString(), bin.Length.ToString());
                return (copied > 0) ? copied : -1; // ERROR but maybe some was written.
            }
            copied += written;
            writeOffset += written;
        }
        return consumed;
    }
}
/// <summary>
/// Filters one chunk of a gzip-compressed stream, driving the header/data/trailer
/// state machine and returning decompressed data as it becomes available.
/// </summary>
/// <param name="enc">Provides the encoding used to convert a textual chunk to raw bytes.</param>
/// <param name="input">The incoming (compressed) chunk.</param>
/// <param name="closing"><c>true</c> when the stream is being closed and no more data will arrive.</param>
/// <returns>
/// Decompressed data, <c>TextElement.Empty</c> when more input is needed,
/// or <c>TextElement.Null</c> on an error.
/// </returns>
public override TextElement Filter(IEncodingProvider enc, TextElement input, bool closing)
{
    // TODO: not the most efficient method - after the filters are upgraded to bucket lists, update this
    var bInput = input.AsBytes(enc.StringEncoding);
    if (bInput == null)
    {
        Debug.Fail("GzipUncompressionFilter expects chunks to be convertible to PhpBytes.");
        return TextElement.Null;
    }

    if (_state == UncompressionState.Failed)
    {
        // failed filter should not get any more calls
        PhpException.Throw(PhpError.Warning, "using filter in failed state");
        return TextElement.Null;
    }

    if (_state == UncompressionState.PostTrailer)
    {
        // post trailer - ignore everything
        if (closing)
        {
            _state = UncompressionState.Finished;
        }
        return TextElement.Empty;
    }

    if (_state == UncompressionState.Finished)
    {
        // finished filter should not get any more data
        PhpException.Throw(PhpError.Warning, "using filter in finished state");
        return TextElement.Null;
    }

    if (_state == UncompressionState.Passthrough)
    {
        // this is not gzip data format - pass the data through
        return new TextElement(bInput);
    }

    // enqueue the block
    _chunkQueue.EnqueueByteBlock(bInput, 0, bInput.Length);

    if (_state == UncompressionState.Header)
    {
        #region Header handling

        // beginning of the stream - try to obtain the fixed-size gzip header
        byte[] beginning = _chunkQueue.DequeueByteBlock(Zlib.GZIP_HEADER_LENGTH);

        if (beginning == null && !closing)
        {
            // we do not have enough data, but we know there would be more data ahead
            return TextElement.Empty;
        }

        // check the header format (magic bytes ID1, ID2)
        if (beginning.Length >= 2 && beginning[0] == Zlib.GZIP_HEADER[0] && beginning[1] == Zlib.GZIP_HEADER[1])
        {
            // header magic bytes are OK
            if (beginning.Length < Zlib.GZIP_HEADER_LENGTH)
            {
                // header is too short -> this is an error
                PhpException.Throw(PhpError.Warning, "unexpected end of file");
                return TextElement.Null;
            }

            // check the rest of the header
            if (beginning[2] != Zlib.Z_DEFLATED)
            {
                PhpException.Throw(PhpError.Warning, "unknown compression method");
                return TextElement.Null;
            }

            if ((beginning[3] & Zlib.GZIP_HEADER_RESERVED_FLAGS) != 0)
            {
                PhpException.Throw(PhpError.Warning, "unknown header flags set");
                return TextElement.Null;
            }

            _headerFlags = beginning[3];

            // change the header state based on the header flags
            UpdateHeaderState();
        }
        else
        {
            // this is not a gzip format -> passthrough the data
            _state = UncompressionState.Passthrough;
            return new TextElement(beginning);
        }

        #endregion
    }

    if (_state == UncompressionState.HeaderExtraField)
    {
        #region Header Extra Field Handling

        if (_extraHeaderLength == null)
        {
            // length was not yet detected
            if (_chunkQueue.AvailableBytes < 2)
            {
                // wait for more input
                return TextElement.Empty;
            }

            // Assemble the little-endian 16-bit XLEN field (RFC 1952: low byte
            // first, then high byte).
            // BUGFIX: the original code used '&=' here; the low byte and the
            // high byte shifted left by 8 have no overlapping bits, so the
            // length always evaluated to zero and the extra field was never
            // skipped correctly. '|=' combines the two bytes as intended.
            _extraHeaderLength = _chunkQueue.DequeueByte();
            _extraHeaderLength |= (_chunkQueue.DequeueByte() << 8);
        }

        if (_extraHeaderLength != null)
        {
            // length was already read
            if (_chunkQueue.AvailableBytes < _extraHeaderLength)
            {
                // wait for more input
                return TextElement.Empty;
            }

            Debug.Assert(_extraHeaderLength.HasValue);

            // skip the extra header
            _chunkQueue.SkipByteBlock(_extraHeaderLength.Value);
            UpdateHeaderState();
        }

        #endregion
    }

    if (_state == UncompressionState.HeaderFilename || _state == UncompressionState.HeaderComment)
    {
        #region Header Filename and Comment Handling

        // filename or comment - both are zero-terminated strings;
        // cycle until input ends or zero character is encountered
        while (true)
        {
            byte? nextByte = _chunkQueue.DequeueByte();

            if (nextByte == null)
            {
                // wait for more input
                return TextElement.Empty;
            }

            if (nextByte == 0)
            {
                // end the cycle
                break;
            }
        }

        // go to the next state
        UpdateHeaderState();

        #endregion
    }

    if (_state == UncompressionState.HeaderCRC)
    {
        #region CRC Handling

        // header CRC (two bytes)
        if (_chunkQueue.AvailableBytes < 2)
        {
            // wait for more input
            return TextElement.Empty;
        }

        // skip the CRC
        _chunkQueue.DequeueByte();
        _chunkQueue.DequeueByte();
        UpdateHeaderState();

        #endregion
    }

    // filled by data handling and sometimes returned by trailer handling
    byte[] output = null;

    if (_state == UncompressionState.Data)
    {
        #region Deflated Data Handling

        // get all available bytes
        byte[] inputBytes = _chunkQueue.DequeueByteBlock(_chunkQueue.AvailableBytes);
        int inputOffset = 0;

        // perform the inner operation
        try
        {
            output = FilterInner(inputBytes, ref inputOffset, closing);
        }
        catch
        {
            // exception was thrown - remember the failure and let it propagate
            _state = UncompressionState.Failed;
            throw;
        }

        if (output == null)
        {
            // error happened and exception was not thrown
            _state = UncompressionState.Failed;
            return TextElement.Null;
        }

        // update the hash algorithm (CRC is computed over the uncompressed data)
        _crc.Update(output);

        if (inputOffset != inputBytes.Length)
        {
            // push the rest of the data into the chunk queue
            _chunkQueue.PushByteBlock(inputBytes, inputOffset, inputBytes.Length - inputOffset);

            // end of deflated block reached
            _state = UncompressionState.Trailer;

            // fall through to Trailer handling below
        }
        else
        {
            // normal decompressed block - return it
            return new TextElement(output);
        }

        #endregion
    }

    if (_state == UncompressionState.Trailer)
    {
        #region Trailer Handling

        // the deflate block has already ended, we are processing trailer
        if (closing || _chunkQueue.AvailableBytes >= Zlib.GZIP_FOOTER_LENGTH)
        {
            byte[] trailer = _chunkQueue.DequeueByteBlock(_chunkQueue.AvailableBytes);

            if (trailer.Length >= Zlib.GZIP_FOOTER_LENGTH)
            {
                // CRC32 check - the trailer stores it little-endian, while
                // _crc.Final() yields the opposite byte order, hence the reversal
                byte[] crc = _crc.Final();
                if (crc[3] != trailer[0] || crc[2] != trailer[1] || crc[1] != trailer[2] || crc[0] != trailer[3])
                {
                    _state = UncompressionState.Failed;
                    PhpException.Throw(PhpError.Warning, "incorrect data check");
                    return TextElement.Null;
                }

                // ISIZE check - length of the uncompressed data (modulo 2^32)
                if (BitConverter.ToInt32(trailer, 4) != _stream.total_out)
                {
                    _state = UncompressionState.Failed;
                    PhpException.Throw(PhpError.Warning, "incorrect length check");
                    return TextElement.Null;
                }

                _state = closing ? UncompressionState.Finished : UncompressionState.PostTrailer;

                // everything is fine, return the output if available
                return output != null ? new TextElement(output) : TextElement.Empty;
            }
            else
            {
                _state = UncompressionState.Failed;
                PhpException.Throw(PhpError.Warning, "unexpected end of file");
                return TextElement.Null;
            }
        }
        else
        {
            // stream is not closing yet - return the remaining output, otherwise empty
            return output != null ? new TextElement(output) : TextElement.Empty;
        }

        #endregion
    }

    // this should not happen
    Debug.Fail("Unexpected uncompression filter state.");
    return TextElement.Null;
}
/// <summary>
/// Compresses one chunk of data into the gzip format: emits the gzip header on the
/// first call, deflates the payload, and appends the CRC32/ISIZE footer when closing.
/// </summary>
/// <param name="enc">Provides the encoding used to convert a textual chunk to raw bytes.</param>
/// <param name="input">The incoming (uncompressed) chunk.</param>
/// <param name="closing"><c>true</c> when the stream is being closed and the footer should be emitted.</param>
/// <returns>The compressed data, or <c>TextElement.Null</c> on an error.</returns>
public override TextElement Filter(IEncodingProvider enc, TextElement input, bool closing)
{
    var bInput = input.AsBytes(enc.StringEncoding);
    if (bInput != null)
    {
        if (_state == CompressionState.Failed)
        {
            // failed filter should not get any more calls
            PhpException.Throw(PhpError.Warning, "using filter in failed state");
            return (TextElement.Null);
        }

        if (_state == CompressionState.Finished)
        {
            // finished filter should not get any more data
            PhpException.Throw(PhpError.Warning, "using filter in finished state");
            return (TextElement.Null);
        }

        byte[] header = null;
        byte[] footer = null;

        if (_state == CompressionState.Header)
        {
            // first call - build the 10-byte gzip header:
            // [0-1] magic bytes, [2] compression method, [3] flags (none set),
            // [4-7] MTIME and [8] XFL remain zero (C# array default), [9] OS code
            header = new byte[Zlib.GZIP_HEADER_LENGTH];
            header[0] = Zlib.GZIP_HEADER[0];
            header[1] = Zlib.GZIP_HEADER[1];
            header[2] = Zlib.Z_DEFLATED;
            header[3] = 0;
            header[9] = Zlib.OS_CODE;
            _crc.Init();
            _state = CompressionState.Data;
        }

        // NOTE(review): despite its name, 'outputOffset' appears to track how much
        // of the INPUT was consumed by FilterInner (passed by ref and compared to
        // bInput.Length below) - confirm against FilterInner before renaming.
        int outputOffset = 0;
        byte[] output;

        try
        {
            output = FilterInner(bInput, ref outputOffset, closing);
        }
        catch
        {
            // remember the failure, then let the exception propagate
            _state = CompressionState.Failed;
            throw;
        }

        if (output == null)
        {
            // error happened and exception was not thrown
            _state = CompressionState.Failed;
            return (TextElement.Null);
        }

        // input should be read to the end
        Debug.Assert(outputOffset == bInput.Length);

        // the CRC is computed over the uncompressed input
        _crc.Update(bInput);

        if (closing)
        {
            // build the 8-byte gzip footer: CRC32 followed by total input length,
            // both stored least-significant byte first
            byte[] crcBytes = _crc.Final();
            footer = new byte[Zlib.GZIP_FOOTER_LENGTH];
            // well this implementation simply has the hash inverted compared to C implementation
            footer[0] = crcBytes[3];
            footer[1] = crcBytes[2];
            footer[2] = crcBytes[1];
            footer[3] = crcBytes[0];
            footer[4] = (byte)(_stream.total_in & 0xFF);
            footer[5] = (byte)((_stream.total_in >> 8) & 0xFF);
            footer[6] = (byte)((_stream.total_in >> 16) & 0xFF);
            footer[7] = (byte)((_stream.total_in >> 24) & 0xFF);
            _state = CompressionState.Finished;
        }

        if (header != null || footer != null)
        {
            // concatenate header + output + footer into a single result block
            int offset = 0;
            byte[] appended = new byte[(header != null ? header.Length : 0) + output.Length + (footer != null ? footer.Length : 0)];

            if (header != null)
            {
                Buffer.BlockCopy(header, 0, appended, 0, header.Length);
                offset += header.Length;
            }

            if (output != null && output.Length > 0)
            {
                Buffer.BlockCopy(output, 0, appended, offset, output.Length);
                offset += output.Length;
            }

            if (footer != null)
            {
                Buffer.BlockCopy(footer, 0, appended, offset, footer.Length);
            }

            return (new TextElement(appended));
        }
        else
        {
            return (new TextElement(output));
        }
    }
    else
    {
        Debug.Fail("GzipCompresionFilter expects chunks to be of type PhpBytes.");
        return (TextElement.Null);
    }
}