public override void Flush()
{
    // _pos represents the size
    try
    {
        if (_data != null)
        {
            Debug.Assert(_stream.OldEncoding);
            Debug.Assert(_pos <= _data.Length);
            _stream.WriteSize(_pos); // 1-byte size length
            _stream.WriteByteSpan(_data.AsSpan(0, _pos));
        }
        else
        {
            // Patch the previously-written dummy value.
            if (_stream.OldEncoding)
            {
                Debug.Assert(_pos >= 255);
                Span<byte> data = stackalloc byte[5];
                data[0] = 255;
                OutputStream.WriteInt(_pos, data.Slice(1, 4));
                _stream.RewriteByteSpan(data, _startPos);
            }
            else
            {
                Span<byte> data = stackalloc byte[OutputStream.DefaultSizeLength];
                OutputStream.WriteFixedLengthSize20(_pos, data);
                _stream.RewriteByteSpan(data, _startPos);
            }
        }
    }
    catch (Exception ex)
    {
        throw new IOException("could not flush stream", ex);
    }
}
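// Illustrative sketch only, not part of the original stream code: Flush() above patches a size that was
// reserved up front because, with the old (1.1) encoding, a size below 255 fits in a single byte while a
// larger size is written as a 0xFF marker followed by the size as a 4-byte little-endian int. The
// hypothetical helper below shows that variable-length scheme with plain BCL calls; its name and
// signature are invented for the example.
private static void WriteVariableLengthSizeSketch(int size, Stream stream)
{
    if (size < 255)
    {
        // Small sizes fit in a single byte.
        stream.WriteByte((byte)size);
    }
    else
    {
        // Larger sizes: a 0xFF marker followed by the size as a 4-byte little-endian int.
        Span<byte> buffer = stackalloc byte[5];
        buffer[0] = 255;
        System.Buffers.Binary.BinaryPrimitives.WriteInt32LittleEndian(buffer.Slice(1), size);
        stream.Write(buffer);
    }
}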
/// <summary>Compresses the encapsulation payload using GZip compression. A compressed encapsulation
/// payload is only supported with the 2.0 encoding.</summary>
/// <returns>A <see cref="CompressionResult"/> value indicating the result of the compression operation.
/// </returns>
public CompressionResult CompressPayload()
{
    if (IsSealed)
    {
        throw new InvalidOperationException("cannot modify a sealed frame");
    }

    if (Encoding != Encoding.V20)
    {
        throw new NotSupportedException("payload compression is only supported with the 2.0 encoding");
    }
    else
    {
        IList<ArraySegment<byte>> payload = Payload;

        int encapsulationOffset = this is OutgoingResponseFrame ? 1 : 0;

        // The encapsulation always starts in the first segment of the payload (at position 0 or 1).
        Debug.Assert(encapsulationOffset < payload[0].Count);

        int sizeLength = Protocol == Protocol.Ice2 ? payload[0][encapsulationOffset].ReadSizeLength20() : 4;
        byte compressionStatus = payload.GetByte(encapsulationOffset + sizeLength + 2);

        if (compressionStatus != 0)
        {
            throw new InvalidOperationException("payload is already compressed");
        }

        int encapsulationSize = payload.GetByteCount() - encapsulationOffset; // this includes the size length
        if (encapsulationSize < _compressionMinSize)
        {
            return CompressionResult.PayloadTooSmall;
        }

        // Reserve memory for the compressed data; it should never be larger than the uncompressed data,
        // otherwise we just send the uncompressed data.
        byte[] compressedData = new byte[encapsulationOffset + encapsulationSize];

        // Copy the byte before the encapsulation, if any.
        if (encapsulationOffset == 1)
        {
            compressedData[0] = payload[0][0];
        }

        // Write the encapsulation header.
        int offset = encapsulationOffset + sizeLength;
        compressedData[offset++] = Encoding.Major;
        compressedData[offset++] = Encoding.Minor;

        // Set the compression status to '1' (GZip compressed).
        compressedData[offset++] = 1;

        // Write the size of the uncompressed data.
        OutputStream.WriteFixedLengthSize20(encapsulationSize - sizeLength,
                                            compressedData.AsSpan(offset, sizeLength));
        offset += sizeLength;

        using var memoryStream = new MemoryStream(compressedData, offset, compressedData.Length - offset);
        using var gzipStream = new GZipStream(
            memoryStream,
            _compressionLevel == CompressionLevel.Fastest ?
                System.IO.Compression.CompressionLevel.Fastest :
                System.IO.Compression.CompressionLevel.Optimal);
        try
        {
            // The data to compress starts after the compression status byte; + 3 corresponds to the
            // encoding (2 bytes) and the compression status (1 byte).
            foreach (ArraySegment<byte> segment in payload.Slice(encapsulationOffset + sizeLength + 3))
            {
                gzipStream.Write(segment);
            }
            gzipStream.Flush();
        }
        catch (NotSupportedException)
        {
            // If the compressed data doesn't fit in the memory stream, NotSupportedException is thrown
            // when GZipStream tries to expand the fixed-size MemoryStream.
            return CompressionResult.PayloadNotCompressible;
        }

        int binaryContextLastSegmentOffset = -1;

        if (_binaryContextOstr is OutputStream ostr)
        {
            // If there is a binary context, we make sure it uses its own segment(s).
            OutputStream.Position binaryContextEnd = ostr.Tail;
            binaryContextLastSegmentOffset = binaryContextEnd.Offset;

            // When we have a _binaryContextOstr, we wrote at least the size placeholder for the binary
            // context dictionary.
            Debug.Assert(binaryContextEnd.Segment > PayloadEnd.Segment ||
                         binaryContextEnd.Offset > PayloadEnd.Offset);

            // The first segment of the binary context is immediately after the payload.
            ArraySegment<byte> segment = Data[PayloadEnd.Segment].Slice(PayloadEnd.Offset);
            if (segment.Count > 0)
            {
                Data.Insert(PayloadEnd.Segment + 1, segment);
                if (binaryContextEnd.Segment == PayloadEnd.Segment)
                {
                    binaryContextLastSegmentOffset -= PayloadEnd.Offset;
                }
            }
            // else the binary context already starts with its own segment
        }

        int start = PayloadStart.Segment;

        if (PayloadStart.Offset > 0)
        {
            // There are non-payload bytes in the first payload segment: we move them to their own segment.
            ArraySegment<byte> segment = Data[PayloadStart.Segment];
            Data[PayloadStart.Segment] = segment.Slice(0, PayloadStart.Offset);
            start += 1;
        }

        Data.RemoveRange(start, PayloadEnd.Segment - start + 1);
        offset += (int)memoryStream.Position;
        Data.Insert(start, new ArraySegment<byte>(compressedData, 0, offset));

        PayloadStart = new OutputStream.Position(start, 0);
        PayloadEnd = new OutputStream.Position(start, offset);
        Size = Data.GetByteCount();

        if (_binaryContextOstr != null)
        {
            // Recreate the binary context OutputStream.
            _binaryContextOstr = new OutputStream(_binaryContextOstr.Encoding,
                                                  Data,
                                                  new OutputStream.Position(Data.Count - 1,
                                                                            binaryContextLastSegmentOffset));
        }

        // Rewrite the encapsulation size.
        OutputStream.WriteEncapsulationSize(offset - sizeLength - encapsulationOffset,
                                            compressedData.AsSpan(encapsulationOffset, sizeLength),
                                            Protocol.GetEncoding());

        _payload = null; // reset the cache
        return CompressionResult.Success;
    }
}
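// Illustrative sketch only, not part of the original frame code: CompressPayload() above compresses into a
// fixed-capacity MemoryStream wrapping a pre-allocated buffer, so that GZipStream throws
// NotSupportedException instead of growing the buffer whenever the compressed output would be larger than
// the input. The hypothetical helper below shows that pattern in isolation; its name and signature are
// invented for the example.
private static bool TryGZipCompressSketch(ReadOnlySpan<byte> input, byte[] output, out int written)
{
    // A MemoryStream created over an existing buffer is not expandable.
    using var memoryStream = new MemoryStream(output);
    try
    {
        // Dispose inside the try block so the GZip footer write is also covered by the catch.
        using var gzipStream = new GZipStream(
            memoryStream,
            System.IO.Compression.CompressionLevel.Fastest,
            leaveOpen: true);
        gzipStream.Write(input);
    }
    catch (NotSupportedException)
    {
        // The compressed data does not fit in the output buffer: the caller keeps the uncompressed data.
        written = 0;
        return false;
    }
    written = (int)memoryStream.Position;
    return true;
}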