/// <summary>
/// Decompresses a Snappy payload whose last 4 bytes are a CRC trailer.
/// </summary>
/// <param name="compressedData">Snappy-compressed bytes followed by a 4-byte CRC.</param>
/// <returns>The decompressed bytes.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="compressedData"/> is null.</exception>
/// <exception cref="ArgumentException">Thrown when the input is shorter than the 4-byte CRC trailer.</exception>
internal override byte[] Decompress(byte[] compressedData)
{
    if (compressedData == null)
        throw new ArgumentNullException(nameof(compressedData));

    // Guard: the original `Length - 4` silently underflowed (OverflowException
    // from the array allocation) for inputs shorter than the CRC trailer.
    if (compressedData.Length < 4)
        throw new ArgumentException("Input must be at least 4 bytes (CRC trailer).", nameof(compressedData));

    // Strip the 4-byte CRC trailer; only the leading bytes are Snappy data.
    // NOTE(review): the CRC is not verified here — confirm a caller validates it.
    byte[] dataToDecompress = new byte[compressedData.Length - 4];
    Array.Copy(compressedData, dataToDecompress, dataToDecompress.Length);
    return Snappy.Decode(dataToDecompress);
}
/// <summary>
/// Reads exactly <paramref name="count"/> compressed bytes from the stream and Snappy-decodes them.
/// </summary>
/// <param name="source">Stream positioned at the start of the compressed payload.</param>
/// <param name="count">Exact number of compressed bytes to consume.</param>
/// <returns>The decompressed bytes.</returns>
/// <exception cref="EndOfStreamException">Thrown when the stream ends before <paramref name="count"/> bytes are read.</exception>
public byte[] Read(Stream source, int count)
{
    byte[] buffer = new byte[count];

    // Stream.Read may return fewer bytes than requested (network streams,
    // blob storage, etc.); the original single call could hand a partially
    // filled buffer to the decoder. Loop until the payload is complete.
    int total = 0;
    while (total < count)
    {
        int read = source.Read(buffer, total, count - total);
        if (read == 0)
            throw new EndOfStreamException($"Expected {count} bytes but the stream ended after {total}.");
        total += read;
    }

    return Snappy.Decode(buffer);
}
/// <summary>
/// Decompresses the packet buffer within the context.
/// </summary>
/// <param name="channel">The connection through which the packet is coming in or going out.</param>
/// <param name="context">The packet context for this operation.</param>
/// <returns>
/// Always <see cref="ProcessingState.Success"/>: decompression failures are logged
/// and swallowed, leaving the original (compressed) buffer in place.
/// </returns>
public static ProcessingState Process(Emitter.Connection channel, ProcessingContext context)
{
    // Get the buffer to decompress
    var input = context.Buffer.AsSegment();
    try
    {
        // Reserve the buffer, we know exactly how many bytes to decompress:
        // the uncompressed length is encoded as a uvarint at the start of the
        // Snappy payload.
        var output = context.BufferReserve(
            (int)VarInt.UvarInt(input.Array, input.Offset).Value
        );

        // Decompress.
        // NOTE(review): the whole segment — including the uvarint length
        // prefix — is handed to Snappy.Decode; confirm the decoder expects the
        // framed form rather than the raw block after the prefix.
        var length = Snappy.Decode(
            input.Array, input.Offset, input.Count,
            output.Array, output.Offset, output.Size
        );

        // Switch the buffer to the decompressed one
        context.SwitchBuffer(output);
    }
    catch (Exception ex)
    {
        // Best-effort: log and continue; the packet proceeds with its
        // original buffer untouched.
        ex.Log();
    }
    return (ProcessingState.Success);
}
/// <summary>
/// Encoding then decoding the golden text fixture must reproduce it byte-for-byte.
/// </summary>
public void RoundtripGoldenData()
{
    var original = File.ReadAllBytes("TestData/Mark.Twain-Tom.Sawyer.txt");

    var roundTripped = Snappy.Decode(Snappy.Encode(original));

    // Length is asserted first for a clearer failure message on size drift.
    Assert.Equal(original.Length, roundTripped.Length);
    Assert.Equal(original, roundTripped);
}
/// <summary>
/// Decoding the pre-compressed golden fixture must match the known plain-text file.
/// </summary>
public void DecodeGoldenInput()
{
    var compressed = File.ReadAllBytes("TestData/Mark.Twain-Tom.Sawyer.rawsnappy.txt");
    var expected = File.ReadAllBytes("TestData/Mark.Twain-Tom.Sawyer.txt");

    var actual = Snappy.Decode(compressed);

    Assert.Equal(expected.Length, actual.Length);
    Assert.Equal(expected, actual);
}
/// <summary>
/// Encoding the binary fixture must reproduce the reference snappy file exactly,
/// and decoding that output must round-trip back to the source bytes.
/// </summary>
public void RoundtripEncodeBytes()
{
    var source = File.ReadAllBytes("TestData/Mark.Shanghai-skyyearxp.bytes");
    var reference = File.ReadAllBytes("TestData/Mark.Shanghai-skyyearxp.snappy.bytes");

    // Encoder output must be byte-identical to the checked-in reference.
    var encoded = Snappy.Encode(source);
    Assert.Equal(reference.Length, encoded.Length);
    Assert.Equal(reference, encoded);

    // And the encoded form must decode back to the original input.
    var decoded = Snappy.Decode(encoded);
    Assert.Equal(source.Length, decoded.Length);
    Assert.Equal(source, decoded);
}
/// <summary>
/// Maps a <see cref="Company"/> entity to a <see cref="CompanyModel"/>, trimming
/// string fields and decompressing any attached document blobs.
/// </summary>
/// <param name="source">The entity to map from.</param>
/// <param name="includeAllFields">Currently unused by this mapping. NOTE(review): the
/// "Async" suffix is misleading — the method is synchronous — but renaming would
/// break callers.</param>
/// <returns>The populated model.</returns>
public static CompanyModel CreateCompanyModelAsync(Company source, bool includeAllFields)
{
    // `x?.Trim()` matches the original guard exactly: null stays null and
    // an empty string trims to itself.
    var model = new CompanyModel()
    {
        CompanyID = source.CompanyID,
        CompanyGuid = source.CompanyGuid,
        Name = source.Name?.Trim(),
        PhoneNoIsdCode = source.PhoneNoIsdCode?.Trim(),
        PhoneNo = source.PhoneNo?.Trim(),
        Email = source.Email?.Trim(),
        PAN = source.PAN?.Trim(),
        GSTIN = source.GSTIN?.Trim(),
        AddressLine1 = source.AddressLine1?.Trim(),
        AddressLine2 = source.AddressLine2?.Trim(),
        City = source.City?.Trim(),
        IsActive = source.IsActive,
        Pincode = source.Pincode?.Trim()
    };

    if (source.CompanyDocuments != null && source.CompanyDocuments.Count > 0)
    {
        var docs = new ObservableCollection<ImagePickerResult>();
        foreach (var doc in source.CompanyDocuments)
        {
            docs.Add(new ImagePickerResult
            {
                blobId = doc.CompanyBlobId,
                guid = doc.CompanyGuid,
                FileName = doc.FileName,
                // Blobs are stored Snappy-compressed; decode for the UI.
                ImageBytes = Snappy.Decode(doc.FileBlob),
                ContentType = doc.FileType,
                Size = doc.FileLength,
                FileCategoryId = doc.FileCategoryId
            });
        }
        model.CompanyDocuments = docs;
    }

    return model;
}
/// <summary>
/// Reads a page of <paramref name="compressedLength"/> bytes from the stream,
/// decompresses it according to <paramref name="compressionCodec"/>, and returns
/// the result wrapped in a <see cref="BytesOwner"/> that knows whether the buffer
/// must be returned to the pool on dispose.
/// </summary>
/// <param name="nakedStream">Stream positioned at the start of the page data.</param>
/// <param name="compressionCodec">Thrift codec identifier from the page header.</param>
/// <param name="compressedLength">Exact number of bytes to consume from the stream.</param>
/// <param name="uncompressedLength">Expected size of the decompressed page.</param>
/// <exception cref="NotSupportedException">Unknown codec or compression method.</exception>
/// <exception cref="ParquetException">The stream ended before the full page was read.</exception>
public static BytesOwner ReadPageData(Stream nakedStream, Thrift.CompressionCodec compressionCodec, int compressedLength, int uncompressedLength)
{
    if (!_codecToCompressionMethod.TryGetValue(compressionCodec, out CompressionMethod compressionMethod))
    {
        throw new NotSupportedException($"reader for compression '{compressionCodec}' is not supported.");
    }

    int totalBytesRead = 0;
    // Sentinel so the loop runs at least once; a Read() of 0 means end of stream.
    int currentBytesRead = int.MinValue;
    // `data` is pooled; every path below that replaces it must return the old
    // buffer and, where the replacement is not pooled, clear `dataRented`.
    byte[] data = BytesPool.Rent(compressedLength);
    bool dataRented = true;

    // Some storage solutions (like Azure blobs) might require more than one 'Read' action to read the requested length.
    while (totalBytesRead < compressedLength && currentBytesRead != 0)
    {
        currentBytesRead = nakedStream.Read(data, totalBytesRead, compressedLength - totalBytesRead);
        totalBytesRead += currentBytesRead;
    }

    if (totalBytesRead != compressedLength)
    {
        throw new ParquetException($"expected {compressedLength} bytes in source stream but could read only {totalBytesRead}");
    }

    switch (compressionMethod)
    {
        case CompressionMethod.None:
            //nothing to do, original data is the raw data
            break;

        case CompressionMethod.Gzip:
            // Only the first compressedLength bytes of the rented buffer are
            // valid — the pool may hand back a larger array.
            using (var source = new MemoryStream(data, 0, compressedLength))
            {
                byte[] unGzData = BytesPool.Rent(uncompressedLength);
                using (var dest = new MemoryStream(unGzData, 0, uncompressedLength))
                {
                    using (var gz = new GZipStream(source, CompressionMode.Decompress))
                    {
                        gz.CopyTo(dest);
                    }
                }
                // Swap to the decompressed buffer; it is also pooled, so
                // dataRented stays true.
                BytesPool.Return(data);
                data = unGzData;
            }
            break;

        case CompressionMethod.Snappy:
            // Snappy.Decode allocates a fresh (non-pooled) array, so the
            // rented flag is cleared after returning the compressed buffer.
            // NOTE(review): if the decoded array is shorter than
            // uncompressedLength, the AsMemory slice below would throw —
            // confirm the page header lengths are always trusted/consistent.
            byte[] uncompressed = Snappy.Decode(data.AsSpan(0, compressedLength));
            BytesPool.Return(data);
            data = uncompressed;
            dataRented = false;
            break;

        default:
            throw new NotSupportedException("method: " + compressionMethod);
    }

    // The owner exposes exactly uncompressedLength bytes and, when the buffer
    // is rented, returns it to the pool via the supplied callback.
    return (new BytesOwner(data, 0, data.AsMemory(0, uncompressedLength), d => BytesPool.Return(d), dataRented));
}