private void PersistToDisk(object theObject)
{
    using (this.persistedToDiskRegion.UseEnterScope())
    {
        try
        {
            object persistBackgroundSync = PersistedObject<T>.persistBackgroundSync;

            lock (persistBackgroundSync)
            {
                using (FileStream stream = new FileStream(this.tempFileName, FileMode.Create, FileAccess.Write, FileShare.Read))
                {
                    BinaryFormatter formatter = new BinaryFormatter();

                    KeyValuePair<object, object>[] items = new KeyValuePair<object, object>[]
                    {
                        KeyValuePairUtil.Create<object, object>(MemoryBlock.UseCompressionBooleanDeferredFormatterKey, true)
                    };

                    // Hand the DeferredFormatter to the serializer via the StreamingContext so that
                    // IDeferredSerializable objects can defer their bulk data until FinishSerialization().
                    DeferredFormatter additional = new DeferredFormatter(null, ArrayUtil.Infer<KeyValuePair<object, object>>(items));
                    StreamingContext context = new StreamingContext(formatter.Context.State, additional);
                    formatter.Context = context;

                    formatter.Serialize(stream, theObject);
                    additional.FinishSerialization(stream);
                    stream.Flush();
                }
            }
        }
        finally
        {
            this.theObjectSaved.Set();
            this.theObjectSaved = null;
        }

        GC.KeepAlive(theObject);
    }
}
public DecompressChunkParms(byte[] compressedBytes, uint chunkSize, long chunkOffset, DeferredFormatter deferredFormatter, ArrayList exceptions)
{
    this.compressedBytes = compressedBytes;
    this.chunkSize = chunkSize;
    this.chunkOffset = chunkOffset;
    this.deferredFormatter = deferredFormatter;
    this.exceptions = exceptions;
}
private void SerializeChunk(Stream output, uint chunkNumber, long chunkOffset, long chunkSize, object currentLock, object previousLock, DeferredFormatter deferredFormatter)
{
    lock (currentLock)
    {
        bool useCompression = deferredFormatter.UseCompression;
        MemoryStream chunkOutput = new MemoryStream();

        // chunkNumber
        WriteUInt(chunkOutput, chunkNumber);

        // dataSize
        long rewindPos = chunkOutput.Position;
        WriteUInt(chunkOutput, 0); // we'll rewind and write this later
        long startPos = chunkOutput.Position;

        // Copy the chunk's bytes out of the memory block
        byte[] array = new byte[chunkSize];

        fixed (byte* pbArray = array)
        {
            Memory.Copy(pbArray, (byte*)this.VoidStar + chunkOffset, (ulong)chunkSize);
        }

        chunkOutput.Flush();

        // Compress (or copy) the data into the chunk buffer
        if (useCompression)
        {
            GZipStream gZipStream = new GZipStream(chunkOutput, CompressionMode.Compress, true);
            gZipStream.Write(array, 0, array.Length);
            gZipStream.Close();
        }
        else
        {
            chunkOutput.Write(array, 0, array.Length);
        }

        long endPos = chunkOutput.Position;

        // dataSize: rewind and write the real value now that we know it
        chunkOutput.Position = rewindPos;
        uint dataSize = (uint)(endPos - startPos);
        WriteUInt(chunkOutput, dataSize);

        // bytes
        chunkOutput.Flush();

        // Wait for the previous chunk to finish writing so that chunks are appended
        // to the output stream in order. previousLock is null for the first chunk
        // (see FinishSerialization), so guard against lock(null), which throws.
        if (previousLock != null)
        {
            lock (previousLock)
            {
            }
        }

        output.Write(chunkOutput.GetBuffer(), 0, (int)chunkOutput.Length);
        deferredFormatter.ReportBytes(chunkSize);
    }
}
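// The WriteUInt/ReadUInt helpers used by the chunk code are not part of this excerpt.
// A minimal sketch, assuming a fixed-width four-byte encoding; the little-endian byte
// order is an assumption, not confirmed by this excerpt:
private static void WriteUInt(Stream output, uint theUInt)
{
    output.WriteByte((byte)(theUInt & 0xff));
    output.WriteByte((byte)((theUInt >> 8) & 0xff));
    output.WriteByte((byte)((theUInt >> 16) & 0xff));
    output.WriteByte((byte)((theUInt >> 24) & 0xff));
}

private static uint ReadUInt(Stream input)
{
    uint value = 0;

    for (int shift = 0; shift < 32; shift += 8)
    {
        int b = input.ReadByte();

        if (b == -1)
        {
            throw new EndOfStreamException();
        }

        value |= (uint)b << shift;
    }

    return value;
}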
public SerializeChunkParms(Stream output, uint chunkNumber, long chunkOffset, long chunkSize, object previousLock, DeferredFormatter deferredFormatter, ArrayList exceptions)
{
    this.output = output;
    this.chunkNumber = chunkNumber;
    this.chunkOffset = chunkOffset;
    this.chunkSize = chunkSize;
    this.previousLock = previousLock;
    this.deferredFormatter = deferredFormatter;
    this.exceptions = exceptions;
}
public void WriteFormat2Data(SerializationInfo info, StreamingContext context)
{
    DeferredFormatter deferred = context.Context as DeferredFormatter;

    if (deferred != null)
    {
        // Format 2: defer the bulk data. It will be written to the stream after the
        // object graph has been serialized, in FinishSerialization().
        info.AddValue("deferred", true);
        deferred.AddDeferredObject(this, this._length);
    }
    else
    {
        // No DeferredFormatter available: fall back to the legacy, non-deferred format.
        WriteFormat1Data(info, context);
    }
}
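// Usage sketch: the deferred path above is only taken when a DeferredFormatter has been
// placed in the StreamingContext, as PersistToDisk() does. A minimal caller mirroring
// that pattern; `stream` and `theObject` are hypothetical, and passing null for both
// DeferredFormatter constructor arguments is an assumption:
private static void SerializeDeferred(Stream stream, object theObject)
{
    BinaryFormatter formatter = new BinaryFormatter();
    DeferredFormatter deferred = new DeferredFormatter(null, null);

    formatter.Context = new StreamingContext(formatter.Context.State, deferred);
    formatter.Serialize(stream, theObject);    // object graph and headers
    deferred.FinishSerialization(stream);      // deferred bulk data is written here
}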
// Deferred stream layout, as written here and by SerializeChunk:
//   [formatVersion: byte]  0 = GZIP-compressed chunks, 1 = uncompressed chunks
//   [chunkSize: uint]
//   then, per chunk: [chunkNumber: uint] [dataSize: uint] [data: dataSize bytes]
void IDeferredSerializable.FinishSerialization(Stream output, DeferredFormatter context)
{
    bool useCompression = context.UseCompression;

    // formatVersion = 0 for GZIP, or 1 for uncompressed
    if (useCompression)
    {
        output.WriteByte(0);
    }
    else
    {
        output.WriteByte(1);
    }

    // chunkSize
    WriteUInt(output, SerializationChunkSize);

    uint chunkCount = (uint)((this._length + (long)SerializationChunkSize - 1) / (long)SerializationChunkSize);

    ThreadPool threadPool = new ThreadPool(Processor.LogicalCpuCount);
    ArrayList exceptions = ArrayList.Synchronized(new ArrayList(Processor.LogicalCpuCount));
    var callback = new WaitCallback(SerializeChunk);

    object previousLock = null;

    for (uint chunk = 0; chunk < chunkCount; ++chunk)
    {
        long chunkOffset = chunk * (long)SerializationChunkSize;
        uint chunkSize = Math.Min(SerializationChunkSize, (uint)(this._length - chunkOffset));
        var parms = new SerializeChunkParms(output, chunk, chunkOffset, chunkSize, previousLock, context, exceptions);
        threadPool.QueueTask(callback, parms);

        // Each parms object doubles as the lock that orders the next chunk's write.
        previousLock = parms;
    }

    threadPool.Drain();
    output.Flush();

    if (exceptions.Count > 0)
    {
        throw new SerializationException("Exception thrown by worker thread", (Exception)exceptions[0]);
    }
}
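// `new WaitCallback(SerializeChunk)` implies an overload that takes a single object,
// since WaitCallback has the shape `void (object state)`. A minimal sketch of that
// wrapper, assuming it unpacks SerializeChunkParms (property names are hypothetical),
// passes the parms object itself as currentLock, and routes worker exceptions into
// the shared list rather than letting them escape the thread pool:
private void SerializeChunk(object theObject)
{
    SerializeChunkParms parms = (SerializeChunkParms)theObject;

    try
    {
        SerializeChunk(parms.Output, parms.ChunkNumber, parms.ChunkOffset, parms.ChunkSize,
            parms, parms.PreviousLock, parms.DeferredFormatter);
    }
    catch (Exception ex)
    {
        parms.Exceptions.Add(ex);
    }
}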
void IDeferredSerializable.FinishDeserialization(Stream input, DeferredFormatter context)
{
    // Allocate the memory
    if (this._bitmapWidth != 0 && this._bitmapHeight != 0)
    {
        this._voidStar = Allocate(this._bitmapWidth, this._bitmapHeight, out this._bitmapHandle).ToPointer();
        this._valid = true;
    }
    else
    {
        this._voidStar = Allocate(this._length).ToPointer();
        this._valid = true;
    }

    // formatVersion: 0 = GZIP-compressed chunks, 1 = uncompressed chunks
    int formatVersion = input.ReadByte();

    if (formatVersion == -1)
    {
        throw new EndOfStreamException();
    }

    if (formatVersion != 0 && formatVersion != 1)
    {
        throw new SerializationException("formatVersion was neither zero nor one");
    }

    // chunkSize
    uint chunkSize = ReadUInt(input);

    var threadPool = new ThreadPool(Processor.LogicalCpuCount);

    // Synchronized because DecompressChunk workers append to it concurrently
    // (matching FinishSerialization above).
    var exceptions = ArrayList.Synchronized(new ArrayList(Processor.LogicalCpuCount));
    var callback = new WaitCallback(DecompressChunk);

    // calculate chunkCount
    var chunkCount = (uint)((this._length + (long)chunkSize - 1) / (long)chunkSize);
    var chunksFound = new bool[chunkCount];

    for (uint i = 0; i < chunkCount; ++i)
    {
        // chunkNumber
        uint chunkNumber = ReadUInt(input);

        if (chunkNumber >= chunkCount)
        {
            throw new SerializationException("chunkNumber read from stream is out of bounds");
        }

        if (chunksFound[chunkNumber])
        {
            throw new SerializationException("already encountered chunk #" + chunkNumber.ToString());
        }

        chunksFound[chunkNumber] = true;

        // dataSize
        uint dataSize = ReadUInt(input);

        // calculate chunkOffset
        long chunkOffset = (long)chunkNumber * (long)chunkSize;

        // calculate the decompressed size of this chunk (the last chunk may be short)
        uint thisChunkSize = Math.Min(chunkSize, (uint)(this._length - chunkOffset));

        // bounds checking
        if (chunkOffset < 0 || chunkOffset >= this._length || chunkOffset + thisChunkSize > this._length)
        {
            throw new SerializationException("data was specified to be out of bounds");
        }

        // read compressed data
        var compressedBytes = new byte[dataSize];
        Utility.ReadFromStream(input, compressedBytes, 0, compressedBytes.Length);

        // decompress data
        if (formatVersion == 0)
        {
            var parms = new DecompressChunkParms(compressedBytes, thisChunkSize, chunkOffset, context, exceptions);
            threadPool.QueueTask(callback, parms);
        }
        else
        {
            // Uncompressed: copy the bytes straight into place
            fixed (byte* pbSrc = compressedBytes)
            {
                Memory.Copy((void*)((byte*)this.VoidStar + chunkOffset), (void*)pbSrc, thisChunkSize);
            }
        }
    }

    threadPool.Drain();

    if (exceptions.Count > 0)
    {
        throw new SerializationException("Exception thrown by worker thread", (Exception)exceptions[0]);
    }
}
private void DecompressChunk(byte[] compressedBytes, uint chunkSize, long chunkOffset, DeferredFormatter deferredFormatter)
{
    // decompress data
    MemoryStream compressedStream = new MemoryStream(compressedBytes, false);
    GZipStream gZipStream = new GZipStream(compressedStream, CompressionMode.Decompress, true);
    byte[] decompressedBytes = new byte[chunkSize];

    int dstOffset = 0;

    // GZipStream.Read may return fewer bytes than requested, so loop until the
    // whole chunk has been decompressed.
    while (dstOffset < decompressedBytes.Length)
    {
        int bytesRead = gZipStream.Read(decompressedBytes, dstOffset, (int)chunkSize - dstOffset);

        if (bytesRead == 0)
        {
            throw new SerializationException("ran out of data to decompress");
        }

        dstOffset += bytesRead;
        deferredFormatter.ReportBytes((long)bytesRead);
    }

    // copy the decompressed data into place within the memory block
    fixed (byte* pbDecompressedBytes = decompressedBytes)
    {
        byte* pbDst = (byte*)this.VoidStar + chunkOffset;
        Memory.Copy(pbDst, pbDecompressedBytes, (ulong)chunkSize);
    }
}
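// As with SerializeChunk, `new WaitCallback(DecompressChunk)` in FinishDeserialization
// implies a single-object overload. A minimal sketch, assuming DecompressChunkParms
// exposes its fields via properties (hypothetical names) and that worker exceptions
// are collected into the shared list:
private void DecompressChunk(object theObject)
{
    DecompressChunkParms parms = (DecompressChunkParms)theObject;

    try
    {
        DecompressChunk(parms.CompressedBytes, parms.ChunkSize, parms.ChunkOffset, parms.DeferredFormatter);
    }
    catch (Exception ex)
    {
        parms.Exceptions.Add(ex);
    }
}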
private MemoryBlock(SerializationInfo info, StreamingContext context)
{
    _disposed = false;

    // Try to read a 64-bit value, and for backwards compatibility fall back on a 32-bit value.
    try
    {
        this._length = info.GetInt64("length64");
    }
    catch (SerializationException)
    {
        this._length = (long)info.GetInt32("length");
    }

    try
    {
        this._bitmapWidth = (int)info.GetInt32("bitmapWidth");
        this._bitmapHeight = (int)info.GetInt32("bitmapHeight");

        if (this._bitmapWidth != 0 || this._bitmapHeight != 0)
        {
            long bytes = (long)this._bitmapWidth * (long)this._bitmapHeight * (long)ColorBgra.Size;

            if (bytes != this._length)
            {
                throw new ApplicationException("Invalid file format: width * height * 4 != length");
            }
        }
    }
    catch (SerializationException)
    {
        this._bitmapWidth = 0;
        this._bitmapHeight = 0;
    }

    bool hasParent = info.GetBoolean("hasParent");

    if (hasParent)
    {
        this._parent = (MemoryBlock)info.GetValue("parentBlock", typeof(MemoryBlock));

        // Try to read a 64-bit value, and for backwards compatibility fall back on a 32-bit value.
        long parentOffset;

        try
        {
            parentOffset = info.GetInt64("parentOffset64");
        }
        catch (SerializationException)
        {
            parentOffset = (long)info.GetInt32("parentOffset");
        }

        this._voidStar = (void*)((byte*)_parent.VoidStar + parentOffset);
        this._valid = true;
    }
    else
    {
        DeferredFormatter deferredFormatter = context.Context as DeferredFormatter;
        bool deferred = false;

        // Was this stream serialized with deferment?
        foreach (SerializationEntry entry in info)
        {
            if (entry.Name == "deferred")
            {
                deferred = (bool)entry.Value;
                break;
            }
        }

        if (deferred && deferredFormatter != null)
        {
            // The newest PDN files use deferred deserialization. This lets us read straight from the stream,
            // minimizing memory use and adding the potential for multithreading.
            // Deserialization will complete in IDeferredSerializable.FinishDeserialization().
            deferredFormatter.AddDeferredObject(this, this._length);
        }
        else if (deferred && deferredFormatter == null)
        {
            throw new InvalidOperationException("stream has deferred serialization streams, but a DeferredFormatter was not provided");
        }
        else
        {
            this._voidStar = Allocate(this._length).ToPointer();
            this._valid = true;

            // Non-deferred format serializes one big byte[] chunk. This is also
            // how PDN files were saved with v2.1 Beta 2 and before.
            byte[] array = (byte[])info.GetValue("pointerData", typeof(byte[]));

            fixed (byte* pbArray = array)
            {
                Memory.Copy(this.VoidStar, (void*)pbArray, (ulong)array.LongLength);
            }
        }
    }
}
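// The serialization counterpart is not part of this excerpt. A minimal sketch of
// GetObjectData, reconstructed only from the names this constructor reads; the field
// names and the exact set of values written are assumptions:
public void GetObjectData(SerializationInfo info, StreamingContext context)
{
    // "length64" is what the constructor prefers; the 32-bit "length" fallback is
    // assumed to exist only for reading old files, so it is not written here.
    info.AddValue("length64", this._length);

    info.AddValue("bitmapWidth", this._bitmapWidth);
    info.AddValue("bitmapHeight", this._bitmapHeight);

    info.AddValue("hasParent", this._parent != null);

    if (this._parent != null)
    {
        info.AddValue("parentBlock", this._parent, typeof(MemoryBlock));

        long parentOffset = (byte*)this._voidStar - (byte*)this._parent.VoidStar;
        info.AddValue("parentOffset64", parentOffset);
    }
    else
    {
        // WriteFormat2Data (above) adds "deferred" and queues the bulk data when a
        // DeferredFormatter is present, else writes the legacy one-big-byte[] format
        // that the constructor reads back as "pointerData".
        WriteFormat2Data(info, context);
    }
}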