void IDeferredSerializable.FinishSerialization(Stream output, DeferredFormatter context)
{
    bool useCompression = context.UseCompression;

    // Header byte: formatVersion = 0 for GZIP-compressed chunks, 1 for raw chunks.
    output.WriteByte(useCompression ? (byte)0 : (byte)1);

    // Followed by the chunk size used to partition the buffer.
    WriteUInt(output, SerializationChunkSize);

    // chunkCount = ceil(_length / SerializationChunkSize)
    uint chunkCount = (uint)((this._length + (long)SerializationChunkSize - 1) / (long)SerializationChunkSize);

    ThreadPool threadPool = new ThreadPool(Processor.LogicalCpuCount);

    // Synchronized: worker threads may append to this list concurrently.
    ArrayList exceptions = ArrayList.Synchronized(new ArrayList(Processor.LogicalCpuCount));
    var callback = new WaitCallback(SerializeChunk);

    // Each work item carries a reference to the previous one — presumably
    // SerializeChunk uses this chain to keep the stream writes in chunk order
    // even though chunks are processed in parallel (confirm in SerializeChunk).
    object priorChunkParms = null;

    for (uint chunkIndex = 0; chunkIndex < chunkCount; ++chunkIndex)
    {
        long startOffset = chunkIndex * (long)SerializationChunkSize;

        // The final chunk may be shorter than SerializationChunkSize.
        uint bytesInChunk = Math.Min(SerializationChunkSize, (uint)(this._length - startOffset));

        var workItem = new SerializeChunkParms(output, chunkIndex, startOffset, bytesInChunk, priorChunkParms, context, exceptions);
        threadPool.QueueTask(callback, workItem);
        priorChunkParms = workItem;
    }

    threadPool.Drain();
    output.Flush();

    // Surface the first worker-thread failure, if any.
    if (exceptions.Count > 0)
    {
        throw new SerializationException("Exception thrown by worker thread", (Exception)exceptions[0]);
    }
}
void IDeferredSerializable.FinishDeserialization(Stream input, DeferredFormatter context)
{
    // Allocate the destination memory: as a bitmap section when the
    // dimensions are known, otherwise as a plain allocation of _length bytes.
    if (this._bitmapWidth != 0 && this._bitmapHeight != 0)
    {
        this._voidStar = Allocate(this._bitmapWidth, this._bitmapHeight, out this._bitmapHandle).ToPointer();
        this._valid = true;
    }
    else
    {
        this._voidStar = Allocate(this._length).ToPointer();
        this._valid = true;
    }

    // formatVersion: 0 = GZIP-compressed chunks, 1 = raw chunks
    int formatVersion = input.ReadByte();

    if (formatVersion == -1)
    {
        throw new EndOfStreamException();
    }

    if (formatVersion != 0 && formatVersion != 1)
    {
        throw new SerializationException("formatVersion was neither zero nor one");
    }

    // chunkSize
    uint chunkSize = ReadUInt(input);

    // BUGFIX: chunkSize comes from the (untrusted) stream and is used as a
    // divisor below — a zero value previously surfaced as an uninformative
    // DivideByZeroException. Reject it as a malformed stream instead.
    if (chunkSize == 0)
    {
        throw new SerializationException("chunkSize read from stream must be greater than zero");
    }

    var threadPool = new ThreadPool(Processor.LogicalCpuCount);

    // BUGFIX: worker threads append to this list concurrently via
    // DecompressChunkParms, so it must be synchronized — FinishSerialization
    // already wraps its exception list this way.
    ArrayList exceptions = ArrayList.Synchronized(new ArrayList(Processor.LogicalCpuCount));
    var callback = new WaitCallback(DecompressChunk);

    // chunkCount = ceil(_length / chunkSize)
    var chunkCount = (uint)((this._length + (long)chunkSize - 1) / (long)chunkSize);

    // Chunks may arrive in any order; track which ones we have seen so a
    // duplicate or missing chunk is detected.
    var chunksFound = new bool[chunkCount];

    for (uint i = 0; i < chunkCount; ++i)
    {
        // chunkNumber
        uint chunkNumber = ReadUInt(input);

        if (chunkNumber >= chunkCount)
        {
            throw new SerializationException("chunkNumber read from stream is out of bounds");
        }

        if (chunksFound[chunkNumber])
        {
            throw new SerializationException("already encountered chunk #" + chunkNumber.ToString());
        }

        chunksFound[chunkNumber] = true;

        // dataSize: number of stored (possibly compressed) bytes that follow
        uint dataSize = ReadUInt(input);

        // destination offset for this chunk within the allocated buffer
        long chunkOffset = (long)chunkNumber * (long)chunkSize;

        // decompressed size: the final chunk may be shorter than chunkSize
        uint thisChunkSize = Math.Min(chunkSize, (uint)(this._length - chunkOffset));

        // bounds checking against the allocated length
        if (chunkOffset < 0 || chunkOffset >= this._length || chunkOffset + thisChunkSize > this._length)
        {
            throw new SerializationException("data was specified to be out of bounds");
        }

        // read the stored bytes for this chunk
        var compressedBytes = new byte[dataSize];
        Utility.ReadFromStream(input, compressedBytes, 0, compressedBytes.Length);

        if (formatVersion == 0)
        {
            // GZIP chunk: decompress on a worker thread.
            var parms = new DecompressChunkParms(compressedBytes, thisChunkSize, chunkOffset, context, exceptions);
            threadPool.QueueTask(callback, parms);
        }
        else
        {
            // Raw chunk: copy straight into place on this thread.
            fixed (byte* pbSrc = compressedBytes)
            {
                Memory.Copy((void*)((byte*)this.VoidStar + chunkOffset), (void*)pbSrc, thisChunkSize);
            }
        }
    }

    threadPool.Drain();

    // Surface the first worker-thread failure, if any.
    if (exceptions.Count > 0)
    {
        throw new SerializationException("Exception thrown by worker thread", (Exception)exceptions[0]);
    }
}