/// <summary>
/// Bundles the arguments for one DecompressChunk work item so they can be
/// handed to a WaitCallback through its single state object.
/// </summary>
/// <param name="compressedBytes">The chunk's compressed payload.</param>
/// <param name="chunkSize">Decompressed size of the chunk, in bytes.</param>
/// <param name="chunkOffset">Destination offset of the chunk within the block.</param>
/// <param name="deferredFormatter">Formatter that receives progress reports.</param>
/// <param name="exceptions">Shared list that collects worker-thread exceptions.</param>
public DecompressChunkParms(byte[] compressedBytes, uint chunkSize, long chunkOffset, DeferredFormatter deferredFormatter, ArrayList exceptions)
{
    this.exceptions = exceptions;
    this.deferredFormatter = deferredFormatter;
    this.chunkOffset = chunkOffset;
    this.chunkSize = chunkSize;
    this.compressedBytes = compressedBytes;
}
/// <summary>
/// NOT YET PORTED: stub that always throws StillNotPortedException.
/// The commented-out body below is the reference implementation being ported
/// (it differs from the ported sibling only in using PlatformMemory.Copy).
/// Kept as a porting template; remove once the port is complete.
/// </summary>
private void SerializeChunk(Stream output, uint chunkNumber, long chunkOffset, long chunkSize, object currentLock, object previousLock, DeferredFormatter deferredFormatter)
{
    throw new StillNotPortedException();

    //lock (currentLock)
    //{
    //    bool useCompression = deferredFormatter.UseCompression;
    //    MemoryStream chunkOutput = new MemoryStream();

    //    // chunkNumber
    //    WriteUInt(chunkOutput, chunkNumber);

    //    // dataSize
    //    long rewindPos = chunkOutput.Position;
    //    WriteUInt(chunkOutput, 0); // we'll rewind and write this later
    //    long startPos = chunkOutput.Position;

    //    // Compress data
    //    byte[] array = new byte[chunkSize];

    //    fixed (byte* pbArray = array)
    //    {
    //        PlatformMemory.Copy(pbArray, (byte*)this.VoidStar + chunkOffset, (ulong)chunkSize);
    //    }

    //    chunkOutput.Flush();

    //    if (useCompression)
    //    {
    //        GZipStream gZipStream = new GZipStream(chunkOutput, CompressionMode.Compress, true);
    //        gZipStream.Write(array, 0, array.Length);
    //        gZipStream.Close();
    //    }
    //    else
    //    {
    //        chunkOutput.Write(array, 0, array.Length);
    //    }

    //    long endPos = chunkOutput.Position;

    //    // dataSize
    //    chunkOutput.Position = rewindPos;
    //    uint dataSize = (uint)(endPos - startPos);
    //    WriteUInt(chunkOutput, dataSize);

    //    // bytes
    //    chunkOutput.Flush();

    //    lock (previousLock)
    //    {
    //        output.Write(chunkOutput.GetBuffer(), 0, (int)chunkOutput.Length);
    //        deferredFormatter.ReportBytes(chunkSize);
    //    }
    //}
}
/// <summary>
/// Writes one chunk of this memory block to <paramref name="output"/>.
/// Each chunk is staged in a private MemoryStream as
/// [chunkNumber (uint)] [dataSize (uint)] [data bytes], where the data is
/// GZip-compressed when the formatter requests compression.
/// </summary>
/// <param name="output">Destination stream shared by all chunk writers.</param>
/// <param name="chunkNumber">Zero-based index of this chunk.</param>
/// <param name="chunkOffset">Byte offset of this chunk within the block.</param>
/// <param name="chunkSize">Uncompressed size of this chunk, in bytes.</param>
/// <param name="currentLock">Lock held while this chunk is prepared; presumably
/// the next chunk's writer takes it as its previousLock so chunks reach
/// 'output' in order -- TODO confirm against the dispatching caller.</param>
/// <param name="previousLock">Lock that gates the write to 'output'.
/// NOTE(review): lock(null) throws -- confirm the caller never passes null
/// (e.g. for the first chunk).</param>
/// <param name="deferredFormatter">Supplies UseCompression and receives
/// progress via ReportBytes.</param>
private void SerializeChunk(Stream output, uint chunkNumber, long chunkOffset, long chunkSize, object currentLock, object previousLock, DeferredFormatter deferredFormatter)
{
    lock (currentLock)
    {
        bool useCompression = deferredFormatter.UseCompression;
        MemoryStream chunkOutput = new MemoryStream();

        // chunkNumber
        WriteUInt(chunkOutput, chunkNumber);

        // dataSize placeholder: the real value is patched in after compression
        long rewindPos = chunkOutput.Position;
        WriteUInt(chunkOutput, 0); // we'll rewind and write this later
        long startPos = chunkOutput.Position;

        // Copy this chunk's bytes out of the unmanaged block into a managed buffer
        byte[] array = new byte[chunkSize];

        fixed(byte *pbArray = array)
        {
            Memory.Copy(pbArray, (byte *)this.VoidStar + chunkOffset, (ulong)chunkSize);
        }

        chunkOutput.Flush();

        if (useCompression)
        {
            // leaveOpen: true, so chunkOutput stays usable after the GZip stream closes
            GZipStream gZipStream = new GZipStream(chunkOutput, CompressionMode.Compress, true);
            gZipStream.Write(array, 0, array.Length);
            gZipStream.Close(); // Close() flushes the final compressed bytes into chunkOutput
        }
        else
        {
            chunkOutput.Write(array, 0, array.Length);
        }

        long endPos = chunkOutput.Position;

        // rewind and patch in the actual (possibly compressed) data size
        chunkOutput.Position = rewindPos;
        uint dataSize = (uint)(endPos - startPos);
        WriteUInt(chunkOutput, dataSize);

        // bytes
        chunkOutput.Flush();

        lock (previousLock)
        {
            output.Write(chunkOutput.GetBuffer(), 0, (int)chunkOutput.Length);
            deferredFormatter.ReportBytes(chunkSize);
        }
    }
}
/// <summary>
/// Bundles the arguments for one SerializeChunk work item so they can be
/// handed to a WaitCallback through its single state object.
/// </summary>
/// <param name="output">Destination stream shared by all chunk writers.</param>
/// <param name="chunkNumber">Zero-based index of the chunk.</param>
/// <param name="chunkOffset">Byte offset of the chunk within the block.</param>
/// <param name="chunkSize">Uncompressed size of the chunk, in bytes.</param>
/// <param name="previousLock">Lock object of the previously queued chunk.</param>
/// <param name="deferredFormatter">Formatter that receives progress reports.</param>
/// <param name="exceptions">Shared list that collects worker-thread exceptions.</param>
public SerializeChunkParms(Stream output, uint chunkNumber, long chunkOffset, long chunkSize, object previousLock, DeferredFormatter deferredFormatter, ArrayList exceptions)
{
    this.exceptions = exceptions;
    this.deferredFormatter = deferredFormatter;
    this.previousLock = previousLock;
    this.chunkSize = chunkSize;
    this.chunkOffset = chunkOffset;
    this.chunkNumber = chunkNumber;
    this.output = output;
}
/// <summary>
/// Loads the SaveConfigToken that was persisted by the last call to Save()
/// with rememberToken=true, falling back to the default token when no saved
/// token exists or the saved one cannot be deserialized.
/// </summary>
/// <returns>The last-used token, or the default token; never null.</returns>
public SaveConfigToken GetLastSaveConfigToken()
{
    Type ourType = this.GetType();
    // Per-FileType settings key; must match the key written by Save()
    string savedTokenName = "SaveConfigToken." + ourType.Namespace + "." + ourType.Name + ".BinaryFormatter";
    string savedToken = Settings.CurrentUser.GetString(savedTokenName, null);
    SaveConfigToken saveConfigToken = null;

    if (savedToken != null)
    {
        try
        {
            byte[] bytes = Convert.FromBase64String(savedToken);
            MemoryStream ms = new MemoryStream(bytes);

            // NOTE(review): BinaryFormatter deserialization of stored data is
            // inherently unsafe on untrusted input; here the data comes from the
            // current user's own settings, and the fallback binder restricts
            // which assemblies types may resolve from.
            BinaryFormatter formatter = new BinaryFormatter();
            DeferredFormatter deferred = new DeferredFormatter();
            StreamingContext streamingContext = new StreamingContext(formatter.Context.State, deferred);
            formatter.Context = streamingContext;

            SerializationFallbackBinder sfb = new SerializationFallbackBinder();
            sfb.AddAssembly(this.GetType().Assembly);
            sfb.AddAssembly(typeof(FileType).Assembly);
            formatter.Binder = sfb;

            object obj = formatter.Deserialize(ms);
            deferred.FinishDeserialization(ms);

            ms.Close();
            ms = null;

            //SaveConfigToken sct = new SaveConfigToken();
            //saveConfigToken = (SaveConfigToken)obj;
            saveConfigToken = GetSaveConfigTokenFromSerializablePortion(obj);
        }

        catch (Exception)
        {
            // Ignore errors and revert to default
            saveConfigToken = null;
        }
    }

    if (saveConfigToken == null)
    {
        saveConfigToken = CreateDefaultSaveConfigToken();
    }

    return(saveConfigToken);
}
/// <summary>
/// Serializes this object in the version-2 layout when a DeferredFormatter
/// is present in the streaming context: only a "deferred" marker is written
/// now and the bulk data is registered for later streaming. Without a
/// DeferredFormatter, falls back to the version-1 layout.
/// </summary>
public void WriteFormat2Data(SerializationInfo info, StreamingContext context)
{
    DeferredFormatter deferredFormatter = context.Context as DeferredFormatter;

    if (deferredFormatter == null)
    {
        // No deferred formatter available: write the legacy format instead.
        WriteFormat1Data(info, context);
        return;
    }

    info.AddValue("deferred", true);
    deferredFormatter.AddDeferredObject(this, this.length);
}
/// <summary>
/// Streams this block's deferred payload to <paramref name="output"/>:
/// a one-byte format version (0 = GZip-compressed chunks, 1 = uncompressed),
/// the chunk size, then the chunks themselves. Chunks are produced in
/// parallel on a thread pool sized to the logical CPU count.
/// </summary>
/// <exception cref="SerializationException">A worker thread threw; the first
/// recorded exception is wrapped as the inner exception.</exception>
void IDeferredSerializable.FinishSerialization(Stream output, DeferredFormatter context)
{
    bool useCompression = context.UseCompression;

    // formatVersion = 0 for GZIP, or 1 for uncompressed
    if (useCompression)
    {
        output.WriteByte(0);
    }
    else
    {
        output.WriteByte(1);
    }

    // chunkSize
    WriteUInt(output, serializationChunkSize);

    // round up so a final partial chunk is counted
    uint chunkCount = (uint)((this.length + (long)serializationChunkSize - 1) / (long)serializationChunkSize);

    PaintDotNet.Threading.ThreadPool threadPool = new PaintDotNet.Threading.ThreadPool(Processor.LogicalCpuCount);
    ArrayList exceptions = ArrayList.Synchronized(new ArrayList(Processor.LogicalCpuCount));
    WaitCallback callback = new WaitCallback(SerializeChunk);

    // Each queued parms object doubles as the lock for the chunk after it,
    // chaining the writes so chunks land in 'output' in order even though
    // they are compressed concurrently. NOTE(review): the first chunk's
    // previousLock is null -- confirm SerializeChunk tolerates that.
    object previousLock = null;

    for (uint chunk = 0; chunk < chunkCount; ++chunk)
    {
        long chunkOffset = (long)chunk * (long)serializationChunkSize;
        // the last chunk may be smaller than serializationChunkSize
        uint chunkSize = Math.Min((uint)serializationChunkSize, (uint)(this.length - chunkOffset));
        SerializeChunkParms parms = new SerializeChunkParms(output, chunk, chunkOffset, chunkSize, previousLock, context, exceptions);
        threadPool.QueueUserWorkItem(callback, parms);
        previousLock = parms;
    }

    // wait for all queued chunk writers to finish
    threadPool.Drain();
    output.Flush();

    if (exceptions.Count > 0)
    {
        throw new SerializationException("Exception thrown by worker thread", (Exception)exceptions[0]);
    }

    return;
}
/// <summary>
/// Serializes <paramref name="theObject"/> to this instance's temp file using
/// a BinaryFormatter with deferred (non-compressed) serialization, then
/// signals theObjectSaved so waiters know persistence has finished (whether
/// it succeeded or threw).
/// </summary>
/// <param name="theObject">The object graph to persist.</param>
private void PersistToDisk(object theObject)
{
    try
    {
        // 'using' guarantees the file handle is released even if
        // Serialize/FinishSerialization throws (the original leaked the
        // FileStream on the exception path).
        using (FileStream stream = new FileStream(this.tempFileName, FileMode.Create, FileAccess.Write, FileShare.Read))
        {
            BinaryFormatter formatter = new BinaryFormatter();
            // false presumably disables compression for the deferred data -- TODO confirm
            DeferredFormatter deferred = new DeferredFormatter(false, null);
            StreamingContext context = new StreamingContext(formatter.Context.State, deferred);
            formatter.Context = context;

            formatter.Serialize(stream, theObject);
            deferred.FinishSerialization(stream);
            stream.Flush();
        }
    }

    finally
    {
        // Always signal completion, even on failure, so waiters don't hang.
        this.theObjectSaved.Set();
        this.theObjectSaved = null;
    }
}
/// <summary>
/// Deserializes a Document from a stream.
/// </summary>
/// <param name="stream">The stream to deserialize from. This stream must be seekable.</param>
/// <returns>The Document that was stored in stream.</returns>
/// <exception cref="EndOfStreamException">The stream ended before the header or data markers could be read.</exception>
/// <exception cref="FormatException">The data section does not start with a recognized marker.</exception>
/// <remarks>
/// This is the only supported way to deserialize a Document instance from disk.
/// </remarks>
public static Document FromStream(Stream stream)
{
    long oldPosition = stream.Position;
    bool pdn21Format = true;

    // Version 2.1+ file format:
    //  Starts with bytes as defined by MagicBytes
    //  Next three bytes are 24-bit unsigned int 'N' (first byte is low-word, second byte is middle-word, third byte is high word)
    //  The next N bytes are a string, this is the document header (it is XML, UTF-8 encoded)
    //      Important: 'N' indicates a byte count, not a character count. 'N' bytes may result in less than 'N' characters,
    //                 depending on how the characters decode as per UTF8
    //  If the next 2 bytes are 0x00, 0x01: This signifies that non-compressed .NET serialized data follows.
    //  If the next 2 bytes are 0x1f, 0x8b: This signifies the start of the gzip compressed .NET serialized data
    //
    // Version 2.0 and previous file format:
    //  Starts with 0x1f, 0x8b: this signifies the start of the gzip compressed .NET serialized data.

    // Read in the 'magic' bytes; any mismatch means this is not a v2.1+ file
    for (int i = 0; i < MagicBytes.Length; ++i)
    {
        int theByte = stream.ReadByte();

        if (theByte == -1)
        {
            throw new EndOfStreamException();
        }

        if (theByte != MagicBytes[i])
        {
            pdn21Format = false;
            break;
        }
    }

    // Read in the header if we found the 'magic' bytes identifying a PDN 2.1 file
    XmlDocument headerXml = null;

    if (pdn21Format)
    {
        // This is a Paint.NET v2.1+ file.
        // Read the 24-bit little-endian header byte count, one byte at a time.
        int low = stream.ReadByte();

        if (low == -1)
        {
            throw new EndOfStreamException();
        }

        int mid = stream.ReadByte();

        if (mid == -1)
        {
            throw new EndOfStreamException();
        }

        int high = stream.ReadByte();

        if (high == -1)
        {
            throw new EndOfStreamException();
        }

        int byteCount = low + (mid << 8) + (high << 16);
        byte[] bytes = new byte[byteCount];
        int bytesRead = Utility.ReadFromStream(stream, bytes, 0, byteCount);

        if (bytesRead != byteCount)
        {
            throw new EndOfStreamException("expected " + byteCount + " bytes, but only got " + bytesRead);
        }

        string xml = Encoding.UTF8.GetString(bytes);
        headerXml = new XmlDocument();
        headerXml.LoadXml(xml);
    }
    else
    {
        stream.Position = oldPosition; // rewind and try as v2.0-or-earlier file
    }

    // Start reading the data section of the file. Determine if it's gzip or regular
    // by peeking at the first two bytes.
    long oldPosition2 = stream.Position;
    int first = stream.ReadByte();

    if (first == -1)
    {
        throw new EndOfStreamException();
    }

    int second = stream.ReadByte();

    if (second == -1)
    {
        throw new EndOfStreamException();
    }

    Document document;
    object docObject;

    // NOTE(review): BinaryFormatter on file data is inherently risky; the
    // fallback binder below restricts which assemblies types resolve from.
    BinaryFormatter formatter = new BinaryFormatter();
    SerializationFallbackBinder sfb = new SerializationFallbackBinder();
    sfb.AddAssembly(Assembly.GetExecutingAssembly());     // first try PaintDotNet.Data.dll
    sfb.AddAssembly(typeof(Utility).Assembly);            // second, try PdnLib.dll
    sfb.AddAssembly(typeof(SystemLayer.Memory).Assembly); // third, try PaintDotNet.SystemLayer.dll
    formatter.Binder = sfb;

    if (first == 0 && second == 1)
    {
        // 0x00, 0x01 marker: non-compressed data with deferred (chunked) payloads
        DeferredFormatter deferred = new DeferredFormatter();
        formatter.Context = new StreamingContext(formatter.Context.State, deferred);
        docObject = formatter.UnsafeDeserialize(stream, null);
        deferred.FinishDeserialization(stream);
    }
    else if (first == 0x1f && second == 0x8b)
    {
        // 0x1f, 0x8b is the gzip signature: the whole payload is one gzip stream
        stream.Position = oldPosition2; // rewind to the start of 0x1f, 0x8b
        GZipStream gZipStream = new GZipStream(stream, CompressionMode.Decompress, true);
        docObject = formatter.UnsafeDeserialize(gZipStream, null);
    }
    else
    {
        throw new FormatException("file is not a valid Paint.NET document");
    }

    document = (Document)docObject;
    document.Dirty = true;
    document.headerXml = headerXml;
    document.Invalidate();

    return document;
}
/// <summary>
/// Saves the Document to the given Stream with the default and given headers, and
/// using the given IO completion callback.
/// </summary>
/// <param name="stream">The Stream to serialize the Document to.</param>
/// <param name="callback">
/// This can be used to keep track of the number of uncompressed bytes that are written. The
/// values reported through the IOEventArgs.Count+Offset will vary from 1 to approximately
/// Layers.Count*Width*Height*sizeof(ColorBgra). The final number will actually be higher
/// because of hierarchical overhead, so make sure to cap any progress reports to 100%. This
/// callback will be wired to the IOFinished event of a SiphonStream. Events may be raised
/// from any thread. May be null.
/// </param>
public void SaveToStream(Stream stream, IOEventHandler callback)
{
    InitializeDpu();

    PrepareHeader();
    string headerText = this.HeaderXml.OuterXml;

    // Write the header: magic bytes, 24-bit little-endian byte count, then UTF-8 XML
    byte[] magicBytes = Document.MagicBytes;
    stream.Write(magicBytes, 0, magicBytes.Length);
    byte[] headerBytes = Encoding.UTF8.GetBytes(headerText);
    stream.WriteByte((byte)(headerBytes.Length & 0xff));
    stream.WriteByte((byte)((headerBytes.Length & 0xff00) >> 8));
    stream.WriteByte((byte)((headerBytes.Length & 0xff0000) >> 16));
    stream.Write(headerBytes, 0, headerBytes.Length);
    stream.Flush();

    // Copy version info
    this.savedWith = PdnInfo.GetVersion();

    // Write 0x00, 0x01 to indicate normal .NET serialized data
    // (this is the marker that FromStream checks for; it is NOT the gzip signature)
    stream.WriteByte(0x00);
    stream.WriteByte(0x01);

    // Write the remainder of the file: BinaryFormatter data plus deferred chunked
    // payloads. DeferredFormatter(true, ...) -- true presumably enables compression
    // of the deferred chunks -- TODO confirm against DeferredFormatter's constructor.
    SiphonStream siphonStream = new SiphonStream(stream);
    BinaryFormatter formatter = new BinaryFormatter();
    DeferredFormatter deferred = new DeferredFormatter(true, null);
    SaveProgressRelay relay = new SaveProgressRelay(deferred, callback);
    formatter.Context = new StreamingContext(formatter.Context.State, deferred);
    formatter.Serialize(siphonStream, this);
    deferred.FinishSerialization(siphonStream);
    stream.Flush();
}
/// <summary>
/// Creates a relay that forwards the formatter's byte-count progress
/// notifications to the supplied I/O callback.
/// </summary>
/// <param name="formatter">The formatter whose ReportedBytesChanged events are relayed.</param>
/// <param name="ioCallback">The callback to notify; may be null.</param>
public SaveProgressRelay(DeferredFormatter formatter, IOEventHandler ioCallback)
{
    this.ioCallback = ioCallback;
    this.formatter = formatter;

    // Subscribe to progress notifications from the formatter.
    formatter.ReportedBytesChanged += new EventHandler(Formatter_ReportedBytesChanged);
}
/// <summary>
/// Loads the SaveConfigToken persisted by the last Save() with
/// rememberToken=true. Falls back to the default token when no saved token
/// exists or the stored data cannot be deserialized.
/// </summary>
/// <returns>The last-used token, or the default token; never null.</returns>
public SaveConfigToken GetLastSaveConfigToken()
{
    Type fileType = this.GetType();
    // Per-FileType settings key; must match the key written by Save()
    string settingName = "SaveConfigToken." + fileType.Namespace + "." + fileType.Name + ".BinaryFormatter";
    string base64Token = Settings.CurrentUser.GetString(settingName, null);
    SaveConfigToken token = null;

    if (base64Token != null)
    {
        try
        {
            byte[] tokenBytes = Convert.FromBase64String(base64Token);
            MemoryStream tokenStream = new MemoryStream(tokenBytes);

            DeferredFormatter deferredFormatter = new DeferredFormatter();
            BinaryFormatter binaryFormatter = new BinaryFormatter();
            binaryFormatter.Context = new StreamingContext(binaryFormatter.Context.State, deferredFormatter);

            SerializationFallbackBinder binder = new SerializationFallbackBinder();
            binder.AddAssembly(this.GetType().Assembly);
            binder.AddAssembly(typeof(FileType).Assembly);
            binaryFormatter.Binder = binder;

            object deserialized = binaryFormatter.Deserialize(tokenStream);
            deferredFormatter.FinishDeserialization(tokenStream);

            tokenStream.Close();
            tokenStream = null;

            token = GetSaveConfigTokenFromSerializablePortion(deserialized);
        }

        catch (Exception)
        {
            // Ignore errors and revert to the default token.
            token = null;
        }
    }

    if (token == null)
    {
        token = CreateDefaultSaveConfigToken();
    }

    return token;
}
/// <summary>
/// Saves the given Document to the given Stream via this FileType's OnSave,
/// optionally persisting the SaveConfigToken to the user's settings so
/// GetLastSaveConfigToken() can restore it later.
/// </summary>
/// <param name="input">The Document to save.</param>
/// <param name="output">The destination stream.</param>
/// <param name="token">Save configuration for this file type.</param>
/// <param name="scratchSurface">A surface the same size as input; if null, one is allocated (and disposed) here.</param>
/// <param name="callback">Progress callback forwarded to OnSave; may be null.</param>
/// <param name="rememberToken">When true, the serializable portion of the token is base64-persisted to settings.</param>
/// <exception cref="NotImplementedException">This FileType does not support saving.</exception>
/// <exception cref="ArgumentException">scratchSurface was supplied but its size differs from input.Size.</exception>
public void Save(
    Document input,
    Stream output,
    SaveConfigToken token,
    Surface scratchSurface,
    ProgressEventHandler callback,
    bool rememberToken)
{
    Tracing.LogFeature("Save(" + GetType().FullName + ")");

    if (!this.SupportsSaving)
    {
        throw new NotImplementedException("Saving is not supported by this FileType");
    }
    else
    {
        Surface disposeMe = null;

        if (scratchSurface == null)
        {
            disposeMe = new Surface(input.Size);
            scratchSurface = disposeMe;
        }
        else if (scratchSurface.Size != input.Size)
        {
            throw new ArgumentException("scratchSurface.Size must equal input.Size");
        }

        try
        {
            if (rememberToken)
            {
                Type ourType = this.GetType();
                // Must match the key read by GetLastSaveConfigToken()
                string savedTokenName = "SaveConfigToken." + ourType.Namespace + "." + ourType.Name + ".BinaryFormatter";

                MemoryStream ms = new MemoryStream();
                BinaryFormatter formatter = new BinaryFormatter();
                DeferredFormatter deferredFormatter = new DeferredFormatter(false, null);
                StreamingContext streamingContext = new StreamingContext(formatter.Context.State, deferredFormatter);
                formatter.Context = streamingContext;

                object tokenSubset = GetSerializablePortionOfSaveConfigToken(token);

                formatter.Serialize(ms, tokenSubset);
                deferredFormatter.FinishSerialization(ms);

                // Use ToArray(), not GetBuffer(): GetBuffer() returns the whole
                // internal buffer including unused capacity, which would base64-
                // encode trailing padding bytes into the persisted setting.
                byte[] bytes = ms.ToArray();
                string base64Bytes = Convert.ToBase64String(bytes);

                Settings.CurrentUser.SetString(savedTokenName, base64Bytes);
            }

            try
            {
                OnSave(input, output, token, scratchSurface, callback);
            }

            catch (OnSaveNotImplementedException)
            {
                // Derived type implements only the legacy OnSave signature.
                OldOnSaveTrampoline(input, output, token, callback);
            }
        }

        finally
        {
            // Dispose the surface we allocated even if OnSave throws
            // (previously it leaked on the exception path).
            if (disposeMe != null)
            {
                disposeMe.Dispose();
                disposeMe = null;
            }
        }
    }
}
/// <summary>
/// Decompresses one GZip-compressed chunk into this block's unmanaged memory.
/// </summary>
/// <param name="compressedBytes">The chunk's compressed payload.</param>
/// <param name="chunkSize">Expected decompressed size of the chunk, in bytes.</param>
/// <param name="chunkOffset">Destination byte offset within this block.</param>
/// <param name="deferredFormatter">Receives progress via ReportBytes.</param>
/// <exception cref="SerializationException">The compressed data ran out before chunkSize bytes were produced.</exception>
private void DecompressChunk(byte[] compressedBytes, uint chunkSize, long chunkOffset, DeferredFormatter deferredFormatter)
{
    // decompress data
    MemoryStream compressedStream = new MemoryStream(compressedBytes, false);
    GZipStream gZipStream = new GZipStream(compressedStream, CompressionMode.Decompress, true);
    byte[] decompressedBytes = new byte[chunkSize];

    // Read() may return fewer bytes than requested, so loop until the full chunk arrives
    int dstOffset = 0;

    while (dstOffset < decompressedBytes.Length)
    {
        int bytesRead = gZipStream.Read(decompressedBytes, dstOffset, (int)chunkSize - dstOffset);

        if (bytesRead == 0)
        {
            throw new SerializationException("ran out of data to decompress");
        }

        dstOffset += bytesRead;
        deferredFormatter.ReportBytes((long)bytesRead);
    }

    // copy the decompressed bytes into the unmanaged block at chunkOffset
    fixed(byte *pbDecompressedBytes = decompressedBytes)
    {
        byte *pbDst = (byte *)this.VoidStar + chunkOffset;
        Memory.Copy(pbDst, pbDecompressedBytes, (ulong)chunkSize);
    }
}
/// <summary>
/// Reads this block's deferred payload from <paramref name="input"/>:
/// allocates the backing memory, reads the one-byte format version
/// (0 = GZip-compressed chunks, 1 = uncompressed), the chunk size, and then
/// each [chunkNumber][dataSize][data] record. Compressed chunks are
/// decompressed in parallel on a thread pool.
/// </summary>
/// <exception cref="EndOfStreamException">The stream ended before the format version byte.</exception>
/// <exception cref="SerializationException">Malformed chunk data, or a worker thread threw.</exception>
void IDeferredSerializable.FinishDeserialization(Stream input, DeferredFormatter context)
{
    // Allocate the memory: as a bitmap when dimensions are known, otherwise raw
    if (this.bitmapWidth != 0 && this.bitmapHeight != 0)
    {
        this.voidStar = Allocate(this.bitmapWidth, this.bitmapHeight, out this.bitmapHandle).ToPointer();
        this.valid = true;
    }
    else
    {
        this.voidStar = Allocate(this.length).ToPointer();
        this.valid = true;
    }

    // formatVersion: 0 = compressed chunks, 1 = uncompressed chunks
    int formatVersion = input.ReadByte();

    if (formatVersion == -1)
    {
        throw new EndOfStreamException();
    }

    if (formatVersion != 0 && formatVersion != 1)
    {
        throw new SerializationException("formatVersion was neither zero nor one");
    }

    // chunkSize
    uint chunkSize = ReadUInt(input);

    PaintDotNet.Threading.ThreadPool threadPool = new PaintDotNet.Threading.ThreadPool(Processor.LogicalCpuCount);
    // NOTE(review): unlike the serialize path, this ArrayList is not wrapped
    // with ArrayList.Synchronized even though workers may add to it from
    // multiple threads -- confirm whether DecompressChunk's error path locks.
    ArrayList exceptions = new ArrayList(Processor.LogicalCpuCount);
    WaitCallback callback = new WaitCallback(DecompressChunk);

    // calculate chunkCount (round up so a final partial chunk is counted)
    uint chunkCount = (uint)((this.length + (long)chunkSize - 1) / (long)chunkSize);
    bool[] chunksFound = new bool[chunkCount];

    for (uint i = 0; i < chunkCount; ++i)
    {
        // chunkNumber: chunks may arrive in any order, but each exactly once
        uint chunkNumber = ReadUInt(input);

        if (chunkNumber >= chunkCount)
        {
            throw new SerializationException("chunkNumber read from stream is out of bounds");
        }

        if (chunksFound[chunkNumber])
        {
            throw new SerializationException("already encountered chunk #" + chunkNumber.ToString());
        }

        chunksFound[chunkNumber] = true;

        // dataSize: on-disk (possibly compressed) byte count for this chunk
        uint dataSize = ReadUInt(input);

        // calculate chunkOffset
        long chunkOffset = (long)chunkNumber * (long)chunkSize;

        // calculate decompressed chunkSize (the last chunk may be short)
        uint thisChunkSize = Math.Min(chunkSize, (uint)(this.length - chunkOffset));

        // bounds checking
        if (chunkOffset < 0 || chunkOffset >= this.length || chunkOffset + thisChunkSize > this.length)
        {
            throw new SerializationException("data was specified to be out of bounds");
        }

        // read compressed data
        // NOTE(review): the return value is not checked here, unlike other call
        // sites -- a short read would surface later as a decompression error.
        byte[] compressedBytes = new byte[dataSize];
        Utility.ReadFromStream(input, compressedBytes, 0, compressedBytes.Length);

        // decompress data: queue compressed chunks to the pool; copy raw chunks inline
        if (formatVersion == 0)
        {
            DecompressChunkParms parms = new DecompressChunkParms(compressedBytes, thisChunkSize, chunkOffset, context, exceptions);
            threadPool.QueueUserWorkItem(callback, parms);
        }
        else
        {
            fixed(byte *pbSrc = compressedBytes)
            {
                Memory.Copy((void *)((byte *)this.VoidStar + chunkOffset), (void *)pbSrc, thisChunkSize);
            }
        }
    }

    // wait for all decompression workers to finish
    threadPool.Drain();

    if (exceptions.Count > 0)
    {
        throw new SerializationException("Exception thrown by worker thread", (Exception)exceptions[0]);
    }
}
/// <summary>
/// NOT YET PORTED: stub that always throws StillNotPortedException.
/// The commented-out body below is the reference implementation being ported
/// (it differs from the ported sibling only in using PlatformMemory.Copy).
/// Kept as a porting template; remove once the port is complete.
/// </summary>
private void DecompressChunk(byte[] compressedBytes, uint chunkSize, long chunkOffset, DeferredFormatter deferredFormatter)
{
    throw new StillNotPortedException();

    //// decompress data
    //MemoryStream compressedStream = new MemoryStream(compressedBytes, false);
    //GZipStream gZipStream = new GZipStream(compressedStream, CompressionMode.Decompress, true);
    //byte[] decompressedBytes = new byte[chunkSize];

    //int dstOffset = 0;

    //while (dstOffset < decompressedBytes.Length)
    //{
    //    int bytesRead = gZipStream.Read(decompressedBytes, dstOffset, (int)chunkSize - dstOffset);

    //    if (bytesRead == 0)
    //    {
    //        throw new SerializationException("ran out of data to decompress");
    //    }

    //    dstOffset += bytesRead;
    //    deferredFormatter.ReportBytes((long)bytesRead);
    //}

    //// copy data
    //fixed (byte* pbDecompressedBytes = decompressedBytes)
    //{
    //    byte* pbDst = (byte*)this.VoidStar + chunkOffset;
    //    PlatformMemory.Copy(pbDst, pbDecompressedBytes, (ulong)chunkSize);
    //}
}
/// <summary>
/// Decompresses one GZip-compressed chunk into this block's unmanaged memory.
/// </summary>
/// <param name="compressedBytes">The chunk's compressed payload.</param>
/// <param name="chunkSize">Expected decompressed size of the chunk, in bytes.</param>
/// <param name="chunkOffset">Destination byte offset within this block.</param>
/// <param name="deferredFormatter">Receives progress via ReportBytes.</param>
/// <exception cref="SerializationException">The compressed data ran out before chunkSize bytes were produced.</exception>
private void DecompressChunk(byte[] compressedBytes, uint chunkSize, long chunkOffset, DeferredFormatter deferredFormatter)
{
    byte[] decompressedBytes = new byte[chunkSize];

    // Dispose the streams deterministically so the GZip decoder's resources
    // are released as soon as the chunk is read (previously they were only
    // reclaimed by the garbage collector).
    using (MemoryStream compressedStream = new MemoryStream(compressedBytes, false))
    using (GZipStream gZipStream = new GZipStream(compressedStream, CompressionMode.Decompress, true))
    {
        // Read() may return fewer bytes than requested, so loop until the full chunk arrives
        int dstOffset = 0;

        while (dstOffset < decompressedBytes.Length)
        {
            int bytesRead = gZipStream.Read(decompressedBytes, dstOffset, (int)chunkSize - dstOffset);

            if (bytesRead == 0)
            {
                throw new SerializationException("ran out of data to decompress");
            }

            dstOffset += bytesRead;
            deferredFormatter.ReportBytes((long)bytesRead);
        }
    }

    // copy the decompressed bytes into the unmanaged block at chunkOffset
    fixed (byte *pbDecompressedBytes = decompressedBytes)
    {
        byte *pbDst = (byte *)this.VoidStar + chunkOffset;
        Memory.Copy(pbDst, pbDecompressedBytes, (ulong)chunkSize);
    }
}
/// <summary>
/// Writes one chunk of this memory block to <paramref name="output"/>.
/// Each chunk is staged in a private MemoryStream as
/// [chunkNumber (uint)] [dataSize (uint)] [data bytes], where the data is
/// GZip-compressed when the formatter requests compression.
/// </summary>
/// <param name="output">Destination stream shared by all chunk writers.</param>
/// <param name="chunkNumber">Zero-based index of this chunk.</param>
/// <param name="chunkOffset">Byte offset of this chunk within the block.</param>
/// <param name="chunkSize">Uncompressed size of this chunk, in bytes.</param>
/// <param name="currentLock">Lock held while this chunk is prepared; presumably
/// the next chunk's writer takes it as its previousLock so chunks reach
/// 'output' in order -- TODO confirm against the dispatching caller.</param>
/// <param name="previousLock">Lock that gates the write to 'output'.
/// NOTE(review): lock(null) throws -- confirm the caller never passes null
/// (e.g. for the first chunk).</param>
/// <param name="deferredFormatter">Supplies UseCompression and receives
/// progress via ReportBytes.</param>
private void SerializeChunk(Stream output, uint chunkNumber, long chunkOffset, long chunkSize, object currentLock, object previousLock, DeferredFormatter deferredFormatter)
{
    lock (currentLock)
    {
        bool useCompression = deferredFormatter.UseCompression;
        MemoryStream chunkOutput = new MemoryStream();

        // chunkNumber
        WriteUInt(chunkOutput, chunkNumber);

        // dataSize placeholder: the real value is patched in after compression
        long rewindPos = chunkOutput.Position;
        WriteUInt(chunkOutput, 0); // we'll rewind and write this later
        long startPos = chunkOutput.Position;

        // Copy this chunk's bytes out of the unmanaged block into a managed buffer
        byte[] array = new byte[chunkSize];

        fixed (byte *pbArray = array)
        {
            Memory.Copy(pbArray, (byte *)this.VoidStar + chunkOffset, (ulong)chunkSize);
        }

        chunkOutput.Flush();

        if (useCompression)
        {
            // leaveOpen: true, so chunkOutput stays usable after the GZip stream closes
            GZipStream gZipStream = new GZipStream(chunkOutput, CompressionMode.Compress, true);
            gZipStream.Write(array, 0, array.Length);
            gZipStream.Close(); // Close() flushes the final compressed bytes into chunkOutput
        }
        else
        {
            chunkOutput.Write(array, 0, array.Length);
        }

        long endPos = chunkOutput.Position;

        // rewind and patch in the actual (possibly compressed) data size
        chunkOutput.Position = rewindPos;
        uint dataSize = (uint)(endPos - startPos);
        WriteUInt(chunkOutput, dataSize);

        // bytes
        chunkOutput.Flush();

        lock (previousLock)
        {
            output.Write(chunkOutput.GetBuffer(), 0, (int)chunkOutput.Length);
            deferredFormatter.ReportBytes(chunkSize);
        }
    }
}
/// <summary>
/// Reads this block's deferred payload from <paramref name="input"/>:
/// allocates the backing memory, reads the one-byte format version
/// (0 = GZip-compressed chunks, 1 = uncompressed), the chunk size, and then
/// each [chunkNumber][dataSize][data] record. Compressed chunks are
/// decompressed in parallel on a thread pool.
/// </summary>
/// <exception cref="EndOfStreamException">The stream ended before the format version byte.</exception>
/// <exception cref="SerializationException">Malformed chunk data, or a worker thread threw.</exception>
void IDeferredSerializable.FinishDeserialization(Stream input, DeferredFormatter context)
{
    // Allocate the memory: as a bitmap when dimensions are known, otherwise raw
    if (this.bitmapWidth != 0 && this.bitmapHeight != 0)
    {
        this.voidStar = Allocate(this.bitmapWidth, this.bitmapHeight, out this.bitmapHandle).ToPointer();
        this.valid = true;
    }
    else
    {
        this.voidStar = Allocate(this.length).ToPointer();
        this.valid = true;
    }

    // formatVersion: 0 = compressed chunks, 1 = uncompressed chunks
    int formatVersion = input.ReadByte();

    if (formatVersion == -1)
    {
        throw new EndOfStreamException();
    }

    if (formatVersion != 0 && formatVersion != 1)
    {
        throw new SerializationException("formatVersion was neither zero nor one");
    }

    // chunkSize
    uint chunkSize = ReadUInt(input);

    PaintDotNet.Threading.ThreadPool threadPool = new PaintDotNet.Threading.ThreadPool(Processor.LogicalCpuCount);
    // NOTE(review): unlike the serialize path, this ArrayList is not wrapped
    // with ArrayList.Synchronized even though workers may add to it from
    // multiple threads -- confirm whether DecompressChunk's error path locks.
    ArrayList exceptions = new ArrayList(Processor.LogicalCpuCount);
    WaitCallback callback = new WaitCallback(DecompressChunk);

    // calculate chunkCount (round up so a final partial chunk is counted)
    uint chunkCount = (uint)((this.length + (long)chunkSize - 1) / (long)chunkSize);
    bool[] chunksFound = new bool[chunkCount];

    for (uint i = 0; i < chunkCount; ++i)
    {
        // chunkNumber: chunks may arrive in any order, but each exactly once
        uint chunkNumber = ReadUInt(input);

        if (chunkNumber >= chunkCount)
        {
            throw new SerializationException("chunkNumber read from stream is out of bounds");
        }

        if (chunksFound[chunkNumber])
        {
            throw new SerializationException("already encountered chunk #" + chunkNumber.ToString());
        }

        chunksFound[chunkNumber] = true;

        // dataSize: on-disk (possibly compressed) byte count for this chunk
        uint dataSize = ReadUInt(input);

        // calculate chunkOffset
        long chunkOffset = (long)chunkNumber * (long)chunkSize;

        // calculate decompressed chunkSize (the last chunk may be short)
        uint thisChunkSize = Math.Min(chunkSize, (uint)(this.length - chunkOffset));

        // bounds checking
        if (chunkOffset < 0 || chunkOffset >= this.length || chunkOffset + thisChunkSize > this.length)
        {
            throw new SerializationException("data was specified to be out of bounds");
        }

        // read compressed data
        // NOTE(review): the return value is not checked here, unlike other call
        // sites -- a short read would surface later as a decompression error.
        byte[] compressedBytes = new byte[dataSize];
        Utility.ReadFromStream(input, compressedBytes, 0, compressedBytes.Length);

        // decompress data: queue compressed chunks to the pool; copy raw chunks inline
        if (formatVersion == 0)
        {
            DecompressChunkParms parms = new DecompressChunkParms(compressedBytes, thisChunkSize, chunkOffset, context, exceptions);
            threadPool.QueueUserWorkItem(callback, parms);
        }
        else
        {
            fixed (byte *pbSrc = compressedBytes)
            {
                Memory.Copy((void *)((byte *)this.VoidStar + chunkOffset), (void *)pbSrc, thisChunkSize);
            }
        }
    }

    // wait for all decompression workers to finish
    threadPool.Drain();

    if (exceptions.Count > 0)
    {
        throw new SerializationException("Exception thrown by worker thread", (Exception)exceptions[0]);
    }
}
/// <summary>
/// Deserialization constructor. Reads the block's length and optional bitmap
/// dimensions, then either re-points this block into a parent block, defers
/// the data to a DeferredFormatter, or copies an inline byte[] payload.
/// </summary>
/// <exception cref="ApplicationException">Bitmap dimensions are inconsistent with the stored length.</exception>
/// <exception cref="InvalidOperationException">The stream uses deferred serialization but no DeferredFormatter was supplied in the context.</exception>
private MemoryBlock(SerializationInfo info, StreamingContext context)
{
    disposed = false;

    // Try to read a 64-bit value, and for backwards compatibility fall back on a 32-bit value.
    try
    {
        this.length = info.GetInt64("length64");
    }

    catch (SerializationException)
    {
        this.length = (long)info.GetInt32("length");
    }

    // Bitmap dimensions are optional; older streams don't carry them.
    try
    {
        this.bitmapWidth = (int)info.GetInt32("bitmapWidth");
        this.bitmapHeight = (int)info.GetInt32("bitmapHeight");

        if (this.bitmapWidth != 0 || this.bitmapHeight != 0)
        {
            // Sanity check: dimensions must account for every stored byte
            // (4 bytes per pixel, per ColorBgra.SizeOf).
            long bytes = (long)this.bitmapWidth * (long)this.bitmapHeight * (long)ColorBgra.SizeOf;

            if (bytes != this.length)
            {
                throw new ApplicationException("Invalid file format: width * height * 4 != length");
            }
        }
    }

    catch (SerializationException)
    {
        this.bitmapWidth = 0;
        this.bitmapHeight = 0;
    }

    bool hasParent = info.GetBoolean("hasParent");

    if (hasParent)
    {
        // This block is a window into its parent's memory: no allocation,
        // just a pointer at the stored offset.
        this.parentBlock = (MemoryBlock)info.GetValue("parentBlock", typeof(MemoryBlock));

        // Try to read a 64-bit value, and for backwards compatibility fall back on a 32-bit value.
        long parentOffset;

        try
        {
            parentOffset = info.GetInt64("parentOffset64");
        }

        catch (SerializationException)
        {
            parentOffset = (long)info.GetInt32("parentOffset");
        }

        this.voidStar = (void *)((byte *)parentBlock.VoidStar + parentOffset);
        this.valid = true;
    }
    else
    {
        DeferredFormatter deferredFormatter = context.Context as DeferredFormatter;
        bool deferred = false;

        // Was this stream serialized with deferment?
        foreach (SerializationEntry entry in info)
        {
            if (entry.Name == "deferred")
            {
                deferred = (bool)entry.Value;
                break;
            }
        }

        if (deferred && deferredFormatter != null)
        {
            // The newest PDN files use deferred deserialization. This lets us
            // read straight from the stream, minimizing memory use and adding
            // the potential for multithreading.
            // Deserialization will complete in IDeferredDeserializer.FinishDeserialization()
            deferredFormatter.AddDeferredObject(this, this.length);
        }
        else if (deferred && deferredFormatter == null)
        {
            throw new InvalidOperationException("stream has deferred serialization streams, but a DeferredFormatter was not provided");
        }
        else
        {
            this.voidStar = Allocate(this.length).ToPointer();
            this.valid = true;

            // Non-deferred format serializes one big byte[] chunk. This is also
            // how PDN files were saved with v2.1 Beta 2 and before.
            byte[] array = (byte[])info.GetValue("pointerData", typeof(byte[]));

            fixed(byte *pbArray = array)
            {
                Memory.Copy(this.VoidStar, (void *)pbArray, (ulong)array.LongLength);
            }
        }
    }
}