/// <summary>
/// Loads a stroke from the stream based on Stroke Descriptor, StylusPointDescription,
/// Drawing Attributes, Stroke IDs, transform and GuidList
/// </summary>
/// <param name="stream">Stream positioned at the start of the stroke block</param>
/// <param name="size">Size, in bytes, of the stroke block in the ISF stream</param>
/// <param name="guidList">Guid table used to resolve custom attribute tags</param>
/// <param name="strokeDescriptor">Describes the layout of the stroke's packet/property data</param>
/// <param name="stylusPointDescription">Describes the per-point properties to decode</param>
/// <param name="drawingAttributes">Drawing attributes attached to the new stroke</param>
/// <param name="transform">Transform applied to the decoded points</param>
/// <param name="stroke">Newly decoded stroke</param>
#endif
internal static uint DecodeStroke(Stream stream,
                                  uint size,
                                  GuidList guidList,
                                  StrokeDescriptor strokeDescriptor,
                                  StylusPointDescription stylusPointDescription,
                                  DrawingAttributes drawingAttributes,
                                  Matrix transform,
#if OLD_ISF
                                  Compressor compressor,
#endif
                                  out Stroke stroke)
{
    StylusPointCollection decodedPoints;
    ExtendedPropertyCollection decodedExtendedProperties;

    // Decode the raw packet data and any stroke extended properties for this block.
    uint bytesDecoded = DecodeISFIntoStroke(
#if OLD_ISF
        compressor,
#endif
        stream,
        size,
        guidList,
        strokeDescriptor,
        stylusPointDescription,
        transform,
        out decodedPoints,
        out decodedExtendedProperties);

    // The stroke block must be consumed exactly; any mismatch indicates corrupt ISF.
    if (size != bytesDecoded)
    {
        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Stroke size (" + bytesDecoded.ToString(System.Globalization.CultureInfo.InvariantCulture) + ") != expected (" + size.ToString(System.Globalization.CultureInfo.InvariantCulture) + ")"));
    }

    stroke = new Stroke(decodedPoints, drawingAttributes, decodedExtendedProperties);

    return bytesDecoded;
}
/// <summary>
/// Loads a stroke from the stream based on Stroke Descriptor, StylusPointDescription,
/// Drawing Attributes, Stroke IDs, transform and GuidList
/// </summary>
/// <param name="stream">Stream positioned at the start of the stroke block</param>
/// <param name="size">Size, in bytes, of the stroke block in the ISF stream</param>
/// <param name="guidList">Guid table used to resolve custom attribute tags</param>
/// <param name="strokeDescriptor">Describes the layout of the stroke's packet/property data</param>
/// <param name="stylusPointDescription">Describes the per-point properties to decode</param>
/// <param name="drawingAttributes">Drawing attributes attached to the new stroke</param>
/// <param name="transform">Transform applied to the decoded points</param>
/// <param name="compressor">Compression module</param>
/// <param name="stroke">Newly decoded stroke</param>
/// <returns>The number of bytes decoded; always equal to size on success</returns>
#else
/// <summary>
/// Loads a stroke from the stream based on Stroke Descriptor, StylusPointDescription,
/// Drawing Attributes, Stroke IDs, transform and GuidList
/// </summary>
/// <param name="stream">Stream positioned at the start of the stroke block</param>
/// <param name="size">Size, in bytes, of the stroke block in the ISF stream</param>
/// <param name="guidList">Guid table used to resolve custom attribute tags</param>
/// <param name="strokeDescriptor">Describes the layout of the stroke's packet/property data</param>
/// <param name="stylusPointDescription">Describes the per-point properties to decode</param>
/// <param name="drawingAttributes">Drawing attributes attached to the new stroke</param>
/// <param name="transform">Transform applied to the decoded points</param>
/// <param name="stroke">Newly decoded stroke</param>
#endif
internal static uint DecodeStroke(Stream stream,
                                  uint size,
                                  GuidList guidList,
                                  StrokeDescriptor strokeDescriptor,
                                  StylusPointDescription stylusPointDescription,
                                  DrawingAttributes drawingAttributes,
                                  Matrix transform,
#if OLD_ISF
                                  Compressor compressor,
#endif
                                  out Stroke stroke)
{
    ExtendedPropertyCollection extendedProperties;
    StylusPointCollection stylusPoints;

    // Decode the raw packet data and any stroke extended properties for this block.
    uint cb = DecodeISFIntoStroke(
#if OLD_ISF
        compressor,
#endif
        stream,
        size,
        guidList,
        strokeDescriptor,
        stylusPointDescription,
        transform,
        out stylusPoints,
        out extendedProperties);

    // The stroke block must be consumed exactly; any mismatch indicates corrupt ISF.
    if (cb != size)
    {
        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Stroke size (" + cb.ToString(System.Globalization.CultureInfo.InvariantCulture) + ") != expected (" + size.ToString(System.Globalization.CultureInfo.InvariantCulture) + ")"));
    }

    stroke = new Stroke(stylusPoints, drawingAttributes, extendedProperties);

    return (cb);
}
/// <summary>
/// Returns an array of bytes of the saved stroke
/// </summary>
/// <param name="stroke">Stroke to save</param>
/// <param name="stream">null to calculate only the size</param>
/// <param name="compressor">Compression module</param>
/// <param name="compressionAlgorithm">Algorithm byte used when encoding extended properties</param>
/// <param name="guidList">Guid table used to encode custom attribute tags</param>
/// <param name="strokeLookupEntry">Per-stroke serialization state</param>
#else
/// <summary>
/// Returns an array of bytes of the saved stroke
/// </summary>
/// <param name="stroke">Stroke to save</param>
/// <param name="stream">null to calculate only the size</param>
/// <param name="compressionAlgorithm">Algorithm byte used when encoding extended properties</param>
/// <param name="guidList">Guid table used to encode custom attribute tags</param>
/// <param name="strokeLookupEntry">Per-stroke serialization state</param>
#endif
internal static uint EncodeStroke(
    Stroke stroke,
    Stream stream,
#if OLD_ISF
    Compressor compressor,
#endif
    byte compressionAlgorithm,
    GuidList guidList,
    StrokeCollectionSerializer.StrokeLookupEntry strokeLookupEntry)
{
    // The packet data (e.g. x's, y's) is written first.
    uint totalBytesWritten = SavePackets(stroke,
                                         stream,
#if OLD_ISF
                                         compressor,
#endif
                                         strokeLookupEntry);

    // Followed by the stroke's extended properties, when there are any.
    if (stroke.ExtendedProperties.Count > 0)
    {
        totalBytesWritten += ExtendedPropertySerializer.EncodeAsISF(stroke.ExtendedProperties, stream, guidList, compressionAlgorithm, false);
    }

    return totalBytesWritten;
}
/// <summary>
/// Loads packets from the input stream. For example, packets are all of the x's in a stroke
/// </summary>
#else
/// <summary>
/// Loads packets from the input stream. For example, packets are all of the x's in a stroke
/// </summary>
#endif
static uint LoadPackets(Stream inputStream,
                        uint totalBytesInStrokeBlockOfIsfStream,
#if OLD_ISF
                        Compressor compressor,
#endif
                        StylusPointDescription stylusPointDescription,
                        Matrix transform,
                        out StylusPointCollection stylusPoints)
{
    stylusPoints = null;

    // A stroke with no packet data is legal.
    if (0 == totalBytesInStrokeBlockOfIsfStream)
    {
        return (0);
    }

    uint locallyDecodedBytesRemaining = totalBytesInStrokeBlockOfIsfStream;
    uint localBytesRead;

    // First read the no of packets
    uint pointCount;

    localBytesRead = SerializationHelper.Decode(inputStream, out pointCount);
    if (locallyDecodedBytesRemaining < localBytesRead)
    {
        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Invalid ISF data"));
    }

    locallyDecodedBytesRemaining -= localBytesRead;
    if (0 == locallyDecodedBytesRemaining)
    {
        return (localBytesRead);
    }

    // Allocate packet properties.
    // FIX: these three queries had been stubbed out with 0 (behind
    // "fixed-to-build" comments), which made every array below zero-length
    // and silently discarded all packet data. Restored the original calls
    // that were preserved inside those comments.
    int intsPerPoint = stylusPointDescription.GetInputArrayLengthPerPoint();
    int buttonCount = stylusPointDescription.ButtonCount;
    int buttonIntsPerPoint = (buttonCount > 0 ? 1 : 0);
    int valueIntsPerPoint = intsPerPoint - buttonIntsPerPoint;

    // add one int per point for button data if it exists
    int[] rawPointData = new int[pointCount * intsPerPoint];
    int[] packetDataSet = new int[pointCount];

    // copy the rest of the data from the stroke data
    byte[] inputBuffer = new byte[locallyDecodedBytesRemaining];

    // Read the input data into the byte array
    uint bytesRead = StrokeCollectionSerializer.ReliableRead(inputStream, inputBuffer, locallyDecodedBytesRemaining);

    if (bytesRead != locallyDecodedBytesRemaining)
    {
        // Make sure the bytes read are expected. If not, we should bail out.
        // An exception will be thrown.
        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Invalid ISF data"));
    }

    // at this point, we have read all of the bytes remaining in the input
    // stream's packet block, and while we will keep the bytes remaining
    // variable for positioning within the local byte buffer, we should
    // not read from the stream again, or we risk reading into another
    // ISF tag's block.
    // FIX: restored the stubbed-out OriginalPressureIndex query (was hard-coded 0).
    int originalPressureIndex = stylusPointDescription.OriginalPressureIndex;
    for (int i = 0; i < valueIntsPerPoint && locallyDecodedBytesRemaining > 0; i++)
    {
        localBytesRead = locallyDecodedBytesRemaining;
        Compressor.DecompressPacketData(
#if OLD_ISF
            compressor,
#endif
            inputBuffer,
            ref localBytesRead,
            packetDataSet);

        if (localBytesRead > locallyDecodedBytesRemaining)
        {
            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Invalid ISF data"));
        }

        //
        // packetDataSet is like this:
        // -------------
        // |X|X|X|X|X|X|
        // -------------
        //
        // we need to copy into rawPointData at
        //
        // -------------
        // |X| |X| |X| |
        // -------------
        //
        // additionally, for NormalPressure, if it exists and was
        // reordered in the StylusPointDescription, we need to account for that here
        //
        int tempi = i;
        // FIX: compare against the named constant (restored from the stub
        // comment) rather than the magic literal 2.
        if (tempi > 1 && originalPressureIndex != -1 && originalPressureIndex != StylusPointDescription.RequiredPressureIndex /*2*/)
        {
            //
            // NormalPressure exists in the packet stream and was not at index 2
            // StylusPointDescription enforces that NormalPressure is at index 2
            // so we need to copy packet data beyond X and Y into a different location
            //
            // take the example of the original StylusPointDescription
            //  |X|Y|XTilt|YTilt|NormalPressure|Rotation|
            //
            // originalPressureIndex is 4, and we know it is now 2
            // which means that everything before index 4 has been shifted one
            // and everything after index 4 is still good. Index 4 should be copied
            // to index 2
            if (tempi == originalPressureIndex)
            {
                tempi = 2;
            }
            else if (tempi < originalPressureIndex)
            {
                tempi++;
            }
        }

        locallyDecodedBytesRemaining -= localBytesRead;
        for (int j = 0, x = 0; j < pointCount; j++, x += intsPerPoint)
        {
            rawPointData[x + tempi] = packetDataSet[j];
        }

        // Move the array elements to point to next set of compressed data
        for (uint u = 0; u < locallyDecodedBytesRemaining; u++)
        {
            inputBuffer[u] = inputBuffer[u + (int)localBytesRead];
        }
    }

    // Now that we've read packet data, we must read button data if it is there
    byte[] buttonData = null;
    // since the button state is a simple bit value (either down or up), the button state
    // for a series of packets is packed into an array of bits rather than integers
    // For example, if there are 16 packets, and 2 buttons, then 32 bits can be used (e.g. 1 32-bit integer)
    if (0 != locallyDecodedBytesRemaining && buttonCount > 0)
    {
        // calculate the number of full bytes used for buttons per packet
        //   Example: 10 buttons would require 1 full byte
        int fullBytesForButtonsPerPacket = buttonCount / Native.BitsPerByte;

        // calculate the number of bits that spill beyond the full byte boundary
        //   Example: 10 buttons would require 2 extra bits (8 fit in a full byte)
        int partialBitsForButtonsPerPacket = buttonCount % Native.BitsPerByte;

        // Now figure out how many bytes we need to read for the button data
        localBytesRead = (uint)((buttonCount * pointCount + 7) / Native.BitsPerByte);
        if (localBytesRead > locallyDecodedBytesRemaining)
        {
            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Buffer range is smaller than expected expected size"));
        }

        locallyDecodedBytesRemaining -= localBytesRead;

        int buttonSizeInBytes = (buttonCount + 7) / Native.BitsPerByte;
        buttonData = new byte[pointCount * buttonSizeInBytes];

        // Create a bit reader to unpack the bits from the ISF stream into
        // a loosely packed byte buffer (e.g. button data aligned on full byte
        // boundaries only)
        BitStreamReader bitReader = new BitStreamReader(inputBuffer, (uint)buttonCount * pointCount);

        // unpack the button data into each packet
        int byteCounter = 0;
        while (!bitReader.EndOfStream)
        {
            // unpack the full bytes first
            for (int fullBytes = 0; fullBytes < fullBytesForButtonsPerPacket; fullBytes++)
            {
                buttonData[byteCounter++] = bitReader.ReadByte(Native.BitsPerByte);
            }

            // then unpack a single partial byte if necessary
            if (partialBitsForButtonsPerPacket > 0)
            {
                buttonData[byteCounter++] = bitReader.ReadByte((int)partialBitsForButtonsPerPacket);
            }
        }

        // if the number of bytes allocated != necessary byte amount then an error occurred
        if (byteCounter != buttonData.Length)
        {
            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Button data length not equal to expected length"));
        }

        //
        // set the point data in the raw array
        //
        FillButtonData((int)pointCount,
                       buttonCount,
                       valueIntsPerPoint, //gives the first button index
                       rawPointData,
                       buttonData);
    }

    // FIX: pass the decoded packet data and the transform through to the
    // point collection. The "rawPointData, null, transform" arguments had
    // been commented out to get the project building, which threw away
    // everything decoded above.
    stylusPoints = new StylusPointCollection(stylusPointDescription, rawPointData, null, transform);

    // if we read too far into the stream (e.g. the packets were compressed)
    // then move the stream pointer back to the end of the actual packet
    // data before returning. This keeps the return value on the function
    // (representing bytes read) honest and consistent with the stream
    // position movement in this function.
    if (0 != locallyDecodedBytesRemaining)
    {
        inputStream.Seek(0 - (long)locallyDecodedBytesRemaining, SeekOrigin.Current);
    }

    return (totalBytesInStrokeBlockOfIsfStream - locallyDecodedBytesRemaining);
}
/// <summary>
/// This functions loads a stroke from a memory stream based on the descriptor and GuidList. It returns
/// the no of bytes it has read from the stream to correctly load the stream, which should be same as
/// the value of the size parameter. If they are unequal throws ArgumentException. Stroke descriptor is
/// used to load the packetproperty as well as ExtendedPropertyCollection on this stroke. Compressor is used
/// to decompress the data.
/// </summary>
/// <param name="compressor">Compression module</param>
/// <param name="stream">Stream positioned at the start of the stroke block</param>
/// <param name="totalBytesInStrokeBlockOfIsfStream">Size, in bytes, of the stroke block</param>
/// <param name="guidList">Guid table used to resolve custom attribute tags</param>
/// <param name="strokeDescriptor">Layout of the stroke's packet and property data</param>
/// <param name="stylusPointDescription">Describes the per-point properties to decode</param>
/// <param name="transform">Transform applied to the decoded points</param>
/// <param name="stylusPoints">Receives the decoded point collection (null if none)</param>
/// <param name="extendedProperties">Receives decoded stroke extended properties (null if none)</param>
/// <returns>The number of bytes consumed from the stroke block</returns>
#else
/// <summary>
/// This functions loads a stroke from a memory stream based on the descriptor and GuidList. It returns
/// the no of bytes it has read from the stream to correctly load the stream, which should be same as
/// the value of the size parameter. If they are unequal throws ArgumentException. Stroke descriptor is
/// used to load the packetproperty as well as ExtendedPropertyCollection on this stroke. Compressor is used
/// to decompress the data.
/// </summary>
/// <param name="stream">Stream positioned at the start of the stroke block</param>
/// <param name="totalBytesInStrokeBlockOfIsfStream">Size, in bytes, of the stroke block</param>
/// <param name="guidList">Guid table used to resolve custom attribute tags</param>
/// <param name="strokeDescriptor">Layout of the stroke's packet and property data</param>
/// <param name="stylusPointDescription">Describes the per-point properties to decode</param>
/// <param name="transform">Transform applied to the decoded points</param>
/// <param name="stylusPoints">Receives the decoded point collection (null if none)</param>
/// <param name="extendedProperties">Receives decoded stroke extended properties (null if none)</param>
#endif
static uint DecodeISFIntoStroke(
#if OLD_ISF
    Compressor compressor,
#endif
    Stream stream,
    uint totalBytesInStrokeBlockOfIsfStream,
    GuidList guidList,
    StrokeDescriptor strokeDescriptor,
    StylusPointDescription stylusPointDescription,
    Matrix transform,
    out StylusPointCollection stylusPoints,
    out ExtendedPropertyCollection extendedProperties)
{
    stylusPoints = null;
    extendedProperties = null;

    // We do allow a stroke with no packet data
    if (0 == totalBytesInStrokeBlockOfIsfStream)
    {
        return (0);
    }

    uint locallyDecodedBytes;
    uint remainingBytesInStrokeBlock = totalBytesInStrokeBlockOfIsfStream;

    // First try to load any packet data
    locallyDecodedBytes = LoadPackets(stream,
                                      remainingBytesInStrokeBlock,
#if OLD_ISF
                                      compressor,
#endif
                                      stylusPointDescription,
                                      transform,
                                      out stylusPoints);

    if (locallyDecodedBytes > remainingBytesInStrokeBlock)
    {
        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Packet buffer overflowed the ISF stream"));
    }

    remainingBytesInStrokeBlock -= locallyDecodedBytes;
    if (0 == remainingBytesInStrokeBlock)
    {
        return (locallyDecodedBytes);
    }

    // Now read the extended propertes
    for (int iTag = 1; iTag < strokeDescriptor.Template.Count && remainingBytesInStrokeBlock > 0; iTag++)
    {
        KnownTagCache.KnownTagIndex tag = strokeDescriptor.Template[iTag - 1];

        switch (tag)
        {
            case MS.Internal.Ink.InkSerializedFormat.KnownTagCache.KnownTagIndex.StrokePropertyList:
                {
                    // we've found the stroke extended properties. Load them now.
                    while (iTag < strokeDescriptor.Template.Count && remainingBytesInStrokeBlock > 0)
                    {
                        tag = strokeDescriptor.Template[iTag];

                        object data;
                        Guid guid = guidList.FindGuid(tag);
                        if (guid == Guid.Empty)
                        {
                            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Stroke Custom Attribute tag embedded in ISF stream does not match guid table"));
                        }

                        // load the extended property data from the stream (and decode the type)
                        locallyDecodedBytes = ExtendedPropertySerializer.DecodeAsISF(stream, remainingBytesInStrokeBlock, guidList, tag, ref guid, out data);

                        // add the guid/data pair into the property collection (don't redecode the type)
                        if (extendedProperties == null)
                        {
                            extendedProperties = new ExtendedPropertyCollection();
                        }
                        extendedProperties[guid] = data;

                        if (locallyDecodedBytes > remainingBytesInStrokeBlock)
                        {
                            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Invalid ISF data"));
                        }

                        remainingBytesInStrokeBlock -= locallyDecodedBytes;
                        iTag++;
                    }
                }
                break;

            case MS.Internal.Ink.InkSerializedFormat.KnownTagCache.KnownTagIndex.Buttons:
                {
                    // Next tag is count of buttons and the tags for the button guids
                    iTag += (int)((uint)strokeDescriptor.Template[iTag]) + 1;
                }
                break;

            // ignore any tags embedded in the Stroke block that this
            // version of the ISF decoder doesn't understand
            default:
                {
                    System.Diagnostics.Trace.WriteLine("Ignoring unhandled stroke tag in ISF stroke descriptor");
                }
                break;
        }
    }

    // Now try to load any tagged property data or point property data
    while (remainingBytesInStrokeBlock > 0)
    {
        // Read the tag first
        KnownTagCache.KnownTagIndex tag;
        uint uiTag;

        locallyDecodedBytes = SerializationHelper.Decode(stream, out uiTag);
        tag = (KnownTagCache.KnownTagIndex)uiTag;
        if (locallyDecodedBytes > remainingBytesInStrokeBlock)
        {
            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Invalid ISF data"));
        }

        remainingBytesInStrokeBlock -= locallyDecodedBytes;

        // if it is a point property block
        switch (tag)
        {
            case MS.Internal.Ink.InkSerializedFormat.KnownTagCache.KnownTagIndex.PointProperty:
                {
                    // First load the size of the point property block
                    uint cbsize;

                    locallyDecodedBytes = SerializationHelper.Decode(stream, out cbsize);
                    if (locallyDecodedBytes > remainingBytesInStrokeBlock)
                    {
                        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Invalid ISF data"));
                    }

                    remainingBytesInStrokeBlock -= locallyDecodedBytes;
                    while (remainingBytesInStrokeBlock > 0)
                    {
                        // First read the tag corresponding to the property
                        locallyDecodedBytes = SerializationHelper.Decode(stream, out uiTag);
                        tag = (KnownTagCache.KnownTagIndex)uiTag;
                        if (locallyDecodedBytes > remainingBytesInStrokeBlock)
                        {
                            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Invalid ISF data"));
                        }

                        remainingBytesInStrokeBlock -= locallyDecodedBytes;

                        // Now read the packet index for which the property will apply
                        uint propindex;

                        locallyDecodedBytes = SerializationHelper.Decode(stream, out propindex);
                        if (locallyDecodedBytes > remainingBytesInStrokeBlock)
                        {
                            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Invalid ISF data"));
                        }

                        remainingBytesInStrokeBlock -= locallyDecodedBytes;

                        uint propsize;

                        locallyDecodedBytes = SerializationHelper.Decode(stream, out propsize);
                        if (locallyDecodedBytes > remainingBytesInStrokeBlock)
                        {
                            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Invalid ISF data"));
                        }

                        remainingBytesInStrokeBlock -= locallyDecodedBytes;

                        // Compressed data size — the extra byte presumably covers a
                        // header/algorithm byte of the compressed blob; TODO confirm
                        propsize += 1;

                        // Make sure we have enough data to read
                        if (propsize > remainingBytesInStrokeBlock)
                        {
                            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Invalid ISF data"));
                        }

                        byte[] in_buffer = new byte[propsize];

                        uint bytesRead = StrokeCollectionSerializer.ReliableRead(stream, in_buffer, propsize);
                        if (propsize != bytesRead)
                        {
                            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Read different size from stream then expected"));
                        }

                        byte[] out_buffer = Compressor.DecompressPropertyData(in_buffer);

                        System.Diagnostics.Debug.Assert(false, "ExtendedProperties for points are not supported");

                        // skip the bytes in both success & failure cases
                        // Note: Point ExtendedProperties are discarded
                        remainingBytesInStrokeBlock -= propsize;
                    }
                }
                break;

            default:
                {
                    object data;
                    Guid guid = guidList.FindGuid(tag);
                    if (guid == Guid.Empty)
                    {
                        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Stroke Custom Attribute tag embedded in ISF stream does not match guid table"));
                    }

                    // load the extended property data from the stream (and decode the type)
                    locallyDecodedBytes = ExtendedPropertySerializer.DecodeAsISF(stream, remainingBytesInStrokeBlock, guidList, tag, ref guid, out data);

                    // add the guid/data pair into the property collection (don't redecode the type)
                    if (extendedProperties == null)
                    {
                        extendedProperties = new ExtendedPropertyCollection();
                    }
                    extendedProperties[guid] = data;

                    if (locallyDecodedBytes > remainingBytesInStrokeBlock)
                    {
                        throw new InvalidOperationException(StrokeCollectionSerializer.ISFDebugMessage("ExtendedProperty decoded totalBytesInStrokeBlockOfIsfStream exceeded ISF stream totalBytesInStrokeBlockOfIsfStream"));
                    }

                    remainingBytesInStrokeBlock -= locallyDecodedBytes;
                }
                break;
        }
    }

    // The whole stroke block must have been consumed exactly.
    if (0 != remainingBytesInStrokeBlock)
    {
        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Invalid ISF data"));
    }

    return (totalBytesInStrokeBlockOfIsfStream);
}
/// <summary>
/// DecompressPacketData - take a byte[] or a subset of a byte[] and decompresses it into
/// an int[] of packet data (for example, x's in a Stroke)
/// </summary>
/// <param name="compressedInput">The byte[] to decompress</param>
/// <param name="size">In: the max size of the subset of compressedInput to read, out: size read</param>
/// <param name="decompressedPackets">The int[] to write the packet data to</param>
#endif
internal static void DecompressPacketData(
#if OLD_ISF
    Compressor compressor,
#endif
    byte[] compressedInput,
    ref uint size,
    int[] decompressedPackets)
{
#if OLD_ISF
    //
    // lock to prevent multi-threaded vulnerabilities
    //
    lock (_compressSync)
    {
#endif
        if (compressedInput == null ||
            size > compressedInput.Length ||
            decompressedPackets == null)
        {
            //we don't raise any information that could be used to attack our ISF code
            //a simple 'ISF Operation Failed' is sufficient since the user can't do
            //anything to fix bogus ISF
            throw new InvalidOperationException(StrokeCollectionSerializer.ISFDebugMessage(SR.Get(SRID.DecompressPacketDataFailed)));
        }
#if OLD_ISF
        // remember the caller's size so it can be compared against what the
        // legacy native decompressor reports below
        uint size2 = size;
#endif
        // managed decompression path; 'size' becomes the number of bytes consumed
        size = AlgoModule.DecompressPacketData(compressedInput, decompressedPackets);
#if OLD_ISF
        // Legacy verification path: run the old native decompressor as well and
        // compare its output against the managed implementation, throwing if
        // the two disagree in any way.
        MS.Win32.Penimc.CompressorSafeHandle safeCompressorHandle = (compressor == null) ?
            MS.Win32.Penimc.CompressorSafeHandle.Null : compressor._compressorHandle;
        int[] decompressedPackets2 = new int[decompressedPackets.Length];
        byte algo = AlgoModule.NoCompression;
        int hr = MS.Win32.Penimc.UnsafeNativeMethods.IsfDecompressPacketData(safeCompressorHandle,
                                                                             compressedInput,
                                                                             ref size2,
                                                                             (uint)decompressedPackets2.Length,
                                                                             decompressedPackets2,
                                                                             ref algo);
        if (0 != hr)
        {
            //we don't raise any information that could be used to attack our ISF code
            //a simple 'ISF Operation Failed' is sufficient since the user can't do
            //anything to fix bogus ISF
            throw new InvalidOperationException(StrokeCollectionSerializer.ISFDebugMessage("IsfDecompressPacketData returned: " + hr.ToString(CultureInfo.InvariantCulture)));
        }

        // bytes consumed must agree between the two implementations
        if (size != size2)
        {
            throw new InvalidOperationException("MAGIC EXCEPTION: Packet data bytes read didn't match with new uncompression");
        }

        // every decoded packet value must agree as well
        for (int i = 0; i < decompressedPackets.Length; i++)
        {
            if (decompressedPackets[i] != decompressedPackets2[i])
            {
                throw new InvalidOperationException("MAGIC EXCEPTION: Packet data didn't match with new uncompression at index " + i.ToString());
            }
        }
    }
#endif
}
/// <summary>
/// CompressPacketData - compresses packet data using the compression defined by 'compressor'
/// </summary>
/// <param name="input">The int[] of packet data to compress</param>
/// <param name="algorithm">In: the desired algorithm to use. Out: the algorithm used</param>
/// <returns>the compressed data in a byte[]</returns>
#endif
internal static byte[] CompressPacketData(
#if OLD_ISF
    Compressor compressor,
#endif
    int[] input,
    ref byte algorithm)
{
#if OLD_ISF
    //
    // lock to prevent multi-threaded vulnerabilities
    //
    lock (_compressSync)
    {
#endif
        if (input == null)
        {
            //we don't raise any information that could be used to attack our ISF code
            //a simple 'ISF Operation Failed' is sufficient since the user can't do
            //anything to fix bogus ISF
            throw new InvalidOperationException(SR.Get(SRID.IsfOperationFailed));
        }

        // managed compression path
        byte[] data = AlgoModule.CompressPacketData(input, algorithm);
#if OLD_ISF
        // Legacy verification path: run the old native compressor (first call
        // sizes the output, second call fills it) and compare its output
        // byte-for-byte against the managed implementation.
        uint cbOutSize = 0;
        MS.Win32.Penimc.CompressorSafeHandle safeCompressorHandle = (compressor == null) ?
            MS.Win32.Penimc.CompressorSafeHandle.Null : compressor._compressorHandle;
        int hr = MS.Win32.Penimc.UnsafeNativeMethods.IsfCompressPacketData(safeCompressorHandle,
                                                                           input,
                                                                           (uint)input.Length,
                                                                           ref algorithm,
                                                                           ref cbOutSize,
                                                                           null);
        if (0 == hr)
        {
            byte[] data2 = new byte[cbOutSize];
            hr = MS.Win32.Penimc.UnsafeNativeMethods.IsfCompressPacketData(safeCompressorHandle,
                                                                           input,
                                                                           (uint)input.Length,
                                                                           ref algorithm,
                                                                           ref cbOutSize,
                                                                           data2);
            if (0 == hr)
            {
                //see if data matches
                if (data2.Length != data.Length)
                {
                    throw new InvalidOperationException("MAGIC EXCEPTION: Packet data length didn't match with new compression");
                }
                for (int i = 0; i < data2.Length; i++)
                {
                    if (data2[i] != data[i])
                    {
                        throw new InvalidOperationException("MAGIC EXCEPTION: Packet data didn't match with new compression at index " + i.ToString());
                    }
                }

                return data;
            }
        }
        throw new InvalidOperationException(StrokeCollectionSerializer.ISFDebugMessage("IsfCompressPacketData returned:" + hr.ToString(CultureInfo.InvariantCulture)));
    }
#else
    return data;
#endif
}
/// <summary>
/// Takes an ISF Stream and populates the StrokeCollection
/// attached to this StrokeCollectionSerializer. The stream is consumed as a
/// sequence of multi-byte-encoded (tag, [size], payload) records; table-style
/// records (guid/drawing-attribute/metric/transform/descriptor tables) are
/// cached and later referenced by index when individual strokes are decoded.
/// </summary>
/// <param name="inputStream">a Stream containing the raw isf to decode</param>
#endif
private void DecodeRawISF(Stream inputStream)
{
    Debug.Assert(inputStream != null);

    KnownTagCache.KnownTagIndex isfTag;
    uint remainingBytesInStream;           // bytes of ISF payload left to decode
    uint bytesDecodedInCurrentTag = 0;     // size consumed by the record currently being decoded

    // Flags recording which table/block kinds have been seen so far; strokes may
    // only index into a table after it has been decoded.
    bool strokeDescriptorBlockDecoded = false;
    bool drawingAttributesBlockDecoded = false;
    bool metricBlockDecoded = false;
    bool transformDecoded = false;

    // current/previous table indices; 0xFFFFFFFF means "not yet initialized",
    // so the first stroke always refreshes its cached descriptors.
    uint strokeDescriptorTableIndex = 0;
    uint oldStrokeDescriptorTableIndex = 0xFFFFFFFF;
    uint drawingAttributesTableIndex = 0;
    uint oldDrawingAttributesTableIndex = 0xFFFFFFFF;
    uint metricDescriptorTableIndex = 0;
    uint oldMetricDescriptorTableIndex = 0xFFFFFFFF;
    uint transformTableIndex = 0;
    uint oldTransformTableIndex = 0xFFFFFFFF;

    GuidList guidList = new GuidList();
    int strokeIndex = 0;

    StylusPointDescription currentStylusPointDescription = null;
    Matrix currentTabletToInkTransform = Matrix.Identity;

    _strokeDescriptorTable = new System.Collections.Generic.List<StrokeDescriptor>();
    _drawingAttributesTable = new System.Collections.Generic.List<DrawingAttributes>();
    _transformTable = new System.Collections.Generic.List<TransformDescriptor>();
    _metricTable = new System.Collections.Generic.List<MetricBlock>();

    // First make sure this ink is empty
    if (0 != _coreStrokes.Count || _coreStrokes.ExtendedProperties.Count != 0)
    {
        throw new InvalidOperationException(ISFDebugMessage("ISF decoder cannot operate on non-empty ink container"));
    }
#if OLD_ISF
    //
    // store a compressor reference at this scope, if it is needed (if there is a compresson header) and
    // therefore instanced during this routine, we will dispose of it
    // in the finally block
    //
    Compressor compressor = null;

    try
    {
#endif
    // First read the isfTag; a valid ISF stream always begins with tag 0.
    uint uiTag;
    uint localBytesDecoded = SerializationHelper.Decode(inputStream, out uiTag);

    if (0x00 != uiTag)
        throw new ArgumentException(SR.Get(SRID.InvalidStream));

    // Now read the size of the stream (payload byte count after this header)
    localBytesDecoded = SerializationHelper.Decode(inputStream, out remainingBytesInStream);

    ISFDebugTrace("Decoded Stream Size in Bytes: " + remainingBytesInStream.ToString());
    if (0 == remainingBytesInStream)
        return;

    while (0 < remainingBytesInStream)
    {
        bytesDecodedInCurrentTag = 0;

        // First read the isfTag of the next record
        localBytesDecoded = SerializationHelper.Decode(inputStream, out uiTag);
        isfTag = (KnownTagCache.KnownTagIndex)uiTag;
        if (remainingBytesInStream >= localBytesDecoded)
            remainingBytesInStream -= localBytesDecoded;
        else
        {
            throw new ArgumentException(ISFDebugMessage("Invalid ISF data"));
        }

        ISFDebugTrace("Decoding Tag: " + ((KnownTagCache.KnownTagIndex)isfTag).ToString());
        switch (isfTag)
        {
            // These tags are all followed by an explicit payload size;
            // decode the size first, then dispatch on the tag again below.
            case KnownTagCache.KnownTagIndex.GuidTable:
            case KnownTagCache.KnownTagIndex.DrawingAttributesTable:
            case KnownTagCache.KnownTagIndex.DrawingAttributesBlock:
            case KnownTagCache.KnownTagIndex.StrokeDescriptorTable:
            case KnownTagCache.KnownTagIndex.StrokeDescriptorBlock:
            case KnownTagCache.KnownTagIndex.MetricTable:
            case KnownTagCache.KnownTagIndex.MetricBlock:
            case KnownTagCache.KnownTagIndex.TransformTable:
            case KnownTagCache.KnownTagIndex.ExtendedTransformTable:
            case KnownTagCache.KnownTagIndex.Stroke:
            case KnownTagCache.KnownTagIndex.CompressionHeader:
            case KnownTagCache.KnownTagIndex.PersistenceFormat:
            case KnownTagCache.KnownTagIndex.HimetricSize:
            case KnownTagCache.KnownTagIndex.StrokeIds:
            {
                localBytesDecoded = SerializationHelper.Decode(inputStream, out bytesDecodedInCurrentTag);
                if (remainingBytesInStream < (localBytesDecoded + bytesDecodedInCurrentTag))
                {
                    throw new ArgumentException(ISFDebugMessage("Invalid ISF data"), "inputStream");
                }
                remainingBytesInStream -= localBytesDecoded;

                // Based on the isfTag figure out what information we're loading
                switch (isfTag)
                {
                    case KnownTagCache.KnownTagIndex.GuidTable:
                    {
                        // Load guid Table
                        localBytesDecoded = guidList.Load(inputStream, bytesDecodedInCurrentTag);
                        break;
                    }
                    case KnownTagCache.KnownTagIndex.DrawingAttributesTable:
                    {
                        // Load drawing attributes table
                        localBytesDecoded = LoadDrawAttrsTable(inputStream, guidList, bytesDecodedInCurrentTag);
                        drawingAttributesBlockDecoded = true;
                        break;
                    }
                    case KnownTagCache.KnownTagIndex.DrawingAttributesBlock:
                    {
                        //initialize to V1 defaults, we do it this way as opposed
                        //to dr.DrawingFlags = 0 because this was a perf hot spot
                        //and instancing the epc first mitigates it
                        ExtendedPropertyCollection epc = new ExtendedPropertyCollection();
                        epc.Add(KnownIds.DrawingFlags, DrawingFlags.Polyline);
                        DrawingAttributes dr = new DrawingAttributes(epc);

                        localBytesDecoded = DrawingAttributeSerializer.DecodeAsISF(inputStream, guidList, bytesDecodedInCurrentTag, dr);
                        _drawingAttributesTable.Add(dr);
                        drawingAttributesBlockDecoded = true;
                        break;
                    }
                    case KnownTagCache.KnownTagIndex.StrokeDescriptorTable:
                    {
                        // Load stroke descriptor table
                        localBytesDecoded = DecodeStrokeDescriptorTable(inputStream, bytesDecodedInCurrentTag);
                        strokeDescriptorBlockDecoded = true;
                        break;
                    }
                    case KnownTagCache.KnownTagIndex.StrokeDescriptorBlock:
                    {
                        // Load a single stroke descriptor
                        localBytesDecoded = DecodeStrokeDescriptorBlock(inputStream, bytesDecodedInCurrentTag);
                        strokeDescriptorBlockDecoded = true;
                        break;
                    }
                    case KnownTagCache.KnownTagIndex.MetricTable:
                    {
                        // Load Metric Table
                        localBytesDecoded = DecodeMetricTable(inputStream, bytesDecodedInCurrentTag);
                        metricBlockDecoded = true;
                        break;
                    }
                    case KnownTagCache.KnownTagIndex.MetricBlock:
                    {
                        // Load a single Metric Block; a lone block replaces the table
                        MetricBlock blk;

                        localBytesDecoded = DecodeMetricBlock(inputStream, bytesDecodedInCurrentTag, out blk);
                        _metricTable.Clear();
                        _metricTable.Add(blk);
                        metricBlockDecoded = true;
                        break;
                    }
                    case KnownTagCache.KnownTagIndex.TransformTable:
                    {
                        // Load Transform Table (float precision)
                        localBytesDecoded = DecodeTransformTable(inputStream, bytesDecodedInCurrentTag, false);
                        transformDecoded = true;
                        break;
                    }
                    case KnownTagCache.KnownTagIndex.ExtendedTransformTable:
                    {
                        // non-double transform table should have already been loaded
                        if (!transformDecoded)
                        {
                            throw new ArgumentException(ISFDebugMessage("Invalid ISF data"));
                        }

                        // Load double-sized Transform Table
                        localBytesDecoded = DecodeTransformTable(inputStream, bytesDecodedInCurrentTag, true);
                        break;
                    }
                    case KnownTagCache.KnownTagIndex.PersistenceFormat:
                    {
                        uint fmt;

                        localBytesDecoded = SerializationHelper.Decode(inputStream, out fmt);

                        // Set the appropriate persistence information
                        // (0 = raw ISF, 1 = ISF fortified inside a GIF)
                        if (0 == fmt)
                        {
                            CurrentPersistenceFormat = PersistenceFormat.InkSerializedFormat;
                        }
                        else if (0x00000001 == fmt)
                        {
                            CurrentPersistenceFormat = PersistenceFormat.Gif;
                        }
                        break;
                    }
                    case KnownTagCache.KnownTagIndex.HimetricSize:
                    {
                        // Loads the Hi Metric Size for Fortified GIFs (signed X then Y)
                        int sz;

                        localBytesDecoded = SerializationHelper.SignDecode(inputStream, out sz);
                        if (localBytesDecoded > remainingBytesInStream)
                            throw new ArgumentException(ISFDebugMessage("Invalid ISF data"));

                        _himetricSize.X = (double)sz;

                        localBytesDecoded += SerializationHelper.SignDecode(inputStream, out sz);

                        _himetricSize.Y = (double)sz;
                        break;
                    }
                    case KnownTagCache.KnownTagIndex.CompressionHeader:
                    {
#if OLD_ISF
                        byte[] data = new byte[bytesDecodedInCurrentTag];

                        // read the header from the stream
                        uint bytesRead = StrokeCollectionSerializer.ReliableRead(inputStream, data, bytesDecodedInCurrentTag);
                        if (bytesDecodedInCurrentTag != bytesRead)
                        {
                            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Read different size from stream then expected"), "isfStream");
                        }
                        uint size = bytesDecodedInCurrentTag;
                        compressor = new Compressor(data, ref size);

                        // in case the actual number of bytes read by the compressor
                        // is less than the encoder had expected (e.g. compression
                        // header was encoded as 10 bytes, but only 7 bytes were read)
                        // then we don't want to adjust the stream position because
                        // there are likely other following tags that are encoded
                        // after the compression tag. This should never happen,
                        // so just fail if the compressor is broken or the ISF is
                        // corrupted.
                        if (size != bytesDecodedInCurrentTag)
                        {
                            throw new InvalidOperationException(ISFDebugMessage("Compressor intialization reported inconsistent size"));
                        }
#else
                        //just advance the inputstream position, we don't need
                        //no compression header in the new isf decoding
                        inputStream.Seek(bytesDecodedInCurrentTag, SeekOrigin.Current);
#endif
                        localBytesDecoded = bytesDecodedInCurrentTag;
                        break;
                    }
                    case KnownTagCache.KnownTagIndex.StrokeIds:
                    {
                        localBytesDecoded = LoadStrokeIds(inputStream, bytesDecodedInCurrentTag);
                        break;
                    }
                    case KnownTagCache.KnownTagIndex.Stroke:
                    {
                        ISFDebugTrace("   Decoding Stroke Id#(" + (strokeIndex + 1).ToString() + ")");

                        StrokeDescriptor strokeDescriptor = null;

                        // Load the stroke descriptor based on the index from the list of unique
                        // stroke descriptors
                        if (strokeDescriptorBlockDecoded)
                        {
                            if (oldStrokeDescriptorTableIndex != strokeDescriptorTableIndex)
                            {
                                if (_strokeDescriptorTable.Count <= strokeDescriptorTableIndex)
                                    throw new ArgumentException(ISFDebugMessage("Invalid ISF data"));
                            }

                            strokeDescriptor = _strokeDescriptorTable[(int)strokeDescriptorTableIndex];
                        }

                        // use new transform if the last transform is uninit'd or has changed
                        if (oldTransformTableIndex != transformTableIndex)
                        {
                            // if transform was specified in the ISF stream
                            if (transformDecoded)
                            {
                                if (_transformTable.Count <= transformTableIndex)
                                    throw new ArgumentException(ISFDebugMessage("Invalid ISF data"));

                                // Load the transform descriptor based on the index from the list of unique
                                // transforn descriptors
                                currentTabletToInkTransform = LoadTransform(_transformTable[(int)transformTableIndex]);
                            }

                            oldTransformTableIndex = transformTableIndex; // cache the transform by remembering the index

                            // since ISF is stored in HIMETRIC, and we want to expose packet data
                            // as Avalon units, we'll update the convert the transform before loading the stroke
                            currentTabletToInkTransform.Scale(StrokeCollectionSerializer.HimetricToAvalonMultiplier, StrokeCollectionSerializer.HimetricToAvalonMultiplier);
                        }

                        MetricBlock metricBlock = null;

                        // Load the metric block based on the index from the list of unique metric blocks
                        if (metricBlockDecoded)
                        {
                            if (oldMetricDescriptorTableIndex != metricDescriptorTableIndex)
                            {
                                if (_metricTable.Count <= metricDescriptorTableIndex)
                                    throw new ArgumentException(ISFDebugMessage("Invalid ISF data"));
                            }

                            metricBlock = _metricTable[(int)metricDescriptorTableIndex];
                        }

                        DrawingAttributes activeDrawingAttributes = null;

                        // Load the drawing attributes based on the index from the list of unique drawing attributes
                        if (drawingAttributesBlockDecoded)
                        {
                            if (oldDrawingAttributesTableIndex != drawingAttributesTableIndex)
                            {
                                if (_drawingAttributesTable.Count <= drawingAttributesTableIndex)
                                    throw new ArgumentException(ISFDebugMessage("Invalid ISF data"));
                                oldDrawingAttributesTableIndex = drawingAttributesTableIndex;
                            }
                            DrawingAttributes currDA = (DrawingAttributes)_drawingAttributesTable[(int)drawingAttributesTableIndex];
                            //we always clone so we don't get strokes that share DAs, which can lead
                            //to all sorts of unpredictable behavior (ex: see Windows OS Bugs 1450047)
                            activeDrawingAttributes = currDA.Clone();
                        }

                        // if we didn't find an existing da to use, instance a new one
                        if (activeDrawingAttributes == null)
                        {
                            activeDrawingAttributes = new DrawingAttributes();
                        }

                        // Now create the StylusPacketDescription from the stroke descriptor and metric block
                        if (oldMetricDescriptorTableIndex != metricDescriptorTableIndex || oldStrokeDescriptorTableIndex != strokeDescriptorTableIndex)
                        {
                            currentStylusPointDescription = BuildStylusPointDescription(strokeDescriptor, metricBlock, guidList);
                            oldStrokeDescriptorTableIndex = strokeDescriptorTableIndex;
                            oldMetricDescriptorTableIndex = metricDescriptorTableIndex;
                        }

                        // Load the stroke
                        Stroke localStroke;
#if OLD_ISF
                        localBytesDecoded = StrokeSerializer.DecodeStroke(inputStream, bytesDecodedInCurrentTag, guidList, strokeDescriptor, currentStylusPointDescription, activeDrawingAttributes, currentTabletToInkTransform, compressor, out localStroke);
#else
                        localBytesDecoded = StrokeSerializer.DecodeStroke(inputStream, bytesDecodedInCurrentTag, guidList, strokeDescriptor, currentStylusPointDescription, activeDrawingAttributes, currentTabletToInkTransform, out localStroke);
#endif

                        if (localStroke != null)
                        {
                            _coreStrokes.AddWithoutEvent(localStroke);
                            strokeIndex++;
                        }
                        break;
                    }
                    default:
                    {
                        // Unreachable: every tag admitted by the outer case list is handled above.
                        throw new InvalidOperationException(ISFDebugMessage("Invalid ISF tag logic"));
                    }
                }

                // if this isfTag's decoded size != expected size, then error out
                if (localBytesDecoded != bytesDecodedInCurrentTag)
                {
                    throw new ArgumentException(ISFDebugMessage("Invalid ISF data"));
                }
                break;
            }
            case KnownTagCache.KnownTagIndex.Transform:
            case KnownTagCache.KnownTagIndex.TransformIsotropicScale:
            case KnownTagCache.KnownTagIndex.TransformAnisotropicScale:
            case KnownTagCache.KnownTagIndex.TransformRotate:
            case KnownTagCache.KnownTagIndex.TransformTranslate:
            case KnownTagCache.KnownTagIndex.TransformScaleAndTranslate:
            {
                // Load a single Transform Block; a lone block replaces the table
                TransformDescriptor xform;

                bytesDecodedInCurrentTag = DecodeTransformBlock(inputStream, isfTag, remainingBytesInStream, false, out xform);
                transformDecoded = true;
                _transformTable.Clear();
                _transformTable.Add(xform);
                break;
            }
            case KnownTagCache.KnownTagIndex.TransformTableIndex:
            {
                // Load the Index into the Transform Table which will be used by the stroke following this till
                // a next different Index is found
                bytesDecodedInCurrentTag = SerializationHelper.Decode(inputStream, out transformTableIndex);
                break;
            }
            case KnownTagCache.KnownTagIndex.MetricTableIndex:
            {
                // Load the Index into the Metric Table which will be used by the stroke following this till
                // a next different Index is found
                bytesDecodedInCurrentTag = SerializationHelper.Decode(inputStream, out metricDescriptorTableIndex);
                break;
            }
            case KnownTagCache.KnownTagIndex.DrawingAttributesTableIndex:
            {
                // Load the Index into the Drawing Attributes Table which will be used by the stroke following this till
                // a next different Index is found
                bytesDecodedInCurrentTag = SerializationHelper.Decode(inputStream, out drawingAttributesTableIndex);
                break;
            }
            case KnownTagCache.KnownTagIndex.InkSpaceRectangle:
            {
                // Loads the Ink Space Rectangle information
                bytesDecodedInCurrentTag = DecodeInkSpaceRectangle(inputStream, remainingBytesInStream);
                break;
            }
            case KnownTagCache.KnownTagIndex.StrokeDescriptorTableIndex:
            {
                // Load the Index into the Stroke Descriptor Table which will be used by the stroke following this till
                // a next different Index is found
                bytesDecodedInCurrentTag = SerializationHelper.Decode(inputStream, out strokeDescriptorTableIndex);
                break;
            }
            default:
            {
                if ((uint)isfTag >= KnownIdCache.CustomGuidBaseIndex || ((uint)isfTag >= KnownTagCache.KnownTagCount && ((uint)isfTag < (KnownTagCache.KnownTagCount + KnownIdCache.OriginalISFIdTable.Length))))
                {
                    ISFDebugTrace("  CUSTOM_GUID=" + guidList.FindGuid(isfTag).ToString());

                    // Loads any custom property data
                    bytesDecodedInCurrentTag = remainingBytesInStream;

                    Guid guid = guidList.FindGuid(isfTag);
                    if (guid == Guid.Empty)
                    {
                        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Global Custom Attribute tag embedded in ISF stream does not match guid table"), "inkdata");
                    }

                    object data;

                    // load the custom property data from the stream (and decode the type)
                    localBytesDecoded = ExtendedPropertySerializer.DecodeAsISF(inputStream, bytesDecodedInCurrentTag, guidList, isfTag, ref guid, out data);
                    if (localBytesDecoded > bytesDecodedInCurrentTag)
                    {
                        throw new ArgumentException(ISFDebugMessage("Invalid ISF data"), "inkdata");
                    }

                    // add the guid/data pair into the property collection (don't redecode the type)
                    _coreStrokes.ExtendedProperties[guid] = data;
                }
                else
                {
                    // Skip objects that this library doesn't know about
                    // First read the size associated with this unknown isfTag
                    localBytesDecoded = SerializationHelper.Decode(inputStream, out bytesDecodedInCurrentTag);
                    if (remainingBytesInStream < (localBytesDecoded + bytesDecodedInCurrentTag))
                    {
                        throw new ArgumentException(ISFDebugMessage("Invalid ISF data"));
                    }
                    else
                    {
                        inputStream.Seek(bytesDecodedInCurrentTag + localBytesDecoded, SeekOrigin.Current);
                    }
                }

                bytesDecodedInCurrentTag = localBytesDecoded;
                break;
            }
        }
        ISFDebugTrace("    Size = " + bytesDecodedInCurrentTag.ToString());

        if (bytesDecodedInCurrentTag > remainingBytesInStream)
        {
            throw new ArgumentException(ISFDebugMessage("Invalid ISF data"));
        }

        // update remaining ISF buffer length with decoded so far
        remainingBytesInStream -= bytesDecodedInCurrentTag;
    }
#if OLD_ISF
    }
    finally
    {
        // dispose of the compressor instanced when a compression header was decoded
        if (null != compressor)
        {
            compressor.Dispose();
            compressor = null;
        }
    }
#endif
    if (0 != remainingBytesInStream)
        throw new ArgumentException(ISFDebugMessage("Invalid ISF data"), "inkdata");
}
/// <summary>
/// Loads a single ExtendedProperty from the stream and adds it to the list. Tag may be passed as in
/// the case of Stroke ExtendedPropertyCollection where the tag is stored in the stroke descriptor, or 0 when the tag
/// is embedded in the stream.
/// </summary>
/// <param name="stream">Memory buffer to load from</param>
/// <param name="cbSize">Maximum length of buffer to read</param>
/// <param name="guidList">Guid cache to read from</param>
/// <param name="tag">Guid tag to lookup (0 = read the tag from the stream)</param>
/// <param name="guid">Guid of property</param>
/// <param name="data">Data of property</param>
/// <returns>Number of bytes read from the stream</returns>
#endif
internal static uint DecodeAsISF(Stream stream, uint cbSize, GuidList guidList, KnownTagCache.KnownTagIndex tag, ref Guid guid, out object data)
{
    uint cb, cbRead = 0;
    uint cbTotal = cbSize; // bytes still available in this record

    if (0 == cbSize)
    {
        throw new InvalidOperationException(SR.Get(SRID.EmptyDataToLoad));
    }

    if (0 == tag) // no tag is passed, it must be embedded in the data
    {
        uint uiTag;

        cb = SerializationHelper.Decode(stream, out uiTag);
        tag = (KnownTagCache.KnownTagIndex)uiTag;

        if (cb > cbTotal)
        {
            throw new ArgumentException(SR.Get(SRID.InvalidSizeSpecified), "cbSize");
        }

        cbTotal -= cb;
        cbRead += cb;
        System.Diagnostics.Debug.Assert(guid == Guid.Empty);
        guid = guidList.FindGuid(tag);
    }

    if (guid == Guid.Empty)
    {
        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Custom Attribute tag embedded in ISF stream does not match guid table"), "tag");
    }

    // Try and find the size; 0 means the size is variable and encoded in the stream
    uint size = GuidList.GetDataSizeIfKnownGuid(guid);

    if (size > cbTotal)
    {
        throw new ArgumentException(SR.Get(SRID.InvalidSizeSpecified), "cbSize");
    }

    // if the size is 0
    if (0 == size)
    {
        // Size must be embedded in the stream. Find out the compressed data size
        cb = SerializationHelper.Decode(stream, out size);

        // +1 accounts for the leading compression-algorithm byte of the payload
        uint cbInsize = size + 1;

        cbRead += cb;
        cbTotal -= cb;
        if (cbInsize > cbTotal)
        {
            // was a bare ArgumentException(); give it the standard diagnostic
            // used by every other size-mismatch throw in this method
            throw new ArgumentException(SR.Get(SRID.InvalidSizeSpecified), "cbSize");
        }

        byte[] bytes = new byte[cbInsize];

        uint bytesRead = (uint)stream.Read(bytes, 0, (int)cbInsize);
        if (cbInsize != bytesRead)
        {
            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Read different size from stream then expected"), "cbSize");
        }

        cbRead += cbInsize;
        cbTotal -= cbInsize;

        //Find out the Decompressed buffer size
        using (MemoryStream decompressedStream = new MemoryStream(Compressor.DecompressPropertyData(bytes)))
        {
            // Add the property
            data = ExtendedPropertySerializer.DecodeAttribute(guid, decompressedStream);
        }
    }
    else
    {
        // For known size data, we just read the data directly from the stream
        byte[] bytes = new byte[size];

        uint bytesRead = (uint)stream.Read(bytes, 0, (int)size);
        if (size != bytesRead)
        {
            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Read different size from stream then expected"), "cbSize");
        }

        using (MemoryStream subStream = new MemoryStream(bytes))
        {
            data = ExtendedPropertySerializer.DecodeAttribute(guid, subStream);
        }

        cbTotal -= size;
        cbRead += size;
    }

    return cbRead;
}
/// <summary>
/// Saves the packet (point) data of a stroke into a stream of bytes, one
/// property array at a time (x's, y's, optionally pressure, ...).
/// </summary>
/// <param name="stroke">Stroke to save</param>
/// <param name="stream">null to calculate size only (no bytes are written)</param>
/// <param name="strokeLookupEntry">per-stroke cache holding pre-converted ISF data and compression choices</param>
/// <returns>number of bytes written (or that would be written when stream is null)</returns>
#endif
static uint SavePackets(
    Stroke stroke,
    Stream stream,
#if OLD_ISF
    Compressor compressor,
#endif
    StrokeCollectionSerializer.StrokeLookupEntry strokeLookupEntry)
{
    // First write or calculate how many points are there
    uint pointCount = (uint)stroke.StylusPoints.Count;
    uint localBytesWritten = (stream != null) ? SerializationHelper.Encode(stream, pointCount) : SerializationHelper.VarSize(pointCount);
    byte compressionAlgorithm;

    // one int[] per stylus point property, already converted to ISF form
    int[][] outputArrays = strokeLookupEntry.ISFReadyStrokeData;
    //We don't serialize button data, see Windows OS Bugs 1413460 for details
    //int valuesPerPoint = stroke.StylusPoints.Description.GetOutputArrayLengthPerPoint();
    //int buttonCount = stroke.StylusPoints.Description.ButtonCount;

    ReadOnlyCollection<StylusPointPropertyInfo> propertyInfos = stroke.StylusPoints.Description.GetStylusPointProperties();
    int i = 0;
    for (; i < propertyInfos.Count; i++)
    {
        StylusPointPropertyInfo propertyInfo = propertyInfos[i];
        // index 2 is NormalPressure in a StylusPointDescription
        if (i == 2 && !strokeLookupEntry.StorePressure)
        {
            //
            // only store pressure if we need to
            //
            continue;
        }
        if (propertyInfo.IsButton)
        {
            //
            // we're at the buttons, handle this below
            //
            break;
        }
        compressionAlgorithm = strokeLookupEntry.CompressionData;
        localBytesWritten += SavePacketPropertyData(outputArrays[i],
                                                    stream,
#if OLD_ISF
                                                    compressor,
#endif
                                                    propertyInfo.Id,
                                                    ref compressionAlgorithm);
    }

    /* We don't serialize button data, see Windows OS Bugs 1413460 for details

    // Now write all button data. Button data is stored as if it is another packet property
    // with size (cbuttoncount + 7)/8 bytes and corresponding guids are stored in the packet
    // description. Button data is only stored if buttons are present in the description and there
    // are packets in the stroke
    if (buttonCount > 0 && pointCount > 0)
    {
        Debug.Assert(i == valuesPerPoint - 1);
        BitStreamWriter bitWriter = new BitStreamWriter();

        //
        // Get the array of button data (i is still pointing at it)
        //
        int[] buttonData = outputArrays[i];

        for (int x = 0; x < pointCount; x++)
        {
            //
            // each int in the button data array contains buttonCount number
            // of bits that need to be written to the BitStreamWriter
            // the BitStreamWriter takes bytes at a time.  We always write the most
            // signifigant bits first
            //
            int uncompactedButtonDataForPoint = buttonData[x];

            // calculate the number of full bytes used for buttons per packet
            // Example: 10 buttons would require 1 full byte
            // but 8 would require
            int fullBytesForButtonsPerPacket = buttonCount / Native.BitsPerByte;

            // calculate the number of bits that spill beyond the full byte boundary
            // Example: 10 buttons would require 2 extra bits (8 fit in a full byte)
            int bitsToWrite = buttonCount % Native.BitsPerByte;

            for (; fullBytesForButtonsPerPacket >= 0; fullBytesForButtonsPerPacket--)
            {
                byte byteOfButtonData =
                    Convert.ToByte(uncompactedButtonDataForPoint >> (fullBytesForButtonsPerPacket * Native.BitsPerByte));
                //
                // write 8 or less bytes to the bitwriter
                // checking for 0 handles the case where we're writing 8, 16 or 24 bytes
                // and bitsToWrite is initialize to zero
                //
                if (bitsToWrite > 0)
                {
                    bitWriter.Write(byteOfButtonData, bitsToWrite);
                }
                if (fullBytesForButtonsPerPacket > 0)
                {
                    bitsToWrite = Native.BitsPerByte;
                }
            }
        }

        // retrieve the button bytes
        byte[] packedButtonData = bitWriter.ToBytes();

        if (packedButtonData.Length !=
            ((buttonCount * pointCount + 7) / Native.BitsPerByte))
        {
            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Packed button length not equal to expected length"));
        }

        // write out the packed button data to the output stream
        stream.Write(packedButtonData, 0, packedButtonData.Length);
        localBytesWritten += (uint)packedButtonData.Length;
    }
    */
    return localBytesWritten;
}
/// <summary>
/// Encodes a single stroke into the given stream: first the packet (point)
/// data, then any extended properties attached to the stroke.
/// </summary>
/// <param name="stroke">Stroke to save</param>
/// <param name="stream">null to calculate only the size</param>
/// <param name="compressionAlgorithm">algorithm byte to use for extended property data</param>
/// <param name="guidList">guid cache used to resolve extended property tags</param>
/// <param name="strokeLookupEntry">per-stroke serialization cache</param>
/// <returns>total number of bytes written (or sized)</returns>
#endif
internal static uint EncodeStroke(
    Stroke stroke,
    Stream stream,
#if OLD_ISF
    Compressor compressor,
#endif
    byte compressionAlgorithm,
    GuidList guidList,
    StrokeCollectionSerializer.StrokeLookupEntry strokeLookupEntry)
{
    // Packet data always goes first.
    uint bytesWritten = SavePackets(stroke,
                                    stream,
#if OLD_ISF
                                    compressor,
#endif
                                    strokeLookupEntry);

    // Follow with the stroke's extended properties, if any exist.
    if (stroke.ExtendedProperties.Count > 0)
    {
        bytesWritten += ExtendedPropertySerializer.EncodeAsISF(stroke.ExtendedProperties, stream, guidList, compressionAlgorithm, false);
    }

    return bytesWritten;
}
/// <summary>
/// Loads packets from the input stream. For example, packets are all of the x's in a stroke.
/// Each stylus point property is stored as one compressed run of values; the runs are
/// decompressed in order and interleaved into a single per-point int array, followed by
/// optionally bit-packed button state.
/// </summary>
/// <returns>number of bytes of the stroke block actually consumed</returns>
#endif
static uint LoadPackets(Stream inputStream,
                        uint totalBytesInStrokeBlockOfIsfStream,
#if OLD_ISF
                        Compressor compressor,
#endif
                        StylusPointDescription stylusPointDescription,
                        Matrix transform,
                        out StylusPointCollection stylusPoints)
{
    stylusPoints = null;

    if (0 == totalBytesInStrokeBlockOfIsfStream)
        return 0;

    uint locallyDecodedBytesRemaining = totalBytesInStrokeBlockOfIsfStream;
    uint localBytesRead;

    // First read the no of packets
    uint pointCount;

    localBytesRead = SerializationHelper.Decode(inputStream, out pointCount);
    if (locallyDecodedBytesRemaining < localBytesRead)
        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Invalid ISF data"));

    locallyDecodedBytesRemaining -= localBytesRead;
    if (0 == locallyDecodedBytesRemaining)
        return localBytesRead;

    // Allocate packet properties
    int intsPerPoint = stylusPointDescription.GetInputArrayLengthPerPoint();
    int buttonCount = stylusPointDescription.ButtonCount;
    // all buttons together occupy at most one int per point
    int buttonIntsPerPoint = (buttonCount > 0 ? 1 : 0);
    int valueIntsPerPoint = intsPerPoint - buttonIntsPerPoint;

    //add one int per point for button data if it exists
    int[] rawPointData = new int[pointCount * intsPerPoint];   // interleaved per-point output
    int[] packetDataSet = new int[pointCount];                 // scratch: one decompressed property run

    // copy the rest of the data from the stroke data
    byte[] inputBuffer = new byte[locallyDecodedBytesRemaining];

    // Read the input data into the byte array
    uint bytesRead = StrokeCollectionSerializer.ReliableRead(inputStream, inputBuffer, locallyDecodedBytesRemaining);

    if ( bytesRead != locallyDecodedBytesRemaining )
    {
        // Make sure the bytes read are expected. If not, we should bail out.
        // An exception will be thrown.
        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Invalid ISF data"));
    }

    // at this point, we have read all of the bytes remaining in the input
    // stream's packet block, and while we will keep the bytes remaining
    // variable for positioning within the local byte buffer, we should
    // not read from the stream again, or we risk reading into another
    // ISF tag's block.
    int originalPressureIndex = stylusPointDescription.OriginalPressureIndex;

    // Decompress one property run per iteration, consuming the front of inputBuffer.
    for (int i = 0; i < valueIntsPerPoint && locallyDecodedBytesRemaining > 0; i++)
    {
        // in: buffer size available; out: bytes actually consumed by this run
        localBytesRead = locallyDecodedBytesRemaining;
        Compressor.DecompressPacketData(
#if OLD_ISF
            compressor,
#endif
            inputBuffer,
            ref localBytesRead,
            packetDataSet);

        if (localBytesRead > locallyDecodedBytesRemaining)
            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Invalid ISF data"));

        //
        // packetDataSet is like this:
        // -------------
        // |X|X|X|X|X|X|
        // -------------
        //
        // we need to copy into rawPointData at
        //
        // -------------
        // |X| |X| |X| |
        // -------------
        //
        // additionally, for NormalPressure, if it exists and was
        // reordered in the StylusPointDescription, we need to account for that here
        //
        int tempi = i;
        if (tempi > 1 &&
            originalPressureIndex != -1 &&
            originalPressureIndex != StylusPointDescription.RequiredPressureIndex/*2*/)
        {
            //
            // NormalPressure exists in the packet stream and was not at index 2
            // StylusPointDescription enforces that NormalPressure is at index 2
            // so we need to copy packet data beyond X and Y into a different location
            //
            // take the example of the original StylusPointDescription
            //  |X|Y|XTilt|YTilt|NormalPressure|Rotation|
            //
            // originalPressureIndex is 4, and we know it is now 2
            // which means that everything before index 4 has been shifted one
            // and everything after index 4 is still good. Index 4 should be copied to index 2
            if (tempi == originalPressureIndex)
            {
                tempi = 2;
            }
            else if (tempi < originalPressureIndex)
            {
                tempi++;
            }
        }

        locallyDecodedBytesRemaining -= localBytesRead;
        // scatter this property's values into the interleaved per-point layout
        for (int j = 0, x = 0; j < pointCount; j++, x += intsPerPoint)
        {
            rawPointData[x + tempi] = packetDataSet[j];
        }

        // Move the array elements to point to next set of compressed data
        // (shift the unconsumed tail of inputBuffer to the front)
        for (uint u = 0; u < locallyDecodedBytesRemaining; u++)
        {
            inputBuffer[u] = inputBuffer[u + (int)localBytesRead];
        }
    }

    // Now that we've read packet data, we must read button data if it is there
    byte[] buttonData = null;

    // since the button state is a simple bit value (either down or up), the button state
    // for a series of packets is packed into an array of bits rather than integers
    // For example, if there are 16 packets, and 2 buttons, then 32 bits can be used (e.g. 1 32-bit integer)
    if (0 != locallyDecodedBytesRemaining && buttonCount > 0)
    {
        // calculate the number of full bytes used for buttons per packet
        // Example: 10 buttons would require 1 full byte
        int fullBytesForButtonsPerPacket = buttonCount / Native.BitsPerByte;

        // calculate the number of bits that spill beyond the full byte boundary
        // Example: 10 buttons would require 2 extra bits (8 fit in a full byte)
        int partialBitsForButtonsPerPacket = buttonCount % Native.BitsPerByte;

        // Now figure out how many bytes we need to read for the button data
        localBytesRead = (uint)((buttonCount * pointCount + 7) / Native.BitsPerByte);
        if (localBytesRead > locallyDecodedBytesRemaining)
        {
            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Buffer range is smaller than expected expected size"));
        }

        locallyDecodedBytesRemaining -= localBytesRead;

        int buttonSizeInBytes = (buttonCount + 7)/Native.BitsPerByte;
        buttonData = new byte[pointCount * buttonSizeInBytes];

        // Create a bit reader to unpack the bits from the ISF stream into
        // loosely packed byte buffer (e.g. button data aligned on full byte
        // boundaries only)
        BitStreamReader bitReader =
            new BitStreamReader(inputBuffer, (uint)buttonCount * pointCount);

        // unpack the button data into each packet
        int byteCounter = 0;
        while (!bitReader.EndOfStream)
        {
            // unpack the fully bytes first
            for (int fullBytes = 0; fullBytes < fullBytesForButtonsPerPacket; fullBytes++)
            {
                buttonData[byteCounter++] = bitReader.ReadByte(Native.BitsPerByte);
            }
            // then unpack a single partial byte if necessary
            if (partialBitsForButtonsPerPacket > 0)
            {
                buttonData[byteCounter++] = bitReader.ReadByte((int)partialBitsForButtonsPerPacket);
            }
        }

        // if the number of bytes allocated != necessary byte amount then an error occurred
        if (byteCounter != buttonData.Length)
        {
            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Button data length not equal to expected length"));
        }

        //
        // set the point data in the raw array
        //
        FillButtonData( (int)pointCount,
                        buttonCount,
                        valueIntsPerPoint, //gives the first button index
                        rawPointData,
                        buttonData);
    }

    stylusPoints = new StylusPointCollection(stylusPointDescription, rawPointData, null, transform);

    // if we read too far into the stream (e.g. the packets were compressed)
    // then move the stream pointer back to the end of the actual packet
    // data before returning. This keeps the return value on the function
    // (representing bytes read) honest and consistent with the stream
    // position movement in this function.
    if (0 != locallyDecodedBytesRemaining)
    {
        inputStream.Seek(0 - (long)locallyDecodedBytesRemaining, SeekOrigin.Current);
    }

    return totalBytesInStrokeBlockOfIsfStream - locallyDecodedBytesRemaining;
}
/// <summary>
/// This functions loads a stroke from a memory stream based on the descriptor and GuidList. It returns
/// the no of bytes it has read from the stream to correctly load the stream, which should be same as
/// the value of the size parameter. If they are unequal throws ArgumentException. Stroke descriptor is
/// used to load the packetproperty as well as ExtendedPropertyCollection on this stroke. Compressor is used
/// to decompress the data.
/// </summary>
/// <param name="stream">ISF stream positioned at the start of this stroke block</param>
/// <param name="totalBytesInStrokeBlockOfIsfStream">Total byte size of the stroke block; every byte must be consumed</param>
/// <param name="guidList">Maps ISF tags to property guids</param>
/// <param name="strokeDescriptor">Template describing which packet/extended properties to expect</param>
/// <param name="stylusPointDescription">Describes the per-point packet layout</param>
/// <param name="transform">Transform applied to the decoded points</param>
/// <param name="stylusPoints">Receives the decoded stylus points</param>
/// <param name="extendedProperties">Receives decoded stroke extended properties (stays null if none found)</param>
#endif
static uint DecodeISFIntoStroke(
#if OLD_ISF
    Compressor compressor,
#endif
    Stream stream,
    uint totalBytesInStrokeBlockOfIsfStream,
    GuidList guidList,
    StrokeDescriptor strokeDescriptor,
    StylusPointDescription stylusPointDescription,
    Matrix transform,
    out StylusPointCollection stylusPoints,
    out ExtendedPropertyCollection extendedProperties)
{
    stylusPoints = null;
    extendedProperties = null;

    // We do allow a stroke with no packet data
    if (0 == totalBytesInStrokeBlockOfIsfStream)
    {
        return 0;
    }

    uint locallyDecodedBytes;
    uint remainingBytesInStrokeBlock = totalBytesInStrokeBlockOfIsfStream;

    // First try to load any packet data
    locallyDecodedBytes = LoadPackets(
                            stream,
                            remainingBytesInStrokeBlock,
#if OLD_ISF
                            compressor,
#endif
                            stylusPointDescription,
                            transform,
                            out stylusPoints);

    if (locallyDecodedBytes > remainingBytesInStrokeBlock)
        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Packet buffer overflowed the ISF stream"));

    remainingBytesInStrokeBlock -= locallyDecodedBytes;
    if (0 == remainingBytesInStrokeBlock)
    {
        return locallyDecodedBytes;
    }

    // Now read the extended propertes.
    // iTag starts at 1 because the body reads Template[iTag - 1]; the walk
    // stops as soon as the stroke block's byte budget is exhausted.
    for (int iTag = 1; iTag < strokeDescriptor.Template.Count && remainingBytesInStrokeBlock > 0; iTag++)
    {
        KnownTagCache.KnownTagIndex tag = strokeDescriptor.Template[iTag - 1];

        switch (tag)
        {
            case MS.Internal.Ink.InkSerializedFormat.KnownTagCache.KnownTagIndex.StrokePropertyList:
                {
                    // we've found the stroke extended properties. Load them now.
                    while (iTag < strokeDescriptor.Template.Count && remainingBytesInStrokeBlock > 0)
                    {
                        tag = strokeDescriptor.Template[iTag];

                        object data;
                        Guid guid = guidList.FindGuid(tag);
                        if (guid == Guid.Empty)
                        {
                            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Stroke Custom Attribute tag embedded in ISF stream does not match guid table"));
                        }

                        // load the extended property data from the stream (and decode the type)
                        locallyDecodedBytes = ExtendedPropertySerializer.DecodeAsISF(stream, remainingBytesInStrokeBlock, guidList, tag, ref guid, out data);

                        // add the guid/data pair into the property collection (don't redecode the type)
                        if (extendedProperties == null)
                        {
                            extendedProperties = new ExtendedPropertyCollection();
                        }
                        extendedProperties[guid] = data;

                        if (locallyDecodedBytes > remainingBytesInStrokeBlock)
                            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Invalid ISF data"));

                        remainingBytesInStrokeBlock -= locallyDecodedBytes;
                        iTag++;
                    }
                }
                break;

            case MS.Internal.Ink.InkSerializedFormat.KnownTagCache.KnownTagIndex.Buttons:
                {
                    // Next tag is count of buttons and the tags for the button guids
                    iTag += (int)((uint)strokeDescriptor.Template[iTag]) + 1;
                }
                break;

            // ignore any tags embedded in the Stroke block that this
            // version of the ISF decoder doesn't understand
            default:
                {
                    System.Diagnostics.Trace.WriteLine("Ignoring unhandled stroke tag in ISF stroke descriptor");
                }
                break;
        }
    }

    // Now try to load any tagged property data or point property data
    while (remainingBytesInStrokeBlock > 0)
    {
        // Read the tag first
        KnownTagCache.KnownTagIndex tag;
        uint uiTag;

        locallyDecodedBytes = SerializationHelper.Decode(stream, out uiTag);
        tag = (KnownTagCache.KnownTagIndex)uiTag;
        if (locallyDecodedBytes > remainingBytesInStrokeBlock)
            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Invalid ISF data"));

        remainingBytesInStrokeBlock -= locallyDecodedBytes;

        // if it is a point property block
        switch (tag)
        {
            case MS.Internal.Ink.InkSerializedFormat.KnownTagCache.KnownTagIndex.PointProperty:
                {
                    // First load the totalBytesInStrokeBlockOfIsfStream of the point property block.
                    // NOTE(review): cbsize is decoded but never checked against the bytes
                    // actually consumed by the inner loop below -- confirm this is intentional.
                    uint cbsize;

                    locallyDecodedBytes = SerializationHelper.Decode(stream, out cbsize);
                    if (locallyDecodedBytes > remainingBytesInStrokeBlock)
                        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Invalid ISF data"));

                    remainingBytesInStrokeBlock -= locallyDecodedBytes;
                    while (remainingBytesInStrokeBlock > 0)
                    {
                        // First read the tag corresponding to the property
                        locallyDecodedBytes = SerializationHelper.Decode(stream, out uiTag);
                        tag = (KnownTagCache.KnownTagIndex)uiTag;
                        if (locallyDecodedBytes > remainingBytesInStrokeBlock)
                            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Invalid ISF data"));

                        remainingBytesInStrokeBlock -= locallyDecodedBytes;

                        // Now read the packet index for which the property will apply
                        uint propindex;

                        locallyDecodedBytes = SerializationHelper.Decode(stream, out propindex);
                        if (locallyDecodedBytes > remainingBytesInStrokeBlock)
                            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Invalid ISF data"));

                        remainingBytesInStrokeBlock -= locallyDecodedBytes;

                        uint propsize;

                        locallyDecodedBytes = SerializationHelper.Decode(stream, out propsize);
                        if (locallyDecodedBytes > remainingBytesInStrokeBlock)
                            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Invalid ISF data"));

                        remainingBytesInStrokeBlock -= locallyDecodedBytes;

                        // Compressed data totalBytesInStrokeBlockOfIsfStream.
                        // The persisted size excludes one byte (see EncodeAsISF, which
                        // writes length - 1), so account for it here.
                        propsize += 1;

                        // Make sure we have enough data to read
                        if (propsize > remainingBytesInStrokeBlock)
                            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Invalid ISF data"));

                        byte[] in_buffer = new byte[propsize];

                        uint bytesRead = StrokeCollectionSerializer.ReliableRead(stream, in_buffer, propsize);
                        if (propsize != bytesRead)
                        {
                            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Read different size from stream then expected"));
                        }
                        // The decompressed result is deliberately discarded below.
                        byte[] out_buffer = Compressor.DecompressPropertyData(in_buffer);

                        System.Diagnostics.Debug.Assert(false, "ExtendedProperties for points are not supported");

                        // skip the bytes in both success & failure cases
                        // Note: Point ExtendedProperties are discarded
                        remainingBytesInStrokeBlock -= propsize;
                    }
                }
                break;

            default:
                {
                    object data;
                    Guid guid = guidList.FindGuid(tag);
                    if (guid == Guid.Empty)
                    {
                        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Stroke Custom Attribute tag embedded in ISF stream does not match guid table"));
                    }

                    // load the extended property data from the stream (and decode the type)
                    locallyDecodedBytes = ExtendedPropertySerializer.DecodeAsISF(stream, remainingBytesInStrokeBlock, guidList, tag, ref guid, out data);

                    // add the guid/data pair into the property collection (don't redecode the type)
                    if (extendedProperties == null)
                    {
                        extendedProperties = new ExtendedPropertyCollection();
                    }
                    extendedProperties[guid] = data;

                    if (locallyDecodedBytes > remainingBytesInStrokeBlock)
                    {
                        throw new InvalidOperationException(StrokeCollectionSerializer.ISFDebugMessage("ExtendedProperty decoded totalBytesInStrokeBlockOfIsfStream exceeded ISF stream totalBytesInStrokeBlockOfIsfStream"));
                    }
                    remainingBytesInStrokeBlock -= locallyDecodedBytes;
                }
                break;
        }
    }

    // Every byte of the stroke block must have been consumed at this point.
    if (0 != remainingBytesInStrokeBlock)
        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Invalid ISF data"));

    return totalBytesInStrokeBlockOfIsfStream;
}
/// <summary>
/// Compresses the packet data of a single packet property and writes the
/// compressed bytes to the stream, returning how many bytes were written.
/// An empty packet array writes nothing and returns 0.
/// </summary>
/// <param name="packetdata">packet values to compress and persist</param>
/// <param name="stream">destination stream; must not be null (asserted below, and written unconditionally)</param>
/// <param name="guid">identifies the packet property; not consumed by this method's body</param>
/// <param name="algo">requested compression algorithm; passed by ref so the compressor can report back</param>
#endif
static uint SavePacketPropertyData(
    int[] packetdata,
    Stream stream,
#if OLD_ISF
    Compressor compressor,
#endif
    Guid guid,
    ref byte algo)
{
    // Nothing to persist for an empty property array.
    if (packetdata.Length == 0)
    {
        return 0;
    }

    // Compress first; 'algo' is a ref parameter the compressor can update.
    byte[] compressed = Compressor.CompressPacketData(
#if OLD_ISF
                            compressor,
#endif
                            packetdata,
                            ref algo);

    Debug.Assert(stream != null);

    // Emit the compressed bytes and report the count written.
    stream.Write(compressed, 0, compressed.Length);
    return (uint)compressed.Length;
}
/// <summary>
/// Loads drawing attributes from a memory buffer.
/// </summary>
/// <param name="stream">Memory buffer to read from</param>
/// <param name="guidList">Guid tags if extended properties are used</param>
/// <param name="maximumStreamSize">Maximum size of buffer to read through</param>
/// <param name="da">The drawing attributes collection to decode into</param>
/// <returns>Number of bytes read</returns>
#else
/// <summary>
/// Loads drawing attributes from a memory buffer.
/// </summary>
/// <param name="stream">Memory buffer to read from</param>
/// <param name="guidList">Guid tags if extended properties are used</param>
/// <param name="maximumStreamSize">Maximum size of buffer to read through</param>
/// <param name="da">The drawing attributes collection to decode into</param>
/// <returns>Number of bytes read</returns>
#endif
internal static uint DecodeAsISF(Stream stream, GuidList guidList, uint maximumStreamSize, DrawingAttributes da)
{
    // V1 defaults; each is overwritten only if the corresponding guid shows up in the stream.
    PenTip penTip = PenTip.Default;
    PenStyle penStyle = PenStyle.Default;
    double stylusWidth = DrawingAttributeSerializer.V1PenWidthWhenWidthIsMissing;
    double stylusHeight = DrawingAttributeSerializer.V1PenHeightWhenHeightIsMissing;
    uint rasterOperation = DrawingAttributeSerializer.RasterOperationDefaultV1;
    int transparency = DrawingAttributeSerializer.TransparencyDefaultV1;
    bool widthIsSetInISF = false; //did we find KnownIds.Width?
    bool heightIsSetInISF = false; //did we find KnownIds.Height?

    // Remember the original byte budget; it is the return value, and the loop
    // below must consume exactly this many bytes (checked after the loop).
    uint cbTotal = maximumStreamSize;

    while (maximumStreamSize > 0)
    {
        KnownTagCache.KnownTagIndex tag;
        uint uiTag;
        // First read the tag
        uint cb = SerializationHelper.Decode(stream, out uiTag);
        tag = (KnownTagCache.KnownTagIndex)uiTag;

        if (maximumStreamSize < cb)
        {
            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("ISF size is larger than maximum stream size"));
        }

        maximumStreamSize -= cb;

        // Get the guid based on the tag
        Guid guid = guidList.FindGuid(tag);
        if (guid == Guid.Empty)
        {
            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Drawing Attribute tag embedded in ISF stream does not match guid table"));
        }

        uint dw = 0;

        if (KnownIds.PenTip == guid)
        {
            cb = SerializationHelper.Decode(stream, out dw);
            penTip = (PenTip)dw;
            if (!PenTipHelper.IsDefined(penTip))
            {
                throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Invalid PenTip value found in ISF stream"));
            }
            maximumStreamSize -= cb;
        }
        else if (KnownIds.PenStyle == guid)
        {
            // NOTE(review): penStyle is decoded here but never applied to 'da'
            // anywhere in this method -- confirm whether that is intentional.
            cb = SerializationHelper.Decode(stream, out dw);
            penStyle = (PenStyle)dw;
            maximumStreamSize -= cb;
        }
        else if (KnownIds.DrawingFlags == guid)
        {
            // Encode the drawing flags with considerations for v2 model
            cb = SerializationHelper.Decode(stream, out dw);
            DrawingFlags flags = (DrawingFlags)dw;
            da.DrawingFlags = flags;
            maximumStreamSize -= cb;
        }
        else if (KnownIds.RasterOperation == guid)
        {
            uint ropSize = GuidList.GetDataSizeIfKnownGuid(KnownIds.RasterOperation);
            if (ropSize == 0)
            {
                throw new InvalidOperationException(StrokeCollectionSerializer.ISFDebugMessage("ROP data size was not found"));
            }

            byte[] data = new byte[ropSize];
            // NOTE(review): the number of bytes actually read is ignored here; a
            // short read would leave stale zeros in 'data' -- confirm callers
            // always supply fully-buffered streams.
            stream.Read(data, 0, (int)ropSize);

            // data is freshly allocated above, so this check is always true.
            if (data != null && data.Length > 0)
            {
                //data[0] holds the allowable values of 0-255
                rasterOperation = Convert.ToUInt32(data[0]);
            }

            maximumStreamSize -= ropSize;
        }
        else if (KnownIds.CurveFittingError == guid)
        {
            cb = SerializationHelper.Decode(stream, out dw);
            da.FittingError = (int)dw;
            maximumStreamSize -= cb;
        }
        else if (KnownIds.StylusHeight == guid || KnownIds.StylusWidth == guid)
        {
            // Height/width are stored as an integer part, optionally followed by
            // a compressed Mantissa block holding the fractional part.
            double _size;
            cb = SerializationHelper.Decode(stream, out dw);
            _size = (double)dw;
            maximumStreamSize -= cb;
            if (maximumStreamSize > 0)
            {
                cb = SerializationHelper.Decode(stream, out dw);
                maximumStreamSize -= cb;
                if (KnownTagCache.KnownTagIndex.Mantissa == (KnownTagCache.KnownTagIndex)dw)
                {
                    uint cbInSize;
                    // First thing that is in there is maximumStreamSize of the data
                    cb = SerializationHelper.Decode(stream, out cbInSize);
                    maximumStreamSize -= cb;

                    // in maximumStreamSize is one more than the decoded no
                    cbInSize++;
                    if (cbInSize > maximumStreamSize)
                    {
                        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("ISF size if greater then maximum stream size"));
                    }
                    byte[] in_data = new byte[cbInSize];

                    uint bytesRead = (uint)stream.Read(in_data, 0, (int)cbInSize);
                    if (cbInSize != bytesRead)
                    {
                        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Read different size from stream then expected"));
                    }

                    byte[] out_buffer = Compressor.DecompressPropertyData(in_data);
                    using (MemoryStream localStream = new MemoryStream(out_buffer))
                    using (BinaryReader rdr = new BinaryReader(localStream))
                    {
                        // assumes StylusPrecision makes this a fractional (non-integer)
                        // division -- TODO confirm its declared type.
                        short sFraction = rdr.ReadInt16();
                        _size += (double)(sFraction / DrawingAttributes.StylusPrecision);
                        maximumStreamSize -= cbInSize;
                    }
                }
                else
                {
                    // No Mantissa followed: the value we just decoded belongs to the
                    // next tag. Seek it back by cb.
                    stream.Seek(-cb, SeekOrigin.Current);
                    maximumStreamSize += cb;
                }
            }
            if (KnownIds.StylusWidth == guid)
            {
                widthIsSetInISF = true;
                stylusWidth = _size;
            }
            else
            {
                heightIsSetInISF = true;
                stylusHeight = _size;
            }
        }
        else if (KnownIds.Transparency == guid)
        {
            cb = SerializationHelper.Decode(stream, out dw);
            transparency = (int)dw;
            maximumStreamSize -= cb;
        }
        else if (KnownIds.Color == guid)
        {
            cb = SerializationHelper.Decode(stream, out dw);

            // dw packs the channels as 0x00BBGGRR.
            Color color = Color.FromRgb((byte)(dw & 0xff), (byte)((dw & 0xff00) >> Native.BitsPerByte), (byte)((dw & 0xff0000) >> (Native.BitsPerByte * 2)));
            da.Color = color;
            maximumStreamSize -= cb;
        }
        else if (KnownIds.StylusTipTransform == guid)
        {
            try
            {
                object data;
                cb = ExtendedPropertySerializer.DecodeAsISF(stream, maximumStreamSize, guidList, tag, ref guid, out data);

                Matrix matrix = Matrix.Parse((string)data);
                da.StylusTipTransform = matrix;
            }
            catch (InvalidOperationException) // Matrix.Parse failed.
            {
                System.Diagnostics.Debug.Assert(false, "Corrupt Matrix in the ExtendedPropertyCollection!");
            }
            finally
            {
                // NOTE(review): if DecodeAsISF itself threw before assigning cb, the
                // stale tag-size cb is subtracted here -- confirm that path cannot occur.
                maximumStreamSize -= cb;
            }
        }
        else
        {
            // Unrecognized drawing-attribute guid: keep it as generic extended
            // property data on the DrawingAttributes.
            object data;
            cb = ExtendedPropertySerializer.DecodeAsISF(stream, maximumStreamSize, guidList, tag, ref guid, out data);
            maximumStreamSize -= cb;
            da.AddPropertyData(guid, data);
        }
    }

    if (0 != maximumStreamSize)
    {
        throw new ArgumentException();
    }

    //
    // time to create our drawing attributes.
    //
    // 1) First we need to evaluate PenTip / StylusTip
    // Here is the V1 - V2 mapping
    //
    // PenTip.Circle == StylusTip.Ellipse
    // PenTip.Rectangle == StylusTip.Rectangle
    // PenTip.Rectangle == StylusTip.Diamond
    if (penTip == PenTip.Default)
    {
        //Since StylusTip is stored in the EPC at this point (if set), we can compare against it here.
        if (da.StylusTip != StylusTip.Ellipse)
        {
            //
            // StylusTip was set to something other than Ellipse
            // when we last serialized (or else StylusTip would be Ellipse, the default)
            // when StylusTip is != Ellipse and we serialize, we set PenTip to Rectangle
            // which is not the default.  Therefore, if PenTip is back to Circle,
            // that means someone set it in V1 and we should respect that by
            // changing StylusTip back to Ellipse
            //
            da.StylusTip = StylusTip.Ellipse;
        }
        //else da.StylusTip is already set
    }
    else
    {
        System.Diagnostics.Debug.Assert(penTip == PenTip.Rectangle);
        if (da.StylusTip == StylusTip.Ellipse)
        {
            //
            // PenTip is Rectangle and StylusTip was either not set
            // before or was set to Ellipse and PenTip was changed
            // in a V1 ink object.  Either way, we need to change StylusTip to Rectangle
            //
            da.StylusTip = StylusTip.Rectangle;
        }
        //else da.StylusTip is already set
    }

    //
    // 2) next we need to set height and width
    //
    if (da.StylusTip == StylusTip.Ellipse &&
        widthIsSetInISF &&
        !heightIsSetInISF)
    {
        //
        // special case: V1 PenTip of Circle only used Width to compute the circle size
        // and so it only serializes Width of 53
        // but since our default is Ellipse, if Height is unset and we use the default
        // height of 30, then your ink that looked like 53,53 in V1 will look
        // like 30,53 here.
        //
        stylusHeight = stylusWidth;
        da.HeightChangedForCompatabity = true;
    }
    // need to convert width/height into Avalon, since they are stored in HIMETRIC in ISF
    stylusHeight *= StrokeCollectionSerializer.HimetricToAvalonMultiplier;
    stylusWidth *= StrokeCollectionSerializer.HimetricToAvalonMultiplier;

    // Map 0.0 width to DrawingAttributes.DefaultXXXXXX (V1 53 equivalent)
    double height = DoubleUtil.IsZero(stylusHeight) ? (Double)DrawingAttributes.GetDefaultDrawingAttributeValue(KnownIds.StylusHeight) : stylusHeight;
    double width = DoubleUtil.IsZero(stylusWidth) ? (Double)DrawingAttributes.GetDefaultDrawingAttributeValue(KnownIds.StylusWidth) : stylusWidth;

    da.Height = GetCappedHeightOrWidth(height);
    da.Width = GetCappedHeightOrWidth(width);

    //
    // 3) next we need to set IsHighlighter (by looking for RasterOperation.MaskPen)
    //

    //
    // always store raster op
    //
    da.RasterOperation = rasterOperation;
    if (rasterOperation == DrawingAttributeSerializer.RasterOperationDefaultV1)
    {
        //
        // if rasterop is default, make sure IsHighlighter isn't in the EPC
        //
        if (da.ContainsPropertyData(KnownIds.IsHighlighter))
        {
            da.RemovePropertyData(KnownIds.IsHighlighter);
        }
    }
    else
    {
        if (rasterOperation == DrawingAttributeSerializer.RasterOperationMaskPen)
        {
            da.IsHighlighter = true;
        }
    }
    //else, IsHighlighter will be set to false by default, no need to set it

    //
    // 4) see if there is a transparency we need to add to color
    //
    if (transparency > DrawingAttributeSerializer.TransparencyDefaultV1)
    {
        //note: Color.A is set to 255 by default, which means fully opaque
        //transparency is just the opposite - 0 means fully opaque so
        //we need to flip the values
        int alpha = MathHelper.AbsNoThrow(transparency - 255);
        Color color = da.Color;
        color.A = Convert.ToByte(alpha);
        da.Color = color;
    }
    return (cbTotal);
}
/// <summary>
/// Saves the packets into a stream of bytes
/// </summary>
/// <param name="stroke">Stroke to save</param>
/// <param name="stream">null to calculate size only</param>
/// <param name="compressor">Compression module</param>
/// <param name="strokeLookupEntry">Carries the ISF-ready packet data for this stroke</param>
/// <returns>number of bytes written (or, for a null stream, the encoded point-count size)</returns>
#else
/// <summary>
/// Saves the packets into a stream of bytes
/// </summary>
/// <param name="stroke">Stroke to save</param>
/// <param name="stream">null to calculate size only</param>
/// <param name="strokeLookupEntry">Carries the ISF-ready packet data for this stroke</param>
/// <returns>number of bytes written (or, for a null stream, the encoded point-count size)</returns>
#endif
static uint SavePackets(
    Stroke stroke,
    Stream stream,
#if OLD_ISF
    Compressor compressor,
#endif
    StrokeCollectionSerializer.StrokeLookupEntry strokeLookupEntry)
{
    // First write or calculate how many points are there.
    // NOTE(review): the size-only path (stream == null) still calls
    // SavePacketPropertyData below, which writes to the stream unconditionally
    // for non-empty data -- confirm callers never pass null with packet data.
    uint pointCount = (uint)stroke.StylusPoints.Count;
    uint localBytesWritten = (stream != null) ? SerializationHelper.Encode(stream, pointCount) : SerializationHelper.VarSize(pointCount);
    byte compressionAlgorithm;

    int[][] outputArrays = strokeLookupEntry.ISFReadyStrokeData;
    //We don't serialize button data
    //int valuesPerPoint = stroke.StylusPoints.Description.GetOutputArrayLengthPerPoint();
    //int buttonCount = stroke.StylusPoints.Description.ButtonCount;

    ReadOnlyCollection<StylusPointPropertyInfo> propertyInfos = stroke.StylusPoints.Description.GetStylusPointProperties();
    int i = 0;
    for (; i < propertyInfos.Count; i++)
    {
        StylusPointPropertyInfo propertyInfo = propertyInfos[i];
        // index 2 is the pressure slot in the output arrays
        if (i == 2 && !strokeLookupEntry.StorePressure)
        {
            //
            // only store pressure if we need to
            //
            continue;
        }
        if (propertyInfo.IsButton)
        {
            //
            // we're at the buttons, handle this below
            //
            break;
        }
        // refresh per property: the ref parameter may be updated by the callee
        compressionAlgorithm = strokeLookupEntry.CompressionData;
        localBytesWritten += SavePacketPropertyData(
            outputArrays[i],
            stream,
#if OLD_ISF
            compressor,
#endif
            propertyInfo.Id,
            ref compressionAlgorithm);
    }

    /*
     * We don't serialize button data.
     * Historical note (packing code removed from this method): button state used
     * to be written as one more packet property of
     * (buttonCount * pointCount + 7) / Native.BitsPerByte packed bytes, most
     * significant bits first, with the corresponding button guids stored in the
     * packet description; it was only emitted when the stroke had both buttons
     * and points.
     */

    return (localBytesWritten);
}
/// <summary>
/// Encodes a custom attribute to the ISF stream.  Optionally prefixes the
/// payload with the guid's tag, then writes either the raw bytes (known
/// fixed-size guid) or the compressed payload preceded by its encoded size
/// (custom guid, or known guid of size 0).  Returns the number of bytes written.
/// </summary>
#endif
internal static uint EncodeAsISF(Guid id, byte[] data, Stream strm, GuidList guidList, byte compressionAlgorithm, bool fTag)
{
    uint cbWrite = 0;
    uint cbSize = GuidList.GetDataSizeIfKnownGuid(id);

    Debug.Assert(strm != null);

    // Optionally emit the property's tag ahead of the payload.
    if (fTag)
    {
        uint uTag = (uint)guidList.FindTag(id, true);
        cbWrite += SerializationHelper.Encode(strm, uTag);
    }

    if (0 != cbSize)
    {
        // Known guid with a fixed size: loaders already know the byte count, so
        // the payload goes out raw -- no size prefix and NO compression byte
        // (loaders do not expect one for known guids).
        strm.Write(data, 0, data.Length);
        cbWrite += (uint)data.Length;
        return cbWrite;
    }

    // Custom property, or known property with 0 size: the payload size must be
    // written explicitly, and the data is compressed.
    cbSize = (uint)data.Length;

    byte[] compressedBytes = Compressor.CompressPropertyData(data, compressionAlgorithm);

#if OLD_ISF
    // Cross-check the managed compressor against the native implementation.
    byte nAlgo = compressionAlgorithm;
    uint cbOut = 0;
    Compressor.CompressPropertyData(data, ref nAlgo, ref cbOut, null);

    // Allocate a buffer big enough to hold the compressed data
    byte[] nativeCompressedBytes = new byte[cbOut];

    // NativeCompressor the data
    Compressor.CompressPropertyData(data, ref nAlgo, ref cbOut, nativeCompressedBytes);

    if (compressedBytes.Length != nativeCompressedBytes.Length)
    {
        throw new InvalidOperationException("MAGIC EXCEPTION: Property bytes length when compressed didn't match with new compression");
    }
    for (int idx = 0; idx < compressedBytes.Length; idx++)
    {
        if (compressedBytes[idx] != nativeCompressedBytes[idx])
        {
            throw new InvalidOperationException("MAGIC EXCEPTION: Property data didn't match with new property compression at index " + idx.ToString());
        }
    }
#endif

    // Persist the compressed size minus the leading algorithm byte...
    cbWrite += SerializationHelper.Encode(strm, (uint)(compressedBytes.Length - 1));

    // ...followed by the compressed payload itself (algorithm byte included).
    strm.Write(compressedBytes, 0, compressedBytes.Length);
    cbWrite += (uint)compressedBytes.Length;

    return cbWrite;
}