/// <summary>
/// Private helper used to read an int, short or byte (in reverse order) from the reader
/// and return an int
/// </summary>
/// <param name="reader">bit stream positioned at the value to read</param>
/// <param name="type">selects how many bits are consumed and how they are widened</param>
/// <returns>the value read, widened to an int</returns>
private int GetDataFromReader(BitStreamReader reader, GorillaEncodingType type)
{
    // Each encoding type consumes a fixed number of bits; any other value is corrupt input.
    switch (type)
    {
        case GorillaEncodingType.Int:
            return (int)reader.ReadUInt32Reverse(Native.BitsPerInt);
        case GorillaEncodingType.Short:
            return (int)reader.ReadUInt16Reverse(Native.BitsPerShort);
        case GorillaEncodingType.Byte:
            return (int)reader.ReadByte(Native.BitsPerByte);
        default:
            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("bogus GorillaEncodingType passed to GetDataFromReader"));
    }
}
/// <summary>
/// FindCodec - maps an algorithm byte from the ISF stream to the huffman codec that decodes it
/// </summary>
/// <param name="algoData">algorithm byte read from the ISF stream</param>
internal HuffCodec FindCodec(byte algoData)
{
    // Only the low five bits of the algorithm byte select the codec.
    byte codecIndex = (byte)(algoData & 0x1f);

    //unused
    //if ((0x20 & algoData) != 0)
    //{
    //    int iLookup = (algoData & 0x1f);
    //    if ((iLookup > 0) && (iLookup <= _lookupList.Count))
    //    {
    //        codecIndex = _lookupList[iLookup - 1].Byte;
    //    }
    //}

    // Indexes below the built-in count refer to the default codecs.
    if (codecIndex < AlgoModule.DefaultBAACount)
    {
        return GetDefCodec((uint)codecIndex);
    }

    // An index beyond the custom codec table means corrupt input.
    if ((int)codecIndex >= _huffCodecs.Count + AlgoModule.DefaultBAACount)
    {
        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("invalid codec computed"));
    }

    return _huffCodecs[(int)(codecIndex - AlgoModule.DefaultBAACount)];
}
/// <summary>
/// Decode - multi-byte decodes an unsigned value starting at inputIndex.
/// Each byte with the high bit set contributes its low seven bits and signals
/// that another byte follows; the first byte without the high bit terminates
/// the value.
/// </summary>
/// <param name="input">buffer containing the multi-byte encoded data</param>
/// <param name="inputIndex">offset into input where decoding starts</param>
/// <param name="data">receives the decoded value</param>
/// <returns>the count of bytes consumed from input</returns>
internal uint Decode(byte[] input, int inputIndex, ref uint data)
{
    Debug.Assert(input != null);
    Debug.Assert(inputIndex < input.Length);

    // We care about first 5 bytes (5 * 7 bits is enough for a 32-bit value)
    uint cb = (input.Length - inputIndex > 5) ? 5 : (uint)(input.Length - inputIndex);
    uint index = 0;
    data = 0;
    // Bug fix: bytes must be read relative to inputIndex, not from the start
    // of the buffer. The previous code indexed input[index], so decoding at a
    // nonzero offset re-read the beginning of the buffer.
    while ((index < cb) && (input[inputIndex + index] > 0x7f))
    {
        int leftShift = (int)(index * 7);
        data |= (uint)((input[inputIndex + index] & 0x7f) << leftShift);
        ++index;
    }
    if (index < cb)
    {
        int leftShift = (int)(index * 7);
        data |= (uint)((input[inputIndex + index] & 0x7f) << leftShift);
    }
    else
    {
        // Ran out of bytes before seeing a terminating byte: malformed stream.
        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("invalid input in MultiByteCodec.Decode"));
    }
    return index + 1;
}
/// <summary>
/// Internal method for getting just the byte[] out
/// </summary>
/// <param name="stream">destination stream for the encoded ISF</param>
/// <param name="compress">true to compress the ISF data before writing</param>
internal void SaveIsf(Stream stream, bool compress)
{
    // Configure the serializer for the requested compression mode, then encode.
    StrokeCollectionSerializer serializer = new StrokeCollectionSerializer(this)
    {
        CurrentCompressionMode = compress ? CompressionMode.Compressed : CompressionMode.NoCompression
    };
    serializer.EncodeISF(stream);
}
/// <summary>
/// Creates a metric entry based on a PropertyInfo and Tag and returns the Metric Entry Type created
/// </summary>
/// <param name="propertyInfo">property info used to initialize the metric entry data</param>
/// <param name="tag">tag to associate with this metric entry</param>
/// <returns>the type of metric entry that was created</returns>
public MetricEntryType CreateMetricEntry(StylusPointPropertyInfo propertyInfo, KnownTagCache.KnownTagIndex tag)
{
    // First create the default Metric entry based on the property and type of metric
    // entry and then use that to initialize the metric entry data.
    uint index = 0;
    Tag = tag;

    MetricEntryType entryType;
    if (!IsValidMetricEntry(propertyInfo, Tag, out entryType, out index))
    {
        // Nothing to initialize for an invalid entry; just report its type.
        return entryType;
    }

    switch (entryType)
    {
        case MetricEntryType.Optional:
            // Optional entries carry the default metrics for the matched property.
            Initialize(propertyInfo, MetricEntry_Optional[index].PropertyMetrics);
            break;
        case MetricEntryType.Must:
        case MetricEntryType.Custom:
            Initialize(propertyInfo, DefaultPropertyMetrics);
            break;
        default:
            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("MetricEntryType was persisted with Never flag which should never happen"));
    }
    return entryType;
}
/// <summary>
/// CompressPropertyData - compresses property data through the unmanaged compressor.
/// </summary>
/// <param name="data">uncompressed property bytes</param>
/// <param name="algorithm">in: requested algorithm; out: algorithm actually used</param>
/// <param name="outputSize">in: size of output (or requested size query); out: bytes required/written</param>
/// <param name="output">destination buffer, or null to query the required size</param>
internal static void CompressPropertyData(byte[] data, ref byte algorithm, ref uint outputSize, byte[] output)
{
    // lock to prevent multi-threaded vulnerabilities
    lock (_compressSync)
    {
        // Passing null for output is a legal size query. A non-null output whose
        // length disagrees with outputSize must be rejected, because both are
        // handed directly to unmanaged code and a mismatch could write past the
        // end of the buffer.
        if (output != null && outputSize != output.Length)
        {
            // We don't raise any information that could be used to attack our ISF
            // code; a simple 'ISF Operation Failed' is sufficient since the user
            // can't do anything to fix bogus ISF.
            throw new InvalidOperationException(SR.Get(SRID.IsfOperationFailed));
        }

        int hr = MS.Win32.Penimc.UnsafeNativeMethods.IsfCompressPropertyData(data, (uint)data.Length, ref algorithm, ref outputSize, output);
        if (hr != 0)
        {
            // Same rationale: report only a generic failure for bogus ISF.
            throw new InvalidOperationException(StrokeCollectionSerializer.ISFDebugMessage("IsfCompressPropertyData returned: " + hr.ToString(CultureInfo.InvariantCulture)));
        }
    }
}
/// <summary>
/// Deserializes the GuidList from the memory stream
/// </summary>
/// <param name="strm">stream to read the custom guid table from</param>
/// <param name="size">count of bytes the guid table occupies in the stream</param>
/// <returns>the number of bytes actually consumed from the stream</returns>
public uint Load(Stream strm, uint size)
{
    uint bytesDecoded = 0;
    _CustomGuids.Clear();

    uint guidCount = size / Native.SizeOfGuid;
    byte[] guidBuffer = new byte[Native.SizeOfGuid];

    for (uint i = 0; i < guidCount; i++)
    {
        // NTRAID:WINDOWSOS#1622775-2006/04/26-WAYNEZEN,
        // Stream.Read could read less number of bytes than the request. We call ReliableRead that
        // reads the bytes in a loop until all requested bytes are received or reach the end of the stream.
        uint bytesRead = StrokeCollectionSerializer.ReliableRead(strm, guidBuffer, Native.SizeOfGuid);
        bytesDecoded += bytesRead;

        if (bytesRead != Native.SizeOfGuid)
        {
            // If Stream.Read cannot return the expected number of bytes, we should break here.
            // The caller - StrokeCollectionSerializer.DecodeRawISF - will check our return value.
            // An exception might be thrown if reading failed.
            break;
        }

        _CustomGuids.Add(new Guid(guidBuffer));
    }
    return bytesDecoded;
}
/// <summary>
/// CompressPropertyData - compresses property data using the compression defined by 'compressor'
/// </summary>
/// <param name="input">The int[] of packet data to compress</param>
/// <param name="algorithm">In: the desired algorithm to use. Out: the algorithm used</param>
/// <returns>the compressed data in a byte[]</returns>
#endif
internal static byte[] CompressPacketData(
#if OLD_ISF
    Compressor compressor,
#endif
    int[] input,
    ref byte algorithm)
{
#if OLD_ISF
    //
    // lock to prevent multi-threaded vulnerabilities
    //
    lock (_compressSync)
    {
#endif
    if (input == null)
    {
        //we don't raise any information that could be used to attack our ISF code
        //a simple 'ISF Operation Failed' is sufficient since the user can't do
        //anything to fix bogus ISF
        throw new InvalidOperationException(SR.Get(SRID.IsfOperationFailed));
    }

    // Managed compression path; in OLD_ISF builds its output is cross-checked
    // byte-for-byte against the unmanaged compressor below.
    byte[] data = AlgoModule.CompressPacketData(input, algorithm);

#if OLD_ISF
    uint cbOutSize = 0;
    // A null compressor instance falls back to the null safe handle.
    MS.Win32.Penimc.CompressorSafeHandle safeCompressorHandle = (compressor == null) ? MS.Win32.Penimc.CompressorSafeHandle.Null : compressor._compressorHandle;
    // First call with a null output buffer queries the required output size.
    int hr = MS.Win32.Penimc.UnsafeNativeMethods.IsfCompressPacketData(safeCompressorHandle, input, (uint)input.Length, ref algorithm, ref cbOutSize, null);
    if (0 == hr)
    {
        byte[] data2 = new byte[cbOutSize];
        // Second call performs the actual unmanaged compression.
        hr = MS.Win32.Penimc.UnsafeNativeMethods.IsfCompressPacketData(safeCompressorHandle, input, (uint)input.Length, ref algorithm, ref cbOutSize, data2);
        if (0 == hr)
        {
            //see if data matches
            if (data2.Length != data.Length)
            {
                throw new InvalidOperationException("MAGIC EXCEPTION: Packet data length didn't match with new compression");
            }
            for (int i = 0; i < data2.Length; i++)
            {
                if (data2[i] != data[i])
                {
                    throw new InvalidOperationException("MAGIC EXCEPTION: Packet data didn't match with new compression at index " + i.ToString());
                }
            }
            // Both implementations agree; return the managed result.
            return data;
        }
    }
    throw new InvalidOperationException(StrokeCollectionSerializer.ISFDebugMessage("IsfCompressPacketData returned:" + hr.ToString(CultureInfo.InvariantCulture)));
    }
#else
    return data;
#endif
}
/// <summary>
/// Compress - compresses the byte[] being read by the BitStreamReader into compressed data
/// </summary>
/// <param name="bitCount">the number of bits to use for each element; 0 means the natural width of the encoding type</param>
/// <param name="reader">a reader over the byte[] to compress</param>
/// <param name="encodingType">int, short or byte?</param>
/// <param name="unitsToEncode">number of logical units to encode</param>
/// <param name="compressedData">output write buffer</param>
internal void Compress(int bitCount, BitStreamReader reader, GorillaEncodingType encodingType, int unitsToEncode, List<byte> compressedData)
{
    // Fix: ArgumentNullException's single-string constructor treats the string
    // as the parameter name, not the message. Name the offending parameter and
    // pass the debug text through the message argument instead.
    if (null == reader)
    {
        throw new ArgumentNullException("reader", StrokeCollectionSerializer.ISFDebugMessage("reader or compressedData was null in compress"));
    }
    if (null == compressedData)
    {
        throw new ArgumentNullException("compressedData", StrokeCollectionSerializer.ISFDebugMessage("reader or compressedData was null in compress"));
    }
    if (bitCount < 0)
    {
        throw new ArgumentOutOfRangeException("bitCount");
    }
    if (unitsToEncode < 0)
    {
        throw new ArgumentOutOfRangeException("unitsToEncode");
    }

    if (bitCount == 0)
    {
        // A bit count of zero means 'natural width': use the full bit size of
        // the unit selected by the encoding type (32 for Int, etc).
        switch (encodingType)
        {
            case GorillaEncodingType.Int:
                bitCount = Native.BitsPerInt;
                break;
            case GorillaEncodingType.Short:
                bitCount = Native.BitsPerShort;
                break;
            case GorillaEncodingType.Byte:
                bitCount = Native.BitsPerByte;
                break;
            default:
                throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("bogus GorillaEncodingType passed to compress"));
        }
    }

    //have the writer adapt to the List<byte> passed in and write to it
    BitStreamWriter writer = new BitStreamWriter(compressedData);

    // Encode until the reader is exhausted or the requested unit count is met.
    while (!reader.EndOfStream && unitsToEncode > 0)
    {
        int data = GetDataFromReader(reader, encodingType);
        writer.Write((uint)data, bitCount);
        unitsToEncode--;
    }
}
/// <summary>
/// DecompressPropertyData - decompresses a byte[] representing property data (such as DrawingAttributes.Color)
/// </summary>
/// <param name="input">The byte[] to decompress</param>
#endif
internal static byte[] DecompressPropertyData(byte[] input)
{
#if OLD_ISF
    //
    // lock to prevent multi-threaded vulnerabilities
    //
    lock (_compressSync)
    {
#endif
    if (input == null)
    {
        //we don't raise any information that could be used to attack our ISF code
        //a simple 'ISF Operation Failed' is sufficient since the user can't do
        //anything to fix bogus ISF
        throw new InvalidOperationException(StrokeCollectionSerializer.ISFDebugMessage(SR.Get(SRID.DecompressPropertyFailed)));
    }

    // Managed decompression path; in OLD_ISF builds the result is verified
    // byte-for-byte against the unmanaged decompressor below.
    byte[] data = AlgoModule.DecompressPropertyData(input);

#if OLD_ISF
    uint size = 0;
    byte algo = 0;
    // First call with a null output buffer queries the decompressed size.
    int hr = MS.Win32.Penimc.UnsafeNativeMethods.IsfDecompressPropertyData(input, (uint)input.Length, ref size, null, ref algo);
    if (0 == hr)
    {
        byte[] data2 = new byte[size];
        // Second call performs the actual unmanaged decompression.
        hr = MS.Win32.Penimc.UnsafeNativeMethods.IsfDecompressPropertyData(input, (uint)input.Length, ref size, data2, ref algo);
        if (0 == hr)
        {
            if (data.Length != data2.Length)
            {
                throw new InvalidOperationException("MAGIC EXCEPTION: Property bytes length when decompressed didn't match with new uncompression");
            }
            for (int i = 0; i < data.Length; i++)
            {
                if (data[i] != data2[i])
                {
                    throw new InvalidOperationException("MAGIC EXCEPTION: Property data didn't match with new property uncompression at index " + i.ToString());
                }
            }
            // Both implementations agree; return the managed result.
            return data;
        }
    }

    //we don't raise any information that could be used to attack our ISF code
    //a simple 'ISF Operation Failed' is sufficient since the user can't do
    //anything to fix bogus ISF
    throw new InvalidOperationException(StrokeCollectionSerializer.ISFDebugMessage("IsfDecompressPropertyData returned: " + hr.ToString(CultureInfo.InvariantCulture)));
#else
    return data;
#endif
#if OLD_ISF
    }
#endif
}
/// <summary>
/// Compresses property data which is already in the form of a byte[]
/// into a compressed byte[]
/// </summary>
/// <param name="input">byte[] data ready to be compressed</param>
/// <param name="compression">the compression to use</param>
/// <returns>the compressed bytes, prefixed with the algorithm byte</returns>
internal byte[] CompressPropertyData(byte[] input, byte compression)
{
    //reasonable default based on profiling
    List<byte> compressedData = new List<byte>(input.Length + 1);

    // Reserve the first byte for the compression header; it is patched in once
    // the final algorithm byte is known.
    compressedData.Add((byte)0);

    if (DefaultCompression == (DefaultCompression & compression))
    {
        compression = this.GorillaCodec.FindPropAlgoByte(input);
    }

    //validate that we never lzencode
    if (LempelZiv == (compression & LempelZiv))
    {
        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Invalid compression specified or computed by FindPropAlgoByte"));
    }

    // Determine the optimal way to compress the data: should the byte[] be
    // treated as a series of ints, shorts or bytes?
    int countPerItem = 0, bitCount = 0, padCount = 0;
    this.GorillaCodec.GetPropertyBitCount(compression, ref countPerItem, ref bitCount, ref padCount);
    Debug.Assert(countPerItem == 4 || countPerItem == 2 || countPerItem == 1);

    GorillaEncodingType encodingType;
    int unitCount;
    switch (countPerItem)
    {
        case 4:
            encodingType = GorillaEncodingType.Int;
            unitCount = input.Length >> 2;
            break;
        case 2:
            encodingType = GorillaEncodingType.Short;
            unitCount = input.Length >> 1;
            break;
        default:
            encodingType = GorillaEncodingType.Byte;
            unitCount = input.Length;
            break;
    }

    //encode, gorilla style
    BitStreamReader reader = new BitStreamReader(input);
    this.GorillaCodec.Compress(bitCount,        //the max count of bits required for each int
                               reader,          //the reader, which can read int, byte, short
                               encodingType,    //informs how the reader reads
                               unitCount,       //just how many items do we need to compress?
                               compressedData); //the compressed data that will be written to

    compressedData[0] = compression;
    return compressedData.ToArray();
}
/// <summary>
/// Decode - reads one huffman-encoded value from the bit stream.
/// The count of leading 1-bits selects a bucket; the following bits hold the
/// offset within the bucket with the sign bit stored at the LSB.
/// </summary>
/// <param name="data">receives the decoded value</param>
/// <param name="extra">receives any extra data decoded, 0 when there is none</param>
/// <param name="reader">bit stream to read from</param>
/// <returns>number of bits decoded, 0 for error</returns>
internal void Decode(ref int data, ref int extra, BitStreamReader reader)
{
    // Find the prefix length (the run of 1-bits selecting the bucket)
    byte prefIndex = 0;
    while (reader.ReadBit())
    {
        prefIndex++;
    }

    // First indicate there is no extra data
    extra = 0;

    // More efficient for 0
    if (0 == prefIndex)
    {
        data = 0;
        return;
    }
    else if (prefIndex < _huffBits.GetSize())
    {
        // Find the data length for this bucket
        uint nDataLen = _huffBits.GetBitsAtIndex(prefIndex);

        // Extract the offset data by lower bound with sign bit at LSB
        long nData = reader.ReadUInt64((int)(byte)nDataLen);

        // Find the sign bit
        bool bNeg = ((nData & 0x01) != 0);

        // Construct the data: drop the sign bit and add the bucket's minimum
        nData = (nData >> 1) + _mins[prefIndex];

        // Adjust the sign bit
        data = bNeg ? -((int)nData) : (int)nData;

        // return the bit count read from stream
        return;
    }
    else if (prefIndex == _huffBits.GetSize())
    {
        // This is the special prefix for extra data.
        // Decode the prefix first (recursively; extra of the nested decode is ignored)
        int extra2 = 0;
        int extra2Ignored = 0;
        Decode(ref extra2, ref extra2Ignored, reader);
        extra = extra2;

        // Following is the actual data
        int data2 = 0;
        Decode(ref data2, ref extra2Ignored, reader);
        data = data2;
        return;
    }

    // A prefix longer than any bucket means corrupt huffman data.
    throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("invalid huffman encoded data"));
}
/// <summary>
/// Decompresses property data (from a compressed byte[] to an uncompressed byte[])
/// </summary>
/// <param name="input">The byte[] to decompress; input[0] is the algorithm byte</param>
/// <returns>the uncompressed property bytes</returns>
internal byte[] DecompressPropertyData(byte[] input)
{
    if (input == null)
    {
        throw new ArgumentNullException("input");
    }
    if (input.Length < 2)
    {
        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("input.Length must be at least 2"));
    }

    byte compression = input[0];
    int inputIndex = 1;

    if (LempelZiv != (compression & LempelZiv))
    {
        // Gorilla path: decide whether the payload should be treated as a
        // series of ints, shorts or bytes.
        int countPerItem = 0, bitCount = 0, padCount = 0;
        this.GorillaCodec.GetPropertyBitCount(compression, ref countPerItem, ref bitCount, ref padCount);
        Debug.Assert(countPerItem == 4 || countPerItem == 2 || countPerItem == 1);

        GorillaEncodingType type =
            (countPerItem == 4) ? GorillaEncodingType.Int :
            (countPerItem == 2) ? GorillaEncodingType.Short :
                                  GorillaEncodingType.Byte;

        // Count of units (int, short or byte) in the payload, less padding.
        int unitsToDecode = (((input.Length - inputIndex) << 3) / bitCount) - padCount;
        BitStreamReader reader = new BitStreamReader(input, inputIndex);
        return this.GorillaCodec.Uncompress(bitCount, reader, type, unitsToDecode);
    }

    // LZ path: no other algorithm bits may be set alongside LempelZiv.
    if (0 != (compression & (~LempelZiv)))
    {
        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("bogus isf, we don't decompress property data with lz"));
    }
    return this.LZCodec.Uncompress(input, inputIndex);
}
/// <summary>
/// Adds a new metric entry in the existing list of metric entries
/// </summary>
/// <param name="newEntry">entry to append; must not be null</param>
public void AddMetricEntry(MetricEntry newEntry)
{
    if (newEntry == null)
    {
        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("MetricEntry cannot be null"));
    }

    if (_Entry == null)
    {
        // First entry becomes the head of the list.
        _Entry = newEntry;
    }
    else
    {
        // tack on at the end
        _Entry.Add(newEntry);
    }
    _Count++;

    // Size grows by the entry payload plus the variable-length encodings of
    // its size and its tag.
    _size += newEntry.Size + SerializationHelper.VarSize(newEntry.Size) + SerializationHelper.VarSize((uint)newEntry.Tag);
}
/// <summary>
/// Constructs a Compressor over the supplied serialized compressor blob.
/// </summary>
/// <param name="data">serialized compressor data; its length must equal size</param>
/// <param name="size">expected byte count; updated by the unmanaged loader</param>
internal Compressor(byte[] data, ref uint size)
{
    // We don't raise any information that could be used to attack our ISF code;
    // a simple 'ISF Operation Failed' is sufficient since the user can't do
    // anything to fix bogus ISF.
    if (data == null || data.Length != size)
    {
        throw new InvalidOperationException(StrokeCollectionSerializer.ISFDebugMessage(SR.Get(SRID.InitializingCompressorFailed)));
    }

    _compressorHandle = MS.Win32.Penimc.UnsafeNativeMethods.IsfLoadCompressor(data, ref size);
    if (_compressorHandle.IsInvalid)
    {
        // Same rationale: report only a generic failure.
        throw new InvalidOperationException(StrokeCollectionSerializer.ISFDebugMessage(SR.Get(SRID.InitializingCompressorFailed)));
    }
}
/// <summary>
/// Compress - compress the input[] into compressedData
/// </summary>
/// <param name="bitCount">The count of bits needed for all elements; 0 means 32</param>
/// <param name="input">input buffer</param>
/// <param name="startInputIndex">offset into the input buffer</param>
/// <param name="dtxf">data transform. can be null</param>
/// <param name="compressedData">The list of bytes to write the compressed input to</param>
internal void Compress(int bitCount, int[] input, int startInputIndex, DeltaDelta dtxf, List<byte> compressedData)
{
    // Fix: ArgumentNullException's single-string constructor interprets the
    // string as the parameter name, not the message. Name the offending
    // parameter and pass the debug text as the message instead.
    if (null == input)
    {
        throw new ArgumentNullException("input", StrokeCollectionSerializer.ISFDebugMessage("input or compressed data was null in Compress"));
    }
    if (null == compressedData)
    {
        throw new ArgumentNullException("compressedData", StrokeCollectionSerializer.ISFDebugMessage("input or compressed data was null in Compress"));
    }
    if (bitCount < 0)
    {
        throw new ArgumentOutOfRangeException("bitCount");
    }

    if (bitCount == 0)
    {
        //adjust if the bitcount is 0 (this makes bitCount 32)
        bitCount = (int)(Native.SizeOfInt << 3);
    }

    //have the writer adapt to the List<byte> passed in and write to it
    BitStreamWriter writer = new BitStreamWriter(compressedData);

    if (null != dtxf)
    {
        // Delta-delta path: transform each value before writing it.
        int xfData = 0;
        int xfExtra = 0;
        for (int i = startInputIndex; i < input.Length; i++)
        {
            dtxf.Transform(input[i], ref xfData, ref xfExtra);
            if (xfExtra != 0)
            {
                // The transform is expected to fit each value without
                // spilling into the extra word.
                throw new InvalidOperationException(StrokeCollectionSerializer.ISFDebugMessage("Transform returned unexpected results"));
            }
            writer.Write((uint)xfData, bitCount);
        }
    }
    else
    {
        // Raw path: write each value directly at the requested bit width.
        for (int i = startInputIndex; i < input.Length; i++)
        {
            writer.Write((uint)input[i], bitCount);
        }
    }
}
/// <summary>
/// Finds a Custom Guid based on a Tag
/// </summary>
/// <param name="tag">tag to look up; must be at or above the custom guid base</param>
/// <returns>the custom guid, or Guid.Empty if the tag has no entry</returns>
Guid FindCustomGuid(KnownTagCache.KnownTagIndex tag)
{
    if ((int)tag < (int)KnownIdCache.CustomGuidBaseIndex)
    {
        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Tag is outside of the known guid tag range"));
    }

    // Convert the tag into an index into the custom guid table.
    int customIndex = (int)(tag - KnownIdCache.CustomGuidBaseIndex);

    // Out-of-range indexes simply have no guid.
    if (customIndex < 0 || customIndex >= _CustomGuids.Count)
    {
        return Guid.Empty;
    }

    return (Guid)_CustomGuids[customIndex];
}
/// <summary>
/// Finds a known guid based on a Tag
/// </summary>
/// <param name="tag">tag to look up; must be at or above the known guid base</param>
/// <returns>the known guid, or Guid.Empty if the tag has no entry</returns>
static Guid FindKnownGuid(KnownTagCache.KnownTagIndex tag)
{
    if (tag < KnownIdCache.KnownGuidBaseIndex)
    {
        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Tag is outside of the known guid tag range"));
    }

    // Convert the tag into an index into the original ISF id table.
    uint knownIndex = (uint)(tag - KnownIdCache.KnownGuidBaseIndex);

    // Out-of-range indexes simply have no guid.
    if (knownIndex >= (uint)KnownIdCache.OriginalISFIdTable.Length)
    {
        return Guid.Empty;
    }

    return KnownIdCache.OriginalISFIdTable[knownIndex];
}
/// <summary>
/// Loads a stroke from the stream based on Stroke Descriptor, StylusPointDescription, Drawing Attributes, Stroke IDs, transform and GuidList
/// </summary>
/// <param name="stream">ISF stream positioned at the stroke block</param>
/// <param name="size">count of bytes the stroke block occupies</param>
/// <param name="guidList">guid table used to resolve tags</param>
/// <param name="strokeDescriptor">descriptor for the stroke's properties</param>
/// <param name="stylusPointDescription">description of the packet layout</param>
/// <param name="drawingAttributes">drawing attributes for the new stroke</param>
/// <param name="transform">transform to apply to the points</param>
/// <param name="compressor">Compression module</param>
/// <param name="stroke">Newly decoded stroke</param>
/// <returns>the count of bytes consumed; always equal to size on success</returns>
#else
/// <summary>
/// Loads a stroke from the stream based on Stroke Descriptor, StylusPointDescription, Drawing Attributes, Stroke IDs, transform and GuidList
/// </summary>
/// <param name="stream">ISF stream positioned at the stroke block</param>
/// <param name="size">count of bytes the stroke block occupies</param>
/// <param name="guidList">guid table used to resolve tags</param>
/// <param name="strokeDescriptor">descriptor for the stroke's properties</param>
/// <param name="stylusPointDescription">description of the packet layout</param>
/// <param name="drawingAttributes">drawing attributes for the new stroke</param>
/// <param name="transform">transform to apply to the points</param>
/// <param name="stroke">Newly decoded stroke</param>
#endif
internal static uint DecodeStroke(Stream stream,
                                  uint size,
                                  GuidList guidList,
                                  StrokeDescriptor strokeDescriptor,
                                  StylusPointDescription stylusPointDescription,
                                  DrawingAttributes drawingAttributes,
                                  Matrix transform,
#if OLD_ISF
                                  Compressor compressor,
#endif
                                  out Stroke stroke)
{
    ExtendedPropertyCollection extendedProperties;
    StylusPointCollection stylusPoints;

    // Decode the raw ISF for this stroke into points and extended properties.
    uint cb = DecodeISFIntoStroke(
#if OLD_ISF
        compressor,
#endif
        stream, size, guidList, strokeDescriptor, stylusPointDescription, transform, out stylusPoints, out extendedProperties);

    // The decode must consume the whole stroke block exactly; anything else
    // indicates corrupt ISF.
    if (cb != size)
    {
        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Stroke size (" + cb.ToString(System.Globalization.CultureInfo.InvariantCulture) + ") != expected (" + size.ToString(System.Globalization.CultureInfo.InvariantCulture) + ")"));
    }

    stroke = new Stroke(stylusPoints, drawingAttributes, extendedProperties);

    return (cb);
}
/// <summary>Creates a collection from ISF data in the specified stream</summary>
/// <param name="stream">Stream of ISF data</param>
public StrokeCollection(Stream stream)
{
    if (stream == null)
    {
        throw new ArgumentNullException("stream");
    }
    if (!stream.CanRead)
    {
        throw new ArgumentException(SR.Get(SRID.Image_StreamRead), "stream");
    }

    // ISF decoding needs random access; wrap non-seekable streams.
    Stream seekableStream = GetSeekableStream(stream);
    if (seekableStream == null)
    {
        throw new ArgumentException(SR.Get(SRID.Invalid_isfData_Length), "stream");
    }

    // Decoding populates this (currently empty) collection in place.
    StrokeCollectionSerializer serializer = new StrokeCollectionSerializer(this);
    serializer.DecodeISF(seekableStream);
}
/// <summary>
/// Writes any non-default RasterOp value (picked up from V1 interop or by
/// setting IsHighlighter) to the stream; no-op for the default value.
/// </summary>
/// <param name="da">drawing attributes being persisted</param>
/// <param name="stream">destination ISF stream</param>
/// <param name="guidList">guid table used to resolve the RasterOperation tag</param>
/// <param name="cbData">running count of bytes written; incremented here</param>
/// <param name="bw">binary writer layered over stream</param>
private static void PersistRasterOperation(DrawingAttributes da, Stream stream, GuidList guidList, ref uint cbData, ref BinaryWriter bw)
{
    // Default value: nothing to persist.
    if (da.RasterOperation == DrawingAttributeSerializer.RasterOperationDefaultV1)
    {
        return;
    }

    uint ropSize = GuidList.GetDataSizeIfKnownGuid(KnownIds.RasterOperation);
    if (ropSize == 0)
    {
        throw new InvalidOperationException(StrokeCollectionSerializer.ISFDebugMessage("ROP data size was not found"));
    }

    Debug.Assert(bw != null);
    cbData += SerializationHelper.Encode(stream, (uint)guidList.FindTag(KnownIds.RasterOperation, true));

    // Write the value and verify exactly the expected byte count was emitted.
    long startPosition = stream.Position;
    bw.Write(da.RasterOperation);
    if ((uint)(stream.Position - startPosition) != ropSize)
    {
        throw new InvalidOperationException(StrokeCollectionSerializer.ISFDebugMessage("ROP data was incorrectly serialized"));
    }
    cbData += ropSize;
}
/// <summary>
/// DecompressPacketData - given a compressed byte[], uncompress it to the outputBuffer
/// </summary>
/// <param name="input">compressed byte from the ISF stream</param>
/// <param name="outputBuffer">prealloc'd buffer to write to</param>
/// <returns>the count of bytes consumed from input</returns>
internal uint DecompressPacketData(byte[] input, int[] outputBuffer)
{
    if (input == null)
    {
        throw new ArgumentNullException("input");
    }
    if (input.Length < 2)
    {
        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Input buffer passed was shorter than expected"));
    }
    if (outputBuffer == null)
    {
        throw new ArgumentNullException("outputBuffer");
    }
    if (outputBuffer.Length == 0)
    {
        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("output buffer length was zero"));
    }

    // input[0] is the algorithm byte; its top two bits select the scheme.
    byte compression = input[0];
    uint totalBytesRead = 1; //we just read one
    int inputIndex = 1;

    switch (compression & 0xC0)
    {
        case 0x80: //IndexedHuffman
            {
                DataXform dtxf = this.HuffModule.FindDtXf(compression);
                HuffCodec huffCodec = this.HuffModule.FindCodec(compression);
                totalBytesRead += huffCodec.Uncompress(dtxf, input, inputIndex, outputBuffer);
                return (totalBytesRead);
            }
        case 0x00: //NoCompression
            {
                int outputBufferIndex = 0;
                // Bit 0x20 requests the delta-delta inverse transform.
                DeltaDelta dtxf = null;
                if ((compression & 0x20) != 0)
                {
                    dtxf = this.DeltaDelta;
                }
                // The low five bits give the per-element bit width; zero means 32.
                int bitCount = 0;
                if ((compression & 0x1F) == 0)
                {
                    bitCount = Native.BitsPerInt; //32
                }
                else
                {
                    bitCount = (compression & 0x1F);
                }

                if (null != dtxf)
                {
                    //must have at least two more bytes besides the
                    //initial algo byte
                    if (input.Length < 3)
                    {
                        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Input buffer was too short (must be at least 3 bytes)"));
                    }

                    //multibyteencode the first two values
                    int xfData = 0;
                    int xfExtra = 0;
                    dtxf.ResetState();

                    // First seed value for the delta-delta transform.
                    uint bytesRead = this.MultiByteCodec.SignDecode(input, inputIndex, ref xfData);
                    //advance our index
                    inputIndex += (int)bytesRead;
                    totalBytesRead += bytesRead;
                    int result = dtxf.InverseTransform(xfData, xfExtra);
                    Debug.Assert(outputBufferIndex < outputBuffer.Length);
                    outputBuffer[outputBufferIndex++] = result;

                    // Second seed value.
                    bytesRead = this.MultiByteCodec.SignDecode(input, inputIndex, ref xfData);
                    //advance our index
                    inputIndex += (int)bytesRead;
                    totalBytesRead += bytesRead;
                    result = dtxf.InverseTransform(xfData, xfExtra);
                    Debug.Assert(outputBufferIndex < outputBuffer.Length);
                    outputBuffer[outputBufferIndex++] = result;
                }

                totalBytesRead += this.GorillaCodec.Uncompress(bitCount,           //the max count of bits required for each int
                                                               input,              //the input array to uncompress
                                                               inputIndex,         //the index to start uncompressing at
                                                               dtxf,               //data transform to use when compressing, can be null
                                                               outputBuffer,       //a ref to the output buffer to write to
                                                               outputBufferIndex); //the index of the output buffer to write to
                return (totalBytesRead);
            }
        default:
            {
                throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Invalid decompression algo byte"));
            }
    }
}
/// <summary>
/// Loads packets from the input stream. For example, packets are all of the x's in a stroke
/// </summary>
#else
/// <summary>
/// Loads packets from the input stream. For example, packets are all of the x's in a stroke
/// </summary>
#endif
static uint LoadPackets(Stream inputStream,
                        uint totalBytesInStrokeBlockOfIsfStream,
#if OLD_ISF
                        Compressor compressor,
#endif
                        StylusPointDescription stylusPointDescription,
                        Matrix transform,
                        out StylusPointCollection stylusPoints)
{
    stylusPoints = null;

    if (0 == totalBytesInStrokeBlockOfIsfStream)
    {
        return (0);
    }

    uint locallyDecodedBytesRemaining = totalBytesInStrokeBlockOfIsfStream;
    uint localBytesRead;

    // First read the no of packets
    uint pointCount;
    localBytesRead = SerializationHelper.Decode(inputStream, out pointCount);
    if (locallyDecodedBytesRemaining < localBytesRead)
    {
        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Invalid ISF data"));
    }
    locallyDecodedBytesRemaining -= localBytesRead;
    if (0 == locallyDecodedBytesRemaining)
    {
        return (localBytesRead);
    }

    // Allocate packet properties
    // NOTE(build-fix): the original code called
    //   stylusPointDescription.GetInputArrayLengthPerPoint()
    // here; it is stubbed to 0 to fix the build. TODO: restore.
    int intsPerPoint = 0;
    // NOTE(build-fix): original was stylusPointDescription.ButtonCount; stubbed to 0. TODO: restore.
    int buttonCount = 0;
    int buttonIntsPerPoint = (buttonCount > 0 ? 1 : 0);
    int valueIntsPerPoint = intsPerPoint - buttonIntsPerPoint;
    //add one int per point for button data if it exists
    int[] rawPointData = new int[pointCount * intsPerPoint];
    int[] packetDataSet = new int[pointCount];

    // copy the rest of the data from the stroke data
    byte[] inputBuffer = new byte[locallyDecodedBytesRemaining];

    // Read the input data into the byte array
    uint bytesRead = StrokeCollectionSerializer.ReliableRead(inputStream, inputBuffer, locallyDecodedBytesRemaining);

    if (bytesRead != locallyDecodedBytesRemaining)
    {
        // Make sure the bytes read are expected. If not, we should bail out.
        // An exception will be thrown.
        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Invalid ISF data"));
    }

    // at this point, we have read all of the bytes remaining in the input
    // stream's packet block, and while we will keep the bytes remaining
    // variable for positioning within the local byte buffer, we should
    // not read from the stream again, or we risk reading into another
    // ISF tag's block.
    // NOTE(build-fix): original was stylusPointDescription.OriginalPressureIndex; stubbed to 0. TODO: restore.
    int originalPressureIndex = 0;
    for (int i = 0; i < valueIntsPerPoint && locallyDecodedBytesRemaining > 0; i++)
    {
        localBytesRead = locallyDecodedBytesRemaining;
        Compressor.DecompressPacketData(
#if OLD_ISF
            compressor,
#endif
            inputBuffer,
            ref localBytesRead,
            packetDataSet);

        if (localBytesRead > locallyDecodedBytesRemaining)
        {
            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Invalid ISF data"));
        }

        //
        // packetDataSet is like this:
        // -------------
        // |X|X|X|X|X|X|
        // -------------
        //
        // we need to copy into rawPointData at
        //
        // -------------
        // |X| |X| |X| |
        // -------------
        //
        // additionally, for NormalPressure, if it exists and was
        // reordered in the StylusPointDescription, we need to account for that here
        //
        int tempi = i;
        if (tempi > 1 && originalPressureIndex != -1 &&
            originalPressureIndex != /* NOTE(build-fix): was StylusPointDescription.RequiredPressureIndex */ 2)
        {
            //
            // NormalPressure exists in the packet stream and was not at index 2
            // StylusPointDescription enforces that NormalPressure is at index 2
            // so we need to copy packet data beyond X and Y into a different location
            //
            // take the example of the original StylusPointDescription
            //  |X|Y|XTilt|YTilt|NormalPressure|Rotation|
            //
            // originalPressureIndex is 4, and we know it is now 2
            // which means that everything before index 4 has been shifted one
            // and everything after index 4 is still good. Index 4 should be copied to index 2
            if (tempi == originalPressureIndex)
            {
                tempi = 2;
            }
            else if (tempi < originalPressureIndex)
            {
                tempi++;
            }
        }

        locallyDecodedBytesRemaining -= localBytesRead;
        // Scatter this property's values into every point's slot tempi.
        for (int j = 0, x = 0; j < pointCount; j++, x += intsPerPoint)
        {
            rawPointData[x + tempi] = packetDataSet[j];
        }

        // Move the array elements to point to next set of compressed data
        for (uint u = 0; u < locallyDecodedBytesRemaining; u++)
        {
            inputBuffer[u] = inputBuffer[u + (int)localBytesRead];
        }
    }

    // Now that we've read packet data, we must read button data if it is there
    byte[] buttonData = null;
    // since the button state is a simple bit value (either down or up), the button state
    // for a series of packets is packed into an array of bits rather than integers
    // For example, if there are 16 packets, and 2 buttons, then 32 bits can be used (e.g. 1 32-bit integer)
    if (0 != locallyDecodedBytesRemaining && buttonCount > 0)
    {
        // calculate the number of full bytes used for buttons per packet
        //   Example: 10 buttons would require 1 full byte
        int fullBytesForButtonsPerPacket = buttonCount / Native.BitsPerByte;
        // calculate the number of bits that spill beyond the full byte boundary
        //   Example: 10 buttons would require 2 extra bits (8 fit in a full byte)
        int partialBitsForButtonsPerPacket = buttonCount % Native.BitsPerByte;

        // Now figure out how many bytes we need to read for the button data
        localBytesRead = (uint)((buttonCount * pointCount + 7) / Native.BitsPerByte);
        if (localBytesRead > locallyDecodedBytesRemaining)
        {
            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Buffer range is smaller than expected expected size"));
        }
        locallyDecodedBytesRemaining -= localBytesRead;

        int buttonSizeInBytes = (buttonCount + 7) / Native.BitsPerByte;
        buttonData = new byte[pointCount * buttonSizeInBytes];

        // Create a bit reader to unpack the bits from the ISF stream into
        // loosely packed byte buffer (e.g. button data aligned on full byte
        // boundaries only)
        BitStreamReader bitReader = new BitStreamReader(inputBuffer, (uint)buttonCount * pointCount);

        // unpack the button data into each packet
        int byteCounter = 0;
        while (!bitReader.EndOfStream)
        {
            // unpack the fully bytes first
            for (int fullBytes = 0; fullBytes < fullBytesForButtonsPerPacket; fullBytes++)
            {
                buttonData[byteCounter++] = bitReader.ReadByte(Native.BitsPerByte);
            }
            // then unpack a single partial byte if necessary
            if (partialBitsForButtonsPerPacket > 0)
            {
                buttonData[byteCounter++] = bitReader.ReadByte((int)partialBitsForButtonsPerPacket);
            }
        }

        // if the number of bytes allocated != necessary byte amount then an error occurred
        if (byteCounter != buttonData.Length)
        {
            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Button data length not equal to expected length"));
        }

        //
        // set the point data in the raw array
        //
        FillButtonData((int)pointCount,
                       buttonCount,
                       valueIntsPerPoint, //gives the first button index
                       rawPointData,
                       buttonData);
    }

    // NOTE(build-fix): original also passed rawPointData, null, transform to this ctor.
    stylusPoints = new StylusPointCollection(stylusPointDescription /* , rawPointData, null, transform */);

    // if we read too far into the stream (e.g. the packets were compressed)
    // then move the stream pointer back to the end of the actual packet
    // data before returning. This keeps the return value on the function
    // (representing bytes read) honest and consistent with the stream
    // position movement in this function.
    if (0 != locallyDecodedBytesRemaining)
    {
        inputStream.Seek(0 - (long)locallyDecodedBytesRemaining, SeekOrigin.Current);
    }

    return (totalBytesInStrokeBlockOfIsfStream - locallyDecodedBytesRemaining);
}
/// <summary>
/// This function loads a stroke from a memory stream based on the descriptor and GuidList. It returns
/// the number of bytes it has read from the stream to correctly load the stream, which should be same as
/// the value of the size parameter. If they are unequal throws ArgumentException. Stroke descriptor is
/// used to load the packetproperty as well as ExtendedPropertyCollection on this stroke. Compressor is used
/// to decompress the data.
/// </summary>
/// <param name="compressor">legacy decompressor (OLD_ISF builds only)</param>
/// <param name="stream">stream positioned at the start of the stroke block</param>
/// <param name="totalBytesInStrokeBlockOfIsfStream">size in bytes of this stroke block</param>
/// <param name="guidList">guid table used to resolve tags to property guids</param>
/// <param name="strokeDescriptor">descriptor template driving the decode</param>
/// <param name="stylusPointDescription">layout of the per-point packet data</param>
/// <param name="transform">transform applied to the decoded points</param>
/// <param name="stylusPoints">receives the decoded stylus points</param>
/// <param name="extendedProperties">receives decoded extended properties (may remain null)</param>
/// <returns>number of bytes consumed from the stream</returns>
#else
/// <summary>
/// This function loads a stroke from a memory stream based on the descriptor and GuidList. It returns
/// the number of bytes it has read from the stream to correctly load the stream, which should be same as
/// the value of the size parameter. If they are unequal throws ArgumentException. Stroke descriptor is
/// used to load the packetproperty as well as ExtendedPropertyCollection on this stroke. Compressor is used
/// to decompress the data.
/// </summary>
/// <param name="stream">stream positioned at the start of the stroke block</param>
/// <param name="totalBytesInStrokeBlockOfIsfStream">size in bytes of this stroke block</param>
/// <param name="guidList">guid table used to resolve tags to property guids</param>
/// <param name="strokeDescriptor">descriptor template driving the decode</param>
/// <param name="stylusPointDescription">layout of the per-point packet data</param>
/// <param name="transform">transform applied to the decoded points</param>
/// <param name="stylusPoints">receives the decoded stylus points</param>
/// <param name="extendedProperties">receives decoded extended properties (may remain null)</param>
#endif
static uint DecodeISFIntoStroke(
#if OLD_ISF
    Compressor compressor,
#endif
    Stream stream,
    uint totalBytesInStrokeBlockOfIsfStream,
    GuidList guidList,
    StrokeDescriptor strokeDescriptor,
    StylusPointDescription stylusPointDescription,
    Matrix transform,
    out StylusPointCollection stylusPoints,
    out ExtendedPropertyCollection extendedProperties)
{
    stylusPoints = null;
    extendedProperties = null;

    // We do allow a stroke with no packet data
    if (0 == totalBytesInStrokeBlockOfIsfStream)
    {
        return(0);
    }

    uint locallyDecodedBytes;
    uint remainingBytesInStrokeBlock = totalBytesInStrokeBlockOfIsfStream;

    // First try to load any packet data
    locallyDecodedBytes = LoadPackets(stream,
                                      remainingBytesInStrokeBlock,
#if OLD_ISF
                                      compressor,
#endif
                                      stylusPointDescription,
                                      transform,
                                      out stylusPoints);

    if (locallyDecodedBytes > remainingBytesInStrokeBlock)
    {
        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Packet buffer overflowed the ISF stream"));
    }

    remainingBytesInStrokeBlock -= locallyDecodedBytes;
    if (0 == remainingBytesInStrokeBlock)
    {
        // Stroke block contained only packet data - nothing further to decode.
        return(locallyDecodedBytes);
    }

    // Now read the extended properties.
    // NOTE(review): the loop index starts at 1 but reads Template[iTag - 1], so the first
    // template entry is still examined; the handlers below index Template[iTag] directly.
    for (int iTag = 1; iTag < strokeDescriptor.Template.Count && remainingBytesInStrokeBlock > 0; iTag++)
    {
        KnownTagCache.KnownTagIndex tag = strokeDescriptor.Template[iTag - 1];

        switch (tag)
        {
            case MS.Internal.Ink.InkSerializedFormat.KnownTagCache.KnownTagIndex.StrokePropertyList:
                {
                    // we've found the stroke extended properties. Load them now.
                    while (iTag < strokeDescriptor.Template.Count && remainingBytesInStrokeBlock > 0)
                    {
                        tag = strokeDescriptor.Template[iTag];

                        object data;
                        Guid guid = guidList.FindGuid(tag);
                        if (guid == Guid.Empty)
                        {
                            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Stroke Custom Attribute tag embedded in ISF stream does not match guid table"));
                        }

                        // load the extended property data from the stream (and decode the type)
                        locallyDecodedBytes = ExtendedPropertySerializer.DecodeAsISF(stream, remainingBytesInStrokeBlock, guidList, tag, ref guid, out data);

                        // add the guid/data pair into the property collection (don't redecode the type)
                        if (extendedProperties == null)
                        {
                            extendedProperties = new ExtendedPropertyCollection();
                        }
                        extendedProperties[guid] = data;

                        if (locallyDecodedBytes > remainingBytesInStrokeBlock)
                        {
                            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Invalid ISF data"));
                        }

                        remainingBytesInStrokeBlock -= locallyDecodedBytes;
                        iTag++;
                    }
                }
                break;

            case MS.Internal.Ink.InkSerializedFormat.KnownTagCache.KnownTagIndex.Buttons:
                {
                    // Next tag is count of buttons and the tags for the button guids;
                    // skip over all of them in the template.
                    iTag += (int)((uint)strokeDescriptor.Template[iTag]) + 1;
                }
                break;

            // ignore any tags embedded in the Stroke block that this
            // version of the ISF decoder doesn't understand
            default:
                {
                    System.Diagnostics.Trace.WriteLine("Ignoring unhandled stroke tag in ISF stroke descriptor");
                }
                break;
        }
    }

    // Now try to load any tagged property data or point property data
    while (remainingBytesInStrokeBlock > 0)
    {
        // Read the tag first
        KnownTagCache.KnownTagIndex tag;
        uint uiTag;

        locallyDecodedBytes = SerializationHelper.Decode(stream, out uiTag);
        tag = (KnownTagCache.KnownTagIndex)uiTag;
        if (locallyDecodedBytes > remainingBytesInStrokeBlock)
        {
            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Invalid ISF data"));
        }

        remainingBytesInStrokeBlock -= locallyDecodedBytes;

        // if it is a point property block
        switch (tag)
        {
            case MS.Internal.Ink.InkSerializedFormat.KnownTagCache.KnownTagIndex.PointProperty:
                {
                    // First load the totalBytesInStrokeBlockOfIsfStream of the point property block.
                    // NOTE(review): cbsize is decoded (to advance the stream) but its value is not
                    // otherwise used below.
                    uint cbsize;

                    locallyDecodedBytes = SerializationHelper.Decode(stream, out cbsize);
                    if (locallyDecodedBytes > remainingBytesInStrokeBlock)
                    {
                        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Invalid ISF data"));
                    }

                    remainingBytesInStrokeBlock -= locallyDecodedBytes;
                    while (remainingBytesInStrokeBlock > 0)
                    {
                        // First read the tag corresponding to the property
                        locallyDecodedBytes = SerializationHelper.Decode(stream, out uiTag);
                        tag = (KnownTagCache.KnownTagIndex)uiTag;
                        if (locallyDecodedBytes > remainingBytesInStrokeBlock)
                        {
                            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Invalid ISF data"));
                        }

                        remainingBytesInStrokeBlock -= locallyDecodedBytes;

                        // Now read the packet index for which the property will apply
                        uint propindex;

                        locallyDecodedBytes = SerializationHelper.Decode(stream, out propindex);
                        if (locallyDecodedBytes > remainingBytesInStrokeBlock)
                        {
                            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Invalid ISF data"));
                        }

                        remainingBytesInStrokeBlock -= locallyDecodedBytes;

                        uint propsize;

                        locallyDecodedBytes = SerializationHelper.Decode(stream, out propsize);
                        if (locallyDecodedBytes > remainingBytesInStrokeBlock)
                        {
                            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Invalid ISF data"));
                        }

                        remainingBytesInStrokeBlock -= locallyDecodedBytes;

                        // Compressed data totalBytesInStrokeBlockOfIsfStream
                        // (the stored size is one less than the actual byte count)
                        propsize += 1;

                        // Make sure we have enough data to read
                        if (propsize > remainingBytesInStrokeBlock)
                        {
                            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Invalid ISF data"));
                        }

                        byte[] in_buffer = new byte[propsize];

                        uint bytesRead = StrokeCollectionSerializer.ReliableRead(stream, in_buffer, propsize);
                        if (propsize != bytesRead)
                        {
                            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Read different size from stream then expected"));
                        }
                        // NOTE(review): the decompressed buffer is intentionally discarded -
                        // point-level ExtendedProperties are not supported (see assert below).
                        byte[] out_buffer = Compressor.DecompressPropertyData(in_buffer);

                        System.Diagnostics.Debug.Assert(false, "ExtendedProperties for points are not supported");

                        // skip the bytes in both success & failure cases
                        // Note: Point ExtendedProperties are discarded
                        remainingBytesInStrokeBlock -= propsize;
                    }
                }
                break;

            default:
                {
                    object data;
                    Guid guid = guidList.FindGuid(tag);

                    if (guid == Guid.Empty)
                    {
                        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Stroke Custom Attribute tag embedded in ISF stream does not match guid table"));
                    }

                    // load the extended property data from the stream (and decode the type)
                    locallyDecodedBytes = ExtendedPropertySerializer.DecodeAsISF(stream, remainingBytesInStrokeBlock, guidList, tag, ref guid, out data);

                    // add the guid/data pair into the property collection (don't redecode the type)
                    if (extendedProperties == null)
                    {
                        extendedProperties = new ExtendedPropertyCollection();
                    }
                    extendedProperties[guid] = data;

                    if (locallyDecodedBytes > remainingBytesInStrokeBlock)
                    {
                        throw new InvalidOperationException(StrokeCollectionSerializer.ISFDebugMessage("ExtendedProperty decoded totalBytesInStrokeBlockOfIsfStream exceeded ISF stream totalBytesInStrokeBlockOfIsfStream"));
                    }

                    remainingBytesInStrokeBlock -= locallyDecodedBytes;
                }
                break;
        }
    }

    // The whole block must have been consumed exactly.
    if (0 != remainingBytesInStrokeBlock)
    {
        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Invalid ISF data"));
    }

    return(totalBytesInStrokeBlockOfIsfStream);
}
/// <summary>
/// Loads a single ExtendedProperty from the stream and adds that to the list. Tag may be passed as in
/// the case of Stroke ExtendedPropertyCollection where tag is stored in the stroke descriptor, or 0 when tag
/// is embedded in the stream.
/// </summary>
/// <param name="stream">Memory buffer to load from</param>
/// <param name="cbSize">Maximum length of buffer to read</param>
/// <param name="guidList">Guid cache to read from</param>
/// <param name="tag">Guid tag to lookup (0 = read the tag from the stream)</param>
/// <param name="guid">Guid of property</param>
/// <param name="data">Data of property</param>
/// <returns>Length of buffer read</returns>
#endif
internal static uint DecodeAsISF(Stream stream, uint cbSize, GuidList guidList, KnownTagCache.KnownTagIndex tag, ref Guid guid, out object data)
{
    uint cb, cbRead = 0;
    uint cbTotal = cbSize;

    if (0 == cbSize)
    {
        throw new InvalidOperationException(SR.Get(SRID.EmptyDataToLoad));
    }

    if (0 == tag) // no tag is passed, it must be embedded in the data
    {
        uint uiTag;

        cb = SerializationHelper.Decode(stream, out uiTag);
        tag = (KnownTagCache.KnownTagIndex)uiTag;
        if (cb > cbTotal)
        {
            throw new ArgumentException(SR.Get(SRID.InvalidSizeSpecified), "cbSize");
        }

        cbTotal -= cb;
        cbRead += cb;
        System.Diagnostics.Debug.Assert(guid == Guid.Empty);
        guid = guidList.FindGuid(tag);
    }

    if (guid == Guid.Empty)
    {
        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Custom Attribute tag embedded in ISF stream does not match guid table"), "tag");
    }

    // Try and find the size; 0 means the (compressed) size is embedded in the stream.
    uint size = GuidList.GetDataSizeIfKnownGuid(guid);
    if (size > cbTotal)
    {
        throw new ArgumentException(SR.Get(SRID.InvalidSizeSpecified), "cbSize");
    }

    if (0 == size)
    {
        // Size must be embedded in the stream. Find out the compressed data size.
        cb = SerializationHelper.Decode(stream, out size);

        // The stored size is one less than the actual byte count.
        uint cbInsize = size + 1;

        cbRead += cb;
        cbTotal -= cb;
        if (cbInsize > cbTotal)
        {
            // FIX: previously threw a message-less ArgumentException; report the same
            // invalid-size error that the sibling size checks above report.
            throw new ArgumentException(SR.Get(SRID.InvalidSizeSpecified), "cbSize");
        }

        byte[] bytes = new byte[cbInsize];

        // FIX: Stream.Read may legally return fewer bytes than requested even when more
        // data is pending; use ReliableRead (as the other ISF decode paths do) so a
        // short chunked read is not misreported as corrupt data.
        uint bytesRead = StrokeCollectionSerializer.ReliableRead(stream, bytes, cbInsize);
        if (cbInsize != bytesRead)
        {
            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Read different size from stream then expected"), "cbSize");
        }

        cbRead += cbInsize;
        cbTotal -= cbInsize;

        // Decompress the property payload, then decode the attribute from it.
        using (MemoryStream decompressedStream = new MemoryStream(Compressor.DecompressPropertyData(bytes)))
        {
            // Add the property
            data = ExtendedPropertySerializer.DecodeAttribute(guid, decompressedStream);
        }
    }
    else
    {
        // For known size data, we just read the data directly from the stream
        byte[] bytes = new byte[size];

        // FIX: see note above - use ReliableRead instead of a single Stream.Read call.
        uint bytesRead = StrokeCollectionSerializer.ReliableRead(stream, bytes, size);
        if (size != bytesRead)
        {
            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Read different size from stream then expected"), "cbSize");
        }

        using (MemoryStream subStream = new MemoryStream(bytes))
        {
            data = ExtendedPropertySerializer.DecodeAttribute(guid, subStream);
        }

        cbTotal -= size;
        cbRead += size;
    }

    return cbRead;
}
/// <summary>
/// DecompressPacketData - take a byte[] or a subset of a byte[] and decompresses it into
/// an int[] of packet data (for example, x's in a Stroke)
/// </summary>
/// <param name="compressedInput">The byte[] to decompress</param>
/// <param name="size">In: the max size of the subset of compressedInput to read, out: size read</param>
/// <param name="decompressedPackets">The int[] to write the packet data to</param>
#endif
internal static void DecompressPacketData(
#if OLD_ISF
    Compressor compressor,
#endif
    byte[] compressedInput,
    ref uint size,
    int[] decompressedPackets)
{
#if OLD_ISF
    //
    // lock to prevent multi-threaded vulnerabilities
    //
    lock (_compressSync)
    {
#endif
    if (compressedInput == null ||
        size > compressedInput.Length ||
        decompressedPackets == null)
    {
        //we don't raise any information that could be used to attack our ISF code
        //a simple 'ISF Operation Failed' is sufficient since the user can't do
        //anything to fix bogus ISF
        throw new InvalidOperationException(StrokeCollectionSerializer.ISFDebugMessage(SR.Get(SRID.DecompressPacketDataFailed)));
    }
#if OLD_ISF
    // Remember the caller-supplied size so the legacy decompressor below can be
    // run against the same input length and the two results compared.
    uint size2 = size;
#endif
    // Managed decompression path; updates 'size' to the number of bytes consumed.
    size = AlgoModule.DecompressPacketData(compressedInput, decompressedPackets);
#if OLD_ISF
    // Legacy (native) decompression path, executed only to cross-check that the
    // managed implementation above produces identical results.
    MS.Win32.Penimc.CompressorSafeHandle safeCompressorHandle =
        (compressor == null) ? MS.Win32.Penimc.CompressorSafeHandle.Null : compressor._compressorHandle;

    int[] decompressedPackets2 = new int[decompressedPackets.Length];
    byte algo = AlgoModule.NoCompression;
    int hr = MS.Win32.Penimc.UnsafeNativeMethods.IsfDecompressPacketData(
                safeCompressorHandle,
                compressedInput,
                ref size2,
                (uint)decompressedPackets2.Length,
                decompressedPackets2,
                ref algo);

    if (0 != hr)
    {
        //we don't raise any information that could be used to attack our ISF code
        //a simple 'ISF Operation Failed' is sufficient since the user can't do
        //anything to fix bogus ISF
        throw new InvalidOperationException(StrokeCollectionSerializer.ISFDebugMessage("IsfDecompressPacketData returned: " + hr.ToString(CultureInfo.InvariantCulture)));
    }

    // Both paths must have consumed the same number of input bytes...
    if (size != size2)
    {
        throw new InvalidOperationException("MAGIC EXCEPTION: Packet data bytes read didn't match with new uncompression");
    }

    // ...and produced identical packet values.
    for (int i = 0; i < decompressedPackets.Length; i++)
    {
        if (decompressedPackets[i] != decompressedPackets2[i])
        {
            throw new InvalidOperationException("MAGIC EXCEPTION: Packet data didn't match with new uncompression at index " + i.ToString());
        }
    }
    }
#endif
}
/// <summary>
/// Uncompress - expand a gorilla-encoded bit stream into an uncompressed byte[].
/// </summary>
/// <param name="bitCount">number of bits each element was compressed to (0 means full width)</param>
/// <param name="reader">a reader over the compressed byte[]</param>
/// <param name="encodingType">int, short or byte?</param>
/// <param name="unitsToDecode">maximum number of logical units to decode</param>
/// <returns>Uncompressed byte[]</returns>
internal byte[] Uncompress(int bitCount, BitStreamReader reader, GorillaEncodingType encodingType, int unitsToDecode)
{
    // Guard clauses for the caller-supplied arguments.
    if (null == reader)
    {
        throw new ArgumentNullException("reader");
    }
    if (bitCount < 0)
    {
        throw new ArgumentOutOfRangeException("bitCount");
    }
    if (unitsToDecode < 0)
    {
        throw new ArgumentOutOfRangeException("unitsToDecode");
    }

    // Width (in bits) each decoded unit is written out at, and the
    // high-bit mask used to restore sign bits. Only the Int encoding
    // gets a non-zero mask: when bit (bitCount) of a decoded value is set,
    // OR-ing the mask in fills the high-order bits (e.g. bitCount == 5
    // yields mask 0xFFFFFFF0-style top bits). Shorts and bytes are decoded
    // as unsigned values, so their mask stays zero.
    int unitBitWidth;
    uint signExtendMask;

    switch (encodingType)
    {
        case GorillaEncodingType.Int:
            if (bitCount == 0)
            {
                bitCount = Native.BitsPerInt;   // 0 means "stored at full width"
            }
            unitBitWidth = Native.BitsPerInt;
            // uint.MaxValue is the same bit pattern as unchecked((uint)~0)
            signExtendMask = uint.MaxValue << (bitCount - 1);
            break;

        case GorillaEncodingType.Short:
            if (bitCount == 0)
            {
                bitCount = Native.BitsPerShort;
            }
            unitBitWidth = Native.BitsPerShort;
            signExtendMask = 0;
            break;

        case GorillaEncodingType.Byte:
            if (bitCount == 0)
            {
                bitCount = Native.BitsPerByte;
            }
            unitBitWidth = Native.BitsPerByte;
            signExtendMask = 0;
            break;

        default:
            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("bogus GorillaEncodingType passed to Uncompress"));
    }

    // Pre-size the output list for the expected number of whole bytes.
    List<byte> decoded = new List<byte>((unitBitWidth / 8) * unitsToDecode);
    BitStreamWriter bitWriter = new BitStreamWriter(decoded);

    // Pull one unit at a time until the input runs dry or the quota is met.
    while (!reader.EndOfStream && unitsToDecode > 0)
    {
        uint value = reader.ReadUInt32(bitCount);

        // Restore the high-order bits when the stored value's top bit is set.
        if ((value & signExtendMask) != 0)
        {
            value |= signExtendMask;
        }

        bitWriter.WriteReverse(value, unitBitWidth);
        unitsToDecode--;
    }

    return decoded.ToArray();
}
/// <summary>
/// Loads drawing attributes from a memory buffer.
/// </summary>
/// <param name="stream">Memory buffer to read from</param>
/// <param name="guidList">Guid tags if extended properties are used</param>
/// <param name="maximumStreamSize">Maximum size of buffer to read through</param>
/// <param name="da">The drawing attributes collection to decode into</param>
/// <returns>Number of bytes read</returns>
#else
/// <summary>
/// Loads drawing attributes from a memory buffer.
/// </summary>
/// <param name="stream">Memory buffer to read from</param>
/// <param name="guidList">Guid tags if extended properties are used</param>
/// <param name="maximumStreamSize">Maximum size of buffer to read through</param>
/// <param name="da">The drawing attributes collection to decode into</param>
/// <returns>Number of bytes read</returns>
#endif
internal static uint DecodeAsISF(Stream stream, GuidList guidList, uint maximumStreamSize, DrawingAttributes da)
{
    // V1 defaults, used for every attribute the stream does not override.
    PenTip penTip = PenTip.Default;
    // NOTE(review): penStyle is decoded below but not otherwise used in this method.
    PenStyle penStyle = PenStyle.Default;
    double stylusWidth = DrawingAttributeSerializer.V1PenWidthWhenWidthIsMissing;
    double stylusHeight = DrawingAttributeSerializer.V1PenHeightWhenHeightIsMissing;
    uint rasterOperation = DrawingAttributeSerializer.RasterOperationDefaultV1;
    int transparency = DrawingAttributeSerializer.TransparencyDefaultV1;
    bool widthIsSetInISF = false; //did we find KnownIds.Width?
    bool heightIsSetInISF = false; //did we find KnownIds.Height?

    // Total block size; returned at the end. maximumStreamSize is counted down
    // as bytes are consumed and must reach exactly zero.
    uint cbTotal = maximumStreamSize;

    while (maximumStreamSize > 0)
    {
        KnownTagCache.KnownTagIndex tag;
        uint uiTag;

        // First read the tag
        uint cb = SerializationHelper.Decode(stream, out uiTag);
        tag = (KnownTagCache.KnownTagIndex)uiTag;

        if (maximumStreamSize < cb)
        {
            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("ISF size is larger than maximum stream size"));
        }

        maximumStreamSize -= cb;

        // Get the guid based on the tag
        Guid guid = guidList.FindGuid(tag);
        if (guid == Guid.Empty)
        {
            throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Drawing Attribute tag embedded in ISF stream does not match guid table"));
        }

        uint dw = 0;

        if (KnownIds.PenTip == guid)
        {
            cb = SerializationHelper.Decode(stream, out dw);
            penTip = (PenTip)dw;
            if (!PenTipHelper.IsDefined(penTip))
            {
                throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Invalid PenTip value found in ISF stream"));
            }
            maximumStreamSize -= cb;
        }
        else if (KnownIds.PenStyle == guid)
        {
            cb = SerializationHelper.Decode(stream, out dw);
            penStyle = (PenStyle)dw;
            maximumStreamSize -= cb;
        }
        else if (KnownIds.DrawingFlags == guid)
        {
            // Encode the drawing flags with considerations for v2 model
            cb = SerializationHelper.Decode(stream, out dw);
            DrawingFlags flags = (DrawingFlags)dw;
            da.DrawingFlags = flags;
            maximumStreamSize -= cb;
        }
        else if (KnownIds.RasterOperation == guid)
        {
            uint ropSize = GuidList.GetDataSizeIfKnownGuid(KnownIds.RasterOperation);
            if (ropSize == 0)
            {
                throw new InvalidOperationException(StrokeCollectionSerializer.ISFDebugMessage("ROP data size was not found"));
            }

            byte[] data = new byte[ropSize];
            // NOTE(review): the return value of Stream.Read is ignored here, so a short
            // read would go undetected; other decode paths use ReliableRead - confirm.
            stream.Read(data, 0, (int)ropSize);

            // NOTE(review): 'data' was just allocated, so the null check is always true.
            if (data != null && data.Length > 0)
            {
                //data[0] holds the allowable values of 0-255
                rasterOperation = Convert.ToUInt32(data[0]);
            }

            maximumStreamSize -= ropSize;
        }
        else if (KnownIds.CurveFittingError == guid)
        {
            cb = SerializationHelper.Decode(stream, out dw);
            da.FittingError = (int)dw;
            maximumStreamSize -= cb;
        }
        else if (KnownIds.StylusHeight == guid || KnownIds.StylusWidth == guid)
        {
            // Width/height are stored as an integer part (always present) optionally
            // followed by a Mantissa block holding a compressed fractional part.
            double _size;

            cb = SerializationHelper.Decode(stream, out dw);
            _size = (double)dw;
            maximumStreamSize -= cb;
            if (maximumStreamSize > 0)
            {
                // Peek at the next tag to see whether a Mantissa block follows.
                cb = SerializationHelper.Decode(stream, out dw);
                maximumStreamSize -= cb;
                if (KnownTagCache.KnownTagIndex.Mantissa == (KnownTagCache.KnownTagIndex)dw)
                {
                    uint cbInSize;

                    // First thing that is in there is maximumStreamSize of the data
                    cb = SerializationHelper.Decode(stream, out cbInSize);
                    maximumStreamSize -= cb;

                    // in maximumStreamSize is one more than the decoded no
                    cbInSize++;
                    if (cbInSize > maximumStreamSize)
                    {
                        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("ISF size if greater then maximum stream size"));
                    }

                    byte[] in_data = new byte[cbInSize];

                    uint bytesRead = (uint)stream.Read(in_data, 0, (int)cbInSize);
                    if (cbInSize != bytesRead)
                    {
                        throw new ArgumentException(StrokeCollectionSerializer.ISFDebugMessage("Read different size from stream then expected"));
                    }

                    byte[] out_buffer = Compressor.DecompressPropertyData(in_data);

                    // The decompressed payload is a 16-bit fixed-point fraction.
                    using (MemoryStream localStream = new MemoryStream(out_buffer))
                    using (BinaryReader rdr = new BinaryReader(localStream))
                    {
                        short sFraction = rdr.ReadInt16();
                        _size += (double)(sFraction / DrawingAttributes.StylusPrecision);
                        maximumStreamSize -= cbInSize;
                    }
                }
                else
                {
                    // Not a Mantissa block: seek it back by cb and undo the byte accounting.
                    stream.Seek(-cb, SeekOrigin.Current);
                    maximumStreamSize += cb;
                }
            }

            if (KnownIds.StylusWidth == guid)
            {
                widthIsSetInISF = true;
                stylusWidth = _size;
            }
            else
            {
                heightIsSetInISF = true;
                stylusHeight = _size;
            }
        }
        else if (KnownIds.Transparency == guid)
        {
            cb = SerializationHelper.Decode(stream, out dw);
            transparency = (int)dw;
            maximumStreamSize -= cb;
        }
        else if (KnownIds.Color == guid)
        {
            cb = SerializationHelper.Decode(stream, out dw);

            // dw packs the color as 0x00BBGGRR (little-endian COLORREF layout).
            Color color = Color.FromRgb((byte)(dw & 0xff), (byte)((dw & 0xff00) >> Native.BitsPerByte), (byte)((dw & 0xff0000) >> (Native.BitsPerByte * 2)));
            da.Color = color;
            maximumStreamSize -= cb;
        }
        else if (KnownIds.StylusTipTransform == guid)
        {
            try
            {
                object data;
                cb = ExtendedPropertySerializer.DecodeAsISF(stream, maximumStreamSize, guidList, tag, ref guid, out data);

                Matrix matrix = Matrix.Parse((string)data);
                da.StylusTipTransform = matrix;
            }
            catch (InvalidOperationException) // Matrix.Parse failed.
            {
                System.Diagnostics.Debug.Assert(false, "Corrupt Matrix in the ExtendedPropertyCollection!");
            }
            finally
            {
                // NOTE(review): if DecodeAsISF itself throws before assigning cb, this
                // subtracts the previous iteration's cb - confirm this path is unreachable.
                maximumStreamSize -= cb;
            }
        }
        else
        {
            // Unknown attribute: store it as a generic extended property.
            object data;
            cb = ExtendedPropertySerializer.DecodeAsISF(stream, maximumStreamSize, guidList, tag, ref guid, out data);
            maximumStreamSize -= cb;
            da.AddPropertyData(guid, data);
        }
    }

    if (0 != maximumStreamSize)
    {
        // NOTE(review): thrown without a message, unlike the other failures above.
        throw new ArgumentException();
    }

    //
    // time to create our drawing attributes.
    //
    // 1) First we need to evaluate PenTip / StylusTip
    // Here is the V1 - V2 mapping
    //
    // PenTip.Circle     == StylusTip.Ellipse
    // PenTip.Rectangle  == StylusTip.Rectangle
    // PenTip.Rectangle  == StylusTip.Diamond
    if (penTip == PenTip.Default)
    {
        //Since StylusTip is stored in the EPC at this point (if set), we can compare against it here.
        if (da.StylusTip != StylusTip.Ellipse)
        {
            //
            // StylusTip was set to something other than Ellipse
            // when we last serialized (or else StylusTip would be Ellipse, the default)
            // when StylusTip is != Ellipse and we serialize, we set PenTip to Rectangle
            // which is not the default.  Therefore, if PenTip is back to Circle,
            // that means someone set it in V1 and we should respect that by
            // changing StylusTip back to Ellipse
            //
            da.StylusTip = StylusTip.Ellipse;
        }
        //else da.StylusTip is already set
    }
    else
    {
        System.Diagnostics.Debug.Assert(penTip == PenTip.Rectangle);
        if (da.StylusTip == StylusTip.Ellipse)
        {
            //
            // PenTip is Rectangle and StylusTip was either not set
            // before or was set to Ellipse and PenTip was changed
            // in a V1 ink object.  Either way, we need to change StylusTip to Rectangle
            //
            da.StylusTip = StylusTip.Rectangle;
        }
        //else da.StylusTip is already set
    }

    //
    // 2) next we need to set hight and width
    //
    if (da.StylusTip == StylusTip.Ellipse &&
        widthIsSetInISF &&
        !heightIsSetInISF)
    {
        //
        // special case: V1 PenTip of Circle only used Width to compute the circle size
        // and so it only serializes Width of 53
        // but since our default is Ellipse, if Height is unset and we use the default
        // height of 30, then your ink that looked like 53,53 in V1 will look
        // like 30,53 here.
        //
        stylusHeight = stylusWidth;
        da.HeightChangedForCompatabity = true;
    }

    // need to convert width/height into Avalon, since they are stored in HIMETRIC in ISF
    stylusHeight *= StrokeCollectionSerializer.HimetricToAvalonMultiplier;
    stylusWidth *= StrokeCollectionSerializer.HimetricToAvalonMultiplier;

    // Map 0.0 width to DrawingAttributes.DefaultXXXXXX (V1 53 equivalent)
    double height = DoubleUtil.IsZero(stylusHeight) ? (Double)DrawingAttributes.GetDefaultDrawingAttributeValue(KnownIds.StylusHeight) : stylusHeight;
    double width = DoubleUtil.IsZero(stylusWidth) ? (Double)DrawingAttributes.GetDefaultDrawingAttributeValue(KnownIds.StylusWidth) : stylusWidth;

    da.Height = GetCappedHeightOrWidth(height);
    da.Width = GetCappedHeightOrWidth(width);

    //
    // 3) next we need to set IsHighlighter (by looking for RasterOperation.MaskPen)
    //

    //
    // always store raster op
    //
    da.RasterOperation = rasterOperation;
    if (rasterOperation == DrawingAttributeSerializer.RasterOperationDefaultV1)
    {
        //
        // if rasterop is default, make sure IsHighlighter isn't in the EPC
        //
        if (da.ContainsPropertyData(KnownIds.IsHighlighter))
        {
            da.RemovePropertyData(KnownIds.IsHighlighter);
        }
    }
    else
    {
        if (rasterOperation == DrawingAttributeSerializer.RasterOperationMaskPen)
        {
            da.IsHighlighter = true;
        }
    }
    //else, IsHighlighter will be set to false by default, no need to set it

    //
    // 4) see if there is a transparency we need to add to color
    //
    if (transparency > DrawingAttributeSerializer.TransparencyDefaultV1)
    {
        //note: Color.A is set to 255 by default, which means fully opaque
        //transparency is just the opposite - 0 means fully opaque so
        //we need to flip the values
        int alpha = MathHelper.AbsNoThrow(transparency - 255);
        Color color = da.Color;
        color.A = Convert.ToByte(alpha);
        da.Color = color;
    }

    return(cbTotal);
}