/// <summary>
/// Compresses the file at <paramref name="inFilePath"/> and writes the result to
/// <paramref name="toFilePath"/>. The compression context is disposed when done.
/// </summary>
public static void Compress(string inFilePath, string toFilePath)
{
    using (var compressionContext = new CompressionContext(inFilePath, toFilePath))
    {
        ExecuteCompression(compressionContext);
    }
}
/// <summary>
/// Checks one segment [segment.First, segment.Second] of the uncompressed key frames:
/// for every interior key frame it computes a normalized time parameter between the two
/// segment end frames and asks context.ComputeError how far the frame deviates from the
/// interpolation of the end values. If the worst deviation exceeds context.Threshold the
/// segment is split at that frame (SplitSegment); otherwise all interior frames stay dropped.
/// NOTE(review): `ticks` is 0 when both end key frames share the same Time, which would make
/// `parameter` NaN/Infinity — presumably key frame times are strictly increasing; confirm.
/// </summary>
private static void CompressSegment <T0, T1>(Pair <int> segment, CompressionContext <T0, T1> context) { int startIndex = segment.First; int endIndex = segment.Second; Debug.Assert(endIndex - startIndex > 1, "Empty segment."); Debug.Assert(context.CompressedKeyFrames.Contains(startIndex), "Start index of segment needs to be in CompressedKeyFrames."); Debug.Assert(context.CompressedKeyFrames.Contains(endIndex), "End index of segment needs to be in CompressedKeyFrames."); // Check whether all keyframes are within tolerance. var start = context.UncompressedKeyFrames[startIndex]; var end = context.UncompressedKeyFrames[endIndex]; float ticks = end.Time.Ticks - start.Time.Ticks; float maxError = 0; int maxErrorIndex = 0; for (int i = startIndex + 1; i < endIndex; i++) { var current = context.UncompressedKeyFrames[i]; float parameter = (current.Time.Ticks - start.Time.Ticks) / ticks; float error = context.ComputeError(context.GetValue(current), context.GetValue(start), context.GetValue(end), parameter); if (error > maxError) { maxError = error; maxErrorIndex = i; } } if (maxError > context.Threshold) { SplitSegment(startIndex, maxErrorIndex, endIndex, context); } }
/// <summary>
/// Strategy-pattern demo: creates an archive with the Zip strategy, swaps in the Rar
/// strategy, creates another archive, then blocks on Console.Read().
/// NOTE(review): in the "Method - II" region both Equals calls compare a string literal
/// with itself, so the first branch is always taken and the else-if is dead code; `ctxTwo`
/// is assigned null and never used. Looks like placeholder/demo scaffolding — consider
/// wiring the comparisons to real input or removing the region.
/// </summary>
static void Main(string[] args) { // The client code picks a concrete strategy and passes it to the // context. The client should be aware of the differences between // strategies in order to make the right choice. CompressionContext ctx = new CompressionContext(new ZipCompressionStrategy()); ctx.CreateArchive("DotNetDesignPattern"); ctx.SetStrategy(new RarCompressionStrategy()); ctx.CreateArchive("DotNetDesignPattern"); Console.Read(); #region Method - II CompressionContext ctxTwo = null; //ctxTwo = new CompressionContext(); //Create the default constructor in the CompressionContext class if ("ZipCompression".Equals("ZipCompression", StringComparison.InvariantCultureIgnoreCase)) { ctx.SetStrategy(new ZipCompressionStrategy()); } else if ("RarCompression".Equals("RarCompression", StringComparison.InvariantCultureIgnoreCase)) { ctx.SetStrategy(new RarCompressionStrategy()); } #endregion }
/// <summary>
/// Parses a primary (uncompressed) record page from its raw bytes. The compression
/// context is stored before parsing, since record parsing may depend on it.
/// </summary>
internal PrimaryRecordPage(byte[] bytes, CompressionContext compression, Database database)
    : base(bytes, database)
{
    CompressionContext = compression;
    parseRecords();
}
/// <summary>
/// Compresses the selected source file with the requested algorithm and returns the
/// compressed file as a download with a matching extension and content type.
/// </summary>
/// <param name="selectedfile">Name of a file inside <c>AppSettings.SourceFolder</c>.</param>
/// <param name="compressiontype">One of "Deflate", "GZip" or "Zip".</param>
/// <returns>The compressed file, or 400 Bad Request for an unknown compression type.</returns>
public IActionResult CompressFile(string selectedfile, string compressiontype)
{
    string extension;
    string contentType;
    CompressionContext context;

    switch (compressiontype)
    {
        case "Deflate":
            extension = ".cmp";
            contentType = "application/deflate";
            context = new CompressionContext(new DeflateAlgorithm());
            break;
        case "GZip":
            extension = ".gz";
            contentType = "application/gzip";
            context = new CompressionContext(new GZipAlgorithm());
            break;
        case "Zip":
            extension = ".zip";
            contentType = "application/zip";
            context = new CompressionContext(new ZipAlgorithm());
            break;
        default:
            // BUG FIX: an unrecognized compression type previously fell through with
            // `context` (and extension/contentType) left null/empty and crashed with a
            // NullReferenceException below. Fail fast with a 400 instead.
            return BadRequest("Unsupported compression type: " + compressiontype);
    }

    // NOTE(review): selectedfile comes from the client; consider sanitizing it
    // (e.g. Path.GetFileName) to prevent path traversal outside SourceFolder.
    // Path.Combine avoids hard-coded '\' separators and double-separator issues.
    string source = Path.Combine(AppSettings.SourceFolder, selectedfile);
    string destination = Path.Combine(
        AppSettings.DestinationFolder,
        Path.GetFileNameWithoutExtension(selectedfile) + extension);

    context.Compress(source, destination);
    return File(destination, contentType, Path.GetFileName(destination));
}
/// <summary>
/// Creates a primary record page from raw page bytes and immediately parses its records.
/// </summary>
internal PrimaryRecordPage(byte[] bytes, CompressionContext compression, Database database)
    : base(bytes, database)
{
    // Store the compression settings first; parseRecords may rely on them.
    CompressionContext = compression;

    parseRecords();
}
/// <summary>
/// datetimeoffset(scale) parser: composed of a datetime2 part plus two extra bytes,
/// so its fixed length is the datetime2 length + 2.
/// </summary>
public SqlDateTimeOffset(byte scale, CompressionContext compression)
    : base(compression)
{
    this.scale = scale;

    dateTime2 = new SqlDateTime2(scale, compression);
    length = (byte)(2 + dateTime2.FixedLength.Value);
}
/// <summary>
/// datetime2(scale) parser: combines a time part (scale-dependent length) and a date
/// part; the fixed length is the time length + 3.
/// </summary>
public SqlDateTime2(byte scale, CompressionContext compression)
    : base(compression)
{
    this.scale = scale;

    time = new SqlTime(this.scale, compression);
    date = new SqlDate(compression);
    length = (byte)(3 + time.FixedLength.Value);
}
/// <summary>
/// Lazily materializes one <see cref="Row"/> per record on this page, using
/// <paramref name="schema"/> as the template. For each column an ISqlType is created via
/// SqlTypeFactory; variable-length columns are read from the record's variable-length data
/// array (a nullable var-length column missing from the tail of that array is decoded from
/// an empty byte array), fixed-length columns are sliced out of FixedLengthData at a running
/// offset. Columns flagged in the record's null bitmap are left null.
/// Note the body's own comment: FixedLength is cached into a local before GetValue because
/// it may change after a value is read (e.g. SqlBit).
/// </summary>
internal IEnumerable <Row> GetEntities(Row schema, CompressionContext compression) { for (int i = 0; i < Records.Length; i++) { var record = Records[i]; short fixedOffset = 0; short variableColumnIndex = 0; int columnIndex = 0; var readState = new RecordReadState(); var dataRow = schema.NewRow(); foreach (DataColumn col in dataRow.Columns) { var sqlType = SqlTypeFactory.Create(col, readState, compression); object columnValue = null; if (sqlType.IsVariableLength) { if (!record.HasNullBitmap || !record.NullBitmap[columnIndex]) { // If a nullable varlength column does not have a value, it may be not even appear in the varlength column array if it's at the tail if (record.VariableLengthColumnData.Count <= variableColumnIndex) { columnValue = sqlType.GetValue(new byte[] { }); } else { columnValue = sqlType.GetValue(record.VariableLengthColumnData[variableColumnIndex].GetBytes().ToArray()); } } variableColumnIndex++; } else { // Must cache type FixedLength as it may change after getting a value (e.g. SqlBit) short fixedLength = sqlType.FixedLength.Value; if (!record.HasNullBitmap || !record.NullBitmap[columnIndex]) { columnValue = sqlType.GetValue(record.FixedLengthData.Skip(fixedOffset).Take(fixedLength).ToArray()); } fixedOffset += fixedLength; } columnIndex++; dataRow[col] = columnValue; } yield return(dataRow); } }
/// <summary>
/// Walks the nonclustered index page chain starting at <paramref name="loc"/>, following
/// each page's NextPage pointer until it reaches PagePointer.Zero, and streams out every
/// entity on every visited page.
/// </summary>
internal IEnumerable<Row> ScanLinkedNonclusteredIndexPages(PagePointer loc, Row schema, CompressionContext compression)
{
    var current = loc;

    while (current != PagePointer.Zero)
    {
        var indexPage = Database.GetNonclusteredIndexPage(current);

        foreach (var row in indexPage.GetEntities(schema, compression))
        {
            yield return row;
        }

        current = indexPage.Header.NextPage;
    }
}
/// <summary>
/// Verifies that CompressionContext.CreateArchive delegates to the injected
/// ICompressionStrategy's Compress method.
/// </summary>
public void ArchiveByCompressing()
{
    // Arrange: a strategy stub that reports success.
    var strategyMock = new Mock<ICompressionStrategy>();
    strategyMock.Setup(s => s.Compress()).Returns("Compression Done Successfully");

    // Act.
    ICompressionContext context = new CompressionContext(strategyMock.Object);
    context.CreateArchive();

    // Assert: the strategy was actually invoked.
    strategyMock.Verify(s => s.Compress());
}
/// <summary>
/// time(scale) parser. The fixed length depends on the scale: 3 bytes for 0-2,
/// 4 bytes for 3-4, 5 bytes for 5-7; anything above 7 is rejected.
/// </summary>
/// <exception cref="ArgumentException">The scale is greater than 7.</exception>
public SqlTime(byte scale, CompressionContext compression)
    : base(compression)
{
    this.scale = scale;

    if (scale > 7)
        throw new ArgumentException("Invalid scale: " + scale);

    length = (byte)(scale <= 2 ? 3 : scale <= 4 ? 4 : 5);
}
/// <summary>
/// Decodes every record on this page into a <see cref="Row"/> cloned from
/// <paramref name="schema"/>. Per column: variable-length values come from the record's
/// variable-length column array (an absent tail entry decodes from an empty byte array),
/// fixed-length values are cut from FixedLengthData at a running offset, and columns set
/// in the record's null bitmap stay null. The fixed offset is advanced even for null
/// fixed-length columns, so subsequent columns still line up.
/// </summary>
internal IEnumerable<Row> GetEntities(Row schema, CompressionContext compression) { for (int i = 0; i < Records.Length; i++) { var record = Records[i]; short fixedOffset = 0; short variableColumnIndex = 0; int columnIndex = 0; var readState = new RecordReadState(); var dataRow = schema.NewRow(); foreach (DataColumn col in dataRow.Columns) { var sqlType = SqlTypeFactory.Create(col, readState, compression); object columnValue = null; if (sqlType.IsVariableLength) { if (!record.HasNullBitmap || !record.NullBitmap[columnIndex]) { // If a nullable varlength column does not have a value, it may be not even appear in the varlength column array if it's at the tail if (record.VariableLengthColumnData.Count <= variableColumnIndex) columnValue = sqlType.GetValue(new byte[] { }); else columnValue = sqlType.GetValue(record.VariableLengthColumnData[variableColumnIndex].GetBytes().ToArray()); } variableColumnIndex++; } else { // Must cache type FixedLength as it may change after getting a value (e.g. SqlBit) short fixedLength = sqlType.FixedLength.Value; if (!record.HasNullBitmap || !record.NullBitmap[columnIndex]) columnValue = sqlType.GetValue(record.FixedLengthData.Skip(fixedOffset).Take(fixedLength).ToArray()); fixedOffset += fixedLength; } columnIndex++; dataRow[col] = columnValue; } yield return dataRow; } }
/// <summary>
/// Picks the record entity parser matching the compression level of the page at
/// <paramref name="loc"/>: row-compressed pages get a CompressedRecordEntityParser,
/// uncompressed pages a PrimaryRecordEntityParser. Page compression is not implemented.
/// </summary>
/// <exception cref="NotImplementedException">The page uses page-level compression.</exception>
/// <exception cref="ArgumentException">The compression level is unrecognized.</exception>
internal static RecordEntityParser CreateEntityParserForPage(PagePointer loc, CompressionContext compression, Database database)
{
    var level = compression.CompressionLevel;

    if (level == CompressionLevel.Page)
        throw new NotImplementedException("Page compression not yet supported.");

    if (level == CompressionLevel.Row)
        return new CompressedRecordEntityParser(database.GetCompressedRecordPage(loc, compression));

    if (level == CompressionLevel.None)
        return new PrimaryRecordEntityParser(database.GetPrimaryRecordPage(loc, compression), compression);

    throw new ArgumentException("Unsupported compression level: " + compression.CompressionLevel);
}
/// <summary>
/// Lossy key-frame reduction: seeds the compressed set with the first and last key frame,
/// then repeatedly pops segments off a work stack and lets CompressSegment split them at the
/// key frame with the largest interpolation error until every dropped frame is within
/// <paramref name="threshold"/>. The surviving indices are sorted and copied into
/// <paramref name="targetAnimation"/>, converting each value with <paramref name="getValue"/>.
/// NOTE(review): assumes the source animation has at least 2 key frames — with fewer,
/// endIndex would be &lt;= 0 and the initial segment degenerate (only guarded by
/// Debug.Assert in CompressSegment); confirm with callers.
/// </summary>
private static void Compress <T0, T1>(KeyFrameAnimation <T0> sourceAnimation, KeyFrameAnimation <T1> targetAnimation, float threshold, Func <IKeyFrame <T0>, T1> getValue, Func <T1, T1, T1, float, float> computeError) { var keyFrames = sourceAnimation.KeyFrames; var context = new CompressionContext <T0, T1> { UncompressedKeyFrames = keyFrames, CompressedKeyFrames = new List <int>(), Segments = new Stack <Pair <int> >(), GetValue = getValue, ComputeError = computeError, Threshold = threshold }; const int startIndex = 0; int endIndex = keyFrames.Count - 1; Debug.Assert(context.CompressedKeyFrames.Count == 0); context.CompressedKeyFrames.Add(startIndex); context.CompressedKeyFrames.Add(endIndex); Debug.Assert(context.Segments.Count == 0); context.Segments.Push(new Pair <int>(startIndex, endIndex)); do { CompressSegment(context.Segments.Pop(), context); } while (context.Segments.Count > 0); Debug.Assert( context.CompressedKeyFrames.Distinct().Count() == context.CompressedKeyFrames.Count, "CompressedKeyFrames should not contain duplicates."); // Build compressed animation. context.CompressedKeyFrames.Sort(); foreach (int index in context.CompressedKeyFrames) { var keyFrame = keyFrames[index]; targetAnimation.KeyFrames.Add(new KeyFrame <T1>(keyFrame.Time, getValue(keyFrame))); } }
/// <summary>
/// Verifies that CreateArchive applies whichever compression strategy is currently set:
/// first Zip via the constructor, then Rar via SetStrategy.
/// </summary>
public void Test_Compression()
{
    var file = new File { Comporession = CompressionType.None, FileName = "testingCompressionFile" };

    // Initial strategy comes from the constructor.
    var context = new CompressionContext(new ZIPCompression());
    context.CreateArchive(file);
    Assert.That(file.Comporession, Is.EqualTo(CompressionType.Zip));

    // Swapping the strategy changes the outcome of the next archive call.
    context.SetStrategy(new RARCompression());
    context.CreateArchive(file);
    Assert.That(file.Comporession, Is.EqualTo(CompressionType.Rar));
}
/// <summary>
/// time(scale) parser; the fixed length is derived from the scale
/// (3, 4 or 5 bytes for scales 0-2, 3-4 and 5-7 respectively).
/// </summary>
/// <exception cref="ArgumentException">The scale is greater than 7.</exception>
public SqlTime(byte scale, CompressionContext compression)
    : base(compression)
{
    this.scale = scale;

    // Reject invalid scales up front, then map the remaining ranges to a length.
    if (scale > 7)
    {
        throw new ArgumentException("Invalid scale: " + scale);
    }

    if (scale <= 2)
    {
        length = 3;
    }
    else if (scale <= 4)
    {
        length = 4;
    }
    else
    {
        length = 5;
    }
}
/// <summary>
/// WCF message-inspector hook: if a CompressionContext was attached to the current
/// OperationContext (presumably by the matching request-side inspector — verify), sets the
/// reply's Content-Encoding header to "gzip" or "deflate" according to context.Type.
/// Any exception is logged and swallowed so the reply is still sent.
/// NOTE(review): `prop` is not null-checked; if the HttpResponseMessageProperty is missing
/// the NullReferenceException is caught and logged as "Could not process request" —
/// confirm that is the intended behavior.
/// </summary>
public void BeforeSendReply(ref Message reply, object correlationState) { try { CompressionContext context = OperationContext.Current.Extensions.Find <CompressionContext>(); if (context != null) { var prop = reply.Properties[HttpResponseMessageProperty.Name] as HttpResponseMessageProperty; Log.Trace("CompressionMessageInspector::BeforeSendReply set {0} encoding for {1} response", context.Type, prop.Headers[HttpResponseHeader.ContentType]); if (context.Type == CompressionType.GZip) { prop.Headers[HttpResponseHeader.ContentEncoding] = "gzip"; } else if (context.Type == CompressionType.Deflate) { prop.Headers[HttpResponseHeader.ContentEncoding] = "deflate"; } } } catch (Exception e) { Log.Error("Could not process request", e); } }
/// <summary>
/// Marks <paramref name="splitIndex"/> as a kept key frame and pushes the resulting
/// sub-segments that still contain interior key frames onto the work stack. The second
/// half is pushed before the first so that (startIndex, splitIndex) ends up on top of the
/// stack and is processed first — do not reorder the two pushes.
/// </summary>
private static void SplitSegment(int startIndex, int splitIndex, int endIndex, CompressionContext context) { Debug.Assert(startIndex < splitIndex && splitIndex < endIndex, "Keyframe indices need to be sorted."); Debug.Assert(context.CompressedKeyFrames.Contains(startIndex), "Start index of segment needs to be in CompressedKeyFrames."); Debug.Assert(context.CompressedKeyFrames.Contains(endIndex), "End index of segment needs to be in CompressedKeyFrames."); context.CompressedKeyFrames.Add(splitIndex); // Split if necessary. (The first segment should be on top of the stack.) if (endIndex - splitIndex > 1) context.Segments.Push(new Pair<int>(splitIndex, endIndex)); if (splitIndex - startIndex > 1) context.Segments.Push(new Pair<int>(startIndex, splitIndex)); }
/// <summary>
/// Reads the raw bytes for the page at <paramref name="loc"/> through the buffer manager
/// and wraps them in a <see cref="PrimaryRecordPage"/>.
/// </summary>
internal PrimaryRecordPage GetPrimaryRecordPage(PagePointer loc, CompressionContext compression)
{
    Debug.WriteLine("Loading Primary Record Page " + loc);

    var pageBytes = bufferManager.GetPageBytes(loc.FileID, loc.PageID);
    return new PrimaryRecordPage(pageBytes, compression, this);
}
/// <summary>
/// Checks one segment of the skeletal key frames in three passes — rotation, translation,
/// scale. Each pass lerps the segment's end transforms by normalized time and tracks the
/// interior key frame with the largest deviation (quaternion angle for rotation, vector
/// distance for translation and scale). The first pass whose worst error exceeds its
/// threshold splits the segment at that key frame and returns; if all three passes stay
/// within tolerance, the segment is accepted and its interior key frames remain removed.
/// NOTE(review): `ticks` is 0 when the end key frames share the same Time (parameter
/// becomes NaN) — presumably key frame times are strictly increasing; confirm.
/// </summary>
private static void CompressSegment(Pair<int> segment, CompressionContext context) { int startIndex = segment.First; int endIndex = segment.Second; Debug.Assert(endIndex - startIndex > 1, "Empty segment."); Debug.Assert(context.CompressedKeyFrames.Contains(startIndex), "Start index of segment needs to be in CompressedKeyFrames."); Debug.Assert(context.CompressedKeyFrames.Contains(endIndex), "End index of segment needs to be in CompressedKeyFrames."); // Check whether all keyframes are within tolerance. var start = context.UncompressedKeyFrames[startIndex]; var end = context.UncompressedKeyFrames[endIndex]; float ticks = end.Time.Ticks - start.Time.Ticks; // ----- Rotation float maxError = 0; int maxErrorIndex = 0; for (int i = startIndex + 1; i < endIndex; i++) { var current = context.UncompressedKeyFrames[i]; float parameter = (current.Time.Ticks - start.Time.Ticks) / ticks; QuaternionF lerpedRotation = InterpolationHelper.Lerp(start.Transform.Rotation, end.Transform.Rotation, parameter); float error = QuaternionF.GetAngle(current.Transform.Rotation, lerpedRotation); if (error > maxError) { maxError = error; maxErrorIndex = i; } } if (maxError > context.RotationThreshold) { SplitSegment(startIndex, maxErrorIndex, endIndex, context); return; } // ----- Translation maxError = 0; maxErrorIndex = 0; for (int i = startIndex + 1; i < endIndex; i++) { var current = context.UncompressedKeyFrames[i]; float parameter = (current.Time.Ticks - start.Time.Ticks) / ticks; Vector3F lerpedTranslation = InterpolationHelper.Lerp(start.Transform.Translation, end.Transform.Translation, parameter); float error = (current.Transform.Translation - lerpedTranslation).Length; if (error > maxError) { maxError = error; maxErrorIndex = i; } } if (maxError > context.TranslationThreshold) { SplitSegment(startIndex, maxErrorIndex, endIndex, context); return; } // ----- Scale maxError = 0; maxErrorIndex = 0; for (int i = startIndex + 1; i < endIndex; i++) { var current = 
context.UncompressedKeyFrames[i]; float parameter = (current.Time.Ticks - start.Time.Ticks) / ticks; Vector3F lerpedScale = InterpolationHelper.Lerp(start.Transform.Scale, end.Transform.Scale, parameter); float error = (current.Transform.Scale - lerpedScale).Length; if (error > maxError) { maxError = error; maxErrorIndex = i; } } if (maxError > context.ScaleThreshold) { SplitSegment(startIndex, maxErrorIndex, endIndex, context); return; } // When we get here: Segment is within tolerance. }
/// <summary>Creates a smallint value parser bound to the given compression context.</summary>
public SqlSmallInt(CompressionContext compression) : base(compression) { }
/// <summary>
/// Creates a bit value parser; stores the shared per-record read state for use while
/// decoding values.
/// </summary>
public SqlBit(RecordReadState readState, CompressionContext compression)
    : base(compression)
{
    _readState = readState;
}
/// <summary>Creates an nvarchar value parser bound to the given compression context.</summary>
public SqlNVarchar(CompressionContext compression) : base(compression) { }
/// <summary>
/// Creates a bit value parser that keeps a reference to the shared per-record read state.
/// </summary>
public SqlBit(RecordReadState readState, CompressionContext compression)
    : base(compression)
{
    this.readState = readState;
}
/// <summary>
/// Creates a decimal(precision, scale) value parser bound to the given compression context.
/// </summary>
public SqlDecimal(byte precision, byte scale, CompressionContext compression)
    : base(compression)
{
    this.scale = scale;
    this.precision = precision;
}
/// <summary>Creates a uniquifier column parser bound to the given compression context.</summary>
public SqlUniquifier(CompressionContext compression) : base(compression) { }
/// <summary>
/// Creates a decimal value parser with the column's declared precision and scale.
/// </summary>
public SqlDecimal(byte precision, byte scale, CompressionContext compression)
    : base(compression)
{
    this.precision = precision;
    this.scale = scale;
}
/// <summary>Creates a tinyint value parser bound to the given compression context.</summary>
public SqlTinyInt(CompressionContext compression) : base(compression) { }
/// <summary>Creates a money value parser bound to the given compression context.</summary>
public SqlMoney(CompressionContext compression) : base(compression) { }
/// <summary>Creates an image value parser bound to the given compression context.</summary>
public SqlImage(CompressionContext compression) : base(compression) { }
/// <summary>
/// Creates a varchar value parser that decodes bytes with the supplied text encoding.
/// </summary>
public SqlVarchar(CompressionContext compression, Encoding encoding)
    : base(compression)
{
    _encoding = encoding;
}
/// <summary>Creates a smallmoney value parser bound to the given compression context.</summary>
public SqlSmallMoney(CompressionContext compression) : base(compression) { }
/// <summary>Creates a smalldatetime value parser bound to the given compression context.</summary>
public SqlSmallDateTime(CompressionContext compression) : base(compression) { }
/// <summary>Creates a date value parser bound to the given compression context.</summary>
public SqlDate(CompressionContext compression) : base(compression) { }
/// <summary>Creates a bigint value parser bound to the given compression context.</summary>
public SqlBigInt(CompressionContext compression) : base(compression) { }
/// <summary>
/// Factory mapping a column's UnderlyingType to the ISqlType implementation that can parse
/// its bytes, forwarding fixed length, precision/scale, encoding and the compression context
/// where the concrete type needs them. Char and Varchar fall back to DefaultEncoding when
/// the column declares no Encoding; Bit additionally receives the shared record read state.
/// </summary>
/// <exception cref="ArgumentException">The column's type has no mapping here.</exception>
public static ISqlType Create(DataColumn column, RecordReadState readState, CompressionContext compression) { switch (column.UnderlyingType) { case ColumnType.Binary: return(new SqlBinary((short)column.VariableFixedLength, compression)); case ColumnType.BigInt: return(new SqlBigInt(compression)); case ColumnType.Bit: return(new SqlBit(readState, compression)); case ColumnType.Char: return(new SqlChar((short)column.VariableFixedLength, column.Encoding ?? DefaultEncoding, compression)); case ColumnType.DateTime: return(new SqlDateTime(compression)); case ColumnType.Float: return(new SqlFloat(column.Precision, compression)); case ColumnType.Decimal: return(new SqlDecimal(column.Precision, column.Scale, compression)); case ColumnType.Image: return(new SqlImage(compression)); case ColumnType.Int: return(new SqlInt(compression)); case ColumnType.Money: return(new SqlMoney(compression)); case ColumnType.NChar: return(new SqlNChar((short)column.VariableFixedLength, compression)); case ColumnType.NText: return(new SqlNText(compression)); case ColumnType.NVarchar: return(new SqlNVarchar(compression)); case ColumnType.RID: return(new SqlRID(compression)); case ColumnType.SmallDatetime: return(new SqlSmallDateTime(compression)); case ColumnType.Date: return(new SqlDate(compression)); case ColumnType.DateTimeOffset: return(new SqlDateTimeOffset(column.Scale, compression)); case ColumnType.DateTime2: return(new SqlDateTime2(column.Scale, compression)); case ColumnType.Time: return(new SqlTime(column.Scale, compression)); case ColumnType.SmallInt: return(new SqlSmallInt(compression)); case ColumnType.SmallMoney: return(new SqlSmallMoney(compression)); case ColumnType.Text: return(new SqlText(compression)); case ColumnType.TinyInt: return(new SqlTinyInt(compression)); case ColumnType.UniqueIdentifier: return(new SqlUniqueIdentifier(compression)); case ColumnType.Uniquifier: return(new SqlUniquifier(compression)); case ColumnType.VarBinary: return(new SqlVarBinary(compression)); 
case ColumnType.Varchar: return(new SqlVarchar(compression, column.Encoding ?? DefaultEncoding)); case ColumnType.Variant: return(new SqlVariant(compression)); case ColumnType.Computed: return(new SqlComputed(compression)); } throw new ArgumentException("Unsupported type: " + column); }
/// <summary>Creates an nvarchar value parser for the supplied compression context.</summary>
public SqlNVarchar(CompressionContext compression) : base(compression) { }
/// <summary>
/// Creates an entity parser over an already-loaded primary record page.
/// </summary>
internal PrimaryRecordEntityParser(PrimaryRecordPage page, CompressionContext compression)
{
    this.compression = compression;
    this.page = page;
}
/// <summary>Creates an int value parser bound to the given compression context.</summary>
public SqlInt(CompressionContext compression) : base(compression) { }
/// <summary>
/// Factory mapping ColumnType to the matching ISqlType parser, passing fixed lengths,
/// precision/scale and the compression context through to the constructors; Bit also
/// receives the shared record read state. Unlike the overload that takes
/// column.UnderlyingType, this variant switches on column.Type and has no mappings for
/// Float, Date, Time, DateTime2, DateTimeOffset or Computed columns.
/// </summary>
/// <exception cref="ArgumentException">The column's type has no mapping here.</exception>
public static ISqlType Create(DataColumn column, RecordReadState readState, CompressionContext compression) { switch(column.Type) { case ColumnType.Binary: return new SqlBinary((short)column.VariableFixedLength, compression); case ColumnType.BigInt: return new SqlBigInt(compression); case ColumnType.Bit: return new SqlBit(readState, compression); case ColumnType.Char: return new SqlChar((short)column.VariableFixedLength, compression); case ColumnType.DateTime: return new SqlDateTime(compression); case ColumnType.Decimal: return new SqlDecimal(column.Precision, column.Scale, compression); case ColumnType.Image: return new SqlImage(compression); case ColumnType.Int: return new SqlInt(compression); case ColumnType.Money: return new SqlMoney(compression); case ColumnType.NChar: return new SqlNChar((short)column.VariableFixedLength, compression); case ColumnType.NText: return new SqlNText(compression); case ColumnType.NVarchar: return new SqlNVarchar(compression); case ColumnType.RID: return new SqlRID(compression); case ColumnType.SmallDatetime: return new SqlSmallDateTime(compression); case ColumnType.SmallInt: return new SqlSmallInt(compression); case ColumnType.SmallMoney: return new SqlSmallMoney(compression); case ColumnType.Text: return new SqlText(compression); case ColumnType.TinyInt: return new SqlTinyInt(compression); case ColumnType.UniqueIdentifier: return new SqlUniqueIdentifier(compression); case ColumnType.Uniquifier: return new SqlUniquifier(compression); case ColumnType.VarBinary: return new SqlVarBinary(compression); case ColumnType.Varchar: return new SqlVarchar(compression); case ColumnType.Variant: return new SqlVariant(compression); } throw new ArgumentException("Unsupported type: " + column); }
/// <summary>Creates a varbinary value parser bound to the given compression context.</summary>
public SqlVarBinary(CompressionContext compression) : base(compression) { }
/// <summary>Creates an image value parser for the supplied compression context.</summary>
public SqlImage(CompressionContext compression) : base(compression) { }
/// <summary>Creates an ntext value parser bound to the given compression context.</summary>
public SqlNText(CompressionContext compression) : base(compression) { }
/// <summary>
/// Creates a float value parser with the column's declared precision.
/// </summary>
public SqlFloat(byte precision, CompressionContext compression)
    : base(compression)
{
    this.precision = precision;
}
/// <summary>Creates a sql_variant value parser bound to the given compression context.</summary>
public SqlVariant(CompressionContext compression) : base(compression) { }
/// <summary>
/// Creates a fixed-length char parser with the column's declared length, decoding bytes
/// with the supplied text encoding.
/// </summary>
public SqlChar(short length, Encoding encoding, CompressionContext compression)
    : base(compression)
{
    _encoding = encoding;
    this.length = length;
}
/// <summary>
/// Base constructor for all SQL type parsers; captures the compression context that
/// concrete types consult while decoding.
/// </summary>
protected SqlTypeBase(CompressionContext compression)
{
    this.CompressionContext = compression;
}
/// <summary>
/// Loads and parses a compressed record page. A CompressionLevel of None is rejected,
/// since such pages must be read via GetPrimaryRecordPage instead.
/// </summary>
/// <exception cref="ArgumentException">The compression level is None.</exception>
internal CompressedRecordPage GetCompressedRecordPage(PagePointer loc, CompressionContext compression)
{
    if (compression.CompressionLevel == CompressionLevel.None)
    {
        throw new ArgumentException("Can't load compressed page with a compression level of none.");
    }

    Debug.WriteLine("Loading compressed record page " + loc);

    var pageBytes = bufferManager.GetPageBytes(loc.FileID, loc.PageID);
    return new CompressedRecordPage(pageBytes, compression, this);
}
/// <summary>
/// Creates a fixed-length binary parser with the column's declared length.
/// </summary>
public SqlBinary(short length, CompressionContext compression)
    : base(compression)
{
    this.length = length;
}
/// <summary> /// Compresses the animation using a simple lossy compression algorithm. /// </summary> /// <param name="scaleThreshold">The scale threshold.</param> /// <param name="rotationThreshold">The rotation threshold in degrees.</param> /// <param name="translationThreshold">The translation threshold.</param> /// <returns> /// The amount of removed key frames in the range [0, 1]. 0 means that no key frames have been /// removed. 0.5 means that 50% of the key frames have been removed. Etc. /// </returns> /// <remarks> /// <para> /// This method compresses the animation by removing key frames that can be computed by /// interpolation of nearby key frames. The threshold parameters define the allowed errors. If /// the thresholds are 0, this compression is lossless. If the thresholds are greater than 0 /// (recommended), the compression is lossy. The best way to determine optimal thresholds is to /// compare the compressed animation with the uncompressed animation visually. /// </para> /// <para> /// This method does nothing if any threshold is negative. /// </para> /// </remarks> public float Compress(float scaleThreshold, float rotationThreshold, float translationThreshold) { // Abort if any threshold is negative. if (scaleThreshold < 0 || rotationThreshold < 0 || translationThreshold < 0) return 0; Unfreeze(); if (_preprocessData == null) return 0; int totalKeyFrameCount = 0; int removedKeyFrameCount = 0; var context = new CompressionContext { CompressedKeyFrames = new List<int>(), Segments = new Stack<Pair<int>>(), ScaleThreshold = scaleThreshold, RotationThreshold = MathHelper.ToRadians(rotationThreshold), TranslationThreshold = translationThreshold }; // Compress channels. 
// Per channel: seed the compressed set with the first and last key frame, then split
// segments at the worst-error key frame (CompressSegment) until within tolerance, and
// rebuild the channel from the sorted surviving indices. Channels with fewer than 3 key
// frames are counted but left untouched. The context is reset between channels so a
// single CompressionContext instance is reused across the whole loop.
foreach (var keyFrames in _preprocessData.Channels.Values) { const int startIndex = 0; int endIndex = keyFrames.Count - 1; totalKeyFrameCount += keyFrames.Count; if (endIndex - startIndex > 1) { Debug.Assert(context.UncompressedKeyFrames == null); context.UncompressedKeyFrames = keyFrames.ToArray(); Debug.Assert(context.CompressedKeyFrames.Count == 0); context.CompressedKeyFrames.Add(startIndex); context.CompressedKeyFrames.Add(endIndex); Debug.Assert(context.Segments.Count == 0); context.Segments.Push(new Pair<int>(startIndex, endIndex)); do { CompressSegment(context.Segments.Pop(), context); } while (context.Segments.Count > 0); Debug.Assert( context.CompressedKeyFrames.Distinct().Count() == context.CompressedKeyFrames.Count, "CompressedKeyFrames should not contain duplicates."); // Rebuild list of keyframes. keyFrames.Clear(); context.CompressedKeyFrames.Sort(); foreach (int index in context.CompressedKeyFrames) keyFrames.Add(context.UncompressedKeyFrames[index]); // Not necessary: Collections will be reduced in Freeze(). //keyFrames.TrimExcess(); removedKeyFrameCount += context.UncompressedKeyFrames.Length - context.CompressedKeyFrames.Count; // Clean up. context.UncompressedKeyFrames = null; context.CompressedKeyFrames.Clear(); } } return (float)removedKeyFrameCount / totalKeyFrameCount; }
/// <summary>
/// Creates a binary(length) parser for the supplied compression context.
/// </summary>
public SqlBinary(short length, CompressionContext compression)
    : base(compression)
{
    // Remember the declared fixed length of the column.
    this.length = length;
}
/// <summary>
/// Shared base constructor: stores the compression context for derived SQL type parsers.
/// </summary>
protected SqlTypeBase(CompressionContext compression)
{
    this.CompressionContext = compression;
}
/// <summary>
/// Wraps a primary record page so its records can be parsed into entities.
/// </summary>
internal PrimaryRecordEntityParser(PrimaryRecordPage page, CompressionContext compression)
{
    this.compression = compression;
    this.page = page;
}
/// <summary>Creates a datetime value parser bound to the given compression context.</summary>
public SqlDateTime(CompressionContext compression) : base(compression) { }
/// <summary>Creates a uniqueidentifier value parser bound to the given compression context.</summary>
public SqlUniqueIdentifier(CompressionContext compression) : base(compression) { }
/// <summary>Creates a varbinary value parser for the supplied compression context.</summary>
public SqlVarBinary(CompressionContext compression) : base(compression) { }