// Constructing a Compressor with an out-of-range module index must throw.
public void TestCompressorLimits()
{
    Assert.Throws<ArgumentOutOfRangeException>(() =>
    {
        var cmp = new Compressor(TestBase.SolenoidModules);
    });
}
// Decodes a base64 string and inflates it with Deflate or GZip, returning the
// UTF-8 text. Returns null on any failure (malformed base64, corrupt stream,
// ...) rather than throwing — callers treat null as "not decompressable".
public static string Decompress(string inputString, Compressor compressor = Compressor.Deflate)
{
    try
    {
        byte[] input = Convert.FromBase64String(inputString);
        using (MemoryStream inmemoryStream = new MemoryStream(input))
        using (MemoryStream outmemoryStream = new MemoryStream())
        {
            // Select the matching decompression wrapper for the chosen algorithm.
            Stream zipStream = compressor == Compressor.Deflate
                ? new DeflateStream(inmemoryStream, CompressionMode.Decompress) as Stream
                : new GZipStream(inmemoryStream, CompressionMode.Decompress) as Stream;
            using (zipStream)
            {
                zipStream.CopyTo(outmemoryStream);
            }
            byte[] output = outmemoryStream.ToArray();
            return Encoding.UTF8.GetString(output, 0, output.Length);
        }
    }
    catch (Exception)
    {
        // Best-effort API: swallow the error and signal failure with null.
        // (The original also called ex.GetType() here, which had no effect;
        // that dead statement has been removed.)
        return null;
    }
}
/// <summary>
/// Arguments for a successful file process.
/// </summary>
/// <param name="originalFilePath">Path to the original file.</param>
/// <param name="newFilePath">Path to the newly (over)written file.</param>
/// <param name="filePathIndex">Index in the set of files given to the batch.</param>
/// <param name="compressor">Compressor that produced the smallest file.</param>
public FileProcessSuccessEventArgs(string originalFilePath, string newFilePath, int filePathIndex, Compressor.PNGCompressor compressor) : base()
{
    // Plain field captures; no validation is performed here.
    this.originalFilePath = originalFilePath;
    this.newFilePath = newFilePath;
    this.filePathIndex = filePathIndex;
    this.compressor = compressor;
}
// Serializes an object to a base64 string, optionally encrypting and/or
// compressing the binary form depending on the requested mode.
private static string _Serialize(object obj, PPMode mode)
{
    byte[] payload = BinarySerializer.Serialize(obj);
    bool encrypt = mode == PPMode.Encrypted || mode == PPMode.CompressedAndEncrypted;
    bool compress = mode == PPMode.Compressed || mode == PPMode.CompressedAndEncrypted;
    // Encryption is applied first, then compression — _Deserialize undoes
    // these in the reverse order.
    if (encrypt)
    {
        payload = Encryption.BinaryEncrypt(payload);
    }
    if (compress)
    {
        payload = Compressor.Compress(payload);
    }
    return StringTools.ToBase64String(payload);
}
// Round-trips 100 random 2 KB buffers through GZip compress/decompress and
// verifies byte-for-byte equality.
public void GZipCompressDecompressEqualityTest()
{
    // Fix: create one Random up front. The original constructed `new Random()`
    // on every iteration; time-seeded instances created in quick succession
    // can yield identical buffers, weakening the test.
    var random = new Random();
    for (int i = 0; i < 100; ++i)
    {
        Compressor compressor = new Compressor(Compressor.CompressionMethod.GZip);
        byte[] bytes = new byte[2056];
        random.NextBytes(bytes);
        MemoryStream compressedStream = new MemoryStream();
        compressor.Compress(bytes, compressedStream);
        // Rewind so decompression reads from the start of the buffer.
        compressedStream.Position = 0;
        byte[] decompressedBytes = compressor.Decompress(compressedStream);
        CollectionAssert.AreEqual(bytes, decompressedBytes);
    }
}
// Writes the map data to the given file, base64+zip encoded when the
// "save as compressed" option is enabled, then updates the status bar.
public void SaveAs(string file)
{
    XDocument xDoc = DataAsXDocument();
    string xml = xDoc.ToString();
    string finalData = MainWindow.Instance.SaveAsCompressed
        ? Convert.ToBase64String(Compressor.Zip(xml))
        : xml;
    File.WriteAllText(file, finalData);
    Global.StatusBarTextLeft = "Saved map '" + file + "'";
}
// De-serialize view state persisted by the save-side counterpart. The hidden
// field's first character is a "1"/"0" flag telling whether the remaining
// base64 payload was compressed before being written out.
protected override object LoadPageStateFromPersistenceMedium()
{
    var compressedViewState = Request.Form[_compressedViewState];
    // First character encodes the compression flag; strip it before decoding.
    // (Fix: removed the redundant `isCompressed = true` pre-initialization that
    // was immediately overwritten.)
    var isCompressed = Convert.ToBoolean(Convert.ToInt32(compressedViewState.Substring(0, 1)));
    compressedViewState = compressedViewState.Remove(0, 1);
    var bytes = Convert.FromBase64String(compressedViewState);
    if (isCompressed)
    {
        bytes = Compressor.Decompress<byte[]>(bytes);
    }
    // LosFormatter consumes the base64 text form, so re-encode the raw bytes.
    var losformatter = new LosFormatter();
    return losformatter.Deserialize(Convert.ToBase64String(bytes));
}
// Reverses _Serialize: decodes the base64 payload, then undoes compression
// and encryption in the opposite order they were applied.
private static object _Deserialize(string value, PPMode mode)
{
    byte[] payload = StringTools.FromBase64String(value);
    bool compressed = mode == PPMode.Compressed || mode == PPMode.CompressedAndEncrypted;
    bool encrypted = mode == PPMode.Encrypted || mode == PPMode.CompressedAndEncrypted;
    if (compressed)
    {
        payload = Compressor.Decompress(payload);
    }
    if (encrypted)
    {
        payload = Encryption.BinaryDecrypt(payload);
    }
    return BinarySerializer.Deserialize(payload);
}
// Compresses the supplied HttpContent into _Buffer and copies its headers onto
// this message, adding the compressor's encoding to Content-Encoding.
private async Task CompressContent(HttpContent content, Compressor compressor)
{
    using (content) // the original content is consumed and disposed here
    {
        using (var compressionStream = compressor.CreateCompressionStream(_Buffer))
        {
            await content.CopyToAsync(compressionStream);
            // Carry over the original content headers unmodified.
            foreach (var header in content.Headers)
            {
                Headers.TryAddWithoutValidation(header.Key, header.Value);
            }
            Headers.ContentEncoding.Add(compressor.EncodingType);
        }
        // Length/position are read only after the compression stream is
        // disposed, which flushes any buffered compressed bytes into _Buffer.
        Headers.ContentLength = _Buffer.Length;
        _Buffer.Position = 0; // rewind so the buffer can be read back from the start
    }
}
// Round-trips an interior slice of the sample through Compressor.Wrap to
// prove compression can read directly from an ArraySegment.
public void Compress_canRead_fromArraySegment([Values(false, true)] bool useDictionary)
{
    var sample = GenerateSample();
    var slice = new ArraySegment<byte>(sample, 2, sample.Length - 5);
    var dictionary = useDictionary ? BuildDictionary() : null;

    byte[] wrapped;
    using (var options = new CompressionOptions(dictionary))
    using (var compressor = new Compressor(options))
    {
        wrapped = compressor.Wrap(slice);
    }

    byte[] unwrapped;
    using (var options = new DecompressionOptions(dictionary))
    using (var decompressor = new Decompressor(options))
    {
        unwrapped = decompressor.Unwrap(wrapped);
    }

    CollectionAssert.AreEqual(slice, unwrapped);
}
// A burst of atoms generated at a single instant must collapse into one
// compressed atom whose frequency is the sum of the originals.
public void CompressTest_SingleTime()
{
    DataAtomGenerator generator = new DataAtomGenerator();
    Compressor target = new Compressor();
    generator.Time = DateTime.Now;
    generator.Process = "Foo";
    generator.Title = "Title";
    generator.Frequency = 1;

    const int count = 4;
    var sequence = generator.RandomDataStreamTakeNow(count);
    var actual = target.Compress(sequence, a => a);

    // All atoms share one timestamp, so exactly one group survives and its
    // frequency is the sum of the inputs.
    Assert.AreEqual(1, actual.Count());
    Assert.AreEqual(generator.Frequency * count, actual.ElementAt(0).Frequency);
}
// Compresses the full sample, surrounds it with junk bytes, and verifies the
// decompressor can unwrap directly from an ArraySegment over that buffer.
public void Decompress_canRead_fromArraySegment([Values(false, true)] bool useDictionary)
{
    var sample = GenerateSample();
    var dictionary = useDictionary ? BuildDictionary() : null;

    byte[] wrapped;
    using (var compressor = new Compressor(dictionary))
    {
        wrapped = compressor.Wrap(sample);
    }

    // Pad both ends so only the segment, not the whole array, is valid data.
    var padded = new byte[] { 1, 2 }.Concat(wrapped).Concat(new byte[] { 4, 5, 6 }).ToArray();
    var slice = new ArraySegment<byte>(padded, 2, padded.Length - 5);

    byte[] unwrapped;
    using (var decompressor = new Decompressor(dictionary))
    {
        unwrapped = decompressor.Unwrap(slice);
    }

    CollectionAssert.AreEqual(sample, unwrapped);
}
// Opens the extraction window for the first selected archive, using the path
// minus its extension as the destination folder. Errors are surfaced to the
// user via a message box.
private void UnZipFiles()
{
    try
    {
        // Nothing selected — nothing to extract. (Fix: Any() instead of the
        // O(n)-ish Count() == 0 anti-idiom.)
        if (!SelectedItemPaths.Any())
        {
            return;
        }
        // Hoisted: First() was previously evaluated twice on the same enumerable.
        string firstPath = SelectedItemPaths.First();
        // Derive the destination folder by stripping the final extension.
        string[] aFilePath = firstPath.Split('.');
        string destinationFolder = string.Join(".", aFilePath.Take(aFilePath.Length - 1));
        var window = new Compressor(destinationFolder, firstPath);
        window.Show();
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message, "ZipMagikLITE: Error", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
    }
}
/// <summary>
/// Reads the header data and sets the ContentData Property to the correct content.
/// </summary>
/// <returns>A valid instance of the Headerfile.</returns>
protected NdfHeader ReadHeader()
{
    var header = new NdfHeader();
    using (var ms = new MemoryStream(FileData))
    {
        // Skip the 12-byte magic/preamble.
        ms.Seek(12, SeekOrigin.Begin);
        var buffer = new byte[4];
        ms.Read(buffer, 0, buffer.Length);
        // A value of 128 at offset 12 marks a compressed body.
        header.IsCompressedBody = BitConverter.ToInt32(buffer, 0) == 128;
        ms.Read(buffer, 0, 4);
        header.FileSizeUncompressedMinusE0 = BitConverter.ToInt32(buffer, 0);
        ms.Seek(12, SeekOrigin.Current);
        ms.Read(buffer, 0, 4);
        header.FileSizeUncompressed = BitConverter.ToInt32(buffer, 0);
        ms.Seek(4, SeekOrigin.Current);
        if (header.IsCompressedBody)
        {
            // Compressed files carry one extra field: the decompressed size.
            ms.Read(buffer, 0, 4);
            header.UncompressedContentSize = BitConverter.ToInt32(buffer, 0);
        }
        // Everything after the header is the (possibly compressed) body.
        // NOTE(review): Stream.Read return values are ignored throughout; a
        // MemoryStream fills the buffer in one call, but this pattern would be
        // unsafe for other stream types.
        buffer = new byte[FileData.Length - ms.Position];
        ms.Read(buffer, 0, buffer.Length);
        if (header.IsCompressedBody)
        {
            ContentData = Compressor.Decomp(buffer);
        }
        else
        {
            ContentData = buffer;
        }
    }
    return (header);
}
// Writes this resource into the package stream for the given volume, updating
// the volume offset table and the header's size fields afterwards.
public void Pack(Stream stream, byte volNum)
{
    var info = GetInfo();
    var path = Path.Combine(Package.GameDirectory, FileName);
    var begin = stream.Position;
    // Reserve room for the header; it is written last, once sizes are known.
    stream.Seek(info.HeadSize, SeekOrigin.Current);
    var ri = Volumes.FindIndex(r => r.Num == volNum);
    var res = Volumes[ri]; // NOTE(review): res appears to be unused
    if (!File.Exists(path)) // The file was never unpacked; read back the packed bytes and store them as-is
    {
        var data = GetCompressed();
        stream.Write(data);
    }
    else
    {
        byte[] data = ReadContent(Package.GameDirectory);
        info.DecompSize = (ushort)data.Length;
        if (info.Method != 0)
        {
            // Re-compress with the method recorded in the entry header.
            Compressor comp = info.GetCompressor();
            comp.Pack(data, stream);
            info.CompSize = (ushort)comp.CompSize;
            info.DecompSize = (ushort)comp.DecompSize;
        }
        else
        {
            // Method 0 = stored uncompressed.
            stream.Write(data);
            info.CompSize = (ushort)data.Length;
        }
    }
    // Record where this entry starts for the volume table.
    Volumes[ri] = new VolumeOffset(volNum, (int)begin);
    // Jump back and fill in the header, then restore the write position.
    var end = stream.Position;
    stream.Seek(begin, SeekOrigin.Begin);
    info.Write(stream);
    stream.Seek(end, SeekOrigin.Begin);
}
/// <exception cref="System.IO.IOException"/> private static void CodecTestWithNOCompression(Configuration conf, string codecClass ) { // Create a compressor with NO_COMPRESSION and make sure that // output is not compressed by comparing the size with the // original input CompressionCodec codec = null; ZlibFactory.SetCompressionLevel(conf, ZlibCompressor.CompressionLevel.NoCompression ); try { codec = (CompressionCodec)ReflectionUtils.NewInstance(conf.GetClassByName(codecClass ), conf); } catch (TypeLoadException) { throw new IOException("Illegal codec!"); } Compressor c = codec.CreateCompressor(); // ensure same compressor placed earlier ByteArrayOutputStream bos = new ByteArrayOutputStream(); CompressionOutputStream cos = null; // write trivially compressable data byte[] b = new byte[1 << 15]; Arrays.Fill(b, unchecked ((byte)43)); try { cos = codec.CreateOutputStream(bos, c); cos.Write(b); } finally { if (cos != null) { cos.Close(); } } byte[] outbytes = bos.ToByteArray(); // verify data were not compressed Assert.True("Compressed bytes contrary to configuration(NO_COMPRESSION)" , outbytes.Length >= b.Length); }
/// <summary>
/// Saves the ViewState to the persistence medium, optionally compressing it.
/// The "ViewStateCompression" app setting ("true" when absent) selects between
/// the compressed hidden-field path and the framework's default persistence.
/// </summary>
protected override void SavePageStateToPersistenceMedium(object state)
{
    var writer = new StringWriter();
    try
    {
        var compression = ConfigurationManager.AppSettings["ViewStateCompression"];
        if (string.IsNullOrEmpty(compression))
        {
            compression = "true"; // compress unless explicitly disabled
        }
        if (bool.Parse(compression))
        {
            // Serialize, compress the raw bytes, and re-encode as base64.
            var formatter = new LosFormatter();
            formatter.Serialize(writer, state);
            var vState = writer.ToString();
            var bytes = Convert.FromBase64String(vState);
            bytes = Compressor.Compress(bytes);
            vState = Convert.ToBase64String(bytes);
            // During async postbacks the field must be registered through the
            // ScriptManager so partial rendering picks it up.
            var sm = ScriptManager.GetCurrent(this);
            if (sm != null && sm.IsInAsyncPostBack)
            {
                ScriptManager.RegisterHiddenField(this, "__VSTATE", vState);
            }
            else
            {
                Page.ClientScript.RegisterHiddenField("__VSTATE", vState);
            }
        }
        else
        {
            base.SavePageStateToPersistenceMedium(state);
        }
    }
    finally
    {
        // Fix: removed the no-op `catch (Exception) { throw; }` and the
        // pointless null check / null assignment; writer is never null here.
        writer.Dispose();
    }
}
// Thread-pool worker: extracts the game id/version from a replay JSON payload,
// compresses the raw message, and PUTs it to the replay API with up to three
// attempts, backing off 5s * attempt^2 between retries.
private static void UploadReplay(object context)
{
    try
    {
        var gameMessage = (GameMessage)context;
        var replay = JsonConvert.DeserializeObject<IEnumerable<JObject>>(gameMessage.Message);
        // The "Game" record carries the metadata needed to build the upload URL.
        var game = replay.FirstOrDefault(record => record["type"].Value<string>() == MessageType.Game.ToString())?["message"];
        var gameId = game?["id"]?.Value<string>();
        if (gameId == null)
        {
            Logger.LogError($"Unable to find game id in a replay: {gameMessage.Message}");
            return;
        }
        var gameVersion = game["version"]?.Value<string>();
        byte[] compressedReplay;
        using (var compressor = new Compressor(Compressor.MaxCompressionLevel))
            compressedReplay = compressor.Compress(Encoding.UTF8.GetBytes(gameMessage.Message));
        var uri = string.Format(ApiRoute, gameMessage.GameType, gameId, DeckTrackerVersion, gameVersion);
        int attempt = 0;
        while (attempt++ < 3)
        {
            try
            {
                using (var webClient = new WebClient())
                    webClient.UploadData(uri, "PUT", compressedReplay);
                break;
            }
            catch (WebException)
            {
                if (attempt < 3)
                {
                    // Quadratic backoff: 5s after the first failure, 20s after the second.
                    Thread.Sleep(5000 * attempt * attempt);
                }
                else
                {
                    throw; // give up; logged by the outer handler
                }
            }
        }
    }
    catch (ThreadAbortException)
    {
        // Normal shutdown path for worker threads; intentionally silent.
    }
    catch (Exception e)
    {
        Logger.LogError(e.ToString());
    }
    finally
    {
        // Deregister this worker regardless of outcome.
        Threads.Remove(Thread.CurrentThread);
    }
}
// Builds a ResourceNode from a file on disk. Normal source sniffing is tried
// first; if that fails and the extension is in the Forced map, the mapped node
// type is instantiated and the data is optionally expanded from its compressed
// form. The file mapping is disposed only when no node took ownership of it.
public static ResourceNode FromFile(ResourceNode parent, string path, FileOptions options)
{
    ResourceNode node = null;
    FileMap map = FileMap.FromFile(path, FileMapProtect.Read, 0, 0, options);
    try
    {
        DataSource source = new DataSource(map);
        if ((node = FromSource(parent, source)) == null)
        {
            // Fall back to the extension-forced node type.
            string ext = path.Substring(path.LastIndexOf('.') + 1).ToUpper(CultureInfo.InvariantCulture);
            if (Forced.ContainsKey(ext) && (node = Activator.CreateInstance(Forced[ext]) as ResourceNode) != null)
            {
                // Expand compressed payloads when possible and hand the node
                // both the raw and the uncompressed views.
                FileMap uncompressedMap = Compressor.TryExpand(ref source, false);
                if (uncompressedMap != null)
                {
                    node.Initialize(parent, source, new DataSource(uncompressedMap));
                }
                else
                {
                    node.Initialize(parent, source);
                }
            }
#if DEBUG
            else
            {
                // Debug builds keep unrecognized data visible as a raw node.
                node = new RawDataNode(Path.GetFileNameWithoutExtension(path));
                node.Initialize(parent, source);
            }
#endif
        }
    }
    finally
    {
        if (node == null)
        {
            // No node claimed the mapping, so release it here.
            map.Dispose();
        }
    }
    return (node);
}
// Writes this node's data to outPath in its YAZ0 run-length compressed form.
// If the node already carries compressed data, the base export preserves it.
public void ExportCompressed(string outPath)
{
    if (_compression != CompressionType.None)
    {
        // Data is already compressed; plain export writes it unchanged.
        base.Export(outPath);
    }
    else
    {
        // Compress into a self-deleting temp file, then copy the result into
        // the destination via memory-mapped views.
        using (FileStream inStream = new FileStream(Path.GetTempFileName(), FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.None, 0x8, FileOptions.SequentialScan | FileOptions.DeleteOnClose))
        using (FileStream outStream = new FileStream(outPath, FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.None, 8, FileOptions.SequentialScan))
        {
            Compressor.Compact(CompressionType.RunLengthYAZ0, WorkingUncompressed.Address, WorkingUncompressed.Length, inStream, this);
            // Size the destination before the raw memory copy.
            outStream.SetLength(inStream.Length);
            using (FileMap map = FileMap.FromStream(inStream))
            using (FileMap outMap = FileMap.FromStream(outStream))
                Memory.Move(outMap.Address, map.Address, (uint)map.Length);
        }
    }
}
// Decrypts a base64 AES payload with the SHA-hashed password, strips the
// leading salt, decompresses the remainder, and returns it as UTF-8 text.
// NOTE(review): the first parameter actually receives the *encrypted* base64
// text despite being named `decryptedText`; the name is kept so callers using
// named arguments keep compiling.
public static string Decrypt(string decryptedText, byte[] passwordBytes)
{
    byte[] bytesToBeDecrypted = Convert.FromBase64String(decryptedText);
    // Key is the SHA hash of the supplied password bytes.
    passwordBytes = sha.ComputeHash(passwordBytes);
    byte[] decryptedBytes = AES_Decrypt(bytesToBeDecrypted, passwordBytes);
    // Strip the leading salt in one bulk copy instead of a byte-by-byte loop.
    byte[] originalBytes = new byte[decryptedBytes.Length - _saltSize];
    Buffer.BlockCopy(decryptedBytes, _saltSize, originalBytes, 0, originalBytes.Length);
    originalBytes = Compressor.Decompress(originalBytes);
    return Encoding.UTF8.GetString(originalBytes);
}
// Compresses the package's 'Output' item in place; logs a warning and exits
// when the package carries no output value to transform.
public void Transform(Engine engine, Package package)
{
    this.package = package;
    this.engine = engine;
    Item outputItem = package.GetByName(Package.OutputName);
    string inputValue = package.GetValue(Package.OutputName);
    if (string.IsNullOrEmpty(inputValue))
    {
        log.Warning("Could not find 'Output' in the package, nothing to transform");
        return;
    }
    // replace the Output item in the package
    outputItem.SetAsString(Compressor.Compress(inputValue));
}
// Resolves a ResourceNode from a data source: first by direct sniffing, then
// by expanding a compressed payload and sniffing the expanded data. Returns
// null when neither strategy recognizes the data.
public static unsafe ResourceNode FromSource(ResourceNode parent, DataSource source)
{
    ResourceNode node = GetRaw(source);
    if (node != null)
    {
        node.Initialize(parent, source);
        return node;
    }
    // Not directly recognizable — try to expand it as a compressed payload.
    FileMap expandedMap = Compressor.TryExpand(ref source);
    if (expandedMap != null)
    {
        var expandedSource = new DataSource(expandedMap);
        node = NodeFactory.GetRaw(expandedSource);
        if (node != null)
        {
            // The node keeps both the compressed and expanded views.
            node.Initialize(parent, source, expandedSource);
        }
    }
    return node;
}
// Saves the current game file, first zipping any existing file into a
// timestamped backup. The UI is blocked for the duration and progress is
// reported through a ProgressDialog.
private async void _SaveGamefile(string filename)
{
    _BlockUI(true);
    if (File.Exists(filename))
    {
        // Back up the existing file as <name>-yyyyMMdd-hhmmss.zip next to it.
        // NOTE(review): "hh" is the 12-hour format; "HH" may have been intended.
        string path = Path.GetDirectoryName(filename);
        string backupfile = Path.Combine(path, Path.GetFileNameWithoutExtension(filename) + "-" + DateTime.Now.ToString("yyyyMMdd-hhmmss") + ".zip");
        _SetStatusbar(string.Format(Translate._("MainWindow.SaveGamefile.Backup.Statusbar"), filename, Path.GetFileName(backupfile)));
        await Task.Run(() => Compressor.CompressToFile(backupfile, filename, path));
    }
    _SetStatusbar(string.Format(Translate._("MainWindow.SaveGamefile.Progress.Statusbar"), filename));
    ProgressDialog progress = new ProgressDialog(this, Translate._("MainWindow.SaveGamefile.Progress.Title"));
    await Task.Run(() =>
    {
        // Saving happens off the UI thread; the dialog receives progress events.
        DateTime start_time = DateTime.Now;
        Log.Info("Saving file '{0}'", filename);
        progress.CounterFormat = Translate._("MainWindow.SaveGamefile.Progress.CounterFormat");
        progress.Interval = 1000;
        CurrFile.SaveAs(progress.Events, filename);
        Log.Info("Finished saving");
        Log.Info("... saved a total of {0} elements", CurrFile.TotalElements);
        DateTime end_time = DateTime.Now;
        TimeSpan ofs = end_time - start_time;
        Log.Info("Saving took {0}", ofs);
    });
    if (!CurrFile.Modified)
    {
        TreeView.ResetModified();
    }
    progress = null;
    _SetStatusbar();
    _BlockUI(false);
    _UpdateUIState();
}
// Ported Hadoop check: after valid SetInput, Compress(null, 0, 1) must raise
// ArgumentNullException (the Java NullPointerException equivalent). Returns
// true only when exactly that exception is observed; any other exception is
// logged and the check fails.
private bool CheckCompressNullPointerException(Compressor compressor, byte[] rawData)
{
    try
    {
        compressor.SetInput(rawData, 0, rawData.Length);
        compressor.Compress(null, 0, 1);
    }
    catch (ArgumentNullException)
    {
        return (true);
    }
    catch (Exception)
    {
        this.logger.Error(this.joiner.Join(compressor.GetType().GetCanonicalName(), "checkCompressNullPointerException error !!!"));
    }
    // Reached when no exception (or the wrong one) was thrown.
    return (false);
}
// Ported Hadoop check: Compress with a negative length must raise
// IndexOutOfRangeException (the Java ArrayIndexOutOfBoundsException
// equivalent). Returns true only when exactly that exception is observed; any
// other exception is logged and the check fails.
private bool CheckCompressArrayIndexOutOfBoundsException(Compressor compressor, byte[] rawData)
{
    try
    {
        compressor.SetInput(rawData, 0, rawData.Length);
        compressor.Compress(new byte[rawData.Length], 0, -1);
    }
    catch (IndexOutOfRangeException)
    {
        return (true);
    }
    catch (Exception)
    {
        this.logger.Error(this.joiner.Join(compressor.GetType().GetCanonicalName(), "checkCompressArrayIndexOutOfBoundsException error !!!"));
    }
    // Reached when no exception (or the wrong one) was thrown.
    return (false);
}
/// <summary>
/// Converts the value of a type specified by a generic type parameter into a binary-formatted stream.
/// </summary>
/// <param name="value">The object to parse to binary.</param>
/// <param name="compressionType">The type of compression to use.</param>
/// <returns>A binary-formatted array of bytes, parsed from the given object.</returns>
public static async Task<MemoryStream> GetStreamAsync(object value, CompressionType compressionType)
{
    if (compressionType == CompressionType.None)
    {
        return await GetStreamAsync(value).ConfigureAwait(false);
    }
    // Serialize into a scratch stream, then compress into the stream we return.
    // (Fix: renamed from `decompressedStream` — this stream receives the
    // *compressed* output, matching the naming of the sibling GetStream.)
    var compressedStream = new MemoryStream();
    using (var stream = new MemoryStream())
    {
        // NOTE(review): BinaryFormatter is obsolete and unsafe for untrusted
        // input; consider migrating callers to System.Text.Json.
        var bf = new BinaryFormatter();
        bf.Serialize(stream, value);
        await Compressor.Compress(stream, compressedStream, compressionType).ConfigureAwait(false);
    }
    return compressedStream;
}
// Runs the shared battery of argument-validation checks against both the
// compressor and the decompressor (the Check* methods accept either type):
// null inputs must raise null-argument errors and bad offsets/lengths must
// raise index-out-of-bounds errors.
// NOTE(review): the `name` parameter is unused here; confirm whether the base
// contract requires it for other overrides.
internal override void AssertCompression(string name, Compressor compressor, Decompressor decompressor, byte[] rawData)
{
    Assert.True(this.CheckSetInputNullPointerException(compressor));
    Assert.True(this.CheckSetInputNullPointerException(decompressor));
    Assert.True(this.CheckCompressArrayIndexOutOfBoundsException(compressor, rawData));
    Assert.True(this.CheckCompressArrayIndexOutOfBoundsException(decompressor, rawData));
    Assert.True(this.CheckCompressNullPointerException(compressor, rawData));
    Assert.True(this.CheckCompressNullPointerException(decompressor, rawData));
    Assert.True(this.CheckSetInputArrayIndexOutOfBoundsException(compressor));
    Assert.True(this.CheckSetInputArrayIndexOutOfBoundsException(decompressor));
}
// Round-trips buffers of steadily increasing sizes (2, 3002, 6002, ...)
// through a single compressor/decompressor pair, with and without a dictionary.
public void CompressAndDecompress_workCorrectly_onArraysOfDifferentSizes([Values(false, true)] bool useDictionary)
{
    var dictionary = useDictionary ? BuildDictionary() : null;
    using (var compressionOptions = new CompressionOptions(dictionary))
    using (var decompressionOptions = new DecompressionOptions(dictionary))
    using (var compressor = new Compressor(compressionOptions))
    using (var decompressor = new Decompressor(decompressionOptions))
    {
        for (var size = 2; size < 100000; size += 3000)
        {
            var original = GenerateBuffer(size);
            var roundTripped = decompressor.Unwrap(compressor.Wrap(original));
            CollectionAssert.AreEqual(original, roundTripped);
        }
    }
}
/// <summary>
/// Entry point: runs a compressor performance check, then either drops into an
/// interactive search/generate loop (no args) or processes the given flags.
/// </summary>
static void Main(string[] args)
{
    lib = new Library();
    Console.WriteLine("Program initialized!"); // fixed typo ("Probram inited!")
    Console.ForegroundColor = ConsoleColor.Green;
    Console.WriteLine("PERFORMANCE CHECK FOR COMPRESSOR");
    Console.ForegroundColor = ConsoleColor.White;
    Compressor.performanceCheck();
    Console.ForegroundColor = ConsoleColor.Green;
    Console.WriteLine("==================================");
    Console.ForegroundColor = ConsoleColor.White;
    if (args.Length == 0)
    {
        // Interactive mode: loop until the process is killed.
        while (true)
        {
            searchLibrary();
            getBookInput();
        }
    }
    else
    {
        foreach (string s in args)
        {
            if (s == "--search")
            {
                searchLibrary();
            }
            else if (s == "--generate")
            {
                getBookInput();
            }
            else
            {
                Console.WriteLine("Error: Invalid parameter " + s); // fixed typo ("paramenter")
            }
        }
    }
    //Console.ReadKey();
}
// Compressing then decompressing the sample object bytes must yield a
// non-null, non-empty result.
public async Task DecompressdResult_ShouldNot_NullOrEmpty()
{
    // Compress the raw object bytes.
    using var compressInput = new MemoryStream(ObjectBytes);
    using var compressOutput = new MemoryStream();
    await Compressor.CompressAsync(compressInput, compressOutput);
    var compressedBytes = compressOutput.ToArray();

    // Feed the compressed bytes back through decompression.
    using var decompressInput = new MemoryStream(compressedBytes);
    using var decompressOutput = new MemoryStream();
    await Compressor.DecompressAsync(decompressInput, decompressOutput);

    var result = decompressOutput.ToArray();
    Assert.IsNotNull(result);
    Assert.IsNotEmpty(result);
}
/// <summary>
/// Converts the value of a type specified by a generic type parameter into a JSON-formatted array of UTF-8
/// encoded bytes.
/// </summary>
/// <param name="value">The object to parse to JSON.</param>
/// <param name="compressionType">The type of compression to use.</param>
/// <param name="namingConvention">The naming convention to write in.</param>
/// <returns>A JSON-formatted UTF-8 encoded array of bytes, parsed from the given object.</returns>
public static MemoryStream GetStream(object value, NamingConvention namingConvention, CompressionType compressionType)
{
    if (compressionType == CompressionType.None)
    {
        return (GetStream(value, namingConvention));
    }
    var compressedStream = new MemoryStream();
    using (var decompressedStream = new MemoryStream())
    {
        Serialise_Internal(decompressedStream, value, namingConvention);
        // Sync-over-async bridge: Task.Run keeps continuations on the thread
        // pool (avoiding sync-context deadlocks) but still blocks this thread
        // until compression completes.
        Task.Run(() => Compressor.Compress(decompressedStream, compressedStream, compressionType)).GetAwaiter().GetResult();
    }
    return (compressedStream);
}
//Convert the png/export as dds
// Loads the modded texture, compresses it to DXT1a/DXT5 (selected via the
// export combo box) with generated mipmaps, then either patches the texture
// in place (headerless DDS path) or writes "new.dds" to disk.
public void CompressDDS()
{
    Surface newDDS = Surface.LoadFromFile(moddedTexPath, true);
    MipmapFilter MipmapFilter = MipmapFilter.Box;
    CompressionFormat texFormat = new CompressionFormat();
    // Combo index 0 -> DXT1a, 1 -> DXT5; other indices keep the default format.
    if (exportSettingBox.SelectedIndex == 0)
    {
        texFormat = CompressionFormat.DXT1a;
    }
    else if (exportSettingBox.SelectedIndex == 1)
    {
        texFormat = CompressionFormat.DXT5;
    }
    Compressor compress = new Compressor();
    compress.Input.SetData(newDDS);
    compress.Compression.Format = texFormat;
    compress.Input.SetMipmapGeneration(true, mipMapSetting.Value);
    compress.Input.MipmapFilter = MipmapFilter;
    // Removes the DDS header for the save xfbin button
    if (ddsNoHeader == true)
    {
        compress.Output.OutputHeader = false;
        compress.Process(ddsStream);
        byte[] ddsArray = ddsStream.ToArray();
        int ddsLength = ddsArray.Length;
        int texIndex = selectTexBox.SelectedIndex;
        bool success = ReplaceTexture(texIndex, ddsArray);
        UpdateNut(texIndex, ddsLength, mipMapSetting.Value, texFormat, newDDS.Width, newDDS.Height);
        if (success)
        {
            MessageBox.Show($"Texture Replaced.", $"Success");
        }
        // NOTE(review): ddsStream is disposed only on this branch; confirm the
        // other branch does not leak it or rely on it staying open.
        ddsStream.Dispose();
    }
    else
    {
        compress.Process("new.dds");
    }
    compress.Dispose();
    newDDS.Dispose();
}
// One-time fixture setup: creates the compressor plus the fake I/O channels
// used to observe it, and (under simulation) wires a callback that mirrors the
// pressure-switch state into the simulated PCM and analog input voltage.
public static void SetUpBeforeClass()
{
    compressor = new Compressor();
    fakePressureSwitch = new DigitalOutput(11);
    fakeCompressor = new AnalogInput(1);
    fakeSolenoid1 = new DigitalInput(12);
    fakeSolenoid2 = new DigitalInput(13);
    if (RobotBase.IsSimulation)
    {
        pressureSwitchCallback = (s, o) =>
        {
            var comp = SimData.GetPCM(0).Compressor;
            comp.PressureSwitch = o;
            comp.On = o;
            // NOTE(review): when the switch reads true the *off* voltage is
            // applied — confirm this inversion is intended by the sim model.
            double voltage = o ? CompressorOffVoltage : CompressorOnVoltage;
            SimData.AnalogIn[1].Voltage = voltage;
        };
        // Fire the callback whenever DIO channel 11's value changes.
        SimData.DIO[11].Register("Value", pressureSwitchCallback);
    }
}
// StopLiveWindowMode must be callable without error on a fresh Compressor.
public void TestStopLiveWindowMode()
{
    var compressor = new Compressor();
    Assert.DoesNotThrow(() => compressor.StopLiveWindowMode());
}
// InitTable must accept a table without throwing and expose it via Table.
public void TestInitTable()
{
    var compressor = new Compressor();
    ITable table = new MockNetworkTable();
    Assert.DoesNotThrow(() => compressor.InitTable(table));
    Assert.That(compressor.Table, Is.EqualTo(table));
}
// UpdateTable must be safe to call before any table has been initialized.
public void TestUpdateTableNull()
{
    var compressor = new Compressor();
    Assert.DoesNotThrow(() => compressor.UpdateTable());
}
// Prompts for a destination via the file picker and saves the display text
// either as plain text or as a compressed stream, based on the chosen
// extension. Errors are intentionally swallowed (best-effort save).
// NOTE(review): async void means exceptions escaping the try are unobservable;
// acceptable only if this is a top-level event handler.
public async void Save(TextBox display)
{
    try
    {
        FileSavePicker picker = new FileSavePicker();
        picker.SuggestedStartLocation = PickerLocationId.DocumentsLibrary;
        picker.FileTypeChoices.Add("Text File", new List<string>() { textExt });
        picker.FileTypeChoices.Add("Compressed File", new List<string>() { compressedExt });
        picker.DefaultFileExtension = textExt;
        StorageFile file = await picker.PickSaveFileAsync();
        if (file == null)
        {
            // Fix: the picker returns null when the user cancels; previously
            // this fell through to file.FileType and the resulting
            // NullReferenceException was silently swallowed below.
            return;
        }
        switch (file.FileType)
        {
            case textExt:
                await FileIO.WriteTextAsync(file, display.Text);
                break;
            case compressedExt:
                using (MemoryStream stream = new MemoryStream(Encoding.UTF8.GetBytes(display.Text)))
                using (IRandomAccessStream input = stream.AsRandomAccessStream())
                using (IRandomAccessStream output = await file.OpenAsync(FileAccessMode.ReadWrite))
                using (Compressor compressor = new Compressor(output.GetOutputStreamAt(0), algorithm, 0))
                {
                    ulong inputSize = await RandomAccessStream.CopyAsync(input, compressor);
                    // FinishAsync flushes the final compressed block.
                    bool finished = await compressor.FinishAsync();
                    ulong outputSize = output.Size;
                    Show(string.Format("Compressed {0} bytes to {1} bytes", inputSize, outputSize), "Compression App");
                }
                break;
            default:
                break;
        }
    }
    catch
    {
        // Best-effort save: errors are intentionally ignored (original
        // behavior preserved). The unused StringBuilder local was removed.
    }
}
// Compares the frequency-based Compressor against plain balanced-variant
// sequence creation over the same text: the compressor must cost more time
// but produce different structures and a strictly smaller total link count,
// while both representations must round-trip every string losslessly.
public static void CompressionEfficiencyTest()
{
    var strings = ExampleText.Split(new[] { '\n', '\r' }, StringSplitOptions.RemoveEmptyEntries);
    var arrays = strings.Select(UnicodeMap.FromStringToLinkArray).ToArray();
    var totalCharacters = arrays.Select(x => x.Length).Sum();
    using (var scope1 = new TempLinksTestScope(useSequences: true))
    using (var scope2 = new TempLinksTestScope(useSequences: true))
    {
        scope1.Links.UseUnicode();
        scope2.Links.UseUnicode();
        var compressor1 = new Compressor(scope1.Links.Unsync, scope1.Sequences);
        var compressor2 = scope2.Sequences;
        var compressed1 = new ulong[arrays.Length];
        var compressed2 = new ulong[arrays.Length];
        // Time the compressor...
        var sw1 = Stopwatch.StartNew();
        var START = 0;
        var END = arrays.Length;
        for (int i = START; i < END; i++)
            compressed1[i] = compressor1.Compress(arrays[i]);
        var elapsed1 = sw1.Elapsed;
        // ...against the balanced-variant baseline.
        var sw2 = Stopwatch.StartNew();
        for (int i = START; i < END; i++)
            compressed2[i] = compressor2.CreateBalancedVariantCore(arrays[i]);
        var elapsed2 = sw2.Elapsed;
        Debug.WriteLine($"Compressor: {elapsed1}, Balanced sequence creator: {elapsed2}");
        // Compression is expected to cost more time than naive balanced creation.
        Assert.True(elapsed1 > elapsed2);
        // Checks
        for (int i = START; i < END; i++)
        {
            var sequence1 = compressed1[i];
            var sequence2 = compressed2[i];
            var decompress1 = UnicodeMap.FromSequenceLinkToString(sequence1, scope1.Links);
            var decompress2 = UnicodeMap.FromSequenceLinkToString(sequence2, scope2.Links);
            var structure1 = scope1.Links.FormatStructure(sequence1, link => link.IsPartialPoint());
            var structure2 = scope2.Links.FormatStructure(sequence2, link => link.IsPartialPoint());
            // Non-trivial sequences must be structured differently by the two
            // strategies, yet decompress back to the identical original string.
            if (sequence1 != Constants.Null && sequence2 != Constants.Null && arrays[i].Length > 3)
                Assert.False(structure1 == structure2);
            Assert.True(strings[i] == decompress1 && decompress1 == decompress2);
        }
        // Both representations must use fewer links than raw characters
        // (excluding the fixed Unicode mapping links).
        Assert.True((int)(scope1.Links.Count() - UnicodeMap.MapSize) < totalCharacters);
        Assert.True((int)(scope2.Links.Count() - UnicodeMap.MapSize) < totalCharacters);
        Debug.WriteLine($"{(double)(scope1.Links.Count() - UnicodeMap.MapSize) / totalCharacters} | {(double)(scope2.Links.Count() - UnicodeMap.MapSize) / totalCharacters}");
        // The compressor must yield strictly fewer links overall.
        Assert.True(scope1.Links.Count() < scope2.Links.Count());
        compressor1.ValidateFrequencies();
    }
}
// Extraction phase of the pipeline; all behavior comes from the base phase,
// which receives the owning Compressor.
public ExtractPhase(Compressor parent) : base(parent) { }
// Per-test setup: fresh Compressor and BinaryFormatter instances.
// NOTE(review): BinaryFormatter is obsolete/insecure and removed in .NET 9;
// these tests will need migrating off it eventually.
public void Init()
{
    compressor = new Compressor();
    bf = new BinaryFormatter();
}
// InitTable with a mock table must not throw.
// NOTE(review): despite the name, StartLiveWindowMode is never invoked here.
public void TestStartLiveWindowModeTable()
{
    var compressor = new Compressor();
    Assert.DoesNotThrow(() =>
    {
        compressor.InitTable(new MockNetworkTable());
    });
}
// Verifies that compressing the four-element sequence [e1, e2, e1, e2]
// produces the doubled-pair structure ((e1->e2)->(e1->e2)) and that every
// element can be recovered via source/target walks, key lookups, and
// square-matrix indexing.
public void CompressionTest()
{
    using (var scope = new TempLinksTestScope(useSequences: true))
    {
        var links = scope.Links;
        var sequences = scope.Sequences;
        var e1 = links.Create();
        var e2 = links.Create();
        var sequence = new[]
        {
            e1, e2, e1, e2 // mama / papa / template [(m/p), a] { [1] [2] [1] [2] }
        };
        var compressor = new Compressor(links.Unsync, sequences);
        var compressedVariant = compressor.Compress(sequence);
        // Expected link layout after compression:
        // 1: [1] (1->1) point
        // 2: [2] (2->2) point
        // 3: [1,2] (1->2) pair
        // 4: [1,2,1,2] (3->3) pair
        // Walking the pair-of-pairs reproduces the original element order.
        Assert.True(links.GetSource(links.GetSource(compressedVariant)) == sequence[0]);
        Assert.True(links.GetTarget(links.GetSource(compressedVariant)) == sequence[1]);
        Assert.True(links.GetSource(links.GetTarget(compressedVariant)) == sequence[2]);
        Assert.True(links.GetTarget(links.GetTarget(compressedVariant)) == sequence[3]);
        var source = Constants.SourcePart;
        var target = Constants.TargetPart;
        // Key-path addressing must agree with the direct walks above.
        Assert.True(links.GetByKeys(compressedVariant, source, source) == sequence[0]);
        Assert.True(links.GetByKeys(compressedVariant, source, target) == sequence[1]);
        Assert.True(links.GetByKeys(compressedVariant, target, source) == sequence[2]);
        Assert.True(links.GetByKeys(compressedVariant, target, target) == sequence[3]);
        // 4 - length of sequence
        Assert.True(links.GetSquareMatrixSequenceElementByIndex(compressedVariant, 4, 0) == sequence[0]);
        Assert.True(links.GetSquareMatrixSequenceElementByIndex(compressedVariant, 4, 1) == sequence[1]);
        Assert.True(links.GetSquareMatrixSequenceElementByIndex(compressedVariant, 4, 2) == sequence[2]);
        Assert.True(links.GetSquareMatrixSequenceElementByIndex(compressedVariant, 4, 3) == sequence[3]);
    }
}