public void Decompress_throwsZstdException_onMalformedDecompressedSize([Values(false, true)] bool useDictionary)
{
    var data = GenerateSample();
    var dict = useDictionary ? BuildDictionary() : null;

    byte[] compressed;
    using (var options = new CompressionOptions(dict))
    using (var compressor = new Compressor(options))
        compressed = compressor.Wrap(data);

    var frameHeader = compressed[4];
    // Ensure that we malform the decompressed size in the right place
    if (useDictionary)
    {
        Assert.AreEqual(frameHeader, 0x63);
        compressed[9]--;
    }
    else
    {
        Assert.AreEqual(frameHeader, 0x60);
        compressed[5]--;
    }

    // Thus, ZSTD_getDecompressedSize will return a size that is one byte less than the actual one
    using (var options = new DecompressionOptions(dict))
    using (var decompressor = new Decompressor(options))
        Assert.Throws<ZstdException>(() => decompressor.Unwrap(compressed));
}
/// <summary>
/// Creates a web host builder whose pipeline echoes the request body back through the compression middleware.
/// </summary>
/// <param name="options">
/// The compression options; defaults are used when <c>null</c>.
/// </param>
public IWebHostBuilder CreateDecompressionBuilder(CompressionOptions options = null)
{
    if (options == null)
    {
        options = new CompressionOptions();
    }

    IWebHostBuilder builder = new WebHostBuilder()
        .ConfigureServices(s => s.AddCompression())
        .Configure(app =>
        {
            app.UseCompression(options);
            app.Run(async c =>
            {
                string text;
                using (var reader = new StreamReader(c.Request.Body))
                {
                    text = reader.ReadToEnd();
                }

                c.Response.ContentType = "text/plain";
                c.Response.ContentLength = text.Length;
                await c.Response.WriteAsync(text);
            });
        });

    return builder;
}
public void CompressionImprovesWithDictionary()
{
    var trainingData = new byte[100][];
    for (int i = 0; i < trainingData.Length; i++)
    {
        trainingData[i] = DataGenerator.GetSmallBuffer(DataFill.Random);
    }

    var dict = DictBuilder.TrainFromBuffer(trainingData);
    var compressionOptions = new CompressionOptions(dict);
    var testStream = DataGenerator.GetSmallStream(DataFill.Random);

    var normalResultStream = new MemoryStream();
    using (var compressionStream = new CompressorStream(normalResultStream))
        testStream.CopyTo(compressionStream);

    // Rewind the source stream so the dictionary pass compresses the same data
    testStream.Seek(0, SeekOrigin.Begin);

    var dictResultStream = new MemoryStream();
    using (var compressionStream = new CompressorStream(dictResultStream, compressionOptions))
        testStream.CopyTo(compressionStream);

    Assert.Greater(normalResultStream.Length, dictResultStream.Length);
}
public void CompressionImprovesWithDictionary()
{
    var dict = TrainDict();
    var compressionOptions = new CompressionOptions(dict);
    var dataStream = DataGenerator.GetSmallStream(DataFill.Sequential);

    var normalResultStream = new MemoryStream();
    using (var compressionStream = new CompressionStream(normalResultStream))
        dataStream.CopyTo(compressionStream);

    dataStream.Seek(0, SeekOrigin.Begin);

    var dictResultStream = new MemoryStream();
    using (var compressionStream = new CompressionStream(dictResultStream, compressionOptions))
        dataStream.CopyTo(compressionStream);

    Assert.Greater(normalResultStream.Length, dictResultStream.Length);

    dictResultStream.Seek(0, SeekOrigin.Begin);
    var resultStream = new MemoryStream();
    using (var decompressionStream = new DecompressionStream(dictResultStream, new DecompressionOptions(dict)))
        decompressionStream.CopyTo(resultStream);

    Assert.AreEqual(dataStream.ToArray(), resultStream.ToArray());
}
public static byte[] Compress(byte[] data, CompressAlgorithm method = CompressAlgorithm.Deflate, CompressionLevel level = CompressionLevel.Optimal)
{
    MemoryStream output = new MemoryStream();
    switch (method)
    {
        case CompressAlgorithm.Deflate:
        {
            using (DeflateStream dstream = new DeflateStream(output, level))
            {
                dstream.Write(data, 0, data.Length);
            }
        }
        break;

        case CompressAlgorithm.Zstd:
        {
            var opt = new CompressionOptions(CompressionOptions.DefaultCompressionLevel);
            using (var compressor = new Compressor(opt))
            {
                return compressor.Wrap(data);
            }
        }
    }

    return output.ToArray();
}
public virtual void CompressionAlgorithm_InputStreamThenOutputStreamProducesEquivalentData(Stream testData)
{
    var originalDataStream = new MemoryStream();
    testData.CopyTo(originalDataStream);
    originalDataStream.Position = 0;

    CompressionAlgorithm algorithm = GetAlgorithm();
    CompressionOptions options = GenerateCompressionOptions(algorithm.CompressionType);

    var compressedMemoryStream = new MemoryStream();
    using (Stream compressorStream = algorithm.CreateCompressor(options).CreateOutputStream(compressedMemoryStream, true, 4096))
    {
        originalDataStream.CopyTo(compressorStream);
    }

    var decompressedData = new MemoryStream();
    compressedMemoryStream.Position = 0;
    using (Stream decompressorStream = algorithm.CreateDecompressor().CreateInputStream(compressedMemoryStream, true))
    {
        decompressorStream.CopyTo(decompressedData);
    }

    Assert.AreEqual(originalDataStream.Length, decompressedData.Length);
    CollectionAssert.AreEqual(originalDataStream.ToArray(), decompressedData.ToArray());
}
public void RoundTrip_StreamingToStreaming_Stress([Values(true, false)] bool useDict, [Values(true, false)] bool async)
{
    long i = 0;
    var dict = useDict ? TrainDict() : null;
    var compressionOptions = new CompressionOptions(dict);
    var decompressionOptions = new DecompressionOptions(dict);

    Enumerable.Range(0, 10000)
        .AsParallel()
        .WithDegreeOfParallelism(Environment.ProcessorCount * 4)
        .ForAll(n =>
        {
            var testStream = DataGenerator.GetSmallStream(DataFill.Sequential);
            var cBuffer = new byte[1 + (int)(n % (testStream.Length * 11))];
            var dBuffer = new byte[1 + (int)(n % (testStream.Length * 13))];

            var tempStream = new MemoryStream();
            using (var compressionStream = new CompressionStream(tempStream, compressionOptions, 1 + (int)(n % (testStream.Length * 17))))
            {
                int bytesRead;
                int offset = n % cBuffer.Length;
                while ((bytesRead = testStream.Read(cBuffer, offset, cBuffer.Length - offset)) > 0)
                {
                    if (async)
                    {
                        compressionStream.WriteAsync(cBuffer, offset, bytesRead).GetAwaiter().GetResult();
                    }
                    else
                    {
                        compressionStream.Write(cBuffer, offset, bytesRead);
                    }

                    if (Interlocked.Increment(ref i) % 100 == 0)
                    {
                        GC.Collect(GC.MaxGeneration, GCCollectionMode.Forced, true, true);
                    }
                }
            }

            tempStream.Seek(0, SeekOrigin.Begin);

            var resultStream = new MemoryStream();
            using (var decompressionStream = new DecompressionStream(tempStream, decompressionOptions, 1 + (int)(n % (testStream.Length * 19))))
            {
                int bytesRead;
                int offset = n % dBuffer.Length;
                while ((bytesRead = async
                           ? decompressionStream.ReadAsync(dBuffer, offset, dBuffer.Length - offset).GetAwaiter().GetResult()
                           : decompressionStream.Read(dBuffer, offset, dBuffer.Length - offset)) > 0)
                {
                    resultStream.Write(dBuffer, offset, bytesRead);
                    if (Interlocked.Increment(ref i) % 100 == 0)
                    {
                        GC.Collect(GC.MaxGeneration, GCCollectionMode.Forced, true, true);
                    }
                }
            }

            Assert.AreEqual(testStream.ToArray(), resultStream.ToArray());
        });

    GC.KeepAlive(compressionOptions);
    GC.KeepAlive(decompressionOptions);
}
public ErrorPromptResult Patch(CompressionOptions options, string inBsaFile, string inBsaPath, string outBsaPath)
{
    bool patchSuccess;

#if DEBUG
    var watch = new Stopwatch();
    try
    {
        watch.Start();
#endif
        patchSuccess = _bsaDiff.PatchBsa(options, inBsaPath, outBsaPath);
        if (!patchSuccess)
        {
            Log.Dual("Patching BSA {0} failed", inBsaFile);
        }
#if DEBUG
    }
    finally
    {
        watch.Stop();
        Debug.WriteLine("PatchBSA for {0} finished in {1}", inBsaFile, watch.Elapsed);
    }
#endif

    if (patchSuccess)
    {
        Log.Dual("Build successful.");
        return ErrorPromptResult.Continue;
    }

    return Prompts.PatchingErrorPrompt(inBsaFile);
}
public void CompressAndDecompress_worksCorrectly_advanced([Values(false, true)] bool useDictionary)
{
    var data = GenerateSample();
    var dict = useDictionary ? BuildDictionary() : null;

    byte[] compressed1, compressed2;
    using (var options = new CompressionOptions(dict, new Dictionary<ZSTD_cParameter, int> { { ZSTD_cParameter.ZSTD_c_checksumFlag, 0 } }))
    using (var compressor = new Compressor(options))
        compressed1 = compressor.Wrap(data);
    using (var options = new CompressionOptions(dict, new Dictionary<ZSTD_cParameter, int> { { ZSTD_cParameter.ZSTD_c_checksumFlag, 1 } }))
    using (var compressor = new Compressor(options))
        compressed2 = compressor.Wrap(data);

    Assert.AreEqual(compressed1.Length + 4, compressed2.Length);

    using (var options = new DecompressionOptions(dict, new Dictionary<ZSTD_dParameter, int>()))
    using (var decompressor = new Decompressor(options))
    {
        CollectionAssert.AreEqual(data, decompressor.Unwrap(compressed1));
        CollectionAssert.AreEqual(data, decompressor.Unwrap(compressed2));
    }
}
/// <summary>
/// Helper method to generate a CompressionOptions object for use with Nvidia Texture Tools.
/// </summary>
private static CompressionOptions GenerateCompressionOptions(TextureCompressionFormat format)
{
    CompressionOptions compressionOptions = new CompressionOptions();
    compressionOptions.SetFormat(format.GetNvttFormat());
    return compressionOptions;
}
public void CompressAndDecompress_workCorrectly_spans([Values(false, true)] bool useDictionary)
{
    var buffer = GenerateSample();
    var data = new ReadOnlySpan<byte>(buffer, 1, buffer.Length - 1);
    var dict = useDictionary ? BuildDictionary() : null;

    Span<byte> compressed = stackalloc byte[Compressor.GetCompressBound(data.Length)];
    using (var options = new CompressionOptions(dict))
    using (var compressor = new Compressor(options))
    {
        var size = compressor.Wrap(data, compressed);
        compressed = compressed.Slice(0, size);
    }

    Span<byte> decompressed = stackalloc byte[data.Length + 1];
    using (var options = new DecompressionOptions(dict))
    using (var decompressor = new Decompressor(options))
    {
        var size = decompressor.Unwrap(compressed, decompressed);
        Assert.AreEqual(data.Length, size);
        decompressed = decompressed.Slice(0, size);
    }

    CollectionAssert.AreEqual(data.ToArray(), decompressed.ToArray());
}
protected override Task<ChunkInfo> DeserializeBytesAsync(BinaryReader reader)
{
    var chunkType = (StructureType)reader.ReadByte();
    long processedLength = reader.ReadInt64();

    CompressionOptions compressionOptions = null;
    EncryptionOptions encryptionOptions = null;

    bool isCompressed = reader.ReadBoolean();
    if (isCompressed)
    {
        compressionOptions = new CompressionOptions(
            (CompressionType)reader.ReadByte(),
            (CompressionLevel)reader.ReadByte());
    }

    bool isEncrypted = reader.ReadBoolean();
    if (isEncrypted)
    {
        byte[] readBytes = reader.ReadBytes(reader.ReadInt32());
        encryptionOptions = new EncryptionOptions(
            algorithm: (EncryptionType)reader.ReadByte(),
            ivBase: reader.ReadString(),
            iterationCount: reader.ReadInt32(),
            salt: readBytes);
    }

    return Task.FromResult(new ChunkInfo(chunkType, encryptionOptions, compressionOptions, processedLength));
}
public int Compress(byte[] inBuffer, byte[] outBuffer, int uncompressedSize)
{
    using (CompressionOptions options = new CompressionOptions(CompressionOptions.MaxCompressionLevel))
    using (Compressor compressor = new Compressor(options))
    {
        return compressor.Wrap(inBuffer, outBuffer, 0);
    }
}
public ColourSet(Byte[] rgba, int mask, CompressionMode mode, CompressionOptions options)
{
    // check the compression mode for dxt1
    bool isDxt1 = ((mode & CompressionMode.Dxt1) != 0);
    bool weightByAlpha = ((options & CompressionOptions.WeightColourByAlpha) != 0);

    Initialize(rgba, mask, isDxt1 ? 128 : 1, weightByAlpha);
}
public ZStandardCompressor(CompressionOptions compressionOptions = null, DecompressionOptions decompressionOptions = null)
{
    Compressor = compressionOptions != null
        ? new ZstdNet.Compressor(compressionOptions)
        : new ZstdNet.Compressor();

    Decompressor = decompressionOptions != null
        ? new Decompressor(decompressionOptions)
        : new Decompressor();
}
public ChunkInfo(
    StructureType type,
    EncryptionOptions encryptionOptions,
    CompressionOptions compressionOptions,
    long processedLength)
    : base(type, encryptionOptions, compressionOptions)
{
    _processedLength = processedLength;
}
protected Compression(CompressionOptions options)
{
    if (options == null)
    {
        throw new ArgumentNullException("options");
    }

    Options = options;
}
public RangeFit(ColourSet colours, CompressionOptions flags)
    : base(colours)
{
    // initialise the metric
    bool perceptual = ((flags & CompressionOptions.ColourMetricPerceptual) != 0);
    m_metric = perceptual ? new Vec3(0.2126f, 0.7152f, 0.0722f) : Vec3.One;

    // initialise the best error
    m_besterror = float.MaxValue;

    // cache some values
    var count = m_colours.Count;
    var values = m_colours.Points;
    var weights = m_colours.Weights;

    // get the covariance matrix
    Sym3x3 covariance = Sym3x3.ComputeWeightedCovariance(count, values, weights);

    // compute the principle component
    Vec3 principle = Sym3x3.ComputePrincipleComponent(covariance);

    // get the min and max range as the codebook endpoints
    Vec3 start = Vec3.Zero;
    Vec3 end = Vec3.Zero;
    if (count > 0)
    {
        float min, max;

        // compute the range
        start = end = values[0];
        min = max = Vec3.Dot(values[0], principle);
        for (int i = 1; i < count; ++i)
        {
            float val = Vec3.Dot(values[i], principle);
            if (val < min)
            {
                start = values[i];
                min = val;
            }
            else if (val > max)
            {
                end = values[i];
                max = val;
            }
        }
    }

    // clamp the output to [0, 1]
    start = start.Clamp(Vec3.Zero, Vec3.One);
    end = end.Clamp(Vec3.Zero, Vec3.One);

    // clamp to the grid and save
    m_start = (GRID * start + HALF).Truncate() * GRIDRCP;
    m_end = (GRID * end + HALF).Truncate() * GRIDRCP;
}
/// <summary>
/// Constructs an instance of the uglification options
/// </summary>
public UglificationOptions()
{
    ParsingOptions = new ParsingOptions();
    CompressionOptions = new CompressionOptions();
    ManglingOptions = new ManglingOptions();
    CodeGenerationOptions = new CodeGenerationOptions();
    ScrewIe8 = false;
    KeepFunctionNames = false;
    Severity = 0;
}
/// <summary>
/// Compresses an image in memory.
/// </summary>
/// <param name="mode">Compression mode (DXT1, DXT3 or DXT5)</param>
/// <param name="options">Compression options (colour compressor, error metric, alpha weighting)</param>
/// <returns>The compressed blocks</returns>
/// <remarks>
/// The source pixels should be presented as a contiguous array of width * height
/// rgba values, with each component as 1 byte each. In memory this should be:
///
///     { r1, g1, b1, a1, ..., rn, gn, bn, an } for n = width * height
///
/// The flags parameter should specify either kDxt1, kDxt3 or kDxt5 compression,
/// however, DXT1 will be used by default if none is specified. When using DXT1
/// compression, 8 bytes of storage are required for each compressed DXT block.
/// DXT3 and DXT5 compression require 16 bytes of storage per block.
///
/// The flags parameter can also specify a preferred colour compressor and
/// colour error metric to use when fitting the RGB components of the data.
/// Possible colour compressors are: kColourClusterFit (the default),
/// kColourRangeFit or kColourIterativeClusterFit. Possible colour error metrics
/// are: kColourMetricPerceptual (the default) or kColourMetricUniform. If no
/// flags are specified in any particular category then the default will be
/// used. Unknown flags are ignored.
///
/// When using kColourClusterFit, an additional flag can be specified to
/// weight the colour of each pixel by its alpha value. For images that are
/// rendered using alpha blending, this can significantly increase the
/// perceived quality.
///
/// Internally this function calls squish::Compress for each block. To see how
/// much memory is required in the compressed image, use
/// squish::GetStorageRequirements.
/// </remarks>
public Byte[] Compress(CompressionMode mode, CompressionOptions options)
{
    var l = GetStorageRequirements(_Width, _Height, mode);

    var blocks = new Byte[l];

    CompressImage(this, blocks, mode, options);

    return blocks;
}
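A minimal usage sketch for Compress above. It assumes only the members that appear in the other examples in this collection (CompressionMode.Dxt1, CompressionOptions.ColourMetricPerceptual, CompressionOptions.WeightColourByAlpha, and the ToSquishImage extension); other flag members may exist.

// Hypothetical call site; srcImage and ToSquishImage() come from the SquishImage
// extension methods shown elsewhere in this collection.
var bitmap = srcImage.ToSquishImage();
byte[] blocks = bitmap.Compress(
    CompressionMode.Dxt1,
    CompressionOptions.ColourMetricPerceptual | CompressionOptions.WeightColourByAlpha);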
/// <summary>
/// Constructs an instance of the JS uglification options
/// </summary>
public UglificationOptions()
{
    ParsingOptions = new ParsingOptions();
    CompressionOptions = new CompressionOptions();
    ManglingOptions = new ManglingOptions();
    CodeGenerationOptions = new CodeGenerationOptions();
    KeepFunctionNames = false;
    ScrewIe8 = true;
    Severity = 0;
}
/// <inheritdoc/>
protected override byte[] BaseCompress(byte[] bytes)
{
    byte[] compressedBytes;

    using (var options = new CompressionOptions(Level))
    using (var compressor = new Compressor(options))
    {
        compressedBytes = compressor.Wrap(bytes);
    }

    return compressedBytes;
}
public void Compress_throwsInsufficientMemoryException_whenDestinationBufferIsTooSmall([Values(false, true)] bool useDictionary)
{
    var data = GenerateSample();
    var dict = useDictionary ? BuildDictionary() : null;
    var compressed = new byte[20];
    const int offset = 4;

    using (var options = new CompressionOptions(dict))
    using (var compressor = new Compressor(options))
        Assert.Throws<InsufficientMemoryException>(() => compressor.Wrap(data, compressed, offset));
}
public static IMAGE SquishImage(this IMAGE srcImage, CompressionMode mode, CompressionOptions options, Action<string> logger)
{
    var srcBitmap = srcImage.ToSquishImage();

    var blocks = srcBitmap.Compress(mode, options);

    var dstBitmap = Bitmap.Decompress(srcImage.Width, srcImage.Height, blocks, mode);

    logger("\t" + dstBitmap.CompareRGBToOriginal(srcBitmap).ToString());

    return dstBitmap.ToImageSharp();
}
public SingleColourFit(ColourSet colours, CompressionOptions flags)
    : base(colours)
{
    // grab the single colour
    var values = m_colours.Points;
    m_colour[0] = (Byte)((255.0f * values[0].X).FloatToInt(255));
    m_colour[1] = (Byte)((255.0f * values[0].Y).FloatToInt(255));
    m_colour[2] = (Byte)((255.0f * values[0].Z).FloatToInt(255));

    // initialise the best error
    m_besterror = int.MaxValue;
}
private static bool CompareCompressionOptions(CompressionOptions lhs, CompressionOptions rhs)
{
    if (lhs == null && rhs == null)
    {
        return true;
    }

    if (lhs != null && rhs != null)
    {
        return lhs.ChunkLengthInKb == rhs.ChunkLengthInKb;
    }

    return false;
}
public static IMAGE SquishImage(this IMAGE srcImage, CompressionMode mode, CompressionOptions options, TestContext context)
{
    var srcBitmap = srcImage.ToSquishImage();

    var blocks = srcBitmap.Compress(mode, options);

    var dstBitmap = Bitmap.Decompress(srcImage.Width, srcImage.Height, blocks, mode);

    context.WriteLine(dstBitmap.CompareRGBToOriginal(srcBitmap).ToString());

    return dstBitmap.ToImageSharp();
}
public static byte[] Compress(byte[] decompressedBuffer)
{
    byte[] compressedBuffer;

    using (CompressionOptions options = new CompressionOptions(null, compressionLevel: 4))
    using (Compressor compressor = new Compressor(options))
    {
        compressedBuffer = compressor.Wrap(decompressedBuffer);
    }

    uint compressedMagic = BitConverter.ToUInt32(compressedBuffer, 0);
    Debug.Assert(compressedMagic == 0xFD2FB528);

    return compressedBuffer;
}
public void DecompressWithoutDictionary_throwsZstdException_onDataCompressedWithIt()
{
    var data = GenerateSample();
    var dict = BuildDictionary();

    byte[] compressed;
    using (var options = new CompressionOptions(dict))
    using (var compressor = new Compressor(options))
        compressed = compressor.Wrap(data);

    using (var decompressor = new Decompressor())
        Assert.Throws<ZstdException>(() => decompressor.Unwrap(compressed));
}
private static byte[] CompressAndDecompress(byte[] data, byte[] dict, int compressionLevel = CompressionOptions.DefaultCompressionLevel)
{
    byte[] compressed;
    using (var options = new CompressionOptions(dict, compressionLevel))
    using (var compressor = new Compressor(options))
        compressed = compressor.Wrap(data);

    byte[] decompressed;
    using (var options = new DecompressionOptions(dict))
    using (var decompressor = new Decompressor(options))
        decompressed = decompressor.Unwrap(compressed);

    return decompressed;
}
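A short round-trip sketch built on the helper above. It assumes the GenerateSample, BuildDictionary and CompressionOptions.MaxCompressionLevel members used by the other ZstdNet examples in this collection.

var data = GenerateSample();
var dict = BuildDictionary();
// Wrap followed by Unwrap should reproduce the original bytes exactly,
// both at the default level and at the maximum compression level.
CollectionAssert.AreEqual(data, CompressAndDecompress(data, dict));
CollectionAssert.AreEqual(data, CompressAndDecompress(data, dict, CompressionOptions.MaxCompressionLevel));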
public void Compress_throwsDstSizeTooSmall_whenDestinationBufferIsTooSmall([Values(false, true)] bool useDictionary)
{
    var data = GenerateSample();
    var dict = useDictionary ? BuildDictionary() : null;
    var compressed = new byte[20];
    const int offset = 4;

    using (var options = new CompressionOptions(dict))
    using (var compressor = new Compressor(options))
    {
        var ex = Assert.Throws<ZstdException>(() => compressor.Wrap(data, compressed, offset));
        Assert.AreEqual(ZSTD_ErrorCode.ZSTD_error_dstSize_tooSmall, ex.Code);
    }
}
public ErrorPromptResult Patch(CompressionOptions options, string inBsaFile, string inBsaPath, string outBsaPath)
{
    bool patchSuccess;

#if DEBUG
    var watch = new Stopwatch();
    try
    {
        watch.Start();
#endif
        patchSuccess = _bsaDiff.PatchBsa(options, inBsaPath, outBsaPath);
        if (!patchSuccess)
            Log.Dual("Patching BSA {0} failed", inBsaFile);
#if DEBUG
    }
    finally
    {
        watch.Stop();
        Debug.WriteLine("PatchBSA for {0} finished in {1}", inBsaFile, watch.Elapsed);
    }
#endif

    if (patchSuccess)
    {
        Log.Dual("Build successful.");
        return ErrorPromptResult.Continue;
    }

    return Prompts.PatchingErrorPrompt(inBsaFile);
}
public ArchiveSettings(bool defaultCompressed, bool bStringPrefixed, CompressionOptions options)
{
    DefaultCompressed = defaultCompressed;
    BStringPrefixed = bStringPrefixed;
    Options = options;
}
/// <summary>
/// Constructs an instance of the Uglify JS minifier
/// </summary>
/// <param name="createJsEngineInstance">Delegate that creates an instance of JavaScript engine</param>
/// <param name="uglifyConfig">Configuration settings of Uglify Minifier</param>
public UglifyJsMinifier(Func<IJsEngine> createJsEngineInstance, UglifySettings uglifyConfig)
{
    JsMinifierSettings jsMinifierConfig = uglifyConfig.Js;
    ParsingSettings parsingConfig = jsMinifierConfig.Parsing;
    CompressionSettings compressionConfig = jsMinifierConfig.Compression;
    ManglingSettings manglingConfig = jsMinifierConfig.Mangling;
    CodeGenerationSettings codeGenerationConfig = jsMinifierConfig.CodeGeneration;

    ParsingOptions = new ParsingOptions
    {
        Strict = parsingConfig.Strict,
        BareReturns = parsingConfig.BareReturns
    };

    CompressionOptions = new CompressionOptions
    {
        Compress = compressionConfig.Compress,
        Sequences = compressionConfig.Sequences,
        PropertiesDotNotation = compressionConfig.PropertiesDotNotation,
        DeadCode = compressionConfig.DeadCode,
        DropDebugger = compressionConfig.DropDebugger,
        Unsafe = compressionConfig.Unsafe,
        Conditionals = compressionConfig.Conditionals,
        Comparisons = compressionConfig.Comparisons,
        Evaluate = compressionConfig.Evaluate,
        Booleans = compressionConfig.Booleans,
        Loops = compressionConfig.Loops,
        Unused = compressionConfig.Unused,
        HoistFunctions = compressionConfig.HoistFunctions,
        KeepFunctionArgs = compressionConfig.KeepFunctionArgs,
        HoistVars = compressionConfig.HoistVars,
        IfReturn = compressionConfig.IfReturn,
        JoinVars = compressionConfig.JoinVars,
        Cascade = compressionConfig.Cascade,
        GlobalDefinitions = compressionConfig.GlobalDefinitions,
        PureGetters = compressionConfig.PureGetters,
        PureFunctions = compressionConfig.PureFunctions,
        DropConsole = compressionConfig.DropConsole,
        Angular = compressionConfig.Angular
    };

    ManglingOptions = new ManglingOptions
    {
        Mangle = manglingConfig.Mangle,
        Except = manglingConfig.Except,
        Eval = manglingConfig.Eval,
        Sort = manglingConfig.Sort,
        TopLevel = manglingConfig.TopLevel
    };

    CodeGenerationOptions = new CodeGenerationOptions
    {
        Beautify = codeGenerationConfig.Beautify,
        IndentLevel = codeGenerationConfig.IndentLevel,
        IndentStart = codeGenerationConfig.IndentStart,
        QuoteKeys = codeGenerationConfig.QuoteKeys,
        SpaceColon = codeGenerationConfig.SpaceColon,
        AsciiOnly = codeGenerationConfig.AsciiOnly,
        InlineScript = codeGenerationConfig.InlineScript,
        Width = codeGenerationConfig.Width,
        MaxLineLength = codeGenerationConfig.MaxLineLength,
        Bracketize = codeGenerationConfig.Bracketize,
        Semicolons = codeGenerationConfig.Semicolons,
        Comments = codeGenerationConfig.Comments,
        PreserveLine = codeGenerationConfig.PreserveLine,
        UnescapeRegexps = codeGenerationConfig.UnescapeRegexps,
        QuoteStyle = codeGenerationConfig.QuoteStyle
    };

    ScrewIe8 = jsMinifierConfig.ScrewIe8;
    KeepFunctionNames = jsMinifierConfig.KeepFunctionNames;
    Severity = jsMinifierConfig.Severity;

    if (createJsEngineInstance == null)
    {
        string jsEngineName = uglifyConfig.JsEngine.Name;
        if (string.IsNullOrWhiteSpace(jsEngineName))
        {
            throw new ConfigurationErrorsException(
                string.Format(CoreStrings.Configuration_JsEngineNotSpecified,
                    "uglify",
                    @"
  * JavaScriptEngineSwitcher.Msie
  * JavaScriptEngineSwitcher.V8",
                    "MsieJsEngine")
            );
        }

        createJsEngineInstance = () => JsEngineSwitcher.Current.CreateJsEngineInstance(jsEngineName);
    }

    _createJsEngineInstance = createJsEngineInstance;
}
public BsaReader(MemoryMappedFile mmf, CompressionOptions options)
{
    _mmf = mmf;
    Settings = new ArchiveSettings { Options = options };
}