public int deflateEnd() { if (this.dstate == null) { return -2; } int num = this.dstate.deflateEnd(); this.dstate = null; return num; }
public override void OnActionExecuting(HttpActionContext actionContext) { var content = actionContext.Request.Content; var zipContentBytes = content == null ? null : content.ReadAsByteArrayAsync().Result; var unzipContentBytes = zipContentBytes == null ? new byte[0] : Deflate.Decompress(zipContentBytes); actionContext.Request.Content = new ByteArrayContent(unzipContentBytes); base.OnActionExecuting(actionContext); }
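// A minimal sketch of what the Deflate.Compress/Decompress helper used throughout these snippets
// might look like, written against System.IO.Compression.DeflateStream from the .NET base class
// library. The class name and signatures are assumptions inferred from the call sites here, not
// the project's actual implementation.
internal static class DeflateHelperSketch {
    // Compress a byte array into a raw deflate stream.
    public static byte[] Compress(byte[] data) {
        using (var output = new System.IO.MemoryStream()) {
            using (var deflate = new System.IO.Compression.DeflateStream(output, System.IO.Compression.CompressionMode.Compress, leaveOpen: true)) {
                deflate.Write(data, 0, data.Length);
            } // disposing the DeflateStream flushes the remaining compressed bits into 'output'
            return output.ToArray();
        }
    }

    // Decompress a raw deflate stream back into a byte array.
    public static byte[] Decompress(byte[] data) {
        using (var input = new System.IO.MemoryStream(data))
        using (var deflate = new System.IO.Compression.DeflateStream(input, System.IO.Compression.CompressionMode.Decompress))
        using (var output = new System.IO.MemoryStream()) {
            deflate.CopyTo(output);
            return output.ToArray();
        }
    }
}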
public static MutableString /*!*/ Flush(Deflate /*!*/ self, [DefaultParameterValue(SYNC_FLUSH)] int flush) { if (flush == NO_FLUSH) { return(MutableString.CreateEmpty()); } return(Compress(self, MutableString.FrozenEmpty, flush)); }
public static async Task <byte[]> DecompressAsync(ReadOnlyMemory <byte> data, CompressionMethod method) { return(method switch { CompressionMethod.LZ4 => await LZ4.DecompressAsync(data).ConfigureAwait(false), CompressionMethod.Deflate => await Deflate.DecompressAsync(data).ConfigureAwait(false), CompressionMethod.Brotli => await Brotli.DecompressAsync(data).ConfigureAwait(false), CompressionMethod.Gzip => await Gzip.DecompressAsync(data).ConfigureAwait(false), _ => await Gzip.DecompressAsync(data).ConfigureAwait(false) }); }
internal int DeflateEnd() { if (dstate == null) { return(Z_STREAM_ERROR); } int ret = dstate.DeflateEnd(); dstate = null; return(ret); }
public int DeflateEnd() { if (Dstate == null) { return(ZStreamError); } int ret = Dstate.DeflateEnd(); Dstate = null; return(ret); }
public int deflateEnd() { if (dstate == null) { return(Z_STREAM_ERROR); } int ret = dstate.deflateEnd(); dstate = null; return(ret); }
public int deflateEnd() { if (dstate == null) { return(-2); } int result = dstate.deflateEnd(); dstate = null; return(result); }
public static void SetParams(Deflate /*!*/ self, [NotNull] MutableString /*!*/ dictionary) { byte[] buffer = dictionary.ToByteArray(); var zst = self.GetStream(); int err = zst.deflateSetDictionary(buffer, buffer.Length); if (err != Z_OK) { throw MakeError(err, zst.msg); } }
public static void SetParams( Deflate /*!*/ self, [DefaultParameterValue(DEFAULT_COMPRESSION)] int level, [DefaultParameterValue(DEFAULT_STRATEGY)] int strategy) { var zst = self.GetStream(); int err = zst.deflateParams(level, (zlib.CompressionStrategy)strategy); if (err != Z_OK) { throw MakeError(err, zst.msg); } }
public void Server_DeflateCompress_Client_DeflateDecompress_should_be_yao() { Console.WriteLine("Client calls the server → server compresses the data with Deflate → client decompresses and verifies the decompressed result contains the keyword"); var url = "api/test/DeflateCompression/yao"; var response = MsTestHook.Client.GetAsync(url).Result; var content = response.Content.ReadAsByteArrayAsync().Result; var decompress = Deflate.Decompress(content); var result = Encoding.UTF8.GetString(decompress); Assert.AreEqual(response.StatusCode, HttpStatusCode.OK); Assert.AreEqual(true, result.Contains("yao")); }
protected void Write(Stream stream, SocketMessager messager) { MemoryStream ms = new MemoryStream(); byte[] buff = Encoding.UTF8.GetBytes(messager.GetCanParseString()); ms.Write(buff, 0, buff.Length); if (messager.Arg != null) { buff = Deflate.Compress(BaseSocket.Serialize(messager.Arg)); ms.Write(buff, 0, buff.Length); } this.Write(stream, ms.ToArray()); ms.Close(); }
/// <summary>
/// All dynamically allocated data structures for this stream are freed. This function discards any unprocessed input and does not flush any pending
/// output.
/// </summary>
/// <returns>
/// deflateEnd returns <see cref="ZLibResultCode.Z_OK" /> if success, <see cref="ZLibResultCode.Z_STREAM_ERROR" /> if the stream state was inconsistent,
/// <see cref="ZLibResultCode.Z_DATA_ERROR" /> if the stream was freed prematurely (some input or output was discarded). In the error case,
/// <see cref="msg" /> may be set but then points to a static string (which must not be deallocated).
/// </returns>
public int deflateEnd() { next_in_index = 0; next_out_index = 0; if (_dstate == null) { return((int)ZLibResultCode.Z_STREAM_ERROR); } int ret = _dstate.deflateEnd(); _dstate = null; return(ret); }
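// A minimal usage sketch of the lifecycle these return codes describe, assuming the zlib-style
// stream wrapper these snippets belong to (the class name ZStream and the Z_* result constants
// follow the zlib convention and are assumptions; they may not match this port exactly).
public int DeflateLifecycleSketch() {
    var z = new ZStream();
    z.deflateInit(6, 15);        // Z_OK on success (compression level 6, 15 window bits)
    // ... feed input through next_in/avail_in and call deflate(...) until it reports Z_STREAM_END ...
    int rc = z.deflateEnd();     // Z_OK: all deflate state is released and dstate becomes null
    rc = z.deflateEnd();         // Z_STREAM_ERROR: a second call hits the null-state guard shown above
    return rc;
}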
public static Deflate /*!*/ AppendData(Deflate /*!*/ self, [DefaultProtocol] MutableString str) { var zst = self.GetStream(); MutableString trailingUncompressedData = null; int result = Process(zst, str, zlib.FlushStrategy.Z_NO_FLUSH, compress, ref trailingUncompressedData); if (result != Z_OK) { throw MakeError(result, zst.msg); } return(self); }
public override void OnActionExecuted(HttpActionExecutedContext actionContext) { var content = actionContext.Response.Content; var sourceBytes = content == null ? null : content.ReadAsByteArrayAsync().Result; var zipContent = sourceBytes == null ? new byte[0] : Deflate.Compress(sourceBytes); actionContext.Response.Content = new ByteArrayContent(zipContent); actionContext.Response.Content.Headers.Remove("Content-Type"); actionContext.Response.Content.Headers.Add("Content-encoding", "deflate");
    //actContext.Response.Content.Headers.Add("Content-Type", "application/json");
    actionContext.Response.Content.Headers.Add("Content-Type", "application/json;charset=utf-8"); base.OnActionExecuted(actionContext); }
public static MutableString /*!*/ Compress(Deflate /*!*/ self, [DefaultProtocol] MutableString str, [DefaultParameterValue(NO_FLUSH)] int flush) { MutableString compressed; MutableString trailingUncompressedData = null; var zst = self.GetStream(); int result = Process(zst, str, (zlib.FlushStrategy)flush, compress, out compressed, ref trailingUncompressedData); if (result != Z_OK) { throw MakeError(result, zst.msg); } return(compressed); }
public void RoundTripListObject() { List <int> start = new List <int> { 1, 3, 4 }; string startString = JsonConvert.SerializeObject(start); byte[] encoded = Deflate.Encode(startString); byte[] finishBytes = Deflate.Decode(encoded); string finishString = Encoding.UTF8.GetString(finishBytes); List <int> finish = JsonConvert.DeserializeObject <List <int> >(finishString); Assert.Equal(start, finish); }
public void Client_DeflateCompress_Server_DeflateDecompress() { Console.WriteLine("Client compresses the data with Deflate → server decompresses it and returns the result → verify the decompressed result matches the client's original data"); var url = "api/test/DeflateDecompression"; var builder = CreateData(); var contentBytes = Encoding.UTF8.GetBytes(builder); var zipContent = Deflate.Compress(contentBytes); var request = new HttpRequestMessage(HttpMethod.Post, url) { Content = new ByteArrayContent(zipContent) }; var response = MsTestHook.Client.SendAsync(request).Result; var result = response.Content.ReadAsStringAsync().Result; Assert.AreEqual(response.StatusCode, HttpStatusCode.OK); Assert.AreEqual(builder, result); }
private static Dictionary <string, MapcacheMapData> ReadMapcache(string path) {
    Dictionary <string, MapcacheMapData> mapdata = new Dictionary <string, MapcacheMapData>();
    byte[] decodedBuf, encodedBuf;
    // We want a relative path
    if (path[0] == '/' || path[0] == '\\') { path = path.Substring(1); }
    //path = Path.Combine(Core.Conf.ConfigDir, path);
    using (FileStream fs = File.OpenRead(path)) {
        using (BinaryReader bin = new BinaryReader(fs)) {
            uint mapCount = bin.ReadUInt32();
            for (int i = 0; i < mapCount; i++) {
                // Mapinfo
                string mapname = bin.ReadString();
                short xs = bin.ReadInt16();
                short ys = bin.ReadInt16();
                int len = bin.ReadInt32();
                encodedBuf = bin.ReadBytes(len);
                decodedBuf = Deflate.Decompress(encodedBuf); // decodedBuf now contains the cell array
                mapdata.Add(mapname, new MapcacheMapData { Width = xs, Height = ys, CellData = decodedBuf });
                encodedBuf = null;
                decodedBuf = null;
            }
        }
    }
    return(mapdata);
}
/* Utility method */
// 'input' is a string of 0's and 1's (with optional spaces) representing the input bit sequence.
// 'refOutput' is a string of pairs of hexadecimal digits (with optional spaces) representing
// the expected decompressed output byte sequence.
private static void test(string input, string refOutput) { refOutput = refOutput.Replace(" ", string.Empty); if (refOutput.Length % 2 != 0) { throw new ArgumentException(); } var refOut = new byte[refOutput.Length / 2]; for (int i = 0; i < refOut.Length; i++) { refOut[i] = (byte)int.Parse(refOutput.Substring(i * 2, 2), NumberStyles.HexNumber); } input = input.Replace(" ", string.Empty); var inputStream = new StringBitReader(input); byte[] actualOut = Deflate.Decompress(inputStream); AssertArrayEquals(refOut, actualOut); }
/// <summary>
/// Returns the file data, decompressed if needed
/// </summary>
/// <param name="item">The grf file</param>
/// <param name="decompress">Should the data be decompressed?</param>
/// <returns></returns>
public byte[] GetFileData(FileItem item, bool decompress) {
    byte[] buf = null;
    bool isUpdated = item.IsAdded || item.IsUpdated;
    if (isUpdated) {
        // Load data from file
        buf = File.ReadAllBytes(item.NewFilepath);
    } else if (item.FileData == null || item.FileData.Length != item.LengthCompressedAlign) {
        // Cache data
        CacheFileData(item);
        buf = item.FileData;
    } else {
        buf = item.FileData;
    }
    if (isUpdated == false && buf != null && buf.Length > 0) {
        // Decode, if needed
        if (item.Cycle >= 0 && Deflate.IsMagicHead(buf) == false) {
            EncryptionHelper.DecryptFileData(buf, item.Cycle == 0, item.Cycle);
        }
        // Decompress data
        if (decompress) {
            buf = Deflate.Decompress(buf);
        }
    }
    return(buf);
}
static void Deflatetest() { BufferFormat fan = new BufferFormat(1000, new FDataExtraHandle((o) => { return(Deflate.Compress(o)); })); fan.AddItem(true); fan.AddItem("abcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabc"); fan.AddItem(123); byte[] data = fan.Finish(); ReadBytes read = new ReadBytes(data, 4, -1, new RDataExtraHandle((o) => { return(Deflate.Decompress(o)); })); int lengt; int cmd; bool var1; string var2; int var3; if (read.IsDataExtraSuccess && read.ReadInt32(out lengt) && lengt == read.Length && read.ReadInt32(out cmd) && read.ReadBoolean(out var1) && read.ReadString(out var2) && read.ReadInt32(out var3)) { Console.WriteLine("Length before compression: {0}", read.Data.Length); Console.WriteLine("Length after compression: {0}", read.Length); Console.WriteLine("This Deflate-> Length:{0} Cmd:{1} var1:{2} var2:{3} var3:{4}", lengt, cmd, var1, var2, var3); } }
public int deflateInit(int level, int bits, bool nowrap) { dstate = new Deflate(); return(dstate.deflateInit(this, level, nowrap?-bits:bits)); }
public int deflateInit(int level, int bits) { dstate = new Deflate(); return dstate.deflateInit(this, level, bits); }
public int deflateEnd() { if (dstate == null) return Z_STREAM_ERROR; int ret = dstate.deflateEnd(); dstate = null; return ret; }
internal int DeflateInit(int level, int bits, bool nowrap) { dstate = new Deflate(this); return(dstate.DeflateInit(level, nowrap ? -bits : bits)); }
internal int DeflateInit(int level, int bits, int memlevel) { dstate = new Deflate(this); return(dstate.DeflateInit(level, bits, memlevel)); }
private void btnBuild_Click(object sender, EventArgs e) {
    if (this._tables.Find(delegate(TableInfo table) { return(table.IsOutput); }) == null) {
        DataGridViewCellMouseEventArgs e2 = new DataGridViewCellMouseEventArgs(1, -1, 1, 1, new MouseEventArgs(MouseButtons.Left, 1, 1, 1, 1));
        this.dgvGridview_ColumnHeaderMouseClick(this, e2);
    }
    FolderBrowserDialog fbd = new FolderBrowserDialog();
    if (fbd.ShowDialog() != DialogResult.OK) { return; }
    string selectedPath = fbd.SelectedPath;
    List <BuildInfo> bs = null;
    SocketMessager messager = new SocketMessager("Build", new object[] { this.txtSolution.Text, this.chkSolution.Checked, string.Join("", this._tables.ConvertAll <string>(delegate(TableInfo table){ return(string.Concat(table.IsOutput ? 1 : 0)); }).ToArray()), this.chkWebAdmin.Checked, this.chkDownloadRes.Checked });
    this._socket.Write(messager, delegate(object sender2, ClientSocketReceiveEventArgs e2) {
        bs = e2.Messager.Arg as List <BuildInfo>;
        if (e2.Messager.Arg is Exception) { throw e2.Messager.Arg as Exception; }
    }, TimeSpan.FromSeconds(60 * 5));
    if (bs == null) { return; }
    foreach (BuildInfo b in bs) {
        string path = Path.Combine(selectedPath, b.Path);
        Directory.CreateDirectory(Path.GetDirectoryName(path));
        string fileName = Path.GetFileName(b.Path);
        string ext = Path.GetExtension(b.Path);
        Encoding encode = Encoding.UTF8;
        if (fileName.EndsWith(".rar") || fileName.EndsWith(".zip") || fileName.EndsWith(".dll")) {
            using (FileStream fs = new FileStream(path, FileMode.Create, FileAccess.Write)) {
                fs.Write(b.Data, 0, b.Data.Length);
                fs.Close();
            }
            continue;
        }
        byte[] data = Deflate.Decompress(b.Data);
        string content = Encoding.UTF8.GetString(data);
        if (string.Compare(fileName, "web.config") == 0) {
            string place = System.Web.HttpUtility.HtmlEncode(this.ConnectionString);
            content = content.Replace("{connectionString}", place);
        }
        //if (string.Compare(fileName, "procedure.sql") == 0) {
        //    this.ExecuteNonQuery(content);
        //}
        if (string.Compare(ext, ".refresh") == 0) { encode = Encoding.Unicode; }
        using (StreamWriter sw = new StreamWriter(path, false, encode)) {
            sw.Write(content);
            sw.Close();
        }
    }
    GC.Collect();
    Lib.Msgbox("The code files have been created in \"" + selectedPath + "\", please check.");
    //System.Diagnostics.Process.Start("iexplore.exe", "http://www.penzz.com/");
}
public int DeflateInit(int level, int bits) { dstate = new Deflate(); return(dstate.deflateInit(this, level, bits)); }
// Construct one Huffman tree and assign the code bit strings and lengths.
// Update the total bit length for the current block.
// IN assertion: the field freq is set for all tree elements.
// OUT assertions: the fields len and code are set to the optimal bit length
// and corresponding code. The length opt_len is updated; static_len is
// also updated if stree is not null. The field max_code is set.
internal void Build_tree(Deflate s) {
    short[] tree = dyn_tree;
    short[] stree = stat_desc.static_tree;
    int elems = stat_desc.elems;
    int n, m;            // iterate over heap elements
    int max_code = -1;   // largest code with non zero frequency
    int node;            // new node being created

    // Construct the initial heap, with least frequent element in
    // heap[1]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
    // heap[0] is not used.
    s.heap_len = 0;
    s.heap_max = HEAP_SIZE;
    for (n = 0; n < elems; n++) {
        if (tree[n * 2] != 0) {
            s.heap[++s.heap_len] = max_code = n;
            s.depth[n] = 0;
        } else {
            tree[n * 2 + 1] = 0;
        }
    }

    // The pkzip format requires that at least one distance code exists,
    // and that at least one bit should be sent even if there is only one
    // possible code. So to avoid special checks later on we force at least
    // two codes of non zero frequency.
    while (s.heap_len < 2) {
        node = s.heap[++s.heap_len] = (max_code < 2 ? ++max_code : 0);
        tree[node * 2] = 1;
        s.depth[node] = 0;
        s.opt_len--;
        if (stree is object) {
            s.static_len -= stree[node * 2 + 1];
        }
        // node is 0 or 1 so it does not have extra bits
    }
    this.max_code = max_code;

    // The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
    // establish sub-heaps of increasing lengths:
    for (n = s.heap_len / 2; n >= 1; n--) {
        s.Pqdownheap(tree, n);
    }

    // Construct the Huffman tree by repeatedly combining the least two
    // frequent nodes.
    node = elems;   // next internal node of the tree
    do {
        // n = node of least frequency
        n = s.heap[1];
        s.heap[1] = s.heap[s.heap_len--];
        s.Pqdownheap(tree, 1);
        m = s.heap[1];   // m = node of next least frequency

        s.heap[--s.heap_max] = n;   // keep the nodes sorted by frequency
        s.heap[--s.heap_max] = m;

        // Create a new node father of n and m
        tree[node * 2] = (short)(tree[n * 2] + tree[m * 2]);
        s.depth[node] = (byte)(Math.Max(s.depth[n], s.depth[m]) + 1);
        tree[n * 2 + 1] = tree[m * 2 + 1] = (short)node;

        // and insert the new node in the heap
        s.heap[1] = node++;
        s.Pqdownheap(tree, 1);
    } while (s.heap_len >= 2);

    s.heap[--s.heap_max] = s.heap[1];

    // At this point, the fields freq and dad are set. We can now
    // generate the bit lengths.
    this.Gen_bitlen(s);

    // The field len is now set, we can generate the bit codes
    Gen_codes(tree, max_code, s.bl_count, s.next_code);
}
public int DeflateInit(int level, int windowBits, int memLevel, CompressionStrategy strategy) { _dstate = new Deflate(); return(_dstate.DeflateInit2(this, level, windowBits, memLevel, strategy)); }
internal StaticTree stat_desc;   // the corresponding static tree

// Compute the optimal bit lengths for a tree and update the total bit length
// for the current block.
// IN assertion: the fields freq and dad are set, heap[heap_max] and
// above are the tree nodes sorted by increasing frequency.
// OUT assertions: the field len is set to the optimal bit length, the
// array bl_count contains the frequencies for each bit length.
// The length opt_len is updated; static_len is also updated if stree is
// not null.
void Gen_bitlen(Deflate s) {
    short[] tree = dyn_tree;
    short[] stree = stat_desc.static_tree;
    int[] extra = stat_desc.extra_bits;
    int _base = stat_desc.extra_base;
    int max_length = stat_desc.max_length;
    int h;              // heap index
    int n, m;           // iterate over the tree elements
    int bits;           // bit length
    int xbits;          // extra bits
    short f;            // frequency
    int overflow = 0;   // number of elements with bit length too large

    for (bits = 0; bits <= MAX_BITS; bits++) {
        s.bl_count[bits] = 0;
    }

    // In a first pass, compute the optimal bit lengths (which may
    // overflow in the case of the bit length tree).
    tree[s.heap[s.heap_max] * 2 + 1] = 0;   // root of the heap

    for (h = s.heap_max + 1; h < HEAP_SIZE; h++) {
        n = s.heap[h];
        bits = tree[tree[n * 2 + 1] * 2 + 1] + 1;
        if (bits > max_length) {
            bits = max_length;
            overflow++;
        }
        tree[n * 2 + 1] = (short)bits;
        // We overwrite tree[n*2+1] which is no longer needed

        if (n > max_code) {
            continue;   // not a leaf node
        }

        s.bl_count[bits]++;
        xbits = 0;
        if (n >= _base) {
            xbits = extra[n - _base];
        }
        f = tree[n * 2];
        s.opt_len += f * (bits + xbits);
        if (stree is object) {
            s.static_len += f * (stree[n * 2 + 1] + xbits);
        }
    }
    if (overflow == 0) {
        return;
    }

    // This happens for example on obj2 and pic of the Calgary corpus
    // Find the first bit length which could increase:
    do {
        bits = max_length - 1;
        while (s.bl_count[bits] == 0) {
            bits--;
        }
        s.bl_count[bits]--;        // move one leaf down the tree
        s.bl_count[bits + 1] += 2; // move one overflow item as its brother
        s.bl_count[max_length]--;
        // The brother of the overflow item also moves one step up,
        // but this does not affect bl_count[max_length]
        overflow -= 2;
    } while (overflow > 0);

    for (bits = max_length; bits != 0; bits--) {
        n = s.bl_count[bits];
        while (n != 0) {
            m = s.heap[--h];
            if (m > max_code) {
                continue;
            }
            if (tree[m * 2 + 1] != bits) {
                s.opt_len += (int)(((long)bits - (long)tree[m * 2 + 1]) * (long)tree[m * 2]);
                tree[m * 2 + 1] = (short)bits;
            }
            n--;
        }
    }
}
internal StaticTree stat_desc; // the corresponding static tree

// Compute the optimal bit lengths for a tree and update the total bit length
// for the current block.
// IN assertion: the fields freq and dad are set, heap[heap_max] and
// above are the tree nodes sorted by increasing frequency.
// OUT assertions: the field len is set to the optimal bit length, the
// array bl_count contains the frequencies for each bit length.
// The length opt_len is updated; static_len is also updated if stree is
// not null.
internal void gen_bitlen(Deflate s) {
    short[] tree = dyn_tree;
    short[] stree = stat_desc.static_tree;
    int[] extra = stat_desc.extra_bits;
    int base_Renamed = stat_desc.extra_base;
    int max_length = stat_desc.max_length;
    int h; // heap index
    int n, m; // iterate over the tree elements
    int bits; // bit length
    int xbits; // extra bits
    short f; // frequency
    int overflow = 0; // number of elements with bit length too large

    for (bits = 0; bits <= MAX_BITS; bits++)
        s.bl_count[bits] = 0;

    // In a first pass, compute the optimal bit lengths (which may
    // overflow in the case of the bit length tree).
    tree[s.heap[s.heap_max] * 2 + 1] = 0; // root of the heap

    for (h = s.heap_max + 1; h < HEAP_SIZE; h++) {
        n = s.heap[h];
        bits = tree[tree[n * 2 + 1] * 2 + 1] + 1;
        if (bits > max_length) {
            bits = max_length;
            overflow++;
        }
        tree[n * 2 + 1] = (short) bits;
        // We overwrite tree[n*2+1] which is no longer needed

        if (n > max_code)
            continue; // not a leaf node

        s.bl_count[bits]++;
        xbits = 0;
        if (n >= base_Renamed)
            xbits = extra[n - base_Renamed];
        f = tree[n * 2];
        s.opt_len += f * (bits + xbits);
        if (stree != null)
            s.static_len += f * (stree[n * 2 + 1] + xbits);
    }
    if (overflow == 0)
        return;

    // This happens for example on obj2 and pic of the Calgary corpus
    // Find the first bit length which could increase:
    do {
        bits = max_length - 1;
        while (s.bl_count[bits] == 0)
            bits--;
        s.bl_count[bits]--; // move one leaf down the tree
        s.bl_count[bits + 1] = (short) (s.bl_count[bits + 1] + 2); // move one overflow item as its brother
        s.bl_count[max_length]--;
        // The brother of the overflow item also moves one step up,
        // but this does not affect bl_count[max_length]
        overflow -= 2;
    } while (overflow > 0);

    for (bits = max_length; bits != 0; bits--) {
        n = s.bl_count[bits];
        while (n != 0) {
            m = s.heap[--h];
            if (m > max_code)
                continue;
            if (tree[m * 2 + 1] != bits) {
                s.opt_len = (int) (s.opt_len + ((long) bits - (long) tree[m * 2 + 1]) * (long) tree[m * 2]);
                tree[m * 2 + 1] = (short) bits;
            }
            n--;
        }
    }
}
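// A standalone illustration of the overflow repair loop above, using hypothetical frequencies.
// For frequencies {16, 8, 4, 2, 1} the unconstrained Huffman lengths are {1, 2, 3, 4, 4}; capping
// at max_length = 3 leaves overflow = 2 and bl_count = {1:1, 2:1, 3:3}, which oversubscribes the
// code space (1/2 + 1/4 + 3/8 > 1). The repair loop restores a valid length distribution.
static void BitLengthOverflowRepairExample() {
    int maxLength = 3;
    int overflow = 2;
    var blCount = new int[] { 0, 1, 1, 3 };   // index = bit length, value = number of codes
    do {
        int bits = maxLength - 1;
        while (blCount[bits] == 0) bits--;
        blCount[bits]--;            // move one leaf down the tree
        blCount[bits + 1] += 2;     // it gains a brother one level deeper
        blCount[maxLength]--;       // one former overflow leaf moves up into the freed slot
        overflow -= 2;
    } while (overflow > 0);
    // blCount is now { 0, 1, 0, 4 }: lengths {1, 3, 3, 3, 3}, which fills the code space exactly
    // (1/2 + 4/8 = 1); the final loop in gen_bitlen then reassigns these lengths to the leaves.
    Console.WriteLine(string.Join(", ", blCount));
}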
public byte[] Serialize(object obj) { string stringObj = JsonConvert.SerializeObject(obj); return(Deflate.Encode(stringObj)); }
// Construct one Huffman tree and assign the code bit strings and lengths.
// Update the total bit length for the current block.
// IN assertion: the field freq is set for all tree elements.
// OUT assertions: the fields len and code are set to the optimal bit length
// and corresponding code. The length opt_len is updated; static_len is
// also updated if stree is not null. The field max_code is set.
internal void build_tree(Deflate s) {
    short[] tree = dyn_tree;
    short[] stree = stat_desc.static_tree;
    int elems = stat_desc.elems;
    int n, m; // iterate over heap elements
    int max_code = -1; // largest code with non zero frequency
    int node; // new node being created

    // Construct the initial heap, with least frequent element in
    // heap[1]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
    // heap[0] is not used.
    s.heap_len = 0;
    s.heap_max = HEAP_SIZE;

    for (n = 0; n < elems; n++) {
        if (tree[n * 2] != 0) {
            s.heap[++s.heap_len] = max_code = n;
            s.depth[n] = 0;
        } else {
            tree[n * 2 + 1] = 0;
        }
    }

    // The pkzip format requires that at least one distance code exists,
    // and that at least one bit should be sent even if there is only one
    // possible code. So to avoid special checks later on we force at least
    // two codes of non zero frequency.
    while (s.heap_len < 2) {
        node = s.heap[++s.heap_len] = (max_code < 2 ? ++max_code : 0);
        tree[node * 2] = 1;
        s.depth[node] = 0;
        s.opt_len--;
        if (stree != null)
            s.static_len -= stree[node * 2 + 1];
        // node is 0 or 1 so it does not have extra bits
    }
    this.max_code = max_code;

    // The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
    // establish sub-heaps of increasing lengths:
    for (n = s.heap_len / 2; n >= 1; n--)
        s.pqdownheap(tree, n);

    // Construct the Huffman tree by repeatedly combining the least two
    // frequent nodes.
    node = elems; // next internal node of the tree
    do {
        // n = node of least frequency
        n = s.heap[1];
        s.heap[1] = s.heap[s.heap_len--];
        s.pqdownheap(tree, 1);
        m = s.heap[1]; // m = node of next least frequency

        s.heap[--s.heap_max] = n; // keep the nodes sorted by frequency
        s.heap[--s.heap_max] = m;

        // Create a new node father of n and m
        tree[node * 2] = (short) (tree[n * 2] + tree[m * 2]);
        s.depth[node] = (byte) (System.Math.Max((byte) s.depth[n], (byte) s.depth[m]) + 1);
        tree[n * 2 + 1] = tree[m * 2 + 1] = (short) node;

        // and insert the new node in the heap
        s.heap[1] = node++;
        s.pqdownheap(tree, 1);
    } while (s.heap_len >= 2);

    s.heap[--s.heap_max] = s.heap[1];

    // At this point, the fields freq and dad are set. We can now
    // generate the bit lengths.
    gen_bitlen(s);

    // The field len is now set, we can generate the bit codes
    gen_codes(tree, max_code, s.bl_count);
}
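// A standalone sketch of the gen_codes step referenced above: given the bit length assigned to
// each symbol by build_tree/gen_bitlen, derive the canonical Huffman codes as described in
// RFC 1951 section 3.2.2. This is the textbook algorithm, not this port's gen_codes method
// (zlib additionally stores each code bit-reversed so it can be emitted LSB-first).
static int[] GenCanonicalCodes(int[] codeLengths, int maxBits) {
    // Count how many codes there are of each bit length.
    var blCount = new int[maxBits + 1];
    foreach (int len in codeLengths)
        if (len != 0) blCount[len]++;

    // Compute the smallest code value for each bit length.
    var nextCode = new int[maxBits + 1];
    int code = 0;
    for (int bits = 1; bits <= maxBits; bits++) {
        code = (code + blCount[bits - 1]) << 1;
        nextCode[bits] = code;
    }

    // Assign consecutive codes to symbols of the same length, in symbol order.
    var codes = new int[codeLengths.Length];
    for (int n = 0; n < codeLengths.Length; n++) {
        int len = codeLengths[n];
        if (len != 0) codes[n] = nextCode[len]++;
    }
    return codes;
}
// Example from RFC 1951: lengths {3, 3, 3, 3, 3, 2, 4, 4} yield the codes
// 010, 011, 100, 101, 110, 00, 1110, 1111.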