/// <summary>
/// Frames and sends one packet: [length][packet id + payload], switching to the
/// compressed framing [length][uncompressed length][zlib data] when a
/// compression threshold has been negotiated.
/// </summary>
public void SendPacket(int id, PacketBuffer buffer)
{
    byte[] packetIdVI = ByteUtils.ToVarInt(id);
    byte[] packetData = ByteUtils.Concat(packetIdVI, buffer.ReadToEnd());
    if (compressionThreshold > 0)
    {
        if (packetData.Length > compressionThreshold)
        {
            // Fix: the inner length field must carry the UNCOMPRESSED size (as the
            // parallel SendPacket implementation below does); previously the
            // compressed size was written here, producing malformed packets.
            byte[] uncompressedLength = ByteUtils.ToVarInt(packetData.Length);
            byte[] compressedPacket = Zlib.Compress(packetData);
            packetData = ByteUtils.Concat(uncompressedLength, compressedPacket);
        }
        else
        {
            // Below threshold: an inner length of 0 marks the payload as uncompressed.
            byte[] uncompressedLength = ByteUtils.ToVarInt(0);
            packetData = ByteUtils.Concat(uncompressedLength, packetData);
        }
    }
    byte[] lengthVI = ByteUtils.ToVarInt(packetData.Length);
    SendBytes(ByteUtils.Concat(lengthVI, packetData));
}
/// <summary>
/// Serializes an IPacket and sends it with the same framing rules as
/// SendPacket(int, PacketBuffer): plain [length][id + payload], or the
/// compressed framing [length][uncompressed length][zlib data] when a
/// compression threshold is active.
/// </summary>
public void SendPacket(IPacket packet)
{
    PacketBuffer sendBuffer = new PacketBuffer();
    packet.Send(sendBuffer);
    byte[] packetIdVI = ByteUtils.ToVarInt(packet.GetId());
    byte[] packetData = ByteUtils.Concat(packetIdVI, sendBuffer.ToArray());
    if (compressionThreshold > 0)
    {
        if (packetData.Length > compressionThreshold)
        {
            // Fix: the inner length field must carry the UNCOMPRESSED size;
            // previously the compressed size was written here.
            byte[] uncompressedLength = ByteUtils.ToVarInt(packetData.Length);
            byte[] compressedPacket = Zlib.Compress(packetData);
            packetData = ByteUtils.Concat(uncompressedLength, compressedPacket);
        }
        else
        {
            // Below threshold: an inner length of 0 marks the payload as uncompressed.
            byte[] uncompressedLength = ByteUtils.ToVarInt(0);
            packetData = ByteUtils.Concat(uncompressedLength, packetData);
        }
    }
    byte[] lengthVI = ByteUtils.ToVarInt(packetData.Length);
    SendBytes(ByteUtils.Concat(lengthVI, packetData));
}
/// <summary>
/// Async completion handler for an image download: decodes the response as a
/// bitmap, scales it to fit 384x384, JPEG-encodes it, zlib-compresses the JPEG
/// and streams the compressed bytes through Write() in 4000-byte chunks before
/// recording Size/Chunks. On any failure the exception is handed to the
/// request's callback instead.
/// </summary>
private void LoadCallback(IAsyncResult ar)
{
    var lr = (LoadRequest)ar.AsyncState;
    Bitmap bmp1 = null;
    Bitmap bmp2 = null;
    WebResponse response = null;
    try
    {
        response = lr.Request.EndGetResponse(ar);
        using (var stream = response.GetResponseStream())
        {
            bmp1 = (Bitmap)Bitmap.FromStream(stream);
            bmp2 = ScaleImage(bmp1, 384, 384);
            using (var stream2 = new MemoryStream())
            {
                bmp2.Save(stream2, ImageFormat.Jpeg);
                byte[] tmp = stream2.ToArray();
                // Reuse the same stream as the compression target.
                stream2.SetLength(0);
                Zlib.Compress(stream2, tmp);
                tmp = new byte[4000];
                Reset();
                stream2.Position = 0;
                while (true)
                {
                    int count = stream2.Read(tmp, 0, tmp.Length);
                    if (count == 0)
                    {
                        break;
                    }
                    Write(tmp, 0, count);
                }
                Size = (uint)Received;
                Chunks = ChunkCount;
            }
        }
        lr.Callback(lr.State);
    }
    catch (Exception ex)
    {
        lr.Callback(ex);
    }
    finally
    {
        // Fix: the WebResponse was previously leaked — only its stream was
        // disposed. Dispose it alongside the bitmaps.
        if (response != null)
        {
            response.Dispose();
        }
        if (bmp1 != null)
        {
            bmp1.Dispose();
        }
        if (bmp2 != null)
        {
            bmp2.Dispose();
        }
    }
}
/// <summary>
/// POSTs a zlib-compressed UTF-8 body to the backend and returns the
/// zlib-decompressed UTF-8 response.
/// </summary>
/// <param name="url">Path appended to Constants.backendUrl.</param>
/// <param name="body">Plain-text request body (compressed before sending).</param>
public static string SendRequest(string url, string body)
{
    // send request
    Log.Info(Constants.backendUrl + url);
    HttpWebRequest request = (HttpWebRequest)WebRequest.Create(Constants.backendUrl + url);
    byte[] data = Zlib.Compress(Encoding.UTF8.GetBytes(body));
    request.Method = "POST";
    request.ContentType = "text/plain";
    request.AutomaticDecompression = DecompressionMethods.Deflate;
    request.ContentLength = data.Length;
    using (Stream stream = request.GetRequestStream())
    {
        stream.Write(data, 0, data.Length);
    }
    // get response
    // Fix: the WebResponse and its stream were previously never disposed,
    // leaking the connection until finalization.
    using (WebResponse response = request.GetResponse())
    using (Stream responseData = response.GetResponseStream())
    using (MemoryStream ms = new MemoryStream())
    {
        responseData.CopyTo(ms);
        return Encoding.UTF8.GetString(Zlib.Decompress(ms.ToArray()));
    }
}
/// <summary>
/// Scales the image down to fit within 384x384, JPEG-encodes it, zlib-compresses
/// the JPEG bytes, then feeds the compressed stream through Write() in
/// 4000-byte chunks and records the resulting Size and Chunks.
/// </summary>
private void CopyResponse(Image <Rgba32> bitmap)
{
    bitmap.Mutate(ctx => ctx.Resize(new ResizeOptions() { Mode = ResizeMode.Max, Size = new Size(384, 384) }));
    using var workStream = new MemoryStream();
    bitmap.Save(workStream, new JpegEncoder());
    byte[] jpegBytes = workStream.ToArray();
    // Reuse the same stream as the compression target.
    workStream.SetLength(0);
    Zlib.Compress(workStream, jpegBytes);
    var chunk = new byte[4000];
    Reset();
    workStream.Position = 0;
    int read;
    while ((read = workStream.Read(chunk, 0, chunk.Length)) != 0)
    {
        Write(chunk, 0, read);
    }
    Size = (uint)Received;
    Chunks = ChunkCount;
}
/// <summary>
/// Adds (or replaces) the given files in a V3 archive. Each file is compressed
/// with the configured level and stored compressed only when that saves space;
/// existing entries are overwritten in place when the new payload fits their
/// old slot, otherwise appended at the end of the data region. Finally the
/// file table is rewritten and progress/reload callbacks fire.
/// </summary>
/// <param name="files">Absolute paths of the files to add.</param>
/// <param name="srcdir">Source root stripped from each file path.</param>
/// <param name="dstdir">Destination root prepended inside the archive.</param>
public void AddFilesV3(List <string> files, string srcdir, string dstdir)
{
    Stream.Reopen(false);
    SetProgressMax?.Invoke(files.Count);
    int cl = Settings.CompressionLevel;
    // The footer 280 bytes from the end holds the end-of-data offset,
    // XOR-obfuscated with KEY_1.
    Stream.Seek(-280, SeekOrigin.End);
    long current_end = Stream.ReadInt64() ^ Key.KEY_1;
    foreach (string file in files)
    {
        SetProgressNext?.Invoke();
        byte[] data = File.ReadAllBytes(file);
        int size = data.Length;
        byte[] compressed = Zlib.Compress(data, cl);
        // Store compressed only when it actually shrinks the payload.
        if (compressed.Length < size)
        {
            data = compressed;
        }
        string path = (dstdir + file.RemoveFirst(srcdir).RemoveFirstSeparator()).RemoveFirstSeparator();
        var entry = Files.Where(x => x.Path == path).ToList();
        if (entry.Count > 0)
        {
            if (data.Length <= entry[0].CSize)
            {
                // New payload fits the slot already reserved for this path:
                // overwrite in place at the existing offset.
                entry[0].Size = size;
                entry[0].CSize = data.Length;
                Stream.Seek(entry[0].Offset, SeekOrigin.Begin);
                Stream.WriteBytes(data);
            }
            else
            {
                // Too big for the old slot: append at the end. The old slot
                // becomes dead space until the archive is defragmented.
                entry[0].Size = size;
                entry[0].CSize = data.Length;
                entry[0].Offset = current_end;
                Stream.Seek(current_end, SeekOrigin.Begin);
                current_end += data.Length;
                Stream.WriteBytes(data);
            }
        }
        else
        {
            // Brand-new path: register an entry and append at the end.
            Files.Add(new ArchiveEntryV3() { Path = path, Size = size, CSize = data.Length, Offset = current_end });
            Stream.Seek(current_end, SeekOrigin.Begin);
            current_end += data.Length;
            Stream.WriteBytes(data);
        }
    }
    SaveFileTable(current_end);
    SetProgress?.Invoke(0);
    LoadData?.Invoke(0);
    LoadData?.Invoke(1);
}
/// <summary>
/// Prepares an HTTP listener response carrying deflate-compressed JSON:
/// logs the payload, sets the content headers, and returns the compressed
/// UTF-8 bytes. Returns null when there is no response to write to.
/// </summary>
private byte[] SendJson(HttpListenerResponse response, string json)
{
    if (response == null)
    {
        return null;
    }
    Log.Data("SEND:" + Environment.NewLine + json);
    response.AddHeader("Content-Type", "text/plain");
    response.AddHeader("Content-Encoding", "deflate");
    return Zlib.Compress(Encoding.UTF8.GetBytes(json));
}
/// <summary>
/// Rewrites the archive without dead space: copies every file (recompressing
/// with the configured level, keeping whichever form is smaller) into a fresh
/// "&lt;Path&gt;.defrag" archive, swaps it in for the original, reloads the file
/// table and reports the old vs. new size.
/// </summary>
public void DefragV3()
{
    Stream.Reopen(true);
    long oldsize = Stream.GetLenght();
    ArchiveManager am = new ArchiveManager(Path + ".defrag", Key, false) { Version = Version };
    am.Stream.Reopen(false);
    // New archive header: signature, placeholder table offset, second signature.
    am.Stream.WriteInt32(Key.FSIG_1);
    am.Stream.WriteInt64(0);
    am.Stream.WriteInt32(Key.FSIG_2);
    int cl = Settings.CompressionLevel;
    SetProgressMax?.Invoke(Files.Count);
    foreach (IArchiveEntry file in Files)
    {
        SetProgressNext?.Invoke();
        byte[] data = GetFile(file, false);
        byte[] compressed = Zlib.Compress(data, cl);
        // Keep the uncompressed bytes when compression does not help.
        if (data.Length < compressed.Length)
        {
            compressed = data;
        }
        file.Offset = am.Stream.Position;
        file.Size = data.Length;
        file.CSize = compressed.Length;
        am.Stream.WriteBytes(compressed);
    }
    am.Files = Files;
    am.SaveFileTable(am.Stream.Position);
    am.Stream.Close();
    Stream.Close();
    // Swap the defragmented archive in place of the original.
    File.Delete(Path);
    File.Move(Path + ".defrag", Path);
    string pkx = Path.Replace(".pck", ".pkx");
    if (File.Exists(pkx))
    {
        // NOTE(review): this moves "<pkx>.defrag" over the companion .pkx, but
        // no code visible here creates that .defrag file — presumably
        // ArchiveManager's stream does when the archive is split; confirm,
        // otherwise this throws whenever a .pkx companion exists.
        File.Delete(pkx);
        File.Move(pkx + ".defrag", pkx);
    }
    ReadFileTable();
    Stream.Reopen(true);
    long newsize = Stream.GetLenght();
    MessageBox.Show($"Old size: {oldsize}\nNew size: {newsize}");
}
/*
 * Name function: Compress
 * Purpose: compress a part of the byte array into a Zlib Block
 * Input: - buffer: byte array
 * Output: compressed byte array block, the structure is:
 *          - magic word
 *          - max segment size
 *          - total compressed size
 *          - total uncompressed size
 *          - segment list (compressed size + uncompressed size per segment)
 *          - compressed data list
 */
public static byte[] Compress(byte[] buffer)
{
    if (buffer == null)
    {
        throw new ArgumentNullException(nameof(buffer));
    }
    MemoryStream headBlock = new MemoryStream();
    MemoryStream dataBlock = new MemoryStream();
    headBlock.WriteValueU32(magic);
    headBlock.WriteValueU32(maxSegmentSize);
    headBlock.WriteValueU32(0x0);           // total compressed size, patched below
    headBlock.WriteValueS32(buffer.Length); // total uncompressed size
    int offset = 0;
    for (int i = buffer.Length; i > 0; i -= (int)maxSegmentSize)
    {
        int copyBytes = Math.Min(i, (int)maxSegmentSize);
        byte[] src = new byte[copyBytes];
        Buffer.BlockCopy(buffer, offset, src, 0, copyBytes);
        byte[] dst = Zlib.Compress(src);
        if (dst.Length == 0)
        {
            throw new Exception("Zlib compression failed!");
        }
        dataBlock.WriteBytes(dst);
        // Fix: advance the source offset by the number of UNCOMPRESSED bytes
        // consumed (copyBytes), not by the compressed size — the old code read
        // every segment after the first from the wrong position in the input.
        offset += copyBytes;
        headBlock.WriteValueU32((uint)dst.Length); // compressed segment size
        headBlock.WriteValueS32(copyBytes);        // uncompressed segment size
    }
    headBlock.Seek(8, SeekOrigin.Begin);
    headBlock.WriteValueS32((int)dataBlock.Length); // patch the total compressed size
    byte[] finalBlock = new byte[headBlock.Length + dataBlock.Length];
    Buffer.BlockCopy(headBlock.ToArray(), 0, finalBlock, 0, (int)headBlock.Length);
    Buffer.BlockCopy(dataBlock.ToArray(), 0, finalBlock, (int)headBlock.Length, (int)dataBlock.Length);
    headBlock.Close();
    dataBlock.Close();
    return finalBlock;
}
/// <summary>
/// Serializes this entry to its fixed 276-byte table record (path at offset 0,
/// then offset/size/csize/0 from byte 260), zlib-compresses it with the given
/// level, and returns the compressed form only when it is smaller than the raw
/// 276-byte record.
/// </summary>
public byte[] Write(int cl)
{
    var raw = new byte[276];
    using (var writer = new BinaryWriter(new MemoryStream(raw)))
    {
        writer.Write(Path.Replace("/", "\\").FromGBK());
        writer.BaseStream.Seek(260, SeekOrigin.Begin);
        writer.Write((uint)Offset);
        writer.Write(Size);
        writer.Write(CSize);
        writer.Write(0);
    }
    byte[] compressed = Zlib.Compress(raw, cl);
    return compressed.Length < 276 ? compressed : raw;
}
/// <summary>
/// Frames and sends one packet. With CompressionThreshold == 0 the plain
/// framing [length][id + payload] is used; otherwise the compressed framing
/// [length][uncompressed length][data] is used, compressing only payloads at
/// or above the threshold (an inner length of 0 marks uncompressed data).
/// Any send failure marks the connection as lost.
/// </summary>
public void SendPacket(int id, PacketBuffer sendBuffer, bool flipBeforeSend = false)
{
    if (Disconnected)
    {
        return;
    }
    if (flipBeforeSend)
    {
        sendBuffer.Reset();
    }
    try
    {
        var framed = new PacketBuffer();
        framed.WriteVarInt(id);
        framed.WriteRawByteArray(sendBuffer.ReadToEnd());
        var payload = framed.ToArray();
        if (CompressionThreshold == 0)
        {
            Send(ByteUtils.ToVarInt(payload.Length).Concat(payload));
        }
        else
        {
            byte[] dataLengthField;
            if (payload.Length >= CompressionThreshold)
            {
                dataLengthField = payload.Length.EncodeVarInt();
                payload = Zlib.Compress(payload);
            }
            else
            {
                dataLengthField = 0.EncodeVarInt();
            }
            var body = dataLengthField.Concat(payload);
            Send(body.Length.EncodeVarInt().Concat(body));
        }
    }
    catch
    {
        Disconnected = true;
        Console.WriteLine("Client lost connection");
    }
}
/// <summary>
/// Demo driver for the Zlib wrapper: exercises stream-to-stream and
/// byte-array compression/decompression round trips on image.jpg and
/// pdf_book.pdf, writing the results next to the originals.
/// </summary>
static void Main(string[] args)
{
    // Remove leftovers from previous runs.
    string[] files = new string[] { "image.jpg.compress", "pdf_book.pdf.compress", "image_decompress.jpg", "pdf_book_decompress.pdf", "image.jpg.compress_stream", "image_decompress_stream.jpg", "", "" };
    foreach (string file in files)
    {
        if (File.Exists(file))
        {
            File.Delete(file);
        }
    }
    byte[] file1 = File.ReadAllBytes("image.jpg");
    if (Zlib.IsCompressedByZlib(file1))
    {
        Console.WriteLine("file1 - compressed by zlib");
    }
    else
    {
        Console.WriteLine("file1 - not compressed by zlib");
    }
    // Fix: fs1 and both streams handed to Decompress were previously leaked
    // (never closed/disposed); using statements close them deterministically.
    using (var fs1 = new FileStream("image.jpg", FileMode.Open, FileAccess.Read))
    using (var fs2 = new FileStream("image.jpg.compress_stream", FileMode.Create, FileAccess.Write))
    {
        Zlib.Compress(fs1, fs2, ZlibCompressionLevel.BEST_COMPRESSION);
    }
    using (var src = new FileStream("image.jpg.compress_stream", FileMode.Open, FileAccess.Read))
    using (var dst = new FileStream("image_decompress_stream.jpg", FileMode.Create, FileAccess.Write))
    {
        Zlib.Decompress(src, dst);
    }
    byte[] file2 = File.ReadAllBytes("pdf_book.pdf");
    byte[] compressed1 = Zlib.Compress(file1, ZlibCompressionLevel.BEST_COMPRESSION);
    File.WriteAllBytes("image.jpg.compress", compressed1);
    if (Zlib.IsCompressedByZlib(compressed1))
    {
        Console.WriteLine("compressed1 - compressed by zlib");
    }
    else
    {
        Console.WriteLine("compressed1 - not compressed by zlib");
    }
    byte[] compressed2 = Zlib.Compress(file2, ZlibCompressionLevel.BEST_COMPRESSION);
    File.WriteAllBytes("pdf_book.pdf.compress", compressed2);
    byte[] decompressed1 = Zlib.Decompress(compressed1, file1.Length);
    File.WriteAllBytes("image_decompress.jpg", decompressed1);
    byte[] decompressed2 = Zlib.Decompress(compressed2, file2.Length);
    File.WriteAllBytes("pdf_book_decompress.pdf", decompressed2);
    Console.ReadKey();
}
/// <summary>
/// Compresses a data chunk at level 9 through the native-style
/// Zlib.Compress(ref dest, ref destLen, ref src, srcLen, level) entry point.
/// Returns the compressed bytes, or an empty array after showing an error
/// dialog when zlib reports a failure.
/// </summary>
public static byte[] CompressChunk(ref byte[] iBytes)
{
    int length = iBytes.Length;
    // Worst-case output bound: srcLen + 0.1% + 12 bytes (classic zlib
    // compressBound rule of thumb).
    int destLength = (int)((double)length + (double)length * 0.001 + 12.0);
    byte[] array = new byte[destLength];
    // destLength is updated in place to the actual compressed size.
    int num = Zlib.Compress(ref array[0], ref destLength, ref iBytes[0], length, 9);
    byte[] numArray;
    if (num == 0)
    {
        // Z_OK: shrink the buffer down to the real compressed size.
        Array.Resize <byte>(ref array, destLength);
        numArray = array;
    }
    else
    {
        // Map the standard zlib error codes to user-facing messages.
        switch (num)
        {
            case -5: // Z_BUF_ERROR
                MessageBox.Show("Unable to compress data chunk, output buffer is too small.", "Compression Error");
                break;
            case -4: // Z_MEM_ERROR
                MessageBox.Show("Unable to compress data chunk, out of memory.", "Compression Error");
                break;
            case -3: // Z_DATA_ERROR
                MessageBox.Show("Unable to compress data chunk, it seems to be corrupted. This should be impossible during compression.", "Compression Error");
                break;
            case -2: // Z_STREAM_ERROR (invalid compression level)
                MessageBox.Show("Unable to compress data chunk, compression level was invalid.", "Compression Error");
                break;
            default:
                MessageBox.Show("Unable to compress data chunk, unknown Zlib error: " + num + ".", "Compression Error");
                return(new byte[0]);
        }
        numArray = new byte[0];
    }
    return(numArray);
}
/// <summary>
/// Builds the binary data file for one table: reads the Excel source, serializes
/// the resulting object with protobuf-net, optionally zlib-compresses and
/// XOR-obfuscates it, and writes "&lt;Name&gt;.bytes" under outputPath.
/// </summary>
void BuildTable(string excelFile, Table table, string outputPath, bool encode)
{
    if (!Directory.Exists(outputPath))
    {
        Directory.CreateDirectory(outputPath);
    }
    string code;
    string proto;
    object mainObject;
    var codeBuilder = new CodeBuilder();
    codeBuilder.Build("cs", table, true, false, false, name => ExcelHelper.ReadExcel(excelFile), out code, out proto, out mainObject);
    var path = Path.Combine(outputPath, table.Name + ".bytes");
    using (var memory = new MemoryStream())
    {
        ProtoBuf.Meta.RuntimeTypeModel.Default.Serialize(memory, mainObject);
        var buffer = memory.ToArray();
        if (encode)
        {
            buffer = Zlib.Compress(buffer);
            // XOR-obfuscate every byte except the last, using the last byte as key.
            byte key = buffer[buffer.Length - 1];
            for (int i = 0; i + 1 < buffer.Length; i++)
            {
                buffer[i] ^= key;
            }
        }
        File.WriteAllBytes(path, buffer);
    }
}
/// <summary>
/// Writes "MD5EntriesME&lt;gameId&gt;.bin": the md5 tag, the uncompressed table
/// size, then the zlib-compressed (level 9) list of md5 + path entries for the
/// selected game.
/// </summary>
public void generateBinFile(int gameId)
{
    MD5FileEntry[] entries = null;
    switch (gameId)
    {
        case 1:
            entries = new MD5TablesME1().entriesME1;
            break;
        case 2:
            entries = new MD5TablesME2().entriesME2;
            break;
        case 3:
            entries = new MD5TablesME3().entriesME3;
            break;
    }
    MemoryStream stream = new MemoryStream();
    stream.WriteInt32(entries.Length);
    foreach (MD5FileEntry entry in entries)
    {
        stream.WriteFromBuffer(entry.md5);
        stream.WriteStringASCIINull(entry.path);
    }
    using (FileStream fs = new FileStream("MD5EntriesME" + gameId + ".bin", FileMode.Create, FileAccess.Write))
    {
        fs.WriteUInt32(md5Tag);
        byte[] tmp = stream.ToArray();
        fs.WriteInt32(tmp.Length);
        fs.WriteFromBuffer(Zlib.Compress(tmp, 9));
    }
}
/// <summary>
/// Writes the MD5 database file(s) for the selected game. For ME1 the Polish
/// version database is written as well. Each file holds the md5 tag, the
/// uncompressed table size, and the zlib-compressed (level 9) table of
/// de-duplicated paths plus per-entry (path index, size, md5) records.
/// </summary>
public void generateBinFile(int gameId)
{
    MD5FileEntry[] entries = null;
    switch (gameId)
    {
        case 1:
            MD5TablesME1 tablesME1 = new MD5TablesME1();
            entries = tablesME1.entriesME1;
            break;
        case 2:
            MD5TablesME2 tablesME2 = new MD5TablesME2();
            entries = tablesME2.entriesME2;
            break;
        case 3:
            MD5TablesME3 tablesME3 = new MD5TablesME3();
            entries = tablesME3.entriesME3;
            break;
    }
    WriteMd5EntriesBin("MD5EntriesME" + gameId + ".bin", entries);
    if (gameId == 1)
    {
        // Polish version DB (previously duplicated this whole routine inline).
        MD5TablesME1PL tablesME1PL = new MD5TablesME1PL();
        WriteMd5EntriesBin("MD5EntriesME1PL.bin", tablesME1PL.entriesME1PL);
    }
}

// Serializes one entry table (de-duplicated path list, then per-entry records)
// and writes it compressed to the given file. Extracted to remove the verbatim
// duplication between the main and Polish databases.
private void WriteMd5EntriesBin(string fileName, MD5FileEntry[] entries)
{
    MemoryStream stream = new MemoryStream();
    List <string> files = new List <string>();
    for (int p = 0; p < entries.Length; p++)
    {
        if (!files.Exists(s => s == entries[p].path))
        {
            files.Add(entries[p].path);
        }
    }
    stream.WriteInt32(files.Count);
    for (int p = 0; p < files.Count; p++)
    {
        stream.WriteStringASCIINull(files[p]);
    }
    stream.WriteInt32(entries.Length);
    for (int p = 0; p < entries.Length; p++)
    {
        stream.WriteInt32(files.IndexOf(entries[p].path));
        stream.WriteInt32(entries[p].size);
        stream.WriteFromBuffer(entries[p].md5);
    }
    using (FileStream fs = new FileStream(fileName, FileMode.Create, FileAccess.Write))
    {
        fs.WriteUInt32(md5Tag);
        byte[] tmp = stream.ToArray();
        fs.WriteInt32(tmp.Length);
        fs.WriteFromBuffer(Zlib.Compress(tmp, 9));
    }
}
/// <summary>
/// Serializes every queued world-update collection (block deltas, hits,
/// particles, sounds, shoots, statics, chunk items, p48s, pickups, kills,
/// damages, passive procs, missions) as count-prefixed lists in that fixed
/// order, then returns the zlib-compressed blob.
/// </summary>
public byte[] GetBytes()
{
    // Fix: the stream and writer were previously never disposed.
    using (MemoryStream stream = new MemoryStream())
    using (BinaryWriter writer = new BinaryWriter(stream))
    {
        writer.Write(blockDeltas.Count);
        foreach (BlockDelta blockDelta in blockDeltas) { blockDelta.Write(writer); }
        writer.Write(hits.Count);
        foreach (Hit hit in hits) { hit.Write(writer, false); }
        writer.Write(particles.Count);
        foreach (Particle particle in particles) { particle.Write(writer); }
        writer.Write(sounds.Count);
        foreach (Sound sound in sounds) { sound.Write(writer); }
        writer.Write(shoots.Count);
        foreach (Shoot shoot in shoots) { shoot.Write(writer, false); }
        writer.Write(statics.Count);
        foreach (StaticEntity staticEntity in statics) { staticEntity.Write(writer); }
        writer.Write(chunkItems.Count);
        foreach (ChunkItems chunkItem in chunkItems) { chunkItem.Write(writer); }
        writer.Write(p48s.Count);
        foreach (P48 p48 in p48s) { p48.Write(writer); }
        writer.Write(pickups.Count);
        foreach (Pickup pickup in pickups) { pickup.Write(writer); }
        writer.Write(kills.Count);
        foreach (Kill kill in kills) { kill.Write(writer); }
        writer.Write(damages.Count);
        foreach (Damage damage in damages) { damage.Write(writer); }
        writer.Write(passiveProcs.Count); //npc rClick ??? todo
        foreach (PassiveProc passiveProc in passiveProcs) { passiveProc.Write(writer, false); }
        writer.Write(missions.Count);
        foreach (Mission mission in missions) { mission.Write(writer); }
        // Flush buffered writes before snapshotting the stream.
        writer.Flush();
        return Zlib.Compress(stream.ToArray());
    }
}
// Returns the record's zlib-compressed field data, compressing the
// uncompressed payload and caching the result on the record the first time
// it is requested.
private static byte[] GetCompressedPayload(Record record)
{
    if (record.CompressedFieldData == null)
    {
        var payload = new ArraySegment <byte>(GetUncompressedPayload(record));
        record.CompressedFieldData = Zlib.Compress(payload).ToArray();
    }
    return record.CompressedFieldData;
}
/// <summary>
/// Builds the "mini" wire packet for a session request: serializes the request,
/// optionally zlib-compresses it, encrypts it (RSA with a baked-in public key
/// when no session key exists yet, otherwise AES or DES depending on the pack's
/// encrypt mode) and packs header + ciphertext into
/// sessionPack.mCacheBodyBuffer. Returns false on any failure.
/// </summary>
public static bool EncodePackMini(SessionPack sessionPack)
{
    try
    {
        byte[] inBuf = sessionPack.requestToByteArray();
        byte[] key = sessionPack.getSessionKey(true);
        if (inBuf == null)
        {
            Log.e("MMPack", " in Data is Null");
            return false;
        }
        MMPKG_mini_header miniHeader = new MMPKG_mini_header { ret = (int)ConstantsProtocol.CLIENT_MAX_VERSION };
        miniHeader.uin = (uint)SessionPackMgr.getAccount().getUin();
        miniHeader.cmd_id = (ushort)sessionPack.getMMFuncID();
        if (miniHeader.cmd_id == 805)
        {
            // Command 805 reports the minimum client version instead.
            miniHeader.ret = (int)ConstantsProtocol.CLIENT_MIN_VERSION;
        }
        miniHeader.server_id = SessionPackMgr.getSeverID();
        miniHeader.device_type = 1;
        miniHeader.compress_len = (uint)inBuf.Length;
        miniHeader.server_id_len = (byte)SessionPackMgr.getSeverID().Length;

        byte[] outBuf = null;
        if (sessionPack.mNeedCompress)
        {
            miniHeader.compress_algo = 1; // zlib
            if (!Zlib.Compress(inBuf, ref outBuf))
            {
                Log.e("MMPack", " Zlib Compress failed");
                return false;
            }
        }
        else
        {
            miniHeader.compress_algo = 2; // stored uncompressed
            outBuf = inBuf;
        }
        miniHeader.compressed_len = (uint)outBuf.Length;

        short mEncrypt = sessionPack.mEncrypt;
        byte[] data = null;
        if ((key == null) || (key.Length <= 0))
        {
            // No session key yet: RSA-encrypt with the built-in public key.
            miniHeader.cert_version = 156;
            try
            {
                string rsa = "C7587AC1B1CD3AFD44110CDD1796FCDE878BF7984E35715D784F3A32A63407E9F6B96158752D3313476D5340AC53657167E92C0A6D37AEA65768D8F262A94F4620F57A1B532553BE1FD4F2F4BFEF20127F51B349DF438D45D53814DE96A1482C7C571CA978D3F4A16995E7874960C73E05B480355F08F19997CD5DCB8293D34B2DAFECEA1F1AAC60532FB5FB83C9655FB0812FAF492E02E17123212C09F55CE326A1360B807972C87606C0243498FD47E3DE9BB5E597DE257AC2363E938BB865AEF090A5832E0A1990416B1090F466FFD1C3043A940EE93FA2C1FB85B87DAA2A797A28F8198AA8DE8563B283ACE5FD08F5320A07192E2BE4345C7E0CD3B6D72B";
                int blockSize = 2048;
                if (sessionPack.mNeedAutoAuth)
                {
                    // Auto-auth uses the older cert/key with 1024-bit blocks.
                    miniHeader.cert_version = 99;
                    blockSize = 1024;
                    rsa = "DFE56EEE6506E5F9796B4F12C3A48121B84E548E9999D834E2C037E3CD276E9C4A2B1758C582A67F6D12895CE5525DDE51D0B92D32B8BE7B2C85827729C3571DCC14B581877BC634BCC7F9DA3825C97A25B341A64295098303C4B584EC579ECCA7C8B96782F65D650039EE7A0772C195DBEFC4488BDFB0B9A58C5C058E3AB04D";
                }
                data = Util.RSAEncryptBlock(outBuf, rsa, "010001", blockSize);
                miniHeader.encrypt_algo = 1;
                if ((data == null) || (data.Length <= 0))
                {
                    Log.e("MMPack", " RSAEncrypt failed");
                    return false;
                }
            }
            catch (Exception exception)
            {
                Log.e("MMPack", " RSAEncrypt exception," + exception.Message);
                return false;
            }
            return PackMiniData(out sessionPack.mCacheBodyBuffer, miniHeader, data);
        }
        if (mEncrypt == 5)
        {
            miniHeader.encrypt_algo = 5; // AES with the session key
            data = AES.Encrypt(outBuf, key);
            if (data == null)
            {
                Log.e("MMPack", "AES: AESEncrypt failed");
                return false;
            }
            return PackMiniData(out sessionPack.mCacheBodyBuffer, miniHeader, data);
        }
        miniHeader.encrypt_algo = 4; // DES with the session key
        data = DES.EncryptBytes(outBuf, key, 1);
        if (data == null)
        {
            Log.e("MMPack", "DES: DESEncrypt failed");
            return false;
        }
        return PackMiniData(out sessionPack.mCacheBodyBuffer, miniHeader, data);
    }
    catch (Exception exception2)
    {
        Log.e("MMPack", exception2.Message);
        return false;
    }
}
/// <summary>
/// Builds the standard (TLV) wire packet for a session request: serializes the
/// request, optionally zlib-compresses it, encrypts it (RSA with the built-in
/// public key when no session key exists yet, otherwise AES or DES depending on
/// the pack's encrypt mode) and packs header + ciphertext into
/// sessionPack.mCacheBodyBuffer. Returns false on any failure.
/// </summary>
public static bool EncodePack(SessionPack sessionPack)
{
    try
    {
        byte[] inBuf = sessionPack.requestToByteArray();
        byte[] key = sessionPack.getSessionKey(true);
        MMTLVHeader tlvHeader = new MMTLVHeader
        {
            Ret = (int)ConstantsProtocol.CLIENT_MIN_VERSION,
            Uin = (uint)SessionPackMgr.getAccount().getUin(),
            CmdId = (ushort)sessionPack.getMMFuncID(),
            ServerId = SessionPackMgr.getSeverID(),
            DeviceId = Util.StringToByteArray(Util.getDeviceUniqueId()),
            CompressLen = (uint)inBuf.Length,
            CompressVersion = 0x3e9
        };
        byte[] outBuf = null;
        if (sessionPack.mNeedCompress)
        {
            tlvHeader.CompressAlogrithm = 1; // zlib
            if (!Zlib.Compress(inBuf, ref outBuf))
            {
                Log.e("MMPack", " Zlib Compress failed");
                return false;
            }
        }
        else
        {
            tlvHeader.CompressAlogrithm = 2; // stored uncompressed
            outBuf = inBuf;
        }
        tlvHeader.CompressedLen = (uint)outBuf.Length;

        short mEncrypt = sessionPack.mEncrypt;
        byte[] encryptText = null;
        if ((key == null) || (key.Length <= 0))
        {
            // No session key yet: RSA-encrypt with the built-in public key.
            tlvHeader.CertVersion = 99;
            encryptText = Util.RSAEncryptBlock(outBuf, "DFE56EEE6506E5F9796B4F12C3A48121B84E548E9999D834E2C037E3CD276E9C4A2B1758C582A67F6D12895CE5525DDE51D0B92D32B8BE7B2C85827729C3571DCC14B581877BC634BCC7F9DA3825C97A25B341A64295098303C4B584EC579ECCA7C8B96782F65D650039EE7A0772C195DBEFC4488BDFB0B9A58C5C058E3AB04D", "010001");
            if ((encryptText == null) || (encryptText.Length <= 0))
            {
                Log.e("MMPack", " RSAEncrypt failed");
                return false;
            }
            return Pack(ref sessionPack.mCacheBodyBuffer, tlvHeader, encryptText);
        }
        if (mEncrypt == 5)
        {
            tlvHeader.CryptAlgorithm = 5; // AES with the session key
            encryptText = AES.Encrypt(outBuf, key);
            if (encryptText == null)
            {
                Log.e("MMPack", "AES: AESEncrypt failed");
                return false;
            }
            return Pack(ref sessionPack.mCacheBodyBuffer, tlvHeader, encryptText);
        }
        tlvHeader.CryptAlgorithm = 4; // DES with the session key
        encryptText = DES.EncryptBytes(outBuf, key, 1);
        if (encryptText == null)
        {
            Log.e("MMPack", "DES: DESEncrypt failed");
            return false;
        }
        return Pack(ref sessionPack.mCacheBodyBuffer, tlvHeader, encryptText);
    }
    catch (Exception exception)
    {
        Log.e("MMPack", exception.Message);
        return false;
    }
}
/// <summary>
/// Builds the requested outputs (.bytes data, .proto schema, generated manager
/// code) for every table selected in the list view, optionally zlib-compressing
/// and XOR-obfuscating the data files. Any failure is reported via a dialog.
/// </summary>
void BuildSelected(string type, string save_path, bool build_data, bool build_proto, bool build_code, bool encode)
{
    // Resolve the selected list items to known tables.
    var tables = new List <Table>();
    foreach (ListViewItem lvItem in listView1.SelectedItems)
    {
        var key = Path.GetFileNameWithoutExtension(lvItem.Tag.ToString());
        if (mTableStore.Tables.ContainsKey(key))
        {
            tables.Add(mTableStore.Tables[key]);
        }
    }
    try
    {
        mCodeBuilder.Reset();
        foreach (var table in tables)
        {
            string code;
            string proto;
            object mainObject;
            mCodeBuilder.Build(type, table, build_data, build_proto, build_code, ReadExcelData, out code, out proto, out mainObject);
            if (build_data)
            {
                string path = save_path + "\\" + table.Name + ".bytes";
                using (var memory = new MemoryStream())
                {
                    ProtoBuf.Meta.RuntimeTypeModel.Default.Serialize(memory, mainObject);
                    byte[] buffer = memory.ToArray();
                    if (encode)
                    {
                        // Compress, then XOR every byte except the last with the last byte.
                        buffer = Zlib.Compress(buffer);
                        byte key = buffer[buffer.Length - 1];
                        for (int i = 0; i < buffer.Length - 1; i++)
                        {
                            buffer[i] ^= key;
                        }
                    }
                    File.WriteAllBytes(path, buffer);
                }
            }
            if (build_proto)
            {
                File.WriteAllText(save_path + "\\" + table.Name + ".proto", proto);
            }
            if (build_code)
            {
                File.WriteAllText(save_path + "\\" + table.Name + "Manager." + type, code, Encoding.UTF8);
            }
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message, "Build Failed", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
}
/// <summary>
/// Writes a PNG IDAT chunk into <paramref name="png"/>: renders the block map
/// as RGBA scanlines (filter byte 0 per row) into the scratch
/// <paramref name="zlib"/> buffer, compresses that into the chunk's data
/// section, appends the CRC, and back-fills the chunk length. Returns the total
/// chunk size in bytes (length + type + data + crc).
/// </summary>
private static int WriteIDATChunk(Span <byte> png, Span <byte> zlib, int width, Span <ushort> blocks, Rgba32 backgroundColor, Span <Rgba32> palette, int scale)
{
    // come back to the length of the chunk later
    var chunkLength = png.Slice(0, 4);
    const uint textIdat = 0x49444154; // "IDAT"
    var couldWriteIdatText = BinaryPrimitives.TryWriteUInt32BigEndian(png.Slice(4, sizeof(uint)), textIdat);
    Debug.Assert(couldWriteIdatText);
    // now, write the raw PNG data into the *zlib* section, and from
    // there, compress from the zlib section to the PNG section.
    //
    // it may seem backwards to write the PNG data not to the png
    // section, but we'd have to end up copying the data in the zlib
    // section back to the png section.
    //
    // it's really a matter of bad naming
    var offset = 0;
    using (var _ = DebugTimings.Start("Draw Pixels"))
    {
        if (scale == 1)
        {
            // typically, scale will be 1. this is a routine specifically
            // optimized for that
            for (var i = 0; i < blocks.Length; i++)
            {
                // for every scanline, we need to define the filter method
                if (i % width == 0)
                {
                    // http://www.libpng.org/pub/png/spec/1.2/PNG-Chunks.html#C.IDAT
                    // (Note that with filter method 0, the only one currently
                    // defined, this implies prepending a filter-type byte to
                    // each scanline.)
                    zlib[offset++] = 0;
                }
                var block = blocks[i];
                // TODO: use uint magic or something
                var color = GetColor(block, palette);
                var slice = zlib.Slice(offset, sizeof(uint));
                slice[0] = color.R;
                slice[1] = color.G;
                slice[2] = color.B;
                slice[3] = color.A;
                offset += 4;
            }
        }
        else
        {
            // for non one scale things, we apply the following optimizations:
            //
            // - read in every color once, and write it twice
            // - once we write a row, we copy what we just wrote over
            //
            // in contrast to just a for loop for each block color & height for
            // the scale, this is much more efficient

            // RGBA pixels per scaled row, plus the 1 filter-type byte per scanline
            var rowLength = (width * scale) * 4 + 1;
            var lastScanlinePosition = -1;
            // we'll always do the action once first
            // then we'll need to copy it scale minus one (one because we already did it) times
            var copies = scale - 1;
            for (var i = 0; i < blocks.Length; i++)
            {
                if (i % width == 0)
                {
                    // if we haven't set the scanline position
                    if (lastScanlinePosition != -1)
                    {
                        // there was data before this row
                        // let's copy out the data, and paste it as many times
                        // as the scale calls for
                        var row = zlib.Slice(lastScanlinePosition, rowLength);
                        // for as many times as we need to scale
                        for (int j = 0; j < copies; j++)
                        {
                            row.CopyTo(zlib.Slice(offset, rowLength));
                            offset += rowLength;
                        }
                    }
                    lastScanlinePosition = offset;
                    zlib[offset++] = 0;
                }
                var block = blocks[i];
                // TODO: use uint magic or something
                var color = GetColor(block, palette);
                var slice = zlib.Slice(offset, sizeof(uint));
                slice[0] = color.R;
                slice[1] = color.G;
                slice[2] = color.B;
                slice[3] = color.A;
                offset += 4;
                for (int j = 0; j < copies; j++)
                {
                    slice.CopyTo(zlib.Slice(offset, sizeof(uint)));
                    offset += 4;
                }
            }
            // copied and pasted the code to paste the rows here
            // when the above loop finishes going through every block, it still
            // has to copy over the last scanline `copies` times
            //
            // NOTE(review): if blocks is empty, lastScanlinePosition is still -1
            // here and the Slice below would throw — presumably callers never
            // pass an empty block map; confirm.
            {
                // there was data before this row
                // let's copy out the data, and paste it as many times
                // as the scale calls for
                var row = zlib.Slice(lastScanlinePosition, rowLength);
                // for as many times as we need to scale
                for (int j = 0; j < copies; j++)
                {
                    row.CopyTo(zlib.Slice(offset, rowLength));
                    offset += rowLength;
                }
            }
        }
    }
    // we should've written to the entirety of zlib.
    // if zlib doesn't perfectly match our length, we've miscalculated
    // the amount of bytes to allocate.
    Debug.Assert(offset == zlib.Length);
    zlib = zlib.Slice(0, offset); // DEBUG
    int written;
    using (var _ = DebugTimings.Start("Zlib compression"))
    {
        // compress the rendered scanlines into the chunk data area, which
        // starts after the 4-byte length and 4-byte type fields
        written = Zlib.Compress(png.Slice(8), zlib);
    }
    // crc32 from the name to the end
    uint crc32;
    using (var _ = DebugTimings.Start("Crc32 Computation"))
    {
        crc32 = Crc32.Compute(png.Slice(4, 4 + written));
    }
    var couldWriteCrc32 = BinaryPrimitives.TryWriteUInt32BigEndian(png.Slice(8 + written, 4), crc32);
    Debug.Assert(couldWriteCrc32);
    var couldWriteChunkLength = BinaryPrimitives.TryWriteUInt32BigEndian(chunkLength, (uint)written);
    Debug.Assert(couldWriteChunkLength);
    // 4 (length) + 4 (type) + written (data) + 4 (crc)
    return(8 + written + 4);

    // Maps a block id to its palette color; out-of-range ids and unset
    // (all-zero) entries fall back to the background color.
    Rgba32 GetColor(ushort block, Span <Rgba32> palette)
    {
        if (block >= palette.Length)
        {
            return(backgroundColor);
        }
        var color = palette[block];
        // if it's default(Rgba32), that means it doesn't have a color
        if (Rgba32.ToUInt32(ref color) == 0)
        {
            return(backgroundColor);
        }
        return(color);
    }
}
/// <summary>
/// Packs texture data into the chunked compressed format: a header (tag, max
/// block size, total compressed size, uncompressed size), a table of per-block
/// compressed/uncompressed sizes, then the compressed blocks. Blocks are
/// compressed in parallel with LZO2 or zlib depending on the storage type.
/// </summary>
public static byte[] CompressTexture(byte[] inputData, StorageTypes type)
{
    using (MemoryStream ouputStream = new MemoryStream())
    {
        uint compressedSize = 0;
        uint dataBlockLeft = (uint)inputData.Length;
        // Number of maxBlockSize-sized blocks, rounding up for a final partial block.
        uint newNumBlocks = ((uint)inputData.Length + maxBlockSize - 1) / maxBlockSize;
        List <ChunkBlock> blocks = new List <ChunkBlock>();
        using (MemoryStream inputStream = new MemoryStream(inputData))
        {
            // skip blocks header and table - filled later
            ouputStream.Seek(SizeOfChunk + SizeOfChunkBlock * newNumBlocks, SeekOrigin.Begin);
            for (int b = 0; b < newNumBlocks; b++)
            {
                ChunkBlock block = new ChunkBlock();
                block.uncomprSize = Math.Min(maxBlockSize, dataBlockLeft);
                dataBlockLeft -= block.uncomprSize;
                block.uncompressedBuffer = inputStream.ReadToBuffer(block.uncomprSize);
                blocks.Add(block);
            }
        }
        // Compress each block independently; every iteration writes only its
        // own index, so the parallel loop does not race on the list.
        Parallel.For(0, blocks.Count, b =>
        {
            ChunkBlock block = blocks[b];
            if (type == StorageTypes.extLZO || type == StorageTypes.pccLZO)
            {
                block.compressedBuffer = LZO2.Compress(block.uncompressedBuffer);
            }
            else if (type == StorageTypes.extZlib || type == StorageTypes.pccZlib)
            {
                block.compressedBuffer = Zlib.Compress(block.uncompressedBuffer);
            }
            else
            {
                throw new Exception("Compression type not expected!");
            }
            if (block.compressedBuffer.Length == 0)
            {
                throw new Exception("Compression failed!");
            }
            block.comprSize = (uint)block.compressedBuffer.Length;
            blocks[b] = block;
        });
        // Emit the compressed payloads (the header/table area was reserved above).
        for (int b = 0; b < blocks.Count; b++)
        {
            ChunkBlock block = blocks[b];
            ouputStream.Write(block.compressedBuffer, 0, (int)block.comprSize);
            compressedSize += block.comprSize;
        }
        // Back-fill the chunk header and the per-block size table.
        ouputStream.SeekBegin();
        ouputStream.WriteUInt32(textureTag);
        ouputStream.WriteUInt32(maxBlockSize);
        ouputStream.WriteUInt32(compressedSize);
        ouputStream.WriteInt32(inputData.Length);
        foreach (ChunkBlock block in blocks)
        {
            ouputStream.WriteUInt32(block.comprSize);
            ouputStream.WriteUInt32(block.uncomprSize);
        }
        return(ouputStream.ToArray());
    }
}