/// <summary> /// SharpZip compression /// </summary> /// <param name="buffer"></param> /// <returns></returns> public static byte[] CompressSharpZip(byte[] buffer) { if (buffer == null || buffer.Length == 0) { return buffer; } using (MemoryStream inStream = new MemoryStream(buffer)) { MemoryStream outStream = new MemoryStream(); Deflater mDeflater = new Deflater(Deflater.BEST_COMPRESSION); DeflaterOutputStream compressStream = new DeflaterOutputStream(outStream, mDeflater); int mSize; byte[] mWriteData = new Byte[4096]; while ((mSize = inStream.Read(mWriteData, 0, 4096)) > 0) { compressStream.Write(mWriteData, 0, mSize); } compressStream.Finish(); inStream.Close(); return outStream.ToArray(); } }
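The method above covers only the compress direction. As a minimal counterpart sketch (assuming SharpZipLib is available; DecompressSharpZip is a hypothetical helper, not part of the original snippet), the same round trip can be reversed with InflaterInputStream:

using System.IO;
using ICSharpCode.SharpZipLib.Zip.Compression.Streams;

/// <summary>
/// SharpZip decompression (assumed counterpart to CompressSharpZip above).
/// </summary>
public static byte[] DecompressSharpZip(byte[] buffer)
{
    if (buffer == null || buffer.Length == 0)
    {
        return buffer;
    }
    using (MemoryStream inStream = new MemoryStream(buffer))
    using (InflaterInputStream inflateStream = new InflaterInputStream(inStream))
    using (MemoryStream outStream = new MemoryStream())
    {
        byte[] readBuffer = new byte[4096];
        int size;
        // Drain the inflater until the compressed stream is exhausted.
        while ((size = inflateStream.Read(readBuffer, 0, readBuffer.Length)) > 0)
        {
            outStream.Write(readBuffer, 0, size);
        }
        return outStream.ToArray();
    }
}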
public void Serialize(Stream output) { var saveGame = this.SaveGame; if (IsSupportedPlatform(this.Platform) == false) { throw new InvalidOperationException("unsupported platform"); } var endian = this.Platform.GetEndian(); var compressionScheme = this.Platform.GetCompressionScheme(); byte[] innerUncompressedBytes; using (var innerUncompressedData = new MemoryStream()) { if (this.PlayerStats != null) { saveGame.StatsData = this.PlayerStats.Serialize(endian); } ProtoSerializer.Serialize(innerUncompressedData, saveGame); innerUncompressedData.Position = 0; innerUncompressedBytes = innerUncompressedData.ReadBytes((uint)innerUncompressedData.Length); } byte[] innerCompressedBytes; using (var innerCompressedData = new MemoryStream()) { var hash = CRC32.Hash(innerUncompressedBytes, 0, innerUncompressedBytes.Length); innerCompressedData.WriteValueS32(0, Endian.Big); innerCompressedData.WriteString("WSG"); innerCompressedData.WriteValueU32(2, endian); innerCompressedData.WriteValueU32(hash, endian); innerCompressedData.WriteValueS32(innerUncompressedBytes.Length, endian); var encoder = new Huffman.Encoder(); encoder.Build(innerUncompressedBytes); innerCompressedData.WriteBytes(encoder.Encode(innerUncompressedBytes)); innerCompressedData.Position = 0; innerCompressedData.WriteValueU32((uint)(innerCompressedData.Length - 4), Endian.Big); innerCompressedData.Position = 0; innerCompressedBytes = innerCompressedData.ReadBytes((uint)innerCompressedData.Length); } byte[] compressedBytes; if (innerCompressedBytes.Length <= BlockSize) { if (compressionScheme == CompressionScheme.LZO) { compressedBytes = new byte[innerCompressedBytes.Length + (innerCompressedBytes.Length / 16) + 64 + 3]; var actualCompressedSize = compressedBytes.Length; var result = MiniLZO.LZO.Compress( innerCompressedBytes, 0, innerCompressedBytes.Length, compressedBytes, 0, ref actualCompressedSize, new MiniLZO.CompressWorkBuffer()); if (result != MiniLZO.ErrorCode.Success) { throw new SaveCorruptionException(string.Format("LZO compression failure ({0})", result)); } Array.Resize(ref compressedBytes, actualCompressedSize); } else if (compressionScheme == CompressionScheme.Zlib) { using (var temp = new MemoryStream()) { var zlib = new DeflaterOutputStream(temp); zlib.WriteBytes(innerCompressedBytes); zlib.Finish(); temp.Flush(); temp.Position = 0; compressedBytes = temp.ReadBytes((uint)temp.Length); } } else { throw new InvalidOperationException("unsupported compression scheme"); } } else { if (compressionScheme == CompressionScheme.LZO) { int innerCompressedOffset = 0; int innerCompressedSizeLeft = innerCompressedBytes.Length; using (var blockData = new MemoryStream()) { var blockCount = (innerCompressedSizeLeft + BlockSize) / BlockSize; blockData.WriteValueS32(blockCount, Endian.Big); blockData.Position = 4 + (blockCount * 8); var blockInfos = new List <Tuple <uint, uint> >(); while (innerCompressedSizeLeft > 0) { var blockUncompressedSize = Math.Min(BlockSize, innerCompressedSizeLeft); compressedBytes = new byte[blockUncompressedSize + (blockUncompressedSize / 16) + 64 + 3]; var actualCompressedSize = compressedBytes.Length; var result = MiniLZO.LZO.Compress( innerCompressedBytes, innerCompressedOffset, blockUncompressedSize, compressedBytes, 0, ref actualCompressedSize, new MiniLZO.CompressWorkBuffer()); if (result != MiniLZO.ErrorCode.Success) { throw new SaveCorruptionException(string.Format("LZO compression failure ({0})", result)); } blockData.Write(compressedBytes, 0, actualCompressedSize); blockInfos.Add(new Tuple <uint, 
uint>((uint)actualCompressedSize, BlockSize)); innerCompressedOffset += blockUncompressedSize; innerCompressedSizeLeft -= blockUncompressedSize; } blockData.Position = 4; foreach (var blockInfo in blockInfos) { blockData.WriteValueU32(blockInfo.Item1, Endian.Big); blockData.WriteValueU32(blockInfo.Item2, Endian.Big); } blockData.Position = 0; compressedBytes = blockData.ReadBytes((uint)blockData.Length); } } else if (compressionScheme == CompressionScheme.Zlib) { int innerCompressedOffset = 0; int innerCompressedSizeLeft = innerCompressedBytes.Length; using (var blockData = new MemoryStream()) { var blockCount = (innerCompressedSizeLeft + BlockSize) / BlockSize; blockData.WriteValueS32(blockCount, Endian.Big); blockData.Position = 4 + (blockCount * 8); var blockInfos = new List <Tuple <uint, uint> >(); while (innerCompressedSizeLeft > 0) { var blockUncompressedSize = Math.Min(BlockSize, innerCompressedSizeLeft); using (var temp = new MemoryStream()) { var zlib = new DeflaterOutputStream(temp); zlib.Write(innerCompressedBytes, innerCompressedOffset, blockUncompressedSize); zlib.Finish(); temp.Flush(); temp.Position = 0; compressedBytes = temp.ReadBytes((uint)temp.Length); } blockData.WriteBytes(compressedBytes); blockInfos.Add(new Tuple <uint, uint>((uint)compressedBytes.Length, BlockSize)); innerCompressedOffset += blockUncompressedSize; innerCompressedSizeLeft -= blockUncompressedSize; } blockData.Position = 4; foreach (var blockInfo in blockInfos) { blockData.WriteValueU32(blockInfo.Item1, Endian.Big); blockData.WriteValueU32(blockInfo.Item2, Endian.Big); } blockData.Position = 0; compressedBytes = blockData.ReadBytes((uint)blockData.Length); } } else { throw new InvalidOperationException("unsupported platform"); } } byte[] uncompressedBytes; using (var uncompressedData = new MemoryStream()) { uncompressedData.WriteValueS32(innerCompressedBytes.Length, Endian.Big); uncompressedData.WriteBytes(compressedBytes); uncompressedData.Position = 0; uncompressedBytes = uncompressedData.ReadBytes((uint)uncompressedData.Length); } byte[] computedHash; using (var sha1 = new System.Security.Cryptography.SHA1Managed()) { computedHash = sha1.ComputeHash(uncompressedBytes); } output.WriteBytes(computedHash); output.WriteBytes(uncompressedBytes); }
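Serialize writes the 20-byte SHA-1 digest immediately ahead of the data it covers, so a loader can validate a save before attempting decompression. A minimal check along those lines (HasValidSaveHash is a hypothetical helper, not code from the original tool):

using System;
using System.Linq;
using System.Security.Cryptography;

// Hypothetical helper: re-hashes everything after the stored 20-byte SHA-1 and compares.
static bool HasValidSaveHash(byte[] saveBytes)
{
    if (saveBytes == null || saveBytes.Length < 20)
    {
        return false;
    }
    var storedHash = new byte[20];
    Array.Copy(saveBytes, 0, storedHash, 0, 20);
    using (var sha1 = SHA1.Create())
    {
        var computedHash = sha1.ComputeHash(saveBytes, 20, saveBytes.Length - 20);
        return computedHash.SequenceEqual(storedHash);
    }
}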
public void Commit(Packages.PackageCompressionType compressionType) { Stream clean; string tempFileName = Path.GetTempFileName(); tempFileName = Path.GetTempFileName(); clean = File.Open(tempFileName, FileMode.Create, FileAccess.ReadWrite, FileShare.Read); IPackageFile packageFile = null; if (this.Version == 3) { packageFile = new Packages.PackageFile3(); } else if (this.Version == 4) { packageFile = new Packages.PackageFile4(); } else if (this.Version == 6) { packageFile = new Packages.PackageFile6(); } else { throw new NotSupportedException(); } foreach (KeyValuePair <string, Entry> kvp in this.Entries) { var packageEntry = new Packages.PackageEntry(); packageEntry.Name = kvp.Key; packageEntry.CompressedSize = -1; packageEntry.UncompressedSize = kvp.Value.Size; packageEntry.Offset = 0; packageFile.Entries.Add(packageEntry); } int headerSize = packageFile.EstimateHeaderSize(); clean.Seek(headerSize, SeekOrigin.Begin); int uncompressedDataSize = 0; int compressedDataSize = 0; if (compressionType == Packages.PackageCompressionType.None) { long offset = 0; foreach (Packages.PackageEntry packageEntry in packageFile.Entries) { packageEntry.Offset = offset; this.ExportEntry(packageEntry.Name, clean); int align = packageEntry.UncompressedSize.Align(2048) - packageEntry.UncompressedSize; if (align > 0) { byte[] block = new byte[align]; clean.Write(block, 0, (int)align); } offset += packageEntry.UncompressedSize + align; uncompressedDataSize += packageEntry.UncompressedSize + align; } } else if (compressionType == Packages.PackageCompressionType.Zlib) { long offset = 0; foreach (Packages.PackageEntry packageEntry in packageFile.Entries) { packageEntry.Offset = offset; byte[] uncompressedData = this.GetEntry(packageEntry.Name); using (var temp = new MemoryStream()) { var zlib = new DeflaterOutputStream(temp); zlib.Write(uncompressedData, 0, uncompressedData.Length); zlib.Finish(); temp.Position = 0; clean.WriteFromStream(temp, temp.Length); packageEntry.CompressedSize = (int)temp.Length; int align = packageEntry.CompressedSize.Align(2048) - packageEntry.CompressedSize; if (align > 0) { byte[] block = new byte[align]; clean.Write(block, 0, (int)align); } offset += packageEntry.CompressedSize + align; uncompressedDataSize += packageEntry.UncompressedSize; compressedDataSize += packageEntry.CompressedSize + align; } } } else if (compressionType == Packages.PackageCompressionType.SolidZlib) { using (var compressed = new MemoryStream()) { var zlib = new DeflaterOutputStream(compressed, new Deflater(Deflater.DEFAULT_COMPRESSION)); long offset = 0; foreach (Packages.PackageEntry packageEntry in packageFile.Entries) { packageEntry.Offset = offset; this.ExportEntry(packageEntry.Name, zlib); int align = packageEntry.UncompressedSize.Align(2048) - packageEntry.UncompressedSize; if (align > 0) { byte[] block = new byte[align]; zlib.Write(block, 0, (int)align); } offset += packageEntry.UncompressedSize + align; uncompressedDataSize += packageEntry.UncompressedSize + align; } zlib.Close(); compressed.Seek(0, SeekOrigin.Begin); clean.WriteFromStream(compressed, compressed.Length); compressedDataSize = (int)compressed.Length; } } else { throw new InvalidOperationException(); } packageFile.PackageSize = (int)clean.Length; packageFile.UncompressedDataSize = uncompressedDataSize; packageFile.CompressedDataSize = compressedDataSize; clean.Seek(0, SeekOrigin.Begin); packageFile.Serialize(clean, this.LittleEndian, compressionType); // copy clean to real stream { this.Stream.Seek(0, SeekOrigin.Begin); clean.Seek(0, 
SeekOrigin.Begin); this.Stream.WriteFromStream(clean, clean.Length); } this.Stream.SetLength(clean.Length); clean.Close(); if (tempFileName != null) { File.Delete(tempFileName); } this.Entries.Clear(); this.OriginalEntries.Clear(); foreach (Packages.PackageEntry entry in packageFile.Entries) { this.Entries.Add(entry.Name, new StreamEntry() { Offset = entry.Offset, Size = entry.UncompressedSize, CompressedSize = entry.CompressedSize, CompressionType = entry.CompressionType, }); } }
private void WriteDataChunks() { byte[] pixels = _image.Pixels; byte[] data = new byte[_image.PixelWidth * _image.PixelHeight * 4 + _image.PixelHeight]; int rowLength = _image.PixelWidth * 4 + 1; for (int y = 0; y < _image.PixelHeight; y++) { byte compression = 0; if (y > 0) { compression = 2; } data[y * rowLength] = compression; for (int x = 0; x < _image.PixelWidth; x++) { // Calculate the offset for the new array. int dataOffset = y * rowLength + x * 4 + 1; // Calculate the offset for the original pixel array. int pixelOffset = (y * _image.PixelWidth + x) * 4; data[dataOffset + 0] = pixels[pixelOffset + 0]; data[dataOffset + 1] = pixels[pixelOffset + 1]; data[dataOffset + 2] = pixels[pixelOffset + 2]; data[dataOffset + 3] = pixels[pixelOffset + 3]; if (y > 0) { int lastOffset = ((y - 1) * _image.PixelWidth + x) * 4; data[dataOffset + 0] -= pixels[lastOffset + 0]; data[dataOffset + 1] -= pixels[lastOffset + 1]; data[dataOffset + 2] -= pixels[lastOffset + 2]; data[dataOffset + 3] -= pixels[lastOffset + 3]; } } } byte[] buffer = null; int bufferLength = 0; MemoryStream memoryStream = null; try { memoryStream = new MemoryStream(); using (DeflaterOutputStream zStream = new DeflaterOutputStream(memoryStream)) { zStream.Write(data, 0, data.Length); zStream.Flush(); zStream.Finish(); bufferLength = (int)memoryStream.Length; buffer = memoryStream.ToArray(); } } finally { if (memoryStream != null) { memoryStream.Dispose(); } } int numChunks = bufferLength / MaxBlockSize; if (bufferLength % MaxBlockSize != 0) { numChunks++; } for (int i = 0; i < numChunks; i++) { int length = bufferLength - i * MaxBlockSize; if (length > MaxBlockSize) { length = MaxBlockSize; } WriteChunk(PngChunkTypes.Data, buffer, i * MaxBlockSize, length); } }
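WriteDataChunks stores the first scanline unfiltered (filter byte 0) and every later scanline with the PNG Up filter (filter byte 2), i.e. each byte minus the byte directly above it. For reference, a decoder reverses that by adding the previous reconstructed scanline back in; this small sketch is illustrative only and not part of the encoder above:

// PNG Up-filter inverse: Recon(x) = Filt(x) + Recon(b), with byte arithmetic wrapping mod 256.
static void UnfilterUp(byte[] currentScanline, byte[] previousScanline)
{
    for (int i = 0; i < currentScanline.Length; i++)
    {
        currentScanline[i] = (byte)(currentScanline[i] + previousScanline[i]);
    }
}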
public byte[] Save() { using (var output = new MemoryStream()) { output.WriteArray(Signature); using (var header = new MemoryStream()) { header.Write(IPAddress.HostToNetworkOrder(Width)); header.Write(IPAddress.HostToNetworkOrder(Height)); header.WriteByte(8); // Bit depth var colorType = Type == SpriteFrameType.Indexed8 ? PngColorType.Indexed | PngColorType.Color : Type == SpriteFrameType.Rgb24 ? PngColorType.Color : PngColorType.Color | PngColorType.Alpha; header.WriteByte((byte)colorType); header.WriteByte(0); // Compression header.WriteByte(0); // Filter header.WriteByte(0); // Interlacing WritePngChunk(output, "IHDR", header); } var alphaPalette = false; if (Palette != null) { using (var palette = new MemoryStream()) { foreach (var c in Palette) { palette.WriteByte(c.R); palette.WriteByte(c.G); palette.WriteByte(c.B); alphaPalette |= c.A > 0; } WritePngChunk(output, "PLTE", palette); } } if (alphaPalette) { using (var alpha = new MemoryStream()) { foreach (var c in Palette) { alpha.WriteByte(c.A); } WritePngChunk(output, "tRNS", alpha); } } using (var data = new MemoryStream()) { using (var compressed = new DeflaterOutputStream(data)) { var rowStride = Width * PixelStride; for (var y = 0; y < Height; y++) { // Write uncompressed scanlines for simplicity compressed.WriteByte(0); compressed.Write(Data, y * rowStride, rowStride); } compressed.Flush(); compressed.Finish(); WritePngChunk(output, "IDAT", data); } } foreach (var kv in EmbeddedData) { using (var text = new MemoryStream()) { text.WriteArray(Encoding.ASCII.GetBytes(kv.Key + (char)0 + kv.Value)); WritePngChunk(output, "tEXt", text); } } WritePngChunk(output, "IEND", new MemoryStream()); return(output.ToArray()); } }
private void Write(PdfStream pdfStream) { try { bool userDefinedCompression = pdfStream.GetCompressionLevel() != CompressionConstants.UNDEFINED_COMPRESSION; if (!userDefinedCompression) { int defaultCompressionLevel = document != null?document.GetWriter().GetCompressionLevel() : CompressionConstants .DEFAULT_COMPRESSION; pdfStream.SetCompressionLevel(defaultCompressionLevel); } bool toCompress = pdfStream.GetCompressionLevel() != CompressionConstants.NO_COMPRESSION; bool allowCompression = !pdfStream.ContainsKey(PdfName.Filter) && IsNotMetadataPdfStream(pdfStream); if (pdfStream.GetInputStream() != null) { Stream fout = this; DeflaterOutputStream def = null; OutputStreamEncryption ose = null; if (crypto != null && !crypto.IsEmbeddedFilesOnly()) { fout = ose = crypto.GetEncryptionStream(fout); } if (toCompress && (allowCompression || userDefinedCompression)) { UpdateCompressionFilter(pdfStream); fout = def = new DeflaterOutputStream(fout, pdfStream.GetCompressionLevel(), 0x8000); } this.Write((PdfDictionary)pdfStream); WriteBytes(iText.Kernel.Pdf.PdfOutputStream.stream); long beginStreamContent = GetCurrentPos(); byte[] buf = new byte[4192]; while (true) { int n = pdfStream.GetInputStream().Read(buf); if (n <= 0) { break; } fout.Write(buf, 0, n); } if (def != null) { def.Finish(); } if (ose != null) { ose.Finish(); } PdfNumber length = pdfStream.GetAsNumber(PdfName.Length); length.SetValue((int)(GetCurrentPos() - beginStreamContent)); pdfStream.UpdateLength(length.IntValue()); WriteBytes(iText.Kernel.Pdf.PdfOutputStream.endstream); } else { //When document is opened in stamping mode the output stream can be uninitialized. //We have to initialize it and write all data from streams input to streams output. if (pdfStream.GetOutputStream() == null && pdfStream.GetIndirectReference().GetReader() != null) { // If new specific compression is set for stream, // then compressed stream should be decoded and written with new compression settings byte[] bytes = pdfStream.GetIndirectReference().GetReader().ReadStreamBytes(pdfStream, false); if (userDefinedCompression) { bytes = DecodeFlateBytes(pdfStream, bytes); } pdfStream.InitOutputStream(new ByteArrayOutputStream(bytes.Length)); pdfStream.GetOutputStream().Write(bytes); } System.Diagnostics.Debug.Assert(pdfStream.GetOutputStream() != null, "PdfStream lost OutputStream"); ByteArrayOutputStream byteArrayStream; try { if (toCompress && !ContainsFlateFilter(pdfStream) && (allowCompression || userDefinedCompression)) { // compress UpdateCompressionFilter(pdfStream); byteArrayStream = new ByteArrayOutputStream(); DeflaterOutputStream zip = new DeflaterOutputStream(byteArrayStream, pdfStream.GetCompressionLevel()); if (pdfStream is PdfObjectStream) { PdfObjectStream objectStream = (PdfObjectStream)pdfStream; ((ByteArrayOutputStream)objectStream.GetIndexStream().GetOutputStream()).WriteTo(zip); ((ByteArrayOutputStream)objectStream.GetOutputStream().GetOutputStream()).WriteTo(zip); } else { System.Diagnostics.Debug.Assert(pdfStream.GetOutputStream() != null, "Error in outputStream"); ((ByteArrayOutputStream)pdfStream.GetOutputStream().GetOutputStream()).WriteTo(zip); } zip.Finish(); } else { if (pdfStream is PdfObjectStream) { PdfObjectStream objectStream = (PdfObjectStream)pdfStream; byteArrayStream = new ByteArrayOutputStream(); ((ByteArrayOutputStream)objectStream.GetIndexStream().GetOutputStream()).WriteTo(byteArrayStream); ((ByteArrayOutputStream)objectStream.GetOutputStream().GetOutputStream()).WriteTo(byteArrayStream); } else { 
System.Diagnostics.Debug.Assert(pdfStream.GetOutputStream() != null, "Error in outputStream"); byteArrayStream = (ByteArrayOutputStream)pdfStream.GetOutputStream().GetOutputStream(); } } if (CheckEncryption(pdfStream)) { ByteArrayOutputStream encodedStream = new ByteArrayOutputStream(); OutputStreamEncryption ose = crypto.GetEncryptionStream(encodedStream); byteArrayStream.WriteTo(ose); ose.Finish(); byteArrayStream = encodedStream; } } catch (System.IO.IOException ioe) { throw new PdfException(PdfException.IoException, ioe); } pdfStream.Put(PdfName.Length, new PdfNumber(byteArrayStream.Length)); pdfStream.UpdateLength((int)byteArrayStream.Length); this.Write((PdfDictionary)pdfStream); WriteBytes(iText.Kernel.Pdf.PdfOutputStream.stream); byteArrayStream.WriteTo(this); byteArrayStream.Close(); WriteBytes(iText.Kernel.Pdf.PdfOutputStream.endstream); } } catch (System.IO.IOException e) { throw new PdfException(PdfException.CannotWritePdfStream, e, pdfStream); } }
private uint Compress() { _compressedStream = new MemoryStream(); //var blockSize = _archive?.BlockSize ?? _blockSize; var blockCount = (uint)(((int)_baseStream.Length + _blockSize - 1) / _blockSize) + 1; var blockOffsets = new uint[blockCount]; blockOffsets[0] = 4 * blockCount; _compressedStream.Position = blockOffsets[0]; for (var blockIndex = 1; blockIndex < blockCount; blockIndex++) { using (var stream = new MemoryStream()) { using (var deflater = new DeflaterOutputStream(stream)) { for (var i = 0; i < _blockSize; i++) { var r = _baseStream.ReadByte(); if (r == -1) { break; } deflater.WriteByte((byte)r); } deflater.Finish(); deflater.Flush(); stream.Position = 0; // First byte in the block indicates the compression algorithm used. // TODO: add enum for compression modes _compressedStream.WriteByte(2); while (true) { var read = stream.ReadByte(); if (read == -1) { break; } _compressedStream.WriteByte((byte)read); } } blockOffsets[blockIndex] = (uint)_compressedStream.Position; } } _baseStream.Dispose(); _compressedStream.Position = 0; using (var writer = new BinaryWriter(_compressedStream, new System.Text.UTF8Encoding(false, true), true)) { for (var blockIndex = 0; blockIndex < blockCount; blockIndex++) { writer.Write(blockOffsets[blockIndex]); } } _compressedStream.Position = 0; return((uint)_compressedStream.Length); }
// methods /** * Compresses the stream. * * @throws PdfException if a filter is already defined */ public void flateCompress() { if (!Document.compress) { return; } // check if the flateCompress-method has already been invoked if (compressed) { return; } // check if a filter already exists PdfObject filter = get(PdfName.FILTER); if (filter != null) { if (filter.isName() && ((PdfName)filter).CompareTo(PdfName.FLATEDECODE) == 0) { return; } else if (filter.isArray() && ((PdfArray)filter).contains(PdfName.FLATEDECODE)) { return; } else { throw new PdfException("Stream could not be compressed: filter is not a name or array."); } } try { // compress MemoryStream stream = new MemoryStream(); DeflaterOutputStream zip = new DeflaterOutputStream(stream); if (streamBytes != null) { streamBytes.WriteTo(zip); } else { zip.Write(bytes, 0, bytes.Length); } //zip.Close(); zip.Finish(); // update the object streamBytes = stream; bytes = null; put(PdfName.LENGTH, new PdfNumber(streamBytes.Length)); if (filter == null) { put(PdfName.FILTER, PdfName.FLATEDECODE); } else { PdfArray filters = new PdfArray(filter); filters.Add(PdfName.FLATEDECODE); put(PdfName.FILTER, filters); } compressed = true; } catch (IOException ioe) { throw ioe; } }
static void GenerateBundles(ArrayList files) { string temp_s = "temp.s"; // Path.GetTempFileName (); string temp_c = "temp.c"; string temp_o = "temp.o"; if (compile_only) { temp_c = output; } if (object_out != null) { temp_o = object_out; } try { ArrayList c_bundle_names = new ArrayList(); ArrayList config_names = new ArrayList(); byte [] buffer = new byte [8192]; using (StreamWriter ts = new StreamWriter(File.Create(temp_s))) { using (StreamWriter tc = new StreamWriter(File.Create(temp_c))) { string prog = null; tc.WriteLine("/* This source code was produced by mkbundle, do not edit */"); tc.WriteLine("#include <mono/metadata/mono-config.h>"); tc.WriteLine("#include <mono/metadata/assembly.h>\n"); if (compress) { tc.WriteLine("typedef struct _compressed_data {"); tc.WriteLine("\tMonoBundledAssembly assembly;"); tc.WriteLine("\tint compressed_size;"); tc.WriteLine("} CompressedAssembly;\n"); } foreach (string url in files) { string fname = new Uri(url).LocalPath; string aname = Path.GetFileName(fname); string encoded = aname.Replace("-", "_").Replace(".", "_"); if (prog == null) { prog = aname; } Console.WriteLine(" embedding: " + fname); Stream stream = File.OpenRead(fname); long real_size = stream.Length; int n; if (compress) { MemoryStream ms = new MemoryStream(); DeflaterOutputStream deflate = new DeflaterOutputStream(ms); while ((n = stream.Read(buffer, 0, 8192)) != 0) { deflate.Write(buffer, 0, n); } stream.Close(); deflate.Finish(); byte [] bytes = ms.GetBuffer(); stream = new MemoryStream(bytes, 0, (int)ms.Length, false, false); } WriteSymbol(ts, "assembly_data_" + encoded, stream.Length); while ((n = stream.Read(buffer, 0, 8192)) != 0) { for (int i = 0; i < n; i++) { ts.Write("\t.byte {0}\n", buffer [i]); } } ts.WriteLine(); if (compress) { tc.WriteLine("extern const unsigned char assembly_data_{0} [];", encoded); tc.WriteLine("static CompressedAssembly assembly_bundle_{0} = {{{{\"{1}\"," + " assembly_data_{0}, {2}}}, {3}}};", encoded, aname, real_size, stream.Length); double ratio = ((double)stream.Length * 100) / real_size; Console.WriteLine(" compression ratio: {0:.00}%", ratio); } else { tc.WriteLine("extern const unsigned char assembly_data_{0} [];", encoded); tc.WriteLine("static const MonoBundledAssembly assembly_bundle_{0} = {{\"{1}\", assembly_data_{0}, {2}}};", encoded, aname, real_size); } stream.Close(); c_bundle_names.Add("assembly_bundle_" + encoded); try { FileStream cf = File.OpenRead(fname + ".config"); Console.WriteLine(" config from: " + fname + ".config"); tc.WriteLine("extern const unsigned char assembly_config_{0} [];", encoded); WriteSymbol(ts, "assembly_config_" + encoded, cf.Length); while ((n = cf.Read(buffer, 0, 8192)) != 0) { for (int i = 0; i < n; i++) { ts.Write("\t.byte {0}\n", buffer [i]); } } ts.WriteLine(); config_names.Add(new string[] { aname, encoded }); } catch (FileNotFoundException) { /* we ignore if the config file doesn't exist */ } } if (config_file != null) { FileStream conf; try { conf = File.OpenRead(config_file); } catch { Error(String.Format("Failure to open {0}", config_file)); return; } Console.WriteLine("System config from: " + config_file); tc.WriteLine("extern const char system_config;"); WriteSymbol(ts, "system_config", config_file.Length); int n; while ((n = conf.Read(buffer, 0, 8192)) != 0) { for (int i = 0; i < n; i++) { ts.Write("\t.byte {0}\n", buffer [i]); } } // null terminator ts.Write("\t.byte 0\n"); ts.WriteLine(); } if (machine_config_file != null) { FileStream conf; try { conf = File.OpenRead(machine_config_file); } 
catch { Error(String.Format("Failure to open {0}", machine_config_file)); return; } Console.WriteLine("Machine config from: " + machine_config_file); tc.WriteLine("extern const char machine_config;"); WriteSymbol(ts, "machine_config", machine_config_file.Length); int n; while ((n = conf.Read(buffer, 0, 8192)) != 0) { for (int i = 0; i < n; i++) { ts.Write("\t.byte {0}\n", buffer [i]); } } // null terminator ts.Write("\t.byte 0\n"); ts.WriteLine(); } ts.Close(); Console.WriteLine("Compiling:"); string cmd = String.Format("{0} -o {1} {2} ", GetEnv("AS", "as"), temp_o, temp_s); int ret = Execute(cmd); if (ret != 0) { Error("[Fail]"); return; } if (compress) { tc.WriteLine("\nstatic const CompressedAssembly *compressed [] = {"); } else { tc.WriteLine("\nstatic const MonoBundledAssembly *bundled [] = {"); } foreach (string c in c_bundle_names) { tc.WriteLine("\t&{0},", c); } tc.WriteLine("\tNULL\n};\n"); tc.WriteLine("static char *image_name = \"{0}\";", prog); tc.WriteLine("\nstatic void install_dll_config_files (void) {\n"); foreach (string[] ass in config_names) { tc.WriteLine("\tmono_register_config_for_assembly (\"{0}\", assembly_config_{1});\n", ass [0], ass [1]); } if (config_file != null) { tc.WriteLine("\tmono_config_parse_memory (&system_config);\n"); } if (machine_config_file != null) { tc.WriteLine("\tmono_register_machine_config (&machine_config);\n"); } tc.WriteLine("}\n"); if (config_dir != null) { tc.WriteLine("static const char *config_dir = \"{0}\";", config_dir); } else { tc.WriteLine("static const char *config_dir = NULL;"); } Stream template_stream; if (compress) { template_stream = Assembly.GetAssembly(typeof(MakeBundle)).GetManifestResourceStream("template_z.c"); } else { template_stream = Assembly.GetAssembly(typeof(MakeBundle)).GetManifestResourceStream("template.c"); } StreamReader s = new StreamReader(template_stream); string template = s.ReadToEnd(); tc.Write(template); if (!nomain) { Stream template_main_stream = Assembly.GetAssembly(typeof(MakeBundle)).GetManifestResourceStream("template_main.c"); StreamReader st = new StreamReader(template_main_stream); string maintemplate = st.ReadToEnd(); tc.Write(maintemplate); } tc.Close(); if (compile_only) { return; } string zlib = (compress ? "-lz" : ""); string debugging = "-g"; string cc = GetEnv("CC", IsUnix ? "cc" : "gcc -mno-cygwin"); if (style == "linux") { debugging = "-ggdb"; } if (static_link) { string smonolib; if (style == "osx") { smonolib = "`pkg-config --variable=libdir mono-2`/libmono-2.0.a "; } else { smonolib = "-Wl,-Bstatic -lmono-2.0 -Wl,-Bdynamic "; } cmd = String.Format("{4} -o {2} -Wall `pkg-config --cflags mono-2` {0} {3} " + "`pkg-config --libs-only-L mono-2` " + smonolib + "`pkg-config --libs-only-l mono-2 | sed -e \"s/\\-lmono-2.0 //\"` {1}", temp_c, temp_o, output, zlib, cc); } else { cmd = String.Format("{4} " + debugging + " -o {2} -Wall {0} `pkg-config --cflags --libs mono-2` {3} {1}", temp_c, temp_o, output, zlib, cc); } ret = Execute(cmd); if (ret != 0) { Error("[Fail]"); return; } Console.WriteLine("Done"); } } } finally { if (!keeptemp) { if (object_out == null) { File.Delete(temp_o); } if (!compile_only) { File.Delete(temp_c); } File.Delete(temp_s); } } }
public static void Main(string[] args) { bool verbose = false; bool showHelp = false; uint baseRevision = 0; uint revision = 0; string deletionsPath = null; bool noCrypto = false; const Endian endian = Endian.Little; var options = new OptionSet() { { "R|baseRevision=", "specify archive base revision", v => baseRevision = v == null ? 0 : uint.Parse(v) }, { "r|revision=", "specify archive revision", v => revision = v == null ? 0 : uint.Parse(v) }, { "d|deletions=", "path of deletions file", v => deletionsPath = v }, { "no-crypto", "don't use any encryption", v => noCrypto = v != null }, { "v|verbose", "show verbose messages", v => verbose = v != null }, { "h|help", "show this message and exit", v => showHelp = v != null }, }; List <string> extras; try { extras = options.Parse(args); } catch (OptionException e) { Console.Write("{0}: ", GetExecutableName()); Console.WriteLine(e.Message); Console.WriteLine("Try `{0} --help' for more information.", GetExecutableName()); return; } if (extras.Count < 1 || showHelp == true) { Console.WriteLine("Usage: {0} [OPTIONS]+ output_ipf input_directory+", GetExecutableName()); Console.WriteLine(); Console.WriteLine("Pack files from input directories into a archive."); Console.WriteLine(); Console.WriteLine("Options:"); options.WriteOptionDescriptions(Console.Out); return; } var inputPaths = new List <string>(); string outputPath; if (extras.Count == 1) { inputPaths.Add(extras[0]); outputPath = Path.ChangeExtension(extras[0], ".ipf"); } else { outputPath = Path.ChangeExtension(extras[0], ".ipf"); inputPaths.AddRange(extras.Skip(1)); } var pendingEntries = new SortedDictionary <string, PendingEntry>(); if (verbose == true) { Console.WriteLine("Finding files..."); } foreach (var relativePath in inputPaths) { string inputPath = Path.GetFullPath(relativePath); if (inputPath.EndsWith(Path.DirectorySeparatorChar.ToString(CultureInfo.InvariantCulture)) == true) { inputPath = inputPath.Substring(0, inputPath.Length - 1); } foreach (string path in Directory.GetFiles(inputPath, "*", SearchOption.AllDirectories)) { string fullPath = Path.GetFullPath(path); string partPath = fullPath.Substring(inputPath.Length + 1) .Replace(Path.DirectorySeparatorChar, '/') .Replace(Path.AltDirectorySeparatorChar, '/'); var key = partPath.ToLowerInvariant(); if (pendingEntries.ContainsKey(key) == true) { Console.WriteLine("Ignoring duplicate of {0}: {1}", partPath, fullPath); if (verbose == true) { Console.WriteLine(" Previously added from: {0}", pendingEntries[key]); } continue; } var archiveSeparatorIndex = partPath.IndexOf('/'); if (archiveSeparatorIndex < 0) { continue; } var archiveName = partPath.Substring(0, archiveSeparatorIndex); var fileName = partPath.Substring(archiveSeparatorIndex + 1); pendingEntries[key] = new PendingEntry(fullPath, archiveName, fileName); } } using (var output = File.Create(outputPath)) { var fileEntries = new List <ArchiveFileEntry>(); var deletionEntries = new List <ArchiveDeletionEntry>(); if (string.IsNullOrEmpty(deletionsPath) == false) { if (verbose == true) { Console.WriteLine("Reading deletions..."); } var serializer = JsonSerializer.Create(); using (var input = File.OpenRead(deletionsPath)) using (var streamReader = new StreamReader(input)) using (var jsonReader = new JsonTextReader(streamReader)) { var jsonDeletionEntries = serializer.Deserialize <JsonArchiveDeletionEntry[]>(jsonReader); deletionEntries.AddRange(jsonDeletionEntries.Select(jde => new ArchiveDeletionEntry() { Name = jde.Name, Archive = jde.Archive, })); } } if (verbose == true) { 
Console.WriteLine("Writing file data..."); } long current = 0; long total = pendingEntries.Count; var padding = total.ToString(CultureInfo.InvariantCulture).Length; foreach (var pendingEntry in pendingEntries.Select(kv => kv.Value)) { var fullPath = pendingEntry.FullPath; var archiveName = pendingEntry.ArchiveName; var fileName = pendingEntry.FileName; current++; if (verbose == true) { Console.WriteLine("[{0}/{1}] {2} => {3}", current.ToString(CultureInfo.InvariantCulture).PadLeft(padding), total, archiveName, fileName); } var bytes = File.ReadAllBytes(fullPath); var fileEntry = new ArchiveFileEntry(); fileEntry.Name = fileName; fileEntry.Archive = archiveName; fileEntry.Hash = CRC32.Compute(bytes, 0, bytes.Length); fileEntry.UncompressedSize = (uint)bytes.Length; fileEntry.Offset = (uint)output.Position; if (fileEntry.ShouldCompress == true) { int compressionLevel = Deflater.BEST_COMPRESSION; byte[] compressedBytes; using (var temp = new MemoryStream()) { var zlib = new DeflaterOutputStream(temp, new Deflater(compressionLevel, true)); zlib.WriteBytes(bytes); zlib.Finish(); temp.Flush(); temp.Position = 0; compressedBytes = temp.ToArray(); } if (noCrypto == false) { var crypto = new ArchiveCrypto(); crypto.Encrypt(compressedBytes, 0, compressedBytes.Length); } output.WriteBytes(compressedBytes); fileEntry.CompressedSize = (uint)compressedBytes.Length; } else { fileEntry.CompressedSize = fileEntry.UncompressedSize; output.WriteBytes(bytes); } fileEntries.Add(fileEntry); } if (verbose == true) { Console.WriteLine("Writing file table..."); } long fileTableOffset = output.Position; for (int i = 0; i < fileEntries.Count; i++) { fileEntries[i].Write(output, endian); } if (verbose == true) { Console.WriteLine("Writing deletion table..."); } long deletionTableOffset = output.Position; for (int i = 0; i < deletionEntries.Count; i++) { deletionEntries[i].Write(output, endian); } if (verbose == true) { Console.WriteLine("Writing header..."); } ArchiveHeader header; header.FileTableCount = (ushort)fileEntries.Count; header.FileTableOffset = (uint)fileTableOffset; header.DeletionTableCount = (ushort)deletionEntries.Count; header.DeletionTableOffset = (uint)deletionTableOffset; header.Magic = ArchiveHeader.Signature; header.BaseRevision = baseRevision; header.Revision = revision; header.Write(output, endian); if (verbose == true) { Console.WriteLine("Done!"); } } }
private static void Wrap(Stream output, byte[] bytes, CompressionScheme compressionScheme) { byte[] compressedBytes; if (bytes.Length <= BlockSize) { if (compressionScheme == CompressionScheme.LZO) { compressedBytes = new byte[bytes.Length + (bytes.Length / 16) + 64 + 3]; var actualCompressedSize = compressedBytes.Length; var result = MiniLZO.LZO.Compress( bytes, 0, bytes.Length, compressedBytes, 0, ref actualCompressedSize, new MiniLZO.CompressWorkBuffer()); if (result != MiniLZO.ErrorCode.Success) { throw new SaveCorruptionException($"LZO compression failure ({result})"); } Array.Resize(ref compressedBytes, actualCompressedSize); } else if (compressionScheme == CompressionScheme.Zlib) { using (var temp = new MemoryStream()) { var zlib = new DeflaterOutputStream(temp); zlib.WriteBytes(bytes); zlib.Finish(); temp.Flush(); temp.Position = 0; compressedBytes = temp.ReadBytes((int)temp.Length); } } else { throw new InvalidOperationException("unsupported compression scheme"); } } else { if (compressionScheme == CompressionScheme.LZO) { int innerCompressedOffset = 0; int innerCompressedSizeLeft = bytes.Length; using (var blockData = new MemoryStream()) { var blockCount = (innerCompressedSizeLeft + BlockSize) / BlockSize; blockData.WriteValueS32(blockCount, Endian.Big); blockData.Position = 4 + (blockCount * 8); var blockInfos = new List <Tuple <uint, uint> >(); while (innerCompressedSizeLeft > 0) { var blockUncompressedSize = Math.Min(BlockSize, innerCompressedSizeLeft); compressedBytes = new byte[blockUncompressedSize + (blockUncompressedSize / 16) + 64 + 3]; var actualCompressedSize = compressedBytes.Length; var result = MiniLZO.LZO.Compress( bytes, innerCompressedOffset, blockUncompressedSize, compressedBytes, 0, ref actualCompressedSize, new MiniLZO.CompressWorkBuffer()); if (result != MiniLZO.ErrorCode.Success) { throw new SaveCorruptionException($"LZO compression failure ({result})"); } blockData.Write(compressedBytes, 0, actualCompressedSize); blockInfos.Add(new Tuple <uint, uint>((uint)actualCompressedSize, BlockSize)); innerCompressedOffset += blockUncompressedSize; innerCompressedSizeLeft -= blockUncompressedSize; } blockData.Position = 4; foreach (var blockInfo in blockInfos) { blockData.WriteValueU32(blockInfo.Item1, Endian.Big); blockData.WriteValueU32(blockInfo.Item2, Endian.Big); } blockData.Position = 0; compressedBytes = blockData.ReadBytes((int)blockData.Length); } } else if (compressionScheme == CompressionScheme.Zlib) { int innerCompressedOffset = 0; int innerCompressedSizeLeft = bytes.Length; using (var blockData = new MemoryStream()) { var blockCount = (innerCompressedSizeLeft + BlockSize) / BlockSize; blockData.WriteValueS32(blockCount, Endian.Big); blockData.Position = 4 + (blockCount * 8); var blockInfos = new List <Tuple <uint, uint> >(); while (innerCompressedSizeLeft > 0) { var blockUncompressedSize = Math.Min(BlockSize, innerCompressedSizeLeft); using (var temp = new MemoryStream()) { var zlib = new DeflaterOutputStream(temp); zlib.Write(bytes, innerCompressedOffset, blockUncompressedSize); zlib.Finish(); temp.Flush(); temp.Position = 0; compressedBytes = temp.ReadBytes((int)temp.Length); } blockData.WriteBytes(compressedBytes); blockInfos.Add(new Tuple <uint, uint>((uint)compressedBytes.Length, BlockSize)); innerCompressedOffset += blockUncompressedSize; innerCompressedSizeLeft -= blockUncompressedSize; } blockData.Position = 4; foreach (var blockInfo in blockInfos) { blockData.WriteValueU32(blockInfo.Item1, Endian.Big); blockData.WriteValueU32(blockInfo.Item2, 
Endian.Big); } blockData.Position = 0; compressedBytes = blockData.ReadBytes((int)blockData.Length); } } else { throw new InvalidOperationException("unsupported compression scheme"); } } byte[] uncompressedBytes; using (var uncompressedData = new MemoryStream()) { uncompressedData.WriteValueS32(bytes.Length, Endian.Big); uncompressedData.WriteBytes(compressedBytes); uncompressedData.Position = 0; uncompressedBytes = uncompressedData.ReadBytes((int)uncompressedData.Length); } byte[] computedHash; using (var sha1 = new System.Security.Cryptography.SHA1Managed()) { computedHash = sha1.ComputeHash(uncompressedBytes); } output.WriteBytes(computedHash); output.WriteBytes(uncompressedBytes); }
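Both the single-block and per-block LZO paths above size their scratch buffer as length + (length / 16) + 64 + 3, the usual worst-case output bound for LZO1X on incompressible input. A tiny helper (added here only for readability; not present in the original source) makes that intent explicit:

// Worst-case lzo1x output size for an input of the given length: the data can grow by
// roughly 1/16 plus a small constant when it does not compress at all.
static int LzoWorstCaseOutputSize(int inputLength)
{
    return inputLength + (inputLength / 16) + 64 + 3;
}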
public static void Main(string[] args) { bool showHelp = false; bool overwriteFiles = false; bool verbose = false; var options = new OptionSet() { { "o|overwrite", "overwrite existing files", v => overwriteFiles = v != null }, { "v|verbose", "be verbose", v => verbose = v != null }, { "h|help", "show this message and exit", v => showHelp = v != null }, }; List <string> extras; try { extras = options.Parse(args); } catch (OptionException e) { Console.Write("{0}: ", GetExecutableName()); Console.WriteLine(e.Message); Console.WriteLine("Try `{0} --help' for more information.", GetExecutableName()); return; } if (extras.Count < 1 || extras.Count > 2 || showHelp == true) { Console.WriteLine("Usage: {0} [OPTIONS]+ input_bin [output_rz]", GetExecutableName()); Console.WriteLine(); Console.WriteLine("Options:"); options.WriteOptionDescriptions(Console.Out); return; } var inputPath = Path.GetFullPath(extras[0]); var outputPath = extras.Count > 1 ? extras[1] : inputPath + ".rz"; if (overwriteFiles == false && File.Exists(outputPath) == true) { Console.WriteLine("error: '{0}' exists", outputPath); return; } using (var temp = new MemoryStream()) { uint uncompressedSize; using (var input = File.OpenRead(inputPath)) { uncompressedSize = (uint)input.Length; var zlib = new DeflaterOutputStream(temp); zlib.WriteFromStream(input, input.Length); zlib.Finish(); } using (var output = File.Create(outputPath)) { output.WriteValueU32(0x525A0000, Endian.Big); output.WriteValueU32(0, Endian.Little); output.WriteValueU32(uncompressedSize, Endian.Little); output.WriteValueU32(0, Endian.Little); temp.Position = 0; output.WriteFromStream(temp, temp.Length); } } }
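The tool above writes a 16-byte header (the big-endian magic 0x525A0000, i.e. "RZ\0\0", a zero, the little-endian uncompressed size, another zero) followed by a single zlib stream. A hypothetical reader for that container, using a plain BinaryReader instead of the Gibbed stream helpers, could look like this sketch:

using System.IO;
using ICSharpCode.SharpZipLib.Zip.Compression.Streams;

// Hypothetical reader: parses the ".rz" container produced above and returns the payload.
public static byte[] ReadRz(string path)
{
    using (var input = File.OpenRead(path))
    using (var reader = new BinaryReader(input))
    {
        var magic = reader.ReadBytes(4); // 0x52 0x5A 0x00 0x00 ("RZ\0\0")
        if (magic.Length != 4 || magic[0] != 0x52 || magic[1] != 0x5A)
        {
            throw new InvalidDataException("not an RZ container");
        }
        reader.ReadUInt32();                        // reserved field, written as 0
        var uncompressedSize = reader.ReadUInt32(); // little-endian, as written by the tool
        reader.ReadUInt32();                        // reserved field, written as 0

        var result = new byte[uncompressedSize];
        using (var zlib = new InflaterInputStream(input))
        {
            int offset = 0, read;
            while (offset < result.Length &&
                   (read = zlib.Read(result, offset, result.Length - offset)) > 0)
            {
                offset += read;
            }
        }
        return result;
    }
}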
public static void Encrypt(string sourceFile, string destFile) { ulong size; ulong type; ulong id1; ulong id2; byte id3; byte id4; string temp; var(tmpName, tmp) = CreateTempFile(); using (var bw = new BinaryWriter(tmp)) { int a; LocalizationFile file = new LocalizationFile(sourceFile); var lines = file.Process(); List <short> span = new List <short>((int)MAX_BUFF_SIZE); var index = 0; foreach (var line in lines) { //strBuff.Clear(); span.Clear(); type = (ulong)line.Type; id1 = (ulong)line.Addr1; id2 = (ulong)line.Addr2; id3 = (byte)line.Addr3; id4 = (byte)line.Addr4; temp = line.Text; //string.Concat(line.Text,"\r\n"); for (a = 0; a < temp.Length; a++) { index = span.Count; span.Add((short)temp[a]); //strBuff[a] = temp[a]; if (a > 0) { if (temp[a] == 'n' && temp[a - 1].ToString() == "\\") { span[index - 1] = (short)CHAR_LF; span.RemoveAt(index); } } } size = (ulong)span.Count; bw.Write((UInt32)size); // strSize - 4 bw.Write((UInt32)type); //strType - 4 bw.Write((UInt32)id1); // Id1 - 4 bw.Write((UInt16)id2); // id2 -2 bw.Write((byte)id3); //id3 -1 bw.Write((byte)id4); //id4 -1 foreach (var c in span) // tempSpan) { bw.Write(c); // -size } bw.Write((short)CHAR_NULL); bw.Write((short)CHAR_NULL); } tmp.Seek(0, SeekOrigin.End); ulong uncompressedSize = (ulong)tmp.Position; ulong compressedSize; Span <byte> uncompressedData = new byte[uncompressedSize]; Span <byte> compressedData; tmp.Seek(0, SeekOrigin.Begin); using (var br = new BinaryReader(tmp)) { uncompressedData = br.ReadBytes((int)uncompressedSize); //using (var ms = new MemoryStream(uncompressedData.ToArray())) using (var output = new MemoryStream()) { using (var zlib = new DeflaterOutputStream(output, new Deflater(Deflater.BEST_SPEED))) { zlib.Write(uncompressedData.ToArray()); zlib.Finish(); compressedData = new byte[output.Length]; compressedSize = (ulong)output.Length; compressedData = output.ToArray().AsSpan(); } } } using (var dfs = new FileStream(destFile, FileMode.Create, FileAccess.Write)) using (var writer = new BinaryWriter(dfs, Encoding.Unicode)) { writer.Write((UInt32)uncompressedSize); writer.Write(compressedData); } } tmp.Dispose(); File.Delete(tmpName); }
private static void WriteLayer(FileStream stream, Grid grid, int layer) { byte[] data = BitConverter.GetBytes(grid.Width); stream.Write(data, 0, data.Length); data = BitConverter.GetBytes(grid.Height); stream.Write(data, 0, data.Length); //12x12 tile size default data = BitConverter.GetBytes(0xc000c); stream.Write(data, 0, data.Length); //Tileset = 0, Collision = 1 data = BitConverter.GetBytes((short)0x100); stream.Write(data, 0, data.Length); //OffsetX = 0,OffsetY = 0 data = BitConverter.GetBytes((long)0); stream.Write(data, 0, data.Length); //ScrollX = 1,ScrollY = 1 data = BitConverter.GetBytes((float)1); stream.Write(data, 0, data.Length); stream.Write(data, 0, data.Length); //WrapX = 0,WrapY = 0 data = BitConverter.GetBytes((short)0); stream.Write(data, 0, data.Length); //Visible = 1 stream.WriteByte(1); //Opacity = 1 data = BitConverter.GetBytes((float)1); stream.Write(data, 0, data.Length); //SubLayer Tileset = 255,Animation = 255,AnimationFrame = 255 stream.WriteByte(0xff); stream.WriteByte(0xff); stream.WriteByte(0xff); //Data blocks stream.WriteByte(0x2); data = BitConverter.GetBytes(MAIN); stream.Write(data, 0, data.Length); int size = grid.Width * grid.Height; data = new byte[size * 2]; byte[] directions = new byte[size]; for (int i = 0, k = 0; i < size; i++) { Cell cell = grid.Cells[i]; int itemCount = cell.Objects.Count; int objectCount = 0; bool wroteID = false; for (int j = 0; j < itemCount; j++) { Item item = cell.Objects[j]; if (!(item is LevelPath) && !(item is Level) && !(item is Special)) { if (objectCount < layer) { objectCount++; } else { data[k++] = (byte)item.ID; data[k++] = (byte)(item.ID >> 8); directions[i] = item.Direction; wroteID = true; break; } } } if (!wroteID) { data[k++] = 0xff; data[k++] = 0xff; directions[i] = 3; } } byte[] compressed; using (MemoryStream ms = new MemoryStream()) { Zip.ResetStream(ms); Zip.Write(data, 0, data.Length); Zip.Finish(); compressed = ms.ToArray(); } data = BitConverter.GetBytes(compressed.Length); stream.Write(data, 0, data.Length); stream.Write(compressed, 0, compressed.Length); data = BitConverter.GetBytes(DATA); stream.Write(data, 0, data.Length); //Cellsize stream.WriteByte(1); //Default value = 0 data = BitConverter.GetBytes((int)0); stream.Write(data, 0, data.Length); using (MemoryStream ms = new MemoryStream()) { Zip.ResetStream(ms); Zip.Write(directions, 0, directions.Length); Zip.Finish(); compressed = ms.ToArray(); } data = BitConverter.GetBytes(compressed.Length); stream.Write(data, 0, data.Length); stream.Write(compressed, 0, compressed.Length); }
/// <summary> /// Creates the static library. /// </summary> /// <param name='dataFile'> /// Data file. /// </param> /// <param name='needZeroEnd'> /// Need zero end. /// </param> public void CreateStaticLibrary(String dataFile, bool needZeroEnd) { // Generate the pretty name this.SymbolName = GetSymbolName(dataFile); // If we need a zero at the end (for text files), add 1 byte int size = (int)new FileInfo(dataFile).Length; byte[] fileBuffer = File.ReadAllBytes(dataFile); this.Logger.LogInfo("Embedding '" + dataFile + "'..."); // Use raw file this.InputSize = size; byte[] dataBuffer = fileBuffer; if (this.Compress) { // Compress the data file if required using (MemoryStream stream = new MemoryStream()) { using (DeflaterOutputStream deflate = new DeflaterOutputStream(stream)) { int n = 0, len = 0; while (n < size) { len = Math.Min(size - n, CHUNK); deflate.Write(fileBuffer, n, len); n += CHUNK; } if (needZeroEnd) { deflate.WriteByte(0); } deflate.Finish(); } dataBuffer = stream.ToArray(); stream.Close(); } } else if (needZeroEnd) { this.InputSize = size + 1; dataBuffer = new byte[this.InputSize]; Array.Copy(fileBuffer, dataBuffer, size); dataBuffer[size] = 0; } this.OutputSize = dataBuffer.Length; if (this.Compress) { this.Logger.LogInfo("Compression ratio: " + Math.Floor(100.0 * this.OutputSize / this.InputSize) + "%"); } // Compute the names String sFile = Path.Combine(this.OutputDirectory, this.SymbolName + ".s"); String oFile = Path.Combine(this.OutputDirectory, this.SymbolName + ".o"); String aFile = Path.Combine(this.OutputDirectory, this.SymbolName + ".a"); this.OutputFile = Path.Combine(this.OutputDirectory, "lib" + this.SymbolName + ".a"); // (1) Create the assembly source file this.Logger.LogDebug("Create assembly file '" + Path.GetFileName(sFile) + "'..."); String content = String.Format(CultureInfo.CurrentCulture, TEMPLATE, this.SymbolName, this.OutputSize, SPACER_BYTE); File.WriteAllText(sFile, content); // (2) Create the object file this.Logger.LogDebug("Create object file '" + Path.GetFileName(oFile) + "'..."); using (ProcessHelper helper = new ProcessHelper("cc", string.Format("{0} -c -o \"{1}\" \"{2}\"", this.ArchitectureFlags ?? String.Empty, oFile, sFile))) { helper.Logger = this.Logger; helper.Execute(); } // (3) Create the static library this.Logger.LogDebug("Create library file '" + Path.GetFileName(aFile) + "'..."); using (ProcessHelper helper = new ProcessHelper("libtool", string.Format("-o \"{0}\" \"{1}\"", aFile, oFile))) { helper.Logger = this.Logger; helper.Execute(); } // (4) Swap binary content this.Logger.LogDebug("Swaping content to '" + Path.GetFileName(this.OutputFile) + "'..."); // Not quite memory-efficient, but simpler to code byte[] outputBuffer = File.ReadAllBytes(aFile); // Search for the beginning and the end of the spacer zone int start = Locate(outputBuffer, new[] { SPACER_BYTE, SPACER_BYTE, SPACER_BYTE, SPACER_BYTE }); // Insert the data file content into the static library Array.Copy(dataBuffer, 0, outputBuffer, start, dataBuffer.Length); // Write the result on the disk File.WriteAllBytes(this.OutputFile, outputBuffer); }
/// <summary> /// Encodes the specified data. /// </summary> public byte[] Encode(byte[] data, PdfFlateEncodeMode mode) { MemoryStream ms = new MemoryStream(); // DeflateStream/GZipStream does not work immediately and I have not the leisure to work it out. // So I keep on using SharpZipLib even with .NET 2.0. #if NET_ZIP // See http://connect.microsoft.com/VisualStudio/feedback/ViewFeedback.aspx?FeedbackID=97064 // // Excerpt from the RFC 1950 specs for first byte: // // CMF (Compression Method and flags) // This byte is divided into a 4-bit compression method and a 4- // bit information field depending on the compression method. // // bits 0 to 3 CM Compression method // bits 4 to 7 CINFO Compression info // // CM (Compression method) // This identifies the compression method used in the file. CM = 8 // denotes the "deflate" compression method with a window size up // to 32K. This is the method used by gzip and PNG (see // references [1] and [2] in Chapter 3, below, for the reference // documents). CM = 15 is reserved. It might be used in a future // version of this specification to indicate the presence of an // extra field before the compressed data. // // CINFO (Compression info) // For CM = 8, CINFO is the base-2 logarithm of the LZ77 window // size, minus eight (CINFO=7 indicates a 32K window size). Values // of CINFO above 7 are not allowed in this version of the // specification. CINFO is not defined in this specification for // CM not equal to 8. ms.WriteByte(0x78); // Excerpt from the RFC 1950 specs for second byte: // // FLG (FLaGs) // This flag byte is divided as follows: // // bits 0 to 4 FCHECK (check bits for CMF and FLG) // bit 5 FDICT (preset dictionary) // bits 6 to 7 FLEVEL (compression level) // // The FCHECK value must be such that CMF and FLG, when viewed as // a 16-bit unsigned integer stored in MSB order (CMF*256 + FLG), // is a multiple of 31. // // FDICT (Preset dictionary) // If FDICT is set, a DICT dictionary identifier is present // immediately after the FLG byte. The dictionary is a sequence of // bytes which are initially fed to the compressor without // producing any compressed output. DICT is the Adler-32 checksum // of this sequence of bytes (see the definition of ADLER32 // below). The decompressor can use this identifier to determine // which dictionary has been used by the compressor. // // FLEVEL (Compression level) // These flags are available for use by specific compression // methods. The "deflate" method (CM = 8) sets these flags as // follows: // // 0 - compressor used fastest algorithm // 1 - compressor used fast algorithm // 2 - compressor used default algorithm // 3 - compressor used maximum compression, slowest algorithm // // The information in FLEVEL is not needed for decompression; it // is there to indicate if recompression might be worthwhile. ms.WriteByte(0x49); DeflateStream zip = new DeflateStream(ms, CompressionMode.Compress, true); zip.Write(data, 0, data.Length); zip.Close(); #else int level = Deflater.DEFAULT_COMPRESSION; switch (mode) { case PdfFlateEncodeMode.BestCompression: level = Deflater.BEST_COMPRESSION; break; case PdfFlateEncodeMode.BestSpeed: level = Deflater.BEST_SPEED; break; } DeflaterOutputStream zip = new DeflaterOutputStream(ms, new Deflater(level, false)); zip.Write(data, 0, data.Length); zip.Finish(); #endif #if !NETFX_CORE && !UWP ms.Capacity = (int)ms.Length; return(ms.GetBuffer()); #else return(ms.ToArray()); #endif }
protected void Build( TPackage package, IEnumerable <KeyValuePair <string, string> > paths, string outputPath, bool ps3) { var isCompressed = (package.Flags & Package.HeaderFlags.Compressed) != 0; var isCondensed = (package.Flags & Package.HeaderFlags.Condensed) != 0; package.Entries.Clear(); foreach (var kv in paths) { package.Entries.Add(new TEntry() { Name = kv.Key, }); } var baseOffset = package.EstimateHeaderSize(); package.Entries.Clear(); using (var output = File.Create(outputPath)) { if (isCondensed == true && isCompressed == true) { output.Seek(baseOffset, SeekOrigin.Begin); using (var compressed = new MemoryStream()) { var z = new ZLIB.ZOutputStream(compressed, ZLIB.zlibConst.Z_BEST_COMPRESSION); z.FlushMode = ZLIB.zlibConst.Z_SYNC_FLUSH; long offset = 0; foreach (var kv in paths) { using (var input = File.OpenRead(kv.Value)) { var entry = new TEntry(); entry.Name = kv.Key; entry.Offset = (uint)offset; entry.UncompressedSize = (uint)input.Length; long size = z.TotalOut; z.WriteFromStream(input, input.Length); size = z.TotalOut - size; entry.CompressedSize = (uint)size; offset += entry.UncompressedSize; package.Entries.Add(entry); } } package.CompressedSize = (uint)compressed.Length; package.UncompressedSize = (uint)offset; compressed.Position = 0; output.WriteFromStream(compressed, compressed.Length); } output.Seek(0, SeekOrigin.Begin); package.Serialize(output); } else if ( ps3 == true && isCompressed == true) { output.Seek(baseOffset, SeekOrigin.Begin); long offset = 0; uint uncompressedSize = 0; foreach (var kv in paths) { using (var input = File.OpenRead(kv.Value)) { if (isCondensed == false) { var offsetPadding = offset.Align(2048) - offset; if (offsetPadding > 0) { offset += offsetPadding; output.Seek(offsetPadding, SeekOrigin.Current); } var sizePadding = uncompressedSize.Align(2048) - uncompressedSize; if (sizePadding > 0) { uncompressedSize += sizePadding; } } var entry = new TEntry(); entry.Name = kv.Key; entry.Offset = (uint)offset; entry.UncompressedSize = (uint)input.Length; entry.CompressedSize = 0; var left = input.Length; while (left > 0) { using (var compressed = new MemoryStream()) { var chunkUncompressedSize = (uint)Math.Min(0x10000, left); var zlib = new DeflaterOutputStream(compressed, new Deflater(9, true)); zlib.WriteFromStream(input, chunkUncompressedSize); zlib.Finish(); var chunkCompressedSize = (uint)compressed.Length; if (chunkCompressedSize > 0xFFFF) { throw new InvalidOperationException(); } output.WriteValueU16((ushort)chunkCompressedSize, package.Endian); output.WriteValueU16(0, package.Endian); output.WriteValueU32(chunkUncompressedSize, package.Endian); entry.CompressedSize += 2 + 2 + 4; entry.CompressedSize += chunkCompressedSize; compressed.Position = 0; output.WriteFromStream(compressed, compressed.Length); left -= chunkUncompressedSize; } } offset += entry.CompressedSize; uncompressedSize += entry.UncompressedSize; package.Entries.Add(entry); } } package.CompressedSize = (uint)offset; package.UncompressedSize = uncompressedSize; } else if (isCompressed == true) { output.Seek(baseOffset, SeekOrigin.Begin); long offset = 0; uint uncompressedSize = 0; foreach (var kv in paths) { using (var input = File.OpenRead(kv.Value)) { if (isCondensed == false) { var offsetPadding = offset.Align(2048) - offset; if (offsetPadding > 0) { offset += offsetPadding; output.Seek(offsetPadding, SeekOrigin.Current); } var sizePadding = uncompressedSize.Align(2048) - uncompressedSize; if (sizePadding > 0) { uncompressedSize += sizePadding; } } var entry = new 
TEntry(); entry.Name = kv.Key; entry.Offset = (uint)offset; entry.UncompressedSize = (uint)input.Length; using (var compressed = new MemoryStream()) { var zlib = new DeflaterOutputStream(compressed); zlib.WriteFromStream(input, input.Length); zlib.Finish(); entry.CompressedSize = (uint)compressed.Length; compressed.Position = 0; output.WriteFromStream(compressed, compressed.Length); } offset += entry.CompressedSize; uncompressedSize += entry.UncompressedSize; package.Entries.Add(entry); } } package.CompressedSize = (uint)offset; package.UncompressedSize = uncompressedSize; } else { output.Seek(baseOffset, SeekOrigin.Begin); long offset = 0; foreach (var kv in paths) { using (var input = File.OpenRead(kv.Value)) { if (isCondensed == false) { var padding = offset.Align(2048) - offset; if (padding > 0) { offset += padding; output.Seek(padding, SeekOrigin.Current); } } else if ( isCondensed == true && isCompressed == false) { var padding = offset.Align(16) - offset; if (padding > 0) { offset += padding; output.Seek(padding, SeekOrigin.Current); } } var entry = new TEntry(); entry.Name = kv.Key; entry.Offset = (uint)offset; entry.UncompressedSize = (uint)input.Length; output.WriteFromStream(input, input.Length); entry.CompressedSize = 0xFFFFFFFF; offset += entry.UncompressedSize; package.Entries.Add(entry); } } package.CompressedSize = 0xFFFFFFFF; package.UncompressedSize = (uint)offset; } package.TotalSize = (uint)output.Length; output.Seek(0, SeekOrigin.Begin); package.Serialize(output); } }
public unsafe void Save(System.IO.TextWriter stream) { DataNode rootNode = new DataNode("ika-sprite"); rootNode.AddChild(new DataNode("version").AddChild(SPRITE_VER)); DataNode infoNode = new DataNode("information"); rootNode.AddChild(infoNode); infoNode.AddChild(new DataNode("title").AddChild("Untitled")); DataNode metaNode = new DataNode("meta"); infoNode.AddChild(metaNode); foreach (DictionaryEntry iter in Metadata) { metaNode.AddChild(new DataNode((string)iter.Key).AddChild((string)iter.Value)); } rootNode.AddChild(new DataNode("header") .AddChild(new DataNode("depth").AddChild("32")) ); DataNode scriptNode = new DataNode("scripts"); rootNode.AddChild(scriptNode); foreach (DictionaryEntry iter in Scripts) { scriptNode.AddChild( new DataNode("script").AddChild( new DataNode("label").AddChild((string)iter.Key) ) .AddChild((string)iter.Value) ); } DataNode frameNode = new DataNode("frames"); rootNode.AddChild(frameNode); frameNode .AddChild(new DataNode("count").AddChild(frames.Count)) .AddChild(new DataNode("dimensions") .AddChild(new DataNode("width").AddChild(Size.Width)) .AddChild(new DataNode("height").AddChild(Size.Height)) ) .AddChild(new DataNode("hotspot") .AddChild(new DataNode("x").AddChild(HotSpot.X)) .AddChild(new DataNode("y").AddChild(HotSpot.Y)) .AddChild(new DataNode("width").AddChild(HotSpot.Width)) .AddChild(new DataNode("height").AddChild(HotSpot.Height)) ); MemoryStream data = new MemoryStream(); foreach (Bitmap bmp in Frames) { BitmapData bd = bmp.LockBits( Rectangle.FromLTRB(0, 0, bmp.Width, bmp.Height), ImageLockMode.ReadOnly, PixelFormat.Format32bppArgb ); int numPixels = bmp.Width * bmp.Height; byte *b = (byte *)bd.Scan0; for (int i = 0; i < numPixels; i++) { // Swap red and blue data.WriteByte(b[2]); data.WriteByte(b[1]); data.WriteByte(b[0]); data.WriteByte(b[3]); b += 4; } bmp.UnlockBits(bd); } MemoryStream cdata = new MemoryStream(); DeflaterOutputStream dos = new DeflaterOutputStream(cdata); dos.Write(data.GetBuffer(), 0, (int)data.Position); dos.Finish(); string cdata64 = Convert.ToBase64String(cdata.GetBuffer(), 0, (int)cdata.Length); data.Close(); dos.Close(); frameNode.AddChild( new DataNode("data") .AddChild(new DataNode("format").AddChild("zlib")) .AddChild(cdata64) ); rootNode.Write(stream); }