/// <summary> /// Compresses a byte array with GZip. /// </summary> /// <param name="data">The raw bytes to compress.</param> /// <returns>The GZip-compressed bytes.</returns> public static byte[] Compress(byte[] data) { using (var compressedStream = new MemoryStream()) using (var zipStream = new GZipStream(compressedStream, CompressionMode.Compress)) { zipStream.Write(data, 0, data.Length); zipStream.Close(); // closing the GZipStream flushes the final block and gzip footer return compressedStream.ToArray(); } }
public static byte[] Compress(byte[] b) { using (MemoryStream ms = new MemoryStream()) { using (GZipStream zs = new GZipStream(ms, CompressionMode.Compress, true)) { zs.Write(b, 0, b.Length); } // the GZipStream must be closed before ToArray(), otherwise the output is truncated return ms.ToArray(); } }
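The byte[] Compress helpers above and below all need a matching decompressor. A minimal sketch of the reverse direction (the name Decompress and the use of Stream.CopyTo are illustrative, not taken from any snippet here):

public static byte[] Decompress(byte[] compressed)
{
    using (var input = new MemoryStream(compressed))
    using (var zip = new GZipStream(input, CompressionMode.Decompress))
    using (var output = new MemoryStream())
    {
        zip.CopyTo(output); // CopyTo handles the read loop and partial reads
        return output.ToArray();
    }
}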
public static void Compress(string text, string outfile) { byte[] buffer = Encoding.UTF8.GetBytes(text); MemoryStream ms = new MemoryStream(); using (GZipStream zip = new GZipStream(ms, CompressionMode.Compress, true)) { zip.Write(buffer, 0, buffer.Length); } ms.Position = 0; byte[] compressed = new byte[ms.Length]; ms.Read(compressed, 0, compressed.Length); byte[] gzBuffer = new byte[compressed.Length + 4]; System.Buffer.BlockCopy(compressed, 0, gzBuffer, 4, compressed.Length); System.Buffer.BlockCopy(BitConverter.GetBytes(buffer.Length), 0, gzBuffer, 0, 4); string MimiBase64 = Convert.ToBase64String (gzBuffer); File.WriteAllText (outfile, MimiBase64); Console.WriteLine ("Base64 string saved as "+outfile+"\n"); //return Convert.ToBase64String (gzBuffer); }
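Because the method above prepends the uncompressed length as a 4-byte little-endian prefix before Base64-encoding, a matching reader must strip that prefix first. A minimal sketch under that assumption (Decompress is an illustrative name):

public static string Decompress(string infile)
{
    byte[] gzBuffer = Convert.FromBase64String(File.ReadAllText(infile));
    int length = BitConverter.ToInt32(gzBuffer, 0); // the 4-byte prefix written by Compress
    using (var ms = new MemoryStream(gzBuffer, 4, gzBuffer.Length - 4))
    using (var zip = new GZipStream(ms, CompressionMode.Decompress))
    {
        byte[] buffer = new byte[length];
        int total = 0, read;
        while (total < length && (read = zip.Read(buffer, total, length - total)) > 0) { total += read; }
        return Encoding.UTF8.GetString(buffer, 0, total);
    }
}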
/// <summary> /// Yields an Enumerable list of paths to GZTestData files /// </summary> public static IEnumerable<object[]> CompressedFiles() { if (_compressedFiles == null) { _compressedFiles = new List<object[]>(); // Crypto random data byte[] bytes = new byte[100000000]; var rand = RandomNumberGenerator.Create(); rand.GetBytes(bytes); string filePath = PerfUtils.GetTestFilePath() + ".gz"; using (FileStream output = File.Create(filePath)) using (GZipStream zip = new GZipStream(output, CompressionMode.Compress)) zip.Write(bytes, 0, bytes.Length); _compressedFiles.Add(new object[] { filePath }); // Create a compressed file with repeated segments bytes = System.Text.Encoding.UTF8.GetBytes(PerfUtils.CreateString(100000)); filePath = PerfUtils.GetTestFilePath() + ".gz"; using (FileStream output = File.Create(filePath)) using (GZipStream zip = new GZipStream(output, CompressionMode.Compress)) for (int i = 0; i < 1000; i++) zip.Write(bytes, 0, bytes.Length); _compressedFiles.Add(new object[] { filePath }); } return _compressedFiles; }
private static void SliceGZip(string sourceFile, string destinationDir, int numParts) { FileInfo fileInfo = new FileInfo(sourceFile); int size = (int)fileInfo.Length; int partSize = size / numParts; int lastPartSize = size - (numParts - 1) * partSize; string extension = fileInfo.Extension.TrimStart('.'); // safer than Split('.') for names with no dot or several dots using (FileStream source = new FileStream(sourceFile, FileMode.Open)) { for (int i = 0; i < numParts; i++) { int currentSize = (i == (numParts - 1)) ? lastPartSize : partSize; byte[] buffer = new byte[currentSize]; int read = 0; while (read < currentSize) { int n = source.Read(buffer, read, currentSize - read); if (n == 0) { break; } read += n; } // Stream.Read may return fewer bytes than requested string currentFilePath = destinationDir + "Part-" + i + "." + extension + ".gz"; using (FileStream dest = new FileStream(currentFilePath, FileMode.Create)) using (GZipStream gzipDest = new GZipStream(dest, CompressionMode.Compress, false)) gzipDest.Write(buffer, 0, read); } } }
public static void SplitFile(string inputFile, int parts, string path) { byte[] buffer = new byte[4096]; using (Stream originalFile = File.OpenRead(inputFile)) { int index = 1; while (originalFile.Position < originalFile.Length) { using (Stream compressedFile = File.Create(path + "\\" + index + ".gz")) { using (GZipStream compression = new GZipStream(compressedFile, CompressionMode.Compress)) { int chunkBytesRead = 0; while (chunkBytesRead < originalFile.Length / parts) { int bytesRead = originalFile.Read(buffer, 0, buffer.Length); if (bytesRead == 0) { break; } chunkBytesRead += bytesRead; compression.Write(buffer, 0, bytesRead); } } } index++; } } }
static void Main(string[] args) { if (args.Length < 1) { usage(); return; } else { string inputFile = args[0]; string outputFile = inputFile + ".gz"; try { // Get bytes from the input file (ReadAllBytes avoids a short read from a single Stream.Read call) byte[] buffer = File.ReadAllBytes(Path.Combine(Environment.CurrentDirectory, inputFile)); // Create GZip file stream and compress input bytes using (FileStream outFileStream = new FileStream(Path.Combine(Environment.CurrentDirectory, outputFile), FileMode.Create)) using (GZipStream compressedStream = new GZipStream(outFileStream, CompressionMode.Compress)) { compressedStream.Write(buffer, 0, buffer.Length); } Console.WriteLine("The file has been compressed. UR Da Bomb!!!"); } catch (FileNotFoundException) { Console.WriteLine("Error: Specified file cannot be found."); } } }
public static byte[] Compress(byte[] data) { MemoryStream output = new MemoryStream(); GZipStream gzip = new GZipStream(output, CompressionMode.Compress, true); gzip.Write(data, 0, data.Length); gzip.Close(); return output.ToArray(); }
public static void Slice(string sourceFile, string destinationDirectory, int parts) { using (var source = new FileStream(sourceFile, FileMode.Open)) { byte[] buffer = new byte[4096]; List<byte> bytes = new List<byte>(); while (true) { int readBytes = source.Read(buffer, 0, buffer.Length); if (readBytes == 0) { break; } for (int i = 0; i < readBytes; i++) { bytes.Add(buffer[i]); } } int partSize = bytes.Count / parts; int leftOver = bytes.Count - partSize * parts; for (int i = 0; i < parts; i++) { var newFile = destinationDirectory + "part-" + i + ".gz"; using (var copy = new FileStream(newFile, FileMode.Create)) { using (var compressionStream = new GZipStream(copy, CompressionMode.Compress, false)) { if (i == parts - 1) { compressionStream.Write(bytes.ToArray(), i * partSize, partSize + leftOver); } else { compressionStream.Write(bytes.ToArray(), i * partSize, partSize); } } } } } }
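The Slice/Split helpers in this section all share one recipe: carve the source into chunks and wrap each output part in its own GZipStream. Reassembly is therefore symmetric; decompress each part in order and append it to the destination (a sketch; Assemble is a hypothetical name):

static void Assemble(IEnumerable<string> partFiles, string destinationFile)
{
    using (var output = new FileStream(destinationFile, FileMode.Create))
    {
        foreach (string part in partFiles)
        {
            using (var input = new FileStream(part, FileMode.Open))
            using (var gz = new GZipStream(input, CompressionMode.Decompress))
            {
                gz.CopyTo(output); // each part is an independent gzip stream
            }
        }
    }
}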
public static byte[] Compress(this byte[] bytes) { using (MemoryStream ms = new MemoryStream()) { GZipStream gzip = new GZipStream(ms, CompressionMode.Compress); gzip.Write(bytes, 0, bytes.Length); gzip.Close(); return ms.ToArray(); } }
/// <summary> /// A string extension method that compresses the given string to a GZip byte array. /// </summary> /// <param name="stringToCompress">The stringToCompress to act on.</param> /// <returns>The string compressed into a GZip byte array.</returns> /// <example> /// <code> /// using System; /// using Microsoft.VisualStudio.TestTools.UnitTesting; /// using Z.ExtensionMethods; /// /// namespace ExtensionMethods.Examples /// { /// [TestClass] /// public class System_String_CompressGZip /// { /// [TestMethod] /// public void CompressGZip() /// { /// // Type /// string @this = "FizzBuzz"; /// /// // Examples /// var result = @this.CompressGZip(); /// /// // Unit Test /// Assert.AreEqual("FizzBuzz", result.DecompressGZip()); /// } /// } /// } /// </code> /// </example> public static byte[] CompressGZip(this string stringToCompress) { byte[] stringAsBytes = Encoding.Default.GetBytes(stringToCompress); using (var memoryStream = new MemoryStream()) { using (var zipStream = new GZipStream(memoryStream, CompressionMode.Compress)) { zipStream.Write(stringAsBytes, 0, stringAsBytes.Length); zipStream.Close(); return (memoryStream.ToArray()); } } }
public static Byte[] CompressArray(Byte[] inBuf) { Logger.Enter(); MemoryStream memory = new MemoryStream(); using (GZipStream gzip = new GZipStream(memory, CompressionMode.Compress)) { gzip.Write(inBuf, 0, inBuf.Length); } Logger.Leave(); return memory.ToArray(); }
static void SliceCompress(string sourceFile, string destinationDirectory, int parts) { using (FileStream input = new FileStream(sourceFile, FileMode.Open)) { string extension = ".gz"; byte[] buffer = new byte[input.Length / parts]; long bytesRead = 0; for (int part = 1; part <= parts; part++) { using (FileStream output = new FileStream(destinationDirectory + @"File Part " + part + extension, FileMode.Create)) { filesToCombine.Add(destinationDirectory + @"File Part " + part + extension); using (GZipStream outputCompressed = new GZipStream(output, CompressionMode.Compress, false)) { if (part < parts) { int readBytes = input.Read(buffer, 0, buffer.Length); bytesRead += readBytes; if (readBytes == 0) { break; } outputCompressed.Write(buffer, 0, readBytes); } else { while (true) { int readBytes = input.Read(buffer, 0, buffer.Length); bytesRead += readBytes; if (readBytes == 0) { break; } outputCompressed.Write(buffer, 0, readBytes); } } } } } } }
/// <summary> /// Compresses this instance. /// </summary> public void Compress() { try { var binarydata = File.ReadAllBytes(FileInfo.FullName); if (binarydata?.Any() == true) { var length = binarydata.Length; using var zipper = new GZipStream(FileStream, CompressionMode.Compress); zipper.Write(binarydata, 0, length); } } catch (IOException ex) { Fail(ex); } }
public static void saveStream(Stream stream, String location) { using (FileStream outStream = File.Create(location)) using (GZipStream compress = new GZipStream(outStream, CompressionMode.Compress, false)) { byte[] buffer = new byte[4096]; // a fixed-size buffer also works for non-seekable streams, where stream.Length throws int read = stream.Read(buffer, 0, buffer.Length); while (read > 0) { compress.Write(buffer, 0, read); read = stream.Read(buffer, 0, buffer.Length); } } stream.Close(); }
public static void CompressFile(string toCompressFileName,string targetFileName,bool IsDeleteSourceFile) { FileStream reader; reader = File.Open(toCompressFileName, FileMode.Open); FileStream writer; writer = File.Create(targetFileName); // Streams used for compression MemoryStream ms = new MemoryStream(); GZipStream zipStream = new GZipStream(ms, CompressionMode.Compress, true); // Write data into the compression stream byte[] sourceBuffer = new byte[reader.Length]; reader.Read(sourceBuffer, 0, sourceBuffer.Length); zipStream.Write(sourceBuffer, 0, sourceBuffer.Length); // The compression stream must be closed before reading from the memory stream zipStream.Close(); zipStream.Dispose(); // Read the compressed data back out of the memory stream ms.Position = 0; // Note: do not omit this line byte[] destBuffer = new byte[ms.Length]; ms.Read(destBuffer, 0, destBuffer.Length); writer.Write(destBuffer, 0, destBuffer.Length); // Close and release the memory stream ms.Close(); ms.Dispose(); // Close and release the file streams writer.Close(); writer.Dispose(); reader.Close(); reader.Dispose(); if (IsDeleteSourceFile) { File.Delete(toCompressFileName); } }
private static void SliceFile(int parts) { using (var source = new FileStream(filePath, FileMode.Open)) { long sliceSize = source.Length / parts; long leftOver = source.Length - sliceSize * parts; for (int i = 0; i < parts; i++) { using (var destination = new FileStream(string.Format("../../Part-{0}.txt", i), FileMode.Create)) { using (var zip = new GZipStream(destination, CompressionMode.Compress, false)) { sliceSize = (i < parts - 1) ? sliceSize : sliceSize + leftOver; var buffer = new byte[sliceSize]; int total = 0; while (total < buffer.Length) { int n = source.Read(buffer, total, buffer.Length - total); if (n == 0) { break; } total += n; } // loop until the slice is full; a single Read call may return early zip.Write(buffer, 0, total); } } } } }
/// <returns></returns> private static string CreateCompressedFile(CompressionType type) { const int fileSize = 100000000; PerfUtils utils = new PerfUtils(); string filePath = utils.GetTestFilePath() + ".gz"; switch (type) { case CompressionType.CryptoRandom: using (RandomNumberGenerator rand = RandomNumberGenerator.Create()) { byte[] bytes = new byte[fileSize]; rand.GetBytes(bytes); using (FileStream output = File.Create(filePath)) using (GZipStream zip = new GZipStream(output, CompressionMode.Compress)) zip.Write(bytes, 0, bytes.Length); } break; case CompressionType.RepeatedSegments: { byte[] bytes = new byte[fileSize / 1000]; new Random(128453).NextBytes(bytes); using (FileStream output = File.Create(filePath)) using (GZipStream zip = new GZipStream(output, CompressionMode.Compress)) for (int i = 0; i < 1000; i++) zip.Write(bytes, 0, bytes.Length); } break; case CompressionType.NormalData: { byte[] bytes = new byte[fileSize]; new Random(128453).NextBytes(bytes); using (FileStream output = File.Create(filePath)) using (GZipStream zip = new GZipStream(output, CompressionMode.Compress)) zip.Write(bytes, 0, bytes.Length); } break; } return filePath; }
static void SplitAndZipFile(string source, string destination, int parts) { using (var sourceFile = new FileStream(source, FileMode.Open)) { long partSize = (long)Math.Ceiling((double)sourceFile.Length / parts); string partName; Regex regex = new Regex(@"\.(?<=\.)(\w+)"); type = Convert.ToString(regex.Match(source)); long sizeEnd = sourceFile.Length; for (int i = 0; i < parts; i++) { partName = "Part-" + (i + 1); filesList.Add(destination + partName); using (var partedFile = new FileStream(destination + partName + ".gz", FileMode.Create)) { Console.WriteLine(partName); using (var compressionStream = new GZipStream(partedFile, CompressionMode.Compress, false)) { byte[] buffer = new byte[partSize]; int readBytes = sourceFile.Read(buffer, 0, buffer.Length); compressionStream.Write(buffer, 0, readBytes); // write only the bytes actually read; writing buffer.Length would pad the last part with zeros } } sizeEnd = sourceFile.Length - (i * partSize); if (sizeEnd < partSize) { partSize = sizeEnd; } } } }
static void Slice(string sourceFile, string destinationDirectory, int parts) { byte[] buffer = new byte[4096]; using (FileStream readingStream = new FileStream(sourceFile, FileMode.Open)) { long size = readingStream.Length / 4096; size = size / parts; int readBytes; for (int i = 0; i < parts - 1; i++) { using (FileStream writingStream = new FileStream(destinationDirectory + "\\Part-" + i + ".gz", FileMode.Create)) { using (GZipStream gz = new GZipStream(writingStream, CompressionMode.Compress, false)) { for (int j = 0; j < size; j++) { readBytes = readingStream.Read(buffer, 0, buffer.Length); gz.Write(buffer, 0, readBytes); } } } } using (FileStream writingStream = new FileStream(destinationDirectory + "\\Part-" + (parts - 1) + ".gz", FileMode.Create)) { using (GZipStream gz = new GZipStream(writingStream, CompressionMode.Compress, false)) { while ((readBytes = readingStream.Read(buffer, 0, buffer.Length)) != 0) { gz.Write(buffer, 0, readBytes); } } } } }
public static void Compress(FileInfo fileToCompress, String szOutFile) { using (FileStream originalFileStream = fileToCompress.OpenRead()) { using (FileStream archFileStream = File.Create(szOutFile)) { using (GZipStream archStream = new GZipStream(archFileStream, CompressionMode.Compress)) { byte[] buffer = new byte[1024]; int nRead; while ((nRead = originalFileStream.Read(buffer, 0, buffer.Length)) > 0) { archStream.Write(buffer, 0, nRead); } Logger.Info(string.Format("Compressed {0} from {1} to {2} bytes.", fileToCompress.Name , fileToCompress.Length.ToString() , archFileStream.Length.ToString() )); } } } }
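The buffered read loop above is the canonical file-to-file pattern. Going the other way is symmetric (a minimal sketch; Decompress is an illustrative name):

public static void Decompress(string szInFile, string szOutFile)
{
    using (FileStream archFileStream = File.OpenRead(szInFile))
    using (GZipStream archStream = new GZipStream(archFileStream, CompressionMode.Decompress))
    using (FileStream originalFileStream = File.Create(szOutFile))
    {
        byte[] buffer = new byte[1024];
        int nRead;
        while ((nRead = archStream.Read(buffer, 0, buffer.Length)) > 0)
        {
            originalFileStream.Write(buffer, 0, nRead);
        }
    }
}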
private static void Slice(string sourceFile, string destinationDirectory, int parts) { using (var source = new FileStream(sourceFile, FileMode.Open)) { long partSize = (long)Math.Ceiling((double)source.Length / parts); // The offset at which to start reading from the source file long fileOffset = 0; string currPartPath; FileStream fsPart; long sizeRemaining = source.Length; // extracting name and extension of the input file string pattern = @"(\w+)(?=\.)\.(?<=\.)(\w+)"; Regex pairs = new Regex(pattern); matches = pairs.Matches(sourceFile); for (int i = 0; i < parts; i++) { currPartPath = destinationDirectory + matches[0].Groups[1] + String.Format(@"-{0}", i) + "." + "gz"; files.Add(currPartPath); // reading one part size using (fsPart = new FileStream(currPartPath, FileMode.Create)) { using (var compressionStream = new GZipStream(fsPart, CompressionMode.Compress, false)) { long currentPieceSize = 0; byte[] buffer = new byte[4096]; while (currentPieceSize < partSize) { int readBytes = source.Read(buffer, 0, buffer.Length); if (readBytes == 0) { break; } // creating one part size file compressionStream.Write(buffer, 0, readBytes); currentPieceSize += readBytes; } } } // calculating the remaining file size which is still to be read sizeRemaining = (int)source.Length - (i * partSize); if (sizeRemaining < partSize) { partSize = sizeRemaining; } fileOffset += partSize; } } }
private async Task <IActionResult> GetTemplateCompress(int hubbubid, bool compress, IStatelessSession session) { ICriterion expression = Restrictions.Eq("Id", hubbubid); var hubbub = await session.CreateCriteria <ModbusHubbub>(). Add(expression) .UniqueResultAsync <ModbusHubbub>(); if (hubbub == null) { return(BadRequest(ApiResult.BadRequest("The specified power collection device ID does not exist"))); } ModbusHubbubMappingTemplate row = new ModbusHubbubMappingTemplate(); row.Hubbub = hubbub; row.ConnectionInfo = await GetIdentifiedObjectAsync <ModbusConnectionInfo>(hubbub.Connectionid, session); row.ModbusInputPointList = await GetDataByHubbubIdAsync <VwModbusInputPoint>(hubbub.Id, session); row.ModbusDigitalOutputPoints = await GetDataByHubbubIdAsync <VwDigitalOutputPoint>(hubbub.Id, session); row.ModbusDigitalStatusPoints = await GetDataByHubbubIdAsync <ModbusDigitalStatusPoint>(hubbub.Id, session); row.StandardAnalogPoints = await session.CreateCriteria <VwStandardAnalogPoint>() .Add(Restrictions.Eq("Disable", (SByte)0)) .ListAsync <VwStandardAnalogPoint>(); row.StandardPcsStatuses = await session.CreateCriteria <VwStandardPcsStatusPoint>().ListAsync <VwStandardPcsStatusPoint>(); //row.AnalogInputPoints = await GetGroupPointsAsync<VwAnalogPoint>(hubbub.Aigroupid, session); //row.DigitalInputGroup = await GetIdentifiedObjectAsync<DigitalInputGroup>(hubbub.Digroupid, session); //row.DigitalOutputGroup = await GetIdentifiedObjectAsync<DigitalOutputGroup>(hubbub.Dogroupid, session); //row.DigitalStatusGroup = await GetIdentifiedObjectAsync<DigitalStatusGroup>(hubbub.Stgroupid, session); //row.DigitalInputPoints = await GetGroupPointsAsync<ModbusDigitalInputPoint>(hubbub.Digroupid, session); //row.DigitalOutputPoints = await GetGroupPointsAsync<ModbusDigitalOutputPoint>(hubbub.Dogroupid, session); //row.DigitalStatusPoints = await GetGroupPointsAsync<ModbusDigitalStatusPoint>(hubbub.Stgroupid, session); JObject obj = JObject.FromObject(row); if (compress) { string ori_str = obj.ToString(); byte[] ori_bytes = Encoding.UTF8.GetBytes(ori_str); using (var outputStream = new MemoryStream()) { using (var gZipStream = new GZipStream(outputStream, CompressionMode.Compress)) { gZipStream.Write(ori_bytes, 0, ori_bytes.Length); } byte[] com_bytes = outputStream.ToArray(); string com_str_base64 = Convert.ToBase64String(com_bytes); return(Ok(com_str_base64)); } } else { return(Ok(row)); } }
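A client consuming the compressed response has to reverse both layers, Base64 first and then GZip, before the JSON is readable. A minimal client-side sketch (the method name is illustrative):

static string DecodeCompressedTemplate(string base64)
{
    byte[] comBytes = Convert.FromBase64String(base64);
    using (var input = new MemoryStream(comBytes))
    using (var gzip = new GZipStream(input, CompressionMode.Decompress))
    using (var reader = new StreamReader(gzip, Encoding.UTF8))
    {
        return reader.ReadToEnd(); // the original JSON text
    }
}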
static void GenerateBundles(List <string> files) { string temp_s = "temp.s"; // Path.GetTempFileName (); string temp_c = "temp.c"; string temp_o = "temp.o"; if (compile_only) { temp_c = output; } if (object_out != null) { temp_o = object_out; } try { List <string> c_bundle_names = new List <string> (); List <string[]> config_names = new List <string[]> (); byte [] buffer = new byte [8192]; using (StreamWriter ts = new StreamWriter(File.Create(temp_s))) { using (StreamWriter tc = new StreamWriter(File.Create(temp_c))) { string prog = null; #if XAMARIN_ANDROID tc.WriteLine("/* This source code was produced by mkbundle, do not edit */"); tc.WriteLine("\n#ifndef NULL\n#define NULL (void *)0\n#endif"); tc.WriteLine(@" typedef struct { const char *name; const unsigned char *data; const unsigned int size; } MonoBundledAssembly; void mono_register_bundled_assemblies (const MonoBundledAssembly **assemblies); void mono_register_config_for_assembly (const char* assembly_name, const char* config_xml); "); #else tc.WriteLine("#include <mono/metadata/mono-config.h>"); tc.WriteLine("#include <mono/metadata/assembly.h>\n"); #endif if (compress) { tc.WriteLine("typedef struct _compressed_data {"); tc.WriteLine("\tMonoBundledAssembly assembly;"); tc.WriteLine("\tint compressed_size;"); tc.WriteLine("} CompressedAssembly;\n"); } object monitor = new object(); var streams = new Dictionary <string, Stream> (); var sizes = new Dictionary <string, long> (); // Do the file reading and compression in parallel Action <string> body = delegate(string url) { string fname = new Uri(url).LocalPath; Stream stream = File.OpenRead(fname); long real_size = stream.Length; int n; if (compress) { MemoryStream ms = new MemoryStream(); GZipStream deflate = new GZipStream(ms, CompressionMode.Compress, leaveOpen: true); while ((n = stream.Read(buffer, 0, buffer.Length)) != 0) { deflate.Write(buffer, 0, n); } stream.Close(); deflate.Close(); byte [] bytes = ms.GetBuffer(); stream = new MemoryStream(bytes, 0, (int)ms.Length, false, false); } lock (monitor) { streams [url] = stream; sizes [url] = real_size; } }; //#if NET_4_5 #if FALSE Parallel.ForEach(files, body); #else foreach (var url in files) { body(url); } #endif // The non-parallel part foreach (var url in files) { string fname = new Uri(url).LocalPath; string aname = Path.GetFileName(fname); string encoded = aname.Replace("-", "_").Replace(".", "_"); if (prog == null) { prog = aname; } var stream = streams [url]; var real_size = sizes [url]; Console.WriteLine(" embedding: " + fname); WriteSymbol(ts, "assembly_data_" + encoded, stream.Length); WriteBuffer(ts, stream, buffer); if (compress) { tc.WriteLine("extern const unsigned char assembly_data_{0} [];", encoded); tc.WriteLine("static CompressedAssembly assembly_bundle_{0} = {{{{\"{1}\"," + " assembly_data_{0}, {2}}}, {3}}};", encoded, aname, real_size, stream.Length); double ratio = ((double)stream.Length * 100) / real_size; Console.WriteLine(" compression ratio: {0:.00}%", ratio); } else { tc.WriteLine("extern const unsigned char assembly_data_{0} [];", encoded); tc.WriteLine("static const MonoBundledAssembly assembly_bundle_{0} = {{\"{1}\", assembly_data_{0}, {2}}};", encoded, aname, real_size); } stream.Close(); c_bundle_names.Add("assembly_bundle_" + encoded); try { FileStream cf = File.OpenRead(fname + ".config"); Console.WriteLine(" config from: " + fname + ".config"); tc.WriteLine("extern const unsigned char assembly_config_{0} [];", encoded); WriteSymbol(ts, "assembly_config_" + encoded, cf.Length); WriteBuffer(ts, 
cf, buffer); ts.WriteLine(); config_names.Add(new string[] { aname, encoded }); } catch (FileNotFoundException) { /* we ignore if the config file doesn't exist */ } } if (config_file != null) { FileStream conf; try { conf = File.OpenRead(config_file); } catch { Error(String.Format("Failure to open {0}", config_file)); return; } Console.WriteLine("System config from: " + config_file); tc.WriteLine("extern const char system_config;"); WriteSymbol(ts, "system_config", config_file.Length); WriteBuffer(ts, conf, buffer); // null terminator ts.Write("\t.byte 0\n"); ts.WriteLine(); } if (machine_config_file != null) { FileStream conf; try { conf = File.OpenRead(machine_config_file); } catch { Error(String.Format("Failure to open {0}", machine_config_file)); return; } Console.WriteLine("Machine config from: " + machine_config_file); tc.WriteLine("extern const char machine_config;"); WriteSymbol(ts, "machine_config", machine_config_file.Length); WriteBuffer(ts, conf, buffer); ts.Write("\t.byte 0\n"); ts.WriteLine(); } ts.Close(); Console.WriteLine("Compiling:"); string cmd = String.Format("{0} -o {1} {2} ", GetEnv("AS", "as"), temp_o, temp_s); int ret = Execute(cmd); if (ret != 0) { Error("[Fail]"); return; } if (compress) { tc.WriteLine("\nstatic const CompressedAssembly *compressed [] = {"); } else { tc.WriteLine("\nstatic const MonoBundledAssembly *bundled [] = {"); } foreach (string c in c_bundle_names) { tc.WriteLine("\t&{0},", c); } tc.WriteLine("\tNULL\n};\n"); tc.WriteLine("static char *image_name = \"{0}\";", prog); tc.WriteLine("\nstatic void install_dll_config_files (void) {\n"); foreach (string[] ass in config_names) { tc.WriteLine("\tmono_register_config_for_assembly (\"{0}\", assembly_config_{1});\n", ass [0], ass [1]); } if (config_file != null) { tc.WriteLine("\tmono_config_parse_memory (&system_config);\n"); } if (machine_config_file != null) { tc.WriteLine("\tmono_register_machine_config (&machine_config);\n"); } tc.WriteLine("}\n"); if (config_dir != null) { tc.WriteLine("static const char *config_dir = \"{0}\";", config_dir); } else { tc.WriteLine("static const char *config_dir = NULL;"); } Stream template_stream; if (compress) { template_stream = System.Reflection.Assembly.GetAssembly(typeof(MakeBundle)).GetManifestResourceStream("template_z.c"); } else { template_stream = System.Reflection.Assembly.GetAssembly(typeof(MakeBundle)).GetManifestResourceStream("template.c"); } StreamReader s = new StreamReader(template_stream); string template = s.ReadToEnd(); tc.Write(template); if (!nomain) { Stream template_main_stream = System.Reflection.Assembly.GetAssembly(typeof(MakeBundle)).GetManifestResourceStream("template_main.c"); StreamReader st = new StreamReader(template_main_stream); string maintemplate = st.ReadToEnd(); tc.Write(maintemplate); } tc.Close(); if (compile_only) { return; } string zlib = (compress ? "-lz" : ""); string debugging = "-g"; string cc = GetEnv("CC", IsUnix ? 
"cc" : "gcc -mno-cygwin"); if (style == "linux") { debugging = "-ggdb"; } if (static_link) { string smonolib; if (style == "osx") { smonolib = "`pkg-config --variable=libdir mono-2`/libmono-2.0.a "; } else { smonolib = "-Wl,-Bstatic -lmono-2.0 -Wl,-Bdynamic "; } cmd = String.Format("{4} -o {2} -Wall `pkg-config --cflags mono-2` {0} {3} " + "`pkg-config --libs-only-L mono-2` " + smonolib + "`pkg-config --libs-only-l mono-2 | sed -e \"s/\\-lmono-2.0 //\"` {1}", temp_c, temp_o, output, zlib, cc); } else { cmd = String.Format("{4} " + debugging + " -o {2} -Wall {0} `pkg-config --cflags --libs mono-2` {3} {1}", temp_c, temp_o, output, zlib, cc); } ret = Execute(cmd); if (ret != 0) { Error("[Fail]"); return; } Console.WriteLine("Done"); } } } finally { if (!keeptemp) { if (object_out == null) { File.Delete(temp_o); } if (!compile_only) { File.Delete(temp_c); } File.Delete(temp_s); } } }
// move this to the Compression project /// <summary> /// Gzips the specified input. /// </summary> /// <param name="input"> The input. </param> /// <returns> The GZip-compressed bytes. </returns> /// <remarks> The (Stream, CompressionMode, CompressionLevel) constructor and CompressionLevel.BestCompression come from Ionic.Zlib (DotNetZip) rather than System.IO.Compression. </remarks> public static byte[] Gzip(this string input) { var memStream = new MemoryStream(); var bytes = input.ToByteArray(); // convert once instead of calling ToByteArray() twice using (var gzStr = new GZipStream(memStream, CompressionMode.Compress, CompressionLevel.BestCompression)) { gzStr.Write(bytes, 0, bytes.Length); } return memStream.ToArray(); }
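With the BCL's System.IO.Compression types, the nearest equivalent passes a CompressionLevel straight to the constructor. A sketch (GzipOptimal is a hypothetical name, and UTF-8 is assumed for the string-to-bytes step):

public static byte[] GzipOptimal(string input)
{
    byte[] bytes = Encoding.UTF8.GetBytes(input);
    using (var memStream = new MemoryStream())
    {
        using (var gzStr = new GZipStream(memStream, System.IO.Compression.CompressionLevel.Optimal)) // BCL overload: (Stream, CompressionLevel)
        {
            gzStr.Write(bytes, 0, bytes.Length);
        }
        return memStream.ToArray();
    }
}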
public static void PrepareKernel(string directory, byte[] kernelLookup, bool[] options, Random rnd, int seed) { string kernelDirectory = directory + "\\kernel\\"; // The battle folder where scene.bin resides string targetKernel = kernelDirectory + "KERNEL.bin"; // The kernel.bin for updating the lookup table string backupKernel = targetKernel + "Backup"; int[][] jaggedKernelInfo = new int[27][]; // An array of arrays, containing compressed size, uncompressed size, section ID ArrayList listedKernelData = new ArrayList(); // Contains all the compressed kernel section data byte[] header = new byte[4]; //Retrieves header information for conversion to int int compressedSize; // Stores the compressed size of the file int uncompressedSize; // Stores the uncompressed size of the file int sectionID; // Stores the section ID of the file int offset = 0; // Tracks where we are in the kernel.bin int headerOffset = 0; // Stores the absolute offset value for each section's header (updated on each loop) int r = 0; // ro ro int o = 0; // fight the powah // Step 1: Read the kernel headers while (r < 27) // 27 sections in the kernel { // Opens and reads the headers in the kernel.bin FileStream stepOne = new FileStream(targetKernel, FileMode.Open, FileAccess.Read); stepOne.Seek(headerOffset, SeekOrigin.Begin); stepOne.Read(header, 0, 2); // Header never exceeds 64 bytes compressedSize = AllMethods.GetLittleEndianInt(header, 0); stepOne.Read(header, 0, 2); // Header never exceeds 64 bytes uncompressedSize = AllMethods.GetLittleEndianInt(header, 0); stepOne.Read(header, 0, 2); // Header never exceeds 64 bytes sectionID = AllMethods.GetLittleEndianInt(header, 0); // Stored kernel header information in a jaggy array jaggedKernelInfo[o] = new int[] { compressedSize, uncompressedSize, sectionID }; stepOne.Close(); headerOffset += compressedSize + 6; r++; o++; stepOne.Close(); } r = 0; o = 0; // Step 2: Get the compressed data, uncompress it, and then randomise it while (r < 27) { int bytesRead; int size = jaggedKernelInfo[o][1]; byte[] uncompressedKernel = new byte[size]; // Used to hold the decompressed kernel section using (BinaryReader brg = new BinaryReader(new FileStream(targetKernel, FileMode.Open))) { // Calls method to convert little endian values into an integer byte[] compressedKernel = new byte[jaggedKernelInfo[o][0]]; // Used to hold the compressed scene file, where [o][1] is scene size brg.BaseStream.Seek(offset + 6, SeekOrigin.Begin); // Starts reading the compressed scene file brg.Read(compressedKernel, 0, compressedKernel.Length); using (MemoryStream inputWrapper = new MemoryStream(compressedKernel)) { using (MemoryStream decompressedOutput = new MemoryStream()) { using (GZipStream zipInput = new GZipStream(inputWrapper, CompressionMode.Decompress, true)) { while ((bytesRead = zipInput.Read(uncompressedKernel, 0, size)) != 0) { decompressedOutput.Write(uncompressedKernel, 0, bytesRead); } zipInput.Close(); } decompressedOutput.Close(); } inputWrapper.Close(); } brg.Close(); } // Sends decompressed scene data to be randomised by section switch (r) { case 0: Kernel.RandomiseSection0(uncompressedKernel, options, rnd, seed); break; case 1: Kernel.RandomiseSection1(uncompressedKernel, options, rnd, seed); break; case 2: Kernel.RandomiseSection2(uncompressedKernel, options, rnd, seed, kernelLookup); break; case 3: Kernel.RandomiseSection3(uncompressedKernel, options, rnd, seed); break; case 4: Kernel.RandomiseSection4(uncompressedKernel, options, rnd, seed); break; case 5: 
Kernel.RandomiseSection5(uncompressedKernel, options, rnd, seed); break; case 6: Kernel.RandomiseSection6(uncompressedKernel, options, rnd, seed); break; case 7: Kernel.RandomiseSection7(uncompressedKernel, options, rnd, seed); break; case 8: Kernel.RandomiseSection8(uncompressedKernel, options, rnd, seed); break; } // Recompress the altered uncompressed data back into GZip byte[] recompressedKernel; using (var result = new MemoryStream()) { using (var compressionStream = new GZipStream(result, CompressionMode.Compress)) { compressionStream.Write(uncompressedKernel, 0, uncompressedKernel.Length); compressionStream.Close(); } recompressedKernel = result.ToArray(); result.Close(); } // Offset is updated for the next pass before we write in our new value offset += jaggedKernelInfo[o][0] + 6; // The size is updated with the newly compressed/padded scene's length jaggedKernelInfo[o][0] = recompressedKernel.Length; // Byte array is added to the ArrayList listedKernelData.Add(recompressedKernel); r++; o++; } r = 0; o = 0; // Step 3: Rebuilding the Kernel.bin using (var outputStream = File.Create(targetKernel)) { // Loops until all 27 sections are headered and written while (r < 27) { // Write the header first byte[] bytes = new byte[2]; byte[] kernelHead = new byte[6]; ulong comSize = (ulong)jaggedKernelInfo[o][0]; ulong uncomSize = (ulong)jaggedKernelInfo[o][1]; ulong sectID = (ulong)jaggedKernelInfo[o][2]; bytes = AllMethods.GetLittleEndianConvert(comSize); kernelHead[0] = bytes[0]; kernelHead[1] = bytes[1]; bytes = AllMethods.GetLittleEndianConvert(uncomSize); kernelHead[2] = bytes[0]; kernelHead[3] = bytes[1]; bytes = AllMethods.GetLittleEndianConvert(sectID); kernelHead[4] = bytes[0]; kernelHead[5] = bytes[1]; // Takes the header data, converts it into a stream, and then appends it to the file-in-progress outputStream.Position = outputStream.Length; outputStream.Write(kernelHead, 0, kernelHead.Length); // Takes the byte data from the ArrayList, converts it into a stream, and then appends it to the file-in-progress byte[] kernelData = (byte[])listedKernelData[o]; outputStream.Position = outputStream.Length; outputStream.Write(kernelData, 0, kernelData.Length); r++; o++; } r = 0; o = 0; } }
private static void Compressor(string inFile, string outFile) { if (inFile == null) { throw new ArgumentNullException("inFile"); } if (outFile == null) { throw new ArgumentNullException("outFile"); } long fileSize = new FileInfo(inFile).Length; int blockSize = Math.Max(1024, (int)(fileSize >= 16 * 1024 * 1024 ? 1024 * 1024 : fileSize / 16)); using (var reader = new FileStream(inFile, FileMode.Open, FileAccess.Read, FileShare.None)) using (var writer = new BinaryWriter(new FileStream(outFile, FileMode.Create, FileAccess.Write, FileShare.None))) { _srcLen = reader.Length; ShowReadProgress(); writer.Write(Magic); long offsetPos = writer.BaseStream.Position; writer.Write(0L); var packedSizes = new List <int>(); var processor = new Processor( Environment.ProcessorCount, srcData => { srcData.Capacity = blockSize; var res = reader.Read(srcData.Data, 0, srcData.Capacity); Interlocked.Add(ref _srcRb, res); return(res); }, (srcData, srcDataSize, dstData) => { do { try { dstData.Capacity = srcData.Capacity; using (var memoryStream = new MemoryStream(dstData.Data, 0, dstData.Capacity)) { memoryStream.SetLength(0); using (var gzipStream = new GZipStream(memoryStream, CompressionMode.Compress, true)) gzipStream.Write(srcData.Data, 0, srcDataSize); return((int)memoryStream.Length); } } catch (NotSupportedException) { dstData.Capacity += (dstData.Capacity >> 3); } }while (true); }, (dstData, dstDataSize) => { writer.Write(dstData.Data, 0, dstDataSize); packedSizes.Add(dstDataSize); }); Console.CancelKeyPress += (sender, args) => { processor.CancelRequest(); args.Cancel = true; }; processor.Run(); while (!processor.WaitForAll(1000)) { ShowReadProgress(); } ShowReadProgress(); long tabPos = writer.BaseStream.Position; writer.Write(packedSizes.Count); foreach (int size in packedSizes) { writer.Write(size); } writer.BaseStream.Position = offsetPos; writer.Write(tabPos); } }
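The writer above lays the file out as: magic, an 8-byte placeholder that is later patched with the offset of the size table, the GZip blocks back to back, and finally the table itself (an int count followed by one int per block). A reader sketch for that layout (Decompressor is a hypothetical name, and Magic is assumed here to be a byte[] constant):

static void Decompressor(string inFile, string outFile, byte[] magic)
{
    using (var reader = new BinaryReader(File.OpenRead(inFile)))
    using (var output = File.Create(outFile))
    {
        reader.ReadBytes(magic.Length); // skip the magic prefix
        long tabPos = reader.ReadInt64(); // offset of the packed-size table
        long dataPos = reader.BaseStream.Position; // the first block starts right after the header
        reader.BaseStream.Position = tabPos;
        int count = reader.ReadInt32();
        var sizes = new int[count];
        for (int i = 0; i < count; i++) { sizes[i] = reader.ReadInt32(); }
        reader.BaseStream.Position = dataPos;
        byte[] buffer = new byte[64 * 1024];
        foreach (int size in sizes)
        {
            using (var gz = new GZipStream(new MemoryStream(reader.ReadBytes(size)), CompressionMode.Decompress))
            {
                int n;
                while ((n = gz.Read(buffer, 0, buffer.Length)) > 0) { output.Write(buffer, 0, n); }
            }
        }
    }
}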
private void UploadToCdn() { try { // one thread only if (Interlocked.CompareExchange(ref work, 1, 0) == 0) { var @continue = false; try { CdnItem item; if (queue.TryDequeue(out item)) { @continue = true; var cdnpath = GetCdnPath(item.Bundle.Path); var key = new Uri(cdnpath).PathAndQuery.TrimStart('/'); var path = key.Remove(0, _container.Length + 1); var content = Encoding.UTF8.GetBytes(item.Response.Content); var inputStream = new MemoryStream(); if (ClientSettings.GZipEnabled) { using (var zip = new GZipStream(inputStream, CompressionMode.Compress, true)) { zip.Write(content, 0, content.Length); zip.Flush(); } } else { inputStream.Write(content, 0, content.Length); } inputStream.Position = 0; bool upload = true; var client = GetClient(); var etag = SelectelSharp.Common.Helpers.CalculateSHA1(item.Response.Content); var fileInfo = client.ListObjects(_container, 1, null, null, path); if (fileInfo != null && fileInfo.Any()) { upload = fileInfo.Single().Hash != etag; } if (upload) { var contentType = String.Empty; var mime = string.IsNullOrEmpty(contentType) ? MimeMapping.GetMimeMapping(Path.GetFileName(key)) : contentType; var customHeaders = new Dictionary <string, string>(); if (ClientSettings.GZipEnabled) { customHeaders.Add("Content-Encoding", "gzip"); } var cache = TimeSpan.FromDays(365); customHeaders.Add("Cache-Control", String.Format("public, max-age={0}", (int)cache.TotalSeconds)); customHeaders.Add("Expires", DateTime.UtcNow.Add(cache).ToString("R")); client.CreateObject(_container, inputStream, path, mime, 4096, customHeaders); } else { inputStream.Close(); } item.Bundle.CdnPath = cdnpath; } } catch (Exception err) { log.Error(err); } finally { work = 0; if (@continue) { Action upload = () => UploadToCdn(); upload.BeginInvoke(null, null); } } } } catch (Exception fatal) { log.Fatal(fatal); } }
static void Main(string[] args) { var testCount = 1; var testFile = "test.txt"; GenerateFile(testFile); var testData = File.ReadAllBytes(testFile); int size = 0; ExecuteTest(testCount, "Copy", testData, (data) => { var copy = new byte[data.Length]; data.CopyTo(copy, 0); return(copy.Length); }); ExecuteTest(testCount, "ft.lz4", testData, (data) => { using (var memoryStream = new MemoryStream(64 * 1024)) { using (var lz4 = FT.LZ4.LZ4Stream.Encode(memoryStream)) { lz4.Write(data, 0, data.Length); lz4.Flush(); return((int)memoryStream.Length); } } }); ExecuteTest(testCount, "Gzip", testData, (data) => { using (var memoryStream = new MemoryStream(64 * 1024)) { using (var gz = new GZipStream(memoryStream, CompressionMode.Compress)) { gz.Write(data, 0, data.Length); gz.Flush(); return((int)memoryStream.Length); } } }); ExecuteTest(testCount, "deflate", testData, (data) => { using (var memoryStream = new MemoryStream(64 * 1024)) { using (var deflate = new DeflateStream(memoryStream, CompressionMode.Compress)) { deflate.Write(data, 0, data.Length); deflate.Flush(); return((int)memoryStream.Length); } } }); /* * ExecuteTest(testCount, "k40s", testData, (data) => * { * using (var memoryStream = new MemoryStream(64 * 1024)) * { * using (var lz4 = K4os.Compression.LZ4.Streams.LZ4Stream.Encode(memoryStream)) * { * lz4.Write(data, 0, data.Length); * lz4.Flush(); * return (int)memoryStream.Length; * } * } * }); */ ExecuteTest(testCount, "lz4net", testData, (data) => { using (var memoryStream = new MemoryStream(64 * 1024)) { using (var lz4 = new LZ4.LZ4Stream(memoryStream, CompressionMode.Compress)) { lz4.Write(data, 0, data.Length); lz4.Flush(); return((int)memoryStream.Length); } } }); Console.ReadKey(); }
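One caveat about the GZip and deflate cases above: Flush() on GZipStream/DeflateStream does not emit the final deflate block or the gzip footer, so reading memoryStream.Length inside the using block slightly under-reports the finished size. A variant sketch that disposes the compressor before measuring (leaveOpen keeps the MemoryStream readable):

ExecuteTest(testCount, "Gzip (finalized)", testData, (data) =>
{
    using (var memoryStream = new MemoryStream(64 * 1024))
    {
        using (var gz = new GZipStream(memoryStream, CompressionMode.Compress, leaveOpen: true))
        {
            gz.Write(data, 0, data.Length);
        } // Dispose writes the final block and the gzip footer
        return (int)memoryStream.Length;
    }
});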
private bool SaveFile(string fileName) { if (_workingDir == null || (!this.SaveXaml && !this.SaveZaml)) { return(false); } _writerErrorOccurred = false; string fileNameWithoutExt = Path.GetFileNameWithoutExtension(fileName); string xamlFileName = Path.Combine(_workingDir.FullName, fileNameWithoutExt + ".xaml"); if (File.Exists(xamlFileName)) { File.SetAttributes(xamlFileName, FileAttributes.Normal); File.Delete(xamlFileName); } if (this.UseFrameXamlWriter) { XmlWriterSettings writerSettings = new XmlWriterSettings(); writerSettings.Indent = true; writerSettings.OmitXmlDeclaration = true; writerSettings.Encoding = Encoding.UTF8; using (FileStream xamlFile = File.Create(xamlFileName)) { using (XmlWriter writer = XmlWriter.Create(xamlFile, writerSettings)) { System.Windows.Markup.XamlWriter.Save(_drawing, writer); } } } else { try { XmlXamlWriter xamlWriter = new XmlXamlWriter(this.DrawingSettings); using (FileStream xamlFile = File.Create(xamlFileName)) { xamlWriter.Save(_drawing, xamlFile); } } catch { _writerErrorOccurred = true; if (_fallbackOnWriterError) { // If the file exist, we back it up and save a new file... if (File.Exists(xamlFileName)) { File.Move(xamlFileName, xamlFileName + ".bak"); } XmlWriterSettings writerSettings = new XmlWriterSettings(); writerSettings.Indent = true; writerSettings.OmitXmlDeclaration = true; writerSettings.Encoding = Encoding.UTF8; using (FileStream xamlFile = File.Create(xamlFileName)) { using (XmlWriter writer = XmlWriter.Create(xamlFile, writerSettings)) { System.Windows.Markup.XamlWriter.Save(_drawing, writer); } } } else { throw; } } } if (this.SaveZaml) { string zamlFileName = Path.ChangeExtension(xamlFileName, ".zaml"); if (File.Exists(zamlFileName)) { File.SetAttributes(zamlFileName, FileAttributes.Normal); File.Delete(zamlFileName); } FileStream zamlSourceFile = new FileStream( xamlFileName, FileMode.Open, FileAccess.Read, FileShare.Read); byte[] buffer = new byte[zamlSourceFile.Length]; // Read the file to ensure it is readable. int count = zamlSourceFile.Read(buffer, 0, buffer.Length); if (count != buffer.Length) { zamlSourceFile.Close(); return(false); } zamlSourceFile.Close(); FileStream zamlDestFile = File.Create(zamlFileName); GZipStream zipStream = new GZipStream(zamlDestFile, CompressionMode.Compress, true); zipStream.Write(buffer, 0, buffer.Length); zipStream.Close(); zamlDestFile.Close(); _zamlFile = zamlFileName; } _xamlFile = xamlFileName; if (!this.SaveXaml && File.Exists(xamlFileName)) { File.Delete(xamlFileName); _xamlFile = null; } return(true); }
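Loading the .zaml back is the mirror of the save path above, since the file is just a GZip-wrapped XAML document. A sketch (LoadZaml is a hypothetical helper; UTF-8 matches the writer settings used for the .xaml):

public static string LoadZaml(string zamlFileName)
{
    using (var fs = File.OpenRead(zamlFileName))
    using (var gz = new GZipStream(fs, CompressionMode.Decompress))
    using (var reader = new StreamReader(gz, Encoding.UTF8))
    {
        return reader.ReadToEnd();
    }
}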
/// <summary> /// Perform backup on the specified media /// </summary> public void Backup(BackupMedia media, String password = null) { // Make a determination that the user is allowed to perform this action if (AuthenticationContext.Current.Principal != AuthenticationContext.SystemPrincipal) { new PolicyPermission(System.Security.Permissions.PermissionState.Unrestricted, PolicyIdentifiers.ExportClinicalData).Demand(); } // Get the output medium var directoryName = this.GetBackupDirectory(media); string fileName = Path.Combine(directoryName, $"oizdc-{DateTime.Now.ToString("yyyyMMddHHmm")}.oiz.tar"); // Confirm if the user really really wants to back up if (String.IsNullOrEmpty(password) && !ApplicationContext.Current.Confirm(Strings.locale_backup_confirm)) { return; } // TODO: Audit the backup of the data to the central server AuditUtil.AuditDataExport(); // Try to backup the data try { this.m_tracer.TraceInfo("Beginning backup to {0}..", fileName); ApplicationContext.Current?.SetProgress(Strings.locale_backup, 0.25f); // Backup folders first var sourceDirectory = XamarinApplicationContext.Current.ConfigurationManager.ApplicationDataDirectory; using (var fs = File.Create(fileName)) using (var writer = new SharpCompress.Writers.Tar.TarWriter(fs, new SharpCompress.Writers.WriterOptions(SharpCompress.Common.CompressionType.None))) { this.BackupDirectory(writer, sourceDirectory, sourceDirectory); var appInfo = new ApplicationInfo(false).ToDiagnosticReport(); // Output appInfo using (var ms = new MemoryStream()) { XmlSerializer xsz = new XmlSerializer(appInfo.GetType()); xsz.Serialize(ms, appInfo); ms.Flush(); ms.Seek(0, SeekOrigin.Begin); writer.Write(".appinfo.xml", ms, DateTime.Now); } // Output declaration statement using (var ms = new MemoryStream(Encoding.UTF8.GetBytes($"User {(AuthenticationContext.Current?.Principal?.Identity?.Name ?? "SYSTEM")} created this backup on {DateTime.Now}. The end user was asked to confirm this decision to backup and acknowledges all responsibilities for guarding this file."))) writer.Write("DISCLAIMER.TXT", ms, DateTime.Now); } this.m_tracer.TraceInfo("Beginning compression {0}..", fileName); using (var fs = File.OpenRead(fileName)) using (var gzs = new GZipStream(File.Create(fileName + ".gz"), CompressionMode.Compress)) { byte[] buffer = new byte[4096]; int br; // standard read loop; the original while (br == 4096) pattern stops early on any short read while ((br = fs.Read(buffer, 0, buffer.Length)) > 0) { gzs.Write(buffer, 0, br); ApplicationContext.Current?.SetProgress(Strings.locale_backup_compressing, (float)fs.Position / (float)fs.Length * 0.5f + 0.5f); } } File.Delete(fileName); } catch (Exception ex) { this.m_tracer.TraceError("Error backing up to {0}: {1}", fileName, ex); throw; } }
private void UploadToCdn() { try { // one thread only if (Interlocked.CompareExchange(ref work, 1, 0) == 0) { var @continue = false; try { CdnItem item; if (queue.TryDequeue(out item)) { @continue = true; var cdnpath = GetCdnPath(item.Bundle.Path); var key = new Uri(cdnpath).PathAndQuery.TrimStart('/'); var content = Encoding.UTF8.GetBytes(item.Response.Content); var inputStream = new MemoryStream(); var storage = GetStorage(); if (ClientSettings.GZipEnabled) { using (var zip = new GZipStream(inputStream, CompressionMode.Compress, true)) { zip.Write(content, 0, content.Length); zip.Flush(); } } else { inputStream.Write(content, 0, content.Length); } var upload = false; Google.Apis.Storage.v1.Data.Object objInfo = null; try { objInfo = storage.GetObject(_bucket, key); } catch (GoogleApiException ex) { if (ex.HttpStatusCode == HttpStatusCode.NotFound) { upload = true; } else { throw; } } if (objInfo != null) { String contentMd5Hash; inputStream.Position = 0; using (var md5 = MD5.Create()) { // Object.Md5Hash is the Base64-encoded MD5 of the stored (possibly gzipped) bytes, so hash the exact payload being uploaded contentMd5Hash = Convert.ToBase64String(md5.ComputeHash(inputStream)); } if (String.Compare(objInfo.Md5Hash, contentMd5Hash, StringComparison.Ordinal) != 0) { upload = true; } } if (upload) { UploadObjectOptions uploadObjectOptions = new UploadObjectOptions { PredefinedAcl = PredefinedObjectAcl.PublicRead }; inputStream.Position = 0; var uploaded = storage.UploadObject(_bucket, key, MimeMapping.GetMimeMapping(Path.GetFileName(key)), inputStream, uploadObjectOptions, null); inputStream.Close(); if (uploaded.Metadata == null) { uploaded.Metadata = new Dictionary <String, String>(); } if (ClientSettings.GZipEnabled) { uploaded.ContentEncoding = "gzip"; } var cache = TimeSpan.FromDays(365); uploaded.CacheControl = String.Format("public, max-age={0}", (int)cache.TotalSeconds); uploaded.Metadata["Expires"] = DateTime.UtcNow.Add(TimeSpan.FromDays(365)).ToString("R"); storage.UpdateObject(uploaded); } else { inputStream.Close(); } item.Bundle.CdnPath = cdnpath; } } catch (Exception err) { log.Error(err); } finally { work = 0; if (@continue) { Action upload = () => UploadToCdn(); upload.BeginInvoke(null, null); } } } } catch (Exception fatal) { log.Fatal(fatal); } }
public static byte[] PrepareScene(string directory, bool[] options, Random rnd, int seed) { string sceneDirectory = directory + "\\battle\\"; // The battle folder where scene.bin resides string targetScene = sceneDirectory + "scene.bin"; // The target file itself string backupScene = targetScene + "Backup"; if (!Directory.Exists(backupScene)) // Ensures backup isn't overwritten { File.Copy(targetScene, backupScene, true); // Creates a backup of the scene.bin } byte[] header = new byte[64]; /* Stores the block header * [0-4] = Offset for first GZipped data file (3 enemies per file) * Header total size must be 40h - empty entries == FF FF FF FF */ int[][] jaggedSceneInfo = new int[256][]; // An array of arrays, containing offset, size, and absoluteoffset for each scene file ArrayList listedSceneData = new ArrayList(); // Contains all the compressed scene data int[][][] jaggedModelAttackTypes = new int[3000][][]; // Contains all the uncompressed Attack Anim data long initialSize; // The size of the initial scene.bin (can vary, up to 63 blocks but typically 32-33 int size; // The size of the compressed file int offset; // Stores the current scene offset int nextOffset; // Stores the next scene offset int absoluteOffset; // Stores the scene's absolute offset in the scene.bin int finalOffset = 0; // Stores the scene's adjusted offset in the scene.bin int headerOffset = 0; // Offset of the current block header; goes up in 2000h (8192) increments byte[] padder = new byte[1]; // Scene files, after compression, need to be FF padded to make them multiplicable by 4 padder[0] = 255; //Random rnd = new Random(Guid.NewGuid().GetHashCode()); byte[] kernelLookup = new byte[64]; // Stores the new lookup table to be written to the kernel.bin; blank values are FF int i = 0; while (i < 64) { kernelLookup[i] = 255; i++; } int r = 0; // C'mon get up and make some noise int o = 0; // while your whiles get looped by int c = 0; // the var street boys int k = 0; // *DJ scratching noises* int s = 0; // *DJ scratching noises intensify* /* Step 1: Read the Scene.bin and retrieve its data for use later * The goal of this step is to build an array containing information about each scene. * We then use this information to derive other important information (for instance, adjusting the header offsets) * To get the info we need, the header of each 'block' is read (2000h per block) and this tells us where to find each scene. * We need to use GZip compression to get the data out, but we cannot let the Gzipper hit the header or it will break. 
*/ // Entire file is read; offsets and sizes for scenes are extracted and placed in a jagged array (an array of arrays) FileStream fs = new FileStream(targetScene, FileMode.Open, FileAccess.Read); initialSize = fs.Length; fs.Close(); while (headerOffset < initialSize) // 32 blocks of 2000h/8192 bytes each { // Opens and reads the default scene.bin FileStream stepOne = new FileStream(targetScene, FileMode.Open, FileAccess.Read); stepOne.Seek(headerOffset, SeekOrigin.Begin); stepOne.Read(header, 0, 64); // Header never exceeds 64 bytes stepOne.Close(); // Max of 16 sections in a header (is usually less however) while (r < 16) { // If the 2nd byte of the current header is FF then assume there are no more valid scene headers in this block if (header[c + 1] != 0xFF) { // Fetches the current header byte[] currentHeader = new byte[4]; currentHeader[0] = header[c]; currentHeader[1] = header[c + 1]; currentHeader[2] = header[c + 2]; currentHeader[3] = header[c + 3]; // Fetches the next header - ignored if we're at the end of the block header byte[] nextHeader = new byte[4]; if (r < 15) { nextHeader[0] = header[c + 4]; nextHeader[1] = header[c + 5]; nextHeader[2] = header[c + 6]; nextHeader[3] = header[c + 7]; } // Converts the current offset and the next offset into integer offset = AllMethods.GetLittleEndianInt(currentHeader, 0); nextOffset = AllMethods.GetLittleEndianInt(nextHeader, 0); // Checks that next header is not empty or if we are at the last header if (currentHeader[1] == 0xFF || nextHeader[1] == 0xFF || r == 15) { // If next header is FF FF FF FF, then we're at the end of file and should deduct 2000h to get current file size size = 8192 - (offset * 4); } else { // Difference between this offset and next offset provides size; offsets need to be *4 to get actual offset size = (nextOffset - offset) * 4; } // Gets absolute offset in the scene.bin for the current scene absoluteOffset = (offset * 4) + headerOffset; // Store our retrieved/derived information in our jagged array (watch out for the pointy bits) jaggedSceneInfo[o] = new int[] { offset, size, absoluteOffset, finalOffset }; o++; } c += 4; r++; } headerOffset += 8192; r = 0; c = 0; } o = 0; headerOffset = 0; /* Step 2: Randomising the scene data * Using absolute offset + compressed size, we locate and decompress the file. * We run the scene data through the randomiser. * We then recompress the returned data. 
* The size will now have changed; we will update this later while generating our new scene.bin */ // But first, we acquire the camera data of the target scene.bin ArrayList listedCameraData = Indexer.GetCameraData(jaggedSceneInfo, targetScene); // And the valid Animation Types for each ModelID jaggedModelAttackTypes = Indexer.GetAttackData(jaggedSceneInfo, targetScene); while (r < 256) { int bytesRead; byte[] uncompressedScene = new byte[7808]; // Used to hold the decompressed scene file using (BinaryReader brg = new BinaryReader(new FileStream(targetScene, FileMode.Open))) { // Calls method to convert little endian values into an integer byte[] compressedScene = new byte[jaggedSceneInfo[o][1]]; // Used to hold the compressed scene file, where [o][1] is scene size brg.BaseStream.Seek(jaggedSceneInfo[o][2], SeekOrigin.Begin); // Starts reading the compressed scene file brg.Read(compressedScene, 0, compressedScene.Length); using (MemoryStream inputWrapper = new MemoryStream(compressedScene)) { using (MemoryStream decompressedOutput = new MemoryStream()) { using (GZipStream zipInput = new GZipStream(inputWrapper, CompressionMode.Decompress, true)) { while ((bytesRead = zipInput.Read(uncompressedScene, 0, 7808)) != 0) { decompressedOutput.Write(uncompressedScene, 0, bytesRead); } zipInput.Close(); } decompressedOutput.Close(); } inputWrapper.Close(); } brg.Close(); } // Sends random camera data to be used int rand = (byte)rnd.Next(listedCameraData.Count); byte[] initCam = Indexer.InitialCamera(); byte[] randCam = (byte[])listedCameraData[rand]; int sceneID = r; // Sends decompressed scene data to be randomised Scene.RandomiseScene(uncompressedScene, randCam, sceneID, options, rnd, jaggedModelAttackTypes, seed, initCam); // Recompress the altered uncompressed data back into GZip byte[] recompressedScene; using (var result = new MemoryStream()) { using (var compressionStream = new GZipStream(result, CompressionMode.Compress)) { compressionStream.Write(uncompressedScene, 0, uncompressedScene.Length); compressionStream.Close(); } recompressedScene = result.ToArray(); result.Close(); } // Checks that the file is divisible by 4; FF padding is applied otherwise if (recompressedScene.Length % 4 == 3) // Remainder of 3, add 1 FF { recompressedScene = recompressedScene.Concat(padder).ToArray(); } else if (recompressedScene.Length % 4 == 2) // Remainder of 2, add 2 FFs { recompressedScene = recompressedScene.Concat(padder).ToArray(); recompressedScene = recompressedScene.Concat(padder).ToArray(); } else if (recompressedScene.Length % 4 == 1) // Remainder of 1, add 3 FFs { recompressedScene = recompressedScene.Concat(padder).ToArray(); recompressedScene = recompressedScene.Concat(padder).ToArray(); recompressedScene = recompressedScene.Concat(padder).ToArray(); } // The size is updated with the newly compressed/padded scene's length jaggedSceneInfo[o][1] = recompressedScene.Length; // Byte array is added to the ArrayList listedSceneData.Add(recompressedScene); r++; o++; } r = 0; o = 0; /* Step 3: Rebuilding the Scene.bin * We dynamically put scenes into a block until it would exceed 8192 bytes; then we create a new block. * The header is constantly updated with each new scene added to the block, using previous header to determine size. * When all 255 scenes are allocated, we finish up by padding off the last block to get a 40,000h/262,144 byte file. 
* The size will now have changed; we will update this later while generating our new scene.bin */ int sizeLimit = 8193; // Want to start by making a new header so we set size higher than limit to trigger that int headerInt; byte[] finalHeader = new byte[64]; using (var outputStream = File.Create(targetScene)) { int blockCount = 0; // Counts blocks for the kernel lookup table index // Loops until all 255 scenes are assigned to a block while (r < 256) { // Checks if the next scene will 'fit' into the current block. // No scene is added yet at this time, that is only done if there's space. sizeLimit += jaggedSceneInfo[o][1]; // If this returns true, then our block is 'full' and now needs to be padded to 8192 bytes exactly // 's' represents the number of scenes currently in the block, only 16 scenes can fit into one block if (sizeLimit >= 8192 || s == 16) { if (blockCount != 0) { s += kernelLookup[blockCount - 1]; } kernelLookup[blockCount] = (byte)s; blockCount++; // Pads the end of the block until it hits a divisor of 8192. outputStream.Position = outputStream.Length; while (outputStream.Length % 8192 > 0) { outputStream.Write(padder, 0, 1); } // A new blank header of FFs is made for the start of the next new block while (c < 64) { finalHeader[c] = 255; c++; } finalHeader[0] = 16; //First offset is always 16h in a header finalHeader[1] = 0; finalHeader[2] = 0; finalHeader[3] = 0; if (s != 0) { headerOffset += 8192; // Increment headerOffset } // Writes the header to the file at 8192 increments outputStream.Position = outputStream.Length; outputStream.Write(finalHeader, 0, 64); c = 0; k = 0; s = 0; sizeLimit = jaggedSceneInfo[o][1]; // Resets size to that of the first added scene in this new block sizeLimit += 64; } // When we write a compressed file in, we want to update the header for the next file. // We'll have a +4 incrementing value to write it into the appropriate header address. // This needs to avoid writing to the first header offset. // Takes the byte data from the ArrayList, converts it into a stream, and then appends it to the file-in-progress byte[] sceneData = (byte[])listedSceneData[o]; outputStream.Position = outputStream.Length; outputStream.Write(sceneData, 0, sceneData.Length); // Skips offset calculation if it is the first scene in the block as this is always 16h and has been written already if (s != 0) { // Calculates this scene's offset using the previous offset + that offset's file size. headerInt = AllMethods.GetPreviousLittleEndianInt(finalHeader, k); headerInt *= 4; headerInt += jaggedSceneInfo[o - 1][1]; headerInt /= 4; // Now we have new offset calculated, time to convert it into bytes and write to header. byte[] bytes = BitConverter.GetBytes(headerInt); finalHeader[k] = bytes[0]; finalHeader[k + 1] = bytes[1]; finalHeader[k + 2] = bytes[2]; finalHeader[k + 3] = bytes[3]; // Write bytes to offset of k + headerOffset to our file now. outputStream.Position = headerOffset; outputStream.Write(finalHeader, 0, 64); } r++; o++; k += 4; s++; } r = 0; // ahhhh o = 0; // We're gonna loop through whiles c = 0; // all night k = 0; // and try-catch every day s = 0; // *gene falls over a rogue assignment* // All scenes allocated, the final file must now be padded to 8192 bytes outputStream.Position = outputStream.Length; while (outputStream.Length % 8192 > 0) { outputStream.Write(padder, 0, 1); } // New scene.bin ready to go. Hopefully. outputStream.Close(); } return(kernelLookup); }
public void AddItem(LogItem item) { if (LogLevel == LogLevel.None || item == null || string.IsNullOrWhiteSpace(item.Exception.ToString())) { return; } try { var uniqueValue = (item.Exception + AdditionalData.ToDebugString()).Trim(); if (!_unique.Contains(uniqueValue)) { OnItemAdded.RaiseEvent(item, new EventArgs()); _unique.Add(uniqueValue); var file = Path.Combine( LogDir, string.Format( _fileName, DateTime.Now.ToString("yyyy_MM_dd"), LogLevel.ToString().ToLower(), (item.Exception + AdditionalData.ToDebugString()).ToMd5Hash())); if (File.Exists(file)) { return; } AddData(item.Exception); if (OutputConsole) { Console.ForegroundColor = ConsoleColor.Yellow; Console.WriteLine(item.Exception); Console.ResetColor(); } using ( var fileStream = new FileStream( file, FileMode.CreateNew, FileAccess.Write, FileShare.None, 4096, true)) { using (Stream gzStream = new GZipStream(fileStream, CompressionMode.Compress, false)) { var text = item.Exception.ToString(); text = item.Exception.Data.Cast <DictionaryEntry>() .Aggregate( text, (current, entry) => current + string.Format("{0}{1}: {2}", Environment.NewLine, entry.Key, entry.Value)); if (string.IsNullOrWhiteSpace(text.Trim())) { return; } var logByte = new UTF8Encoding(true).GetBytes(text); if (Compression) { gzStream.Write(logByte, 0, logByte.Length); } else { fileStream.Write(logByte, 0, logByte.Length); } } } } } catch (Exception ex) { Console.WriteLine(ex); } }
private static void Slice(string sourceFile, string destinationDirectory, int parts) { using (var source = new FileStream(sourceFile, FileMode.Open)) { long partSize = (long)Math.Ceiling((double)source.Length / parts); long fileOffset = 0; string currPartPath; FileStream fsPart; long sizeRemaining = source.Length; string pattern = @"(\w+)(?=\.)\.(?<=\.)(\w+)"; Regex pairs = new Regex(pattern); matches = pairs.Matches(sourceFile); for (int i = 0; i < parts; i++) { currPartPath = destinationDirectory + matches[0].Groups[1] + String.Format(@"-{0}", i) + "." + "gz"; files.Add(currPartPath); using (fsPart = new FileStream(currPartPath, FileMode.Create)) { using (var compressionStream = new GZipStream(fsPart, CompressionMode.Compress, false)) { long currentPieceSize = 0; byte[] buffer = new byte[4096]; while (currentPieceSize < partSize) { int readBytes = source.Read(buffer, 0, buffer.Length); if (readBytes == 0) { break; } compressionStream.Write(buffer, 0, readBytes); currentPieceSize += readBytes; } } } sizeRemaining = (int)source.Length - (i * partSize); if (sizeRemaining < partSize) { partSize = sizeRemaining; } fileOffset += partSize; } } }
public void schematic(byte[,] c_map, int[,] h_map, int x, int y, int h, int h_low, bool[] c_used) { y++; Int16 Int16_x = Convert.ToInt16(x); Int16 Int16_y = Convert.ToInt16(y); Int16 Int16_h = Convert.ToInt16(h); byte[, ,] blocks = new byte[x, y, h]; int blocks_total = x * y * h; int c_used_count = 0; byte c_used1 = 0; for (int i = 1; i < globalvars.colordata_arraysize_x; i++) { if (c_used[i]) { c_used1 = Convert.ToByte(i); break; } } h_low = Math.Abs(h_low); for (int yy = 0; yy < y; yy++) { for (int xx = 0; xx < x; xx++) { if (yy == 0) { blocks[xx, 0, h_low] = c_used1; } else { h_map[xx, yy - 1] += h_low; blocks[xx, yy, h_map[xx, yy - 1]] = c_map[xx, yy - 1]; } } } ArrayList byteList = new ArrayList(); byteList.AddRange(TAG_Compound("Schematic")); byteList.AddRange(TAG_Compound("Metadata")); byteList.AddRange(TAG_Int("WEOffsetX", 0)); byteList.AddRange(TAG_Int("WEOffsetY", 0)); byteList.AddRange(TAG_Int("WEOffsetZ", -1)); byteList.Add((byte)0);//CLOSE TAG_Compound("Metadata") byteList.AddRange(TAG_Compound("Palette")); for (int i = 0; i < globalvars.colordata_arraysize_x; i++) { if (c_used[i]) { byteList.AddRange(TAG_Int(globalvars.colordata[i, 4], i)); c_used_count++; } } byteList.Add((byte)0);//CLOSE TAG_Compound("Palette") byteList.AddRange(TAG_List_Empty("BlockEntities")); byteList.AddRange(TAG_Int("DataVersion", 2230)); byteList.AddRange(TAG_Int("PaletteMax", c_used_count)); byteList.AddRange(TAG_Int("Version", 2)); byteList.AddRange(TAG_Short("Length", Int16_y)); byteList.AddRange(TAG_Short("Width", Int16_x)); byteList.AddRange(TAG_Short("Height", Int16_h)); byteList.AddRange(TAG_Int_Array("Offset", new int[] { 0, 0, 0 })); byteList.AddRange(TAG_Byte_Array("BlockData", blocks_total)); byte[] BlockInfo = byteList.ToArray(typeof(byte)) as byte[]; byte[] BlockData = new byte[blocks_total + 1]; UInt32 uint32_count = 0; progressFileMax = h; worker1.ReportProgress(0);//update progress bars for (int hh = 0; hh < h; hh++) { for (int yy = 0; yy < y; yy++) { for (int xx = 0; xx < x; xx++) { BlockData[uint32_count] = blocks[xx, yy, hh]; uint32_count++; } } progressFile++; worker1.ReportProgress(0);//update progress bars } byte[] byteArray = BlockInfo.Concat(BlockData).ToArray(); using (var memStream = new MemoryStream()) { using (var compStream = new GZipStream(memStream, CompressionMode.Compress)) { compStream.Write(byteArray, 0, byteArray.Length); compStream.Flush(); } byte[] compressedArray = memStream.ToArray(); File.WriteAllBytes(@globalvars.pathSave + ".schem", compressedArray); } }
public bool Save([NotNull] Map mapToSave, [NotNull] string fileName)
{
    if (mapToSave == null)
    {
        throw new ArgumentNullException("mapToSave");
    }
    if (fileName == null)
    {
        throw new ArgumentNullException("fileName");
    }
    using (FileStream mapStream = File.OpenWrite(fileName))
    {
        BinaryWriter writer = new BinaryWriter(mapStream);
        // Version
        writer.Write(MapVersion);
        MemoryStream serializationStream = new MemoryStream();
        DataContractJsonSerializer serializer = new DataContractJsonSerializer(typeof(OpticraftMetaData));
        // Create and serialize core meta data
        OpticraftMetaData oMetadate = new OpticraftMetaData
        {
            X = mapToSave.Width,
            Y = mapToSave.Length,
            Z = mapToSave.Height,
            // Spawn
            SpawnX = mapToSave.Spawn.X,
            SpawnY = mapToSave.Spawn.Y,
            SpawnZ = mapToSave.Spawn.Z,
            SpawnOrientation = mapToSave.Spawn.R,
            SpawnPitch = mapToSave.Spawn.L
        };
        // World related values.
        if (mapToSave.World != null)
        {
            oMetadate.Hidden = mapToSave.World.IsHidden;
            oMetadate.MinimumJoinRank = mapToSave.World.AccessSecurity.MinRank.Name;
            oMetadate.MinimumBuildRank = mapToSave.World.BuildSecurity.MinRank.Name;
        }
        else
        {
            oMetadate.Hidden = false;
            oMetadate.MinimumJoinRank = oMetadate.MinimumBuildRank = "guest";
        }
        oMetadate.CreationDate = 0; // This is ctime for when the world was created. Unsure on how to extract it. Opticraft makes no use of it as of yet
        serializer.WriteObject(serializationStream, oMetadate);
        byte[] jsonMetaData = serializationStream.ToArray();
        writer.Write(jsonMetaData.Length);
        writer.Write(jsonMetaData);
        // Now create and serialize core data store (zones)
        Zone[] zoneCache = mapToSave.Zones.Cache;
        OpticraftDataStore oDataStore = new OpticraftDataStore
        {
            Zones = new OpticraftZone[zoneCache.Length]
        };
        int i = 0;
        foreach (Zone zone in zoneCache)
        {
            OpticraftZone oZone = new OpticraftZone
            {
                Name = zone.Name,
                MinimumRank = zone.Controller.MinRank.Name,
                Owner = "",
                // Bounds
                X1 = zone.Bounds.XMin,
                X2 = zone.Bounds.XMax,
                Y1 = zone.Bounds.YMin,
                Y2 = zone.Bounds.YMax,
                Z1 = zone.Bounds.ZMin,
                Z2 = zone.Bounds.ZMax,
                Builders = new string[zone.Controller.ExceptionList.Included.Length]
            };
            // Builders
            int j = 0;
            foreach (PlayerInfo pInfo in zone.Controller.ExceptionList.Included)
            {
                oZone.Builders[j++] = pInfo.Name;
            }
            // Excluded players
            oZone.Excluded = new string[zone.Controller.ExceptionList.Excluded.Length];
            j = 0;
            foreach (PlayerInfo pInfo in zone.Controller.ExceptionList.Excluded)
            {
                oZone.Excluded[j++] = pInfo.Name;
            }
            oDataStore.Zones[i++] = oZone;
        }
        // Serialize it
        serializationStream = new MemoryStream();
        serializer = new DataContractJsonSerializer(typeof(OpticraftDataStore));
        serializer.WriteObject(serializationStream, oDataStore);
        byte[] jsonDataStore = serializationStream.ToArray();
        writer.Write(jsonDataStore.Length);
        writer.Write(jsonDataStore);
        // Blocks: GZip-compressed, written with a length prefix
        MemoryStream blockStream = new MemoryStream();
        using (GZipStream zipper = new GZipStream(blockStream, CompressionMode.Compress, true))
        {
            zipper.Write(mapToSave.Blocks, 0, mapToSave.Blocks.Length);
        }
        byte[] compressedBlocks = blockStream.ToArray();
        writer.Write(compressedBlocks.Length);
        writer.Write(compressedBlocks);
    }
    return true;
}
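The length-prefixed GZip section pattern used for the blocks above (write an int32 length, then the compressed bytes, so a reader can bound or skip the section) generalizes nicely. A minimal sketch under that assumption; WriteGZipSection is a hypothetical helper name, not part of the Save method:

using System.IO;
using System.IO.Compression;

static class SectionWriter
{
    // Compress a payload to memory, then write <int32 length><compressed bytes>
    // so a reader knows exactly how many bytes to consume for this section.
    public static void WriteGZipSection(BinaryWriter writer, byte[] payload)
    {
        using (var buffer = new MemoryStream())
        {
            using (var gz = new GZipStream(buffer, CompressionMode.Compress, leaveOpen: true))
            {
                gz.Write(payload, 0, payload.Length);
            }
            byte[] compressed = buffer.ToArray();
            writer.Write(compressed.Length);
            writer.Write(compressed);
        }
    }
}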
public void Zlib_GZipStream_FileName_And_Comments() { // select the name of the zip file string FileToCompress = System.IO.Path.Combine(TopLevelDir, "Zlib_GZipStream.dat"); Assert.IsFalse(System.IO.File.Exists(FileToCompress), "The temporary zip file '{0}' already exists.", FileToCompress); byte[] working = new byte[WORKING_BUFFER_SIZE]; int n = -1; int sz = this.rnd.Next(21000) + 15000; TestContext.WriteLine(" Creating file: {0} sz({1})", FileToCompress, sz); CreateAndFillFileText(FileToCompress, sz); System.IO.FileInfo fi1 = new System.IO.FileInfo(FileToCompress); int crc1 = DoCrc(FileToCompress); // four trials, all combos of FileName and Comment null or not null. for (int k = 0; k < 4; k++) { string CompressedFile = String.Format("{0}-{1}.compressed", FileToCompress, k); using (Stream input = File.OpenRead(FileToCompress)) { using (FileStream raw = new FileStream(CompressedFile, FileMode.Create)) { using (GZipStream compressor = new GZipStream(raw, CompressionMode.Compress, CompressionLevel.BestCompression, true)) { // FileName is optional metadata in the GZip bytestream if (k % 2 == 1) compressor.FileName = FileToCompress; // Comment is optional metadata in the GZip bytestream if (k > 2) compressor.Comment = "Compressing: " + FileToCompress; byte[] buffer = new byte[1024]; n = -1; while (n != 0) { if (n > 0) compressor.Write(buffer, 0, n); n = input.Read(buffer, 0, buffer.Length); } } } } System.IO.FileInfo fi2 = new System.IO.FileInfo(CompressedFile); Assert.IsTrue(fi1.Length > fi2.Length, String.Format("Compressed File is not smaller, trial {0} ({1}!>{2})", k, fi1.Length, fi2.Length)); // decompress twice: // once with System.IO.Compression.GZipStream and once with Alienlab.Zlib.GZipStream for (int j = 0; j < 2; j++) { using (var input = System.IO.File.OpenRead(CompressedFile)) { Stream decompressor = null; try { switch (j) { case 0: decompressor = new Alienlab.Zlib.GZipStream(input, CompressionMode.Decompress, true); break; case 1: decompressor = new System.IO.Compression.GZipStream(input, System.IO.Compression.CompressionMode.Decompress, true); break; } string DecompressedFile = String.Format("{0}.{1}.decompressed", CompressedFile, (j == 0) ? "Ionic" : "BCL"); TestContext.WriteLine("........{0} ...", System.IO.Path.GetFileName(DecompressedFile)); using (var s2 = System.IO.File.Create(DecompressedFile)) { n = -1; while (n != 0) { n = decompressor.Read(working, 0, working.Length); if (n > 0) s2.Write(working, 0, n); } } int crc2 = DoCrc(DecompressedFile); Assert.AreEqual<Int32>(crc1, crc2); } finally { if (decompressor != null) decompressor.Dispose(); } } } } }
public void Encode()
{
    if (tileData.encoding == "csv")
    {
        string csv = "";
        for (int j = 0; j < height; j++)
        {
            csv += "\n";
            for (int i = 0; i < width; i++)
            {
                csv += tileFlags[i + j * width] + ",";
            }
        }
        csv = csv.TrimEnd(',');
        csv += "\n";
        tileData.contents = csv;
    }
    else if (tileData.encoding == "base64")
    {
        MemoryStream stream = new MemoryStream();
        using (BinaryWriter binaryWriter = new BinaryWriter(stream))
        {
            for (int j = 0; j < height; j++)
            {
                for (int i = 0; i < width; i++)
                {
                    binaryWriter.Write(tileFlags[i + j * width]);
                }
            }
        }
        byte[] bytes = stream.ToArray();
        if (tileData.compression == "gzip")
        {
            using (MemoryStream compress = new MemoryStream())
            {
                using (GZipStream gzip = new GZipStream(compress, CompressionMode.Compress))
                {
                    gzip.Write(bytes, 0, bytes.Length);
                }
                bytes = compress.ToArray();
            }
        }
        else if (tileData.compression == "zlib")
        {
            using (MemoryStream compress = new MemoryStream())
            {
                using (DeflateStream zlib = new DeflateStream(compress, CompressionMode.Compress))
                {
                    zlib.Write(bytes, 0, bytes.Length);
                }
                // Adler-32 of the uncompressed data
                UInt32 a = 1;
                UInt32 b = 0;
                for (int i = 0; i < bytes.Length; i++)
                {
                    a = (a + bytes[i]) % 65521;
                    b = (b + a) % 65521;
                }
                UInt32 adler = (b << 16) | a;
                byte[] compressedBytes = compress.ToArray();
                int len = compressedBytes.Length;
                bytes = new byte[len + 6];
                Array.ConstrainedCopy(compressedBytes, 0, bytes, 2, len);
                // first 2 bytes - zlib header for default compression
                bytes[0] = 0x78;
                bytes[1] = 0x9C;
                // last 4 bytes - Adler-32 checksum, big-endian
                bytes[len + 2] = (byte)((adler >> 24) & 0xFF);
                bytes[len + 3] = (byte)((adler >> 16) & 0xFF);
                bytes[len + 4] = (byte)((adler >> 8) & 0xFF);
                bytes[len + 5] = (byte)(adler & 0xFF);
            }
        }
        tileData.contents = Convert.ToBase64String(bytes);
        stream.Dispose();
    }
}
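The zlib framing in the snippet above (the 0x78 0x9C header, raw DEFLATE data, then a big-endian Adler-32 of the uncompressed input) can be factored into one reusable helper. A minimal sketch, relying only on the documented fact that .NET's DeflateStream emits raw DEFLATE with no zlib container; ZlibWriter is a hypothetical name:

using System;
using System.IO;
using System.IO.Compression;

static class ZlibWriter
{
    // Wrap raw DEFLATE output in a zlib container: 2-byte header,
    // deflate data, then a big-endian Adler-32 of the *uncompressed* bytes.
    public static byte[] Compress(byte[] data)
    {
        byte[] deflated;
        using (var ms = new MemoryStream())
        {
            using (var deflate = new DeflateStream(ms, CompressionMode.Compress))
            {
                deflate.Write(data, 0, data.Length);
            }
            deflated = ms.ToArray();
        }

        uint a = 1, b = 0;
        foreach (byte value in data)
        {
            a = (a + value) % 65521;
            b = (b + a) % 65521;
        }
        uint adler = (b << 16) | a;

        var result = new byte[deflated.Length + 6];
        result[0] = 0x78; // CMF: deflate, 32K window
        result[1] = 0x9C; // FLG: default compression
        Buffer.BlockCopy(deflated, 0, result, 2, deflated.Length);
        result[result.Length - 4] = (byte)(adler >> 24);
        result[result.Length - 3] = (byte)(adler >> 16);
        result[result.Length - 2] = (byte)(adler >> 8);
        result[result.Length - 1] = (byte)adler;
        return result;
    }
}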
private static void Slice(string sourceFile, string destinationDirectory, int parts)
{
    using (var source = new FileStream(sourceFile, FileMode.Open))
    {
        long partSize = (long)Math.Ceiling((double)source.Length / parts);
        string currPartPath;
        FileStream fsPart;
        long sizeRemaining = source.Length;
        // extracting name and extension of the input file
        string pattern = @"(\w+)(?=\.)\.(?<=\.)(\w+)";
        Regex pairs = new Regex(pattern);
        matches = pairs.Matches(sourceFile);
        for (int i = 0; i < parts; i++)
        {
            currPartPath = destinationDirectory + matches[0].Groups[1] + String.Format(@"-{0}", i) + "." + "gz";
            files.Add(currPartPath);
            // reading one part size
            using (fsPart = new FileStream(currPartPath, FileMode.Create))
            {
                using (var compressionStream = new GZipStream(fsPart, CompressionMode.Compress, false))
                {
                    long currentPieceSize = 0;
                    byte[] buffer = new byte[4096];
                    while (currentPieceSize < partSize)
                    {
                        int readBytes = source.Read(buffer, 0, buffer.Length);
                        if (readBytes == 0)
                        {
                            break;
                        }
                        // writing one part-size compressed file
                        compressionStream.Write(buffer, 0, readBytes);
                        currentPieceSize += readBytes;
                    }
                }
            }
            // calculating the remaining file size which is still to be read
            sizeRemaining = source.Length - (i * partSize);
            if (sizeRemaining < partSize)
            {
                partSize = sizeRemaining;
            }
        }
    }
}
private void UploadToCdn() { try { // one thread only if (Interlocked.CompareExchange(ref work, 1, 0) == 0) { var @continue = false; try { CdnItem item; if (queue.TryDequeue(out item)) { @continue = true; var cdnpath = GetCdnPath(item.Bundle.Path); var key = new Uri(cdnpath).PathAndQuery.TrimStart('/'); var content = Encoding.UTF8.GetBytes(item.Response.Content); var inputStream = new MemoryStream(); if (ClientSettings.GZipEnabled) { using (var zip = new GZipStream(inputStream, CompressionMode.Compress, true)) { zip.Write(content, 0, content.Length); zip.Flush(); } } else { inputStream.Write(content, 0, content.Length); } var checksum = AmazonS3Util.GenerateChecksumForContent(item.Response.Content, true); var config = new AmazonS3Config { RegionEndpoint = RegionEndpoint.GetBySystemName(s3region), UseHttp = true }; using (var s3 = new AmazonS3Client(s3publickey, s3privatekey, config)) { var upload = false; try { var request = new GetObjectMetadataRequest { BucketName = s3bucket, Key = key, }; var response = s3.GetObjectMetadata(request); upload = !string.Equals(checksum, response.Metadata["x-amz-meta-etag"], StringComparison.InvariantCultureIgnoreCase); } catch (AmazonS3Exception ex) { if (ex.StatusCode == HttpStatusCode.NotFound) { upload = true; } else { throw; } } if (upload) { var request = new PutObjectRequest { BucketName = s3bucket, CannedACL = S3CannedACL.PublicRead, AutoCloseStream = true, AutoResetStreamPosition = true, Key = key, ContentType = AmazonS3Util.MimeTypeFromExtension(Path.GetExtension(key).ToLowerInvariant()), InputStream = inputStream }; if (ClientSettings.GZipEnabled) { request.Headers.ContentEncoding = "gzip"; } var cache = TimeSpan.FromDays(365); request.Headers.CacheControl = string.Format("public, maxage={0}", (int)cache.TotalSeconds); request.Headers.ExpiresUtc = DateTime.UtcNow.Add(cache); request.Headers["x-amz-meta-etag"] = checksum; s3.PutObject(request); } else { inputStream.Close(); } item.Bundle.CdnPath = cdnpath; } } } catch (Exception err) { log.Error(err); } finally { work = 0; if (@continue) { Action upload = () => UploadToCdn(); upload.BeginInvoke(null, null); } } } } catch (Exception fatal) { log.Fatal(fatal); } }
private void Writer() { m_log.Info("[ASSET]: Writer started"); while (true) { string[] files = Directory.GetFiles(m_SpoolDirectory); if (files.Length > 0) { int tickCount = Environment.TickCount; for (int i = 0; i < files.Length; i++) { string hash = Path.GetFileNameWithoutExtension(files[i]); string s = HashToFile(hash); string diskFile = Path.Combine(m_FSBase, s); bool pathOk = false; // The cure for chicken bones! while (true) { try { // Try to make the directory we need for this file Directory.CreateDirectory(Path.GetDirectoryName(diskFile)); pathOk = true; break; } catch (System.IO.IOException) { // Creating the directory failed. This can't happen unless // a part of the path already exists as a file. Sadly the // SRAS data contains such files. string d = Path.GetDirectoryName(diskFile); // Test each path component in turn. If we can successfully // make a directory, the level below must be the chicken bone. while (d.Length > 0) { Console.WriteLine(d); try { Directory.CreateDirectory(Path.GetDirectoryName(d)); } catch (System.IO.IOException) { d = Path.GetDirectoryName(d); // We failed making the directory and need to // go up a bit more continue; } // We succeeded in making the directory and (d) is // the chicken bone break; } // Is the chicken alive? if (d.Length > 0) { Console.WriteLine(d); FileAttributes attr = File.GetAttributes(d); if ((attr & FileAttributes.Directory) == 0) { // The chicken bone should be resolved. // Return to writing the file. File.Delete(d); continue; } } } // Could not resolve, skipping m_log.ErrorFormat("[ASSET]: Could not resolve path creation error for {0}", diskFile); break; } if (pathOk) { try { byte[] data = File.ReadAllBytes(files[i]); using (GZipStream gz = new GZipStream(new FileStream(diskFile + ".gz", FileMode.Create), CompressionMode.Compress)) { gz.Write(data, 0, data.Length); gz.Close(); } File.Delete(files[i]); //File.Move(files[i], diskFile); } catch (System.IO.IOException e) { if (e.Message.StartsWith("Win32 IO returned ERROR_ALREADY_EXISTS")) { File.Delete(files[i]); } else { throw; } } } } int totalTicks = System.Environment.TickCount - tickCount; if (totalTicks > 0) // Wrap? { m_log.InfoFormat("[ASSET]: Write cycle complete, {0} files, {1} ticks, avg {2:F2}", files.Length, totalTicks, (double)totalTicks / (double)files.Length); } } Thread.Sleep(1000); } }
public byte[] GetRawData(Compression compressMode) { var bytes = new List <byte>(); //Add header - signature ("QRR") bytes.AddRange(new byte[] { 0x51, 0x52, 0x52, 0x00 }); //Add header - rowsize bytes.Add((byte)ModuleMatrix.Count); //Build data queue var dataQueue = new Queue <int>(); foreach (var row in ModuleMatrix) { foreach (var module in row) { dataQueue.Enqueue((bool)module ? 1 : 0); } } for (int i = 0; i < 8 - (ModuleMatrix.Count * ModuleMatrix.Count) % 8; i++) { dataQueue.Enqueue(0); } //Process queue while (dataQueue.Count > 0) { byte b = 0; for (int i = 7; i >= 0; i--) { b += (byte)(dataQueue.Dequeue() << i); } bytes.Add(b); } var rawData = bytes.ToArray(); //Compress stream (optional) if (compressMode.Equals(Compression.Deflate)) { using (var output = new MemoryStream()) { using (var dstream = new DeflateStream(output, CompressionMode.Compress)) { dstream.Write(rawData, 0, rawData.Length); } rawData = output.ToArray(); } } else if (compressMode.Equals(Compression.GZip)) { using (var output = new MemoryStream()) { using (GZipStream gzipStream = new GZipStream(output, CompressionMode.Compress, true)) { gzipStream.Write(rawData, 0, rawData.Length); } rawData = output.ToArray(); } } return(rawData); }
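Reading the format above back requires undoing the MSB-first bit packing. A minimal sketch of the inverse, assuming the uncompressed (raw) output and the layout the snippet writes: four signature bytes, the row size at index 4, then the packed module bits from offset 5; UnpackModules is a hypothetical name:

using System;

static class QrRawReader
{
    // Expand each byte back into eight MSB-first module bits,
    // ignoring the zero padding at the end of the packed data.
    public static bool[] UnpackModules(byte[] raw)
    {
        int rowSize = raw[4]; // header: "QRR\0" signature + row size byte
        var modules = new bool[rowSize * rowSize];
        for (int i = 0; i < modules.Length; i++)
        {
            byte b = raw[5 + i / 8];
            modules[i] = ((b >> (7 - (i % 8))) & 1) == 1;
        }
        return modules;
    }
}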
private void SendJson(HttpListenerResponse response, HttpListenerRequest request = null) { JObject json = new JObject(); int nodeIndex = 0; json["id"] = nodeIndex++; json["Text"] = "Sensor"; json["Min"] = "Min"; json["Value"] = "Value"; json["Max"] = "Max"; json["ImageURL"] = string.Empty; JArray children = new JArray { GenerateJsonForNode(_root, ref nodeIndex) }; json["Children"] = children; #if DEBUG string responseContent = json.ToString(Newtonsoft.Json.Formatting.Indented); #else string responseContent = json.ToString(Newtonsoft.Json.Formatting.None); #endif byte[] buffer = Encoding.UTF8.GetBytes(responseContent); bool acceptGzip; try { acceptGzip = (request != null) && (request.Headers["Accept-Encoding"].ToLower().IndexOf("gzip", StringComparison.OrdinalIgnoreCase) >= 0); } catch { acceptGzip = false; } if (acceptGzip) { response.AddHeader("Content-Encoding", "gzip"); } response.AddHeader("Cache-Control", "no-cache"); response.AddHeader("Access-Control-Allow-Origin", "*"); response.ContentType = "application/json"; try { if (acceptGzip) { using (var ms = new MemoryStream()) { using (var zip = new GZipStream(ms, CompressionMode.Compress, true)) zip.Write(buffer, 0, buffer.Length); buffer = ms.ToArray(); } } response.ContentLength64 = buffer.Length; Stream output = response.OutputStream; output.Write(buffer, 0, buffer.Length); output.Close(); } catch (HttpListenerException) { } response.Close(); }
public async Task MakeRequest(Batch batch) { Stopwatch watch = new Stopwatch(); try { Uri uri = new Uri(_client.Config.Host + "/v1/import"); // set the current request time batch.SentAt = DateTime.Now.ToString("o"); string json = JsonConvert.SerializeObject(batch); // Basic Authentication // https://segment.io/docs/tracking-api/reference/#authentication #if NET35 _httpClient.Headers.Add("Authorization", "Basic " + BasicAuthHeader(batch.WriteKey, string.Empty)); _httpClient.Headers.Add("Content-Type", "application/json; charset=utf-8"); #else _httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Basic", BasicAuthHeader(batch.WriteKey, string.Empty)); #endif // Prepare request data; var requestData = Encoding.UTF8.GetBytes(json); // Compress request data if compression is set if (_client.Config.CompressRequest) { #if NET35 _httpClient.Headers.Add(HttpRequestHeader.ContentEncoding, "gzip"); #else //_httpClient.DefaultRequestHeaders.Add("Content-Encoding", "gzip"); #endif // Compress request data with GZip using (MemoryStream memory = new MemoryStream()) { using (GZipStream gzip = new GZipStream(memory, CompressionMode.Compress, true)) { gzip.Write(requestData, 0, requestData.Length); } requestData = memory.ToArray(); } } Logger.Info("Sending analytics request to Segment.io ..", new Dict { { "batch id", batch.MessageId }, { "json size", json.Length }, { "batch size", batch.batch.Count } }); // Retries with exponential backoff const int MAXIMUM_BACKOFF_DURATION = 10000; // Set maximum waiting limit to 10s int backoff = 100; // Set initial waiting time to 100ms int statusCode = (int)HttpStatusCode.OK; string responseStr = ""; while (backoff < MAXIMUM_BACKOFF_DURATION) { #if NET35 watch.Start(); try { var response = Encoding.UTF8.GetString(_httpClient.UploadData(uri, "POST", requestData)); watch.Stop(); Succeed(batch, watch.ElapsedMilliseconds); break; } catch (WebException ex) { watch.Stop(); var response = (HttpWebResponse)ex.Response; if (response != null) { statusCode = (int)response.StatusCode; if ((statusCode >= 500 && statusCode <= 600) || statusCode == 429) { // If status code is greater than 500 and less than 600, it indicates server error // Error code 429 indicates rate limited. // Retry uploading in these cases. Thread.Sleep(backoff); backoff *= 2; continue; } else if (statusCode >= 400) { responseStr = String.Format("Status Code {0}. ", statusCode); responseStr += ex.Message; break; } } } #else watch.Start(); ByteArrayContent content = new ByteArrayContent(requestData); content.Headers.ContentType = new MediaTypeHeaderValue("application/json"); if (_client.Config.CompressRequest) { content.Headers.ContentEncoding.Add("gzip"); } var response = await _httpClient.PostAsync(uri, content).ConfigureAwait(false); watch.Stop(); if (response.StatusCode == HttpStatusCode.OK) { Succeed(batch, watch.ElapsedMilliseconds); break; } else { statusCode = (int)response.StatusCode; if ((statusCode >= 500 && statusCode <= 600) || statusCode == 429) { // If status code is greater than 500 and less than 600, it indicates server error // Error code 429 indicates rate limited. // Retry uploading in these cases. await Task.Delay(backoff); backoff *= 2; continue; } else if (statusCode >= 400) { responseStr = String.Format("Status Code {0}. 
", response.StatusCode); responseStr += await response.Content.ReadAsStringAsync().ConfigureAwait(false); break; } } #endif } if (backoff >= MAXIMUM_BACKOFF_DURATION || statusCode != (int)HttpStatusCode.OK) { Fail(batch, new APIException("Unexpected Status Code", responseStr), watch.ElapsedMilliseconds); } } catch (System.Exception e) { watch.Stop(); Fail(batch, e, watch.ElapsedMilliseconds); } }
public void MakeApiCall(CallRequestContainer reqContainer) { reqContainer.RequestHeaders["Content-Type"] = "application/json"; #if !UNITY_WSA && !UNITY_WP8 && !UNITY_WEBGL if (PlayFabSettings.CompressApiData) { reqContainer.RequestHeaders["Content-Encoding"] = "GZIP"; reqContainer.RequestHeaders["Accept-Encoding"] = "GZIP"; using (var stream = new MemoryStream()) { using (var zipstream = new GZipStream(stream, CompressionMode.Compress, CompressionLevel.BestCompression)) { zipstream.Write(reqContainer.Payload, 0, reqContainer.Payload.Length); } reqContainer.Payload = stream.ToArray(); } } #endif //Debug.LogFormat("Posting {0} to Url: {1}", req.Trim(), url); var www = new WWW(reqContainer.FullUrl, reqContainer.Payload, reqContainer.RequestHeaders); #if PLAYFAB_REQUEST_TIMING var stopwatch = System.Diagnostics.Stopwatch.StartNew(); #endif // Start the www corouting to Post, and get a response or error which is then passed to the callbacks. Action <string> wwwSuccessCallback = (response) => { try { #if PLAYFAB_REQUEST_TIMING var startTime = DateTime.UtcNow; #endif var httpResult = JsonWrapper.DeserializeObject <HttpResponseObject>(response); if (httpResult.code == 200) { // We have a good response from the server reqContainer.JsonResponse = JsonWrapper.SerializeObject(httpResult.data); reqContainer.DeserializeResultJson(); reqContainer.ApiResult.Request = reqContainer.ApiRequest; reqContainer.ApiResult.CustomData = reqContainer.CustomData; PlayFabHttp.instance.OnPlayFabApiResult(reqContainer.ApiResult); #if !DISABLE_PLAYFABCLIENT_API PlayFabDeviceUtil.OnPlayFabLogin(reqContainer.ApiResult); #endif try { PlayFabHttp.SendEvent(reqContainer.ApiEndpoint, reqContainer.ApiRequest, reqContainer.ApiResult, ApiProcessingEventType.Post); } catch (Exception e) { Debug.LogException(e); } #if PLAYFAB_REQUEST_TIMING stopwatch.Stop(); var timing = new PlayFabHttp.RequestTiming { StartTimeUtc = startTime, ApiEndpoint = reqContainer.ApiEndpoint, WorkerRequestMs = (int)stopwatch.ElapsedMilliseconds, MainThreadRequestMs = (int)stopwatch.ElapsedMilliseconds }; PlayFabHttp.SendRequestTiming(timing); #endif try { reqContainer.InvokeSuccessCallback(); } catch (Exception e) { Debug.LogException(e); } } else { if (reqContainer.ErrorCallback != null) { reqContainer.Error = PlayFabHttp.GeneratePlayFabError(reqContainer.ApiEndpoint, response, reqContainer.CustomData); PlayFabHttp.SendErrorEvent(reqContainer.ApiRequest, reqContainer.Error); reqContainer.ErrorCallback(reqContainer.Error); } } } catch (Exception e) { Debug.LogException(e); } }; Action <string> wwwErrorCallback = (errorCb) => { reqContainer.JsonResponse = errorCb; if (reqContainer.ErrorCallback != null) { reqContainer.Error = PlayFabHttp.GeneratePlayFabError(reqContainer.ApiEndpoint, reqContainer.JsonResponse, reqContainer.CustomData); PlayFabHttp.SendErrorEvent(reqContainer.ApiRequest, reqContainer.Error); reqContainer.ErrorCallback(reqContainer.Error); } }; PlayFabHttp.instance.StartCoroutine(PostPlayFabApiCall(www, wwwSuccessCallback, wwwErrorCallback)); }
private bool SaveXamlFile(Drawing drawing, string fileName, string imageFileName) { _writerErrorOccurred = false; string xamlFileName = null; if (string.IsNullOrWhiteSpace(imageFileName)) { string fileNameWithoutExt = Path.GetFileNameWithoutExtension(fileName); string workingDir = Path.GetDirectoryName(fileName); xamlFileName = Path.Combine(workingDir, fileNameWithoutExt + XamlExt); } else { string fileExt = Path.GetExtension(imageFileName); if (string.IsNullOrWhiteSpace(fileExt)) { xamlFileName = imageFileName + XamlExt; } else if (!string.Equals(fileExt, XamlExt, StringComparison.OrdinalIgnoreCase)) { xamlFileName = Path.ChangeExtension(imageFileName, XamlExt); } } if (File.Exists(xamlFileName)) { File.SetAttributes(xamlFileName, FileAttributes.Normal); File.Delete(xamlFileName); } if (this.UseFrameXamlWriter) { XmlWriterSettings writerSettings = new XmlWriterSettings(); writerSettings.Indent = true; writerSettings.Encoding = Encoding.UTF8; writerSettings.OmitXmlDeclaration = true; using (FileStream xamlFile = File.Create(xamlFileName)) { using (XmlWriter writer = XmlWriter.Create(xamlFile, writerSettings)) { System.Windows.Markup.XamlWriter.Save(drawing, writer); } } } else { try { XmlXamlWriter xamlWriter = new XmlXamlWriter(this.DrawingSettings); using (FileStream xamlFile = File.Create(xamlFileName)) { xamlWriter.Save(drawing, xamlFile); } } catch { _writerErrorOccurred = true; if (_fallbackOnWriterError) { if (File.Exists(xamlFileName)) { File.Move(xamlFileName, xamlFileName + BackupExt); } XmlWriterSettings writerSettings = new XmlWriterSettings(); writerSettings.Indent = true; writerSettings.Encoding = Encoding.UTF8; writerSettings.OmitXmlDeclaration = true; using (FileStream xamlFile = File.Create(xamlFileName)) { using (XmlWriter writer = XmlWriter.Create(xamlFile, writerSettings)) { System.Windows.Markup.XamlWriter.Save(drawing, writer); } } } else { throw; } } } if (this.SaveZaml) { string zamlFileName = Path.ChangeExtension(xamlFileName, CompressedXamlExt); if (File.Exists(zamlFileName)) { File.SetAttributes(zamlFileName, FileAttributes.Normal); File.Delete(zamlFileName); } FileStream zamlSourceFile = new FileStream(xamlFileName, FileMode.Open, FileAccess.Read, FileShare.Read); byte[] buffer = new byte[zamlSourceFile.Length]; // Read the file to ensure it is readable. int count = zamlSourceFile.Read(buffer, 0, buffer.Length); if (count != buffer.Length) { zamlSourceFile.Close(); return(false); } zamlSourceFile.Close(); FileStream zamlDestFile = File.Create(zamlFileName); GZipStream zipStream = new GZipStream(zamlDestFile, CompressionMode.Compress, true); zipStream.Write(buffer, 0, buffer.Length); zipStream.Close(); zamlDestFile.Close(); _zamlFile = zamlFileName; } _xamlFile = xamlFileName; if (!this.SaveXaml && File.Exists(xamlFileName)) { File.Delete(xamlFileName); _xamlFile = null; } return(true); }
/// <summary>
/// Compresses the raw file with GZip and writes it to the server path
/// </summary>
public void Zip()
{
    byte[] byteArray = GetRawFile(Url);
    //Prepare for compress
    using (MemoryStream ms = new MemoryStream())
    using (GZipStream sw = new GZipStream(ms, CompressionMode.Compress))
    {
        //Compress
        sw.Write(byteArray, 0, byteArray.Length);
        //Close, do not just Flush: GZipStream only writes its final block and footer
        //on Close/Dispose, so flushing alone leaves bytes missing from the output
        sw.Close();
        byteArray = ms.ToArray();
        ByteArrayToFile(string.Format("{0}{1}", BaseServerPath, FileName), byteArray);
    }
    FileLink = VirtualPathUtility.ToAbsolute(System.Configuration.ConfigurationManager.AppSettings["TEMPPATH"]) + FileName;
}
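The "do not flush" comment above is worth seeing in isolation: only closing or disposing the GZipStream emits the final deflate block and the 8-byte gzip footer, so ToArray before that yields a truncated stream. A minimal sketch; depending on the runtime, Flush may write some or none of the pending data, but the footer appears only at Dispose:

using System;
using System.IO;
using System.IO.Compression;

class GZipFooterDemo
{
    static void Main()
    {
        byte[] payload = new byte[10000]; // zeros compress to almost nothing

        using (var ms = new MemoryStream())
        {
            var gzip = new GZipStream(ms, CompressionMode.Compress, leaveOpen: true);
            gzip.Write(payload, 0, payload.Length);

            gzip.Flush();
            Console.WriteLine("after Flush:   {0} bytes", ms.Length); // incomplete gzip stream

            gzip.Dispose();
            Console.WriteLine("after Dispose: {0} bytes", ms.Length); // final block + footer present
        }
    }
}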
public static void CompressThread(object threadInfo)
{
    DispatchAttribute dispatchAttribute = (DispatchAttribute)threadInfo;
    byte[] data = dispatchAttribute.data;
    byte[] array = null;
    if (DispatchAPI.IsLocalConnection())
    {
        dispatchAttribute.algorithm = 0;
    }
    try
    {
        // Restructured from the decompiled goto/label flow:
        // algorithm 1 = GZip, algorithm 0 = passthrough, anything else leaves array null.
        if (dispatchAttribute.algorithm == 1)
        {
            using (MemoryStream memoryStream = new MemoryStream())
            {
                using (GZipStream gZipStream = new GZipStream(memoryStream, CompressionMode.Compress))
                {
                    gZipStream.Write(data, 0, data.Length);
                }
                array = memoryStream.ToArray();
            }
        }
        else if (dispatchAttribute.algorithm == 0)
        {
            array = dispatchAttribute.data;
        }
    }
    catch (Exception ex)
    {
        Common.WriteLine("CompressThread: " + ex.Message, new string[0]);
        array = null;
    }
    if (array == null || array.Length == 0)
    {
        return;
    }
    // compression ratio (computed but unused)
    double num = (double)array.Length;
    num /= (double)data.Length;
    if (dispatchAttribute.type == 1024)
    {
        byte[] bytes = BitConverter.GetBytes(dispatchAttribute.cid);
        byte[] array2 = new byte[8 + array.Length];
        Array.Copy(bytes, 0, array2, 0, bytes.Length);
        Array.Copy(array, 0, array2, 8, array.Length);
        array = array2;
    }
    dispatchAttribute.data = array;
    if (dispatchAttribute.cbCacheProcess != null)
    {
        dispatchAttribute.cbCacheProcess(dispatchAttribute);
    }
    if (dispatchAttribute.cbCallBack != null)
    {
        dispatchAttribute.cbCallBack(dispatchAttribute);
    }
}
private static void SerializePkgHeader(PkgHeader header, GZipStream gz) { var bytes = Encoding.UTF8.GetBytes(SimpleJson.SerializeObject(header)); gz.Write(bytes, 0, bytes.Length); }
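SerializePkgHeader leaves the GZipStream to the caller; the complete round trip of gzip-compressing UTF-8 JSON and reading it back looks like this. A self-contained sketch with a literal JSON string standing in for the serialized header:

using System;
using System.IO;
using System.IO.Compression;
using System.Text;

class GZipJsonRoundTrip
{
    static void Main()
    {
        string json = "{\"name\":\"pkg\",\"version\":1}";

        // Compress the UTF-8 encoded JSON into an in-memory gzip stream
        byte[] compressed;
        using (var ms = new MemoryStream())
        {
            using (var gz = new GZipStream(ms, CompressionMode.Compress))
            {
                byte[] bytes = Encoding.UTF8.GetBytes(json);
                gz.Write(bytes, 0, bytes.Length);
            }
            compressed = ms.ToArray();
        }

        // Decompress and decode back to the original string
        using (var input = new MemoryStream(compressed))
        using (var gz = new GZipStream(input, CompressionMode.Decompress))
        using (var output = new MemoryStream())
        {
            gz.CopyTo(output);
            Console.WriteLine(Encoding.UTF8.GetString(output.ToArray())); // original JSON
        }
    }
}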
public void ConvertToMapV3(int x, int y, string[,] pic)
{
    int mapsX = Convert.ToInt32(Math.Ceiling((double)x / 128));
    int mapsY = Convert.ToInt32(Math.Ceiling((double)y / 128));
    int maps = mapsX * mapsY;
    byte[,,] mapArray = new byte[128, 128, maps];
    int xx = 0;
    int yy = 0;
    int mapCount = 0;
    int addX = 0;
    int addY = 0;
    string str = string.Empty;
    bool outOfRange = false;
    progressFileMax = maps;
    worker1.ReportProgress(0);//update progress bars
    for (int mapCountY = 0; mapCountY < mapsY; mapCountY++)
    {
        for (int mapCountX = 0; mapCountX < mapsX; mapCountX++)
        {
            for (int mapBlockCountY = 0; mapBlockCountY < 128; mapBlockCountY++)
            {
                for (int mapBlockCountX = 0; mapBlockCountX < 128; mapBlockCountX++)
                {
                    //if ((mapBlockCountY > 126) | (mapBlockCountY == 0))
                    //{
                    //    MessageBox.Show(
                    //        mapBlockCountX.ToString() + ", " + mapBlockCountY.ToString() + "\n" +
                    //        xx.ToString() + ", " + yy.ToString()
                    //        );
                    //}
                    if ((xx < x) && (yy < y))
                    {
                        str = pic[xx, yy];
                        outOfRange = false;
                    }
                    else
                    {
                        str = globalvars.colordata[29, 0];
                        outOfRange = true;
                    }
                    for (int a = 0; a < 4; a++) //shade of block in colordata[,] (0-3, locked map)
                    {
                        for (int b = 1; b < globalvars.colordata_arraysize_x; b++) //color of block colordata[,]
                        {
                            if (globalvars.colordata[b, a] == str)
                            {
                                int aa = 0;
                                switch (a)
                                {
                                    case 0: aa = 2; break;
                                    case 1: aa = 1; break;
                                    case 2: aa = 0; break;
                                    case 3: aa = 3; break;
                                    default: break;
                                }
                                if (outOfRange)
                                {
                                    mapArray[mapBlockCountX, mapBlockCountY, mapCount] = 118;//ID 29 * 4 + 2
                                }
                                else
                                {
                                    mapArray[mapBlockCountX, mapBlockCountY, mapCount] = Convert.ToByte(b * 4 + aa);
                                }
                            }
                        }
                    }
                    xx++;
                }
                xx = addX;
                yy++;
            }
            yy = addY;
            addX += 128;
            xx = addX;
            mapCount++;
            progressFile = mapCount;
            worker1.ReportProgress(0);//update progress bars
        }
        addY += 128;
        yy = addY;
        addX = 0;
        xx = addX;
    }
    for (int i = 0; i < maps; i++)
    {
        ArrayList byteList = new ArrayList();
        byteList.Add((byte)10);//TAG_Compound without a name...
        byteList.Add((byte)0);
        byteList.Add((byte)0);
        byteList.AddRange(TAG_Int("DataVersion", 2230));
        byteList.AddRange(TAG_Compound("data"));
        byteList.AddRange(TAG_List_Empty("banners"));
        byteList.AddRange(TAG_List_Empty("frames"));
        byteList.AddRange(TAG_Int("dimension", 0));
        byteList.AddRange(TAG_Byte("locked", 1));
        byteList.AddRange(TAG_Byte("scale", 0));
        byteList.AddRange(TAG_Byte("trackingPosition", 1));
        byteList.AddRange(TAG_Byte("unlimitedTracking", 0));
        byteList.AddRange(TAG_Int("xCenter", 128));
        byteList.AddRange(TAG_Int("zCenter", 128));
        byteList.AddRange(TAG_Byte_Array("colors", 16384));
        byte[] BlockInfo = byteList.ToArray(typeof(byte)) as byte[];
        byte[] BlockData = new byte[16386];//+2 for the CLOSE of TAG_Compound("data") and of TAG_Compound("")
        int BlockCount = 0;
        for (int a = 0; a < 128; a++)
        {
            for (int b = 0; b < 128; b++)
            {
                BlockData[BlockCount] = mapArray[b, a, i];
                BlockCount++;
            }
        }
        byte[] byteArray = BlockInfo.Concat(BlockData).ToArray();
        using (var memStream = new MemoryStream())
        {
            using (var compStream = new GZipStream(memStream, CompressionMode.Compress))
            {
                compStream.Write(byteArray, 0, byteArray.Length);
                compStream.Flush();
            }
            byte[] compressedArray = memStream.ToArray();
            File.WriteAllBytes(@globalvars.pathSaveLockedMap + "_" + globalvars.startMap.ToString() + ".dat", compressedArray);
        }
        globalvars.startMap++;
    }
    if (checkBox_batch.Checked == false)
    {
        globalvars.startMap = int.Parse(textBox_startMap.Text);
        MessageBox.Show("Maps: " + maps.ToString());
    }
    progressFile = 0;
    worker1.ReportProgress(0);//update progress bars
}
public void Handler(IConsoleAdapter console, IErrorAdapter error) { if (!Silent) { if (!File.Exists(InputFile)) { console.WrapLine("Input map file does not exist...".Red()); return; } if (File.Exists($"{OutputFile}.N2SMAP")) { if (!console.Confirm("Output file exists... Would you like to overwrite it?".Red())) { return; } } } string jsonText; if (Xml) { console.WrapLine("Loading map from XML..."); var doc = new XmlDocument(); doc.LoadXml(File.ReadAllText(InputFile)); jsonText = JsonConvert.SerializeXmlNode(doc.FirstChild); } else { console.WrapLine("Loading map from JSON..."); jsonText = File.ReadAllText(InputFile); } var deserialized = JsonConvert.DeserializeObject <MapT>(jsonText); deserialized.GameVersion += ":N2SMap_Viewer"; var fb = new FlatBufferBuilder(1); console.WrapLine("Packing map..."); fb.Finish(N2S.FileFormat.Map.Pack(fb, deserialized).Value); var buf = fb.SizedByteArray(); using (var outputStream = new MemoryStream()) { //Here we're compressing the data to make it smaller console.WrapLine("Compressing map..."); using (var gZipStream = new GZipStream(outputStream, CompressionMode.Compress)) gZipStream.Write(buf, 0, buf.Length); //Writing compressed data to a file console.WrapLine("Writing map to file..."); File.WriteAllBytes(OutputFile + ".N2SMAP", outputStream.ToArray()); } console.WrapLine($"Complete! File written to {OutputFile}.N2SMAP"); }
/// <summary>Compresses the encapsulation payload using GZip compression. Compressed encapsulation payload is /// only supported with the 2.0 encoding.</summary> /// <returns>A <see cref="CompressionResult"/> value indicating the result of the compression operation. /// </returns> public CompressionResult CompressPayload() { if (IsSealed) { throw new InvalidOperationException("cannot modify a sealed frame"); } if (Encoding != Encoding.V20) { throw new NotSupportedException("payload compression is only supported with 2.0 encoding"); } else { IList <ArraySegment <byte> > payload = Payload; int encapsulationOffset = this is OutgoingResponseFrame ? 1 : 0; // The encapsulation always starts in the first segment of the payload (at position 0 or 1). Debug.Assert(encapsulationOffset < payload[0].Count); int sizeLength = Protocol == Protocol.Ice2 ? payload[0][encapsulationOffset].ReadSizeLength20() : 4; byte compressionStatus = payload.GetByte(encapsulationOffset + sizeLength + 2); if (compressionStatus != 0) { throw new InvalidOperationException("payload is already compressed"); } int encapsulationSize = payload.GetByteCount() - encapsulationOffset; // this includes the size length if (encapsulationSize < _compressionMinSize) { return(CompressionResult.PayloadTooSmall); } // Reserve memory for the compressed data, this should never be greater than the uncompressed data // otherwise we will just send the uncompressed data. byte[] compressedData = new byte[encapsulationOffset + encapsulationSize]; // Copy the byte before the encapsulation, if any if (encapsulationOffset == 1) { compressedData[0] = payload[0][0]; } // Write the encapsulation header int offset = encapsulationOffset + sizeLength; compressedData[offset++] = Encoding.Major; compressedData[offset++] = Encoding.Minor; // Set the compression status to '1' GZip compressed compressedData[offset++] = 1; // Write the size of the uncompressed data compressedData.AsSpan(offset, sizeLength).WriteFixedLengthSize20(encapsulationSize - sizeLength); offset += sizeLength; using var memoryStream = new MemoryStream(compressedData, offset, compressedData.Length - offset); using var gzipStream = new GZipStream( memoryStream, _compressionLevel == CompressionLevel.Fastest ? System.IO.Compression.CompressionLevel.Fastest : System.IO.Compression.CompressionLevel.Optimal); try { // The data to compress starts after the compression status byte, + 3 corresponds to (Encoding 2 // bytes, Compression status 1 byte) foreach (ArraySegment <byte> segment in payload.Slice(encapsulationOffset + sizeLength + 3)) { gzipStream.Write(segment); } gzipStream.Flush(); } catch (NotSupportedException) { // If the data doesn't fit in the memory stream NotSupportedException is thrown when GZipStream // try to expand the fixed size MemoryStream. return(CompressionResult.PayloadNotCompressible); } int binaryContextLastSegmentOffset = -1; if (_binaryContextOstr is OutputStream ostr) { // If there is a binary context, we make sure it uses its own segment(s). OutputStream.Position binaryContextEnd = ostr.Tail; binaryContextLastSegmentOffset = binaryContextEnd.Offset; // When we have a _binaryContextOstr, we wrote at least the size placeholder for the binary context // dictionary. 
Debug.Assert(binaryContextEnd.Segment > PayloadEnd.Segment || binaryContextEnd.Offset > PayloadEnd.Offset);
    // The first segment of the binary context is immediately after the payload
    ArraySegment<byte> segment = Data[PayloadEnd.Segment].Slice(PayloadEnd.Offset);
    if (segment.Count > 0)
    {
        Data.Insert(PayloadEnd.Segment + 1, segment);
        if (binaryContextEnd.Segment == PayloadEnd.Segment)
        {
            binaryContextLastSegmentOffset -= PayloadEnd.Offset;
        }
    }
    // else the binary context already starts with its own segment
}
int start = PayloadStart.Segment;
if (PayloadStart.Offset > 0)
{
    // There are non-payload bytes in the first payload segment: we move them to their own segment.
    ArraySegment<byte> segment = Data[PayloadStart.Segment];
    Data[PayloadStart.Segment] = segment.Slice(0, PayloadStart.Offset);
    start += 1;
}
Data.RemoveRange(start, PayloadEnd.Segment - start + 1);
offset += (int)memoryStream.Position;
Data.Insert(start, new ArraySegment<byte>(compressedData, 0, offset));
PayloadStart = new OutputStream.Position(start, 0);
PayloadEnd = new OutputStream.Position(start, offset);
Size = Data.GetByteCount();
if (_binaryContextOstr != null)
{
    // Recreate binary context OutputStream
    _binaryContextOstr = new OutputStream(_binaryContextOstr.Encoding, Data, new OutputStream.Position(Data.Count - 1, binaryContextLastSegmentOffset));
}
// Rewrite the encapsulation size
compressedData.AsSpan(encapsulationOffset, sizeLength).WriteEncapsulationSize(offset - sizeLength - encapsulationOffset, Protocol.GetEncoding());
_payload = null; // reset cache
return(CompressionResult.Success);
}
}
public void MakeApiCall(CallRequestContainer reqContainer) { //Set headers var headers = new Dictionary <string, string> { { "Content-Type", "application/json" } }; if (reqContainer.AuthKey == AuthType.DevSecretKey) { #if ENABLE_PLAYFABSERVER_API || ENABLE_PLAYFABADMIN_API headers.Add("X-SecretKey", PlayFabSettings.DeveloperSecretKey); #endif } else if (reqContainer.AuthKey == AuthType.LoginSession) { headers.Add("X-Authorization", AuthKey); } headers.Add("X-ReportErrorAsSuccess", "true"); headers.Add("X-PlayFabSDK", PlayFabSettings.VersionString); #if !UNITY_WSA && !UNITY_WP8 && !UNITY_WEBGL if (PlayFabSettings.CompressApiData) { headers.Add("Content-Encoding", "GZIP"); headers.Add("Accept-Encoding", "GZIP"); using (var stream = new MemoryStream()) { using (GZipStream zipstream = new GZipStream(stream, CompressionMode.Compress, CompressionLevel.BestCompression)) { zipstream.Write(reqContainer.Payload, 0, reqContainer.Payload.Length); } reqContainer.Payload = stream.ToArray(); } } #endif //Debug.LogFormat("Posting {0} to Url: {1}", req.Trim(), url); var www = new WWW(reqContainer.FullUrl, reqContainer.Payload, headers); #if PLAYFAB_REQUEST_TIMING var stopwatch = System.Diagnostics.Stopwatch.StartNew(); #endif // Start the www corouting to Post, and get a response or error which is then passed to the callbacks. Action <string> wwwSuccessCallback = (response) => { try { #if PLAYFAB_REQUEST_TIMING var startTime = DateTime.UtcNow; #endif var httpResult = JsonWrapper.DeserializeObject <HttpResponseObject>(response, PlayFabUtil.ApiSerializerStrategy); if (httpResult.code == 200) { // We have a good response from the server reqContainer.JsonResponse = JsonWrapper.SerializeObject(httpResult.data, PlayFabUtil.ApiSerializerStrategy); reqContainer.DeserializeResultJson(); reqContainer.ApiResult.Request = reqContainer.ApiRequest; reqContainer.ApiResult.CustomData = reqContainer.CustomData; #if !DISABLE_PLAYFABCLIENT_API ClientModels.UserSettings userSettings = null; var res = reqContainer.ApiResult as ClientModels.LoginResult; var regRes = reqContainer.ApiResult as ClientModels.RegisterPlayFabUserResult; if (res != null) { userSettings = res.SettingsForUser; AuthKey = res.SessionTicket; } else if (regRes != null) { userSettings = regRes.SettingsForUser; AuthKey = regRes.SessionTicket; } if (userSettings != null && AuthKey != null && userSettings.NeedsAttribution) { PlayFabIdfa.OnPlayFabLogin(); } #endif try { PlayFabHttp.SendEvent(reqContainer.ApiEndpoint, reqContainer.ApiRequest, reqContainer.ApiResult, ApiProcessingEventType.Post); } catch (Exception e) { Debug.LogException(e); } #if PLAYFAB_REQUEST_TIMING stopwatch.Stop(); var timing = new PlayFabHttp.RequestTiming { StartTimeUtc = startTime, ApiEndpoint = reqContainer.ApiEndpoint, WorkerRequestMs = (int)stopwatch.ElapsedMilliseconds, MainThreadRequestMs = (int)stopwatch.ElapsedMilliseconds }; PlayFabHttp.SendRequestTiming(timing); #endif try { reqContainer.InvokeSuccessCallback(); } catch (Exception e) { Debug.LogException(e); } } else { if (reqContainer.ErrorCallback != null) { reqContainer.Error = PlayFabHttp.GeneratePlayFabError(response, reqContainer.CustomData); PlayFabHttp.SendErrorEvent(reqContainer.ApiRequest, reqContainer.Error); reqContainer.ErrorCallback(reqContainer.Error); } } } catch (Exception e) { Debug.LogException(e); } }; Action <string> wwwErrorCallback = (errorCb) => { reqContainer.JsonResponse = errorCb; if (reqContainer.ErrorCallback != null) { reqContainer.Error = PlayFabHttp.GeneratePlayFabError(reqContainer.JsonResponse, 
reqContainer.CustomData); PlayFabHttp.SendErrorEvent(reqContainer.ApiRequest, reqContainer.Error); reqContainer.ErrorCallback(reqContainer.Error); } }; PlayFabHttp.instance.StartCoroutine(Post(www, wwwSuccessCallback, wwwErrorCallback)); }
static void GenerateBundles (List<string> files) { string temp_s = "temp.s"; // Path.GetTempFileName (); string temp_c = "temp.c"; string temp_o = "temp.o"; if (compile_only) temp_c = output; if (object_out != null) temp_o = object_out; try { List<string> c_bundle_names = new List<string> (); List<string[]> config_names = new List<string[]> (); using (StreamWriter ts = new StreamWriter (File.Create (temp_s))) { using (StreamWriter tc = new StreamWriter (File.Create (temp_c))) { string prog = null; #if XAMARIN_ANDROID tc.WriteLine ("/* This source code was produced by mkbundle, do not edit */"); tc.WriteLine ("\n#ifndef NULL\n#define NULL (void *)0\n#endif"); tc.WriteLine (@" typedef struct { const char *name; const unsigned char *data; const unsigned int size; } MonoBundledAssembly; void mono_register_bundled_assemblies (const MonoBundledAssembly **assemblies); void mono_register_config_for_assembly (const char* assembly_name, const char* config_xml); "); #else tc.WriteLine ("#include <mono/metadata/mono-config.h>"); tc.WriteLine ("#include <mono/metadata/assembly.h>\n"); #endif if (compress) { tc.WriteLine ("typedef struct _compressed_data {"); tc.WriteLine ("\tMonoBundledAssembly assembly;"); tc.WriteLine ("\tint compressed_size;"); tc.WriteLine ("} CompressedAssembly;\n"); } object monitor = new object (); var streams = new Dictionary<string, Stream> (); var sizes = new Dictionary<string, long> (); // Do the file reading and compression in parallel Action<string> body = delegate (string url) { string fname = LocateFile (new Uri (url).LocalPath); Stream stream = File.OpenRead (fname); long real_size = stream.Length; int n; if (compress) { byte[] cbuffer = new byte [8192]; MemoryStream ms = new MemoryStream (); GZipStream deflate = new GZipStream (ms, CompressionMode.Compress, leaveOpen:true); while ((n = stream.Read (cbuffer, 0, cbuffer.Length)) != 0){ deflate.Write (cbuffer, 0, n); } stream.Close (); deflate.Close (); byte [] bytes = ms.GetBuffer (); stream = new MemoryStream (bytes, 0, (int) ms.Length, false, false); } lock (monitor) { streams [url] = stream; sizes [url] = real_size; } }; //#if NET_4_5 #if FALSE Parallel.ForEach (files, body); #else foreach (var url in files) body (url); #endif // The non-parallel part byte [] buffer = new byte [8192]; // everything other than a-zA-Z0-9_ needs to be escaped in asm symbols. 
var symbolEscapeRE = new System.Text.RegularExpressions.Regex ("[^\\w_]"); foreach (var url in files) { string fname = LocateFile (new Uri (url).LocalPath); string aname = MakeBundle.GetAssemblyName (fname); string encoded = symbolEscapeRE.Replace (aname, "_"); if (prog == null) prog = aname; var stream = streams [url]; var real_size = sizes [url]; if (!quiet) Console.WriteLine (" embedding: " + fname); WriteSymbol (ts, "assembly_data_" + encoded, stream.Length); WriteBuffer (ts, stream, buffer); if (compress) { tc.WriteLine ("extern const unsigned char assembly_data_{0} [];", encoded); tc.WriteLine ("static CompressedAssembly assembly_bundle_{0} = {{{{\"{1}\"," + " assembly_data_{0}, {2}}}, {3}}};", encoded, aname, real_size, stream.Length); if (!quiet) { double ratio = ((double) stream.Length * 100) / real_size; Console.WriteLine (" compression ratio: {0:.00}%", ratio); } } else { tc.WriteLine ("extern const unsigned char assembly_data_{0} [];", encoded); tc.WriteLine ("static const MonoBundledAssembly assembly_bundle_{0} = {{\"{1}\", assembly_data_{0}, {2}}};", encoded, aname, real_size); } stream.Close (); c_bundle_names.Add ("assembly_bundle_" + encoded); try { FileStream cf = File.OpenRead (fname + ".config"); if (!quiet) Console.WriteLine (" config from: " + fname + ".config"); tc.WriteLine ("extern const unsigned char assembly_config_{0} [];", encoded); WriteSymbol (ts, "assembly_config_" + encoded, cf.Length); WriteBuffer (ts, cf, buffer); ts.WriteLine (); config_names.Add (new string[] {aname, encoded}); } catch (FileNotFoundException) { /* we ignore if the config file doesn't exist */ } } if (config_file != null){ FileStream conf; try { conf = File.OpenRead (config_file); } catch { Error ("Failure to open {0}", config_file); return; } if (!quiet) Console.WriteLine ("System config from: " + config_file); tc.WriteLine ("extern const char system_config;"); WriteSymbol (ts, "system_config", config_file.Length); WriteBuffer (ts, conf, buffer); // null terminator ts.Write ("\t.byte 0\n"); ts.WriteLine (); } if (machine_config_file != null){ FileStream conf; try { conf = File.OpenRead (machine_config_file); } catch { Error ("Failure to open {0}", machine_config_file); return; } if (!quiet) Console.WriteLine ("Machine config from: " + machine_config_file); tc.WriteLine ("extern const char machine_config;"); WriteSymbol (ts, "machine_config", machine_config_file.Length); WriteBuffer (ts, conf, buffer); ts.Write ("\t.byte 0\n"); ts.WriteLine (); } ts.Close (); if (compress) tc.WriteLine ("\nstatic const CompressedAssembly *compressed [] = {"); else tc.WriteLine ("\nstatic const MonoBundledAssembly *bundled [] = {"); foreach (string c in c_bundle_names){ tc.WriteLine ("\t&{0},", c); } tc.WriteLine ("\tNULL\n};\n"); tc.WriteLine ("static char *image_name = \"{0}\";", prog); if (ctor_func != null) { tc.WriteLine ("\nextern void {0} (void);", ctor_func); tc.WriteLine ("\n__attribute__ ((constructor)) static void mono_mkbundle_ctor (void)"); tc.WriteLine ("{{\n\t{0} ();\n}}", ctor_func); } tc.WriteLine ("\nstatic void install_dll_config_files (void) {\n"); foreach (string[] ass in config_names){ tc.WriteLine ("\tmono_register_config_for_assembly (\"{0}\", assembly_config_{1});\n", ass [0], ass [1]); } if (config_file != null) tc.WriteLine ("\tmono_config_parse_memory (&system_config);\n"); if (machine_config_file != null) tc.WriteLine ("\tmono_register_machine_config (&machine_config);\n"); tc.WriteLine ("}\n"); if (config_dir != null) tc.WriteLine ("static const char *config_dir = \"{0}\";", 
config_dir); else tc.WriteLine ("static const char *config_dir = NULL;"); Stream template_stream; if (compress) { template_stream = System.Reflection.Assembly.GetAssembly (typeof(MakeBundle)).GetManifestResourceStream ("template_z.c"); } else { template_stream = System.Reflection.Assembly.GetAssembly (typeof(MakeBundle)).GetManifestResourceStream ("template.c"); } StreamReader s = new StreamReader (template_stream); string template = s.ReadToEnd (); tc.Write (template); if (!nomain && custom_main == null) { Stream template_main_stream = System.Reflection.Assembly.GetAssembly (typeof(MakeBundle)).GetManifestResourceStream ("template_main.c"); StreamReader st = new StreamReader (template_main_stream); string maintemplate = st.ReadToEnd (); tc.Write (maintemplate); } tc.Close (); string assembler = GetEnv("AS", "as"); string as_cmd = String.Format("{0} -o {1} {2} ", assembler, temp_o, temp_s); Execute(as_cmd); if (compile_only) return; if (!quiet) Console.WriteLine("Compiling:"); if (style == "windows") { Func<string, string> quote = (pp) => { return "\"" + pp + "\""; }; string compiler = GetEnv("CC", "cl.exe"); string winsdkPath = GetEnv("WINSDK", @"C:\Program Files (x86)\Windows Kits\8.1"); string vsPath = GetEnv("VSINCLUDE", @"C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC"); string monoPath = GetEnv("MONOPREFIX", @"C:\Program Files (x86)\Mono"); string[] includes = new string[] {winsdkPath + @"\Include\um", winsdkPath + @"\Include\shared", vsPath + @"\include", monoPath + @"\include\mono-2.0", "." }; // string[] libs = new string[] { winsdkPath + @"\Lib\winv6.3\um\x86" , vsPath + @"\lib" }; var linkLibraries = new string[] { "kernel32.lib", "version.lib", "Ws2_32.lib", "Mswsock.lib", "Psapi.lib", "shell32.lib", "OleAut32.lib", "ole32.lib", "winmm.lib", "user32.lib", "libvcruntime.lib", "advapi32.lib", "OLDNAMES.lib", "libucrt.lib" }; string glue_obj = "mkbundle_glue.obj"; string monoLib; if (static_link) monoLib = LocateFile (monoPath + @"\lib\monosgen-2.0-static.lib"); else { Console.WriteLine ("WARNING: Dynamically linking the Mono runtime on Windows is not a tested option."); monoLib = LocateFile (monoPath + @"\lib\monosgen-2.0.lib"); LocateFile (monoPath + @"\lib\monosgen-2.0.dll"); // in this case, the .lib is just the import library, and the .dll is also needed } var compilerArgs = new List<string>(); compilerArgs.Add("/MT"); foreach (string include in includes) compilerArgs.Add(String.Format ("/I {0}", quote (include))); if (!nomain || custom_main != null) { compilerArgs.Add(quote(temp_c)); compilerArgs.Add(quote(temp_o)); if (custom_main != null) compilerArgs.Add(quote(custom_main)); compilerArgs.Add(quote(monoLib)); compilerArgs.Add("/link"); compilerArgs.Add("/NODEFAULTLIB"); compilerArgs.Add("/SUBSYSTEM:windows"); compilerArgs.Add("/ENTRY:mainCRTStartup"); compilerArgs.AddRange(linkLibraries); compilerArgs.Add("/out:"+ output); string cl_cmd = String.Format("{0} {1}", compiler, String.Join(" ", compilerArgs.ToArray())); Execute (cl_cmd); } else { // we are just creating a .lib compilerArgs.Add("/c"); // compile only compilerArgs.Add(temp_c); compilerArgs.Add(String.Format("/Fo" + glue_obj)); // .obj output name string cl_cmd = String.Format("{0} {1}", compiler, String.Join(" ", compilerArgs.ToArray())); Execute (cl_cmd); string librarian = GetEnv ("LIB", "lib.exe"); var librarianArgs = new List<string> (); librarianArgs.Add (String.Format ("/out:{0}.lib" + output)); librarianArgs.Add (temp_o); librarianArgs.Add (glue_obj); librarianArgs.Add (monoLib); string lib_cmd = 
String.Format("{0} {1}", librarian, String.Join(" ", librarianArgs.ToArray())); Execute (lib_cmd); } } else { string zlib = (compress ? "-lz" : ""); string debugging = "-g"; string cc = GetEnv("CC", "cc"); string cmd = null; if (style == "linux") debugging = "-ggdb"; if (static_link) { string smonolib; if (style == "osx") smonolib = "`pkg-config --variable=libdir mono-2`/libmono-2.0.a "; else smonolib = "-Wl,-Bstatic -lmono-2.0 -Wl,-Bdynamic "; cmd = String.Format("{4} -o '{2}' -Wall `pkg-config --cflags mono-2` {0} {3} " + "`pkg-config --libs-only-L mono-2` " + smonolib + "`pkg-config --libs-only-l mono-2 | sed -e \"s/\\-lmono-2.0 //\"` {1}", temp_c, temp_o, output, zlib, cc); } else { cmd = String.Format("{4} " + debugging + " -o '{2}' -Wall {0} `pkg-config --cflags --libs mono-2` {3} {1}", temp_c, temp_o, output, zlib, cc); } Execute (cmd); } if (!quiet) Console.WriteLine ("Done"); } } } finally { if (!keeptemp){ if (object_out == null){ File.Delete (temp_o); } if (!compile_only){ File.Delete (temp_c); } File.Delete (temp_s); } } }
public void OnNetworkMessage(IEnumerable<string> cmd, JObject args)
{
    if (cmd.First() == "TextureFile")
    {
        var path = args["path"].Value<string>();
        var data = args["data"].Value<string>();
        int width = args["width"].Value<int>();
        int height = args["height"].Value<int>();
        ImageRequest request;
        lock (imageCache)
        {
            if (!pendingRequests.ContainsKey(path))
            {
                return; //#TODO log/handle
            }
            request = pendingRequests[path];
        }

        var dataBytes = Convert.FromBase64String(data);

        //ABGR -> BGRA
        for (int i = 0; i < dataBytes.Length; i += 4)
        {
            var A = dataBytes[i];
            var B = dataBytes[i + 1];
            var G = dataBytes[i + 2];
            var R = dataBytes[i + 3];
            dataBytes[i] = B;
            dataBytes[i + 1] = G;
            dataBytes[i + 2] = R;
            dataBytes[i + 3] = A;
        }

        var bmp = SKImage.FromPixelCopy(new SKImageInfo(width, height, SKColorType.Bgra8888), dataBytes);
        //if (bmp == null) Debugger.Break();
        request.completionSource.SetResult(bmp);

        //if (bmp != null)
        //{
        //    using (var data2 = bmp.Encode(SKEncodedImageFormat.Png, 80))
        //    using (var stream = File.OpenWrite(Path.Combine("P:/", Path.ChangeExtension(Path.GetFileName(path), ".png"))))
        //    {
        //        // save the data to a stream
        //        data2.SaveTo(stream);
        //    }
        //}

        lock (imageCache)
        {
            imageCache[path] = bmp;
            pendingRequests.Remove(path);
        }
    }
    else if (cmd.First() == "MapFile")
    {
        var path = args["name"].Value<string>();
        var data = args["data"].Value<string>();
        MapfileRequest request;
        lock (imageCache) // look up the request only under the lock; an unsynchronized pre-lookup would race with other handlers
        {
            request = pendingMapRequests.FirstOrDefault(x => path.StartsWith(x.name));
            if (request == null)
            {
                return; //#TODO log/handle
            }
        }

        var dataBytes = Convert.FromBase64String(data);

        //If the source was not compressed, still store it compressed; saves some disk space, especially on mobile
        if (path.EndsWith("z") || Path.GetExtension(path) == ".zip")
        {
            using (var writer = File.Create(Path.Combine(request.targetDirectory, path)))
            {
                writer.Write(dataBytes, 0, dataBytes.Length);
            }
        }
        else
        {
            path = path + "z"; //svgz
            using (var writer = File.Create(Path.Combine(request.targetDirectory, path)))
            {
                using (GZipStream compressionStream = new GZipStream(writer, CompressionMode.Compress))
                {
                    compressionStream.Write(dataBytes, 0, dataBytes.Length);
                }
            }
        }

        request.completionSource.SetResult(Path.Combine(request.targetDirectory, path));
        lock (imageCache)
        {
            pendingMapRequests.Remove(request);
        }
    }
}
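The MapFile branch above gzip-compresses plain SVG payloads and appends a "z" to the name (.svg becomes .svgz). A sketch of the read side, assuming a plain helper is sufficient; the ReadSvgz name is illustrative:

using System.IO;
using System.IO.Compression;

// Illustrative counterpart: inflate an .svgz produced above back to raw SVG bytes.
static byte[] ReadSvgz(string path)
{
    using (var input = File.OpenRead(path))
    using (var gz = new GZipStream(input, CompressionMode.Decompress))
    using (var ms = new MemoryStream())
    {
        gz.CopyTo(ms);       // inflate the whole stream into memory
        return ms.ToArray(); // the original SVG XML bytes
    }
}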
static void Slice(string sourceFile, string destinationDirectory, int parts)
{
    using (var sourceStream = new FileStream(sourceFile, FileMode.Open))
    {
        // size of each part, rounded up so the parts cover the whole file
        long partSize = (long)Math.Ceiling((double)sourceStream.Length / parts);
        long sizeRemaining = sourceStream.Length;
        string currentPartPath;
        FileStream fsPart;

        // extracting the name and extension of the input file
        string pattern = @"((\w+)\.(\w+))";
        Regex fileNames = new Regex(pattern);
        MatchCollection matches = fileNames.Matches(sourceFile);

        // collects the part paths, to be handed to a later assemble step
        List<string> files = new List<string>();

        for (int i = 0; i < parts; i++)
        {
            currentPartPath = destinationDirectory + String.Format(@"Part{0}", i) + "." + matches[0].Groups[3];
            files.Add(currentPartPath);

            using (fsPart = new FileStream(currentPartPath, FileMode.Create))
            {
                using (var compressionStream = new GZipStream(fsPart, CompressionMode.Compress, false))
                {
                    long currentPieceSize = 0;
                    byte[] buffer = new byte[4096];
                    while (currentPieceSize < partSize)
                    {
                        int readBytes = sourceStream.Read(buffer, 0, buffer.Length);
                        if (readBytes == 0)
                        {
                            break;
                        }
                        compressionStream.Write(buffer, 0, readBytes);
                        currentPieceSize += readBytes;
                    }
                }
            }

            // shrink the final part to whatever is left of the file;
            // stay in long arithmetic so files over 2 GB do not overflow
            sizeRemaining = sourceStream.Length - ((i + 1) * partSize);
            if (sizeRemaining > 0 && sizeRemaining < partSize)
            {
                partSize = sizeRemaining;
            }
        }
    }
}
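The natural complement to Slice is an assemble step that inflates each part in order and appends it to one destination file. A minimal sketch, assuming the part paths are supplied in the order Slice created them; the Assemble name and signature are illustrative:

using System.Collections.Generic;
using System.IO;
using System.IO.Compression;

// Sketch only: reverse of Slice, concatenating the decompressed parts.
static void Assemble(List<string> partFiles, string destinationFile)
{
    using (var output = File.Create(destinationFile))
    {
        foreach (string part in partFiles)
        {
            using (var input = File.OpenRead(part))
            using (var gz = new GZipStream(input, CompressionMode.Decompress))
                gz.CopyTo(output); // each part is an independent gzip stream
        }
    }
}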
/// <summary>
/// GZip encodes a memory buffer to a compressed memory buffer
/// </summary>
/// <param name="Buffer">The raw bytes to compress</param>
/// <returns>The gzip-compressed bytes</returns>
public static byte[] GZipMemory(byte[] Buffer)
{
    using (MemoryStream ms = new MemoryStream())
    {
        using (GZipStream GZip = new GZipStream(ms, CompressionMode.Compress))
        {
            GZip.Write(Buffer, 0, Buffer.Length);
        }
        // ToArray is still valid after the MemoryStream is closed
        return ms.ToArray();
    }
}
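For symmetry, a sketch of the inverse helper; the GUnzipMemory name is hypothetical, chosen to mirror GZipMemory:

// Hypothetical inverse of GZipMemory: inflate a gzip buffer back to raw bytes.
public static byte[] GUnzipMemory(byte[] Buffer)
{
    using (MemoryStream input = new MemoryStream(Buffer))
    using (GZipStream GZip = new GZipStream(input, CompressionMode.Decompress))
    using (MemoryStream output = new MemoryStream())
    {
        GZip.CopyTo(output);
        return output.ToArray();
    }
}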
/// <summary>
/// Encodes one file to another file that is gzip compressed.
/// File is overwritten if it exists and not locked.
/// </summary>
/// <param name="Filename">The file to compress</param>
/// <param name="OutputFile">The gzip-compressed output file</param>
/// <returns>true on success</returns>
public static bool GZipFile(string Filename, string OutputFile)
{
    byte[] Buffer = File.ReadAllBytes(Filename);
    // FileMode.Create truncates an existing file; OpenOrCreate would leave
    // stale trailing bytes behind whenever the new output is shorter
    using (FileStream fs = new FileStream(OutputFile, FileMode.Create, FileAccess.Write))
    using (GZipStream GZip = new GZipStream(fs, CompressionMode.Compress))
    {
        GZip.Write(Buffer, 0, Buffer.Length);
    }
    return true;
}
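Example usage of the two helpers above; the paths are illustrative:

byte[] packed = GZipMemory(File.ReadAllBytes(@"C:\temp\report.xml"));
bool ok = GZipFile(@"C:\temp\report.xml", @"C:\temp\report.xml.gz");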
static void GenerateBundles (List<string> files)
{
	string temp_s = "temp.s"; // Path.GetTempFileName ();
	string temp_c = "temp.c";
	string temp_o = "temp.o";

	if (compile_only)
		temp_c = output;
	if (object_out != null)
		temp_o = object_out;

	try {
		List<string> c_bundle_names = new List<string> ();
		List<string[]> config_names = new List<string[]> ();
		byte [] buffer = new byte [8192];

		using (StreamWriter ts = new StreamWriter (File.Create (temp_s))) {
			using (StreamWriter tc = new StreamWriter (File.Create (temp_c))) {
				string prog = null;

#if XAMARIN_ANDROID
				tc.WriteLine ("/* This source code was produced by mkbundle, do not edit */");
				tc.WriteLine ("\n#ifndef NULL\n#define NULL (void *)0\n#endif");
				tc.WriteLine (@"
typedef struct {
	const char *name;
	const unsigned char *data;
	const unsigned int size;
} MonoBundledAssembly;
void mono_register_bundled_assemblies (const MonoBundledAssembly **assemblies);
void mono_register_config_for_assembly (const char* assembly_name, const char* config_xml);
");
#else
				tc.WriteLine ("#include <mono/metadata/mono-config.h>");
				tc.WriteLine ("#include <mono/metadata/assembly.h>\n");
#endif

				if (compress) {
					tc.WriteLine ("typedef struct _compressed_data {");
					tc.WriteLine ("\tMonoBundledAssembly assembly;");
					tc.WriteLine ("\tint compressed_size;");
					tc.WriteLine ("} CompressedAssembly;\n");
				}

				object monitor = new object ();
				var streams = new Dictionary<string, Stream> ();
				var sizes = new Dictionary<string, long> ();

				// Do the file reading and compression in parallel
				Action<string> body = delegate (string url) {
					string fname = new Uri (url).LocalPath;
					Stream stream = File.OpenRead (fname);
					long real_size = stream.Length;
					int n;
					if (compress) {
						MemoryStream ms = new MemoryStream ();
						GZipStream deflate = new GZipStream (ms, CompressionMode.Compress, leaveOpen:true);
						while ((n = stream.Read (buffer, 0, buffer.Length)) != 0){
							deflate.Write (buffer, 0, n);
						}
						stream.Close ();
						deflate.Close ();
						byte [] bytes = ms.GetBuffer ();
						stream = new MemoryStream (bytes, 0, (int) ms.Length, false, false);
					}
					lock (monitor) {
						streams [url] = stream;
						sizes [url] = real_size;
					}
				};

				//#if NET_4_5
#if FALSE
				Parallel.ForEach (files, body);
#else
				foreach (var url in files)
					body (url);
#endif

				// The non-parallel part
				foreach (var url in files) {
					string fname = new Uri (url).LocalPath;
					string aname = Path.GetFileName (fname);
					string encoded = aname.Replace ("-", "_").Replace (".", "_");

					if (prog == null)
						prog = aname;

					var stream = streams [url];
					var real_size = sizes [url];

					Console.WriteLine (" embedding: " + fname);

					WriteSymbol (ts, "assembly_data_" + encoded, stream.Length);
					WriteBuffer (ts, stream, buffer);

					if (compress) {
						tc.WriteLine ("extern const unsigned char assembly_data_{0} [];", encoded);
						tc.WriteLine ("static CompressedAssembly assembly_bundle_{0} = {{{{\"{1}\"," +
							" assembly_data_{0}, {2}}}, {3}}};", encoded, aname, real_size, stream.Length);
						double ratio = ((double) stream.Length * 100) / real_size;
						Console.WriteLine (" compression ratio: {0:.00}%", ratio);
					} else {
						tc.WriteLine ("extern const unsigned char assembly_data_{0} [];", encoded);
						tc.WriteLine ("static const MonoBundledAssembly assembly_bundle_{0} = {{\"{1}\", assembly_data_{0}, {2}}};",
							encoded, aname, real_size);
					}
					stream.Close ();

					c_bundle_names.Add ("assembly_bundle_" + encoded);

					try {
						FileStream cf = File.OpenRead (fname + ".config");
						Console.WriteLine (" config from: " + fname + ".config");
						tc.WriteLine ("extern const unsigned char assembly_config_{0} [];", encoded);
						WriteSymbol (ts, "assembly_config_" + encoded, cf.Length);
						WriteBuffer (ts, cf, buffer);
						ts.WriteLine ();
						config_names.Add (new string[] {aname, encoded});
					} catch (FileNotFoundException) {
						/* we ignore if the config file doesn't exist */
					}
				}

				if (config_file != null){
					FileStream conf;
					try {
						conf = File.OpenRead (config_file);
					} catch {
						Error (String.Format ("Failure to open {0}", config_file));
						return;
					}
					Console.WriteLine ("System config from: " + config_file);
					tc.WriteLine ("extern const char system_config;");
					WriteSymbol (ts, "system_config", conf.Length); // size of the embedded data, not of the file name
					WriteBuffer (ts, conf, buffer);
					// null terminator
					ts.Write ("\t.byte 0\n");
					ts.WriteLine ();
				}

				if (machine_config_file != null){
					FileStream conf;
					try {
						conf = File.OpenRead (machine_config_file);
					} catch {
						Error (String.Format ("Failure to open {0}", machine_config_file));
						return;
					}
					Console.WriteLine ("Machine config from: " + machine_config_file);
					tc.WriteLine ("extern const char machine_config;");
					WriteSymbol (ts, "machine_config", conf.Length); // size of the embedded data, not of the file name
					WriteBuffer (ts, conf, buffer);
					// null terminator
					ts.Write ("\t.byte 0\n");
					ts.WriteLine ();
				}
				ts.Close ();

				Console.WriteLine ("Compiling:");
				string cmd = String.Format ("{0} -o {1} {2} ", GetEnv ("AS", "as"), temp_o, temp_s);
				int ret = Execute (cmd);
				if (ret != 0){
					Error ("[Fail]");
					return;
				}

				if (compress)
					tc.WriteLine ("\nstatic const CompressedAssembly *compressed [] = {");
				else
					tc.WriteLine ("\nstatic const MonoBundledAssembly *bundled [] = {");

				foreach (string c in c_bundle_names){
					tc.WriteLine ("\t&{0},", c);
				}
				tc.WriteLine ("\tNULL\n};\n");
				tc.WriteLine ("static char *image_name = \"{0}\";", prog);

				tc.WriteLine ("\nstatic void install_dll_config_files (void) {\n");
				foreach (string[] ass in config_names){
					tc.WriteLine ("\tmono_register_config_for_assembly (\"{0}\", assembly_config_{1});\n", ass [0], ass [1]);
				}
				if (config_file != null)
					tc.WriteLine ("\tmono_config_parse_memory (&system_config);\n");
				if (machine_config_file != null)
					tc.WriteLine ("\tmono_register_machine_config (&machine_config);\n");
				tc.WriteLine ("}\n");

				if (config_dir != null)
					tc.WriteLine ("static const char *config_dir = \"{0}\";", config_dir);
				else
					tc.WriteLine ("static const char *config_dir = NULL;");

				Stream template_stream;
				if (compress) {
					template_stream = System.Reflection.Assembly.GetAssembly (typeof(MakeBundle)).GetManifestResourceStream ("template_z.c");
				} else {
					template_stream = System.Reflection.Assembly.GetAssembly (typeof(MakeBundle)).GetManifestResourceStream ("template.c");
				}

				StreamReader s = new StreamReader (template_stream);
				string template = s.ReadToEnd ();
				tc.Write (template);

				if (!nomain) {
					Stream template_main_stream = System.Reflection.Assembly.GetAssembly (typeof(MakeBundle)).GetManifestResourceStream ("template_main.c");
					StreamReader st = new StreamReader (template_main_stream);
					string maintemplate = st.ReadToEnd ();
					tc.Write (maintemplate);
				}

				tc.Close ();

				if (compile_only)
					return;

				string zlib = (compress ? "-lz" : "");
				string debugging = "-g";
				string cc = GetEnv ("CC", IsUnix ? "cc" : "i686-pc-mingw32-gcc");

				if (style == "linux")
					debugging = "-ggdb";
				if (static_link) {
					string smonolib;
					if (style == "osx")
						smonolib = "`pkg-config --variable=libdir mono-2`/libmono-2.0.a ";
					else
						smonolib = "-Wl,-Bstatic -lmono-2.0 -Wl,-Bdynamic ";
					cmd = String.Format ("{4} -o {2} -Wall `pkg-config --cflags mono-2` {0} {3} " +
						"`pkg-config --libs-only-L mono-2` " + smonolib +
						"`pkg-config --libs-only-l mono-2 | sed -e \"s/\\-lmono-2.0 //\"` {1}",
						temp_c, temp_o, output, zlib, cc);
				} else {
					cmd = String.Format ("{4} " + debugging + " -o {2} -Wall {0} `pkg-config --cflags --libs mono-2` {3} {1}",
						temp_c, temp_o, output, zlib, cc);
				}
				ret = Execute (cmd);
				if (ret != 0){
					Error ("[Fail]");
					return;
				}
				Console.WriteLine ("Done");
			}
		}
	} finally {
		if (!keeptemp){
			if (object_out == null){
				File.Delete (temp_o);
			}
			if (!compile_only){
				File.Delete (temp_c);
			}
			File.Delete (temp_s);
		}
	}
}