/// <summary>
/// Compresses a byte array with GZip and returns the compressed bytes.
/// </summary>
/// <param name="data">The raw bytes to compress.</param>
/// <returns>The GZip-compressed data.</returns>
public static byte[] Compress(byte[] data)
{
    using (var compressedStream = new MemoryStream())
    using (var zipStream = new GZipStream(compressedStream, CompressionMode.Compress))
    {
        zipStream.Write(data, 0, data.Length);
        zipStream.Close(); // flush the GZip footer before reading the buffer
        return compressedStream.ToArray();
    }
}
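// A minimal decompression counterpart, sketched under the assumption that the input was
// produced by the Compress method above; the method name is illustrative.
public static byte[] Decompress(byte[] compressedData)
{
    using (var compressedStream = new MemoryStream(compressedData))
    using (var zipStream = new GZipStream(compressedStream, CompressionMode.Decompress))
    using (var resultStream = new MemoryStream())
    {
        zipStream.CopyTo(resultStream); // inflate everything into the result buffer
        return resultStream.ToArray();
    }
}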
/// <summary>
/// Yields an enumerable list of paths to GZTestData files.
/// </summary>
public static IEnumerable<object[]> CompressedFiles()
{
    if (_compressedFiles == null)
    {
        _compressedFiles = new List<object[]>();

        // Crypto random data
        byte[] bytes = new byte[100000000];
        var rand = RandomNumberGenerator.Create();
        rand.GetBytes(bytes);
        string filePath = PerfUtils.GetTestFilePath() + ".gz";
        using (FileStream output = File.Create(filePath))
        using (GZipStream zip = new GZipStream(output, CompressionMode.Compress))
            zip.Write(bytes, 0, bytes.Length);
        _compressedFiles.Add(new object[] { filePath });

        // Create a compressed file with repeated segments
        bytes = System.Text.Encoding.UTF8.GetBytes(PerfUtils.CreateString(100000));
        filePath = PerfUtils.GetTestFilePath() + ".gz";
        using (FileStream output = File.Create(filePath))
        using (GZipStream zip = new GZipStream(output, CompressionMode.Compress))
            for (int i = 0; i < 1000; i++)
                zip.Write(bytes, 0, bytes.Length);
        _compressedFiles.Add(new object[] { filePath });
    }
    return _compressedFiles;
}
static void Assemble(List<string> files, string destinationDirectory)
{
    // 'matches' (a regex match over the part file names) is assumed to come from the enclosing scope
    string fileOutputPath = destinationDirectory + "assembled" + "." + matches[0].Groups[3];

    // create (or truncate) the output file, then reopen it for appending
    var fsSource = new FileStream(fileOutputPath, FileMode.Create);
    fsSource.Close();
    using (fsSource = new FileStream(fileOutputPath, FileMode.Append))
    {
        // read the file paths of the parts from the files list
        foreach (var filePart in files)
        {
            using (var partSource = new FileStream(filePart, FileMode.Open))
            using (var compressionStream = new GZipStream(partSource, CompressionMode.Decompress, false))
            {
                // copy the bytes from the part into the assembled file
                byte[] bytePart = new byte[4096];
                while (true)
                {
                    int readBytes = compressionStream.Read(bytePart, 0, bytePart.Length);
                    if (readBytes == 0)
                    {
                        break;
                    }
                    fsSource.Write(bytePart, 0, readBytes);
                }
            }
        }
    }
}
private static void Assemble(List<string> file, string name, string destinationDir)
{
    Directory.CreateDirectory(destinationDir);
    FileStream write = new FileStream(destinationDir + name, FileMode.Append);
    byte[] buffer = new byte[4096];
    using (write)
    {
        for (int i = 0; i < file.Count; i++)
        {
            using (FileStream reader = new FileStream("../dir/" + file[i], FileMode.Open))
            using (GZipStream gz = new GZipStream(reader, CompressionMode.Decompress, false))
            {
                while (true)
                {
                    int bytes = gz.Read(buffer, 0, buffer.Length);
                    if (bytes == 0)
                    {
                        break;
                    }
                    write.Write(buffer, 0, bytes);
                }
            }
        }
    }
}
static void Assemble(List<string> files, string destinationDirectory)
{
    // 'extensionSource' (the extension of the original file) is assumed to come from the enclosing scope
    using (FileStream combined = new FileStream(destinationDirectory + "combined files" + extensionSource, FileMode.Create))
    {
        for (int file = 0; file < files.Count; file++)
        {
            using (FileStream input = new FileStream(files[file], FileMode.Open))
            using (GZipStream compression = new GZipStream(input, CompressionMode.Decompress, false))
            {
                byte[] buffer = new byte[4096];
                while (true)
                {
                    int readBytes = compression.Read(buffer, 0, buffer.Length);
                    if (readBytes == 0)
                    {
                        break;
                    }
                    combined.Write(buffer, 0, readBytes);
                }
            }
        }
    }
}
public void Compress_Canterbury(int innerIterations, string fileName, CompressionLevel compressLevel)
{
    byte[] bytes = File.ReadAllBytes(Path.Combine("GZTestData", "Canterbury", fileName));
    PerfUtils utils = new PerfUtils();
    FileStream[] filestreams = new FileStream[innerIterations];
    GZipStream[] gzips = new GZipStream[innerIterations];
    string[] paths = new string[innerIterations];
    foreach (var iteration in Benchmark.Iterations)
    {
        for (int i = 0; i < innerIterations; i++)
        {
            paths[i] = utils.GetTestFilePath();
            filestreams[i] = File.Create(paths[i]);
        }
        using (iteration.StartMeasurement())
            for (int i = 0; i < innerIterations; i++)
            {
                gzips[i] = new GZipStream(filestreams[i], compressLevel);
                gzips[i].Write(bytes, 0, bytes.Length);
                gzips[i].Flush();
                gzips[i].Dispose();
                filestreams[i].Dispose();
            }
        for (int i = 0; i < innerIterations; i++)
            File.Delete(paths[i]);
    }
}
public static byte[] Compress(byte[] b)
{
    MemoryStream ms = new MemoryStream();
    using (GZipStream zs = new GZipStream(ms, CompressionMode.Compress))
    {
        zs.Write(b, 0, b.Length);
    } // the GZipStream must be closed here so the GZip footer is flushed into the MemoryStream
    return ms.ToArray();
}
static void Main()
{
    var sw = Stopwatch.StartNew();
    using (var inputFile = File.OpenRead("ClassLibrary45.csv.gz"))
    using (var gzip = new GZipStream(inputFile, CompressionMode.Decompress))
    using (var sr = new StreamReader(gzip))
    {
        var data = from rawLine in sr.ReadToEnd().Split('\n')
                   let line = rawLine.Trim()
                   where !string.IsNullOrEmpty(line) && !line.StartsWith("//")
                   let parts = line.Split(',')
                   select new { ClassName = parts.First(), ContentId = parts.Last() };
        var classes = data.ToDictionary(c => c.ClassName, c => c.ContentId);
        Console.WriteLine("{0} classes loaded.", classes.Count);
    }
    sw.Stop();
    Console.WriteLine("Elapsed: {0} milliseconds.", sw.ElapsedMilliseconds);
}
private static void AssembleGZip(List<string> files, string destinationDirectory)
{
    // the parts are named "<name>.<ext>.gz", so the original extension is the second-to-last dot segment
    string[] dotItems = files[0].Split('.');
    string ext = dotItems[dotItems.Length - 2];
    string destinationFile = destinationDirectory + "assembled." + ext;
    using (FileStream dest = new FileStream(destinationFile, FileMode.Append, FileAccess.Write))
    {
        foreach (string inFile in files)
        {
            using (FileStream source = new FileStream(inFile, FileMode.Open))
            using (GZipStream sourceGZip = new GZipStream(source, CompressionMode.Decompress, false))
            {
                byte[] buffer = new byte[4096];
                int len;
                while ((len = sourceGZip.Read(buffer, 0, buffer.Length)) > 0)
                    dest.Write(buffer, 0, len);
            }
        }
    }
}
private static void SliceGZip(string sourceFile, string destinationDir, int numParts)
{
    FileInfo fileInfo = new FileInfo(sourceFile);
    int size = (int)fileInfo.Length;
    int partSize = size / numParts;
    int lastPartSize = size - (numParts - 1) * partSize; // the last part absorbs the remainder
    string name = fileInfo.Name;
    string[] nameExtArr = name.Split('.');
    using (FileStream source = new FileStream(sourceFile, FileMode.Open))
    {
        for (int i = 0; i < numParts; i++)
        {
            int currentSize = (i == (numParts - 1)) ? lastPartSize : partSize;
            byte[] buffer = new byte[currentSize];

            // FileStream.Read may return fewer bytes than requested, so read until the part buffer is full
            int offset = 0;
            while (offset < currentSize)
            {
                int read = source.Read(buffer, offset, currentSize - offset);
                if (read == 0)
                {
                    break;
                }
                offset += read;
            }

            string currentFilePath = destinationDir + "Part-" + i + "." + nameExtArr[1] + ".gz";
            using (FileStream dest = new FileStream(currentFilePath, FileMode.Create))
            using (GZipStream gzipDest = new GZipStream(dest, CompressionMode.Compress, false))
                gzipDest.Write(buffer, 0, offset);
        }
    }
}
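// A hypothetical call site pairing SliceGZip above with the AssembleGZip method shown earlier;
// the file names and part count are illustrative, and the parts/ directory is assumed to exist.
static void SliceAndReassembleExample()
{
    SliceGZip("big-file.txt", "parts/", 4);   // writes parts/Part-0.txt.gz .. parts/Part-3.txt.gz
    var parts = new List<string>
    {
        "parts/Part-0.txt.gz", "parts/Part-1.txt.gz",
        "parts/Part-2.txt.gz", "parts/Part-3.txt.gz"
    };
    AssembleGZip(parts, "parts/");            // concatenates the decompressed parts into parts/assembled.txt
}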
public static void SplitFile(string inputFile, int parts, string path)
{
    byte[] buffer = new byte[4096];
    using (Stream originalFile = File.OpenRead(inputFile))
    {
        int index = 1;
        while (originalFile.Position < originalFile.Length)
        {
            using (Stream compressedFile = File.Create(path + "\\" + index + ".gz"))
            using (GZipStream compression = new GZipStream(compressedFile, CompressionMode.Compress))
            {
                int chunkBytesRead = 0;
                while (chunkBytesRead < originalFile.Length / parts)
                {
                    int bytesRead = originalFile.Read(buffer, 0, buffer.Length);
                    if (bytesRead == 0)
                    {
                        break;
                    }
                    chunkBytesRead += bytesRead;
                    compression.Write(buffer, 0, bytesRead);
                }
            }
            index++;
        }
    }
}
private static void Assemble(int parts)
{
    byte[] buffer = new byte[4096];
    for (int i = 1; i <= parts; i++)
    {
        string source = String.Format("../../{0}.gz", i);
        using (FileStream partOfFile = new FileStream(source, FileMode.Open))
        using (FileStream assembledFile = new FileStream("../../assembled.txt", FileMode.Append))
        using (GZipStream decompression = new GZipStream(partOfFile, CompressionMode.Decompress))
        {
            while (true)
            {
                int bytesRead = decompression.Read(buffer, 0, buffer.Length);
                if (bytesRead == 0)
                {
                    break;
                }
                assembledFile.Write(buffer, 0, bytesRead);
            }
        }
    }
}
public static MemoryStream readStream(FileStream stream)
{
    MemoryStream outStream = new MemoryStream();
    GZipStream compress = new GZipStream(stream, CompressionMode.Decompress, false);
    byte[] buffer = new byte[stream.Length];
    while (true)
    {
        int count = compress.Read(buffer, 0, buffer.Length);
        if (count == 0)
        {
            break; // GZipStream.Read returns 0 only at end of stream
        }
        // write only the bytes actually read; a short read does not mean end of stream
        outStream.Write(buffer, 0, count);
    }
    compress.Close();
    outStream.Close();
    stream.Close();
    return new MemoryStream(outStream.ToArray());
}
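// A hypothetical call site for readStream above; the .gz path is illustrative.
static void ReadStreamExample()
{
    using (FileStream gzFile = File.OpenRead("data.bin.gz"))
    using (MemoryStream decompressed = readStream(gzFile))
    {
        Console.WriteLine("Decompressed {0} bytes.", decompressed.Length);
    }
}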
static void Main(string[] args)
{
    if (args.Length < 1)
    {
        usage();
        return;
    }

    string inputFile = args[0];
    string outputFile = inputFile + ".gz";
    try
    {
        // Get bytes from input stream (assumes a single Read fills the buffer)
        FileStream inFileStream = new FileStream(Path.Combine(Environment.CurrentDirectory, inputFile), FileMode.Open);
        byte[] buffer = new byte[inFileStream.Length];
        inFileStream.Read(buffer, 0, buffer.Length);
        inFileStream.Close();

        // Create GZip file stream and compress input bytes
        FileStream outFileStream = new FileStream(Path.Combine(Environment.CurrentDirectory, outputFile), FileMode.Create);
        GZipStream compressedStream = new GZipStream(outFileStream, CompressionMode.Compress);
        compressedStream.Write(buffer, 0, buffer.Length);
        compressedStream.Close();
        outFileStream.Close();
        Console.WriteLine("The file has been compressed. UR Da Bomb!!!");
    }
    catch (FileNotFoundException)
    {
        Console.WriteLine("Error: Specified file cannot be found.");
    }
}
public static void Decompress(FileInfo archFile, out String szOutFile)
{
    Logger.Enter();
    using (FileStream archFileStream = archFile.OpenRead())
    {
        String currentFileName = archFile.FullName;
        String newFileName = currentFileName.Remove(currentFileName.Length - archFile.Extension.Length);
        using (FileStream normalFileStream = File.Create(newFileName))
        using (GZipStream decompressionStream = new GZipStream(archFileStream, CompressionMode.Decompress))
        {
            byte[] buffer = new byte[1024];
            int nRead;
            while ((nRead = decompressionStream.Read(buffer, 0, buffer.Length)) > 0)
            {
                normalFileStream.Write(buffer, 0, nRead);
            }
            szOutFile = newFileName;
            Console.WriteLine("Decompressed: {0}", archFile.Name);
        }
    }
    Logger.Leave();
}
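// A hypothetical call site for Decompress above; "report.csv.gz" is an illustrative file name.
static void DecompressExample()
{
    string restoredPath;
    Decompress(new FileInfo("report.csv.gz"), out restoredPath);
    Console.WriteLine("Restored to {0}", restoredPath); // the original path without the .gz extension
}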
static void Assemble(string[] files, string destinationDirectory)
{
    // GetFileName, GetExtention and filePartsNames are assumed to come from the enclosing class
    using (var assembly = new FileStream(destinationDirectory + GetFileName() + "-Assembled" + GetExtention(), FileMode.Create))
    {
        for (int i = 0; i < files.Length; i++)
        {
            using (var source = new FileStream(filePartsNames[i] + ".gz", FileMode.Open))
            using (var gz = new GZipStream(source, CompressionMode.Decompress))
            {
                byte[] buffer = new byte[4096];
                while (true)
                {
                    int readBytes = gz.Read(buffer, 0, buffer.Length);
                    if (readBytes == 0)
                    {
                        break;
                    }
                    // write only the bytes read on this iteration, not the whole buffer
                    assembly.Write(buffer, 0, readBytes);
                }
            }
        }
    }
}
public static void Compress(string text, string outfile)
{
    byte[] buffer = Encoding.UTF8.GetBytes(text);
    MemoryStream ms = new MemoryStream();
    using (GZipStream zip = new GZipStream(ms, CompressionMode.Compress, true))
    {
        zip.Write(buffer, 0, buffer.Length);
    }
    ms.Position = 0;
    byte[] compressed = new byte[ms.Length];
    ms.Read(compressed, 0, compressed.Length);

    // prefix the compressed payload with the uncompressed length (4 bytes, little-endian)
    byte[] gzBuffer = new byte[compressed.Length + 4];
    System.Buffer.BlockCopy(compressed, 0, gzBuffer, 4, compressed.Length);
    System.Buffer.BlockCopy(BitConverter.GetBytes(buffer.Length), 0, gzBuffer, 0, 4);

    string mimeBase64 = Convert.ToBase64String(gzBuffer);
    File.WriteAllText(outfile, mimeBase64);
    Console.WriteLine("Base64 string saved as " + outfile + "\n");
    //return Convert.ToBase64String(gzBuffer);
}
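// A minimal reader for the format produced above (a 4-byte uncompressed length followed by the
// GZip payload, Base64-encoded); sketched as an assumption, and the method name is illustrative.
public static string DecompressFromFile(string infile)
{
    byte[] gzBuffer = Convert.FromBase64String(File.ReadAllText(infile));
    int uncompressedLength = BitConverter.ToInt32(gzBuffer, 0);
    byte[] text = new byte[uncompressedLength];
    using (var ms = new MemoryStream(gzBuffer, 4, gzBuffer.Length - 4))
    using (var zip = new GZipStream(ms, CompressionMode.Decompress))
    {
        int offset = 0;
        while (offset < uncompressedLength)
        {
            int read = zip.Read(text, offset, uncompressedLength - offset);
            if (read == 0) break;
            offset += read;
        }
    }
    return Encoding.UTF8.GetString(text);
}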
private static void Assemble(List<string> files, string destinationDirectory)
{
    var allData = new List<byte>();
    for (int i = 0; i < files.Count; i++)
    {
        var sourceFile = files[i];
        using (var source = new FileStream(sourceFile, FileMode.Open))
        using (var zip = new GZipStream(source, CompressionMode.Decompress))
        {
            byte[] buffer = new byte[4096];
            while (true)
            {
                int readBytes = zip.Read(buffer, 0, buffer.Length);
                if (readBytes == 0)
                {
                    break;
                }
                for (int j = 0; j < readBytes; j++)
                {
                    allData.Add(buffer[j]);
                }
            }
        }
    }
    using (var copy = new FileStream(destinationDirectory, FileMode.Create))
    {
        copy.Write(allData.ToArray(), 0, allData.Count);
    }
}
public void BaseStream1()
{
    var writeStream = new MemoryStream();
    var zip = new GZipStream(writeStream, CompressionMode.Compress);
    Assert.Same(zip.BaseStream, writeStream);
    writeStream.Dispose();
}
public void BaseStream2()
{
    var ms = new MemoryStream();
    var zip = new GZipStream(ms, CompressionMode.Decompress);
    Assert.Same(zip.BaseStream, ms);
    ms.Dispose();
}
public void TestInit()
{
    Stream baseStream = new MemoryStream();
    GZipStream gzipStream = new GZipStream(baseStream, CompressionMode.Decompress);
    Assert.AreSame(baseStream, gzipStream.BaseStream);
    gzipStream.Close();
}
public async Task ModifyBaseStream()
{
    var ms = await LocalMemoryStream.readAppFileAsync(gzTestFile("GZTestDocument.txt.gz"));
    var zip = new GZipStream(ms, CompressionMode.Decompress);
    int size = 1024;
    Byte[] bytes = new Byte[size];
    zip.BaseStream.Read(bytes, 0, size); // This will throw if the underlying stream is not readable as expected
}
public static byte[] Compress(byte[] data)
{
    MemoryStream output = new MemoryStream();
    GZipStream gzip = new GZipStream(output, CompressionMode.Compress, true);
    gzip.Write(data, 0, data.Length);
    gzip.Close(); // flushes the GZip footer; leaveOpen keeps 'output' usable afterwards
    return output.ToArray();
}
public void CompressCanWrite()
{
    var ms = new MemoryStream();
    var zip = new GZipStream(ms, CompressionMode.Compress);
    Assert.True(zip.CanWrite, "GZipStream not CanWrite with CompressionMode.Compress");
    zip.Dispose();
    Assert.False(zip.CanWrite, "GZipStream CanWrite after dispose");
}
public static byte[] Compress(this byte[] bytes)
{
    using (MemoryStream ms = new MemoryStream())
    {
        GZipStream gzip = new GZipStream(ms, CompressionMode.Compress);
        gzip.Write(bytes, 0, bytes.Length);
        gzip.Close();
        return ms.ToArray();
    }
}
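// A matching extension-method counterpart, sketched under the assumption that the input came
// from the Compress extension above; the method name is illustrative.
public static byte[] Decompress(this byte[] bytes)
{
    using (MemoryStream input = new MemoryStream(bytes))
    using (GZipStream gzip = new GZipStream(input, CompressionMode.Decompress))
    using (MemoryStream output = new MemoryStream())
    {
        gzip.CopyTo(output);
        return output.ToArray();
    }
}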
public void CanDisposeGZipStream()
{
    var ms = new MemoryStream();
    var zip = new GZipStream(ms, CompressionMode.Compress);
    zip.Dispose();
    Assert.Null(zip.BaseStream);
    zip.Dispose(); // Should be a no-op
}
public void DecompressCanRead()
{
    var ms = new MemoryStream();
    var zip = new GZipStream(ms, CompressionMode.Decompress);
    Assert.True(zip.CanRead, "GZipStream not CanRead in Decompress");
    zip.Dispose();
    Assert.False(zip.CanRead, "GZipStream CanRead after dispose in Decompress");
}
public void Decompress()
{
    var bytes = CompressToArray();
    using (var memoryStream = new MemoryStream(bytes))
    using (var gzipStream = new GZipStream(memoryStream, CompressionMode.Decompress))
    using (var reader = new StreamReader(gzipStream))
    {
        string text = reader.ReadToEnd();
        Assert.AreEqual(Text, text);
    }
}
public void WebRequest()
{
    var bytes = GetHttpBin("http://httpbin.org/gzip");
    using (var memoryStream = new MemoryStream(bytes))
    using (var gzipStream = new GZipStream(memoryStream, CompressionMode.Decompress))
    using (var reader = new StreamReader(gzipStream))
    {
        string text = reader.ReadToEnd();
        Assert.IsTrue(text.Contains("\"gzipped\": true"), "Request was not compressed!");
    }
}
protected override byte[] CompressToArray()
{
    using (var memoryStream = new MemoryStream())
    {
        using (var gzipStream = new GZipStream(memoryStream, CompressionMode.Compress))
        using (var writer = new StreamWriter(gzipStream))
        {
            writer.Write(Text);
        }
        return memoryStream.ToArray();
    }
}
static Trace()
{
    string executablePath = Application.ExecutablePath;
    string directoryName = Path.GetDirectoryName(executablePath);
    if (!Directory.Exists(directoryName))
    {
        Directory.CreateDirectory(directoryName);
    }

    // delete rotated log archives older than 30 days
    string[] files = Directory.GetFiles(directoryName, "*.gz");
    TimeSpan t = new TimeSpan(30, 0, 0, 0, 0);
    foreach (string text in files)
    {
        FileInfo fileInfo = new FileInfo(text);
        if (TimeSpan.Compare(DateTime.Now.Subtract(fileInfo.CreationTime), t) > 0)
        {
            File.Delete(text);
        }
    }

    // rotate the current log into a GZip archive once it exceeds 200 KB
    Filename = Path.Combine(directoryName, "TraceGMA.log");
    if (File.Exists(Filename))
    {
        FileInfo fileInfo2 = new FileInfo(Filename);
        if (fileInfo2.Length > 204800)
        {
            string path = string.Format("trace-{0}.gz", DateTime.Now.ToString("yyyy-MM-dd-hh-mm-ss"));
            using (FileStream fileStream2 = fileInfo2.OpenRead())
            using (FileStream fileStream = new FileStream(Path.Combine(directoryName, path), FileMode.Create, FileAccess.ReadWrite, FileShare.ReadWrite))
            using (GZipStream gZipStream = new GZipStream(fileStream, CompressionMode.Compress))
            {
                byte[] array2 = new byte[2048];
                bool flag = false;
                while (!flag)
                {
                    fileStream2.Position = 0L;
                    fileStream.Position = 0L;
                    try
                    {
                        int num2;
                        for (long num = 0L; num < fileInfo2.Length; num += num2)
                        {
                            num2 = fileStream2.Read(array2, 0, array2.Length);
                            gZipStream.Write(array2, 0, num2);
                        }
                    }
                    finally
                    {
                        gZipStream.Close();
                        fileStream.Close();
                        fileStream2.Close();
                        flag = true;
                    }
                }
            }
            File.Delete(Filename);
        }
    }
}
public Map Load([NotNull] string fileName) { if (fileName == null) { throw new ArgumentNullException("fileName"); } using (FileStream mapStream = File.OpenRead(fileName)) { byte[] temp = new byte[8]; Map map = null; mapStream.Seek(-4, SeekOrigin.End); mapStream.Read(temp, 0, 4); mapStream.Seek(0, SeekOrigin.Begin); int uncompressedLength = BitConverter.ToInt32(temp, 0); byte[] data = new byte[uncompressedLength]; using (GZipStream reader = new GZipStream(mapStream, CompressionMode.Decompress, true)) { reader.Read(data, 0, uncompressedLength); } for (int i = 0; i < uncompressedLength - 1; i++) { if (data[i] != 0xAC || data[i + 1] != 0xED) { continue; } // bypassing the header crap int pointer = i + 6; Array.Copy(data, pointer, temp, 0, 2); pointer += IPAddress.HostToNetworkOrder(BitConverter.ToInt16(temp, 0)); pointer += 13; int headerEnd; // find the end of serialization listing for (headerEnd = pointer; headerEnd < data.Length - 1; headerEnd++) { if (data[headerEnd] == 0x78 && data[headerEnd + 1] == 0x70) { headerEnd += 2; break; } } // start parsing serialization listing int offset = 0; int width = 0, length = 0, height = 0; Position spawn = new Position(); while (pointer < headerEnd) { switch (( char )data[pointer]) { case 'Z': offset++; break; case 'F': case 'I': offset += 4; break; case 'J': offset += 8; break; } pointer += 1; Array.Copy(data, pointer, temp, 0, 2); short skip = IPAddress.HostToNetworkOrder(BitConverter.ToInt16(temp, 0)); pointer += 2; // look for relevant variables Array.Copy(data, headerEnd + offset - 4, temp, 0, 4); if (MemCmp(data, pointer, "width")) { width = ( ushort )IPAddress.HostToNetworkOrder(BitConverter.ToInt32(temp, 0)); } else if (MemCmp(data, pointer, "depth")) { height = ( ushort )IPAddress.HostToNetworkOrder(BitConverter.ToInt32(temp, 0)); } else if (MemCmp(data, pointer, "height")) { length = ( ushort )IPAddress.HostToNetworkOrder(BitConverter.ToInt32(temp, 0)); } else if (MemCmp(data, pointer, "xSpawn")) { spawn.X = ( short )(IPAddress.HostToNetworkOrder(BitConverter.ToInt32(temp, 0)) * 32 + 16); } else if (MemCmp(data, pointer, "ySpawn")) { spawn.Z = ( short )(IPAddress.HostToNetworkOrder(BitConverter.ToInt32(temp, 0)) * 32 + 16); } else if (MemCmp(data, pointer, "zSpawn")) { spawn.Y = ( short )(IPAddress.HostToNetworkOrder(BitConverter.ToInt32(temp, 0)) * 32 + 16); } pointer += skip; } map = new Map(null, width, length, height, false) { Spawn = spawn }; if (!map.ValidateHeader()) { throw new MapFormatException("One or more of the map dimensions are invalid."); } // find the start of the block array bool foundBlockArray = false; offset = Array.IndexOf <byte>(data, 0x00, headerEnd); while (offset != -1 && offset < data.Length - 2) { if (data[offset] == 0x00 && data[offset + 1] == 0x78 && data[offset + 2] == 0x70) { foundBlockArray = true; pointer = offset + 7; } offset = Array.IndexOf <byte>(data, 0x00, offset + 1); } // copy the block array... or fail if (foundBlockArray) { map.Blocks = new byte[map.Volume]; Array.Copy(data, pointer, map.Blocks, 0, map.Blocks.Length); map.ConvertBlockTypes(Mapping); } else { throw new MapFormatException("Could not locate block array."); } break; } return(map); } }
static void GenerateBundles(List <string> files) { string temp_s = "temp.s"; // Path.GetTempFileName (); string temp_c = "temp.c"; string temp_o = "temp.o"; if (compile_only) { temp_c = output; } if (object_out != null) { temp_o = object_out; } try { List <string> c_bundle_names = new List <string> (); List <string[]> config_names = new List <string[]> (); using (StreamWriter ts = new StreamWriter(File.Create(temp_s))) { using (StreamWriter tc = new StreamWriter(File.Create(temp_c))) { string prog = null; #if XAMARIN_ANDROID tc.WriteLine("/* This source code was produced by mkbundle, do not edit */"); tc.WriteLine("\n#ifndef NULL\n#define NULL (void *)0\n#endif"); tc.WriteLine(@" typedef struct { const char *name; const unsigned char *data; const unsigned int size; } MonoBundledAssembly; void mono_register_bundled_assemblies (const MonoBundledAssembly **assemblies); void mono_register_config_for_assembly (const char* assembly_name, const char* config_xml); "); #else tc.WriteLine("#include <mono/metadata/mono-config.h>"); tc.WriteLine("#include <mono/metadata/assembly.h>\n"); #endif if (compress) { tc.WriteLine("typedef struct _compressed_data {"); tc.WriteLine("\tMonoBundledAssembly assembly;"); tc.WriteLine("\tint compressed_size;"); tc.WriteLine("} CompressedAssembly;\n"); } object monitor = new object(); var streams = new Dictionary <string, Stream> (); var sizes = new Dictionary <string, long> (); // Do the file reading and compression in parallel Action <string> body = delegate(string url) { string fname = new Uri(url).LocalPath; Stream stream = File.OpenRead(fname); long real_size = stream.Length; int n; if (compress) { byte[] cbuffer = new byte [8192]; MemoryStream ms = new MemoryStream(); GZipStream deflate = new GZipStream(ms, CompressionMode.Compress, leaveOpen: true); while ((n = stream.Read(cbuffer, 0, cbuffer.Length)) != 0) { deflate.Write(cbuffer, 0, n); } stream.Close(); deflate.Close(); byte [] bytes = ms.GetBuffer(); stream = new MemoryStream(bytes, 0, (int)ms.Length, false, false); } lock (monitor) { streams [url] = stream; sizes [url] = real_size; } }; //#if NET_4_5 #if FALSE Parallel.ForEach(files, body); #else foreach (var url in files) { body(url); } #endif // The non-parallel part byte [] buffer = new byte [8192]; foreach (var url in files) { string fname = new Uri(url).LocalPath; string aname = Path.GetFileName(fname); string encoded = aname.Replace("-", "_").Replace(".", "_"); if (prog == null) { prog = aname; } var stream = streams [url]; var real_size = sizes [url]; Console.WriteLine(" embedding: " + fname); WriteSymbol(ts, "assembly_data_" + encoded, stream.Length); WriteBuffer(ts, stream, buffer); if (compress) { tc.WriteLine("extern const unsigned char assembly_data_{0} [];", encoded); tc.WriteLine("static CompressedAssembly assembly_bundle_{0} = {{{{\"{1}\"," + " assembly_data_{0}, {2}}}, {3}}};", encoded, aname, real_size, stream.Length); double ratio = ((double)stream.Length * 100) / real_size; Console.WriteLine(" compression ratio: {0:.00}%", ratio); } else { tc.WriteLine("extern const unsigned char assembly_data_{0} [];", encoded); tc.WriteLine("static const MonoBundledAssembly assembly_bundle_{0} = {{\"{1}\", assembly_data_{0}, {2}}};", encoded, aname, real_size); } stream.Close(); c_bundle_names.Add("assembly_bundle_" + encoded); try { FileStream cf = File.OpenRead(fname + ".config"); Console.WriteLine(" config from: " + fname + ".config"); tc.WriteLine("extern const unsigned char assembly_config_{0} [];", encoded); WriteSymbol(ts, "assembly_config_" + 
encoded, cf.Length); WriteBuffer(ts, cf, buffer); ts.WriteLine(); config_names.Add(new string[] { aname, encoded }); } catch (FileNotFoundException) { /* we ignore if the config file doesn't exist */ } } if (config_file != null) { FileStream conf; try { conf = File.OpenRead(config_file); } catch { Error(String.Format("Failure to open {0}", config_file)); return; } Console.WriteLine("System config from: " + config_file); tc.WriteLine("extern const char system_config;"); WriteSymbol(ts, "system_config", config_file.Length); WriteBuffer(ts, conf, buffer); // null terminator ts.Write("\t.byte 0\n"); ts.WriteLine(); } if (machine_config_file != null) { FileStream conf; try { conf = File.OpenRead(machine_config_file); } catch { Error(String.Format("Failure to open {0}", machine_config_file)); return; } Console.WriteLine("Machine config from: " + machine_config_file); tc.WriteLine("extern const char machine_config;"); WriteSymbol(ts, "machine_config", machine_config_file.Length); WriteBuffer(ts, conf, buffer); ts.Write("\t.byte 0\n"); ts.WriteLine(); } ts.Close(); Console.WriteLine("Compiling:"); string cmd = String.Format("{0} -o {1} {2} ", GetEnv("AS", "as"), temp_o, temp_s); int ret = Execute(cmd); if (ret != 0) { Error("[Fail]"); return; } if (compress) { tc.WriteLine("\nstatic const CompressedAssembly *compressed [] = {"); } else { tc.WriteLine("\nstatic const MonoBundledAssembly *bundled [] = {"); } foreach (string c in c_bundle_names) { tc.WriteLine("\t&{0},", c); } tc.WriteLine("\tNULL\n};\n"); tc.WriteLine("static char *image_name = \"{0}\";", prog); if (ctor_func != null) { tc.WriteLine("\nextern void {0} (void);", ctor_func); tc.WriteLine("\n__attribute__ ((constructor)) static void mono_mkbundle_ctor (void)"); tc.WriteLine("{{\n\t{0} ();\n}}", ctor_func); } tc.WriteLine("\nstatic void install_dll_config_files (void) {\n"); foreach (string[] ass in config_names) { tc.WriteLine("\tmono_register_config_for_assembly (\"{0}\", assembly_config_{1});\n", ass [0], ass [1]); } if (config_file != null) { tc.WriteLine("\tmono_config_parse_memory (&system_config);\n"); } if (machine_config_file != null) { tc.WriteLine("\tmono_register_machine_config (&machine_config);\n"); } tc.WriteLine("}\n"); if (config_dir != null) { tc.WriteLine("static const char *config_dir = \"{0}\";", config_dir); } else { tc.WriteLine("static const char *config_dir = NULL;"); } Stream template_stream; if (compress) { template_stream = System.Reflection.Assembly.GetAssembly(typeof(MakeBundle)).GetManifestResourceStream("template_z.c"); } else { template_stream = System.Reflection.Assembly.GetAssembly(typeof(MakeBundle)).GetManifestResourceStream("template.c"); } StreamReader s = new StreamReader(template_stream); string template = s.ReadToEnd(); tc.Write(template); if (!nomain) { Stream template_main_stream = System.Reflection.Assembly.GetAssembly(typeof(MakeBundle)).GetManifestResourceStream("template_main.c"); StreamReader st = new StreamReader(template_main_stream); string maintemplate = st.ReadToEnd(); tc.Write(maintemplate); } tc.Close(); if (compile_only) { return; } string zlib = (compress ? "-lz" : ""); string debugging = "-g"; string cc = GetEnv("CC", IsUnix ? 
"cc" : "i686-pc-mingw32-gcc"); if (style == "linux") { debugging = "-ggdb"; } if (static_link) { string smonolib; if (style == "osx") { smonolib = "`pkg-config --variable=libdir mono-2`/libmono-2.0.a "; } else { smonolib = "-Wl,-Bstatic -lmono-2.0 -Wl,-Bdynamic "; } cmd = String.Format("{4} -o {2} -Wall `pkg-config --cflags mono-2` {0} {3} " + "`pkg-config --libs-only-L mono-2` " + smonolib + "`pkg-config --libs-only-l mono-2 | sed -e \"s/\\-lmono-2.0 //\"` {1}", temp_c, temp_o, output, zlib, cc); } else { cmd = String.Format("{4} " + debugging + " -o {2} -Wall {0} `pkg-config --cflags --libs mono-2` {3} {1}", temp_c, temp_o, output, zlib, cc); } ret = Execute(cmd); if (ret != 0) { Error("[Fail]"); return; } Console.WriteLine("Done"); } } } finally { if (!keeptemp) { if (object_out == null) { File.Delete(temp_o); } if (!compile_only) { File.Delete(temp_c); } File.Delete(temp_s); } } }
public virtual async Task<TResponse> RequestAsync<TResponse>(RequestData requestData, CancellationToken cancellationToken)
    where TResponse : class, ITransportResponse, new()
{
    Action unregisterWaitHandle = null;
    int? statusCode = null;
    IEnumerable<string> warnings = null;
    Stream responseStream = null;
    Exception ex = null;
    string mimeType = null;
    ReadOnlyDictionary<TcpState, int> tcpStats = null;
    ReadOnlyDictionary<string, ThreadPoolStatistics> threadPoolStats = null;
    try
    {
        var data = requestData.PostData;
        var request = CreateHttpWebRequest(requestData);
        using (cancellationToken.Register(() => request.Abort()))
        {
            if (data != null)
            {
                var apmGetRequestStreamTask = Task.Factory.FromAsync(request.BeginGetRequestStream, r => request.EndGetRequestStream(r), null);
                unregisterWaitHandle = RegisterApmTaskTimeout(apmGetRequestStreamTask, request, requestData);
                using (var stream = await apmGetRequestStreamTask.ConfigureAwait(false))
                {
                    if (requestData.HttpCompression)
                    {
                        using (var zipStream = new GZipStream(stream, CompressionMode.Compress))
                            await data.WriteAsync(zipStream, requestData.ConnectionSettings, cancellationToken).ConfigureAwait(false);
                    }
                    else
                    {
                        await data.WriteAsync(stream, requestData.ConnectionSettings, cancellationToken).ConfigureAwait(false);
                    }
                }
                unregisterWaitHandle?.Invoke();
            }
            requestData.MadeItToResponse = true;

            // http://msdn.microsoft.com/en-us/library/system.net.httpwebresponse.getresponsestream.aspx
            // Either the stream or the response object needs to be closed, but not both; it won't
            // throw any errors if both are closed, but at least one of them has to be closed.
            // Since we expose the stream, we let closing the stream determine when to close the connection.
            var apmGetResponseTask = Task.Factory.FromAsync(request.BeginGetResponse, r => request.EndGetResponse(r), null);
            unregisterWaitHandle = RegisterApmTaskTimeout(apmGetResponseTask, request, requestData);

            if (requestData.TcpStats)
                tcpStats = TcpStats.GetStates();
            if (requestData.ThreadPoolStats)
                threadPoolStats = ThreadPoolStats.GetStats();

            var httpWebResponse = (HttpWebResponse)await apmGetResponseTask.ConfigureAwait(false);
            HandleResponse(httpWebResponse, out statusCode, out responseStream, out mimeType);
            if (httpWebResponse.SupportsHeaders && httpWebResponse.Headers.HasKeys() && httpWebResponse.Headers.AllKeys.Contains("Warning"))
                warnings = httpWebResponse.Headers.GetValues("Warning");
        }
    }
    catch (WebException e)
    {
        ex = e;
        if (e.Response is HttpWebResponse httpWebResponse)
            HandleResponse(httpWebResponse, out statusCode, out responseStream, out mimeType);
    }
    finally
    {
        unregisterWaitHandle?.Invoke();
    }
    responseStream ??= Stream.Null;
    var response = await ResponseBuilder.ToResponseAsync<TResponse>(requestData, ex, statusCode, warnings, responseStream, mimeType, cancellationToken)
        .ConfigureAwait(false);

    // set TCP and threadpool stats on the response here so that in the event the request fails after the point of
    // gathering stats, they are still exposed on the call details. Ideally these would be set inside ResponseBuilder.ToResponse,
    // but doing so would be a breaking change in 7.x
    response.ApiCall.TcpStats = tcpStats;
    response.ApiCall.ThreadPoolStats = threadPoolStats;
    return response;
}
public virtual TResponse Request<TResponse>(RequestData requestData)
    where TResponse : class, ITransportResponse, new()
{
    int? statusCode = null;
    IEnumerable<string> warnings = null;
    Stream responseStream = null;
    Exception ex = null;
    string mimeType = null;
    ReadOnlyDictionary<TcpState, int> tcpStats = null;
    ReadOnlyDictionary<string, ThreadPoolStatistics> threadPoolStats = null;
    try
    {
        var request = CreateHttpWebRequest(requestData);
        var data = requestData.PostData;
        if (data != null)
        {
            using (var stream = request.GetRequestStream())
            {
                if (requestData.HttpCompression)
                {
                    using (var zipStream = new GZipStream(stream, CompressionMode.Compress))
                        data.Write(zipStream, requestData.ConnectionSettings);
                }
                else
                {
                    data.Write(stream, requestData.ConnectionSettings);
                }
            }
        }
        requestData.MadeItToResponse = true;

        if (requestData.TcpStats)
            tcpStats = TcpStats.GetStates();
        if (requestData.ThreadPoolStats)
            threadPoolStats = ThreadPoolStats.GetStats();

        // http://msdn.microsoft.com/en-us/library/system.net.httpwebresponse.getresponsestream.aspx
        // Either the stream or the response object needs to be closed, but not both; it won't
        // throw any errors if both are closed, but at least one of them has to be closed.
        // Since we expose the stream, we let closing the stream determine when to close the connection.
        var httpWebResponse = (HttpWebResponse)request.GetResponse();
        HandleResponse(httpWebResponse, out statusCode, out responseStream, out mimeType);

        // response.Headers.HasKeys() can return false even if response.Headers.AllKeys has values.
        if (httpWebResponse.SupportsHeaders && httpWebResponse.Headers.Count > 0 && httpWebResponse.Headers.AllKeys.Contains("Warning"))
            warnings = httpWebResponse.Headers.GetValues("Warning");
    }
    catch (WebException e)
    {
        ex = e;
        if (e.Response is HttpWebResponse httpWebResponse)
            HandleResponse(httpWebResponse, out statusCode, out responseStream, out mimeType);
    }
    responseStream ??= Stream.Null;
    var response = ResponseBuilder.ToResponse<TResponse>(requestData, ex, statusCode, warnings, responseStream, mimeType);

    // set TCP and threadpool stats on the response here so that in the event the request fails after the point of
    // gathering stats, they are still exposed on the call details. Ideally these would be set inside ResponseBuilder.ToResponse,
    // but doing so would be a breaking change in 7.x
    response.ApiCall.TcpStats = tcpStats;
    response.ApiCall.ThreadPoolStats = threadPoolStats;
    return response;
}
// public void Save(string strFile, PwGroup pgDataSource, KdbxFormat fmt, // IStatusLogger slLogger) // { // bool bMadeUnhidden = UrlUtil.UnhideFile(strFile); // // IOConnectionInfo ioc = IOConnectionInfo.FromPath(strFile); // this.Save(IOConnection.OpenWrite(ioc), pgDataSource, format, slLogger); // // if(bMadeUnhidden) UrlUtil.HideFile(strFile, true); // Hide again // } /// <summary> /// Save the contents of the current <c>PwDatabase</c> to a KDBX file. /// </summary> /// <param name="sSaveTo">Stream to write the KDBX file into.</param> /// <param name="pgDataSource">Group containing all groups and /// entries to write. If <c>null</c>, the complete database will /// be written.</param> /// <param name="fmt">Format of the file to create.</param> /// <param name="slLogger">Logger that recieves status information.</param> public void Save(Stream sSaveTo, PwGroup pgDataSource, KdbxFormat fmt, IStatusLogger slLogger) { Debug.Assert(sSaveTo != null); if (sSaveTo == null) { throw new ArgumentNullException("sSaveTo"); } if (m_bUsedOnce) { throw new InvalidOperationException("Do not reuse KdbxFile objects!"); } m_bUsedOnce = true; m_format = fmt; m_slLogger = slLogger; m_xmlWriter = null; PwGroup pgRoot = (pgDataSource ?? m_pwDatabase.RootGroup); UTF8Encoding encNoBom = StrUtil.Utf8; CryptoRandom cr = CryptoRandom.Instance; byte[] pbCipherKey = null; byte[] pbHmacKey64 = null; m_pbsBinaries.Clear(); m_pbsBinaries.AddFrom(pgRoot); List <Stream> lStreams = new List <Stream>(); lStreams.Add(sSaveTo); HashingStreamEx sHashing = new HashingStreamEx(sSaveTo, true, null); lStreams.Add(sHashing); try { // Fix history entries (should not be necessary; just for safety, // as e.g. XPath searches depend on correct history entry UUIDs) if (m_pwDatabase.MaintainBackups()) { Debug.Assert(false); } m_uFileVersion = GetMinKdbxVersion(); int cbEncKey, cbEncIV; ICipherEngine iCipher = GetCipher(out cbEncKey, out cbEncIV); m_pbMasterSeed = cr.GetRandomBytes(32); m_pbEncryptionIV = cr.GetRandomBytes((uint)cbEncIV); // m_pbTransformSeed = cr.GetRandomBytes(32); PwUuid puKdf = m_pwDatabase.KdfParameters.KdfUuid; KdfEngine kdf = KdfPool.Get(puKdf); if (kdf == null) { throw new Exception(KLRes.UnknownKdf + MessageService.NewParagraph + // KLRes.FileNewVerOrPlgReq + MessageService.NewParagraph + "UUID: " + puKdf.ToHexString() + "."); } kdf.Randomize(m_pwDatabase.KdfParameters); if (m_format == KdbxFormat.Default) { if (m_uFileVersion < FileVersion32_4) { m_craInnerRandomStream = CrsAlgorithm.Salsa20; m_pbInnerRandomStreamKey = cr.GetRandomBytes(32); } else // KDBX >= 4 { m_craInnerRandomStream = CrsAlgorithm.ChaCha20; m_pbInnerRandomStreamKey = cr.GetRandomBytes(64); } m_randomStream = new CryptoRandomStream(m_craInnerRandomStream, m_pbInnerRandomStreamKey); } if (m_uFileVersion < FileVersion32_4) { m_pbStreamStartBytes = cr.GetRandomBytes(32); } Stream sXml; if (m_format == KdbxFormat.Default) { byte[] pbHeader = GenerateHeader(); m_pbHashOfHeader = CryptoUtil.HashSha256(pbHeader); MemUtil.Write(sHashing, pbHeader); sHashing.Flush(); ComputeKeys(out pbCipherKey, cbEncKey, out pbHmacKey64); Stream sPlain; if (m_uFileVersion < FileVersion32_4) { Stream sEncrypted = EncryptStream(sHashing, iCipher, pbCipherKey, cbEncIV, true); if ((sEncrypted == null) || (sEncrypted == sHashing)) { throw new SecurityException(KLRes.CryptoStreamFailed); } lStreams.Add(sEncrypted); MemUtil.Write(sEncrypted, m_pbStreamStartBytes); sPlain = new HashedBlockStream(sEncrypted, true); } else // KDBX >= 4 { // For integrity checking (without knowing 
the master key) MemUtil.Write(sHashing, m_pbHashOfHeader); byte[] pbHeaderHmac = ComputeHeaderHmac(pbHeader, pbHmacKey64); MemUtil.Write(sHashing, pbHeaderHmac); Stream sBlocks = new HmacBlockStream(sHashing, true, true, pbHmacKey64); lStreams.Add(sBlocks); sPlain = EncryptStream(sBlocks, iCipher, pbCipherKey, cbEncIV, true); if ((sPlain == null) || (sPlain == sBlocks)) { throw new SecurityException(KLRes.CryptoStreamFailed); } } lStreams.Add(sPlain); if (m_pwDatabase.Compression == PwCompressionAlgorithm.GZip) { sXml = new GZipStream(sPlain, CompressionMode.Compress); lStreams.Add(sXml); } else { sXml = sPlain; } if (m_uFileVersion >= FileVersion32_4) { WriteInnerHeader(sXml); // Binary header before XML } } else if (m_format == KdbxFormat.PlainXml) { sXml = sHashing; } else { Debug.Assert(false); throw new ArgumentOutOfRangeException("fmt"); } m_xmlWriter = XmlUtilEx.CreateXmlWriter(sXml); WriteDocument(pgRoot); m_xmlWriter.Flush(); } finally { CommonCleanUpWrite(lStreams, sHashing); if (pbCipherKey != null) { MemUtil.ZeroByteArray(pbCipherKey); } if (pbHmacKey64 != null) { MemUtil.ZeroByteArray(pbHmacKey64); } } }
protected override void ProcessItem(LogItem item)
{
    if (item == null || string.IsNullOrWhiteSpace(item.Exception.ToString()))
    {
        return;
    }
    try
    {
        var file = Path.Combine(
            LogDir,
            string.Format(
                _fileName,
                DateTime.Now.ToString("yyyy_MM_dd"),
                LogLevel.ToString().ToLower(),
                (item.Exception + AdditionalData.ToDebugString()).ToMd5Hash()));
        if (File.Exists(file))
        {
            return;
        }
        AddData(item.Exception);
        if (OutputConsole)
        {
            Console.ForegroundColor = ConsoleColor.Yellow;
            Console.WriteLine(item.Exception);
            Console.ResetColor();
        }
        using (var fileStream = new FileStream(file, FileMode.CreateNew, FileAccess.Write, FileShare.None, 4096, true))
        {
            var text = item.Exception.ToString();
            text = item.Exception.Data.Cast<DictionaryEntry>()
                .Aggregate(
                    text + Environment.NewLine + Environment.NewLine,
                    (current, entry) => current + string.Format("{0}: {1}", entry.Key, entry.Value + Environment.NewLine));
            if (string.IsNullOrWhiteSpace(text.Trim()))
            {
                return;
            }
            var logByte = new UTF8Encoding(true).GetBytes(text);
            if (Compression)
            {
                // only wrap the file in a GZipStream when compression is enabled, so an
                // uncompressed log file is not polluted with an empty GZip header/footer
                using (Stream gzStream = new GZipStream(fileStream, CompressionMode.Compress, false))
                {
                    gzStream.Write(logByte, 0, logByte.Length);
                }
            }
            else
            {
                fileStream.Write(logByte, 0, logByte.Length);
            }
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex);
    }
}
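// A minimal reader for a log file written with Compression enabled above; the method name and
// the log file path are illustrative, and UTF-8 matches the encoding used when writing.
static string ReadCompressedLog(string logFile)
{
    using (var fileStream = File.OpenRead(logFile))
    using (var gzStream = new GZipStream(fileStream, CompressionMode.Decompress))
    using (var reader = new StreamReader(gzStream, Encoding.UTF8))
    {
        return reader.ReadToEnd();
    }
}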