/// <summary>
///     Reads the whole of <paramref name="path"/> from the filesystem under test and returns the MD5
///     of its contents as a hexadecimal string. Used to build expected values for <c>TestFile</c>.
/// </summary>
/// <param name="fs">Read-only filesystem instance to read from.</param>
/// <param name="path">Path of the file inside the filesystem.</param>
/// <param name="length">Length in bytes of the file to read.</param>
/// <returns>MD5 of the file contents.</returns>
string BuildFile(IReadOnlyFilesystem fs, string path, long length)
{
    byte[] contents = new byte[length];

    // NOTE(review): the Errno returned by Read is deliberately not checked here;
    // the asserting variant of this helper is TestFile.
    fs.Read(path, 0, length, ref contents);

    return Md5Context.Data(contents, out _);
}
/// <summary>
///     Reads <paramref name="path"/> from the filesystem under test, asserts that the read succeeds,
///     and asserts that the MD5 of the contents matches the expected value.
/// </summary>
/// <param name="fs">Read-only filesystem instance to read from.</param>
/// <param name="path">Path of the file inside the filesystem.</param>
/// <param name="md5">Expected MD5 of the file contents.</param>
/// <param name="length">Length in bytes of the file to read.</param>
/// <param name="testFile">Name of the image under test, used in assertion messages.</param>
void TestFile(IReadOnlyFilesystem fs, string path, string md5, long length, string testFile)
{
    byte[] contents = new byte[length];

    // The read must succeed before the contents are worth hashing.
    Errno ret = fs.Read(path, 0, length, ref contents);
    Assert.AreEqual(Errno.NoError, ret, $"Unexpected error {ret} when reading \"{path}\" in {testFile}");

    string data = Md5Context.Data(contents, out _);
    Assert.AreEqual(md5, data, $"Got MD5 {data} for \"{path}\" in {testFile} but expected {md5}");
}
/// <summary>Checks that the MD5 of 1 MiB of known random data matches the expected digest.</summary>
public void Md5RandomData()
{
    byte[] data = new byte[1048576];

    // BUGFIX: the original leaked the FileStream if Read threw, and ignored the return
    // value of Stream.Read, which is allowed to read fewer bytes than requested.
    // Read in a loop until the buffer is full (or EOF) and dispose deterministically.
    using(var fs = new FileStream(Path.Combine(Consts.TestFilesRoot, "checksums", "random"),
                                  FileMode.Open, FileAccess.Read))
    {
        int offset = 0;

        while(offset < data.Length)
        {
            int read = fs.Read(data, offset, data.Length - offset);

            if(read == 0)
                break; // EOF before the buffer filled; hash whatever was read.

            offset += read;
        }
    }

    Md5Context.Data(data, out byte[] result);
    Assert.AreEqual(ExpectedRandom, result);
}
/// <summary>Checks that the MD5 of 1 MiB of known random data matches the expected digest.</summary>
public void Md5RandomData()
{
    byte[] data = new byte[1048576];

    // BUGFIX: the original leaked the FileStream if Read threw, and ignored the return
    // value of Stream.Read, which is allowed to read fewer bytes than requested.
    // Read in a loop until the buffer is full (or EOF) and dispose deterministically.
    using(var fs = new FileStream(Path.Combine(Consts.TEST_FILES_ROOT, "Checksum test files", "random"),
                                  FileMode.Open, FileAccess.Read))
    {
        int offset = 0;

        while(offset < data.Length)
        {
            int read = fs.Read(data, offset, data.Length - offset);

            if(read == 0)
                break; // EOF before the buffer filled; hash whatever was read.

            offset += read;
        }
    }

    Md5Context.Data(data, out byte[] result);
    Assert.AreEqual(_expectedRandom, result);
}
/// <summary>Checks that the MD5 of 1 MiB of zero-filled data matches the expected digest.</summary>
public void EmptyData()
{
    byte[] data = new byte[1048576];

    // BUGFIX: the original leaked the FileStream if Read threw, and ignored the return
    // value of Stream.Read, which is allowed to read fewer bytes than requested.
    // Read in a loop until the buffer is full (or EOF) and dispose deterministically.
    using(var fs = new FileStream(Path.Combine(Consts.TEST_FILES_ROOT, "Checksum test files", "empty"),
                                  FileMode.Open, FileAccess.Read))
    {
        int offset = 0;

        while(offset < data.Length)
        {
            int read = fs.Read(data, offset, data.Length - offset);

            if(read == 0)
                break; // EOF before the buffer filled; hash whatever was read.

            offset += read;
        }
    }

    Md5Context.Data(data, out byte[] result);
    result.Should().BeEquivalentTo(_expectedEmpty);
}
/// <summary>Checks that the data fork exposed by the GZip filter decompresses to the expected MD5.</summary>
public void CheckContents()
{
    IFilter filter = new DiscImageChef.Filters.GZip();
    filter.Open(location);

    byte[] data = new byte[1048576];

    // BUGFIX: Stream.Read is not guaranteed to fill the buffer in one call, and a
    // decompression stream in particular routinely returns short reads. Loop until
    // the buffer is full or the stream ends, and dispose the stream even on failure.
    using(Stream str = filter.GetDataForkStream())
    {
        int offset = 0;

        while(offset < data.Length)
        {
            int read = str.Read(data, offset, data.Length - offset);

            if(read == 0)
                break; // EOF

            offset += read;
        }
    }

    filter.Close();

    Md5Context.Data(data, out byte[] result);
    Assert.AreEqual(ExpectedContents, result);
}
/// <summary>Checks that the resource fork exposed by the MacBinary filter has the expected MD5.</summary>
public void CheckResource()
{
    IFilter filter = new MacBinary();
    filter.Open(location);

    byte[] data = new byte[286];

    // BUGFIX: Stream.Read is not guaranteed to fill the buffer in one call; the
    // original ignored its return value and leaked the stream on failure. Loop
    // until the buffer is full or the stream ends, disposing deterministically.
    using(Stream str = filter.GetResourceForkStream())
    {
        int offset = 0;

        while(offset < data.Length)
        {
            int read = str.Read(data, offset, data.Length - offset);

            if(read == 0)
                break; // EOF

            offset += read;
        }
    }

    filter.Close();

    string result = Md5Context.Data(data, out _);
    Assert.AreEqual(EXPECTED_RESOURCE, result);
}
/// <summary>Checks that the data fork exposed by the MacBinary filter has the expected MD5.</summary>
public void CheckContents()
{
    IFilter filter = new MacBinary();
    filter.Open(location);

    byte[] data = new byte[737280];

    // BUGFIX: Stream.Read is not guaranteed to fill the buffer in one call; the
    // original ignored its return value and leaked the stream on failure. Loop
    // until the buffer is full or the stream ends, disposing deterministically.
    using(Stream str = filter.GetDataForkStream())
    {
        int offset = 0;

        while(offset < data.Length)
        {
            int read = str.Read(data, offset, data.Length - offset);

            if(read == 0)
                break; // EOF

            offset += read;
        }
    }

    filter.Close();

    string result = Md5Context.Data(data, out _);
    Assert.AreEqual(EXPECTED_CONTENTS, result);
}
/// <summary>
///     Asserts that the extended attributes of <paramref name="path"/> match exactly the
///     expected set: every expected xattr must exist with the given MD5, and no unexpected
///     xattr may be present.
/// </summary>
/// <param name="fs">Read-only filesystem instance to query.</param>
/// <param name="path">Path of the file inside the filesystem.</param>
/// <param name="xattrs">Expected xattr names mapped to the MD5 of their contents.</param>
/// <param name="testFile">Name of the image under test, used in assertion messages.</param>
void TestFileXattrs(IReadOnlyFilesystem fs, string path, Dictionary<string, string> xattrs, string testFile)
{
    Errno error = fs.ListXAttr(path, out List<string> contents);

    // Nothing expected and nothing listed (including an unsupported/failed listing
    // that produced no list): nothing to verify.
    if(xattrs.Count == 0 && (contents == null || contents.Count == 0))
        return;

    // BUGFIX: the original ignored ListXAttr's Errno; if listing failed while xattrs
    // were expected, dereferencing a null list crashed the test with a
    // NullReferenceException instead of a diagnostic assertion failure.
    Assert.AreEqual(Errno.NoError, error,
                    $"Unexpected error {error} listing extended attributes of \"{path}\" in {testFile}");

    List<string> expectedNotFound = new List<string>();

    foreach(KeyValuePair<string, string> xattr in xattrs)
    {
        byte[] buffer = new byte[0];
        Errno ret = fs.GetXattr(path, xattr.Key, ref buffer);

        // Collect missing attributes so all of them can be reported together below.
        if(ret == Errno.NoSuchExtendedAttribute || !contents.Contains(xattr.Key))
        {
            expectedNotFound.Add(xattr.Key);
            continue;
        }

        // Whatever remains in `contents` after the loop is unexpected.
        contents.Remove(xattr.Key);

        Assert.AreEqual(Errno.NoError, ret,
                        $"Unexpected error {ret} retrieving extended attributes for \"{path}\" in {testFile}");

        string data = Md5Context.Data(buffer, out _);

        Assert.AreEqual(xattr.Value, data,
                        $"Got MD5 {data} for {xattr.Key} of \"{path}\" in {testFile} but expected {xattr.Value}");
    }

    Assert.IsEmpty(expectedNotFound,
                   $"Could not find the following extended attributes of \"{path}\" in {testFile}: {string.Join(" ", expectedNotFound)}");

    Assert.IsEmpty(contents,
                   $"Found the following unexpected extended attributes of \"{path}\" in {testFile}: {string.Join(" ", contents)}");
}
/// <summary>
///     Adds the operating system currently described by <c>Context.DbInfo</c> to the repository:
///     derives a deterministic archive path from the metadata, uses its MD5 as the OS identifier
///     (MDID), compresses every hashed file into a SHA256-sharded repository layout, and writes
///     the CICM metadata sidecars as XML and JSON. Progress and errors are reported through the
///     Failed / UpdateProgress / UpdateProgress2 / FinishedWithText events.
/// </summary>
public static void CompressFiles()
{
    try
    {
        // Developer, product and version are mandatory; bail out via the Failed event.
        if(string.IsNullOrWhiteSpace(Context.DbInfo.Developer))
        {
            Failed?.Invoke("Developer cannot be empty");
            return;
        }

        if(string.IsNullOrWhiteSpace(Context.DbInfo.Product))
        {
            Failed?.Invoke("Product cannot be empty");
            return;
        }

        if(string.IsNullOrWhiteSpace(Context.DbInfo.Version))
        {
            Failed?.Invoke("Version cannot be empty");
            return;
        }

        // Build the canonical folder: developer/product/version[/languages][/architecture][/oem][/for <machine>].
        string destinationFolder = "";
        destinationFolder = Path.Combine(destinationFolder, Context.DbInfo.Developer);
        destinationFolder = Path.Combine(destinationFolder, Context.DbInfo.Product);
        destinationFolder = Path.Combine(destinationFolder, Context.DbInfo.Version);

        if(!string.IsNullOrWhiteSpace(Context.DbInfo.Languages))
        {
            destinationFolder = Path.Combine(destinationFolder, Context.DbInfo.Languages);
        }

        if(!string.IsNullOrWhiteSpace(Context.DbInfo.Architecture))
        {
            destinationFolder = Path.Combine(destinationFolder, Context.DbInfo.Architecture);
        }

        if(Context.DbInfo.Oem)
        {
            destinationFolder = Path.Combine(destinationFolder, "oem");
        }

        if(!string.IsNullOrWhiteSpace(Context.DbInfo.Machine))
        {
            destinationFolder = Path.Combine(destinationFolder, "for " + Context.DbInfo.Machine);
        }

        // Build the canonical file name from format + content flags, joined with '_'.
        string destinationFile = "";

        if(!string.IsNullOrWhiteSpace(Context.DbInfo.Format))
        {
            destinationFile += "[" + Context.DbInfo.Format + "]";
        }

        if(Context.DbInfo.Files)
        {
            if(destinationFile != "")
            {
                destinationFile += "_";
            }

            destinationFile += "files";
        }

        if(Context.DbInfo.Netinstall)
        {
            if(destinationFile != "")
            {
                destinationFile += "_";
            }

            destinationFile += "netinstall";
        }

        if(Context.DbInfo.Source)
        {
            if(destinationFile != "")
            {
                destinationFile += "_";
            }

            destinationFile += "source";
        }

        if(Context.DbInfo.Update)
        {
            if(destinationFile != "")
            {
                destinationFile += "_";
            }

            destinationFile += "update";
        }

        if(Context.DbInfo.Upgrade)
        {
            if(destinationFile != "")
            {
                destinationFile += "_";
            }

            destinationFile += "upgrade";
        }

        if(!string.IsNullOrWhiteSpace(Context.DbInfo.Description))
        {
            if(destinationFile != "")
            {
                destinationFile += "_";
            }

            destinationFile += Context.DbInfo.Description;
        }
        else if(destinationFile == "")
        {
            // No format, flags or description at all: fall back to a generic name.
            destinationFile = "archive";
        }

        string destination = Path.Combine(destinationFolder, destinationFile) + ".zip";

        // The MDID (OS identifier) is the MD5 of the canonical archive path, so the same
        // metadata always maps to the same identifier.
        Md5Context md5 = new Md5Context();
        md5.Init();
        byte[] tmp;
        string mdid = md5.Data(Encoding.UTF8.GetBytes(destination), out tmp);
        Console.WriteLine("MDID: {0}", mdid);

        // Cross-check database and repository; any mismatch indicates corruption.
        if(dbCore.DbOps.ExistsOs(mdid))
        {
            if(File.Exists(destination))
            {
                Failed?.Invoke("OS already exists.");
                return;
            }

            Failed?.Invoke("OS already exists in the database but not in the repository, check for inconsistencies.");
            return;
        }

        if(File.Exists(destination))
        {
            Failed?.Invoke("OS already exists in the repository but not in the database, check for inconsistencies.");
            return;
        }

        Context.DbInfo.Mdid = mdid;

        // Prefer the temporary extraction folder when one exists; otherwise read from the
        // originally selected path.
        string filesPath;

        if(!string.IsNullOrEmpty(Context.TmpFolder) && Directory.Exists(Context.TmpFolder))
        {
            filesPath = Context.TmpFolder;
        }
        else
        {
            filesPath = Context.Path;
        }

        // File extension for the configured compression algorithm.
        string extension = null;

        switch(Settings.Current.CompressionAlgorithm)
        {
            case AlgoEnum.GZip:
                extension = ".gz";
                break;
            case AlgoEnum.BZip2:
                extension = ".bz2";
                break;
            case AlgoEnum.LZMA:
                extension = ".lzma";
                break;
            case AlgoEnum.LZip:
                extension = ".lz";
                break;
        }

        // Total byte count drives the outer progress bar.
        long totalSize = 0, currentSize = 0;

        foreach(KeyValuePair<string, DbOsFile> file in Context.Hashes)
        {
            totalSize += file.Value.Length;
        }

#if DEBUG
        stopwatch.Restart();
#endif
        foreach(KeyValuePair<string, DbOsFile> file in Context.Hashes)
        {
            UpdateProgress?.Invoke("Compressing...", file.Value.Path, currentSize, totalSize);

            // Repository layout: shard by the first five hex digits of the file's SHA256.
            destinationFolder = Path.Combine(Settings.Current.RepositoryPath, file.Value.Sha256[0].ToString(),
                                             file.Value.Sha256[1].ToString(), file.Value.Sha256[2].ToString(),
                                             file.Value.Sha256[3].ToString(), file.Value.Sha256[4].ToString());
            Directory.CreateDirectory(destinationFolder);

            destinationFile = Path.Combine(destinationFolder, file.Value.Sha256 + extension);

            if(!File.Exists(destinationFile))
            {
                FileStream inFs = new FileStream(Path.Combine(filesPath, file.Value.Path), FileMode.Open,
                                                 FileAccess.Read);
                // CreateNew: fail rather than overwrite if another file appears at this path.
                FileStream outFs = new FileStream(destinationFile, FileMode.CreateNew, FileAccess.Write);

                Stream zStream = null;

                switch(Settings.Current.CompressionAlgorithm)
                {
                    case AlgoEnum.GZip:
                        zStream = new GZipStream(outFs, CompressionMode.Compress, CompressionLevel.BestCompression);
                        break;
                    case AlgoEnum.BZip2:
                        zStream = new BZip2Stream(outFs, CompressionMode.Compress);
                        break;
                    case AlgoEnum.LZMA:
                        // Raw LZMA has no container, so the encoder properties and the
                        // uncompressed length (8 bytes) are written as a manual header
                        // before the compressed data.
                        zStream = new LzmaStream(new LzmaEncoderProperties(), false, outFs);
                        outFs.Write(((LzmaStream)zStream).Properties, 0, ((LzmaStream)zStream).Properties.Length);
                        outFs.Write(BitConverter.GetBytes(inFs.Length), 0, 8);
                        break;
                    case AlgoEnum.LZip:
                        zStream = new LZipStream(outFs, CompressionMode.Compress);
                        break;
                }

                // Copy whole BUFFER_SIZE chunks first, then the remainder below.
                // NOTE(review): the return value of inFs.Read is assumed to equal the
                // requested count (true for local FileStreams in practice).
                byte[] buffer = new byte[BUFFER_SIZE];

                while(inFs.Position + BUFFER_SIZE <= inFs.Length)
                {
                    UpdateProgress2?.Invoke($"{inFs.Position / (double)inFs.Length:P}",
                                            $"{inFs.Position} / {inFs.Length} bytes", inFs.Position, inFs.Length);
                    UpdateProgress?.Invoke("Compressing...", file.Value.Path, currentSize, totalSize);

                    inFs.Read(buffer, 0, buffer.Length);
                    zStream.Write(buffer, 0, buffer.Length);
                    currentSize += buffer.Length;
                }

                // Trailing partial chunk.
                buffer = new byte[inFs.Length - inFs.Position];
                UpdateProgress2?.Invoke($"{inFs.Position / (double)inFs.Length:P}",
                                        $"{inFs.Position} / {inFs.Length} bytes", inFs.Position, inFs.Length);
                UpdateProgress?.Invoke("Compressing...", file.Value.Path, currentSize, totalSize);

                inFs.Read(buffer, 0, buffer.Length);
                zStream.Write(buffer, 0, buffer.Length);
                currentSize += buffer.Length;

                UpdateProgress2?.Invoke($"{inFs.Length / (double)inFs.Length:P}", "Finishing...", inFs.Length,
                                        inFs.Length);

                // Closing zStream flushes/finalizes the compressed output before the
                // underlying file stream is disposed.
                inFs.Close();
                zStream.Close();
                outFs.Dispose();
            }
            else
            {
                // Already in the repository (content-addressed): count it as done.
                currentSize += file.Value.Length;
            }
        }
#if DEBUG
        stopwatch.Stop();
        Console.WriteLine("Core.CompressFiles(): Took {0} seconds to compress files",
                          stopwatch.Elapsed.TotalSeconds);
#endif

        if(Context.Metadata != null)
        {
            // Serialize the CICM metadata once as XML...
            MemoryStream xms = new MemoryStream();
            XmlSerializer xs = new XmlSerializer(typeof(CICMMetadataType));
            xs.Serialize(xms, Context.Metadata);
            xms.Position = 0;

            // ...and once as indented JSON (leaveOpen: true so jms survives the writer).
            JsonSerializer js = new JsonSerializer
            {
                Formatting = Formatting.Indented,
                NullValueHandling = NullValueHandling.Ignore
            };
            MemoryStream jms = new MemoryStream();
            StreamWriter sw = new StreamWriter(jms, Encoding.UTF8, 1048576, true);
            js.Serialize(sw, Context.Metadata, typeof(CICMMetadataType));
            sw.Close();
            jms.Position = 0;

            // Metadata sidecars are sharded by the first five characters of the MDID.
            destinationFolder = Path.Combine(Settings.Current.RepositoryPath, "metadata", mdid[0].ToString(),
                                             mdid[1].ToString(), mdid[2].ToString(), mdid[3].ToString(),
                                             mdid[4].ToString());
            Directory.CreateDirectory(destinationFolder);

            FileStream xfs = new FileStream(Path.Combine(destinationFolder, mdid + ".xml"), FileMode.CreateNew,
                                            FileAccess.Write);
            xms.CopyTo(xfs);
            xfs.Close();

            FileStream jfs = new FileStream(Path.Combine(destinationFolder, mdid + ".json"), FileMode.CreateNew,
                                            FileAccess.Write);
            jms.CopyTo(jfs);
            jfs.Close();

            // Rewind so later consumers of these streams can read them from the start.
            xms.Position = 0;
            jms.Position = 0;
        }

        FinishedWithText?.Invoke($"Correctly added operating system with MDID {mdid}");
    }
    catch(ThreadAbortException) { }
    catch(Exception ex)
    {
        // Rethrow under a debugger so the original failure point is visible.
        if(Debugger.IsAttached)
        {
            throw;
        }

        Failed?.Invoke($"Exception {ex.Message}\n{ex.InnerException}");
#if DEBUG
        Console.WriteLine("Exception {0}\n{1}", ex.Message, ex.InnerException);
#endif
    }
}