// Exercise test: writes dupeCount identical files into the assembly directory, drives the
// real Program entry points to build a catalog + MD5 cache over it, then verifies that
// Duplication.GetDupePairs reports a dupe group containing at least dupeCount entries.
public void GetSizePairs_CheckSanityOfDupeSizeCountandDupeFileCount_Exercise()
{
    const int dupeCount = 10;
    var testPath = this.AssemblyPathLocation();

    // Arrange: create dupeCount files with identical content so the catalog
    // contains one known duplicate group. Track them so we can clean up afterwards.
    var random = FileHelper.RandomString(4096 * 16);
    var createdFiles = new List<string>(dupeCount);
    for (var i = 1; i <= dupeCount; i++)
    {
        var filePath = Path.Combine(testPath, $"CDE_testFile{i}.txt");
        File.WriteAllText(filePath, random);
        createdFiles.Add(filePath);
    }

    try
    {
        // Hacky catalog creation: reuses the application's own bootstrap and cache
        // commands rather than building the catalog in-process.
        Program.Container = BootStrapper.Components();
        Program.CreateCache($"{testPath}.\\"); // NOTE(review): the ".\\" suffix looks odd — confirm the intended path argument.
        Program.CreateMd5OnCache();

        Console.WriteLine($"0 Directory.GetCurrentDirectory() {Directory.GetCurrentDirectory()}");
        var rootEntries = RootEntry.LoadCurrentDirCache();
        if (rootEntries.Count == 0)
        {
            Console.WriteLine("No Catalogs found.");
            Assert.Fail("No catalogs found.");
        }

        foreach (var r in rootEntries)
        {
            Console.WriteLine($"loaded {r.DefaultFileName}");
        }

        var d = new Duplication(_logger, _configuration, _applicationDiagnostics);

        var sizePairDictionary = d.GetSizePairs(rootEntries);
        Console.WriteLine($"Number of Size Pairs {sizePairDictionary.Count}");
        var sumOfUniqueHashesForEachSize = GetSumOfUniqueHashesForEachSize_ExcludePartialHash(sizePairDictionary);
        Console.WriteLine($"Sum of total unique hashes (split on filesize to) {sumOfUniqueHashesForEachSize}");

        var dupePairEnum = d.GetDupePairs(rootEntries);
        var itemToVerify = dupePairEnum.SingleOrDefault(x => x.Key.Path.Contains("CDE_testFile"));

        // SingleOrDefault yields a default KeyValuePair (Value == null) when no group matches;
        // fail with a clear message instead of a NullReferenceException on .Value.Count below.
        Assert.IsNotNull(itemToVerify.Value, "Expected a dupe-pair group containing the CDE_testFile duplicates.");

        // Assert we have at least a count of the number of dupe files we originally created.
        Assert.GreaterOrEqual(itemToVerify.Value.Count, dupeCount);
    }
    finally
    {
        // Cleanup (was the original TODO): best-effort removal of the dummy duplicate files.
        foreach (var filePath in createdFiles)
        {
            try
            {
                File.Delete(filePath);
            }
            catch (IOException)
            {
                // Best-effort cleanup only; a locked file must not fail the test.
            }
        }
    }
}