/// <summary>
/// Initializes screen duplication for the chosen adapter/output pair.
/// </summary>
/// <param name="adapter">
/// Description string of the selected adapter.
/// </param>
/// <param name="output">
/// Device name of the selected output.
/// </param>
public void InitDuplicate(string adapter, string output)
{
    // Resolve the adapter and output objects from their display strings.
    var selectedAdapter = Duplication.Adapters1.First(a => a.Description1.Description == adapter);
    var selectedOutput = Duplication.Outputs.First(o => o.Description.DeviceName == output);

    // 1000 / Fps converts frames-per-second into a per-frame interval in milliseconds.
    Duplication.InitDuplication(selectedAdapter, selectedOutput, ref Screen, 1000 / Fps);
}
/// <summary>
/// Exercises ApplyMd5Checksum over the catalogs in the current directory;
/// intended to check that partially hashed duplicates end up fully hashed.
/// </summary>
public void ApplyMd5Checksum_CheckDupesAndCompleteFullHash_DoesItEnsureAllPartialDupesAreFullHashed_Exercise()
{
    var rootEntries = RootEntry.LoadCurrentDirCache();
    var duplication = new Duplication(_logger, _configuration, _applicationDiagnostics);

    duplication.ApplyMd5Checksum(rootEntries);
}
/// <summary>
/// Asynchronously queries the outputs of the selected adapter and refreshes
/// the output list.
/// </summary>
/// <param name="adapter">
/// Description string of the selected adapter.
/// </param>
/// <returns>
/// A task representing the asynchronous operation.
/// </returns>
public async Task QueryOutputsAsync(string adapter)
{
    var selectedAdapter = Duplication.Adapters1.First(a => a.Description1.Description == adapter);
    Duplication.QueryOutputs(selectedAdapter);
    await RefrushOutputs();
}
/// <summary>
/// Main entry point: greets the user by name, then reads an integer and
/// prints it doubled (and doubled again).
/// </summary>
/// <param name="args">Command-line arguments (unused).</param>
public static void Main(string[] args)
{
    Console.Write("Please enter your name: ");
    string name = Console.ReadLine();

    // Console.ReadLine() returns null on end-of-stream; the original
    // name.Trim() call would then throw a NullReferenceException.
    if (!string.IsNullOrWhiteSpace(name))
    {
        Greeter greeter = new Greeter();
        Console.WriteLine(greeter.Greet(name));
    }

    Console.Write("Please enter an integer: ");
    string input = Console.ReadLine();

    // TryParse tolerates null or malformed input instead of throwing.
    if (Int32.TryParse(input, out int number))
    {
        var numResult = Duplication.timesTwo(number);
        Console.WriteLine(numResult);
        Console.WriteLine(Duplication.multiplyByTwo(numResult));
    }
}
/// <inheritdoc />
/// <summary>
/// Performs application-defined tasks associated with freeing, releasing, or
/// resetting unmanaged resources.
/// </summary>
public void Dispose()
{
    // Stop the capture loop before tearing down the DirectX objects.
    Alive = false;

    // Null-conditional dispose throughout: the original disposed most fields
    // unconditionally, so a partially-initialized instance — or a second call
    // to Dispose after the fields were nulled out — threw NullReferenceException.
    Duplication?.Dispose();
    Duplication = null;
    Output?.Dispose();
    Output = null;
    Output1?.Dispose();
    Output1 = null;
    Output6?.Dispose();
    Output6 = null;
    DxgiDevice?.Dispose();
    DxgiDevice = null;
    Device?.Dispose();
    Device = null;
    Texture?.Dispose();
    Texture = null;
    Adapter1?.Dispose();
    Adapter1 = null;
}
/// <summary>
/// Selects an output by device name and reports its resolution.
/// </summary>
/// <param name="output">
/// Device name of the output to select.
/// </param>
/// <returns>
/// The resolution as "WidthxHeight", or "null" when no outputs are available.
/// </returns>
public Task<string> SelectOutput(string output)
{
    return Task.Run(() =>
    {
        if (Duplication.Outputs is null)
        {
            return "null";
        }

        var match = Duplication.Outputs.First(o => o.Description.DeviceName == output);
        var screen = Duplication.SelectOutput(match);
        return $"{screen.Width}x{screen.Height}";
    });
}
/// <inheritdoc />
/// <summary>
/// Performs application-defined tasks associated with freeing, releasing, or
/// resetting unmanaged resources.
/// </summary>
public void Dispose()
{
    // Null-conditional dispose throughout: the original disposed most fields
    // unconditionally (only Output1/Output6 were guarded), so disposing a
    // partially-initialized instance threw NullReferenceException.
    Duplication?.Dispose();
    Output?.Dispose();
    Output1?.Dispose();
    Output6?.Dispose();
    Context?.Dispose();
    DxgiDevice?.Dispose();
    Device?.Dispose();
    Texture?.Dispose();
    Adapter1?.Dispose();
}
/// <summary>
/// Creates a set of identical dummy files, builds a catalog plus MD5 hashes
/// over them, and then sanity-checks the size-pair and dupe-pair counts.
/// </summary>
public void GetSizePairs_CheckSanityOfDupeSizeCountandDupeFileCount_Exercise()
{
    const int dupeCount = 10;
    var testPath = this.AssemblyPathLocation();

    // Create some dummy duplicate data: dupeCount files with identical content.
    var random = FileHelper.RandomString(4096 * 16);
    for (var i = 1; i <= dupeCount; i++)
    {
        File.WriteAllText($"{testPath}\\CDE_testFile{i}.txt", random);
    }

    // Hacky catalog creation via the Program entry points.
    Program.Container = BootStrapper.Components();
    Program.CreateCache($"{testPath}.\\");
    Program.CreateMd5OnCache();

    // Run the checks.
    Console.WriteLine($"0 Directory.GetCurrentDirectory() {Directory.GetCurrentDirectory()}");
    var rootEntries = RootEntry.LoadCurrentDirCache();
    if (rootEntries.Count == 0)
    {
        Console.WriteLine("No Catalogs found.");
        Assert.Fail("No catalogs found.");
    }

    foreach (var rootEntry in rootEntries)
    {
        Console.WriteLine($"loaded {rootEntry.DefaultFileName}");
    }

    var duplication = new Duplication(_logger, _configuration, _applicationDiagnostics);

    var sizePairDictionary = duplication.GetSizePairs(rootEntries);
    Console.WriteLine($"Number of Size Pairs {sizePairDictionary.Count}");

    var sumOfUniqueHashesForEachSize = GetSumOfUniqueHashesForEachSize_ExcludePartialHash(sizePairDictionary);
    Console.WriteLine($"Sum of total unique hashes (split on filesize to) {sumOfUniqueHashesForEachSize}");

    var dupePairEnum = duplication.GetDupePairs(rootEntries);
    var itemToVerify = dupePairEnum.SingleOrDefault(x => x.Key.Path.Contains("CDE_testFile"));

    // Assert we have at least a count of the number of dupe files we originally created.
    Assert.GreaterOrEqual(itemToVerify.Value.Count, dupeCount);

    //TODO: Cleanup test.
}
/// <summary>
/// Verifies that every file participating in a duplicate pair carries a full
/// (not partial) hash.
/// </summary>
public void GetDupePairs_CheckAllDupeFilesHaveFullHash_OK()
{
    var rootEntries = RootEntry.LoadCurrentDirCache();
    var duplication = new Duplication(_logger, _configuration, _applicationDiagnostics);

    foreach (var dupePair in duplication.GetDupePairs(rootEntries))
    {
        foreach (var entry in dupePair.Value)
        {
            // A partial hash on a reported duplicate means the full-hash
            // pass missed it — fail loudly with the offending path.
            if (!entry.ChildDE.IsPartialHash)
            {
                continue;
            }

            Console.WriteLine($"Trouble partial hash {entry.FullPath}");
            Assert.Fail();
        }
    }
}
/// <summary>
/// Verifies that GetDupePairs only groups entries whose size AND full hash
/// match: same-hash/different-size files, partial hashes, and unhashed
/// entries must not be reported as duplicates.
/// </summary>
public void GetDupePairs_DupeHashDoesNotMatchDiffSizeFilesOrPartialHash_OK()
{
    var re1 = new RootEntry { Path = @"C:\" };

    // Size 10: two distinct hashes — no dupes among these.
    var de1 = new DirEntry { Path = "de1", Size = 10, IsPartialHash = false };
    de1.SetHash(10);
    var de2 = new DirEntry { Path = "de2", Size = 10, IsPartialHash = false };
    de2.SetHash(11);

    // Size 11: de4/de5 share a full hash (the expected dupe pair);
    // de7/de10 are only partially hashed; de9 has no hash at all.
    var de3 = new DirEntry { Path = "de3", Size = 11, IsPartialHash = false };
    de3.SetHash(10);
    var de4 = new DirEntry { Path = "de4", Size = 11, IsPartialHash = false };
    de4.SetHash(11);
    var de5 = new DirEntry { Path = "de5", Size = 11, IsPartialHash = false };
    de5.SetHash(11);
    var de6 = new DirEntry { Path = "de6", Size = 11, IsPartialHash = false };
    de6.SetHash(12);
    var de7 = new DirEntry { Path = "de7", Size = 11, IsPartialHash = true };
    de7.SetHash(12);
    var de8 = new DirEntry { Path = "de8", Size = 11, IsPartialHash = false };
    de8.SetHash(12);
    var de9 = new DirEntry { Path = "de9", Size = 11, IsPartialHash = false };
    var de10 = new DirEntry { Path = "de10", Size = 11, IsPartialHash = true };
    de10.SetHash(13);

    foreach (var child in new[] { de1, de2, de3, de4, de5, de6, de7, de8, de9, de10 })
    {
        re1.Children.Add(child);
    }

    var roots = new List<RootEntry> { re1 };
    var duplication = new Duplication(_logger, _configuration, _applicationDiagnostics);

    var dupePairs = duplication.GetDupePairs(roots);
    var firstPair = dupePairs.First();

    // ReSharper disable PossibleNullReferenceException
    var f1 = firstPair.Value.FirstOrDefault(x => x.ChildDE == de4).ChildDE;
    Assert.That(f1, Is.EqualTo(de4));
    var f2 = firstPair.Value.FirstOrDefault(x => x.ChildDE == de5).ChildDE;
    Assert.That(f2, Is.EqualTo(de5));
    // ReSharper restore PossibleNullReferenceException
}
/// <summary>
/// Verifies that GetSizePairs groups purely by size (hash irrelevant), while
/// the unique-hash count per size excludes null hashes and partial hashes.
/// </summary>
public void GetSizePairs_HashIrrelevant_NullIsNotAHashValue_PartialNotAUniqueHashForSize_OK()
{
    var re1 = new RootEntry { Path = @"C:\" };

    // Size 10: hashes 10, 11.
    var de1 = new DirEntry { Path = "de1", Size = 10, IsPartialHash = false };
    de1.SetHash(10);
    var de2 = new DirEntry { Path = "de2", Size = 10, IsPartialHash = false };
    de2.SetHash(11);

    // Size 11: full hashes 10, 11, 11, 12, 12; partial hashes 12, 13; one unhashed.
    var de3 = new DirEntry { Path = "de3", Size = 11, IsPartialHash = false };
    de3.SetHash(10);
    var de4 = new DirEntry { Path = "de4", Size = 11, IsPartialHash = false };
    de4.SetHash(11);
    var de5 = new DirEntry { Path = "de5", Size = 11, IsPartialHash = false };
    de5.SetHash(11);
    var de6 = new DirEntry { Path = "de6", Size = 11, IsPartialHash = false };
    de6.SetHash(12);
    var de7 = new DirEntry { Path = "de7", Size = 11, IsPartialHash = true };
    de7.SetHash(12);
    var de8 = new DirEntry { Path = "de8", Size = 11, IsPartialHash = false };
    de8.SetHash(12);
    var de9 = new DirEntry { Path = "de9", Size = 11, IsPartialHash = false };
    var de10 = new DirEntry { Path = "de10", Size = 11, IsPartialHash = true };
    de10.SetHash(13);

    foreach (var child in new[] { de1, de2, de3, de4, de5, de6, de7, de8, de9, de10 })
    {
        re1.Children.Add(child);
    }

    var roots = new List<RootEntry> { re1 };
    re1.SetInMemoryFields();
    var duplication = new Duplication(_logger, _configuration, _applicationDiagnostics);

    var sizePairDictionary = duplication.GetSizePairs(roots);
    Console.WriteLine($"Number of Size Pairs {sizePairDictionary.Count}");
    // Two distinct sizes (10 and 11) => two size pairs.
    Assert.That(sizePairDictionary.Count, Is.EqualTo(2));

    var sumOfUniqueHashesForEachSize = GetSumOfUniqueHashesForEachSize_ExcludePartialHash(sizePairDictionary);
    Console.WriteLine($"Sum of total unique hashes (split on filesize to) {sumOfUniqueHashesForEachSize}");
    // Size 10 contributes {10, 11}; size 11 contributes {10, 11, 12} => 5.
    Assert.That(sumOfUniqueHashesForEachSize, Is.EqualTo(5));
}
/// <summary>
/// Verifies that multiplyByTwo doubles its input.
/// </summary>
public void VerifyMultiplyByTwo()
{
    const int number = 2;

    // Assert.AreEqual takes (expected, actual); the original passed them
    // reversed, which produces a misleading message on failure
    // ("expected <result> but was 4").
    Assert.AreEqual(4, Duplication.multiplyByTwo(number));
}
/// <summary>
/// Checks that a filter built from the given settings decides correctly
/// whether a generated-code context should be processed.
/// </summary>
public void ShouldProcessGeneratedCore(GeneratedCode g, Duplication d, bool shouldProcess)
{
    _sut = new ContextFilter(g, d);

    var actual = _sut.ShouldProcessOrRegister(Gen(1));
    Assert.AreEqual(shouldProcess, actual);
}
/// <summary>
/// Checks that the first sighting of a method is always processed, and the
/// second sighting is processed only when the duplication setting allows it.
/// </summary>
public void ShouldProcessMethodSecondTime(GeneratedCode g, Duplication d, bool shouldProcess)
{
    _sut = new ContextFilter(g, d);

    Assert.True(_sut.ShouldProcessOrRegister(M(1)));

    var secondSighting = _sut.ShouldProcessOrRegister(M(1));
    Assert.AreEqual(shouldProcess, secondSighting);
}
/// <summary>
/// Checks that partial classes are processed on every sighting, regardless of
/// the configured settings.
/// </summary>
public void ShouldProcessSecondTimePartialClasses(GeneratedCode g, Duplication d)
{
    _sut = new ContextFilter(g, d);

    // Both sightings of the same partial class must be processed.
    Assert.True(_sut.ShouldProcessOrRegister(Part(1)));
    Assert.True(_sut.ShouldProcessOrRegister(Part(1)));
}
/// <summary>
/// Creates a filter configured with the given generated-code and duplication
/// settings.
/// </summary>
/// <param name="genCodeSetting">Setting controlling handling of generated code.</param>
/// <param name="dupeSetting">Setting controlling handling of duplicates.</param>
public ContextFilter(GeneratedCode genCodeSetting, Duplication dupeSetting)
{
    _dupeSetting = dupeSetting;
    _genCodeSetting = genCodeSetting;
}
/// <summary>
/// Builds the system under test: a ContextStatisticsExtractor wired with a
/// ContextFilter created from the supplied settings.
/// </summary>
protected void SetUp(GeneratedCode genCode, Duplication dupe)
{
    var filter = new ContextFilter(genCode, dupe);
    Sut = new ContextStatisticsExtractor(filter);
}