/// <summary>
/// Worker loop: dequeues <see cref="WorkFile"/> entries and MD5-hashes their contents,
/// running until loading has finished AND the queue is fully drained.
/// </summary>
/// <param name="obj"><see cref="HandoverObject"/> containing the shared queue and result map.</param>
void HashAsync(object obj)
{
    var access = (HandoverObject)obj;
    using (var hasher = MD5.Create())
    {
        // panic == true means memory pressure exceeded Config.MAXMEMORYUSAGE;
        // while panicking, hash at full speed (no delays) until usage drops
        // back below Config.MINMEMORYUSAGE.
        bool panic = false;
        while (stillLoading || access.queque.Count > 0)
        {
            if (access.queque.Count > 0)
            {
                // pathID == -1 is the "nothing dequeued" sentinel — another
                // worker may empty the queue between the check and the lock.
                var work = new WorkFile() { pathID = -1 };
                lock (access.queque)
                {
                    if (access.queque.Count > 0)
                    {
                        work = access.queque.Dequeue();
                    }
                }
                if (work.pathID != -1)
                {
                    var dat = hasher.ComputeHash(work.data);
                    lock (access.results)
                    {
                        // first hash wins; duplicates of the same pathID are ignored
                        if (!access.results.ContainsKey(work.pathID))
                        {
                            access.results.Add(work.pathID, dat);
                        }
                    }
                }
                if (!panic && usedMemory > Config.MAXMEMORYUSAGE)
                {
                    panic = true;
                }
                else if (panic && usedMemory < Config.MINMEMORYUSAGE)
                {
                    panic = false;
                }
                else if (!panic && usedMemory < Config.OKMEMORYUSAGE && stillLoading)
                {
                    // memory is comfortable: throttle so the reader thread can stay ahead
                    Task.Delay(Config.HASHASYNCREGDELAY).Wait();
                }
                else if (!panic && stillLoading)
                {
                    Task.Delay(Config.HASHASYNCNOTOKDELAY).Wait();
                }
            }
            else
            {
                // queue momentarily empty while the reader is still working:
                // reclaim released buffers, then wait for more work
                GC.Collect();
                Task.Delay(Config.HASHASYNCREGDELAY).Wait();
            }
        }
    }
    // FIX: `running--` is not atomic; with several concurrent workers a
    // decrement could be lost. Use an interlocked decrement instead.
    System.Threading.Interlocked.Decrement(ref running);
}
/// <summary>
/// Worker loop: dequeues <see cref="WorkFile"/> entries and MD5-hashes their contents,
/// running until loading has finished AND the queue is fully drained.
/// </summary>
/// <param name="obj"><see cref="HandoverObject"/> containing the shared queue and result map.</param>
void HashAsync(object obj)
{
    // Memory thresholds (bytes) and delays (ms) governing throttling.
    const long PanicThreshold = 2000000000L;   // ~2 GB: enter full-speed drain mode
    const long RecoverThreshold = 400000000L;  // ~400 MB: leave full-speed drain mode
    const long ComfortThreshold = 1000000000L; // ~1 GB: below this, use the longer delay
    const int RegularDelayMs = 100;
    const int ShortDelayMs = 10;

    var access = (HandoverObject)obj;
    using (var hasher = MD5.Create())
    {
        // loadall == true means memory pressure is critical; hash at full
        // speed (no delays) until usage drops back below RecoverThreshold.
        bool loadall = false;
        // FIX: the loop previously ran only `while (stillLoading)`, so any
        // items still queued when the reader finished were silently dropped.
        while (stillLoading || access.queque.Count > 0)
        {
            if (access.queque.Count > 0)
            {
                // path == null is the "nothing dequeued" sentinel — another
                // worker may empty the queue between the check and the lock.
                var work = new WorkFile();
                lock (access.queque)
                {
                    if (access.queque.Count > 0)
                    {
                        work = access.queque.Dequeue();
                    }
                }
                if (work.path != null)
                {
                    var dat = hasher.ComputeHash(work.data);
                    lock (access.results)
                    {
                        // first hash wins; duplicates of the same path are ignored
                        if (!access.results.ContainsKey(work.path))
                        {
                            access.results.Add(work.path, dat);
                        }
                    }
                }
                if (!loadall && usedMemory > PanicThreshold)
                {
                    loadall = true;
                }
                else if (loadall && usedMemory < RecoverThreshold)
                {
                    loadall = false;
                }
                else if (usedMemory < ComfortThreshold && stillLoading && !loadall)
                {
                    // memory is comfortable: throttle so the reader thread can stay ahead
                    Task.Delay(RegularDelayMs).Wait();
                }
                else if (stillLoading && !loadall)
                {
                    Task.Delay(ShortDelayMs).Wait();
                }
            }
            else
            {
                // queue momentarily empty while the reader is still working:
                // reclaim released buffers, then wait for more work
                GC.Collect();
                Task.Delay(RegularDelayMs).Wait();
            }
        }
    }
    // FIX: `running--` is not atomic; with several concurrent workers a
    // decrement could be lost. Use an interlocked decrement instead.
    System.Threading.Interlocked.Decrement(ref running);
}
/// <summary>
/// Loads all identified duplicates into memory for hashing by <see cref="HashAsync(object)"/>.
/// Files larger than <c>Config.MAXIMUMASYNCFILESIZE</c> are hashed synchronously instead of
/// being buffered; unreadable files are skipped. Clears <c>stillLoading</c> when done.
/// </summary>
/// <param name="obj"><see cref="HandoverObject"/> containing the required references</param>
void Reader(object obj)
{
    position = 0;
    var access = (HandoverObject)obj;
    foreach (var dup in access.targets)
    {
        // position tracks progress through the duplicate groups (read elsewhere for UI/status)
        position++;
        foreach (var file in dup.instances)
        {
            var path = pathStorage[file];
            // too big to buffer in RAM: hash synchronously, streaming from disk
            var fi = new FileInfo(path);
            if (fi.Length > Config.MAXIMUMASYNCFILESIZE)
            {
                HashSync(path, file, access.results);
            }
            else
            {
                // FIX: read into a plain buffer first — the original allocated a
                // throwaway WorkFile and wrapped its construction in the try block.
                byte[] data = null;
                try
                {
                    data = File.ReadAllBytes(path);
                }
                catch
                {
                    // best-effort: files that vanished or are locked are skipped
                }
                if (data != null && data.LongLength != 0)
                {
                    var wf = new WorkFile() { data = data, pathID = file };
                    lock (access.queque)
                    {
                        access.queque.Enqueue(wf);
                    }
                }
            }
        }
    }
    // signal the hasher threads that no more work will arrive
    stillLoading = false;
}
/// <summary>
/// Loads all identified duplicates into memory for hashing by <see cref="HashAsync(object)"/>.
/// Files larger than <see cref="MaxBufferedFileSize"/> bytes are hashed synchronously instead
/// of being buffered; unreadable files are skipped. Clears <c>stillLoading</c> when done.
/// </summary>
/// <param name="obj"><see cref="HandoverObject"/> containing the required references</param>
void Reader(object obj)
{
    // ~1 GB: above this a file is too large to buffer whole for async hashing
    const long MaxBufferedFileSize = 1000000000L;

    position = 0;
    var access = (HandoverObject)obj;
    foreach (var dup in access.targets)
    {
        // position tracks progress through the duplicate groups (read elsewhere for UI/status)
        position++;
        foreach (var file in dup.instances)
        {
            // too big to buffer in RAM: hash synchronously, streaming from disk
            var fi = new FileInfo(file);
            if (fi.Length > MaxBufferedFileSize)
            {
                HashSync(file, access.results);
            }
            else
            {
                // FIX: read into a plain buffer first — the original allocated a
                // throwaway WorkFile and wrapped its construction in the try block.
                byte[] data = null;
                try
                {
                    data = File.ReadAllBytes(file);
                }
                catch
                {
                    // best-effort: files that vanished or are locked are skipped
                }
                if (data != null && data.LongLength != 0)
                {
                    var wf = new WorkFile() { data = data, path = file };
                    lock (access.queque)
                    {
                        access.queque.Enqueue(wf);
                    }
                }
            }
        }
    }
    // signal the hasher threads that no more work will arrive
    stillLoading = false;
}