Example #1
File: CommonEntry.cs Project: CDEApp/CDE
        public static string MakeFullPath(CommonEntry parentEntry, DirEntry dirEntry)
        {
            var a = parentEntry.FullPath ?? "pnull";
            var b = dirEntry.Path ?? "dnull";

            return Filesystem.Path.Combine(a, b);
        }
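The null-coalescing fallbacks keep a broken entry visible rather than throwing: a missing parent or child path shows up in the result as a literal "pnull" or "dnull" segment. Below is a minimal standalone sketch of the same pattern using System.IO.Path directly (the Filesystem alias above belongs to the CDE project; the class and method names here are illustrative only):

        // Standalone sketch, not CDE code: the same null-placeholder combine pattern.
        using System.IO;

        public static class PathSketch
        {
            public static string CombineWithPlaceholders(string parentPath, string childPath)
            {
                var a = parentPath ?? "pnull"; // keeps a null parent visible in the output
                var b = childPath ?? "dnull";  // keeps a null child visible in the output
                return Path.Combine(a, b);
            }
        }

        // PathSketch.CombineWithPlaceholders(@"C:\scans", null) returns @"C:\scans\dnull".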
Example #2
        public int SizeCompareWithDirTo(DirEntry de)
        {
            if (de == null)
            {
                return -1; // this before de
            }
            if (IsDirectory && !de.IsDirectory)
            {
                return -1; // this before de
            }
            if (!IsDirectory && de.IsDirectory)
            {
                return 1; // this after de
            }
            //if (IsDirectory && de.IsDirectory)
            //{   // sort by path if both dir's and sorting by Size ? maybe fill in size in field Hmm ?
            //    // really cheap to calculate dir size.... i think i should fill it in ?
            //    return MyCompareInfo.Compare(Path, de.Path, MyCompareOptions);
            //}
            // the cast breaks this.
            var sizeCompare = Size.CompareTo(de.Size);

            if (sizeCompare == 0)
            {
                return MyCompareInfo.Compare(Path, de.Path, MyCompareOptions);
            }
            return sizeCompare;
        }
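The comparison above puts directories before files, then orders by Size, and breaks size ties with a culture-aware path comparison, so entries of mixed kinds still get a total ordering. A hedged sketch of how such a comparer is typically consumed; the entries list and the descending variant are illustrative, not taken from CDE:

        // Illustrative only: assumes a List<DirEntry> named entries is in scope.
        entries.Sort((x, y) => x.SizeCompareWithDirTo(y));   // ascending by size, directories first

        // A caller wanting descending size can invert the result, at the cost of also
        // pushing directories to the bottom (the comment in Example #6 raises the same concern).
        entries.Sort((x, y) => -x.SizeCompareWithDirTo(y));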
Example #3
 public int PathCompareTo(DirEntry de)
 {
     if (de == null)
     {
         return -1; // this before de
     }
     return MyCompareInfo.Compare(Path, de.Path, MyCompareOptions);
 }
Example #4
 private bool BuildDuplicateList(CommonEntry parentEntry, DirEntry dirEntry)
 {
     if (!dirEntry.IsPartialHash)
     {
         BuildDuplicateListIncludePartialHash(parentEntry, dirEntry);
     }
     return true;
 }
Example #5
 private void CalculatePartialMD5Hash(string fullPath, DirEntry de)
 {
     if (de.IsDirectory || de.IsHashDone)
     {
         _duplicationStatistics.AllreadyDonePartials++;
         return;
     }
     CalculateMD5Hash(fullPath, de, true);
 }
Example #6
 // is this right ? for the simple compareResult invert we do in caller ? - maybe not ? keep dirs at top anyway ?
 public int PathCompareWithDirTo(DirEntry de)
 {
     if (de == null)
     {
         return -1; // this before de
     }
     if (IsDirectory && !de.IsDirectory)
     {
         return -1; // this before de
     }
     if (!IsDirectory && de.IsDirectory)
     {
         return 1; // this after de
     }
     return MyCompareInfo.Compare(Path, de.Path, MyCompareOptions);
 }
Example #7
 public int ModifiedCompareTo(DirEntry de)
 {
     if (de == null)
     {
         return -1; // this before de
     }
     if (IsModifiedBad && !de.IsModifiedBad)
     {
         return -1; // this before de
     }
     if (!IsModifiedBad && de.IsModifiedBad)
     {
         return 1; // this after de
     }
     if (IsModifiedBad && de.IsModifiedBad)
     {
         return 0;
     }
     return DateTime.Compare(Modified, de.Modified);
 }
Example #8
        private bool FindMatchesOnFileSize2(CommonEntry ce, DirEntry de)
        {
            if (de.IsDirectory || de.Size == 0) // || dirEntry.Size < 4096)
            {
                return true;
            }

            var flatDirEntry = new PairDirEntry(ce, de);

            if (_duplicateFileSize.ContainsKey(de.Size))
            {
                _duplicateFileSize[de.Size].Add(flatDirEntry);
            }
            else
            {
                _duplicateFileSize[de.Size] = new List<PairDirEntry> { flatDirEntry };
            }
            return true;
        }
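FindMatchesOnFileSize2 buckets candidate files by size in a dictionary, so only files that share a size ever need to be hashed; directories and zero-byte files are filtered out up front. The ContainsKey/indexer pair can be collapsed with TryGetValue; a small variation is sketched below, reusing the field and type names from the example purely for illustration:

            // Equivalent grouping via TryGetValue (sketch, not CDE source).
            if (!_duplicateFileSize.TryGetValue(de.Size, out var bySize))
            {
                bySize = new List<PairDirEntry>();
                _duplicateFileSize[de.Size] = bySize;
            }
            bySize.Add(flatDirEntry);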
Example #9
        private bool BuildDuplicateListIncludePartialHash(CommonEntry parentEntry, DirEntry dirEntry)
        {
            if (dirEntry.IsDirectory || !dirEntry.IsHashDone || dirEntry.Size == 0)
            {
                //TODO: how to deal with uncalculated files?
                return true;
            }

            var info = new PairDirEntry(parentEntry, dirEntry);

            if (_duplicateFile.ContainsKey(dirEntry))
            {
                _duplicateFile[dirEntry].Add(info);
            }
            else
            {
                _duplicateFile[dirEntry] = new List<PairDirEntry> { info };
            }
            return true;
        }
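Here the DirEntry itself is the dictionary key, which only groups true duplicates if DirEntry implements value equality over its hash (and presumably its size) rather than reference equality; that behaviour is inferred from the usage above, not shown in these examples. A hypothetical comparer that would make such an assumption explicit (the Hash member and its byte[] type are assumptions, not the project's API):

            // Hypothetical sketch only; the real DirEntry equality in CDE may differ.
            // Requires using System.Collections and System.Collections.Generic.
            private sealed class DirEntryContentComparer : IEqualityComparer<DirEntry>
            {
                public bool Equals(DirEntry x, DirEntry y) =>
                    x != null && y != null
                    && x.Size == y.Size
                    && StructuralComparisons.StructuralEqualityComparer.Equals(x.Hash, y.Hash);

                public int GetHashCode(DirEntry obj) => obj.Size.GetHashCode();
            }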
Example #10
        private bool CalculateFullMD5Hash(CommonEntry parentEntry, DirEntry dirEntry)
        {
            // ignore if we already have a hash.
            if (dirEntry.IsHashDone)
            {
                if (!dirEntry.IsPartialHash)
                {
                    return true;
                }

                if (_dirEntriesRequiringFullHashing.Contains(dirEntry))
                {
                    var fullPath     = CommonEntry.MakeFullPath(parentEntry, dirEntry);
                    var longFullPath = Path.GetFullPath(fullPath);
                    CalculateMD5Hash(longFullPath, dirEntry, false);
                    if (Hack.BreakConsoleFlag)
                    {
                        Console.WriteLine("\n * Break key detected exiting full hashing phase outer.");
                        return false;
                    }
                }
            }
            return true;
        }
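Together with Example #5 this forms a two-pass scheme: every candidate file first gets a cheap partial hash, and only entries later added to _dirEntriesRequiringFullHashing (presumably those whose partial hashes collide) are re-read and hashed in full, with Hack.BreakConsoleFlag letting the user abort the expensive pass. A compact driver sketch of that flow; the candidates collection and the loop structure are assumptions for illustration, not CDE code:

        // Illustrative driver: candidates is assumed to pair each file entry with its parent.
        foreach (var (parent, entry) in candidates)
        {
            CalculatePartialMD5Hash(CommonEntry.MakeFullPath(parent, entry), entry);
        }
        // ... group by partial hash here and fill _dirEntriesRequiringFullHashing with the collisions ...
        foreach (var (parent, entry) in candidates)
        {
            if (!CalculateFullMD5Hash(parent, entry))
            {
                break; // break key pressed; abandon the full hashing phase
            }
        }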
Example #11
        private void CalculateMD5Hash(string fullPath, DirEntry de, bool doPartialHash)
        {
            var displayCounterInterval = _configuration.ProgressUpdateInterval > 1000
                                             ? _configuration.ProgressUpdateInterval / 10
                                             : _configuration.ProgressUpdateInterval;
            var configuration = new Configuration();

            if (doPartialHash)
            {
                // don't recalculate.
                if (de.IsHashDone && de.IsPartialHash)
                {
                    return;
                }
                var hashResponse = HashHelper.GetMD5HashResponseFromFile(fullPath, configuration.HashFirstPassSize);

                if (hashResponse != null)
                {
                    de.SetHash(hashResponse.Hash);
                    de.IsPartialHash = hashResponse.IsPartialHash;
                    _duplicationStatistics.BytesProcessed    += hashResponse.BytesHashed;
                    _duplicationStatistics.TotalFileBytes    += de.Size;
                    _duplicationStatistics.BytesNotProcessed += de.Size <= hashResponse.BytesHashed ? 0 : de.Size - hashResponse.BytesHashed;
                    if (de.IsPartialHash)
                    {
                        _duplicationStatistics.PartialHashes += 1;
                    }
                    else
                    {
                        _duplicationStatistics.FullHashes += 1;
                    }
                    if (_duplicationStatistics.FilesProcessed % displayCounterInterval == 0)
                    {
                        _logger.LogInfo("Progress through duplicate files at {0} of {1} which is {2:F2}% Largest {3:F2} MB, Smallest {4:F2} MB",
                                        _duplicationStatistics.FilesProcessed, _duplicationStatistics.FilesToCheckForDuplicatesCount,
                                        100 * (1.0 * _duplicationStatistics.FilesProcessed / _duplicationStatistics.FilesToCheckForDuplicatesCount),
                                        1.0 * _duplicationStatistics.LargestFileSize / (1024 * 1024),
                                        1.0 * _duplicationStatistics.SmallestFileSize / (1024 * 1024));
                    }

                    //_logger.LogDebug("Thread:{0}, File: {1}",Thread.CurrentThread.ManagedThreadId,fullPath);

                    //if (_duplicationStatistics.PartialHashes%displayCounterInterval == 0)
                    //{
                    //    Console.Write("p");
                    //}
                    //if (_duplicationStatistics.FullHashes%displayCounterInterval == 0)
                    //{
                    //    Console.Write("f");
                    //    Console.Write(" {0} ", hashResponse.BytesHashed);
                    //}
                }
                else
                {
                    _duplicationStatistics.FailedToHash += 1;
                }
            }
            else
            {
                if (de.IsHashDone && !de.IsPartialHash)
                {
                    _duplicationStatistics.AllreadyDoneFulls++;
                    return;
                }
                var hashResponse = HashHelper.GetMD5HashFromFile(fullPath);
                if (hashResponse != null)
                {
                    de.SetHash(hashResponse.Hash);
                    de.IsPartialHash = hashResponse.IsPartialHash;
                    _duplicationStatistics.FullHashes     += 1;
                    _duplicationStatistics.BytesProcessed += hashResponse.BytesHashed;
                    if (_duplicationStatistics.FilesProcessed % displayCounterInterval == 0)
                    {
                        _logger.LogInfo("Progress through duplicate files at {0} of {1} which is {2:.0}%",
                                        _duplicationStatistics.FilesProcessed, _duplicationStatistics.FilesToCheckForDuplicatesCount,
                                        100 * (1.0 * _duplicationStatistics.FilesProcessed / _duplicationStatistics.FilesToCheckForDuplicatesCount));
                    }

                    // SOME can have both partial and full done, so they have 1 in both counts... :(

                    //if (_duplicationStatistics.FullHashes%displayCounterInterval == 0)
                    //{
                    //    Console.Write("f");
                    //}
                }
                else
                {
                    _duplicationStatistics.FailedToHash += 1;
                }
            }
        }
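HashHelper's implementation is not shown in these examples; from the call sites, GetMD5HashResponseFromFile appears to hash only the first HashFirstPassSize bytes, while GetMD5HashFromFile hashes the whole file. A self-contained sketch of the partial-hash idea using System.Security.Cryptography; every name below is an assumption, not the project's API:

        // Sketch only: MD5 over at most the first 'limit' bytes of a file.
        using System;
        using System.IO;
        using System.Security.Cryptography;

        public static class PartialHashSketch
        {
            public static (byte[] Hash, int BytesHashed, bool IsPartial) HashFirstBytes(string path, int limit)
            {
                using var md5 = MD5.Create();
                using var stream = File.OpenRead(path);
                var buffer = new byte[(int)Math.Min(stream.Length, limit)];
                var read = stream.Read(buffer, 0, buffer.Length); // a single read is enough for a sketch
                md5.TransformFinalBlock(buffer, 0, read);
                return (md5.Hash, read, read < stream.Length);
            }
        }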
Example #12
File: CommonEntry.cs Project: CDEApp/CDE
 public string MakeFullPath(DirEntry dirEntry)
 {
     return MakeFullPath(this, dirEntry);
 }