Code example #1
        public void PrintPathsHaveHash2()
        {
            var entryEnumerator = new EntryEnumerator(this);

            foreach (var entryKey in entryEnumerator)
            {
                Entry[] block;
                var     entryIndex = EntryIndex(entryKey.Index, out block);
                var     hash       = block[entryIndex].IsHashDone ? "#" : " ";
                Console.WriteLine($"{hash}{block[entryIndex].GetFullPath(this)}");
            }
        }
Code example #2
        // Set FullPath on all directory entries in the store.
        // This relies on the enumerator being breadth first.
        public void SetInMemoryFields()
        {
            var entryEnumerator = new EntryEnumerator(this);

            foreach (var entryKey in entryEnumerator)
            {
                Entry[] block;
                var     entryIndex = EntryIndex(entryKey.Index, out block);
                if (block[entryIndex].IsDirectory)
                {   // set full path on this dir
                    block[entryIndex].FullPath = block[entryIndex].GetFullPath(this);
                }
            }
        }
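SetInMemoryFields only works because the enumerator is breadth first: every directory is visited before anything inside it, so a child can always build on a parent whose FullPath has already been filled in. Below is a minimal, self-contained sketch of that idea. The Node type, its Parent field and the BreadthFirstDemo code are hypothetical stand-ins and not part of the Entry/EntryEnumerator code above.

using System;
using System.Collections.Generic;

class Node
{
    public string Name;
    public string FullPath;               // filled in during the breadth-first pass
    public Node Parent;
    public List<Node> Children = new List<Node>();
}

static class BreadthFirstDemo
{
    // Breadth-first traversal: a parent is always yielded before its children.
    public static IEnumerable<Node> BreadthFirst(Node root)
    {
        var queue = new Queue<Node>();
        queue.Enqueue(root);
        while (queue.Count > 0)
        {
            var node = queue.Dequeue();
            yield return node;
            foreach (var child in node.Children)
            {
                queue.Enqueue(child);
            }
        }
    }

    public static void Main()
    {
        var root = new Node { Name = "C:" };
        var dir  = new Node { Name = "temp",  Parent = root };
        var file = new Node { Name = "a.txt", Parent = dir };
        root.Children.Add(dir);
        dir.Children.Add(file);

        foreach (var node in BreadthFirst(root))
        {
            // Because of the visit order, Parent.FullPath is already set here.
            node.FullPath = node.Parent == null
                ? node.Name
                : node.Parent.FullPath + "\\" + node.Name;
            Console.WriteLine(node.FullPath);
        }
        // Output:
        // C:
        // C:\temp
        // C:\temp\a.txt
    }
}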
Code example #3
        public void SetSummaryFieldsXX()
        {
            // Do I need a recursive version here, or can I use the iterator?
            // If I check for parent path changes, the iterator might work.
            // The iterator yields the current directory's entries first, then descends into the first subdirectory.

            // The enumerator is breadth first, so counting files/dirs works.
            // But accumulating sizes at each directory in the tree isn't as simple...

            var entryEnumerator = new EntryEnumerator(this);
            var prevParentPath  = string.Empty;
            var size            = 0ul;
            var block           = new Entry[] {};
            int entryIndex      = -1;

            foreach (var entryKey in entryEnumerator)
            {
                entryIndex = EntryIndex(entryKey.Index, out block);
                string currParentPath = block[entryIndex].GetParentPath(this);
                //if (block[entryIndex].IsDirectory)
                //{
                //    dirStats.DirCount += 1;
                //}
                //else
                //{
                //    dirStats.FileCount += 1;
                //}
                if (currParentPath == prevParentPath)
                {
                    if (!block[entryIndex].IsDirectory)
                    {
                        size += block[entryIndex].Size;
                    }
                }
                else
                {
                    block[entryIndex].SetParentSize(this, size);
                    size = 0ul;
                }

                prevParentPath = currParentPath;
            }
            if (entryIndex >= 0) // catch the final group's size after the whole tree has been processed.
            {
                block[entryIndex].SetParentSize(this, size);
            }
        }
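SetSummaryFieldsXX is an experimental take on a common pattern: walk entries that arrive grouped by parent path, accumulate a running size while the parent stays the same, flush the total when it changes, and flush once more after the loop. The sketch below shows that pattern in isolation over a plain list of (parentPath, size) pairs; the data and names are hypothetical. Note that the sketch adds the first entry of each new group to that group's total after flushing, a detail the method above does not yet handle, consistent with its own comment that summing sizes per directory "isn't as simple".

using System;
using System.Collections.Generic;

static class GroupBySequentialKeyDemo
{
    public static void Main()
    {
        // Entries are assumed to arrive grouped by parent path,
        // which is what the breadth-first enumerator provides.
        var entries = new List<(string ParentPath, ulong Size)>
        {
            (@"C:\temp",      10),
            (@"C:\temp",      20),
            (@"C:\temp\sub",   5),
            (@"C:\temp\sub",   7),
        };

        var prevParentPath = string.Empty;
        var size = 0ul;

        foreach (var entry in entries)
        {
            if (entry.ParentPath != prevParentPath && prevParentPath != string.Empty)
            {
                // Flush the accumulated size for the previous parent.
                Console.WriteLine($"{prevParentPath} = {size}");
                size = 0ul;
            }
            size += entry.Size;
            prevParentPath = entry.ParentPath;
        }

        // Final flush after the loop, mirroring the entryIndex >= 0 check above.
        if (prevParentPath != string.Empty)
        {
            Console.WriteLine($"{prevParentPath} = {size}");
        }
        // Output:
        // C:\temp = 30
        // C:\temp\sub = 12
    }
}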
Code example #4
        public IDictionary <ulong, List <int> > GetSizePairs()
        {
            // Try the idea of processing all entries in size slices, pruning singletons after each pass.
            ulong bumpSize = 20000; // +200000000000;
            //bumpSize = 50000  +200000000000;
            ulong min = 0;
            ulong max = bumpSize;
            bool  goNext;
            int   loopy = 0;

            _duplicateFileSize = new Dictionary <ulong, List <int> >();

            //Console.WriteLine(String.Format("Post TraverseMatchOnFileSize: {0}, dupeDictCount {1}", _applicationDiagnostics.GetMemoryAllocated().FormatAsBytes(), _duplicateFileSize.Count));
            do
            {
                goNext = false;
                var entryEnumerator = new EntryEnumerator(this);
                foreach (var entryKey in entryEnumerator)
                {
                    Entry[] block;
                    var     index      = entryKey.Index;
                    var     entryIndex = EntryIndex(index, out block);
                    var     size       = block[entryIndex].Size;

                    if (size >= min && size < max)
                    {
                        if (!block[entryIndex].IsDirectory && size != 0)
                        {
                            if (_duplicateFileSize.ContainsKey(size))
                            {
                                _duplicateFileSize[size].Add(index);
                            }
                            else
                            {
                                _duplicateFileSize[size] = new List <int> { index };
                            }
                        }
                    }
                    else if (size >= max)
                    {
                        goNext = true;
                    }
                }

                // Remove sizes that only occur once from the dictionary.
                var pruneList = _duplicateFileSize.Where(kvp => kvp.Value.Count == 1)
                                .ToList();
                //Console.WriteLine("Prune 1's {0}", pruneList.Count);
                pruneList.ForEach(x => _duplicateFileSize.Remove(x.Key));

                if (goNext)
                {
                    min  = max;
                    max += bumpSize;
                    if (min > 2000000)
                    {
                        bumpSize = bumpSize + bumpSize;
                    }
                    //bumpSize *= (ulong)(bumpSize * 1.5);
                    //bumpSize = bumpSize + bumpSize;
                }
                ++loopy;
                //Console.WriteLine("loopy {0} min {1} max {2}", loopy, min, max);
                //GC.Collect();

                if (Hack.BreakConsoleFlag)
                {
                    break;
                }
            } while (goNext);

            Console.WriteLine($"loopy {loopy}");
            Console.WriteLine(
                $"Deleted entries from dictionary: {_applicationDiagnostics.GetMemoryAllocated().FormatAsBytes()}, dupeDictCount {_duplicateFileSize.Count}");
            return _duplicateFileSize;
        }
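Stripped of the slicing loop and the Hack.BreakConsoleFlag escape hatch, the core of GetSizePairs is simple: bucket candidate files by size, then drop every size that only occurs once, because a unique size cannot have a duplicate. The sketch below shows just that core over hypothetical (index, size) pairs; SizePairsDemo and its sample data are not part of the code above.

using System;
using System.Collections.Generic;
using System.Linq;

static class SizePairsDemo
{
    public static void Main()
    {
        // (index, size) pairs standing in for file entries.
        var files = new List<(int Index, ulong Size)>
        {
            (0, 100), (1, 250), (2, 100), (3, 999), (4, 250), (5, 250),
        };

        var bySize = new Dictionary<ulong, List<int>>();
        foreach (var file in files)
        {
            if (file.Size == 0) continue;            // zero-length files are skipped above too
            if (!bySize.TryGetValue(file.Size, out var list))
            {
                list = new List<int>();
                bySize[file.Size] = list;
            }
            list.Add(file.Index);
        }

        // Prune sizes with only one file: they cannot be duplicates.
        foreach (var size in bySize.Where(kvp => kvp.Value.Count == 1)
                                   .Select(kvp => kvp.Key)
                                   .ToList())
        {
            bySize.Remove(size);
        }

        foreach (var kvp in bySize.OrderBy(kvp => kvp.Key))
        {
            Console.WriteLine($"size {kvp.Key}: files {string.Join(",", kvp.Value)}");
        }
        // Output:
        // size 100: files 0,2
        // size 250: files 1,4,5
    }
}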