        /// <summary>
        /// Show all duplicates in <see cref="MainWindow.spItems"/>
        /// </summary>
        /// <param name="ldf">All duplicates</param>
        /// <returns>A task that completes once all entries have been added</returns>
        async Task DisplayDuplicates(List <DuplicateFile> ldf)
        {
            bool switcher = true;

            position = 0;
            SolidColorBrush light = new SolidColorBrush(Colors.White), dark = new SolidColorBrush(Colors.LightGray);
            int             next = 0;

            foreach (var df in ldf)
            {
                position++;
                switcher = !switcher;
                spItems.Children.Add(CreateEntry(df, switcher ? dark : light));
                if (((position * 100L) / ldf.Count) >= next)
                {
                    next += 10;
                    PBManager.UpdateCurrentPosition(position);
                    await Task.Delay(100);
                }
            }
        }
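The guard around PBManager.UpdateCurrentPosition fires at most once per 10 % of the list, and the short Task.Delay hands control back to the WPF dispatcher so the freshly added entries can render. A minimal standalone sketch of the same throttling pattern, assuming a plain IProgress<int> sink instead of PBManager (all names below are illustrative only):

        // Sketch only: 10%-throttled reporting with a generic IProgress<int> sink.
        static async Task AddWithThrottledProgress<T>(IReadOnlyList<T> items, Action<T> addToUi, IProgress<int> progress)
        {
            int nextPercent = 0;
            for (int i = 0; i < items.Count; i++)
            {
                addToUi(items[i]);
                if ((i + 1) * 100L / items.Count >= nextPercent)
                {
                    nextPercent += 10;          // report at most once per 10 %
                    progress.Report(i + 1);
                    await Task.Delay(100);      // yield so the UI thread can repaint
                }
            }
        }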
Example #2
        /*
         * async Task LinkAllDuplicates(List<DuplicateFile> duplicates) {
         *      int last = 0;
         *      for (int n = 0; n < duplicates.Count; n++) {
         *              LinkDuplicates(duplicates[n]);
         *              pbStatus.Value = n;
         *              if ((n * 100) / duplicates.Count > last * 5) {
         *                      last++;
         *                      await Task.Delay(100);
         *              }
         *      }
         * }
         */
        /// <summary>
        /// Show all duplicates in <see cref="MainWindow.duplicatesListView"/>
        /// </summary>
        /// <param name="ldf">All duplicates</param>
        /// <returns>A task that completes once all entries have been added</returns>
        async Task DisplayDuplicates(List <DuplicateFile> ldf)
        {
            bool switcher = true;

            position = 0;
            long next = 0;
            var  s    = Stopwatch.StartNew();

            foreach (var df in ldf)
            {
                position++;
                switcher = !switcher;
                hashedFiles.TryGetValue(df.instances[0], out var myhash);
                df.finalhash = myhash;
                duplicates_view.Add(df);
                if (s.ElapsedMilliseconds / 1000 >= next)
                {
                    next = (s.ElapsedMilliseconds / 1000) + 1;
                    PBManager.UpdateCurrentPosition(position);
                    await Task.Delay(33);
                }
            }
            PBManager.UpdateCurrentPosition(position);
        }
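This version adds rows to duplicates_view instead of building controls directly. For duplicatesListView to pick those rows up as they arrive, duplicates_view has to raise change notifications; a minimal sketch of that assumed wiring (the ObservableCollection type and the ItemsSource assignment are assumptions, not shown in these examples):

        // Assumed wiring: duplicates_view raises CollectionChanged, so every Add()
        // in DisplayDuplicates shows up in duplicatesListView immediately.
        readonly ObservableCollection<DuplicateFile> duplicates_view = new ObservableCollection<DuplicateFile>();

        void BindDuplicatesList()
        {
            duplicatesListView.ItemsSource = duplicates_view;   // bind once, then only Add() to the collection
        }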
        /// <summary>
        /// Click event handler for <see cref="btAnalyze"/>
        /// </summary>
        /// <param name="sender"><see cref="btAnalyze"/></param>
        /// <param name="e">Event arguments</param>
        private async void Button_Click(object sender, RoutedEventArgs e)
        {
            btLink.IsEnabled = false;
            var dir = tbPath.Text;

            spItems.Children.Clear();
            if (!IsOnNTFS(dir))
            {
                return;
            }
            var files = new Queue <WorkFile>();

            hashedFiles = new Dictionary <string, byte[]>();
            var filePaths = new List <NormalFile>();

            btAnalyze.IsEnabled = false;
            running             = 1;
            PBManager.Init(pbStatus, 7, 1);
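            // Discover walks 'dir' on a worker thread and fills filePaths; the loop below
            // polls every 100 ms until 'running' drops back to 0.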
            ThreadPool.QueueUserWorkItem(Discover, new Tuple <string, List <NormalFile> >(dir, filePaths));
            while (running > 0)
            {
                await Task.Delay(100);
            }
            PBManager.MoveToNextAction(filePaths.Count);
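            // FindDuplicates groups the discovered files into potential duplicates on a
            // worker thread while the progress bar is kept up to date.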
            var duplicates = new List <DuplicateFile>();

            running = 1;
            ThreadPool.QueueUserWorkItem(
                FindDuplicates,
                new Tuple <List <DuplicateFile>, List <NormalFile> >(duplicates, filePaths));
            while (running > 0)
            {
                await Task.Delay(100);

                PBManager.UpdateCurrentPosition(position);
            }
            PBManager.MoveToNextAction(duplicates.Count);
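            // One Reader worker and four HashAsync workers share the 'files' queue;
            // the computed hashes end up in hashedFiles.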
            stillLoading = true;
            ThreadPool.QueueUserWorkItem(MemoryMonitor);
            ThreadPool.QueueUserWorkItem(Reader, new HandoverObject()
            {
                queque = files, targets = duplicates, results = hashedFiles
            });
            for (int n = 0; n < 4; n++, running++)
            {
                ThreadPool.QueueUserWorkItem(HashAsync, new HandoverObject()
                {
                    queque = files, targets = duplicates, results = hashedFiles
                });
            }
            while (running > 0)
            {
                PBManager.UpdateCurrentPosition(position);
                await Task.Delay(100);
            }
            await Task.Delay(500);

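            // Sort the duplicate groups on a worker thread (Sortall).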
            running = 1;
            PBManager.MoveToNextAction(duplicates.Count);
            ThreadPool.QueueUserWorkItem(Sortall, duplicates);
            while (running > 0)
            {
                PBManager.UpdateCurrentPosition(position);
                await Task.Delay(100);
            }
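            // Reduce the raw groups to the final duplicate list and drop entries that
            // turned out not to be duplicates, then render the result.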
            PBManager.MoveToNextAction(1);
            finalDuplicates = FinalDuplicates(duplicates);
            PBManager.MoveToNextAction(1);
            finalDuplicates = CleanupDuplicates(finalDuplicates);
            PBManager.MoveToNextAction(finalDuplicates.Count);
            await DisplayDuplicates(finalDuplicates);

            btLink.IsEnabled    = true;
            btAnalyze.IsEnabled = true;
        }
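Every 'while (running > 0)' loop above waits for the queued workers to finish. A minimal sketch of the completion contract such a worker is assumed to follow (only the field name running comes from the code above; the worker body is illustrative):

        // Sketch only: a ThreadPool worker signalling completion to the polling loops.
        void ExampleWorker(object state)
        {
            try
            {
                // ... actual work (Discover, FindDuplicates, Sortall, ...) ...
            }
            finally
            {
                Interlocked.Decrement(ref running);   // lets 'while (running > 0)' fall through
            }
        }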
Example #4
        /// <summary>
        /// Launches all hashing and analyzing logic
        /// </summary>
        /// <param name="path">Path to analyze; null, missing, or non-NTFS paths are rejected here, so the caller does not need to validate</param>
        /// <returns>A task that completes once the analysis has finished</returns>
        async Task Analyzer(string path)
        {
            if (
                string.IsNullOrWhiteSpace(path) ||
                !Directory.Exists(path) ||
                !Util.IsOnNTFS(path)
                )
            {
                DisplayError(true);
                return;
            }
            DisplayError(false);
            pathStorage = new List <string>();
            var files      = new Queue <WorkFile>();
            var filePaths  = new List <NormalFile>();
            var duplicates = new List <DuplicateFile>();

            hashedFiles = new Dictionary <int, byte[]>();

            running = 1;
            PBManager.Init(pbStatus, 7, 1);
            Debug.WriteLine("Scanning...");
            ThreadPool.QueueUserWorkItem(Discover, new Tuple <string, List <NormalFile> >(path, filePaths));
            while (running > 0)
            {
                await Task.Delay(100);

                lbSBFound.Content = position;
            }

            PBManager.MoveToNextAction(filePaths.Count);
            position = 0;
            running  = 1;
            Debug.WriteLine("Identifying duplicates...");
            ThreadPool.QueueUserWorkItem(
                FindDuplicates,
                new Tuple <List <DuplicateFile>, List <NormalFile> >(duplicates, filePaths));
            while (running > 0)
            {
                await Task.Delay(100);

                PBManager.UpdateCurrentPosition(position);
                lbSBAnalyzed.Content = position;
            }
            Debug.WriteLine("Found " + duplicates.Count + " duplicates in " + filePaths.Count + " files");
            filePaths.Clear();
            PBManager.MoveToNextAction(duplicates.Count);

            stillLoading = true;
            running      = 1;
            ThreadPool.QueueUserWorkItem(MemoryMonitor);
            ThreadPool.QueueUserWorkItem(
                Reader,
                new HandoverObject()
                {
                    queque  = files,
                    targets = duplicates,
                    results = hashedFiles
                });
            for (short n = 0; n < Config.HASHTHREADS; n++, running++)
            {
                ThreadPool.QueueUserWorkItem(
                    HashAsync,
                    new HandoverObject()
                    {
                        queque  = files,
                        targets = duplicates,
                        results = hashedFiles
                    });
            }
            while (running > 0)
            {
                PBManager.UpdateCurrentPosition(position);
                lbSBHashed.Content = position;
                await Task.Delay(100);
            }
            Debug.WriteLine("Computed " + hashedFiles.Count + " hashes!");
            running = 1;
            PBManager.MoveToNextAction(duplicates.Count);

            ThreadPool.QueueUserWorkItem(Sortall, duplicates);
            while (running > 0)
            {
                PBManager.UpdateCurrentPosition(position);
                await Task.Delay(100);
            }
            Debug.WriteLine("Sorted!");
            PBManager.MoveToNextAction(1);

            finalDuplicates = FinalDuplicates(duplicates);
            duplicates.Clear();
            Debug.WriteLine("Identified final duplicates. Found: " + finalDuplicates.Count);
            PBManager.MoveToNextAction(1);

            finalDuplicates = CleanupDuplicates(finalDuplicates);
            Debug.WriteLine("Removed non-duplicates. Final count: " + finalDuplicates.Count);
            PBManager.MoveToNextAction(finalDuplicates.Count);
            lbSBDuplicates.Content = finalDuplicates.Count;
            await DisplayDuplicates(finalDuplicates);

            btLink.IsEnabled = true;
        }
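Reader and the HashAsync workers form a producer-consumer pipeline over the shared 'files' queue. The sketch below only illustrates the general shape of one consumer; the queue locking, the end-of-input signal via stillLoading, the SHA-256 choice and the WorkFile members (data, id) are assumptions rather than the project's actual implementation:

        // Sketch only: one hashing consumer under the assumptions stated above.
        void HashConsumerSketch(object state)
        {
            var handover = (HandoverObject)state;
            using (var sha = System.Security.Cryptography.SHA256.Create())
            {
                while (true)
                {
                    WorkFile work = null;
                    lock (handover.queque)                     // assumption: queue guarded by a lock
                    {
                        if (handover.queque.Count > 0)
                        {
                            work = handover.queque.Dequeue();
                        }
                    }
                    if (work == null)
                    {
                        if (!stillLoading) break;              // assumption: Reader clears stillLoading once everything is queued
                        Thread.Sleep(10);
                        continue;
                    }
                    var hash = sha.ComputeHash(work.data);     // 'data' (buffered file bytes) is an assumed member
                    lock (handover.results)
                    {
                        handover.results[work.id] = hash;      // 'id' stands in for the int key used by hashedFiles above
                    }
                }
            }
            Interlocked.Decrement(ref running);                // lets the 'while (running > 0)' loop exit
        }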