/// <summary>
/// Binds a duplication group to the form: header text, summary labels,
/// the file icon, and one grid row per duplicated file.
/// </summary>
/// <param name="duplicate">The duplication group to display.</param>
public void SetDuplicate(DuplicatedFile duplicate)
        {
            this.duplicate = duplicate;

            // Best-effort: extract the shell icon of the first file in the group.
            // ExtractAssociatedIcon may return null, hence the null-conditional call.
            Bitmap icon = null;
            try
            {
                icon = Icon.ExtractAssociatedIcon(duplicate.Files[0].FullName)?.ToBitmap();
            }
            catch (Exception)
            {
                // TODO - Log the error extracting icon; a missing icon is not fatal.
            }

            pbxIcon.Image        = icon;
            Text                 = $"Duplications of: {duplicate.FileName}";
            lbFilename.Text      = duplicate.FileName;
            lbRepeatedTimes.Text = $"{duplicate.TimesRepeated} time{(duplicate.TimesRepeated > 1 ? "s" : "")}";
            lbFileSize.Text      = $"{Toolkit.GetSizeString(duplicate.AverageFileSize)}";
            lbTotalSize.Text     = $"{Toolkit.GetSizeString(duplicate.TotalDuplicationSize)}";
            lbSpaceLost.Text     = $"{Toolkit.GetSizeString(duplicate.SpaceLostByDuplication)}";

            // Rebuild the grid: one row per file, with per-row action buttons.
            dgvDuplicatedFiles.Rows.Clear();
            foreach (var file in duplicate.Files)
            {
                var added = dgvDuplicatedFiles.Rows.Add(file.FullName, "Open...", "Open directory...", "Delete");
                // Keep the FileInfo reachable from the row for the action handlers.
                dgvDuplicatedFiles.Rows[added].Tag = file;
            }
        }
// ---- Example 2 ----
        /// <summary>
        /// Appends one summary row for a duplication group to the duplicates grid.
        /// </summary>
        /// <param name="duplicate">The duplication group to summarize.</param>
        private void AddDatagridRow(DuplicatedFile duplicate)
        {
            // Best-effort extraction of the shell icon for the group's first file.
            Bitmap fileIcon = null;
            try
            {
                fileIcon = Icon.ExtractAssociatedIcon(duplicate.Files[0].FullName).ToBitmap();
            }
            catch (Exception)
            {
                // TODO - Log the error extracting icon.
            }

            var rowIndex = dgvDuplicates.Rows.Add(
                fileIcon,
                duplicate.FileName,
                duplicate.TimesRepeated,
                Toolkit.GetSizeString(duplicate.AverageFileSize),
                Toolkit.GetSizeString(duplicate.TotalDuplicationSize));

            // Keep the model object reachable from the row for later lookups.
            dgvDuplicates.Rows[rowIndex].Tag = duplicate;
        }
// ---- Example 3 ----
        /// <summary>
        /// Recursively scans <paramref name="directory"/>, grouping files that share
        /// the same name and size into <see cref="DuplicatedFile"/> entries.
        /// Progress and per-directory errors are reported through <c>Progress</c>.
        /// </summary>
        /// <param name="directory">Directory to scan.</param>
        /// <param name="cancel">Token checked after each directory; throws if cancelled.</param>
        /// <param name="accumulated_files">Files seen so far with no duplicate yet; null on the root call.</param>
        /// <param name="accumulated_duplications">Duplicate groups found so far; created on the root call.</param>
        /// <returns>The accumulated list of duplication groups.</returns>
        private List <DuplicatedFile> FindDuplicates(DirectoryInfo directory, CancellationToken cancel, List <FileInfo> accumulated_files = null, List <DuplicatedFile> accumulated_duplications = null)
        {
            var current_directory = directory;

            IEnumerable <DirectoryInfo> sub_directories;

            IEnumerable <FileInfo> directory_files;

            try
            {
                sub_directories = current_directory.EnumerateDirectories();
                directory_files = current_directory.EnumerateFiles();
            }
            catch (Exception ex)
            {
                // Access denied / IO errors: report and skip this branch entirely.
                (Progress as IProgress <DuplicateSearchProgress>).Report(new DuplicateSearchProgress {
                    Operation = DuplicateSearchOperation.ErrorFound, AdditionalInformation = ex.Message
                });
                return(accumulated_duplications);
            }

            if (accumulated_files == null)
            {
                // Root call: seed the accumulators with this directory's files.
                accumulated_duplications = new List <DuplicatedFile>();
                accumulated_files        = directory_files.ToList();
            }
            else
            {
                foreach (var file in directory_files)
                {
                    // A not-yet-duplicated file with the same name and size starts
                    // a new duplication group. FirstOrDefault enumerates once
                    // (the original Where query was re-enumerated three times).
                    var match = accumulated_files.FirstOrDefault(f => f.Name == file.Name && f.Length == file.Length);
                    if (match != null)
                    {
                        var new_duplication = new DuplicatedFile {
                            FileName = file.Name
                        };
                        new_duplication.Files.Add(file);
                        new_duplication.Files.Add(match);
                        accumulated_duplications.Add(new_duplication);
                        // The matched file now lives in the group, not the singles list.
                        _ = accumulated_files.Remove(match);
                    }
                    else
                    {
                        // Otherwise the file may extend an existing duplication group.
                        var existing_duplication = accumulated_duplications.FirstOrDefault(d => d.FileName == file.Name && d.AverageFileSize == file.Length);
                        if (existing_duplication != null)
                        {
                            existing_duplication.Files.Add(file);
                        }
                        else
                        {
                            // Never seen before: remember it as a potential original.
                            accumulated_files.Add(file);
                        }
                    }
                }
                ProcessedDirectoriesCount++;
            }

            // NOTE(review): assumes DirectoriesCount > 0 here — confirm it is
            // precomputed before the root call, otherwise this divides by zero.
            (Progress as IProgress <DuplicateSearchProgress>).Report(new DuplicateSearchProgress {
                Operation = DuplicateSearchOperation.SearchingDuplicates, CurrentDirectory = current_directory.FullName, Percentage = ProcessedDirectoriesCount * 100 / DirectoriesCount
            });
            cancel.ThrowIfCancellationRequested();

            // Recurse; children mutate the shared accumulators, so the return
            // value of each recursive call is intentionally discarded.
            foreach (var dir in sub_directories)
            {
                _ = FindDuplicates(dir, cancel, accumulated_files, accumulated_duplications);
            }

            return(accumulated_duplications);
        }
 /// <summary>
 /// Creates the viewer form and immediately binds the given duplication
 /// group to the UI via <see cref="SetDuplicate"/>.
 /// </summary>
 /// <param name="duplicate">The duplication group to display.</param>
 public FrmDuplicateViewer(DuplicatedFile duplicate)
 {
     InitializeComponent();
     SetDuplicate(duplicate);
 }