public IEnumerator<DuplicateGroup> GetEnumerator()
{
    XDocument xdoc = _fileOps.LoadXml(_config.DuplicateFilePath);

    // Get all the duplicate groups
    IEnumerable<XElement> xmlGroups = xdoc.Descendants("G");

    // Convert each group to a model
    foreach (XElement xmlGroup in xmlGroups)
    {
        DuplicateGroup duplicateGroup = new DuplicateGroup();
        List<GroupFile> groupFiles = new List<GroupFile>();

        IEnumerable<XElement> xmlImages = xmlGroup.Descendants("Image");
        foreach (XElement xmlImage in xmlImages)
        {
            GroupFile groupFile = new GroupFile()
            {
                Checked = xmlImage.Attribute("Checked").Value != "0",
                FullName = xmlImage.Attribute("FileName").Value
            };

            groupFiles.Add(groupFile);
        }

        duplicateGroup.Files = groupFiles;
        yield return duplicateGroup;
    }
}
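For context, the enumerator above expects an XML document whose duplicate groups are "G" elements containing "Image" elements with "Checked" and "FileName" attributes. Below is a minimal sketch of a document in that shape, built with XDocument; only the G/Image/Checked/FileName names come from the reader itself, while the root element name and the file paths are illustrative placeholders.

using System.Xml.Linq;

// Hypothetical sample of the shape the reader walks. Only the element and
// attribute names ("G", "Image", "Checked", "FileName") are taken from the
// reader above; the root name and file paths are made-up placeholders.
XDocument sample = new XDocument(
    new XElement("Duplicates",
        new XElement("G",
            new XElement("Image",
                new XAttribute("Checked", "1"),
                new XAttribute("FileName", @"C:\photos\a.jpg")),
            new XElement("Image",
                new XAttribute("Checked", "0"),
                new XAttribute("FileName", @"C:\photos\a_copy.jpg")))));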
public bool Execute()
{
    _index = _groupsView.CurrentPosition;
    _savedGroup = _groups[_index];

    foreach (var item in _savedGroup.FileList)
    {
        if (!item.Equals(_forSaveInfo))
        {
            _forDeleteList.Add(new RenamedImage(item));
        }
    }

    foreach (var image in _forDeleteList)
    {
        File.Move(image.ImageInfo.Path, image.PathWithTempExtension);
    }

    _groups.RemoveAt(_index);
    //_groupsView.Refresh();

    string res = Application.Current.Resources["desc_deleteOtherFromGroup"] as string;
    _description = String.Format(res, _forSaveInfo.Path);

    return true;
}
protected IEnumerable<DuplicateGroup> GetDuplicates<TKey>(DuplicateGroup duplicateGroup, Func<string, TKey> keyFunc)
{
    return FindDuplicates(duplicateGroup.Duplicates, keyFunc)
        .Select(duplicates => new DuplicateGroup(duplicates))
        .Where(group => group.ContainsDuplicates)
        .AsEnumerable();
}
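FindDuplicates itself is not shown in this listing. Purely as an assumption about its shape, consistent with the Func&lt;string, TKey&gt; key selector used above, it could be a LINQ grouping over the group's file paths that keeps only keys shared by more than one path; a minimal sketch, not the project's actual helper:

// Hypothetical sketch only; the real FindDuplicates may differ in signature and semantics.
// Requires: using System; using System.Collections.Generic; using System.Linq;
private static IEnumerable<IEnumerable<string>> FindDuplicates<TKey>(
    IEnumerable<string> filePaths, Func<string, TKey> keyFunc)
{
    return filePaths
        .GroupBy(keyFunc)                    // bucket paths by the computed key
        .Where(bucket => bucket.Count() > 1) // keep only buckets with real duplicates
        .Select(bucket => bucket.AsEnumerable());
}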
public void GetEnumeratorGetsCorrectFilesInGroup()
{
    // Arrange
    IFileSystem fs = this.ArrangeFileSystemForCsvData(
        new CsvDuplicateRecord() { Group = 1, FileName = "file1", Checked = 0 },
        new CsvDuplicateRecord() { Group = 1, FileName = "file2", Checked = 0 });

    // Act
    CsvDuplicateReader uut = new CsvDuplicateReader(
        config: Substitute.For<IDuplicateHandlerConfiguration>(),
        fileSystem: fs);

    // Assert
    DuplicateGroup group = Assert.Single(uut);
    Assert.Equal(2, group.Files.Count());
    GroupFile file1 = Assert.Single(group.Files, f => f.FullName == "file1");
    GroupFile file2 = Assert.Single(group.Files, f => f.FullName == "file2");
}
private void DeleteSelectedFilesInGroup(DuplicateGroup duplicateFileGroup, DeletionState deletionStatus,
    bool removeEmptyDirs, bool deleteToRecycleBin, CancellationToken cancellationToken)
{
    var duplicateFiles = duplicateFileGroup.DuplicateFiles;
    var duplicateFilesCount = duplicateFiles.Count;

    for (var index = 0; index < duplicateFilesCount; index++)
    {
        cancellationToken.ThrowIfCancellationRequested();

        var duplicatedFile = duplicateFiles[index];
        if (!duplicatedFile.IsMarkedForDeletion)
        {
            continue;
        }

        var fileData = duplicatedFile.FileData;
        var fullFileName = fileData.FullName;

        Application.Current.Dispatcher.Invoke(() => OnDeletionMessage(
            string.Format(Resources.Log_Deleting_Name_Size, fullFileName, duplicatedFile.FileSize)));

        try
        {
            FileSystem.DeleteFile(fileData, deleteToRecycleBin);

            var indexClosure = index;
            Application.Current.Dispatcher.Invoke(() => duplicateFiles.RemoveAt(indexClosure));
            index--;
            duplicateFilesCount--;

            deletionStatus.TotalDeletedCount++;
            var fileSize = fileData.Size;
            deletionStatus.TotalDeletedSize += fileSize;
            deletionStatus.DeletedSizeDelta = -fileSize;
            deletionStatus.DeletedCountDelta = -1;
        }
        catch (FileSystemException ex)
        {
            Application.Current.Dispatcher.Invoke(() => OnDeletionMessage(
                ex.FileFullName,
                string.Format(Resources.Log_Error_Deletion_Failed, ex.FileFullName, ex.Message),
                MessageType.Error));
            continue;
        }
        finally
        {
            deletionStatus.CurrentFileForDeletionIndex++;
            Application.Current.Dispatcher.Invoke(() => OnDeletionStateChanged(deletionStatus));
        }

        cancellationToken.ThrowIfCancellationRequested();

        var dirPath = fileData.DirPath;
        if (!removeEmptyDirs || !FileSystem.IsDirectoryTreeEmpty(dirPath))
        {
            continue;
        }

        FileSystem.DeleteDirectoryTreeWithParents(
            dirPath,
            message => Application.Current.Dispatcher.Invoke(() => OnDeletionMessage(
                string.Format(Resources.Log_Deleting_Name, message))),
            (path, errorMessage) => Application.Current.Dispatcher.Invoke(() => OnDeletionMessage(
                path, errorMessage, MessageType.Error)),
            deleteToRecycleBin);
    }
}
public ComparatorViewModel(DuplicateGroup group, ObservableCollection<DuplicateGroup> groups,
    ICollectionView groupsView, IUndoRedoEngine undoRedoEngine, WindowService windowService,
    IConfigurationModel configuration)
    : this(undoRedoEngine, windowService, configuration)
{
    _group = group;
    _groups = groups;
    _groupsView = groupsView;

    _list = new LinkedList<ImageInfoClass>(group.FileList);
    _current = _list.First; //_list.First.Next

    SaatiHelper.CalculateIndex(_list, configuration);
}
private void UnmarkAll(DuplicateGroup duplicateFileGroup, DeletionState deletionState)
{
    foreach (var duplicatedFile in duplicateFileGroup.DuplicateFiles)
    {
        // Skip files that are not marked; only marked files need to be unmarked.
        if (!duplicatedFile.IsMarkedForDeletion)
        {
            continue;
        }

        OnDeletionMessage(string.Format(Resources.Log_Unmarking_FullFileName, duplicatedFile.FileFullName));
        duplicatedFile.IsMarkedForDeletion = false;

        deletionState.CurrentFileForDeletionIndex++;
        OnDeletionStateChanged(deletionState);
    }
}
private ImageInfoClass GetBestImage(DuplicateGroup group)
{
    ImageInfoClass bestImageInfo = null;

    foreach (var image in group.FileList)
    {
        if (bestImageInfo == null)
        {
            bestImageInfo = image;
            continue;
        }

        if (image.UtilityIndex > bestImageInfo.UtilityIndex)
        {
            bestImageInfo = image;
        }
    }

    return bestImageInfo;
}
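For comparison, the same selection (highest UtilityIndex, first item wins on ties, null for an empty list) can be written with LINQ; a sketch, not the project's code:

// Requires: using System.Linq;
// OrderByDescending is stable, so the first of several equal UtilityIndex
// values wins, matching the strict ">" comparison in the loop above.
private ImageInfoClass GetBestImage(DuplicateGroup group)
{
    return group.FileList
        .OrderByDescending(image => image.UtilityIndex)
        .FirstOrDefault();
}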
public void CheckedValueResolvesToCorrectBoolean(int checkedValue, bool isChecked)
{
    // Arrange
    IFileSystem fs = this.ArrangeFileSystemForCsvData(
        new CsvDuplicateRecord() { Group = 1, FileName = "file1", Checked = checkedValue });

    // Act
    CsvDuplicateReader uut = new CsvDuplicateReader(
        config: Substitute.For<IDuplicateHandlerConfiguration>(),
        fileSystem: fs);

    // Assert
    DuplicateGroup group = Assert.Single(uut);
    GroupFile file = Assert.Single(group.Files);
    Assert.Equal(isChecked, file.Checked);
}
public void CallingFilesPropertyAlwaysReturnsSameInstance()
{
    // Arrange
    IFileSystem fs = this.ArrangeFileSystemForCsvData(
        new CsvDuplicateRecord() { Group = 1, FileName = "file1", Checked = 0 });

    CsvDuplicateReader uut = new CsvDuplicateReader(
        config: Substitute.For<IDuplicateHandlerConfiguration>(),
        fileSystem: fs);

    // Act
    DuplicateGroup group = uut.Single();
    IEnumerable<GroupFile> intersection = group.Files.Intersect(group.Files);
    IEnumerable<GroupFile> except = group.Files.Except(group.Files);

    // Assert
    Assert.Single(intersection);
    Assert.Empty(except);
}
public void GetEnumeratorGetsCorrectFilesInGroup()
{
    // Arrange
    XDocument xdoc = new XDocument(
        new XElement("root",
            this.ArrangeGroup(
                this.ArrangeFile(true, "file1"),
                this.ArrangeFile(false, "file2"))));

    IFileOperationsAbstraction fileOps = Substitute.For<IFileOperationsAbstraction>();
    fileOps.LoadXml(Arg.Any<string>()).Returns(xdoc);

    XmlDuplicateReader uut = this.XmlDuplicateReaderWithDefaultMocks(fileOps: fileOps);

    // Act & Assert
    DuplicateGroup group = Assert.Single(uut);
    Assert.Equal(2, group.Files.Count());
    GroupFile file1 = Assert.Single(group.Files, f => f.FullName == "file1" && f.Checked == true);
    GroupFile file2 = Assert.Single(group.Files, f => f.FullName == "file2" && f.Checked == false);
}
public void CallingFilesPropertyAlwaysReturnsSameInstance()
{
    // Arrange
    XDocument xdoc = new XDocument(
        new XElement("root",
            this.ArrangeGroup(
                this.ArrangeFile(false, "file1"))));

    IFileOperationsAbstraction fileOps = Substitute.For<IFileOperationsAbstraction>();
    fileOps.LoadXml(Arg.Any<string>()).Returns(xdoc);

    XmlDuplicateReader uut = this.XmlDuplicateReaderWithDefaultMocks(fileOps: fileOps);

    // Act
    DuplicateGroup group = uut.Single();
    IEnumerable<GroupFile> intersection = group.Files.Intersect(group.Files);
    IEnumerable<GroupFile> except = group.Files.Except(group.Files);

    // Assert
    Assert.Single(intersection);
    Assert.Empty(except);
}
public static void ConvertToGroup(IList<DuplicateGroup> groups, IEnumerable<DuplPairViewModel> resultList)
{
    groups.Clear();

    foreach (var result in resultList)
    {
        DuplicateGroup finded = null;
        foreach (var group in groups)
        {
            if (group.ContainFile(result.FirstFile) || group.ContainFile(result.SecondFile))
            {
                finded = group;
                break;
            }
        }

        if (finded != null)
        {
            finded.AddResult(result);
        }
        else
        {
            groups.Add(new DuplicateGroup(result));
        }
    }
}
/// <summary>
/// Get duplicates by the elements' file hash.
/// </summary>
/// <param name="duplicateGroup">The group of elements to check.</param>
/// <param name="maxByteLength">An optional value. If it is provided, only the first maxByteLength bytes are used to calculate the hash.</param>
/// <returns>The groups of duplicates that were found.</returns>
public IEnumerable<DuplicateGroup> GetDuplicates(DuplicateGroup duplicateGroup, uint? maxByteLength = null)
{
    return GetDuplicates(duplicateGroup, filePath => _fileService.GetFileHash(filePath, maxByteLength));
}
/// <summary>
/// Get duplicates by the elements' file size.
/// </summary>
/// <param name="duplicateGroup">The group of elements to check.</param>
/// <returns>The groups of duplicates that were found.</returns>
public IEnumerable<DuplicateGroup> GetDuplicates(DuplicateGroup duplicateGroup)
{
    return GetDuplicates(duplicateGroup, _fileService.GetFileSize);
}
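A hedged sketch of how the two overloads above might be combined: group by file size first (cheap), then confirm only the size-matched groups by hashing. The finder and allFiles variables are placeholders, not part of the API shown above.

// Hypothetical usage; "finder" is assumed to be an instance of the class that
// exposes the two GetDuplicates overloads, and "allFiles" a DuplicateGroup
// covering every candidate file.
// Requires: using System.Collections.Generic;
var confirmed = new List<DuplicateGroup>();
foreach (DuplicateGroup sizeGroup in finder.GetDuplicates(allFiles))
{
    // Hash only the first 64 KB of each file; pass null to hash whole files.
    confirmed.AddRange(finder.GetDuplicates(sizeGroup, maxByteLength: 64 * 1024));
}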