/// <summary>
/// Takes the scanned files and adds them to the <see cref="Duplicates"/> collection.
/// The ScannedFiles will be sorted by Hash before being added to the ViewModel.
/// Internally exposed for unit testing.
/// It is assumed that <paramref name="scannedFiles"/> is sorted by Hash before calling.
/// </summary>
/// <param name="scannedFiles">The scanned files to add</param>
internal void AddScannedFiles(List <ScannedFile> scannedFiles)
{
    byte[] lastHash = null;
    BackgroundColor background = BackgroundColor.Transparent;
    var hashComparer = new ScannedFileHashComparer();

    foreach (ScannedFile file in scannedFiles)
    {
        // A new hash group starts here: alternate the background color.
        // With exactly two BackgroundColor values this arithmetic flip works;
        // revisit if more colors are ever added.
        if (lastHash != null && !hashComparer.Equals(lastHash, file.Hash))
        {
            background = 1 - background;
        }

        Duplicates.Add(new ScanResult()
        {
            FilePath = file.Path,
            Hash = file.Hash,
            Background = background.ToString(),
            IsSelected = false
        });

        lastHash = file.Hash;
    }
}
// Scans the user-supplied path for duplicate files and returns them as a
// Duplicates collection of observable groups. Shows a message box when the
// path does not exist and returns an empty collection in that case.
public Duplicates FindDuplicatesMetod()
{
    Duplicates globalDuplicates = new Duplicates();

    if (!MetodsClass.DirExists(PathFromUser))
    {
        MessageBox.Show("Can't find such path");
        return globalDuplicates;
    }

    string[] allFiles = MetodsClass.GetAllFiles(PathFromUser);
    List <FileClass> parsedFiles = MetodsClass.ParseStringToClass(allFiles.ToList());
    List <List <FileClass> > duplicateGroups = MetodsClass.FindDuplicates(parsedFiles);

    foreach (List <FileClass> duplicateGroup in duplicateGroups)
    {
        globalDuplicates.Add(ConvertToObservable(duplicateGroup));
    }

    return globalDuplicates;
}
// Marks every duplicate with a terminal resolution (Dismissed/Created/Updated)
// as resolved, copying the case identifiers onto the item, then advances the
// state machine: ReadyForSave when all are resolved, otherwise the error state.
public void FinalizeResolveDuplicates()
{
    bool resolvable = m_state == Upload506MasterState.HasDuplicates
                   || m_state == Upload506MasterState.HasDuplicatesResolutionErrors;
    if (!resolvable)
    {
        return;
    }

    foreach (var duplicate in Duplicates.Where(d =>
                 d.Item.Resolution == (int)Upload506Resolution.Dismissed ||
                 d.Item.Resolution == (int)Upload506Resolution.Created ||
                 d.Item.Resolution == (int)Upload506Resolution.Updated).ToList())
    {
        duplicate.Resolved = true;
        duplicate.Item.strCaseID = duplicate.strCaseID;
        duplicate.Item.idfCase = duplicate.idfCase;
    }

    if (Duplicates.Any(d => !d.Resolved))
    {
        SetState(Upload506MasterState.HasDuplicatesResolutionErrors);
    }
    else
    {
        SetState(Upload506MasterState.ReadyForSave);
    }
}
// Deletes every checked file from disk and from the Duplicates list, then
// swaps the scan UI for the completion message.
private void Delete_Click(object sender, EventArgs e)
{
    // Snapshot the checked entries first: we must not remove items from the
    // list while enumerating CheckedItems.
    i = 0;
    Dictionary <Int64, object> tobedel = new Dictionary <Int64, object>();
    foreach (object a in Duplicates.CheckedItems)
    {
        tobedel.Add(i, a);
        i++;
    }

    foreach (KeyValuePair <Int64, object> a in tobedel)
    {
        File.Delete(a.Value.ToString());
        // BUG FIX: remove the list item itself, not the KeyValuePair wrapper.
        // Remove(a) silently did nothing because the KVP was never in Items,
        // leaving deleted files visible in the list.
        Duplicates.Items.Remove(a.Value);
    }

    // Hide the scan UI and show the completion message.
    Duplicates.Visible = false;
    path.Visible = false;
    Next.Visible = false;
    Delete.Visible = false;
    textBox2.Visible = false;
    richTextBox1.Visible = false;
    selectall.Visible = false;
    done.Visible = true;
    done.Text = " DONE!!! Your files have been successfully deleted.";
    Duplicates.Refresh();
}
// Builds and executes a flatten plan for the current directory, mapping each
// failure mode to its process exit code.
static int flatten(Duplicates dups, bool keepFolders, bool recursive)
{
    var plan = new Plan(dups, keepFolders, recursive);

    try
    {
        Console.WriteLine("Preparing plan...");
        plan.prepare(new DirectoryInfo(Environment.CurrentDirectory));
        plan.execute();
        Console.WriteLine("Completed successfully");
        return EXIT_CODE_OK;
    }
    catch (DupsException e)
    {
        Console.WriteLine("Duplicates found for filename " + e.filename + ". Aborting.");
        return EXIT_CODE_DUPS;
    }
    catch (IOException e)
    {
        Console.WriteLine("Error during file operation: " + e.Message);
        return EXIT_CODE_IO_ERROR;
    }
    catch (UnauthorizedAccessException e)
    {
        Console.WriteLine("Access error during file operation: " + e.Message);
        Console.WriteLine("You could try to execute the program as administrator");
        return EXIT_CODE_IO_ERROR;
    }
}
/// <summary>Does the work of finding and identifying duplicates</summary>
/// <param name="dlg">Progress dialog; its <c>CancelPending</c> flag is polled to abort early.</param>
/// <param name="ctx">Worker context — not used in this method body.</param>
/// <param name="progress">Callback for reporting the directory currently being scanned.</param>
private void FindDuplicates(ProgressForm dlg, object ctx, ProgressForm.Progress progress) // worker thread context
{
    // Build a map of file data
    var dir = string.Empty;
    foreach (var path in Settings.SearchPaths)
    {
        if (dlg.CancelPending) { break; }

        // Recursively enumerate every file (directories excluded) under this search path.
        foreach (var fi in Path_.EnumFileSystem(path, search_flags:SearchOption.AllDirectories, exclude:FileAttributes.Directory).Cast <System.IO.FileInfo>())
        {
            if (dlg.CancelPending) { break; }

            // Report progress whenever the directory changes
            var d = Path_.Directory(fi.FullName) ?? string.Empty;
            if (d != dir)
            {
                dir = d;
                progress(new ProgressForm.UserState { Description = $"Scanning files...\r\n{dir}" });
            }

            try
            {
                // Create file info for the file and look for a duplicate.
                // Note the 'out existing' result is consumed directly by the
                // conditional, yielding null when the key is absent.
                var finfo = new FileInfo(fi);
                FileInfo existing = FInfoMap.TryGetValue(finfo.Key, out existing) ? existing : null;
                if (existing != null)
                {
                    // Duplicate key: record the new file under the existing entry
                    // and keep the sorted Duplicates list up to date. Collection
                    // updates are marshalled onto the dispatcher thread.
                    Dispatcher.Invoke(() =>
                    {
                        existing.Duplicates.Add(finfo);
                        var idx = Duplicates.BinarySearch(existing, FileInfo.Compare);
                        if (idx < 0) { Duplicates.Insert(~idx, existing); }
                    });
                }
                else
                {
                    // First file seen with this key.
                    FInfoMap.Add(finfo.Key, finfo);
                }
            }
            catch (Exception ex)
            {
                // Best effort: record the failure and continue with the next file.
                Errors.Add($"Failed to add {fi.FullName} to the map. {ex.Message}");
            }
        }
    }
}
// Applies the given resolution to the duplicate whose case id matches;
// does nothing when no such duplicate exists.
public void SetResolutionForDuplicate(long id, Upload506Resolution resolution)
{
    var match = Duplicates.FirstOrDefault(i => i.idfCase == id);
    if (match == null)
    {
        return;
    }

    match.Item.Resolution = (int)resolution;
}
/// <summary>
/// Call <see cref="IProcess.StartAsync(string)"/> for each selected duplicate file.
/// </summary>
private async Task PreviewSelectionAsync()
{
    foreach (var duplicate in Duplicates.Where(d => d.IsSelected))
    {
        await _process.StartAsync(duplicate.FilePath);
    }
}
// Builds "tables/tstringsModified-tlc.tbm" from the Duplicates list, reusing
// an existing ID when the duplicate's text already appears in the original
// Lines (or in an earlier treated duplicate), and allocating a fresh ID
// otherwise. Progress is reported once per duplicate.
private void CreateFileForDuplicates()
{
    // new ID = max ID + 1 to avoid duplicates
    int newId = SetNextID();
    string currentFile = string.Empty;
    string tstringsModifiedContent = $"#Default{Environment.NewLine}";

    foreach (Xstr duplicate in Duplicates)
    {
        Xstr originalXstr = Lines.FirstOrDefault(x => x.Text == duplicate.Text);

        // if duplicated text exists in another xstr in the original file, then copy its ID
        if (originalXstr != null)
        {
            duplicate.Id = originalXstr.Id;
            duplicate.Treated = true;
        }
        // if there is another duplicate with the same text, we can reuse the same ID to avoid new duplicates in the new file
        else if (tstringsModifiedContent.Contains(duplicate.Text))
        {
            Xstr result = Duplicates.FirstOrDefault(x => x.Treated && x.Text == duplicate.Text);

            if (result != null)
            {
                duplicate.Id = result.Id;
                duplicate.Treated = true;
            }
            else
            {
                // The text occurs in the generated content but no treated
                // duplicate carries it — inconsistent state, abort.
                throw new Exception();
            }
        }
        else
        {
            // Brand-new text: allocate the next free ID.
            duplicate.Id = newId;
            newId++;

            // add the name of the file in comment
            if (currentFile != duplicate.FileName)
            {
                currentFile = duplicate.FileName;
                tstringsModifiedContent += $"{Environment.NewLine}; {duplicate.FileName}{Environment.NewLine}";
            }

            tstringsModifiedContent += $"{Environment.NewLine}{duplicate.Id}, {duplicate.Text}{Environment.NewLine}";
            duplicate.Treated = true;
        }

        Parent.IncreaseProgress(Sender, CurrentProgress++);
    }

    tstringsModifiedContent += $"{Environment.NewLine}#End";
    Utils.CreateFileWithPath(Path.Combine(DestinationFolder, "tables/tstringsModified-tlc.tbm"), tstringsModifiedContent);
}
// Entry point: demonstrates RemoveDuplicateChars on a sample string and
// waits for a key press before exiting.
static void Main(string[] args)
{
    Console.WriteLine("Hello World!");

    string deduplicated = Duplicates.RemoveDuplicateChars("UsmanRafiq");
    Console.WriteLine(deduplicated);

    _ = Console.ReadKey();
}
// Classifies an incoming item into exactly one bucket (Stacked, Quest,
// Unknown, Duplicates, or Remaining); every item also lands in All.
// The check order matters: stacking, then record lookup, then dupe checks.
public void Add(Item item)
{
    // Dupe-check list: stackables and special "single seed" items bypass it.
    bool stacked = item.StackCount > 1
        || _cache.StackableRecords.Contains(item.BaseRecord)
        || _cache.SpecialRecords.Contains(item.BaseRecord);
    if (stacked)
    {
        Stacked.Add(item);
        All.Add(item);
        return;
    }

    // TODO: Detect slith rings etc
    // We don't have this record at all, unknown to IA. Probably need to parse DB.
    if (!_cache.AllRecords.Contains(item.BaseRecord))
    {
        if (item.BaseRecord.StartsWith("records/storyelements/rewards/"))
        {
            Quest.Add(item);
        }
        else
        {
            Unknown.Add(item);
        }
        All.Add(item);
        return;
    }

    // Already collected in this batch..
    if (All.Any(m => m.Equals(item)))
    {
        Duplicates.Add(item);
        All.Add(item);
        return;
    }

    // Already stored in the player item database..
    if (_playerItemDao.Exists(TransferStashService.Map(item, null, false)))
    {
        Duplicates.Add(item);
        All.Add(item);
        return;
    }

    Remaining.Add(item);
    All.Add(item);
}
// Registers a duplicate only when its location belongs to this instance;
// mismatches are logged and ignored.
public void AddDuplicate(Duplicate dup)
{
    if (dup.Location != PhysicalPath)
    {
        Journal.Info($"{dup.Location} does not match instance {Name}");
        return;
    }

    Duplicates.Add(dup);
}
/// <summary>
/// Send the selected files to the recycle bin
/// </summary>
private async Task RecycleSelectionAsync()
{
    bool suppressRecycleFileDialog = Properties.Settings.Default.SuppressRecycleFileDialog;

    // BUG FIX: materialize the selection before the loop. The original kept a
    // deferred Where() query over Duplicates and then called Duplicates.Remove()
    // inside the loop — mutating a collection while enumerating it throws
    // InvalidOperationException on standard collection types.
    List <ScanResult> selectedItems = Duplicates.Where(t => t.IsSelected).ToList();

    foreach (ScanResult itemToRecycle in selectedItems)
    {
        // Only drop the entry from the view when recycling actually succeeded.
        if (await _recycleFile.RecycleAsync(itemToRecycle.FilePath, suppressRecycleFileDialog))
        {
            Duplicates.Remove(itemToRecycle);
        }
    }
}
// Computes the file names that occur more than once (Duplicates) and builds
// FileMappings, appending a GUID suffix to each duplicated name so every
// mapped target is unique.
internal void BuildDuplicatesAndFileMappings()
{
    Duplicates = Files
        .GroupBy(f => f.Name)
        .Where(g => g.Count() > 1)
        .Select(g => g.Key)
        .ToList();

    // PERF: use a HashSet for the per-file membership test. The original
    // called List.Contains inside the query, making the mapping pass O(n^2)
    // for n files; this keeps it O(n) with identical output.
    var duplicateNames = new HashSet <string>(Duplicates);

    FileMappings =
        (from file in Files
         let fileName = duplicateNames.Contains(file.Name)
             ? $"{Path.GetFileNameWithoutExtension(file.Name)}_{Guid.NewGuid()}{Path.GetExtension(file.Name)}"
             : file.Name
         select new FileMapping(file.File, Path.Combine(file.ParentDir, fileName)))
        .ToList();
}
// Mirrors the "select all" checkbox state onto every entry in the
// Duplicates checked list box.
private void selectall_CheckedChanged(object sender, EventArgs e)
{
    bool check = selectall.Checked;
    for (int index = 0; index < Duplicates.Items.Count; index++)
    {
        Duplicates.SetItemChecked(index, check);
    }
}
// Prints a per-group summary of duplicate files found in the index (up to
// 100 groups, with up to three sample paths each) followed by the total
// potential saving.
public void AnalyseDuplicates()
{
    const int maxResults = 100;
    var duplicateGroups = Duplicates.FindDuplicateFiles(_index, maxResults);

    foreach (var dupGroup in duplicateGroups)
    {
        Console.WriteLine($"{dupGroup.Etag} - {dupGroup.Count} files, Total: {dupGroup.TotalSize.ToFileSize()}, Individual: {dupGroup.IndividualSize.ToFileSize()}, PotentialSaving: {dupGroup.PotentialSaving.ToFileSize()}");
        foreach (var sample in dupGroup.Files.Take(3))
        {
            Console.WriteLine($"\t{sample.Key}:");
        }
        Console.WriteLine();
    }

    Console.WriteLine();
    Console.WriteLine($"Total Savings: {duplicateGroups.Sum(g => g.PotentialSaving).ToFileSize()}");
}
/// <summary>
/// look for xstr in each file
/// </summary>
private void FetchXstr()
{
    // Skip files this tool generates itself (-lcl/-tlc tbm) and strings.tbl.
    List <string> compatibleFiles = FilesList.Where(x => !x.Contains("-lcl.tbm") && !x.Contains("-tlc.tbm") && !x.Contains("strings.tbl")).ToList();

    foreach (string file in compatibleFiles)
    {
        FileInfo fileInfo = new(file);
        string fileContent = File.ReadAllText(file);

        IEnumerable <Match> combinedResults = Utils.GetAllXstrFromFile(fileInfo, fileContent);

        foreach (Match match in combinedResults)
        {
            //match.Groups[0] => entire line
            //match.Groups[1] => text
            //match.Groups[2] => id
            if (int.TryParse(match.Groups[2].Value, out int id))
            {
                string text = match.Groups[1].Value;

                // if id not existing, add a new line
                if (id >= 0 && !Lines.Any(x => x.Id == id))
                {
                    Lines.Add(new Xstr(id, text, fileInfo));
                }
                // if id already existing but value is different, then put it in another list that will be treated separately
                // (negative ids always count as duplicates when ManageDuplicates is on)
                else if (ManageDuplicates && (id < 0 || Lines.First(x => x.Id == id).Text != text))
                {
                    Duplicates.Add(new Xstr(id, text, fileInfo, match.Value));
                }
            }
            else
            {
                // The id capture group was not numeric — unexpected match, abort.
                throw new Exception();
            }
        }
    }

    // Progress budget: one tick per line, plus per-file and per-duplicate
    // ticks when duplicates are being managed.
    int maxProgress = Lines.Count + (ManageDuplicates ? FilesList.Count + Duplicates.Count : 0);
    Parent.SetMaxProgress(maxProgress);
}
/// <summary>
/// Creates table and mission files with new IDs: rewrites every file that
/// contains duplicate xstr entries and writes the result into the
/// destination folder, reporting progress per file.
/// </summary>
private void CreateModFilesWithNewIds()
{
    // Group the work per file by sorting duplicates on file name first.
    Duplicates = Duplicates.OrderBy(x => x.FileName).ToList();

    foreach (string sourceFile in Duplicates.Select(x => x.FilePath).Distinct().ToList())
    {
        string fileName = Path.GetFileName(sourceFile);
        string updatedContent = File.ReadAllText(sourceFile);

        // Replace every duplicate xstr belonging to this file.
        foreach (Xstr duplicate in Duplicates.Where(x => x.FileName == fileName))
        {
            updatedContent = Utils.ReplaceContentWithNewXstr(updatedContent, duplicate);
        }

        Utils.CreateFileWithNewContent(sourceFile, ModFolder, DestinationFolder, updatedContent);
        Parent.IncreaseProgress(Sender, CurrentProgress++);
    }
}
// Resolves every outstanding duplicate as Dismissed, copies the case
// identifiers onto each item, and moves the state machine to ReadyForSave.
public void DismissAllDuplicates()
{
    bool canDismiss = m_state == Upload506MasterState.HasDuplicates
                   || m_state == Upload506MasterState.HasDuplicatesResolutionErrors;
    if (!canDismiss)
    {
        return;
    }

    foreach (var duplicate in Duplicates.Where(d => !d.Resolved).ToList())
    {
        duplicate.Item.Resolution = (int)Upload506Resolution.Dismissed;
        duplicate.Resolved = true;
        duplicate.Item.strCaseID = duplicate.strCaseID;
        duplicate.Item.idfCase = duplicate.idfCase;
    }

    SetState(Upload506MasterState.ReadyForSave);
}
// Sends every selected duplicate's file to the recycle bin and removes the
// corresponding entries from the Duplicates collection.
private void HandleRemove(SingleFileEntry item)
{
    var items = SelectedItems.Cast <SingleFileEntry>().ToList();
    CurrentItem = null;

    using (new CursorHelper(this))
    {
        foreach (var selItem in items)
        {
            // BUG FIX: FirstOrDefault can return null (entry already gone);
            // the original dereferenced the result unconditionally.
            var entry = Duplicates.Where(x => x.FilePath == selItem.FilePath).FirstOrDefault();
            if (entry == null)
            {
                continue;
            }

            // Delete the file first, then drop it from the list: the original
            // removed the entry before deleting, so a failed/cancelled delete
            // left the UI out of sync with the disk.
            FileSystem.DeleteFile(entry.FilePath, UIOption.AllDialogs, RecycleOption.SendToRecycleBin, UICancelOption.ThrowException);
            Duplicates.Remove(entry);
            // NOTE: dropped the per-item GC.Collect()/WaitForPendingFinalizers
            // calls — forcing full collections inside a loop only adds pauses.
        }
    }
}
// Updates an existing user's editable fields unless the email is already in
// use by another account, in which case the edit view is re-shown with a
// model error.
public ActionResult Edit(User user)
{
    if (Duplicates.CheckEmail(user.Email, user.UserId))
    {
        ModelState.AddModelError("Email", "Este email já está em uso");
        return(View(user));
    }

    User users = db.Users.Find(user.UserId);
    users.Name = user.Name;
    users.Email = user.Email;
    users.ActiveUser = user.ActiveUser;
    users.Types = user.Types;
    users.Update_Time = DateTime.Now;

    db.Entry(users).State = EntityState.Modified;
    db.SaveChanges();
    TempData["MessagePanel"] = "Usuário atualizado com sucesso";

    return(RedirectToAction("Index", "Users"));
}
// Resets the upload session to its initial state: clears items, duplicates
// and error info, disposes any held resources, and returns the state machine
// to ReadyForUpload. Failures are logged rather than propagated.
public void Clear()
{
    try
    {
        Items.Clear();
        Duplicates.Clear();

        m_lastErrorMessage = string.Empty;
        m_lastError = Upload506FileError.Success;
        FileName = null;
        FileContent = null;
        m_resultFileName = null;
        m_errorFileName = null;

        // Release a disposable payload before dropping the reference.
        var disposableData = StoredData as IDisposable;
        if (disposableData != null)
        {
            disposableData.Dispose();
        }
        StoredData = null;

        if (m_errorsFile != null)
        {
            m_errorsFile.Dispose();
            m_errorsFile = null;
        }

        if (m_resultFile != null)
        {
            m_resultFile.Dispose();
            m_resultFile = null;
        }

        SetState(Upload506MasterState.ReadyForUpload);
    }
    catch (Exception ex)
    {
        LogError.Log("ErrorLog", ex);
    }
}
// Looks up each uploaded item in the database. Items without a stored match
// are marked Created; matches are collected into Duplicates. Returns true
// (and moves to ReadyForSave) only when no duplicates were found.
public bool CheckForDuplicates()
{
    if (m_state != Upload506MasterState.ReadyForCheckDuplicates)
    {
        return(false);
    }

    Duplicates.Clear();

    using (var manager = DbManagerFactory.Factory.Create(EidssUserContext.Instance))
    {
        foreach (var upload506Item in Items)
        {
            var storedMatch = Upload506Duplicate.Accessor.Instance(null).SelectByItem(manager, upload506Item);
            if (storedMatch == null)
            {
                // Nothing on record: this item will be created as new.
                upload506Item.Resolution = (int)Upload506Resolution.Created;
            }
            else
            {
                Duplicates.Add(storedMatch);
            }
        }
    }

    if (Duplicates.Count == 0)
    {
        SetState(Upload506MasterState.ReadyForSave);
        return(true);
    }

    SetState(Upload506MasterState.HasDuplicates);
    return(false);
}
// Prompts for an XML file and replaces the current Duplicates collection
// with the deserialized entries. Does nothing when the dialog is cancelled.
private void HandleImport()
{
    var dlg = new OpenFileDialog();
    dlg.Title = "Import duplicates from file";

    if (dlg.ShowDialog() != DialogResult.OK)
    {
        return;
    }

    using (var c = new CursorHelper(this))
    using (Stream reader = new FileStream(dlg.FileName, FileMode.Open))
    {
        var serializer = new XmlSerializer(typeof(ObservableCollection <SingleFileEntry>));
        var imported = (ObservableCollection <SingleFileEntry>)serializer.Deserialize(reader);

        Duplicates.Clear();
        foreach (var entry in imported)
        {
            Duplicates.Add(entry);
        }
    }
}
// Dispatches the list to the close handler matching its Duplicates mode.
// All entries are assumed to share one mode; the first entry decides.
private void CloseDuplicates(List <TextObjectBase> list)
{
    if (list.Count == 0)
    {
        return;
    }

    switch (list[0].Duplicates)
    {
        case Duplicates.Clear:
            CloseDuplicatesClear(list);
            break;

        case Duplicates.Hide:
            CloseDuplicatesHide(list);
            break;

        case Duplicates.Merge:
            CloseDuplicatesMerge(list);
            break;
    }
}
// Serializes this process element's properties. Inactive processes write an
// empty object; otherwise each scalar is written unless a mapping overrides
// it, and each object property is written only when it is serializable.
public override void WritePropertiesData(DataWriter writer)
{
    writer.WriteStartObject(Name);
    base.WritePropertiesData(writer);

    if (Status == Core.Process.ProcessStatus.Inactive)
    {
        writer.WriteFinishObject();
        return;
    }

    // Same write order as before; only the repetition is factored out.
    WriteUnmappedGuid(writer, "SchemaId", SchemaId);
    WriteUnmappedGuid(writer, "DuplicateSchemaId", DuplicateSchemaId);
    WriteUnmappedGuid(writer, "EditPageId", EditPageId);
    WriteUnmappedGuid(writer, "RecordId", RecordId);
    WriteUnmappedGuid(writer, "ContactSchemaId", ContactSchemaId);
    WriteUnmappedGuid(writer, "AccountSchemaId", AccountSchemaId);
    WriteIfSerializable(writer, "Duplicates", Duplicates);
    WriteIfSerializable(writer, "SearchParameters", SearchParameters);
    WriteUnmappedBool(writer, "DuplicatesFound", DuplicatesFound);
    WriteIfSerializable(writer, "IdConstraints", IdConstraints);
    WriteUnmappedBool(writer, "ReadSearchParameters", ReadSearchParameters);
    WriteIfSerializable(writer, "SearchParameterNames", SearchParameterNames);
    WriteUnmappedBool(writer, "InsertResults", InsertResults);

    writer.WriteFinishObject();
}

// Writes a Guid property (default Guid.Empty) unless a mapping overrides it.
private void WriteUnmappedGuid(DataWriter writer, string name, Guid value)
{
    if (!HasMapping(name))
    {
        writer.WriteValue(name, value, Guid.Empty);
    }
}

// Writes a bool property (default false) unless a mapping overrides it.
private void WriteUnmappedBool(DataWriter writer, string name, bool value)
{
    if (!HasMapping(name))
    {
        writer.WriteValue(name, value, false);
    }
}

// Writes an object property only when it is non-null and serializable
// (either [Serializable] or implementing ISerializable).
private static void WriteIfSerializable(DataWriter writer, string name, object value)
{
    if (value == null)
    {
        return;
    }

    var type = value.GetType();
    if (type.IsSerializable || type.GetInterface("ISerializable") != null)
    {
        writer.WriteSerializableObjectValue(name, value, null);
    }
}
// Computes the duplicate-character count for the posted input string,
// exposes it to the view via ViewBag, and renders the default view.
public IActionResult DuplicatesInString(Duplicates dup)
{
    ViewBag.DuplicateCount = dup.GetDuplicateCount(dup.InputString);
    return(View());
}
// BackgroundWorker handler: runs the duplicate query off the UI thread,
// then marshals the results into the Duplicates collection via the
// dispatcher before restoring the idle UI state.
private void DoWorkEvent(object sender, DoWorkEventArgs e)
{
    CanExecuteCancel = true;
    IsIdle = false;

    using (var c = new CursorHelper(this))
    {
        // Fall back to all files when no filter is configured.
        string effectiveFilter = string.IsNullOrEmpty(filter) ? "*.*" : filter;
        var results = queryService.QueryDuplicates(bgw, e, startPath, effectiveFilter);

        // Collection updates must happen on the UI thread.
        System.Windows.Application.Current.Dispatcher.Invoke(
            DispatcherPriority.Background,
            new ThreadStart(delegate
            {
                foreach (var duplicate in results)
                {
                    Duplicates.Add(duplicate);
                }
            }));
    }

    ProgressPercent = 0;
    CanExecuteCancel = false;
    IsIdle = true;
}
// Captures the flatten configuration used later by prepare()/execute().
public Plan(Duplicates dups, bool keepFolders, bool recursive)
{
    this.recursive = recursive;
    this.keepFolders = keepFolders;
    this.dups = dups;
}
// Discards any previous results, then starts the background scan worker.
private void HandleStart()
{
    Duplicates.Clear();
    bgw.RunWorkerAsync(progressBar);
}