/// <summary>
/// Duplicate the current image's record (and, if detections exist, its detections and
/// classifications), then refresh the file display.
/// Note: this method shouldn't normally be invocable when the conditions aren't met, as the
/// Edit|Duplicate menu item will be disabled, but we check anyway as a safeguard.
/// </summary>
public async Task DuplicateCurrentRecord()
{
    // We only allow duplication if we are displaying a single image in the main view
    if (this.IsDisplayingSingleImage() == false)
    {
        return;
    }

    // Create a duplicate of the current image's row and insert it into the file data table
    ImageRow row = this.DataHandler.ImageCache.Current;
    FileInfo fileInfo = new FileInfo(row.File);
    ImageRow duplicate = row.DuplicateRowWithCoreValues(this.DataHandler.FileDatabase.FileTable.NewRow(fileInfo));
    this.DataHandler.FileDatabase.AddFiles(new List<ImageRow> { duplicate }, null);

    if (GlobalReferences.DetectionsExists)
    {
        // Copy the original row's detections/classifications onto the just-inserted duplicate
        this.DuplicateDetectionsAndClassifications(row);
    }

    await this.FilesSelectAndShowAsync();
    this.TryFileShowWithoutSliderCallback(DirectionEnum.Next);
}

/// <summary>
/// Copy the detections (and each detection's classifications) associated with sourceRow onto
/// the duplicate row most recently inserted into the file data table, then refresh the
/// in-memory detections/classifications tables.
/// </summary>
/// <param name="sourceRow">The original image row whose detections should be duplicated.</param>
private void DuplicateDetectionsAndClassifications(ImageRow sourceRow)
{
    // The duplicate was just inserted, so its ID is the last inserted row ID in the file data table
    int duplicateFileID = this.DataHandler.FileDatabase.GetLastInsertedRow(Constant.DBTables.FileData, Constant.DatabaseColumn.ID);

    // Get the detections associated with the source row, if any
    DataRow[] detectionRows = this.DataHandler.FileDatabase.GetDetectionsFromFileID(sourceRow.ID);
    foreach (DataRow detectionRow in detectionRows)
    {
        // Copy this detection's values, but associate them with the duplicate's file ID.
        // Detections are inserted one at a time (rather than batched) because each new
        // detection's ID — needed to associate its classifications — is retrieved via
        // GetLastInsertedRow immediately after its insert.
        List<ColumnTuple> detectionColumnsToUpdate = new List<ColumnTuple>()
        {
            new ColumnTuple(Constant.DetectionColumns.ImageID, duplicateFileID),
            new ColumnTuple(Constant.DetectionColumns.Category, (string)detectionRow[1]),
            new ColumnTuple(Constant.DetectionColumns.Conf, (float)Convert.ToDouble(detectionRow[2])),
            new ColumnTuple(Constant.DetectionColumns.BBox, (string)detectionRow[3]),
        };
        this.DataHandler.FileDatabase.InsertDetection(new List<List<ColumnTuple>> { detectionColumnsToUpdate });

        // Get the ID of the detection that was just inserted into the Detections table
        int detectionID = this.DataHandler.FileDatabase.GetLastInsertedRow(Constant.DBTables.Detections, Constant.DetectionColumns.DetectionID);

        // Now copy the classifications associated with the original detection, if any,
        // associating them with the newly inserted detection's ID
        // NOTE(review): detectionRow[0] is presumably the original detection's ID — confirm schema
        DataRow[] classificationDataTableRows = this.DataHandler.FileDatabase.GetClassificationsFromDetectionID((long)detectionRow[0]);
        if (classificationDataTableRows.Length > 0)
        {
            List<List<ColumnTuple>> classificationInsertionStatements = new List<List<ColumnTuple>>();
            foreach (DataRow classificationRow in classificationDataTableRows)
            {
                List<ColumnTuple> classificationColumnsToUpdate = new List<ColumnTuple>()
                {
                    new ColumnTuple(Constant.ClassificationColumns.DetectionID, detectionID),
                    new ColumnTuple(Constant.ClassificationColumns.Category, (string)classificationRow[1]),
                    new ColumnTuple(Constant.ClassificationColumns.Conf, (float)Convert.ToDouble(classificationRow[2]))
                };
                classificationInsertionStatements.Add(classificationColumnsToUpdate);
            }
            // Insert the classifications into the Classifications table
            this.DataHandler.FileDatabase.InsertClassifications(classificationInsertionStatements);
        }
    }

    // Regenerate the internal detections and classifications tables to include the new rows
    this.DataHandler.FileDatabase.RefreshDetectionsDataTable();
    this.DataHandler.FileDatabase.RefreshClassificationsDataTable();
    // TODO(review): check if this index creation is still needed here
    this.DataHandler.FileDatabase.IndexCreateForDetectionsAndClassificationsIfNotExists();
}