/// <summary>
/// PUT endpoint: replaces an existing DuplicateFile row with the supplied entity.
/// Returns 400 on validation/id mismatch, 404 if the row vanished under a
/// concurrency conflict, 204 on success.
/// </summary>
public IHttpActionResult PutDuplicateFile(int id, DuplicateFile duplicateFile)
{
    // Reject invalid payloads before touching the context.
    if (!ModelState.IsValid)
    {
        return BadRequest(ModelState);
    }

    // The route id must match the entity's key.
    if (id != duplicateFile.DuplicateFileId)
    {
        return BadRequest();
    }

    db.Entry(duplicateFile).State = EntityState.Modified;

    try
    {
        db.SaveChanges();
    }
    catch (DbUpdateConcurrencyException)
    {
        // Row deleted by someone else => 404; any other conflict bubbles up.
        if (!DuplicateFileExists(id))
        {
            return NotFound();
        }

        throw;
    }

    return StatusCode(HttpStatusCode.NoContent);
}
/// <summary>
/// Scans <paramref name="path"/> recursively, then compares every file against
/// every other file; two entries with the same hash in different directories
/// are reported as a duplicate pair. Returns a List&lt;DuplicateFile&gt;.
/// Side effects: repopulates the static fileDatabase/fileDatabaseClone fields
/// and appends human-readable lines to the static report string.
/// </summary>
public static dynamic Find(string path)
{
    fileDatabase = FilesInsideDir(path, true);
    List<DuplicateFile> toReturnList = new List<DuplicateFile>();

    // BUG FIX: the previous code assigned the clone by reference
    // (fileDatabaseClone = fileDatabase), so the subsequent Reverse()
    // mutated fileDatabase itself. GetRange produces a real shallow copy,
    // leaving the original scan order intact.
    fileDatabaseClone = fileDatabase.GetRange(0, fileDatabase.Count);
    fileDatabaseClone.Reverse();

    foreach (var item in fileDatabase)
    {
        foreach (var itemClone in fileDatabaseClone)
        {
            Debug.WriteLine($"{item.Name}|{itemClone.Name}");

            // Same content (hash) in a different directory => duplicate pair.
            // NOTE(review): each pair is reported twice (A/B and B/A), and
            // duplicates within the SAME directory are never reported —
            // preserved as-is; confirm whether that is intended.
            if (item.HashValue == itemClone.HashValue && item.Directory != itemClone.Directory)
            {
                report += $"{item.Name}|{item.Directory} è uguale a {itemClone.Name}|{itemClone.Directory}{Environment.NewLine}";

                var duplicate = new DuplicateFile();
                duplicate.Name = item.Name;
                duplicate.NameClone = itemClone.Name;
                duplicate.Directory = item.Directory;
                duplicate.DirectoryClone = itemClone.Directory;
                duplicate.HashCalc = item.HashValue;
                duplicate.HashCalcClone = itemClone.HashValue;
                toReturnList.Add(duplicate);
            }
        }
    }

    return toReturnList;
}
/// <summary>
/// Maps a server-side DuplicateFile to its client DTO, resolving both import
/// folders and — when an AniDB file record exists for the hash — the first
/// episode's number/type/name and its anime id/title.
/// </summary>
public static CL_DuplicateFile ToClient(this DuplicateFile duplicatefile)
{
    CL_DuplicateFile cl = new CL_DuplicateFile
    {
        DuplicateFileID = duplicatefile.DuplicateFileID,
        FilePathFile1 = duplicatefile.FilePathFile1,
        FilePathFile2 = duplicatefile.FilePathFile2,
        Hash = duplicatefile.Hash,
        ImportFolderIDFile1 = duplicatefile.ImportFolderIDFile1,
        ImportFolderIDFile2 = duplicatefile.ImportFolderIDFile2,
        ImportFolder1 = RepoFactory.ImportFolder.GetByID(duplicatefile.ImportFolderIDFile1),
        ImportFolder2 = RepoFactory.ImportFolder.GetByID(duplicatefile.ImportFolderIDFile2),
        DateTimeUpdated = duplicatefile.DateTimeUpdated
    };

    // FIX: the original called GetAniDBFile() twice, performing the repository
    // hash lookup a second time just to read Episodes. Cache the result.
    var anidbFile = duplicatefile.GetAniDBFile();
    if (anidbFile != null)
    {
        List<AniDB_Episode> eps = anidbFile.Episodes;
        if (eps.Count > 0)
        {
            cl.EpisodeNumber = eps[0].EpisodeNumber;
            cl.EpisodeType = eps[0].EpisodeType;
            cl.EpisodeName = eps[0].RomajiName;
            cl.AnimeID = eps[0].AnimeID;

            SVR_AniDB_Anime anime = RepoFactory.AniDB_Anime.GetByAnimeID(eps[0].AnimeID);
            if (anime != null)
            {
                cl.AnimeName = anime.MainTitle;
            }
        }
    }

    return cl;
}
/// <summary>
/// Creates a new file node under this directory, registers it in the local
/// contents and in the duplicate tracker, and returns it.
/// </summary>
public ClassFile AddFile(string name, double size, string md5)
{
    var created = new ClassFile(size, name, this, md5);
    _content.Add(created);
    DuplicateFile.Add(created);
    return created;
}
/// <summary>
/// POST handler for the delete confirmation form: removes the row (if it
/// still exists) and returns to the index.
/// </summary>
public ActionResult DeleteConfirmed(int id)
{
    DuplicateFile duplicateFile = db.DuplicateFiles.Find(id);

    // FIX: Find returns null when the row no longer exists (e.g. already
    // deleted in another session); Remove(null) would throw. Deleting an
    // absent row is treated as success.
    if (duplicateFile != null)
    {
        db.DuplicateFiles.Remove(duplicateFile);
        db.SaveChanges();
    }

    return RedirectToAction("Index");
}
/// <summary>
/// POST handler for the edit form: persists the modified entity when valid,
/// otherwise redisplays the form with validation messages.
/// </summary>
public ActionResult Edit([Bind(Include = "id,value")] DuplicateFile duplicateFile)
{
    // Guard clause: redisplay the form when validation fails.
    if (!ModelState.IsValid)
    {
        return View(duplicateFile);
    }

    db.Entry(duplicateFile).State = EntityState.Modified;
    db.SaveChanges();
    return RedirectToAction("Index");
}
/// <summary>
/// GET endpoint: 200 with the entity when found, 404 otherwise.
/// </summary>
public IHttpActionResult GetDuplicateFile(int id)
{
    DuplicateFile match = db.DuplicateFiles.Find(id);
    return match == null ? (IHttpActionResult)NotFound() : Ok(match);
}
/// <summary>
/// POST handler for the create form: inserts the entity when valid,
/// otherwise redisplays the form with validation messages.
/// </summary>
public ActionResult Create([Bind(Include = "id,value")] DuplicateFile duplicateFile)
{
    // Guard clause: redisplay the form when validation fails.
    if (!ModelState.IsValid)
    {
        return View(duplicateFile);
    }

    db.DuplicateFiles.Add(duplicateFile);
    db.SaveChanges();
    return RedirectToAction("Index");
}
/// <summary>
/// POST endpoint: inserts the entity and answers 201 Created with a Location
/// header pointing at the new resource; 400 on validation failure.
/// </summary>
public IHttpActionResult PostDuplicateFile(DuplicateFile duplicateFile)
{
    if (!ModelState.IsValid)
    {
        return BadRequest(ModelState);
    }

    db.DuplicateFiles.Add(duplicateFile);
    db.SaveChanges();

    var routeValues = new { id = duplicateFile.DuplicateFileId };
    return CreatedAtRoute("DefaultApi", routeValues, duplicateFile);
}
/// <summary>
/// Inserts or updates the given DuplicateFile inside a short-lived
/// NHibernate session and transaction.
/// </summary>
public void Save(DuplicateFile obj)
{
    using (var session = JMMService.SessionFactory.OpenSession())
    using (var transaction = session.BeginTransaction())
    {
        session.SaveOrUpdate(obj);
        transaction.Commit();
    }
}
// GET: DuplicateFiles/Delete/5
/// <summary>
/// Shows the delete-confirmation view for the selected record.
/// 400 when no id was supplied, 404 when the record does not exist.
/// </summary>
public ActionResult Delete(int?id)
{
    if (!id.HasValue)
    {
        return new HttpStatusCodeResult(HttpStatusCode.BadRequest);
    }

    DuplicateFile entity = db.DuplicateFiles.Find(id);
    if (entity == null)
    {
        return HttpNotFound();
    }

    return View(entity);
}
/// <summary>
/// DELETE endpoint: removes the row and echoes the deleted entity back
/// (200); 404 when it does not exist.
/// </summary>
public IHttpActionResult DeleteDuplicateFile(int id)
{
    DuplicateFile target = db.DuplicateFiles.Find(id);
    if (target == null)
    {
        return NotFound();
    }

    db.DuplicateFiles.Remove(target);
    db.SaveChanges();
    return Ok(target);
}
/// <summary>
/// Deletes the DuplicateFile with the given id inside its own NHibernate
/// session/transaction. A no-op (nothing committed) when the row is absent.
/// </summary>
public void Delete(int id)
{
    using (var session = JMMService.SessionFactory.OpenSession())
    using (var transaction = session.BeginTransaction())
    {
        DuplicateFile existing = GetByID(id);
        if (existing == null)
        {
            // Absent row: matches the original behavior of not committing.
            return;
        }

        session.Delete(existing);
        transaction.Commit();
    }
}
/// <summary>
/// Returns one DuplicateFile per file name that occurs more than once in
/// <paramref name="files"/>, carrying the name and its occurrence count.
/// </summary>
public List <DuplicateFile> FindDuplicates(List <FileData> files)
{
    // FIX: the original re-counted the entire list for every distinct name
    // (O(n * distinct)). A single GroupBy pass counts each name once.
    // GroupBy preserves first-occurrence order, matching Distinct().
    return files
        .GroupBy(f => f.Name)
        .Select(g => new { g.Key, Count = g.Count() })
        .Where(g => g.Count > 1)
        .Select(g => new DuplicateFile(g.Key, g.Count))
        .ToList();
}
/// <summary>
/// Looks up the AniDB file record that shares this duplicate's hash.
/// </summary>
public static SVR_AniDB_File GetAniDBFile(this DuplicateFile duplicatefile)
{
    return RepoFactory.AniDB_File.GetByHash(duplicatefile.Hash);
}
/// <summary>
/// Binds the owner to the duplicate file it is being compared against.
/// </summary>
public ComparisonPair(DuplicationOwner owner, DuplicateFile duplicateFile)
{
    this.Owner = owner;
    this.DuplicateFile = duplicateFile;
}
// Hashes a newly-discovered video file and records it in the database:
//   1. resolves the import folder + relative path for FileName;
//   2. for local folders, polls up to 60s for read access and waits out any
//      external writer (FileModified) before proceeding;
//   3. reuses a known hash from the CrossRef table or the local
//      FileNameHash cache when possible, otherwise hashes the file
//      (ED2K/CRC32/MD5/SHA1 via FileHashHelper);
//   4. if another VideoLocal already has this hash, merges info into it and
//      records a DuplicateFile row instead of deleting anything;
//   5. refreshes the filename->hash cache and media info, then queues a
//      CommandRequest_ProcessFile for the resulting VideoLocal.
// NOTE(review): control flow below is heavily dependent on the exact order
// of the nested BeginAddOrUpdate transactions (txn / txn_vl) and their
// Commit calls — left byte-identical on purpose; several "// ..." comments
// in this flattened text have swallowed the code that followed them on the
// original source lines.
private void ProcessFile_LocalInfo() { // hash and read media info for file int nshareID = -1; (SVR_ImportFolder folder, string filePath) = VideoLocal_PlaceRepository.GetFromFullPath(FileName); if (folder == null) { logger.Error($"Unable to locate Import Folder for {FileName}"); return; } IFileSystem f = folder.FileSystem; if (f == null) { logger.Error("Unable to open filesystem for: {0}", FileName); return; } long filesize = 0; if (folder.CloudID == null) // Local Access { if (!File.Exists(FileName)) { logger.Error("File does not exist: {0}", FileName); return; } int numAttempts = 0; // Wait 1 minute before giving up on trying to access the file while ((filesize = CanAccessFile(FileName)) == 0 && (numAttempts < 60)) { numAttempts++; Thread.Sleep(1000); logger.Error($@"Failed to access, (or filesize is 0) Attempt # {numAttempts}, {FileName}"); } // if we failed to access the file, get ouuta here if (numAttempts >= 60) { logger.Error("Could not access file: " + FileName); return; } //For systems with no locking while (FileModified(FileName, 3)) { Thread.Sleep(1000); logger.Error($@"An external process is modifying the file, {FileName}"); } } IObject source = f.Resolve(FileName); if (source == null || source.Status != Status.Ok || !(source is IFile source_file)) { logger.Error("Could not access file: " + FileName); return; } if (folder.CloudID.HasValue) { filesize = source_file.Size; } nshareID = folder.ImportFolderID; // check if we have already processed this file SVR_VideoLocal_Place vlocalplace = Repo.Instance.VideoLocal_Place.GetByFilePathAndImportFolderID(filePath, nshareID); SVR_VideoLocal vlocal = null; var filename = Path.GetFileName(filePath); if (vlocalplace != null) { vlocal = vlocalplace.VideoLocal; if (vlocal != null) { logger.Trace("VideoLocal record found in database: {0}", FileName); // This will only happen with DB corruption, so just clean up the mess. 
if (vlocalplace.FullServerPath == null) { if (vlocal.Places.Count == 1) { Repo.Instance.VideoLocal.Delete(vlocal); vlocal = null; } Repo.Instance.VideoLocal_Place.Delete(vlocalplace); vlocalplace = null; } if (vlocal != null && ForceHash) { vlocal.FileSize = filesize; vlocal.DateTimeUpdated = DateTime.Now; } } } bool duplicate = false; using (var txn = Repo.Instance.VideoLocal.BeginAddOrUpdate(() => vlocal, () => { logger.Trace("No existing VideoLocal, creating temporary record"); return(new SVR_VideoLocal { DateTimeUpdated = DateTime.Now, DateTimeCreated = DateTimeUpdated, FileName = filename, FileSize = filesize, Hash = string.Empty, CRC32 = string.Empty, MD5 = source_file?.MD5?.ToUpperInvariant() ?? string.Empty, SHA1 = source_file?.SHA1?.ToUpperInvariant() ?? string.Empty, IsIgnored = 0, IsVariation = 0 }); })) { if (vlocalplace == null) { logger.Trace("No existing VideoLocal_Place, creating a new record"); vlocalplace = new SVR_VideoLocal_Place { FilePath = filePath, ImportFolderID = nshareID, ImportFolderType = folder.ImportFolderType }; // Make sure we have an ID vlocalplace = Repo.Instance.VideoLocal_Place.BeginAdd(vlocalplace).Commit(); } using (var txn_vl = Repo.Instance.VideoLocal_Place.BeginAddOrUpdate(() => vlocalplace)) { // check if we need to get a hash this file if (string.IsNullOrEmpty(txn.Entity.Hash) || ForceHash) { logger.Trace("No existing hash in VideoLocal, checking XRefs"); if (!ForceHash) { // try getting the hash from the CrossRef List <CrossRef_File_Episode> crossRefs = Repo.Instance.CrossRef_File_Episode.GetByFileNameAndSize(filename, txn.Entity.FileSize); if (crossRefs.Any()) { txn.Entity.Hash = crossRefs[0].Hash; txn.Entity.HashSource = (int)HashSource.DirectHash; } } // try getting the hash from the LOCAL cache if (!ForceHash && string.IsNullOrEmpty(txn.Entity.Hash)) { List <FileNameHash> fnhashes = Repo.Instance.FileNameHash.GetByFileNameAndSize(filename, txn.Entity.FileSize); if (fnhashes != null && fnhashes.Count > 1) { // if we 
have more than one record it probably means there is some sort of corruption // lets delete the local records foreach (FileNameHash fnh in fnhashes) { Repo.Instance.FileNameHash.Delete(fnh.FileNameHashID); } } // reinit this to check if we erased them fnhashes = Repo.Instance.FileNameHash.GetByFileNameAndSize(filename, txn.Entity.FileSize); if (fnhashes != null && fnhashes.Count == 1) { logger.Trace("Got hash from LOCAL cache: {0} ({1})", FileName, fnhashes[0].Hash); txn.Entity.Hash = fnhashes[0].Hash; txn.Entity.HashSource = (int)HashSource.WebCacheFileName; } } if (string.IsNullOrEmpty(txn.Entity.Hash)) { FillVideoHashes(txn.Entity); } //Cloud and no hash, Nothing to do, except maybe Get the mediainfo.... if (string.IsNullOrEmpty(txn.Entity.Hash) && folder.CloudID.HasValue) { logger.Trace("No Hash found for cloud " + filename + " putting in videolocal table with empty ED2K"); vlocal = txn.Commit(true); using (var upd = Repo.Instance.VideoLocal_Place.BeginAddOrUpdate(() => vlocalplace)) { upd.Entity.VideoLocalID = vlocal.VideoLocalID; vlocalplace = upd.Commit(); } if (vlocalplace.RefreshMediaInfo()) { txn_vl.Commit(true); } return; } // hash the file if (string.IsNullOrEmpty(txn.Entity.Hash) || ForceHash) { logger.Info("Hashing File: {0}", FileName); ShokoService.CmdProcessorHasher.QueueState = PrettyDescriptionHashing; DateTime start = DateTime.Now; // update the VideoLocal record with the Hash, since cloud support we calculate everything var hashes = FileHashHelper.GetHashInfo(FileName.Replace("/", $"{System.IO.Path.DirectorySeparatorChar}"), true, ShokoServer.OnHashProgress, true, true, true); TimeSpan ts = DateTime.Now - start; logger.Trace("Hashed file in {0:#0.0} seconds --- {1} ({2})", ts.TotalSeconds, FileName, Utils.FormatByteSize(txn.Entity.FileSize)); txn.Entity.Hash = hashes.ED2K?.ToUpperInvariant(); txn.Entity.CRC32 = hashes.CRC32?.ToUpperInvariant(); txn.Entity.MD5 = hashes.MD5?.ToUpperInvariant(); txn.Entity.SHA1 = hashes.SHA1?.ToUpperInvariant(); 
txn.Entity.HashSource = (int)HashSource.DirectHash; } FillMissingHashes(txn.Entity); // We should have a hash by now // before we save it, lets make sure there is not any other record with this hash (possible duplicate file) SVR_VideoLocal tlocal = Repo.Instance.VideoLocal.GetByHash(txn.Entity.Hash); bool changed = false; if (tlocal != null) { logger.Trace("Found existing VideoLocal with hash, merging info from it"); // Aid with hashing cloud. Merge hashes and save, regardless of duplicate file changed = tlocal.MergeInfoFrom(txn.Entity); vlocal = tlocal; List <SVR_VideoLocal_Place> preps = vlocal.Places.Where( a => a.ImportFolder.CloudID == folder.CloudID && !vlocalplace.FullServerPath.Equals(a.FullServerPath)).ToList(); foreach (var prep in preps) { if (prep == null) { continue; } // clean up, if there is a 'duplicate file' that is invalid, remove it. if (prep.FullServerPath == null) { Repo.Instance.VideoLocal_Place.Delete(prep); } else { FileSystemResult dupFileSystemResult = (FileSystemResult)prep.ImportFolder?.FileSystem?.Resolve(prep.FullServerPath); if (dupFileSystemResult == null || dupFileSystemResult.Status != Status.Ok) { Repo.Instance.VideoLocal_Place.Delete(prep); } } } var dupPlace = txn.Entity.Places.FirstOrDefault( a => a.ImportFolder.CloudID == folder.CloudID && !vlocalplace.FullServerPath.Equals(a.FullServerPath)); if (dupPlace != null) { logger.Warn("Found Duplicate File"); logger.Warn("---------------------------------------------"); logger.Warn($"New File: {vlocalplace.FullServerPath}"); logger.Warn($"Existing File: {dupPlace.FullServerPath}"); logger.Warn("---------------------------------------------"); // check if we have a record of this in the database, if not create one List <DuplicateFile> dupFiles = Repo.Instance.DuplicateFile.GetByFilePathsAndImportFolder( vlocalplace.FilePath, dupPlace.FilePath, vlocalplace.ImportFolderID, dupPlace.ImportFolderID); if (dupFiles.Count == 0) { dupFiles = 
Repo.Instance.DuplicateFile.GetByFilePathsAndImportFolder(dupPlace.FilePath, vlocalplace.FilePath, dupPlace.ImportFolderID, vlocalplace.ImportFolderID); } if (dupFiles.Count == 0) { DuplicateFile dup = new DuplicateFile { DateTimeUpdated = DateTime.Now, FilePathFile1 = vlocalplace.FilePath, FilePathFile2 = dupPlace.FilePath, ImportFolderIDFile1 = vlocalplace.ImportFolderID, ImportFolderIDFile2 = dupPlace.ImportFolderID, Hash = txn.Entity.Hash }; Repo.Instance.DuplicateFile.BeginAdd(dup).Commit(); } //Notify duplicate, don't delete duplicate = true; } } if (!duplicate || changed) { vlocal = txn.Commit(); } } } using (var upd = Repo.Instance.VideoLocal_Place.BeginAddOrUpdate(() => vlocalplace)) { upd.Entity.VideoLocalID = vlocal.VideoLocalID; upd.Commit(); } } if (duplicate) { CommandRequest_ProcessFile cr_procfile3 = new CommandRequest_ProcessFile(vlocal.VideoLocalID, false); cr_procfile3.Save(); return; } // also save the filename to hash record // replace the existing records just in case it was corrupt List <FileNameHash> fnhashes2 = Repo.Instance.FileNameHash.GetByFileNameAndSize(filename, vlocal.FileSize); if (fnhashes2 != null && fnhashes2.Count > 1) { // if we have more than one record it probably means there is some sort of corruption // lets delete the local records foreach (FileNameHash fnh in fnhashes2) { Repo.Instance.FileNameHash.Delete(fnh.FileNameHashID); } } using (var upd = Repo.Instance.FileNameHash.BeginAddOrUpdate(() => fnhashes2?.Count == 1 ? 
fnhashes2[0] : null)) { upd.Entity.FileName = filename; upd.Entity.FileSize = vlocal.FileSize; upd.Entity.Hash = vlocal.Hash; upd.Entity.DateTimeUpdated = DateTime.Now; upd.Commit(); } if ((vlocal.Media == null) || vlocal.MediaVersion < SVR_VideoLocal.MEDIA_VERSION || vlocal.Duration == 0) { if (vlocalplace.RefreshMediaInfo()) { using (var upd = Repo.Instance.VideoLocal.BeginAddOrUpdate(() => vlocalplace.VideoLocal)) upd.Commit(true); } } // now add a command to process the file CommandRequest_ProcessFile cr_procfile = new CommandRequest_ProcessFile(vlocal.VideoLocalID, false); cr_procfile.Save(); }
/// <summary>
/// Resolves the import folder referenced by the first file of the pair.
/// </summary>
public static SVR_ImportFolder GetImportFolder1(this DuplicateFile duplicatefile)
{
    return Repo.Instance.ImportFolder.GetByID(duplicatefile.ImportFolderIDFile1);
}
/// <summary>
/// Detaches this node from its parent directory, then notifies the
/// duplicate tracker that the file is gone.
/// </summary>
public override void DeleteFile()
{
    Parent.Remove(this);
    DuplicateFile.DeleteFile(this);
}
// Async hashing command: hashes the file backing this command and records
// it in the database, reporting progress along the way.
//   1. for local folders, polls up to 60s for read access, then waits a
//      configurable interval and re-checks the modified time/size to make
//      sure no external process is still writing;
//   2. reuses a known hash from the CrossRef table or the FileNameHash
//      cache when possible, otherwise runs the Hasher (awaited, cancellable);
//   3. if another VideoLocal already has this hash, merges info into it and
//      records a DuplicateFile row instead of deleting anything;
//   4. refreshes the filename->hash cache and media info, then queues a
//      CmdServerProcessFile. Errors are funneled through ReportError.
// NOTE(review): ordering of txn.Commit / upd.Commit calls is significant;
// left byte-identical on purpose. Several "// ..." comments in this
// flattened text have swallowed the code that followed them on the original
// source lines (e.g. the cache-hit branch appears to use fnhash without a
// null check — confirm against the real file before relying on it).
public override async Task RunAsync(IProgress <ICommand> progress = null, CancellationToken token = default(CancellationToken)) { logger.Trace($"Checking File For Hashes: {File.FullName}"); try { ReportInit(progress); // hash and read media info for file int nshareID; long filesize = 0; if (_importFolder.CloudID == null) // Local Access { if (!System.IO.File.Exists(File.FullName)) { ReportError(progress, $"File does not exist: {File.FullName}"); return; } int numAttempts = 0; bool writeAccess = _importFolder.IsDropSource == 1; // Wait 1 minute before giving up on trying to access the file // first only do read to not get in something's way while ((filesize = CanAccessFile(File.FullName, false)) == 0 && (numAttempts < 60)) { numAttempts++; Thread.Sleep(1000); logger.Trace($@"Failed to access, (or filesize is 0) Attempt # {numAttempts}, {File.FullName}"); } // if we failed to access the file, get ouuta here if (numAttempts >= 60) { ReportError(progress, $"Could not access file: {File.FullName}"); return; } // At least 1s between to ensure that size has the chance to change // TODO make this a setting to allow fine tuning on various configs // TODO Make this able to be disabled. It adds 1.5s to hashing just waiting for the Linux/NAS use case int seconds = 8; int waitTime = seconds * 1000 / 2; Thread.Sleep(waitTime); numAttempts = 0; //For systems with no locking // TODO make this a setting as well while (FileModified(File.FullName, seconds, ref filesize, writeAccess) && numAttempts < 60) { numAttempts++; Thread.Sleep(waitTime); // Only show if it's more than 'seconds' past if (numAttempts != 0 && numAttempts * 2 % seconds == 0) { logger.Warn($@"The modified date is too soon. Waiting to ensure that no processes are writing to it. 
{numAttempts}/60 {File.FullName}"); } } // if we failed to access the file, get ouuta here if (numAttempts >= 60) { ReportError(progress, $"Could not access file: {File.FullName}"); return; } } ReportUpdate(progress, 10); if (_importFolder.CloudID.HasValue) { filesize = File.Size; } nshareID = _importFolder.ImportFolderID; // check if we have already processed this file SVR_VideoLocal_Place vlocalplace = Repo.Instance.VideoLocal_Place.GetByFilePathAndImportFolderID(_filePath, nshareID); SVR_VideoLocal vlocal = null; var filename = Path.GetFileName(_filePath); if (vlocalplace != null) { vlocal = vlocalplace.VideoLocal; if (vlocal != null) { logger.Trace("VideoLocal record found in database: {0}", File.FullName); // This will only happen with DB corruption, so just clean up the mess. if (vlocalplace.FullServerPath == null) { if (vlocal.Places.Count == 1) { Repo.Instance.VideoLocal.Delete(vlocal); vlocal = null; } Repo.Instance.VideoLocal_Place.Delete(vlocalplace); vlocalplace = null; } if (vlocal != null && Force) { vlocal.FileSize = filesize; vlocal.DateTimeUpdated = DateTime.Now; } } } bool duplicate = false; SVR_VideoLocal vlocal1 = vlocal; using (var txn = Repo.Instance.VideoLocal.BeginAddOrUpdate(vlocal1?.VideoLocalID ?? 0, () => { logger.Trace("No existing VideoLocal, creating temporary record"); return(new SVR_VideoLocal { DateTimeUpdated = DateTime.Now, DateTimeCreated = DateTime.Now, FileSize = filesize, Hash = string.Empty, CRC32 = string.Empty, MD5 = File?.MD5?.ToUpperInvariant() ?? string.Empty, SHA1 = File?.SHA1?.ToUpperInvariant() ?? 
string.Empty, IsIgnored = 0, IsVariation = 0 }); })) { vlocal = txn.Entity; if (vlocalplace == null) { logger.Trace("No existing VideoLocal_Place, creating a new record"); vlocalplace = new SVR_VideoLocal_Place { FilePath = _filePath, ImportFolderID = nshareID, ImportFolderType = _importFolder.ImportFolderType }; // Make sure we have an ID vlocalplace = Repo.Instance.VideoLocal_Place.BeginAdd(vlocalplace).Commit(); } // check if we need to get a hash this file // IDEs might warn of possible null. It is set in the lambda above, so it shouldn't ever be null if (string.IsNullOrEmpty(vlocal.Hash) || Force) { logger.Trace("No existing hash in VideoLocal, checking XRefs"); if (!Force) { // try getting the hash from the CrossRef List <CrossRef_File_Episode> crossRefs = Repo.Instance.CrossRef_File_Episode.GetByFileNameAndSize(filename, vlocal.FileSize); if (crossRefs.Any()) { vlocal.Hash = crossRefs[0].Hash; vlocal.HashSource = (int)HashSource.DirectHash; } } // try getting the hash from the LOCAL cache if (!Force && string.IsNullOrEmpty(vlocal.Hash)) { Repo.Instance.FileNameHash.FindAndDelete(() => { List <FileNameHash> fnhashes = Repo.Instance.FileNameHash.GetByFileNameAndSize(filename, vlocal.FileSize); if (fnhashes != null && fnhashes.Count > 1) { // if we have more than one record it probably means there is some sort of corruption // lets delete the local records return(fnhashes); } return(new List <FileNameHash>()); }); // reinit this to check if we erased them FileNameHash fnhash = Repo.Instance.FileNameHash.GetByFileNameAndSize(filename, vlocal.FileSize).FirstOrDefault(); logger.Trace("Got hash from LOCAL cache: {0} ({1})", File.FullName, fnhash.Hash); vlocal.Hash = fnhash.Hash; vlocal.HashSource = (int)HashSource.WebCacheFileName; } if (string.IsNullOrEmpty(vlocal.Hash)) { FillVideoHashes(vlocal); } //Cloud and no hash, Nothing to do, except maybe Get the mediainfo.... 
if (string.IsNullOrEmpty(vlocal.Hash) && _importFolder.CloudID.HasValue) { logger.Trace("No Hash found for cloud " + filename + " putting in videolocal table with empty ED2K"); vlocal = txn.Commit(true); int vlpid = vlocalplace.VideoLocalID; using (var upd = Repo.Instance.VideoLocal_Place.BeginAddOrUpdate(vlpid)) { upd.Entity.VideoLocalID = vlocal.VideoLocalID; vlocalplace = upd.Commit(); } if (vlocalplace.RefreshMediaInfo(vlocal)) { txn.Commit(true); } ReportFinish(progress); return; } // hash the file if (string.IsNullOrEmpty(vlocal.Hash) || Force) { logger.Info("Hashing File: {0}", File.FullName); _hashingState = true; DateTime start = DateTime.Now; // update the VideoLocal record with the Hash, since cloud support we calculate everything Hasher h = new Hasher(File, HashAll); string error = await h.RunAsync(new ChildProgress(20, 60, this, progress), token); if (error != null) { ReportError(progress, error); return; } TimeSpan ts = DateTime.Now - start; logger.Trace("Hashed file in {0:#0.0} seconds --- {1} ({2})", ts.TotalSeconds, File.FullName, Utils.FormatByteSize(vlocal.FileSize)); vlocal.Hash = h.Result.GetHash(HashTypes.ED2K); vlocal.CRC32 = h.Result.GetHash(HashTypes.CRC); vlocal.MD5 = h.Result.GetHash(HashTypes.MD5); vlocal.SHA1 = h.Result.GetHash(HashTypes.SHA1); vlocal.HashSource = (int)HashSource.DirectHash; } _hashingState = false; await FillMissingHashes(vlocal, token, progress); // We should have a hash by now // before we save it, lets make sure there is not any other record with this hash (possible duplicate file) // TODO Check this case. I'm not sure how EF handles changing objects that we are working on SVR_VideoLocal tlocal = Repo.Instance.VideoLocal.GetByHash(vlocal.Hash); bool changed = false; if (tlocal != null) { logger.Trace("Found existing VideoLocal with hash, merging info from it"); // Aid with hashing cloud. 
Merge hashes and save, regardless of duplicate file changed = tlocal.MergeInfoFrom(vlocal); vlocal = tlocal; List <SVR_VideoLocal_Place> preps = vlocal.Places.Where(a => a.ImportFolder.CloudID == _importFolder.CloudID && !vlocalplace.FullServerPath.Equals(a.FullServerPath)).ToList(); foreach (var prep in preps) { if (prep == null) { continue; } // clean up, if there is a 'duplicate file' that is invalid, remove it. if (prep.FullServerPath == null) { Repo.Instance.VideoLocal_Place.Delete(prep); } else { IResult dupFileSystemResult = prep.ImportFolder?.FileSystem?.Resolve(prep.FullServerPath); if (dupFileSystemResult == null || dupFileSystemResult.Status != NutzCode.CloudFileSystem.Status.Ok) { Repo.Instance.VideoLocal_Place.Delete(prep); } } } var dupPlace = vlocal.Places.FirstOrDefault(a => a.ImportFolder.CloudID == _importFolder.CloudID && !vlocalplace.FullServerPath.Equals(a.FullServerPath)); ReportUpdate(progress, 85); if (dupPlace != null) { logger.Warn("Found Duplicate File"); logger.Warn("---------------------------------------------"); logger.Warn($"New File: {vlocalplace.FullServerPath}"); logger.Warn($"Existing File: {dupPlace.FullServerPath}"); logger.Warn("---------------------------------------------"); // check if we have a record of this in the database, if not create one List <DuplicateFile> dupFiles = Repo.Instance.DuplicateFile.GetByFilePathsAndImportFolder(vlocalplace.FilePath, dupPlace.FilePath, vlocalplace.ImportFolderID, dupPlace.ImportFolderID); if (dupFiles.Count == 0) { dupFiles = Repo.Instance.DuplicateFile.GetByFilePathsAndImportFolder(dupPlace.FilePath, vlocalplace.FilePath, dupPlace.ImportFolderID, vlocalplace.ImportFolderID); } if (dupFiles.Count == 0) { DuplicateFile dup = new DuplicateFile { DateTimeUpdated = DateTime.Now, FilePathFile1 = vlocalplace.FilePath, FilePathFile2 = dupPlace.FilePath, ImportFolderIDFile1 = vlocalplace.ImportFolderID, ImportFolderIDFile2 = dupPlace.ImportFolderID, Hash = vlocal.Hash }; 
Repo.Instance.DuplicateFile.BeginAdd(dup).Commit(); } //Notify duplicate, don't delete duplicate = true; } } if (!duplicate || changed) { vlocal = txn.Commit(); } } ReportUpdate(progress, 90); int vlplid = vlocalplace.VideoLocalID; using (var upd = Repo.Instance.VideoLocal_Place.BeginAddOrUpdate(vlplid)) { upd.Entity.VideoLocalID = vlocal.VideoLocalID; vlocalplace = upd.Commit(); } } if (duplicate) { Queue.Instance.Add(new CmdServerProcessFile(vlocal.VideoLocalID, false)); ReportFinish(progress); return; } // also save the filename to hash record // replace the existing records just in case it was corrupt Repo.Instance.FileNameHash.FindAndDelete(() => { List <FileNameHash> fnhashes = Repo.Instance.FileNameHash.GetByFileNameAndSize(filename, vlocal.FileSize); if (fnhashes != null && fnhashes.Count > 1) { // if we have more than one record it probably means there is some sort of corruption // lets delete the local records return(fnhashes); } return(new List <FileNameHash>()); }); ReportUpdate(progress, 95); using (var upd = Repo.Instance.FileNameHash.BeginAddOrUpdate(() => Repo.Instance.FileNameHash.GetByFileNameAndSize(filename, vlocal.FileSize).FirstOrDefault())) { upd.Entity.FileName = filename; upd.Entity.FileSize = vlocal.FileSize; upd.Entity.Hash = vlocal.Hash; upd.Entity.DateTimeUpdated = DateTime.Now; upd.Commit(); } if (vlocal.Media == null || vlocal.MediaVersion < SVR_VideoLocal.MEDIA_VERSION || vlocal.Duration == 0) { int vid = vlocal.VideoLocalID; using (var upd = Repo.Instance.VideoLocal.BeginAddOrUpdate(vid)) if (vlocalplace.RefreshMediaInfo(upd.Entity)) { vlocal = upd.Commit(true); } } // now add a command to process the file Queue.Instance.Add(new CmdServerProcessFile(vlocal.VideoLocalID, false)); ReportFinish(progress); } catch (Exception ex) { ReportError(progress, $"Error processing ServerHashFile: {File.FullName}\n{ex}", ex); } }
// Legacy (repository-class era) variant: hashes a file and records a
// VideoLocal row, returning it (or null on any access failure).
//   1. maps FileName onto an import share, polls up to 180s for access;
//   2. reuses a known hash from CrossRef_File_Episode or the FileNameHash
//      cache, otherwise hashes per the ServerSettings.Hash_* switches;
//   3. unlike the newer variants, on a hash collision this DELETES the
//      pre-existing VideoLocal row (after recording a DuplicateFile entry)
//      and keeps the new one;
//   4. creates/refreshes the VideoInfo media-info record, then queues a
//      CommandRequest_ProcessFile.
// NOTE(review): left byte-identical — several "// ..." comments in this
// flattened text have swallowed the code that followed them on the
// original source lines.
private VideoLocal ProcessFile_LocalInfo() { // hash and read media info for file int nshareID = -1; string filePath = ""; ImportFolderRepository repNS = new ImportFolderRepository(); List <ImportFolder> shares = repNS.GetAll(); DataAccessHelper.GetShareAndPath(FileName, shares, ref nshareID, ref filePath); if (!File.Exists(FileName)) { logger.Error("File does not exist: {0}", FileName); return(null); } int numAttempts = 0; // Wait 3 minutes seconds before giving up on trying to access the file while ((!CanAccessFile(FileName)) && (numAttempts < 180)) { numAttempts++; Thread.Sleep(1000); Console.WriteLine("Attempt # " + numAttempts.ToString()); } // if we failed to access the file, get ouuta here if (numAttempts == 180) { logger.Error("Could not access file: " + FileName); return(null); } // check if we have already processed this file VideoLocal vlocal = null; VideoLocalRepository repVidLocal = new VideoLocalRepository(); FileNameHashRepository repFNHash = new FileNameHashRepository(); List <VideoLocal> vidLocals = repVidLocal.GetByFilePathAndShareID(filePath, nshareID); FileInfo fi = new FileInfo(FileName); if (vidLocals.Count > 0) { vlocal = vidLocals[0]; logger.Trace("VideoLocal record found in database: {0}", vlocal.VideoLocalID); if (ForceHash) { vlocal.FileSize = fi.Length; vlocal.DateTimeUpdated = DateTime.Now; } } else { logger.Trace("VideoLocal, creating new record"); vlocal = new VideoLocal(); vlocal.DateTimeUpdated = DateTime.Now; vlocal.DateTimeCreated = vlocal.DateTimeUpdated; vlocal.FilePath = filePath; vlocal.FileSize = fi.Length; vlocal.ImportFolderID = nshareID; vlocal.Hash = ""; vlocal.CRC32 = ""; vlocal.MD5 = ""; vlocal.SHA1 = ""; vlocal.IsIgnored = 0; vlocal.IsVariation = 0; } // check if we need to get a hash this file Hashes hashes = null; if (string.IsNullOrEmpty(vlocal.Hash) || ForceHash) { // try getting the hash from the CrossRef if (!ForceHash) { CrossRef_File_EpisodeRepository repCrossRefs = new CrossRef_File_EpisodeRepository(); List 
<CrossRef_File_Episode> crossRefs = repCrossRefs.GetByFileNameAndSize(Path.GetFileName(vlocal.FilePath), vlocal.FileSize); if (crossRefs.Count == 1) { vlocal.Hash = crossRefs[0].Hash; vlocal.HashSource = (int)HashSource.DirectHash; } } // try getting the hash from the LOCAL cache if (!ForceHash && string.IsNullOrEmpty(vlocal.Hash)) { List <FileNameHash> fnhashes = repFNHash.GetByFileNameAndSize(Path.GetFileName(vlocal.FilePath), vlocal.FileSize); if (fnhashes != null && fnhashes.Count > 1) { // if we have more than one record it probably means there is some sort of corruption // lets delete the local records foreach (FileNameHash fnh in fnhashes) { repFNHash.Delete(fnh.FileNameHashID); } } if (fnhashes != null && fnhashes.Count == 1) { logger.Trace("Got hash from LOCAL cache: {0} ({1})", FileName, fnhashes[0].Hash); vlocal.Hash = fnhashes[0].Hash; vlocal.HashSource = (int)HashSource.WebCacheFileName; } } // hash the file if (string.IsNullOrEmpty(vlocal.Hash) || ForceHash) { DateTime start = DateTime.Now; logger.Trace("Calculating hashes for: {0}", FileName); // update the VideoLocal record with the Hash hashes = FileHashHelper.GetHashInfo(FileName, true, MainWindow.OnHashProgress, ServerSettings.Hash_CRC32, ServerSettings.Hash_MD5, ServerSettings.Hash_SHA1); TimeSpan ts = DateTime.Now - start; logger.Trace("Hashed file in {0} seconds --- {1} ({2})", ts.TotalSeconds.ToString("#0.0"), FileName, Utils.FormatByteSize(vlocal.FileSize)); vlocal.Hash = hashes.ed2k; vlocal.CRC32 = hashes.crc32; vlocal.MD5 = hashes.md5; vlocal.SHA1 = hashes.sha1; vlocal.HashSource = (int)HashSource.DirectHash; } // We should have a hash by now // before we save it, lets make sure there is not any other record with this hash (possible duplicate file) VideoLocal vidTemp = repVidLocal.GetByHash(vlocal.Hash); if (vidTemp != null) { // don't delete it, if it is actually the same record if (vidTemp.VideoLocalID != vlocal.VideoLocalID) { // delete the VideoLocal record logger.Warn("Deleting 
duplicate video file record"); logger.Warn("---------------------------------------------"); logger.Warn("Keeping record for: {0}", vlocal.FullServerPath); logger.Warn("Deleting record for: {0}", vidTemp.FullServerPath); logger.Warn("---------------------------------------------"); // check if we have a record of this in the database, if not create one DuplicateFileRepository repDups = new DuplicateFileRepository(); List <DuplicateFile> dupFiles = repDups.GetByFilePathsAndImportFolder(vlocal.FilePath, vidTemp.FilePath, vlocal.ImportFolderID, vidTemp.ImportFolderID); if (dupFiles.Count == 0) { dupFiles = repDups.GetByFilePathsAndImportFolder(vidTemp.FilePath, vlocal.FilePath, vidTemp.ImportFolderID, vlocal.ImportFolderID); } if (dupFiles.Count == 0) { DuplicateFile dup = new DuplicateFile(); dup.DateTimeUpdated = DateTime.Now; dup.FilePathFile1 = vlocal.FilePath; dup.FilePathFile2 = vidTemp.FilePath; dup.ImportFolderIDFile1 = vlocal.ImportFolderID; dup.ImportFolderIDFile2 = vidTemp.ImportFolderID; dup.Hash = vlocal.Hash; repDups.Save(dup); } repVidLocal.Delete(vidTemp.VideoLocalID); } } repVidLocal.Save(vlocal); // also save the filename to hash record // replace the existing records just in case it was corrupt FileNameHash fnhash = null; List <FileNameHash> fnhashes2 = repFNHash.GetByFileNameAndSize(Path.GetFileName(vlocal.FilePath), vlocal.FileSize); if (fnhashes2 != null && fnhashes2.Count > 1) { // if we have more than one record it probably means there is some sort of corruption // lets delete the local records foreach (FileNameHash fnh in fnhashes2) { repFNHash.Delete(fnh.FileNameHashID); } } if (fnhashes2 != null && fnhashes2.Count == 1) { fnhash = fnhashes2[0]; } else { fnhash = new FileNameHash(); } fnhash.FileName = Path.GetFileName(vlocal.FilePath); fnhash.FileSize = vlocal.FileSize; fnhash.Hash = vlocal.Hash; fnhash.DateTimeUpdated = DateTime.Now; repFNHash.Save(fnhash); } // now check if we have stored a VideoInfo record bool refreshMediaInfo = false; 
VideoInfoRepository repVidInfo = new VideoInfoRepository(); VideoInfo vinfo = repVidInfo.GetByHash(vlocal.Hash); if (vinfo == null) { refreshMediaInfo = true; vinfo = new VideoInfo(); vinfo.Hash = vlocal.Hash; vinfo.Duration = 0; vinfo.FileSize = fi.Length; vinfo.DateTimeUpdated = DateTime.Now; vinfo.FileName = filePath; vinfo.AudioBitrate = ""; vinfo.AudioCodec = ""; vinfo.VideoBitrate = ""; vinfo.VideoBitDepth = ""; vinfo.VideoCodec = ""; vinfo.VideoFrameRate = ""; vinfo.VideoResolution = ""; repVidInfo.Save(vinfo); } else { // check if we need to update the media info if (vinfo.VideoCodec.Trim().Length == 0) { refreshMediaInfo = true; } else { refreshMediaInfo = false; } } if (refreshMediaInfo) { logger.Trace("Getting media info for: {0}", FileName); MediaInfoResult mInfo = FileHashHelper.GetMediaInfo(FileName, true); vinfo.AudioBitrate = string.IsNullOrEmpty(mInfo.AudioBitrate) ? "" : mInfo.AudioBitrate; vinfo.AudioCodec = string.IsNullOrEmpty(mInfo.AudioCodec) ? "" : mInfo.AudioCodec; vinfo.DateTimeUpdated = vlocal.DateTimeUpdated; vinfo.Duration = mInfo.Duration; vinfo.FileName = filePath; vinfo.FileSize = fi.Length; vinfo.VideoBitrate = string.IsNullOrEmpty(mInfo.VideoBitrate) ? "" : mInfo.VideoBitrate; vinfo.VideoBitDepth = string.IsNullOrEmpty(mInfo.VideoBitDepth) ? "" : mInfo.VideoBitDepth; vinfo.VideoCodec = string.IsNullOrEmpty(mInfo.VideoCodec) ? "" : mInfo.VideoCodec; vinfo.VideoFrameRate = string.IsNullOrEmpty(mInfo.VideoFrameRate) ? "" : mInfo.VideoFrameRate; vinfo.VideoResolution = string.IsNullOrEmpty(mInfo.VideoResolution) ? "" : mInfo.VideoResolution; vinfo.FullInfo = string.IsNullOrEmpty(mInfo.FullInfo) ? "" : mInfo.FullInfo; repVidInfo.Save(vinfo); } // now add a command to process the file CommandRequest_ProcessFile cr_procfile = new CommandRequest_ProcessFile(vlocal.VideoLocalID, false); cr_procfile.Save(); return(vlocal); }
/// <summary>
/// Hashes the file referenced by <c>FileName</c> (local or cloud import folder),
/// de-duplicates it against existing VideoLocal records, persists the
/// VideoLocal / VideoLocal_Place / FileNameHash rows, and queues a
/// CommandRequest_ProcessFile for further processing.
/// </summary>
/// <returns>
/// The saved <c>VideoLocal_Place</c>, or <c>null</c> when the file cannot be
/// located inside an import folder or cannot be accessed.
/// </returns>
private VideoLocal_Place ProcessFile_LocalInfo()
{
    // hash and read media info for file
    int nshareID = -1;
    string filePath = "";

    // Resolve the import folder + folder-relative path for the full file path.
    Tuple<ImportFolder, string> tup = VideoLocal_PlaceRepository.GetFromFullPath(FileName);
    if (tup == null)
    {
        logger.Error($"Unable to locate file {FileName} inside the import folders");
        return null;
    }
    ImportFolder folder = tup.Item1;
    filePath = tup.Item2;
    IFileSystem f = tup.Item1.FileSystem;
    if (f == null)
    {
        logger.Error("Unable to open filesystem for: {0}", FileName);
        return null;
    }

    long filesize = 0;
    if (folder.CloudID == null) // Local Access
    {
        if (!File.Exists(FileName))
        {
            logger.Error("File does not exist: {0}", FileName);
            return null;
        }

        int numAttempts = 0;
        // Wait up to 3 minutes before giving up on trying to access the file
        // (another process may still be writing to / locking it).
        while ((filesize = CanAccessFile(FileName)) == 0 && (numAttempts < 180))
        {
            numAttempts++;
            Thread.Sleep(1000);
            Console.WriteLine("Attempt # " + numAttempts.ToString());
        }

        // if we failed to access the file, get outta here
        // BUGFIX: test the actual outcome (filesize) instead of the attempt counter;
        // the old `numAttempts == 180` check reported failure even when the file
        // became readable on the final attempt.
        if (filesize == 0)
        {
            logger.Error("Could not access file: " + FileName);
            return null;
        }
    }

    FileSystemResult<IObject> source = f.Resolve(FileName);
    if (source == null || !source.IsOk || (!(source.Result is IFile)))
    {
        logger.Error("Could not access file: " + FileName);
        return null;
    }
    IFile source_file = (IFile)source.Result;
    if (folder.CloudID.HasValue)
    {
        // Cloud providers report size via the resolved file object.
        filesize = source_file.Size;
    }
    nshareID = folder.ImportFolderID;

    // check if we have already processed this file
    VideoLocal_Place vlocalplace = RepoFactory.VideoLocalPlace.GetByFilePathAndShareID(filePath, nshareID);
    VideoLocal vlocal;
    if (vlocalplace != null)
    {
        vlocal = vlocalplace.VideoLocal;
        logger.Trace("VideoLocal record found in database: {0}", vlocal.VideoLocalID);
        if (ForceHash)
        {
            vlocal.FileSize = filesize;
            vlocal.DateTimeUpdated = DateTime.Now;
        }
    }
    else
    {
        logger.Trace("VideoLocal, creating temporary record");
        vlocal = new VideoLocal();
        vlocal.DateTimeUpdated = DateTime.Now;
        vlocal.DateTimeCreated = vlocal.DateTimeUpdated;
        vlocal.FileName = Path.GetFileName(filePath);
        vlocal.FileSize = filesize;
        vlocal.Hash = string.Empty;
        vlocal.CRC32 = string.Empty;
        // BUGFIX: the provider may return null hashes; use `?.` so the coalesce
        // actually applies (the old `ToUpperInvariant() ?? string.Empty` threw an
        // NRE on null and the `??` could never take effect).
        vlocal.MD5 = source_file.MD5?.ToUpperInvariant() ?? string.Empty;
        vlocal.SHA1 = source_file.SHA1?.ToUpperInvariant() ?? string.Empty;
        vlocal.IsIgnored = 0;
        vlocal.IsVariation = 0;
        vlocalplace = new VideoLocal_Place();
        vlocalplace.FilePath = filePath;
        vlocalplace.ImportFolderID = nshareID;
        vlocalplace.ImportFolderType = folder.ImportFolderType;
    }

    // check if we need to get a hash this file
    Hashes hashes = null;
    if (string.IsNullOrEmpty(vlocal.Hash) || ForceHash)
    {
        // try getting the hash from the CrossRef
        if (!ForceHash)
        {
            List<CrossRef_File_Episode> crossRefs =
                RepoFactory.CrossRef_File_Episode.GetByFileNameAndSize(vlocal.FileName, vlocal.FileSize);
            if (crossRefs.Count == 1)
            {
                vlocal.Hash = crossRefs[0].Hash;
                vlocal.HashSource = (int)HashSource.DirectHash;
            }
        }

        // try getting the hash from the LOCAL cache
        if (!ForceHash && string.IsNullOrEmpty(vlocal.Hash))
        {
            List<FileNameHash> fnhashes =
                RepoFactory.FileNameHash.GetByFileNameAndSize(vlocal.FileName, vlocal.FileSize);
            if (fnhashes != null && fnhashes.Count > 1)
            {
                // if we have more than one record it probably means there is some sort of corruption
                // lets delete the local records
                foreach (FileNameHash fnh in fnhashes)
                {
                    RepoFactory.FileNameHash.Delete(fnh.FileNameHashID);
                }
            }
            if (fnhashes != null && fnhashes.Count == 1)
            {
                logger.Trace("Got hash from LOCAL cache: {0} ({1})", FileName, fnhashes[0].Hash);
                vlocal.Hash = fnhashes[0].Hash;
                vlocal.HashSource = (int)HashSource.WebCacheFileName;
            }
        }

        if (string.IsNullOrEmpty(vlocal.Hash))
        {
            FillVideoHashes(vlocal);
        }

        if (string.IsNullOrEmpty(vlocal.Hash) && folder.CloudID.HasValue)
        {
            // Cloud and no hash, Nothing to do, except maybe Get the mediainfo....
            logger.Trace("No Hash found for cloud " + vlocal.FileName +
                         " putting in videolocal table with empty ED2K");
            RepoFactory.VideoLocal.Save(vlocal, false);
            vlocalplace.VideoLocalID = vlocal.VideoLocalID;
            RepoFactory.VideoLocalPlace.Save(vlocalplace);
            if (vlocalplace.RefreshMediaInfo())
            {
                RepoFactory.VideoLocal.Save(vlocalplace.VideoLocal, true);
            }
            return vlocalplace;
        }

        // hash the file
        if (string.IsNullOrEmpty(vlocal.Hash) || ForceHash)
        {
            JMMService.CmdProcessorHasher.QueueState = PrettyDescriptionHashing;
            DateTime start = DateTime.Now;
            logger.Trace("Calculating ED2K hashes for: {0}", FileName);
            // update the VideoLocal record with the Hash, since cloud support we calculate everything
            hashes = FileHashHelper.GetHashInfo(FileName.Replace("/", "\\"), true,
                MainWindow.OnHashProgress, true, true, true);
            TimeSpan ts = DateTime.Now - start;
            logger.Trace("Hashed file in {0} seconds --- {1} ({2})", ts.TotalSeconds.ToString("#0.0"),
                FileName, Utils.FormatByteSize(vlocal.FileSize));
            vlocal.Hash = hashes.ed2k?.ToUpperInvariant();
            vlocal.CRC32 = hashes.crc32?.ToUpperInvariant();
            vlocal.MD5 = hashes.md5?.ToUpperInvariant();
            vlocal.SHA1 = hashes.sha1?.ToUpperInvariant();
            vlocal.HashSource = (int)HashSource.DirectHash;
        }
        FillMissingHashes(vlocal);

        // We should have a hash by now
        // before we save it, lets make sure there is not any other record with this hash (possible duplicate file)
        VideoLocal tlocal = RepoFactory.VideoLocal.GetByHash(vlocal.Hash);
        bool intercloudfolder = false;
        VideoLocal_Place prep = tlocal?.Places.FirstOrDefault(
            a => a.ImportFolder.CloudID == folder.CloudID &&
                 a.ImportFolderID == folder.ImportFolderID &&
                 vlocalplace.VideoLocal_Place_ID != a.VideoLocal_Place_ID);
        if (prep != null)
        {
            // delete the VideoLocal record
            logger.Warn("Deleting duplicate video file record");
            logger.Warn("---------------------------------------------");
            logger.Warn($"Keeping record for: {vlocalplace.FullServerPath}");
            logger.Warn($"Deleting record for: {prep.FullServerPath}");
            logger.Warn("---------------------------------------------");

            // check if we have a record of this in the database, if not create one
            List<DuplicateFile> dupFiles = RepoFactory.DuplicateFile.GetByFilePathsAndImportFolder(
                vlocalplace.FilePath, prep.FilePath, vlocalplace.ImportFolderID, prep.ImportFolderID);
            if (dupFiles.Count == 0)
            {
                // Try the reversed pairing too — the record may have been stored the other way round.
                dupFiles = RepoFactory.DuplicateFile.GetByFilePathsAndImportFolder(
                    prep.FilePath, vlocalplace.FilePath, prep.ImportFolderID, vlocalplace.ImportFolderID);
            }
            if (dupFiles.Count == 0)
            {
                DuplicateFile dup = new DuplicateFile();
                dup.DateTimeUpdated = DateTime.Now;
                dup.FilePathFile1 = vlocalplace.FilePath;
                dup.FilePathFile2 = prep.FilePath;
                dup.ImportFolderIDFile1 = vlocalplace.ImportFolderID;
                dup.ImportFolderIDFile2 = prep.ImportFolderID;
                dup.Hash = vlocal.Hash;
                RepoFactory.DuplicateFile.Save(dup);
            }
            //Notify duplicate, don't delete
        }
        else if (tlocal != null)
        {
            // Same hash in another cloud/import folder: adopt the existing record.
            vlocal = tlocal;
            intercloudfolder = true;
        }

        if (!intercloudfolder)
        {
            RepoFactory.VideoLocal.Save(vlocal, true);
        }

        vlocalplace.VideoLocalID = vlocal.VideoLocalID;
        RepoFactory.VideoLocalPlace.Save(vlocalplace);

        if (intercloudfolder)
        {
            CommandRequest_ProcessFile cr_procfile3 = new CommandRequest_ProcessFile(vlocal.VideoLocalID, false);
            cr_procfile3.Save();
            return vlocalplace;
        }

        // also save the filename to hash record
        // replace the existing records just in case it was corrupt
        FileNameHash fnhash = null;
        List<FileNameHash> fnhashes2 =
            RepoFactory.FileNameHash.GetByFileNameAndSize(vlocal.FileName, vlocal.FileSize);
        if (fnhashes2 != null && fnhashes2.Count > 1)
        {
            // if we have more than one record it probably means there is some sort of corruption
            // lets delete the local records
            foreach (FileNameHash fnh in fnhashes2)
            {
                RepoFactory.FileNameHash.Delete(fnh.FileNameHashID);
            }
        }
        if (fnhashes2 != null && fnhashes2.Count == 1)
        {
            fnhash = fnhashes2[0];
        }
        else
        {
            fnhash = new FileNameHash();
        }
        fnhash.FileName = vlocal.FileName;
        fnhash.FileSize = vlocal.FileSize;
        fnhash.Hash = vlocal.Hash;
        fnhash.DateTimeUpdated = DateTime.Now;
        RepoFactory.FileNameHash.Save(fnhash);
    }
    else
    {
        FillMissingHashes(vlocal);
    }

    // Refresh media info if it is missing, stale, or has no duration.
    if ((vlocal.Media == null) || vlocal.MediaVersion < VideoLocal.MEDIA_VERSION || vlocal.Duration == 0)
    {
        if (vlocalplace.RefreshMediaInfo())
        {
            RepoFactory.VideoLocal.Save(vlocalplace.VideoLocal, true);
        }
    }

    // now add a command to process the file
    CommandRequest_ProcessFile cr_procfile = new CommandRequest_ProcessFile(vlocal.VideoLocalID, false);
    cr_procfile.Save();
    return vlocalplace;
}
/// <summary>
/// Looks up the import folder referenced by this duplicate's
/// <c>ImportFolderIDFile2</c> via the import-folder repository.
/// </summary>
public static SVR_ImportFolder GetImportFolder2(this DuplicateFile duplicatefile)
{
    return RepoFactory.ImportFolder.GetByID(duplicatefile.ImportFolderIDFile2);
}
/// <summary>
/// Builds the absolute server path of the duplicate's second file by joining the
/// second import folder's location with the folder-relative path <c>FilePathFile2</c>.
/// </summary>
public static string GetFullServerPath2(this DuplicateFile duplicatefile)
{
    var folderLocation = duplicatefile.GetImportFolder2().ImportFolderLocation;
    return Path.Combine(folderLocation, duplicatefile.FilePathFile2);
}
/// <summary>
/// Hashes the file referenced by <c>FileName</c>, creates or merges the matching
/// SVR_VideoLocal / SVR_VideoLocal_Place records, records duplicate files, refreshes
/// media info when missing or stale, and queues a CommandRequest_ProcessFile.
/// Logs an error and returns early when the file cannot be located or accessed.
/// </summary>
private void ProcessFile_LocalInfo()
{
    // hash and read media info for file
    int nshareID = -1;

    // Resolve which import folder contains the file and the folder-relative path.
    Tuple<SVR_ImportFolder, string> tup = VideoLocal_PlaceRepository.GetFromFullPath(FileName);
    if (tup == null)
    {
        logger.Error($"Unable to locate Import Folder for {FileName}");
        return;
    }
    SVR_ImportFolder folder = tup.Item1;
    string filePath = tup.Item2;
    long filesize = 0;
    Exception e = null;

    if (!File.Exists(FileName))
    {
        logger.Error("File does not exist: {0}", FileName);
        return;
    }

    if (ServerSettings.Instance.Import.FileLockChecking)
    {
        int numAttempts = 0;
        bool writeAccess = folder.IsDropSource == 1;
        bool aggressive = ServerSettings.Instance.Import.AggressiveFileLockChecking;

        // At least 1s between to ensure that size has the chance to change
        int waitTime = ServerSettings.Instance.Import.FileLockWaitTimeMS;
        if (waitTime < 1000)
        {
            // Persist the corrected default so it survives restarts.
            waitTime = ServerSettings.Instance.Import.FileLockWaitTimeMS = 4000;
            ServerSettings.Instance.SaveSettings();
        }

        if (!aggressive)
        {
            // Wait 1 minute before giving up on trying to access the file
            while ((filesize = CanAccessFile(FileName, writeAccess, ref e)) == 0 && (numAttempts < 60))
            {
                numAttempts++;
                Thread.Sleep(waitTime);
                logger.Trace($@"Failed to access, (or filesize is 0) Attempt # {numAttempts}, {FileName}");
            }
        }
        else
        {
            // Wait 1 minute before giving up on trying to access the file
            // first only do read to not get in something's way
            while ((filesize = CanAccessFile(FileName, false, ref e)) == 0 && (numAttempts < 60))
            {
                numAttempts++;
                Thread.Sleep(1000);
                logger.Trace($@"Failed to access, (or filesize is 0) Attempt # {numAttempts}, {FileName}");
            }

            // if we failed to access the file, get ouuta here
            if (numAttempts >= 60)
            {
                logger.Error("Could not access file: " + FileName);
                logger.Error(e);
                return;
            }

            int seconds = ServerSettings.Instance.Import.AggressiveFileLockWaitTimeSeconds;
            if (seconds < 0)
            {
                seconds = ServerSettings.Instance.Import.AggressiveFileLockWaitTimeSeconds = 8;
                ServerSettings.Instance.SaveSettings();
            }
            Thread.Sleep(waitTime);
            numAttempts = 0;

            //For systems with no locking
            // Keep waiting while the file's modified date / size keeps changing.
            while (FileModified(FileName, seconds, ref filesize, writeAccess, ref e) && numAttempts < 60)
            {
                numAttempts++;
                Thread.Sleep(waitTime);
                // Only show if it's more than 'seconds' past
                if (numAttempts != 0 && numAttempts * 2 % seconds == 0)
                {
                    logger.Warn(
                        $@"The modified date is too soon. Waiting to ensure that no processes are writing to it. {numAttempts}/60 {FileName}"
                        );
                }
            }
        }

        // if we failed to access the file, get ouuta here
        if (numAttempts >= 60 || filesize == 0)
        {
            logger.Error("Could not access file: " + FileName);
            logger.Error(e);
            return;
        }
    }

    if (!File.Exists(FileName))
    {
        logger.Error("Could not access file: " + FileName);
        return;
    }

    // NOTE(review): sourceFile does not appear to be used anywhere below in this
    // method — confirm before removing.
    FileInfo sourceFile = new FileInfo(FileName);
    nshareID = folder.ImportFolderID;

    // check if we have already processed this file
    SVR_VideoLocal_Place vlocalplace = RepoFactory.VideoLocalPlace.GetByFilePathAndImportFolderID(filePath, nshareID);
    SVR_VideoLocal vlocal = null;
    var filename = Path.GetFileName(filePath);

    if (vlocalplace != null)
    {
        vlocal = vlocalplace.VideoLocal;
        if (vlocal != null)
        {
            logger.Trace("VideoLocal record found in database: {0}", FileName);

            // This will only happen with DB corruption, so just clean up the mess.
            if (vlocalplace.FullServerPath == null)
            {
                // If this was the record's only place, the VideoLocal is orphaned too.
                if (vlocal.Places.Count == 1)
                {
                    RepoFactory.VideoLocal.Delete(vlocal);
                    vlocal = null;
                }

                RepoFactory.VideoLocalPlace.Delete(vlocalplace);
                vlocalplace = null;
            }

            if (vlocal != null && ForceHash)
            {
                vlocal.FileSize = filesize;
                vlocal.DateTimeUpdated = DateTime.Now;
            }
        }
    }

    if (vlocal == null)
    {
        // TODO support reading MD5 and SHA1 from files via the standard way
        logger.Trace("No existing VideoLocal, creating temporary record");
        vlocal = new SVR_VideoLocal
        {
            DateTimeUpdated = DateTime.Now,
            // NOTE(review): an object initializer cannot reference the object's own
            // members, so this `DateTimeUpdated` resolves against the enclosing
            // scope (presumably a property of this command), NOT the value assigned
            // on the line above — confirm `DateTime.Now` was not intended here.
            DateTimeCreated = DateTimeUpdated,
            FileName = filename,
            FileSize = filesize,
            Hash = string.Empty,
            CRC32 = string.Empty,
            MD5 = string.Empty,
            SHA1 = string.Empty,
            IsIgnored = 0,
            IsVariation = 0
        };
    }

    if (vlocalplace == null)
    {
        logger.Trace("No existing VideoLocal_Place, creating a new record");
        vlocalplace = new SVR_VideoLocal_Place
        {
            FilePath = filePath,
            ImportFolderID = nshareID,
            ImportFolderType = folder.ImportFolderType
        };
        // Make sure we have an ID
        RepoFactory.VideoLocalPlace.Save(vlocalplace);
    }

    // check if we need to get a hash this file
    if (string.IsNullOrEmpty(vlocal.Hash) || ForceHash)
    {
        logger.Trace("No existing hash in VideoLocal, checking XRefs");
        if (!ForceHash)
        {
            // try getting the hash from the CrossRef
            List<CrossRef_File_Episode> crossRefs =
                RepoFactory.CrossRef_File_Episode.GetByFileNameAndSize(filename, vlocal.FileSize);
            if (crossRefs.Any())
            {
                vlocal.Hash = crossRefs[0].Hash;
                vlocal.HashSource = (int)HashSource.DirectHash;
            }
        }

        // try getting the hash from the LOCAL cache
        if (!ForceHash && string.IsNullOrEmpty(vlocal.Hash))
        {
            List<FileNameHash> fnhashes = RepoFactory.FileNameHash.GetByFileNameAndSize(filename, vlocal.FileSize);
            if (fnhashes != null && fnhashes.Count > 1)
            {
                // if we have more than one record it probably means there is some sort of corruption
                // lets delete the local records
                foreach (FileNameHash fnh in fnhashes)
                {
                    RepoFactory.FileNameHash.Delete(fnh.FileNameHashID);
                }
            }
            // reinit this to check if we erased them
            fnhashes = RepoFactory.FileNameHash.GetByFileNameAndSize(filename, vlocal.FileSize);
            if (fnhashes != null && fnhashes.Count == 1)
            {
                logger.Trace("Got hash from LOCAL cache: {0} ({1})", FileName, fnhashes[0].Hash);
                vlocal.Hash = fnhashes[0].Hash;
                vlocal.HashSource = (int)HashSource.WebCacheFileName;
            }
        }

        if (string.IsNullOrEmpty(vlocal.Hash))
        {
            FillVideoHashes(vlocal);
        }

        // hash the file
        if (string.IsNullOrEmpty(vlocal.Hash) || ForceHash)
        {
            logger.Info("Hashing File: {0}", FileName);
            ShokoService.CmdProcessorHasher.QueueState = PrettyDescriptionHashing;
            DateTime start = DateTime.Now;
            // update the VideoLocal record with the Hash, since cloud support we calculate everything
            var hashes = FileHashHelper.GetHashInfo(FileName.Replace("/", $"{Path.DirectorySeparatorChar}"), true,
                ShokoServer.OnHashProgress, true, true, true);
            TimeSpan ts = DateTime.Now - start;
            logger.Trace("Hashed file in {0:#0.0} seconds --- {1} ({2})", ts.TotalSeconds,
                FileName, Utils.FormatByteSize(vlocal.FileSize));
            vlocal.Hash = hashes.ED2K?.ToUpperInvariant();
            vlocal.CRC32 = hashes.CRC32?.ToUpperInvariant();
            vlocal.MD5 = hashes.MD5?.ToUpperInvariant();
            vlocal.SHA1 = hashes.SHA1?.ToUpperInvariant();
            vlocal.HashSource = (int)HashSource.DirectHash;
        }
        FillMissingHashes(vlocal);

        // We should have a hash by now
        // before we save it, lets make sure there is not any other record with this hash (possible duplicate file)
        SVR_VideoLocal tlocal = RepoFactory.VideoLocal.GetByHash(vlocal.Hash);
        bool duplicate = false;
        bool changed = false;

        if (tlocal != null)
        {
            logger.Trace("Found existing VideoLocal with hash, merging info from it");
            // Aid with hashing cloud. Merge hashes and save, regardless of duplicate file
            changed = tlocal.MergeInfoFrom(vlocal);
            vlocal = tlocal;

            // All other places pointing at the same hash (candidate duplicates).
            List<SVR_VideoLocal_Place> preps = vlocal.Places.Where(
                a => !vlocalplace.FullServerPath.Equals(a.FullServerPath)).ToList();
            foreach (var prep in preps)
            {
                if (prep == null)
                {
                    continue;
                }
                // clean up, if there is a 'duplicate file' that is invalid, remove it.
                if (prep.FullServerPath == null)
                {
                    RepoFactory.VideoLocalPlace.Delete(prep);
                }
                else
                {
                    if (!File.Exists(prep.FullServerPath))
                    {
                        RepoFactory.VideoLocalPlace.Delete(prep);
                    }
                }
            }

            // Anything still left after cleanup is a genuine duplicate on disk.
            var dupPlace = vlocal.Places.FirstOrDefault(
                a => !vlocalplace.FullServerPath.Equals(a.FullServerPath));

            if (dupPlace != null)
            {
                logger.Warn("Found Duplicate File");
                logger.Warn("---------------------------------------------");
                logger.Warn($"New File: {vlocalplace.FullServerPath}");
                logger.Warn($"Existing File: {dupPlace.FullServerPath}");
                logger.Warn("---------------------------------------------");

                if (ServerSettings.Instance.Import.AutomaticallyDeleteDuplicatesOnImport)
                {
                    vlocalplace.RemoveRecordAndDeletePhysicalFile();
                    return;
                }

                // check if we have a record of this in the database, if not create one
                List<DuplicateFile> dupFiles = RepoFactory.DuplicateFile.GetByFilePathsAndImportFolder(
                    vlocalplace.FilePath,
                    dupPlace.FilePath,
                    vlocalplace.ImportFolderID, dupPlace.ImportFolderID);
                if (dupFiles.Count == 0)
                {
                    // The record may have been stored with the files the other way round.
                    dupFiles = RepoFactory.DuplicateFile.GetByFilePathsAndImportFolder(dupPlace.FilePath,
                        vlocalplace.FilePath, dupPlace.ImportFolderID, vlocalplace.ImportFolderID);
                }

                if (dupFiles.Count == 0)
                {
                    DuplicateFile dup = new DuplicateFile
                    {
                        DateTimeUpdated = DateTime.Now,
                        FilePathFile1 = vlocalplace.FilePath,
                        FilePathFile2 = dupPlace.FilePath,
                        ImportFolderIDFile1 = vlocalplace.ImportFolderID,
                        ImportFolderIDFile2 = dupPlace.ImportFolderID,
                        Hash = vlocal.Hash
                    };
                    RepoFactory.DuplicateFile.Save(dup);
                }
                //Notify duplicate, don't delete
                duplicate = true;
            }
        }

        if (!duplicate || changed)
        {
            RepoFactory.VideoLocal.Save(vlocal, true);
        }

        vlocalplace.VideoLocalID = vlocal.VideoLocalID;
        RepoFactory.VideoLocalPlace.Save(vlocalplace);

        if (duplicate)
        {
            // Re-queue processing against the canonical VideoLocal and stop here.
            CommandRequest_ProcessFile cr_procfile3 =
                new CommandRequest_ProcessFile(vlocal.VideoLocalID, false);
            cr_procfile3.Save();
            return;
        }

        // also save the filename to hash record
        // replace the existing records just in case it was corrupt
        FileNameHash fnhash;
        List<FileNameHash> fnhashes2 = RepoFactory.FileNameHash.GetByFileNameAndSize(filename, vlocal.FileSize);
        if (fnhashes2 != null && fnhashes2.Count > 1)
        {
            // if we have more than one record it probably means there is some sort of corruption
            // lets delete the local records
            foreach (FileNameHash fnh in fnhashes2)
            {
                RepoFactory.FileNameHash.Delete(fnh.FileNameHashID);
            }
        }

        if (fnhashes2 != null && fnhashes2.Count == 1)
        {
            fnhash = fnhashes2[0];
        }
        else
        {
            fnhash = new FileNameHash();
        }

        fnhash.FileName = filename;
        fnhash.FileSize = vlocal.FileSize;
        fnhash.Hash = vlocal.Hash;
        fnhash.DateTimeUpdated = DateTime.Now;
        RepoFactory.FileNameHash.Save(fnhash);
    }
    else
    {
        FillMissingHashes(vlocal);
    }

    // Refresh media info when it is missing, has no duration, or is from an older schema version.
    if (((vlocal.Media?.GeneralStream?.Duration ?? 0) == 0) || vlocal.MediaVersion < SVR_VideoLocal.MEDIA_VERSION)
    {
        if (vlocalplace.RefreshMediaInfo())
        {
            RepoFactory.VideoLocal.Save(vlocalplace.VideoLocal, true);
        }
    }

    // now add a command to process the file
    CommandRequest_ProcessFile cr_procfile = new CommandRequest_ProcessFile(vlocal.VideoLocalID, false, SkipMyList);
    cr_procfile.Save();
}