private SVR_VideoLocal_Place ProcessFile_LocalInfo()
{
    // hash and read media info for file
    int nshareID = -1;
    string filePath = "";

    Tuple<SVR_ImportFolder, string> tup = VideoLocal_PlaceRepository.GetFromFullPath(FileName);
    if (tup == null)
    {
        logger.Error($"Unable to locate Import Folder for {FileName}");
        return null;
    }
    SVR_ImportFolder folder = tup.Item1;
    filePath = tup.Item2;
    IFileSystem f = tup.Item1.FileSystem;
    if (f == null)
    {
        logger.Error("Unable to open filesystem for: {0}", FileName);
        return null;
    }

    long filesize = 0;
    if (folder.CloudID == null) // Local Access
    {
        if (!File.Exists(FileName))
        {
            logger.Error("File does not exist: {0}", FileName);
            return null;
        }

        int numAttempts = 0;

        // Wait up to 3 minutes before giving up on trying to access the file
        while ((filesize = CanAccessFile(FileName)) == 0 && (numAttempts < 180))
        {
            numAttempts++;
            Thread.Sleep(1000);
            Console.WriteLine("Attempt # " + numAttempts.ToString());
        }

        // if we failed to access the file, get outta here
        if (numAttempts == 180)
        {
            logger.Error("Could not access file: " + FileName);
            return null;
        }
    }

    FileSystemResult<IObject> source = f.Resolve(FileName);
    if (source == null || !source.IsOk || !(source.Result is IFile))
    {
        logger.Error("Could not access file: " + FileName);
        return null;
    }
    IFile source_file = (IFile) source.Result;
    if (folder.CloudID.HasValue)
        filesize = source_file.Size;
    nshareID = folder.ImportFolderID;

    // check if we have already processed this file
    SVR_VideoLocal_Place vlocalplace = RepoFactory.VideoLocalPlace.GetByFilePathAndShareID(filePath, nshareID);
    SVR_VideoLocal vlocal = null;

    if (vlocalplace != null)
    {
        vlocal = vlocalplace.VideoLocal;
        logger.Trace("VideoLocal record found in database: {0}", vlocal.VideoLocalID);

        if (vlocalplace.FullServerPath == null)
        {
            if (vlocal.Places.Count == 1)
                RepoFactory.VideoLocal.Delete(vlocal);
            RepoFactory.VideoLocalPlace.Delete(vlocalplace);
            vlocalplace = null;
            vlocal = null;
        }
        else if (ForceHash)
        {
            vlocal.FileSize = filesize;
            vlocal.DateTimeUpdated = DateTime.Now;
        }
    }

    if (vlocalplace == null)
    {
        logger.Trace("No existing VideoLocal, creating temporary record");
        vlocal = new SVR_VideoLocal();
        vlocal.DateTimeUpdated = DateTime.Now;
        vlocal.DateTimeCreated = vlocal.DateTimeUpdated;
        vlocal.FileName = Path.GetFileName(filePath);
        vlocal.FileSize = filesize;
        vlocal.Hash = string.Empty;
        vlocal.CRC32 = string.Empty;
        vlocal.MD5 = source_file.MD5?.ToUpperInvariant() ?? string.Empty;
        vlocal.SHA1 = source_file.SHA1?.ToUpperInvariant() ?? string.Empty;
        vlocal.IsIgnored = 0;
        vlocal.IsVariation = 0;

        vlocalplace = new SVR_VideoLocal_Place();
        vlocalplace.FilePath = filePath;
        vlocalplace.ImportFolderID = nshareID;
        vlocalplace.ImportFolderType = folder.ImportFolderType;
    }

    // check if we need to get a hash for this file
    Hashes hashes = null;
    if (string.IsNullOrEmpty(vlocal.Hash) || ForceHash)
    {
        // try getting the hash from the CrossRef
        if (!ForceHash)
        {
            List<CrossRef_File_Episode> crossRefs =
                RepoFactory.CrossRef_File_Episode.GetByFileNameAndSize(vlocal.FileName, vlocal.FileSize);
            if (crossRefs.Count == 1)
            {
                vlocal.Hash = crossRefs[0].Hash;
                vlocal.HashSource = (int) HashSource.DirectHash;
            }
        }

        // try getting the hash from the LOCAL cache
        if (!ForceHash && string.IsNullOrEmpty(vlocal.Hash))
        {
            List<FileNameHash> fnhashes =
                RepoFactory.FileNameHash.GetByFileNameAndSize(vlocal.FileName, vlocal.FileSize);
            if (fnhashes != null && fnhashes.Count > 1)
            {
                // if we have more than one record it probably means there is some sort of corruption
                // let's delete the local records
                foreach (FileNameHash fnh in fnhashes)
                    RepoFactory.FileNameHash.Delete(fnh.FileNameHashID);
            }
            // requery to check if we erased them
            fnhashes = RepoFactory.FileNameHash.GetByFileNameAndSize(vlocal.FileName, vlocal.FileSize);
            if (fnhashes != null && fnhashes.Count == 1)
            {
                logger.Trace("Got hash from LOCAL cache: {0} ({1})", FileName, fnhashes[0].Hash);
                vlocal.Hash = fnhashes[0].Hash;
                vlocal.HashSource = (int) HashSource.WebCacheFileName;
            }
        }

        if (string.IsNullOrEmpty(vlocal.Hash))
            FillVideoHashes(vlocal);

        if (string.IsNullOrEmpty(vlocal.Hash) && folder.CloudID.HasValue)
        {
            // Cloud and no hash: nothing to do, except maybe get the mediainfo
            logger.Trace("No Hash found for cloud " + vlocal.FileName +
                         " putting in videolocal table with empty ED2K");
            RepoFactory.VideoLocal.Save(vlocal, false);
            vlocalplace.VideoLocalID = vlocal.VideoLocalID;
            RepoFactory.VideoLocalPlace.Save(vlocalplace);
            if (vlocalplace.RefreshMediaInfo())
                RepoFactory.VideoLocal.Save(vlocalplace.VideoLocal, true);
            return vlocalplace;
        }

        // hash the file
        if (string.IsNullOrEmpty(vlocal.Hash) || ForceHash)
        {
            logger.Info("Hashing File: {0}", FileName);
            ShokoService.CmdProcessorHasher.QueueState = PrettyDescriptionHashing;
            DateTime start = DateTime.Now;
            logger.Trace("Calculating ED2K hashes for: {0}", FileName);
            // update the VideoLocal record with the hash; since cloud support we calculate everything
            hashes = FileHashHelper.GetHashInfo(FileName.Replace("/", "\\"), true, ShokoServer.OnHashProgress,
                true, true, true);
            TimeSpan ts = DateTime.Now - start;
            logger.Trace("Hashed file in {0} seconds --- {1} ({2})", ts.TotalSeconds.ToString("#0.0"), FileName,
                Utils.FormatByteSize(vlocal.FileSize));
            vlocal.Hash = hashes.ED2K?.ToUpperInvariant();
            vlocal.CRC32 = hashes.CRC32?.ToUpperInvariant();
            vlocal.MD5 = hashes.MD5?.ToUpperInvariant();
            vlocal.SHA1 = hashes.SHA1?.ToUpperInvariant();
            vlocal.HashSource = (int) HashSource.DirectHash;
        }
        FillMissingHashes(vlocal);

        // We should have a hash by now.
        // Before we save it, make sure there is not another record with this hash (possible duplicate file)
        SVR_VideoLocal tlocal = RepoFactory.VideoLocal.GetByHash(vlocal.Hash);
        bool duplicate = false;
        bool changed = false;

        if (tlocal != null)
        {
            // Aid with hashing cloud. Merge hashes and save, regardless of duplicate file
            changed = tlocal.MergeInfoFrom(vlocal);
            vlocal = tlocal;

            SVR_VideoLocal_Place prep = tlocal.Places.FirstOrDefault(
                a => a.ImportFolder.CloudID == folder.CloudID &&
                     a.ImportFolderID == folder.ImportFolderID &&
                     vlocalplace.VideoLocal_Place_ID != a.VideoLocal_Place_ID);

            // clean up: if there is a 'duplicate file' that is invalid, remove it
            if (prep != null && prep.FullServerPath == null)
            {
                if (tlocal.Places.Count == 1)
                    RepoFactory.VideoLocal.Delete(tlocal);
                RepoFactory.VideoLocalPlace.Delete(prep);
                prep = null;
            }

            prep = tlocal.Places.FirstOrDefault(
                a => a.ImportFolder.CloudID == folder.CloudID &&
                     vlocalplace.VideoLocal_Place_ID != a.VideoLocal_Place_ID);
            if (prep != null)
            {
                // delete the VideoLocal record
                logger.Warn("Found Duplicate File");
                logger.Warn("---------------------------------------------");
                logger.Warn($"New File: {vlocalplace.FullServerPath}");
                logger.Warn($"Existing File: {prep.FullServerPath}");
                logger.Warn("---------------------------------------------");

                // check if we have a record of this in the database, if not create one
                List<DuplicateFile> dupFiles = RepoFactory.DuplicateFile.GetByFilePathsAndImportFolder(
                    vlocalplace.FilePath, prep.FilePath, vlocalplace.ImportFolderID, prep.ImportFolderID);
                if (dupFiles.Count == 0)
                    dupFiles = RepoFactory.DuplicateFile.GetByFilePathsAndImportFolder(prep.FilePath,
                        vlocalplace.FilePath, prep.ImportFolderID, vlocalplace.ImportFolderID);

                if (dupFiles.Count == 0)
                {
                    DuplicateFile dup = new DuplicateFile();
                    dup.DateTimeUpdated = DateTime.Now;
                    dup.FilePathFile1 = vlocalplace.FilePath;
                    dup.FilePathFile2 = prep.FilePath;
                    dup.ImportFolderIDFile1 = vlocalplace.ImportFolderID;
                    dup.ImportFolderIDFile2 = prep.ImportFolderID;
                    dup.Hash = vlocal.Hash;
                    RepoFactory.DuplicateFile.Save(dup);
                }
                // Notify duplicate, don't delete
                duplicate = true;
            }
        }

        if (!duplicate || changed)
            RepoFactory.VideoLocal.Save(vlocal, true);

        vlocalplace.VideoLocalID = vlocal.VideoLocalID;
        RepoFactory.VideoLocalPlace.Save(vlocalplace);

        if (duplicate)
        {
            CommandRequest_ProcessFile cr_procfile3 = new CommandRequest_ProcessFile(vlocal.VideoLocalID, false);
            cr_procfile3.Save();
            return vlocalplace;
        }

        // also save the filename-to-hash record
        // replace the existing records just in case they were corrupt
        FileNameHash fnhash = null;
        List<FileNameHash> fnhashes2 =
            RepoFactory.FileNameHash.GetByFileNameAndSize(vlocal.FileName, vlocal.FileSize);
        if (fnhashes2 != null && fnhashes2.Count > 1)
        {
            // if we have more than one record it probably means there is some sort of corruption
            // let's delete the local records
            foreach (FileNameHash fnh in fnhashes2)
                RepoFactory.FileNameHash.Delete(fnh.FileNameHashID);
        }

        if (fnhashes2 != null && fnhashes2.Count == 1)
            fnhash = fnhashes2[0];
        else
            fnhash = new FileNameHash();

        fnhash.FileName = vlocal.FileName;
        fnhash.FileSize = vlocal.FileSize;
        fnhash.Hash = vlocal.Hash;
        fnhash.DateTimeUpdated = DateTime.Now;
        RepoFactory.FileNameHash.Save(fnhash);
    }
    else
    {
        FillMissingHashes(vlocal);
    }

    if ((vlocal.Media == null) || vlocal.MediaVersion < SVR_VideoLocal.MEDIA_VERSION || vlocal.Duration == 0)
    {
        if (vlocalplace.RefreshMediaInfo())
            RepoFactory.VideoLocal.Save(vlocalplace.VideoLocal, true);
    }

    // now add a command to process the file
    CommandRequest_ProcessFile cr_procfile = new CommandRequest_ProcessFile(vlocal.VideoLocalID, false);
    cr_procfile.Save();

    return vlocalplace;
}
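The retry loop above relies on a CanAccessFile helper that is not shown in this listing. The following is only a minimal sketch of what such a helper might look like, assuming System.IO is imported and assuming it simply tries to open the file exclusively and reports its length (returning 0 while the file is still locked, still being copied, or empty); the project's actual implementation may differ.

// Hypothetical sketch of the CanAccessFile helper used in the retry loop above.
// Returns the file size, or 0 if the file cannot be opened yet.
private long CanAccessFile(string fileName)
{
    try
    {
        using (FileStream fs = File.Open(fileName, FileMode.Open, FileAccess.Read, FileShare.None))
        {
            return fs.Length;
        }
    }
    catch (IOException)
    {
        // another process still holds the file, or it is otherwise unreadable
        return 0;
    }
}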
private void ProcessFile_LocalInfo()
{
    // hash and read media info for file
    int nshareID = -1;

    (SVR_ImportFolder folder, string filePath) = VideoLocal_PlaceRepository.GetFromFullPath(FileName);
    if (folder == null)
    {
        logger.Error($"Unable to locate Import Folder for {FileName}");
        return;
    }
    IFileSystem f = folder.FileSystem;
    if (f == null)
    {
        logger.Error("Unable to open filesystem for: {0}", FileName);
        return;
    }

    long filesize = 0;
    if (folder.CloudID == null) // Local Access
    {
        if (!File.Exists(FileName))
        {
            logger.Error("File does not exist: {0}", FileName);
            return;
        }

        int numAttempts = 0;

        // Wait up to 1 minute before giving up on trying to access the file
        while ((filesize = CanAccessFile(FileName)) == 0 && (numAttempts < 60))
        {
            numAttempts++;
            Thread.Sleep(1000);
            logger.Error($@"Failed to access, (or filesize is 0) Attempt # {numAttempts}, {FileName}");
        }

        // if we failed to access the file, get outta here
        if (numAttempts >= 60)
        {
            logger.Error("Could not access file: " + FileName);
            return;
        }

        // For systems with no locking
        while (FileModified(FileName, 3))
        {
            Thread.Sleep(1000);
            logger.Error($@"An external process is modifying the file, {FileName}");
        }
    }

    IObject source = f.Resolve(FileName);
    if (source == null || source.Status != Status.Ok || !(source is IFile source_file))
    {
        logger.Error("Could not access file: " + FileName);
        return;
    }
    if (folder.CloudID.HasValue)
        filesize = source_file.Size;
    nshareID = folder.ImportFolderID;

    // check if we have already processed this file
    SVR_VideoLocal_Place vlocalplace =
        Repo.Instance.VideoLocal_Place.GetByFilePathAndImportFolderID(filePath, nshareID);
    SVR_VideoLocal vlocal = null;
    var filename = Path.GetFileName(filePath);

    if (vlocalplace != null)
    {
        vlocal = vlocalplace.VideoLocal;
        if (vlocal != null)
        {
            logger.Trace("VideoLocal record found in database: {0}", FileName);

            // This will only happen with DB corruption, so just clean up the mess.
            if (vlocalplace.FullServerPath == null)
            {
                if (vlocal.Places.Count == 1)
                {
                    Repo.Instance.VideoLocal.Delete(vlocal);
                    vlocal = null;
                }
                Repo.Instance.VideoLocal_Place.Delete(vlocalplace);
                vlocalplace = null;
            }

            if (vlocal != null && ForceHash)
            {
                vlocal.FileSize = filesize;
                vlocal.DateTimeUpdated = DateTime.Now;
            }
        }
    }

    bool duplicate = false;

    using (var txn = Repo.Instance.VideoLocal.BeginAddOrUpdate(() => vlocal, () =>
    {
        logger.Trace("No existing VideoLocal, creating temporary record");
        return new SVR_VideoLocal
        {
            DateTimeUpdated = DateTime.Now,
            DateTimeCreated = DateTimeUpdated,
            FileName = filename,
            FileSize = filesize,
            Hash = string.Empty,
            CRC32 = string.Empty,
            MD5 = source_file?.MD5?.ToUpperInvariant() ?? string.Empty,
            SHA1 = source_file?.SHA1?.ToUpperInvariant() ?? string.Empty,
            IsIgnored = 0,
            IsVariation = 0
        };
    }))
    {
        if (vlocalplace == null)
        {
            logger.Trace("No existing VideoLocal_Place, creating a new record");
            vlocalplace = new SVR_VideoLocal_Place
            {
                FilePath = filePath,
                ImportFolderID = nshareID,
                ImportFolderType = folder.ImportFolderType
            };
            // Make sure we have an ID
            vlocalplace = Repo.Instance.VideoLocal_Place.BeginAdd(vlocalplace).Commit();
        }

        using (var txn_vl = Repo.Instance.VideoLocal_Place.BeginAddOrUpdate(() => vlocalplace))
        {
            // check if we need to get a hash for this file
            if (string.IsNullOrEmpty(txn.Entity.Hash) || ForceHash)
            {
                logger.Trace("No existing hash in VideoLocal, checking XRefs");
                if (!ForceHash)
                {
                    // try getting the hash from the CrossRef
                    List<CrossRef_File_Episode> crossRefs =
                        Repo.Instance.CrossRef_File_Episode.GetByFileNameAndSize(filename, txn.Entity.FileSize);
                    if (crossRefs.Any())
                    {
                        txn.Entity.Hash = crossRefs[0].Hash;
                        txn.Entity.HashSource = (int) HashSource.DirectHash;
                    }
                }

                // try getting the hash from the LOCAL cache
                if (!ForceHash && string.IsNullOrEmpty(txn.Entity.Hash))
                {
                    List<FileNameHash> fnhashes =
                        Repo.Instance.FileNameHash.GetByFileNameAndSize(filename, txn.Entity.FileSize);
                    if (fnhashes != null && fnhashes.Count > 1)
                    {
                        // if we have more than one record it probably means there is some sort of corruption
                        // let's delete the local records
                        foreach (FileNameHash fnh in fnhashes)
                            Repo.Instance.FileNameHash.Delete(fnh.FileNameHashID);
                    }
                    // requery to check if we erased them
                    fnhashes = Repo.Instance.FileNameHash.GetByFileNameAndSize(filename, txn.Entity.FileSize);
                    if (fnhashes != null && fnhashes.Count == 1)
                    {
                        logger.Trace("Got hash from LOCAL cache: {0} ({1})", FileName, fnhashes[0].Hash);
                        txn.Entity.Hash = fnhashes[0].Hash;
                        txn.Entity.HashSource = (int) HashSource.WebCacheFileName;
                    }
                }

                if (string.IsNullOrEmpty(txn.Entity.Hash))
                    FillVideoHashes(txn.Entity);

                // Cloud and no hash: nothing to do, except maybe get the mediainfo
                if (string.IsNullOrEmpty(txn.Entity.Hash) && folder.CloudID.HasValue)
                {
                    logger.Trace("No Hash found for cloud " + filename +
                                 " putting in videolocal table with empty ED2K");
                    vlocal = txn.Commit(true);
                    using (var upd = Repo.Instance.VideoLocal_Place.BeginAddOrUpdate(() => vlocalplace))
                    {
                        upd.Entity.VideoLocalID = vlocal.VideoLocalID;
                        vlocalplace = upd.Commit();
                    }
                    if (vlocalplace.RefreshMediaInfo())
                        txn_vl.Commit(true);
                    return;
                }

                // hash the file
                if (string.IsNullOrEmpty(txn.Entity.Hash) || ForceHash)
                {
                    logger.Info("Hashing File: {0}", FileName);
                    ShokoService.CmdProcessorHasher.QueueState = PrettyDescriptionHashing;
                    DateTime start = DateTime.Now;
                    // update the VideoLocal record with the hash; since cloud support we calculate everything
                    var hashes = FileHashHelper.GetHashInfo(
                        FileName.Replace("/", $"{System.IO.Path.DirectorySeparatorChar}"), true,
                        ShokoServer.OnHashProgress, true, true, true);
                    TimeSpan ts = DateTime.Now - start;
                    logger.Trace("Hashed file in {0:#0.0} seconds --- {1} ({2})", ts.TotalSeconds, FileName,
                        Utils.FormatByteSize(txn.Entity.FileSize));
                    txn.Entity.Hash = hashes.ED2K?.ToUpperInvariant();
                    txn.Entity.CRC32 = hashes.CRC32?.ToUpperInvariant();
                    txn.Entity.MD5 = hashes.MD5?.ToUpperInvariant();
                    txn.Entity.SHA1 = hashes.SHA1?.ToUpperInvariant();
                    txn.Entity.HashSource = (int) HashSource.DirectHash;
                }
                FillMissingHashes(txn.Entity);

                // We should have a hash by now.
                // Before we save it, make sure there is not another record with this hash (possible duplicate file)
                SVR_VideoLocal tlocal = Repo.Instance.VideoLocal.GetByHash(txn.Entity.Hash);
                bool changed = false;

                if (tlocal != null)
                {
                    logger.Trace("Found existing VideoLocal with hash, merging info from it");
                    // Aid with hashing cloud. Merge hashes and save, regardless of duplicate file
                    changed = tlocal.MergeInfoFrom(txn.Entity);
                    vlocal = tlocal;

                    List<SVR_VideoLocal_Place> preps = vlocal.Places.Where(
                        a => a.ImportFolder.CloudID == folder.CloudID &&
                             !vlocalplace.FullServerPath.Equals(a.FullServerPath)).ToList();
                    foreach (var prep in preps)
                    {
                        if (prep == null) continue;
                        // clean up: if there is a 'duplicate file' that is invalid, remove it
                        if (prep.FullServerPath == null)
                        {
                            Repo.Instance.VideoLocal_Place.Delete(prep);
                        }
                        else
                        {
                            FileSystemResult dupFileSystemResult =
                                (FileSystemResult) prep.ImportFolder?.FileSystem?.Resolve(prep.FullServerPath);
                            if (dupFileSystemResult == null || dupFileSystemResult.Status != Status.Ok)
                                Repo.Instance.VideoLocal_Place.Delete(prep);
                        }
                    }

                    var dupPlace = txn.Entity.Places.FirstOrDefault(
                        a => a.ImportFolder.CloudID == folder.CloudID &&
                             !vlocalplace.FullServerPath.Equals(a.FullServerPath));
                    if (dupPlace != null)
                    {
                        logger.Warn("Found Duplicate File");
                        logger.Warn("---------------------------------------------");
                        logger.Warn($"New File: {vlocalplace.FullServerPath}");
                        logger.Warn($"Existing File: {dupPlace.FullServerPath}");
                        logger.Warn("---------------------------------------------");

                        // check if we have a record of this in the database, if not create one
                        List<DuplicateFile> dupFiles = Repo.Instance.DuplicateFile.GetByFilePathsAndImportFolder(
                            vlocalplace.FilePath, dupPlace.FilePath, vlocalplace.ImportFolderID,
                            dupPlace.ImportFolderID);
                        if (dupFiles.Count == 0)
                            dupFiles = Repo.Instance.DuplicateFile.GetByFilePathsAndImportFolder(dupPlace.FilePath,
                                vlocalplace.FilePath, dupPlace.ImportFolderID, vlocalplace.ImportFolderID);

                        if (dupFiles.Count == 0)
                        {
                            DuplicateFile dup = new DuplicateFile
                            {
                                DateTimeUpdated = DateTime.Now,
                                FilePathFile1 = vlocalplace.FilePath,
                                FilePathFile2 = dupPlace.FilePath,
                                ImportFolderIDFile1 = vlocalplace.ImportFolderID,
                                ImportFolderIDFile2 = dupPlace.ImportFolderID,
                                Hash = txn.Entity.Hash
                            };
                            Repo.Instance.DuplicateFile.BeginAdd(dup).Commit();
                        }
                        // Notify duplicate, don't delete
                        duplicate = true;
                    }
                }

                if (!duplicate || changed)
                    vlocal = txn.Commit();
            }
        }

        using (var upd = Repo.Instance.VideoLocal_Place.BeginAddOrUpdate(() => vlocalplace))
        {
            upd.Entity.VideoLocalID = vlocal.VideoLocalID;
            upd.Commit();
        }
    }

    if (duplicate)
    {
        CommandRequest_ProcessFile cr_procfile3 = new CommandRequest_ProcessFile(vlocal.VideoLocalID, false);
        cr_procfile3.Save();
        return;
    }

    // also save the filename-to-hash record
    // replace the existing records just in case they were corrupt
    List<FileNameHash> fnhashes2 = Repo.Instance.FileNameHash.GetByFileNameAndSize(filename, vlocal.FileSize);
    if (fnhashes2 != null && fnhashes2.Count > 1)
    {
        // if we have more than one record it probably means there is some sort of corruption
        // let's delete the local records
        foreach (FileNameHash fnh in fnhashes2)
            Repo.Instance.FileNameHash.Delete(fnh.FileNameHashID);
    }

    using (var upd = Repo.Instance.FileNameHash.BeginAddOrUpdate(() => fnhashes2?.Count == 1 ? fnhashes2[0] : null))
    {
        upd.Entity.FileName = filename;
        upd.Entity.FileSize = vlocal.FileSize;
        upd.Entity.Hash = vlocal.Hash;
        upd.Entity.DateTimeUpdated = DateTime.Now;
        upd.Commit();
    }

    if ((vlocal.Media == null) || vlocal.MediaVersion < SVR_VideoLocal.MEDIA_VERSION || vlocal.Duration == 0)
    {
        if (vlocalplace.RefreshMediaInfo())
        {
            using (var upd = Repo.Instance.VideoLocal.BeginAddOrUpdate(() => vlocalplace.VideoLocal))
                upd.Commit(true);
        }
    }

    // now add a command to process the file
    CommandRequest_ProcessFile cr_procfile = new CommandRequest_ProcessFile(vlocal.VideoLocalID, false);
    cr_procfile.Save();
}
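This variant also calls FileModified(FileName, 3) to guard against writers on filesystems with no locking. Below is a rough sketch of what that check could look like, under the assumption that it treats a file as "still being modified" when its last-write time falls within the given number of seconds; the project's actual helper may also track size changes, as the later variant's signature suggests.

// Hypothetical sketch of FileModified(fileName, seconds) as called above.
// Returns true if the file was written to within the last 'seconds' seconds,
// meaning another process may still be copying or writing it.
private bool FileModified(string fileName, int seconds)
{
    DateTime lastWrite = File.GetLastWriteTime(fileName);
    return lastWrite >= DateTime.Now.AddSeconds(-seconds);
}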
private void ProcessFile_LocalInfo()
{
    // hash and read media info for file
    int nshareID = -1;

    Tuple<SVR_ImportFolder, string> tup = VideoLocal_PlaceRepository.GetFromFullPath(FileName);
    if (tup == null)
    {
        logger.Error($"Unable to locate Import Folder for {FileName}");
        return;
    }
    SVR_ImportFolder folder = tup.Item1;
    string filePath = tup.Item2;

    long filesize = 0;
    Exception e = null;

    if (!File.Exists(FileName))
    {
        logger.Error("File does not exist: {0}", FileName);
        return;
    }

    if (ServerSettings.Instance.Import.FileLockChecking)
    {
        int numAttempts = 0;
        bool writeAccess = folder.IsDropSource == 1;
        bool aggressive = ServerSettings.Instance.Import.AggressiveFileLockChecking;

        // At least 1s between attempts to ensure that the size has a chance to change
        int waitTime = ServerSettings.Instance.Import.FileLockWaitTimeMS;
        if (waitTime < 1000)
        {
            waitTime = ServerSettings.Instance.Import.FileLockWaitTimeMS = 4000;
            ServerSettings.Instance.SaveSettings();
        }

        if (!aggressive)
        {
            // Wait up to 1 minute before giving up on trying to access the file
            while ((filesize = CanAccessFile(FileName, writeAccess, ref e)) == 0 && (numAttempts < 60))
            {
                numAttempts++;
                Thread.Sleep(waitTime);
                logger.Trace($@"Failed to access, (or filesize is 0) Attempt # {numAttempts}, {FileName}");
            }
        }
        else
        {
            // Wait up to 1 minute before giving up on trying to access the file
            // first only do read to not get in something's way
            while ((filesize = CanAccessFile(FileName, false, ref e)) == 0 && (numAttempts < 60))
            {
                numAttempts++;
                Thread.Sleep(1000);
                logger.Trace($@"Failed to access, (or filesize is 0) Attempt # {numAttempts}, {FileName}");
            }

            // if we failed to access the file, get outta here
            if (numAttempts >= 60)
            {
                logger.Error("Could not access file: " + FileName);
                logger.Error(e);
                return;
            }

            int seconds = ServerSettings.Instance.Import.AggressiveFileLockWaitTimeSeconds;
            if (seconds < 0)
            {
                seconds = ServerSettings.Instance.Import.AggressiveFileLockWaitTimeSeconds = 8;
                ServerSettings.Instance.SaveSettings();
            }

            Thread.Sleep(waitTime);
            numAttempts = 0;

            // For systems with no locking
            while (FileModified(FileName, seconds, ref filesize, writeAccess, ref e) && numAttempts < 60)
            {
                numAttempts++;
                Thread.Sleep(waitTime);
                // Only show if it's more than 'seconds' past
                if (numAttempts != 0 && numAttempts * 2 % seconds == 0)
                    logger.Warn(
                        $@"The modified date is too soon. Waiting to ensure that no processes are writing to it. {numAttempts}/60 {FileName}");
            }
        }

        // if we failed to access the file, get outta here
        if (numAttempts >= 60 || filesize == 0)
        {
            logger.Error("Could not access file: " + FileName);
            logger.Error(e);
            return;
        }
    }

    if (!File.Exists(FileName))
    {
        logger.Error("Could not access file: " + FileName);
        return;
    }

    FileInfo sourceFile = new FileInfo(FileName);
    nshareID = folder.ImportFolderID;

    // check if we have already processed this file
    SVR_VideoLocal_Place vlocalplace = RepoFactory.VideoLocalPlace.GetByFilePathAndImportFolderID(filePath, nshareID);
    SVR_VideoLocal vlocal = null;
    var filename = Path.GetFileName(filePath);

    if (vlocalplace != null)
    {
        vlocal = vlocalplace.VideoLocal;
        if (vlocal != null)
        {
            logger.Trace("VideoLocal record found in database: {0}", FileName);

            // This will only happen with DB corruption, so just clean up the mess.
            if (vlocalplace.FullServerPath == null)
            {
                if (vlocal.Places.Count == 1)
                {
                    RepoFactory.VideoLocal.Delete(vlocal);
                    vlocal = null;
                }
                RepoFactory.VideoLocalPlace.Delete(vlocalplace);
                vlocalplace = null;
            }

            if (vlocal != null && ForceHash)
            {
                vlocal.FileSize = filesize;
                vlocal.DateTimeUpdated = DateTime.Now;
            }
        }
    }

    if (vlocal == null)
    {
        // TODO support reading MD5 and SHA1 from files via the standard way
        logger.Trace("No existing VideoLocal, creating temporary record");
        vlocal = new SVR_VideoLocal
        {
            DateTimeUpdated = DateTime.Now,
            DateTimeCreated = DateTimeUpdated,
            FileName = filename,
            FileSize = filesize,
            Hash = string.Empty,
            CRC32 = string.Empty,
            MD5 = string.Empty,
            SHA1 = string.Empty,
            IsIgnored = 0,
            IsVariation = 0
        };
    }

    if (vlocalplace == null)
    {
        logger.Trace("No existing VideoLocal_Place, creating a new record");
        vlocalplace = new SVR_VideoLocal_Place
        {
            FilePath = filePath,
            ImportFolderID = nshareID,
            ImportFolderType = folder.ImportFolderType
        };
        // Make sure we have an ID
        RepoFactory.VideoLocalPlace.Save(vlocalplace);
    }

    // check if we need to get a hash for this file
    if (string.IsNullOrEmpty(vlocal.Hash) || ForceHash)
    {
        logger.Trace("No existing hash in VideoLocal, checking XRefs");
        if (!ForceHash)
        {
            // try getting the hash from the CrossRef
            List<CrossRef_File_Episode> crossRefs =
                RepoFactory.CrossRef_File_Episode.GetByFileNameAndSize(filename, vlocal.FileSize);
            if (crossRefs.Any())
            {
                vlocal.Hash = crossRefs[0].Hash;
                vlocal.HashSource = (int) HashSource.DirectHash;
            }
        }

        // try getting the hash from the LOCAL cache
        if (!ForceHash && string.IsNullOrEmpty(vlocal.Hash))
        {
            List<FileNameHash> fnhashes = RepoFactory.FileNameHash.GetByFileNameAndSize(filename, vlocal.FileSize);
            if (fnhashes != null && fnhashes.Count > 1)
            {
                // if we have more than one record it probably means there is some sort of corruption
                // let's delete the local records
                foreach (FileNameHash fnh in fnhashes)
                    RepoFactory.FileNameHash.Delete(fnh.FileNameHashID);
            }
            // requery to check if we erased them
            fnhashes = RepoFactory.FileNameHash.GetByFileNameAndSize(filename, vlocal.FileSize);
            if (fnhashes != null && fnhashes.Count == 1)
            {
                logger.Trace("Got hash from LOCAL cache: {0} ({1})", FileName, fnhashes[0].Hash);
                vlocal.Hash = fnhashes[0].Hash;
                vlocal.HashSource = (int) HashSource.WebCacheFileName;
            }
        }

        if (string.IsNullOrEmpty(vlocal.Hash))
            FillVideoHashes(vlocal);

        // hash the file
        if (string.IsNullOrEmpty(vlocal.Hash) || ForceHash)
        {
            logger.Info("Hashing File: {0}", FileName);
            ShokoService.CmdProcessorHasher.QueueState = PrettyDescriptionHashing;
            DateTime start = DateTime.Now;
            // update the VideoLocal record with the hash; since cloud support we calculate everything
            var hashes = FileHashHelper.GetHashInfo(FileName.Replace("/", $"{Path.DirectorySeparatorChar}"), true,
                ShokoServer.OnHashProgress, true, true, true);
            TimeSpan ts = DateTime.Now - start;
            logger.Trace("Hashed file in {0:#0.0} seconds --- {1} ({2})", ts.TotalSeconds, FileName,
                Utils.FormatByteSize(vlocal.FileSize));
            vlocal.Hash = hashes.ED2K?.ToUpperInvariant();
            vlocal.CRC32 = hashes.CRC32?.ToUpperInvariant();
            vlocal.MD5 = hashes.MD5?.ToUpperInvariant();
            vlocal.SHA1 = hashes.SHA1?.ToUpperInvariant();
            vlocal.HashSource = (int) HashSource.DirectHash;
        }
        FillMissingHashes(vlocal);

        // We should have a hash by now.
        // Before we save it, make sure there is not another record with this hash (possible duplicate file)
        SVR_VideoLocal tlocal = RepoFactory.VideoLocal.GetByHash(vlocal.Hash);
        bool duplicate = false;
        bool changed = false;

        if (tlocal != null)
        {
            logger.Trace("Found existing VideoLocal with hash, merging info from it");
            // Aid with hashing cloud. Merge hashes and save, regardless of duplicate file
            changed = tlocal.MergeInfoFrom(vlocal);
            vlocal = tlocal;

            List<SVR_VideoLocal_Place> preps = vlocal.Places
                .Where(a => !vlocalplace.FullServerPath.Equals(a.FullServerPath)).ToList();
            foreach (var prep in preps)
            {
                if (prep == null) continue;
                // clean up: if there is a 'duplicate file' that is invalid, remove it
                if (prep.FullServerPath == null)
                {
                    RepoFactory.VideoLocalPlace.Delete(prep);
                }
                else if (!File.Exists(prep.FullServerPath))
                {
                    RepoFactory.VideoLocalPlace.Delete(prep);
                }
            }

            var dupPlace = vlocal.Places.FirstOrDefault(a => !vlocalplace.FullServerPath.Equals(a.FullServerPath));
            if (dupPlace != null)
            {
                logger.Warn("Found Duplicate File");
                logger.Warn("---------------------------------------------");
                logger.Warn($"New File: {vlocalplace.FullServerPath}");
                logger.Warn($"Existing File: {dupPlace.FullServerPath}");
                logger.Warn("---------------------------------------------");

                if (ServerSettings.Instance.Import.AutomaticallyDeleteDuplicatesOnImport)
                {
                    vlocalplace.RemoveRecordAndDeletePhysicalFile();
                    return;
                }

                // check if we have a record of this in the database, if not create one
                List<DuplicateFile> dupFiles = RepoFactory.DuplicateFile.GetByFilePathsAndImportFolder(
                    vlocalplace.FilePath, dupPlace.FilePath, vlocalplace.ImportFolderID, dupPlace.ImportFolderID);
                if (dupFiles.Count == 0)
                    dupFiles = RepoFactory.DuplicateFile.GetByFilePathsAndImportFolder(dupPlace.FilePath,
                        vlocalplace.FilePath, dupPlace.ImportFolderID, vlocalplace.ImportFolderID);

                if (dupFiles.Count == 0)
                {
                    DuplicateFile dup = new DuplicateFile
                    {
                        DateTimeUpdated = DateTime.Now,
                        FilePathFile1 = vlocalplace.FilePath,
                        FilePathFile2 = dupPlace.FilePath,
                        ImportFolderIDFile1 = vlocalplace.ImportFolderID,
                        ImportFolderIDFile2 = dupPlace.ImportFolderID,
                        Hash = vlocal.Hash
                    };
                    RepoFactory.DuplicateFile.Save(dup);
                }
                // Notify duplicate, don't delete
                duplicate = true;
            }
        }

        if (!duplicate || changed)
            RepoFactory.VideoLocal.Save(vlocal, true);

        vlocalplace.VideoLocalID = vlocal.VideoLocalID;
        RepoFactory.VideoLocalPlace.Save(vlocalplace);

        if (duplicate)
        {
            CommandRequest_ProcessFile cr_procfile3 = new CommandRequest_ProcessFile(vlocal.VideoLocalID, false);
            cr_procfile3.Save();
            return;
        }

        // also save the filename-to-hash record
        // replace the existing records just in case they were corrupt
        FileNameHash fnhash;
        List<FileNameHash> fnhashes2 = RepoFactory.FileNameHash.GetByFileNameAndSize(filename, vlocal.FileSize);
        if (fnhashes2 != null && fnhashes2.Count > 1)
        {
            // if we have more than one record it probably means there is some sort of corruption
            // let's delete the local records
            foreach (FileNameHash fnh in fnhashes2)
                RepoFactory.FileNameHash.Delete(fnh.FileNameHashID);
        }

        if (fnhashes2 != null && fnhashes2.Count == 1)
            fnhash = fnhashes2[0];
        else
            fnhash = new FileNameHash();

        fnhash.FileName = filename;
        fnhash.FileSize = vlocal.FileSize;
        fnhash.Hash = vlocal.Hash;
        fnhash.DateTimeUpdated = DateTime.Now;
        RepoFactory.FileNameHash.Save(fnhash);
    }
    else
    {
        FillMissingHashes(vlocal);
    }

    if (((vlocal.Media?.GeneralStream?.Duration ?? 0) == 0) || vlocal.MediaVersion < SVR_VideoLocal.MEDIA_VERSION)
    {
        if (vlocalplace.RefreshMediaInfo())
            RepoFactory.VideoLocal.Save(vlocalplace.VideoLocal, true);
    }

    // now add a command to process the file
    CommandRequest_ProcessFile cr_procfile = new CommandRequest_ProcessFile(vlocal.VideoLocalID, false, SkipMyList);
    cr_procfile.Save();
}