/// <summary>
/// Computes a hash code by XOR-folding the hash of every field that is set to
/// a non-default value (protobuf-generated message style).
/// </summary>
public override int GetHashCode()
{
    // Seed with 1 so a message with all-default fields still hashes consistently.
    int hash = 1;
    if (FileNameHash != 0) hash ^= FileNameHash.GetHashCode();
    if (PaddedFileSize != 0) hash ^= PaddedFileSize.GetHashCode();
    if (UnpaddedFileSize != 0L) hash ^= UnpaddedFileSize.GetHashCode();
    if (FileOffset != 0L) hash ^= FileOffset.GetHashCode();
    // Reference-typed fields contribute only when their backing field is non-null.
    if (sHAHash_ != null) hash ^= SHAHash.GetHashCode();
    if (aESKey_ != null) hash ^= AESKey.GetHashCode();
    if (_unknownFields != null) hash ^= _unknownFields.GetHashCode();
    return hash;
}
/// <summary>
/// Reverts the pending edit recorded for the selected tree node and refreshes
/// the save-button state afterwards.
/// </summary>
private void undoToolStripMenuItem_Click(object sender, EventArgs e)
{
    TreeNode node = treeViewSfar.SelectedNode;
    DLCEditor.action pending = dlcEditor.listComplete[FileNameHash.Compute(node.Name)];

    switch (pending)
    {
        case DLCEditor.action.add:
            // An added file has no original entry to restore: drop the node entirely.
            treeViewSfar.SelectedNode.Remove();
            dlcEditor.undoAddFile(node.Name);
            break;

        case DLCEditor.action.delete:
            // Clear the highlight that marked the node as pending deletion.
            node.BackColor = Color.Empty;
            dlcEditor.undoDeleteFile(node.Name);
            break;

        case DLCEditor.action.replace:
            // Clear the highlight that marked the node as pending replacement.
            node.BackColor = Color.Empty;
            dlcEditor.undoReplaceFile(node.Name);
            break;

        case DLCEditor.action.copy:
            // Untouched entry: nothing to undo.
            break;
    }

    toolStripSaveFile.Enabled = dlcEditor.checkForExec();
}
//------------------------- GUI Treeview Functions --------------------------
/// <summary>
/// Populates the detail panel (full path, name hash, uncompressed/compressed
/// size, entry/block/data offsets) when a file node is selected in the tree.
/// Folder nodes and null selections are ignored.
/// </summary>
private void treeViewSfar_AfterSelect(object sender, TreeViewEventArgs e)
{
    TreeNode node = treeViewSfar.SelectedNode;
    if (node == null || isFolder(node))
    {
        return;
    }
    //prevents the image change
    node.SelectedImageIndex = node.ImageIndex;
    // if the program is extracting or replacing a file, it doesn't update the bottom status label
    if (!backgroundWorkerExtractFile.IsBusy && !backgroundWorkerEditFile.IsBusy)
    {
        toolStripStatusLabel.Text = node.Name;
        toolStripStatusLabel.Visible = true;
    }
    // Compute the name hash once and reuse it; the original hashed the path for
    // the Contains() check and again for the indexer lookup.
    FileNameHash nameHash = FileNameHash.Compute(node.Name);
    if (!dlcBase.fileList.Contains(nameHash))
    {
        return;
    }
    //the hard part begins here!
    sfarFile entry = dlcBase.fileList[nameHash];
    int fileBlockIndex;
    int fileComprSize = 0;
    float fileSize;
    string strFileSize = "";
    fileBlockIndex = entry.blockSizeIndex;
    textBoxFullName.Text = node.Name;
    textBoxHash.Text = entry.nameHash.ToString();
    setSize(entry.uncompressedSize, out fileSize, ref strFileSize);
    textBoxUncSize.Text = fileSize.ToString("0.0", CultureInfo.InvariantCulture);
    labelUncSizeBytes.Text = strFileSize;
    if (fileBlockIndex != -1)
    {
        // Compressed entry: total compressed size is the sum of all block sizes.
        for (int i = 0; i < entry.blockSizeArray.Length; i++)
        {
            fileComprSize += entry.blockSizeArray[i];
        }
        setSize(fileComprSize, out fileSize, ref strFileSize);
        textBoxComprSize.Text = fileSize.ToString("0.0", CultureInfo.InvariantCulture);
        labelComprSizeBytes.Text = strFileSize;
    }
    else
    {
        // Stored uncompressed: compressed size equals the uncompressed size.
        setSize(entry.uncompressedSize, out fileSize, ref strFileSize);
        textBoxComprSize.Text = fileSize.ToString("0.0", CultureInfo.InvariantCulture);
        labelComprSizeBytes.Text = strFileSize;
    }
    textBoxEntry.Text = "0x" + entry.entryOffset.ToString("X8");
    textBoxBlockIndex.Text = fileBlockIndex.ToString();
    textBoxDataOffset.Text = "0x" + entry.dataOffset[0].ToString("X8");
}
/// <summary>
/// Registers <paramref name="newFilePathName"/> (archive path) as a pending
/// "add" action backed by <paramref name="fileName"/> (path on disk), unless
/// the archive already contains that entry.
/// </summary>
public void setAddFile(string newFilePathName, string fileName)
{
    if (!fileIsPresent(newFilePathName))
    {
        // Compute the name hash once and reuse it for both bookkeeping lists
        // (the original hashed the same path twice).
        FileNameHash hash = FileNameHash.Compute(newFilePathName);
        listAdd.Add(hash, new add(newFilePathName, fileName));
        listComplete.Add(hash, action.add);
    }
}
/// <summary>
/// Registers a pending "replace" of <paramref name="fileToReplace"/> (archive
/// path) with <paramref name="newFile"/> (path on disk). No-op when the
/// archive does not contain the target entry.
/// </summary>
public void setReplaceFile(string fileToReplace, string newFile)
{
    if (fileIsPresent(fileToReplace))
    {
        // Compute the name hash once and reuse it (the original hashed twice).
        FileNameHash hash = FileNameHash.Compute(fileToReplace);
        listReplace.Add(hash, newFile);
        listComplete[hash] = action.replace;
    }
}
/// <summary>
/// Registers a pending "delete" of <paramref name="fileName"/> (archive path).
/// No-op when the archive does not contain the entry.
/// </summary>
public void setDeleteFile(string fileName)
{
    if (fileIsPresent(fileName))
    {
        // Compute the name hash once and reuse it (the original hashed twice).
        FileNameHash hash = FileNameHash.Compute(fileName);
        listDelete.Add(hash, fileName);
        listComplete[hash] = action.delete;
    }
}
/// <summary>
/// Persists the given FileNameHash record (insert or update) inside a
/// committed transaction on a fresh session.
/// </summary>
public void Save(FileNameHash obj)
{
    // populate the database
    using (var session = WebCache.SessionFactory.OpenSession())
    using (var transaction = session.BeginTransaction())
    {
        session.SaveOrUpdate(obj);
        transaction.Commit();
    }
}
/// <summary>
/// Fetches the single FileNameHash record matching the given file name, size
/// and hash, or null when no such record exists.
/// </summary>
public FileNameHash GetByNameSizeAndHash(string filename, long filesize, string hash)
{
    using (var session = JMMService.SessionFactory.OpenSession())
    {
        var criteria = session.CreateCriteria(typeof(FileNameHash));
        criteria.Add(Restrictions.Eq("Hash", hash));
        criteria.Add(Restrictions.Eq("FileName", filename));
        criteria.Add(Restrictions.Eq("FileSize", filesize));
        return criteria.UniqueResult<FileNameHash>();
    }
}
/// <summary>
/// Fetches the single FileNameHash record for the given user, file size, hash
/// and file name, or null when none exists.
/// </summary>
public FileNameHash GetForUser(string username, long fileSize, string hash, string fileName)
{
    using (var session = WebCache.SessionFactory.OpenSession())
    {
        var criteria = session.CreateCriteria(typeof(FileNameHash));
        criteria.Add(Restrictions.Eq("Username", username));
        criteria.Add(Restrictions.Eq("FileSize", fileSize));
        // The query normalises the incoming hash to trimmed upper case —
        // presumably matching how stored hashes are formatted; confirm.
        criteria.Add(Restrictions.Eq("Hash", hash.Trim().ToUpper()));
        criteria.Add(Restrictions.Eq("FileName", fileName));
        return criteria.UniqueResult<FileNameHash>();
    }
}
/// <summary>
/// Deletes the FileNameHash record with the given id, committing only when the
/// record actually exists; otherwise the transaction is discarded on dispose.
/// </summary>
public void Delete(int id)
{
    // populate the database
    using (var session = WebCache.SessionFactory.OpenSession())
    using (var transaction = session.BeginTransaction())
    {
        FileNameHash record = GetByID(id);
        if (record == null)
        {
            return; // nothing to delete; dispose rolls the transaction back
        }
        session.Delete(record);
        transaction.Commit();
    }
}
//------------------------- GUI Context Menu Functions --------------------------
/// <summary>
/// Adds an external file under the selected folder node, or — when an entry
/// with the same archive path already exists — offers to turn the add into a
/// replace instead. New nodes are coloured light green (pending add), replaced
/// nodes yellow (pending replace).
/// </summary>
private void addFileToolStripMenuItem_Click(object sender, EventArgs e)
{
    TreeNode node = treeViewSfar.SelectedNode;
    if (isFolder(node) && DialogSelectFileToReplace.ShowDialog() == DialogResult.OK)
    {
        string selectedFile = Path.GetFileName(DialogSelectFileToReplace.FileName);
        // Archive paths are built with forward slashes.
        string dlcNewFile = getFolderPath(node) + "/" + selectedFile;
        //check if the added file already exists in the dlc archive
        if (dlcBase.fileList.Contains(FileNameHash.Compute(dlcNewFile)))
        {
            DialogResult replaceQuestion = MessageBox.Show("Warning! " + dlcNewFile + " already exist in the archive, would you like to replace it?", "Warning, adding existing file", MessageBoxButtons.YesNo, MessageBoxIcon.Exclamation);
            if (replaceQuestion == DialogResult.No)
            {
                return;
            }
            else
            {
                // User chose to replace: locate the existing node by key and
                // record a pending replace instead of an add.
                TreeNode replaceNode = node.Nodes[node.Nodes.IndexOfKey(dlcNewFile)];
                dlcEditor.setReplaceFile(dlcNewFile, DialogSelectFileToReplace.FileName);
                replaceNode.BackColor = Color.Yellow;
                toolStripSaveFile.Enabled = true;
                return;
            }
        }
        // Genuinely new entry: create the tree node and mark it as pending add.
        TreeNode last = node.Nodes.Add(selectedFile, selectedFile);
        last.Name = dlcNewFile;
        last.BackColor = Color.LightGreen;
        // Pick the node icon from the file extension.
        switch (Path.GetExtension(selectedFile))
        {
            case ".afc":
                last.ImageIndex = 3;
                break;
            case ".bik":
                last.ImageIndex = 4;
                break;
            case ".tfc":
                last.ImageIndex = 2;
                break;
            default:
                last.ImageIndex = 1;
                break;
        }
        dlcEditor.setAddFile(dlcNewFile, DialogSelectFileToReplace.FileName);
        toolStripSaveFile.Enabled = true;
        treeViewSfar.SelectedNode = null;
    }
}
/// <summary>
/// Opens an SFAR archive, parses its structure, decompresses the embedded
/// file-list entry to resolve entry names, and adjusts the data offset to the
/// smallest data offset actually observed.
/// </summary>
/// <param name="fileName">Path of the archive file to open.</param>
/// <exception cref="FormatException">The archive has no file-list entry.</exception>
public DLCBase(string fileName)
{
    this.fileName = Path.GetFullPath(fileName);
    // using ensures the stream is released even when parsing throws; the
    // original only called Close() at the end and leaked on any exception.
    using (var fileStream = File.OpenRead(fileName))
    {
        getStructure(fileStream);
        var fileListEntry = fileList[fileListHash];
        if (fileListEntry == null)
        {
            throw new FormatException("File list not found.");
        }
        long minDataOffset = fileListEntry.dataOffset[0];
        using (var outStream = new MemoryStream())
        {
#if (WITH_GUI)
            DecompressEntry(fileListEntry, fileStream, outStream, null);
#else
            DecompressEntry(fileListEntry, fileStream, outStream);
#endif
            outStream.Position = 0;
            var reader = new StreamReader(outStream);
            // Each line of the decompressed file list is one archive path;
            // hash it to find the matching entry and attach the real name.
            while (reader.EndOfStream == false)
            {
                string line = reader.ReadLine();
                FileNameHash hashFile = FileNameHash.Compute(line);
                fileList[hashFile].fileName = line;
                if (fileList[hashFile].dataOffset[0] < minDataOffset)
                {
                    minDataOffset = fileList[hashFile].dataOffset[0];
                }
            }
        }
        if (minDataOffset > dataOffset)
        {
            dataOffset = (uint)minDataOffset;
        }
    }
}
/// <summary>
/// Registers a pending "replace" of <paramref name="fileToReplace"/> (archive
/// path) with <paramref name="newFile"/> (path on disk), with debug tracing.
/// No-op when the archive does not contain the target entry.
/// </summary>
public void setReplaceFile(string fileToReplace, string newFile)
{
    System.Diagnostics.Debug.WriteLine("Attempting to replace " + fileToReplace + " with " + newFile);
    if (fileIsPresent(fileToReplace))
    {
        System.Diagnostics.Debug.WriteLine("OK: Replacing " + fileToReplace + " with " + newFile);
        // Compute the name hash once and reuse it (the original hashed twice).
        FileNameHash hash = FileNameHash.Compute(fileToReplace);
        // The original wrapped Add in try/catch{} to ignore a duplicate key,
        // using the exception for control flow. Test for the key instead: a
        // repeated replace keeps the first replacement source, same as before.
        if (!listReplace.ContainsKey(hash))
        {
            listReplace.Add(hash, newFile);
        }
        listComplete[hash] = action.replace;
    }
}
/// <summary>
/// Shows a message box with the full path, name hash, uncompressed and
/// compressed sizes, and compression ratio of the selected file node.
/// Folder nodes are ignored.
/// </summary>
private void propertiesToolStripMenuItem_Click(object sender, EventArgs e)
{
    TreeNode node = treeViewSfar.SelectedNode;
    string message = "";
    float fileSize;
    float comprRatio;
    string strFileSize = "";
    if (isFile(node))
    {
        FileNameHash fileHash = FileNameHash.Compute(node.Name);
        sfarFile entry = dlcBase.fileList[fileHash];
        int compressedSize = 0;
        if (entry.blockSizeIndex != -1)
        {
            // Compressed entry: total compressed size is the sum of all blocks.
            foreach (int i in entry.blockSizeArray)
            {
                compressedSize += i;
            }
        }
        else
        {
            // Stored uncompressed: compressed size equals the uncompressed size.
            compressedSize = (int)entry.uncompressedSize;
        }
        message += "Full Path: " + entry.fileName + "\n\nHash file name: " + fileHash;
        setSize(entry.uncompressedSize, out fileSize, ref strFileSize);
        message += "\n\nFile size: " + fileSize.ToString("0.0", CultureInfo.InvariantCulture) + " " + strFileSize;
        message += " (" + entry.uncompressedSize.ToString("0,0", CultureInfo.InvariantCulture) + " Bytes)";
        setSize(compressedSize, out fileSize, ref strFileSize);
        message += "\n\nCompressed size: " + fileSize.ToString("0.0", CultureInfo.InvariantCulture) + " " + strFileSize;
        message += " (" + compressedSize.ToString("0,0", CultureInfo.InvariantCulture) + " Bytes)";
        // NOTE(review): float division — an entry with uncompressedSize == 0
        // yields Infinity/NaN here rather than throwing; confirm acceptable.
        comprRatio = compressedSize / (float)entry.uncompressedSize * 100;
        message += "\n\nCompression Ratio: " + comprRatio.ToString("0.#") + "%";
        MessageBox.Show(message, "Properties - " + node.Text, MessageBoxButtons.OK, MessageBoxIcon.None);
    }
}
/// <summary>
/// Computes the two 32-bit words of a FileNameHash for an archive path.
/// The path is normalised to backslashes and lower case first, making lookups
/// separator- and case-insensitive. The first half of the string feeds
/// value1, the second half feeds value2.
/// </summary>
private FileNameHash HashFilePath(string filePath)
{
    // Normalise separators and case before hashing.
    filePath = filePath.Replace('/', '\\');
    filePath = filePath.ToLower();
    FileNameHash hash = new FileNameHash();
    uint len = (uint)filePath.Length;
    uint l = (len >> 1); // midpoint: chars [0, l) feed value1, [l, len) feed value2
    int off, i;
    uint sum, temp, n;
    sum = 0;
    off = 0;
    // First half: XOR each char left-shifted by a sliding offset (8 bits per
    // char, masked to 0..31 by "& 0x1F").
    for (i = 0; i < l; i++)
    {
        sum ^= (uint)(filePath[i]) << (off & 0x1F);
        off += 8;
    }
    hash.value1 = sum;
    sum = 0;
    off = 0;
    // Second half: same XOR/shift, then rotate the accumulator right by the
    // low 5 bits of the shifted char.
    // NOTE(review): when n == 0, "sum << 32" relies on C# masking the shift
    // count to 5 bits, so the whole rotate degenerates to sum unchanged —
    // presumably intentional (matches the game's algorithm); confirm against
    // the reference implementation before changing.
    for (; i < len; i++)
    {
        temp = (uint)(filePath[i]) << (off & 0x1F);
        sum ^= temp;
        n = temp & 0x1F;
        sum = (sum << (32 - (int)n)) | (sum >> (int)n);
        off += 8;
    }
    hash.value2 = sum;
    return(hash);
}
/// <summary>
/// Hashes the file and reads its media info: resolves the import folder,
/// waits for the file to become readable (local files), creates or reuses the
/// VideoLocal/VideoLocal_Place records, obtains the ED2K hash (from cross-refs,
/// the local filename cache, or by hashing), de-duplicates against an existing
/// record with the same hash, persists everything, caches the filename→hash
/// mapping and queues a follow-up ProcessFile command.
/// </summary>
/// <returns>The saved VideoLocal_Place, or null when the file cannot be located/accessed.</returns>
private VideoLocal_Place ProcessFile_LocalInfo()
{
    // hash and read media info for file
    int nshareID = -1;
    string filePath = "";
    Tuple<ImportFolder, string> tup = VideoLocal_PlaceRepository.GetFromFullPath(FileName);
    if (tup == null)
    {
        logger.Error($"Unable to locate file {FileName} inside the import folders");
        return null;
    }
    ImportFolder folder = tup.Item1;
    filePath = tup.Item2;
    IFileSystem f = tup.Item1.FileSystem;
    if (f == null)
    {
        logger.Error("Unable to open filesystem for: {0}", FileName);
        return null;
    }
    long filesize = 0;
    if (folder.CloudID == null) // Local Access
    {
        if (!File.Exists(FileName))
        {
            logger.Error("File does not exist: {0}", FileName);
            return null;
        }
        int numAttempts = 0;
        // Wait 3 minutes seconds before giving up on trying to access the file
        while ((filesize = CanAccessFile(FileName)) == 0 && (numAttempts < 180))
        {
            numAttempts++;
            Thread.Sleep(1000);
            Console.WriteLine("Attempt # " + numAttempts.ToString());
        }
        // if we failed to access the file, get ouuta here
        if (numAttempts == 180)
        {
            logger.Error("Could not access file: " + FileName);
            return null;
        }
    }
    FileSystemResult<IObject> source = f.Resolve(FileName);
    if (source == null || !source.IsOk || (!(source.Result is IFile)))
    {
        logger.Error("Could not access file: " + FileName);
        return null;
    }
    IFile source_file = (IFile)source.Result;
    if (folder.CloudID.HasValue)
    {
        filesize = source_file.Size;
    }
    nshareID = folder.ImportFolderID;
    // check if we have already processed this file
    VideoLocal_Place vlocalplace = RepoFactory.VideoLocalPlace.GetByFilePathAndShareID(filePath, nshareID);
    VideoLocal vlocal;
    if (vlocalplace != null)
    {
        vlocal = vlocalplace.VideoLocal;
        logger.Trace("VideoLocal record found in database: {0}", vlocal.VideoLocalID);
        if (ForceHash)
        {
            vlocal.FileSize = filesize;
            vlocal.DateTimeUpdated = DateTime.Now;
        }
    }
    else
    {
        logger.Trace("VideoLocal, creating temporary record");
        vlocal = new VideoLocal();
        vlocal.DateTimeUpdated = DateTime.Now;
        vlocal.DateTimeCreated = vlocal.DateTimeUpdated;
        vlocal.FileName = Path.GetFileName(filePath);
        vlocal.FileSize = filesize;
        vlocal.Hash = string.Empty;
        vlocal.CRC32 = string.Empty;
        // FIX: the original wrote "source_file.MD5.ToUpperInvariant() ?? ..."
        // which dereferences MD5/SHA1 before the null-coalesce and throws when
        // the provider returns null; use ?. like the hashing branch below does.
        vlocal.MD5 = source_file.MD5?.ToUpperInvariant() ?? string.Empty;
        vlocal.SHA1 = source_file.SHA1?.ToUpperInvariant() ?? string.Empty;
        vlocal.IsIgnored = 0;
        vlocal.IsVariation = 0;
        vlocalplace = new VideoLocal_Place();
        vlocalplace.FilePath = filePath;
        vlocalplace.ImportFolderID = nshareID;
        vlocalplace.ImportFolderType = folder.ImportFolderType;
    }
    // check if we need to get a hash this file
    Hashes hashes = null;
    if (string.IsNullOrEmpty(vlocal.Hash) || ForceHash)
    {
        // try getting the hash from the CrossRef
        if (!ForceHash)
        {
            List<CrossRef_File_Episode> crossRefs = RepoFactory.CrossRef_File_Episode.GetByFileNameAndSize(vlocal.FileName, vlocal.FileSize);
            if (crossRefs.Count == 1)
            {
                vlocal.Hash = crossRefs[0].Hash;
                vlocal.HashSource = (int)HashSource.DirectHash;
            }
        }
        // try getting the hash from the LOCAL cache
        if (!ForceHash && string.IsNullOrEmpty(vlocal.Hash))
        {
            List<FileNameHash> fnhashes = RepoFactory.FileNameHash.GetByFileNameAndSize(vlocal.FileName, vlocal.FileSize);
            if (fnhashes != null && fnhashes.Count > 1)
            {
                // if we have more than one record it probably means there is some sort of corruption
                // lets delete the local records
                foreach (FileNameHash fnh in fnhashes)
                {
                    RepoFactory.FileNameHash.Delete(fnh.FileNameHashID);
                }
            }
            if (fnhashes != null && fnhashes.Count == 1)
            {
                logger.Trace("Got hash from LOCAL cache: {0} ({1})", FileName, fnhashes[0].Hash);
                vlocal.Hash = fnhashes[0].Hash;
                vlocal.HashSource = (int)HashSource.WebCacheFileName;
            }
        }
        if (string.IsNullOrEmpty(vlocal.Hash))
        {
            FillVideoHashes(vlocal);
        }
        if (string.IsNullOrEmpty(vlocal.Hash) && folder.CloudID.HasValue)
        {
            //Cloud and no hash, Nothing to do, except maybe Get the mediainfo....
            logger.Trace("No Hash found for cloud " + vlocal.FileName + " putting in videolocal table with empty ED2K");
            RepoFactory.VideoLocal.Save(vlocal, false);
            vlocalplace.VideoLocalID = vlocal.VideoLocalID;
            RepoFactory.VideoLocalPlace.Save(vlocalplace);
            if (vlocalplace.RefreshMediaInfo())
            {
                RepoFactory.VideoLocal.Save(vlocalplace.VideoLocal, true);
            }
            return vlocalplace;
        }
        // hash the file
        if (string.IsNullOrEmpty(vlocal.Hash) || ForceHash)
        {
            JMMService.CmdProcessorHasher.QueueState = PrettyDescriptionHashing;
            DateTime start = DateTime.Now;
            logger.Trace("Calculating ED2K hashes for: {0}", FileName);
            // update the VideoLocal record with the Hash, since cloud support we calculate everything
            hashes = FileHashHelper.GetHashInfo(FileName.Replace("/", "\\"), true, MainWindow.OnHashProgress, true, true, true);
            TimeSpan ts = DateTime.Now - start;
            logger.Trace("Hashed file in {0} seconds --- {1} ({2})", ts.TotalSeconds.ToString("#0.0"), FileName, Utils.FormatByteSize(vlocal.FileSize));
            vlocal.Hash = hashes.ed2k?.ToUpperInvariant();
            vlocal.CRC32 = hashes.crc32?.ToUpperInvariant();
            vlocal.MD5 = hashes.md5?.ToUpperInvariant();
            vlocal.SHA1 = hashes.sha1?.ToUpperInvariant();
            vlocal.HashSource = (int)HashSource.DirectHash;
        }
        FillMissingHashes(vlocal);
        // We should have a hash by now
        // before we save it, lets make sure there is not any other record with this hash (possible duplicate file)
        VideoLocal tlocal = RepoFactory.VideoLocal.GetByHash(vlocal.Hash);
        bool intercloudfolder = false;
        VideoLocal_Place prep = tlocal?.Places.FirstOrDefault(a => a.ImportFolder.CloudID == folder.CloudID && a.ImportFolderID == folder.ImportFolderID && vlocalplace.VideoLocal_Place_ID != a.VideoLocal_Place_ID);
        if (prep != null)
        {
            // delete the VideoLocal record
            logger.Warn("Deleting duplicate video file record");
            logger.Warn("---------------------------------------------");
            logger.Warn($"Keeping record for: {vlocalplace.FullServerPath}");
            logger.Warn($"Deleting record for: {prep.FullServerPath}");
            logger.Warn("---------------------------------------------");
            // check if we have a record of this in the database, if not create one
            List<DuplicateFile> dupFiles = RepoFactory.DuplicateFile.GetByFilePathsAndImportFolder(vlocalplace.FilePath, prep.FilePath, vlocalplace.ImportFolderID, prep.ImportFolderID);
            if (dupFiles.Count == 0)
            {
                dupFiles = RepoFactory.DuplicateFile.GetByFilePathsAndImportFolder(prep.FilePath, vlocalplace.FilePath, prep.ImportFolderID, vlocalplace.ImportFolderID);
            }
            if (dupFiles.Count == 0)
            {
                DuplicateFile dup = new DuplicateFile();
                dup.DateTimeUpdated = DateTime.Now;
                dup.FilePathFile1 = vlocalplace.FilePath;
                dup.FilePathFile2 = prep.FilePath;
                dup.ImportFolderIDFile1 = vlocalplace.ImportFolderID;
                dup.ImportFolderIDFile2 = prep.ImportFolderID;
                dup.Hash = vlocal.Hash;
                RepoFactory.DuplicateFile.Save(dup);
            }
            //Notify duplicate, don't delete
        }
        else if (tlocal != null)
        {
            vlocal = tlocal;
            intercloudfolder = true;
        }
        if (!intercloudfolder)
        {
            RepoFactory.VideoLocal.Save(vlocal, true);
        }
        vlocalplace.VideoLocalID = vlocal.VideoLocalID;
        RepoFactory.VideoLocalPlace.Save(vlocalplace);
        if (intercloudfolder)
        {
            CommandRequest_ProcessFile cr_procfile3 = new CommandRequest_ProcessFile(vlocal.VideoLocalID, false);
            cr_procfile3.Save();
            return vlocalplace;
        }
        // also save the filename to hash record
        // replace the existing records just in case it was corrupt
        FileNameHash fnhash = null;
        List<FileNameHash> fnhashes2 = RepoFactory.FileNameHash.GetByFileNameAndSize(vlocal.FileName, vlocal.FileSize);
        if (fnhashes2 != null && fnhashes2.Count > 1)
        {
            // if we have more than one record it probably means there is some sort of corruption
            // lets delete the local records
            foreach (FileNameHash fnh in fnhashes2)
            {
                RepoFactory.FileNameHash.Delete(fnh.FileNameHashID);
            }
        }
        if (fnhashes2 != null && fnhashes2.Count == 1)
        {
            fnhash = fnhashes2[0];
        }
        else
        {
            fnhash = new FileNameHash();
        }
        fnhash.FileName = vlocal.FileName;
        fnhash.FileSize = vlocal.FileSize;
        fnhash.Hash = vlocal.Hash;
        fnhash.DateTimeUpdated = DateTime.Now;
        RepoFactory.FileNameHash.Save(fnhash);
    }
    else
    {
        FillMissingHashes(vlocal);
    }
    if ((vlocal.Media == null) || vlocal.MediaVersion < VideoLocal.MEDIA_VERSION || vlocal.Duration == 0)
    {
        if (vlocalplace.RefreshMediaInfo())
        {
            RepoFactory.VideoLocal.Save(vlocalplace.VideoLocal, true);
        }
    }
    // now add a command to process the file
    CommandRequest_ProcessFile cr_procfile = new CommandRequest_ProcessFile(vlocal.VideoLocalID, false);
    cr_procfile.Save();
    return vlocalplace;
}
/// <summary>
/// Hashes the file and reads its media info (legacy, pre-cloud variant):
/// resolves the import folder, waits for the file to become readable, creates
/// or reuses the VideoLocal record, obtains the ED2K hash (from cross-refs,
/// the local filename cache, or by hashing), de-duplicates against an existing
/// record with the same hash, caches the filename→hash mapping, refreshes the
/// VideoInfo media metadata when missing/stale, and queues a follow-up
/// ProcessFile command.
/// </summary>
/// <returns>The saved VideoLocal, or null when the file is missing or inaccessible.</returns>
private VideoLocal ProcessFile_LocalInfo()
{
    // hash and read media info for file
    int nshareID = -1;
    string filePath = "";
    ImportFolderRepository repNS = new ImportFolderRepository();
    List<ImportFolder> shares = repNS.GetAll();
    // Resolve which import folder contains the file and its folder-relative path.
    DataAccessHelper.GetShareAndPath(FileName, shares, ref nshareID, ref filePath);
    if (!File.Exists(FileName))
    {
        logger.Error("File does not exist: {0}", FileName);
        return(null);
    }
    int numAttempts = 0;
    // Wait 3 minutes seconds before giving up on trying to access the file
    while ((!CanAccessFile(FileName)) && (numAttempts < 180))
    {
        numAttempts++;
        Thread.Sleep(1000);
        Console.WriteLine("Attempt # " + numAttempts.ToString());
    }
    // if we failed to access the file, get ouuta here
    if (numAttempts == 180)
    {
        logger.Error("Could not access file: " + FileName);
        return(null);
    }
    // check if we have already processed this file
    VideoLocal vlocal = null;
    VideoLocalRepository repVidLocal = new VideoLocalRepository();
    FileNameHashRepository repFNHash = new FileNameHashRepository();
    List<VideoLocal> vidLocals = repVidLocal.GetByFilePathAndShareID(filePath, nshareID);
    FileInfo fi = new FileInfo(FileName);
    if (vidLocals.Count > 0)
    {
        vlocal = vidLocals[0];
        logger.Trace("VideoLocal record found in database: {0}", vlocal.VideoLocalID);
        if (ForceHash)
        {
            vlocal.FileSize = fi.Length;
            vlocal.DateTimeUpdated = DateTime.Now;
        }
    }
    else
    {
        logger.Trace("VideoLocal, creating new record");
        vlocal = new VideoLocal();
        vlocal.DateTimeUpdated = DateTime.Now;
        vlocal.DateTimeCreated = vlocal.DateTimeUpdated;
        vlocal.FilePath = filePath;
        vlocal.FileSize = fi.Length;
        vlocal.ImportFolderID = nshareID;
        vlocal.Hash = "";
        vlocal.CRC32 = "";
        vlocal.MD5 = "";
        vlocal.SHA1 = "";
        vlocal.IsIgnored = 0;
        vlocal.IsVariation = 0;
    }
    // check if we need to get a hash this file
    Hashes hashes = null;
    if (string.IsNullOrEmpty(vlocal.Hash) || ForceHash)
    {
        // try getting the hash from the CrossRef
        if (!ForceHash)
        {
            CrossRef_File_EpisodeRepository repCrossRefs = new CrossRef_File_EpisodeRepository();
            List<CrossRef_File_Episode> crossRefs = repCrossRefs.GetByFileNameAndSize(Path.GetFileName(vlocal.FilePath), vlocal.FileSize);
            if (crossRefs.Count == 1)
            {
                vlocal.Hash = crossRefs[0].Hash;
                vlocal.HashSource = (int)HashSource.DirectHash;
            }
        }
        // try getting the hash from the LOCAL cache
        if (!ForceHash && string.IsNullOrEmpty(vlocal.Hash))
        {
            List<FileNameHash> fnhashes = repFNHash.GetByFileNameAndSize(Path.GetFileName(vlocal.FilePath), vlocal.FileSize);
            if (fnhashes != null && fnhashes.Count > 1)
            {
                // if we have more than one record it probably means there is some sort of corruption
                // lets delete the local records
                foreach (FileNameHash fnh in fnhashes)
                {
                    repFNHash.Delete(fnh.FileNameHashID);
                }
            }
            if (fnhashes != null && fnhashes.Count == 1)
            {
                logger.Trace("Got hash from LOCAL cache: {0} ({1})", FileName, fnhashes[0].Hash);
                vlocal.Hash = fnhashes[0].Hash;
                vlocal.HashSource = (int)HashSource.WebCacheFileName;
            }
        }
        // hash the file
        if (string.IsNullOrEmpty(vlocal.Hash) || ForceHash)
        {
            DateTime start = DateTime.Now;
            logger.Trace("Calculating hashes for: {0}", FileName);
            // update the VideoLocal record with the Hash
            hashes = FileHashHelper.GetHashInfo(FileName, true, MainWindow.OnHashProgress, ServerSettings.Hash_CRC32, ServerSettings.Hash_MD5, ServerSettings.Hash_SHA1);
            TimeSpan ts = DateTime.Now - start;
            logger.Trace("Hashed file in {0} seconds --- {1} ({2})", ts.TotalSeconds.ToString("#0.0"), FileName, Utils.FormatByteSize(vlocal.FileSize));
            vlocal.Hash = hashes.ed2k;
            vlocal.CRC32 = hashes.crc32;
            vlocal.MD5 = hashes.md5;
            vlocal.SHA1 = hashes.sha1;
            vlocal.HashSource = (int)HashSource.DirectHash;
        }
        // We should have a hash by now
        // before we save it, lets make sure there is not any other record with this hash (possible duplicate file)
        VideoLocal vidTemp = repVidLocal.GetByHash(vlocal.Hash);
        if (vidTemp != null)
        {
            // don't delete it, if it is actually the same record
            if (vidTemp.VideoLocalID != vlocal.VideoLocalID)
            {
                // delete the VideoLocal record
                logger.Warn("Deleting duplicate video file record");
                logger.Warn("---------------------------------------------");
                logger.Warn("Keeping record for: {0}", vlocal.FullServerPath);
                logger.Warn("Deleting record for: {0}", vidTemp.FullServerPath);
                logger.Warn("---------------------------------------------");
                // check if we have a record of this in the database, if not create one
                DuplicateFileRepository repDups = new DuplicateFileRepository();
                List<DuplicateFile> dupFiles = repDups.GetByFilePathsAndImportFolder(vlocal.FilePath, vidTemp.FilePath, vlocal.ImportFolderID, vidTemp.ImportFolderID);
                if (dupFiles.Count == 0)
                {
                    // also check with the two paths swapped
                    dupFiles = repDups.GetByFilePathsAndImportFolder(vidTemp.FilePath, vlocal.FilePath, vidTemp.ImportFolderID, vlocal.ImportFolderID);
                }
                if (dupFiles.Count == 0)
                {
                    DuplicateFile dup = new DuplicateFile();
                    dup.DateTimeUpdated = DateTime.Now;
                    dup.FilePathFile1 = vlocal.FilePath;
                    dup.FilePathFile2 = vidTemp.FilePath;
                    dup.ImportFolderIDFile1 = vlocal.ImportFolderID;
                    dup.ImportFolderIDFile2 = vidTemp.ImportFolderID;
                    dup.Hash = vlocal.Hash;
                    repDups.Save(dup);
                }
                repVidLocal.Delete(vidTemp.VideoLocalID);
            }
        }
        repVidLocal.Save(vlocal);
        // also save the filename to hash record
        // replace the existing records just in case it was corrupt
        FileNameHash fnhash = null;
        List<FileNameHash> fnhashes2 = repFNHash.GetByFileNameAndSize(Path.GetFileName(vlocal.FilePath), vlocal.FileSize);
        if (fnhashes2 != null && fnhashes2.Count > 1)
        {
            // if we have more than one record it probably means there is some sort of corruption
            // lets delete the local records
            foreach (FileNameHash fnh in fnhashes2)
            {
                repFNHash.Delete(fnh.FileNameHashID);
            }
        }
        if (fnhashes2 != null && fnhashes2.Count == 1)
        {
            fnhash = fnhashes2[0];
        }
        else
        {
            fnhash = new FileNameHash();
        }
        fnhash.FileName = Path.GetFileName(vlocal.FilePath);
        fnhash.FileSize = vlocal.FileSize;
        fnhash.Hash = vlocal.Hash;
        fnhash.DateTimeUpdated = DateTime.Now;
        repFNHash.Save(fnhash);
    }
    // now check if we have stored a VideoInfo record
    bool refreshMediaInfo = false;
    VideoInfoRepository repVidInfo = new VideoInfoRepository();
    VideoInfo vinfo = repVidInfo.GetByHash(vlocal.Hash);
    if (vinfo == null)
    {
        refreshMediaInfo = true;
        vinfo = new VideoInfo();
        vinfo.Hash = vlocal.Hash;
        vinfo.Duration = 0;
        vinfo.FileSize = fi.Length;
        vinfo.DateTimeUpdated = DateTime.Now;
        vinfo.FileName = filePath;
        vinfo.AudioBitrate = "";
        vinfo.AudioCodec = "";
        vinfo.VideoBitrate = "";
        vinfo.VideoBitDepth = "";
        vinfo.VideoCodec = "";
        vinfo.VideoFrameRate = "";
        vinfo.VideoResolution = "";
        repVidInfo.Save(vinfo);
    }
    else
    {
        // check if we need to update the media info
        // NOTE(review): throws if VideoCodec is null — records created above
        // always set "", so presumably existing rows are never null; confirm.
        if (vinfo.VideoCodec.Trim().Length == 0)
        {
            refreshMediaInfo = true;
        }
        else
        {
            refreshMediaInfo = false;
        }
    }
    if (refreshMediaInfo)
    {
        logger.Trace("Getting media info for: {0}", FileName);
        MediaInfoResult mInfo = FileHashHelper.GetMediaInfo(FileName, true);
        vinfo.AudioBitrate = string.IsNullOrEmpty(mInfo.AudioBitrate) ? "" : mInfo.AudioBitrate;
        vinfo.AudioCodec = string.IsNullOrEmpty(mInfo.AudioCodec) ? "" : mInfo.AudioCodec;
        vinfo.DateTimeUpdated = vlocal.DateTimeUpdated;
        vinfo.Duration = mInfo.Duration;
        vinfo.FileName = filePath;
        vinfo.FileSize = fi.Length;
        vinfo.VideoBitrate = string.IsNullOrEmpty(mInfo.VideoBitrate) ? "" : mInfo.VideoBitrate;
        vinfo.VideoBitDepth = string.IsNullOrEmpty(mInfo.VideoBitDepth) ? "" : mInfo.VideoBitDepth;
        vinfo.VideoCodec = string.IsNullOrEmpty(mInfo.VideoCodec) ? "" : mInfo.VideoCodec;
        vinfo.VideoFrameRate = string.IsNullOrEmpty(mInfo.VideoFrameRate) ? "" : mInfo.VideoFrameRate;
        vinfo.VideoResolution = string.IsNullOrEmpty(mInfo.VideoResolution) ? "" : mInfo.VideoResolution;
        vinfo.FullInfo = string.IsNullOrEmpty(mInfo.FullInfo) ? "" : mInfo.FullInfo;
        repVidInfo.Save(vinfo);
    }
    // now add a command to process the file
    CommandRequest_ProcessFile cr_procfile = new CommandRequest_ProcessFile(vlocal.VideoLocalID, false);
    cr_procfile.Save();
    return(vlocal);
}
/// <summary>
/// HTTP endpoint: looks up the stored hash for a file identified by size
/// (fsize), name (fname) and user (uname), returning it as XML, or ERROR_XML
/// on any invalid input / missing record.
/// </summary>
protected void Page_Load(object sender, EventArgs e)
{
    Response.ContentType = "text/xml";
    try
    {
        // NOTE(review): this unconditional error response + return short-circuits
        // the handler — everything below it is unreachable dead code. It looks
        // like a deliberate kill-switch for the endpoint; confirm before
        // removing it to re-enable the lookup logic.
        Response.Write(Constants.ERROR_XML);
        return;
        string sfsize = Utils.GetParam("fsize");
        long fsize = 0;
        long.TryParse(sfsize, out fsize);
        if (fsize <= 0)
        {
            Response.Write(Constants.ERROR_XML);
            return;
        }
        string fname = Utils.GetParam("fname");
        if (fname.Trim().Length == 0)
        {
            Response.Write(Constants.ERROR_XML);
            return;
        }
        string uname = Utils.GetParam("uname");
        if (uname.Trim().Length == 0)
        {
            Response.Write(Constants.ERROR_XML);
            return;
        }
        FileNameHash_Rep repHash = new FileNameHash_Rep();
        // check for user specific
        FileNameHash fnh = null;
        List<FileNameHash> recs = repHash.SearchForUser(uname, fsize, fname);
        if (recs.Count == 0)
        {
            // check for other users
            recs = repHash.SearchForAll(fsize, fname);
            if (recs.Count > 0)
            {
                fnh = recs[0];
            }
        }
        else
        {
            fnh = recs[0];
        }
        if (fnh == null)
        {
            Response.Write(Constants.ERROR_XML);
            return;
        }
        FileHashResult data = new FileHashResult(fnh.Hash);
        string ret = Utils.ConvertToXML(data, typeof(FileHashResult));
        Response.Write(ret);
    }
    catch (Exception ex)
    {
        // On unexpected failure the raw exception text is written to the
        // response instead of ERROR_XML.
        Response.Write(ex.ToString());
        return;
    }
}
/// <summary>
/// Extracts the selected file — or every archive entry under the selected
/// folder — to a user-chosen destination, running the work on a background
/// worker while pumping UI messages until it finishes.
/// </summary>
private void extractToolStripMenuItem_Click(object sender, EventArgs e)
{
    TreeNode node = treeViewSfar.SelectedNode;
    extractFileDialog.FileName = node.Text;
    List<sfarFile> listFiles = new List<sfarFile>();
    //add selected file(s) to the extraction list
    if (isFile(node) && extractFileDialog.ShowDialog() == DialogResult.OK)
    {
        sfarFile entry = dlcBase.fileList[FileNameHash.Compute(node.Name)];
        listFiles.Add(entry);
    }
    else if (isFolder(node) && extractFolderDialog.ShowDialog() == Microsoft.WindowsAPICodePack.Dialogs.CommonFileDialogResult.Ok)
    {
        // Collect every entry whose resolved path mentions the folder name.
        // NOTE(review): IndexOf is a plain substring match, so an unrelated
        // path containing the same text would also be included — confirm this
        // is the intended matching rule.
        foreach (sfarFile entry in dlcBase.fileList)
        {
            string fileName = entry.fileName;
            int indexStr;
            if (fileName == null)
            {
                // Entry whose name was never resolved from the file list.
                continue;
            }
            indexStr = fileName.IndexOf(node.Text);
            if (indexStr != -1)
            {
                listFiles.Add(entry);
            }
        }
    }
    else // safety else, hopefully never enters
    {
        return;
    }
    toolStripProgressBar.Visible = true;
    toolStripStatusLabel.Visible = true;
    try
    {
        //main extraction
        backgroundWorkerExtractFile.RunWorkerAsync(new object[2] { listFiles, node });
        while (backgroundWorkerExtractFile.IsBusy)
        {
            // Keep UI messages moving, so the form remains
            // responsive during the asynchronous operation.
            if (backgroundWorkerExtractFile.CancellationPending)
            {
                return;
            }
            else
            {
                Application.DoEvents();
            }
        }
        toolStripStatusLabel.Text = "Done.";
        if (isFile(node))
        {
            MessageBox.Show("File " + node.Text + " has been successfully extracted.", "Extraction success", MessageBoxButtons.OK, MessageBoxIcon.Information);
        }
        else
        {
            MessageBox.Show("All files from folder " + node.Text + " have been successfully extracted.", "Extraction success", MessageBoxButtons.OK, MessageBoxIcon.Information);
        }
        toolStripProgressBar.Visible = false;
    }
    catch (Exception exc)
    {
        MessageBox.Show("An error occurred while extracting " + node.Text + ":\n" + exc.Message, "Exception Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
}
/// <summary>
/// Cancels a pending "replace" for the given archive path and marks the entry
/// as untouched (copy) again.
/// </summary>
public void undoReplaceFile(string fileToReplace)
{
    // Compute the name hash once and reuse it (the original hashed twice).
    FileNameHash hash = FileNameHash.Compute(fileToReplace);
    listReplace.Remove(hash);
    listComplete[hash] = action.copy;
}
/// <summary>
/// Cancels a pending "delete" for the given archive path and marks the entry
/// as untouched (copy) again.
/// </summary>
public void undoDeleteFile(string fileName)
{
    // Compute the name hash once and reuse it (the original hashed twice).
    FileNameHash hash = FileNameHash.Compute(fileName);
    listDelete.Remove(hash);
    listComplete[hash] = action.copy;
}
/// <summary>
/// Hashes a file on disk (after waiting for it to become readable and stop
/// changing), creates or updates the matching VideoLocal / VideoLocal_Place
/// records, detects duplicate files by hash, saves the filename-to-hash cache
/// record, refreshes media info when stale, and finally queues the file for
/// server-side processing.
/// </summary>
/// <param name="progress">Optional progress sink for status/percentage reporting.</param>
/// <param name="token">Cancellation token, forwarded to the hasher.</param>
public override async Task RunAsync(IProgress <ICommand> progress = null, CancellationToken token = default(CancellationToken))
{
    logger.Trace($"Checking File For Hashes: {File.FullName}");
    try
    {
        ReportInit(progress);
        // hash and read media info for file
        int nshareID;
        long filesize = 0;
        if (_importFolder.CloudID == null) // Local Access
        {
            if (!System.IO.File.Exists(File.FullName))
            {
                ReportError(progress, $"File does not exist: {File.FullName}");
                return;
            }
            int numAttempts = 0;
            bool writeAccess = _importFolder.IsDropSource == 1;
            // Wait 1 minute before giving up on trying to access the file
            // first only do read to not get in something's way
            while ((filesize = CanAccessFile(File.FullName, false)) == 0 && (numAttempts < 60))
            {
                numAttempts++;
                Thread.Sleep(1000);
                logger.Trace($@"Failed to access, (or filesize is 0) Attempt # {numAttempts}, {File.FullName}");
            }
            // if we failed to access the file, get ouuta here
            if (numAttempts >= 60)
            {
                ReportError(progress, $"Could not access file: {File.FullName}");
                return;
            }
            // At least 1s between to ensure that size has the chance to change
            // TODO make this a setting to allow fine tuning on various configs
            // TODO Make this able to be disabled. It adds 1.5s to hashing just waiting for the Linux/NAS use case
            int seconds = 8;
            int waitTime = seconds * 1000 / 2;
            Thread.Sleep(waitTime);
            numAttempts = 0;
            //For systems with no locking
            // TODO make this a setting as well
            while (FileModified(File.FullName, seconds, ref filesize, writeAccess) && numAttempts < 60)
            {
                numAttempts++;
                Thread.Sleep(waitTime);
                // Only show if it's more than 'seconds' past
                if (numAttempts != 0 && numAttempts * 2 % seconds == 0)
                {
                    logger.Warn($@"The modified date is too soon. Waiting to ensure that no processes are writing to it. 
{numAttempts}/60 {File.FullName}");
                }
            }
            // if we failed to access the file, get ouuta here
            if (numAttempts >= 60)
            {
                ReportError(progress, $"Could not access file: {File.FullName}");
                return;
            }
        }
        ReportUpdate(progress, 10);
        if (_importFolder.CloudID.HasValue)
        {
            filesize = File.Size;
        }
        nshareID = _importFolder.ImportFolderID;
        // check if we have already processed this file
        SVR_VideoLocal_Place vlocalplace = Repo.Instance.VideoLocal_Place.GetByFilePathAndImportFolderID(_filePath, nshareID);
        SVR_VideoLocal vlocal = null;
        var filename = Path.GetFileName(_filePath);
        if (vlocalplace != null)
        {
            vlocal = vlocalplace.VideoLocal;
            if (vlocal != null)
            {
                logger.Trace("VideoLocal record found in database: {0}", File.FullName);
                // This will only happen with DB corruption, so just clean up the mess.
                if (vlocalplace.FullServerPath == null)
                {
                    if (vlocal.Places.Count == 1)
                    {
                        Repo.Instance.VideoLocal.Delete(vlocal);
                        vlocal = null;
                    }
                    Repo.Instance.VideoLocal_Place.Delete(vlocalplace);
                    vlocalplace = null;
                }
                if (vlocal != null && Force)
                {
                    vlocal.FileSize = filesize;
                    vlocal.DateTimeUpdated = DateTime.Now;
                }
            }
        }
        bool duplicate = false;
        SVR_VideoLocal vlocal1 = vlocal;
        using (var txn = Repo.Instance.VideoLocal.BeginAddOrUpdate(vlocal1?.VideoLocalID ?? 0, () =>
        {
            logger.Trace("No existing VideoLocal, creating temporary record");
            return(new SVR_VideoLocal
            {
                DateTimeUpdated = DateTime.Now,
                DateTimeCreated = DateTime.Now,
                FileSize = filesize,
                Hash = string.Empty,
                CRC32 = string.Empty,
                MD5 = File?.MD5?.ToUpperInvariant() ?? string.Empty,
                SHA1 = File?.SHA1?.ToUpperInvariant() ?? string.Empty,
                IsIgnored = 0,
                IsVariation = 0
            });
        }))
        {
            vlocal = txn.Entity;
            if (vlocalplace == null)
            {
                logger.Trace("No existing VideoLocal_Place, creating a new record");
                vlocalplace = new SVR_VideoLocal_Place
                {
                    FilePath = _filePath,
                    ImportFolderID = nshareID,
                    ImportFolderType = _importFolder.ImportFolderType
                };
                // Make sure we have an ID
                vlocalplace = Repo.Instance.VideoLocal_Place.BeginAdd(vlocalplace).Commit();
            }
            // check if we need to get a hash this file
            // IDEs might warn of possible null. It is set in the lambda above, so it shouldn't ever be null
            if (string.IsNullOrEmpty(vlocal.Hash) || Force)
            {
                logger.Trace("No existing hash in VideoLocal, checking XRefs");
                if (!Force)
                {
                    // try getting the hash from the CrossRef
                    List <CrossRef_File_Episode> crossRefs = Repo.Instance.CrossRef_File_Episode.GetByFileNameAndSize(filename, vlocal.FileSize);
                    if (crossRefs.Any())
                    {
                        vlocal.Hash = crossRefs[0].Hash;
                        vlocal.HashSource = (int)HashSource.DirectHash;
                    }
                }
                // try getting the hash from the LOCAL cache
                if (!Force && string.IsNullOrEmpty(vlocal.Hash))
                {
                    Repo.Instance.FileNameHash.FindAndDelete(() =>
                    {
                        List <FileNameHash> fnhashes = Repo.Instance.FileNameHash.GetByFileNameAndSize(filename, vlocal.FileSize);
                        if (fnhashes != null && fnhashes.Count > 1)
                        {
                            // if we have more than one record it probably means there is some sort of corruption
                            // lets delete the local records
                            return(fnhashes);
                        }
                        return(new List <FileNameHash>());
                    });
                    // reinit this to check if we erased them
                    FileNameHash fnhash = Repo.Instance.FileNameHash.GetByFileNameAndSize(filename, vlocal.FileSize).FirstOrDefault();
                    // BUGFIX: the cache may legitimately be empty here (no record, or the
                    // corrupt records were just deleted above); the old code dereferenced
                    // fnhash unconditionally and threw a NullReferenceException.
                    if (fnhash != null)
                    {
                        logger.Trace("Got hash from LOCAL cache: {0} ({1})", File.FullName, fnhash.Hash);
                        vlocal.Hash = fnhash.Hash;
                        vlocal.HashSource = (int)HashSource.WebCacheFileName;
                    }
                }
                if (string.IsNullOrEmpty(vlocal.Hash))
                {
                    FillVideoHashes(vlocal);
                }
                //Cloud and no hash, Nothing to do, except maybe Get the mediainfo....
                if (string.IsNullOrEmpty(vlocal.Hash) && _importFolder.CloudID.HasValue)
                {
                    logger.Trace("No Hash found for cloud " + filename + " putting in videolocal table with empty ED2K");
                    vlocal = txn.Commit(true);
                    int vlpid = vlocalplace.VideoLocalID;
                    using (var upd = Repo.Instance.VideoLocal_Place.BeginAddOrUpdate(vlpid))
                    {
                        upd.Entity.VideoLocalID = vlocal.VideoLocalID;
                        vlocalplace = upd.Commit();
                    }
                    if (vlocalplace.RefreshMediaInfo(vlocal))
                    {
                        txn.Commit(true);
                    }
                    ReportFinish(progress);
                    return;
                }
                // hash the file
                if (string.IsNullOrEmpty(vlocal.Hash) || Force)
                {
                    logger.Info("Hashing File: {0}", File.FullName);
                    _hashingState = true;
                    DateTime start = DateTime.Now;
                    // update the VideoLocal record with the Hash, since cloud support we calculate everything
                    Hasher h = new Hasher(File, HashAll);
                    string error = await h.RunAsync(new ChildProgress(20, 60, this, progress), token);
                    if (error != null)
                    {
                        ReportError(progress, error);
                        return;
                    }
                    TimeSpan ts = DateTime.Now - start;
                    logger.Trace("Hashed file in {0:#0.0} seconds --- {1} ({2})", ts.TotalSeconds, File.FullName, Utils.FormatByteSize(vlocal.FileSize));
                    vlocal.Hash = h.Result.GetHash(HashTypes.ED2K);
                    vlocal.CRC32 = h.Result.GetHash(HashTypes.CRC);
                    vlocal.MD5 = h.Result.GetHash(HashTypes.MD5);
                    vlocal.SHA1 = h.Result.GetHash(HashTypes.SHA1);
                    vlocal.HashSource = (int)HashSource.DirectHash;
                }
                _hashingState = false;
                await FillMissingHashes(vlocal, token, progress);
                // We should have a hash by now
                // before we save it, lets make sure there is not any other record with this hash (possible duplicate file)
                // TODO Check this case. I'm not sure how EF handles changing objects that we are working on
                SVR_VideoLocal tlocal = Repo.Instance.VideoLocal.GetByHash(vlocal.Hash);
                bool changed = false;
                if (tlocal != null)
                {
                    logger.Trace("Found existing VideoLocal with hash, merging info from it");
                    // Aid with hashing cloud. Merge hashes and save, regardless of duplicate file
                    changed = tlocal.MergeInfoFrom(vlocal);
                    vlocal = tlocal;
                    List <SVR_VideoLocal_Place> preps = vlocal.Places.Where(a => a.ImportFolder.CloudID == _importFolder.CloudID && !vlocalplace.FullServerPath.Equals(a.FullServerPath)).ToList();
                    foreach (var prep in preps)
                    {
                        if (prep == null)
                        {
                            continue;
                        }
                        // clean up, if there is a 'duplicate file' that is invalid, remove it.
                        if (prep.FullServerPath == null)
                        {
                            Repo.Instance.VideoLocal_Place.Delete(prep);
                        }
                        else
                        {
                            IResult dupFileSystemResult = prep.ImportFolder?.FileSystem?.Resolve(prep.FullServerPath);
                            if (dupFileSystemResult == null || dupFileSystemResult.Status != NutzCode.CloudFileSystem.Status.Ok)
                            {
                                Repo.Instance.VideoLocal_Place.Delete(prep);
                            }
                        }
                    }
                    var dupPlace = vlocal.Places.FirstOrDefault(a => a.ImportFolder.CloudID == _importFolder.CloudID && !vlocalplace.FullServerPath.Equals(a.FullServerPath));
                    ReportUpdate(progress, 85);
                    if (dupPlace != null)
                    {
                        logger.Warn("Found Duplicate File");
                        logger.Warn("---------------------------------------------");
                        logger.Warn($"New File: {vlocalplace.FullServerPath}");
                        logger.Warn($"Existing File: {dupPlace.FullServerPath}");
                        logger.Warn("---------------------------------------------");
                        // check if we have a record of this in the database, if not create one
                        List <DuplicateFile> dupFiles = Repo.Instance.DuplicateFile.GetByFilePathsAndImportFolder(vlocalplace.FilePath, dupPlace.FilePath, vlocalplace.ImportFolderID, dupPlace.ImportFolderID);
                        if (dupFiles.Count == 0)
                        {
                            dupFiles = Repo.Instance.DuplicateFile.GetByFilePathsAndImportFolder(dupPlace.FilePath, vlocalplace.FilePath, dupPlace.ImportFolderID, vlocalplace.ImportFolderID);
                        }
                        if (dupFiles.Count == 0)
                        {
                            DuplicateFile dup = new DuplicateFile
                            {
                                DateTimeUpdated = DateTime.Now,
                                FilePathFile1 = vlocalplace.FilePath,
                                FilePathFile2 = dupPlace.FilePath,
                                ImportFolderIDFile1 = vlocalplace.ImportFolderID,
                                ImportFolderIDFile2 = dupPlace.ImportFolderID,
                                Hash = vlocal.Hash
                            };
                            Repo.Instance.DuplicateFile.BeginAdd(dup).Commit();
                        }
                        //Notify duplicate, don't delete
                        duplicate = true;
                    }
                }
                if (!duplicate || changed)
                {
                    vlocal = txn.Commit();
                }
            }
            ReportUpdate(progress, 90);
            int vlplid = vlocalplace.VideoLocalID;
            using (var upd = Repo.Instance.VideoLocal_Place.BeginAddOrUpdate(vlplid))
            {
                upd.Entity.VideoLocalID = vlocal.VideoLocalID;
                vlocalplace = upd.Commit();
            }
        }
        if (duplicate)
        {
            Queue.Instance.Add(new CmdServerProcessFile(vlocal.VideoLocalID, false));
            ReportFinish(progress);
            return;
        }
        // also save the filename to hash record
        // replace the existing records just in case it was corrupt
        Repo.Instance.FileNameHash.FindAndDelete(() =>
        {
            List <FileNameHash> fnhashes = Repo.Instance.FileNameHash.GetByFileNameAndSize(filename, vlocal.FileSize);
            if (fnhashes != null && fnhashes.Count > 1)
            {
                // if we have more than one record it probably means there is some sort of corruption
                // lets delete the local records
                return(fnhashes);
            }
            return(new List <FileNameHash>());
        });
        ReportUpdate(progress, 95);
        using (var upd = Repo.Instance.FileNameHash.BeginAddOrUpdate(() => Repo.Instance.FileNameHash.GetByFileNameAndSize(filename, vlocal.FileSize).FirstOrDefault()))
        {
            upd.Entity.FileName = filename;
            upd.Entity.FileSize = vlocal.FileSize;
            upd.Entity.Hash = vlocal.Hash;
            upd.Entity.DateTimeUpdated = DateTime.Now;
            upd.Commit();
        }
        if (vlocal.Media == null || vlocal.MediaVersion < SVR_VideoLocal.MEDIA_VERSION || vlocal.Duration == 0)
        {
            int vid = vlocal.VideoLocalID;
            using (var upd = Repo.Instance.VideoLocal.BeginAddOrUpdate(vid))
            {
                if (vlocalplace.RefreshMediaInfo(upd.Entity))
                {
                    vlocal = upd.Commit(true);
                }
            }
        }
        // now add a command to process the file
        Queue.Instance.Add(new CmdServerProcessFile(vlocal.VideoLocalID, false));
        ReportFinish(progress);
    }
    catch (Exception ex)
    {
        ReportError(progress, $"Error processing ServerHashFile: {File.FullName}\n{ex}", ex);
    }
}
// True when the given archive path already has a pending replacement queued.
public bool isFileSetForReplacement(string filename)
    => listReplace.ContainsKey(FileNameHash.Compute(filename));
/// <summary>
/// Packs a directory tree (which must contain a BIOGame folder) into an SFAR
/// archive. Every file is LZMA-compressed in MaximumBlockSize chunks, except
/// .bik/.afc files which are stored raw (unless forceCompress is set). A
/// sorted-by-hash entry table, a block-size table and an embedded file list
/// are written per the SFAR layout.
/// </summary>
/// <param name="dir">Source directory; a trailing backslash is appended if missing.</param>
/// <param name="outputPath">Output .sfar path; if it is an existing directory, "Default.sfar" is appended.</param>
/// <exception cref="ArgumentException">If <paramref name="dir"/> is not a directory.</exception>
/// <exception cref="NotImplementedException">If no BIOGame folder exists under <paramref name="dir"/>.</exception>
public static void Compress(string dir, string outputPath)
{
    if (!Directory.Exists(dir))
    {
        throw new ArgumentException("Invalid argument: not a directory.");
    }
    if (Directory.Exists(outputPath))
    {
        // outputPath is a directory: append a default archive name.
        if (outputPath[outputPath.Length - 1].CompareTo('\\') != 0)
        {
            outputPath += "\\";
        }
        outputPath += "Default.sfar";
    }
    if (dir[dir.Length - 1].CompareTo('\\') != 0)
    {
        dir += "\\";
    }
    string bio = dir + "BIOGame";
    string unk = dir + "__UNKNOWN";
    if (Directory.Exists(bio))
    {
        // Drop any leftover __UNKNOWN folder from a previous unpack.
        if (Directory.Exists(unk))
        {
            Directory.Delete(unk, true);
        }
    }
    else
    {
        // BUGFIX: message typo ("founded" -> "found").
        throw new NotImplementedException("Incorrect folder location, BIOGame folder not found");
    }
    uint pointerEntry = sfarFileTableOffset;
    uint pointerBlockSize = 0x0; //not defined yet
    uint pointerData = 0x0;      //not defined yet
    uint entryBlockSize = 0x1E;  //30 bytes per entry record
    uint totBlockSize = 0;
    int posBlockSize = 0;
    var outputBlock = new byte[sfarMaxBlockSize];
    // Well-known hash under which the embedded file list is stored.
    FileNameHash fileListHash = new FileNameHash(new byte[] { 0xB5, 0x50, 0x19, 0xCB, 0xF9, 0xD3, 0xDA, 0x65, 0xD5, 0x5B, 0x32, 0x1C, 0x00, 0x19, 0x69, 0x7C, });
    SortedDictionary <FileNameHash, string> fileTable = new SortedDictionary <FileNameHash, string>();
    string allFileList = "";
    //get the filelist to put inside the sfar archive
    string[] fileList = Directory.GetFiles(dir, "*.*", SearchOption.AllDirectories);
    sfarFileTableCount = (uint)fileList.Length + 1; // +1 for the file-list pseudo entry
    //updating the pointer of block size index
    sfarBlockSizeTableOffset = pointerBlockSize = sfarFileTableOffset + (sfarFileTableCount * entryBlockSize);
    uint counter = 1;
    //creating the file
    using (FileStream stream = new FileStream(outputPath, FileMode.Create))
    {
        using (BinaryWriter writer = new BinaryWriter(stream))
        {
            //writing sfar header
            writer.Write(sfarHeader);
            writer.Write(sfarVersion);
            writer.Write(sfarDataOffset);
            writer.Write(sfarFileTableOffset);
            writer.Write(sfarFileTableCount);
            writer.Write(sfarBlockSizeTableOffset);
            writer.Write(sfarMaxBlockSize);
            writer.Write((uint)CompressionScheme.Lzma);
            uint aux = 0;
            //---------------------------------------------------------------------------------------------
            //calculating num of blocksize blocks
            foreach (string show in fileList)
            {
                string extension = Path.GetExtension(show);
                //if the file is a .bik or .afc it's not counted (stored uncompressed)
                if ((extension.CompareTo(".bik") != 0 && extension.CompareTo(".afc") != 0) || forceCompress)
                {
                    aux = (uint)Math.Ceiling(Getsize(show) / (double)sfarMaxBlockSize);
                    totBlockSize += aux;
                }
            }
            //adding the filelist string blocksize blocks to the total num of blocksizes
            foreach (string show in fileList)
            {
                var temp = show.Remove(0, dir.Length - 1) + Environment.NewLine;
                temp = temp.Replace("\\", "/");
                allFileList += temp;
            }
            aux = (uint)Math.Ceiling(allFileList.Length / (double)sfarMaxBlockSize);
            totBlockSize += aux;
            //---------------------------------------------------------------------------------------------
            // Data begins right after the (2-byte) block-size table.
            sfarDataOffset = pointerData = (totBlockSize * 2) + sfarBlockSizeTableOffset;
            stream.Seek(8, SeekOrigin.Begin);
            writer.Write(sfarDataOffset);
            //creating the SORTED hash table array
            string strFileNameHash;
            foreach (string fileName in fileList)
            {
                strFileNameHash = fileName.Remove(0, dir.Length - 1);
                strFileNameHash = strFileNameHash.Replace("\\", "/");
                fileTable.Add(FileNameHash.Compute(strFileNameHash), fileName);
            }
            fileTable.Add(fileListHash, "");
            Stream streamRead;
            uint fileListPointerEntry = 0;
            uint fileLength;
            FileNameHash fileNameHash;
            MemoryStream encStream;
            int initialPosBlock = 0;
            ushort[] blockSizeArray;
            foreach (KeyValuePair <FileNameHash, string> kvp in fileTable)
            {
                if (kvp.Value == "") //this is the file list entry, its slot is filled in afterwards
                {
                    fileListPointerEntry = pointerEntry;
                    pointerEntry += entryBlockSize;
                }
                else
                {
                    if (verbose)
                    {
                        Console.WriteLine("File: {0}\n Entry Offset: {1:X8}", kvp.Value, pointerEntry);
                        Console.WriteLine(" Block Index Offset: {0:X8}", pointerBlockSize);
                        Console.WriteLine(" Data Offset: {0:X8}", pointerData);
                        Console.WriteLine(" Initial blocknum: {0}", posBlockSize);
                    }
                    encStream = new MemoryStream();
                    string fileName = kvp.Value;
                    fileLength = (uint)Getsize(fileName);
                    fileNameHash = kvp.Key;
                    streamRead = new FileStream(fileName, FileMode.Open);
                    string extension = Path.GetExtension(fileName);
                    if ((extension.CompareTo(".bik") == 0 || extension.CompareTo(".afc") == 0) && !forceCompress)
                    {
                        // Store raw; blockSizeIndex of -1 marks an uncompressed entry.
                        outputBlock = new byte[fileLength];
                        streamRead.Read(outputBlock, 0, (int)fileLength);
                        encStream.Write(outputBlock, 0, (int)fileLength);
                        initialPosBlock = -1;
                    }
                    else
                    {
                        CompressFile(streamRead, out blockSizeArray, encStream);
                        //seeking the sfar to the last Block Size offset
                        stream.Seek(pointerBlockSize, SeekOrigin.Begin);
                        for (int i = 0; i < blockSizeArray.Length; i++)
                        {
                            writer.Write(blockSizeArray[i]);
                        }
                        pointerBlockSize = (uint)stream.Position;
                        initialPosBlock = posBlockSize;
                        posBlockSize += blockSizeArray.Length;
                    }
                    // BUGFIX: the source FileStream was never closed, leaking one
                    // OS file handle per packed file.
                    streamRead.Close();
                    //seeking the sfar to the last entry offset
                    stream.Seek(pointerEntry, SeekOrigin.Begin);
                    writer.Write(fileNameHash.A.Swap());
                    writer.Write(fileNameHash.B.Swap());
                    writer.Write(fileNameHash.C.Swap());
                    writer.Write(fileNameHash.D.Swap());
                    writer.Write(initialPosBlock);
                    writer.Write(fileLength);
                    writer.Write((byte)0);
                    writer.Write(pointerData);
                    writer.Write((byte)0);
                    pointerEntry = (uint)stream.Position;
                    //seeking the sfar to the last data offset
                    stream.Seek(pointerData, SeekOrigin.Begin);
                    encStream.WriteTo(stream);
                    pointerData += (uint)encStream.Length;
                    if (verbose)
                    {
                        Console.WriteLine(" Total Uncompressed: {0} B, Total Compressed: {1} B\n Files Packed: {2}/{3}\n", fileLength, encStream.Length, counter++, sfarFileTableCount - 1);
                    }
                } //end big else
            } // end of foreach
            // writing the fileList
            streamRead = new MemoryStream(ASCIIEncoding.Default.GetBytes(allFileList));
            fileLength = (uint)allFileList.Length;
            fileNameHash = fileListHash;
            encStream = new MemoryStream();
            if (verbose)
            {
                Console.WriteLine("FileList entry Offset: {0:X8}", fileListPointerEntry);
                Console.WriteLine(" Block Index Offset: {0:X8}", pointerBlockSize);
                Console.WriteLine(" Data Offset: {0:X8}", pointerData);
                Console.WriteLine(" Initial blocknum: {0}", posBlockSize);
            }
            CompressFile(streamRead, out blockSizeArray, encStream);
            //seeking the sfar to the last Block Size offset
            stream.Seek(pointerBlockSize, SeekOrigin.Begin);
            for (int i = 0; i < blockSizeArray.Length; i++)
            {
                writer.Write(blockSizeArray[i]);
            }
            pointerBlockSize = (uint)stream.Position;
            initialPosBlock = posBlockSize;
            posBlockSize += blockSizeArray.Length;
            //write the file list entry block (into the slot reserved earlier)
            stream.Seek(fileListPointerEntry, SeekOrigin.Begin);
            writer.Write(fileNameHash.A.Swap());
            writer.Write(fileNameHash.B.Swap());
            writer.Write(fileNameHash.C.Swap());
            writer.Write(fileNameHash.D.Swap());
            writer.Write(initialPosBlock);
            writer.Write(fileLength);
            writer.Write((byte)0);
            writer.Write(pointerData);
            writer.Write((byte)0);
            //seeking the sfar to the last data offset
            stream.Seek(pointerData, SeekOrigin.Begin);
            encStream.WriteTo(stream);
            pointerData += (uint)encStream.Length;
            if (verbose)
            {
                Console.WriteLine(" Total Uncompressed: {0} B, Total Compressed: {1} B\n", fileLength, encStream.Length);
            }
            encStream.Close();
            //closing the main binary file writer
            writer.Close();
        }
        if (verbose)
        {
            Console.WriteLine("------ END ------");
            Console.WriteLine("File Table Count: {0}", sfarFileTableCount);
            Console.WriteLine("File Table (Entry) Offset: {0:X8}", sfarFileTableOffset);
            Console.WriteLine("Block Size Offset: {0:X8}", sfarBlockSizeTableOffset);
            Console.WriteLine("Total blocks: {0}", totBlockSize);
            Console.WriteLine("Data Offset: {0:X8}", sfarDataOffset);
        }
    }
}
// True when the given archive path is already queued for deletion.
public bool isFileSetForDelete(string filename)
    => listDelete.ContainsKey(FileNameHash.Compute(filename));
/// <summary>
/// Command-line entry point: unpacks an SFAR archive. Entry names are resolved
/// from project hash lists and from the archive's embedded file list; entries
/// with no known name are extracted under "__UNKNOWN" (unless disabled).
/// </summary>
/// <param name="args">Options plus: input_sfar [output_dir].</param>
public static void Main(string[] args)
{
    bool showHelp = false;
    bool extractUnknowns = true;
    bool overwriteFiles = false;
    bool verbose = false;
    var options = new OptionSet
    {
        { "o|overwrite", "overwrite existing files", v => overwriteFiles = v != null },
        { "nu|no-unknowns", "don't extract unknown files", v => extractUnknowns = v == null },
        { "v|verbose", "be verbose", v => verbose = v != null },
        { "h|help", "show this message and exit", v => showHelp = v != null },
    };
    List <string> extras;
    try
    {
        extras = options.Parse(args);
    }
    catch (OptionException e)
    {
        Console.Write("{0}: ", GetExecutableName());
        Console.WriteLine(e.Message);
        Console.WriteLine("Try `{0} --help' for more information.", GetExecutableName());
        return;
    }
    if (extras.Count < 1 || extras.Count > 2 || showHelp)
    {
        Console.WriteLine("Usage: {0} [OPTIONS]+ input_sfar [output_dir]", GetExecutableName());
        Console.WriteLine();
        Console.WriteLine("Options:");
        options.WriteOptionDescriptions(Console.Out);
        return;
    }
    string inputPath = extras[0];
    // Default output directory: input path with its extension stripped.
    string outputPath = extras.Count > 1 ? extras[1] : Path.ChangeExtension(inputPath, null);
    Manager manager = Manager.Load();
    if (manager.ActiveProject == null)
    {
        Console.WriteLine("Warning: no active project loaded.");
    }
    // Known-name lookup built from the project's *.filelist files.
    HashList <FileNameHash> hashes = manager.LoadLists("*.filelist", FileNameHash.Compute, s => s.Replace("\\", "/"));
    using (FileStream input = File.OpenRead(inputPath))
    {
        var sfx = new SFXArchiveFile();
        sfx.Deserialize(input);
        long current = 0;
        long total = sfx.Entries.Count;
        int padding = total.ToString(CultureInfo.InvariantCulture).Length;
        if (sfx.CompressionScheme != CompressionScheme.None &&
            sfx.CompressionScheme != CompressionScheme.Lzma &&
            sfx.CompressionScheme != CompressionScheme.Lzx)
        {
            Console.WriteLine("Unsupported compression scheme!");
            return;
        }
        var inputBlock = new byte[sfx.MaximumBlockSize];
        var outputBlock = new byte[sfx.MaximumBlockSize];
        var hashesFromFile = new Dictionary <FileNameHash, string>();
        // todo: figure out what the file name is
        var fileNameListNameHash = new FileNameHash(
            new byte[] { 0xB5, 0x50, 0x19, 0xCB, 0xF9, 0xD3, 0xDA, 0x65, 0xD5, 0x5B, 0x32, 0x1C, 0x00, 0x19, 0x69, 0x7C, });
        Entry fileNameListEntry = sfx.Entries.FirstOrDefault(e => e.NameHash == fileNameListNameHash);
        if (fileNameListEntry != null)
        {
            using (var temp = new MemoryStream())
            {
                DecompressEntry(sfx, fileNameListEntry, input, inputBlock, temp, outputBlock);
                temp.Position = 0;
                var reader = new StreamReader(temp);
                while (reader.EndOfStream == false)
                {
                    string line = reader.ReadLine();
                    // BUGFIX: skip blank lines (the embedded list typically ends with a
                    // trailing newline) and tolerate duplicate lines — Add() would have
                    // thrown ArgumentException on a duplicate key.
                    if (string.IsNullOrEmpty(line))
                    {
                        continue;
                    }
                    hashesFromFile[FileNameHash.Compute(line)] = line;
                }
            }
        }
        foreach (var entry in sfx.Entries)
        {
            current++;
            string entryName = hashes[entry.NameHash];
            if (entryName == null)
            {
                // Fall back to the archive's own embedded file list
                // (single TryGetValue instead of ContainsKey + indexer).
                string nameFromList;
                if (hashesFromFile.TryGetValue(entry.NameHash, out nameFromList))
                {
                    entryName = nameFromList;
                }
            }
            if (entryName == null)
            {
                if (extractUnknowns == false)
                {
                    continue;
                }
                entryName = entry.NameHash.ToString();
                entryName = Path.Combine("__UNKNOWN", entryName);
            }
            else
            {
                entryName = entryName.Replace("/", "\\");
                if (entryName.StartsWith("\\"))
                {
                    entryName = entryName.Substring(1);
                }
            }
            string entryPath = Path.Combine(outputPath, entryName);
            if (overwriteFiles == false && File.Exists(entryPath))
            {
                continue;
            }
            if (verbose)
            {
                Console.WriteLine("[{0}/{1}] {2}", current.ToString(CultureInfo.InvariantCulture).PadLeft(padding), total, entryName);
            }
            input.Seek(entry.Offset, SeekOrigin.Begin);
            Directory.CreateDirectory(Path.GetDirectoryName(entryPath));
            using (FileStream output = File.Create(entryPath))
            {
                DecompressEntry(sfx, entry, input, inputBlock, output, outputBlock);
            }
        }
    }
}
//int highPerc = 0; #else public void Execute(string outputFile) { #endif var inputPath = dlcBase.fileName; if (!File.Exists(inputPath)) { throw new FileNotFoundException("Error: the input file doesn't exists"); } /*string filePathToReplace = selectedFile; * string fileToReplace = Path.GetFileName(filePathToReplace); * FileNameHash fileToReplaceHash = new FileNameHash();*/ string outputFileName = Path.GetFileNameWithoutExtension(inputPath); int inPointerBlockSize = 0; int outPointerEntry = 0x20; int outPointerEntryFileList = 0; int outPointerBlockSize = 0; int outPointerData = 0; int blocksToRemove = 0; int blocksToAdd = 0; using (FileStream input = File.OpenRead(inputPath), output = File.OpenWrite(outputFile)) { string dlcFileList = ""; int outNumOfEntries = 0; //recreating the file list foreach (var kvp in listComplete) { if (kvp.Value != action.delete) { outNumOfEntries++; if (kvp.Key == DLCBase.fileListHash) { continue; } switch (kvp.Value) { case action.copy: case action.replace: dlcFileList += dlcBase.fileList[kvp.Key].fileName + Environment.NewLine; break; case action.add: dlcFileList += listAdd[kvp.Key].fileName + Environment.NewLine; break; } } } blocksToRemove += dlcBase.fileList[DLCBase.fileListHash].blockSizeArray.Length; blocksToAdd += (int)Math.Ceiling(dlcFileList.Length / (double)DLCBase.MaximumBlockSize); foreach (var kvp in listAdd) { string fPath = kvp.Value.filePath; if ((Path.GetExtension(fPath) != ".bik" && Path.GetExtension(fPath) != ".afc")) { blocksToAdd += (int)Math.Ceiling(DLCPack.Getsize(kvp.Value.filePath) / (double)DLCBase.MaximumBlockSize); } } foreach (var kvp in listReplace) { if (dlcBase.fileList[kvp.Key].blockSizeIndex != -1) { blocksToRemove += dlcBase.fileList[kvp.Key].blockSizeArray.Length; blocksToAdd += (int)Math.Ceiling(DLCPack.Getsize(kvp.Value) / (double)DLCBase.MaximumBlockSize); } } foreach (var kvp in listDelete) { if (dlcBase.fileList[kvp.Key].blockSizeIndex != -1) { blocksToRemove += 
dlcBase.fileList[kvp.Key].blockSizeArray.Length; } } var inputBlock = new byte[DLCBase.MaximumBlockSize]; var outputBlock = new byte[DLCBase.MaximumBlockSize]; //writing header of new sfar file input.Seek(0, 0); input.Read(inputBlock, 0, 32); output.Write(inputBlock, 0, 32); //getting initial blocks and data offsets inPointerBlockSize = 0x20 + (dlcBase.fileList.Count * 0x1E); outPointerBlockSize = 0x20 + outNumOfEntries * 0x1E; input.Seek(8, 0); int inDataOffset = input.ReadValueS32(); int outBlockCount = ((inDataOffset - inPointerBlockSize) / 2) - blocksToRemove + blocksToAdd; outPointerData = outPointerBlockSize + (outBlockCount * 2); //writing new header's values output.Seek(8, 0); output.WriteValueS32(outPointerData); output.Seek(16, 0); output.WriteValueS32(outNumOfEntries); output.Seek(20, 0); output.WriteValueS32(outPointerBlockSize); if (verbose) { Console.WriteLine("num entries: {0}", outNumOfEntries); Console.WriteLine("data offset: {0:X8}", inDataOffset); Console.WriteLine("blocks to remove: {0}", blocksToRemove); Console.WriteLine("blocks to add: {0}", blocksToAdd); Console.WriteLine("old block offset: {0:X8}", inPointerBlockSize); Console.WriteLine("new block offset: {0:X8}", outPointerBlockSize); Console.WriteLine("old block count: {0}", (inDataOffset - inPointerBlockSize) / 2); Console.WriteLine("new block count: {0}", outBlockCount); Console.WriteLine("pointer data: {0:X8}\n", outPointerData); } int numBlocks; int outDataOffset; int blockIndexCounter = 0; int outBlockIndex = 0; int fileSize = 0; int outInitialDataOffset = outPointerData; int outInitialBlockOffset = outPointerBlockSize; foreach (var kvp in listComplete) { count++; if (kvp.Value == action.delete) { continue; } if (kvp.Key == DLCBase.fileListHash) { //Console.WriteLine("File List Found at {0:X8}", outPointerEntry); outPointerEntryFileList = outPointerEntry; outPointerEntry += 0x1E; continue; } sfarFile entry; FileNameHash hashEntry = kvp.Key; if (kvp.Value == action.add) { string fPath 
= listAdd[kvp.Key].filePath; entry = new sfarFile(); entry.nameHash = kvp.Key; entry.dataOffset = new long[1]; fileSize = (int)DLCPack.Getsize(fPath); if ((Path.GetExtension(fPath) == ".bik" || Path.GetExtension(fPath) == ".afc")) { entry.blockSizeIndex = -1; } } else { entry = dlcBase.fileList[kvp.Key]; fileSize = (int)entry.uncompressedSize; } outDataOffset = outPointerData; outBlockIndex = blockIndexCounter; /*#if (WITH_GUI) * int perc = (int)Math.Ceiling((float)count++ / (float)listComplete.Count * 100); * if (perc > highPerc) * { * highPerc = perc; * if (perc > 100) * perc = 100; * worker.ReportProgress(perc); * } #endif*/ switch (kvp.Value) { case action.copy: if (worker != null) { worker.ReportProgress(0, count + "/" + listComplete.Count + ": Copying " + Path.GetFileName(entry.fileName)); } if (entry.blockSizeIndex == -1) { inDataOffset = (int)entry.dataOffset[0]; input.Seek(inDataOffset, 0); inputBlock = new byte[fileSize]; input.Read(inputBlock, 0, fileSize); output.Seek(outPointerData, 0); output.Write(inputBlock, 0, fileSize); outPointerData += fileSize; outBlockIndex = entry.blockSizeIndex; } else { numBlocks = (int)Math.Ceiling(fileSize / (double)DLCBase.MaximumBlockSize); inDataOffset = (int)entry.dataOffset[0]; for (int i = 0; i < numBlocks; i++) { uint blockSize = entry.blockSizeArray[i]; if ((ushort)blockSize != entry.blockSizeArray[i]) { throw new Exception("different blocksizes"); } blockSize = blockSize == 0 ? 
DLCBase.MaximumBlockSize : blockSize; inputBlock = new byte[blockSize]; input.Seek(inDataOffset, 0); input.Read(inputBlock, 0, (int)blockSize); inDataOffset += (int)blockSize; output.Seek(outPointerBlockSize, 0); if (blockSize == DLCBase.MaximumBlockSize) { output.WriteValueU16(0); } else { output.WriteValueU16((ushort)blockSize); } if (outPointerBlockSize > outInitialDataOffset) { throw new Exception("Block index offset values out of range,\n last block: " + blockIndexCounter + "\n Pointer Block: " + outPointerBlockSize.ToString("X8") + "\n Data Offset: " + outInitialDataOffset.ToString("X8")); } outPointerBlockSize += 2; output.Seek(outPointerData, 0); output.Write(inputBlock, 0, (int)blockSize); if (output.Position - outPointerData != blockSize) { Console.WriteLine(" diff position: {0}, blocksize: {1}", output.Position - outPointerData, blockSize); throw new Exception("error writing file"); } outPointerData += (int)blockSize; } blockIndexCounter += numBlocks; } break; case action.add: case action.replace: string selectedFile; if (kvp.Value == action.replace) { selectedFile = listReplace[kvp.Key]; if (worker != null) { worker.ReportProgress(0, count + "/" + listComplete.Count + ": Replacing " + Path.GetFileName(selectedFile)); } } else { selectedFile = listAdd[kvp.Key].filePath; if (worker != null) { worker.ReportProgress(0, count + "/" + listComplete.Count + ": Adding " + Path.GetFileName(selectedFile)); } } output.Seek(outPointerBlockSize, 0); //compressing the replacing file ushort[] blockSizeArray; using (FileStream streamFile = new FileStream(selectedFile, FileMode.Open, FileAccess.Read)) { fileSize = (int)streamFile.Length; if ((Path.GetExtension(selectedFile) == ".bik" || Path.GetExtension(selectedFile) == ".afc") && entry.blockSizeIndex == -1) { outBlockIndex = -1; output.Seek(outPointerData, SeekOrigin.Begin); output.WriteFromStream(streamFile, streamFile.Length); outPointerData += (int)streamFile.Length; } else { byte[][] comprArr; 
DLCPack.CompressFile(streamFile, out blockSizeArray, out comprArr, Threads); for (int i = 0; i < blockSizeArray.Length; i++) { output.WriteValueU16(blockSizeArray[i]); } outPointerBlockSize += (blockSizeArray.Length * 2); blockIndexCounter += blockSizeArray.Length; output.Seek(outPointerData, SeekOrigin.Begin); int totallength = 0; for (int i = 0; i < comprArr.Length; i++) { output.WriteBytes(comprArr[i]); totallength += comprArr[i].Length; } //outPointerData += (int)streamFile.Length; outPointerData += totallength; } } #region blah di blah /* * FileStream streamFile = File.OpenRead(selectedFile); * if ((Path.GetExtension(selectedFile) == ".bik" || * Path.GetExtension(selectedFile) == ".afc") && * entry.blockSizeIndex == -1) * { * streamFile.CopyTo(encStream); * outBlockIndex = -1; * } * else * { * //DLCPack.CompressFile(streamFile, out blockSizeArray, encStream, worker); * byte[][] comprArr; * DLCPack.CompressFile(streamFile, out blockSizeArray, out comprArr); * * for (int i = 0; i < blockSizeArray.Length; i++) * { * output.WriteValueU16(blockSizeArray[i]); * } * outPointerBlockSize += (blockSizeArray.Length * 2); * blockIndexCounter += blockSizeArray.Length; * } * output.Seek((long)outPointerData, 0); * encStream.WriteTo(output); * outPointerData += (int)encStream.Length; * * fileSize = (int)streamFile.Length; * streamFile.Close(); */ #endregion break; }// end switch if (worker != null) { worker.ReportProgress(100); } output.Seek(outPointerEntry, 0); output.WriteValueU32(hashEntry.A.Swap()); output.WriteValueU32(hashEntry.B.Swap()); output.WriteValueU32(hashEntry.C.Swap()); output.WriteValueU32(hashEntry.D.Swap()); output.WriteValueS32(outBlockIndex); output.WriteValueS32(fileSize); output.WriteValueU8(0); output.WriteValueS32(outDataOffset); output.WriteValueU8(0); outPointerEntry += 0x1E; if (outPointerEntry > outInitialBlockOffset) { throw new Exception("Entry index offset values out of range"); } }// end of foreach //writing the file list entry, blocksizes & 
data outDataOffset = outPointerData; outBlockIndex = blockIndexCounter; { MemoryStream streamRead = new MemoryStream(ASCIIEncoding.Default.GetBytes(dlcFileList)); ushort[] blockSizeArray; MemoryStream encStream = new MemoryStream(); DLCPack.CompressFile(streamRead, out blockSizeArray, encStream); output.Seek(outPointerBlockSize, 0); for (int i = 0; i < blockSizeArray.Length; i++) { output.WriteValueU16(blockSizeArray[i]); } outPointerBlockSize += (blockSizeArray.Length * 2); blockIndexCounter += blockSizeArray.Length; output.Seek(outPointerData, 0); encStream.WriteTo(output); outPointerData += (int)encStream.Length; fileSize = (int)streamRead.Length; output.Seek(outPointerEntryFileList, 0); output.WriteValueU32(DLCBase.fileListHash.A.Swap()); output.WriteValueU32(DLCBase.fileListHash.B.Swap()); output.WriteValueU32(DLCBase.fileListHash.C.Swap()); output.WriteValueU32(DLCBase.fileListHash.D.Swap()); output.WriteValueS32(outBlockIndex); output.WriteValueS32(fileSize); output.WriteValueU8(0x00); output.WriteValueS32(outDataOffset); output.WriteValueU8(0x00); outPointerEntry = (int)output.Position; } }// end of using... }
/// <summary>
/// Resolves the physical file behind <c>FileName</c>, waits until it is readable and
/// stable, finds or creates the matching VideoLocal / VideoLocal_Place records,
/// hashes the file when no hash is known (or <c>ForceHash</c> is set), detects and
/// records duplicate files, refreshes media info when stale, and finally queues a
/// <c>CommandRequest_ProcessFile</c> for the resulting VideoLocal.
/// Side effects: repository saves/deletes, Thread.Sleep-based waiting, logging.
/// </summary>
private void ProcessFile_LocalInfo()
{
    // hash and read media info for file
    int nshareID = -1;

    // Map the absolute path onto an import folder + relative path.
    Tuple<SVR_ImportFolder, string> tup = VideoLocal_PlaceRepository.GetFromFullPath(FileName);
    if (tup == null)
    {
        logger.Error($"Unable to locate Import Folder for {FileName}");
        return;
    }
    SVR_ImportFolder folder = tup.Item1;
    string filePath = tup.Item2;
    IFileSystem f = tup.Item1.FileSystem;
    if (f == null)
    {
        logger.Error("Unable to open filesystem for: {0}", FileName);
        return;
    }
    long filesize = 0;
    Exception e = null;
    if (folder.CloudID == null) // Local Access
    {
        if (!File.Exists(FileName))
        {
            logger.Error("File does not exist: {0}", FileName);
            return;
        }
        int numAttempts = 0;
        bool writeAccess = folder.IsDropSource == 1;

        // Wait 1 minute before giving up on trying to access the file
        // first only do read to not get in something's way
        // (CanAccessFile returns the file size; 0 means "not accessible yet").
        while ((filesize = CanAccessFile(FileName, false, ref e)) == 0 && (numAttempts < 60))
        {
            numAttempts++;
            Thread.Sleep(1000);
            logger.Trace($@"Failed to access, (or filesize is 0) Attempt # {numAttempts}, {FileName}");
        }

        // if we failed to access the file, get ouuta here
        if (numAttempts >= 60)
        {
            logger.Error("Could not access file: " + FileName);
            logger.Error(e);
            return;
        }

        // At least 1s between to ensure that size has the chance to change
        // TODO make this a setting to allow fine tuning on various configs
        // TODO Make this able to be disabled. It adds 1.5s to hashing just waiting for the Linux/NAS use case
        int seconds = 8;
        int waitTime = seconds * 1000 / 2;
        Thread.Sleep(waitTime);
        numAttempts = 0;

        //For systems with no locking
        // TODO make this a setting as well
        while (FileModified(FileName, seconds, ref filesize, writeAccess, ref e) && numAttempts < 60)
        {
            numAttempts++;
            Thread.Sleep(waitTime);
            // Only show if it's more than 'seconds' past
            // NOTE: verbatim string below — the line break inside it is part of the logged message.
            if (numAttempts != 0 && numAttempts * 2 % seconds == 0)
            {
                logger.Warn($@"The modified date is too soon. Waiting to ensure that no processes are writing to it. 
{numAttempts}/60 {FileName}");
            }
        }

        // if we failed to access the file, get ouuta here
        if (numAttempts >= 60 || filesize == 0)
        {
            logger.Error("Could not access file: " + FileName);
            logger.Error(e);
            return;
        }
    }

    // Resolve through the (possibly cloud) filesystem abstraction.
    FileSystemResult<IObject> source = f.Resolve(FileName);
    if (source == null || !source.IsOk || !(source.Result is IFile))
    {
        logger.Error("Could not access file: " + FileName);
        return;
    }
    IFile source_file = (IFile)source.Result;
    // For cloud folders the local stat loop above was skipped; take the size from the provider.
    if (folder.CloudID.HasValue)
    {
        filesize = source_file.Size;
    }
    nshareID = folder.ImportFolderID;

    // check if we have already processed this file
    SVR_VideoLocal_Place vlocalplace = RepoFactory.VideoLocalPlace.GetByFilePathAndImportFolderID(filePath, nshareID);
    SVR_VideoLocal vlocal = null;
    var filename = Path.GetFileName(filePath);

    if (vlocalplace != null)
    {
        vlocal = vlocalplace.VideoLocal;
        if (vlocal != null)
        {
            logger.Trace("VideoLocal record found in database: {0}", FileName);

            // This will only happen with DB corruption, so just clean up the mess.
            if (vlocalplace.FullServerPath == null)
            {
                if (vlocal.Places.Count == 1)
                {
                    RepoFactory.VideoLocal.Delete(vlocal);
                    vlocal = null;
                }
                RepoFactory.VideoLocalPlace.Delete(vlocalplace);
                vlocalplace = null;
            }

            if (vlocal != null && ForceHash)
            {
                vlocal.FileSize = filesize;
                vlocal.DateTimeUpdated = DateTime.Now;
            }
        }
    }

    if (vlocal == null)
    {
        logger.Trace("No existing VideoLocal, creating temporary record");
        // NOTE(review): DateTimeCreated is initialized from the outer DateTimeUpdated
        // property (object-initializer members can't reference each other), not from
        // the DateTimeUpdated set on the line above — confirm this is intentional.
        vlocal = new SVR_VideoLocal
        {
            DateTimeUpdated = DateTime.Now,
            DateTimeCreated = DateTimeUpdated,
            FileName = filename,
            FileSize = filesize,
            Hash = string.Empty,
            CRC32 = string.Empty,
            MD5 = source_file?.MD5?.ToUpperInvariant() ?? string.Empty,
            SHA1 = source_file?.SHA1?.ToUpperInvariant() ?? string.Empty,
            IsIgnored = 0,
            IsVariation = 0
        };
    }

    if (vlocalplace == null)
    {
        logger.Trace("No existing VideoLocal_Place, creating a new record");
        vlocalplace = new SVR_VideoLocal_Place
        {
            FilePath = filePath,
            ImportFolderID = nshareID,
            ImportFolderType = folder.ImportFolderType
        };
        // Make sure we have an ID
        RepoFactory.VideoLocalPlace.Save(vlocalplace);
    }

    // check if we need to get a hash this file
    if (string.IsNullOrEmpty(vlocal.Hash) || ForceHash)
    {
        logger.Trace("No existing hash in VideoLocal, checking XRefs");
        if (!ForceHash)
        {
            // try getting the hash from the CrossRef
            List<CrossRef_File_Episode> crossRefs =
                RepoFactory.CrossRef_File_Episode.GetByFileNameAndSize(filename, vlocal.FileSize);
            if (crossRefs.Any())
            {
                vlocal.Hash = crossRefs[0].Hash;
                vlocal.HashSource = (int)HashSource.DirectHash;
            }
        }

        // try getting the hash from the LOCAL cache
        if (!ForceHash && string.IsNullOrEmpty(vlocal.Hash))
        {
            List<FileNameHash> fnhashes = RepoFactory.FileNameHash.GetByFileNameAndSize(filename, vlocal.FileSize);
            if (fnhashes != null && fnhashes.Count > 1)
            {
                // if we have more than one record it probably means there is some sort of corruption
                // lets delete the local records
                foreach (FileNameHash fnh in fnhashes)
                {
                    RepoFactory.FileNameHash.Delete(fnh.FileNameHashID);
                }
            }
            // reinit this to check if we erased them
            fnhashes = RepoFactory.FileNameHash.GetByFileNameAndSize(filename, vlocal.FileSize);
            if (fnhashes != null && fnhashes.Count == 1)
            {
                logger.Trace("Got hash from LOCAL cache: {0} ({1})", FileName, fnhashes[0].Hash);
                vlocal.Hash = fnhashes[0].Hash;
                vlocal.HashSource = (int)HashSource.WebCacheFileName;
            }
        }
        if (string.IsNullOrEmpty(vlocal.Hash))
        {
            FillVideoHashes(vlocal);
        }

        //Cloud and no hash, Nothing to do, except maybe Get the mediainfo....
        if (string.IsNullOrEmpty(vlocal.Hash) && folder.CloudID.HasValue)
        {
            logger.Trace("No Hash found for cloud " + filename + " putting in videolocal table with empty ED2K");
            RepoFactory.VideoLocal.Save(vlocal, false);
            vlocalplace.VideoLocalID = vlocal.VideoLocalID;
            RepoFactory.VideoLocalPlace.Save(vlocalplace);
            if (vlocalplace.RefreshMediaInfo())
            {
                RepoFactory.VideoLocal.Save(vlocalplace.VideoLocal, true);
            }
            return;
        }

        // hash the file
        if (string.IsNullOrEmpty(vlocal.Hash) || ForceHash)
        {
            logger.Info("Hashing File: {0}", FileName);
            ShokoService.CmdProcessorHasher.QueueState = PrettyDescriptionHashing;
            DateTime start = DateTime.Now;
            // update the VideoLocal record with the Hash, since cloud support we calculate everything
            var hashes = FileHashHelper.GetHashInfo(
                FileName.Replace("/", $"{System.IO.Path.DirectorySeparatorChar}"), true,
                ShokoServer.OnHashProgress, true, true, true);
            TimeSpan ts = DateTime.Now - start;
            logger.Trace("Hashed file in {0:#0.0} seconds --- {1} ({2})", ts.TotalSeconds, FileName,
                Utils.FormatByteSize(vlocal.FileSize));
            vlocal.Hash = hashes.ED2K?.ToUpperInvariant();
            vlocal.CRC32 = hashes.CRC32?.ToUpperInvariant();
            vlocal.MD5 = hashes.MD5?.ToUpperInvariant();
            vlocal.SHA1 = hashes.SHA1?.ToUpperInvariant();
            vlocal.HashSource = (int)HashSource.DirectHash;
        }
        FillMissingHashes(vlocal);

        // We should have a hash by now
        // before we save it, lets make sure there is not any other record with this hash (possible duplicate file)
        SVR_VideoLocal tlocal = RepoFactory.VideoLocal.GetByHash(vlocal.Hash);
        bool duplicate = false;
        bool changed = false;

        if (tlocal != null)
        {
            logger.Trace("Found existing VideoLocal with hash, merging info from it");
            // Aid with hashing cloud. Merge hashes and save, regardless of duplicate file
            changed = tlocal.MergeInfoFrom(vlocal);
            vlocal = tlocal;

            // Other places for this hash in the same cloud folder that aren't this one.
            List<SVR_VideoLocal_Place> preps = vlocal.Places.Where(
                a => a.ImportFolder.CloudID == folder.CloudID &&
                     !vlocalplace.FullServerPath.Equals(a.FullServerPath)).ToList();
            foreach (var prep in preps)
            {
                if (prep == null)
                {
                    continue;
                }
                // clean up, if there is a 'duplicate file' that is invalid, remove it.
                if (prep.FullServerPath == null)
                {
                    RepoFactory.VideoLocalPlace.Delete(prep);
                }
                else
                {
                    FileSystemResult dupFileSystemResult =
                        prep.ImportFolder?.FileSystem?.Resolve(prep.FullServerPath);
                    if (dupFileSystemResult == null || !dupFileSystemResult.IsOk)
                    {
                        RepoFactory.VideoLocalPlace.Delete(prep);
                    }
                }
            }

            // Anything still left is a genuine on-disk duplicate of this file.
            var dupPlace = vlocal.Places.FirstOrDefault(
                a => a.ImportFolder.CloudID == folder.CloudID &&
                     !vlocalplace.FullServerPath.Equals(a.FullServerPath));

            if (dupPlace != null)
            {
                logger.Warn("Found Duplicate File");
                logger.Warn("---------------------------------------------");
                logger.Warn($"New File: {vlocalplace.FullServerPath}");
                logger.Warn($"Existing File: {dupPlace.FullServerPath}");
                logger.Warn("---------------------------------------------");

                // check if we have a record of this in the database, if not create one
                // (checked in both path orders, since either file may have been seen first)
                List<DuplicateFile> dups = RepoFactory.DuplicateFile.GetByFilePathsAndImportFolder(
                    vlocalplace.FilePath, dupPlace.FilePath,
                    vlocalplace.ImportFolderID, dupPlace.ImportFolderID);
                List<DuplicateFile> dupFiles = dups;
                if (dupFiles.Count == 0)
                {
                    dupFiles = RepoFactory.DuplicateFile.GetByFilePathsAndImportFolder(
                        dupPlace.FilePath, vlocalplace.FilePath,
                        dupPlace.ImportFolderID, vlocalplace.ImportFolderID);
                }
                if (dupFiles.Count == 0)
                {
                    DuplicateFile dup = new DuplicateFile
                    {
                        DateTimeUpdated = DateTime.Now,
                        FilePathFile1 = vlocalplace.FilePath,
                        FilePathFile2 = dupPlace.FilePath,
                        ImportFolderIDFile1 = vlocalplace.ImportFolderID,
                        ImportFolderIDFile2 = dupPlace.ImportFolderID,
                        Hash = vlocal.Hash
                    };
                    RepoFactory.DuplicateFile.Save(dup);
                }
                //Notify duplicate, don't delete
                duplicate = true;
            }
        }

        if (!duplicate || changed)
        {
            RepoFactory.VideoLocal.Save(vlocal, true);
        }
        vlocalplace.VideoLocalID = vlocal.VideoLocalID;
        RepoFactory.VideoLocalPlace.Save(vlocalplace);

        if (duplicate)
        {
            // Requeue processing for the merged record and stop here.
            CommandRequest_ProcessFile cr_procfile3 = new CommandRequest_ProcessFile(vlocal.VideoLocalID, false);
            cr_procfile3.Save();
            return;
        }

        // also save the filename to hash record
        // replace the existing records just in case it was corrupt
        FileNameHash fnhash;
        List<FileNameHash> fnhashes2 = RepoFactory.FileNameHash.GetByFileNameAndSize(filename, vlocal.FileSize);
        if (fnhashes2 != null && fnhashes2.Count > 1)
        {
            // if we have more than one record it probably means there is some sort of corruption
            // lets delete the local records
            foreach (FileNameHash fnh in fnhashes2)
            {
                RepoFactory.FileNameHash.Delete(fnh.FileNameHashID);
            }
        }
        if (fnhashes2 != null && fnhashes2.Count == 1)
        {
            fnhash = fnhashes2[0];
        }
        else
        {
            fnhash = new FileNameHash();
        }
        fnhash.FileName = filename;
        fnhash.FileSize = vlocal.FileSize;
        fnhash.Hash = vlocal.Hash;
        fnhash.DateTimeUpdated = DateTime.Now;
        RepoFactory.FileNameHash.Save(fnhash);
    }
    else
    {
        // Hash already known and no force: only backfill missing secondary hashes.
        FillMissingHashes(vlocal);
    }

    // Refresh media info when missing, stale-versioned, or obviously bogus.
    if ((vlocal.Media == null) || vlocal.MediaVersion < SVR_VideoLocal.MEDIA_VERSION || vlocal.Duration == 0)
    {
        if (vlocalplace.RefreshMediaInfo())
        {
            RepoFactory.VideoLocal.Save(vlocalplace.VideoLocal, true);
        }
    }
    // now add a command to process the file
    CommandRequest_ProcessFile cr_procfile = new CommandRequest_ProcessFile(vlocal.VideoLocalID, false);
    cr_procfile.Save();
}
// returns false if we should try again after the timer
// TODO Generify this and Move and make a return model instead of tuple
/// <summary>
/// Renames this file place on disk according to the renamer script, moves any
/// external subtitle files along with it, remaps DuplicateFile records and hash
/// xrefs to the new path, and persists the updated place/VideoLocal records.
/// </summary>
/// <param name="preview">When true, compute the new name but do not touch the filesystem or database.</param>
/// <param name="scriptName">Renamer script to use; null selects the default.</param>
/// <returns>(success/no-retry flag, the computed new name, error message or empty).</returns>
public (bool, string, string) RenameFile(bool preview = false, string scriptName = null)
{
    if (scriptName != null && scriptName.Equals(Shoko.Models.Constants.Renamer.TempFileName))
    {
        return (true, string.Empty, "Error: Do not attempt to use a temp file to rename.");
    }
    if (ImportFolder == null)
    {
        logger.Error($"Error: The renamer can't get the import folder for ImportFolderID: {ImportFolderID}, File: \"{FilePath}\"");
        return (true, string.Empty, "Error: Could not find the file");
    }

    string renamed = RenameFileHelper.GetFilename(this, scriptName);
    if (string.IsNullOrEmpty(renamed))
    {
        logger.Error($"Error: The renamer returned a null or empty name for: \"{FilePath}\"");
        return (true, string.Empty, "Error: The file renamer returned a null or empty value");
    }
    if (renamed.StartsWith("*Error: "))
    {
        logger.Error($"Error: The renamer returned an error on file: \"{FilePath}\"\n {renamed}");
        return (true, string.Empty, renamed.Substring(1));
    }

    // actually rename the file
    string fullFileName = FullServerPath;

    // check if the file exists
    if (string.IsNullOrEmpty(fullFileName))
    {
        logger.Error($"Error could not find the original file for renaming, or it is in use: \"{fullFileName}\"");
        return (false, renamed, "Error: Could not access the file");
    }
    if (!File.Exists(fullFileName))
    {
        logger.Error($"Error could not find the original file for renaming, or it is in use: \"{fullFileName}\"");
        return (false, renamed, "Error: Could not access the file");
    }

    // actually rename the file
    string path = Path.GetDirectoryName(fullFileName);
    string newFullName = Path.Combine(path, renamed);
    // Capture subtitle streams up-front; they reference the pre-rename file name.
    var textStreams = SubtitleHelper.GetSubtitleStreams(this);

    try
    {
        if (fullFileName.Equals(newFullName, StringComparison.InvariantCultureIgnoreCase))
        {
            logger.Info($"Renaming file SKIPPED! no change From \"{fullFileName}\" to \"{newFullName}\"");
            return (true, renamed, string.Empty);
        }
        if (File.Exists(newFullName))
        {
            logger.Info($"Renaming file SKIPPED! Destination Exists \"{newFullName}\"");
            return (true, renamed, "Error: The filename already exists");
        }
        if (preview)
        {
            return (false, renamed, string.Empty);
        }

        ShokoServer.PauseWatchingFiles();
        logger.Info($"Renaming file From \"{fullFileName}\" to \"{newFullName}\"");
        try
        {
            var file = new FileInfo(fullFileName);
            file.MoveTo(newFullName);
        }
        catch (Exception e)
        {
            logger.Info($"Renaming file FAILED! From \"{fullFileName}\" to \"{newFullName}\" - {e}");
            ShokoServer.UnpauseWatchingFiles();
            return (false, renamed, "Error: Failed to rename file");
        }

        // Rename external subs!
        var oldBasename = Path.GetFileNameWithoutExtension(fullFileName);
        var newBasename = Path.GetFileNameWithoutExtension(renamed);
        foreach (TextStream sub in textStreams)
        {
            if (string.IsNullOrEmpty(sub.Filename))
            {
                continue;
            }
            var oldSubPath = Path.Combine(path, sub.Filename);
            if (!File.Exists(oldSubPath))
            {
                logger.Error($"Unable to rename external subtitle \"{sub.Filename}\". Cannot access the file");
                continue;
            }
            var newSub = sub.Filename.Replace(oldBasename, newBasename);
            try
            {
                var file = new FileInfo(oldSubPath);
                // FIX: the original passed the bare 'newSub' file name to MoveTo,
                // which resolves relative to the process working directory and
                // would relocate the subtitle out of the video's folder. Anchor
                // the destination in the same directory as the video.
                file.MoveTo(Path.Combine(path, newSub));
            }
            catch (Exception e)
            {
                logger.Error($"Unable to rename external subtitle \"{sub.Filename}\" to \"{newSub}\". {e}");
            }
        }

        logger.Info($"Renaming file SUCCESS! From \"{fullFileName}\" to \"{newFullName}\"");
        Tuple<SVR_ImportFolder, string> tup = VideoLocal_PlaceRepository.GetFromFullPath(newFullName);
        if (tup == null)
        {
            logger.Error($"Unable to LOCATE file \"{newFullName}\" inside the import folders");
            ShokoServer.UnpauseWatchingFiles();
            return (false, renamed, "Error: Unable to resolve new path");
        }

        // Before we change all references, remap Duplicate Files
        List<DuplicateFile> dups = RepoFactory.DuplicateFile.GetByFilePathAndImportFolder(FilePath, ImportFolderID);
        if (dups != null && dups.Count > 0)
        {
            foreach (var dup in dups)
            {
                bool dupchanged = false;
                // This place may be recorded as either side of the duplicate pair.
                if (dup.FilePathFile1.Equals(FilePath, StringComparison.InvariantCultureIgnoreCase) &&
                    dup.ImportFolderIDFile1 == ImportFolderID)
                {
                    dup.FilePathFile1 = tup.Item2;
                    dupchanged = true;
                }
                else if (dup.FilePathFile2.Equals(FilePath, StringComparison.InvariantCultureIgnoreCase) &&
                         dup.ImportFolderIDFile2 == ImportFolderID)
                {
                    dup.FilePathFile2 = tup.Item2;
                    dupchanged = true;
                }
                if (dupchanged)
                {
                    RepoFactory.DuplicateFile.Save(dup);
                }
            }
        }

        // Rename hash xrefs
        var filenameHash = RepoFactory.FileNameHash.GetByHash(VideoLocal.Hash);
        if (!filenameHash.Any(a => a.FileName.Equals(renamed)))
        {
            FileNameHash fnhash = new FileNameHash
            {
                DateTimeUpdated = DateTime.Now,
                FileName = renamed,
                FileSize = VideoLocal.FileSize,
                Hash = VideoLocal.Hash
            };
            RepoFactory.FileNameHash.Save(fnhash);
        }

        FilePath = tup.Item2;
        RepoFactory.VideoLocalPlace.Save(this);
        // just in case
        VideoLocal.FileName = renamed;
        RepoFactory.VideoLocal.Save(VideoLocal, false);
    }
    catch (Exception ex)
    {
        logger.Info($"Renaming file FAILED! From \"{fullFileName}\" to \"{newFullName}\" - {ex.Message}");
        logger.Error(ex, ex.ToString());
        return (true, string.Empty, $"Error: {ex.Message}");
    }
    ShokoServer.UnpauseWatchingFiles();
    return (true, renamed, string.Empty);
}
/// <summary>
/// Reverts a pending "add file" action: removes the file's hash entry from both
/// the add list and the complete action list.
/// </summary>
/// <param name="newFilePathName">Path/name of the file whose add action is being undone.</param>
public void undoAddFile(string newFilePathName)
{
    // Compute the hash once; the original recomputed it for each list removal.
    var hash = FileNameHash.Compute(newFilePathName);
    listAdd.Remove(hash);
    listComplete.Remove(hash);
}