public override void Write(BinaryWriter bw)
{
    base.Write(bw);

    // Optional members are written as a presence flag followed by the payload.
    if (Tree == null)
    {
        bw.Write(false);
    }
    else
    {
        bw.Write(true);
        Tree.Write(bw);
    }

    if (Game == null)
    {
        bw.Write(false);
    }
    else
    {
        bw.Write(true);
        Game.Write(bw);
    }

    // Collections are written as a count followed by the items.
    int count = _dirDats.Count;
    bw.Write(count);
    for (int i = 0; i < count; i++)
    {
        _dirDats[i].Write(bw);
    }

    count = _children.Count;
    bw.Write(count);
    for (int i = 0; i < count; i++)
    {
        // Each child is prefixed with its FileType byte so Read() can construct the correct type.
        FileType type = _children[i].FileType;
        bw.Write((byte)type);
        _children[i].Write(bw);
    }

    if (DBTypeGet.isCompressedDir(FileType))
    {
        bw.Write((byte)ZipStatus);
    }
}
private static void Compare(RvFile dbDir, RvFile fileDir, bool report, bool enableCancel)
{
    string fullDir = dbDir.FullName;
    FileType ft = dbDir.FileType;

    // Now we scan down the dbDir and the fileDir, comparing them:
    // if we find a match we mark the dbDir child as found.
    // if a file is missing from fileDir we mark that file in dbDir as missing.
    // if we find extra files in fileDir we add them to dbDir and mark them as unknown.
    // we also recurse into any sub directories.
    int dbIndex = 0;
    int fileIndex = 0;

    while (dbIndex < dbDir.ChildCount || fileIndex < fileDir.ChildCount)
    {
        RvFile dbChild = null;
        RvFile fileChild = null;
        int res = 0;

        if (dbIndex < dbDir.ChildCount && fileIndex < fileDir.ChildCount)
        {
            dbChild = dbDir.Child(dbIndex);
            fileChild = fileDir.Child(fileIndex);
            //Debug.WriteLine($@"{dbChild.Name} , {fileChild.Name}");
            res = DBHelper.CompareName(dbChild, fileChild);
        }
        else if (fileIndex < fileDir.ChildCount)
        {
            // get any remaining fileDir children
            fileChild = fileDir.Child(fileIndex);
            res = 1;
        }
        else if (dbIndex < dbDir.ChildCount)
        {
            // get any remaining dbDir children
            dbChild = dbDir.Child(dbIndex);
            res = -1;
        }

        if (report)
        {
            if (fileChild != null)
            {
                long timenow = DateTime.Now.Ticks;
                if (timenow - _lastUpdateTime > TimeSpan.TicksPerSecond / 10)
                {
                    _lastUpdateTime = timenow;
                    _thWrk.Report(new bgwValue2(fileIndex + 1));
                    _thWrk.Report(new bgwText2(Path.Combine(fullDir, fileChild.Name)));
                }
            }
        }

        // was this file found in the DB?
        switch (res)
        {
            case 0:
                if (dbChild == null || fileChild == null)
                {
                    ReportError.SendAndShow("Error in File Scanning Code.");
                    break;
                }

                // Complete MultiName Compare:
                // collect the run of equally-named children from both sides.
                List<RvFile> dbs = new List<RvFile>();
                List<RvFile> files = new List<RvFile>();
                int dbsCount = 1;
                int filesCount = 1;
                dbs.Add(dbChild);
                files.Add(fileChild);
                while (dbIndex + dbsCount < dbDir.ChildCount && DBHelper.CompareName(dbChild, dbDir.Child(dbIndex + dbsCount)) == 0)
                {
                    dbs.Add(dbDir.Child(dbIndex + dbsCount));
                    dbsCount += 1;
                }
                while (fileIndex + filesCount < fileDir.ChildCount && DBHelper.CompareName(fileChild, fileDir.Child(fileIndex + filesCount)) == 0)
                {
                    files.Add(fileDir.Child(fileIndex + filesCount));
                    filesCount += 1;
                }

                bool caseTest = files.Count > 1; // if we only have one file, we don't need to test twice.
                // otherwise we do a case sensitive match first and then a case insensitive match.
                // indexCase = 0 means do a full (case sensitive) filename test
                // indexCase = 1 means do a case insensitive test
                for (int indexCase = caseTest ? 0 : 1; indexCase < 2; indexCase += 1)
                {
                    for (int indexfile = 0; indexfile < filesCount; indexfile++)
                    {
                        if (files[indexfile].SearchFound) { continue; }

                        for (int indexdb = 0; indexdb < dbsCount; indexdb++)
                        {
                            if (dbs[indexdb].SearchFound) { continue; }

                            bool matched = Scanner.Compare.Phase1Test(dbs[indexdb], files[indexfile], EScanLevel, indexCase, out bool matchedAlt);
                            if (!matched) { continue; }

                            MatchFound(dbs[indexdb], files[indexfile], matchedAlt);
                            dbs[indexdb].SearchFound = true;
                            files[indexfile].SearchFound = true;
                        }

                        if (files[indexfile].SearchFound) { continue; }

                        for (int indexdb = 0; indexdb < dbsCount; indexdb++)
                        {
                            if (dbs[indexdb].SearchFound) { continue; }

                            bool matched = Scanner.Compare.Phase2Test(dbs[indexdb], files[indexfile], EScanLevel, indexCase, fullDir, _thWrk, ref _fileErrorAbort, out bool matchedAlt);
                            if (!matched) { continue; }

                            MatchFound(dbs[indexdb], files[indexfile], matchedAlt);
                            dbs[indexdb].SearchFound = true;
                            files[indexfile].SearchFound = true;
                        }
                    }
                }

                for (int indexdb = 0; indexdb < dbsCount; indexdb++)
                {
                    if (dbs[indexdb].SearchFound)
                    {
                        dbIndex++;
                        continue;
                    }
                    DBFileNotFound(dbs[indexdb], dbDir, ref dbIndex);
                }

                for (int indexfile = 0; indexfile < filesCount; indexfile++)
                {
                    if (files[indexfile].SearchFound) { continue; }

                    if (NewFileFound(files[indexfile], dbDir, dbIndex))
                    {
                        dbIndex++;
                    }
                }

                fileIndex += filesCount;
                break;

            case 1:
                if (NewFileFound(fileChild, dbDir, dbIndex))
                {
                    dbIndex++;
                }
                fileIndex++;
                break;

            case -1:
                DBFileNotFound(dbChild, dbDir, ref dbIndex);
                break;
        }

        if (_fileErrorAbort) { return; }

        if (enableCancel && !DBTypeGet.isCompressedDir(ft) && _thWrk.CancellationPending) { return; }
    }
}
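// Illustrative sketch (not part of the scanner): the Compare method above walks two
// name-sorted child lists with a classic two-pointer merge. The minimal standalone
// example below shows just that merge pattern over plain string lists, so the
// -1 / 0 / +1 branches are easy to see in isolation. All names here (MergeCompare,
// onMissing, onMatch, onNew) are made up for illustration, and the case-insensitive
// ordinal comparison is only a stand-in for DBHelper.CompareName. Assumes the usual
// usings (System, System.Collections.Generic) that this file already needs.
private static void MergeCompare(List<string> dbNames, List<string> fileNames,
    Action<string> onMissing, Action<string> onMatch, Action<string> onNew)
{
    int dbIndex = 0;
    int fileIndex = 0;
    while (dbIndex < dbNames.Count || fileIndex < fileNames.Count)
    {
        int res;
        if (dbIndex < dbNames.Count && fileIndex < fileNames.Count)
        {
            res = string.Compare(dbNames[dbIndex], fileNames[fileIndex], StringComparison.OrdinalIgnoreCase);
        }
        else
        {
            // one side is exhausted: remaining DB entries are missing, remaining disk entries are new
            res = dbIndex < dbNames.Count ? -1 : 1;
        }

        if (res == 0) { onMatch(dbNames[dbIndex]); dbIndex++; fileIndex++; } // present in both
        else if (res < 0) { onMissing(dbNames[dbIndex]); dbIndex++; }        // only in the DB
        else { onNew(fileNames[fileIndex]); fileIndex++; }                   // only on disk
    }
}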
/// <summary>
/// Called from 5 places:
/// 1: ScanFiles: main top level loop.
/// 2: MatchFound: called after a ZIP/SevenZip is matched to an item in the DB, where the zip has either changed, has never been scanned, or level 3 scanning is in use.
/// 3: MatchFound: called when a directory is matched to an item in the DB that is not from a DAT. (This is a directory not found in the main tree, as main-tree directories are processed in the top level loop.)
/// 4: NewFileFound: called after a new unmatched ZIP/SevenZip is found.
/// 5: NewFileFound: called after a new unmatched DIR is found.
/// </summary>
/// <param name="dbDir"></param>
/// <param name="report"></param>
private static void CheckADir(RvFile dbDir, bool report)
{
    if (_cacheSaveTimer.Elapsed.TotalMinutes > Settings.rvSettings.CacheSaveTimePeriod)
    {
        _thWrk.Report("Saving Cache");
        DB.Write();
        _thWrk.Report("Saving Cache Complete");
        _cacheSaveTimer.Reset();
        _cacheSaveTimer.Start();
    }

    string fullDir = dbDir.FullName;
    if (report)
    {
        _thWrk.Report(new bgwText2(fullDir));
    }

    // fileDir is a temporary RvFile structure holding the data about the actual directory/files we are scanning.
    // We first populate it with the real file data from the directory (or archive), and then compare it
    // with the data in dbDir.
    RvFile fileDir = null;

    Debug.WriteLine(fullDir);

    FileType ft = dbDir.FileType;

    #region "Populate fileDir"

    // if we are scanning a ZIP/SevenZip file then populate fileDir from the archive, otherwise from the directory on disk
    switch (ft)
    {
        case FileType.Zip:
        case FileType.SevenZip:
            fileDir = Populate.FromAZipFile(dbDir, EScanLevel, _thWrk);
            break;
        case FileType.Dir:
            fileDir = Populate.FromADir(dbDir, EScanLevel, _thWrk, ref _fileErrorAbort);
            break;
        default:
            ReportError.SendAndShow("Unsupported file type in CheckADir " + ft);
            break;
    }

    #endregion

    if (fileDir == null)
    {
        ReportError.SendAndShow("Unknown Reading File Type in Dir Scanner");
        return;
    }

    if (report)
    {
        _thWrk.Report(new bgwSetRange2(fileDir.ChildCount));
        _thWrk.Report(new bgwRange2Visible(true));
    }

    if (!DBTypeGet.isCompressedDir(ft) && _thWrk.CancellationPending)
    {
        return;
    }

    Compare(dbDir, fileDir, report, true);
}
public static void Compare(RvDir dbDir, RvDir fileDir, bool report, bool enableCancel)
{
    string fullDir = dbDir.FullName;
    FileType ft = dbDir.FileType;

    // now we scan down the dbDir and the scanDir, comparing them.
    // if we find a match we mark dbDir as found.
    // if we are missing a file in scanDir we mark that file in dbDir as missing.
    // if we find extra files in scanDir we add it to dbDir and mark it as unknown.
    // we also recurse into any sub directories.
    int dbIndex = 0;
    int fileIndex = 0;

    while (dbIndex < dbDir.ChildCount || fileIndex < fileDir.ChildCount)
    {
        RvBase dbChild = null;
        RvBase fileChild = null;
        int res = 0;

        if (dbIndex < dbDir.ChildCount && fileIndex < fileDir.ChildCount)
        {
            dbChild = dbDir.Child(dbIndex);
            fileChild = fileDir.Child(fileIndex);
            res = DBHelper.CompareName(dbChild, fileChild);
        }
        else if (fileIndex < fileDir.ChildCount)
        {
            //Get any remaining fileDir children
            fileChild = fileDir.Child(fileIndex);
            res = 1;
        }
        else if (dbIndex < dbDir.ChildCount)
        {
            //Get any remaining dbDir children
            dbChild = dbDir.Child(dbIndex);
            res = -1;
        }

        if (report)
        {
            if (fileChild != null)
            {
                long timenow = DateTime.Now.Ticks;
                if ((timenow - _lastUpdateTime) > (TimeSpan.TicksPerSecond / 10))
                {
                    _lastUpdateTime = timenow;
                    _bgw.ReportProgress(0, new bgwValue2(fileIndex));
                    _bgw.ReportProgress(0, new bgwText2(Path.Combine(fullDir, fileChild.Name)));
                }
            }
        }

        // if this file was found in the DB
        switch (res)
        {
            case 0:
                if (dbChild == null || fileChild == null)
                {
                    ReportError.SendAndShow(Resources.FileScanning_CheckADir_Error_in_File_Scanning_Code);
                    break;
                }

                //Complete MultiName Compare
                List<RvBase> dbs = new List<RvBase>();
                List<RvBase> files = new List<RvBase>();
                int dbsCount = 1;
                int filesCount = 1;
                dbs.Add(dbChild);
                files.Add(fileChild);
                while (dbIndex + dbsCount < dbDir.ChildCount && DBHelper.CompareName(dbChild, dbDir.Child(dbIndex + dbsCount)) == 0)
                {
                    dbs.Add(dbDir.Child(dbIndex + dbsCount));
                    dbsCount += 1;
                }
                while (fileIndex + filesCount < fileDir.ChildCount && DBHelper.CompareName(fileChild, fileDir.Child(fileIndex + filesCount)) == 0)
                {
                    files.Add(fileDir.Child(fileIndex + filesCount));
                    filesCount += 1;
                }

                for (int indexfile = 0; indexfile < filesCount; indexfile++)
                {
                    if (files[indexfile].SearchFound) { continue; }

                    for (int indexdb = 0; indexdb < dbsCount; indexdb++)
                    {
                        if (dbs[indexdb].SearchFound) { continue; }

                        bool matched = FullCompare(dbs[indexdb], files[indexfile], false, fullDir, EScanLevel);
                        if (!matched) { continue; }

                        MatchFound(dbs[indexdb], files[indexfile]);
                        dbs[indexdb].SearchFound = true;
                        files[indexfile].SearchFound = true;
                    }

                    if (files[indexfile].SearchFound) { continue; }

                    for (int indexdb = 0; indexdb < dbsCount; indexdb++)
                    {
                        if (dbs[indexdb].SearchFound) { continue; }

                        bool matched = FullCompare(dbs[indexdb], files[indexfile], true, fullDir, EScanLevel);
                        if (!matched) { continue; }

                        MatchFound(dbs[indexdb], files[indexfile]);
                        dbs[indexdb].SearchFound = true;
                        files[indexfile].SearchFound = true;
                    }
                }

                for (int indexdb = 0; indexdb < dbsCount; indexdb++)
                {
                    if (dbs[indexdb].SearchFound)
                    {
                        dbIndex++;
                        continue;
                    }
                    DBFileNotFound(dbs[indexdb], dbDir, ref dbIndex);
                }

                for (int indexfile = 0; indexfile < filesCount; indexfile++)
                {
                    if (files[indexfile].SearchFound) { continue; }

                    NewFileFound(files[indexfile], dbDir, dbIndex);
                    dbIndex++;
                }

                fileIndex += filesCount;
                break;

            case 1:
                NewFileFound(fileChild, dbDir, dbIndex);
                dbIndex++;
                fileIndex++;
                break;

            case -1:
                DBFileNotFound(dbChild, dbDir, ref dbIndex);
                break;
        }

        if (_fileErrorAbort) { return; }

        if (enableCancel && !DBTypeGet.isCompressedDir(ft) && _bgw.CancellationPending) { return; }
    }
}
private static void CheckADir(RvDir dbDir, bool report)
{
    if (_cacheSaveTimer.Elapsed.TotalMinutes > Settings.CacheSaveTimePeriod)
    {
        _bgw.ReportProgress(0, "Saving Cache");
        DB.Write();
        _bgw.ReportProgress(0, "Saving Cache Complete");
        _cacheSaveTimer.Reset();
        _cacheSaveTimer.Start();
    }

    string fullDir = dbDir.FullName;
    if (report)
    {
        _bgw.ReportProgress(0, new bgwText2(fullDir));
    }

    DatStatus checkingDatStatus = dbDir.IsInToSort ? DatStatus.InToSort : DatStatus.NotInDat;

    // fileDir is a temporary RvDir structure holding the data about the actual directory/files we are scanning.
    // We first populate it with the real file data from the directory (or ZIP), and then compare it
    // with the data in dbDir.
    RvDir fileDir = null;

    Debug.WriteLine(fullDir);

    FileType ft = dbDir.FileType;

    #region "Populate fileDir"

    // if we are scanning a ZIP file then populate fileDir from the ZIP file
    switch (ft)
    {
        case FileType.Zip:
        {
            fileDir = new RvDir(ft);

            // open the zip file
            ZipFile checkZ = new ZipFile();
            ZipReturn zr = checkZ.ZipFileOpen(fullDir, dbDir.TimeStamp, true);

            if (zr == ZipReturn.ZipGood)
            {
                dbDir.ZipStatus = checkZ.ZipStatus;

                // To be scanning a ZIP file means it is either new or has changed,
                // as the code below only calls back here if that is true.
                //
                // Level1: Only use header CRCs.
                //         Just get the CRC from the ZIP headers.
                //
                // Level2: Fully checksum changed files only.
                //         We know this file has changed, so do a full checksum scan.
                //
                // Level3: Fully checksum everything.
                //         So do a full checksum scan.
                if (EScanLevel == eScanLevel.Level2 || EScanLevel == eScanLevel.Level3)
                {
                    checkZ.DeepScan();
                }

                // add all of the file information from the zip file into fileDir
                for (int i = 0; i < checkZ.LocalFilesCount(); i++)
                {
                    RvFile tFile = new RvFile(DBTypeGet.FileFromDir(ft))
                    {
                        Name = checkZ.Filename(i),
                        ZipFileIndex = i,
                        ZipFileHeaderPosition = checkZ.LocalHeader(i),
                        Size = checkZ.UncompressedSize(i),
                        CRC = checkZ.CRC32(i)
                    };

                    // all 3 levels read the CRC from the ZIP header
                    tFile.SetStatus(checkingDatStatus, GotStatus.Got);
                    tFile.FileStatusSet(FileStatus.SizeFromHeader | FileStatus.CRCFromHeader);

                    // if we are in level 2 or level 3 then do a full checksum scan.
                    if (EScanLevel == eScanLevel.Level2 || EScanLevel == eScanLevel.Level3)
                    {
                        // DeepScan will return ZipReturn.ZipCRCDecodeError if the header's CRC and
                        // the actual CRC do not match.
                        // So we just need to set the MD5 and SHA1 from the ZIP file.
                        zr = checkZ.FileStatus(i);
                        if (zr == ZipReturn.ZipUntested)
                        {
                            _bgw.ReportProgress(0, new bgwShowCorrupt(zr, fullDir + " : " + checkZ.Filename(i)));
                        }
                        else if (zr != ZipReturn.ZipGood)
                        {
                            _bgw.ReportProgress(0, new bgwShowCorrupt(zr, fullDir + " : " + checkZ.Filename(i)));
                            tFile.GotStatus = GotStatus.Corrupt;
                        }
                        else
                        {
                            tFile.MD5 = checkZ.MD5(i);
                            tFile.SHA1 = checkZ.SHA1(i);
                            tFile.FileStatusSet(FileStatus.SizeVerified | FileStatus.CRCVerified | FileStatus.SHA1Verified | FileStatus.MD5Verified);
                        }
                    }

                    fileDir.ChildAdd(tFile);
                }
            }
            else if (zr == ZipReturn.ZipFileLocked)
            {
                _bgw.ReportProgress(0, new bgwShowError(fullDir, "Zip File Locked"));
                dbDir.TimeStamp = 0;
                dbDir.GotStatus = GotStatus.FileLocked;
            }
            else
            {
                _bgw.ReportProgress(0, new bgwShowCorrupt(zr, fullDir));
                dbDir.GotStatus = GotStatus.Corrupt;
            }

            checkZ.ZipFileClose();
            break;
        }

        case FileType.Dir:
        {
            fileDir = new RvDir(FileType.Dir);

            DirectoryInfo oDir = new DirectoryInfo(fullDir);
            DirectoryInfo[] oDirs = oDir.GetDirectories();
            FileInfo[] oFiles = oDir.GetFiles();

            // add all the subdirectories into fileDir
            foreach (DirectoryInfo dir in oDirs)
            {
                RvBase tDir = new RvDir(FileType.Dir)
                {
                    Name = dir.Name,
                    TimeStamp = dir.LastWriteTime
                };
                tDir.SetStatus(checkingDatStatus, GotStatus.Got);
                fileDir.ChildAdd(tDir);
            }

            // add all the files into fileDir
            foreach (FileInfo oFile in oFiles)
            {
                // if we find any zip files add them as zip files.
                string fExt = Path.GetExtension(oFile.Name);
                switch (fExt.ToLower())
                {
                    case ".zip":
                    {
                        RvDir tGame = new RvDir(FileType.Zip)
                        {
                            Name = Path.GetFileNameWithoutExtension(oFile.Name),
                            TimeStamp = oFile.LastWriteTime
                        };
                        tGame.SetStatus(checkingDatStatus, GotStatus.Got);
                        fileDir.ChildAdd(tGame);
                        break;
                    }

                    default:
                    {
                        string fName = oFile.Name;
                        if (fName == "__RomVault.tmp")
                        {
                            File.Delete(oFile.FullName);
                            continue;
                        }

                        // Scanning a file
                        //
                        // Level1 & 2: (the same for files) fully checksum changed files only.
                        //             Here we just get the TimeStamp of the file, and later,
                        //             if the TimeStamp did not match, we will read the file's CRC, MD5 & SHA1.
                        //
                        // Level3: Fully checksum everything.
                        //         Get everything about the file right here, so read CRC, MD5 & SHA1.

                        // add the file to fileDir
                        RvFile tFile = new RvFile(FileType.File)
                        {
                            Name = fName,
                            Size = (ulong)oFile.Length,
                            TimeStamp = oFile.LastWriteTime
                        };
                        tFile.FileStatusSet(FileStatus.SizeVerified);

                        int errorCode = CHD.CheckFile(oFile, out tFile.SHA1CHD, out tFile.MD5CHD, out tFile.CHDVersion);
                        if (errorCode == 0)
                        {
                            if (tFile.SHA1CHD != null)
                            {
                                tFile.FileStatusSet(FileStatus.SHA1CHDFromHeader);
                            }
                            if (tFile.MD5CHD != null)
                            {
                                tFile.FileStatusSet(FileStatus.MD5CHDFromHeader);
                            }

                            tFile.SetStatus(checkingDatStatus, GotStatus.Got);

                            // if we are scanning at Level3 then we get all the info here
                            if (EScanLevel == eScanLevel.Level3)
                            {
                                DeepScanFile(fullDir, tFile);
                                ChdManCheck(fullDir, tFile);
                            }
                        }
                        else if (errorCode == 32)
                        {
                            tFile.GotStatus = GotStatus.FileLocked;
                            _bgw.ReportProgress(0, new bgwShowError(fullDir, "File Locked"));
                        }
                        else
                        {
                            string filename = Path.Combine(fullDir, tFile.Name);
                            ReportError.Show("File: " + filename + " Error: " + new Win32Exception(errorCode).Message + ". Scan Aborted.");
                            _fileErrorAbort = true;
                            return;
                        }

                        fileDir.ChildAdd(tFile);
                        break;
                    }
                }
            }
            break;
        }

        default:
            ReportError.SendAndShow("Unsupported file type in CheckADir " + ft);
            break;
    }

    #endregion

    if (fileDir == null)
    {
        ReportError.SendAndShow("Unknown Reading File Type in Dir Scanner");
        return;
    }

    if (report)
    {
        _bgw.ReportProgress(0, new bgwSetRange2(fileDir.ChildCount - 1));
        _bgw.ReportProgress(0, new bgwRange2Visible(true));
    }

    if (!DBTypeGet.isCompressedDir(ft) && _bgw.CancellationPending)
    {
        return;
    }

    Compare(dbDir, fileDir, report, true);
}
public override void Read(BinaryReader br, List<RvDat> parentDirDats)
{
    base.Read(br, parentDirDats);

    bool foundTree = br.ReadBoolean();
    if (foundTree)
    {
        Tree = new RvTreeRow();
        Tree.Read(br);
    }
    else
    {
        Tree = null;
    }

    bool foundGame = br.ReadBoolean();
    if (foundGame)
    {
        Game = new RvGame();
        Game.Read(br);
    }
    else
    {
        Game = null;
    }

    int count = br.ReadInt32();
    _dirDats.Clear();
    for (int i = 0; i < count; i++)
    {
        RvDat dat = new RvDat { DatIndex = i };
        dat.Read(br);
        _dirDats.Add(dat);

        string datname = TreeFullName + @"\" + dat.GetData(RvDat.DatData.DatName);
        if ((datname.Length >= 9) && (datname.Substring(0, 9) == @"RomVault\"))
        {
            datname = datname.Substring(9);
        }

        DB.Bgw.ReportProgress(0, new bgwText("Loading: " + datname));
        DB.Bgw.ReportProgress((int)br.BaseStream.Position);
    }
    if (_dirDats.Count > 0)
    {
        parentDirDats = _dirDats;
    }

    count = br.ReadInt32();
    _children.Clear();
    for (int i = 0; i < count; i++)
    {
        RvBase tChild = DBTypeGet.GetRvType((FileType)br.ReadByte());
        tChild.Parent = this;
        tChild.Read(br, parentDirDats);
        _children.Add(tChild);
    }

    if (DBTypeGet.isCompressedDir(FileType))
    {
        ZipStatus = (ZipStatus)br.ReadByte();
    }
}
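// Illustrative sketch (not part of the cache code): Write() and Read() above frame the data
// as "presence flag, then payload" for optional members (Tree, Game, ZipStatus) and
// "count, then items" for lists (_dirDats, _children). The minimal, self-contained round
// trip below shows only that framing with plain strings; it is a simplified stand-in, not
// the actual RomVault cache format, and the method name FramingRoundTripExample is made up.
// Assumes the usual usings (System.IO, System.Collections.Generic, System.Diagnostics).
private static void FramingRoundTripExample()
{
    using (MemoryStream ms = new MemoryStream())
    {
        BinaryWriter bw = new BinaryWriter(ms);

        string optionalNote = "example";                        // stands in for an optional member like Tree or Game
        List<string> children = new List<string> { "a", "b" };  // stands in for _children

        bw.Write(optionalNote != null);                         // presence flag
        if (optionalNote != null)
        {
            bw.Write(optionalNote);
        }

        bw.Write(children.Count);                               // count prefix
        foreach (string child in children)
        {
            bw.Write(child);
        }
        bw.Flush();

        ms.Position = 0;
        BinaryReader br = new BinaryReader(ms);

        string note = br.ReadBoolean() ? br.ReadString() : null;
        int count = br.ReadInt32();
        List<string> readChildren = new List<string>();
        for (int i = 0; i < count; i++)
        {
            readChildren.Add(br.ReadString());
        }

        Debug.WriteLine(note + " / " + readChildren.Count);
    }
}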