/// <summary>
/// Hashes the given file and looks every generated hash record up in the database,
/// returning one bool per record (hit/miss).  A file that fails <c>CheckFile</c>
/// parsing is skipped unless <paramref name="Force"/> is set, in which case the raw
/// bytes are hashed wholesale.
/// </summary>
/// <param name="aPath">Path of the file to check.</param>
/// <param name="Force">When true, hash the raw file bytes even if it does not parse.</param>
/// <param name="OnlySize">Restrict forced hashing to a single block size (0 = all sizes).</param>
/// <returns>Lookup results, one per hash record generated.</returns>
public List<bool> FileChecker(string aPath, bool Force = false, int OnlySize = 0)
{
    var results = new List<bool>();
    var parsed = CheckFile(aPath);

    // Unparsable and not forced: nothing we can do.
    if (parsed == null && !Force)
        return results;

    if (parsed == null)
    {
        // Forced raw-bytes path (Force is necessarily true here).
        var forcedRecs = FractHashTree.CreateRecsFromMemory(File.ReadAllBytes(aPath), MinHashSize, GetHP, 0, 0, OnlySize);
        results.AddRange(HashRecLookup(forcedRecs));
        return results;
    }

    foreach (var section in parsed.Sections)
    {
        // NOTE(review): this requires sections to be BOTH code and exec, while
        // FillHashBuff skips only when a section is NEITHER — confirm which
        // filter is intended; behavior preserved here.
        if (!section.IsCode || !section.IsExec)
            continue;

        var hashCount = FractHashTree.TotalHashesForSize(section.RawFileSize, MinHashSize);
        var sectionRecs = new HashRec[hashCount];
        FractHashTree.CreateRecsFromFile(aPath, section, MinHashSize, (int)hashCount, sectionRecs, 0, GetHP);
        results.AddRange(HashRecLookup(sectionRecs));
    }

    return results;
}
/// <summary>
/// Consumer loop: drains <c>Extract</c> entries from <c>LoadList</c> (fed by the
/// directory scanner), hashes every CODE/EXEC section of each file into a
/// fixed-size <c>HashRec</c> buffer, and hands each filled buffer to
/// <c>ReadyQueue</c>.  Runs until the scan is done and the load list is empty,
/// or until cancellation is requested via <paramref name="po"/>.
/// </summary>
/// <param name="po">Parallel options carrying the cooperative CancellationToken.</param>
void FillHashBuff(ParallelOptions po)
{
    int TotalHashGenCount = 0;
    int HashGenCnt = 0;   // hashes written into the current hashX buffer (shared by workers)
    int LoadedCnt = 0;    // files processed so far (progress reporting only)
    HashRec[] hashX;
    Stopwatch sw = Stopwatch.StartNew();
    do
    {
        Extract next = null;
        #region Partition
        // Pre-scan enough entries that their worst-case hash count stays
        // under the configured buffer capacity (BufferCount).
        long CountForMaxBuff = 0;
        var ReadyList = new ConcurrentStack<Extract>();
        while (!DoneDirScan || !LoadList.IsEmpty)
        {
            if (!LoadList.TryPop(out next) || next == null)
            {
                // Nothing available right now.  BUGFIX: the original fell
                // through and dereferenced a null `next` when a pop failed
                // after DoneDirScan became true (racing consumer); re-test
                // the loop condition instead.
                if (po.CancellationToken.IsCancellationRequested)
                    return;
                if (!DoneDirScan)
                    Thread.Yield();
                continue;
            }
            foreach (var ms in next.Sections)
            {
                if (!ms.IsCode && !ms.IsExec)
                    continue;
                // Round the section size up to a 4K page, matching the
                // page-rounded read buffer used below.
                var BufferSize = (uint)((ms.RawFileSize + 0xfff) & ~0xfff);
                CountForMaxBuff += FractHashTree.TotalHashesForSize(BufferSize, MinHashSize);
            }
            if (CountForMaxBuff < BufferCount)
            {
                ReadyList.Push(next);
            }
            else
            {
                // This entry would overflow the buffer: put it back for the
                // next pass and go process what we have.
                LoadList.Push(next);
                if (po.CancellationToken.IsCancellationRequested)
                    return;
                break;
            }
        }
        #endregion
        try
        {
            hashX = new HashRec[BufferCount];
        }
        catch (Exception ex)
        {
            WriteColor(ConsoleColor.Red, $"BuferCount {BufferCount} too large, try something a bit smaller (however keep it as large as you can :)");
            WriteColor(ConsoleColor.Yellow, $"{ex.ToString()}");
            source.Cancel();
            return;
        }
        Parallel.ForEach(ReadyList, (hashFile) =>
        {
            if (po.CancellationToken.IsCancellationRequested)
                return;
            Interlocked.Increment(ref LoadedCnt);
            foreach (var ms in hashFile.Sections)
            {
                // ONLY hash CODE/EXEC file sections & PEHeader
                if (!ms.IsCode && !ms.IsExec)
                    continue;
                if (ms.RawFileSize <= 0)
                {
                    LogEx(0, $"Compressed/malishous PE {hashFile.FileName} is too small. Consider manual review of section [{ms.Name}] (e.g. UPX will overlap sections so we will hash it on next pass, TODO: UPX decoder).");
                    continue;
                }
                // Read VirtualSize bytes of the section into a page-rounded,
                // zero-padded buffer.
                var ReadSize = ms.VirtualSize;
                var BufferSize = (int)((ReadSize + 0xfff) & ~0xfff);
                var memBuff = new byte[BufferSize];
                using (var fread = new FileStream(hashFile.FileName, FileMode.Open, FileAccess.Read, FileShare.Read, PAGE_SIZE))
                {
                    fread.Seek(ms.RawFilePointer, SeekOrigin.Begin);
                    // BUGFIX: Stream.Read may return fewer bytes than
                    // requested; loop until done.  A file shorter than
                    // VirtualSize leaves the tail zero-filled, as before.
                    int read = 0, toRead = (int)ReadSize;
                    while (read < toRead)
                    {
                        int got = fread.Read(memBuff, read, toRead - read);
                        if (got <= 0)
                            break;
                        read += got;
                    }
                }
                var recs = FractHashTree.CreateRecsFromMemory(memBuff, MinHashSize, GetHP, hashFile.rID, 0, 0, true);
                // BUGFIX: atomically reserve recs.Length slots in hashX.  The
                // original check-then-Interlocked.Add raced: two workers could
                // both pass the capacity test and overflow the buffer.
                int baseIdx;
                while (true)
                {
                    baseIdx = HashGenCnt;
                    if (baseIdx + recs.Length > hashX.Length)
                    {
                        baseIdx = -1;
                        break;
                    }
                    if (Interlocked.CompareExchange(ref HashGenCnt, baseIdx + recs.Length, baseIdx) == baseIdx)
                        break;
                }
                if (baseIdx < 0)
                {
                    // No room left in this buffer; requeue the file so it is
                    // handled on the next pass.
                    // NOTE(review): sections already copied into hashX will be
                    // re-hashed when the file comes around again — confirm
                    // duplicates are acceptable downstream.
                    LoadList.Push(hashFile);
                    break;
                }
                recs.CopyTo(hashX, baseIdx);
                if ((LoadedCnt % 100) == 0 && sw.Elapsed.TotalSeconds > 0)
                    WriteColor(ConsoleColor.Green, $"HashGen entries: {HashGenCnt:N0} - per second { ((TotalHashGenCount + HashGenCnt) / sw.Elapsed.TotalSeconds):N0}");
            }
        });
        if (po.CancellationToken.IsCancellationRequested)
            return;
        TotalHashGenCount += HashGenCnt;
        WriteColor(ConsoleColor.Green, $"Filled queue {HashGenCnt:N0}, signaling readyqueue.");
        WriteColor(ConsoleColor.Green, $"Loaded-Files/Generated-Hash-Values {LoadedCnt:N0}/{TotalHashGenCount:N0}. HashGen: {(TotalHashGenCount / sw.Elapsed.TotalSeconds):N0} per second.");
        // Pause timing while the consumer takes the buffer, then resume for
        // the next fill pass.
        sw.Stop();
        ReadyQueue.Add(Tuple.Create<int, HashRec[]>(HashGenCnt, hashX));
        HashGenCnt = 0;
        sw.Start();
    } while (!DoneDirScan || !LoadList.IsEmpty);
    sw.Stop();
    WriteColor(ConsoleColor.Green, $"Finished Files/Hashes {LoadedCnt:N0}/{TotalHashGenCount:N0}. HashGen: {(TotalHashGenCount / sw.Elapsed.TotalSeconds):N0} per second.");
    return;
}