Example #1
        public List <bool> FileChecker(string aPath, bool Force = false, int OnlySize = 0)
        {
            var rv        = new List <bool>();
            var inputFile = CheckFile(aPath);

            if (inputFile != null || Force)
            {
                if (Force && inputFile == null)
                {
                    var toCheck = FractHashTree.CreateRecsFromMemory(File.ReadAllBytes(aPath), MinHashSize, GetHP, 0, 0, OnlySize);
                    rv.AddRange(HashRecLookup(toCheck));
                }
                else
                {
                    foreach (var ms in inputFile.Sections)
                    {
                        // ONLY hash CODE/EXEC file sections
                        if (!ms.IsCode && !ms.IsExec)
                        {
                            continue;
                        }

                        var totSiz = FractHashTree.TotalHashesForSize(ms.RawFileSize, MinHashSize);
                        var hr     = new HashRec[totSiz];
                        FractHashTree.CreateRecsFromFile(aPath, ms, MinHashSize, (int)totSiz, hr, 0, GetHP);
                        rv.AddRange(HashRecLookup(hr));
                    }
                }
            }
            return(rv);
        }
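
A minimal usage sketch for FileChecker. The host type (called HashValidator here) is hypothetical; it stands in for whatever class exposes CheckFile, HashRecLookup, MinHashSize, and GetHP in the snippet above.

        // Hypothetical host instance; the real class name is not shown in this snippet.
        var checker = new HashValidator();

        // Check every code/exec section of a PE against the hash database.
        List<bool> sectionHits = checker.FileChecker(@"C:\Windows\System32\kernel32.dll");

        // Force raw-byte hashing even when CheckFile rejects the input (e.g. a headerless
        // blob); OnlySize restricts generation to a single block size (value assumed here).
        List<bool> forcedHits = checker.FileChecker(@"C:\samples\unknown.bin", Force: true, OnlySize: 4096);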
Example #2
        public IEnumerable <Tuple <string, double, List <bool> > > DirectoryChecker(string folder, string glob, int OnlySize = 0)
        {
            foreach (var toScan in Directory.EnumerateFiles(folder, glob, SearchOption.AllDirectories))
            {
                List <bool> rv       = new List <bool>();
                int         len      = (int)new FileInfo(toScan).Length;
                int         alignLen = (int)((len + 0xfff) & ~0xfff);

                var buf = new byte[alignLen];

                using (var f = new FileStream(toScan, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
                    f.Read(buf, 0, alignLen); // returns at most len bytes; the page-aligned tail stays zero-filled

                var toCheck = FractHashTree.CreateRecsFromMemory(buf, MinHashSize, GetHP, 0, 0, OnlySize);
                //var bits = HDB.BitmapScan(toCheck);
                int Found = 0;
                foreach (var bit in toCheck)
                {
                    if (HDB.GetIdxBit(bit.Index))
                    {
                        Found++;
                        rv.Add(true);
                    }
                    else
                    {
                        rv.Add(false);
                    }
                }
                yield return(Tuple.Create <string, double, List <bool> >(toScan, Found * 100.0 / toCheck.Length, rv));
            }
        }
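
Each yielded tuple is (file path, percentage of generated hashes found in the DB bitmap, per-block hit list). A consumption sketch, reusing the hypothetical host instance from the sketch above:

        foreach (var result in checker.DirectoryChecker(@"C:\Windows\System32", "*.dll"))
        {
            // Item2 is the hit percentage computed as Found * 100.0 / toCheck.Length above.
            Console.WriteLine($"{result.Item1}: {result.Item2:N2}% known ({result.Item3.Count:N0} blocks)");
        }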
Example #3
        public void DumpToCloud(ParallelOptions po)
        {
            long           TotalDBWrites = 0;
            Extract        hashFile      = null;
            List <HashRec> batch         = null;

            if (GetHP == null)
            {
                GetHP = () => HashLib.HashFactory.Crypto.CreateTiger2();
            }

            GeneratedSW = Stopwatch.StartNew();
            while (!DoneDirScan || FL.LoadList.Count > 0)
            {
                FL.LoadList.TryPop(out hashFile);
                // guard: the pop can fail even after the directory scan completes
                if (hashFile == null)
                {
                    if (po.CancellationToken.IsCancellationRequested)
                    {
                        return;
                    }
                    Thread.Yield();
                    continue;
                }

                if (po.CancellationToken.IsCancellationRequested)
                {
                    return;
                }

                foreach (var ms in hashFile.Sections)
                {
                    // ONLY hash CODE/EXEC file sections & PEHeader
                    if (!ms.IsCode && !ms.IsExec)
                    {
                        continue;
                    }

                    var ReadSize   = ms.VirtualSize;
                    var BufferSize = (int)((ReadSize + 0xfff) & ~0xfff);
                    var memBuff    = new byte[BufferSize];

                    using (var fread = new FileStream(hashFile.FileName, FileMode.Open, FileAccess.Read, FileShare.Read, PAGE_SIZE))
                    {
                        fread.Seek(ms.RawFilePointer, SeekOrigin.Begin);
                        fread.Read(memBuff, 0, (int)ReadSize);
                    }

                    foreach (var items in FractHashTree.CreateRecsFromMemoryPartion(memBuff, MinHashSize, GetHP, hashFile.rID))
                    {
                        if (items == null)
                        {
                            continue;
                        }

                        foreach (var item in items)
                        {
                            batch = batches[item.FullHash[0]];

                            bool contains = batch.Any(x => x.FullHash.SequenceEqual(item.FullHash));

                            if (contains)
                            {
                                continue;
                            }

                            batch.Add(item);

                            Interlocked.Increment(ref TotalRequested);

                            if (batch.Count == 100)
                            {
                                Interlocked.Add(ref TotalDBWrites, batch.Count);

                                if ((TotalRequested % 100) == 0)
                                {
                                    WriteColor(ConsoleColor.Green, $"Generated {TotalDBWrites:N0} entries {(TotalDBWrites / GeneratedSW.Elapsed.TotalSeconds):N0} per second. Task time: {GeneratedSW.Elapsed}");
                                }

                                // signal uploader
                                ReadyQueue.Add(batch);

                                // reset batch
                                batch = new List <HashRec>();
                                batches[item.FullHash[0]] = batch;
                            }
                        }
                    }
                }
            }
            foreach (var b in batches)
            {
                if (b.Count < 1)
                {
                    continue;
                }

                ReadyQueue.Add(b);
                TotalDBWrites += b.Count;
            }
            ReadyQueue.CompleteAdding();
            WriteColor(ConsoleColor.Green, $"Finished DB write {TotalDBWrites:N0} NEW entries. Requested {TotalRequested:N0} (reduced count reflects de-duplication). Task time: {GeneratedSW.Elapsed}");
        }
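
The CompleteAdding call implies ReadyQueue is a BlockingCollection; a consumer-side sketch under that assumption (UploadBatch is a placeholder for the actual cloud write):

        // Assumed type: BlockingCollection<List<HashRec>>. GetConsumingEnumerable blocks
        // until a batch is available and ends cleanly once CompleteAdding() has been called.
        foreach (var batch in ReadyQueue.GetConsumingEnumerable())
        {
            UploadBatch(batch); // placeholder: persist the deduplicated batch to the cloud store
        }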
Example #4
        public int LoadFromMem(byte[] Input)
        {
            int written = 0;
            var hashArr = FractHashTree.CreateRecsFromMemory(Input, MinHashSize, GetHP);
            var Count   = hashArr.Length;

            using (var fs = new FileStream(DBFile, FileMode.Open, FileAccess.ReadWrite, FileShare.ReadWrite, DB_READ_SIZE))
            {
                // we need 2 pages now since we're block reading and we might pick a hash
                // whose scan starts at the very end of a page
                byte[] buff = new byte[DB_READ_SIZE];
                byte[] zero = new byte[HASH_REC_BYTES];
                int    i = 0, firstIndex = 0, zeroIndex = 0;
                bool   WriteBack = false;

                do
                {
                    var Index = hashArr[i].Index;
                    // convert Index to PageIndex
                    var DBPage = (long)((Index & SortMask) & ~DB_PAGE_MASK);

                    // find block offset for this hash
                    fs.Seek(DBPage, SeekOrigin.Begin);
                    fs.Read(buff, 0, DB_READ_SIZE);
                    WriteBack = false;

                    do
                    {
                        // skip duplicates
                        if (i + 1 < Count &&
                            hashArr[i].Index == hashArr[i + 1].Index &&
                            hashArr[i].CompressedHash == hashArr[i + 1].CompressedHash)
                        {
                            i++;
                            continue;
                        }

                        if (i < Count)
                        {
                            // re-read Index since we could be on the inner loop
                            Index = hashArr[i].Index;
                            // Index inside of a page
                            var PageIndex = Index & DB_PAGE_MASK;

                            // Hash to populate the DB with
                            var toWrite = HashRec.ToByteArr(hashArr[i]);

                            // do we already have this hash from disk?
                            firstIndex = buff.SearchBytes(toWrite, (int)PageIndex, toWrite.Length);
                            if (firstIndex < 0)
                            {
                                zeroIndex = buff.SearchBytes(zero, (int)PageIndex, zero.Length);
                                if (zeroIndex >= 0)
                                {
                                    // we want the modified buffer to get written back
                                    WriteBack = true;
                                    int j, k;
                                    // update buff with new hash entry for write back
                                    //Array.Copy(toWrite, 0, buff, zeroIndex, toWrite.Length);
                                    for (j = zeroIndex, k = 0; j < zeroIndex + toWrite.Length; j++, k++)
                                    {
                                        buff[j] = toWrite[k];
                                    }

                                    written++;
                                    // set the bit for the original index; shifted down since we're bit-aligned
                                    HDB.SetIdxBit(Index);
                                }
                                else
                                {
                                    var strerr = "HASH TABLE SATURATED! YOU NEED TO MAKE THE DB LARGER!!";
                                    WriteColor(ConsoleColor.Red, strerr);
                                    throw new ApplicationException(strerr);
                                }
                            }
                        }
                        i++;

                        // continue to next entry if it's in the same block
                    } while (i < Count && (((hashArr[i].Index & SortMask) & ~DB_PAGE_MASK) == (ulong)DBPage));

                    if (WriteBack)
                    {
                        // reset seek position
                        fs.Seek(DBPage, SeekOrigin.Begin);
                        // only write back 1 page if we can help it
                        fs.Write(buff, 0, DB_READ_SIZE);
                    }
                } while (i < Count);
            }
            return(written);
        }
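
The index-to-page arithmetic above, restated standalone. The mask values are illustrative assumptions; the real SortMask and DB_PAGE_MASK come from the database configuration:

        const ulong DB_PAGE_MASK = 0xFFF;      // assumed: offset bits within one 4 KiB DB page
        const ulong SortMask = 0xFFFFFFFFFF;   // assumed: hash bits used as the DB address

        ulong index = hashArr[0].Index;                           // any HashRec index from above
        long dbPage = (long)(index & SortMask & ~DB_PAGE_MASK);   // page-aligned offset passed to Seek
        long inPage = (long)(index & DB_PAGE_MASK);               // slot offset scanned within that page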
Example #5
        void FillHashBuff(ParallelOptions po)
        {
            int TotalHashGenCount = 0;
            int HashGenCnt        = 0;
            int LoadedCnt         = 0;

            HashRec[] hashX;

            Stopwatch sw = Stopwatch.StartNew();

            do
            {
                Extract next = null;
                #region Partition
                // prescan enough entries to not overspill the specified hash buffer count
                long CountForMaxBuff = 0;
                ConcurrentStack <Extract> ReadyList = new ConcurrentStack <Extract>();

                while (!DoneDirScan || !LoadList.IsEmpty)
                {
                    LoadList.TryPop(out next);
                    // guard: the pop can fail even after the directory scan completes
                    if (next == null)
                    {
                        if (po.CancellationToken.IsCancellationRequested)
                        {
                            return;
                        }
                        Thread.Yield();
                        continue;
                    }

                    foreach (var ms in next.Sections)
                    {
                        if (!ms.IsCode && !ms.IsExec)
                        {
                            continue;
                        }

                        var BufferSize = (uint)((ms.RawFileSize + 0xfff) & ~0xfff);
                        CountForMaxBuff += FractHashTree.TotalHashesForSize(BufferSize, MinHashSize);
                    }

                    if (CountForMaxBuff < BufferCount)
                    {
                        ReadyList.Push(next);
                    }
                    else
                    {
                        // add it back for reprocessing
                        LoadList.Push(next);
                        if (po.CancellationToken.IsCancellationRequested)
                        {
                            return;
                        }
                        po.CancellationToken.ThrowIfCancellationRequested();
                        break;
                    }
                }

                #endregion
                try
                {
                    hashX = new HashRec[BufferCount];
                }
                catch (Exception ex)
                {
                    WriteColor(ConsoleColor.Red, $"BufferCount {BufferCount} too large, try something a bit smaller (however keep it as large as you can :)");
                    WriteColor(ConsoleColor.Yellow, ex.ToString());
                    source.Cancel();
                    return;
                }

                //WriteColor(ConsoleColor.White, $"Parallel partition from {StartingAvailable} to {CurrAvailableMax} starting.");
                Parallel.ForEach(ReadyList,
                                 (hashFile) =>
                                 //for (int i = StartingAvailable; i < CurrAvailableMax; i++)
                {
                    if (po.CancellationToken.IsCancellationRequested)
                    {
                        return;
                    }

                    Interlocked.Increment(ref LoadedCnt);
                    foreach (var ms in hashFile.Sections)
                    {
                        // ONLY hash CODE/EXEC file sections & PEHeader
                        if (!ms.IsCode && !ms.IsExec)
                        {
                            continue;
                        }

                        if (ms.RawFileSize <= 0)
                        {
                            LogEx(0, $"Compressed/malicious PE {hashFile.FileName} is too small.  Consider manual review of section [{ms.Name}] (e.g. UPX will overlap sections so we will hash it on the next pass, TODO: UPX decoder).");
                            continue;
                        }

                        //var tot = (int)FractHashTree.TotalHashesForSize(ms.RawFileSize, MinHashSize);

                        //var myCnt = Interlocked.Add(ref HashGenCnt, tot);
                        //var fht = new FractHashTree(hashFile.FileName, ms, MinHashSize, GetHP);
                        //var dht = fht.DumpRecTree();
                        //var len = dht.Count();
                        //var myLim = Interlocked.Add(ref HashGenCnt, len);
                        //dht.CopyTo(0, hashX, myLim - len, len);

                        var ReadSize   = ms.VirtualSize;
                        var BufferSize = (int)((ReadSize + 0xfff) & ~0xfff);
                        var memBuff    = new byte[BufferSize];

                        using (var fread = new FileStream(hashFile.FileName, FileMode.Open, FileAccess.Read, FileShare.Read, PAGE_SIZE))
                        {
                            fread.Seek(ms.RawFilePointer, SeekOrigin.Begin);
                            fread.Read(memBuff, 0, (int)ReadSize);
                        }

                        var recs = FractHashTree.CreateRecsFromMemory(memBuff, MinHashSize, GetHP, hashFile.rID, 0, 0, true);
                        if (HashGenCnt + recs.Length > hashX.Length)
                        {
                            LoadList.Push(hashFile);
                            break;
                        }

                        var myLim = Interlocked.Add(ref HashGenCnt, recs.Length);
                        recs.CopyTo(hashX, myLim - recs.Length);

                        //FractHashTree.CreateRecsFromFile(hashFile.FileName, ms, MinHashSize, tot, hashX, myCnt - tot, GetHP);

                        if ((LoadedCnt % 100) == 0 && sw.Elapsed.TotalSeconds > 0)
                        {
                            WriteColor(ConsoleColor.Green, $"HashGen entries: {HashGenCnt:N0} - per second { ((TotalHashGenCount + HashGenCnt) / sw.Elapsed.TotalSeconds):N0}");
                        }
                        //}
                    }
                });
                if (po.CancellationToken.IsCancellationRequested)
                {
                    return;
                }

                TotalHashGenCount += HashGenCnt;

                WriteColor(ConsoleColor.Green, $"Filled queue {HashGenCnt:N0}, signaling ReadyQueue.");
                WriteColor(ConsoleColor.Green, $"Loaded-Files/Generated-Hash-Values {LoadedCnt:N0}/{TotalHashGenCount:N0}.  HashGen: {(TotalHashGenCount / sw.Elapsed.TotalSeconds):N0} per second.");

                sw.Stop();
                ReadyQueue.Add(Tuple.Create <int, HashRec[]>(HashGenCnt, hashX));
                HashGenCnt = 0;
                sw.Start();
            } while (!DoneDirScan || !LoadList.IsEmpty);

            sw.Stop();
            WriteColor(ConsoleColor.Green, $"Finished Files/Hashes {LoadedCnt:N0}/{TotalHashGenCount:N0}.  HashGen: {(TotalHashGenCount / sw.Elapsed.TotalSeconds):N0} per second.");
            return;
        }
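
The prescan in the Partition region exists so each pass can allocate one fixed-size hashX buffer up front: it sums TotalHashesForSize over every code/exec section and only admits files to ReadyList while the running total stays below BufferCount, pushing the first overflowing file back onto LoadList for the next pass. Each filled buffer is then handed off through ReadyQueue together with HashGenCnt, the count of valid entries, since the tail of hashX beyond that count is unused.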