/// <summary>
/// Reads every hash from the specified store file and adds each one to the supplied set.
/// </summary>
/// <param name="file">Path of the store file to read.</param>
/// <param name="hashes">Set that receives the hashes loaded from the file.</param>
/// <param name="progress">Progress tracker (not consulted by this overload).</param>
private void LoadHashesFromStoreFile(string file, HashSet<byte[]> hashes, OperationProgress progress)
{
    foreach (byte[] loadedHash in this.LoadHashesFromStoreFile(file))
    {
        hashes.Add(loadedHash);
    }
}
/// <summary>
/// Lazily enumerates the non-empty lines of a text file, updating read progress as it goes.
/// </summary>
/// <param name="sourceFile">Path of the file to read.</param>
/// <param name="progress">Progress tracker that receives file-read state and position updates.</param>
/// <returns>Each non-empty line of the file, in order.</returns>
/// <exception cref="FileNotFoundException">Thrown when <paramref name="sourceFile"/> does not exist.</exception>
private static IEnumerable<string> GetLinesFromFile(string sourceFile, OperationProgress progress)
{
    if (!File.Exists(sourceFile))
    {
        throw new FileNotFoundException("The source file was not found", sourceFile);
    }

    try
    {
        using (StreamReader reader = new StreamReader(sourceFile))
        {
            progress.FileReadInProgress = true;
            progress.FileReadStartTime = DateTime.Now;
            progress.FileSizeTotal = reader.BaseStream.Length;

            while (!reader.EndOfStream)
            {
                string line = reader.ReadLine();
                progress.FileReadPosition = reader.BaseStream.Position;

                if (string.IsNullOrEmpty(line))
                {
                    continue;
                }

                yield return line;
            }
        }
    }
    finally
    {
        // Fix: this flag was previously cleared by a statement after the using block, which an
        // iterator only reaches on complete enumeration. If the caller stopped enumerating early
        // (e.g. a broken foreach) or an exception escaped, FileReadInProgress stayed true forever.
        // A finally block runs when the iterator is disposed, so the flag is now always reset.
        progress.FileReadInProgress = false;
    }
}
/// <summary>
/// Imports unsorted hexadecimal hashes from a text file into the store, optionally batching
/// writes of <paramref name="batchSize"/> hashes and deferring the sort to the end of the batch.
/// </summary>
/// <param name="store">Store that receives the hashes.</param>
/// <param name="storeType">The type of store to import into.</param>
/// <param name="sourceFile">Path of the file containing hexadecimal hashes, one per line.</param>
/// <param name="ct">Token used to cancel the import.</param>
/// <param name="batchSize">Number of hashes to accumulate before flushing; negative values fall back to the default, 0 disables batching.</param>
/// <param name="progress">Optional progress tracker; a fresh one is created when null.</param>
public static void ImportHexHashesFromFile(Store store, StoreType storeType, string sourceFile, CancellationToken ct, int batchSize = DefaultBatchSize, OperationProgress progress = null)
{
    bool batched = false;

    if (progress == null)
    {
        progress = new OperationProgress();
    }

    if (batchSize < 0)
    {
        batchSize = DefaultBatchSize;
    }

    progress.Status = $"Loading unsorted hexadecimal hashes from {sourceFile}";

    HashSet<byte[]> hashes = new HashSet<byte[]>(ByteArrayComparer.Comparer);

    foreach (byte[] hash in Store.GetHexHashesFromFile(sourceFile, store.HashLength, progress))
    {
        ct.ThrowIfCancellationRequested();
        progress.IncrementTotalProcessed();
        hashes.Add(hash);

        // Flush once the in-memory set reaches the batch size. batchSize == 0 disables
        // batching and accumulates everything for a single write at the end.
        if (batchSize > 0 && hashes.Count >= batchSize)
        {
            if (!batched)
            {
                batched = true;
                store.StartBatch(storeType);
            }

            store.AddToStore(hashes, storeType, ct, progress);
            hashes.Clear();
        }
    }

    if (hashes.Count > 0)
    {
        store.AddToStore(hashes, storeType, ct, progress);
    }

    if (batched)
    {
        // Fix: removed the '$' interpolation prefix from a literal with no placeholders
        // (the produced string is unchanged).
        progress.Status = "Sorting hashes into store";
        store.EndBatch(storeType, ct, progress);
    }

    progress.Status = "Done";
}
/// <summary>
/// Merges every temporary (*.db.bin) file in the store path into its corresponding permanent
/// store file, rewriting the permanent file only when the merge added new hashes, then deletes
/// the temporary file. Files are processed in parallel unless a debugger is attached.
/// </summary>
/// <param name="progress">Progress tracker that receives consolidation state and position updates.</param>
/// <param name="ct">Token used to cancel the parallel consolidation.</param>
public void ConsolidateAndSort(OperationProgress progress, CancellationToken ct)
{
    progress.Status = "Consolidating and sorting new entries";

    List<string> tempFiles = Directory.EnumerateFiles(this.StorePath, "*.db.bin").ToList();

    progress.ConsolidateStoreInProgress = true;
    progress.ConsolidateStoreTotal = tempFiles.Count;
    progress.ConsolidateStoreStartTime = DateTime.Now;
    progress.ConsolidateStorePosition = 0;

    ParallelOptions options = new ParallelOptions();
    // Run single-threaded under a debugger to keep stepping predictable.
    options.MaxDegreeOfParallelism = Debugger.IsAttached ? 1 : -1;
    options.CancellationToken = ct;

    Parallel.ForEach(tempFiles, options, tempFile =>
    {
        // Strip the trailing ".bin" to get the permanent store file name.
        string permanentFile = tempFile.Substring(0, tempFile.Length - 4);
        Trace.WriteLine($"Consolidating {permanentFile}");

        HashSet<byte[]> mergedHashes = new HashSet<byte[]>(ByteArrayComparer.Comparer);
        HashSet<byte[]> incomingHashes = new HashSet<byte[]>(ByteArrayComparer.Comparer);

        this.LoadHashesFromStoreFile(permanentFile, mergedHashes, progress);
        this.LoadHashesFromStoreFile(tempFile, incomingHashes, progress);

        if (this.MergeHashSets(mergedHashes, incomingHashes, progress))
        {
            this.WriteStoreFile(permanentFile, false, mergedHashes);
        }

        progress.IncrementConsolidateStorePosition();
        File.Delete(tempFile);
    });

    progress.ConsolidateStoreInProgress = false;
}
/// <summary>
/// Copies hashes from a source store into a target store, accumulating consecutive hashes that
/// map to the same target range and flushing each group as soon as the range changes. Relies on
/// the source store yielding hashes grouped by range (e.g. already sorted).
/// </summary>
/// <param name="targetStore">Store that receives the hashes.</param>
/// <param name="sourceStore">Store the hashes are read from.</param>
/// <param name="sourceStoreType">Store type to read from the source.</param>
/// <param name="destinationStoreType">Store type to write to in the target.</param>
/// <param name="ct">Token used to cancel the import.</param>
/// <param name="progress">Optional progress tracker; a fresh one is created when null.</param>
public static void ImportFromStore(Store targetStore, Store sourceStore, StoreType sourceStoreType, StoreType destinationStoreType, CancellationToken ct, OperationProgress progress = null)
{
    progress = progress ?? new OperationProgress();
    progress.Status = $"Merging hashes from source store";

    string currentRange = null;
    HashSet<byte[]> pending = new HashSet<byte[]>(ByteArrayComparer.Comparer);

    foreach (byte[] hash in sourceStore.GetHashes(sourceStoreType))
    {
        ct.ThrowIfCancellationRequested();
        progress.IncrementTotalProcessed();

        string range = targetStore.GetRangeFromHash(hash);

        // Range boundary reached: flush everything gathered for the previous range.
        if (range != currentRange && pending.Count > 0)
        {
            targetStore.AddToStore(pending, currentRange, destinationStoreType, progress);
            pending.Clear();
        }

        pending.Add(hash);
        currentRange = range;
    }

    // Flush the final range group, if any.
    if (pending.Count > 0)
    {
        targetStore.AddToStore(pending, currentRange, destinationStoreType, progress);
        pending.Clear();
    }

    progress.Status = "Done";
}
/// <summary>
/// Imports hexadecimal hashes from a pre-sorted text file, committing each contiguous run of
/// hashes that map to the same store range as a single write.
/// </summary>
/// <param name="store">Store that receives the hashes.</param>
/// <param name="storeType">The type of store to import into.</param>
/// <param name="sourceFile">Path of the sorted file containing hexadecimal hashes, one per line.</param>
/// <param name="ct">Token used to cancel the import.</param>
/// <param name="progress">Optional progress tracker; a fresh one is created when null.</param>
public static void ImportHexHashesFromSortedFile(Store store, StoreType storeType, string sourceFile, CancellationToken ct, OperationProgress progress = null)
{
    if (progress == null)
    {
        progress = new OperationProgress();
    }

    progress.Status = $"Loading sorted hexadecimal hashes from {sourceFile}";

    string activeRange = null;
    HashSet<byte[]> buffer = new HashSet<byte[]>(ByteArrayComparer.Comparer);

    foreach (byte[] hash in Store.GetHexHashesFromFile(sourceFile, store.HashLength, progress))
    {
        ct.ThrowIfCancellationRequested();
        progress.IncrementTotalProcessed();

        string range = store.GetRangeFromHash(hash);

        // The sorted input has moved on to a new range; commit the previous one.
        if (range != activeRange && buffer.Count > 0)
        {
            store.AddToStore(buffer, activeRange, storeType, progress);
            buffer.Clear();
        }

        buffer.Add(hash);
        activeRange = range;
    }

    // Commit whatever remains for the last range.
    if (buffer.Count > 0)
    {
        store.AddToStore(buffer, activeRange, storeType, progress);
        buffer.Clear();
    }

    progress.Status = "Done";
}
/// <summary>
/// Removes the supplied range-grouped hashes from the store, processing each range in parallel.
/// Flush progress is tracked on <paramref name="progress"/> and the in-progress flag is cleared
/// even when the operation fails or is cancelled.
/// </summary>
/// <param name="hashes">Hashes to remove, keyed by store range.</param>
/// <param name="storeType">The type of store to remove from.</param>
/// <param name="ct">Token used to cancel the parallel removal.</param>
/// <param name="emptyAfterCommit">When true, each range's set is cleared after it has been processed.</param>
/// <param name="progress">Progress tracker that receives flush state and position updates.</param>
private void RemoveFromStore(Dictionary<string, HashSet<byte[]>> hashes, StoreType storeType, CancellationToken ct, bool emptyAfterCommit, OperationProgress progress)
{
    try
    {
        progress.FlushStoreInProgress = true;
        progress.FlushStoreTotal = hashes.Count;
        progress.FlushStorePosition = 0;
        progress.FlushStoreStartTime = DateTime.Now;

        ParallelOptions options = new ParallelOptions();
        options.CancellationToken = ct;
        // Single-threaded under a debugger for easier stepping.
        options.MaxDegreeOfParallelism = Debugger.IsAttached ? 1 : -1;

        Parallel.ForEach(hashes, options, group =>
        {
            this.RemoveFromStore(group.Value, group.Key, storeType, progress);
            progress.IncrementFlushStorePosition();

            if (emptyAfterCommit)
            {
                group.Value.Clear();
            }
        });
    }
    finally
    {
        progress.FlushStoreInProgress = false;
    }
}
/// <summary>
/// Adds a flat set of hashes to the store by first grouping them into their ranges
/// (range comparison is case-insensitive) and then delegating to the range-keyed overload.
/// </summary>
/// <param name="hashes">Hashes to add.</param>
/// <param name="storeType">The type of store to add to.</param>
/// <param name="ct">Token used to cancel the operation.</param>
/// <param name="progress">Progress tracker passed through to the range-keyed overload.</param>
public void AddToStore(HashSet<byte[]> hashes, StoreType storeType, CancellationToken ct, OperationProgress progress)
{
    Dictionary<string, HashSet<byte[]>> grouped = hashes
        .GroupBy(this.GetRangeFromHash, StringComparer.OrdinalIgnoreCase)
        .ToDictionary(g => g.Key, g => new HashSet<byte[]>(g, ByteArrayComparer.Comparer));

    this.AddToStore(grouped, storeType, ct, false, progress);
}
/// <summary>
/// Imports plain-text passwords (or words) from a file, hashing each line and writing the
/// resulting hashes to the store grouped by range, optionally in batches.
/// </summary>
/// <param name="store">Store that receives the hashes.</param>
/// <param name="storeType">The type of store to import into; word stores normalize lines before hashing.</param>
/// <param name="sourceFile">Path of the file containing one password or word per line.</param>
/// <param name="ct">Token used to cancel the import.</param>
/// <param name="batchSize">Number of unique hashes to accumulate before flushing; negative values fall back to the default, 0 disables batching.</param>
/// <param name="progress">Optional progress tracker; a fresh one is created when null.</param>
public static void ImportPasswordsFromFile(Store store, StoreType storeType, string sourceFile, CancellationToken ct, int batchSize = DefaultBatchSize, OperationProgress progress = null)
{
    bool batched = false;

    if (progress == null)
    {
        progress = new OperationProgress();
    }

    if (batchSize < 0)
    {
        batchSize = DefaultBatchSize;
    }

    progress.Status = $"Loading plain-text passwords from {sourceFile}";

    // NOTE(review): 'store.HashOffset ^ 16' is a bitwise XOR being used as the dictionary's
    // initial capacity. If an exponential capacity (16 to the power of a prefix length) was
    // intended, this is a bug — confirm the intent before changing it; preserved as-is here.
    Dictionary<string, HashSet<byte[]>> importData = new Dictionary<string, HashSet<byte[]>>(store.HashOffset ^ 16);

    int currentCount = 0;

    foreach (string line in GetLinesFromFile(sourceFile, progress))
    {
        ct.ThrowIfCancellationRequested();

        // Fix: removed a dead 'line.Length <= 0' check — GetLinesFromFile never yields
        // null or empty lines.
        progress.Status = "Reading lines from file";

        // Fix: 'progress' is guaranteed non-null by the guard above, so the inconsistent
        // 'progress?.' null-conditional calls have been normalized to plain member access.
        progress.IncrementTotalProcessed();

        // Word stores normalize the input before hashing; password stores hash it verbatim.
        byte[] hash = storeType == StoreType.Word
            ? store.ComputeHash(StringNormalizer.Normalize(line))
            : store.ComputeHash(line);

        string range = store.GetRangeFromHash(hash);

        if (!importData.TryGetValue(range, out HashSet<byte[]> rangeStore))
        {
            rangeStore = new HashSet<byte[]>(ByteArrayComparer.Comparer);
            importData.Add(range, rangeStore);
        }

        if (rangeStore.Add(hash))
        {
            currentCount++;
        }
        else
        {
            progress.IncrementHashesDiscarded();
        }

        // Flush once the number of unique hashes gathered reaches the batch size.
        if (batchSize > 0 && currentCount >= batchSize)
        {
            if (!batched)
            {
                batched = true;
                store.StartBatch(storeType);
            }

            progress.Status = "Flushing batch to store";
            store.AddToStore(importData, storeType, ct, true, progress);
            currentCount = 0;
        }
    }

    if (currentCount > 0)
    {
        progress.Status = "Flushing batch to store";
        store.AddToStore(importData, storeType, ct, true, progress);
    }

    if (batched)
    {
        progress.Status = "Sorting hashes into store";
        store.EndBatch(storeType, ct, progress);
    }

    progress.Status = "Done";
}
/// <summary>
/// Parses a text file of hexadecimal hashes into binary form, one hash per line. A line may
/// carry trailing data after the hash provided the hash is followed by a colon (or a stray
/// carriage-return/line-feed character).
/// </summary>
/// <param name="sourceFile">Path of the file to parse.</param>
/// <param name="hashBinaryLength">Expected length of each hash in bytes (half the hex length).</param>
/// <param name="progress">Progress tracker passed through to the line reader.</param>
/// <returns>The binary form of each hash, in file order.</returns>
/// <exception cref="FileNotFoundException">Thrown when <paramref name="sourceFile"/> does not exist.</exception>
/// <exception cref="InvalidDataException">Thrown when a line is too short or has an unexpected separator.</exception>
private static IEnumerable<byte[]> GetHexHashesFromFile(string sourceFile, int hashBinaryLength, OperationProgress progress)
{
    if (!File.Exists(sourceFile))
    {
        throw new FileNotFoundException("The source file was not found", sourceFile);
    }

    int expectedHexLength = hashBinaryLength * 2;
    int lineNumber = 0;

    foreach (string line in Store.GetLinesFromFile(sourceFile, progress))
    {
        lineNumber++;

        if (line.Length < expectedHexLength)
        {
            throw new InvalidDataException($"Line #{lineNumber} was not recognized as a hexadecimal hash. The line was not the expected length.\r\nThe following line was invalid:\r\n{line}");
        }

        // An exact-length line is the whole hash.
        if (line.Length == expectedHexLength)
        {
            yield return line.HexStringToBytes();
            continue;
        }

        // A longer line must separate the hash from its suffix with a recognized character.
        char separator = line[expectedHexLength];

        if (separator != ':' && separator != '\r' && separator != '\n')
        {
            throw new InvalidDataException($"Line #{lineNumber} was not recognized as a hexadecimal hash. Lines must end with a new line character or colon\r\nThe following line was invalid:\r\n{line}");
        }

        yield return line.Substring(0, expectedHexLength).HexStringToBytes();
    }
}
/// <summary>
/// Completes the current batch by consolidating and sorting all pending temporary entries
/// into the permanent store files, then clears the batch flag. Note the flag is only cleared
/// when consolidation completes without throwing.
/// </summary>
/// <param name="progress">Progress tracker passed through to the consolidation.</param>
/// <param name="ct">Token used to cancel the consolidation.</param>
public void EndBatch(OperationProgress progress, CancellationToken ct) { this.ConsolidateAndSort(progress, ct); this.IsInBatch = false; }
/// <summary>
/// Ends the batch by delegating to the per-type store instance for <paramref name="storeType"/>.
/// Note the delegate takes (progress, ct) while this override takes (ct, progress).
/// </summary>
/// <param name="storeType">The type of store whose batch is being ended.</param>
/// <param name="ct">Token used to cancel the operation.</param>
/// <param name="progress">Progress tracker passed through to the instance.</param>
public override void EndBatch(StoreType storeType, CancellationToken ct, OperationProgress progress) { this.GetInstance(storeType).EndBatch(progress, ct); }
/// <summary>
/// Adds a range's worth of hashes by delegating to the per-type store instance for
/// <paramref name="storeType"/>.
/// </summary>
/// <param name="hashes">Hashes that all belong to <paramref name="range"/>.</param>
/// <param name="range">The store range the hashes belong to.</param>
/// <param name="storeType">The type of store to add to.</param>
/// <param name="progress">Progress tracker passed through to the instance.</param>
protected override void AddToStore(HashSet <byte[]> hashes, string range, StoreType storeType, OperationProgress progress) { this.GetInstance(storeType).AddHashRangeToStore(hashes, range, progress); }
/// <summary>
/// Adds a set of hashes to a single range of the store. During a batch the hashes are diverted
/// to a temporary store instead (presumably merged later during consolidation — confirm with
/// ConsolidateAndSort); otherwise the existing range file, if any, is loaded, merged with the
/// incoming hashes, and rewritten only when the merge produced new entries.
/// </summary>
/// <param name="incomingHashes">Hashes to add; must not be null.</param>
/// <param name="range">The store range the hashes belong to; must not be null.</param>
/// <param name="progress">Optional progress tracker for added/discarded counts.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="incomingHashes"/> or <paramref name="range"/> is null.</exception>
public void AddHashRangeToStore(HashSet<byte[]> incomingHashes, string range, OperationProgress progress)
{
    if (incomingHashes == null)
    {
        throw new ArgumentNullException(nameof(incomingHashes));
    }

    if (range == null)
    {
        throw new ArgumentNullException(nameof(range));
    }

    if (this.IsInBatch)
    {
        this.AddHashRangeToTempStore(incomingHashes, range);
        return;
    }

    string file = Path.Combine(this.StorePath, $"{range}.db");

    if (!File.Exists(file))
    {
        // New range: every incoming hash is an addition, write them out directly.
        progress?.IncrementHashesAdded(incomingHashes.Count);
        this.WriteStoreFile(file, false, incomingHashes);
        return;
    }

    // Existing range: merge into the stored set and rewrite only if something changed.
    HashSet<byte[]> combined = new HashSet<byte[]>(ByteArrayComparer.Comparer);
    this.LoadHashesFromStoreFile(file, combined, progress);

    if (this.MergeHashSets(combined, incomingHashes, progress))
    {
        this.WriteStoreFile(file, false, combined);
    }
}
/// <summary>
/// Adds every hash from <paramref name="newHashes"/> into <paramref name="existingHashes"/>,
/// counting additions and duplicates on the progress tracker.
/// </summary>
/// <param name="existingHashes">Set that receives the new hashes.</param>
/// <param name="newHashes">Candidate hashes to merge in.</param>
/// <param name="progress">Optional tracker for added/discarded counts.</param>
/// <returns>true if at least one new hash was added; otherwise false.</returns>
private bool MergeHashSets(HashSet<byte[]> existingHashes, HashSet<byte[]> newHashes, OperationProgress progress)
{
    bool anyAdded = false;

    foreach (byte[] candidate in newHashes)
    {
        if (!existingHashes.Add(candidate))
        {
            // Already present in the target set.
            progress?.IncrementHashesDiscarded();
            continue;
        }

        progress?.IncrementHashesAdded();
        anyAdded = true;
    }

    return anyAdded;
}
/// <summary>
/// When overridden in a derived class, removes the supplied hashes from a single range of the
/// specified store type.
/// </summary>
/// <param name="hashes">Hashes that all belong to <paramref name="range"/>.</param>
/// <param name="range">The store range the hashes belong to.</param>
/// <param name="storeType">The type of store to remove from.</param>
/// <param name="progress">Progress tracker for the removal.</param>
protected abstract void RemoveFromStore(HashSet <byte[]> hashes, string range, StoreType storeType, OperationProgress progress);
/// <summary>
/// When overridden in a derived class, completes a batch previously started for the specified
/// store type.
/// </summary>
/// <param name="storeType">The type of store whose batch is being ended.</param>
/// <param name="ct">Token used to cancel the operation.</param>
/// <param name="progress">Progress tracker for the operation.</param>
public abstract void EndBatch(StoreType storeType, CancellationToken ct, OperationProgress progress);
/// <summary>
/// Removes the supplied hashes from a single range file of the store. When the range file does
/// not exist there is nothing to remove, and the file is rewritten only when at least one hash
/// was actually removed.
/// </summary>
/// <param name="hashesToRemove">Hashes to remove; must not be null.</param>
/// <param name="range">The store range the hashes belong to; must not be null.</param>
/// <param name="progress">Progress tracker passed through to the file loader.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="hashesToRemove"/> or <paramref name="range"/> is null.</exception>
public void RemoveHashRangeFromStore(HashSet<byte[]> hashesToRemove, string range, OperationProgress progress)
{
    if (hashesToRemove == null)
    {
        throw new ArgumentNullException(nameof(hashesToRemove));
    }

    if (range == null)
    {
        throw new ArgumentNullException(nameof(range));
    }

    string file = Path.Combine(this.StorePath, $"{range}.db");
    bool removedAny = false;
    HashSet<byte[]> remainingHashes = new HashSet<byte[]>(ByteArrayComparer.Comparer);

    if (File.Exists(file))
    {
        this.LoadHashesFromStoreFile(file, remainingHashes, progress);

        foreach (byte[] hash in hashesToRemove)
        {
            removedAny |= remainingHashes.Remove(hash);
        }
    }

    if (removedAny)
    {
        this.WriteStoreFile(file, false, remainingHashes);
    }
}