/// <summary>
/// Removes the integers 0 .. removeSize-1 from the given storage.
/// </summary>
/// <param name="hashStorage">Storage to remove keys from.</param>
/// <param name="removeSize">Exclusive upper bound of the keys to remove.</param>
private static void RemoveSomeIntData(HashStorage<int> hashStorage, int removeSize)
{
    var key = 0;
    while (key < removeSize)
    {
        hashStorage.Remove(key);
        key++;
    }
}
/// <summary>
/// Writes one block of data to the base storage and refreshes that block's stored digest.
/// </summary>
/// <param name="source">Data to write. Assumed to fit within one sector — TODO confirm caller guarantees this.</param>
/// <param name="offset">Byte offset into the storage; presumably sector-aligned — verify against callers.</param>
protected override void WriteImpl(ReadOnlySpan<byte> source, long offset)
{
    long blockIndex = offset / SectorSize;
    long hashPos = blockIndex * DigestSize;

    // Clamp the hashed byte count so we never hash past the end of the storage.
    int toWrite = (int)Math.Min(source.Length, GetSize() - offset);

    byte[] dataBuffer = ArrayPool<byte>.Shared.Rent(SectorSize);
    try
    {
        source.CopyTo(dataBuffer);
        byte[] hash = DoHash(dataBuffer, 0, toWrite);

        // For save-type storages an "empty" write gets a zeroed digest, which the
        // read path treats as a cleared block.
        // NOTE(review): IsEmpty() is invoked as a method here; Span<T>.IsEmpty is a
        // property, so this presumably resolves to a project extension method —
        // confirm whether it means "zero length" or "all bytes zero".
        if (Type == IntegrityStorageType.Save && source.IsEmpty())
        {
            Array.Clear(hash, 0, DigestSize);
        }

        // Data is written before its hash; the block is then marked Unchecked so the
        // next read re-verifies it.
        BaseStorage.Write(source, offset);
        HashStorage.Write(hash, hashPos);
        BlockValidities[blockIndex] = Validity.Unchecked;
    }
    finally
    {
        // Always return the pooled buffer, even if a write throws.
        ArrayPool<byte>.Shared.Return(dataBuffer);
    }
}
/// <summary>
/// Flushes the hash storage first, then delegates to the base implementation.
/// A failure flushing the hashes short-circuits the base flush.
/// </summary>
protected override Result FlushImpl()
{
    Result hashFlushResult = HashStorage.Flush();
    return hashFlushResult.IsFailure() ? hashFlushResult : base.FlushImpl();
}
/// <summary>
/// Handles background-worker completion: hides the progress UI, shows the data
/// views, and publishes the computed hashing statistics.
/// </summary>
private void WorkerOnRunWorkerCompleted(object sender, RunWorkerCompletedEventArgs e)
{
    var result = (HashResult)e.Result;

    // Swap the loading indicators for the populated views.
    LoadingIcon.Visibility = Visibility.Hidden;
    ProgressBar.Visibility = Visibility.Hidden;
    DataGrid.Visibility = Visibility.Visible;
    StatisticsPanel.Visibility = Visibility.Visible;

    // Bind the computed representations and keep references to the structures.
    PageDataGrid.ItemsSource = result.PageRepresentations;
    BucketDataGrid.ItemsSource = result.BucketRepresentations;
    Storage = result.Storage;
    PageTable = result.PageTable;

    // Render the statistics with the same numeric formats as before.
    OverflowLabel.Content = string.Format("{0:0.######}%", result.overflowPct);
    CollisionLabel.Content = string.Format("{0:0.######}%", result.collisionsPct);
    AccessLabel.Content = string.Format("{0:#.##}", result.avgAccess);
}
// STRING
/// <summary>
/// Demonstrates the string-keyed hash storage: seeds a few fixed keys plus
/// generated data, then dumps the storage to the console.
/// </summary>
private static void HashStorageWithStrings()
{
    var storage = new HashStorage<string>();
    var data = GenerateStringData();

    // A few hand-picked keys before the generated batch.
    storage.Add("!!!");
    storage.Add("~~~~~");
    storage.Add("cda");

    foreach (var item in data)
    {
        storage.Add(item);
    }

    Console.WriteLine("---== STRING ==---");
    Console.WriteLine(storage.ToString());
}
// INT
/// <summary>
/// Demonstrates the int-keyed hash storage: fills it with generated data,
/// removes the first 20 keys, and checks membership of key 21.
/// </summary>
/// <param name="size">Number of generated integers to insert.</param>
private static void HashStorageWithInts(int size)
{
    var storage = new HashStorage<int>();
    var data = GenerateIntData(size);
    foreach (var value in data)
    {
        storage.Add(value);
    }

    Console.WriteLine("---== INT ==---");
    Console.Write(storage.ToString());

    Console.WriteLine("Removing...");
    RemoveSomeIntData(storage, 20);
    Console.Write(storage.ToString());

    const int element = 21;
    Console.WriteLine($"Is '21' in collection?: {storage.Contains(element)}\n");
}
/// <summary>
/// Writes one block of data to the base storage and refreshes that block's
/// stored digest. Result-based variant of the write path.
/// </summary>
/// <param name="offset">Byte offset into the storage; presumably sector-aligned — verify against callers.</param>
/// <param name="source">Data to write. Assumed to fit within one sector — TODO confirm caller guarantees this.</param>
/// <returns><see cref="Result.Success"/> on success, or the failure from <c>GetSize</c>.</returns>
protected override Result WriteImpl(long offset, ReadOnlySpan<byte> source)
{
    long blockIndex = offset / SectorSize;
    long hashPos = blockIndex * DigestSize;

    Result rc = GetSize(out long storageSize);
    if (rc.IsFailure())
    {
        return(rc);
    }

    // Clamp the hashed byte count so we never hash past the end of the storage.
    int toWrite = (int)Math.Min(source.Length, storageSize - offset);

    byte[] dataBuffer = ArrayPool<byte>.Shared.Rent(SectorSize);
    try
    {
        source.CopyTo(dataBuffer);
        byte[] hash = DoHash(dataBuffer, 0, toWrite);

        // For save-type storages an "empty" write gets a zeroed digest, which the
        // read path treats as a cleared block.
        // NOTE(review): IsEmpty() is invoked as a method; Span<T>.IsEmpty is a
        // property, so this presumably resolves to a project extension method —
        // confirm whether it means "zero length" or "all bytes zero".
        if (Type == IntegrityStorageType.Save && source.IsEmpty())
        {
            Array.Clear(hash, 0, DigestSize);
        }

        // Data is written before its hash; the block is then marked Unchecked so
        // the next read re-verifies it.
        BaseStorage.Write(offset, source);
        HashStorage.Write(hashPos, hash);
        BlockValidities[blockIndex] = Validity.Unchecked;
    }
    finally
    {
        // Always return the pooled buffer, even if a write throws.
        ArrayPool<byte>.Shared.Return(dataBuffer);
    }

    return(Result.Success);
}
/// <summary>
/// For save-type storages, fills every data block whose stored digest is empty
/// (i.e. a cleared block) with the trim fill value.
/// No-op for all other storage types.
/// </summary>
public void FsTrim()
{
    if (Type != IntegrityStorageType.Save)
    {
        return;
    }

    Span<byte> digest = stackalloc byte[DigestSize];

    for (int i = 0; i < SectorCount; i++)
    {
        // Fix: the original computed `i * DigestSize` and `i * SectorSize` in
        // 32-bit arithmetic (both operands are int) before widening, which can
        // overflow Int32 on large storages. Cast to long before multiplying.
        long hashPos = (long)i * DigestSize;
        HashStorage.Read(digest, hashPos);

        // A non-empty digest means the block holds live data — leave it alone.
        if (!Util.IsEmpty(digest))
        {
            continue;
        }

        long dataOffset = (long)i * SectorSize;
        BaseStorage.Fill(SaveDataFileSystem.TrimFillValue, dataOffset, SectorSize);
    }
}
// DOUBLE
/// <summary>
/// Demonstrates the double-keyed hash storage: fills it with generated data,
/// dumps it, then shows that adding an existing element throws.
/// </summary>
/// <param name="size">Number of generated doubles to insert.</param>
private static void HashStorageWithDoubles(int size)
{
    var storage = new HashStorage<double>();
    var data = GenerateDoubleData(size);
    foreach (var value in data)
    {
        storage.Add(value);
    }

    Console.WriteLine("---== DOUBLE ==---");
    Console.WriteLine(storage.ToString());

    // Adding this value again is expected to throw; print the message instead
    // of letting the demo crash.
    const double element = 2.0;
    try
    {
        storage.Add(element);
    }
    catch (Exception exception)
    {
        Console.WriteLine($"Trying add {element}: {exception.Message}");
    }
}
/// <summary>
/// Reads one block from the base storage, verifying its hash when the requested
/// integrity level demands it. Verification results are cached per block in
/// <c>BlockValidities</c> so each block is hashed at most once.
/// </summary>
/// <param name="destination">Receives the block data; its length must not exceed one sector.</param>
/// <param name="offset">Byte offset of the block; presumably sector-aligned — verify against callers.</param>
/// <param name="integrityCheckLevel">Controls whether hashes are checked and whether mismatches throw.</param>
private void ReadImpl(Span<byte> destination, long offset, IntegrityCheckLevel integrityCheckLevel)
{
    int count = destination.Length;

    if (count < 0 || count > SectorSize)
    {
        throw new ArgumentOutOfRangeException(nameof(destination), "Length is invalid.");
    }

    long blockIndex = offset / SectorSize;

    // A block already known to be bad fails immediately at the strict level.
    if (BlockValidities[blockIndex] == Validity.Invalid && integrityCheckLevel == IntegrityCheckLevel.ErrorOnInvalid)
    {
        // Todo: Differentiate between the top and lower layers
        ThrowHelper.ThrowResult(ResultFs.InvalidHashInIvfc, "Hash error!");
    }

    // Only Unchecked blocks need hashing, and only when checking is enabled.
    bool needsHashCheck = integrityCheckLevel != IntegrityCheckLevel.None &&
                          BlockValidities[blockIndex] == Validity.Unchecked;

    // Fast path: non-save storage with nothing to verify reads straight through.
    if (Type != IntegrityStorageType.Save && !needsHashCheck)
    {
        BaseStorage.Read(destination, offset);
        return;
    }

    Span<byte> hashBuffer = stackalloc byte[DigestSize];
    long hashPos = blockIndex * DigestSize;
    HashStorage.Read(hashBuffer, hashPos);

    if (Type == IntegrityStorageType.Save)
    {
        // An all-empty digest marks a cleared block: return zeros and cache it
        // as valid without touching the base storage.
        if (Util.IsEmpty(hashBuffer))
        {
            destination.Clear();
            BlockValidities[blockIndex] = Validity.Valid;
            return;
        }

        // Save storage with no pending check still reads straight through.
        if (!needsHashCheck)
        {
            BaseStorage.Read(destination, offset);
            return;
        }
    }

    byte[] dataBuffer = ArrayPool<byte>.Shared.Rent(SectorSize);
    try
    {
        BaseStorage.Read(destination, offset);
        destination.CopyTo(dataBuffer);

        // Another path may have settled this block's validity already.
        if (BlockValidities[blockIndex] != Validity.Unchecked)
        {
            return;
        }

        int bytesToHash = SectorSize;

        if (count < SectorSize)
        {
            // Pad out unused portion of block
            Array.Clear(dataBuffer, count, SectorSize - count);

            // Partition FS hashes don't pad out an incomplete block
            if (Type == IntegrityStorageType.PartitionFs)
            {
                bytesToHash = count;
            }
        }

        byte[] hash = DoHash(dataBuffer, 0, bytesToHash);

        // Cache the verdict so the block is not re-hashed on later reads.
        Validity validity = Util.SpansEqual(hashBuffer, hash) ?
            Validity.Valid : Validity.Invalid;
        BlockValidities[blockIndex] = validity;

        if (validity == Validity.Invalid && integrityCheckLevel == IntegrityCheckLevel.ErrorOnInvalid)
        {
            ThrowHelper.ThrowResult(ResultFs.InvalidHashInIvfc, "Hash error!");
        }
    }
    finally
    {
        // Always return the pooled buffer, even when the hash check throws.
        ArrayPool<byte>.Shared.Return(dataBuffer);
    }
}
/// <summary>
/// Flushes the hash layer first, then the base storage.
/// </summary>
public override void Flush()
{
    HashStorage.Flush();
    base.Flush();
}
/// <summary>
/// Reads one block via an intermediate pooled buffer, verifying its hash when
/// the requested integrity level demands it. Older exception-based variant of
/// the read path (throws <see cref="InvalidDataException"/> on hash mismatch).
/// </summary>
/// <param name="destination">Receives the block data; its length must not exceed one sector.</param>
/// <param name="offset">Byte offset of the block; presumably sector-aligned — verify against callers.</param>
/// <param name="integrityCheckLevel">Controls whether hashes are checked and whether mismatches throw.</param>
private void ReadImpl(Span<byte> destination, long offset, IntegrityCheckLevel integrityCheckLevel)
{
    int count = destination.Length;

    if (count < 0 || count > SectorSize)
    {
        throw new ArgumentOutOfRangeException(nameof(destination), "Length is invalid.");
    }

    Span<byte> hashBuffer = stackalloc byte[DigestSize];
    long blockIndex = offset / SectorSize;
    long hashPos = blockIndex * DigestSize;

    // A block already known to be bad fails immediately at the strict level.
    if (BlockValidities[blockIndex] == Validity.Invalid && integrityCheckLevel == IntegrityCheckLevel.ErrorOnInvalid)
    {
        throw new InvalidDataException("Hash error!");
    }

    HashStorage.Read(hashBuffer, hashPos);

    // For save storage an all-empty digest marks a cleared block: return zeros
    // and cache the block as valid without touching the base storage.
    if (Type == IntegrityStorageType.Save && Util.IsEmpty(hashBuffer))
    {
        destination.Clear();
        BlockValidities[blockIndex] = Validity.Valid;
        return;
    }

    byte[] dataBuffer = ArrayPool<byte>.Shared.Rent(SectorSize);
    try
    {
        // Read into the pooled buffer first, then copy out the requested bytes.
        BaseStorage.Read(dataBuffer, offset, count, 0);
        dataBuffer.AsSpan(0, count).CopyTo(destination);

        if (integrityCheckLevel == IntegrityCheckLevel.None)
        {
            return;
        }

        // Only Unchecked blocks need hashing; cached verdicts are reused.
        if (BlockValidities[blockIndex] != Validity.Unchecked)
        {
            return;
        }

        int bytesToHash = SectorSize;

        if (count < SectorSize)
        {
            // Pad out unused portion of block
            Array.Clear(dataBuffer, count, SectorSize - count);

            // Partition FS hashes don't pad out an incomplete block
            if (Type == IntegrityStorageType.PartitionFs)
            {
                bytesToHash = count;
            }
        }

        byte[] hash = DoHash(dataBuffer, 0, bytesToHash);

        // Cache the verdict so the block is not re-hashed on later reads.
        Validity validity = Util.SpansEqual(hashBuffer, hash) ? Validity.Valid : Validity.Invalid;
        BlockValidities[blockIndex] = validity;

        if (validity == Validity.Invalid && integrityCheckLevel == IntegrityCheckLevel.ErrorOnInvalid)
        {
            throw new InvalidDataException("Hash error!");
        }
    }
    finally
    {
        // Always return the pooled buffer, even when the hash check throws.
        ArrayPool<byte>.Shared.Return(dataBuffer);
    }
}
/// <summary>
/// Entry point for the car-hash tool. In CollectDatabase mode it hashes each
/// car directory against every rules set, reports similar cars, and prints a
/// database entry per car. In TestCars mode it only reports similarities for
/// cars not already in the loaded database.
/// </summary>
/// <param name="args">Command-line arguments parsed into <c>Options</c>.</param>
/// <returns>0 on success, 1 when argument parsing fails.</returns>
static int Main(string[] args)
{
    var options = new Options();
    if (!Parser.Default.ParseArguments(args, options))
    {
        return(1);
    }

    // Car IDs come either from the explicit --cars list (comma-separated, possibly
    // spread over several option values) or from the subdirectory names of the
    // working directory.
    var cars = options.Cars.Count > 0
        ? string.Join(",", options.Cars).Split(',').Select(x => x.Trim())
        : Directory.GetDirectories(options.Directory).Select(Path.GetFileName);

    // Rules files and database files accept the same join-then-split comma syntax.
    var rulesSets = string.Join(",", options.RulesFile).Split(',').Select(x => RulesSet.FromFile(x.Trim())).ToList();
    var databasesFiles = string.Join(",", options.DatabaseFile).Split(',').Select(x => x.Trim()).ToArray();

    // TestCars compares against an existing database; CollectDatabase builds one up.
    var hashStorage = options.Mode == ProgramMode.TestCars ? HashStorage.FromFile(databasesFiles) : new HashStorage();

    foreach (var carId in cars)
    {
        if (options.Verbose)
        {
            Console.Error.WriteLine(carId);
        }

        var carDir = Path.Combine(options.Directory, carId);
        if (!Directory.Exists(carDir))
        {
            Console.Error.WriteLine("! directory '{0}' not found", carDir);
            continue;
        }

        if (options.Mode == ProgramMode.CollectDatabase)
        {
            // Database entry format: "<carId>:<ruleId>=<hash>,<ruleId>=<hash>,...".
            var entry = new StringBuilder();
            entry.Append(carId);
            entry.Append(":");

            foreach (var rulesSet in rulesSets)
            {
                // Comma-separate every rules set after the first.
                // NOTE(review): this is a reference comparison against rulesSets[0] —
                // it assumes the first element appears only once in the list.
                if (rulesSet != rulesSets[0])
                {
                    entry.Append(",");
                }

                var hashValue = rulesSet.GetHash(carDir);

                // Report similar cars before adding this one to the storage.
                // Passing the rules set enables the detailed "worked rules" output.
                foreach (var simular in hashStorage.FindSimular(carId, rulesSet.Id, hashValue, options.Threshold,
                        options.Information ? rulesSet : null))
                {
                    Console.Error.WriteLine("! {0}: {1} and {2}, {3:F1}%", rulesSet.Id, carId, simular.CarId, simular.Value * 100);
                    if (options.Information)
                    {
                        Console.Error.WriteLine("  " + string.Join(", ", simular.WorkedRules.Select(x => x.ToString())));
                    }
                }

                entry.Append(rulesSet.Id);
                entry.Append("=");
                entry.Append(hashValue);
                hashStorage.Add(carId, rulesSet.Id, hashValue);
            }

            Console.WriteLine(entry.ToString());
        }
        else if (options.Mode == ProgramMode.TestCars)
        {
            // Cars already present in the database are skipped entirely.
            if (hashStorage.HasCar(carId))
            {
                continue;
            }

            foreach (var rulesSet in rulesSets)
            {
                var hashValue = rulesSet.GetHash(carDir);
                foreach (var simular in hashStorage.FindSimular(carId, rulesSet.Id, hashValue, options.Threshold,
                        options.Information ? rulesSet : null))
                {
                    // Matches go to stdout here (stderr in CollectDatabase mode).
                    Console.WriteLine("{0}: {1} and {2}, {3:F1}%", rulesSet.Id, carId, simular.CarId, simular.Value * 100);
                    if (options.Information)
                    {
                        Console.Error.WriteLine("  " + string.Join(", ", simular.WorkedRules.Select(x => x.ToString())));
                    }
                }
            }
        }
    }

    return(0);
}
/// <summary>
/// Background-worker body: reads the input file, inserts every line into a page
/// table and a hash storage, builds UI row representations, computes collision /
/// overflow / average-access statistics, and stores everything in e.Result.
/// Progress is reported in whole percent via <c>_worker.ReportProgress</c>.
/// </summary>
private void WorkerOnDoWork(object sender, DoWorkEventArgs e)
{
    var hashargs = (HashArgs)e.Argument;
    var lines = File.ReadLines(hashargs.Path).ToArray();
    int pageCount;
    int pageSize;
    int progress = 0;

    // The user supplies either the page count or the page size; derive the other
    // by rounding up so every line fits.
    if (hashargs.Choice == HashArgs.InputChoice.PageCount)
    {
        pageCount = hashargs.Input;
        pageSize = (int)Math.Ceiling((decimal)((float)lines.Length / pageCount));
    }
    else
    {
        pageSize = hashargs.Input;
        pageCount = (int)Math.Ceiling((decimal)((float)lines.Length / pageSize));
    }

    var pageTable = new PageTable<string>(pageCount, pageSize);
    var hashStorage = new HashStorage<Address>(hashargs.BucketCount, hashargs.BucketSize);

    // One representation list per page, filled as lines are inserted.
    for (int i = 0; i < pageTable.Pages.Length; i++)
    {
        Pages.Add(new List<PageRepresentation>());
    }

    var pageRepresentations = new List<PageRepresentation>();
    var bucketRepresentations = new List<BucketRepresentation>();

    for (int i = 0; i < lines.Length; i++)
    {
        // Report progress only when the integer percentage actually advances.
        var p = i * 100 / lines.Length;
        if (p > progress)
        {
            progress = p;
            _worker.ReportProgress(progress);
        }

        var address = pageTable.Insert(lines[i]);

        // UI rows are 1-based; the table addresses are 0-based.
        var rep = new PageRepresentation
        {
            Page = address.Page + 1,
            Line = address.Line + 1,
            Index = i,
            Text = lines[i]
        };
        Pages[address.Page].Add(rep);
        pageRepresentations.Add(rep);
        hashStorage.Insert(lines[i], address);
    }

    var buckets = hashStorage.Buckets;

    // Renders a bucket's entries as "[key-P<page>L<line>,...]"; null slots stay blank.
    string buildContent(HashStorage<Address>.Entry[] entries)
    {
        var res = "[";
        for (int i = 0; i < entries.Length; i++)
        {
            if (entries[i] != null)
            {
                res += $"{entries[i].key}-P{entries[i].value.Page+1}L{entries[i].value.Line+1}";
            }
            if (i != entries.Length - 1)
            {
                res += ",";
            }
        }
        return(res + "]");
    }

    // Walk each bucket and its overflow chain, labelling them "<index>-<depth>".
    for (int i = 0; i < buckets.Length; i++)
    {
        int counter = 0;
        var bucket = buckets[i];
        while (bucket != null)
        {
            bucketRepresentations.Add(new BucketRepresentation
            {
                BucketID = $"{i}-{counter}",
                Content = buildContent(bucket.Entries),
                Overflow = bucket.Next != null ?
                    $"{i}-{counter+1}": ""
            });
            bucket = bucket.Next;
            counter++;
        }
    }

    var collisionPct = (float)hashStorage.collisionCount * 100 / lines.Length;

    // Percentage of buckets that spilled into an overflow chain.
    // NOTE(review): the loop above null-checks buckets, but this Aggregate
    // dereferences b.Next unconditionally — confirm Buckets never holds nulls.
    var overflowPct = (float)hashStorage.Buckets.Aggregate(0, (i, b) =>
    {
        if (b.Next != null)
        {
            return(i + 1);
        }
        return(i);
    }) * 100 / hashStorage.Buckets.Length;

    // Average entries per non-empty bucket.
    var usedBuckets = hashStorage.Buckets.Where(b => !b.Empty).ToArray();
    var avgAccess = (float)usedBuckets.Sum(b => b.Count()) / usedBuckets.Count();

    e.Result = new HashResult
    {
        PageRepresentations = pageRepresentations,
        BucketRepresentations = bucketRepresentations,
        Storage = hashStorage,
        PageTable = pageTable,
        collisionsPct = collisionPct,
        overflowPct = overflowPct,
        avgAccess = avgAccess
    };
}
/// <summary>
/// MD5 hashing service; all behavior is inherited from the base class.
/// </summary>
/// <param name="logger">Logger forwarded to the base service.</param>
/// <param name="storage">Hash storage forwarded to the base service.</param>
public MD5Service(ILogger logger, HashStorage storage) : base(logger, storage) { }
/// <summary>
/// Creates a hash service backed by the given storage.
/// </summary>
/// <param name="logger">Logger used by the service.</param>
/// <param name="storage">Storage that holds computed hashes.</param>
public HashService(ILogger logger, HashStorage storage)
{
    HashStorage = storage;
    this.logger = logger;
}