// A freshly created Fnv1 hasher must produce the known reference hash for the check buffer.
public void DefaultCompute()
{
    var hasher = Fnv1.Create();

    var digest = hasher.Compute(_check);

    var actual = BinaryPrimitives.ReadUInt32BigEndian(digest);
    Assert.AreEqual(HashResult, actual);
}
// The 64-bit stream overload must match the test vector's expected hash.
public void Hash64_Stream_ReturnsExpectedValues(FnvTestVector data)
{
    using var input = new MemoryStream(data.Buffer);

    var actual = Fnv1.Hash64(input);

    Assert.That(actual, Is.EqualTo(data.ExpectedResult64));
}
// The async 64-bit stream overload must match the test vector's expected hash.
public async Task Hash64_StreamAsync_ReturnsExpectedValuesAsync(FnvTestVector data)
{
    using var input = new MemoryStream(data.Buffer);

    var actual = await Fnv1.Hash64Async(input);

    Assert.That(actual, Is.EqualTo(data.ExpectedResult64));
}
// The async 32-bit stream overload must match the test vector's expected hash.
public async Task Hash32_StreamAsync_ReturnsExpectedValuesAsync(FnvTestVector data)
{
    using var input = new MemoryStream(data.Buffer);

    var actual = await Fnv1.Hash32Async(input);

    Assert.AreEqual(data.ExpectedResult32, actual);
}
// The 32-bit stream overload must match the test vector's expected hash.
public void Hash32_Stream_ReturnsExpectedValues(FnvTestVector data)
{
    using var input = new MemoryStream(data.Buffer);

    var actual = Fnv1.Hash32(input);

    Assert.AreEqual(data.ExpectedResult32, actual);
}
// The 64-bit stream overload must match the test vector's expected hash.
public void Hash64_Stream_ReturnsExpectedValues(FnvTestVector data)
{
    // Using declaration: the stream is still disposed at method exit, after the assertion.
    using var input = new MemoryStream(data.Buffer);

    var actual = Fnv1.Hash64(input);

    Assert.AreEqual(data.ExpectedResult64, actual);
}
// The async 64-bit stream overload must match the test vector's expected hash.
public async Task Hash64_StreamAsync_ReturnsExpectedValuesAsync(FnvTestVector data)
{
    // Using declaration: the stream is still disposed at method exit, after the assertion.
    using var input = new MemoryStream(data.Buffer);

    var actual = await Fnv1.Hash64Async(input);

    Assert.AreEqual(data.ExpectedResult64, actual);
}
// Hashing the raw buffer must match the test vector's 64-bit expectation.
public void Hash64_ReturnsExpectedValues(FnvTestVector data)
{
    var actual = Fnv1.Hash64(data.Buffer);

    Assert.AreEqual(data.ExpectedResult64, actual);
}
// Hashing the raw buffer must match the test vector's 32-bit expectation.
public void Hash32_ReturnsExpectedValues(FnvTestVector data)
{
    var actual = Fnv1.Hash32(data.Buffer);

    Assert.AreEqual(data.ExpectedResult32, actual);
}
// A null buffer must be rejected with ArgumentNullException rather than hashed.
public void Hash64_NullBuffer_ThrowsArgumentNullException()
{
    Assert.Throws<ArgumentNullException>(() =>
    {
        Fnv1.Hash64(null);
    });
}
// Caches the 32-bit FNV-1 hash of Id; 0 serves as the sentinel when Id is absent.
protected override void Initialize()
{
    _hashCode = this.Id == null ? 0 : Fnv1.ComputeHash32(this.Id);
}
// Hashing the raw buffer must match the test vector's 64-bit expectation.
public void Hash64_ReturnsExpectedValues(FnvTestVector data)
{
    var actual = Fnv1.Hash64(data.Buffer);

    Assert.That(actual, Is.EqualTo(data.ExpectedResult64));
}
// Hashing the raw buffer must match the test vector's 32-bit expectation.
public void Hash32_ReturnsExpectedValues(FnvTestVector data)
{
    var actual = Fnv1.Hash32(data.Buffer);

    Assert.That(actual, Is.EqualTo(data.ExpectedResult32));
}
// Hash code combines the algorithm discriminator with the FNV-1 hash of the value.
public override int GetHashCode()
{
    var algorithmBits = (int)this.Algorithm;
    var valueHash = Fnv1.ComputeHash32(this.Value);
    return algorithmBits ^ valueHash;
}
/// <summary>
/// Serializes the archive to <paramref name="output"/>: file data (deduplicated by content
/// hash), string table, entry table, asset table, table info, and header, then pads the
/// stream to a 0x1000-byte boundary. All multi-byte values are written big-endian.
/// </summary>
/// <param name="output">Destination stream; positioned freely during writing.</param>
/// <param name="files">Files to persist; expected to be <see cref="PacArchiveFileInfo"/> instances.</param>
public void Save(Stream output, IList<IArchiveFileInfo> files)
{
    using var bw = new BinaryWriterX(output, ByteOrder.BigEndian);
    var fnv = Fnv1.Create();

    // Get distinct strings
    var stringMap = GetStringMap(files);

    // Calculate section offsets; each section start is aligned up to a 0x40 boundary.
    var tableInfoOffset = HeaderSize;
    var assetOffset = (tableInfoOffset + TableInfoSize + 0x3F) & ~0x3F;
    var entryOffset = (assetOffset + files.Select(x => x.FilePath.GetFirstDirectory(out _)).Distinct().Count() * AssetSize + 0x3F) & ~0x3F;
    var stringOffset = (entryOffset + files.Count * EntrySize + 0x3F) & ~0x3F;
    var fileOffset = (stringOffset + stringMap.Sum(x => x.Key.Length + 1) + 0x3F) & ~0x3F;

    // Write file data; identical files (same content hash) are stored once and shared.
    var entries = new List<PacEntry>();
    var fileMap = new Dictionary<uint, (long Offset, long Size)>();
    var distinctFileCount = 0;
    var filePosition = fileOffset;
    foreach (var file in files.Cast<PacArchiveFileInfo>().OrderBy(x => x.FilePath))
    {
        // Update entry data
        file.FilePath.ToRelative().GetFirstDirectory(out var filePath);
        var extension = file.FilePath.GetExtensionWithDot(); // hoisted: used for both offset and hash
        file.Entry.decompSize = (int)file.FileSize;
        file.Entry.extensionOffset = (int)stringMap[extension] + stringOffset;
        file.Entry.extensionFnvHash = BinaryPrimitives.ReadUInt32BigEndian(fnv.Compute(Encoding.ASCII.GetBytes(extension)));
        file.Entry.stringOffset = (int)stringMap[filePath.FullName] + stringOffset;
        file.Entry.fnvHash = BinaryPrimitives.ReadUInt32BigEndian(fnv.Compute(Encoding.ASCII.GetBytes(filePath.FullName)));

        // Reuse previously written data for duplicate content (single TryGetValue lookup
        // instead of ContainsKey followed by two indexer accesses).
        var fileHash = file.GetHash();
        if (fileMap.TryGetValue(fileHash, out var existing))
        {
            file.Entry.offset = (int)existing.Offset;
            file.Entry.compSize = file.Entry.compSize2 = (int)existing.Size;
            entries.Add(file.Entry);
            continue;
        }

        // Write file data
        output.Position = filePosition;
        var writtenSize = file.SaveFileData(output);

        file.Entry.offset = filePosition;
        file.Entry.compSize = file.Entry.compSize2 = (int)writtenSize;
        entries.Add(file.Entry);

        fileMap[fileHash] = (filePosition, writtenSize);
        distinctFileCount++;
        filePosition += (int)writtenSize;
    }
    bw.WriteAlignment();

    // Write strings
    output.Position = stringOffset;
    foreach (var pair in stringMap)
    {
        bw.WriteString(pair.Key, Encoding.ASCII, false);
    }

    // Write entries
    output.Position = entryOffset;
    bw.WriteMultiple(entries);

    // Write assets: one record per first-level directory, grouped in file-path order
    // (same ordering as the entry table, so entryPosition tracks each group's entries).
    var entryPosition = entryOffset;
    var assetCount = 0;
    output.Position = assetOffset;
    foreach (var fileGroup in files.OrderBy(x => x.FilePath).GroupBy(x => x.FilePath.GetFirstDirectory(out _)))
    {
        var fileCount = fileGroup.Count();
        bw.WriteType(new PacAsset
        {
            count = fileCount,
            entryOffset = entryPosition,
            stringOffset = (int)stringMap[fileGroup.Key] + stringOffset,
            fnvHash = BinaryPrimitives.ReadUInt32BigEndian(fnv.Compute(Encoding.ASCII.GetBytes(fileGroup.Key)))
        });

        entryPosition += fileCount * EntrySize;
        assetCount++;
    }

    // Write table info
    output.Position = tableInfoOffset;
    bw.WriteType(new PacTableInfo
    {
        fileOffset = fileOffset,
        entryOffset = entryOffset,
        stringOffset = stringOffset,
        assetOffset = assetOffset,
        unpaddedFileSize = (int)output.Length,
        fileCount = distinctFileCount,
        entryCount = entries.Count,
        stringCount = stringMap.Count,
        assetCount = assetCount
    });

    // Write header
    output.Position = 0;
    _header.dataOffset = fileOffset;
    bw.WriteType(_header);

    // Pad file to 0x1000
    output.Position = output.Length;
    bw.WriteAlignment(0x1000);
}
// Hash code is the 32-bit FNV-1 hash of Id; 0 is the sentinel when Id is absent.
public override int GetHashCode()
{
    if (this.Id == null)
    {
        return 0;
    }

    return Fnv1.ComputeHash32(this.Id);
}