/// <summary>
/// Checks the level-0 hash entry of every user-data page against a freshly
/// computed SHA-256 of that page's contents.
/// </summary>
/// <param name="rehash">When true, overwrite each mismatching hash entry with the recomputed value.</param>
/// <returns>Page numbers whose stored hash did not match the computed one.</returns>
public ulong[] VerifyDataHashTree(bool rehash = false)
{
    ulong dataPageCount = XvdMath.OffsetToPageNumber((ulong)_io.Stream.Length - UserDataOffset);
    var mismatchedPages = new List<ulong>();

    for (ulong pageNum = 0; pageNum < dataPageCount; pageNum++)
    {
        // Stored level-0 hash entry for this page.
        var entryOffset = CalculateHashEntryOffsetForBlock(pageNum, 0);
        _io.Stream.Position = (long)entryOffset;
        byte[] storedHash = _io.Reader.ReadBytes((int)DataHashEntryLength);

        // Hash the page's actual contents; truncate to the entry length used on disk.
        _io.Stream.Position = (long)(XvdMath.PageNumberToOffset(pageNum) + UserDataOffset);
        byte[] pageData = _io.Reader.ReadBytes((int)PAGE_SIZE);
        byte[] computedHash = HashUtils.ComputeSha256(pageData);
        Array.Resize(ref computedHash, (int)DataHashEntryLength);

        if (computedHash.IsEqualTo(storedHash))
            continue;

        mismatchedPages.Add(pageNum);

        if (rehash)
        {
            // Repair mode: write the recomputed hash back over the stale entry.
            _io.Stream.Position = (long)entryOffset;
            _io.Writer.Write(computedHash);
        }
    }

    return mismatchedPages.ToArray();
}
/// <summary>
/// Verifies the upper levels of the hash tree: the top hash block against
/// <c>Header.TopHashBlockHash</c>, then each level's blocks against the
/// entries stored in the level above.
/// </summary>
/// <returns>true if every checked hash matches (or integrity is disabled); false on the first mismatch.</returns>
public bool VerifyHashTree()
{
    if (!IsDataIntegrityEnabled)
        return true;

    // Level N-1 (top) block must match the hash recorded in the header.
    _io.Stream.Position = (long)HashTreeOffset;
    byte[] hash = HashUtils.ComputeSha256(_io.Reader.ReadBytes((int)PAGE_SIZE));
    if (!Header.TopHashBlockHash.IsEqualTo(hash))
        return false;

    if (HashTreeLevels == 1)
        return true;

    // Each hash block holds 0xAA entries, so one level-(L) entry covers
    // 0xAA^L blocks of level 0. Use uint to match CalculateHashTree.
    uint blocksPerLevel = 0xAA;
    ulong topHashTreeBlock = 0;
    uint hashTreeLevel = 1;
    while (hashTreeLevel < HashTreeLevels)
    {
        uint dataBlockNum = 0;
        if (Header.NumberOfHashedPages != 0)
        {
            while (dataBlockNum < Header.NumberOfHashedPages)
            {
                // Hash the whole lower-level block...
                _io.Stream.Position = (long)CalculateHashEntryOffsetForBlock(dataBlockNum, hashTreeLevel - 1);
                byte[] blockHash = HashUtils.ComputeSha256(_io.Reader.ReadBytes((int)PAGE_SIZE));
                Array.Resize(ref blockHash, (int)HASH_ENTRY_LENGTH);

                // ...and compare it to the entry stored in the level above.
                var upperHashBlockOffset = CalculateHashEntryOffsetForBlock(dataBlockNum, hashTreeLevel);
                topHashTreeBlock = XvdMath.OffsetToPageNumber(upperHashBlockOffset - HashTreeOffset);
                _io.Stream.Position = (long)upperHashBlockOffset;
                byte[] expectedHash = _io.Reader.ReadBytes((int)HASH_ENTRY_LENGTH);
                if (!expectedHash.IsEqualTo(blockHash))
                {
                    // wrong hash
                    return false;
                }
                dataBlockNum += blocksPerLevel;
            }
        }
        hashTreeLevel++;
        blocksPerLevel = blocksPerLevel * 0xAA;
    }

    // Sanity check: the final upper-level entry examined should live in page 0
    // of the hash tree (the top block).
    if (topHashTreeBlock != 0)
        Console.WriteLine(@"Top level hash page calculated to be at {0}, should be 0!", topHashTreeBlock);

    return true;
}
/// <summary>
/// Rebuilds the upper levels of the hash tree in place: for each level,
/// hashes every lower-level block, rewrites any stale entry in the level
/// above, then records the hash of the top block in the header.
/// </summary>
/// <returns>Always true.</returns>
public bool CalculateHashTree()
{
    // One level-(L) entry covers 0xAA^L level-0 blocks (0xAA entries per hash page).
    uint coverage = 0xAA;
    for (uint level = 1; level < HashTreeLevels; level++)
    {
        if (Header.NumberOfHashedPages != 0)
        {
            for (uint blockNum = 0; blockNum < Header.NumberOfHashedPages; blockNum += coverage)
            {
                // Hash the lower-level block and truncate to the on-disk entry size.
                _io.Stream.Position = (long)CalculateHashEntryOffsetForBlock(blockNum, level - 1);
                byte[] freshHash = HashUtils.ComputeSha256(_io.Reader.ReadBytes((int)PAGE_SIZE));
                Array.Resize(ref freshHash, (int)HASH_ENTRY_LENGTH);

                // Compare with the stored entry one level up; rewrite it if stale.
                _io.Stream.Position = (long)CalculateHashEntryOffsetForBlock(blockNum, level);
                byte[] storedHash = _io.Reader.ReadBytes((int)HASH_ENTRY_LENGTH);
                if (!freshHash.IsEqualTo(storedHash))
                {
                    // Step back over the entry we just read before overwriting it.
                    _io.Stream.Position -= (int)HASH_ENTRY_LENGTH;
                    // todo: maybe return a list of blocks that needed rehashing
                    _io.Writer.Write(freshHash);
                }
            }
        }
        coverage *= 0xAA;
    }

    // Finally, record the hash of the top block in the header.
    _io.Stream.Position = (long)HashTreeOffset;
    Header.TopHashBlockHash = HashUtils.ComputeSha256(_io.Reader.ReadBytes((int)PAGE_SIZE));
    return true;
}
/// <summary>
/// Recomputes the XVC data hash: serializes XvcInfo, region headers, update
/// segments and region specifiers into a memory buffer — with hash-tree
/// offsets/lengths backed out so the buffer matches the pre-hash-table
/// layout — then compares the buffer's SHA-256 to Header.OriginalXvcDataHash.
/// </summary>
/// <param name="rehash">When true, store the recomputed hash into the header.</param>
/// <returns>true if the computed hash matched the header value (before any rehash).</returns>
public bool VerifyXvcHash(bool rehash = false)
{
    if (!IsXvcFile)
        return true;

    ulong hashTreeSize = HashTreePageCount * PAGE_SIZE;

    var ms = new MemoryStream();
    var msIo = new IO(ms);
    msIo.Writer.WriteStruct(XvcInfo);

    // fix region headers to match pre-hashtable
    for (int i = 0; i < XvcInfo.RegionCount; i++)
    {
        var region = RegionHeaders[i];
        region.Hash = 0; // hash fields are zeroed for the digest
        if (IsDataIntegrityEnabled)
        {
            // Region contains the hash tree: shrink it by the tree's size.
            if (HashTreeOffset >= region.Offset && region.Offset + region.Length > HashTreeOffset)
                region.Length -= hashTreeSize;
            // Region lies after the hash tree: slide it back by the tree's size.
            else if (region.Offset > HashTreeOffset)
                region.Offset -= hashTreeSize;
        }
        msIo.Writer.WriteStruct(region);
    }

    for (int i = 0; i < XvcInfo.UpdateSegmentCount; i++)
    {
        var segment = UpdateSegments[i];
        // Segments past the end of the hash tree get their page numbers
        // rebased to the pre-hash-table layout.
        var hashTreeEnd = XvdMath.BytesToPages(HashTreeOffset) + HashTreePageCount;
        if (segment.PageNum >= hashTreeEnd)
            segment.PageNum -= (uint)HashTreePageCount;
        segment.Hash = 0;
        msIo.Writer.WriteStruct(segment);
    }

    if (RegionSpecifiers != null)
        for (int i = 0; i < XvcInfo.RegionSpecifierCount; i++)
            msIo.Writer.WriteStruct(RegionSpecifiers[i]);

    // Pad the buffer out to the full declared XVC data length.
    if (Header.XvcDataLength > msIo.Stream.Length)
        msIo.Stream.SetLength(Header.XvcDataLength);

    if (IsDataIntegrityEnabled)
    {
        // remove hash table offset from the special regions
        // NOTE(review): 0xD28 / 0xD40 are the fixed byte offsets of
        // InitialPlayOffset / PreviewOffset within the serialized XvcInfo —
        // presumably per the XVC on-disk layout; verify against the struct.
        if (XvcInfo.InitialPlayOffset > HashTreeOffset)
        {
            msIo.Stream.Position = 0xD28;
            msIo.Writer.Write(XvcInfo.InitialPlayOffset - hashTreeSize);
        }
        if (XvcInfo.PreviewOffset > HashTreeOffset)
        {
            msIo.Stream.Position = 0xD40;
            msIo.Writer.Write(XvcInfo.PreviewOffset - hashTreeSize);
        }
    }

    byte[] xvcData = ms.ToArray();
    msIo.Dispose();
    byte[] hash = HashUtils.ComputeSha256(xvcData);
    bool isValid = Header.OriginalXvcDataHash.IsEqualTo(hash);

    if (rehash)
        Header.OriginalXvcDataHash = hash;

    return isValid; //todo: investigate why this gets the correct hash for dev XVCs but fails for retail ones, might be to do with retail XVC data having a content ID that doesn't match with VDUID/UDUID
}