/// <summary>
/// Checks every data page's SHA-256 hash against its entry in the hash tree.
/// </summary>
/// <param name="rehash">When true, overwrite each mismatching hash-tree entry with the recomputed hash.</param>
/// <returns>The page numbers whose stored hash did not match the computed hash.</returns>
public ulong[] VerifyDataHashTree(bool rehash = false)
{
    ulong pageCount = XvdMath.OffsetToPageNumber((ulong)_io.Stream.Length - UserDataOffset);
    var mismatchedPages = new List<ulong>();

    for (ulong pageNum = 0; pageNum < pageCount; pageNum++)
    {
        // Read the hash entry stored in level 0 of the hash tree for this page.
        var entryOffset = CalculateHashEntryOffsetForBlock(pageNum, 0);
        _io.Stream.Position = (long)entryOffset;
        byte[] storedHash = _io.Reader.ReadBytes((int)DataHashEntryLength);

        // Hash the page's data and truncate the digest to the entry length.
        _io.Stream.Position = (long)(XvdMath.PageNumberToOffset(pageNum) + UserDataOffset);
        byte[] computedHash = HashUtils.ComputeSha256(_io.Reader.ReadBytes((int)PAGE_SIZE));
        Array.Resize(ref computedHash, (int)DataHashEntryLength);

        if (computedHash.IsEqualTo(storedHash))
            continue;

        mismatchedPages.Add(pageNum);

        if (rehash)
        {
            // Write the freshly computed hash back over the stale entry.
            _io.Stream.Position = (long)entryOffset;
            _io.Writer.Write(computedHash);
        }
    }

    return mismatchedPages.ToArray();
}
/// <summary>
/// Translates an offset inside the virtual drive into the corresponding offset
/// inside the backing XVD file, resolving dynamic-XVD blocks through the BAT.
/// </summary>
/// <param name="virtualOffset">Offset within the virtual drive.</param>
/// <param name="logicalOffset">Receives the matching offset within the XVD file (0 on failure).</param>
/// <returns>false when the target block of a dynamic XVD is unallocated; true otherwise.</returns>
/// <exception cref="InvalidOperationException">The offset lies beyond the drive size.</exception>
/// <exception cref="NotSupportedException">The XVD type is neither Fixed nor Dynamic.</exception>
public bool VirtualToLogicalDriveOffset(ulong virtualOffset, out ulong logicalOffset)
{
    logicalOffset = 0;

    if (virtualOffset >= Header.DriveSize)
        throw new InvalidOperationException(
            $"Virtual offset 0x{virtualOffset:X} is outside drivedata length 0x{Header.DriveSize:X}");
    if (Header.Type > XvdType.Dynamic)
        throw new NotSupportedException($"Xvd type {Header.Type} is unhandled");

    if (Header.Type == XvdType.Dynamic)
    {
        ulong dataOffset = virtualOffset + XvdMath.PageNumberToOffset(Header.NumberOfMetadataPages);
        ulong pageNum = XvdMath.OffsetToPageNumber(dataOffset);
        ulong inBlock = XvdMath.InBlockOffset(dataOffset);
        ulong firstDynamicPage = XvdMath.QueryFirstDynamicPage(Header.NumberOfMetadataPages);

        if (pageNum >= firstDynamicPage)
        {
            // Pages past the static area are resolved through the block allocation table.
            ulong blockNum = XvdMath.OffsetToBlockNumber(
                dataOffset - XvdMath.PageNumberToOffset(firstDynamicPage));
            ulong allocatedBlock = ReadBat(blockNum);
            if (allocatedBlock == INVALID_SECTOR)
                return false; // block not allocated in the BAT

            dataOffset = XvdMath.PageNumberToOffset(allocatedBlock) + inBlock;
            pageNum = XvdMath.OffsetToPageNumber(dataOffset);
        }

        ulong backingPage = XvdMath.ComputeDataBackingPageNumber(
            Header.Type, HashTreeLevels, HashTreePageCount, pageNum);

        logicalOffset = XvdMath.PageNumberToOffset(backingPage)
                        + XvdMath.InPageOffset(dataOffset)
                        + XvdMath.PageNumberToOffset(Header.EmbeddedXvdPageCount)
                        + Header.MutableDataLength
                        + XVD_HEADER_INCL_SIGNATURE_SIZE
                        + PAGE_SIZE;
    }
    else
    {
        // Xvd type fixed: data sits contiguously after the metadata pages.
        logicalOffset = virtualOffset
                        + XvdMath.PageNumberToOffset(Header.EmbeddedXvdPageCount)
                        + Header.MutableDataLength
                        + XvdMath.PageNumberToOffset(Header.NumberOfMetadataPages)
                        + XVD_HEADER_INCL_SIGNATURE_SIZE
                        + PAGE_SIZE;
    }

    return true;
}
/// <summary>
/// Verifies the integrity hash tree of this XVD. First checks the top hash page
/// against Header.TopHashBlockHash, then walks each lower hash-tree level and
/// compares the computed hash of every hash page against the entry stored in the
/// level above it.
/// </summary>
/// <returns>true if all checked hashes match (or data integrity is disabled), false on any mismatch.</returns>
public bool VerifyHashTree()
{
    // No hash tree exists when data integrity is disabled — nothing to verify.
    if (!IsDataIntegrityEnabled)
    {
        return(true);
    }

    // Verify the top-level hash page against the hash stored in the XVD header.
    _io.Stream.Position = (long)HashTreeOffset;
    byte[] hash = HashUtils.ComputeSha256(_io.Reader.ReadBytes((int)PAGE_SIZE));
    if (!Header.TopHashBlockHash.IsEqualTo(hash))
    {
        return(false);
    }

    // A single-level tree is fully covered by the top-hash check above.
    if (HashTreeLevels == 1)
    {
        return(true);
    }

    // Each hash page holds 0xAA entries, so a page on level N covers 0xAA^N data blocks.
    var blocksPerLevel = 0xAA;
    ulong topHashTreeBlock = 0;
    uint hashTreeLevel = 1;
    while (hashTreeLevel < HashTreeLevels)
    {
        uint dataBlockNum = 0;
        if (Header.NumberOfHashedPages != 0)
        {
            // Step through the data blocks covered by one lower-level hash page at a time.
            while (dataBlockNum < Header.NumberOfHashedPages)
            {
                // Hash the lower-level page that covers this data block...
                _io.Stream.Position = (long)CalculateHashEntryOffsetForBlock(dataBlockNum, hashTreeLevel - 1);
                byte[] blockHash = HashUtils.ComputeSha256(_io.Reader.ReadBytes((int)PAGE_SIZE));
                // ...truncated to the stored entry size.
                Array.Resize(ref blockHash, (int)HASH_ENTRY_LENGTH);

                // ...and compare it with the entry recorded one level up.
                var upperHashBlockOffset = CalculateHashEntryOffsetForBlock(dataBlockNum, hashTreeLevel);
                topHashTreeBlock = XvdMath.OffsetToPageNumber(upperHashBlockOffset - HashTreeOffset);
                _io.Stream.Position = (long)upperHashBlockOffset;
                byte[] expectedHash = _io.Reader.ReadBytes((int)HASH_ENTRY_LENGTH);
                if (!expectedHash.IsEqualTo(blockHash))
                {
                    // wrong hash
                    return(false);
                }
                // Advance to the first data block covered by the next hash page on this level.
                dataBlockNum += (uint)blocksPerLevel;
            }
        }
        hashTreeLevel++;
        blocksPerLevel = blocksPerLevel * 0xAA;
    }
    // Sanity check: the last upper-level page visited should have been the top page (page 0).
    if (topHashTreeBlock != 0)
    {
        Console.WriteLine(@"Top level hash page calculated to be at {0}, should be 0!", topHashTreeBlock);
    }
    return(true);
}
/// <summary>
/// Deletes <paramref name="numPages"/> pages of data at <paramref name="offset"/> and,
/// for XVC files, shifts any offsets/regions/update segments that point past the removed range.
/// </summary>
/// <param name="offset">File offset of the data to remove.</param>
/// <param name="numPages">Number of pages to remove.</param>
/// <returns>false if the underlying byte deletion failed; true otherwise.</returns>
internal bool RemoveData(ulong offset, ulong numPages)
{
    ulong firstPage = XvdMath.OffsetToPageNumber(offset);
    ulong byteCount = numPages * PAGE_SIZE;

    _io.Stream.Position = (long)offset;
    if (!_io.DeleteBytes((long)byteCount))
        return false;

    // Only XVC files carry the metadata that needs fixing up below.
    if (!IsXvcFile)
        return true;

    if (XvcInfo.InitialPlayOffset > offset)
        XvcInfo.InitialPlayOffset -= byteCount;
    if (XvcInfo.PreviewOffset > offset)
        XvcInfo.PreviewOffset -= byteCount;

    for (int i = 0; i < RegionHeaders.Count; i++)
    {
        var region = RegionHeaders[i];
        region.Hash = 0; // ???

        if (offset >= region.Offset && region.Offset + region.Length > offset)
            region.Length -= byteCount; // offset is part of region, reduce length
        else if (region.Offset > offset)
            region.Offset -= byteCount; // offset is before region, reduce offset

        // region is a copy instead of a reference due to it being a struct, so we have to replace the original data ourselves
        RegionHeaders[i] = region;
    }

    for (int i = 0; i < UpdateSegments.Count; i++)
    {
        var segment = UpdateSegments[i];
        if (segment.PageNum >= firstPage)
        {
            segment.PageNum -= (uint)numPages;
            UpdateSegments[i] = segment;
        }
    }

    return true;
}
/// <summary>
/// Inserts <paramref name="numPages"/> pages of space at <paramref name="offset"/> and,
/// for XVC files, shifts any offsets/regions/update segments that point past the insertion point.
/// </summary>
/// <param name="offset">File offset at which the new pages are inserted.</param>
/// <param name="numPages">Number of pages to insert.</param>
/// <returns>false if the underlying byte insertion failed; true otherwise.</returns>
internal bool AddData(ulong offset, ulong numPages)
{
    ulong firstPage = XvdMath.OffsetToPageNumber(offset);
    ulong byteCount = numPages * PAGE_SIZE;

    _io.Stream.Position = (long)offset;
    if (!_io.AddBytes((long)byteCount))
        return false;

    // Only XVC files carry the metadata that needs fixing up below.
    if (!IsXvcFile)
        return true;

    if (XvcInfo.InitialPlayOffset > offset)
        XvcInfo.InitialPlayOffset += byteCount;
    if (XvcInfo.PreviewOffset > offset)
        XvcInfo.PreviewOffset += byteCount;

    for (int i = 0; i < RegionHeaders.Count; i++)
    {
        var region = RegionHeaders[i];
        region.Hash = 0; // ???

        if (offset >= region.Offset && region.Offset + region.Length > offset)
            region.Length += byteCount; // offset is part of region, add to length
        else if (region.Offset > offset)
            region.Offset += byteCount; // offset is before region, add to offset

        // region is a struct (value copy), so write the modified copy back into the list
        RegionHeaders[i] = region;
    }

    for (int i = 0; i < UpdateSegments.Count; i++)
    {
        var segment = UpdateSegments[i];
        if (segment.PageNum >= firstPage)
        {
            segment.PageNum += (uint)numPages;
            UpdateSegments[i] = segment;
        }
    }

    return true;
}
/// <summary>
/// Encrypts or decrypts a section of the file in place with AES-XTS, one page per data unit.
/// </summary>
/// <param name="encrypt">true to encrypt, false to decrypt.</param>
/// <param name="key">0x20-byte key: first 0x10 bytes are the tweak key, last 0x10 the data key.</param>
/// <param name="headerId">Header id mixed into the XTS tweak.</param>
/// <param name="offset">File offset of the first page to transform.</param>
/// <param name="length">Length in bytes of the section to transform.</param>
/// <returns>Always true.</returns>
internal bool CryptSectionXts(bool encrypt, byte[] key, uint headerId, ulong offset, ulong length)
{
    ulong startPage = XvdMath.OffsetToPageNumber(offset - UserDataOffset);
    ulong numPages = XvdMath.BytesToPages(length);

    // Pre-read data unit numbers to minimize needing to seek around the file
    List<uint> dataUnits = null;
    if (IsDataIntegrityEnabled)
    {
        dataUnits = new List<uint>();
        for (uint pageIdx = 0; pageIdx < numPages; pageIdx++)
        {
            // fetch dataUnit from hash table entry for this page
            // TODO: seems we'll have to insert dataUnit when re-adding hashtables...
            // last 4 bytes of hash entry = dataUnit
            _io.Stream.Position = (long)CalculateHashEntryOffsetForBlock(startPage + pageIdx, 0) + 0x14;
            dataUnits.Add(_io.Reader.ReadUInt32());
        }
    }

    // Split the combined key into the tweak key and the data key.
    var tweakKey = new byte[0x10];
    var dataKey = new byte[0x10];
    Array.Copy(key, tweakKey, 0x10);
    Array.Copy(key, 0x10, dataKey, 0, 0x10);

    // Build the tweak from the header id and the first 8 bytes of the VDUID.
    var tweak = new byte[0x10];
    Array.Copy(Header.VDUID, 0, tweak, 0x8, 0x8);
    Array.Copy(BitConverter.GetBytes(headerId), 0, tweak, 0x4, 0x4);

    // NOTE(review): AesXtsTransform may hold disposable cipher state — confirm whether
    // it implements IDisposable and should be wrapped in a using.
    var cipher = new AesXtsTransform(tweak, dataKey, tweakKey, encrypt);

    // Transform each page in place; use the stored data unit when available,
    // otherwise fall back to the page index.
    _io.Stream.Position = (long)offset;
    for (uint pageIdx = 0; pageIdx < numPages; pageIdx++)
    {
        long pagePosition = _io.Stream.Position;
        byte[] source = _io.Reader.ReadBytes((int)PAGE_SIZE);
        var destination = new byte[PAGE_SIZE];
        cipher.TransformDataUnit(source, 0, source.Length, destination, 0,
            dataUnits?[(int)pageIdx] ?? pageIdx);

        _io.Stream.Position = pagePosition;
        _io.Writer.Write(destination);
    }

    return true;
}