/// <summary>
/// Encrypts or decrypts <paramref name="length"/> bytes of the file in place using AES-XTS,
/// one page at a time, starting at absolute stream <paramref name="offset"/>.
/// </summary>
/// <param name="encrypt">true to encrypt, false to decrypt.</param>
/// <param name="key">0x20-byte key; first half is the tweak key, second half the data key.</param>
/// <param name="headerId">Header identifier mixed into the XTS tweak.</param>
/// <param name="offset">Absolute file offset of the first page to process.</param>
/// <param name="length">Number of bytes (rounded up to whole pages) to process.</param>
/// <returns>Always true.</returns>
internal bool CryptSectionXts(bool encrypt, byte[] key, uint headerId, ulong offset, ulong length)
{
    var firstPage = XvdMath.OffsetToPageNumber(offset - UserDataOffset);
    ulong pageCount = XvdMath.BytesToPages(length);

    // When data integrity is enabled, each page's XTS data-unit number lives in its
    // hash table entry. Collect them all up front so the main loop doesn't have to
    // seek back and forth between the hash table and the data region.
    List<uint> perPageDataUnits = null;
    if (IsDataIntegrityEnabled)
    {
        perPageDataUnits = new List<uint>();
        for (uint idx = 0; idx < pageCount; idx++)
        {
            // Data unit is stored in the final 4 bytes of the 0x18-byte hash entry.
            // TODO: seems we'll have to insert dataUnit when re-adding hashtables...
            _io.Stream.Position = (long)CalculateHashEntryOffsetForBlock(firstPage + idx, 0) + 0x14;
            perPageDataUnits.Add(_io.Reader.ReadUInt32());
        }
    }

    // The supplied key splits into two halves: tweak key then data key.
    var tweakKey = new byte[0x10];
    var dataKey = new byte[0x10];
    var tweakBytes = new byte[0x10];
    Array.Copy(key, tweakKey, 0x10);
    Array.Copy(key, 0x10, dataKey, 0, 0x10);

    // Tweak layout: bytes 0x4-0x7 = headerId, bytes 0x8-0xF = first 8 bytes of VDUID.
    var headerIdBytes = BitConverter.GetBytes(headerId);
    Array.Copy(Header.VDUID, 0, tweakBytes, 0x8, 0x8);
    Array.Copy(headerIdBytes, 0, tweakBytes, 0x4, 0x4);

    var cipher = new AesXtsTransform(tweakBytes, dataKey, tweakKey, encrypt);

    // Transform each page in place: read it, run it through XTS, rewind, overwrite.
    _io.Stream.Position = (long)offset;
    for (uint idx = 0; idx < pageCount; idx++)
    {
        var outputBuffer = new byte[PAGE_SIZE];
        var pagePosition = _io.Stream.Position;
        var inputBuffer = _io.Reader.ReadBytes((int)PAGE_SIZE);

        // Fall back to the relative page index as the data unit when no hash
        // table is present to supply one.
        cipher.TransformDataUnit(inputBuffer, 0, inputBuffer.Length, outputBuffer, 0,
            perPageDataUnits?[(int)idx] ?? idx);

        _io.Stream.Position = pagePosition;
        _io.Writer.Write(outputBuffer);
    }

    return true;
}
/// <summary>
/// Recomputes the SHA-256 hash of the XVC metadata (XvcInfo, region headers, update
/// segments, region specifiers) as it would have looked before the hash table was
/// inserted, and compares it against <c>Header.OriginalXvcDataHash</c>.
/// </summary>
/// <param name="rehash">If true, stores the freshly computed hash into the header.</param>
/// <returns>true when the computed hash matches the stored hash (or the file is not an XVC);
/// false otherwise. Note the return reflects the comparison done BEFORE any rehash.</returns>
public bool VerifyXvcHash(bool rehash = false)
{
    // Non-XVC files carry no XVC data hash; nothing to verify.
    if (!IsXvcFile)
    {
        return(true);
    }

    ulong hashTreeSize = HashTreePageCount * PAGE_SIZE;

    // Serialize the metadata into a scratch buffer, undoing the offset/length shifts
    // that adding the hash tree introduced, so the hash matches the pre-hashtable state.
    var ms = new MemoryStream();
    var msIo = new IO(ms);
    msIo.Writer.WriteStruct(XvcInfo);

    // fix region headers to match pre-hashtable
    for (int i = 0; i < XvcInfo.RegionCount; i++)
    {
        // NOTE(review): if RegionHeaders elements are reference types these writes
        // mutate the in-memory headers too, not just the serialized copy — confirm
        // the element type is a struct.
        var region = RegionHeaders[i];
        region.Hash = 0;

        if (IsDataIntegrityEnabled)
        {
            // Region contains the hash tree: shrink it by the tree's size.
            if (HashTreeOffset >= region.Offset && region.Offset + region.Length > HashTreeOffset)
            {
                region.Length -= hashTreeSize;
            }
            // Region lies after the hash tree: shift it back by the tree's size.
            else if (region.Offset > HashTreeOffset)
            {
                region.Offset -= hashTreeSize;
            }
        }

        msIo.Writer.WriteStruct(region);
    }

    for (int i = 0; i < XvcInfo.UpdateSegmentCount; i++)
    {
        var segment = UpdateSegments[i];

        // Segments past the end of the hash tree get their page numbers rebased.
        var hashTreeEnd = XvdMath.BytesToPages(HashTreeOffset) + HashTreePageCount;
        if (segment.PageNum >= hashTreeEnd)
        {
            segment.PageNum -= (uint)HashTreePageCount;
        }

        segment.Hash = 0;
        msIo.Writer.WriteStruct(segment);
    }

    if (RegionSpecifiers != null)
    {
        for (int i = 0; i < XvcInfo.RegionSpecifierCount; i++)
        {
            msIo.Writer.WriteStruct(RegionSpecifiers[i]);
        }
    }

    // Pad the buffer out to the declared data length so trailing zeroes are hashed too.
    if (Header.XvcDataLength > msIo.Stream.Length)
    {
        msIo.Stream.SetLength(Header.XvcDataLength);
    }

    if (IsDataIntegrityEnabled)
    {
        // remove hash table offset from the special regions
        // NOTE(review): 0xD28 / 0xD40 are presumably the fixed byte offsets of
        // InitialPlayOffset / PreviewOffset within the serialized XvcInfo struct —
        // verify against the XvcInfo layout.
        if (XvcInfo.InitialPlayOffset > HashTreeOffset)
        {
            msIo.Stream.Position = 0xD28;
            msIo.Writer.Write(XvcInfo.InitialPlayOffset - hashTreeSize);
        }

        if (XvcInfo.PreviewOffset > HashTreeOffset)
        {
            msIo.Stream.Position = 0xD40;
            msIo.Writer.Write(XvcInfo.PreviewOffset - hashTreeSize);
        }
    }

    byte[] xvcData = ms.ToArray();
    msIo.Dispose();
    byte[] hash = HashUtils.ComputeSha256(xvcData);
    bool isValid = Header.OriginalXvcDataHash.IsEqualTo(hash);

    // Optionally persist the recomputed hash into the header.
    if (rehash)
    {
        Header.OriginalXvcDataHash = hash;
    }

    return(isValid);
    //todo: investigate why this gets the correct hash for dev XVCs but fails for retail ones, might be to do with retail XVC data having a content ID that doesn't match with VDUID/UDUID
}