void ReadBitStream(IO.EndianReader s, byte[] hashBuffer)
{
	int max_bit_stream_size = GetBitStreamSize();
	// an all-zero hash buffer is treated as a hint that the variant probably came from MCC
	bool is_probably_from_mcc = hashBuffer.EqualsZero();
	byte[] bs_bytes;
	using (var hasher = Program.GetGen3RuntimeDataHasher())
	{
		int bs_length = ReadBitStreamSize(s, hasher, max_bit_stream_size, is_probably_from_mcc);

		// the buffer is sized up to a 32-bit boundary, but only bs_length bytes are read from the stream
		bs_bytes = new byte[IntegerMath.Align(IntegerMath.kInt32AlignmentBit, bs_length)];
		s.Read(bs_bytes, bs_length);

		// hash the payload and compare against the hash embedded in the file
		hasher.TransformFinalBlock(bs_bytes, 0, bs_length);
		InvalidData = hasher.Hash.EqualsArray(hashBuffer) == false;
	}

	if (RequireValidHashes && InvalidData)
	{
		// the hash didn't match and valid hashes are required, so drop the data entirely
		Data = null;
	}
	else
	{
		// deserialize the variant data from the buffered bit stream
		using (var ms = new System.IO.MemoryStream(bs_bytes))
		using (var bs = new IO.BitStream(ms, System.IO.FileAccess.Read, streamName: "GameVariant"))
		{
			bs.StreamMode = System.IO.FileAccess.Read;
			Data.Serialize(bs);
		}
	}
}
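// ---------------------------------------------------------------------------------------------
// A minimal sketch, not part of the original source: ReadBitStream and ProcessFinalBlock both
// lean on IntegerMath.Align(alignmentBit, value). The assumption here is that it rounds 'value'
// up to the next multiple of (1 << alignmentBit), with kInt32AlignmentBit == 2 (4 bytes) and
// kInt64AlignmentBit == 3 (8 bytes). The helper below only illustrates that assumed behavior.
// ---------------------------------------------------------------------------------------------
static class AlignmentSketch
{
	// e.g. AlignUp(2, 10) == 12, AlignUp(3, 9) == 16, AlignUp(3, 16) == 16
	public static int AlignUp(int alignmentBit, int value)
	{
		int mask = (1 << alignmentBit) - 1;
		return (value + mask) & ~mask;
	}
}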
protected override byte[] ProcessFinalBlock(byte[] inputBuffer, int inputOffset, int inputCount)
{
	// it's okay to modify inputBuffer here since it's the final block and it's actually BlockHashAlgorithm's internal buffer
	ulong msg_bit_length = ((ulong)TotalBytesProcessed + (ulong)inputCount) << 3;

	if (inputOffset > 0 && inputCount > 0)
	{
		// memmove the bytes starting at inputOffset to the start of the buffer
		Array.Copy(inputBuffer, inputOffset, inputBuffer, 0, inputCount);
	}
	inputOffset = 0;

	Array.Clear(inputBuffer, inputCount, BlockSize - inputCount);

	// write the padding byte then align up to the next word boundary (the following bytes are already zero due to the Clear above)
	int input_offset = inputCount;
	inputBuffer[input_offset++] = (byte)Version; // padding byte
	input_offset = IntegerMath.Align(IntegerMath.kInt64AlignmentBit, input_offset);

	// if we don't have enough space to encode the length, process what we have now as a block.
	// remaining bytes are still all zero, due to the Clear above.
	if (input_offset > (BlockSize - sizeof(ulong)))
	{
		ProcessBlock(inputBuffer, inputOffset, 1);
		input_offset = 0;
		Array.Clear(inputBuffer, 0, BlockSize);
	}

	// write out the length, in bits, of the message that was processed before finalization
	for (input_offset = BlockSize - sizeof(ulong); msg_bit_length != 0; inputBuffer[input_offset] = (byte)msg_bit_length, msg_bit_length >>= 8, ++input_offset)
	{
	}

	ProcessBlock(inputBuffer, inputOffset, 1);

	if (HashValue == null)
	{
		HashValue = new byte[HashSizeValue / kWordCount];
	}
	Bits.ArrayCopy(mRegs, 0, HashValue, 0, mRegs.Length);
	return HashValue;
}
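// ---------------------------------------------------------------------------------------------
// Hedged sketch, not the original implementation: the final block that ProcessFinalBlock hands
// to ProcessBlock is assumed (for an illustrative 64-byte BlockSize) to look like:
//
//   [ remaining message bytes | padding byte = Version | zero fill | 8-byte bit count, little-endian ]
//     0 .. inputCount-1         inputCount                            BlockSize-8 .. BlockSize-1
//
// If the padding byte plus alignment would run into the length field, the original code flushes
// an extra all-zero block first; this sketch assumes the message tail is short enough to fit.
// The name, signature, and default block size below are illustrative only.
// ---------------------------------------------------------------------------------------------
static byte[] BuildFinalBlockSketch(byte[] messageTail, byte version, ulong totalBytesProcessed, int blockSize = 64)
{
	var block = new byte[blockSize]; // already zero-filled
	System.Array.Copy(messageTail, block, messageTail.Length);
	block[messageTail.Length] = version; // padding byte

	ulong bitLength = (totalBytesProcessed + (ulong)messageTail.Length) << 3;
	for (int i = blockSize - sizeof(ulong); bitLength != 0; i++, bitLength >>= 8)
		block[i] = (byte)bitLength; // low byte first, i.e. little-endian

	return block;
}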
void SerializeInternal(IO.EndianStream s)
{
	bool reading = s.IsReading;
	long stream_length = reading
		? s.BaseStream.Length - s.BaseStream.Position
		: 0;

	#region Header
	if (reading)
	{
		if (ValidateData)
		{
			if (stream_length < BinaryDataTreeHeader.kSizeOf)
			{
				throw new InvalidDataException("Expected more bytes for header data");
			}
		}
	}

	long headerPosition = s.BaseStream.Position;
	mHeader.Serialize(s);

	if (reading)
	{
		if (ValidateData)
		{
			mHeader.Validate();

			long min_expected_bytes_remaining = BinaryDataTreeHeader.kSizeOf +
				(BinaryDataTreeSectionHeader.kSizeOf * mHeader.UserSectionCount);
			if (s.BaseStream.Length < min_expected_bytes_remaining)
			{
				throw new InvalidDataException("Expected more bytes for header and user sections data");
			}
		}
	}
	#endregion

	#region Data
	long data_position = s.BaseStream.Position;
	if (reading)
	{
		if (ValidateData)
		{
			long total_size = BinaryDataTreeHeader.kSizeOf + mHeader.DataSize;
			if (s.BaseStream.Length < total_size)
			{
				throw new InvalidDataException("Expected more bytes for header and payload data");
			}

			uint actual_data_crc = GetDataCrc32(s.BaseStream);
			if (mHeader.DataCrc32 != actual_data_crc)
			{
				throw new InvalidDataException(string.Format("Invalid Data CRC 0x{0}, expected 0x{1}",
					actual_data_crc.ToString("X8"),
					mHeader.DataCrc32.ToString("X8")));
			}
		}
	}
	#endregion

	#region Sections
	var section_headers = new BinaryDataTreeSectionHeader[mHeader.UserSectionCount];
	s.StreamArray(section_headers);

	if (reading)
	{
		if (ValidateData)
		{
			foreach (var header in section_headers)
			{
				if (stream_length < (header.Offset + header.Size))
				{
					throw new InvalidDataException("Expected more bytes for section data");
				}
			}
		}
	}

	long offset_cursor = data_position;

	uint nodes_size = mHeader[BinaryDataTreeSectionID.NodeSectionIndex];
	long nodes_offset = nodes_size > 0 ? offset_cursor : 0;
	offset_cursor += nodes_size;

	uint name_values_size = mHeader[BinaryDataTreeSectionID.NameValueSectionIndex];
	long name_values_offset = name_values_size > 0 ? offset_cursor : 0;
	offset_cursor += name_values_size;

	uint name_data_size = mHeader[BinaryDataTreeSectionID.NameDataSectionIndex];
	long name_data_offset = name_data_size > 0 ? offset_cursor : 0;
	offset_cursor += name_data_size;

	if (mHeader[BinaryDataTreeSectionID.ValueDataSectionIndex] > 0)
	{
		offset_cursor = IntegerMath.Align(IntegerMath.k16ByteAlignmentBit, offset_cursor);
	}
	uint value_data_size = mHeader[BinaryDataTreeSectionID.ValueDataSectionIndex];
	long value_data_offset = value_data_size > 0 ? offset_cursor : 0;
	offset_cursor += value_data_size;

	if (reading)
	{
		if (ValidateData)
		{
			if (stream_length < offset_cursor)
			{
				throw new InvalidDataException("Expected more bytes for section data");
			}
		}
	}
	#endregion

	if (!reading)
	{
		s.Seek(headerPosition);
		mHeader.Serialize(s);
	}
}
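// ---------------------------------------------------------------------------------------------
// Rough sketch of the payload layout implied by the offset math above (my reading, not
// authoritative): after the tree header and the user section headers, the node section,
// name-value section, and name-data section are packed back to back, and the value-data
// section starts at the next 16-byte boundary. The helper below just replays that arithmetic
// for a given set of section sizes; its name and signature are illustrative only.
// ---------------------------------------------------------------------------------------------
static long[] ComputeSectionOffsetsSketch(long dataPosition, uint nodesSize, uint nameValuesSize, uint nameDataSize, uint valueDataSize)
{
	long cursor = dataPosition;

	long nodes_offset = nodesSize > 0 ? cursor : 0;
	cursor += nodesSize;

	long name_values_offset = nameValuesSize > 0 ? cursor : 0;
	cursor += nameValuesSize;

	long name_data_offset = nameDataSize > 0 ? cursor : 0;
	cursor += nameDataSize;

	if (valueDataSize > 0)
		cursor = (cursor + 15) & ~15L; // assumed meaning of IntegerMath.k16ByteAlignmentBit

	long value_data_offset = valueDataSize > 0 ? cursor : 0;

	return new[] { nodes_offset, name_values_offset, name_data_offset, value_data_offset };
}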