/// <summary>
/// Serializes this property block to raw bytes: a leading value, the "None"
/// name index, then (at offset 16) a second value followed by the texture
/// group list as (name index, value) pairs.
/// </summary>
/// <returns>the serialized byte array.</returns>
public byte[] ToArray()
{
    using (var stream = new MemoryStream())
    {
        stream.WriteValueU32(firstVal);
        stream.WriteValueS32(pccRef.Names.FindIndex(n => n == "None"));
        // Jump ahead to offset 16; the gap (bytes 8-15) stays zero-filled.
        stream.Seek(16, SeekOrigin.Begin);
        stream.WriteValueU32(otherVal);
        stream.WriteValueS32(enumTextureGroups.Count);
        foreach (var group in enumTextureGroups)
        {
            stream.WriteValueS32(pccRef.Names.FindIndex(n => n == group.name));
            stream.WriteValueS32(group.value);
        }
        return stream.ToArray();
    }
}
/// <summary>
/// Compresses a slice of a byte array into a zlib block, splitting the input
/// into segments of at most <c>maxSegmentSize</c> bytes. The output layout is:
/// magic word, max segment size, total compressed size, total uncompressed
/// size, segment table (compressed/uncompressed size per segment), then the
/// concatenated compressed segment data.
/// </summary>
/// <param name="buffer">source byte array.</param>
/// <param name="offset">starting offset inside the array.</param>
/// <param name="count">number of bytes to compress starting from the offset.</param>
/// <returns>the compressed block as a byte array.</returns>
public static byte[] Compress(byte[] buffer, int offset, int count)
{
    if (buffer == null)
        throw new ArgumentNullException(nameof(buffer));
    if (count < 0)
        throw new FormatException();
    if (offset + count > buffer.Length)
        throw new IndexOutOfRangeException();

    // using-blocks guarantee the temporary streams are released even if
    // compression throws (the original leaked them on the exception path).
    using (MemoryStream headBlock = new MemoryStream())
    using (MemoryStream dataBlock = new MemoryStream())
    {
        headBlock.WriteValueU32(magic);
        headBlock.WriteValueU32(maxSegmentSize);
        headBlock.WriteValueU32(0x0); // total compressed size placeholder, patched below
        headBlock.WriteValueS32(count); // total uncompressed size

        for (int i = count; i > 0; i -= (int)maxSegmentSize)
        {
            int copyBytes = Math.Min(i, (int)maxSegmentSize);
            uint precCompSize = (uint)dataBlock.Length;
            // Finish() (not Close/Dispose) flushes the deflater output while
            // leaving the underlying dataBlock stream open for the next segment.
            DeflaterOutputStream zipStream = new DeflaterOutputStream(dataBlock);
            zipStream.Write(buffer, offset + (count - i), copyBytes);
            zipStream.Flush();
            zipStream.Finish();
            headBlock.WriteValueU32((uint)dataBlock.Length - precCompSize); // compressed segment size
            headBlock.WriteValueS32(copyBytes); // uncompressed segment size
        }

        // Patch the total compressed size now that it is known.
        headBlock.Seek(8, SeekOrigin.Begin);
        headBlock.WriteValueS32((int)dataBlock.Length);

        byte[] finalBlock = new byte[headBlock.Length + dataBlock.Length];
        Buffer.BlockCopy(headBlock.ToArray(), 0, finalBlock, 0, (int)headBlock.Length);
        Buffer.BlockCopy(dataBlock.ToArray(), 0, finalBlock, (int)headBlock.Length, (int)dataBlock.Length);
        return finalBlock;
    }
}
/// <summary>
/// Stores the packet fields and rebuilds the 6-byte raw representation:
/// 4-byte length, 1-byte opcode, 1-byte flags.
/// </summary>
/// <param name="Length">payload length field.</param>
/// <param name="opCode">operation code byte.</param>
/// <param name="Flags">flags byte.</param>
void SetData(UInt32 Length, byte opCode, byte Flags)
{
    this.Length = Length;
    this.opCode = opCode;
    this.Flags = Flags;
    this.Data = new byte[6];
    // The stream wraps this.Data directly, so the writes below fill it in
    // place; the original's extra ToArray() copy and post-copy Flush() were
    // redundant and have been removed.
    using (var stream = new MemoryStream(this.Data))
    {
        stream.WriteValueU32(this.Length, false); // false => little-endian
        stream.WriteValueU8(this.opCode);
        stream.WriteValueU8(this.Flags);
    }
}
/// <summary>
/// Serializes a block header, as sent in a headers packet in response to a
/// getheaders message. Fields are written in reverse order and the finished
/// buffer is then byte-reversed to produce the final header layout.
/// </summary>
/// <remarks>
/// https://en.bitcoin.it/wiki/Protocol_specification#Block_Headers
/// </remarks>
/// <example>
/// nodejs: https://github.com/zone117x/node-stratum-pool/blob/master/lib/blockTemplate.js#L85
/// </example>
/// <param name="job">the job supplying difficulty, previous hash and version.</param>
/// <param name="merkleRoot">merkle root of the transactions.</param>
/// <param name="nTime">block timestamp.</param>
/// <param name="nonce">nonce found by the miner.</param>
/// <returns>the serialized header bytes.</returns>
public static byte[] SerializeHeader(IJob job, byte[] merkleRoot, UInt32 nTime, UInt32 nonce)
{
    using (var stream = new MemoryStream())
    {
        // Written back-to-front; the whole buffer is reversed below.
        stream.WriteValueU32(nonce.BigEndian());
        stream.WriteValueU32(Convert.ToUInt32(job.EncodedDifficulty, 16).BigEndian());
        stream.WriteValueU32(nTime.BigEndian());
        stream.WriteBytes(merkleRoot);
        stream.WriteBytes(job.PreviousBlockHash.HexToByteArray());
        stream.WriteValueU32(job.BlockTemplate.Version.BigEndian());

        var header = stream.ToArray();
        return header.ReverseBytes();
    }
}
/// <summary>
/// Serializes this texture export: header, filtered properties, mip table
/// (with pcc-stored pixel data inlined) and the trailing 24-byte footer.
/// </summary>
/// <param name="pccExportDataOffset">file offset of this export's data; used to rebase pcc-stored mip offsets.</param>
/// <returns>the serialized export bytes.</returns>
public byte[] ToArray(int pccExportDataOffset)
{
    var output = new MemoryStream();
    output.Write(headerData, 0, headerData.Length);

    foreach (var pair in properties)
    {
        var prop = pair.Value;
        // LODBias / InternalFormatLODBias are intentionally dropped.
        // (An earlier revision also dropped LODGroup so the texture would fall
        // back to texturegroup_world; that filter is currently disabled.)
        if (pair.Key == "LODBias" || pair.Key == "InternalFormatLODBias")
            continue;
        output.Write(prop.raw, 0, prop.raw.Length);
        if (pair.Key == "UnpackMin")
        {
            // UnpackMin is written three times in total.
            output.Write(prop.raw, 0, prop.raw.Length);
            output.Write(prop.raw, 0, prop.raw.Length);
        }
    }

    output.WriteValueU32(numMipMaps);
    foreach (var mip in imgList)
    {
        output.WriteValueS32((int)mip.storageType);
        output.WriteValueS32(mip.uncSize);
        output.WriteValueS32(mip.cprSize);
        if (mip.storageType == storage.pccSto)
        {
            // Pixel data lives inside this export: rebase its offset and inline the bytes.
            output.WriteValueS32((int)(mip.offset + pccExportDataOffset + dataOffset));
            output.Write(imageData, mip.offset, mip.uncSize);
        }
        else
        {
            output.WriteValueS32(mip.offset);
        }
        output.WriteValueU32(mip.imgSize.width);
        output.WriteValueU32(mip.imgSize.height);
    }

    // Texture2D footer: the last 24 bytes of the image data.
    output.Write(imageData, imageData.Length - 24, 24);
    return output.ToArray();
}
/// <summary>
/// Serializes <paramref name="save"/> into <paramref name="output"/>:
/// version word, payload, and — for save version 27 and later — a trailing
/// CRC32 checksum of everything that precedes it.
/// </summary>
/// <param name="save">save game to serialize.</param>
/// <param name="output">destination stream.</param>
/// <exception cref="ArgumentNullException">if either argument is null.</exception>
public static void Write(SFXSaveGameFile save, Stream output)
{
    if (save == null)
    {
        throw new ArgumentNullException(nameof(save)); // nameof keeps the message refactor-safe
    }
    if (output == null)
    {
        throw new ArgumentNullException(nameof(output));
    }

    using (var memory = new MemoryStream())
    {
        memory.WriteValueU32(save.Version, save._Endian);
        var writer = new Unreal.FileWriter(memory, save._Version, save._Endian);
        save.Serialize(writer);

        if (save._Version >= 27)
        {
            // Checksum everything written so far, then append the checksum.
            memory.Position = 0;
            uint checksum = 0;
            var buffer = new byte[1024];
            while (memory.Position < memory.Length)
            {
                int read = memory.Read(buffer, 0, 1024);
                checksum = Crc32.Compute(buffer, 0, read, checksum);
            }
            save._Checksum = checksum;
            memory.WriteValueU32(checksum, save._Endian);
        }

        memory.Position = 0;
        output.WriteFromStream(memory, memory.Length);
    }
}
// Serializes the coalesced-ini container to the output stream.
// On-disk layout: 32-byte header, string table (keys sorted by CRC32 hash),
// huffman pair table, hierarchical index (files -> sections -> values), then
// the huffman-compressed value bit blob. Header fields (max lengths, section
// sizes, magic) are patched in at the end once all sizes are known.
public void Serialize(Stream output)
{
    var endian = this.Endian;
    const uint headerSize = 32;

    // Placeholder magic + version; the real magic overwrites this at the end.
    output.WriteValueU32(0x42424947, endian);
    output.WriteValueU32(this.Version, endian);

    // Collect every name/key string and concatenate all values
    // (NUL-terminated) into one blob to feed the huffman encoder.
    var keys = new List<string>() {""};
    int maxValueLength = 0;
    var blob = new StringBuilder();
    foreach (var file in this.Files)
    {
        keys.Add(file.Name);
        foreach (var section in file.Sections)
        {
            keys.Add(section.Key);
            foreach (var value in section.Value)
            {
                keys.Add(value.Key);
                foreach (var item in value.Value)
                {
                    if (item.Value != null)
                    {
                        blob.Append(item.Value + '\0');
                        maxValueLength = Math.Max(maxValueLength, item.Value.Length);
                    }
                }
            }
        }
    }

    var huffmanEncoder = new Huffman.Encoder();
    huffmanEncoder.Build(blob.ToString());

    // Deduplicate keys and sort by CRC32 hash (the lookup order of the format).
    keys = keys.Distinct().OrderBy(k => k.HashCrc32()).ToList();
    int maxKeyLength = keys.Max(k => k.Length);

    // --- String table ---
    uint stringTableSize;
    using (var data = new MemoryStream())
    {
        data.Position = 4; // reserve 4 bytes for the table size prefix
        data.WriteValueS32(keys.Count, endian);
        // Skip over the (hash, offset) pair table; filled in below once the
        // string offsets are known.
        data.Position = 4 + 4 + (8 * keys.Count);
        var offsets = new List<KeyValuePair<uint, uint>>();
        foreach (var key in keys)
        {
            var offset = (uint)data.Position;
            data.WriteValueU16((ushort)key.Length, endian);
            data.WriteString(key, Encoding.UTF8);
            offsets.Add(new KeyValuePair<uint, uint>(key.HashCrc32(), offset));
        }
        data.Position = 8;
        foreach (var kv in offsets)
        {
            data.WriteValueU32(kv.Key, endian);
            data.WriteValueU32(kv.Value - 8, endian); // offsets relative to the pair table
        }
        data.Position = 0;
        data.WriteValueU32((uint)data.Length, endian); // table size prefix
        data.Position = 0;
        stringTableSize = (uint)data.Length;
        output.Seek(headerSize, SeekOrigin.Begin);
        output.WriteFromStream(data, data.Length);
    }

    // --- Huffman pair table ---
    uint huffmanSize;
    using (var data = new MemoryStream())
    {
        var pairs = huffmanEncoder.GetPairs();
        data.WriteValueU16((ushort)pairs.Length, endian);
        foreach (var pair in pairs)
        {
            data.WriteValueS32(pair.Left, endian);
            data.WriteValueS32(pair.Right, endian);
        }
        data.Position = 0;
        huffmanSize = (uint)data.Length;
        output.Seek(headerSize + stringTableSize, SeekOrigin.Begin);
        output.WriteFromStream(data, data.Length);
    }

    // --- Index + compressed value bits ---
    var bits = new BitArray(huffmanEncoder.TotalBits);
    var bitOffset = 0;
    uint indexSize;
    using (var index = new MemoryStream())
    {
        // Each level emits a count followed by (key index, relative offset)
        // entries; offsets are accumulated while children are written, then
        // the entry tables are back-filled by seeking.
        var fileDataOffset = 2 + (this.Files.Count * 6);
        var files = new List<KeyValuePair<ushort, int>>();
        foreach (var file in this.Files.OrderBy(f => keys.IndexOf(f.Name)))
        {
            files.Add(new KeyValuePair<ushort, int>(
                (ushort)keys.IndexOf(file.Name),
                fileDataOffset));
            var sectionDataOffset = 2 + (file.Sections.Count * 6);
            var sections = new List<KeyValuePair<ushort, int>>();
            foreach (var section in file.Sections.OrderBy(s => keys.IndexOf(s.Key)))
            {
                sections.Add(new KeyValuePair<ushort, int>(
                    (ushort)keys.IndexOf(section.Key),
                    sectionDataOffset));
                var valueDataOffset = 2 + (section.Value.Count * 6);
                var values = new List<KeyValuePair<ushort, int>>();
                foreach (var value in section.Value.OrderBy(v => keys.IndexOf(v.Key)))
                {
                    index.Position = fileDataOffset + sectionDataOffset + valueDataOffset;
                    values.Add(new KeyValuePair<ushort, int>(
                        (ushort)keys.IndexOf(value.Key),
                        valueDataOffset));
                    index.WriteValueU16((ushort)value.Value.Count, endian);
                    valueDataOffset += 2;
                    foreach (var item in value.Value)
                    {
                        // Each item packs its type into the top 3 bits and the
                        // bit offset of its huffman-coded text into the low 29.
                        if (item.Type == 1)
                        {
                            // Type 1 carries no text payload.
                            index.WriteValueS32((1 << 29) | bitOffset, endian);
                        }
                        else if (item.Type == 0 || item.Type == 2 || item.Type == 3 || item.Type == 4)
                        {
                            index.WriteValueS32((item.Type << 29) | bitOffset, endian);
                            bitOffset += huffmanEncoder.Encode((item.Value ?? "") + '\0', bits, bitOffset);
                        }
                        valueDataOffset += 4;
                    }
                }
                index.Position = fileDataOffset + sectionDataOffset;
                index.WriteValueU16((ushort)values.Count, endian);
                sectionDataOffset += 2;
                foreach (var value in values)
                {
                    index.WriteValueU16(value.Key, endian);
                    index.WriteValueS32(value.Value, endian);
                    sectionDataOffset += 6;
                }
                sectionDataOffset += valueDataOffset;
            }
            index.Position = fileDataOffset;
            index.WriteValueU16((ushort)sections.Count, endian);
            fileDataOffset += 2;
            foreach (var section in sections)
            {
                index.WriteValueU16(section.Key, endian);
                index.WriteValueS32(section.Value, endian);
                fileDataOffset += 6;
            }
            fileDataOffset += sectionDataOffset;
        }
        index.Position = 0;
        index.WriteValueU16((ushort)files.Count, endian);
        foreach (var file in files)
        {
            index.WriteValueU16(file.Key, endian);
            index.WriteValueS32(file.Value, endian);
        }
        index.Position = 0;
        indexSize = (uint)index.Length;
        output.Seek(headerSize + stringTableSize + huffmanSize, SeekOrigin.Begin);
        output.WriteFromStream(index, index.Length);
    }

    // --- Compressed data: bit count then the packed bit bytes ---
    output.Seek(headerSize + stringTableSize + huffmanSize + indexSize, SeekOrigin.Begin);
    output.WriteValueS32(bits.Length, endian);
    var bytes = new byte[(bits.Length - 1) / 8 + 1]; // ceil(bits/8)
    bits.CopyTo(bytes, 0);
    output.WriteBytes(bytes);

    // --- Patch header fields now that all sizes are known ---
    output.Seek(8, SeekOrigin.Begin);
    output.WriteValueS32(maxKeyLength, endian);
    output.WriteValueS32(maxValueLength, endian);
    output.WriteValueU32(stringTableSize, endian);
    output.WriteValueU32(huffmanSize, endian);
    output.WriteValueU32(indexSize, endian);
    output.WriteValueS32(bytes.Length, endian);
    output.Seek(0, SeekOrigin.Begin);
    output.WriteValueU32(0x666D726D, endian); // final magic word, overwrites the placeholder
}
// Serializes this ME2 texture export back to raw bytes: header, properties
// (forcing LODGroup to TEXTUREGROUP_LightAndShadowMap), the mip table with
// pcc-stored pixel data inlined, then the footer.
// NOTE(review): name indices are written as 64-bit in most places but 32-bit
// for ByteProperty values — presumably matching the ME2 on-disk layout; confirm.
public byte[] ThisToArray(uint pccExportDataOffset, ME2PCCObject pcc)
{
    MemoryStream buffer = new MemoryStream();
    buffer.Write(headerData, 0, headerData.Length);
    if (properties.ContainsKey("LODGroup"))
    {
        // Property exists: overwrite its value in place; it is emitted by the
        // property loop below.
        properties["LODGroup"].Value.StringValue = "TEXTUREGROUP_LightAndShadowMap";
        properties["LODGroup"].Value.String2 = pcc.Names[0];
    }
    else
    {
        // Property missing: emit a fresh LODGroup ByteProperty directly.
        buffer.WriteValueS64(pcc.AddName("LODGroup"));
        buffer.WriteValueS64(pcc.AddName("ByteProperty"));
        buffer.WriteValueS64(8);
        buffer.WriteValueS64(pcc.AddName("TEXTUREGROUP_LightAndShadowMap"));
    }
    int count = 0; // NOTE(review): never used — candidate for removal
    foreach (KeyValuePair<string, SaltPropertyReader.Property> kvp in properties)
    {
        SaltPropertyReader.Property prop = kvp.Value;
        if (prop.Name == "UnpackMin")
        {
            // UnpackMin is emitted UnpackNum times, using j as the array index.
            for (int j = 0; j < UnpackNum; j++)
            {
                buffer.WriteValueS64(pcc.AddName(prop.Name));
                buffer.WriteValueS64(pcc.AddName(prop.TypeVal.ToString()));
                buffer.WriteValueS32(prop.Size);
                buffer.WriteValueS32(j);
                buffer.WriteValueF32(prop.Value.FloatValue, Endian.Little);
            }
            continue;
        }
        buffer.WriteValueS64(pcc.AddName(prop.Name));
        if (prop.Name == "None")
        {
            // "None" terminator is followed by 12 zero bytes.
            for (int j = 0; j < 12; j++)
                buffer.WriteByte(0);
        }
        else
        {
            buffer.WriteValueS64(pcc.AddName(prop.TypeVal.ToString()));
            buffer.WriteValueS64(prop.Size);
            switch (prop.TypeVal)
            {
                case SaltPropertyReader.Type.IntProperty:
                    buffer.WriteValueS32(prop.Value.IntValue);
                    break;
                case SaltPropertyReader.Type.BoolProperty:
                    buffer.WriteValueS32(prop.Value.IntValue);
                    break;
                case SaltPropertyReader.Type.NameProperty:
                    buffer.WriteValueS64(pcc.AddName(prop.Value.StringValue));
                    // Heff: Modified to handle name references.
                    //var index = pcc.AddName(prop.Value.StringValue);
                    //buffer.WriteValueS32(index);
                    //buffer.WriteValueS32(prop.Value.NameValue.count);
                    break;
                case SaltPropertyReader.Type.StrProperty:
                    // Length-prefixed (including NUL), byte-per-char string.
                    buffer.WriteValueS32(prop.Value.StringValue.Length + 1);
                    foreach (char c in prop.Value.StringValue)
                        buffer.WriteByte((byte)c);
                    buffer.WriteByte(0);
                    break;
                case SaltPropertyReader.Type.StructProperty:
                    string strVal = prop.Value.StringValue;
                    // Guid-like struct names are normalized to "Guid".
                    if (prop.Name.ToLowerInvariant().Contains("guid"))
                        strVal = "Guid";
                    buffer.WriteValueS64(pcc.AddName(strVal));
                    foreach (SaltPropertyReader.PropertyValue value in prop.Value.Array)
                        buffer.WriteValueS32(value.IntValue);
                    break;
                case SaltPropertyReader.Type.ByteProperty:
                    buffer.WriteValueS32(pcc.AddName(prop.Value.StringValue));
                    buffer.WriteValueS32(pcc.AddName(prop.Value.String2));
                    break;
                case SaltPropertyReader.Type.FloatProperty:
                    buffer.WriteValueF32(prop.Value.FloatValue, Endian.Little);
                    break;
                default:
                    throw new FormatException("unknown property");
            }
        }
    }
    // NOTE(review): appears to be the absolute pcc offset of the data that
    // follows this field — confirm against the reader.
    buffer.WriteValueS32((int)buffer.Position + (int)pccExportDataOffset);
    //Remove empty textures
    List<ImageInfo> tempList = new List<ImageInfo>();
    foreach (ImageInfo imgInfo in privateimgList)
    {
        if (imgInfo.storageType != storage.empty)
            tempList.Add(imgInfo);
    }
    privateimgList = tempList;
    numMipMaps = (uint)privateimgList.Count;
    buffer.WriteValueU32(numMipMaps);
    foreach (ImageInfo imgInfo in privateimgList)
    {
        buffer.WriteValueS32((int)imgInfo.storageType);
        buffer.WriteValueS32(imgInfo.uncSize);
        buffer.WriteValueS32(imgInfo.cprSize);
        if (imgInfo.storageType == storage.pccSto)
        {
            // Pixel data stored inside this export: write its rebased absolute
            // offset, then inline the bytes.
            buffer.WriteValueS32((int)(buffer.Position + pccExportDataOffset));
            buffer.Write(imageData, imgInfo.offset, imgInfo.uncSize);
        }
        else
            buffer.WriteValueS32(imgInfo.offset);
        // Dimensions below 4 are written as 4.
        if (imgInfo.imgSize.width < 4)
            buffer.WriteValueU32(4);
        else
            buffer.WriteValueU32(imgInfo.imgSize.width);
        if (imgInfo.imgSize.height < 4)
            buffer.WriteValueU32(4);
        else
            buffer.WriteValueU32(imgInfo.imgSize.height);
    }
    buffer.WriteBytes(footerData);
    return buffer.ToArray();
}
/// <summary>
/// Builds the two halves of the generation (coinbase) transaction.
/// The transaction is split at the extranonce inside the input's scriptSig;
/// miners supply unique extranonces that join the halves into a complete
/// transaction when attempting a valid share and/or block.
/// </summary>
public void Create()
{
    var input = Inputs.First();

    // First half: everything up to and including the initial scriptSig part.
    using (var first = new MemoryStream())
    {
        first.WriteValueU32(Version.LittleEndian()); // transaction version

        // Proof-of-stake hybrid coins embed a timestamp in the transaction.
        if (PoolConfig.Coin.Options.IsProofOfStakeHybrid)
            first.WriteValueU32(BlockTemplate.CurTime);

        // Transaction input.
        first.WriteBytes(Serializers.VarInt(InputsCount));
        first.WriteBytes(input.PreviousOutput.Hash.Bytes);
        first.WriteValueU32(input.PreviousOutput.Index.LittleEndian());

        // scriptSig length covers: initial part + extranonce placeholder + final part.
        var scriptLength = (UInt32)(input.SignatureScript.Initial.Length +
                                    ExtraNonce.ExtraNoncePlaceholder.Length +
                                    input.SignatureScript.Final.Length);
        first.WriteBytes(Serializers.VarInt(scriptLength).ToArray());
        first.WriteBytes(input.SignatureScript.Initial);

        Initial = first.ToArray();
    }

    // Second half: the rest of the scriptSig, outputs, and locktime.
    using (var second = new MemoryStream())
    {
        second.WriteBytes(input.SignatureScript.Final);
        second.WriteValueU32(input.Sequence); // transaction inputs end here

        second.WriteBytes(Outputs.GetBuffer()); // transaction outputs

        second.WriteValueU32(LockTime.LittleEndian());

        if (PoolConfig.Coin.Options.TxMessageSupported)
            second.WriteBytes(TxMessage);

        Final = second.ToArray();
    }
}
/// <summary>
/// Encodes an integer as a Bitcoin variable-length integer to save space.
/// </summary>
/// <remarks>
/// Values below 0xfd are a single byte; values up to 0xffff are 0xfd followed
/// by a little-endian uint16; larger values are 0xfe followed by a
/// little-endian uint32. (The 0xff/uint64 form is unreachable for a UInt32
/// input. The previous implementation wrote a uint16 after the 0xff prefix for
/// value 0xffffffff, producing an undecodable varint; it also encoded 0xffff
/// non-canonically via the 0xfe form.)
/// </remarks>
/// <specification>https://en.bitcoin.it/wiki/Protocol_specification#Variable_length_integer</specification>
/// <param name="value">the value to encode.</param>
/// <returns>the varint-encoded bytes.</returns>
public static byte[] VarInt(UInt32 value)
{
    if (value < 0xfd)
        return new[] { (byte)value };

    using (var stream = new MemoryStream())
    {
        if (value <= 0xffff)
        {
            stream.WriteByte(0xfd);
            // little-endian uint16
            stream.WriteByte((byte)(value & 0xff));
            stream.WriteByte((byte)((value >> 8) & 0xff));
        }
        else
        {
            stream.WriteByte(0xfe);
            // little-endian uint32
            stream.WriteByte((byte)(value & 0xff));
            stream.WriteByte((byte)((value >> 8) & 0xff));
            stream.WriteByte((byte)((value >> 16) & 0xff));
            stream.WriteByte((byte)((value >> 24) & 0xff));
        }
        return stream.ToArray();
    }
}
/// <summary>
/// decompress an entire ME3 pcc file into a new stream
/// </summary>
/// <param name="input">pcc file passed in stream format</param>
/// <returns>a decompressed stream of the pcc contents</returns>
public static MemoryStream DecompressME3(Stream input)
{
    input.Seek(0, SeekOrigin.Begin);
    var magic = input.ReadValueU32(Endian.Little);
    // Accept the magic in either byte order and derive the file's endianness.
    if (magic != 0x9E2A83C1 && magic.Swap() != 0x9E2A83C1)
    {
        throw new FormatException("not a pcc file");
    }
    var endian = magic == 0x9E2A83C1 ? Endian.Little : Endian.Big;

    var versionLo = input.ReadValueU16(endian);
    var versionHi = input.ReadValueU16(endian);
    if (versionLo != 684 && versionHi != 194)
    {
        throw new FormatException("unsupported pcc version");
    }

    // Walk the header, accumulating its byte size as we go (magic + version = 8).
    long headerSize = 8;
    input.Seek(4, SeekOrigin.Current);
    headerSize += 4;
    var folderNameLength = input.ReadValueS32(endian);
    headerSize += 4;
    // Negative length marks a UTF-16 string (2 bytes per char).
    var folderNameByteLength = folderNameLength >= 0 ? folderNameLength : (-folderNameLength * 2);
    input.Seek(folderNameByteLength, SeekOrigin.Current);
    headerSize += folderNameByteLength;

    var packageFlagsOffset = input.Position;
    var packageFlags = input.ReadValueU32(endian);
    headerSize += 4;
    // Flag 0x02000000 marks a compressed package.
    if ((packageFlags & 0x02000000u) == 0)
    {
        throw new FormatException("pcc file is already decompressed");
    }
    if ((packageFlags & 8) != 0)
    {
        input.Seek(4, SeekOrigin.Current);
        headerSize += 4;
    }

    uint nameCount = input.ReadValueU32(endian); // NOTE(review): unused below
    uint nameOffset = input.ReadValueU32(endian);
    input.Seek(52, SeekOrigin.Current);
    headerSize += 60;
    var generationsCount = input.ReadValueU32(endian);
    input.Seek(generationsCount * 12, SeekOrigin.Current);
    headerSize += generationsCount * 12;
    input.Seek(20, SeekOrigin.Current);
    headerSize += 24;

    // Compressed-block table: 16 bytes per entry, followed by 8 extra bytes.
    var blockCount = input.ReadValueU32(endian);
    int headBlockOff = (int)input.Position;
    var afterBlockTableOffset = headBlockOff + (blockCount * 16);
    var indataOffset = afterBlockTableOffset + 8; // NOTE(review): unused below
    byte[] buff;

    input.Seek(0, SeekOrigin.Begin);
    MemoryStream output = new MemoryStream();
    output.Seek(0, SeekOrigin.Begin);
    // Copy the header verbatim, but write a zero block count (decompressed).
    output.WriteFromStream(input, headerSize);
    output.WriteValueU32(0, endian); // block count
    input.Seek(afterBlockTableOffset, SeekOrigin.Begin);
    output.WriteFromStream(input, 8);
    //check if has extra name list (don't know it's usage...)
    if ((packageFlags & 0x10000000) != 0)
    {
        long curPos = output.Position;
        output.WriteFromStream(input, nameOffset - curPos);
    }
    //decompress blocks in parallel
    Task<byte[]>[] tasks = new Task<byte[]>[blockCount];
    uint[] uncompressedOffsets = new uint[blockCount];
    for (int i = 0; i < blockCount; i++)
    {
        // Each table entry: uncompressed offset/size, compressed offset/size.
        input.Seek(headBlockOff, SeekOrigin.Begin);
        uncompressedOffsets[i] = input.ReadValueU32(endian);
        var uncompressedSize = input.ReadValueU32(endian);
        var compressedOffset = input.ReadValueU32(endian);
        var compressedSize = input.ReadValueU32(endian);
        headBlockOff = (int)input.Position;
        buff = new byte[compressedSize];
        input.Seek(compressedOffset, SeekOrigin.Begin);
        input.Read(buff, 0, buff.Length);
        tasks[i] = ZBlock.DecompressAsync(buff);
    }
    Task.WaitAll(tasks);
    // Splice each decompressed block back at its original uncompressed offset.
    for (int i = 0; i < blockCount; i++)
    {
        output.Seek(uncompressedOffsets[i], SeekOrigin.Begin);
        output.WriteBytes(tasks[i].Result);
    }
    // Clear the compressed flag in the output header.
    output.Seek(packageFlagsOffset, SeekOrigin.Begin);
    output.WriteValueU32(packageFlags & ~0x02000000u, endian);
    return output;
}
/// <summary>
/// Builds the two halves of the generation (coinbase) transaction, split at
/// the extranonce inside the input's scriptSig. Miners send unique
/// extranonces that join the two parts into a complete transaction when
/// attempting a valid share and/or block.
/// </summary>
public void Create()
{
    var txInput = Inputs.First();

    // Part one: version, input outpoint, scriptSig length and its initial bytes.
    using (var head = new MemoryStream())
    {
        head.WriteValueU32(Version.LittleEndian());
        // for proof-of-stake coins a timestamp would go here -
        // https://github.com/zone117x/node-stratum-pool/blob/b24151729d77e0439e092fe3a1cdbba71ca5d12e/lib/transactions.js#L210

        head.WriteBytes(Serializers.VarInt(InputsCount));
        head.WriteBytes(txInput.PreviousOutput.Hash.Bytes);
        head.WriteValueU32(txInput.PreviousOutput.Index.LittleEndian());

        // scriptSig length: initial part + extranonce placeholder + final part.
        var scriptSigLength = (UInt32)(txInput.SignatureScript.Initial.Length +
                                       ExtraNonce.ExtraNoncePlaceholder.Length +
                                       txInput.SignatureScript.Final.Length);
        head.WriteBytes(Serializers.VarInt(scriptSigLength).ToArray());
        head.WriteBytes(txInput.SignatureScript.Initial);

        Initial = head.ToArray();
    }

    // Part two: remainder of the scriptSig, sequence, outputs, locktime.
    using (var tail = new MemoryStream())
    {
        tail.WriteBytes(txInput.SignatureScript.Final);
        tail.WriteValueU32(txInput.Sequence); // transaction inputs end here

        tail.WriteBytes(Outputs.GetBuffer()); // transaction outputs

        tail.WriteValueU32(LockTime.LittleEndian());

        if (SupportTxMessages)
            tail.WriteBytes(Message);

        Final = tail.ToArray();
    }
}
/// <summary>
/// Serializes the save game to <paramref name="output"/>.
/// Pipeline: protobuf-serialize the save, huffman-compress it into a "WSG"
/// inner container, LZO-compress that (single block, or a block table when it
/// exceeds <c>BlockSize</c>), prefix the uncompressed length, and write a
/// SHA-1 hash of the result followed by the result itself.
/// </summary>
/// <param name="output">destination stream.</param>
/// <exception cref="SaveCorruptionException">if LZO compression fails.</exception>
public void Serialize(Stream output)
{
    var saveGame = this.SaveGame;

    // Protobuf-serialize the save game.
    byte[] innerUncompressedBytes;
    using (var innerUncompressedData = new MemoryStream())
    {
        saveGame.Compose();
        try
        {
            ProtoBuf.Serializer.Serialize(innerUncompressedData, saveGame);
        }
        finally
        {
            saveGame.Decompose();
        }
        innerUncompressedData.Position = 0;
        innerUncompressedBytes = innerUncompressedData.ReadBytes((uint)innerUncompressedData.Length);
    }

    // Build the huffman-compressed "WSG" inner container.
    byte[] innerCompressedBytes;
    using (var innerCompressedData = new MemoryStream())
    {
        var endian = this.Endian;

        innerCompressedData.WriteValueS32(0, Endian.Big); // length placeholder, patched below
        innerCompressedData.WriteString("WSG");
        innerCompressedData.WriteValueU32(2, endian);
        innerCompressedData.WriteValueU32(CRC32.Hash(innerUncompressedBytes, 0, innerUncompressedBytes.Length), endian); // crc32
        innerCompressedData.WriteValueS32(innerUncompressedBytes.Length, endian);

        var encoder = new Huffman.Encoder();
        encoder.Build(innerUncompressedBytes);
        innerCompressedData.WriteBytes(encoder.Encode(innerUncompressedBytes));

        // Patch the length prefix (excludes the prefix itself).
        innerCompressedData.Position = 0;
        innerCompressedData.WriteValueU32((uint)(innerCompressedData.Length - 4), Endian.Big);

        innerCompressedData.Position = 0;
        innerCompressedBytes = innerCompressedData.ReadBytes((uint)innerCompressedData.Length);
    }

    // LZO-compress the inner container.
    byte[] compressedBytes;
    if (innerCompressedBytes.Length <= BlockSize)
    {
        // Single block: worst-case LZO output buffer is n + n/16 + 64 + 3.
        compressedBytes = new byte[innerCompressedBytes.Length +
                                   (innerCompressedBytes.Length / 16) + 64 + 3];
        var actualCompressedSize = compressedBytes.Length;
        var result = LZO.Compress(innerCompressedBytes,
                                  0,
                                  innerCompressedBytes.Length,
                                  compressedBytes,
                                  0,
                                  ref actualCompressedSize);
        if (result != LZO.ErrorCode.Success)
        {
            throw new SaveCorruptionException(string.Format("LZO compression failure ({0})", result));
        }
        Array.Resize(ref compressedBytes, actualCompressedSize);
    }
    else
    {
        // Multi-block: a count-prefixed table of (compressed, uncompressed)
        // sizes, followed by the concatenated compressed blocks.
        int innerCompressedOffset = 0;
        int innerCompressedSizeLeft = innerCompressedBytes.Length;
        using (var blockData = new MemoryStream())
        {
            // FIX: ceiling division. The previous formula
            // (left + BlockSize) / BlockSize over-counted by one block when
            // the input size was an exact multiple of BlockSize, making the
            // header count disagree with the number of blocks written.
            var blockCount = (innerCompressedSizeLeft + BlockSize - 1) / BlockSize;
            blockData.WriteValueS32(blockCount, Endian.Big);
            blockData.Position = 4 + (blockCount * 8); // reserve the block info table

            var blockInfos = new List<Tuple<uint, uint>>();
            while (innerCompressedSizeLeft > 0)
            {
                var blockUncompressedSize = Math.Min(BlockSize, innerCompressedSizeLeft);
                compressedBytes = new byte[blockUncompressedSize +
                                           (blockUncompressedSize / 16) + 64 + 3];
                var actualCompressedSize = compressedBytes.Length;
                var result = LZO.Compress(innerCompressedBytes,
                                          innerCompressedOffset,
                                          blockUncompressedSize,
                                          compressedBytes,
                                          0,
                                          ref actualCompressedSize);
                if (result != LZO.ErrorCode.Success)
                {
                    throw new SaveCorruptionException(string.Format("LZO compression failure ({0})", result));
                }
                blockData.Write(compressedBytes, 0, actualCompressedSize);
                // NOTE(review): the recorded uncompressed size is always
                // BlockSize, even for a smaller final block — this mirrors the
                // original behavior; confirm the reader expects it.
                blockInfos.Add(new Tuple<uint, uint>((uint)actualCompressedSize, BlockSize));
                innerCompressedOffset += blockUncompressedSize;
                innerCompressedSizeLeft -= blockUncompressedSize;
            }

            // Back-fill the block info table.
            blockData.Position = 4;
            foreach (var blockInfo in blockInfos)
            {
                blockData.WriteValueU32(blockInfo.Item1, Endian.Big);
                blockData.WriteValueU32(blockInfo.Item2, Endian.Big);
            }

            blockData.Position = 0;
            compressedBytes = blockData.ReadBytes((uint)blockData.Length);
        }
    }

    // Prefix with the uncompressed (inner container) length.
    byte[] uncompressedBytes;
    using (var uncompressedData = new MemoryStream())
    {
        uncompressedData.WriteValueS32(innerCompressedBytes.Length, Endian.Big);
        uncompressedData.WriteBytes(compressedBytes);
        uncompressedData.Position = 0;
        uncompressedBytes = uncompressedData.ReadBytes((uint)uncompressedData.Length);
    }

    // SHA-1 of the payload precedes it in the file.
    byte[] computedHash;
    using (var sha1 = new System.Security.Cryptography.SHA1Managed())
    {
        computedHash = sha1.ComputeHash(uncompressedBytes);
    }

    output.WriteBytes(computedHash);
    output.WriteBytes(uncompressedBytes);
}
/// <summary>
/// Serializes the resolver tables: five entry counts followed by each table's
/// serialized entries, returned as a stream positioned at 0.
/// </summary>
/// <param name="endianness">byte order to write with.</param>
/// <returns>the serialized data, rewound to the beginning.</returns>
public Stream Serialize(Endian endianness)
{
    var output = new MemoryStream();

    // Counts first.
    output.WriteValueU32((uint)LocalDataResolvers.Count, endianness);
    output.WriteValueU32((uint)RemoteDataResolvers.Count, endianness);
    output.WriteValueU32((uint)Unknown2s.Count, endianness);
    output.WriteValueU32(0, endianness); // 0 because we crashed if it was >0
    output.WriteValueU32((uint)Unknown4s.Count, endianness);

    // Then each table's entries, in the same order as the counts.
    foreach (var resolver in LocalDataResolvers)
        output.WriteValueU64(resolver.Serialize(), endianness);
    foreach (var resolver in RemoteDataResolvers)
        output.WriteValueU64(resolver.Serialize(), endianness);
    foreach (var resolver in Unknown2s)
        output.WriteValueU32(resolver.Serialize(), endianness);
    foreach (var resolver in Unknown4s)
        output.WriteValueU32(resolver.Serialize(), endianness);

    output.Position = 0;
    return output;
}
// Serializes this texture export back to raw bytes: header, properties
// (forcing LODGroup to TEXTUREGROUP_LightAndShadowMap), the mip table with
// pcc-stored/pcc-compressed data inlined, then the footer.
public byte[] ToArray(int pccExportDataOffset, PCCObject pcc)
{
    MemoryStream buffer = new MemoryStream();
    buffer.Write(headerData, 0, headerData.Length);
    if (properties.ContainsKey("LODGroup"))
    {
        // Property exists: overwrite its value; it is emitted by the loop below.
        properties["LODGroup"].Value.StringValue = "TEXTUREGROUP_LightAndShadowMap";
        //properties["LODGroup"].Value.IntValue = 1025;
    }
    else
    {
        // Property missing: emit a fresh LODGroup ByteProperty directly.
        buffer.WriteValueS64(pcc.AddName("LODGroup"));
        buffer.WriteValueS64(pcc.AddName("ByteProperty"));
        buffer.WriteValueS64(8);
        buffer.WriteValueS32(pcc.AddName("TEXTUREGROUP_LightAndShadowMap"));
        buffer.WriteValueS32(1025);
    }
    foreach (KeyValuePair<string, SaltPropertyReader.Property> kvp in properties)
    {
        SaltPropertyReader.Property prop = kvp.Value;
        if (prop.Name == "UnpackMin")
        {
            // UnpackMin is emitted UnpackNum times, using j as the array index.
            for (int j = 0; j < UnpackNum; j++)
            {
                buffer.WriteValueS64(pcc.AddName(prop.Name));
                buffer.WriteValueS64(pcc.AddName(prop.TypeVal.ToString()));
                buffer.WriteValueS32(prop.Size);
                buffer.WriteValueS32(j);
                buffer.WriteValueF32(prop.Value.FloatValue, Endian.Little);
            }
            continue;
        }
        buffer.WriteValueS64(pcc.AddName(prop.Name));
        if (prop.Name == "None")
        {
            // "None" terminator is followed by 12 zero bytes.
            for (int j = 0; j < 12; j++)
                buffer.WriteByte(0);
        }
        else
        {
            buffer.WriteValueS64(pcc.AddName(prop.TypeVal.ToString()));
            buffer.WriteValueS64(prop.Size);
            switch (prop.TypeVal)
            {
                case SaltPropertyReader.Type.IntProperty:
                    buffer.WriteValueS32(prop.Value.IntValue);
                    break;
                case SaltPropertyReader.Type.BoolProperty:
                    // The bool value overwrites the last 4 bytes just written
                    // (the size field's high half), then the position is moved
                    // 4 bytes forward again.
                    // NOTE(review): confirm this matches the reader's layout.
                    buffer.Seek(-4, SeekOrigin.Current);
                    buffer.WriteValueS32(prop.Value.IntValue);
                    buffer.Seek(4, SeekOrigin.Current);
                    break;
                case SaltPropertyReader.Type.NameProperty:
                    buffer.WriteValueS64(pcc.AddName(prop.Value.StringValue));
                    break;
                case SaltPropertyReader.Type.StrProperty:
                    // Length-prefixed (including NUL), byte-per-char string.
                    buffer.WriteValueS32(prop.Value.StringValue.Length + 1);
                    foreach (char c in prop.Value.StringValue)
                        buffer.WriteByte((byte)c);
                    buffer.WriteByte(0);
                    break;
                case SaltPropertyReader.Type.StructProperty:
                    buffer.WriteValueS64(pcc.AddName(prop.Value.StringValue));
                    foreach (SaltPropertyReader.PropertyValue value in prop.Value.Array)
                        buffer.WriteValueS32(value.IntValue);
                    break;
                case SaltPropertyReader.Type.ByteProperty:
                    buffer.WriteValueS32(pcc.AddName(prop.Value.StringValue));
                    buffer.WriteValueS32(prop.Value.IntValue);
                    break;
                case SaltPropertyReader.Type.FloatProperty:
                    buffer.WriteValueF32(prop.Value.FloatValue, Endian.Little);
                    break;
                default:
                    throw new FormatException("unknown property");
            }
        }
    }
    // NOTE(review): appears to be the absolute pcc offset of the data that
    // follows this field (+4 skips the field itself) — confirm.
    buffer.WriteValueS32((int)(pccOffset + buffer.Position + 4));
    //Remove empty textures
    List<ImageInfo> tempList = new List<ImageInfo>();
    foreach (ImageInfo imgInfo in imgList)
    {
        if (imgInfo.storageType != storage.empty)
            tempList.Add(imgInfo);
    }
    imgList = tempList;
    numMipMaps = (uint)imgList.Count;
    buffer.WriteValueU32(numMipMaps);
    foreach (ImageInfo imgInfo in imgList)
    {
        buffer.WriteValueS32((int)imgInfo.storageType);
        buffer.WriteValueS32(imgInfo.uncSize);
        buffer.WriteValueS32(imgInfo.cprSize);
        if (imgInfo.storageType == storage.pccSto)
        {
            // Uncompressed data stored in this export: rebase offset, inline bytes.
            buffer.WriteValueS32((int)(imgInfo.offset + pccExportDataOffset + dataOffset));
            buffer.Write(imageData, imgInfo.offset, imgInfo.uncSize);
        }
        else if (imgInfo.storageType == storage.pccCpr)
        {
            // Compressed data stored in this export: rebase offset, inline cprSize bytes.
            buffer.WriteValueS32((int)(imgInfo.offset + pccExportDataOffset + dataOffset));
            buffer.Write(imageData, imgInfo.offset, imgInfo.cprSize);
        }
        else
            buffer.WriteValueS32(imgInfo.offset);
        // Dimensions below 4 are written as 4.
        if (imgInfo.imgSize.width < 4)
            buffer.WriteValueU32(4);
        else
            buffer.WriteValueU32(imgInfo.imgSize.width);
        if (imgInfo.imgSize.height < 4)
            buffer.WriteValueU32(4);
        else
            buffer.WriteValueU32(imgInfo.imgSize.height);
    }
    buffer.WriteBytes(footerData);
    return buffer.ToArray();
}
/// <summary>
/// Serializes the save to <paramref name="output"/>: version word, payload,
/// and — for version 27 and later — a trailing CRC32 checksum of everything
/// that precedes it.
/// </summary>
/// <param name="output">destination stream.</param>
public void Save(Stream output)
{
    MemoryStream memory = new MemoryStream();
    UnrealStream stream = new UnrealStream(memory, false, this.Version);
    memory.WriteValueU32(this.Version);
    this.Serialize(stream);

    if (this.Version >= 27)
    {
        // Checksum everything written so far, then append the checksum.
        memory.Position = 0;
        uint checksum = 0;
        byte[] data = new byte[1024];
        while (memory.Position < memory.Length)
        {
            int read = memory.Read(data, 0, 1024);
            checksum = CRC32.Compute(data, 0, read, checksum);
        }
        this.Checksum = checksum;
        memory.WriteValueU32(checksum);
    }

    // Copy the finished buffer out (replaces the original manual 1 KB loop).
    memory.Position = 0;
    memory.CopyTo(output);
}
/// <summary>
/// Serializes this PCD9 texture: header (magic, format, data size,
/// dimensions, bpp, mip count) followed by the raw data of every mipmap.
/// The data-size field is patched into the header after the payload is
/// written. Returns the stream rewound to position 0.
/// </summary>
public Stream Serialize()
{
    var output = new MemoryStream();

    // Header.
    output.WriteValueU32(0x39444350); // magic
    output.WriteValueEnum<PCD9.Format>(this.Format);
    output.Seek(4, SeekOrigin.Current); // placeholder for dataSize, patched below
    output.WriteValueU32(this.Unknown0C);
    output.WriteValueU16(this.Width);
    output.WriteValueU16(this.Height);
    output.WriteValueU8(this.BPP);
    output.WriteValueU8((byte)(this.Mipmaps.Count - 1));
    output.WriteValueU16(this.Unknown16);

    // Mipmap payloads.
    //TODO sort to make sure the biggest is first? will the order ever change?
    uint totalDataSize = 0;
    foreach (var mipmap in this.Mipmaps)
    {
        output.WriteBytes(mipmap.Data);
        totalDataSize += (uint)mipmap.Data.Length;
    }

    // Patch the data size into the header.
    output.Seek(8, SeekOrigin.Begin);
    output.WriteValueU32(totalDataSize);

    output.Position = 0;
    return output;
}
/// <summary>
/// compress an entire ME3 pcc into a byte array.
/// </summary>
/// <param name="uncompressedPcc">uncompressed pcc stream.</param>
/// <returns>a compressed array of bytes.</returns>
public static Stream Compress(Stream uncompressedPcc)
{
    // ---- validate the package header -----------------------------------
    uncompressedPcc.Position = 0;
    var magic = uncompressedPcc.ReadValueU32(Endian.Little);
    if (magic != 0x9E2A83C1 &&
        magic.Swap() != 0x9E2A83C1)
    {
        throw new FormatException("not a pcc package");
    }
    // Which way the magic matched tells us the file's byte order.
    var endian = magic == 0x9E2A83C1 ? Endian.Little : Endian.Big;
    var encoding = endian == Endian.Little ? Encoding.Unicode : Encoding.BigEndianUnicode; // NOTE(review): unused below

    var versionLo = uncompressedPcc.ReadValueU16(endian);
    var versionHi = uncompressedPcc.ReadValueU16(endian);
    // NOTE(review): this only rejects when BOTH version halves mismatch (&&);
    // confirm whether a single mismatch should be fatal (||).
    if (versionLo != 684 &&
        versionHi != 194)
    {
        throw new FormatException("unsupported version");
    }

    uncompressedPcc.Seek(4, SeekOrigin.Current);

    // Folder name: a negative length marks a UTF-16 string (2 bytes per char).
    var folderNameLength = uncompressedPcc.ReadValueS32(endian);
    var folderNameByteLength =
        folderNameLength >= 0 ? folderNameLength : (-folderNameLength * 2);
    uncompressedPcc.Seek(folderNameByteLength, SeekOrigin.Current);

    var packageFlagsOffset = uncompressedPcc.Position;
    var packageFlags = uncompressedPcc.ReadValueU32(endian);

    if ((packageFlags & 8) != 0)
    {
        // Flag bit 3 set: skip an extra 4-byte field before the table counts.
        uncompressedPcc.Seek(4, SeekOrigin.Current);
    }

    var nameCount = uncompressedPcc.ReadValueU32(endian);
    var namesOffset = uncompressedPcc.ReadValueU32(endian);
    var exportCount = uncompressedPcc.ReadValueU32(endian);
    var exportInfosOffset = uncompressedPcc.ReadValueU32(endian);

    // export data offset -> export data size, ordered by offset.
    SortedDictionary<uint, uint> exportDataOffsets = new SortedDictionary<uint, uint>();

    Stream data;
    if ((packageFlags & 0x02000000) == 0)
    {
        data = uncompressedPcc;
    }
    else
    {
        throw new FormatException("pcc data is compressed");
    }

    // get info about export data, sizes and offsets
    data.Seek(exportInfosOffset, SeekOrigin.Begin);
    for (uint i = 0; i < exportCount; i++)
    {
        var classIndex = data.ReadValueS32(endian);
        data.Seek(4, SeekOrigin.Current);
        var outerIndex = data.ReadValueS32(endian);
        var objectNameIndex = data.ReadValueS32(endian);
        data.Seek(16, SeekOrigin.Current);
        uint exportDataSize = data.ReadValueU32(endian);
        uint exportDataOffset = data.ReadValueU32(endian);
        exportDataOffsets.Add(exportDataOffset, exportDataSize);
        data.Seek(4, SeekOrigin.Current);
        var count = data.ReadValueU32(endian);
        // skip the variable-length table (count * 4 bytes) plus a fixed 20-byte tail.
        data.Seek(count * 4, SeekOrigin.Current);
        data.Seek(20, SeekOrigin.Current);
    }

    const uint maxBlockSize = 0x100000;
    Stream outputStream = new MemoryStream();
    // copying pcc header
    byte[] buffer = new byte[130];
    uncompressedPcc.Seek(0, SeekOrigin.Begin);
    uncompressedPcc.Read(buffer, 0, 130);
    outputStream.Write(buffer, 0, buffer.Length);

    //add compressed pcc flag
    uncompressedPcc.Seek(12, SeekOrigin.Begin);
    folderNameLength = uncompressedPcc.ReadValueS32(); // NOTE(review): no endian arg here, unlike the first header pass — confirm for big-endian input
    folderNameByteLength = folderNameLength >= 0 ? folderNameLength : (-folderNameLength * 2);
    uncompressedPcc.Seek(folderNameByteLength, SeekOrigin.Current);
    outputStream.Seek(uncompressedPcc.Position, SeekOrigin.Begin);

    packageFlags = uncompressedPcc.ReadValueU32();
    packageFlags |= 0x02000000; // add compression flag
    outputStream.WriteValueU32(packageFlags);
    outputStream.Seek(buffer.Length, SeekOrigin.Begin);

    long outOffsetData;
    long outOffsetBlockInfo;
    long inOffsetData = namesOffset;
    List<int> blockSizes = new List<int>();
    // Everything from the name table up to the first export's data seeds the first block.
    int countSize = (int)(exportDataOffsets.Min(obj => obj.Key) - namesOffset);

    //count the number of blocks and relative sizes
    uint lastOffset = exportDataOffsets.Min(obj => obj.Key);
    foreach (KeyValuePair<uint, uint> exportInfo in exportDataOffsets)
    {
        // part that adds empty spaces (leaved when editing export data and moved to the end of pcc) into the count
        if (exportInfo.Key != lastOffset)
        {
            int emptySpace = (int)(exportInfo.Key - lastOffset);
            if (countSize + emptySpace > maxBlockSize)
            {
                blockSizes.Add(countSize);
                countSize = 0;
            }
            else
                countSize += emptySpace;
        }

        // adds export data into the count
        if (countSize + exportInfo.Value > maxBlockSize)
        {
            blockSizes.Add(countSize);
            countSize = (int)exportInfo.Value;
        }
        else
        {
            countSize += (int)exportInfo.Value;
        }
        lastOffset = exportInfo.Key + exportInfo.Value;
    }
    blockSizes.Add(countSize);

    outputStream.WriteValueS32(blockSizes.Count);
    outOffsetBlockInfo = outputStream.Position;
    // The block-info table is 16 bytes per block; compressed data follows it.
    outOffsetData = namesOffset + (blockSizes.Count * 16);

    uncompressedPcc.Seek(namesOffset, SeekOrigin.Begin);
    //divide the block in segments
    for (int i = 0; i < blockSizes.Count; i++)
    {
        int currentUncBlockSize = blockSizes[i];

        // Block info record: unc. offset, unc. size, cpr. offset, cpr. size.
        outputStream.Seek(outOffsetBlockInfo, SeekOrigin.Begin);
        outputStream.WriteValueU32((uint)uncompressedPcc.Position);
        outputStream.WriteValueS32(currentUncBlockSize);
        outputStream.WriteValueU32((uint)outOffsetData);

        byte[] inputBlock = new byte[currentUncBlockSize];
        uncompressedPcc.Read(inputBlock, 0, currentUncBlockSize);
        byte[] compressedBlock = ZBlock.Compress(inputBlock, 0, inputBlock.Length);

        outputStream.WriteValueS32(compressedBlock.Length);
        outOffsetBlockInfo = outputStream.Position;

        outputStream.Seek(outOffsetData, SeekOrigin.Begin);
        outputStream.Write(compressedBlock, 0, compressedBlock.Length);
        outOffsetData = outputStream.Position;
    }

    //copying some unknown values + extra names list
    int bufferSize = (int)namesOffset - 0x86;
    buffer = new byte[bufferSize];
    uncompressedPcc.Seek(0x86, SeekOrigin.Begin);
    uncompressedPcc.Read(buffer, 0, buffer.Length);
    outputStream.Seek(outOffsetBlockInfo, SeekOrigin.Begin);
    outputStream.Write(buffer, 0, buffer.Length);

    outputStream.Seek(0, SeekOrigin.Begin);
    return outputStream;
}
/// <summary>
/// Creates a serialized string used in script signature.
/// </summary>
/// <remarks>
/// Uses the Bitcoin CompactSize (var-int) length prefix:
/// length &lt; 253 → single length byte; 0xFD + uint16; 0xFE + uint32; 0xFF + uint64.
/// </remarks>
/// <example>
/// python: http://runnable.com/U3Mya-5oZntF5Ira/bitcoin-coinbase-serialize-string-python
/// nodejs: https://github.com/zone117x/node-stratum-pool/blob/dfad9e58c661174894d4ab625455bb5b7428881c/lib/util.js#L153
/// </example>
/// <param name="input">string to serialize.</param>
/// <returns>length-prefixed serialized bytes.</returns>
public static byte[] SerializeString(string input)
{
    // Short strings: a single length byte followed by the UTF-8 payload.
    if (input.Length < 253)
        return ArrayHelpers.Combine(new[] { (byte)input.Length }, Encoding.UTF8.GetBytes(input));

    // if input string is >=253, we need need a special format.
    byte[] result;
    using (var stream = new MemoryStream())
    {
        if (input.Length < 0x10000)
        {
            stream.WriteValueU8(253);
            stream.WriteValueU16(((UInt16)input.Length).LittleEndian()); // write packed length.
        }
        else if ((long)input.Length < 0x100000000)
        {
            stream.WriteValueU8(254);
            stream.WriteValueU32(((UInt32)input.Length).LittleEndian()); // write packed length.
        }
        else
        {
            stream.WriteValueU8(255);
            // BUG FIX: the 0xFF marker requires a 64-bit packed length per the
            // CompactSize encoding; the previous UInt16 write truncated the
            // length and produced an undecodable stream.
            stream.WriteValueU64(((UInt64)input.Length).LittleEndian()); // write packed length.
        }

        stream.WriteString(input);
        result = stream.ToArray();
    }
    return result;
}
public byte[] ToArray(uint pccExportDataOffset, ME3PCCObject pcc)
{
    using (MemoryStream output = new MemoryStream())
    {
        output.WriteBytes(headerData);

        // Heff: Seems like the shadowmap was the best solution in most cases,
        // adding an exception for known problematic animated textures for now.
        // (See popup in tpftools)
        if (properties.ContainsKey("LODGroup"))
        {
            properties["LODGroup"].Value.String2 = "TEXTUREGROUP_Shadowmap";
        }
        else
        {
            // No LODGroup present: emit a synthetic ByteProperty for it.
            output.WriteValueS64(pcc.addName2("LODGroup"));
            output.WriteValueS64(pcc.addName2("ByteProperty"));
            output.WriteValueS64(8);
            output.WriteValueS64(pcc.addName2("TextureGroup"));
            output.WriteValueS64(pcc.addName2("TEXTUREGROUP_Shadowmap"));
        }

        foreach (KeyValuePair<string, SaltPropertyReader.Property> entry in properties)
        {
            SaltPropertyReader.Property property = entry.Value;

            if (property.Name == "UnpackMin")
            {
                // UnpackMin is written once per slot, indexed 0..UnpackNum-1.
                for (int slot = 0; slot < UnpackNum; slot++)
                {
                    output.WriteValueS64(pcc.addName2(property.Name));
                    output.WriteValueS64(pcc.addName2(property.TypeVal.ToString()));
                    output.WriteValueS32(property.Size);
                    output.WriteValueS32(slot);
                    output.WriteValueF32(property.Value.FloatValue);
                }
                continue;
            }

            output.WriteValueS64(pcc.addName2(property.Name));
            if (property.Name == "None")
                continue;
            output.WriteValueS64(pcc.addName2(property.TypeVal.ToString()));
            output.WriteValueS64(property.Size);

            switch (property.TypeVal)
            {
                case SaltPropertyReader.Type.FloatProperty:
                    output.WriteValueF32(property.Value.FloatValue);
                    break;
                case SaltPropertyReader.Type.IntProperty:
                    output.WriteValueS32(property.Value.IntValue);
                    break;
                case SaltPropertyReader.Type.NameProperty:
                    // Heff: Modified to handle name references.
                    output.WriteValueS64(pcc.addName2(property.Value.StringValue));
                    break;
                case SaltPropertyReader.Type.ByteProperty:
                    // Heff: Modified to handle name references.
                    output.WriteValueS64(pcc.addName2(property.Value.StringValue));
                    output.WriteValueS64(pcc.addName2(property.Value.String2));
                    break;
                case SaltPropertyReader.Type.BoolProperty:
                    output.WriteValueBoolean(property.Value.Boolereno);
                    break;
                case SaltPropertyReader.Type.StructProperty:
                    output.WriteValueS64(pcc.addName2(property.Value.StringValue));
                    for (int i = 0; i < property.Size; i++)
                        output.WriteByte((byte)property.Value.Array[i].IntValue);
                    break;
                default:
                    throw new NotImplementedException("Property type: " + property.TypeVal + ", not yet implemented. TELL ME ABOUT THIS!");
            }
        }

        // Drop empty mip entries before writing the mipmap table.
        List<ImageInfo> retained = new List<ImageInfo>();
        foreach (ImageInfo info in privateimgList)
        {
            if (info.storageType != storage.empty)
                retained.Add(info);
        }
        privateimgList = retained;
        numMipMaps = (uint)privateimgList.Count;
        output.WriteValueU32(numMipMaps);

        foreach (ImageInfo info in privateimgList)
        {
            output.WriteValueS32((int)info.storageType);
            output.WriteValueS32(info.uncSize);
            output.WriteValueS32(info.cprSize);
            if (info.storageType == storage.pccSto)
            {
                // Pixels live inside the pcc: absolute offset, then the raw data.
                output.WriteValueS32((int)(info.offset + pccExportDataOffset + dataOffset));
                output.Write(imageData, info.offset, info.uncSize);
            }
            else
            {
                output.WriteValueS32(info.offset);
            }
            output.WriteValueU32(info.imgSize.width);
            output.WriteValueU32(info.imgSize.height);
        }

        output.WriteBytes(footerData);
        return output.ToArray();
    }
}
/// <summary>
/// Serializes the DRM container (header, section headers, string tables,
/// resolver blobs and section payloads) into a rewound memory stream.
/// </summary>
/// <returns>stream positioned at 0 containing the serialized DRM data.</returns>
public Stream Serialize()
{
    MemoryStream data = new MemoryStream();
    uint sectionCount = (uint)this.Sections.Count;
    Stream[] resolvers = new MemoryStream[sectionCount]; // for serialized resolvers
    uint unknown04_size = 0;
    uint unknown08_size = 0;

    // Write DRM Header
    data.WriteValueU32(this.Version, this.Endianness);
    data.WriteValueU32(0); // placeholder for unknown04_size, patched below
    data.WriteValueU32(0); // placeholder for unknown08_size, patched below
    data.WriteValueU32(0); // unknown0C
    data.WriteValueU32(0); // unknown10
    data.WriteValueU32(sectionCount, this.Endianness);

    // Write DRM Section Headers
    for (int i = 0; i < sectionCount; i++)
    {
        DRM.Section section = this.Sections[i];

        // Serialize resolvers to get length, data will be used later
        uint resolverLen;
        if (section.Resolver != null)
        {
            resolvers[i] = section.Resolver.Serialize(this.Endianness);
            resolverLen = (uint)resolvers[i].Length;
        }
        else
        {
            resolvers[i] = null;
            resolverLen = 0;
        }

        data.WriteValueU32((uint)section.Data.Length, this.Endianness);
        data.WriteValueU8((byte)section.Type);
        data.WriteValueU8(section.Unknown05);
        data.WriteValueU16(section.Unknown06, this.Endianness);
        // Resolver length is packed into the upper 24 bits of the flags field.
        data.WriteValueU32((uint)section.Flags | (resolverLen << 8), this.Endianness);
        data.WriteValueU32(section.Id, this.Endianness);
        data.WriteValueU32(section.Unknown10, this.Endianness);
    }

    // Write Unknown08s (NUL-terminated strings; +1 accounts for the terminator)
    for (int i = 0; i < Unknown08s.Count; i++)
    {
        unknown08_size += ((uint)Unknown08s[i].Length + 1);
        data.WriteStringZ(Unknown08s[i]);
    }

    // Write Unknown04s
    for (int i = 0; i < Unknown04s.Count; i++)
    {
        unknown04_size += ((uint)Unknown04s[i].Length + 1);
        data.WriteStringZ(Unknown04s[i]);
    }

    // Write DRM Section Data (resolver blob, when present, precedes the payload)
    for (int i = 0; i < sectionCount; i++)
    {
        if (resolvers[i] != null)
        {
            data.WriteFromStream(resolvers[i], resolvers[i].Length);
        }
        data.WriteFromStream(this.Sections[i].Data, this.Sections[i].Data.Length);
        this.Sections[i].Data.Position = 0; // rewind so the source can be re-serialized
    }

    // Go back and write unknowns length.
    // BUG FIX: these two writes previously used the default endianness while the
    // rest of the header honors this.Endianness, corrupting big-endian output.
    data.Seek(4, SeekOrigin.Begin);
    data.WriteValueU32(unknown04_size, this.Endianness);
    data.WriteValueU32(unknown08_size, this.Endianness);

    data.Position = 0;
    return data;
}