/// <summary>
/// Writes this record's <c>Values</c> at their field offsets relative to the writer's
/// current position. Int32 fields with a partial bitmask are OR-merged into a shared
/// 32-bit slot per offset and flushed once at the end.
/// </summary>
/// <param name="writer">Destination writer; its position on entry is the record base.</param>
/// <param name="fields">Field descriptors parallel to <c>Values</c>.</param>
internal void Write(BinaryDataWriter writer, List<Field> fields)
{
    // Accumulates packed bitfield values keyed by field offset so multiple
    // bitmasked Int32 fields sharing one 32-bit slot are combined.
    Dictionary<ushort, uint> buffer = new Dictionary<ushort, uint>(fields.Count);
    long pos = writer.Position;
    for (int i = 0; i < fields.Count; i++)
    {
        Field field = fields[i];
        writer.Seek(pos + field.Offset, SeekOrigin.Begin);
        switch (field.Type)
        {
            case FieldType.Int32:
                uint value = (uint)Values[i];
                // Leftover debug Console.WriteLine removed from this write path.
                if (field.Bitmask == uint.MaxValue)
                {
                    // Field owns the full 32 bits; write it directly.
                    writer.Write(value);
                }
                else
                {
                    // Partial field; merge into the shared slot at this offset.
                    // TryGetValue leaves 'existing' at 0 when the slot is new.
                    buffer.TryGetValue(field.Offset, out uint existing);
                    buffer[field.Offset] = existing | ((value << field.Shift) & field.Bitmask);
                }
                break;
            case FieldType.Float:
                writer.Write((float)Values[i]);
                break;
            case FieldType.String:
                writer.Write((string)Values[i], BinaryStringFormat.ZeroTerminated);
                break;
            case FieldType.Int16:
                writer.Write((short)Values[i]);
                break;
            case FieldType.Byte:
                writer.Write((byte)Values[i]);
                break;
            case FieldType.StringJIS:
                writer.Write((string)Values[i], BinaryStringFormat.ZeroTerminated, Encoding.GetEncoding("shift_jis"));
                break;
        }
    }
    // Flush the packed bitfield slots once after all fields were processed.
    // (The original flushed inside the loop, rewriting intermediate values
    // redundantly; the final bytes and end position are identical.)
    foreach (var val in buffer)
    {
        writer.Seek(pos + val.Key, SeekOrigin.Begin);
        writer.Write(val.Value);
    }
}
/// <summary>
/// Pads the stream with <paramref name="value"/> bytes until the writer's position
/// is a multiple of <paramref name="alignment"/>.
/// </summary>
/// <param name="writer">The writer whose position is advanced to the boundary.</param>
/// <param name="alignment">The alignment to pad up to.</param>
/// <param name="value">The byte used as padding filler.</param>
private void AlignBytes(BinaryDataWriter writer, int alignment, byte value = 0x00)
{
    long current = writer.Position;
    // Relative seek by the pad amount; Seek returns the target (aligned) position.
    long target = writer.Seek((-current % alignment + alignment) % alignment, SeekOrigin.Current);
    writer.Seek(current, SeekOrigin.Begin);
    // Fill the gap with the padding byte until the boundary is reached.
    for (long i = current; i < target; i++)
    {
        writer.Write(value);
    }
}
/// <summary>
/// Writes the table header, the field descriptors, then each record at a fixed
/// record stride, finally padding the output to a 0x20 boundary with 0x40 bytes.
/// </summary>
/// <param name="writer">Destination writer, positioned at the start of the table.</param>
private void Write(BinaryDataWriter writer)
{
    if (IsBigEndian)
    {
        writer.ByteOrder = ByteOrder.BigEndian;
    }
    uint recordSize = MaxFieldSize();
    // Header layout: record count, field count, record-data offset, record size.
    writer.Write(Records.Count);
    writer.Write(Fields.Count);
    // 16-byte header followed by 12 bytes per field descriptor.
    writer.Write(16 + (Fields.Count * 12));
    // Reuse the cached size instead of recomputing MaxFieldSize().
    writer.Write(recordSize);
    for (int i = 0; i < Fields.Count; i++)
    {
        Fields[i].Write(writer);
    }
    long pos = writer.Position;
    for (int i = 0; i < Records.Count; i++)
    {
        // Each record starts at a fixed stride so sparse field writes cannot drift.
        writer.Seek(pos + (i * recordSize), SeekOrigin.Begin);
        Records[i].Write(writer, Fields);
    }
    AlignBytes(writer, 0x20, 0x40);
}
/// <summary>
/// Serializes every shader parameter's value into one contiguous blob, recording
/// each parameter's data offset and dependency indices along the way.
/// </summary>
/// <param name="byteOrder">Endianness used for the raw parameter data.</param>
/// <returns>The raw parameter data as a byte array.</returns>
private byte[] WriteShaderParams(ByteOrder byteOrder)
{
    var stream = new MemoryStream();
    using (var writer = new BinaryDataWriter(stream))
    {
        writer.ByteOrder = byteOrder;
        int paramIndex = 0;
        uint runningOffset = 0;
        foreach (var param in ShaderParams.Values)
        {
            param.DataOffset = (ushort)runningOffset;
            param.DependIndex = (ushort)paramIndex;
            param.DependedIndex = (ushort)paramIndex;
            writer.Seek(param.DataOffset, SeekOrigin.Begin);
            WriteParamData(writer, param.DataValue);
            // Next parameter starts after this one's data plus its padding.
            runningOffset += param.DataSize + (uint)param.PaddingLength;
            paramIndex++;
        }
    }
    // MemoryStream.ToArray is documented to work even after the stream is closed.
    return stream.ToArray();
}
/// <summary>
/// Interleaves all attributes into one raw buffer: for every vertex, each attribute
/// is written at the current position and the cursor then advances by that
/// attribute's stride.
/// </summary>
/// <param name="helperAttribs">The attributes to interleave; may be empty.</param>
/// <returns>The interleaved raw vertex data.</returns>
private byte[] ToRawData(List<VertexBufferHelperAttrib> helperAttribs)
{
    // Guard: no attributes means no data. (The original indexed helperAttribs[0]
    // unconditionally and threw on an empty list.)
    if (helperAttribs.Count == 0)
    {
        return new byte[0];
    }
    int length = 0;
    for (int i = 0; i < helperAttribs.Count; i++)
    {
        length += helperAttribs[i].Data.Length * (int)helperAttribs[i].Stride;
    }
    // Create a writer for the raw bytes of the correct endianness.
    byte[] raw = new byte[length];
    using (BinaryDataWriter writer = new BinaryDataWriter(new MemoryStream(raw, true)))
    {
        writer.ByteOrder = ByteOrder;
        // Hoisted out of the vertex loop: the conversion callback depends only on
        // the attribute format, which is loop-invariant per attribute.
        var callbacks = new Action<BinaryDataWriter, Vector4F>[helperAttribs.Count];
        for (int i = 0; i < helperAttribs.Count; i++)
        {
            // Get a conversion callback transforming the Vector4F instances into raw data.
            callbacks[i] = writer.GetGX2AttribCallback(helperAttribs[i].Format);
        }
        // NOTE(review): assumes every attribute has the same Data.Length as
        // helperAttribs[0] — confirm with callers.
        for (int v = 0; v < helperAttribs[0].Data.Length; v++)
        {
            for (int i = 0; i < helperAttribs.Count; i++)
            {
                long pos = writer.Position;
                callbacks[i].Invoke(writer, helperAttribs[i].Data[v]);
                // Advance exactly one stride even if the callback wrote fewer bytes.
                writer.Seek(pos + helperAttribs[i].Stride, SeekOrigin.Begin);
            }
        }
    }
    return raw;
}
/// <summary>
/// Converts a single attribute's <c>Vector4F</c> elements into raw bytes, writing
/// one element per stride-sized slot.
/// </summary>
/// <param name="helperAttrib">The attribute whose data is converted.</param>
/// <returns>The raw attribute data.</returns>
private byte[] ToRawData(VertexBufferHelperAttrib helperAttrib)
{
    // Allocate the output buffer and wrap it in a writer of the correct endianness.
    byte[] buffer = new byte[helperAttrib.Data.Length * helperAttrib.Stride];
    using (BinaryDataWriter writer = new BinaryDataWriter(new MemoryStream(buffer, true)))
    {
        writer.ByteOrder = ByteOrder;
        // Conversion callback turning Vector4F elements into raw attribute bytes.
        Action<BinaryDataWriter, Vector4F> convert = writer.GetGX2AttribCallback(helperAttrib.Format);
        for (int i = 0; i < helperAttrib.Data.Length; i++)
        {
            long elementStart = writer.Position;
            convert(writer, helperAttrib.Data[i]);
            // Advance exactly one stride regardless of how many bytes were written.
            writer.Seek(elementStart + helperAttrib.Stride, SeekOrigin.Begin);
        }
    }
    return buffer;
}
/// <summary>
/// Saves the message container to <paramref name="path"/>: header, then the
/// ID/address table, label strings, and message strings for all labeled entries.
/// The header is rewritten at the end once the block sizes are known.
/// </summary>
/// <param name="path">Destination file path; an existing file is overwritten.</param>
public void Save(string path)
{
    // 'using' guarantees the FileStream is closed even when a write throws
    // (the original leaked the handle on any exception before fs.Close()).
    using (var fs = File.Create(path))
    {
        BinaryDataWriter writer = new BinaryDataWriter(fs, Encoding.UTF8);
        Header.WriteTo(writer);
        // ID/address table for labeled entries only.
        for (int i = 0; i < Header.MessageCount; i++)
        {
            if (Entries[i].Label != null)
            {
                writer.Write(Entries[i].ID);
                writer.Write(Entries[i].Address);
            }
        }
        var lbl_start = writer.Position;
        for (int i = 0; i < Header.MessageCount; i++)
        {
            if (Entries[i].Label != null)
            {
                writer.Write(Entries[i].Label, BinaryStringFormat.ZeroTerminated);
            }
        }
        var msg_start = writer.Position;
        for (int i = 0; i < Header.MessageCount; i++)
        {
            if (Entries[i].Message != null)
            {
                writer.Write(Entries[i].Message, BinaryStringFormat.ZeroTerminated);
            }
        }
        // Sizes are only known after writing; patch them into the header.
        Header.LabelBlockSize = (uint)(msg_start - lbl_start);
        Header.MessageBlockSize = (uint)(writer.Position - msg_start);
        writer.Seek(0, SeekOrigin.Begin);
        Header.WriteTo(writer);
        writer.Flush();
    }
}
// Writes the complete BYAML body: header, name/string/path arrays, then the
// value stack (dictionaries, arrays and 8-byte values) at precomputed offsets.
// NOTE(review): node offsets in _dictionaries/_arrays/_eightByteValues are
// presumably assigned by an earlier collection pass — confirm before editing.
protected void WriteContent(object rootReferenceKey)
{
    using (_writer)
    {
        // Write the header, specifying magic bytes, version and main node offsets.
        _writer.Write(BYAML_MAGIC);
        _writer.Write(_version);
        Offset nameArrayOffset = _writer.ReserveOffset();
        Offset stringArrayOffset = _writer.ReserveOffset();
        Offset pathArrayOffset = _supportPaths ? _writer.ReserveOffset() : null;
        Offset rootOffset = _writer.ReserveOffset();
        // Write the main nodes.
        _writer.Align(4);
        nameArrayOffset.Satisfy();
        WriteStringArrayNode(_writer, _nameArray);
        if (_stringArray.Length == 0)
        {
            // No strings: satisfy the reserved offset with 0 (absent node).
            stringArrayOffset.Satisfy(0);
        }
        else
        {
            _writer.Align(4);
            stringArrayOffset.Satisfy();
            WriteStringArrayNode(_writer, _stringArray);
        }
        // Include a path array offset if requested.
        if (_supportPaths)
        {
            if (_pathArray.Count == 0)
            {
                pathArrayOffset.Satisfy(0);
            }
            else
            {
                _writer.Align(4);
                pathArrayOffset.Satisfy();
                WritePathArrayNode(_writer, _pathArray);
            }
        }
        _writer.Align(4);
        //write value stack (Dictionary, Array, long, uint, double)
        int valStackPos = (int)_writer.BaseStream.Position;
        //write all dictionaries
        foreach (KeyValuePair<object, ByamlDict> keyValuePair in _dictionaries)
        {
            // Each node is written at its precomputed offset inside the value stack.
            _writer.Seek(valStackPos + keyValuePair.Value.offset, SeekOrigin.Begin);
            if (keyValuePair.Key == rootReferenceKey)
            {
                // Reference equality: this dictionary is the document root.
                rootOffset.Satisfy();
            }
            // Node header: type byte plus 24-bit entry count, laid out per byte order.
            if (_byteOrder == ByteOrder.BigEndian)
            {
                _writer.Write((uint)ByamlNodeType.Dictionary << 24 | (uint)keyValuePair.Value.entries.Length);
            }
            else
            {
                _writer.Write((uint)ByamlNodeType.Dictionary | (uint)keyValuePair.Value.entries.Length << 8);
            }
            foreach ((string key, Entry entry) in keyValuePair.Value.entries)
            {
                // Entry header: 24-bit name index plus the value's type byte.
                if (_byteOrder == ByteOrder.BigEndian)
                {
                    _writer.Write(Array.IndexOf(_nameArray, key) << 8 | (byte)entry.type);
                }
                else
                {
                    _writer.Write(Array.IndexOf(_nameArray, key) | (byte)entry.type << 24);
                }
                WriteValue(entry);
            }
        }
        //write all arrays
        foreach (KeyValuePair<object, ByamlArr> keyValuePair in _arrays)
        {
            _writer.Seek(valStackPos + keyValuePair.Value.offset, SeekOrigin.Begin);
            if (keyValuePair.Key == rootReferenceKey)
            {
                rootOffset.Satisfy();
            }
            if (_byteOrder == ByteOrder.BigEndian)
            {
                _writer.Write((uint)ByamlNodeType.Array << 24 | (uint)keyValuePair.Value.entries.Length);
            }
            else
            {
                _writer.Write((uint)ByamlNodeType.Array | (uint)keyValuePair.Value.entries.Length << 8);
            }
            // Arrays store all type bytes first, 4-byte aligned, then all values.
            foreach (Entry entry in keyValuePair.Value.entries)
            {
                _writer.Write((byte)entry.type);
            }
            _writer.Align(4);
            foreach (Entry entry in keyValuePair.Value.entries)
            {
                WriteValue(entry);
            }
        }
        //write all 8 byte values
        foreach (var keyValuePair in _eightByteValues)
        {
            _writer.Seek(valStackPos + keyValuePair.Value, SeekOrigin.Begin);
            _writer.Write(keyValuePair.Key);
        }
        void WriteValue(Entry entry)
        {
            // Only write the offset for the complex value contents, write simple values directly.
            switch (entry.type)
            {
                case ByamlNodeType.StringIndex:
                    _writer.Write((uint)Array.IndexOf(_stringArray, entry.value));
                    break;
                case ByamlNodeType.PathIndex:
                    _writer.Write(_pathArray.IndexOf(entry.value));
                    break;
                case ByamlNodeType.Dictionary:
                    // Complex values store an absolute offset into the value stack.
                    _writer.Write(valStackPos + _dictionaries[(object)entry.value].offset);
                    break;
                case ByamlNodeType.Array:
                    _writer.Write(valStackPos + _arrays[(object)entry.value].offset);
                    break;
                case ByamlNodeType.Boolean:
                    _writer.Write(entry.value ? 1 : 0);
                    break;
                case ByamlNodeType.Integer:
                case ByamlNodeType.Float:
                case ByamlNodeType.UInteger:
                    _writer.Write(entry.value);
                    break;
                case ByamlNodeType.Double:
                case ByamlNodeType.ULong:
                case ByamlNodeType.Long:
                    // 8-byte values live in their own pool; write a pointer to them.
                    _writer.Write(valStackPos + _eightByteValues[entry.value]);
                    return;
                case ByamlNodeType.Null:
                    _writer.Write(0);
                    break;
            }
        }
    }
}
/// <summary>
/// Decompresses the Yaz0-compressed contents of the input <see cref="Stream"/> and writes them directly into
/// the given output <see cref="MemoryStream"/>. Both streams stay open after this method returned the number of
/// decompressed bytes written.
/// </summary>
/// <param name="input">The input <see cref="Stream"/> from which the Yaz0-compressed data will be read.</param>
/// <param name="output">The output <see cref="MemoryStream"/> to which the decompressed data will be written
/// directly.</param>
/// <returns>The number of decompressed bytes written to the output stream.</returns>
public static int Decompress(Stream input, MemoryStream output)
{
    using (BinaryDataReader reader = new BinaryDataReader(input, true))
    using (BinaryDataWriter writer = new BinaryDataWriter(output, true))
    {
        // Yaz0 headers and offsets are always big endian.
        reader.ByteOrder = ByteOrder.BigEndian;
        // Read and check the header.
        if (reader.ReadString(4) != "Yaz0")
        {
            throw new Yaz0Exception("Invalid Yaz0 header.");
        }
        uint decompressedSize = reader.ReadUInt32();
        reader.Position += 8; // Padding
        // Decompress the data.
        int decompressedBytes = 0;
        while (decompressedBytes < decompressedSize)
        {
            // Read the configuration byte of a decompression setting group, and go through each bit of it.
            byte groupConfig = reader.ReadByte();
            for (int i = 7; i >= 0; i--)
            {
                // Check if bit of the current chunk is set.
                if ((groupConfig & (1 << i)) == (1 << i))
                {
                    // Bit is set, copy 1 raw byte to the output.
                    writer.Write(reader.ReadByte());
                    decompressedBytes++;
                }
                else if (decompressedBytes < decompressedSize) // This does not make sense for last byte.
                {
                    // Bit is not set and data copying configuration follows, either 2 or 3 bytes long.
                    ushort dataBackSeekOffset = reader.ReadUInt16();
                    int dataSize;
                    // If the nibble of the first back seek offset byte is 0, the config is 3 bytes long.
                    byte nibble = (byte)(dataBackSeekOffset >> 12 /*1 byte (8 bits) + 1 nibble (4 bits)*/);
                    if (nibble == 0)
                    {
                        // Nibble is 0, the number of bytes to read is in third byte, which is (size + 0x12).
                        dataSize = reader.ReadByte() + 0x12;
                    }
                    else
                    {
                        // Nibble is not 0, and determines (size + 0x02) of bytes to read.
                        dataSize = nibble + 0x02;
                        // Remaining bits are the real back seek offset.
                        dataBackSeekOffset &= 0x0FFF;
                    }
                    // Since bytes can be reread right after they were written, write and read bytes one by one.
                    for (int j = 0; j < dataSize; j++)
                    {
                        // Read one byte from the current back seek position.
                        writer.Position -= dataBackSeekOffset + 1;
                        byte readByte = (byte)writer.BaseStream.ReadByte();
                        // Write the byte to the end of the memory stream.
                        writer.Seek(0, SeekOrigin.End);
                        writer.Write(readByte);
                        decompressedBytes++;
                    }
                }
            }
        }
        return(decompressedBytes);
    }
}
/// <summary>
/// Decompresses the Yaz0-compressed contents of the input <see cref="Stream"/> and writes them directly into
/// the given output <see cref="MemoryStream"/>. Both streams stay open after this method returned the number of
/// decompressed bytes written.
/// </summary>
/// <param name="input">The input <see cref="Stream"/> from which the Yaz0-compressed data will be read.</param>
/// <param name="output">The output <see cref="MemoryStream"/> to which the decompressed data will be written
/// directly.</param>
/// <returns>The number of decompressed bytes written to the output stream.</returns>
public static int Decompress(Stream input, MemoryStream output)
{
    using (BinaryDataReader reader = new BinaryDataReader(input, true))
    using (BinaryDataWriter writer = new BinaryDataWriter(output, true))
    {
        reader.ByteOrder = ByteOrder.BigEndian;
        // Validate the magic bytes before touching any data.
        if (reader.ReadString(4) != "Yaz0")
        {
            throw new Yaz0Exception("Invalid Yaz0 header.");
        }
        uint decompressedSize = reader.ReadUInt32();
        reader.Position += 8; // Skip the 8 reserved/padding bytes of the header.
        int written = 0;
        while (written < decompressedSize)
        {
            // Every group starts with a flag byte whose bits are consumed MSB first.
            byte flags = reader.ReadByte();
            for (int mask = 0x80; mask != 0; mask >>= 1)
            {
                if ((flags & mask) != 0)
                {
                    // Flag set: one literal byte copied straight through.
                    writer.Write(reader.ReadByte());
                    written++;
                }
                else if (written < decompressedSize) // Trailing flag bits past the end carry no data.
                {
                    // Flag clear: a back reference of 2 bytes, optionally followed by a length byte.
                    ushort reference = reader.ReadUInt16();
                    int copyLength;
                    int backOffset;
                    if ((reference >> 12) == 0)
                    {
                        // Top nibble 0: the length lives in a third byte, biased by 0x12.
                        copyLength = reader.ReadByte() + 0x12;
                        backOffset = reference;
                    }
                    else
                    {
                        // Top nibble holds (length - 2); the low 12 bits are the offset.
                        copyLength = (reference >> 12) + 0x02;
                        backOffset = reference & 0x0FFF;
                    }
                    // Copy byte by byte because the source window may overlap bytes just written.
                    for (int j = 0; j < copyLength; j++)
                    {
                        writer.Position -= backOffset + 1;
                        byte copied = (byte)writer.BaseStream.ReadByte();
                        writer.Seek(0, SeekOrigin.End);
                        writer.Write(copied);
                        written++;
                    }
                }
            }
        }
        return written;
    }
}