public void CreateFromSubStreamThrowIfInvalidArgument()
{
    var stream = new MemoryStream();
    stream.WriteByte(0xCA);
    stream.WriteByte(0xFE);

    Assert.That(
        () => DataStreamFactory.FromStream(null, 0, 0),
        Throws.ArgumentNullException);
    Assert.That(
        () => DataStreamFactory.FromStream(stream, -1, 0),
        Throws.InstanceOf<ArgumentOutOfRangeException>());
    Assert.That(
        () => DataStreamFactory.FromStream(stream, 0, -1),
        Throws.InstanceOf<ArgumentOutOfRangeException>());
    Assert.That(
        () => DataStreamFactory.FromStream(stream, 3, 0),
        Throws.InstanceOf<ArgumentOutOfRangeException>());
    Assert.That(
        () => DataStreamFactory.FromStream(stream, 1, 2),
        Throws.InstanceOf<ArgumentOutOfRangeException>());

    stream.Dispose();
}
public void CreateFromStreamAllowsToExpand()
{
    using var stream = new MemoryStream();
    var dataStream = DataStreamFactory.FromStream(stream);

    // Write through the DataStream, not the base stream, so the test
    // actually exercises expansion of the wrapper.
    Assert.That(() => dataStream.WriteByte(0xFE), Throws.Nothing);
    Assert.That(dataStream.Length, Is.EqualTo(1));

    dataStream.Dispose();
}
public void CreateFromSubStreamDoesNotAllowToExpand()
{
    using var stream = new MemoryStream();
    stream.WriteByte(0xCA);
    stream.WriteByte(0xFE);
    stream.WriteByte(0xBE);

    var dataStream = DataStreamFactory.FromStream(stream, 1, 2);
    dataStream.Position = 2;
    Assert.That(() => dataStream.WriteByte(0xAA), Throws.InvalidOperationException);
}
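// The two tests above pin down the expansion rule: a DataStream over a
// whole stream may grow, while a substream view has a fixed length. Below
// is a minimal sketch of guarding writes near the end of a fixed-length
// view, assuming Yarhl's DataStream API; the SubStreamGuard class and
// TryWriteByte helper are hypothetical, not part of the library.
using Yarhl.IO;

static class SubStreamGuard
{
    public static bool TryWriteByte(DataStream dataStream, byte value)
    {
        // A view from FromStream(stream, offset, length) cannot grow,
        // so a write at Position == Length would throw.
        if (dataStream.Position >= dataStream.Length) {
            return false;
        }

        dataStream.WriteByte(value);
        return true;
    }
}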
public void InstanceWriter()
{
    // Get the new header and calculate the absolute position.
    var tradHeader = PeFile.Sections.First(x => x.Name == ".trad");

    // Initialize the writer.
    Writer = new DataWriter(DataStreamFactory.FromStream(GenerateStream(PeFile))) {
        Stream = { Position = (long)tradHeader.Offset },
    };
}
public void CreateFromStreamUseStream()
{
    var stream = new MemoryStream();
    var dataStream = DataStreamFactory.FromStream(stream);

    Assert.That(dataStream.BaseStream, Is.AssignableFrom<StreamWrapper>());
    Assert.That(
        ((StreamWrapper)dataStream.BaseStream).BaseStream,
        Is.SameAs(stream));

    stream.Dispose();
}
public void CreateFromSubStreamTransferOwnership()
{
    using var stream = new MemoryStream();
    stream.WriteByte(0xCA);

    int beforeCount = DataStream.ActiveStreams;
    var dataStream = DataStreamFactory.FromStream(stream, 0, 1);
    Assert.That(DataStream.ActiveStreams, Is.EqualTo(beforeCount + 1));

    // Ownership was transferred: disposing the DataStream also
    // disposes the base stream.
    dataStream.Dispose();
    Assert.That(() => stream.ReadByte(), Throws.InstanceOf<ObjectDisposedException>());
}
public void CreateFromSubStreamUseStream()
{
    var stream = new MemoryStream();
    stream.WriteByte(0xCA);
    stream.WriteByte(0xFE);
    stream.WriteByte(0xBE);

    var dataStream = DataStreamFactory.FromStream(stream, 1, 2);

    Assert.That(dataStream.BaseStream, Is.AssignableFrom<StreamWrapper>());
    Assert.That(
        ((StreamWrapper)dataStream.BaseStream).BaseStream,
        Is.SameAs(stream));
    Assert.That(dataStream.Position, Is.EqualTo(0));
    Assert.That(dataStream.Offset, Is.EqualTo(1));
    Assert.That(dataStream.Length, Is.EqualTo(2));

    stream.Dispose();
}
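// The test above fixes the substream semantics: Position is relative to
// the view, while Offset and Length describe the window into the base
// stream. A minimal usage sketch under those assumptions; note that, as
// CreateFromSubStreamTransferOwnership shows, disposing the view also
// disposes the base stream.
using System.IO;
using Yarhl.IO;

using var stream = new MemoryStream(new byte[] { 0xCA, 0xFE, 0xBE });

// View bytes [1, 3) of the base stream as an independent stream.
using var slice = DataStreamFactory.FromStream(stream, 1, 2);
var reader = new DataReader(slice);
byte first = reader.ReadByte();  // 0xFE, base offset 1
byte second = reader.ReadByte(); // 0xBE, base offset 2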
public BinaryFormat Convert(BinaryFormat source)
{
    if (source == null) {
        throw new ArgumentNullException(nameof(source));
    }

    if (source.Stream.Length < MinimumSize) {
        throw new FormatException("Required more data");
    }

    if (source.Stream.Length > MaximumSize) {
        throw new FormatException("Too much data");
    }

    buffer = new byte[source.Stream.Length];
    source.Stream.Position = 0;
    source.Stream.Read(buffer, 0, buffer.Length);
    idx = 0;

    ReadHeader();

    output = new byte[decompressedLength];
    outIdx = 0;
    while (outIdx < decompressedLength) {
        if (GetNextFlag()) {
            CopyByte();
        } else {
            CopyDecompressedSequence();
        }
    }

    var stream = DataStreamFactory.FromStream(new System.IO.MemoryStream(output));
    return new BinaryFormat(stream);
}
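// A usage sketch for a converter shaped like the one above. The class
// name LzssDecompressor and the file paths are placeholders; ConvertFormat,
// DataStreamFactory.FromFile, and DataStream.WriteTo are Yarhl APIs.
using Yarhl.FileFormat;
using Yarhl.IO;

using var compressed = new BinaryFormat(
    DataStreamFactory.FromFile("input.bin", FileOpenMode.Read));
var decompressed = (BinaryFormat)ConvertFormat.With<LzssDecompressor>(compressed);
decompressed.Stream.WriteTo("output.bin");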
private static Po LoadPo(string language)
{
    string resourceName = $"{ResourcesName.Prefix}.{language}.po";
    var assembly = typeof(L10n).Assembly;
    var stream = assembly.GetManifestResourceStream(resourceName);
    if (stream == null) {
        Logger.Log($"Cannot find language resource: {resourceName}");
        return null;
    }

    try {
        using var binaryPo = new BinaryFormat(DataStreamFactory.FromStream(stream));
        return (Po)ConvertFormat.With<Binary2Po>(binaryPo);
    } catch (Exception ex) {
        Logger.Log($"Error parsing language resource: {ex}");
        return null;
    }
}
public BinaryFormat Convert(Clyt source)
{
    if (source == null) {
        throw new ArgumentNullException(nameof(source));
    }

    var stream = new MemoryStream();
    BinaryFormat binary = new BinaryFormat(DataStreamFactory.FromStream(stream));

    XDocument xml = new XDocument(new XDeclaration("1.0", "utf-8", "yes"));
    XElement root = new XElement("clyt");
    xml.Add(root);
    root.Add(new XElement("children"));
    ExportPanel(root, source.RootPanel);

    xml.Save(stream);

    // The XML was written directly to the base stream, so update the
    // DataStream length to expose the new content.
    binary.Stream.Length = stream.Length;
    return binary;
}
public NodeContainerFormat Convert(NodeContainerFormat source)
{
    if (source == null) {
        throw new ArgumentNullException(nameof(source));
    }

    NodeContainerFormat output = new NodeContainerFormat();
    DataReader komaReader = new DataReader(Koma.Stream);
    int komaEntryNumber = (int)(komaReader.Stream.Length / KOMA_ENTRY_SIZE);
    for (int i = 0; i < komaEntryNumber; i++) {
        byte[] entry = komaReader.ReadBytes(KOMA_ENTRY_SIZE);

        // DTX NAME FROM ARM9
        byte letterKomaName = entry[04];
        byte numberKomaName = entry[05];
        DataReader armReader = new DataReader(Arm.Stream);
        string dtxName = "";
        armReader.Stream.RunInPosition(
            () => { dtxName = armReader.ReadString(); },
            KOMA_NAME_TABLE_OFFSET + (letterKomaName * 4));
        dtxName += "_" + numberKomaName;
        if (numberKomaName == 0) {
            dtxName += 0;
        }

        log.Debug("dtxName:" + dtxName);

        // DTX SHAPE
        byte indexGroupKshape = entry[08];
        byte indexElementKshape = entry[09];
        DataReader komaShapeReader = new DataReader(Komashape.Stream);
        long komaShapeOffset = 0;
        komaShapeReader.Stream.RunInPosition(
            () => komaShapeOffset = ((komaShapeReader.ReadInt32() + indexElementKshape) * 0x18) + 0x40,
            indexGroupKshape * 4);
        log.Debug("komaShapeOffset:" + komaShapeOffset);

        // DTX File
        Node dtx = Navigator.SearchNode<Node>(
            source.Root,
            Path.Combine("/" + Directory, "koma-" + dtxName + ".dtx"));
        DataReader dtxReader = new DataReader(dtx.Stream);
        int magicid = dtxReader.ReadInt32();
        byte type = dtxReader.ReadByte();
        byte type_alt = dtxReader.ReadByte();
        short totalFramesNumber = dtxReader.ReadInt16();
        short digPointer = dtxReader.ReadInt16();
        short unknown = dtxReader.ReadInt16();

        byte[] width = new byte[totalFramesNumber];
        byte[] height = new byte[totalFramesNumber];
        short[] frameIndex = new short[totalFramesNumber];
        for (int j = 0; j < totalFramesNumber; j++) {
            width[j] = dtxReader.ReadByte();
            height[j] = dtxReader.ReadByte();
            frameIndex[j] = dtxReader.ReadInt16();
        }

        BinaryFormat bfDIG = new BinaryFormat(
            dtx.Stream,
            (long)digPointer,
            dtx.Stream.Length - (long)digPointer);
        DIG dig = (DIG)ConvertFormat.With<Binary2DIG>(bfDIG);

        // Iterate KomaShape
        komaShapeReader.Stream.Position = komaShapeOffset;

        // The DIG file is 8 blocks wide and 872 tall:
        // 8 * 872 / 2 = 3488 bytes
        byte[] dtxPixels = new byte[192 * 240 / 2]; // *** REVIEW
        int x = 0;
        int y = 0;
        log.Debug("==KOMASHAPE==");

        // Iterate kshape
        for (int k = 0; k < 0x14; k++) {
            byte blockDTX = komaShapeReader.ReadByte();
            log.Debug(k + " - Byte: " + blockDTX);
            if (blockDTX > 00) {
                blockDTX -= 1;

                // The first block starts here in the dtx
                long startIndex = (frameIndex[blockDTX] * 0x20) + dig.PixelsStart + 32;
                log.Debug("startIndex:" + startIndex);
                int blockSize = width[blockDTX] * 8 * height[blockDTX] * 8;
                for (int l = 0; l < blockSize; l++) {
                    int position = GetIndex(PixelEncoding.Lineal, x, y, 192, 240, new Size(8, 8));
                    dtxPixels[position] = dig.Pixels.GetData()[startIndex + l];
                    log.Debug(l + " - dtxPixels:" + dtxPixels[l]);
                    x += 1;
                    if (x >= 192) {
                        x = 0;
                        y += 1;
                    }

                    log.Debug("x: " + x);
                    log.Debug("y: " + y);
                }
            }

            x += 48;
            if (x >= 192) {
                x = 0;
                y += 48;
            }

            log.Debug("x: " + x);
            log.Debug("y: " + y);
        }

        log.Debug("====");

        // Generate new image
        PixelArray extractedDTX = new PixelArray {
            Width = 192,
            Height = 240,
        };
        Palette palette = dig.Palette;
        extractedDTX.SetData(dtxPixels, PixelEncoding.Lineal, ColorFormat.Indexed_8bpp);
        var img = extractedDTX.CreateBitmap(palette, 0);
        var s = new MemoryStream();
        img.Save(s, System.Drawing.Imaging.ImageFormat.Png);
        img.Save("test.png");

        // Add to container
        var n = new Node(dtxName, new BinaryFormat(DataStreamFactory.FromStream(s)));
        output.Root.Add(n);
    }

    return output;
}
private Tuple<byte[], int> SerializeTable(ArmpTable table, int baseOffset)
{
    using var currentTable = new System.IO.MemoryStream();
    using DataStream ds = DataStreamFactory.FromStream(currentTable);
    var writer = new DataWriter(ds) {
        DefaultEncoding = Encoding.UTF8,
        Endianness = EndiannessMode.LittleEndian,
    };

    var header = new Types.ArmpTableHeader {
        RecordCount = table.RecordCount,
        FieldCount = table.FieldCount,
        ValueStringCount = table.ValueStringCount,
        RecordInvalid = table.RecordInvalid,
        RecordIdPointer = table.RecordIds == null ? -1 : 0,
        RecordExistencePointer = table.RecordExistence == null ? -1 : 0,
        FieldTypePointer = table.FieldTypes == null ? -1 : 0,
        ValuesPointer = 0,
        Id = table.Id,
        Flags = table.Flags,
        ValueStringPointer = 0,
        FieldIdPointer = table.FieldIds == null ? -1 : 0,
        FieldInvalid = table.FieldInvalid,
        RecordOrderPointer = table.RecordOrder == null ? -1 : 0,
        FieldOrderPointer = table.FieldOrder == null ? -1 : 0,
        FieldExistencePointer = table.FieldExistence == null ? -1 : 0,
        IndexerPointer = 0,
        GameVarFieldTypePointer = 0,
        EmptyValuesPointer = table.EmptyValues == null ? -1 : 0,
        RawRecordMemberInfoPointer = table.RawRecordMemberInfo == null ? -1 : 0,
        FieldInfoPointer = table.FieldInfo == null ? -1 : 0,
    };

    int currentOffset = baseOffset;
    long[][] subTablesOffsets = new long[table.FieldCount][];

    // 1. Child tables
    for (int field = 0; field < table.FieldCount; field++) {
        subTablesOffsets[field] = new long[table.RecordCount];
        if (table.RawRecordMemberInfo[field] != Enums.FieldType.Table) {
            continue;
        }

        object[] data = table.Values[field];
        if (data == null) {
            continue;
        }

        for (int record = 0; record < table.RecordCount; record++) {
            var subTable = (ArmpTable)data[record];
            if (subTable == null) {
                subTablesOffsets[field][record] = 0x00000000;
                continue;
            }

            (byte[] subTableData, int subTableOffset) = SerializeTable(subTable, currentOffset);
            long startPos = writer.Stream.Position;
            writer.Write(subTableData);
            subTablesOffsets[field][record] = subTableOffset;
            writer.WritePadding(0x00, 0x10);
            long endPos = writer.Stream.Position;
            currentOffset += (int)(endPos - startPos);
        }
    }

    // 2. Indexer
    if (table.Indexer != null) {
        (byte[] indexerData, int indexerOffset) = SerializeTable(table.Indexer, currentOffset);
        long startPos = writer.Stream.Position;
        writer.Write(indexerData);
        header.IndexerPointer = indexerOffset;
        writer.WritePadding(0x00, 0x10);
        long endPos = writer.Stream.Position;
        currentOffset += (int)(endPos - startPos);
    }

    int currentTableOffset = currentOffset;
    long headerOffset = writer.Stream.Position;
    writer.WriteTimes(0x00, 0x50);
    currentOffset += 0x50; // Header size

    WriteRecordExistence(writer, table, header, ref currentOffset);
    WriteFieldExistence(writer, table, header, ref currentOffset);
    WriteRecordIds(writer, table, header, ref currentOffset);
    WriteFieldIds(writer, table, header, ref currentOffset);
    WriteValueStrings(writer, table, header, ref currentOffset);
    WriteFieldTypes(writer, table, header, ref currentOffset);
    WriteRecordMemberInfo(writer, table, header, ref currentOffset);
    WriteValues(writer, table, header, subTablesOffsets, ref currentOffset);
    WriteEmptyValues(writer, table, header, ref currentOffset);
    WriteRecordOrder(writer, table, header, ref currentOffset);
    WriteFieldOrder(writer, table, header, ref currentOffset);
    WriteFieldInfo(writer, table, header, ref currentOffset);
    WriteGameVarFieldType(writer, table, header, ref currentOffset);

    _ = writer.Stream.Seek(headerOffset, System.IO.SeekOrigin.Begin);
    writer.WriteOfType(header);

    return new Tuple<byte[], int>(currentTable.ToArray(), currentTableOffset);
}
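// SerializeTable follows a two-pass header pattern: reserve the header
// bytes, write the sections while accumulating their offsets, then seek
// back and write the finished header. A minimal sketch of the pattern in
// isolation, assuming Yarhl's DataWriter; names and sizes are illustrative.
using Yarhl.IO;

using DataStream ds = DataStreamFactory.FromMemory();
var writer = new DataWriter(ds);

// Reserve space for a header that is not known yet.
long headerOffset = ds.Position;
writer.WriteTimes(0x00, 8);

// Write the body, remembering where it landed.
int bodyPointer = (int)ds.Position;
writer.Write(new byte[] { 0xCA, 0xFE, 0xBE, 0xBE });

// Seek back and fill in the real header values.
_ = ds.Seek(headerOffset, System.IO.SeekOrigin.Begin);
writer.Write(bodyPointer);
writer.Write((int)ds.Length);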
private Node ProcessDirectory(
    uint directoryIndex,
    DataReader reader,
    ParIndex index,
    ref bool[] processedDirectories)
{
    if (processedDirectories[directoryIndex]) {
        return null;
    }

    _ = reader.Stream.Seek(0x20 + (directoryIndex * 0x40), System.IO.SeekOrigin.Begin);
    string name = reader.ReadString(0x40).TrimEnd('\0');
    if (string.IsNullOrEmpty(name)) {
        name = ".";
    }

    _ = reader.Stream.Seek(index.DirectoryStartOffset + (directoryIndex * 0x20), System.IO.SeekOrigin.Begin);
    var directoryInfo = reader.Read<ParDirectoryInfo>() as ParDirectoryInfo;

    var directory = new Node(name, new NodeContainerFormat()) {
        Tags = {
            ["SubdirectoryCount"] = directoryInfo.SubdirectoryCount,
            ["SubdirectoryStartIndex"] = directoryInfo.SubdirectoryStartIndex,
            ["FileCount"] = directoryInfo.FileCount,
            ["FileStartIndex"] = directoryInfo.FileStartIndex,
            ["RawAttributes"] = directoryInfo.RawAttributes,
        },
    };

    for (uint i = directoryInfo.SubdirectoryStartIndex; i < directoryInfo.SubdirectoryStartIndex + directoryInfo.SubdirectoryCount; i++) {
        Node child = ProcessDirectory(i, reader, index, ref processedDirectories);
        if (child != null) {
            directory.Add(child);
        }
    }

    for (uint i = directoryInfo.FileStartIndex; i < directoryInfo.FileStartIndex + directoryInfo.FileCount; i++) {
        _ = reader.Stream.Seek(0x20 + (0x40 * index.DirectoryCount) + (i * 0x40), System.IO.SeekOrigin.Begin);
        string fileName = reader.ReadString(0x40).TrimEnd('\0');

        _ = reader.Stream.Seek(index.FileStartOffset + (i * 0x20), System.IO.SeekOrigin.Begin);
        var fileInfo = reader.Read<ParFileInfo>() as ParFileInfo;

        long offset = ((long)fileInfo.ExtendedOffset << 32) | fileInfo.DataOffset;
        DataStream stream = DataStreamFactory.FromStream(reader.Stream, offset, fileInfo.CompressedSize);

        var binaryFormat = new ParFile(fileInfo, stream);
        var file = new Node(fileName, binaryFormat) {
            Tags = {
                ["RawAttributes"] = fileInfo.RawAttributes,
                ["Timestamp"] = fileInfo.Timestamp,
            },
        };

        directory.Add(file);
    }

    processedDirectories[directoryIndex] = true;
    return directory;
}
public void CreateFromStreamThrowIfInvalidArgument()
{
    Assert.That(
        () => DataStreamFactory.FromStream(null),
        Throws.ArgumentNullException);
}