// Reads a directory record: biased name pointer, entry count, then one
// absolute file position per child entry. Restores the stream position
// after parsing each child so the entry table can keep being walked.
public override void Read(Stream stream)
{
    // The stored name pointer is biased by Flag; remove it before lookup.
    uint filenamePosition = stream.ReadUInt() - Flag;
    Name = Program.GetFilename(filenamePosition);

    uint numberOfEntries = stream.ReadUInt();
    Entries = new List<Entry>((int)numberOfEntries);

    for (int i = 0; i < numberOfEntries; i++)
    {
        uint entryPosition = stream.ReadUInt();
        long currentPosition = stream.Position;

        // A pointer behind the current read position is treated as the
        // parent-directory link rather than a real child record.
        if (entryPosition < currentPosition)
        {
            Entries.Add(new DirectoryEntry { Name = ".." });
            continue;
        }

        // Peek the child's type tag to construct the right Entry subtype,
        // rewind the 4 bytes, then let the child parse itself in full.
        stream.Position = entryPosition;
        Entries.Add(Create(stream.ReadUInt()));
        stream.Position -= 4;
        Entries[i].Read(stream);

        // Return to the entry table for the next pointer.
        stream.Position = currentPosition;
    }
}
// Loads one colour record: id, two name-table indexes, thumbnail colour.
public void ReadFromGameFiles(Stream file, StringTable colourNames)
{
    ColourID = file.ReadUInt();

    ushort latinIndex = (ushort)file.ReadUInt();
    LatinName = colourNames.Get(latinIndex);

    ushort japaneseIndex = (ushort)file.ReadUInt();
    JapaneseName = colourNames.Get(japaneseIndex);

    ThumbnailColour = file.ReadUInt();
}
// Deserializes one chunk-part record: size field (unused), guid, offset, size.
internal FChunkPart(Stream reader)
{
    // Leading uint is the serialized struct size; the value itself is unused.
    reader.ReadUInt(); // dataSize

    Guid = new FGuid(reader).ToString();
    Offset = reader.ReadUInt();
    Size = reader.ReadUInt();
}
// Deserializes a map element: id, hue/shadow RGB, offsets, altitude, identifier.
public override void FromRaw(Stream s, int mapVersion)
{
    ElementId = s.ReadUInt();

    // RGB triplets for the hue and the shadow, one byte per channel.
    Hue.Red = (byte)s.ReadByte();
    Hue.Green = (byte)s.ReadByte();
    Hue.Blue = (byte)s.ReadByte();
    Shadow.Red = (byte)s.ReadByte();
    Shadow.Green = (byte)s.ReadByte();
    Shadow.Blue = (byte)s.ReadByte();

    if (mapVersion <= 4)
    {
        // Old maps store the cell offset; derive the pixel offset from it.
        Offset.X = (short)s.ReadByte();
        Offset.Y = (short)s.ReadByte();
        PixelOffset.X = (short)(Offset.X * AtouinConstants.CellHalfWidth);
        PixelOffset.Y = (short)(Offset.Y * AtouinConstants.CellHalfHeight);
    }
    else
    {
        // Newer maps store the pixel offset; derive the cell offset from it.
        PixelOffset.X = s.ReadShort();
        PixelOffset.Y = s.ReadShort();
        Offset.X = (short)(PixelOffset.X / AtouinConstants.CellHalfWidth);
        Offset.Y = (short)(PixelOffset.Y / AtouinConstants.CellHalfHeight);
    }

    Altitude = (byte)s.ReadByte();
    Identifier = s.ReadUInt();
}
// Refills the internal read buffer from one CRC-framed block of the inner stream.
private void FillBuffer()
{
    /*
     * TUninstallCrcHeader = packed record
     *   Size, NotSize: Cardinal; (uint)
     *   CRC: Longint; (int)
     * end;
     */
    uint size = innerStream.ReadUInt(data);
    uint notSize = innerStream.ReadUInt(data);
    System.Diagnostics.Debug.WriteLine($"CRC block size: {size}");

    // skip CRC, we will not check it anyway
    innerStream.Read(data, 0, 4);

    // NotSize must be the bitwise complement of Size; mismatch means corruption.
    if (size != ~notSize)
    {
        throw new Exception("File record header is corrupt (size != notSize)");
    }

    dataAvailable = (int)size;
    dataPos = 0;
    // NOTE(review): a single Read may return fewer than 'size' bytes;
    // dataAvailable is deliberately trimmed to whatever was actually read.
    dataAvailable = innerStream.Read(data, 0, dataAvailable);
}
// Re-signs the package using the key material from the supplied key vault:
// reads the console certificate out of the KV, hashes the header, then
// produces a reversed PKCS#1 SHA-1 signature over the target region.
protected void ResignPackage(Stream kv, int headerStart, int size, int toSignLoc)
{
    var rsaParameters = GetRSAParameters(kv);

    // read the certificate — a 0x4000-byte KV has an extra 0x10-byte header
    // before the certificate block.
    kv.Position = 0x9B8 + (kv.Length == 0x4000 ? 0x10 : 0);
    Certificate.PublicKeyCertificateSize = kv.ReadShort();
    kv.Read(Certificate.OwnerConsoleId, 0, 5);
    Certificate.OwnerConsolePartNumber = kv.ReadWString(0x11);
    Certificate.OwnerConsoleType = (ConsoleType)(kv.ReadUInt() & 3); // low 2 bits only
    Certificate.DateGeneration = kv.ReadWString(8);
    Certificate.PublicExponent = kv.ReadUInt();
    kv.Read(Certificate.PublicModulus, 0, 128);
    kv.Read(Certificate.CertificateSignature, 0, 256);
    ConsoleId = Certificate.OwnerConsoleId.ToHex();

    // NOTE(review): (HeaderSize + 0xFFF) & 0xF000 truncates sizes >= 0x10000;
    // the conventional round-up mask would be ~0xFFF — confirm HeaderSize
    // never exceeds 64K before changing this.
    HeaderHash = HashBlock(headerStart, ((HeaderSize + 0xFFF) & 0xF000) - headerStart);

    // Fix: dispose the RSA provider so its native key/CSP resources are released.
    using (var rsaEncryptor = new RSACryptoServiceProvider())
    {
        var rsaSigFormat = new RSAPKCS1SignatureFormatter(rsaEncryptor);
        rsaEncryptor.ImportParameters(rsaParameters);
        rsaSigFormat.SetHashAlgorithm("SHA1");

        var signature = rsaSigFormat.CreateSignature(HashBlock(toSignLoc, size));
        Array.Reverse(signature); // signature is stored little-endian
        Certificate.Signature = signature;
    }
}
/* Get the offsets, lengths, and filenames of all the files */
public override ArchiveFileList GetFileList(ref Stream data)
{
    try
    {
        // Entry count lives at the very start of the archive.
        uint files = data.ReadUInt(0x0);
        ArchiveFileList fileList = new ArchiveFileList(files);

        // Each 0x2C-byte record: offset, length, then a 36-byte name.
        for (uint i = 0; i < files; i++)
        {
            uint recordBase = i * 0x2C;
            fileList.Entries[i] = new ArchiveFileList.Entry(
                data.ReadUInt(0x4 + recordBase),        // Offset
                data.ReadUInt(0x8 + recordBase),        // Length
                data.ReadString(0xC + recordBase, 36)); // Filename
        }

        return fileList;
    }
    catch
    {
        // Malformed archive — signal failure with null.
        return null;
    }
}
// Reads one file entry: name-table pointer, data location, data size.
public override void Read(Stream stream)
{
    uint namePointer = stream.ReadUInt();
    Name = Program.GetFilename(namePointer);
    Location = stream.ReadUInt();
    Size = stream.ReadUInt();
}
// Reads one file entry: name-table pointer, data location, data size.
public override void Read(Stream stream)
{
    uint namePosition = stream.ReadUInt();
    Name = Program.GetFilename(namePosition);

    Location = stream.ReadUInt();
    Size = stream.ReadUInt();
}
// Deserializes this element; the delay pair exists only in version-4 files.
public override void FromRaw(Stream s, int fileVersion)
{
    base.FromRaw(s, fileVersion);

    if (fileVersion != 4)
    {
        return;
    }

    MinDelay = s.ReadUInt();
    MaxDelay = s.ReadUInt();
}
// Reads each data block described by the (start, size) pairs stored
// as consecutive uint pairs beginning at file offset 8.
protected virtual void ReadDataFromFile(Stream file)
{
    for (int blockIndex = 0; blockIndex < data.Length; blockIndex++)
    {
        file.Position = 8 * (blockIndex + 1);
        uint start = file.ReadUInt();
        uint size = file.ReadUInt();
        Read(file, start, size, data[blockIndex]);
    }
}
// Reads one body chunk. Returns null at the end-of-body facade marker,
// dispatches known chunks to ReadChunk, and wraps unknown-but-skippable
// chunks in a SkippedChunk. 'key' identifies the chunk type for the caller.
public static Chunk ReadBodyChunk(Stream s, GBXLBSContext context, GBXNodeRefList list, out string key)
{
    uint chunkID = s.ReadUInt();
    // The facade id marks the end of the chunk list.
    if (chunkID == facade)
    {
        key = null;
        return (null);
    }

    Chunk newChunk;
    try
    {
        if (IsChunkSkippable(chunkID))
        {
            // Skippable chunks carry a skip marker plus a byte length; the
            // length lets us verify the chunk body was consumed exactly.
            Trace.Assert(s.ReadUInt() == skip, "Error reading chunk. ChunkID:" + chunkID.ToString("X"));
            long chunkEndPos = s.ReadUInt() + s.Position;
            // Skippable chunks get a fresh context/ref scope of their own.
            newChunk = ReadChunk(s, chunkID, new GBXLBSContext(), list, out key);
            Trace.Assert(s.Position == chunkEndPos, "Error reading chunk. ChunkID:" + chunkID.ToString("X"));
        }
        else
        {
            newChunk = ReadChunk(s, chunkID, context, list, out key);
        }
    }
    catch (KeyNotFoundException)
    {
        // Chunk not in database — test whether it can be skipped by peeking
        // for the skip marker without consuming it.
        uint testSkip = BitConverter.ToUInt32(s.SimplePeek(4), 0);
        if (testSkip == skip) // Chunk can be skipped, chunk info not necessary
        {
            s.Position += 4;
            uint length = s.ReadUInt();
            newChunk = new SkippedChunk(s, (int)length, new GBXLBSContext(), new GBXNodeRefList())
            {
                ChunkID = chunkID,
                isSkippable = true
            };
            key = skippedChunkKey + " " + chunkID.ToString("X");
        }
        else
        {
            // This exception ends the read: the chunk is neither known nor skippable.
            throw new UnknownChunkException("Chunk could not be read ChunkID: " + chunkID.ToString("X"));
        }
    }
    return (newChunk);
}
// Deserializes the block header fields, returning total bytes consumed.
public uint ReadFromStream(Stream stream)
{
    uint bytesRead = 0;
    bytesRead += stream.ReadX2String(ref _previousHash, 32);
    bytesRead += stream.ReadX2String(ref _dataHash, 32);
    bytesRead += stream.ReadUInt(ref _index);
    bytesRead += stream.ReadUInt(ref _blockTimestamp);
    return bytesRead;
}
// Reads an archive entry: biased name pointer, location and both sizes.
public override void Read(Stream stream)
{
    // The stored name pointer is biased by Flag; remove it before lookup.
    uint namePosition = stream.ReadUInt() - Flag;
    Name = Program.GetFilename(namePosition);

    Location = stream.ReadUInt();
    UncompressedSize = stream.ReadUInt();
    Size = stream.ReadUInt();
}
// Scans backwards from the end of the archive for the end-of-central-directory
// signature, then parses the record it marks.
private void FindCentralEndRecord()
{
    // Start 17 bytes from the end; the signature cannot begin any later.
    _archiveStream.Seek(-17, SeekOrigin.End);
    uint signature = 0;
    // Each pass rewinds 5 bytes and reads 4, so the 4-byte window slides
    // back exactly one byte per iteration until the signature is found or
    // the scan approaches the start of the stream.
    while (_archiveStream.Position > 5 && signature != CentralEndRecordSignature)
    {
        _archiveStream.Seek(-5, SeekOrigin.Current);
        signature = _archiveStream.ReadUInt();
    }
    ReadCentralEndRecord();
}
/* Checks to see if the input stream is a Storybook archive */
public override bool Check(ref Stream input, string filename)
{
    try
    {
        // Big-endian uint at 0x4 must equal 0x10, and the big-endian uint at
        // 0xC must be all ones or all zeros. (0xC is read a second time only
        // when the first comparison fails, via short-circuit evaluation.)
        return (input.ReadUInt(0x4).SwapEndian() == 0x10 &&
                (input.ReadUInt(0xC).SwapEndian() == 0xFFFFFFFF ||
                 input.ReadUInt(0xC).SwapEndian() == 0x00000000));
    }
    catch
    {
        // A read past EOF (file too small) means it isn't this archive type.
        return false;
    }
}
/* Checks to see if the input stream is a MRG archive */
// NOTE(review): despite the MRG name, the magic compared is ArchiveHeader.TEX
// — confirm against the format spec.
public override bool Check(ref Stream input, string filename)
{
    try
    {
        // Magic at 0x0, and the uint at 0x4 must differ from the stream
        // length both with and without a 4-byte trailer.
        return (input.ReadString(0x0, 4) == ArchiveHeader.TEX &&
                input.ReadUInt(0x4) != input.Length &&
                input.ReadUInt(0x4) != input.Length - 4);
    }
    catch
    {
        return false;
    }
}
// Deserializes a count-prefixed array of hash blocks. Each record is a
// 16-byte hash followed by int length, long offset and uint checksum.
public static HashBlock[] Destream(Stream inputStream)
{
    uint count = inputStream.ReadUInt();
    var hashBlocks = new HashBlock[count];

    for (int i = 0; i < count; ++i)
    {
        var block = new HashBlock { Hash = new byte[16] };
        inputStream.Read(block.Hash, 0, 16);
        block.Length = inputStream.ReadInt();
        block.Offset = inputStream.ReadLong();
        block.Checksum = inputStream.ReadUInt();
        hashBlocks[i] = block;
    }

    return hashBlocks;
}
// Dumps every car-class list in the file, one CSV per list.
public static void Dump(Stream file, string directory, int fileNumber)
{
    // Top level: a count and an index table of absolute list positions.
    uint outerStructureCount = file.ReadUInt();
    uint outerStartOfIndexes = file.ReadUInt();

    for (int i = 0; i < outerStructureCount; i++)
    {
        file.Position = outerStartOfIndexes + (i * 4);
        uint outerStructurePos = file.ReadUInt();

        // Per-list header: marker, record count, record index table, name.
        file.Position = outerStructurePos;
        file.ReadUInt(); // always 0x0C
        uint structureCount = file.ReadUInt();
        uint startOfIndexes = file.ReadUInt();
        string listName = file.ReadCharacters();

        using (var outFile = new FileStream(Path.Combine(directory, $"{fileNumber}_CarClasses_{listName}.csv"), FileMode.Create, FileAccess.Write))
        {
            using (TextWriter output = new StreamWriter(outFile, Encoding.UTF8))
            {
                using (var csv = new CsvWriter(output))
                {
                    csv.Configuration.QuoteAllFields = true;
                    csv.Configuration.RegisterClassMap<CarClassCSVMap>();
                    csv.WriteHeader<CarClassData>();
                    csv.NextRecord();

                    for (int j = 0; j < structureCount; j++)
                    {
                        // Inner index entries are relative to the list position.
                        file.Position = startOfIndexes + (j * 4) + outerStructurePos;
                        uint structurePos = file.ReadUInt();

                        file.Position = structurePos + outerStructurePos;
                        var data = new CarClassData();
                        file.ReadUInt(); // always 0x10
                        uint secondStringOffset = file.ReadUInt();
                        data.Unknown = file.ReadUShort();
                        data.UnlockLevel = file.ReadUShort();
                        data.OpponentGroup = file.ReadUShort();
                        data.Unknown2 = file.ReadUShort();
                        data.CarName = file.ReadCharacters();

                        // The second string lives at its own record-relative offset.
                        file.Position = structurePos + outerStructurePos + secondStringOffset;
                        data.EventRestriction = file.ReadCharacters();

                        csv.WriteRecord(data);
                        csv.NextRecord();
                    }
                }
            }
        }
    }
}
// Dumps the demo table to a single CSV file.
public static void Dump(Stream file, string directory, int fileNumber)
{
    uint structureCount = file.ReadUInt();
    uint startOfIndexes = file.ReadUInt();

    using (var outFile = new FileStream(Path.Combine(directory, $"{fileNumber}_Demos.csv"), FileMode.Create, FileAccess.Write))
    {
        using (TextWriter output = new StreamWriter(outFile, Encoding.UTF8))
        {
            using (var csv = new CsvWriter(output))
            {
                csv.Configuration.QuoteAllFields = true;
                csv.Configuration.RegisterClassMap<DemoCSVMap>();
                csv.WriteHeader<DemoData>();
                csv.NextRecord();

                for (int i = 0; i < structureCount; i++)
                {
                    file.Position = startOfIndexes + (i * 4);
                    uint structurePos = file.ReadUInt();

                    // Record size is the gap to the next record's position
                    // (or to EOF for the last record).
                    uint nextStructurePos;
                    if (i + 1 < structureCount)
                    {
                        nextStructurePos = file.ReadUInt();
                    }
                    else
                    {
                        nextStructurePos = (uint)file.Length;
                    }
                    // NOTE(review): structureSize is computed but never used.
                    uint structureSize = nextStructurePos - structurePos;

                    file.Position = structurePos;
                    file.ReadUInt(); // always 0x14
                    var data = new DemoData();
                    uint secondStringOffset = file.ReadUInt();
                    file.ReadUInt(); // end of struct offset - ignore it
                    data.Unknown = file.ReadUInt();
                    data.IsLocked = file.ReadUInt();
                    data.Filename = file.ReadCharacters();

                    // Course string lives at a record-relative offset.
                    file.Position = structurePos + secondStringOffset;
                    data.Course = file.ReadCharacters();

                    csv.WriteRecord(data);
                    csv.NextRecord();
                }
            }
        }
    }
}
// Parses the container: magic byte 'E' (69), version, element list, and —
// from version 8 on — a trailing table of JPG ids.
public void Read(Stream s)
{
    Stream = s;

    var magic = s.ReadByte();
    if (magic != 69)
    {
        // Not one of our files; leave everything untouched.
        return;
    }

    FileVersion = s.ReadByte();

    var elementCount = s.ReadUInt();
    for (int i = 0; i < elementCount; i++)
    {
        var el = new Element(this);
        el.Read(s);
        Elements.Add(el);
    }

    if (FileVersion >= 8)
    {
        var gfxCount = s.ReadInt();
        for (int i = 0; i < gfxCount; i++)
        {
            JpgMap.Add(s.ReadInt());
        }
    }
}
// Reads a length-prefixed UTF-8 string from the stream.
public GBXString(Stream s)
{
    uint byteCount = s.ReadUInt();
    Value = UTF8.GetString(s.SimpleRead((int)byteCount));
}
// Unpacks a shadow polygon word: top bit = flat-shaded flag, then four
// 6-bit vertex indices (the fourth only valid for quads).
public virtual void ReadFromCDO(Stream stream, bool isQuad, List<ShadowVertex> vertices)
{
    uint data = stream.ReadUInt();
    IsGradientShaded = (data & 0x8000_0000) == 0;

    Vertex0 = vertices[(int)data & 0x3F];
    Vertex1 = vertices[(int)(data >> 6) & 0x3F];
    Vertex2 = vertices[(int)(data >> 12) & 0x3F];

    int vertex3Ref = (int)(data >> 18) & 0x3F;
    if (isQuad)
    {
        Vertex3 = vertices[vertex3Ref];
    }
    else if (vertex3Ref != 0x00)
    {
        // A triangle must leave the fourth index slot empty.
        throw new Exception("Vertex 3 in shadow triangle not zero");
    }
}
// Handles an incoming video packet: tracks key-frame state and forwards the
// payload to the publisher (when this stream owns it) or the outbound stream.
protected override void VideoHandler(Stream packet)
{
    var time = packet.ReadUInt();
    var length = (uint)packet.GetAvaliableByteCounts();

    // Any lost fragment invalidates the current key-frame state.
    if (_numberLostFragments > 0)
    {
        _firstKeyFrame = false;
    }

    // Peek the first payload byte and rewind. High nibble 0x1 presumably
    // marks a key frame (FLV frame-type convention) — TODO confirm.
    if ((packet.ReadByte() & 0xF0) == 0x10)
    {
        _firstKeyFrame = true;
    }
    packet.Position--;

    if (!_firstKeyFrame)
    {
        // Key frame lost — drop data until the next key frame arrives.
        return;
    }

    _numberLostFragments = 0;
    if (_publisher != null && _publisher.PublisherId == StreamId)
    {
        //_publication.PushVideoPacket(packet.ReadUInt32(),packet,_numberLostFragments);
        _publisher.FeedData(packet, length, 0, length, time, false);
    }
    else if (OutStream != null)
    {
        TotalBytes += length;
        OutStream.FeedData(packet, length, 0, length, time, false);
    }
}
// Handles an incoming video packet: tracks key-frame state and forwards the
// payload to the publisher (when this stream owns it) or the outbound stream.
protected override void VideoHandler(Stream packet)
{
    var time = packet.ReadUInt();
    var length = (uint)packet.GetAvaliableByteCounts();

    // Any lost fragment invalidates the current key-frame state.
    if (_numberLostFragments > 0)
        _firstKeyFrame = false;

    // Peek the first payload byte and rewind. High nibble 0x1 presumably
    // marks a key frame (FLV frame-type convention) — TODO confirm.
    if ((packet.ReadByte() & 0xF0) == 0x10)
        _firstKeyFrame = true;
    packet.Position--;

    if (!_firstKeyFrame)
    {
        // Key frame lost — drop data until the next key frame arrives.
        return;
    }

    _numberLostFragments = 0;
    if (_publisher != null && _publisher.PublisherId == StreamId)
    {
        //_publication.PushVideoPacket(packet.ReadUInt32(),packet,_numberLostFragments);
        _publisher.FeedData(packet, length, 0, length, time, false);
    }
    else if (OutStream != null)
    {
        TotalBytes += length;
        OutStream.FeedData(packet, length, 0, length, time, false);
    }
}
// Reads a length-prefixed XML blob from the stream and deserializes it into T.
public GBXXml(Stream stream)
{
    var length = stream.ReadUInt();

    _serializer = new XmlSerializer(typeof(T));

    // Fix: dispose the temporary MemoryStream once deserialization is done.
    using (var mems = new MemoryStream(stream.SimpleRead((int)length)))
    {
        Xml = (T)_serializer.Deserialize(mems);
    }
}
// Handles an incoming audio packet and forwards it to whichever sink is
// active: the publisher (when this stream owns it) or the outbound stream.
// The timestamp/length reads happen inside each branch on purpose — when
// neither sink is active the packet must remain unread.
protected override void AudioHandler(Stream packet)
{
    if (_publisher != null && _publisher.PublisherId == StreamId)
    {
        var time = packet.ReadUInt();
        var length = (uint)packet.GetAvaliableByteCounts();
        // _publication.PushAudioPacket(packet.ReadUInt32(), packet, _numberLostFragments);
        _publisher.FeedData(packet, length, 0, length, time, true);
        _numberLostFragments = 0;
    }
    else if (OutStream != null)
    {
        var time = packet.ReadUInt();
        var length = (uint)packet.GetAvaliableByteCounts();
        TotalBytes += length;
        OutStream.FeedData(packet, length, 0, length, time, true);
    }
}
// Deserializes a count-prefixed array of hash blocks: 16-byte hash,
// int length, long offset and uint checksum per record.
public static HashBlock[] Destream(Stream inputStream)
{
    uint count = inputStream.ReadUInt();
    var result = new HashBlock[count];

    for (int index = 0; index < count; ++index)
    {
        var entry = new HashBlock { Hash = new byte[16] };
        inputStream.Read(entry.Hash, 0, 16);
        entry.Length = inputStream.ReadInt();
        entry.Offset = inputStream.ReadLong();
        entry.Checksum = inputStream.ReadUInt();
        result[index] = entry;
    }

    return result;
}
// Reads the main struct table: count, a reserved uint, struct size, then
// 'structCount' raw records of 'structSize' bytes each.
private static List<byte[]> ReadMainStructs(Stream file)
{
    ushort structCount = file.ReadUShort();
    file.ReadUInt(); // always zero?
    uint structSize = file.ReadUInt();

    var structs = new List<byte[]>(structCount);
    for (ushort i = 0; i < structCount; i++)
    {
        var buffer = new byte[structSize];

        // Fix: Stream.Read may return fewer bytes than requested, which
        // previously left silently zero-padded structs. Loop until the
        // buffer is full, and fail loudly on a truncated file.
        int filled = 0;
        while (filled < buffer.Length)
        {
            int read = file.Read(buffer, filled, buffer.Length - filled);
            if (read == 0)
            {
                throw new EndOfStreamException("Unexpected end of file while reading struct data");
            }
            filled += read;
        }

        structs.Add(buffer);
    }

    return structs;
}
// Handles an incoming audio packet and forwards it to whichever sink is
// active: the publisher (when this stream owns it) or the outbound stream.
// The timestamp/length reads happen inside each branch on purpose — when
// neither sink is active the packet must remain unread.
protected override void AudioHandler(Stream packet)
{
    if (_publisher != null && _publisher.PublisherId == StreamId)
    {
        var time = packet.ReadUInt();
        var length = (uint)packet.GetAvaliableByteCounts();
        // _publication.PushAudioPacket(packet.ReadUInt32(), packet, _numberLostFragments);
        _publisher.FeedData(packet, length, 0, length, time, true);
        _numberLostFragments = 0;
    }
    else if (OutStream != null)
    {
        var time = packet.ReadUInt();
        var length = (uint)packet.GetAvaliableByteCounts();
        TotalBytes += length;
        OutStream.FeedData(packet, length, 0, length, time, true);
    }
}
// Reads the sixty fixed time-period slots, each seeded by the uint stored
// at its 4-byte slot starting at file offset 8.
public void Read(Stream stream)
{
    for (int slot = 0; slot < 60; slot++)
    {
        stream.Position = (slot * 4) + 8;

        var timePeriod = new TimePeriod();
        timePeriod.Read(stream, stream.ReadUInt());
        TimePeriods.Add(timePeriod);
    }
}
// Decompresses a GTZ container: validates version and size fields, runs the
// LZSS payload through the decompressor, trims it to the declared size and
// writes it next to the input (with the GTZ extension stripped).
private static void Decompress(string filename, Stream file)
{
    if (file.ReadUInt() != Version)
    {
        Console.WriteLine("Unknown GTZ version");
        return;
    }

    uint compressedSize = file.ReadUInt();
    if (file.Length != compressedSize + 16)
    {
        Console.WriteLine("Incorrect file size");
        return;
    }

    uint uncompressedSize = file.ReadUInt();

    using (var compressed = new MemoryStream())
    {
        file.CopyTo(compressed);
        compressed.Position = 0;

        using (var decompressed = new MemoryStream())
        {
            LZSS.Decompress(compressed, decompressed);
            if (decompressed.Length < uncompressedSize)
            {
                Console.WriteLine("Decompressed data too short");
                return;
            }

            // Trim any decompressor overshoot to the declared size.
            decompressed.Position = 0;
            decompressed.SetLength(uncompressedSize);

            // Fix: strip the extension only from the END of the name.
            // string.Replace removed every occurrence of the extension text
            // anywhere in the path, corrupting names that contain it.
            filename = filename.EndsWith(Extension)
                ? filename.Substring(0, filename.Length - Extension.Length)
                : $"decompressed_(unknown)";

            using (var output = new FileStream(filename, FileMode.Create, FileAccess.Write))
            {
                decompressed.CopyTo(output);
            }
        }
    }
}
// Deserializes the entity appearance; each later format revision appended
// one more optional field group.
public override void FromRaw(Stream s, int fileVersion)
{
    var lookLength = s.ReadInt();
    EntityLook = s.ReadUTFBytes(lookLength);
    HorizontalSymmetry = s.ReadBoolean();

    if (fileVersion >= 7)
    {
        PlayAnimation = s.ReadBoolean();
    }

    if (fileVersion >= 6)
    {
        PlayAnimStatic = s.ReadBoolean();
    }

    if (fileVersion >= 5)
    {
        MinDelay = s.ReadUInt();
        MaxDelay = s.ReadUInt();
    }
}
// Loads a count-prefixed list of string key/value pairs. Always succeeds.
public virtual bool LoadFrom(Stream stream)
{
    uint pairCount = stream.ReadUInt();
    for (uint i = 0; i < pairCount; i++)
    {
        string key = stream.ReadString();
        string value = stream.ReadString();
        KeyValues.Add(key, value);
    }
    return true;
}
// Loads the class description: base data, attributes, base-type name, the
// nested types (a flag byte selects class vs enum), then the field list.
public override bool LoadFrom(Stream stream)
{
    base.LoadFrom(stream);
    Attribute.LoadFrom(stream);
    BaseTypeName = stream.ReadString();

    // Nested types: flag byte 1 means class, anything else means enum.
    uint typeCount = stream.ReadUInt();
    for (uint i = 0; i < typeCount; i++)
    {
        byte isClass = (byte)stream.ReadByte();
        if (isClass == 1)
        {
            var customClass = new SirenCustomClass();
            customClass.LoadFrom(stream);
            customClass.Parent = this;
            Types.Add(customClass.Name, customClass);
        }
        else
        {
            var customEnum = new SirenCustomEnum();
            customEnum.LoadFrom(stream);
            customEnum.Parent = this;
            Types.Add(customEnum.Name, customEnum);
        }
    }

    // Fields are indexed by insertion order.
    uint fieldCount = stream.ReadUInt();
    for (uint i = 0; i < fieldCount; i++)
    {
        var field = new SirenField();
        field.LoadFrom(stream);
        field.ParentType = this;
        FieldNameDict.Add(field.Name, field);
        field.Index = (ushort)(FieldNameDict.Count - 1);
    }

    return true;
}
// Loads a count-prefixed list of string key/value pairs. Always succeeds.
public virtual bool LoadFrom(Stream stream)
{
    uint entryCount = stream.ReadUInt();
    for (uint index = 0; index < entryCount; index++)
    {
        var k = stream.ReadString();
        var v = stream.ReadString();
        KeyValues.Add(k, v);
    }
    return (true);
}
/// <summary>Parses one USN change-journal record from the stream.</summary>
/// <remarks>Note how the read advances to the FileNameOffset and reads only FileNameLength bytes</remarks>
public void Read(Stream stream)
{
    // The name and the next record are addressed relative to this position.
    var startOfRecord = stream.Position;

    RecordLength = stream.ReadUInt( );
    MajorVersion = stream.ReadUShort( );
    MinorVersion = stream.ReadUShort( );
    FileReferenceNumber = stream.ReadULong( );
    ParentFileReferenceNumber = stream.ReadULong( );
    Usn = stream.ReadLong( );
    TimeStamp = stream.ReadLong( );
    Reason = ( UsnReason )stream.ReadUInt( );
    SourceInfo = stream.ReadUInt( );
    SecurityId = stream.ReadUInt( );
    FileAttributes = stream.ReadUInt( );
    FileNameLength = stream.ReadUShort( );
    FileNameOffset = stream.ReadUShort( );

    // File name is UTF-16, stored at FileNameOffset within the record.
    stream.Position = startOfRecord + FileNameOffset;
    FileName = Encoding.Unicode.GetString(stream.ReadBytes(FileNameLength));

    // Skip any trailing padding so we land exactly on the next record.
    stream.Position = startOfRecord + RecordLength;
}
// Inspects an outgoing packet before relaying it. When the packet is the
// initial connect invoke (flow 2, stage 1) its AMF0 connection object is
// rewritten in place so tcUrl/app point at this session's URL, and the
// chunk size field is fixed up to match the rewritten payload.
public override void SendStream(Stream stream,int length)
{
    var marker = stream.ReadByte() | 0xF0;
    // Packets whose marker matches 0xFE (no target) / 0xFD (with target)
    // carry an extra echo-time field that must also be skipped.
    var echoTime = marker == (Target == null ? 0xFE : 0xFD);
    stream.ReadUShort();
    if (echoTime)
        stream.ReadUShort();

    var type = stream.ReadByte();
    if (type == 0x10)
    {
        // Remember where the chunk's size field sits so it can be patched.
        var sizePos = stream.Position;
        var size = stream.ReadUShort();
        var flags = stream.ReadByte();
        var idFlow = stream.Read7BitLongValue();
        var stage = stream.Read7BitLongValue();
        if (idFlow == 2 && stage == 1)
        {
            var deltaNAck = stream.Read7BitLongValue();
            // Skip the variable-length signature block plus two bytes.
            var len = (ushort) stream.ReadByte();
            stream.Position += len;
            stream.ReadByte();
            stream.ReadByte();//type
            stream.ReadUInt();//timestamp

            // The AMF0 payload starts with the command name and number.
            var amfReader = new AMF0Reader(stream);
            var str = amfReader.ReadShortString(true);
            var num = amfReader.ReadAMFDouble(true);

            // Read the connection object, patch the URL fields, then write
            // the object back starting at the same position.
            var pos = stream.Position;
            var connectionInfo = amfReader.ReadVariant();
            connectionInfo["tcUrl"] = MiddleSession.QueryUrl;
            connectionInfo["app"] = MiddleSession.QueryUrl.Split('/').Last();
            stream.Position = pos;
            var amfWriter = new AMF0Writer(stream);
            amfWriter.WriteObject(connectionInfo, true);

            // The rewrite may have changed the payload length; update both
            // the overall length and the chunk's size field.
            length = (int) stream.Position;
            len = (ushort) (stream.Position - sizePos-2);
            stream.Position = sizePos;
            stream.Write(len);
        }
    }

    // Rewind past the framing header before handing off for transmission.
    stream.Position = 6;
    base.SendStream(stream,length);
}
/* Decompress — LZ77-style decoder. Flag bytes (MSB first) select between
 * literal bytes and back-references whose encoding size (2/3/4 bytes) is
 * chosen by the high nibble of the first reference byte. */
public override MemoryStream Decompress(ref Stream data)
{
    try
    {
        // Compressed & Decompressed Data Information
        uint CompressedSize = (uint)data.Length;
        // Decompressed size lives in the top 24 bits of the first word.
        uint DecompressedSize = data.ReadUInt(0x0) >> 8;
        uint SourcePointer = 0x4;
        uint DestPointer = 0x0;

        if (DecompressedSize == 0) // Next 4 bytes are the decompressed size
        {
            DecompressedSize = data.ReadUInt(0x4);
            SourcePointer += 0x4;
        }

        byte[] CompressedData = data.ToByteArray();
        byte[] DecompressedData = new byte[DecompressedSize];

        // Start Decompression
        while (SourcePointer < CompressedSize && DestPointer < DecompressedSize)
        {
            byte Flag = CompressedData[SourcePointer]; // Compression Flag
            SourcePointer++;

            // Each flag byte controls the next 8 tokens, MSB first.
            for (int i = 7; i >= 0; i--)
            {
                if ((Flag & (1 << i)) == 0) // Data is not compressed
                {
                    DecompressedData[DestPointer] = CompressedData[SourcePointer];
                    SourcePointer++;
                    DestPointer++;
                }
                else // Data is compressed
                {
                    int Distance;
                    int Amount;

                    // Let's determine how many bytes the distance & length pair take up
                    switch (CompressedData[SourcePointer] >> 4)
                    {
                        case 0: // 3 bytes
                            Distance = (((CompressedData[SourcePointer + 1] & 0xF) << 8) | CompressedData[SourcePointer + 2]) + 1;
                            Amount = (((CompressedData[SourcePointer] & 0xF) << 4) | (CompressedData[SourcePointer + 1] >> 4)) + 17;
                            SourcePointer += 3;
                            break;

                        case 1: // 4 bytes
                            Distance = (((CompressedData[SourcePointer + 2] & 0xF) << 8) | CompressedData[SourcePointer + 3]) + 1;
                            Amount = (((CompressedData[SourcePointer] & 0xF) << 12) | (CompressedData[SourcePointer + 1] << 4) | (CompressedData[SourcePointer + 2] >> 4)) + 273;
                            SourcePointer += 4;
                            break;

                        default: // 2 bytes
                            Distance = (((CompressedData[SourcePointer] & 0xF) << 8) | CompressedData[SourcePointer + 1]) + 1;
                            Amount = (CompressedData[SourcePointer] >> 4) + 1;
                            SourcePointer += 2;
                            break;
                    }

                    // Copy the data byte-by-byte so overlapping runs repeat correctly.
                    for (int j = 0; j < Amount; j++)
                        DecompressedData[DestPointer + j] = DecompressedData[DestPointer - Distance + j];

                    DestPointer += (uint)Amount;
                }

                // Check for out of range
                if (SourcePointer >= CompressedSize || DestPointer >= DecompressedSize)
                    break;
            }
        }

        return new MemoryStream(DecompressedData);
    }
    catch
    {
        return null; // An error occured while decompressing
    }
}
// Get file list containing the entries in the archive
public override ArchiveFileList GetFileList(ref Stream data)
{
    try
    {
        // Big-endian file count at 0x4.
        uint files = data.ReadUInt(0x4).SwapEndian();
        ArchiveFileList fileList = new ArchiveFileList(files);

        // Filenames are present when an "FLST" block follows the entry table.
        bool containsFilenames =
            files > 0 &&
            data.ReadUInt(0x8) != 0x8 + (files * 0x8) &&
            data.ReadString(0x8 + (files * 0x8), 4) == "FLST";

        for (uint i = 0; i < files; i++)
        {
            uint entryBase = i * 0x8;
            fileList.Entries[i] = new ArchiveFileList.Entry(
                data.ReadUInt(0x8 + entryBase).SwapEndian(), // Offset
                data.ReadUInt(0xC + entryBase).SwapEndian(), // Length
                containsFilenames
                    ? data.ReadString(0xC + (files * 0x8) + (i * 0x40), 64)
                    : string.Empty);                         // Filename
        }

        return fileList;
    }
    catch
    {
        /* Something went wrong, so return nothing */
        return null;
    }
}
/* Get the offsets, lengths, and filenames of all the files */
public override ArchiveFileList GetFileList(ref Stream data)
{
    try
    {
        uint files = data.ReadUInt(0x4);
        ArchiveFileList fileList = new ArchiveFileList(files);

        // The filename-table position sits right after the entry table;
        // when zero, fall back to the pointer stored before the first file.
        uint metadataLocation = data.ReadUInt((files * 0x8) + 0x8);
        if (metadataLocation == 0x0)
        {
            metadataLocation = data.ReadUInt(data.ReadUInt(0x8) - 0x8);
        }

        for (uint i = 0; i < files; i++)
        {
            uint entryBase = i * 0x8;
            fileList.Entries[i] = new ArchiveFileList.Entry(
                data.ReadUInt(0x8 + entryBase), // Offset
                data.ReadUInt(0xC + entryBase), // Length
                metadataLocation == 0x0
                    ? String.Empty
                    : data.ReadString(metadataLocation + (i * 0x30), 32)); // Filename
        }

        return fileList;
    }
    catch
    {
        /* Something went wrong, so return nothing */
        return null;
    }
}
// Loads the generation mode enum followed by the target directory string.
public override bool LoadFrom(Stream stream)
{
    Mode = (SirenClassGenerateMode)stream.ReadUInt();
    Dir = stream.ReadString();
    return true;
}
/* Get the offsets, lengths, and filenames of all the files */
public override ArchiveFileList GetFileList(ref Stream data)
{
    try
    {
        // Big-endian file count at 0x4; 0x28-byte entries follow.
        uint files = data.ReadUInt(0x4).SwapEndian();
        ArchiveFileList fileList = new ArchiveFileList(files);

        for (uint i = 0; i < files; i++)
        {
            uint entryBase = i * 0x28;
            string filename = data.ReadString(0x10 + entryBase, 32);

            // Append a .gvr extension whose case follows the stored name.
            string fullName = filename == String.Empty
                ? String.Empty
                : filename + (filename.IsAllUpperCase() ? ".GVR" : ".gvr");

            fileList.Entries[i] = new ArchiveFileList.Entry(
                data.ReadUInt(0x08 + entryBase).SwapEndian(), // Offset
                data.ReadUInt(0x0C + entryBase).SwapEndian(), // Length
                fullName);                                    // Filename
        }

        return fileList;
    }
    catch
    {
        /* Something went wrong, so return nothing */
        return null;
    }
}
// Decodes one frame from the stream into a Bitmap, using the previous frame
// as the reference for predicted frames (double buffering). Returns null on
// a bad header, a frame-index mismatch, or a truncated stream.
public Bitmap DecodeFrame(int frameIndex, Stream inStream)
{
    if (inStream == null)
        return null;
    try
    {
        // Check the frame header:
        if (inStream.ReadUInt() != MAGIC_FRAME)
            return null;
        int encodedFrameIndex = inStream.ReadSShort();
        if (encodedFrameIndex != frameIndex)
            return null;

        // Lock both buffers: current is the decode target, previous is the
        // reference for motion-compensated (predicted) frames.
        BitmapData currentData = currentFrame.LockBits(
            new Rectangle(0, 0, frameWidth, frameHeight),
            ImageLockMode.ReadOnly, currentFrame.PixelFormat);
        BitmapData previousData = previousFrame.LockBits(
            new Rectangle(0, 0, frameWidth, frameHeight),
            ImageLockMode.ReadOnly, previousFrame.PixelFormat);

        // Optional debug visualization of motion-compensation block types.
        if (VisualizeMCBlockTypes)
        {
            debugFrame = new Bitmap(frameWidth, frameHeight, PixelFormat.Format24bppRgb);
            debugFrameData = debugFrame.LockBits(
                new Rectangle(0, 0, frameWidth, frameHeight),
                ImageLockMode.ReadOnly, debugFrame.PixelFormat);
        }

        int pixelBytes = GetBytesPerPixel(currentFrame.PixelFormat);

        // TODO: handle bad value
        FrameType frameType = (FrameType)inStream.ReadUByte();
        switch (frameType)
        {
            case FrameType.Intra:
                DecodeIntraFrame(inStream, currentData, pixelBytes);
                break;
            case FrameType.Predicted:
                DecodePredictedFrame(inStream, currentData, previousData, pixelBytes);
                break;
        }

        currentFrame.UnlockBits(currentData);
        previousFrame.UnlockBits(previousData);

        if (VisualizeMCBlockTypes)
        {
            debugFrame.UnlockBits(debugFrameData);
            debugFrame.Save(String.Format("debug{0:000000}.png", frameIndex), ImageFormat.Png);
            debugFrame.Dispose();
        }
    }
    catch (EndOfStreamException ex)
    {
        Log("Exception: {0} {1}", ex.Message, ex.StackTrace);
        return null;
    }

    Bitmap result = currentFrame; // double buffering
    // save the current bitmap to act as previous one when decoding the next frame
    SwapBitmaps(ref previousFrame, ref currentFrame);
    //currentFrame.Dispose();
    return result;
}
/* Get the offsets, lengths, and filenames of all the files */
public override ArchiveFileList GetFileList(ref Stream data)
{
    try
    {
        uint files = data.ReadUInt(0x0);
        ArchiveFileList fileList = new ArchiveFileList(files);

        // 0x18-byte entries; offsets are stored in 0x800-byte sectors.
        for (uint i = 0; i < files; i++)
        {
            uint entryBase = i * 0x18;
            fileList.Entries[i] = new ArchiveFileList.Entry(
                data.ReadUInt(0x14 + entryBase) * 0x800, // Offset (sector -> byte)
                data.ReadUInt(0x18 + entryBase),         // Length
                data.ReadString(0x04 + entryBase, 16));  // Filename
        }

        return fileList;
    }
    catch
    {
        /* Something went wrong, so return nothing */
        return null;
    }
}
/* To simplify the process greatly, we are going to convert
 * the GVM to a new format: a flat header of (offset, length, name) entries
 * followed by each GVR payload re-wrapped with a GBIX header. */
public override MemoryStream TranslateData(ref Stream stream)
{
    try
    {
        /* Get the number of files, and format type in the stream */
        ushort files = stream.ReadUShort(0xA).SwapEndian();
        byte formatType = stream.ReadByte(0x9);

        /* Now let's see what information is contained inside the metadata */
        bool containsFilename = (formatType & (1 << 3)) > 0;
        bool containsPixelFormat = (formatType & (1 << 2)) > 0;
        bool containsDimensions = (formatType & (1 << 1)) > 0;
        bool containsGlobalIndex = (formatType & (1 << 0)) > 0;

        /* Let's figure out the metadata size */
        int size_filename = 0, size_pixelFormat = 0, size_dimensions = 0, size_globalIndex = 0;
        if (containsFilename)
            size_filename = 28;
        if (containsPixelFormat)
            size_pixelFormat = 2;
        if (containsDimensions)
            size_dimensions = 2;
        if (containsGlobalIndex)
            size_globalIndex = 4;
        // 2 bytes of fixed per-entry header plus whichever optional fields exist.
        int metaDataSize = 2 + size_filename + size_pixelFormat + size_dimensions + size_globalIndex;

        /* Now create the header */
        MemoryStream data = new MemoryStream();
        data.Write(files);

        /* Ok, try to find out data */
        uint sourceOffset = stream.ReadUInt(0x4) + 0x8;

        /* Write each file in the header; payloads start after the 0x24-byte entries. */
        uint offset = 0x2 + ((uint)files * 0x24);
        for (int i = 0; i < files; i++)
        {
            /* Ok, get the size of the GVR file */
            uint length = stream.ReadUInt(sourceOffset + 0x4) + 8;

            /* Make sure this is a valid file length */
            if (sourceOffset + length > stream.Length)
                length -= 16; // For some reason some GVR files are like this.
            if (sourceOffset + length > stream.Length)
                throw new Exception();

            /* Write the offset, file length, and filename */
            data.Write(offset);      // Offset
            data.Write(length + 16); // Length
            if (containsFilename)
                data.Write(stream.ReadString(0xE + (i * metaDataSize), 28), 28); // Filename
            else
                data.Position += 28;

            /* Add the GBIX header */
            data.Position = offset;
            data.Write(GraphicHeader.GBIX);
            data.Write((int)0x8);

            /* Copy the global index */
            if (containsGlobalIndex)
                data.Write(stream.ReadUInt(0xE + size_filename + size_pixelFormat + size_dimensions + (i * metaDataSize)));
            else
                data.Position += 4;

            /* Write out the 0x0 in the header */
            data.Write(new byte[] { 0x0, 0x0, 0x0, 0x0 });

            /* Now copy the file, then return to the entry table for the next record. */
            data.Write(stream, sourceOffset, length);
            data.Position = 0x26 + (i * 0x24);
            // Source payloads are aligned to 16-byte boundaries.
            sourceOffset += length.RoundUp(16);

            /* Increment the offset */
            offset += length + 16;
        }

        return data;
    }
    catch
    {
        /* Something went wrong, so send as blank stream */
        return new MemoryStream();
    }
}
// Loads the single serialized field: the generation mode enum.
public override bool LoadFrom(Stream stream)
{
    Mode = (SirenFieldGenerateMode)stream.ReadUInt();
    return true;
}
/* Get the offsets, lengths, and filenames of all the files */
public override ArchiveFileList GetFileList(ref Stream data)
{
    try
    {
        // Locate the FATB, FNTB and FIMG sections by chaining section sizes.
        uint offset_fatb = data.ReadUShort(0xC);
        uint offset_fntb = offset_fatb + data.ReadUInt(offset_fatb + 0x4);
        uint offset_fimg = offset_fntb + data.ReadUInt(offset_fntb + 0x4);

        // Filenames are considered present when this FNTB field equals 8.
        bool containsFilenames = (data.ReadUInt(offset_fntb + 0x8) == 8);
        uint offset_filename = offset_fntb + 0x10;

        uint files = data.ReadUInt(offset_fatb + 0x8);
        ArchiveFileList fileList = new ArchiveFileList(files);

        for (uint i = 0; i < files; i++)
        {
            // The FATB stores begin/end pairs; length is their difference.
            uint offset = data.ReadUInt(offset_fatb + 0x0C + (i * 0x8));
            uint length = data.ReadUInt(offset_fatb + 0x10 + (i * 0x8)) - offset;

            // Names are length-prefixed strings packed back to back.
            string filename = String.Empty;
            if (containsFilenames)
            {
                byte filename_length = data.ReadByte(offset_filename);
                filename = data.ReadString(offset_filename + 1, filename_length);
                offset_filename += (uint)(filename_length + 1);
            }

            fileList.Entries[i] = new ArchiveFileList.Entry(
                offset + offset_fimg + 0x8, // Offset
                length,                     // Length
                filename);                  // Filename
        }

        return fileList;
    }
    catch
    {
        /* Something went wrong, so return nothing */
        return null;
    }
}
/// <summary>
/// Deserializes this type: base data, attribute, underlying type name,
/// then the paired field name/value entries.
/// </summary>
/// <param name="stream">Source stream positioned at this type's record.</param>
/// <returns>Always true.</returns>
public override bool LoadFrom(Stream stream)
{
    base.LoadFrom(stream);
    Attribute.LoadFrom(stream);

    UnderlyType = stream.ReadString();

    // An entry count precedes the (name, value) pairs; the two lists stay index-aligned.
    uint entryCount = stream.ReadUInt();
    for (int idx = 0; idx < entryCount; idx++)
    {
        FieldNames.Add(stream.ReadString());
        FieldValues.Add(stream.ReadInt());
    }

    return true;
}
/* Get the offsets, lengths, and filenames of all the files */
public override ArchiveFileList GetFileList(ref Stream data)
{
    try
    {
        /* Get the number of files (count stored at 0x30) */
        uint files = data.ReadUInt(0x30);

        /* Create the array of files now */
        ArchiveFileList fileList = new ArchiveFileList(files);

        /* See if the archive contains filenames: an "FLST" chunk placed right after
         * the 0x14-byte-per-file table. The middle comparison guards against the
         * first file's data starting immediately after that table (no room for FLST).
         * NOTE(review): the 0x20 data base and 0x40-byte name slots are assumed from
         * these offsets — confirm against the container spec. */
        bool containsFilenames = (files > 0 && data.ReadUInt(0x3C + (files * 0x14)) + 0x20 != 0x3C + (files * 0x1C) && data.ReadString(0x3C + (files * 0x1C), 4) == "FLST");

        /* Now we can get the file offsets, lengths, and filenames */
        for (uint i = 0; i < files; i++)
        {
            /* Get the offset & length; stored offsets are relative to the 0x20-byte header */
            uint offset = data.ReadUInt(0x40 + (files * 0x14) + (i * 0x8)) + 0x20;
            uint length = data.ReadUInt(0x3C + (files * 0x14) + (i * 0x8));

            /* Check for filenames (fixed 64-byte slots inside the FLST chunk) */
            string filename = String.Empty;
            if (containsFilenames)
                filename = data.ReadString(0x40 + (files * 0x1C) + (i * 0x40), 64);

            /* GIM files can also contain their original filename in the footer,
             * so fall back to that when no FLST name was found. The "MIG" magic
             * identifies a GIM image; the name offset is read from the file's
             * header at +0x24 (rebased by +0x30). */
            if (filename == string.Empty && length > 40 && data.ReadString(offset, 12, false) == GraphicHeader.MIG)
            {
                uint filenameOffset = data.ReadUInt(offset + 0x24) + 0x30;
                if (filenameOffset < length)
                    filename = Path.GetFileNameWithoutExtension(data.ReadString(offset + filenameOffset, (int)(length - filenameOffset)));

                /* Match the extension's case to the recovered name's case */
                if (filename != String.Empty)
                    filename += (filename.IsAllUpperCase() ? ".GIM" : ".gim");
            }

            fileList.Entries[i] = new ArchiveFileList.Entry(
                offset,   // Offset
                length,   // Length
                filename  // Filename
            );
        }

        return fileList;
    }
    catch
    {
        /* Something went wrong, so return nothing */
        return null;
    }
}
/* Get the offsets, lengths, and filenames of all the files */
public override ArchiveFileList GetFileList(ref Stream data)
{
    try
    {
        /* Get the number of files (count stored at 0x4) */
        uint files = data.ReadUInt(0x4);

        /* Create the array of files now */
        ArchiveFileList fileList = new ArchiveFileList(files);

        /* Now we can get the file offsets, lengths, and filenames.
         * Each table record is 0x30 bytes, based at 0x10: extension @ +0x0 (4 bytes),
         * offset @ +0x4, length @ +0x8, Shift-JIS name @ +0x10 (32 bytes). */
        for (uint i = 0; i < files; i++)
        {
            /* Get filename and extension; the name field is Shift_JIS-encoded text */
            string filename = data.ReadString(0x20 + (i * 0x30), 32, Encoding.GetEncoding("Shift_JIS")); // Name
            string fileext = data.ReadString(0x10 + (i * 0x30), 4); // Extension

            /* Recombine name + '.' + extension, dropping whichever part is empty */
            fileList.Entries[i] = new ArchiveFileList.Entry(
                data.ReadUInt(0x14 + (i * 0x30)), // Offset
                data.ReadUInt(0x18 + (i * 0x30)), // Length
                (filename == String.Empty ? String.Empty : filename) + (fileext == string.Empty ? string.Empty : '.' + fileext) // Filename
            );
        }

        return fileList;
    }
    catch
    {
        /* Something went wrong, so return nothing */
        return null;
    }
}
/* Decompress an LZSS-compressed stream (0x1000-byte ring buffer, initial
 * window position 0xFEE, flag byte per 8 tokens, LSB-first).
 *
 * Header layout (16 bytes): compressed size @ 0x4, decompressed size @ 0x8;
 * payload starts at 0x10.
 *
 * Returns the decompressed data, or null if decompression failed. */
public override MemoryStream Decompress(ref Stream data)
{
    try
    {
        // Compressed & Decompressed Data Information
        uint compressedSize = data.ReadUInt(0x4);
        uint decompressedSize = data.ReadUInt(0x8);

        byte[] compressedData = data.ToByteArray();
        byte[] decompressedData = new byte[decompressedSize];
        byte[] ringBuffer = new byte[0x1000];

        uint sourcePointer = 0x10; // payload begins after the 16-byte header
        uint destPointer = 0x0;
        uint bufferPointer = 0xFEE; // conventional LZSS starting window position

        // Start Decompression
        while (sourcePointer < compressedSize && destPointer < decompressedSize)
        {
            byte flag = compressedData[sourcePointer]; // compression flags, one bit per token
            sourcePointer++;

            for (int i = 0; i < 8; i++)
            {
                if ((flag & (1 << i)) > 0) // Data is not compressed: copy literal byte
                {
                    decompressedData[destPointer] = compressedData[sourcePointer];
                    ringBuffer[bufferPointer] = decompressedData[destPointer];
                    sourcePointer++;
                    destPointer++;
                    bufferPointer = (bufferPointer + 1) & 0xFFF;
                }
                else // Data is compressed: (offset, length) back-reference into the ring buffer
                {
                    int offset = ((((compressedData[sourcePointer + 1] >> 4) & 0xF) << 8) | compressedData[sourcePointer]);
                    int amount = (compressedData[sourcePointer + 1] & 0xF) + 3;
                    sourcePointer += 2;

                    for (int j = 0; j < amount; j++)
                    {
                        byte value = ringBuffer[(offset + j) & 0xFFF];

                        // BUG FIX: clamp the copy to the declared output size. Previously a
                        // run that overshot DecompressedSize threw IndexOutOfRangeException,
                        // which the catch below swallowed — the whole call returned null even
                        // though all output produced so far was valid. The ring buffer is
                        // still updated so subsequent references stay consistent.
                        if (destPointer + j < decompressedSize)
                            decompressedData[destPointer + j] = value;

                        ringBuffer[bufferPointer] = value;
                        bufferPointer = (bufferPointer + 1) & 0xFFF;
                    }
                    destPointer += (uint)amount;
                }

                // Check for out of range before consuming the next token
                if (sourcePointer >= compressedSize || destPointer >= decompressedSize)
                    break;
            }
        }

        return new MemoryStream(decompressedData);
    }
    catch
    {
        return null; // An error occurred while decompressing
    }
}
/// <summary>
/// Feeds a chunk of video data downstream. RTMP/LiveFLV input is buffered until the
/// full packet has arrived, then split into H.264 NAL units (each prefixed by a 4-byte
/// size field) and fed one NAL at a time via RTP FU-A; any other input is passed through.
/// </summary>
/// <param name="pData">Stream holding this chunk's payload.</param>
/// <param name="dataLength">Length of this chunk.</param>
/// <param name="processedLength">Bytes of the packet already delivered in prior chunks.</param>
/// <param name="totalLength">Total packet length (chunk is last when processed + data == total).</param>
/// <param name="absoluteTimestamp">Packet timestamp; composition offset is added for RTMP input.</param>
/// <param name="isAudio">Unused here; part of the shared feed signature.</param>
/// <returns>True to keep the stream alive (including on "bogus" packets); false on feed failure.</returns>
protected override bool FeedDataVideo(Stream pData, uint dataLength, uint processedLength, uint totalLength, uint absoluteTimestamp, bool isAudio)
{
    var pos = pData.Position;
    _videoBytesCount += dataLength;
    _videoPacketsCount++;

    //1. Test and see if this is an inbound RTMP stream. If so,
    //we have to strip out the RTMP 9 bytes header
    var inStreamType = InStream.Type;
    if ((inStreamType == ST_IN_NET_RTMP) || (inStreamType == ST_IN_NET_LIVEFLV))
    {
        //2. Test and see if we have a brand new packet
        if (processedLength == 0)
        {
            //3. This must be a payload packet, not codec setup: the second tag byte
            //   is the AVC packet type, and only 1 (NALU) is forwarded.
            pData.ReadByte();
            if (pData.ReadByte() != 1)
                return true;

            //4. since this is a brand new packet, empty previous buffer
            _videoBuffer.IgnoreAll();
            pData.Position -= 2; // rewind the two peeked bytes before buffering
        }

        //5. Store the data into the buffer
        pData.CopyPartTo(_videoBuffer,(int) dataLength);

        //6. Test and see if this is the last chunk of the RTMP packet
        if (dataLength + processedLength == totalLength)
        {
            //7. This is the last chunk. Point at the assembled buffer instead
            pData = _videoBuffer;
            pData.Position = 0;
            dataLength = (uint) _videoBuffer.GetAvaliableByteCounts();

            //8. We must have at least 9 bytes (RTMP header size)
            if (dataLength < 9)
            {
                WARN("Bogus packet");
                return true;
            }

            //9. Read the composition timestamp and add it to the
            //absolute timestamp. It is the 24-bit field following the frame-type byte.
            //NOTE(review): the low-24-bit mask assumes ReadUInt yields the 3 CTS bytes
            //in the low bits — confirm the helper's endianness.
            pData.Position = 1;
            var compositionTimeStamp = (pData.ReadUInt()) & 0x00ffffff;
            absoluteTimestamp += compositionTimeStamp;

            //10. Ignore RTMP header and composition offset
            dataLength -= 5;
            uint nalSize = 0;
            //uint32_t tsIncrement = 0;

            //11. Start looping over the RTMP payload. Each NAL has a 4 bytes
            //header indicating the length of the following NAL
            while (dataLength >= 4)
            {
                //12. Read the nal size and compare it to the actual amount
                //of data remaining on the buffer
                nalSize = pData.ReadUInt();
                pos = pData.Position;
                if (nalSize > (dataLength - 4))
                {
                    WARN("Bogus packet");
                    return true;
                }

                //13. skip the NAL size field (already consumed by ReadUInt)
                dataLength -= 4;

                //14. Is this a 0 sized NAL? If so, skip it
                if (nalSize == 0)
                    continue;

                //15. Feed the NAL unit using RTP FUA.
                //NOTE(review): assumes FeedDataVideoFUA advances pData past the NAL
                //bytes it consumes — verify, as nothing here repositions the stream.
                if (!FeedDataVideoFUA(pData, nalSize, 0, nalSize, absoluteTimestamp))
                {
                    //+ (double) tsIncrement / 90000.00)) {
                    FATAL("Unable to feed data");
                    return false;
                }

                //16. move to the next NAL
                dataLength -= nalSize;
            }
        }
        return true;
    }
    else
    {
        //17. This is NAL stream. Feed it as it is
        return FeedDataVideoFUA(pData, dataLength, processedLength, totalLength,absoluteTimestamp);
    }
}
/* Reads the file table: one 0x24-byte record per entry containing
 * offset, length, and a 28-character name. Returns null on any parse error. */
public override ArchiveFileList GetFileList(ref Stream data)
{
    try
    {
        // Entry count lives in the first two bytes of the archive.
        ushort entryCount = data.ReadUShort(0x0);
        ArchiveFileList list = new ArchiveFileList(entryCount);

        for (int index = 0; index < entryCount; index++)
        {
            int recordBase = index * 0x24;

            // Record layout: offset @ +0x2, length @ +0x6, name @ +0xA (28 bytes).
            string name = data.ReadString(0xA + recordBase, 28);

            // Empty names stay empty; otherwise append a .PVR extension whose
            // case follows the name's case.
            string filename;
            if (name == String.Empty)
                filename = String.Empty;
            else
                filename = name + (name.IsAllUpperCase() ? ".PVR" : ".pvr");

            list.Entries[index] = new ArchiveFileList.Entry(
                data.ReadUInt(0x2 + recordBase), // Offset
                data.ReadUInt(0x6 + recordBase), // Length
                filename                         // Filename
            );
        }

        return list;
    }
    catch
    {
        /* Something went wrong, so return nothing */
        return null;
    }
}
/// <summary>
/// Deserializes this custom class: base data, attribute, base-type name,
/// then its nested types, and finally its fields.
/// </summary>
/// <param name="stream">Source stream positioned at this class's record.</param>
/// <returns>Always true.</returns>
public override bool LoadFrom(Stream stream)
{
    base.LoadFrom(stream);
    Attribute.LoadFrom(stream);
    BaseTypeName = stream.ReadString();

    // Nested types: each record starts with a tag byte (1 = class, anything else = enum).
    uint typeCount = stream.ReadUInt();
    for (int typeIndex = 0; typeIndex < typeCount; typeIndex++)
    {
        byte tag = (byte)stream.ReadByte();
        if (tag == 1)
        {
            SirenCustomClass nested = new SirenCustomClass();
            nested.LoadFrom(stream);
            nested.Parent = this;
            Types.Add(nested.Name, nested);
        }
        else
        {
            SirenCustomEnum nested = new SirenCustomEnum();
            nested.LoadFrom(stream);
            nested.Parent = this;
            Types.Add(nested.Name, nested);
        }
    }

    // Fields: each field's Index mirrors its insertion order in FieldNameDict.
    uint fieldCount = stream.ReadUInt();
    for (int fieldIndex = 0; fieldIndex < fieldCount; fieldIndex++)
    {
        SirenField field = new SirenField();
        field.LoadFrom(stream);
        field.ParentType = this;
        FieldNameDict.Add(field.Name, field);
        field.Index = (ushort)(FieldNameDict.Count - 1);
    }

    return true;
}
/* To simplify the process greatly, we are going to convert
 * the Storybook Archive to a new format */
public override MemoryStream TranslateData(ref Stream stream)
{
    try
    {
        /* Get the number of files (stored big-endian, hence the byte swap) */
        uint files = stream.ReadUInt(0x0).SwapEndian();

        /* Now create the header */
        MemoryStream data = new MemoryStream();
        data.Write(files);

        /* Write each file in the header.
         * Translated header entries are 0x2C bytes each (offset + length + 36-byte name);
         * source table entries are 0x30 bytes each, starting at 0x10.
         * NOTE(review): the first data offset 0xC + files * 0x2C leaves 8 bytes after the
         * last header entry (which ends at 0x4 + files * 0x2C) — presumably deliberate
         * padding; confirm readers of the translated format expect it. */
        uint offset = 0xC + (files * 0x2C);
        for (int i = 0; i < files; i++)
        {
            /* Decompressed length is at +0x2C of the source entry */
            uint length = stream.ReadUInt(0x3C + (i * 0x30)).SwapEndian();
            data.Write(offset); // Offset
            data.Write(length); // Length
            data.Write(stream.ReadString(0x10 + (i * 0x30), 36), 36); // Filename

            /* Let's write the decompressed data. The source holds a PRS-compressed
             * blob located by (sourceOffset, sourceLength) at +0x24/+0x28 of the entry. */
            uint sourceOffset = stream.ReadUInt(0x34 + (i * 0x30)).SwapEndian();
            uint sourceLength = stream.ReadUInt(0x38 + (i * 0x30)).SwapEndian();
            Stream compressedData = stream.Copy(sourceOffset, sourceLength);

            /* Decompress the data; a null result aborts the whole translation
             * (caught below, returning an empty stream) */
            PRS decompressor = new PRS();
            MemoryStream decompressedData = decompressor.Decompress(ref compressedData, length);
            if (decompressedData == null)
                throw new Exception();

            /* Write the data at its payload offset, then seek back to the start of the
             * next header entry (entry i ends at 0x30 + i * 0x2C, counting the 4-byte
             * file count) */
            data.Position = offset;
            data.Write(decompressedData);
            data.Position = 0x30 + (i * 0x2C);
            decompressedData.Close();

            offset += length;
        }

        return data;
    }
    catch
    {
        /* Something went wrong, so send as blank stream */
        return new MemoryStream();
    }
}