/// <summary>
/// Commits the edits to the current archive file and disposes of this class.
/// </summary>
public override void Commit()
{
    if (m_disposed)
        throw new ObjectDisposedException(GetType().FullName);

    // Capture the final key range before tearing the tree and streams down.
    GetKeyRange(m_sortedTreeFile.m_firstKey, m_sortedTreeFile.m_lastKey);

    // Flush pending tree writes, then release each resource exactly once.
    m_tree?.Flush();
    m_tree = null;

    m_binaryStream1?.Dispose();
    m_binaryStream1 = null;

    m_subStream?.Dispose();
    m_subStream = null;

    m_currentTransaction.CommitAndDispose();
    InternalDispose();
}
/// <summary>
/// Releases the wrapped input exactly once; subsequent calls are no-ops.
/// </summary>
public void Dispose()
{
    if (disposed)
        return;
    m_input.Dispose();
    disposed = true;
}
/// <summary>
/// Attempts to open an obfuscated RIFF/WAVE audio file whose signature bytes
/// ("RIFF", "WAVEfmt ") have been zeroed out: validates the zeroed layout,
/// rebuilds a genuine 16-byte RIFF/WAVE/fmt prefix, and delegates to the WAV decoder.
/// Returns null when the file does not match this layout.
/// </summary>
public override SoundInput TryOpen(IBinaryStream file) {
    var header = file.ReadHeader(0x12);
    // Bytes 0..3 must be zeroed (where "RIFF" normally lives).
    if (!header.AsciiEqual("\0\0\0\0")) { return(null); }
    // Bytes 4..7 still hold the RIFF chunk length; it must match the file size.
    int riff_length = header.ToInt32(4);
    if (file.Length != riff_length + 8) { return(null); }
    // Bytes 8..0xF must be zeroed (where "WAVE" + "fmt " normally live).
    if (!header.AsciiEqual(8, "\0\0\0\0\0\0\0\0")) { return(null); }
    // 'fmt ' chunk length at 0x10; sanity-check it against the RIFF length.
    int header_length = header.ToUInt16(0x10);
    if (header_length < 0x10 || header_length > riff_length) { return(null); }
    // Re-read enough of the file to verify the "data" chunk tag follows the fmt chunk.
    header = file.ReadHeader(0x18 + header_length);
    if (!header.AsciiEqual(0x14 + header_length, "data")) { return(null); }
    // Reconstruct the canonical first 16 bytes, keeping the original RIFF length
    // from offsets 4..7 of the obfuscated header.
    var header_bytes = new byte[0x10] { (byte)'R', (byte)'I', (byte)'F', (byte)'F', header[4], header[5], header[6], header[7], (byte)'W', (byte)'A', (byte)'V', (byte)'E', (byte)'f', (byte)'m', (byte)'t', (byte)' ' };
    // Splice the rebuilt prefix onto the remainder of the file (from 0x10 on)
    // and let the standard WAV reader parse the result.
    Stream riff = new StreamRegion(file.AsStream, 0x10);
    riff = new PrefixStream(header_bytes, riff);
    var wav = new BinaryStream(riff, file.Name);
    try { return(Wav.TryOpen(wav)); } catch { wav.Dispose(); throw; }
}
/// <summary>
/// Standard dispose pattern: releases managed resources only when invoked
/// from <c>Dispose()</c> (disposing == true); idempotent via the _disposed flag.
/// </summary>
/// <param name="disposing">True when called from Dispose(), false from a finalizer.</param>
protected virtual void Dispose(bool disposing)
{
    if (_disposed)
        return;

    if (disposing)
    {
        // Release managed state.
        fileReader.Dispose();
    }

    // Release any unmanaged resources here.
    _disposed = true;
}
/// <summary>
/// Loads a BFLYT layout file: reads the header, then every declared section.
/// </summary>
/// <param name="filename">Path of the layout file to open.</param>
public BFLYT(ref string filename)
{
    BinaryStream s = new BinaryStream(new FileStream(filename, FileMode.Open));
    try
    {
        header = new Header(ref s);
        for (int i = 0; i < header.sectionCount; i++)
        {
            ReadSections(ref s);
        }
        System.Windows.Forms.MessageBox.Show($"[{s.BaseStream.Position}] stream end");
        System.Windows.Forms.MessageBox.Show($"Done");
        s.Flush();
    }
    finally
    {
        // BUG FIX: the original never disposed the stream when Header/ReadSections
        // threw, leaking the underlying FileStream (and its OS file handle).
        s.Dispose();
    }
}
/// <summary>
/// Opens an archived bitmap whose "BM" signature has been stripped:
/// prepends the two signature bytes and hands the result to the image decoder.
/// </summary>
public override IImageDecoder OpenImage(ArcFile arc, Entry entry)
{
    // Restore the two-byte bitmap signature the archive omits.
    var signature = new byte[2] { (byte)'B', (byte)'M' };
    var body = arc.File.CreateStream(entry.Offset, entry.Size);
    var bin = new BinaryStream(new PrefixStream(signature, body), entry.Name);
    try
    {
        return new ImageFormatDecoder(bin);
    }
    catch
    {
        // Decoder construction failed; don't leak the stream chain.
        bin.Dispose();
        throw;
    }
}
/// <summary>
/// Reads the payload from <paramref name="bs"/>, transparently un-gzipping it
/// when the stream starts with a gzip header, then forwards the (decompressed
/// or raw) bytes to <c>ReadPayload</c>.
/// NOTE(review): this method disposes the caller-supplied <paramref name="bs"/> —
/// callers must not use the stream afterwards.
/// </summary>
public override void Read(BinaryStream bs) {
    using (BinaryStream payload = new BinaryStream()) {
        // check if compressed:
        // 559903 == 0x088B1F, i.e. the bytes 1F 8B 08 read little-endian —
        // the gzip magic (0x1F 0x8B) followed by the deflate method byte (0x08).
        uint magic = bs.Read.UInt();
        bs.ByteOffset = 0;
        if (magic == 559903) {
            try {
                Util.Common.UnGzipUnknownTargetSize(bs, payload);
                Compressed = true;
            } catch {
                // Decompression failed: fall back to treating the stream as raw bytes.
                // NOTE(review): bs.ByteOffset is not reset here — assumes the failed
                // un-gzip left the source position usable; confirm against the helper.
                payload.ByteOffset = 0;
                payload.Write.ByteArray(bs.Read.ByteArray((int)bs.Length));
            }
        } else {
            // Not gzipped: copy the stream verbatim.
            payload.Write.ByteArray(bs.Read.ByteArray((int)bs.Length));
            Console.WriteLine(payload.Length);
        }
        // The original stream is no longer needed in either path.
        bs.Dispose();
        payload.ByteOffset = 0;
        if (payload.Length == 0) {
            // Nothing to parse; make the flag reflect that no payload was produced.
            Compressed = false;
            return;
        }
        ReadPayload(payload);
    }
}
/// <summary>
/// Opens an archived bitmap whose first two bytes were replaced with the
/// obfuscated marker 0xB2BD (bitwise-NOT of "BM"): skips the marker, prepends
/// the real bitmap header bytes, and decodes. Falls back to the base
/// implementation for entries without the marker.
/// </summary>
public override IImageDecoder OpenImage(ArcFile arc, Entry entry)
{
    // Entries without the obfuscated marker are handled by the base class.
    if (arc.File.View.ReadUInt16(entry.Offset) != 0xB2BD) // ~'BM'
        return base.OpenImage(arc, entry);

    // Skip the 2 marker bytes and splice the genuine header back on.
    var body = arc.File.CreateStream(entry.Offset + 2, entry.Size - 2);
    var bitmap = new BinaryStream(new PrefixStream(BitmapHeader, body), entry.Name);
    try
    {
        return new ImageFormatDecoder(bitmap);
    }
    catch
    {
        // Decoder construction failed; don't leak the stream chain.
        bitmap.Dispose();
        throw;
    }
}
/// <summary>
/// Benchmarks BinaryStream over a SubFileStream inside an in-memory
/// transactional file structure, and checks for memory-pool leaks before and after.
/// </summary>
public void TestSubFileStream()
{
    const int BlockSize = 256;
    MemoryPoolTest.TestMemoryLeak();
    //string file = Path.GetTempFileName();
    //System.IO.File.Delete(file);
    try
    {
        //using (FileSystemSnapshotService service = FileSystemSnapshotService.CreateFile(file))
        using (TransactionalFileStructure structure = TransactionalFileStructure.CreateInMemory(BlockSize))
        using (TransactionalEdit transaction = structure.BeginEdit())
        {
            SubFileStream subFile = transaction.CreateFile(SubFileName.Empty);
            BinaryStream stream = new BinaryStream(subFile);

            // Fill the sub-file with 20M longs, then benchmark reads from the start.
            for (int i = 0; i < 20000000; i++)
                stream.Write(1L);
            stream.Position = 0;
            BinaryStreamBenchmark.Run(stream, false);

            stream.Dispose();
            subFile.Dispose();
            transaction.CommitAndDispose();
        }
    }
    finally
    {
        //System.IO.File.Delete(file);
    }
    MemoryPoolTest.TestMemoryLeak();
}
/// <summary>
/// Reads an MPRJ binary project file and returns its root node.
/// Returns null for text-format files ("Proj" magic), unknown magics,
/// and unsupported versions (anything other than 0 or 1).
/// </summary>
public mNode Read()
{
    using var file = File.Open(FileName, FileMode.Open);
    Stream = new BinaryStream(file, ByteConverter.Big);
    try
    {
        string magic = Stream.ReadString(4);
        if (magic == "Proj")
        {
            // Text version
            return null;
        }
        else if (magic != "MPRJ")
        {
            Console.WriteLine($"Not a MPRJ Binary file.");
            return null;
        }

        Version = (byte)Stream.DecodeBitsAndAdvance();
        if (Version != 0 && Version != 1)
        {
            Console.WriteLine($"Unsupported MPRJ Version {Version}.");
            return null;
        }

        var rootPrjNode = new mNode();
        rootPrjNode.IsRoot = true;

        // For version 0 there is no scope-type byte; version 1 carries one.
        if (Version == 1)
        {
            Stream.Position += 1; // Skip scope type
        }

        Console.WriteLine($"MPRJ Version: {Version}");
        // The node tree pulls its data through this.Stream.
        rootPrjNode.Read(this);
        return rootPrjNode;
    }
    finally
    {
        // BUG FIX: the original only disposed Stream on the success path,
        // leaking the BinaryStream wrapper on every early return.
        Stream.Dispose();
    }
}
/// <summary>
/// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources.
/// </summary>
/// <filterpriority>2</filterpriority>
public void Dispose()
{
    if (m_disposed)
        return;

    try
    {
        m_binaryStream?.Dispose();
        m_subStream?.Dispose();
    }
    finally
    {
        // Even if a Dispose above throws, drop all references and
        // mark this instance disposed so the call is never repeated.
        m_subStream = null;
        m_binaryStream = null;
        m_tree = null;
        m_disposed = true;
    }
}
/// <summary>
/// Exercises commit/rollback isolation of the in-memory transactional file
/// structure: a committed write must stay visible to existing read snapshots,
/// an abandoned (disposed-without-commit) edit must roll back, and the memory
/// pool must report zero allocated bytes before and after.
/// </summary>
public void Test() {
    Assert.AreEqual(Globals.MemoryPool.AllocatedBytes, 0L);
    //string file = Path.GetTempFileName();
    //System.IO.File.Delete(file);
    try {
        //using (FileSystemSnapshotService service = FileSystemSnapshotService.CreateFile(file))
        using (TransactionalFileStructure service = TransactionalFileStructure.CreateInMemory(BlockSize)) {
            // Transaction 1: create a sub-file and commit a single byte (1).
            using (TransactionalEdit edit = service.BeginEdit()) {
                SubFileStream fs = edit.CreateFile(SubFileName.CreateRandom());
                BinaryStream bs = new BinaryStream(fs);
                bs.Write((byte)1);
                bs.Dispose();
                fs.Dispose();
                edit.CommitAndDispose();
            }
            {
                // A read snapshot taken now must see the committed byte.
                ReadSnapshot read = service.Snapshot;
                SubFileStream f1 = read.OpenFile(0);
                BinaryStream bs1 = new BinaryStream(f1);
                if (bs1.ReadUInt8() != 1) {
                    throw new Exception();
                }
                // Transaction 2: write a 3, but dispose the edit WITHOUT committing.
                using (TransactionalEdit edit = service.BeginEdit()) {
                    SubFileStream f2 = edit.OpenFile(0);
                    BinaryStream bs2 = new BinaryStream(f2);
                    if (bs2.ReadUInt8() != 1) {
                        throw new Exception();
                    }
                    bs2.Write((byte)3);
                    bs2.Dispose();
                }
                //rollback should be issued;
                // The old snapshot reads past the committed byte: position 1 was
                // never committed, so it must read back as 0, not 3.
                if (bs1.ReadUInt8() != 0) {
                    throw new Exception();
                }
                bs1.Dispose();
                {
                    // A fresh snapshot must also show (1, 0) — transaction 2 left no trace.
                    ReadSnapshot read2 = service.Snapshot;
                    SubFileStream f2 = read2.OpenFile(0);
                    BinaryStream bs2 = new BinaryStream(f2);
                    if (bs2.ReadUInt8() != 1) {
                        throw new Exception();
                    }
                    if (bs2.ReadUInt8() != 0) {
                        throw new Exception();
                    }
                    bs2.Dispose();
                }
            }
            // Transaction 3: write 13, 23 and roll back explicitly.
            using (TransactionalEdit edit = service.BeginEdit()) {
                SubFileStream f2 = edit.OpenFile(0);
                BinaryStream bs2 = new BinaryStream(f2);
                bs2.Write((byte)13);
                bs2.Write((byte)23);
                bs2.Dispose();
                edit.RollbackAndDispose();
            }
            //rollback should be issued;
        }
    } finally {
        //System.IO.File.Delete(file);
    }
    // Everything was disposed: the pool must be back to zero allocated bytes.
    Assert.AreEqual(Globals.MemoryPool.AllocatedBytes, 0L);
    Assert.IsTrue(true);
}
/// <summary>
/// Parses a packed, obfuscated database patch file: reads the header, XOR-
/// deobfuscates the payload, inflates it (zlib), then reads table / field /
/// row metadata, loads all rows in parallel, and finally resolves the unique
/// variable-length data entries referenced by data-typed columns.
/// NOTE(review): disposes the caller-supplied <paramref name="bs"/>.
/// </summary>
public override void Read(BinaryStream bs) {
    Stopwatch sw = new Stopwatch();
    sw.Start();
    // read header
    HeaderInfo headerInfo = bs.Read.Type <HeaderInfo>();
    this.Patch = headerInfo.patchName;
    this.Timestamp = new DateTime();
    this.Timestamp = Util.Time.DateTimeFromUnixTimestampMicroseconds((long)headerInfo.timestamp);
    this.Flags = headerInfo.flags;
    this.fileVersion = headerInfo.version;
    //Console.WriteLine("read: "+sw.ElapsedMilliseconds);
    sw.Restart();
    // deobfuscate: the payload is XORed with an MT stream seeded from the
    // FNV32 hash of the patch name.
    byte[] data = bs.Read.ByteArray((int)headerInfo.payloadSize);
    MTXor(Checksum.FFnv32(headerInfo.patchName), ref data);
    //Console.WriteLine("dxor: " + sw.ElapsedMilliseconds);
    sw.Restart();
    // cleanup memory, the original stream is not needed anymore
    bs.Dispose();
    bs = null;
    GC.Collect();
    // read compression header
    // uint inflated size
    // uint padding
    // ushort 0x78 0x01 zlib deflate low/no compression
    uint inflatedSize = UIntFromBufferLE(ref data);
    ushort ds = UShortFromBufferLE(ref data, 8);
    byte[] inflated = new byte[inflatedSize];
    // Inflate starting past the 10-byte compression header.
    Inflate(data, ref inflated, SharpCompress.Compressors.Deflate.CompressionLevel.BestSpeed, (int)inflatedSize, 10);
    BinaryStream ibs = new BinaryStream(new MemoryStream((inflated)));
    data = null;
    //Console.WriteLine("infl: " + sw.ElapsedMilliseconds);
    sw.Restart();
    // cleanup memory, the deobfuscated stream is not needed anymore
    GC.Collect();
    // read table header
    this.tableVersion = ibs.Read.UInt();
    ushort indexLength = ibs.Read.UShort();
    // read table info
    TableInfo[] tableInfos = new TableInfo[indexLength];
    for (ushort i = 0; i < indexLength; i++) {
        tableInfos[i] = ibs.Read.Type <TableInfo>();
    }
    // read field info (one FieldInfo array per table)
    FieldInfo[][] fieldInfos = new FieldInfo[indexLength][];
    for (int i = 0; i < indexLength; i++) {
        fieldInfos[i] = new FieldInfo[tableInfos[i].numFields];
        for (int x = 0; x < tableInfos[i].numFields; x++) {
            fieldInfos[i][x] = ibs.Read.Type <FieldInfo>();
        }
    }
    // read row info
    RowInfo[] rowInfos = new RowInfo[indexLength];
    for (ushort i = 0; i < indexLength; i++) {
        rowInfos[i] = ibs.Read.Type <RowInfo>();
    }
    // build tables: columns with padding fix-ups, plus the nullable-column map
    Tables = new List <Table>(indexLength);
    for (ushort i = 0; i < indexLength; i++) {
        TableInfo tableInfo = tableInfos[i];
        FieldInfo[] fieldInfo = fieldInfos[i];
        RowInfo rowInfo = rowInfos[i];
        // setup table
        Table table = new Table();
        table.Id = tableInfo.id;
        // add fields
        table.Columns = new List <Column>(tableInfo.numFields);
        int currentWidth = 0;
        for (int x = 0; x < tableInfo.numFields; x++) {
            Column field = new Column();
            field.Id = fieldInfos[i][x].id;
            field.Type = (DBType)fieldInfos[i][x].type;
            // fix removed fields? (weird padding some places)
            // When a field's declared start offset is past the accumulated width,
            // record the gap as padding so row reads can skip it.
            if (fieldInfo[x].start != currentWidth) {
                int padding = fieldInfo[x].start - currentWidth;
                field.Padding = padding;
                currentWidth += padding;
            }
            currentWidth += DBTypeLength((DBType)fieldInfo[x].type);
            table.Columns.Add(field);
        }
        // if any, add nullable fields
        // (nullableIndex == 255 marks a non-nullable field)
        if (tableInfo.nullableBitfields != 0) {
            int count = 0;
            for (int x = 0; x < tableInfo.numFields; x++) {
                if (fieldInfos[i][x].nullableIndex != 255) {
                    count++;
                }
            }
            Column[] nullableColumns = new Column[count];
            for (int x = 0; x < tableInfo.numFields; x++) {
                if (fieldInfos[i][x].nullableIndex != 255) {
                    nullableColumns[fieldInfos[i][x].nullableIndex] = table.Columns[x];
                }
            }
            table.NullableColumn = new List <Column>(nullableColumns);
        } else {
            table.NullableColumn = new List <Column>();
        }
        Tables.Add(table);
    }
    //Console.WriteLine("tabl: " + sw.ElapsedMilliseconds);
    sw.Restart();
    // read rows: worker threads pull table indices from a shared queue; each
    // worker gets its own BinaryStream over the shared inflated buffer so
    // seeking is thread-local.
    ConcurrentQueue <int> tableRowsReadQueue = new ConcurrentQueue <int>();
    for (ushort i = 0; i < indexLength; i++) {
        tableRowsReadQueue.Enqueue(i);
    }
    Parallel.For(0, numThreads, new ParallelOptions { MaxDegreeOfParallelism = numThreads }, q => {
        BinaryStream dbs = new BinaryStream(new MemoryStream(inflated));
        while (tableRowsReadQueue.Count != 0) {
            int i;
            if (!tableRowsReadQueue.TryDequeue(out i)) {
                continue;
            }
            TableInfo tableInfo = tableInfos[i];
            FieldInfo[] fieldInfo = fieldInfos[i];
            RowInfo rowInfo = rowInfos[i];
            Tables[i].Rows = new List <Row>();
            for (int y = 0; y < rowInfo.rowCount; y++) {
                Row row = new Row(tableInfo.numFields);
                // Seek to this row: rowOffset + fixed row stride * index + first field start.
                dbs.ByteOffset = rowInfo.rowOffset + (tableInfo.numBytes * y) + fieldInfo[0].start;
                for (int z = 0; z < tableInfo.numFields; z++) {
                    if (Tables[i].Columns[z].Padding != 0) {
                        dbs.ByteOffset += Tables[i].Columns[z].Padding;
                    }
                    // just read the basic type now, unpack & decrypt later to reduce seeking
                    row.Fields.Add(ReadDBType(dbs, (DBType)fieldInfo[z].type));
                }
                // null out nulls again :P
                // A bit array after the fields flags which nullable columns are null.
                if (tableInfo.nullableBitfields > 0) {
                    byte[] nulls = dbs.Read.BitArray(tableInfo.nullableBitfields * 8);
                    for (int n = 0; n < Tables[i].NullableColumn.Count; n++) {
                        if (nulls[n] == 1) {
                            int index = Tables[i].Columns.IndexOf(Tables[i].NullableColumn[n]);
                            row[index] = null;
                        }
                    }
                }
                Tables[i].Rows.Add(row);
            }
        }
    });
    inflated = null;
    //Console.WriteLine("rows: " + sw.ElapsedMilliseconds);
    sw.Restart();
    // seek to the very end of the tables/start of data
    RowInfo lri = rowInfos[rowInfos.Length - 1];
    TableInfo lti = tableInfos[tableInfos.Length - 1];
    ibs.ByteOffset = lri.rowOffset + (lri.rowCount * lti.numBytes);
    // copy the data to a new stream
    int dataLength = (int)(ibs.Length - ibs.ByteOffset);
    byte[] dataBlock = ibs.Read.ByteArray(dataLength);
    // cleanup
    ibs.Dispose();
    ibs = null;
    GC.Collect();
    // get unique data entry keys: data-typed cells store a uint key into the
    // trailing data block; collect each distinct key once.
    HashSet <uint> uniqueKeys = new HashSet <uint>();
    ConcurrentQueue <uint> uniqueQueue = new ConcurrentQueue <uint>();
    uniqueEntries = new Dictionary <uint, byte[]>();
    for (int i = 0; i < Tables.Count; i++) {
        for (int x = 0; x < Tables[i].Columns.Count; x++) {
            DBType type = Tables[i].Columns[x].Type;
            if (IsDataType(type)) {
                for (int y = 0; y < Tables[i].Rows.Count; y++) {
                    uint?k = (uint?)Tables[i].Rows[y][x];
                    if (k != null) {
                        if (!uniqueKeys.Contains((uint)k)) {
                            uniqueKeys.Add((uint)k);
                            uniqueQueue.Enqueue((uint)k);
                        }
                    }
                }
            }
        }
    }
    //Console.WriteLine("uniq: " + sw.ElapsedMilliseconds);
    sw.Restart();
    // unpack & decrypt unique data entries to cache
    // (the dictionary is guarded by a lock; each worker has its own stream)
    Parallel.For(0, numThreads, new ParallelOptions { MaxDegreeOfParallelism = numThreads }, i => {
        BinaryStream dbs = new BinaryStream(new MemoryStream(dataBlock));
        while (uniqueQueue.Count != 0) {
            uint key;
            if (!uniqueQueue.TryDequeue(out key)) {
                continue;
            }
            byte[] d = GetDataEntry(dbs, key);
            lock (uniqueEntries) {
                uniqueEntries.Add(key, d);
            }
        }
        dbs.Dispose();
    });
    dataBlock = null;
    //Console.WriteLine("upac: " + sw.ElapsedMilliseconds);
    sw.Restart();
    // copy data entries to the tables from cache: replace each key cell with
    // the decoded value (or null when the key is absent or its entry is null).
    for (int z = 0; z < Tables.Count; z++) {
        for (int x = 0; x < Tables[z].Columns.Count; x++) {
            DBType type = Tables[z].Columns[x].Type;
            if (IsDataType(type)) {
                Parallel.For(0, Tables[z].Rows.Count, y => {
                    uint?k = (uint?)Tables[z].Rows[y][x];
                    object obj = null;
                    if (k != null) {
                        if (uniqueEntries.ContainsKey((uint)k)) {
                            byte[] d = uniqueEntries[(uint)k];
                            if (d != null) {
                                obj = BytesToDBType(type, d);
                            }
                        }
                    }
                    Tables[z].Rows[y][x] = obj;
                });
            }
        }
    }
    //Console.WriteLine("assi: " + sw.ElapsedMilliseconds);
    sw.Restart();
    // cleanup :>
    uniqueKeys = null;
    uniqueQueue = null;
    //uniqueEntries = null; // dont clean up these in case you need to look up data entries post load
    headerInfo = null;
    tableInfos = null;
    fieldInfos = null;
    rowInfos = null;
    GC.Collect();
}