private int ReadBytes(byte[] buffer, int offset, int count, bool compute)
{
    var t = 0;
    do
    {
        // advance by the bytes already read so a short read does not overwrite earlier data
        var r = inner.Read(buffer, offset + t, count);
        if (r == 0)
        {
            break;
        }
        if (compute)
        {
            for (var i = 0; i < r; i++)
            {
                crc32.Add(buffer[offset + t + i]);
            }
        }
        count -= r;
        t += r;
    } while (count > 0);
    return t;
}
/// <summary>
/// Commits the provided transaction
/// </summary>
public void CommitTransaction(ref TransactionToken token)
{
    AssertionFailedException.Assert(token.State == StateOpen);
    token.State = StateCommitted;

    MemoryStream buffer = token.Object as MemoryStream;
    if (buffer == null)
    {
        return; // nothing to commit
    }

    byte[] bytes = buffer.GetBuffer();
    Crc32 crc = new Crc32();
    crc.Add(bytes, 4, (int)buffer.Position - 4);
    PrimitiveSerializer.Int32.WriteTo(crc.Value, buffer);

    int len = (int)buffer.Position;
    PrimitiveSerializer.Int32.WriteTo((0xee << 24) + len, buffer);
    buffer.Position = 0;
    PrimitiveSerializer.Int32.WriteTo((0xbb << 24) + len, buffer);

    bytes = buffer.GetBuffer();
    WriteBytes(bytes, 0, len + 4);
}
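The record layout produced by CommitTransaction above (and parsed by EnumerateLog further down) can be summarized with a small sketch; FrameRecord is a hypothetical helper, not part of the library, and assumes the payload has already been serialized into its own buffer:

// A minimal sketch of the inferred on-disk record framing:
//   [0xbb | len]  4-byte header, low 24 bits = len (header + payload + crc)
//   [payload]     serialized transaction data
//   [crc]         4-byte CRC-32 of the payload bytes only
//   [0xee | len]  4-byte trailer echoing the same length
static byte[] FrameRecord(byte[] payload)
{
    Crc32 crc = new Crc32();
    crc.Add(payload, 0, payload.Length);
    int len = 4 + payload.Length + 4;              // header + payload + crc
    using (MemoryStream ms = new MemoryStream(len + 4))
    {
        PrimitiveSerializer.Int32.WriteTo((0xbb << 24) + len, ms);
        ms.Write(payload, 0, payload.Length);
        PrimitiveSerializer.Int32.WriteTo(crc.Value, ms);
        PrimitiveSerializer.Int32.WriteTo((0xee << 24) + len, ms);
        return ms.ToArray();
    }
}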
public static long Get(byte[] data, int start, int length)
{
    Crc32 crc32 = new Crc32();
    crc32.Add(data, start, length);
    return crc32.Get();
}
public static long Get(byte[] data)
{
    Crc32 crc32 = new Crc32();
    crc32.Add(data);
    return crc32.Get();
}
public void TestAddByteRange()
{
    Crc32 all = new Crc32(new byte[] { 0x2, 0x3, 0x4, 0x5, 0x6 });
    Crc32 crc = new Crc32();
    Assert.AreEqual(0, crc.Value);
    crc.Add(new byte[] { 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8 }, 1, 5);
    Assert.AreEqual(all.Value, crc.Value);
}
public void TestHashValue()
{
    Crc32 crc = new Crc32();
    Assert.AreEqual(0, crc.Value);
    Assert.AreEqual(0, crc.GetHashCode());
    crc.Add(0x1b);
    Assert.AreNotEqual(0, crc.Value);
    Assert.AreEqual(crc.Value, crc.GetHashCode());
}
private static void AssertCrc32(byte[] input, int expected)
{
    Crc32 crc = new Crc32(input);
    Assert.AreEqual(expected, crc.Value);

    crc = new Crc32(0);
    crc.Add(input);
    Assert.AreEqual(expected, crc.Value);

    crc = new Crc32();
    crc.Add(input, 0, input.Length);
    Assert.AreEqual(expected, crc.Value);

    crc = new Crc32();
    foreach (byte b in input)
    {
        crc.Add(b);
    }
    Assert.AreEqual(expected, crc.Value);
}
private void UpdateCache(Session s, BinaryReader r, DatFile.Mft mft, CustomEntry c)
{
    using (var w = new BinaryWriter(r.BaseStream))
    {
        var entry = mft.entries[INDEX_ENTRIES];

        //write the data
        w.BaseStream.Position = mft.entries[c.index].offset;
        var size = c.WriteTo(w.BaseStream, s.compressor);

        if (size != c.entry.size)
        {
            //rewrite the size of the entry
            w.BaseStream.Position = entry.offset + (c.index + 1) * 24 + 8;
            w.Write(size);

            //note that although the data for this entry has changed, the crc hasn't due to how it's calculated

            //calculate the crc of the main entry
            r.BaseStream.Position = entry.offset;
            var buffer = r.ReadBytes(entry.size);
            var crc32 = new Crc32();
            for (var i = 0; i < buffer.Length; i++)
            {
                if (i < 72 || i >= 96) //skip 3rd entry
                {
                    crc32.Add(buffer[i]);
                }
            }

            //rewrite the crc
            w.BaseStream.Position = entry.offset + (INDEX_ENTRIES + 1) * 24 + 20;
            w.Write(crc32.CRC);
        }
    }
}
/// <summary>
/// Replay the log file from the position provided and output the new log position
/// </summary>
IEnumerable<LogEntry> EnumerateLog(long[] position)
{
    lock (_logSync)
    {
        long pos = 0;
        long length;
        if (!File.Exists(_options.FileName))
        {
            position[0] = 0;
            yield break;
        }
        using (MemoryStream buffer = new MemoryStream(8192))
        using (Stream io = new FileStream(_options.FileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite, 0x10000, FileOptions.SequentialScan))
        {
            bool valid = true;
            const int minSize = 16;
            byte[] bytes = buffer.GetBuffer();
            int size, temp, nbytes, szcontent;
            short opCount;
            LogEntry entry = new LogEntry();

            length = io.Length;
            if (position[0] < 0 || position[0] > length)
            {
                position[0] = length;
                yield break;
            }
            bool fixedOffset = position[0] > 0;
            io.Position = position[0];

            while (valid && (pos = position[0] = io.Position) + minSize < length)
            {
                try
                {
                    size = PrimitiveSerializer.Int32.ReadFrom(io);
                    size = ((byte)(size >> 24) == 0xbb) ? size & 0x00FFFFFF : -1;
                    if (size < minSize || pos + size + 4 > length)
                    {
                        if (fixedOffset)
                        {
                            yield break;
                        }
                        break;
                    }
                    fixedOffset = false;

                    if (size > buffer.Capacity)
                    {
                        buffer.Capacity = (size + 8192);
                        bytes = buffer.GetBuffer();
                    }

                    szcontent = size - 8;
                    buffer.Position = 0;
                    buffer.SetLength(szcontent);
                    nbytes = 0;
                    while (nbytes < szcontent && (temp = io.Read(bytes, nbytes, szcontent - nbytes)) != 0)
                    {
                        nbytes += temp;
                    }
                    if (nbytes != szcontent)
                    {
                        break;
                    }

                    Crc32 crc = new Crc32();
                    crc.Add(bytes, 0, nbytes);
                    temp = PrimitiveSerializer.Int32.ReadFrom(io);
                    if (crc.Value != temp)
                    {
                        break;
                    }

                    temp = PrimitiveSerializer.Int32.ReadFrom(io);
                    if ((byte)(temp >> 24) != 0xee || (temp & 0x00FFFFFF) != size)
                    {
                        break;
                    }

                    entry.TransactionId = PrimitiveSerializer.Int32.ReadFrom(buffer);
                    _transactionId = Math.Max(_transactionId, entry.TransactionId + 1);

                    opCount = PrimitiveSerializer.Int16.ReadFrom(buffer);
                    if (opCount <= 0 || opCount >= short.MaxValue)
                    {
                        break;
                    }
                }
                catch (InvalidDataException)
                {
                    break;
                }

                while (opCount-- > 0)
                {
                    entry.OpCode = (OperationCode)PrimitiveSerializer.Int16.ReadFrom(buffer);
                    if (entry.OpCode != OperationCode.Add && entry.OpCode != OperationCode.Update && entry.OpCode != OperationCode.Remove)
                    {
                        valid = false;
                        break;
                    }
                    try
                    {
                        entry.Key = _options.KeySerializer.ReadFrom(buffer);
                        entry.Value = (entry.OpCode == OperationCode.Remove)
                            ? default(TValue)
                            : _options.ValueSerializer.ReadFrom(buffer);
                    }
                    catch
                    {
                        valid = false;
                        break;
                    }
                    if ((buffer.Position == buffer.Length) != (opCount == 0))
                    {
                        valid = false;
                        break;
                    }
                    yield return entry;
                }
            }
        }
        if (!_options.ReadOnly && pos < length)
        {
            TruncateLog(pos);
        }
    }
}
private bool UpdateCache(BinaryReader r, int build = 0)
{
    var mft = DatFile.ReadMft(r);
    var s = session;
    CustomEntry c = null;

    var counter = 2;
    for (var i = mft.entries.Length - 1; i >= 0; --i)
    {
        var e = mft.entries[i];
        switch (e.baseId)
        {
        case ID_CACHE:
            c = new CustomEntry()
            {
                index = i,
                entry = e,
            };
            if (--counter == 0)
            {
                i = 1;
            }
            break;
        case ID_LOCAL:
            if (build == 0)
            {
                build = ReadBuild(s.compressor, r.BaseStream, e);
            }
            if (--counter == 0)
            {
                i = 1;
            }
            break;
        }
    }

    if (build > 0)
    {
        if (c == null)
        {
            //entry will need to be created, which will require updating the entries and ids
            var blockSize = mft.BlockSize;
            var ofs = 0L;
            DatFile.MftEntry entry;

            c = new CustomEntry()
            {
                index = mft.entries.Length,
            };

            //find the next position to write data (not considering free space)
            for (var i = mft.entries.Length - 1; i >= 0; --i)
            {
                var e = mft.entries[i];
                var l = e.offset + e.size;
                if (l > ofs)
                {
                    ofs = l;
                }
            }
            ofs = (ofs / blockSize + 1) * blockSize;

            using (var w = new BinaryWriter(r.BaseStream, Encoding.ASCII, true))
            {
                var crc32 = new Crc32();
                byte[] buffer;

                entry = mft.entries[INDEX_IDS];
                if (!HasFreeSpace(entry, blockSize, 8))
                {
                    r.BaseStream.Position = entry.offset;
                    buffer = r.ReadBytes(entry.size);
                    w.BaseStream.Position = ofs;
                    w.Write(buffer);
                    entry.offset = ofs;
                    ofs += ((entry.size + 8) / blockSize + 1) * blockSize;
                }

                //adding id
                w.BaseStream.Position = entry.offset + entry.size;
                w.Write(ID_CACHE);
                w.Write(c.index + 1);
                entry.size += 8;

                //update size
                w.BaseStream.Position = mft.entries[INDEX_ENTRIES].offset + (INDEX_IDS + 1) * 24;
                w.Write(entry.offset);
                w.Write(entry.size);

                //recalculate crc
                r.BaseStream.Position = entry.offset;
                buffer = r.ReadBytes(entry.size);
                foreach (var b in buffer)
                {
                    crc32.Add(b);
                }

                //update crc
                w.BaseStream.Position = mft.entries[INDEX_ENTRIES].offset + (INDEX_IDS + 1) * 24 + 20;
                w.Write(crc32.CRC);
                crc32.Reset();

                entry = mft.entries[INDEX_ENTRIES];
                if (!HasFreeSpace(entry, blockSize, 24))
                {
                    r.BaseStream.Position = entry.offset;
                    buffer = r.ReadBytes(entry.size);
                    w.BaseStream.Position = ofs;
                    w.Write(buffer);
                    entry.offset = ofs;
                    ofs += ((entry.size + 24) / blockSize + 1) * blockSize;
                }

                c.entry = new DatFile.MftEntry()
                {
                    compression = 8,
                    baseId = ID_CACHE,
                    fileId = ID_CACHE,
                    crc = 1214729159,
                    flags = 3,
                    offset = ofs,
                };
                c.data = CreateGw2Cache(s.compressor, build, c.entry.compression == 8);
                c.length = c.data.Length;

                var entries = new DatFile.MftEntry[c.index + 1];
                Array.Copy(mft.entries, entries, mft.entries.Length);
                entries[c.index] = c.entry;
                mft.entries = entries;

                //adding entry
                w.BaseStream.Position = entry.offset + entry.size;
                WriteEntry(w, c.entry);
                entry.size += 24;

                //update count
                w.BaseStream.Position = entry.offset + 12;
                w.Write(entries.Length + 1);

                //update size
                w.BaseStream.Position = entry.offset + (INDEX_ENTRIES + 1) * 24;
                w.Write(entry.offset);
                w.Write(entry.size);

                //entries crc is updated when writing data

                //update header
                w.BaseStream.Position = 24;
                w.Write(entry.offset);
                w.Write(entry.size);
            }
        }
        else
        {
            c.data = CreateGw2Cache(s.compressor, build, c.entry.compression == 8);
            c.length = c.data.Length;
        }

        UpdateCache(s, r, mft, c);
        return true;
    }
    return false;
}
public BlockStreamReader(FragmentedFile file, long ordinal, BlockFlags typeExpected, bool validated)
{
    _file = file;
    _blockPos = 0;
    _validated = validated;
    _block = new FileBlock(file._blockSize, file._useAlignedIo);
    _file.ReadBlock(ordinal, _block, file._blockSize, typeExpected, _validated);
    if (_validated)
    {
        _expectedSum = _block.CheckSum;
        _checksum = new Crc32();
        _checksum.Add(_block.BlockData, _block.DataOffset, _block.Length);
        if (_block.NextBlockId == 0 && _checksum != _expectedSum)
        {
            throw new InvalidDataException();
        }
    }
}