/// <summary>
/// Computes a CRC-32 for a BSP file read from a <see cref="Stream"/>.
/// </summary>
/// <remarks>
/// The checksum is computed over the concatenation of all lumps in the BSP
/// except the entities lump; it is not simply a checksum of the entire file.
/// </remarks>
/// <param name="input">The stream of data for the BSP file.</param>
/// <returns>The CRC32 checksum for the given BSP file.</returns>
/// <exception cref="ArgumentException">
/// The input stream does not support reading or seeking, is <see langword="null"/>, or is already closed.
/// </exception>
/// <exception cref="InvalidDataException">
/// The identifier in the BSP is invalid or the BSP version is unsupported.
/// </exception>
public static uint Compute(Stream input)
{
    if (input is null)
    {
        throw new ArgumentException("Input stream must not be null.", nameof(input));
    }

    if (!input.CanRead || !input.CanSeek)
    {
        throw new ArgumentException("Input stream must be readable and seekable.", nameof(input));
    }

    using var reader = new BinaryReader(input);
    var header = new Header(reader);
    ImmutableArray<Lump> lumps = header.Lumps.Sort();

    uint crc = uint.MaxValue;
    byte[] chunk = new byte[65536];

    foreach (Lump lump in lumps)
    {
        if (lump.Type == LUMP_ENTITIES)
        {
            continue; // The entities lump is never part of the checksum.
        }

        // Every Append XORs the CRC with uint.MaxValue at the start and at
        // the end. The Source engine's CRC variant does not, so take the
        // bitwise complement before and after to cancel out the XORs.
        foreach (var bytesRead in lump.Read(chunk))
        {
            crc = ~Crc32Algorithm.Append(~crc, chunk, 0, bytesRead);
        }
    }

    return crc;
}
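The complement trick above generalizes: wrapping Append in two bitwise complements turns the library's conditioned (pre/post-XORed) update into a raw CRC register update, so chunked updates compose exactly like one pass over the concatenated data. A minimal standalone sketch, assuming the Force.Crc32 package (the usual home of this Crc32Algorithm API):

// Demonstrates that raw(state, data) == ~Crc32Algorithm.Append(~state, data)
// chains across chunks: the per-call init/final XORs cancel out.
using System;
using Force.Crc32;

class ComplementTrickDemo
{
    static uint RawUpdate(uint state, byte[] data, int offset, int count)
        => ~Crc32Algorithm.Append(~state, data, offset, count);

    static void Main()
    {
        byte[] data = new byte[1000];
        new Random(42).NextBytes(data);

        // One raw update over the whole buffer...
        uint whole = RawUpdate(uint.MaxValue, data, 0, data.Length);

        // ...equals two raw updates over the halves.
        uint chunked = RawUpdate(uint.MaxValue, data, 0, 500);
        chunked = RawUpdate(chunked, data, 500, 500);

        Console.WriteLine(whole == chunked); // True
    }
}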
public static void WriteChunk(FileStream file, dynamic data, ref uint crc32a, ref uint crc32b)
{
    // BitConverter.GetBytes is bound at runtime here, so data must be a
    // primitive type that BitConverter supports.
    byte[] output = BitConverter.GetBytes(data);
    file.Write(output, 0, output.Length);
    crc32a = Crc32Algorithm.Append(crc32a, output);
    crc32b = Crc32Algorithm.Append(crc32b, output);
}
public static byte[] ReadChunk(FileStream file, int length, ref uint crc32)
{
    byte[] data = new byte[length];
    // Stream.Read may return fewer bytes than requested, so loop until the
    // chunk is full.
    int total = 0;
    while (total < length)
    {
        int read = file.Read(data, total, length - total);
        if (read == 0)
        {
            throw new EndOfStreamException();
        }
        total += read;
    }
    crc32 = Crc32Algorithm.Append(crc32, data);
    return data;
}
public static uint GetCrc32FromFile(string fileName)
{
    if (!File.Exists(fileName))
    {
        throw new FileNotFoundException($"File '{fileName}' not found!", fileName);
    }

    using (var fs = File.OpenRead(fileName))
    {
        var result = 0u;
        var buffer = new byte[4096];
        int read;
        var totalread = 0L;
        var length = fs.Length;
        while ((read = fs.Read(buffer, 0, buffer.Length)) > 0)
        {
            totalread += read;
            result = Crc32Algorithm.Append(result, buffer, 0, read);
            if (totalread >= length)
            {
                break;
            }
        }
        return result;
    }
}
public static uint ComputeCRC(BinaryReader reader)
{
    if (reader == null)
    {
        throw new ArgumentNullException(nameof(reader));
    }

    uint crc = 0;
    var buffer = new byte[1024];
    var bytesLeft = reader.BaseStream.Length;

    while (bytesLeft > 0)
    {
        var bytesToRead = bytesLeft < buffer.Length ? (int)bytesLeft : buffer.Length;
        var bytesRead = reader.Read(buffer, 0, bytesToRead);
        if (bytesRead == 0)
        {
            throw new EndOfStreamException();
        }

        // Append only the bytes actually read; Read may return fewer than requested.
        crc = Crc32Algorithm.Append(crc, buffer, 0, bytesRead);
        bytesLeft -= bytesRead;
    }

    return crc;
}
public static T ReadStruct<T>(this Stream m_stream, Crc32Algorithm crc32 = null) where T : struct
{
    var size = Marshal.SizeOf<T>();
    var m_temp = new byte[size];
    var read = m_stream.Read(m_temp, 0, size);
    if (read != size)
    {
        throw new EndOfStreamException();
    }

    // Pin the buffer, reinterpret it as T, and free the handle even if
    // marshalling throws. (The original wrapped this in a catch that
    // rethrew with `throw ex`, which only destroyed the stack trace.)
    var handle = GCHandle.Alloc(m_temp, GCHandleType.Pinned);
    try
    {
        var item = Marshal.PtrToStructure<T>(handle.AddrOfPinnedObject());
        crc32?.Append(m_temp);
        return item;
    }
    finally
    {
        handle.Free();
    }
}
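The pin-marshal-append pattern used by these struct helpers can be exercised standalone. A minimal sketch, assuming Force.Crc32; the Header struct is invented for illustration. It marshals the struct into a byte buffer and feeds that buffer to the static Append:

using System;
using System.Runtime.InteropServices;
using Force.Crc32;

[StructLayout(LayoutKind.Sequential, Pack = 1)]
struct Header // hypothetical example struct
{
    public uint Magic;
    public ushort Version;
}

class MarshalCrcDemo
{
    static void Main()
    {
        var value = new Header { Magic = 0x57325243, Version = 2 };

        // Copy the struct's raw bytes into a pinned managed buffer.
        var buffer = new byte[Marshal.SizeOf<Header>()];
        var handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
        try
        {
            Marshal.StructureToPtr(value, handle.AddrOfPinnedObject(), false);
        }
        finally
        {
            handle.Free();
        }

        // CRC the serialized bytes exactly as the helpers above do.
        uint crc = Crc32Algorithm.Append(0, buffer);
        Console.WriteLine($"{crc:X8}");
    }
}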
public static void WriteChunk(FileStream file, byte data, ref uint crc32)
{
    byte[] output = new byte[1] { data };
    file.Write(output, 0, 1);
    crc32 = Crc32Algorithm.Append(crc32, output);
}
public void WriteStruct<T>(T value, Crc32Algorithm crc32)
{
    var buffer = new byte[Marshal.SizeOf<T>()];
    var handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
    Marshal.StructureToPtr(value, handle.AddrOfPinnedObject(), true);
    m_stream.Write(buffer, 0, buffer.Length);
    crc32.Append(buffer);
    handle.Free();
}
public void WriteStructs<T>(T[] array, Crc32Algorithm crc32)
{
    var size = Marshal.SizeOf<T>();
    var buffer = new byte[size];
    var handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
    for (int i = 0; i < array.Length; i++)
    {
        Marshal.StructureToPtr(array[i], handle.AddrOfPinnedObject(), true);
        m_stream.Write(buffer, 0, buffer.Length);
        crc32.Append(buffer);
    }
    handle.Free();
}
private void WriteStruct<T>(T value, Crc32Algorithm crc32 = null) where T : struct
{
    m_temp = new byte[Marshal.SizeOf<T>()];
    var handle = GCHandle.Alloc(m_temp, GCHandleType.Pinned);
    Marshal.StructureToPtr(value, handle.AddrOfPinnedObject(), true);
    m_stream.Write(m_temp, 0, m_temp.Length);
    crc32?.Append(m_temp);
    handle.Free();
}
public static async Task<uint> DoGetCrc32FromFile(string fileName, CancellationToken ct = default(CancellationToken), IProgress<double> progress = null)
{
    return await Task.Run(() =>
    {
        if (!File.Exists(fileName))
        {
            throw new FileNotFoundException($"File '{fileName}' not found!", fileName);
        }

        using (var fs = File.OpenRead(fileName))
        {
            var result = 0u;
            var buffer = new byte[4096];
            int read;
            var totalread = 0L;
            var length = fs.Length;
            var lastProgress = 0d;
            while ((read = fs.Read(buffer, 0, buffer.Length)) > 0)
            {
                // ct.ThrowIfCancellationRequested();
                totalread += read;
                result = Crc32Algorithm.Append(result, buffer, 0, read);
                var newProgress = (double)totalread / length * 100;
                if (newProgress - lastProgress > 1)
                {
                    progress?.Report(newProgress);
                    lastProgress = newProgress;
                }
                if (totalread >= length)
                {
                    break;
                }
            }
            return result;
        }
    }, ct);
}
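A hypothetical call site for the method above, run from inside an async method; the file name and progress format are invented:

var cts = new CancellationTokenSource();
var progress = new Progress<double>(p => Console.Write($"\rHashing: {p:F0}%"));
uint crc = await DoGetCrc32FromFile(@"C:\temp\bundle.dat", cts.Token, progress);
Console.WriteLine($"{Environment.NewLine}CRC32: {crc:X8}");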
public T ReadStruct<T>(Crc32Algorithm crc32)
{
    var size = Marshal.SizeOf<T>();
    var buffer = new byte[size];
    var handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
    var read = m_stream.Read(buffer, 0, size);
    if (read != size)
    {
        throw new EndOfStreamException();
    }
    var item = Marshal.PtrToStructure<T>(handle.AddrOfPinnedObject());
    crc32.Append(buffer);
    handle.Free();
    return item;
}
private T ReadStruct<T>(Crc32Algorithm crc32 = null) where T : struct
{
    var size = Marshal.SizeOf<T>();
    m_temp = new byte[size];
    var read = m_stream.Read(m_temp, 0, size);
    if (read != size)
    {
        throw new EndOfStreamException();
    }
    var handle = GCHandle.Alloc(m_temp, GCHandleType.Pinned);
    var item = Marshal.PtrToStructure<T>(handle.AddrOfPinnedObject());
    crc32?.Append(m_temp);
    handle.Free();
    return item;
}
private void WriteStructs<T>(T[] array, Crc32Algorithm crc32 = null) where T : struct
{
    var size = Marshal.SizeOf<T>();
    m_temp = new byte[size];
    for (int i = 0; i < array.Length; i++)
    {
        var handle = GCHandle.Alloc(m_temp, GCHandleType.Pinned);
        Marshal.StructureToPtr(array[i], handle.AddrOfPinnedObject(), true);
        m_stream.Write(m_temp, 0, m_temp.Length);
        crc32?.Append(m_temp);
        handle.Free();
    }
}
public T[] ReadStructs<T>(uint count, Crc32Algorithm crc32)
{
    var size = Marshal.SizeOf<T>();
    var items = new T[count];
    var buffer = new byte[size];
    var handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
    for (uint i = 0; i < count; i++)
    {
        var read = m_stream.Read(buffer, 0, size);
        if (read != size)
        {
            throw new EndOfStreamException();
        }
        items[i] = Marshal.PtrToStructure<T>(handle.AddrOfPinnedObject());
        crc32.Append(buffer);
    }
    handle.Free();
    return items;
}
// About 5 times faster than MD5. Ballache though.
public static uint GenerateCRC32FromFile(string fileName)
{
#if UNITY_EDITOR
    Stopwatch x = new Stopwatch();
    x.Start();
#endif
    uint crc = 0;
    FileStream fs = null;
    try
    {
        fs = new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.Read);
        byte[] buffer = new byte[4096];
        int read;
        // Append only the bytes actually read: the final block is usually
        // shorter than the buffer, and Read may return less than requested.
        // (The original appended the full buffer each iteration, which mixed
        // stale bytes into the CRC on the last block.)
        while ((read = fs.Read(buffer, 0, buffer.Length)) > 0)
        {
            crc = Crc32Algorithm.Append(crc, buffer, 0, read);
        }
    }
    catch (Exception e)
    {
        Debug.LogError("AssetBundleUtils | GenerateCRC32FromFile failed to generate CRC: " + fileName + ". Exception: " + e);
    }
    finally
    {
        if (fs != null)
        {
            fs.Close();
        }
    }
#if UNITY_EDITOR
    x.Stop();
    Debug.Log("CRC32 " + fileName + ":" + x.ElapsedMilliseconds + "ms <> " + crc);
#endif
    return crc;
}
public static byte[] CalculateForkHash(ISpecProvider specProvider, long headNumber, Keccak genesisHash)
{
    uint crc = 0;
    long[] transitionBlocks = specProvider.TransitionBlocks;
    byte[] blockNumberBytes = new byte[8];
    crc = Crc32Algorithm.Append(crc, genesisHash.Bytes);
    for (int i = 0; i < transitionBlocks.Length; i++)
    {
        if (transitionBlocks[i] > headNumber)
        {
            break;
        }
        BinaryPrimitives.WriteUInt64BigEndian(blockNumberBytes, (ulong)transitionBlocks[i]);
        crc = Crc32Algorithm.Append(crc, blockNumberBytes);
    }
    byte[] forkHash = new byte[4];
    BinaryPrimitives.TryWriteUInt32BigEndian(forkHash, crc);
    return forkHash;
}
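This appears to follow the EIP-2124 fork-identifier scheme: the fork hash is the CRC-32 of the genesis hash followed by each fork block number at or below the head, encoded big-endian. A minimal sketch of the same accumulation without the provider types; the inputs are invented and Force.Crc32 is assumed:

using System;
using System.Buffers.Binary;
using Force.Crc32;

class ForkHashSketch
{
    static void Main()
    {
        byte[] genesisHash = new byte[32]; // stand-in for a real genesis hash
        long[] forkBlocks = { 1_150_000, 1_920_000, 2_463_000 }; // invented transitions
        long headNumber = 2_000_000;

        uint crc = Crc32Algorithm.Append(0, genesisHash);
        byte[] blockNumberBytes = new byte[8];
        foreach (long block in forkBlocks)
        {
            if (block > headNumber) break; // only forks at or before the head count
            BinaryPrimitives.WriteUInt64BigEndian(blockNumberBytes, (ulong)block);
            crc = Crc32Algorithm.Append(crc, blockNumberBytes);
        }

        byte[] forkHash = new byte[4];
        BinaryPrimitives.WriteUInt32BigEndian(forkHash, crc);
        Console.WriteLine(BitConverter.ToString(forkHash));
    }
}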
private T[] ReadStructs<T>(uint count, Crc32Algorithm crc32 = null) where T : struct
{
    var size = Marshal.SizeOf<T>();
    var items = new T[count];
    m_temp = new byte[size];
    for (uint i = 0; i < count; i++)
    {
        var read = m_stream.Read(m_temp, 0, size);
        if (read != size)
        {
            throw new EndOfStreamException();
        }
        var handle = GCHandle.Alloc(m_temp, GCHandleType.Pinned);
        items[i] = Marshal.PtrToStructure<T>(handle.AddrOfPinnedObject());
        crc32?.Append(m_temp);
        handle.Free();
    }
    return items;
}
/// <summary>
/// Calculate a CRC32 value for this file header instance.
/// </summary>
/// <returns>The CRC32 value.</returns>
public uint CalculateCRC32()
{
    var hash = new Crc32Algorithm(false);
    // Serialize each field to bytes before hashing, matching the other
    // header-CRC implementations in this section.
    hash.Append(BitConverter.GetBytes(0x57325243)); // 'CR2W' magic
    hash.Append(BitConverter.GetBytes(version));
    hash.Append(BitConverter.GetBytes(flags));
    hash.Append(BitConverter.GetBytes(timeStamp));
    hash.Append(BitConverter.GetBytes(buildVersion));
    hash.Append(BitConverter.GetBytes(objectSize));
    hash.Append(BitConverter.GetBytes(fileSize));
    hash.Append(BitConverter.GetBytes(0xDEADBEEF));
    hash.Append(BitConverter.GetBytes(numChunks));
    foreach (var h in tableHeaders)
    {
        hash.Append(BitConverter.GetBytes(h.offset));
        hash.Append(BitConverter.GetBytes(h.size));
        hash.Append(BitConverter.GetBytes(h.crc32));
    }
    return hash.HashUInt32;
}
public void Import(string filename)
{
    if (!File.Exists(filename))
    {
        throw new FileNotFoundException();
    }

    using (FileStream file = File.OpenRead(filename))
    {
        uint hCRC32 = 0, fCRC32 = 0;
        byte[] CRC32_check;

        // header
        byte[] word_magic = FileIO.ReadChunk(file, file_word_magic.Length, ref hCRC32, ref fCRC32);
        if (!Win32.ByteArrayCompare(word_magic, file_word_magic))
        {
            throw new InvalidDataException();
        }
        byte ver_write = FileIO.ReadChunk(file, 0x1, ref hCRC32, ref fCRC32)[0];
        byte ver_read = FileIO.ReadChunk(file, 0x1, ref hCRC32, ref fCRC32)[0];
        byte endianness = FileIO.ReadChunk(file, 0x1, ref hCRC32, ref fCRC32)[0];
        uint off_meta = BitConverter.ToUInt32(FileIO.ReadChunk(file, 0x4, ref hCRC32, ref fCRC32), 0);
        uint off_def = BitConverter.ToUInt32(FileIO.ReadChunk(file, 0x4, ref hCRC32, ref fCRC32), 0);
        uint off_body = BitConverter.ToUInt32(FileIO.ReadChunk(file, 0x4, ref hCRC32, ref fCRC32), 0);
        uint len_data = BitConverter.ToUInt32(FileIO.ReadChunk(file, 0x4, ref hCRC32, ref fCRC32), 0);

        // Appending the stored checksum to the running CRC must land on the
        // CRC-32 residue 0x2144DF1C if the header is intact.
        CRC32_check = FileIO.ReadChunk(file, 0x4, ref fCRC32);
        if (Crc32Algorithm.Append(hCRC32, CRC32_check) != 0x2144DF1C)
        {
            throw new InvalidDataException();
        }

        // meta
        byte[] in_meta_player = FileIO.ReadChunk(file, 0x20, ref fCRC32);
        byte in_meta_track = FileIO.ReadChunk(file, 0x1, ref fCRC32)[0];
        byte in_meta_vehicle = FileIO.ReadChunk(file, 0x1, ref fCRC32)[0];
        byte[] in_meta_upgrade_level = FileIO.ReadChunk(file, 0x7, ref fCRC32);
        byte[] in_meta_upgrade_health = FileIO.ReadChunk(file, 0x7, ref fCRC32);

        // def
        List<DataCollection.DataBlock.Path> in_path = new List<DataCollection.DataBlock.Path>();
        List<uint> in_offset = new List<uint>();
        List<uint> in_length = new List<uint>();
        while (file.Position < off_body)
        {
            in_path.Add((DataCollection.DataBlock.Path)FileIO.ReadChunk(file, 0x1, ref fCRC32)[0]);
            in_offset.Add(BitConverter.ToUInt32(FileIO.ReadChunk(file, 0x4, ref fCRC32), 0));
            in_length.Add(BitConverter.ToUInt32(FileIO.ReadChunk(file, 0x4, ref fCRC32), 0));
        }

        // body
        List<DataCollection> in_frame = new List<DataCollection>();
        while (file.Position < off_meta + len_data)
        {
            DataCollection frame = new DataCollection();
            for (int i = 0; i < in_path.Count; i++)
            {
                byte[] in_data = FileIO.ReadChunk(file, (int)in_length[i], ref fCRC32);
                DataCollection.DataBlock block = new DataCollection.DataBlock(in_data, in_path[i], in_offset[i]);
                frame.data.Add(block);
            }
            in_frame.Add(frame);
        }

        // footer
        CRC32_check = FileIO.ReadChunk(file, 0x4);
        if (Crc32Algorithm.Append(fCRC32, CRC32_check) != 0x2144DF1C)
        {
            throw new InvalidDataException();
        }
        byte[] word_eof = FileIO.ReadChunk(file, file_word_eof.Length);
        if (!Win32.ByteArrayCompare(word_eof, file_word_eof))
        {
            throw new InvalidDataException();
        }

        // output
        meta_track = in_meta_track;
        meta_vehicle = in_meta_vehicle;
        meta_player = in_meta_player;
        meta_upgrade_level = in_meta_upgrade_level;
        meta_upgrade_health = in_meta_upgrade_health;
        data.Clear();
        foreach (DataCollection frame in in_frame)
        {
            data.Add(frame);
        }
        initialized = true;
    }
}
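The magic constant 0x2144DF1C used in these checks (and again in the savestate importer further below) is the CRC-32 residue: running the CRC over a payload followed by that payload's own little-endian CRC always lands on this fixed value, so a stream can be verified without decoding the stored checksum separately. A minimal standalone sketch, Force.Crc32 assumed; that library also ships IsValidWithCrcAtEnd for the single-buffer case:

using System;
using Force.Crc32;

class ResidueDemo
{
    static void Main()
    {
        byte[] payload = { 1, 2, 3, 4, 5 };

        // Compute the payload's CRC and serialize it little-endian
        // (BitConverter is little-endian on common platforms).
        uint crc = Crc32Algorithm.Compute(payload);
        byte[] crcBytes = BitConverter.GetBytes(crc);

        // CRC over payload + stored CRC collapses to the fixed residue.
        uint running = Crc32Algorithm.Append(0, payload);
        running = Crc32Algorithm.Append(running, crcBytes);

        Console.WriteLine(running == 0x2144DF1C); // True: data verified
    }
}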
private uint CalculateHeaderCRC32()
{
    var hash = new Crc32Algorithm(false);
    hash.Append(BitConverter.GetBytes(MAGIC));
    hash.Append(BitConverter.GetBytes(m_fileheader.version));
    hash.Append(BitConverter.GetBytes(m_fileheader.flags));
    hash.Append(BitConverter.GetBytes(m_fileheader.timeStamp));
    hash.Append(BitConverter.GetBytes(m_fileheader.buildVersion));
    hash.Append(BitConverter.GetBytes(m_fileheader.fileSize));
    hash.Append(BitConverter.GetBytes(m_fileheader.bufferSize));
    hash.Append(BitConverter.GetBytes(DEADBEEF));
    hash.Append(BitConverter.GetBytes(m_fileheader.numChunks));
    foreach (var h in m_tableheaders)
    {
        hash.Append(BitConverter.GetBytes(h.offset));
        hash.Append(BitConverter.GetBytes(h.itemCount));
        hash.Append(BitConverter.GetBytes(h.crc32));
    }
    return hash.HashUInt32;
}
public void Write(BinaryWriter file)
{
    m_stream = file.BaseStream;

    // update data
    #region Update Data
    m_fileheader.timeStamp = CDateTime.Now.ToUInt64(); // this will change any vanilla assets simply by opening and saving in wkit
    m_fileheader.numChunks = (uint)chunks.Count;

    // add new names
    // TODO
    foreach (var c in chunks)
    {
        c.SetExportType((ushort)GetStringIndex(c.Type, true));
    }

    // Update strings
    uint stringbuffer_offset = 160; // always 160
    m_tableheaders[0].offset = stringbuffer_offset;
    m_strings = GetNewStrings();
    UpdateDictionary();

    // Update Offsets
    var inverseDictionary = m_dictionary.ToDictionary(x => x.Value, x => x.Key);
    for (var i = 0; i < names.Count; i++)
    {
        var newoffset = inverseDictionary[names[i].Str];
        if (names[i].Name.value != newoffset)
        {
            names[i].SetOffset(newoffset);
        }
    }
    for (var i = 0; i < imports.Count; i++)
    {
        var newoffset = inverseDictionary[imports[i].DepotPathStr];
        if (newoffset != imports[i].Import.depotPath)
        {
            imports[i].SetOffset(newoffset);
        }
    }
    for (var i = 0; i < embedded.Count; i++)
    {
        var newoffset = inverseDictionary[embedded[i].Handle];
        if (newoffset != embedded[i].Embedded.path)
        {
            embedded[i].SetOffset(newoffset);
        }
    }
    #endregion

    headerOffset = 0;
    using (var ms = new MemoryStream())
    using (var bw = new BinaryWriter(ms))
    {
        // First write the file to memory; this also sets
        // m_fileheader.fileSize and m_fileheader.bufferSize, offsets.
        WriteBuffers(bw);

        // Write headers once to allocate the space for them.
        WriteHeader(file);
        headerOffset = (uint)file.BaseStream.Position;

        // Write buffers
        ms.Seek(0, SeekOrigin.Begin);
        ms.WriteTo(file.BaseStream);
    }

    #region Update Offsets
    for (var i = 0; i < chunks.Count; i++)
    {
        var newoffset = chunks[i].Export.dataOffset + headerOffset;
        chunks[i].SetOffset(newoffset);
    }
    for (var i = 0; i < embedded.Count; i++)
    {
        var newoffset = embedded[i].Embedded.dataOffset + headerOffset;
        embedded[i].SetOffset(newoffset);
    }
    m_fileheader.fileSize += headerOffset;
    m_fileheader.bufferSize += headerOffset;
    #endregion

    for (int i = 0; i < chunks.Count; i++)
    {
        FixExportCRC32(chunks[i].Export);
    }
    for (int i = 0; i < buffers.Count; i++)
    {
        FixBufferCRC32(buffers[i].Buffer);
    }

    // Write headers again with fixed offsets.
    //m_fileheader.crc32 = CalculateHeaderCRC32();
    WriteHeader(file);
    m_fileheader.crc32 = CalculateHeaderCRC32();
    WriteFileHeader(file);

    m_stream = null;

    // LOCAL METHODS
    void UpdateDictionary()
    {
        var size = m_strings.Length;
        m_tableheaders[0].size = (uint)size;
        m_tableheaders[0].crc32 = Crc32Algorithm.Compute(m_strings);

        m_dictionary = new Dictionary<uint, string>();
        StringBuilder sb = new StringBuilder();
        uint offset = 0;
        for (uint i = 0; i < size; i++)
        {
            var b = m_strings[i];
            if (b == 0)
            {
                m_dictionary.Add(offset, sb.ToString());
                sb.Clear();
                offset = i + 1;
            }
            else
            {
                sb.Append((char)b);
            }
        }
    }

    byte[] GetNewStrings()
    {
        var newnames = new List<string>();
        var newstrings = new List<byte>();
        foreach (CR2WNameWrapper name in names)
        {
            if (!newnames.Contains(name.Str))
            {
                newnames.Add(name.Str);
            }
        }
        foreach (CR2WImportWrapper import in imports)
        {
            if (!newnames.Contains(import.DepotPathStr))
            {
                newnames.Add(import.DepotPathStr);
            }
        }
        foreach (CR2WEmbeddedWrapper emb in embedded)
        {
            if (!newnames.Contains(emb.Handle))
            {
                newnames.Add(emb.Handle);
            }
        }
        foreach (var str in newnames)
        {
            if (str != null)
            {
                var bytes = Encoding.Default.GetBytes(str);
                foreach (var b in bytes)
                {
                    newstrings.Add(b);
                }
            }
            newstrings.Add((byte)0);
        }
        return newstrings.ToArray();
    }

    void FixExportCRC32(CR2WExport export) // FIXME: do I wanna keep the ref?
    {
        m_stream.Seek(export.dataOffset, SeekOrigin.Begin);
        var m_temp = new byte[export.dataSize];
        m_stream.Read(m_temp, 0, m_temp.Length);
        export.crc32 = Crc32Algorithm.Compute(m_temp);
    }

    void FixBufferCRC32(CR2WBuffer buffer) // FIXME: do I wanna keep the ref?
    {
        // This might throw errors. The proper check is to read the object
        // tree to find the deferred data buffers that will point to a
        // buffer; the flag of the parent object indicates where to read the
        // data from. For now this is a crude workaround.
        if (m_hasInternalBuffer)
        {
            m_stream.Seek(buffer.offset, SeekOrigin.Begin);
            var m_temp = new byte[buffer.diskSize];
            m_stream.Read(m_temp, 0, m_temp.Length);
            buffer.crc32 = Crc32Algorithm.Compute(m_temp);
        }
        else
        {
            /*var path = String.Format("{0}.{1}.buffer", m_filePath, buffer.index);
            if (!File.Exists(path))
            {
                return;
            }
            m_temp = File.ReadAllBytes(path);
            buffer.crc32 = Crc32Algorithm.Compute(m_temp);*/
        }
    }

    uint CalculateHeaderCRC32()
    {
        var hash = new Crc32Algorithm(false);
        hash.Append(BitConverter.GetBytes(MAGIC));
        hash.Append(BitConverter.GetBytes(m_fileheader.version));
        hash.Append(BitConverter.GetBytes(m_fileheader.flags));
        hash.Append(BitConverter.GetBytes(m_fileheader.timeStamp));
        hash.Append(BitConverter.GetBytes(m_fileheader.buildVersion));
        hash.Append(BitConverter.GetBytes(m_fileheader.fileSize));
        hash.Append(BitConverter.GetBytes(m_fileheader.bufferSize));
        hash.Append(BitConverter.GetBytes(DEADBEEF));
        hash.Append(BitConverter.GetBytes(m_fileheader.numChunks));
        foreach (var h in m_tableheaders)
        {
            hash.Append(BitConverter.GetBytes(h.offset));
            hash.Append(BitConverter.GetBytes(h.size));
            hash.Append(BitConverter.GetBytes(h.crc32));
        }
        return hash.HashUInt32;
    }
}
public void TestClientMessageReceiveEvent()
{
    string expectedIp = "127.0.0.1";
    int expectedPort = 44402;
    SocketServer target = new SocketServer();
    target.Port = expectedPort;
    target.BindingIPs = new string[] { expectedIp };

    int clientStartCount = 0;
    int clientStopCount = 0;
    int messagesReceived = 0;
    string actualMeta = null;
    byte[] actualBody = null;
    long clientId = -1;

    using (AutoResetEvent isStarted = new AutoResetEvent(false))
    using (AutoResetEvent messageReceived = new AutoResetEvent(false))
    {
        target.RegisterClientEvents(
            c =>
            {
                clientStartCount++;
                clientId = c.Id;
                isStarted.Set();
            },
            c => clientStopCount++,
            (c, m, bs, b) =>
            {
                messagesReceived++;
                actualMeta = m;
                using (var stream = new MemoryStream())
                {
                    b.CopyTo(stream);
                    actualBody = stream.ToArray();
                }
                messageReceived.Set();
            });
        target.Start();

        TcpClient client = new TcpClient();
        client.Connect(expectedIp, expectedPort);
        isStarted.WaitOne(Debugger.IsAttached ? Timeout.Infinite : 1500);

        string meta = "This is a test";
        byte[] metaBytes = Encoding.UTF8.GetBytes(meta);
        byte[] body = Encoding.UTF8.GetBytes("This is the body");

        // CRC over meta followed by body; note that the header built below
        // does not carry this checksum in this snippet.
        var t = Crc32Algorithm.Append(0, metaBytes);
        var crc = Crc32Algorithm.Append(t, body);

        MessageHeader header = new MessageHeader()
        {
            BodySize = (ulong)body.LongLength,
            MetaSize = (ushort)metaBytes.Length,
        };
        byte[] headerBytes = header.SerializeHeader();

        using (var stream = client.GetStream())
        {
            stream.Write(headerBytes, 0, headerBytes.Length);
            stream.Write(metaBytes, 0, metaBytes.Length);
            stream.Write(body, 0, body.Length);
        }
        messageReceived.WaitOne(Debugger.IsAttached ? Timeout.Infinite : 1500);
        target.Stop();

        Assert.AreEqual(1, clientStartCount);
        Assert.AreEqual(1, messagesReceived);
        Assert.AreEqual(meta, actualMeta);
        // Compare array contents, not references.
        CollectionAssert.AreEqual(body, actualBody);
        Assert.AreEqual(1, clientStopCount);
    }
}
public static async ValueTask<ReadResult<TEntity>> ReadAsync<TEntity>(Stream utf8Json, DdbClassInfo classInfo, DynamoDbContextMetadata metadata, bool returnCrc, int defaultBufferSize = DefaultBufferSize, CancellationToken cancellationToken = default) where TEntity : class
{
    var readerState = new JsonReaderState();
    var readStack = new DdbEntityReadStack(DdbEntityReadStack.DefaultStackLength, metadata);
    try
    {
        readStack.GetCurrent().ClassInfo ??= classInfo;
        var buffer = ArrayPool<byte>.Shared.Rent(defaultBufferSize);
        var clearMax = 0;
        try
        {
            var bytesInBuffer = 0;
            uint crc = 0;
            var isFirstBlock = true;
            while (true)
            {
                var isFinalBlock = false;
                while (true)
                {
                    var bytesRead = await utf8Json.ReadAsync(new Memory<byte>(buffer, bytesInBuffer, buffer.Length - bytesInBuffer), cancellationToken).ConfigureAwait(false);
                    if (bytesRead == 0)
                    {
                        isFinalBlock = true;
                        break;
                    }

                    if (returnCrc)
                    {
                        crc = Crc32Algorithm.Append(crc, buffer, bytesInBuffer, bytesRead);
                    }

                    bytesInBuffer += bytesRead;
                    if (bytesInBuffer == buffer.Length)
                    {
                        break;
                    }
                }

                if (bytesInBuffer > clearMax)
                {
                    clearMax = bytesInBuffer;
                }

                readStack.UseFastPath = isFirstBlock && isFinalBlock;
                readStack.Buffer = buffer;
                readStack.BufferStart = 0;
                readStack.BufferLength = bytesInBuffer;

                ReadCore<TEntity>(ref readerState, isFinalBlock, new ReadOnlySpan<byte>(buffer, 0, bytesInBuffer), ref readStack);

                var bytesConsumed = (int)readStack.BytesConsumed;
                bytesInBuffer -= bytesConsumed;

                if (isFinalBlock)
                {
                    break;
                }

                // Check if we need to shift or expand the buffer because there wasn't enough data to complete deserialization.
                if ((uint)bytesInBuffer > ((uint)buffer.Length / 2))
                {
                    // We have less than half the buffer available, double the buffer size.
                    byte[] dest = ArrayPool<byte>.Shared.Rent(buffer.Length < (int.MaxValue / 2) ? buffer.Length * 2 : int.MaxValue);

                    // Copy the unprocessed data to the start of the new buffer.
                    Buffer.BlockCopy(buffer, bytesConsumed, dest, 0, bytesInBuffer);
                    new Span<byte>(buffer, 0, clearMax).Clear();
                    ArrayPool<byte>.Shared.Return(buffer);
                    clearMax = bytesInBuffer;
                    buffer = dest;
                }
                else if (bytesInBuffer != 0)
                {
                    // Shift the remaining unprocessed bytes to the beginning of the buffer to make more room.
                    Buffer.BlockCopy(buffer, bytesConsumed, buffer, 0, bytesInBuffer);
                }

                isFirstBlock = false;
            }

            return new ReadResult<TEntity>((TEntity)readStack.GetCurrent().ReturnValue!, crc);
        }
        finally
        {
            new Span<byte>(buffer, 0, clearMax).Clear();
            ArrayPool<byte>.Shared.Return(buffer);
        }
    }
    finally
    {
        readStack.Dispose();
    }
}
public static async ValueTask<ReadResult<Document>> ReadAsync(Stream utf8Json, IParsingOptions options, bool returnCrc, CancellationToken cancellationToken = default)
{
    var readerState = new JsonReaderState();
    var readStack = new DdbReadStack(DdbReadStack.DefaultStackLength, options.Metadata);
    try
    {
        options.StartParsing(ref readStack);
        var buffer = ArrayPool<byte>.Shared.Rent(DefaultBufferSize);
        var clearMax = 0;
        try
        {
            var bytesInBuffer = 0;
            uint crc = 0;
            while (true)
            {
                var isFinalBlock = false;
                while (true)
                {
                    var bytesRead = await utf8Json.ReadAsync(new Memory<byte>(buffer, bytesInBuffer, buffer.Length - bytesInBuffer), cancellationToken).ConfigureAwait(false);
                    if (bytesRead == 0)
                    {
                        isFinalBlock = true;
                        break;
                    }

                    if (returnCrc)
                    {
                        crc = Crc32Algorithm.Append(crc, buffer, bytesInBuffer, bytesRead);
                    }

                    bytesInBuffer += bytesRead;
                    if (bytesInBuffer == buffer.Length)
                    {
                        break;
                    }
                }

                if (bytesInBuffer > clearMax)
                {
                    clearMax = bytesInBuffer;
                }

                ReadCore(ref readerState, isFinalBlock, new ReadOnlySpan<byte>(buffer, 0, bytesInBuffer), ref readStack, options);

                var bytesConsumed = (int)readStack.BytesConsumed;
                bytesInBuffer -= bytesConsumed;

                if (isFinalBlock)
                {
                    break;
                }

                // Check if we need to shift or expand the buffer because there wasn't enough data to complete deserialization.
                if ((uint)bytesInBuffer > ((uint)buffer.Length / 2))
                {
                    // We have less than half the buffer available, double the buffer size.
                    byte[] dest = ArrayPool<byte>.Shared.Rent(buffer.Length < (int.MaxValue / 2) ? buffer.Length * 2 : int.MaxValue);

                    // Copy the unprocessed data to the start of the new buffer.
                    Buffer.BlockCopy(buffer, bytesConsumed, dest, 0, bytesInBuffer);
                    new Span<byte>(buffer, 0, clearMax).Clear();
                    ArrayPool<byte>.Shared.Return(buffer);
                    clearMax = bytesInBuffer;
                    buffer = dest;
                }
                else if (bytesInBuffer != 0)
                {
                    // Shift the remaining unprocessed bytes to the beginning of the buffer to make more room.
                    Buffer.BlockCopy(buffer, bytesConsumed, buffer, 0, bytesInBuffer);
                }
            }

            return new ReadResult<Document>(readStack.GetCurrent().CreateDocumentFromBuffer(), crc);
        }
        finally
        {
            new Span<byte>(buffer, 0, clearMax).Clear();
            ArrayPool<byte>.Shared.Return(buffer);
        }
    }
    finally
    {
        readStack.Dispose();
    }
}
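The shift-or-grow buffer management shared by the two readers above can be isolated. A minimal sketch, not the library's actual helper: it compacts unconsumed bytes to the front of a rented buffer, or doubles the buffer when more than half of it is still unread:

using System;
using System.Buffers;

static class ShiftOrGrowSketch
{
    // Keep the unconsumed tail of `buffer` and make room for the next read:
    // grow when more than half the buffer is still unread, else compact in place.
    static byte[] ShiftOrGrow(byte[] buffer, int bytesConsumed, int bytesInBuffer)
    {
        int remaining = bytesInBuffer - bytesConsumed;
        if ((uint)remaining > (uint)buffer.Length / 2)
        {
            byte[] dest = ArrayPool<byte>.Shared.Rent(
                buffer.Length < int.MaxValue / 2 ? buffer.Length * 2 : int.MaxValue);
            Buffer.BlockCopy(buffer, bytesConsumed, dest, 0, remaining);
            ArrayPool<byte>.Shared.Return(buffer, clearArray: true);
            return dest;
        }
        if (remaining != 0)
        {
            Buffer.BlockCopy(buffer, bytesConsumed, buffer, 0, remaining);
        }
        return buffer;
    }

    static void Main()
    {
        var buffer = ArrayPool<byte>.Shared.Rent(16);

        // Pretend a parser consumed 4 of 16 buffered bytes: 12 remain,
        // which is more than half of 16, so the buffer is doubled.
        buffer = ShiftOrGrow(buffer, bytesConsumed: 4, bytesInBuffer: 16);
        Console.WriteLine(buffer.Length >= 32); // True
        ArrayPool<byte>.Shared.Return(buffer);
    }
}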
public static void WriteChunk(FileStream file, byte[] data, ref uint crc32a, ref uint crc32b)
{
    file.Write(data, 0, data.Length);
    crc32a = Crc32Algorithm.Append(crc32a, data);
    crc32b = Crc32Algorithm.Append(crc32b, data);
}
uint CalculateHeaderCRC32(SFileHeader fileheader)
{
    var hash = new Crc32Algorithm(false);
    hash.Append(BitConverter.GetBytes(0x57325243));
    hash.Append(BitConverter.GetBytes(fileheader.fileversion));
    hash.Append(BitConverter.GetBytes(fileheader.flags));
    hash.Append(BitConverter.GetBytes(fileheader.timestamp));
    hash.Append(BitConverter.GetBytes(fileheader.buildversion));
    hash.Append(BitConverter.GetBytes(fileheader.disksize));
    hash.Append(BitConverter.GetBytes(fileheader.memsize));
    hash.Append(BitConverter.GetBytes(0xDEADBEEF));
    hash.Append(BitConverter.GetBytes(fileheader.numchunks));
    foreach (var h in tableheaders)
    {
        hash.Append(BitConverter.GetBytes(h.Offset));
        hash.Append(BitConverter.GetBytes(h.Size));
        hash.Append(BitConverter.GetBytes(h.CRC32));
    }
    return hash.HashUInt32;
}
public void Import(string filename)
{
    // Need to reimplement endianness converting, but low priority because
    // there is probably not going to be variance from the exporter across platforms.
    //string filename = @"K:\Projects\swe1r\overlay\SWE1R Overlay\Format\Racer.State.WriteTest.e1rs";
    FileStream file = File.OpenRead(filename);
    uint headerCRC32 = 0;
    uint dataCRC32 = 0;

    // READ HEADER
    // read data
    byte[] inMagicWord = FileIO.ReadChunk(file, fileMagicWord.Length, ref headerCRC32);
    if (!Win32.ByteArrayCompare(inMagicWord, fileMagicWord))
    {
        throw new Exception("Read Savestate: Invalid filetype.");
    }
    byte inVerSrc = FileIO.ReadChunk(file, 0x1, ref headerCRC32)[0];  // ideal/generated-from version
    byte inVerRead = FileIO.ReadChunk(file, 0x1, ref headerCRC32)[0]; // readable version
    bool inBigEndian = Convert.ToBoolean(FileIO.ReadChunk(file, 0x1, ref headerCRC32)[0]);
    byte[] inDataLen = FileIO.ReadChunk(file, 0x4, ref headerCRC32);
    byte[] inDataOff = FileIO.ReadChunk(file, 0x2, ref headerCRC32);
    byte[] inHeaderCRC32 = FileIO.ReadChunk(file, 0x4);

    // convert to big endian if needed
    //if (inBigEndian)
    //{
    //    inDataLen = inDataLen.Reverse().ToArray();
    //    inDataOff = inDataOff.Reverse().ToArray();
    //    inHeaderCRC32 = inHeaderCRC32.Reverse().ToArray();
    //}

    // check crc32
    if (Crc32Algorithm.Append(headerCRC32, inHeaderCRC32, 0, 0x4) != 0x2144DF1C)
    {
        throw new Exception("Read Savestate: Header invalid.");
    }

    // check eof
    file.Seek(BitConverter.ToUInt16(inDataOff, 0) + BitConverter.ToUInt32(inDataLen, 0), SeekOrigin.Begin);
    byte[] inEOFCheck = FileIO.ReadChunk(file, fileEOFWord.Length);
    if (!Win32.ByteArrayCompare(inEOFCheck, fileEOFWord))
    {
        throw new Exception("Read Savestate: File length invalid.");
    }

    // READ DATA
    file.Seek(BitConverter.ToUInt16(inDataOff, 0), SeekOrigin.Begin);
    List<DataBlock> inDataBlocks = new List<DataBlock>();
    while (file.Position < BitConverter.ToUInt16(inDataOff, 0) + BitConverter.ToUInt32(inDataLen, 0) - 4)
    {
        uint blockCRC32 = 0;
        DataBlock.Path p = (DataBlock.Path)FileIO.ReadChunk(file, 0x1, ref blockCRC32, ref dataCRC32)[0];
        uint o = BitConverter.ToUInt32(FileIO.ReadChunk(file, 0x4, ref blockCRC32, ref dataCRC32), 0);
        int l = (int)BitConverter.ToUInt32(FileIO.ReadChunk(file, 0x4, ref blockCRC32, ref dataCRC32), 0); // typecast to preserve original encoding from uint
        //byte[] d = FileIO.ReadChunk(file, BitConverter.ToInt32((inBigEndian ? inDataBlocks.Last().length.Reverse().ToArray() : inDataBlocks.Last().length), 0), ref blockCRC32, ref dataCRC32);
        byte[] d = FileIO.ReadChunk(file, l, ref blockCRC32, ref dataCRC32);
        inDataBlocks.Add(new DataBlock(d, p, o, Core.DataType.None));
        byte[] crc32 = FileIO.ReadChunk(file, 0x4, ref dataCRC32);
        //if (inBigEndian)
        //    inDataBlocks.Last().ReverseArrays();
        if (Crc32Algorithm.Append(blockCRC32, crc32) != 0x2144DF1C)
        {
            throw new Exception("Read Savestate: Data block " + inDataBlocks.Count + " invalid.");
        }
    }

    // check entire data set is valid
    byte[] inDataCRC32 = FileIO.ReadChunk(file, 0x4);
    //if (inBigEndian)
    //    inDataCRC32 = inDataCRC32.Reverse().ToArray();
    if (Crc32Algorithm.Append(dataCRC32, inDataCRC32, 0, 0x4) != 0x2144DF1C)
    {
        throw new Exception("Read Savestate: Data invalid.");
    }

    // check end of data is actually end of file
    inEOFCheck = FileIO.ReadChunk(file, fileEOFWord.Length);
    if (!Win32.ByteArrayCompare(inEOFCheck, fileEOFWord))
    {
        throw new Exception("Read Savestate: File length invalid.");
    }

    //Savestate output = new Savestate(inDataBlocks.ToArray(), inDataPod, inDataTrack);
    // Need to implement a way to update data blocks with data not sourced
    // from the game directly in order to actually use the read data.
    file.Close(); // Close disposes the stream; the original's extra Dispose() call was redundant
    //return output;
}