// Little-endian CRC-32 engine: selects the precomputed lookup table for the requested algorithm/polynomial.
public Crc32LittleEndian(Crc32Algorithm algorithm) : base(Tables[(int)algorithm]) { }
/// <summary>
/// Deserializes a UTF-8 DynamoDB JSON document from <paramref name="utf8Json"/> into a
/// <typeparamref name="TEntity"/>, optionally accumulating a CRC-32 over the raw bytes read.
/// Reads through a pooled buffer that is doubled whenever less than half of it remains
/// available after a partial parse.
/// </summary>
/// <param name="utf8Json">Stream containing the UTF-8 JSON payload.</param>
/// <param name="classInfo">Metadata describing the target entity class.</param>
/// <param name="metadata">Context metadata used to construct the read stack.</param>
/// <param name="returnCrc">When true, a CRC-32 of every byte read is accumulated and returned.</param>
/// <param name="defaultBufferSize">Initial size requested from the array pool.</param>
/// <param name="cancellationToken">Cancels the underlying stream reads.</param>
public static async ValueTask<ReadResult<TEntity>> ReadAsync<TEntity>(Stream utf8Json, DdbClassInfo classInfo, DynamoDbContextMetadata metadata, bool returnCrc, int defaultBufferSize = DefaultBufferSize, CancellationToken cancellationToken = default) where TEntity : class
{
    var readerState = new JsonReaderState();
    var readStack = new DdbEntityReadStack(DdbEntityReadStack.DefaultStackLength, metadata);
    try
    {
        readStack.GetCurrent().ClassInfo ??= classInfo;
        var buffer = ArrayPool<byte>.Shared.Rent(defaultBufferSize);
        // Highest index ever written in 'buffer'; only this prefix needs scrubbing before returning it to the pool.
        var clearMax = 0;
        try
        {
            var bytesInBuffer = 0;
            uint crc = 0;
            var isFirstBlock = true;
            while (true)
            {
                var isFinalBlock = false;
                // Fill the buffer until it is completely full or the stream is exhausted.
                while (true)
                {
                    var bytesRead = await utf8Json.ReadAsync(new Memory<byte>(buffer, bytesInBuffer, buffer.Length - bytesInBuffer), cancellationToken).ConfigureAwait(false);
                    if (bytesRead == 0)
                    {
                        isFinalBlock = true;
                        break;
                    }
                    if (returnCrc)
                    {
                        // Incrementally extend the CRC over just the newly read bytes.
                        crc = Crc32Algorithm.Append(crc, buffer, bytesInBuffer, bytesRead);
                    }
                    bytesInBuffer += bytesRead;
                    if (bytesInBuffer == buffer.Length)
                    {
                        break;
                    }
                }
                if (bytesInBuffer > clearMax)
                {
                    clearMax = bytesInBuffer;
                }
                // Fast path applies only when the whole document fits in a single buffer fill.
                readStack.UseFastPath = isFirstBlock && isFinalBlock;
                readStack.Buffer = buffer;
                readStack.BufferStart = 0;
                readStack.BufferLength = bytesInBuffer;
                ReadCore<TEntity>(ref readerState, isFinalBlock, new ReadOnlySpan<byte>(buffer, 0, bytesInBuffer), ref readStack);
                var bytesConsumed = (int)readStack.BytesConsumed;
                bytesInBuffer -= bytesConsumed;
                if (isFinalBlock)
                {
                    break;
                }
                // Check if we need to shift or expand the buffer because there wasn't enough data to complete deserialization.
                if ((uint)bytesInBuffer > ((uint)buffer.Length / 2))
                {
                    // We have less than half the buffer available, double the buffer size.
                    byte[] dest = ArrayPool<byte>.Shared.Rent((buffer.Length < (int.MaxValue / 2)) ? buffer.Length * 2 : int.MaxValue);
                    // Copy the unprocessed data to the new buffer while shifting the processed bytes.
                    Buffer.BlockCopy(buffer, bytesConsumed, dest, 0, bytesInBuffer);
                    // Scrub the written prefix of the old buffer before returning it to the shared pool.
                    new Span<byte>(buffer, 0, clearMax).Clear();
                    ArrayPool<byte>.Shared.Return(buffer);
                    clearMax = bytesInBuffer;
                    buffer = dest;
                }
                else if (bytesInBuffer != 0)
                {
                    // Shift the processed bytes to the beginning of buffer to make more room.
                    Buffer.BlockCopy(buffer, bytesConsumed, buffer, 0, bytesInBuffer);
                }
                isFirstBlock = false;
            }
            return (new ReadResult<TEntity>((TEntity)readStack.GetCurrent().ReturnValue!, crc));
        }
        finally
        {
            // Always scrub and return the pooled buffer, even when parsing throws.
            new Span<byte>(buffer, 0, clearMax).Clear();
            ArrayPool<byte>.Shared.Return(buffer);
        }
    }
    finally
    {
        readStack.Dispose();
    }
}
// Big-endian CRC-32 engine: selects the precomputed lookup table for the requested algorithm/polynomial.
public Crc32BigEndian(Crc32Algorithm algorithm) : base(Tables[(int)algorithm]) { }
/// <summary>
/// Maps an ATR id onto its vBucket: CRC-32 of the UTF-8 bytes, masked into the
/// partition range (relies on VBucketPartitionCount being a power of two).
/// </summary>
public static int GetVBucketId(string atrId)
{
    var utf8Bytes = Encoding.UTF8.GetBytes(atrId);
    var checksum = (int)Crc32Algorithm.Compute(utf8Bytes);
    return checksum & (VBucketPartitionCount - 1);
}
/// <summary>Returns the CRC-32 checksum of <paramref name="data"/>.</summary>
public static uint CrcOfBytes(byte[] data) => Crc32Algorithm.Compute(data);
/// <summary>
/// Exports a WzProperty node to JSON (or BSON), writes it to a file named after the
/// CRC-32 checksum of the serialized bytes, and optionally extracts embedded images
/// into a sharded "images" subdirectory. Non-WzProperty nodes and already-existing
/// output files are skipped.
/// </summary>
private static void ExtractFile(NameSpaceFile nsf, DirectoryInfo currentOutputDirectory)
{
    var path = Path.Combine(currentOutputDirectory.FullName, nsf.Name);
    bool useJson = !exportBson;
    var obj = nsf.Object as WzProperty;
    if (obj == null)
    {
        Console.WriteLine("Unable to export {0}, as its not a WzProperty", nsf.NodePath);
        return;
    }
    byte[] data;
    using (var exp = new Exporter(exportOptions, obj))
    {
        if (useJson)
        {
            // NOTE(review): ASCII encoding will mangle any non-ASCII characters in the
            // JSON output — confirm the serialized content is ASCII-only, else use UTF-8.
            data = Encoding.ASCII.GetBytes(exp.ToJson());
        }
        else
        {
            data = exp.ToBson();
        }
        if (exportOptions.HasFlag(Options.ExternalImageExport))
        {
            // Generate image path of the first 4 characters of the name (if possible)
            var imgDir = globalOutputDirectory.CreateSubdirectory("images");
            var name = obj.Name.Replace(".img", "");
            if (name.Length > 0) { imgDir = imgDir.CreateSubdirectory("" + name[0]); }
            if (name.Length > 1) { imgDir = imgDir.CreateSubdirectory("" + name[1]); }
            if (name.Length > 2) { imgDir = imgDir.CreateSubdirectory("" + name[2]); }
            if (name.Length > 3) { imgDir = imgDir.CreateSubdirectory("" + name[3]); }
            ExtractImages(exp, imgDir);
        }
    }
    // Checksum becomes part of the output filename, so unchanged data maps to the same file.
    nsf.Checksum = (int)Crc32Algorithm.Compute(data);
    // This will replace .img with .hash.(json|bson)
    var outputFile = Path.ChangeExtension(path, $".{nsf.Checksum:X8}." + (useJson ? "json" : "bson"));
    if (File.Exists(outputFile))
    {
        return;
    }
    Console.WriteLine("Writing {0}", outputFile);
    File.WriteAllBytes(outputFile, data);
}
/// <summary>
/// Computes non-overlapping 2D positions for a set of circles with the given radii.
/// The first circle is pinned at the origin; each subsequent circle is greedily placed
/// at the nearest candidate grid point that causes no overlap. The layout is then
/// rotated 45 degrees, Y-flipped, and re-centered on its centroid. Results are
/// memoized by a CRC-32 hash of the quantized radii.
/// </summary>
public static Vector2[] GetPositions(float[] radiuses)
{
    // Quantize radii to bytes so equal inputs produce equal cache keys
    // (assumes radius <= 10 so the scaled value fits a byte — TODO confirm).
    var radByte = radiuses.Select((r) => (byte)(r / 10f * byte.MaxValue)).ToArray();
    var hash = Crc32Algorithm.Compute(radByte);
    if (_positionsChache.TryGetValue(hash, out var chachedResult))
    {
        return (chachedResult);
    }
    var poses = new Vector2[radiuses.Length];
    // Start every circle far off-origin so unplaced circles don't interfere with placement checks.
    for (int i = 0; i < poses.Length; i++)
    {
        poses[i] = new Vector2(-1000, -1000);
    }
    poses[0] = Vector2.Empty;
    // Lazily build the shared candidate grid: 64x64 points in (0,8] x (0,8],
    // ordered by distance from the origin so the greedy search prefers tight packing.
    if (_posiblePoints == null)
    {
        var pointList = new List<Vector2>();
        for (int i = 1; i <= 64; i++)
        {
            for (int j = 1; j <= 64; j++)
            {
                pointList.Add(new Vector2(i / 8f, j / 8f));
            }
        }
        _posiblePoints = pointList.OrderBy((p) => p.SqrLength()).ToArray();
    }
    // True when circle 'index' overlaps any other circle at its current position
    // (squared-distance comparison avoids a sqrt).
    bool CheckCollision(int index)
    {
        for (int i = 0; i < poses.Length; i++)
        {
            if (i == index)
            {
                continue;
            }
            float rSqr = radiuses[i] + radiuses[index];
            rSqr *= rSqr;
            if ((poses[i] - poses[index]).SqrLength() < rSqr)
            {
                return (true);
            }
        }
        return (false);
    }
    // Greedy placement: the first collision-free candidate (closest to origin) wins.
    for (int i = 1; i < poses.Length; i++)
    {
        foreach (var point in _posiblePoints)
        {
            poses[i] = point.Clone();
            if (!CheckCollision(i))
            {
                break;
            }
        }
    }
    // Rotate 45 degrees (0.7071074 ~ sqrt(2)/2), flip Y, and accumulate the centroid.
    var avgPoint = Vector2.Empty;
    for (int i = 0; i < poses.Length; i++)
    {
        float ox = poses[i].X;
        float oy = poses[i].Y;
        float x = ox * 0.7071074f - oy * 0.7071074f;
        float y = ox * 0.7071074f + oy * 0.7071074f;
        poses[i] = new Vector2(x, -y);
        avgPoint += poses[i];
    }
    avgPoint /= poses.Length;
    // Re-center the layout on its centroid.
    for (int i = 0; i < poses.Length; i++)
    {
        poses[i] = poses[i] - avgPoint;
    }
    _positionsChache.Add(hash, poses);
    return (poses);
}
/// <summary>
/// Reads this entry's raw bytes from the given BAR archive, computes its CRC-32, and
/// populates the header text / header value and (for .DDT entries) the DDT preview
/// properties shown in the UI.
/// </summary>
/// <param name="filename">Path to the BAR archive on disk.</param>
/// <exception cref="Exception">Thrown when the BAR file does not exist.</exception>
// FIX: was 'async void' — exceptions thrown here were unobservable and could crash the
// process; 'async Task' is call-site compatible and lets callers await/observe failures.
// Also wraps the BinaryReader in a using declaration so it is always disposed.
public async Task GetCustomValues(string filename)
{
    if (!File.Exists(filename))
    {
        throw new Exception("BAR file does not exist!");
    }
    using var file = File.OpenRead(filename);
    using var reader = new BinaryReader(file);

    reader.BaseStream.Seek(entry.Offset, SeekOrigin.Begin);
    byte[] data = reader.ReadBytes(entry.FileSize2);
    // CRC of a large entry can be expensive; keep it off the UI thread.
    await Task.Run(() => { entry.CRC32 = Crc32Algorithm.Compute(data); });

    reader.BaseStream.Seek(entry.Offset, SeekOrigin.Begin);
    HeaderText = new string(reader.ReadChars(4));
    reader.BaseStream.Seek(entry.Offset, SeekOrigin.Begin);
    Header = reader.ReadInt32();

    if (entry.Extension == ".DDT")
    {
        // DDT payloads may be wrapped in either an Alz4 or an L33T zip container.
        if (Alz4Utils.IsAlz4File(data))
        {
            data = await Alz4Utils.ExtractAlz4BytesAsync(data);
        }
        else
        {
            if (L33TZipUtils.IsL33TZipFile(data))
            {
                data = await L33TZipUtils.ExtractL33TZippedBytesAsync(data);
            }
        }
        PreviewDdt = new DdtFile(data, true);

        // Build a human-readable "<value> (Flag1+Flag2)" usage description.
        var flagList = new List<string>();
        if (PreviewDdt.Usage.HasFlag(DdtFileTypeUsage.AlphaTest)) { flagList.Add(DdtFileTypeUsage.AlphaTest.ToString()); }
        if (PreviewDdt.Usage.HasFlag(DdtFileTypeUsage.LowDetail)) { flagList.Add(DdtFileTypeUsage.LowDetail.ToString()); }
        if (PreviewDdt.Usage.HasFlag(DdtFileTypeUsage.Bump)) { flagList.Add(DdtFileTypeUsage.Bump.ToString()); }
        if (PreviewDdt.Usage.HasFlag(DdtFileTypeUsage.Cube)) { flagList.Add(DdtFileTypeUsage.Cube.ToString()); }

        DdtUsage = flagList.Count > 0
            ? ((byte)PreviewDdt.Usage).ToString() + " (" + string.Join('+', flagList) + ")"
            : ((byte)PreviewDdt.Usage).ToString();
        DdtAlpha = ((byte)PreviewDdt.Alpha).ToString() + " (" + PreviewDdt.Alpha.ToString() + ")";
        DdtFormat = ((byte)PreviewDdt.Format).ToString() + " (" + PreviewDdt.Format.ToString() + ")";
        gpDDT.Visibility = Visibility.Visible;
    }
}
/// <summary>
/// Entry point. Mode 1 writes a baseline manifest (init.txt) with a "relative-path:crc32"
/// line per resource file; mode 2 compares current CRCs against the baseline and writes a
/// patch manifest containing only changed or new files.
/// </summary>
static void Main(string[] args)
{
    var path = Environment.GetEnvironmentVariable("H5_GANGSTER_RESOURCE");
    if (string.IsNullOrEmpty(path))
    {
        Console.WriteLine(@"请设置环境变量H5_GANGSTER_RESOURCE至你的svn\H5-Gangster\Egret\resource目录");
        Console.ReadKey();
        return;
    }
    if (!Directory.Exists(path))
    {
        Console.WriteLine(@"资源路径不存在:{0}", path);
        Console.ReadKey();
        return;
    }
    switch (GetInput("请选择要执行的操作 1.生成初始版本 2.生成patch版本", new int[2] { 1, 2 }))
    {
        case 1:
        {
            using (TextWriter writer = new StreamWriter(@".\init.txt"))
            {
                var roots = Directory.GetDirectories(path);
                foreach (var root in roots)
                {
                    var files = Directory.GetFiles(root, "*.*", SearchOption.AllDirectories);
                    foreach (var file in files)
                    {
                        // FIX: File.ReadAllBytes always closes the stream (even on error) and
                        // always returns the full contents; the previous FileStream.Read call
                        // could legally return fewer bytes and leaked the stream on exception.
                        byte[] buffer = File.ReadAllBytes(file);
                        uint crc32 = Crc32Algorithm.Compute(buffer);
                        Console.WriteLine("处理文件:{0}-{1}", Path.GetFileName(file), crc32);
                        var fileRelatePath = file.Substring(path.Length + 1).Replace('\\', '/');
                        writer.WriteLine("{0}:{1}", fileRelatePath, crc32);
                    }
                }
            }
        }
        break;

        case 2:
            if (!File.Exists(@".\init.txt"))
            {
                Console.WriteLine(@"初始文件不存在请先执行1操作");
                Console.ReadKey();
                return;
            }
            // Load the baseline manifest: relative path -> CRC-32.
            Dictionary<string, uint> initFileCrc = new Dictionary<string, uint>();
            using (TextReader reader = new StreamReader(@".\init.txt"))
            {
                while (true)
                {
                    var line = reader.ReadLine();
                    if (string.IsNullOrEmpty(line))
                    {
                        break;
                    }
                    var parts = line.Split(':');
                    initFileCrc.Add(parts[0], uint.Parse(parts[1]));
                }
            }
            // Prompt until the user supplies a legal file name for the patch manifest.
            string patchFileName;
            while (true)
            {
                Console.WriteLine("请输入版本号例如:1.0");
                patchFileName = Console.ReadLine();
                if (patchFileName.IndexOfAny(Path.GetInvalidFileNameChars()) >= 0)
                {
                    Console.WriteLine("输入的名称非法!请重新输入");
                }
                else
                {
                    break;
                }
            }
            using (TextWriter patchWriter = new StreamWriter(@"./" + patchFileName + ".txt"))
            {
                var roots = Directory.GetDirectories(path);
                foreach (var root in roots)
                {
                    var files = Directory.GetFiles(root, "*.*", SearchOption.AllDirectories);
                    foreach (var file in files)
                    {
                        byte[] buffer = File.ReadAllBytes(file);
                        uint crc32 = Crc32Algorithm.Compute(buffer);
                        var fileRelatePath = file.Substring(path.Length + 1).Replace('\\', '/');
                        // Skip files whose CRC matches the baseline (TryGetValue avoids a double lookup).
                        if (initFileCrc.TryGetValue(fileRelatePath, out var baselineCrc) && baselineCrc == crc32)
                        {
                            continue;
                        }
                        Console.WriteLine("新增Patch文件:{0}-{1}", Path.GetFileName(file), crc32);
                        patchWriter.WriteLine("{0}:{1}", fileRelatePath, crc32);
                    }
                }
            }
            Console.WriteLine("生成Patch文件成功");
            Console.ReadKey();
            break;
    }
}
/// <summary>
/// Parses an IRD file (optionally gzip-compressed) into an <see cref="Ird"/> structure
/// and validates the trailing CRC-32 against a checksum of all preceding bytes.
/// </summary>
/// <param name="content">Raw IRD bytes, compressed or uncompressed.</param>
/// <exception cref="ArgumentNullException">content is null.</exception>
/// <exception cref="ArgumentException">content is shorter than 200 bytes.</exception>
/// <exception cref="FormatException">content does not (de)compress to a valid IRD magic.</exception>
/// <exception cref="InvalidDataException">the stored CRC does not match the computed one.</exception>
public static Ird Parse(byte[] content)
{
    if (content == null)
    {
        throw new ArgumentNullException(nameof(content));
    }
    if (content.Length < 200)
    {
        throw new ArgumentException("Data is too small to be a valid IRD structure", nameof(content));
    }
    // No magic at the start means the payload is gzip-compressed; inflate it first.
    if (BitConverter.ToInt32(content, 0) != Ird.Magic)
    {
        using (var compressedStream = new MemoryStream(content, false))
        using (var gzip = new GZipStream(compressedStream, CompressionMode.Decompress))
        using (var decompressedStream = new MemoryStream())
        {
            gzip.CopyTo(decompressedStream);
            content = decompressedStream.ToArray();
        }
    }
    if (BitConverter.ToInt32(content, 0) != Ird.Magic)
    {
        throw new FormatException("Not a valid IRD file");
    }
    var result = new Ird();
    using (var stream = new MemoryStream(content, false))
    using (var reader = new BinaryReader(stream, Encoding.UTF8))
    {
        reader.ReadInt32(); // magic
        result.Version = reader.ReadByte();
        result.ProductCode = Encoding.ASCII.GetString(reader.ReadBytes(9));
        result.TitleLength = reader.ReadByte();
        result.Title = Encoding.UTF8.GetString(reader.ReadBytes(result.TitleLength));
        result.UpdateVersion = Encoding.ASCII.GetString(reader.ReadBytes(4)).Trim();
        result.GameVersion = Encoding.ASCII.GetString(reader.ReadBytes(5)).Trim();
        result.AppVersion = Encoding.ASCII.GetString(reader.ReadBytes(5)).Trim();
        // The Id field only exists in format version 7.
        if (result.Version == 7)
        {
            result.Id = reader.ReadInt32();
        }
        result.HeaderLength = reader.ReadInt32();
        result.Header = reader.ReadBytes(result.HeaderLength);
        result.FooterLength = reader.ReadInt32();
        result.Footer = reader.ReadBytes(result.FooterLength);
        result.RegionCount = reader.ReadByte();
        result.RegionMd5Checksums = new List<byte[]>(result.RegionCount);
        for (var i = 0; i < result.RegionCount; i++)
        {
            result.RegionMd5Checksums.Add(reader.ReadBytes(16));
        }
        // Per-file table: 8-byte offset + 16-byte MD5 each.
        result.FileCount = reader.ReadInt32();
        result.Files = new List<IrdFile>(result.FileCount);
        for (var i = 0; i < result.FileCount; i++)
        {
            var file = new IrdFile();
            file.Offset = reader.ReadInt64();
            file.Md5Checksum = reader.ReadBytes(16);
            result.Files.Add(file);
        }
        result.Unknown = reader.ReadInt32();
        // The 115-byte PIC blob moved relative to Data1/Data2 starting with version 9.
        if (result.Version == 9)
        {
            result.Pic = reader.ReadBytes(115);
        }
        result.Data1 = reader.ReadBytes(16);
        result.Data2 = reader.ReadBytes(16);
        if (result.Version < 9)
        {
            result.Pic = reader.ReadBytes(115);
        }
        result.Uid = reader.ReadInt32();
        // The CRC covers everything up to (but not including) the stored CRC itself.
        var dataLength = reader.BaseStream.Position;
        result.Crc32 = reader.ReadUInt32();
        var crc32 = Crc32Algorithm.Compute(content, 0, (int)dataLength);
        if (result.Crc32 != crc32)
        {
            throw new InvalidDataException($"Corrupted IRD data, expected {result.Crc32:x8}, but was {crc32:x8}");
        }
    }
    return (result);
}
/// <summary>
/// Encodes an <see cref="Image"/> as a PNG onto <paramref name="stream"/>:
/// 8-bit depth, no interlacing, filter type 0 on every scanline, zlib-compressed
/// IDAT, and a CRC-32 per chunk. Supports greyscale, RGB and RGBA color modes.
/// The stream is left open (leaveOpen: true).
/// </summary>
public static void Encode(Image image, Stream stream)
{
    var bw = new BeBinaryWriter(stream, Encoding.Default, leaveOpen: true);
    // PNG file signature.
    bw.Write(new byte[] { 0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A });
    // Writes one chunk: 4-byte length, type+data, then CRC-32 computed over type+data.
    void WriteChunk(string type, byte[] data)
    {
        Debug.Assert(type.Length == 4);
        bw.Write(data.Length);
        var td = Encoding.ASCII.GetBytes(type).Concat(data).ToArray();
        bw.Write(td);
        bw.Write(Crc32Algorithm.Compute(td));
    }
    // Buffers a chunk body in memory so its length is known before writing.
    void Chunk(string type, Action<BeBinaryWriter> func)
    {
        using (var ms = new MemoryStream())
        using (var sbw = new BeBinaryWriter(ms))
        {
            func(sbw);
            sbw.Flush();
            WriteChunk(type, ms.ToArray());
        }
    }
    Chunk("IHDR", w =>
    {
        w.Write(image.Size.Width);
        w.Write(image.Size.Height);
        w.Write((byte)8); // bit depth
        // Color type byte: 0 = greyscale, 2 = truecolor, 6 = truecolor + alpha.
        switch (image.ColorMode)
        {
            case ColorMode.Greyscale: w.Write((byte)0); break;
            case ColorMode.Rgb: w.Write((byte)2); break;
            case ColorMode.Rgba: w.Write((byte)6); break;
            default: throw new NotImplementedException();
        }
        w.Write((byte)0); // Compression mode
        w.Write((byte)0); // Filter
        w.Write((byte)0); // Interlace
    });
    var ps = Image.PixelSize(image.ColorMode);
    var stride = image.Size.Width * ps;
    var imem = new byte[image.Size.Height + image.Size.Width * image.Size.Height * ps]; // One byte per scanline for filter (0)
    for (var y = 0; y < image.Size.Height; ++y)
    {
        // '+ y + 1' skips the per-scanline filter byte (left as 0 = "None").
        Array.Copy(image.Data, y * stride, imem, y * stride + y + 1, stride);
    }
    using (var ms = new MemoryStream())
    {
        using (var ds = new ZlibStream(ms, CompressionMode.Compress, CompressionLevel.BestSpeed, leaveOpen: true))
        {
            ds.Write(imem, 0, imem.Length);
            ds.Flush();
        }
        ms.Flush();
        WriteChunk("IDAT", ms.ToArray());
    }
    Chunk("IEND", w => { });
    bw.Flush();
}
/// <summary>
/// Computes the CRC-32 of the whole message and stores it (4 bytes, BitConverter
/// byte order) at offset 8 of the message buffer.
/// </summary>
private void WriteCrc(ref byte[] message_bytes)
{
    var checksumBytes = BitConverter.GetBytes(Crc32Algorithm.Compute(message_bytes));
    Array.Copy(checksumBytes, 0, message_bytes, 8, 4);
}
/// <summary>
/// Validates and parses a DSU ("DSUC") message header: magic, protocol version,
/// packet size, CRC-32 (recomputed with the CRC field zeroed), client id and
/// message type. Returns null on any validation failure. Note: mutates
/// <paramref name="message_bytes"/> (zeroes the CRC field; may rebind to a
/// truncated copy locally).
/// </summary>
private Header ProcessHeader(byte[] message_bytes)
{
    Header header = new Header();
    int index = 0;
    header.identifier = "DSUC";
    // Magic must match byte-for-byte.
    if (message_bytes[0] != header.identifier[0] || message_bytes[1] != header.identifier[1] || message_bytes[2] != header.identifier[2] || message_bytes[3] != header.identifier[3])
    {
        return (null);
    }
    index += 4;
    header.protocol_version = BitConverter.ToUInt16(message_bytes, index);
    index += 2;
    if (header.protocol_version > max_protocol_version_supported)
    {
        return (null);
    }
    header.packet_size = BitConverter.ToUInt16(message_bytes, index);
    index += 2;
    // NOTE(review): if packet_size is declared unsigned this check can never fire — confirm the field's type.
    if (header.packet_size < 0)
    {
        return (null);
    }
    // Account for the 16-byte header before comparing against the full buffer length.
    header.packet_size += 16;
    if (header.packet_size > message_bytes.Length)
    {
        return (null);
    }
    else if (header.packet_size < message_bytes.Length)
    {
        // Trim trailing bytes so the CRC below covers exactly one packet.
        byte[] truncated_message = new byte[header.packet_size];
        Array.Copy(message_bytes, truncated_message, header.packet_size);
        message_bytes = truncated_message;
    }
    header.crc_value = BitConverter.ToUInt32(message_bytes, index);
    // Zero out the CRC so we can recalculate to compare
    message_bytes[index++] = 0;
    message_bytes[index++] = 0;
    message_bytes[index++] = 0;
    message_bytes[index++] = 0;
    uint crc_calculated = Crc32Algorithm.Compute(message_bytes);
    if (header.crc_value != crc_calculated)
    {
        return (null);
    }
    header.client_id = BitConverter.ToUInt32(message_bytes, index);
    index += 4;
    header.message_type = BitConverter.ToUInt32(message_bytes, index);
    index += 4;
    return (header);
}
/// <summary>
/// Parses one EVTX chunk: header fields, header CRC recomputation, the string table,
/// the template table (following template chains), and then every event record —
/// including detection of hidden ("DanderSpritz"-style) records appended after a
/// record's visible data, and of timestamps that run backwards between records.
/// Failed records are collected into ErrorRecords rather than aborting the chunk.
/// </summary>
/// <param name="chunkBytes">Raw bytes of this chunk.</param>
/// <param name="absoluteOffset">Offset of the chunk within the source file (used for logging).</param>
/// <param name="chunkNumber">Zero-based index of the chunk.</param>
public ChunkInfo(byte[] chunkBytes, long absoluteOffset, int chunkNumber)
{
    var l = LogManager.GetLogger("ChunkInfo");
    l.Trace(
        $"\r\n-------------------------------------------- NEW CHUNK at 0x{absoluteOffset:X} ------------------------------------------------\r\n");
    ChunkBytes = chunkBytes;
    AbsoluteOffset = absoluteOffset;
    ChunkNumber = chunkNumber;
    ErrorRecords = new Dictionary<long, string>();
    EventRecords = new List<EventRecord>();
    FirstEventRecordNumber = BitConverter.ToInt64(chunkBytes, 0x8);
    LastEventRecordNumber = BitConverter.ToInt64(chunkBytes, 0x10);
    FirstEventRecordIdentifier = BitConverter.ToInt64(chunkBytes, 0x18);
    LastEventRecordIdentifier = BitConverter.ToInt64(chunkBytes, 0x20);
    // -1 marks an unused/empty chunk; nothing further to parse.
    if (FirstEventRecordIdentifier == -1)
    {
        return;
    }
    var tableOffset = BitConverter.ToUInt32(chunkBytes, 0x28);
    LastRecordOffset = BitConverter.ToUInt32(chunkBytes, 0x2C);
    FreeSpaceOffset = BitConverter.ToUInt32(chunkBytes, 0x30);
    //TODO how to calculate this? across what data? all event records?
    var crcEventRecordsData = BitConverter.ToUInt32(chunkBytes, 0x34);
    Crc = BitConverter.ToInt32(chunkBytes, 0x7c);
    // Recompute the header CRC over bytes 0-119 and 128-511 (skipping the stored CRC itself).
    var inputArray = new byte[120 + 384 + 4];
    Buffer.BlockCopy(chunkBytes, 0, inputArray, 0, 120);
    Buffer.BlockCopy(chunkBytes, 128, inputArray, 120, 384);
    Crc32Algorithm.ComputeAndWriteToEnd(inputArray); // last 4 bytes contains CRC
    CalculatedCrc = BitConverter.ToInt32(inputArray, inputArray.Length - 4);
    var index = 0;
    // String table: 0x100 bytes of 4-byte offsets (0 = empty slot).
    var tableData = new byte[0x100];
    Buffer.BlockCopy(chunkBytes, (int)tableOffset, tableData, 0, 0x100);
    StringTableEntries = new Dictionary<uint, StringTableEntry>();
    var stringOffsets = new List<uint>();
    var ticksForTimeDelta = 10000000 * EventLog.TimeDiscrepancyThreshold; //10000000 == ticks in a second
    while (index < tableData.Length)
    {
        var stringOffset = BitConverter.ToUInt32(tableData, index);
        index += 4;
        if (stringOffset == 0)
        {
            continue;
        }
        stringOffsets.Add(stringOffset);
    }
    foreach (var stringOffset in stringOffsets)
    {
        GetStringTableEntry(stringOffset);
    }
    l.Trace("String table entries");
    foreach (var stringTableEntry in StringTableEntries.Keys.OrderBy(t => t))
    {
        l.Trace(StringTableEntries[stringTableEntry]);
    }
    l.Trace("");
    // Template table: 0x80 bytes of 4-byte offsets starting at chunk offset 0x180.
    var templateTableData = new byte[0x80];
    Buffer.BlockCopy(chunkBytes, 0x180, templateTableData, 0, 0x80);
    var tableTemplateOffsets = new List<uint>();
    index = 0;
    while (index < templateTableData.Length)
    {
        var templateOffset = BitConverter.ToUInt32(templateTableData, index);
        index += 4;
        if (templateOffset == 0)
        {
            continue;
        }
        //the actual table definitions live at this Offset + 0x1000 for header, - 10 bytes for some reason.
        //This is where the 0xc op code will be
        tableTemplateOffsets.Add(templateOffset);
    }
    Templates = new Dictionary<int, Template>();
    //to get all the templates and cache them
    foreach (var tableTemplateOffset in tableTemplateOffsets.OrderBy(t => t))
    {
        var actualOffset = absoluteOffset + tableTemplateOffset - 10; //yea, -10
        index = (int)tableTemplateOffset - 10;
        l.Trace(
            $"Chunk absoluteOffset: 0x{AbsoluteOffset:X} tableTemplateOffset: 0x{tableTemplateOffset:X} actualOffset: 0x {actualOffset:X} chunkBytes[index]: 0x{chunkBytes[index]:X} LastRecordOffset 0x{LastRecordOffset:X} FreeSpaceOffset 0x{FreeSpaceOffset:X}");
        var template = GetTemplate(index);
        if (template == null)
        {
            l.Trace(
                $"Implausable template at actual offset: 0x{actualOffset} tableTemplateOffset 0x{tableTemplateOffset:X} FreeSpaceOffset: 0x{FreeSpaceOffset} chunk absolute offset: 0x{AbsoluteOffset:X}");
            continue;
        }
        if (Templates.ContainsKey(template.TemplateOffset) == false)
        {
            Templates.Add(template.TemplateOffset - 0x18, template);
        }
        if (template.NextTemplateOffset <= 0)
        {
            continue;
        }
        // Templates can chain; walk the linked list and cache each one.
        var nextTemplateId = template.NextTemplateOffset;
        while (nextTemplateId > 0)
        {
            var bbb = GetTemplate(nextTemplateId - 10);
            nextTemplateId = bbb.NextTemplateOffset;
            if (Templates.ContainsKey(bbb.TemplateOffset) == false)
            {
                Templates.Add(bbb.TemplateOffset - 0x18, bbb);
            }
        }
    }
    l.Trace("Template definitions");
    foreach (var esTemplate in Templates.OrderBy(t => t.Key))
    {
        l.Trace($"key: 0x{esTemplate.Key:X4} {esTemplate.Value}");
    }
    l.Trace("");
    index = (int)tableOffset + 0x100 + 0x80; //get to start of event Records
    l.Trace($"\r\nChunk data before processing records: {this}");
    const int recordSig = 0x2a2a;
    long lastRecordNumber = 0;
    while (index < chunkBytes.Length)
    {
        var sig = BitConverter.ToInt32(chunkBytes, index);
        if (sig != recordSig)
        {
            l.Trace(
                $"Found an invalid signature at 0x{absoluteOffset + index:X}");
            break;
        }
        var recordOffset = index;
        //do not read past the last known defined record
        if (recordOffset - absoluteOffset > LastRecordOffset)
        {
            l.Trace(
                "Reached last record offset. Stopping");
            break;
        }
        var recordSize = BitConverter.ToUInt32(chunkBytes, index + 4);
        var recordNumber = BitConverter.ToInt64(chunkBytes, index + 8);
        try
        {
            if (recordNumber < FirstEventRecordIdentifier || recordNumber > LastEventRecordIdentifier)
            {
                //outside known good range, so ignore
                l.Trace(
                    $"Record at offset 0x{AbsoluteOffset + recordOffset:X} falls outside valid record identifier range. Skipping");
                break;
            }
            var ms = new MemoryStream(chunkBytes, index, (int)recordSize);
            var br = new BinaryReader(ms, Encoding.UTF8);
            index += (int)recordSize;
            var er = new EventRecord(br, recordOffset, this);
            EventRecords.Add(er);
            lastRecordNumber = er.RecordNumber;
            if (er.ExtraDataOffset > 0)
            {
                try
                {
                    //hidden data!
                    // A second record header past ExtraDataOffset with a different record number
                    // indicates a concealed record appended inside this record's buffer.
                    recordSize = BitConverter.ToUInt32(ms.ToArray(), (int)er.ExtraDataOffset + 4);
                    recordNumber = BitConverter.ToInt64(ms.ToArray(), (int)er.ExtraDataOffset + 8);
                    if (recordNumber != lastRecordNumber)
                    {
                        ms = new MemoryStream(ms.ToArray(), (int)er.ExtraDataOffset, (int)recordSize);
                        br = new BinaryReader(ms, Encoding.UTF8);
                        er = new EventRecord(br, (int)(recordOffset + er.ExtraDataOffset), this);
                        er.HiddenRecord = true;
                        l.Warn($"Record #: {er.RecordNumber} (timestamp: {er.TimeCreated:yyyy-MM-dd HH:mm:ss.fffffff}): Warning! A hidden record was found! Possible DanderSpritz use detected!");
                        EventRecords.Add(er);
                    }
                }
                catch (Exception)
                {
                    //oh well, we tried
                    //l.Warn($"Error when attempting to recover possible hidden record: {e.Message}");
                }
            }
            //ticksForTimeDelta == totalticks for discrepancy value
            if (EventLog.LastSeenTicks > 0 && EventLog.LastSeenTicks - ticksForTimeDelta > er.TimeCreated.Ticks)
            {
                l.Warn($"Record #: {er.RecordNumber} (timestamp: {er.TimeCreated:yyyy-MM-dd HH:mm:ss.fffffff}): Warning! Time just went backwards! Last seen time before change: {new DateTimeOffset(EventLog.LastSeenTicks,TimeSpan.Zero).ToUniversalTime():yyyy-MM-dd HH:mm:ss.fffffff}");
            }
            EventLog.LastSeenTicks = er.TimeCreated.Ticks;
        }
        catch (Exception e)
        {
            l.Trace(
                $"First event record ident-num: {FirstEventRecordIdentifier}-{FirstEventRecordNumber} Last event record ident-num: {LastEventRecordIdentifier}-{LastEventRecordNumber} last record offset 0x{LastRecordOffset:X}");
            l.Error(
                $"Record error at offset 0x{AbsoluteOffset + recordOffset:X}, record #: {recordNumber} error: {e.Message}");
            if (ErrorRecords.ContainsKey(recordNumber) == false)
            {
                ErrorRecords.Add(recordNumber, e.Message);
            }
        }
    }
}
/// <summary>Returns the CRC-32 checksum of the entire contents of <paramref name="fileName"/>.</summary>
public uint CalculateChecksum(string fileName)
{
    byte[] contents = fileSystemFacade.ReadAllBytes(fileName);
    return Crc32Algorithm.Compute(contents);
}
/// <summary>
/// Parses one EVTX chunk: header fields, header CRC recomputation, the string table,
/// the template table (following template chains), and then every event record while
/// accumulating per-event-id counts into EventIdMetrics. Failed records are collected
/// into ErrorRecords rather than aborting the chunk.
/// </summary>
/// <param name="chunkBytes">Raw bytes of this chunk.</param>
/// <param name="absoluteOffset">Offset of the chunk within the source file (used for logging).</param>
/// <param name="chunkNumber">Zero-based index of the chunk.</param>
public ChunkInfo(byte[] chunkBytes, long absoluteOffset, int chunkNumber)
{
    var l = LogManager.GetLogger("ChunkInfo");
    l.Trace(
        $"\r\n-------------------------------------------- NEW CHUNK at 0x{absoluteOffset:X} ------------------------------------------------\r\n");
    ChunkBytes = chunkBytes;
    AbsoluteOffset = absoluteOffset;
    ChunkNumber = chunkNumber;
    ErrorRecords = new Dictionary<long, string>();
    EventIdMetrics = new Dictionary<long, int>();
    EventRecords = new List<EventRecord>();
    FirstEventRecordNumber = BitConverter.ToInt64(chunkBytes, 0x8);
    LastEventRecordNumber = BitConverter.ToInt64(chunkBytes, 0x10);
    FirstEventRecordIdentifier = BitConverter.ToInt64(chunkBytes, 0x18);
    LastEventRecordIdentifier = BitConverter.ToInt64(chunkBytes, 0x20);
    // -1 marks an unused/empty chunk; nothing further to parse.
    if (FirstEventRecordIdentifier == -1)
    {
        return;
    }
    var tableOffset = BitConverter.ToUInt32(chunkBytes, 0x28);
    LastRecordOffset = BitConverter.ToUInt32(chunkBytes, 0x2C);
    FreeSpaceOffset = BitConverter.ToUInt32(chunkBytes, 0x30);
    //TODO how to calculate this? across what data? all event records?
    var crcEventRecordsData = BitConverter.ToUInt32(chunkBytes, 0x34);
    Crc = BitConverter.ToInt32(chunkBytes, 0x7c);
    // Recompute the header CRC over bytes 0-119 and 128-511 (skipping the stored CRC itself).
    var inputArray = new byte[120 + 384 + 4];
    Buffer.BlockCopy(chunkBytes, 0, inputArray, 0, 120);
    Buffer.BlockCopy(chunkBytes, 128, inputArray, 120, 384);
    Crc32Algorithm.ComputeAndWriteToEnd(inputArray); // last 4 bytes contains CRC
    CalculatedCrc = BitConverter.ToInt32(inputArray, inputArray.Length - 4);
    var index = 0;
    // String table: 0x100 bytes of 4-byte offsets (0 = empty slot).
    var tableData = new byte[0x100];
    Buffer.BlockCopy(chunkBytes, (int)tableOffset, tableData, 0, 0x100);
    StringTableEntries = new Dictionary<uint, StringTableEntry>();
    var stringOffsets = new List<uint>();
    while (index < tableData.Length)
    {
        var stringOffset = BitConverter.ToUInt32(tableData, index);
        index += 4;
        if (stringOffset == 0)
        {
            continue;
        }
        stringOffsets.Add(stringOffset);
    }
    foreach (var stringOffset in stringOffsets)
    {
        GetStringTableEntry(stringOffset);
    }
    l.Trace("String table entries");
    foreach (var stringTableEntry in StringTableEntries.Keys.OrderBy(t => t))
    {
        l.Trace(StringTableEntries[stringTableEntry]);
    }
    l.Trace("");
    // Template table: 0x80 bytes of 4-byte offsets starting at chunk offset 0x180.
    var templateTableData = new byte[0x80];
    Buffer.BlockCopy(chunkBytes, 0x180, templateTableData, 0, 0x80);
    var tableTemplateOffsets = new List<uint>();
    index = 0;
    while (index < templateTableData.Length)
    {
        var templateOffset = BitConverter.ToUInt32(templateTableData, index);
        index += 4;
        if (templateOffset == 0)
        {
            continue;
        }
        //the actual table definitions live at this Offset + 0x1000 for header, - 10 bytes for some reason.
        //This is where the 0xc op code will be
        tableTemplateOffsets.Add(templateOffset);
    }
    Templates = new Dictionary<int, Template>();
    //to get all the templates and cache them
    foreach (var tableTemplateOffset in tableTemplateOffsets.OrderBy(t => t))
    {
        var actualOffset = absoluteOffset + tableTemplateOffset - 10; //yea, -10
        index = (int)tableTemplateOffset - 10;
        l.Trace(
            $"Chunk absoluteOffset: 0x{AbsoluteOffset:X} tableTemplateOffset: 0x{tableTemplateOffset:X} actualOffset: 0x {actualOffset:X} chunkBytes[index]: 0x{chunkBytes[index]:X} LastRecordOffset 0x{LastRecordOffset:X} FreeSpaceOffset 0x{FreeSpaceOffset:X}");
        var template = GetTemplate(index);
        if (template == null)
        {
            l.Trace(
                $"Implausable template at actual offset: 0x{actualOffset} tableTemplateOffset 0x{tableTemplateOffset:X} FreeSpaceOffset: 0x{FreeSpaceOffset} chunk absolute offset: 0x{AbsoluteOffset:X}");
            continue;
        }
        if (Templates.ContainsKey(template.TemplateOffset) == false)
        {
            Templates.Add(template.TemplateOffset - 0x18, template);
        }
        if (template.NextTemplateOffset <= 0)
        {
            continue;
        }
        // Templates can chain; walk the linked list and cache each one.
        var nextTemplateId = template.NextTemplateOffset;
        while (nextTemplateId > 0)
        {
            var bbb = GetTemplate(nextTemplateId - 10);
            nextTemplateId = bbb.NextTemplateOffset;
            if (Templates.ContainsKey(bbb.TemplateOffset) == false)
            {
                Templates.Add(bbb.TemplateOffset - 0x18, bbb);
            }
        }
    }
    l.Trace("Template definitions");
    foreach (var esTemplate in Templates.OrderBy(t => t.Key))
    {
        l.Trace($"key: 0x{esTemplate.Key:X4} {esTemplate.Value}");
    }
    l.Trace("");
    index = (int)tableOffset + 0x100 + 0x80; //get to start of event Records
    l.Trace($"\r\nChunk data before processing records: {this}");
    const int recordSig = 0x2a2a;
    while (index < chunkBytes.Length)
    {
        var sig = BitConverter.ToInt32(chunkBytes, index);
        if (sig != recordSig)
        {
            l.Trace(
                $"Found an invalid signature at 0x{absoluteOffset + index:X}");
            break;
        }
        var recordOffset = index;
        //do not read past the last known defined record
        if (recordOffset - absoluteOffset > LastRecordOffset)
        {
            l.Trace(
                "Reached last record offset. Stopping");
            break;
        }
        var recordSize = BitConverter.ToUInt32(chunkBytes, index + 4);
        var recordNumber = BitConverter.ToInt64(chunkBytes, index + 8);
        try
        {
            if (recordNumber < FirstEventRecordIdentifier || recordNumber > LastEventRecordIdentifier)
            {
                //outside known good range, so ignore
                l.Debug(
                    $"Record at offset 0x{AbsoluteOffset + recordOffset:X} falls outside valid record identifier range. Skipping");
                break;
            }
            var ms = new MemoryStream(chunkBytes, index, (int)recordSize);
            var br = new BinaryReader(ms, Encoding.UTF8);
            index += (int)recordSize;
            var er = new EventRecord(br, recordOffset, this);
            EventRecords.Add(er);
            // Tally how many times each event id appears in this chunk.
            if (EventIdMetrics.ContainsKey(er.EventId) == false)
            {
                EventIdMetrics.Add(er.EventId, 0);
            }
            EventIdMetrics[er.EventId] += 1;
        }
        catch (Exception e)
        {
            l.Trace(
                $"First event record ident-num: {FirstEventRecordIdentifier}-{FirstEventRecordNumber} Last event record ident-num: {LastEventRecordIdentifier}-{LastEventRecordNumber} last record offset 0x{LastRecordOffset:X}");
            l.Error(
                $"Record error at offset 0x{AbsoluteOffset + recordOffset:X}, record #: {recordNumber} error: {e.Message}");
            if (ErrorRecords.ContainsKey(recordNumber) == false)
            {
                ErrorRecords.Add(recordNumber, e.Message);
            }
        }
    }
}
/// <summary>CRC-32 of <paramref name="input"/>, widened from uint to long for the caller's convenience.</summary>
public static long GetCrc32(this byte[] input) => Crc32Algorithm.Compute(input);
/// <summary>
/// Reader loop for an event-stream response (S3 Select style): reads prelude + prelude
/// CRC, validates both the prelude CRC and the whole-message CRC, then dispatches on the
/// ":message-type"/":event-type" headers (error, End, Cont, Progress, Stats, Records).
/// Record payloads are appended to <c>Payload</c>; the loop ends on an "End" event or
/// when <c>_isProcessing</c> is cleared.
/// </summary>
/// <exception cref="ArgumentException">A prelude or message CRC is invalid or mismatched.</exception>
/// <exception cref="IOException">The stream ended before a full header/payload was read.</exception>
/// <exception cref="SelectObjectContentException">The stream delivered an ":error" message.</exception>
private void start()
{
    int numBytesRead = 0;
    while (_isProcessing)
    {
        // Prelude: total length + header length (8 bytes), followed by its own 4-byte CRC.
        var n = ReadFromStream(prelude);
        numBytesRead += n;
        n = ReadFromStream(preludeCRC);
        var preludeCRCBytes = preludeCRC.ToArray();
        // Wire values are big-endian; reverse on little-endian hosts before comparing.
        if (BitConverter.IsLittleEndian)
        {
            Array.Reverse(preludeCRCBytes);
        }
        numBytesRead += n;
        var inputArray = new byte[prelude.Length + 4];
        System.Buffer.BlockCopy(prelude, 0, inputArray, 0, prelude.Length);
        // write real data to inputArray
        Crc32Algorithm.ComputeAndWriteToEnd(inputArray); // last 4 bytes contains CRC
        // transferring data or writing reading, and checking as final operation
        if (!Crc32Algorithm.IsValidWithCrcAtEnd(inputArray))
        {
            throw new ArgumentException("invalid prelude CRC");
        }
        if (!Enumerable.SequenceEqual(inputArray.Skip(prelude.Length).Take(4), preludeCRCBytes))
        {
            throw new ArgumentException("Prelude CRC Mismatch");
        }
        var bytes = prelude.Take(4).ToArray();
        if (BitConverter.IsLittleEndian)
        {
            Array.Reverse(bytes);
        }
        int totalLength = BitConverter.ToInt32(bytes, 0);
        bytes = prelude.Skip(4).Take(4).ToArray();
        if (BitConverter.IsLittleEndian)
        {
            Array.Reverse(bytes);
        }
        int headerLength = BitConverter.ToInt32(bytes, 0);
        // 16 = prelude (8) + prelude CRC (4) + message CRC (4).
        int payloadLength = totalLength - headerLength - 16;
        var headers = new byte[headerLength];
        var payload = new byte[payloadLength];
        int num = ReadFromStream(headers);
        if (num != headerLength)
        {
            throw new IOException("insufficient data");
        }
        num = ReadFromStream(payload);
        if (num != payloadLength)
        {
            throw new IOException("insufficient data");
        }
        numBytesRead += num;
        num = ReadFromStream(messageCRC);
        var messageCRCBytes = messageCRC.ToArray();
        if (BitConverter.IsLittleEndian)
        {
            Array.Reverse(messageCRCBytes);
        }
        // now verify message CRC
        // Rebuild the full message (prelude + prelude CRC + headers + payload) and CRC it.
        inputArray = new byte[totalLength];
        System.Buffer.BlockCopy(prelude, 0, inputArray, 0, prelude.Length);
        System.Buffer.BlockCopy(preludeCRC, 0, inputArray, prelude.Length, preludeCRC.Length);
        System.Buffer.BlockCopy(headers, 0, inputArray, prelude.Length + preludeCRC.Length, headerLength);
        System.Buffer.BlockCopy(payload, 0, inputArray, prelude.Length + preludeCRC.Length + headerLength, payloadLength);
        // write real data to inputArray
        Crc32Algorithm.ComputeAndWriteToEnd(inputArray); // last 4 bytes contains CRC
        // transferring data or writing reading, and checking as final operation
        if (!Crc32Algorithm.IsValidWithCrcAtEnd(inputArray))
        {
            throw new ArgumentException("invalid message CRC");
        }
        if (!Enumerable.SequenceEqual(inputArray.Skip(totalLength - 4).Take(4), messageCRCBytes))
        {
            throw new ArgumentException("message CRC Mismatch");
        }
        Dictionary<String, String> headerMap = extractHeaders(headers);
        string value = null;
        if (headerMap.TryGetValue(":message-type", out value))
        {
            if (value.Equals(":error"))
            {
                string errorCode = null;
                string errorMessage = null;
                headerMap.TryGetValue(":error-code", out errorCode);
                headerMap.TryGetValue(":error-message", out errorMessage);
                throw new SelectObjectContentException(errorCode + ":" + errorMessage);
            }
        }
        if (headerMap.TryGetValue(":event-type", out value))
        {
            if (value.Equals("End"))
            {
                // throw new UnexpectedShortReadException("Insufficient data");
                this._isProcessing = false;
                break;
            }
            // Continuation frames and empty payloads carry no data worth parsing.
            if (value.Equals("Cont") || payloadLength < 1)
            {
                continue;
            }
            if (value.Equals("Progress"))
            {
                ProgressMessage progress = new ProgressMessage();
                using (var stream = new MemoryStream(payload))
                    progress = (ProgressMessage) new XmlSerializer(typeof(ProgressMessage)).Deserialize(stream);
                this.Progress = progress;
            }
            if (value.Equals("Stats"))
            {
                // NOTE(review): UTF-7 is obsolete and insecure; this debug print should
                // almost certainly decode as UTF-8 — confirm and switch.
                Console.WriteLine("payload|" + Encoding.UTF7.GetString(payload));
                StatsMessage stats = new StatsMessage();
                using (var stream = new MemoryStream(payload))
                    stats = (StatsMessage) new XmlSerializer(typeof(StatsMessage)).Deserialize(stream);
                this.Stats = stats;
            }
            if (value.Equals("Records"))
            {
                this.Payload.Write(payload, 0, payloadLength);
                continue;
            }
        }
    }
    this._isProcessing = false;
    this.Payload.Seek(0, SeekOrigin.Begin);
    this.payloadStream.Close();
}
// BackgroundWorker entry point: CRC32-hashes every non-skipped file in `files`
// starting at fCount, reporting progress as it goes.  Supports pause/resume
// (state/totalBytesRead persist position), cancel, and mid-run "skip"/"remove"/
// "addfiles" operations delivered via the pending/operation/selIndex fields.
// Progress codes: -2 = list changed, -1 = file skipped, 0 = new file (size in
// UserState), 1..100 = percent increments, 100 = file done.
private void backgroundWorker1_DoWork(object sender, System.ComponentModel.DoWorkEventArgs e) {
    BackgroundWorker worker = sender as BackgroundWorker;
    hashing = true;
    for (int i = fCount; i < files.Count; i++) {
        if (state == "paused") {
            break;
        }
        if (worker.CancellationPending == true || cancel == true) {
            e.Cancel = true;
            break;
        } else if (files[i].skip == true) {
            // marked skip before we got to it: advance and notify UI
            fCount += 1;
            worker.ReportProgress(-1);
            continue;
        }
        // NOTE(review): stream is disposed at the end of the loop body, but not in a
        // using/finally — an exception mid-hash would leak the handle.
        FileStream stream = File.OpenRead(files[i].currentFilePath);
        byte[] hash = null;
        string cHex = "";
        bool flag = true; // true => report 1% every `cnt` reads; false => report `cnt`% every read
        int bufferSize = 4096;
        byte[] readAheadBuffer, buffer;
        int readAheadBytesRead, bytesRead;
        long cnt = 0;
        size = stream.Length;
        // find number of times loop runs till end of stream
        cnt = size / Convert.ToInt64(bufferSize);
        // find interval in which to update progressbar
        if (cnt <= 100 && cnt > 1) {
            // example -> cnt = 49, after below calc cnt = 2, so update progress bar by 2 every time loop runs
            cnt = (100 / (cnt + 1));
            // to show cnt < 100 or not
            flag = false;
        } else if (cnt > 100) {
            // example -> cnt = 200, after below calc cnt = 3, update progress bar by 1 every time loop runs cnt times (3 in this case)
            cnt = (cnt / 100) + 1;
        } else if (cnt <= 1) {
            // example -> cnt = 0 (size < bufferSize), cnt = 100 cause loop only runs once in this case
            // example -> cnt = 1 (size = bufferSize = 4096), cnt = 100 cause loop only runs once in this case
            cnt = 100;
            flag = false;
        }
        // set stream position to totalBytesRead which holds paused position or default 0 (start of stream)
        stream.Position = totalBytesRead;
        readAheadBuffer = new byte[bufferSize];
        // read next 4096 bytes (returns number of bytes read)
        readAheadBytesRead = stream.Read(readAheadBuffer, 0, readAheadBuffer.Length);
        // update totalBytesRead counter by number of bytes read
        totalBytesRead += readAheadBytesRead;
        // used for speed calc
        tmp1 = tmp2 = totalBytesRead;
        // if starting for the first time (not resuming)
        if (state == "unknown") {
            crc32 = new Crc32Algorithm();
            worker.ReportProgress(0, size);
        }
        do {
            if (skip || remove || cancel || state == "paused") {
                break;
            }
            // if there are any pending operations
            if (pending) {
                pending = false;
                if (operation == "skip") {
                    for (int j = 0; j < selIndex.Length; j++) {
                        if (selIndex[j] < fCount) {
                            // already hashed file
                            continue;
                        } else if (fCount == selIndex[j]) {
                            // if currently hashing file is to be skipped
                            skip = true;
                        }
                        files[selIndex[j]].skip = true;
                    }
                    worker.ReportProgress(-2);
                    if (skip) {
                        break;
                    }
                } else if (operation == "remove") {
                    int tmp = fCount;
                    for (int j = 0; j < selIndex.Length; j++) {
                        if (selIndex[j] < tmp) {
                            // removing already hashed file
                            fCount -= 1;
                            i -= 1;
                        } else if (tmp == selIndex[j]) {
                            // if currently hashing file is to be removed
                            i -= 1;
                            remove = true;
                        }
                        files.RemoveAt(selIndex[j]);
                    }
                    worker.ReportProgress(-2);
                    if (remove) {
                        break;
                    }
                } else if (operation == "addfiles") {
                    foreach (var item in fileHolder) {
                        string name = item.Substring(item.LastIndexOf("\\") + 1);
                        itemClass obj = new itemClass(item, name);
                        files.Add(obj);
                    }
                    worker.ReportProgress(-2);
                }
            }
            bytesRead = readAheadBytesRead;
            buffer = readAheadBuffer;
            // read next 4096 bytes while current 4096 bytes are being processed
            readAheadBuffer = new byte[bufferSize];
            readAheadBytesRead = stream.Read(readAheadBuffer, 0, readAheadBuffer.Length);
            totalBytesRead += readAheadBytesRead;
            tmp1 = totalBytesRead;
            // if end of stream reached
            if (readAheadBytesRead == 0) {
                crc32.TransformFinalBlock(buffer, 0, bytesRead);
            } else {
                crc32.TransformBlock(buffer, 0, bytesRead, buffer, 0);
            }
            // if cnt > 100
            if (flag) {
                p += 1;
                if (p == cnt) {
                    p = 0;
                    worker.ReportProgress(1, totalBytesRead);
                }
            } else {
                worker.ReportProgress(Convert.ToInt32(cnt), totalBytesRead);
            }
        } while (readAheadBytesRead != 0);
        if (cancel == false && state != "paused" && skip == false && remove == false) {
            // convert hash to readable form and store it
            hash = crc32.Hash;
            foreach (byte b in hash) {
                cHex += b.ToString("x2");
            }
            files[fCount].hash = cHex.ToUpper();
            fCount += 1;
            // check if size > 4096 to prevent reporting 100% progess twice in cases of size being < 4096
            if (size > 4096) {
                worker.ReportProgress(100, size);
            }
            totalBytesRead = 0;
            p = 0;
            // so that it creates new crc32
            state = "unknown";
        }
        if (remove) {
            // reset counters after a mid-file removal
            remove = false;
            totalBytesRead = 0;
            p = 0;
            state = "unknown";
        }
        if (skip) {
            // mark the interrupted file skipped and advance
            skip = false;
            files[fCount].skip = true;
            fCount += 1;
            totalBytesRead = 0;
            p = 0;
            state = "unknown";
            worker.ReportProgress(-1);
        }
        if (state != "paused") {
            // keep crc32 alive across a pause so hashing can resume mid-file
            crc32.Dispose();
        }
        stream.Dispose();
    }
    hashing = false;
}
private static uint GetCRC32Hash(byte[] source) { return(Crc32Algorithm.Compute(source)); }
/// <summary>
/// Builds an encrypted bootloader image: reads the raw firmware, pads it to a
/// 16-byte boundary, prefixes a first block holding the CRC32 of the address
/// range plus the CRC32 of the data, encrypts everything with the 32-byte key
/// file, and writes the resulting bootloader file to disk.
/// </summary>
/// <param name="opts">Parsed command-line options.</param>
/// <exception cref="ConstraintException">Manufacturer name exceeds 31 characters.</exception>
private static void RunOptions(Options opts)
{
    var inputFilePath = opts.InputFilePath;
    var keyFilePath = opts.KeyFilePath;
    var manufacturerName = opts.ManufacturerName;
    var version = opts.Version;
    var outputFilePath = opts.OutputFilePath;

    // check ManufacturerName length
    if (manufacturerName.Length > 31)
    {
        throw new ConstraintException("Manufacturer name is too long (max: 31)");
    }

    // Addresses may be given as hex (hex digits, optional 0x prefix) or plain decimal.
    static uint ParseAddress(string text)
    {
        try
        {
            return Convert.ToUInt32(text, 16);
        }
        catch (FormatException)
        {
            return Convert.ToUInt32(text);
        }
    }

    uint startAddr = ParseAddress(opts.StartAddr);
    uint endAddr = ParseAddress(opts.EndAddr);

    // Read the firmware binary in one call instead of the previous byte-by-byte
    // loop that used EndOfStreamException for control flow.
    var rawFirmwareBinaryList = new List<byte>(File.ReadAllBytes(inputFilePath));

    // Pad with zeroes up to a whole number of 16-byte (AES block) units.
    while (rawFirmwareBinaryList.Count % 16 != 0)
    {
        rawFirmwareBinaryList.Add(0);
    }

    // calculate CRC32 of the address range and of the (padded) firmware data
    var addrCrc = Crc32Algorithm.Compute(BitConverter.GetBytes(startAddr).Concat(BitConverter.GetBytes(endAddr)).ToArray());
    var dataCrc = Crc32Algorithm.Compute(rawFirmwareBinaryList.ToArray());
    var firstBlock = BitConverter.GetBytes(addrCrc)
        .Concat(BitConverter.GetBytes(startAddr))
        .Concat(BitConverter.GetBytes(endAddr))
        .Concat(BitConverter.GetBytes(dataCrc));

    // read the 32-byte encryption key
    byte[] key;
    using (var binaryReader = new BinaryReader(new FileStream(keyFilePath, FileMode.Open, FileAccess.Read, FileShare.Read)))
    {
        key = binaryReader.ReadBytes(32);
    }

    // create BootloaderFile
    var file = new BootloaderFile();

    // encrypt the first block plus firmware; Utilities.Encrypt also fills parts of `file` (e.g. the IV)
    var encryptedBinary = Utilities.Encrypt(
        firstBlock.Concat(rawFirmwareBinaryList).ToArray(),
        key,
        ref file
    );
    file.Data = encryptedBinary.Skip(16).ToArray();
    file.ManufacturerName = manufacturerName;
    file.FirstBlock = encryptedBinary.Take(16).ToArray();

    // parse the "a.b.c.d" version string into four ushort components
    var splitVersion = version.Split(".");
    for (var i = 0; i < 4; i++)
    {
        file.FirmwareVersion[i] = ushort.Parse(splitVersion[i]);
    }

    file.WriteBootloaderFile(outputFilePath);
    Console.WriteLine(file.ToFancyString());
    Console.Write("IV: ");
}
public unsafe Task <IEnumerable <EventMeta> > Write(IEventPayloads payload) { lock (_view) { // early out if we are complete if (_isComplete) { return(null); } // early out if we don't have enough space left in the log var space = _view.Capacity - _position; if (space < CalculateSize(payload)) { return(null); } var events = new List <EventMeta>(); try { byte *ptr = null; _view.SafeMemoryMappedViewHandle.AcquirePointer(ref ptr); var start = _position; // write event count in the batch *(ptr + _position) = (byte)payload.Count; _position += sizeof(byte); foreach (var e in payload) { // write event meta block var meta = new EventMeta { Created = DateTime.UtcNow, Id = _start + _count }; *(EventMeta *)(ptr + _position) = meta; _position += sizeof(EventMeta); // write event payload *(ushort *)(ptr + _position) = (ushort)e.Count; _position += sizeof(ushort); Marshal.Copy(e.Array, e.Offset, (IntPtr)(ptr + _position), e.Count); _position += e.Count; // increment event count _count++; events.Add(meta); } // post-fix batch with crc var length = (int)(_position - start); var crcBuffer = GetBuffer(length); Marshal.Copy((IntPtr)(ptr + start), crcBuffer, 0, length); var crc = Crc32Algorithm.Compute(crcBuffer, 0, length); *(uint *)(ptr + _position) = crc; _position += sizeof(uint); } finally { _view.SafeMemoryMappedViewHandle.ReleasePointer(); } // store a handle to the task - to be completed when the buffer is flushed var tcs = new TaskCompletionSource <IEnumerable <EventMeta> >(); _awaiting.Add(() => tcs.SetResult(events)); return(tcs.Task); } }
public static uint CrcOfFile(string fileName) { return(Crc32Algorithm.Compute(File.ReadAllBytes(fileName))); }
// Loads a replay/ghost file: validates the magic word, reads the header (covered by
// its own CRC32), the meta block, the per-frame data-block definitions and all frame
// bodies, verifies the trailing whole-file CRC32 and EOF word, and only then commits
// the parsed values into this instance's fields.
// Throws FileNotFoundException when the file is missing and InvalidDataException on
// any magic/CRC/EOF validation failure.
public void Import(string filename) {
    if (!File.Exists(filename)) {
        throw new FileNotFoundException();
    }
    using (FileStream file = File.OpenRead(filename)) {
        // Running CRCs: hCRC32 covers the header only, fCRC32 covers the whole file.
        uint hCRC32 = 0, fCRC32 = 0;
        byte[] CRC32_check;
        // header
        byte[] word_magic = FileIO.ReadChunk(file, file_word_magic.Length, ref hCRC32, ref fCRC32);
        if (!Win32.ByteArrayCompare(word_magic, file_word_magic)) {
            throw new InvalidDataException();
        }
        byte ver_write = FileIO.ReadChunk(file, 0x1, ref hCRC32, ref fCRC32)[0];
        byte ver_read = FileIO.ReadChunk(file, 0x1, ref hCRC32, ref fCRC32)[0];
        byte endianness = FileIO.ReadChunk(file, 0x1, ref hCRC32, ref fCRC32)[0];
        uint off_meta = BitConverter.ToUInt32(FileIO.ReadChunk(file, 0x4, ref hCRC32, ref fCRC32), 0);
        uint off_def = BitConverter.ToUInt32(FileIO.ReadChunk(file, 0x4, ref hCRC32, ref fCRC32), 0);
        uint off_body = BitConverter.ToUInt32(FileIO.ReadChunk(file, 0x4, ref hCRC32, ref fCRC32), 0);
        uint len_data = BitConverter.ToUInt32(FileIO.ReadChunk(file, 0x4, ref hCRC32, ref fCRC32), 0);
        // Appending the stored CRC bytes to the running header CRC must yield the CRC32
        // residue constant 0x2144DF1C if the header was intact.
        CRC32_check = FileIO.ReadChunk(file, 0x4, ref fCRC32);
        if (Crc32Algorithm.Append(hCRC32, CRC32_check) != 0x2144DF1C) {
            throw new InvalidDataException();
        }
        // meta
        byte[] in_meta_player = FileIO.ReadChunk(file, 0x20, ref fCRC32);
        byte in_meta_track = FileIO.ReadChunk(file, 0x1, ref fCRC32)[0];
        byte in_meta_vehicle = FileIO.ReadChunk(file, 0x1, ref fCRC32)[0];
        byte[] in_meta_upgrade_level = FileIO.ReadChunk(file, 0x7, ref fCRC32);
        byte[] in_meta_upgrade_health = FileIO.ReadChunk(file, 0x7, ref fCRC32);
        // def: (path, offset, length) triples describing each data block within a frame
        List <DataCollection.DataBlock.Path> in_path = new List <DataCollection.DataBlock.Path>();
        List <uint> in_offset = new List <uint>();
        List <uint> in_length = new List <uint>();
        while (file.Position < off_body) {
            in_path.Add((DataCollection.DataBlock.Path)FileIO.ReadChunk(file, 0x1, ref fCRC32)[0]);
            in_offset.Add(BitConverter.ToUInt32(FileIO.ReadChunk(file, 0x4, ref fCRC32), 0));
            in_length.Add(BitConverter.ToUInt32(FileIO.ReadChunk(file, 0x4, ref fCRC32), 0));
        }
        // body: one DataCollection per frame, each holding one block per definition
        List <DataCollection> in_frame = new List <DataCollection>();
        while (file.Position < off_meta + len_data) {
            DataCollection frame = new DataCollection();
            for (int i = 0; i < in_path.Count; i++) {
                byte[] in_data = FileIO.ReadChunk(file, (int)in_length[i], ref fCRC32);
                DataCollection.DataBlock block = new DataCollection.DataBlock(in_data, in_path[i], in_offset[i]);
                frame.data.Add(block);
            }
            in_frame.Add(frame);
        }
        // footer: whole-file CRC residue check, then the EOF word
        CRC32_check = FileIO.ReadChunk(file, 0x4);
        if (Crc32Algorithm.Append(fCRC32, CRC32_check) != 0x2144DF1C) {
            throw new InvalidDataException();
        }
        byte[] word_eof = FileIO.ReadChunk(file, file_word_eof.Length);
        if (!Win32.ByteArrayCompare(word_eof, file_word_eof)) {
            throw new InvalidDataException();
        }
        // output: commit parsed values only after every check has passed
        meta_track = in_meta_track;
        meta_vehicle = in_meta_vehicle;
        meta_player = in_meta_player;
        meta_upgrade_level = in_meta_upgrade_level;
        meta_upgrade_health = in_meta_upgrade_health;
        data.Clear();
        foreach (DataCollection frame in in_frame) {
            data.Add(frame);
        }
        initialized = true;
    }
}
/// <summary>
/// Handles one incoming DSU ("cemuhook") UDP datagram: validates the "DSUC" magic,
/// protocol version, declared packet size and CRC32, then dispatches on the message
/// type (version request, port listing, pad-data subscription).
/// Fixes: removed two always-false unsigned comparisons (packetSize &lt; 0 on a uint,
/// currRequest &lt; 0 on a byte) that were dead code.
/// </summary>
/// <param name="localMsg">Raw datagram bytes; may be trimmed to the declared size.</param>
/// <param name="clientEP">Sender endpoint, used for replies and subscriptions.</param>
private void ProcessIncoming(byte[] localMsg, IPEndPoint clientEP)
{
    try
    {
        int currIdx = 0;
        // magic "DSUC" = client-to-server packet
        if (localMsg[0] != 'D' || localMsg[1] != 'S' || localMsg[2] != 'U' || localMsg[3] != 'C') { return; }
        else { currIdx += 4; }

        uint protocolVer = BitConverter.ToUInt16(localMsg, currIdx);
        currIdx += 2;
        if (protocolVer > MaxProtocolVersion) { return; }

        uint packetSize = BitConverter.ToUInt16(localMsg, currIdx);
        currIdx += 2;
        packetSize += 16; //size of header
        if (packetSize > localMsg.Length) { return; }
        else if (packetSize < localMsg.Length)
        {
            // datagram larger than declared: trim so the CRC covers only the declared bytes
            byte[] newMsg = new byte[packetSize];
            Array.Copy(localMsg, newMsg, packetSize);
            localMsg = newMsg;
        }

        uint crcValue = BitConverter.ToUInt32(localMsg, currIdx);
        //zero out the crc32 in the packet once we got it since that's whats needed for calculation
        localMsg[currIdx++] = 0;
        localMsg[currIdx++] = 0;
        localMsg[currIdx++] = 0;
        localMsg[currIdx++] = 0;
        uint crcCalc = Crc32Algorithm.Compute(localMsg);
        if (crcValue != crcCalc) { return; }

        uint clientId = BitConverter.ToUInt32(localMsg, currIdx); // client id (read but currently unused)
        currIdx += 4;
        uint messageType = BitConverter.ToUInt32(localMsg, currIdx);
        currIdx += 4;

        if (messageType == (uint)MessageType.DSUC_VersionReq)
        {
            // reply with the highest protocol version we support
            byte[] outputData = new byte[8];
            int outIdx = 0;
            Array.Copy(BitConverter.GetBytes((uint)MessageType.DSUS_VersionRsp), 0, outputData, outIdx, 4);
            outIdx += 4;
            Array.Copy(BitConverter.GetBytes((ushort)MaxProtocolVersion), 0, outputData, outIdx, 2);
            outIdx += 2;
            outputData[outIdx++] = 0;
            outputData[outIdx++] = 0;
            SendPacket(clientEP, outputData, 1001);
        }
        else if (messageType == (uint)MessageType.DSUC_ListPorts)
        {
            // Requested information on gamepads - return MAC address
            int numPadRequests = BitConverter.ToInt32(localMsg, currIdx);
            currIdx += 4;
            if (numPadRequests < 0 || numPadRequests > 4) { return; }

            // validate every requested slot index before answering any of them
            int requestsIdx = currIdx;
            for (int i = 0; i < numPadRequests; i++)
            {
                byte currRequest = localMsg[requestsIdx + i];
                if (currRequest > 4) { return; }
            }

            byte[] outputData = new byte[16];
            for (byte i = 0; i < numPadRequests; i++)
            {
                byte currRequest = localMsg[requestsIdx + i];
                // NOTE(review): indexes by loop counter, not the requested slot — confirm intended.
                var padData = controllers[i]; //controllers[currRequest];
                int outIdx = 0;
                Array.Copy(BitConverter.GetBytes((uint)MessageType.DSUS_PortInfo), 0, outputData, outIdx, 4);
                outIdx += 4;
                outputData[outIdx++] = (byte)padData.PadId;
                outputData[outIdx++] = (byte)padData.constate;
                outputData[outIdx++] = (byte)padData.model;
                outputData[outIdx++] = (byte)padData.connection;
                var addressBytes = padData.PadMacAddress.GetAddressBytes();
                if (addressBytes.Length == 6)
                {
                    outputData[outIdx++] = addressBytes[0];
                    outputData[outIdx++] = addressBytes[1];
                    outputData[outIdx++] = addressBytes[2];
                    outputData[outIdx++] = addressBytes[3];
                    outputData[outIdx++] = addressBytes[4];
                    outputData[outIdx++] = addressBytes[5];
                }
                else
                {
                    // unknown MAC: report all zeroes
                    outputData[outIdx++] = 0;
                    outputData[outIdx++] = 0;
                    outputData[outIdx++] = 0;
                    outputData[outIdx++] = 0;
                    outputData[outIdx++] = 0;
                    outputData[outIdx++] = 0;
                }
                outputData[outIdx++] = (byte)padData.battery; //(byte)padData.BatteryStatus;
                outputData[outIdx++] = 0;
                SendPacket(clientEP, outputData, 1001);
            }
        }
        else if (messageType == (uint)MessageType.DSUC_PadDataReq)
        {
            // client subscribes to pad data by flags / slot id / MAC address
            byte regFlags = localMsg[currIdx++];
            byte idToReg = localMsg[currIdx++];
            PhysicalAddress macToReg = null;
            {
                byte[] macBytes = new byte[6];
                Array.Copy(localMsg, currIdx, macBytes, 0, macBytes.Length);
                currIdx += macBytes.Length;
                macToReg = new PhysicalAddress(macBytes);
            }
            lock (clients)
            {
                if (clients.ContainsKey(clientEP))
                {
                    clients[clientEP].RequestPadInfo(regFlags, idToReg, macToReg);
                }
                else
                {
                    var clientTimes = new ClientRequestTimes();
                    clientTimes.RequestPadInfo(regFlags, idToReg, macToReg);
                    clients[clientEP] = clientTimes;
                }
            }
        }
    }
    catch (Exception)
    {
        // Malformed datagrams are dropped on purpose so one bad packet
        // cannot crash the UDP receive loop.
    }
}
private void WriteStringBuffer() { m_tableheaders[0].crc32 = Crc32Algorithm.Compute(m_strings); m_stream.Write(m_strings, 0, m_strings.Length); }
// Asynchronously deserializes a UTF-8 JSON stream into a Document using a pooled,
// growable buffer.  The stream is read in chunks; ReadCore consumes as much JSON as
// possible per chunk, unconsumed bytes are shifted to the buffer start (or the buffer
// is doubled when less than half remains free), and reading resumes until the final
// block.  When returnCrc is true a running CRC32 of every byte read is returned
// alongside the document.  clearMax tracks the high-water mark so only bytes that
// ever held data are zeroed before the pooled buffer is returned.
public static async ValueTask <ReadResult <Document> > ReadAsync(Stream utf8Json, IParsingOptions options, bool returnCrc, CancellationToken cancellationToken = default) {
    var readerState = new JsonReaderState();
    var readStack = new DdbReadStack(DdbReadStack.DefaultStackLength, options.Metadata);
    try {
        options.StartParsing(ref readStack);
        var buffer = ArrayPool <byte> .Shared.Rent(DefaultBufferSize);
        var clearMax = 0;
        try {
            var bytesInBuffer = 0;
            uint crc = 0;
            while (true) {
                var isFinalBlock = false;
                // Fill the remainder of the buffer (it may already hold unconsumed bytes).
                while (true) {
                    var bytesRead = await utf8Json.ReadAsync(new Memory <byte>(buffer, bytesInBuffer, buffer.Length - bytesInBuffer), cancellationToken).ConfigureAwait(false);
                    if (bytesRead == 0) {
                        isFinalBlock = true;
                        break;
                    }
                    if (returnCrc) {
                        // Fold only the newly read bytes into the running CRC.
                        crc = Crc32Algorithm.Append(crc, buffer, bytesInBuffer, bytesRead);
                    }
                    bytesInBuffer += bytesRead;
                    if (bytesInBuffer == buffer.Length) {
                        break;
                    }
                }
                if (bytesInBuffer > clearMax) {
                    clearMax = bytesInBuffer;
                }
                ReadCore(ref readerState, isFinalBlock, new ReadOnlySpan <byte>(buffer, 0, bytesInBuffer), ref readStack, options);
                var bytesConsumed = (int)readStack.BytesConsumed;
                bytesInBuffer -= bytesConsumed;
                if (isFinalBlock) {
                    break;
                }
                // Check if we need to shift or expand the buffer because there wasn't enough data to complete deserialization.
                if ((uint)bytesInBuffer > ((uint)buffer.Length / 2)) {
                    // We have less than half the buffer available, double the buffer size.
                    byte[] dest = ArrayPool <byte> .Shared.Rent((buffer.Length < (int.MaxValue / 2))?buffer.Length * 2 : int.MaxValue);
                    // Copy the unprocessed data to the new buffer while shifting the processed bytes.
                    Buffer.BlockCopy(buffer, bytesConsumed, dest, 0, bytesInBuffer);
                    // Zero and return the old buffer before swapping in the larger one.
                    new Span <byte>(buffer, 0, clearMax).Clear();
                    ArrayPool <byte> .Shared.Return(buffer);
                    clearMax = bytesInBuffer;
                    buffer = dest;
                } else if (bytesInBuffer != 0) {
                    // Shift the processed bytes to the beginning of buffer to make more room.
                    Buffer.BlockCopy(buffer, bytesConsumed, buffer, 0, bytesInBuffer);
                }
            }
            return(new ReadResult <Document>(readStack.GetCurrent().CreateDocumentFromBuffer(), crc));
        } finally {
            // Scrub everything the buffer ever held before returning it to the pool.
            new Span <byte>(buffer, 0, clearMax).Clear();
            ArrayPool <byte> .Shared.Return(buffer);
        }
    } finally {
        readStack.Dispose();
    }
}
public async Task <IActionResult> UpImage([FromForm] UploadModel model) { if (!ModelState.IsValid) { ApiData.code = 1005; ApiData.msg = ModelState.GetErrMsg(); return(Ok(ApiData)); } if (Request.Form.Files == null || Request.Form.Files.Count == 0 || !Request.Form.Files[0].IsImage()) { ApiData.code = 1006; ApiData.msg = "请上传图片文件"; return(Ok(ApiData)); } var bucket = _bucketService.GetBucketBayName(model.bucket); if (bucket == null) { ApiData.code = 2001; ApiData.msg = "bucket错误"; return(Ok(ApiData)); } IFormFile file = Request.Form.Files[0]; string sha1 = file.GetSHA1(); var item = _bucketImageService.GetSHA1(sha1); if (item != null) { ApiData.code = 0; ApiData.msg = "上传成功"; ApiData.data = new { url = $"/oss/imagecn{item.visiturl}" }; return(Ok(ApiData)); } if (!ValidSignature(model.signature, file.GetMD5(), model.VERB)) { ApiData.code = 1005; ApiData.msg = "签名验证失败"; return(Ok(ApiData)); } uint crc32 = Crc32Algorithm.Compute(EncryptorHelper.GetMD5Byte(Guid.NewGuid().ToString())); var dir = Math.Abs(crc32) % 256;//256个子目录 string f_dir = Math.Abs(crc32).ToString(); string path = System.IO.Path.Combine(MediaItemConfig.RootDir, bucket.name, dir.ToString(), f_dir); //保存文件并且获取文件的相对存储路径 var image = file.CreateImagePathFromStream(_mediaItemStorage, path); string visiturl = $"/oss/imagecn/{bucket.name}/{dir}/{f_dir}/{image.NewFileName}"; _bucketImageService.AddImage(new Entities.bucket_image() { id = CombGuid.NewGuidAsString(), bucket_id = bucket.id, creation_time = DateTime.Now, ext_name = image.ExtName, sha1 = sha1, visiturl = visiturl, io_path = image.IOPath, width = image.Width, height = image.Height, length = file.Length }); ApiData.code = 0; ApiData.msg = "上传成功"; ApiData.data = new { url = visiturl }; await Task.FromResult(0); return(Ok(ApiData)); }
public static uint Compute(Crc32Algorithm algorithm, byte[] buffer, int offset, int count) { return(Process(Tables[(int)algorithm], 0xFFFFFFFF, buffer, offset, count) ^ 0xFFFFFFFF); }
/// <summary>
/// Update header
/// This is needed when anything changes
/// Recomputes per-entry string indices and key offsets, refreshes both key
/// hashes (bitwise-complemented CRC32 of the key doubled / tripled), and then
/// rewrites the header counts and block sizes from the current entry lists.
/// </summary>
private void Update() {
    log.Info("Updating entries...");
    // Only GMD_Entry instances carry keys/info-table rows; other entry kinds are value-only.
    var realEntries = Entries.OfType <GMD_Entry>().ToList();
    if (realEntries.Count > 0) {
        // First info entry always has Index = 0 and KeyOffset = 0
        realEntries[0].InfoTableEntry.StringIndex = 0;
        realEntries[0].InfoTableEntry.KeyOffset = 0;
    }
    for (int i = 1; i < realEntries.Count; i++) // Start at 1
    {
        realEntries[i].InfoTableEntry.StringIndex = Entries.IndexOf(realEntries[i]);
        // Each key starts right after the previous key's bytes (plus its terminator).
        int prevKeyOffset = realEntries[i - 1].InfoTableEntry.KeyOffset;
        int prevKeySize = ExEncoding.UTF8.GetByteCount(realEntries[i - 1].Key) + 1; // +1 for szString end
        realEntries[i].InfoTableEntry.KeyOffset = prevKeyOffset + prevKeySize;
    }
    // Check and update hashes (CRC32 with bitwise complement, aka reverse each bit)
    log.Info("Checking and updating hashes...");
    foreach (var entry in realEntries) {
        byte[] keyBytes = ExEncoding.UTF8.GetBytes(entry.Key);
        // Hash 1: complemented CRC32 over the key repeated twice
        byte[] input1 = new byte[keyBytes.Length * 2];
        keyBytes.CopyTo(input1, 0);
        keyBytes.CopyTo(input1, keyBytes.Length);
        uint hash1 = ~Crc32Algorithm.Compute(input1);
        if (entry.InfoTableEntry.Hash1 != hash1) {
            log.Info($"Hash1 doesn't match, using new hash\nOld hash: {entry.InfoTableEntry.Hash1:X04}\nNew hash: {hash1:X04}");
            entry.InfoTableEntry.Hash1 = hash1;
        }
        // Hash 2: complemented CRC32 over the key repeated three times
        byte[] input2 = new byte[keyBytes.Length * 3];
        keyBytes.CopyTo(input2, 0);
        keyBytes.CopyTo(input2, keyBytes.Length);
        keyBytes.CopyTo(input2, keyBytes.Length * 2);
        uint hash2 = ~Crc32Algorithm.Compute(input2);
        // If hash1 doesn't match then this hash obviously doesn't match as well since they have the same input (InfoTableEntry.Key)
        if (entry.InfoTableEntry.Hash2 != hash2) {
            log.Info($"Hash2 doesn't match, using new hash\nOld hash: {entry.InfoTableEntry.Hash2:X04}\nNew hash: {hash2:X04}");
            entry.InfoTableEntry.Hash2 = hash2;
        }
    }
    log.Info("Updating header...");
    // String Count: all entries, keyed or not
    log.Info("Current StringCount = " + _header.StringCount);
    _header.StringCount = Entries.Count;
    log.Info("New StringCount = " + _header.StringCount);
    // Key Count: only entries that carry a key
    log.Info("Current KeyCount = " + _header.KeyCount);
    _header.KeyCount = realEntries.Count;
    log.Info("New KeyCount = " + _header.KeyCount);
    // StringBlockSize: total bytes of all values incl. terminators
    log.Info("Current StringBlockSize = " + _header.StringBlockSize);
    int newSize = 0;
    foreach (IGMD_Entry entry in Entries) {
        newSize += ExEncoding.UTF8.GetByteCount(entry.Value) + 1; // +1 because szString
    }
    _header.StringBlockSize = newSize;
    log.Info("New StringBlockSize = " + _header.StringBlockSize);
    // KeyBlockSize: last key's offset plus its own size covers the whole block
    log.Info("Current KeyBlockSize = " + _header.KeyBlockSize);
    if (_header.KeyCount > 0) {
        int lastKeySize = ExEncoding.UTF8.GetByteCount(realEntries.Last().Key) + 1; // +1 for szString end
        _header.KeyBlockSize = realEntries.Last().InfoTableEntry.KeyOffset + lastKeySize;
    } else {
        _header.KeyBlockSize = 0;
    }
    log.Info("New KeyBlockSize = " + _header.KeyBlockSize);
}
public static uint Compute(Crc32Algorithm algorithm, byte[] buffer, int offset, int count) { return Process(Tables[(int)algorithm], 0xFFFFFFFF, buffer, offset, count) ^ 0xFFFFFFFF; }
/// <summary>
/// Serializes the seven bundle tables (strings, names, resources, unknown,
/// objects, buffers, embedded) into their header slots, computing each table's
/// size, CRC32 and running file offset (tables start at offset 160).
/// Fix: the scratch list is now cleared after EVERY table — previously Table 7
/// was appended to leftover Table 6 bytes, corrupting headers[6].data, its CRC
/// and the accumulated offset.  (Also removed a stray double semicolon.)
/// </summary>
public void SerializeTables()
{
    #region Preparation
    // Resolve type names and paths into table indices before serialization.
    foreach (var r in resources)
    {
        r.typeid = AddName(r.type);
        r.offset = AddString(r.path);
    }
    #endregion

    uint shift = 160; // tables begin after the 160-byte fixed header
    byte[] data = null;
    var temp = new List <byte>();

    #region Table 1 - Strings
    foreach (var s in strings)
    {
        temp.AddRange(Encoding.ASCII.GetBytes(s.Value));
        temp.Add(0); // szString terminator
    }
    data = temp.ToArray();
    temp.Clear();
    headers[0].data = data;
    headers[0].size = Convert.ToUInt32(data.Length); // string table size is in bytes
    headers[0].crc32 = Crc32Algorithm.Compute(data);
    headers[0].offset = data.Length > 0 ? shift : 0;
    shift += Convert.ToUInt32(data.Length);
    #endregion

    #region Table 2 - Names
    foreach (var n in names)
    {
        temp.AddRange(BitConverter.GetBytes(n.offset));
        temp.AddRange(BitConverter.GetBytes(n.hash));
    }
    data = temp.ToArray();
    temp.Clear();
    headers[1].data = data;
    headers[1].size = Convert.ToUInt32(names.Count); // size is an entry count from here on
    headers[1].crc32 = Crc32Algorithm.Compute(data);
    headers[1].offset = data.Length > 0 ? shift : 0;
    shift += Convert.ToUInt32(data.Length);
    #endregion

    #region Table 3 - Resources
    foreach (var r in resources)
    {
        temp.AddRange(BitConverter.GetBytes(r.offset));
        temp.AddRange(BitConverter.GetBytes(r.typeid));
        temp.AddRange(BitConverter.GetBytes(r.flags));
    }
    data = temp.ToArray();
    temp.Clear();
    headers[2].data = data;
    headers[2].size = Convert.ToUInt32(resources.Count);
    headers[2].crc32 = Crc32Algorithm.Compute(data);
    headers[2].offset = data.Length > 0 ? shift : 0;
    shift += Convert.ToUInt32(data.Length);
    #endregion

    #region Table 4 - Unknown
    foreach (var i in table4items)
    {
        temp.AddRange(BitConverter.GetBytes(i.classId));
        temp.AddRange(BitConverter.GetBytes(i.unknown1));
        temp.AddRange(BitConverter.GetBytes(i.propertyId));
        temp.AddRange(BitConverter.GetBytes(i.unknown2));
        temp.AddRange(BitConverter.GetBytes(i.hash));
    }
    data = temp.ToArray();
    temp.Clear();
    headers[3].data = data;
    headers[3].size = Convert.ToUInt32(table4items.Count);
    headers[3].crc32 = Crc32Algorithm.Compute(data);
    headers[3].offset = data.Length > 0 ? shift : 0;
    shift += Convert.ToUInt32(data.Length);
    #endregion

    #region Table 5 - Objects
    // Object payloads follow this table, so bias their offsets past the
    // 24-byte-per-entry object records.
    uint tempoffset = shift + 24u * (uint)objects.Count;
    foreach (var o in objects)
    {
        o.offset += tempoffset;
        temp.AddRange(BitConverter.GetBytes(o.typeID));
        temp.AddRange(BitConverter.GetBytes(o.flags));
        temp.AddRange(BitConverter.GetBytes(o.parentID));
        temp.AddRange(BitConverter.GetBytes(o.size));
        temp.AddRange(BitConverter.GetBytes(o.offset));
        temp.AddRange(BitConverter.GetBytes(o.template));
        temp.AddRange(BitConverter.GetBytes(o.crc32));
    }
    data = temp.ToArray();
    temp.Clear();
    headers[4].data = data;
    headers[4].size = Convert.ToUInt32(objects.Count);
    headers[4].crc32 = Crc32Algorithm.Compute(data);
    headers[4].offset = data.Length > 0 ? shift : 0;
    shift += Convert.ToUInt32(data.Length);
    #endregion

    #region Table 6 - Buffers
    foreach (var b in buffers)
    {
        temp.AddRange(BitConverter.GetBytes(b.flags));
        temp.AddRange(BitConverter.GetBytes(b.index));
        temp.AddRange(BitConverter.GetBytes(b.offset));
        temp.AddRange(BitConverter.GetBytes(b.diskSize));
        temp.AddRange(BitConverter.GetBytes(b.memSize));
        temp.AddRange(BitConverter.GetBytes(b.crc32));
    }
    data = temp.ToArray();
    temp.Clear(); // BUGFIX: was missing, so Table 7 inherited Table 6's bytes
    headers[5].data = data;
    headers[5].size = Convert.ToUInt32(buffers.Count);
    headers[5].crc32 = Crc32Algorithm.Compute(data);
    headers[5].offset = data.Length > 0 ? shift : 0;
    shift += Convert.ToUInt32(data.Length);
    #endregion

    #region Table 7 - Embedded
    foreach (var e in embedded)
    {
        temp.AddRange(BitConverter.GetBytes(e.importIndex));
        temp.AddRange(BitConverter.GetBytes(e.path));
        temp.AddRange(BitConverter.GetBytes(e.pathHash));
        temp.AddRange(BitConverter.GetBytes(e.offset));
        temp.AddRange(BitConverter.GetBytes(e.length));
    }
    data = temp.ToArray();
    temp.Clear();
    headers[6].data = data;
    headers[6].size = Convert.ToUInt32(embedded.Count);
    headers[6].crc32 = Crc32Algorithm.Compute(data);
    headers[6].offset = data.Length > 0 ? shift : 0;
    shift += Convert.ToUInt32(data.Length);
    #endregion
}