/// <summary>
/// Reads the car list chunk and registers every car with the car service.
/// </summary>
/// <param name="chunk">Chunk containing the serialized car list.</param>
private void ProcessCarList(Chunk chunk)
{
    // Skip alignment padding; the remaining payload must be a whole
    // number of 0xD0-byte CarTypeInfo entries.
    var payloadSize = chunk.Size - Reader.AlignToBoundary(0x10);

    if (payloadSize % 0xD0 != 0)
    {
        throw new InvalidDataException("Malformed car list chunk");
    }

    Progress?.Report("Processing car list");

    while (Stream.Position < chunk.EndOffset)
    {
        var carTypeInfo = BinaryHelpers.ReadStruct<CarTypeInfo>(Reader);

        _carService.AddCar(new AaronCarRecord
        {
            BaseModelName = carTypeInfo.BaseModelName,
            CarTypeName = carTypeInfo.CarTypeName,
            ManufacturerName = carTypeInfo.ManufacturerName,
            DefaultBasePaint = carTypeInfo.DefaultBasePaint,
            DefaultSkinNumber = carTypeInfo.DefaultSkinNumber,
            Skinnable = carTypeInfo.Skinnable,
            UsageType = carTypeInfo.UsageType
        });

        // Remember the name hash so later chunks can resolve it back to a string.
        HashResolver.Add(carTypeInfo.CarTypeNameHash, carTypeInfo.CarTypeName);
    }

    this.GenerateHashes();
}
/// <summary>
/// Decompresses a sequence of compressed-in-place blocks from
/// <paramref name="stream"/> into <paramref name="outData"/>.
/// </summary>
/// <param name="stream">Stream positioned at the first block header.</param>
/// <param name="outData">Destination buffer; each block writes at its own UPos offset.</param>
private static void BlockDecompress(Stream stream, byte[] outData)
{
    while (stream.Position < stream.Length)
    {
        var header = BinaryHelpers.ReadStruct<CIPHeader>(stream);

        if (header.Magic != 0x55441122)
        {
            throw new InvalidDataException($"Invalid magic! Expected 0x55441122, got 0x{header.Magic:X8}");
        }

        // CSize includes the 24-byte header, so the compressed payload is CSize - 24.
        var compressed = new byte[header.CSize - 24];
        var uncompressed = new byte[header.USize];

        if (stream.Read(compressed, 0, compressed.Length) != compressed.Length)
        {
            throw new InvalidDataException($"Failed to read {compressed.Length} bytes from stream");
        }

        Compressor.Decompress(compressed, uncompressed);

        // ConstrainedCopy guarantees outData is untouched if the copy would overflow.
        Array.ConstrainedCopy(uncompressed, 0, outData, header.UPos, uncompressed.Length);
    }
}
/// <summary>
/// Reads the CarTypeInfoArray chunk and adds one car record to the database
/// per entry.
/// </summary>
/// <param name="chunk">The CarTypeInfoArray chunk.</param>
/// <param name="reader">Reader positioned at the start of the chunk payload.</param>
private void ProcessCarInfoArrayChunk(Chunk chunk, BinaryReader reader)
{
    int alignedSize = chunk.GetAlignedSize(0x10);

    // Each CarTypeInfo entry is exactly 0xD0 bytes.
    if (alignedSize % 0xD0 != 0)
    {
        throw new ChunkCorruptedException("CarTypeInfoArray is malformed!");
    }

    reader.AlignToBoundary(0x10);

    int entryCount = alignedSize / 0xD0;

    for (int index = 0; index < entryCount; index++)
    {
        var info = BinaryHelpers.ReadStruct<CarTypeInfo>(reader);

        var record = new CarRecord
        {
            BaseModelName = info.BaseModelName,
            CarTypeName = info.CarTypeName,
            DefaultBasePaint = info.DefaultBasePaint,
            DefaultSkinNumber = info.DefaultSkinNumber,
            GeometryFilename = info.GeometryFilename,
            ManufacturerName = info.ManufacturerName,
            MemoryType = (CarMemoryType)info.CarMemTypeHash,
            Skinnable = info.Skinnable,
            UsageType = info.UsageType
        };

        Database.CarRecordManager.AddCarRecord(record);
    }
}
/// <summary>
/// Reads every CarPartAttribute in the chunk into the internal attribute list.
/// </summary>
/// <param name="chunk">The attributes table chunk.</param>
private void ProcessAttributes(Chunk chunk)
{
    trackedUnknownAttributes.Clear();
    Progress?.Report("Processing attributes table");

    while (Stream.Position < chunk.EndOffset)
    {
        _attributes.Add(BinaryHelpers.ReadStruct<CarPartAttribute>(Reader));
    }
}
/// <summary>
/// Reads every FEPresetCar in the chunk, converts it to the Aaron preset-car
/// format, and registers it with the preset car service.
/// </summary>
/// <param name="chunk">The preset cars chunk.</param>
private void ProcessPresetCars(Chunk chunk)
{
    Progress?.Report("Processing preset cars");

    while (Stream.Position < chunk.EndOffset)
    {
        var presetCar = BinaryHelpers.ReadStruct<FEPresetCar>(Reader);
        var converted = PresetConverter.ConvertFeToAaronPresetCar(presetCar);

        _presetCarService.AddPresetCar(converted);
    }
}
/// <summary>
/// Reads a compressed-in-place file: validates the file header, then hands
/// off to the shared routine that decompresses the individual blocks.
/// </summary>
/// <param name="stream">Stream positioned at the start of the CIP file.</param>
/// <returns>The fully decompressed file contents.</returns>
public static byte[] ReadBlockFile(Stream stream)
{
    var reader = new BinaryReader(stream);
    var fileHeader = BinaryHelpers.ReadStruct<CIPFileHeader>(reader);

    if (fileHeader.Magic != 0x66113388)
    {
        throw new InvalidDataException("Invalid header");
    }

    return ReadCompressedBlocks(stream, fileHeader.USize);
}
/// <summary>
/// Round-trips a structure through a stream and verifies that the serialized
/// size and every field survive marshalling and unmarshalling.
/// </summary>
public void TestStreamRoundTrip()
{
    using var ms = new MemoryStream();
    BinaryHelpers.WriteStruct(ms, BenchmarkStructure);
    ms.Position = 0;

    // Assert.AreEqual takes (expected, actual); the original call had them
    // swapped, which produces a misleading failure message. The serialized
    // stream should be exactly one marshalled SimpleStructure in size.
    Assert.AreEqual(Marshal.SizeOf<SimpleStructure>(), ms.Length);

    var unmarshalledStructure = BinaryHelpers.ReadStruct<SimpleStructure>(ms);

    Assert.AreEqual(BenchmarkStructure.StringValue, unmarshalledStructure.StringValue);
    Assert.AreEqual(BenchmarkStructure.IntValue, unmarshalledStructure.IntValue);
    Assert.AreEqual(BenchmarkStructure.FloatValue, unmarshalledStructure.FloatValue);
    CollectionAssert.AreEqual(BenchmarkStructure.ByteArray, unmarshalledStructure.ByteArray);
}
/// <summary>
/// Reads every PresetSkin in the chunk and registers it with the preset
/// skin service.
/// </summary>
/// <param name="chunk">The preset skins chunk.</param>
private void ProcessPresetSkins(Chunk chunk)
{
    Progress?.Report("Processing preset skins");

    while (Stream.Position < chunk.EndOffset)
    {
        var presetSkin = BinaryHelpers.ReadStruct<PresetSkin>(Reader);

        _presetSkinService.AddPresetSkin(new AaronPresetSkinRecord
        {
            PresetName = presetSkin.PresetName,
            PaintGroup = presetSkin.PaintGroup,
            PaintHue = presetSkin.PaintHue,
            Saturation = presetSkin.PaintSaturation,
            Variance = presetSkin.PaintVariance
        });
    }
}
/// <summary>
/// Processes the car part database header chunk: validates its size and
/// version, then records the section counts for the chunks that follow.
/// </summary>
/// <param name="chunk">The CarPartPack header chunk; must be exactly 0x40 bytes.</param>
/// <param name="reader">Reader positioned at the start of the chunk payload.</param>
public void ProcessHeader(Chunk chunk, BinaryReader reader)
{
    if (chunk.Size != 0x40)
    {
        throw new ChunkCorruptedException("Invalid CarPartPack header!");
    }

    // Skip 8 bytes of padding before the header structure.
    reader.BaseStream.Seek(8, SeekOrigin.Current);
    _header = BinaryHelpers.ReadStruct<CarPartPackHeader>(reader);

    // Only version 6 of the part pack format is supported.
    if (_header.Version != 6)
    {
        throw new ChunkCorruptedException("Invalid version in CarPartPack header!");
    }

    Debug.WriteLine(
        "Part database: {0} attributes, {1} attribute tables, {2} type names, {3} model tables, {4} parts",
        _header.NumAttributes,
        _header.NumAttributeTables,
        _header.NumTypeNames,
        _header.NumModelTables,
        _header.NumParts);
}
/// <summary>
/// Reads the car part table chunk, resolves each part's name and attributes,
/// and attaches the part to its owning car part collection.
/// </summary>
/// <param name="chunk">The car part table chunk; size must be a multiple of 0xC.</param>
private void ProcessCarPartTable(Chunk chunk)
{
    // Each DBCarPart entry is 0xC bytes.
    if (chunk.Size % 0xC != 0)
    {
        throw new InvalidDataException("Malformed car part table chunk");
    }

    DebugTiming.BeginTiming("ProcessCarPartTable");
    Progress?.Report("Processing car parts");

    var allCollections = _carPartService.GetCarPartCollections();

    // Build the index -> collection map in a single pass. The previous
    // ToDictionary(c => allCollections.IndexOf(c), ...) called IndexOf once
    // per element, making construction accidentally O(n^2).
    var collectionsDict = new Dictionary<int, AaronCarPartCollection>(allCollections.Count);

    for (var i = 0; i < allCollections.Count; i++)
    {
        collectionsDict[i] = allCollections[i];
    }

    while (Stream.Position < chunk.EndOffset)
    {
        DBCarPart dbCarPart = BinaryHelpers.ReadStruct<DBCarPart>(Reader);

        var part = new AaronCarPartRecord
        {
            Name = HashResolver.Resolve(dbCarPart.Hash),
            Attributes = this.PrepareAttributes(dbCarPart)
        };

        // CarIndex selects which collection owns this part.
        collectionsDict[dbCarPart.CarIndex].Parts.Add(part);
    }

    // (The old code also accumulated all attributes and computed a distinct
    // count that was never used; that dead work has been removed.)
    var uniqueAttribs = 0; // removed: unused DistinctBy computation

    DebugTiming.EndTiming("ProcessCarPartTable");

    // The raw attribute data has been fully consumed; free it.
    _attributeOffsetTables.Clear();
    _attributes.Clear();
}
/// <summary>
/// Processes the part list chunk: validates its size against the header's
/// part count, loads every part entry, then builds the part collections.
/// </summary>
/// <param name="chunk">The part list chunk; size must be a multiple of 0xC.</param>
/// <param name="reader">Reader positioned at the start of the chunk payload.</param>
public void ProcessPartsChunk(Chunk chunk, BinaryReader reader)
{
    // Each DBCarPart entry is 0xC bytes.
    if (chunk.Size % 0xC != 0)
    {
        throw new ChunkCorruptedException("Invalid CarPartPack part list chunk!");
    }

    int numParts = chunk.Size / 0xC;

    // The header chunk (processed earlier) declares the expected part count.
    if (numParts != _header.NumParts)
    {
        throw new ChunkCorruptedException($"CarPartPack header says there are {_header.NumParts} parts, but the part list has {numParts}!");
    }

    for (int partIndex = 0; partIndex < numParts; partIndex++)
    {
        _parts.Add(BinaryHelpers.ReadStruct<DBCarPart>(reader));
    }

    Debug.WriteLine("Loaded {0} parts", _parts.Count);

    GeneratePartCollections();
    Debug.WriteLine("Generated part collections");
}
/// <summary>
/// Processes the attribute list chunk: validates its size against the
/// header's attribute count, loads every attribute, then indexes them.
/// </summary>
/// <param name="chunk">The attribute list chunk; size must be a multiple of 8.</param>
/// <param name="reader">Reader positioned at the start of the chunk payload.</param>
public void ProcessAttributesChunk(Chunk chunk, BinaryReader reader)
{
    // Each CarPartAttributeData entry is 8 bytes.
    if (chunk.Size % 8 != 0)
    {
        throw new ChunkCorruptedException("Invalid CarPartPack attributes chunk!");
    }

    var numAttributes = chunk.Size / 8;

    // The header chunk (processed earlier) declares the expected attribute count.
    if (numAttributes != _header.NumAttributes)
    {
        throw new ChunkCorruptedException($"CarPartPack header says there are {_header.NumAttributes} attributes, but the attribute list has {numAttributes}!");
    }

    for (int attributeIndex = 0; attributeIndex < numAttributes; attributeIndex++)
    {
        _attributes.Add(BinaryHelpers.ReadStruct<CarPartAttributeData>(reader));
    }

    PrepareAttributes();
    Debug.WriteLine("Loaded {0} attributes", _attributes.Count);
}
/// <summary>
/// Reads the slot override chunk: a fixed 3444-byte slot table followed by
/// 0x24-byte spoiler entries, each attached to its car record.
/// </summary>
/// <param name="chunk">The BCHUNK_CARINFO_SLOTTYPES chunk.</param>
private void ProcessSlotOverrideData(Chunk chunk)
{
    // Payload = 3444 bytes of raw slot data + whole SpoilerStructure entries (0x24 each).
    if ((chunk.Size - 3444) % 0x24 != 0)
    {
        throw new InvalidDataException("BCHUNK_CARINFO_SLOTTYPES is invalid!");
    }

    Progress?.Report("Processing slot override data");
    _carPartService.SetSlotOverrideData(Reader.ReadBytes(3444));

    while (Stream.Position < chunk.EndOffset)
    {
        var spoiler = BinaryHelpers.ReadStruct<SpoilerStructure>(Reader);

        // NOTE(review): FindCarByHash is assumed to always find a match here;
        // a missing car would surface as a NullReferenceException — confirm
        // that the car list chunk is always processed first.
        var car = _carService.FindCarByHash(spoiler.CarTypeNameHash);

        car.Spoiler = new AaronSpoilerRecord
        {
            CarTypeNameHash = spoiler.CarTypeNameHash,
            SpoilerType = (AaronSpoilerType)spoiler.SpoilerHash
        };
    }
}
/// <summary>
/// Reads a car bounds pack — collision bounds entries followed by point
/// clouds — and attaches it to the matching car record.
/// </summary>
/// <param name="chunk">The bounds pack chunk.</param>
private void ProcessBoundsPack(Chunk chunk)
{
    Progress?.Report("Processing car bounds pack");
    Stream.Align(0x10);

    var header = BinaryHelpers.ReadStruct<BoundsHeader>(Reader);
    var car = _carService.FindCarByCollisionHash(header.NameHash);

    if (car.BoundsPack != null)
    {
        throw new InvalidDataException("Duplicate bounds pack for " + car.CarTypeName);
    }

    var pack = new AaronBoundsPack
    {
        Entries = new List<AaronBoundsEntry>(header.NumBounds),
        NameHash = header.NameHash,
        PointClouds = new List<AaronBoundsPointCloud>()
    };

    // First section: NumBounds collision bounds entries.
    for (var boundsIdx = 0; boundsIdx < header.NumBounds; boundsIdx++)
    {
        var bounds = BinaryHelpers.ReadStruct<Bounds>(Reader);

        // The serialized structure should never carry a live collection pointer.
        Debug.Assert(bounds.CollectionPtr == 0);

        pack.Entries.Add(new AaronBoundsEntry
        {
            Position = bounds.Position,
            AttributeName = bounds.AttributeName,
            ChildIndex = bounds.ChildIndex,
            Flags = (AaronBoundsFlags)bounds.Flags,
            HalfDimensions = bounds.HalfDimensions,
            NameHash = bounds.NameHash,
            NumChildren = bounds.NumChildren,
            Orientation = bounds.Orientation,
            PCloudIndex = bounds.PCloudIndex,
            Pivot = bounds.Pivot,
            Surface = bounds.Surface
        });
    }

    // Second section: point cloud header, then NumPClouds vertex lists.
    var cloudHeader = BinaryHelpers.ReadStruct<PCloudHeader>(Reader);
    pack.PointClouds = new List<AaronBoundsPointCloud>(cloudHeader.NumPClouds);

    for (var cloudIdx = 0; cloudIdx < cloudHeader.NumPClouds; cloudIdx++)
    {
        var numVertices = Reader.ReadInt32();
        Reader.ReadInt64();  // fPad
        Reader.ReadUInt32(); // fPList

        var cloud = new AaronBoundsPointCloud
        {
            Vertices = new List<Vector4>(numVertices)
        };

        for (var vertexIdx = 0; vertexIdx < numVertices; vertexIdx++)
        {
            cloud.Vertices.Add(BinaryHelpers.ReadStruct<Vector4>(Reader));
        }

        pack.PointClouds.Add(cloud);
    }

    car.BoundsPack = pack;
}