/// <summary>
/// Exercises every span-based deserialization entry point (generic and
/// non-generic, fresh-instance and populate-into) for both the struct-backed
/// and class-backed test tags.
/// </summary>
public void DeserializeNestedTypes_FromSpan()
{
    // Struct tag: non-generic, then generic Deserialize overloads.
    var structViaType = (StructTestTag)BlamSerializer.Deserialize(typeof(StructTestTag), testTagData, 0, 100, null);
    AssertData(structViaType);

    var structViaGeneric = BlamSerializer.Deserialize<StructTestTag>(testTagData, 0, 100, null);
    AssertData(structViaGeneric);

    // Class tag: non-generic, then generic Deserialize overloads.
    var classViaType = (ClassTestTag)BlamSerializer.Deserialize(typeof(ClassTestTag), testTagData, 0, 100, null);
    AssertData(classViaType);

    var classViaGeneric = BlamSerializer.Deserialize<ClassTestTag>(testTagData, 0, 100, null);
    AssertData(classViaGeneric);

    // DeserializeInto variants populating pre-constructed instances.
    var structTarget = new StructTestTag();
    structTarget = (StructTestTag)BlamSerializer.DeserializeInto(structTarget, typeof(StructTestTag), testTagData, 0, 100, null);
    AssertData(structTarget);

    structTarget = new StructTestTag();
    structTarget = BlamSerializer.DeserializeInto<StructTestTag>(structTarget, testTagData, 0, 100, null);
    AssertData(structTarget);

    var classTarget = new ClassTestTag();
    classTarget = (ClassTestTag)BlamSerializer.DeserializeInto(classTarget, typeof(ClassTestTag), testTagData, 0, 100, null);
    AssertData(classTarget);

    classTarget = new ClassTestTag();
    classTarget = BlamSerializer.DeserializeInto<ClassTestTag>(classTarget, testTagData, 0, 100, null);
    AssertData(classTarget);
}
/// <summary>
/// Builds a TMap by deserializing it from the requested map stream and then
/// running the map's own load routine against the reader.
/// </summary>
/// <param name="reader">Source of the per-file map streams.</param>
/// <param name="streamId">Which stream within the reader to deserialize from (defaults to the primary stream).</param>
private TMap CreateMap<TMap>(MapStream reader, byte streamId = 0) where TMap : IMap, new()
{
    var result = new TMap();
    BlamSerializer.DeserializeInto(result, reader.GetStream(streamId));
    result.Load(streamId, reader);
    return result;
}
/// <summary>
/// Verifies wrapper deserialization from a stream at a zero offset and again
/// with the tag data shifted into a larger buffer at a non-zero offset.
/// NOTE(review): method name is missing the trailing 't' of "Object"; kept
/// as-is since renaming would change the public surface (test discovery).
/// </summary>
public void Deserialize_WrapperUsingInPlaceObjec()
{
    var wrapper = BlamSerializer.Deserialize<EmptyWrapper>(new MemoryStream(testTagData), 0, 100, null);
    AssertData(wrapper);

    // Same data, shifted 20 bytes into a padded buffer.
    var shifted = new byte[24 + testTagData.Length];
    testTagData.CopyTo(shifted, 20);

    wrapper = BlamSerializer.Deserialize<EmptyWrapper>(new MemoryStream(shifted), 20, 80, null);
    AssertData(wrapper);
}
/// <summary>
/// Verifies that a reference-typed sub-value is populated when the holder tag
/// is deserialized, both at a zero and a non-zero stream offset.
/// </summary>
public void Deserialize_ReferenceValue()
{
    var holder = BlamSerializer.Deserialize<ReferenceValueHolderTag>(new MemoryStream(testTagData), 0, 100, null);
    AssertData(holder.SubValue);

    // Same data, shifted 20 bytes into a padded buffer.
    var shifted = new byte[24 + testTagData.Length];
    testTagData.CopyTo(shifted, 20);

    holder = BlamSerializer.Deserialize<ReferenceValueHolderTag>(new MemoryStream(shifted), 20, 80, null);
    AssertData(holder.SubValue);
}
public void Run() { using var map = new FileStream(this.args.FilePath, FileMode.Open, FileAccess.ReadWrite, FileShare.Read, 80192, false); var sig = H2BaseMap.CalculateSignature(map); var sigOffset = BlamSerializer.StartsAt <H2mccMapHeader>(h => h.StoredSignature); map.WriteUInt32At(sigOffset, (uint)sig); map.Seek(0, SeekOrigin.Begin); // Create destination map on disk using var outMap = File.OpenWrite(this.args.OutPath); H2mccCompression.Compress(map, outMap); }
/// <summary>
/// Materializes a BaseTag for the given index entry: unknown tag fourCCs get an
/// UnknownTag placeholder; known types are instantiated and deserialized from
/// the originating map stream. External data is populated in either case.
/// </summary>
public static BaseTag CreateTag(uint id, string name, TagIndexEntry index, IH2Map map, MapStream reader)
{
    var resolvedType = GetTypeForTag(index.Tag);

    BaseTag result;

    if (resolvedType == null)
    {
        // No mapped type for this fourCC; record raw location info instead.
        result = new UnknownTag(id, index.Tag.ToString())
        {
            Name = name,
            Length = (uint)index.DataSize,
            Offset = (uint)index.Offset.Value,
            InternalSecondaryMagic = map.SecondaryMagic + index.Offset.Value
        };
    }
    else
    {
        BaseTag created;

        // PERF: check ctor existence ahead of time
        try
        {
            //var ctor = tagType.GetConstructor(new[] { typeof(uint) });
            //instance = (BaseTag)ctor.Invoke(new object[] { id });
            created = Activator.CreateInstance(resolvedType, new object[] { id }) as BaseTag;
        }
        catch
        {
            // Fall back to an uninitialized instance when no uint ctor exists.
            created = (BaseTag)FormatterServices.GetUninitializedObject(resolvedType);
        }

        result = (BaseTag)BlamSerializer.DeserializeInto(
            created,
            resolvedType,
            reader.GetStream(map.OriginFile),
            index.Offset.Value,
            map.SecondaryMagic,
            map);
    }

    result.Name = name;
    result.TagIndexEntry = index;
    result.DataFile = map.OriginFile;
    result.PopulateExternalData(reader);

    return result;
}
/// <summary>
/// Loads a map by peeking at its header to determine the map version, then
/// dispatching to the appropriate loader.
/// </summary>
/// <exception cref="NotSupportedException">Thrown for unrecognized map versions.</exception>
public IH2Map Load(string mapFileName)
{
    // Peek at the first 2KiB so the version can be read without holding the file open.
    Span<byte> header = new byte[2048];

    using (var peek = File.OpenRead(Path.Combine(this.mapRoot, mapFileName)))
    {
        // NOTE(review): partial reads are unchecked — assumes maps are >= 2048 bytes.
        peek.Read(header);
    }

    var baseHeader = BlamSerializer.Deserialize<H2HeaderBase>(header);

    return baseHeader.Version switch
    {
        MapVersion.Halo2 => LoadH2Map(mapFileName, header),
        MapVersion.Halo2Mcc => LoadH2mccMap(mapFileName),
        _ => throw new NotSupportedException()
    };
} // BUG FIX: the method's closing brace was missing, leaving the definition unterminated.
/// <summary>
/// Exercises every stream-based deserialization entry point (generic and
/// non-generic, fresh-instance and populate-into) for both test tag types,
/// including a non-zero offset case.
/// </summary>
public void DeserializeNestedTypes_FromStream()
{
    // Struct tag: non-generic, then generic Deserialize overloads.
    var structViaType = (StructTestTag)BlamSerializer.Deserialize(typeof(StructTestTag), new MemoryStream(testTagData), 0, 100, null);
    AssertData(structViaType);

    var structViaGeneric = BlamSerializer.Deserialize<StructTestTag>(new MemoryStream(testTagData), 0, 100, null);
    AssertData(structViaGeneric);

    // Class tag: non-generic, then generic Deserialize overloads.
    var classViaType = (ClassTestTag)BlamSerializer.Deserialize(typeof(ClassTestTag), new MemoryStream(testTagData), 0, 100, null);
    AssertData(classViaType);

    var classViaGeneric = BlamSerializer.Deserialize<ClassTestTag>(new MemoryStream(testTagData), 0, 100, null);
    AssertData(classViaGeneric);

    // Same data shifted 20 bytes into a padded buffer.
    var shifted = new byte[24 + testTagData.Length];
    testTagData.CopyTo(shifted, 20);

    classViaGeneric = BlamSerializer.Deserialize<ClassTestTag>(new MemoryStream(shifted), 20, 80, null);
    AssertData(classViaGeneric);

    // DeserializeInto variants populating pre-constructed instances.
    var structTarget = new StructTestTag();
    structTarget = (StructTestTag)BlamSerializer.DeserializeInto(structTarget, typeof(StructTestTag), new MemoryStream(testTagData), 0, 100, null);
    AssertData(structTarget);

    structTarget = new StructTestTag();
    structTarget = BlamSerializer.DeserializeInto<StructTestTag>(structTarget, new MemoryStream(testTagData), 0, 100, null);
    AssertData(structTarget);

    var classTarget = new ClassTestTag();
    classTarget = (ClassTestTag)BlamSerializer.DeserializeInto(classTarget, typeof(ClassTestTag), new MemoryStream(testTagData), 0, 100, null);
    AssertData(classTarget);

    classTarget = new ClassTestTag();
    classTarget = BlamSerializer.DeserializeInto<ClassTestTag>(classTarget, new MemoryStream(testTagData), 0, 100, null);
    AssertData(classTarget);
}
/// <summary>
/// Decompresses the target map into memory, applies the configured tag patches,
/// re-signs the map, and writes the result back over the original file.
/// </summary>
public async Task Run()
{
    // NOTE(review): no awaits occur below; this async method runs synchronously.
    using var inmemMap = new MemoryStream();

    using (var map = File.Open(this.Args.MapPath, FileMode.Open))
    {
        H2mccCompression.Decompress(map, inmemMap);
        inmemMap.Position = 0;

        // Load to determine where to write patches to
        var factory = new MapFactory(this.Args.MapPath);
        this.scene = factory.LoadSingleH2mccMap(inmemMap);
    }

    var tagPatcher = new TagPatcher(scene, inmemMap);

    // Patches are JSON; comments in the patch file are tolerated and skipped.
    var settings = new JsonSerializerOptions() { ReadCommentHandling = JsonCommentHandling.Skip };
    var patches = JsonSerializer.Deserialize<TagPatch[]>(File.ReadAllText(this.Args.TagPatchPath), settings);

    foreach (var patch in patches)
    {
        tagPatcher.Apply(patch);
    }

    // Recompute the checksum over the patched data and store it in the header.
    inmemMap.Position = 0;
    var sig = H2BaseMap.CalculateSignature(inmemMap);
    inmemMap.WriteInt32At(BlamSerializer.StartsAt<H2vMapHeader>(h => h.StoredSignature), sig);

    inmemMap.Position = 0;

    // NOTE(review): FileMode.Open does not truncate; if the patched data were
    // ever shorter than the existing file, stale trailing bytes would remain —
    // presumably sizes always match here, but confirm.
    using (var map = File.Open(this.Args.MapPath, FileMode.Open))
    {
        inmemMap.CopyTo(map);
    }
}
/// <summary>
/// Reads each part's raw model resource data out of the map streams, builds
/// meshes from it, and — when bounding boxes are present — expands compressed
/// vertex positions and texture coordinates into the bounding-box range.
/// </summary>
public override void PopulateExternalData(MapStream reader)
{
    foreach (var part in Parts)
    {
        // Resolve this part's data block header location within the map streams.
        var headerOffset = new NormalOffset((int)part.DataBlockRawOffset);
        var mapData = reader.GetStream(headerOffset.Location);
        part.Header = BlamSerializer.Deserialize<ModelResourceBlockHeader>(mapData, headerOffset.Value);

        foreach (var resource in part.Resources)
        {
            // Resource payload starts after an 8-byte block header plus the part's preamble.
            var dataOffset = part.DataBlockRawOffset + 8 + part.DataPreambleSize + resource.Offset;
            mapData.Position = new NormalOffset((int)dataOffset).Value;

            var resourceData = new byte[resource.Size];
            var readCount = mapData.Read(resourceData, 0, resource.Size);
            Debug.Assert(readCount == resource.Size);
            resource.Data = resourceData;
        }

        var meshes = ModelResourceContainerProcessor.ProcessContainer(part, ModelShaderReferences);

        if(this.BoundingBoxes.Length > 0)
        {
            // NOTE(review): only the first bounding box and the first mesh are
            // decompressed here — confirm this is intentional for multi-mesh parts.
            var bbIndex = 0;

            var maxBounds = new Vector3(
                this.BoundingBoxes[bbIndex].MaxX,
                this.BoundingBoxes[bbIndex].MaxY,
                this.BoundingBoxes[bbIndex].MaxZ);

            var minBounds = new Vector3(
                this.BoundingBoxes[bbIndex].MinX,
                this.BoundingBoxes[bbIndex].MinY,
                this.BoundingBoxes[bbIndex].MinZ);

            var maxUV = new Vector2(
                this.BoundingBoxes[bbIndex].MaxU,
                this.BoundingBoxes[bbIndex].MaxV);

            var minUV = new Vector2(
                this.BoundingBoxes[bbIndex].MinU,
                this.BoundingBoxes[bbIndex].MinV);

            var mesh = meshes[0];

            for (var i = 0; i < mesh.Verticies.Length; i++)
            {
                var vert = mesh.Verticies[i];

                // Expand compressed positions into the [min, max] bounds range.
                var newPos = part.Flags.HasFlag(Properties.CompressedVerts) ?
                    new Vector3(
                        Decompress(vert.Position.X, minBounds.X, maxBounds.X),
                        Decompress(vert.Position.Y, minBounds.Y, maxBounds.Y),
                        Decompress(vert.Position.Z, minBounds.Z, maxBounds.Z)
                    ) : vert.Position;

                // Expand compressed UVs into the [min, max] UV range.
                var newTex = part.Flags.HasFlag(Properties.CompressedTexCoords) ?
                    new Vector2(
                        Decompress(vert.TexCoords.X, minUV.X, maxUV.X),
                        Decompress(vert.TexCoords.Y, minUV.Y, maxUV.Y)
                    ) : vert.TexCoords;

                // Workaround for JIT issue
                // https://github.com/dotnet/runtime/issues/1241
                var newVert = new VertexFormat(newPos, newTex, vert.Normal, vert.Tangent, vert.Bitangent);
                mesh.Verticies[i] = newVert;
            }
        }

        part.Model = new MeshCollection(meshes);
    }
}
/// <summary>
/// Walks a dotted/indexed property path (e.g. "Foo.Bars[2].Baz") over a tag's
/// serialized layout, accumulating the byte offset of the final property
/// relative to the tag's start, and resolving its type.
/// </summary>
/// <exception cref="Exception">When a path segment names a property that does not exist, or an array is neither a primitive nor a reference array.</exception>
/// <exception cref="NotSupportedException">When element access is used on a non-array or with a non-int index.</exception>
private ResolvedTagPropertyInfo ResolvePropertyInfo(TagIndexEntry tagInfo, string propertyPath)
{
    var topTagType = TagFactory.GetTypeForTag(tagInfo.Tag);
    var steps = PropertyAccessorParser.ExtractProperties(propertyPath);

    // Byte offset accumulated relative to the tag's start; stepType tracks the
    // declaring type of the current path segment.
    var offset = 0;
    var stepType = topTagType;

    foreach (var step in steps)
    {
        var prop = stepType.GetProperty(step.PropertyName);

        if (prop == null)
        {
            throw new Exception($"Couldn't find property '{step.PropertyName}' on type '{stepType}'");
        }

        if (step.AccessType == PropertyAccessorParser.PropertyAccessType.Normal)
        {
            // Plain member access: add the member's serialized offset and descend.
            offset += BlamSerializer.StartsAt(stepType, step.PropertyName);
            stepType = prop.PropertyType;
        }
        else if (step.AccessType == PropertyAccessorParser.PropertyAccessType.ElementAccess)
        {
            if (prop.PropertyType.IsArray == false || step.ElementArgument is not int)
            {
                throw new NotSupportedException("Only arrays are currently supported for element access");
            }

            var elementSize = BlamSerializer.SizeOf(prop.PropertyType.GetElementType());
            var elementOffset = (elementSize * ((int)step.ElementArgument));

            if (prop.GetCustomAttribute<ReferenceArrayAttribute>() != null)
            {
                var startsAt = BlamSerializer.StartsAt(stepType, step.PropertyName);

                // Read element array base offset
                // NOTE(review): the +4 presumably skips the element-count field
                // preceding the stored pointer — confirm against the tag layout.
                var baseOffset = new SecondaryOffset(this.originalMap, this.mapToPatch.ReadInt32At(tagInfo.Offset.Value + offset + startsAt + 4));

                // baseOffset is the absolute offset, need to subtract tag offset and prior property offsets to get relative
                offset += baseOffset.Value - tagInfo.Offset.Value - offset + elementOffset;
            }
            else if (prop.GetCustomAttribute<PrimitiveArrayAttribute>() != null)
            {
                // Primitive arrays are stored inline; index directly.
                offset += elementOffset;
            }
            else
            {
                throw new Exception("Only primitive and reference arrays are supported");
            }

            stepType = prop.PropertyType.GetElementType();
        }
    }

    return(new ResolvedTagPropertyInfo()
    {
        RelativeOffset = offset,
        PropertyType = stepType
    });
}
//[InternalReferenceValue(540)]
//public object[] MiscObject20Cao { get; set; }

//[InternalReferenceValue(548)]
//public object[] MiscObject21Cao { get; set; }

//[InternalReferenceValue(556)]
//public object[] MiscObject22Cao { get; set; }

//[InternalReferenceValue(564)]
//public object[] MiscObject23Cao { get; set; }

/// <summary>
/// Loads raw resource data for each render chunk and each instanced geometry
/// definition from the map streams, then builds their mesh collections.
/// Entries with a sentinel (uint.MaxValue) data block offset get an empty model.
/// </summary>
public override void PopulateExternalData(MapStream reader)
{
    foreach (var part in RenderChunks)
    {
        if (part.DataBlockRawOffset == uint.MaxValue)
        {
            // TODO: determine why this happens, and if it's expected?
            //Console.WriteLine("Bsp part with max DataBlock offset");
            part.Model = new MeshCollection(new ModelMesh[0]);
            continue;
        }

        // Resolve this chunk's data block header location within the map streams.
        var headerOffset = new NormalOffset((int)part.DataBlockRawOffset);
        var mapData = reader.GetStream(headerOffset.Location);
        part.Header = BlamSerializer.Deserialize<ModelResourceBlockHeader>(mapData, headerOffset.Value);

        foreach (var resource in part.Resources)
        {
            // Resource payload starts after an 8-byte block header plus the part's preamble.
            var dataOffset = part.DataBlockRawOffset + 8 + part.DataPreambleSize + resource.Offset;
            mapData.Position = new NormalOffset((int)dataOffset).Value;

            var resourceData = new byte[resource.Size];
            var readCount = mapData.Read(resourceData, 0, resource.Size);
            Debug.Assert(readCount == resource.Size);
            resource.Data = resourceData;
        }

        var meshes = ModelResourceContainerProcessor.ProcessContainer(part, ModelShaderReferences);
        part.Model = new MeshCollection(meshes);
    }

    foreach (var def in InstancedGeometryDefinitions)
    {
        if (def.DataBlockRawOffset == uint.MaxValue)
        {
            Console.WriteLine("InstancedGeometry with max DataBlock offset");
            def.Model = new MeshCollection(new ModelMesh[0]);
            continue;
        }

        var headerOffset = new NormalOffset((int)def.DataBlockRawOffset);
        var mapData = reader.GetStream(headerOffset.Location);
        def.Header = BlamSerializer.Deserialize<ModelResourceBlockHeader>(mapData, headerOffset.Value);

        foreach (var resource in def.Resources)
        {
            var dataOffset = def.DataBlockRawOffset + 8 + def.DataPreambleSize + resource.Offset;
            mapData.Position = new NormalOffset((int)dataOffset).Value;

            var resourceData = new byte[resource.Size];
            var readCount = mapData.Read(resourceData, 0, resource.Size);
            Debug.Assert(readCount == resource.Size);
            resource.Data = resourceData;
        }

        var meshes = ModelResourceContainerProcessor.ProcessContainer(def, ModelShaderReferences, "InstancedGeometry_" + def.DataBlockRawOffset);
        def.Model = new MeshCollection(meshes);
    }
}
/// <summary>
/// Bulk-patches maps: for each patch directory matching the filter, decompresses
/// the corresponding clean map, applies tag and script-tree patches, re-signs it,
/// and writes the compressed result into the patched-maps directory.
/// </summary>
public static void Run(BulkPatchTaskArgs args)
{
    if (args.RawMapsDirectory == null)
    {
        // TODO: better
        args.RawMapsDirectory = @"C:\Program Files\ModifiableWindowsApps\HaloMCC\halo2\h2_maps_win64_dx11";
    }

    if (Directory.Exists(args.RawMapsDirectory) == false)
    {
        Console.WriteLine("Please enter the path to your 'clean' maps");
        Console.Write(">");
        args.RawMapsDirectory = Console.ReadLine().Trim();
    }

    var patches = FindDirectory(args.PatchesDirectory);
    var maps = FindDirectory(args.RawMapsDirectory);

    Directory.CreateDirectory(args.PatchedMapsDirectory);

    var folderFilter = GetRegexForFilter(args.PatchFilter);

    var patchDirs = Directory.GetDirectories(patches)
        .Where(d => folderFilter.IsMatch(Path.GetFileName(d)))
        .ToArray();

    // Each map is independent; patch them in parallel.
    Parallel.ForEach(patchDirs, patchDir =>
    {
        var mapName = Path.GetFileName(patchDir);
        var rawMapPath = Path.Combine(maps, mapName + ".map");
        var patchedMapPath = Path.Combine(args.PatchedMapsDirectory, mapName + ".map");

        if (File.Exists(rawMapPath) == false)
        {
            Console.WriteLine($"Patch dir for '{mapName}', but no map found at '{rawMapPath}'");
            return;
        }

        // BUG FIX: the raw map stream was never disposed, leaking one file
        // handle per processed map.
        using var rawMap = File.OpenRead(rawMapPath);
        using var patchedMap = new MemoryStream();
        H2mccCompression.Decompress(rawMap, patchedMap);

        var factory = new MapFactory(Path.GetDirectoryName(rawMapPath));
        var scene = factory.LoadSingleH2mccMap(patchedMap);

        var tagPatches = Directory.GetFiles(patchDir, "*.tagpatch", SearchOption.AllDirectories);
        var tagPatcher = new TagPatcher(scene, patchedMap);

        foreach (var tagPatchPath in tagPatches)
        {
            Console.WriteLine($"TagPatching '{scene.Header.Name}' with '{tagPatchPath.Substring(patchDir.Length)}'");
            var settings = new JsonSerializerOptions() { ReadCommentHandling = JsonCommentHandling.Skip };

            // BUG FIX: renamed from 'patches' — redeclaring that name inside the
            // lambda shadows the enclosing method local (compiler error CS0136).
            var tagPatchSet = JsonSerializer.Deserialize<TagPatch[]>(File.ReadAllText(tagPatchPath), settings);

            foreach (var patch in tagPatchSet)
            {
                tagPatcher.Apply(patch);
            }
        }

        var scriptPatchFiles = Directory.GetFiles(patchDir, "*.tree", SearchOption.AllDirectories);

        foreach (var patchFile in scriptPatchFiles)
        {
            Console.WriteLine($"Patching '{scene.Header.Name}' with '{patchFile.Substring(patchDir.Length)}'");
            ScriptTreePatcher.PatchMap(scene, patchedMap, patchFile);
        }

        // Re-sign the patched data before compressing it back out.
        var sig = H2BaseMap.CalculateSignature(patchedMap);
        patchedMap.WriteUInt32At(BlamSerializer.StartsAt<H2mccMapHeader>(h => h.StoredSignature), (uint)sig);

        using var patchedFile = new FileStream(patchedMapPath, FileMode.Create);
        H2mccCompression.Compress(patchedMap, patchedFile);
    });

    Console.WriteLine("Done!");
}
/// <summary>
/// Decompresses the provided stream into a new in-memory stream, if the map has
/// not already been decompressed. If the provided stream has already been
/// decompressed, it will simply return the original stream.
/// </summary>
/// <exception cref="NotSupportedException">When the input stream is not seekable.</exception>
/// <exception cref="Exception">When the stream is neither format, or a section cannot be fully read.</exception>
public static Stream DecompressInline(Stream input)
{
    if (input.CanSeek == false)
    {
        throw new NotSupportedException("Must be able to seek on compressed");
    }

    var fourCC = input.ReadUInt32At(0);

    if (fourCC == DecompressedFourCC)
    {
        // BUG FIX: the original declared 'using var compressed = input;' at the
        // top of the method, which disposed the very stream this branch returns
        // to the caller. The input must stay open when it is handed back.
        input.Position = 0;
        return input;
    }

    if (fourCC != RealFourCC)
    {
        throw new Exception("Provided map stream was not valid");
    }

    var decompressTo = new MemoryStream();

    // The compressed input is fully consumed here and disposed when done; the
    // caller receives only the new decompressed stream.
    using (var compressed = input)
    {
        var info = BlamSerializer.Deserialize<H2mccCompressionSections>(compressed);

        // Copy the 4KiB header, swapping the fourCC to the decompressed marker.
        var header = new Span<byte>(new byte[4096]);
        compressed.Position = 0;
        compressed.Read(header);

        decompressTo.WriteUInt32(DecompressedFourCC);
        decompressTo.Write(header.Slice(4));

        foreach (var section in info.Sections)
        {
            if (section.Offset == 0 || section.Count == 0)
            {
                continue;
            }

            // Looks like compression is optional and count is negative when that happens
            if (section.Count < 0)
            {
                var realCount = -section.Count;
                var buf = copyPool.Rent(realCount);

                compressed.Position = section.Offset;

                // BUG FIX: ArrayPool.Rent may return a buffer LARGER than
                // requested; reading with the span overload could pull in more
                // than realCount bytes and spuriously trip the check below.
                // Read exactly realCount bytes instead.
                var readCount = compressed.Read(buf, 0, realCount);

                if (readCount != realCount)
                {
                    copyPool.Return(buf);
                    throw new Exception("Unable to read the amount of data required");
                }

                decompressTo.Write(((Span<byte>)buf).Slice(0, readCount));
                copyPool.Return(buf);
            }
            else
            {
                // Skip the 2-byte section preamble before the deflate payload.
                compressed.Seek(section.Offset + 2, SeekOrigin.Begin);
                using var deflate = new DeflateStream(compressed, CompressionMode.Decompress, leaveOpen: true);
                deflate.CopyTo(decompressTo);
            }
        }
    }

    decompressTo.Position = 0;
    return decompressTo;
}
/// <summary>
/// Compresses the provided stream into the other stream, if the map was previously decompressed.
/// If the provided stream was not previously decompressed, it will simply copy and return false
/// </summary>
/// <returns>A boolean indicating if compression was performed</returns>
/// <exception cref="NotSupportedException">When either stream is not seekable.</exception>
/// <exception cref="Exception">When the input is in neither recognized format.</exception>
public static bool Compress(Stream decompressed, Stream compressTo)
{
    if (decompressed.CanSeek == false)
    {
        throw new NotSupportedException("Must be able to seek on decompressed");
    }

    if (compressTo.CanSeek == false)
    {
        throw new NotSupportedException("Must be able to seek on compressTo");
    }

    var fourCC = decompressed.ReadUInt32At(0);

    // Already in the on-disk (compressed) format: passthrough copy, no work done.
    if (fourCC == RealFourCC)
    {
        decompressed.Position = 0;
        decompressed.CopyTo(compressTo);
        return(false);
    }

    if (fourCC != DecompressedFourCC)
    {
        throw new Exception("Provided decompressed map stream was not valid for compression");
    }

    decompressed.Position = 0;
    compressTo.Position = 0;

    var header = new Span<byte>(new byte[BlamSerializer.SizeOf<H2mccMapHeader>()]);

    // Copy header
    decompressed.Read(header);
    compressTo.WriteUInt32(RealFourCC);
    compressTo.Write(header.Slice(4));

    // Write empty compression info until we're done
    var compressionSections = new Span<byte>(new byte[8192]);
    compressTo.Write(compressionSections);

    var sections = new List<H2mccCompressionSections.CompressionSection>();
    var chunk = new Span<byte>(new byte[CompressionChunkSize]);

    // Compress chunks, write to compressTo
    // NOTE(review): the loop stops at Length - 1; confirm a trailing single byte
    // can never be stranded by this bound.
    while (decompressed.Position < decompressed.Length - 1)
    {
        var bytesToTake = Math.Min(CompressionChunkSize, decompressed.Length - decompressed.Position);
        var readBytes = decompressed.Read(chunk);
        Debug.Assert(readBytes == bytesToTake);

        using var compressed = new MemoryStream();

        // leaveOpen so the MemoryStream can be read back after the compressor flushes on dispose.
        using (var compressor = new DeflateStream(compressed, CompressionLevel.Optimal, true))
        {
            compressor.Write(chunk.ToArray(), 0, readBytes);
        }

        compressed.Seek(0, SeekOrigin.Begin);

        // Section count includes the 2-byte preamble written below.
        var section = new H2mccCompressionSections.CompressionSection((int)compressed.Length + 2, (uint)compressTo.Position);
        sections.Add(section);

        // Write magic bytes
        // NOTE(review): 5416 little-endian forms the 2-byte section preamble —
        // presumably a zlib-style marker; confirm against the format spec.
        compressTo.Write(BitConverter.GetBytes((ushort)5416));
        compressed.CopyTo(compressTo);
    }

    // Go back and write compression section info
    compressTo.Seek(BlamSerializer.SizeOf<H2mccMapHeader>(), SeekOrigin.Begin);

    foreach (var section in sections)
    {
        compressTo.WriteInt32(section.Count);
        compressTo.WriteUInt32(section.Offset);
    }

    return(true);
}
/// <summary>
/// Applies script syntax node patches from a .tree patch file directly into the
/// map stream, fixing up check values so patched nodes still link to their
/// (unchanged) neighbors.
/// </summary>
public static void PatchMap(H2mccMap scene, Stream map, string patchFilePath)
{
    // Locate the scenario tag's script syntax node array within the map file.
    var scenarioStart = scene.TagIndex[scene.IndexHeader.Scenario].Offset.Value;
    var nodesStart = BlamSerializer.StartsAt<ScenarioTag>(s => s.ScriptSyntaxNodes);

    // NOTE(review): nodeCount is read but never used below.
    var nodeCount = map.ReadUInt32At(scenarioStart + nodesStart);

    // The stored pointer is magic-adjusted; subtract SecondaryMagic for a file offset.
    var nodeOffset = (int)map.ReadUInt32At(scenarioStart + nodesStart + 4) - scene.SecondaryMagic;
    var nodeSize = BlamSerializer.SizeOf<ScenarioTag.ScriptSyntaxNode>();

    var patchLines = File.ReadAllLines(patchFilePath);

    foreach (var line in patchLines)
    {
        if (string.IsNullOrWhiteSpace(line))
        {
            continue;
        }

        if (ShouldPatchFrom(scene, line, out var patch))
        {
            Console.WriteLine($"\t Patching {scene.Header.Name} [{patch.Index}]");

            var patchStart = nodeOffset + patch.Index * nodeSize;

            // Fixup next node's check value. We never change check values, so we can
            // re-use the 'old' nodes here to get that info
            if (patch.NodeData.NextIndex == ushort.MaxValue)
            {
                patch.NodeData.NextCheckval = ushort.MaxValue;
            }
            else
            {
                var nextNode = scene.Scenario.ScriptSyntaxNodes[patch.NodeData.NextIndex];
                patch.NodeData.NextCheckval = nextNode.Checkval;
            }

            // Fixup next node's check value for scope/invocation nodes
            if ((patch.NodeData.NodeType == NodeType.BuiltinInvocation || patch.NodeData.NodeType == NodeType.ScriptInvocation))
            {
                // Low 16 bits: target node index; high 16 bits: target's check value.
                if (patch.NodeData.NodeData_H16 == ushort.MaxValue)
                {
                    patch.NodeData.NodeData_32 = patch.NodeData.NodeData_H16 | ((uint)ushort.MaxValue) << 16;
                }
                else
                {
                    var nextNode = scene.Scenario.ScriptSyntaxNodes[patch.NodeData.NodeData_H16];
                    patch.NodeData.NodeData_32 = patch.NodeData.NodeData_H16 | ((uint)nextNode.Checkval) << 16;
                }
            }

            // Write node fields at their fixed struct offsets; offsets 0 (Checkval)
            // and 14 (ValueH) are deliberately left untouched.
            //map.WriteUInt16At(patchStart + 0, patch.NodeData.Checkval);
            map.WriteUInt16At(patchStart + 2, patch.NodeData.OperationId);
            map.WriteUInt16At(patchStart + 4, (ushort)patch.NodeData.DataType);
            map.WriteUInt16At(patchStart + 6, (ushort)patch.NodeData.NodeType);
            map.WriteUInt16At(patchStart + 8, patch.NodeData.NextIndex);
            map.WriteUInt16At(patchStart + 10, patch.NodeData.NextCheckval);
            map.WriteUInt16At(patchStart + 12, patch.NodeData.NodeString);
            //map.WriteUInt16At(patchStart + 14, patch.NodeData.ValueH);
            map.WriteUInt32At(patchStart + 16, patch.NodeData.NodeData_32);
        }
    }
}