// Resolves `key` through the replacement table, then opens the referenced effect
// file and scans its chunks for sounds, accumulating results in `ret` under `parent`.
// `done` prevents revisiting keys (guards against cycles and duplicate work).
public static void FindSoundsExD(ulong key, HashSet<ulong> done, Dictionary<ulong, List<ulong>> ret, Dictionary<ulong, Record> map, CASCHandler handler, Dictionary<ulong, ulong> replace, ulong parent) {
    // Follow one level of key redirection (single lookup instead of ContainsKey + indexer).
    if (replace.TryGetValue(key, out ulong replacedKey)) {
        key = replacedKey;
    }
    if (!map.ContainsKey(key)) { return; }            // unknown file
    if (!done.Add(key)) { return; }                   // already visited
    if (CheckAddEntry(ret, parent, key)) { return; }  // already recorded for this parent
    using (Stream effectStream = Util.OpenFile(map[key], handler)) {
        if (effectStream == null) { return; }
        Chunked chunked = new Chunked(effectStream, true, ChunkManager.Instance);
        FindSoundsChunked(chunked, done, ret, map, handler, replace, parent, key);
    }
}
// Wraps the raw model stream in a Chunked reader, resolves the material path from
// the model's first look (when present), and delegates to the chunked Write overload.
// When ExtractFlags.ExtractRefpose is set, also emits the reference pose and rewinds
// refposeStream for the caller.
public void Write(ICLIFlags flags, Stream output, FindLogic.Combo.ComboInfo info, FindLogic.Combo.ModelInfoNew modelInfo, Stream modelStream, Stream refposeStream) {
    bool extractRefpose = flags is ExtractFlags extractFlags && extractFlags.ExtractRefpose;

    // erm, we need to wrap for now
    using (Chunked modelChunked = new Chunked(modelStream)) {
        OWMatWriter14 materialWriter = new OWMatWriter14();
        string materialPath = "";
        if (modelInfo.ModelLooks.Count > 0) {
            FindLogic.Combo.ModelLookInfo modelLookInfo = info.ModelLooks[modelInfo.ModelLooks.First()];
            materialPath = Path.Combine("ModelLooks", modelLookInfo.GetNameIndex() + materialWriter.Format);
        }

        // data is object[] { bool exportAttachments, string materialReference, string modelName, bool onlyOneLOD, bool skipCollision }
        Write(flags, modelChunked, output, new List<byte>(new byte[] { 0, 1, 0xFF }), new object[] { true, materialPath, $"Model {GetFileName(modelInfo.GUID)}", null, true }, modelInfo);

        if (!extractRefpose) { return; }
        new RefPoseWriter().Write(modelChunked, refposeStream, true);
        refposeStream.Position = 0;
    }
}
// Scans highlight-intro unlocks (type 0xA5) and prints the cosmetic name of every
// one whose animation references exactly one RPCE (particle) chunk.
public void GetFewParticles() {
    foreach (ulong key in TrackedFiles[0xA5]) {
        STUUnlock_HighlightIntro highlightIntro = GetInstance<STUUnlock_HighlightIntro>(key);
        if (highlightIntro?.Animation == null) { continue; }

        using (Stream animStream = OpenFile(highlightIntro.Animation))
        using (BinaryReader animReader = new BinaryReader(animStream)) {
            animStream.Position = 0x18L;  // offset of the effect info key in the animation header
            ulong infoKey = animReader.ReadUInt64();
            if (infoKey == 0) { continue; }

            using (Stream chunkStream = OpenFile(infoKey)) {
                Chunked chunked = new Chunked(chunkStream);
                if (chunked.GetAllOfTypeFlat<RPCE>().Length == 1) {
                    Console.Out.WriteLine(GetString(highlightIntro.CosmeticName));
                }
            }
        }
    }
}
// Writes a single cloth node set as an SMD-style skeleton: a nodes section mapping
// each cloth node to a bone name and vertical parent, then one frame of positions
// relative to that parent. Returns false when the model has no "lksm" skeleton
// chunk or the requested cloth node set is null.
public bool WriteCloth(Chunked model, Stream output, HTLC cloth, uint index) { // todo:
    // Force "." as the decimal separator so float output is locale-independent.
    culture.NumberFormat.NumberDecimalSeparator = ".";
    System.Threading.Thread.CurrentThread.CurrentCulture = culture;
    IChunk chunk = model.FindNextChunk("lksm").Value;
    if (chunk == null) { return false; }
    lksm skeleton = (lksm)chunk;
    if (cloth.Nodes[index] == null) { return false; }
    List<HTLC.ClothNode> nodes = cloth.Nodes[index].ToList();
    using (StreamWriter writer = new StreamWriter(output)) {
        writer.WriteLine("{0}", nodes.Count);
        writer.WriteLine("version 1");
        writer.WriteLine("nodes");
        Dictionary<HTLC.ClothNode, uint> boneMap = new Dictionary<HTLC.ClothNode, uint>();
        uint nodeIndex = 0;
        foreach (HTLC.ClothNode clothNode in nodes) {
            uint bone = nodeIndex;
            bool isNonskele = false;
            // Single TryGetValue instead of ContainsKey + indexer; the original mixed
            // (short) and (int) casts on the same key — normalized to int here.
            if (cloth.NodeBones[index].TryGetValue((int)bone, out var mappedBone) && mappedBone != -1) {
                bone = skeleton.IDs[mappedBone];
                isNonskele = true;
            }
            boneMap[clothNode] = nodeIndex;
            writer.WriteLine($"{nodeIndex} \"bone_{(isNonskele ? "" : "noskele_")}{bone:X}\" {clothNode.VerticalParent}");
            nodeIndex++;
        }
        writer.WriteLine("end");
        writer.WriteLine("skeleton");
        writer.WriteLine("time 0");
        foreach (HTLC.ClothNode node in nodes) {
            if (!boneMap.ContainsKey(node)) { continue; }
            short parent = node.VerticalParent;
            // Positions are relative to the vertical parent; root nodes use the origin.
            float parentX = parent != -1 ? nodes[parent].X : 0f;
            float parentY = parent != -1 ? nodes[parent].Y : 0f;
            float parentZ = parent != -1 ? nodes[parent].Z : 0f;
            writer.Write($"{boneMap[node]}");
            writer.Write($" {node.X - parentX:0.000000}");
            writer.Write($" {node.Y - parentY:0.000000}");
            writer.Write($" {node.Z - parentZ:0.000000}");
            writer.WriteLine(" 0 0 0");
        }
    }
    return true;
}
// Writes an OWMAT binary material file. When data[0] supplies a filename -> TextureType
// map, version 1.1 is emitted with a per-texture type byte; otherwise version 1.0
// with 0xFF (unknown) type bytes.
public bool Write(Chunked model, Stream output, List<byte> LODs, Dictionary<ulong, List<ImageLayer>> layers, object[] data) {
    ushort versionMajor = 1;
    ushort versionMinor = 0;
    Dictionary<string, TextureType> typeData = null;
    if (data != null && data.Length > 0 && data[0] is Dictionary<string, TextureType> suppliedTypes) {
        typeData = suppliedTypes;
        versionMajor = 1;
        versionMinor = 1;
    }
    using (BinaryWriter writer = new BinaryWriter(output)) {
        writer.Write(versionMajor);
        writer.Write(versionMinor);
        writer.Write(layers.Keys.LongCount());  // material count, 64-bit
        foreach (KeyValuePair<ulong, List<ImageLayer>> layer in layers) {
            writer.Write(layer.Key);
            HashSet<string> images = new HashSet<string>();
            foreach (ImageLayer image in layer.Value) {
                string old = $"{GUID.LongKey(image.Key):X12}.dds";
                string resolved = old;
                if (typeData != null) {
                    // Case-insensitive filename match against the supplied map; falls back
                    // to the GUID-based name. (Replaces the original try/First/catch —
                    // exceptions are not control flow.)
                    string oldUpper = old.ToUpperInvariant();
                    foreach (KeyValuePair<string, TextureType> candidate in typeData) {
                        if (Path.GetFileName(candidate.Key).ToUpperInvariant() == oldUpper) {
                            resolved = candidate.Key;
                            break;
                        }
                    }
                }
                images.Add(resolved);
            }
            writer.Write(images.Count);  // per-material texture count, 32-bit
            foreach (string image in images) {
                writer.Write(image);
                if (typeData != null && typeData.TryGetValue(image, out var texType)) {
                    writer.Write((byte)DDSTypeDetect.Detect(texType));
                } else {
                    writer.Write((byte)0xFF);  // unknown texture type
                }
            }
        }
    }
    return true;
}
// Injects an "X-Served-By" header into HTTP responses flowing through the proxy,
// then re-chunks the buffer before it is forwarded.
public override void OnDataReceived(object sender, socks5.TCP.DataEventArgs e) {
    bool looksLikeHttp = e.Buffer.FindString("HTTP/1.") != -1 && e.Buffer.FindString("\r\n\r\n") != -1;
    if (looksLikeHttp) {
        // Insert our header right after the status line (first CRLF).
        e.Buffer = e.Buffer.ReplaceString("\r\n", "\r\nX-Served-By: Socks5Server\r\n");
        e.Count += "X-Served-By: Socks5Server\r\n".Length;
    }
    //get chunked.
    Chunked c = new Chunked(e.Client.Sock, e.Buffer, e.Count);
    e.Buffer = c.ChunkedData;
    e.Count = c.ChunkedData.Length;
}
// Scans an already-parsed chunked file for sound references: OSCE chunks contribute
// sounds directly; FECE chunks reference effect files that are scanned recursively
// via FindSoundsExD. Results accumulate in `ret` under `mykey`.
public static void FindSoundsChunked(Chunked chunked, HashSet<ulong> done, Dictionary<ulong, List<ulong>> ret, Dictionary<ulong, Record> map, CASCHandler handler, Dictionary<ulong, ulong> replace, ulong parent, ulong mykey) {
    foreach (OSCE osce in chunked.GetAllOfTypeFlat<OSCE>()) {
        FindSoundsEx(osce.Data.Sound, done, ret, map, handler, replace, mykey);
    }
    foreach (FECE fece in chunked.GetAllOfTypeFlat<FECE>()) {
        FindSoundsExD(fece.Data.Effect, done, ret, map, handler, replace, mykey);
    }
}
// Writes a Wavefront .mtl file with one material (newmtl) per layer set. Texture
// filenames are resolved through the optional filename -> TextureType map in data[0],
// falling back to GUID-derived .dds names.
public bool Write(Chunked model, Stream output, List<byte> LODs, Dictionary<ulong, List<ImageLayer>> layers, object[] data) {
    Dictionary<string, TextureType> typeData = null;
    if (data != null && data.Length > 0 && data[0] is Dictionary<string, TextureType> suppliedTypes) {
        typeData = suppliedTypes;
    }
    // Pre-resolve display names: material key -> (texture key -> filename).
    Dictionary<ulong, Dictionary<ulong, string>> nameMap = new Dictionary<ulong, Dictionary<ulong, string>>();
    foreach (KeyValuePair<ulong, List<ImageLayer>> layer in layers) {
        Dictionary<ulong, string> names = new Dictionary<ulong, string>();
        nameMap[layer.Key] = names;
        foreach (ImageLayer image in layer.Value) {
            string old = $"{GUID.LongKey(image.Key):X12}.dds";
            string resolved = old;
            if (typeData != null) {
                // Case-insensitive filename match; falls back to the GUID-based name.
                // (Replaces the original try/First/catch — exceptions are not control flow.)
                string oldUpper = old.ToUpperInvariant();
                foreach (KeyValuePair<string, TextureType> candidate in typeData) {
                    if (Path.GetFileName(candidate.Key).ToUpperInvariant() == oldUpper) {
                        resolved = candidate.Key;
                        break;
                    }
                }
            }
            names.Add(image.Key, resolved);
        }
    }
    using (StreamWriter writer = new StreamWriter(output)) {
        foreach (KeyValuePair<ulong, List<ImageLayer>> pair in layers) {
            writer.WriteLine("newmtl {0:X16}", pair.Key);
            writer.WriteLine("Kd 1 1 1");
            foreach (ImageLayer layer in pair.Value) {
                writer.WriteLine("map_Kd \"{0}\"", nameMap[pair.Key][layer.Key]);
            }
            writer.WriteLine("");
        }
    }
    return true;
}
// Saves the voice-line audio for a voice-line unlock item: resolves the hero's voice
// set, matches the item's effect SVCE stimuli against it, and hands the matching
// voice line instances to Combo.SaveVoiceStimuli.
public static void SaveItem(string basePath, string heroName, string containerName, string folderName, ICLIFlags flags, ItemInfo item, STUHero hero) {
    if (item == null) { return; }
    if (!(item.Unlock is STUUnlock_VoiceLine vl)) { return; }
    const string type = "VoiceLines";
    string name = GetValidFilename(item.Name).Replace(".", "");

    STUVoiceSetComponent soundSetComponentContainer = GetInstance<STUVoiceSetComponent>(hero.EntityMain);
    if (soundSetComponentContainer?.VoiceSet == null) {
        Debugger.Log(0, "DataTool.SaveLogic.Unlock.VoiceLine", "[DataTool.SaveLogic.Unlock.VoiceLine]: VoiceSet not found");
        return;
    }

    FindLogic.Combo.ComboInfo info = new FindLogic.Combo.ComboInfo();
    FindLogic.Combo.Find(info, soundSetComponentContainer.VoiceSet);
    FindLogic.Combo.VoiceSetInfo voiceSetInfo = info.VoiceSets[soundSetComponentContainer.VoiceSet];

    List<FindLogic.Combo.VoiceLineInstanceInfo> voiceLineInstances = new List<FindLogic.Combo.VoiceLineInstanceInfo>();
    using (Stream vlStream = OpenFile(vl.EffectResource))
    using (Chunked vlChunk = new Chunked(vlStream)) {
        foreach (SVCE svce in vlChunk.GetAllOfTypeFlat<SVCE>()) {
            if (svce == null) { continue; }
            // Only stimuli present in the hero's voice set are exported.
            if (voiceSetInfo.VoiceLineInstances.TryGetValue(svce.Data.VoiceStimulus, out var stimulusInstances)) {
                voiceLineInstances.AddRange(stimulusInstances);
            }
        }
    }

    string output = Path.Combine(basePath, containerName, heroName ?? "", type, folderName, name);
    Combo.SaveVoiceStimuli(flags, output, info, voiceLineInstances, false);
}
// Verifies that Chunked.ChunkPrefix renders sizes as lowercase hex followed by CRLF,
// matching the HTTP/1.1 chunked transfer coding chunk-size line format, across
// one-digit through eight-digit values.
public void ChunkPrefixHasCorrectResults() {
    AssertChunkPrefix(Chunked.ChunkPrefix(1), "1\r\n");
    AssertChunkPrefix(Chunked.ChunkPrefix(15), "f\r\n");
    AssertChunkPrefix(Chunked.ChunkPrefix(0x10), "10\r\n");
    AssertChunkPrefix(Chunked.ChunkPrefix(0x80), "80\r\n");
    AssertChunkPrefix(Chunked.ChunkPrefix(0xff), "ff\r\n");
    // NOTE(review): 0x10 is asserted a second time here (already covered above) —
    // possibly a typo for a different boundary value; confirm intended coverage.
    AssertChunkPrefix(Chunked.ChunkPrefix(0x10), "10\r\n");
    AssertChunkPrefix(Chunked.ChunkPrefix(0x100), "100\r\n");
    AssertChunkPrefix(Chunked.ChunkPrefix(0x1000), "1000\r\n");
    AssertChunkPrefix(Chunked.ChunkPrefix(0x10000), "10000\r\n");
    AssertChunkPrefix(Chunked.ChunkPrefix(0x100000), "100000\r\n");
    AssertChunkPrefix(Chunked.ChunkPrefix(0x1000000), "1000000\r\n");
    AssertChunkPrefix(Chunked.ChunkPrefix(0x10000000), "10000000\r\n");
    // Edge cases: zero-length (terminating) chunk and the maximum 32-bit size.
    AssertChunkPrefix(Chunked.ChunkPrefix(0), "0\r\n");
    AssertChunkPrefix(Chunked.ChunkPrefix(0xffffffff), "ffffffff\r\n");
}
// Debug tool: for every file of type 0x8F, renders each VCCE chunk's primary entry
// alpha values as a 1-pixel-tall grayscale PNG strip under <output>/DebugVCCE/<file>/.
public void ExtractVCCE(ICLIFlags toolFlags) {
    string basePath;
    if (toolFlags is ExtractFlags flags) {
        basePath = flags.OutputPath;
    } else {
        throw new Exception("no output path");
    }
    const string container = "DebugVCCE";
    foreach (ulong key in TrackedFiles[0x8F]) {
        // if (GUID.Index(key) != 0xDEADBEEF) continue;
        using (Stream chunkedStream = OpenFile(key)) {
            Chunked chunked = new Chunked(chunkedStream);
            int vcceIndex = 0;
            foreach (VCCE vcce in chunked.GetAllOfTypeFlat<VCCE>()) {
                // NOTE(review): bitmap width comes from Data.TableCount while the loop runs
                // over SecondaryEntries.Length — SetPixel throws if the latter is larger;
                // confirm the two always agree.
                using (Bitmap b = new Bitmap(vcce.Data.TableCount, 1)) {
                    for (int i = 0; i < vcce.SecondaryEntries.Length; i++) {
                        // (The original also read SecondaryEntries[i] into an unused local;
                        // removed — only the primary entry's alpha is used.)
                        VCCE.Entry primary = vcce.Entries[i];
                        byte col = (byte)(primary.A * 255);
                        b.SetPixel(i, 0, Color.FromArgb(col, col, col));
                    }
                    string file = Path.Combine(basePath, container, GetFileName(key), $"{vcceIndex}.png");
                    CreateDirectoryFromFile(file);
                    b.Save(file, ImageFormat.Png);
                }
                vcceIndex++;
            }
        }
    }
}
// Dumps every chunk of a chunked file into individual .chunk files under @out,
// named by ordinal, root identifier and chunk identifier. Non-memory chunks break
// into the debugger when one is attached (they indicate a typed chunk we did not
// expect here).
public static void DumpChunks(Stream file, string @out) {
    if (!Directory.Exists(@out)) {
        Directory.CreateDirectory(@out);
    }
    file.Position = 0;
    Chunked chunked = new Chunked(file, false, new ChunkManager());
    if (chunked.Chunks == null) {
        Console.Out.WriteLine("Unknown error while parsing chunk file");
    }
    uint i = 0;
    if (chunked.Chunks != null) {
        foreach (IChunk instance in chunked.Chunks) {
            if (instance is MemoryChunk dummy) {
                string filename = $"{i:X8}_{dummy.RootIdentifier}_{dummy.Identifier}.chunk";
                using (Stream outputStream = File.Open(Path.Combine(@out, filename), FileMode.Create, FileAccess.Write, FileShare.Read)) {
                    dummy.Data.Position = 0;
                    dummy.Data.CopyTo(outputStream);
                }
            } else if (System.Diagnostics.Debugger.IsAttached) {
                System.Diagnostics.Debugger.Break();
            }
            i++;
        }
    }
    Console.Out.WriteLine("Dumped {0} chunks", i);
}
// Capture-only hook: records every server->client buffer and returns immediately.
// NOTE(review): everything after the first `return` is unreachable — the chunked
// re-parsing path appears deliberately disabled (its body is also commented out);
// confirm before re-enabling or deleting the dead branch.
public override void OnServerDataReceived(object sender, socks5.TCP.DataEventArgs e) {
    Utils.Add(new DataCapture.Data(e.Request, e.Buffer, e.Count, DataCapture.DataType.Received));
    return;
    //If it's an HTTP request, make sure to use chunked. Read until chunked or content-length.
    if (Chunked.IsHTTP(e.Buffer) && Chunked.IsChunked(e.Buffer)) {
        //Chunked c = new Chunked(e.Client.Sock, e.Buffer, e.Count);
        //e.Buffer = c.ChunkedData;
        //e.Count = c.ChunkedData.Length;
        //Utils.Add(new DataCapture.Data(e.Request, c.ChunkedData, c.ChunkedData.Length, DataCapture.DataType.Received));
        Utils.Add(new DataCapture.Data(e.Request, e.Buffer, e.Count, DataCapture.DataType.Received));
    }
    //Save data into sent data list.
    else { }
}
// Builds the help text describing the optional query-string fields for this endpoint,
// followed by the fully-resolved (defaulted) query string for the current settings.
public IEnumerable<string> GetLines() {
    NameValueCollection resolvedQuery = HttpUtility.ParseQueryString(string.Empty);
    resolvedQuery[EncodingKey] = Encoding;
    // ToLowerInvariant avoids culture-sensitive casing (CA1304/CA1311); "True"/"False"
    // are ASCII, so the output is unchanged.
    resolvedQuery[ChunkedKey] = Chunked.ToString().ToLowerInvariant();
    resolvedQuery[LinesKey] = Lines.ToString(CultureInfo.InvariantCulture);
    resolvedQuery[DeflateLevelKey] = DeflateLevel.ToString(CultureInfo.InvariantCulture);
    resolvedQuery[SleepKey] = Sleep.ToString(CultureInfo.InvariantCulture);
    return new[] {
        string.Format(CultureInfo.InvariantCulture, "Optional query string fields:"),
        string.Format(CultureInfo.InvariantCulture, " '{0}' The type of compression to use. Must be in ['{1}'].", EncodingKey, string.Join("', '", Encodings)),
        string.Format(CultureInfo.InvariantCulture, " '{0}' Whether to use Transfer-Encoding: chunked. must be 'true' or 'false'.", ChunkedKey),
        string.Format(CultureInfo.InvariantCulture, " '{0}' The number of lines to return. Must be an integer greater than or equal to 0.", LinesKey),
        string.Format(CultureInfo.InvariantCulture, " '{0}' The compression level for DEFLATE. Must be an integer between 0 and 9 (inclusive).", DeflateLevelKey),
        string.Format(CultureInfo.InvariantCulture, " '{0}' The milliseconds between returning each line. Must be an integer greater than or equal to 0.", SleepKey),
        string.Format(CultureInfo.InvariantCulture, "Resolved query string:"),
        string.Format(CultureInfo.InvariantCulture, " {0}", resolvedQuery)
    };
}
// Writes the model's "lksm" skeleton as a Source-engine SMD reference skeleton:
// a "nodes" section (bone id, name, parent) followed by a single bind-pose frame.
// Returns false when the model has no lksm chunk. `LODs`, `layers` and `data`
// are unused here; they exist to satisfy the shared writer interface.
public bool Write(Chunked model, Stream output, List <byte> LODs, Dictionary <ulong, List <ImageLayer> > layers, object[] data) {
    // Force "." as decimal separator: the culture-sensitive WriteLine calls below
    // depend on the thread culture. NOTE(review): this mutates the calling thread's
    // culture as a side effect — confirm that is acceptable for all callers.
    culture.NumberFormat.NumberDecimalSeparator = ".";
    System.Threading.Thread.CurrentThread.CurrentCulture = culture;
    IChunk chunk = model.FindNextChunk("lksm").Value;
    if (chunk == null) { return(false); }
    lksm skeleton = (lksm)chunk;
    using (StreamWriter writer = new StreamWriter(output)) {
        writer.WriteLine("{0}", skeleton.Data.bonesAbs);
        writer.WriteLine("version 1");
        writer.WriteLine("nodes");
        for (int i = 0; i < skeleton.Data.bonesAbs; ++i) {
            writer.WriteLine("{0} \"bone_{1:X4}\" {2}", i, skeleton.IDs[i], skeleton.Hierarchy[i]);
        }
        writer.WriteLine("end");
        writer.WriteLine("skeleton");
        writer.WriteLine("time 0");
        for (int i = 0; i < skeleton.Data.bonesAbs; ++i) {
            // Decompose the inverted 3x4 bone matrix: row 0 holds the rotation
            // quaternion (w in column 3), row 1 the scale, row 2 the translation.
            Matrix3x4 bone = skeleton.Matrices34Inverted[i];
            APPLIB.Quaternion3D quat = new APPLIB.Quaternion3D(bone[0, 3], bone[0, 0], bone[0, 1], bone[0, 2]);
            APPLIB.Vector3D rot = APPLIB.C3D.ToEulerAngles(quat);
            Vector3 scale = new Vector3(bone[1, 0], bone[1, 1], bone[1, 2]);
            Vector3 pos = new Vector3(bone[2, 0], bone[2, 1], bone[2, 2]);
            // Canonicalize the (-pi, 0, 0) euler to the equivalent (0, pi, pi) form.
            if (rot.X == -3.14159274f && rot.Y == 0 && rot.Z == 0) {
                rot = new APPLIB.Vector3D(0, 3.14159274f, 3.14159274f); // effectively the same but you know, eulers.
            }
            writer.WriteLine(String.Format(CultureInfo.InvariantCulture, "{0} {1:0.000000} {2:0.000000} {3:0.000000} {4:0.000000} {5:0.000000} {6:0.000000} {7:0.000000} {8:0.000000} {9:0.000000}", i, pos.X, pos.Y, pos.Z, rot.X, rot.Y, rot.Z, scale.X, scale.Y, scale.Z));
        }
    }
    return(true);
}
// Writes a reference-pose SMD for the model's "lksm" skeleton. When an HTLC cloth
// chunk is present, cloth-node parenting is spliced into a cloned bone hierarchy
// first, and cloth node positions override the matrix-derived bone positions in
// the pose section. Returns false when the model has no lksm chunk.
public bool Write(Chunked model, Stream output, bool keepOpen = false) {
    // Force "." as decimal separator; see NOTE in the other Write overload about
    // this mutating the calling thread's culture.
    culture.NumberFormat.NumberDecimalSeparator = ".";
    System.Threading.Thread.CurrentThread.CurrentCulture = culture;
    IChunk chunk = model.FindNextChunk("lksm").Value;
    if (chunk == null) { return(false); }
    lksm skeleton = (lksm)chunk;
    // Work on a clone so the chunk's own hierarchy is left untouched.
    short[] hierarchy = (short[])skeleton.Hierarchy.Clone();
    HashSet <short> weightedParNodes = new HashSet <short>();
    Dictionary <int, HTLC.ClothNode> nodeMap = new Dictionary <int, HTLC.ClothNode>();
    HTLC cloth = model.FindNextChunk("HTLC").Value as HTLC;
    if (cloth != null) {
        uint clothIndex = 0;
        foreach (HTLC.ClothNode[] nodeCollection in cloth.Nodes) {
            if (nodeCollection == null) { continue; }
            int nodeIndex = 0;
            foreach (HTLC.ClothNode node in nodeCollection) {
                int parentRaw = node.VerticalParent;
                // Case 1: both this node and its vertical parent map to skeleton bones.
                if (cloth.NodeBones[clothIndex].ContainsKey(nodeIndex) && cloth.NodeBones[clothIndex].ContainsKey(parentRaw)) {
                    // good code:
                    if (cloth.NodeBones[clothIndex][nodeIndex] != -1) {
                        hierarchy[cloth.NodeBones[clothIndex][nodeIndex]] = cloth.NodeBones[clothIndex][parentRaw];
                        // Parent maps to -1: reparent to the node's heaviest-weighted bone.
                        if (cloth.NodeBones[clothIndex][parentRaw] == -1) {
                            HTLC.ClothNodeWeight weightedBone = node.Bones.Aggregate((i1, i2) => i1.Weight > i2.Weight ? i1 : i2);
                            hierarchy[cloth.NodeBones[clothIndex][nodeIndex]] = weightedBone.Bone;
                            weightedParNodes.Add(cloth.NodeBones[clothIndex][nodeIndex]);
                        }
                    }
                    // else: on subskele
                    // todo: add subskelebones?
                } else {
                    // Case 2: only this node maps to a skeleton bone — detach then
                    // reparent to its heaviest-weighted bone.
                    if (cloth.NodeBones[clothIndex].ContainsKey(nodeIndex)) // if on main skele
                    // good code:
                    {
                        if (cloth.NodeBones[clothIndex][nodeIndex] != -1) {
                            hierarchy[cloth.NodeBones[clothIndex][nodeIndex]] = -1;
                            HTLC.ClothNodeWeight weightedBone = node.Bones.Aggregate((i1, i2) => i1.Weight > i2.Weight ? i1 : i2);
                            hierarchy[cloth.NodeBones[clothIndex][nodeIndex]] = weightedBone.Bone;
                            weightedParNodes.Add(cloth.NodeBones[clothIndex][nodeIndex]);
                        }
                        // else: on subskele
                        // todo: add subskelebones?
                    }
                }
                // Remember which skeleton bone each cloth node drives, for the pose pass.
                if (cloth.NodeBones[clothIndex].ContainsKey(nodeIndex)) {
                    // good code:
                    nodeMap[cloth.NodeBones[clothIndex][nodeIndex]] = node;
                }
                nodeIndex++;
            }
            clothIndex++;
        }
    }
    using (StreamWriter writer = new StreamWriter(output, Encoding.Default, 512, keepOpen)) {
        writer.WriteLine("{0}", skeleton.Data.bonesAbs);
        writer.WriteLine("version 1");
        writer.WriteLine("nodes");
        for (int i = 0; i < skeleton.Data.bonesAbs; ++i) {
            writer.WriteLine("{0} \"bone_{1:X4}\" {2}", i, skeleton.IDs[i], hierarchy[i]);
        }
        writer.WriteLine("end");
        writer.WriteLine("skeleton");
        writer.WriteLine("time 0");
        for (int i = 0; i < skeleton.Data.bonesAbs; ++i) {
            // Rows of the inverted 3x4 matrix: 0 = rotation quaternion (w in col 3),
            // 1 = scale, 2 = translation.
            Matrix3x4 bone = skeleton.Matrices34Inverted[i];
            Quaternion3D quat = new Quaternion3D(bone[0, 3], bone[0, 0], bone[0, 1], bone[0, 2]);
            Vector3D rot = C3D.ToEulerAngles(quat);
            Vector3 scale = new Vector3(bone[1, 0], bone[1, 1], bone[1, 2]);
            Vector3 pos = new Vector3(bone[2, 0], bone[2, 1], bone[2, 2]);
            // Cloth-driven bones take their position from the cloth node, relative to
            // the resolved parent (global pose for reweighted parents, parent cloth
            // node otherwise, or absolute when the parent has no cloth node).
            if (nodeMap.ContainsKey(i)) {
                HTLC.ClothNode thisNode = nodeMap[i];
                if (weightedParNodes.Contains((short)i)) {
                    Vector3 pos2 = GetGlobalPos(skeleton.Matrices34Inverted, hierarchy[i], hierarchy);
                    pos.X = thisNode.X - pos2.X;
                    pos.Y = thisNode.Y - pos2.Y;
                    pos.Z = thisNode.Z - pos2.Z;
                } else if (nodeMap.ContainsKey(hierarchy[i])) {
                    HTLC.ClothNode parentNode = nodeMap[hierarchy[i]];
                    pos.X = thisNode.X - parentNode.X;
                    pos.Y = thisNode.Y - parentNode.Y;
                    pos.Z = thisNode.Z - parentNode.Z;
                } else {
                    pos.X = thisNode.X;
                    pos.Y = thisNode.Y;
                    pos.Z = thisNode.Z;
                }
            }
            // Canonicalize the (-pi, 0, 0) euler to the equivalent (0, pi, pi) form.
            if (rot.X == -3.14159274f && rot.Y == 0 && rot.Z == 0) {
                rot = new Vector3D(0, 3.14159274f, 3.14159274f); // effectively the same but you know, eulers.
            }
            writer.WriteLine(String.Format(CultureInfo.InvariantCulture, "{0} {1:0.000000} {2:0.000000} {3:0.000000} {4:0.000000} {5:0.000000} {6:0.000000} {7:0.000000} {8:0.000000} {9:0.000000}", i, pos.X, pos.Y, pos.Z, rot.X, rot.Y, rot.Z, scale.X, scale.Y, scale.Z));
        }
    }
    return(true);
}
// Native interop declaration for chunked_add; returns a raw pointer whose exact
// semantics are defined by the native library (not visible from this declaration).
// NOTE(review): no [DllImport] attribute is visible on this line — `extern` requires
// one, so it presumably sits adjacent in the original file; confirm. Also verify
// how the managed `Chunked` argument is marshaled to the native side.
public static extern IntPtr chunked_add(Chunked chunked, uint size);
// Extracts complete map packages: resolves the requested map names (or wildcard),
// locates per-map IDataWriter implementations for the chosen animation/model output
// formats via reflection, then for each map master walks its data streams to export
// the map layout, models, animations, sounds and textures/materials into an output
// directory tree. '+' encodings mean "raw only"; Skip* flags suppress whole phases.
// NOTE(review): control flow below is deeply nested and order-sensitive; code is
// intentionally left byte-identical — comments only.
public void Parse(Dictionary <ushort, List <ulong> > track, Dictionary <ulong, Record> map, CASCHandler handler, bool quiet, OverToolFlags flags) { string output = flags.Positionals[2]; List <string> maps = flags.Positionals.Skip(3).ToList(); bool skipCmodel = !flags.ExportCollision; for (int i = 0; i < maps.Count; ++i) { maps[i] = maps[i].ToUpperInvariant().TrimStart('0'); } bool mapWildcard = maps.Count == 0; if (maps.Count > 0 && maps.Contains("*")) { mapWildcard = true; } char animEncoding = flags.AnimFormat; if (flags.Raw) { animEncoding = '+'; } bool suppressAnimations = flags.SkipAnimations; if (animEncoding == '+' && !flags.RawAnimation) { suppressAnimations = true; } char modelEncoding = flags.ModelFormat; if (flags.Raw) { modelEncoding = '+'; } bool suppressModels = flags.SkipModels; if (modelEncoding == '+' && !flags.RawModel) { suppressModels = true; } IDataWriter animWriter = null; if (animEncoding != 0 && animEncoding != '+') { Assembly asm = typeof(IDataWriter).Assembly; Type t = typeof(IDataWriter); List <Type> types = asm.GetTypes().Where(tt => tt != t && t.IsAssignableFrom(tt)).ToList(); foreach (Type tt in types) { if (animWriter != null) { break; } if (tt.IsInterface) { continue; } IDataWriter tmp = (IDataWriter)Activator.CreateInstance(tt); for (int i = 0; i < tmp.Identifier.Length; ++i) { if (tmp.Identifier[i] == animEncoding) { animWriter = tmp; break; } } } } IDataWriter modelWriter = null; if (modelEncoding != 0 && modelEncoding != '+') { Assembly asm = typeof(IDataWriter).Assembly; Type t = typeof(IDataWriter); List <Type> types = asm.GetTypes().Where(tt => tt != t && t.IsAssignableFrom(tt)).ToList(); foreach (Type tt in types) { if (modelWriter != null) { break; } if (tt.IsInterface) { continue; } IDataWriter tmp = (IDataWriter)Activator.CreateInstance(tt); for (int i = 0; i < tmp.Identifier.Length; ++i) { if (tmp.Identifier[i] == modelEncoding) { modelWriter = tmp; break; } } } } List <ulong> masters = track[0x9F]; List <byte> LODs = new 
// (statement continues) LOD filter list; then iterate each map master STUD and
// resolve its name, output path and required data keys before extraction.
List <byte>(new byte[5] { 0, 1, 128, 254, 255 }); Dictionary <ulong, ulong> replace = new Dictionary <ulong, ulong>(); foreach (ulong masterKey in masters) { if (!map.ContainsKey(masterKey)) { continue; } STUD masterStud = new STUD(Util.OpenFile(map[masterKey], handler)); if (masterStud.Instances == null) { continue; } MapMaster master = (MapMaster)masterStud.Instances[0]; if (master == null) { continue; } string name = Util.GetString(master.Header.name.key, map, handler); if (string.IsNullOrWhiteSpace(name)) { name = $"Unknown{GUID.Index(master.Header.data.key):X}"; } if (!mapWildcard && !(maps.Contains(name.ToUpperInvariant()) || maps.Contains($"{GUID.Index(masterKey):X}"))) { continue; } string outputPath = string.Format("{0}{1}{2}{1}{3:X}{1}", output, Path.DirectorySeparatorChar, Util.SanitizePath(name), GUID.Index(master.Header.data.key)); if (!map.ContainsKey(master.Header.data.key)) { continue; } HashSet <ulong> parsed = new HashSet <ulong>(); Dictionary <ulong, ulong> animList = new Dictionary <ulong, ulong>(); Dictionary <ulong, List <ulong> > soundData = new Dictionary <ulong, List <ulong> >(); if (!map.ContainsKey(master.DataKey(1))) { continue; } if (!map.ContainsKey(master.DataKey(2))) { continue; } if (!map.ContainsKey(master.DataKey(8))) { continue; } if (!map.ContainsKey(master.DataKey(0xB))) { continue; } if (!map.ContainsKey(master.DataKey(0x11))) { continue; } if (!map.ContainsKey(master.DataKey(0x10))) { continue; } using (Stream mapStream = Util.OpenFile(map[master.DataKey(1)], handler)) { Console.Out.WriteLine("Extracting map {0} with ID {1:X8}", name, GUID.Index(master.Header.data.key)); Map mapData = new Map(mapStream); IDataWriter owmap = new OWMAPWriter(); Dictionary <ulong, List <string> >[] used = null; if (!Directory.Exists(outputPath)) { Directory.CreateDirectory(outputPath); } HashSet <ulong> soundDone = new HashSet <ulong>(); Sound.FindSoundsEx(master.Header.audio.key, soundDone, soundData, map, handler, replace, 
// (argument list continues) then open the secondary map streams (2, 8, 0xB, 0x11)
// to collect sounds and entity model/animation references.
master.Header.data.key); using (Stream map2Stream = Util.OpenFile(map[master.DataKey(2)], handler)) { if (map2Stream != null) { Map map2Data = new Map(map2Stream); using (Stream map8Stream = Util.OpenFile(map[master.DataKey(8)], handler)) { if (map8Stream != null) { Map map8Data = new Map(map8Stream); using (Stream mapBStream = Util.OpenFile(map[master.DataKey(0xB)], handler)) { if (mapBStream != null) { Map mapBData = new Map(mapBStream, true); using (Stream map11Stream = Util.OpenFile(map[master.DataKey(0x11)], handler)) { if (map11Stream != null) { Map11 map11 = new Map11(map11Stream); Sound.FindSoundsSTUD(map11.main, soundDone, soundData, map, handler, replace, masterKey, master.DataKey(0x11)); Sound.FindSoundsSTUD(map11.secondary, soundDone, soundData, map, handler, replace, masterKey, master.DataKey(0x11)); } } mapBStream.Position = (long)(Math.Ceiling((float)mapBStream.Position / 16.0f) * 16); // Future proofing for (int i = 0; i < mapBData.STUDs.Count; ++i) { STUD stud = mapBData.STUDs[i]; Sound.FindSoundsSTUD(stud, soundDone, soundData, map, handler, replace, master.DataKey(0xB), master.DataKey(0xB)); } for (int i = 0; i < mapBData.Records.Length; ++i) { if (mapBData.Records[i] != null && mapBData.Records[i].GetType() != typeof(MapEntity)) { continue; } MapEntity mapprop = (MapEntity)mapBData.Records[i]; if (!map.ContainsKey(mapprop.Header.Entity)) { continue; } Sound.FindSoundsEx(mapprop.Header.Entity, soundDone, soundData, map, handler, replace, master.DataKey(0xB)); HashSet <ulong> bindingModels = new HashSet <ulong>(); Dictionary <ulong, List <ImageLayer> > bindingTextures = new Dictionary <ulong, List <ImageLayer> >(); using (Stream bindingFile = Util.OpenFile(map[mapprop.Header.Entity], handler)) { STUD binding = new STUD(bindingFile, true, STUDManager.Instance, false, true); foreach (ISTUDInstance instance in binding.Instances) { if (instance == null) { continue; } if (instance.Name != binding.Manager.GetName(typeof(ComplexModelRecord))) { continue; 
// (loop continues) resolve the entity's model/material from its ComplexModelRecord,
// then write the .owmap layout file and the physics model.
} ComplexModelRecord cmr = (ComplexModelRecord)instance; mapprop.ModelLook = cmr.Data.material.key; mapprop.Model = cmr.Data.model.key; Skin.FindAnimations(cmr.Data.animationList.key, soundData, animList, replace, parsed, map, handler, bindingModels, bindingTextures, mapprop.Model); Skin.FindAnimations(cmr.Data.secondaryAnimationList.key, soundData, animList, replace, parsed, map, handler, bindingModels, bindingTextures, mapprop.Model); break; } } mapBData.Records[i] = mapprop; } using (Stream mapLStream = Util.OpenFile(map[master.DataKey(9)], handler)) { Map mapLData = new Map(mapLStream); using (Stream outputStream = File.Open($"{outputPath}{Util.SanitizePath(name)}{owmap.Format}", FileMode.Create, FileAccess.Write)) { used = owmap.Write(outputStream, mapData, map2Data, map8Data, mapBData, mapLData, name, modelWriter); } } } } } } } } IDataWriter owmat = new OWMATWriter(); using (Stream map10Stream = Util.OpenFile(map[master.DataKey(0x10)], handler)) { Map10 physics = new Map10(map10Stream); using (Stream outputStream = File.Open($"{outputPath}physics{modelWriter.Format}", FileMode.Create, FileAccess.Write)) { modelWriter.Write(physics, outputStream, new object[0]); } } if (used != null) { Dictionary <ulong, List <string> > models = used[0]; Dictionary <ulong, List <string> > materials = used[1]; Dictionary <ulong, Dictionary <ulong, List <ImageLayer> > > cache = new Dictionary <ulong, Dictionary <ulong, List <ImageLayer> > >(); if (!suppressModels) { foreach (KeyValuePair <ulong, List <string> > modelpair in models) { if (!map.ContainsKey(modelpair.Key)) { continue; } if (!parsed.Add(modelpair.Key)) { continue; } HashSet <string> extracted = new HashSet <string>(); using (Stream modelStream = Util.OpenFile(map[modelpair.Key], handler)) { Chunked mdl = new Chunked(modelStream, true); modelStream.Position = 0; if (modelEncoding != '+' && modelWriter != null) { foreach (string modelOutput in modelpair.Value) { if (!extracted.Add(modelOutput)) { continue; } using 
// (statement continues) write each converted model (and optionally the raw model),
// then export animations (raw and/or converted), sounds, and finally textures with
// their .owmat materials.
(Stream outputStream = File.Open($"{outputPath}{modelOutput}", FileMode.Create, FileAccess.Write)) { if (modelWriter.Write(mdl, outputStream, LODs, new Dictionary <ulong, List <ImageLayer> >(), new object[5] { null, null, null, null, skipCmodel })) { if (!quiet) { Console.Out.WriteLine("Wrote model {0}", modelOutput); } } else { if (!quiet) { Console.Out.WriteLine("Failed to write model"); } } } } } if (flags.RawModel) { using (Stream outputStream = File.Open($"{outputPath}{GUID.LongKey(modelpair.Key):X12}.{GUID.Type(modelpair.Key):X3}", FileMode.Create, FileAccess.Write)) { if (modelWriter.Write(mdl, outputStream, LODs, new Dictionary <ulong, List <ImageLayer> >(), new object[5] { null, null, null, null, skipCmodel })) { if (!quiet) { Console.Out.WriteLine("Wrote raw model {0:X12}.{1:X3}", GUID.LongKey(modelpair.Key), GUID.Type(modelpair.Key)); } } else { if (!quiet) { Console.Out.WriteLine("Failed to write model"); } } } } } } } if (!suppressAnimations) { foreach (KeyValuePair <ulong, ulong> kv in animList) { ulong parent = kv.Value; ulong key = kv.Key; Stream animStream = Util.OpenFile(map[key], handler); if (animStream == null) { continue; } Animation anim = new Animation(animStream); animStream.Position = 0; string outpath = string.Format("{0}Animations{1}{2:X12}{1}{5}{1}{3:X12}.{4:X3}", outputPath, Path.DirectorySeparatorChar, GUID.Index(parent), GUID.LongKey(key), GUID.Type(key), anim.Header.priority); if (!Directory.Exists(Path.GetDirectoryName(outpath))) { Directory.CreateDirectory(Path.GetDirectoryName(outpath)); } if (flags.RawAnimation) { using (Stream outp = File.Open(outpath, FileMode.Create, FileAccess.Write)) { animStream.CopyTo(outp); if (!quiet) { Console.Out.WriteLine("Wrote raw animation {0}", outpath); } } } if (animEncoding != '+' && animWriter != null) { outpath = string.Format("{0}Animations{1}{2:X12}{1}{5}{1}{3:X12}.{4}", outputPath, Path.DirectorySeparatorChar, GUID.Index(parent), GUID.LongKey(key), animWriter.Format, 
// (argument list continues) converted animation output, sound dump, texture and
// material export with a per-material texture cache.
anim.Header.priority); using (Stream outp = File.Open(outpath, FileMode.Create, FileAccess.Write)) { animWriter.Write(anim, outp); if (!quiet) { Console.Out.WriteLine("Wrote animation {0}", outpath); } } } } } if (!flags.SkipSound) { Console.Out.WriteLine("Dumping sounds..."); string soundPath = $"{outputPath}Sounds{Path.DirectorySeparatorChar}"; if (!Directory.Exists(soundPath)) { Directory.CreateDirectory(soundPath); } DumpVoice.Save(soundPath, soundData, map, handler, quiet); } if (!flags.SkipTextures) { foreach (KeyValuePair <ulong, List <string> > matpair in materials) { Dictionary <ulong, List <ImageLayer> > tmp = new Dictionary <ulong, List <ImageLayer> >(); if (cache.ContainsKey(matpair.Key)) { tmp = cache[matpair.Key]; } else { Skin.FindTextures(matpair.Key, tmp, new Dictionary <ulong, ulong>(), new HashSet <ulong>(), map, handler); cache.Add(matpair.Key, tmp); } Dictionary <string, TextureType> types = new Dictionary <string, TextureType>(); foreach (KeyValuePair <ulong, List <ImageLayer> > kv in tmp) { ulong materialId = kv.Key; List <ImageLayer> sublayers = kv.Value; HashSet <ulong> materialParsed = new HashSet <ulong>(); foreach (ImageLayer layer in sublayers) { if (!materialParsed.Add(layer.Key)) { continue; } KeyValuePair <string, TextureType> pair = Skin.SaveTexture(layer.Key, materialId, map, handler, outputPath, quiet, $"Textures/{GUID.Index(matpair.Key):X8}"); if (pair.Key == null) { continue; } types.Add(pair.Key, pair.Value); } } foreach (string matOutput in matpair.Value) { if (File.Exists($"{outputPath}{matOutput}")) { continue; } using (Stream outputStream = File.Open($"{outputPath}{matOutput}", FileMode.Create, FileAccess.Write)) { if (owmat.Write(null, outputStream, null, tmp, new object[1] { types })) { if (!quiet) { Console.Out.WriteLine("Wrote material {0}", matOutput); } } else { if (!quiet) { Console.Out.WriteLine("Failed to write material"); } } } } } } } } } }
// Exports the model as a Wavefront OBJ file.
// opts layout: [1] material library path (emitted as "mtllib"),
//              [3] only export the first LOD group found,
//              [4] skip collision meshes.
// Returns false when the geometry chunk ("MNRM") is missing.
public bool Write(Chunked chunked, Stream output, List <byte> LODs, Dictionary <ulong, List <ImageLayer> > layers, object[] opts) {
    IChunk chunk = chunked.FindNextChunk("MNRM").Value;
    if (chunk == null) {
        return false;
    }
    MNRM model = (MNRM)chunk;

    // Optional material map; when present, submesh material indices resolve to material GUIDs.
    chunk = chunked.FindNextChunk("CLDM").Value;
    CLDM materials = null;
    if (chunk != null) {
        materials = (CLDM)chunk;
    }

    // OBJ requires '.' as the decimal separator regardless of the current culture.
    NumberFormatInfo numberFormatInfo = new NumberFormatInfo();
    numberFormatInfo.NumberDecimalSeparator = ".";

    using (StreamWriter writer = new StreamWriter(output)) {
        uint faceOffset = 1; // OBJ face indices are 1-based and global across submeshes
        if (opts.Length > 1 && opts[1] is string materialPath && materialPath.Length > 0) {
            writer.WriteLine("mtllib {0}", materialPath);
        }

        // Group submesh indices by LOD, honoring the LOD filter and the skip-collision option.
        Dictionary <byte, List <int> > LODMap = new Dictionary <byte, List <int> >();
        for (int i = 0; i < model.Submeshes.Length; ++i) {
            SubmeshDescriptor submesh = model.Submeshes[i];
            if (opts.Length > 4 && opts[4] is bool skipCollision && skipCollision) {
                if (submesh.flags == SubmeshFlags.COLLISION_MESH) {
                    continue;
                }
            }
            if (LODs != null && !LODs.Contains(submesh.lod)) {
                continue;
            }
            if (!LODMap.ContainsKey(submesh.lod)) {
                LODMap.Add(submesh.lod, new List <int>());
            }
            LODMap[submesh.lod].Add(i);
        }

        foreach (KeyValuePair <byte, List <int> > kv in LODMap) {
            writer.WriteLine("o Submesh_{0}", kv.Key);
            foreach (int i in kv.Value) {
                SubmeshDescriptor submesh = model.Submeshes[i];
                if (materials != null) {
                    writer.WriteLine("g Material_{0:X16}", materials.Materials[submesh.material]);
                    writer.WriteLine("usemtl {0:X16}", materials.Materials[submesh.material]);
                } else {
                    writer.WriteLine("g Material_{0}", i);
                }
                ModelVertex[] vertex = model.Vertices[i];
                ModelVertex[] normal = model.Normals[i];
                ModelUV[][] uvs = model.TextureCoordinates[i];
                ModelUV[] uv = uvs[0]; // primary UV layer
                ModelIndice[] index = model.Indices[i];

                // Positions. BUGFIX: formatted through numberFormatInfo so output is
                // culture-invariant (previously used the current culture, unlike "vt").
                for (int j = 0; j < vertex.Length; ++j) {
                    writer.WriteLine("v {0} {1} {2}", vertex[j].x.ToString(numberFormatInfo), vertex[j].y.ToString(numberFormatInfo), vertex[j].z.ToString(numberFormatInfo));
                }
                for (int j = 0; j < vertex.Length; ++j) {
                    writer.WriteLine("vt {0} {1}", uv[j].u.ToString("0.######", numberFormatInfo), uv[j].v.ToString("0.######", numberFormatInfo));
                }
                if (uvs.Length > 1) {
                    // Secondary UV layers as non-standard "vtN u v" statements.
                    // BUGFIX: format was "vt{0} {0} {1}", which printed the layer index
                    // twice and dropped the v coordinate entirely.
                    for (int j = 0; j < uvs.Length; ++j) {
                        for (int k = 0; k < vertex.Length; ++k) {
                            writer.WriteLine("vt{0} {1} {2}", j, uvs[j][k].u.ToString("0.######", numberFormatInfo), uvs[j][k].v.ToString("0.######", numberFormatInfo));
                        }
                    }
                }
                // Normals (culture-invariant, see above).
                for (int j = 0; j < vertex.Length; ++j) {
                    writer.WriteLine("vn {0} {1} {2}", normal[j].x.ToString(numberFormatInfo), normal[j].y.ToString(numberFormatInfo), normal[j].z.ToString(numberFormatInfo));
                }
                writer.WriteLine("");
                // Faces reuse the same index for v/vt/vn.
                for (int j = 0; j < index.Length; ++j) {
                    writer.WriteLine("f {0}/{0}/{0} {1}/{1}/{1} {2}/{2}/{2}", index[j].v1 + faceOffset, index[j].v2 + faceOffset, index[j].v3 + faceOffset);
                }
                faceOffset += (uint)vertex.Length;
                writer.WriteLine("");
            }
            if (opts.Length > 3 && opts[3] is bool onlyOneLOD && onlyOneLOD) {
                break; // caller requested only the first LOD group
            }
        }
    }
    return true;
}
// Exports the model as an XNALara/XPS-style binary mesh (magic 323232, "XNAaraL").
// Layout: fixed header + one pose item, bone table, then per-submesh
// texture list, vertex block and triangle index block.
// opts layout: [3] only export first LOD found, [4] skip collision meshes.
// Returns false when the geometry chunk ("MNRM") is missing.
public bool Write(Chunked chunked, Stream stream, List <byte> LODs, Dictionary <ulong, List <ImageLayer> > layers, object[] opts) {
    // Required geometry chunk.
    IChunk chunk = chunked.FindNextChunk("MNRM").Value;
    if (chunk == null) {
        return(false);
    }
    MNRM model = (MNRM)chunk;
    // Optional material map chunk.
    chunk = chunked.FindNextChunk("CLDM").Value;
    CLDM materials = null;
    if (chunk != null) {
        materials = (CLDM)chunk;
    }
    // Optional skeleton chunk.
    chunk = chunked.FindNextChunk("lksm").Value;
    lksm skeleton = null;
    if (chunk != null) {
        skeleton = (lksm)chunk;
    }
    using (BinaryWriter writer = new BinaryWriter(stream)) {
        // Fixed header block.
        writer.Write((uint)323232);  // magic
        writer.Write((ushort)2);     // version major
        writer.Write((ushort)99);    // version minor
        WriteString(writer, "XNAaraL");
        writer.Write((uint)5);
        WriteString(writer, "OVERWATCH");
        WriteString(writer, "BLIZZARD");
        WriteString(writer, "NULL");
        writer.Write((uint)180); // hash
        writer.Write((uint)1);   // items
        // item 1
        writer.Write((uint)1); // type; 1 = pose; 2 = flags; 255 = padding
        // pose
        writer.Write((uint)0); // size pow 4
        writer.Write((uint)0); // op info; bone count
        /* pose data is always ASCII.
         * Each line is:
         * for each bone:
         * boneName:rotx roty rotz posx posy posz scalex scaley scalez
         */
        // Bone table: name, parent index, absolute translation.
        if (skeleton != null) {
            writer.Write((uint)skeleton.Data.bonesAbs);
            for (int i = 0; i < skeleton.Data.bonesAbs; ++i) {
                WriteString(writer, $"bone_{skeleton.IDs[i]:X4}");
                short parent = skeleton.Hierarchy[i];
                if (parent == -1) {
                    parent = (short)i; // no "no parent" sentinel in this format: roots parent to themselves
                }
                writer.Write(parent);
                OpenTK.Vector3 bonePos = skeleton.Matrices[i].ExtractTranslation();
                writer.Write(bonePos.X);
                writer.Write(bonePos.Y);
                writer.Write(bonePos.Z);
            }
        } else {
            writer.Write((uint)0); // no skeleton -> zero bones
        }
        // Group submesh indices by LOD, applying the LOD filter and options.
        Dictionary <byte, List <int> > LODMap = new Dictionary <byte, List <int> >();
        uint sz = 0;          // total submeshes kept
        uint lookForLod = 0;  // first LOD value seen (used by lodOnly mode)
        bool lodOnly = false;
        if (opts.Length > 3 && opts[3] != null && opts[3].GetType() == typeof(bool) && (bool)opts[3] == true) {
            lodOnly = true;
        }
        for (int i = 0; i < model.Submeshes.Length; ++i) {
            SubmeshDescriptor submesh = model.Submeshes[i];
            if (opts.Length > 4 && opts[4] != null && opts[4].GetType() == typeof(bool) && (bool)opts[4] == true) {
                if (submesh.flags == SubmeshFlags.COLLISION_MESH) {
                    continue;
                }
            }
            if (LODs != null && !LODs.Contains(submesh.lod)) {
                continue;
            }
            // In lodOnly mode, lock onto the first LOD encountered and skip others.
            if (lodOnly && lookForLod > 0 && submesh.lod != lookForLod) {
                continue;
            }
            if (!LODMap.ContainsKey(submesh.lod)) {
                LODMap.Add(submesh.lod, new List <int>());
            }
            lookForLod = submesh.lod;
            sz++;
            LODMap[submesh.lod].Add(i);
        }
        writer.Write(sz); // submesh count
        foreach (KeyValuePair <byte, List <int> > kv in LODMap) {
            foreach (int i in kv.Value) {
                SubmeshDescriptor submesh = model.Submeshes[i];
                ModelVertex[] vertex = model.Vertices[i];
                ModelVertex[] normal = model.Normals[i];
                ModelUV[][] uv = model.TextureCoordinates[i];
                ModelIndice[] index = model.Indices[i];
                ModelBoneData[] bones = model.Bones[i];
                // Resolve material index to a GUID via CLDM when available.
                ulong materialKey = submesh.material;
                if (materials != null) {
                    materialKey = materials.Materials[submesh.material];
                }
                WriteString(writer, $"Submesh_{i}.{kv.Key}.{materialKey:X16}");
                writer.Write((uint)uv.Length); // UV layer count
                // Texture list: the material's unique image layers when known,
                // otherwise one placeholder texture name per UV layer.
                if (layers.ContainsKey(materialKey)) {
                    List <ImageLayer> materialLayers = layers[materialKey];
                    uint count = 0;
                    HashSet <ulong> done = new HashSet <ulong>();
                    // First pass: count unique layer keys.
                    for (int j = 0; j < materialLayers.Count; ++j) {
                        if (done.Add(materialLayers[j].Key)) {
                            count += 1;
                        }
                    }
                    writer.Write(count);
                    done.Clear();
                    // Second pass: emit each unique layer once.
                    for (int j = 0; j < materialLayers.Count; ++j) {
                        if (done.Add(materialLayers[j].Key)) {
                            writer.Write($"{GUID.LongKey(materialLayers[j].Key):X12}.dds");
                            writer.Write((uint)0);
                        }
                    }
                } else {
                    writer.Write((uint)uv.Length);
                    for (int j = 0; j < uv.Length; ++j) {
                        writer.Write($"{materialKey:X16}_UV{j}.dds");
                        writer.Write((uint)j);
                    }
                }
                // Vertex block: position, flipped normal, 4 color bytes (opaque white),
                // one u/v pair per UV layer, then optional skinning data.
                writer.Write((uint)vertex.Length);
                for (int j = 0; j < vertex.Length; ++j) {
                    writer.Write(vertex[j].x);
                    writer.Write(vertex[j].y);
                    writer.Write(vertex[j].z);
                    writer.Write(-normal[j].x);
                    writer.Write(-normal[j].y);
                    writer.Write(-normal[j].z);
                    writer.Write((byte)255);
                    writer.Write((byte)255);
                    writer.Write((byte)255);
                    writer.Write((byte)255);
                    for (int k = 0; k < uv.Length; ++k) {
                        writer.Write((float)uv[k][j].u);
                        writer.Write((float)uv[k][j].v);
                    }
                    if (skeleton != null && skeleton.Data.bonesAbs > 0) {
                        if (bones != null && bones[j].boneIndex != null && bones[j].boneWeight != null) {
                            // 4 bone indices (remapped through the skeleton lookup) + 4 weights.
                            writer.Write(skeleton.Lookup[bones[j].boneIndex[0]]);
                            writer.Write(skeleton.Lookup[bones[j].boneIndex[1]]);
                            writer.Write(skeleton.Lookup[bones[j].boneIndex[2]]);
                            writer.Write(skeleton.Lookup[bones[j].boneIndex[3]]);
                            writer.Write(bones[j].boneWeight[0]);
                            writer.Write(bones[j].boneWeight[1]);
                            writer.Write(bones[j].boneWeight[2]);
                            writer.Write(bones[j].boneWeight[3]);
                        } else {
                            // Unskinned vertex in a skinned mesh: zeroed indices and weights.
                            writer.Write((ushort)0);
                            writer.Write((ushort)0);
                            writer.Write((ushort)0);
                            writer.Write((ushort)0);
                            writer.Write(0.0f);
                            writer.Write(0.0f);
                            writer.Write(0.0f);
                            writer.Write(0.0f);
                        }
                    }
                }
                // Index block: triangle count, then three 32-bit indices per triangle.
                writer.Write((uint)index.Length);
                for (int j = 0; j < index.Length; ++j) {
                    writer.Write((uint)index[j].v1);
                    writer.Write((uint)index[j].v2);
                    writer.Write((uint)index[j].v3);
                }
            }
        }
    }
    return(true);
}
// ReSharper disable once InconsistentNaming
// Exports the model as OWMDL version 1.4.
// data layout: [1] material reference path, [2] model name,
//              [3] only export first LOD found, [4] skip collision meshes.
// Returns false when the geometry chunk ("MNRM") is missing.
public bool Write(Chunked chunked, Stream output, List <byte> LODs, Dictionary <ulong, List <ImageLayer> > layers, object[] data) {
    // Required geometry chunk; all other chunks are optional.
    IChunk chunk = chunked.FindNextChunk("MNRM").Value;
    if (chunk == null) {
        return(false);
    }
    MNRM model = (MNRM)chunk;
    chunk = chunked.FindNextChunk("CLDM").Value;
    CLDM materials = null;
    if (chunk != null) {
        materials = (CLDM)chunk;
    }
    chunk = chunked.FindNextChunk("lksm").Value;
    lksm skeleton = null;
    if (chunk != null) {
        skeleton = (lksm)chunk;
    }
    chunk = chunked.FindNextChunk("PRHM").Value;
    PRHM hardpoints = null;
    if (chunk != null) {
        hardpoints = (PRHM)chunk;
    }
    // Work on a copy of the bone hierarchy: cloth processing rewrites parents below.
    short[] hierarchy = (short[])skeleton?.Hierarchy.Clone();
    // Maps skeleton bone index -> cloth node that drives it (used to override bone positions).
    Dictionary <int, HTLC.ClothNode> nodeMap = new Dictionary <int, HTLC.ClothNode>();
    if (chunked.FindNextChunk("HTLC").Value is HTLC cloth) {
        uint clothIndex = 0;
        foreach (HTLC.ClothNode[] nodeCollection in cloth.Nodes) {
            if (nodeCollection == null) {
                continue;
            }
            int nodeIndex = 0;
            foreach (HTLC.ClothNode node in nodeCollection) {
                int parentRaw = node.VerticalParent;
                if (cloth.NodeBones[clothIndex].ContainsKey(nodeIndex) && cloth.NodeBones[clothIndex].ContainsKey(parentRaw)) {
                    // Both node and its vertical parent map to skeleton bones:
                    // reparent the node's bone under the parent's bone.
                    if (cloth.NodeBones[clothIndex][nodeIndex] != -1) {
                        hierarchy[cloth.NodeBones[clothIndex][nodeIndex]] = cloth.NodeBones[clothIndex][parentRaw];
                        if (cloth.NodeBones[clothIndex][parentRaw] == -1) {
                            // Parent has no skeleton bone: fall back to the node's heaviest-weighted bone.
                            HTLC.ClothNodeWeight weightedBone = node.Bones.Aggregate((i1, i2) => i1.Weight > i2.Weight ? i1 : i2);
                            hierarchy[cloth.NodeBones[clothIndex][nodeIndex]] = weightedBone.Bone;
                        }
                    }
                } else {
                    if (cloth.NodeBones[clothIndex].ContainsKey(nodeIndex)) // if on main skele
                    {
                        if (cloth.NodeBones[clothIndex][nodeIndex] != -1) {
                            // No mapped vertical parent: parent to the heaviest-weighted bone.
                            hierarchy[cloth.NodeBones[clothIndex][nodeIndex]] = -1;
                            HTLC.ClothNodeWeight weightedBone = node.Bones.Aggregate((i1, i2) => i1.Weight > i2.Weight ? i1 : i2);
                            hierarchy[cloth.NodeBones[clothIndex][nodeIndex]] = weightedBone.Bone;
                        }
                    }
                }
                if (cloth.NodeBones[clothIndex].ContainsKey(nodeIndex)) {
                    if (cloth.NodeBones[clothIndex][nodeIndex] != -1) {
                        // Remember which cloth node drives this bone (position override below).
                        nodeMap[cloth.NodeBones[clothIndex][nodeIndex]] = node;
                    }
                }
                nodeIndex++;
            }
            clothIndex++;
        }
    }
    using (BinaryWriter writer = new BinaryWriter(output)) {
        writer.Write((ushort)1); // version major
        writer.Write((ushort)4); // version minor
        // Material reference path, or a single 0 byte (empty length-prefixed string).
        if (data.Length > 1 && data[1] is string && ((string)data[1]).Length > 0) {
            writer.Write((string)data[1]);
        } else {
            writer.Write((byte)0);
        }
        // Model name, or empty.
        if (data.Length > 2 && data[2] is string && ((string)data[2]).Length > 0) {
            writer.Write((string)data[2]);
        } else {
            writer.Write((byte)0);
        }
        if (skeleton == null) {
            writer.Write((ushort)0); // number of bones
        } else {
            writer.Write(skeleton.Data.bonesAbs);
        }
        // ReSharper disable once InconsistentNaming
        // Group submesh indices by LOD, applying the LOD filter and options.
        Dictionary <byte, List <int> > LODMap = new Dictionary <byte, List <int> >();
        uint sz = 0;
        uint lookForLod = 0;
        bool lodOnly = data.Length > 3 && data[3] is bool && (bool)data[3];
        for (int i = 0; i < model.Submeshes.Length; ++i) {
            SubmeshDescriptor submesh = model.Submeshes[i];
            if (data.Length > 4 && data[4] is bool && (bool)data[4]) {
                if (submesh.flags == SubmeshFlags.COLLISION_MESH) {
                    continue;
                }
            }
            if (LODs != null && !LODs.Contains(submesh.lod)) {
                continue;
            }
            if (lodOnly && lookForLod > 0 && submesh.lod != lookForLod) {
                continue;
            }
            if (!LODMap.ContainsKey(submesh.lod)) {
                LODMap.Add(submesh.lod, new List <int>());
            }
            lookForLod = submesh.lod;
            sz++;
            LODMap[submesh.lod].Add(i);
        }
        writer.Write(sz);                                 // submesh count
        writer.Write(hardpoints?.HardPoints.Length ?? 0); // attachment count
        // Bone table: name, parent, position (cloth-driven bones use the cloth node position),
        // scale and rotation quaternion.
        if (skeleton != null) {
            for (int i = 0; i < skeleton.Data.bonesAbs; ++i) {
                writer.Write(IdToString("bone", skeleton.IDs[i]));
                short parent = hierarchy[i];
                if (parent == -1) {
                    parent = (short)i; // roots parent to themselves in this format
                }
                writer.Write(parent);
                Matrix3x4 bone = skeleton.Matrices34[i];
                Quaternion rot = new Quaternion(bone[0, 0], bone[0, 1], bone[0, 2], bone[0, 3]);
                Vector3 scl = new Vector3(bone[1, 0], bone[1, 1], bone[1, 2]);
                Vector3 pos = new Vector3(bone[2, 0], bone[2, 1], bone[2, 2]);
                if (nodeMap.ContainsKey(i)) {
                    HTLC.ClothNode thisNode = nodeMap[i];
                    pos.X = thisNode.X;
                    pos.Y = thisNode.Y;
                    pos.Z = thisNode.Z;
                }
                writer.Write(pos.X);
                writer.Write(pos.Y);
                writer.Write(pos.Z);
                writer.Write(scl.X);
                writer.Write(scl.Y);
                writer.Write(scl.Z);
                writer.Write(rot.X);
                writer.Write(rot.Y);
                writer.Write(rot.Z);
                writer.Write(rot.W);
            }
        }
        // Submesh blocks: header, vertices (with optional skinning), triangles.
        foreach (KeyValuePair <byte, List <int> > kv in LODMap) {
            foreach (int i in kv.Value) {
                SubmeshDescriptor submesh = model.Submeshes[i];
                ModelVertex[] vertex = model.Vertices[i];
                ModelVertex[] normal = model.Normals[i];
                ModelUV[][] uv = model.TextureCoordinates[i];
                ModelIndice[] index = model.Indices[i];
                ModelBoneData[] bones = model.Bones[i];
                // NOTE(review): materials is dereferenced without a null check here, even
                // though the CLDM chunk is treated as optional above — confirm CLDM is
                // always present for models reaching this writer.
                writer.Write($"Submesh_{i}.{kv.Key}.{materials.Materials[submesh.material]:X16}");
                writer.Write(materials.Materials[submesh.material]);
                writer.Write((byte)uv.Length);
                writer.Write(vertex.Length);
                writer.Write(index.Length);
                for (int j = 0; j < vertex.Length; ++j) {
                    writer.Write(vertex[j].x);
                    writer.Write(vertex[j].y);
                    writer.Write(vertex[j].z);
                    // Normals are flipped on export.
                    writer.Write(-normal[j].x);
                    writer.Write(-normal[j].y);
                    writer.Write(-normal[j].z);
                    foreach (ModelUV[] t in uv) {
                        writer.Write(t[j].u);
                        writer.Write(t[j].v);
                    }
                    if (skeleton != null && bones != null && bones[j].boneIndex != null && bones[j].boneWeight != null) {
                        writer.Write((byte)4); // weight count
                        writer.Write(skeleton.Lookup[bones[j].boneIndex[0]]);
                        writer.Write(skeleton.Lookup[bones[j].boneIndex[1]]);
                        writer.Write(skeleton.Lookup[bones[j].boneIndex[2]]);
                        writer.Write(skeleton.Lookup[bones[j].boneIndex[3]]);
                        writer.Write(bones[j].boneWeight[0]);
                        writer.Write(bones[j].boneWeight[1]);
                        writer.Write(bones[j].boneWeight[2]);
                        writer.Write(bones[j].boneWeight[3]);
                    } else {
                        // bone -> size + index + weight
                        writer.Write((byte)0);
                    }
                }
                List <ModelIndiceModifiable> indexNew = new List <ModelIndiceModifiable>();
                foreach (ModelIndice indice in index) {
                    indexNew.Add(new ModelIndiceModifiable {
                        v1 = indice.v1,
                        v2 = indice.v2,
                        v3 = indice.v3
                    });
                }
                foreach (ModelIndiceModifiable indice in indexNew) {
                    writer.Write((byte)3); // indices per face (triangles)
                    writer.Write(indice.v1);
                    writer.Write(indice.v2);
                    writer.Write(indice.v3);
                }
            }
        }
        if (hardpoints != null) {
            // attachments
            foreach (PRHM.HardPoint hp in hardpoints.HardPoints) {
                writer.Write(IdToString("attachment_", GUID.Index(hp.HardPointGUID)));
                Matrix4 mat = hp.Matrix.ToOpenTK();
                Vector3 pos = mat.ExtractTranslation();
                Quaternion rot = mat.ExtractRotation();
                writer.Write(pos.X);
                writer.Write(pos.Y);
                writer.Write(pos.Z);
                writer.Write(rot.X);
                writer.Write(rot.Y);
                writer.Write(rot.Z);
                writer.Write(rot.W);
            }
            // extension 1.1: bone name per hardpoint
            foreach (PRHM.HardPoint hp in hardpoints.HardPoints) {
                writer.Write(IdToString("bone", GUID.Index(hp.GUIDx012)));
            }
        }
        // ext 1.3: cloth (none emitted by this version)
        writer.Write(0);
        // ext 1.4: embedded refpose (inverted bind matrices, euler rotation)
        if (skeleton != null) {
            for (int i = 0; i < skeleton.Data.bonesAbs; ++i) {
                writer.Write(IdToString("bone", skeleton.IDs[i]));
                short parent = hierarchy[i];
                // NOTE: unlike the bone table above, -1 parents are written as-is here.
                writer.Write(parent);
                Matrix3x4 bone = skeleton.Matrices34Inverted[i];
                // Quaternion component order differs from the bone table above (w,x,y,z).
                Quaternion3D quat = new Quaternion3D(bone[0, 3], bone[0, 0], bone[0, 1], bone[0, 2]);
                Vector3D rot = C3D.ToEulerAngles(quat);
                if (rot.X == -3.14159274f && rot.Y == 0 && rot.Z == 0) {
                    rot = new Vector3D(0, 3.14159274f, 3.14159274f); // effectively the same but you know, eulers.
                }
                Vector3 scl = new Vector3(bone[1, 0], bone[1, 1], bone[1, 2]);
                Vector3 pos = new Vector3(bone[2, 0], bone[2, 1], bone[2, 2]);
                writer.Write(pos.X);
                writer.Write(pos.Y);
                writer.Write(pos.Z);
                writer.Write(scl.X);
                writer.Write(scl.Y);
                writer.Write(scl.Z);
                writer.Write(rot.X);
                writer.Write(rot.Y);
                writer.Write(rot.Z);
            }
        }
    }
    return(true);
}
public bool Write(Chunked model, Stream output, List <byte> LODs, Dictionary <ulong, List <ImageLayer> > layers, object[] data) { return(false); }
// CLI entry point for the converter: loads every concrete IDataWriter in this
// assembly, then converts a single extracted model or animation file to the
// requested output format.
// Usage (model):     ConvertTool.exe file type [model args] output_file
// Usage (animation): ConvertTool.exe file type output_file
public static void Main(string[] args) {
    // Discover all concrete IDataWriter implementations via reflection.
    writers = new List <IDataWriter>();
    Assembly asm = typeof(IDataWriter).Assembly;
    Type t = typeof(IDataWriter);
    List <Type> types = asm.GetTypes().Where(tt => tt != t && t.IsAssignableFrom(tt)).ToList();
    foreach (Type tt in types) {
        if (tt.IsInterface) {
            continue;
        }
        writers.Add((IDataWriter)Activator.CreateInstance(tt));
    }
    if (args.Length < 3) {
        // Not enough arguments: print usage and the format table, then exit.
        Console.Out.WriteLine("Usage (model): ConvertTool.exe file type [model args] output_file");
        Console.Out.WriteLine("Usage (animation): ConvertTool.exe file type output_file");
        Console.Out.WriteLine("type can be:");
        // BUGFIX: column header previously read "supprt".
        Console.Out.WriteLine(" t - support - type - {0, -30} - normal extension", "name");
        Console.Out.WriteLine("".PadLeft(60, '-'));
        // List model-capable writers first, then everything else.
        foreach (IDataWriter w in writers) {
            if (!w.SupportLevel.HasFlag(WriterSupport.MODEL)) {
                continue;
            }
            Console.Out.WriteLine(" {0} - {1} - {2} - {3,-30} - {4}", w.Identifier[0], SupportLevel(w.SupportLevel), TypeLevel(w.SupportLevel), w.Name, w.Format);
        }
        foreach (IDataWriter w in writers) {
            if (w.SupportLevel.HasFlag(WriterSupport.MODEL)) {
                continue;
            }
            Console.Out.WriteLine(" {0} - {1} - {2} - {3,-30} - {4}", w.Identifier[0], SupportLevel(w.SupportLevel), TypeLevel(w.SupportLevel), w.Name, w.Format);
        }
        Console.Out.WriteLine("vutbpm = vertex / uv / attachment / bone / pose / material support");
        Console.Out.WriteLine("ampre = anim / model / map / refpose / material definition");
        Console.Out.WriteLine("model args:");
        Console.Out.WriteLine(" -l n - only save LOD, where N is lod");
        Console.Out.WriteLine(" -t - save attachment points (sockets)");
        Console.Out.WriteLine(" -L - only save first LOD found");
        Console.Out.WriteLine(" -c - save collision models");
        return;
    }
    Console.Out.WriteLine("{0} v{1}", Assembly.GetExecutingAssembly().GetName().Name, OWLib.Util.GetVersion());
    string dataFile = args[0];
    char type = args[1][0];
    string outputFile = args[args.Length - 1];
    // Pick the writer whose identifier contains the requested type character.
    IDataWriter writer = null;
    foreach (IDataWriter w in writers) {
        if (w.Identifier.Contains(type)) {
            writer = w;
            break;
        }
    }
    if (writer == null) {
        Console.Error.WriteLine("Unsupported format {0}", type);
        return;
    }
    Console.Out.WriteLine("Opening {0}", dataFile);
    using (Stream dataStream = File.Open(dataFile, FileMode.Open, FileAccess.Read)) {
        if (writer.SupportLevel.HasFlag(WriterSupport.ANIM)) {
            Console.Out.WriteLine("Converting animation...");
            Animation anim = new Animation(dataStream, false);
            using (Stream output = File.Open(outputFile, FileMode.Create, FileAccess.Write)) {
                if (writer.Write(anim, output, new object[] { })) {
                    Console.Out.WriteLine("Wrote animation");
                } else {
                    Console.Out.WriteLine("Failed to write animation");
                }
            }
        } else if (writer.SupportLevel.HasFlag(WriterSupport.MODEL)) {
            // Parse optional model flags between the type and the output path.
            List <byte> lods = null;
            bool attachments = false;
            bool firstLod = false;
            bool skipCmodel = true;
            if (args.Length > 3) {
                int i = 2;
                while (i < args.Length - 2) {
                    string arg = args[i];
                    ++i;
                    if (arg[0] != '-') {
                        continue;
                    }
                    switch (arg[1]) {
                        case 'l':
                            // -l n: restrict export to LOD n (repeatable; consumes the next argument).
                            if (lods == null) {
                                lods = new List <byte>();
                            }
                            lods.Add(byte.Parse(args[i], System.Globalization.NumberStyles.Number));
                            ++i;
                            break;
                        case 'L':
                            firstLod = true;
                            break;
                        case 't':
                            attachments = true;
                            break;
                        case 'c':
                            skipCmodel = false;
                            break;
                    }
                }
            }
            Chunked model = new Chunked(dataStream);
            using (Stream output = File.Open(outputFile, FileMode.Create, FileAccess.Write)) {
                Console.Out.WriteLine("Converting model...");
                // data layout: { exportAttachments, materialReference, modelName, onlyOneLOD, skipCollision }
                if (writer.Write(model, output, lods, new Dictionary <ulong, List <ImageLayer> >(), new object[] { attachments, null, null, firstLod, skipCmodel })) {
                    Console.Out.WriteLine("Wrote model");
                } else {
                    Console.Out.WriteLine("Failed to write model");
                }
            }
        } else {
            // BUGFIX: message previously read "Cant export."
            Console.Out.WriteLine("Can't export.");
        }
    }
}
// ReSharper disable once InconsistentNaming
// data is object[] { bool exportAttachments, string materialReference, string modelName, bool onlyOneLOD, bool skipCollision }
// Exports the model as OWMDL version 1.5 (adds model GUID and cloth 2.0 extensions
// over v1.4). LOD selection is driven by the --LOD extract flag rather than the
// LODs list parameter, which is unused by this version.
public void Write(ICLIFlags flags, Chunked chunked, Stream output, List <byte> LODs, object[] data, FindLogic.Combo.ModelInfoNew modelInfo) {
    byte?flagLOD = null;
    if (flags is ExtractFlags extractFlags) {
        flagLOD = extractFlags.LOD;
    }
    // Required geometry chunk; all other chunks are optional.
    IChunk chunk = chunked.FindNextChunk("MNRM").Value;
    if (chunk == null) {
        return;
    }
    MNRM model = (MNRM)chunk;
    chunk = chunked.FindNextChunk("CLDM").Value;
    CLDM materials = null;
    if (chunk != null) {
        materials = (CLDM)chunk;
    }
    chunk = chunked.FindNextChunk("lksm").Value;
    lksm skeleton = null;
    if (chunk != null) {
        skeleton = (lksm)chunk;
    }
    chunk = chunked.FindNextChunk("PRHM").Value;
    PRHM hardpoints = null;
    if (chunk != null) {
        hardpoints = (PRHM)chunk;
    }
    HTLC cloth = chunked.FindNextChunk("HTLC").Value as HTLC;
    // Work on a copy of the bone hierarchy: cloth processing rewrites parents below.
    short[] hierarchy = (short[])skeleton?.Hierarchy.Clone();
    // Maps skeleton bone index -> cloth node that drives it (position override below).
    Dictionary <int, HTLC.ClothNode> nodeMap = new Dictionary <int, HTLC.ClothNode>();
    if (cloth != null) {
        uint clothIndex = 0;
        foreach (HTLC.ClothNode[] nodeCollection in cloth.Nodes) {
            if (nodeCollection == null) {
                continue;
            }
            int nodeIndex = 0;
            foreach (HTLC.ClothNode node in nodeCollection) {
                int parentRaw = node.VerticalParent;
                if (cloth.NodeBones[clothIndex].ContainsKey(nodeIndex) && cloth.NodeBones[clothIndex].ContainsKey(parentRaw)) {
                    // Node and its vertical parent both map to skeleton bones: reparent.
                    if (cloth.NodeBones[clothIndex][nodeIndex] != -1) {
                        hierarchy[cloth.NodeBones[clothIndex][nodeIndex]] = cloth.NodeBones[clothIndex][parentRaw];
                        if (cloth.NodeBones[clothIndex][parentRaw] == -1) {
                            // Parent has no skeleton bone: fall back to the heaviest-weighted bone.
                            HTLC.ClothNodeWeight weightedBone = node.Bones.Aggregate((i1, i2) => i1.Weight > i2.Weight ? i1 : i2);
                            hierarchy[cloth.NodeBones[clothIndex][nodeIndex]] = weightedBone.Bone;
                        }
                    }
                } else {
                    if (cloth.NodeBones[clothIndex].ContainsKey(nodeIndex)) {
                        if (cloth.NodeBones[clothIndex][nodeIndex] != -1) {
                            // No mapped vertical parent: parent to the heaviest-weighted bone.
                            hierarchy[cloth.NodeBones[clothIndex][nodeIndex]] = -1;
                            HTLC.ClothNodeWeight weightedBone = node.Bones.Aggregate((i1, i2) => i1.Weight > i2.Weight ? i1 : i2);
                            hierarchy[cloth.NodeBones[clothIndex][nodeIndex]] = weightedBone.Bone;
                        }
                    }
                }
                if (cloth.NodeBones[clothIndex].ContainsKey(nodeIndex)) {
                    if (cloth.NodeBones[clothIndex][nodeIndex] != -1) {
                        nodeMap[cloth.NodeBones[clothIndex][nodeIndex]] = node;
                    }
                }
                nodeIndex++;
            }
            clothIndex++;
        }
    }
    using (BinaryWriter writer = new BinaryWriter(output)) {
        writer.Write((ushort)1); // version major
        writer.Write((ushort)5); // version minor
        // Material reference path, or a single 0 byte (empty length-prefixed string).
        if (data.Length > 1 && data[1] is string && ((string)data[1]).Length > 0) {
            writer.Write((string)data[1]);
        } else {
            writer.Write((byte)0);
        }
        // Model name, or empty.
        if (data.Length > 2 && data[2] is string && ((string)data[2]).Length > 0) {
            writer.Write((string)data[2]);
        } else {
            writer.Write((byte)0);
        }
        if (skeleton == null) {
            writer.Write((ushort)0); // number of bones
        } else {
            writer.Write(skeleton.Data.bonesAbs);
        }
        // ReSharper disable once InconsistentNaming
        Dictionary <byte, List <int> > LODMap = new Dictionary <byte, List <int> >();
        uint sz = 0;
        uint lookForLod = 0;
        // Resolve the requested LOD: exact match if it exists, otherwise fall back
        // to the next-lower real LOD, and failing that the next-higher one.
        if (model.Submeshes.Any(x => x.lod == flagLOD)) {
            lookForLod = (byte)flagLOD;
        } else if (flagLOD != null) {
            SubmeshDescriptor nextLowest = model.Submeshes.Where(p => p.lod < flagLOD).OrderBy(x => x.lod).LastOrDefault();
            if (nextLowest.verticesToDraw == 0 && nextLowest.indexCount == 0) // not real mesh
            {
                SubmeshDescriptor nextHighest = model.Submeshes.Where(p => p.lod > flagLOD).OrderBy(x => x.lod).FirstOrDefault();
                lookForLod = nextHighest.lod;
            } else {
                lookForLod = nextLowest.lod;
            }
        }
        // Group submesh indices by LOD; LOD 255 submeshes are always kept.
        for (int i = 0; i < model.Submeshes.Length; ++i) {
            SubmeshDescriptor submesh = model.Submeshes[i];
            if (data.Length > 4 && data[4] is bool && (bool)data[4]) {
                if (submesh.flags == SubmeshFlags.COLLISION_MESH) {
                    continue;
                }
            }
            if (lookForLod > 0 && submesh.lod != lookForLod && submesh.lod != 255) {
                continue;
            }
            if (!LODMap.ContainsKey(submesh.lod)) {
                LODMap.Add(submesh.lod, new List <int>());
            }
            sz++;
            LODMap[submesh.lod].Add(i);
        }
        writer.Write(sz);                                 // submesh count
        writer.Write(hardpoints?.HardPoints.Length ?? 0); // hardpoint count
        // Bone table: name, parent, position (cloth-driven bones use the cloth node
        // position), scale and rotation quaternion.
        if (skeleton != null) {
            for (int i = 0; i < skeleton.Data.bonesAbs; ++i) {
                writer.Write(IdToString("bone", skeleton.IDs[i]));
                short parent = hierarchy[i];
                if (parent == -1) {
                    parent = (short)i; // roots parent to themselves in this format
                }
                writer.Write(parent);
                Matrix3x4 bone = skeleton.Matrices34[i];
                Quaternion rot = new Quaternion(bone[0, 0], bone[0, 1], bone[0, 2], bone[0, 3]);
                Vector3 scl = new Vector3(bone[1, 0], bone[1, 1], bone[1, 2]);
                Vector3 pos = new Vector3(bone[2, 0], bone[2, 1], bone[2, 2]);
                if (nodeMap.ContainsKey(i)) {
                    HTLC.ClothNode thisNode = nodeMap[i];
                    pos.X = thisNode.X;
                    pos.Y = thisNode.Y;
                    pos.Z = thisNode.Z;
                }
                writer.Write(pos.X);
                writer.Write(pos.Y);
                writer.Write(pos.Z);
                writer.Write(scl.X);
                writer.Write(scl.Y);
                writer.Write(scl.Z);
                writer.Write(rot.X);
                writer.Write(rot.Y);
                writer.Write(rot.Z);
                writer.Write(rot.W);
            }
        }
        // Submesh blocks: header, vertices (with optional skinning), triangles.
        foreach (KeyValuePair <byte, List <int> > kv in LODMap) {
            foreach (int i in kv.Value) {
                SubmeshDescriptor submesh = model.Submeshes[i];
                ModelVertex[] vertex = model.Vertices[i];
                ModelVertex[] normal = model.Normals[i];
                ModelUV[][] uv = model.TextureCoordinates[i];
                ModelIndice[] index = model.Indices[i];
                ModelBoneData[] bones = model.Bones[i];
                // NOTE(review): materials is dereferenced without a null check here, even
                // though the CLDM chunk is treated as optional above — confirm CLDM is
                // always present for models reaching this writer.
                writer.Write($"Submesh_{i}.{kv.Key}.{materials.Materials[submesh.material]:X16}");
                writer.Write(materials.Materials[submesh.material]);
                writer.Write((byte)uv.Length);
                writer.Write(vertex.Length);
                writer.Write(index.Length);
                for (int j = 0; j < vertex.Length; ++j) {
                    writer.Write(vertex[j].x);
                    writer.Write(vertex[j].y);
                    writer.Write(vertex[j].z);
                    // Normals are flipped on export.
                    writer.Write(-normal[j].x);
                    writer.Write(-normal[j].y);
                    writer.Write(-normal[j].z);
                    foreach (ModelUV[] t in uv) {
                        writer.Write(t[j].u);
                        writer.Write(t[j].v);
                    }
                    if (skeleton != null && bones != null && bones[j].boneIndex != null && bones[j].boneWeight != null) {
                        writer.Write((byte)4); // weight count
                        writer.Write(skeleton.Lookup[bones[j].boneIndex[0]]);
                        writer.Write(skeleton.Lookup[bones[j].boneIndex[1]]);
                        writer.Write(skeleton.Lookup[bones[j].boneIndex[2]]);
                        writer.Write(skeleton.Lookup[bones[j].boneIndex[3]]);
                        writer.Write(bones[j].boneWeight[0]);
                        writer.Write(bones[j].boneWeight[1]);
                        writer.Write(bones[j].boneWeight[2]);
                        writer.Write(bones[j].boneWeight[3]);
                    } else {
                        // bone -> size + index + weight
                        writer.Write((byte)0);
                    }
                }
                List <ModelIndiceModifiable> indexNew = new List <ModelIndiceModifiable>();
                foreach (ModelIndice indice in index) {
                    indexNew.Add(new ModelIndiceModifiable {
                        v1 = indice.v1,
                        v2 = indice.v2,
                        v3 = indice.v3
                    });
                }
                foreach (ModelIndiceModifiable indice in indexNew) {
                    writer.Write((byte)3); // indices per face (triangles)
                    writer.Write(indice.v1);
                    writer.Write(indice.v2);
                    writer.Write(indice.v3);
                }
            }
        }
        if (hardpoints != null) {
            // attachments
            foreach (PRHM.HardPoint hp in hardpoints.HardPoints) {
                writer.Write(IdToString("hardpoint", GUID.Index(hp.HardPointGUID)));
                Matrix4 mat = hp.Matrix.ToOpenTK();
                Vector3 pos = mat.ExtractTranslation();
                Quaternion rot = mat.ExtractRotation();
                writer.Write(pos.X);
                writer.Write(pos.Y);
                writer.Write(pos.Z);
                writer.Write(rot.X);
                writer.Write(rot.Y);
                writer.Write(rot.Z);
                writer.Write(rot.W);
            }
            // extension 1.1: bone name per hardpoint
            foreach (PRHM.HardPoint hp in hardpoints.HardPoints) {
                writer.Write(IdToString("bone", GUID.Index(hp.GUIDx012)));
            }
        }
        // ext 1.3: cloth (superseded by the 1.6 section below, always zero here)
        writer.Write(0);
        // ext 1.4: embedded refpose (inverted bind matrices, euler rotation)
        if (skeleton != null) {
            for (int i = 0; i < skeleton.Data.bonesAbs; ++i) {
                writer.Write(IdToString("bone", skeleton.IDs[i]));
                short parent = hierarchy[i];
                // NOTE: unlike the bone table above, -1 parents are written as-is here.
                writer.Write(parent);
                Matrix3x4 bone = skeleton.Matrices34Inverted[i];
                // Quaternion component order here is (w,x,y,z), unlike the bone table above.
                Quaternion3D quat = new Quaternion3D(bone[0, 3], bone[0, 0], bone[0, 1], bone[0, 2]);
                Vector3D rot = C3D.ToEulerAngles(quat);
                // ReSharper disable CompareOfFloatsByEqualityOperator
                if (rot.X == -3.14159274f && rot.Y == 0 && rot.Z == 0) {
                    rot = new Vector3D(0, 3.14159274f, 3.14159274f); // effectively the same but you know, eulers.
                }
                // ReSharper restore CompareOfFloatsByEqualityOperator
                Vector3 scl = new Vector3(bone[1, 0], bone[1, 1], bone[1, 2]);
                Vector3 pos = new Vector3(bone[2, 0], bone[2, 1], bone[2, 2]);
                writer.Write(pos.X);
                writer.Write(pos.Y);
                writer.Write(pos.Z);
                writer.Write(scl.X);
                writer.Write(scl.Y);
                writer.Write(scl.Z);
                writer.Write(rot.X);
                writer.Write(rot.Y);
                writer.Write(rot.Z);
            }
        }
        // ext 1.5: guid
        writer.Write(GUID.Index(modelInfo.GUID));
        // ext 1.6: cloth 2.0 — per descriptor: name, node count, then per node the
        // bone/weight pairs.
        if (cloth == null) {
            writer.Write(0);
        } else {
            writer.Write(cloth.Descriptors.Length);
            for (int i = 0; i < cloth.Descriptors.Length; i++) {
                var desc = cloth.Descriptors[i];
                writer.Write(desc.Name);
                writer.Write(cloth.Nodes[i].Length);
                foreach (HTLC.ClothNode clothNode in cloth.Nodes[i]) {
                    writer.Write(clothNode.Bones.Length);
                    foreach (HTLC.ClothNodeWeight clothNodeWeight in clothNode.Bones) {
                        writer.Write(clothNodeWeight.Bone);
                        writer.Write(clothNodeWeight.Weight);
                    }
                }
            }
        }
    }
}
// Exports the model as OWMDL version 1.1.
// data layout: [1] material reference path, [2] model name,
//              [3] only export first LOD found, [4] skip collision meshes.
// Returns false when the geometry chunk ("MNRM") is missing.
public bool Write(Chunked chunked, Stream output, List <byte> LODs, Dictionary <ulong, List <ImageLayer> > layers, object[] data) {
    // Required geometry chunk; all other chunks are optional.
    IChunk chunk = chunked.FindNextChunk("MNRM").Value;
    if (chunk == null) {
        return false;
    }
    MNRM model = (MNRM)chunk;
    chunk = chunked.FindNextChunk("CLDM").Value;
    CLDM materials = null;
    if (chunk != null) {
        materials = (CLDM)chunk;
    }
    chunk = chunked.FindNextChunk("lksm").Value;
    lksm skeleton = null;
    if (chunk != null) {
        skeleton = (lksm)chunk;
    }
    chunk = chunked.FindNextChunk("PRHM").Value;
    PRHM hardpoints = null;
    if (chunk != null) {
        hardpoints = (PRHM)chunk;
    }
    using (BinaryWriter writer = new BinaryWriter(output)) {
        writer.Write((ushort)1); // version major
        writer.Write((ushort)1); // version minor
        // Material reference path, or a single 0 byte (empty length-prefixed string).
        if (data.Length > 1 && data[1] is string materialReference && materialReference.Length > 0) {
            writer.Write(materialReference);
        } else {
            writer.Write((byte)0);
        }
        // Model name, or empty.
        if (data.Length > 2 && data[2] is string modelName && modelName.Length > 0) {
            writer.Write(modelName);
        } else {
            writer.Write((byte)0);
        }
        if (skeleton == null) {
            writer.Write((ushort)0); // number of bones
        } else {
            writer.Write(skeleton.Data.bonesAbs);
        }
        // Group submesh indices by LOD, applying the LOD filter and options.
        Dictionary <byte, List <int> > LODMap = new Dictionary <byte, List <int> >();
        uint sz = 0;
        uint lookForLod = 0;
        bool lodOnly = data.Length > 3 && data[3] is bool onlyOneLOD && onlyOneLOD;
        for (int i = 0; i < model.Submeshes.Length; ++i) {
            SubmeshDescriptor submesh = model.Submeshes[i];
            if (data.Length > 4 && data[4] is bool skipCollision && skipCollision) {
                if ((SubmeshFlags)submesh.flags == SubmeshFlags.COLLISION_MESH) {
                    continue;
                }
            }
            if (LODs != null && !LODs.Contains(submesh.lod)) {
                continue;
            }
            // In lodOnly mode, lock onto the first LOD encountered and skip others.
            if (lodOnly && lookForLod > 0 && submesh.lod != lookForLod) {
                continue;
            }
            if (!LODMap.ContainsKey(submesh.lod)) {
                LODMap.Add(submesh.lod, new List <int>());
            }
            lookForLod = submesh.lod;
            sz++;
            LODMap[submesh.lod].Add(i);
        }
        writer.Write(sz); // submesh count
        if (hardpoints != null) {
            writer.Write(hardpoints.HardPoints.Length);
        } else {
            writer.Write(0); // number of attachments
        }
        // Bone table: name, parent, position, scale, rotation quaternion.
        if (skeleton != null) {
            for (int i = 0; i < skeleton.Data.bonesAbs; ++i) {
                writer.Write(IdToString("bone", skeleton.IDs[i]));
                short parent = skeleton.Hierarchy[i];
                if (parent == -1) {
                    parent = (short)i; // roots parent to themselves in this format
                }
                writer.Write(parent);
                Matrix3x4 bone = skeleton.Matrices34[i];
                Quaternion rot = new Quaternion(bone[0, 0], bone[0, 1], bone[0, 2], bone[0, 3]);
                Vector3 scl = new Vector3(bone[1, 0], bone[1, 1], bone[1, 2]);
                Vector3 pos = new Vector3(bone[2, 0], bone[2, 1], bone[2, 2]);
                writer.Write(pos.X);
                writer.Write(pos.Y);
                writer.Write(pos.Z);
                // BUGFIX: scl.X was previously written three times, dropping the
                // Y and Z scale components from the output.
                writer.Write(scl.X);
                writer.Write(scl.Y);
                writer.Write(scl.Z);
                writer.Write(rot.X);
                writer.Write(rot.Y);
                writer.Write(rot.Z);
                writer.Write(rot.W);
            }
        }
        // Submesh blocks: header, vertices (with optional skinning), triangles.
        foreach (KeyValuePair <byte, List <int> > kv in LODMap) {
            foreach (int i in kv.Value) {
                SubmeshDescriptor submesh = model.Submeshes[i];
                ModelVertex[] vertex = model.Vertices[i];
                ModelVertex[] normal = model.Normals[i];
                ModelUV[][] uv = model.TextureCoordinates[i];
                ModelIndice[] index = model.Indices[i];
                ModelBoneData[] bones = model.Bones[i];
                // NOTE(review): materials is dereferenced without a null check here, even
                // though the CLDM chunk is treated as optional above — confirm CLDM is
                // always present for models reaching this writer.
                writer.Write($"Submesh_{i}.{kv.Key}.{materials.Materials[submesh.material]:X16}");
                writer.Write(materials.Materials[submesh.material]);
                writer.Write((byte)uv.Length);
                writer.Write(vertex.Length);
                writer.Write(index.Length);
                for (int j = 0; j < vertex.Length; ++j) {
                    writer.Write(vertex[j].x);
                    writer.Write(vertex[j].y);
                    writer.Write(vertex[j].z);
                    // Normals are flipped on export.
                    writer.Write(-normal[j].x);
                    writer.Write(-normal[j].y);
                    writer.Write(-normal[j].z);
                    for (int k = 0; k < uv.Length; ++k) {
                        writer.Write((float)uv[k][j].u);
                        writer.Write((float)uv[k][j].v);
                    }
                    if (skeleton != null && bones != null && bones[j].boneIndex != null && bones[j].boneWeight != null) {
                        writer.Write((byte)4); // weight count
                        writer.Write(skeleton.Lookup[bones[j].boneIndex[0]]);
                        writer.Write(skeleton.Lookup[bones[j].boneIndex[1]]);
                        writer.Write(skeleton.Lookup[bones[j].boneIndex[2]]);
                        writer.Write(skeleton.Lookup[bones[j].boneIndex[3]]);
                        writer.Write(bones[j].boneWeight[0]);
                        writer.Write(bones[j].boneWeight[1]);
                        writer.Write(bones[j].boneWeight[2]);
                        writer.Write(bones[j].boneWeight[3]);
                    } else {
                        // bone -> size + index + weight
                        writer.Write((byte)0);
                    }
                }
                for (int j = 0; j < index.Length; ++j) {
                    writer.Write((byte)3); // indices per face (triangles)
                    writer.Write((int)index[j].v1);
                    writer.Write((int)index[j].v2);
                    writer.Write((int)index[j].v3);
                }
            }
        }
        if (hardpoints != null) {
            // attachments
            for (int i = 0; i < hardpoints.HardPoints.Length; ++i) {
                PRHM.HardPoint hp = hardpoints.HardPoints[i];
                writer.Write(IdToString("attachment_", hp.id));
                Matrix4 mat = hp.matrix.ToOpenTK();
                Vector3 pos = mat.ExtractTranslation();
                Quaternion rot = mat.ExtractRotation();
                writer.Write(pos.X);
                writer.Write(pos.Y);
                writer.Write(pos.Z);
                writer.Write(rot.X);
                writer.Write(rot.Y);
                writer.Write(rot.Z);
                writer.Write(rot.W);
            }
            // extension 1.1: bone name per attachment
            for (int i = 0; i < hardpoints.HardPoints.Length; ++i) {
                PRHM.HardPoint hp = hardpoints.HardPoints[i];
                writer.Write(IdToString("bone", hp.id));
            }
        }
    }
    return true;
}
/// <summary>
/// Exports the model in <paramref name="chunked"/> to <paramref name="output"/> as a
/// line-based ASCII mesh: bone list first, then per-LOD submeshes with texture names,
/// vertices and triangle indices. (Layout appears to be the XNALara/XPS ASCII mesh
/// format — TODO confirm against a consumer.)
/// </summary>
/// <param name="chunked">Chunked model data; an MNRM (geometry) chunk is mandatory.</param>
/// <param name="output">Destination stream. NOTE(review): the StreamWriter using-block
/// disposes this stream when writing finishes — confirm callers expect that.</param>
/// <param name="LODs">Whitelist of LOD levels to export, or null for all LODs.</param>
/// <param name="layers">Texture layers keyed by material GUID; used to emit texture file names.</param>
/// <param name="opts">Positional options: opts[3] == true latches onto the first LOD seen
/// and skips all others; opts[4] == true skips collision meshes.</param>
/// <returns>false when no MNRM chunk is present; true otherwise.</returns>
public bool Write(Chunked chunked, Stream output, List <byte> LODs, Dictionary <ulong, List <ImageLayer> > layers, object[] opts) {
    // Force "." as the decimal separator so the ASCII output parses the same on every OS locale.
    culture.NumberFormat.NumberDecimalSeparator = ".";
    System.Threading.Thread.CurrentThread.CurrentCulture = culture;
    // MNRM (geometry) is required; CLDM (materials) and lksm (skeleton) are optional.
    IChunk chunk = chunked.FindNextChunk("MNRM").Value;
    if (chunk == null) {
        return(false);
    }
    MNRM model = (MNRM)chunk;
    chunk = chunked.FindNextChunk("CLDM").Value;
    CLDM materials = null;
    if (chunk != null) {
        materials = (CLDM)chunk;
    }
    chunk = chunked.FindNextChunk("lksm").Value;
    lksm skeleton = null;
    if (chunk != null) {
        skeleton = (lksm)chunk;
    }
    //Console.Out.WriteLine("Writing ASCII");
    using (StreamWriter writer = new StreamWriter(output)) {
        // --- Bone section: count, then per bone: name / parent index / position ---
        if (skeleton != null) {
            writer.WriteLine(skeleton.Data.bonesAbs);
            for (int i = 0; i < skeleton.Data.bonesAbs; ++i) {
                writer.WriteLine("bone_{0:X4}", skeleton.IDs[i]);
                writer.WriteLine(skeleton.Hierarchy[i]);
                OpenTK.Vector3 bonePos = skeleton.Matrices[i].ExtractTranslation();
                writer.WriteLine("{0:0.000000} {1:0.000000} {2:0.000000}", bonePos.X, bonePos.Y, bonePos.Z);
            }
        } else {
            writer.WriteLine("0"); // no skeleton -> zero bones
        }
        // --- Select submeshes, grouped by LOD level ---
        Dictionary <byte, List <int> > LODMap = new Dictionary <byte, List <int> >();
        uint sz = 0;         // total submeshes selected for export
        uint lookForLod = 0; // LOD latched by the lodOnly option (0 = none yet)
        bool lodOnly = false;
        if (opts.Length > 3 && opts[3] != null && opts[3].GetType() == typeof(bool) && (bool)opts[3] == true) {
            lodOnly = true;
        }
        for (int i = 0; i < model.Submeshes.Length; ++i) {
            SubmeshDescriptor submesh = model.Submeshes[i];
            // opts[4]: drop collision geometry.
            if (opts.Length > 4 && opts[4] != null && opts[4].GetType() == typeof(bool) && (bool)opts[4] == true) {
                if (submesh.flags == SubmeshFlags.COLLISION_MESH) {
                    continue;
                }
            }
            if (LODs != null && !LODs.Contains(submesh.lod)) {
                continue;
            }
            // lodOnly: after the first LOD has been latched, skip submeshes of any other LOD.
            if (lodOnly && lookForLod > 0 && submesh.lod != lookForLod) {
                continue;
            }
            if (!LODMap.ContainsKey(submesh.lod)) {
                LODMap.Add(submesh.lod, new List <int>());
            }
            lookForLod = submesh.lod;
            sz++;
            LODMap[submesh.lod].Add(i);
        }
        writer.WriteLine(sz);
        // --- Per-submesh sections ---
        foreach (KeyValuePair <byte, List <int> > kv in LODMap) {
            //Console.Out.WriteLine("Writing LOD {0}", kv.Key);
            foreach (int i in kv.Value) {
                SubmeshDescriptor submesh = model.Submeshes[i];
                ModelVertex[] vertex = model.Vertices[i];
                ModelVertex[] normal = model.Normals[i];
                ModelUV[][] uv = model.TextureCoordinates[i];
                ModelIndice[] index = model.Indices[i];
                ModelBoneData[] bones = model.Bones[i];
                // Resolve the material reference through CLDM when a material chunk exists.
                ulong materialKey = submesh.material;
                if (materials != null) {
                    materialKey = materials.Materials[submesh.material];
                }
                writer.WriteLine("Submesh_{0}.{1}.{2:X16}", i, kv.Key, materialKey);
                writer.WriteLine(uv.Length); // UV layer count
                if (layers.ContainsKey(materialKey)) {
                    // Known material: texture count first, then each distinct texture exactly once
                    // (the HashSet deduplicates; the two passes keep count and entries consistent).
                    List <ImageLayer> materialLayers = layers[materialKey];
                    uint count = 0;
                    HashSet <ulong> done = new HashSet <ulong>();
                    for (int j = 0; j < materialLayers.Count; ++j) {
                        if (done.Add(materialLayers[j].Key)) {
                            count += 1;
                        }
                    }
                    writer.WriteLine(count);
                    done.Clear();
                    for (int j = 0; j < materialLayers.Count; ++j) {
                        if (done.Add(materialLayers[j].Key)) {
                            writer.WriteLine($"{GUID.LongKey(materialLayers[j].Key):X12}.dds");
                            writer.WriteLine(0);
                        }
                    }
                } else {
                    // Unknown material: emit one placeholder texture name per UV layer.
                    writer.WriteLine(uv.Length);
                    for (int j = 0; j < uv.Length; ++j) {
                        writer.WriteLine("{0:X16}_UV{1}.dds", materialKey, j);
                        writer.WriteLine(j);
                    }
                }
                // Vertices: position, negated normal, constant colour line, UVs, optional bone data.
                writer.WriteLine(vertex.Length);
                for (int j = 0; j < vertex.Length; ++j) {
                    writer.WriteLine("{0} {1} {2}", vertex[j].x, vertex[j].y, vertex[j].z);
                    writer.WriteLine("{0} {1} {2}", -normal[j].x, -normal[j].y, -normal[j].z); // normals negated on export
                    writer.WriteLine("255 255 255 255"); // fixed per-vertex line (presumably RGBA vertex colour — confirm)
                    for (int k = 0; k < uv.Length; ++k) {
                        writer.WriteLine("{0:0.######} {1:0.######}", uv[k][j].u, uv[k][j].v);
                    }
                    if (skeleton != null && skeleton.Data.bonesAbs > 0) {
                        if (bones != null && bones[j].boneIndex != null && bones[j].boneWeight != null) {
                            // Four bone indices (remapped via skeleton.Lookup) followed by four weights.
                            writer.WriteLine("{0} {1} {2} {3}", skeleton.Lookup[bones[j].boneIndex[0]], skeleton.Lookup[bones[j].boneIndex[1]], skeleton.Lookup[bones[j].boneIndex[2]], skeleton.Lookup[bones[j].boneIndex[3]]);
                            writer.WriteLine("{0:0.######} {1:0.######} {2:0.######} {3:0.######}", bones[j].boneWeight[0], bones[j].boneWeight[1], bones[j].boneWeight[2], bones[j].boneWeight[3]);
                        } else {
                            // Skinned model but this vertex carries no bone data: zeroed entries.
                            writer.WriteLine("0 0 0 0");
                            writer.WriteLine("0 0 0 0");
                        }
                    }
                }
                // Faces: triangle count, then one "v1 v2 v3" line per triangle.
                writer.WriteLine(index.Length);
                for (int j = 0; j < index.Length; ++j) {
                    writer.WriteLine("{0} {1} {2}", index[j].v1, index[j].v2, index[j].v3);
                }
            }
        }
        writer.WriteLine("");
    }
    return(true);
}
// Native interop entry point; presumably retrieves a sparse entry from a chunked
// container (name-based inference — confirm against the native library).
// NOTE(review): `static extern` requires a [DllImport] attribute, which is not visible
// in this span; also passing a managed `Chunked` reference across P/Invoke is unusual
// and worth verifying.
public static extern IntPtr chunked_get_sparse(Chunked chunked, uint size, uint index);
/// <summary>
/// Layer-based write variant; this writer does not support it.
/// </summary>
/// <exception cref="NotImplementedException">Always thrown.</exception>
public bool Write(Chunked model, Stream output, List <byte> LODs, Dictionary <ulong, List <ImageLayer> > layers, params object[] data) =>
    throw new NotImplementedException();
// Native interop entry point; presumably removes an entry from a chunked container
// (name-based inference — confirm against the native library).
// NOTE(review): `static extern` requires a [DllImport] attribute, which is not visible
// in this span; verify the declaration carries one.
public static extern IntPtr chunked_remove(Chunked chunked, uint size, uint index);
/// <summary>
/// Dispatches one effect chunk to its matching Add* handler, accumulating results on
/// <paramref name="effectInfo"/>.
/// </summary>
/// <param name="effectInfo">Effect accumulator to populate; the call is a no-op when null.</param>
/// <param name="chunk">Playback info paired with the chunk instance; no-op when the chunk value is null.</param>
/// <param name="replacements">GUID replacement map; treated as empty when null.</param>
public void Process(EffectInfo effectInfo, KeyValuePair <ChunkPlaybackInfo, IChunk> chunk, Dictionary <ulong, ulong> replacements) {
    // todo: STUVoiceStimulus has f3099f20/m_volume
    // probably more stuff too

    // hey have some notes about particles:
    // 000000003CEC.006 - 000000001D3D.08F = ana - guardian:
    //     one RPCE, 61 chunks
    //     seems to be at correct position with rpce at rot: x=90
    // 000000003796.006 - 000000001A31.08F = genji - warrior's salute:
    //     one RPCE, 64 chunks.

    // VCCE might be a texture/material transform
    // A B C D = R G B A
    // see 'extract-debug-vcce'
    if (effectInfo == null) {
        return;
    }
    if (chunk.Value == null) {
        return;
    }
    if (replacements == null) {
        replacements = new Dictionary <ulong, ulong>();
    }
    // Type-pattern switch replaces the previous chain of
    // `GetType() == typeof(X)` checks plus redundant `as`-casts: at most one branch
    // can match, and the dead `null` re-checks after each cast are gone.
    switch (chunk.Value) {
        case TCFE tcfe:
            effectInfo.EffectLength = tcfe.Data.EndTime1;
            break;
        case DMCE dmce:
            AddDMCE(effectInfo, dmce, chunk.Key, replacements);
            break;
        case CECE cece:
            AddCECE(effectInfo, cece, chunk.Key, replacements);
            break;
        case OSCE osce:
            AddOSCE(effectInfo, osce, chunk.Key, replacements);
            break;
        case FECE fece: {
            // Sub-effect reference: recursively parse the referenced effect file
            // (after applying any GUID replacement) before registering the FECE itself.
            EffectInfo feceInfo = null;
            ulong effectKey = fece.Data.Effect;
            if (replacements.ContainsKey(fece.Data.Effect)) {
                effectKey = replacements[fece.Data.Effect];
            }
            using (Stream feceStream = IO.OpenFile(effectKey)) {
                if (feceStream != null) {
                    using (Chunked feceChunkednew = new Chunked(feceStream)) {
                        EffectParser sub = new EffectParser(feceChunkednew, fece.Data.Effect);
                        feceInfo = sub.ProcessAll(replacements);
                    }
                }
            }
            AddFECE(effectInfo, fece.Data.Effect, feceInfo, chunk.Key, replacements);
            break;
        }
        case NECE nece:
            AddNECE(effectInfo, nece, chunk.Key, replacements);
            break;
        case RPCE rpce:
            AddRPCE(effectInfo, rpce, chunk.Key, replacements);
            break;
        case SSCE ssce:
            // SSCE handler receives the runtime type of the preceding chunk (may be null).
            AddSSCE(effectInfo, ssce, chunk.Key.PreviousChunk?.GetType(), replacements);
            break;
        case SVCE svce:
            AddSVCE(effectInfo, svce, chunk.Key, replacements);
            break;
    }
}