public SceneDataLoader(string fileName, WWorld world)
{
    m_world = world;
    m_reader = new EndianBinaryReader(File.ReadAllBytes(fileName), System.Text.Encoding.ASCII, Endian.Big);
    m_chunkList = new List<ChunkHeader>();

    // The file begins with a count of chunk headers, followed by the headers themselves.
    int chunkCount = m_reader.ReadInt32();

    for (int i = 0; i < chunkCount; i++)
    {
        // FourCCToLayer maps a layered FourCC (e.g. a per-layer actor chunk) back to its base
        // form via the ref parameter and returns which layer the chunk belongs to.
        string fourCC = m_reader.ReadString(4);
        MapLayer layer = ChunkHeader.FourCCToLayer(ref fourCC);
        FourCC enumFourCC = FourCCConversion.GetEnumFromString(fourCC);

        ChunkHeader chunk = new ChunkHeader(enumFourCC, m_reader.ReadInt32(), m_reader.ReadInt32());
        chunk.Layer = layer;
        m_chunkList.Add(chunk);
    }

    // Sort the headers by their offset into the file so chunks can be processed in file order.
    var sortedList = m_chunkList.OrderBy(x => x.ChunkOffset);
    m_chunkList = new List<ChunkHeader>(sortedList);
}
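// Hypothetical usage sketch (not part of the original loader): once the headers are sorted,
// a caller could look up a specific chunk like this. GetChunk is an assumed helper name,
// shown only to illustrate how the chunk list might be consumed.
private ChunkHeader GetChunk(FourCC fourCC)
{
    // Returns the first header whose FourCC matches, or the type's default value if none do.
    return m_chunkList.Find(x => x.FourCC == fourCC);
}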
public void CreateEntity()
{
    if (!EditorSelection.SingleObjectSelected)
        return;

    WDOMNode selected = EditorSelection.PrimarySelectedObject;
    SerializableDOMNode newNode = null;

    if (selected is SerializableDOMNode)
    {
        // An entity is selected: create another entity of the same type under the same parent.
        SerializableDOMNode origNode = selected as SerializableDOMNode;
        Type selType = selected.GetType();

        newNode = (SerializableDOMNode)Activator.CreateInstance(selType, origNode.FourCC, m_world);
        newNode.PostLoad();
        newNode.SetParent(selected.Parent);

        if (origNode.Parent is WDOMLayeredGroupNode)
            newNode.Layer = origNode.Layer;
    }
    else if (selected is WDOMLayeredGroupNode)
    {
        // A layer node is selected: determine the concrete type it holds from its FourCC range.
        WDOMLayeredGroupNode lyrNode = selected as WDOMLayeredGroupNode;
        Type newObjType = null;

        if (lyrNode.FourCC >= FourCC.ACTR && lyrNode.FourCC <= FourCC.ACTb)
            newObjType = typeof(Actor);
        else if (lyrNode.FourCC >= FourCC.SCOB && lyrNode.FourCC <= FourCC.SCOb)
            newObjType = typeof(ScaleableObject);
        else if (lyrNode.FourCC >= FourCC.TRES && lyrNode.FourCC <= FourCC.TREb)
            newObjType = typeof(TreasureChest);

        string unlayedFourCC = lyrNode.FourCC.ToString();
        MapLayer layer = ChunkHeader.FourCCToLayer(ref unlayedFourCC);
        FourCC enumVal = FourCCConversion.GetEnumFromString(unlayedFourCC);

        newNode = (SerializableDOMNode)Activator.CreateInstance(newObjType, enumVal, m_world);
        newNode.Layer = layer;
        newNode.PostLoad();
        newNode.SetParent(lyrNode);
    }
    else if (selected is WDOMGroupNode)
    {
        WDOMGroupNode grpNode = selected as WDOMGroupNode;

        // ACTR/SCOB/TRES group nodes only contain layer nodes, so entities cannot be created on them directly.
        if (grpNode.FourCC == FourCC.ACTR || grpNode.FourCC == FourCC.SCOB || grpNode.FourCC == FourCC.TRES)
            return;

        Type newObjType = FourCCConversion.GetTypeFromEnum(grpNode.FourCC);

        newNode = (SerializableDOMNode)Activator.CreateInstance(newObjType, grpNode.FourCC, m_world);
        newNode.PostLoad();
        newNode.SetParent(grpNode);
    }
    else
    {
        return;
    }

    if (newNode != null)
    {
        EditorSelection.ClearSelection();
        EditorSelection.AddToSelection(newNode);
    }

    // ToDo: This can spawn specific classes the same way that the actor loader does.
}
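// The FourCC range checks above (e.g. >= FourCC.ACTR && <= FourCC.ACTb) only work if the enum
// declares each chunk type's layered variants contiguously, roughly as sketched below. This is
// an assumed shape for illustration only; the member names between the endpoints are not taken
// from the actual enum definition.
//
//     public enum FourCC
//     {
//         // ...
//         ACTR, ACT0, ACT1, /* ... */ ACTa, ACTb,   // actors: default chunk followed by its layered variants
//         SCOB, SCO0, SCO1, /* ... */ SCOa, SCOb,   // scaleable objects
//         TRES, TRE0, TRE1, /* ... */ TREa, TREb,   // treasure chests
//         // ... other, non-layered chunk types ...
//     }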
public void ExportToStream(EndianBinaryWriter writer, WScene scene)
{
    // Build a dictionary which lists unique FourCCs and a list of all relevant actors.
    var actorCategories = new Dictionary<string, List<WActorNode>>();
    foreach (var child in scene)
    {
        WActorNode actor = child as WActorNode;
        if (actor != null)
        {
            string fixedFourCC = ChunkHeader.LayerToFourCC(actor.FourCC, actor.Layer);

            if (!actorCategories.ContainsKey(fixedFourCC))
                actorCategories[fixedFourCC] = new List<WActorNode>();

            actorCategories[fixedFourCC].Add(actor);
        }
    }

    // Create a chunk header for each one.
    var chunkHeaders = new List<ChunkHeader>();
    foreach (var kvp in actorCategories)
    {
        ChunkHeader header = new ChunkHeader();
        header.FourCC = kvp.Key;
        header.ElementCount = kvp.Value.Count;

        chunkHeaders.Add(header);
    }

    long chunkStart = writer.BaseStream.Position;

    // Write the Header
    writer.Write(chunkHeaders.Count);
    for (int i = 0; i < chunkHeaders.Count; i++)
    {
        writer.Write((int)0); // Dummy placeholder values for the Chunk Header.
        writer.Write((int)0);
        writer.Write((int)0);
    }

    // For each chunk, write the data for that chunk. Before writing the data, get the current offset and update the header.
    List<WActorNode>[] dictionaryData = new List<WActorNode>[actorCategories.Count];
    actorCategories.Values.CopyTo(dictionaryData, 0);

    for (int i = 0; i < chunkHeaders.Count; i++)
    {
        ChunkHeader header = chunkHeaders[i];
        chunkHeaders[i] = new ChunkHeader(header.FourCC, header.ElementCount, (int)(writer.BaseStream.Position - chunkStart));

        List<WActorNode> actors = dictionaryData[i];
        foreach (var actor in actors)
        {
            MapActorDescriptor template = m_sActorDescriptors.Find(x => x.FourCC == actor.FourCC);
            if (template == null)
            {
                Console.WriteLine("Unsupported FourCC (\"{0}\") for exporting!", actor.FourCC);
                continue;
            }

            WriteActorToChunk(actor, template, writer);
        }
    }

    // Now that we've written every actor to file, we can go back and re-write the headers now that we know their offsets.
    writer.BaseStream.Position = chunkStart + 0x4; // 0x4 is the offset to the Chunk Headers
    foreach (var header in chunkHeaders)
    {
        writer.WriteFixedString(header.FourCC, 4); // FourCC
        writer.Write(header.ElementCount);         // Number of Entries
        writer.Write(header.ChunkOffset);          // Offset from start of file.
    }

    // Seek to the end of the file, and then pad us to 32-byte alignment.
    writer.BaseStream.Seek(0, SeekOrigin.End);
    int delta = WMath.Pad32Delta(writer.BaseStream.Position);
    for (int i = 0; i < delta; i++)
        writer.Write((byte)0xFF);
}
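// WMath.Pad32Delta is used above to align the end of the written data to a 32-byte boundary.
// Its implementation is not shown here; the sketch below is only an assumption consistent
// with how it is called (the real helper may differ):
//
//     public static int Pad32Delta(long position)
//     {
//         // Bytes needed to advance 'position' to the next multiple of 32 (0 if already aligned).
//         return (int)((32 - (position % 32)) % 32);
//     }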
public void ExportToStream(EndianBinaryWriter writer, WScene scene)
{
    // Build a dictionary which lists unique FourCCs and a list of all relevant actors.
    var actorCategories = new Dictionary<FourCC, List<SerializableDOMNode>>();
    foreach (var child in scene)
    {
        var groupNode = child as WDOMGroupNode;
        if (groupNode == null)
            continue;

        // If this is an ACTR, SCOB, or TRES group node, we have to dig into it to get the layers.
        if (groupNode.FourCC == FourCC.ACTR || groupNode.FourCC == FourCC.SCOB || groupNode.FourCC == FourCC.TRES)
        {
            foreach (var layer in groupNode.Children)
            {
                foreach (var obj in layer.Children)
                {
                    var actor = obj as SerializableDOMNode;
                    if (actor != null)
                        AddObjectToDictionary(actor, actorCategories);
                }
            }
        }
        else
        {
            foreach (var obj in groupNode.Children)
            {
                var actor = obj as SerializableDOMNode;
                if (actor != null)
                    AddObjectToDictionary(actor, actorCategories);
            }
        }
    }

    // Create a chunk header for each one.
    var chunkHeaders = new List<ChunkHeader>();
    foreach (var kvp in actorCategories)
    {
        ChunkHeader header = new ChunkHeader();
        header.FourCC = kvp.Key;
        header.ElementCount = kvp.Value.Count;

        chunkHeaders.Add(header);
    }

    long chunkStart = writer.BaseStream.Position;

    // Write the Header
    writer.Write(chunkHeaders.Count);
    for (int i = 0; i < chunkHeaders.Count; i++)
    {
        writer.Write((int)0); // Dummy placeholder values for the Chunk Header.
        writer.Write((int)0);
        writer.Write((int)0);
    }

    // For each chunk, write the data for that chunk. Before writing the data, get the current offset and update the header.
    List<SerializableDOMNode>[] dictionaryData = new List<SerializableDOMNode>[actorCategories.Count];
    actorCategories.Values.CopyTo(dictionaryData, 0);

    for (int i = 0; i < chunkHeaders.Count; i++)
    {
        ChunkHeader header = chunkHeaders[i];
        chunkHeaders[i] = new ChunkHeader(header.FourCC, header.ElementCount, (int)(writer.BaseStream.Position - chunkStart));

        List<SerializableDOMNode> actors = dictionaryData[i];
        foreach (var actor in actors)
        {
            MapActorDescriptor template = Globals.ActorDescriptors.Find(x => x.FourCC == actor.FourCC);
            if (template == null)
            {
                Console.WriteLine("Unsupported FourCC (\"{0}\") for exporting!", actor.FourCC);
                continue;
            }

            actor.PreSave();
            actor.Save(writer);
            //WriteActorToChunk(actor, template, writer);
        }
    }

    // Now that we've written every actor to file, we can go back and re-write the headers now that we know their offsets.
    writer.BaseStream.Position = chunkStart + 0x4; // 0x4 is the offset to the Chunk Headers
    foreach (var header in chunkHeaders)
    {
        writer.WriteFixedString(FourCCConversion.GetStringFromEnum(header.FourCC), 4); // FourCC
        writer.Write(header.ElementCount); // Number of Entries
        writer.Write(header.ChunkOffset);  // Offset from start of file.
    }

    // Seek to the end of the file, and then pad us to 32-byte alignment.
    writer.BaseStream.Seek(0, SeekOrigin.End);
    int delta = WMath.Pad32Delta(writer.BaseStream.Position);
    for (int i = 0; i < delta; i++)
        writer.Write((byte)0xFF); // Cast to byte so each pad iteration writes a single 0xFF byte rather than a 4-byte int.
}
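// AddObjectToDictionary is called above but its body is not shown. Based on the call sites, it
// presumably buckets each node under its FourCC; the sketch below is an assumption reconstructed
// from that usage only. The real helper likely also folds the node's Layer back into the FourCC
// key (as the older WActorNode-based exporter does with ChunkHeader.LayerToFourCC), which this
// sketch omits.
//
//     private void AddObjectToDictionary(SerializableDOMNode actor, Dictionary<FourCC, List<SerializableDOMNode>> actorCategories)
//     {
//         FourCC fourCC = actor.FourCC;
//
//         if (!actorCategories.ContainsKey(fourCC))
//             actorCategories[fourCC] = new List<SerializableDOMNode>();
//
//         actorCategories[fourCC].Add(actor);
//     }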