/// <summary>
/// Walks the tree recursively and collects every checked leaf tag (level != 0)
/// into AddList keyed by datum index, then refreshes the selection counter.
/// </summary>
/// <param name="nodes">the node collection to scan (call with the root collection)</param>
public void CheckedTags(TreeNodeCollection nodes)
{
    foreach (System.Windows.Forms.TreeNode tagitem in nodes)
    {
        if (tagitem.Checked && tagitem.Level != 0)
        {
            // Node name holds the tag-table offset; the datum index lives 4 bytes in.
            int tag_table_ref = Int32.Parse(tagitem.Name);
            int datum_index = DATA_READ.ReadINT_LE(tag_table_ref + 4, map_stream);
            if (!AddList.ContainsKey(datum_index))
            {
                AddList.Add(datum_index, tagitem.Text);
            }
        }
        // Recurse into children exactly once. The original also recursed inside the
        // checked/level-0 branch, so checked root nodes were traversed twice
        // (harmless for AddList thanks to ContainsKey, but duplicated work).
        if (tagitem.Nodes.Count != 0)
        {
            CheckedTags(tagitem.Nodes);
        }
    }
    textBox4.Text = AddList.Count.ToString() + " Tags Selected";
}
/// <summary>
/// Reads the injection config XML and builds compile_list, handing every
/// listed tag a fresh datum index counted up from 0x3BA4.
/// </summary>
/// <param name="file">path of the configuration XML</param>
public Rebase_meta(string file)
{
    InitializeComponent();
    directory = DATA_READ.ReadDirectory_from_file_location(file);

    XmlDocument config = new XmlDocument();
    config.Load(file);

    compile_list = new List <injectRefs>();
    int nextDatum = 0x3BA4;//new datum_indexes starting from 0x3BA4
    foreach (XmlNode tagNode in config.SelectNodes("config/tag"))
    {
        injectRefs entry = new injectRefs();
        entry.old_datum = int.Parse(tagNode.SelectSingleNode("datum").InnerText, NumberStyles.HexNumber);
        entry.new_datum = nextDatum++;
        entry.file_name = tagNode.SelectSingleNode("name").InnerText;
        entry.type = DATA_READ.ReadTAG_TYPE_form_name(entry.file_name);
        compile_list.Add(entry);
    }
}
// Opens the MetaExtractor window for the tag currently selected in the tree.
private void extractMetaToolStripMenuItem_Click(object sender, EventArgs e)
{
    if (!map_loaded)
    {
        MessageBox.Show("Select a map First", "Hint");
        return;
    }
    if (treeView1.SelectedNode == null)
    {
        MessageBox.Show("Select a TAG", "Hint");
        return;
    }
    // Node name carries the tag-table offset; read type and datum index from it.
    int tableRef = Int32.Parse(treeView1.SelectedNode.Name);
    string tagType = DATA_READ.ReadTAG_TYPE(tableRef, map_stream);
    int datum = DATA_READ.ReadINT_LE(tableRef + 4, map_stream);
    //Meta Extractor
    MetaExtractor extractor = new MetaExtractor(datum, tagType, SID_list, map_stream);
    extractor.Show();
}
// Prompts for a .map file, reads the tag-table bookkeeping values from the map
// header into class fields, then populates the tag tree and enables the tag menus.
private void openMapToolStripMenuItem_Click(object sender, EventArgs e)
{
    //Map opening stuff
    OpenFileDialog map_file = new OpenFileDialog();
    map_file.Filter = "Halo 2 Vista Map (*.map)|*.map";
    if (map_file.ShowDialog() == DialogResult.OK)
    {
        map_stream = new StreamReader(map_file.FileName);
        SID_list = new Dictionary <int, string>();//initialise our SIDs
        AllTagslist = new Dictionary <int, string>();
        // Fixed header offsets: 0x10 = tag-table offset, 0x14 = tag-table size,
        // 0x2D0 = file-name table offset (NOTE(review): offsets match the other
        // readers in this project; confirm against the map format reference).
        table_off = DATA_READ.ReadINT_LE(0x10, map_stream);
        table_size = DATA_READ.ReadINT_LE(0x14, map_stream);
        file_table_offset = DATA_READ.ReadINT_LE(0x2D0, map_stream);
        // Tag entries start after the header plus 0xC bytes per tag-class entry plus 0x20.
        table_start = table_off + 0xC * DATA_READ.ReadINT_LE(table_off + 4, map_stream) + 0x20;
        // Meta data begins right after the tag table; the scnr tag's memory address
        // (entry 0) anchors memory-address -> file-offset conversion.
        scnr_off = table_off + table_size;
        scnr_memaddr = DATA_READ.ReadINT_LE(table_start + 0x8, map_stream);//scnr tag index is 0x0
        map_name = map_file.FileName;
        initialize_treeview();
        map_loaded = true;
        TagToolStripMenu.Visible = true;
        metaToolStripMenuItem.Visible = true;
    }
}
// Replaces the tag tree with the plugin-defined field structure of the
// selected tag's type. The tree stops showing tags afterwards, so the map
// is flagged as unloaded.
private void getTagStructureToolStripMenuItem_Click(object sender, EventArgs e)
{
    if (!map_loaded)
    {
        MessageBox.Show("Select a map First", "Hint");
        return;
    }
    if (treeView1.SelectedNode == null)
    {
        MessageBox.Show("Select a TAG", "Hint");
        return;
    }
    string tagType = DATA_READ.ReadTAG_TYPE(Int32.Parse(treeView1.SelectedNode.Name), map_stream);
    plugins_field structure = DATA_READ.Get_Tag_stucture_from_plugin(tagType);
    if (structure == null)
    {
        MessageBox.Show("The plugin of type " + tagType + " doesn't exist", "ERROR");
    }
    else
    {
        TreeNode root = structure.Get_field_structure();
        root.Text = tagType;
        treeView1.Nodes.Clear();
        treeView1.Nodes.Add(root);
    }
    // Original sets this in both branches (tree no longer shows tags either way).
    map_loaded = false;
}
List <UnisonRefs> type_ref_list;//they are used to universally reference a tag depending on the type of tagRef

/// <summary>
/// Reads the resync configuration XML, builds the compile / scenario /
/// type-reference lists, then runs the sync pass.
/// </summary>
/// <param name="file">path of the configuration XML</param>
/// <param name="type">the tag type being resynced</param>
public Resyncer(string file, string type)
{
    directory = DATA_READ.ReadDirectory_from_file_location(file);
    resync_type = type;

    XmlDocument config = new XmlDocument();
    config.Load(file);

    compile_list = new List <injectRefs>();
    scneario_list = new List <string>();
    type_ref_list = new List <UnisonRefs>();

    foreach (XmlNode tagNode in config.SelectNodes("config/tag"))
    {
        injectRefs entry = new injectRefs();
        entry.old_datum = int.Parse(tagNode.SelectSingleNode("datum").InnerText, NumberStyles.HexNumber);
        entry.new_datum = -1;//resolved later by the sync pass
        entry.file_name = tagNode.SelectSingleNode("name").InnerText;
        entry.type = DATA_READ.ReadTAG_TYPE_form_name(entry.file_name);
        scneario_list.Add(tagNode.SelectSingleNode("scenario").InnerText);
        compile_list.Add(entry);
    }

    sync();
}
/// <summary>
/// Generates a list of StringIndexes contained in the map.
/// </summary>
/// <param name="map_loc">path of the map file to scan</param>
/// <returns>List of StringID_info, one entry per non-empty string</returns>
List <StringID_info> Get_SID_list(string map_loc)
{
    List <StringID_info> ret = new List <StringID_info>();
    // using guarantees the stream is closed even if a read throws
    // (the original leaked the StreamReader on exceptions).
    using (StreamReader map_stream = new StreamReader(map_loc))
    {
        // String-table bookkeeping from the map header: count, index table, data table.
        int string_table_count = DATA_READ.ReadINT_LE(0x170, map_stream);
        int string_index_table_offset = DATA_READ.ReadINT_LE(0x178, map_stream);
        int string_table_offset = DATA_READ.ReadINT_LE(0x17C, map_stream);
        for (int index = 0; index < string_table_count; index++)
        {
            // Each index entry is 4 bytes; only the low 16 bits are used as an offset.
            int table_off = DATA_READ.ReadINT_LE(string_index_table_offset + index * 0x4, map_stream) & 0xFFFF;
            string STRING = DATA_READ.ReadSTRING(string_table_offset + table_off, map_stream);
            if (STRING.Length > 0)
            {
                int SID = DATA_READ.Generate_SID(index, 0x0, STRING);//set is 0x0 cuz i couldnt figure out any other value
                StringID_info SIDI = new StringID_info();
                SIDI.string_index_table_index = string_index_table_offset + index * 0x4;
                SIDI.string_table_offset = table_off;
                SIDI.StringID = SID;
                SIDI.STRING = STRING;
                ret.Add(SIDI);
            }
        }
    }
    return ret;
}
List <UnisonRefs> type_ref_list;//they are used to universally reference a tag depending on the type of tagRef

/// <summary>
/// Loads the injection config XML: builds compile_list (one injectRefs per
/// tag node, assigning fresh datum indexes from 0x3BA4 upward), records each
/// tag's scenario, then fills type_ref_list with only those types that occur
/// exactly once in compile_list.
/// </summary>
/// <param name="file">path of the configuration XML</param>
public Rebase_meta(string file)
{
    InitializeComponent();
    XmlDocument xd = new XmlDocument();
    xd.Load(file);
    compile_list = new List <injectRefs>();
    tag_scenarios = new List <string>();
    type_ref_list = new List <UnisonRefs>();
    directory = DATA_READ.ReadDirectory_from_file_location(file);
    int new_index = 0x3BA4;//new datum_indexes starting from 0x3BA4
    foreach (XmlNode Xn in xd.SelectNodes("config/tag"))
    {
        injectRefs temp = new injectRefs();
        temp.old_datum = int.Parse(Xn.SelectSingleNode("datum").InnerText, NumberStyles.HexNumber);
        temp.new_datum = new_index++;
        temp.file_name = Xn.SelectSingleNode("name").InnerText;
        temp.type = DATA_READ.ReadTAG_TYPE_form_name(temp.file_name);
        tag_scenarios.Add(Xn.SelectSingleNode("scenario").InnerText);
        //lets add the tag to the list
        compile_list.Add(temp);
    }
    //now lets fill the unison List
    // A type seen a second time is blacklisted and its earlier entry removed, so
    // only types with a single occurrence survive in type_ref_list.
    List <string> blacklisted_type = new List <string>();
    foreach (injectRefs inj_temp in compile_list)
    {
        if (!blacklisted_type.Contains(inj_temp.type))
        {
            bool any_occurence = false;
            // NOTE(review): removing from type_ref_list inside this indexed loop skips
            // the element after the removed one; appears harmless only because each
            // type can occur at most once in type_ref_list — confirm.
            for (int i = 0; i < type_ref_list.Count(); i++)
            {
                UnisonRefs uni_temp = type_ref_list[i];
                if (uni_temp.type == inj_temp.type)
                {
                    any_occurence = true;
                    blacklisted_type.Add(inj_temp.type);
                    type_ref_list.Remove(uni_temp);
                }
            }
            if (!any_occurence)
            {
                UnisonRefs my_temp_ref = new UnisonRefs();
                my_temp_ref.type = inj_temp.type;
                my_temp_ref.new_datum = inj_temp.new_datum;
                my_temp_ref.file_name = inj_temp.file_name;
                type_ref_list.Add(my_temp_ref);
            }
        }
    }
}
/// <summary>
/// return a list all tagRefs mentioned in the meta and the extended meta
/// </summary>
/// <returns>every valid (datum != -1) tagRef reachable from this meta</returns>
public List <tagRef> Get_all_tag_refs()
{
    List <tagRef> collected = new List <tagRef>();
    //plain tagRef fields inside this meta
    foreach (int refOff in ref_tags)
    {
        int datum = DATA_READ.ReadINT_LE(refOff + 4, data);
        if (datum == -1)
        {
            continue;//skip nulled references
        }
        tagRef entry = new tagRef();
        entry.type = DATA_READ.ReadTAG_TYPE(refOff, data);
        entry.datum_index = datum;
        collected.Add(entry);
    }
    //list_extended object is only created when we are reading from a map
    if (list_extended != null)
    {
        foreach (extended_meta extended in list_extended.Values)
        {
            collected.AddRange(extended.Get_all_tag_refs());
        }
    }
    //withClass tagRefs carry only a datum index; their type has to be looked up
    //in the map's tag table, so this part needs a live map stream
    if (map_stream != null)
    {
        int table_off = DATA_READ.ReadINT_LE(0x10, map_stream);
        int table_start = table_off + 0xC * DATA_READ.ReadINT_LE(table_off + 4, map_stream) + 0x20;
        foreach (int refOff in ref_WCtags)
        {
            int datum = DATA_READ.ReadINT_LE(refOff, data);//we read it from meta data
            if (datum != -1)
            {
                tagRef entry = new tagRef();
                entry.datum_index = datum;
                entry.type = DATA_READ.ReadTAG_TYPE(table_start + (0xFFFF & datum) * 0x10, map_stream);//we read this from map stream
                collected.Add(entry);
            }
        }
    }
    return collected;
}
/// <summary>
/// Initialises tree view upon opening a map file
/// </summary>
void initialize_treeview()
{
    treeView1.Nodes.Clear();
    int tag_count = 0;
    int path_start = 0;//running offset into the file-name table
    for (int i = 0; ; i++)
    {
        // Each tag-table entry is 0x10 bytes: type, datum index, mem addr, size.
        int tag_table_REF = table_start + 0x10 * i;
        if (tag_table_REF > table_size + table_start)
        {
            break;
        }
        string type = DATA_READ.ReadTAG_TYPE(tag_table_REF, map_stream);
        int datum_index = DATA_READ.ReadINT_LE(tag_table_REF + 4, map_stream);
        string path = DATA_READ.ReadSTRING(file_table_offset + path_start, map_stream);
        if (datum_index != -1)
        {
            //lets check the mem addrs validity before adding it to the list
            // NOTE(review): tag_table_REF already includes table_start + 0x10 * i, so the
            // entry offset looks applied twice here; confirm the intended read is
            // table_start + (datum_index & 0xffff) * 0x10 + 8 (compare the meta ctor).
            int mem_addr = DATA_READ.ReadINT_LE(tag_table_REF + (datum_index & 0xffff) * 0x10 + 8, map_stream);
            if (mem_addr != 0x0)
            {
                // NOTE(review): field is named AllTagslist where it is initialised on map
                // open — verify AllTagList here refers to the same dictionary.
                AllTagList.Add(datum_index, path);//Adding only Map Specific tags with Internal Reference only to list
            }
            // One root node per tag type; individual tags nest under their type node.
            if (treeView1.Nodes.IndexOfKey(type) == -1)
            {
                treeView1.Nodes.Add(type, "- " + type);
            }
            int index = treeView1.Nodes.IndexOfKey(type);
            //HEX Values contains ABCDEF
            treeView1.Nodes[index].Nodes.Add(tag_table_REF.ToString(), "- " + path);
            //add this stuff to the SID list
            SID_list.Add(datum_index, path);
            //ugh! is basically the last tag
            if (type.CompareTo("ugh!") == 0)
            {
                break;
            }
            path_start += path.Length + 1;//+1 for the NUL terminator
        }
        tag_count = i;
    }
    treeView1.Sort();
    textBox2.Text = tag_count.ToString() + " Total Tags";
}
Dictionary <int, extended_meta> list_extended; //<mem_off,extended_meta obj>,a dictionary containing the extended meta by their memory address to prevent redundancy.

/// <summary>
/// use to read meta data from a map file
/// </summary>
/// <param name="datum_index">the datum index of the tag</param>
/// <param name="path">the path of the tag.eg: characters/elite/elite_mp</param>
/// <param name="sr">the stream object over the open map file</param>
public meta(int datum_index, string path, StreamReader sr)
{
    //initialise some stuff
    this.datum_index = datum_index;
    this.path = path;
    map_stream = sr;
    //some meta reading prologue
    // Header bookkeeping: tag-table offset/size, first tag entry, and the scnr
    // tag's memory address used to convert memory addresses to file offsets.
    int table_off = DATA_READ.ReadINT_LE(0x10, map_stream);
    int table_size = DATA_READ.ReadINT_LE(0x14, map_stream);
    int table_start = table_off + 0xC * DATA_READ.ReadINT_LE(table_off + 4, map_stream) + 0x20;
    int scnr_off = table_off + table_size;
    int scnr_memaddr = DATA_READ.ReadINT_LE(table_start + 0x8, map_stream);//scnr tag index is 0x0(mostly)
    //steps concerned with the specified meta
    // The low 16 bits of the datum index select the 0x10-byte tag-table entry.
    type = DATA_READ.ReadTAG_TYPE(table_start + (0xFFFF & datum_index) * 0x10, map_stream);
    mem_off = DATA_READ.ReadINT_LE(table_start + ((0xFFFF & datum_index) * 0x10) + 0x8, map_stream);
    size = DATA_READ.ReadINT_LE(table_start + ((0xFFFF & datum_index) * 0x10) + 0xC, map_stream);
    //read the meta from the map
    data = new byte[size];
    if (!DATA_READ.Check_shared(sr))
    {
        //normal map: file position = scnr file offset + distance from scnr memory address
        map_stream.BaseStream.Position = scnr_off + (mem_off - scnr_memaddr);
    }
    else
    {
        //shared map
        map_stream.BaseStream.Position = scnr_off + (mem_off - 0x3c000); //0x3c000 is a hardcoded value in blam engine
    }
    map_stream.BaseStream.Read(data, 0, size);
    //read and store the plugin structure
    plugin = DATA_READ.Get_Tag_stucture_from_plugin(type);
    //lets initialise some lists and dictionaries
    ref_data = new List <int>();
    ref_tags = new List <int>();
    ref_reflexive = new List <int>();
    ref_stringID = new List <int>();
    ref_extended = new Dictionary <int, int>();
    list_extended = new Dictionary <int, extended_meta>();
    ref_WCtags = new List <int>();
    //now lets search for all kinds of stuff
    List_deps(0x0, plugin);
}
/// <summary>
/// Gets the list of tags contained in the map
/// </summary>
/// <param name="map_loc">path of the map file to scan</param>
/// <returns>one tag_info per valid (datum != -1) tag-table entry</returns>
List <tag_info> Get_Tag_list(string map_loc)
{
    List <tag_info> ret = new List <tag_info>();
    // using guarantees the stream is closed even if a read throws
    // (the original leaked the StreamReader on exceptions).
    using (StreamReader map_stream = new StreamReader(map_loc))
    {
        // Map-header bookkeeping: tag-table offset/size and file-name table offset.
        int table_off = DATA_READ.ReadINT_LE(0x10, map_stream);
        int table_size = DATA_READ.ReadINT_LE(0x14, map_stream);
        int file_table_offset = DATA_READ.ReadINT_LE(0x2D0, map_stream);
        int table_start = table_off + 0xC * DATA_READ.ReadINT_LE(table_off + 4, map_stream) + 0x20;
        int path_start = 0;//running offset into the file-name table
        for (int i = 0; ; i++)
        {
            int tag_table_REF = table_start + 0x10 * i;//each entry is 0x10 bytes
            if (tag_table_REF > table_size + table_start)
            {
                break;
            }
            string type = DATA_READ.ReadTAG_TYPE(tag_table_REF, map_stream);
            int datum_index = DATA_READ.ReadINT_LE(tag_table_REF + 4, map_stream);
            string path = DATA_READ.ReadSTRING(file_table_offset + path_start, map_stream);
            if (datum_index != -1)
            {
                tag_info temp = new tag_info();
                temp.datum_index = datum_index;
                temp.type = type;
                temp.file_loc = path;
                ret.Add(temp);
                //ugh! is basically the last tag
                if (type.CompareTo("ugh!") == 0)
                {
                    break;
                }
                path_start += path.Length + 1;//+1 for the NUL terminator
            }
        }
    }
    return ret;
}
// Extracts every tag in TagsList to the chosen destination folder, updating
// the status labels and progress bar as it goes. The map is closed for the
// duration of the extraction and reopened afterwards.
private void extract_button_Click(object sender, EventArgs e)
{
    isRecursive = recursive_radio_.Checked;
    isOverrideOn = override_tags_.Checked;
    isOutDBOn = output_db_.Checked;
    DestinationFolder = textBox1.Text;
    MapsFolder = MainBox.map_path;
    string mapName = DATA_READ.Read_File_from_file_location(MainBox.map_name);
    int TotalTags = TagsList.Count;
    curent_tag_status.Visible = true;
    if (DestinationFolder == "")
    {
        curent_tag_status.Text = "Select a Destination Folder Please";
        return;
    }
    if (MapsFolder == "")
    {
        curent_tag_status.Text = "Select a Maps Folder Please";
        return;
    }
    curent_tag_status.Text = "Initializing Decompiler";
    MainBox.CloseMap();//the extractor needs exclusive access to the map file
    progressBar1.Value = 0;
    progressBar1.Maximum = TotalTags;
    int index = 0;
    // Iterate key/value pairs directly; the original called Values.ElementAt(index)
    // inside the loop, which is O(n^2) over the dictionary.
    foreach (KeyValuePair <int, string> tag in TagsList)
    {
        tag_count_stats.Text = "[" + index + "/" + TotalTags + "]";
        curent_tag_status.Text = "Extracting Objects : " + tag.Value;
        MainBox.H2Test.Halo2_ExtractTagCache(tag.Key, isRecursive, isOutDBOn, isOverrideOn, DestinationFolder, MapsFolder, mapName);
        progressBar1.Value++; //update the progress bar
        index++;
    }
    if (MessageBox.Show("Extraction Done!", "Progress", MessageBoxButtons.OK) == DialogResult.OK)
    {
        MainBox.ReOpenMap();
    }
}
// Extracts every tag queued in ExtractList to the destination folder, then
// clears the queue and disables the extraction buttons.
private void extract_button_Click(object sender, EventArgs e)
{
    isRecursive = recursive_radio_.Checked;
    isOverrideOn = override_tags_.Checked;
    isOutDBOn = output_db_.Checked;
    DestinationFolder = textBox3.Text;
    string mapName = DATA_READ.Read_File_from_file_location(MainBox.map_name);
    current_tag_status.Visible = true;
    if (DestinationFolder == "")
    {
        current_tag_status.Text = "Select a Destination Folder Please";
        return;
    }
    current_tag_status.Text = "Initializing Decompiler";
    MainBox.CloseMap();//the extractor needs exclusive access to the map file
    progressBar1.Value = 0;
    progressBar1.Maximum = ExtractList.Count;
    // Iterate key/value pairs directly; the original called Values.ElementAt(index)
    // inside the loop, which is O(n^2) over the dictionary. (The unused TotalTags
    // local, counted from AddList rather than ExtractList, was dropped.)
    foreach (KeyValuePair <int, string> tag in ExtractList)
    {
        current_tag_status.Text = "Extracting Objects : " + tag.Value;
        MainBox.H2Test.Halo2_ExtractTagCache(tag.Key, isRecursive, isOutDBOn, isOverrideOn, DestinationFolder, H2V_BaseMapsDirectory + "\\", mapName);
        progressBar1.Value++; //update the progress bar
    }
    current_tag_status.Text = "Extraction Complete!";
    if (MessageBox.Show("Extraction Done!", "Progress", MessageBoxButtons.OK) == DialogResult.OK)
    {
        MainBox.ReOpenMap();
    }
    clear_button.Enabled = false;
    extract_button.Enabled = false;
    ExtractList.Clear();
    richTextBox1.Text = "";
}
/// <summary>
/// use to rebase current meta data to a newer memory address(affects
/// reflexive,extended and data ref fields only)
/// </summary>
/// <param name="new_base">the new memory address to which the meta has to rebased</param>
public void Rebase_meta(int new_base)
{
    // Reflexive and data-ref fields both keep an absolute memory address 4 bytes
    // into the field; shift each by the same delta (new_base - mem_off).
    // Concat keeps the original order: reflexive fields first, then data refs.
    foreach (int off in ref_reflexive.Concat(ref_data))
    {
        int old_addr = DATA_READ.ReadINT_LE(off + 4, data);
        DATA_READ.WriteINT_LE(new_base + (old_addr - mem_off), off + 4, data);
    }
    //list_extended only exists when the meta was read from a map
    if (list_extended != null)
    {
        // Extended metas are laid out one after another immediately after this meta,
        // so rebase each of them first and advance the running base as we go.
        int next_base = new_base + size;
        foreach (extended_meta ext in list_extended.Values)
        {
            ext.Rebase_meta(next_base);
            next_base += ext.Get_Total_size();
        }
        // Then patch every field that points at an extended meta with its new address.
        foreach (KeyValuePair <int, int> pair in ref_extended)
        {
            extended_meta ext = list_extended[pair.Value];
            DATA_READ.WriteINT_LE(ext.Get_mem_addr(), pair.Key + 4, data);
        }
    }
    //update the base to which meta has been compiled
    mem_off = new_base;
}
/// <summary>
/// Initialises tree view upon opening a map file
/// </summary>
void initialize_treeview()
{
    treeView1.Nodes.Clear();
    int path_start = 0;//running offset into the file-name table
    for (int i = 0; ; i++)
    {
        // Each tag-table entry is 0x10 bytes: type, datum index, mem addr, size.
        int tag_table_REF = table_start + 0x10 * i;
        if (tag_table_REF > table_size + table_start)
        {
            break;
        }
        string type = DATA_READ.ReadTAG_TYPE(tag_table_REF, map_stream);
        int datum_index = DATA_READ.ReadINT_LE(tag_table_REF + 4, map_stream);
        string path = DATA_READ.ReadSTRING(file_table_offset + path_start, map_stream);
        if (datum_index != -1)
        {
            //lets check the mem addrs validity before adding it to the list
            // NOTE(review): tag_table_REF already includes table_start + 0x10 * i, so the
            // entry offset looks applied twice here; confirm the intended read is
            // table_start + (datum_index & 0xffff) * 0x10 + 8 (compare the meta ctor).
            int mem_addr = DATA_READ.ReadINT_LE(tag_table_REF + (datum_index & 0xffff) * 0x10 + 8, map_stream);
            if (mem_addr != 0x0)
            {
                datum_list.Add(datum_index);//lets add this to the list
            }
            // One root node per tag type; individual tags nest under their type node.
            if (treeView1.Nodes.IndexOfKey(type) == -1)
            {
                treeView1.Nodes.Add(type, type);
            }
            int index = treeView1.Nodes.IndexOfKey(type);
            //HEX Values contains ABCDEF
            treeView1.Nodes[index].Nodes.Add(tag_table_REF.ToString(), path);
            //add this stuff to the SID list
            SID_list.Add(datum_index, path);
            //ugh! is basically the last tag
            if (type.CompareTo("ugh!") == 0)
            {
                break;
            }
            path_start += path.Length + 1;//+1 for the NUL terminator
        }
    }
}
/// <summary>
/// used to null all the stringIds in the concerned meta and extended meta
/// </summary>
public void Null_StringID()
{
    //first null all my string ids
    foreach (int temp_off in ref_stringID)
    {
        DATA_READ.WriteINT_LE(0x0, temp_off, data);
    }
    // list_extended is only created when the meta was read from a map; the other
    // methods here (Get_all_tag_refs, Rebase_meta) already guard against null,
    // but this one did not and would crash on a file-loaded meta.
    if (list_extended != null)
    {
        //we then proceed for extended meta
        List <int> keys = list_extended.Keys.ToList <int>();
        foreach (int temp_key in keys)
        {
            extended_meta temp_meta = list_extended[temp_key];
            temp_meta.Null_StringID();
        }
    }
}
Dictionary <int, extended_meta> list_extended; //<mem_off,extended_meta obj>,a dictionary containing the extended meta by their memory address to prevent redundancy.

/// <summary>
/// extended meta is similar to the meta
/// </summary>
/// <param name="mem_address">memory address where the extended table lives</param>
/// <param name="size">the total size of the extended meta containg all occurences</param>
/// <param name="count">number of entries in the table</param>
/// <param name="plugin">plugin structure describing a single entry</param>
/// <param name="sr">the stream object over the open map file</param>
public extended_meta(int mem_address, int size, int count, plugins_field plugin, StreamReader sr)
{
    this.mem_off = mem_address;
    this.size = size;
    this.plugin = plugin;
    this.map_stream = sr;
    this.entry_size = size / count;//size covers all occurrences, so one entry is size/count
    //some meta reading prologue
    // Header bookkeeping: tag table and the scnr memory anchor used to convert
    // memory addresses to file offsets (same scheme as the meta constructor).
    int table_off = DATA_READ.ReadINT_LE(0x10, map_stream);
    int table_size = DATA_READ.ReadINT_LE(0x14, map_stream);
    int table_start = table_off + 0xC * DATA_READ.ReadINT_LE(table_off + 4, map_stream) + 0x20;
    int scnr_off = table_off + table_size;
    int scnr_memaddr = DATA_READ.ReadINT_LE(table_start + 0x8, map_stream);//scnr tag index is 0x0(mostly)
    //read the extended_meta from the map
    data = new byte[this.size];
    if (!DATA_READ.Check_shared(sr))
    {
        //normal map: file position = scnr file offset + distance from scnr memory address
        map_stream.BaseStream.Position = scnr_off + (mem_off - scnr_memaddr);
    }
    else
    {
        //shared map
        map_stream.BaseStream.Position = scnr_off + (mem_off - 0x3c000); //0x3c000 is a hardcoded value in blam engine
    }
    map_stream.BaseStream.Read(data, 0, this.size);
    //lets initialise some lists and dictionaries
    ref_data = new List <int>();
    ref_tags = new List <int>();
    ref_stringID = new List <int>();
    ref_reflexive = new List <int>();
    ref_extended = new Dictionary <int, int>();
    list_extended = new Dictionary <int, extended_meta>();
    ref_WCtags = new List <int>();
    //scan every entry of the table for nested references
    for (int i = 0; i < count; i++)
    {
        List_deps(i * entry_size, plugin);
    }
}
//Tag extraction stuff
// Extracts the selected tag, or every tag under the selected class node.
// The map stream is closed while the extractor runs and reopened afterwards.
private void extractTagToolStripMenuItem_Click(object sender, EventArgs e)
{
    if (map_loaded)
    {
        if (treeView1.SelectedNode != null)
        {
            // Leaf tags have Name (table offset) differing from their display Text;
            // class nodes have Name == Text.
            bool singleTag = treeView1.SelectedNode.Name.CompareTo(treeView1.SelectedNode.Text) != 0;
            if (singleTag)
            {
                //Extraction for a single tag
                int tableRef = Int32.Parse(treeView1.SelectedNode.Name);
                int datum = DATA_READ.ReadINT_LE(tableRef + 4, map_stream);
                map_stream.Close();
                obj.Halo2_ExtractTagCache(datum, DATA_READ.Read_File_from_file_location(map_name));
                map_stream = new StreamReader(map_name);
            }
            else
            {
                //Extraction for a whole same bunch of tags
                List <int> datums = new List <int>();
                foreach (TreeNode child in treeView1.SelectedNode.Nodes)
                {
                    int tableRef = Int32.Parse(child.Name);
                    datums.Add(DATA_READ.ReadINT_LE(tableRef + 4, map_stream));
                }
                map_stream.Close();
                int done = 1;
                foreach (int datum in datums)
                {
                    obj.Halo2_ExtractTagCache(datum, DATA_READ.Read_File_from_file_location(map_name));
                    progressBar1.Value = (done++) * 100 / datums.Count;//update the progress bar
                }
                map_stream = new StreamReader(map_name);
            }
        }
    }
    progressBar1.Value = 0;//reset the progress bar
}
/// <summary>
/// used to read meta data from a meta file along with ability to modify mem_off
/// </summary>
/// <param name="meta">raw meta bytes read from the extracted file</param>
/// <param name="type">four-character tag type</param>
/// <param name="size">meta size in bytes</param>
/// <param name="path">tag path, eg: characters/elite/elite_mp</param>
/// <param name="mem_off">memory base the meta is treated as compiled at</param>
public meta(byte[] meta, string type, int size, string path, int mem_off)
{
    data = meta;
    this.type = type;
    this.size = size;
    this.path = path;
    this.mem_off = mem_off;//this is for awkward cases when i rebase the meta to some other shit
    //lets initialise some lists and dictionaries
    ref_data = new List <int>();
    ref_tags = new List <int>();
    ref_reflexive = new List <int>();
    ref_stringID = new List <int>();
    ref_WCtags = new List <int>();
    //while extracting the meta ,i fix all the extended meta stuff so we dont need it now
    // (list_extended/ref_extended stay null here; List_deps treats a null
    // list_extended as "file-loaded" and throws if an extended ref remains)
    plugin = DATA_READ.Get_Tag_stucture_from_plugin(type);
    List_deps(0x0, plugin);
}
/// <summary>
/// Generates a full self dependent meta file(except tag refs)
/// </summary>
/// <returns>root meta followed by every extended meta at its rebased offset</returns>
public byte[] Generate_meta_file()
{
    byte[] output = new byte[this.Get_Total_size()];
    //the root meta occupies the front of the buffer
    DATA_READ.ArrayCpy(output, data, 0x0, size);
    //each extended meta is copied at the offset implied by its (already
    //rebased) memory address relative to this meta's base
    foreach (extended_meta extended in list_extended.Values)
    {
        int destination = extended.Get_mem_addr() - mem_off;
        DATA_READ.ArrayCpy(output, extended.Generate_meta_file(), destination, extended.Get_Total_size());
    }
    return output;
}
/// <summary>
/// used to read meta data from a meta file
/// </summary>
/// <param name="meta">raw meta bytes read from the extracted file</param>
/// <param name="type">four-character tag type</param>
/// <param name="size">meta size in bytes</param>
/// <param name="path">tag path, eg: characters/elite/elite_mp</param>
public meta(byte[] meta, string type, int size, string path)
{
    data = meta;
    this.type = type;
    this.size = size;
    this.path = path;
    this.mem_off = 0x0;//i usually rebase meta to 0x0 when extracting
    //lets initialise some lists and dictionaries
    ref_data = new List <int>();
    ref_tags = new List <int>();
    ref_reflexive = new List <int>();
    ref_stringID = new List <int>();
    ref_WCtags = new List <int>();
    //while extracting the meta ,i fix all the extended meta stuff so we dont need it now
    // (list_extended/ref_extended stay null here; List_deps treats a null
    // list_extended as "file-loaded" and throws if an extended ref remains)
    //plugin
    plugin = DATA_READ.Get_Tag_stucture_from_plugin(type);
    List_deps(0x0, plugin);
}
// Timer-driven extraction: one tag per tick so the progress bar animates.
// (i used timer instead of a loop because of the fancy progress bar — IT looks COOL)
private void timer2_Tick(object sender, EventArgs e)
{
    if (index >= datum_list.Count)
    {
        // All tags done: reset state, reopen the map and stop the timer.
        progressBar1.Value = 0; //reset the progres bar
        map_stream = new StreamReader(map_name); //lets load the map
        map_loaded = true; //well now u are now free
        index = 0; //reset the index
        timer2.Enabled = false;
        return;
    }
    obj.Halo2_ExtractTagCache(datum_list[index], DATA_READ.Read_File_from_file_location(map_name));
    progressBar1.Value = (index + 1) * 100 / datum_list.Count; //update the progress bar
    //we have to increment the index
    index++;
}
// Collects the datum index of the selected tag (or of every tag under the
// selected class node) and hands the set to a TagExtractor window.
private void extractTagToolStripMenuItem_Click(object sender, EventArgs e)
{
    if (!map_loaded)
    {
        MessageBox.Show("No Map Loaded ,Reload it", "Error!!", MessageBoxButtons.OK);
        return;
    }
    if (treeView1.SelectedNode == null)
    {
        MessageBox.Show("Select a Tag First!", "CRASHED!!", MessageBoxButtons.OK);
        return;
    }
    Dictionary <int, string> Extractlist = new Dictionary <int, string>();
    // Leaf tags have Name (table offset) differing from their display Text;
    // class nodes have Name == Text.
    if (treeView1.SelectedNode.Name.CompareTo(treeView1.SelectedNode.Text) != 0)
    {
        //Extraction for a single tag
        int tableRef = Int32.Parse(treeView1.SelectedNode.Name);
        Extractlist.Add(DATA_READ.ReadINT_LE(tableRef + 4, map_stream), treeView1.SelectedNode.Text);
    }
    else
    {
        //Extraction for a whole same bunch of tags
        foreach (TreeNode child in treeView1.SelectedNode.Nodes)
        {
            int tableRef = Int32.Parse(child.Name);
            Extractlist.Add(DATA_READ.ReadINT_LE(tableRef + 4, map_stream), child.Text);
        }
    }
    TagExtractor ob = new TagExtractor(Extractlist, false);
    ob.Show();
}
// Writes "<name>,0x<datum>" for every child of the selected class node to
// TagsList.txt in the application folder.
void DumpTagList()
{
    // Guard: the original dereferenced SelectedNode unconditionally and threw a
    // NullReferenceException when nothing was selected.
    if (treeView1.SelectedNode == null || treeView1.SelectedNode.Nodes.Count == 0)
    {
        MessageBox.Show("Select Tag Nodes First", "Error", MessageBoxButtons.OK);
        return;
    }
    string[] lines = new string[treeView1.SelectedNode.Nodes.Count];
    int i = 0;
    foreach (TreeNode tn in treeView1.SelectedNode.Nodes)
    {
        // Node name holds the tag-table offset; the datum index lives 4 bytes in.
        int tag_table_ref = Int32.Parse(tn.Name);
        int datum_index = DATA_READ.ReadINT_LE(tag_table_ref + 4, map_stream);
        string name = System.IO.Path.GetFileNameWithoutExtension(tn.Text);
        lines[i++] = name + ",0x" + datum_index.ToString("X");
    }
    File.WriteAllLines(Application.StartupPath + @"\TagsList.txt", lines);
}
// Extracts every tag in extraction_list (recursively when radioButton1 is
// checked) to the chosen directory, writing a config XML describing what was
// extracted and showing a log window at the end.
private void Extract_Click(object sender, EventArgs e)
{
    // Validate the destination first. The original created the XmlTextWriter
    // before this check and never disposed it when the check failed.
    if (textBox1.Text.Length == 0)
    {
        MessageBox.Show("At least Select the Directory", "Error");
        return;
    }
    string log = "\nEXTRACTION LOG : ";//or log
    string rel_path = SID_list[extraction_list[0].datum_index] + "." + extraction_list[0].type;
    XmlTextWriter xw = new XmlTextWriter(textBox1.Text + "\\" + DATA_READ.Read_File_from_file_location(rel_path) + ".xml", Encoding.UTF8);
    xw.Formatting = Formatting.Indented;
    xw.WriteStartElement("config");
    // Indexed for-loop on purpose: recursive extraction appends to
    // extraction_list while we iterate (a foreach would throw).
    for (int i = 0; i < extraction_list.Count; i++)
    {
        tagRef temp_tagref = extraction_list[i];
        int datum = temp_tagref.datum_index;
        string type = temp_tagref.type;
        if (datum == -1 || extracted_list.Contains(datum))
        {
            continue;//nulled ref or already handled
        }
        if (!File.Exists(Application.StartupPath + "\\plugins\\" + type + ".xml"))
        {
            log += "\nPlugin " + type + ".xml doesnt exist";
            extracted_list.Add(datum);//dont retry this datum
            continue;
        }
        if (!SID_list.ContainsKey(datum))
        {
            log += "\nCouldnot find stringID to datum_index " + datum.ToString("X");
            continue;
        }
        meta obj = new meta(datum, SID_list[datum], map_stream);
        obj.Rebase_meta(0x0);
        if (checkBox1.Checked)
        {
            obj.Null_StringID();
        }
        if (radioButton1.Checked == true)
        {
            //add recursivity
            extraction_list.AddRange(obj.Get_all_tag_refs());
        }
        byte[] data = obj.Generate_meta_file();
        string path = textBox1.Text + "\\" + obj.Get_Path() + "." + obj.Get_Type();
        string directory = DATA_READ.ReadDirectory_from_file_location(path);
        //lets create our directory
        System.IO.Directory.CreateDirectory(directory);
        //create our file
        StreamWriter sw = new StreamWriter(path);
        sw.BaseStream.Write(data, 0, obj.Get_Total_size());
        sw.Dispose();
        //write to configuration xml
        xw.WriteStartElement("tag");
        xw.WriteStartElement("name");
        xw.WriteString(obj.Get_Path() + "." + type); //writing in the inner most level ie,name
        xw.WriteEndElement();                        //name level
        xw.WriteStartElement("datum");
        xw.WriteString(datum.ToString("X")); //writing in the inner most level ie here,datum
        xw.WriteEndElement();                //datum level
        xw.WriteEndElement();                //tag level
        //at least mention this in the logs
        log += "\nExtracted meta " + datum.ToString("X") + " to " + path;
        //add it to the extracted list
        extracted_list.Add(datum);
    }
    //close the config field and close the xml handle
    xw.WriteEndElement();
    xw.Dispose();
    //Log box
    LogBox lb = new LogBox(log);
    lb.Show();
    //work is now done so lets close this stupid box
    this.Close();
}
/// <summary>
/// a function to fill in all the Lists and Dictionaries
/// </summary>
/// <param name="off">the starting offset where the stuff is being read</param>
/// <param name="fields">the concerned field(s) in that section</param>
void List_deps(int off, plugins_field fields)
{
    //first we look for tag_refs and add them
    List <int> temp = fields.Get_tag_ref_list();
    foreach (int i in temp)
    {
        int Toff = off + i;//it contains type
        //only add the offset if it is not listed yet
        if (!ref_tags.Contains(Toff))
        {
            ref_tags.Add(Toff);
        }
    }
    //then we look for data_refs and add them
    temp = fields.Get_data_ref_list();
    foreach (int i in temp)
    {
        int Toff = off + i;
        if (!ref_data.Contains(Toff))
        {
            ref_data.Add(Toff);
        }
    }
    //then we look for stringId refs and add them
    temp = fields.Get_stringID_ref_list();
    foreach (int i in temp)
    {
        int Toff = off + i;
        if (!ref_stringID.Contains(Toff))
        {
            ref_stringID.Add(Toff);
        }
    }
    //now we look into reflexive fields and extended meta and add them accordingly
    List <plugins_field> Ptemp = fields.Get_reflexive_list();
    foreach (plugins_field i_Pfield in Ptemp)
    {
        int Toff = off + i_Pfield.Get_offset();//field table off contains count
        int count = DATA_READ.ReadINT_LE(Toff, data);
        int field_memaddr = DATA_READ.ReadINT_LE(Toff + 4, data);
        int entry_size = i_Pfield.Get_entry_size(); //entry_size
        int field_off = field_memaddr - mem_off; //its the offset of the field from the starting of the meta data
        if (count > 0)
        {
            //now we check whether its inside meta or a case of extended meta
            if ((field_memaddr >= mem_off) && (field_off < size))
            {
                //inside meta: remember the reflexive header, then recurse into each entry
                if (!ref_reflexive.Contains(Toff))
                {
                    ref_reflexive.Add(Toff);
                    //after adding it to the list we look into them,recursively
                    for (int j = 0; j < count; j++)
                    {
                        List_deps(field_off + j * entry_size, i_Pfield);
                    }
                }
            }
            else
            {
                //extended meta: the table lives outside this meta's memory range
                if (!ref_extended.ContainsKey(Toff))
                {
                    ref_extended.Add(Toff, field_memaddr);
                    //now we create and add extended_meta to the list if it isnt already there
                    if (!list_extended.ContainsKey(field_memaddr))
                    {
                        extended_meta temp_extend = new extended_meta(field_memaddr, entry_size * count, count, i_Pfield, map_stream);
                        list_extended.Add(field_memaddr, temp_extend);
                    }
                    //we dont need to look into them as extended meta does it for us
                }
            }
        }
    }
    //now we go for withClass attribute tagRefs,they are a bit different as they only contain the datum index of the refered tag
    temp = fields.Get_WCtag_ref_list();
    foreach (int i in temp)
    {
        int Toff = off + i;
        if (!ref_WCtags.Contains(Toff))
        {
            ref_WCtags.Add(Toff);
        }
    }
}
/// <summary>
/// a function to fill in all the Lists and Dictionaries
/// (records every tag / data / stringID / reflexive / withClass reference
/// found in one structure of the meta; recurses through reflexive blocks)
/// </summary>
/// <param name="off">the starting offset where the stuff is being read</param>
/// <param name="fields">the concerned field(s) in that section</param>
void List_deps(int off, plugins_field fields)
{
    List <int> temp = fields.Get_tag_ref_list();
    //first we look for tag_refs and add them
    foreach (int i in temp)
    {
        int Toff = off + i;//it contains type
        //we add this off to the list if it doesnt contain the off already
        if (!ref_tags.Contains(Toff))
        {
            ref_tags.Add(Toff);
        }
    }
    //then we look for data_refs and add them
    temp = fields.Get_data_ref_list();
    foreach (int i in temp)
    {
        int Toff = off + i;
        //we add this off to the list if it doesnt contain the off already
        if (!ref_data.Contains(Toff))
        {
            ref_data.Add(Toff);
        }
    }
    //then we look for stringId refs and add them
    temp = fields.Get_stringID_ref_list();
    foreach (int i in temp)
    {
        int Toff = off + i;
        //we add this off to the list if it doesnt contain the off already
        if (!ref_stringID.Contains(Toff))
        {
            ref_stringID.Add(Toff);
        }
    }
    //now we look into reflexive fields and extended meta and add them accordingly
    List <plugins_field> Ptemp = fields.Get_reflexive_list();
    foreach (plugins_field i_Pfield in Ptemp)
    {
        int Toff = off + i_Pfield.Get_offset();//field table off contains count
        int count = DATA_READ.ReadINT_LE(Toff, data);
        int field_memaddr = DATA_READ.ReadINT_LE(Toff + 4, data);
        int entry_size = i_Pfield.Get_entry_size(); //entry_size
        int field_off = field_memaddr - mem_off; //its the offset of the field from the starting of the meta data
        if (count > 0)
        {
            //now we check whether its inside meta or a case of extended meta
            if ((field_memaddr >= mem_off) && (field_off < size))
            {
                //inside meta
                //we add this off which contains reflexive table to the list if it doesnt contain the off already
                if (!ref_reflexive.Contains(Toff))
                {
                    ref_reflexive.Add(Toff);
                    //after adding it to the list we look into them,recursively
                    for (int j = 0; j < count; j++)
                    {
                        List_deps(field_off + j * entry_size, i_Pfield);
                    }
                }
            }
            else
            {
                //extended meta(IN SUCCESSFULL RUN ,EXTENDED META ONLY APPEARS ONLY WHEN WE READ FROM A MAP)
                //but first we check whether we are reading meta from a map,or an exracted file,its rather easy
                //(list_extended is only initialised for map-backed metas, so a null here means file-backed)
                if (list_extended != null)
                {
                    //we add this off to the list if it doesnt contain the off already
                    if (!ref_extended.ContainsKey(Toff))
                    {
                        ref_extended.Add(Toff, field_memaddr);
                        //now we create and add extended_meta to the list if it isnt already there
                        if (!list_extended.ContainsKey(field_memaddr))
                        {
                            extended_meta temp_extend = new extended_meta(field_memaddr, entry_size * count, count, i_Pfield, map_stream);
                            list_extended.Add(field_memaddr, temp_extend);
                        }
                        //we dont need to look into them as extended meta does it for us
                    }
                }
                else
                {
                    //the program will only reach here when u try to use an extended meta on meta file.
                    //any meta which i extract from a map file have all issues of extended_meta fixed.
                    throw new Exception("Meta file " + path + "." + type + " is broken.\nEither debug the extraction proceedure or fix the meta file");
                }
            }
        }
    }
    //now we go for withClass attribute tagRefs,they are a bit different as they only contain the datum index of the refered tag
    temp = fields.Get_WCtag_ref_list();
    foreach (int i in temp)
    {
        int Toff = off + i;
        //we add this off to the list if it doesnt contain the off already
        if (!ref_WCtags.Contains(Toff))
        {
            ref_WCtags.Add(Toff);
        }
    }
}
/// <summary>
/// Compiles every meta file listed in compile_list into one "tags.meta" blob
/// plus a matching "tables.meta" tag table, rebasing each meta to its new
/// memory address and updating its datum indexes along the way.
/// </summary>
private void button1_Click(object sender, EventArgs e)
{
    string log = "\nCOMPILATION : ";
    //creating a table; entry layout: type(4) | datum(4) | mem address(4) | size(4)
    byte[] tables = new byte[0x10 * compile_list.Count];
    //index of custom metas
    int custom_table_index = 0;
    //textBox1 holds a hex address such as "0x1511020"(lockout); skip the "0x" prefix
    int new_base = Int32.Parse(textBox1.Text.Substring(2), NumberStyles.HexNumber);

    //using-blocks guarantee both output files are closed even if a meta throws
    using (StreamWriter sw = new StreamWriter(directory + "\\tags.meta"))
    using (StreamWriter sw_t = new StreamWriter(directory + "\\tables.meta"))
    {
        foreach (injectRefs temp_ref in compile_list)
        {
            string file_path = directory + "\\" + temp_ref.file_name;
            if (File.Exists(file_path))
            {
                //FileInfo.Length replaces the old trick of opening the file with
                //FileMode.Append just to read its Position
                long size = new FileInfo(file_path).Length;
                byte[] meta_data = new byte[size];
                using (StreamReader sr = new StreamReader(file_path))
                {
                    //Stream.Read may return fewer bytes than requested,
                    //so loop until the whole file is in memory
                    int read = 0;
                    while (read < (int)size)
                    {
                        int n = sr.BaseStream.Read(meta_data, read, (int)size - read);
                        if (n == 0)
                            break;
                        read += n;
                    }
                }

                //meta mutates the byte array in place while fixing datums / rebasing
                meta obj = new meta(meta_data, temp_ref.type, (int)size, temp_ref.file_name);
                log += obj.Update_datum_indexes(compile_list);
                obj.Rebase_meta(new_base);
                //write the rebased meta to the compiled blob
                sw.BaseStream.Write(meta_data, 0, (int)size);

                //append the tag-table entry for this meta
                DATA_READ.WriteTAG_TYPE_LE(temp_ref.type, custom_table_index * 0x10, tables);
                DATA_READ.WriteINT_LE(temp_ref.new_datum, custom_table_index * 0x10 + 4, tables);
                DATA_READ.WriteINT_LE(new_base, custom_table_index * 0x10 + 8, tables);
                DATA_READ.WriteINT_LE((int)size, custom_table_index * 0x10 + 0xC, tables);
                log += "\n Written tag " + temp_ref.file_name + " with new datum as " + temp_ref.new_datum.ToString("X");
                //the next meta starts right after this one
                new_base += (int)size;
            }
            else
            {
                log += "\nFile doesnt exists : " + temp_ref.file_name;
            }
            custom_table_index++;
        }
        //Count property instead of the LINQ Count() extension on a List<T>
        sw_t.BaseStream.Write(tables, 0, 0x10 * compile_list.Count);
    }

    //lets launch the log box
    LogBox lb = new LogBox(log);
    lb.Show();
    this.Close();
}
/// <summary>
/// Dumps every shader ("shad") tag of the loaded map to one text file per
/// shader (stem path + bitmap paths) and writes a .shader_log index file.
/// </summary>
private void DumpShadersToolStripMenuItem_Click(object sender, EventArgs e)
{
    if (!map_loaded)
    {
        MessageBox.Show("Load a map first");
        return;
    }

    FolderBrowserDialog fbd = new FolderBrowserDialog();
    fbd.ShowNewFolderButton = true;
    MessageBox.Show("Select a directory to export the shader dump. Preferably an empty folder.");
    if (fbd.ShowDialog() != System.Windows.Forms.DialogResult.OK)
        return;

    string tags_directory = fbd.SelectedPath;
    //using guarantees the log file is closed even if a shader fails to dump
    using (StreamWriter log = new StreamWriter(tags_directory + '\\' + map_name.Substring(map_name.LastIndexOf('\\') + 1) + ".shader_log"))
    {
        foreach (TreeNode element in treeView1.Nodes["shad"].Nodes)
        {
            //tree node name stores the tag-table entry offset; datum index sits at +4
            int table_ref = Int32.Parse(element.Name);
            int datum = DATA_READ.ReadINT_LE(table_ref + 4, map_stream);
            meta meta_obj = new meta(datum, SID_list[datum], map_stream);
            meta_obj.Rebase_meta(0x0);

            if (meta_obj.Get_Total_size() == 0)
            {
                //empty shader meta: keep the log aligned with a placeholder line
                log.WriteLine("---");
                continue;
            }

            byte[] meta_data = meta_obj.Generate_meta_file();
            string text_path = tags_directory + '\\' + SID_list[datum] + ".txt";
            //lets create our directory
            System.IO.Directory.CreateDirectory(DATA_READ.ReadDirectory_from_file_location(text_path));
            using (StreamWriter sw = new StreamWriter(text_path))
            {
                //supoosing each shad contains only one Post process block element
                int PPB_off = DATA_READ.ReadINT_LE(0x24, meta_data);
                int stem_datum = DATA_READ.ReadINT_LE(PPB_off, meta_data);
                int bitmap_count = DATA_READ.ReadINT_LE(PPB_off + 0x4, meta_data);
                int bitmapB_off = DATA_READ.ReadINT_LE(PPB_off + 0x8, meta_data);

                //write the stem path (skip null/-1 datums; "---" marks an unknown datum)
                string name;
                if (stem_datum != 0 && stem_datum != -1)
                {
                    //use the TryGetValue result directly instead of a second lookup
                    sw.WriteLine(SID_list.TryGetValue(stem_datum, out name) ? name : "---");
                }

                //one line per bitmap reference (entries are 0xC bytes apart)
                for (int i = 0; i < bitmap_count; i++)
                {
                    int bitm_datum = DATA_READ.ReadINT_LE(bitmapB_off + i * 0xC, meta_data);
                    if (bitm_datum == 0 || bitm_datum == -1)
                    {
                        sw.WriteLine(" ");
                    }
                    else if (!SID_list.TryGetValue(bitm_datum, out name))
                    {
                        sw.WriteLine("---");
                    }
                    else
                    {
                        //an empty name is written as a single space to keep the line count
                        sw.WriteLine(name == "" ? " " : name);
                    }
                }
            }
            log.WriteLine(SID_list[datum] + ".txt");
        }
    }
    MessageBox.Show("Extraction Complete");
}