private void Extract_Click(object sender, EventArgs e)
{
    string log = "\nEXTRACTION LOG : ";

    if (textBox1.Text.Length == 0)
    {
        MessageBox.Show("Please select an output directory first", "Error");
        return;
    }

    // the config xml is named after the first tag queued for extraction
    string rel_path = SID_list[extraction_list[0].datum_index] + "." + extraction_list[0].type;
    XmlTextWriter xw = new XmlTextWriter(textBox1.Text + "\\" + DATA_READ.Read_File_from_file_location(rel_path) + ".xml", Encoding.UTF8);
    xw.Formatting = Formatting.Indented;
    xw.WriteStartElement("config");

    for (int i = 0; i < extraction_list.Count; i++)
    {
        tagRef temp_tagref = extraction_list[i];
        int datum = temp_tagref.datum_index;
        string type = temp_tagref.type;

        if (datum == -1 || extracted_list.Contains(datum))
            continue;

        if (!File.Exists(Application.StartupPath + "\\plugins\\" + type + ".xml"))
        {
            log += "\nPlugin " + type + ".xml doesn't exist";
            extracted_list.Add(datum);
            continue;
        }

        if (!SID_list.ContainsKey(datum))
        {
            log += "\nCould not find stringID for datum_index " + datum.ToString("X");
            continue;
        }

        meta obj = new meta(datum, SID_list[datum], map_stream);
        obj.Rebase_meta(0x0);
        if (checkBox1.Checked)
        {
            obj.Null_StringID();
        }
        if (radioButton1.Checked)
        {
            // recursive extraction: queue every tagRef this meta points to;
            // the loop picks them up because extraction_list.Count is re-evaluated
            extraction_list.AddRange(obj.Get_all_tag_refs());
        }

        byte[] data = obj.Generate_meta_file();
        string path = textBox1.Text + "\\" + obj.Get_Path() + "." + obj.Get_Type();
        string directory = DATA_READ.ReadDirectory_from_file_location(path);

        // create the directory, then write the meta file
        System.IO.Directory.CreateDirectory(directory);
        using (FileStream fs = new FileStream(path, FileMode.Create))
        {
            fs.Write(data, 0, obj.Get_Total_size());
        }

        // write this tag's entry to the configuration xml
        xw.WriteStartElement("tag");
        xw.WriteStartElement("name");
        xw.WriteString(obj.Get_Path() + "." + type);
        xw.WriteEndElement();   // name
        xw.WriteStartElement("datum");
        xw.WriteString(datum.ToString("X"));
        xw.WriteEndElement();   // datum
        xw.WriteEndElement();   // tag

        log += "\nExtracted meta " + datum.ToString("X") + " to " + path;
        extracted_list.Add(datum);
    }

    // close the config element and the xml handle
    xw.WriteEndElement();
    xw.Dispose();

    // show the log, then close this dialog
    LogBox lb = new LogBox(log);
    lb.Show();
    this.Close();
}
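
// The config xml written by Extract_Click ends up shaped like:
//   <config>
//     <tag>
//       <name>objects\weapons\rifle\battle_rifle\battle_rifle.weap</name>   (hypothetical value)
//       <datum>12AB</datum>                                                 (hex datum_index)
//     </tag>
//     ...
//   </config>
// A minimal sketch of reading it back into (name, datum) pairs, assuming System.Xml.Linq is
// available; "Load_Extraction_Config" is a hypothetical helper, not part of this project.
static List<KeyValuePair<string, int>> Load_Extraction_Config(string config_path)
{
    var entries = new List<KeyValuePair<string, int>>();
    var doc = System.Xml.Linq.XDocument.Load(config_path);
    foreach (var tag in doc.Descendants("tag"))
    {
        string name = tag.Element("name").Value;
        int datum = int.Parse(tag.Element("datum").Value, NumberStyles.HexNumber);
        entries.Add(new KeyValuePair<string, int>(name, datum));
    }
    return entries;
}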
private void button1_Click(object sender, EventArgs e)
{
    string log = "\nCOMPILATION : ";
    // the file where the compiled metas are written
    StreamWriter sw = new StreamWriter(directory + "\\tags.meta");
    // the tag table data
    StreamWriter sw_t = new StreamWriter(directory + "\\tables.meta");
    // one 0x10-byte table entry per tag
    byte[] tables = new byte[0x10 * compile_list.Count];
    // index of custom metas
    int custom_table_index = 0;
    // the offset from the map_mem_base, e.g. 0x1511020 (lockout)
    int new_base = Int32.Parse(textBox1.Text.Substring(2), NumberStyles.HexNumber);

    foreach (injectRefs temp_ref in compile_list)
    {
        if (File.Exists(directory + "\\" + temp_ref.file_name))
        {
            // load the extracted meta into memory
            byte[] meta_bytes = File.ReadAllBytes(directory + "\\" + temp_ref.file_name);
            int size = meta_bytes.Length;

            // fix up its datum indexes and rebase it to the new offset
            meta obj = new meta(meta_bytes, temp_ref.type, size, temp_ref.file_name);
            log += obj.Update_datum_indexes(compile_list);
            obj.Rebase_meta(new_base);

            // append the patched meta to tags.meta
            sw.BaseStream.Write(meta_bytes, 0, size);

            // table entry: type, datum, offset, size
            DATA_READ.WriteTAG_TYPE_LE(temp_ref.type, custom_table_index * 0x10, tables);
            DATA_READ.WriteINT_LE(temp_ref.new_datum, custom_table_index * 0x10 + 4, tables);
            DATA_READ.WriteINT_LE(new_base, custom_table_index * 0x10 + 8, tables);
            DATA_READ.WriteINT_LE(size, custom_table_index * 0x10 + 0xC, tables);
            log += "\n Written tag " + temp_ref.file_name + " with new datum as " + temp_ref.new_datum.ToString("X");

            // advance the tag offset for the next meta
            new_base += size;
        }
        else
        {
            log += "\nFile doesn't exist : " + temp_ref.file_name;
        }
        custom_table_index++;
    }

    sw_t.BaseStream.Write(tables, 0, 0x10 * compile_list.Count);
    sw.Dispose();
    sw_t.Dispose();

    // show the log and close
    LogBox lb = new LogBox(log);
    lb.Show();
    this.Close();
}
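
// Each entry that button1_Click writes to tables.meta is 0x10 bytes, little-endian:
//   +0x0  tag type (four-character code)
//   +0x4  new datum index
//   +0x8  offset of the meta inside tags.meta (relative to the chosen base)
//   +0xC  size of the meta in bytes
// A minimal sketch of walking those entries back out, assuming a little-endian host and that
// DATA_READ.WriteTAG_TYPE_LE stores the four characters verbatim (if it reverses them for
// little-endian, the string must be reversed on read). "TableEntry" and "Read_Tag_Table"
// are hypothetical names used only for illustration.
struct TableEntry
{
    public string type;
    public int datum;
    public int offset;
    public int size;
}

static List<TableEntry> Read_Tag_Table(string tables_path)
{
    byte[] raw = File.ReadAllBytes(tables_path);
    var entries = new List<TableEntry>();
    for (int pos = 0; pos + 0x10 <= raw.Length; pos += 0x10)
    {
        entries.Add(new TableEntry
        {
            type = Encoding.ASCII.GetString(raw, pos, 4),
            datum = BitConverter.ToInt32(raw, pos + 4),
            offset = BitConverter.ToInt32(raw, pos + 8),
            size = BitConverter.ToInt32(raw, pos + 0xC)
        });
    }
    return entries;
}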
/// <summary>
/// Resyncs the desired type of tagRefs in accordance with the new map.
/// </summary>
void sync()
{
    OpenFileDialog ofd = new OpenFileDialog();
    MessageBox.Show("OPEN PARENT MAP", "GitGud");
    ofd.Filter = "Halo 2 Vista Map (*.map)|*.map";
    if (ofd.ShowDialog() != DialogResult.OK)
        return;
    string parent_map = ofd.FileName;

    MessageBox.Show("OPEN TARGET MAP", "GitGud");
    ofd.Filter = "Halo 2 Vista Map (*.map)|*.map";
    if (ofd.ShowDialog() != DialogResult.OK)
        return;
    string target_loc = ofd.FileName;

    string log = "LOG_BOX\n";
    List<injectRefs> resync_list = Get_Resync_List(parent_map, target_loc);

    foreach (injectRefs temp_ref in compile_list)
    {
        if (File.Exists(directory + "\\" + temp_ref.file_name))
        {
            // load the extracted meta into memory
            byte[] meta_bytes = File.ReadAllBytes(directory + "\\" + temp_ref.file_name);
            int size = meta_bytes.Length;

            // patch its datum indexes against the resynced list
            meta obj = new meta(meta_bytes, temp_ref.type, size, temp_ref.file_name);
            log += obj.Update_datum_indexes(resync_list, type_ref_list);

            // write the patched meta back over the original file
            File.WriteAllBytes(directory + "\\" + temp_ref.file_name, meta_bytes);
        }
        else
        {
            log += "\nFile doesn't exist : " + temp_ref.file_name;
        }
    }

    // persist the log next to the tags, then show it
    using (StreamWriter sw_1 = new StreamWriter(directory + "\\Tagref_resync_log.txt"))
    {
        sw_1.Write(log);
    }
    LogBox lb = new LogBox(log);
    lb.Show();
}
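
// sync() patches the extracted .meta files in place, so a bad resync list overwrites them
// with no way back. A minimal sketch of snapshotting the tag directory before running it;
// "Backup_Tag_Files" is a hypothetical helper, not something this project provides.
static void Backup_Tag_Files(string tag_directory)
{
    string backup_directory = tag_directory + "\\backup_" + DateTime.Now.ToString("yyyyMMdd_HHmmss");
    Directory.CreateDirectory(backup_directory);
    foreach (string file in Directory.GetFiles(tag_directory))
    {
        File.Copy(file, backup_directory + "\\" + Path.GetFileName(file), true);
    }
}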
private void _generate_cache()
{
    string log = "\nCOMPILATION : ";
    // the offset from the map_mem_base, e.g. 0x1511020 (lockout)
    int new_base = Int32.Parse(textBox1.Text.Substring(2), NumberStyles.HexNumber);
    int meta_size = 0x0;
    int tag_index = 0x0;
    // hold the patched metas (and their sizes) until the header and table are written
    Queue<byte[]> meta_list = new Queue<byte[]>();
    Queue<long> size_list = new Queue<long>();
    byte[] tables = new byte[0x10 * compile_list.Count];

    foreach (injectRefs temp_ref in compile_list)
    {
        if (File.Exists(directory + "\\" + temp_ref.file_name))
        {
            // load the extracted meta into memory
            byte[] meta_bytes = File.ReadAllBytes(directory + "\\" + temp_ref.file_name);
            int size = meta_bytes.Length;

            // fix up its datum indexes and rebase it to its final offset
            meta obj = new meta(meta_bytes, temp_ref.type, size, temp_ref.file_name);
            log += obj.Update_datum_indexes(compile_list, type_ref_list);
            obj.Rebase_meta(new_base + meta_size);

            meta_list.Enqueue(meta_bytes);
            size_list.Enqueue(size);

            // table entry: type, datum, offset, size
            DATA_READ.WriteTAG_TYPE_LE(temp_ref.type, tag_index * 0x10, tables);
            DATA_READ.WriteINT_LE(temp_ref.new_datum, tag_index * 0x10 + 4, tables);
            DATA_READ.WriteINT_LE(new_base + meta_size, tag_index * 0x10 + 8, tables);
            DATA_READ.WriteINT_LE(size, tag_index * 0x10 + 0xC, tables);
            log += "\n Written tag " + temp_ref.file_name + " with new datum as " + temp_ref.new_datum.ToString("X");

            // advance the running tag offset
            meta_size += size;
        }
        else
        {
            log += "\nFile doesn't exist : " + temp_ref.file_name;
        }
        tag_index++;
    }

    StreamWriter sw = new StreamWriter(directory + "\\tags.cache");
    byte[] temp = new byte[0x4];
    byte[] null_byte = { 0 };

    // header: three null-terminated section names, each followed by an offset and a size
    // "tag_table" section : starts right after the 0x34-byte header
    sw.BaseStream.Write(Encoding.ASCII.GetBytes("tag_table"), 0, "tag_table".Length);
    sw.BaseStream.Write(null_byte, 0, 1);
    DATA_READ.WriteINT_LE(0x34, 0, temp);
    sw.BaseStream.Write(temp, 0, 0x4);
    DATA_READ.WriteINT_LE(0x10 * compile_list.Count, 0, temp);
    sw.BaseStream.Write(temp, 0, 0x4);

    // "tag_data" section : the concatenated metas
    sw.BaseStream.Write(Encoding.ASCII.GetBytes("tag_data"), 0, "tag_data".Length);
    sw.BaseStream.Write(null_byte, 0, 1);
    DATA_READ.WriteINT_LE(0x34 + 0x10 * compile_list.Count, 0, temp);
    sw.BaseStream.Write(temp, 0, 0x4);
    DATA_READ.WriteINT_LE(meta_size, 0, temp);
    sw.BaseStream.Write(temp, 0, 0x4);

    // "tag_maps" section : the null-terminated scenario name
    sw.BaseStream.Write(Encoding.ASCII.GetBytes("tag_maps"), 0, "tag_maps".Length);
    sw.BaseStream.Write(null_byte, 0, 1);
    DATA_READ.WriteINT_LE(0x34 + 0x10 * compile_list.Count + meta_size, 0, temp);
    sw.BaseStream.Write(temp, 0, 0x4);
    DATA_READ.WriteINT_LE(tag_scenarios[0].Length + 1, 0, temp);
    sw.BaseStream.Write(temp, 0, 0x4);

    // body: tag table, then the metas, then the scenario name
    sw.BaseStream.Write(tables, 0, 0x10 * compile_list.Count);
    while (meta_list.Count != 0)
    {
        sw.BaseStream.Write(meta_list.Dequeue(), 0x0, (int)size_list.Dequeue());
    }
    sw.BaseStream.Write(Encoding.ASCII.GetBytes(tag_scenarios[0]), 0, tag_scenarios[0].Length);
    sw.BaseStream.Write(null_byte, 0, 1);
    sw.Dispose();

    // mention the type-referenced tags in the log
    log += "\ntype referenced tags are :";
    foreach (UnisonRefs uni_temp in type_ref_list)
    {
        log += "\nReferred " + uni_temp.type + " to " + uni_temp.new_datum.ToString("X") + " file : " + uni_temp.file_name;
    }

    // write the log to disk, then show it
    using (StreamWriter sw_log = new StreamWriter(directory + "\\compile_log.txt"))
    {
        sw_log.Write(log);
    }
    LogBox lb = new LogBox(log);
    lb.Show();
}
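
// The tags.cache layout written by _generate_cache is:
//   0x00  "tag_table\0" + int32 offset (0x34) + int32 size (0x10 * tag count)
//   0x12  "tag_data\0"  + int32 offset        + int32 size (total meta size)
//   0x23  "tag_maps\0"  + int32 offset        + int32 size (scenario name length + 1)
//   0x34  tag table, then the metas, then the null-terminated scenario name
// A minimal sketch of reading the three section headers back, assuming a little-endian host;
// "CacheSection" and "Read_Cache_Header" are hypothetical names used only for illustration.
struct CacheSection
{
    public string name;
    public int offset;
    public int size;
}

static List<CacheSection> Read_Cache_Header(string cache_path)
{
    byte[] raw = File.ReadAllBytes(cache_path);
    var sections = new List<CacheSection>();
    int pos = 0;
    for (int i = 0; i < 3; i++)
    {
        int name_end = Array.IndexOf(raw, (byte)0, pos);
        string name = Encoding.ASCII.GetString(raw, pos, name_end - pos);
        int offset = BitConverter.ToInt32(raw, name_end + 1);
        int size = BitConverter.ToInt32(raw, name_end + 5);
        sections.Add(new CacheSection { name = name, offset = offset, size = size });
        pos = name_end + 9;
    }
    return sections;
}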