/// <summary>
/// Rebases the current meta data to a newer memory address
/// (affects reflexive, extended and data-ref fields only).
/// </summary>
/// <param name="new_base">the new memory address to which the meta has to be rebased</param>
public void Rebase_meta(int new_base)
{
    // Shift every reflexive pointer by the delta between the old base and the new one.
    foreach (int off in ref_reflexive)
    {
        int shifted = new_base + (DATA_READ.ReadINT_LE(off + 4, data) - mem_off);
        DATA_READ.WriteINT_LE(shifted, off + 4, data);
    }

    // Data-ref pointers get exactly the same treatment.
    foreach (int off in ref_data)
    {
        int shifted = new_base + (DATA_READ.ReadINT_LE(off + 4, data) - mem_off);
        DATA_READ.WriteINT_LE(shifted, off + 4, data);
    }

    // Extended metas (when present) are laid out back-to-back right after this
    // meta, so rebase each of them first, then patch the fields pointing at them.
    if (list_extended != null)
    {
        int running_base = new_base + size;
        foreach (int key in list_extended.Keys.ToList<int>())
        {
            extended_meta ext = list_extended[key];
            ext.Rebase_meta(running_base);
            running_base += ext.Get_Total_size();
        }

        // Re-point every extended-ref field at its freshly rebased meta.
        foreach (int field_off in ref_extended.Keys.ToList<int>())
        {
            extended_meta ext = list_extended[ref_extended[field_off]];
            DATA_READ.WriteINT_LE(ext.Get_mem_addr(), field_off + 4, data);
        }
    }

    // Remember the base this meta is now compiled against.
    mem_off = new_base;
}
/// <summary>
/// Nulls all the stringIDs in the concerned meta and its extended metas.
/// </summary>
public void Null_StringID()
{
    // first null all of this meta's own string ids
    foreach (int temp_off in ref_stringID)
    {
        DATA_READ.WriteINT_LE(0x0, temp_off, data);
    }
    // then recurse into the extended metas, if there are any.
    // Guard against null for consistency with Rebase_meta, which treats
    // list_extended as optional — without this check the method throws
    // NullReferenceException on metas that have no extended data.
    if (list_extended != null)
    {
        List<int> keys = list_extended.Keys.ToList<int>();
        foreach (int temp_key in keys)
        {
            extended_meta temp_meta = list_extended[temp_key];
            temp_meta.Null_StringID();
        }
    }
}
/// <summary>
/// Compiles every tag in compile_list into directory\tags.meta and writes the
/// matching 0x10-bytes-per-entry tag table to directory\tables.meta.
/// </summary>
private void button1_Click(object sender, EventArgs e)
{
    string log = "\nCOMPILATION : ";
    // table entry layout: [type][new datum][mem address][size], 0x10 bytes each
    byte[] tables = new byte[0x10 * compile_list.Count];
    // index of custom metas (advances even for missing files, see below)
    int custom_table_index = 0;
    // the offset from the map_mem_base 0x1511020(lockout); textBox1 holds a "0x...." string
    int new_base = Int32.Parse(textBox1.Text.Substring(2), NumberStyles.HexNumber);

    // using-declarations guarantee both output files are flushed and closed even
    // if something throws mid-compile (the original writers leaked on exception).
    using (StreamWriter sw = new StreamWriter(directory + "\\tags.meta"))
    using (StreamWriter sw_t = new StreamWriter(directory + "\\tables.meta"))
    {
        foreach (injectRefs temp_ref in compile_list)
        {
            string file_path = directory + "\\" + temp_ref.file_name;
            if (File.Exists(file_path))
            {
                // ReadAllBytes replaces the FileMode.Append size hack plus a single
                // unchecked Stream.Read (which may legally return fewer bytes than
                // requested, silently truncating the meta).
                byte[] meta_data = File.ReadAllBytes(file_path);
                int size = meta_data.Length;

                meta obj = new meta(meta_data, temp_ref.type, size, temp_ref.file_name);
                log += obj.Update_datum_indexes(compile_list);
                obj.Rebase_meta(new_base);

                // write the rebased meta followed by its table entry
                sw.BaseStream.Write(meta_data, 0, size);
                DATA_READ.WriteTAG_TYPE_LE(temp_ref.type, custom_table_index * 0x10, tables);
                DATA_READ.WriteINT_LE(temp_ref.new_datum, custom_table_index * 0x10 + 4, tables);
                DATA_READ.WriteINT_LE(new_base, custom_table_index * 0x10 + 8, tables);
                DATA_READ.WriteINT_LE(size, custom_table_index * 0x10 + 0xC, tables);

                log += "\n Written tag " + temp_ref.file_name + " with new datum as " + temp_ref.new_datum.ToString("X");
                // next tag lands right after this one
                new_base += size;
            }
            else
            {
                log += "\nFile doesnt exists : " + temp_ref.file_name;
            }
            // NOTE: the index advances even for missing files, matching the
            // original layout — the missing tag's table slot stays zeroed.
            custom_table_index++;
        }
        sw_t.BaseStream.Write(tables, 0, 0x10 * compile_list.Count);
    }

    // lets launch the log box
    LogBox lb = new LogBox(log);
    lb.Show();
    this.Close();
}
/// <summary>
/// Updates the datum indexes according to the list supplied.
/// </summary>
/// <param name="tag_list">old-datum to new-datum mapping to apply</param>
/// <returns>return a log about different encounters</returns>
public string Update_datum_indexes(List<injectRefs> tag_list)
{
    string log = "\nUPDATE DATUM : " + path;
    // Regular TagRefs keep their datum 4 bytes into the field...
    foreach (int temp_off in ref_tags)
    {
        log += _patch_datum(temp_off + 4, tag_list);
    }
    // ...while WCtagRefs keep it right at the field offset.
    foreach (int temp_off in ref_WCtags)
    {
        log += _patch_datum(temp_off, tag_list);
    }
    return (log);
}

/// <summary>
/// Rewrites the datum stored at data_off with its replacement from tag_list.
/// First match wins; later duplicate matches are only logged.
/// </summary>
/// <param name="data_off">absolute offset of the datum inside the meta data</param>
/// <param name="tag_list">old-datum to new-datum mapping to search</param>
/// <returns>log fragment describing what happened at this offset</returns>
private string _patch_datum(int data_off, List<injectRefs> tag_list)
{
    string log = "";
    int temp_old_datum = DATA_READ.ReadINT_LE(data_off, data);
    bool sucess = false;
    foreach (injectRefs temp_ref in tag_list)
    {
        if (temp_ref.old_datum == temp_old_datum)
        {
            // we found a match
            if (!sucess)
            {
                DATA_READ.WriteINT_LE(temp_ref.new_datum, data_off, data);
                log += "\nSuccesfully refered " + temp_old_datum.ToString("X") + " to " + temp_ref.new_datum.ToString("X");
                sucess = true;
            }
            else
            {
                log += "\nMultiple occurences of old datum " + temp_ref.old_datum + " has been found";
            }
        }
    }
    if (!sucess)
    {
        log += "\nCouldnot find reference to " + temp_old_datum.ToString("X");
    }
    return log;
}
/// <summary>
/// Compiles every tag in compile_list into directory\tags.cache.
/// Cache layout: three section headers ("tag_table", "tag_data", "tag_maps",
/// each a NUL-terminated name followed by [offset][length] ints), then the
/// 0x10-bytes-per-entry tag table, the rebased metas back-to-back, and finally
/// the NUL-terminated scenario path.
/// </summary>
private void _generate_cache()
{
    string log = "\nCOMPILATION : ";
    // the offset from the map_mem_base 0x1511020(lockout); textBox1 holds a "0x...." string
    int new_base = Int32.Parse(textBox1.Text.Substring(2), NumberStyles.HexNumber);
    int meta_size = 0x0;
    int tag_index = 0x0;
    // rebased metas and their sizes, kept in tag-table order
    Queue<byte[]> meta_list = new Queue<byte[]>();
    Queue<long> size_list = new Queue<long>();
    byte[] tables = new byte[0x10 * compile_list.Count];

    foreach (injectRefs temp_ref in compile_list)
    {
        string file_path = directory + "\\" + temp_ref.file_name;
        if (File.Exists(file_path))
        {
            // ReadAllBytes replaces the FileMode.Append size hack plus a single
            // unchecked Stream.Read (which may legally return fewer bytes than
            // requested, silently truncating the meta).
            byte[] meta_data = File.ReadAllBytes(file_path);
            long size = meta_data.Length;

            // rebase the meta to its final in-map address
            meta obj = new meta(meta_data, temp_ref.type, (int)size, temp_ref.file_name);
            log += obj.Update_datum_indexes(compile_list, type_ref_list);
            obj.Rebase_meta(new_base + meta_size);
            meta_list.Enqueue(meta_data); //add to the meta list
            size_list.Enqueue(size);      //add to the size_list

            // tag table entry: [type][new datum][mem address][size]
            DATA_READ.WriteTAG_TYPE_LE(temp_ref.type, tag_index * 0x10, tables);
            DATA_READ.WriteINT_LE(temp_ref.new_datum, tag_index * 0x10 + 4, tables);
            DATA_READ.WriteINT_LE(new_base + meta_size, tag_index * 0x10 + 8, tables);
            DATA_READ.WriteINT_LE((int)size, tag_index * 0x10 + 0xC, tables);

            log += "\n Written tag " + temp_ref.file_name + " with new datum as " + temp_ref.new_datum.ToString("X");
            //increase the tag_offset
            meta_size += (int)size;
        }
        else
        {
            log += "\nFile doesnt exists : " + temp_ref.file_name;
        }
        // index advances even for missing files so their table slot stays zeroed
        tag_index++;
    }

    byte[] temp = new byte[0x4];
    byte[] lol = { 0 }; // single NUL terminator for the section names / scenario path
    // using guarantees the cache file is flushed and closed even on exception
    using (StreamWriter sw = new StreamWriter(directory + "\\tags.cache"))
    {
        // section header: "tag_table" + NUL + offset + length
        sw.BaseStream.Write(Encoding.ASCII.GetBytes("tag_table"), 0, "tag_table".Length);
        sw.BaseStream.Write(lol, 0, 1);
        DATA_READ.WriteINT_LE(0x34, 0, temp);
        sw.BaseStream.Write(temp, 0, 0x4);
        DATA_READ.WriteINT_LE(0x10 * compile_list.Count, 0, temp);
        sw.BaseStream.Write(temp, 0, 0x4);

        // section header: "tag_data" + NUL + offset + length
        sw.BaseStream.Write(Encoding.ASCII.GetBytes("tag_data"), 0, "tag_data".Length);
        sw.BaseStream.Write(lol, 0, 1);
        DATA_READ.WriteINT_LE(0x34 + 0x10 * compile_list.Count, 0, temp);
        sw.BaseStream.Write(temp, 0, 0x4);
        DATA_READ.WriteINT_LE(meta_size, 0, temp);
        sw.BaseStream.Write(temp, 0, 0x4);

        // section header: "tag_maps" + NUL + offset + length
        sw.BaseStream.Write(Encoding.ASCII.GetBytes("tag_maps"), 0, "tag_maps".Length);
        sw.BaseStream.Write(lol, 0, 1);
        DATA_READ.WriteINT_LE(0x34 + 0x10 * compile_list.Count + meta_size, 0, temp);
        sw.BaseStream.Write(temp, 0, 0x4);
        DATA_READ.WriteINT_LE(tag_scenarios[0].Length + 1, 0, temp);
        sw.BaseStream.Write(temp, 0, 0x4);

        // section payloads: table, metas, scenario path
        sw.BaseStream.Write(tables, 0, 0x10 * compile_list.Count);
        while (meta_list.Count != 0)
        {
            sw.BaseStream.Write(meta_list.Dequeue(), 0x0, (int)size_list.Dequeue());
        }
        sw.BaseStream.Write(Encoding.ASCII.GetBytes(tag_scenarios[0]), 0, tag_scenarios[0].Length);
        sw.BaseStream.Write(lol, 0, 1);
    }

    //atleast mention the universally acclaimed tag
    log += "\ntype referenced tags are :";
    foreach (UnisonRefs uni_temp in type_ref_list)
    {
        log += "\nReffered " + uni_temp.type + " to " + uni_temp.new_datum.ToString("X") + " file : " + uni_temp.file_name;
    }

    // writing log — the original never disposed this writer, so compile_log.txt
    // could end up empty (unflushed) or left locked
    using (StreamWriter log_writer = new StreamWriter(directory + "\\compile_log.txt"))
    {
        log_writer.Write(log);
    }

    //lets launch the log box
    LogBox lb = new LogBox(log);
    lb.Show();
}