// Register an imported file in the compression table.
// filenum == -1 appends, insert == true inserts at filenum, otherwise the
// entry at filenum is overwritten. complvl 0 = uncompressed, 5 = compressed.
public void Import(TERF terf, int filenum, bool insert, int newsize, int complvl)
{
    // Nothing to track: no comp table is loaded and the new file is uncompressed.
    if (!valid && complvl == 0)
    {
        return;
    }

    // First compressed file going into a dat with no comp table yet: build one.
    if (!valid && complvl == 5 && this.CompTable.Count == 0)
    {
        Init(terf);
    }

    ct_entry entry = new ct_entry(complvl, newsize);
    if (filenum == -1)
    {
        this.CompTable.Add(entry);             // Append as a new file.
    }
    else if (insert)
    {
        this.CompTable.Insert(filenum, entry); // Insert at the requested slot.
    }
    else
    {
        this.CompTable[filenum] = entry;       // Replace an existing file.
    }

    changed = true;    // Table now differs from what is on disk.
    needsfixed = true; // Length/validity must be recomputed before writing.
    valid = true;
}
// Default-construct an empty, stand-alone DAT (not flagged as a nested terf).
public DAT()
{
    isterf = false;
    bytecount = 0;
    grfx = new CustomBitmap();
    ParentTerf = new TERF();
}
// Write this file's payload into the dat stream.
// Null files emit nothing; otherwise exactly one of the three backing stores
// (compressed bytes, undefined raw bytes, parsed MMAP) is serialized.
public void Write(DAT dat, TERF terf, int count)
{
    if (nullfile)
    {
        return; // Null files occupy no bytes in the data table.
    }

    if (compressed_data.Count > 0)
    {
        // Write the whole buffer in one call instead of one byte at a time;
        // BinaryWriter.Write(byte[]) produces identical output.
        dat.binwriter.Write(this.compressed_data.ToArray());
    }
    else if (undefined_data.Count > 0)
    {
        dat.binwriter.Write(this.undefined_data.ToArray());
    }
    else if (mmap_data.Header.Files > 0)
    {
        mmap_data.Write(dat, count); // A parsed MMAP serializes itself.
    }
}
// Size of the chunk: 8-byte header plus one 8-byte entry per directory file,
// rounded up to the terf's file-padding boundary.
public int GetSize(TERF terf)
{
    int unpadded = 8 + this.DirTable.Count * 8;
    // Padding is included in the dir1 size.
    return unpadded + terf.GetPad(unpadded);
}
// Fix every contained data file (each gets its own index) and then
// recompute this chunk's total length.
public void Fix(TERF terf)
{
    for (int index = 0; index < DataFiles.Count; index++)
    {
        DataFiles[index].Fix(index);
    }
    this.datalength = (uint)GetSize(terf);
}
// Build a fresh comp table containing one "uncompressed, zero length"
// entry for every file the terf declares.
public void Init(TERF terf)
{
    this.length = 0;
    this.CompTable = new List<ct_entry>();
    for (int file = 0; file < terf.files; file++)
    {
        this.CompTable.Add(new ct_entry(0, 0));
    }
}
// Construct a Filetype wrapping a caller-supplied MMAP (e.g. a user import);
// size is derived from the wrapped content.
public Filetype(TERF terf, MMAP custom)
{
    mmap_data = custom;
    compressed_data = new List<byte>();
    undefined_data = new List<byte>();
    filetype = "";
    nullfile = false;
    changed = false;
    needsfixed = false; // Explicit for consistency with the (DAT, TERF) constructor.
    size = this.GetSize();
}
// Total size of the data chunk: 8-byte header (plus its padding) followed by
// every data file padded out to the terf's alignment boundary.
// NOTE(review): the original carried a "to do : fix" marker — verify the
// header padding term (terf.GetPad(8)) against the on-disk layout.
public int GetSize(TERF terf)
{
    int size = 8 + terf.GetPad(8);
    for (int c = 0; c < this.DataFiles.Count; c++)
    {
        int fileSize = this.DataFiles[c].GetSize(); // Compute once, not twice.
        size += fileSize + terf.GetPad(fileSize);
    }
    return size;
}
// Size of the COMP chunk, or 0 when there is no comp table at all.
public int GetSize(TERF terf)
{
    if (this.CompTable.Count == 0)
    {
        return 0;
    }
    int unpadded = 8 + this.CompTable.Count * 8; // Header + 8 bytes per entry.
    return unpadded + terf.GetPad(unpadded);
}
// Import a user-supplied MMAP into the data table as a new Filetype.
// NOTE(review): only position == -1 (append) is actually handled; any other
// position silently does nothing, yet 'changed' is still set — confirm intended.
public void Import(int position, TERF terf, MMAP custom)
{
    Filetype user = new Filetype(terf, custom);
    if (position == -1)
    {
        this.DataFiles.Add(user);
        // Keep the directory and compression tables in step with the new file
        // (complevel 0: imported MMAPs are stored uncompressed).
        terf.Dir1.Import(terf, position, false, user.size);
        terf.Comp.Import(terf, position, false, user.size, 0);
    }
    changed = true;
}
// Construct a Filetype by reading it straight from the dat stream
// (treated as uncompressed, with no known directory size).
public Filetype(DAT dat, TERF terf)
{
    mmap_data = new MMAP();
    compressed_data = new List<byte>();
    undefined_data = new List<byte>();
    filetype = "";
    size = 0;
    nullfile = false;
    changed = false;
    needsfixed = false;
    this.Read(dat, terf, false, -1);
}
// Read the DATA chunk: validate the magic id, then read every file listed in
// the terf's DIR1 table, honouring the COMP table's per-file compression flag.
public void Read(DAT dat, TERF terf)
{
    if (this.Data_id != dat.binreader.ReadUInt32())
    {
        // Wrong magic: rewind the 4 bytes just consumed and report the error.
        dat.binreader.BaseStream.Position -= 4;
        dat.errormsg = "Problem with DIR1 / COMP";
        return;
    }
    this.datalength = dat.binreader.ReadUInt32();
    dat.binreader.BaseStream.Position += (terf.GetPad(8)); // Advance through any needed padding
    #region Read Data files
    this.DataFiles = new List <Filetype>();
    for (int c = 0; c < terf.files; c++)
    {
        bool compressed = false;
        int size = -1;
        size = (int)terf.Dir1.DirTable[c].filelength; // set size from dir info
        if (terf.Comp.length > 0) // If there is a comp table
        {
            if (terf.Comp.CompTable[c].file_complevel == 5) // If this file is compressed
            {
                compressed = true;
            }
        }
        Filetype ft = new Filetype(); // Set up new instance of data type
        ft.Read(dat, terf, compressed, size); // Read in this data file
        if (dat.errormsg != "")
        {
            return; // Abort on the first unreadable file.
        }
        DataFiles.Add(ft); // Add to datafiles list
        if (size == 0)
        {
            ft.nullfile = true; // A zero-length dir entry marks a null file.
        }
        int pad = terf.GetPad(size);
        dat.binreader.BaseStream.Position += pad; // Skip through padding for this file
    }
    #endregion
}
// Write the DATA chunk: id, length, header padding, then each file padded to
// the terf's alignment, stepping the progress bar once per file.
public void Write(DAT dat, TERF terf, ProgressBar datprogress)
{
    dat.binwriter.Write(this.Data_id);
    dat.binwriter.Write(this.datalength);
    dat.WriteNulls(terf.GetPad(8)); // Pad out the 8-byte chunk header.
    for (int file = 0; file < terf.files; file++)
    {
        DataFiles[file].Write(dat, terf, file);
        // Pad this file out to the alignment boundary before the next one.
        dat.WriteNulls(terf.GetPad((int)terf.Dir1.DirTable[file].filelength));
        datprogress.PerformStep();
    }
}
// Serialize the DIR1 chunk, recomputing offsets first if anything changed.
public void Write(DAT dat, TERF terf)
{
    if (needsfixed)
    {
        this.Fix(terf); // Bring offsets/lengths up to date before writing.
    }
    dat.binwriter.Write(this.dir1_id);
    dat.binwriter.Write(this.length);
    foreach (direntry entry in this.DirTable)
    {
        dat.binwriter.Write(entry.datatable_offset);
        dat.binwriter.Write(entry.filelength);
    }
    // Pad the chunk (8-byte header + 8 bytes per entry) to the alignment boundary.
    dat.WriteNulls(terf.GetPad(8 + (this.DirTable.Count * 8)));
}
// Register an imported file in the directory table.
// filenum == -1 appends, insert == true inserts at filenum, otherwise the
// existing entry's length is updated in place (offsets are fixed up later).
public void Import(TERF terf, int filenum, bool insert, int newsize)
{
    if (filenum == -1)
    {
        this.DirTable.Add(new direntry(0, newsize));        // Append a new file.
    }
    else if (insert)
    {
        DirTable.Insert(filenum, new direntry(0, newsize)); // Insert at position.
    }
    else
    {
        DirTable[filenum].filelength = (uint)newsize;       // Replace an existing file.
    }
    changed = true;    // Table now differs from what is on disk.
    needsfixed = true; // Offsets must be recomputed before writing.
}
// Fix the comp table after an import. The table is only considered valid
// (and written back out) if at least one file is still compressed.
public void Fix(TERF terf)
{
    if (this.CompTable.Count == 0)
    {
        return; // No comp table to fix.
    }
    valid = false;
    this.length = 0;

    // Check file 0 to see if it is still compressed; if not, record it as
    // uncompressed since we know what this file is.
    // NOTE(review): 199 / 220 look like magic sizes for the known first file — confirm.
    if (terf.Data.DataFiles[0].compressed_data.Count == 199)
    {
        terf.Data.DataFiles[0].Fix(0);
    }
    if (terf.Data.DataFiles[0].mmap_data.Header.Files > 0)
    {
        this.CompTable[0].file_complevel = 0;
        this.CompTable[0].file_uncomplength = 220;
    }

    // Ignore the comp table unless there is at least one compressed file.
    for (int c = 0; c < terf.Data.DataFiles.Count; c++)
    {
        // BUGFIX: was 'c > this.CompTable.Count', which skipped the c == Count
        // case and let the CompTable[c] read below throw
        // ArgumentOutOfRangeException for the first file beyond the table.
        if (c >= this.CompTable.Count)
        {
            this.CompTable.Add(new ct_entry(0, terf.Data.DataFiles[c].size));
            // A file added to a terf that still has compression shouldn't
            // itself be compressed, but flag it correctly if it is.
            if (terf.Data.DataFiles[c].compressed_data.Count > 0)
            {
                this.CompTable[c].file_complevel = 5;
            }
        }
        if (this.CompTable[c].file_complevel == 5)
        {
            valid = true;
        }
    }

    if (valid)
    {
        this.length = (uint)this.GetSize(terf);
    }
    needsfixed = false;
}
// Serialize the COMP chunk; omitted entirely when no file is compressed.
public void Write(DAT dat, TERF terf)
{
    if (needsfixed)
    {
        Fix(terf); // Recompute validity and length before writing.
    }
    if (!valid)
    {
        return; // An invalid comp table is simply left out of the dat.
    }
    dat.binwriter.Write(this.comp_id);
    dat.binwriter.Write(this.length);
    foreach (ct_entry entry in this.CompTable)
    {
        dat.binwriter.Write(entry.file_complevel);
        dat.binwriter.Write(entry.file_uncomplength);
    }
    dat.WriteNulls(terf.GetPad((int)this.length)); // Pad to the alignment boundary.
}
// Rebuild DIR1 from the data files: recompute each file's offset and length.
// Specify terf to account for a nested terf in the data.
public void Fix(TERF terf)
{
    this.DirTable = new List<direntry>();
    // Running offset into the data chunk: 8-byte header plus its padding.
    uint offset = 8 + (uint)terf.GetPad(8);
    for (int c = 0; c < terf.Data.DataFiles.Count; c++)
    {
        int fileSize = terf.Data.DataFiles[c].GetSize(); // Compute once, not three times.
        direntry de = new direntry();
        de.datatable_offset = offset;
        de.filelength = (uint)fileSize;
        this.DirTable.Add(de);
        offset += (uint)(fileSize + terf.GetPad(fileSize));
    }
    this.length = (uint)(this.GetSize(terf));
    needsfixed = false;
}
// Read the DIR1 chunk: magic id, chunk length, then one 8-byte entry per file.
// NOTE(review): the entry count comes from dat.ParentTerf.files while the
// padding uses the passed-in terf — confirm both agree for nested terfs.
public void Read(DAT dat, TERF terf)
{
    if (this.dir1_id != dat.binreader.ReadUInt32())
    {
        // Wrong magic: rewind the 4 bytes just consumed and report the error.
        dat.binreader.BaseStream.Position -= 4;
        dat.errormsg = "Problem with Terf header";
        return;
    }
    this.DirTable = new List<direntry>();
    this.length = dat.binreader.ReadUInt32();
    for (int file = 0; file < dat.ParentTerf.files; file++)
    {
        direntry entry = new direntry();
        entry.datatable_offset = dat.binreader.ReadUInt32();
        entry.filelength = dat.binreader.ReadUInt32();
        this.DirTable.Add(entry);
    }
    // Padding after the entries is part of the dir1 size.
    dat.binreader.BaseStream.Position += terf.GetPad(8 + (this.DirTable.Count * 8));
}
// Read one data file of 'dirsize' bytes and classify it by content:
// NULL (zero length), COMP (raw compressed bytes), MMAP (parsed),
// or ???? (unknown — raw bytes kept and errormsg set).
public void Read(DAT dat, TERF terf, bool compressed, int dirsize)
{
    size = dirsize;
    if (dirsize == 0)
    {
        // Zero-length directory entry: nothing to read from the stream.
        filetype = "NULL";
        nullfile = true;
        return;
    }
    if (compressed)
    {
        // Compressed payloads are kept as raw bytes; no attempt to parse them.
        filetype = "COMP";
        for (int b = 0; b < dirsize; b++)
        {
            this.compressed_data.Add(dat.binreader.ReadByte());
        }
        return;
    }
    if (this.mmap_data.Read(dat) == true)
    {
        filetype = "MMAP"; // Recognized and parsed as an MMAP.
        return;
    }
    if (dirsize > 0)
    {
        // Unrecognized format: stash the raw bytes and flag the error.
        for (int b = 0; b < dirsize; b++)
        {
            this.undefined_data.Add(dat.binreader.ReadByte());
        }
        filetype = "????";
        dat.errormsg = "Format";
    }
}
// Read the COMP chunk if present; a mismatched id marks the table invalid
// and leaves the stream positioned where it was.
public void Read(DAT dat, TERF terf)
{
    this.CompTable = new List<ct_entry>();
    if (this.comp_id != dat.binreader.ReadUInt32())
    {
        // Not a COMP chunk: rewind the 4 bytes and carry on without one.
        dat.binreader.BaseStream.Position -= 4;
        valid = false;
        return;
    }
    valid = true;
    this.length = dat.binreader.ReadUInt32();
    for (int file = 0; file < dat.ParentTerf.files; file++)
    {
        ct_entry entry = new ct_entry();
        entry.file_complevel = dat.binreader.ReadUInt32();
        entry.file_uncomplength = dat.binreader.ReadUInt32();
        dat.bytecount += 8; // Track bytes consumed from the stream.
        this.CompTable.Add(entry);
    }
}