// TODO(original author): this was marked "to do : fix" — the computation matches how
// Write/Fix pad each data file, but confirm against the on-disk format.
/// <summary>
/// Returns the on-disk size of the DATA chunk: the 8-byte header (id + length)
/// padded to the terf alignment, plus each data file's size padded the same way.
/// </summary>
public int GetSize(TERF terf)
{
    int size = 8 + terf.GetPad(8);
    for (int c = 0; c < this.DataFiles.Count; c++)
    {
        // Hoisted: GetSize() was previously called twice per file.
        int fileSize = this.DataFiles[c].GetSize();
        size += fileSize + terf.GetPad(fileSize);
    }
    return size;
}
/// <summary>
/// Reads the DATA chunk: validates the chunk id (rewinding the 4-byte read and
/// setting dat.errormsg on mismatch), reads the data length, skips the header
/// padding, then reads one Filetype per file listed in the terf. File sizes come
/// from the DIR1 table; a COMP-table level of 5 marks a file as compressed.
/// Returns early if any file read reports an error via dat.errormsg.
/// </summary>
public void Read(DAT dat, TERF terf)
{
    if (this.Data_id != dat.binreader.ReadUInt32())
    {
        dat.binreader.BaseStream.Position -= 4; // rewind the id we just consumed
        dat.errormsg = "Problem with DIR1 / COMP";
        return;
    }

    this.datalength = dat.binreader.ReadUInt32();
    dat.binreader.BaseStream.Position += terf.GetPad(8); // advance through any needed padding

    #region Read Data files
    this.DataFiles = new List<Filetype>();
    for (int c = 0; c < terf.files; c++)
    {
        // Size from dir info; compressed when a COMP table exists and flags level 5.
        // (Previously: dead `size = -1` init and two nested ifs — behavior unchanged.)
        int size = (int)terf.Dir1.DirTable[c].filelength;
        bool compressed = terf.Comp.length > 0 && terf.Comp.CompTable[c].file_complevel == 5;

        Filetype ft = new Filetype(); // set up new instance of data type
        ft.Read(dat, terf, compressed, size); // read in this data file
        if (dat.errormsg != "")
        {
            return; // propagate the read error to the caller
        }

        DataFiles.Add(ft); // add to datafiles list
        if (size == 0)
        {
            ft.nullfile = true; // zero-length entries are placeholders
        }

        dat.binreader.BaseStream.Position += terf.GetPad(size); // skip through padding for this file
    }
    #endregion
}
/// <summary>
/// Writes the DATA chunk: id, length, header padding, then every data file,
/// each followed by null padding to the terf alignment. Steps the progress bar
/// once per file written.
/// </summary>
public void Write(DAT dat, TERF terf, ProgressBar datprogress)
{
    // Chunk header: id + length, padded out to the terf alignment.
    dat.binwriter.Write(this.Data_id);
    dat.binwriter.Write(this.datalength);
    dat.WriteNulls(terf.GetPad(8));

    for (int index = 0; index < terf.files; index++)
    {
        DataFiles[index].Write(dat, terf, index);
        // Pad each file using the length recorded in the DIR1 table.
        int fileLength = (int)terf.Dir1.DirTable[index].filelength;
        dat.WriteNulls(terf.GetPad(fileLength));
        datprogress.PerformStep();
    }
}
/// <summary>
/// Returns the on-disk size of the DIR1 chunk: an 8-byte header plus 8 bytes
/// per directory entry, padded out to a multiple of the terf file padding.
/// Padding is included in the DIR1 size.
/// </summary>
public int GetSize(TERF terf)
{
    int unpadded = 8 + (this.DirTable.Count * 8); // header + one 8-byte entry per file
    return unpadded + terf.GetPad(unpadded);
}
/// <summary>
/// Returns the on-disk size of the COMP chunk, or 0 when the compression table
/// is empty (no COMP chunk). Size is an 8-byte header plus 8 bytes per entry,
/// padded to the terf alignment.
/// </summary>
public int GetSize(TERF terf)
{
    if (this.CompTable.Count == 0)
    {
        return 0; // nothing to write
    }

    int unpadded = 8 + this.CompTable.Count * 8;
    return unpadded + terf.GetPad(unpadded);
}
/// <summary>
/// Rebuilds the DIR1 table from the current data files. The terf is passed
/// explicitly to account for nested terfs in data. Offsets start just past the
/// padded 8-byte DATA header and advance by each file's padded size; finally
/// refreshes this.length and clears the needsfixed flag.
/// </summary>
public void Fix(TERF terf)
{
    this.DirTable = new List<direntry>();
    uint holder = 8 + (uint)terf.GetPad(8); // holder for current offset

    for (int c = 0; c < terf.Data.DataFiles.Count; c++)
    {
        // Hoisted: GetSize() was previously computed three times per entry.
        int fileSize = terf.Data.DataFiles[c].GetSize();

        direntry de = new direntry();
        de.datatable_offset = holder;
        de.filelength = (uint)fileSize;
        this.DirTable.Add(de);

        holder += (uint)fileSize + (uint)terf.GetPad(fileSize); // advance past file + padding
    }

    this.length = (uint)this.GetSize(terf);
    needsfixed = false;
}
/// <summary>
/// Writes the DIR1 chunk: id, length, one (offset, length) pair per directory
/// entry, then null padding so the chunk ends on the terf alignment.
/// Rebuilds the table first if it is flagged stale.
/// </summary>
public void Write(DAT dat, TERF terf)
{
    if (needsfixed)
    {
        this.Fix(terf); // table is stale — rebuild before writing
    }

    dat.binwriter.Write(this.dir1_id);
    dat.binwriter.Write(this.length);

    foreach (direntry entry in this.DirTable)
    {
        dat.binwriter.Write(entry.datatable_offset);
        dat.binwriter.Write(entry.filelength);
    }

    // Pad based on the unpadded chunk size: 8-byte header + 8 bytes per entry.
    dat.WriteNulls(terf.GetPad((this.DirTable.Count * 8) + 8));
}
/// <summary>
/// Writes the COMP chunk: id, length, then one (compression level,
/// uncompressed length) pair per table entry, followed by null padding.
/// Rebuilds first if stale; emits nothing when the chunk is not valid.
/// </summary>
public void Write(DAT dat, TERF terf)
{
    if (needsfixed)
    {
        Fix(terf);
    }
    if (!valid)
    {
        return; // no COMP chunk to emit
    }

    dat.binwriter.Write(this.comp_id);
    dat.binwriter.Write(this.length);

    foreach (ct_entry entry in this.CompTable)
    {
        dat.binwriter.Write(entry.file_complevel);
        dat.binwriter.Write(entry.file_uncomplength);
    }

    // NOTE(review): padding here derives from this.length, whereas the DIR1 writer
    // derives it from the entry count — presumably equivalent; confirm.
    dat.WriteNulls(terf.GetPad((int)this.length));
}
/// <summary>
/// Reads the DIR1 chunk: validates the chunk id (rewinding the 4-byte read and
/// setting dat.errormsg on mismatch), reads the chunk length, then one
/// (offset, length) pair per file, and finally skips the trailing padding
/// that is part of the DIR1 size.
/// </summary>
public void Read(DAT dat, TERF terf)
{
    if (this.dir1_id != dat.binreader.ReadUInt32())
    {
        dat.binreader.BaseStream.Position -= 4; // rewind the id we just consumed
        dat.errormsg = "Problem with Terf header";
        return;
    }

    this.DirTable = new List<direntry>();
    this.length = dat.binreader.ReadUInt32();

    // NOTE(review): entry count comes from dat.ParentTerf.files while the padding
    // below uses the terf argument — presumably the same terf; confirm for nesting.
    for (int c = 0; c < dat.ParentTerf.files; c++)
    {
        // Initializer fields are evaluated in textual order, so the two
        // ReadUInt32 calls happen in the same order as before.
        direntry de = new direntry
        {
            datatable_offset = dat.binreader.ReadUInt32(),
            filelength = dat.binreader.ReadUInt32(),
        };
        this.DirTable.Add(de);
    }

    dat.binreader.BaseStream.Position += terf.GetPad(8 + (this.DirTable.Count * 8)); // padding is part of the dir1 size
}