/// <summary>
/// Builds a file reference from a raw index row, deriving the hashed name
/// from the uint stored at <paramref name="hashNamePosition"/>.
/// </summary>
/// <param name="parent">Archive that owns this file entry.</param>
/// <param name="parentIndex">Index the entry was read from.</param>
/// <param name="rawIndexData">Raw uint values of the index row.</param>
/// <param name="hashNamePosition">Row position of the 32-bit name hash.</param>
/// <param name="offsetPosition">Row position of the (multiplied) data offset.</param>
/// <param name="lengthPosition">Row position of the data length.</param>
public File(BF.BigFile parent, BF.Index parentIndex, uint[] rawIndexData, int hashNamePosition, int offsetPosition, int lengthPosition)
{
    mNamePrefix = "";
    mNameSuffix = "";
    mParentBigFile = parent;
    mParentIndex = parentIndex;
    mRawIndexData = rawIndexData;

    // Render the raw 32-bit hash as a hex string for display/lookup.
    uint hashValue = rawIndexData[hashNamePosition];
    mHashedName = BD.HexConverter.ByteArrayToHexString(BD.BinaryConverter.UIntToByteArray(hashValue));

    // The stored offset is in units of the index's multiplier.
    mOffset = rawIndexData[offsetPosition] * mParentIndex.OffsetMultiplier;
    mLength = (int)rawIndexData[lengthPosition];

    CheckFileDataForSanity();
    if (mIsValidReference)
    {
        GetHeaderData();
        mType = GetFileType();
        mCanBeReplaced = true;
    }
    else
    {
        mType = BF.FileType.FromType(BF.FileType.FILE_TYPE_Invalid);
    }
    GetNameComponents();
}
/// <summary>
/// Reads the raw index entries, then creates and reads one sub-index per
/// entry, keeping mLoadedPercent up to date as each sub-index completes.
/// </summary>
public override void ReadIndex()
{
    ReadEntries();
    int numIndices = mEntries.GetUpperBound(0) + 1;
    Indices = new BF.Index[numIndices];

    for (int i = 0; i < numIndices; i++)
    {
        BF.FileIndex subIndex = (BF.FileIndex)BF.Index.CreateIndex(mParentBigFile, mSubIndexType);
        subIndex.Name = "Sub-Index-" + string.Format("{0:D3}", i);
        subIndex.RawIndexData = mEntries[i];
        subIndex.Offset = mEntries[i][mOffsetPosition];

        // Update progress before reading this sub-index. Skipping i == 0
        // also avoids dividing by zero when there is only one entry.
        if (i > 0)
        {
            mLoadedPercent = (((float)i / (float)(numIndices - 1)) * READ_CONTENT_PERCENT) + READ_INDEX_PERCENT;
        }

        // Read the sub-index on its own thread so the progress fields stay
        // observable, then block until it finishes. Join() replaces the
        // original Sleep(1)/IsAlive busy-wait spin.
        Thread siThread = new Thread(new ThreadStart(subIndex.ReadIndex));
        siThread.Start();
        siThread.Join();

        Indices[i] = subIndex;
        mFileCount += subIndex.FileCount;
    }
}
/// <summary>
/// Creates an index with the given identity and location after running the
/// shared Initialize() setup.
/// </summary>
/// <param name="name">Display name of the index.</param>
/// <param name="parentBigFile">Archive that owns the index.</param>
/// <param name="parentIndex">Enclosing index, if any.</param>
/// <param name="offset">Byte offset of the index within the archive.</param>
public Index(string name, BF.BigFile parentBigFile, BF.Index parentIndex, long offset)
{
    Initialize();

    Name = name;
    ParentBigFile = parentBigFile;
    ParentIndex = parentIndex;
    Offset = offset;
}
/// <summary>
/// Builds a file reference from a raw index row using a pre-computed hashed
/// name, with the data offset taken relative to <paramref name="originPosition"/>.
/// </summary>
/// <param name="parent">Archive that owns this file entry.</param>
/// <param name="parentIndex">Index the entry was read from.</param>
/// <param name="rawIndexData">Raw uint values of the index row.</param>
/// <param name="hashedName">Already-formatted hash string for the name.</param>
/// <param name="originPosition">Base offset the row's offset is relative to.</param>
/// <param name="offsetPosition">Row position of the (multiplied) data offset.</param>
/// <param name="lengthPosition">Row position of the data length.</param>
public File(BF.BigFile parent, BF.Index parentIndex, uint[] rawIndexData, string hashedName, int originPosition, int offsetPosition, int lengthPosition)
{
    // Initialize the name parts explicitly, matching the sibling overload
    // (the original version of this constructor left them unset).
    mNamePrefix = "";
    mNameSuffix = "";
    mParentBigFile = parent;
    mParentIndex = parentIndex;
    mRawIndexData = rawIndexData;
    mHashedName = hashedName;

    // Unlike the hash-deriving overload, this offset is relative to originPosition.
    mOffset = originPosition + (rawIndexData[offsetPosition] * mParentIndex.OffsetMultiplier);
    mLength = (int)rawIndexData[lengthPosition];

    CheckFileDataForSanity();
    if (mIsValidReference)
    {
        GetHeaderData();
        mType = GetFileType();
        mCanBeReplaced = true;
    }
    else
    {
        mType = BF.FileType.FromType(BF.FileType.FILE_TYPE_Invalid);
    }
    GetNameComponents();
}
/// <summary>
/// Loads the archive: reads the master index on a worker thread while
/// publishing progress via LoadState/mLoadedPercent, then builds the
/// directory tree.
/// </summary>
public void LoadBigFile()
{
    LoadState = "Reading indices";
    mMasterIndex = BF.Index.CreateMasterIndex(this);

    Thread indexThread = new Thread(new ThreadStart(mMasterIndex.ReadIndex));
    indexThread.Start();

    // Poll for completion so the progress fields stay fresh for observers.
    // Join(5) waits up to 5 ms per iteration instead of the original
    // Sleep(5)/IsAlive busy-wait spin.
    do
    {
        LoadState = "Reading index contents (" + (int)mMasterIndex.mLoadedPercent + "%)";
        // Scale to 98% so the final sort/done steps have visible headroom.
        mLoadedPercent = (int)(mMasterIndex.mLoadedPercent * 0.98);
    } while (!indexThread.Join(5));

    LoadState = "Sorting directories";
    BuildMasterDirectory();
    mLoadedPercent = 100;
    LoadState = "Done";
}
/// <summary>
/// Builds a Blood Omen 2 wrapped-file reference with a pre-computed hashed
/// name and an absolute offset/length.
/// </summary>
/// <param name="parent">Archive that owns this file entry.</param>
/// <param name="parentIndex">Index the entry was read from.</param>
/// <param name="rawIndexData">Raw uint values of the index row.</param>
/// <param name="hashedName">Already-formatted hash string for the name.</param>
/// <param name="offset">Absolute byte offset of the file data.</param>
/// <param name="length">Length of the file data in bytes.</param>
public BloodOmen2WrappedFile(BF.BigFile parent, BF.Index parentIndex, uint[] rawIndexData, string hashedName, int offset, int length)
    : base()
{
    mParentBigFile = parent;
    mParentIndex = parentIndex;
    mRawIndexData = rawIndexData;
    mHashedName = hashedName;
    mOffset = offset;
    mLength = length;

    CheckFileDataForSanity();
    if (mIsValidReference)
    {
        GetHeaderData();
        mType = GetFileType();
        mCanBeReplaced = true;
    }
    else
    {
        mType = BF.FileType.FromType(BF.FileType.FILE_TYPE_Invalid);
    }
    GetNameComponents();
}
/// <summary>
/// Pass-through constructor; all setup happens in the base file index.
/// </summary>
public PandemoniumFileIndex(string name, BF.BigFile parentBigFile, BF.Index parentIndex, long offset)
    : base(name, parentBigFile, parentIndex, offset)
{
}
/// <summary>
/// Constructs an index-of-indices; the concrete sub-index type is not known
/// at construction time, so it starts as Unknown.
/// </summary>
public IndexIndex(string name, BF.BigFile parentBigFile, BF.Index parentIndex, long offset)
    : base(name, parentBigFile, parentIndex, offset)
{
    mSubIndexType = IndexType.Unknown;
}
/// <summary>
/// Pass-through constructor for a Soul Reaver prototype 1 compressed file;
/// all setup happens in the compressed-file base class.
/// </summary>
public SR1Proto1CompressedFile(BF.BigFile parent, BF.Index parentIndex, uint[] rawIndexData, string hashedName, int offsetPosition, int lengthPosition, int compressedLengthPosition)
    : base(parent, parentIndex, rawIndexData, hashedName, offsetPosition, lengthPosition, compressedLengthPosition)
{
}
/// <summary>
/// Pass-through constructor; all setup happens in the base file index.
/// </summary>
public WhiplashBigFileFileIndex(string name, BF.BigFile parentBigFile, BF.Index parentIndex, long offset)
    : base(name, parentBigFile, parentIndex, offset)
{
}
/// <summary>
/// Wraps a file entry whose data is stored compressed: reads the compressed
/// size from the index row and marks the entry as non-replaceable.
/// </summary>
/// <param name="compressedLengthPosition">Row position of the compressed size.</param>
public CompressedFile(BF.BigFile parent, BF.Index parentIndex, uint[] rawIndexData, string hashedName, int offsetPosition, int lengthPosition, int compressedLengthPosition)
    : base(parent, parentIndex, rawIndexData, hashedName, offsetPosition, lengthPosition)
{
    mCompressedLength = (int)rawIndexData[compressedLengthPosition];

    // Compressed entries cannot currently be swapped out in place.
    mCanBeReplaced = false;
}
/// <summary>
/// File index for the Soul Reaver prototype 1 format; its compressed-length
/// field sits at position 2 of each index row.
/// </summary>
public SR1Proto1FileIndex(string name, BF.BigFile parentBigFile, BF.Index parentIndex, long offset)
    : base(name, parentBigFile, parentIndex, offset)
{
    mCompressedLengthPosition = 2;
}
/// <summary>
/// File index whose entries carry a compressed length; by default that field
/// sits at position 0 of each index row.
/// </summary>
public FileIndexWithCompressedFiles(string name, BF.BigFile parentBigFile, BF.Index parentIndex, long offset)
    : base(name, parentBigFile, parentIndex, offset)
{
    mCompressedLengthPosition = 0;
}
/// <summary>
/// Pass-through constructor; all setup happens in the base index class.
/// </summary>
public MadDashRacingBigFileIndex(string name, BF.BigFile parentBigFile, BF.Index parentIndex, long offset)
    : base(name, parentBigFile, parentIndex, offset)
{
}
/// <summary>
/// File index for the Soul Reaver 2 Air Forge demo; its compressed-length
/// field sits at position 3 of each index row.
/// </summary>
public SoulReaver2AirForgeDemoFileIndex(string name, BF.BigFile parentBigFile, BF.Index parentIndex, long offset)
    : base(name, parentBigFile, parentIndex, offset)
{
    mCompressedLengthPosition = 3;
}
/// <summary>
/// File index for the PAL PlayStation release of Soul Reaver; sets the XOR
/// masks used to de-obfuscate 16- and 32-bit index values.
/// </summary>
public SoulReaverPlaystationPALFileIndex(string name, BF.BigFile parentBigFile, BF.Index parentIndex, long offset)
    : base(name, parentBigFile, parentIndex, offset)
{
    // PAL-specific obfuscation keys (the 32-bit mask is the 16-bit mask repeated).
    _XorValue16 = 0xB722;
    _XorValue32 = 0xB722B722;
}
/// <summary>
/// Pass-through constructor; all setup happens in the base file index.
/// </summary>
public FileIndexWithSeparateHashes(string name, BF.BigFile parentBigFile, BF.Index parentIndex, long offset)
    : base(name, parentBigFile, parentIndex, offset)
{
}
/// <summary>
/// Appends a CSV dump of <paramref name="whichIndex"/> — its optional
/// separate hash table plus the raw values of every entry (hex and decimal)
/// — to the file at <paramref name="targetPath"/>, then recurses into its
/// sub-indices.
/// </summary>
/// <param name="whichIndex">Index to dump.</param>
/// <param name="targetPath">Path of the CSV file to append to.</param>
protected void ExportIndexRecursive(BF.Index whichIndex, string targetPath)
{
    // using-blocks guarantee the stream is closed even if a write throws
    // (the original leaked both handles on an exception).
    using (FileStream oStream = new FileStream(targetPath, FileMode.Append, FileAccess.Write))
    using (StreamWriter oWriter = new StreamWriter(oStream))
    {
        // If this index keeps a separate table of filename hashes, dump it
        // first. A type test replaces the original cast-and-catch pattern.
        BF.FileIndexWithSeparateHashes hashIndex = whichIndex as BF.FileIndexWithSeparateHashes;
        if (hashIndex != null)
        {
            oWriter.WriteLine("Filename Hashes");
            oWriter.WriteLine("Entry Number,Hex,Dec");
            int hashNum = 0;
            foreach (string currentHash in hashIndex.Hashes)
            {
                oWriter.WriteLine(hashNum.ToString() + "," + currentHash);
                hashNum++;
            }
        }

        // Dump the raw contents of the current index, once in hex and once
        // in zero-padded decimal. StringBuilder avoids the O(n^2) string
        // concatenation of the original loop.
        oWriter.WriteLine("Index Name, Entry Number, Values in Hex, Values in Dec");
        int entryNum = 0;
        foreach (uint[] currentEntry in whichIndex.Entries)
        {
            System.Text.StringBuilder info = new System.Text.StringBuilder();
            info.Append(whichIndex.Name).Append(",").Append(entryNum).Append(",");
            foreach (uint currentValue in currentEntry)
            {
                info.AppendFormat("{0:X8},", currentValue);
            }
            info.Append(" ,");
            foreach (uint currentValue in currentEntry)
            {
                info.AppendFormat("{0:000000000000},", currentValue);
            }
            oWriter.WriteLine(info.ToString());
            entryNum++;
        }
    }

    // Recurse into sub-indices. NOTE: >= 0 (any non-empty array) replaces
    // the original > 0, which silently skipped an index that had exactly
    // one sub-index.
    if ((whichIndex.Indices != null) && (whichIndex.Indices.GetUpperBound(0) >= 0))
    {
        foreach (BF.Index nextIndex in whichIndex.Indices)
        {
            ExportIndexRecursive(nextIndex, targetPath);
        }
    }
}