// Returns the number of whole clusters a byte count (or span of that many
// bytes starting on a cluster boundary) occupies — i.e. a ceiling division
// of `value` by the partition's cluster size.
uint ClusterSpanned(long value)
{
    long clusterSize = xFile.PartitionInfo.ClusterSize;
    // Round up to the next cluster boundary unless `value` already sits on
    // one; without the round-up the division below would under-count by one
    // (e.g. 0x689 rounds down to 0, and 0 / clusterSize == 0).
    long roundedUp = (value % clusterSize == 0)
        ? value
        : VariousFunctions.DownToNearestCluster(value, clusterSize) + clusterSize;
    return (uint)(roundedUp / clusterSize);
}
// Translates an offset within this file's logical data stream into the
// corresponding absolute offset on the underlying device, walking the
// (possibly non-contiguous) cluster chain in BlocksOccupied.
// BUGFIX: removed a duplicated, syntactically broken declaration of
// SizeInCluster (unbalanced parenthesis, missing semicolon, redeclared
// variable) that prevented compilation.
long GetRealSectorOffset(long off)
{
    // Offset of `off` within its cluster, rounded down to the 0x200-byte
    // sector boundary the device is read on.
    long SizeInCluster = VariousFunctions.DownToNearest200(off - VariousFunctions.DownToNearestCluster(off, xFile.PartitionInfo.ClusterSize));
    // Index, within this file's allocation chain, of the cluster holding `off`.
    uint Cluster = (uint)(VariousFunctions.DownToNearestCluster(off, xFile.PartitionInfo.ClusterSize) / xFile.PartitionInfo.ClusterSize);
    try
    {
        long Underlying = VariousFunctions.GetBlockOffset(xFile.BlocksOccupied[Cluster], xFile);
        return Underlying + SizeInCluster;
    }
    catch (IndexOutOfRangeException)
    {
        // `off` points just past the last allocated cluster; fall back to the
        // start of the previous block. NOTE(review): the in-cluster offset is
        // deliberately dropped on this path, matching the original behavior —
        // confirm that callers expect a block-aligned result here.
        return VariousFunctions.GetBlockOffset(xFile.BlocksOccupied[Cluster - 1], xFile);
    }
}
// Finds the index, within this file's allocation chain, of the block whose
// device offset equals the cluster boundary containing `Off`.
// Throws InvalidOperationException when no block in BlocksOccupied maps to
// that cluster (the offset is not allocated to this file).
uint DetermineBlockIndex(long Off)
{
    // A block can only start on a cluster boundary, so round the offset down
    // before comparing against each block's device offset.
    long rounded = VariousFunctions.DownToNearestCluster(Off, xFile.PartitionInfo.ClusterSize);
    for (uint i = 0; i < xFile.BlocksOccupied.Length; i++)
    {
        if (VariousFunctions.GetBlockOffset(xFile.BlocksOccupied[i], xFile) == rounded)
        {
            return i;
        }
    }
    // More specific than the original bare Exception; still caught by any
    // existing catch (Exception) handlers.
    throw new InvalidOperationException("Block not allocated to this file!");
}
// Reads up to `count` bytes into `array` at `offset`, translating the
// stream's logical position into sector-aligned reads on the underlying
// device, one cluster at a time. Returns the number of bytes actually read
// (0 at end of stream), per the Stream.Read contract.
//
// BUGFIXES vs. original:
//  - `catch (Exception e) { throw e; }` around the copy destroyed the stack
//    trace and added nothing; the try/catch is removed.
//  - The `#if !DEBUG && !TRACE` CRC-comparison blocks were ACTIVE in Release
//    builds and opened a hard-coded path on one developer's desktop; these
//    leftover diagnostics are removed.
//  - Unused locals (`EndingCluster`, the never-read DEBUG Stopwatch) removed.
public override int Read(byte[] array, int offset, int count)
{
    // Nothing left to read past the end of the stream.
    if (this.Position == this.Length)
    {
        return 0;
    }

    // Running total of bytes copied into `array`; also the return value.
    int DataRead = 0;
    // Remembered so Position can always be recomputed from a stable base.
    long InitialPosition = Position;
    // Bytes to skip at the start of the first sector read, because Position
    // is not necessarily on a 0x200-byte sector boundary.
    int beginningDataToRemove = (int)(Position - VariousFunctions.DownToNearest200(Position));
    // `count` padded with the in-cluster lead-in; sizes the cluster span.
    int Pseudocount = (int)(count + (Position - VariousFunctions.DownToNearestCluster(Position, xFile.PartitionInfo.ClusterSize)));
    // Upper bound on the payload bytes this call may produce.
    int RealPseudocount = (int)(count + VariousFunctions.DownToNearestCluster(Position, xFile.PartitionInfo.ClusterSize));
    if (Pseudocount > Length)
    {
        // The request runs past end of file: clamp to what is actually left.
        Pseudocount = (int)(VariousFunctions.DownToNearestCluster(Length - Position, xFile.PartitionInfo.ClusterSize) + VariousFunctions.UpToNearestCluster((Length - Position), xFile.PartitionInfo.ClusterSize));
        RealPseudocount = (int)(Length - Position);
    }
    // Number of clusters the (padded) request spans.
    int ClustersSpanned = (int)ClusterSpanned(Pseudocount);

    for (int i = 0; i < ClustersSpanned && (count - DataRead) > 0; i++)
    {
        // Bytes available between the current Position and the end of its
        // cluster, rounded up to the 0x200-byte sector size.
        int DataReadableCluster = (int)(xFile.PartitionInfo.ClusterSize - (Position - VariousFunctions.DownToNearestCluster(Position, xFile.PartitionInfo.ClusterSize))).UpToNearest200();
        // Bytes from this iteration's sector read that land in `array`.
        int AddToArray = DataReadableCluster;
        if (DataReadableCluster > RealPseudocount - DataRead)
        {
            // Final pass (capped by remaining payload): trim the physical
            // read to a sector multiple and the copy to what is left.
            DataReadableCluster = (int)VariousFunctions.UpToNearest200(beginningDataToRemove + (RealPseudocount - DataRead));
            AddToArray = RealPseudocount - DataRead;
        }
        else if (DataReadableCluster > count - DataRead)
        {
            // Final pass (capped by the caller's count).
            DataReadableCluster = (int)VariousFunctions.UpToNearest200(beginningDataToRemove + (count - DataRead));
            AddToArray = count - DataRead;
        }
        else if (i == 0 && Position + DataReadableCluster + AddToArray - beginningDataToRemove > xFile.PartitionInfo.ClusterSize)
        {
            // First pass starting mid-sector: the copy shrinks by the skipped
            // lead-in, but the physical read size stays sector-aligned.
            AddToArray -= beginningDataToRemove;
        }
        if (AddToArray == 0)
        {
            break;
        }

        // Scratch buffer for the sector-aligned device read.
        byte[] TempData = new byte[DataReadableCluster];
        // Map the logical file position to its device offset for this cluster.
        Underlying.Position = GetRealSectorOffset(xPositionInFile);
        if (LastRead200Offset == Underlying.Position && DataReadableCluster <= 0x200)
        {
            // Same single sector as last time — reuse the cache, save an I/O.
            TempData = LastRead200;
        }
        else
        {
            // Only single-sector reads are cached.
            LastRead200Offset = (AddToArray <= 0x200) ? Underlying.Position : LastRead200Offset;
            Underlying.Read(TempData, 0, DataReadableCluster);
            if (AddToArray <= 0x200)
            {
                Array.Copy(TempData, LastRead200, 0x200);
            }
        }

        // Copy the payload, dropping the unaligned lead-in on the first pass.
        Array.Copy(TempData, ((i == 0) ? beginningDataToRemove : 0), array, offset + DataRead, AddToArray);

        DataRead += AddToArray;
        // Keep Position current inside the loop: GetRealSectorOffset reads it
        // (via xPositionInFile) on the next iteration.
        Position = InitialPosition + DataRead;
    }
    Position = InitialPosition + DataRead;
    return DataRead;
}
/* Stream.Read fills the caller's array and reports how many bytes were
 * read. Because the underlying device can only be read on sector/cluster
 * boundaries, every request is widened to aligned boundaries, read into a
 * scratch buffer, and the unaligned lead-in/tail trimmed off before the
 * payload is copied into the caller's array. */
#endregion
// Reads `count` bytes into `array` starting at `offset`. Returns the number
// of bytes read (0 at end of stream), per the Stream.Read contract.
//
// BUGFIXES vs. original:
//  - Underlying.Read was passed the caller's DESTINATION `offset` as the
//    scratch-buffer offset; the scratch buffer must be filled from index 0.
//  - Both branches ignored `offset` when copying into `array`.
//  - The method returned `array.Length` instead of the bytes read.
//  - Unused locals (CurrentIndex, v_ToReadBetween) and a pointless
//    Array.Clear on a local buffer removed.
public override int Read(byte[] array, int offset, int count)
{
    // Nothing left to read past the end of the stream.
    if (this.Position == this.Length)
    {
        return 0;
    }

    long p = xPositionInFile;
    byte[] b_Return;

    // Bytes between the current sector-aligned device offset and the end of
    // its cluster; if the whole request fits we can do a single read.
    long bytesLeftInCluster = VariousFunctions.UpToNearestCluster(RealOffset, xFile.PartitionInfo.ClusterSize) - RealSectorOffset;
    if (count <= xFile.PartitionInfo.ClusterSize && bytesLeftInCluster >= count)
    {
        // Unaligned lead-in to drop from the front of the scratch buffer.
        long v_bToRemove = RealOffset - RealSectorOffset;
        // Unaligned tail to drop from the end.
        long up = VariousFunctions.UpToNearest200(RealOffset + count);
        long v_eToRemove = up - (RealOffset + count);
        // Total sector-aligned bytes to pull from the device.
        long v_ToRead = VariousFunctions.UpToNearest200(v_bToRemove + v_eToRemove + count);

        b_Return = new byte[v_ToRead];
        // BUGFIX: always fill the scratch buffer from index 0 — the caller's
        // `offset` applies to `array`, not to the scratch buffer.
        Underlying.Read(b_Return, 0, (int)v_ToRead);
        // Copy the payload into the caller's array at the requested offset.
        Array.Copy(b_Return, v_bToRemove, array, offset, b_Return.Length - (v_bToRemove + v_eToRemove));
    }
    else
    {
        // The request spans multiple clusters: read the remainder of the
        // first cluster, then whole clusters, then the aligned tail.
        long DataRead = 0;
        // Unaligned lead-in within the first sector.
        long v_bToRemove = RealOffset - RealSectorOffset;
        // Readable bytes left in the first cluster (sector-aligned).
        long v_Cluster = VariousFunctions.UpToNearestCluster(RealSectorOffset, xFile.PartitionInfo.ClusterSize) - RealSectorOffset;
        // Bytes to trim off the end of the final, rounded-up read.
        long v_eToRemove = VariousFunctions.UpToNearest200((count - v_Cluster) + v_bToRemove) - ((count - v_Cluster) + v_bToRemove);
        // Payload bytes that land in the final, partial cluster.
        long v_eToReadNotRounded = ((count - v_Cluster) + v_bToRemove) - VariousFunctions.DownToNearestCluster(count - v_Cluster + v_bToRemove, xFile.PartitionInfo.ClusterSize);

        // First (partial) cluster.
        b_Return = new byte[v_Cluster];
        Underlying.Read(b_Return, 0, (int)v_Cluster);
        // BUGFIX: honor the caller-supplied destination offset.
        Array.Copy(b_Return, v_bToRemove, array, offset, b_Return.Length - v_bToRemove);
        DataRead += v_Cluster - v_bToRemove;
        Position += DataRead;

        // Whole clusters in between, if any.
        if (((count - v_Cluster) + v_bToRemove - v_eToReadNotRounded) != 0)
        {
            for (int i = 0; i < ClusterSpanned((count - v_Cluster) + v_bToRemove - v_eToReadNotRounded); i++)
            {
                b_Return = new byte[xFile.PartitionInfo.ClusterSize];
                Underlying.Read(b_Return, 0, b_Return.Length);
                // BUGFIX: honor the caller-supplied destination offset.
                Array.Copy(b_Return, 0, array, offset + DataRead, b_Return.Length);
                DataRead += xFile.PartitionInfo.ClusterSize;
                Position += xFile.PartitionInfo.ClusterSize;
            }
        }

        // Final partial cluster: read a sector-rounded chunk, trim the tail.
        b_Return = new byte[VariousFunctions.UpToNearest200(count - DataRead)];
        Underlying.Read(b_Return, 0, b_Return.Length);
        // BUGFIX: honor the caller-supplied destination offset.
        Array.Copy(b_Return, 0x0, array, offset + DataRead, b_Return.Length - v_eToRemove);
    }

    Position = p + count;
    // BUGFIX: Stream.Read must report the number of bytes read, not the
    // destination array's length; this path always produces `count` bytes.
    return count;
}