public static void readRootFST(FilesystemDirectory fs, WrappedInputStream s, int fstOffset, int fstSize, int virtualOffset) {
    //TODO: Check against the FST max size in the header, and also the Wii RAM limit (since this is also used on Wii I think, otherwise I'd say GameCube RAM limit) (88MB for Wii, 24MB system / 43MB total for GameCube), as it would be impossible for real hardware to read a bigger filesystem
    //TODO: Wii shifts offset by one
    long origPos = s.Position;
    Dictionary<int, FilesystemDirectory> parentDirectories = new Dictionary<int, FilesystemDirectory> {
        { 0, fs }
    };
    try {
        s.Position = fstOffset + 8;
        //1 byte flag at fstOffset + 0: Filesystem root _must_ be a directory
        //Name offset is irrelevant since the root doesn't really have a name (I think it's just set to 0 anyway)
        //Parent index would also be irrelevant because it doesn't have a parent by definition
        //TODO: Throw an error if the root entry is not a directory
        //TODO: Throw an error if number of entries * 12 > fstSize
        int numberOfEntries = s.readIntBE();
        int fntOffset = fstOffset + (numberOfEntries * 12);
        int fntSize = fstSize - (numberOfEntries * 12);
        s.Position = fntOffset;
        byte[] fnt = s.read(fntSize);

        s.Position = fstOffset + 12;
        //File entries don't store a parent index (only directory entries do), so we collect the files first and work out which directory each one belongs to afterwards, using the directories' next-index ranges
        Dictionary<int, FilesystemFile> filesToAdd = new Dictionary<int, FilesystemFile>();
        Dictionary<int, int> directoryNextIndexes = new Dictionary<int, int> {
            { 0, numberOfEntries }
        };
        for (int i = 0; i < numberOfEntries - 1; ++i) {
            byte[] entry = s.read(12);
            readFileEntry(entry, fnt, virtualOffset, parentDirectories, i + 1, filesToAdd, directoryNextIndexes);
        }

        //Now that we have the directory structure, add the files to it
        //A file's parent is the nearest preceding directory whose next index is still greater than the file's index
        foreach (var fileToAdd in filesToAdd) {
            int fileIndex = fileToAdd.Key;
            FilesystemFile file = fileToAdd.Value;

            for (int potentialParentIndex = fileIndex - 1; potentialParentIndex >= 0; potentialParentIndex--) {
                if (directoryNextIndexes.ContainsKey(potentialParentIndex)) {
                    if (directoryNextIndexes[potentialParentIndex] > fileIndex) {
                        var parentDir = parentDirectories[potentialParentIndex];
                        parentDir.addChild(file);
                        break;
                    }
                }
            }
        }
    } finally {
        s.Position = origPos;
    }
}
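//A minimal sketch of the sanity checks the TODOs above describe; it is not part of the
//original code. The RAM figures are the ones mentioned in the TODO comment, and
//InvalidDataException is just one reasonable choice of exception type.
private const int WII_RAM_LIMIT = 88 * 1024 * 1024;
private const int GAMECUBE_RAM_LIMIT = 43 * 1024 * 1024;

private static void validateFST(int numberOfEntries, int fstSize, bool isWii) {
    //Each FST entry is 12 bytes, so the entry table alone can't be bigger than the FST
    if ((long)numberOfEntries * 12 > fstSize) {
        throw new InvalidDataException("FST entry count is larger than the FST itself");
    }
    //No real console could load an FST bigger than its RAM, so anything larger must be bogus
    int ramLimit = isWii ? WII_RAM_LIMIT : GAMECUBE_RAM_LIMIT;
    if (fstSize > ramLimit) {
        throw new InvalidDataException("FST is larger than the console's RAM");
    }
}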
public static void readFileEntry(byte[] entry, byte[] fnt, int virtualOffset, Dictionary<int, FilesystemDirectory> parentDirs, int index, Dictionary<int, FilesystemFile> filesToAdd, Dictionary<int, int> directoryNextIndexes) {
    bool isDirectory = entry[0] > 0;
    //FIXME: Should check this type flag properly; sometimes garbage entries end up here that are neither 0 (file) nor 1 (directory). It seems to happen on beta discs and such, so it's probably caused by incorrect header entries making the FST parsing stuff up
    int filenameOffset = (entry[1] << 16) | (entry[2] << 8) | entry[3];
    string name = getNullTerminatedString(fnt, filenameOffset);

    if (isDirectory) {
        var dir = new FilesystemDirectory {
            name = name
        };
        parentDirs.Add(index, dir);

        int parentIndex = (entry[4] << 24) | (entry[5] << 16) | (entry[6] << 8) | entry[7];
        int nextIndex = (entry[8] << 24) | (entry[9] << 16) | (entry[10] << 8) | entry[11];
        //If I'm understanding this correctly (I might not be): next index is the first entry index that is _not_ a child of this directory, i.e. every index between here and next index is inside this directory
        directoryNextIndexes.Add(index, nextIndex);

        //TODO: parentDirs has always contained parentIndex except for the aforementioned garbage entries, but this should be made more robust
        parentDirs[parentIndex].addChild(dir);
    } else {
        int fileOffset = (entry[4] << 24) | (entry[5] << 16) | (entry[6] << 8) | entry[7];
        int fileLength = (entry[8] << 24) | (entry[9] << 16) | (entry[10] << 8) | entry[11];
        var file = new FilesystemFile {
            name = name,
            offset = fileOffset + virtualOffset,
            size = fileLength
        };
        filesToAdd.Add(index, file);
    }
}
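//Each FST entry is 12 bytes, as parsed above: byte 0 is the type flag, bytes 1-3 are the
//name's offset within the FNT, bytes 4-7 are the parent index (directories) or file offset
//(files), and bytes 8-11 are the next index (directories) or file length (files).
//getNullTerminatedString itself isn't shown here; this is a minimal sketch of what it
//presumably does, assuming FNT names are zero-terminated and ASCII-compatible (the real
//helper may handle the encoding differently). Assumes System.Text is imported.
private static string getNullTerminatedString(byte[] data, int offset) {
    int end = offset;
    //Scan forward until the terminating zero byte (or the end of the name table)
    while (end < data.Length && data[end] != 0) {
        end++;
    }
    return Encoding.ASCII.GetString(data, offset, end - offset);
}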
public async Task Should_give_md5_hash_of_file() {
    var sut = new FilesystemFile(ResourceHelpers.GetResourceFileInfo("SampleContent.txt"));

    var hash = await sut.GetHashAsync();

    hash.Should().NotBeEmpty();
    hash.Should().Be("0edb2a42eee7dc39e8a9d15ecd827000");
}
public async Task Should_not_blow_up_if_file_does_not_exist() {
    string tempFileInfo = $"{ResourceHelpers.GetTempFilePath()}/{Guid.NewGuid()}.txt";
    var sut = new FilesystemFile(new FileInfo(tempFileInfo));

    File.Exists(sut.RealPath).Should().BeFalse();

    await sut.DeleteAsync();
}
public async Task Should_return_contents_of_file() {
    var sut = new FilesystemFile(ResourceHelpers.GetResourceFileInfo("SampleContent.txt"));

    var bytes = await sut.ReadToEndAsync();
    string contents = Encoding.UTF8.GetString(bytes);

    contents.Should().StartWith("Lorem ipsum");
}
public async Task Should_give_stream() {
    var sut = new FilesystemFile(ResourceHelpers.GetResourceFileInfo("SampleContent.txt"));

    using (var stream = await sut.OpenReadAsync()) {
        stream.Should().NotBe(null);
        stream.Position.Should().Be(0);
    }
}
public async Task Should_create_file_when_stream_opened() {
    string tempFileInfo = $"{ResourceHelpers.GetTempFilePath()}/{Guid.NewGuid()}.txt";
    var sut = new FilesystemFile(new FileInfo(tempFileInfo));

    File.Exists(sut.RealPath).Should().BeFalse();

    using (await sut.OpenWriteAsync()) {
    }

    File.Exists(sut.RealPath).Should().BeTrue();

    File.Delete(sut.RealPath);
}
public async Task Should_be_able_to_read_file_contents_from_stream() {
    var sut = new FilesystemFile(ResourceHelpers.GetResourceFileInfo("SampleContent.txt"));

    using (var stream = await sut.OpenReadAsync())
    using (var reader = new StreamReader(stream)) {
        var content = reader.ReadToEnd();

        content.Should().NotBeEmpty();
        content.Should().StartWith("Lorem ipsum");
    }
}
public async Task Should_write_content_to_file() {
    string tempFileInfo = $"{ResourceHelpers.GetTempFilePath()}/{Guid.NewGuid()}.txt";
    var sut = new FilesystemFile(new FileInfo(tempFileInfo));

    using (var stream = await sut.OpenWriteAsync())
    using (var writer = new StreamWriter(stream)) {
        writer.Write(WRITE_CONTENT);
    }

    string fileContents = File.ReadAllText(sut.RealPath);
    fileContents.Should().Be(WRITE_CONTENT);

    File.Delete(sut.RealPath);
}
private byte[] getFile(FilesystemFile fileNode) {
    var stream = file.stream;
    long origPos = stream.Position;
    try {
        stream.Position = fileNode.offset;
        if (fileNode.size > int.MaxValue) {
            MessageBox.Show("Sowwy! I can't extract this because it's too dang big");
            return null;
        }
        return stream.read((int)fileNode.size);
    } finally {
        stream.Position = origPos;
    }
}
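//getFile reads the whole file into memory, which is why anything over int.MaxValue bytes is
//rejected. This is a sketch (not part of the original code) of a chunked alternative that
//copies straight into an output stream instead of buffering everything; it assumes
//file.stream is the open disc image, as getFile above does, and only uses the read(int)
//and Position members seen elsewhere in this code.
private void copyFileTo(FilesystemFile fileNode, Stream destination) {
    var stream = file.stream;
    long origPos = stream.Position;
    try {
        stream.Position = fileNode.offset;
        long remaining = fileNode.size;
        while (remaining > 0) {
            //Copy in 1MB chunks so even files bigger than int.MaxValue are fine
            int chunkSize = (int)Math.Min(remaining, 1024 * 1024);
            byte[] chunk = stream.read(chunkSize);
            destination.Write(chunk, 0, chunk.Length);
            remaining -= chunk.Length;
        }
    } finally {
        stream.Position = origPos;
    }
}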
private void extractFile(FilesystemFile fileNode) {
    byte[] data = getFile(fileNode);
    if (data == null) {
        return;
    }

    SaveFileDialog fileDialog = new SaveFileDialog() {
        DefaultExt = Path.GetExtension(fileNode.name),
        FileName = fileNode.name,
    };
    if (fileDialog.ShowDialog() == DialogResult.OK) {
        using (var stream = fileDialog.OpenFile()) {
            stream.Write(data, 0, data.Length);
        }
        MessageBox.Show("Done!");
    }
}
public void Should_not_have_file_in_place_before_creation() {
    var sut = new FilesystemFile(new FileInfo($"{ResourceHelpers.GetTempFilePath()}/{Guid.NewGuid()}.txt"));

    File.Exists(sut.RealPath).Should().BeFalse();
}
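//The tests above don't show the class they exercise (apparently a different FilesystemFile
//from the FST node type used in readRootFST). This is a sketch of the surface it would need
//to expose for the tests to compile; the member shapes, MD5 as the hash algorithm (implied
//by the test name), and the FileInfo-backed RealPath are assumptions inferred from how the
//tests use them, not the actual implementation. Assumes System.IO, System.Linq,
//System.Security.Cryptography, System.Text and System.Threading.Tasks are imported.
public class FilesystemFile {
    private readonly FileInfo fileInfo;

    public FilesystemFile(FileInfo fileInfo) => this.fileInfo = fileInfo;

    //Path on disk that the tests check with File.Exists/File.ReadAllText
    public string RealPath => fileInfo.FullName;

    //Lowercase hex MD5 of the file's contents
    public async Task<string> GetHashAsync() {
        using (var md5 = MD5.Create())
        using (var stream = fileInfo.OpenRead()) {
            byte[] hash = await Task.Run(() => md5.ComputeHash(stream));
            return string.Concat(hash.Select(b => b.ToString("x2")));
        }
    }

    public async Task<byte[]> ReadToEndAsync() {
        using (var stream = fileInfo.OpenRead())
        using (var buffer = new MemoryStream()) {
            await stream.CopyToAsync(buffer);
            return buffer.ToArray();
        }
    }

    public Task<Stream> OpenReadAsync() => Task.FromResult<Stream>(fileInfo.OpenRead());

    //Creates the backing file if it doesn't exist yet (Should_create_file_when_stream_opened)
    public Task<Stream> OpenWriteAsync() => Task.FromResult<Stream>(fileInfo.Open(FileMode.Create, FileAccess.Write));

    //Must not throw when the file doesn't exist (Should_not_blow_up_if_file_does_not_exist)
    public Task DeleteAsync() {
        if (File.Exists(fileInfo.FullName)) {
            File.Delete(fileInfo.FullName);
        }
        return Task.CompletedTask;
    }
}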