/// <summary>
/// Reads the file info table from the bigfile stream, one fixed-size struct per file.
/// </summary>
/// <param name="stream">Bigfile stream; this method seeks to the info table itself.</param>
/// <param name="segmentHeader">Segment header providing the info table base offset.</param>
/// <param name="header">File header providing the header struct size and the file count.</param>
/// <returns>One parsed <see cref="IBigFileFileInfo"/> per file, in table order.</returns>
/// <exception cref="InvalidOperationException">Thrown when no version has been set on this instance.</exception>
/// <exception cref="EndOfStreamException">Thrown when the stream ends before all file infos are read.</exception>
public IBigFileFileInfo[] ReadFileInfos(Stream stream, ref BigFileSegmentHeader segmentHeader, ref BigFileHeaderStruct header)
{
    if (version == null)
    {
        // version selects the concrete file-info layout; nothing can be parsed without it.
        // (was a hand-thrown NullReferenceException; this is missing state, not a null argument)
        throw new InvalidOperationException("Version cannot be null!");
    }

    log.Info("Reading big file file infos, count: {0}", header.Files);
    BigFileVersions.DebugLogVersion(version, log);

    diag.StartStopwatch();

    IBigFileFileInfo[] infos = new IBigFileFileInfo[header.Files];
    IBigFileFileInfo tmpInfo = version.CreateFileInfo();

    // the file info table starts immediately after the header struct
    int fileOffset = segmentHeader.InfoOffset + header.StructSize;
    log.Debug("File info offset: {0:X8}", fileOffset);

    byte[] buffer = buffers[tmpInfo.StructSize];
    stream.Seek(fileOffset, SeekOrigin.Begin);
    for (int i = 0; i < header.Files; i++)
    {
        // Stream.Read may return fewer bytes than requested; the original single
        // Read call could silently parse a partially-filled buffer. Loop until
        // the whole struct is in the buffer.
        int read = 0;
        while (read < tmpInfo.StructSize)
        {
            int r = stream.Read(buffer, read, tmpInfo.StructSize - read);
            if (r <= 0)
            {
                throw new EndOfStreamException(string.Format("Unexpected end of stream while reading file info {0}/{1}", i, header.Files));
            }
            read += r;
        }

        infos[i] = tmpInfo.FromBytes(buffer);
        infos[i].DebugLog(log);
    }

    log.Info("File infos read! Time taken: {0}ms", diag.StopwatchTime);
    log.Info("File count: {0}", header.Files);

    return infos;
}
/// <summary>
/// Wraps a raw file info and its containing folder as a bigfile file entry,
/// caching the decoded display name and building the file's archetype.
/// </summary>
/// <param name="_fileInfo">Raw file info backing this file.</param>
/// <param name="_parentFolder">Folder this file lives in.</param>
public BigFileFile(IBigFileFileInfo _fileInfo, BigFileFolder _parentFolder)
{
    // decode the display name once, up front
    string decodedName = _fileInfo.Name.EncodeToGoodString();

    ParentFolder = _parentFolder;
    FileInfo = _fileInfo;
    name = decodedName;

    // build the archetype and back-link it to this file
    Archetype = this.CreateArchetype();
    Archetype.File = this;
}
/// <summary>
/// Calculates the absolute offset of the folder info table: the end of the file
/// info table (header struct + one fixed-size info per file) rounded up to the
/// next 8-byte boundary.
/// </summary>
/// <param name="version">Bigfile version, used to size a single file info struct.</param>
/// <param name="segmentHeader">Segment header providing the info table base offset.</param>
/// <param name="header">File header providing the header struct size and file count.</param>
/// <returns>Offset of the folder info table, aligned to 8 bytes.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="version"/> is null.</exception>
public static int CalculateFolderOffset(IBigFileVersion version, ref BigFileSegmentHeader segmentHeader, ref BigFileHeaderStruct header)
{
    if (version == null)
    {
        // was a hand-thrown NullReferenceException; a null argument should raise
        // ArgumentNullException so callers can tell a contract violation from a real NRE
        throw new ArgumentNullException(nameof(version), "There's no version! Can't calculate folder offset!");
    }

    IBigFileFileInfo tmpFileInfo = version.CreateFileInfo();

    // end of the file info table: header struct followed by Files fixed-size infos
    int baseSize = (segmentHeader.InfoOffset + header.StructSize) + (header.Files * tmpFileInfo.StructSize);

    // round up to the next multiple of 8 (baseSize is always positive here)
    baseSize = (((baseSize - 1) / 8) + 1) * 8; // align to 8 bytes

    return baseSize;
}
/// <summary>
/// Copies every field of <paramref name="source"/> into <paramref name="target"/>.
/// The CRC32 and Name byte arrays are deep-copied element-by-element into the
/// target's existing arrays (the target must already have arrays at least as
/// long as the source's).
/// </summary>
/// <param name="source">File info to copy from.</param>
/// <param name="target">File info to copy into; mutated in place.</param>
/// <exception cref="ArgumentNullException">Thrown when either argument is null.</exception>
public static void Copy(this IBigFileFileInfo source, IBigFileFileInfo target)
{
    // guard clauses: a null argument previously surfaced as a bare NRE mid-copy,
    // potentially leaving target half-written
    if (source == null)
        throw new ArgumentNullException(nameof(source));
    if (target == null)
        throw new ArgumentNullException(nameof(target));

    target.Offset = source.Offset;
    target.Key = source.Key;
    target.Unknown_01 = source.Unknown_01;
    target.FileType = source.FileType;
    target.Folder = source.Folder;
    target.TimeStamp = source.TimeStamp;
    target.Flags = source.Flags;
    target.FileNumber = source.FileNumber;

    // deep-copy the array fields so source and target don't share buffers
    Array.Copy(source.CRC32, target.CRC32, source.CRC32.Length);
    Array.Copy(source.Name, target.Name, source.Name.Length);

    target.Unknown_03 = source.Unknown_03;
    target.ZIP = source.ZIP;
}
/// <summary>
/// Walks an unpacked directory tree and rebuilds the bigfile folder map, folder
/// info list, and file info list from it, using the renamed-file mapping to
/// recover each file's original key and name.
/// </summary>
/// <param name="dir">Root of the unpacked directory tree.</param>
/// <param name="mapping">Renamed-file mapping; KeyMap.Count determines the expected file count.</param>
/// <param name="defaultMappingData">Source of the original file infos, looked up by key and copied as the base for each new info.</param>
/// <returns>The folder map, file info array, and folder info array bundled together.</returns>
public UnpackedFolderMapAndFilesList CreateFolderTreeAndFilesListFromDirectory(DirectoryInfo dir, UnpackedRenamedFileMapping mapping, FileMappingData defaultMappingData)
{
    log.Info("Creating folder tree and files list from directory " + dir.FullName);

    // one slot per mapped file; filled in discovery order by the recursion below
    IBigFileFileInfo[] fileInfos = new IBigFileFileInfo[mapping.KeyMap.Count];
    List<IBigFileFolderInfo> folderInfos = new List<IBigFileFolderInfo>();
    Dictionary<short, BigFileFolder> folderMap = new Dictionary<short, BigFileFolder>();

    // shared counters mutated by every level of the recursion: folderCount is the
    // index assigned to the folder currently being built, fileCount the next free
    // slot in fileInfos
    short folderCount = 0;
    int fileCount = 0;

    // working copy of the renamed map; entries are removed as files are matched,
    // so whatever remains afterwards is the set of missing files
    Dictionary<string, UnpackedRenamedFileMapping.RenamedFileMappingData> temp = new Dictionary<string, UnpackedRenamedFileMapping.RenamedFileMappingData>(mapping.RenamedMap);

    // local function: builds the folder info for `directory`, registers its files,
    // then recurses into subdirectories. dirName is the mapping-key path prefix.
    BigFileFolder recursion(DirectoryInfo directory, string dirName, BigFileFolder parentFolder)
    {
        IBigFileFolderInfo folderInfo = version.CreateFolderInfo();
        folderInfo.Unknown_01 = 0;
        folderInfo.PreviousFolder = parentFolder != null ? parentFolder.FolderIndex : (short)-1;
        folderInfo.NextFolder = -1;
        folderInfo.Unknown_02 = 0;
        // the root folder is stored under the literal name "/"; everything else
        // uses its directory name, padded/encoded to the fixed 54-byte field
        folderInfo.Name = parentFolder == null ?
            "/".EncodeToBadString(length: 54) :
            directory.Name.EncodeToBadString(length: 54);
        folderInfos.Add(folderInfo);

        BigFileFolder thisFolder = new BigFileFolder(folderCount, folderInfo, folderMap);
        folderMap.Add(folderCount, thisFolder);

        foreach (FileInfo file in directory.GetFiles())
        {
            // .header sidecar files are unpacker artifacts, not bigfile entries
            if (file.Name.EndsWith(".header"))
            {
                continue;
            }

            // NOTE(review): "//" separator looks odd but presumably matches the
            // key format used when the mapping was written — confirm against the unpacker
            string fileName = dirName + "//" + file.Name;
            UnpackedRenamedFileMapping.RenamedFileMappingData mappingData = mapping[fileName];
            temp.Remove(fileName); // matched — remove from the missing-file set

            // start from a copy of the original file info, then override key/name/folder
            IBigFileFileInfo fileInfo = version.CreateFileInfo();
            defaultMappingData[mappingData.Key].FileInfo.Copy(fileInfo);
            fileInfo.Key = mappingData.Key;
            //fileInfo.FileNumber = fileCount;
            fileInfo.Name = mappingData.OriginalName.EncodeToBadString(length: 60);
            fileInfo.Folder = folderCount;

            log.Debug("Add file " + file.FullName);

            fileInfos[fileCount] = fileInfo;
            fileCount++;
        }

        // this folder's index is now consumed; subfolders get subsequent indices
        folderCount++;

        foreach (DirectoryInfo dirInfo in directory.GetDirectories())
        {
            if (parentFolder == null) //ONLY THE FIRST RECURSION, PREVENTS ADDING WRONG FOLDERS WHEN PACKING
            {
                // top level: child path is just the directory name (no leading separator)
                thisFolder.SubFolders.Add(recursion(dirInfo, dirInfo.Name, thisFolder));
            }
            else
            {
                thisFolder.SubFolders.Add(recursion(dirInfo, dirName + "/" + dirInfo.Name, thisFolder));
            }
        }

        return(thisFolder);
    }

    recursion(dir, "", null);

    // any mapping entries left in temp were never found on disk
    if (fileCount != mapping.KeyMap.Count)
    {
        log.Error(string.Format("Missing {0} files!", temp.Count));
        foreach (KeyValuePair<string, UnpackedRenamedFileMapping.RenamedFileMappingData> kvp in temp)
        {
            log.Error(string.Format(" >{0}", kvp.Value.FileName));
        }
    }

    return(new UnpackedFolderMapAndFilesList()
    {
        folderMap = folderMap,
        filesList = fileInfos,
        foldersList = folderInfos.ToArray(),
    });
}
/// <summary>
/// Runs after the chunking threads complete: collates per-thread metadata files,
/// writes the segment header, file header, file infos, and folder infos into the
/// new bigfile, then concatenates the per-thread chunk files after them.
/// Optionally deletes the staging metadata/chunk files as it goes.
/// </summary>
/// <param name="info">Pack job state: options, buffers, diagnostics, and the source bigfile.</param>
private void internal_OnPackFinished(BigFilePackInfo info)
{
    // spin until every chunking thread has finished its slice
    while (IsPacking)
    {
        Thread.Sleep(500); //wait for all threads to finish
    }

    log.Info("All chunking threads finished their work!");
    log.Info(" >Chunking result:");
    log.Info(" {0,6} {1,6} {2,6} {3,6}", "Thread", "Time", "Start", "Count");
    for (int i = 0; i < info.Options.Threads; i++)
    {
        log.Info(" {0,6} {1,4}s {2,6} {3,6}", packInfos[i].ThreadID, packInfos[i].diag.StopwatchTime / 1000, packInfos[i].startIndex, packInfos[i].count);
    }

    log.Info("Starting packaging");

    string targetFileName = info.Options.Directory.FullName + @"\" + info.Options.BigFileName + BigFileConst.BIGFILE_EXTENSION;
    FileInfo targetFileInfo = new FileInfo(targetFileName);
    if (targetFileInfo.Exists)
    {
        // target already exists: ask the user before overwriting; on refusal,
        // clean up the staging files (if requested) and abort the whole pack
        WinMessageBoxResult overwriteResult = WinMessageBox.Show("The file\n" + targetFileName + "\n already exists.\n\nOverwrite?", "File already exists", WinMessageBoxFlags.btnYesNo);
        if (overwriteResult != WinMessageBoxResult.Yes)
        {
            log.Error("Target file already exists and the user chose not to overwrite!");
            if (info.Options.DeleteChunks)
            {
                log.Info("Deleting generated chunks!");
                for (int threadID = 0; threadID < info.Options.Threads; threadID++)
                {
                    string metadataFilename = Environment.CurrentDirectory + BigFileConst.PACK_STAGING_DIR + info.Options.BigFileName + ".meta" + packInfos[threadID].ThreadID.ToString();
                    string chunkFileName = Environment.CurrentDirectory + BigFileConst.PACK_STAGING_DIR + info.Options.BigFileName + ".chunk" + packInfos[threadID].ThreadID.ToString();
                    File.Delete(metadataFilename);
                    File.Delete(chunkFileName);
                    log.Info("Deleted metadata file {0}", metadataFilename);
                    log.Info("Deleted chunk file {0}", chunkFileName);
                }
            }
            return;
        }
    }

    info.diag.StartStopwatch();

    using (FileStream targetFS = new FileStream(targetFileName, FileMode.Create, FileAccess.Write))
    {
        //Dictionary<int, ChunkedFileMetadata> metadataMap = new Dictionary<int, ChunkedFileMetadata>();
        List<ChunkedFileMetadata> metadataList = new List<ChunkedFileMetadata>();

        // running total of chunk-file sizes processed so far; offsets inside each
        // per-thread chunk file are rebased by this to become offsets into the
        // concatenated data section of the target bigfile
        int chunkedFileOffsetInTargetFile = 0;
        for (int threadID = 0; threadID < info.Options.Threads; threadID++)
        {
            string metadataFilename = Environment.CurrentDirectory + BigFileConst.PACK_STAGING_DIR + info.Options.BigFileName + ".meta" + packInfos[threadID].ThreadID.ToString();
            log.Info("Collating metadata from file " + metadataFilename);
            //extract the metadata from the metadata files
            using (FileStream metaFS = new FileStream(metadataFilename, FileMode.Open, FileAccess.Read))
            {
                // metadata file layout: int32 fileCount, int32 chunkFileSize,
                // then fileCount records of (int32 number, int32 key, int32 offset)
                // NOTE(review): Read return values are unchecked here — assumes the
                // local metadata file always delivers full reads; consider verifying
                byte[] tmpBuffer = info.IOBuffers[8];
                metaFS.Read(tmpBuffer, 0, 8);
                int fileCount = BitConverter.ToInt32(tmpBuffer, 0);
                int chunkFileSize = BitConverter.ToInt32(tmpBuffer, 4);

                tmpBuffer = info.IOBuffers[12];
                for (int j = 0; j < fileCount; j++)
                {
                    metaFS.Read(tmpBuffer, 0, 12);
                    int offset = BitConverter.ToInt32(tmpBuffer, 8);
                    // -1 is the "no data" sentinel and must not be rebased
                    if (offset != -1)
                    {
                        offset += chunkedFileOffsetInTargetFile;
                    }
                    ChunkedFileMetadata mdata = new ChunkedFileMetadata()
                    {
                        Number = BitConverter.ToInt32(tmpBuffer, 0),
                        Key = BitConverter.ToInt32(tmpBuffer, 4),
                        Offset = offset
                    };
                    metadataList.Add(mdata);
                }

                chunkedFileOffsetInTargetFile += chunkFileSize;
            }
            if (info.Options.DeleteChunks)
            {
                log.Info("Deleting metadata file...");
                File.Delete(metadataFilename);
            }
        }

        log.Info("Metadata collation took {0,4}s", info.diag.StopwatchTime / 1000);

        info.diag.StartStopwatch();

        //write the segment header to the target bigfile
        log.Info("Writing segment header to new bigfile...");
        info.bigFile.Segment.WriteSegmentHeader(targetFS, ref info.bigFile.SegmentHeader);
        log.Info("Segment header written!");

        //create a new header with the number of files we're packing
        log.Info("Writing file header to new bigfile...");
        BigFileHeaderStruct header = new BigFileHeaderStruct()
        {
            Files = metadataList.Count,
            Folders = (short)info.bigFile.RawFolderInfos.Length, //oh boy
            BigFileVersion = info.bigFile.Version.Identifier,
            Unknown_02 = info.bigFile.FileHeader.Unknown_02,
        };
        header.DebugLog(log);
        info.bigFile.Header.WriteHeader(targetFS, ref header);
        log.Info("File header written!");

        //create a list of file infos to write, copying all but the offset and file number from the original file info
        log.Info("Creating new file info list...");
        IBigFileFileInfo[] newFileInfos = new IBigFileFileInfo[metadataList.Count];
        for (int i = 0; i < metadataList.Count; i++)
        {
            newFileInfos[i] = info.bigFile.Version.CreateFileInfo();
            info.bigFile.FileMap[metadataList[i].Key].FileInfo.Copy(newFileInfos[i]);
            if (metadataList[i].Offset == -1)
            {
                // propagate the "no data" sentinel unscaled
                // NOTE(review): "METATADA" is a typo in the log text; left untouched
                // because doc-only edits must not change runtime strings
                newFileInfos[i].Offset = -1;
                log.Error("METATADA FILE OFFSET IS -1");
            }
            else
            {
                // stored offsets are in 8-byte units; a non-multiple-of-8 byte
                // offset would be unrepresentable, hence the sanity log below
                if (metadataList[i].Offset % 8 != 0)
                {
                    log.Error("WAIT WHAT: {0} {1:X4}", metadataList[i].Offset, metadataList[i].Key);
                }
                newFileInfos[i].Offset = metadataList[i].Offset / 8;
            }
            newFileInfos[i].FileNumber = metadataList[i].Number;
            // NOTE(review): keeps the copied ZIP flag when compressing, zeroes it
            // otherwise — verify this is the intended polarity of the condition
            newFileInfos[i].ZIP = (info.Options.Flags & BigFileFlags.Compress) != 0 ? newFileInfos[i].ZIP : 0;
        }
        log.Info("New file info list created!");

        log.Info("Writing file and folder infos to new bigfile...");
        //write file infos to file
        info.bigFile.FilesAndFolders.WriteFileInfos(targetFS, newFileInfos);
        //write folder infos to file
        info.bigFile.FilesAndFolders.WriteFolderInfos(targetFS, info.bigFile.RawFolderInfos);
        log.Info("File and folder infos written!");

        log.Info("File metadata generation took {0,4}s", info.diag.StopwatchTime / 1000);

        info.diag.StartStopwatch();

        //copy chunk file data to target bigfile
        for (int threadID = 0; threadID < info.Options.Threads; threadID++)
        {
            string chunkFileName = Environment.CurrentDirectory + BigFileConst.PACK_STAGING_DIR + info.Options.BigFileName + ".chunk" + packInfos[threadID].ThreadID.ToString();
            log.Info("Copying chunk data from chunk {0}", chunkFileName);
            log.Info(" Current offset: {0:X8}", targetFS.Position);

            // stream the whole chunk file into the target in 36 MB slices
            byte[] buffer = info.IOBuffers[IOBuffers.MB * 36];
            using (FileStream chunkFS = new FileStream(chunkFileName, FileMode.Open, FileAccess.Read))
            {
                int readSize = -1;
                while ((readSize = chunkFS.Read(buffer, 0, IOBuffers.MB * 36)) != 0)
                {
                    targetFS.Write(buffer, 0, readSize);
                }
            }
            log.Info("Chunk data copied! Current offset: {0:X8}", targetFS.Position);
            if (info.Options.DeleteChunks)
            {
                log.Info("Deleting chunk...");
                File.Delete(chunkFileName);
            }
        }
        log.Info("All chunk data written!");

        log.Info("Chunk data copying time taken: {0,4}s", info.diag.StopwatchTime / 1000);
        log.Info("Bigfile packing finished!");
    }
}