Example #1
        public void AddFileReferencesToFile(BigFileFile file, int[] header)
        {
            log.Debug("Loading file references for file: " + file.Name);
            log.Debug("  Reference count: " + header.Length.ToString());

            if (file.FileReferences != null)
            {
                log.Debug("File {0} (key{1:X8}) already has references loaded!", file.Name, file.FileInfo.Key);
                return;
            }

            BigFileFile[] references = new BigFileFile[header.Length];

            for (int i = 0; i < header.Length; i++)
            {
                references[i] = file.MappingData[header[i]];
            }

            file.FileReferences = references;
        }
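
Each header entry is looked up through the file's MappingData indexer, so the loop is a straight resolve-by-index pass. Below is a minimal, self-contained sketch of that pattern; the plain string array stands in for the real MappingData type, and every name in it is illustrative rather than part of the API above.

        using System;

        static class ResolveSketch
        {
            //table plays the role of file.MappingData; header holds indices into it
            static string[] Resolve(int[] header, string[] table)
            {
                string[] references = new string[header.Length];
                for (int i = 0; i < header.Length; i++)
                {
                    references[i] = table[header[i]];
                }
                return references;
            }

            static void Main()
            {
                string[] table = { "a.tga", "b.msh", "c.snd" };
                foreach (string r in Resolve(new[] { 2, 0 }, table))
                {
                    Console.WriteLine(r); //prints c.snd, then a.tga
                }
            }
        }
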
Example #2
        private BigFilePackOperationStatus internal_ThreadedPack(BigFilePackOptions options)
        {
            //set up threads
            int dividedCount     = bigFile.FileMap.FilesList.Length / options.Threads;
            int dividedRemainder = bigFile.FileMap.FilesList.Length % options.Threads;

            log.Info("Divided files into " + options.Threads + " pools of " + dividedCount + " with " + dividedRemainder + " left over (to be tacked onto the last!)");

            BigFileFile[] files = new BigFileFile[bigFile.FileMap.FilesList.Length];
            Array.Copy(bigFile.FileMap.FilesList, files, files.Length);
            Array.Sort(files, (fileA, fileB) => fileA.FileInfo.Offset.CompareTo(fileB.FileInfo.Offset));

            List<BigFilePackInfo> infos = new List<BigFilePackInfo>();

            //packInfos is assumed to be a class-level BigFilePackInfo[] (not shown in this snippet), sized to at least options.Threads
            for (int i = 0; i < options.Threads; i++)
            {
                packInfos[i].Options    = options;
                packInfos[i].startIndex = i * dividedCount;
                packInfos[i].count      = dividedCount;
                packInfos[i].bigFile    = bigFile;
                packInfos[i].filesList  = files;
            }
            packInfos[options.Threads - 1].count      += dividedRemainder;
            packInfos[options.Threads - 1].OnCompleted = internal_OnPackFinished; //the last thread gets the job of stitching together the chunks

            for (int i = 0; i < options.Threads; i++)
            {
                ThreadPool.QueueUserWorkItem(internal_GenBigFileChunk, packInfos[i]);
                infos.Add(packInfos[i]);
            }

            return new BigFilePackOperationStatus(infos);
        }
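
The split arithmetic is worth spelling out: N files over T threads gives T pools of N / T files each, and the N % T leftovers are tacked onto the last pool, exactly as the log line says. A self-contained sketch of the same division (all names illustrative):

        using System;

        static class SplitSketch
        {
            static void Main()
            {
                int fileCount = 1000, threads = 3;
                int dividedCount     = fileCount / threads; //333 files per pool
                int dividedRemainder = fileCount % threads; //1 left over for the last pool

                for (int i = 0; i < threads; i++)
                {
                    int startIndex = i * dividedCount;
                    int count      = dividedCount + (i == threads - 1 ? dividedRemainder : 0);
                    Console.WriteLine("pool {0}: start {1}, count {2}", i, startIndex, count);
                }
            }
        }
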
Example #3
        public void MapFilesToFolders(BigFileFolder rootFolder, FileMappingData mapping)
        {
            log.Info("Mapping files to folders...");

            stopwatch.Reset();
            stopwatch.Start();

            for (int i = 0; i < mapping.FilesList.Length; i++)
            {
                BigFileFile file = mapping.FilesList[i];
                if (file != null)
                {
                    BigFileFolder folder = rootFolder.FolderMap[file.FileInfo.Folder];
                    if (folder != null)
                    {
                        folder.Files.Add(file);
                    }
                }
            }

            diagData.MapFilesToFolders = stopwatch.ElapsedMilliseconds;

            stopwatch.Reset();
        }
Example #4
        private void internal_UnpackFiles(object state)
        {
            UnpackThreadInfo info = state as UnpackThreadInfo;

            info.isUnpacking = true;
            info.stopwatch.Reset();
            info.stopwatch.Start();

            BigFileFile[] files = new BigFileFile[info.count];
            Array.Copy(info.bigFile.FileMap.FilesList, info.startIndex, files, 0, info.count);

            IEnumerator<int[]> headers = info.bigFile.FileReader.ReadAllHeaders(files, info.buffers, info.options.Flags).GetEnumerator();
            IEnumerator<int>   data    = info.bigFile.FileReader.ReadAllData(files, info.buffers, info.options.Flags).GetEnumerator();

            for (int i = 0; i < files.Length; i++)
            {
                info.progress = i;

                headers.MoveNext();
                data.MoveNext();

                log.Info("Unpacking file {0}", files[i].Name);

                //unpacked files always go under the UNPACK_DIR subdirectory
                string dataFileName = info.options.Directory.FullName + "\\"
                                      + BigFileConst.UNPACK_DIR + "\\"
                                      + info.fileMapping[files[i].FileInfo.Key].FileName;

                string headerFileName = dataFileName + BigFileConst.UNPACKED_HEADER_FILE_EXTENSION;

                using (FileStream dataFS = File.Create(dataFileName))
                    using (FileStream headerFS = File.Create(headerFileName))
                    {
                        int   size   = data.Current;
                        int[] header = headers.Current;
                        if (size != -1)
                        {
                            int headerCount = header.Length;
                            dataFS.Write(info.buffers[size], 0, size);

                            headerFS.Write(headerCount.ToByteArray(info.buffers[4]), 0, 4);
                            for (int j = 0; j < headerCount; j++)
                            {
                                headerFS.Write(header[j].ToByteArray(info.buffers[4]), 0, 4);
                                if (header[j] == 0)
                                {
                                    log.Error("WTF");
                                }
                            }
                        }
                        else
                        {
                            log.Error("Can't unpack file {0} because size is -1", files[i].Name);
                        }
                    }
            }

            log.Info("Unpack thread (ID:" + info.threadID + ") finished work!");
            info.isUnpacking = false;
            info.stopwatch.Stop();

            info.OnWorkDoneCallback.Invoke(info);
        }
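
The writes above imply the on-disk layout of the sidecar header file: a 4-byte entry count followed by that many 4-byte entries (presumably little-endian, matching BitConverter on x86, though ToByteArray's exact behavior isn't shown here). A sketch of reading that layout back; the format is inferred from the writes in this example rather than from a documented spec, and the class name is made up:

        using System.IO;

        static class HeaderFileSketch
        {
            //reads the layout written above: an int32 count, then count int32 entries
            static int[] ReadHeaderFile(string path)
            {
                using (BinaryReader reader = new BinaryReader(File.OpenRead(path)))
                {
                    int count = reader.ReadInt32();
                    int[] entries = new int[count];
                    for (int i = 0; i < count; i++)
                    {
                        entries[i] = reader.ReadInt32();
                    }
                    return entries;
                }
            }
        }
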
Example #5
        public FileMappingData CreateFileMappingData(BigFileFolder rootFolder, IBigFileFileInfo[] fileInfos)
        {
            log.Info("Creating mapping data...");
            log.Info("Creating files list...  Count: " + fileInfos.Length);

            stopwatch.Reset();
            stopwatch.Start();

            FileMappingData mappingData = new FileMappingData();

            BigFileFile[] filesList = new BigFileFile[fileInfos.Length];

            for (int i = 0; i < fileInfos.Length; i++)
            {
                BigFileFile newFile = null;
                if (fileInfos[i] != null)
                {
                    newFile             = new BigFileFile(fileInfos[i], rootFolder.FolderMap[fileInfos[i].Folder]);
                    newFile.MappingData = mappingData;
                    filesList[i]        = newFile;
                }
                else
                {
                    log.Error(string.Format("File info at index {0} is null!", i));
                }
            }

            diagData.CreateFilesList = stopwatch.ElapsedMilliseconds;

            log.Info("List created!");

            log.Info("Creating file mappings...");

            stopwatch.Reset();
            stopwatch.Start();

            Dictionary<int, BigFileFile> fileKeyMapping = new Dictionary<int, BigFileFile>();

            for (int i = 0; i < filesList.Length; i++)
            {
                if (fileInfos[i]?.Name == null)
                {
                    continue;
                }

                if (!fileKeyMapping.ContainsKey(fileInfos[i].Key))
                {
                    fileKeyMapping.Add(fileInfos[i].Key, filesList[i]);
                }
                else
                {
                    log.Error("File key mapping already contains key " + fileInfos[i].Key + " (File: " + filesList[i].Name + ")");
                }

                if (filesList[i].FileInfo.FileNumber == -1)
                {
                    log.Debug(string.Format("File number is -1! (key:{0:X8}) (offset:{1:X8})", fileInfos[i].Key, fileInfos[i].Offset));
                }
            }

            log.Info("Mappings created!");

            foreach (KeyValuePair<short, BigFileFolder> kvp in rootFolder.FolderMap)
            {
                kvp.Value.FileMap = mappingData;
            }

            diagData.CreateKeyAndNumMappings = stopwatch.ElapsedMilliseconds;

            stopwatch.Reset();

            mappingData.FilesList  = filesList;
            mappingData.KeyMapping = fileKeyMapping;

            log.Info("mappingData count: {0}", mappingData.FilesList.Length);

            return mappingData;
        }
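
The key mapping is a plain build-a-dictionary-with-duplicate-detection pass. A stand-in sketch of that pattern follows; on .NET Core 2.0 and later the ContainsKey/Add pair could be collapsed into Dictionary.TryAdd, but the explicit form below matches the example:

        using System;
        using System.Collections.Generic;

        static class KeyMappingSketch
        {
            static void Main()
            {
                int[] keys = { 0x10, 0x20, 0x10 }; //deliberate duplicate
                Dictionary<int, string> map = new Dictionary<int, string>();

                for (int i = 0; i < keys.Length; i++)
                {
                    if (!map.ContainsKey(keys[i]))
                    {
                        map.Add(keys[i], "file" + i);
                    }
                    else
                    {
                        Console.WriteLine("Key mapping already contains key {0:X8}", keys[i]);
                    }
                }
            }
        }
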
Example #6
        private void internal_GenBigFileChunk(object state)
        {
            BigFilePackInfo info = state as BigFilePackInfo;

            info.isPacking = true;
            info.diag.StartStopwatch();

            string tempDir = Environment.CurrentDirectory + BigFileConst.PACK_STAGING_DIR;

            Directory.CreateDirectory(tempDir);
            string chunkFileName    = tempDir + info.Options.BigFileName + ".chunk" + info.ThreadID;
            string metadataFilename = tempDir + info.Options.BigFileName + ".meta" + info.ThreadID;

            log.Info("Generating bigfile chunk: " + chunkFileName);

            using (FileStream chunkFS = new FileStream(chunkFileName, FileMode.Create, FileAccess.Write))
                using (FileStream metaFS = new FileStream(metadataFilename, FileMode.Create, FileAccess.Write))
                {
                    //reserve 8 bytes; the number of files chunked and the final size are written here at the end
                    metaFS.Write(BitConverter.GetBytes((long)0), 0, 8);

                    BigFileFile[] filesToWrite = new BigFileFile[info.count];
                    Array.Copy(info.filesList, info.startIndex, filesToWrite, 0, info.count);

                    log.Error("Thread ID {0} - First file is {1}", info.ThreadID, filesToWrite[0].Name);
                    log.Error("Thread ID {0} - Last file is {1}", info.ThreadID, filesToWrite[filesToWrite.Length - 1].Name);

                    BigFileFile currFile = null;

                    int index = -1;
                    foreach (int size in bigFile.FileReader.ReadAllRaw(filesToWrite, info.IOBuffers, info.Options.Flags))
                    {
                        index++;

                        currFile = filesToWrite[index];

                        log.Debug("Packing file {0}, size: {1}, ZIP: {2}", currFile.Name, size, currFile.FileInfo.ZIP);

                        if (size < 0)
                        {
                            metaFS.Write(BitConverter.GetBytes(currFile.FileInfo.FileNumber), 0, 4);
                            metaFS.Write(BitConverter.GetBytes(currFile.FileInfo.Key), 0, 4);
                            metaFS.Write(BitConverter.GetBytes(-1), 0, 4);
                            log.Error("WAIT WHAT");
                        }
                        else
                        {
                            //write the file number, key, and offset to metadata file
                            metaFS.Write(BitConverter.GetBytes(currFile.FileInfo.FileNumber), 0, 4);
                            metaFS.Write(BitConverter.GetBytes(currFile.FileInfo.Key), 0, 4);
                            metaFS.Write(BitConverter.GetBytes((int)chunkFS.Position), 0, 4);

                            if (currFile.FileInfo.ZIP == 1 && (info.Options.Flags & BigFileFlags.Compress) != 0)
                            {
                                int sizePos = (int)chunkFS.Position;
                                chunkFS.Write(info.IOBuffers[8], 0, 8); //write 8 placeholder bytes; the compressed and decompressed sizes are seeked back to and filled in below
                                using (ZlibStream zs = new ZlibStream(chunkFS, Ionic.Zlib.CompressionMode.Compress, true))
                                {
                                    zs.Write(info.IOBuffers[size], 0, size);
                                }

                                int newPos    = (int)chunkFS.Position;
                                int remainder = ((((newPos - sizePos) - 1) / 8 + 1) * 8) - (newPos - sizePos); //pad the record to the next 8-byte boundary

                                for (int i = 0; i < remainder; i++)
                                {
                                    chunkFS.WriteByte(0x00);
                                }

                                newPos = (int)chunkFS.Position;

                                int compressedSize = newPos - sizePos - 4;


                                //go back to the file offset and write the compressed and decompressed sizes
                                chunkFS.Seek(sizePos, SeekOrigin.Begin);
                                chunkFS.Write(BitConverter.GetBytes(compressedSize), 0, 4);
                                chunkFS.Write(BitConverter.GetBytes(size), 0, 4);
                                chunkFS.Seek(newPos, SeekOrigin.Begin);
                            }
                            else
                            {
                                int sizePos = (int)chunkFS.Position;
                                chunkFS.Write(BitConverter.GetBytes(size), 0, 4);
                                chunkFS.Write(info.IOBuffers[size], 0, size);
                                int remainder = (((((int)chunkFS.Position - sizePos) - 1) / 8 + 1) * 8) - ((int)chunkFS.Position - sizePos); //pad the record to the next 8-byte boundary
                                for (int i = 0; i < remainder; i++)
                                {
                                    chunkFS.WriteByte(0x00);
                                }
                            }
                        }

                        info.filesChunked++;
                    }

                    //write number of files chunked and final file size
                    metaFS.Seek(0, SeekOrigin.Begin);
                    metaFS.Write(BitConverter.GetBytes(info.filesChunked), 0, 4);
                    metaFS.Write(BitConverter.GetBytes(chunkFS.Length), 0, 4);
                }

            info.isPacking = false;
            info.diag.StopStopwatch();

            log.Info("Thread (ID: {0}) finished chunking work, time: {1,5}s", info.ThreadID, info.diag.StopwatchTime / 1000);

            info.OnCompleted?.Invoke(info);
        }
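
The remainder expression that appears twice above pads each record to the next 8-byte boundary: ((len - 1) / 8 + 1) * 8 rounds len up to a multiple of 8, and the difference is the number of zero bytes to append. A tiny demonstration of that arithmetic (valid for len >= 1):

        using System;

        static class PaddingSketch
        {
            //zero bytes needed to pad a record of len bytes to the next 8-byte boundary
            static int PadTo8(int len)
            {
                return ((len - 1) / 8 + 1) * 8 - len;
            }

            static void Main()
            {
                foreach (int len in new[] { 1, 7, 8, 9, 16 })
                {
                    Console.WriteLine("len {0,2} -> pad {1}", len, PadTo8(len)); //8 -> 0, 9 -> 7, ...
                }
            }
        }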