Example #1
        private void Read(BinaryReader br, Archive parent)
        {
            FileTableOffset = br.ReadUInt32();
            FileTableSize   = br.ReadUInt32();

            Crc                     = br.ReadUInt64();
            FileEntryCount          = br.ReadUInt32();
            FileSegmentCount        = br.ReadUInt32();
            ResourceDependencyCount = br.ReadUInt32();

            // read tables
            for (int i = 0; i < FileEntryCount; i++)
            {
                var entry = new FileEntry(br, parent);

                if (!FileEntries.ContainsKey(entry.NameHash64))
                {
                    FileEntries.Add(entry.NameHash64, entry);
                }
                else
                {
                    // TODO: duplicate NameHash64; the later entry is currently discarded
                }
            }

            for (int i = 0; i < FileSegmentCount; i++)
            {
                FileSegments.Add(new FileSegment(br, i));
            }

            for (int i = 0; i < ResourceDependencyCount; i++)
            {
                Dependencies.Add(new Dependency(br, i));
            }
        }
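The TODO branch above silently drops any entry whose NameHash64 was already seen. A minimal sketch of one alternative, assuming the intent is last-entry-wins (an assumption, not confirmed by this source):

                if (!FileEntries.ContainsKey(entry.NameHash64))
                {
                    FileEntries.Add(entry.NameHash64, entry);
                }
                else
                {
                    // Assumption: prefer the entry read last and overwrite the earlier one.
                    FileEntries[entry.NameHash64] = entry;
                }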
Example #2
        void SerializeAllocationTable(IO.EndianStream s)
        {
            long entries_count = FileEntries.Count;

            s.Stream(ref entries_count);
            if (entries_count > 0)
            {
                if (s.IsReading)
                {
                    FileEntries.Capacity = (int)entries_count;
                    for (int x = 0; x < entries_count; x++)
                    {
                        var e = new CaPackageEntry();
                        s.Stream(ref e);
                        FileEntries.Add(e);
                    }
                }
                else if (s.IsWriting)
                {
                    foreach (var e in FileEntries)
                    {
                        // take a local copy: foreach iteration variables cannot be passed by ref
                        var e_copy = e;
                        s.Stream(ref e_copy);
                    }
                }
            }
        }
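Example #2 relies on a single routine serving both directions: s.Stream(ref x) fills x when s.IsReading and writes it when s.IsWriting, so the field layout is declared exactly once. A minimal sketch of that idea over plain BinaryReader/BinaryWriter (illustrative only, not the actual IO.EndianStream API):

        // Illustrative bidirectional wrapper; requires System.IO.
        sealed class BiStream
        {
            public BinaryReader Reader;                  // non-null when reading
            public BinaryWriter Writer;                  // non-null when writing
            public bool IsReading => Reader != null;
            public bool IsWriting => Writer != null;

            public void Stream(ref long value)
            {
                if (IsReading)      value = Reader.ReadInt64();
                else if (IsWriting) Writer.Write(value);
            }
        }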
Example #3
        /// <summary>
        /// Return whether a file exists with the given path.
        /// </summary>
        /// <param name="localFullPath">The full path relative to the root of this zip archive.</param>
        public bool FileExists(string localFullPath)
        {
            if (localFullPath == null)
            {
                throw new ArgumentNullException(nameof(localFullPath));
            }

            return(FileEntries.Contains(localFullPath));
        }
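A hedged usage sketch for FileExists; the archive variable and path are illustrative:

        // Hypothetical usage: "archive" is an instance of the containing class.
        if (archive.FileExists("data/config.xml"))
        {
            Console.WriteLine("entry found in the zip archive");
        }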
        private void AddFiles(string physicalPathRoot, string[] sourceFiles, ParallelTasks <CompressBlock> tasks)
        {
            // Write file data sequentially
            ulong decompressedFileOffset = 0;
            var   readBuffer             = new byte[BlockSize];
            var   readBufferPos          = 0;
            ulong blockOffset            = 0;

            foreach (string filePath in sourceFiles)
            {
                using var fs = File.OpenRead(Path.Combine(physicalPathRoot, filePath));

                var fileEntry = new FileEntry()
                {
                    PathHash           = GetHashForPath(filePath),
                    DecompressedOffset = decompressedFileOffset,
                    DecompressedSize   = (uint)fs.Length,
                };

                decompressedFileOffset += fileEntry.DecompressedSize;

                FileEntries.Add(fileEntry);

                // This appends data until a 256KB block write/flush is triggered - combining multiple files into single block entries
                int read;
                while ((read = fs.Read(readBuffer, readBufferPos, readBuffer.Length - readBufferPos)) > 0)
                {
                    if (readBufferPos + read < BlockSize)
                    {
                        readBufferPos += read;
                        break;
                    }

                    tasks.AddItem(new CompressBlock()
                    {
                        DecompressedOffset = blockOffset,
                        DecompressedSize   = BlockSize,
                        DataBuffer         = readBuffer
                    });

                    readBufferPos = 0;
                    readBuffer    = new byte[BlockSize];
                    blockOffset  += BlockSize;
                }
            }

            if (readBufferPos > 0)
            {
                tasks.AddItem(new CompressBlock()
                {
                    DecompressedOffset = blockOffset,
                    DecompressedSize   = (uint)readBufferPos,
                    DataBuffer         = readBuffer
                });
            }
        }
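GetHashForPath is not shown in this example. Purely as an illustration (not the project's actual hash), a 64-bit FNV-1a over a normalized path is one common way to produce this kind of PathHash key:

        // Illustrative only: 64-bit FNV-1a over a lower-cased, forward-slash path; requires System.Text.
        private static ulong HashPathFnv1a64(string path)
        {
            const ulong fnvOffset = 14695981039346656037UL;
            const ulong fnvPrime  = 1099511628211UL;

            ulong hash = fnvOffset;
            foreach (byte b in Encoding.UTF8.GetBytes(path.Replace('\\', '/').ToLowerInvariant()))
            {
                hash = unchecked((hash ^ b) * fnvPrime);
            }
            return hash;
        }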
Example #5
        /// <summary>
        /// Gets file byte data from the most principal location, considering file times and backup status.
        /// The user must manually call EndAllDatAccess to close access to any opened .dat files during the process.
        /// </summary>
        /// <param name="relativeFilePath">The file path relative to HGL installation directory.</param>
        /// <param name="ignorePatchedOut">If true, will ignore the files patched out state effectivly forcing file reading from .dats as if it was never patched out.</param>
        /// <returns>The file byte array, or null on error.</returns>
        public byte[] GetFileBytes(String relativeFilePath, bool ignorePatchedOut = false)
        {
            String directoryString = Path.GetDirectoryName(relativeFilePath).ToLower() + "\\";
            String fileName        = Path.GetFileName(relativeFilePath).ToLower();
            UInt64 filePathHash    = Crypt.GetStringsSHA1UInt64(directoryString, fileName);

            PackFileEntry fileEntry;

            return(FileEntries.TryGetValue(filePathHash, out fileEntry) ? GetFileBytes(fileEntry, ignorePatchedOut) : null);
        }
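A hedged usage sketch for GetFileBytes; the variable name and path are illustrative, and the EndAllDatAccess call (signature assumed) follows the note in the summary above:

        // Hypothetical usage: "fileManager" is an instance of the containing class.
        byte[] bytes = fileManager.GetFileBytes(@"data\skills.txt.cooked");
        if (bytes == null)
        {
            Console.WriteLine("file missing or read failed");
        }
        fileManager.EndAllDatAccess();   // close any .dat handles opened during access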
Example #6
        private void ReadHeader(BinaryReaderEx input)
        {
            int       curOffset = 0;
            FileEntry pboEntry;

            do
            {
                pboEntry = new FileEntry(input)
                {
                    StartOffset = curOffset
                };

                curOffset += pboEntry.DataSize;

                if (pboEntry.IsVersion)
                {
                    string name;
                    string value;
                    do
                    {
                        name = input.ReadAsciiz();
                        if (name == "")
                        {
                            break;
                        }
                        Properties.AddLast(name);

                        value = input.ReadAsciiz();
                        Properties.AddLast(value);

                        if (name == "prefix")
                        {
                            Prefix = value;
                        }
                    }while (name != "");

                    if (Properties.Count % 2 != 0)
                    {
                        throw new Exception("metaData count is not even.");
                    }
                }
                else if (pboEntry.FileName != "")
                {
                    FileEntries.AddLast(pboEntry);
                }
            }while (pboEntry.FileName != "" || FileEntries.Count == 0);

            DataOffset = (int)input.Position;

            if (Prefix == null)
            {
                Prefix = Path.GetFileNameWithoutExtension(PBOFilePath);
            }
        }
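BinaryReaderEx.ReadAsciiz is not shown here; given the PBO header layout, it presumably reads a zero-terminated ASCII string. A minimal sketch of that behavior over a plain BinaryReader (an assumption, not the project's implementation):

        // Illustrative zero-terminated ASCII string reader; requires System.Text.
        private static string ReadAsciiz(BinaryReader reader)
        {
            var bytes = new System.Collections.Generic.List<byte>();
            byte b;
            while ((b = reader.ReadByte()) != 0)
            {
                bytes.Add(b);
            }
            return Encoding.ASCII.GetString(bytes.ToArray());
        }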
Example #7
        /// <summary>
        /// Return an enumeration over files that exist as immediate descendants of the given directory.
        /// If localFullRootPath is null, return the files in the root of the archive;
        /// otherwise, return the full path of each file directly under the supplied path.
        ///
        /// - Each returned string must begin with the value of localFullRootPath
        /// - Trailing spaces must be trimmed
        /// </summary>
        /// <param name="localFullRootPath">The (optional) path to search for files in</param>
        public IEnumerable <string> EnumerateFiles(string localFullRootPath)
        {
            localFullRootPath = FormatRootPath(localFullRootPath);
            var files = FileEntries
                        .Where(s => s.StartsWith(localFullRootPath, StringComparison.OrdinalIgnoreCase));

            foreach (var file in files)
            {
                var remainder = file.Substring(localFullRootPath.Length);
                if (remainder.Any(c => c == '/'))
                {
                    continue;
                }
                yield return(file);
            }
        }
        public void BuildFromFileList(string physicalPathRoot, string[] sourceFiles)
        {
            WriterDecompressedBlockOffset = 0;
            byte[] tempCompressedBuffer = new byte[WriterBlockSizeThreshold * 2];

            long totalBlockSize = sourceFiles.Sum(file => new FileInfo(Path.Combine(physicalPathRoot, file)).Length);
            int  blockCount     = (int)((totalBlockSize + WriterBlockSizeThreshold) / WriterBlockSizeThreshold);
            int  fileCount      = sourceFiles.Length;

            using var fs            = File.Open(_archivePath, _allowOverwrite ? FileMode.Create : FileMode.CreateNew, FileAccess.ReadWrite, FileShare.None);
            using var archiveWriter = new BinaryWriter(fs, Encoding.UTF8, true);
            using var blockStream   = new MemoryStream();

            // Reserve space for the header
            archiveWriter.BaseStream.Position = CalculateArchiveHeaderLength(fileCount, blockCount);

            // Write file data sequentially
            ulong decompressedFileOffset = 0;

            foreach (string filePath in sourceFiles)
            {
                using var reader = new BinaryReader(File.OpenRead(Path.Combine(physicalPathRoot, filePath)));

                var fileEntry = new FileEntry()
                {
                    PathHash           = GetHashForPath(filePath),
                    DecompressedOffset = decompressedFileOffset,
                    DecompressedSize   = (uint)reader.BaseStream.Length,
                };

                // This appends data until a 256KB block write/flush is triggered - combining multiple files into single block entries
                reader.BaseStream.CopyTo(blockStream);
                decompressedFileOffset += fileEntry.DecompressedSize;

                WriteBlockEntries(archiveWriter, blockStream, false, tempCompressedBuffer);
                FileEntries.Add(fileEntry);
            }

            WriteBlockEntries(archiveWriter, blockStream, true, tempCompressedBuffer);

            // Rewind & insert headers before the compressed data
            archiveWriter.BaseStream.Position = 0;
            WriteArchiveHeaders(archiveWriter);
        }
        private void WriteArchiveHeaders(BinaryWriter writer)
        {
            FileEntries.Sort((x, y) => x.PathHash.CompareTo(y.PathHash));
            Header.FileEntryCount = (uint)FileEntries.Count;

            BlockEntries.Sort((x, y) => x.DecompressedOffset.CompareTo(y.DecompressedOffset));
            Header.BlockEntryCount = (uint)BlockEntries.Count;

            Header.ToData(writer);

            foreach (var entry in FileEntries)
            {
                entry.ToData(writer, Header);
            }

            foreach (var entry in BlockEntries)
            {
                entry.ToData(writer, Header);
            }
        }
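WriteArchiveHeaders sorts the file table by PathHash before writing it. The source does not say why, but a common reason for this layout is to let readers locate an entry with a binary search over the sorted table, sketched here with illustrative names:

        // Hypothetical reader-side lookup over a file table sorted by PathHash.
        private static int FindEntryIndex(IReadOnlyList<FileEntry> sortedEntries, ulong pathHash)
        {
            int lo = 0, hi = sortedEntries.Count - 1;
            while (lo <= hi)
            {
                int mid = lo + (hi - lo) / 2;
                if (sortedEntries[mid].PathHash == pathHash) return mid;
                if (sortedEntries[mid].PathHash < pathHash)  lo = mid + 1;
                else                                         hi = mid - 1;
            }
            return -1;   // not present
        }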
Example #10
 public NuspecBuilder WithFile(string source, string target)
 {
     FileEntries.Add(Tuple.Create(source, target));
     return(this);
 }
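Because WithFile returns the builder itself, calls chain fluently. The constructor and paths below are illustrative:

 // Hypothetical usage, assuming a parameterless constructor.
 var nuspec = new NuspecBuilder()
     .WithFile("bin/Release/MyLib.dll", "lib/net6.0")
     .WithFile("README.md", "docs");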
Example #11
        public bool ReadHeader(BinaryReader br)
        {
            if (!br.CheckHeaderString("MP00", reversed: true))
            {
                return(false);
            }

            EntryCount = br.ReadInt32();

            for (var i = 0; i < EntryCount; ++i)
            {
                var entry = new FileEntry(br);

                FileEntries.Add(entry.Crc, entry);

                var crc  = entry.Crc;
                var crc2 = entry.Crc2;

                var block = StreamingManager.GetStreamBlockByCRC(crc, out string source);
                if (block != null)
                {
                    Console.WriteLine($"Adding entry: 0x{entry.Crc:X8} => {block.FileName,-45} through {source}");
                }

                block = StreamingManager.GetStreamBlockByCRC(crc2, out string source2);
                if (block != null)
                {
                    Console.WriteLine($"Adding entry: 0x{crc:X8} => 0x{crc2:X8} => {block.FileName,-45} through {source2}");
                }
                else
                {
                    Console.WriteLine($"Adding unknown entry: 0x{crc:X8} => 0x{crc2:X8}");
                }
            }

            Array3D8 = new int[2 * EntryCount];

            for (var i = 0; i < EntryCount; ++i)
            {
                Array3D8[2 * i]     = br.ReadInt32();
                Array3D8[2 * i + 1] = br.ReadInt32();

                var entryCrc = Array3D8[2 * i];
                var crc2     = Array3D8[2 * i + 1];

                if (!FileEntries.ContainsKey(entryCrc))
                {
                    Console.WriteLine($"ERROR: 0x{entryCrc:X8} => 0x{crc2:X8} is not a valid fileentry!");
                    continue;
                }

                var entry = FileEntries[entryCrc];
                if (entry.Crc == entryCrc && entry.Crc2 == crc2)
                {
                    continue;
                }

                string source;
                var    block = StreamingManager.GetStreamBlockByCRC(entryCrc, out source);
                if (block != null)
                {
                    Console.WriteLine($"Map entry: 0x{entryCrc:X8} => {block.FileName,-45} through {source}");
                }

                block = StreamingManager.GetStreamBlockByCRC(crc2, out source);
                if (block != null)
                {
                    Console.WriteLine($"Map entry: 0x{entryCrc:X8} 0x{crc2:X8} => {block.FileName,-45} through {source}");
                }
                else
                {
                    Console.WriteLine($"Map unknown entry: 0x{entryCrc:X8} => 0x{crc2:X8}");
                }
            }

            return(true);
        }
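CheckHeaderString is an extension that is not shown; with reversed: true it presumably compares the next bytes against the magic string in reverse order. An illustrative sketch, not the project's actual helper:

        // Illustrative magic-string check; requires System and System.Text.
        private static bool CheckHeaderString(BinaryReader br, string expected, bool reversed)
        {
            byte[] bytes = br.ReadBytes(expected.Length);
            if (bytes.Length != expected.Length)
            {
                return false;
            }
            if (reversed)
            {
                Array.Reverse(bytes);
            }
            return Encoding.ASCII.GetString(bytes) == expected;
        }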
Example #12
 public void AddFileEntry(AwesomeFileEntry entry)
 {
     FileEntries.Add(entry);
 }
Example #13
        /// <summary>
        /// Parses a single index file, checking for an accompanying dat file and populating the file index.
        /// </summary>
        /// <param name="packFile">The full path of the index file to parse.</param>
        private void _LoadIndexFile(PackFile packFile)
        {
            // loop through index files
            foreach (PackFileEntry currFileEntry in packFile.Files)
            {
                //if (currFileEntry.Name.Contains("bldg_c_station_warp_next_layout.xml.cooked") || currFileEntry.Name.Contains("sku."))
                //{
                //    int bp = 0;
                //}

                ulong pathHash = currFileEntry.PathHash;

                // have we added the file yet
                if (!FileEntries.ContainsKey(pathHash))
                {
                    FileEntries.Add(pathHash, currFileEntry);
                    continue;
                }

                // we haven't added the file, so we need to compare file times and backup states
                PackFileEntry origFileEntry = FileEntries[pathHash];

                // do backup checks first as they'll "override" the FileTime values (i.e. file not found causes game to go to older version)
                // if currFile IS a backup, and orig is NOT, then add to Siblings as game will be loading orig over "backup" anyways
                if (currFileEntry.IsPatchedOut && !origFileEntry.IsPatchedOut)
                {
                    if (origFileEntry.Siblings == null)
                    {
                        origFileEntry.Siblings = new List <PackFileEntry>();
                    }
                    origFileEntry.Siblings.Add(currFileEntry);

                    continue;
                }

                // if curr is NOT a backup, but orig IS, then we want to update (i.e. don't care about FileTime; as above)
                // OR if orig is older than curr, we also want to update/re-arrange Siblings, etc
                if ((!currFileEntry.IsPatchedOut && origFileEntry.IsPatchedOut) ||
                    origFileEntry.FileTime < currFileEntry.FileTime)
                {
                    // set the Siblings list to the updated FileEntry and null out other
                    if (origFileEntry.Siblings != null)
                    {
                        currFileEntry.Siblings = origFileEntry.Siblings;
                        origFileEntry.Siblings = null;
                    }

                    // add the "orig" (now old) to the curr FileEntry.Siblings list
                    if (currFileEntry.Siblings == null)
                    {
                        currFileEntry.Siblings = new List <PackFileEntry>();
                    }
                    currFileEntry.Siblings.Add(origFileEntry);
                    FileEntries[pathHash] = currFileEntry;

                    continue;
                }

                // if curr is older (or equal to; hellgate000 has duplicates) than the orig, then add this to the Siblings list (i.e. orig is newer)
                if (origFileEntry.FileTime >= currFileEntry.FileTime)
                {
                    if (origFileEntry.Siblings == null)
                    {
                        origFileEntry.Siblings = new List <PackFileEntry>();
                    }
                    origFileEntry.Siblings.Add(currFileEntry);

                    continue;
                }

                Debug.Assert(false, "End of 'if (FileEntries.ContainsKey(hash))'", "wtf??\n\nThis shouldn't happen, please report this.");
            }
        }
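The branching above encodes a precedence rule: a live (non-patched-out) entry beats a patched-out one, and otherwise the newer FileTime wins, with ties keeping the original. The same rule condensed into a predicate, for reference only (not part of the original source):

        // Hypothetical condensation of the sibling/override rules used above.
        private static bool ShouldReplace(PackFileEntry current, PackFileEntry candidate)
        {
            if (candidate.IsPatchedOut && !current.IsPatchedOut) return false; // patched-out never beats live
            if (!candidate.IsPatchedOut && current.IsPatchedOut) return true;  // live beats patched-out
            return candidate.FileTime > current.FileTime;                      // otherwise newer wins, ties keep current
        }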
Example #14
 private Boolean CanClearEntries()
 => ((CanExecute()) && (FileEntries.HasItems()));