/// <summary>
/// Returns the file data, decompressed if needed.
/// </summary>
/// <param name="item">The grf file</param>
/// <param name="decompress">Should the data be decompressed?</param>
/// <returns>The raw (or decompressed) file data</returns>
public byte[] GetFileData(FileItem item, bool decompress)
{
    bool fromDisk = item.IsAdded || item.IsUpdated;
    byte[] data;

    if (fromDisk)
    {
        // Added/updated entries live on disk, not inside the archive
        data = File.ReadAllBytes(item.NewFilepath);
    }
    else
    {
        // (Re)cache if missing or the cached buffer has an unexpected size
        if (item.FileData == null || item.FileData.Length != item.LengthCompressedAlign)
        {
            CacheFileData(item);
        }
        data = item.FileData;
    }

    if (!fromDisk && data != null && data.Length > 0)
    {
        // Decode first, if needed: entries with a non-negative cycle are
        // DES-encrypted unless the buffer already starts with a deflate header
        if (item.Cycle >= 0 && !Deflate.IsMagicHead(data))
        {
            EncryptionHelper.DecryptFileData(data, item.Cycle == 0, item.Cycle);
        }

        // Decompress data
        if (decompress)
        {
            data = Deflate.Decompress(data);
        }
    }

    return data;
}
/// <summary>
/// Reads the uncompressed body of versions between 0x100 and 0x103.
/// No compression of the body but a mess on filenames.
/// </summary>
/// <param name="binReader">Reader positioned at the start of the file table</param>
/// <param name="fileCount">Number of entries in the file table</param>
/// <param name="skipFiles">Only read the body, skip parsing the entries?</param>
/// <returns>true on success</returns>
private bool ReadFilesVersion1(BinaryReader binReader, int fileCount, bool skipFiles)
{
    _fileTableLength = (ulong)(binReader.BaseStream.Length - binReader.BaseStream.Position);
    _filetableUncompressed = binReader.ReadBytes((int)_fileTableLength);

    // Read only body?
    if (skipFiles == false)
    {
        for (int i = 0, offset = 0; i < fileCount; i++)
        {
            var itemTableOffset = (uint)offset;
            var entryType = _filetableUncompressed[offset + 12];
            var offset2 = offset + BitConverter.ToInt32(_filetableUncompressed, offset) + 4;

            if (entryType == 0)
            {
                // Directory entry, nothing to register
                offset = offset2 + 17;
                continue;
            }

            var nameLen = _filetableUncompressed[offset] - 6;

            // These are client limits
            if (nameLen >= GrfMaxFilenameLength)
            {
                throw new Exception("Filename on index " + i + " is " + nameLen + " bytes long, max length is " + GrfMaxFilenameLength + ".");
            }

            var nameBuf = new byte[nameLen];
            Buffer.BlockCopy(_filetableUncompressed, offset + 6, nameBuf, 0, nameLen);
            var name = EncryptionHelper.DecodeFileName(nameBuf);

            // Check and fix the filename
            if (name.Contains('\0'))
            {
                name = name.Substring(0, name.IndexOf('\0'));
            }

            var compressedLenAligned = (uint)(BitConverter.ToInt32(_filetableUncompressed, offset2 + 4) - 37579);
            var realLen = (uint)BitConverter.ToInt32(_filetableUncompressed, offset2 + 8);
            var pos = (uint)BitConverter.ToInt32(_filetableUncompressed, offset2 + 13);

            var cycle = 0;
            var compressedLen = 0;
            if (name.Contains("."))
            {
                var ext = "." + name.Split('.').Last().ToLower();
                compressedLen = BitConverter.ToInt32(_filetableUncompressed, offset2) - BitConverter.ToInt32(_filetableUncompressed, offset2 + 8) - 715;
                if (ext != ".gnd" && ext != ".gat" && ext != ".act" && ext != ".str")
                {
                    // Fully encrypted entry: cycle grows with the number of
                    // decimal digits of the compressed length (eAthena logic)
                    cycle = 1;
                    for (int j = 10; compressedLen >= j; j *= 10)
                    {
                        cycle++;
                    }
                }
            }

            name = Tools.UnifyPath(name);
            var item = new FileItem
            {
                TableOffset = itemTableOffset,
                Index = Files.Count,
                Filepath = name,
                LengthCompressed = (uint)compressedLen,
                LengthCompressedAlign = compressedLenAligned,
                LengthUnCompressed = realLen,
                Flags = entryType,
                // FIX: the computed decryption cycle was never assigned, so the
                // value above was dead code and GetFileData() could not pick the
                // correct DES cycle for version-1 entries.
                Cycle = cycle,
                // base offset + header length
                DataOffset = pos + GrfHeaderLen
            };

            Files.Add(item.NameHash, item);
            _stringlist.Add(item.NameHash);
            _fileDataLength += item.LengthCompressedAlign;
            offset += (int)GrfFileLen;
#if !DISABLE_GRF_EVENTS
            OnItemAdded(item, i, fileCount);
#endif
        }
    }

    return true;
}
/// <summary>
/// Extract the file data to the given path.
/// <para>Note: the full path will be RootFolder + FilePath</para>
/// </summary>
/// <param name="rootFolder">The local root to save the data</param>
/// <param name="item">The grf file</param>
/// <param name="clearData">Should the data be cleaned (item.Flush()) after writing?</param>
public void ExtractFile(string rootFolder, FileItem item, bool clearData)
    => ExtractFile(rootFolder, item, clearData, false);
/// <summary>
/// Extract the file data to the given path.
/// <para>Note: The FilePath will be ignored if ignoreFilePath is set to true</para>
/// </summary>
/// <param name="rootFolder">The local root to save the data</param>
/// <param name="item">The grf file</param>
/// <param name="clearData">Should the data be cleaned (item.Flush()) after writing?</param>
/// <param name="ignoreFilePath">Ignore item filepath?</param>
public void ExtractFile(string rootFolder, FileItem item, bool clearData, bool ignoreFilePath)
{
    // Directories (Flags == 0) and freshly added entries carry no archive data
    if (item.Flags == 0 || item.IsAdded)
    {
        return;
    }

    var data = GetFileData(item, true);

    rootFolder = Tools.UnifyPath(rootFolder);
    if (!rootFolder.EndsWith("/"))
    {
        rootFolder += "/";
    }

    var extractDir = rootFolder;
    if (!ignoreFilePath)
    {
        extractDir = Path.Combine(extractDir, Tools.UnifyPath(Path.GetDirectoryName(item.Filepath)));
    }

    if (!Directory.Exists(extractDir))
    {
        Directory.CreateDirectory(extractDir);
    }

    var filename = Path.GetFileName(item.Filepath);
    if (string.IsNullOrEmpty(filename))
    {
        throw new Exception("Unable to extract filename from item filepath: " + item.Filepath);
    }

    var extractFilepath = Path.Combine(extractDir, filename);

    // Best effort: try to remove a stale file first, but don't fail extraction on it
    try
    {
        if (File.Exists(extractFilepath))
        {
            File.Delete(extractFilepath);
        }
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine(ex);
    }

    File.WriteAllBytes(extractFilepath, data);

    if (clearData)
    {
        item.FileData = null;
    }
}
/// <summary>
/// Caches the file data.
/// <para>Note: Updated files wont be recached!</para>
/// <para>Only an empty Buffer will be created, if data is null</para>
/// </summary>
/// <param name="item">The grf file</param>
public void CacheFileData(FileItem item)
{
    EnsureFilestream();

    // Data from added or updated files comes from disk, not from the archive
    if (item.IsAdded || item.IsUpdated)
    {
        item.FileData = File.ReadAllBytes(item.NewFilepath);
        return;
    }

    item.FileData = new byte[item.LengthCompressedAlign];
    if (item.LengthCompressedAlign > 0)
    {
        // maybe its a Directory
        _fileStream.Seek(item.DataOffset, SeekOrigin.Begin);

        // FIX: was ">=", which wrongly rejected an entry ending exactly at the
        // end of the stream (i.e. the last file stored in the GRF)
        if ((_fileStream.Position + item.LengthCompressedAlign) > _fileStream.Length)
        {
            throw new Exception("End of Stream reached - can not read Filedata from GRF!");
        }

        // FIX: Stream.Read may return fewer bytes than requested; loop until
        // the buffer is completely filled instead of ignoring the return value
        int total = 0;
        int read;
        while (total < (int)item.LengthCompressedAlign &&
               (read = _fileStream.Read(item.FileData, total, (int)item.LengthCompressedAlign - total)) > 0)
        {
            total += read;
        }

        if (total != (int)item.LengthCompressedAlign)
        {
            throw new Exception("End of Stream reached - can not read Filedata from GRF!");
        }
    }
}
/// <summary>
/// Removes the given file from internal lists.
/// The grf needs to be saved to persist the change.
/// </summary>
/// <param name="item">The grf file</param>
public void DeleteFile(FileItem item) => DeleteFile(FileItem.BuildNameHash(item.Filepath));
/// <summary>
/// Triggers the ItemWrite event.
/// </summary>
/// <param name="item">The item currently being written</param>
public void OnItemWrite(FileItem item)
{
    var handler = ItemWrite;
    if (handler == null)
    {
        return;
    }

    // Throttle: only raise the event when the integer percentage changed
    var percent = (int)((float)item.Index / Files.Count * 100);
    if (percent == _currentItemPercent)
    {
        return;
    }

    _currentItemPercent = percent;
    handler(item, percent);
}
/// <summary>
/// Reads the uncompressed body of versions equal or above 0x200.
/// The body is ZIP (deflate) compressed.
/// </summary>
/// <param name="binReader">Reader positioned at the start of the file table</param>
/// <param name="fileCount">Number of entries in the file table</param>
/// <param name="skipFiles">Only read the body, skip parsing the entries?</param>
/// <returns>true on success</returns>
private bool ReadFilesVersion2(BinaryReader binReader, int fileCount, bool skipFiles)
{
    int lengthCompressed = binReader.ReadInt32();
    int lengthUnCompressed = binReader.ReadInt32();
    _fileTableLength = (ulong)lengthUnCompressed;

    // FIX: the previous code called Read() and ignored its return value, which
    // may read fewer bytes than requested; ReadBytes() reads until the count is
    // reached or the stream ends, and we verify the count explicitly.
    // (Also removed a dead allocation of _filetableUncompressed that was
    // immediately overwritten by the Decompress() result.)
    var bufCompressed = binReader.ReadBytes(lengthCompressed);
    if (bufCompressed.Length != lengthCompressed)
    {
        throw new Exception("End of Stream reached - can not read the file table from GRF!");
    }
    _filetableUncompressed = Deflate.Decompress(bufCompressed);

    // Only read body?
    if (skipFiles == false)
    {
        for (int i = 0, offset = 0; i < fileCount; i++)
        {
            var itemTableOffset = (uint)offset;

            // Filename is a NUL-terminated string (StringBuilder avoids O(n^2) concat)
            var nameBuilder = new System.Text.StringBuilder();
            char c;
            while ((c = (char)_filetableUncompressed[offset++]) != '\0')
            {
                nameBuilder.Append(c);
            }
            var filepath = Tools.UnifyPath(nameBuilder.ToString());

            var item = new FileItem
            {
                TableOffset = itemTableOffset,
                Index = Files.Count,
                Filepath = filepath,
                Flags = _filetableUncompressed[offset + 12]
            };

            // File or directory?
            if (item.IsFile)
            {
                item.LengthCompressed = BitConverter.ToUInt32(_filetableUncompressed, offset);
                item.LengthCompressedAlign = BitConverter.ToUInt32(_filetableUncompressed, offset + 4);
                item.LengthUnCompressed = BitConverter.ToUInt32(_filetableUncompressed, offset + 8);
                // Offset is base offset + grf header
                item.DataOffset = BitConverter.ToUInt32(_filetableUncompressed, offset + 13) + GrfHeaderLen;

                // from eAthena, DES encryption
                item.Cycle = 1;
                switch (item.Flags)
                {
                    case 3:
                        // Cycle grows with the number of decimal digits of the compressed length
                        for (var lop = 10; item.LengthCompressed >= lop; lop = lop * 10, item.Cycle++)
                        {
                        }
                        break;
                    case 5:
                        item.Cycle = 0;
                        break;
                    default:
                        item.Cycle = -1;
                        break;
                }
            }
            else
            {
                // skip dirs
                offset += (int)GrfFileLen;
                continue;
            }

            // FIX: Some files in a tested grf are duplicated?
            // Keep the first occurrence and skip duplicates.
            if (GetFileByHash(item.NameHash) != null)
            {
                offset += (int)GrfFileLen;
                continue;
            }

            Files.Add(item.NameHash, item);
            _stringlist.Add(item.NameHash);
            _fileDataLength += item.LengthCompressedAlign;
            offset += (int)GrfFileLen;
#if !DISABLE_GRF_EVENTS
            OnItemAdded(item, i, fileCount);
#endif
        }
    }

    return true;
}
/// <summary>
/// Extract the file data to the given path.
/// <para>Note: the full path will be RootFolder + FilePath</para>
/// </summary>
/// <param name="rootFolder">The local root to save the data</param>
/// <param name="item">The grf file</param>
/// <param name="clearData">Should the data be cleaned (item.Flush()) after writing?</param>
public void ExtractFile(string rootFolder, FileItem item, bool clearData)
    => ExtractFile(rootFolder, item, clearData, false);
/// <summary>
/// Triggers the ItemAdded event.
/// </summary>
/// <param name="item">The item that was added</param>
/// <param name="num">Current item number</param>
/// <param name="maxCount">Total number of items</param>
public void OnItemAdded(FileItem item, int num, int maxCount)
{
    var handler = ItemAdded;
    if (handler == null)
    {
        return;
    }

    // Throttle: only raise the event when the integer percentage changed
    var percent = (int)((float)num / maxCount * 100);
    if (percent == _currentItemPercent)
    {
        return;
    }

    _currentItemPercent = percent;
    handler(item, percent);
}
/// <summary>
/// Removes the given file from internal lists.
/// The grf needs to be saved to persist the change.
/// </summary>
/// <param name="item">The grf file</param>
public void DeleteFile(FileItem item) => DeleteFile(FileItem.BuildNameHash(item.Filepath));
/// <summary>
/// Add the grf file to the internal lists.
/// <para>Note: File will be deleted and a clone added if the file already exists</para>
/// </summary>
/// <param name="item">The grf file</param>
/// <returns>The new item state (Added or Updated)</returns>
public FileItemState AddFile(FileItem item)
{
    FileItem existingItem;
    bool replaceExistingItem = false;

    if ((existingItem = GetFileByHash(item.NameHash)) != null)
    {
        // Replace old item
        if (existingItem.IsAdded)
        {
            // A newly added item should be replaced:
            // remove it and add the incoming one as a new entry
            DeleteFile(existingItem);
            replaceExistingItem = true;
        }
        else
        {
            // Update existing item with new uncompressed data
            existingItem.State |= FileItemState.Updated;
            // Mark as not deleted
            existingItem.State &= ~FileItemState.Deleted;

            // Check for file data
            if (item.IsAdded && item.NewFilepath != null && File.Exists(item.NewFilepath))
            {
                existingItem.NewFilepath = item.NewFilepath;
            }
            else if (item.FileData != null)
            {
                // FIX: Path.GetTempPath() returns the temp *directory*, so the
                // write below always failed; GetTempFileName() creates a unique
                // temp *file* we can write the data to.
                string tmpFilepath = Path.GetTempFileName();
                File.WriteAllBytes(tmpFilepath, item.FileData);
                existingItem.NewFilepath = tmpFilepath;
            }
            else
            {
                throw new Exception("Unable to fetch item data.");
            }

            // Update compressed length from the new data on disk
            existingItem.LengthCompressed = (uint)new FileInfo(existingItem.NewFilepath).Length;
            existingItem.FileData = new byte[0];

            // Inform the client about the update of an existing item
            return FileItemState.Updated;
        }
    }

    var newItem = item.Clone() as FileItem;
    if (newItem == null)
    {
        throw new Exception("Failed to clone item.");
    }

    // Really new item or just a replace?
    if (replaceExistingItem)
    {
        // Just replace the reference
        newItem.State = FileItemState.Updated;
        Files[newItem.NameHash] = newItem;
    }
    else
    {
        // Add new item
        newItem.State = FileItemState.Added;
        Files.Add(newItem.NameHash, newItem);
        _stringlist.Add(newItem.NameHash);
    }

    // If the file was previously found AND the existing one was a newly added
    // file, the old entry was deleted and this one replaces it — so report
    // Updated instead of Added, because no genuinely new entry was created.
    if (replaceExistingItem)
    {
        return FileItemState.Updated;
    }

    return FileItemState.Added;
}
/// <summary>
/// Builds the hash from the given filepath and returns the grf file with the hash.
/// <para>Note: returns null if not found</para>
/// </summary>
/// <param name="filepath">File filepath</param>
/// <returns>The matching <see cref="FileItem"/>, or null</returns>
public FileItem GetFileByName(string filepath) => GetFileByHash(FileItem.BuildNameHash(filepath));
/// <summary>
/// Reads the uncompressed body of versions equal or above 0x200.
/// The body is ZIP (deflate) compressed.
/// </summary>
/// <param name="binReader">Reader positioned at the start of the file table</param>
/// <param name="fileCount">Number of entries in the file table</param>
/// <param name="skipFiles">Only read the body, skip parsing the entries?</param>
/// <returns>true on success</returns>
private bool ReadFilesVersion2(BinaryReader binReader, int fileCount, bool skipFiles)
{
    int lengthCompressed = binReader.ReadInt32();
    int lengthUnCompressed = binReader.ReadInt32();
    _fileTableLength = (ulong)lengthUnCompressed;

    // FIX: the previous code called Read() and ignored its return value, which
    // may read fewer bytes than requested; ReadBytes() reads until the count is
    // reached or the stream ends, and we verify the count explicitly.
    // (Also removed a dead allocation of _filetableUncompressed that was
    // immediately overwritten by the Decompress() result.)
    var bufCompressed = binReader.ReadBytes(lengthCompressed);
    if (bufCompressed.Length != lengthCompressed)
    {
        throw new Exception("End of Stream reached - can not read the file table from GRF!");
    }
    _filetableUncompressed = Deflate.Decompress(bufCompressed);

    // Only read body?
    if (skipFiles == false)
    {
        for (int i = 0, offset = 0; i < fileCount; i++)
        {
            var itemTableOffset = (uint)offset;

            // Filename is a NUL-terminated string (StringBuilder avoids O(n^2) concat)
            var nameBuilder = new System.Text.StringBuilder();
            char c;
            while ((c = (char)_filetableUncompressed[offset++]) != '\0')
            {
                nameBuilder.Append(c);
            }
            var filepath = Tools.UnifyPath(nameBuilder.ToString());

            var item = new FileItem
            {
                TableOffset = itemTableOffset,
                Index = Files.Count,
                Filepath = filepath,
                Flags = _filetableUncompressed[offset + 12]
            };

            // File or directory?
            if (item.IsFile)
            {
                item.LengthCompressed = BitConverter.ToUInt32(_filetableUncompressed, offset);
                item.LengthCompressedAlign = BitConverter.ToUInt32(_filetableUncompressed, offset + 4);
                item.LengthUnCompressed = BitConverter.ToUInt32(_filetableUncompressed, offset + 8);
                // Offset is base offset + grf header
                item.DataOffset = BitConverter.ToUInt32(_filetableUncompressed, offset + 13) + GrfHeaderLen;

                // from eAthena, DES encryption
                item.Cycle = 1;
                switch (item.Flags)
                {
                    case 3:
                        // Cycle grows with the number of decimal digits of the compressed length
                        for (var lop = 10; item.LengthCompressed >= lop; lop = lop * 10, item.Cycle++)
                        {
                        }
                        break;
                    case 5:
                        item.Cycle = 0;
                        break;
                    default:
                        item.Cycle = -1;
                        break;
                }
            }
            else
            {
                // skip dirs
                offset += (int)GrfFileLen;
                continue;
            }

            // FIX: Some files in a tested grf are duplicated?
            // Keep the first occurrence and skip duplicates.
            if (GetFileByHash(item.NameHash) != null)
            {
                offset += (int)GrfFileLen;
                continue;
            }

            Files.Add(item.NameHash, item);
            _stringlist.Add(item.NameHash);
            _fileDataLength += item.LengthCompressedAlign;
            offset += (int)GrfFileLen;
#if !DISABLE_GRF_EVENTS
            OnItemAdded(item, i, fileCount);
#endif
        }
    }

    return true;
}
/// <summary>
/// Add the grf file to the internal lists.
/// <para>Note: File will be deleted and a clone added if the file already exists</para>
/// </summary>
/// <param name="item">The grf file</param>
/// <returns>The new item state (Added or Updated)</returns>
public FileItemState AddFile(FileItem item)
{
    FileItem existingItem;
    bool replaceExistingItem = false;

    if ((existingItem = GetFileByHash(item.NameHash)) != null)
    {
        // Replace old item
        if (existingItem.IsAdded)
        {
            // A newly added item should be replaced:
            // remove it and add the incoming one as a new entry
            DeleteFile(existingItem);
            replaceExistingItem = true;
        }
        else
        {
            // Update existing item with new uncompressed data
            existingItem.State |= FileItemState.Updated;
            // Mark as not deleted
            existingItem.State &= ~FileItemState.Deleted;

            // Check for file data
            if (item.IsAdded && item.NewFilepath != null && File.Exists(item.NewFilepath))
            {
                existingItem.NewFilepath = item.NewFilepath;
            }
            else if (item.FileData != null)
            {
                // FIX: Path.GetTempPath() returns the temp *directory*, so the
                // write below always failed; GetTempFileName() creates a unique
                // temp *file* we can write the data to.
                string tmpFilepath = Path.GetTempFileName();
                File.WriteAllBytes(tmpFilepath, item.FileData);
                existingItem.NewFilepath = tmpFilepath;
            }
            else
            {
                throw new Exception("Unable to fetch item data.");
            }

            // Update compressed length from the new data on disk
            existingItem.LengthCompressed = (uint)new FileInfo(existingItem.NewFilepath).Length;
            existingItem.FileData = new byte[0];

            // Inform the client about the update of an existing item
            return FileItemState.Updated;
        }
    }

    var newItem = item.Clone() as FileItem;
    if (newItem == null)
    {
        throw new Exception("Failed to clone item.");
    }

    // Really new item or just a replace?
    if (replaceExistingItem)
    {
        // Just replace the reference
        newItem.State = FileItemState.Updated;
        Files[newItem.NameHash] = newItem;
    }
    else
    {
        // Add new item
        newItem.State = FileItemState.Added;
        Files.Add(newItem.NameHash, newItem);
        _stringlist.Add(newItem.NameHash);
    }

    // If the file was previously found AND the existing one was a newly added
    // file, the old entry was deleted and this one replaces it — so report
    // Updated instead of Added, because no genuinely new entry was created.
    if (replaceExistingItem)
    {
        return FileItemState.Updated;
    }

    return FileItemState.Added;
}
/// <summary>
/// Creates a copy of this item; the FileData buffer is cloned as well.
/// </summary>
/// <returns>The cloned item as <see cref="object"/></returns>
public object Clone()
{
    var item = new FileItem
    {
        // FIX: TableOffset was not copied, so clones silently lost their
        // position in the file table while every other property was preserved.
        TableOffset = TableOffset,
        Index = Index,
        Filepath = Filepath,
        LengthCompressed = LengthCompressed,
        LengthCompressedAlign = LengthCompressedAlign,
        LengthUnCompressed = LengthUnCompressed,
        Flags = Flags,
        DataOffset = DataOffset,
        Cycle = Cycle,
        State = State,
        NewFilepath = NewFilepath
    };

    if (FileData != null)
    {
        item.FileData = FileData.Clone() as byte[];
    }

    return item;
}
/// <summary>
/// Reads the uncompressed body of versions between 0x100 and 0x103.
/// No compression of the body but a mess on filenames.
/// </summary>
/// <param name="binReader">Reader positioned at the start of the file table</param>
/// <param name="fileCount">Number of entries in the file table</param>
/// <param name="skipFiles">Only read the body, skip parsing the entries?</param>
/// <returns>true on success</returns>
private bool ReadFilesVersion1(BinaryReader binReader, int fileCount, bool skipFiles)
{
    _fileTableLength = (ulong)(binReader.BaseStream.Length - binReader.BaseStream.Position);
    _filetableUncompressed = binReader.ReadBytes((int)_fileTableLength);

    // Read only body?
    if (skipFiles == false)
    {
        for (int i = 0, offset = 0; i < fileCount; i++)
        {
            var itemTableOffset = (uint)offset;
            var entryType = _filetableUncompressed[offset + 12];
            var offset2 = offset + BitConverter.ToInt32(_filetableUncompressed, offset) + 4;

            if (entryType == 0)
            {
                // Directory entry, nothing to register
                offset = offset2 + 17;
                continue;
            }

            var nameLen = _filetableUncompressed[offset] - 6;

            // These are client limits
            if (nameLen >= GrfMaxFilenameLength)
            {
                throw new Exception("Filename on index " + i + " is " + nameLen + " bytes long, max length is " + GrfMaxFilenameLength + ".");
            }

            var nameBuf = new byte[nameLen];
            Buffer.BlockCopy(_filetableUncompressed, offset + 6, nameBuf, 0, nameLen);
            var name = EncryptionHelper.DecodeFileName(nameBuf);

            // Check and fix the filename
            if (name.Contains('\0'))
            {
                name = name.Substring(0, name.IndexOf('\0'));
            }

            var compressedLenAligned = (uint)(BitConverter.ToInt32(_filetableUncompressed, offset2 + 4) - 37579);
            var realLen = (uint)BitConverter.ToInt32(_filetableUncompressed, offset2 + 8);
            var pos = (uint)BitConverter.ToInt32(_filetableUncompressed, offset2 + 13);

            var cycle = 0;
            var compressedLen = 0;
            if (name.Contains("."))
            {
                var ext = "." + name.Split('.').Last().ToLower();
                compressedLen = BitConverter.ToInt32(_filetableUncompressed, offset2) - BitConverter.ToInt32(_filetableUncompressed, offset2 + 8) - 715;
                if (ext != ".gnd" && ext != ".gat" && ext != ".act" && ext != ".str")
                {
                    // Fully encrypted entry: cycle grows with the number of
                    // decimal digits of the compressed length (eAthena logic)
                    cycle = 1;
                    for (int j = 10; compressedLen >= j; j *= 10)
                    {
                        cycle++;
                    }
                }
            }

            name = Tools.UnifyPath(name);
            var item = new FileItem
            {
                TableOffset = itemTableOffset,
                Index = Files.Count,
                Filepath = name,
                LengthCompressed = (uint)compressedLen,
                LengthCompressedAlign = compressedLenAligned,
                LengthUnCompressed = realLen,
                Flags = entryType,
                // FIX: the computed decryption cycle was never assigned, so the
                // value above was dead code and GetFileData() could not pick the
                // correct DES cycle for version-1 entries.
                Cycle = cycle,
                // base offset + header length
                DataOffset = pos + GrfHeaderLen
            };

            Files.Add(item.NameHash, item);
            _stringlist.Add(item.NameHash);
            _fileDataLength += item.LengthCompressedAlign;
            offset += (int)GrfFileLen;
#if !DISABLE_GRF_EVENTS
            OnItemAdded(item, i, fileCount);
#endif
        }
    }

    return true;
}