/// <summary>
/// Creates a content-file entry and, when an owning directory is supplied,
/// registers this file in that directory's file list.
/// </summary>
/// <param name="name">Entry name, forwarded to the base constructor.</param>
/// <param name="dir">Owning directory, or null for a detached file.</param>
public EdataContentFile(string name, EdataDir dir = null) : base(name)
{
    Directory = dir;
    if (dir == null)
        return;
    dir.Files.Add(this);
}
/// <summary>
/// Creates a directory entry and, when a parent is supplied, registers this
/// directory in the parent's child list.
/// </summary>
/// <param name="name">Entry name, forwarded to the base constructor.</param>
/// <param name="parent">Parent directory, or null for a root.</param>
public EdataDir(string name, EdataDir parent = null) : base(name)
{
    Parent = parent;
    if (parent == null)
        return;
    parent.Children.Add(this);
}
/// <summary>
/// Creates a directory entry; a non-null parent adopts the new directory
/// into its child collection.
/// </summary>
/// <param name="name">Entry name, forwarded to the base constructor.</param>
/// <param name="parent">Parent directory, or null for a root.</param>
public EdataDir(string name, EdataDir parent = null) : base(name)
{
    Parent = parent;
    bool hasParent = parent != null;
    if (hasParent)
    {
        parent.Children.Add(this);
    }
}
/// <summary>
/// Creates a content-file entry; a non-null directory adopts the new file
/// into its file collection.
/// </summary>
/// <param name="name">Entry name, forwarded to the base constructor.</param>
/// <param name="dir">Owning directory, or null for a detached file.</param>
public EdataContentFile(string name, EdataDir dir = null) : base(name)
{
    Directory = dir;
    bool hasOwner = dir != null;
    if (hasOwner)
    {
        dir.Files.Add(this);
    }
}
/// <summary>
/// Computes the total dictionary size, in bytes, of a directory subtree:
/// for every child directory its own header entry plus its whole subtree,
/// plus the entries of this directory's files.
/// </summary>
/// <param name="dir">Root of the subtree to measure.</param>
/// <param name="version">Dictionary format version (defaults to Version2); currently not consulted here.</param>
/// <returns>Size in bytes of all dictionary entries below <paramref name="dir"/>.</returns>
public int GetSubTreeSize(EdataDir dir, int version = Version2)
{
    int size = 0;
    foreach (var child in dir.Children)
    {
        size += GetDirSize(child);
        // BUG FIX: the original called GetSubTreeSize(dir) here, recursing on the
        // same node forever — a StackOverflowException for any directory with
        // children. The recursion must descend into the child.
        size += GetSubTreeSize(child);
    }
    size += dir.Files.Sum(file => GetFileSize(file));
    return size;
}
/// <summary>
/// Computes the on-disk size, in bytes, of a single directory header entry:
/// a 4-byte group size, a 4-byte entry size, the zero-terminated name,
/// padded up to an even total length.
/// </summary>
/// <param name="dir">Directory whose header entry is measured.</param>
/// <param name="version">Dictionary format version (defaults to Version2); currently not consulted here.</param>
/// <returns>Padded entry size in bytes.</returns>
public int GetDirSize(EdataDir dir, int version = Version2)
{
    const int groupSizeField = 4;
    const int entrySizeField = 4;
    // Name is zero-terminated; one byte per character is assumed here,
    // matching the reader's character-count based padding.
    int nameBytes = dir.Name.Length + 1;
    int size = groupSizeField + entrySizeField + nameBytes;
    // Entries are aligned to an even byte count.
    if (size % 2 != 0)
        size++;
    return size;
}
/// <summary>
/// Collapses trivial chains in the directory tree. A non-root directory with
/// exactly one child and no files absorbs that child (names concatenated and the
/// grandchildren re-parented); a non-root directory with exactly one file and no
/// children folds that file into its parent (the directory name becomes a prefix
/// of the file name). Remaining children are then normalized recursively.
/// </summary>
/// <param name="dir">Directory to normalize in place.</param>
public void NormalizeDirs(EdataDir dir)
{
    // Case 1: single child directory, no files -> merge the child into this node.
    if (dir.Parent != null && dir.Files.Count == 0 && dir.Children.Count == 1)
    {
        var child = dir.Children[0];
        dir.Name += child.Name;
        // Adopt the grandchildren; dir.Children is mutated, but the loop
        // enumerates child.Children, so this is safe.
        foreach (var edataDir in child.Children)
        {
            dir.Children.Add(edataDir);
            edataDir.Parent = dir;
        }
        // Detach the adopted grandchildren from the absorbed child.
        foreach (var edataDir in dir.Children.Where(c => c != child))
            child.Children.Remove(edataDir);
        // Adopt the child's files the same way.
        foreach (var file in child.Files)
        {
            dir.Files.Add(file);
            file.Directory = dir;
        }
        foreach (var file in dir.Files)
            child.Files.Remove(file);
        dir.Children.Remove(child);
    }
    // Case 2: single file, no children -> fold the file into the parent and
    // drop this directory from the tree.
    if (dir.Parent != null && dir.Files.Count == 1 && dir.Children.Count == 0)
    {
        var file = dir.Files[0];
        file.Name = dir.Name + file.Name;
        file.Directory = dir.Parent;
        dir.Parent.Files.Add(file);
        dir.Parent.Children.Remove(dir);
        dir.Parent = null;
    }
    // Index-based loop: recursion may grow/shrink dir.Children, which a
    // foreach enumerator would not tolerate.
    for (int c = 0; c < dir.Children.Count; c++)
        NormalizeDirs(dir.Children[c]);
}
/// <summary>
/// Parses the mesh-file dictionary from the stream into a flat collection of
/// mesh content files. Directory records (group id &gt; 0) push a name onto the
/// current path; file records (group id == 0) carry bounding boxes, flags and
/// indices, and the directory stack is popped whenever the stream position
/// reaches a recorded group ending.
/// </summary>
/// <param name="s">Seekable stream positioned anywhere; the dictionary offset is taken from <paramref name="f"/>.</param>
/// <param name="f">Mesh file whose sub-header supplies the dictionary offset and size.</param>
/// <returns>A collection of the files found in the dictionary.</returns>
protected ObservableCollection<MeshContentFile> ReadMeshDictionary(Stream s, MeshFile f)
{
    var files = new ObservableCollection<MeshContentFile>();
    var dirs = new List<EdataDir>();       // current directory path (stack)
    var endings = new List<long>();        // stream positions where each open group ends
    s.Seek(f.SubHeader.Dictionary.Offset, SeekOrigin.Begin);
    long dirEnd = f.SubHeader.Dictionary.Offset + f.SubHeader.Dictionary.Size;
    while (s.Position < dirEnd)
    {
        var buffer = new byte[4];
        s.Read(buffer, 0, 4);
        int fileGroupId = BitConverter.ToInt32(buffer, 0);
        if (fileGroupId == 0)
        {
            // File record.
            var file = new MeshContentFile();
            s.Read(buffer, 0, 4);
            file.FileEntrySize = BitConverter.ToUInt32(buffer, 0);
            // Minimum corner of the bounding box: three consecutive floats.
            var minp = new Point3D();
            s.Read(buffer, 0, buffer.Length);
            minp.X = BitConverter.ToSingle(buffer, 0);
            s.Read(buffer, 0, buffer.Length);
            minp.Y = BitConverter.ToSingle(buffer, 0);
            s.Read(buffer, 0, buffer.Length);
            minp.Z = BitConverter.ToSingle(buffer, 0);
            file.MinBoundingBox = minp;
            // Maximum corner of the bounding box.
            var maxp = new Point3D();
            s.Read(buffer, 0, buffer.Length);
            maxp.X = BitConverter.ToSingle(buffer, 0);
            s.Read(buffer, 0, buffer.Length);
            maxp.Y = BitConverter.ToSingle(buffer, 0);
            s.Read(buffer, 0, buffer.Length);
            maxp.Z = BitConverter.ToSingle(buffer, 0);
            file.MaxBoundingBox = maxp;
            s.Read(buffer, 0, buffer.Length);
            file.Flags = BitConverter.ToUInt32(buffer, 0);
            // Two 16-bit indices follow the flags.
            buffer = new byte[2];
            s.Read(buffer, 0, buffer.Length);
            file.MultiMaterialMeshIndex = BitConverter.ToUInt16(buffer, 0);
            s.Read(buffer, 0, buffer.Length);
            file.HierarchicalAseModelSkeletonIndex = BitConverter.ToUInt16(buffer, 0);
            file.Name = Utils.ReadString(s);
            file.Path = MergePath(dirs, file.Name);
            // Entries are padded to an even length; name + null terminator is
            // odd when the name length is even, so skip one pad byte then.
            if (file.Name.Length % 2 == 0)
                s.Seek(1, SeekOrigin.Current);
            files.Add(file);
            // Pop every directory group that ends exactly here.
            while (endings.Count > 0 && s.Position == endings.Last())
            {
                dirs.Remove(dirs.Last());
                endings.Remove(endings.Last());
            }
        }
        else if (fileGroupId > 0)
        {
            // Directory record: push it onto the current path.
            var dir = new EdataDir();
            s.Read(buffer, 0, 4);
            dir.FileEntrySize = BitConverter.ToInt32(buffer, 0);
            if (dir.FileEntrySize != 0)
                // Group end = entry size measured from the start of this record
                // (8 bytes — group id + entry size — have already been read).
                endings.Add(dir.FileEntrySize + s.Position - 8);
            else if (endings.Count > 0)
                // Zero entry size: this group ends where the enclosing one does.
                endings.Add(endings.Last());
            dir.Name = Utils.ReadString(s);
            // Same even-length padding rule as for file names.
            if (dir.Name.Length % 2 == 0)
                s.Seek(1, SeekOrigin.Current);
            dirs.Add(dir);
        }
    }
    return files;
}
/// <summary>
/// Parses the version-2 EDATA dictionary from the file at FilePath into a flat
/// collection of content files. The only tricky part of the algorithm is that
/// one byte must be skipped whenever the length of a file/directory name plus
/// its null terminator is an odd number.
/// </summary>
/// <returns>A collection of the files found in the dictionary.</returns>
protected ObservableCollection<EdataContentFile> ReadEdatV2Dictionary()
{
    var files = new ObservableCollection<EdataContentFile>();
    var dirs = new List<EdataDir>();       // current directory path (stack)
    var endings = new List<long>();        // stream positions where each open group ends
    using (FileStream fileStream = File.Open(FilePath, FileMode.Open))
    {
        fileStream.Seek(Header.DictOffset, SeekOrigin.Begin);
        long dirEnd = Header.DictOffset + Header.DictLength;
        uint id = 0;                       // sequential id assigned in read order
        while (fileStream.Position < dirEnd)
        {
            var buffer = new byte[4];
            fileStream.Read(buffer, 0, 4);
            int fileGroupId = BitConverter.ToInt32(buffer, 0);
            if (fileGroupId == 0)
            {
                // File record: entry size, 64-bit offset and size, 16-byte checksum, name.
                var file = new EdataContentFile();
                fileStream.Read(buffer, 0, 4);
                file.FileEntrySize = BitConverter.ToInt32(buffer, 0);
                buffer = new byte[8];
                fileStream.Read(buffer, 0, buffer.Length);
                file.Offset = BitConverter.ToInt64(buffer, 0);
                fileStream.Read(buffer, 0, buffer.Length);
                file.Size = BitConverter.ToInt64(buffer, 0);
                var checkSum = new byte[16];
                fileStream.Read(checkSum, 0, checkSum.Length);
                file.Checksum = checkSum;
                file.Name = Utils.ReadString(fileStream);
                file.Path = MergePath(dirs, file.Name);
                // Entries are padded to an even length; name + null terminator
                // is odd when the name length is even, so skip one pad byte.
                if (file.Name.Length % 2 == 0)
                    fileStream.Seek(1, SeekOrigin.Current);
                file.Id = id;
                id++;
                ResolveFileType(fileStream, file);
                files.Add(file);
                // Pop every directory group that ends exactly here.
                while (endings.Count > 0 && fileStream.Position == endings.Last())
                {
                    dirs.Remove(dirs.Last());
                    endings.Remove(endings.Last());
                }
            }
            else if (fileGroupId > 0)
            {
                // Directory record: push it onto the current path.
                var dir = new EdataDir();
                fileStream.Read(buffer, 0, 4);
                dir.FileEntrySize = BitConverter.ToInt32(buffer, 0);
                if (dir.FileEntrySize != 0)
                    // Group end = entry size measured from the start of this record
                    // (8 bytes — group id + entry size — already consumed).
                    endings.Add(dir.FileEntrySize + fileStream.Position - 8);
                else if (endings.Count > 0)
                    // Zero entry size: this group ends where the enclosing one does.
                    endings.Add(endings.Last());
                dir.Name = Utils.ReadString(fileStream);
                // Same even-length padding rule as for file names.
                if (dir.Name.Length % 2 == 0)
                    fileStream.Seek(1, SeekOrigin.Current);
                dirs.Add(dir);
            }
        }
    }
    return files;
}
/// <summary>
/// Writes one dictionary level to the stream: for every child directory a
/// header (subtree size, entry size, zero-terminated name, even-length pad),
/// then one record per file (zero marker, entry size, 64-bit offset and size,
/// 16-byte checksum, name, even-length pad).
/// </summary>
/// <param name="dir">Directory whose children and files are serialized.</param>
/// <param name="s">Seekable destination stream.</param>
/// <param name="version">Dictionary format version (defaults to Version2); currently not consulted here.</param>
public void WriteDirToStream(EdataDir dir, Stream s, int version = Version2)
{
    foreach (var child in dir.Children)
    {
        s.Write(BitConverter.GetBytes(GetSubTreeSize(child)), 0, 4);
        s.Write(BitConverter.GetBytes(GetDirSize(child)), 0, 4);
        // BUG FIX: the original encoded dir.Name here, stamping the parent's
        // name onto every child header; the child's own name must be written.
        var nameData = Encoding.Unicode.GetBytes(child.Name);
        s.Write(nameData, 0, nameData.Length);
        s.Write(Encoding.Unicode.GetBytes("\0"), 0, 1);
        // Entries are padded to an even byte count; skip over the pad byte.
        if ((nameData.Length + 1) % 2 == 1)
            s.Seek(1, SeekOrigin.Current);
    }
    foreach (var file in dir.Files)
    {
        s.Write(BitConverter.GetBytes(0), 0, 4); // group id 0 marks a file record
        s.Write(BitConverter.GetBytes(GetFileSize(file)), 0, 4);
        s.Write(BitConverter.GetBytes(file.Offset), 0, 8);
        s.Write(BitConverter.GetBytes(file.Size), 0, 8);
        s.Write(file.Checksum, 0, 16);
        var nameData = Encoding.Unicode.GetBytes(file.Name);
        s.Write(nameData, 0, nameData.Length);
        if (nameData.Length % 2 == 1)
            s.Seek(1, SeekOrigin.Current);
    }
    // NOTE(review): this serializes a single level only (no recursion into
    // children), and names are written as UTF-16 while the read side pads by
    // character count — confirm both against the dictionary reader.
}
/// <summary>
/// Writes the raw payload of every file in the subtree rooted at
/// <paramref name="node"/> to the stream, recording each file's offset
/// (relative to <paramref name="fileOffset"/>) and its size as it goes,
/// then descends depth-first into the child directories.
/// </summary>
/// <param name="node">Root of the subtree to serialize.</param>
/// <param name="pack">Package supplying each file's raw payload bytes.</param>
/// <param name="s">Destination stream; current position determines each file's offset.</param>
/// <param name="fileOffset">Base position subtracted from the stream position to get relative offsets.</param>
public void WriteAllFilesInTree(EdataDir node, EdataPackage pack, Stream s, long fileOffset)
{
    foreach (var file in node.Files)
    {
        byte[] raw = pack.GetRawData(file, false);
        file.Offset = s.Position - fileOffset;
        file.Size = raw.Length;
        s.Write(raw, 0, raw.Length);
    }

    foreach (var subDir in node.Children)
    {
        WriteAllFilesInTree(subDir, pack, s, fileOffset);
    }
}
/// <summary>
/// Factors shared name prefixes under <paramref name="dir"/> into intermediate
/// directories: every leading substring of each child/file name is counted, and
/// the most frequent (ties broken by longest) prefix occurring more than once is
/// extracted into a new child directory; matching entries move under it with the
/// prefix stripped from their names. Repeats until no prefix is shared, recursing
/// into both existing and newly created directories.
/// </summary>
/// <param name="dir">Directory whose immediate children and files are factored.</param>
/// <param name="recursionRoot">Root of the overall normalization; defaults to <paramref name="dir"/> on the outermost call.</param>
/// <returns>The recursion root, i.e. the top of the normalized tree.</returns>
public EdataDir NormalizeFiles(EdataDir dir, EdataDir recursionRoot = null)
{
    if (recursionRoot == null)
        recursionRoot = dir;

    // Count how often every leading substring occurs among child and file names.
    var matches = new Dictionary<string, int>();
    foreach (var child in dir.Children)
    {
        NormalizeFiles(child, recursionRoot);
        var tmp = new StringBuilder();
        foreach (var c in child.Name)
        {
            tmp.Append(c);
            var tmpStr = tmp.ToString();
            if (matches.ContainsKey(tmpStr))
                matches[tmpStr]++;
            else
                matches.Add(tmpStr, 1);
        }
    }
    foreach (var file in dir.Files)
    {
        var tmp = new StringBuilder();
        foreach (var c in file.Name)
        {
            tmp.Append(c);
            var tmpStr = tmp.ToString();
            if (matches.ContainsKey(tmpStr))
                matches[tmpStr]++;
            else
                matches.Add(tmpStr, 1);
        }
    }

    while (matches.Count > 0)
    {
        // Most frequent prefix; ties broken by preferring the longest key.
        var max = matches.OrderByDescending(x => x.Value).ThenByDescending(x => x.Key.Length).FirstOrDefault();
        if (max.Equals(default(KeyValuePair<string, int>)) || max.Value == 1)
            break;

        // The EdataDir constructor links newDir into dir.Children.
        var newDir = new EdataDir(max.Key, dir);

        var dirsToClean = new List<EdataDir>();
        foreach (var subDir in dir.Children.Where(d => d.Name.StartsWith(max.Key)).Where(subDir => subDir != newDir))
        {
            dirsToClean.Add(subDir);
            subDir.Parent = newDir;
            newDir.Children.Add(subDir);
            // BUG FIX: the original used TrimStart(max.Key.ToCharArray()), which
            // strips any leading run of those *characters*, not the prefix string
            // (e.g. "abba".TrimStart('a','b') == ""). Remove exactly the prefix.
            subDir.Name = subDir.Name.Substring(max.Key.Length);
        }
        // Removal is deferred so dir.Children is not mutated while enumerated.
        foreach (var subDir in dirsToClean.Where(subDir => dir.Children.Contains(subDir)))
            dir.Children.Remove(subDir);

        var filesToClean = new List<EdataContentFile>();
        foreach (var file in dir.Files.Where(file => file.Name.StartsWith(max.Key)))
        {
            filesToClean.Add(file);
            file.Directory = newDir;
            newDir.Files.Add(file);
            // BUG FIX: same TrimStart misuse as above — strip only the prefix string.
            file.Name = file.Name.Substring(max.Key.Length);
        }
        foreach (var file in filesToClean.Where(file => dir.Files.Contains(file)))
            dir.Files.Remove(file);

        // Drop every counted prefix that overlaps the one just extracted.
        matches = matches.Where(x => !(x.Key.StartsWith(max.Key) || max.Key.StartsWith(x.Key))).ToDictionary(match => match.Key, match => match.Value);

        // Normalize the freshly created directory as well.
        NormalizeFiles(newDir, recursionRoot);
    }
    return recursionRoot;
}