public int Save(Stream fileStream, uint? sizeLimit,
    int startIndex = 0, List<string> splitList = null)
{
    uint size = 0;
    int endIndex = -1;
    bool isRootPAC = !sizeLimit.HasValue;

    // BINA Header
    var writer = new BINAWriter(fileStream, Header);
    if (Header.ID == 0)
    {
        // Get a random non-zero value between 1 and 0xFFFFFFFF
        // because I care too much™
        var rand = new Random();
        do
        {
            Header.ID = unchecked((uint)rand.Next(
                int.MinValue, int.MaxValue));
        }
        while (Header.ID == 0);
    }

    // Generate list of files to pack, categorized by extension
    var filesByExt = new Dictionary<string, List<DataContainer>>();
    for (int i = startIndex; i < Data.Count; ++i)
    {
        var file = (Data[i] as ArchiveFile);
        if (file == null)
            continue;

        int extIndex = file.Name.IndexOf('.');
        if (extIndex == -1)
        {
            Console.WriteLine(
                "WARNING: Skipped {0} as it has no extension!",
                file.Name);
            continue;
        }

        string ext = file.Name.Substring(extIndex);
        if (!Types.ContainsKey(ext))
        {
            Console.WriteLine(
                "WARNING: Skipped {0} as its extension ({1}) is unsupported!",
                file.Name, ext);
            continue;
        }

        // Root-Exclusive Type Check
        var dataEntry = new DataEntry(file.Data);
        if (isRootPAC)
        {
            dataEntry.DataType = (!RootExclusiveTypes.Contains(ext)) ?
                DataEntryTypes.NotHere : DataEntryTypes.Regular;
        }
        else if (RootExclusiveTypes.Contains(ext))
        {
            continue;
        }

        // BINA Header Check
        if (file.Data != null && file.Data.Length > 3 &&
            file.Data[0] == 0x42 && file.Data[1] == 0x49 &&
            file.Data[2] == 0x4E && file.Data[3] == 0x41)
        {
            dataEntry.DataType = DataEntryTypes.BINAFile;
        }

        // Split if you exceed the sizeLimit
        if (!isRootPAC && sizeLimit.HasValue)
        {
            // Not very accurate but close enough™
            size += (uint)file.Data.Length;
            if (size >= sizeLimit.Value)
            {
                endIndex = i;
                break;
            }
        }

        // Add Node to list, making the list first if necessary
        List<DataContainer> files;
        if (!filesByExt.ContainsKey(ext))
        {
            files = new List<DataContainer>();
            filesByExt.Add(ext, files);
        }
        else
        {
            files = filesByExt[ext];
        }

        // We use Substring instead of Path.GetFileNameWithoutExtension
        // to support files with multiple extensions (e.g. *.grass.bin)
        string shortName = file.Name.Substring(0, extIndex);
        files.Add(new DataContainer(shortName, dataEntry));
    }

    // Pack file list into Node Trees and generate types list
    //var fileTrees = new Dictionary<string, NodeTree>();
    var types = new List<DataContainer>();

    foreach (var fileType in filesByExt)
    {
        var fileTree = new NodeTree(fileType.Value)
        {
            CustomData = fileType.Key.Substring(1)
        };

        types.Add(new DataContainer(
            Types[fileType.Key], fileTree));
    }

    // Pack types list into Node Tree
    var typeTree = new NodeTree(types);

    // Write types Node Tree
    const string typeTreePrefix = "typeTree";
    typeTree.Write(writer, typeTreePrefix);

    // Write file Node Trees and generate an array of trees in the
    // same order as they'll be in the file for easier writing
    var fileTrees = new NodeTree[typeTree.DataNodeIndices.Count];
    for (int i = 0; i < fileTrees.Length; ++i)
    {
        int dataNodeIndex = typeTree.DataNodeIndices[i];
        var tree = (NodeTree)typeTree.Nodes[dataNodeIndex].Data;

        writer.FillInOffsetLong(
            $"{typeTreePrefix}nodeDataOffset{dataNodeIndex}",
            true, false);

        fileTrees[i] = tree;
        tree.Write(writer, $"fileTree{i}");
    }

    // Write Data Node Indices
    typeTree.WriteDataIndices(writer, typeTreePrefix);
    for (int i = 0; i < fileTrees.Length; ++i)
    {
        fileTrees[i].WriteDataIndices(writer, $"fileTree{i}");
    }

    // Write Child Node Indices
    typeTree.WriteChildIndices(writer, typeTreePrefix);
    for (int i = 0; i < fileTrees.Length; ++i)
    {
        fileTrees[i].WriteChildIndices(writer, $"fileTree{i}");
    }

    // Write Split PACs section
    if (isRootPAC && splitList != null)
    {
        writer.Write((ulong)splitList.Count);
        writer.AddOffset("splitPACsOffset", 8);
        writer.FillInOffsetLong("splitPACsOffset", true, false);
        Header.SplitListLength += 16;

        for (int i = 0; i < splitList.Count; ++i)
        {
            writer.AddString($"splitPACName{i}", splitList[i], 8);
            Header.SplitListLength += 8;
        }
    }

    // Write File Entries
    long fileEntriesOffset = fileStream.Position;
    for (int i = 0; i < fileTrees.Length; ++i)
    {
        fileTrees[i].WriteDataEntries(writer, $"fileTree{i}",
            (string)fileTrees[i].CustomData, Header.ID);
    }

    // Write String Table
    uint stringTablePos = (uint)fileStream.Position;
    writer.WriteStringTable(Header);
    writer.FixPadding(8);

    // Write File Data
    long fileDataOffset = fileStream.Position;
    for (int i = 0; i < fileTrees.Length; ++i)
    {
        fileTrees[i].WriteData(writer, $"fileTree{i}",
            (string)fileTrees[i].CustomData);
    }

    // Write Offset Table
    writer.FixPadding(8);
    uint footerPos = writer.WriteFooter(Header);

    // Fill-In Header
    uint fileSize = (uint)fileStream.Position;
    writer.BaseStream.Position = 0;

    Header.NodeTreeLength = (uint)(fileEntriesOffset -
        PACxHeader.Length) - Header.SplitListLength;

    Header.FileEntriesLength = stringTablePos - (uint)fileEntriesOffset;
    Header.StringTableLength = (uint)fileDataOffset - stringTablePos;
    Header.DataLength = (footerPos - (uint)fileDataOffset);

    // HasSplits (5) if there are splits and this is the root,
    // IsSplit (2) if this is a split, HasNoSplits (1) if there are no splits
    Header.PacType = (sizeLimit.HasValue) ?
        PACxHeader.PACTypes.IsSplit : (splitList != null) ?
        PACxHeader.PACTypes.HasSplits : PACxHeader.PACTypes.HasNoSplits;

    Header.SplitCount = (splitList == null) ?
        0U : (uint)splitList.Count;

    Header.FinishWrite(writer);
    return endIndex;
}
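
// Usage sketch (illustration only, not part of the original file): one plausible
// way a caller sitting in the same class could drive Save to write a set of
// split PACs and then the root PAC that references them. The helper name
// SaveWithSplitsSketch, the ".000"-style split naming, and the 30 MB size cap
// are assumptions made for this example; the library's real split-saving code
// may differ.
private void SaveWithSplitsSketch(string rootPath,
    uint splitSizeLimit = 30 * 1024 * 1024)
{
    var splitNames = new List<string>();
    int splitStartIndex = 0, splitIndex = 0;

    // Keep writing splits until Save reports there is nothing left to pack.
    // Save returns the index of the first file that did not fit into the
    // current split (so the next split resumes from there), or -1 once every
    // remaining file has been written. Note that a single file larger than
    // splitSizeLimit would make this loop spin forever, so a real caller
    // would want to guard against that case.
    do
    {
        string splitName = $"{Path.GetFileName(rootPath)}.{splitIndex:D3}";
        using (var splitStream = File.Create($"{rootPath}.{splitIndex:D3}"))
        {
            // A non-null sizeLimit marks this PAC as a split.
            splitStartIndex = Save(splitStream, splitSizeLimit, splitStartIndex);
        }

        splitNames.Add(splitName);
        ++splitIndex;
    }
    while (splitStartIndex != -1);

    // A null sizeLimit marks this PAC as the root; the collected split names
    // are written into its Split PACs section and counted in the header.
    using (var rootStream = File.Create(rootPath))
    {
        Save(rootStream, null, 0, splitNames);
    }
}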