// Wraps a Function1D for derivative evaluation. If the source function
// supplies one analytic derivative per parameter, it is used directly;
// otherwise a SmallData buffer of the right length is allocated and the
// local F routine is installed as the evaluation code.
public DerivFunction(Function1D f)
{
    code = f.code;
    p = f.p;
    if (f.dfdp.Length != f.p.Length)
    {
        // No matching analytic derivatives: allocate and evaluate via F.
        dfdp = new SmallData();
        // NOTE(review): both branches set fast = true, which makes the flag
        // indistinguishable between the analytic and fallback paths — confirm
        // this branch should not be fast = false.
        fast = true;
        dfdp.Length = p.Length;
        this.f = new Code(F);
    }
    else
    {
        // Analytic derivatives provided: reuse them and the original code.
        dfdp = f.dfdp;
        fast = true;
        this.f = f.f;
    }
}
// Returns true when this entry and b hold identical small-data contents:
// same used length and the same value in all eight storage words.
// Short-circuits in the same order as a field-by-field comparison.
public bool Match(ref SmallData b)
{
    return Length == b.Length
        && Data1 == b.Data1
        && Data2 == b.Data2
        && Data3 == b.Data3
        && Data4 == b.Data4
        && Data5 == b.Data5
        && Data6 == b.Data6
        && Data7 == b.Data7
        && Data8 == b.Data8;
}
/// <summary>
/// Writes the node tree out as a classic "UTF " (version 1) file.
/// Layout: 56-byte header, node block, 4-byte-aligned string table, data block.
/// Data payloads of 64 bytes or less are de-duplicated to shrink the file.
/// </summary>
/// <param name="filename">Path of the file to create (overwritten if present).</param>
/// <param name="error">Receives a description of the failure when false is returned.</param>
/// <returns>true on success; false if any node has neither children nor data.</returns>
public bool Save(string filename, ref string error)
{
    //Check for invalid UTF: every node must carry either children or data.
    foreach (var node in Root.IterateAll())
    {
        if (node.Children == null && node.Data == null)
        {
            error = string.Format("{0} is empty. Can't write UTF", GetUtfPath(node));
            return false;
        }
    }
    Dictionary<string, int> stringOffsets = new Dictionary<string, int>();
    Dictionary<LUtfNode, int> dataOffsets = new Dictionary<LUtfNode, int>();
    List<string> strings = new List<string>();
    // Seen-set for O(1) membership; `strings` keeps first-seen order for the
    // string table layout. (List.Contains per node was O(n^2) overall.)
    HashSet<string> seenStrings = new HashSet<string>();
    using (var writer = new BinaryWriter(File.Create(filename)))
    {
        int currentDataOffset = 0;
        int bytesSaved = 0;
        List<SmallData> smallDatas = new List<SmallData>();
        // First pass: collect names, assign data offsets, de-duplicate small payloads.
        foreach (var node in Root.IterateAll())
        {
            if (seenStrings.Add(node.Name))
                strings.Add(node.Name);
            if (node.Data != null)
            {
                // Data allocations are rounded up to a 4-byte boundary.
                int dataAlloc = node.Data.Length + 3 & ~3;
                node.Write = true;
                //De-duplicate data up to 64 bytes
                if (node.Data.Length <= 64)
                {
                    var small = new SmallData(node.Data);
                    int idx = -1;
                    for (int i = 0; i < smallDatas.Count; i++)
                    {
                        if (smallDatas[i].Match(ref small)) { idx = i; break; }
                    }
                    if (idx == -1)
                    {
                        // First occurrence: record it and reserve space.
                        small.Offset = currentDataOffset;
                        smallDatas.Add(small);
                        dataOffsets.Add(node, currentDataOffset);
                        currentDataOffset += dataAlloc;
                    }
                    else
                    {
                        // Duplicate payload: point the node at the existing copy.
                        node.Write = false;
                        bytesSaved += smallDatas[idx].Length;
                        dataOffsets.Add(node, smallDatas[idx].Offset);
                    }
                }
                else
                {
                    dataOffsets.Add(node, currentDataOffset);
                    currentDataOffset += dataAlloc;
                }
            }
        }
        LibreLancer.FLLog.Info("UTF", "Bytes Saved: " + bytesSaved);
        // Build the string table: ASCII, null-terminated entries. A lone "/"
        // is stored as "\" (legacy path-separator quirk of the format).
        byte[] stringBlock;
        using (var mem = new MemoryStream())
        {
            foreach (var str in strings)
            {
                stringOffsets.Add(str, (int)mem.Position);
                var strx = str;
                if (strx == "/") strx = "\\";
                var strb = Encoding.ASCII.GetBytes(strx);
                mem.Write(strb, 0, strb.Length);
                mem.WriteByte(0); //null terminate
            }
            strings = null;
            stringBlock = mem.ToArray();
        }
        // Serialize the node tree against the offsets computed above.
        byte[] nodeBlock;
        using (var mem = new MemoryStream())
        {
            WriteNode(Root, new BinaryWriter(mem), stringOffsets, dataOffsets, true);
            nodeBlock = mem.ToArray();
        }
        // String table allocation is also rounded up to 4 bytes.
        int strAlloc = stringBlock.Length + 3 & ~3;
        /*write signature*/
        writer.Write((byte)'U');
        writer.Write((byte)'T');
        writer.Write((byte)'F');
        writer.Write((byte)' ');
        writer.Write(LibreLancer.Utf.UtfFile.FILE_VERSION);
        writer.Write((int)56); //nodeBlockOffset (56 = fixed header size)
        writer.Write((int)nodeBlock.Length); //nodeBlockLength
        writer.Write((int)0); //unused entry offset
        writer.Write((int)44); //entry Size - Not accurate but FL expects it to be 44
        writer.Write((int)56 + nodeBlock.Length); //stringBlockOffset
        writer.Write((int)strAlloc); //namesAllocatedSize
        writer.Write((int)stringBlock.Length); //namesUsedSize
        writer.Write((int)(56 + nodeBlock.Length + strAlloc)); //dataBlockOffset
        writer.Write((int)0); //unused
        writer.Write((int)0); //unused
        writer.Write((ulong)125596224000000000); //Fake filetime
        writer.Write(nodeBlock);
        writer.Write(stringBlock);
        // Pad the string table out to its aligned allocation size.
        for (int i = 0; i < (strAlloc - stringBlock.Length); i++) writer.Write((byte)0);
        stringBlock = null;
        nodeBlock = null;
        //write out data block (skipping nodes whose data was de-duplicated)
        foreach (var node in Root.IterateAll())
        {
            if (node.Write && node.Data != null)
            {
                writer.Write(node.Data);
                int dataAlloc = node.Data.Length + 3 & ~3;
                for (int i = 0; i < (dataAlloc - node.Data.Length); i++) writer.Write((byte)0);
            }
        }
    }
    return true;
}
/// <summary>
/// Writes the node tree out in the experimental "XUTF" (v2) format:
/// 4-byte signature, version byte, 16-bit flags, three size fields, then the
/// string table, node block and data block. Payloads of 8 bytes or less are
/// inlined by the node writer (node.Write = false); payloads of 9-64 bytes
/// are de-duplicated; payloads over 128 bytes are deflate-compressed when
/// that saves at least 10%.
/// </summary>
/// <param name="filename">Path of the file to create (overwritten if present).</param>
/// <param name="error">NOTE(review): never assigned — this method currently has
/// no failure path and always returns true.</param>
/// <returns>Always true.</returns>
bool SaveV2(string filename, ref string error)
{
    Dictionary<string, int> stringOffsets = new Dictionary<string, int>();
    Dictionary<LUtfNode, int> dataOffsets = new Dictionary<LUtfNode, int>();
    List<string> strings = new List<string>();
    // Seen-set for O(1) membership; `strings` keeps first-seen order for the
    // string table layout. (List.Contains per node was O(n^2) overall.)
    HashSet<string> seenStrings = new HashSet<string>();
    using (var writer = new BinaryWriter(File.Create(filename)))
    {
        int currentDataOffset = 0;
        int bytesSaved = 0;
        List<SmallData> smallDatas = new List<SmallData>();
        int nodeCount = 0;
        // First pass: count nodes, collect names, assign data offsets,
        // de-duplicate small payloads and pre-compress large ones.
        foreach (var node in Root.IterateAll())
        {
            nodeCount++;
            if (seenStrings.Add(node.Name))
                strings.Add(node.Name);
            if (node.Data != null)
            {
                // <=8 bytes is inlined in the node record rather than the data block.
                node.Write = node.Data.Length > 8;
                if (node.Data.Length > 8 && node.Data.Length <= 64)
                {
                    var small = new SmallData(node.Data);
                    int idx = -1;
                    for (int i = 0; i < smallDatas.Count; i++)
                    {
                        if (smallDatas[i].Match(ref small)) { idx = i; break; }
                    }
                    if (idx == -1)
                    {
                        // First occurrence: record it and reserve space (no alignment in v2).
                        small.Offset = currentDataOffset;
                        smallDatas.Add(small);
                        dataOffsets.Add(node, currentDataOffset);
                        currentDataOffset += node.Data.Length;
                    }
                    else
                    {
                        // Duplicate payload: point the node at the existing copy.
                        node.Write = false;
                        bytesSaved += smallDatas[idx].Length;
                        dataOffsets.Add(node, smallDatas[idx].Offset);
                    }
                }
                else if (node.Data.Length > 8)
                {
                    dataOffsets.Add(node, currentDataOffset);
                    if (node.Data.Length > 128)
                    {
                        // Only keep the compressed form if it saves at least 10%.
                        var compressed = CompressDeflate(node.Data);
                        if (compressed.Length < (node.Data.Length * 0.9))
                        {
                            node.CompressedData = compressed;
                            currentDataOffset += node.CompressedData.Length;
                        }
                        else
                        {
                            currentDataOffset += node.Data.Length;
                        }
                    }
                    else
                    {
                        currentDataOffset += node.Data.Length;
                    }
                }
            }
        }
        //string block: UTF-8, each entry prefixed with a 16-bit length.
        //A lone "/" is stored as "\" (legacy path-separator quirk).
        byte[] stringBlock;
        using (var mem = new MemoryStream())
        {
            foreach (var str in strings)
            {
                stringOffsets.Add(str, (int)mem.Position);
                var strx = str;
                if (strx == "/") strx = "\\";
                var strb = Encoding.UTF8.GetBytes(strx);
                mem.Write(BitConverter.GetBytes((short)strb.Length));
                mem.Write(strb, 0, strb.Length);
            }
            strings = null;
            stringBlock = mem.ToArray();
        }
        ushort flags = 0;
        // Deflate-compress the whole string table if that halves its size.
        var stringsComp = CompressDeflate(stringBlock);
        if (stringsComp.Length < (stringBlock.Length / 2))
        {
            flags = 0x1; //bit 0: string block is deflate-compressed
            stringBlock = stringsComp;
        }
        //sig
        writer.Write((byte)'X');
        writer.Write((byte)'U');
        writer.Write((byte)'T');
        writer.Write((byte)'F');
        //format version
        writer.Write((byte)1);
        //format flags (bit 0 = compressed strings)
        writer.Write(flags);
        //sizes: string block, node block, data block
        writer.Write(stringBlock.Length);
        // 17 bytes per serialized node record — assumes this matches
        // WriteNodeV2's output size; TODO confirm.
        writer.Write(nodeCount * 17);
        writer.Write(currentDataOffset);
        //write strings
        writer.Write(stringBlock);
        stringBlock = null;
        //node block
        int index = 0;
        WriteNodeV2(Root, writer, stringOffsets, dataOffsets, ref index, true);
        //data block: compressed form if present, otherwise raw; skips inlined
        //(<=8 byte) and de-duplicated nodes via node.Write.
        foreach (var node in Root.IterateAll())
        {
            if (node.Write && node.Data != null)
            {
                writer.Write(node.CompressedData ?? node.Data);
                node.CompressedData = null; //release the temporary buffer
            }
        }
    }
    return true;
}