/// <summary>
/// Serializes the attribute list of <paramref name="obj"/> (format B) into a
/// temporary buffer, then writes it prefixed with its 16-bit byte length.
/// </summary>
/// <param name="stream">Destination stream.</param>
/// <param name="obj">Object whose attributes are written.</param>
/// <returns>true if any attribute data was written; false if the list was empty.</returns>
/// <exception cref="InvalidOperationException">Attribute data exceeds 65535 bytes.</exception>
protected bool WriteAttributesList_FmtB(BinaryStream stream, NomadObject obj)
{
    var ptr = (int)stream.Position;

    byte[] buffer;

    using (var bs = new BinaryStream(1024))
    {
        // 'ptr' is passed as the base offset so offset-type descriptor
        // references inside the temp buffer resolve against the real
        // stream position.
        WriteAttributesData_FmtB(bs, obj.Attributes, ptr);
        buffer = bs.ToArray();
    }

    var length = buffer.Length;

    // the length prefix is 16 bits wide
    if (length > 65535)
    {
        throw new InvalidOperationException("Attribute data too large.");
    }

    // unsigned cast: lengths in 32768..65535 produce the same on-disk
    // bytes as the previous (short) cast, but won't overflow in a
    // checked context (and matches the ushort usage elsewhere, e.g.
    // EntityReferenceData.Serialize)
    stream.Write((ushort)length);
    stream.Write(buffer, 0, length);

    // did we write any data?
    return (length > 0);
}
/// <summary>
/// Serializes this compressed chunk: the CRC of the last sorted enum,
/// the compressed and decompressed byte counts, then the compressed
/// payload. Field order must mirror the reader's layout.
/// </summary>
public void Serialize(BinaryStream output)
{
    output.Write(LastSortedCRC);
    output.Write(CompressedSize);
    output.Write(DecompressedSize);
    output.Write(CompressedBytes);
}
/// <summary>
/// Serializes the table header (version + section count) followed by
/// each section's own serialized body, in list order.
/// </summary>
public void Serialize(BinaryStream output)
{
    output.Write(Version);
    output.Write(Sections.Count);

    foreach (var entry in Sections)
        entry.Serialize(output);
}
/// <summary>
/// Writes this descriptor to the stream: a single raw byte for
/// <c>None</c>, or a type-coded value for <c>BigValue</c>/<c>Reference</c>,
/// optionally packed into 24 bits when the global Use24Bit flag is set.
/// </summary>
/// <param name="stream">Destination stream.</param>
/// <param name="baseOffset">Added to the stream position when resolving offset references.</param>
/// <exception cref="InvalidOperationException">Forward offset, or value too large for 24 bits.</exception>
public void WriteTo(BinaryStream stream, int baseOffset = 0)
{
    var value = Value;

    if (Type == DescriptorType.None)
    {
        stream.WriteByte(value);
        return;
    }

    if ((Type == DescriptorType.BigValue) || (Type == DescriptorType.Reference))
    {
        var code = (byte)~Type;

        if (ReferenceType == ReferenceType.Offset)
        {
            // offset references are stored as a backwards delta from the
            // current (base-adjusted) write position
            var ptr = (int)stream.Position + baseOffset;
            var offset = (ptr - value);

            if (offset < 0)
                throw new InvalidOperationException("Cannot write a forward-offset!");

            value = offset;
        }

        if (GlobalFlags.HasFlag(DescriptorFlags.Use24Bit))
        {
            // value and type code share one 32-bit word:
            // value in the upper 24 bits, code in the lowest byte
            if ((value & 0xFFFFFF) != value)
                throw new InvalidOperationException($"Descriptor value '{value}' too large, cannot fit into 24-bits!");

            value <<= 8;
            value |= code;

            stream.Write(value);
        }
        else
        {
            stream.WriteByte(code);
            stream.Write(value);
        }
    }

    // any other descriptor type writes nothing (matches the original
    // switch, which had no default case)
}
/// <summary>
/// Serializes this node object: a descriptor-tagged child count, the
/// 32-bit name hash, each attribute as a (hash, data) pair, then every
/// child recursively. Records the write position in <c>Offset</c>.
/// </summary>
public override void Serialize(BinaryStream stream)
{
    Offset = (int)stream.Position;

    var nChildren = Children.Count;
    var nAttributes = Attributes.Count;

    var nD = DescriptorTag.Create(nChildren);
    nD.WriteTo(stream);

    stream.Write(Hash);

    if (nAttributes > 0)
    {
        // write attributes
        // NOTE(review): no attribute count is emitted here, only the
        // (hash, data) pairs -- presumably the reader derives the count
        // from elsewhere (e.g. the file header totals); confirm against
        // the matching deserializer.
        foreach (var attribute in Attributes)
        {
            attribute.Serialize(stream, true);  // hash
            attribute.Serialize(stream, false); // data
        }
    }
    else
    {
        // no attributes to write!
        stream.WriteByte(0);
    }

    // now write the children out
    foreach (var child in Children)
    {
        child.Serialize(stream);
    }
}
/// <summary>
/// Serializes this entity reference: the UID (32- or 64-bit depending on
/// <c>Use32Bit</c>), the rebased offset, then two 16-bit counts.
/// </summary>
public void Serialize(BinaryStream stream)
{
    if (Use32Bit)
    {
        // truncate to the low 32 bits of the UID
        var shortUid = (int)(UID & 0xFFFFFFFF);
        stream.Write(shortUid);
    }
    else
    {
        stream.Write(UID);
    }

    // NOTE(review): offset is rebased by 8 -- presumably past a fixed
    // 8-byte prelude; confirm against the reader.
    stream.Write(Offset - 8);

    stream.Write((ushort)TotalCount);
    stream.Write((ushort)NodesCount);
}
/// <summary>
/// Reconstructs the unpacked payload by walking consecutive block pairs:
/// each block's data runs up to the next block's virtual offset, and is
/// either inflated (zlib) or copied verbatim into place.
/// </summary>
/// <returns>The fully reassembled, decompressed buffer.</returns>
public byte[] Unpack()
{
    var memory = new BinaryStream(1024);

    using (var bs = new BinaryStream(Data))
    {
        // iterate pairs (i-1, i); the final block is never unpacked
        // itself -- presumably a sentinel whose VirtualOffset marks the
        // total size (TODO confirm).
        for (int i = 1; i < Blocks.Count; i++)
        {
            var block = Blocks[i - 1];
            var next = Blocks[i];

            var size = (int)(next.VirtualOffset - block.VirtualOffset);

            // FileOffset is rebased by 4 -- apparently because 'Data'
            // here excludes the 4-byte size prefix that Serialize emits
            bs.Seek(block.FileOffset - 4, SeekOrigin.Begin);
            memory.Seek(block.VirtualOffset, SeekOrigin.Begin);

            if (block.IsCompressed == true)
            {
                // NOTE(review): the inflater stream is not disposed;
                // disposing it could close the shared source stream
                // mid-loop depending on the zlib library's ownership
                // semantics -- verify before "fixing".
                var zlib = new InflaterInputStream(bs);
                zlib.CopyTo(memory);
            }
            else
            {
                var buffer = bs.ReadBytes(size);
                memory.Write(buffer, 0, size);
            }
        }
    }

    return (memory.ToArray());
}
/// <summary>
/// Serializes this attribute data block. Cacheable blocks may be replaced
/// by a backwards offset reference when an identical instance was already
/// written; empty blocks emit a single zero byte.
/// </summary>
/// <param name="stream">Destination stream.</param>
/// <param name="baseOffset">Base added to stream positions when resolving offset references.</param>
/// <exception cref="InvalidOperationException">Thrown for RML-typed data, which has its own writer.</exception>
public void Serialize(BinaryStream stream, int baseOffset = 0)
{
    if (Type == DataType.RML)
    {
        throw new InvalidOperationException("Cannot serialize RML data directly!");
    }

    var ptr = (int)stream.Position;
    var oldSize = Size; // NOTE(review): unused below; possibly vestigial

    if (Size > 0)
    {
        var writeData = true;

        if (CanBeCached)
        {
            if (WriteCache.IsCached(this))
            {
                var cache = WriteCache.GetData(this);

                // only reuse the cached copy when sizes match AND the
                // previously written bytes verifiably match ours
                if (cache.Size == Size)
                {
                    // re-read the cached bytes from the output stream
                    // (+1 skips the descriptor byte at the cached offset)
                    stream.Position = (cache.Offset + 1);

                    var buf = stream.ReadBytes(cache.Size);

                    var key = Memory.GetCRC32(Buffer);
                    var bufKey = Memory.GetCRC32(buf);

                    stream.Position = ptr;

                    // slow as f**k, but there's no room for error
                    if (key == bufKey)
                    {
                        var nD = DescriptorTag.CreateReference(cache.Offset, ReferenceType.Offset);
                        nD.WriteTo(stream, baseOffset);

                        writeData = false;
                    }
                }
            }
            else
            {
                WriteCache.Cache(ptr, this);
            }
        }

        if (writeData)
        {
            var nD = DescriptorTag.Create(Size);
            nD.WriteTo(stream, baseOffset);

            stream.Write(Buffer);
        }
    }
    else
    {
        // nothing to write!
        stream.WriteByte(0);
    }
}
/// <summary>
/// Writes the FCB container header (magic, type, totals) and then the
/// serialized root node. Node/attribute totals depend on whether the
/// container holds objects or classes.
/// </summary>
public void Serialize(BinaryStream stream)
{
    var nodesCount = 0;
    var attrsCount = 0;

    if (Type == ContainerType.Objects)
    {
        var root = (NodeObject)Root;

        nodesCount = Utils.GetTotalNumberOfNodes(root);
        attrsCount = root.Attributes.Count;
    }
    else if (Type == ContainerType.Classes)
    {
        var root = (NodeClass)Root;

        nodesCount = Utils.GetTotalNumberOfNodes(root);
        attrsCount = root.Attributes.Count;
    }

    // the header always accounts for at least one attribute slot
    if (attrsCount == 0)
        attrsCount = 1;

    var totalCount = (nodesCount + attrsCount);

    Debug.WriteLine(">> Writing FCB header...");
    stream.Write((int)Magic);

    stream.Write((short)Type);
    stream.Write((short)MagicNumber.FB); // ;)

    stream.Write(totalCount);
    stream.Write(nodesCount);

    Debug.WriteLine(">> Writing data...");
    Root.Serialize(stream);
}
/// <summary>
/// Writes an RML attribute's raw payload, prefixed by its byte length
/// encoded as a descriptor tag.
/// </summary>
protected void WriteAttributeRmlData(BinaryStream stream, NomadValue attr)
{
    var buffer = attr.Data.Buffer;

    var sizeTag = DescriptorTag.Create(buffer.Length);
    sizeTag.WriteTo(stream);

    stream.Write(buffer);
}
/// <summary>
/// Serializes the chunk: a size prefix (data length plus its own 4-byte
/// field), the raw data, then the block table. Each block stores its
/// virtual offset and a packed word holding the file offset (low 31 bits)
/// with the is-compressed flag in the top bit.
/// </summary>
public void Serialize(BinaryStream stream)
{
    // prefix counts the data plus this 4-byte length field itself
    stream.Write(4 + Data.Length);
    stream.Write(Data, 0, Data.Length);

    stream.Write(Blocks.Count);

    foreach (var block in Blocks)
    {
        stream.Write(block.VirtualOffset);

        // pack: file offset in bits 0..30, compressed flag in bit 31
        var packed = (block.FileOffset & 0x7FFFFFFF);

        if (block.IsCompressed == true)
            packed |= 0x80000000;

        stream.Write(packed);
    }
}
/// <summary>
/// Two-mode serializer: hash-only mode writes just the 32-bit hash;
/// otherwise this defers to the full single-argument serializer.
/// </summary>
public void Serialize(BinaryStream stream, bool writeHash)
{
    if (!writeHash)
    {
        Serialize(stream);
        return;
    }

    stream.Write(Hash);
}
/// <summary>
/// Writes the file prelude (signature, version, three chunk offsets) and
/// then the data, header, and descriptor chunks in that order. The three
/// offsets point past the fixed 20-byte prelude at the data chunk, the
/// header chunk, and the descriptor, respectively.
/// </summary>
public void Serialize(BinaryStream stream)
{
    stream.Write(Signature);
    stream.Write(Version);

    // each chunk serializes as: 4-byte size prefix + raw data +
    // 4-byte block count + 8 bytes per block (see chunk Serialize)
    var dataOffset = 20u;
    var headerOffset = dataOffset + 4 + (uint)Data.Data.Length + 4 + ((uint)Data.Blocks.Count * 8);
    var descOffset = headerOffset + 4 + (uint)Header.Data.Length + 4 + ((uint)Header.Blocks.Count * 8);

    stream.Write(dataOffset);
    stream.Write(headerOffset);
    stream.Write(descOffset);

    Data.Serialize(stream);
    Header.Serialize(stream);
    Descriptor.Serialize(stream);
}
/// <summary>
/// Serializes the string table: the string count, the sorted enum ids,
/// the string offsets, then each (id, UTF-16 string) pair.
/// </summary>
public void Serialize(BinaryStream output)
{
    output.Write(StringCount);

    foreach (var sortedEnum in SortedEnums)
        output.Write(sortedEnum);

    foreach (var offset in StringOffsets)
        output.Write(offset);

    foreach (var pair in IdValuePairs)
    {
        output.Write(pair.Key);
        output.Write(pair.Value, Encoding.Unicode);
    }
}
/// <summary>
/// Serializes a single attribute's payload. The raw buffer is first
/// normalized (minimized for non-resource formats), then either written
/// with a size descriptor or deduplicated into an offset reference via
/// the write cache when larger than 4 bytes.
/// </summary>
/// <param name="stream">Destination stream.</param>
/// <param name="attr">Attribute whose data is written.</param>
/// <param name="baseOffset">Base added to stream positions when resolving offset references.</param>
/// <exception cref="InvalidOperationException">Thrown for RML-typed data, which has its own writer.</exception>
protected void WriteAttributeData(BinaryStream stream, NomadValue attr, int baseOffset = 0)
{
    var ptr = (int)stream.Position + baseOffset;

    var data = attr.Data;

    var type = data.Type;
    var size = data.Size;
    var buffer = data.Buffer;

    if (data.Type == DataType.RML)
    {
        throw new InvalidOperationException("Cannot serialize RML data directly!");
    }

    var oldSize = size; // NOTE(review): unused below; possibly vestigial

    // resource format keeps the full buffer; other formats use a
    // minimized representation
    var attrData = (Format != FormatType.Resource)
        ? Utils.GetAttributeDataMiniBuffer(buffer, type)
        : Utils.GetAttributeDataBuffer(buffer, type);

    size = attrData.Length;

    var writeData = true;

    // only payloads larger than 4 bytes are worth deduplicating
    if (size > 4)
    {
        // return cached instance, else cache it and return empty
        var cache = WriteCache.PreCache(ptr, attrData, size);

        if (!cache.IsEmpty)
        {
            // sizes must match
            if (cache.Size == size)
            {
                var nD = DescriptorTag.CreateReference(cache.Offset, ReferenceType.Offset);
                nD.WriteTo(stream, baseOffset);

                writeData = false;
            }
        }
    }

    if (writeData)
    {
        var nD = DescriptorTag.Create(size);

        // baseOffset is irrelevant here: a plain size tag is not an
        // offset reference, so WriteTo ignores it anyway
        nD.WriteTo(stream);

        stream.Write(attrData);
    }
}
/// <summary>
/// Writes the resource header: magic, format type (optionally tagged with
/// the "seal of approval" bits), and the total/child counts derived from
/// the root object.
/// </summary>
protected void WriteHeader(BinaryStream stream, NomadObject data)
{
    // enumerating the object yields attributes and children alike;
    // only child objects contribute to the child count
    var childCount = 0;

    foreach (var entry in data)
    {
        if (entry.IsObject)
            childCount++;
    }

    // the header always accounts for at least one attribute slot
    var attrCount = data.Attributes.Count;

    if (attrCount == 0)
        attrCount = 1;

    var totalCount = (childCount + attrCount);

    int magic = Nomad.Magic;
    int type = (ushort)Format;

    if (Nomad.WriteSealOfApproval)
    {
        // won't trip the "debug info" flag for version 2 resources;
        // has no effect on versions 3 and 5 (which is why it went undetected for so long)
        // always check your bits, folks!
        type |= (MagicNumber.fB << 16); // ;)
    }

    stream.Write(magic);
    stream.Write(type);

    stream.Write(totalCount);
    stream.Write(childCount);
}
/// <summary>
/// Writes a format-A attribute: the id hash first, then the payload
/// (RML blobs use their dedicated writer).
/// </summary>
protected void WriteAttribute_FmtA(BinaryStream stream, NomadValue attr)
{
    Context.State = ContextStateType.Member;
    Context.MemberIndex++;

    stream.Write(attr.Id.Hash);

    if (!attr.IsRml)
    {
        WriteAttributeData(stream, attr);
        return;
    }

    WriteAttributeRmlData(stream, attr);
}
/// <summary>
/// Serializes an object in format A. Objects already present in the
/// cache are emitted as index references; otherwise the body (child
/// count tag, hash, attribute count tag, attributes, children) is
/// written and the object is registered for later duplicates.
/// </summary>
protected void WriteObject_FmtA(BinaryStream stream, NomadObject obj)
{
    Context.State = ContextStateType.Object;
    Context.ObjectIndex++;

    var ptr = (int)stream.Position;

    var idx = NomadCache.Find(obj);

    if (idx != -1)
    {
        // duplicate object: reference the earlier instance by index
        var cached = NomadCache.Refs[idx];

        var tag = DescriptorTag.CreateReference(Context.GetIdx(cached), ReferenceType.Index);
        tag.WriteTo(stream);
    }
    else
    {
        var nChildren = DescriptorTag.Create(obj.Children.Count);
        var nAttributes = DescriptorTag.Create(obj.Attributes.Count);

        // register before recursing so nested duplicates can resolve
        Context.AddRef(obj, ptr);

        nChildren.WriteTo(stream);
        stream.Write(obj.Id.Hash);

        if (obj.IsRml)
        {
            // RML objects carry an opaque serialized blob instead of
            // regular attributes/children
            WriteRmlData(stream, obj);
        }
        else
        {
            nAttributes.WriteTo(stream);

            Context.State = ContextStateType.Member;

            foreach (var attr in obj.Attributes)
            {
                WriteAttribute_FmtA(stream, attr);
            }

            foreach (var child in obj.Children)
            {
                WriteObject_FmtA(stream, child);
            }
        }
    }
}
/// <summary>
/// Writes the format-B attribute section: a hash table (4 bytes per
/// attribute, deduplicated through the write cache) followed by every
/// attribute's payload. An empty list emits a single zero byte.
/// </summary>
/// <param name="stream">Destination stream (may be a temp buffer).</param>
/// <param name="attributes">Attributes to write.</param>
/// <param name="baseOffset">Base added to stream positions when resolving offset references.</param>
protected void WriteAttributesData_FmtB(BinaryStream stream, List <NomadValue> attributes, int baseOffset)
{
    var ptr = (int)stream.Position + baseOffset;

    var nAttrs = attributes.Count;

    if (nAttrs > 0)
    {
        // build the hash table (4 bytes per attribute) in memory
        var attrData = new byte[nAttrs * 4];

        using (var bs = new BinaryStream(attrData))
        {
            foreach (var attr in attributes)
            {
                bs.Write(attr.Id.Hash);
            }
        }

        // NOTE(review): PreCache is given nAttrs as the size here, while
        // WriteAttributeData passes the buffer's byte length -- possibly
        // a deliberate count-keyed cache, but worth confirming.
        var cache = WriteCache.PreCache(ptr, attrData, nAttrs);

        if (!cache.IsEmpty)
        {
            // identical table already written; reference it by offset
            var ndAttrs = DescriptorTag.CreateReference(cache.Offset, ReferenceType.Offset);
            ndAttrs.WriteTo(stream, baseOffset);
        }
        else
        {
            var count = DescriptorTag.Create(nAttrs);
            count.WriteTo(stream);

            stream.Write(attrData);
        }

        // attribute payloads always follow, whether or not the hash
        // table itself was deduplicated
        foreach (var attr in attributes)
        {
            WriteAttribute_FmtB(stream, attr, baseOffset);
        }
    }
    else
    {
        // nothing to write
        stream.WriteByte(0);
    }
}
/// <summary>
/// Serializes an RML object's payload: the RML tree is rendered to a
/// temporary buffer, then written prefixed by two descriptor tags.
/// </summary>
protected void WriteRmlData(BinaryStream stream, NomadObject data)
{
    byte[] rmlBuffer = null;

    using (var bs = new BinaryStream(1024))
    {
        var rmlData = new NomadRmlSerializer();
        rmlData.Serialize(bs, data);

        rmlBuffer = bs.ToArray();
    }

    var size = DescriptorTag.Create(rmlBuffer.Length);

    // NOTE(review): Create is passed the 'size' tag itself here, which
    // must rely on a DescriptorTag->int conversion -- verify this yields
    // the intended value (it is not obviously the buffer length).
    var next = DescriptorTag.Create(size);

    next.WriteTo(stream);
    size.WriteTo(stream);

    stream.Write(rmlBuffer, 0, rmlBuffer.Length);
}
/// <summary>
/// Writes the attribute-hash table: a 1-byte count followed by each
/// attribute's 32-bit hash. Identical tables are deduplicated through
/// the write cache and replaced by a backwards offset reference.
/// </summary>
/// <param name="stream">Destination stream.</param>
/// <exception cref="InvalidOperationException">More than 255 attributes (count would not fit the 1-byte field).</exception>
private void WriteAttributeHashes(BinaryStream stream)
{
    var ptr = (int)stream.Position;
    var nAttrs = Attributes.Count;

    if (nAttrs > 0)
    {
        // the count is serialized as a single byte; previously a larger
        // count was silently truncated by WriteByte, corrupting output
        if (nAttrs > 255)
        {
            throw new InvalidOperationException("Too many attributes; count must fit in a single byte.");
        }

        var attrHBuf = new byte[(nAttrs * 4) + 1];

        using (var buf = new BinaryStream(attrHBuf))
        {
            buf.WriteByte(nAttrs);

            // hash-only serialization (writeHash = true)
            foreach (var attr in Attributes)
            {
                attr.Serialize(buf, true);
            }
        }

        if (WriteCache.IsCached(attrHBuf, nAttrs))
        {
            // an identical hash table was already written; emit a
            // backwards offset reference instead of repeating it
            var cache = WriteCache.GetData(attrHBuf, nAttrs);

            var nhD = DescriptorTag.CreateReference(cache.Offset, ReferenceType.Offset);
            nhD.WriteTo(stream);
        }
        else
        {
            WriteCache.Cache(ptr, attrHBuf, nAttrs);
            stream.Write(attrHBuf);
        }
    }
    else
    {
        // nothing to write
        stream.WriteByte(0);
    }
}
/// <summary>
/// Serializes an object in format B. Duplicate objects are emitted as
/// backwards offset references; otherwise the body (child count tag,
/// hash, attribute list, children) is written and the object is
/// registered so later duplicates can reference it.
/// </summary>
protected void WriteObject_FmtB(BinaryStream stream, NomadObject obj)
{
    Context.State = ContextStateType.Object;
    Context.ObjectIndex++;

    var ptr = (int)stream.Position;

    var idx = NomadCache.Find(obj);

    if (idx != -1)
    {
        // already written: reference the earlier copy by stream offset
        var cached = NomadCache.Refs[idx];
        var offset = (int)Context.GetPtr(cached);

        var tag = DescriptorTag.CreateReference(offset, ReferenceType.Offset);
        tag.WriteTo(stream);
    }
    else
    {
        // register a canonical reference before recursing so nested
        // duplicates can resolve to this instance
        var reference = new NomadReference(obj);
        var cached = reference.Get();

        Context.AddRef(cached, ptr);

        var count = DescriptorTag.Create(obj.Children.Count);
        count.WriteTo(stream);

        stream.Write(obj.Id.Hash);

        WriteAttributesList_FmtB(stream, obj);

        foreach (var child in obj.Children)
        {
            WriteObject_FmtB(stream, child);
        }
    }
}
/// <summary>
/// Serializes this section: name hash, string count, every localized
/// string entry, then the strings re-partitioned into LZ4-compressed
/// chunks of roughly MAX_LENGTH bytes each.
/// </summary>
/// <param name="output">Destination stream.</param>
public void Serialize(BinaryStream output)
{
    output.Write(Name.Hash);
    output.Write(StringCount);

    foreach (var locStr in LocalizedStrings)
    {
        locStr.Serialize(output);
    }

    // Partition the localized strings into chunks of roughly MAX_LENGTH
    // bytes (UTF-16, hence Length * 2), never splitting a run of entries
    // that share the same enum value across two chunks.
    var vals = new List <DecompressedValues>();
    var locStrs = new List <OasisLocalizedString>();

    var len = 0;
    var crc = 0;

    foreach (var locStr in LocalizedStrings)
    {
        locStrs.Add(locStr);
        len += (locStr.Value.Length * 2);

        if ((len >= MAX_LENGTH) && (locStr.Enum != crc))
        {
            var val = new DecompressedValues(locStrs);
            vals.Add(val);

            locStrs = new List <OasisLocalizedString>();
            len = 0;
            crc = 0;
        }
        else
        {
            crc = locStr.Enum;
        }
    }

    // flush the final partial chunk
    if (locStrs.Count != 0)
    {
        var val = new DecompressedValues(locStrs);
        vals.Add(val);

        locStrs = new List <OasisLocalizedString>();
    }

    output.Write(vals.Count);

    foreach (var val in vals)
    {
        var cpr = new CompressedValues();

        using (var bs = new BinaryStream(1024))
        {
            val.Serialize(bs);

            var buffer = bs.ToArray();
            var size = buffer.Length;

            // BUGFIX: the old code passed a null destination buffer to
            // Compress(byte[], byte[]), which cannot produce output; use
            // the allocating overload instead, which returns a buffer
            // already trimmed to the compressed size.
            var lz = new LZ4Compressor64();
            var cprBuffer = lz.Compress(buffer);

            cpr.CompressedBytes = cprBuffer;
            cpr.CompressedSize = cprBuffer.Length;
            cpr.DecompressedSize = size;
            cpr.LastSortedCRC = val.SortedEnums.Last();
        }

        cpr.Serialize(output);
    }
}
/// <summary>
/// Serializes this entry as three consecutive fields: id, section, enum.
/// Field order must mirror the reader's layout.
/// </summary>
public void Serialize(BinaryStream output)
{
    output.Write(Id);
    output.Write(Section);
    output.Write(Enum);
}
/// <summary>
/// Serializes an RML document (an "RML_DATA" container with exactly one
/// RML child and no attributes) into its binary form: a 2-byte header,
/// three descriptor-tagged counts (string table length, element count,
/// attribute count), the element/attribute records, and a trailing
/// null-terminated string table.
/// </summary>
/// <param name="stream">Destination stream (wrapped in a BinaryStream if necessary).</param>
/// <param name="data">Prepared RML container to serialize.</param>
/// <exception cref="InvalidOperationException">Container is malformed or contains non-RML data.</exception>
public override void Serialize(Stream stream, NomadObject data)
{
    if (Context.State == ContextStateType.End)
    {
        Context.Reset();
    }

    // validate the container shape before doing any work
    if (data.Id != "RML_DATA")
    {
        throw new InvalidOperationException("RML data wasn't prepared before initializing.");
    }

    if ((data.Children.Count != 1) || (data.Attributes.Count != 0))
    {
        throw new InvalidOperationException("RML data is malformed and cannot be serialized properly.");
    }

    var _stream = (stream as BinaryStream) ?? new BinaryStream(stream);

    var rmlRoot = data.Children[0];

    if (!rmlRoot.IsRml)
    {
        throw new InvalidOperationException("You can't serialize non-RML data as RML data, dumbass!");
    }

    _strings.Clear();

    // string interning: every distinct id/value string gets a unique
    // byte offset into the string table; duplicates share an offset
    var strLookup = new Dictionary <string, int>();
    var strPtr = 0;

    var getStrIdx = new Func <string, int>((str) => {
        var ptr = 0;

        if (str == null)
        {
            str = String.Empty;
        }

        if (strLookup.ContainsKey(str))
        {
            ptr = strLookup[str];
        }
        else
        {
            // add to lookup
            ptr = strPtr;
            strLookup.Add(str, strPtr);

            // add to string table
            _strings.Add(strPtr, str);

            // must have null-terminator!
            var strLen = 1;

            if (str != null)
            {
                strLen += str.Length;
            }

            strPtr += strLen;
        }

        return (ptr);
    });

    var entries = new List <NomadData>();

    var elemsCount = 1;
    var attrsCount = 0;

    entries.Add(rmlRoot);

    // iterates through attributes then children (and children's children, etc.)
    foreach (var nd in rmlRoot)
    {
        if (!nd.IsRml)
        {
            throw new InvalidOperationException("Can't serialize non-RML data!");
        }

        if (nd.IsAttribute)
        {
            ++attrsCount;
        }
        else if (nd.IsObject)
        {
            ++elemsCount;
        }

        entries.Add(nd);
    }

    // rough size estimate
    var rmlSize = ((elemsCount * 4) + (attrsCount * 2));

    var strTableLen = -1;

    byte[] rmlBuffer = null;

    using (var ms = new BinaryStream(rmlSize))
    {
        // writes a descriptor-tagged integer
        var writeInt = new Action <int>((ptr) => {
            var nD = DescriptorTag.Create(ptr);
            nD.WriteTo(ms);
        });

        // writes one element or attribute record; elements carry
        // (name, tag, attr count, child count), attributes carry a
        // leading zero byte plus (name, value)
        var writeRml = new Action <NomadData>((nd) => {
            var nameIdx = getStrIdx(nd.Id);
            var valIdx = -1;

            if (nd.IsObject)
            {
                Context.State = ContextStateType.Object;
                Context.ObjectIndex++;

                var obj = (NomadObject)nd;

                valIdx = getStrIdx(obj.Tag);

                writeInt(nameIdx);
                writeInt(valIdx);

                writeInt(obj.Attributes.Count);
                writeInt(obj.Children.Count);
            }
            else if (nd.IsAttribute)
            {
                Context.State = ContextStateType.Member;
                Context.MemberIndex++;

                var attr = (NomadValue)nd;

                valIdx = getStrIdx(attr.Data);

                // required for attributes
                ms.WriteByte(0);

                writeInt(nameIdx);
                writeInt(valIdx);
            }
        });

        writeRml(rmlRoot);

        // enumerates attributes, then children (+ nested children)
        foreach (var rml in rmlRoot)
        {
            writeRml(rml);
        }

        // setup string table size
        strTableLen = strPtr;

        // write out string table (each entry null-terminated)
        foreach (var kv in _strings)
        {
            var str = kv.Value;

            var strLen = (str != null) ? str.Length : 0;

            var strBuf = new byte[strLen + 1];

            if (strLen > 0)
            {
                Encoding.UTF8.GetBytes(str, 0, strLen, strBuf, 0);
            }

            ms.Write(strBuf);
        }

        // commit buffer
        rmlBuffer = ms.ToArray();
        rmlSize = rmlBuffer.Length;
    }

    var bufSize = 5; // header + 3 small ints

    // expand size as needed: counts >= 254 take a 1-byte marker plus a
    // 4-byte value when descriptor-encoded
    if (strTableLen >= 254)
    {
        bufSize += 4;
    }
    if (elemsCount >= 254)
    {
        bufSize += 4;
    }
    if (attrsCount >= 254)
    {
        bufSize += 4;
    }

    // calculate the final size (hopefully)
    bufSize += rmlSize;

    byte[] result = null;

    using (var ms = new BinaryStream(bufSize))
    {
        ms.WriteByte(0);
        ms.WriteByte(Reserved);

        DescriptorTag[] descriptors = {
            DescriptorTag.Create(strTableLen),
            DescriptorTag.Create(elemsCount),
            DescriptorTag.Create(attrsCount),
        };

        foreach (var desc in descriptors)
        {
            desc.WriteTo(ms);
        }

        // write RML data (+ string table)
        ms.Write(rmlBuffer);

        // profit!!!
        result = ms.ToArray();
    }

    _stream.Write(result, 0, result.Length);

    Context.State = ContextStateType.End;
}
/// <summary>
/// Builds the complete binary file in memory (infos header, FCB header,
/// serialized node tree, aligned entity-reference table) and writes it
/// to disk.
/// </summary>
/// <param name="filename">Destination file path.</param>
public void SaveBinary(string filename)
{
    var root = GetNodeClass();

    byte[] buffer;

    Debug.WriteLine(">> Generating binary data...");
    using (var stream = new BinaryStream(BufferSize))
    {
        // list of references sorted by their UID
        var references = GetEntityReferences(true);

        // we need to write the offset to our infos here
        Debug.WriteLine(">> Writing infos header...");
        var ptr = stream.Position;

        // reserve 4 bytes; the infos offset is back-patched below
        stream.Position += 4;

        stream.Write(references.Count);

        Debug.WriteLine(">> Writing FCB header...");
        stream.Write((int)Magic);

        stream.Write((short)Type);
        stream.Write((short)MagicNumber.FB); // ;)

        var nodesCount = Utils.GetTotalNumberOfNodes(root);
        var attrCount = root.Attributes.Count;

        // the header always accounts for at least one attribute slot
        if (attrCount == 0)
        {
            attrCount = 1;
        }

        var totalCount = (nodesCount + attrCount);

        stream.Write(totalCount);
        stream.Write(nodesCount);

        root.Serialize(stream);

        //Debug.WriteLine(">> Optimizing data...");
        //using (var bs = new BinaryStream(BufferSize))
        //{
        //    root.Serialize(bs);
        //
        //    var dataLen = bs.Position;
        //    bs.SetLength(dataLen);
        //
        //    var data = OptimizedData.Create(nodesCount, bs.ToArray());
        //    data.WriteTo(stream);
        //}

        // infos table begins at the next 8-byte boundary; the offset is
        // relative to 'ptr', which is 0 for a fresh stream, so the
        // absolute seek below lands on the same position
        var refsOffset = (int)(Memory.Align(stream.Position, 8) - ptr);

        Debug.WriteLine(">> Writing infos offset...");
        stream.Position = ptr;
        stream.Write(refsOffset);

        Debug.WriteLine(">> Writing infos...");
        stream.Position = refsOffset;

        foreach (var reference in references)
        {
            var refData = new EntityReferenceData(reference) {
                Use32Bit = Use32Bit,
            };

            refData.Serialize(stream);
        }

        var size = (int)stream.Position;

        buffer = new byte[size];

        Debug.WriteLine(">> Copying to buffer...");
        stream.Position = 0;
        stream.Read(buffer, 0, size);
    }

    Debug.WriteLine(">> Writing to file...");
    File.WriteAllBytes(filename, buffer);
}
/// <summary>
/// Serializes this info entry: a 32-bit offset followed by the 16-bit
/// total and child counts. Field order must mirror the reader's layout.
/// </summary>
public void Serialize(BinaryStream stream)
{
    stream.Write(Offset);

    stream.Write((ushort)TotalCount);
    stream.Write((ushort)ChildCount);
}
/// <summary>
/// Recursively converts an XML element into a NomadObject tree. RML_DATA
/// elements are special-cased: their single child element is serialized
/// into a size-prefixed binary RML blob and attached to the parent as an
/// RML-typed attribute.
/// </summary>
/// <param name="xml">Source XML element.</param>
/// <param name="parent">Parent object to attach the result to (null for the root).</param>
/// <returns>The constructed object (the RML root for RML_DATA elements).</returns>
/// <exception cref="XmlException">RML_DATA node is empty or has more than one child element.</exception>
public NomadObject ReadXmlObject(XElement xml, NomadObject parent = null)
{
    Context.State = ContextStateType.Object;
    Context.ObjectIndex++;

    var name = xml.Name.LocalName;
    var id = StringId.Parse(name);

    // RML-ness is inherited from the parent, or triggered by RML_DATA
    var isRml = false;

    if (parent != null)
    {
        isRml = parent.IsRml;
    }

    if (!isRml && (id == "RML_DATA"))
    {
        isRml = true;
    }

    if (isRml)
    {
        // RML_DATA must wrap exactly one element
        XElement rmlElem = null;

        foreach (var elem in xml.Elements())
        {
            if (rmlElem != null)
            {
                throw new XmlException("Too many elements in RML_DATA node!");
            }

            rmlElem = elem;
        }

        if (rmlElem == null)
        {
            throw new XmlException("Empty RML_DATA nodes are cancerous to your health!");
        }

        var rmlRoot = new NomadObject(true) {
            Id = "RML_DATA"
        };

        ReadRmlObject(rmlElem, rmlRoot);

        // serialize the RML tree to a size-prefixed binary blob
        byte[] rmlBuffer = null;

        using (var bs = new BinaryStream(1024))
        {
            // don't write size yet
            bs.Position += 4;

            var rmlData = new NomadRmlSerializer();
            rmlData.Serialize(bs, rmlRoot);

            // write size
            var rmlSize = (int)(bs.Position - 4);

            bs.Position = 0;
            bs.Write(rmlSize);

            rmlBuffer = bs.ToArray();
        }

        if (parent != null)
        {
            var rml = new NomadValue() {
                Id = id,
                Data = new AttributeData(DataType.RML, rmlBuffer),
            };

            parent.Attributes.Add(rml);
        }

        return (rmlRoot);
    }

    var result = new NomadObject(isRml) {
        Id = id
    };

    foreach (var attr in xml.Attributes())
    {
        ReadXmlAttribute(attr, result);
    }

    foreach (var node in xml.Elements())
    {
        ReadXmlObject(node, result);
    }

    if (parent != null)
    {
        parent.Children.Add(result);
    }

    return (result);
}
/// <summary>
/// Serializes this node class. Classes larger than 16 bytes may be
/// deduplicated via the write cache (emitted as a backwards offset
/// reference); otherwise the body is written: child-count tag, hash,
/// a 16-bit attribute-data size (back-patched afterwards), attribute
/// hashes + data, then the children.
/// </summary>
/// <exception cref="InvalidOperationException">Attribute data exceeds 65535 bytes.</exception>
public override void Serialize(BinaryStream stream)
{
    Offset = (int)stream.Position;

    var nChildren = Children.Count;
    var nAttributes = Attributes.Count;

    var writeData = true;

    // only classes above a minimum size are worth deduplicating
    if (Size > 16)
    {
        if (WriteCache.IsCached(this))
        {
            var cache = WriteCache.GetData(this);
            var obj = cache.Object as NodeClass;

            // guard against checksum collisions with a full equality check
            if ((obj != null) && obj.Equals(this))
            {
                Debug.WriteLine($">> [Class:{Offset:X8}] Instance cached @ {cache.Offset:X8} with key {cache.Checksum:X8}");

                var nD = DescriptorTag.CreateReference(cache.Offset, ReferenceType.Offset);
                nD.WriteTo(stream);

                writeData = false;
            }
            else
            {
                Debug.WriteLine($">> [Class:{Offset:X8}] !!! FALSE POSITIVE !!!");
            }
        }
        else
        {
            Debug.WriteLine($">> [Class:{Offset:X8}] Caching new instance with key {GetHashCode():X8}");
            WriteCache.Cache(Offset, this);
        }
    }

    if (writeData)
    {
        var nD = DescriptorTag.Create(nChildren);
        nD.WriteTo(stream);

        stream.Write(Hash);

        // skip size parameter for now
        stream.Position += 2;

        var attrsPtr = stream.Position;

        if (nAttributes > 0)
        {
            WriteAttributeHashes(stream);

            // write attribute data
            foreach (var attribute in Attributes)
            {
                attribute.Serialize(stream);
            }
        }
        else
        {
            // no attributes to write!
            stream.WriteByte(0);
        }

        var childrenPtr = stream.Position;

        // back-patch the 16-bit attribute-data size we skipped earlier
        var attrsSize = (int)(childrenPtr - attrsPtr);

        if (attrsSize > 65535)
        {
            throw new InvalidOperationException("Attribute data too large.");
        }

        // write attributes size
        stream.Position = (attrsPtr - 2);
        stream.Write((short)attrsSize);

        // now write the children out
        stream.Position = childrenPtr;

        foreach (var child in Children)
        {
            child.Serialize(stream);
        }
    }
}