// Writes this descriptor tag to the stream.
// Layout depends on Type and the global 24-bit flag:
//  - None: a single byte holding the raw value.
//  - BigValue/Reference: a control code (bitwise complement of Type) followed
//    by the value -- either packed together into one 32-bit word (24-bit mode,
//    code in the low byte) or as a code byte plus a full 32-bit value.
// 'baseOffset' is added to the stream position when converting an absolute
// reference target into a backward offset.
public void WriteTo(BinaryStream stream, int baseOffset = 0)
{
    var value = Value;

    switch (Type)
    {
    case DescriptorType.None:
        // raw single-byte value, no control code
        stream.WriteByte(value);
        break;
    case DescriptorType.BigValue:
    case DescriptorType.Reference:
        {
            // control code is the complement of the type id
            var code = (byte)~Type;

            if (ReferenceType == ReferenceType.Offset)
            {
                // convert the absolute target into a backward distance
                // from the current (base-adjusted) write position
                var ptr = (int)stream.Position + baseOffset;
                var offset = (ptr - value);

                // only backward references are representable
                if (offset < 0)
                {
                    throw new InvalidOperationException("Cannot write a forward-offset!");
                }

                value = offset;
            }

            if (GlobalFlags.HasFlag(DescriptorFlags.Use24Bit))
            {
                // value must fit in 24 bits; the code occupies the low byte
                if ((value & 0xFFFFFF) != value)
                {
                    throw new InvalidOperationException($"Descriptor value '{value}' too large, cannot fit into 24-bits!");
                }

                value <<= 8;
                value |= code;

                // NOTE(review): packed word layout depends on BinaryStream's
                // write endianness -- confirm little-endian is intended
                stream.Write(value);
            }
            else
            {
                // code byte followed by the full 32-bit value
                stream.WriteByte(code);
                stream.Write(value);
            }
        }
        break;
    }
}
// Writes this value's data buffer, preceded by a size descriptor.
// Identical buffers may be de-duplicated: when a cached copy at an earlier
// offset has the same size AND the same CRC32, only an offset reference is
// written instead of the data itself.
// Throws InvalidOperationException for RML-typed values, which must go
// through the dedicated RML serializer.
// (Fix: removed the unused local 'oldSize'.)
public void Serialize(BinaryStream stream, int baseOffset = 0)
{
    if (Type == DataType.RML)
    {
        throw new InvalidOperationException("Cannot serialize RML data directly!");
    }

    var ptr = (int)stream.Position;

    if (Size > 0)
    {
        var writeData = true;

        if (CanBeCached)
        {
            if (WriteCache.IsCached(this))
            {
                var cache = WriteCache.GetData(this);

                if (cache.Size == Size)
                {
                    // read back the previously-written copy, skipping its
                    // descriptor byte.
                    // NOTE(review): assumes the cached copy's size descriptor
                    // is exactly one byte -- confirm for large sizes
                    stream.Position = (cache.Offset + 1);
                    var buf = stream.ReadBytes(cache.Size);

                    var key = Memory.GetCRC32(Buffer);
                    var bufKey = Memory.GetCRC32(buf);

                    stream.Position = ptr;

                    // slow, but there's no room for error -- only reference
                    // the cached copy when the contents verifiably match
                    if (key == bufKey)
                    {
                        var nD = DescriptorTag.CreateReference(cache.Offset, ReferenceType.Offset);
                        nD.WriteTo(stream, baseOffset);

                        writeData = false;
                    }
                }
            }
            else
            {
                // first occurrence -- remember where it was written
                WriteCache.Cache(ptr, this);
            }
        }

        if (writeData)
        {
            var nD = DescriptorTag.Create(Size);
            nD.WriteTo(stream, baseOffset);

            stream.Write(Buffer);
        }
    }
    else
    {
        // nothing to write!
        stream.WriteByte(0);
    }
}
// Writes this node: a child-count descriptor, the node hash, each attribute
// as a hash/data pair (or a single zero byte when there are none), then every
// child node recursively. Records the node's stream offset as a side effect.
public override void Serialize(BinaryStream stream)
{
    Offset = (int)stream.Position;

    var childCount = DescriptorTag.Create(Children.Count);
    childCount.WriteTo(stream);

    stream.Write(Hash);

    if (Attributes.Count == 0)
    {
        // a zero byte marks an empty attribute list
        stream.WriteByte(0);
    }
    else
    {
        foreach (var attr in Attributes)
        {
            attr.Serialize(stream, true);  // hash
            attr.Serialize(stream, false); // data
        }
    }

    // children follow the attribute block
    foreach (var node in Children)
        node.Serialize(stream);
}
// Format B: writes the attribute id-hash table (de-duplicated through the
// write cache and emitted as an offset reference when an identical table was
// already written), followed by each attribute's data. An empty attribute
// list is written as a single zero byte.
protected void WriteAttributesData_FmtB(BinaryStream stream, List <NomadValue> attributes, int baseOffset)
{
    var position = (int)stream.Position + baseOffset;

    if (attributes.Count == 0)
    {
        // nothing to write
        stream.WriteByte(0);
        return;
    }

    // pack every attribute id hash into a contiguous buffer (4 bytes each)
    var hashData = new byte[attributes.Count * 4];

    using (var hashStream = new BinaryStream(hashData))
    {
        foreach (var attr in attributes)
            hashStream.Write(attr.Id.Hash);
    }

    var cached = WriteCache.PreCache(position, hashData, attributes.Count);

    if (cached.IsEmpty)
    {
        // first occurrence -- write count + raw hash table
        var countTag = DescriptorTag.Create(attributes.Count);
        countTag.WriteTo(stream);

        stream.Write(hashData);
    }
    else
    {
        // identical table already written -- reference it by offset
        var refTag = DescriptorTag.CreateReference(cached.Offset, ReferenceType.Offset);
        refTag.WriteTo(stream, baseOffset);
    }

    // attribute data always follows, cached or not
    foreach (var attr in attributes)
        WriteAttribute_FmtB(stream, attr, baseOffset);
}
// Writes the attribute hash block: a single count byte followed by each
// attribute's serialized hash (4 bytes each). Identical blocks are
// de-duplicated through the write cache and re-emitted as an offset
// reference. An empty attribute list is written as a single zero byte.
// (Fix: guard against silent truncation of the single-byte count when
// there are more than 255 attributes.)
private void WriteAttributeHashes(BinaryStream stream)
{
    var ptr = (int)stream.Position;
    var nAttrs = Attributes.Count;

    if (nAttrs > 0)
    {
        // the count is stored in one byte; anything larger would be silently
        // truncated by WriteByte and corrupt the stream
        if (nAttrs > 255)
        {
            throw new InvalidOperationException($"Too many attributes ({nAttrs}); count must fit in a single byte.");
        }

        var attrHBuf = new byte[(nAttrs * 4) + 1];

        using (var buf = new BinaryStream(attrHBuf))
        {
            buf.WriteByte(nAttrs);

            // serialize hashes only (true = hash pass)
            foreach (var attr in Attributes)
            {
                attr.Serialize(buf, true);
            }
        }

        if (WriteCache.IsCached(attrHBuf, nAttrs))
        {
            // identical block already written -- reference it by offset
            var cache = WriteCache.GetData(attrHBuf, nAttrs);

            var nhD = DescriptorTag.CreateReference(cache.Offset, ReferenceType.Offset);
            nhD.WriteTo(stream);
        }
        else
        {
            // first occurrence -- cache and write the raw block
            WriteCache.Cache(ptr, attrHBuf, nAttrs);
            stream.Write(attrHBuf);
        }
    }
    else
    {
        // nothing to write
        stream.WriteByte(0);
    }
}
// Serializes a prepared RML wrapper object ("RML_DATA" containing exactly one
// RML child and no attributes) into binary RML format:
//   [0][Reserved][strTableLen][elemsCount][attrsCount][element/attribute records][string table]
// Throws InvalidOperationException when the wrapper is malformed or contains
// non-RML nodes.
// (Fix: removed the 'entries' list, which was populated but never read;
// replaced the ContainsKey+indexer double lookup with TryGetValue.)
public override void Serialize(Stream stream, NomadObject data)
{
    if (Context.State == ContextStateType.End)
        Context.Reset();

    if (data.Id != "RML_DATA")
        throw new InvalidOperationException("RML data wasn't prepared before initializing.");

    // wrapper must contain exactly one child (the RML root) and nothing else
    if ((data.Children.Count != 1) || (data.Attributes.Count != 0))
        throw new InvalidOperationException("RML data is malformed and cannot be serialized properly.");

    var _stream = (stream as BinaryStream) ?? new BinaryStream(stream);

    var rmlRoot = data.Children[0];

    if (!rmlRoot.IsRml)
        throw new InvalidOperationException("You can't serialize non-RML data as RML data, dumbass!");

    _strings.Clear();

    // string table: maps each unique string to its byte offset in the table
    var strLookup = new Dictionary<string, int>();
    var strPtr = 0;

    // returns the table offset for 'str', adding it on first use
    var getStrIdx = new Func<string, int>((str) => {
        if (str == null)
            str = String.Empty;

        if (strLookup.TryGetValue(str, out int ptr))
            return ptr;

        ptr = strPtr;

        strLookup.Add(str, strPtr);
        _strings.Add(strPtr, str);

        // advance past the string plus its null-terminator
        strPtr += (str.Length + 1);

        return ptr;
    });

    var elemsCount = 1; // the root element itself
    var attrsCount = 0;

    // count elements/attributes; enumeration yields attributes then children
    // (and children's children, etc.)
    foreach (var nd in rmlRoot)
    {
        if (!nd.IsRml)
            throw new InvalidOperationException("Can't serialize non-RML data!");

        if (nd.IsAttribute)
        {
            ++attrsCount;
        }
        else if (nd.IsObject)
        {
            ++elemsCount;
        }
    }

    // rough size estimate for the working buffer
    var rmlSize = ((elemsCount * 4) + (attrsCount * 2));
    var strTableLen = -1;

    byte[] rmlBuffer = null;

    using (var ms = new BinaryStream(rmlSize))
    {
        var writeInt = new Action<int>((ptr) => {
            var nD = DescriptorTag.Create(ptr);
            nD.WriteTo(ms);
        });

        var writeRml = new Action<NomadData>((nd) => {
            var nameIdx = getStrIdx(nd.Id);
            var valIdx = -1;

            if (nd.IsObject)
            {
                Context.State = ContextStateType.Object;
                Context.ObjectIndex++;

                var obj = (NomadObject)nd;
                valIdx = getStrIdx(obj.Tag);

                // element record: name, value, attribute count, child count
                writeInt(nameIdx);
                writeInt(valIdx);
                writeInt(obj.Attributes.Count);
                writeInt(obj.Children.Count);
            }
            else if (nd.IsAttribute)
            {
                Context.State = ContextStateType.Member;
                Context.MemberIndex++;

                var attr = (NomadValue)nd;
                valIdx = getStrIdx(attr.Data);

                // required leading zero byte for attributes
                ms.WriteByte(0);

                writeInt(nameIdx);
                writeInt(valIdx);
            }
        });

        writeRml(rmlRoot);

        // enumerates attributes, then children (+ nested children)
        foreach (var rml in rmlRoot)
            writeRml(rml);

        // final string table size is wherever the next string would land
        strTableLen = strPtr;

        // write out the null-terminated string table
        foreach (var kv in _strings)
        {
            var str = kv.Value;
            var strLen = (str != null) ? str.Length : 0;

            var strBuf = new byte[strLen + 1];

            if (strLen > 0)
            {
                // NOTE(review): buffer is sized by char count, so this assumes
                // ASCII-only strings; multi-byte UTF-8 chars would overflow --
                // confirm against the format's string encoding
                Encoding.UTF8.GetBytes(str, 0, strLen, strBuf, 0);
            }

            ms.Write(strBuf);
        }

        // commit buffer
        rmlBuffer = ms.ToArray();
        rmlSize = rmlBuffer.Length;
    }

    var bufSize = 5; // header + 3 small ints

    // expand for any count descriptor that needs the wide form
    if (strTableLen >= 254)
        bufSize += 4;
    if (elemsCount >= 254)
        bufSize += 4;
    if (attrsCount >= 254)
        bufSize += 4;

    // calculate the final size (hopefully)
    bufSize += rmlSize;

    byte[] result = null;

    using (var ms = new BinaryStream(bufSize))
    {
        ms.WriteByte(0);
        ms.WriteByte(Reserved);

        DescriptorTag[] descriptors = {
            DescriptorTag.Create(strTableLen),
            DescriptorTag.Create(elemsCount),
            DescriptorTag.Create(attrsCount),
        };

        foreach (var desc in descriptors)
            desc.WriteTo(ms);

        // write RML data (+ string table)
        ms.Write(rmlBuffer);

        // profit!!!
        result = ms.ToArray();
    }

    _stream.Write(result, 0, result.Length);

    Context.State = ContextStateType.End;
}
// Writes this class node. Instances with Size > 16 are de-duplicated through
// the write cache: a previously-written equal instance is re-emitted as an
// offset reference instead of full data. Otherwise writes: child-count
// descriptor, hash, a 2-byte attribute-block size (back-patched after the
// attributes are written), the attribute block, then each child recursively.
public override void Serialize(BinaryStream stream)
{
    Offset = (int)stream.Position;

    var nChildren = Children.Count;
    var nAttributes = Attributes.Count;

    var writeData = true;

    if (Size > 16)
    {
        if (WriteCache.IsCached(this))
        {
            var cache = WriteCache.GetData(this);
            var obj = cache.Object as NodeClass;

            // cache hit by key -- verify it's a true match before referencing
            if ((obj != null) && obj.Equals(this))
            {
                Debug.WriteLine($">> [Class:{Offset:X8}] Instance cached @ {cache.Offset:X8} with key {cache.Checksum:X8}");

                var nD = DescriptorTag.CreateReference(cache.Offset, ReferenceType.Offset);
                nD.WriteTo(stream);

                writeData = false;
            }
            else
            {
                // key collided but instances differ -- fall through and
                // write the full data
                Debug.WriteLine($">> [Class:{Offset:X8}] !!! FALSE POSITIVE !!!");
            }
        }
        else
        {
            Debug.WriteLine($">> [Class:{Offset:X8}] Caching new instance with key {GetHashCode():X8}");
            WriteCache.Cache(Offset, this);
        }
    }

    if (writeData)
    {
        var nD = DescriptorTag.Create(nChildren);
        nD.WriteTo(stream);

        stream.Write(Hash);

        // skip size parameter for now -- back-patched once the attribute
        // block's actual size is known
        stream.Position += 2;

        var attrsPtr = stream.Position;

        if (nAttributes > 0)
        {
            WriteAttributeHashes(stream);

            // write attribute data
            foreach (var attribute in Attributes)
            {
                attribute.Serialize(stream);
            }
        }
        else
        {
            // no attributes to write!
            stream.WriteByte(0);
        }

        var childrenPtr = stream.Position;
        var attrsSize = (int)(childrenPtr - attrsPtr);

        // block size must fit the reserved 16-bit field
        if (attrsSize > 65535)
        {
            throw new InvalidOperationException("Attribute data too large.");
        }

        // write attributes size into the 2 bytes reserved earlier
        stream.Position = (attrsPtr - 2);
        stream.Write((short)attrsSize);

        // now write the children out
        stream.Position = childrenPtr;

        foreach (var child in Children)
        {
            child.Serialize(stream);
        }
    }
}