private void LoadKeys(byte[] handleList)
{
    Keys.Clear();
    using (var ms = new MemoryStream(handleList))
    using (var reader = new BinaryReader(ms))
    {
        // The handle list is a count-prefixed array of Key2TableEntry records.
        var numHandles = reader.ReadInt32();
        for (var i = 0; i < numHandles; i++)
        {
            var entry = BinUtils.ReadStruct<Key2TableEntry>(reader);
            Keys.Add((int)entry.NameIndex, entry);
        }
    }
}
/// <summary>
/// Reads the structure headers for the LSOF resource
/// </summary>
/// <param name="s">Stream to read the node headers from</param>
/// <param name="longNodes">Use the long (V3) on-disk node format</param>
private void ReadNodes(Stream s, bool longNodes)
{
#if DEBUG_LSF_SERIALIZATION
    Console.WriteLine(" ----- DUMP OF NODE TABLE -----");
#endif
    Nodes = new List<NodeInfo>();
    using (var reader = new BinaryReader(s))
    {
        while (s.Position < s.Length)
        {
            var resolved = new NodeInfo();
            if (longNodes)
            {
                var item = BinUtils.ReadStruct<NodeEntryV3>(reader);
                resolved.ParentIndex = item.ParentIndex;
                resolved.NameIndex = item.NameIndex;
                resolved.NameOffset = item.NameOffset;
                resolved.FirstAttributeIndex = item.FirstAttributeIndex;
            }
            else
            {
                var item = BinUtils.ReadStruct<NodeEntryV2>(reader);
                resolved.ParentIndex = item.ParentIndex;
                resolved.NameIndex = item.NameIndex;
                resolved.NameOffset = item.NameOffset;
                resolved.FirstAttributeIndex = item.FirstAttributeIndex;
            }

#if DEBUG_LSF_SERIALIZATION
            Console.WriteLine(String.Format(
                "{0}: {1} (parent {2}, firstAttribute {3})",
                Nodes.Count, Names[resolved.NameIndex][resolved.NameOffset],
                resolved.ParentIndex, resolved.FirstAttributeIndex
            ));
#endif

            Nodes.Add(resolved);
        }
    }
}
private NodeAttribute ReadAttribute(NodeAttribute.DataType type, BinaryReader reader, uint length)
{
    // LSF and LSB serialize the buffer types differently, so the specialized
    // cases live in the LSB and LSF serializers; the common types are handled
    // by BinUtils.ReadAttribute().
    switch (type)
    {
        case NodeAttribute.DataType.DT_String:
        case NodeAttribute.DataType.DT_Path:
        case NodeAttribute.DataType.DT_FixedString:
        case NodeAttribute.DataType.DT_LSString:
        case NodeAttribute.DataType.DT_WString:
        case NodeAttribute.DataType.DT_LSWString:
        {
            var attr = new NodeAttribute(type);
            attr.Value = ReadString(reader, (int)length);
            return attr;
        }

        case NodeAttribute.DataType.DT_TranslatedString:
        {
            // Serialized as two length-prefixed strings: the value, then the handle.
            var attr = new NodeAttribute(type);
            var str = new TranslatedString();

            var valueLength = reader.ReadInt32();
            str.Value = ReadString(reader, valueLength);

            var handleLength = reader.ReadInt32();
            str.Handle = ReadString(reader, handleLength);

            attr.Value = str;
            return attr;
        }

        case NodeAttribute.DataType.DT_ScratchBuffer:
        {
            var attr = new NodeAttribute(type);
            attr.Value = reader.ReadBytes((int)length);
            return attr;
        }

        default:
            return BinUtils.ReadAttribute(type, reader);
    }
}
private Stream readSocket()
{
    var nets = m_TcpClient.GetStream();

    // Wire format: a 4-byte total message length (which includes the prefix
    // itself), followed by the message body.
    var total = BinUtils.ReadInt32(nets);
    if (total >= Protocol.BSON_SIZE_LIMIT)
    {
        throw new MongoDBConnectorProtocolException(
            StringConsts.PROTO_SOCKET_READ_EXCEED_LIMIT_ERROR.Args(total, Protocol.BSON_SIZE_LIMIT));
    }

    var leftToRead = total - sizeof(Int32); // the total size includes the 4-byte length prefix

    m_BufferStream.SetLength(total);
    var buffer = m_BufferStream.GetBuffer();

    // Re-materialize the length prefix at the start of the buffer, then read
    // the remainder of the message directly into it.
    BinUtils.WriteInt32(buffer, total, 0);
    socketRead(nets, buffer, sizeof(Int32), leftToRead);

    m_Received.BindBuffer(buffer, 0, total);
    return m_Received; //todo stats
}
/*
 * struct {
 *     MsgHeader header;             // standard message header
 *     int32     ZERO;               // 0 - reserved for future use
 *     int32     numberOfCursorIDs;  // number of cursorIDs in message
 *     int64*    cursorIDs;          // sequence of cursorIDs to close
 * }
 */
public static Int32 Write_KILL_CURSORS(Stream stream, Int32 requestID, Cursor[] cursors)
{
    stream.Position = STD_HDR_LEN; // skip the header; it is backfilled once the total length is known

    BinUtils.WriteInt32(stream, 0); // ZERO
    BinUtils.WriteInt32(stream, cursors.Length);
    for (var i = 0; i < cursors.Length; i++)
    {
        BinUtils.WriteInt64(stream, cursors[i].ID);
    }

    var total = (Int32)stream.Position;
    stream.Position = 0;
    writeStandardHeader(stream, total, requestID, 0, OP_KILL_CURSORS);
    return total;
}
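// Hypothetical usage sketch (not part of the source): the Write_* helpers
// require a seekable stream because they seek back to offset 0 to backfill
// the standard header once the body length is known. One plausible call
// pattern is framing into a MemoryStream and flushing the exact byte count
// to the socket; sendKillCursors and networkStream are illustrative names.
private static void sendKillCursors(Stream networkStream, Int32 requestID, Cursor[] cursors)
{
    using (var ms = new MemoryStream())
    {
        var total = Write_KILL_CURSORS(ms, requestID, cursors);
        networkStream.Write(ms.GetBuffer(), 0, total);
    }
}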
public void Save(Stream stream, StoryDebugInfo debugInfo)
{
    var msg = ToProtobuf(debugInfo);
    using (var ms = new MemoryStream())
    using (var codedStream = new CodedOutputStream(ms))
    {
        msg.WriteTo(codedStream);
        codedStream.Flush();

        byte[] proto = ms.ToArray();
        byte flags = BinUtils.MakeCompressionFlags(LSLib.LS.Enums.CompressionMethod.LZ4,
            LSLib.LS.Enums.CompressionLevel.FastCompression);
        byte[] compressed = BinUtils.Compress(proto, flags);
        stream.Write(compressed, 0, compressed.Length);

        // The uncompressed size is appended as a trailing UInt32 so that
        // Load() can size the decompression buffer.
        using (var writer = new BinaryWriter(stream, Encoding.UTF8, true))
        {
            writer.Write((UInt32)proto.Length);
        }
    }
}
public static void WriteVlv(this byte[] data, UInt32 value, ref int offset)
{
    // Variable-length value: 7 payload bits per byte, most significant group
    // first, with the high bit set on every byte except the last. Only values
    // up to 28 bits (four groups) are supported.
    Debug.Assert(value <= 0xfffffff);

    // Assemble the encoded bytes in `store` with the first wire byte in the
    // lowest-order position, so on a little-endian host the in-memory layout
    // of `store` is already the wire order.
    uint byteCount = 1;
    UInt32 store = value & 0x7F;
    value >>= 7;
    while (value != 0)
    {
        store <<= 8;
        store |= (value & 0x7F) | 0x80;
        value >>= 7;
        byteCount++;
    }

    // Copy the low `byteCount` bytes of `store` into the output buffer; the
    // destination argument comes first in CopyBlockUnaligned.
    Unsafe.CopyBlockUnaligned(ref data[offset], ref Unsafe.As<UInt32, byte>(ref store), byteCount);
    offset += (int)byteCount;
}
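// Hypothetical decoding counterpart (not present in the source), sketched here
// to document the encoding produced by WriteVlv: each byte carries 7 payload
// bits, most significant group first, and a set high bit means more bytes
// follow. The name ReadVlv is illustrative.
public static UInt32 ReadVlv(this byte[] data, ref int offset)
{
    UInt32 value = 0;
    byte b;
    do
    {
        b = data[offset++];
        value = (value << 7) | (UInt32)(b & 0x7F);
    } while ((b & 0x80) != 0);
    return value;
}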
public static Int32 Write_QUERY(
    Stream stream,
    Int32 requestID,
    Database db,
    Collection collection, // may be null for $CMD
    QueryFlags flags,
    Int32 numberToSkip,
    Int32 numberToReturn,
    BSONDocument query,
    BSONDocument selector // may be null
)
{
    stream.Position = STD_HDR_LEN; // skip the header

    BinUtils.WriteInt32(stream, (Int32)flags);

    // If collection == null, query the $CMD collection instead.
    var fullNameBuffer = collection != null
        ? collection.m_FullNameCStringBuffer
        : db.m_CMD_NameCStringBuffer;
    stream.Write(fullNameBuffer, 0, fullNameBuffer.Length);

    BinUtils.WriteInt32(stream, numberToSkip);
    BinUtils.WriteInt32(stream, numberToReturn);

    query.WriteAsBSON(stream);
    if (selector != null)
    {
        selector.WriteAsBSON(stream);
    }

    var total = (Int32)stream.Position;
    stream.Position = 0;
    writeStandardHeader(stream, total, requestID, 0, OP_QUERY);
    return total;
}
/*
 * struct {
 *     MsgHeader header;              // standard message header
 *     int32     ZERO;                // 0 - reserved for future use
 *     cstring   fullCollectionName;  // "dbname.collectionname"
 *     int32     numberToReturn;      // number of documents to return
 *     int64     cursorID;            // cursorID from the OP_REPLY
 * }
 */
public static Int32 Write_GET_MORE(Stream stream, Int32 requestID, Collection collection, Cursor cursor)
{
    stream.Position = STD_HDR_LEN; // skip the header

    BinUtils.WriteInt32(stream, 0); // ZERO

    // Collection name
    var fullNameBuffer = collection.m_FullNameCStringBuffer;
    stream.Write(fullNameBuffer, 0, fullNameBuffer.Length);

    BinUtils.WriteInt32(stream, cursor.FetchBy);
    BinUtils.WriteInt64(stream, cursor.ID);

    var total = (Int32)stream.Position;
    stream.Position = 0;
    writeStandardHeader(stream, total, requestID, 0, OP_GET_MORE);
    return total;
}
public StoryDebugInfo Load(byte[] msgPayload)
{
    // The payload is the LZ4-compressed protobuf followed by a trailing
    // UInt32 holding the uncompressed size (see Save()).
    UInt32 decompressedSize;
    byte[] lengthBuf = new byte[4];
    Array.Copy(msgPayload, msgPayload.Length - 4, lengthBuf, 0, 4);
    using (var ms = new MemoryStream(lengthBuf))
    using (var reader = new BinaryReader(ms, Encoding.UTF8, true))
    {
        decompressedSize = reader.ReadUInt32();
    }

    var compressed = new byte[msgPayload.Length - 4];
    Array.Copy(msgPayload, 0, compressed, 0, msgPayload.Length - 4);

    byte flags = BinUtils.MakeCompressionFlags(LSLib.LS.Enums.CompressionMethod.LZ4,
        LSLib.LS.Enums.CompressionLevel.FastCompression);
    byte[] decompressed = BinUtils.Decompress(compressed, (int)decompressedSize, flags);

    var msg = StoryDebugInfoMsg.Parser.ParseFrom(decompressed);
    return FromProtobuf(msg);
}
public void Write(Resource resource)
{
    Compression = CompressionMethod.LZ4;
    CompressionLevel = CompressionLevel.MaxCompression;

    using (this.Writer = new BinaryWriter(Stream))
    using (this.NodeStream = new MemoryStream())
    using (this.NodeWriter = new BinaryWriter(NodeStream))
    using (this.AttributeStream = new MemoryStream())
    using (this.AttributeWriter = new BinaryWriter(AttributeStream))
    using (this.ValueStream = new MemoryStream())
    using (this.ValueWriter = new BinaryWriter(ValueStream))
    {
        NextNodeIndex = 0;
        NextAttributeIndex = 0;
        NodeIndices = new Dictionary<Node, int>();
        StringHashMap = new List<List<string>>(StringHashMapSize);
        while (StringHashMap.Count < StringHashMapSize)
        {
            StringHashMap.Add(new List<string>());
        }

        WriteRegions(resource);

        byte[] stringBuffer = null;
        using (var stringStream = new MemoryStream())
        using (var stringWriter = new BinaryWriter(stringStream))
        {
            WriteStaticStrings(stringWriter);
            stringBuffer = stringStream.ToArray();
        }

        var nodeBuffer = NodeStream.ToArray();
        var attributeBuffer = AttributeStream.ToArray();
        var valueBuffer = ValueStream.ToArray();

        var header = new Header();
        header.Magic = BitConverter.ToUInt32(Header.Signature, 0);
        header.Version = Version;
        header.EngineVersion = (resource.Metadata.majorVersion << 24) |
                               (resource.Metadata.minorVersion << 16) |
                               (resource.Metadata.revision << 8) |
                               resource.Metadata.buildNumber;

        bool chunked = (header.Version >= FileVersion.VerChunkedCompress);
        byte[] stringsCompressed = BinUtils.Compress(stringBuffer, Compression, CompressionLevel);
        byte[] nodesCompressed = BinUtils.Compress(nodeBuffer, Compression, CompressionLevel, chunked);
        byte[] attributesCompressed = BinUtils.Compress(attributeBuffer, Compression, CompressionLevel, chunked);
        byte[] valuesCompressed = BinUtils.Compress(valueBuffer, Compression, CompressionLevel, chunked);

        header.StringsUncompressedSize = (UInt32)stringBuffer.Length;
        header.StringsSizeOnDisk = (UInt32)stringsCompressed.Length;
        header.NodesUncompressedSize = (UInt32)nodeBuffer.Length;
        header.NodesSizeOnDisk = (UInt32)nodesCompressed.Length;
        header.AttributesUncompressedSize = (UInt32)attributeBuffer.Length;
        header.AttributesSizeOnDisk = (UInt32)attributesCompressed.Length;
        header.ValuesUncompressedSize = (UInt32)valueBuffer.Length;
        header.ValuesSizeOnDisk = (UInt32)valuesCompressed.Length;
        header.CompressionFlags = BinUtils.MakeCompressionFlags(Compression, CompressionLevel);
        header.Unknown2 = 0;
        header.Unknown3 = 0;
        header.Extended = ExtendedNodes ? 1u : 0u;

        BinUtils.WriteStruct<Header>(Writer, ref header);

        Writer.Write(stringsCompressed, 0, stringsCompressed.Length);
        Writer.Write(nodesCompressed, 0, nodesCompressed.Length);
        Writer.Write(attributesCompressed, 0, attributesCompressed.Length);
        Writer.Write(valuesCompressed, 0, valuesCompressed.Length);
    }
}
public Resource Read()
{
    using (var reader = new BinaryReader(Stream))
    {
        var hdr = BinUtils.ReadStruct<Header>(reader);
        if (hdr.Magic != BitConverter.ToUInt32(Header.Signature, 0))
        {
            var msg = String.Format(
                "Invalid LSF signature; expected {0,8:X}, got {1,8:X}",
                BitConverter.ToUInt32(Header.Signature, 0), hdr.Magic
            );
            throw new InvalidDataException(msg);
        }

        if (hdr.Version < FileVersion.VerInitial || hdr.Version > FileVersion.CurrentVersion)
        {
            var msg = String.Format("LSF version {0} is not supported", hdr.Version);
            throw new InvalidDataException(msg);
        }

        bool isCompressed = BinUtils.CompressionFlagsToMethod(hdr.CompressionFlags) != CompressionMethod.None;

        if (hdr.StringsSizeOnDisk > 0 || hdr.StringsUncompressedSize > 0)
        {
            uint onDiskSize = isCompressed ? hdr.StringsSizeOnDisk : hdr.StringsUncompressedSize;
            byte[] compressed = reader.ReadBytes((int)onDiskSize);
            byte[] uncompressed;
            if (isCompressed)
            {
                uncompressed = BinUtils.Decompress(compressed, (int)hdr.StringsUncompressedSize, hdr.CompressionFlags);
            }
            else
            {
                uncompressed = compressed;
            }

            using (var namesStream = new MemoryStream(uncompressed))
            {
                ReadNames(namesStream);
            }
        }

        if (hdr.NodesSizeOnDisk > 0 || hdr.NodesUncompressedSize > 0)
        {
            uint onDiskSize = isCompressed ? hdr.NodesSizeOnDisk : hdr.NodesUncompressedSize;
            var uncompressed = Decompress(reader, onDiskSize, hdr.NodesUncompressedSize, hdr);

#if DEBUG_LSF_SERIALIZATION
            using (var nodesFile = new FileStream("nodes.bin", FileMode.Create, FileAccess.Write))
            {
                nodesFile.Write(uncompressed, 0, uncompressed.Length);
            }
#endif

            using (var nodesStream = new MemoryStream(uncompressed))
            {
                var longNodes = hdr.Version >= FileVersion.VerExtendedNodes && hdr.Extended == 1;
                ReadNodes(nodesStream, longNodes);
            }
        }

        if (hdr.AttributesSizeOnDisk > 0 || hdr.AttributesUncompressedSize > 0)
        {
            uint onDiskSize = isCompressed ? hdr.AttributesSizeOnDisk : hdr.AttributesUncompressedSize;
            var uncompressed = Decompress(reader, onDiskSize, hdr.AttributesUncompressedSize, hdr);

#if DEBUG_LSF_SERIALIZATION
            using (var attributesFile = new FileStream("attributes.bin", FileMode.Create, FileAccess.Write))
            {
                attributesFile.Write(uncompressed, 0, uncompressed.Length);
            }
#endif

            using (var attributesStream = new MemoryStream(uncompressed))
            {
                var longAttributes = hdr.Version >= FileVersion.VerExtendedNodes && hdr.Extended == 1;
                ReadAttributes(attributesStream, longAttributes);
            }
        }

        if (hdr.ValuesSizeOnDisk > 0 || hdr.ValuesUncompressedSize > 0)
        {
            uint onDiskSize = isCompressed ? hdr.ValuesSizeOnDisk : hdr.ValuesUncompressedSize;
            var uncompressed = Decompress(reader, onDiskSize, hdr.ValuesUncompressedSize, hdr);
            this.Values = new MemoryStream(uncompressed);

#if DEBUG_LSF_SERIALIZATION
            using (var valuesFile = new FileStream("values.bin", FileMode.Create, FileAccess.Write))
            {
                valuesFile.Write(uncompressed, 0, uncompressed.Length);
            }
#endif
        }
        else
        {
            this.Values = new MemoryStream();
        }

        Resource resource = new Resource();
        ReadRegions(resource);

        // The engine version is packed as major.minor.revision.build, one byte each.
        resource.Metadata.majorVersion = (hdr.EngineVersion & 0xff000000) >> 24;
        resource.Metadata.minorVersion = (hdr.EngineVersion & 0xff0000) >> 16;
        resource.Metadata.revision = (hdr.EngineVersion & 0xff00) >> 8;
        resource.Metadata.buildNumber = (hdr.EngineVersion & 0xff);
        return resource;
    }
}
/// <summary>
/// Reads the attribute headers for the LSOF resource
/// </summary>
/// <param name="s">Stream to read the attribute headers from</param>
/// <param name="longAttributes">Use the long (V3) on-disk attribute format</param>
private void ReadAttributes(Stream s, bool longAttributes)
{
    Attributes = new List<AttributeInfo>();
    using (var reader = new BinaryReader(s))
    {
#if DEBUG_LSF_SERIALIZATION
        var rawAttributes = new List<AttributeEntry>();
#endif

        // Index of the last attribute seen for each node; used to link each
        // node's attributes into a singly-linked list via NextAttributeIndex.
        var prevAttributeRefs = new List<Int32>();
        UInt32 dataOffset = 0;
        Int32 index = 0;
        while (s.Position < s.Length)
        {
            var attribute = BinUtils.ReadStruct<AttributeEntry>(reader);

            var resolved = new AttributeInfo();
            resolved.NameIndex = attribute.NameIndex;
            resolved.NameOffset = attribute.NameOffset;
            resolved.TypeId = attribute.TypeId;
            resolved.Length = attribute.Length;
            resolved.DataOffset = dataOffset;
            resolved.NextAttributeIndex = -1;

            if (longAttributes)
            {
                BinUtils.ReadStruct<AttributeEntryV3>(reader);
            }

            var nodeIndex = attribute.NodeIndex + 1;
            if (prevAttributeRefs.Count > nodeIndex)
            {
                if (prevAttributeRefs[nodeIndex] != -1)
                {
                    Attributes[prevAttributeRefs[nodeIndex]].NextAttributeIndex = index;
                }

                prevAttributeRefs[nodeIndex] = index;
            }
            else
            {
                while (prevAttributeRefs.Count < nodeIndex)
                {
                    prevAttributeRefs.Add(-1);
                }

                prevAttributeRefs.Add(index);
            }

#if DEBUG_LSF_SERIALIZATION
            rawAttributes.Add(attribute);
#endif

            dataOffset += resolved.Length;
            Attributes.Add(resolved);
            index++;
        }

#if DEBUG_LSF_SERIALIZATION
        Console.WriteLine(" ----- DUMP OF ATTRIBUTE REFERENCES -----");
        for (int i = 0; i < prevAttributeRefs.Count; i++)
        {
            Console.WriteLine(String.Format("Node {0}: last attribute {1}", i, prevAttributeRefs[i]));
        }

        Console.WriteLine(" ----- DUMP OF ATTRIBUTE TABLE -----");
        for (int i = 0; i < Attributes.Count; i++)
        {
            var resolved = Attributes[i];
            var attribute = rawAttributes[i];
            var debug = String.Format(
                "{0}: {1} (offset {2:X}, typeId {3}, nextAttribute {4}, node {5})",
                i, Names[resolved.NameIndex][resolved.NameOffset], resolved.DataOffset,
                resolved.TypeId, resolved.NextAttributeIndex, attribute.NodeIndex
            );
            Console.WriteLine(debug);
        }
#endif
    }
}
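// Hypothetical traversal sketch (not in the source), to show how the chain
// built above is consumed: starting from a node's FirstAttributeIndex, follow
// NextAttributeIndex until -1, which ReadAttributes uses as the end marker.
// Assumes FirstAttributeIndex is likewise -1 for nodes without attributes.
private IEnumerable<AttributeInfo> AttributesOfNode(NodeInfo node)
{
    for (var i = node.FirstAttributeIndex; i != -1; i = Attributes[i].NextAttributeIndex)
    {
        yield return Attributes[i];
    }
}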