/// <summary>
/// Starts each test from a clean slate: removes files left over from earlier
/// runs, then creates a fresh provider and opens the default file.
/// </summary>
public async Task TestInitialize()
{
    DBFilesBTreeTests.DeleteFiles();

    this.provider = new FilesProvider("Data", DBFilesBTreeTests.CollectionName,
        8192, BlocksInCache, 8192, Encoding.UTF8, 10000, true);
    this.file = await this.provider.GetFile("Default");
}
/// <summary>
/// Initializes the test class: removes database files from previous runs so
/// the class starts from a known state, then creates the shared provider and
/// opens the default file.
/// </summary>
/// <param name="Context">Test context (not used).</param>
public static async Task ClassInitialize(TestContext Context)
{
    // Remove residue from earlier runs. (Factored out of four copy-pasted
    // exists/delete sequences in the original.)
    DeleteIfExists(DBFilesBTreeTests.MasterFileName);
    DeleteIfExists(DBFilesBTreeTests.FileName);
    DeleteIfExists(DBFilesBTreeTests.BlobFileName);
    DeleteIfExists(DBFilesBTreeTests.NamesFileName);

    // The LW (lightweight) build of FilesProvider takes no encryption flag.
#if !LW
    provider = new FilesProvider("Data", "Default", BlockSize, BlocksInCache,
        Math.Max(BlockSize / 2, 1024), Encoding.UTF8, 10000, true);
#else
    provider = new FilesProvider("Data", "Default", BlockSize, BlocksInCache,
        Math.Max(BlockSize / 2, 1024), Encoding.UTF8, 10000);
#endif
    file = await provider.GetFile("Default");

    // Local helper: deletes a file if present; no-op otherwise.
    void DeleteIfExists(string Name)
    {
        if (File.Exists(Name))
            File.Delete(Name);
    }
}
/// <summary>
/// <see cref="IDisposable.Dispose"/>
/// </summary>
public void Dispose()
{
    // Release the underlying B-tree file, if one is still open.
    if (!(this.dictionaryFile is null))
    {
        this.dictionaryFile.Dispose();
        this.dictionaryFile = null;
    }

    this.recordHandler = null;
}
/// <summary>
/// This class manages a string dictionary in a persisted file.
/// </summary>
/// <param name="FileName">File name of index file.</param>
/// <param name="BlobFileName">Name of file in which BLOBs are stored.</param>
/// <param name="CollectionName">Collection Name.</param>
/// <param name="Provider">Files provider.</param>
/// <param name="RetainInMemory">Retain the dictionary in memory.</param>
public StringDictionary(string FileName, string BlobFileName, string CollectionName, FilesProvider Provider, bool RetainInMemory)
{
    // Configuration is taken from the owning provider.
    this.provider = Provider;
    this.collectionName = CollectionName;
    this.encoding = this.provider.Encoding;
    this.timeoutMilliseconds = this.provider.TimeoutMilliseconds;
    this.genericSerializer = new GenericObjectSerializer(this.provider);
    this.keyValueSerializer = new KeyValueSerializer(this.provider, this.genericSerializer);

    // Record handler is created before the B-tree file, which receives it below.
    // Size limit: half a block minus header overhead, minus 4 — presumably record
    // framing overhead; TODO confirm against ObjectBTreeFile record layout.
    this.recordHandler = new StringDictionaryRecords(this.collectionName, this.encoding,
        (this.provider.BlockSize - ObjectBTreeFile.BlockHeaderSize) / 2 - 4,
        this.genericSerializer, this.provider);

    // Only the NETSTANDARD1_5 build passes the encryption flag.
    this.dictionaryFile = new ObjectBTreeFile(FileName, this.collectionName, BlobFileName,
        this.provider.BlockSize, this.provider.BlobBlockSize, this.provider, this.encoding,
        this.timeoutMilliseconds,
#if NETSTANDARD1_5
        this.provider.Encrypted, Provider.Debug, this.recordHandler);
#else
        Provider.Debug, this.recordHandler);
#endif

    // Optional in-memory mirror of the persisted dictionary.
    if (RetainInMemory)
    {
        this.inMemory = new Dictionary<string, object>();
    }
    else
    {
        this.inMemory = null;
    }
}
/// <summary>
/// Inserts an object holding two by-reference members and verifies that all
/// three objects receive Object IDs and can be reloaded intact.
/// </summary>
public async Task DBFiles_Provider_01_ByReference()
{
    ByReference Saved = new ByReference()
    {
        Default = DBFilesBTreeTests.CreateDefault(100),
        Simple = DBFilesBTreeTests.CreateSimple(100)
    };

    await this.provider.Insert(Saved);

    ObjectBTreeFile BTreeFile = await this.provider.GetFile("Default");
    await DBFilesBTreeTests.AssertConsistent(BTreeFile, this.provider, 3, Saved, true);
    Console.Out.WriteLine(await DBFilesBTreeTests.ExportXML(BTreeFile, "Data\\BTree.xml"));

    // All three objects must have been assigned Object IDs during insertion.
    Assert.AreNotEqual(Guid.Empty, Saved.ObjectId);
    Assert.AreNotEqual(Guid.Empty, Saved.Default.ObjectId);
    Assert.AreNotEqual(Guid.Empty, Saved.Simple.ObjectId);

    // Reload by Object ID and verify the referenced objects round-tripped.
    ByReference Loaded = await this.provider.LoadObject<ByReference>(Saved.ObjectId);
    DBFilesObjectSerializationTests.AssertEqual(Loaded.Default, Saved.Default);
    DBFilesObjectSerializationTests.AssertEqual(Loaded.Simple, Saved.Simple);
}
/// <summary>
/// Makes sure to unlock the parent when enumeration is done.
/// </summary>
/// <param name="Cursor">Cursor to underlying result set.</param>
/// <param name="TimeoutMilliseconds">Time to wait to get access to underlying database.</param>
/// <param name="File">Parent file.</param>
/// <param name="LockType">Type of lock to release.</param>
internal ParentLockedCursor(ICursor<T> Cursor, int TimeoutMilliseconds, ObjectBTreeFile File, LockType LockType)
{
    this.file = File;
    this.cursor = Cursor;
    this.lockType = LockType;
    this.timeoutMilliseconds = TimeoutMilliseconds;
}
/// <summary>
/// <see cref="IDisposable.Dispose"/>
/// </summary>
public void Dispose()
{
    // The index file is owned here; the object file is only referenced.
    if (!(this.indexFile is null))
    {
        this.indexFile.Dispose();
        this.indexFile = null;
    }

    this.objectFile = null;
    this.recordHandler = null;
}
/// <summary>
/// Provides a cursor that joins results from multiple cursors. It only returns an object once, regardless of how many times
/// it appears in the different child cursors.
/// </summary>
/// <param name="ChildFilters">Child filters.</param>
/// <param name="File">File being searched.</param>
/// <param name="Locked">If locked access is desired.</param>
public UnionCursor(Filter[] ChildFilters, ObjectBTreeFile File, bool Locked)
{
    this.file = File;
    this.locked = Locked;
    this.childFilters = ChildFilters;
    this.nrCursors = ChildFilters.Length;
    this.currentCursor = null;  // No current child cursor until enumeration begins.
}
/// <summary>
/// Releases the provider and drops the file reference after each test.
/// </summary>
public void TestCleanup()
{
    if (!(this.provider is null))
    {
        this.provider.Dispose();
        this.provider = null;
        this.file = null;
    }
}
/// <summary>
/// Provides a cursor that joins results from multiple cursors. It only returns an object once, regardless of how many times
/// it appears in the different child cursors.
/// </summary>
/// <param name="ChildFilters">Child filters.</param>
/// <param name="File">File being searched.</param>
/// <param name="LockType">Type of lock to use when accessing the file.</param>
/// <param name="LockParent">If parent file is to be locked as well.</param>
public UnionCursor(Filter[] ChildFilters, ObjectBTreeFile File, LockType LockType, bool LockParent)
{
    this.childFilters = ChildFilters;
    this.nrCursors = this.childFilters.Length;
    this.file = File;
    this.currentCursor = null;  // No current child cursor until enumeration begins.
    this.lockType = LockType;
    this.lockParent = LockParent;
}
/// <summary>
/// Starts each test from a clean slate: deletes old files, creates a fresh
/// provider, opens the collection file, and records the start time.
/// </summary>
public async Task TestInitialize()
{
    DeleteFiles();

    // Consistently qualify BlockSize with "this." (the original mixed
    // this.BlockSize and bare BlockSize in the same expression).
    this.provider = new FilesProvider(Folder, CollectionName, this.BlockSize, BlocksInCache,
        Math.Max(this.BlockSize / 2, 1024), Encoding.UTF8, 10000, true);
    this.file = await this.provider.GetFile(CollectionName);
    this.start = DateTime.Now;
}
/// <summary>
/// Writes a "Stat" element describing the statistics of a B-tree file.
/// </summary>
/// <param name="w">XML destination.</param>
/// <param name="File">File the statistics belong to.</param>
/// <param name="Stat">Statistics to export.</param>
private static void WriteStat(XmlWriter w, ObjectBTreeFile File, FileStatistics Stat)
{
    w.WriteStartElement("Stat");

    // Averages may be NaN (e.g. when nothing has been measured); omit them then.
    if (!double.IsNaN(Stat.AverageBytesUsedPerBlock))
        w.WriteAttributeString("avgBytesPerBlock", CommonTypes.Encode(Stat.AverageBytesUsedPerBlock));

    if (!double.IsNaN(Stat.AverageObjectSize))
        w.WriteAttributeString("avgObjSize", CommonTypes.Encode(Stat.AverageObjectSize));

    if (!double.IsNaN(Stat.AverageObjectsPerBlock))
        w.WriteAttributeString("avgObjPerBlock", CommonTypes.Encode(Stat.AverageObjectsPerBlock));

    // Boolean flags.
    w.WriteAttributeString("hasComments", CommonTypes.Encode(Stat.HasComments));
    w.WriteAttributeString("isBalanced", CommonTypes.Encode(Stat.IsBalanced));
    w.WriteAttributeString("isCorrupt", CommonTypes.Encode(Stat.IsCorrupt));

    // Integer counters, in the original attribute order.
    w.WriteAttributeString("maxBytesPerBlock", Stat.MaxBytesUsedPerBlock.ToString());
    w.WriteAttributeString("maxDepth", Stat.MaxDepth.ToString());
    w.WriteAttributeString("maxObjSize", Stat.MaxObjectSize.ToString());
    w.WriteAttributeString("maxObjPerBlock", Stat.MaxObjectsPerBlock.ToString());
    w.WriteAttributeString("minBytesPerBlock", Stat.MinBytesUsedPerBlock.ToString());
    w.WriteAttributeString("minDepth", Stat.MinDepth.ToString());
    w.WriteAttributeString("minObjSize", Stat.MinObjectSize.ToString());
    w.WriteAttributeString("minObjPerBlock", Stat.MinObjectsPerBlock.ToString());
    w.WriteAttributeString("nrBlobBlocks", Stat.NrBlobBlocks.ToString());
    w.WriteAttributeString("nrBlobBytes", Stat.NrBlobBytesTotal.ToString());
    w.WriteAttributeString("nrBlobBytesUnused", Stat.NrBlobBytesUnused.ToString());
    w.WriteAttributeString("nrBlobBytesUsed", Stat.NrBlobBytesUsed.ToString());
    w.WriteAttributeString("nrBlocks", Stat.NrBlocks.ToString());
    w.WriteAttributeString("nrBytes", Stat.NrBytesTotal.ToString());
    w.WriteAttributeString("nrBytesUnused", Stat.NrBytesUnused.ToString());
    w.WriteAttributeString("nrBytesUsed", Stat.NrBytesUsed.ToString());
    w.WriteAttributeString("nrObjects", Stat.NrObjects.ToString());
    w.WriteAttributeString("usage", CommonTypes.Encode(Stat.Usage));

    // BLOB usage percentage only makes sense when BLOB storage is in use.
    if (Stat.NrBlobBytesTotal > 0)
        w.WriteAttributeString("blobUsage", CommonTypes.Encode((100.0 * Stat.NrBlobBytesUsed) / Stat.NrBlobBytesTotal));

    if (Stat.HasComments)
    {
        foreach (string Comment in Stat.Comments)
            w.WriteElementString("Comment", Comment);
    }

    w.WriteEndElement();
}
/// <summary>
/// Releases the shared provider and file reference once the class is done.
/// </summary>
public static void ClassCleanup()
{
    if (!(provider is null))
    {
        provider.Dispose();
        provider = null;
    }

    file = null;
}
/// <summary>
/// Creates a string dictionary in a persisted file.
/// </summary>
/// <param name="FileName">File name of index file.</param>
/// <param name="BlobFileName">Name of file in which BLOBs are stored.</param>
/// <param name="CollectionName">Collection Name.</param>
/// <param name="Provider">Files provider.</param>
/// <param name="RetainInMemory">Retain the dictionary in memory.</param>
/// <returns>Created string dictionary.</returns>
public static async Task<StringDictionary> Create(string FileName, string BlobFileName, string CollectionName, FilesProvider Provider, bool RetainInMemory)
{
    StringDictionary Result = new StringDictionary(CollectionName, Provider, RetainInMemory);

    // The backing B-tree file is created asynchronously; all other state is
    // initialized by the constructor above.
    Result.dictionaryFile = await ObjectBTreeFile.Create(FileName, Result.collectionName, BlobFileName,
        Result.provider.BlockSize, Result.provider.BlobBlockSize, Result.provider, Result.encoding,
        Result.timeoutMilliseconds, Result.provider.Encrypted, Result.recordHandler);

    return Result;
}
/// <summary>
/// Starts each test from a clean slate: deletes old files, then creates a
/// fresh provider asynchronously and opens the default file.
/// </summary>
public async Task TestInitialize()
{
    DBFilesBTreeTests.DeleteFiles();

    // The LW (lightweight) build of FilesProvider takes no encryption flag.
#if LW
    this.provider = await FilesProvider.CreateAsync("Data", DBFilesBTreeTests.CollectionName,
        8192, BlocksInCache, 8192, Encoding.UTF8, 10000);
#else
    this.provider = await FilesProvider.CreateAsync("Data", DBFilesBTreeTests.CollectionName,
        8192, BlocksInCache, 8192, Encoding.UTF8, 10000, true);
#endif
    this.file = await this.provider.GetFile("Default");
}
/// <summary>
/// Initializes the test class: deletes old files, then creates the shared
/// provider asynchronously and opens the default file.
/// </summary>
/// <param name="Context">Test context (not used).</param>
public static async Task ClassInitialize(TestContext Context)
{
    DBFilesBTreeTests.DeleteFiles();

    // The LW (lightweight) build of FilesProvider takes no encryption flag.
#if LW
    provider = await FilesProvider.CreateAsync("Data", "Default", BlockSize, BlocksInCache,
        Math.Max(BlockSize / 2, 1024), Encoding.UTF8, 10000);
#else
    provider = await FilesProvider.CreateAsync("Data", "Default", BlockSize, BlocksInCache,
        Math.Max(BlockSize / 2, 1024), Encoding.UTF8, 10000, true);
#endif
    file = await provider.GetFile("Default");
}
/// <summary>
/// Logs elapsed test time and releases the provider after each test.
/// </summary>
public void TestCleanup()
{
    Console.Out.WriteLine("Elapsed time: " + (DateTime.Now - this.start).ToString());

    if (!(this.provider is null))
    {
        this.provider.Dispose();
        this.provider = null;
        this.file = null;
    }
}
/// <summary>
/// Exports the graph of a B-tree file as XML, optionally also writing it to disk.
/// </summary>
/// <param name="File">File to export.</param>
/// <param name="XmlFileName">Destination file name, or null/empty to skip writing.</param>
/// <returns>The exported XML.</returns>
internal static async Task<string> ExportXML(ObjectBTreeFile File, string XmlFileName)
{
    string Xml = await File.ExportGraphXML(false);

    // "File" shadows System.IO.File here, hence the full qualification.
    if (!string.IsNullOrEmpty(XmlFileName))
        System.IO.File.WriteAllText(XmlFileName, Xml);

    return Xml;
}
/// <summary>
/// Backs up any existing database files (replacing earlier backups), removes
/// the originals, then creates a fresh provider for the test.
/// </summary>
public async Task TestInitialize()
{
    // Factored out of four copy-pasted backup-and-delete sequences.
    BackupAndDelete(MasterFileName);
    BackupAndDelete(FileName);
    BackupAndDelete(BlobFileName);
    BackupAndDelete(NamesFileName);

    // Consistently qualify BlockSize with "this." (the original mixed
    // this.BlockSize and bare BlockSize in the same expression).
    this.provider = new FilesProvider(Folder, CollectionName, this.BlockSize, BlocksInCache,
        Math.Max(this.BlockSize / 2, 1024), Encoding.UTF8, 10000, true);
    this.file = await this.provider.GetFile(CollectionName);
    this.start = DateTime.Now;

    // Local helper: moves Name to Name + ".bak", replacing any previous backup.
    void BackupAndDelete(string Name)
    {
        if (File.Exists(Name + ".bak"))
            File.Delete(Name + ".bak");

        if (File.Exists(Name))
        {
            File.Copy(Name, Name + ".bak");
            File.Delete(Name);
        }
    }
}
/// <summary>
/// Gets the Object ID for a given object.
/// </summary>
/// <param name="Value">Object reference.</param>
/// <param name="InsertIfNotFound">Insert object into database with new Object ID, if no Object ID is set.</param>
/// <returns>Object ID for <paramref name="Value"/>.</returns>
/// <exception cref="NotSupportedException">Thrown, if the corresponding class does not have an Object ID property,
/// or if the corresponding property type is not supported.</exception>
public override async Task<Guid> GetObjectId(object Value, bool InsertIfNotFound)
{
    if (Value is GenericObject Obj)
    {
        // Object already has an ID assigned: return it unchanged.
        if (!Obj.ObjectId.Equals(Guid.Empty))
        {
            return (Obj.ObjectId);
        }

        if (!InsertIfNotFound)
        {
            throw new Exception("Object has no Object ID defined.");
        }

        ObjectBTreeFile File = await this.provider.GetFile(Obj.CollectionName);
        Guid ObjectId;

        // Try to acquire the write lock without waiting (timeout 0).
        if (await File.TryBeginWrite(0))
        {
            try
            {
                ObjectId = await File.SaveNewObjectLocked(Value, this);
            }
            finally
            {
                await File.EndWrite();
            }

            // NOTE(review): index entries are written after the file's write lock
            // is released — presumably Index.SaveNewObject synchronizes on its
            // own; confirm in IndexBTreeFile.
            foreach (IndexBTreeFile Index in File.Indices)
            {
                await Index.SaveNewObject(ObjectId, Value, this);
            }
        }
        else
        {
            // File is busy: reserve an Object ID now and queue the actual save
            // for later. Rec.Item2 (block info) is intentionally unused here.
            Tuple<Guid, Storage.BlockInfo> Rec = await File.PrepareObjectIdForSaveLocked(Value, this);

            ObjectId = Rec.Item1;
            File.QueueForSave(Value, this);
        }

        Obj.ObjectId = ObjectId;

        return (ObjectId);
    }
    else
    {
        throw new NotSupportedException("Objects of type " + Value.GetType().FullName + " not supported.");
    }
}
/// <summary>
/// This class manages an index file to a <see cref="ObjectBTreeFile"/>.
/// </summary>
/// <param name="FileName">File name of index file.</param>
/// <param name="ObjectFile">Object file storing actual objects.</param>
/// <param name="Provider">Files provider.</param>
/// <param name="FieldNames">Field names to build the index on. By default, sort order is ascending.
/// If descending sort order is desired, prefix the corresponding field name by a hyphen (minus) sign.</param>
internal IndexBTreeFile(string FileName, ObjectBTreeFile ObjectFile, FilesProvider Provider, params string[] FieldNames)
{
    this.objectFile = ObjectFile;
    this.collectionName = ObjectFile.CollectionName;
    this.encoding = ObjectFile.Encoding;

    this.recordHandler = new IndexRecords(this.collectionName, this.encoding,
        ObjectFile.InlineObjectSizeLimit, FieldNames);
    this.genericSerializer = new GenericObjectSerializer(ObjectFile.Provider);

    // The index B-tree mirrors the object file's block configuration, but has
    // no collection name or BLOB file of its own.
    this.indexFile = new ObjectBTreeFile(FileName, string.Empty, string.Empty,
        ObjectFile.BlockSize, ObjectFile.BlobBlockSize, Provider, this.encoding,
        ObjectFile.TimeoutMilliseconds, ObjectFile.Encrypted, this.recordHandler);

    this.recordHandler.Index = this;
}
/// <summary>
/// Restores the database files from their ".bak" backups (skipping the test as
/// inconclusive if the backups are missing), then opens a provider on the
/// restored files and exports the initial tree for reference.
/// </summary>
public async Task TestInitialize()
{
    // The set of files this test restores; factored out of four copy-pasted sequences.
    string[] Files = new string[]
    {
        DBFilesBTreeTests.MasterFileName,
        DBFilesBTreeTests.DefaultFileName,
        DBFilesBTreeTests.DefaultBlobFileName,
        DBFilesBTreeTests.DefaultLabelsFileName
    };

    foreach (string Name in Files)
    {
        if (!File.Exists(Name + ".bak"))
            Assert.Inconclusive("No backup files to test against.");
    }

    foreach (string Name in Files)
    {
        if (File.Exists(Name))
            File.Delete(Name);

        File.Copy(Name + ".bak", Name);
    }

    // Block size is persisted alongside the backups; use the recorded value.
    int BlockSize = this.LoadBlockSize();

    // The LW (lightweight) build of FilesProvider takes no encryption flag.
#if LW
    this.provider = await FilesProvider.CreateAsync(DBFilesBTreeTests.Folder, DBFilesBTreeTests.CollectionName,
        BlockSize, 10000, Math.Max(BlockSize / 2, 1024), Encoding.UTF8, 10000);
#else
    this.provider = await FilesProvider.CreateAsync(DBFilesBTreeTests.Folder, DBFilesBTreeTests.CollectionName,
        BlockSize, 10000, Math.Max(BlockSize / 2, 1024), Encoding.UTF8, 10000, true);
#endif
    this.file = await this.provider.GetFile(DBFilesBTreeTests.CollectionName);
    this.start = DateTime.Now;

    await DBFilesBTreeTests.ExportXML(this.file, "Data\\BTreeBefore.xml");
}
/// <summary>
/// Creates an index file for a <see cref="ObjectBTreeFile"/>.
/// </summary>
/// <param name="FileName">File name of index file.</param>
/// <param name="ObjectFile">Object file storing actual objects.</param>
/// <param name="Provider">Files provider.</param>
/// <param name="FieldNames">Field names to build the index on. By default, sort order is ascending.
/// If descending sort order is desired, prefix the corresponding field name by a hyphen (minus) sign.</param>
/// <returns>Created index file.</returns>
internal static async Task<IndexBTreeFile> Create(string FileName, ObjectBTreeFile ObjectFile, FilesProvider Provider, params string[] FieldNames)
{
    IndexBTreeFile Result = new IndexBTreeFile()
    {
        objectFile = ObjectFile,
        collectionName = ObjectFile.CollectionName,
        encoding = ObjectFile.Encoding
    };

    Result.recordHandler = new IndexRecords(Result.collectionName, Result.encoding,
        Result.objectFile.InlineObjectSizeLimit, FieldNames);
    Result.genericSerializer = new GenericObjectSerializer(Result.objectFile.Provider);

    // The index B-tree mirrors the object file's block configuration, but has
    // no collection name or BLOB file of its own.
    Result.indexFile = await ObjectBTreeFile.Create(FileName, Result.collectionName, string.Empty,
        Result.objectFile.BlockSize, Result.objectFile.BlobBlockSize, Provider, Result.encoding,
        Result.objectFile.TimeoutMilliseconds, Result.objectFile.Encrypted, Result.recordHandler);
    Result.recordHandler.Index = Result;

    return Result;
}
/// <summary>
/// Creates an enumerator over the objects in a B-tree file, starting at the
/// first block, with no lock held.
/// </summary>
/// <param name="File">File to enumerate.</param>
/// <param name="RecordHandler">Handler interpreting the file's records.</param>
/// <param name="DefaultSerializer">Serializer to use, or null to resolve one from T.</param>
internal ObjectBTreeFileEnumerator(ObjectBTreeFile File, IRecordHandler RecordHandler, IObjectSerializer DefaultSerializer)
{
    this.file = File;
    this.recordHandler = RecordHandler;
    this.defaultSerializer = DefaultSerializer;
    this.blockUpdateCounter = File.BlockUpdateCounter;
    this.timeoutMilliseconds = this.file.TimeoutMilliseconds;

    // Start state: no block loaded, no lock held, no explicit starting point.
    this.currentBlockIndex = 0;
    this.currentBlock = null;
    this.currentReader = null;
    this.currentHeader = null;
    this.locked = false;
    this.startingPoint = null;

    this.Reset();

    // Resolve a serializer for T up front, unless enumerating untyped objects.
    if (this.defaultSerializer is null && typeof(T) != typeof(object))
        this.defaultSerializer = this.file.Provider.GetObjectSerializer(typeof(T));
}
/// <summary>
/// Backs up any existing database files (replacing earlier backups), removes
/// the originals, then creates a fresh provider for the test.
/// </summary>
public async Task TestInitialize()
{
    // Factored out of three copy-pasted backup-and-delete sequences.
    BackupAndDelete(DBFilesBTreeTests.MasterFileName);
    BackupAndDelete(DBFilesBTreeTests.FileName);
    BackupAndDelete(DBFilesBTreeTests.BlobFileName);

    this.provider = new FilesProvider("Data", DBFilesBTreeTests.CollectionName,
        8192, BlocksInCache, 8192, Encoding.UTF8, 10000, true);
    this.file = await this.provider.GetFile("Default");

    // Local helper: moves Name to Name + ".bak", replacing any previous backup.
    void BackupAndDelete(string Name)
    {
        if (File.Exists(Name + ".bak"))
            File.Delete(Name + ".bak");

        if (File.Exists(Name))
        {
            File.Copy(Name, Name + ".bak");
            File.Delete(Name);
        }
    }
}
/// <summary>
/// Saves c random objects, then deletes them one by one in random order,
/// asserting tree consistency. When CheckForEachObject is true, the database
/// files are snapshotted before every deletion (so a failing deletion can be
/// replayed from the ".bak" files plus the recorded object ID and block size),
/// and all remaining objects are verified to still load correctly.
/// </summary>
/// <param name="c">Number of objects to create and delete.</param>
/// <param name="CheckForEachObject">If full consistency checks (and file snapshots) are done per deletion.</param>
private async Task DBFiles_BTree_Test_DeleteObjects(int c, bool CheckForEachObject)
{
    Random Gen = new Random();
    Simple[] Objects = new Simple[c];
    Simple Obj;
    int i;

    // Populate the file with c random objects.
    for (i = 0; i < c; i++)
    {
        Objects[i] = Obj = CreateSimple(this.MaxStringLength);
        await this.file.SaveNewObject(Obj);
    }

    // Delete in random order; c shrinks as Objects is compacted in place.
    while (c > 0)
    {
        i = Gen.Next(0, c);
        Obj = Objects[i];
        c--;

        // Remove slot i by shifting the tail left one position.
        if (i < c)
        {
            Array.Copy(Objects, i + 1, Objects, i, c - i);
        }

        if (CheckForEachObject)
        {
            try
            {
                // Close the file so the on-disk state can be snapshotted safely.
                this.provider.CloseFile(this.file.CollectionName);
                this.file = null;

                File.Copy(FileName, FileName + ".bak", true);
                File.Copy(BlobFileName, BlobFileName + ".bak", true);
                File.Copy(NamesFileName, NamesFileName + ".bak", true);

                this.file = await this.provider.GetFile(CollectionName);

                // Record which object is about to be deleted, and the block size,
                // so a failure can be reproduced from the snapshot.
                if (File.Exists(ObjIdFileName))
                {
                    File.Delete(ObjIdFileName);
                }

                File.WriteAllBytes(ObjIdFileName, Obj.ObjectId.ToByteArray());

                if (File.Exists(BlockSizeFileName))
                {
                    File.Delete(BlockSizeFileName);
                }

                File.WriteAllText(BlockSizeFileName, this.BlockSize.ToString());

                Console.Out.WriteLine(await ExportXML(this.file, "Data\\BTreeBefore.xml"));
                Console.Out.WriteLine(Obj.ObjectId);
                await this.file.DeleteObject(Obj);
                //Console.Out.WriteLine(await ExportXML(this.file, "Data\\BTreeAfter.xml"));
                await AssertConsistent(this.file, this.provider, null, null, true);

                // Verify every remaining object still loads and round-trips.
                // NOTE: this loop reuses i and Obj; both are reassigned before
                // the next iteration of the outer while loop, so this is safe.
                for (i = 0; i < c; i++)
                {
                    Obj = await this.file.LoadObject<Simple>(Objects[i].ObjectId);
                    DBFilesObjectSerializationTests.AssertEqual(Objects[i], Obj);
                }
            }
            catch (Exception ex)
            {
                // Dump the broken tree for diagnosis, then rethrow with the
                // original stack trace preserved.
                if (this.file != null)
                {
                    Console.Out.WriteLine(await ExportXML(this.file, "Data\\BTreeError.xml"));
                }

                ExceptionDispatchInfo.Capture(ex).Throw();
            }
        }
        else
        {
            await this.file.DeleteObject(Obj);
        }
    }

    // After all deletions: empty tree, single (root) block, no BLOB blocks.
    FileStatistics Stat = await AssertConsistent(this.file, this.provider, null, null, true);

    AssertEx.Same(0, this.file.Count);
    AssertEx.Same(1, Stat.NrBlocks);
    AssertEx.Same(0, Stat.NrBlobBlocks);
}
/// <summary>
/// Creates an enumerator over the objects in a B-tree file, from a given
/// starting point. Delegates to the overload that also takes a default
/// serializer, passing null — presumably so a serializer for T is resolved
/// automatically, as the (File, RecordHandler, DefaultSerializer) overload
/// does; confirm against the five-argument overload, which is not visible here.
/// </summary>
internal ObjectBTreeFileEnumerator(ObjectBTreeFile File, bool Locked, IRecordHandler RecordHandler, BlockInfo StartingPoint)
    : this(File, Locked, RecordHandler, StartingPoint, null)
{
}
/// <summary>
/// Creates an enumerator over the objects in a B-tree file. Delegates to the
/// overload taking a default serializer, passing null so that a serializer
/// for T is resolved automatically when T is not object.
/// </summary>
internal ObjectBTreeFileEnumerator(ObjectBTreeFile File, IRecordHandler RecordHandler)
    : this(File, RecordHandler, null)
{
}
/// <summary>
/// Computes statistics for a B-tree file and asserts it is consistent: not
/// corrupt, balanced, and (optionally) containing the expected number of
/// objects. Recurses into all index files. On assertion failure, the last
/// added object is saved via SaveLastObject for later reproduction, and the
/// original exception is rethrown with its stack trace preserved.
/// </summary>
/// <param name="File">File to check.</param>
/// <param name="Provider">Owning provider.</param>
/// <param name="ExpectedNrObjects">Expected object count, or null to skip that check.</param>
/// <param name="LastObjectAdded">Last object added, saved for reproduction on failure (may be null).</param>
/// <param name="WriteStat">If statistics should be written to the console.</param>
/// <returns>The computed statistics.</returns>
internal static async Task<FileStatistics> AssertConsistent(ObjectBTreeFile File, FilesProvider Provider, int? ExpectedNrObjects, object LastObjectAdded, bool WriteStat)
{
    FileStatistics Statistics = await File.ComputeStatistics();

    if (WriteStat)
    {
        Console.Out.WriteLine("Block Size: " + Statistics.BlockSize.ToString());
        Console.Out.WriteLine("#Blocks: " + Statistics.NrBlocks.ToString());
        Console.Out.WriteLine("#BLOB Blocks: " + Statistics.NrBlobBlocks.ToString());
        Console.Out.WriteLine("#Bytes used: " + Statistics.NrBytesUsed.ToString());
        Console.Out.WriteLine("#Bytes unused: " + Statistics.NrBytesUnused.ToString());
        Console.Out.WriteLine("#Bytes total: " + Statistics.NrBytesTotal.ToString());
        Console.Out.WriteLine("#BLOB Bytes used: " + Statistics.NrBlobBytesUsed.ToString());
        Console.Out.WriteLine("#BLOB Bytes unused: " + Statistics.NrBlobBytesUnused.ToString());
        Console.Out.WriteLine("#BLOB Bytes total: " + Statistics.NrBlobBytesTotal.ToString());
        Console.Out.WriteLine("#Block loads: " + Statistics.NrBlockLoads.ToString());
        Console.Out.WriteLine("#Cache loads: " + Statistics.NrCacheLoads.ToString());
        Console.Out.WriteLine("#Block saves: " + Statistics.NrBlockSaves.ToString());
        Console.Out.WriteLine("#BLOB Block loads: " + Statistics.NrBlobBlockLoads.ToString());
        Console.Out.WriteLine("#BLOB Block saves: " + Statistics.NrBlobBlockSaves.ToString());
        Console.Out.WriteLine("#Objects: " + Statistics.NrObjects.ToString());
        Console.Out.WriteLine("Smallest object: " + Statistics.MinObjectSize.ToString());
        Console.Out.WriteLine("Largest object: " + Statistics.MaxObjectSize.ToString());
        Console.Out.WriteLine("Average object: " + Statistics.AverageObjectSize.ToString("F1"));
        Console.Out.WriteLine("Usage: " + Statistics.Usage.ToString("F2") + " %");
        Console.Out.WriteLine("Min(Depth): " + Statistics.MinDepth.ToString());
        Console.Out.WriteLine("Max(Depth): " + Statistics.MaxDepth.ToString());
        Console.Out.WriteLine("Min(Objects/Block): " + Statistics.MinObjectsPerBlock.ToString());
        Console.Out.WriteLine("Max(Objects/Block): " + Statistics.MaxObjectsPerBlock.ToString());
        Console.Out.WriteLine("Avg(Objects/Block): " + Statistics.AverageObjectsPerBlock.ToString("F1"));
        Console.Out.WriteLine("Min(Bytes Used/Block): " + Statistics.MinBytesUsedPerBlock.ToString());
        Console.Out.WriteLine("Max(Bytes Used/Block): " + Statistics.MaxBytesUsedPerBlock.ToString());
        Console.Out.WriteLine("Avg(Bytes Used/Block): " + Statistics.AverageBytesUsedPerBlock.ToString("F1"));
        Console.Out.WriteLine("Is Corrupt: " + Statistics.IsCorrupt.ToString());
        Console.Out.WriteLine("Is Balanced: " + Statistics.IsBalanced.ToString());
        Console.Out.WriteLine("Has Comments: " + Statistics.HasComments.ToString());
    }

    // Comments are always echoed, regardless of WriteStat.
    if (Statistics.HasComments)
    {
        Console.Out.WriteLine();
        foreach (string Comment in Statistics.Comments)
        {
            Console.Out.WriteLine(Comment);
        }
    }

    try
    {
        if (Statistics.IsCorrupt || !Statistics.IsBalanced)
        {
            // Dump the broken tree before failing the assertion.
            Console.Out.WriteLine();
            Console.Out.WriteLine(await ExportXML(File, "Data\\BTreeError.xml"));
            Console.Out.WriteLine();

            Assert.IsFalse(Statistics.IsCorrupt, "Database is corrupt.");
            Assert.IsTrue(Statistics.IsBalanced, "Database is unbalanced.");
        }

        if (ExpectedNrObjects.HasValue)
        {
            AssertEx.Same(ExpectedNrObjects.Value, Statistics.NrObjects);
        }
    }
    catch (Exception ex)
    {
        // Persist the last added object so the failure can be reproduced,
        // then rethrow preserving the original stack trace.
        SaveLastObject(Provider, LastObjectAdded);

        ExceptionDispatchInfo.Capture(ex).Throw();
    }

    // Every index should be as consistent as the file it indexes.
    foreach (IndexBTreeFile Index in File.Indices)
    {
        await AssertConsistent(Index.IndexFile, Provider, ExpectedNrObjects, null, WriteStat);
    }

    return (Statistics);
}
/// <summary>
/// Serializes an object to a binary destination.
/// </summary>
/// <param name="Writer">Binary destination.</param>
/// <param name="WriteTypeCode">If a type code is to be output.</param>
/// <param name="Embedded">If the object is embedded into another.</param>
/// <param name="Value">The actual object to serialize.</param>
public void Serialize(BinarySerializer Writer, bool WriteTypeCode, bool Embedded, object Value)
{
    if (Value is GenericObject TypedValue)
    {
        BinarySerializer WriterBak = Writer;
        IObjectSerializer Serializer;
        object Obj;

        // Non-embedded objects are serialized into a temporary buffer first, so
        // the payload length can be written before the payload (see end of method).
        if (!Embedded)
        {
            Writer = new BinarySerializer(Writer.CollectionName, Writer.Encoding, true);
        }

        if (WriteTypeCode)
        {
            // NOTE(review): TypedValue cannot be null here — the "is" pattern above
            // only matches non-null values — so both null branches below appear
            // unreachable.
            if (TypedValue == null)
            {
                Writer.WriteBits(ObjectSerializer.TYPE_NULL, 6);
                return;
            }
            else
            {
                Writer.WriteBits(ObjectSerializer.TYPE_OBJECT, 6);
            }
        }
        else if (TypedValue == null)
        {
            throw new NullReferenceException("Value cannot be null.");
        }

        // Type name, written as a field code; 0 denotes "no type name".
        if (string.IsNullOrEmpty(TypedValue.TypeName))
        {
            Writer.WriteVariableLengthUInt64(0);
        }
        else
        {
            Writer.WriteVariableLengthUInt64(this.provider.GetFieldCode(TypedValue.CollectionName, TypedValue.TypeName));
        }

        // Embedded objects additionally record their collection name (falling
        // back to the provider's default collection).
        if (Embedded)
        {
            Writer.WriteVariableLengthUInt64(this.provider.GetFieldCode(null,
                string.IsNullOrEmpty(TypedValue.CollectionName) ? this.provider.DefaultCollectionName : TypedValue.CollectionName));
        }

        // Properties: each as field code + value; the list is terminated by the
        // field code 0 written after the loop.
        foreach (KeyValuePair<string, object> Property in TypedValue)
        {
            Writer.WriteVariableLengthUInt64(this.provider.GetFieldCode(TypedValue.CollectionName, Property.Key));
            Obj = Property.Value;

            if (Obj == null)
            {
                Writer.WriteBits(ObjectSerializer.TYPE_NULL, 6);
            }
            else
            {
                // Nested generic objects recurse; everything else uses the
                // serializer registered for its runtime type.
                if (Obj is GenericObject)
                {
                    this.Serialize(Writer, true, true, Obj);
                }
                else
                {
                    Serializer = this.provider.GetObjectSerializer(Obj.GetType());
                    Serializer.Serialize(Writer, true, true, Obj);
                }
            }
        }

        Writer.WriteVariableLengthUInt64(0);

        if (!Embedded)
        {
            // Top-level objects: write the Object ID (creating and assigning one
            // if missing), then the buffered payload preceded by its length.
            if (!TypedValue.ObjectId.Equals(Guid.Empty))
            {
                WriterBak.Write(TypedValue.ObjectId);
            }
            else
            {
                Guid NewObjectId = ObjectBTreeFile.CreateDatabaseGUID();
                WriterBak.Write(NewObjectId);
                TypedValue.ObjectId = NewObjectId;
            }

            byte[] Bin = Writer.GetSerialization();

            WriterBak.WriteVariableLengthUInt64((ulong)Bin.Length);
            WriterBak.WriteRaw(Bin);
        }
    }
    else
    {
        // Not a GenericObject: delegate to the serializer for the runtime type.
        IObjectSerializer Serializer = this.provider.GetObjectSerializer(Value.GetType());
        Serializer.Serialize(Writer, WriteTypeCode, Embedded, Value);
    }
}