/// <summary>
/// Serializes (packs) a single node slot: the slot's version number, its Key, and then
/// either the inline Value (when data is stored in the Key segment) or the Value's data block id.
/// Also records the serialized key size hint on the first slot packed.
/// </summary>
private void PackSlotItem(BTreeAlgorithm parent, System.IO.BinaryWriter writer,
                          BTreeItemOnDisk slot, ref int keySizeOnDisk, bool? isKeySimpleType)
{
    if (slot == null) return;
    IO.OnDiskBinaryWriter _writer = (IO.OnDiskBinaryWriter)writer;
    long streamPos = _writer.LogicalPosition;
    if (slot.Value.IsDirty)
    {
        slot.VersionNumber++;
    }
    CollectionOnDisk.WritePersistentData(parent, slot.VersionNumber, writer);
    CollectionOnDisk.WritePersistentData(parent, slot.Key, writer, ItemType.Key);
    if (parent.IsDataInKeySegment)
    {
        if (parent.IsDataLongInt)
        {
            writer.Write((long)slot.Value.Data);
        }
        else
        {
            if (parent.PersistenceType == PersistenceType.Unknown)
            {
                parent.PersistenceType = CollectionOnDisk.GetPersistenceType(parent, slot.Value.Data, ItemType.Value);
                parent.IsDirty = true;
            }
            //** write the value and keep track of its data size and location in the disk buffer.
            //slot.ValueBlockIndex = DiskBuffer.GetIndexOf(((OnDiskBinaryWriter)writer).DataBlock);
            //slot.ValueIndexInBlock = ((OnDiskBinaryWriter)writer).DataBlockPosition;
            long startPos = _writer.LogicalPosition;
            CollectionOnDisk.WritePersistentData(parent, slot.Value.Data, writer, ItemType.Value);
            slot.HintSizeOnDisk = (int)(_writer.LogicalPosition - startPos);
        }
        slot.Value.IsDirty = false;
    }
    else
    {
        if (slot.Value.diskBuffer == null)
        {
            slot.Value.DiskBuffer = parent.CreateBlock();    //new Sop.DataBlock(parent.DataBlockSize);
        }
        writer.Write(parent.GetId(slot.Value.DiskBuffer));
    }
    if (keySizeOnDisk == 0)
    {
        int keyDataSize = (int)(_writer.LogicalPosition - streamPos);
        keySizeOnDisk = keyDataSize;
        parent.HintKeySizeOnDisk = keyDataSize;
    }
}
/// <summary>
/// Deserializes the Value from the entry's disk buffer.
/// </summary>
private object GetValue(BTreeItemOnDisk entry)
{
    var itm = entry;
    if (itm == null)
    {
        return null;
    }
    if (!itm.ValueLoaded || itm.IsDisposed)
    {
        Sop.DataBlock d = itm.Value.DiskBuffer;
        var itemOnDisk = ReadFromBlock(d);
        if (itemOnDisk != null)
        {
            if (itemOnDisk is ItemOnDisk)
            {
                var iod = (ItemOnDisk)itemOnDisk;
                iod.DiskBuffer = itm.Value.DiskBuffer;
                itm.Value = iod;
                if (iod.Data == null && iod.DataIsUserDefined && onValueUnpack != null)
                {
                    iod.Data = onValueUnpack(OnDiskBinaryReader);
                }
                // fold DiskBuffer to save memory...
                if (iod.Data != null && itm.Value != null && itm.Value.diskBuffer != null)
                {
                    itm.Value.diskBuffer.Fold();
                    if (itm.Value.diskBuffer.CountMembers() >= File.Profile.BigDataBlockCount)
                    {
                        var r = itm.Value.Data;
                        // nullify big data to conserve memory
                        itm.Value.Data = null;
                        return r;
                    }
                }
            }
            else
            {
                throw new SopException(
                    string.Format("Unexpected item of type {0} was deserialized.", itemOnDisk.GetType()));
            }
        }
        itm.ValueLoaded = true;
    }
    return itm.Value != null ? itm.Value.Data : null;
}

/// <summary>
/// Add Item (Key and Value) to the proper location on the B-Tree.
/// </summary>
/// <param name="item"></param>
public void Add(BTreeItemOnDisk item)
{
#if (DEBUG)
    Log.Logger.Instance.Verbose("BTreeAlgorithm.Add.");
#endif
    if (RootNode == null)
    {
        throw new InvalidOperationException("Can't add an item to a closed ObjectStore.");
    }
    if (HintSequentialRead)
    {
        HintSequentialRead = false;
    }
    BeginTreeMaintenance();
    bool dataSaved = false;
    if (!IsUnique)
    {
        if (AutoFlush)
        {
            SetBigDataValue(item);
            dataSaved = true;
        }
    }
    if (RootNode.Add(this, item))
    {
        if (!dataSaved && AutoFlush)
        {
            SetBigDataValue(item);
        }
    }
    else
    {
        EndTreeMaintenance();
        SetCurrentItemAddress(-1, 0);
        throw new DuplicateKeyException(string.Format("Item with key {0} already exists.", item.Key));
    }
    Distribute();
    Promote();
    EndTreeMaintenance();
    UpdateCount(UpdateCountType.Increment);
    SaveBlocks(MaxBlocks, false);
    RegisterChange(true);
    // Current item pointer points to null after add of an item.
    SetCurrentItemAddress(-1, 0);
}
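
// Illustrative sketch only -- NOT part of the library. It shows the intended call pattern
// for Add() above, assuming this helper lives inside the same class (so Add(), File and
// DuplicateKeyException are in scope) and that the long/long BTreeItemOnDisk constructor
// used by the recycler methods below applies to this store's keys and values.
private bool TryAddSketch(long keyAddress, long valueSize)
{
    var item = new BTreeItemOnDisk(File.DataBlockSize, keyAddress, valueSize);
    try
    {
        Add(item);        // throws DuplicateKeyException when IsUnique and the key already exists
        return true;
    }
    catch (DuplicateKeyException)
    {
        return false;     // key already present; caller decides whether to update or skip
    }
}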

/// <summary>
/// Removes the recycled-block entry registered at the given data address. While tree
/// maintenance is in progress, the address is simply dropped from the pending maintenance items.
/// </summary>
public void RemoveAvailableBlock(long dataAddress)
{
    if (_isDuringMaintenance)
    {
        _duringMaintenanceItems.Remove(dataAddress);
        return;
    }
    var itm = new BTreeItemOnDisk(File.DataBlockSize, dataAddress, 0);
    _isDuringMaintenance = true;
    BeginTreeMaintenance();
    Remove(itm);
    _segmentRemoved = true;
    _isDuringMaintenance = false;
    AddMaintenanceAddedItems();
    EndTreeMaintenance();
}

public void Add(DeletedBlockInfo value)
{
    Locker.Lock();
    if (Search(value.StartBlockAddress))
    {
        Locker.Unlock();
        return;
    }
    if (Log.Logger.Instance.IsVerboseEnabled)
    {
        Log.Logger.Instance.Log(Log.LogLevels.Verbose, "FileRecycler.Add: {0}", value.ToString());
    }
    var itm = new BTreeItemOnDisk(DataBlockSize, value.StartBlockAddress, value.EndBlockAddress);
    base.Add(itm);
    Locker.Unlock();
}
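
// Illustrative sketch only -- NOT part of the library. Same logic as the Add() above, but
// holding the Locker through a try/finally so an exception thrown by Search() or base.Add()
// cannot leave the lock taken. Assumes the Locker.Lock()/Unlock() semantics used above and
// that this sits in the same class (so base.Add and DataBlockSize are in scope).
public void AddGuardedSketch(DeletedBlockInfo value)
{
    Locker.Lock();
    try
    {
        if (Search(value.StartBlockAddress))
            return;
        var itm = new BTreeItemOnDisk(DataBlockSize, value.StartBlockAddress, value.EndBlockAddress);
        base.Add(itm);
    }
    finally
    {
        Locker.Unlock();
    }
}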

/// <summary>
/// Adds the item only if its key is not already in the tree. Returns false (without adding)
/// when a duplicate key is found, regardless of the store's IsUnique setting.
/// </summary>
public bool AddIfNotExist(BTreeItemOnDisk item)
{
    if (RootNode == null)
    {
        throw new InvalidOperationException("Can't add an item to a closed ObjectStore.");
    }
    if (HintSequentialRead)
    {
        HintSequentialRead = false;
    }
    BeginTreeMaintenance();
    bool uniqueSetting = IsUnique;
    IsUnique = true;
    bool r = RootNode.Add(this, item);
    IsUnique = uniqueSetting;
    if (r)
    {
        if (AutoFlush)
        {
            SetBigDataValue(item);
        }
    }
    else
    {
        EndTreeMaintenance();
        return r;
    }
    Distribute();
    Promote();
    EndTreeMaintenance();
    UpdateCount(UpdateCountType.Increment);
    SaveBlocks(MaxBlocks, false);
    RegisterChange(true);
    // Current item pointer points to null after add of an item.
    SetCurrentItemAddress(-1, 0);
    return r;
}

/// <summary>
/// Add Item (Key and Value) to the proper location on the B-Tree.
/// </summary>
/// <param name="item"></param>
public void Add(BTreeItemOnDisk item)
{
    if (RootNode == null)
    {
        throw new InvalidOperationException("Can't add an item to a closed ObjectStore.");
    }
    if (HintSequentialRead)
    {
        HintSequentialRead = false;
    }
    BeginTreeMaintenance();
    RootNode.Add(this, item);
    ProcessDistribution();
    ProcessPromotion();
    EndTreeMaintenance();
    UpdateCount(UpdateCountType.Increment);
    SaveBlocks(MaxBlocks, false);
    RegisterChange(true);
    // Current item pointer points to null after add of an item.
    SetCurrentItemAddress(-1, 0);
}

/// <summary>
/// Drains any pending slot distribution: while a sibling node is queued, hands DistributeItem
/// to it in the requested direction (left or right).
/// </summary>
internal void ProcessDistribution()
{
    if (DistributeSibling != null)
    {
        Log.Logger.Instance.Log(Log.LogLevels.Information,
            "ProcessDistribution: DistributeSibling Node Address {0}.", DistributeSibling.GetAddress(this));
    }
    while (DistributeSibling != null)
    {
        BTreeNodeOnDisk n = DistributeSibling;
        BTreeItemOnDisk item = DistributeItem;
        DistributeSibling = null;
        DistributeItem = null;
        if (DistributeLeftDirection)
        {
            n.DistributeToLeft(this, item);
        }
        else
        {
            n.DistributeToRight(this, item);
        }
    }
}

/// <summary>
/// Detaches the current item's Value from its on-disk blocks: the in-memory Data is cleared
/// and (when the value is not stored in the key segment) the Value gets a fresh disk buffer
/// so the existing blocks won't be recycled.
/// </summary>
public bool Detach()
{
    BTreeNodeOnDisk currNode = CurrentNode;
    if (currNode == null || CurrentItemReference.NodeItemIndex < 0)
    {
        return false;
    }
    BTreeItemOnDisk itm = currNode.Slots[CurrentItemReference.NodeItemIndex];
    itm.Value.Data = null;
    if (IsDataInKeySegment)
    {
        return true;
    }
    // reset the disk buffer so it won't get recycled... (intention is to re-attach it as another entry)
    itm.Value.diskBuffer = CreateBlock();
    itm.ValueLoaded = true;
    itm.Value.IsDirty = false;
    itm.IsDirty = true;
    itm.Value.DataIsUserDefined = false;
    IsDirty = true;
    return true;
}

/// <summary>
/// Moves a recycled-block entry: the entry at availableBlockAddress is removed and re-registered
/// at availableBlockNewAddress with availableBlockNewSize. Returns false when no entry for the
/// old address exists.
/// </summary>
public bool SetAvailableBlock(long availableBlockAddress, long availableBlockNewAddress, long availableBlockNewSize)
{
    if (_isDuringMaintenance)
    {
        if (_duringMaintenanceItems.ContainsKey(availableBlockAddress))
        {
            _duringMaintenanceItems.Remove(availableBlockAddress);
            _duringMaintenanceItems[availableBlockNewAddress] = availableBlockNewSize;
            return true;
        }
        return false;
    }
    var itm = new BTreeItemOnDisk(File.DataBlockSize, availableBlockAddress, 0);
    BeginTreeMaintenance();
    try
    {
        if (Search(itm))
        {
            _isDuringMaintenance = true;
            Remove();
            long r = addAvailableBlock(availableBlockNewAddress, availableBlockNewSize);
            _currentRecycleIndex = r > 0 ? r : availableBlockNewAddress;
            _isDuringMaintenance = false;
            AddMaintenanceAddedItems();
            return true;
        }
    }
    finally
    {
        EndTreeMaintenance();
    }
    return false;
}

/// <summary>
/// Do class variable/object initialization. Usually invoked from this class' constructor.
/// </summary>
/// <param name="bTree">Parent BTree</param>
/// <param name="parentNodeAddress">Parent Node</param>
protected internal void Initialize(BTree.BTreeAlgorithm bTree, long parentNodeAddress)
{
    DiskBuffer = bTree.CreateBlock();    //new Sop.DataBlock(bTree.IndexBlockSize);
    Slots = new BTreeItemOnDisk[bTree.SlotLength];
    ParentAddress = parentNodeAddress;
}

/// <summary>
/// Unpack deserializes the Node from the Stream.
/// </summary>
/// <param name="parent"></param>
/// <param name="reader"></param>
public void Unpack(IInternalPersistent parent, System.IO.BinaryReader reader)
{
    Count = reader.ReadInt16();
    HintSizeOnDisk = reader.ReadInt32();
    ParentAddress = reader.ReadInt64();
    if (Slots == null)
    {
        Slots = new BTreeItemOnDisk[((BTreeAlgorithm)parent).SlotLength];
    }
    if (ChildrenAddresses == null)
    {
        ChildrenAddresses = new long[Slots.Length + 1];
        ResetArray(ChildrenAddresses, -1);
    }
    short newCount = 0;
    for (int i = 0; i <= Slots.Length; i++)
    {
        ChildrenAddresses[i] = reader.ReadInt64();
        if (ChildrenAddresses[i] != -1)
        {
            newCount++;
        }
    }
    if (ChildrenAddresses[0] == -1L)
    {
        ChildrenAddresses = null;
    }
    else if (newCount > 0 && Count != newCount - 1)
    {
        Count = (short)(newCount - 1);
    }
    for (int i = 0; i < Count; i++)
    {
        Slots[i] = new BTreeItemOnDisk();
        object key = null;
        int vn = reader.ReadInt32();
        Slots[i].VersionNumber = vn;
        // read key from disk
        CollectionOnDisk.ReadPersistentData(parent, reader, ref key);
        if (key == null)
        {
            if (((BTreeAlgorithm)parent).onKeyUnpack != null)
            {
                key = ((BTreeAlgorithm)parent).onKeyUnpack(reader);
            }
            if (key == null)
            {
                if (i == 0)
                {
                    ((BTreeAlgorithm)parent).RootNeedsReload = true;
                    return;
                }
                throw new InvalidOperationException(
                    "Can't DeSerialize Key, ensure there is a TypeStore Entry for this data type.");
            }
        }
        Slots[i].Key = key;
        // propagate the slot's version number to versioned keys
        if (Slots[i].Key is IPersistentVersioned)
        {
            ((IPersistentVersioned)Slots[i].Key).VersionNumber = Slots[i].VersionNumber;
        }
        Slots[i].Value = new ItemOnDisk();
        if (((BTreeAlgorithm)parent).IsDataInKeySegment)
        {
            if (((BTreeAlgorithm)parent).IsDataLongInt)
            {
                long l = reader.ReadInt64();
                Slots[i].Value.Data = l;
            }
            else
            {
                if (((BTreeAlgorithm)parent).PersistenceType == PersistenceType.Unknown)
                {
                    throw new InvalidOperationException("Parent BTreeAlgorithm PersistenceType is unknown.");
                }
                // read the value and keep track of its data size and location in the disk buffer.
                long startPos = reader.BaseStream.Position;
                if (CollectionOnDisk.ReadPersistentData(parent, reader, ref Slots[i].Value.Data, ItemType.Value) == null)
                {
                    ((BTreeAlgorithm)parent).ValueUnpack(reader, Slots[i]);
                }
                Slots[i].HintSizeOnDisk = (int)(reader.BaseStream.Position - startPos);
            }
            Slots[i].ValueLoaded = true;
            Slots[i].Value.IsDirty = false;
        }
        else
        {
            // read Address of Value in Data Segment
            long l = reader.ReadInt64();
            Slots[i].Value.DiskBuffer = ((BTreeAlgorithm)parent).CreateBlock();    // new Sop.DataBlock((DataBlockSize)parent.DiskBuffer.Length);
            ((CollectionOnDisk)parent).SetIsDirty(Slots[i].Value.DiskBuffer, false);
            Slots[i].ValueLoaded = false;
            ((BTreeAlgorithm)parent).DataBlockDriver.SetId(Slots[i].Value.DiskBuffer, l);
            Slots[i].Value.DiskBuffer.contiguousBlockCount = reader.ReadUInt16();
        }
    }
}
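
// Illustrative sketch only -- NOT part of the library. Summarizes the fixed node header layout
// that Unpack() above consumes, using plain BinaryReader calls. The per-slot region that follows
// holds, for each slot: version number (Int32), the Key payload, then either the inline Value or
// the Value's data block address (Int64) plus its contiguous block count (UInt16).
private static void ReadNodeHeaderSketch(System.IO.BinaryReader reader, int slotLength)
{
    short count = reader.ReadInt16();             // number of occupied slots
    int hintSizeOnDisk = reader.ReadInt32();      // serialized size hint for the node
    long parentAddress = reader.ReadInt64();      // address of the parent node
    var childrenAddresses = new long[slotLength + 1];
    for (int i = 0; i <= slotLength; i++)
        childrenAddresses[i] = reader.ReadInt64();    // child node addresses, -1 when absent
}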

//private const int ObjectBlockCountThreshold = 64;

/// <summary>
/// If in AutoFlush mode and the data Value is not saved in the Key segment,
/// this method will Add/Update the Big Data Value on disk and, at the end of the process,
/// set the Value to null to conserve memory.
/// </summary>
/// <param name="item"></param>
/// <returns></returns>
private bool SetBigDataValue(BTreeItemOnDisk item)
{
    if (!IsDataInKeySegment)
    {
        // load meta data so the in-memory Blocks layout can get recreated...
        long da = GetId(item.Value.DiskBuffer);
        if (da >= 0)
        {
            if (!item.Value.DiskBuffer.IsFolded)
            {
                if (item.Value.DiskBuffer.IsFoldedInfoOnDisk)
                {
                    // read the block chain info so these blocks will not be replaced during update!
                    var metaBlocks = DataBlockDriver.ReadBlockInfoFromDisk(this, da);
                    // assign the read block chain info to a DataBlock, simulating a "Folded" scenario.
                    item.Value.diskBuffer.foldedDataAddresses = metaBlocks.ToArray();
                    item.Value.diskBuffer.contiguousBlockCount = metaBlocks[0].BlockCount;
                }
            }
            // delete the blocks on disk if they are big and the data being set is null
            if (item.Value.Data == null && item.Value.DiskBuffer.IsFolded &&
                item.Value.DiskBuffer.CountMembers() >= 1024)
            {
                DataSet.DataBlockDriver.Remove(DataSet, item.Value.DiskBuffer);
                item.Value.DiskBuffer = CreateBlock();
            }
        }
        // save big data to disk
        if (item.Value.diskBuffer != null)
        {
            item.Value.diskBuffer.Unfold(this);
        }
        Sop.DataBlock b = WriteToBlock(item.Value);
        DataSet.DataBlockDriver.SetDiskBlock(DataSet, b, false, false);
        var cnt = b.ProcessHeadSets();
        // store blocks to the pool if block count < BigDataBlockCount, otherwise release the data
        // (and, above 500 blocks, write directly to disk)!
        if (cnt < File.Profile.BigDataBlockCount)
        {
            AddToBlocks(b, Blocks);
            item.ValueLoaded = true;
            item.Value.IsDirty = false;
            item.IsDirty = false;
            item.Value.diskBuffer.Fold();
        }
        else
        {
            if (cnt <= 500)
            {
                AddToBlocks(b, Blocks);
            }
            else
            {
                // direct write to disk
                var blocks = new Collections.Generic.SortedDictionary<long, Sop.DataBlock>();
                AddToBlocks(b, blocks);
                WriteBlocksToDisk(DataSet, blocks, false);
            }
            item.IsDirty = false;
            item.Value.IsDirty = false;
            item.ValueLoaded = false;
            item.Value.diskBuffer.Fold();
            // nullify big data to conserve memory
            item.Value.Data = null;
        }
        return true;
    }
    return false;
}
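
// Illustrative sketch only -- NOT part of the library. Restates the block-count routing that
// SetBigDataValue() above applies after writing the value's blocks: chains below the profile's
// BigDataBlockCount stay pooled with their Data cached; chains at or above it have their Data
// released, and chains above 500 blocks are additionally written straight to disk.
private static bool WriteDirectlyToDiskSketch(int blockCount, int bigDataBlockCount)
{
    // true only for the "release data and bypass the pool" path in the method above
    return blockCount >= bigDataBlockCount && blockCount > 500;
}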

/// <summary>
/// Registers a recycled (deleted) block region, merging it with an adjacent or overlapping
/// region when possible. Returns the address of the surviving entry, or 0.
/// </summary>
private long addAvailableBlock(long dataAddress, long dataSize)
{
    if (_isDuringMaintenance)
    {
        long l;
        if (_duringMaintenanceItems.TryGetValue(dataAddress, out l))
        {
            if (dataSize > l)
            {
                _duringMaintenanceItems[dataAddress] = dataSize;
            }
        }
        else
        {
            _duringMaintenanceItems.Add(dataAddress, dataSize);
        }
        return 0;
    }
    var itm = new BTreeItemOnDisk(File.DataBlockSize, dataAddress, dataSize)
    {
        Value = { DiskBuffer = CreateBlock() }
    };
    BeginTreeMaintenance();
    try
    {
        if (Count == 0 || RootNode.Count == 0)
        {
            bool adjustCount = Count == 1 && RootNode.Count == 0;
            _isDuringMaintenance = true;
            Add(itm);
            _isDuringMaintenance = false;
            AddMaintenanceAddedItems();
            if (adjustCount)
            {
                UpdateCount(UpdateCountType.Decrement);
            }
        }
        else if (!this.Search(dataAddress))
        {
            //** Detect and merge contiguous deleted blocks
            short passCount = 0;
            if (!this.MovePrevious())
            {
                this.MoveFirst();
            }
            while (true)
            {
                var item = (BTreeItemOnDisk)this.CurrentEntry;
                long k2 = (long)item.Key;        // existing region's address
                object cv = CurrentValue;
                long i = (long)cv;               // existing region's size
                if (_region.Equals(dataAddress, dataSize, k2, i) ||
                    _region.FirstWithinSecond(dataAddress, dataSize, k2, i))
                {
                    // new region is the same as, or contained in, the existing one: keep the existing entry
                    return k2;
                }
                else if (_region.FirstWithinSecond(k2, i, dataAddress, dataSize))
                {
                    // existing region is contained in the new one: replace it with the new region
                    _isDuringMaintenance = true;
                    Remove();
                    itm.Key = dataAddress;
                    itm.Value.Data = dataSize;
                    Add(itm);
                    _isDuringMaintenance = false;
                    AddMaintenanceAddedItems();
                    return dataAddress;
                }
                else if (dataAddress + dataSize == k2)
                {
                    // new region immediately precedes the existing one: merge if it still fits a segment
                    long newSize = i + dataSize;
                    if (newSize <= _segmentSize)
                    {
                        _isDuringMaintenance = true;
                        Remove();
                        itm.Key = dataAddress;
                        itm.Value.Data = newSize;
                        Add(itm);
                        _isDuringMaintenance = false;
                        AddMaintenanceAddedItems();
                        return dataAddress;
                    }
                    break;
                }
                else if (k2 + i == dataAddress)
                {
                    // new region immediately follows the existing one: grow the existing entry if it fits
                    if (i + dataSize <= _segmentSize)
                    {
                        _isDuringMaintenance = true;
                        CurrentValue = i + dataSize;
                        _isDuringMaintenance = false;
                        AddMaintenanceAddedItems();
                        return k2;
                    }
                    break;
                }
                else if (++passCount >= 2)
                {
                    break;
                }
                if (!MoveNext())
                {
                    break;
                }
            }
            // no merge was possible: add the new region as its own entry
            _isDuringMaintenance = true;
            this.Add(itm);
            _isDuringMaintenance = false;
            AddMaintenanceAddedItems();
        }
        else
        {
            // an entry with the same address exists: keep the larger size
            long currSize = (long)CurrentValue;
            if (currSize < dataSize)
            {
                _isDuringMaintenance = true;
                CurrentValue = dataSize;
                _isDuringMaintenance = false;
                AddMaintenanceAddedItems();
            }
        }
    }
    finally
    {
        EndTreeMaintenance();
    }
    return 0;
}
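
// Illustrative sketch only -- NOT part of the library. The adjacency test addAvailableBlock()
// above applies when deciding whether a newly recycled region can be merged with an existing
// entry: the two regions must touch end-to-start in either direction, and the combined size
// must still fit within a segment (_segmentSize in the method above).
private static bool CanMergeSketch(long firstAddress, long firstSize,
                                   long secondAddress, long secondSize, long segmentSize)
{
    bool adjacent = firstAddress + firstSize == secondAddress
                 || secondAddress + secondSize == firstAddress;
    return adjacent && firstSize + secondSize <= segmentSize;
}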