private unsafe USlice Store(Slice data)
		{
			uint size = checked((uint)data.Count);
			var buffer = m_keys.AllocateAligned(size);
			UnmanagedHelpers.CopyUnsafe(buffer, data);
			return new USlice(buffer, size);
		}
Example #2
 public ActivityEntry(DebugActionType actionType, Slice key, string treeName, object value)
 {
     ActionType = actionType;
     Key = key;
     TreeName = treeName;
     Value = value;
 }
Example #3
        public void TestEmptySliceAtZero()
        {
            Slice<char> x = new Slice<char>("test!");
            x = x.Slc(0, 0);

            Assert.AreEqual("", x.ToString());
        }
Example #4
        // Initialize *this for looking up user_key at a snapshot with
        // the specified sequence number.
        public LookupKey(Slice user_key, UInt64 sequence)
        {
            int usize = user_key.Size;
            int needed = usize + 13;  // A conservative estimate
            ByteArrayPointer dst;

            if (needed <= space_.Length)
            {
                dst = new ByteArrayPointer(space_);
            }
            else
            {
                dst = new ByteArrayPointer(needed);
            }

            start_ = dst;
            dst = Coding.EncodeVarint32(dst, (uint)(usize + 8));
            kstart_ = dst;

            user_key.Data.CopyTo(dst, usize);

            dst += usize;

            Coding.EncodeFixed64(dst, Global.PackSequenceAndType(sequence, Global.kValueTypeForSeek));
            end_ = dst + 8;
        }
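Assuming LevelDB's usual encodings (LEB128 varint32 and little-endian fixed64), a worked instance of the layout built above: with a 3-byte user_key of "bar", sequence = 5 and a type byte of 1, the buffer holds 12 bytes: one byte 0x0B for varint32(3 + 8), the three key bytes, then the 8-byte little-endian encoding of (5 << 8) | 1. start_ then points at offset 0, kstart_ at offset 1, and end_ at offset 12.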
Example #5
        public void TestEmptySliceAtEnd()
        {
            Slice<char> x = new Slice<char>("test!");
            x = x.Slc(x.Len());

            Assert.AreEqual("", x.ToString());
        }
Example #6
        public void TestEmptySliceAtMid()
        {
            Slice<int> x = new Slice<int>(new[] { 1, 2, 3, 4 });
            x = x.Slc(1, 1);

            Assert.IsEmpty(x.ToArray());
        }
		public ReadResult Read(string treeName, Slice key, WriteBatch writeBatch = null)
		{
		    Tree tree = null;

			if (writeBatch != null)
			{
				WriteBatch.BatchOperationType operationType;
			    Stream stream;
			    ushort? version;
			    if (writeBatch.TryGetValue(treeName, key, out stream, out version, out operationType))
			    {
			        if (!version.HasValue) 
                        tree = GetTree(treeName);

					switch (operationType)
					{
						case WriteBatch.BatchOperationType.Add:
					    {
					        var reader = new ValueReader(stream);
					        return new ReadResult(reader, version.HasValue ? (ushort)(version.Value + 1) : tree.ReadVersion(key));
					    }
						case WriteBatch.BatchOperationType.Delete:
							return null;
					}
				}
			}

		    if (tree == null) 
                tree = GetTree(treeName);

			return tree.Read(key);
		}
Example #8
    public bool TestCreateOverArray(Tester t)
    {
        for (int i = 0; i < 2; i++) {
            var ints = new int[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };

            // Try out two ways of creating a slice:
            Slice<int> slice;
            if (i == 0) {
                slice = new Slice<int>(ints);
            }
            else {
                slice = ints.Slice();
            }
            t.AssertEqual(ints.Length, slice.Length);

            // Now try out two ways of walking the slice's contents:
            for (int j = 0; j < ints.Length; j++) {
                t.AssertEqual(ints[j], slice[j]);
            }
            {
                int j = 0;
                foreach (var x in slice) {
                    t.AssertEqual(ints[j], x);
                    j++;
                }
            }
        }
        return true;
    }
Example #9
        /// <summary>
        /// OnData event is the primary entry point for your algorithm. Each new data point will be pumped in here.
        /// </summary>
        /// <param name="data">Slice object keyed by symbol containing the stock data</param>
        public override void OnData(Slice data)
        {
            if (_consolidator.Consolidated == null) return;

            // submit a limit order for yesterday's low each morning
            if (Time.TimeOfDay == new TimeSpan(9, 30, 1))
            {
                var newOrderTicket = LimitOrder("SPY", 100, .995m*((TradeBar)_consolidator.Consolidated).Low);
                _tickets.Add(newOrderTicket);
            }

            if (_tickets.Count == 0) return;

            var lastTicket = _tickets.Last();

            // start bringing in the limit at noon, adding a penny each time
            var afterNoonEvery15Minutes = Time.TimeOfDay >= new TimeSpan(12, 0, 0) && Time.TimeOfDay.Minutes%15 == 0 && Time.TimeOfDay.Seconds == 0;
            if (afterNoonEvery15Minutes && lastTicket.Status.IsOpen())
            {
                // move the limit price a penny higher
                lastTicket.Update(new UpdateOrderFields {LimitPrice = lastTicket.Get(OrderField.LimitPrice) + 0.01m});
            }

            if (Time.TimeOfDay >= new TimeSpan(15, 30, 0) && lastTicket.Status.IsOpen())
            {
                lastTicket.Cancel();
            }
        }
Example #10
		public bool Contains(string treeName, Slice key, out ushort? version, WriteBatch writeBatch = null)
		{
			if (writeBatch != null)
			{
				WriteBatch.BatchOperationType operationType;
				Stream stream;
				if (writeBatch.TryGetValue(treeName, key, out stream, out version, out operationType))
				{
					switch (operationType)
					{
						case WriteBatch.BatchOperationType.Add:
							return true;
						case WriteBatch.BatchOperationType.Delete:
							return false;
						default:
							throw new ArgumentOutOfRangeException(operationType.ToString());
					}
				}
			}

			var tree = GetTree(treeName);
			var readVersion = tree.ReadVersion(key);

			var exists = readVersion > 0;

			version = exists ? (ushort?)readVersion : null;

			return exists;
		}
        public FoundPage Find(Slice key)
        {
            for (int i = 0; i < _cache.Length; i++)
            {
                var page = _cache[i];
                if (page == null)
                    continue;

                var first = page.FirstKey;
                var last = page.LastKey;
                
                switch (key.Options)
                {
                    case SliceOptions.BeforeAllKeys:
                        if (first.Options == SliceOptions.BeforeAllKeys)
                            return page;
                        break;
                    case SliceOptions.AfterAllKeys:
                        if (last.Options == SliceOptions.AfterAllKeys)
                            return page;
                        break;
                    case SliceOptions.Key:
                        if ((first.Options != SliceOptions.BeforeAllKeys && key.Compare(first) < 0))
                            continue;
                        if (last.Options != SliceOptions.AfterAllKeys && key.Compare(last) > 0)
                            continue;
                        return page;
                    default:
                        throw new ArgumentException(key.Options.ToString());
                }
            }

            return null;
        }
		internal FdbWatch(FdbFuture<Slice> future, Slice key, Slice value)
		{
			Contract.Requires(future != null);
			m_future = future;
			m_key = key;
			m_value = value;
		}
Example #13
        // Add an entry into memtable that maps key to value at the
        // specified sequence number and with the specified type.
        // Typically value will be empty if type==kTypeDeletion.
        public void Add(UInt64 seq, int type,
						 Slice key,
						 Slice value)
        {
            // Format of an entry is concatenation of:
            //  key_size     : varint32 of internal_key.size()
            //  key bytes    : char[internal_key.size()]
            //  value_size   : varint32 of value.size()
            //  value bytes  : char[value.size()]
            int key_size = key.Size;
            int val_size = value.Size;
            int internal_key_size = key_size + 8;
            int encoded_len =
                    Coding.VarintLength(internal_key_size) + internal_key_size +
                    Coding.VarintLength(val_size) + val_size;
            ByteArrayPointer buf = new ByteArrayPointer(encoded_len);
            ByteArrayPointer p = Coding.EncodeVarint32(buf, (uint)internal_key_size);

            key.Data.CopyTo(p, key_size);

            p += key_size;
            Coding.EncodeFixed64(p, (seq << 8) | (UInt64)type);

            p += 8;
            p = Coding.EncodeVarint32(p, (uint)val_size);

            value.Data.CopyTo(p, val_size);

            Debug.Assert((p + val_size) - buf == encoded_len);
            m_table.Insert(buf);
        }
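Since the comment above fully specifies the entry layout, it can also be read back. The following is a hedged, self-contained decode sketch over a plain byte[] rather than a ByteArrayPointer; it assumes LevelDB's LEB128 varint32 encoding and a little-endian host, and DecodeEntry / GetVarint32 are illustrative names, not part of this port.

        // Sketch only: split one encoded memtable entry back into (user key, sequence, type, value).
        public static (byte[] UserKey, ulong Sequence, int Type, byte[] Value) DecodeEntry(byte[] buf)
        {
            int pos = 0;
            int internalKeySize = (int)GetVarint32(buf, ref pos);
            int userKeySize = internalKeySize - 8;                // last 8 bytes of the internal key are the tag

            var userKey = new byte[userKeySize];
            Array.Copy(buf, pos, userKey, 0, userKeySize);
            pos += userKeySize;

            ulong tag = BitConverter.ToUInt64(buf, pos);          // fixed64; assumes a little-endian host
            pos += 8;

            int valueSize = (int)GetVarint32(buf, ref pos);
            var value = new byte[valueSize];
            Array.Copy(buf, pos, value, 0, valueSize);

            return (userKey, tag >> 8, (int)(tag & 0xff), value); // tag = (sequence << 8) | type
        }

        // LEB128 decoder matching the output of Coding.EncodeVarint32.
        private static uint GetVarint32(byte[] buf, ref int pos)
        {
            uint result = 0;
            for (int shift = 0; ; shift += 7)
            {
                byte b = buf[pos++];
                result |= (uint)(b & 0x7f) << shift;
                if ((b & 0x80) == 0) return result;
            }
        }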
Example #14
        public override void FindShortestSeparator(ref string start, Slice limit)
        {
            // Find length of common prefix
            int minLength = Math.Min(start.Length, limit.Size);
            int diffIndex = 0;

            while ((diffIndex < minLength) && (start[diffIndex].Equals((char)limit[diffIndex])))
            {
                diffIndex++;
            }

            if (diffIndex >= minLength)
            {
                // Do not shorten if one string is a prefix of the other
            }
            else
            {
                byte diffByte = (byte)start[diffIndex];
                if (diffByte < 0xff &&
                    diffByte + 1 < limit[diffIndex])
                {
                    start = start.Set(diffIndex,(char) (start[diffIndex] + 1));

                    start = start.Resize(diffIndex + 1);

                    Debug.Assert(Compare(new Slice(start), limit) < 0);
                }
            }
        }
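A worked instance of the shortening rule above: with start = "foo" and limit = "fox", the common prefix is "fo" (diffIndex = 2); since 'o' + 1 = 'p' is still below 'x', start is shortened to "fop", which orders at or above the original "foo" and strictly below "fox".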
        /// <summary>
        /// OnData event is the primary entry point for your algorithm. Each new data point will be pumped in here.
        /// </summary>
        /// <param name="data">Slice object keyed by symbol containing the stock data</param>
        public override void OnData(Slice data)
        {
            if (Transactions.OrdersCount == 0)
            {
                MarketOrder("SPY", 100);
            }

            foreach (var kvp in data.Delistings)
            {
                _delistedSymbols.Add(kvp.Key);
            }

            if (_changes != null && _changes.AddedSecurities.All(x => data.Bars.ContainsKey(x.Symbol)))
            {
                foreach (var security in _changes.AddedSecurities)
                {
                    Log(Time + ": Added Security: " + security.Symbol.ID);
                    MarketOnOpenOrder(security.Symbol, 100);
                }
                foreach (var security in _changes.RemovedSecurities)
                {
                    Log(Time + ": Removed Security: " + security.Symbol.ID);
                    if (!_delistedSymbols.Contains(security.Symbol))
                    {
                        MarketOnOpenOrder(security.Symbol, -100);
                    }
                }
                _changes = null;
            }
        }
Example #16
        /// <summary>
        /// OnData event is the primary entry point for your algorithm. Each new data point will be pumped in here.
        /// </summary>
        /// <param name="data">Slice object keyed by symbol containing the stock data</param>
        public override void OnData(Slice data)
        {
            // MARKET ORDERS

            MarketOrders();

            // LIMIT ORDERS

            LimitOrders();

            // STOP MARKET ORDERS

            StopMarketOrders();

            // STOP LIMIT ORDERS

            StopLimitOrders();

            // MARKET ON OPEN ORDERS

            MarketOnOpenOrders();

            // MARKET ON CLOSE ORDERS

            MarketOnCloseOrders();
        }
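The body above delegates to helper methods and never touches the Slice argument; as a reminder of how data is typically pulled out of it, here is a minimal hedged sketch of an OnData handler. It assumes "SPY" was subscribed in Initialize(), uses only members that appear elsewhere in these examples (data.Bars, Portfolio.Invested, MarketOrder), and leans on Lean's string-to-Symbol conversion for the "SPY" lookups.

        public override void OnData(Slice data)
        {
            if (!data.Bars.ContainsKey("SPY")) return;   // nothing for our symbol in this time slice

            var bar = data.Bars["SPY"];                  // TradeBar for the current step
            if (!Portfolio.Invested && bar.Close > bar.Open)
            {
                MarketOrder("SPY", 100);                 // enter once, on an up bar
            }
        }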
Example #17
 public PageSplitter(Transaction tx,
     Tree tree,
     SliceComparer cmp,
     Slice newKey,
     int len,
     long pageNumber,
     NodeFlags nodeType,
     ushort nodeVersion,
     Cursor cursor,
     TreeMutableState treeState)
 {
     _tx = tx;
     _tree = tree;
     _cmp = cmp;
     _newKey = newKey;
     _len = len;
     _pageNumber = pageNumber;
     _nodeType = nodeType;
     _nodeVersion = nodeVersion;
     _cursor = cursor;
     _treeState = treeState;
     Page page = _cursor.Pages.First.Value;
     _page = tx.ModifyPage(page.PageNumber, page);
     _cursor.Pop();
 }
Example #18
        private Status(CodeEnum code, Slice msg, Slice msg2)
        {
            Debug.Assert(code != CodeEnum.kOk);
            int len1 = msg.Size;

            int len2 = 0;

            if (msg2 != null)
            {
                len2 = msg2.Size;
            }

            int size = len1 + (len2 != 0 ? (2 + len2) : 0);
            byte[] result = new byte[size + 5];

            Buffer.BlockCopy(BitConverter.GetBytes(size), 0, result, 0, 4);

            result[4] = (byte)code;

            msg.Data.CopyTo(result, 5, len1);

            if (len2 != 0)
            {
                result[5 + len1] = (byte)':';
                result[6 + len1] = (byte)' ';

                msg2.Data.CopyTo(result, 7 + len1, len2);
            }

            m_state = result;
        }
Example #19
        public bool Match(Slice slice)
        {
            BinaryExpression shift;
            id = slice.Expression as Identifier;
            if (id != null)
            {
                shift = ctx.GetValue(id) as BinaryExpression;
            }
            else
            {
                shift = slice.Expression as BinaryExpression;
            }
            if (shift == null)
                return false;
            if (shift.Operator != BinaryOperator.Shl)
                return false;
            Constant c = shift.Right as Constant;
            if (c == null)
                return false;
            if (c.ToInt32() != slice.Offset)
                return false;

            expr = shift.Left;
            dt = slice.DataType;
            return true;
        }
Example #20
        public void Indexing_ThrowsIfOutOfRange()
        {
            var inner = new[] { 1, 2, 3, 4, 5 };
            var subject = new Slice<int>(inner, 1, 2);

            Check.ThatCode(() => subject[3]).Throws<IndexOutOfRangeException>();
        }
Example #21
        public void IsEnumerable()
        {
            var inner = new[] { 1, 2, 3, 4 };
            var subject = new Slice<int>(inner, 0, 2);

            Check.That((IEnumerable<int>)subject).ContainsExactly(1, 2);
        }
Example #22
        public void NegativeLengthGoesToZero()
        {
            var inner = new[] { 1, 2, 3, 4, 5 };
            var subject = new Slice<int>(inner, 1, -1);

            Check.That(subject.Length).IsEqualTo(0);
        }
Example #23
        public bool Match(Slice slice)
        {
            MemoryAccess acc = slice.Expression as MemoryAccess;
            if (acc == null)
                return false;

            b = acc.EffectiveAddress;
            Constant offset = Constant.Create(b.DataType, 0);
            BinaryOperator op = Operator.IAdd;
            BinaryExpression ea = b as BinaryExpression;
            if (ea != null)
            {
                Constant c= ea.Right as Constant;
                if (c != null)
                {
                    offset = c;
                    b = ea.Left;
                }
            }
            else
            {
                b = acc.EffectiveAddress;
            }
            int bitBegin = slice.Offset;
            int bitEnd = bitBegin + slice.DataType.BitSize;
            if (0 <= bitBegin && bitEnd <= acc.DataType.BitSize)
            {
                offset = op.ApplyConstants(offset, Constant.Create(acc.EffectiveAddress.DataType, slice.Offset / 8));
                b = new MemoryAccess(acc.MemoryId, new BinaryExpression(op, offset.DataType, b, offset), slice.DataType);
                return true;
            }
            return false;
        }
        /// <summary>
        /// Event - v3.0 DATA EVENT HANDLER: (Pattern) Basic template for user to override for receiving all subscription data in a single event
        /// </summary>
        /// <param name="slice">The current slice of data keyed by symbol string</param>
        public override void OnData(Slice slice)
        {
            if (!Portfolio.Invested)
            {
                OptionChain chain;
                if (slice.OptionChains.TryGetValue(OptionSymbol, out chain))
                {
                    // find the second call strike under market price expiring today
                    var contract = (
                        from optionContract in chain.OrderByDescending(x => x.Strike)
                        where optionContract.Right == OptionRight.Call
                        where optionContract.Expiry == Time.Date
                        where optionContract.Strike < chain.Underlying.Price
                        select optionContract
                        ).Skip(2).FirstOrDefault();

                    if (contract != null)
                    {
                        var quantity = CalculateOrderQuantity(contract.Symbol, -1m);
                        MarketOrder(contract.Symbol, quantity);
                        MarketOnCloseOrder(contract.Symbol, -quantity);
                    }
                }
            }
        }
Example #25
	    public static void DumpHumanReadable(Transaction tx, string path, Page start)
	    {
		    using (var writer = File.CreateText(path))
		    {
                var stack = new Stack<Page>();
                stack.Push(start);
				writer.WriteLine("Root page #{0}",start.PageNumber);
			    while (stack.Count > 0)
			    {
					var currentPage = stack.Pop();
				    if (currentPage.IsLeaf)
				    {						
						writer.WriteLine();
						writer.WriteLine("Page #{0}, NumberOfEntries = {1}, Flags = {2} (Leaf), Used: {3} : {4}", currentPage.PageNumber,currentPage.NumberOfEntries,currentPage.Flags, currentPage.SizeUsed, currentPage.CalcSizeUsed());
						if(currentPage.NumberOfEntries <= 0)
							writer.WriteLine("Empty page (tree corrupted?)");
					    
						
					    for (int nodeIndex = 0; nodeIndex < currentPage.NumberOfEntries;nodeIndex++)
					    {
						    var node = currentPage.GetNode(nodeIndex);
						    var key = currentPage.GetNodeKey(node);

							writer.WriteLine("Node #{0}, Flags = {1}, {4} = {2}, Key = {3}, Entry Size: {5}", nodeIndex, node->Flags, node->DataSize, MaxString(key.ToString(), 25), node->Flags == NodeFlags.Data ? "Size" : "Page",
                                SizeOf.NodeEntry(node));
					    }
						writer.WriteLine();
				    }
				    else if(currentPage.IsBranch) 
				    {
						writer.WriteLine();
						writer.WriteLine("Page #{0}, NumberOfEntries = {1}, Flags = {2} (Branch), Used: {3} : {4}", currentPage.PageNumber, currentPage.NumberOfEntries, currentPage.Flags, currentPage.SizeUsed, currentPage.SizeUsed);

						var key = new Slice(SliceOptions.Key);
						for (int nodeIndex = 0; nodeIndex < currentPage.NumberOfEntries; nodeIndex++)
						{
							var node = currentPage.GetNode(nodeIndex);
							writer.WriteLine("Node #{2}, {0}  / to page #{1}, Entry Size: {3}", GetBranchNodeString(nodeIndex, key, currentPage, node), node->PageNumber, nodeIndex,
                                SizeOf.NodeEntry(node));
						}

						for (int nodeIndex = 0; nodeIndex < currentPage.NumberOfEntries; nodeIndex++)
						{
							var node = currentPage.GetNode(nodeIndex);
							if (node->PageNumber < 0 || node->PageNumber > tx.State.NextPageNumber)
							{
								writer.Write("Found invalid reference to page #{0}", currentPage.PageNumber);
								stack.Clear();
								break;
							}

							var child = tx.GetReadOnlyPage(node->PageNumber);
							stack.Push(child);
						}
						
						writer.WriteLine();
					}
			    }
		    }
	    }
		/// <summary>Opens the directory with the given <paramref name="name"/>.
		/// If the directory does not exist, it is created (creating parent directories if necessary).
		/// If layer is specified, it is checked against the layer of an existing directory or set as the layer of a new directory.
		/// </summary>
		public static Task<FdbDirectorySubspace> CreateOrOpenAsync(this IFdbDirectory directory, IFdbTransactional db, string name, Slice layer, CancellationToken cancellationToken)
		{
			if (directory == null) throw new ArgumentNullException("directory");
			if (db == null) throw new ArgumentNullException("db");
			if (name == null) throw new ArgumentNullException("name");
			return db.ReadWriteAsync((tr) => directory.CreateOrOpenAsync(tr, new[] { name }, layer), cancellationToken);
		}
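A hedged usage sketch of this extension: rootDirectory, db and ct stand for values the caller already holds (an IFdbDirectory, an IFdbTransactional and a CancellationToken), and the directory name is made up for illustration.

		public static Task<FdbDirectorySubspace> OpenReportsAsync(IFdbDirectory rootDirectory, IFdbTransactional db, CancellationToken ct)
		{
			// Creates "reports" under rootDirectory if it is missing, otherwise opens it;
			// Slice.Nil is passed to mean "no layer to check or set".
			return rootDirectory.CreateOrOpenAsync(db, "reports", Slice.Nil, ct);
		}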
Example #27
        public Status DecodeFrom(ref Slice input)
        {
            ByteArrayPointer magicPtr = input.Data + (kEncodedLength - 8);

            UInt32 magicLo = Coding.DecodeFixed32(magicPtr);
            UInt32 magicHi = Coding.DecodeFixed32(magicPtr + 4);

            UInt64 magic = (((UInt64)(magicHi) << 32) | ((UInt64)(magicLo)));

            if (magic != FormatHelper.kTableMagicNumber)
            {
                return Status.InvalidArgument("not an sstable (bad magic number)");
            }

            Status result = MetaindexHandle.DecodeFrom(ref input);
            if (result.IsOk)
            {
                result = IndexHandle.DecodeFrom(ref input);
            }
            if (result.IsOk)
            {
                // We skip over any leftover data (just padding for now) in "input"
                ByteArrayPointer end = magicPtr + 8;
                input = new Slice(end, input.Data + input.Size - end);
            }

            return result;
        }
		/// <summary>Opens the directory with the given <paramref name="path"/>.
		/// If the directory does not exist, it is created (creating parent directories if necessary).
		/// If layer is specified, it is checked against the layer of an existing directory or set as the layer of a new directory.
		/// </summary>
		public static Task<FdbDirectorySubspace> CreateOrOpenAsync(this IFdbDirectory directory, IFdbTransactional db, IEnumerable<string> path, Slice layer, CancellationToken cancellationToken)
		{
			if (directory == null) throw new ArgumentNullException("directory");
			if (db == null) throw new ArgumentNullException("db");
			if (path == null) throw new ArgumentNullException("path");
			return db.ReadWriteAsync((tr) => directory.CreateOrOpenAsync(tr, path, layer), cancellationToken);
		}
Example #29
		/// <summary>
		/// Write <paramref name="data"/> to the blob, starting at <paramref name="offset"/> and overwriting any existing data at that location. The length of the blob is increased if necessary.
		/// </summary>
		public static Task WriteAsync(this FdbBlob blob, IFdbTransactional db, long offset, Slice data, CancellationToken cancellationToken)
		{
			if (blob == null) throw new ArgumentNullException("blob");
			if (db == null) throw new ArgumentNullException("db");

			return db.ReadWriteAsync((tr) => blob.WriteAsync(tr, offset, data), cancellationToken);
		}
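A hedged usage sketch, with blob, db and ct again standing for values the caller already has; the payload is built with Slice.Create, which appears in the reader examples further down.

		public static Task WriteGreetingAsync(FdbBlob blob, IFdbTransactional db, CancellationToken ct)
		{
			var data = Slice.Create(new byte[] { 0x68, 0x69 }); // "hi"
			// Overwrites two bytes at offset 0, growing the blob if it was shorter than that.
			return blob.WriteAsync(db, 0, data, ct);
		}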
Example #30
		public bool Seek(Slice key)
		{
			if (this.ValidateCurrentKey(Current, _cmp) == false)
				return false;
			CurrentKey = NodeHeader.GetData(_tx, _item);
			return true;
		}
Example #31
        public static void DumpHumanReadable(Transaction tx, string path, Page start)
        {
            using (var writer = File.CreateText(path))
            {
                var stack = new Stack <Page>();
                stack.Push(start);
                writer.WriteLine("Root page #{0}", start.PageNumber);
                while (stack.Count > 0)
                {
                    var currentPage = stack.Pop();
                    if (currentPage.IsLeaf)
                    {
                        writer.WriteLine();
                        writer.WriteLine("Page #{0}, NumberOfEntries = {1}, Flags = {2} (Leaf), Used: {3} : {4}", currentPage.PageNumber, currentPage.NumberOfEntries, currentPage.Flags, currentPage.SizeUsed, currentPage.CalcSizeUsed());
                        if (currentPage.NumberOfEntries <= 0)
                        {
                            writer.WriteLine("Empty page (tree corrupted?)");
                        }


                        for (int nodeIndex = 0; nodeIndex < currentPage.NumberOfEntries; nodeIndex++)
                        {
                            var node = currentPage.GetNode(nodeIndex);
                            var key  = currentPage.GetNodeKey(node);

                            writer.WriteLine("Node #{0}, Flags = {1}, {4} = {2}, Key = {3}, Entry Size: {5}", nodeIndex, node->Flags, node->DataSize, MaxString(key.ToString(), 25), node->Flags == NodeFlags.Data ? "Size" : "Page",
                                             SizeOf.NodeEntry(node));
                        }
                        writer.WriteLine();
                    }
                    else if (currentPage.IsBranch)
                    {
                        writer.WriteLine();
                        writer.WriteLine("Page #{0}, NumberOfEntries = {1}, Flags = {2} (Branch), Used: {3} : {4}", currentPage.PageNumber, currentPage.NumberOfEntries, currentPage.Flags, currentPage.SizeUsed, currentPage.SizeUsed);

                        var key = new Slice(SliceOptions.Key);
                        for (int nodeIndex = 0; nodeIndex < currentPage.NumberOfEntries; nodeIndex++)
                        {
                            var node = currentPage.GetNode(nodeIndex);
                            writer.WriteLine("Node #{2}, {0}  / to page #{1}, Entry Size: {3}", GetBranchNodeString(nodeIndex, key, currentPage, node), node->PageNumber, nodeIndex,
                                             SizeOf.NodeEntry(node));
                        }

                        for (int nodeIndex = 0; nodeIndex < currentPage.NumberOfEntries; nodeIndex++)
                        {
                            var node = currentPage.GetNode(nodeIndex);
                            if (node->PageNumber < 0 || node->PageNumber > tx.State.NextPageNumber)
                            {
                                writer.Write("Found invalid reference to page #{0}", currentPage.PageNumber);
                                stack.Clear();
                                break;
                            }

                            var child = tx.GetReadOnlyPage(node->PageNumber);
                            stack.Push(child);
                        }

                        writer.WriteLine();
                    }
                }
            }
        }
Example #32
        public bool Contains(SnapshotReader snapshot, Slice key, WriteBatch writeBatch)
        {
            ushort? version;

            return(Contains(snapshot, key, writeBatch, out version));
        }
Example #33
 public virtual IIterator MultiRead(SnapshotReader snapshot, Slice key)
 {
     return(snapshot.MultiRead(TableName, key));
 }
Example #34
        private long? TryFindLargeValue(Transaction tx, TreeIterator it, int num)
        {
            int         numberOfNeededFullSections = num / NumberOfPagesInSection;
            int         numberOfExtraBitsNeeded    = num % NumberOfPagesInSection;
            int         foundSections  = 0;
            MemorySlice startSection   = null;
            long?       startSectionId = null;
            var         sections       = new List <Slice>();

            do
            {
                var stream = it.CreateReaderForCurrent();
                {
                    var current          = new StreamBitArray(stream);
                    var currentSectionId = it.CurrentKey.CreateReader().ReadBigEndianInt64();

                    //need to find full free pages
                    if (current.SetCount < NumberOfPagesInSection)
                    {
                        ResetSections(ref foundSections, sections, ref startSection, ref startSectionId);
                        continue;
                    }

                    // these sections do not follow each other in memory
                    if (startSectionId != null && currentSectionId != startSectionId + foundSections)
                    {
                        ResetSections(ref foundSections, sections, ref startSection, ref startSectionId);
                    }

                    //set the first section of the sequence
                    if (startSection == null)
                    {
                        startSection   = it.CurrentKey;
                        startSectionId = currentSectionId;
                    }

                    sections.Add(it.CurrentKey);
                    foundSections++;

                    if (foundSections != numberOfNeededFullSections)
                    {
                        continue;
                    }

                    // we found enough full sections; now we just need a bit more
                    if (numberOfExtraBitsNeeded == 0)
                    {
                        foreach (var section in sections)
                        {
                            tx.FreeSpaceRoot.Delete(section);
                        }

                        return(startSectionId * NumberOfPagesInSection);
                    }

                    var nextSectionId = currentSectionId + 1;
                    var nextId        = new Slice(EndianBitConverter.Big.GetBytes(nextSectionId));
                    var read          = tx.FreeSpaceRoot.Read(nextId);
                    if (read == null)
                    {
                        //not a following next section
                        ResetSections(ref foundSections, sections, ref startSection, ref startSectionId);
                        continue;
                    }

                    var next = new StreamBitArray(read.Reader);

                    if (next.HasStartRangeCount(numberOfExtraBitsNeeded) == false)
                    {
                        //not enough start range count
                        ResetSections(ref foundSections, sections, ref startSection, ref startSectionId);
                        continue;
                    }

                    //mark selected bits to false
                    if (next.SetCount == numberOfExtraBitsNeeded)
                    {
                        tx.FreeSpaceRoot.Delete(nextId);
                    }
                    else
                    {
                        for (int i = 0; i < numberOfExtraBitsNeeded; i++)
                        {
                            next.Set(i, false);
                        }
                        tx.FreeSpaceRoot.Add(nextId, next.ToStream());
                    }

                    foreach (var section in sections)
                    {
                        tx.FreeSpaceRoot.Delete(section);
                    }

                    return(startSectionId * NumberOfPagesInSection);
                }
            } while (it.MoveNext());

            return(null);
        }
Example #35
        public static void Dump(Transaction tx, string path, Page start, int showNodesEvery = 25)
        {
            using (var writer = File.CreateText(path))
            {
                writer.WriteLine(@"
digraph structs {
    node [shape=Mrecord]
    rankdir=LR;
    bgcolor=transparent;
");

                var stack = new Stack <Page>();
                stack.Push(start);
                var references = new StringBuilder();
                while (stack.Count > 0)
                {
                    var p = stack.Pop();

                    writer.WriteLine(@"
    subgraph cluster_p_{0} {{ 
        label=""Page #{0}"";
        color={3};
    p_{0} [label=""Page: {0}|{1}|Entries: {2:#,#} | {4:p} : {5:p} utilization""];

", p.PageNumber, p.Flags, p.NumberOfEntries, p.IsLeaf ? "black" : "blue",
                                     Math.Round(((AbstractPager.PageSize - p.SizeLeft) / (double)AbstractPager.PageSize), 2),
                                     Math.Round(((AbstractPager.PageSize - p.CalcSizeLeft()) / (double)AbstractPager.PageSize), 2));
                    MemorySlice key = new Slice(SliceOptions.Key);
                    if (p.IsLeaf && showNodesEvery > 0)
                    {
                        writer.WriteLine("		p_{0}_nodes [label=\" Entries:", p.PageNumber);
                        for (int i = 0; i < p.NumberOfEntries; i += showNodesEvery)
                        {
                            if (i != 0 && showNodesEvery >= 5)
                            {
                                writer.WriteLine(" ... {0:#,#} keys redacted ...", showNodesEvery - 1);
                            }
                            var node = p.GetNode(i);
                            key = p.GetNodeKey(node);
                            writer.WriteLine("{0} - {2} {1:#,#}", MaxString(key.ToString(), 25),
                                             node->DataSize, node->Flags == NodeFlags.Data ? "Size" : "Page");
                        }
                        if (p.NumberOfEntries < showNodesEvery)
                        {
                            writer.WriteLine(" ... {0:#,#} keys redacted ...", p.NumberOfEntries - 1);
                        }
                        writer.WriteLine("\"];");
                    }
                    else if (p.IsBranch)
                    {
                        writer.Write("		p_{0}_refs [label=\"", p.PageNumber);
                        for (int i = 0; i < p.NumberOfEntries; i++)
                        {
                            var node = p.GetNode(i);

                            writer.Write("{3}<{2}> {0}  / to page {1}", GetBranchNodeString(i, key, p, node), node->PageNumber,
                                         i, i == 0 ? "" : "|");
                        }
                        writer.WriteLine("\"];");
                        var prev = -1L;
                        for (int i = 0; i < p.NumberOfEntries; i++)
                        {
                            var node = p.GetNode(i);
                            if (node->PageNumber < 0 || node->PageNumber > tx.State.NextPageNumber)
                            {
                                writer.Write("		p_{0}_refs [label=\"CORRUPTED\"; Color=RED];", p.PageNumber);
                                stack.Clear();
                                break;
                            }
                            var child = tx.GetReadOnlyPage(node->PageNumber);
                            stack.Push(child);

                            references.AppendFormat("	p_{0}_refs:{3} -> p_{1} [label=\"{2}\"];", p.PageNumber, child.PageNumber, GetBranchNodeString(i, key, p, node), i).AppendLine();
                            if (prev > -1)
                            {
                                references.AppendFormat("	p_{0} -> p_{1} [style=\"invis\"];", child.PageNumber, prev);
                            }

                            prev = child.PageNumber;
                        }
                    }
                    writer.WriteLine("	}");
                }
                writer.WriteLine(references.ToString());

                writer.WriteLine("}");
            }
        }
 /// <summary>Creates a new reader over a slice</summary>
 /// <param name="buffer">Slice that will be used as the underlying buffer</param>
 public SliceReader(Slice buffer)
 {
     m_buffer      = buffer;
     this.Position = 0;
 }
 /// <summary>Creates a reader on a segment of a byte array</summary>
 public static SliceReader FromBuffer(byte[] buffer, int offset, int count)
 {
     return(new SliceReader(Slice.Create(buffer, offset, count)));
 }
 /// <summary>Creates a reader on a byte array</summary>
 public static SliceReader FromBuffer(byte[] buffer)
 {
     return(new SliceReader(Slice.Create(buffer)));
 }
Example #39
        public unsafe void WriteReferences(CurrentIndexingScope indexingScope, RavenTransaction tx)
        {
            // IndexSchema:
            // having 'Users' and 'Addresses' we will end up with
            //
            // #Users (tree) - split by collection so we can easily return all items of the same collection to the indexing function
            // |- addresses/1 (key) -> [ users/1, users/2 ]
            // |- addresses/2 (key) -> [ users/3 ]
            //
            // References (tree) - used in delete operations
            // |- users/1 -> [ addresses/1 ]
            // |- users/2 -> [ addresses/1 ]
            // |- users/3 -> [ addresses/2 ]
            //
            // $Users (tree) - holding highest visible etag of 'referenced collection' per collection, so we will have a starting point for references processing
            // |- Addresses (key) -> 5
            if (indexingScope.ReferencesByCollection != null)
            {
                var referencesTree = tx.InnerTransaction.ReadTree(IndexSchema.References);

                foreach (var collections in indexingScope.ReferencesByCollection)
                {
                    var collectionTree = tx.InnerTransaction.CreateTree("#" + collections.Key); // #collection

                    foreach (var keys in collections.Value)
                    {
                        using (Slice.From(tx.InnerTransaction.Allocator, keys.Key, ByteStringType.Immutable, out Slice key))
                        {
                            foreach (var referenceKey in keys.Value)
                            {
                                collectionTree.MultiAdd(referenceKey, key);
                                referencesTree.MultiAdd(key, referenceKey);
                            }

                            RemoveReferences(key, collections.Key, keys.Value, tx);
                        }
                    }
                }
            }

            if (indexingScope.ReferenceEtagsByCollection != null)
            {
                foreach (var kvp in indexingScope.ReferenceEtagsByCollection)
                {
                    var collectionEtagTree = tx.InnerTransaction.CreateTree("$" + kvp.Key); // $collection
                    foreach (var collections in kvp.Value)
                    {
                        if (_referencedCollections.TryGetValue(collections.Key, out CollectionName collectionName) == false)
                        {
                            throw new InvalidOperationException(
                                      $"Could not find collection {collections.Key} in the index storage collections. Should not happen ever!");
                        }

                        using (Slice.From(tx.InnerTransaction.Allocator, collectionName.Name, ByteStringType.Immutable, out Slice collectionKey))
                        {
                            var etag = collections.Value;

                            var result  = collectionEtagTree.Read(collectionKey);
                            var oldEtag = result?.Reader.ReadLittleEndianInt64();
                            if (oldEtag >= etag)
                            {
                                continue;
                            }

                            using (Slice.External(tx.InnerTransaction.Allocator, (byte *)&etag, sizeof(long), out Slice etagSlice))
                                collectionEtagTree.Add(collectionKey, etagSlice);
                        }
                    }
                }
            }
        }
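The tree layout sketched in the comment at the top of WriteReferences can be pictured with plain dictionaries. This is a conceptual model only, with illustrative names; the real structures are Voron trees, not in-memory collections.

        static void ReferenceTreesShapeSketch()
        {
            // "#Users": referenced key -> documents of that collection that referenced it
            var usersByReferencedKey = new Dictionary<string, List<string>>
            {
                ["addresses/1"] = new List<string> { "users/1", "users/2" },
                ["addresses/2"] = new List<string> { "users/3" },
            };

            // "References": document -> referenced keys (used by delete operations)
            var referencesByDocument = new Dictionary<string, List<string>>
            {
                ["users/1"] = new List<string> { "addresses/1" },
                ["users/2"] = new List<string> { "addresses/1" },
                ["users/3"] = new List<string> { "addresses/2" },
            };

            // "$Users": referenced collection -> highest visible etag already processed
            var lastProcessedEtag = new Dictionary<string, long> { ["Addresses"] = 5 };
        }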
Example #40
        public unsafe IndexFailureInformation UpdateStats(DateTime indexingTime, IndexingRunStats stats)
        {
            if (_logger.IsInfoEnabled)
            {
                _logger.Info($"Updating statistics for '{_index.Name}'. Stats: {stats}.");
            }

            using (_contextPool.AllocateOperationContext(out TransactionOperationContext context))
                using (var tx = context.OpenWriteTransaction())
                {
                    var result = new IndexFailureInformation
                    {
                        Name = _index.Name
                    };

                    var table = tx.InnerTransaction.OpenTable(_errorsSchema, "Errors");

                    var statsTree = tx.InnerTransaction.ReadTree(IndexSchema.StatsTree);

                    result.MapAttempts  = statsTree.Increment(IndexSchema.MapAttemptsSlice, stats.MapAttempts);
                    result.MapSuccesses = statsTree.Increment(IndexSchema.MapSuccessesSlice, stats.MapSuccesses);
                    result.MapErrors    = statsTree.Increment(IndexSchema.MapErrorsSlice, stats.MapErrors);

                    var currentMaxNumberOfOutputs = statsTree.Read(IndexSchema.MaxNumberOfOutputsPerDocument)?.Reader.ReadLittleEndianInt32();

                    using (statsTree.DirectAdd(IndexSchema.MaxNumberOfOutputsPerDocument, sizeof(int), out byte *ptr))
                    {
                        *(int *)ptr = currentMaxNumberOfOutputs > stats.MaxNumberOfOutputsPerDocument
                        ? currentMaxNumberOfOutputs.Value
                        : stats.MaxNumberOfOutputsPerDocument;
                    }

                    if (_index.Type.IsMapReduce())
                    {
                        result.ReduceAttempts  = statsTree.Increment(IndexSchema.ReduceAttemptsSlice, stats.ReduceAttempts);
                        result.ReduceSuccesses = statsTree.Increment(IndexSchema.ReduceSuccessesSlice, stats.ReduceSuccesses);
                        result.ReduceErrors    = statsTree.Increment(IndexSchema.ReduceErrorsSlice, stats.ReduceErrors);
                    }

                    var binaryDate = indexingTime.ToBinary();
                    using (Slice.External(context.Allocator, (byte *)&binaryDate, sizeof(long), out Slice binaryDateslice))
                        statsTree.Add(IndexSchema.LastIndexingTimeSlice, binaryDateslice);

                    if (stats.Errors != null)
                    {
                        for (var i = Math.Max(stats.Errors.Count - MaxNumberOfKeptErrors, 0); i < stats.Errors.Count; i++)
                        {
                            var error          = stats.Errors[i];
                            var ticksBigEndian = Bits.SwapBytes(error.Timestamp.Ticks);
                            using (var document = context.GetLazyString(error.Document))
                                using (var action = context.GetLazyString(error.Action))
                                    using (var e = context.GetLazyString(error.Error))
                                    {
                                        var tvb = new TableValueBuilder
                                        {
                                            { (byte *)&ticksBigEndian, sizeof(long) },
                                            { document.Buffer, document.Size },
                                            { action.Buffer, action.Size },
                                            { e.Buffer, e.Size }
                                        };
                                        table.Insert(tvb);
                                    }
                        }

                        CleanupErrors(table);
                    }

                    tx.Commit();

                    return(result);
                }
        }
Example #41
 public AddConflictRangeCommand(Slice begin, Slice end, FdbConflictRangeType type)
 {
     this.Begin = begin;
     this.End   = end;
     this.Type  = type;
 }
Example #42
 public ClearRangeCommand(Slice begin, Slice end)
 {
     this.Begin = begin;
     this.End   = end;
 }
Example #43
 public AtomicCommand(Slice key, Slice param, FdbMutationType mutation)
 {
     this.Key      = key;
     this.Param    = param;
     this.Mutation = mutation;
 }
        private async Task <Optional <T> > PopHighContentionAsync([NotNull] IFdbDatabase db, CancellationToken ct)
        {
            int   backOff = 10;
            Slice waitKey = Slice.Empty;

            ct.ThrowIfCancellationRequested();

            using (var tr = db.BeginTransaction(ct))
            {
#if DEBUG
                tr.Annotate("PopHighContention()");
#endif

                FdbException error = null;
                try
                {
                    // Check if there are other people waiting to be popped. If so, we cannot pop before them.
                    waitKey = await AddConflictedPopAsync(tr, forced : false).ConfigureAwait(false);

                    if (waitKey.IsNull)
                    {                     // No one else was waiting to be popped
                        var item = await PopSimpleAsync(tr).ConfigureAwait(false);

                        await tr.CommitAsync().ConfigureAwait(false);

                        return(item);
                    }
                    else
                    {
                        await tr.CommitAsync().ConfigureAwait(false);
                    }
                }
                catch (FdbException e)
                {
                    // note: cannot await inside a catch(..) block, so flag the error and process it below
                    error = e;
                }

                if (error != null)
                {                 // If we didn't succeed, then register our pop request
                    waitKey = await AddConflictedPopAsync(db, forced : true, ct : ct).ConfigureAwait(false);
                }

                // The result of the pop will be stored at this key once it has been fulfilled
                var resultKey = ConflictedItemKey(this.ConflictedPop.Keys.DecodeLast <Slice>(waitKey));

                tr.Reset();

                // Attempt to fulfill outstanding pops and then poll the database
                // checking if we have been fulfilled

                while (!ct.IsCancellationRequested)
                {
                    error = null;
                    try
                    {
                        while (!(await FulfillConflictedPops(db, ct).ConfigureAwait(false)))
                        {
                            //NOP ?
                        }
                    }
                    catch (FdbException e)
                    {
                        // cannot await in catch(..) block so process it below
                        error = e;
                    }

                    if (error != null && error.Code != FdbError.NotCommitted)
                    {
                        // If the error is 1020 (not_committed), then there is a good chance
                        // that somebody else has managed to fulfill some outstanding pops. In
                        // that case, we proceed to check whether our request has been fulfilled.
                        // Otherwise, we handle the error in the usual fashion.

                        await tr.OnErrorAsync(error.Code).ConfigureAwait(false);

                        continue;
                    }

                    error = null;
                    try
                    {
                        tr.Reset();

                        var sw = System.Diagnostics.Stopwatch.StartNew();

                        var tmp = await tr.GetValuesAsync(new Slice[] { waitKey, resultKey }).ConfigureAwait(false);

                        var value  = tmp[0];
                        var result = tmp[1];

                        // If waitKey is present, then we have not been fulfilled
                        if (value.HasValue)
                        {
#if DEBUG
                            tr.Annotate("Wait {0} ms : {1} / {2}", backOff, Environment.TickCount, sw.ElapsedTicks);
#endif
                            //TODO: we should rewrite this using Watches !
                            await Task.Delay(backOff, ct).ConfigureAwait(false);

#if DEBUG
                            tr.Annotate("After wait : {0} / {1}", Environment.TickCount, sw.ElapsedTicks);
#endif
                            backOff = Math.Min(1000, backOff * 2);
                            continue;
                        }

                        if (result.IsNullOrEmpty)
                        {
                            return(default(Optional <T>));
                        }

                        tr.Clear(resultKey);
                        await tr.CommitAsync().ConfigureAwait(false);

                        return(this.Encoder.DecodeValue(result));
                    }
                    catch (FdbException e)
                    {
                        error = e;
                    }

                    if (error != null)
                    {
                        await tr.OnErrorAsync(error.Code).ConfigureAwait(false);
                    }
                }

                ct.ThrowIfCancellationRequested();
                // make the compiler happy
                throw new InvalidOperationException();
            }
        }
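Note the shape of the polling loop above: backOff starts at 10 ms and doubles after every poll that still finds waitKey present, capped at 1000 ms, so the delays run 10, 20, 40, ..., 640 and then settle at 1000 ms until the pop is fulfilled, cancelled, or an error is retried via OnErrorAsync.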
Example #45
 public SetCommand(Slice key, Slice value)
 {
     this.Key   = key;
     this.Value = value;
 }
Example #46
 public virtual string ResolveBegin(Slice key)
 {
     return(FdbKey.PrettyPrint(key, FdbKey.PrettyPrintMode.Begin));
 }
Example #47
 public virtual string ResolveEnd(Slice key)
 {
     return(FdbKey.PrettyPrint(key, FdbKey.PrettyPrintMode.End));
 }
Example #48
 public ClearCommand(Slice key)
 {
     this.Key = key;
 }
Example #49
 protected virtual string ResolveKey(Slice key, Func <Slice, string> resolver)
 {
     return(resolver == null ? FdbKey.Dump(key) : resolver(key));
 }
Example #50
        protected unsafe int PutMapResults(LazyStringValue lowerId, IEnumerable <MapResult> mappedResults, TransactionOperationContext indexContext, IndexingStatsScope stats)
        {
            EnsureValidStats(stats);

            using (Slice.External(indexContext.Allocator, lowerId, out Slice docIdAsSlice))
            {
                Queue <MapEntry> existingEntries = null;

                using (_stats.GetMapEntriesTree.Start())
                    MapReduceWorkContext.DocumentMapEntries.RepurposeInstance(docIdAsSlice, clone: false);

                if (MapReduceWorkContext.DocumentMapEntries.NumberOfEntries > 0)
                {
                    using (_stats.GetMapEntries.Start())
                        existingEntries = GetMapEntries(MapReduceWorkContext.DocumentMapEntries);
                }

                int resultsCount = 0;

                foreach (var mapResult in mappedResults)
                {
                    using (mapResult.Data)
                    {
                        resultsCount++;

                        var reduceKeyHash = mapResult.ReduceKeyHash;

                        long id = -1;

                        if (existingEntries?.Count > 0)
                        {
                            var existing        = existingEntries.Dequeue();
                            var storeOfExisting = GetResultsStore(existing.ReduceKeyHash, indexContext, false);

                            if (reduceKeyHash == existing.ReduceKeyHash)
                            {
                                using (var existingResult = storeOfExisting.Get(existing.Id))
                                {
                                    if (ResultsBinaryEqual(mapResult.Data, existingResult.Data))
                                    {
                                        continue;
                                    }
                                }

                                id = existing.Id;
                            }
                            else
                            {
                                using (_stats.RemoveResult.Start())
                                {
                                    MapReduceWorkContext.DocumentMapEntries.Delete(existing.Id);
                                    storeOfExisting.Delete(existing.Id);
                                }
                            }
                        }

                        using (_stats.PutResult.Start())
                        {
                            if (id == -1)
                            {
                                id = MapReduceWorkContext.NextMapResultId++;

                                using (Slice.External(indexContext.Allocator, (byte *)&reduceKeyHash, sizeof(ulong), out Slice val))
                                    MapReduceWorkContext.DocumentMapEntries.Add(id, val);
                            }

                            GetResultsStore(reduceKeyHash, indexContext, create: true).Add(id, mapResult.Data);
                        }
                    }
                }

                HandleIndexOutputsPerDocument(lowerId, resultsCount, stats);

                DocumentDatabase.Metrics.MapReduceIndexes.MappedPerSec.Mark(resultsCount);

                while (existingEntries?.Count > 0)
                {
                    // need to remove remaining old entries

                    var oldResult = existingEntries.Dequeue();

                    var oldState = GetResultsStore(oldResult.ReduceKeyHash, indexContext, create: false);

                    using (_stats.RemoveResult.Start())
                    {
                        oldState.Delete(oldResult.Id);
                        MapReduceWorkContext.DocumentMapEntries.Delete(oldResult.Id);
                    }
                }

                return(resultsCount);
            }
        }
Example #51
 public WatchCommand(Slice key)
 {
     this.Key = key;
 }
Example #52
 public virtual string Resolve(Slice key)
 {
     return(FdbKey.PrettyPrint(key, FdbKey.PrettyPrintMode.Single));
 }
Example #53
 /// <summary>
 /// Sets the current slice
 /// </summary>
 /// <param name="slice">The Slice object</param>
 public void SetCurrentSlice(Slice slice) => _baseAlgorithm.SetCurrentSlice(slice);
Example #54
        public void MultiDelete(Slice key, Slice value, ushort? version = null)
        {
            State.IsModified = true;
            Lazy <Cursor> lazy;
            NodeHeader *  node;
            var           page = FindPageFor(key, out node, out lazy);

            if (page == null || page.LastMatch != 0)
            {
                return; //nothing to delete - key not found
            }

            page = _tx.ModifyPage(page.PageNumber, this, page);

            var item = page.GetNode(page.LastSearchPosition);

            if (item->Flags == NodeFlags.MultiValuePageRef) //multi-value tree exists
            {
                var tree = OpenMultiValueTree(_tx, key, item);

                tree.Delete(value, version);

                // previously, we would convert back to a simple model if we dropped to a single entry
                // however, that doesn't really make sense: once you have enough values to justify an actual
                // nested tree, you are likely to stay that way, or be removed completely.
                if (tree.State.EntriesCount != 0)
                {
                    return;
                }
                _tx.TryRemoveMultiValueTree(this, key);
                _tx.FreePage(tree.State.RootPageNumber);

                Delete(key);
            }
            else // we use a nested page here
            {
                var nestedPage = new Page(NodeHeader.DirectAccess(_tx, item), "multi tree", (ushort)NodeHeader.GetDataSize(_tx, item));
                var nestedItem = nestedPage.Search(value);
                if (nestedPage.LastMatch != 0) // value not found
                {
                    return;
                }

                if (item->Flags == NodeFlags.PageRef)
                {
                    throw new InvalidOperationException($"Multi trees don't use overflows. Tree name: {Name}");
                }

                var nestedPagePtr = NodeHeader.DirectAccess(_tx, item);

                nestedPage = new Page(nestedPagePtr, "multi tree", (ushort)NodeHeader.GetDataSize(_tx, item))
                {
                    LastSearchPosition = nestedPage.LastSearchPosition
                };

                CheckConcurrency(key, value, version, nestedItem->Version, TreeActionType.Delete);
                nestedPage.RemoveNode(nestedPage.LastSearchPosition);
                if (nestedPage.NumberOfEntries == 0)
                {
                    Delete(key);
                }
            }
        }
Example #55
        public void MultiAdd(Slice key, Slice value, ushort? version = null)
        {
            if (value == null)
            {
                throw new ArgumentNullException("value");
            }
            int maxNodeSize = AbstractPager.NodeMaxSize;

            if (value.Size > maxNodeSize)
            {
                throw new ArgumentException(
                          "Cannot add a value to child tree that is over " + maxNodeSize + " bytes in size", "value");
            }
            if (value.Size == 0)
            {
                throw new ArgumentException("Cannot add empty value to child tree");
            }

            State.IsModified = true;
            State.Flags     |= TreeFlags.MultiValueTrees;

            Lazy <Cursor> lazy;
            NodeHeader *  node;
            var           page = FindPageFor(key, out node, out lazy);

            if (page == null || page.LastMatch != 0)
            {
                MultiAddOnNewValue(_tx, key, value, version, maxNodeSize);
                return;
            }

            page = _tx.ModifyPage(page.PageNumber, this, page);
            var item = page.GetNode(page.LastSearchPosition);

            // already was turned into a multi tree, not much to do here
            if (item->Flags == NodeFlags.MultiValuePageRef)
            {
                var existingTree = OpenMultiValueTree(_tx, key, item);
                existingTree.DirectAdd(value, 0, version: version);
                return;
            }

            if (item->Flags == NodeFlags.PageRef)
            {
                throw new InvalidOperationException($"Multi trees don't use overflows. Tree name: {Name}");
            }

            var nestedPagePtr = NodeHeader.DirectAccess(_tx, item);

            var nestedPage = new Page(nestedPagePtr, "multi tree", (ushort)NodeHeader.GetDataSize(_tx, item));

            var existingItem = nestedPage.Search(value);

            if (nestedPage.LastMatch != 0)
            {
                existingItem = null;// not an actual match, just greater than
            }
            ushort previousNodeRevision = existingItem != null ?  existingItem->Version : (ushort)0;

            CheckConcurrency(key, value, version, previousNodeRevision, TreeActionType.Add);

            if (existingItem != null)
            {
                // maybe same value added twice?
                var tmpKey = page.GetNodeKey(item);
                if (tmpKey.Compare(value) == 0)
                {
                    return; // already there, turning into a no-op
                }
                nestedPage.RemoveNode(nestedPage.LastSearchPosition);
            }

            var valueToInsert = nestedPage.PrepareKeyToInsert(value, nestedPage.LastSearchPosition);

            if (nestedPage.HasSpaceFor(_tx, valueToInsert, 0))
            {
                // we are now working on top of the modified root page, we can just modify the memory directly
                nestedPage.AddDataNode(nestedPage.LastSearchPosition, valueToInsert, 0, previousNodeRevision);
                return;
            }

            if (page.HasSpaceFor(_tx, valueToInsert, 0))
            {
                // page has space for an additional node in nested page ...

                var requiredSpace = nestedPage.PageSize +                  // existing page
                                    nestedPage.GetRequiredSpace(value, 0); // new node

                if (requiredSpace + Constants.NodeHeaderSize <= maxNodeSize)
                {
                    // ... and it won't require creating an overflow, so we can just expand the current value; no need to create a nested tree yet

                    EnsureNestedPagePointer(page, item, ref nestedPage, ref nestedPagePtr);

                    var newPageSize = (ushort)Math.Min(Utils.NearestPowerOfTwo(requiredSpace), maxNodeSize - Constants.NodeHeaderSize);

                    ExpandMultiTreeNestedPageSize(_tx, key, valueToInsert, nestedPagePtr, newPageSize, nestedPage.PageSize);

                    return;
                }
            }

            EnsureNestedPagePointer(page, item, ref nestedPage, ref nestedPagePtr);

            // we now have to convert this into a tree instance, instead of just a nested page
            var tree = Create(_tx, KeysPrefixing, TreeFlags.MultiValue);

            for (int i = 0; i < nestedPage.NumberOfEntries; i++)
            {
                var existingValue = nestedPage.GetNodeKey(i);
                tree.DirectAdd(existingValue, 0);
            }
            tree.DirectAdd(value, 0, version: version);
            _tx.AddMultiValueTree(this, key, tree);
            // we need to record that we switched to tree mode here, so the next call wouldn't also try to create the tree again
            DirectAdd(key, sizeof(TreeRootHeader), NodeFlags.MultiValuePageRef);
        }
Example #56
 private static void ThrowCouldNotSeekToFirstElement(Slice treeName)
 {
     throw new InvalidOperationException($"Could not seek to the first element of {treeName} tree");
 }
 public SliceReader(byte[] buffer, int offset, int count)
 {
     m_buffer      = new Slice(buffer, offset, count);
     this.Position = 0;
 }
Example #58
 public virtual ReadResult Read(SnapshotReader snapshot, Slice key, WriteBatch writeBatch)
 {
     return(snapshot.Read(TableName, key, writeBatch));
 }
 public SliceReader(byte[] buffer)
 {
     m_buffer      = new Slice(buffer, 0, buffer.Length);
     this.Position = 0;
 }
Example #60
 public void __delslice__(Slice slice)
 {
     using (new MmapLocker(this)) {
         throw PythonOps.TypeError("mmap object doesn't support slice deletion");
     }
 }