public FloatDocValuesAnonymousInnerClassHelper(FloatFieldSource outerInstance, FloatFieldSource @this, FieldCache.Floats arr, Bits valid)
     : base(@this)
 {
     this.outerInstance = outerInstance;
     this.arr = arr;
     this.valid = valid;
 }
Example #2
 public MultiBits(Bits[] subs, int[] starts, bool defaultValue)
 {
     Debug.Assert(starts.Length == 1 + subs.Length);
     this.Subs = subs;
     this.Starts = starts;
     this.DefaultValue = defaultValue;
 }
Example #3
    public static StunMessage Parse(byte[] bytes)
    {
      var bits = new Bits(bytes);

      if (bits.Pop() != false || bits.Pop() != false)
      {
        throw new Exception("First two bits must be zeroes.");
      }

      var type = StunMessageType.Parse(bits.PopBits(14));

      var length = BitConverter.ToUInt16(bits.PopLittleEndianBytes(2), 0);

      var magicCookie = BitConverter.ToUInt32(bits.PopLittleEndianBytes(4), 0);
      if (magicCookie != MagicCookie)
      {
        throw new ArgumentException($"The parse magic cookie {magicCookie} doesn't match {MagicCookie}.", nameof(MagicCookie));
      }

      var attributes = new List<StunMessageAttribute>();
      for (var index = 0; index < length; index++)
      {
        attributes.Add(StunMessageAttribute.Parse(bits.PopBits(12 * 8)));
      }

      var id = StunMessageId.Parse(bits.PopLittleEndianBytes(12));

      return new StunMessage(type, attributes, id);
    }
Example #4
    public static StunMessageClass Parse(Bits bits)
    {
      if (bits.Count != 2)
      {
        throw new ArgumentException("The number of bits must be 2.", nameof(bits));
      }

      if (Request.Bits.SequenceEqual(bits.ToBitArray()))
      {
        return Request;
      }

      if (SuccessResponse.Bits.SequenceEqual(bits.ToBitArray()))
      {
        return SuccessResponse;
      }

      if (FailureResponse.Bits.SequenceEqual(bits.ToBitArray()))
      {
        return FailureResponse;
      }

      if (Indication.Bits.SequenceEqual(bits.ToBitArray()))
      {
        return Indication;
      }

      throw new InvalidOperationException("Invalid class.");
    }
 public DoubleDocValuesAnonymousInnerClassHelper(DoubleFieldSource outerInstance, DoubleFieldSource @this, FieldCache.Doubles arr, Bits valid)
     : base(@this)
 {
     this.outerInstance = outerInstance;
     this.arr = arr;
     this.valid = valid;
 }
 internal MatchAllScorer(MatchAllDocsQuery outerInstance, IndexReader reader, Bits liveDocs, Weight w, float score)
     : base(w)
 {
     this.OuterInstance = outerInstance;
     this.LiveDocs = liveDocs;
     this.Score_Renamed = score;
     MaxDoc = reader.MaxDoc;
 }
 private SlowCompositeReaderWrapper(CompositeReader reader)
     : base()
 {
     @in = reader;
     fields = MultiFields.GetFields(@in);
     liveDocs = MultiFields.GetLiveDocs(@in);
     @in.RegisterParentReader(this);
 }
 protected internal ValueSourceScorer(IndexReader reader, FunctionValues values)
     : base(null)
 {
     this.reader = reader;
     this.maxDoc = reader.MaxDoc;
     this.values = values;
     CheckDeletes = true;
     this.liveDocs = MultiFields.GetLiveDocs(reader);
 }
Example #9
 public override DocIdSet GetDocIdSet(AtomicReaderContext context, Bits acceptDocs)
 {
     if (processingMode == ProcessingMode.PM_FAST_INVALIDATION)
     {
         return FastBits(context.AtomicReader, acceptDocs);
     }
     else
     {
         return CorrectBits(context.AtomicReader, acceptDocs);
     }
 }
            public DistanceFunctionValue(DistanceValueSource outerInstance, AtomicReader reader)
            {
                this.outerInstance = outerInstance;

                ptX = FieldCache.DEFAULT.GetDoubles(reader, outerInstance.strategy.FieldNameX, true);
                ptY = FieldCache.DEFAULT.GetDoubles(reader, outerInstance.strategy.FieldNameY, true);
                validX = FieldCache.DEFAULT.GetDocsWithField(reader, outerInstance.strategy.FieldNameX);
                validY = FieldCache.DEFAULT.GetDocsWithField(reader, outerInstance.strategy.FieldNameY);

                from = outerInstance.from;
                calculator = outerInstance.strategy.SpatialContext.DistCalc;
                nullValue = (outerInstance.strategy.SpatialContext.IsGeo ? 180 * outerInstance.multiplier : double.MaxValue);
            }
Example #11
    public static StunMessageMethod Parse(Bits bits)
    {
      if (bits.Count != 12)
      {
        throw new ArgumentException("The number of bits must be 12.", nameof(bits));
      }

      if (Binding.Bits.SequenceEqual(bits.ToBitArray()))
      {
        return Binding;
      }

      throw new InvalidOperationException("Invalid method.");
    }
Example #12
        public override DocIdSet GetDocIdSet(AtomicReaderContext context, Bits acceptDocs)
        {
            Terms terms = context.AtomicReader.Terms(term.Field());
            if (terms == null)
            {
                return null;
            }

            TermsEnum termsEnum = terms.Iterator(null);
            if (!termsEnum.SeekExact(term.Bytes()))
            {
                return null;
            }
            return new DocIdSetAnonymousInnerClassHelper(this, acceptDocs, termsEnum);
        }
Example #13
        private FixedBitSet CorrectBits(AtomicReader reader, Bits acceptDocs)
        {
            FixedBitSet bits = new FixedBitSet(reader.MaxDoc); //assume all are INvalid
            Terms terms = reader.Fields.Terms(fieldName);

            if (terms == null)
            {
                return bits;
            }

            TermsEnum termsEnum = terms.Iterator(null);
            DocsEnum docs = null;
            while (true)
            {
                BytesRef currTerm = termsEnum.Next();
                if (currTerm == null)
                {
                    break;
                }
                else
                {
                    docs = termsEnum.Docs(acceptDocs, docs, DocsEnum.FLAG_NONE);
                    int doc = docs.NextDoc();
                    if (doc != DocIdSetIterator.NO_MORE_DOCS)
                    {
                        if (keepMode == KeepMode.KM_USE_FIRST_OCCURRENCE)
                        {
                            bits.Set(doc);
                        }
                        else
                        {
                            int lastDoc = doc;
                            while (true)
                            {
                                lastDoc = doc;
                                doc = docs.NextDoc();
                                if (doc == DocIdSetIterator.NO_MORE_DOCS)
                                {
                                    break;
                                }
                            }
                            bits.Set(lastDoc);
                        }
                    }
                }
            }
            return bits;
        }
        private void CheckTerms(Terms terms, Bits liveDocs, params string[] termsList)
        {
            Assert.IsNotNull(terms);
            TermsEnum te = terms.Iterator(null);

            foreach (string t in termsList)
            {
                BytesRef b = te.Next();
                Assert.IsNotNull(b);
                Assert.AreEqual(t, b.Utf8ToString());
                DocsEnum td = TestUtil.Docs(Random(), te, liveDocs, null, DocsEnum.FLAG_NONE);
                Assert.IsTrue(td.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
                Assert.AreEqual(0, td.DocID());
                Assert.AreEqual(td.NextDoc(), DocIdSetIterator.NO_MORE_DOCS);
            }
            Assert.IsNull(te.Next());
        }
Example #15
    public static StunMessageType Parse(Bits bits)
    {
      if (bits.Count != 14)
      {
        throw new ArgumentException("The number of bits must be 14.", nameof(bits));
      }

      var methodBits = new Bits(12);
      var classBits = new Bits(2);

      methodBits.AddBits(bits.PopBits(5));
      classBits.AddBit(bits.Pop());
      methodBits.AddBits(bits.PopBits(3));
      classBits.AddBit(bits.Pop());
      methodBits.AddBits(bits);

      return new StunMessageType(StunMessageMethod.Parse(methodBits), StunMessageClass.Parse(classBits));
    }
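
The five Add/Pop calls above implement the RFC 5389 message-type layout, which interleaves the 12 method bits and the 2 class bits as M11..M7 | C1 | M6..M4 | C0 | M3..M0. The following integer-based helper (ComposeType is hypothetical, not part of these classes) is shown only to make those bit positions explicit:

    // Illustrative sketch: composes the 14-bit STUN type field from a 12-bit method
    // and a 2-bit class, following the RFC 5389 interleaving the Bits-based code uses.
    static int ComposeType(int method, int messageClass)
    {
        return ((method & 0xF80) << 2)        // M11..M7 -> bits 13..9
             | ((messageClass & 0x2) << 7)    // C1      -> bit 8
             | ((method & 0x070) << 1)        // M6..M4  -> bits 7..5
             | ((messageClass & 0x1) << 4)    // C0      -> bit 4
             |  (method & 0x00F);             // M3..M0  -> bits 3..0
    }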
Example #16
    public byte[] ToLittleEndianByteArray()
    {
      var bits = new Bits(20);

      bits.AddOffBit();
      bits.AddOffBit();

      bits.AddBits(Type.Bits);

      var length = (ushort)(Attributes.Count() * 4 * 8);
      bits.AddUInt16LittleEndian(length);

      bits.AddUInt32LittleEndian(MagicCookie);

      bits.AddBytesLittleEndian(Id.Bytes);

      return bits.ToLittleEndianByteArray();
    }
Example #17
        static void Main(string[] args)
        {
            //INPUT
            uint numN = uint.Parse(Console.ReadLine());
            List<int> startPos = new List<int>();
            for (int i = 0; i < 2; i++)
            {
                startPos.Add(int.Parse(Console.ReadLine()));
            }
            int modK = int.Parse(Console.ReadLine());

            //Create Bits
            List<Bits>[] toSwap = new List<Bits>[startPos.Count];
            for (int i = 0; i < toSwap.Length; i++)
            {
                toSwap[i] = new List<Bits>();
                for (int b = startPos[i]; b < startPos[i] + modK; b++)
                {
                    Bits bit = new Bits(0, b);
                    toSwap[i].Add(bit);
                }
            }
            //Assign Value to all bits
            for (int i = 0; i < toSwap.Length; i++)
            {
                for (int b = 0; b < toSwap[i].Count; b++)
                {
                    toSwap[i][b].Value = GetValue(numN, toSwap[i][b].Position);
                }
            }
            //switch Bits
            for (int i = 0; i < toSwap.Length - 1; i++)
            {
                for (int b = 0; b < toSwap[i].Count; b++)
                {
                    numN = SwapBits(numN, toSwap[i][b].Value, toSwap[i + 1][b].Position);
                    numN = SwapBits(numN, toSwap[i + 1][b].Value, toSwap[i][b].Position);
                }

            }

            Console.WriteLine((uint)numN);
        }
Example #18
    public StunMessageType(StunMessageMethod method, StunMessageClass @class)
    {
      if (!method.PermittedClasses.Contains(@class))
      {
        throw new ArgumentException($"The class {@class} is not permitted by the method {method}.", nameof(@class));
      }
      
      Name = method.Name + " " + @class.Name;

      var methodBits = new Bits(method.Bits);
      var classBits = new Bits(@class.Bits);
      var bits = new Bits(14);
      bits.AddBits(methodBits.PopBits(5));
      bits.AddBit(classBits.Pop());
      bits.AddBits(methodBits.PopBits(3));
      bits.AddBit(classBits.Pop());
      bits.AddBits(methodBits);

      Bits = bits.ToBitArray();
    }
Example #19
 public override DocIdSet GetDocIdSet(AtomicReaderContext context, Bits acceptDocs)
 {
     return null;
 }
Example #20
        public void WriteToROM(List <NPCObject> NPCObjects, ref int offset)
        {
            // Declare index of this instance in a collection
            int index = NPCObjects.IndexOf(this);

            // Get the reference info for this instance
            var referenceInfo = GetReferenceInfo(NPCObjects, index);

            // Write the reference count
            rom[offset] = (byte)referenceInfo.Count;

            // Start writing base data
            Bits.SetBitsByByte(rom, offset++, (byte)((int)EngageType << 4), true);

            // Speed plus
            rom[offset] = SpeedPlus;

            // B2 unknown bits
            Bits.SetBit(rom, offset, 3, B2b3);
            Bits.SetBit(rom, offset, 4, B2b4);
            Bits.SetBit(rom, offset, 5, B2b5);
            Bits.SetBit(rom, offset, 6, B2b6);
            Bits.SetBit(rom, offset++, 7, B2b7);

            // B3 unknown bits
            Bits.SetBit(rom, offset, 0, B3b0);
            Bits.SetBit(rom, offset, 1, B3b1);
            Bits.SetBit(rom, offset, 2, B3b2);
            Bits.SetBit(rom, offset, 3, B3b3);
            Bits.SetBit(rom, offset, 4, B3b4);
            Bits.SetBit(rom, offset, 5, B3b5);
            Bits.SetBit(rom, offset, 6, B3b6);
            Bits.SetBit(rom, offset++, 7, B3b7);

            // NPCID
            Bits.SetShort(rom, offset, referenceInfo.BaseNPCID << 2);

            // B4b0,1
            Bits.SetBit(rom, offset, 0, B4b0);
            Bits.SetBit(rom, offset++, 1, B4b1);

            // Action
            Bits.SetBitsByByte(rom, offset++, (byte)((referenceInfo.BaseAction << 4) & 0xF0), true); // lower 4 bits
            rom[offset] = (byte)(referenceInfo.BaseAction >> 4);                                     // upper 6 bits

            // B7b6,7
            Bits.SetBit(rom, offset, 6, B7b6);
            Bits.SetBit(rom, offset++, 7, B7b7);

            // Event / Pack
            if (EngageType != EngageType.Battle)
            {
                Bits.SetShort(rom, offset, referenceInfo.BaseEvent);
            }
            else
            {
                rom[offset] = (byte)referenceInfo.BasePack;
            }
            offset++;

            // Engage trigger
            rom[offset] &= 0x0F;
            rom[offset] |= (byte)(EngageTrigger << 4);

            // After battle
            if (EngageType == EngageType.Battle)
            {
                rom[offset] &= 0xF0;
                rom[offset] |= (byte)(AfterBattle << 1);
            }
            offset++;

            // Write any references to ROM
            int count = referenceInfo.Count;

            while (count-- >= 0)
            {
                WriteReference(referenceInfo, ref offset);
            }
        }
Example #21
 /// <summary>
 /// Returns an ID for this operation, unique within a completion manager. The ID is valid only for the duration of the operation.
 /// </summary>
 public ulong GetUniqueId()
 {
     Contract.Requires(EntryId != AvailableMarker);
     return(Bits.GetLongFromInts(unchecked ((uint)PoolNodeId), (uint)EntryId));
 }
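
GetUniqueId relies on Bits.GetLongFromInts to pack the pool node id and the entry id into a single 64-bit value. A minimal sketch of such a helper, assuming the first argument ends up in the high 32 bits (illustrative only, not the library's actual implementation):

 // Illustrative sketch: packs two 32-bit values into one 64-bit id,
 // first argument in the high word, second in the low word.
 static ulong GetLongFromInts(uint high, uint low)
 {
     return ((ulong)high << 32) | low;
 }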
 /// <exception cref="System.IO.IOException"></exception>
 public virtual void WriteShort(int position, int v)
 {
     Bits.WriteShort(Buffer, position, (short)v, _isBigEndian);
 }
 public void WriteShort(int v, ByteOrder byteOrder)
 {
     EnsureAvailable(Bits.ShortSizeInBytes);
     Bits.WriteShort(Buffer, Pos, (short)v, byteOrder == ByteOrder.BigEndian);
     Pos += Bits.ShortSizeInBytes;
 }
 /// <exception cref="System.IO.IOException"></exception>
 public virtual void WriteShort(int v)
 {
     EnsureAvailable(Bits.ShortSizeInBytes);
     Bits.WriteShort(Buffer, Pos, (short)v, _isBigEndian);
     Pos += Bits.ShortSizeInBytes;
 }
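
The WriteShort overloads above all delegate to Bits.WriteShort. A minimal sketch of what such a helper might do, assuming it writes the two bytes of the value into the buffer at the given position in the requested byte order (an assumption, not the actual library code):

 // Illustrative sketch: writes a 16-bit value into buffer[pos] and buffer[pos + 1]
 // in either big-endian or little-endian order.
 static void WriteShort(byte[] buffer, int pos, short value, bool bigEndian)
 {
     if (bigEndian)
     {
         buffer[pos]     = (byte)((ushort)value >> 8);
         buffer[pos + 1] = (byte)value;
     }
     else
     {
         buffer[pos]     = (byte)value;
         buffer[pos + 1] = (byte)((ushort)value >> 8);
     }
 }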
 public void Reset(Bits liveDocs, int[] positions, int[] startOffsets, int[] endOffsets, int[] payloadLengths, byte[] payloadBytes)
 {
     this.LiveDocs = liveDocs;
     this.Positions = positions;
     this.StartOffsets = startOffsets;
     this.EndOffsets = endOffsets;
     this.PayloadOffsets = payloadLengths;
     this.PayloadBytes = payloadBytes;
     this.Doc = -1;
     DidNext = false;
     NextPos = 0;
 }
Example #26
 public override DocIdSet GetDocIdSet(AtomicReaderContext context, Bits acceptDocs)
 {
     return(new AnonymousDocIdSet());
 }
Example #27
 public override DocIdSet GetDocIdSet(AtomicReaderContext context, Bits acceptDocs)
 {
     return(null);
 }
Example #28
 public override DocsAndPositionsEnum DocsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse, int flags)
 {
     throw new NotSupportedException();
 }
Example #29
        public void PageModificationInAnyTreeMustRemoveItFromListOfFreedPagesInAllStores()
        {
            using (var database = CreateDocumentDatabase())
                using (var index = MapReduceIndex.CreateNew(new IndexDefinition()
                {
                    Name = "Users_ByCount_GroupByLocation",
                    Maps = { "from user in docs.Users select new { user.Location, Count = 1 }" },
                    Reduce =
                        "from result in results group result by result.Location into g select new { Location = g.Key, Count = g.Sum(x => x.Count) }",
                    Type = IndexType.MapReduce,
                    Fields =
                    {
                        { "Location", new IndexFieldOptions {
                              Storage = FieldStorage.Yes
                          } },
                        { "Count",    new IndexFieldOptions {
                              Storage = FieldStorage.Yes
                          } }
                    }
                }, database))
                {
                    index._threadAllocations = NativeMemory.CurrentThreadStats;

                    var mapReduceContext = new MapReduceIndexingContext();
                    using (var contextPool = new TransactionContextPool(database.DocumentsStorage.Environment))
                    {
                        var indexStorage = new IndexStorage(index, contextPool, database);
                        var reducer      = new ReduceMapResultsOfStaticIndex(index, index._compiled.Reduce, index.Definition, indexStorage, new MetricCounters(), mapReduceContext);

                        using (index._contextPool.AllocateOperationContext(out TransactionOperationContext indexContext))
                        {
                            using (var tx = indexContext.OpenWriteTransaction())
                            {
                                mapReduceContext.MapPhaseTree    = tx.InnerTransaction.CreateTree(MapReduceIndexBase <MapIndexDefinition, IndexField> .MapPhaseTreeName);
                                mapReduceContext.ReducePhaseTree = tx.InnerTransaction.CreateTree(MapReduceIndexBase <MapIndexDefinition, IndexField> .ReducePhaseTreeName);

                                var store1 = new MapReduceResultsStore(1, MapResultsStorageType.Tree, indexContext, mapReduceContext, true);
                                var store2 = new MapReduceResultsStore(2, MapResultsStorageType.Tree, indexContext, mapReduceContext, true);

                                mapReduceContext.StoreByReduceKeyHash.Add(1, store1);
                                mapReduceContext.StoreByReduceKeyHash.Add(2, store2);

                                // we're cheating here a bit, as originally this issue was reproduced on a very large amount of data
                                //
                                // we choose page 541 because it's going to be used when calling store1.Add() below
                                // let's pretend this page was freed as the result of a deletion in store2
                                // the important thing is that store1 will be processed before store2, and we'll store the aggregation result for page 541 in the PageNumberToReduceResult table
                                // the issue was that the modification of page 541 in the tree of store1 didn't remove it from the FreedPages of store2
                                // as a result, the processing of store2 removed page 541 from the table

                                long pageNumber = 541;

                                if (tx.InnerTransaction.LowLevelTransaction.Environment.Options.ForceUsing32BitsPager || PlatformDetails.Is32Bits)
                                {
                                    // in 32 bits we might allocate different pages, 93 is going to be used during store1.Add() calls
                                    pageNumber = 93;
                                }

                                mapReduceContext.FreedPages.Add(pageNumber);

                                for (int i = 0; i < 200; i++)
                                {
                                    using (var mappedResult = indexContext.ReadObject(new DynamicJsonValue
                                    {
                                        ["Count"] = 1,
                                        ["Location"] = new string('c', 1024)
                                    }, $"users/{i}"))
                                    {
                                        store1.Add(i, mappedResult);
                                    }
                                }

                                var writeOperation = new Lazy <IndexWriteOperation>(() => index.IndexPersistence.OpenIndexWriter(tx.InnerTransaction, null));

                                try
                                {
                                    var stats = new IndexingStatsScope(new IndexingRunStats());
                                    reducer.Execute(null, indexContext,
                                                    writeOperation,
                                                    stats, CancellationToken.None);

                                    Assert.DoesNotContain(pageNumber, mapReduceContext.FreedPages);

                                    var table = indexContext.Transaction.InnerTransaction.OpenTable(ReduceMapResultsBase <MapReduceIndexDefinition> .ReduceResultsSchema,
                                                                                                    ReduceMapResultsBase <MapReduceIndexDefinition> .PageNumberToReduceResultTableName);

                                    var page = Bits.SwapBytes(pageNumber);

                                    unsafe
                                    {
                                        using (Slice.External(indexContext.Allocator, (byte *)&page, sizeof(long), out Slice pageSlice))
                                        {
                                            Assert.True(table.ReadByKey(pageSlice, out TableValueReader tvr));
                                        }
                                    }
                                }
                                finally
                                {
                                    if (writeOperation.IsValueCreated)
                                    {
                                        writeOperation.Value.Dispose();
                                    }
                                }
                            }
                        }
                    }
                }
        }
        public PutOperationResults PutDocument(DocumentsOperationContext context, string id,
                                               string expectedChangeVector,
                                               BlittableJsonReaderObject document,
                                               long? lastModifiedTicks = null,
                                               string changeVector    = null,
                                               DocumentFlags flags    = DocumentFlags.None,
                                               NonPersistentDocumentFlags nonPersistentFlags = NonPersistentDocumentFlags.None)
        {
            if (context.Transaction == null)
            {
                ThrowRequiresTransaction();
                return(default(PutOperationResults)); // never hit
            }

#if DEBUG
            var documentDebugHash = document.DebugHash;
            document.BlittableValidation();
            BlittableJsonReaderObject.AssertNoModifications(document, id, assertChildren: true);
            AssertMetadataWasFiltered(document);
#endif

            var newEtag       = _documentsStorage.GenerateNextEtag();
            var modifiedTicks = lastModifiedTicks ?? _documentDatabase.Time.GetUtcNow().Ticks;

            id = BuildDocumentId(id, newEtag, out bool knownNewId);
            using (DocumentIdWorker.GetLowerIdSliceAndStorageKey(context, id, out Slice lowerId, out Slice idPtr))
            {
                var collectionName = _documentsStorage.ExtractCollectionName(context, document);
                var table          = context.Transaction.InnerTransaction.OpenTable(DocsSchema, collectionName.GetTableName(CollectionTableType.Documents));

                var oldValue = default(TableValueReader);
                if (knownNewId == false)
                {
                    // delete a tombstone if it exists, if it known that it is a new ID, no need, so we can skip it
                    DeleteTombstoneIfNeeded(context, collectionName, lowerId.Content.Ptr, lowerId.Size);

                    table.ReadByKey(lowerId, out oldValue);
                }

                BlittableJsonReaderObject oldDoc = null;
                if (oldValue.Pointer == null)
                {
                    if (string.IsNullOrEmpty(expectedChangeVector) == false)
                    {
                        ThrowConcurrentExceptionOnMissingDoc(id, expectedChangeVector);
                    }
                }
                else
                {
                    if (string.IsNullOrEmpty(expectedChangeVector) == false)
                    {
                        var oldChangeVector = TableValueToChangeVector(context, (int)DocumentsTable.ChangeVector, ref oldValue);
                        if (string.Compare(expectedChangeVector, oldChangeVector, StringComparison.Ordinal) != 0)
                        {
                            ThrowConcurrentException(id, expectedChangeVector, oldChangeVector);
                        }
                    }

                    oldDoc = new BlittableJsonReaderObject(oldValue.Read((int)DocumentsTable.Data, out int oldSize), oldSize, context);
                    var oldCollectionName = _documentsStorage.ExtractCollectionName(context, oldDoc);
                    if (oldCollectionName != collectionName)
                    {
                        ThrowInvalidCollectionNameChange(id, oldCollectionName, collectionName);
                    }

                    var oldFlags = TableValueToFlags((int)DocumentsTable.Flags, ref oldValue);

                    if ((nonPersistentFlags & NonPersistentDocumentFlags.ByAttachmentUpdate) != NonPersistentDocumentFlags.ByAttachmentUpdate &&
                        (nonPersistentFlags & NonPersistentDocumentFlags.FromReplication) != NonPersistentDocumentFlags.FromReplication)
                    {
                        if ((oldFlags & DocumentFlags.HasAttachments) == DocumentFlags.HasAttachments)
                        {
                            flags |= DocumentFlags.HasAttachments;
                        }
                    }
                }

                var result = BuildChangeVectorAndResolveConflicts(context, id, lowerId, newEtag, document, changeVector, expectedChangeVector, flags, oldValue);
                changeVector        = result.ChangeVector;
                nonPersistentFlags |= result.NonPersistentFlags;
                if (nonPersistentFlags.Contain(NonPersistentDocumentFlags.Resolved))
                {
                    flags |= DocumentFlags.Resolved;
                }

                if (collectionName.IsHiLo == false &&
                    (flags & DocumentFlags.Artificial) != DocumentFlags.Artificial)
                {
                    if (ShouldRecreateAttachments(context, lowerId, oldDoc, document, ref flags, nonPersistentFlags))
                    {
#if DEBUG
                        if (document.DebugHash != documentDebugHash)
                        {
                            throw new InvalidDataException("The incoming document " + id + " has changed _during_ the put process, " +
                                                           "this is likely because you are trying to save a document that is already stored and was moved");
                        }
#endif
                        document = context.ReadObject(document, id, BlittableJsonDocumentBuilder.UsageMode.ToDisk);
#if DEBUG
                        documentDebugHash = document.DebugHash;
                        document.BlittableValidation();
                        BlittableJsonReaderObject.AssertNoModifications(document, id, assertChildren: true);
                        AssertMetadataWasFiltered(document);
                        AttachmentsStorage.AssertAttachments(document, flags);
#endif
                    }

                    if (nonPersistentFlags.Contain(NonPersistentDocumentFlags.FromReplication) == false &&
                        (flags.Contain(DocumentFlags.Resolved) ||
                         _documentDatabase.DocumentsStorage.RevisionsStorage.Configuration != null
                        ))
                    {
                        var shouldVersion = _documentDatabase.DocumentsStorage.RevisionsStorage.ShouldVersionDocument(collectionName, nonPersistentFlags, oldDoc, document,
                                                                                                                      ref flags, out RevisionsCollectionConfiguration configuration);
                        if (shouldVersion)
                        {
                            _documentDatabase.DocumentsStorage.RevisionsStorage.Put(context, id, document, flags, nonPersistentFlags,
                                                                                    changeVector, modifiedTicks, configuration, collectionName);
                        }
                    }
                }

                using (Slice.From(context.Allocator, changeVector, out var cv))
                    using (table.Allocate(out TableValueBuilder tvb))
                    {
                        tvb.Add(lowerId);
                        tvb.Add(Bits.SwapBytes(newEtag));
                        tvb.Add(idPtr);
                        tvb.Add(document.BasePointer, document.Size);
                        tvb.Add(cv.Content.Ptr, cv.Size);
                        tvb.Add(modifiedTicks);
                        tvb.Add((int)flags);
                        tvb.Add(context.GetTransactionMarker());

                        if (oldValue.Pointer == null)
                        {
                            table.Insert(tvb);
                        }
                        else
                        {
                            table.Update(oldValue.Id, tvb);
                        }
                    }

                if (collectionName.IsHiLo == false)
                {
                    _documentDatabase.ExpiredDocumentsCleaner?.Put(context, lowerId, document);
                }

                context.LastDatabaseChangeVector = changeVector;
                _documentDatabase.Metrics.Docs.PutsPerSec.MarkSingleThreaded(1);
                _documentDatabase.Metrics.Docs.BytesPutsPerSec.MarkSingleThreaded(document.Size);

                context.Transaction.AddAfterCommitNotification(new DocumentChange
                {
                    ChangeVector   = changeVector,
                    CollectionName = collectionName.Name,
                    Id             = id,
                    Type           = DocumentChangeTypes.Put,
                });

#if DEBUG
                if (document.DebugHash != documentDebugHash)
                {
                    throw new InvalidDataException("The incoming document " + id + " has changed _during_ the put process, " +
                                                   "this is likely because you are trying to save a document that is already stored and was moved");
                }
                document.BlittableValidation();
                BlittableJsonReaderObject.AssertNoModifications(document, id, assertChildren: true);
                AssertMetadataWasFiltered(document);
                AttachmentsStorage.AssertAttachments(document, flags);
#endif
                return(new PutOperationResults
                {
                    Etag = newEtag,
                    Id = id,
                    Collection = collectionName,
                    ChangeVector = changeVector,
                    Flags = flags,
                    LastModified = new DateTime(modifiedTicks)
                });
            }
        }
Example #31
        public void Update_same_value_to_fixed_sized_index_throws()
        {
            using (var tx = Env.WriteTransaction())
            {
                Slice.From(tx.Allocator, "EtagIndexName", out var etagIndexName);
                var fixedSizedIndex = new TableSchema.FixedSizeSchemaIndexDef
                {
                    Name       = etagIndexName,
                    IsGlobal   = true,
                    StartIndex = 1,
                };

                var tableSchema = new TableSchema()
                                  .DefineFixedSizeIndex(fixedSizedIndex)
                                  .DefineKey(new TableSchema.SchemaIndexDef
                {
                    StartIndex = 0,
                    Count      = 1,
                });

                tableSchema.Create(tx, "Items", 16);
                var        itemsTable = tx.OpenTable(tableSchema, "Items");
                const long number1    = 1L;
                const long number2    = 2L;
                const long number3    = 3L;

                using (itemsTable.Allocate(out TableValueBuilder builder))
                    using (Slice.From(tx.Allocator, "val1", out var key))
                    {
                        builder.Add(key);
                        builder.Add(Bits.SwapBytes(number1));
                        itemsTable.Set(builder);
                    }

                using (itemsTable.Allocate(out TableValueBuilder builder))
                    using (Slice.From(tx.Allocator, "val2", out var key))
                    {
                        builder.Add(key);
                        builder.Add(Bits.SwapBytes(number2));
                        itemsTable.Set(builder);
                    }

                using (itemsTable.Allocate(out TableValueBuilder builder))
                    using (Slice.From(tx.Allocator, "val1", out var key))
                    {
                        builder.Add(key);
                        builder.Add(Bits.SwapBytes(number3));
                        itemsTable.Set(builder);
                    }

                using (itemsTable.Allocate(out TableValueBuilder builder))
                    using (Slice.From(tx.Allocator, "val2", out var key))
                    {
                        builder.Add(key);
                        builder.Add(Bits.SwapBytes(number3));

                        var exception = Assert.Throws <VoronErrorException>(() => itemsTable.Set(builder));
                        Assert.True(exception.Message.StartsWith("Attempt to add duplicate value"));
                    }
            }
        }
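
This test, like the PutDocument example above, passes the numeric key through Bits.SwapBytes before storing it, presumably so that numerically increasing values also sort correctly as big-endian key bytes. A minimal byte-order-reversal sketch under that assumption (the actual Sparrow implementation may differ):

        // Illustrative sketch: reverses the byte order of a 64-bit value so that numeric
        // order matches lexicographic (big-endian) order when the value is used as a key.
        static long SwapBytes(long value)
        {
            ulong v = (ulong)value;
            v = ((v & 0x00FF00FF00FF00FFUL) << 8)  | ((v & 0xFF00FF00FF00FF00UL) >> 8);
            v = ((v & 0x0000FFFF0000FFFFUL) << 16) | ((v & 0xFFFF0000FFFF0000UL) >> 16);
            v = (v << 32) | (v >> 32);
            return (long)v;
        }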
 public AllDeletedFilterReader(AtomicReader @in)
     : base(@in)
 {
     LiveDocs_Renamed = new Bits_MatchNoBits(@in.MaxDoc);
     Debug.Assert(MaxDoc == 0 || HasDeletions);
 }
            public override DocsAndPositionsEnum DocsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse, int flags)
            {
                if (!StorePositions && !StoreOffsets)
                {
                    return null;
                }

                TVDocsAndPositionsEnum docsAndPositionsEnum;
                if (reuse != null && reuse is TVDocsAndPositionsEnum)
                {
                    docsAndPositionsEnum = (TVDocsAndPositionsEnum)reuse;
                }
                else
                {
                    docsAndPositionsEnum = new TVDocsAndPositionsEnum();
                }
                docsAndPositionsEnum.Reset(liveDocs, Positions, StartOffsets, EndOffsets, PayloadOffsets, PayloadData);
                return docsAndPositionsEnum;
            }
Example #34
 public override DocIdSet GetDocIdSet(AtomicReaderContext context, Bits acceptDocs)
 {
     return(new FixedBitSet(context.AtomicReader.MaxDoc));
 }
 public bool this[Bits bit]
 {
     get { return _bitstring[(int)bit]; }
 }
Example #36
 /// <summary>
 /// Prepares the numeric controls and buttons in the byte editor for editing a command's binary data.
 /// </summary>
 private void UpdateNumericControls()
 {
     this.byteEditor.LoadBytes(Bits.Copy(Command.Data));
     this.textBoxHex.Text = BitConverter.ToString(Command.Data);
 }
 /// <exception cref="System.IO.IOException"></exception>
 public virtual void WriteLong(long v)
 {
     EnsureAvailable(Bits.LongSizeInBytes);
     Bits.WriteLong(Buffer, Pos, v, _isBigEndian);
     Pos += Bits.LongSizeInBytes;
 }
 /// <exception cref="System.IO.IOException"></exception>
 public virtual void WriteChar(int position, int v)
 {
     Bits.WriteChar(Buffer, position, (char)v, _isBigEndian);
 }
 public void WriteShort(int position, int v, ByteOrder byteOrder)
 {
     Bits.WriteShort(Buffer, position, (short)v, byteOrder == ByteOrder.BigEndian);
 }
Example #40
        // Read/write ROM
        private void ReadFromROM()
        {
            int offset = Index * 16 + 0x3EF830;

            X                = (byte)rom[offset++];
            Y                = (byte)rom[offset++];
            ShowCheckBit     = (byte)(rom[offset] & 0x07);
            ShowCheckAddress = (ushort)(((Bits.GetShort(rom, offset++) & 0x1FF) >> 3) + 0x7045);
            GoToLocation     = (rom[offset++] & 0x40) == 0x40;
            if (!GoToLocation)
            {
                RunEvent = Bits.GetShort(rom, offset);
                offset  += 4;
            }
            else
            {
                WhichLocationCheckBit     = (byte)(rom[offset] & 0x07);
                WhichLocationCheckAddress = (ushort)(((Bits.GetShort(rom, offset) & 0x1FF) >> 3) + 0x7045); offset += 2;
                GoLocationA = rom[offset++];
                GoLocationB = rom[offset++];
            }
            if (Bits.GetShort(rom, offset) == 0xFFFF)
            {
                EnabledToEast      = false;
                CheckAddressToEast = 0x7045;
                offset            += 2;
            }
            else
            {
                EnabledToEast      = true;
                CheckBitToEast     = (byte)(rom[offset] & 0x07);
                CheckAddressToEast = (ushort)(((Bits.GetShort(rom, offset) & 0x1FF) >> 3) + 0x7045); offset++;
                LocationToEast     = (byte)(rom[offset] >> 1); offset++;
            }
            if (Bits.GetShort(rom, offset) == 0xFFFF)
            {
                EnabledToSouth      = false;
                CheckAddressToSouth = 0x7045;
                offset += 2;
            }
            else
            {
                EnabledToSouth      = true;
                CheckBitToSouth     = (byte)(rom[offset] & 0x07);
                CheckAddressToSouth = (ushort)(((Bits.GetShort(rom, offset++) & 0x1FF) >> 3) + 0x7045);
                LocationToSouth     = (byte)(rom[offset++] >> 1);
            }
            if (Bits.GetShort(rom, offset) == 0xFFFF)
            {
                EnabledToWest      = false;
                CheckAddressToWest = 0x7045;
                offset            += 2;
            }
            else
            {
                EnabledToWest      = true;
                CheckBitToWest     = (byte)(rom[offset] & 0x07);
                CheckAddressToWest = (ushort)(((Bits.GetShort(rom, offset++) & 0x1FF) >> 3) + 0x7045);
                LocationToWest     = (byte)(rom[offset++] >> 1);
            }
            if (Bits.GetShort(rom, offset) == 0xFFFF)
            {
                EnabledToNorth      = false;
                CheckAddressToNorth = 0x7045;
                offset += 2;
            }
            else
            {
                EnabledToNorth      = true;
                CheckBitToNorth     = (byte)(rom[offset] & 0x07);
                CheckAddressToNorth = (ushort)(((Bits.GetShort(rom, offset++) & 0x1FF) >> 3) + 0x7045);
                LocationToNorth     = (byte)(rom[offset] >> 1);
            }

            // Read name text
            int pointer = Bits.GetShort(rom, Index * 2 + 0x3EFD00);

            offset = pointer + 0x3EFD80;
            List <char> symbols = new List <char>();

            for (int i = 0; rom[offset] != 0x06 && rom[offset] != 0x00; i++)
            {
                symbols.Add((char)rom[offset++]);
            }
            Name = new char[symbols.Count];
            int a = 0;

            foreach (char c in symbols)
            {
                Name[a++] = c;
            }
        }
 /// <exception cref="System.IO.IOException"></exception>
 public virtual void WriteInt(int position, int v)
 {
     Bits.WriteInt(Buffer, position, v, _isBigEndian);
 }
 /// <exception cref="System.IO.IOException"></exception>
 public virtual void WriteChar(int v)
 {
     EnsureAvailable(Bits.CharSizeInBytes);
     Bits.WriteChar(Buffer, Pos, (char)v, _isBigEndian);
     Pos += Bits.CharSizeInBytes;
 }
        void CompletionCallback(out Action <object> callback, out object state)
        {
            int slot = this.headTail;
            int slotLowPri;

            while (true)
            {
                Fx.Assert(Bits.Count(slot) != -1, "CompletionCallback called on idle IOTS!");

                bool wasEmpty = Bits.Count(slot) == 0;
                if (wasEmpty)
                {
                    // We're about to set this to idle.  First check the low-priority queue.  This alone doesn't
                    // guarantee we service all the low-pri items - there hasn't even been an Interlocked yet.  But
                    // we take care of that later.
                    slotLowPri = this.headTailLowPri;
                    while (Bits.CountNoIdle(slotLowPri) != 0)
                    {
                        if (slotLowPri == (slotLowPri = Interlocked.CompareExchange(ref this.headTailLowPri,
                                                                                    Bits.IncrementLo(slotLowPri), slotLowPri)))
                        {
                            this.overlapped.Post(this);
                            this.slotsLowPri[slotLowPri & SlotMaskLowPri].DequeueWorkItem(out callback, out state);
                            return;
                        }
                    }
                }

                if (slot == (slot = Interlocked.CompareExchange(ref this.headTail, Bits.IncrementLo(slot), slot)))
                {
                    if (!wasEmpty)
                    {
                        this.overlapped.Post(this);
                        this.slots[slot & SlotMask].DequeueWorkItem(out callback, out state);
                        return;
                    }

                    // We just set the IOThreadScheduler to idle.  Check if a low-priority item got added in the
                    // interim.
                    // Interlocked calls create a thread barrier, so this read will give us the value of
                    // headTailLowPri at the time of the interlocked that set us to idle, or later.  The invariant
                    // here is that either the low-priority queue was empty at some point after we set the IOTS to
                    // idle (so that the next enqueue will notice, and issue a Post), or that the IOTS was unidle at
                    // some point after we set it to idle (so that the next attempt to go idle will verify that the
                    // low-priority queue is empty).
                    slotLowPri = this.headTailLowPri;

                    if (Bits.CountNoIdle(slotLowPri) != 0)
                    {
                        // Whoops, go back from being idle (unless someone else already did).  If we go back, start
                        // over.  (We still owe a Post.)
                        slot = Bits.IncrementLo(slot);
                        if (slot == Interlocked.CompareExchange(ref this.headTail, slot + Bits.HiOne, slot))
                        {
                            slot += Bits.HiOne;
                            continue;
                        }

                        // We know that there's a low-priority work item.  But we also know that the IOThreadScheduler
                        // wasn't idle.  It's best to let it take care of itself, since according to this method, we
                        // just set the IOThreadScheduler to idle so shouldn't take on any tasks.
                    }

                    break;
                }
            }

            callback = null;
            state    = null;
            return;
        }
        bool TryCoalesce(out Action <object> callback, out object state)
        {
            int slot = this.headTail;
            int slotLowPri;

            while (true)
            {
                if (Bits.Count(slot) > 0)
                {
                    if (slot == (slot = Interlocked.CompareExchange(ref this.headTail, Bits.IncrementLo(slot), slot)))
                    {
                        this.slots[slot & SlotMask].DequeueWorkItem(out callback, out state);
                        return(true);
                    }
                    continue;
                }

                slotLowPri = this.headTailLowPri;
                if (Bits.CountNoIdle(slotLowPri) > 0)
                {
                    if (slotLowPri == (slotLowPri = Interlocked.CompareExchange(ref this.headTailLowPri,
                                                                                Bits.IncrementLo(slotLowPri), slotLowPri)))
                    {
                        this.slotsLowPri[slotLowPri & SlotMaskLowPri].DequeueWorkItem(out callback, out state);
                        return(true);
                    }
                    slot = this.headTail;
                    continue;
                }

                break;
            }

            callback = null;
            state    = null;
            return(false);
        }
Example #45
        public void WriteToROM()
        {
            int offset = Index * 16 + 0x3EF830;

            rom[offset++] = X;
            rom[offset++] = Y;
            Bits.SetShort(rom, offset, (ushort)((ShowCheckAddress - 0x7045) << 3));
            rom[offset++] |= ShowCheckBit;
            Bits.SetBit(rom, offset++, 6, GoToLocation);
            if (!GoToLocation)
            {
                Bits.SetShort(rom, offset, RunEvent); offset += 2;
                Bits.SetShort(rom, offset, 0xFFFF); offset   += 2;
            }
            else
            {
                Bits.SetShort(rom, offset, (ushort)((WhichLocationCheckAddress - 0x7045) << 3));
                rom[offset]  |= WhichLocationCheckBit; offset += 2;
                rom[offset++] = GoLocationA;
                rom[offset++] = GoLocationB;
            }
            if (!EnabledToEast)
            {
                Bits.SetShort(rom, offset, 0xFFFF);
                offset += 2;
            }
            else
            {
                Bits.SetShort(rom, offset, (ushort)((CheckAddressToEast - 0x7045) << 3));
                rom[offset++] |= CheckBitToEast;
                rom[offset++] |= (byte)(LocationToEast << 1);
            }
            if (!EnabledToSouth)
            {
                Bits.SetShort(rom, offset, 0xFFFF);
                offset += 2;
            }
            else
            {
                Bits.SetShort(rom, offset, (ushort)((CheckAddressToSouth - 0x7045) << 3));
                rom[offset++] |= CheckBitToSouth;
                rom[offset++] |= (byte)(LocationToSouth << 1);
            }
            if (!EnabledToWest)
            {
                Bits.SetShort(rom, offset, 0xFFFF);
                offset += 2;
            }
            else
            {
                Bits.SetShort(rom, offset, (ushort)((CheckAddressToWest - 0x7045) << 3));
                rom[offset++] |= CheckBitToWest;
                rom[offset++] |= (byte)(LocationToWest << 1);
            }
            if (!EnabledToNorth)
            {
                Bits.SetShort(rom, offset, 0xFFFF);
                offset += 2;
            }
            else
            {
                Bits.SetShort(rom, offset, (ushort)((CheckAddressToNorth - 0x7045) << 3));
                rom[offset++] |= CheckBitToNorth;
                rom[offset++] |= (byte)(LocationToNorth << 1);
            }
        }
 public void WriteLong(long v, ByteOrder byteOrder)
 {
     EnsureAvailable(Bits.LongSizeInBytes);
     Bits.WriteLong(Buffer, Pos, v, byteOrder == ByteOrder.BigEndian);
     Pos += Bits.LongSizeInBytes;
 }
 public override Scorer Scorer(AtomicReaderContext context, Bits acceptDocs)
 {
     Scorer subQueryScorer = subQueryWeight.Scorer(context, acceptDocs);
     if (subQueryScorer == null)
     {
         return null;
     }
     var valSrcScorers = new Scorer[valSrcWeights.Length];
     for (int i = 0; i < valSrcScorers.Length; i++)
     {
         valSrcScorers[i] = valSrcWeights[i].Scorer(context, acceptDocs);
     }
     return new CustomScorer(outerInstance, outerInstance.GetCustomScoreProvider(context), this, queryWeight,
         subQueryScorer, valSrcScorers);
 }
        /*  NOTE: This code depends on the form of Qil expressions generated by XPathPatternBuilder.
         *  More specifically, it recognizes the following two patterns:
         *
         *  A) /, *, @*, text(), comment(), processing-instruction():
         *      (And* $x:(IsType RefTo LiteralType))
         *
         *  B) foo, @ns:foo, processing-instruction('foo'):
         *      (And* $x:(And (IsType RefTo LiteralType) (Eq (NameOf RefTo) LiteralQName)))
         *
         *  where all RefTo refer to 'it', and LiteralType has exactly one NodeKind bit set.
         *
         *  If one of the patterns is recognized, we nip $x off of the nested And sequence:
         *      (And* (And2 (And1 $x:* $y:*) $z:*))  =>  (And* (And2 $y:* $z:*))
         */
        private void NipOffTypeNameCheck()
        {
            QilBinary[] leftPath = new QilBinary[4]; // Circular buffer for last 4 And nodes
            int         idx      = -1;               // Index of last element in leftPath
            QilNode     node     = _condition!;      // Walker through left path of the tree

            _nodeKind = XmlNodeKindFlags.None;
            _qname    = null;

            while (node.NodeType == QilNodeType.And)
            {
                node = (leftPath[++idx & 3] = (QilBinary)node).Left;
            }

            // Recognizing (IsType RefTo LiteralType)
            if (!(node.NodeType == QilNodeType.IsType))
            {
                return;
            }

            QilBinary isType = (QilBinary)node;

            if (!(isType.Left == _iterator && isType.Right.NodeType == QilNodeType.LiteralType))
            {
                return;
            }

            XmlNodeKindFlags nodeKinds = isType.Right.XmlType!.NodeKinds;

            if (!Bits.ExactlyOne((uint)nodeKinds))
            {
                return;
            }

            // Recognized pattern A, check for B
            QilNode x = isType;

            _nodeKind = nodeKinds;
            QilBinary lastAnd = leftPath[idx & 3];

            if (lastAnd != null && lastAnd.Right.NodeType == QilNodeType.Eq)
            {
                QilBinary eq = (QilBinary)lastAnd.Right;

                // Recognizing (Eq (NameOf RefTo) LiteralQName)
                if (eq.Left.NodeType == QilNodeType.NameOf &&
                    ((QilUnary)eq.Left).Child == _iterator && eq.Right.NodeType == QilNodeType.LiteralQName
                    )
                {
                    // Recognized pattern B
                    x      = lastAnd;
                    _qname = (QilName?)((QilLiteral)eq.Right).Value;
                    idx--;
                }
            }

            // Nip $x off the condition
            QilBinary and1 = leftPath[idx & 3];
            QilBinary and2 = leftPath[--idx & 3];

            if (and2 != null)
            {
                and2.Left = and1.Right;
            }
            else if (and1 != null)
            {
                _condition = and1.Right;
            }
            else
            {
                _condition = null;
            }
        }
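
The method above uses Bits.ExactlyOne to check that LiteralType carries a single NodeKind flag. A minimal sketch of such a power-of-two test, as an assumption about what the helper does:

        // Illustrative sketch: true when exactly one bit of x is set (x is a power of two).
        static bool ExactlyOne(uint x)
        {
            return x != 0 && (x & (x - 1)) == 0;
        }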
Example #49
 private bool _TestBit(Bits bit)
 {
     const ulong bitMask = 1;
     return ((_bitField & (bitMask << (int)bit)) != 0);
 }
        private void MaybeTrainCompressionDictionary(Table table, FixedSizeTree etagsTree)
        {
            // the idea is that we'll get better results by including the most recently modified documents,
            // iterating over the etag index, which is guaranteed to be always increasing
            var dataIds = ArrayPool <long> .Shared.Rent(256);

            var sizes = ArrayPool <UIntPtr> .Shared.Rent(256);

            try
            {
                int used         = 0;
                var totalSize    = 0;
                int totalSkipped = 0;

                using (var it = etagsTree.Iterate())
                {
                    if (it.SeekToLast() == false)
                    {
                        return; // empty table, nothing to train on
                    }
                    do
                    {
                        long id = it.CreateReaderForCurrent().ReadLittleEndianInt64();
                        table.DirectRead(id, out var size);
                        if (size > 32 * 1024)
                        {
                            if (totalSkipped++ > 16 * 1024)
                            {
                                return;  // we are scanning too much, no need to try this hard
                            }
                            // we skip documents that are too big: they will compress
                            // well on their own, and are likely *too* unique to add meaningfully to the
                            // dictionary
                            continue;
                        }

                        sizes[used]     = (UIntPtr)size;
                        dataIds[used++] = id;
                        totalSize      += size;
                    } while (used < 256 && it.MovePrev() && totalSize < 1024 * 1024);
                }

                if (used < 16)
                {
                    return;// too few samples to measure
                }
                var tx = table._tx;
                using (tx.Allocator.Allocate(totalSize, out var buffer))
                {
                    var cur = buffer.Ptr;
                    for (int i = 0; i < used; i++)
                    {
                        var ptr = table.DirectRead(dataIds[i], out var size);
                        Memory.Copy(cur, ptr, size);
                        cur += size;
                    }

                    using (tx.Allocator.Allocate(
                               // the dictionary
                               Constants.Storage.PageSize - PageHeader.SizeOf - sizeof(CompressionDictionaryInfo)
                               , out var dictionaryBuffer))
                    {
                        Span <byte> dictionaryBufferSpan = dictionaryBuffer.ToSpan();
                        ZstdLib.Train(new ReadOnlySpan <byte>(buffer.Ptr, totalSize),
                                      new ReadOnlySpan <UIntPtr>(sizes, 0, used),
                                      ref dictionaryBufferSpan);

                        var dictionariesTree = tx.CreateTree(TableSchema.CompressionDictionariesSlice);

                        var newId = (int)(dictionariesTree.State.NumberOfEntries + 1);

                        using var compressionDictionary = new ZstdLib.CompressionDictionary(newId, dictionaryBuffer.Ptr, dictionaryBufferSpan.Length, 3);

                        if (ShouldReplaceDictionary(tx, compressionDictionary) == false)
                        {
                            return;
                        }

                        table.CurrentCompressionDictionaryId           = newId;
                        compressionDictionary.ExpectedCompressionRatio = GetCompressionRatio(CompressedBuffer.Length, RawBuffer.Length);

                        var rev = Bits.SwapBytes(newId);
                        using (Slice.External(tx.Allocator, (byte *)&rev, sizeof(int), out var slice))
                            using (dictionariesTree.DirectAdd(slice, sizeof(CompressionDictionaryInfo) + dictionaryBufferSpan.Length, out var dest))
                            {
                                *((CompressionDictionaryInfo *)dest) =
                                    new CompressionDictionaryInfo {
                                    ExpectedCompressionRatio = compressionDictionary.ExpectedCompressionRatio
                                };
                                Memory.Copy(dest + sizeof(CompressionDictionaryInfo), dictionaryBuffer.Ptr, dictionaryBufferSpan.Length);
                            }

                        tx.LowLevelTransaction.OnDispose += RecreateRecoveryDictionaries;
                    }
                }
            }
            finally
            {
                ArrayPool <long> .Shared.Return(dataIds);

                ArrayPool <UIntPtr> .Shared.Return(sizes);
            }
        }
Example #51
 public override DocIdSet GetDocIdSet(AtomicReaderContext context, Bits acceptDocs)
 {
     DocIdSet innerNullIteratorSet = new DocIdSetAnonymousInnerClassHelper2(this);
     return new FilteredDocIdSetAnonymousInnerClassHelper2(this, innerNullIteratorSet);
 }
Example #52
 private static int CalculateCompressionAcceleration(int size)
 {
     return(Bits.CeilLog2(size));
 }
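
CalculateCompressionAcceleration above maps the input size to Bits.CeilLog2(size). A minimal sketch, assuming CeilLog2 returns the smallest p with 2^p >= size for positive sizes (illustrative, not the actual Sparrow code):

 // Illustrative sketch: smallest p such that (1 << p) >= size, for size >= 1.
 static int CeilLog2(int size)
 {
     int p = 0;
     while ((1 << p) < size)
     {
         p++;
     }
     return p;
 }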
 public override DocsEnum Docs(Bits liveDocs, DocsEnum reuse, int flags) // ignored
 {
     TVDocsEnum docsEnum;
     if (reuse != null && reuse is TVDocsEnum)
     {
         docsEnum = (TVDocsEnum)reuse;
     }
     else
     {
         docsEnum = new TVDocsEnum();
     }
     docsEnum.Reset(liveDocs, Freq);
     return docsEnum;
 }
Example #54
 /// <summary>
 /// Line Spectral Pair Unquantification.
 /// </summary>
 /// <param name="lsp">
 /// Line Spectral Pairs table.
 /// </param>
 /// <param name="order"></param>
 /// <param name="bits">
 /// Speex bits buffer.
 /// </param>
 public abstract void unquant(float[] lsp, int order, Bits bits);
 public virtual void Reset(Bits liveDocs, int freq)
 {
     this.LiveDocs = liveDocs;
     this.Freq_Renamed = freq;
     this.Doc = -1;
     DidNext = false;
 }
        public IDisposable GetTemporaryPage(LowLevelTransaction tx, int pageSize, out TemporaryPage tmp)
        {
            if (pageSize < Constants.Storage.PageSize)
            {
                ThrowInvalidPageSize(pageSize);
            }

            if (pageSize > Constants.Compression.MaxPageSize)
            {
                ThrowPageSizeTooBig(pageSize);
            }

            Debug.Assert(pageSize == Bits.NextPowerOf2(pageSize));

            EnsureInitialized();

            var index = GetTempPagesPoolIndex(pageSize);

            if (_pool.Length <= index)
            {
                lock (_expandPoolLock)
                {
                    if (_pool.Length <= index) // someone could get the lock and add it meanwhile
                    {
                        var oldSize = _pool.Length;

                        var newPool = new ConcurrentQueue <DecompressionBuffer> [index + 1];
                        Array.Copy(_pool, newPool, _pool.Length);
                        for (var i = oldSize; i < newPool.Length; i++)
                        {
                            newPool[i] = new ConcurrentQueue <DecompressionBuffer>();
                        }
                        _pool = newPool;
                    }
                }
            }

            DecompressionBuffer buffer;

            var queue = _pool[index];

            tmp = null;

            while (queue.TryDequeue(out buffer))
            {
                if (buffer.CanReuse == false)
                {
                    continue;
                }

                try
                {
                    buffer.EnsureValidPointer(tx);
                    tmp = buffer.TempPage;
                    break;
                }
                catch (ObjectDisposedException)
                {
                    // we could dispose the pager during the cleanup
                }
            }

            if (tmp == null)
            {
                var allocationInPages = pageSize / Constants.Storage.PageSize;

                lock (_decompressionPagerLock) // once we fill up the pool we won't be allocating additional pages frequently
                {
                    if (_lastUsedPage + allocationInPages > _maxNumberOfPagesInScratchBufferPool)
                    {
                        CreateNewBuffersPager(_options.MaxScratchBufferSize);
                    }

                    try
                    {
                        var numberOfPagesBeforeAllocate = _compressionPager.NumberOfAllocatedPages;

                        _compressionPager.EnsureContinuous(_lastUsedPage, allocationInPages);

                        if (_compressionPager.NumberOfAllocatedPages > numberOfPagesBeforeAllocate)
                        {
                            _scratchSpaceMonitor.Increase((_compressionPager.NumberOfAllocatedPages - numberOfPagesBeforeAllocate) * Constants.Storage.PageSize);
                        }
                    }
                    catch (InsufficientMemoryException)
                    {
                        // RavenDB-10830: failed to lock memory of temp buffers in encrypted db, let's create new file with initial size

                        CreateNewBuffersPager(DecompressedPagesCache.Size * Constants.Compression.MaxPageSize);
                        throw;
                    }

                    buffer = new DecompressionBuffer(_compressionPager, _lastUsedPage, pageSize, this, index, tx);

                    _lastUsedPage += allocationInPages;

                    void CreateNewBuffersPager(long size)
                    {
                        _oldPagers        = _oldPagers.Append(_compressionPager);
                        _compressionPager = CreateDecompressionPager(size);
                        _lastUsedPage     = 0;
                    }
                }

                tmp = buffer.TempPage;
            }

            Interlocked.Add(ref _currentlyUsedBytes, pageSize);

            return(tmp.ReturnTemporaryPageToPool);
        }
 public override DocsAndPositionsEnum DocsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse, int flags)
 {
     return ActualEnum.DocsAndPositions(liveDocs, reuse, flags);
 }
 /// <exception cref="System.IO.IOException"></exception>
 public virtual void WriteLong(int position, long v)
 {
     Bits.WriteLong(Buffer, position, v, _isBigEndian);
 }
 public ObjectTypesSupported WithBit(Bits bit, bool set = true)
 {
     return new ObjectTypesSupported(_bitstring.WithBit((int)bit, set));
 }
 public void WriteLong(int position, long v, ByteOrder byteOrder)
 {
     Bits.WriteLong(Buffer, position, v, byteOrder == ByteOrder.BigEndian);
 }