/// <summary>
 /// Creates a new <seealso cref="RecyclingByteBlockAllocator"/>
 /// </summary>
 /// <param name="blockSize">
 ///          the block size in bytes </param>
 /// <param name="maxBufferedBlocks">
 ///          maximum number of buffered byte blocks </param>
 /// <param name="bytesUsed">
 ///          <seealso cref="Counter"/> reference that counts internally allocated bytes </param>
 public RecyclingByteBlockAllocator(int blockSize, int maxBufferedBlocks, Counter bytesUsed)
     : base(blockSize)
 {
     FreeByteBlocks = new byte[maxBufferedBlocks][];
     this.MaxBufferedBlocks_Renamed = maxBufferedBlocks;
     this.BytesUsed_Renamed = bytesUsed;
 }
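A minimal usage sketch (not part of the snippet above; it assumes the port keeps NextBuffer on ByteBlockPool and Get on Counter, matching the Java API):

 // Plug the allocator into a ByteBlockPool so the supplied Counter tracks pool allocations.
 Counter bytesUsed = Counter.NewCounter();
 var allocator = new RecyclingByteBlockAllocator(ByteBlockPool.BYTE_BLOCK_SIZE, 64, bytesUsed);
 var pool = new ByteBlockPool(allocator);
 pool.NextBuffer();                 // first block is handed out by the allocator
 long tracked = bytesUsed.Get();    // reflects the bytes currently allocated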
Example #2
 /// <summary>
 /// Creates a new <seealso cref="BytesRefArray"/> with a counter to track allocated bytes
 /// </summary>
 public BytesRefArray(Counter bytesUsed)
 {
     this.Pool = new ByteBlockPool(new ByteBlockPool.DirectTrackingAllocator(bytesUsed));
     Pool.NextBuffer();
     bytesUsed.AddAndGet(RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + RamUsageEstimator.NUM_BYTES_INT);
     this.BytesUsed = bytesUsed;
 }
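A minimal usage sketch (an assumption-laden illustration: Append and Get are taken from the Java BytesRefArray API and may differ slightly in this port):

 Counter bytesUsed = Counter.NewCounter();
 var list = new BytesRefArray(bytesUsed);
 list.Append(new BytesRef("foo"));
 list.Append(new BytesRef("bar"));
 var spare = new BytesRef();
 list.Get(spare, 0);                // spare now points at "foo"
 long tracked = bytesUsed.Get();    // bytes accounted for by the backing ByteBlockPool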
Example #3
 public SortedDocValuesWriter(FieldInfo fieldInfo, Counter iwBytesUsed)
 {
     this.FieldInfo = fieldInfo;
     this.IwBytesUsed = iwBytesUsed;
     Hash = new BytesRefHash(new ByteBlockPool(new ByteBlockPool.DirectTrackingAllocator(iwBytesUsed)), BytesRefHash.DEFAULT_CAPACITY, new BytesRefHash.DirectBytesStartArray(BytesRefHash.DEFAULT_CAPACITY, iwBytesUsed));
     Pending = new AppendingDeltaPackedLongBuffer(PackedInts.COMPACT);
     BytesUsed = Pending.RamBytesUsed();
     iwBytesUsed.AddAndGet(BytesUsed);
 }
        /// <summary>
        /// Initializes searcher with a document set.
        /// </summary>
        public override void SetUp()
        {
            base.SetUp();
            counter = Lucene.Net.Util.Counter.NewCounter(true); 
            counterThread = new TimeLimitingCollector.TimerThread(counter);
            counterThread.Start();
            string[] docText = {
                "docThatNeverMatchesSoWeCanRequireLastDocCollectedToBeGreaterThanZero",
                "one blah three",
                "one foo three multiOne",
                "one foobar three multiThree",
                "blueberry pancakes",
                "blueberry pie",
                "blueberry strudel",
                "blueberry pizza",
            };
            directory = NewDirectory();
            RandomIndexWriter iw = new RandomIndexWriter(Random(), directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));

            for (int i = 0; i < N_DOCS; i++)
            {
                Add(docText[i % docText.Length], iw);
            }
            reader = iw.Reader;
            iw.Dispose();
            searcher = NewSearcher(reader);

            BooleanQuery booleanQuery = new BooleanQuery();
            booleanQuery.Add(new TermQuery(new Term(FIELD_NAME, "one")), BooleanClause.Occur.SHOULD);
            // start from 1, so that the 0th doc never matches
            for (int i = 1; i < docText.Length; i++)
            {
                string[] docTextParts = Regex.Split(docText[i], "\\s+");
                foreach (string docTextPart in docTextParts)
                {
                    // large query so that search will be longer
                    booleanQuery.Add(new TermQuery(new Term(FIELD_NAME, docTextPart)), BooleanClause.Occur.SHOULD);
                }
            }

            query = booleanQuery;

            // warm the searcher
            searcher.Search(query, null, 1000);
        }
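The TimerThread started in SetUp has to be stopped when the test finishes; a sketch of the matching TearDown, assuming the usual StopTimer/Join/Dispose pattern from the corresponding Lucene test:

         public override void TearDown()
         {
             counterThread.StopTimer();   // ask the timer thread to exit its loop
             counterThread.Join();        // wait until it has actually stopped
             reader.Dispose();
             directory.Dispose();
             base.TearDown();
         }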
Example #5
 /// <summary>
 /// Creates a new <seealso cref="BytesRefHash"/>
 /// </summary>
 public BytesRefHash(ByteBlockPool pool, int capacity, BytesStartArray bytesStartArray)
 {
     HashSize = capacity;
     HashHalfSize = HashSize >> 1;
     HashMask = HashSize - 1;
     this.Pool = pool;
     Ids = new int[HashSize];
     Arrays.Fill(Ids, -1);
     this.bytesStartArray = bytesStartArray;
     BytesStart = bytesStartArray.Init();
     BytesUsed = bytesStartArray.BytesUsed() == null ? Counter.NewCounter() : bytesStartArray.BytesUsed();
     BytesUsed.AddAndGet(HashSize * RamUsageEstimator.NUM_BYTES_INT);
 }
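A minimal usage sketch (assumes Add and Get follow the Java BytesRefHash API, with the pool and start array wired up as in the snippets above):

 Counter bytesUsed = Counter.NewCounter();
 var pool = new ByteBlockPool(new ByteBlockPool.DirectTrackingAllocator(bytesUsed));
 var hash = new BytesRefHash(pool, BytesRefHash.DEFAULT_CAPACITY,
     new BytesRefHash.DirectBytesStartArray(BytesRefHash.DEFAULT_CAPACITY, bytesUsed));
 int id = hash.Add(new BytesRef("foo"));   // >= 0 for a new entry, negative if it already exists
 var spare = new BytesRef();
 hash.Get(id, spare);                      // spare now holds "foo"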
Example #6
 public DirectBytesStartArray(int initSize, Counter counter)
 {
     this.BytesUsed_Renamed = counter;
     this.InitSize = initSize;
 }
Example #7
 public DirectTrackingAllocator(int blockSize, Counter bytesUsed)
     : base(blockSize)
 {
     this.BytesUsed = bytesUsed;
 }
Example #8
 public DirectTrackingAllocator(Counter bytesUsed)
     : this(BYTE_BLOCK_SIZE, bytesUsed)
 {
 }
 public IntBlockAllocator(Counter bytesUsed)
     : base(IntBlockPool.INT_BLOCK_SIZE)
 {
     this.BytesUsed = bytesUsed;
 }
        public DocumentsWriterPerThread(string segmentName, Directory directory, LiveIndexWriterConfig indexWriterConfig, InfoStream infoStream, DocumentsWriterDeleteQueue deleteQueue, FieldInfos.Builder fieldInfos)
        {
            this.DirectoryOrig = directory;
            this.Directory = new TrackingDirectoryWrapper(directory);
            this.FieldInfos = fieldInfos;
            this.IndexWriterConfig = indexWriterConfig;
            this.InfoStream = infoStream;
            this.Codec = indexWriterConfig.Codec;
            this.docState = new DocState(this, infoStream);
            this.docState.Similarity = indexWriterConfig.Similarity;
            bytesUsed = Counter.NewCounter();
            ByteBlockAllocator = new DirectTrackingAllocator(bytesUsed);
            PendingUpdates = new BufferedUpdates();
            intBlockAllocator = new IntBlockAllocator(bytesUsed);
            this.DeleteQueue = deleteQueue;
            Debug.Assert(numDocsInRAM == 0, "num docs " + numDocsInRAM);
            PendingUpdates.Clear();
            DeleteSlice = deleteQueue.NewSlice();

            SegmentInfo_Renamed = new SegmentInfo(DirectoryOrig, Constants.LUCENE_MAIN_VERSION, segmentName, -1, false, Codec, null);
            Debug.Assert(numDocsInRAM == 0);
            if (INFO_VERBOSE && infoStream.IsEnabled("DWPT"))
            {
                infoStream.Message("DWPT", Thread.CurrentThread.Name + " init seg=" + segmentName + " delQueue=" + deleteQueue);
            }
            // this should be the last call in the ctor
            // it really sucks that we need to pull this within the ctor and pass this ref to the chain!
            Consumer = indexWriterConfig.IndexingChain.GetChain(this);
        }
Example #11
 /// <summary>
 /// Creates a new <seealso cref="RecyclingByteBlockAllocator"/> with a block size of
 /// <seealso cref="ByteBlockPool#BYTE_BLOCK_SIZE"/> and an upper buffered blocks limit of
 /// <seealso cref="#DEFAULT_BUFFERED_BLOCKS"/> (64).
 ///
 /// </summary>
 public RecyclingByteBlockAllocator()
     : this(ByteBlockPool.BYTE_BLOCK_SIZE, 64, Counter.NewCounter(false))
 {
 }
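A short sketch of the parameterless constructor in use; it supplies its own non-thread-safe Counter (Counter.NewCounter(false)), so no external counter is needed:

 var allocator = new RecyclingByteBlockAllocator();   // BYTE_BLOCK_SIZE blocks, at most 64 buffered
 var pool = new ByteBlockPool(allocator);
 pool.NextBuffer();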
Example #12
 /// <summary>
 /// Creates a new <seealso cref="RecyclingByteBlockAllocator"/>.
 /// </summary>
 /// <param name="blockSize">
 ///          the block size in bytes </param>
 /// <param name="maxBufferedBlocks">
 ///          maximum number of buffered byte blocks </param>
 public RecyclingByteBlockAllocator(int blockSize, int maxBufferedBlocks)
     : this(blockSize, maxBufferedBlocks, Counter.NewCounter(false))
 {
 }
Example #13
 public DirectBytesStartArray(int initSize)
     : this(initSize, Counter.NewCounter())
 {
 }
Example #14
 public DirectBytesStartArray(int initSize, Counter counter)
 {
     this.bytesUsed  = counter;
     this.m_initSize = initSize;
 }
Example #15
 public DirectBytesStartArray(int initSize, Counter counter)
 {
     this.BytesUsed_Renamed = counter;
     this.InitSize          = initSize;
 }