Example #1
 /// <summary>Reset inputs so that the test run would behave, input-wise, as if it just started.</summary>
 public virtual void ResetInputs()
 {
     m_source.PrintStatistics("docs");
     // Re-initialize, since per-round properties may have changed.
     SetConfig(m_config, m_source);
     m_source.ResetInputs();
     numDocsCreated.Set(0);
     ResetLeftovers();
 }
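Every example on this page resets shared state by calling Set(0) on an atomic counter (numDocsCreated, numTerms, bytesUsed, and so on). A minimal sketch of such a counter using only System.Threading, assuming nothing from Lucene.NET's own support types (the AtomicCounter name is hypothetical; the examples above use the library's atomic wrapper types):

 using System.Threading;

 // Hypothetical stand-in for the atomic counter type used above.
 public sealed class AtomicCounter
 {
     private int value;

     public int Get() => Volatile.Read(ref value);

     // Set is a plain volatile write: unlike the increment operations,
     // it overwrites unconditionally, so no compare-and-swap is needed.
     public void Set(int newValue) => Volatile.Write(ref value, newValue);

     public int IncrementAndGet() => Interlocked.Increment(ref value);

     public int DecrementAndGet() => Interlocked.Decrement(ref value);

     public int AddAndGet(int delta) => Interlocked.Add(ref value, delta);
 }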
Example #2
 public virtual void Reset(Random random)
 {
     lock (this)
     {
         Dispose();
         Open(random);
         Id.Set(0);
     }
 }
 public virtual void Clear()
 {
     lock (this)
     {
         updates.Clear();
         nextGen = 1;
         numTerms.Set(0);
         bytesUsed.Set(0);
     }
 }
Example #4
 internal virtual void Clear()
 {
     terms.Clear();
     queries.Clear();
     docIDs.Clear();
     numericUpdates.Clear();
     binaryUpdates.Clear();
     numTermDeletes.Set(0);
     numNumericUpdates.Set(0);
     numBinaryUpdates.Set(0);
     bytesUsed.Set(0);
 }
Example #5
        // Purge dead threads
        private void Purge()
        {
            lock (hardRefs)
            {
                int stillAliveCount = 0;
                // Placing in try-finally to ensure HardRef threads are removed in the case of an exception
                List<Thread> removed = new List<Thread>();
                try
                {
                    foreach (Thread t in hardRefs.Keys)
                    {
                        if (!t.IsAlive)
                        {
                            removed.Add(t);
                        }
                        else
                        {
                            stillAliveCount++;
                        }
                    }
                }
                finally
                {
                    foreach (Thread thd in removed)
                    {
                        hardRefs.Remove(thd);
                    }
                }

                int nextCount = (1 + stillAliveCount) * PURGE_MULTIPLIER;
                if (nextCount <= 0)
                {
                    // defensive: int overflow!
                    nextCount = 1000000;
                }

                countUntilPurge.Set(nextCount);
            }
        }
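In the surrounding class this purge is amortized: each accessor decrements a shared countdown, and only the access that drives it to zero pays for the dead-thread scan, which is why Purge finishes by recomputing countUntilPurge from the number of surviving threads. A hedged sketch of that trigger, reusing Purge and countUntilPurge from above (the MaybePurge name is an assumption, not the library's):

        // Called on every access to the thread-local store; runs the
        // O(n) scan in Purge() only about once per PURGE_MULTIPLIER
        // accesses per live thread.
        private void MaybePurge()
        {
            if (countUntilPurge.DecrementAndGet() == 0)
            {
                Purge(); // rescans hardRefs and resets countUntilPurge
            }
        }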
Example #6
        public void Put(TKey key, TValue value)
        {
            IDictionary<TKey, TValue> primary;
            IDictionary<TKey, TValue> secondary;

            if (swapped)
            {
                primary   = cache2;
                secondary = cache1;
            }
            else
            {
                primary   = cache1;
                secondary = cache2;
            }
            primary[key] = value;

            if (countdown.DecrementAndGet() == 0)
            {
                // Time to swap

                // NOTE: there is saturation risk here, that the
                // thread that's doing the clear() takes too long to
                // do so, while other threads continue to add to
                // primary, but in practice this seems not to be an
                // issue (see LUCENE-2075 for benchmark & details)

                // First, clear secondary
                secondary.Clear();

                // Second, swap
                swapped = !swapped;

                // Third, reset countdown
                countdown.Set(maxSize);
            }
        }
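Put above is one half of a "double barrel" LRU cache: two plain dictionaries alternate as primary, and once maxSize puts have accumulated, the stale barrel is cleared wholesale instead of evicting entry by entry. The matching lookup consults the primary first and copies secondary hits forward so they survive the next swap; a sketch under those assumptions (modeled on Lucene's DoubleBarrelLRUCache, but this body is illustrative rather than the library's code):

        public TValue Get(TKey key)
        {
            IDictionary<TKey, TValue> primary   = swapped ? cache2 : cache1;
            IDictionary<TKey, TValue> secondary = swapped ? cache1 : cache2;

            if (primary.TryGetValue(key, out TValue value))
            {
                return value;
            }
            // A secondary hit is copied into the primary so it survives
            // the next swap; this is what makes the cache roughly LRU.
            if (secondary.TryGetValue(key, out value))
            {
                Put(key, value);
                return value;
            }
            return default(TValue);
        }

In the actual library both barrels are concurrent maps so that Get and Put can race safely; the plain IDictionary here just mirrors the fields as they appear above.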
Example #7
        /// <summary>
        /// Replaces the current taxonomy with the given one. This method should
        /// generally be called in conjunction with
        /// <see cref="IndexWriter.AddIndexes(Directory[])"/> to replace both the taxonomy
        /// as well as the search index content.
        /// </summary>
        public virtual void ReplaceTaxonomy(Directory taxoDir)
        {
            lock (this)
            {
                // replace the taxonomy by doing IW optimized operations
                indexWriter.DeleteAll();
                indexWriter.AddIndexes(taxoDir);
                shouldRefreshReaderManager = true;
                InitReaderManager(); // ensure that it's initialized
                RefreshReaderManager();
                nextID     = indexWriter.MaxDoc;
                taxoArrays = null; // must nullify so that it's re-computed next time it's needed

                // need to clear the cache, so that addCategory won't accidentally return
                // old categories that are in the cache.
                cache.Clear();
                cacheIsComplete = false;
                shouldFillCache = true;
                cacheMisses.Set(0);

                // update indexEpoch, as replacing the taxonomy is just as if it had been recreated
                ++indexEpoch;
            }
        }
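As the summary notes, ReplaceTaxonomy is meant to be paired with IndexWriter.AddIndexes so the search index and its taxonomy are swapped together. A hedged usage sketch, assuming Lucene.Net.Index, Lucene.Net.Store, and Lucene.Net.Facet.Taxonomy.Directory are in scope; the wrapper method and variable names are illustrative, not part of the library:

        public static void ReplaceIndexAndTaxonomy(
            IndexWriter mainWriter,
            DirectoryTaxonomyWriter taxoWriter,
            Directory newIndexDir,
            Directory newTaxoDir)
        {
            // Replace the search index content first...
            mainWriter.DeleteAll();
            mainWriter.AddIndexes(newIndexDir);
            // ...then swap the taxonomy to match, so facet ordinals in
            // the new index resolve against the new taxonomy.
            taxoWriter.ReplaceTaxonomy(newTaxoDir);
            mainWriter.Commit();
            taxoWriter.Commit();
        }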
 public override void Run()
 {
     if (VERBOSE)
     {
         Console.WriteLine(Thread.CurrentThread.Name + ": launch search thread");
     }
     while (Environment.TickCount < stopTimeMS)
     {
         try
         {
             IndexSearcher s = outerInstance.GetCurrentSearcher();
             try
             {
                 // Verify 1) IW is correctly setting
                 // diagnostics, and 2) segment warming for
                 // merged segments is actually happening:
                 foreach (AtomicReaderContext sub in s.IndexReader.Leaves)
                 {
                     SegmentReader segReader = (SegmentReader)sub.Reader;
                      IDictionary<string, string> diagnostics = segReader.SegmentInfo.Info.Diagnostics;
                     assertNotNull(diagnostics);
                     string source;
                     diagnostics.TryGetValue("source", out source);
                     assertNotNull(source);
                     if (source.Equals("merge", StringComparison.Ordinal))
                     {
                         assertTrue("sub reader " + sub + " wasn't warmed: warmed=" + outerInstance.warmed + " diagnostics=" + diagnostics + " si=" + segReader.SegmentInfo,
                                    !outerInstance.m_assertMergedSegmentsWarmed || outerInstance.warmed.ContainsKey(segReader.core));
                     }
                 }
                 if (s.IndexReader.NumDocs > 0)
                 {
                     outerInstance.SmokeTestSearcher(s);
                     Fields fields = MultiFields.GetFields(s.IndexReader);
                     if (fields == null)
                     {
                         continue;
                     }
                     Terms terms = fields.GetTerms("body");
                     if (terms == null)
                     {
                         continue;
                     }
                     TermsEnum termsEnum     = terms.GetIterator(null);
                     int       seenTermCount = 0;
                     int       shift;
                     int       trigger;
                     if (totTermCount.Get() < 30)
                     {
                         shift   = 0;
                         trigger = 1;
                     }
                     else
                     {
                         trigger = totTermCount.Get() / 30;
                         shift   = Random.Next(trigger);
                     }
                     while (Environment.TickCount < stopTimeMS)
                     {
                         BytesRef term = termsEnum.Next();
                         if (term == null)
                         {
                             totTermCount.Set(seenTermCount);
                             break;
                         }
                         seenTermCount++;
                         // search 30 terms
                         if ((seenTermCount + shift) % trigger == 0)
                         {
                             //if (VERBOSE) {
                             //System.out.println(Thread.currentThread().getName() + " now search body:" + term.Utf8ToString());
                             //}
                             totHits.AddAndGet(outerInstance.RunQuery(s, new TermQuery(new Term("body", term))));
                         }
                     }
                     //if (VERBOSE) {
                     //System.out.println(Thread.currentThread().getName() + ": search done");
                     //}
                 }
             }
             finally
             {
                 outerInstance.ReleaseSearcher(s);
             }
         }
         catch (Exception t)
         {
             Console.WriteLine(Thread.CurrentThread.Name + ": hit exc");
             outerInstance.m_failed.Set(true);
             Console.WriteLine(t.ToString());
             throw new Exception(t.ToString(), t);
         }
     }
 }
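The shift/trigger pair in the loop above samples roughly 30 of the totTermCount known terms: every trigger-th term fires a TermQuery, offset by a random shift so concurrent threads probe different terms. A self-contained illustration of just that arithmetic (the demo class is hypothetical):

 using System;

 static class TermSamplingDemo
 {
     static void Main()
     {
         int totTermCount = 300; // pretend the last pass saw 300 terms
         Random random = new Random();

         int shift, trigger;
         if (totTermCount < 30)
         {
             shift = 0;
             trigger = 1; // fewer than 30 terms: search every one
         }
         else
         {
             trigger = totTermCount / 30;  // 10: search every 10th term...
             shift = random.Next(trigger); // ...starting at a random offset
         }

         int searched = 0;
         for (int seenTermCount = 1; seenTermCount <= totTermCount; seenTermCount++)
         {
             if ((seenTermCount + shift) % trigger == 0)
             {
                 searched++; // the thread above would run a TermQuery here
             }
         }
         Console.WriteLine($"searched {searched} of {totTermCount} terms"); // prints 30
     }
 }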
        private readonly IDictionary<SegmentCoreReaders, bool?> warmed = new WeakDictionary<SegmentCoreReaders, bool?>(); //new ConcurrentHashMapWrapper<SegmentCoreReaders, bool?>(new HashMap<SegmentCoreReaders, bool?>());
        // Collections.synchronizedMap(new WeakHashMap<SegmentCoreReaders, bool?>());

        public virtual void RunTest(string testName)
        {
            failed.Set(false);
            addCount.Set(0);
            delCount.Set(0);
            packCount.Set(0);

            long t0 = Environment.TickCount;

            Random        random  = new Random(Random().Next());
            LineFileDocs  docs    = new LineFileDocs(random, DefaultCodecSupportsDocValues());
            DirectoryInfo tempDir = CreateTempDir(testName);

            dir = GetDirectory(NewMockFSDirectory(tempDir)); // some subclasses rely on this being MDW
            if (dir is BaseDirectoryWrapper)
            {
                ((BaseDirectoryWrapper)dir).CheckIndexOnClose = false; // don't double-checkIndex, we do it ourselves.
            }
            MockAnalyzer analyzer = new MockAnalyzer(Random());

            analyzer.MaxTokenLength = TestUtil.NextInt(Random(), 1, IndexWriter.MAX_TERM_LENGTH);
            IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetInfoStream(new FailOnNonBulkMergesInfoStream());

            if (LuceneTestCase.TEST_NIGHTLY)
            {
                // newIWConfig makes smallish max seg size, which
                // results in tons and tons of segments for this test
                // when run nightly:
                MergePolicy mp = conf.MergePolicy;
                if (mp is TieredMergePolicy)
                {
                    ((TieredMergePolicy)mp).MaxMergedSegmentMB = 5000.0;
                }
                else if (mp is LogByteSizeMergePolicy)
                {
                    ((LogByteSizeMergePolicy)mp).MaxMergeMB = 1000.0;
                }
                else if (mp is LogMergePolicy)
                {
                    ((LogMergePolicy)mp).MaxMergeDocs = 100000;
                }
            }

            conf.SetMergedSegmentWarmer(new IndexReaderWarmerAnonymousInnerClassHelper(this));

            if (VERBOSE)
            {
                conf.SetInfoStream(new PrintStreamInfoStreamAnonymousInnerClassHelper(this, Console.Out));
            }
            writer = new IndexWriter(dir, conf);
            TestUtil.ReduceOpenFiles(writer);

            TaskScheduler es = Random().NextBoolean() ? null : TaskScheduler.Default;

            DoAfterWriter(es);

            int NUM_INDEX_THREADS = TestUtil.NextInt(Random(), 2, 4);

            int RUN_TIME_SEC = LuceneTestCase.TEST_NIGHTLY ? 300 : RANDOM_MULTIPLIER;

            ISet<string>             delIDs     = new ConcurrentHashSet<string>(new HashSet<string>());
            ISet<string>             delPackIDs = new ConcurrentHashSet<string>(new HashSet<string>());
            ConcurrentQueue<SubDocs> allSubDocs = new ConcurrentQueue<SubDocs>();

            long stopTime = Environment.TickCount + (RUN_TIME_SEC * 1000);

            ThreadClass[] indexThreads = LaunchIndexingThreads(docs, NUM_INDEX_THREADS, stopTime, delIDs, delPackIDs, allSubDocs.ToList());

            if (VERBOSE)
            {
                Console.WriteLine("TEST: DONE start " + NUM_INDEX_THREADS + " indexing threads [" + (Environment.TickCount - t0) + " ms]");
            }

            // Let index build up a bit
            Thread.Sleep(100);

            DoSearching(es, stopTime);

            if (VERBOSE)
            {
                Console.WriteLine("TEST: all searching done [" + (Environment.TickCount - t0) + " ms]");
            }

            for (int thread = 0; thread < indexThreads.Length; thread++)
            {
                indexThreads[thread].Join();
            }

            if (VERBOSE)
            {
                Console.WriteLine("TEST: done join indexing threads [" + (Environment.TickCount - t0) + " ms]; addCount=" + addCount + " delCount=" + delCount);
            }

            IndexSearcher s = FinalSearcher;

            if (VERBOSE)
            {
                Console.WriteLine("TEST: finalSearcher=" + s);
            }

            assertFalse(failed.Get());

            bool doFail = false;

            // Verify: make sure delIDs are in fact deleted:
            foreach (string id in delIDs)
            {
                TopDocs hits = s.Search(new TermQuery(new Term("docid", id)), 1);
                if (hits.TotalHits != 0)
                {
                    Console.WriteLine("doc id=" + id + " is supposed to be deleted, but got " + hits.TotalHits + " hits; first docID=" + hits.ScoreDocs[0].Doc);
                    doFail = true;
                }
            }

            // Verify: make sure delPackIDs are in fact deleted:
            foreach (string id in delPackIDs)
            {
                TopDocs hits = s.Search(new TermQuery(new Term("packID", id)), 1);
                if (hits.TotalHits != 0)
                {
                    Console.WriteLine("packID=" + id + " is supposed to be deleted, but got " + hits.TotalHits + " matches");
                    doFail = true;
                }
            }

            // Verify: make sure each group of sub-docs are still in docID order:
            foreach (SubDocs subDocs in allSubDocs.ToList())
            {
                TopDocs hits = s.Search(new TermQuery(new Term("packID", subDocs.packID)), 20);
                if (!subDocs.deleted)
                {
                    // We sort by relevance but the scores should be identical so sort falls back to by docID:
                    if (hits.TotalHits != subDocs.subIDs.Count)
                    {
                        Console.WriteLine("packID=" + subDocs.packID + ": expected " + subDocs.subIDs.Count + " hits but got " + hits.TotalHits);
                        doFail = true;
                    }
                    else
                    {
                        int lastDocID  = -1;
                        int startDocID = -1;
                        foreach (ScoreDoc scoreDoc in hits.ScoreDocs)
                        {
                            int docID = scoreDoc.Doc;
                            if (lastDocID != -1)
                            {
                                assertEquals(1 + lastDocID, docID);
                            }
                            else
                            {
                                startDocID = docID;
                            }
                            lastDocID = docID;
                            Document doc = s.Doc(docID);
                            assertEquals(subDocs.packID, doc.Get("packID"));
                        }

                        lastDocID = startDocID - 1;
                        foreach (string subID in subDocs.subIDs)
                        {
                            hits = s.Search(new TermQuery(new Term("docid", subID)), 1);
                            assertEquals(1, hits.TotalHits);
                            int docID = hits.ScoreDocs[0].Doc;
                            if (lastDocID != -1)
                            {
                                assertEquals(1 + lastDocID, docID);
                            }
                            lastDocID = docID;
                        }
                    }
                }
                else
                {
                    // Pack was deleted -- make sure its docs are
                    // deleted.  We can't verify packID is deleted
                    // because we can re-use packID for update:
                    foreach (string subID in subDocs.subIDs)
                    {
                        assertEquals(0, s.Search(new TermQuery(new Term("docid", subID)), 1).TotalHits);
                    }
                }
            }

            // Verify: make sure all not-deleted docs are in fact
            // not deleted:
            int endID = Convert.ToInt32(docs.NextDoc().Get("docid"), CultureInfo.InvariantCulture);

            docs.Dispose();

            for (int id = 0; id < endID; id++)
            {
                string stringID = id.ToString(CultureInfo.InvariantCulture);
                if (!delIDs.Contains(stringID))
                {
                    TopDocs hits = s.Search(new TermQuery(new Term("docid", stringID)), 1);
                    if (hits.TotalHits != 1)
                    {
                        Console.WriteLine("doc id=" + stringID + " is not supposed to be deleted, but got hitCount=" + hits.TotalHits + "; delIDs=" + string.Join(",", delIDs.ToArray()));
                        doFail = true;
                    }
                }
            }
            assertFalse(doFail);

            assertEquals("index=" + writer.SegString() + " addCount=" + addCount + " delCount=" + delCount, addCount.Get() - delCount.Get(), s.IndexReader.NumDocs);
            ReleaseSearcher(s);

            writer.Commit();

            assertEquals("index=" + writer.SegString() + " addCount=" + addCount + " delCount=" + delCount, addCount.Get() - delCount.Get(), writer.NumDocs);

            DoClose();
            writer.Dispose(false);

            // Cannot shutdown until after writer is closed because
            // writer has merged segment warmer that uses IS to run
            // searches, and that IS may be using this es!

            /*
            if (es != null)
            {
                es.shutdown();
                es.awaitTermination(1, TimeUnit.SECONDS);
            }
            */

            TestUtil.CheckIndex(dir);
            dir.Dispose();
            //System.IO.Directory.Delete(tempDir.FullName, true);
            TestUtil.Rm(tempDir);

            if (VERBOSE)
            {
                Console.WriteLine("TEST: done [" + (Environment.TickCount - t0) + " ms]");
            }
        }