public void Index_Exists()
{
    // Building a fresh index must make IndexExists() report true.
    using (var dir = new RandomIdRAMDirectory())
    using (var index = new TestIndex(dir, new StandardAnalyzer(Version.LUCENE_30)))
    {
        // Force creation of the underlying Lucene index.
        index.EnsureIndex(true);

        Assert.IsTrue(index.IndexExists());
    }
}
public void Can_Overwrite_Index_During_Indexing_Operation()
{
    const int ThreadCount = 1000;

    using (var d = new RandomIdRAMDirectory())
    using (var writer = new IndexWriter(d, new CultureInvariantStandardAnalyzer(Version.LUCENE_30), IndexWriter.MaxFieldLength.LIMITED))
    using (var customIndexer = new TestIndex(writer))
    using (var customSearcher = (LuceneSearcher)customIndexer.GetSearcher())
    {
        var waitHandle = new ManualResetEvent(false);
        var opCompleteCount = 0;

        // Signals once every queued operation has raised its completion event.
        void OperationComplete(object sender, IndexOperationEventArgs e)
        {
            // Use the return value of Interlocked.Increment: re-reading the field
            // after the increment is a racy second read and may observe another
            // thread's increment.
            var completed = Interlocked.Increment(ref opCompleteCount);
            Console.WriteLine($"OperationComplete: {completed}");
            if (completed == ThreadCount)
            {
                //signal that we are done
                waitHandle.Set();
            }
        }

        //add the handler for optimized since we know it will be optimized last based on the commit count
        customIndexer.IndexOperationComplete += OperationComplete;

        //remove the normal indexing error handler
        customIndexer.IndexingError -= IndexInitializer.IndexingError;

        //run in async mode
        customIndexer.RunAsync = true;

        //get a node from the data repo
        var node = _contentService.GetPublishedContentByXPath("//*[string-length(@id)>0 and number(@id)>0]")
            .Root
            .Elements()
            .First();

        //spawn a bunch of threads to perform some reading
        var tasks = new List<Task>();

        //reindex the same node a bunch of times - then while this is running we'll overwrite below
        for (var i = 0; i < ThreadCount; i++)
        {
            // Copy the loop variable into a per-iteration local BEFORE the lambda
            // captures it: a lambda that closes over the for-loop variable itself
            // reads its value at execution time, not at task-creation time, so
            // every task would otherwise log the same (final) id.
            var docId = i;
            var indexer = customIndexer;
            tasks.Add(Task.Factory.StartNew(() =>
            {
                var cloned = new XElement(node);
                Debug.WriteLine("Indexing {0}", docId);
                indexer.IndexItem(cloned.ConvertToValueSet(IndexTypes.Content));
            }, TaskCreationOptions.LongRunning));
        }

        // Let some of the indexing work start before we overwrite the index.
        Thread.Sleep(100);

        //overwrite!
        customIndexer.EnsureIndex(true);

        try
        {
            Task.WaitAll(tasks.ToArray());
        }
        catch (AggregateException e)
        {
            // Surface every inner exception in the failure message, not just the first.
            var sb = new StringBuilder();
            sb.Append(e.Message + ": ");
            foreach (var v in e.InnerExceptions)
            {
                sb.Append(v.Message + "; ");
            }
            Assert.Fail(sb.ToString());
        }

        //re-add the normal indexing error handler and reset the async mode
        customIndexer.IndexingError += IndexInitializer.IndexingError;
        customIndexer.RunAsync = false;

        //wait until we are done - fail explicitly on timeout instead of silently
        //falling through to assertions against a stale count
        Assert.IsTrue(
            waitHandle.WaitOne(TimeSpan.FromMinutes(2)),
            "Timed out waiting for all index operations to complete");

        writer.WaitForMerges();

        //unsubscribe the local handler so it cannot leak beyond this test
        customIndexer.IndexOperationComplete -= OperationComplete;

        //query for documents that were being indexed while the index was overwritten
        var results = customSearcher.CreateQuery()
            .Field("nodeName", (IExamineValue)new ExamineValue(Examineness.Explicit, "Home"))
            .Execute();

        //the total times that OperationComplete event should be fired is ThreadCount
        Assert.AreEqual(ThreadCount, opCompleteCount);

        //should be less than the total inserted because we overwrote it in the middle of processing
        Debug.WriteLine("TOTAL RESULTS: " + results.TotalItemCount);
        Assert.Less(results.Count(), ThreadCount);
    }
}