/// <summary>
/// Indexes four documents carrying lat/lng fields and verifies that a radius
/// query built by <paramref name="createQuery"/> locates each document when
/// searching near its own coordinates.
/// </summary>
private void RunTest(SpatialContext ctx, SpatialStrategy strategy, Func<SpatialArgs, Query> createQuery)
{
    var analyzer = new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30);

    using (var directory = new RandomIdRAMDirectory())
    {
        // Document ids for the four test locations.
        const string africaId = "1";
        const string canadaId = "2";
        const string australiaId = "3";
        const string unknownId = "4";

        using (var index = new TestIndex(directory, analyzer))
        {
            // Hook the spatial strategy into the document-writing pipeline.
            index.DocumentWriting += (sender, args) => Indexer_DocumentWriting(args, ctx, strategy);

            index.IndexItems(new[]
            {
                ValueSet.FromObject(africaId, "content",
                    new { nodeName = "location 1", bodyText = "Zanzibar is in Africa", lat = -6.1357, lng = 39.3621 }),
                ValueSet.FromObject(canadaId, "content",
                    new { nodeName = "location 2", bodyText = "In Canada there is a town called Sydney in Nova Scotia", lat = 46.1368, lng = -60.1942 }),
                ValueSet.FromObject(australiaId, "content",
                    new { nodeName = "location 3", bodyText = "Sydney is the capital of NSW in Australia", lat = -33.8688, lng = 151.2093 }),
                ValueSet.FromObject(unknownId, "content",
                    new { nodeName = "location 4", bodyText = "Somewhere unknown", lat = 50, lng = 50 })
            });

            // Each search is centered near exactly one location and must return that doc.
            DoSpatialSearch(ctx, strategy, index, SearchRadius, australiaId, createQuery, lat: -33, lng: 151);
            DoSpatialSearch(ctx, strategy, index, SearchRadius, canadaId, createQuery, lat: 46, lng: -60);
            DoSpatialSearch(ctx, strategy, index, SearchRadius, africaId, createQuery, lat: -6, lng: 39);
            DoSpatialSearch(ctx, strategy, index, SearchRadius, unknownId, createQuery, lat: 50, lng: 50);
        }
    }
}
/// <summary>
/// Rebuilds the index from scratch and asserts that the reader sees the
/// full data set (100 documents).
/// </summary>
public void Rebuild_Index()
{
    using (var directory = new RandomIdRAMDirectory())
    using (var index = new TestIndex(directory, new StandardAnalyzer(Version.LUCENE_30)))
    {
        // Create a fresh index and populate it with every item in the test data set.
        index.CreateIndex();
        index.IndexItems(index.AllData());

        // A reader obtained from the writer must observe all written documents.
        var writer = index.GetIndexWriter();
        var reader = writer.GetReader();
        Assert.AreEqual(100, reader.NumDocs());
    }
}
/// <summary>
/// Verifies that each live result enumerator holds (and on dispose releases)
/// a reference on the underlying Lucene IndexReader.
/// </summary>
public void Track_Readers()
{
    var analyzer = new StandardAnalyzer(Version.LUCENE_30);
    using (var directory = new RandomIdRAMDirectory())
    using (var index = new TestIndex(directory, analyzer))
    {
        index.IndexItems(new[]
        {
            ValueSet.FromObject("1", "content",
                new { nodeName = "umbraco", headerText = "world", writerName = "administrator" }),
            ValueSet.FromObject("2", "content",
                new { nodeName = "umbraco", headerText = "umbraco", writerName = "administrator" }),
            ValueSet.FromObject("3", "content",
                new { nodeName = "umbraco", headerText = "umbraco", writerName = "administrator" }),
            ValueSet.FromObject("4", "content",
                new { nodeName = "hello", headerText = "world", writerName = "blah" })
        });

        var examineSearcher = (LuceneSearcher)index.GetSearcher();
        var luceneSearcher = (IndexSearcher)examineSearcher.GetLuceneSearcher();

        // Arrange
        var query = examineSearcher.CreateQuery("content").Field("writerName", "administrator");

        // Act
        var results = query.Execute();

        // Each open enumerator adds one reference on top of the searcher's own.
        using (var outerEnumerator = results.GetEnumerator())
        {
            Assert.AreEqual(2, luceneSearcher.IndexReader.RefCount);
            using (var innerEnumerator = results.Skip(2).GetEnumerator())
            {
                Assert.AreEqual(3, luceneSearcher.IndexReader.RefCount);
            }
            // Disposing the inner enumerator releases its reference.
            Assert.AreEqual(2, luceneSearcher.IndexReader.RefCount);
        }
        // Only the searcher's own reference should remain.
        Assert.AreEqual(1, luceneSearcher.IndexReader.RefCount);
    }
}
/// <summary>
/// Repeatedly re-indexes the same 3 document ids in async mode and asserts
/// that the final index contains exactly 3 matches (no duplicates).
/// </summary>
public void Index_Ensure_No_Duplicates_In_Async()
{
    using (var d = new RandomIdRAMDirectory())
    using (var writer = new IndexWriter(d, new CultureInvariantStandardAnalyzer(Version.LUCENE_30), IndexWriter.MaxFieldLength.LIMITED))
    using (var customIndexer = new TestIndex(writer))
    // FIX: ManualResetEvent is IDisposable and was previously never disposed.
    using (var waitHandle = new ManualResetEvent(false))
    {
        void OperationComplete(object sender, IndexOperationEventArgs e)
        {
            //signal that we are done
            waitHandle.Set();
        }

        //add the handler for operation complete since we know it will fire last based on the commit count
        customIndexer.IndexOperationComplete += OperationComplete;

        //remove the normal indexing error handler
        customIndexer.IndexingError -= IndexInitializer.IndexingError;

        //run in async mode
        customIndexer.RunAsync = true;

        //get a node from the data repo
        var idQueue = new ConcurrentQueue<int>(Enumerable.Range(1, 3));
        var node = _contentService.GetPublishedContentByXPath("//*[string-length(@id)>0 and number(@id)>0]")
            .Root
            .Elements()
            .First();

        //reindex the same nodes a bunch of times
        //FIX: hoist the bound so the concurrent queue's Count is not re-read every iteration
        var totalIterations = idQueue.Count * 20;
        for (var i = 0; i < totalIterations; i++)
        {
            //get next id and put it to the back of the list
            if (idQueue.TryDequeue(out var docId))
            {
                idQueue.Enqueue(docId);

                var cloned = new XElement(node);
                cloned.Attribute("id").Value = docId.ToString(CultureInfo.InvariantCulture);
                Debug.WriteLine("Indexing {0}", docId);
                customIndexer.IndexItems(new[] { cloned.ConvertToValueSet(IndexTypes.Content) });
                Thread.Sleep(100);
            }
        }

        //reset the async mode and restore the error handler
        customIndexer.IndexingError += IndexInitializer.IndexingError;
        customIndexer.RunAsync = false;

        //wait until we are done
        waitHandle.WaitOne();

        //FIX: unsubscribe so the handler cannot fire into a disposed wait handle
        //during indexer teardown (the handler was previously never removed)
        customIndexer.IndexOperationComplete -= OperationComplete;

        writer.WaitForMerges();

        //ensure no duplicates
        var customSearcher = (LuceneSearcher)customIndexer.GetSearcher();
        var results = customSearcher.CreateQuery()
            .Field("nodeName", (IExamineValue)new ExamineValue(Examineness.Explicit, "Home"))
            .Execute();
        Assert.AreEqual(3, results.Count());
    }
}