示例#1
0
        /// <summary>
        /// Builds a RAMDirectory whose documents have all been deleted, asserting
        /// along the way that the writer and a fresh reader agree on the counts.
        /// </summary>
        /// <param name="numDeletedDocs">Number of (empty) docs to add and then delete.</param>
        /// <returns>The directory containing the fully-deleted index.</returns>
        private static RAMDirectory MakeEmptyIndex(int numDeletedDocs)
        {
            RAMDirectory dir = new RAMDirectory();
            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, MaxFieldLength.LIMITED);

            // Add the requested number of empty documents...
            for (int docNum = 0; docNum < numDeletedDocs; docNum++)
            {
                writer.AddDocument(new Document());
            }
            writer.Commit();
            // ...then delete every one of them.
            writer.DeleteDocuments(new MatchAllDocsQuery());
            writer.Commit();

            if (numDeletedDocs > 0)
            {
                Assert.IsTrue(writer.HasDeletions(), "writer has no deletions");
            }

            Assert.AreEqual(numDeletedDocs, writer.MaxDoc(), "writer is missing some deleted docs");
            Assert.AreEqual(0, writer.NumDocs(), "writer has non-deleted docs");
            writer.Close();

            // A reader opened on the result must see every doc as deleted.
            IndexReader reader = IndexReader.Open(dir, true);
            Assert.AreEqual(numDeletedDocs, reader.NumDeletedDocs, "reader has wrong number of deleted docs");
            reader.Close();
            return dir;
        }
示例#2
0
        /// <summary>
        /// Rebuilds the Lucene index at C:\lucene from the dbo.Content table.
        /// Existing index content is removed first via DeleteAll().
        /// </summary>
        public static void BuildIndexes()
        {
            // Dispose the connection, command, data reader and writer
            // deterministically: the original leaked all of them and never closed
            // the IndexWriter, which also leaves the index write lock held.
            using (SqlConnection connection = new SqlConnection(@"Data Source=localhost\SQLEXPRESS;Initial Catalog=toodledo;Integrated Security=SSPI;"))
            using (SqlCommand cmd = new SqlCommand())
            {
                cmd.CommandText = "SELECT id, title, body FROM dbo.Content";
                cmd.CommandType = CommandType.Text;
                cmd.Connection  = connection;
                connection.Open();

                var dir = FSDirectory.Open(new System.IO.DirectoryInfo(@"C:\lucene"));
                var analyzer = new Lucene.Net.Analysis.Standard.StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30);
                var writer = new Lucene.Net.Index.IndexWriter(dir, analyzer, Lucene.Net.Index.IndexWriter.MaxFieldLength.UNLIMITED);
                try
                {
                    writer.DeleteAll();

                    using (var reader = cmd.ExecuteReader())
                    {
                        while (reader.Read())
                        {
                            var id    = reader.GetFieldValue<int>(0);
                            var title = reader.GetFieldValue<string>(1);
                            var body  = reader.GetFieldValue<string>(2);
                            writer.AddDocument(create_doc(id, title, body));
                        }
                    }

                    writer.Optimize();
                    writer.Commit();
                }
                finally
                {
                    writer.Close(); // releases the index write lock
                }
            }
        }
        /// <summary>
        /// Prepares a small RAMDirectory index with N_DOCS documents, inserted in
        /// a scrambled (i + 4) % N_DOCS order so tests can verify that results are
        /// ordered by score rather than by insertion/doc id.
        /// </summary>
        public override void  SetUp()
        {
            base.SetUp();   // fixed: the original invoked base.SetUp() twice
            dir   = new RAMDirectory();
            anlzr = new StandardAnalyzer(Util.Version.LUCENE_CURRENT);
            IndexWriter iw = new IndexWriter(dir, anlzr, IndexWriter.MaxFieldLength.LIMITED);
            // add docs not exactly in natural ID order, to verify we do check the order of docs by scores
            int remaining = N_DOCS;

            bool[] done = new bool[N_DOCS];
            int    i    = 0;

            while (remaining > 0)
            {
                // Stride 4 visits every id exactly once only when N_DOCS is prime
                // and greater than 2; a revisit means the constant is mis-set.
                if (done[i])
                {
                    throw new System.Exception("to set this test correctly N_DOCS=" + N_DOCS + " must be prime and greater than 2!");
                }
                AddDoc(iw, i);
                done[i] = true;
                i       = (i + 4) % N_DOCS;
                // Optionally spread the docs over multiple segments.
                if (doMultiSegment && remaining % 3 == 0)
                {
                    iw.Commit();
                }
                remaining--;
            }
            iw.Close();
        }
示例#4
0
        /// <summary>
        /// Rebuilds the article search index: OpenMode.CREATE wipes any existing
        /// index and the given article becomes its only document.
        /// </summary>
        /// <param name="article">Article whose fields are indexed and stored.</param>
        private void CreateIndex(EditViewArticle article)
        {
            // The analyzer is supplied per document via the AddDocument overload
            // below, so the writer config deliberately carries a null analyzer.
            var options = new Lucene.Net.Index.IndexWriterConfig(Lucene.Net.Util.LuceneVersion.LUCENE_48, null)
            {
                OpenMode = Lucene.Net.Index.OpenMode.CREATE
            };

            using (var indexWriter = new Lucene.Net.Index.IndexWriter(_directory, options))
            {
                var document = new Lucene.Net.Documents.Document
                {
                    new Lucene.Net.Documents.TextField("Id", article.Id.ToString(), Lucene.Net.Documents.Field.Store.YES),
                    new Lucene.Net.Documents.TextField("Title", article.Title, Lucene.Net.Documents.Field.Store.YES),
                    // Plain-text summary; the raw HTML contents are intentionally not indexed.
                    new Lucene.Net.Documents.TextField("TContents", article.Summary, Lucene.Net.Documents.Field.Store.YES),
                    // NOTE(review): culture-sensitive ToString(); a round-trippable
                    // sortable format ("o"/"s") would be safer — confirm callers.
                    new Lucene.Net.Documents.TextField("CreateTime", article.CreateTime.ToString(), Lucene.Net.Documents.Field.Store.YES)
                };

                indexWriter.AddDocument(document, this.CreateAnalyzer());
                indexWriter.Commit();
            }
        }
 /// <summary>
 /// Ensures the custom ReportingMergeScheduler compiles and can actually be
 /// installed on an IndexWriter without hidden dependencies; deliberately
 /// avoids any randomized test APIs.
 /// </summary>
 public void TestCustomMergeScheduler()
 {
     Directory directory = new RAMDirectory();
     IndexWriterConfig config = new IndexWriterConfig(TEST_VERSION_CURRENT, null);
     config.SetMergeScheduler(new ReportingMergeScheduler());
     IndexWriter indexWriter = new IndexWriter(directory, config);

     indexWriter.AddDocument(new Document());
     indexWriter.Commit(); // trigger flush
     indexWriter.AddDocument(new Document());
     indexWriter.Commit(); // trigger flush
     indexWriter.ForceMerge(1);

     indexWriter.Dispose();
     directory.Dispose();
 }
        /// <summary>
        /// Verifies that TopScoreDocCollector.create returns the out-of-order or
        /// in-order collector variant as requested, and that both collect the
        /// first three docs of a MatchAllDocsQuery in doc-id order.
        /// </summary>
        public virtual void  TestOutOfOrderCollection()
        {
            Directory   dir    = new RAMDirectory();
            IndexWriter writer = new IndexWriter(dir, null, MaxFieldLength.UNLIMITED);

            // Ten empty documents suffice; only doc ids and hit counts matter.
            for (int i = 0; i < 10; i++)
            {
                writer.AddDocument(new Document());
            }
            writer.Commit();
            writer.Close();

            // Expected concrete collector type for each in-order setting.
            bool[]          inOrder         = new bool[] { false, true };
            System.String[] actualTSDCClass = new System.String[] { "OutOfOrderTopScoreDocCollector", "InOrderTopScoreDocCollector" };

            // Save the original value to set later.
            bool origVal = BooleanQuery.GetAllowDocsOutOfOrder();

            BooleanQuery.SetAllowDocsOutOfOrder(true);

            BooleanQuery bq = new BooleanQuery();

            // Add a Query with SHOULD, since bw.scorer() returns BooleanScorer2
            // which delegates to BS if there are no mandatory clauses.
            bq.Add(new MatchAllDocsQuery(), Occur.SHOULD);
            // Set minNrShouldMatch to 1 so that BQ will not optimize rewrite to return
            // the clause instead of BQ.
            bq.SetMinimumNumberShouldMatch(1);
            try
            {
                IndexSearcher searcher = new IndexSearcher(dir);
                for (int i = 0; i < inOrder.Length; i++)
                {
                    // Ask for the top 3 hits; the collector class must match the mode.
                    TopDocsCollector tdc = TopScoreDocCollector.create(3, inOrder[i]);
                    Assert.AreEqual("Lucene.Net.Search.TopScoreDocCollector+" + actualTSDCClass[i], tdc.GetType().FullName);

                    searcher.Search(new MatchAllDocsQuery(), tdc);

                    ScoreDoc[] sd = tdc.TopDocs().ScoreDocs;
                    Assert.AreEqual(3, sd.Length);
                    for (int j = 0; j < sd.Length; j++)
                    {
                        Assert.AreEqual(j, sd[j].doc, "expected doc Id " + j + " found " + sd[j].doc);
                    }
                }
            }
            finally
            {
                // Whatever happens, reset BooleanQuery.allowDocsOutOfOrder to the
                // original value. Don't set it to false in case the implementation in BQ
                // will change some day.
                BooleanQuery.SetAllowDocsOutOfOrder(origVal);
            }
        }
 /// <summary>
 /// Indexes at least two plain documents (no facet fields) and commits so
 /// they end up flushed into a segment of their own.
 /// </summary>
 private static void IndexDocsNoFacets(IndexWriter indexWriter)
 {
     int docCount = AtLeast(2);
     for (int docIndex = 0; docIndex < docCount; docIndex++)
     {
         var document = new Document();
         AddField(document);
         indexWriter.AddDocument(document);
     }
     indexWriter.Commit(); // flush a segment
 }
		/// <summary>
		/// Verifies that TopScoreDocCollector.create returns the out-of-order or
		/// in-order collector variant as requested, and that both collect the
		/// first three docs of a MatchAllDocsQuery in doc-id order.
		/// </summary>
		public virtual void  TestOutOfOrderCollection()
		{
			// Build a tiny index of ten empty documents; only doc ids matter here.
			Directory directory = new RAMDirectory();
			IndexWriter indexWriter = new IndexWriter(directory, null, MaxFieldLength.UNLIMITED);
			for (int docNum = 0; docNum < 10; docNum++)
			{
				indexWriter.AddDocument(new Document());
			}
			indexWriter.Commit();
			indexWriter.Close();

			// For each mode, the concrete collector type we expect to be created.
			bool[] inOrder = new bool[]{false, true};
			System.String[] actualTSDCClass = new System.String[]{"OutOfOrderTopScoreDocCollector", "InOrderTopScoreDocCollector"};

			// Remember the global flag so it can be restored afterwards.
			bool origVal = BooleanQuery.GetAllowDocsOutOfOrder();
			BooleanQuery.SetAllowDocsOutOfOrder(true);

			// A lone SHOULD clause makes bw.scorer() return BooleanScorer2, which
			// delegates to BS when there are no mandatory clauses; minNrShouldMatch=1
			// stops the rewrite from collapsing the query down to its only clause.
			BooleanQuery bq = new BooleanQuery();
			bq.Add(new MatchAllDocsQuery(), Occur.SHOULD);
			bq.SetMinimumNumberShouldMatch(1);
			try
			{
				IndexSearcher searcher = new IndexSearcher(directory);
				for (int mode = 0; mode < inOrder.Length; mode++)
				{
					TopDocsCollector tdc = TopScoreDocCollector.create(3, inOrder[mode]);
					Assert.AreEqual("Lucene.Net.Search.TopScoreDocCollector+" + actualTSDCClass[mode], tdc.GetType().FullName);

					searcher.Search(new MatchAllDocsQuery(), tdc);

					ScoreDoc[] hits = tdc.TopDocs().scoreDocs;
					Assert.AreEqual(3, hits.Length);
					for (int j = 0; j < hits.Length; j++)
					{
						Assert.AreEqual(j, hits[j].doc, "expected doc Id " + j + " found " + hits[j].doc);
					}
				}
			}
			finally
			{
				// Restore the original value rather than hard-coding false, in case
				// the default inside BooleanQuery ever changes.
				BooleanQuery.SetAllowDocsOutOfOrder(origVal);
			}
		}
 /// <summary>
 /// Indexes at least two documents that carry facet fields only (no content
 /// terms), then commits so they are flushed into their own segment.
 /// </summary>
 private static void IndexDocsWithFacetsNoTerms(IndexWriter indexWriter, TaxonomyWriter taxoWriter, IDictionary<string, int?> expectedCounts)
 {
     Random rnd = Random();
     int docCount = AtLeast(rnd, 2);
     FacetsConfig facetsConfig = Config;
     for (int docIndex = 0; docIndex < docCount; docIndex++)
     {
         var document = new Document();
         AddFacets(document, facetsConfig, false); // false => facets only, no content field
         indexWriter.AddDocument(facetsConfig.Build(taxoWriter, document));
     }
     indexWriter.Commit(); // flush a segment
 }
        /// <summary>
        /// Checks that a SnapshotDeletionPolicy instance can be reused across
        /// several IndexWriter sessions: the snapshotted commit's files must stay
        /// copyable until Release(), after which they may be deleted.
        /// </summary>
        public virtual void  TestReuseAcrossWriters()
        {
            Directory dir = new MockRAMDirectory();

            SnapshotDeletionPolicy dp     = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
            IndexWriter            writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED, null);

            // Force frequent flushes
            writer.SetMaxBufferedDocs(2);
            Document doc = new Document();

            doc.Add(new Field("content", "aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
            for (int i = 0; i < 7; i++)
            {
                writer.AddDocument(doc, null);
                if (i % 2 == 0)
                {
                    writer.Commit(null);
                }
            }
            // Snapshot the current commit; its files must survive until Release().
            IndexCommit cp = dp.Snapshot();

            CopyFiles(dir, cp);
            writer.Close();
            CopyFiles(dir, cp);

            // Reopen with the same deletion policy: the snapshot must stay valid.
            writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED, null);
            CopyFiles(dir, cp);
            for (int i = 0; i < 7; i++)
            {
                writer.AddDocument(doc, null);
                if (i % 2 == 0)
                {
                    writer.Commit(null);
                }
            }
            CopyFiles(dir, cp);
            writer.Close();
            CopyFiles(dir, cp);
            // After releasing the snapshot its files may be reclaimed...
            dp.Release();
            writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED, null);
            writer.Close();

            // ...so copying the released commit is now expected to fail.
            Assert.Throws <System.IO.FileNotFoundException>(() => CopyFiles(dir, cp), "did not hit expected IOException");
            dir.Close();
        }
        /// <summary>
        /// FileSwitchDirectory sanity check: files whose extensions are in the
        /// registered set ("fdt"/"fdx", the stored-fields files) must land in the
        /// primary directory and all other files in the secondary one.
        /// </summary>
        public virtual void  TestBasic()
        {
            HashSet <string> fileExtensions = new HashSet <string>();

            fileExtensions.Add("fdt");
            fileExtensions.Add("fdx");

            Directory    primaryDir   = new MockRAMDirectory();
            RAMDirectory secondaryDir = new MockRAMDirectory();

            FileSwitchDirectory fsd    = new FileSwitchDirectory(fileExtensions, primaryDir, secondaryDir, true);
            IndexWriter         writer = new IndexWriter(fsd, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED, null);

            // Compound files would bundle everything into .cfs and defeat the check.
            writer.UseCompoundFile = false;
            TestIndexWriterReader.CreateIndexNoClose(true, "ram", writer);
            IndexReader reader = writer.GetReader(null);

            Assert.AreEqual(100, reader.MaxDoc);
            writer.Commit(null);
            // we should see only fdx,fdt files here
            System.String[] files = primaryDir.ListAll(null);
            Assert.IsTrue(files.Length > 0);
            for (int x = 0; x < files.Length; x++)
            {
                System.String ext = FileSwitchDirectory.GetExtension(files[x]);
                Assert.IsTrue(fileExtensions.Contains(ext));
            }
            files = secondaryDir.ListAll(null);
            Assert.IsTrue(files.Length > 0);
            // we should not see fdx,fdt files here
            for (int x = 0; x < files.Length; x++)
            {
                System.String ext = FileSwitchDirectory.GetExtension(files[x]);
                Assert.IsFalse(fileExtensions.Contains(ext));
            }
            reader.Close();
            writer.Close();

            // The switch directory must present the union of both sides.
            files = fsd.ListAll(null);
            for (int i = 0; i < files.Length; i++)
            {
                Assert.IsNotNull(files[i]);
            }
            fsd.Close();
        }
 /// <summary>
 /// Indexes at least two faceted documents; a random subset of them also
 /// gets a content field. Everything is committed at the end so the docs
 /// are flushed into a segment.
 /// </summary>
 private static void IndexDocsWithFacetsAndSomeTerms(IndexWriter indexWriter, TaxonomyWriter taxoWriter, IDictionary<string, int?> expectedCounts)
 {
     Random rnd = Random();
     int docCount = AtLeast(rnd, 2);
     FacetsConfig facetsConfig = Config;
     for (int docIndex = 0; docIndex < docCount; docIndex++)
     {
         var document = new Document();
         bool withContent = rnd.NextBoolean();
         if (withContent)
         {
             AddField(document);
         }
         AddFacets(document, facetsConfig, withContent);
         indexWriter.AddDocument(facetsConfig.Build(taxoWriter, document));
     }
     indexWriter.Commit(); // flush a segment
 }
 /// <summary>
 /// FileSwitchDirectory sanity check: files with registered extensions
 /// ("fdt"/"fdx", the stored-fields files) must land in the primary
 /// directory and all other files in the secondary one.
 /// </summary>
 public virtual void  TestBasic()
 {
     HashSet<string> fileExtensions = new HashSet<string>();
     fileExtensions.Add("fdt");
     fileExtensions.Add("fdx");
     
     Directory primaryDir = new MockRAMDirectory();
     RAMDirectory secondaryDir = new MockRAMDirectory();
     
     FileSwitchDirectory fsd = new FileSwitchDirectory(fileExtensions, primaryDir, secondaryDir, true);
     IndexWriter writer = new IndexWriter(fsd, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
     // Compound files would bundle everything into .cfs and defeat the check.
     writer.UseCompoundFile = false;
     TestIndexWriterReader.CreateIndexNoClose(true, "ram", writer);
     IndexReader reader = writer.GetReader();
     Assert.AreEqual(100, reader.MaxDoc);
     writer.Commit();
     // we should see only fdx,fdt files here
     System.String[] files = primaryDir.ListAll();
     Assert.IsTrue(files.Length > 0);
     for (int x = 0; x < files.Length; x++)
     {
         System.String ext = FileSwitchDirectory.GetExtension(files[x]);
         Assert.IsTrue(fileExtensions.Contains(ext));
     }
     files = secondaryDir.ListAll();
     Assert.IsTrue(files.Length > 0);
     // we should not see fdx,fdt files here
     for (int x = 0; x < files.Length; x++)
     {
         System.String ext = FileSwitchDirectory.GetExtension(files[x]);
         Assert.IsFalse(fileExtensions.Contains(ext));
     }
     reader.Close();
     writer.Close();
     
     // The switch directory must present the union of both sides.
     files = fsd.ListAll();
     for (int i = 0; i < files.Length; i++)
     {
         Assert.IsNotNull(files[i]);
     }
     fsd.Close();
 }
示例#14
0
        /// <summary>
        /// Verifies TopScoreDocCollector.Create returns the out-of-order or
        /// in-order collector variant as requested, and that both collect the
        /// first three docs of a MatchAllDocsQuery in doc-id order.
        /// </summary>
        public virtual void TestOutOfOrderCollection()
        {
            Directory   dir    = new RAMDirectory();
            IndexWriter writer = new IndexWriter(dir, null, MaxFieldLength.UNLIMITED);

            // Ten empty documents; only doc ids and hit counts matter.
            for (int i = 0; i < 10; i++)
            {
                writer.AddDocument(new Document());
            }
            writer.Commit();
            writer.Close();

            // Expected concrete collector type for each in-order setting.
            bool[]          inOrder         = new bool[] { false, true };
            System.String[] actualTSDCClass = new System.String[]
            { "OutOfOrderTopScoreDocCollector", "InOrderTopScoreDocCollector" };

            BooleanQuery bq = new BooleanQuery();

            // Add a Query with SHOULD, since bw.scorer() returns BooleanScorer2
            // which delegates to BS if there are no mandatory clauses.
            bq.Add(new MatchAllDocsQuery(), Occur.SHOULD);
            // Set minNrShouldMatch to 1 so that BQ will not optimize rewrite to return
            // the clause instead of BQ.
            bq.MinimumNumberShouldMatch = 1;

            // Dispose the searcher and directory (the original leaked both).
            using (IndexSearcher searcher = new IndexSearcher(dir, true))
            {
                for (int i = 0; i < inOrder.Length; i++)
                {
                    TopDocsCollector <ScoreDoc> tdc = TopScoreDocCollector.Create(3, inOrder[i]);
                    Assert.AreEqual("Lucene.Net.Search.TopScoreDocCollector+" + actualTSDCClass[i], tdc.GetType().FullName);

                    searcher.Search(new MatchAllDocsQuery(), tdc);

                    ScoreDoc[] sd = tdc.TopDocs().ScoreDocs;
                    Assert.AreEqual(3, sd.Length);
                    for (int j = 0; j < sd.Length; j++)
                    {
                        Assert.AreEqual(j, sd[j].Doc, "expected doc Id " + j + " found " + sd[j].Doc);
                    }
                }
            }
            dir.Dispose();
        }
示例#15
0
        /// <summary>
        /// Deletes the index entries whose MemberID term matches the given id.
        /// </summary>
        /// <param name="memberId">Value of the MemberID term to delete.</param>
        /// <returns>true on success, false if any step of the deletion failed.</returns>
        public bool RemoveMember(int memberId)
        {
            try
            {
                // Close the writer even when DeleteDocuments/Commit throws;
                // otherwise the index write lock is left behind (the original only
                // closed the writer on the success path).
                Lucene.Net.Store.Directory dir = Lucene.Net.Store.FSDirectory.GetDirectory(_indexFileLocation, false);
                Lucene.Net.Index.IndexWriter indexWriter = new Lucene.Net.Index.IndexWriter(dir, new Lucene.Net.Analysis.Standard.StandardAnalyzer(), false);
                try
                {
                    Lucene.Net.Index.Term idTerm = new Lucene.Net.Index.Term("MemberID", memberId.ToString());
                    indexWriter.DeleteDocuments(idTerm);
                    indexWriter.Commit();
                }
                finally
                {
                    indexWriter.Close();
                }
            }
            catch (Exception)
            {
                // Deliberately best-effort: report failure instead of propagating.
                return false;
            }

            return true;
        }
        /// <summary>
        /// Verifies TopScoreDocCollector.Create returns the out-of-order or
        /// in-order collector variant as requested, and that both collect the
        /// first three docs of a MatchAllDocsQuery in doc-id order.
        /// </summary>
        public virtual void TestOutOfOrderCollection()
        {

            // Ten empty documents; only doc ids and hit counts matter.
            Directory dir = new RAMDirectory();
            IndexWriter writer = new IndexWriter(dir, null, MaxFieldLength.UNLIMITED);
            for (int i = 0; i < 10; i++)
            {
                writer.AddDocument(new Document());
            }
            writer.Commit();
            writer.Close();

            // Expected concrete collector type for each in-order setting.
            bool[] inOrder = new bool[] {false, true};
            System.String[] actualTSDCClass = new System.String[]
                                                  {"OutOfOrderTopScoreDocCollector", "InOrderTopScoreDocCollector"};

            BooleanQuery bq = new BooleanQuery();
            // Add a Query with SHOULD, since bw.scorer() returns BooleanScorer2
            // which delegates to BS if there are no mandatory clauses.
            bq.Add(new MatchAllDocsQuery(), Occur.SHOULD);
            // Set minNrShouldMatch to 1 so that BQ will not optimize rewrite to return
            // the clause instead of BQ.
            bq.MinimumNumberShouldMatch = 1;
            IndexSearcher searcher = new IndexSearcher(dir, true);
            for (int i = 0; i < inOrder.Length; i++)
            {
                TopDocsCollector<ScoreDoc> tdc = TopScoreDocCollector.Create(3, inOrder[i]);
                Assert.AreEqual("Lucene.Net.Search.TopScoreDocCollector+" + actualTSDCClass[i], tdc.GetType().FullName);

                searcher.Search(new MatchAllDocsQuery(), tdc);

                ScoreDoc[] sd = tdc.TopDocs().ScoreDocs;
                Assert.AreEqual(3, sd.Length);
                for (int j = 0; j < sd.Length; j++)
                {
                    Assert.AreEqual(j, sd[j].Doc, "expected doc Id " + j + " found " + sd[j].Doc);
                }
            }
        }
示例#17
0
        /// <summary>
        /// Smoke test for MMapDirectory: creates an index in a temp directory
        /// through an mmap-backed Directory, commits, then adds a batch of
        /// random-field documents while a reader is open on the directory.
        /// </summary>
        public virtual void TestMmapIndex()
        {
            // sometimes the directory is not cleaned by rmDir, because on Windows it
            // may take some time until the files are finally dereferenced. So clean the
            // directory up front, or otherwise new IndexWriter will fail.
            var dirPath = CreateTempDir("testLuceneMmap");
            RmDir(dirPath.FullName);
            var dir = new MMapDirectory(dirPath, null);

            // plan to add a set of useful stopwords, consider changing some of the
            // interior filters.
            using (var analyzer = new MockAnalyzer(Random()))
            {
                // TODO: something about lock timeouts and leftover locks.
                using (var writer = new IndexWriter(dir,
                    new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetOpenMode(
                        IndexWriterConfig.OpenMode_e.CREATE)))
                {
                    writer.Commit();
                    using (IndexReader reader = DirectoryReader.Open(dir))
                    {
                        // NOTE(review): 'searcher' is never used, and the documents
                        // are added AFTER the commit the reader was opened on, so
                        // nothing here ever searches the added docs. Looks like a
                        // partial port of the upstream test — confirm intended coverage.
                        var searcher = NewSearcher(reader);
                        var num = AtLeast(1000);
                        for (int dx = 0; dx < num; dx++)
                        {
                            var f = RandomField();
                            var doc = new Document();
                            doc.Add(NewTextField("data", f, Field.Store.YES));
                            writer.AddDocument(doc);
                        }
                    }
                }

                RmDir(dirPath.FullName);
            }
        }
示例#18
0
 /// <summary>
 /// Prepares a small RAMDirectory index with N_DOCS documents, inserted in a
 /// scrambled (i + 4) % N_DOCS order so the tests can verify that results are
 /// ordered by score rather than by insertion/doc id.
 /// </summary>
 public override void  SetUp()
 {
     base.SetUp();   // fixed: the original invoked base.SetUp() twice
     dir = new RAMDirectory();
     anlzr = new StandardAnalyzer(Util.Version.LUCENE_CURRENT);
     IndexWriter iw = new IndexWriter(dir, anlzr, IndexWriter.MaxFieldLength.LIMITED);
     // add docs not exactly in natural ID order, to verify we do check the order of docs by scores
     int remaining = N_DOCS;
     bool[] done = new bool[N_DOCS];
     int i = 0;
     while (remaining > 0)
     {
         // Stride 4 visits every id exactly once only when N_DOCS is prime
         // and greater than 2; a revisit means the constant is mis-set.
         if (done[i])
         {
             throw new System.Exception("to set this test correctly N_DOCS=" + N_DOCS + " must be prime and greater than 2!");
         }
         AddDoc(iw, i);
         done[i] = true;
         i = (i + 4) % N_DOCS;
         // Optionally spread the docs over multiple segments.
         if (doMultiSegment && remaining % 3 == 0)
         {
             iw.Commit();
         }
         remaining--;
     }
     iw.Close();
 }
        /// <summary>
        /// Runs a background IndexerThread that indexes faceted documents while
        /// this thread repeatedly acquires SearcherAndTaxonomy pairs from a
        /// SearcherTaxonomyManager and checks that facet counts stay consistent.
        /// </summary>
        public virtual void TestDirectory()
        {
            Store.Directory indexDir = NewDirectory();
            Store.Directory taxoDir = NewDirectory();
            IndexWriter w = new IndexWriter(indexDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
            var tw = new DirectoryTaxonomyWriter(taxoDir);
            // first empty commit
            w.Commit();
            tw.Commit();
            var mgr = new SearcherTaxonomyManager(indexDir, taxoDir, null);
            FacetsConfig config = new FacetsConfig();
            config.SetMultiValued("field", true);
            // Set by the indexer thread once it has produced ordLimit facets.
            AtomicBoolean stop = new AtomicBoolean();

            // How many unique facets to index before stopping:
            int ordLimit = TEST_NIGHTLY ? 100000 : 6000;

            var indexer = new IndexerThread(w, config, tw, mgr, ordLimit, stop);
            indexer.Start();

            try
            {
                while (!stop.Get())
                {
                    // Acquire a consistent searcher+taxonomy snapshot; it must be
                    // released in the finally below, whatever happens.
                    SearcherAndTaxonomy pair = mgr.Acquire();
                    try
                    {
                        //System.out.println("search maxOrd=" + pair.taxonomyReader.getSize());
                        FacetsCollector sfc = new FacetsCollector();
                        pair.searcher.Search(new MatchAllDocsQuery(), sfc);
                        Facets facets = GetTaxonomyFacetCounts(pair.taxonomyReader, config, sfc);
                        FacetResult result = facets.GetTopChildren(10, "field");
                        if (pair.searcher.IndexReader.NumDocs > 0)
                        {
                            //System.out.println(pair.taxonomyReader.getSize());
                            Assert.True(result.ChildCount > 0);
                            Assert.True(result.LabelValues.Length > 0);
                        }

                        //if (VERBOSE) {
                        //System.out.println("TEST: facets=" + FacetTestUtils.toString(results.get(0)));
                        //}
                    }
                    finally
                    {
                        mgr.Release(pair);
                    }
                }
            }
            finally
            {
                // Always wait for the indexer thread, even if an assertion failed.
                indexer.Join();
            }

            if (VERBOSE)
            {
                Console.WriteLine("TEST: now stop");
            }

            IOUtils.Close(mgr, tw, w, taxoDir, indexDir);
        }
        /// <summary>
        /// Acquiring from a SearcherManager must fail once a searcher's reader
        /// ref-count has been decremented behind the manager's back.
        /// </summary>
        public virtual void TestReferenceDecrementIllegally()
        {
            Directory directory = NewDirectory();
            IndexWriter indexWriter = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergeScheduler(new ConcurrentMergeScheduler()));
            SearcherManager manager = new SearcherManager(indexWriter, false, new SearcherFactory());
            indexWriter.AddDocument(new Document());
            indexWriter.Commit();
            manager.MaybeRefreshBlocking();

            // Balanced acquire/release pairs are fine.
            IndexSearcher first = manager.Acquire();
            IndexSearcher second = manager.Acquire();
            manager.Release(first);
            manager.Release(second);

            // Now decrement the reader's refCount outside of the manager.
            first = manager.Acquire();
            first.IndexReader.DecRef();
            manager.Release(first);

            try
            {
                manager.Acquire();
                Assert.Fail("acquire should have thrown an InvalidOperationException since we modified the refCount outside of the manager");
            }
            catch (InvalidOperationException)
            {
                // expected: the manager detected the externally modified refCount
            }

            // manager.Dispose(); -- already closed
            indexWriter.Dispose();
            directory.Dispose();
        }
示例#21
0
        /// <summary>
        /// Deletes the index entries whose MemberID term matches the given id.
        /// </summary>
        /// <param name="memberId">Value of the MemberID term to delete.</param>
        /// <returns>true on success, false if any step of the deletion failed.</returns>
        public bool RemoveMember(int memberId)
        {
            try
            {
                // Close the writer even when DeleteDocuments/Commit throws;
                // otherwise the index write lock is left behind (the original only
                // closed the writer on the success path).
                Lucene.Net.Store.Directory dir = Lucene.Net.Store.FSDirectory.GetDirectory(_indexFileLocation, false);
                Lucene.Net.Index.IndexWriter indexWriter = new Lucene.Net.Index.IndexWriter(dir, new Lucene.Net.Analysis.Standard.StandardAnalyzer(), false);
                try
                {
                    Lucene.Net.Index.Term idTerm = new Lucene.Net.Index.Term("MemberID", memberId.ToString());
                    indexWriter.DeleteDocuments(idTerm);
                    indexWriter.Commit();
                }
                finally
                {
                    indexWriter.Close();
                }
            }
            catch (Exception)
            {
                // Deliberately best-effort: report failure instead of propagating.
                return false;
            }

            return true;
        }
示例#22
0
 /// <summary>
 /// Creates numDocs empty documents in a single merged segment, then wraps
 /// the lone segment reader in a filter reader that marks every doc deleted.
 /// </summary>
 /// <param name="random">Randomness source for the mock directory wrapper.</param>
 /// <param name="numDocs">How many (empty) documents to index; must be positive.</param>
 private static IndexReader MakeEmptyIndex(Random random, int numDocs)
 {
     Debug.Assert(numDocs > 0);
     Directory dir = new MockDirectoryWrapper(random, new RAMDirectory());
     IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(LuceneTestCase.TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     for (int docNum = 0; docNum < numDocs; docNum++)
     {
         writer.AddDocument(new Document());
     }
     // Merge to one segment so there is exactly one segment reader to wrap.
     writer.ForceMerge(1);
     writer.Commit();
     writer.Dispose();
     DirectoryReader directoryReader = DirectoryReader.Open(dir);
     return new AllDeletedFilterReader(LuceneTestCase.GetOnlySegmentReader(directoryReader));
 }
        /// <summary>
        /// Writes exactly two documents, optionally carrying a content field and
        /// (when a FacetsConfig is supplied) a facet dimension "A" whose label is
        /// the document's ordinal; commits once at the end.
        /// </summary>
        private void indexTwoDocs(TaxonomyWriter taxoWriter, IndexWriter indexWriter, FacetsConfig config, bool withContent)
        {
            for (int docNum = 0; docNum < 2; docNum++)
            {
                var doc = new Document();
                if (withContent)
                {
                    doc.Add(new StringField("f", "a", Field.Store.NO));
                }

                if (config == null)
                {
                    indexWriter.AddDocument(doc);
                }
                else
                {
                    doc.Add(new FacetField("A", Convert.ToString(docNum)));
                    indexWriter.AddDocument(config.Build(taxoWriter, doc));
                }
            }

            indexWriter.Commit();
        }
 /// <summary>
 /// Indexes at least two documents that carry facet fields but no content
 /// terms, then commits so they are flushed into a segment.
 /// NOTE(review): the expectedCounts parameter is not used in this body;
 /// presumably kept for signature symmetry with sibling helpers — verify.
 /// </summary>
 private static void IndexDocsWithFacetsNoTerms(IndexWriter indexWriter, TaxonomyWriter taxoWriter, IDictionary<string, int?> expectedCounts)
 {
     Random random = Random();
     int numDocs = AtLeast(random, 2);
     FacetsConfig config = Config;
     for (int i = 0; i < numDocs; i++)
     {
         Document doc = new Document();
         // false => no content field; this doc carries facets only.
         AddFacets(doc, config, false);
         indexWriter.AddDocument(config.Build(taxoWriter, doc));
     }
     indexWriter.Commit(); // flush a segment
 }
示例#25
0
        /// <summary>
        /// Sorting on a string field: a document that lacks the field entirely
        /// (null) must sort before a document whose field value is the empty string.
        /// </summary>
        public virtual void TestEmptyStringVsNullStringSort()
        {
            Directory dir = NewDirectory();
            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
            // Doc 0: has f == "" plus the match field t.
            Document doc = new Document();
            doc.Add(NewStringField("f", "", Field.Store.NO));
            doc.Add(NewStringField("t", "1", Field.Store.NO));
            w.AddDocument(doc);
            w.Commit();
            // Doc 1: only the match field t; f is missing (sorts as null).
            doc = new Document();
            doc.Add(NewStringField("t", "1", Field.Store.NO));
            w.AddDocument(doc);

            IndexReader r = DirectoryReader.Open(w, true);
            w.Dispose();
            IndexSearcher s = NewSearcher(r);
            TopDocs hits = s.Search(new TermQuery(new Term("t", "1")), null, 10, new Sort(new SortField("f", SortField.Type_e.STRING)));
            Assert.AreEqual(2, hits.TotalHits);
            // null sorts first
            Assert.AreEqual(1, hits.ScoreDocs[0].Doc);
            Assert.AreEqual(0, hits.ScoreDocs[1].Doc);
            r.Dispose();
            dir.Dispose();
        }
        /// <summary>
        /// Checks that a SnapshotDeletionPolicy can be reused across several
        /// IndexWriter sessions: the snapshotted commit's files must remain
        /// copyable until Release(), after which they may be deleted.
        /// </summary>
        public virtual void  TestReuseAcrossWriters()
        {
            Directory dir = new MockRAMDirectory();
            
            SnapshotDeletionPolicy dp = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
            IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);
            // Force frequent flushes
            writer.SetMaxBufferedDocs(2);
            Document doc = new Document();
            doc.Add(new Field("content", "aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
            for (int i = 0; i < 7; i++)
            {
                writer.AddDocument(doc);
                if (i % 2 == 0)
                {
                    writer.Commit();
                }
            }
            // Snapshot the current commit; its files must survive until Release().
            IndexCommit cp =  dp.Snapshot();
            CopyFiles(dir, cp);
            writer.Close();
            CopyFiles(dir, cp);

            // Reopen with the same deletion policy: the snapshot must stay valid.
            writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);
            CopyFiles(dir, cp);
            for (int i = 0; i < 7; i++)
            {
                writer.AddDocument(doc);
                if (i % 2 == 0)
                {
                    writer.Commit();
                }
            }
            CopyFiles(dir, cp);
            writer.Close();
            CopyFiles(dir, cp);
            // After Release() the snapshotted files may be reclaimed...
            dp.Release();
            writer = new IndexWriter(dir, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);
            writer.Close();

            // ...so copying the released commit is now expected to fail.
            Assert.Throws<System.IO.FileNotFoundException>(() => CopyFiles(dir, cp), "did not hit expected IOException");
            dir.Close();
        }
Example #27
0
        public virtual void TestMaxScore()
        {
            Directory dir = NewDirectory();
            // Not RIW because we need exactly 2 segs:
            IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
            int id = 0;
            for (int segment = 0; segment < 2; segment++)
            {
                for (int docIndex = 0; docIndex < 10; docIndex++)
                {
                    // Body repeats "text" id times, so later docs score higher.
                    StringBuilder body = new StringBuilder();
                    for (int repeat = 0; repeat < id; repeat++)
                    {
                        body.Append(' ');
                        body.Append("text");
                    }
                    Document document = new Document();
                    document.Add(NewStringField("id", "" + docIndex, Field.Store.YES));
                    document.Add(NewTextField("body", body.ToString(), Field.Store.NO));
                    writer.AddDocument(document);
                    id++;
                }
                writer.Commit(); // seal this segment
            }

            IndexReader reader = DirectoryReader.Open(writer, true);
            writer.Dispose();
            Query query = new TermQuery(new Term("body", "text"));
            IndexSearcher searcher = NewSearcher(reader);
            float expectedMaxScore = searcher.Search(query, 10).MaxScore;
            // Every sorted search variant must report the same max score as the
            // plain relevance search, regardless of docsScoredInOrder.
            Assert.AreEqual(expectedMaxScore, searcher.Search(query, null, 3, Sort.INDEXORDER, Random().NextBoolean(), true).MaxScore, 0.0);
            Assert.AreEqual(expectedMaxScore, searcher.Search(query, null, 3, Sort.RELEVANCE, Random().NextBoolean(), true).MaxScore, 0.0);
            Assert.AreEqual(expectedMaxScore, searcher.Search(query, null, 3, new Sort(new SortField[] { new SortField("id", SortField.Type_e.INT, false) }), Random().NextBoolean(), true).MaxScore, 0.0);
            Assert.AreEqual(expectedMaxScore, searcher.Search(query, null, 3, new Sort(new SortField[] { new SortField("id", SortField.Type_e.INT, true) }), Random().NextBoolean(), true).MaxScore, 0.0);
            reader.Dispose();
            dir.Dispose();
        }
Example #28
0
 // Builds an index in which every one of the numDeletedDocs docs has been
 // deleted (maxDoc > 0, numDocs == 0) and returns its directory.
 private static RAMDirectory MakeEmptyIndex(int numDeletedDocs)
 {
     RAMDirectory dir = new RAMDirectory();
     IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, MaxFieldLength.LIMITED);
     for (int i = 0; i < numDeletedDocs; i++)
     {
         writer.AddDocument(new Document());
     }
     writer.Commit();
     // Delete everything, then commit so the deletions are persisted.
     writer.DeleteDocuments(new MatchAllDocsQuery());
     writer.Commit();

     if (numDeletedDocs > 0)
     {
         Assert.IsTrue(writer.HasDeletions(), "writer has no deletions");
     }

     Assert.AreEqual(numDeletedDocs, writer.MaxDoc(), "writer is missing some deleted docs");
     Assert.AreEqual(0, writer.NumDocs(), "writer has non-deleted docs");
     writer.Close();

     IndexReader reader = IndexReader.Open(dir, true);
     Assert.AreEqual(numDeletedDocs, reader.NumDeletedDocs, "reader has wrong number of deleted docs");
     reader.Close();
     return dir;
 }
        // Indexes NUM_DOCS docs where "field1" uses the default codec and
        // "field2" a pulsing codec (via CustomPerFieldCodec), interleaving
        // commits and deletes, then verifies doc counts and per-field search
        // results across NRT reopens and a force-merge to one segment.
        public virtual void TestPerFieldCodec()
        {
            int NUM_DOCS = AtLeast(173);
            if (VERBOSE)
            {
                Console.WriteLine("TEST: NUM_DOCS=" + NUM_DOCS);
            }

            using (BaseDirectoryWrapper dir = NewDirectory())
            {
                dir.CheckIndexOnClose = false; // we use a custom codec provider
                using (IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetCodec(new CustomPerFieldCodec()).SetMergePolicy(NewLogMergePolicy(3))))
                {
                    Documents.Document doc = new Documents.Document();
                    // uses default codec:
                    doc.Add(NewTextField("field1", "this field uses the standard codec as the test", Field.Store.NO));
                    // uses pulsing codec:
                    Field field2 = NewTextField("field2", "this field uses the pulsing codec as the test", Field.Store.NO);
                    doc.Add(field2);

                    Field idField = NewStringField("id", "", Field.Store.NO);

                    doc.Add(idField);
                    // Reuse the same Document instance, mutating only the id;
                    // commit every 10 docs so multiple segments are written.
                    for (int i = 0; i < NUM_DOCS; i++)
                    {
                        idField.StringValue = "" + i;
                        w.AddDocument(doc);
                        if ((i + 1) % 10 == 0)
                        {
                            w.Commit();
                        }
                    }
                    if (VERBOSE)
                    {
                        Console.WriteLine("TEST: now delete id=77");
                    }
                    w.DeleteDocuments(new Term("id", "77"));

                    // NRT reader must see the uncommitted delete of id=77.
                    using (IndexReader r = DirectoryReader.Open(w, true))
                    {
                        Assert.AreEqual(NUM_DOCS - 1, r.NumDocs);
                        IndexSearcher s = NewSearcher(r);
                        Assert.AreEqual(NUM_DOCS - 1, s.Search(new TermQuery(new Term("field1", "standard")), 1).TotalHits);
                        Assert.AreEqual(NUM_DOCS - 1, s.Search(new TermQuery(new Term("field2", "pulsing")), 1).TotalHits);
                    }

                    if (VERBOSE)
                    {
                        Console.WriteLine("\nTEST: now delete 2nd doc");
                    }
                    w.DeleteDocuments(new Term("id", "44"));

                    if (VERBOSE)
                    {
                        Console.WriteLine("\nTEST: now force merge");
                    }
                    w.ForceMerge(1);
                    if (VERBOSE)
                    {
                        Console.WriteLine("\nTEST: now open reader");
                    }
                    // After the merge the deletes are compacted away, so
                    // MaxDoc equals NumDocs.
                    using (IndexReader r = DirectoryReader.Open(w, true))
                    {
                        Assert.AreEqual(NUM_DOCS - 2, r.MaxDoc);
                        Assert.AreEqual(NUM_DOCS - 2, r.NumDocs);
                        IndexSearcher s = NewSearcher(r);
                        Assert.AreEqual(NUM_DOCS - 2, s.Search(new TermQuery(new Term("field1", "standard")), 1).TotalHits);
                        Assert.AreEqual(NUM_DOCS - 2, s.Search(new TermQuery(new Term("field2", "pulsing")), 1).TotalHits);
                        Assert.AreEqual(1, s.Search(new TermQuery(new Term("id", "76")), 1).TotalHits);
                        Assert.AreEqual(0, s.Search(new TermQuery(new Term("id", "77")), 1).TotalHits);
                        Assert.AreEqual(0, s.Search(new TermQuery(new Term("id", "44")), 1).TotalHits);

                        if (VERBOSE)
                        {
                            Console.WriteLine("\nTEST: now close NRT reader");
                        }
                    }
                }
            }
        }
 // Writes a small batch of plain (facet-free) docs and commits so they land
 // in their own segment.
 private static void IndexDocsNoFacets(IndexWriter indexWriter)
 {
     int docCount = AtLeast(2);
     for (int i = 0; i < docCount; i++)
     {
         Document document = new Document();
         AddField(document);
         indexWriter.AddDocument(document);
     }
     indexWriter.Commit(); // flush a segment
 }
 // Writes docs that always carry facets; each doc coin-flips whether it also
 // gets the content field, then commits so the batch forms its own segment.
 private static void IndexDocsWithFacetsAndSomeTerms(IndexWriter indexWriter, TaxonomyWriter taxoWriter, IDictionary<string, int?> expectedCounts)
 {
     Random random = Random();
     int docCount = AtLeast(random, 2);
     FacetsConfig facetsConfig = Config;
     for (int i = 0; i < docCount; i++)
     {
         Document document = new Document();
         bool withContent = random.NextBoolean();
         if (withContent)
         {
             AddField(document);
         }
         AddFacets(document, facetsConfig, withContent);
         indexWriter.AddDocument(facetsConfig.Build(taxoWriter, document));
     }
     indexWriter.Commit(); // flush a segment
 }
        public virtual void TestReplaceTaxonomyDirectory()
        {
            Store.Directory indexDir = NewDirectory();
            Store.Directory taxoDir = NewDirectory();
            IndexWriter indexWriter = new IndexWriter(indexDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
            var taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
            indexWriter.Commit();
            taxoWriter.Commit();

            // Build a replacement taxonomy containing the "a/b" category.
            Store.Directory replacementTaxoDir = NewDirectory();
            var replacementTaxoWriter = new DirectoryTaxonomyWriter(replacementTaxoDir);
            replacementTaxoWriter.AddCategory(new FacetLabel("a", "b"));
            replacementTaxoWriter.Dispose();

            var manager = new SearcherTaxonomyManager(indexDir, taxoDir, null);
            SearcherAndTaxonomy pair = manager.Acquire();
            try
            {
                // Original taxonomy is empty: size 1 (root only).
                Assert.AreEqual(1, pair.taxonomyReader.Size);
            }
            finally
            {
                manager.Release(pair);
            }

            indexWriter.AddDocument(new Document());
            taxoWriter.ReplaceTaxonomy(replacementTaxoDir);
            taxoDir2Cleanup(replacementTaxoDir);
            indexWriter.Commit();
            taxoWriter.Commit();

            manager.MaybeRefresh();
            pair = manager.Acquire();
            try
            {
                // After the swap the refreshed reader sees the 3 ordinals
                // of the replacement taxonomy.
                Assert.AreEqual(3, pair.taxonomyReader.Size);
            }
            finally
            {
                manager.Release(pair);
            }

            IOUtils.Close(manager, taxoWriter, indexWriter, taxoDir, indexDir);
        }
Example #33
0
 // The refresh listener must fire on an actual refresh, not on commit.
 public virtual void TestListenerCalled()
 {
     Directory directory = NewDirectory();
     IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT, null));
     AtomicBoolean refreshObserved = new AtomicBoolean(false);
     SearcherManager manager = new SearcherManager(writer, false, new SearcherFactory());
     manager.AddListener(new RefreshListenerAnonymousInnerClassHelper(this, refreshObserved));
     writer.AddDocument(new Document());
     writer.Commit();
     // Committing alone must NOT trigger the listener...
     Assert.IsFalse(refreshObserved.Get());
     manager.MaybeRefreshBlocking();
     // ...but an explicit refresh must.
     Assert.IsTrue(refreshObserved.Get());
     manager.Dispose();
     writer.Dispose();
     directory.Dispose();
 }
Example #34
0
        // Decrementing a searcher's reader refCount behind the manager's back
        // must poison the manager: the next Acquire() throws.
        public virtual void TestReferenceDecrementIllegally([ValueSource(typeof(ConcurrentMergeSchedulers), "Values")]IConcurrentMergeScheduler scheduler)
        {
            Directory directory = NewDirectory();
            var writerConfig = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergeScheduler(scheduler);
            IndexWriter writer = new IndexWriter(directory, writerConfig);
            SearcherManager manager = new SearcherManager(writer, false, new SearcherFactory());
            writer.AddDocument(new Document());
            writer.Commit();
            manager.MaybeRefreshBlocking();

            // Balanced acquire/release pairs are legal.
            IndexSearcher first = manager.Acquire();
            IndexSearcher second = manager.Acquire();
            manager.Release(first);
            manager.Release(second);

            // Now tamper with the refCount outside of the manager.
            first = manager.Acquire();
            first.IndexReader.DecRef();
            manager.Release(first);

            Assert.Throws<InvalidOperationException>(() => manager.Acquire(), "acquire should have thrown an InvalidOperationException since we modified the refCount outside of the manager");

            // manager.Dispose(); -- already closed by the illegal DecRef
            writer.Dispose();
            directory.Dispose();
        }
Example #35
0
        // With locking disabled on the directory, a second live IndexWriter on
        // the same index (normally forbidden) must open without error.
        public virtual void TestRAMDirectoryNoLocking()
        {
            MockDirectoryWrapper dir = new MockDirectoryWrapper(Random(), new RAMDirectory());
            dir.LockFactory = NoLockFactory.DoNoLockFactory;
            dir.WrapLockFactory = false; // we are gonna explicitly test we get this back
            Assert.IsTrue(typeof(NoLockFactory).IsInstanceOfType(dir.LockFactory), "RAMDirectory.setLockFactory did not take");

            IndexWriter primaryWriter = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
            primaryWriter.Commit(); // required so the second open succeed

            // Create a 2nd IndexWriter. This is normally not allowed but it
            // should run through since we're not using any locks:
            IndexWriter secondaryWriter = null;
            try
            {
                secondaryWriter = new IndexWriter(dir, (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetOpenMode(IndexWriterConfig.OpenMode_e.APPEND));
            }
            catch (Exception e)
            {
                Console.Out.Write(e.StackTrace);
                Assert.Fail("Should not have hit an IOException with no locking");
            }

            primaryWriter.Dispose();
            if (secondaryWriter != null)
            {
                secondaryWriter.Dispose();
            }
        }