A memory-resident Directory implementation. Locking is handled by SingleInstanceLockFactory by default, but can be changed with SetLockFactory.
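
A minimal sketch of swapping the lock factory, using the same SetLockFactory and NoLockFactory calls that appear in the TestCustomLockFactory and TestRAMDirectoryNoLocking examples further down:

    Directory dir = new RAMDirectory();
    // Disable locking entirely; only appropriate when a single writer in a single
    // process ever touches the directory.
    dir.SetLockFactory(NoLockFactory.GetNoLockFactory());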

Warning: this class is not intended for huge indexes. Anything beyond several hundred megabytes will waste resources (GC cycles), because it uses an internal buffer size of 1024 bytes and therefore produces millions of byte[1024] arrays. This class is optimized for small memory-resident indexes. It also has poor concurrency in multithreaded environments.

It is recommended to materialize large indexes on disk and use MMapDirectory, a high-performance directory implementation that works directly against the operating system's file system cache, so copying data onto the managed heap is unnecessary.
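
The sketch below illustrates that recommendation. The index path is made up; FSDirectory.Open (used in several of the examples below) picks a platform-appropriate directory implementation, which may be a memory-mapped one where the Lucene.Net version in use provides MMapDirectory.

    // Small, short-lived index: keep it entirely in memory.
    Lucene.Net.Store.Directory smallIndex = new RAMDirectory();

    // Large index: materialize it on disk instead of filling the managed heap.
    Lucene.Net.Store.Directory largeIndex =
        FSDirectory.Open(new System.IO.DirectoryInfo(@"C:\indexes\products"));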

Inheritance: Lucene.Net.Store.BaseDirectory
Example #1
        public virtual void  TestSimpleSkip()
        {
            RAMDirectory dir    = new RAMDirectory();
            IndexWriter  writer = new IndexWriter(dir, new PayloadAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);
            Term         term   = new Term("test", "a");

            for (int i = 0; i < 5000; i++)
            {
                Document d1 = new Document();
                d1.Add(new Field(term.Field, term.Text, Field.Store.NO, Field.Index.ANALYZED));
                writer.AddDocument(d1, null);
            }
            writer.Commit(null);
            writer.Optimize(null);
            writer.Close();

            IndexReader          reader = SegmentReader.GetOnlySegmentReader(dir, null);
            SegmentTermPositions tp     = (SegmentTermPositions)reader.TermPositions(null);

            tp.freqStream = new CountingStream(this, tp.freqStream);

            for (int i = 0; i < 2; i++)
            {
                counter = 0;
                tp.Seek(term, null);

                CheckSkipTo(tp, 14, 185);                 // no skips
                CheckSkipTo(tp, 17, 190);                 // one skip on level 0
                CheckSkipTo(tp, 287, 200);                // one skip on level 1, two on level 0

                // this test would fail if we had only one skip level,
                // because then more bytes would be read from the freqStream
                CheckSkipTo(tp, 4800, 250);                 // one skip on level 2
            }
        }
        public virtual void  TestDanish()
        {
            /* build an index */
            RAMDirectory danishIndex = new RAMDirectory();
            IndexWriter  writer      = new IndexWriter(danishIndex, new SimpleAnalyzer(), T, IndexWriter.MaxFieldLength.LIMITED, null);

            // Danish collation orders the words below in the given order
            // (example taken from TestSort.testInternationalSort() ).
            System.String[] words = new System.String[] { "H\u00D8T", "H\u00C5T", "MAND" };
            for (int docnum = 0; docnum < words.Length; ++docnum)
            {
                Document doc = new Document();
                doc.Add(new Field("content", words[docnum], Field.Store.YES, Field.Index.NOT_ANALYZED));
                doc.Add(new Field("body", "body", Field.Store.YES, Field.Index.NOT_ANALYZED));
                writer.AddDocument(doc, null);
            }
            writer.Optimize(null);
            writer.Close();

            IndexReader   reader = IndexReader.Open((Directory)danishIndex, true, null);
            IndexSearcher search = new IndexSearcher(reader);

            System.Globalization.CompareInfo c = new System.Globalization.CultureInfo("da" + "-" + "dk").CompareInfo;

            // Unicode order would not include "H\u00C5T" in [ "H\u00D8T", "MAND" ],
            // but Danish collation does.
            ScoreDoc[] result = search.Search(Csrq("content", "H\u00D8T", "MAND", F, F, c), null, 1000, null).ScoreDocs;
            AssertEquals("The index Term should be included.", 1, result.Length);

            result = search.Search(Csrq("content", "H\u00C5T", "MAND", F, F, c), null, 1000, null).ScoreDocs;
            AssertEquals("The index Term should not be included.", 0, result.Length);
            search.Close();
        }
Example #3
        public void MrsJones()
        {
            using (var dir = new RAMDirectory())
            using (var analyzer = new LowerCaseKeywordAnalyzer())
            {
                using (var writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED))
                {
                    var document = new Lucene.Net.Documents.Document();
                    document.Add(new Field("Name", "MRS. SHABA", Field.Store.NO, Field.Index.ANALYZED_NO_NORMS));
                    writer.AddDocument(document);
                }

                var searcher = new IndexSearcher(dir, true);

                var termEnum = searcher.IndexReader.Terms();
                while (termEnum.Next())
                {
                    var buffer = termEnum.Term.Text;
                    Console.WriteLine(buffer);
                }

                var queryParser = new RangeQueryParser(Version.LUCENE_29, "", analyzer);
                var query = queryParser.Parse("Name:\"MRS. S*\"");
                Console.WriteLine(query);
                var result = searcher.Search(query, 10);

                Assert.NotEqual(0, result.TotalHits);
            }
        }
        public static void Main()
        {
            var directory = new RAMDirectory();

            var provider = new LuceneDataProvider(directory, Version.LUCENE_30);

            // add some documents
            using (var session = provider.OpenSession<Article>())
            {
                session.Add(new Article { Author = "John Doe", BodyText = "some body text", PublishDate = DateTimeOffset.UtcNow });
            }

            var articles = provider.AsQueryable<Article>();

            var threshold = DateTimeOffset.UtcNow.Subtract(TimeSpan.FromDays(30));

            var articlesByJohn = from a in articles
                                 where a.Author == "John Smith" && a.PublishDate > threshold
                                 orderby a.Title
                                 select a;

            var searchResults = from a in articles
                                 where a.SearchText == "some search query"
                                 select a;

        }
Example #5
        public virtual void  TestMethod()
        {
            RAMDirectory directory = new RAMDirectory();

            System.String[] values = new System.String[] { "1", "2", "3", "4" };

            try
            {
                IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);
                for (int i = 0; i < values.Length; i++)
                {
                    Document doc = new Document();
                    doc.Add(new Field(FIELD, values[i], Field.Store.YES, Field.Index.NOT_ANALYZED));
                    writer.AddDocument(doc, null);
                }
                writer.Close();

                BooleanQuery booleanQuery1 = new BooleanQuery();
                booleanQuery1.Add(new TermQuery(new Term(FIELD, "1")), Occur.SHOULD);
                booleanQuery1.Add(new TermQuery(new Term(FIELD, "2")), Occur.SHOULD);

                BooleanQuery query = new BooleanQuery();
                query.Add(booleanQuery1, Occur.MUST);
                query.Add(new TermQuery(new Term(FIELD, "9")), Occur.MUST_NOT);

                IndexSearcher indexSearcher = new IndexSearcher(directory, true, null);
                ScoreDoc[]    hits          = indexSearcher.Search(query, null, 1000, null).ScoreDocs;
                Assert.AreEqual(2, hits.Length, "Number of matched documents");
            }
            catch (System.IO.IOException e)
            {
                Assert.Fail(e.Message);
            }
        }
Example #6
        protected virtual void LoadIndexerConfiguration()
        {
            NameValueCollection appSettings = ConfigurationManager.AppSettings;

            InDebug = false;
            string inDebug = appSettings["InDebug"];

            if (string.IsNullOrEmpty(inDebug) == false)
            {
                InDebug = Convert.ToBoolean(inDebug);
            }

            string indexFileSystemLocation = appSettings["IndexFileSystemLocation"];

            // Store the index on disk
            if (string.IsNullOrEmpty(indexFileSystemLocation) == false)
            {
                IndexDirectory = LuceneStore.FSDirectory.Open(new DirectoryInfo(indexFileSystemLocation));
            }
            // Store the index in memory
            else
            {
                IndexDirectory = new LuceneStore.RAMDirectory();
            }

            string itemsToIndexFilename = appSettings["ItemsToIndexFilename"];

            if (string.IsNullOrEmpty(itemsToIndexFilename) == false)
            {
                ItemsToIndexFilename = itemsToIndexFilename;
                DeserializeItemsToIndex();
            }

            DocumentCreator = Factory.Instance.Create<U>();
        }
        protected virtual Directory GetDirectory()
        {
            if(this._directory == null)
                this._directory = new RAMDirectory();

            return this._directory;
        }
Example #8
 public void Dispose()
 {
     facetHandlers = null;
     if (directory.isOpen_ForNUnit) directory.Dispose();
     directory = null;
     analyzer = null;
 }
        public void Ensure_Children_Are_Sorted()
        {
            using (var luceneDir = new RAMDirectory())
            {
                var indexer = IndexInitializer.GetUmbracoIndexer(luceneDir);
                indexer.RebuildIndex();

                var searcher = IndexInitializer.GetUmbracoSearcher(luceneDir);
                var result = searcher.Search(searcher.CreateSearchCriteria().Id(1111).Compile());
                Assert.IsNotNull(result);
                Assert.AreEqual(1, result.TotalItemCount);

                var searchItem = result.First();
                var backedMedia = new ExamineBackedMedia(searchItem, indexer, searcher);
                var children = backedMedia.ChildrenAsList.Value;

                var currSort = 0;
                for (var i = 0; i < children.Count(); i++)
                {
                    Assert.GreaterOrEqual(children[i].SortOrder, currSort);
                    currSort = children[i].SortOrder;
                }
            }

        }
Example #10
        public void TestReadersWriters()
        {
            Directory dir;
            
            using(dir = new RAMDirectory())
            {
                Document doc;
                IndexWriter writer;
                IndexReader reader;

                using (writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED))
                {
                    Field field = new Field("name", "value", Field.Store.YES,Field.Index.ANALYZED);
                    doc = new Document();
                    doc.Add(field);
                    writer.AddDocument(doc);
                    writer.Commit();

                    using (reader = writer.GetReader())
                    {
                        IndexReader r1 = reader.Reopen();
                    }

                    Assert.Throws<AlreadyClosedException>(() => reader.Reopen(), "IndexReader shouldn't be open here");
                }
                
                Assert.Throws<AlreadyClosedException>(() => writer.AddDocument(doc), "IndexWriter shouldn't be open here");

                Assert.IsTrue(dir.isOpen_ForNUnit, "RAMDirectory");
            }
            Assert.IsFalse(dir.isOpen_ForNUnit, "RAMDirectory");
        }
Example #11
        /*public TestCustomSearcherSort(System.String name):base(name)
         * {
         * }*/

        /*[STAThread]
         * public static void  Main(System.String[] argv)
         * {
         *      // TestRunner.run(suite()); // {{Aroush-2.9}} how is this done in NUnit?
         * }*/

        /*public static Test suite()
         * {
         *      return new TestSuite(typeof(TestCustomSearcherSort));
         * }*/


        // create an index for testing
        private Directory GetIndex()
        {
            RAMDirectory indexStore = new RAMDirectory();
            IndexWriter  writer     = new IndexWriter(indexStore, new StandardAnalyzer(Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED, null);
            RandomGen    random     = new RandomGen(this, NewRandom());

            for (int i = 0; i < INDEX_SIZE; ++i)
            {
                // don't decrease; if too low the problem doesn't show up
                Document doc = new Document();
                if ((i % 5) != 0)
                {
                    // some documents must not have an entry in the first sort field
                    doc.Add(new Field("publicationDate_", random.GetLuceneDate(), Field.Store.YES, Field.Index.NOT_ANALYZED));
                }
                if ((i % 7) == 0)
                {
                    // some documents to match the query (see below)
                    doc.Add(new Field("content", "test", Field.Store.YES, Field.Index.ANALYZED));
                }
                // every document has a defined 'mandant' field
                doc.Add(new Field("mandant", System.Convert.ToString(i % 3), Field.Store.YES, Field.Index.NOT_ANALYZED));
                writer.AddDocument(doc, null);
            }
            writer.Optimize(null);
            writer.Close();
            return(indexStore);
        }
Example #12
        private static RAMDirectory MakeEmptyIndex(int numDeletedDocs)
        {
            RAMDirectory d = new RAMDirectory();
            IndexWriter  w = new IndexWriter(d, new WhitespaceAnalyzer(), true, MaxFieldLength.LIMITED, null);

            for (int i = 0; i < numDeletedDocs; i++)
            {
                w.AddDocument(new Document(), null);
            }
            w.Commit(null);
            w.DeleteDocuments(null, new MatchAllDocsQuery());
            w.Commit(null);

            if (0 < numDeletedDocs)
            {
                Assert.IsTrue(w.HasDeletions(null), "writer has no deletions");
            }

            Assert.AreEqual(numDeletedDocs, w.MaxDoc(), "writer is missing some deleted docs");
            Assert.AreEqual(0, w.NumDocs(null), "writer has non-deleted docs");
            w.Close();
            IndexReader r = IndexReader.Open((Directory)d, true, null);

            Assert.AreEqual(numDeletedDocs, r.NumDeletedDocs, "reader has wrong number of deleted docs");
            r.Close();
            return(d);
        }
Example #13
        public override void  SetUp()
        {
            base.SetUp();
            Document doc;

            RAMDirectory rd1 = new RAMDirectory();
            IndexWriter  iw1 = new IndexWriter(rd1, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);

            doc = new Document();
            doc.Add(new Field("field1", "the quick brown fox jumps", Field.Store.YES, Field.Index.ANALYZED));
            doc.Add(new Field("field2", "the quick brown fox jumps", Field.Store.YES, Field.Index.ANALYZED));
            doc.Add(new Field("field4", "", Field.Store.NO, Field.Index.ANALYZED));
            iw1.AddDocument(doc, null);

            iw1.Close();
            RAMDirectory rd2 = new RAMDirectory();
            IndexWriter  iw2 = new IndexWriter(rd2, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);

            doc = new Document();
            doc.Add(new Field("field0", "", Field.Store.NO, Field.Index.ANALYZED));
            doc.Add(new Field("field1", "the fox jumps over the lazy dog", Field.Store.YES, Field.Index.ANALYZED));
            doc.Add(new Field("field3", "the fox jumps over the lazy dog", Field.Store.YES, Field.Index.ANALYZED));
            iw2.AddDocument(doc, null);

            iw2.Close();

            this.ir1 = IndexReader.Open((Directory)rd1, true, null);
            this.ir2 = IndexReader.Open((Directory)rd2, true, null);
        }
Example #14
        public virtual void  TestSeek()
        {
            Directory   directory = new RAMDirectory();
            IndexWriter writer    = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);

            for (int i = 0; i < 10; i++)
            {
                Document doc = new Document();
                doc.Add(new Field(this.field, "a b", Field.Store.YES, Field.Index.ANALYZED));
                writer.AddDocument(doc, null);
            }

            writer.Close();
            IndexReader   reader = IndexReader.Open(directory, true, null);
            TermPositions tp     = reader.TermPositions(null);

            tp.Seek(new Term(this.field, "b"), null);
            for (int i = 0; i < 10; i++)
            {
                tp.Next(null);
                Assert.AreEqual(tp.Doc, i);
                Assert.AreEqual(tp.NextPosition(null), 1);
            }
            tp.Seek(new Term(this.field, "a"), null);
            for (int i = 0; i < 10; i++)
            {
                tp.Next(null);
                Assert.AreEqual(tp.Doc, i);
                Assert.AreEqual(tp.NextPosition(null), 0);
            }
        }
Example #15
        public static Lucene.Net.Store.Directory CreateIndex()
        {
            var directory = new Lucene.Net.Store.RAMDirectory();
            var dte       = DTEManager.getCurrentDTE();

            var content = ConvertTextToDataTable(GetSolutionDirectory(dte));


            using (Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_30))
                using (var writer = new IndexWriter(directory, analyzer, new IndexWriter.MaxFieldLength(1000)))
                {
                    foreach (DataRow row in content.Rows)
                    {
                        var document = new Lucene.Net.Documents.Document();

                        document.Add(new Field("path", row["FileName"].ToString(), Field.Store.YES, Field.Index.ANALYZED));
                        document.Add(new Field("contents", row["Text"].ToString(), Field.Store.YES, Field.Index.ANALYZED));

                        writer.AddDocument(document);
                    }

                    writer.Optimize();
                    writer.Flush(true, true, true);
                }

            return(directory);
        }
        public void Ensure_Result_Has_All_Values()
        {
            using (var luceneDir = new RAMDirectory())
            {
                var indexer = IndexInitializer.GetUmbracoIndexer(luceneDir);
                indexer.RebuildIndex();

                var searcher = IndexInitializer.GetUmbracoSearcher(luceneDir);
                var result = searcher.Search(searcher.CreateSearchCriteria().Id(1111).Compile());
                Assert.IsNotNull(result);
                Assert.AreEqual(1, result.TotalItemCount);

                var searchItem = result.First();
                var backedMedia = new ExamineBackedMedia(searchItem, indexer, searcher);

                Assert.AreEqual(searchItem.Id, backedMedia.Id);
                Assert.AreEqual(searchItem.Fields["sortOrder"], backedMedia.SortOrder.ToString());
                Assert.AreEqual(searchItem.Fields["urlName"], backedMedia.UrlName);
                Assert.AreEqual(DateTools.StringToDate(searchItem.Fields["createDate"]), backedMedia.CreateDate);
                Assert.AreEqual(DateTools.StringToDate(searchItem.Fields["updateDate"]), backedMedia.UpdateDate);
                Assert.AreEqual(Guid.Parse(searchItem.Fields["version"]), backedMedia.Version);
                Assert.AreEqual(searchItem.Fields["level"], backedMedia.Level.ToString());
                Assert.AreEqual(searchItem.Fields["writerID"], backedMedia.WriterID.ToString());
                Assert.AreEqual(searchItem.Fields["writerID"], backedMedia.CreatorID.ToString()); //there's only writerId in the xml
                Assert.AreEqual(searchItem.Fields["writerName"], backedMedia.CreatorName);
                Assert.AreEqual(searchItem.Fields["writerName"], backedMedia.WriterName); //there's only writer name in the xml
            }

        }
Example #17
		public virtual void  TestCustomLockFactory()
		{
			Directory dir = new RAMDirectory();
			MockLockFactory lf = new MockLockFactory(this);
			dir.SetLockFactory(lf);
			
			// Lock prefix should have been set:
			Assert.IsTrue(lf.lockPrefixSet, "lock prefix was not set by the RAMDirectory");
			
			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
			
			// add 100 documents (so that commit lock is used)
			for (int i = 0; i < 100; i++)
			{
				AddDoc(writer);
			}
			
			// Both write lock and commit lock should have been created:
			Assert.AreEqual(1, lf.locksCreated.Count, "# of unique locks created (after instantiating IndexWriter)");
			Assert.IsTrue(lf.makeLockCount >= 1, "# calls to makeLock is 0 (after instantiating IndexWriter)");
			
			for (System.Collections.IEnumerator e = lf.locksCreated.Keys.GetEnumerator(); e.MoveNext(); )
			{
				System.String lockName = (System.String) e.Current;
				MockLockFactory.MockLock lock_Renamed = (MockLockFactory.MockLock) lf.locksCreated[lockName];
				Assert.IsTrue(lock_Renamed.lockAttempts > 0, "# calls to Lock.obtain is 0 (after instantiating IndexWriter)");
			}
			
			writer.Close();
		}
Example #18
        public override void  SetUp()
        {
            base.SetUp();
            RAMDirectory directory = new RAMDirectory();
            IndexWriter  writer    = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);
            long         theLong   = System.Int64.MaxValue;
            double       theDouble = System.Double.MaxValue;
            sbyte        theByte   = (sbyte)System.SByte.MaxValue;
            short        theShort  = System.Int16.MaxValue;
            int          theInt    = System.Int32.MaxValue;
            float        theFloat  = System.Single.MaxValue;

            for (int i = 0; i < NUM_DOCS; i++)
            {
                Document doc = new Document();
                doc.Add(new Field("theLong", System.Convert.ToString(theLong--), Field.Store.NO, Field.Index.NOT_ANALYZED));
                doc.Add(new Field("theDouble", (theDouble--).ToString("E16"), Field.Store.NO, Field.Index.NOT_ANALYZED));
                doc.Add(new Field("theByte", System.Convert.ToString((sbyte)theByte--), Field.Store.NO, Field.Index.NOT_ANALYZED));
                doc.Add(new Field("theShort", System.Convert.ToString(theShort--), Field.Store.NO, Field.Index.NOT_ANALYZED));
                doc.Add(new Field("theInt", System.Convert.ToString(theInt--), Field.Store.NO, Field.Index.NOT_ANALYZED));
                doc.Add(new Field("theFloat", (theFloat--).ToString("E8"), Field.Store.NO, Field.Index.NOT_ANALYZED));
                writer.AddDocument(doc, null);
            }
            writer.Close();
            reader = IndexReader.Open((Directory)directory, true, null);
        }
Example #19
        public override void  SetUp()
        {
            base.SetUp();
            System.String[] docText   = new System.String[] { "docThatNeverMatchesSoWeCanRequireLastDocCollectedToBeGreaterThanZero", "one blah three", "one foo three multiOne", "one foobar three multiThree", "blueberry pancakes", "blueberry pie", "blueberry strudel", "blueberry pizza" };
            Directory       directory = new RAMDirectory();
            IndexWriter     iw        = new IndexWriter(directory, new WhitespaceAnalyzer(), true, MaxFieldLength.UNLIMITED, null);

            for (int i = 0; i < N_DOCS; i++)
            {
                Add(docText[i % docText.Length], iw);
            }
            iw.Close();
            searcher = new IndexSearcher(directory, true, null);

            System.String qtxt = "one";
            // start from 1, so that the 0th doc never matches
            for (int i = 1; i < docText.Length; i++)
            {
                qtxt += (' ' + docText[i]);                 // large query so that search will be longer
            }
            QueryParser queryParser = new QueryParser(Util.Version.LUCENE_CURRENT, FIELD_NAME, new WhitespaceAnalyzer());

            query = queryParser.Parse(qtxt);

            // warm the searcher
            searcher.Search(query, null, 1000, null);
        }
Example #20
		public void MrsJones()
		{
			var dir = new RAMDirectory();
			var analyzer = new WhitespaceAnalyzer();
			var writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED);
			var document = new Lucene.Net.Documents.Document();
			document.Add(new Field("Name", "MRS. SHABA", Field.Store.NO, Field.Index.ANALYZED_NO_NORMS));
			writer.AddDocument(document);

			writer.Close(true);

			

			var searcher = new IndexSearcher(dir, true);

			var termEnum = searcher.GetIndexReader().Terms();
			while (termEnum.Next())
			{
				var buffer = termEnum.Term().Text();
				Console.WriteLine(buffer);
			} 

			var queryParser = new QueryParser(Version.LUCENE_29, "", analyzer);
			queryParser.SetLowercaseExpandedTerms(false);
			var query = queryParser.Parse("Name:MRS.*");
			Console.WriteLine(query);
			var result = searcher.Search(query, 10);

			Assert.NotEqual(0,result.totalHits);
		}
Example #21
        public virtual void  TestPhrasePrefixWithBooleanQuery()
        {
            RAMDirectory indexStore = new RAMDirectory();
            IndexWriter  writer     = new IndexWriter(indexStore, new StandardAnalyzer(Util.Version.LUCENE_CURRENT, Support.Compatibility.SetFactory.CreateHashSet <string>()), true, IndexWriter.MaxFieldLength.LIMITED, null);

            Add("This is a test", "object", writer);
            Add("a note", "note", writer);
            writer.Close();

            IndexSearcher searcher = new IndexSearcher(indexStore, true, null);

            // This query will be equivalent to +type:note +body:"a t*"
            BooleanQuery q = new BooleanQuery();

            q.Add(new TermQuery(new Term("type", "note")), Occur.MUST);

            MultiPhraseQuery trouble = new MultiPhraseQuery();

            trouble.Add(new Term("body", "a"));
            trouble.Add(new Term[] { new Term("body", "test"), new Term("body", "this") });
            q.Add(trouble, Occur.MUST);

            // exception will be thrown here without fix for #35626:
            ScoreDoc[] hits = searcher.Search(q, null, 1000, null).ScoreDocs;
            Assert.AreEqual(0, hits.Length, "Wrong number of hits");
            searcher.Close();
        }
Example #22
        public void TestSimpleSearch()
        {
            var bookGuid = Guid.Parse("11111111-1111-1111-1111-111111111111");

            const string dataToIndex = @"some test string
            some other line
            next line";

            var bookGuid2 = Guid.Parse("11111111-1111-1111-1111-111111111112");

            const string dataToIndex2 = @"some test2 string
            some other line
            next line";

            //TODO: abstract references to lucene
            using (var directory = new RAMDirectory())
            {
                /*var indexer = new BookDataIndexer(directory);

                indexer.IndexContent(bookGuid, new StringReader(dataToIndex));
                indexer.IndexContent(bookGuid2, new StringReader(dataToIndex2));

                var searcher = new BookSearchService(directory);

                IEnumerable<SearchResult> searchResults = searcher.Search("test");
                Assert.That(searchResults.Single().BookId, Is.EqualTo(bookGuid));

                searchResults = searcher.Search("test2");
                Assert.That(searchResults.Single().BookId, Is.EqualTo(bookGuid2));*/
            }
        }
Example #23
        public virtual void  TestDemo_Renamed()
        {
            Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_CURRENT);

            // Store the index in memory:
            Directory directory = new RAMDirectory();
            // To store an index on disk, use this instead:
            //Directory directory = FSDirectory.open("/tmp/testindex");
            IndexWriter iwriter = new IndexWriter(directory, analyzer, true, new IndexWriter.MaxFieldLength(25000));
            Document    doc     = new Document();

            System.String text = "This is the text to be indexed.";
            doc.Add(new Field("fieldname", text, Field.Store.YES, Field.Index.ANALYZED));
            iwriter.AddDocument(doc);
            iwriter.Close();

            // Now search the index:
            IndexSearcher isearcher = new IndexSearcher(directory, true); // read-only=true
            // Parse a simple query that searches for "text":
            QueryParser parser = new QueryParser(Util.Version.LUCENE_CURRENT, "fieldname", analyzer);
            Query       query  = parser.Parse("text");

            ScoreDoc[] hits = isearcher.Search(query, null, 1000).ScoreDocs;
            Assert.AreEqual(1, hits.Length);
            // Iterate through the results:
            for (int i = 0; i < hits.Length; i++)
            {
                Document hitDoc = isearcher.Doc(hits[i].Doc);
                Assert.AreEqual(hitDoc.Get("fieldname"), "This is the text to be indexed.");
            }
            isearcher.Close();
            directory.Close();
        }
        public void HelloWorldTest()
        {
            Directory directory = new RAMDirectory();
            Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_29);
            IndexWriter writer = new IndexWriter(directory,
                analyzer,
                IndexWriter.MaxFieldLength.UNLIMITED);

            Document doc = new Document();
            doc.Add(new Field("id", "1", Field.Store.YES, Field.Index.NO));
            doc.Add(new Field("postBody", "sample test", Field.Store.YES, Field.Index.ANALYZED));
            writer.AddDocument(doc);
            writer.Optimize();
            writer.Commit();
            writer.Close();

            QueryParser parser = new QueryParser(Version.LUCENE_29, "postBody", analyzer);
            Query query = parser.Parse("sample test");

            //Setup searcher
            IndexSearcher searcher = new IndexSearcher(directory, true);
            //Do the search
            var hits = searcher.Search(query, null, 10);

            for (int i = 0; i < hits.TotalHits; i++)
            {
                var doc1 = hits.ScoreDocs[i];
            }

            searcher.Close();
            directory.Close();
        }
Example #25
    protected void AddIndexbtn_Click(object sender, EventArgs e)
    {
        Lucene.Net.Store.Directory ramdir = new Lucene.Net.Store.RAMDirectory();

        Console.WriteLine("Indexing...");
        DateTime start = DateTime.Now;

        string path4 = Server.MapPath("./") + @"1.4\\";
        if (System.IO.Directory.Exists(path4))// check whether the directory exists
        {
            Indexer.IntranetIndexer indexer4 = new Indexer.IntranetIndexer(Server.MapPath("index\\1.4\\"));
            indexer4.AddDirectory(new System.IO.DirectoryInfo(path4), "*.*");
            indexer4.Close();
        }
        //IntranetIndexer indexer = new IntranetIndexer(ramdir);// write the index into memory

        string path5 = Server.MapPath("./") + @"1.5\\";
        if (System.IO.Directory.Exists(path5))
        {
             Indexer.IntranetIndexer indexer5 = new Indexer.IntranetIndexer(Server.MapPath("index\\1.5\\"));
             indexer5.AddDirectory(new System.IO.DirectoryInfo(path5), "*.*");
             indexer5.Close();

        }

        Console.WriteLine("Done. Took " + (DateTime.Now - start));
        Response.Write("<script type='text/javascript'>window.alert(' Index created and optimized successfully! ');</script>");
    }
Example #26
        public void CreateSearchIndex()
        {
            directory = new RAMDirectory();
            analyzer = new StandardAnalyzer(Version.LUCENE_30);
            var ixw = new IndexWriter(directory, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED);
            LookupTable = new Dictionary<string, BaseContent>();
            foreach (BaseContent p in Service.PoIs.ToList())
            {
                var document = new Document();
                document.Add(new Field("id", p.Id.ToString(), Field.Store.YES, Field.Index.NO, Field.TermVector.NO));
                string all = p.Name + " ";
                foreach (MetaInfo mi in p.EffectiveMetaInfo)
                {
                    string value;
                    if (mi.Type != MetaTypes.text || !p.Labels.TryGetValue(mi.Label, out value)) continue;
                    document.Add(new Field(mi.Label, value, Field.Store.YES, Field.Index.ANALYZED));
                    all += value + " ";
                }
                document.Add(new Field("All", all, Field.Store.YES, Field.Index.ANALYZED));

                LookupTable[p.Id.ToString()] = p;
                ixw.AddDocument(document);
            }
            ixw.Commit();
        }
Example #27
        public virtual void  TestFarsi()
        {
            /* build an index */
            RAMDirectory farsiIndex = new RAMDirectory();
            IndexWriter  writer     = new IndexWriter(farsiIndex, new SimpleAnalyzer(), T, IndexWriter.MaxFieldLength.LIMITED, null);
            Document     doc        = new Document();

            doc.Add(new Field("content", "\u0633\u0627\u0628", Field.Store.YES, Field.Index.NOT_ANALYZED));
            doc.Add(new Field("body", "body", Field.Store.YES, Field.Index.NOT_ANALYZED));
            writer.AddDocument(doc, null);

            writer.Optimize(null);
            writer.Close();

            IndexReader   reader = IndexReader.Open((Directory)farsiIndex, true, null);
            IndexSearcher search = new IndexSearcher(reader);
            Query         q      = new TermQuery(new Term("body", "body"));

            // Neither Java 1.4.2 nor 1.5.0 has Farsi Locale collation available in
            // RuleBasedCollator.  However, the Arabic Locale seems to order the Farsi
            // characters properly.
            System.Globalization.CompareInfo collator = new System.Globalization.CultureInfo("ar").CompareInfo;

            // Unicode order would include U+0633 in [ U+062F - U+0698 ], but Farsi
            // orders the U+0698 character before the U+0633 character, so the single
            // index Term below should NOT be returned by a TermRangeFilter with a Farsi
            // Collator (or an Arabic one for the case when Farsi is not supported).
            int numHits = search.Search(q, new TermRangeFilter("content", "\u062F", "\u0698", T, T, collator), 1000, null).TotalHits;

            Assert.AreEqual(0, numHits, "The index Term should not be included.");

            numHits = search.Search(q, new TermRangeFilter("content", "\u0633", "\u0638", T, T, collator), 1000, null).TotalHits;
            Assert.AreEqual(1, numHits, "The index Term should be included.");
            search.Close();
        }
Example #28
        private void button1_Click(object sender, EventArgs e)
        {
            Directory index = new RAMDirectory();
            StandardAnalyzer analyzer = new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_29);

            IndexWriter w = new IndexWriter(index, analyzer);

            addDoc(w, "Lucene in Action");
            addDoc(w, "Lucene for Dummies");
            addDoc(w, "Managing Gigabytes");
            addDoc(w, "The Art of Computer Science");
            w.Close();

            String querystr = "Lucene in Action";

            Query q =  new QueryParser(Lucene.Net.Util.Version.LUCENE_29, "title", analyzer).Parse(querystr);
            //q.Parse();

            int hitsPerPage = 10;
            IndexReader reader = IndexReader.Open(index,true);
            IndexSearcher searcher = new IndexSearcher(reader);
            TopScoreDocCollector collector = TopScoreDocCollector.Create(hitsPerPage, true);
            searcher.Search(q, collector);
            ScoreDoc[] hits = collector.TopDocs().ScoreDocs;

            System.Console.WriteLine("Found {0} Hits", hits.Length);

            foreach (var item in hits)
            {
                int docId = item.Doc;
                Document d = searcher.Doc(docId);
                System.Console.WriteLine(d.Get("title") + " " + item.Score);
            }
        }
        public Directory DirectoryFor(string id, bool persistent)
        {
            if (persistent)
            {
                var info = new DirectoryInfo(Path.Combine(_root.FullName, id+".index"));

                var directory = new SimpleFSDirectory(info);
                if (!info.Exists || !info.EnumerateFiles().Any())
                {
                    new IndexWriter(directory, new StandardAnalyzer(Version.LUCENE_29),
                                    true,
                                    IndexWriter.MaxFieldLength.UNLIMITED)
                        .Dispose();
                }

                return directory;
            }
            else
            {
                if (!_inMemoryDirectories.ContainsKey(id))
                {
                    _inMemoryDirectories[id] = new RAMDirectory();
                }
                return _inMemoryDirectories[id];
            }
        }
Example #30
        public override void  SetUp()
        {
            base.SetUp();
            directory = new RAMDirectory();
            IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);

            Document doc = new Document();

            doc.Add(new Field("field", "one two three four five", Field.Store.YES, Field.Index.ANALYZED));
            doc.Add(new Field("sorter", "b", Field.Store.YES, Field.Index.ANALYZED));
            writer.AddDocument(doc, null);

            doc = new Document();
            doc.Add(new Field("field", "one two three four", Field.Store.YES, Field.Index.ANALYZED));
            doc.Add(new Field("sorter", "d", Field.Store.YES, Field.Index.ANALYZED));
            writer.AddDocument(doc, null);

            doc = new Document();
            doc.Add(new Field("field", "one two three y", Field.Store.YES, Field.Index.ANALYZED));
            doc.Add(new Field("sorter", "a", Field.Store.YES, Field.Index.ANALYZED));
            writer.AddDocument(doc, null);

            doc = new Document();
            doc.Add(new Field("field", "one two x", Field.Store.YES, Field.Index.ANALYZED));
            doc.Add(new Field("sorter", "c", Field.Store.YES, Field.Index.ANALYZED));
            writer.AddDocument(doc, null);

            writer.Optimize(null);
            writer.Close();

            searcher = new IndexSearcher(directory, true, null);
            query    = new TermQuery(new Term("field", "three"));
            filter   = NewStaticFilterB();
        }
Example #31
        public void Test_IndexReader_IsCurrent()
        {
            RAMDirectory ramDir = new RAMDirectory();
            IndexWriter writer = new IndexWriter(ramDir, new KeywordAnalyzer(), true, new IndexWriter.MaxFieldLength(1000));
            Field field = new Field("TEST", "mytest", Field.Store.YES, Field.Index.ANALYZED);
            Document doc = new Document();
            doc.Add(field);
            writer.AddDocument(doc);

            IndexReader reader = writer.GetReader();

            writer.DeleteDocuments(new Lucene.Net.Index.Term("TEST", "mytest"));

            Assert.IsFalse(reader.IsCurrent());

            int resCount1 = new IndexSearcher(reader).Search(new TermQuery(new Term("TEST", "mytest")),100).TotalHits;
            Assert.AreEqual(1, resCount1);

            writer.Commit();

            Assert.IsFalse(reader.IsCurrent());

            int resCount2 = new IndexSearcher(reader).Search(new TermQuery(new Term("TEST", "mytest")),100).TotalHits;
            Assert.AreEqual(1, resCount2, "Reopen not invoked yet, resultCount must still be 1.");

            reader = reader.Reopen();
            Assert.IsTrue(reader.IsCurrent());

            int resCount3 = new IndexSearcher(reader).Search(new TermQuery(new Term("TEST", "mytest")), 100).TotalHits;
            Assert.AreEqual(0, resCount3, "After reopen, resultCount must be 0.");

            reader.Close();
            writer.Dispose();
        }
Example #32
		private static void Main(string[] args)
		{
		    var ramDirectory = new RAMDirectory();
		    var spellChecker = new SpellChecker.Net.Search.Spell.SpellChecker(ramDirectory);
		    var ms = new MemoryStream();
		    var sw = new StreamWriter(ms);
            sw.WriteLine("Book");
            sw.WriteLine("Bath");
            sw.WriteLine("Bed");
            sw.WriteLine("Make");
            sw.WriteLine("Model");
            sw.WriteLine("Vacum");
            sw.WriteLine("Wending machine");
            sw.Flush();
		    ms.Position = 0;
            spellChecker.setStringDistance(new JaroWinklerDistance());
            spellChecker.SetAccuracy(0.3f);
            spellChecker.IndexDictionary(new PlainTextDictionary(ms), CancellationToken.None);

		    var indexReader = IndexReader.Open(ramDirectory, true);
		    var termEnum = indexReader.Terms();
		    while (termEnum.Next())
		    {
		        Console.WriteLine(termEnum.Term);
		    }

		    var suggestSimilar = spellChecker.SuggestSimilar("both", 10);
		    foreach (var s in suggestSimilar)
		    {
		        Console.WriteLine(s);
		    }
		}
        public void Init()
        {
            facetHandlers = new List<IFacetHandler>();

            directory = new RAMDirectory();
            analyzer = new WhitespaceAnalyzer();
            selectionProperties = new Dictionary<string, string>();
            IndexWriter writer = new IndexWriter(directory, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED);

            writer.AddDocument(Doc("prop1=val1", "prop2=val1", "prop5=val1"));
            writer.AddDocument(Doc("prop1=val2", "prop3=val1", "prop7=val7"));
            writer.AddDocument(Doc("prop1=val2", "prop3=val2", "prop3=val3"));
            writer.AddDocument(Doc("prop1=val1", "prop2=val1"));
            writer.AddDocument(Doc("prop1=val1", "prop2=val1"));
            writer.AddDocument(Doc("prop1=val1", "prop2=val1", "prop4=val2", "prop4=val3"));
            writer.Commit();

            attributesFacetHandler = new AttributesFacetHandler(AttributeHandlerName, AttributeHandlerName, null, null,
                new Dictionary<string, string>());
            facetHandlers.Add(attributesFacetHandler);
            IndexReader reader = IndexReader.Open(directory, true);
            boboReader = BoboIndexReader.GetInstance(reader, facetHandlers);
            attributesFacetHandler.LoadFacetData(boboReader);
            browser = new BoboBrowser(boboReader);
        }
Example #34
        public void Ensure_Children_Sorted_With_Examine()
        {
            using (var luceneDir = new RAMDirectory())
            {
                var indexer = IndexInitializer.GetUmbracoIndexer(luceneDir);
                indexer.RebuildIndex();
                var searcher = IndexInitializer.GetUmbracoSearcher(luceneDir);
                var ctx = GetUmbracoContext("/test", 1234);
                var cache = new ContextualPublishedMediaCache(new PublishedMediaCache(searcher, indexer), ctx);

                //we are using the media.xml media to test the examine results implementation, see the media.xml file in the ExamineHelpers namespace
                var publishedMedia = cache.GetById(1111);
                var rootChildren = publishedMedia.Children().ToArray();
                var currSort = 0;
                for (var i = 0; i < rootChildren.Count(); i++)
                {
                    Assert.GreaterOrEqual(rootChildren[i].SortOrder, currSort);
                    currSort = rootChildren[i].SortOrder;
                }
            }

        }
Example #35
		public virtual void  TestRAMDirectoryNoLocking()
		{
			Directory dir = new RAMDirectory();
			dir.SetLockFactory(NoLockFactory.GetNoLockFactory());
			
			Assert.IsTrue(typeof(NoLockFactory).IsInstanceOfType(dir.GetLockFactory()), "RAMDirectory.setLockFactory did not take");
			
			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
			
			// Create a 2nd IndexWriter.  This is normally not allowed but it should run through since we're not
			// using any locks:
			IndexWriter writer2 = null;
			try
			{
				writer2 = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
			}
			catch (System.Exception e)
			{
				System.Console.Out.WriteLine(e.StackTrace);
				Assert.Fail("Should not have hit an IOException with no locking");
			}
			
			writer.Close();
			if (writer2 != null)
			{
				writer2.Close();
			}
		}
Example #36
        public void TestSpanRegex()
        {
            RAMDirectory directory = new RAMDirectory();
            IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
            Document doc = new Document();
            // doc.Add(new Field("field", "the quick brown fox jumps over the lazy dog",
            // Field.Store.NO, Field.Index.ANALYZED));
            // writer.AddDocument(doc);
            // doc = new Document();
            doc.Add(new Field("field", "auto update", Field.Store.NO,
                Field.Index.ANALYZED));
            writer.AddDocument(doc);
            doc = new Document();
            doc.Add(new Field("field", "first auto update", Field.Store.NO,
                Field.Index.ANALYZED));
            writer.AddDocument(doc);
            writer.Optimize();
            writer.Close();

            IndexSearcher searcher = new IndexSearcher(directory, true);
            SpanRegexQuery srq = new SpanRegexQuery(new Term("field", "aut.*"));
            SpanFirstQuery sfq = new SpanFirstQuery(srq, 1);
            // SpanNearQuery query = new SpanNearQuery(new SpanQuery[] {srq, stq}, 6,
            // true);
            int numHits = searcher.Search(sfq, null, 1000).TotalHits;
            Assert.AreEqual(1, numHits);
        }
        public static void CreateIndex() {
            try
            {
                var cloudAccount = Azure.GetStorageAccount();

                using (var cacheDirectory = new RAMDirectory())
                {
                    using (var azureDirectory = new AzureDirectory(cloudAccount, Azure.StorageContainerName, cacheDirectory))
                    {
                        using (Analyzer analyzer = new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30))
                        {
                            using (var indexWriter = new IndexWriter(azureDirectory, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED))
                            {
                                AddDocuments(indexWriter);

                                indexWriter.Commit();
                            }
                        }
                    }
                }
            }
            catch (StorageException ex)
            {
                Trace.TraceError(ex.Message);
            }
        }
        public virtual void TestCachingWorks()
        {
            Directory dir = new RAMDirectory();
            IndexWriter writer = new IndexWriter(dir, new KeywordAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
            writer.Close();

            IndexReader reader = IndexReader.Open(dir, true);

            MockFilter filter = new MockFilter();
            CachingWrapperFilter cacher = new CachingWrapperFilter(filter);

            // first time, nested filter is called
            cacher.GetDocIdSet(reader);
            Assert.IsTrue( filter.WasCalled(),"first time");

            // make sure no exception if cache is holding the wrong docIdSet
            cacher.GetDocIdSet(reader);

            // second time, nested filter should not be called
            filter.Clear();
            cacher.GetDocIdSet(reader);
            Assert.IsFalse(filter.WasCalled(),"second time" );

            reader.Close();
        }
Example #39
        public void MissingTerms_Test()
        {
            string fieldName = "field1";
            RAMDirectory rd = new RAMDirectory();
            IndexWriter w = new IndexWriter(rd, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
            for (int i = 0; i < 100; i++)
            {
                Document doc = new Document();
                int term = i * 10; //terms are units of 10;
                doc.Add(new Field(fieldName, "" + term, Field.Store.YES, Field.Index.NOT_ANALYZED));
                w.AddDocument(doc);
            }
            w.Close();
            IndexReader reader = IndexReader.Open(rd, true);

            TermsFilter tf = new TermsFilter();
            tf.AddTerm(new Term(fieldName, "19"));
            OpenBitSet bits = (OpenBitSet)tf.GetDocIdSet(reader);
            Assert.AreEqual(0, bits.Cardinality(), "Must match nothing");

            tf.AddTerm(new Term(fieldName, "20"));
            bits = (OpenBitSet)tf.GetDocIdSet(reader);
            Assert.AreEqual(1, bits.Cardinality(), "Must match 1");

            tf.AddTerm(new Term(fieldName, "10"));
            bits = (OpenBitSet)tf.GetDocIdSet(reader);
            Assert.AreEqual(2, bits.Cardinality(), "Must match 2");

            tf.AddTerm(new Term(fieldName, "00"));
            bits = (OpenBitSet)tf.GetDocIdSet(reader);
            Assert.AreEqual(2, bits.Cardinality(), "Must match 2");
        }
Example #40
        public Engine()
        {
            var directory = new RAMDirectory();
            var analyzer = new StandardAnalyzer(Version.LUCENE_30);

            using (var indexWriter = new IndexWriter(directory, analyzer, true, IndexWriter.MaxFieldLength.LIMITED))
            {
                for (int i = 0; i < 10000; i++)
                {
                    Console.Write(".");
                    var document = new Document();
                    document.Add(new Field("Id", i.ToString(), Field.Store.YES, Field.Index.NOT_ANALYZED));
                    document.Add(new Field("Name", "Name" + i.ToString(), Field.Store.YES, Field.Index.ANALYZED));
                    indexWriter.AddDocument(document);
                }
            }

            Console.ReadKey();

            var queryParser = new QueryParser(Version.LUCENE_30, "Name", analyzer);
            var query = queryParser.Parse("Name37~");

            IndexReader indexReader = IndexReader.Open(directory, true);
            var searcher = new IndexSearcher(indexReader);

            TopDocs resultDocs = searcher.Search(query, indexReader.MaxDoc);
        }
Example #41
        public virtual void  TestBooleanQueryContainingSingleTermPrefixQuery()
        {
            // this tests against bug 33161 (now fixed)
            // In order to cause the bug, the outer query must have more than one term
            // and all terms required.
            // The contained PhraseMultiQuery must contain exactly one term array.

            RAMDirectory indexStore = new RAMDirectory();
            IndexWriter  writer     = new IndexWriter(indexStore, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);

            Add("blueberry pie", writer);
            Add("blueberry chewing gum", writer);
            Add("blue raspberry pie", writer);
            writer.Optimize(null);
            writer.Close();

            IndexSearcher searcher = new IndexSearcher(indexStore, true, null);
            // This query will be equivalent to +body:pie +body:"blue*"
            BooleanQuery q = new BooleanQuery();

            q.Add(new TermQuery(new Term("body", "pie")), Occur.MUST);

            MultiPhraseQuery trouble = new MultiPhraseQuery();

            trouble.Add(new Term[] { new Term("body", "blueberry"), new Term("body", "blue") });
            q.Add(trouble, Occur.MUST);

            // exception will be thrown here without fix
            ScoreDoc[] hits = searcher.Search(q, null, 1000, null).ScoreDocs;

            Assert.AreEqual(2, hits.Length, "Wrong number of hits");
            searcher.Close();
        }
Example #42
        public void Can_Add_Multiple_Values_To_Single_Index_Field()
        {
            using (var d = new RAMDirectory())
            using (var customIndexer = IndexInitializer.GetUmbracoIndexer(d))
            {

                EventHandler<DocumentWritingEventArgs> handler = (sender, args) =>
                {
                    args.Document.Add(new Field("headerText", "another value", Field.Store.YES, Field.Index.ANALYZED));
                };
                
                customIndexer.DocumentWriting += handler;

                customIndexer.RebuildIndex();

                customIndexer.DocumentWriting += handler;
                
                var customSearcher = IndexInitializer.GetLuceneSearcher(d);
                var results = customSearcher.Search(customSearcher.CreateSearchCriteria().NodeName("home").Compile());
                Assert.Greater(results.TotalItemCount, 0);
                foreach (var result in results)
                {
                    var vals = result.GetValues("headerText");
                    Assert.AreEqual(2, vals.Count());
                    Assert.AreEqual("another value", vals.ElementAt(1));
                }
            }
        }
Example #43
 public virtual void  TestDemo_Renamed()
 {
     
     Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_CURRENT);
     
     // Store the index in memory:
     Directory directory = new RAMDirectory();
     // To store an index on disk, use this instead:
     //Directory directory = FSDirectory.open("/tmp/testindex");
     IndexWriter iwriter = new IndexWriter(directory, analyzer, true, new IndexWriter.MaxFieldLength(25000));
     Document doc = new Document();
     System.String text = "This is the text to be indexed.";
     doc.Add(new Field("fieldname", text, Field.Store.YES, Field.Index.ANALYZED));
     iwriter.AddDocument(doc);
     iwriter.Close();
     
     // Now search the index:
     IndexSearcher isearcher = new IndexSearcher(directory, true); // read-only=true
     // Parse a simple query that searches for "text":
     QueryParser parser = new QueryParser(Util.Version.LUCENE_CURRENT, "fieldname", analyzer);
     Query query = parser.Parse("text");
     ScoreDoc[] hits = isearcher.Search(query, null, 1000).ScoreDocs;
     Assert.AreEqual(1, hits.Length);
     // Iterate through the results:
     for (int i = 0; i < hits.Length; i++)
     {
         Document hitDoc = isearcher.Doc(hits[i].Doc);
         Assert.AreEqual(hitDoc.Get("fieldname"), "This is the text to be indexed.");
     }
     isearcher.Close();
     directory.Close();
 }
 // Methods
 public SortableInMemoryIndexSearchContext(ILuceneIndex index)
 {
     Assert.ArgumentNotNull(index, "index");
     var idx = new RAMDirectory(index.Directory);
     this._scope = new LockScope(idx);
     this._writer = index.CreateWriter(false);
 }
Example #45
		protected Lucene.Net.Store.Directory OpenOrCreateLuceneDirectory(IndexDefinition indexDefinition, string indexName = null)
		{
			Lucene.Net.Store.Directory directory;
			if (indexDefinition.IsTemp || configuration.RunInMemory)
			{
				directory = new RAMDirectory();
				new IndexWriter(directory, dummyAnalyzer, IndexWriter.MaxFieldLength.UNLIMITED).Close(); // creating index structure
			}
			else
			{
				var indexDirectory = indexName ?? IndexDefinitionStorage.FixupIndexName(indexDefinition.Name, path);
				var indexFullPath = Path.Combine(path, MonoHttpUtility.UrlEncode(indexDirectory));
				directory = FSDirectory.Open(new DirectoryInfo(indexFullPath));

				if (!IndexReader.IndexExists(directory))
				{
					//creating index structure if we need to
					new IndexWriter(directory, dummyAnalyzer, IndexWriter.MaxFieldLength.UNLIMITED).Close();
				}
				else
				{
					// forcefully unlock locked indexes if any
					if (IndexWriter.IsLocked(directory))
						IndexWriter.Unlock(directory);
				}
			}

			return directory;
		}
Example #46
        public void testMissingTerms()
        {
            String fieldName = "field1";
            Directory rd = new RAMDirectory();
            var w = new IndexWriter(rd, new KeywordAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
            for (int i = 0; i < 100; i++)
            {
                var doc = new Document();
                int term = i*10; //terms are units of 10;
                doc.Add(new Field(fieldName, "" + term, Field.Store.YES, Field.Index.ANALYZED));
                w.AddDocument(doc);
            }
            IndexReader reader = w.GetReader();
            w.Close();

            TermsFilter tf = new TermsFilter();
            tf.AddTerm(new Term(fieldName, "19"));
            FixedBitSet bits = (FixedBitSet) tf.GetDocIdSet(reader);
            Assert.AreEqual(0, bits.Cardinality(), "Must match nothing");

            tf.AddTerm(new Term(fieldName, "20"));
            bits = (FixedBitSet) tf.GetDocIdSet(reader);
            Assert.AreEqual(1, bits.Cardinality(), "Must match 1");

            tf.AddTerm(new Term(fieldName, "10"));
            bits = (FixedBitSet) tf.GetDocIdSet(reader);
            Assert.AreEqual(2, bits.Cardinality(), "Must match 2");

            tf.AddTerm(new Term(fieldName, "00"));
            bits = (FixedBitSet) tf.GetDocIdSet(reader);
            Assert.AreEqual(2, bits.Cardinality(), "Must match 2");

            reader.Close();
            rd.Close();
        }
Exemplo n.º 47
0
        public virtual void  TestPhrasePrefix()
        {
            RAMDirectory indexStore = new RAMDirectory();
            IndexWriter  writer     = new IndexWriter(indexStore, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);
            Document     doc1       = new Document();
            Document     doc2       = new Document();
            Document     doc3       = new Document();
            Document     doc4       = new Document();
            Document     doc5       = new Document();

            doc1.Add(new Field("body", "blueberry pie", Field.Store.YES, Field.Index.ANALYZED));
            doc2.Add(new Field("body", "blueberry strudel", Field.Store.YES, Field.Index.ANALYZED));
            doc3.Add(new Field("body", "blueberry pizza", Field.Store.YES, Field.Index.ANALYZED));
            doc4.Add(new Field("body", "blueberry chewing gum", Field.Store.YES, Field.Index.ANALYZED));
            doc5.Add(new Field("body", "piccadilly circus", Field.Store.YES, Field.Index.ANALYZED));
            writer.AddDocument(doc1, null);
            writer.AddDocument(doc2, null);
            writer.AddDocument(doc3, null);
            writer.AddDocument(doc4, null);
            writer.AddDocument(doc5, null);
            writer.Optimize(null);
            writer.Close();

            IndexSearcher searcher = new IndexSearcher(indexStore, true, null);

            //PhrasePrefixQuery query1 = new PhrasePrefixQuery();
            MultiPhraseQuery query1 = new MultiPhraseQuery();
            //PhrasePrefixQuery query2 = new PhrasePrefixQuery();
            MultiPhraseQuery query2 = new MultiPhraseQuery();

            query1.Add(new Term("body", "blueberry"));
            query2.Add(new Term("body", "strawberry"));

            System.Collections.ArrayList termsWithPrefix = new System.Collections.ArrayList();
            IndexReader ir = IndexReader.Open((Directory)indexStore, true, null);

            // this TermEnum gives "piccadilly", "pie" and "pizza".
            System.String prefix = "pi";
            TermEnum      te     = ir.Terms(new Term("body", prefix + "*"), null);

            do
            {
                if (te.Term.Text.StartsWith(prefix))
                {
                    termsWithPrefix.Add(te.Term);
                }
            } while (te.Next(null));

            query1.Add((Term[])termsWithPrefix.ToArray(typeof(Term)));
            query2.Add((Term[])termsWithPrefix.ToArray(typeof(Term)));
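            // query1 is effectively "blueberry (piccadilly|pie|pizza)" and should match doc1 and doc3;
            // query2 begins with "strawberry", which no document contains, so it should match nothing.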

            ScoreDoc[] result;
            result = searcher.Search(query1, null, 1000, null).ScoreDocs;
            Assert.AreEqual(2, result.Length);

            result = searcher.Search(query2, null, 1000, null).ScoreDocs;
            Assert.AreEqual(0, result.Length);
        }
Exemplo n.º 48
0
        public virtual void  CreateDummySearcher()
        {
            // Create a dummy index with nothing in it.
            // This could possibly fail if Lucene starts checking for docid ranges...
            RAMDirectory rd = new RAMDirectory();
            IndexWriter  iw = new IndexWriter(rd, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);

            iw.AddDocument(new Document(), null);
            iw.Close();
            s = new IndexSearcher(rd, null);
        }
Exemplo n.º 49
0
        public override void  SetUp()
        {
            base.SetUp();
            RAMDirectory directory = new RAMDirectory();
            IndexWriter  writer    = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);

            for (int i = 0; i < docFields.Length; i++)
            {
                Document document = new Document();
                document.Add(new Field(field, docFields[i], Field.Store.NO, Field.Index.ANALYZED));
                writer.AddDocument(document, null);
            }
            writer.Close();
            searcher = new IndexSearcher(directory, true, null);

            // Make big index
            dir2 = new MockRAMDirectory(directory);

            // First multiply small test index:
            mulFactor = 1;
            int docCount = 0;
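            // Repeatedly add the index to itself; each pass roughly doubles the document count until it exceeds 3000.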

            do
            {
                Directory   copy        = new RAMDirectory(dir2, null);
                IndexWriter indexWriter = new IndexWriter(dir2, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED, null);
                indexWriter.AddIndexesNoOptimize(null, new[] { copy });
                docCount = indexWriter.MaxDoc();
                indexWriter.Close();
                mulFactor *= 2;
            } while (docCount < 3000);

            IndexWriter w   = new IndexWriter(dir2, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED, null);
            Document    doc = new Document();

            doc.Add(new Field("field2", "xxx", Field.Store.NO, Field.Index.ANALYZED));
            for (int i = 0; i < NUM_EXTRA_DOCS / 2; i++)
            {
                w.AddDocument(doc, null);
            }
            doc = new Document();
            doc.Add(new Field("field2", "big bad bug", Field.Store.NO, Field.Index.ANALYZED));
            for (int i = 0; i < NUM_EXTRA_DOCS / 2; i++)
            {
                w.AddDocument(doc, null);
            }
            // optimize to 1 segment
            w.Optimize(null);
            reader = w.GetReader(null);
            w.Close();
            bigSearcher = new IndexSearcher(reader);
        }
Exemplo n.º 50
0
        public void Test_Store_RAMDirectory()
        {
            Lucene.Net.Store.RAMDirectory ramDIR = new Lucene.Net.Store.RAMDirectory();

            //Index 1 Doc
            Lucene.Net.Analysis.Analyzer analyzer = new Lucene.Net.Analysis.Core.WhitespaceAnalyzer(Version.LUCENE_CURRENT);
            var conf = new IndexWriterConfig(Version.LUCENE_CURRENT, analyzer);

            Lucene.Net.Index.IndexWriter  wr  = new Lucene.Net.Index.IndexWriter(ramDIR, conf /*new Lucene.Net.Analysis.WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED*/);
            Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
            doc.Add(new Lucene.Net.Documents.Field("field1", "value1 value11", Lucene.Net.Documents.Field.Store.YES, Lucene.Net.Documents.Field.Index.ANALYZED));
            wr.AddDocument(doc);
            wr.Dispose();

            //now serialize it
            System.Runtime.Serialization.Formatters.Binary.BinaryFormatter serializer = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
            System.IO.MemoryStream memoryStream = new System.IO.MemoryStream();
            serializer.Serialize(memoryStream, ramDIR);

            //Close DIR
            ramDIR.Dispose();
            ramDIR = null;

            //now deserialize
            memoryStream.Seek(0, System.IO.SeekOrigin.Begin);
            Lucene.Net.Store.RAMDirectory ramDIR2 = (Lucene.Net.Store.RAMDirectory)serializer.Deserialize(memoryStream);

            //Add 1 more doc
            Lucene.Net.Analysis.Analyzer analyzer2 = new Lucene.Net.Analysis.Core.WhitespaceAnalyzer(Version.LUCENE_CURRENT);
            var conf2 = new IndexWriterConfig(Version.LUCENE_CURRENT, analyzer2);

            wr  = new Lucene.Net.Index.IndexWriter(ramDIR2, conf2 /*new Lucene.Net.Analysis.WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.UNLIMITED*/);
            doc = new Lucene.Net.Documents.Document();
            doc.Add(new Lucene.Net.Documents.Field("field1", "value1 value11", Lucene.Net.Documents.Field.Store.YES, Lucene.Net.Documents.Field.Index.ANALYZED));
            wr.AddDocument(doc);
            wr.Dispose();
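            // The deserialized RAMDirectory should still contain the first document, so the search below is expected to return two hits.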

            Lucene.Net.Search.TopDocs topDocs;
            //Search
            using (var reader = DirectoryReader.Open(ramDIR2))
            {
                Lucene.Net.Search.IndexSearcher             s  = new Lucene.Net.Search.IndexSearcher(reader);
                Lucene.Net.QueryParsers.Classic.QueryParser qp = new Lucene.Net.QueryParsers.Classic.QueryParser(Version.LUCENE_CURRENT, "field1", new Lucene.Net.Analysis.Standard.StandardAnalyzer(Version.LUCENE_CURRENT));
                Lucene.Net.Search.Query q = qp.Parse("value1");
                topDocs = s.Search(q, 100);
            }

            Assert.AreEqual(topDocs.TotalHits, 2, "See the issue: LUCENENET-174");
        }
Exemplo n.º 51
0
        public void Test_SegmentTermVector_IndexOf()
        {
            Lucene.Net.Store.RAMDirectory directory = new Lucene.Net.Store.RAMDirectory();
            Lucene.Net.Analysis.Analyzer  analyzer  = new Lucene.Net.Analysis.WhitespaceAnalyzer();
            Lucene.Net.Index.IndexWriter  writer    = new Lucene.Net.Index.IndexWriter(directory, analyzer, Lucene.Net.Index.IndexWriter.MaxFieldLength.LIMITED);
            Lucene.Net.Documents.Document document  = new Lucene.Net.Documents.Document();
            document.Add(new Lucene.Net.Documents.Field("contents", new System.IO.StreamReader(new System.IO.MemoryStream(System.Text.Encoding.ASCII.GetBytes("a_ a0"))), Lucene.Net.Documents.Field.TermVector.WITH_OFFSETS));
            writer.AddDocument(document);
            Lucene.Net.Index.IndexReader        reader = writer.GetReader();
            Lucene.Net.Index.TermPositionVector tpv    = reader.GetTermFreqVector(0, "contents") as Lucene.Net.Index.TermPositionVector;
            //Console.WriteLine("tpv: " + tpv);
            int index = tpv.IndexOf("a_");

            Assert.AreEqual(index, 1, "See the issue: LUCENENET-183");
        }
Exemplo n.º 52
0
        public virtual void  TestSimilarity_Renamed()
        {
            RAMDirectory store  = new RAMDirectory();
            IndexWriter  writer = new IndexWriter(store, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);

            writer.SetSimilarity(new SimpleSimilarity());

            Document d1 = new Document();

            d1.Add(new Field("field", "a c", Field.Store.YES, Field.Index.ANALYZED));

            Document d2 = new Document();

            d2.Add(new Field("field", "a b c", Field.Store.YES, Field.Index.ANALYZED));

            writer.AddDocument(d1, null);
            writer.AddDocument(d2, null);
            writer.Optimize(null);
            writer.Close();

            Searcher searcher = new IndexSearcher(store, true, null);

            searcher.Similarity = new SimpleSimilarity();

            Term a = new Term("field", "a");
            Term b = new Term("field", "b");
            Term c = new Term("field", "c");

            searcher.Search(new TermQuery(b), new AnonymousClassCollector(this), null);

            BooleanQuery bq = new BooleanQuery();

            bq.Add(new TermQuery(a), Occur.SHOULD);
            bq.Add(new TermQuery(b), Occur.SHOULD);
            //System.out.println(bq.toString("field"));
            searcher.Search(bq, new AnonymousClassCollector1(this), null);

            PhraseQuery pq = new PhraseQuery();

            pq.Add(a);
            pq.Add(c);
            //System.out.println(pq.toString("field"));
            searcher.Search(pq, new AnonymousClassCollector2(this), null);

            pq.Slop = 2;
            //System.out.println(pq.toString("field"));
            searcher.Search(pq, new AnonymousClassCollector3(this), null);
        }
Exemplo n.º 53
0
        public override void  SetUp()
        {
            base.SetUp();
            directory = new RAMDirectory();
            IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);

            Document doc = new Document();

            doc.Add(new Field("partnum", "Q36", Field.Store.YES, Field.Index.NOT_ANALYZED));
            doc.Add(new Field("description", "Illidium Space Modulator", Field.Store.YES, Field.Index.ANALYZED));
            writer.AddDocument(doc, null);

            writer.Close();

            searcher = new IndexSearcher(directory, true, null);
        }
Exemplo n.º 54
0
        // test using a sparse index (with deleted docs). The DocIdSet should not be cacheable, as it uses TermDocs if the range contains 0
        public void TestSparseIndex()
        {
            RAMDirectory dir    = new RAMDirectory();
            IndexWriter  writer = new IndexWriter(dir, new SimpleAnalyzer(), T, IndexWriter.MaxFieldLength.LIMITED, null);

            for (int d = -20; d <= 20; d++)
            {
                Document doc = new Document();
                doc.Add(new Field("id", d.ToString(), Field.Store.NO, Field.Index.NOT_ANALYZED));
                doc.Add(new Field("body", "body", Field.Store.NO, Field.Index.NOT_ANALYZED));
                writer.AddDocument(doc, null);
            }

            writer.Optimize(null);
            writer.DeleteDocuments(null, new Term("id", "0"));
            writer.Close();

            IndexReader   reader = IndexReader.Open((Directory)dir, true, null);
            IndexSearcher Search = new IndexSearcher(reader);

            Assert.True(reader.HasDeletions);

            ScoreDoc[] result;
            Query      q = new TermQuery(new Term("body", "body"));

            FieldCacheRangeFilter <sbyte?> fcrf;
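            // Capture each filter instance so the cacheability of its DocIdSet can be asserted right after the search.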

            result = Search.Search(q, fcrf = FieldCacheRangeFilter.NewByteRange("id", -20, 20, T, T), 100, null).ScoreDocs;
            Assert.False(fcrf.GetDocIdSet(reader.GetSequentialSubReaders()[0], null).IsCacheable, "DocIdSet must be not cacheable");
            Assert.AreEqual(40, result.Length, "find all");

            result = Search.Search(q, fcrf = FieldCacheRangeFilter.NewByteRange("id", 0, 20, T, T), 100, null).ScoreDocs;
            Assert.False(fcrf.GetDocIdSet(reader.GetSequentialSubReaders()[0], null).IsCacheable, "DocIdSet must be not cacheable");
            Assert.AreEqual(20, result.Length, "find all");

            result = Search.Search(q, fcrf = FieldCacheRangeFilter.NewByteRange("id", -20, 0, T, T), 100, null).ScoreDocs;
            Assert.False(fcrf.GetDocIdSet(reader.GetSequentialSubReaders()[0], null).IsCacheable, "DocIdSet must be not cacheable");
            Assert.AreEqual(20, result.Length, "find all");

            result = Search.Search(q, fcrf = FieldCacheRangeFilter.NewByteRange("id", 10, 20, T, T), 100, null).ScoreDocs;
            Assert.True(fcrf.GetDocIdSet(reader.GetSequentialSubReaders()[0], null).IsCacheable, "DocIdSet must be cacheable");
            Assert.AreEqual(11, result.Length, "find all");

            result = Search.Search(q, fcrf = FieldCacheRangeFilter.NewByteRange("id", -20, -10, T, T), 100, null).ScoreDocs;
            Assert.True(fcrf.GetDocIdSet(reader.GetSequentialSubReaders()[0], null).IsCacheable, "DocIdSet must be cacheable");
            Assert.AreEqual(11, result.Length, "find all");
        }
Exemplo n.º 55
0
        static TestNumericRangeQuery32()
        {
            {
                try
                {
                    // set the theoretical maximum term count for 8bit (see docs for the number)
                    BooleanQuery.MaxClauseCount = 3 * 255 * 2 + 255;

                    directory = new RAMDirectory();
                    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, MaxFieldLength.UNLIMITED, null);

                    NumericField field8      = new NumericField("field8", 8, Field.Store.YES, true),
                                 field4      = new NumericField("field4", 4, Field.Store.YES, true),
                                 field2      = new NumericField("field2", 2, Field.Store.YES, true),
                                 fieldNoTrie = new NumericField("field" + System.Int32.MaxValue, System.Int32.MaxValue, Field.Store.YES, true),
                                 ascfield8   = new NumericField("ascfield8", 8, Field.Store.NO, true),
                                 ascfield4   = new NumericField("ascfield4", 4, Field.Store.NO, true),
                                 ascfield2   = new NumericField("ascfield2", 2, Field.Store.NO, true);

                    Document doc = new Document();
                    // add fields, that have a distance to test general functionality
                    doc.Add(field8); doc.Add(field4); doc.Add(field2); doc.Add(fieldNoTrie);
                    // add ascending fields with a distance of 1, beginning at -noDocs/2 to test the correct splitting of range and inclusive/exclusive
                    doc.Add(ascfield8); doc.Add(ascfield4); doc.Add(ascfield2);

                    // Add a series of noDocs docs with increasing int values
                    for (int l = 0; l < noDocs; l++)
                    {
                        int val = distance * l + startOffset;
                        field8.SetIntValue(val);
                        field4.SetIntValue(val);
                        field2.SetIntValue(val);
                        fieldNoTrie.SetIntValue(val);

                        val = l - (noDocs / 2);
                        ascfield8.SetIntValue(val);
                        ascfield4.SetIntValue(val);
                        ascfield2.SetIntValue(val);
                        writer.AddDocument(doc, null);
                    }

                    writer.Optimize(null);
                    writer.Close();
                    searcher = new IndexSearcher(directory, true, null);
                }
                catch (System.Exception e)
                {
                    throw new System.SystemException("", e);
                }
            }
        }
Exemplo n.º 56
0
        public override void  SetUp()
        {
            base.SetUp();
            directory = new RAMDirectory();


            IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);

            for (int i = 0; i < values.Length; i++)
            {
                Document doc = new Document();
                doc.Add(new Field(FIELD, values[i], Field.Store.YES, Field.Index.ANALYZED));
                writer.AddDocument(doc, null);
            }
            writer.Close();
            indexSearcher = new IndexSearcher(directory, false, null);
            indexReader   = indexSearcher.IndexReader;
        }
Exemplo n.º 57
0
        public override void  SetUp()
        {
            base.SetUp();
            RAMDirectory directory = new RAMDirectory();
            IndexWriter  writer    = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);

            writer.AddDocument(Doc(new Field[] { Field("id", "0"), Field("gender", "male"), Field("first", "james"), Field("last", "jones") }), null);

            writer.AddDocument(Doc(new Field[] { Field("id", "1"), Field("gender", "male"), Field("first", "james"), Field("last", "smith"), Field("gender", "female"), Field("first", "sally"), Field("last", "jones") }), null);

            writer.AddDocument(Doc(new Field[] { Field("id", "2"), Field("gender", "female"), Field("first", "greta"), Field("last", "jones"), Field("gender", "female"), Field("first", "sally"), Field("last", "smith"), Field("gender", "male"), Field("first", "james"), Field("last", "jones") }), null);

            writer.AddDocument(Doc(new Field[] { Field("id", "3"), Field("gender", "female"), Field("first", "lisa"), Field("last", "jones"), Field("gender", "male"), Field("first", "bob"), Field("last", "costas") }), null);

            writer.AddDocument(Doc(new Field[] { Field("id", "4"), Field("gender", "female"), Field("first", "sally"), Field("last", "smith"), Field("gender", "female"), Field("first", "linda"), Field("last", "dixit"), Field("gender", "male"), Field("first", "bubba"), Field("last", "jones") }), null);

            writer.Close();
            searcher = new IndexSearcher(directory, true, null);
        }
Exemplo n.º 58
0
        public virtual void  TestFilteredSearch_Renamed()
        {
            bool         enforceSingleSegment = true;
            RAMDirectory directory            = new RAMDirectory();

            int[] filterBits            = { 1, 36 };
            SimpleDocIdSetFilter filter = new SimpleDocIdSetFilter(filterBits);
            IndexWriter          writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);

            SearchFiltered(writer, directory, filter, enforceSingleSegment);
            // run the test on more than one segment
            enforceSingleSegment = false;
            // reset - it is stateful
            filter.Reset();
            writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
            // we index 60 docs - this will create 6 segments
            writer.SetMaxBufferedDocs(10);
            SearchFiltered(writer, directory, filter, enforceSingleSegment);
        }
Exemplo n.º 59
0
        public virtual void TestSeekToEOFThenBack()
        {
            RAMDirectory dir = new RAMDirectory();

            IndexOutput o     = dir.CreateOutput("out", NewIOContext(Random));
            var         bytes = new byte[3 * RAMInputStream.BUFFER_SIZE];
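            // RAMDirectory stores file data in fixed-size internal buffers (RAMInputStream.BUFFER_SIZE = 1024 bytes),
            // so three buffers' worth of bytes lets the seeks below cross buffer boundaries and land exactly at EOF.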

            o.WriteBytes(bytes, 0, bytes.Length);
            o.Dispose();

            IndexInput i = dir.OpenInput("out", NewIOContext(Random));

            i.Seek(2 * RAMInputStream.BUFFER_SIZE - 1);
            i.Seek(3 * RAMInputStream.BUFFER_SIZE);
            i.Seek(RAMInputStream.BUFFER_SIZE);
            i.ReadBytes(bytes, 0, 2 * RAMInputStream.BUFFER_SIZE);
            i.Dispose();
            dir.Dispose();
        }
Exemplo n.º 60
0
        public virtual void  TestDocBoost_Renamed()
        {
            RAMDirectory store  = new RAMDirectory();
            IndexWriter  writer = new IndexWriter(store, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);

            IFieldable f1 = new Field("field", "word", Field.Store.YES, Field.Index.ANALYZED);
            IFieldable f2 = new Field("field", "word", Field.Store.YES, Field.Index.ANALYZED);

            f2.Boost = 2.0f;

            Document d1 = new Document();
            Document d2 = new Document();
            Document d3 = new Document();
            Document d4 = new Document();

            d3.Boost = 3.0f;
            d4.Boost = 2.0f;

            d1.Add(f1);             // boost = 1
            d2.Add(f2);             // boost = 2
            d3.Add(f1);             // boost = 3
            d4.Add(f2);             // boost = 4

            writer.AddDocument(d1, null);
            writer.AddDocument(d2, null);
            writer.AddDocument(d3, null);
            writer.AddDocument(d4, null);
            writer.Optimize(null);
            writer.Close();

            float[] scores = new float[4];

            new IndexSearcher(store, true, null).Search(new TermQuery(new Term("field", "word")), new AnonymousClassCollector(scores, this), null);

            float lastScore = 0.0f;
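            // The four documents were indexed with strictly increasing boosts (1, 2, 3, 4),
            // so their scores for the same term query should increase in document order.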

            for (int i = 0; i < 4; i++)
            {
                Assert.IsTrue(scores[i] > lastScore);
                lastScore = scores[i];
            }
        }