Code Example #1
File: TestDirectory.cs Project: stgwilli/ravendb
        public virtual void  TestDetectClose()
        {
            Directory dir = new RAMDirectory();

            dir.Close();
            try
            {
                dir.CreateOutput("test");
                Assert.Fail("did not hit expected exception");
            }
            catch (AlreadyClosedException ace)
            {
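                // expected: the directory has already been closed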
            }

            dir = FSDirectory.Open(new System.IO.FileInfo(SupportClass.AppSettings.Get("tempDir", System.IO.Path.GetTempPath())));
            dir.Close();
            try
            {
                dir.CreateOutput("test");
                Assert.Fail("did not hit expected exception");
            }
            catch (AlreadyClosedException ace)
            {
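                // expected: the directory has already been closed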
            }
        }
Code Example #2
		public virtual void  TestDetectClose()
		{
			Directory dir = new RAMDirectory();
			dir.Close();

            Assert.Throws<AlreadyClosedException>(() => dir.CreateOutput("test"), "did not hit expected exception");
			
			dir = FSDirectory.Open(new System.IO.DirectoryInfo(AppSettings.Get("tempDir", System.IO.Path.GetTempPath())));
			dir.Close();
			Assert.Throws<AlreadyClosedException>(() => dir.CreateOutput("test"), "did not hit expected exception");
		}
Code Example #3
        public virtual void  TestDetectClose()
        {
            Directory dir = new RAMDirectory();

            dir.Close();

            Assert.Throws <AlreadyClosedException>(() => dir.CreateOutput("test", null), "did not hit expected exception");

            dir = FSDirectory.Open(new System.IO.DirectoryInfo(AppSettings.Get("tempDir", System.IO.Path.GetTempPath())));
            dir.Close();
            Assert.Throws <AlreadyClosedException>(() => dir.CreateOutput("test", null), "did not hit expected exception");
        }
Code Example #4
File: TermsFilterTest.cs Project: Nangal/lucene.net
        public void testMissingTerms()
        {
            String fieldName = "field1";
            Directory rd = new RAMDirectory();
            var w = new IndexWriter(rd, new KeywordAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
            for (int i = 0; i < 100; i++)
            {
                var doc = new Document();
                int term = i*10; //terms are units of 10;
                doc.Add(new Field(fieldName, "" + term, Field.Store.YES, Field.Index.ANALYZED));
                w.AddDocument(doc);
            }
            IndexReader reader = w.GetReader();
            w.Close();

            TermsFilter tf = new TermsFilter();
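            // "19" is not an indexed term (only multiples of 10 were indexed), so the filter should match nothing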
            tf.AddTerm(new Term(fieldName, "19"));
            FixedBitSet bits = (FixedBitSet) tf.GetDocIdSet(reader);
            Assert.AreEqual(0, bits.Cardinality(), "Must match nothing");

            tf.AddTerm(new Term(fieldName, "20"));
            bits = (FixedBitSet) tf.GetDocIdSet(reader);
            Assert.AreEqual(1, bits.Cardinality(), "Must match 1");

            tf.AddTerm(new Term(fieldName, "10"));
            bits = (FixedBitSet) tf.GetDocIdSet(reader);
            Assert.AreEqual(2, bits.Cardinality(), "Must match 2");

            tf.AddTerm(new Term(fieldName, "00"));
            bits = (FixedBitSet) tf.GetDocIdSet(reader);
            Assert.AreEqual(2, bits.Cardinality(), "Must match 2");

            reader.Close();
            rd.Close();
        }
Code Example #5
File: TestDemo.cs Project: Nangal/lucene.net
 public virtual void  TestDemo_Renamed()
 {
     
     Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_CURRENT);
     
     // Store the index in memory:
     Directory directory = new RAMDirectory();
     // To store an index on disk, use this instead:
     //Directory directory = FSDirectory.Open(new System.IO.DirectoryInfo("/tmp/testindex"));
     IndexWriter iwriter = new IndexWriter(directory, analyzer, true, new IndexWriter.MaxFieldLength(25000));
     Document doc = new Document();
     System.String text = "This is the text to be indexed.";
     doc.Add(new Field("fieldname", text, Field.Store.YES, Field.Index.ANALYZED));
     iwriter.AddDocument(doc);
     iwriter.Close();
     
     // Now search the index:
     IndexSearcher isearcher = new IndexSearcher(directory, true); // read-only=true
     // Parse a simple query that searches for "text":
     QueryParser parser = new QueryParser(Util.Version.LUCENE_CURRENT, "fieldname", analyzer);
     Query query = parser.Parse("text");
     ScoreDoc[] hits = isearcher.Search(query, null, 1000).ScoreDocs;
     Assert.AreEqual(1, hits.Length);
     // Iterate through the results:
     for (int i = 0; i < hits.Length; i++)
     {
         Document hitDoc = isearcher.Doc(hits[i].Doc);
         Assert.AreEqual(hitDoc.Get("fieldname"), "This is the text to be indexed.");
     }
     isearcher.Close();
     directory.Close();
 }
Code Example #6
        public virtual void  TestDemo_Renamed()
        {
            Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_CURRENT);

            // Store the index in memory:
            Directory directory = new RAMDirectory();
            // To store an index on disk, use this instead:
            //Directory directory = FSDirectory.Open(new System.IO.DirectoryInfo("/tmp/testindex"));
            IndexWriter iwriter = new IndexWriter(directory, analyzer, true, new IndexWriter.MaxFieldLength(25000));
            Document    doc     = new Document();

            System.String text = "This is the text to be indexed.";
            doc.Add(new Field("fieldname", text, Field.Store.YES, Field.Index.ANALYZED));
            iwriter.AddDocument(doc);
            iwriter.Close();

            // Now search the index:
            IndexSearcher isearcher = new IndexSearcher(directory, true); // read-only=true
            // Parse a simple query that searches for "text":
            QueryParser parser = new QueryParser(Util.Version.LUCENE_CURRENT, "fieldname", analyzer);
            Query       query  = parser.Parse("text");

            ScoreDoc[] hits = isearcher.Search(query, null, 1000).ScoreDocs;
            Assert.AreEqual(1, hits.Length);
            // Iterate through the results:
            for (int i = 0; i < hits.Length; i++)
            {
                Document hitDoc = isearcher.Doc(hits[i].Doc);
                Assert.AreEqual(hitDoc.Get("fieldname"), "This is the text to be indexed.");
            }
            isearcher.Close();
            directory.Close();
        }
Code Example #7
        public void HelloWorldTest()
        {
            Directory directory = new RAMDirectory();
            Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_29);
            IndexWriter writer = new IndexWriter(directory,
                analyzer,
                IndexWriter.MaxFieldLength.UNLIMITED);

            Document doc = new Document();
            doc.Add(new Field("id", "1", Field.Store.YES, Field.Index.NO));
            doc.Add(new Field("postBody", "sample test", Field.Store.YES, Field.Index.ANALYZED));
            writer.AddDocument(doc);
            writer.Optimize();
            writer.Commit();
            writer.Close();

            QueryParser parser = new QueryParser(Version.LUCENE_29, "postBody", analyzer);
            Query query = parser.Parse("sample test");

            //Setup searcher
            IndexSearcher searcher = new IndexSearcher(directory, true);
            //Do the search
            var hits = searcher.Search(query, null, 10);

            for (int i = 0; i < hits.TotalHits; i++)
            {
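                // retrieve each matching ScoreDoc; this example does not assert on the results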
                var doc1 = hits.ScoreDocs[i];
            }

            searcher.Close();
            directory.Close();
        }
Code Example #8
File: TestRAMDirectory.cs Project: stgwilli/ravendb
        public virtual void  TestIllegalEOF()
        {
            RAMDirectory dir = new RAMDirectory();
            IndexOutput  o   = dir.CreateOutput("out");

            byte[] b = new byte[1024];
            o.WriteBytes(b, 0, 1024);
            o.Close();
            IndexInput i = dir.OpenInput("out");

            // seeking to the end of the file (position 1024) must not throw
            i.Seek(1024);
            i.Close();
            dir.Close();
        }
Code Example #9
		public virtual void  TestDetectClose()
		{
			Directory dir = new RAMDirectory();
			dir.Close();
			try
			{
				dir.CreateOutput("test");
				Assert.Fail("did not hit expected exception");
			}
			catch (AlreadyClosedException ace)
			{
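				// expected: the directory has already been closed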
			}
			
			dir = FSDirectory.Open(new System.IO.FileInfo(Support.AppSettings.Get("tempDir", System.IO.Path.GetTempPath())));
			dir.Close();
			try
			{
				dir.CreateOutput("test");
				Assert.Fail("did not hit expected exception");
			}
			catch (AlreadyClosedException ace)
			{
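				// expected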
			}
		}
Code Example #10
File: TestOldPatches.cs Project: zfxsss/lucenenet
        public void Test_Store_RAMDirectory()
        {
            Lucene.Net.Store.RAMDirectory ramDIR = new Lucene.Net.Store.RAMDirectory();

            //Index 1 Doc
            Lucene.Net.Index.IndexWriter  wr  = new Lucene.Net.Index.IndexWriter(ramDIR, new Lucene.Net.Analysis.WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
            Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
            doc.Add(new Lucene.Net.Documents.Field("field1", "value1 value11", Lucene.Net.Documents.Field.Store.YES, Lucene.Net.Documents.Field.Index.ANALYZED));
            wr.AddDocument(doc);
            wr.Dispose();

            //now serialize it
            System.Runtime.Serialization.Formatters.Binary.BinaryFormatter serializer = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
            System.IO.MemoryStream memoryStream = new System.IO.MemoryStream();
            serializer.Serialize(memoryStream, ramDIR);

            //Close DIR
            ramDIR.Close();
            ramDIR = null;

            //now deserialize
            memoryStream.Seek(0, System.IO.SeekOrigin.Begin);
            Lucene.Net.Store.RAMDirectory ramDIR2 = (Lucene.Net.Store.RAMDirectory)serializer.Deserialize(memoryStream);

            //Add 1 more doc
            wr  = new Lucene.Net.Index.IndexWriter(ramDIR2, new Lucene.Net.Analysis.WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.UNLIMITED);
            doc = new Lucene.Net.Documents.Document();
            doc.Add(new Lucene.Net.Documents.Field("field1", "value1 value11", Lucene.Net.Documents.Field.Store.YES, Lucene.Net.Documents.Field.Index.ANALYZED));
            wr.AddDocument(doc);
            wr.Dispose();

            //Search
            Lucene.Net.Search.IndexSearcher     s       = new Lucene.Net.Search.IndexSearcher(ramDIR2);
            Lucene.Net.QueryParsers.QueryParser qp      = new Lucene.Net.QueryParsers.QueryParser(Version.LUCENE_CURRENT, "field1", new Lucene.Net.Analysis.Standard.StandardAnalyzer(Version.LUCENE_CURRENT));
            Lucene.Net.Search.Query             q       = qp.Parse("value1");
            Lucene.Net.Search.TopDocs           topDocs = s.Search(q, 100);
            s.Close();

            Assert.AreEqual(topDocs.TotalHits, 2, "See the issue: LUCENENET-174");
        }
Code Example #11
File: Program.cs Project: wesleysanfer/Talks
        static void Main(string[] args)
        {
            int maxLength = GeohashPrefixTree.GetMaxLevelsPossible();
            strategy = new RecursivePrefixTreeStrategy(
                new GeohashPrefixTree(context, maxLength));

            var dir = new RAMDirectory();
            var writer = new IndexWriter(dir, new SimpleAnalyzer(), true,
                IndexWriter.MaxFieldLength.UNLIMITED);

            AddPoint(writer, "London", -81.233040, 42.983390);
            AddPoint(writer, "East New York", -73.882360, 40.666770);
            AddPoint(writer, "Manhattan", -73.966250, 40.783430);
            AddPoint(writer, "New York City", -74.005970, 40.714270);
            AddPoint(writer, "Oslo", 10.746090, 59.912730);
            AddPoint(writer, "Bergen", 5.324150, 60.392990);
            AddPoint(writer, "Washington, D. C.", -77.036370, 38.895110);

            writer.Close();

            // Origin point - Oslo Spektrum
            const double lat = 59.9138688;
            const double lng = 10.752245399999993;
            const double radius = 600;
            var query = strategy.MakeQuery(new SpatialArgs(SpatialOperation.IsWithin,
                context.MakeCircle(lng, lat, radius)), fieldInfo);

            var searcher = new IndexSearcher(dir);
            var results = searcher.Search(query, null, 100);
            foreach (var topDoc in results.ScoreDocs)
            {
                var name = searcher.Doc(topDoc.doc).Get("Name");
                Console.WriteLine(name);
            }
            searcher.Close();
            dir.Close();
        }
Code Example #12
        public virtual void  TestPhrasePrefix()
        {
            RAMDirectory indexStore = new RAMDirectory();
            IndexWriter  writer     = new IndexWriter(indexStore, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED, null);

            Add("blueberry pie", writer);
            Add("blueberry strudel", writer);
            Add("blueberry pizza", writer);
            Add("blueberry chewing gum", writer);
            Add("bluebird pizza", writer);
            Add("bluebird foobar pizza", writer);
            Add("piccadilly circus", writer);
            writer.Optimize(null);
            writer.Close();

            IndexSearcher searcher = new IndexSearcher(indexStore, true, null);

            // search for "blueberry pi*":
            MultiPhraseQuery query1 = new MultiPhraseQuery();
            // search for "strawberry pi*":
            MultiPhraseQuery query2 = new MultiPhraseQuery();

            query1.Add(new Term("body", "blueberry"));
            query2.Add(new Term("body", "strawberry"));

            System.Collections.ArrayList termsWithPrefix = new System.Collections.ArrayList();
            IndexReader ir = IndexReader.Open((Directory)indexStore, true, null);

            // this TermEnum gives "piccadilly", "pie" and "pizza".
            System.String prefix = "pi";
            TermEnum      te     = ir.Terms(new Term("body", prefix), null);

            do
            {
                if (te.Term.Text.StartsWith(prefix))
                {
                    termsWithPrefix.Add(te.Term);
                }
            }while (te.Next(null));

            query1.Add((Term[])termsWithPrefix.ToArray(typeof(Term)));
            Assert.AreEqual("body:\"blueberry (piccadilly pie pizza)\"", query1.ToString());
            query2.Add((Term[])termsWithPrefix.ToArray(typeof(Term)));
            Assert.AreEqual("body:\"strawberry (piccadilly pie pizza)\"", query2.ToString());

            ScoreDoc[] result;
            result = searcher.Search(query1, null, 1000, null).ScoreDocs;
            Assert.AreEqual(2, result.Length);
            result = searcher.Search(query2, null, 1000, null).ScoreDocs;
            Assert.AreEqual(0, result.Length);

            // search for "blue* pizza":
            MultiPhraseQuery query3 = new MultiPhraseQuery();

            termsWithPrefix.Clear();
            prefix = "blue";
            te     = ir.Terms(new Term("body", prefix), null);
            do
            {
                if (te.Term.Text.StartsWith(prefix))
                {
                    termsWithPrefix.Add(te.Term);
                }
            }while (te.Next(null));
            query3.Add((Term[])termsWithPrefix.ToArray(typeof(Term)));
            query3.Add(new Term("body", "pizza"));

            result = searcher.Search(query3, null, 1000, null).ScoreDocs;
            Assert.AreEqual(2, result.Length);             // blueberry pizza, bluebird pizza
            Assert.AreEqual("body:\"(blueberry bluebird) pizza\"", query3.ToString());

            // test slop:
            query3.Slop = 1;
            result      = searcher.Search(query3, null, 1000, null).ScoreDocs;
            Assert.AreEqual(3, result.Length);             // blueberry pizza, bluebird pizza, bluebird foobar pizza

            MultiPhraseQuery query4 = new MultiPhraseQuery();

            // okay, all terms must belong to the same field
            Assert.Throws <ArgumentException>(() =>
            {
                query4.Add(new Term("field1", "foo"));
                query4.Add(new Term("field2", "foobar"));
            });

            searcher.Close();
            indexStore.Close();
        }
Code Example #13
File: Form1.cs Project: usmanghani/Misc
        private void button5_Click(object sender, EventArgs e)
        {
            FilterData.PrepareCharMap();
            textBox1.Clear();
            DiacriticAnalyzer analyzer = new DiacriticAnalyzer(FilterData.stopWords);
            string contents = File.ReadAllText("c:\\1.txt");
            TokenStream stream = analyzer.TokenStream(new StringReader(contents));
            Token t = null;
            while ((t = stream.Next()) != null)
            {
                textBox1.AppendText(t.TermText() + Environment.NewLine);
            }

            // index the same contents into an in-memory directory and search it with the same analyzer
            Store.RAMDirectory dir = new Store.RAMDirectory();
            IndexWriter indexer = new IndexWriter(dir, analyzer, true);
            Documents.Document doc = new Lucene.Net.Documents.Document();
            doc.Add(Documents.Field.Text("contents", contents));
            indexer.AddDocument(doc);
            indexer.Close();

            IndexSearcher searcher = new IndexSearcher(dir);
            Hits hits = searcher.Search(QueryParser.Parse("انعمت", "contents", analyzer));
            MessageBox.Show(hits.Length().ToString());
            searcher.Close();

            dir.Close();
        }
Code Example #14
File: BasicHebrewTests.cs Project: kirillkh/HebMorph
        protected void AssertFoundInText(string whatToIndex, string whatToSearch)
        {
            Directory d = new RAMDirectory();

            IndexWriter writer = new IndexWriter(d, analyzer, true, new IndexWriter.MaxFieldLength(10000));
            Document doc = new Document();
            doc.Add(new Field("content", whatToIndex, Field.Store.YES, Field.Index.ANALYZED));
            writer.AddDocument(doc);
            writer.Close();
            writer = null;

            IndexSearcher searcher = new IndexSearcher(d, true); // read-only=true
            QueryParser qp = new QueryParser(Lucene.Net.Util.Version.LUCENE_29, "content", analyzer);
            Query query = qp.Parse(whatToSearch);
            ScoreDoc[] hits = searcher.Search(query, null, 1000).scoreDocs;

            Assert(hits.Length == 1);

            searcher.Close();

            d.Close();
        }
Code Example #15
		public virtual void  TestIllegalEOF()
		{
			RAMDirectory dir = new RAMDirectory();
			IndexOutput o = dir.CreateOutput("out");
			byte[] b = new byte[1024];
			o.WriteBytes(b, 0, 1024);
			o.Close();
			IndexInput i = dir.OpenInput("out");
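			// seeking to the end of the file (position 1024) must not throw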
			i.Seek(1024);
			i.Close();
			dir.Close();
		}
Code Example #16
        public void TestIndexReload()
        {
            try
            {
                RAMDirectory idxDir = new RAMDirectory();
                Document[] docs = BoboTestCase.BuildData();
                BoboIndexReader.WorkArea workArea = new BoboIndexReader.WorkArea();
                BrowseRequest req;
                BrowseSelection sel;
                BoboBrowser browser;
                BrowseResult result;

                // create an empty index up front; documents are added in growing batches inside the loop below
                IndexWriter writer = new IndexWriter(idxDir, new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
                writer.Close();

                int dup = 0;
                for (int j = 0; j < 50; j++)
                {
                    IndexReader idxReader = IndexReader.Open(idxDir, true);
                    BoboIndexReader reader = BoboIndexReader.GetInstance(idxReader, _fconf, workArea);

                    req = new BrowseRequest();
                    req.Offset = 0;
                    req.Count = 10;
                    sel = new BrowseSelection("color");
                    sel.AddValue("red");
                    req.AddSelection(sel);
                    browser = new BoboBrowser(reader);
                    result = browser.Browse(req);

                    Assert.AreEqual(3 * dup, result.NumHits);

                    req = new BrowseRequest();
                    req.Offset = 0;
                    req.Count = 10;
                    sel = new BrowseSelection("tag");
                    sel.AddValue("dog");
                    req.AddSelection(sel);
                    browser = new BoboBrowser(reader);
                    result = browser.Browse(req);

                    Assert.AreEqual(2 * dup, result.NumHits);

                    req = new BrowseRequest();
                    req.Offset = 0;
                    req.Count = 10;
                    sel = new BrowseSelection("tag");
                    sel.AddValue("funny");
                    req.AddSelection(sel);
                    browser = new BoboBrowser(reader);
                    result = browser.Browse(req);

                    Assert.AreEqual(3 * dup, result.NumHits);

                    writer = new IndexWriter(idxDir, new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
                    for (int k = 0; k <= j; k++)
                    {
                        for (int i = 0; i < docs.Length; i++)
                        {
                            writer.AddDocument(docs[i]);
                        }
                        dup++;
                    }
                    writer.Close();
                }
                idxDir.Close();
            }
            catch (Exception e)
            {
                Assert.Fail(e.Message);
            }
        }
Code Example #17
		public virtual void  TestUndeleteAllAfterClose()
		{
			Directory dir = new RAMDirectory();
			IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
			AddDocumentWithFields(writer);
			AddDocumentWithFields(writer);
			writer.Close();
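			// delete both documents via one reader, then undelete them with a fresh reader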
			IndexReader reader = IndexReader.Open(dir);
			reader.DeleteDocument(0);
			reader.DeleteDocument(1);
			reader.Close();
			reader = IndexReader.Open(dir);
			reader.UndeleteAll();
			Assert.AreEqual(2, reader.NumDocs()); // nothing has really been deleted thanks to undeleteAll()
			reader.Close();
			dir.Close();
		}
Code Example #18
        public void Test_Store_RAMDirectory()
        {
            Lucene.Net.Store.RAMDirectory ramDIR = new Lucene.Net.Store.RAMDirectory();

            //Index 1 Doc
            Lucene.Net.Index.IndexWriter wr = new Lucene.Net.Index.IndexWriter(ramDIR, new Lucene.Net.Analysis.WhitespaceAnalyzer(), true);
            Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
            doc.Add(new Lucene.Net.Documents.Field("field1", "value1 value11", Lucene.Net.Documents.Field.Store.YES, Lucene.Net.Documents.Field.Index.TOKENIZED));
            wr.AddDocument(doc);
            wr.Close();

            //now serialize it 
            System.Runtime.Serialization.Formatters.Binary.BinaryFormatter serializer = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
            System.IO.MemoryStream memoryStream = new System.IO.MemoryStream();
            serializer.Serialize(memoryStream, ramDIR);

            //Close DIR
            ramDIR.Close();
            ramDIR = null;

            //now deserialize 
            memoryStream.Seek(0, System.IO.SeekOrigin.Begin);
            Lucene.Net.Store.RAMDirectory ramDIR2 = (Lucene.Net.Store.RAMDirectory)serializer.Deserialize(memoryStream);

            //Add 1 more doc
            wr = new Lucene.Net.Index.IndexWriter(ramDIR2, new Lucene.Net.Analysis.WhitespaceAnalyzer(), false);
            doc = new Lucene.Net.Documents.Document();
            doc.Add(new Lucene.Net.Documents.Field("field1", "value1 value11", Lucene.Net.Documents.Field.Store.YES, Lucene.Net.Documents.Field.Index.TOKENIZED));
            wr.AddDocument(doc);
            wr.Close();

            //Search
            Lucene.Net.Search.IndexSearcher s = new Lucene.Net.Search.IndexSearcher(ramDIR2);
            Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("field1", new Lucene.Net.Analysis.Standard.StandardAnalyzer());
            Lucene.Net.Search.Query q = qp.Parse("value1");
            Lucene.Net.Search.TopDocs topDocs = s.Search(q, 100);
            s.Close();

            Assert.AreEqual(topDocs.TotalHits, 2, "See the issue: LUCENENET-174");
        }
Code Example #19
 public override void  TearDown()
 {
     searcher.Close();
     directory.Close();
     base.TearDown();
 }
Code Example #20
        public virtual void  TestQuery()
        {
            RAMDirectory dir = new RAMDirectory();
            IndexWriter  iw  = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED, null);

            iw.SetMaxBufferedDocs(2);             // force multi-segment
            AddDoc("one", iw, 1f);
            AddDoc("two", iw, 20f);
            AddDoc("three four", iw, 300f);
            iw.Close();

            IndexReader   ir         = IndexReader.Open((Directory)dir, false, null);
            IndexSearcher is_Renamed = new IndexSearcher(ir);

            ScoreDoc[] hits;

            // assert with norms scoring turned off

            hits = is_Renamed.Search(new MatchAllDocsQuery(), null, 1000, null).ScoreDocs;
            Assert.AreEqual(3, hits.Length);
            Assert.AreEqual(ir.Document(hits[0].Doc, null).Get("key", null), "one");
            Assert.AreEqual(ir.Document(hits[1].Doc, null).Get("key", null), "two");
            Assert.AreEqual(ir.Document(hits[2].Doc, null).Get("key", null), "three four");

            // assert with norms scoring turned on

            MatchAllDocsQuery normsQuery = new MatchAllDocsQuery("key");

            hits = is_Renamed.Search(normsQuery, null, 1000, null).ScoreDocs;
            Assert.AreEqual(3, hits.Length);

            Assert.AreEqual(ir.Document(hits[0].Doc, null).Get("key", null), "three four");
            Assert.AreEqual(ir.Document(hits[1].Doc, null).Get("key", null), "two");
            Assert.AreEqual(ir.Document(hits[2].Doc, null).Get("key", null), "one");

            // change norm & retest
            ir.SetNorm(0, "key", 400f, null);
            normsQuery = new MatchAllDocsQuery("key");
            hits       = is_Renamed.Search(normsQuery, null, 1000, null).ScoreDocs;
            Assert.AreEqual(3, hits.Length);

            Assert.AreEqual(ir.Document(hits[0].Doc, null).Get("key", null), "one");
            Assert.AreEqual(ir.Document(hits[1].Doc, null).Get("key", null), "three four");
            Assert.AreEqual(ir.Document(hits[2].Doc, null).Get("key", null), "two");

            // some artificial queries to trigger the use of skipTo():

            BooleanQuery bq = new BooleanQuery();

            bq.Add(new MatchAllDocsQuery(), Occur.MUST);
            bq.Add(new MatchAllDocsQuery(), Occur.MUST);
            hits = is_Renamed.Search(bq, null, 1000, null).ScoreDocs;
            Assert.AreEqual(3, hits.Length);

            bq = new BooleanQuery();
            bq.Add(new MatchAllDocsQuery(), Occur.MUST);
            bq.Add(new TermQuery(new Term("key", "three")), Occur.MUST);
            hits = is_Renamed.Search(bq, null, 1000, null).ScoreDocs;
            Assert.AreEqual(1, hits.Length);

            // delete a document:
            is_Renamed.IndexReader.DeleteDocument(0, null);
            hits = is_Renamed.Search(new MatchAllDocsQuery(), null, 1000, null).ScoreDocs;
            Assert.AreEqual(2, hits.Length);

            // test parsable toString()
            QueryParser qp = new QueryParser(Util.Version.LUCENE_CURRENT, "key", analyzer);

            hits = is_Renamed.Search(qp.Parse(new MatchAllDocsQuery().ToString()), null, 1000, null).ScoreDocs;
            Assert.AreEqual(2, hits.Length);

            // test parsable toString() with non default boost
            Query maq = new MatchAllDocsQuery();

            maq.Boost = 2.3f;
            Query pq = qp.Parse(maq.ToString());

            hits = is_Renamed.Search(pq, null, 1000, null).ScoreDocs;
            Assert.AreEqual(2, hits.Length);

            is_Renamed.Close();
            ir.Close();
            dir.Close();
        }