DocHelper.CreateDocument() public static method

public static CreateDocument ( int n, string indexName, int numFields ) : Lucene.Net.Documents.Document
Parameters
    n          int
    indexName  string
    numFields  int
Return
    Lucene.Net.Documents.Document
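
Before the examples, here is a minimal usage sketch distilled from them. It assumes Lucene.NET 4.8-era APIs and that DocHelper (shipped with the Lucene.NET test framework) is referenced; the RAMDirectory/StandardAnalyzer setup is illustrative only and is not part of the examples. The one behavior the examples do rely on is that n is stored as the string value of the document's "id" field, which is why they later delete documents with new Term("id", ...).

    using Lucene.Net.Analysis.Standard;
    using Lucene.Net.Documents;
    using Lucene.Net.Index;
    using Lucene.Net.Store;
    using Lucene.Net.Util;

    // Sketch only: DocHelper.CreateDocument(n, indexName, numFields) builds a
    // synthetic test document; per the examples below, n ends up as the string
    // value of the "id" field.
    Directory dir = new RAMDirectory();
    IndexWriterConfig iwc = new IndexWriterConfig(LuceneVersion.LUCENE_48,
        new StandardAnalyzer(LuceneVersion.LUCENE_48));

    using (IndexWriter writer = new IndexWriter(dir, iwc))
    {
        for (int n = 0; n < 10; n++)
        {
            // "index1" and 10 fields mirror the arguments used in Example #1.
            Document doc = DocHelper.CreateDocument(n, "index1", 10);
            writer.AddDocument(doc);
        }

        // Because n is stored in the "id" field, a document created this way can
        // later be targeted by a term delete, exactly as the examples do.
        writer.DeleteDocuments(new Term("id", "3"));
        writer.Commit();
    }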
Example #1
 public override void Run()
 {
     try
     {
         while (run)
         {
             //int n = random.nextInt(2);
             if (type == 0)
             {
                 int      i   = outerInstance.seq.AddAndGet(1);
                 Document doc = DocHelper.CreateDocument(i, "index1", 10);
                 writer.AddDocument(doc);
                 addCount++;
             }
             else if (type == 1)
             {
                 // we may or may not delete because the term may not exist,
                 // however we're opening and closing the reader rapidly
                 IndexReader reader = writer.GetReader();
                 int         id     = r.Next(outerInstance.seq);
                 Term        term   = new Term("id", Convert.ToString(id));
                 int         count  = TestIndexWriterReader.Count(term, reader);
                 writer.DeleteDocuments(term);
                 reader.Dispose();
                 delCount += count;
             }
         }
     }
     catch (Exception ex) when(ex.IsThrowable())
     {
         Console.WriteLine(ex.StackTrace);
         this.ex = ex;
         run     = false;
     }
 }
Example #2
        /// <summary>
        /// static boolean hasPendingDeletes(SegmentInfos infos) {
        ///  for (SegmentInfo info : infos) {
        ///    if (info.deletes.Any()) {
        ///      return true;
        ///    }
        ///  }
        ///  return false;
        /// }
        ///
        /// </summary>
        internal virtual void Part2(IndexWriter writer, RangeMergePolicy fsmp)
        {
            for (int x = 20; x < 25; x++)
            {
                writer.AddDocument(DocHelper.CreateDocument(x, "5", 2));
                //System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
            }
            writer.Flush(false, false);
            for (int x = 25; x < 30; x++)
            {
                writer.AddDocument(DocHelper.CreateDocument(x, "5", 2));
                //System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
            }
            writer.Flush(false, false);

            //System.out.println("infos3:"+writer.SegmentInfos);

            Term delterm = new Term("id", "8");

            writer.DeleteDocuments(delterm);
            //System.out.println("segdels3:" + writer.docWriter.deletesToString());

            fsmp.doMerge = true;
            fsmp.start   = 1;
            fsmp.length  = 2;
            writer.MaybeMerge();

            // deletes for info1, the newly created segment from the
            // merge should have no deletes because they were applied in
            // the merge
            //SegmentInfo info1 = writer.SegmentInfos[1];
            //Assert.IsFalse(exists(info1, writer.docWriter.segmentDeletes));

            //System.out.println("infos4:"+writer.SegmentInfos);
            //System.out.println("segdels4:" + writer.docWriter.deletesToString());
        }
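
The <summary> of Part2 above carries a commented-out Java helper, hasPendingDeletes. A rough C# port is sketched below as an interpretation of its intent, not a literal translation: SegmentInfos.Info(int) appears in Example #4, while SegmentInfos.Count and SegmentCommitInfo.HasDeletions are assumed here to be the 4.8 counterparts of Java's size() and hasDeletions().

    // Hedged sketch: returns true if any segment in the given SegmentInfos has
    // recorded deletions. SegmentCommitInfo.HasDeletions is assumed to stand in
    // for the "info.deletes" check in the Java comment above.
    internal static bool HasPendingDeletes(SegmentInfos infos)
    {
        for (int i = 0; i < infos.Count; i++)
        {
            SegmentCommitInfo info = infos.Info(i);
            if (info.HasDeletions)
            {
                return true;
            }
        }
        return false;
    }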
Example #3
        public virtual void TestDeletes1()
        {
            //IndexWriter.debug2 = System.out;
            Directory         dir = new MockDirectoryWrapper(new J2N.Randomizer(Random.NextInt64()), new RAMDirectory());
            IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random));

            iwc.SetMergeScheduler(new SerialMergeScheduler());
            iwc.SetMaxBufferedDocs(5000);
            iwc.SetRAMBufferSizeMB(100);
            RangeMergePolicy fsmp = new RangeMergePolicy(this, false);

            iwc.SetMergePolicy(fsmp);
            IndexWriter writer = new IndexWriter(dir, iwc);

            for (int x = 0; x < 5; x++)
            {
                writer.AddDocument(DocHelper.CreateDocument(x, "1", 2));
                //System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
            }
            //System.out.println("commit1");
            writer.Commit();
            Assert.AreEqual(1, writer.SegmentCount);
            for (int x = 5; x < 10; x++)
            {
                writer.AddDocument(DocHelper.CreateDocument(x, "2", 2));
                //System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
            }
            //System.out.println("commit2");
            writer.Commit();
            Assert.AreEqual(2, writer.SegmentCount);

            for (int x = 10; x < 15; x++)
            {
                writer.AddDocument(DocHelper.CreateDocument(x, "3", 2));
                //System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
            }

            writer.DeleteDocuments(new Term("id", "1"));

            writer.DeleteDocuments(new Term("id", "11"));

            // flushing without applying deletes means
            // there will still be deletes in the segment infos
            writer.Flush(false, false);
            Assert.IsTrue(writer.bufferedUpdatesStream.Any());

            // get reader flushes pending deletes
            // so there should not be anymore
            IndexReader r1 = writer.GetReader();

            Assert.IsFalse(writer.bufferedUpdatesStream.Any());
            r1.Dispose();

            // delete id:2 from the first segment
            // merge segments 0 and 1
            // which should apply the delete id:2
            writer.DeleteDocuments(new Term("id", "2"));
            writer.Flush(false, false);
            fsmp         = (RangeMergePolicy)writer.Config.MergePolicy;
            fsmp.doMerge = true;
            fsmp.start   = 0;
            fsmp.length  = 2;
            writer.MaybeMerge();

            Assert.AreEqual(2, writer.SegmentCount);

            // id:2 shouldn't exist anymore because
            // it's been applied in the merge and now it's gone
            IndexReader r2 = writer.GetReader();

            int[] id2docs = ToDocsArray(new Term("id", "2"), null, r2);
            Assert.IsTrue(id2docs is null);
            r2.Dispose();

            /*
             * /// // added docs are in the ram buffer
             * /// for (int x = 15; x < 20; x++) {
             * ///  writer.AddDocument(TestIndexWriterReader.CreateDocument(x, "4", 2));
             * ///  System.out.println("numRamDocs(" + x + ")" + writer.numRamDocs());
             * /// }
             * /// Assert.IsTrue(writer.numRamDocs() > 0);
             * /// // delete from the ram buffer
             * /// writer.DeleteDocuments(new Term("id", Integer.toString(13)));
             * ///
             * /// Term id3 = new Term("id", Integer.toString(3));
             * ///
             * /// // delete from the 1st segment
             * /// writer.DeleteDocuments(id3);
             * ///
             * /// Assert.IsTrue(writer.numRamDocs() > 0);
             * ///
             * /// //System.out
             * /// //    .println("segdels1:" + writer.docWriter.deletesToString());
             * ///
             * /// //Assert.IsTrue(writer.docWriter.segmentDeletes.Size() > 0);
             * ///
             * /// // we cause a merge to happen
             * /// fsmp.doMerge = true;
             * /// fsmp.start = 0;
             * /// fsmp.length = 2;
             * /// System.out.println("maybeMerge "+writer.SegmentInfos);
             * ///
             * /// SegmentInfo info0 = writer.SegmentInfos[0];
             * /// SegmentInfo info1 = writer.SegmentInfos[1];
             * ///
             * /// writer.MaybeMerge();
             * /// System.out.println("maybeMerge after "+writer.SegmentInfos);
             * /// // there should be docs in RAM
             * /// Assert.IsTrue(writer.numRamDocs() > 0);
             * ///
             * /// // assert we've merged the 1 and 2 segments
             * /// // and still have a segment leftover == 2
             * /// Assert.AreEqual(2, writer.SegmentInfos.Size());
             * /// Assert.IsFalse(segThere(info0, writer.SegmentInfos));
             * /// Assert.IsFalse(segThere(info1, writer.SegmentInfos));
             * ///
             * /// //System.out.println("segdels2:" + writer.docWriter.deletesToString());
             * ///
             * /// //Assert.IsTrue(writer.docWriter.segmentDeletes.Size() > 0);
             * ///
             * /// IndexReader r = writer.GetReader();
             * /// IndexReader r1 = r.getSequentialSubReaders()[0];
             * /// printDelDocs(r1.GetLiveDocs());
             * /// int[] docs = toDocsArray(id3, null, r);
             * /// System.out.println("id3 docs:"+Arrays.toString(docs));
             * /// // there shouldn't be any docs for id:3
             * /// Assert.IsTrue(docs is null);
             * /// r.Dispose();
             * ///
             * /// part2(writer, fsmp);
             * ///
             */
            // System.out.println("segdels2:"+writer.docWriter.segmentDeletes.toString());
            //System.out.println("close");
            writer.Dispose();
            dir.Dispose();
        }
Example #4
        public void Test()
        {
            DirectoryInfo dir     = CreateTempDir(GetType().Name);
            DirectoryInfo destDir = CreateTempDir(GetType().Name);

            Store.Directory fsDir = NewFSDirectory(dir);
            // IndexSplitter.split makes its own commit directly with SIPC/SegmentInfos,
            // so the unreferenced files are expected.
            if (fsDir is MockDirectoryWrapper)
            {
                ((MockDirectoryWrapper)fsDir).AssertNoUnrefencedFilesOnClose = (false);
            }

            MergePolicy mergePolicy = new LogByteSizeMergePolicy();

            mergePolicy.NoCFSRatio          = 1.0;
            mergePolicy.MaxCFSSegmentSizeMB = double.PositiveInfinity;
            IndexWriter iw = new IndexWriter(
                fsDir,
                new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).
                SetOpenMode(OpenMode.CREATE).
                SetMergePolicy(mergePolicy)
                );

            for (int x = 0; x < 100; x++)
            {
                Document doc = DocHelper.CreateDocument(x, "index", 5);
                iw.AddDocument(doc);
            }
            iw.Commit();
            for (int x = 100; x < 150; x++)
            {
                Document doc = DocHelper.CreateDocument(x, "index2", 5);
                iw.AddDocument(doc);
            }
            iw.Commit();
            for (int x = 150; x < 200; x++)
            {
                Document doc = DocHelper.CreateDocument(x, "index3", 5);
                iw.AddDocument(doc);
            }
            iw.Commit();
            DirectoryReader iwReader = iw.GetReader();

            assertEquals(3, iwReader.Leaves.Count);
            iwReader.Dispose();
            iw.Dispose();
            // we should have 3 segments now
            IndexSplitter @is          = new IndexSplitter(dir);
            string        splitSegName = @is.Infos.Info(1).Info.Name;

            @is.Split(destDir, new string[] { splitSegName });
            Store.Directory fsDirDest = NewFSDirectory(destDir);
            DirectoryReader r         = DirectoryReader.Open(fsDirDest);

            assertEquals(50, r.MaxDoc);
            r.Dispose();
            fsDirDest.Dispose();

            // now test cmdline
            DirectoryInfo destDir2 = CreateTempDir(GetType().Name);

            IndexSplitter.Main(new String[] { dir.FullName, destDir2.FullName, splitSegName });
            assertEquals(5, destDir2.GetFiles().Length);
            Store.Directory fsDirDest2 = NewFSDirectory(destDir2);
            r = DirectoryReader.Open(fsDirDest2);
            assertEquals(50, r.MaxDoc);
            r.Dispose();
            fsDirDest2.Dispose();

            // now remove the copied segment from src
            IndexSplitter.Main(new String[] { dir.FullName, "-d", splitSegName });
            r = DirectoryReader.Open(fsDir);
            assertEquals(2, r.Leaves.Count);
            r.Dispose();
            fsDir.Dispose();
        }