DeleteAll() public method

Delete all documents in the index.

This method drops all buffered documents and removes all segments from the index. The change is not visible until Commit() has been called, and it can be rolled back using Rollback().

NOTE: this method is much faster than using deleteDocuments(new MatchAllDocsQuery()).

NOTE: this method will forcefully abort all merges in progress. If other threads are running Optimize() or any of the addIndexes methods, they will receive MergePolicy.MergeAbortedExceptions.

public DeleteAll ( ) : void
return void
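
A minimal sketch of these semantics, assuming Lucene.Net 3.0.3 and an in-memory RAMDirectory (the names below are illustrative, not taken from any of the examples): the deletion is buffered until Commit() and could instead be discarded with Rollback().

using Lucene.Net.Analysis.Standard;
using Lucene.Net.Documents;
using Lucene.Net.Index;
using Lucene.Net.Store;

public static class DeleteAllSketch
{
    public static void Run()
    {
        var directory = new RAMDirectory();
        var analyzer = new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30);

        using (var writer = new IndexWriter(directory, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED))
        {
            var doc = new Document();
            doc.Add(new Field("Id", "1", Field.Store.YES, Field.Index.NOT_ANALYZED));
            writer.AddDocument(doc);
            writer.Commit();

            // Much faster than writer.DeleteDocuments(new MatchAllDocsQuery()):
            // buffered documents are dropped and all segments are removed outright.
            writer.DeleteAll();

            // Readers opened against the directory still see the old document here;
            // the deletion only becomes visible once Commit() is called ...
            writer.Commit();

            // ... or it could have been discarded with writer.Rollback() instead of Commit().
        }
    }
}
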
        public void ClearIndex()
        {
            if (System.IO.Directory.GetFiles(this.index.Directory.FullName).Any())
            {
                try
                {
                    var analyzer = new Lucene.Net.Analysis.Standard.StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30);
                    using (var writer = new IndexWriter(this.index, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED))
                    {
                        // remove older index entries
                        writer.DeleteAll();

                        // close handles
                        analyzer.Close();
                        writer.Dispose();
                    }

                    ForceUnlockIndex();
                }
                catch (Exception)
                {
                    throw;
                }
            }
        }
Example #2
        // Delete the entire index
        public void delAllIndex()
        {
            if (System.IO.Directory.Exists(indexPath) == false)
            {
                System.IO.Directory.CreateDirectory(indexPath);
            }
            FSDirectory fsDirectory = FSDirectory.Open(new DirectoryInfo(indexPath), new NativeFSLockFactory());

            if (!IndexReader.IndexExists(fsDirectory))
            {
                return;
            }
            else
            {
                if (IndexReader.IsLocked(fsDirectory))
                {
                    IndexReader.Unlock(fsDirectory);
                }
            }
            Lucene.Net.Index.IndexWriter iw = new Lucene.Net.Index.IndexWriter(indexPath, new PanGuAnalyzer(), false);
            //  iw.DeleteDocuments(new Lucene.Net.Index.Term("Key", key));
            iw.DeleteAll();
            iw.Optimize(); // Deleted documents are not removed from disk right away; a .del file is written instead, and Optimize is needed to purge them. Before purging, UndeleteAll can restore the deleted documents.
            iw.Close();
        }
Example #3
 public void DeleteAllItemsFromIndex()
 {
     using (var writer = new IndexWriter(LuceneDirectory, new StandardAnalyzer(Version.LUCENE_30), IndexWriter.MaxFieldLength.UNLIMITED))
     {
         writer.DeleteAll();
     }
 }
        public void CreateIndexFromDirectory()
        {
            string directoryPath = outputDirectoryName == null
                ? Path.GetDirectoryName(Assembly.GetEntryAssembly().Location)
                : Path.GetDirectoryName(outputDirectoryName);
            var directory = FSDirectory.Open(directoryPath);
            Analyzer analyzer = new TermAnalyzer();
            IndexWriter writer =
                new IndexWriter(directory, analyzer, true,
                    IndexWriter.MaxFieldLength.UNLIMITED);
            var files = FileHelper.GetFilesFromRelativePath(inputDirectoryName);

            writer.DeleteAll();
            foreach (var fileInfo in files)
            {
                //var detailedPath = directoryPath + "\\" + fileInfo.Name;
                //var detailedDirectory = FSDirectory.Open(detailedPath);
                //IndexWriter detailedlWriter = new IndexWriter(detailedDirectory, analyzer, true,
                //    IndexWriter.MaxFieldLength.UNLIMITED);
                Document doc = new Document();
                Document detailedDocument = new Document();
                var path = fileInfo.Directory + "\\" + fileInfo.Name;
                var reader = new StreamReader(path);
                doc.Add(new Field("content", reader, Field.TermVector.WITH_POSITIONS));
                writer.AddDocument(doc);
                reader.Close();
                reader = new StreamReader(path);
                detailedDocument.Add(new Field(fileInfo.Name, reader, Field.TermVector.WITH_POSITIONS));
                //detailedlWriter.AddDocument(detailedDocument);
                //detailedlWriter.Optimize();
                //detailedlWriter.Commit();
            }
            writer.Optimize();
            writer.Commit();
        }
        public static void Run(string[] args)
        {
            IDictionary<string, string> arguments = CommandHelpers.GetArguments(args, 1);
            if (arguments == null)
            {
                PrintUsage();
                return;
            }

            Lucene.Net.Store.Directory directory = CommandHelpers.GetLuceneDirectory(arguments);
            if (directory == null)
            {
                PrintUsage();
                return;
            }

            if (IndexReader.IndexExists(directory))
            {
                using (IndexWriter writer = new IndexWriter(directory, new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30), true, IndexWriter.MaxFieldLength.UNLIMITED))
                {
                    writer.DeleteAll();
                    writer.Commit(new Dictionary<string, string>());
                }
            }

            Console.WriteLine("All Done");
        }
        static void Main(string[] args)
        {
            // Initialize Lucene
            Directory directory = FSDirectory.Open(new System.IO.DirectoryInfo("LuceneIndex"));
            StandardAnalyzer analyzer = new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_29);
            IndexWriter writer = new IndexWriter(directory, analyzer, IndexWriter.MaxFieldLength.UNLIMITED);

            // Open DB connection
            OpenConnections();

            // Insert documents into the index

            // Clear the index
            writer.DeleteAll();
            string sql = "SELECT a.ArticleId, a.Title, a.IntroText, a.Content, a.Published, a.DateCreated, " +
                "u.UserName, a.SectionId FROM Article AS a INNER JOIN aspnet_Users AS u ON " +
                "a.CreatedBy = u.UserId";

            SqlCommand cmd = new SqlCommand(sql, dbConn);
            SqlDataReader reader = cmd.ExecuteReader();

            while (reader.Read())
            {
                // Extract the fields to be indexed
                int articleId = reader.GetInt32(reader.GetOrdinal("ArticleId"));
                string title = reader["Title"].ToString();
                string content = reader["IntroText"].ToString() + reader["Content"].ToString();
                string intro = content.Substring(0, content.Length > 100 ? 100 : content.Length);
                string author = reader["UserName"].ToString();
                int sectionId = reader.GetInt32(reader.GetOrdinal("SectionId"));
                bool published = reader.GetBoolean(reader.GetOrdinal("Published"));
                DateTime pubDate = reader.GetDateTime(reader.GetOrdinal("DateCreated"));
                string strPubDate = DateTools.DateToString(pubDate, DateTools.Resolution.DAY);

                Document doc = new Document();
                doc.Add(new Field("ArticleId", articleId.ToString(), Field.Store.YES, Field.Index.NO));
                doc.Add(new Field("Title", title, Field.Store.YES, Field.Index.ANALYZED));
                doc.Add(new Field("Content", content, Field.Store.NO, Field.Index.ANALYZED));
                doc.Add(new Field("Intro", title, Field.Store.YES, Field.Index.NO));
                doc.Add(new Field("Author", author, Field.Store.YES, Field.Index.NO));
                doc.Add(new Field("SectionId", sectionId.ToString(), Field.Store.NO, Field.Index.NOT_ANALYZED));
                doc.Add(new Field("Published", published.ToString(), Field.Store.NO, Field.Index.NOT_ANALYZED));
                doc.Add(new Field("PubDate", strPubDate, Field.Store.YES, Field.Index.NOT_ANALYZED));

                writer.AddDocument(doc);
            }

            // Close everything, shut down.
            writer.Optimize();
            writer.Commit();
            writer.Close();

            dbConn.Close();
        }
        /// <summary>
        /// Clears the entire index.
        /// </summary>
        /// <param name="luceneVersion">The lucene version.</param>
        /// <param name="fsDirectory">The fs directory.</param>
        /// <param name="maxFieldLength">Maximum length of the field.</param>
		public virtual void ClearIndex(Version luceneVersion, FSDirectory fsDirectory, IndexWriter.MaxFieldLength maxFieldLength)
		{
			var analyzer = new StandardAnalyzer(luceneVersion);

			using (var indexWriter = new IndexWriter(fsDirectory, analyzer, maxFieldLength))
			{
				indexWriter.DeleteAll();

				analyzer.Close();
			}
		}
Example #8
	public IndexMaker(string indexDir, string fieldName)
	{

        _indexDir = indexDir;
        _fieldName = fieldName;
		using (Directory directory = FSDirectory.Open(_indexDir))
		using (Analyzer analyzer = new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30))
		using (IndexWriter writer = new IndexWriter(directory, analyzer, IndexWriter.MaxFieldLength.UNLIMITED))
		{
			writer.DeleteAll();
		}
    }
Example #9
        /// <summary>
        /// Clears the index.
        /// </summary>
        public void ClearIndex()
        {
            var analyzer = new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30);
            using (var writer = new IndexWriter(directoryTemp, analyzer, IndexWriter.MaxFieldLength.UNLIMITED))
            {
                writer.DeleteAll();

                analyzer.Close();
                writer.Optimize();
                writer.Dispose();
            }
        }
 private static void RebuildIndex(Lucene.Net.Store.Directory directory, Analyzer analyzer, List<Product> products)
 {
     IndexWriter writer = new IndexWriter(directory, analyzer, IndexWriter.MaxFieldLength.LIMITED);
     writer.DeleteAll();
     writer.Commit();
     foreach (Product p in products) // Add Documents to the Index.
     {
         AddDocumentToIndex(p, writer);
     }
     writer.Optimize();
     writer.Commit();
     writer.Dispose();
 }
Example #11
        public static void Build(IndexWriter writer)
        {
            writer.DeleteAll();

            Populate(GetPrograms(), writer);
            Populate(GetApplications(), writer);
            Populate(GetInitiatives(), writer);
            Populate(GetInitiativeTasks(), writer);

            writer.Optimize();
            writer.Commit();
            writer.Dispose();
        }
        public virtual void  TestDeleteAll()
        {
            for (int pass = 0; pass < 2; pass++)
            {
                bool        autoCommit = (0 == pass);
                Directory   dir        = new MockRAMDirectory();
                IndexWriter modifier   = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
                modifier.SetMaxBufferedDocs(2);
                modifier.SetMaxBufferedDeleteTerms(2);

                int id            = 0;
                int value_Renamed = 100;

                for (int i = 0; i < 7; i++)
                {
                    AddDoc(modifier, ++id, value_Renamed);
                }
                modifier.Commit();

                IndexReader reader = IndexReader.Open(dir);
                Assert.AreEqual(7, reader.NumDocs());
                reader.Close();

                // Add 1 doc (so we will have something buffered)
                AddDoc(modifier, 99, value_Renamed);

                // Delete all
                modifier.DeleteAll();

                // Delete all shouldn't be on disk yet
                reader = IndexReader.Open(dir);
                Assert.AreEqual(7, reader.NumDocs());
                reader.Close();

                // Add a doc and update a doc (after the deleteAll, before the commit)
                AddDoc(modifier, 101, value_Renamed);
                UpdateDoc(modifier, 102, value_Renamed);

                // commit the delete all
                modifier.Commit();

                // Validate there are no docs left
                reader = IndexReader.Open(dir);
                Assert.AreEqual(2, reader.NumDocs());
                reader.Close();

                modifier.Close();
                dir.Close();
            }
        }
        public void Setup()
        {
            var dirProvider = new Mock<IDataDirectoryProvider>();
            dirProvider.Setup(x => x.DataDirectoryPath).Returns(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData) + "\\MyJobLeadsTestIndex");

            _provider = new LuceneSearchProvider(dirProvider.Object);

            // Load the index and delete all documents so it's fresh for the test
            _indexDirectory = Lucene.Net.Store.FSDirectory.Open(new DirectoryInfo(_provider.LuceneIndexBaseDirectory));
            bool createNewIndex = !IndexReader.IndexExists(_indexDirectory);
            var writer = new IndexWriter(_indexDirectory, new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_29), createNewIndex, IndexWriter.MaxFieldLength.UNLIMITED);
            writer.DeleteAll();
            writer.Close();
        }
        protected override Task RunInternal(CancellationToken cancellationToken)
        {
            if (IndexReader.IndexExists(_directory))
            {
                using (var writer = new IndexWriter(_directory, new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30), true, IndexWriter.MaxFieldLength.UNLIMITED))
                {
                    writer.DeleteAll();
                    writer.Commit(new Dictionary<string, string>());
                }
            }

            Logger.LogInformation("All Done");

            return Task.FromResult(false);
        }
        public virtual void TestMixedTypesAfterDeleteAll()
        {
            Directory   dir = NewDirectory();
            IndexWriter w   = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)));
            Document    doc = new Document();

            doc.Add(new NumericDocValuesField("foo", 0));
            w.AddDocument(doc);
            w.DeleteAll();
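            // after DeleteAll() the index is empty, so "foo" can be re-added below with a different DocValues type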

            doc = new Document();
            doc.Add(new SortedDocValuesField("foo", new BytesRef("hello")));
            w.AddDocument(doc);
            w.Dispose();
            dir.Dispose();
        }
        public virtual void TestTypeChangeAfterDeleteAll()
        {
            Directory         dir    = NewDirectory();
            IndexWriterConfig conf   = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random));
            IndexWriter       writer = new IndexWriter(dir, conf);
            Document          doc    = new Document();

            doc.Add(new NumericDocValuesField("dv", 0L));
            writer.AddDocument(doc);
            writer.DeleteAll();
            doc = new Document();
            doc.Add(new SortedDocValuesField("dv", new BytesRef("foo")));
            writer.AddDocument(doc);
            writer.Dispose();
            dir.Dispose();
        }
Example #17
 /// <summary>
 /// Clears the entire index.
 /// </summary>
 /// <returns>true if successful, false if the index could not be cleared (for example because the directory was locked)</returns>
 public bool ClearIndex( )
 {
     try
     {
         var analyzer = new StandardAnalyzer(Version.LUCENE_30);
         using (var writer = new IndexWriter(directory, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED))
         {
             writer.DeleteAll();
             analyzer.Close();
         }
         return true;
     }
     catch
     {
         return false;
     }
 }
 public void Index(List<invType> types)
 {
     using(  IndexWriter indexWriter = new IndexWriter(azureDirectory, new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_CURRENT), true, new Lucene.Net.Index.IndexWriter.MaxFieldLength(IndexWriter.DEFAULT_MAX_FIELD_LENGTH)))
     {
         indexWriter.DeleteAll();
         types.ForEach(x =>
         {
             if (x.published != null && ((bool)x.published))
             {
                 Document doc = new Document();
                 doc.Add(new Field("typeID", "" + x.typeID, Field.Store.YES, Field.Index.NOT_ANALYZED, Field.TermVector.NO));
                 doc.Add(new Field("typeName", x.typeName, Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.NO));
                 // doc.Add(new Field("description", , Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.NO));
                 indexWriter.AddDocument(doc);
             }
         });
      }
 }
Example #19
        public void BuildIndex(List<User> users)
        {
            FSDirectory directory = FSDirectory.Open(new System.IO.DirectoryInfo(AppData));

            Analyzer analyzer = new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_29);

            IndexWriter indexWriter = new IndexWriter(directory, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED);

            indexWriter.DeleteAll();

            foreach (User u in users)
            {
                indexWriter.AddDocument(u.GetDocument());
            }

            indexWriter.Optimize();
            indexWriter.Close();
        }
Example #20
        public virtual void  TestDeleteAllNRT()
        {
            Directory   dir      = new MockRAMDirectory();
            IndexWriter modifier = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED, null);

            modifier.SetMaxBufferedDocs(2);
            modifier.SetMaxBufferedDeleteTerms(2);

            int id            = 0;
            int value_Renamed = 100;

            for (int i = 0; i < 7; i++)
            {
                AddDoc(modifier, ++id, value_Renamed);
            }
            modifier.Commit(null);

            IndexReader reader = modifier.GetReader(null);

            Assert.AreEqual(7, reader.NumDocs());
            reader.Close();

            AddDoc(modifier, ++id, value_Renamed);
            AddDoc(modifier, ++id, value_Renamed);

            // Delete all
            modifier.DeleteAll(null);

            reader = modifier.GetReader(null);
            Assert.AreEqual(0, reader.NumDocs());
            reader.Close();


            // Roll it back
            modifier.Rollback(null);
            modifier.Close();

            // Validate that the docs are still there
            reader = IndexReader.Open(dir, true, null);
            Assert.AreEqual(7, reader.NumDocs());
            reader.Close();

            dir.Close();
        }
        public void Cleanup()
        {
            string codeBase = Assembly.GetExecutingAssembly().CodeBase;
            UriBuilder uri = new UriBuilder(codeBase);
            string path = Uri.UnescapeDataString(uri.Path);
            var assemblyPath = Path.GetDirectoryName(path);
            string outputFolder = Path.Combine(assemblyPath, "Search");

             var directory = FSDirectory.Open(new DirectoryInfo(outputFolder));
            Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_30);

            using (var writer = new IndexWriter(directory, analyzer, true, IndexWriter.MaxFieldLength.LIMITED))
            {
                writer.DeleteAll();
                writer.Commit();
            }

            Directory.Delete(outputFolder, true);
        }
Example #22
 public static bool ClearLuceneIndex()
 {
     try
     {
         using (var analyzer = new RussianAnalyzer(Version.LUCENE_30))
         using (
             var writer = new IndexWriter(LuceneConfig.Directory, analyzer, true,
                 IndexWriter.MaxFieldLength.UNLIMITED))
         {
             // remove older index entries
             writer.DeleteAll();
         }
     }
     catch (Exception)
     {
         return false;
     }
     return true;
 }
Example #23
        public void TestDeleteAllIsCurrent()
        {
            // get reader
            IndexReader reader = writer.GetReader();

            // assert index has a document and reader is up2date
            Assert.AreEqual(1, writer.NumDocs(), "One document should be in the index");
            Assert.IsTrue(reader.IsCurrent(), "Document added, reader should be stale ");

            // remove all documents
            writer.DeleteAll();
            writer.Commit();

            // assert document has been deleted (index changed), reader is stale
            Assert.AreEqual(0, writer.NumDocs(), "Document should be removed");
            Assert.IsFalse(reader.IsCurrent(), "Reader should be stale");

            reader.Close();
        }
        public static bool ClearLuceneIndex()
        {
            try
            {
                //var analyzer = new StandardAnalyzer(Version.LUCENE_30);
                var analyzer = GetAnalyzer();
                using (var writer = new IndexWriter(Directory, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED))
                {
                    writer.DeleteAll();

                    analyzer.Close();
                }
            }
            catch (Exception e)
            {
                return false;
            }

            return true;
        }
        public static bool ClearLuceneIndex()
        {
            try
            {
                var analyzer = new StandardAnalyzer(Version.LUCENE_30);
                using (var writer = new IndexWriter(_directory, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED))
                {
                    // remove older index entries
                    writer.DeleteAll();

                    // close handles
                    analyzer.Close();
                    writer.Dispose();
                }
            }
            catch (Exception)
            {
                return false;
            }
            return true;
        }
Example #26
        public static void CreateIndexThreadProc()
        {
            lock (myLock)
            {
                IndexWriter writer = new IndexWriter(directory, analyzer, IndexWriter.MaxFieldLength.UNLIMITED);

                // Clear the index
                writer.DeleteAll();

                int count = 0;
                NietoYostenDbDataContext db = new NietoYostenDbDataContext();
                foreach (Article article in db.Articles)
                {
                    Document doc = ArticleToDocument(article);
                    writer.AddDocument(doc);
                    count++;
                }

                // Commit documents to index
                writer.Optimize();
                writer.Commit();
                writer.Close();
            }
        }
 public virtual void TestTypeChangeAfterDeleteAll()
 {
     Directory dir = NewDirectory();
     IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
     IndexWriter writer = new IndexWriter(dir, conf);
     Document doc = new Document();
     doc.Add(new NumericDocValuesField("dv", 0L));
     writer.AddDocument(doc);
     writer.DeleteAll();
     doc = new Document();
     doc.Add(new SortedDocValuesField("dv", new BytesRef("foo")));
     writer.AddDocument(doc);
     writer.Dispose();
     dir.Dispose();
 }
        public virtual void TestMixedTypesAfterDeleteAll()
        {
            Directory dir = NewDirectory();
            IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
            Document doc = new Document();
            doc.Add(new NumericDocValuesField("foo", 0));
            w.AddDocument(doc);
            w.DeleteAll();

            doc = new Document();
            doc.Add(new SortedDocValuesField("foo", new BytesRef("hello")));
            w.AddDocument(doc);
            w.Dispose();
            dir.Dispose();
        }
 public virtual void DeleteAll()
 => IndexWriter.DeleteAll();
Example #30
 public static bool ClearLuceneIndex()
 {
     try
     {
         var analyzer = GetAnalyzer();
         using (var writer = new IndexWriter(_directory, analyzer, IndexWriter.MaxFieldLength.UNLIMITED))
         {
             writer.DeleteAll();
             analyzer.Close();
         }
     }
     catch (Exception e)
     {
         return false;
     }
     return true;
 }
Example #31
 /// <summary>
 /// Calls <see cref="IndexWriter.DeleteAll()"/> and returns the
 /// generation that reflects this change.
 /// </summary>
 public virtual long DeleteAll()
 {
     writer.DeleteAll();
     // Return gen as of when indexing finished:
      return indexingGen.Get();
 }
Example #32
        public void Clear()
        {
            using (var fsDirectory = FSDirectory.Open(_indexDirectory))
            using (var writer = new IndexWriter(fsDirectory, _analyzer, true, IndexWriter.MaxFieldLength.LIMITED))
                writer.DeleteAll();

            _indexWrite = new IndexWriteRamStorage();
            _reduction = new ReductionRamStorage(_indexDirectory);
        }
Example #33
 public void DeleteAllDocuments()
 {
     Writer.DeleteAll();
 }
        private void BuildIndex(ITicketService ticketService)
        {
            lock (buildLock)
            {
                //index writer will open existing index and merge changes with it
                IndexWriter writer = new IndexWriter(TdSearchDirectory, TdIndexAnalyzer, IndexWriter.MaxFieldLength.UNLIMITED);
                writer.SetMergeFactor(25);
                writer.DeleteAll();
                //process tickets in batches of 25
                IPagination<Ticket> tickets = null;
                var p = 1;
                do
                {
                    tickets = ticketService.ListTickets(p, 25, true);

                    foreach (var ticket in tickets)
                    {
                        var doc = CreateIndexDocuementForTicket(ticket);//make the doc
                        
                        //writer.DeleteDocuments(new Term("ticketid", ticket.TicketId.ToString()));//delete any existing references in the index
                        //write the document to (or back to) the index
                        writer.AddDocument(doc);
                    }
                    p++;
                    tickets = (tickets.HasNextPage) ? ticketService.ListTickets(p, 25, true) : null;
                   
                } while (tickets != null);
               
                //optimize and close the writer
                writer.Commit();
                writer.Optimize();
                writer.Close();

                // close the shared instances of the directory and searcher so new searches grab new instances.
                ResetTdSearchDirectory();
                ResetTdIndexSearcher();
            }
        }
		public virtual void  TestDeleteAllNRT()
		{
			Directory dir = new MockRAMDirectory();
			IndexWriter modifier = new IndexWriter(dir, false, new WhitespaceAnalyzer(), true);
			modifier.SetMaxBufferedDocs(2);
			modifier.SetMaxBufferedDeleteTerms(2);
			
			int id = 0;
			int value_Renamed = 100;
			
			for (int i = 0; i < 7; i++)
			{
				AddDoc(modifier, ++id, value_Renamed);
			}
			modifier.Commit();
			
			IndexReader reader = modifier.GetReader();
			Assert.AreEqual(7, reader.NumDocs());
			reader.Close();
			
			AddDoc(modifier, ++id, value_Renamed);
			AddDoc(modifier, ++id, value_Renamed);
			
			// Delete all
			modifier.DeleteAll();
			
			reader = modifier.GetReader();
			Assert.AreEqual(0, reader.NumDocs());
			reader.Close();
			
			
			// Roll it back
			modifier.Rollback();
			modifier.Close();
			
			// Validate that the docs are still there
			reader = IndexReader.Open(dir);
			Assert.AreEqual(7, reader.NumDocs());
			reader.Close();
			
			dir.Close();
		}
		public virtual void  TestDeleteAll()
		{
			for (int pass = 0; pass < 2; pass++)
			{
				bool autoCommit = (0 == pass);
				Directory dir = new MockRAMDirectory();
				IndexWriter modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
				modifier.SetMaxBufferedDocs(2);
				modifier.SetMaxBufferedDeleteTerms(2);
				
				int id = 0;
				int value_Renamed = 100;
				
				for (int i = 0; i < 7; i++)
				{
					AddDoc(modifier, ++id, value_Renamed);
				}
				modifier.Commit();
				
				IndexReader reader = IndexReader.Open(dir);
				Assert.AreEqual(7, reader.NumDocs());
				reader.Close();
				
				// Add 1 doc (so we will have something buffered)
				AddDoc(modifier, 99, value_Renamed);
				
				// Delete all
				modifier.DeleteAll();
				
				// Delete all shouldn't be on disk yet
				reader = IndexReader.Open(dir);
				Assert.AreEqual(7, reader.NumDocs());
				reader.Close();
				
				// Add a doc and update a doc (after the deleteAll, before the commit)
				AddDoc(modifier, 101, value_Renamed);
				UpdateDoc(modifier, 102, value_Renamed);
				
				// commit the delete all
				modifier.Commit();
				
				// Validate there are no docs left
				reader = IndexReader.Open(dir);
				Assert.AreEqual(2, reader.NumDocs());
				reader.Close();
				
				modifier.Close();
				dir.Close();
			}
		}
        public virtual void TestDeleteAllNRT()
        {
            Directory dir = NewDirectory();
            IndexWriter modifier = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false)).SetMaxBufferedDocs(2).SetMaxBufferedDeleteTerms(2));

            int id = 0;
            int value = 100;

            for (int i = 0; i < 7; i++)
            {
                AddDoc(modifier, ++id, value);
            }
            modifier.Commit();

            IndexReader reader = modifier.Reader;
            Assert.AreEqual(7, reader.NumDocs);
            reader.Dispose();

            AddDoc(modifier, ++id, value);
            AddDoc(modifier, ++id, value);

            // Delete all
            modifier.DeleteAll();

            reader = modifier.Reader;
            Assert.AreEqual(0, reader.NumDocs);
            reader.Dispose();

            // Roll it back
            modifier.Rollback();
            modifier.Dispose();

            // Validate that the docs are still there
            reader = DirectoryReader.Open(dir);
            Assert.AreEqual(7, reader.NumDocs);
            reader.Dispose();

            dir.Dispose();
        }
        public virtual void TestDeleteAll()
        {
            Directory dir = NewDirectory();
            IndexWriter modifier = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false)).SetMaxBufferedDocs(2).SetMaxBufferedDeleteTerms(2));

            int id = 0;
            int value = 100;

            for (int i = 0; i < 7; i++)
            {
                AddDoc(modifier, ++id, value);
            }
            modifier.Commit();

            IndexReader reader = DirectoryReader.Open(dir);
            Assert.AreEqual(7, reader.NumDocs);
            reader.Dispose();

            // Add 1 doc (so we will have something buffered)
            AddDoc(modifier, 99, value);

            // Delete all
            modifier.DeleteAll();

            // Delete all shouldn't be on disk yet
            reader = DirectoryReader.Open(dir);
            Assert.AreEqual(7, reader.NumDocs);
            reader.Dispose();

            // Add a doc and update a doc (after the deleteAll, before the commit)
            AddDoc(modifier, 101, value);
            UpdateDoc(modifier, 102, value);

            // commit the delete all
            modifier.Commit();

            // Validate there are no docs left
            reader = DirectoryReader.Open(dir);
            Assert.AreEqual(2, reader.NumDocs);
            reader.Dispose();

            modifier.Dispose();
            dir.Dispose();
        }
 public void BuildIndex(IEnumerable<MedStandardInfo> source, IProgress<string> progress)
 {
      progress?.Report($@"Building the index for {source.Count()} documents ...");
     using (var directory = GetDirectory())
     using (var analyzer = GetAnalyzer())
     using (var writer = new IndexWriter(directory, analyzer, IndexWriter.MaxFieldLength.UNLIMITED))
     {
         writer.DeleteAll();
         foreach (var medStandard in source.Where(x => !string.IsNullOrEmpty(x.StandardName)))
         {
             var document = MapMedStandard(medStandard);
             writer.AddDocument(document);
         }
     }
 }