Code example #1
File: UserIndexer.cs  Project: Mike343/Netcoders
        /// <summary>
        /// Runs the indexing process.
        /// </summary>
        /// <param name="users">The users.</param>
        /// <param name="created">if set to <c>true</c> [created].</param>
        public void CreateOrUpdate(IList<User> users, bool created)
        {
            var modifier = new IndexModifier(
                Setting.UserSearchIndexPath.Value,
                new StandardAnalyzer(),
                !created
            );

            foreach (var user in users)
            {
                if (created)
                {
                    modifier.DeleteDocuments(new Term("id", user.Id.ToString()));
                }

                var document = new Document();

                UserToDocument(user, document);

                modifier.AddDocument(document);
            }

            modifier.Optimize();
            modifier.Close();
        }
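
For reference, a minimal read-side sketch that pairs with this writer, assuming Lucene.Net 2.x, the usual Lucene.Net.Search/Index usings, and only the "id" term that DeleteDocuments targets above (the method name and searcher setup are illustrative, not part of the original project):

        // Hypothetical lookup against the index produced by CreateOrUpdate.
        public bool UserIsIndexed(int userId)
        {
            var searcher = new IndexSearcher(Setting.UserSearchIndexPath.Value);
            try
            {
                // Same "id" term that CreateOrUpdate uses when deleting stale documents.
                Hits hits = searcher.Search(new TermQuery(new Term("id", userId.ToString())));
                return hits.Length() > 0;
            }
            finally
            {
                searcher.Close();
            }
        }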
Code example #2
        public void threadproc_update(object obj)
        {
            lock (locker) // If a thread is updating the index, no other thread should be doing anything with it.
            {

                try
                {
                    if (searcher != null)
                    {
                        try
                        {
                            searcher.Close();
                        }
                        catch (Exception)
                        {
                            // Ignore failures while closing the stale searcher; it is discarded either way.
                        }
                        searcher = null;
                    }

                    Lucene.Net.Index.IndexModifier modifier = new Lucene.Net.Index.IndexModifier(DBNLConfigurationManager.LuceneElement.IndexingFolder, analyzer, false);

                    // Same approach as the full index build, but uses an IndexModifier to
                    // replace only the single content item identified by "id".

                    int id = (int)obj;

                    modifier.DeleteDocuments(new Lucene.Net.Index.Term("id", Convert.ToString(id)));
                    var item = new ContentService().GetContentById(id);
                    modifier.AddDocument(create_doc(
                        item.ContentId, item.Content1));

                    modifier.Flush();
                    modifier.Close();

                }
                catch (Exception)
                {
                    // Swallow indexing errors so they do not escape the background update thread.
                }
            }
        }
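
Because threadproc_update takes a single object argument (the content id), it matches the standard WaitCallback shape, so a caller might queue it on the thread pool along these lines (a sketch; the wrapper method is hypothetical):

        // Hypothetical caller: queue a background re-index of one content item.
        public void QueueReindex(int contentId)
        {
            System.Threading.ThreadPool.QueueUserWorkItem(threadproc_update, contentId);
        }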
Code example #3
		public virtual void  TestIndex()
		{
			Directory ramDir = new RAMDirectory();
			IndexModifier i = new IndexModifier(ramDir, new StandardAnalyzer(), true);
			i.AddDocument(GetDoc());
			Assert.AreEqual(1, i.DocCount());
			i.Flush();
			i.AddDocument(GetDoc(), new SimpleAnalyzer());
			Assert.AreEqual(2, i.DocCount());
			i.Optimize();
			Assert.AreEqual(2, i.DocCount());
			i.Flush();
			i.DeleteDocument(0);
			Assert.AreEqual(1, i.DocCount());
			i.Flush();
			Assert.AreEqual(1, i.DocCount());
			i.AddDocument(GetDoc());
			i.AddDocument(GetDoc());
			i.Flush();
			// depend on merge policy - Assert.AreEqual(3, i.docCount());
			i.DeleteDocuments(allDocTerm);
			Assert.AreEqual(0, i.DocCount());
			i.Optimize();
			Assert.AreEqual(0, i.DocCount());
			
			//  Lucene defaults:
			Assert.IsNull(i.GetInfoStream());
			Assert.IsTrue(i.GetUseCompoundFile());
			Assert.AreEqual(IndexWriter.DISABLE_AUTO_FLUSH, i.GetMaxBufferedDocs());
			Assert.AreEqual(10000, i.GetMaxFieldLength());
			Assert.AreEqual(10, i.GetMergeFactor());
			// test setting properties:
			i.SetMaxBufferedDocs(100);
			i.SetMergeFactor(25);
			i.SetMaxFieldLength(250000);
			i.AddDocument(GetDoc());
			i.SetUseCompoundFile(false);
			i.Flush();
			Assert.AreEqual(100, i.GetMaxBufferedDocs());
			Assert.AreEqual(25, i.GetMergeFactor());
			Assert.AreEqual(250000, i.GetMaxFieldLength());
			Assert.IsFalse(i.GetUseCompoundFile());
			
			// test setting properties when internally the reader is opened:
			i.DeleteDocuments(allDocTerm);
			i.SetMaxBufferedDocs(100);
			i.SetMergeFactor(25);
			i.SetMaxFieldLength(250000);
			i.AddDocument(GetDoc());
			i.SetUseCompoundFile(false);
			i.Optimize();
			Assert.AreEqual(100, i.GetMaxBufferedDocs());
			Assert.AreEqual(25, i.GetMergeFactor());
			Assert.AreEqual(250000, i.GetMaxFieldLength());
			Assert.IsFalse(i.GetUseCompoundFile());
			
			i.Close();
			try
			{
				i.DocCount();
				Assert.Fail();
			}
			catch (System.SystemException e)
			{
				// expected exception
			}
		}
Code example #4
        override public void  Run()
        {
            long endTime = (DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond) + 1000 * TEST_SECONDS;

            try
            {
                while ((DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond) < endTime)
                {
                    int rand = random.Next(101);
                    if (rand < 5)
                    {
                        index.Optimize();
                    }
                    else if (rand < 60)
                    {
                        Document doc = GetDocument();
                        index.AddDocument(doc);
                        idStack.Add(doc.Get("id"));
                        added++;
                    }
                    else
                    {
                        // we just delete the last document added and remove it
                        // from the id stack so that it won't be removed twice:
                        System.String delId = null;
                        try
                        {
                            delId = idStack[idStack.Count - 1] as System.String;
                            idStack.RemoveAt(idStack.Count - 1);
                        }
                        catch (System.ArgumentOutOfRangeException)
                        {
                            // The id stack is empty, so there is nothing to delete yet.
                            continue;
                        }
                        Term delTerm  = new Term("id", System.Int32.Parse(delId).ToString());
                        int  delCount = index.DeleteDocuments(delTerm);
                        if (delCount != 1)
                        {
                            throw new System.SystemException("Internal error: " + threadNumber + " deleted " + delCount + " documents, term=" + delTerm);
                        }
                        deleted++;
                    }
                    if (maxWait > 0)
                    {
                        rand = random.Next(maxWait);
                        //System.out.println("waiting " + rand + "ms");
                        try
                        {
                            System.Threading.Thread.Sleep(new System.TimeSpan((System.Int64) 10000 * rand));
                        }
                        catch (System.Threading.ThreadInterruptedException ie)
                        {
                            SupportClass.ThreadClass.Current().Interrupt();
                            throw new System.SystemException("", ie);
                        }
                    }
                }
            }
            catch (System.IO.IOException e)
            {
                throw new System.SystemException("", e);
            }
        }
Code example #5
        /// <summary>
        /// Loops through a list of stories and adds them to the index. If the crawl is an
        /// incremental update, each story is first removed from the index and then added again.
        /// </summary>
        /// <param name="modifier">IndexModifier used to update the index</param>
        /// <param name="isIncrementalCrawl">bool indicating if the stories should
        /// be removed from the existing index before being added again.</param>
        /// <param name="stories">StoryCollection containing the stories to add/update
        /// in the index</param>
        private void AddStoriesToIndex(IndexModifier modifier, bool isIncrementalCrawl, StoryCollection stories)
        {
            if (isIncrementalCrawl)
            {

                //remove the stories from the index that have been updated
                Log.DebugFormat("Updating index, removing {0} stories", stories.Count);
                foreach (Story s in stories)
                {
                    Term existingItem = new Term("id", s.StoryID.ToString());
                    modifier.DeleteDocuments(existingItem);
                }
            }

            //add the new documents
            Log.DebugFormat("Adding batch of {0} stories to the index", stories.Count);
            foreach (Story story in stories)
            {
                // Spam stories shouldn't be added to the index.
                if (story.IsSpam)
                    continue;

                Document doc = new Document();

                doc.Add(new Field("url", story.Url, Field.Store.NO, Field.Index.TOKENIZED));
                doc.Add(new Field("title", story.Title, Field.Store.NO, Field.Index.TOKENIZED, Field.TermVector.YES));
                doc.Add(new Field("description", story.Description, Field.Store.NO, Field.Index.TOKENIZED));
                doc.Add(new Field("users", GetUserWhoKickedSearchString(story), Field.Store.NO, Field.Index.TOKENIZED));
                doc.Add(new Field("category", story.Category.Name, Field.Store.NO, Field.Index.TOKENIZED));
                doc.Add(new Field("tags", GetStoryTags(story), Field.Store.NO, Field.Index.TOKENIZED, Field.TermVector.YES));
                doc.Add(new Field("id", story.StoryID.ToString(), Field.Store.YES, Field.Index.UN_TOKENIZED));
                doc.Add(new Field("kickCount", story.KickCount.ToString(), Field.Store.NO, Field.Index.UN_TOKENIZED));
                doc.Add(new Field("dateAdded", DateField.DateToString(story.CreatedOn), Field.Store.NO, Field.Index.UN_TOKENIZED));

                modifier.AddDocument(doc);
                Log.DebugFormat("StoryId {0} added to index", story.StoryID);
            }
        }
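
Only the "id" field is stored above (everything else is indexed but not stored), so a search over this index typically returns story ids that are then loaded from the data store. A hedged read-side sketch, assuming Lucene.Net 2.x with the usual QueryParsers/Search usings and a caller that supplies the index path and the same analyzer used for writing:

        // Hypothetical search against the story index built by AddStoriesToIndex.
        // Only "id" is stored, so hits are mapped back to story ids.
        private IList<int> SearchStoryIds(string indexPath, Analyzer analyzer, string searchText)
        {
            var searcher = new IndexSearcher(indexPath);
            try
            {
                var parser = new QueryParser("title", analyzer);
                Hits hits = searcher.Search(parser.Parse(searchText));

                var ids = new List<int>();
                for (int n = 0; n < hits.Length(); n++)
                    ids.Add(int.Parse(hits.Doc(n).Get("id")));
                return ids;
            }
            finally
            {
                searcher.Close();
            }
        }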
Code example #6
File: Searcher.cs  Project: Inzaghi2012/teamlab.v7.5
        public void CreateIndexIfNeeded(List<MsDocEntryPoint> documentation, DateTime? lastModified)
        {
            lock (SynchLock)
            {
                if (IsIndexingNeeded(lastModified) || !IsOperational)
                {
                    try
                    {
                        // Drop the existing index before rebuilding
                        if (searcher != null)
                            searcher.Close();
                        if (_directory != null)
                            _directory.Close();
                        //Delete dir
                        Directory.Delete(_indexDirectory, true);
                        _directory = FSDirectory.GetDirectory(_indexDirectory, true); //Reopen directory
                        var indexModifier = new IndexModifier(_directory, new StandardAnalyzer(), true);

                        foreach (var entryPoint in documentation)
                        {
                            var pointDoc = new Document();
                            //Id keys
                            pointDoc.Add(new Field("point", entryPoint.Name, Field.Store.YES, Field.Index.NOT_ANALYZED,
                                                   Field.TermVector.YES));
                            if (!string.IsNullOrEmpty(entryPoint.Summary))
                                pointDoc.Add(new Field("summary", entryPoint.Summary, Field.Store.YES,
                                                       Field.Index.ANALYZED, Field.TermVector.NO));
                            if (!string.IsNullOrEmpty(entryPoint.Example))
                                pointDoc.Add(new Field("example", entryPoint.Example, Field.Store.YES,
                                                       Field.Index.ANALYZED, Field.TermVector.NO));

                            foreach (var pointMethod in entryPoint.Methods)
                            {
                                var doc = new Document();
                                //Id keys
                                doc.Add(new Field("point", entryPoint.Name, Field.Store.YES, Field.Index.NOT_ANALYZED,
                                                  Field.TermVector.YES));
                                doc.Add(new Field("path", pointMethod.ToString(), Field.Store.YES,
                                                  Field.Index.NOT_ANALYZED, Field.TermVector.YES));

                                doc.Add(new Field("url", pointMethod.Path, Field.Store.YES, Field.Index.NOT_ANALYZED,
                                                  Field.TermVector.NO));
                                if (!string.IsNullOrEmpty(pointMethod.Notes))
                                    doc.Add(new Field("notes", pointMethod.Notes, Field.Store.YES, Field.Index.ANALYZED,
                                                      Field.TermVector.NO));
                                if (!string.IsNullOrEmpty(pointMethod.Remarks))
                                    doc.Add(new Field("remarks", pointMethod.Remarks, Field.Store.YES,
                                                      Field.Index.ANALYZED, Field.TermVector.NO));
                                if (!string.IsNullOrEmpty(pointMethod.Example))
                                    doc.Add(new Field("example", pointMethod.Example, Field.Store.YES,
                                                      Field.Index.ANALYZED, Field.TermVector.NO));
                                if (!string.IsNullOrEmpty(pointMethod.Returns))
                                    doc.Add(new Field("returns", pointMethod.Returns, Field.Store.YES,
                                                      Field.Index.ANALYZED, Field.TermVector.NO));
                                if (!string.IsNullOrEmpty(pointMethod.ShortName))
                                    doc.Add(new Field("short", pointMethod.ShortName, Field.Store.YES,
                                                      Field.Index.ANALYZED, Field.TermVector.NO));
                                if (!string.IsNullOrEmpty(pointMethod.Summary))
                                    doc.Add(new Field("summary", pointMethod.Summary, Field.Store.YES,
                                                      Field.Index.ANALYZED, Field.TermVector.NO));
                                foreach (var param in pointMethod.Params)
                                {
                                    if (!string.IsNullOrEmpty(param.Description))
                                        doc.Add(new Field("param-" + param.Name + "-description", param.Description,
                                                          Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.NO));
                                    if (!string.IsNullOrEmpty(param.Remarks))
                                        doc.Add(new Field("param-" + param.Name + "-remarks", param.Remarks,
                                                          Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.NO));
                                }
                                indexModifier.AddDocument(doc);
                            }
                        }
                        indexModifier.Optimize();
                        indexModifier.Close();
                        searcher = new IndexSearcher(_directory);
                        IsOperational = true;
                    }
                    catch (Exception)
                    {
                        IsOperational = false;
                    }

                }
            }
        }
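
A hedged sketch of how the index built above might be queried through the searcher that CreateIndexIfNeeded leaves open; the helper name and return shape are hypothetical, while the "summary", "point", and "path" field names come from the per-method documents created above:

        // Hypothetical query helper for the documentation index built above.
        public List<KeyValuePair<string, string>> FindMethods(string text)
        {
            lock (SynchLock)
            {
                var results = new List<KeyValuePair<string, string>>();
                if (!IsOperational || searcher == null)
                    return results;

                var parser = new QueryParser("summary", new StandardAnalyzer());
                Hits hits = searcher.Search(parser.Parse(text));
                for (int n = 0; n < hits.Length(); n++)
                {
                    Document doc = hits.Doc(n);
                    // "point" and "path" are stored on the per-method documents, so they can be read off the hit.
                    results.Add(new KeyValuePair<string, string>(doc.Get("point"), doc.Get("path")));
                }
                return results;
            }
        }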