/// <summary>
/// Opens the index, records its document count, then releases the reader
/// and the index directory.
/// </summary>
public void SetUpReader()
{
    SetWorkingDirectory();

    var reader = DirectoryReader.Open(indexDirectory);
    _maxDoc = reader.MaxDoc;

    if (debug)
    {
        Console.WriteLine(_maxDoc);
    }

    reader.Dispose();
    indexDirectory.Dispose();
}
/// <summary>
/// Create a new facet store within the specified directory.
/// </summary>
/// <param name="directoryFacetInfo">The directory information where the facet files are to be placed.</param>
public void CreateMultiFacetIndex(DirectoryInfo directoryFacetInfo)
{
    DirectoryTaxonomyWriter facetWriter = null;
    Lucene.Net.Store.Directory directoryFacet = null;
    try
    {
        // Create the facet writer (CREATE mode replaces any existing taxonomy).
        directoryFacet = FSDirectory.Open(directoryFacetInfo);
        facetWriter = new DirectoryTaxonomyWriter(directoryFacet, IndexWriterConfig.OpenMode_e.CREATE);

        // Commit so the (empty) taxonomy files exist on disk.
        facetWriter.Commit();
    }
    finally
    {
        // FIX: removed the redundant 'catch (Exception) { throw; }' — it only rethrew.
        // Dispose the writer before its directory.
        if (facetWriter != null)
        {
            facetWriter.Dispose();
        }
        if (directoryFacet != null)
        {
            directoryFacet.Dispose();
        }
    }
}
/// <summary>
/// Repeatedly indexes small batches, closes the writer, and asserts that no
/// unreferenced files are left behind after each close/reopen cycle.
/// </summary>
public virtual void TestNoExtraFiles()
{
    Directory directory = NewDirectory();
    IndexWriter writer = new IndexWriter(directory,
        (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2));

    for (int iter = 0; iter < 7; iter++)
    {
        if (VERBOSE)
        {
            Console.WriteLine("TEST: iter=" + iter);
        }

        // Index a batch of identical documents.
        for (int docNum = 0; docNum < 21; docNum++)
        {
            Document doc = new Document();
            doc.Add(NewTextField("content", "a b c", Field.Store.NO));
            writer.AddDocument(doc);
        }

        writer.Dispose();
        TestIndexWriter.AssertNoUnreferencedFiles(directory, "testNoExtraFiles");

        // Reopen in APPEND mode for the next iteration.
        writer = new IndexWriter(directory,
            (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(2));
    }

    writer.Dispose();
    directory.Dispose();
}
// Indexes 1000 docs with random deletions and verifies that the custom
// TrackingCMS merge scheduler observed a non-zero total of merged bytes.
public virtual void TestTotalBytesSize()
{
    Directory d = NewDirectory();
    if (d is MockDirectoryWrapper)
    {
        // Disable IO throttling so merges complete quickly in this test.
        ((MockDirectoryWrapper)d).Throttling = Throttling.NEVER;
    }
    IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random));
    // Small buffer to force frequent flushes (and hence merges).
    iwc.SetMaxBufferedDocs(5);
    iwc.SetMergeScheduler(new TrackingCMS());
    if (TestUtil.GetPostingsFormat("id").Equals("SimpleText", StringComparison.Ordinal))
    {
        // Avoid the (slow) SimpleText postings format; force Lucene41 instead.
        iwc.SetCodec(TestUtil.AlwaysPostingsFormat(new Lucene41PostingsFormat()));
    }
    RandomIndexWriter w = new RandomIndexWriter(Random, d, iwc);
    for (int i = 0; i < 1000; i++)
    {
        Document doc = new Document();
        doc.Add(new StringField("id", "" + i, Field.Store.NO));
        w.AddDocument(doc);
        // Randomly delete one of the docs indexed so far, so merges see deletions.
        if (Random.NextBoolean())
        {
            w.DeleteDocuments(new Term("id", "" + Random.Next(i + 1)));
        }
    }
    Assert.IsTrue(((TrackingCMS)w.IndexWriter.Config.MergeScheduler).TotMergedBytes != 0);
    w.Dispose();
    d.Dispose();
}
/// <summary>
/// Rebuilds the sample index from scratch: deletes any existing index
/// directory, indexes one document from the sample text file, optimizes,
/// and releases the writer and directory.
/// </summary>
private void button1_Click(object sender, EventArgs e)
{
    String indexPath = @"inputFileFS";

    // Start from a clean index directory each run.
    if (System.IO.Directory.Exists(indexPath))
    {
        System.IO.Directory.Delete(indexPath, true);
    }

    luceneWorkingSet = FSDirectory.Open(indexPath);
    StandardAnalyzer analyzer = new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30);
    IndexWriter writer = new IndexWriter(luceneWorkingSet, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);

    // FIX: the StreamReader was never disposed; 'using' guarantees the file
    // handle is released even if AddDocument throws.
    using (StreamReader reader = new StreamReader(@"InputFS\sampleText.txt"))
    {
        Document doc = new Document();
        //Start Here
        doc.Add(new Field("lyrics", reader.ReadToEnd(), Field.Store.YES, Field.Index.ANALYZED));
        doc.Add(new Field("songName", "Tonight", Field.Store.YES, Field.Index.NOT_ANALYZED));
        writer.AddDocument(doc);
    }

    writer.Optimize();
    writer.Dispose();
    luceneWorkingSet.Dispose();
}
/// <summary>
/// Remove text from the existing index.
/// </summary>
/// <param name="directoryIndexInfo">The directory information where the index files are located.</param>
/// <param name="names">An array of unique names for the text.</param>
public void RemoveText(DirectoryInfo directoryIndexInfo, string[] names)
{
    Lucene.Net.Index.IndexWriter writer = null;
    Lucene.Net.Store.Directory directory = null;
    try
    {
        // Nothing to delete when no names were supplied.
        if (names != null && names.Length > 0)
        {
            // Create the analyzer.
            SimpleAnalyzer simpleAnalyzer = new Analyzer.SimpleAnalyzer();
            StandardAnalyzer standardAnalyzer = new Analyzer.StandardAnalyzer(simpleAnalyzer);

            // Open the existing index in APPEND mode.
            directory = FSDirectory.Open(directoryIndexInfo);
            IndexWriterConfig indexConfig = new IndexWriterConfig(Lucene.Net.Util.LuceneVersion.LUCENE_48, standardAnalyzer);
            indexConfig.SetOpenMode(IndexWriterConfig.OpenMode_e.APPEND);
            writer = new IndexWriter(directory, indexConfig);

            // Build one delete query per name ("textname" terms are stored lower-cased).
            List<Query> queries = new List<Query>();
            foreach (string name in names)
            {
                BooleanQuery query = new BooleanQuery();
                query.Add(new TermQuery(new Term("textname", name.ToLower())), BooleanClause.Occur.MUST);
                queries.Add(query);
            }

            // Delete the matching documents and commit the change.
            writer.DeleteDocuments(queries.ToArray());
            writer.Commit();
        }
    }
    finally
    {
        // FIX: removed the redundant 'catch (Exception) { throw; }' — it only rethrew.
        if (writer != null)
        {
            writer.Dispose();
        }
        if (directory != null)
        {
            directory.Dispose();
        }
    }
}
/// <summary>
/// Releases every Lucene directory opened by this fixture.
/// </summary>
public void TearDown()
{
    var directories = new IDisposable[] { _cwsDir, _pdfDir, _simpleDir, _conventionDir };
    foreach (var directory in directories)
    {
        directory.Dispose();
    }
}
/// <summary>
/// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources.
/// </summary>
public void Dispose()
{
    // Nothing to release when the directory was never opened.
    if (_directory == null)
    {
        return;
    }

    _directory.Dispose();
}
// Tears down the base fixture, releases the Lucene directory, then resets
// the static initialization-check hooks so later fixtures start clean.
public override void TestTearDown()
{
    base.TestTearDown();
    _luceneDir.Dispose();
    UmbracoExamineSearcher.DisableInitializationCheck = null;
    BaseUmbracoIndexer.DisableInitializationCheck = null;
}
/// <summary>
/// Searches the "Content" field of the configured index and returns a list of
/// status lines followed by the "Link" field of every hit.
/// </summary>
/// <param name="searchword">The user query, parsed with the standard query syntax.</param>
/// <returns>Status messages plus one entry per matching document's link.</returns>
public List <string> Search(string searchword)
{
    Lucene.Net.Store.Directory directory = FSDirectory.Open(Settings.Default.IndexDirectory);
    Analyzer analyzer = new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30);
    IndexReader indexReader = IndexReader.Open(directory, true);
    Searcher indexSearch = new IndexSearcher(indexReader);

    var queryParser = new QueryParser(Lucene.Net.Util.Version.LUCENE_30, "Content", analyzer);
    var query = queryParser.Parse(searchword);

    List <string> infosearch = new List <string>();
    infosearch.Add("Searching for: " + query.ToString().Replace("Content:", ""));

    // MaxDoc as the hit limit returns every matching document.
    TopDocs resultDocs = indexSearch.Search(query, indexReader.MaxDoc);
    infosearch.Add("Results Found: " + resultDocs.TotalHits);

    var hits = resultDocs.ScoreDocs;
    foreach (var hit in hits)
    {
        var documentFromSearcher = indexSearch.Doc(hit.Doc);
        infosearch.Add(documentFromSearcher.Get("Link"));
    }

    indexSearch.Dispose();
    // FIX: the reader and analyzer were previously leaked; release them too,
    // searcher first, then reader, then the directory.
    indexReader.Dispose();
    analyzer.Dispose();
    directory.Dispose();

    return (infosearch);
}
// Shuts down the search service: flags closing, closes writer and reader,
// then best-effort disposes the index directory and analyzer, logging failures.
public void Dispose()
{
    // Signal concurrent operations that shutdown is in progress.
    isClosing = true;
    CloseWriter();
    CloseReader();
    try
    {
        if (index != null)
        {
            index.Dispose();
        }
    }
    catch (Exception ex)
    {
        // NOTE(review): ErrorFormat is called with no format placeholders, so 'ex'
        // is presumably consumed as an unused format argument and never logged —
        // consider an overload that records the exception; verify against the Log API.
        Log.ErrorFormat("Failed to dispose Lucene search index.", ex);
    }
    try
    {
        if (analyzer != null)
        {
            analyzer.Close();
            analyzer.Dispose();
        }
    }
    catch (Exception ex)
    {
        // Same pattern as above: 'ex' may be dropped by ErrorFormat — verify.
        Log.ErrorFormat("Failed to close/dispose Lucene search analyzer.", ex);
    }
}
// Builds an index of MAX_DOCS high-priority documents, then verifies that
// both a plain TermQuery and an OR'ed BooleanQuery return the expected hits
// in score-then-id order (hit output is written to @out).
private void DoTest(Random random, TextWriter @out, bool useCompoundFiles, int MAX_DOCS)
{
    Store.Directory directory = NewDirectory();
    Analyzer analyzer = new MockAnalyzer(random);
    IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
    MergePolicy mp = conf.MergePolicy;
    // NoCFSRatio 1.0 forces compound files for all merges; 0.0 disables them.
    mp.NoCFSRatio = useCompoundFiles ? 1.0 : 0.0;
    IndexWriter writer = new IndexWriter(directory, conf);
    if (Verbose)
    {
        Console.WriteLine("TEST: now build index MAX_DOCS=" + MAX_DOCS);
    }
    for (int j = 0; j < MAX_DOCS; j++)
    {
        Documents.Document d = new Documents.Document();
        d.Add(NewTextField(PRIORITY_FIELD, HIGH_PRIORITY, Field.Store.YES));
        d.Add(NewTextField(ID_FIELD, Convert.ToString(j), Field.Store.YES));
        writer.AddDocument(d);
    }
    writer.Dispose();

    // try a search without OR
    IndexReader reader = DirectoryReader.Open(directory);
    IndexSearcher searcher = NewSearcher(reader);
    Query query = new TermQuery(new Term(PRIORITY_FIELD, HIGH_PRIORITY));
    @out.WriteLine("Query: " + query.ToString(PRIORITY_FIELD));
    if (Verbose)
    {
        Console.WriteLine("TEST: search query=" + query);
    }
    // Sort by score first, ties broken by the integer id field.
    Sort sort = new Sort(SortField.FIELD_SCORE, new SortField(ID_FIELD, SortFieldType.INT32));
    ScoreDoc[] hits = searcher.Search(query, null, MAX_DOCS, sort).ScoreDocs;
    PrintHits(@out, hits, searcher);
    CheckHits(hits, MAX_DOCS, searcher);

    // try a new search with OR
    searcher = NewSearcher(reader);
    hits = null;
    BooleanQuery booleanQuery = new BooleanQuery();
    booleanQuery.Add(new TermQuery(new Term(PRIORITY_FIELD, HIGH_PRIORITY)), Occur.SHOULD);
    booleanQuery.Add(new TermQuery(new Term(PRIORITY_FIELD, MED_PRIORITY)), Occur.SHOULD);
    @out.WriteLine("Query: " + booleanQuery.ToString(PRIORITY_FIELD));
    hits = searcher.Search(booleanQuery, null, MAX_DOCS, sort).ScoreDocs;
    PrintHits(@out, hits, searcher);
    CheckHits(hits, MAX_DOCS, searcher);

    reader.Dispose();
    directory.Dispose();
}
// Merges segments, flushes, and releases the writer and its directory,
// then nulls both fields so a stale handle cannot be reused after shutdown.
public void ShutDownWriter()
{
    _tdIndexWriter.Optimize();
    _tdIndexWriter.Dispose();
    _tdIndexDirectory.Dispose();
    _tdIndexWriter = null;
    _tdIndexDirectory = null;
}
// Restores the indexer's commit threshold to its default and releases the
// indexer, searcher, and Lucene directory used by the test.
public void TestTearDown()
{
    //set back to 100
    _indexer.OptimizationCommitThreshold = 100;
    _indexer.Dispose();
    _searcher.Dispose();
    _luceneDir.Dispose();
}
/// <summary>
/// Releases the cache directory when disposing deterministically.
/// </summary>
/// <param name="disposing">True when called from Dispose(); false from a finalizer.</param>
protected override void Dispose(bool disposing)
{
    // Only managed cleanup is needed; skip it entirely on finalizer calls.
    if (!disposing)
    {
        return;
    }

    isDisposed = true;
    cacheDirectory.Dispose();
}
// Releases the analyzer, searcher, and index directory (in that order),
// then records the shutdown in the log.
public void Close()
{
    _analyzer.Close();
    _analyzer.Dispose();
    _searcher.Dispose();
    _directory.Dispose();
    _logger.Info("Searcher is closed.");
}
/// <summary>
/// Runs a fixed sample query ("贾宝玉") against the index at F:\doc and reads
/// the "number" and "body" fields of each hit (results are not displayed).
/// </summary>
public void mychaxun()
{
    string selname = "贾宝玉";
    StringBuilder sb = new StringBuilder();
    Stopwatch sw = new Stopwatch();
    sw.Start();

    // Open the index directory read-only; NoLockFactory because searching
    // needs no write lock.
    Lucene.Net.Store.Directory dir = FSDirectory.Open(new System.IO.DirectoryInfo("F:\\doc"), new NoLockFactory());
    IndexReader reader = IndexReader.Open(dir, true);
    IndexSearcher search = null;
    try
    {
        search = new IndexSearcher(reader);
        QueryParser parser = new QueryParser(Lucene.Net.Util.Version.LUCENE_30, "body", new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30));
        Query query = parser.Parse(LuceneHelper.GetKeyWordSplid(selname));

        // Execute the search, taking at most 1000 hits.
        TopDocs ts = search.Search(query, null, 1000);
        ScoreDoc[] docs = ts.ScoreDocs;
        sw.Stop();

        // Read the stored fields of each hit.
        for (int i = 0; i < docs.Length; i++)
        {
            int docId = docs[i].Doc;
            Document doc = search.Doc(docId);
            string a = doc.Get("number") + "\r\n";
            string b = doc.Get("body") + "\r\n";
        }
    }
    finally
    {
        // FIX: removed the no-op 'catch (Exception ex) { throw; }' (it only rethrew
        // and left 'ex' unused) and dispose the reader, which was previously leaked.
        if (search != null)
        {
            search.Dispose();
        }
        reader.Dispose();
        if (dir != null)
        {
            dir.Dispose();
        }
    }
}
/// <summary>
/// Builds (or appends to) the index at F:\lucenedata from a single sample
/// text file, storing a "number" key field and an analyzed "body" field.
/// </summary>
public void myindexer()
{
    Stopwatch sw = new Stopwatch();
    sw.Start();

    IndexWriter writer = null;
    Analyzer analyzer = new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30);
    Lucene.Net.Store.Directory dir = FSDirectory.Open(new System.IO.DirectoryInfo("F:\\lucenedata"));
    try
    {
        // Create the index only when no index files exist in the directory yet;
        // otherwise open it for appending.
        bool isCreate = !IndexReader.IndexExists(dir);
        writer = new IndexWriter(dir, analyzer, isCreate, IndexWriter.MaxFieldLength.UNLIMITED);

        // Add one document built from the sample text file.
        {
            Document doc = new Document();
            string path = "F:\\doc\\红楼梦.txt";
            string text = System.IO.File.ReadAllText(path, Encoding.Default);

            // Store.YES keeps the original value so doc.Get("number") works;
            // NOT_ANALYZED stores it as a single untokenized term.
            doc.Add(new Field("number", 5.ToString(), Field.Store.YES, Field.Index.NOT_ANALYZED));

            // WITH_POSITIONS_OFFSETS stores term positions and offsets alongside
            // the tokens (needed e.g. for highlighting).
            doc.Add(new Field("body", text, Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
            writer.AddDocument(doc);
        }

        writer.Optimize();
        sw.Stop();
    }
    finally
    {
        // FIX: removed 'catch (Exception ex) { throw; }' — it only rethrew and
        // left 'ex' unused (compiler warning); the finally block still runs.
        if (writer != null)
        {
            writer.Dispose();
        }
        if (dir != null)
        {
            dir.Dispose();
        }
    }
}
/// <summary>
/// Remove facet documents from the existing index.
/// </summary>
/// <param name="directoryIndexInfo">The directory information where the index files are located.</param>
/// <param name="directoryFacetInfo">The directory information where the facet files are to be placed.</param>
/// <param name="textNames">The array of names for text data.</param>
/// <param name="filePaths">The array of full paths (without root 'C:\'. e.g. 'temp/http/index.html') for file documents.</param>
public void RemoveMultiFacetDocuments(DirectoryInfo directoryIndexInfo, DirectoryInfo directoryFacetInfo, string[] textNames, string[] filePaths)
{
    Lucene.Net.Index.IndexWriter writer = null;
    DirectoryTaxonomyWriter facetWriter = null;
    Lucene.Net.Store.Directory directory = null;
    Lucene.Net.Store.Directory directoryFacet = null;
    try
    {
        // Create the analyzer.
        SimpleAnalyzer simpleAnalyzer = new Analyzer.SimpleAnalyzer();
        StandardAnalyzer standardAnalyzer = new Analyzer.StandardAnalyzer(simpleAnalyzer);

        // Open the existing index in APPEND mode.
        directory = FSDirectory.Open(directoryIndexInfo);
        IndexWriterConfig indexConfig = new IndexWriterConfig(Lucene.Net.Util.LuceneVersion.LUCENE_48, standardAnalyzer);
        indexConfig.SetOpenMode(IndexWriterConfig.OpenMode_e.APPEND);
        writer = new IndexWriter(directory, indexConfig);

        // Open the existing facet (taxonomy) store in APPEND mode.
        directoryFacet = FSDirectory.Open(directoryFacetInfo);
        facetWriter = new DirectoryTaxonomyWriter(directoryFacet, IndexWriterConfig.OpenMode_e.APPEND);

        // Build the delete queries for the named text and file documents.
        FacetFilter filter = new FacetFilter();
        Query[] queries = filter.RemoveDocuments(textNames, filePaths);
        writer.DeleteDocuments(queries);

        // Commit both the index and the taxonomy.
        writer.Commit();
        facetWriter.Commit();
    }
    finally
    {
        // FIX: the taxonomy writer and its directory were previously leaked;
        // dispose all four resources (writers before their directories). The
        // redundant 'catch (Exception) { throw; }' was also removed.
        if (writer != null)
        {
            writer.Dispose();
        }
        if (facetWriter != null)
        {
            facetWriter.Dispose();
        }
        if (directory != null)
        {
            directory.Dispose();
        }
        if (directoryFacet != null)
        {
            directoryFacet.Dispose();
        }
    }
}
/// <summary>
/// Searches the "body" field of the local index for the text in txtSearch,
/// lists the hits in listBox1, and shows the elapsed time in label1.
/// </summary>
private void button2_Click(object sender, EventArgs e)
{
    if (string.IsNullOrEmpty(this.txtSearch.Text))
    {
        MessageBox.Show("请输入搜索的文本");
    }
    // NOTE(review): there is no early return above, so an empty query still
    // falls through to the search — confirm whether that is intended.

    StringBuilder sb = new StringBuilder();
    Stopwatch sw = new Stopwatch();
    sw.Start();

    // Open the index directory read-only; NoLockFactory because searching
    // needs no write lock.
    Lucene.Net.Store.Directory dir = FSDirectory.Open(new System.IO.DirectoryInfo("IndexDir"), new NoLockFactory());
    IndexReader reader = IndexReader.Open(dir, true);
    IndexSearcher search = null;
    try
    {
        search = new IndexSearcher(reader);
        QueryParser parser = new QueryParser(Lucene.Net.Util.Version.LUCENE_30, "body", new PanGuAnalyzer());
        Query query = parser.Parse(LuceneHelper.GetKeyWordSplid(this.txtSearch.Text));

        // Execute the search, taking at most 1000 hits.
        TopDocs ts = search.Search(query, null, 1000);
        ScoreDoc[] docs = ts.ScoreDocs;
        sw.Stop();

        this.listBox1.Items.Clear();
        for (int i = 0; i < docs.Length; i++)
        {
            int docId = docs[i].Doc;
            Document doc = search.Doc(docId);
            this.listBox1.Items.Add(doc.Get("number") + "\r\n");
            this.listBox1.Items.Add(doc.Get("body") + "\r\n");
            this.listBox1.Items.Add("------------------------\r\n");
        }
    }
    finally
    {
        // FIX: removed the no-op 'catch (Exception ex) { throw; }' (unused 'ex')
        // and dispose the reader, which was previously leaked.
        if (search != null)
        {
            search.Dispose();
        }
        reader.Dispose();
        if (dir != null)
        {
            dir.Dispose();
        }
    }

    this.label1.Text = "搜索用时:" + sw.ElapsedMilliseconds + "毫秒";
}
// Releases the index writer first, then the underlying index directory,
// nulling each field so the instance cannot reuse a disposed handle.
public void Dispose()
{
    if (mIndexWriter != null)
    {
        mIndexWriter.Dispose();
        mIndexWriter = null;
    }
    if (mIndex != null)
    {
        mIndex.Dispose();
        mIndex = null;
    }
}
/// <summary>
/// Stops the Lucene indexing services: the file-system watcher (if any),
/// the package indexer, the provider, and finally the Lucene directory.
/// </summary>
public void Dispose()
{
    LogManager.GetCurrentClassLogger().Info("Stopping Lucene indexing services.");

    var watcher = PackageFileSystemWatcher;
    if (watcher != null)
    {
        watcher.Dispose();
    }

    PackageIndexer.Dispose();
    Provider.Dispose();
    LuceneDirectory.Dispose();
}
/// <summary>
/// Releases the searcher manager (when present), then the writer and its
/// directory, nulling the fields so disposal is idempotent.
/// </summary>
public virtual void Dispose()
{
    if (m_searcherMgr != null)
    {
        m_searcherMgr.Dispose();
        m_searcherMgr = null;
    }

    // A null writer means the index was already (or never) opened.
    if (writer == null)
    {
        return;
    }

    writer.Dispose();
    dir.Dispose();
    writer = null;
}
/// <summary>
/// Releases the searcher manager (when present), then the writer and its
/// directory, nulling the fields so repeated disposal is harmless.
/// </summary>
public void Dispose()
{
    if (searcherMgr != null)
    {
        searcherMgr.Dispose();
        searcherMgr = null;
    }

    // A null writer means there is nothing further to release.
    if (writer == null)
    {
        return;
    }

    writer.Dispose();
    dir.Dispose();
    writer = null;
}
// Verifies the replicator can publish a new revision after the publisher's
// index and taxonomy are both recreated from scratch.
public void TestRecreateTaxonomy()
{
    replicator.Publish(CreateRevision(1));
    client.UpdateNow();

    // recreate index and taxonomy
    Directory newTaxo = NewDirectory();
    // Opening (and immediately closing) a taxonomy writer initializes an empty taxonomy.
    new DirectoryTaxonomyWriter(newTaxo).Dispose();
    publishTaxoWriter.ReplaceTaxonomy(newTaxo);
    publishIndexWriter.DeleteAll();

    replicator.Publish(CreateRevision(2));
    client.UpdateNow();
    newTaxo.Dispose();
}
/// <summary>
/// Indexes one document per data row (line number, line text, and id),
/// then optimizes, commits, and releases the writer and index directory.
/// </summary>
/// <param name="dataToIndex">The rows to be added to the index.</param>
public void BuildIndex(IEnumerable <SampleDataFileRow> dataToIndex)
{
    foreach (var row in dataToIndex)
    {
        var doc = new Document();
        doc.Add(new Field("LineNumber", row.LineNumber.ToString(), Field.Store.YES, Field.Index.NOT_ANALYZED));
        doc.Add(new Field("LineText", row.LineText, Field.Store.YES, Field.Index.ANALYZED));
        doc.Add(new Field("ID", row.Id.ToString(), Field.Store.YES, Field.Index.NOT_ANALYZED));
        writer.AddDocument(doc);
    }

    // Merge segments, persist, and release the writer and directory.
    writer.Optimize();
    writer.Commit();
    writer.Dispose();
    luceneIndexDirectory.Dispose();
}
// Closes the writer and reader first, then releases the index directory
// and the analyzer (Close before Dispose, matching the Lucene analyzer API).
public void Dispose()
{
    CloseWriter();
    CloseReader();
    if (index != null)
    {
        index.Dispose();
    }
    if (analyzer != null)
    {
        analyzer.Close();
        analyzer.Dispose();
    }
}
/// <summary>
/// Commits pending changes and releases the writer, directory, and analyzer.
/// Safe to call more than once.
/// </summary>
/// <param name="disposing">True when called from Dispose(); false from a finalizer.</param>
protected virtual void Dispose(bool disposing)
{
    if (_isDisposed)
    {
        return;
    }

    if (disposing)
    {
        // Flush pending changes before tearing anything down.
        _writer.Commit();
        _writer.Dispose();
        _directory.Dispose();
        _analyzer.Dispose();
    }

    _isDisposed = true;
}
/// <summary>
/// Returns the posts (questions) associated with the given tag id by
/// searching the question/tag index and loading each post through TriglavBL.
/// </summary>
/// <param name="TAGID">The tag id to search for.</param>
/// <returns>The matching posts; empty when there are no hits.</returns>
public static List <Post> getPitanjaByTag(int TAGID)
{
    Directory directoryPronadjenaPitanjaTagovi = Data.Lucene.Indexing.GetDirectoryPitanjaTagovi();
    Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_30);
    IndexReader indexReader = IndexReader.Open(directoryPronadjenaPitanjaTagovi, true);
    Searcher searcher = new IndexSearcher(indexReader);
    List <Post> PitanjaTagovi = new List <Post>();

    try
    {
        var queryParser = new MultiFieldQueryParser(Version.LUCENE_30, new[] { "PostID", "TagID", "DatumKreiranja" }, analyzer);
        var query = queryParser.Parse(Convert.ToString(TAGID));

        // MaxDoc as the hit limit returns every matching document.
        TopDocs pronadjeno = searcher.Search(query, indexReader.MaxDoc);
        if (pronadjeno != null)
        {
            foreach (var hit in pronadjeno.ScoreDocs)
            {
                var documentFromSearcher = searcher.Doc(hit.Doc);
                using (TriglavBL temp = new TriglavBL())
                {
                    PitanjaTagovi.Add(temp.getPitanjeByID(Convert.ToInt32(documentFromSearcher.Get("PostID"))));
                }
            }
        }
    }
    finally
    {
        // FIX: disposal was duplicated in both branches and skipped entirely on an
        // exception; it now runs once here, and the reader (previously leaked) is
        // released as well.
        searcher.Dispose();
        indexReader.Dispose();
        directoryPronadjenaPitanjaTagovi.Dispose();
    }

    return (PitanjaTagovi);
}
// Verifies that a negative query boost produces negative hit scores and that
// the searcher's explanation agrees with the scored hit.
public virtual void TestNegativeQueryBoost()
{
    Query q = new TermQuery(new Term("foo", "bar"));
    q.Boost = -42f;
    Assert.AreEqual(-42f, q.Boost, 0.0f);

    Store.Directory directory = NewDirectory();
    try
    {
        Analyzer analyzer = new MockAnalyzer(Random);
        IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);

        IndexWriter writer = new IndexWriter(directory, conf);
        try
        {
            // Single document matching the boosted term.
            Documents.Document d = new Documents.Document();
            d.Add(NewTextField("foo", "bar", Field.Store.YES));
            writer.AddDocument(d);
        }
        finally
        {
            writer.Dispose();
        }

        IndexReader reader = DirectoryReader.Open(directory);
        try
        {
            IndexSearcher searcher = NewSearcher(reader);

            ScoreDoc[] hits = searcher.Search(q, null, 1000).ScoreDocs;
            Assert.AreEqual(1, hits.Length);

            // The negative boost must drive the hit's score below zero.
            Assert.IsTrue(hits[0].Score < 0, "score is not negative: " + hits[0].Score);

            // The explanation must agree with the scored hit.
            Explanation explain = searcher.Explain(q, hits[0].Doc);
            Assert.AreEqual(hits[0].Score, explain.Value, 0.001f, "score doesn't match explanation");
            Assert.IsTrue(explain.IsMatch, "explain doesn't think doc is a match");
        }
        finally
        {
            reader.Dispose();
        }
    }
    finally
    {
        directory.Dispose();
    }
}
/// <summary>
/// Randomized round-trip test for WordBreakSpellChecker: indexes random
/// strings both whole and split at a random code-point boundary, then checks
/// that the spell checker can suggest both the break and the recombination.
/// </summary>
public void GRandom()
{
    int numDocs = TestUtil.NextInt(Random(), (10 * RANDOM_MULTIPLIER), (100 * RANDOM_MULTIPLIER));
    Directory dir = null;
    RandomIndexWriter writer = null;
    IndexReader ir = null;
    try
    {
        dir = NewDirectory();
        writer = new RandomIndexWriter(Random(), dir, new MockAnalyzer(Random(), MockTokenizer.WHITESPACE, false), Similarity, TimeZone);
        int maxLength = TestUtil.NextInt(Random(), 5, 50);
        List<string> originals = new List<string>(numDocs);
        List<string[]> breaks = new List<string[]>(numDocs);

        // Index numDocs random strings, each stored whole ("random_combine")
        // and split into two halves ("random_break").
        for (int i = 0; i < numDocs; i++)
        {
            string orig = "";
            if (Random().nextBoolean())
            {
                while (!GoodTestString(orig))
                {
                    orig = TestUtil.RandomSimpleString(Random(), maxLength);
                }
            }
            else
            {
                while (!GoodTestString(orig))
                {
                    orig = TestUtil.RandomUnicodeString(Random(), maxLength);
                }
            }
            originals.Add(orig);

            // Break on code points (not chars) so surrogate pairs are never split.
            int totalLength = orig.CodePointCount(0, orig.Length);
            int breakAt = orig.OffsetByCodePoints(0, TestUtil.NextInt(Random(), 1, totalLength - 1));
            string[] broken = new string[2];
            broken[0] = orig.Substring(0, breakAt - 0);
            broken[1] = orig.Substring(breakAt);
            breaks.Add(broken);

            Document doc = new Document();
            doc.Add(NewTextField("random_break", broken[0] + " " + broken[1], Field.Store.NO));
            doc.Add(NewTextField("random_combine", orig, Field.Store.NO));
            writer.AddDocument(doc);
        }
        writer.Commit();
        writer.Dispose();

        ir = DirectoryReader.Open(dir);
        WordBreakSpellChecker wbsp = new WordBreakSpellChecker();
        wbsp.MaxChanges = (1);
        wbsp.MinBreakWordLength = (1);
        wbsp.MinSuggestionFrequency = (1);
        wbsp.MaxCombineWordLength = (maxLength);

        for (int i = 0; i < originals.size(); i++)
        {
            string orig = originals[i];
            string left = breaks[i][0];
            string right = breaks[i][1];
            {
                // The spell checker must suggest the known left/right break of each word.
                Term term = new Term("random_break", orig);
                SuggestWord[][] sw = wbsp.SuggestWordBreaks(term, originals.size(), ir, SuggestMode.SUGGEST_ALWAYS, WordBreakSpellChecker.BreakSuggestionSortMethod.NUM_CHANGES_THEN_MAX_FREQUENCY);
                bool failed = true;
                foreach (SuggestWord[] sw1 in sw)
                {
                    assertTrue(sw1.Length == 2);
                    if (sw1[0].String.equals(left) && sw1[1].String.equals(right))
                    {
                        failed = false;
                    }
                }
                assertFalse("Failed getting break suggestions\n >Original: " + orig + "\n >Left: " + left + "\n >Right: " + right, failed);
            }
            {
                // And it must suggest recombining the two halves into the original.
                Term[] terms = { new Term("random_combine", left), new Term("random_combine", right) };
                CombineSuggestion[] cs = wbsp.SuggestWordCombinations(terms, originals.size(), ir, SuggestMode.SUGGEST_ALWAYS);
                bool failed = true;
                foreach (CombineSuggestion cs1 in cs)
                {
                    assertTrue(cs1.OriginalTermIndexes.Length == 2);
                    if (cs1.Suggestion.String.equals(left + right))
                    {
                        failed = false;
                    }
                }
                assertFalse("Failed getting combine suggestions\n >Original: " + orig + "\n >Left: " + left + "\n >Right: " + right, failed);
            }
        }
    }
    // FIX: removed 'catch (Exception e) { throw e; }' — rethrowing with 'throw e'
    // resets the stack trace; letting the exception propagate preserves it, and the
    // finally block below still runs.
    finally
    {
        // Best-effort cleanup: each resource may be null or already disposed.
        try { ir.Dispose(); } catch (Exception /*e1*/) { }
        try { writer.Dispose(); } catch (Exception /*e1*/) { }
        try { dir.Dispose(); } catch (Exception /*e1*/) { }
    }
}
// Copies every file from the source directory into this RAM directory,
// optionally disposing the source afterwards.
private RAMDirectory(Directory dir, bool closeDir, IOContext context)
    : this()
{
    foreach (string fileName in dir.ListAll())
    {
        dir.Copy(this, fileName, fileName, context);
    }

    if (closeDir)
    {
        dir.Dispose();
    }
}
/// <summary>
/// Perform search across the Spanish, English and Hebrew indexes with an
/// article_id relevance boost, returning the requested page of issue documents.
/// </summary>
/// <param name="query">The base query to execute.</param>
/// <param name="startIndex">The index of the first result to return.</param>
/// <param name="blockSize">The number of results per page.</param>
/// <param name="indexDirEs">The Spanish index directory.</param>
/// <param name="indexDirEn">The English index directory.</param>
/// <param name="indexDirHe">The Hebrew index directory.</param>
/// <param name="sortBy">Sort key — NOTE(review): currently unused; confirm intent.</param>
/// <returns>The issue documents for the requested page.</returns>
public List<IssueDocument> MedesSearch(Query query, int startIndex, int blockSize, Directory indexDirEs, Directory indexDirEn, Directory indexDirHe, string sortBy)
{
#if DEBUG
    T.TraceMessage(string.Format("Begin search , query: '{0}'", query.ToString()));
#endif
    List<IssueDocument> result = new List<IssueDocument>();
    try
    {
        // build a multi searcher across the 2 indexes
        MultiSearcher mSearcher = CombineSearchers(indexDirEs, indexDirEn, indexDirHe);
        try
        {
            int iterateLast = startIndex + blockSize;

            // Boost relevance by the float value of the article_id field.
            string customScoreField = "article_id";
            FieldScoreQuery dateBooster = new FieldScoreQuery(customScoreField, FieldScoreQuery.Type.FLOAT);
            CustomScoreQuery customQuery = new CustomScoreQuery(query, dateBooster);

            TopDocs tDocs = mSearcher.Search(customQuery, 1000);

            // Clamp the page to the number of available hits.
            if (startIndex + blockSize > tDocs.TotalHits)
            {
                iterateLast = tDocs.TotalHits;
            }

            for (int i = startIndex; i < iterateLast; i++)
            {
                // NOTE(review): this fetches documents by raw index 'i' rather than
                // by tDocs.ScoreDocs[i].Doc (see the original commented-out line) —
                // confirm the intended mapping between rank and doc id.
                Document hitDoc = mSearcher.Doc(i);
                result.Add(new IssueDocument() { Id = Int32.Parse(hitDoc.Get("issue_id").ToString()) });
            }
        }
        finally
        {
            // FIX: the searcher and directories are now released even when the
            // search throws (previously they leaked on the exception path).
            mSearcher.Dispose();
            indexDirEs.Dispose();
            indexDirEn.Dispose();
            indexDirHe.Dispose();
        }
    }
    catch (Exception ex)
    {
        T.TraceError("Error MedesSearch, query '{0}'", query.ToString());
        T.TraceError(ex);
        // FIX: 'throw;' preserves the original stack trace ('throw ex;' reset it).
        throw;
    }
    return result;
}
// LUCENE-1468
// Verifies that a newly created file is visible both through SlowFileExists
// and through the directory's ListAll filter.
private void CheckDirectoryFilter(Directory dir)
{
    string fileName = "file";
    try
    {
        // Create (and immediately close) an output to materialize the file.
        dir.CreateOutput(fileName, NewIOContext(Random())).Dispose();
        Assert.IsTrue(SlowFileExists(dir, fileName));
        Assert.IsTrue(Arrays.AsList(dir.ListAll()).Contains(fileName));
    }
    finally
    {
        dir.Dispose();
    }
}