public virtual void TestReverseDateSort() { IndexSearcher searcher = new IndexSearcher(directory); // Create a Sort object. reverse is set to true. // problem occurs only with SortField.AUTO: Sort sort = new Sort(new SortField(DATE_TIME_FIELD, SortField.AUTO, true)); QueryParser queryParser = new QueryParser(TEXT_FIELD, new WhitespaceAnalyzer()); Query query = queryParser.Parse("Document"); // Execute the search and process the search results. System.String[] actualOrder = new System.String[5]; ScoreDoc[] hits = searcher.Search(query, null, 1000, sort).scoreDocs; for (int i = 0; i < hits.Length; i++) { Document document = searcher.Doc(hits[i].doc); System.String text = document.Get(TEXT_FIELD); actualOrder[i] = text; } searcher.Close(); // Set up the expected order (i.e. Document 5, 4, 3, 2, 1). System.String[] expectedOrder = new System.String[5]; expectedOrder[0] = "Document 5"; expectedOrder[1] = "Document 4"; expectedOrder[2] = "Document 3"; expectedOrder[3] = "Document 2"; expectedOrder[4] = "Document 1"; Assert.AreEqual(new System.Collections.ArrayList(expectedOrder), new System.Collections.ArrayList(actualOrder)); }
public virtual void TestOutOfOrderDocsScoringSort() { // Two Sort criteria to instantiate the multi/single comparators. Sort[] sort = new Sort[] { new Sort(SortField.FIELD_DOC), new Sort() }; bool[][] tfcOptions = new bool[][] { new bool[] { false, false, false }, new bool[] { false, false, true }, new bool[] { false, true, false }, new bool[] { false, true, true }, new bool[] { true, false, false }, new bool[] { true, false, true }, new bool[] { true, true, false }, new bool[] { true, true, true } }; string[] actualTFCClasses = new string[] { "OutOfOrderOneComparatorNonScoringCollector", "OutOfOrderOneComparatorScoringMaxScoreCollector", "OutOfOrderOneComparatorScoringNoMaxScoreCollector", "OutOfOrderOneComparatorScoringMaxScoreCollector", "OutOfOrderOneComparatorNonScoringCollector", "OutOfOrderOneComparatorScoringMaxScoreCollector", "OutOfOrderOneComparatorScoringNoMaxScoreCollector", "OutOfOrderOneComparatorScoringMaxScoreCollector" }; BooleanQuery bq = new BooleanQuery(); // Add a Query with SHOULD, since bw.Scorer() returns BooleanScorer2 // which delegates to BS if there are no mandatory clauses. bq.Add(new MatchAllDocsQuery(), Occur.SHOULD); // Set minNrShouldMatch to 1 so that BQ will not optimize rewrite to return // the clause instead of BQ. bq.MinimumNumberShouldMatch = 1; for (int i = 0; i < sort.Length; i++) { for (int j = 0; j < tfcOptions.Length; j++) { TopDocsCollector<Entry> tdc = TopFieldCollector.Create(sort[i], 10, tfcOptions[j][0], tfcOptions[j][1], tfcOptions[j][2], false); Assert.IsTrue(tdc.GetType().Name.EndsWith(actualTFCClasses[j])); @is.Search(bq, tdc); TopDocs td = tdc.TopDocs(); ScoreDoc[] sd = td.ScoreDocs; Assert.AreEqual(10, sd.Length); } } }
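// A minimal sketch of what the tfcOptions triples above encode - (fillFields, trackDocScores,
// trackMaxScore). With the last Create argument false (docs scored out of order), the scoring
// flags select the OutOfOrder* collector variants; this mirrors the test above, not new API.
TopDocsCollector<Entry> tdc = TopFieldCollector.Create(
    new Sort(SortField.FIELD_DOC), 10,
    false, // fillFields
    true,  // trackDocScores
    false, // trackMaxScore
    false  // docsScoredInOrder
);
// tdc.GetType().Name ends with "OutOfOrderOneComparatorScoringNoMaxScoreCollector"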
/// <summary> /// Builds the Query, Filter and Sort from the accumulated clauses, filters and sort fields. /// </summary> /// <param name="query"><see cref="Query"/></param> /// <param name="filter"><see cref="Filter"/></param> /// <param name="sort"><see cref="Sort"/></param> public void BuildQuery(out Query query, out Filter filter, out Sort sort) { BooleanQuery q = new BooleanQuery(); foreach (var clause in clauses) { q.Add(clause); } query = q; if (filters.Count > 0) { BooleanQuery filterQuery = new BooleanQuery(); foreach (var _filter in filters) filterQuery.Add(_filter); filter = new QueryWrapperFilter(filterQuery); } else { filter = null; } if (sortFields.Count > 0) sort = new Sort(sortFields.ToArray()); else sort = null; }
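// A minimal usage sketch for BuildQuery above (not from the original source); `builder` stands
// in for an instance of the surrounding class and `searcher` for an open IndexSearcher - both assumptions.
Query query;
Filter filter;
Sort sort;
builder.BuildQuery(out query, out filter, out sort);
TopDocs docs = (sort != null)
    ? searcher.Search(query, filter, 100, sort)  // field-sorted search
    : searcher.Search(query, filter, 100);       // default relevance order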
void LUCENENET_100_ClientSearch() { try { Lucene.Net.Search.Searchable s = (Lucene.Net.Search.Searchable)Activator.GetObject(typeof(Lucene.Net.Search.Searchable), @"tcp://localhost:" + ANYPORT + "/Searcher"); Lucene.Net.Search.MultiSearcher searcher = new Lucene.Net.Search.MultiSearcher(new Lucene.Net.Search.Searchable[] { s }); Lucene.Net.Search.Query q = new Lucene.Net.Search.TermQuery(new Lucene.Net.Index.Term("field1", "moon")); Lucene.Net.Search.Sort sort = new Lucene.Net.Search.Sort(); sort.SetSort(new Lucene.Net.Search.SortField("field2", Lucene.Net.Search.SortField.INT)); Lucene.Net.Search.TopDocs h = searcher.Search(q, null, 100, sort); if (h.ScoreDocs.Length != 2) { LUCENENET_100_Exception = new SupportClassException("Test_Search_FieldDoc Error. "); } } catch (SupportClassException ex) { LUCENENET_100_Exception = ex; } catch (Exception ex) { Console.WriteLine(ex); } finally { LUCENENET_100_testFinished = true; } }
public virtual void TestReverseDateSort() { IndexSearcher searcher = NewSearcher(Reader); Sort sort = new Sort(new SortField(DATE_TIME_FIELD, SortField.Type_e.STRING, true)); Query query = new TermQuery(new Term(TEXT_FIELD, "document")); // Execute the search and process the search results. string[] actualOrder = new string[5]; ScoreDoc[] hits = searcher.Search(query, null, 1000, sort).ScoreDocs; for (int i = 0; i < hits.Length; i++) { Document document = searcher.Doc(hits[i].Doc); string text = document.Get(TEXT_FIELD); actualOrder[i] = text; } // Set up the expected order (i.e. Document 5, 4, 3, 2, 1). string[] expectedOrder = new string[5]; expectedOrder[0] = "Document 5"; expectedOrder[1] = "Document 4"; expectedOrder[2] = "Document 3"; expectedOrder[3] = "Document 2"; expectedOrder[4] = "Document 1"; Assert.AreEqual(Arrays.AsList(expectedOrder), Arrays.AsList(actualOrder)); }
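// A hypothetical indexing side for the reverse-date test above (field values are assumptions):
// five documents whose DATE_TIME_FIELD grows with the document number, so a descending STRING
// sort yields "Document 5" .. "Document 1". `writer` is an assumed open RandomIndexWriter.
for (int i = 1; i <= 5; i++)
{
    Document doc = new Document();
    doc.Add(NewTextField(TEXT_FIELD, "Document " + i, Field.Store.YES));
    // A lexicographically sortable timestamp string:
    doc.Add(NewStringField(DATE_TIME_FIELD, "2005-10-0" + i + " 09:00:00", Field.Store.YES));
    writer.AddDocument(doc);
}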
public SearchResult Search(Query searchQuery, int maxHits, string NameField = "Name", Sort sort = null) { SearchResult result = new SearchResult(); TopDocs hits = null; result.SearchResultItems = new List<SearchResultItem>(); if (sort != null) { hits = seacher.Search(searchQuery, null, maxHits, sort); } else { hits = seacher.Search(searchQuery, null, maxHits); } for (int i = 0; i < hits.ScoreDocs.Length; i++) { Document doctemp = seacher.Doc(hits.ScoreDocs[i].Doc); result.SearchResultItems.Add(new SearchResultItem { Id = int.Parse(doctemp.Get("Id")), Name = doctemp.Get(NameField), Type = (DocumentType) Enum.Parse(typeof(DocumentType), doctemp.Get("Type")) }); } result.Hits = hits.ScoreDocs.Length; return result; }
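// A hypothetical caller for the Search wrapper above; the query, field values, and the `index`
// instance are illustrative assumptions, not part of the original source.
var sort = new Sort(new SortField("Name", SortField.STRING));
SearchResult result = index.Search(new TermQuery(new Term("Type", "Article")), 50, "Name", sort);
foreach (var item in result.SearchResultItems)
    Console.WriteLine("{0}: {1} ({2})", item.Id, item.Name, item.Type);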
public virtual void TestString() { Directory dir = NewDirectory(); RandomIndexWriter writer = new RandomIndexWriter(Random(), dir); Document doc = new Document(); doc.Add(NewStringField("value", "foo", Field.Store.YES)); writer.AddDocument(doc); doc = new Document(); doc.Add(NewStringField("value", "bar", Field.Store.YES)); writer.AddDocument(doc); IndexReader ir = writer.Reader; writer.Dispose(); IndexSearcher searcher = NewSearcher(ir); Sort sort = new Sort(new SortField("value", SortField.Type_e.STRING)); TopDocs td = searcher.Search(new MatchAllDocsQuery(), 10, sort); Assert.AreEqual(2, td.TotalHits); // 'bar' comes before 'foo' Assert.AreEqual("bar", searcher.Doc(td.ScoreDocs[0].Doc).Get("value")); Assert.AreEqual("foo", searcher.Doc(td.ScoreDocs[1].Doc).Get("value")); ir.Dispose(); dir.Dispose(); }
public void TestReverse() { Directory dir = NewDirectory(); RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, Similarity, TimeZone); Document doc = new Document(); doc.Add(NewStringField("value", "foo", Field.Store.NO)); doc.Add(NewStringField("value", "bar", Field.Store.NO)); doc.Add(NewStringField("id", "1", Field.Store.YES)); writer.AddDocument(doc); doc = new Document(); doc.Add(NewStringField("value", "baz", Field.Store.NO)); doc.Add(NewStringField("id", "2", Field.Store.YES)); writer.AddDocument(doc); IndexReader ir = writer.Reader; writer.Dispose(); IndexSearcher searcher = NewSearcher(ir); Sort sort = new Sort(new SortedSetSortField("value", true)); TopDocs td = searcher.Search(new MatchAllDocsQuery(), 10, sort); assertEquals(2, td.TotalHits); // 'bar' comes before 'baz' assertEquals("2", searcher.Doc(td.ScoreDocs[0].Doc).Get("id")); assertEquals("1", searcher.Doc(td.ScoreDocs[1].Doc).Get("id")); ir.Dispose(); dir.Dispose(); }
public SearchHits Search(PreparedQuery query, Sort sort, int maxResults = DefaultMaximumResults) { #if SC62 || SC64 || SC66 return new SearchHits(Searcher.Search(query.Query, sort)); #else return new SearchHits(Searcher.Search(query.Query, null, maxResults, sort), Searcher.IndexReader); #endif }
internal Hits(Searcher s, Query q, Filter f, Sort o) { weight = q.Weight(s); searcher = s; filter = f; sort = o; GetMoreDocs(50); // retrieve 100 initially (GetMoreDocs doubles the requested count) }
public SearchHits Search(PreparedQuery query, Sort sort, int maxResults = DefaultMaximumResults) { #if FEATURE_CONTENT_SEARCH return new SearchHits(Searcher.Search(query.Query, null, maxResults, sort), Searcher.IndexReader); #else return new SearchHits(Searcher.Search(query.Query, sort)); #endif }
public virtual void BuildSort() { if (SortFields.Count == 0) { return; } CurrentSort = new Sort(SortFields.ToArray()); }
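// A hypothetical driver for BuildSort above; SortFields and CurrentSort are assumed to be
// members of the same builder class, and the 3.x-style SortField constants are an assumption.
SortFields.Add(new SortField("lastName", SortField.STRING));  // ascending
SortFields.Add(new SortField("age", SortField.INT, true));    // descending
BuildSort(); // CurrentSort now orders by lastName asc, then age desc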
/// <summary> /// Performs a search in the WeBlog search index, with a sort /// </summary> /// <typeparam name="T">The type of the items to be returned from the search</typeparam> /// <param name="query">The query to execute</param> /// <param name="maximumResults">The maximum number of results</param> /// <param name="func">Callback invoked for each found item to add it to the result list</param> /// <param name="sortField">The index field to sort on</param> /// <param name="reverseSort">Whether to reverse the sort order</param> /// <returns>An array of search results, or an empty array if there was an issue</returns> public T[] Execute<T>(QueryBase query, int maximumResults, Action<List<T>, Item> func, string sortField, bool reverseSort) { if (query is CombinedQuery) { // Add on database (query as CombinedQuery).Add(new FieldQuery(Sitecore.Search.BuiltinFields.Database, Sitecore.Context.Database.Name), QueryOccurance.Must); // Add language var langCode = DatabaseCrawler.TransformLanguageCode(Sitecore.Context.Language.Name); (query as CombinedQuery).Add(new FieldQuery(Constants.Index.Fields.Language, langCode), QueryOccurance.Must); } // I have to use Action<T> because the compiler can't work out how to use implicit operators when T is one of the items classes (generated by CIG) var items = new List<T>(); if (maximumResults > 0) { var index = GetSearchIndex(); if (index != null && index.GetDocumentCount() > 0) { using (var searchContext = new SortableIndexSearchContext(index)) { SearchHits hits; if (!string.IsNullOrEmpty(sortField)) { #if FEATURE_CONTENT_SEARCH var sort = new Lucene.Net.Search.Sort(new SortField(sortField, SortField.STRING, reverseSort)); #else var sort = new Lucene.Net.Search.Sort(sortField, reverseSort); #endif hits = searchContext.Search(query, sort); } else { hits = searchContext.Search(query); } if (hits != null) { foreach (var result in hits.FetchResults(0, maximumResults)) { var item = SearchManager.GetObject(result); if (item != null) { func(items, (Item)item); } } } } } else { Log.Warn("WeBlog index was not found or didn't contain any documents", this); } } return(items.ToArray()); }
public SearchHits Search(Query query, Sort sort) { // Fix for #384289. The previous code line returned a PreparedQuery. PreparedQuery turned every query into a // full-text query - for example, _name:test became _name:test* - which produced wrong search results, so the // PreparedQuery part was removed. If problems show up during usage, PreparedQuery should be reinstated // and different logic created for this part. // return Search(query, SearchContext.Empty, sort); - this returned PreparedQuery. return new SearchHits(Searcher.Search(query, sort)); }
private Hits GetHitsByField(string fieldname, Analyzer analyzer, string searchStr, IndexSearcher searcher) { MultiFieldQueryParser parser = new MultiFieldQueryParser(Lucene.Net.Util.Version.LUCENE_CURRENT, new string[] { fieldname }, analyzer); Query query = parser.Parse(searchStr); Sort sort = new Sort(); SortField f = new SortField("publish_time", SortField.STRING, true); // sort by the publish_time field; true means descending sort.SetSort(f); Hits hits = searcher.Search(query, sort); return hits; }
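// Hits is deprecated (and removed in later Lucene.NET versions); a sketch of the TopDocs-based
// equivalent of GetHitsByField, assuming the same 3.x API - `n` (page size) is an added parameter.
private TopDocs GetTopDocsByField(string fieldname, Analyzer analyzer, string searchStr, IndexSearcher searcher, int n)
{
    var parser = new MultiFieldQueryParser(Lucene.Net.Util.Version.LUCENE_CURRENT, new[] { fieldname }, analyzer);
    Query query = parser.Parse(searchStr);
    var sort = new Sort(new SortField("publish_time", SortField.STRING, true)); // descending
    return searcher.Search(query, null, n, sort); // TopDocs instead of Hits
}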
internal Hits(Searcher s, Query q, Filter f, Sort o) { weight = q.Weight(s); searcher = s; filter = f; sort = o; nDeletions = CountDeletions(s); GetMoreDocs(50); // retrieve 100 initially (GetMoreDocs doubles the requested count) lengthAtStart = length; }
public PreviousSearchState(Query query, Sort sort, ScoreDoc searchAfterLocal, ScoreDoc searchAfterShard, long[] versions, int numHitsPaged) { this.Versions = (long[])versions.Clone(); this.SearchAfterLocal = searchAfterLocal; this.SearchAfterShard = searchAfterShard; this.Sort = sort; this.Query = query; this.NumHitsPaged = numHitsPaged; SearchTimeNanos = TimeHelper.NanoTime(); }
public void DisplayResults(Query query, Sort sort) { using(var indexSearcher = new IndexSearcher(directory, true)) { indexSearcher.SetDefaultFieldSortScoring(true, false); var results = indexSearcher.Search(query, null, 20, sort); Console.WriteLine("\nResults for: {0} sorted by {1}", query, sort); foreach (var scoreDoc in results.ScoreDocs) { Console.WriteLine("doc {0} score {1:f3}", scoreDoc.Doc, scoreDoc.Score); } Console.WriteLine(); } }
public Item[] Search(string index, string[] areas, int maxItems) { List<Item> searchResults = new List<Item>(); var searchIndex = Sitecore.Search.SearchManager.GetIndex(index); using (IndexSearchContext context = searchIndex.CreateSearchContext()) { /* First we need to create a BooleanQuery so we can concatenate the different queries */ BooleanQuery completeQuery = new BooleanQuery(); //completeQuery.SetMinimumNumberShouldMatch(2); /* search only in current language */ completeQuery.Add(new TermQuery(new Lucene.Net.Index.Term("_language", Sitecore.Context.Language.Name)), BooleanClause.Occur.MUST); Sort sort = new Sort(new SortField("publish date", true)); try { Hits hits = context.Searcher.Search(completeQuery, sort); SearchHits searchHits = new SearchHits(hits); // Assuming no more than every second result is a null item, maxItems is multiplied by 2 var results = searchHits.FetchResults(0, maxItems * 2); int count = 0; foreach (SearchResult result in results) { try { Item item = result.GetObject<Item>(); if (item != null) { count++; searchResults.Add(item); } } catch (Exception e) { Log.Error("error while converting lucene search hit in NewsSearcher.cs", e, searchIndex); } if (count >= maxItems) break; } } catch (Exception e) { Log.Error("error while performing a lucene search in NewsSearcher.cs", e, searchIndex); } } return searchResults.ToArray(); }
public void BeforeClassSorterUtilTest() { // only read the values of the undeleted documents, since after addIndexes, // the deleted ones will be dropped from the index. Bits liveDocs = reader.LiveDocs; List<int> values = new List<int>(); for (int i = 0; i < reader.MaxDoc; i++) { if (liveDocs == null || liveDocs.Get(i)) { values.Add(int.Parse(reader.Document(i).Get(ID_FIELD), CultureInfo.InvariantCulture)); } } int idx = Random().nextInt(SORT.Length); Sort sorter = SORT[idx]; if (idx == 1) { // reverse doc sort values.Reverse(); } else { values.Sort(); if (Random().nextBoolean()) { sorter = new Sort(new SortField(NUMERIC_DV_FIELD, SortField.Type_e.LONG, true)); // descending values.Reverse(); } } sortedValues = values.ToArray(); if (VERBOSE) { Console.WriteLine("sortedValues: " + Arrays.ToString(sortedValues)); Console.WriteLine("Sorter: " + sorter); } Directory target = NewDirectory(); using (IndexWriter writer = new IndexWriter(target, NewIndexWriterConfig(TEST_VERSION_CURRENT, null))) { using (reader = SortingAtomicReader.Wrap(reader, sorter)) { writer.AddIndexes(reader); } } dir.Dispose(); // CheckIndex the target directory dir = target; TestUtil.CheckIndex(dir); // set reader for tests reader = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir)); assertFalse("index should not have deletions", reader.HasDeletions); }
public LuceneQueryStatistics(Query query, Filter filter, Sort sort, TimeSpan elapsedPreparationTime, TimeSpan elapsedSearchTime, TimeSpan elapsedRetrievalTime, int totalHits, int skippedHits, int retrievedDocuments) { this.totalHits = totalHits; this.query = query; this.filter = filter; this.sort = sort; this.elapsedPreparationTime = elapsedPreparationTime; this.elapsedSearchTime = elapsedSearchTime; this.elapsedRetrievalTime = elapsedRetrievalTime; this.skippedHits = skippedHits; this.retrievedDocuments = retrievedDocuments; }
protected void Application_Start() { AreaRegistration.RegisterAllAreas(); RegisterGlobalFilters(GlobalFilters.Filters); RegisterRoutes(RouteTable.Routes); CreateLuceneIndexFolder(); string rebuildEvery = ConfigurationManager.AppSettings["RebuildLuceneIndexEveryXDays"]; int rebuildEveryXDays = Convert.ToInt32(rebuildEvery); string path = HttpContext.Current.Server.MapPath("~/App_Data/LuceneIndex"); try { FSDirectory fsd = FSDirectory.Open(new DirectoryInfo(path)); var indexSearcher = new IndexSearcher(fsd, true); var termQuery = new MatchAllDocsQuery(); var sort = new Sort(new SortField(LuceneIndexFieldMap.LastModifiedField, SortField.STRING)); TopDocs topDocs = indexSearcher.Search(termQuery, null, 1, sort); DateTime lastUpdateDate = DateTime.MinValue; foreach (ScoreDoc match in topDocs.ScoreDocs) { Document doc = indexSearcher.Doc(match.doc); string lastUpdate = doc.Get(LuceneIndexFieldMap.LastModifiedField); if (lastUpdate != null) { lastUpdateDate = DateTools.StringToDate(lastUpdate); Debug.WriteLine(lastUpdate); } } TimeSpan ts = DateTime.Now - lastUpdateDate; if (ts.TotalDays > rebuildEveryXDays) { var generator = new DataGenerator(path); generator.WriteIndex(); } } catch (FileNotFoundException exception) { CreateLuceneIndexFolder(); var generator = new DataGenerator(path); generator.WriteIndex(); } }
public override int NextBatchId(string processName) { if (!TflBatchRecordsExist(processName)) { return 1; } var searcher = LuceneSearcherFactory.Create(this, TflBatchEntity(processName)); var query = new TermQuery(new Term("process", processName)); var sort = new Sort(new SortField("id", SortField.INT, true)); var hits = searcher.Search(query, null, 1, sort); if (hits.TotalHits <= 0) return 1; var doc = searcher.Doc(hits.ScoreDocs[0].Doc); // read the top hit (the highest id), not internal doc 0 return Convert.ToInt32(doc.GetField("id").StringValue) + 1; }
public List<LuceneResult> Search(Query query, Sort sort) { var searcher = new IndexSearcher(_rd); var collector = TopFieldCollector.create(sort ?? new Sort(), searcher.MaxDoc(), false, true, true, sort == null); searcher.Search(query, collector); var docs = collector.TopDocs(); var maxscore = docs.GetMaxScore(); // Note: cheap way to avoid div/zero if(maxscore == 0) { maxscore = 1; } return (from hit in docs.scoreDocs let score = hit.score / maxscore where score >= 0.001f select new LuceneResult(searcher.Doc(hit.doc), score)).ToList(); }
public virtual void TestHugeN() { TaskScheduler service = new LimitedConcurrencyLevelTaskScheduler(4); IndexSearcher[] searchers = new IndexSearcher[] { new IndexSearcher(Reader), new IndexSearcher(Reader, service) }; Query[] queries = new Query[] { new MatchAllDocsQuery(), new TermQuery(new Term("field", "1")) }; Sort[] sorts = new Sort[] { null, new Sort(new SortField("field2", SortField.Type_e.STRING)) }; Filter[] filters = new Filter[] { null, new QueryWrapperFilter(new TermQuery(new Term("field2", "true"))) }; ScoreDoc[] afters = new ScoreDoc[] { null, new FieldDoc(0, 0f, new object[] { new BytesRef("boo!") }) }; foreach (IndexSearcher searcher in searchers) { foreach (ScoreDoc after in afters) { foreach (Query query in queries) { foreach (Sort sort in sorts) { foreach (Filter filter in filters) { searcher.Search(query, int.MaxValue); searcher.SearchAfter(after, query, int.MaxValue); searcher.Search(query, filter, int.MaxValue); searcher.SearchAfter(after, query, filter, int.MaxValue); if (sort != null) { searcher.Search(query, int.MaxValue, sort); searcher.Search(query, filter, int.MaxValue, sort); searcher.Search(query, filter, int.MaxValue, sort, true, true); searcher.Search(query, filter, int.MaxValue, sort, true, false); searcher.Search(query, filter, int.MaxValue, sort, false, true); searcher.Search(query, filter, int.MaxValue, sort, false, false); searcher.SearchAfter(after, query, filter, int.MaxValue, sort); searcher.SearchAfter(after, query, filter, int.MaxValue, sort, true, true); searcher.SearchAfter(after, query, filter, int.MaxValue, sort, true, false); searcher.SearchAfter(after, query, filter, int.MaxValue, sort, false, true); searcher.SearchAfter(after, query, filter, int.MaxValue, sort, false, false); } } } } } } TestUtil.ShutdownExecutorService(service); }
private void RunTest(IndexSearcher searcher, bool reversed) { BooleanQuery newq = new BooleanQuery(false); TermQuery query = new TermQuery(new Term("title", "ipod")); newq.Add(query, BooleanClause.Occur.SHOULD); newq.Add(GetElevatedQuery(new System.String[]{"id", "a", "id", "x"}), BooleanClause.Occur.SHOULD); Sort sort = new Sort(new SortField[]{new SortField("id", new ElevationComparatorSource(priority), false), new SortField(null, SortField.SCORE, reversed)}); TopDocsCollector topCollector = TopFieldCollector.create(sort, 50, false, true, true, true); searcher.Search(newq, null, topCollector); TopDocs topDocs = topCollector.TopDocs(0, 10); int nDocsReturned = topDocs.ScoreDocs.Length; Assert.AreEqual(4, nDocsReturned); // 0 & 3 were elevated Assert.AreEqual(0, topDocs.ScoreDocs[0].doc); Assert.AreEqual(3, topDocs.ScoreDocs[1].doc); if (reversed) { Assert.AreEqual(2, topDocs.ScoreDocs[2].doc); Assert.AreEqual(1, topDocs.ScoreDocs[3].doc); } else { Assert.AreEqual(1, topDocs.ScoreDocs[2].doc); Assert.AreEqual(2, topDocs.ScoreDocs[3].doc); } /* for (int i = 0; i < nDocsReturned; i++) { ScoreDoc scoreDoc = topDocs.scoreDocs[i]; ids[i] = scoreDoc.doc; scores[i] = scoreDoc.score; documents[i] = searcher.doc(ids[i]); System.out.println("ids[i] = " + ids[i]); System.out.println("documents[i] = " + documents[i]); System.out.println("scores[i] = " + scores[i]); } */ }
private TopDocs ExecuteQuery(IndexSearcher searcher, string[] sortFields, Query q, int size) { TopDocs topDocs; if (sortFields != null && sortFields.Length > 0) { var sort = new Sort(sortFields.Select(field => { var desc = field.StartsWith("-"); if (desc) field = field.Substring(1); return new SortField(field, SortField.STRING, desc); }).ToArray()); topDocs = searcher.Search(q, null, size, sort); } else { topDocs = searcher.Search(q, null, size); } return topDocs; }
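// Illustrative inputs for ExecuteQuery above (field names are assumptions): a leading '-' marks
// a field as descending, anything else sorts ascending.
TopDocs topDocs = ExecuteQuery(searcher, new[] { "-publishDate", "title" }, query, 25);
// equivalent to: new Sort(new SortField("publishDate", SortField.STRING, true),
//                         new SortField("title", SortField.STRING, false))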
public NAppIndexReader(Configuration.ConfigManager config) { // TODO: Use Central place to retrieve default setting of Index Full Path indexFullPath = config.GetSetting(SettingKeys.Index_Directory, System.IO.Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "Index")); indexFullPath = System.IO.Path.GetFullPath(indexFullPath); directory = FSDirectory.Open(new System.IO.DirectoryInfo(indexFullPath)); analyzer = new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_29); var sortFields = new SortField[2]; sortFields[0] = new SortField(FieldKeys.LogName, SortField.STRING); sortFields[1] = new SortField(FieldKeys.LogID, SortField.LONG); logNameIDSort = new Sort(sortFields); textQueryFields = new string[]{ FieldKeys.Service, FieldKeys.Method, FieldKeys.Detail_Desc, FieldKeys.Detail_Parm, }; }
internal static MergePolicy NewSortingMergePolicy(Sort sort) { // create a MP with a low merge factor so that many merges happen MergePolicy mp; if (Random().nextBoolean()) { TieredMergePolicy tmp = NewTieredMergePolicy(Random()); int numSegs = TestUtil.NextInt(Random(), 3, 5); tmp.SetSegmentsPerTier(numSegs); tmp.SetMaxMergeAtOnce(TestUtil.NextInt(Random(), 2, numSegs)); mp = tmp; } else { LogMergePolicy lmp = NewLogMergePolicy(Random()); lmp.MergeFactor = TestUtil.NextInt(Random(), 3, 5); mp = lmp; } // wrap it with a sorting mp return new SortingMergePolicy(mp, sort); }
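// A sketch of wiring NewSortingMergePolicy into an IndexWriterConfig so that merged segments
// come out in sorted order; helper names mirror the test framework used above, and the
// "timestamp" field is an assumption.
Sort sort = new Sort(new SortField("timestamp", SortField.Type_e.LONG));
IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()));
conf.SetMergePolicy(NewSortingMergePolicy(sort));
using (IndexWriter writer = new IndexWriter(dir, conf))
{
    // AddDocument / AddIndexes as usual; merges now keep segments sorted by timestamp.
}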
/// <summary> /// Searches text. Called when the "Search text" button is pressed. /// Expects the CreateSearchIndex method to be run first. /// </summary> private bool SearchText() { // Get the search index SearchIndexInfo index = SearchIndexInfoProvider.GetSearchIndexInfo("MyNewIndex"); int numberOfResults = 0; if (index != null) { // Set the properties string searchText = "home"; string path = "/%"; string classNames = ""; string cultureCode = "EN-US"; string defaultCulture = CultureHelper.DefaultCulture.IetfLanguageTag; Lucene.Net.Search.Sort sort = SearchHelper.GetSort("##SCORE##"); bool combineWithDefaultCulture = false; bool checkPermissions = false; bool searchInAttachments = false; string searchIndexes = index.IndexName; int displayResults = 100; int startingPosition = 0; int numberOfProcessedResults = 100; UserInfo userInfo = CMSContext.CurrentUser; string attachmentWhere = ""; string attachmentOrderBy = ""; // Get search results DataSet ds = SearchHelper.Search(searchText, sort, path, classNames, cultureCode, defaultCulture, combineWithDefaultCulture, checkPermissions, searchInAttachments, searchIndexes, displayResults, startingPosition, numberOfProcessedResults, userInfo, out numberOfResults, attachmentWhere, attachmentOrderBy); // If found at least one item if (numberOfResults > 0) { return(true); } } return(false); }
public virtual void TestSortWithoutFillFields() { // There was previously a bug in TopFieldCollector when fillFields was set // to false - the same doc and score were set on every entry of the ScoreDoc[] array. This test // asserts that if fillFields is false, the documents are still set properly. It // does not use the Searcher's default search methods (with Sort), since those all set // fillFields to true. Sort[] sort = new Sort[] { new Sort(SortField.FIELD_DOC), new Sort() }; for (int i = 0; i < sort.Length; i++) { Query q = new MatchAllDocsQuery(); TopDocsCollector<Entry> tdc = TopFieldCollector.Create(sort[i], 10, false, false, false, true); @is.Search(q, tdc); ScoreDoc[] sd = tdc.TopDocs().ScoreDocs; for (int j = 1; j < sd.Length; j++) { Assert.IsTrue(sd[j].Doc != sd[j - 1].Doc); } } }
public static void BeforeClassSortingAtomicReaderTest() { // sort the index by id (as integer, in NUMERIC_DV_FIELD) Sort sort = new Sort(new SortField(NUMERIC_DV_FIELD, SortField.Type_e.INT)); Sorter.DocMap docMap = new Sorter(sort).Sort(reader); // Sorter.compute also sorts the values NumericDocValues dv = reader.GetNumericDocValues(NUMERIC_DV_FIELD); sortedValues = new int[reader.MaxDoc]; for (int i = 0; i < reader.MaxDoc; ++i) { sortedValues[docMap.OldToNew(i)] = (int)dv.Get(i); } if (VERBOSE) { Console.WriteLine("docMap: " + docMap); Console.WriteLine("sortedValues: " + Arrays.ToString(sortedValues)); } // sort the index by id (as integer, in NUMERIC_DV_FIELD) reader = SortingAtomicReader.Wrap(reader, sort); if (VERBOSE) { Console.WriteLine("mapped-deleted-docs: "); Bits mappedLiveDocs = reader.LiveDocs; for (int i = 0; i < mappedLiveDocs.Length(); i++) { if (!mappedLiveDocs.Get(i)) { Console.WriteLine(i + " "); } } Console.WriteLine(); } TestUtil.CheckReader(reader); }
void LUCENENET_100_ClientSearch() { try { Lucene.Net.Search.Searchable s = (Lucene.Net.Search.Searchable)Activator.GetObject(typeof(Lucene.Net.Search.Searchable), @"tcp://localhost:" + ANYPORT + "/Searcher"); Lucene.Net.Search.MultiSearcher searcher = new Lucene.Net.Search.MultiSearcher(new Lucene.Net.Search.Searchable[] { s }); Lucene.Net.Search.Query q = new Lucene.Net.Search.TermQuery(new Lucene.Net.Index.Term("field1", "moon")); Lucene.Net.Search.Sort sort = new Lucene.Net.Search.Sort(); sort.SetSort(new Lucene.Net.Search.SortField("field2", Lucene.Net.Search.SortField.INT)); Lucene.Net.Search.TopDocs h = searcher.Search(q, null, 100, sort); } catch (Exception ex) { LUCENENET_100_Exception = ex; } finally { LUCENENET_100_testFinished = true; } }
public virtual TopFieldDocs LocalSearch(Query query, int numHits, Sort sort) { return(base.Search(query, numHits, sort)); }
/// <summary> Creates a new {@link TopFieldCollector} from the given /// arguments. /// /// <p/><b>NOTE</b>: The instances returned by this method /// pre-allocate a full array of length /// <code>numHits</code>. /// /// </summary> /// <param name="sort">the sort criteria (SortFields). /// </param> /// <param name="numHits">the number of results to collect. /// </param> /// <param name="fillFields">specifies whether the actual field values should be returned on /// the results (FieldDoc). /// </param> /// <param name="trackDocScores">specifies whether document scores should be tracked and set on the /// results. Note that if set to false, then the results' scores will /// be set to Float.NaN. Setting this to true affects performance, as /// it incurs the score computation on each competitive result. /// Therefore if document scores are not required by the application, /// it is recommended to set it to false. /// </param> /// <param name="trackMaxScore">specifies whether the query's maxScore should be tracked and set /// on the resulting {@link TopDocs}. Note that if set to false, /// {@link TopDocs#GetMaxScore()} returns Float.NaN. Setting this to /// true affects performance as it incurs the score computation on /// each result. Also, setting this true automatically sets /// <code>trackDocScores</code> to true as well. /// </param> /// <param name="docsScoredInOrder">specifies whether documents are scored in doc Id order or not by /// the given {@link Scorer} in {@link #SetScorer(Scorer)}. /// </param> /// <returns> a {@link TopFieldCollector} instance which will sort the results by /// the sort criteria. /// </returns> /// <throws> IOException </throws> public static TopFieldCollector Create(Sort sort, int numHits, bool fillFields, bool trackDocScores, bool trackMaxScore, bool docsScoredInOrder) { if (sort.fields.Length == 0) { throw new System.ArgumentException("Sort must contain at least one field"); } FieldValueHitQueue queue = FieldValueHitQueue.Create(sort.fields, numHits); if (queue.GetComparators().Length == 1) { if (docsScoredInOrder) { if (trackMaxScore) { return(new OneComparatorScoringMaxScoreCollector(queue, numHits, fillFields)); } else if (trackDocScores) { return(new OneComparatorScoringNoMaxScoreCollector(queue, numHits, fillFields)); } else { return(new OneComparatorNonScoringCollector(queue, numHits, fillFields)); } } else { if (trackMaxScore) { return(new OutOfOrderOneComparatorScoringMaxScoreCollector(queue, numHits, fillFields)); } else if (trackDocScores) { return(new OutOfOrderOneComparatorScoringNoMaxScoreCollector(queue, numHits, fillFields)); } else { return(new OutOfOrderOneComparatorNonScoringCollector(queue, numHits, fillFields)); } } } // multiple comparators. if (docsScoredInOrder) { if (trackMaxScore) { return(new MultiComparatorScoringMaxScoreCollector(queue, numHits, fillFields)); } else if (trackDocScores) { return(new MultiComparatorScoringNoMaxScoreCollector(queue, numHits, fillFields)); } else { return(new MultiComparatorNonScoringCollector(queue, numHits, fillFields)); } } else { if (trackMaxScore) { return(new OutOfOrderMultiComparatorScoringMaxScoreCollector(queue, numHits, fillFields)); } else if (trackDocScores) { return(new OutOfOrderMultiComparatorScoringNoMaxScoreCollector(queue, numHits, fillFields)); } else { return(new OutOfOrderMultiComparatorNonScoringCollector(queue, numHits, fillFields)); } } }
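// A typical call into Create above (3.x-style API, a sketch): collect the top 10 documents
// sorted by a string field, tracking per-document scores but not the maximum score.
Sort sort = new Sort(new SortField("title", SortField.STRING));
TopFieldCollector collector = TopFieldCollector.Create(sort, 10,
    true,  // fillFields: populate FieldDoc.fields on the results
    true,  // trackDocScores: compute a score for each hit
    false, // trackMaxScore: TopDocs.GetMaxScore() will be NaN
    true); // docsScoredInOrder
searcher.Search(query, collector);
TopDocs topDocs = collector.TopDocs();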
internal virtual void TestSort(bool useFrom, bool VERBOSE) { IndexReader reader = null; Directory dir = null; if (!VERBOSE) { Console.WriteLine("Verbosity disabled. Enable manually if needed."); } int numDocs = VERBOSE ? AtLeast(50) : AtLeast(1000); //final int numDocs = AtLeast(50); string[] tokens = new string[] { "a", "b", "c", "d", "e" }; if (VERBOSE) { Console.WriteLine("TEST: make index"); } { dir = NewDirectory(); RandomIndexWriter w = new RandomIndexWriter(Random, dir); // w.setDoRandomForceMerge(false); // w.w.getConfig().SetMaxBufferedDocs(AtLeast(100)); string[] content = new string[AtLeast(20)]; for (int contentIDX = 0; contentIDX < content.Length; contentIDX++) { StringBuilder sb = new StringBuilder(); int numTokens = TestUtil.NextInt32(Random, 1, 10); for (int tokenIDX = 0; tokenIDX < numTokens; tokenIDX++) { sb.Append(tokens[Random.Next(tokens.Length)]).Append(' '); } content[contentIDX] = sb.ToString(); } for (int docIDX = 0; docIDX < numDocs; docIDX++) { Document doc = new Document(); doc.Add(NewStringField("string", TestUtil.RandomRealisticUnicodeString(Random), Field.Store.NO)); doc.Add(NewTextField("text", content[Random.Next(content.Length)], Field.Store.NO)); doc.Add(new SingleField("float", (float)Random.NextDouble(), Field.Store.NO)); int intValue; if (Random.Next(100) == 17) { intValue = int.MinValue; } else if (Random.Next(100) == 17) { intValue = int.MaxValue; } else { intValue = Random.Next(); } doc.Add(new Int32Field("int", intValue, Field.Store.NO)); if (VERBOSE) { Console.WriteLine(" doc=" + doc); } w.AddDocument(doc); } reader = w.GetReader(); w.Dispose(); } // NOTE: sometimes reader has just one segment, which is // important to test IndexSearcher searcher = NewSearcher(reader); IndexReaderContext ctx = searcher.TopReaderContext; ShardSearcher[] subSearchers; int[] docStarts; if (ctx is AtomicReaderContext) { subSearchers = new ShardSearcher[1]; docStarts = new int[1]; subSearchers[0] = new ShardSearcher((AtomicReaderContext)ctx, ctx); docStarts[0] = 0; } else { CompositeReaderContext compCTX = (CompositeReaderContext)ctx; int size = compCTX.Leaves.Count; subSearchers = new ShardSearcher[size]; docStarts = new int[size]; int docBase = 0; for (int searcherIDX = 0; searcherIDX < subSearchers.Length; searcherIDX++) { AtomicReaderContext leave = compCTX.Leaves[searcherIDX]; subSearchers[searcherIDX] = new ShardSearcher(leave, compCTX); docStarts[searcherIDX] = docBase; docBase += leave.Reader.MaxDoc; } } IList <SortField> sortFields = new JCG.List <SortField>(); sortFields.Add(new SortField("string", SortFieldType.STRING, true)); sortFields.Add(new SortField("string", SortFieldType.STRING, false)); sortFields.Add(new SortField("int", SortFieldType.INT32, true)); sortFields.Add(new SortField("int", SortFieldType.INT32, false)); sortFields.Add(new SortField("float", SortFieldType.SINGLE, true)); sortFields.Add(new SortField("float", SortFieldType.SINGLE, false)); sortFields.Add(new SortField(null, SortFieldType.SCORE, true)); sortFields.Add(new SortField(null, SortFieldType.SCORE, false)); sortFields.Add(new SortField(null, SortFieldType.DOC, true)); sortFields.Add(new SortField(null, SortFieldType.DOC, false)); for (int iter = 0; iter < 1000 * RandomMultiplier; iter++) { // TODO: custom FieldComp... 
Query query = new TermQuery(new Term("text", tokens[Random.Next(tokens.Length)])); Sort sort; if (Random.Next(10) == 4) { // Sort by score sort = null; } else { SortField[] randomSortFields = new SortField[TestUtil.NextInt32(Random, 1, 3)]; for (int sortIDX = 0; sortIDX < randomSortFields.Length; sortIDX++) { randomSortFields[sortIDX] = sortFields[Random.Next(sortFields.Count)]; } sort = new Sort(randomSortFields); } int numHits = TestUtil.NextInt32(Random, 1, numDocs + 5); //final int numHits = 5; if (VERBOSE) { Console.WriteLine("TEST: search query=" + query + " sort=" + sort + " numHits=" + numHits); } int from = -1; int size = -1; // First search on whole index: TopDocs topHits; if (sort is null) { if (useFrom) { TopScoreDocCollector c = TopScoreDocCollector.Create(numHits, Random.NextBoolean()); searcher.Search(query, c); from = TestUtil.NextInt32(Random, 0, numHits - 1); size = numHits - from; TopDocs tempTopHits = c.GetTopDocs(); if (from < tempTopHits.ScoreDocs.Length) { // Can't use TopDocs#topDocs(start, howMany), since it has different behaviour when start >= hitCount // than TopDocs#merge currently has ScoreDoc[] newScoreDocs = new ScoreDoc[Math.Min(size, tempTopHits.ScoreDocs.Length - from)]; Array.Copy(tempTopHits.ScoreDocs, from, newScoreDocs, 0, newScoreDocs.Length); tempTopHits.ScoreDocs = newScoreDocs; topHits = tempTopHits; } else { topHits = new TopDocs(tempTopHits.TotalHits, new ScoreDoc[0], tempTopHits.MaxScore); } } else { topHits = searcher.Search(query, numHits); } } else { TopFieldCollector c = TopFieldCollector.Create(sort, numHits, true, true, true, Random.NextBoolean()); searcher.Search(query, c); if (useFrom) { from = TestUtil.NextInt32(Random, 0, numHits - 1); size = numHits - from; TopDocs tempTopHits = c.GetTopDocs(); if (from < tempTopHits.ScoreDocs.Length) { // Can't use TopDocs#topDocs(start, howMany), since it has different behaviour when start >= hitCount // than TopDocs#merge currently has ScoreDoc[] newScoreDocs = new ScoreDoc[Math.Min(size, tempTopHits.ScoreDocs.Length - from)]; Array.Copy(tempTopHits.ScoreDocs, from, newScoreDocs, 0, newScoreDocs.Length); tempTopHits.ScoreDocs = newScoreDocs; topHits = tempTopHits; } else { topHits = new TopDocs(tempTopHits.TotalHits, new ScoreDoc[0], tempTopHits.MaxScore); } } else { topHits = c.GetTopDocs(0, numHits); } } if (VERBOSE) { if (useFrom) { Console.WriteLine("from=" + from + " size=" + size); } Console.WriteLine(" top search: " + topHits.TotalHits + " totalHits; hits=" + (topHits.ScoreDocs is null ? "null" : topHits.ScoreDocs.Length + " maxScore=" + topHits.MaxScore)); if (topHits.ScoreDocs != null) { for (int hitIDX = 0; hitIDX < topHits.ScoreDocs.Length; hitIDX++) { ScoreDoc sd = topHits.ScoreDocs[hitIDX]; Console.WriteLine(" doc=" + sd.Doc + " score=" + sd.Score); } } } // ... then all shards: Weight w = searcher.CreateNormalizedWeight(query); TopDocs[] shardHits = new TopDocs[subSearchers.Length]; for (int shardIDX = 0; shardIDX < subSearchers.Length; shardIDX++) { TopDocs subHits; ShardSearcher subSearcher = subSearchers[shardIDX]; if (sort is null) { subHits = subSearcher.Search(w, numHits); } else { TopFieldCollector c = TopFieldCollector.Create(sort, numHits, true, true, true, Random.NextBoolean()); subSearcher.Search(w, c); subHits = c.GetTopDocs(0, numHits); } shardHits[shardIDX] = subHits; if (VERBOSE) { Console.WriteLine(" shard=" + shardIDX + " " + subHits.TotalHits + " totalHits hits=" + (subHits.ScoreDocs is null ? 
"null" : subHits.ScoreDocs.Length.ToString())); if (subHits.ScoreDocs != null) { foreach (ScoreDoc sd in subHits.ScoreDocs) { Console.WriteLine(" doc=" + sd.Doc + " score=" + sd.Score); } } } } // Merge: TopDocs mergedHits; if (useFrom) { mergedHits = TopDocs.Merge(sort, from, size, shardHits); } else { mergedHits = TopDocs.Merge(sort, numHits, shardHits); } if (mergedHits.ScoreDocs != null) { // Make sure the returned shards are correct: for (int hitIDX = 0; hitIDX < mergedHits.ScoreDocs.Length; hitIDX++) { ScoreDoc sd = mergedHits.ScoreDocs[hitIDX]; Assert.AreEqual(ReaderUtil.SubIndex(sd.Doc, docStarts), sd.ShardIndex, "doc=" + sd.Doc + " wrong shard"); } } TestUtil.AssertEquals(topHits, mergedHits); } reader.Dispose(); dir.Dispose(); }
private PreviousSearchState AssertSame(IndexSearcher mockSearcher, NodeState.ShardIndexSearcher shardSearcher, Query q, Sort sort, PreviousSearchState state) { int numHits = TestUtil.NextInt32(Random, 1, 100); if (state != null && state.SearchAfterLocal == null) { // In addition to what we last searched: numHits += state.NumHitsPaged; } if (Verbose) { Console.WriteLine("TEST: query=" + q + " sort=" + sort + " numHits=" + numHits); if (state != null) { Console.WriteLine(" prev: searchAfterLocal=" + state.SearchAfterLocal + " searchAfterShard=" + state.SearchAfterShard + " numHitsPaged=" + state.NumHitsPaged); } } // Single (mock local) searcher: TopDocs hits; if (sort == null) { if (state != null && state.SearchAfterLocal != null) { hits = mockSearcher.SearchAfter(state.SearchAfterLocal, q, numHits); } else { hits = mockSearcher.Search(q, numHits); } } else { hits = mockSearcher.Search(q, numHits, sort); } // Shard searcher TopDocs shardHits; if (sort == null) { if (state != null && state.SearchAfterShard != null) { shardHits = shardSearcher.SearchAfter(state.SearchAfterShard, q, numHits); } else { shardHits = shardSearcher.Search(q, numHits); } } else { shardHits = shardSearcher.Search(q, numHits, sort); } int numNodes = shardSearcher.GetNodeVersions().Length; int[] @base = new int[numNodes]; IList <IndexReaderContext> subs = mockSearcher.TopReaderContext.Children; Assert.AreEqual(numNodes, subs.Count); for (int nodeID = 0; nodeID < numNodes; nodeID++) { @base[nodeID] = subs[nodeID].DocBaseInParent; } if (Verbose) { /* * for(int shardID=0;shardID<shardSearchers.Length;shardID++) { * System.out.println(" shard=" + shardID + " maxDoc=" + shardSearchers[shardID].searcher.getIndexReader().MaxDoc); * } */ Console.WriteLine(" single searcher: " + hits.TotalHits + " totalHits maxScore=" + hits.MaxScore); for (int i = 0; i < hits.ScoreDocs.Length; i++) { ScoreDoc sd = hits.ScoreDocs[i]; Console.WriteLine(" doc=" + sd.Doc + " score=" + sd.Score); } Console.WriteLine(" shard searcher: " + shardHits.TotalHits + " totalHits maxScore=" + shardHits.MaxScore); for (int i = 0; i < shardHits.ScoreDocs.Length; i++) { ScoreDoc sd = shardHits.ScoreDocs[i]; Console.WriteLine(" doc=" + sd.Doc + " (rebased: " + (sd.Doc + @base[sd.ShardIndex]) + ") score=" + sd.Score + " shard=" + sd.ShardIndex); } } int numHitsPaged; if (state != null && state.SearchAfterLocal != null) { numHitsPaged = hits.ScoreDocs.Length; if (state != null) { numHitsPaged += state.NumHitsPaged; } } else { numHitsPaged = hits.ScoreDocs.Length; } bool moreHits; ScoreDoc bottomHit; ScoreDoc bottomHitShards; if (numHitsPaged < hits.TotalHits) { // More hits to page through moreHits = true; if (sort == null) { bottomHit = hits.ScoreDocs[hits.ScoreDocs.Length - 1]; ScoreDoc sd = shardHits.ScoreDocs[shardHits.ScoreDocs.Length - 1]; // Must copy because below we rebase: bottomHitShards = new ScoreDoc(sd.Doc, sd.Score, sd.ShardIndex); if (Verbose) { Console.WriteLine(" save bottomHit=" + bottomHit); } } else { bottomHit = null; bottomHitShards = null; } } else { Assert.AreEqual(hits.TotalHits, numHitsPaged); bottomHit = null; bottomHitShards = null; moreHits = false; } // Must rebase so Assert.AreEqual passes: for (int hitID = 0; hitID < shardHits.ScoreDocs.Length; hitID++) { ScoreDoc sd = shardHits.ScoreDocs[hitID]; sd.Doc += @base[sd.ShardIndex]; } TestUtil.AssertEquals(hits, shardHits); if (moreHits) { // Return a continuation: return(new PreviousSearchState(q, sort, bottomHit, bottomHitShards, shardSearcher.GetNodeVersions(), numHitsPaged)); } 
else { return(null); } }
public virtual void TestSimple() { int numNodes = TestUtil.NextInt32(Random, 1, 10); double runTimeSec = AtLeast(3); int minDocsToMakeTerms = TestUtil.NextInt32(Random, 5, 20); int maxSearcherAgeSeconds = TestUtil.NextInt32(Random, 1, 3); if (Verbose) { Console.WriteLine("TEST: numNodes=" + numNodes + " runTimeSec=" + runTimeSec + " maxSearcherAgeSeconds=" + maxSearcherAgeSeconds); } Start(numNodes, runTimeSec, maxSearcherAgeSeconds); JCG.List <PreviousSearchState> priorSearches = new JCG.List <PreviousSearchState>(); IList <BytesRef> terms = null; while (J2N.Time.NanoTime() < endTimeNanos) { bool doFollowon = priorSearches.Count > 0 && Random.Next(7) == 1; // Pick a random node; we will run the query on this node: int myNodeID = Random.Next(numNodes); NodeState.ShardIndexSearcher localShardSearcher; PreviousSearchState prevSearchState; if (doFollowon) { // Pretend user issued a followon query: prevSearchState = priorSearches[Random.Next(priorSearches.Count)]; if (Verbose) { Console.WriteLine("\nTEST: follow-on query age=" + ((J2N.Time.NanoTime() - prevSearchState.SearchTimeNanos) / 1000000000.0)); } try { localShardSearcher = m_nodes[myNodeID].Acquire(prevSearchState.Versions); } catch (SearcherExpiredException see) { // Expected, sometimes; in a "real" app we would // either forward this error to the user ("too // much time has passed; please re-run your // search") or sneakily just switch to newest // searcher w/o telling them... if (Verbose) { Console.WriteLine(" searcher expired during local shard searcher init: " + see); } priorSearches.Remove(prevSearchState); continue; } } else { if (Verbose) { Console.WriteLine("\nTEST: fresh query"); } // Do fresh query: localShardSearcher = m_nodes[myNodeID].Acquire(); prevSearchState = null; } IndexReader[] subs = new IndexReader[numNodes]; PreviousSearchState searchState = null; try { // Mock: now make a single reader (MultiReader) from all node // searchers. In a real shard env you can't do this... we // do it to confirm results from the shard searcher // are correct: int docCount = 0; try { for (int nodeID = 0; nodeID < numNodes; nodeID++) { long subVersion = localShardSearcher.GetNodeVersions()[nodeID]; IndexSearcher sub = m_nodes[nodeID].Searchers.Acquire(subVersion); if (sub == null) { nodeID--; while (nodeID >= 0) { subs[nodeID].DecRef(); subs[nodeID] = null; nodeID--; } throw new SearcherExpiredException("nodeID=" + nodeID + " version=" + subVersion); } subs[nodeID] = sub.IndexReader; docCount += subs[nodeID].MaxDoc; } } catch (SearcherExpiredException see) { // Expected if (Verbose) { Console.WriteLine(" searcher expired during mock reader init: " + see); } continue; } IndexReader mockReader = new MultiReader(subs); IndexSearcher mockSearcher = new IndexSearcher(mockReader); Query query; Sort sort; if (prevSearchState != null) { query = prevSearchState.Query; sort = prevSearchState.Sort; } else { if (terms == null && docCount > minDocsToMakeTerms) { // TODO: try to "focus" on high freq terms sometimes too // TODO: maybe also periodically reset the terms...? 
TermsEnum termsEnum = MultiFields.GetTerms(mockReader, "body").GetEnumerator(); terms = new JCG.List <BytesRef>(); while (termsEnum.MoveNext()) { terms.Add(BytesRef.DeepCopyOf(termsEnum.Term)); } if (Verbose) { Console.WriteLine("TEST: init terms: " + terms.Count + " terms"); } if (terms.Count == 0) { terms = null; } } if (Verbose) { Console.WriteLine(" maxDoc=" + mockReader.MaxDoc); } if (terms != null) { if (Random.NextBoolean()) { query = new TermQuery(new Term("body", terms[Random.Next(terms.Count)])); } else { string t = terms[Random.Next(terms.Count)].Utf8ToString(); string prefix; if (t.Length <= 1) { prefix = t; } else { prefix = t.Substring(0, TestUtil.NextInt32(Random, 1, 2)); } query = new PrefixQuery(new Term("body", prefix)); } if (Random.NextBoolean()) { sort = null; } else { // TODO: sort by more than 1 field int what = Random.Next(3); if (what == 0) { sort = new Sort(SortField.FIELD_SCORE); } else if (what == 1) { // TODO: this sort doesn't merge // correctly... it's tricky because you // could have > 2.1B docs across all shards: //sort = new Sort(SortField.FIELD_DOC); sort = null; } else if (what == 2) { sort = new Sort(new SortField[] { new SortField("docid", SortFieldType.INT32, Random.NextBoolean()) }); } else { sort = new Sort(new SortField[] { new SortField("title", SortFieldType.STRING, Random.NextBoolean()) }); } } } else { query = null; sort = null; } } if (query != null) { try { searchState = AssertSame(mockSearcher, localShardSearcher, query, sort, prevSearchState); } catch (SearcherExpiredException see) { // Expected; in a "real" app we would // either forward this error to the user ("too // much time has passed; please re-run your // search") or sneakily just switch to newest // searcher w/o telling them... if (Verbose) { Console.WriteLine(" searcher expired during search: " + see); Console.Out.Write(see.StackTrace); } // We can't do this in general: on a very slow // computer it's possible the local searcher // expires before we can finish our search: // assert prevSearchState != null; if (prevSearchState != null) { priorSearches.Remove(prevSearchState); } } } } finally { //m_nodes[myNodeID].Release(localShardSearcher); NodeState.Release(localShardSearcher); // LUCENENET: Made Release() static per CA1822 for performance foreach (IndexReader sub in subs) { if (sub != null) { sub.DecRef(); } } } if (searchState != null && searchState.SearchAfterLocal != null && Random.Next(5) == 3) { priorSearches.Add(searchState); if (priorSearches.Count > 200) { priorSearches.Shuffle(Random); priorSearches.RemoveRange(100, priorSearches.Count - 100); // LUCENENET: Converted end index to length } } } Finish(); }
public override TopFieldDocs Search(Query query, Filter filter, int n, Sort sort) { CheckExplanations(query); return(base.Search(query, filter, n, sort)); }
public override TopFieldDocs Search(Weight weight, Filter filter, int nDocs, Sort sort) { return(Search(weight, filter, nDocs, sort, true)); }
public MultiSearcherThread(Searchable searchable, Weight weight, Filter filter, int nDocs, FieldDocSortedHitQueue hq, Sort sort, int i, int[] starts, System.String name) : base(name) { this.searchable = searchable; this.weight = weight; this.filter = filter; this.nDocs = nDocs; this.hq = hq; this.i = i; this.starts = starts; this.sort = sort; }
// TODO: broadcastNodeExpire? then we can purge the // known-stale cache entries... // MOCK: in a real env you have to hit the wire // (send this query to all remote nodes // concurrently): internal virtual TopDocs SearchNode(int nodeID, long[] nodeVersions, Query q, Sort sort, int numHits, ScoreDoc searchAfter) { NodeState.ShardIndexSearcher s = Nodes[nodeID].Acquire(nodeVersions); try { if (sort == null) { if (searchAfter != null) { return(s.LocalSearchAfter(searchAfter, q, numHits)); } else { return(s.LocalSearch(q, numHits)); } } else { Debug.Assert(searchAfter == null); // not supported yet return(s.LocalSearch(q, numHits, sort)); } } finally { Nodes[nodeID].Release(s); } }
static Sort() { INDEXORDER = new Sort(SortField.FIELD_DOC); }
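// The static initializer above builds the INDEXORDER singleton; together with RELEVANCE these
// cover the two predefined orderings (a usage sketch; query and searcher are assumed):
TopDocs byIndexOrder = searcher.Search(query, null, 10, Sort.INDEXORDER); // ascending doc id
TopDocs byRelevance = searcher.Search(query, null, 10, Sort.RELEVANCE);   // descending score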
internal virtual void AssertQuery(Query query, Filter filter, Sort sort) { int maxDoc = Searcher.IndexReader.MaxDoc; TopDocs all; int pageSize = TestUtil.NextInt(Random(), 1, maxDoc * 2); if (isVerbose) { Console.WriteLine("\nassertQuery " + (Iter++) + ": query=" + query + " filter=" + filter + " sort=" + sort + " pageSize=" + pageSize); } bool doMaxScore = Random().NextBoolean(); bool doScores = Random().NextBoolean(); if (sort == null) { all = Searcher.Search(query, filter, maxDoc); } else if (sort == Sort.RELEVANCE) { all = Searcher.Search(query, filter, maxDoc, sort, true, doMaxScore); } else { all = Searcher.Search(query, filter, maxDoc, sort, doScores, doMaxScore); } if (isVerbose) { Console.WriteLine(" all.TotalHits=" + all.TotalHits); int upto = 0; foreach (ScoreDoc scoreDoc in all.ScoreDocs) { Console.WriteLine(" hit " + (upto++) + ": id=" + Searcher.Doc(scoreDoc.Doc).Get("id") + " " + scoreDoc); } } int pageStart = 0; ScoreDoc lastBottom = null; while (pageStart < all.TotalHits) { TopDocs paged; if (sort == null) { if (isVerbose) { Console.WriteLine(" iter lastBottom=" + lastBottom); } paged = Searcher.SearchAfter(lastBottom, query, filter, pageSize); } else { if (isVerbose) { Console.WriteLine(" iter lastBottom=" + lastBottom); } if (sort == Sort.RELEVANCE) { paged = Searcher.SearchAfter(lastBottom, query, filter, pageSize, sort, true, doMaxScore); } else { paged = Searcher.SearchAfter(lastBottom, query, filter, pageSize, sort, doScores, doMaxScore); } } if (isVerbose) { Console.WriteLine(" " + paged.ScoreDocs.Length + " hits on page"); } if (paged.ScoreDocs.Length == 0) { break; } AssertPage(pageStart, all, paged); pageStart += paged.ScoreDocs.Length; lastBottom = paged.ScoreDocs[paged.ScoreDocs.Length - 1]; } Assert.AreEqual(all.ScoreDocs.Length, pageStart); }
public virtual void TestRandomStringSort() { Random random = new Random(Random.Next()); int NUM_DOCS = AtLeast(100); Directory dir = NewDirectory(); RandomIndexWriter writer = new RandomIndexWriter( #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION this, #endif random, dir); bool allowDups = random.NextBoolean(); ISet <string> seen = new JCG.HashSet <string>(); int maxLength = TestUtil.NextInt32(random, 5, 100); if (VERBOSE) { Console.WriteLine("TEST: NUM_DOCS=" + NUM_DOCS + " maxLength=" + maxLength + " allowDups=" + allowDups); } int numDocs = 0; IList <BytesRef> docValues = new List <BytesRef>(); // TODO: deletions while (numDocs < NUM_DOCS) { Document doc = new Document(); // 10% of the time, the document is missing the value: BytesRef br; if (LuceneTestCase.Random.Next(10) != 7) { string s; if (random.NextBoolean()) { s = TestUtil.RandomSimpleString(random, maxLength); } else { s = TestUtil.RandomUnicodeString(random, maxLength); } if (!allowDups) { if (seen.Contains(s)) { continue; } seen.Add(s); } if (VERBOSE) { Console.WriteLine(" " + numDocs + ": s=" + s); } br = new BytesRef(s); if (DefaultCodecSupportsDocValues) { doc.Add(new SortedDocValuesField("stringdv", br)); doc.Add(new NumericDocValuesField("id", numDocs)); } else { doc.Add(NewStringField("id", Convert.ToString(numDocs), Field.Store.NO)); } doc.Add(NewStringField("string", s, Field.Store.NO)); docValues.Add(br); } else { br = null; if (VERBOSE) { Console.WriteLine(" " + numDocs + ": <missing>"); } docValues.Add(null); if (DefaultCodecSupportsDocValues) { doc.Add(new NumericDocValuesField("id", numDocs)); } else { doc.Add(NewStringField("id", Convert.ToString(numDocs), Field.Store.NO)); } } doc.Add(new StoredField("id", numDocs)); writer.AddDocument(doc); numDocs++; if (random.Next(40) == 17) { // force flush writer.GetReader().Dispose(); } } IndexReader r = writer.GetReader(); writer.Dispose(); if (VERBOSE) { Console.WriteLine(" reader=" + r); } IndexSearcher idxS = NewSearcher( #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION this, #endif r, false); int ITERS = AtLeast(100); for (int iter = 0; iter < ITERS; iter++) { bool reverse = random.NextBoolean(); TopFieldDocs hits; SortField sf; bool sortMissingLast; bool missingIsNull; if (DefaultCodecSupportsDocValues && random.NextBoolean()) { sf = new SortField("stringdv", SortFieldType.STRING, reverse); // Can only use sort missing if the DVFormat // supports docsWithField: sortMissingLast = DefaultCodecSupportsDocsWithField && Random.NextBoolean(); missingIsNull = DefaultCodecSupportsDocsWithField; } else { sf = new SortField("string", SortFieldType.STRING, reverse); sortMissingLast = Random.NextBoolean(); missingIsNull = true; } if (sortMissingLast) { sf.MissingValue = SortField.STRING_LAST; } Sort sort; if (random.NextBoolean()) { sort = new Sort(sf); } else { sort = new Sort(sf, SortField.FIELD_DOC); } int hitCount = TestUtil.NextInt32(random, 1, r.MaxDoc + 20); RandomFilter f = new RandomFilter(random, (float)random.NextDouble(), docValues); int queryType = random.Next(3); if (queryType == 0) { // force out of order BooleanQuery bq = new BooleanQuery(); // Add a Query with SHOULD, since bw.Scorer() returns BooleanScorer2 // which delegates to BS if there are no mandatory clauses. bq.Add(new MatchAllDocsQuery(), Occur.SHOULD); // Set minNrShouldMatch to 1 so that BQ will not optimize rewrite to return // the clause instead of BQ. 
bq.MinimumNumberShouldMatch = 1; hits = idxS.Search(bq, f, hitCount, sort, random.NextBoolean(), random.NextBoolean()); } else if (queryType == 1) { hits = idxS.Search(new ConstantScoreQuery(f), null, hitCount, sort, random.NextBoolean(), random.NextBoolean()); } else { hits = idxS.Search(new MatchAllDocsQuery(), f, hitCount, sort, random.NextBoolean(), random.NextBoolean()); } if (VERBOSE) { Console.WriteLine("\nTEST: iter=" + iter + " " + hits.TotalHits + " hits; topN=" + hitCount + "; reverse=" + reverse + "; sortMissingLast=" + sortMissingLast + " sort=" + sort); } // Compute expected results: var expected = f.MatchValues.ToList(); expected.Sort(new ComparerAnonymousInnerClassHelper(this, sortMissingLast)); if (reverse) { expected.Reverse(); } if (VERBOSE) { Console.WriteLine(" expected:"); for (int idx = 0; idx < expected.Count; idx++) { BytesRef br = expected[idx]; if (br == null && missingIsNull == false) { br = new BytesRef(); } Console.WriteLine(" " + idx + ": " + (br == null ? "<missing>" : br.Utf8ToString())); if (idx == hitCount - 1) { break; } } } if (VERBOSE) { Console.WriteLine(" actual:"); for (int hitIDX = 0; hitIDX < hits.ScoreDocs.Length; hitIDX++) { FieldDoc fd = (FieldDoc)hits.ScoreDocs[hitIDX]; BytesRef br = (BytesRef)fd.Fields[0]; Console.WriteLine(" " + hitIDX + ": " + (br == null ? "<missing>" : br.Utf8ToString()) + " id=" + idxS.Doc(fd.Doc).Get("id")); } } for (int hitIDX = 0; hitIDX < hits.ScoreDocs.Length; hitIDX++) { FieldDoc fd = (FieldDoc)hits.ScoreDocs[hitIDX]; BytesRef br = expected[hitIDX]; if (br == null && missingIsNull == false) { br = new BytesRef(); } // Normally, the old codecs (that don't support // docsWithField via doc values) will always return // an empty BytesRef for the missing case; however, // if all docs in a given segment were missing, in // that case it will return null! So we must map // null here, too: BytesRef br2 = (BytesRef)fd.Fields[0]; if (br2 == null && missingIsNull == false) { br2 = new BytesRef(); } Assert.AreEqual(br, br2, "hit=" + hitIDX + " has wrong sort value"); } } r.Dispose(); dir.Dispose(); }
/// <summary> /// Runs the search over the index at <paramref name="indexdic"/> and returns the top documents. /// </summary> /// <param name="bQuery"></param> private TopDocs GetSearchResult(BooleanQuery bQuery, Dictionary<string, string> dicKeywords, string indexdic) { TopDocs docs = null; try { IndexSearcher search = new IndexSearcher(new Lucene.Net.Store.SimpleFSDirectory(new System.IO.DirectoryInfo(IndexDic(indexdic))), true); Stopwatch stopwatch = Stopwatch.StartNew(); // The third SortField constructor argument: true = descending, false = ascending. // Note: after sorting by a field the per-hit scores are NaN unless score tracking is enabled. Sort sort = new Sort(new SortField[] { SortField.FIELD_SCORE, new SortField("Rzjb", SortField.INT, true) }); // by score first, then by Rzjb descending docs = search.Search(bQuery, (Lucene.Net.Search.Filter)null, 1000, sort); stopwatch.Stop(); } catch { } return docs; }
public virtual void TestRandomQueries() { string[] vals = new string[] { "w1", "w2", "w3", "w4", "w5", "xx", "yy", "zzz" }; int tot = 0; BooleanQuery q1 = null; try { // increase number of iterations for more complete testing int num = AtLeast(20); for (int i = 0; i < num; i++) { int level = Random.Next(3); q1 = RandBoolQuery(new Random(Random.Next()), Random.NextBoolean(), level, field, vals, null); // Can't sort by relevance since floating point numbers may not quite // match up. Sort sort = Sort.INDEXORDER; QueryUtils.Check( #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION this, #endif Random, q1, searcher); // baseline sim try { // a little hackish, QueryUtils.check is too costly to do on bigSearcher in this loop. searcher.Similarity = bigSearcher.Similarity; // random sim QueryUtils.Check( #if FEATURE_INSTANCE_TESTDATA_INITIALIZATION this, #endif Random, q1, searcher); } finally { searcher.Similarity = new DefaultSimilarity(); // restore } TopFieldCollector collector = TopFieldCollector.Create(sort, 1000, false, true, true, true); searcher.Search(q1, null, collector); ScoreDoc[] hits1 = collector.GetTopDocs().ScoreDocs; collector = TopFieldCollector.Create(sort, 1000, false, true, true, false); searcher.Search(q1, null, collector); ScoreDoc[] hits2 = collector.GetTopDocs().ScoreDocs; tot += hits2.Length; CheckHits.CheckEqual(q1, hits1, hits2); BooleanQuery q3 = new BooleanQuery(); q3.Add(q1, Occur.SHOULD); q3.Add(new PrefixQuery(new Term("field2", "b")), Occur.SHOULD); TopDocs hits4 = bigSearcher.Search(q3, 1); Assert.AreEqual(mulFactor * collector.TotalHits + NUM_EXTRA_DOCS / 2, hits4.TotalHits); } } catch (Exception e) when(e.IsException()) { // For easier debugging Console.WriteLine("failed query: " + q1); throw; // LUCENENET: CA2200: Rethrow to preserve stack details (https://docs.microsoft.com/en-us/visualstudio/code-quality/ca2200-rethrow-to-preserve-stack-details) } // System.out.println("Total hits:"+tot); }