// NOTE(review): "String q = args[1];" preceded this method in the original file —
// apparently residue of a truncated caller (Main) lost in Java->C# conversion.
// Confirm against the original caller and restore it there, not here.
// String q = args[1]; // B

/// <summary>
/// Runs the query string <paramref name="q"/> against the index stored in
/// <paramref name="indexDir"/> and prints the "filename" field of each of the
/// top 10 matching documents to the console.
/// </summary>
/// <param name="indexDir">Path of the directory holding the Lucene index.</param>
/// <param name="q">Human-entered query, parsed against the "contents" field.</param>
public static void search(String indexDir, String q)
{
    // C: open the index directory.
    // FIX: Lucene.Net's FSDirectory.Open expects a DirectoryInfo, not a FileInfo.
    Directory dir = FSDirectory.Open(new System.IO.DirectoryInfo(indexDir));

    // D: read-only searcher over the index
    IndexSearcher searcher = new IndexSearcher(dir, true);

    // E: parse the query
    QueryParser parser = new QueryParser("contents", new StandardAnalyzer(Version.LUCENE_CURRENT));
    Query query = parser.Parse(q);

    // F: collect the top 10 hits
    Lucene.Net.Search.TopDocs hits = searcher.Search(query, 10);

    // FIX: Lucene.Net exposes TotalHits/ScoreDocs/Doc as PascalCase properties;
    // the original mixed Java-style casing (totalHits, scoreDocs, .doc), which
    // does not compile.
    System.Console.WriteLine("Found " + hits.TotalHits + " document(s) that matched query '" + q + "':");
    for (int i = 0; i < hits.ScoreDocs.Length; i++)
    {
        ScoreDoc scoreDoc = hits.ScoreDocs[i]; // G
        Document doc = searcher.Doc(scoreDoc.Doc); // G
        System.Console.WriteLine(doc.Get("filename")); // G
    }

    searcher.Close(); // H: release the searcher
}
/// <summary>
/// Executes <paramref name="q"/> (optionally filtered) against the searcher and
/// returns the top <paramref name="n"/> hits, sorted by <paramref name="sort"/>
/// when one is supplied or by relevance otherwise. The extra collector
/// <paramref name="fc"/> observes every hit alongside the top-N collector.
/// </summary>
private static TopDocs DoSearch(IndexSearcher searcher, ScoreDoc after, Query q, Filter filter, int n, Sort sort, bool doDocScores, bool doMaxScore, Collector fc)
{
    // Fold the filter into the query so a single Search call suffices.
    if (filter != null)
    {
        q = new FilteredQuery(q, filter);
    }

    // Clamp the requested hit count to the reader's document count, treating an
    // empty reader as holding one document.
    int maxDoc = searcher.IndexReader.MaxDoc;
    int limit = maxDoc == 0 ? 1 : maxDoc;
    n = Math.Min(n, limit);

    if (after != null && after.Doc >= limit)
    {
        throw new System.ArgumentException("after.doc exceeds the number of documents in the reader: after.doc=" + after.Doc + " limit=" + limit);
    }

    if (sort == null)
    {
        // TODO: can we pass the right boolean for
        // in-order instead of hardwired to false...? we'd
        // need access to the protected IS.search methods
        // taking Weight... could use reflection...
        var relevanceCollector = TopScoreDocCollector.Create(n, after, false);
        searcher.Search(q, MultiCollector.Wrap(relevanceCollector, fc));
        return relevanceCollector.TopDocs();
    }

    if (after != null && !(after is FieldDoc))
    {
        // TODO: if we fix type safety of TopFieldDocs we can
        // remove this
        throw new System.ArgumentException("after must be a FieldDoc; got " + after);
    }

    const bool fillFields = true;
    var sortedCollector = TopFieldCollector.Create(sort, n, (FieldDoc)after, fillFields, doDocScores, doMaxScore, false);
    searcher.Search(q, MultiCollector.Wrap(sortedCollector, fc));
    return sortedCollector.TopDocs();
}
public virtual void TestMultipleMatchesPerDoc()
{
    PayloadTermQuery query = new PayloadTermQuery(new Term(PayloadHelper.MULTI_FIELD, "seventy"), new MaxPayloadFunction());
    TopDocs hits = Searcher.Search(query, null, 100);
    Assert.IsTrue(hits != null, "hits is null and it shouldn't be");
    Assert.IsTrue(hits.TotalHits == 100, "hits Size: " + hits.TotalHits + " is not: " + 100);

    // Every document contains "seventy" once and all other similarity factors
    // are fixed at 1, so scores depend only on the payload values.
    Assert.IsTrue(hits.MaxScore == 4.0, hits.MaxScore + " does not equal: " + 4.0);

    // Exactly 10 documents (70 + i*100 for i in [0-9]) should score 4;
    // all the rest should score 2.
    int numTens = 0;
    foreach (ScoreDoc hit in hits.ScoreDocs)
    {
        if (hit.Doc % 10 == 0)
        {
            numTens++;
            Assert.IsTrue(hit.Score == 4.0, hit.Score + " does not equal: " + 4.0);
        }
        else
        {
            Assert.IsTrue(hit.Score == 2, hit.Score + " does not equal: " + 2);
        }
    }
    Assert.IsTrue(numTens == 10, numTens + " does not equal: " + 10);

    CheckHits.CheckExplanations(query, "field", Searcher, true);

    Spans spans = MultiSpansWrapper.Wrap(Searcher.TopReaderContext, query);
    Assert.IsTrue(spans != null, "spans is null and it shouldn't be");

    // Two span matches per document over 100 hits -> 200 positions in total.
    int count = 0;
    while (spans.Next())
    {
        count++;
    }
    Assert.IsTrue(count == 200, count + " does not equal: " + 200);
}
/// <summary>
/// Builds the TopDocs the join query is expected to produce for
/// <paramref name="queryValue"/>: hits sorted by join score, with at most the
/// top 10 materialized as ScoreDocs.
/// </summary>
private TopDocs CreateExpectedTopDocs(string queryValue, bool from, ScoreMode scoreMode, IndexIterationContext context)
{
    // Pick the hit->score map for the requested join direction.
    var joinScoresByDoc = @from
        ? context.FromHitsToJoinScore[queryValue]
        : context.ToHitsToJoinScore[queryValue];

    var rankedHits = new List<KeyValuePair<int, JoinScore>>(joinScoresByDoc.EntrySet());
    rankedHits.Sort(new ComparerAnonymousInnerClassHelper(this, scoreMode));

    var scoreDocs = new ScoreDoc[Math.Min(10, rankedHits.Count)];
    for (int rank = 0; rank < scoreDocs.Length; rank++)
    {
        KeyValuePair<int, JoinScore> hit = rankedHits[rank];
        scoreDocs[rank] = new ScoreDoc(hit.Key, hit.Value.Score(scoreMode));
    }

    float maxScore = rankedHits.Count == 0 ? float.NaN : rankedHits[0].Value.Score(scoreMode);
    return new TopDocs(rankedHits.Count, scoreDocs, maxScore);
}
/// <summary>
/// Advances the underlying iterator and returns the Lucene document for the
/// next hit, or null when the iterator is exhausted.
/// </summary>
protected internal override Document FetchNextOrNull()
{
    //JAVA TO C# CONVERTER TODO TASK: Java iterators are only converted within the context of 'while' and 'for' loops:
    if (!_iterator.hasNext())
    {
        return null;
    }
    //JAVA TO C# CONVERTER TODO TASK: Java iterators are only converted within the context of 'while' and 'for' loops:
    _currentDoc = _iterator.next();
    try
    {
        return _searcher.doc(_currentDoc.doc);
    }
    catch (IOException e)
    {
        // FIX: System.Exception has no constructor taking an Exception, so
        // "new Exception(e)" did not compile. Wrap the IOException as the inner
        // exception so the original cause and stack trace are preserved.
        throw new Exception(e.Message, e);
    }
}
/// <summary>
/// Projects one page of search hits into the JSON result shape used by the
/// search service: one object per package carrying registration links,
/// metadata fields, version lists, and (optionally) the Lucene score
/// explanation.
/// </summary>
public static JToken MakeResultData(IndexSearcher searcher, string scheme, TopDocs topDocs, int skip, int take, NuGetSearcherManager searcherManager, bool includeExplanation, Query query)
{
    Uri registrationBaseAddress = searcherManager.RegistrationBaseAddress[scheme];
    JArray array = new JArray();

    // Page boundaries: [skip, skip+take) clamped to the hits actually returned.
    int end = Math.Min(skip + take, topDocs.ScoreDocs.Length);
    for (int i = skip; i < end; i++)
    {
        ScoreDoc scoreDoc = topDocs.ScoreDocs[i];
        Document document = searcher.Doc(scoreDoc.Doc);

        string url = document.Get("Url");
        string id = document.Get("Id");
        string version = document.Get("Version");

        JObject obj = new JObject();
        obj["@id"] = new Uri(registrationBaseAddress, url).AbsoluteUri;
        obj["@type"] = "Package";
        obj["registration"] = new Uri(registrationBaseAddress, string.Format("{0}/index.json", id.ToLowerInvariant())).AbsoluteUri;
        obj["id"] = id;

        AddField(obj, document, "domain", "Domain");
        AddField(obj, document, "description", "Description");
        AddField(obj, document, "summary", "Summary");
        AddField(obj, document, "title", "Title");
        AddField(obj, document, "iconUrl", "IconUrl");
        AddFieldAsArray(obj, document, "tags", "Tags");
        AddFieldAsArray(obj, document, "authors", "Authors");

        obj["version"] = version;
        obj["versions"] = searcherManager.GetVersions(scheme, scoreDoc.Doc);

        if (includeExplanation)
        {
            obj["explanation"] = searcher.Explain(query, scoreDoc.Doc).ToString();
        }

        array.Add(obj);
    }

    return array;
}
/// <summary>
/// Runs <paramref name="q"/> sorted by the "id" field and asserts every doc i
/// scores exactly scores[i], including matching explanations.
/// </summary>
private void AssertHits(Query q, float[] scores)
{
    // Expected hits: doc i carries scores[i], in docid order.
    int n = scores.Length;
    var expected = new ScoreDoc[n];
    var expectedDocs = new int[n];
    for (int docId = 0; docId < n; docId++)
    {
        expectedDocs[docId] = docId;
        expected[docId] = new ScoreDoc(docId, scores[docId]);
    }

    TopDocs docs = searcher.Search(q, null, documents.Count, new Sort(new SortField("id", SortFieldType.STRING)), true, false);
    CheckHits.DoCheckHits(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
        this,
#endif
        Random, q, "", searcher, expectedDocs);
    CheckHits.CheckHitsQuery(q, expected, docs.ScoreDocs, expectedDocs);
    CheckHits.CheckExplanations(q, "", searcher);
}
public void SortByScoreDesc()
{
    var collector = new GroupTopDocsCollector(100, GroupFieldName);
    _searcher.Search(new FieldScoreQuery(ScoreFieldName, FieldScoreQuery.Type.INT), collector);

    // Walk the grouped hits and verify the scores never increase.
    ScoreDoc previous = null;
    foreach (var current in collector.GroupTopDocs().GroupScoreDocs)
    {
        if (previous != null)
        {
            Assert.GreaterOrEqual(previous.Score, current.Score);
        }
        previous = current;
    }
}
/// <summary>
/// Converts the retrieved Lucene hits into DynamicMessage objects carrying the
/// user-selected fields of each matching document.
/// </summary>
public List<DynamicMessage> MakeSearchResultsReadable()
{
    List<DynamicMessage> searchResults = new List<DynamicMessage>();

    // FIX: TotalHits counts every match in the index, while ScoreDocs only
    // holds the hits actually retrieved; indexing ScoreDocs up to TotalHits
    // overran the array whenever more documents matched than were returned.
    int available = Math.Min(Hits.TotalHits, Hits.ScoreDocs.Length);
    for (int i = 0; i < available; i++)
    {
        List<string> data = new List<string>();
        ScoreDoc d = Hits.ScoreDocs[i];
        Document idoc = LuceneService.Searcher.Doc(d.Doc);
        foreach (var field in ProjectInfo.Data.SelectedFields)
        {
            data.Add(idoc.GetField(field).GetStringValue());
        }

        DynamicMessage message = new DynamicMessage(data, ProjectInfo.Data.SelectedFields, ProjectInfo.DateFieldKey, idoc.GetField("id").GetInt32Value().Value);
        searchResults.Add(message);
    }
    return searchResults;
}
/// <summary>
/// Runs <paramref name="query"/> with the given sort and returns the matched
/// documents' integer ids in result order. Disposes the searcher when done.
/// </summary>
private static IEnumerable<int> Search(Searcher searcher, Query query, Sort sort)
{
    TopDocs hits = searcher.Search(query, null, MaxResult, sort);

    ScoreDoc[] matches = hits.ScoreDocs;
    var ids = new int[matches.Length];
    for (int i = 0; i < matches.Length; i++)
    {
        Document doc = searcher.Doc(matches[i].Doc);
        ids[i] = int.Parse(doc.Get(IdFieldName));
        Trace.WriteLine(string.Format("Matched id = {0}, Name = {1}", doc.Get(IdFieldName), doc.Get(NameFieldName)));
    }

    searcher.Dispose();
    return ids;
}
public void Should_Throw_SearchException_When_Field_Is_Missing(string fieldName)
{
    // Arrange: build a fully-populated document, then knock out the field under test.
    LuceneDocument document = new LuceneDocument();
    var seedFields = new[]
    {
        new { Name = "id", Value = "123" },
        new { Name = "title", Value = "the title" },
        new { Name = "contentsummary", Value = "the summary" },
        new { Name = "tags", Value = "tag1 tag2" },
        new { Name = "createdby", Value = "gandhi" },
        new { Name = "contentlength", Value = "999" },
        new { Name = "createdon", Value = DateTime.Today.ToString() }
    };
    foreach (var seed in seedFields)
    {
        document.Add(CreateField(seed.Name, seed.Value));
    }
    document.RemoveField(fieldName);

    ScoreDoc scoreDoc = new ScoreDoc(0, 1f);

    // Act + Assert
    SearchResultViewModel model = new SearchResultViewModel(document, scoreDoc);
}
/** scores[] are in docId order */
protected virtual void CheckValueSource(ValueSource vs, float[] scores, float delta)
{
    FunctionQuery q = new FunctionQuery(vs);

    // Run the query so each doc's function score is computed, then compare
    // every returned hit against the expected score for its docId.
    TopDocs docs = indexSearcher.Search(q, 1000);
    foreach (ScoreDoc hit in docs.ScoreDocs)
    {
        assertEquals("Not equal for doc " + hit.Doc, scores[hit.Doc], hit.Score, delta);
    }

    CheckHits.CheckExplanations(q, "", indexSearcher);
}
/// <summary>
/// Extracts the TITELNUMMERTRACK_ID of each hit, capped at
/// MaxDOCSRetrieveResult entries.
/// </summary>
private int[] LuceneTopDocs2TitelnummertrackIDs(TopDocs topHits)
{
    // FIX: also bound by ScoreDocs.Length — when scanning we can get more
    // total hits than retrieved ScoreDocs (translated from the original Dutch
    // comment), and indexing by TotalHits alone overran the array. The unused
    // timing variable and the always-true inner bounds check were removed.
    int count = Math.Min(Math.Min(topHits.TotalHits, MaxDOCSRetrieveResult), topHits.ScoreDocs.Length);
    int[] titelnummertrackIDs = new int[count];
    for (int j = 0; j < count; j++)
    {
        ScoreDoc match = topHits.ScoreDocs[j];
        Document doc = indexFingerLookup.Doc(match.Doc);
        titelnummertrackIDs[j] = Convert.ToInt32(doc.Get("TITELNUMMERTRACK_ID"));
    }
    return titelnummertrackIDs;
}
/// <summary>
/// Runs <paramref name="q"/> sorted by the "id" field and asserts every doc i
/// scores exactly scores[i], including matching explanations.
/// </summary>
protected virtual void AssertHits(Query q, float[] scores)
{
    // Expected hits: doc i carries scores[i], in docid order.
    int count = scores.Length;
    var expected = new ScoreDoc[count];
    var expectedDocs = new int[count];
    for (int docId = 0; docId < count; docId++)
    {
        expectedDocs[docId] = docId;
        expected[docId] = new ScoreDoc(docId, scores[docId]);
    }

    TopDocs docs = searcher.Search(q, 2, new Sort(new SortField("id", SortFieldType.STRING)));
    CheckHits.DoCheckHits(Random(), q, "", searcher, expectedDocs, Similarity);
    CheckHits.CheckHitsQuery(q, expected, docs.ScoreDocs, expectedDocs);
    CheckHits.CheckExplanations(q, "", searcher);
}
public virtual void TestMinFunction()
{
    PayloadNearQuery query = NewPhraseQuery("field", "twenty two", true, new MinPayloadFunction());
    QueryUtils.Check(query);

    // All 10 hits should score 2 — the minimum payload value.
    TopDocs hits = Searcher.Search(query, null, 100);
    Assert.IsTrue(hits != null, "hits is null and it shouldn't be");
    Assert.AreEqual(10, hits.TotalHits, "should be 10 hits");

    for (int j = 0; j < hits.ScoreDocs.Length; j++)
    {
        ScoreDoc doc = hits.ScoreDocs[j];
        Assert.AreEqual(2, doc.Score, doc.Score + " does not equal: " + 2);

        // The explanation must both name the payload function and agree on the value.
        Explanation explain = Searcher.Explain(query, hits.ScoreDocs[j].Doc);
        string exp = explain.ToString();
        Assert.IsTrue(exp.IndexOf("MinPayloadFunction") > -1, exp);
        Assert.AreEqual(2f, explain.Value, hits.ScoreDocs[j].Score + " explain value does not equal: " + 2);
    }
}
/// <summary>
/// Outputs one page of search results to the list view: rank, title, author,
/// bibliographic info, first sentence, and full abstract per hit.
/// </summary>
/// <param name="results">Search results</param>
/// <param name="pagingIndex">Zero-based page number</param>
/// <param name="maxDisplay">Maximum rows per page</param>
public void DisplayResults(TopDocs results, int pagingIndex, int maxDisplay)
{
    listView.Items.Clear();
    int offset = pagingIndex * maxDisplay;
    int realDisplay = Math.Min(maxDisplay, results.ScoreDocs.Length - offset);
    string[] delimiter1 = new string[] { ".I", ".T", ".A", ".B", ".W", " ." };
    int rank = offset;

    for (int j = offset; j < offset + realDisplay; j++)
    {
        ScoreDoc scoreDoc = results.ScoreDocs[j];
        rank++;
        // retrieve the document from the 'ScoreDoc' object
        Lucene.Net.Documents.Document doc = searcher.Doc(scoreDoc.Doc);
        string myFieldValue = doc.Get(TEXT_FN).ToString();
        string[] array = myFieldValue.Split(delimiter1, StringSplitOptions.None);

        ListViewItem item = new ListViewItem(rank.ToString());

        // FIX: these subitems were previously added once per split segment
        // inside the foreach below, producing array.Length duplicate copies of
        // the same five columns for every row. They are now added exactly once.
        // NOTE(review): assumes the split text has at least 7 segments — confirm.
        string title = array[2];
        string author = array[4];
        string bibliographic = array[5];
        string firstsentence = array[6];
        string fullabstract = "";
        for (int i = 6; i < array.Length; i++)
        {
            fullabstract = fullabstract + array[i] + " .";
        }
        item.SubItems.Add(title);
        item.SubItems.Add(author);
        item.SubItems.Add(bibliographic);
        item.SubItems.Add(firstsentence);
        item.SubItems.Add(fullabstract);

        // Preserve the original per-segment console trace.
        foreach (string entry in array)
        {
            Console.WriteLine(entry);
        }

        listView.Items.Add(item);
    }
}
public virtual void Test()
{
    // "twenty two": adjacent terms carry payloads 2 and 4 and every similarity
    // factor is set to 1, so each matching document should score (2+4)/2 = 3.
    PayloadNearQuery query = NewPhraseQuery("field", "twenty two", true, new AveragePayloadFunction());
    QueryUtils.Check(query);

    TopDocs hits = Searcher.Search(query, null, 100);
    Assert.IsTrue(hits != null, "hits is null and it shouldn't be");
    // 10 documents were added with the tokens "twenty two", each has 3 instances
    Assert.AreEqual(10, hits.TotalHits, "should be 10 hits");
    foreach (ScoreDoc doc in hits.ScoreDocs)
    {
        Assert.AreEqual(3, doc.Score, doc.Score + " does not equal: " + 3);
    }

    // Repeat for "<n> hundred" phrases: same payload math, but each query now
    // matches 100 documents.
    for (int i = 1; i < 10; i++)
    {
        query = NewPhraseQuery("field", English.IntToEnglish(i) + " hundred", true, new AveragePayloadFunction());
        if (VERBOSE)
        {
            Console.WriteLine("TEST: run query=" + query);
        }
        hits = Searcher.Search(query, null, 100);
        Assert.IsTrue(hits != null, "hits is null and it shouldn't be");
        Assert.AreEqual(100, hits.TotalHits, "should be 100 hits");
        foreach (ScoreDoc doc in hits.ScoreDocs)
        {
            Assert.AreEqual(3, doc.Score, doc.Score + " does not equal: " + 3);
        }
    }
}
//browse the last 10 results
/// <summary>
/// Moves the browsing window back one page (10 documents) and formats those
/// hits — rank, id, title, author, bibliographic info, and the first sentence
/// of the abstract — into a display string. Also repopulates <c>option</c>
/// with the page's document ids and rewinds <c>numofdoc</c>.
/// </summary>
public string Lastten()
{
    string output = "can't be backwards";
    int startnum = numofdoc - 20;
    if (startnum >= 0)
    {
        int endnum = startnum + 9;
        option.Clear();

        // FIX: repeated string concatenation in the loop was O(n^2); build the
        // report with a StringBuilder instead. The user-facing typos
        // "relavant" and "teh" are also corrected.
        var report = new System.Text.StringBuilder();
        report.Append("There are ").Append(numofrelevant.ToString()).Append(" relevant documents.\r\n");
        if (numofrelevant > 0)
        {
            report.Append("The documents ranked from ").Append((startnum + 1).ToString()).Append(" to ").Append((endnum + 1).ToString()).Append(" as follow:\r\n");
        }

        for (int i = startnum; i <= endnum; i++)
        {
            ScoreDoc scoredoc = docs.ScoreDocs[i];
            Document doc1 = searcher.Doc(scoredoc.Doc);
            option.Add(doc1.Get(DocID));
            report.Append("Rank ").Append((i + 1).ToString()).Append(": ").Append(DocID).Append(":").Append(doc1.Get(DocID)).Append("\r\n");
            report.Append(TITLE).Append(":").Append(doc1.Get(TITLE)).Append("\r\n");
            report.Append(AUTHOR).Append(":").Append(doc1.Get(AUTHOR)).Append("\r\n");
            report.Append(BIBLiINFO).Append(":").Append(doc1.Get(BIBLiINFO)).Append("\r\n");

            char[] symbols = { '.', '?', '!' };
            string[] sentences = doc1.Get(ABSTRACT).ToString().Split(symbols, StringSplitOptions.RemoveEmptyEntries);
            foreach (string sentence in sentences)
            {
                if (sentence.Length > 0)
                {
                    report.Append("The first sentence of the abstract:").Append(sentence).Append("\r\n");
                    break; // only the first non-empty sentence is shown
                }
            }
        }
        numofdoc = endnum + 1;
        output = report.ToString();
    }
    return output;
}
/// <summary>
/// Orders two hits by their term bytes, decoded to strings; docs with empty
/// (missing) values sort before everything else.
/// </summary>
public override int Compare(ScoreDoc doc1, ScoreDoc doc2)
{
    // Materialize the term bytes for both documents.
    var left = new BytesRef();
    var right = new BytesRef();
    m_values.Get(doc1.Doc, left);
    m_values.Get(doc2.Doc, right);

    bool leftEmpty = left.Length == 0;
    bool rightEmpty = right.Length == 0;
    if (leftEmpty && rightEmpty)
    {
        return 0;
    }
    if (leftEmpty)
    {
        return -1;
    }
    if (rightEmpty)
    {
        return 1;
    }

    return left.Utf8ToString().CompareTo(right.Utf8ToString());
}
/// <summary>
/// Runs <paramref name="query"/> sorted by the externally supplied id order
/// (optionally reversed) and returns the matched documents' integer ids.
/// Disposes the searcher when done.
/// </summary>
private static IEnumerable<int> Search(Searcher searcher, Query query, IEnumerable<int> sortOrder, bool reversed = false)
{
    var sort = new Sort(new SortField(IdFieldName, new OpressComparatorSource<int>(sortOrder, Lucene.Net.Search.FieldCache_Fields.DEFAULT.GetInts), reversed));
    TopDocs hits = searcher.Search(query, null, MaxResult, sort);

    ScoreDoc[] matches = hits.ScoreDocs;
    var ids = new int[matches.Length];
    for (int i = 0; i < matches.Length; i++)
    {
        Document doc = searcher.Doc(matches[i].Doc);
        ids[i] = int.Parse(doc.Get(IdFieldName));
        Trace.WriteLine(string.Format("Matched id = {0}, Name = {1}", doc.Get(IdFieldName), doc.Get(NameFieldName)));
    }

    searcher.Dispose();
    return ids;
}
/// <summary>
/// Parses <paramref name="queryString"/> against field "B" and returns up to
/// 2 * hitsPerPage matching documents in score order.
/// </summary>
public List<Document> Search(string queryString)
{
    var documents = new List<Document>();
    var parser = new QueryParser(Lucene.Net.Util.Version.LUCENE_30, "B", new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_30));
    Query query = parser.Parse(queryString);

    int hitsPerPage = 5;
    var collector = TopScoreDocCollector.Create(2 * hitsPerPage, true);
    searcher.Search(query, collector);
    ScoreDoc[] hits = collector.TopDocs().ScoreDocs;

    // FIX: the old bound "TotalHits > 10 ? 10 : TotalHits" hard-coded the
    // magic number 10, silently duplicating 2 * hitsPerPage; it would overrun
    // the hits array if the collector size were ever changed. hits.Length is
    // already min(TotalHits, collector capacity). The unused docScore local
    // was removed.
    for (int i = 0; i < hits.Length; i++)
    {
        int docId = hits[i].Doc;
        documents.Add(searcher.Doc(docId));
    }
    return documents;
}
/// <summary>
/// Asserts that two TopDocs are equivalent: same totals, same max score, and
/// hit-for-hit identical docids, scores, and (for FieldDocs) sort field values.
/// </summary>
public static void AssertEquals(TopDocs expected, TopDocs actual)
{
    Assert.AreEqual(expected.TotalHits, actual.TotalHits, "wrong total hits");
    Assert.AreEqual(expected.MaxScore, actual.MaxScore, "wrong maxScore");
    Assert.AreEqual(expected.ScoreDocs.Length, actual.ScoreDocs.Length, "wrong hit count");

    for (int i = 0; i < expected.ScoreDocs.Length; i++)
    {
        ScoreDoc want = expected.ScoreDocs[i];
        ScoreDoc got = actual.ScoreDocs[i];
        Assert.AreEqual(want.Doc, got.Doc, "wrong hit docID");
        Assert.AreEqual(want.Score, got.Score, "wrong hit score");

        if (want is FieldDoc wantField)
        {
            Assert.IsTrue(got is FieldDoc);
            Assert.AreEqual(wantField.Fields, ((FieldDoc)got).Fields, "wrong sort field values");
        }
        else
        {
            Assert.IsFalse(got is FieldDoc);
        }
    }
}
public virtual void TestAverageFunction()
{
    // Adjacent terms carry payloads 2 and 4 and every similarity factor is 1,
    // so all 10 hits should score the payload average: (2+4)/2 = 3.
    PayloadNearQuery query = NewPhraseQuery("field", "twenty two", true, new AveragePayloadFunction());
    QueryUtils.Check(query);

    TopDocs hits = Searcher.Search(query, null, 100);
    Assert.IsTrue(hits != null, "hits is null and it shouldn't be");
    Assert.AreEqual(10, hits.TotalHits, "should be 10 hits");

    for (int j = 0; j < hits.ScoreDocs.Length; j++)
    {
        ScoreDoc doc = hits.ScoreDocs[j];
        Assert.AreEqual(3, doc.Score, doc.Score + " does not equal: " + 3);

        // The explanation must both name the payload function and agree on the value.
        Explanation explain = Searcher.Explain(query, hits.ScoreDocs[j].Doc);
        string exp = explain.ToString();
        Assert.IsTrue(exp.IndexOf("AveragePayloadFunction") > -1, exp);
        Assert.AreEqual(3f, explain.Value, hits.ScoreDocs[j].Score + " explain value does not equal: " + 3);
    }
}
/// <summary>
/// Multi-field paged keyword search over the index, mapping each hit onto a
/// new <typeparamref name="T"/> by copying stored field values into the
/// properties named in <paramref name="feilds"/> (all properties when null).
/// (Doc comment translated from Chinese.)
/// </summary>
/// <typeparam name="T">Result type; must have a public parameterless constructor.</typeparam>
/// <param name="keyword">Query text.</param>
/// <param name="searchFeilds">Index fields to search.</param>
/// <param name="feilds">Property names to populate; null means all.</param>
/// <param name="indexPage">One-based page number.</param>
/// <param name="pageSize">Hits per page.</param>
/// <returns>The page of mapped results.</returns>
public List<T> SearchMultiField<T>(string keyword, string[] searchFeilds, List<string> feilds, int indexPage, int pageSize) where T : class, new()
{
    FSDirectory directory = FSDirectory.Open(new DirectoryInfo(indexPath), new NoLockFactory());
    IndexReader reader = IndexReader.Open(directory, true);
    IndexSearcher searcher = new IndexSearcher(reader);
    try
    {
        Analyzer stander = new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_CURRENT);
        MultiFieldQueryParser parser = new MultiFieldQueryParser(Lucene.Net.Util.Version.LUCENE_30, searchFeilds, stander);
        Query query = parser.Parse(keyword);

        TopDocs tds = searcher.Search(query, int.MaxValue);

        // Page boundaries (one-based page index); FIX: use .Length instead of
        // the LINQ .Count() extension on the ScoreDocs array.
        int begin = pageSize * (indexPage - 1);
        int end = Math.Min(begin + pageSize, tds.ScoreDocs.Length);

        List<T> objArr = new List<T>();
        for (int i = begin; i < end; i++)
        {
            ScoreDoc s = tds.ScoreDocs[i];
            Document doc = searcher.Doc(s.Doc);

            // Map stored field values onto the requested public properties.
            T obj = new T();
            PropertyInfo[] memberInfos = typeof(T).GetProperties();
            foreach (PropertyInfo propertyInfo in memberInfos)
            {
                if (feilds == null || feilds.Contains(propertyInfo.Name))
                {
                    propertyInfo.SetValue(obj, doc.Get(propertyInfo.Name), null);
                }
            }
            objArr.Add(obj);
        }
        return objArr;
    }
    finally
    {
        // FIX: the searcher, reader, and directory were never released,
        // leaking index file handles on every call.
        searcher.Dispose();
        reader.Dispose();
        directory.Dispose();
    }
}
/// <summary>
/// Converts Lucene document to index model.
/// </summary>
/// <param name="query">The query.</param>
/// <param name="hit">The scoredoc.</param>
/// <returns>Index model, or null when the hit cannot be converted.</returns>
private IndexModelBase LuceneDocToIndexModel(Query query, ScoreDoc hit)
{
    var doc = _indexSearcher.Doc(hit.Doc);
    IndexModelBase document = new IndexModelBase();

    try
    {
        var hitJsonField = doc.GetField("JSON");
        if (hitJsonField != null)
        {
            JObject jObject = JObject.Parse(hitJsonField.GetStringValue());

            // Rehydrate into the concrete model type recorded in the JSON when
            // it can be resolved; otherwise fall back to the base model type.
            Type indexModelType = Type.GetType($"{ jObject["IndexModelType"].ToStringSafe() }, { jObject["IndexModelAssembly"].ToStringSafe() }");
            document = indexModelType != null
                ? ( IndexModelBase )jObject.ToObject(indexModelType)
                : jObject.ToObject<IndexModelBase>();
        }

        Explanation explanation = _indexSearcher.Explain(query, hit.Doc);
        document["Explain"] = explanation.ToString();
        document.Score = hit.Score;

        return document;
    }
    catch
    {
        // Deliberately best-effort: ignore if the result produced an exception
        // (most likely cause is getting a result from a non-rock index).
    }

    return null;
}
/// <summary>
/// Restores the heap property by sifting the node at index <paramref name="i"/>
/// downward until neither child compares smaller. The backing array is
/// 1-based: the children of node i live at 2i and 2i+1, and m_size is the
/// index of the last occupied slot.
/// </summary>
/// <param name="i">1-based index of the node to sift down.</param>
private void DownHeap(int i)
{
    ScoreDoc node = m_heap[i]; // save top node
    int j = i << 1; // find smaller child
    int k = j + 1;
    // Pick the smaller of the two children (right child only if it exists).
    if (k <= m_size && Compare(m_heap[k], m_heap[j]) < 0)
    {
        j = k;
    }
    // Keep pulling the smaller child up while it compares below the saved node.
    while (j <= m_size && Compare(m_heap[j], node) < 0)
    {
        m_heap[i] = m_heap[j]; // shift up child
        i = j;
        j = i << 1;
        k = j + 1;
        if (k <= m_size && Compare(m_heap[k], m_heap[j]) < 0)
        {
            j = k;
        }
    }
    m_heap[i] = node; // install saved node
}
/// <summary>
/// Builds the autocomplete "versions" JSON payload: the version list of the
/// top hit, or an empty data array when nothing matched.
/// </summary>
static JObject AutoCompleteMakeVersionResult(NuGetSearcherManager searcherManager, bool includePrerelease, TopDocs topDocs)
{
    // NOTE(review): includePrerelease is currently unused here — confirm
    // whether prerelease filtering was meant to happen in GetVersionLists.
    var result = new JObject
    {
        { "@context", new JObject { { "@vocab", "http://schema.nuget.org/schema#" } } },
        { "indexName", searcherManager.IndexName }
    };

    if (topDocs.TotalHits > 0)
    {
        JArray versions = searcherManager.GetVersionLists(topDocs.ScoreDocs[0].Doc);
        result.Add("totalHits", versions.Count());
        result["data"] = versions;
    }
    else
    {
        result.Add("totalHits", 0);
        result["data"] = new JArray();
    }

    return result;
}
/// <summary>
/// Writes the "data" JSON array holding the top hit's version strings —
/// stable versions only unless prerelease is requested; empty when there are
/// no hits.
/// </summary>
private static void WriteVersions(JsonWriter jsonWriter, NuGetIndexSearcher searcher, bool includePrerelease, NuGetVersion semVerLevel, TopDocs topDocs)
{
    bool includeSemVer2 = SemVerHelpers.ShouldIncludeSemVer2Results(semVerLevel);

    jsonWriter.WritePropertyName("data");
    jsonWriter.WriteStartArray();

    if (topDocs.TotalHits > 0)
    {
        int topDocId = topDocs.ScoreDocs[0].Doc;
        var versionList = searcher.Versions[topDocId];
        var versions = includePrerelease
            ? versionList.GetVersions(onlyListed: true, includeSemVer2: includeSemVer2)
            : versionList.GetStableVersions(onlyListed: true, includeSemVer2: includeSemVer2);
        foreach (var version in versions)
        {
            jsonWriter.WriteValue(version);
        }
    }

    jsonWriter.WriteEndArray();
}
/// <summary>
/// Builds the autocomplete JSON result: one page of package ids plus totals,
/// with per-hit score explanations when requested.
/// </summary>
public static JToken AutoCompleteMakeResult(IndexSearcher searcher, TopDocs topDocs, int skip, int take, NuGetSearcherManager searcherManager, bool includeExplanation, Query query)
{
    // Shared page boundary: [skip, skip+take) clamped to the returned hits.
    int end = Math.Min(skip + take, topDocs.ScoreDocs.Length);

    JArray array = new JArray();
    for (int i = skip; i < end; i++)
    {
        Document document = searcher.Doc(topDocs.ScoreDocs[i].Doc);
        array.Add(document.Get("Id"));
    }

    JObject result = new JObject();
    result.Add("@context", new JObject { { "@vocab", "http://schema.nuget.org/schema#" } });
    result.Add("totalHits", topDocs.TotalHits);
    result.Add("indexName", searcherManager.IndexName);
    result.Add("data", array);

    if (includeExplanation)
    {
        JArray explanations = new JArray();
        for (int i = skip; i < end; i++)
        {
            Explanation explanation = searcher.Explain(query, topDocs.ScoreDocs[i].Doc);
            explanations.Add(explanation.ToString());
        }
        result.Add("explanations", explanations);
    }

    return result;
}
/// <summary>
/// Verifies the hit count, then loads each hit's stored document and term
/// vectors so any index problem surfaces as an exception.
/// </summary>
private void DoTestHits(ScoreDoc[] hits, int expectedCount, IndexReader reader)
{
    Assert.AreEqual(expectedCount, hits.Length, "wrong number of hits");
    foreach (ScoreDoc hit in hits)
    {
        reader.Document(hit.Doc);
        reader.GetTermVectors(hit.Doc);
    }
}
/// <summary>
/// Returns the hit's nested field values, translated to strings and wrapped in
/// a comparator suitable for sorting.
/// </summary>
public override IComparable Value(ScoreDoc doc)
{
    var translated = _dataCache.NestedArray.GetTranslatedData(doc.Doc, _dataCache.ValArray);
    return new StringArrayComparator(translated);
}
/// <summary>
/// Creates one of these objects. </summary>
/// <param name="totalHits"> Total number of hits for the query. </param>
/// <param name="scoreDocs"> The top hits for the query. </param>
/// <param name="fields"> The sort criteria used to find the top hits. </param>
/// <param name="maxScore"> The maximum score encountered. </param>
public TopFieldDocs(int totalHits, ScoreDoc[] scoreDocs, SortField[] fields, float maxScore)
    : base(totalHits, scoreDocs, maxScore)
{
    // Hits/scores are handled by the TopDocs base; only the sort criteria are stored here.
    this.Fields = fields;
}
//JAVA TO C# CONVERTER WARNING: Method 'throws' clauses are not available in .NET:
//ORIGINAL LINE: protected void compareRanks(ScoreDoc[] hits, int[] ranks) throws Exception
/// <summary>
/// Asserts that the hits' docids appear in exactly the order given by
/// <paramref name="ranks"/> (and that the counts match).
/// </summary>
protected internal virtual void compareRanks(ScoreDoc[] hits, int[] ranks)
{
    assertEquals(ranks.Length, hits.Length);
    for (int i = 0; i < ranks.Length; i++)
    {
        // FIX: ScoreDoc exposes the docid as the PascalCase property 'Doc' in
        // Lucene.Net (consistent with every other method in this codebase);
        // the lowercase 'doc' was Java-converter residue and did not compile.
        assertEquals(ranks[i], hits[i].Doc);
    }
}