private float freq; // phrase frequency in current doc as computed by phraseFreq().

internal PhraseScorer(Weight weight, TermPositions[] tps, int[] offsets, Similarity similarity, byte[] norms)
    : base(similarity)
{
    this.norms = norms;
    this.weight = weight;
    this.value_Renamed = weight.Value;

    // Convert tps to a list of phrase positions.
    // NOTE: phrase-position differs from term-position in that its position
    // reflects the phrase offset: pp.pos = tp.pos - offset.
    // This makes an exact phrase match easy to identify: all PhrasePositions
    // then report exactly the same position.
    for (int i = 0; i < tps.Length; i++)
    {
        PhrasePositions pp = new PhrasePositions(tps[i], offsets[i]);
        if (last != null)
        {
            // add next to end of list
            last.next = pp;
        }
        else
        {
            first = pp;
        }
        last = pp;
    }

    pq = new PhraseQueue(tps.Length); // construct empty pq
    first.doc = -1;
}
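// A minimal sketch (not part of the Lucene.NET source) illustrating the offset
// normalization described above: pp.pos = tp.pos - offset. For the phrase
// "quick brown fox" the terms carry query offsets 0, 1 and 2; subtracting each
// offset from the raw term position makes all three adjusted positions coincide
// exactly where the phrase matches. All values below are hypothetical.
internal static class PhraseOffsetSketch
{
    internal static void Demo()
    {
        int[] offsets = { 0, 1, 2 };        // query offsets of "quick", "brown", "fox"
        int[] termPositions = { 7, 8, 9 };  // raw positions reported for one document
        for (int i = 0; i < offsets.Length; i++)
        {
            int adjusted = termPositions[i] - offsets[i];
            System.Console.WriteLine(adjusted); // prints 7 three times => exact match at position 7
        }
    }
}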
public TermWeight(TermQuery enclosingInstance, Searcher searcher)
{
    InitBlock(enclosingInstance);
    this.similarity = Enclosing_Instance.GetSimilarity(searcher);
    idfExp = similarity.IdfExplain(Enclosing_Instance.term, searcher);
    idf = idfExp.Idf;
}
internal MatchAllScorer(MatchAllDocsQuery enclosingInstance, IndexReader reader, Similarity similarity, Weight w, byte[] norms)
    : base(similarity)
{
    InitBlock(enclosingInstance);
    this.termDocs = reader.TermDocs(null);
    score = w.Value;
    this.norms = norms;
}
/// <summary> Construct a <c>TermScorer</c>.</summary>
/// <param name="weight">The weight of the <c>Term</c> in the query.</param>
/// <param name="td">An iterator over the documents matching the <c>Term</c>.</param>
/// <param name="similarity">The <c>Similarity</c> implementation to be used for score computations.</param>
/// <param name="norms">The field norms of the document fields for the <c>Term</c>.</param>
public /*internal*/ TermScorer(Weight weight, TermDocs td, Similarity similarity, byte[] norms)
    : base(similarity)
{
    this.weight = weight;
    this.termDocs = td;
    this.norms = norms;
    this.weightValue = weight.Value;

    for (int i = 0; i < SCORE_CACHE_SIZE; i++)
        scoreCache[i] = Similarity.Tf(i) * weightValue;
}
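// A minimal sketch (hypothetical helper, not the Lucene.NET TermScorer API) of how a
// tf() cache like the one built above is typically consulted at scoring time: small,
// common term frequencies hit the precomputed table, while larger frequencies fall
// back to computing tf() on the fly.
internal static class TfCacheSketch
{
    internal static float Score(int freq, float[] scoreCache, Similarity similarity, float weightValue)
    {
        return freq < scoreCache.Length
            ? scoreCache[freq]                    // cached: tf(freq) * weightValue
            : similarity.Tf(freq) * weightValue;  // uncached: compute on the fly
    }
}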
public ConjunctionScorer(Similarity similarity, params Scorer[] scorers)
    : base(similarity)
{
    this.scorers = scorers;
    coord = similarity.Coord(scorers.Length, scorers.Length);

    for (int i = 0; i < scorers.Length; i++)
    {
        if (scorers[i].NextDoc() == NO_MORE_DOCS)
        {
            // If even one of the sub-scorers does not have any documents, this
            // scorer should not attempt to do any more work.
            lastDoc = NO_MORE_DOCS;
            return;
        }
    }

    // Sort the array the first time...
    // We don't need to sort the array in any future calls because we know
    // it will already start off sorted (all scorers on the same doc).
    // Note that this comparer is not consistent with Equals!
    System.Array.Sort(scorers, (a, b) => a.DocID() - b.DocID());

    // NOTE: DoNext() must be called before the re-sorting of the array later on.
    // The reason is this: assume there are 5 scorers, whose first docs are 1,
    // 2, 3, 5, 5 respectively. Sorting (above) leaves the array as is. Calling
    // DoNext() here advances all the scorers to 5 (or a larger doc ID they all
    // agree on).
    // However, if we re-sort before DoNext() is called, the order will be 5, 3,
    // 2, 1, 5 and then DoNext() will stop immediately, since the first scorer's
    // doc equals the last one. So the invariant that after calling DoNext()
    // all scorers are on the same doc ID would be broken.
    if (DoNext() == NO_MORE_DOCS)
    {
        // The scorers did not agree on any document.
        lastDoc = NO_MORE_DOCS;
        return;
    }

    // If first-time skip distance is any predictor of scorer sparseness, then
    // we should always try to skip first on those scorers.
    // Keep the last scorer in its last place (it will be the first to be
    // skipped on), but reverse all of the others so that they will be skipped
    // on in order of original high skip.
    int end = scorers.Length - 1;
    int max = end >> 1;
    for (int i = 0; i < max; i++)
    {
        Scorer tmp = scorers[i];
        int idx = end - i - 1;
        scorers[i] = scorers[idx];
        scorers[idx] = tmp;
    }
}
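// A minimal sketch (illustrative only, not part of ConjunctionScorer) of the final
// reordering above: the last scorer stays in place while the others are reversed, so
// the scorers that originally skipped the furthest are consulted first. Shown here on
// plain doc IDs instead of Scorer instances.
internal static class ReverseAllButLastSketch
{
    internal static void Demo()
    {
        int[] docIds = { 1, 2, 3, 5, 5 };   // hypothetical first doc IDs, already sorted
        int end = docIds.Length - 1;
        int max = end >> 1;
        for (int i = 0; i < max; i++)
        {
            int idx = end - i - 1;
            int tmp = docIds[i];
            docIds[i] = docIds[idx];
            docIds[idx] = tmp;
        }
        // docIds is now { 5, 3, 2, 1, 5 }: the first four entries reversed, the last untouched.
        System.Console.WriteLine(string.Join(", ", docIds));
    }
}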
internal DocumentsWriter(Directory directory, IndexWriter writer, IndexingChain indexingChain)
{
    InitBlock();
    this.directory = directory;
    this.writer = writer;
    this.similarity = writer.Similarity;
    flushedDocCount = writer.MaxDoc();
    consumer = indexingChain.GetChain(this);
    if (consumer is DocFieldProcessor)
    {
        docFieldProcessor = (DocFieldProcessor) consumer;
    }
}
public ConstantWeight(ConstantScoreQuery enclosingInstance, Searcher searcher)
{
    InitBlock(enclosingInstance);
    this.similarity = Enclosing_Instance.GetSimilarity(searcher);
}
public ConstantScorer(ConstantScoreQuery enclosingInstance, Similarity similarity, IndexReader reader, Weight w)
    : base(similarity)
{
    InitBlock(enclosingInstance);
    theScore = w.Value;
    DocIdSet docIdSet = Enclosing_Instance.internalFilter.GetDocIdSet(reader);
    if (docIdSet == null)
    {
        docIdSetIterator = DocIdSet.EMPTY_DOCIDSET.Iterator();
    }
    else
    {
        DocIdSetIterator iter = docIdSet.Iterator();
        if (iter == null)
        {
            docIdSetIterator = DocIdSet.EMPTY_DOCIDSET.Iterator();
        }
        else
        {
            docIdSetIterator = iter;
        }
    }
}
internal ExactPhraseScorer(Weight weight, TermPositions[] tps, int[] offsets, Similarity similarity, byte[] norms):base(weight, tps, offsets, similarity, norms) { }
public override Explanation Explain(IndexReader reader, int doc)
{
    Explanation result = new Explanation();
    result.Description = "weight(" + Query + " in " + doc + "), product of:";

    System.Text.StringBuilder docFreqs = new System.Text.StringBuilder();
    System.Text.StringBuilder query = new System.Text.StringBuilder();
    query.Append('\"');
    docFreqs.Append(idfExp.Explain());
    for (int i = 0; i < Enclosing_Instance.terms.Count; i++)
    {
        if (i != 0)
        {
            query.Append(" ");
        }
        Term term = Enclosing_Instance.terms[i];
        query.Append(term.Text);
    }
    query.Append('\"');

    Explanation idfExpl = new Explanation(idf, "idf(" + Enclosing_Instance.field + ":" + docFreqs + ")");

    // explain query weight
    Explanation queryExpl = new Explanation();
    queryExpl.Description = "queryWeight(" + Query + "), product of:";

    Explanation boostExpl = new Explanation(Enclosing_Instance.Boost, "boost");
    if (Enclosing_Instance.Boost != 1.0f)
    {
        queryExpl.AddDetail(boostExpl);
    }
    queryExpl.AddDetail(idfExpl);

    Explanation queryNormExpl = new Explanation(queryNorm, "queryNorm");
    queryExpl.AddDetail(queryNormExpl);

    queryExpl.Value = boostExpl.Value * idfExpl.Value * queryNormExpl.Value;
    result.AddDetail(queryExpl);

    // explain field weight
    Explanation fieldExpl = new Explanation();
    fieldExpl.Description = "fieldWeight(" + Enclosing_Instance.field + ":" + query + " in " + doc + "), product of:";

    PhraseScorer scorer = (PhraseScorer) Scorer(reader, true, false);
    if (scorer == null)
    {
        return new Explanation(0.0f, "no matching docs");
    }

    Explanation tfExplanation = new Explanation();
    int d = scorer.Advance(doc);
    float phraseFreq = (d == doc) ? scorer.CurrentFreq() : 0.0f;
    tfExplanation.Value = similarity.Tf(phraseFreq);
    tfExplanation.Description = "tf(phraseFreq=" + phraseFreq + ")";
    fieldExpl.AddDetail(tfExplanation);
    fieldExpl.AddDetail(idfExpl);

    Explanation fieldNormExpl = new Explanation();
    byte[] fieldNorms = reader.Norms(Enclosing_Instance.field);
    float fieldNorm = fieldNorms != null ? Similarity.DecodeNorm(fieldNorms[doc]) : 1.0f;
    fieldNormExpl.Value = fieldNorm;
    fieldNormExpl.Description = "fieldNorm(field=" + Enclosing_Instance.field + ", doc=" + doc + ")";
    fieldExpl.AddDetail(fieldNormExpl);

    fieldExpl.Value = tfExplanation.Value * idfExpl.Value * fieldNormExpl.Value;
    result.AddDetail(fieldExpl);

    // combine them
    result.Value = queryExpl.Value * fieldExpl.Value;
    if (queryExpl.Value == 1.0f)
    {
        return fieldExpl;
    }
    return result;
}
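// A worked sketch (hypothetical numbers, not taken from any real index) of the product
// this Explain() reports: the final score is queryWeight * fieldWeight, where
// queryWeight = boost * idf * queryNorm and fieldWeight = tf(phraseFreq) * idf * fieldNorm.
internal static class PhraseExplainSketch
{
    internal static void Demo()
    {
        float boost = 1.0f;
        float idf = 2.0f;                            // hypothetical idf of the phrase terms
        float queryNorm = 0.5f;                      // hypothetical query normalization factor
        float tf = (float) System.Math.Sqrt(2.0);    // e.g. DefaultSimilarity tf for phraseFreq = 2
        float fieldNorm = 0.25f;                     // hypothetical decoded field norm

        float queryWeight = boost * idf * queryNorm;           // 1.0
        float fieldWeight = tf * idf * fieldNorm;               // ~0.707
        System.Console.WriteLine(queryWeight * fieldWeight);    // ~0.707
    }
}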
public AnonymousClassIDFExplanation1(int df, int max, float idf, Similarity enclosingInstance) { InitBlock(df, max, idf, enclosingInstance); }
public /*internal*/ BooleanScorer(Similarity similarity, int minNrShouldMatch, System.Collections.Generic.List<Scorer> optionalScorers, System.Collections.Generic.List<Scorer> prohibitedScorers)
    : base(similarity)
{
    InitBlock();
    this.minNrShouldMatch = minNrShouldMatch;

    if (optionalScorers != null && optionalScorers.Count > 0)
    {
        foreach (Scorer scorer in optionalScorers)
        {
            maxCoord++;
            if (scorer.NextDoc() != NO_MORE_DOCS)
            {
                scorers = new SubScorer(scorer, false, false, bucketTable.NewCollector(0), scorers);
            }
        }
    }

    if (prohibitedScorers != null && prohibitedScorers.Count > 0)
    {
        foreach (Scorer scorer in prohibitedScorers)
        {
            int mask = nextMask;
            nextMask = nextMask << 1;
            prohibitedMask |= mask; // update prohibited mask
            if (scorer.NextDoc() != NO_MORE_DOCS)
            {
                scorers = new SubScorer(scorer, false, true, bucketTable.NewCollector(mask), scorers);
            }
        }
    }

    coordFactors = new float[maxCoord];
    Similarity sim = Similarity;
    for (int i = 0; i < maxCoord; i++)
    {
        coordFactors[i] = sim.Coord(i, maxCoord - 1);
    }
}
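// A minimal sketch (illustrative only; the initial value of nextMask is assumed to be 1,
// which is not shown in the snippet above) of how each prohibited scorer receives its own
// bit and how prohibitedMask accumulates them: with three prohibited scorers the masks
// are 1, 2 and 4, and prohibitedMask ends up as 7 (binary 111).
internal static class ProhibitedMaskSketch
{
    internal static void Demo()
    {
        int nextMask = 1;        // assumed starting value
        int prohibitedMask = 0;
        for (int i = 0; i < 3; i++)              // three hypothetical prohibited scorers
        {
            int mask = nextMask;
            nextMask = nextMask << 1;
            prohibitedMask |= mask;
            System.Console.WriteLine("scorer " + i + " gets mask " + mask);
        }
        System.Console.WriteLine("prohibitedMask = " + prohibitedMask); // 7
    }
}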
public MatchAllDocsWeight(MatchAllDocsQuery enclosingInstance, Searcher searcher)
{
    InitBlock(enclosingInstance);
    this.similarity = searcher.Similarity;
}
/// <summary>Construct a <see cref="Similarity" /> that delegates all methods to another.</summary>
/// <param name="delegee">the Similarity implementation to delegate to</param>
public SimilarityDelegator(Similarity delegee)
{
    this.delegee = delegee;
}
public MultiPhraseWeight(MultiPhraseQuery enclosingInstance, Searcher searcher)
{
    InitBlock(enclosingInstance);
    this.similarity = Enclosing_Instance.GetSimilarity(searcher);

    // compute idf
    int maxDoc = searcher.MaxDoc;
    foreach (Term[] terms in enclosingInstance.termArrays)
    {
        foreach (Term term in terms)
        {
            idf += similarity.Idf(searcher.DocFreq(term), maxDoc);
        }
    }
}
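// A worked sketch (hypothetical counts, not from a real index) of the idf accumulation
// above: for a multi-phrase query with the alternatives ("app") at position 0 and
// ("store", "shop") at position 1, the weight sums the idf of every alternative term.
// DefaultSimilarity's idf formula, 1 + ln(maxDoc / (docFreq + 1)), is assumed here.
internal static class MultiPhraseIdfSketch
{
    internal static void Demo()
    {
        int maxDoc = 1000;
        int[] docFreqs = { 9, 49, 99 };   // hypothetical docFreq for "app", "store", "shop"
        float idf = 0f;
        foreach (int docFreq in docFreqs)
        {
            idf += (float) (System.Math.Log(maxDoc / (double) (docFreq + 1)) + 1.0);
        }
        System.Console.WriteLine(idf);    // sum of the three per-term idf values
    }
}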
internal SloppyPhraseScorer(Weight weight, TermPositions[] tps, int[] offsets, Similarity similarity, int slop, byte[] norms):base(weight, tps, offsets, similarity, norms) { this.slop = slop; }
internal void SetSimilarity(Similarity similarity)
{
    lock (this)
    {
        this.similarity = similarity;
        for (int i = 0; i < threadStates.Length; i++)
            threadStates[i].docState.similarity = similarity;
    }
}
public AnonymousClassIDFExplanation3(float fIdf, System.Text.StringBuilder exp, Similarity enclosingInstance) { InitBlock(fIdf, exp, enclosingInstance); }
/// <summary>Expert: Set the Similarity implementation used by this IndexWriter.</summary>
public virtual void SetSimilarity(Similarity similarity)
{
    EnsureOpen();
    this.similarity = similarity;
    docWriter.SetSimilarity(similarity);
}
private void InitBlock(int df, int max, float idf, Similarity enclosingInstance)
{
    this.df = df;
    this.max = max;
    this.idf = idf;
    this.enclosingInstance = enclosingInstance;
}
private readonly int maxDoc; // document count

public CachedDfSource(Dictionary<Term, int> dfMap, int maxDoc, Similarity similarity)
{
    this.dfMap = dfMap;
    this.maxDoc = maxDoc;
    Similarity = similarity;
}
private void InitBlock(float fIdf, System.Text.StringBuilder exp, Similarity enclosingInstance)
{
    this.fIdf = fIdf;
    this.exp = exp;
    this.enclosingInstance = enclosingInstance;
}
public ConjunctionScorer(Similarity similarity, System.Collections.Generic.ICollection<Scorer> scorers) : this(similarity, scorers.ToArray()) { }