public override sealed Explanation Explain(BasicStats stats)
{
    // Generic lambda explanation: the value is CalculateLambda(stats) and the
    // inputs shown are the statistics it is derived from.
    Explanation result = new Explanation
    {
        Description = this.GetType().Name + ", computed from: ",
        Value = CalculateLambda(stats)
    };
    result.AddDetail(new Explanation(stats.DocFreq, "docFreq"));
    result.AddDetail(new Explanation(stats.NumberOfDocuments, "numberOfDocuments"));
    return result;
}
public override sealed Explanation Explain(BasicStats stats, float tfn)
{
    // Generic explanation for models scored from tfn, totalTermFreq and docFreq.
    Explanation result = new Explanation
    {
        Description = this.GetType().Name + ", computed from: ",
        Value = Score(stats, tfn)
    };
    result.AddDetail(new Explanation(tfn, "tfn"));
    result.AddDetail(new Explanation(stats.TotalTermFreq, "totalTermFreq"));
    result.AddDetail(new Explanation(stats.DocFreq, "docFreq"));
    return result;
}
/// <summary>
/// Returns an explanation for the score.
/// <p>Most basic models use the number of documents and the total term
/// frequency to compute Inf<sub>1</sub>. This method provides a generic
/// explanation for such models. Subclasses that use other statistics must
/// override this method.</p>
/// </summary>
public virtual Explanation Explain(BasicStats stats, float tfn)
{
    Explanation result = new Explanation
    {
        Description = this.GetType().Name + ", computed from: ",
        Value = Score(stats, tfn)
    };
    result.AddDetail(new Explanation(tfn, "tfn"));
    result.AddDetail(new Explanation(stats.NumberOfDocuments, "numberOfDocuments"));
    result.AddDetail(new Explanation(stats.TotalTermFreq, "totalTermFreq"));
    return result;
}
/// <summary>
/// Returns an explanation for the normalized term frequency.
/// <p>The default normalization methods use the field length of the document
/// and the average field length to compute the normalized term frequency.
/// This method provides a generic explanation for such methods.
/// Subclasses that use other statistics must override this method.</p>
/// </summary>
public virtual Explanation Explain(BasicStats stats, float tf, float len)
{
    Explanation result = new Explanation
    {
        Description = this.GetType().Name + ", computed from: ",
        Value = Tfn(stats, tf, len)
    };
    result.AddDetail(new Explanation(tf, "tf"));
    result.AddDetail(new Explanation(stats.AvgFieldLength, "avgFieldLength"));
    result.AddDetail(new Explanation(len, "len"));
    return result;
}
// Builds the classic TF-IDF score explanation for one document:
// score = queryWeight * fieldWeight, where queryWeight = boost * idf * queryNorm
// and fieldWeight = tf * idf * fieldNorm.
private Explanation ExplainScore(int doc, Explanation freq, IDFStats stats, NumericDocValues norms)
{
    Explanation result = new Explanation();
    // LUCENENET specific - using freq.Value is a change that was made in Lucene 5.0, but is included
    // in 4.8.0 to remove annoying newlines from the output.
    // See: https://github.com/apache/lucene-solr/commit/f0bfcbc7d8fbc5bb2791da60af559e8b0ad6eed6
    result.Description = "score(doc=" + doc + ",freq=" + freq.Value + "), product of:";
    // explain query weight; the boost detail is only attached when it differs from 1
    Explanation queryExpl = new Explanation();
    queryExpl.Description = "queryWeight, product of:";
    Explanation boostExpl = new Explanation(stats.QueryBoost, "boost");
    if (stats.QueryBoost != 1.0f)
    {
        queryExpl.AddDetail(boostExpl);
    }
    queryExpl.AddDetail(stats.Idf);
    Explanation queryNormExpl = new Explanation(stats.QueryNorm, "queryNorm");
    queryExpl.AddDetail(queryNormExpl);
    queryExpl.Value = boostExpl.Value * stats.Idf.Value * queryNormExpl.Value;
    result.AddDetail(queryExpl);
    // explain field weight
    Explanation fieldExpl = new Explanation();
    fieldExpl.Description = "fieldWeight in " + doc + ", product of:";
    Explanation tfExplanation = new Explanation();
    tfExplanation.Value = Tf(freq.Value);
    tfExplanation.Description = "tf(freq=" + freq.Value + "), with freq of:";
    tfExplanation.AddDetail(freq);
    fieldExpl.AddDetail(tfExplanation);
    fieldExpl.AddDetail(stats.Idf);
    Explanation fieldNormExpl = new Explanation();
    // norms may be omitted for the field; fall back to the neutral norm 1.0
    float fieldNorm = norms != null?DecodeNormValue(norms.Get(doc)) : 1.0f;
    fieldNormExpl.Value = fieldNorm;
    fieldNormExpl.Description = "fieldNorm(doc=" + doc + ")";
    fieldExpl.AddDetail(fieldNormExpl);
    fieldExpl.Value = tfExplanation.Value * stats.Idf.Value * fieldNormExpl.Value;
    result.AddDetail(fieldExpl);
    // combine them
    result.Value = queryExpl.Value * fieldExpl.Value;
    // a query weight of exactly 1 adds no information, so skip the wrapper
    if (queryExpl.Value == 1.0f)
    {
        return (fieldExpl);
    }
    return (result);
}
/// <summary>
/// Explain the custom score.
/// Whenever overriding <see cref="CustomScore(int, float, float)" />,
/// this method should also be overridden to provide the correct explanation
/// for the part of the custom scoring.
/// </summary>
/// <param name="doc">doc being explained</param>
/// <param name="subQueryExpl">explanation for the sub-query part</param>
/// <param name="valSrcExpl">explanation for the value source part</param>
/// <returns>an explanation for the custom score</returns>
public virtual Explanation CustomExplain(int doc, Explanation subQueryExpl, Explanation valSrcExpl)
{
    // Default score is sub-query score times the value-source score (if present).
    float product = subQueryExpl.GetValue();
    if (valSrcExpl != null)
    {
        product *= valSrcExpl.GetValue();
    }
    Explanation explanation = new Explanation(product, "custom score: product of:");
    explanation.AddDetail(subQueryExpl);
    explanation.AddDetail(valSrcExpl);
    return explanation;
}
// Builds the BM25 score explanation: score = boost * idf * tfNorm.
private Explanation ExplainScore(int doc, Explanation freq, BM25Stats stats, NumericDocValues norms)
{
    Explanation result = new Explanation();
    // Use freq.Value rather than the whole freq Explanation in the description;
    // freq.ToString() embeds newlines in the output. This matches the change made
    // in Lucene 5.0 and applied to the sibling TF-IDF ExplainScore.
    // See: https://github.com/apache/lucene-solr/commit/f0bfcbc7d8fbc5bb2791da60af559e8b0ad6eed6
    result.Description = "score(doc=" + doc + ",freq=" + freq.Value + "), product of:";
    // the boost detail is only attached when it differs from 1
    Explanation boostExpl = new Explanation(stats.QueryBoost * stats.TopLevelBoost, "boost");
    if (boostExpl.Value != 1.0f)
    {
        result.AddDetail(boostExpl);
    }
    result.AddDetail(stats.Idf);
    // tfNorm = freq*(k1+1) / (freq + k1*(1 - b + b*doclen/avgdl)); when norms are
    // omitted the length-normalization term collapses to 1 (b treated as 0).
    Explanation tfNormExpl = new Explanation();
    tfNormExpl.Description = "tfNorm, computed from:";
    tfNormExpl.AddDetail(freq);
    tfNormExpl.AddDetail(new Explanation(K1_Renamed, "parameter k1"));
    if (norms == null)
    {
        tfNormExpl.AddDetail(new Explanation(0, "parameter b (norms omitted for field)"));
        tfNormExpl.Value = (freq.Value * (K1_Renamed + 1)) / (freq.Value + K1_Renamed);
    }
    else
    {
        float doclen = DecodeNormValue((sbyte)norms.Get(doc));
        tfNormExpl.AddDetail(new Explanation(b, "parameter b"));
        tfNormExpl.AddDetail(new Explanation(stats.Avgdl, "avgFieldLength"));
        tfNormExpl.AddDetail(new Explanation(doclen, "fieldLength"));
        tfNormExpl.Value = (freq.Value * (K1_Renamed + 1)) / (freq.Value + K1_Renamed * (1 - b + b * doclen / stats.Avgdl));
    }
    result.AddDetail(tfNormExpl);
    result.Value = boostExpl.Value * stats.Idf.Value * tfNormExpl.Value;
    return result;
}
// Explains a span query's score for one document:
// weight = queryWeight (boost * idf * queryNorm) * fieldWeight (tf * idf * fieldNorm).
public override Explanation Explain(IndexReader reader, int doc)
{
    ComplexExplanation result = new ComplexExplanation();
    result.Description = "weight(" + Query + " in " + doc + "), product of:";
    System.String field = ((SpanQuery) Query).Field;
    Explanation idfExpl = new Explanation(idf, "idf(" + field + ": " + idfExp.Explain() + ")");
    // explain query weight; boost detail attached only when it differs from 1
    Explanation queryExpl = new Explanation();
    queryExpl.Description = "queryWeight(" + Query + "), product of:";
    Explanation boostExpl = new Explanation(Query.Boost, "boost");
    if (Query.Boost != 1.0f)
        queryExpl.AddDetail(boostExpl);
    queryExpl.AddDetail(idfExpl);
    Explanation queryNormExpl = new Explanation(queryNorm, "queryNorm");
    queryExpl.AddDetail(queryNormExpl);
    queryExpl.Value = boostExpl.Value * idfExpl.Value * queryNormExpl.Value;
    result.AddDetail(queryExpl);
    // explain field weight; tf comes from a fresh scorer advanced to this doc
    ComplexExplanation fieldExpl = new ComplexExplanation();
    fieldExpl.Description = "fieldWeight(" + field + ":" + internalQuery.ToString(field) + " in " + doc + "), product of:";
    Explanation tfExpl = ((SpanScorer)Scorer(reader, true, false)).Explain(doc);
    fieldExpl.AddDetail(tfExpl);
    fieldExpl.AddDetail(idfExpl);
    Explanation fieldNormExpl = new Explanation();
    byte[] fieldNorms = reader.Norms(field);
    // norms may be absent for the field; fall back to the neutral norm 1.0
    float fieldNorm = fieldNorms != null?Similarity.DecodeNorm(fieldNorms[doc]):1.0f;
    fieldNormExpl.Value = fieldNorm;
    fieldNormExpl.Description = "fieldNorm(field=" + field + ", doc=" + doc + ")";
    fieldExpl.AddDetail(fieldNormExpl);
    fieldExpl.Match = tfExpl.IsMatch;
    fieldExpl.Value = tfExpl.Value * idfExpl.Value * fieldNormExpl.Value;
    result.AddDetail(fieldExpl);
    // the overall match flag mirrors the field-level match
    System.Boolean? tempAux = fieldExpl.Match;
    result.Match = tempAux;
    // combine them; a query weight of exactly 1 adds nothing, so skip the wrapper
    result.Value = queryExpl.Value * fieldExpl.Value;
    if (queryExpl.Value == 1.0f)
        return fieldExpl;
    return result;
}
/// <summary>
/// Explain the score for a single document </summary>
/// <param name="doc"> document id within the inverted index segment </param>
/// <param name="freq"> Explanation of how the sloppy term frequency was computed </param>
/// <returns> document's score </returns>
public virtual Explanation Explain(int doc, Explanation freq)
{
    Explanation scoreExpl = new Explanation(
        Score(doc, freq.Value),
        "score(doc=" + doc + ",freq=" + freq.Value + "), with freq of:");
    scoreExpl.AddDetail(freq);
    return scoreExpl;
}
/// <summary>
/// Explains the score. The implementation here provides a basic explanation
/// in the format <em>score(name-of-similarity, doc=doc-id,
/// freq=term-frequency), computed from:</em>, and
/// attaches the score (computed via the <seealso cref="#score(BasicStats, float, float)"/>
/// method) and the explanation for the term frequency. Subclasses content with
/// this format may add additional details in
/// <seealso cref="#explain(Explanation, BasicStats, int, float, float)"/>.
/// </summary>
/// <param name="stats"> the corpus level statistics. </param>
/// <param name="doc"> the document id. </param>
/// <param name="freq"> the term frequency and its explanation. </param>
/// <param name="docLen"> the document length. </param>
/// <returns> the explanation. </returns>
public virtual Explanation Explain(BasicStats stats, int doc, Explanation freq, float docLen)
{
    Explanation result = new Explanation
    {
        Value = Score(stats, freq.Value, docLen),
        Description = "score(" + this.GetType().Name + ", doc=" + doc + ", freq=" + freq.Value + "), computed from:"
    };
    result.AddDetail(freq);
    // let the subclass hook attach model-specific details
    Explain(result, stats, doc, freq.Value, docLen);
    return result;
}
// Builds the classic TF-IDF score explanation for one document:
// score = queryWeight (boost * idf * queryNorm) * fieldWeight (tf * idf * fieldNorm).
private Explanation ExplainScore(int doc, Explanation freq, IDFStats stats, NumericDocValues norms)
{
    Explanation result = new Explanation();
    // Use freq.Value rather than the whole freq Explanation in the description;
    // freq.ToString() embeds newlines in the output. This matches the change made
    // in Lucene 5.0.
    // See: https://github.com/apache/lucene-solr/commit/f0bfcbc7d8fbc5bb2791da60af559e8b0ad6eed6
    result.Description = "score(doc=" + doc + ",freq=" + freq.Value + "), product of:";
    // explain query weight; boost detail attached only when it differs from 1
    Explanation queryExpl = new Explanation();
    queryExpl.Description = "queryWeight, product of:";
    Explanation boostExpl = new Explanation(stats.QueryBoost, "boost");
    if (stats.QueryBoost != 1.0f)
    {
        queryExpl.AddDetail(boostExpl);
    }
    queryExpl.AddDetail(stats.Idf);
    Explanation queryNormExpl = new Explanation(stats.QueryNorm, "queryNorm");
    queryExpl.AddDetail(queryNormExpl);
    queryExpl.Value = boostExpl.Value * stats.Idf.Value * queryNormExpl.Value;
    result.AddDetail(queryExpl);
    // explain field weight
    Explanation fieldExpl = new Explanation();
    fieldExpl.Description = "fieldWeight in " + doc + ", product of:";
    Explanation tfExplanation = new Explanation();
    tfExplanation.Value = Tf(freq.Value);
    tfExplanation.Description = "tf(freq=" + freq.Value + "), with freq of:";
    tfExplanation.AddDetail(freq);
    fieldExpl.AddDetail(tfExplanation);
    fieldExpl.AddDetail(stats.Idf);
    Explanation fieldNormExpl = new Explanation();
    // norms may be omitted for the field; fall back to the neutral norm 1.0
    float fieldNorm = norms != null?DecodeNormValue(norms.Get(doc)) : 1.0f;
    fieldNormExpl.Value = fieldNorm;
    fieldNormExpl.Description = "fieldNorm(doc=" + doc + ")";
    fieldExpl.AddDetail(fieldNormExpl);
    fieldExpl.Value = tfExplanation.Value * stats.Idf.Value * fieldNormExpl.Value;
    result.AddDetail(fieldExpl);
    // combine them; a query weight of exactly 1 adds nothing, so skip the wrapper
    result.Value = queryExpl.Value * fieldExpl.Value;
    if (queryExpl.Value == 1.0f)
    {
        return (fieldExpl);
    }
    return (result);
}
// Scores the overlap between the query rectangle and a target rectangle.
// The score is (intersection/queryArea)^queryPower * (intersection/targetArea)^targetPower * 10000,
// with special-cased width computation when either rectangle crosses the date line.
// If a non-null Explanation is supplied, it is filled in with the factor breakdown.
public double Score(Rectangle target, Explanation exp)
{
    // no target, or a degenerate query, can never score
    if (target == null || queryArea <= 0)
    {
        return (0);
    }
    double targetArea = target.GetArea(null);
    if (targetArea <= 0)
    {
        return (0);
    }
    double score = 0;
    // vertical overlap is the same in every dateline case
    double top = Math.Min(queryExtent.GetMaxY(), target.GetMaxY());
    double bottom = Math.Max(queryExtent.GetMinY(), target.GetMinY());
    double height = top - bottom;
    double width = 0;
    // queries that cross the date line
    if (queryExtent.GetCrossesDateLine())
    {
        // documents that cross the date line
        if (target.GetCrossesDateLine())
        {
            double left = Math.Max(queryExtent.GetMinX(), target.GetMinX());
            double right = Math.Min(queryExtent.GetMaxX(), target.GetMaxX());
            width = right + 360.0 - left;
        }
        else
        {
            // try the west side of the dateline first, then the east side
            // NOTE(review): qryWestLeft uses target.GetMaxX() where the mirrored
            // branch below uses the min-X; looks suspicious — confirm against the
            // upstream spatial scorer before changing.
            double qryWestLeft = Math.Max(queryExtent.GetMinX(), target.GetMaxX());
            double qryWestRight = Math.Min(target.GetMaxX(), 180.0);
            double qryWestWidth = qryWestRight - qryWestLeft;
            if (qryWestWidth > 0)
            {
                width = qryWestWidth;
            }
            else
            {
                double qryEastLeft = Math.Max(target.GetMaxX(), -180.0);
                double qryEastRight = Math.Min(queryExtent.GetMaxX(), target.GetMaxX());
                double qryEastWidth = qryEastRight - qryEastLeft;
                if (qryEastWidth > 0)
                {
                    width = qryEastWidth;
                }
            }
        }
    }
    else
    {
        // queries that do not cross the date line
        if (target.GetCrossesDateLine())
        {
            // try the west side of the dateline first, then the east side
            double tgtWestLeft = Math.Max(queryExtent.GetMinX(), target.GetMinX());
            double tgtWestRight = Math.Min(queryExtent.GetMaxX(), 180.0);
            double tgtWestWidth = tgtWestRight - tgtWestLeft;
            if (tgtWestWidth > 0)
            {
                width = tgtWestWidth;
            }
            else
            {
                double tgtEastLeft = Math.Max(queryExtent.GetMinX(), -180.0);
                double tgtEastRight = Math.Min(queryExtent.GetMaxX(), target.GetMaxX());
                double tgtEastWidth = tgtEastRight - tgtEastLeft;
                if (tgtEastWidth > 0)
                {
                    width = tgtEastWidth;
                }
            }
        }
        else
        {
            // simple case: neither rectangle crosses the date line
            double left = Math.Max(queryExtent.GetMinX(), target.GetMinX());
            double right = Math.Min(queryExtent.GetMaxX(), target.GetMaxX());
            width = right - left;
        }
    }
    // calculate the score only when there is a real 2-D intersection
    if ((width > 0) && (height > 0))
    {
        double intersectionArea = width * height;
        double queryRatio = intersectionArea / queryArea;
        double targetRatio = intersectionArea / targetArea;
        double queryFactor = Math.Pow(queryRatio, queryPower);
        double targetFactor = Math.Pow(targetRatio, targetPower);
        score = queryFactor * targetFactor * 10000.0;
        if (exp != null)
        {
            // attach the full factor breakdown to the caller-supplied explanation
            exp.Value = (float)score;
            exp.Description = GetType().Name;
            Explanation e = null;
            exp.AddDetail(e = new Explanation((float)intersectionArea, "IntersectionArea"));
            e.AddDetail(new Explanation((float)width, "width; Query: " + queryExtent));
            e.AddDetail(new Explanation((float)height, "height; Target: " + target));
            exp.AddDetail(e = new Explanation((float)queryFactor, "Query"));
            e.AddDetail(new Explanation((float)queryArea, "area"));
            e.AddDetail(new Explanation((float)queryRatio, "ratio"));
            e.AddDetail(new Explanation((float)queryPower, "power"));
            exp.AddDetail(e = new Explanation((float)targetFactor, "Target"));
            e.AddDetail(new Explanation((float)targetArea, "area"));
            e.AddDetail(new Explanation((float)targetRatio, "ratio"));
            e.AddDetail(new Explanation((float)targetPower, "power"));
        }
    }
    else if (exp != null)
    {
        exp.Value = 0;
        exp.Description = "Shape does not intersect";
    }
    return (score);
}
// Explains a span query's score for one document (getter/setter-style Explanation API):
// weight = queryWeight (boost * idf * queryNorm) * fieldWeight (tf * idf * fieldNorm).
public override Explanation Explain(IndexReader reader, int doc)
{
    ComplexExplanation result = new ComplexExplanation();
    result.SetDescription("weight(" + GetQuery() + " in " + doc + "), product of:");
    System.String field = ((SpanQuery)GetQuery()).GetField();
    Explanation idfExpl = new Explanation(idf, "idf(" + field + ": " + idfExp.Explain() + ")");
    // explain query weight; boost detail attached only when it differs from 1
    Explanation queryExpl = new Explanation();
    queryExpl.SetDescription("queryWeight(" + GetQuery() + "), product of:");
    Explanation boostExpl = new Explanation(GetQuery().GetBoost(), "boost");
    if (GetQuery().GetBoost() != 1.0f)
    {
        queryExpl.AddDetail(boostExpl);
    }
    queryExpl.AddDetail(idfExpl);
    Explanation queryNormExpl = new Explanation(queryNorm, "queryNorm");
    queryExpl.AddDetail(queryNormExpl);
    queryExpl.SetValue(boostExpl.GetValue() * idfExpl.GetValue() * queryNormExpl.GetValue());
    result.AddDetail(queryExpl);
    // explain field weight; tf comes from a fresh scorer for this reader
    ComplexExplanation fieldExpl = new ComplexExplanation();
    fieldExpl.SetDescription("fieldWeight(" + field + ":" + query.ToString(field) + " in " + doc + "), product of:");
    Explanation tfExpl = Scorer(reader, true, false).Explain(doc);
    fieldExpl.AddDetail(tfExpl);
    fieldExpl.AddDetail(idfExpl);
    Explanation fieldNormExpl = new Explanation();
    byte[] fieldNorms = reader.Norms(field);
    // norms may be absent for the field; fall back to the neutral norm 1.0
    float fieldNorm = fieldNorms != null?Similarity.DecodeNorm(fieldNorms[doc]) : 1.0f;
    fieldNormExpl.SetValue(fieldNorm);
    fieldNormExpl.SetDescription("fieldNorm(field=" + field + ", doc=" + doc + ")");
    fieldExpl.AddDetail(fieldNormExpl);
    fieldExpl.SetMatch(tfExpl.IsMatch());
    fieldExpl.SetValue(tfExpl.GetValue() * idfExpl.GetValue() * fieldNormExpl.GetValue());
    result.AddDetail(fieldExpl);
    // the overall match flag mirrors the field-level match
    System.Boolean?tempAux = fieldExpl.GetMatch();
    result.SetMatch(tempAux);
    // combine them; a query weight of exactly 1 adds nothing, so skip the wrapper
    result.SetValue(queryExpl.GetValue() * fieldExpl.GetValue());
    if (queryExpl.GetValue() == 1.0f)
    {
        return (fieldExpl);
    }
    return (result);
}
protected internal override Explanation Explain(int doc)
{
    // Combine the base (non-payload) explanation with the average payload score.
    Explanation combined = new Explanation();
    Explanation nonPayloadExpl = base.Explain(doc);
    combined.AddDetail(nonPayloadExpl);
    // average payload score defaults to 1 when no payloads were seen
    float avgPayloadScore = (payloadsSeen > 0 ? (payloadScore / payloadsSeen) : 1);
    Explanation payloadBoost = new Explanation(avgPayloadScore, "scorePayload(...)");
    combined.AddDetail(payloadBoost);
    combined.Value = nonPayloadExpl.Value * avgPayloadScore;
    combined.Description = "bnq, product of:";
    return combined;
}
// Explains a payload-near query's score: the similarity's span score multiplied
// by the payload function's score. Returns a non-match explanation when the
// scorer cannot be positioned on the requested document.
public override Explanation Explain(AtomicReaderContext context, int doc)
{
    PayloadNearSpanScorer scorer = (PayloadNearSpanScorer)Scorer(context, (context.AtomicReader).LiveDocs);
    if (scorer != null)
    {
        // advance to the target doc; only explain when the scorer actually matches it
        int newDoc = scorer.Advance(doc);
        if (newDoc == doc)
        {
            float freq = scorer.Freq();
            Similarity.SimScorer docScorer = Similarity.DoSimScorer(Stats, context);
            Explanation expl = new Explanation();
            expl.Description = "weight(" + Query + " in " + doc + ") [" + Similarity.GetType().Name + "], result of:";
            Explanation scoreExplanation = docScorer.Explain(doc, new Explanation(freq, "phraseFreq=" + freq));
            expl.AddDetail(scoreExplanation);
            expl.Value = scoreExplanation.Value;
            string field = ((SpanQuery)Query).Field;
            // now the payloads part
            Explanation payloadExpl = OuterInstance.Function.Explain(doc, field, scorer.PayloadsSeen, scorer.PayloadScore);
            // combined: span score * payload score
            ComplexExplanation result = new ComplexExplanation();
            result.AddDetail(expl);
            result.AddDetail(payloadExpl);
            result.Value = expl.Value * payloadExpl.Value;
            result.Description = "PayloadNearQuery, product of:";
            return result;
        }
    }
    return new ComplexExplanation(false, 0.0f, "no matching term");
}
protected internal override void Explain(Explanation expl, BasicStats stats, int doc, float freq, float docLen)
{
    // Report the boost only when it actually changes the score.
    if (stats.TotalBoost != 1.0f)
    {
        expl.AddDetail(new Explanation(stats.TotalBoost, "boost"));
    }
    // Normalization produces the normalized term frequency that feeds the
    // basic model and after-effect components.
    Explanation normalization = Normalization_Renamed.Explain(stats, freq, docLen);
    expl.AddDetail(normalization);
    float normalizedTf = normalization.Value;
    expl.AddDetail(BasicModel_Renamed.Explain(stats, normalizedTf));
    expl.AddDetail(AfterEffect_Renamed.Explain(stats, normalizedTf));
}
// Explains a span query's score for one document:
// weight = queryWeight (boost * idf * queryNorm) * fieldWeight (tf * idf * fieldNorm).
public override Explanation Explain(IndexReader reader, int doc)
{
    ComplexExplanation result = new ComplexExplanation();
    result.Description = "weight(" + Query + " in " + doc + "), product of:";
    System.String field = ((SpanQuery)Query).Field;
    Explanation idfExpl = new Explanation(idf, "idf(" + field + ": " + idfExp.Explain() + ")");
    // explain query weight; boost detail attached only when it differs from 1
    Explanation queryExpl = new Explanation();
    queryExpl.Description = "queryWeight(" + Query + "), product of:";
    Explanation boostExpl = new Explanation(Query.Boost, "boost");
    if (Query.Boost != 1.0f)
    {
        queryExpl.AddDetail(boostExpl);
    }
    queryExpl.AddDetail(idfExpl);
    Explanation queryNormExpl = new Explanation(queryNorm, "queryNorm");
    queryExpl.AddDetail(queryNormExpl);
    queryExpl.Value = boostExpl.Value * idfExpl.Value * queryNormExpl.Value;
    result.AddDetail(queryExpl);
    // explain field weight; tf comes from a fresh scorer for this reader
    ComplexExplanation fieldExpl = new ComplexExplanation();
    fieldExpl.Description = "fieldWeight(" + field + ":" + internalQuery.ToString(field) + " in " + doc + "), product of:";
    Explanation tfExpl = ((SpanScorer)Scorer(reader, true, false)).Explain(doc);
    fieldExpl.AddDetail(tfExpl);
    fieldExpl.AddDetail(idfExpl);
    Explanation fieldNormExpl = new Explanation();
    byte[] fieldNorms = reader.Norms(field);
    // norms may be absent for the field; fall back to the neutral norm 1.0
    float fieldNorm = fieldNorms != null?Similarity.DecodeNorm(fieldNorms[doc]) : 1.0f;
    fieldNormExpl.Value = fieldNorm;
    fieldNormExpl.Description = "fieldNorm(field=" + field + ", doc=" + doc + ")";
    fieldExpl.AddDetail(fieldNormExpl);
    fieldExpl.Match = tfExpl.IsMatch;
    fieldExpl.Value = tfExpl.Value * idfExpl.Value * fieldNormExpl.Value;
    result.AddDetail(fieldExpl);
    // the overall match flag mirrors the field-level match
    System.Boolean?tempAux = fieldExpl.Match;
    result.Match = tempAux;
    // combine them; a query weight of exactly 1 adds nothing, so skip the wrapper
    result.Value = queryExpl.Value * fieldExpl.Value;
    if (queryExpl.Value == 1.0f)
    {
        return (fieldExpl);
    }
    return (result);
}
public override Explanation Explain(int doc, Explanation freq)
{
    // Sum of the sub-scorers' explanations; the top-level value is the combined score.
    Explanation sumExpl = new Explanation(Score(doc, freq.Value), "sum of:");
    foreach (SimScorer scorer in SubScorers)
    {
        sumExpl.AddDetail(scorer.Explain(doc, freq));
    }
    return sumExpl;
}
// Builds the classic TF-IDF score explanation for one document:
// score = queryWeight (boost * idf * queryNorm) * fieldWeight (tf * idf * fieldNorm).
private Explanation ExplainScore(int doc, Explanation freq, IDFStats stats, NumericDocValues norms)
{
    Explanation result = new Explanation();
    // Use freq.Value rather than the whole freq Explanation in the description;
    // freq.ToString() embeds newlines in the output. This matches the change made
    // in Lucene 5.0.
    // See: https://github.com/apache/lucene-solr/commit/f0bfcbc7d8fbc5bb2791da60af559e8b0ad6eed6
    result.Description = "score(doc=" + doc + ",freq=" + freq.Value + "), product of:";
    // explain query weight; boost detail attached only when it differs from 1
    Explanation queryExpl = new Explanation();
    queryExpl.Description = "queryWeight, product of:";
    Explanation boostExpl = new Explanation(stats.QueryBoost, "boost");
    if (stats.QueryBoost != 1.0f)
    {
        queryExpl.AddDetail(boostExpl);
    }
    queryExpl.AddDetail(stats.Idf);
    Explanation queryNormExpl = new Explanation(stats.QueryNorm, "queryNorm");
    queryExpl.AddDetail(queryNormExpl);
    queryExpl.Value = boostExpl.Value * stats.Idf.Value * queryNormExpl.Value;
    result.AddDetail(queryExpl);
    // explain field weight
    Explanation fieldExpl = new Explanation();
    fieldExpl.Description = "fieldWeight in " + doc + ", product of:";
    Explanation tfExplanation = new Explanation();
    tfExplanation.Value = Tf(freq.Value);
    tfExplanation.Description = "tf(freq=" + freq.Value + "), with freq of:";
    tfExplanation.AddDetail(freq);
    fieldExpl.AddDetail(tfExplanation);
    fieldExpl.AddDetail(stats.Idf);
    Explanation fieldNormExpl = new Explanation();
    // norms may be omitted for the field; fall back to the neutral norm 1.0
    float fieldNorm = norms != null ? DecodeNormValue(norms.Get(doc)) : 1.0f;
    fieldNormExpl.Value = fieldNorm;
    fieldNormExpl.Description = "fieldNorm(doc=" + doc + ")";
    fieldExpl.AddDetail(fieldNormExpl);
    fieldExpl.Value = tfExplanation.Value * stats.Idf.Value * fieldNormExpl.Value;
    result.AddDetail(fieldExpl);
    // combine them; a query weight of exactly 1 adds nothing, so skip the wrapper
    result.Value = queryExpl.Value * fieldExpl.Value;
    if (queryExpl.Value == 1.0f)
    {
        return fieldExpl;
    }
    return result;
}
protected internal override void Explain(Explanation expl, BasicStats stats, int doc, float freq, float docLen)
{
    // Report the boost only when it actually changes the score.
    if (stats.TotalBoost != 1.0f)
    {
        expl.AddDetail(new Explanation(stats.TotalBoost, "boost"));
    }
    // Normalization and lambda feed the distribution component.
    Explanation normalization = Normalization_Renamed.Explain(stats, freq, docLen);
    Explanation lambda = Lambda_Renamed.Explain(stats);
    expl.AddDetail(normalization);
    expl.AddDetail(lambda);
    expl.AddDetail(Distribution_Renamed.Explain(stats, normalization.Value, lambda.Value));
}
protected internal override void Explain(Explanation expl, BasicStats stats, int doc, float freq, float docLen)
{
    // Only the collection-level probability is surfaced for this model.
    Explanation collectionProb = new Explanation(m_collectionModel.ComputeProbability(stats), "collection probability");
    expl.AddDetail(collectionProb);
}
// Explains a payload-term query's score: the similarity's span score,
// optionally multiplied by the payload function's score (controlled by
// IncludeSpanScore). Returns a non-match explanation when the scorer cannot
// be positioned on the requested document.
public override Explanation Explain(AtomicReaderContext context, int doc)
{
    PayloadTermSpanScorer scorer = (PayloadTermSpanScorer)Scorer(context, (context.AtomicReader).LiveDocs);
    if (scorer != null)
    {
        // advance to the target doc; only explain when the scorer actually matches it
        int newDoc = scorer.Advance(doc);
        if (newDoc == doc)
        {
            float freq = scorer.SloppyFreq();
            Similarity.SimScorer docScorer = Similarity.DoSimScorer(Stats, context);
            Explanation expl = new Explanation();
            expl.Description = "weight(" + Query + " in " + doc + ") [" + Similarity.GetType().Name + "], result of:";
            Explanation scoreExplanation = docScorer.Explain(doc, new Explanation(freq, "phraseFreq=" + freq));
            expl.AddDetail(scoreExplanation);
            expl.Value = scoreExplanation.Value;
            // now the payloads part
            // QUESTION: Is there a way to avoid this skipTo call? We need to know
            // whether to load the payload or not
            // GSI: I suppose we could toString the payload, but I don't think that
            // would be a good idea
            string field = ((SpanQuery)Query).Field;
            Explanation payloadExpl = OuterInstance.Function.Explain(doc, field, scorer.PayloadsSeen, scorer.PayloadScore_Renamed);
            payloadExpl.Value = scorer.PayloadScore;
            // combined: with or without the span score depending on configuration
            ComplexExplanation result = new ComplexExplanation();
            if (OuterInstance.IncludeSpanScore)
            {
                result.AddDetail(expl);
                result.AddDetail(payloadExpl);
                result.Value = expl.Value * payloadExpl.Value;
                result.Description = "btq, product of:";
            }
            else
            {
                result.AddDetail(payloadExpl);
                result.Value = payloadExpl.Value;
                result.Description = "btq(includeSpanScore=false), result of:";
            }
            result.Match = true; // LUCENE-1303
            return result;
        }
    }
    return new ComplexExplanation(false, 0.0f, "no matching term");
}
protected internal override void Explain(Explanation expl, BasicStats stats, int doc, float freq, float docLen)
{
    // Report the boost only when it actually changes the score.
    float boost = stats.TotalBoost;
    if (boost != 1.0f)
    {
        expl.AddDetail(new Explanation(boost, "boost"));
    }
    // Attach the smoothing parameter, then defer to the base explanation.
    Explanation lambdaDetail = new Explanation(Lambda_Renamed, "lambda");
    expl.AddDetail(lambdaDetail);
    base.Explain(expl, stats, doc, freq, docLen);
}
/// <summary>
/// Computes a score factor for a phrase.
///
/// <p>
/// The default implementation sums the idf factor for
/// each term in the phrase.
/// </summary>
/// <param name="collectionStats"> collection-level statistics </param>
/// <param name="termStats"> term-level statistics for the terms in the phrase </param>
/// <returns> an Explain object that includes both an idf
/// score factor for the phrase and an explanation
/// for each term. </returns>
public virtual Explanation IdfExplain(CollectionStatistics collectionStats, TermStatistics[] termStats)
{
    long maxDoc = collectionStats.MaxDoc();
    Explanation sumExpl = new Explanation();
    sumExpl.Description = "idf(), sum of:";
    float total = 0.0f;
    // one detail per term; the top-level value is the sum of the per-term idfs
    foreach (TermStatistics termStat in termStats)
    {
        long docFreq = termStat.DocFreq();
        float termIdf = Idf(docFreq, maxDoc);
        sumExpl.AddDetail(new Explanation(termIdf, "idf(docFreq=" + docFreq + ", maxDocs=" + maxDoc + ")"));
        total += termIdf;
    }
    sumExpl.Value = total;
    return sumExpl;
}
// Explains a boolean query's score for one document: sums the matching
// non-prohibited clause scores, fails on unmatched required or matched
// prohibited clauses, enforces minimumNumberShouldMatch, and finally wraps
// the sum with the coord factor when it is not 1.
public override Explanation Explain(AtomicReaderContext context, int doc)
{
    int minShouldMatch = outerInstance.MinimumNumberShouldMatch;
    ComplexExplanation sumExpl = new ComplexExplanation();
    sumExpl.Description = "sum of:";
    int coord = 0;
    float sum = 0.0f;
    bool fail = false;
    int shouldMatchCount = 0;
    // m_weights is parallel to the clause list; the enumerator is advanced in lockstep
    using (IEnumerator <BooleanClause> cIter = outerInstance.clauses.GetEnumerator())
    {
        foreach (Weight w in m_weights)
        {
            cIter.MoveNext();
            BooleanClause c = cIter.Current;
            // a clause with no scorer cannot match; that only fails the query if required
            if (w.GetScorer(context, context.AtomicReader.LiveDocs) == null)
            {
                if (c.IsRequired)
                {
                    fail = true;
                    Explanation r = new Explanation(0.0f, "no match on required clause (" + c.Query.ToString() + ")");
                    sumExpl.AddDetail(r);
                }
                continue;
            }
            Explanation e = w.Explain(context, doc);
            if (e.IsMatch)
            {
                if (!c.IsProhibited)
                {
                    // matching, allowed clause contributes to the sum and the coord count
                    sumExpl.AddDetail(e);
                    sum += e.Value;
                    coord++;
                }
                else
                {
                    // a matching prohibited clause fails the whole query
                    Explanation r = new Explanation(0.0f, "match on prohibited clause (" + c.Query.ToString() + ")");
                    r.AddDetail(e);
                    sumExpl.AddDetail(r);
                    fail = true;
                }
                if (c.Occur == Occur_e.SHOULD)
                {
                    shouldMatchCount++;
                }
            }
            else if (c.IsRequired)
            {
                // a non-matching required clause also fails the whole query
                Explanation r = new Explanation(0.0f, "no match on required clause (" + c.Query.ToString() + ")");
                r.AddDetail(e);
                sumExpl.AddDetail(r);
                fail = true;
            }
        }
    }
    // required/prohibited failures take precedence over the minShouldMatch check
    if (fail)
    {
        sumExpl.Match = false;
        sumExpl.Value = 0.0f;
        sumExpl.Description = "Failure to meet condition(s) of required/prohibited clause(s)";
        return (sumExpl);
    }
    else if (shouldMatchCount < minShouldMatch)
    {
        sumExpl.Match = false;
        sumExpl.Value = 0.0f;
        sumExpl.Description = "Failure to match minimum number " + "of optional clauses: " + minShouldMatch;
        return (sumExpl);
    }
    sumExpl.Match = 0 < coord ? true : false;
    sumExpl.Value = sum;
    // apply the coordination factor unless disabled or neutral
    float coordFactor = disableCoord ? 1.0f : Coord(coord, m_maxCoord);
    if (coordFactor == 1.0f)
    {
        return (sumExpl); // eliminate wrapper
    }
    else
    {
        ComplexExplanation result = new ComplexExplanation(sumExpl.IsMatch, sum * coordFactor, "product of:");
        result.AddDetail(sumExpl);
        result.AddDetail(new Explanation(coordFactor, "coord(" + coord + "/" + m_maxCoord + ")"));
        return (result);
    }
}
protected internal override void Explain(Explanation expl, BasicStats stats, int doc, float freq, float docLen)
{
    // Only the collection-level probability is surfaced for this model.
    Explanation collectionProb = new Explanation(collectionModel.ComputeProbability(stats), "collection probability");
    expl.AddDetail(collectionProb);
}
// Explains a multi-term span query's score for one document:
// weight = queryWeight (boost * idf * queryNorm) * fieldWeight (tf * idf * fieldNorm).
public virtual Explanation Explain(IndexReader reader, int doc)
{
    ComplexExplanation result = new ComplexExplanation();
    result.SetDescription("weight(" + GetQuery() + " in " + doc + "), product of:");
    System.String field = ((SpanQuery)GetQuery()).GetField();
    // list each term's docFreq in the idf description, e.g. "a=3 b=7"
    System.Text.StringBuilder docFreqs = new System.Text.StringBuilder();
    System.Collections.IEnumerator i = terms.GetEnumerator();
    while (i.MoveNext())
    {
        System.Collections.DictionaryEntry tmp = (System.Collections.DictionaryEntry)i.Current;
        Term term = (Term)tmp.Key;
        docFreqs.Append(term.Text());
        docFreqs.Append("=");
        docFreqs.Append(reader.DocFreq(term));
        if (i.MoveNext())
        {
            docFreqs.Append(" ");
        }
    }
    Explanation idfExpl = new Explanation(idf, "idf(" + field + ": " + docFreqs + ")");
    // explain query weight; boost detail attached only when it differs from 1
    Explanation queryExpl = new Explanation();
    queryExpl.SetDescription("queryWeight(" + GetQuery() + "), product of:");
    Explanation boostExpl = new Explanation(GetQuery().GetBoost(), "boost");
    if (GetQuery().GetBoost() != 1.0f)
    {
        queryExpl.AddDetail(boostExpl);
    }
    queryExpl.AddDetail(idfExpl);
    Explanation queryNormExpl = new Explanation(queryNorm, "queryNorm");
    queryExpl.AddDetail(queryNormExpl);
    queryExpl.SetValue(boostExpl.GetValue() * idfExpl.GetValue() * queryNormExpl.GetValue());
    result.AddDetail(queryExpl);
    // explain field weight
    ComplexExplanation fieldExpl = new ComplexExplanation();
    fieldExpl.SetDescription("fieldWeight(" + field + ":" + query.ToString(field) + " in " + doc + "), product of:");
    Explanation tfExpl = Scorer(reader).Explain(doc);
    fieldExpl.AddDetail(tfExpl);
    fieldExpl.AddDetail(idfExpl);
    Explanation fieldNormExpl = new Explanation();
    byte[] fieldNorms = reader.Norms(field);
    // Fix: when norms are omitted for the field, fall back to the neutral norm
    // 1.0f (as the sibling Explain implementations do) rather than 0.0f, which
    // would zero out the entire field weight and therefore the score.
    float fieldNorm = fieldNorms != null?Similarity.DecodeNorm(fieldNorms[doc]) : 1.0f;
    fieldNormExpl.SetValue(fieldNorm);
    fieldNormExpl.SetDescription("fieldNorm(field=" + field + ", doc=" + doc + ")");
    fieldExpl.AddDetail(fieldNormExpl);
    fieldExpl.SetMatch(tfExpl.IsMatch());
    fieldExpl.SetValue(tfExpl.GetValue() * idfExpl.GetValue() * fieldNormExpl.GetValue());
    result.AddDetail(fieldExpl);
    // the overall match flag mirrors the field-level match
    System.Boolean tempAux = fieldExpl.GetMatch();
    result.SetMatch(tempAux);
    // combine them; a query weight of exactly 1 adds nothing, so skip the wrapper
    result.SetValue(queryExpl.GetValue() * fieldExpl.GetValue());
    if (queryExpl.GetValue() == 1.0f)
    {
        return (fieldExpl);
    }
    return (result);
}
/// <summary>
/// Explain the custom score.
/// Whenever overriding <see cref="CustomScore(int, float, float[])" />,
/// this method should also be overridden to provide the correct explanation
/// for the part of the custom scoring.
/// </summary>
/// <param name="doc">doc being explained</param>
/// <param name="subQueryExpl">explanation for the sub-query part</param>
/// <param name="valSrcExpls">explanation for the value source part</param>
/// <returns>an explanation for the custom score</returns>
public virtual Explanation CustomExplain(int doc, Explanation subQueryExpl, Explanation[] valSrcExpls)
{
    // Degenerate arities delegate to the simpler overload or the sub-query itself.
    switch (valSrcExpls.Length)
    {
        case 0:
            return subQueryExpl;
        case 1:
            return CustomExplain(doc, subQueryExpl, valSrcExpls[0]);
    }
    // General case: product of all value-source scores and the sub-query score.
    float valSrcScore = 1;
    foreach (Explanation valSrcExpl in valSrcExpls)
    {
        valSrcScore *= valSrcExpl.GetValue();
    }
    Explanation explanation = new Explanation(valSrcScore * subQueryExpl.GetValue(), "custom score: product of:");
    explanation.AddDetail(subQueryExpl);
    foreach (Explanation valSrcExpl in valSrcExpls)
    {
        explanation.AddDetail(valSrcExpl);
    }
    return explanation;
}
protected internal override void Explain(Explanation expl, BasicStats stats, int doc, float freq, float docLen)
{
    // Report the boost only when it actually changes the score.
    if (stats.TotalBoost != 1.0f)
    {
        expl.AddDetail(new Explanation(stats.TotalBoost, "boost"));
    }
    expl.AddDetail(new Explanation(Mu_Renamed, "mu"));
    // term weight = log(1 + freq / (mu * collection probability))
    Explanation weightExpl = new Explanation(
        (float)Math.Log(1 + freq / (Mu_Renamed * ((LMStats)stats).CollectionProbability)),
        "term weight");
    expl.AddDetail(weightExpl);
    // document norm = log(mu / (docLen + mu))
    expl.AddDetail(new Explanation((float)Math.Log(Mu_Renamed / (docLen + Mu_Renamed)), "document norm"));
    base.Explain(expl, stats, doc, freq, docLen);
}