/// <summary>
/// Builds an <see cref="Explanation"/> for the payload contribution of one
/// document/field pair; the value is whatever
/// <c>DocScore(docId, field, numPayloadsSeen, payloadScore)</c> computes.
/// </summary>
public virtual Explanation Explain(int docId, string field, int numPayloadsSeen, float payloadScore)
{
    var payloadExpl = new Explanation();
    payloadExpl.Value = DocScore(docId, field, numPayloadsSeen, payloadScore);
    payloadExpl.Description = this.GetType().Name + ".docScore()";
    return payloadExpl;
}
// Verifies that calling Explain.Success while an Explanation scope is open
// records exactly one detail on that scope.
// NOTE(review): assumes Explanation is an ambient (IDisposable) context that
// the static Explain.Success resolves implicitly — the link between `m` and
// the Success call is not visible here; confirm against the Explain type.
public void Success_withContext_addsMessageToExplanation() { using (var m = new Explanation()) { Explain.Success("Sample"); Assert.That(m.Details, Has.Count.EqualTo(1)); } }
/// <summary>
/// Explains the model score for a normalized term frequency: the value is
/// <c>Score(stats, tfn)</c> with the input <c>tfn</c> recorded as a detail.
/// </summary>
public override sealed Explanation Explain(BasicStats stats, float tfn)
{
    var explanation = new Explanation();
    explanation.Value = Score(stats, tfn);
    explanation.Description = this.GetType().Name + ", computed from: ";
    explanation.AddDetail(new Explanation(tfn, "tfn"));
    return explanation;
}
/// <summary>
/// Explains the lambda parameter: the value is <c>CalculateLambda(stats)</c>,
/// with the docFreq and numberOfDocuments inputs recorded as details.
/// </summary>
public override sealed Explanation Explain(BasicStats stats)
{
    var explanation = new Explanation();
    explanation.Value = CalculateLambda(stats);
    explanation.Description = this.GetType().Name + ", computed from: ";
    explanation.AddDetail(new Explanation(stats.DocFreq, "docFreq"));
    explanation.AddDetail(new Explanation(stats.NumberOfDocuments, "numberOfDocuments"));
    return explanation;
}
/// <summary>
/// Returns an explanation for the normalized term frequency.
/// <para>The default normalization methods use the field length of the document
/// and the average field length to compute the normalized term frequency.
/// This method provides a generic explanation for such methods.
/// Subclasses that use other statistics must override this method.</para>
/// </summary>
/// <param name="stats">corpus-level statistics.</param>
/// <param name="tf">raw term frequency in the document.</param>
/// <param name="len">field length of the document.</param>
/// <returns>an explanation whose value is <c>Tfn(stats, tf, len)</c>.</returns>
public virtual Explanation Explain(BasicStats stats, float tf, float len) { Explanation result = new Explanation(); result.Description = this.GetType().Name + ", computed from: "; result.Value = Tfn(stats, tf, len); result.AddDetail(new Explanation(tf, "tf")); result.AddDetail(new Explanation(stats.AvgFieldLength, "avgFieldLength")); result.AddDetail(new Explanation(len, "len")); return result; }
/// <summary>
/// Returns an explanation for the score.
/// <para>Most basic models use the number of documents and the total term
/// frequency to compute Inf<sub>1</sub>. This method provides a generic
/// explanation for such models. Subclasses that use other statistics must
/// override this method.</para>
/// </summary>
/// <param name="stats">corpus-level statistics.</param>
/// <param name="tfn">normalized term frequency.</param>
/// <returns>an explanation whose value is <c>Score(stats, tfn)</c>.</returns>
public virtual Explanation Explain(BasicStats stats, float tfn) { Explanation result = new Explanation(); result.Description = this.GetType().Name + ", computed from: "; result.Value = Score(stats, tfn); result.AddDetail(new Explanation(tfn, "tfn")); result.AddDetail(new Explanation(stats.NumberOfDocuments, "numberOfDocuments")); result.AddDetail(new Explanation(stats.TotalTermFreq, "totalTermFreq")); return result; }
// Explains this span weight for one document as queryWeight * fieldWeight in
// the classic TF/IDF breakdown: the query side is boost * idf * queryNorm
// (boost detail omitted when boost == 1), the field side is
// tf * idf * fieldNorm, with fieldNorm defaulting to 1.0 when no norms exist.
// When queryExpl.Value == 1.0f the wrapper is elided and the field-side
// explanation is returned directly — standard Lucene convention.
public override Explanation Explain(IndexReader reader, int doc) { ComplexExplanation result = new ComplexExplanation(); result.Description = "weight(" + Query + " in " + doc + "), product of:"; System.String field = ((SpanQuery) Query).Field; Explanation idfExpl = new Explanation(idf, "idf(" + field + ": " + idfExp.Explain() + ")"); // explain query weight Explanation queryExpl = new Explanation(); queryExpl.Description = "queryWeight(" + Query + "), product of:"; Explanation boostExpl = new Explanation(Query.Boost, "boost"); if (Query.Boost != 1.0f) queryExpl.AddDetail(boostExpl); queryExpl.AddDetail(idfExpl); Explanation queryNormExpl = new Explanation(queryNorm, "queryNorm"); queryExpl.AddDetail(queryNormExpl); queryExpl.Value = boostExpl.Value * idfExpl.Value * queryNormExpl.Value; result.AddDetail(queryExpl); // explain field weight ComplexExplanation fieldExpl = new ComplexExplanation(); fieldExpl.Description = "fieldWeight(" + field + ":" + internalQuery.ToString(field) + " in " + doc + "), product of:"; Explanation tfExpl = ((SpanScorer)Scorer(reader, true, false)).Explain(doc); fieldExpl.AddDetail(tfExpl); fieldExpl.AddDetail(idfExpl); Explanation fieldNormExpl = new Explanation(); byte[] fieldNorms = reader.Norms(field); float fieldNorm = fieldNorms != null?Similarity.DecodeNorm(fieldNorms[doc]):1.0f; fieldNormExpl.Value = fieldNorm; fieldNormExpl.Description = "fieldNorm(field=" + field + ", doc=" + doc + ")"; fieldExpl.AddDetail(fieldNormExpl); fieldExpl.Match = tfExpl.IsMatch; fieldExpl.Value = tfExpl.Value * idfExpl.Value * fieldNormExpl.Value; result.AddDetail(fieldExpl); System.Boolean? tempAux = fieldExpl.Match; result.Match = tempAux; // combine them result.Value = queryExpl.Value * fieldExpl.Value; if (queryExpl.Value == 1.0f) return fieldExpl; return result; }
/// <summary>
/// Explain the custom score.
/// Whenever overriding <see cref="CustomScore(int, float, float[])" />,
/// this method should also be overridden to provide the correct explanation
/// for the part of the custom scoring.
/// </summary>
/// <param name="doc">doc being explained</param>
/// <param name="subQueryExpl">explanation for the sub-query part</param>
/// <param name="valSrcExpls">explanation for the value source part</param>
/// <returns>an explanation for the custom score</returns>
// Delegates to the single-source overload for one source, returns the
// sub-query explanation unchanged for zero sources, and otherwise multiplies
// all value-source values together with the sub-query value.
public virtual Explanation CustomExplain(int doc, Explanation subQueryExpl, Explanation[] valSrcExpls) { if (valSrcExpls.Length == 1) { return CustomExplain(doc, subQueryExpl, valSrcExpls[0]); } if (valSrcExpls.Length == 0) { return subQueryExpl; } float valSrcScore = 1; for (int i = 0; i < valSrcExpls.Length; i++) { valSrcScore *= valSrcExpls[i].GetValue(); } Explanation exp = new Explanation(valSrcScore * subQueryExpl.GetValue(), "custom score: product of:"); exp.AddDetail(subQueryExpl); for (int i = 0; i < valSrcExpls.Length; i++) { exp.AddDetail(valSrcExpls[i]); } return exp; }
// Explains a PayloadNearQuery hit as the product of the similarity's span
// score explanation and the payload function's contribution for the same doc.
// Falls back to a non-matching ComplexExplanation when the scorer is null or
// cannot advance exactly to `doc`.
public override Explanation Explain(AtomicReaderContext context, int doc) { PayloadNearSpanScorer scorer = (PayloadNearSpanScorer)Scorer(context, (context.AtomicReader).LiveDocs); if (scorer != null) { int newDoc = scorer.Advance(doc); if (newDoc == doc) { float freq = scorer.Freq(); Similarity.SimScorer docScorer = Similarity.DoSimScorer(Stats, context); Explanation expl = new Explanation(); expl.Description = "weight(" + Query + " in " + doc + ") [" + Similarity.GetType().Name + "], result of:"; Explanation scoreExplanation = docScorer.Explain(doc, new Explanation(freq, "phraseFreq=" + freq)); expl.AddDetail(scoreExplanation); expl.Value = scoreExplanation.Value; string field = ((SpanQuery)Query).Field; // now the payloads part Explanation payloadExpl = OuterInstance.Function.Explain(doc, field, scorer.PayloadsSeen, scorer.PayloadScore); // combined ComplexExplanation result = new ComplexExplanation(); result.AddDetail(expl); result.AddDetail(payloadExpl); result.Value = expl.Value * payloadExpl.Value; result.Description = "PayloadNearQuery, product of:"; return result; } } return new ComplexExplanation(false, 0.0f, "no matching term"); }
// Builds the custom-score explanation: short-circuits with the sub-query
// explanation when it did not match; otherwise gathers one explanation per
// value source, lets the provider combine them via CustomExplain, and scales
// the result by this weight's Value (the query boost used as weight).
private Explanation DoExplain(IndexReader reader, int doc) { Explanation subQueryExpl = subQueryWeight.Explain(reader, doc); if (!subQueryExpl.IsMatch) { return subQueryExpl; } // match Explanation[] valSrcExpls = new Explanation[valSrcWeights.Length]; for (int i = 0; i < valSrcWeights.Length; i++) { valSrcExpls[i] = valSrcWeights[i].Explain(reader, doc); } Explanation customExp = Enclosing_Instance.GetCustomScoreProvider(reader).CustomExplain(doc, subQueryExpl, valSrcExpls); float sc = Value * customExp.Value; Explanation res = new ComplexExplanation(true, sc, Enclosing_Instance.ToString() + ", product of:"); res.AddDetail(customExp); res.AddDetail(new Explanation(Value, "queryBoost")); // actually using the q boost as q weight (== weight value) return res; }
/// <summary>
/// Explains the score for <paramref name="doc"/>, substituting a zero-valued
/// "no matching docs" explanation when <c>DoExplain</c> yields null.
/// </summary>
public override Explanation Explain(AtomicReaderContext context, int doc)
{
    Explanation result = DoExplain(context, doc);
    if (result != null)
    {
        return result;
    }
    return new Explanation(0.0f, "no matching docs");
}
// Explains a payload term query hit. The span-score part comes from the
// similarity's doc scorer; the payload part from the payload function. When
// IncludeSpanScore is set the result is their product ("btq, product of:"),
// otherwise only the payload explanation is used. Returns a non-matching
// explanation when the scorer is null or cannot advance exactly to `doc`.
// NOTE(review): payloadExpl.Value is overwritten with scorer.PayloadScore
// right after Function.Explain was handed PayloadScore_Renamed — confirm the
// two properties are intentionally distinct.
public override Explanation Explain(AtomicReaderContext context, int doc) { PayloadTermSpanScorer scorer = (PayloadTermSpanScorer)Scorer(context, (context.AtomicReader).LiveDocs); if (scorer != null) { int newDoc = scorer.Advance(doc); if (newDoc == doc) { float freq = scorer.SloppyFreq(); Similarity.SimScorer docScorer = Similarity.DoSimScorer(Stats, context); Explanation expl = new Explanation(); expl.Description = "weight(" + Query + " in " + doc + ") [" + Similarity.GetType().Name + "], result of:"; Explanation scoreExplanation = docScorer.Explain(doc, new Explanation(freq, "phraseFreq=" + freq)); expl.AddDetail(scoreExplanation); expl.Value = scoreExplanation.Value; // now the payloads part // QUESTION: Is there a way to avoid this skipTo call? We need to know // whether to load the payload or not // GSI: I suppose we could toString the payload, but I don't think that // would be a good idea string field = ((SpanQuery)Query).Field; Explanation payloadExpl = OuterInstance.Function.Explain(doc, field, scorer.PayloadsSeen, scorer.PayloadScore_Renamed); payloadExpl.Value = scorer.PayloadScore; // combined ComplexExplanation result = new ComplexExplanation(); if (OuterInstance.IncludeSpanScore) { result.AddDetail(expl); result.AddDetail(payloadExpl); result.Value = expl.Value * payloadExpl.Value; result.Description = "btq, product of:"; } else { result.AddDetail(payloadExpl); result.Value = payloadExpl.Value; result.Description = "btq(includeSpanScore=false), result of:"; } result.Match = true; // LUCENE-1303 return result; } } return new ComplexExplanation(false, 0.0f, "no matching term"); }
// Explains a BM25 score: boost * idf * tfNorm. The boost detail is omitted
// when the combined boost equals 1. When norms are omitted for the field the
// length normalization degenerates to b = 0, i.e.
// tfNorm = freq * (k1 + 1) / (freq + k1); otherwise the full BM25 saturation
// with document length / average field length is shown.
private Explanation ExplainScore(int doc, Explanation freq, BM25Stats stats, NumericDocValues norms) { Explanation result = new Explanation(); result.Description = "score(doc=" + doc + ",freq=" + freq + "), product of:"; Explanation boostExpl = new Explanation(stats.QueryBoost * stats.TopLevelBoost, "boost"); if (boostExpl.Value != 1.0f) { result.AddDetail(boostExpl); } result.AddDetail(stats.Idf); Explanation tfNormExpl = new Explanation(); tfNormExpl.Description = "tfNorm, computed from:"; tfNormExpl.AddDetail(freq); tfNormExpl.AddDetail(new Explanation(K1_Renamed, "parameter k1")); if (norms == null) { tfNormExpl.AddDetail(new Explanation(0, "parameter b (norms omitted for field)")); tfNormExpl.Value = (freq.Value * (K1_Renamed + 1)) / (freq.Value + K1_Renamed); } else { float doclen = DecodeNormValue((sbyte)norms.Get(doc)); tfNormExpl.AddDetail(new Explanation(b, "parameter b")); tfNormExpl.AddDetail(new Explanation(stats.Avgdl, "avgFieldLength")); tfNormExpl.AddDetail(new Explanation(doclen, "fieldLength")); tfNormExpl.Value = (freq.Value * (K1_Renamed + 1)) / (freq.Value + K1_Renamed * (1 - b + b * doclen / stats.Avgdl)); } result.AddDetail(tfNormExpl); result.Value = boostExpl.Value * stats.Idf.Value * tfNormExpl.Value; return result; }
/// <summary>
/// Delegates to the outer similarity's explanation, decoding the stored norm
/// into a document length (1.0 when norms are absent for the field).
/// </summary>
public override Explanation Explain(int doc, Explanation freq)
{
    float docLen;
    if (Norms == null)
    {
        docLen = 1F;
    }
    else
    {
        docLen = OuterInstance.DecodeNormValue((sbyte)Norms.Get(doc));
    }
    return OuterInstance.Explain(Stats, doc, freq, docLen);
}
// Explains the payload-boosted span score as the product of the base (non
// payload) explanation and the payload score ("btq, product of:"). The
// payloadBoost detail is added to the result before its value/description are
// filled in; AddDetail stores a reference, so the later assignments still
// show up. Match follows the base explanation's value (LUCENE-1303).
protected internal override Explanation Explain(int doc) { ComplexExplanation result = new ComplexExplanation(); Explanation nonPayloadExpl = base.Explain(doc); result.AddDetail(nonPayloadExpl); // QUESTION: Is there a way to avoid this skipTo call? We need to know // whether to load the payload or not Explanation payloadBoost = new Explanation(); result.AddDetail(payloadBoost); float payloadScore = GetPayloadScore(); payloadBoost.Value = payloadScore; // GSI: I suppose we could toString the payload, but I don't think that // would be a good idea payloadBoost.Description = "scorePayload(...)"; result.Value = nonPayloadExpl.Value * payloadScore; result.Description = "btq, product of:"; result.Match = nonPayloadExpl.Value == 0?false:true; // LUCENE-1303 return result; }
/// <summary>
/// Inserts one Explanation row; parameters are bound from the entity by
/// <c>SqlHelper</c>. Returns the number of affected rows.
/// </summary>
public int Add(Explanation explanation)
{
    const string insertSql = "insert into Explanation(Explanation_Id ,LoanType_Id ,Explanation_Detail,Explanation_DeleteId,Explanation_CreateTime,Explanation_UpdateTime) values(@Explanation_Id ,@LoanType_Id ,@Explanation_Detail,@Explanation_DeleteId,@Explanation_CreateTime,@Explanation_UpdateTime) ";
    return SqlHelper<Explanation>.ExecuteNonQuery(insertSql, explanation);
}
/// <summary>
/// Updates one Explanation row keyed by Explanation_Id; parameters are bound
/// from the entity by <c>SqlHelper</c>. Returns the number of affected rows.
/// </summary>
public int Edit(Explanation explanation)
{
    const string updateSql = "update Explanation set LoanType_Id =@LoanType_Id,Explanation_Detail=@Explanation_Detail,Explanation_UpdateTime=@Explanation_UpdateTime where Explanation_Id=@Explanation_Id";
    return SqlHelper<Explanation>.ExecuteNonQuery(updateSql, explanation);
}
/// <summary>
/// Extracts the end offset encoded as the third dot-separated token of the
/// explanation id. NOTE(review): assumes ids look like "x.y.endOffset" —
/// confirm against the code that builds <c>Explanation.Id</c>.
/// </summary>
private static int GetEndOffSet(Explanation e)
{
    string[] parts = e.Id.Split('.');
    // Parse with the invariant culture so the result does not depend on the
    // current thread's locale (CA1305).
    return int.Parse(parts[2], System.Globalization.CultureInfo.InvariantCulture);
}
/*
 * Assert that an explanation has the expected score, and optionally that its
 * sub-details max/sum/factor match to that score.
 *
 * @param q String representation of the query for assertion messages
 * @param doc Document ID for assertion messages
 * @param score Real score value of doc with query q
 * @param deep indicates whether a deep comparison of sub-Explanation details should be executed
 * @param expl The Explanation to match against score
 */
public static void verifyExplanation(String q, int doc, float score, bool deep, Explanation expl)
{
    float value = expl.Value;
    assertEquals(q + ": score(doc=" + doc + ")=" + score + " != explanationScore=" + value + " Explanation: " + expl, score, value, explainToleranceDelta(score, value));

    if (!deep)
    {
        return;
    }

    var detail = expl.GetDetails();
    // TODO: can we improve this entire method? its really geared to work only with TF/IDF
    if (expl.Description.EndsWith("computed from:"))
    {
        return; // something more complicated.
    }
    if (detail != null)
    {
        if (detail.Length == 1)
        {
            // simple containment, unless its a freq of: (which lets a query explain how the freq is calculated),
            // just verify contained expl has same score
            if (!expl.Description.EndsWith("with freq of:"))
            {
                verifyExplanation(q, doc, score, deep, detail[0]);
            }
        }
        else
        {
            // explanation must either:
            // - end with one of: "product of:", "sum of:", "max of:", or
            // - have "max plus <x> times others" (where <x> is float).
            float x = 0;
            String descr = expl.Description.ToLowerInvariant();
            bool productOf = descr.EndsWith("product of:");
            bool sumOf = descr.EndsWith("sum of:");
            bool maxOf = descr.EndsWith("max of:");
            bool maxTimesOthers = false;
            if (!(productOf || sumOf || maxOf))
            {
                // maybe 'max plus x times others'
                int k1 = descr.IndexOf("max plus ");
                if (k1 >= 0)
                {
                    k1 += "max plus ".Length;
                    int k2 = descr.IndexOf(" ", k1);
                    try
                    {
                        // BUGFIX: .NET Substring takes (startIndex, length), not
                        // (start, end) as Java's String.substring does. The
                        // original passed the absolute index k2 as a length,
                        // which extracts the wrong span (or throws).
                        x = float.Parse(descr.Substring(k1, k2 - k1).Trim());
                        if (descr.Substring(k2).Trim().Equals("times others of:"))
                        {
                            maxTimesOthers = true;
                        }
                    }
                    catch (FormatException)
                    {
                        // best-effort parse; the assertion below reports failure
                    }
                }
            }
            // TODO: this is a TERRIBLE assertion!!!!
            assertTrue(q + ": multi valued explanation description=\"" + descr + "\" must be 'max of plus x times others' or end with 'product of'" + " or 'sum of:' or 'max of:' - " + expl, productOf || sumOf || maxOf || maxTimesOthers);
            float sum = 0;
            float product = 1;
            float max = 0;
            for (int i = 0; i < detail.Length; i++)
            {
                float dval = detail[i].Value;
                verifyExplanation(q, doc, dval, deep, detail[i]);
                product *= dval;
                sum += dval;
                max = Math.Max(max, dval);
            }
            float combined = 0;
            if (productOf)
            {
                combined = product;
            }
            else if (sumOf)
            {
                combined = sum;
            }
            else if (maxOf)
            {
                combined = max;
            }
            else if (maxTimesOthers)
            {
                combined = max + x * (sum - max);
            }
            else
            {
                assertTrue("should never get here!", false);
            }
            assertEquals(q + ": actual subDetails combined==" + combined + " != value=" + value + " Explanation: " + expl, combined, value, explainToleranceDelta(combined, value));
        }
    }
}
/// <summary>Creates a populated <c>Maybe</c> carrying the given explanation.</summary>
public static Maybe<T> Some(T instance, Explanation explanation)
{
    var result = new Maybe<T>(instance, explanation);
    return result;
}
// Wraps a value together with the explanation of how it was produced;
// value initialization is delegated to the single-argument constructor.
private Maybe(T value, Explanation explanation) : this(value)
{
    Explanation = explanation;
}
// Constructs an empty Maybe that carries only an explanation (no value).
private Maybe(Explanation explanation)
{
    Explanation = explanation;
}
/// <summary>
/// Subclasses should implement this method to explain the score. <paramref name="expl"/>
/// already contains the score, the name of the class and the doc id, as well
/// as the term frequency and its explanation; subclasses can add additional
/// clauses to explain details of their scoring formulae.
/// <para>The default implementation does nothing (intentional no-op).</para>
/// </summary>
/// <param name="expl"> the explanation to extend with details. </param>
/// <param name="stats"> the corpus level statistics. </param>
/// <param name="doc"> the document id. </param>
/// <param name="freq"> the term frequency. </param>
/// <param name="docLen"> the document length. </param>
protected internal virtual void Explain(Explanation expl, BasicStats stats, int doc, float freq, float docLen) { }
// Unity lifecycle hook: runs once when this component is loaded, logging the
// result of a sample explanation run.
// NOTE(review): assumes Explanation<string, int>.CreateInstance() returns a
// fluent builder and that Explain("17", null) tolerates a null second
// argument — confirm against the Explanation<TIn, TOut> implementation.
private void Awake() { Log.L(Explanation <string, int> .CreateInstance().SetExplainer(new ExplainExample()).Explain("17", null)); }
/// <summary>
/// Subclasses should implement this method to explain the score. <c>expl</c>
/// already contains the score, the name of the class and the doc id, as well
/// as the term frequency and its explanation; subclasses can add additional
/// clauses to explain details of their scoring formulae.
/// <para>The default implementation does nothing (intentional no-op).</para>
/// </summary>
/// <param name="expl"> the explanation to extend with details. </param>
/// <param name="stats"> the corpus level statistics. </param>
/// <param name="doc"> the document id. </param>
/// <param name="freq"> the term frequency. </param>
/// <param name="docLen"> the document length. </param>
protected internal virtual void Explain(Explanation expl, BasicStats stats, int doc, float freq, float docLen) { }
// Formats the entry either as a one-line brief ("id.word [abbr]") or a
// multi-line detail view appending each optional field when non-blank.
// Returns null for unrecognized format values.
// NOTE(review): the Detail-branch labels (e.g. "ๆผ้ณ๏ผ") appear to be
// mojibake of Chinese field labels ("拼音：" etc.) from a source-encoding
// mixup — they are runtime strings, so left untouched here; confirm the
// file's encoding and restore the intended labels at the source.
public override string ToString(FormatType format, object supplement = null) { switch (format) { case FormatType.Brief: return($"{Id}.{Word} [{Abbreviation}]"); case FormatType.Detail: return($"{Id}.{Word} [{Abbreviation}]{Pinyin?.BeIfNotWhiteSpace($"\nๆผ้ณ๏ผ{Pinyin}")}{Explanation?.BeIfNotWhiteSpace($"\n่งฃ้๏ผ{Explanation}")}{Derivation?.BeIfNotWhiteSpace($"\nๆฅๆบ๏ผ{Derivation}")}{Example?.BeIfNotWhiteSpace($"\nไพๅญ๏ผ{Example}")}"); } return(null); }
/// <summary>
/// Explains the combined score as "sum of:" with one detail per sub-scorer's
/// own explanation; the top-level value comes from <c>Score(doc, freq.Value)</c>.
/// </summary>
public override Explanation Explain(int doc, Explanation freq)
{
    var sumExpl = new Explanation(Score(doc, freq.Value), "sum of:");
    foreach (SimScorer scorer in SubScorers)
    {
        sumExpl.AddDetail(scorer.Explain(doc, freq));
    }
    return sumExpl;
}
// Classic TF/IDF score explanation: queryWeight (boost * idf * queryNorm,
// boost detail omitted when boost == 1) times fieldWeight
// (tf * idf * fieldNorm, fieldNorm defaulting to 1.0 without norms). When the
// query weight is exactly 1.0 the wrapper is elided and the field-side
// explanation is returned directly — standard Lucene convention.
private Explanation ExplainScore(int doc, Explanation freq, IDFStats stats, NumericDocValues norms) { Explanation result = new Explanation(); result.Description = "score(doc=" + doc + ",freq=" + freq + "), product of:"; // explain query weight Explanation queryExpl = new Explanation(); queryExpl.Description = "queryWeight, product of:"; Explanation boostExpl = new Explanation(stats.QueryBoost, "boost"); if (stats.QueryBoost != 1.0f) { queryExpl.AddDetail(boostExpl); } queryExpl.AddDetail(stats.Idf); Explanation queryNormExpl = new Explanation(stats.QueryNorm, "queryNorm"); queryExpl.AddDetail(queryNormExpl); queryExpl.Value = boostExpl.Value * stats.Idf.Value * queryNormExpl.Value; result.AddDetail(queryExpl); // explain field weight Explanation fieldExpl = new Explanation(); fieldExpl.Description = "fieldWeight in " + doc + ", product of:"; Explanation tfExplanation = new Explanation(); tfExplanation.Value = Tf(freq.Value); tfExplanation.Description = "tf(freq=" + freq.Value + "), with freq of:"; tfExplanation.AddDetail(freq); fieldExpl.AddDetail(tfExplanation); fieldExpl.AddDetail(stats.Idf); Explanation fieldNormExpl = new Explanation(); float fieldNorm = norms != null?DecodeNormValue(norms.Get(doc)) : 1.0f; fieldNormExpl.Value = fieldNorm; fieldNormExpl.Description = "fieldNorm(doc=" + doc + ")"; fieldExpl.AddDetail(fieldNormExpl); fieldExpl.Value = tfExplanation.Value * stats.Idf.Value * fieldNormExpl.Value; result.AddDetail(fieldExpl); // combine them result.Value = queryExpl.Value * fieldExpl.Value; if (queryExpl.Value == 1.0f) { return(fieldExpl); } return(result); }
/// <summary>
/// Adds the information-based model's sub-explanations: an optional boost
/// detail (only when total boost != 1), the normalization, the lambda, and the
/// distribution evaluated at the normalized tf and lambda values.
/// </summary>
protected internal override void Explain(Explanation expl, BasicStats stats, int doc, float freq, float docLen)
{
    if (stats.TotalBoost != 1.0f)
    {
        expl.AddDetail(new Explanation(stats.TotalBoost, "boost"));
    }
    Explanation normalization = Normalization_Renamed.Explain(stats, freq, docLen);
    Explanation lambda = Lambda_Renamed.Explain(stats);
    expl.AddDetail(normalization);
    expl.AddDetail(lambda);
    expl.AddDetail(Distribution_Renamed.Explain(stats, normalization.Value, lambda.Value));
}
/// <summary>
/// Records the explanation's string form as a model-level (empty-key)
/// model-state error.
/// </summary>
public static void AddExplanation(this ModelStateDictionary modelState, Explanation explanation)
{
    string message = explanation.ToString();
    modelState.AddModelError(string.Empty, message);
}
/// <summary>
/// Delegates to the enclosing similarity's <c>ExplainScore</c> using this
/// scorer's captured stats and norms.
/// </summary>
public override Explanation Explain(int doc, Explanation freq)
{
    Explanation scoreExplanation = OuterInstance.ExplainScore(doc, freq, Stats, Norms);
    return scoreExplanation;
}
// Explains this span weight for one document as queryWeight * fieldWeight
// (same structure as the other span-weight Explain in this file): query side
// is boost * idf * queryNorm with the boost detail omitted at boost == 1;
// field side is tf * idf * fieldNorm with fieldNorm defaulting to 1.0 when
// norms are absent. The wrapper is elided when queryExpl.Value == 1.0f.
public override Explanation Explain(IndexReader reader, int doc) { ComplexExplanation result = new ComplexExplanation(); result.Description = "weight(" + Query + " in " + doc + "), product of:"; System.String field = ((SpanQuery)Query).Field; Explanation idfExpl = new Explanation(idf, "idf(" + field + ": " + idfExp.Explain() + ")"); // explain query weight Explanation queryExpl = new Explanation(); queryExpl.Description = "queryWeight(" + Query + "), product of:"; Explanation boostExpl = new Explanation(Query.Boost, "boost"); if (Query.Boost != 1.0f) { queryExpl.AddDetail(boostExpl); } queryExpl.AddDetail(idfExpl); Explanation queryNormExpl = new Explanation(queryNorm, "queryNorm"); queryExpl.AddDetail(queryNormExpl); queryExpl.Value = boostExpl.Value * idfExpl.Value * queryNormExpl.Value; result.AddDetail(queryExpl); // explain field weight ComplexExplanation fieldExpl = new ComplexExplanation(); fieldExpl.Description = "fieldWeight(" + field + ":" + internalQuery.ToString(field) + " in " + doc + "), product of:"; Explanation tfExpl = ((SpanScorer)Scorer(reader, true, false)).Explain(doc); fieldExpl.AddDetail(tfExpl); fieldExpl.AddDetail(idfExpl); Explanation fieldNormExpl = new Explanation(); byte[] fieldNorms = reader.Norms(field); float fieldNorm = fieldNorms != null?Similarity.DecodeNorm(fieldNorms[doc]) : 1.0f; fieldNormExpl.Value = fieldNorm; fieldNormExpl.Description = "fieldNorm(field=" + field + ", doc=" + doc + ")"; fieldExpl.AddDetail(fieldNormExpl); fieldExpl.Match = tfExpl.IsMatch; fieldExpl.Value = tfExpl.Value * idfExpl.Value * fieldNormExpl.Value; result.AddDetail(fieldExpl); System.Boolean?tempAux = fieldExpl.Match; result.Match = tempAux; // combine them result.Value = queryExpl.Value * fieldExpl.Value; if (queryExpl.Value == 1.0f) { return(fieldExpl); } return(result); }
/// <summary>
/// Explain the custom score.
/// Whenever overriding <see cref="CustomScore(int, float, float)" />,
/// this method should also be overridden to provide the correct explanation
/// for the part of the custom scoring.
/// </summary>
/// <param name="doc">doc being explained</param>
/// <param name="subQueryExpl">explanation for the sub-query part</param>
/// <param name="valSrcExpl">explanation for the value source part</param>
/// <returns>an explanation for the custom score</returns>
// Value is subQuery * valueSource (valueSource treated as 1 when null).
// NOTE(review): valSrcExpl is added as a detail even when null — confirm
// Explanation.AddDetail tolerates a null detail before relying on this.
public virtual Explanation CustomExplain(int doc, Explanation subQueryExpl, Explanation valSrcExpl) { float valSrcScore = 1; if (valSrcExpl != null) { valSrcScore *= valSrcExpl.GetValue(); } Explanation exp = new Explanation(valSrcScore * subQueryExpl.GetValue(), "custom score: product of:"); exp.AddDetail(subQueryExpl); exp.AddDetail(valSrcExpl); return exp; }
// Deep-copies every element of this Immunization resource into `other`,
// which must itself be an Immunization; each non-null member is cloned via
// DeepCopy so the destination shares no mutable state with the source.
// Throws ArgumentException when `other` is a different resource type.
// (Mechanical per-field pattern generated for the FHIR model; kept verbatim.)
public override IDeepCopyable CopyTo(IDeepCopyable other) { var dest = other as Immunization; if (dest != null) { base.CopyTo(dest); if (Identifier != null) { dest.Identifier = new List <Hl7.Fhir.Model.Identifier>(Identifier.DeepCopy()); } if (DateElement != null) { dest.DateElement = (Hl7.Fhir.Model.FhirDateTime)DateElement.DeepCopy(); } if (VaccineType != null) { dest.VaccineType = (Hl7.Fhir.Model.CodeableConcept)VaccineType.DeepCopy(); } if (Subject != null) { dest.Subject = (Hl7.Fhir.Model.ResourceReference)Subject.DeepCopy(); } if (RefusedIndicatorElement != null) { dest.RefusedIndicatorElement = (Hl7.Fhir.Model.FhirBoolean)RefusedIndicatorElement.DeepCopy(); } if (ReportedElement != null) { dest.ReportedElement = (Hl7.Fhir.Model.FhirBoolean)ReportedElement.DeepCopy(); } if (Performer != null) { dest.Performer = (Hl7.Fhir.Model.ResourceReference)Performer.DeepCopy(); } if (Requester != null) { dest.Requester = (Hl7.Fhir.Model.ResourceReference)Requester.DeepCopy(); } if (Manufacturer != null) { dest.Manufacturer = (Hl7.Fhir.Model.ResourceReference)Manufacturer.DeepCopy(); } if (Location != null) { dest.Location = (Hl7.Fhir.Model.ResourceReference)Location.DeepCopy(); } if (LotNumberElement != null) { dest.LotNumberElement = (Hl7.Fhir.Model.FhirString)LotNumberElement.DeepCopy(); } if (ExpirationDateElement != null) { dest.ExpirationDateElement = (Hl7.Fhir.Model.Date)ExpirationDateElement.DeepCopy(); } if (Site != null) { dest.Site = (Hl7.Fhir.Model.CodeableConcept)Site.DeepCopy(); } if (Route != null) { dest.Route = (Hl7.Fhir.Model.CodeableConcept)Route.DeepCopy(); } if (DoseQuantity != null) { dest.DoseQuantity = (Hl7.Fhir.Model.Quantity)DoseQuantity.DeepCopy(); } if (Explanation != null) { dest.Explanation = (Hl7.Fhir.Model.Immunization.ImmunizationExplanationComponent)Explanation.DeepCopy(); } if (Reaction != null) { dest.Reaction = new List <Hl7.Fhir.Model.Immunization.ImmunizationReactionComponent>(Reaction.DeepCopy()); } if (VaccinationProtocol != null) 
{ dest.VaccinationProtocol = new List <Hl7.Fhir.Model.Immunization.ImmunizationVaccinationProtocolComponent>(VaccinationProtocol.DeepCopy()); } return(dest); } else { throw new ArgumentException("Can only copy to an object of the same type", "other"); } }
// BooleanQuery weight explanation. Walks clauses and weights in lockstep
// (cIter advanced manually per weight). Sums matching non-prohibited clause
// values; records failure details for unmatched required clauses and matched
// prohibited clauses; counts SHOULD matches against minimumNumberShouldMatch.
// On any failure or an unmet minShouldMatch the result is a zero-valued
// non-match. Otherwise the sum is scaled by the coordination factor; when the
// coord factor is exactly 1.0 the "product of:" wrapper is elided.
public override Explanation Explain(AtomicReaderContext context, int doc) { int minShouldMatch = outerInstance.MinimumNumberShouldMatch; ComplexExplanation sumExpl = new ComplexExplanation(); sumExpl.Description = "sum of:"; int coord = 0; float sum = 0.0f; bool fail = false; int shouldMatchCount = 0; using (IEnumerator <BooleanClause> cIter = outerInstance.clauses.GetEnumerator()) { foreach (Weight w in m_weights) { cIter.MoveNext(); BooleanClause c = cIter.Current; if (w.GetScorer(context, context.AtomicReader.LiveDocs) == null) { if (c.IsRequired) { fail = true; Explanation r = new Explanation(0.0f, "no match on required clause (" + c.Query.ToString() + ")"); sumExpl.AddDetail(r); } continue; } Explanation e = w.Explain(context, doc); if (e.IsMatch) { if (!c.IsProhibited) { sumExpl.AddDetail(e); sum += e.Value; coord++; } else { Explanation r = new Explanation(0.0f, "match on prohibited clause (" + c.Query.ToString() + ")"); r.AddDetail(e); sumExpl.AddDetail(r); fail = true; } if (c.Occur == Occur_e.SHOULD) { shouldMatchCount++; } } else if (c.IsRequired) { Explanation r = new Explanation(0.0f, "no match on required clause (" + c.Query.ToString() + ")"); r.AddDetail(e); sumExpl.AddDetail(r); fail = true; } } } if (fail) { sumExpl.Match = false; sumExpl.Value = 0.0f; sumExpl.Description = "Failure to meet condition(s) of required/prohibited clause(s)"; return(sumExpl); } else if (shouldMatchCount < minShouldMatch) { sumExpl.Match = false; sumExpl.Value = 0.0f; sumExpl.Description = "Failure to match minimum number " + "of optional clauses: " + minShouldMatch; return(sumExpl); } sumExpl.Match = 0 < coord ? true : false; sumExpl.Value = sum; float coordFactor = disableCoord ? 
1.0f : Coord(coord, m_maxCoord); if (coordFactor == 1.0f) { return(sumExpl); // eliminate wrapper } else { ComplexExplanation result = new ComplexExplanation(sumExpl.IsMatch, sum * coordFactor, "product of:"); result.AddDetail(sumExpl); result.AddDetail(new Explanation(coordFactor, "coord(" + coord + "/" + m_maxCoord + ")")); return(result); } }
// TF/IDF score explanation (near-duplicate of the other ExplainScore in this
// file): queryWeight (boost * idf * queryNorm, boost detail omitted at
// boost == 1) times fieldWeight (tf * idf * fieldNorm, fieldNorm defaulting
// to 1.0 without norms). The wrapper is elided when queryExpl.Value == 1.0f.
private Explanation ExplainScore(int doc, Explanation freq, IDFStats stats, NumericDocValues norms) { Explanation result = new Explanation(); result.Description = "score(doc=" + doc + ",freq=" + freq + "), product of:"; // explain query weight Explanation queryExpl = new Explanation(); queryExpl.Description = "queryWeight, product of:"; Explanation boostExpl = new Explanation(stats.QueryBoost, "boost"); if (stats.QueryBoost != 1.0f) { queryExpl.AddDetail(boostExpl); } queryExpl.AddDetail(stats.Idf); Explanation queryNormExpl = new Explanation(stats.QueryNorm, "queryNorm"); queryExpl.AddDetail(queryNormExpl); queryExpl.Value = boostExpl.Value * stats.Idf.Value * queryNormExpl.Value; result.AddDetail(queryExpl); // explain field weight Explanation fieldExpl = new Explanation(); fieldExpl.Description = "fieldWeight in " + doc + ", product of:"; Explanation tfExplanation = new Explanation(); tfExplanation.Value = Tf(freq.Value); tfExplanation.Description = "tf(freq=" + freq.Value + "), with freq of:"; tfExplanation.AddDetail(freq); fieldExpl.AddDetail(tfExplanation); fieldExpl.AddDetail(stats.Idf); Explanation fieldNormExpl = new Explanation(); float fieldNorm = norms != null ? DecodeNormValue(norms.Get(doc)) : 1.0f; fieldNormExpl.Value = fieldNorm; fieldNormExpl.Description = "fieldNorm(doc=" + doc + ")"; fieldExpl.AddDetail(fieldNormExpl); fieldExpl.Value = tfExplanation.Value * stats.Idf.Value * fieldNormExpl.Value; result.AddDetail(fieldExpl); // combine them result.Value = queryExpl.Value * fieldExpl.Value; if (queryExpl.Value == 1.0f) { return fieldExpl; } return result; }
/// <summary>
/// Explains this multi-term span weight for one document as
/// queryWeight * fieldWeight in the classic TF/IDF breakdown, listing each
/// term's docFreq inside the idf detail.
/// </summary>
public virtual Explanation Explain(IndexReader reader, int doc)
{
    ComplexExplanation result = new ComplexExplanation();
    result.SetDescription("weight(" + GetQuery() + " in " + doc + "), product of:");
    System.String field = ((SpanQuery)GetQuery()).GetField();

    // Build the space-separated "term=docFreq" list for the idf detail.
    // BUGFIX: the original called i.MoveNext() a second time inside the loop
    // to emulate Java's hasNext(); in .NET that ADVANCES the enumerator, so
    // every other term was silently dropped from the output.
    System.Text.StringBuilder docFreqs = new System.Text.StringBuilder();
    System.Collections.IEnumerator i = terms.GetEnumerator();
    bool first = true;
    while (i.MoveNext())
    {
        System.Collections.DictionaryEntry tmp = (System.Collections.DictionaryEntry)i.Current;
        Term term = (Term)tmp.Key;
        if (!first)
        {
            docFreqs.Append(" ");
        }
        first = false;
        docFreqs.Append(term.Text());
        docFreqs.Append("=");
        docFreqs.Append(reader.DocFreq(term));
    }

    Explanation idfExpl = new Explanation(idf, "idf(" + field + ": " + docFreqs + ")");

    // explain query weight: boost * idf * queryNorm (boost omitted at 1.0)
    Explanation queryExpl = new Explanation();
    queryExpl.SetDescription("queryWeight(" + GetQuery() + "), product of:");
    Explanation boostExpl = new Explanation(GetQuery().GetBoost(), "boost");
    if (GetQuery().GetBoost() != 1.0f)
    {
        queryExpl.AddDetail(boostExpl);
    }
    queryExpl.AddDetail(idfExpl);
    Explanation queryNormExpl = new Explanation(queryNorm, "queryNorm");
    queryExpl.AddDetail(queryNormExpl);
    queryExpl.SetValue(boostExpl.GetValue() * idfExpl.GetValue() * queryNormExpl.GetValue());
    result.AddDetail(queryExpl);

    // explain field weight: tf * idf * fieldNorm
    ComplexExplanation fieldExpl = new ComplexExplanation();
    fieldExpl.SetDescription("fieldWeight(" + field + ":" + query.ToString(field) + " in " + doc + "), product of:");
    Explanation tfExpl = Scorer(reader).Explain(doc);
    fieldExpl.AddDetail(tfExpl);
    fieldExpl.AddDetail(idfExpl);
    Explanation fieldNormExpl = new Explanation();
    byte[] fieldNorms = reader.Norms(field);
    // NOTE(review): sibling implementations in this file default the norm to
    // 1.0f when norms are absent; this one uses 0.0f — confirm which is intended.
    float fieldNorm = fieldNorms != null ? Similarity.DecodeNorm(fieldNorms[doc]) : 0.0f;
    fieldNormExpl.SetValue(fieldNorm);
    fieldNormExpl.SetDescription("fieldNorm(field=" + field + ", doc=" + doc + ")");
    fieldExpl.AddDetail(fieldNormExpl);
    fieldExpl.SetMatch(tfExpl.IsMatch());
    fieldExpl.SetValue(tfExpl.GetValue() * idfExpl.GetValue() * fieldNormExpl.GetValue());
    result.AddDetail(fieldExpl);
    System.Boolean tempAux = fieldExpl.GetMatch();
    result.SetMatch(tempAux);

    // combine them; elide the wrapper when the query weight is exactly 1.0
    result.SetValue(queryExpl.GetValue() * fieldExpl.GetValue());
    if (queryExpl.GetValue() == 1.0f)
    {
        return fieldExpl;
    }
    return result;
}
/// <summary>
/// Captures per-field IDF statistics; the query weight is precomputed as
/// idf * queryBoost.
/// </summary>
public IDFStats(string field, Explanation idf, float queryBoost)
{
    // TODO: Validate?
    Field = field;
    Idf = idf;
    QueryBoost = queryBoost;
    QueryWeight = idf.Value * queryBoost; // compute query weight
}
/// <summary>
/// Builds the IDF statistics for the query: the single-term overload is used
/// for one term, the multi-term overload otherwise.
/// </summary>
public override sealed SimWeight ComputeWeight(float queryBoost, CollectionStatistics collectionStats, params TermStatistics[] termStats)
{
    Explanation idfExplanation;
    if (termStats.Length == 1)
    {
        idfExplanation = IdfExplain(collectionStats, termStats[0]);
    }
    else
    {
        idfExplanation = IdfExplain(collectionStats, termStats);
    }
    return new IDFStats(collectionStats.Field, idfExplanation, queryBoost);
}
/// <summary>
/// Adds the language-model detail: the collection-level probability of the
/// term as computed by the collection model.
/// </summary>
protected internal override void Explain(Explanation expl, BasicStats stats, int doc, float freq, float docLen)
{
    var collectionProbability = m_collectionModel.ComputeProbability(stats);
    expl.AddDetail(new Explanation(collectionProbability, "collection probability"));
}
/// <summary>
/// Adds the DFR sub-explanations: an optional boost detail (only when total
/// boost != 1), the tf normalization, then the basic model and after-effect
/// evaluated at the normalized term frequency.
/// </summary>
protected internal override void Explain(Explanation expl, BasicStats stats, int doc, float freq, float docLen)
{
    if (stats.TotalBoost != 1.0f)
    {
        expl.AddDetail(new Explanation(stats.TotalBoost, "boost"));
    }
    Explanation normalization = Normalization_Renamed.Explain(stats, freq, docLen);
    float normalizedTf = normalization.Value;
    expl.AddDetail(normalization);
    expl.AddDetail(BasicModel_Renamed.Explain(stats, normalizedTf));
    expl.AddDetail(AfterEffect_Renamed.Explain(stats, normalizedTf));
}
/// <summary>
/// Scores the overlap between the query bounding box and <paramref name="target"/>.
/// The score is (intersection/queryArea)^queryPower *
/// (intersection/targetArea)^targetPower * 10000, or 0 when either shape
/// is degenerate or there is no overlap. Optionally fills
/// <paramref name="exp"/> with the intermediate factors.
/// </summary>
public double Score(Rectangle target, Explanation exp)
{
    // Degenerate inputs score 0.
    if (target == null || queryArea <= 0)
    {
        return (0);
    }
    double targetArea = target.GetArea(null);
    if (targetArea <= 0)
    {
        return (0);
    }
    double score = 0;

    // Vertical overlap is the same in every longitude case.
    double top = Math.Min(queryExtent.GetMaxY(), target.GetMaxY());
    double bottom = Math.Max(queryExtent.GetMinY(), target.GetMinY());
    double height = top - bottom;
    double width = 0;

    // queries that cross the date line
    if (queryExtent.GetCrossesDateLine())
    {
        // documents that cross the date line
        if (target.GetCrossesDateLine())
        {
            // Both wrap: overlap also wraps, so add 360 to un-wrap the span.
            double left = Math.Max(queryExtent.GetMinX(), target.GetMinX());
            double right = Math.Min(queryExtent.GetMaxX(), target.GetMaxX());
            width = right + 360.0 - left;
        }
        else
        {
            // Query wraps, target does not: try the query's western lobe
            // [queryMinX, 180] first, then its eastern lobe [-180, queryMaxX].
            // NOTE(review): using target.GetMaxX() as the *left* edge here
            // (and again for qryEastLeft below) looks like a copy-paste of
            // GetMaxX for GetMinX — the mirrored target-crosses branch uses
            // the min edge. Verify against the upstream spatial scorer
            // before relying on date-line scores.
            double qryWestLeft = Math.Max(queryExtent.GetMinX(), target.GetMaxX());
            double qryWestRight = Math.Min(target.GetMaxX(), 180.0);
            double qryWestWidth = qryWestRight - qryWestLeft;
            if (qryWestWidth > 0)
            {
                width = qryWestWidth;
            }
            else
            {
                double qryEastLeft = Math.Max(target.GetMaxX(), -180.0);
                double qryEastRight = Math.Min(queryExtent.GetMaxX(), target.GetMaxX());
                double qryEastWidth = qryEastRight - qryEastLeft;
                if (qryEastWidth > 0)
                {
                    width = qryEastWidth;
                }
            }
        }
    }
    else
    {
        // queries that do not cross the date line
        if (target.GetCrossesDateLine())
        {
            // Target wraps: test its western lobe up to 180, then the
            // eastern lobe from -180.
            double tgtWestLeft = Math.Max(queryExtent.GetMinX(), target.GetMinX());
            double tgtWestRight = Math.Min(queryExtent.GetMaxX(), 180.0);
            double tgtWestWidth = tgtWestRight - tgtWestLeft;
            if (tgtWestWidth > 0)
            {
                width = tgtWestWidth;
            }
            else
            {
                double tgtEastLeft = Math.Max(queryExtent.GetMinX(), -180.0);
                double tgtEastRight = Math.Min(queryExtent.GetMaxX(), target.GetMaxX());
                double tgtEastWidth = tgtEastRight - tgtEastLeft;
                if (tgtEastWidth > 0)
                {
                    width = tgtEastWidth;
                }
            }
        }
        else
        {
            // Neither wraps: plain interval intersection.
            double left = Math.Max(queryExtent.GetMinX(), target.GetMinX());
            double right = Math.Min(queryExtent.GetMaxX(), target.GetMaxX());
            width = right - left;
        }
    }

    // calculate the score
    if ((width > 0) && (height > 0))
    {
        double intersectionArea = width * height;
        double queryRatio = intersectionArea / queryArea;
        double targetRatio = intersectionArea / targetArea;
        double queryFactor = Math.Pow(queryRatio, queryPower);
        double targetFactor = Math.Pow(targetRatio, targetPower);
        // 10000 scales the product into a friendlier score range.
        score = queryFactor * targetFactor * 10000.0;

        if (exp != null)
        {
            exp.Value = (float)score;
            exp.Description = GetType().Name;
            // Each top-level detail carries its own sub-details; `e` is
            // reassigned as each detail node is attached.
            Explanation e = null;
            exp.AddDetail(e = new Explanation((float)intersectionArea, "IntersectionArea"));
            e.AddDetail(new Explanation((float)width, "width; Query: " + queryExtent));
            e.AddDetail(new Explanation((float)height, "height; Target: " + target));
            exp.AddDetail(e = new Explanation((float)queryFactor, "Query"));
            e.AddDetail(new Explanation((float)queryArea, "area"));
            e.AddDetail(new Explanation((float)queryRatio, "ratio"));
            e.AddDetail(new Explanation((float)queryPower, "power"));
            exp.AddDetail(e = new Explanation((float)targetFactor, "Target"));
            e.AddDetail(new Explanation((float)targetArea, "area"));
            e.AddDetail(new Explanation((float)targetRatio, "ratio"));
            e.AddDetail(new Explanation((float)targetPower, "power"));
        }
    }
    else if (exp != null)
    {
        exp.Value = 0;
        exp.Description = "Shape does not intersect";
    }
    return (score);
}
/// <summary> /// Explains the score. The implementation here provides a basic explanation /// in the format <em>score(name-of-similarity, doc=doc-id, /// freq=term-frequency), computed from:</em>, and /// attaches the score (computed via the <seealso cref="#score(BasicStats, float, float)"/> /// method) and the explanation for the term frequency. Subclasses content with /// this format may add additional details in /// <seealso cref="#explain(Explanation, BasicStats, int, float, float)"/>. /// </summary> /// <param name="stats"> the corpus level statistics. </param> /// <param name="doc"> the document id. </param> /// <param name="freq"> the term frequency and its explanation. </param> /// <param name="docLen"> the document length. </param> /// <returns> the explanation. </returns> public virtual Explanation Explain(BasicStats stats, int doc, Explanation freq, float docLen) { Explanation result = new Explanation(); result.Value = Score(stats, freq.Value, docLen); result.Description = "score(" + this.GetType().Name + ", doc=" + doc + ", freq=" + freq.Value + "), computed from:"; result.AddDetail(freq); Explain(result, stats, doc, freq.Value, docLen); return result; }
/// <summary> /// Generates a merged BrowseResult from the supplied <see cref="T:BrowseRequest"/>. /// </summary> /// <param name="req"><see cref="T:BrowseRequest"/> for generating the facets.</param> /// <returns><see cref="T:BrowseResult"/> of the results corresponding to the <see cref="T:BrowseRequest"/>.</returns> public virtual BrowseResult Browse(BrowseRequest req) { BrowseResult result = new BrowseResult(); // index empty if (_subBrowsers == null || _subBrowsers.Length == 0) { return(result); } long start = System.Environment.TickCount; int offset = req.Offset; int count = req.Count; if (offset < 0 || count < 0) { throw new ArgumentOutOfRangeException("both offset and count must be > 0: " + offset + "/" + count); } SortCollector collector = GetSortCollector(req.Sort, req.Query, offset, count, req.FetchStoredFields, req.TermVectorsToFetch, false, req.GroupBy, req.MaxPerGroup, req.CollectDocIdCache); var facetCollectors = new Dictionary <string, IFacetAccessible>(); Browse(req, collector, facetCollectors); if (req.MapReduceWrapper != null) { result.MapReduceResult = req.MapReduceWrapper.Result; } BrowseHit[] hits = null; try { hits = collector.TopDocs; } catch (Exception e) { logger.Error(e.Message, e); result.AddError(e.Message); hits = new BrowseHit[0]; } var q = req.Query; if (q == null) { q = new MatchAllDocsQuery(); } if (req.ShowExplanation) { foreach (BrowseHit hit in hits) { try { Explanation expl = Explain(q, hit.DocId); hit.Explanation = expl; } catch (Exception e) { logger.Error(e.Message, e); result.AddError(e.Message); } } } result.Hits = hits; result.NumHits = collector.TotalHits; result.NumGroups = collector.TotalGroups; result.GroupAccessibles = collector.GroupAccessibles; result.SortCollector = collector; result.TotalDocs = this.NumDocs(); result.AddAll(facetCollectors); long end = System.Environment.TickCount; result.Time = (end - start); // set the transaction ID to trace transactions result.Tid = req.Tid; return(result); }
/// <summary> /// Explains how the score for the specified document was /// computed. /// </summary> public abstract Explanation Explain(IndexSearcher searcher, Explanation firstPassExplanation, int docID);
public override Explanation Explain(int doc, Explanation freq) { return(outerInstance.Explain(stats, doc, freq, norms == null ? 1F : outerInstance.DecodeNormValue((byte)norms.Get(doc)))); }
/// <summary> /// Explain the score for a single document </summary> /// <param name="doc"> document id within the inverted index segment </param> /// <param name="freq"> Explanation of how the sloppy term frequency was computed </param> /// <returns> document's score </returns> public virtual Explanation Explain(int doc, Explanation freq) { Explanation result = new Explanation(Score(doc, freq.Value), "score(doc=" + doc + ",freq=" + freq.Value + "), with freq of:"); result.AddDetail(freq); return result; }
public override Explanation Explain(int doc, Explanation freq) { return(outerInstance.ExplainScore(doc, freq, stats, norms)); }
public override Explanation Explain(IndexReader reader, int doc) { ComplexExplanation result = new ComplexExplanation(); result.SetDescription("weight(" + GetQuery() + " in " + doc + "), product of:"); System.String field = ((SpanQuery)GetQuery()).GetField(); Explanation idfExpl = new Explanation(idf, "idf(" + field + ": " + idfExp.Explain() + ")"); // explain query weight Explanation queryExpl = new Explanation(); queryExpl.SetDescription("queryWeight(" + GetQuery() + "), product of:"); Explanation boostExpl = new Explanation(GetQuery().GetBoost(), "boost"); if (GetQuery().GetBoost() != 1.0f) { queryExpl.AddDetail(boostExpl); } queryExpl.AddDetail(idfExpl); Explanation queryNormExpl = new Explanation(queryNorm, "queryNorm"); queryExpl.AddDetail(queryNormExpl); queryExpl.SetValue(boostExpl.GetValue() * idfExpl.GetValue() * queryNormExpl.GetValue()); result.AddDetail(queryExpl); // explain field weight ComplexExplanation fieldExpl = new ComplexExplanation(); fieldExpl.SetDescription("fieldWeight(" + field + ":" + query.ToString(field) + " in " + doc + "), product of:"); Explanation tfExpl = Scorer(reader, true, false).Explain(doc); fieldExpl.AddDetail(tfExpl); fieldExpl.AddDetail(idfExpl); Explanation fieldNormExpl = new Explanation(); byte[] fieldNorms = reader.Norms(field); float fieldNorm = fieldNorms != null?Similarity.DecodeNorm(fieldNorms[doc]) : 1.0f; fieldNormExpl.SetValue(fieldNorm); fieldNormExpl.SetDescription("fieldNorm(field=" + field + ", doc=" + doc + ")"); fieldExpl.AddDetail(fieldNormExpl); fieldExpl.SetMatch(tfExpl.IsMatch()); fieldExpl.SetValue(tfExpl.GetValue() * idfExpl.GetValue() * fieldNormExpl.GetValue()); result.AddDetail(fieldExpl); System.Boolean?tempAux = fieldExpl.GetMatch(); result.SetMatch(tempAux); // combine them result.SetValue(queryExpl.GetValue() * fieldExpl.GetValue()); if (queryExpl.GetValue() == 1.0f) { return(fieldExpl); } return(result); }
/// <summary>
/// Fetches SPF records, parses them, and
/// evaluates them to determine whether a particular host is or is not
/// permitted to send mail with a given identity.
/// </summary>
/// <param name="spfStatement">Information about current query.</param>
/// <param name="spfExpressions">SPF Expressions that can be used, in case a domain lacks SPF records in the DNS.</param>
/// <returns>Result of SPF evaluation, together with an optional explanation string,
/// if one exists, and if the result indicates a failure.</returns>
internal static async Task<KeyValuePair<SpfResult, string>> CheckHost(SpfStatement spfStatement, SpfExpression[] spfExpressions)
{
    Explanation explanation = null;
    string[] spfStatementStrings = null;
    string s;

    // Locate the domain's SPF record among its TXT records. Exactly one
    // "v=spf1" record is allowed; more than one is a permanent error.
    try
    {
        string[] txt = await DnsResolver.LookupText(spfStatement.Domain);

        foreach (string row in txt)
        {
            s = row.Trim();
            // Strip surrounding quotes, if present.
            if (s.Length > 1 && s[0] == '"' && s[s.Length - 1] == '"')
            {
                s = s.Substring(1, s.Length - 2);
            }

            if (!s.StartsWith("v=spf1"))
            {
                continue;
            }

            if (!(spfStatementStrings is null))
            {
                return (new KeyValuePair<SpfResult, string>(SpfResult.PermanentError, "Multiple SPF records found for " + spfStatement.Domain + "."));
            }

            spfStatementStrings = s.Substring(6).Trim().Split(Space, StringSplitOptions.RemoveEmptyEntries);
        }
    }
    catch (Exception)
    {
        // DNS lookup failure: fall through to the local expression list.
        spfStatementStrings = null;
    }

    // No DNS record: fall back to any locally-configured SPF expression
    // applicable to this domain.
    if (spfStatementStrings is null)
    {
        if (!(spfExpressions is null))
        {
            foreach (SpfExpression expression in spfExpressions)
            {
                if (expression.IsApplicable(spfStatement.Domain))
                {
                    if (expression.Spf.StartsWith("v=spf1"))
                    {
                        spfStatementStrings = expression.Spf.Substring(6).Trim().Split(Space, StringSplitOptions.RemoveEmptyEntries);
                        break;
                    }
                }
            }
        }

        if (spfStatementStrings is null)
        {
            return (new KeyValuePair<SpfResult, string>(SpfResult.None, "No SPF records found " + spfStatement.Domain + "."));
        }
    }

    // Syntax evaluation first, §4.6
    int c = spfStatementStrings.Length;
    LinkedList<Mechanism> mechanisms = new LinkedList<Mechanism>();
    Redirect redirect = null;
    int i;

    try
    {
        // Parse every term: an optional qualifier prefix (+ - ~ ?,
        // defaulting to Pass) followed by a mechanism or modifier name.
        for (i = 0; i < c; i++)
        {
            SpfQualifier qualifier;
            spfStatement.Reset(spfStatementStrings[i]);
            spfStatement.SkipWhitespace();

            switch (spfStatement.PeekNextCharacter())
            {
                case '+':
                    spfStatement.Position++;
                    qualifier = SpfQualifier.Pass;
                    break;

                case '-':
                    spfStatement.Position++;
                    qualifier = SpfQualifier.Fail;
                    break;

                case '~':
                    spfStatement.Position++;
                    qualifier = SpfQualifier.SoftFail;
                    break;

                case '?':
                    spfStatement.Position++;
                    qualifier = SpfQualifier.Neutral;
                    break;

                default:
                    qualifier = SpfQualifier.Pass;
                    break;
            }

            switch (spfStatement.NextLabel().ToLower())
            {
                case "all":
                    mechanisms.AddLast(new All(spfStatement, qualifier));
                    break;

                case "include":
                    mechanisms.AddLast(new Include(spfStatement, qualifier, spfExpressions));
                    break;

                case "a":
                    mechanisms.AddLast(new A(spfStatement, qualifier));
                    break;

                case "mx":
                    mechanisms.AddLast(new Mx(spfStatement, qualifier));
                    break;

                case "ptr":
                    mechanisms.AddLast(new Ptr(spfStatement, qualifier));
                    break;

                case "ip4":
                    mechanisms.AddLast(new Ip4(spfStatement, qualifier));
                    break;

                case "ip6":
                    mechanisms.AddLast(new Ip6(spfStatement, qualifier));
                    break;

                case "exists":
                    mechanisms.AddLast(new Exists(spfStatement, qualifier));
                    break;

                case "redirect":
                    // At most one redirect modifier is permitted.
                    if (!(redirect is null))
                    {
                        return (new KeyValuePair<SpfResult, string>(SpfResult.PermanentError, "Multiple redirect modifiers found in SPF record."));
                    }

                    redirect = new Redirect(spfStatement, qualifier);
                    break;

                case "exp":
                    // At most one exp modifier is permitted.
                    if (!(explanation is null))
                    {
                        return (new KeyValuePair<SpfResult, string>(SpfResult.PermanentError, "Multiple exp modifiers found in SPF record."));
                    }

                    explanation = new Explanation(spfStatement, qualifier);
                    break;

                default:
                    throw new Exception("Syntax error.");
            }
        }

        // Evaluate mechanisms left-to-right; the first one that matches
        // determines the result via its qualifier. Failure results carry
        // the evaluated exp= explanation when one was declared.
        foreach (Mechanism mechanism in mechanisms)
        {
            await mechanism.Expand();
            SpfResult result = await mechanism.Matches();

            switch (result)
            {
                case SpfResult.Pass:
                    switch (mechanism.Qualifier)
                    {
                        case SpfQualifier.Pass:
                            return (new KeyValuePair<SpfResult, string>(SpfResult.Pass, null));

                        case SpfQualifier.Fail:
                            return (new KeyValuePair<SpfResult, string>(SpfResult.Fail, explanation == null ? null : await explanation.Evaluate()));

                        case SpfQualifier.Neutral:
                            return (new KeyValuePair<SpfResult, string>(SpfResult.Neutral, null));

                        case SpfQualifier.SoftFail:
                            return (new KeyValuePair<SpfResult, string>(SpfResult.SoftFail, explanation == null ? null : await explanation.Evaluate()));
                    }
                    break;

                case SpfResult.TemporaryError:
                    return (new KeyValuePair<SpfResult, string>(SpfResult.TemporaryError, explanation == null ? null : await explanation.Evaluate()));

                case SpfResult.None:
                case SpfResult.PermanentError:
                    // NOTE(review): a None result from a mechanism is
                    // mapped to PermanentError here — presumably matching
                    // RFC 7208's include semantics; confirm this is
                    // intended for all mechanism types.
                    return (new KeyValuePair<SpfResult, string>(SpfResult.PermanentError, explanation == null ? null : await explanation.Evaluate()));
            }
        }

        // No mechanism decided the outcome: follow the redirect modifier,
        // if any, re-evaluating with the redirected domain (the original
        // domain is restored afterwards).
        if (!(redirect is null))
        {
            await redirect.Expand();

            string bak = spfStatement.Domain;
            spfStatement.Domain = redirect.Domain;
            try
            {
                KeyValuePair<SpfResult, string> result = await SpfResolver.CheckHost(spfStatement, spfExpressions);

                if (result.Key == SpfResult.None)
                {
                    // Per redirect semantics, no record at the target is a
                    // permanent error.
                    return (new KeyValuePair<SpfResult, string>(SpfResult.PermanentError, explanation == null ? null : await explanation.Evaluate()));
                }
                else if (result.Key != SpfResult.Pass && result.Key != SpfResult.Neutral && string.IsNullOrEmpty(result.Value))
                {
                    // Propagate the failure, attaching this record's
                    // explanation when the redirected record had none.
                    return (new KeyValuePair<SpfResult, string>(result.Key, explanation == null ? null : await explanation.Evaluate()));
                }
                else
                {
                    return (result);
                }
            }
            finally
            {
                spfStatement.Domain = bak;
            }
        }
    }
    catch (Exception ex)
    {
        return (new KeyValuePair<SpfResult, string>(SpfResult.PermanentError, "Unable to evaluate SPF record: " + FirstRow(ex.Message)));
    }

    // Record exhausted without a verdict: Neutral.
    return (new KeyValuePair<SpfResult, string>(SpfResult.Neutral, null));
}
public SpecificationResult(Explanation explanation) : this(explanation.IsSatisfied, new List <Explanation> { explanation }) { }
public override Explanation Explain(IndexReader reader, int doc) { Explanation explain = DoExplain(reader, doc); return(explain == null?new Explanation(0.0f, "no matching docs"):explain); }
/// <summary> /// Computes a score factor for a phrase. /// /// <p> /// The default implementation sums the idf factor for /// each term in the phrase. /// </summary> /// <param name="collectionStats"> collection-level statistics </param> /// <param name="termStats"> term-level statistics for the terms in the phrase </param> /// <returns> an Explain object that includes both an idf /// score factor for the phrase and an explanation /// for each term. </returns> public virtual Explanation IdfExplain(CollectionStatistics collectionStats, TermStatistics[] termStats) { long max = collectionStats.MaxDoc(); float idf = 0.0f; Explanation exp = new Explanation(); exp.Description = "idf(), sum of:"; foreach (TermStatistics stat in termStats) { long df = stat.DocFreq(); float termIdf = Idf(df, max); exp.AddDetail(new Explanation(termIdf, "idf(docFreq=" + df + ", maxDocs=" + max + ")")); idf += termIdf; } exp.Value = idf; return exp; }
/// <summary>
/// Indexes a random set of from/to documents and, for random term
/// queries, verifies that the join query produced by
/// JoinUtil.CreateJoinQuery returns the expected matching documents,
/// top docs and explanations.
/// </summary>
private void ExecuteRandomJoin(bool multipleValuesPerDocument, int maxIndexIter, int maxSearchIter, int numberOfDocumentsToIndex)
{
    for (int indexIter = 1; indexIter <= maxIndexIter; indexIter++)
    {
        if (VERBOSE)
        {
            Console.WriteLine("indexIter=" + indexIter);
        }
        Directory dir = NewDirectory();
        RandomIndexWriter w = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random(), MockTokenizer.KEYWORD, false)).SetMergePolicy(NewLogMergePolicy()));
        bool scoreDocsInOrder = TestJoinUtil.Random().NextBoolean();
        IndexIterationContext context = CreateContext(numberOfDocumentsToIndex, w, multipleValuesPerDocument, scoreDocsInOrder);

        IndexReader topLevelReader = w.Reader;
        w.Dispose();
        for (int searchIter = 1; searchIter <= maxSearchIter; searchIter++)
        {
            if (VERBOSE)
            {
                Console.WriteLine("searchIter=" + searchIter);
            }
            IndexSearcher indexSearcher = NewSearcher(topLevelReader);

            // Pick a random indexed value and compute the documents it is
            // expected to join to.
            int r = Random().Next(context.RandomUniqueValues.Length);
            bool from = context.RandomFrom[r];
            string randomValue = context.RandomUniqueValues[r];
            FixedBitSet expectedResult = CreateExpectedResult(randomValue, from, indexSearcher.IndexReader, context);

            Query actualQuery = new TermQuery(new Term("value", randomValue));
            if (VERBOSE)
            {
                Console.WriteLine("actualQuery=" + actualQuery);
            }
            var scoreModeLength = Enum.GetNames(typeof(ScoreMode)).Length;
            ScoreMode scoreMode = (ScoreMode)Random().Next(scoreModeLength);
            if (VERBOSE)
            {
                Console.WriteLine("scoreMode=" + scoreMode);
            }

            // Join in whichever direction the sampled value lives.
            Query joinQuery;
            if (from)
            {
                joinQuery = JoinUtil.CreateJoinQuery("from", multipleValuesPerDocument, "to", actualQuery, indexSearcher, scoreMode);
            }
            else
            {
                joinQuery = JoinUtil.CreateJoinQuery("to", multipleValuesPerDocument, "from", actualQuery, indexSearcher, scoreMode);
            }
            if (VERBOSE)
            {
                Console.WriteLine("joinQuery=" + joinQuery);
            }

            // Need to know all documents that have matches. TopDocs doesn't give me that and then I'd be also testing TopDocsCollector...
            FixedBitSet actualResult = new FixedBitSet(indexSearcher.IndexReader.MaxDoc);
            TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.Create(10, false);
            indexSearcher.Search(joinQuery, new CollectorAnonymousInnerClassHelper2(this, scoreDocsInOrder, context, actualResult, topScoreDocCollector));

            // Asserting bit set...
            if (VERBOSE)
            {
                Console.WriteLine("expected cardinality:" + expectedResult.Cardinality());
                DocIdSetIterator iterator = expectedResult.GetIterator();
                for (int doc = iterator.NextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = iterator.NextDoc())
                {
                    Console.WriteLine(string.Format("Expected doc[{0}] with id value {1}", doc, indexSearcher.Doc(doc).Get("id")));
                }
                Console.WriteLine("actual cardinality:" + actualResult.Cardinality());
                iterator = actualResult.GetIterator();
                for (int doc = iterator.NextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = iterator.NextDoc())
                {
                    Console.WriteLine(string.Format("Actual doc[{0}] with id value {1}", doc, indexSearcher.Doc(doc).Get("id")));
                }
            }
            assertEquals(expectedResult, actualResult);

            // Asserting TopDocs...
            TopDocs expectedTopDocs = CreateExpectedTopDocs(randomValue, from, scoreMode, context);
            TopDocs actualTopDocs = topScoreDocCollector.GetTopDocs();
            assertEquals(expectedTopDocs.TotalHits, actualTopDocs.TotalHits);
            assertEquals(expectedTopDocs.ScoreDocs.Length, actualTopDocs.ScoreDocs.Length);
            if (scoreMode == ScoreMode.None)
            {
                // Scores are meaningless without a score mode; skip them.
                continue;
            }
            assertEquals(expectedTopDocs.MaxScore, actualTopDocs.MaxScore, 0.0f);
            for (int i = 0; i < expectedTopDocs.ScoreDocs.Length; i++)
            {
                if (VERBOSE)
                {
                    // BUG FIX: these string.Format results were previously
                    // computed and discarded; actually emit the diagnostics.
                    Console.WriteLine(string.Format("Expected doc: {0} | Actual doc: {1}\n", expectedTopDocs.ScoreDocs[i].Doc, actualTopDocs.ScoreDocs[i].Doc));
                    Console.WriteLine(string.Format("Expected score: {0} | Actual score: {1}\n", expectedTopDocs.ScoreDocs[i].Score, actualTopDocs.ScoreDocs[i].Score));
                }
                assertEquals(expectedTopDocs.ScoreDocs[i].Doc, actualTopDocs.ScoreDocs[i].Doc);
                assertEquals(expectedTopDocs.ScoreDocs[i].Score, actualTopDocs.ScoreDocs[i].Score, 0.0f);
                // The explanation's value must agree with the reported score.
                Explanation explanation = indexSearcher.Explain(joinQuery, expectedTopDocs.ScoreDocs[i].Doc);
                assertEquals(expectedTopDocs.ScoreDocs[i].Score, explanation.Value, 0.0f);
            }
        }
        topLevelReader.Dispose();
        dir.Dispose();
    }
}
internal BM25Stats(string field, Explanation idf, float queryBoost, float avgdl, float[] cache) { this.Field = field; this.Idf = idf; this.QueryBoost = queryBoost; this.Avgdl = avgdl; this.Cache = cache; }
protected internal override void Explain(Explanation expl, BasicStats stats, int doc, float freq, float docLen) { expl.AddDetail(new Explanation(collectionModel.ComputeProbability(stats), "collection probability")); }
protected internal override void Explain(Explanation expl, BasicStats stats, int doc, float freq, float docLen) { if (stats.TotalBoost != 1.0f) { expl.AddDetail(new Explanation(stats.TotalBoost, "boost")); } expl.AddDetail(new Explanation(Lambda_Renamed, "lambda")); base.Explain(expl, stats, doc, freq, docLen); }
public override Explanation CustomExplain(int doc, Explanation subQueryExpl, Explanation valSrcExpl) { return(parent.CustomExplain(doc, subQueryExpl, valSrcExpl)); }
public override Explanation CustomExplain(int doc, Explanation subQueryExpl, Explanation valSrcExpl) { return parent.CustomExplain(doc, subQueryExpl, valSrcExpl); }