/// <summary>
/// Creates a writer that adds documents to the given index directory.
/// </summary>
/// <param name="directory">The directory to write the document information to.</param>
/// <param name="analyzer">The analyzer to use for the document.</param>
/// <param name="similarity">The Similarity function.</param>
/// <param name="maxFieldLength">The maximum number of tokens a Field may have.</param>
public /*internal*/ DocumentWriter(Directory directory, Analyzer analyzer, Similarity similarity, int maxFieldLength)
{
    // Capture the collaborators; this type only stores them here.
    this.maxFieldLength = maxFieldLength;
    this.similarity = similarity;
    this.analyzer = analyzer;
    this.directory = directory;
}
/// <summary>
/// Builds a scorer over the given span enumeration.
/// Caches the weight's value so <c>Score()</c> does not re-read it per hit.
/// </summary>
internal SpanScorer(Spans spans, Weight weight, Similarity similarity, byte[] norms)
    : base(similarity)
{
    this.weight = weight;
    this.norms = norms;
    this.spans = spans;
    // Snapshot the query weight's value for use during scoring.
    this.value_Renamed = weight.Value;
}
/// <summary>
/// Expert: Resets the normalization factor for the named Field of the named
/// document. The float is encoded to a single byte before being stored.
/// </summary>
/// <param name="doc">The document number.</param>
/// <param name="field">The field whose norm is reset.</param>
/// <param name="value_Renamed">The new normalization factor.</param>
/// <seealso cref="Norms(String)"/>
/// <seealso cref="Similarity.EncodeNorm(float)"/>
public virtual void SetNorm(int doc, System.String field, float value_Renamed)
{
    // Delegate to the byte-based overload after encoding the float norm.
    byte encoded = Similarity.EncodeNorm(value_Renamed);
    SetNorm(doc, field, encoded);
}
/// <summary>
/// Expert: Set the Similarity implementation used by this IndexWriter.
/// </summary>
/// <param name="similarity">The Similarity implementation to use.</param>
/// <seealso cref="Similarity.SetDefault(Similarity)"/>
public virtual void SetSimilarity(Similarity similarity)
{
    this.similarity = similarity;
}
/// <summary>
/// Initializes fields that the Java-to-C# conversion could not set inline:
/// seeds <c>similarity</c> with the process-wide default implementation.
/// </summary>
private void InitBlock()
{
    this.similarity = Similarity.GetDefault();
}
/// <summary>
/// Wraps an inner scorer and a document bitset for the enclosing weight,
/// forwarding the Similarity to the base Scorer constructor.
/// </summary>
internal AnonymousClassScorer(Monodoc.Lucene.Net.Search.Scorer scorer, System.Collections.BitArray bitset, AnonymousClassWeight enclosingInstance, Monodoc.Lucene.Net.Search.Similarity Param1)
    : base(Param1)
{
    // Converter-generated pattern: state is stowed via InitBlock.
    InitBlock(scorer, bitset, enclosingInstance);
}
/// <summary>
/// Produces a detailed Explanation of how this span weight scores <paramref name="doc"/>:
/// the product of the query weight (boost * idf * queryNorm) and the field
/// weight (tf * idf * fieldNorm).
/// </summary>
/// <param name="reader">The index reader supplying norms and term statistics.</param>
/// <param name="doc">The document number being explained.</param>
/// <returns>
/// The combined Explanation; when the query-weight factor is exactly 1.0 the
/// field-weight Explanation is returned alone.
/// </returns>
public virtual Explanation Explain(Monodoc.Lucene.Net.Index.IndexReader reader, int doc)
{
    Explanation result = new Explanation();
    result.SetDescription("weight(" + Query + " in " + doc + "), product of:");
    System.String field = ((SpanQuery) Query).GetField();

    // Build "term=docFreq" pairs separated by single spaces.
    // BUGFIX: the previous code called i.MoveNext() a second time inside the
    // loop body to test for a following element; IEnumerator.MoveNext()
    // ADVANCES the enumerator (unlike Java's hasNext()), so every other term
    // was consumed and dropped from the explanation. Use a separator flag.
    System.Text.StringBuilder docFreqs = new System.Text.StringBuilder();
    System.Collections.IEnumerator i = terms.GetEnumerator();
    bool first = true;
    while (i.MoveNext())
    {
        if (!first)
        {
            docFreqs.Append(" ");
        }
        first = false;
        Term term = (Term) i.Current;
        docFreqs.Append(term.Text());
        docFreqs.Append("=");
        docFreqs.Append(searcher.DocFreq(term));
    }

    Explanation idfExpl = new Explanation(idf, "idf(" + field + ": " + docFreqs + ")");

    // Explain query weight: boost * idf * queryNorm.
    Explanation queryExpl = new Explanation();
    queryExpl.SetDescription("queryWeight(" + Query + "), product of:");
    Explanation boostExpl = new Explanation(Query.GetBoost(), "boost");
    if (Query.GetBoost() != 1.0f)
    {
        // A boost of 1.0 contributes nothing, so it is omitted from the detail.
        queryExpl.AddDetail(boostExpl);
    }
    queryExpl.AddDetail(idfExpl);
    Explanation queryNormExpl = new Explanation(queryNorm, "queryNorm");
    queryExpl.AddDetail(queryNormExpl);
    queryExpl.SetValue(boostExpl.GetValue() * idfExpl.GetValue() * queryNormExpl.GetValue());
    result.AddDetail(queryExpl);

    // Explain field weight: tf * idf * fieldNorm.
    Explanation fieldExpl = new Explanation();
    fieldExpl.SetDescription("fieldWeight(" + field + ":" + query.ToString(field) + " in " + doc + "), product of:");
    Explanation tfExpl = Scorer(reader).Explain(doc);
    fieldExpl.AddDetail(tfExpl);
    fieldExpl.AddDetail(idfExpl);

    Explanation fieldNormExpl = new Explanation();
    byte[] fieldNorms = reader.Norms(field);
    // Missing norms (e.g. field not indexed with norms) explain as 0.
    float fieldNorm = fieldNorms != null ? Similarity.DecodeNorm(fieldNorms[doc]) : 0.0f;
    fieldNormExpl.SetValue(fieldNorm);
    fieldNormExpl.SetDescription("fieldNorm(Field=" + field + ", doc=" + doc + ")");
    fieldExpl.AddDetail(fieldNormExpl);

    fieldExpl.SetValue(tfExpl.GetValue() * idfExpl.GetValue() * fieldNormExpl.GetValue());
    result.AddDetail(fieldExpl);

    // Combine the two factors.
    result.SetValue(queryExpl.GetValue() * fieldExpl.GetValue());

    if (queryExpl.GetValue() == 1.0f)
    {
        // Query weight is neutral; the field explanation is the whole story.
        return (fieldExpl);
    }
    return (result);
}
/// <summary>
/// Computes the score for the current document: the raw term-frequency score
/// (tf(freq) * cached query weight) normalized by the document's decoded norm.
/// </summary>
/// <returns>The normalized score for the current document.</returns>
public override float Score()
{
    // Raw score: similarity's tf curve applied to the span frequency,
    // scaled by the query weight captured at construction time.
    float raw = GetSimilarity().Tf(freq) * value_Renamed;
    // Normalize by this document's length norm, decoded from its byte form.
    float norm = Similarity.DecodeNorm(norms[doc]);
    return (raw * norm);
}