Norms() public abstract method

Returns the byte-encoded normalization factor for the named field of every document. This is used by the search code to score documents.
public abstract byte[] Norms(System.String field)
field: System.String — the name of the field whose norms are requested
returns: byte[] — one encoded norm value per document (may be null when the field has no norms)
			/// <summary>
			/// Creates the scorer for this weight. The norms array is resolved up
			/// front: null when no norms field was configured on the query.
			/// </summary>
			public override Scorer Scorer(IndexReader reader, bool scoreDocsInOrder, bool topScorer)
			{
				byte[] norms = null;
				if (Enclosing_Instance.normsField != null)
				{
					norms = reader.Norms(Enclosing_Instance.normsField);
				}
				return new MatchAllScorer(enclosingInstance, reader, similarity, this, norms);
			}
Example #2
0
		/// <summary>
		/// Verifies norm handling for every indexed field in DocHelper.fields:
		/// HasNorms must agree with the field's omitNorms flag and with the
		/// DocHelper.noNorms set, and fields without norms must expose either
		/// null or fake norms of 1.0 depending on the reader's fake-norms flag.
		/// </summary>
		public static void  CheckNorms(IndexReader reader)
		{
			foreach (Fieldable docField in DocHelper.fields)
			{
				if (!docField.IsIndexed())
					continue;

				string name = docField.Name();
				Assert.AreEqual(reader.HasNorms(name), !docField.GetOmitNorms());
				Assert.AreEqual(reader.HasNorms(name), !DocHelper.noNorms.Contains(name));
				if (reader.HasNorms(name))
					continue;

				// A field without norms reports either null or an array full of
				// encoded 1.0f, depending on the disable-fake-norms flag.
				byte[] encoded = reader.Norms(name);
				byte expected = DefaultSimilarity.EncodeNorm(1.0f);
				if (reader.GetDisableFakeNorms())
				{
					Assert.IsNull(encoded);
				}
				else
				{
					Assert.AreEqual(encoded.Length, reader.MaxDoc());
					for (int doc = 0; doc < reader.MaxDoc(); doc++)
					{
						Assert.AreEqual(encoded[doc], expected);
					}
				}

				// The overload filling a caller-supplied buffer must agree.
				encoded = new byte[reader.MaxDoc()];
				reader.Norms(name, encoded, 0);
				for (int doc = 0; doc < reader.MaxDoc(); doc++)
				{
					Assert.AreEqual(encoded[doc], expected);
				}
			}
		}
Example #3
0
        /// <summary>
        /// Builds a scoring explanation for <paramref name="doc"/>: the query
        /// weight (boost * idf * queryNorm) multiplied by the field weight
        /// (tf * idf * fieldNorm). Returns just the field explanation when the
        /// query weight evaluates to exactly 1.0.
        /// </summary>
        /// <param name="reader">Reader supplying norms and spans for the document.</param>
        /// <param name="doc">Document id to explain.</param>
        public override Explanation Explain(IndexReader reader, int doc)
        {
            ComplexExplanation result = new ComplexExplanation();

            result.Description = "weight(" + Query + " in " + doc + "), product of:";
            string field = ((SpanQuery)Query).Field;

            Explanation idfExpl = new Explanation(idf, "idf(" + field + ": " + idfExp.Explain() + ")");

            // explain query weight
            Explanation queryExpl = new Explanation();

            queryExpl.Description = "queryWeight(" + Query + "), product of:";

            Explanation boostExpl = new Explanation(Query.Boost, "boost");

            // A boost of 1.0 is the identity, so it is omitted from the detail list.
            if (Query.Boost != 1.0f)
            {
                queryExpl.AddDetail(boostExpl);
            }
            queryExpl.AddDetail(idfExpl);

            Explanation queryNormExpl = new Explanation(queryNorm, "queryNorm");

            queryExpl.AddDetail(queryNormExpl);

            queryExpl.Value = boostExpl.Value * idfExpl.Value * queryNormExpl.Value;

            result.AddDetail(queryExpl);

            // explain field weight
            ComplexExplanation fieldExpl = new ComplexExplanation();

            fieldExpl.Description = "fieldWeight(" + field + ":" + internalQuery.ToString(field) + " in " + doc + "), product of:";

            // tf comes from a freshly built scorer's own explanation of this doc.
            Explanation tfExpl = ((SpanScorer)Scorer(reader, true, false)).Explain(doc);

            fieldExpl.AddDetail(tfExpl);
            fieldExpl.AddDetail(idfExpl);

            Explanation fieldNormExpl = new Explanation();

            // Norms may be null (e.g. omitted norms); fall back to a neutral 1.0.
            byte[] fieldNorms = reader.Norms(field);
            float  fieldNorm  = fieldNorms != null?Similarity.DecodeNorm(fieldNorms[doc]) : 1.0f;

            fieldNormExpl.Value       = fieldNorm;
            fieldNormExpl.Description = "fieldNorm(field=" + field + ", doc=" + doc + ")";
            fieldExpl.AddDetail(fieldNormExpl);

            fieldExpl.Match = tfExpl.IsMatch;
            fieldExpl.Value = tfExpl.Value * idfExpl.Value * fieldNormExpl.Value;

            result.AddDetail(fieldExpl);
            bool?tempAux = fieldExpl.Match;

            result.Match = tempAux;

            // combine them
            result.Value = queryExpl.Value * fieldExpl.Value;

            // When the query weight contributes nothing, the field explanation alone suffices.
            if (queryExpl.Value == 1.0f)
            {
                return(fieldExpl);
            }

            return(result);
        }
		/// <summary>
		/// Exercises clone isolation on an IndexReader: reads a norm from the
		/// original, clones it, deletes a document and rewrites that norm on the
		/// clone, then checks that neither change leaks back to the original.
		/// Finally a delete attempted through the original must throw
		/// LockObtainFailedException.
		/// </summary>
		/// <param name="r1">IndexReader to perform tests on
		/// </param>
		/// <throws>  Exception </throws>
		private void  PerformDefaultTests(IndexReader r1)
		{
			// Baseline norm for doc 4 of field1, captured before any changes.
			float baselineNorm = Similarity.DecodeNorm(r1.Norms("field1")[4]);

			IndexReader clonedReader = (IndexReader) r1.Clone();
			clonedReader.DeleteDocument(10);
			clonedReader.SetNorm(4, "field1", 0.5f);

			// The norm change is visible only through the clone.
			Assert.IsTrue(Similarity.DecodeNorm(r1.Norms("field1")[4]) == baselineNorm);
			Assert.IsTrue(Similarity.DecodeNorm(clonedReader.Norms("field1")[4]) != baselineNorm);

			// Likewise the deletion.
			Assert.IsTrue(!r1.IsDeleted(10));
			Assert.IsTrue(clonedReader.IsDeleted(10));

			// Updating through the original reader is expected to fail.
			Assert.Throws<LockObtainFailedException>(() => r1.DeleteDocument(11),
			                         "Tried to delete doc 11 and an exception should have been thrown");
			clonedReader.Close();
		}
 /// <summary>
 /// Reads the stored norm for <paramref name="field"/> on document
 /// <paramref name="docId"/> and decodes it from its byte315 encoding to a float.
 /// </summary>
 /// <param name="reader">Reader to pull the norms array from.</param>
 /// <param name="field">Name of the field whose norm is wanted.</param>
 /// <param name="docId">Document id to look up.</param>
 /// <returns>The decoded norm value for that document/field.</returns>
 public static float GetNorm(IndexReader reader, string field, int docId)
 {
     byte[] encodedNorms = reader.Norms(field);
     return SmallFloat.Byte315ToFloat(encodedNorms[docId]);
 }
Example #6
0
 /// <summary>
 /// Builds a span scorer over this query's spans, backed by the norms of the
 /// query's field.
 /// </summary>
 public override Scorer Scorer(IndexReader reader, bool scoreDocsInOrder, bool topScorer)
 {
     var spans = internalQuery.GetSpans(reader);
     byte[] fieldNorms = reader.Norms(internalQuery.Field);
     return new SpanScorer(spans, this, similarity, fieldNorms);
 }
Example #7
0
		/// <summary>
		/// Asserts that two IndexReaders expose identical indexes: document
		/// counts, field names, norms, deletions, stored fields, term dictionary
		/// and posting lists (doc ids, frequencies and positions).
		/// </summary>
		/// <param name="index1">First reader to compare.</param>
		/// <param name="index2">Second reader to compare.</param>
		public static void  AssertIndexEquals(IndexReader index1, IndexReader index2)
		{
			Assert.AreEqual(index1.NumDocs(), index2.NumDocs(), "IndexReaders have different values for numDocs.");
			Assert.AreEqual(index1.MaxDoc, index2.MaxDoc, "IndexReaders have different values for maxDoc.");
			Assert.AreEqual(index1.HasDeletions, index2.HasDeletions, "Only one IndexReader has deletions.");
			Assert.AreEqual(index1.IsOptimized(), index2.IsOptimized(), "Only one index is optimized.");
			
			// check field names
			System.Collections.Generic.ICollection<string> fieldsNames1 = index1.GetFieldNames(FieldOption.ALL);
			// BUGFIX: this previously read from index1 as well, comparing index1's
			// field names against themselves and masking any difference in index2.
			System.Collections.Generic.ICollection<string> fieldsNames2 = index2.GetFieldNames(FieldOption.ALL);

            System.Collections.Generic.ICollection<IFieldable> fields1 = null;
            System.Collections.Generic.ICollection<IFieldable> fields2 = null;

            Assert.AreEqual(fieldsNames1.Count, fieldsNames2.Count, "IndexReaders have different numbers of fields.");
            System.Collections.IEnumerator it1 = fieldsNames1.GetEnumerator();
            System.Collections.IEnumerator it2 = fieldsNames2.GetEnumerator();
			while (it1.MoveNext() && it2.MoveNext())
			{
				Assert.AreEqual((System.String) it1.Current, (System.String) it2.Current, "Different field names.");
			}
			
			// check norms: either both readers expose the same byte array contents
			// for a field, or both expose null.
            it1 = fieldsNames1.GetEnumerator();
			while (it1.MoveNext())
			{
				System.String curField = (System.String) it1.Current;
				byte[] norms1 = index1.Norms(curField);
				byte[] norms2 = index2.Norms(curField);
				if (norms1 != null && norms2 != null)
				{
					Assert.AreEqual(norms1.Length, norms2.Length);
					for (int i = 0; i < norms1.Length; i++)
					{
						Assert.AreEqual(norms1[i], norms2[i], "Norm different for doc " + i + " and field '" + curField + "'.");
					}
				}
				else
				{
					Assert.AreSame(norms1, norms2);
				}
			}
			
			// check deletions
			for (int i = 0; i < index1.MaxDoc; i++)
			{
				Assert.AreEqual(index1.IsDeleted(i), index2.IsDeleted(i), "Doc " + i + " only deleted in one index.");
			}
			
			// check stored fields of every live document
			for (int i = 0; i < index1.MaxDoc; i++)
			{
				if (!index1.IsDeleted(i))
				{
					Document doc1 = index1.Document(i);
					Document doc2 = index2.Document(i);
					fields1 = doc1.GetFields();
					fields2 = doc2.GetFields();
					Assert.AreEqual(fields1.Count, fields2.Count, "Different numbers of fields for doc " + i + ".");
					it1 = fields1.GetEnumerator();
					it2 = fields2.GetEnumerator();
					while (it1.MoveNext() && it2.MoveNext())
					{
						Field curField1 = (Field) it1.Current;
						Field curField2 = (Field) it2.Current;
						Assert.AreEqual(curField1.Name, curField2.Name, "Different fields names for doc " + i + ".");
						Assert.AreEqual(curField1.StringValue, curField2.StringValue, "Different field values for doc " + i + ".");
					}
				}
			}
			
			// check dictionary and posting lists
			TermEnum enum1 = index1.Terms();
			TermEnum enum2 = index2.Terms();
			TermPositions tp1 = index1.TermPositions();
			TermPositions tp2 = index2.TermPositions();
			while (enum1.Next())
			{
				Assert.IsTrue(enum2.Next());
				Assert.AreEqual(enum1.Term, enum2.Term, "Different term in dictionary.");
				// The terms were just asserted equal, so seeking both readers on
				// enum1.Term positions them on the same term.
				tp1.Seek(enum1.Term);
				tp2.Seek(enum1.Term);
				while (tp1.Next())
				{
					Assert.IsTrue(tp2.Next());
					Assert.AreEqual(tp1.Doc, tp2.Doc, "Different doc id in postinglist of term " + enum1.Term + ".");
					Assert.AreEqual(tp1.Freq, tp2.Freq, "Different term frequence in postinglist of term " + enum1.Term + ".");
					for (int i = 0; i < tp1.Freq; i++)
					{
						Assert.AreEqual(tp1.NextPosition(), tp2.NextPosition(), "Different positions in postinglist of term " + enum1.Term + ".");
					}
				}
			}
		}
		/// <summary>
		/// Checks the norms stored in the reader against the expected values
		/// tracked by this test: field f1 is compared against modifiedNorms,
		/// every other field against norms.
		/// </summary>
		private void  VerifyIndex(IndexReader ir)
		{
			for (int fieldNum = 0; fieldNum < NUM_FIELDS; fieldNum++)
			{
				System.String fieldName = "f" + fieldNum;
				byte[] encoded = ir.Norms(fieldName);
				Assert.AreEqual(numDocNorms, encoded.Length, "number of norms mismatches");
				// Field f1 (index 1) was modified after indexing, so its expected
				// values live in modifiedNorms; the rest use the original list.
				System.Collections.ArrayList expectedNorms = (fieldNum == 1?modifiedNorms:norms);
				for (int docNum = 0; docNum < encoded.Length; docNum++)
				{
					float actual = Similarity.DecodeNorm(encoded[docNum]);
					float expected = (float) expectedNorms[docNum];
					Assert.AreEqual(actual, expected, 0.000001, "stored norm value of " + fieldName + " for doc " + docNum + " is " + actual + " - a mismatch!");
				}
			}
		}
Example #9
0
 /// <summary>
 /// Creates a SpanScorer for this weight, using the spans of the wrapped
 /// query and the norms of the query's field.
 /// </summary>
 public override Scorer Scorer(IndexReader reader, bool scoreDocsInOrder, bool topScorer)
 {
     var querySpans = internalQuery.GetSpans(reader);
     byte[] norms = reader.Norms(internalQuery.Field);
     return new SpanScorer(querySpans, this, similarity, norms);
 }
Example #10
0
 /// <summary>
 /// Builds a scoring explanation for <paramref name="doc"/>: the query weight
 /// (boost * idf * queryNorm) multiplied by the field weight
 /// (tf * idf * fieldNorm). Returns just the field explanation when the query
 /// weight evaluates to exactly 1.0.
 /// </summary>
 public override Explanation Explain(IndexReader reader, int doc)
 {
     ComplexExplanation combined = new ComplexExplanation();
     combined.Description = "weight(" + Query + " in " + doc + "), product of:";
     System.String fieldName = ((SpanQuery) Query).Field;

     Explanation idfDetail = new Explanation(idf, "idf(" + fieldName + ": " + idfExp.Explain() + ")");

     // ---- query weight: boost * idf * queryNorm ----
     Explanation queryWeight = new Explanation();
     queryWeight.Description = "queryWeight(" + Query + "), product of:";

     Explanation boostDetail = new Explanation(Query.Boost, "boost");
     // A boost of 1.0 is the identity, so it is left out of the detail list.
     if (Query.Boost != 1.0f)
     {
         queryWeight.AddDetail(boostDetail);
     }
     queryWeight.AddDetail(idfDetail);

     Explanation queryNormDetail = new Explanation(queryNorm, "queryNorm");
     queryWeight.AddDetail(queryNormDetail);

     queryWeight.Value = boostDetail.Value * idfDetail.Value * queryNormDetail.Value;
     combined.AddDetail(queryWeight);

     // ---- field weight: tf * idf * fieldNorm ----
     ComplexExplanation fieldWeight = new ComplexExplanation();
     fieldWeight.Description = "fieldWeight(" + fieldName + ":" + internalQuery.ToString(fieldName) + " in " + doc + "), product of:";

     // tf comes from a freshly built scorer's own explanation of this doc.
     Explanation tfDetail = ((SpanScorer)Scorer(reader, true, false)).Explain(doc);
     fieldWeight.AddDetail(tfDetail);
     fieldWeight.AddDetail(idfDetail);

     Explanation fieldNormDetail = new Explanation();
     byte[] encodedNorms = reader.Norms(fieldName);
     // Norms may be null (e.g. omitted norms); fall back to a neutral 1.0.
     float decodedNorm;
     if (encodedNorms != null)
     {
         decodedNorm = Similarity.DecodeNorm(encodedNorms[doc]);
     }
     else
     {
         decodedNorm = 1.0f;
     }
     fieldNormDetail.Value = decodedNorm;
     fieldNormDetail.Description = "fieldNorm(field=" + fieldName + ", doc=" + doc + ")";
     fieldWeight.AddDetail(fieldNormDetail);

     fieldWeight.Match = tfDetail.IsMatch;
     fieldWeight.Value = tfDetail.Value * idfDetail.Value * fieldNormDetail.Value;

     combined.AddDetail(fieldWeight);
     System.Boolean? matched = fieldWeight.Match;
     combined.Match = matched;

     // combine them
     combined.Value = queryWeight.Value * fieldWeight.Value;

     // When the query weight contributes nothing, the field explanation alone suffices.
     if (queryWeight.Value == 1.0f)
         return fieldWeight;

     return combined;
 }
		/// <summary>
		/// Verifies norm handling for every indexed field in DocHelper.fields:
		/// HasNorms must agree with the field's omitNorms flag and with the
		/// DocHelper.noNorms set, and fields without norms must expose fake
		/// norms encoding 1.0 for every document.
		/// </summary>
		/// <param name="reader">Reader over the DocHelper test index.</param>
		public static void  CheckNorms(IndexReader reader)
		{
			// test omit norms
			for (int i = 0; i < DocHelper.fields.Length; i++)
			{
				Lucene.Net.Documents.Fieldable f = DocHelper.fields[i];
				if (f.IsIndexed())
				{
					// HasNorms must be consistent with both the field's own flag
					// and the noNorms bookkeeping in DocHelper.
					Assert.AreEqual(reader.HasNorms(f.Name()), !f.GetOmitNorms());
					Assert.AreEqual(reader.HasNorms(f.Name()), !DocHelper.noNorms.Contains(f.Name()));
					if (!reader.HasNorms(f.Name()))
					{
						// test for fake norms of 1.0
						byte[] norms = reader.Norms(f.Name());
						Assert.AreEqual(norms.Length, reader.MaxDoc());
						for (int j = 0; j < reader.MaxDoc(); j++)
						{
							Assert.AreEqual(norms[j], DefaultSimilarity.EncodeNorm(1.0f));
						}
						// The overload that fills a caller-supplied buffer must
						// produce the same fake values.
						norms = new byte[reader.MaxDoc()];
						reader.Norms(f.Name(), norms, 0);
						for (int j = 0; j < reader.MaxDoc(); j++)
						{
							Assert.AreEqual(norms[j], DefaultSimilarity.EncodeNorm(1.0f));
						}
					}
				}
			}
		}
Example #12
0
 /// <summary>
 /// Creates the scorer for this weight, threading the IState through to both
 /// the norms lookup and the scorer. Norms are null when no norms field was
 /// configured on the query.
 /// </summary>
 public override Scorer Scorer(IndexReader reader, bool scoreDocsInOrder, bool topScorer, IState state)
 {
     byte[] norms = null;
     if (Enclosing_Instance.normsField != null)
     {
         norms = reader.Norms(Enclosing_Instance.normsField, state);
     }
     return new MatchAllScorer(enclosingInstance, reader, similarity, this, norms, state);
 }
Example #13
0
		/// <summary> 1. Get a norm from the original reader 2. Clone the original reader 3.
		/// Delete a document and set the norm of the cloned reader 4. Verify the norms
		/// are not the same on each reader 5. Verify the doc deleted is only in the
		/// cloned reader 6. Try to delete a document in the original reader, an
		/// exception should be thrown
		/// 
		/// </summary>
		/// <param name="r1">IndexReader to perform tests on
		/// </param>
		/// <throws>  Exception </throws>
		private void  PerformDefaultTests(IndexReader r1)
		{
			// Baseline norm for doc 4 of field1, captured before any changes.
			float norm1 = Similarity.DecodeNorm(r1.Norms("field1")[4]);
			
			IndexReader pr1Clone = (IndexReader) r1.Clone();
			pr1Clone.DeleteDocument(10);
			pr1Clone.SetNorm(4, "field1", 0.5f);

			// Changes made through the clone must not be visible via the original.
			Assert.IsTrue(Similarity.DecodeNorm(r1.Norms("field1")[4]) == norm1);
			Assert.IsTrue(Similarity.DecodeNorm(pr1Clone.Norms("field1")[4]) != norm1);
			
			Assert.IsTrue(!r1.IsDeleted(10));
			Assert.IsTrue(pr1Clone.IsDeleted(10));
			
			// try to update the original reader, which should throw an exception.
			// BUGFIX: the old try/catch caught System.Exception, which also
			// swallowed the exception thrown by Assert.Fail, so this test could
			// never actually fail. Assert.Throws verifies the expected exception
			// type and matches the sibling PerformDefaultTests variant.
			Assert.Throws<LockObtainFailedException>(() => r1.DeleteDocument(11),
			                         "Tried to delete doc 11 and an exception should have been thrown");
			pr1Clone.Close();
		}
 /// <summary>
 /// Creates the match-no-docs scorer for this weight; norms are supplied only
 /// when a norms field was configured on the enclosing query.
 /// </summary>
 public override Scorer Scorer(IndexReader reader, bool scoreDocsInOrder, bool topScorer)
 {
     byte[] norms = null;
     if (this.Enclosing_Instance.normsField != null)
     {
         norms = reader.Norms(this.Enclosing_Instance.normsField);
     }
     return new MatchNoDocsQuery.MatchNoDocsScorer(this.enclosingInstance, reader, this.similarity, this, norms);
 }
 /// <summary>
 /// Delegates the norms lookup for field <paramref name="f"/> to the wrapped
 /// reader.
 /// </summary>
 public override byte[] Norms(System.String f)
 {
     return in_Renamed.Norms(f);
 }