Example #1
0
		/// <summary>Creates a scorer over the given spans, caching the weight's
		/// value and the field norms used for score normalization.
		/// </summary>
		internal SpanScorer(Spans spans, Weight weight, Similarity similarity, byte[] norms) : base(similarity)
		{
			this.weight = weight;
			this.spans = spans;
			this.norms = norms;
			// Cache the weight's value once; it is folded into every score.
			this.value_Renamed = weight.GetValue();
		}
Example #2
0
		/// <summary>Builds a weight for <c>query</c>, resolving its Similarity once and
		/// computing the combined idf of the query's terms.
		/// </summary>
		/// <param name="query">the span query being weighted</param>
		/// <param name="searcher">searcher supplying the Similarity and document statistics</param>
		public SpanWeight(SpanQuery query, Searcher searcher)
		{
			this.similarity = query.GetSimilarity(searcher);
			this.query = query;
			this.terms = query.GetTerms();
			
			// Reuse the Similarity resolved above instead of resolving it a second time.
			idf = this.similarity.Idf(terms, searcher);
		}
Example #3
0
		/// <summary>Test-only constructor.
		/// </summary>
		/// <param name="directory">The directory to write the document information to
		/// </param>
		/// <param name="analyzer">The analyzer to use for the document
		/// </param>
		/// <param name="similarity">The Similarity function
		/// </param>
		/// <param name="maxFieldLength">The maximum number of tokens a field may have
		/// </param>
		public DocumentWriter(Directory directory, Analyzer analyzer, Similarity similarity, int maxFieldLength)
		{
			InitBlock();
			this.analyzer = analyzer;
			this.directory = directory;
			this.maxFieldLength = maxFieldLength;
			this.similarity = similarity;
		}
Example #4
0
		/// <summary>Creates a writer whose similarity, field-length limit and term index
		/// interval are all taken from the supplied IndexWriter's current settings.
		/// </summary>
		public DocumentWriter(Directory directory, Analyzer analyzer, IndexWriter writer)
		{
			InitBlock();
			this.analyzer = analyzer;
			this.directory = directory;
			// Snapshot the IndexWriter's current configuration.
			this.similarity = writer.GetSimilarity();
			this.maxFieldLength = writer.GetMaxFieldLength();
			this.termIndexInterval = writer.GetTermIndexInterval();
		}
        /// <summary>Builds a weight for <c>query</c>: extracts its terms into a table,
        /// resolves the Similarity once, and computes the combined idf of the terms.
        /// </summary>
        /// <param name="query">the span query being weighted</param>
        /// <param name="searcher">searcher supplying the Similarity and document statistics</param>
        public SpanWeight(SpanQuery query, Searcher searcher)
        {
            this.similarity = query.GetSimilarity(searcher);
            this.query = query;
            terms = new System.Collections.Hashtable();
            query.ExtractTerms(terms);

            System.Collections.ArrayList tmp = new System.Collections.ArrayList(terms.Values);

            // Reuse the Similarity resolved above instead of resolving it a second time.
            idf = this.similarity.Idf(tmp, searcher);
        }
Example #6
0
		/// <summary>Creates a scorer over the given spans and advances to the first match
		/// up front; if there is none, the scorer is marked exhausted immediately.
		/// </summary>
		protected internal SpanScorer(Spans spans, Weight weight, Similarity similarity, byte[] norms):base(similarity)
		{
			this.spans = spans;
			this.norms = norms;
			this.weight = weight;
			this.value_Renamed = weight.GetValue();
			// Prime the spans enumerator: no first match means no documents at all.
			if (!this.spans.Next())
			{
				doc = NO_MORE_DOCS;
				more = false;
			}
			else
			{
				doc = -1;
			}
		}
                /// <summary>Accumulates the sloppy frequency and payload statistics for all
                /// span matches in the current document, advancing <c>spans</c> to the next
                /// document in the process. Returns true if this document produced a score
                /// or more matches remain.
                /// </summary>
                public /*protected internal*/ override bool SetFreqCurrentDoc()
                {
                    if (!more)
                    {
                        return(false);
                    }
                    doc          = spans.Doc();
                    freq         = 0.0f;
                    payloadScore = 0;
                    payloadsSeen = 0;
                    Similarity similarity1 = GetSimilarity();

                    // Consume every match within this document, folding each one's
                    // sloppy frequency and payload into the running totals.
                    while (more && doc == spans.Doc())
                    {
                        int matchLength = spans.End() - spans.Start();

                        freq += similarity1.SloppyFreq(matchLength);
                        ProcessPayload(similarity1);

                        more = spans.Next();                         // this moves positions to the next match in this
                        // document
                    }
                    return(more || (freq != 0));
                }
        /// <summary> Sets up a RAMDirectory, and adds documents (using English.intToEnglish()) with two fields: field and multiField
        /// and analyzes them using the PayloadAnalyzer
        /// </summary>
        /// <param name="similarity">The Similarity class to use in the Searcher
        /// </param>
        /// <param name="numDocs">The num docs to add
        /// </param>
        /// <returns> An IndexSearcher
        /// </returns>
        /// <throws>  IOException </throws>
        public virtual IndexSearcher SetUp(Similarity similarity, int numDocs)
        {
            RAMDirectory dir = new RAMDirectory();
            PayloadAnalyzer payloadAnalyzer = new PayloadAnalyzer(this);
            IndexWriter indexWriter = new IndexWriter(dir, payloadAnalyzer, true, IndexWriter.MaxFieldLength.UNLIMITED);

            indexWriter.SetSimilarity(similarity);
            //writer.infoStream = System.out;
            for (int docNum = 0; docNum < numDocs; docNum++)
            {
                Document document = new Document();
                document.Add(new Field(FIELD, English.IntToEnglish(docNum), Field.Store.YES, Field.Index.ANALYZED));
                document.Add(new Field(MULTI_FIELD, English.IntToEnglish(docNum) + "  " + English.IntToEnglish(docNum), Field.Store.YES, Field.Index.ANALYZED));
                document.Add(new Field(NO_PAYLOAD_FIELD, English.IntToEnglish(docNum), Field.Store.YES, Field.Index.ANALYZED));
                indexWriter.AddDocument(document);
            }
            //writer.optimize();
            indexWriter.Close();

            IndexSearcher indexSearcher = new IndexSearcher(dir, true);

            indexSearcher.Similarity = similarity;
            return indexSearcher;
        }
Example #9
0
 /// <summary>Expert: Resets the normalization factor for the named field of the named
 /// document.
 ///
 /// </summary>
 /// <param name="doc">the document id whose norm is updated</param>
 /// <param name="field">the field whose norm is updated</param>
 /// <param name="value_Renamed">the new norm as a float; encoded to a single byte before storage</param>
 /// <seealso cref="Norms(String)">
 /// </seealso>
 /// <seealso cref="Similarity.DecodeNorm(byte)">
 /// </seealso>
 public virtual void  SetNorm(int doc, System.String field, float value_Renamed)
 {
     // Encoding to a byte is lossy; see Similarity.EncodeNorm.
     SetNorm(doc, field, Similarity.EncodeNorm(value_Renamed));
 }
 /// <summary>Captures the enclosing query instance and the scorer's Similarity.</summary>
 private void InitBlock(PayloadNearQuery enclosingInstance)
 {
     this.enclosingInstance = enclosingInstance;
     similarity = GetSimilarity();
 }
Example #11
0
		/// <summary>Expert: Set the Similarity implementation used by this IndexWriter.
		/// 
		/// </summary>
		/// <seealso cref="Similarity.SetDefault(Similarity)">
		/// </seealso>
		public virtual void  SetSimilarity(Similarity similarity)
		{
			// Fails if the writer has already been closed.
			EnsureOpen();
			this.similarity = similarity;
			// Keep the document writer in sync with this writer's similarity.
			docWriter.SetSimilarity(similarity);
		}
 /// <summary>Stores the wrapped weight, the similarity and the enclosing query.</summary>
 public AnonymousClassWeight(Lucene.Net.Search.Weight weight, Lucene.Net.Search.Similarity similarity, FilteredQuery enclosingInstance)
 {
     this.enclosingInstance = enclosingInstance;
     this.similarity        = similarity;
     this.weight            = weight;
 }
Example #13
0
			/// <summary>Builds the weight for a custom-score query: the sub-query's weight
			/// plus one weight per value-source query.
			/// </summary>
			public CustomWeight(CustomScoreQuery enclosingInstance, Searcher searcher)
			{
				InitBlock(enclosingInstance);
				this.similarity = Enclosing_Instance.GetSimilarity(searcher);
				this.subQueryWeight = Enclosing_Instance.subQuery.Weight(searcher);
				// One weight per value-source query, in the same order.
				this.valSrcWeights = new Weight[Enclosing_Instance.valSrcQueries.Length];
				for (int i = 0; i < Enclosing_Instance.valSrcQueries.Length; i++)
				{
					this.valSrcWeights[i] = Enclosing_Instance.valSrcQueries[i].CreateWeight(searcher);
				}
				this.qStrict = Enclosing_Instance.strict;
			}
Example #14
0
		/// <summary>Expert: Set the Similarity implementation used by this IndexWriter.
		/// 
		/// </summary>
		/// <param name="similarity">the Similarity to use for subsequent indexing</param>
		/// <seealso cref="Similarity.SetDefault(Similarity)">
		/// </seealso>
		public virtual void  SetSimilarity(Similarity similarity)
		{
			// Fails if the writer has already been closed.
			EnsureOpen();
			this.similarity = similarity;
		}
Example #15
0
        /// <summary>Scores the current document: tf(freq) scaled by the cached weight
        /// value, then normalized by the document's stored field norm.
        /// </summary>
        public override float Score()
        {
            // Term-frequency component scaled by the cached weight value.
            float rawScore = GetSimilarity().Tf(freq) * value_Renamed;

            // Apply the length normalization factor stored for this document.
            return rawScore * Similarity.DecodeNorm(norms[doc]);
        }
Example #16
0
 // Compile-only stub: forwards all arguments to the base SpanScorer.
 internal JustCompileSpanScorer(Spans spans, Weight weight, Similarity similarity, byte[] norms) : base(spans, weight, similarity, norms)
 {
 }
Example #17
0
		/// <summary>Expert: Set the Similarity implementation used by this IndexWriter.
		/// 
		/// </summary>
		/// <param name="similarity">the Similarity to use for subsequent indexing</param>
		/// <seealso cref="Similarity.SetDefault(Similarity)">
		/// </seealso>
		public virtual void  SetSimilarity(Similarity similarity)
		{
			this.similarity = similarity;
		}
Example #18
0
 /// <summary>Initializes defaults: the global Similarity, a byte-size-based merge
 /// policy, and the shared reader pool.
 /// </summary>
 private void InitBlock()
 {
     similarity = Similarity.GetDefault();
     mergePolicy = new LogByteSizeMergePolicy(this);
     readerPool = new ReaderPool(this);
 }
Example #19
0
		/// <summary> Writes the document to the directory using the analyzer
		/// and the similarity score; returns the SegmentInfo
		/// describing the new segment
		/// </summary>
		/// <param name="dir">destination directory
		/// </param>
		/// <param name="analyzer">analyzer used to tokenize the document
		/// </param>
		/// <param name="similarity">Similarity installed on the writer before adding
		/// </param>
		/// <param name="doc">document to index
		/// </param>
		/// <throws>  IOException </throws>
		public static SegmentInfo WriteDoc(Directory dir, Analyzer analyzer, Similarity similarity, Document doc)
		{
			IndexWriter writer = new IndexWriter(dir, analyzer, IndexWriter.MaxFieldLength.LIMITED);
			writer.SetSimilarity(similarity);
			//writer.setUseCompoundFile(false);
			writer.AddDocument(doc);
			// Flush so the newest segment exists before we capture its info.
			writer.Flush();
			SegmentInfo info = writer.NewestSegment();
			writer.Close();
			return info;
		}
		/// <summary> Sets up a RAMDirectory, and adds documents (using English.intToEnglish()) with two fields: field and multiField
		/// and analyzes them using the PayloadAnalyzer
		/// </summary>
		/// <param name="similarity">The Similarity class to use in the Searcher
		/// </param>
		/// <param name="numDocs">The num docs to add
		/// </param>
		/// <returns> An IndexSearcher
		/// </returns>
		/// <throws>  IOException </throws>
		public virtual IndexSearcher SetUp(Similarity similarity, int numDocs)
		{
			RAMDirectory directory = new RAMDirectory();
			PayloadAnalyzer analyzer = new PayloadAnalyzer(this);
			IndexWriter writer = new IndexWriter(directory, analyzer, true);
			writer.SetSimilarity(similarity);
			//writer.infoStream = System.out;
			// Each document carries the same English text in three fields; only
			// FIELD and MULTI_FIELD are payload-analyzed (per the class summary).
			for (int i = 0; i < numDocs; i++)
			{
				Document doc = new Document();
				doc.Add(new Field(FIELD, English.IntToEnglish(i), Field.Store.YES, Field.Index.ANALYZED));
				doc.Add(new Field(MULTI_FIELD, English.IntToEnglish(i) + "  " + English.IntToEnglish(i), Field.Store.YES, Field.Index.ANALYZED));
				doc.Add(new Field(NO_PAYLOAD_FIELD, English.IntToEnglish(i), Field.Store.YES, Field.Index.ANALYZED));
				writer.AddDocument(doc);
			}
			//writer.optimize();
			writer.Close();
			
			// Search with the same Similarity that was used at index time.
			IndexSearcher searcher = new IndexSearcher(directory);
			searcher.SetSimilarity(similarity);
			return searcher;
		}
Example #21
0
        /// <summary>Test fixture setup: builds field infos from the helper document and
        /// writes it to segment "test" with a default-similarity DocumentWriter.
        /// </summary>
        protected virtual void  SetUp()
        {
            fieldInfos = new FieldInfos();
            DocHelper.SetupDoc(testDoc);
            fieldInfos.Add(testDoc);
            DocumentWriter writer = new DocumentWriter(dir, new WhitespaceAnalyzer(), Similarity.GetDefault(), 50);

            Assert.IsTrue(writer != null);
            try
            {
                writer.AddDocument("test", testDoc);
            }
            catch (System.IO.IOException e)
            {
                // NOTE(review): IOException is silently swallowed, so a failed write
                // leaves later assertions to fail obscurely — consider Assert.Fail here.
            }
        }
Example #22
0
 /// <summary>Forwards the similarity to the base scorer and captures the match
 /// requirement and enclosing instance.
 /// </summary>
 internal AnonymousClassConjunctionScorer(int requiredNrMatchers, BooleanScorer2 enclosingInstance, Lucene.Net.Search.Similarity Param1) : base(Param1)
 {
     InitBlock(requiredNrMatchers, enclosingInstance);
 }
		/// <summary>Creates a documents writer whose indexing pipeline is produced by the
		/// supplied chain; the similarity and flushed doc count are snapshotted from the
		/// owning IndexWriter.
		/// </summary>
		internal DocumentsWriter(Directory directory, IndexWriter writer, IndexingChain indexingChain)
		{
			InitBlock();
			this.directory = directory;
			this.writer = writer;
			this.similarity = writer.GetSimilarity();
			flushedDocCount = writer.MaxDoc();
			
			consumer = indexingChain.GetChain(this);
			// Remember the field processor when the chain's head is one, so callers
			// can reach it directly.
			if (consumer is DocFieldProcessor)
			{
				docFieldProcessor = (DocFieldProcessor) consumer;
			}
		}
Example #24
0
 /// <summary> Writes the document to the directory segment named "test" using the specified analyzer and similarity</summary>
 /// <param name="dir">destination directory
 /// </param>
 /// <param name="analyzer">analyzer used to tokenize the document
 /// </param>
 /// <param name="similarity">Similarity used while indexing
 /// </param>
 /// <param name="doc">document to write
 /// </param>
 public static void  WriteDoc(Directory dir, Analyzer analyzer, Similarity similarity, Document doc)
 {
     WriteDoc(dir, analyzer, similarity, "test", doc);
 }
Example #25
0
 /// <summary>Delegates all state capture to InitBlock.</summary>
 public AnonymousClassWeight(Lucene.Net.Search.Weight weight, Lucene.Net.Search.Similarity similarity, FilteredQuery enclosingInstance)
 {
     InitBlock(weight, similarity, enclosingInstance);
 }
Example #26
0
 /// <summary>Initializes the similarity to the process-wide default.</summary>
 private void InitBlock()
 {
     similarity = Similarity.GetDefault();
 }
        /// <summary>Verifies that cloned readers share norm byte arrays by reference count
        /// until a norm is modified, at which point the modifying clone gets a private
        /// copy (copy-on-write) and the shared count drops accordingly.
        /// </summary>
        public virtual void  TestNormsRefCounting()
        {
            Directory dir1 = new MockRAMDirectory();

            TestIndexReaderReopen.CreateIndex(dir1, false);
            IndexReader reader1 = IndexReader.Open(dir1);

            IndexReader   reader2C        = (IndexReader)reader1.Clone();
            SegmentReader segmentReader2C = SegmentReader.GetOnlySegmentReader(reader2C);

            segmentReader2C.Norms("field1");             // load the norms for the field
            Norm reader2CNorm = (Norm)segmentReader2C.norms_ForNUnit["field1"];

            Assert.IsTrue(reader2CNorm.BytesRef().RefCount() == 2, "reader2CNorm.bytesRef()=" + reader2CNorm.BytesRef());



            IndexReader   reader3C        = (IndexReader)reader2C.Clone();
            SegmentReader segmentReader3C = SegmentReader.GetOnlySegmentReader(reader3C);
            Norm          reader3CCNorm   = (Norm)segmentReader3C.norms_ForNUnit["field1"];

            Assert.AreEqual(3, reader3CCNorm.BytesRef().RefCount());

            // edit a norm and the refcount should be 1
            IndexReader   reader4C        = (IndexReader)reader3C.Clone();
            SegmentReader segmentReader4C = SegmentReader.GetOnlySegmentReader(reader4C);

            Assert.AreEqual(4, reader3CCNorm.BytesRef().RefCount());
            reader4C.SetNorm(5, "field1", 0.33f);

            // generate a cannot update exception in reader1
            try
            {
                reader3C.SetNorm(1, "field1", 0.99f);
                Assert.Fail("did not hit expected exception");
            }
            catch (System.Exception ex)
            {
                // expected
            }

            // norm values should be different
            Assert.IsTrue(Similarity.DecodeNorm(segmentReader3C.Norms("field1")[5]) != Similarity.DecodeNorm(segmentReader4C.Norms("field1")[5]));
            Norm reader4CCNorm = (Norm)segmentReader4C.norms_ForNUnit["field1"];

            // reader4C's SetNorm detached it from the shared array: the original
            // array is back to 3 holders and reader4C owns a private copy.
            Assert.AreEqual(3, reader3CCNorm.BytesRef().RefCount());
            Assert.AreEqual(1, reader4CCNorm.BytesRef().RefCount());

            IndexReader   reader5C        = (IndexReader)reader4C.Clone();
            SegmentReader segmentReader5C = SegmentReader.GetOnlySegmentReader(reader5C);
            Norm          reader5CCNorm   = (Norm)segmentReader5C.norms_ForNUnit["field1"];

            // Editing via the newest clone again forces a private copy for it.
            reader5C.SetNorm(5, "field1", 0.7f);
            Assert.AreEqual(1, reader5CCNorm.BytesRef().RefCount());

            reader5C.Close();
            reader4C.Close();
            reader3C.Close();
            reader2C.Close();
            reader1.Close();
            dir1.Close();
        }
 /// <summary>Per-test setup: fresh custom similarity and a standard analyzer.</summary>
 public override void  SetUp()
 {
     base.SetUp();
     similarityOne = new SimilarityOne(this);
     anlzr         = new StandardAnalyzer(Util.Version.LUCENE_CURRENT);
 }
Example #29
0
 /// <summary>Captures the wrapped weight, the similarity and the enclosing query.</summary>
 private void InitBlock(Lucene.Net.Search.Weight weight, Lucene.Net.Search.Similarity similarity, FilteredQuery enclosingInstance)
 {
     this.enclosingInstance = enclosingInstance;
     this.similarity = similarity;
     this.weight = weight;
 }
Example #30
0
 /// <summary> Writes the document to the directory using a segment
 /// named "test"; returns the SegmentInfo describing the new
 /// segment
 /// </summary>
 /// <param name="dir">destination directory
 /// </param>
 /// <param name="doc">document to write
 /// </param>
 /// <throws>  IOException </throws>
 public static SegmentInfo WriteDoc(Directory dir, Document doc)
 {
     // Convenience overload: whitespace tokenization and the default Similarity.
     return(WriteDoc(dir, new WhitespaceAnalyzer(), Similarity.GetDefault(), doc));
 }
        /// <summary>Creates a documents writer with the default (hard-wired) indexing
        /// chain; similarity and flushed doc count are snapshotted from the owning
        /// IndexWriter.
        /// </summary>
        internal DocumentsWriter(Directory directory, IndexWriter writer)
        {
            this.directory = directory;
            this.writer = writer;
            this.similarity = writer.GetSimilarity();
            flushedDocCount = writer.MaxDoc();

            byteBlockAllocator = new ByteBlockAllocator(this);
            waitQueue = new WaitQueue(this);

            /*
              This is the current indexing chain:

              DocConsumer / DocConsumerPerThread
                --> code: DocFieldProcessor / DocFieldProcessorPerThread
                  --> DocFieldConsumer / DocFieldConsumerPerThread / DocFieldConsumerPerField
                    --> code: DocFieldConsumers / DocFieldConsumersPerThread / DocFieldConsumersPerField
                      --> code: DocInverter / DocInverterPerThread / DocInverterPerField
                        --> InvertedDocConsumer / InvertedDocConsumerPerThread / InvertedDocConsumerPerField
                          --> code: TermsHash / TermsHashPerThread / TermsHashPerField
                            --> TermsHashConsumer / TermsHashConsumerPerThread / TermsHashConsumerPerField
                              --> code: FreqProxTermsWriter / FreqProxTermsWriterPerThread / FreqProxTermsWriterPerField
                              --> code: TermVectorsTermsWriter / TermVectorsTermsWriterPerThread / TermVectorsTermsWriterPerField
                        --> InvertedDocEndConsumer / InvertedDocConsumerPerThread / InvertedDocConsumerPerField
                          --> code: NormsWriter / NormsWriterPerThread / NormsWriterPerField
                      --> code: StoredFieldsWriter / StoredFieldsWriterPerThread / StoredFieldsWriterPerField
            */

            // TODO FI: this should be something the user can pass in
            // Build up indexing chain:
            TermsHashConsumer termVectorsWriter = new TermVectorsTermsWriter(this);
            TermsHashConsumer freqProxWriter = new FreqProxTermsWriter();

            InvertedDocConsumer termsHash = new TermsHash(this, true, freqProxWriter,
                                                                 new TermsHash(this, false, termVectorsWriter, null));
            NormsWriter normsWriter = new NormsWriter();
            DocInverter docInverter = new DocInverter(termsHash, normsWriter);
            StoredFieldsWriter fieldsWriter = new StoredFieldsWriter(this);
            DocFieldConsumers docFieldConsumers = new DocFieldConsumers(docInverter, fieldsWriter);
            // The field processor is both the chain head and directly reachable.
            consumer = docFieldProcessor = new DocFieldProcessor(this, docFieldConsumers);
        }
 /// <summary>Span scorer that also tracks term payloads; keeps the spans' position
 /// enumerator for payload access.
 /// </summary>
 public PayloadTermSpanScorer(PayloadTermWeight enclosingInstance, TermSpans spans, Weight weight, Similarity similarity, byte[] norms) : base(spans, weight, similarity, norms)
 {
     InitBlock(enclosingInstance);
     positions = spans.Positions;
 }
 /// <summary>Span scorer for payload-aware "near" queries; keeps its own reference to
 /// the spans in addition to the base class's.
 /// </summary>
 protected internal PayloadNearSpanScorer(PayloadNearQuery enclosingInstance, Lucene.Net.Search.Spans.Spans spans, Weight weight, Similarity similarity, byte[] norms)
     : base(spans, weight, similarity, norms)
 {
     InitBlock(enclosingInstance);
     this.spans = spans;
 }
Example #34
0
        /// <summary>Scores the current document: tf(freq) scaled by the cached weight
        /// value; when norms are available the stored field norm is applied as well.
        /// </summary>
        public override float Score(IState state)
        {
            // Term-frequency component scaled by the cached weight value.
            float rawScore = Similarity.Tf(freq) * value_Renamed;

            if (norms == null)
            {
                // No norms recorded for this field: skip length normalization.
                return rawScore;
            }
            return rawScore * Similarity.DecodeNorm(norms[doc]);
        }
Example #35
0
 /// <summary>Forwards the similarity to the base scorer and captures the wrapped
 /// scorer, iterator and enclosing weight.
 /// </summary>
 internal AnonymousClassScorer(Lucene.Net.Search.Scorer scorer, Lucene.Net.Search.DocIdSetIterator docIdSetIterator, AnonymousClassWeight enclosingInstance, Lucene.Net.Search.Similarity Param1) : base(Param1)
 {
     InitBlock(scorer, docIdSetIterator, enclosingInstance);
 }
Example #36
0
 /// <summary>Creates a MoreLikeThis over the given reader, scoring with the
 /// supplied Similarity.
 /// </summary>
 public MoreLikeThis(IndexReader ir, Lucene.Net.Search.Similarity sim)
 {
     this.similarity = sim;
     this.ir = ir;
 }
 /// <summary>Forwards the similarity to the base scorer and captures the enclosing test.</summary>
 internal AnonymousClassScorer(TestBooleanScorer enclosingInstance, Lucene.Net.Search.Similarity Param1) : base(Param1)
 {
     InitBlock(enclosingInstance);
 }
Example #38
0
 /// <summary>Sets the Similarity used for subsequent scoring.</summary>
 public void SetSimilarity(Lucene.Net.Search.Similarity similarity)
 {
     this.similarity = similarity;
 }
Example #39
0
 // Precompute the encoded norm for boost 1.0, used for fields without norms.
 static NormsWriter()
 {
     defaultNorm = Similarity.EncodeNorm(1.0f);
 }
Example #40
0
		/// <summary>Per-test setup: fresh custom similarity and a standard analyzer.</summary>
		public override void  SetUp()
		{
			base.SetUp();
			similarityOne = new SimilarityOne(this);
			anlzr = new StandardAnalyzer();
		}
		/// <summary>Sets the Similarity for this writer and propagates it to every
		/// per-thread document state, atomically with respect to other callers.
		/// </summary>
		internal void  SetSimilarity(Similarity similarity)
		{
			lock (this)
			{
				this.similarity = similarity;
				// Keep each thread's cached doc state in sync with the new value.
				for (int i = 0; i < threadStates.Length; i++)
					threadStates[i].docState.similarity = similarity;
			}
		}
Example #42
0
 /// <summary>Expert: Set the Similarity implementation used by this IndexWriter.
 ///
 /// </summary>
 /// <param name="similarity">the Similarity to use for subsequent indexing</param>
 /// <seealso cref="Similarity.SetDefault(Similarity)">
 /// </seealso>
 public virtual void  SetSimilarity(Similarity similarity)
 {
     this.similarity = similarity;
 }
Example #43
0
 /// <summary>Captures the wrapped weight, the similarity and the enclosing query.</summary>
 private void  InitBlock(Lucene.Net.Search.Weight weight, Lucene.Net.Search.Similarity similarity, FilteredQuery enclosingInstance)
 {
     this.weight            = weight;
     this.similarity        = similarity;
     this.enclosingInstance = enclosingInstance;
 }
			// Compile-only stub: forwards all arguments to the base SpanScorer.
			internal JustCompileSpanScorer(Spans spans, Weight weight, Similarity similarity, byte[] norms):base(spans, weight, similarity, norms)
			{
			}
Example #45
0
 /// <summary>Forwards the similarity to the base scorer and captures the bitset,
 /// wrapped scorer and enclosing weight.
 /// </summary>
 internal AnonymousClassScorer(System.Collections.BitArray bitset, Lucene.Net.Search.Scorer scorer, AnonymousClassWeight enclosingInstance, Lucene.Net.Search.Similarity Param1) : base(Param1)
 {
     InitBlock(bitset, scorer, enclosingInstance);
 }
Example #46
0
 /// <summary>Forwards similarity, state and required scorers to the base conjunction
 /// scorer, then captures the match requirement and enclosing instance.
 /// </summary>
 internal AnonymousClassConjunctionScorer(int requiredNrMatchers, BooleanScorer2 enclosingInstance, Lucene.Net.Search.Similarity defaultSimilarity, IState state, System.Collections.Generic.IList <Scorer> requiredScorers)
     : base(defaultSimilarity, state, requiredScorers)
 {
     InitBlock(requiredNrMatchers, enclosingInstance);
 }
Example #47
0
 /// <summary>Delegating similarity that remembers the boolean query it was built for.</summary>
 internal AnonymousClassSimilarityDelegator(BooleanQuery enclosingInstance, Lucene.Net.Search.Similarity Param1) : base(Param1)
 {
     InitBlock(enclosingInstance);
 }
Example #48
0
        /// <summary>Explains how the score for <c>doc</c> was computed: the product of
        /// the query weight (boost * idf * queryNorm) and the field weight
        /// (tf * idf * fieldNorm). If the query weight is exactly 1.0 only the field
        /// explanation is returned.
        /// </summary>
        /// <param name="reader">reader supplying doc frequencies and field norms</param>
        /// <param name="doc">document id being explained</param>
        public virtual Explanation Explain(IndexReader reader, int doc)
        {
            Explanation result = new Explanation();

            result.SetDescription("weight(" + GetQuery() + " in " + doc + "), product of:");
            System.String field = ((SpanQuery)GetQuery()).GetField();

            // Build "term=docFreq" pairs separated by single spaces.
            // BUGFIX: the old code called MoveNext() a second time inside the loop body
            // to decide whether to append a separator; that consumed an element, so
            // every other term was silently dropped from the explanation. A
            // first-iteration flag achieves the separator without the destructive peek.
            System.Text.StringBuilder      docFreqs = new System.Text.StringBuilder();
            System.Collections.IEnumerator i        = terms.GetEnumerator();
            bool firstTerm = true;
            while (i.MoveNext())
            {
                Term term = (Term)i.Current;
                if (!firstTerm)
                {
                    docFreqs.Append(" ");
                }
                firstTerm = false;
                docFreqs.Append(term.Text());
                docFreqs.Append("=");
                docFreqs.Append(reader.DocFreq(term));
            }

            Explanation idfExpl = new Explanation(idf, "idf(" + field + ": " + docFreqs + ")");

            // explain query weight
            Explanation queryExpl = new Explanation();

            queryExpl.SetDescription("queryWeight(" + GetQuery() + "), product of:");

            Explanation boostExpl = new Explanation(GetQuery().GetBoost(), "boost");

            // Only surface the boost detail when it actually affects the score.
            if (GetQuery().GetBoost() != 1.0f)
            {
                queryExpl.AddDetail(boostExpl);
            }
            queryExpl.AddDetail(idfExpl);

            Explanation queryNormExpl = new Explanation(queryNorm, "queryNorm");

            queryExpl.AddDetail(queryNormExpl);

            queryExpl.SetValue(boostExpl.GetValue() * idfExpl.GetValue() * queryNormExpl.GetValue());

            result.AddDetail(queryExpl);

            // explain field weight
            Explanation fieldExpl = new Explanation();

            fieldExpl.SetDescription("fieldWeight(" + field + ":" + query.ToString(field) + " in " + doc + "), product of:");

            Explanation tfExpl = Scorer(reader).Explain(doc);

            fieldExpl.AddDetail(tfExpl);
            fieldExpl.AddDetail(idfExpl);

            Explanation fieldNormExpl = new Explanation();

            // A field with no norms contributes a 0 fieldNorm, matching scoring.
            byte[] fieldNorms = reader.Norms(field);
            float  fieldNorm  = fieldNorms != null?Similarity.DecodeNorm(fieldNorms[doc]) : 0.0f;

            fieldNormExpl.SetValue(fieldNorm);
            fieldNormExpl.SetDescription("fieldNorm(field=" + field + ", doc=" + doc + ")");
            fieldExpl.AddDetail(fieldNormExpl);

            fieldExpl.SetValue(tfExpl.GetValue() * idfExpl.GetValue() * fieldNormExpl.GetValue());

            result.AddDetail(fieldExpl);

            // combine them
            result.SetValue(queryExpl.GetValue() * fieldExpl.GetValue());

            if (queryExpl.GetValue() == 1.0f)
            {
                return(fieldExpl);
            }

            return(result);
        }
Example #49
0
 /// <summary>Initializes the similarity to the process-wide default.</summary>
 private void  InitBlock()
 {
     similarity = Similarity.GetDefault();
 }
 /// <summary>Span scorer that also tracks term payloads; keeps the spans' position
 /// enumerator for payload access.
 /// </summary>
 public PayloadTermSpanScorer(PayloadTermWeight enclosingInstance, TermSpans spans, Weight weight, Similarity similarity, byte[] norms)
     : base(spans, weight, similarity, norms)
 {
     InitBlock(enclosingInstance);
     positions = spans.GetPositions();
 }
 /// <summary>Folds the payload at the current span position (if any) into the running
 /// payload score via the query's score function, and counts it.
 /// </summary>
 /// <param name="similarity">similarity used to score the raw payload bytes</param>
 protected internal virtual void ProcessPayload(Similarity similarity)
 {
     if (positions.IsPayloadAvailable())
     {
         // Reuse the payload buffer across calls to avoid reallocating it.
         payload = positions.GetPayload(payload, 0);
         payloadScore = Enclosing_Instance.Enclosing_Instance.function.CurrentScore(doc, Enclosing_Instance.Enclosing_Instance.term.Field(), spans.Start(), spans.End(), payloadsSeen, payloadScore, similarity.ScorePayload(doc, Enclosing_Instance.Enclosing_Instance.term.Field(), spans.Start(), spans.End(), payload, 0, positions.GetPayloadLength()));
         payloadsSeen++;
     }
     else
     {
         // zero out the payload?
     }
 }
		/// <summary>Per-test setup: fresh custom similarity and a standard analyzer.</summary>
		public override void  SetUp()
		{
			base.SetUp();
			similarityOne = new SimilarityOne(this);
			anlzr = new StandardAnalyzer(Util.Version.LUCENE_CURRENT);
		}
Example #53
0
 /// <summary>Creates a MoreLikeThis over the given reader, scoring with the
 /// supplied Similarity.
 /// </summary>
 public MoreLikeThis(IndexReader ir, Lucene.Net.Search.Similarity sim)
 {
     this.ir         = ir;
     this.similarity = sim;
 }
Example #54
0
		/// <summary> Writes the document to the directory segment named "test" using the specified analyzer and similarity</summary>
		/// <param name="dir">destination directory
		/// </param>
		/// <param name="analyzer">analyzer used to tokenize the document
		/// </param>
		/// <param name="similarity">Similarity used while indexing
		/// </param>
		/// <param name="doc">document to write
		/// </param>
		public static void  WriteDoc(Directory dir, Analyzer analyzer, Similarity similarity, Document doc)
		{
			WriteDoc(dir, analyzer, similarity, "test", doc);
		}
 /// <summary>Per-test setup: fresh custom similarity and a standard analyzer.</summary>
 public override void  SetUp()
 {
     base.SetUp();
     similarityOne = new SimilarityOne(this);
     anlzr         = new StandardAnalyzer();
 }
Example #56
0
		/// <summary> Writes the document to the directory segment using the analyzer and the similarity score</summary>
		/// <param name="dir">destination directory
		/// </param>
		/// <param name="analyzer">analyzer used to tokenize the document
		/// </param>
		/// <param name="similarity">Similarity used while indexing
		/// </param>
		/// <param name="segment">name of the segment to write to
		/// </param>
		/// <param name="doc">document to write
		/// </param>
		public static void  WriteDoc(Directory dir, Analyzer analyzer, Similarity similarity, System.String segment, Document doc)
		{
			DocumentWriter writer = new DocumentWriter(dir, analyzer, similarity, 50);
			try
			{
				writer.AddDocument(segment, doc);
			}
			catch (System.IO.IOException e)
			{
				// Best-effort helper: failures are reported to stderr, not rethrown.
				System.Console.Error.WriteLine(e.StackTrace);
			}
		}
Example #57
0
			private float[] vScores; // reused in score() to avoid allocating this array for each doc 
			
			// constructor
			/// <summary>Builds a scorer combining the sub-query scorer with one scorer per
			/// value source, resolving the enclosing query's score provider for the reader.
			/// </summary>
			internal CustomScorer(CustomScoreQuery enclosingInstance, Similarity similarity, IndexReader reader, CustomWeight w, Scorer subQueryScorer, Scorer[] valSrcScorers):base(similarity)
			{
				InitBlock(enclosingInstance);
				this.weight = w;
				this.qWeight = w.GetValue();
				this.subQueryScorer = subQueryScorer;
				this.valSrcScorers = valSrcScorers;
				this.reader = reader;
				this.vScores = new float[valSrcScorers.Length];
                this.provider = this.Enclosing_Instance.GetCustomScoreProvider(reader);
			}
Example #58
0
 /// <summary>Sets the Similarity used for subsequent scoring.</summary>
 public void SetSimilarity(Lucene.Net.Search.Similarity similarity)
 {
     this.similarity = similarity;
 }
Example #59
0
 /// <summary>Span scorer for payload-aware "near" queries; keeps its own reference to
 /// the spans in addition to the base class's.
 /// </summary>
 protected internal PayloadNearSpanScorer(PayloadNearQuery enclosingInstance, Lucene.Net.Search.Spans.Spans spans, Weight weight, Similarity similarity, byte[] norms) : base(spans, weight, similarity, norms)
 {
     InitBlock(enclosingInstance);
     this.spans = spans;
 }
 /// <summary>Forwards the similarity to the base scorer and captures the iterator,
 /// wrapped scorer and enclosing weight.
 /// </summary>
 internal AnonymousClassScorer(DocIdSetIterator docIdSetIterator, Lucene.Net.Search.Scorer scorer, AnonymousClassWeight enclosingInstance, Lucene.Net.Search.Similarity similarity)
     : base(similarity)
 {
     this.enclosingInstance = enclosingInstance;
     this.scorer = scorer;
     this.docIdSetIterator = docIdSetIterator;
 }