public override System.String HighlightTerm(System.String originalText, TokenGroup tokenGroup)
		{
			float score = tokenGroup.GetTotalScore();
			if (score == 0)
			{
				return originalText;
			}
			
			// try to size sb correctly
			System.Text.StringBuilder sb = new System.Text.StringBuilder(originalText.Length + EXTRA);
			
			sb.Append("<span style=\"");
			if (highlightForeground)
			{
				sb.Append("color: ");
				sb.Append(GetForegroundColorString(score));
				sb.Append("; ");
			}
			if (highlightBackground)
			{
				sb.Append("background: ");
				sb.Append(GetBackgroundColorString(score));
				sb.Append("; ");
			}
			sb.Append("\">");
			sb.Append(originalText);
			sb.Append("</span>");
			return sb.ToString();
		}
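
A minimal wiring sketch for the span-based gradient formatter above (the shape of Lucene's SpanGradientFormatter), assuming the Lucene.Net 1.9/2.0-era contrib Highlighter API these snippets come from; the analyzer, query text, field name "contents", and color values are illustrative, not taken from the snippet:

    Lucene.Net.Analysis.Analyzer analyzer = new Lucene.Net.Analysis.Standard.StandardAnalyzer();
    Lucene.Net.Search.Query query = new Lucene.Net.QueryParsers.QueryParser("contents", analyzer).Parse("highlighter");
    System.String text = "the raw document text to be highlighted";

    // SpanGradientFormatter shares GradientFormatter's constructor:
    // (maxScore, minForegroundColor, maxForegroundColor, minBackgroundColor, maxBackgroundColor)
    Formatter formatter = new SpanGradientFormatter(5.0f, null, null, "#FFFFFF", "#FF0000");
    Highlighter highlighter = new Highlighter(formatter, new QueryScorer(query));

    TokenStream tokens = analyzer.TokenStream("contents", new System.IO.StringReader(text));
    System.String highlighted = highlighter.GetBestFragments(tokens, text, 3, "...");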
Example #3
        public virtual System.String HighlightTerm(System.String originalText, TokenGroup tokenGroup)
        {
            float score = tokenGroup.GetTotalScore();
            if (score == 0)
            {
                return originalText;
            }
            System.Text.StringBuilder sb = new System.Text.StringBuilder();
            sb.Append("<font ");
            if (highlightForeground)
            {
                sb.Append("color=\"");
                sb.Append(GetForegroundColorString(score));
                sb.Append("\" ");
            }
            if (highlightBackground)
            {
                sb.Append("bgcolor=\"");
                sb.Append(GetBackgroundColorString(score));
                sb.Append("\" ");
            }
            sb.Append(">");
            sb.Append(originalText);
            sb.Append("</font>");
            return sb.ToString();
        }
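
Construction sketch for the `<font>`-based gradient formatter above, with illustrative values; in the stock GradientFormatter a color channel is highlighted only when both its min and max colors are supplied, and the helper methods interpolate between them as the group score approaches maxScore:

    // Constructor shape: (maxScore, minForegroundColor, maxForegroundColor, minBackgroundColor, maxBackgroundColor)
    GradientFormatter formatter = new GradientFormatter(
        3.0f,                    // groups scoring at or above 3.0 get the "max" colors
        "#000000", "#0000FF",    // foreground fades from black (low score) to blue (high score)
        null, null);             // background highlighting disabled
    // A group with total score 3.0 would then be wrapped roughly as:
    //   <font color="#0000FF" >term</font>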
Example #4
 /* (non-Javadoc)
  * @see Lucene.Net.Highlight.Formatter#highlightTerm(java.lang.String, Lucene.Net.Highlight.TokenGroup)
  */
 public virtual System.String HighlightTerm(System.String originalText, TokenGroup tokenGroup)
 {
     System.Text.StringBuilder returnBuffer;
     if (tokenGroup.GetTotalScore() > 0)
     {
         returnBuffer = new System.Text.StringBuilder();
         returnBuffer.Append(preTag);
         returnBuffer.Append(originalText);
         returnBuffer.Append(postTag);
         return(returnBuffer.ToString());
     }
     return(originalText);
 }
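
The pre/post-tag formatter above is the shape of Lucene's SimpleHTMLFormatter; a brief wiring sketch, reusing the illustrative query from the first sketch (the `<b>` tags are arbitrary, and the no-argument constructor defaults to `<B>`/`</B>`):

    Formatter boldFormatter = new SimpleHTMLFormatter("<b>", "</b>");
    Highlighter boldHighlighter = new Highlighter(boldFormatter, new QueryScorer(query));
    // Every token group with a positive total score is emitted as <b>originalText</b>;
    // text with no matches passes through HighlightTerm unchanged.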
Example #6
		/// <summary> Low-level API to get the most relevant (formatted) sections of the document.
		/// This method has been made public to allow visibility of score information held in TextFragment objects.
		/// Thanks to Jason Calabrese for help in redefining the interface.
		/// </summary>
		/// <param name="tokenStream"></param>
		/// <param name="text"></param>
		/// <param name="mergeContiguousFragments"></param>
		/// <param name="maxNumFragments"></param>
		/// <throws>  IOException </throws>
		public TextFragment[] GetBestTextFragments(TokenStream tokenStream, System.String text, bool mergeContiguousFragments, int maxNumFragments)
		{
			System.Collections.ArrayList docFrags = new System.Collections.ArrayList();
			System.Text.StringBuilder newText = new System.Text.StringBuilder();
			
			TextFragment currentFrag = new TextFragment(newText, newText.Length, docFrags.Count);
			fragmentScorer.StartFragment(currentFrag);
			docFrags.Add(currentFrag);
			
			FragmentQueue fragQueue = new FragmentQueue(maxNumFragments);
			
			try
			{
				Lucene.Net.Analysis.Token token;
				System.String tokenText;
				int startOffset;
				int endOffset;
				int lastEndOffset = 0;
				textFragmenter.Start(text);
				
				TokenGroup tokenGroup = new TokenGroup();
				token = tokenStream.Next();
				while ((token != null) && (token.StartOffset() < maxDocBytesToAnalyze))
				{
					if ((tokenGroup.numTokens > 0) && (tokenGroup.IsDistinct(token)))
					{
						//the current token is distinct from previous tokens - 
						// markup the cached token group info
						startOffset = tokenGroup.matchStartOffset;
						endOffset = tokenGroup.matchEndOffset;
						tokenText = text.Substring(startOffset, (endOffset) - (startOffset));
						System.String markedUpText = formatter.HighlightTerm(encoder.EncodeText(tokenText), tokenGroup);
						//store any whitespace etc from between this and last group
						if (startOffset > lastEndOffset)
							newText.Append(encoder.EncodeText(text.Substring(lastEndOffset, (startOffset) - (lastEndOffset))));
						newText.Append(markedUpText);
						lastEndOffset = System.Math.Max(endOffset, lastEndOffset);
						tokenGroup.Clear();
						
						//check if current token marks the start of a new fragment						
						if (textFragmenter.IsNewFragment(token))
						{
							currentFrag.SetScore(fragmentScorer.GetFragmentScore());
							//record stats for a new fragment
							currentFrag.textEndPos = newText.Length;
							currentFrag = new TextFragment(newText, newText.Length, docFrags.Count);
							fragmentScorer.StartFragment(currentFrag);
							docFrags.Add(currentFrag);
						}
					}
					
					tokenGroup.AddToken(token, fragmentScorer.GetTokenScore(token));
					
					//				if(lastEndOffset>maxDocBytesToAnalyze)
					//				{
					//					break;
					//				}
					token = tokenStream.Next();
				}
				currentFrag.SetScore(fragmentScorer.GetFragmentScore());
				
				if (tokenGroup.numTokens > 0)
				{
					//flush the accumulated text (same code as in above loop)
					startOffset = tokenGroup.matchStartOffset;
					endOffset = tokenGroup.matchEndOffset;
					tokenText = text.Substring(startOffset, (endOffset) - (startOffset));
					System.String markedUpText = formatter.HighlightTerm(encoder.EncodeText(tokenText), tokenGroup);
					//store any whitespace etc from between this and last group
					if (startOffset > lastEndOffset)
						newText.Append(encoder.EncodeText(text.Substring(lastEndOffset, (startOffset) - (lastEndOffset))));
					newText.Append(markedUpText);
					lastEndOffset = System.Math.Max(lastEndOffset, endOffset);
				}
				
				//Test what remains of the original text beyond the point where we stopped analyzing 
				if ((lastEndOffset < text.Length) && (text.Length < maxDocBytesToAnalyze))
				{
					//append it to the last fragment
					newText.Append(encoder.EncodeText(text.Substring(lastEndOffset)));
				}
				
				currentFrag.textEndPos = newText.Length;
				
				//sort the most relevant sections of the text
				for (System.Collections.IEnumerator i = docFrags.GetEnumerator(); i.MoveNext(); )
				{
					currentFrag = (TextFragment) i.Current;
					
					//If you are running with a version of Lucene before 11th Sept 03
					// you do not have PriorityQueue.insert() - so uncomment the code below					
					/*
					if (currentFrag.getScore() >= minScore)
					{
					fragQueue.put(currentFrag);
					if (fragQueue.size() > maxNumFragments)
					{ // if hit queue overfull
					fragQueue.pop(); // remove lowest in hit queue
					minScore = ((TextFragment) fragQueue.top()).getScore(); // reset minScore
					}
					
					
					}
					*/
					//The above code caused a problem as a result of Christoph Goller's 11th Sept 03
					//fix to PriorityQueue. The correct method to use here is the new "insert" method
					// USE ABOVE CODE IF THIS DOES NOT COMPILE!
					fragQueue.Insert(currentFrag);
				}
				
				//return the most relevant fragments
				TextFragment[] frag = new TextFragment[fragQueue.Size()];
				for (int i = frag.Length - 1; i >= 0; i--)
				{
					frag[i] = (TextFragment) fragQueue.Pop();
				}
				
				//merge any contiguous fragments to improve readability
				if (mergeContiguousFragments)
				{
					MergeContiguousFragments(frag);
					System.Collections.ArrayList fragTexts = new System.Collections.ArrayList();
					for (int i = 0; i < frag.Length; i++)
					{
						if ((frag[i] != null) && (frag[i].GetScore() > 0))
						{
							fragTexts.Add(frag[i]);
						}
					}
					frag = (TextFragment[]) fragTexts.ToArray(typeof(TextFragment));
				}
				
				return frag;
			}
			finally
			{
				if (tokenStream != null)
				{
					try
					{
						tokenStream.Close();
					}
					catch (System.Exception)
					{
						// ignore exceptions thrown while closing the token stream
					}
				}
			}
		}
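
A usage sketch for the low-level API above, reusing the illustrative analyzer, highlighter, and text from the first sketch; calling GetBestTextFragments directly exposes the per-fragment score information mentioned in the summary:

    TokenStream tokenStream = analyzer.TokenStream("contents", new System.IO.StringReader(text));
    // (tokenStream, text, mergeContiguousFragments, maxNumFragments)
    TextFragment[] frags = highlighter.GetBestTextFragments(tokenStream, text, true, 5);
    for (int i = 0; i < frags.Length; i++)
    {
        if ((frags[i] != null) && (frags[i].GetScore() > 0))
        {
            // TextFragment.ToString() returns the marked-up text of this fragment
            System.Console.WriteLine("score={0}: {1}", frags[i].GetScore(), frags[i]);
        }
    }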