/// <summary> Tokens with a position increment of 0 are stacked as a new row at the
/// current position; any other increment starts a new column.</summary>
public override TokenPositioner GetTokenPositioner(Token token)
{
    return token.GetPositionIncrement() == 0
        ? TokenPositioner.NewRow
        : TokenPositioner.NewColumn;
}
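For context: by convention a position increment of 0 marks a token stacked at the same position as its predecessor (for example, an injected synonym). The standalone sketch below, using hypothetical DemoToken and TokenPositioner stand-ins rather than the Lucene.NET types, illustrates the row/column decision above.

    using System;
    using System.Collections.Generic;

    enum TokenPositioner { NewRow, NewColumn }

    sealed class DemoToken
    {
        public string Term { get; }
        public int PositionIncrement { get; }
        public DemoToken(string term, int posIncr) { Term = term; PositionIncrement = posIncr; }
    }

    static class PositionerDemo
    {
        // Mirrors the rule above: increment 0 => stacked token (new row), otherwise new column.
        static TokenPositioner GetTokenPositioner(DemoToken token) =>
            token.PositionIncrement == 0 ? TokenPositioner.NewRow : TokenPositioner.NewColumn;

        static void Main()
        {
            var tokens = new List<DemoToken>
            {
                new DemoToken("quick", 1),
                new DemoToken("fast", 0),   // synonym stacked on "quick"
                new DemoToken("fox", 1),
            };
            foreach (var t in tokens)
                Console.WriteLine(t.Term + ": " + GetTokenPositioner(t));
            // quick: NewColumn, fast: NewRow, fox: NewColumn
        }
    }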
Example #2
        /// <summary> Returns the next input Token whose termText() is not a stop word.</summary>
        public override Token Next(Token result)
        {
            // return the first non-stop word found
            int skippedPositions = 0;

            while ((result = input.Next(result)) != null)
            {
                if (!stopWords.Contains(result.TermBuffer(), 0, result.TermLength()))
                {
                    if (enablePositionIncrements)
                    {
                        // fold the increments of the skipped stop words into this token
                        result.SetPositionIncrement(result.GetPositionIncrement() + skippedPositions);
                    }
                    return result;
                }
                skippedPositions += result.GetPositionIncrement();
            }
            // reached EOS -- return null
            return null;
        }
Example #3
        /// <summary> Returns the next input Token whose term() is not a stop word.</summary>
        public override Token Next(/* in */ Token reusableToken)
        {
            System.Diagnostics.Debug.Assert(reusableToken != null);
            // return the first non-stop word found
            int skippedPositions = 0;

            for (Token nextToken = input.Next(reusableToken); nextToken != null; nextToken = input.Next(reusableToken))
            {
                if (!stopWords.Contains(nextToken.TermBuffer(), 0, nextToken.TermLength()))
                {
                    if (enablePositionIncrements)
                    {
                        nextToken.SetPositionIncrement(nextToken.GetPositionIncrement() + skippedPositions);
                    }
                    return nextToken;
                }
                skippedPositions += nextToken.GetPositionIncrement();
            }
            // reached EOS -- return null
            return null;
        }
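To see what the skippedPositions bookkeeping buys, here is a minimal standalone simulation (plain C#, not the Lucene.NET classes): it drops stop words from a (term, increment) sequence and folds the skipped increments into the next surviving token, so absolute positions, and hence phrase matching, stay correct.

    using System;
    using System.Collections.Generic;

    static class StopFilterDemo
    {
        // Each surviving token absorbs the increments of the stop words skipped
        // just before it, preserving absolute positions across the gap.
        static IEnumerable<(string Term, int PosIncr)> RemoveStopWords(
            IEnumerable<(string Term, int PosIncr)> tokens, ISet<string> stopWords)
        {
            int skipped = 0;
            foreach (var t in tokens)
            {
                if (stopWords.Contains(t.Term)) { skipped += t.PosIncr; continue; }
                yield return (t.Term, t.PosIncr + skipped);
                skipped = 0;
            }
        }

        static void Main()
        {
            var stop = new HashSet<string> { "over", "the" };
            var input = new (string, int)[] { ("fox", 1), ("jumped", 1), ("over", 1), ("the", 1), ("dog", 1) };
            int position = 0;
            foreach (var t in RemoveStopWords(input, stop))
            {
                position += t.PosIncr;
                Console.WriteLine(t.Term + " @ position " + position);
                // fox @ 1, jumped @ 2, dog @ 5 -- the gap left by "over the" survives
            }
        }
    }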
Example #4
        public static void Analyze(TextReader reader)
        {
            Analyzer indexing_analyzer = new LuceneCommon.BeagleAnalyzer(true);
            TokenStream stream = indexing_analyzer.TokenStream("Text", reader);

            int position = 1;

            for (Lucene.Net.Analysis.Token t = stream.Next(); t != null; t = stream.Next())
            {
                // a position increment > 1 means tokens were skipped (e.g. stop words)
                position += (t.GetPositionIncrement() - 1);
                Console.WriteLine(t);
            }
        }
Example #5
 public int GetPositionIncrement()
 {
     // forward to the wrapped token ("delegate" is a C# keyword, hence the _Renamed suffix
     // added by the Java-to-C# conversion)
     return delegate_Renamed.GetPositionIncrement();
 }
Example #6
        /// <summary> Not an explicit test; just useful for printing out some performance info.</summary>
        /// <throws> Exception </throws>
        public virtual void Performance()
        {
            int[] tokCount  = new int[] { 100, 500, 1000, 2000, 5000, 10000 };
            int[] modCounts = new int[] { 1, 2, 5, 10, 20, 50, 100, 200, 500 };
            for (int k = 0; k < tokCount.Length; k++)
            {
                System.Text.StringBuilder buffer = new System.Text.StringBuilder();
                System.Console.Out.WriteLine("-----Tokens: " + tokCount[k] + "-----");
                for (int i = 0; i < tokCount[k]; i++)
                {
                    buffer.Append(English.IntToEnglish(i).ToUpper()).Append(' ');
                }
                //make sure we produce the same tokens
                ModuloSinkTokenizer sink  = new ModuloSinkTokenizer(this, tokCount[k], 100);
                Token       reusableToken = new Token();
                TokenStream stream        = new TeeTokenFilter(new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString()))), sink);
                while (stream.Next(reusableToken) != null)
                {
                }
                stream = new ModuloTokenFilter(this, new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString()))), 100);
                System.Collections.IList tmp = new System.Collections.ArrayList();
                for (Token nextToken = stream.Next(reusableToken); nextToken != null; nextToken = stream.Next(reusableToken))
                {
                    tmp.Add(nextToken.Clone());
                }
                System.Collections.IList sinkList = sink.GetTokens();
                Assert.IsTrue(tmp.Count == sinkList.Count, "tmp Size: " + tmp.Count + " is not: " + sinkList.Count);
                for (int i = 0; i < tmp.Count; i++)
                {
                    Token tfTok   = (Token)tmp[i];
                    Token sinkTok = (Token)sinkList[i];
                    Assert.IsTrue(tfTok.Term().Equals(sinkTok.Term()), tfTok.Term() + " is not equal to " + sinkTok.Term() + " at token: " + i);
                }
                //simulate two fields, each being analyzed once, for 20 documents

                for (int j = 0; j < modCounts.Length; j++)
                {
                    int  tfPos = 0;
                    long start = (DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond);
                    for (int i = 0; i < 20; i++)
                    {
                        stream = new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString())));
                        for (Token nextToken = stream.Next(reusableToken); nextToken != null; nextToken = stream.Next(reusableToken))
                        {
                            tfPos += nextToken.GetPositionIncrement();
                        }
                        stream = new ModuloTokenFilter(this, new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString()))), modCounts[j]);
                        for (Token nextToken = stream.Next(reusableToken); nextToken != null; nextToken = stream.Next(reusableToken))
                        {
                            tfPos += nextToken.GetPositionIncrement();
                        }
                    }
                    long finish = (DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond);
                    System.Console.Out.WriteLine("ModCount: " + modCounts[j] + " Two fields took " + (finish - start) + " ms");
                    int sinkPos = 0;
                    //simulate one field with one sink
                    start = (DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond);
                    for (int i = 0; i < 20; i++)
                    {
                        sink   = new ModuloSinkTokenizer(this, tokCount[k], modCounts[j]);
                        stream = new TeeTokenFilter(new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString()))), sink);
                        for (Token nextToken = stream.Next(reusableToken); nextToken != null; nextToken = stream.Next(reusableToken))
                        {
                            sinkPos += nextToken.GetPositionIncrement();
                        }
                        //System.out.println("Modulo--------");
                        stream = sink;
                        for (Token nextToken = stream.Next(reusableToken); nextToken != null; nextToken = stream.Next(reusableToken))
                        {
                            sinkPos += nextToken.GetPositionIncrement();
                        }
                    }
                    finish = (DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond);
                    System.Console.Out.WriteLine("ModCount: " + modCounts[j] + " Tee fields took " + (finish - start) + " ms");
                    Assert.IsTrue(sinkPos == tfPos, sinkPos + " does not equal: " + tfPos);
                }
                System.Console.Out.WriteLine("- End Tokens: " + tokCount[k] + "-----");
            }
        }
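The point of the Tee/Sink pair benchmarked above is to tokenize once and feed two consumers, rather than running the full analysis chain twice. A minimal standalone sketch of the idea (hypothetical types, not the Lucene.NET TeeTokenFilter/SinkTokenizer):

    using System;
    using System.Collections.Generic;

    static class TeeSinkDemo
    {
        // Tee: pass every token through while copying it into a sink list.
        static IEnumerable<string> Tee(IEnumerable<string> tokens, List<string> sink)
        {
            foreach (var t in tokens)
            {
                sink.Add(t);     // cheap copy into the sink
                yield return t;  // unchanged primary stream
            }
        }

        static void Main()
        {
            var sink = new List<string>();
            foreach (var t in Tee(new[] { "one", "two", "three" }, sink))
                Console.WriteLine("primary: " + t);
            // the sink was filled during the single pass above
            foreach (var t in sink)
                Console.WriteLine("sink:    " + t);
        }
    }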
Example #7
 /// <summary> Copies the state of a legacy Token into this stream's attributes.</summary>
 private void SetCurrentToken(Token token)
 {
     if (token == null) return;
     ClearAttributes();
     _termAtt.SetTermBuffer(token.TermBuffer(), 0, token.TermLength());
     _posIncrAtt.SetPositionIncrement(token.GetPositionIncrement());
     _flagsAtt.SetFlags(token.GetFlags());
     _offsetAtt.SetOffset(token.StartOffset(), token.EndOffset());
     _typeAtt.SetType(token.Type());
     _payloadAtt.SetPayload(token.GetPayload());
 }
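A helper like this typically backs the attribute-based API on top of a legacy token source. A rough sketch of the calling pattern, assuming the surrounding class holds a legacy source and a reusable token (both field names here are illustrative, not the actual Lucene.NET wrapper's):

 public override bool IncrementToken()
 {
     // pull the next legacy Token; null signals end of stream
     Token token = _legacySource.Next(_reusableToken);
     if (token == null)
         return false;

     // republish the token's state through the attributes set up above
     SetCurrentToken(token);
     return true;
 }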
Example #8
		public virtual void TestPerformance()
		{
			int[] tokCount = new int[]{100, 500, 1000, 2000, 5000, 10000};
			int[] modCounts = new int[]{1, 2, 5, 10, 20, 50, 100, 200, 500};
			for (int k = 0; k < tokCount.Length; k++)
			{
				System.Text.StringBuilder buffer = new System.Text.StringBuilder();
				System.Console.Out.WriteLine("-----Tokens: " + tokCount[k] + "-----");
				for (int i = 0; i < tokCount[k]; i++)
				{
					buffer.Append(English.IntToEnglish(i).ToUpper()).Append(' ');
				}
				//make sure we produce the same tokens
				ModuloSinkTokenizer sink = new ModuloSinkTokenizer(this, tokCount[k], 100);
				Token next = new Token();
				TokenStream result = new TeeTokenFilter(new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString()))), sink);
				while ((next = result.Next(next)) != null)
				{
				}
				result = new ModuloTokenFilter(this, new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString()))), 100);
				next = new Token();
				System.Collections.IList tmp = new System.Collections.ArrayList();
				while ((next = result.Next(next)) != null)
				{
					tmp.Add(next.Clone());
				}
				System.Collections.IList sinkList = sink.GetTokens();
				Assert.IsTrue(tmp.Count == sinkList.Count, "tmp Size: " + tmp.Count + " is not: " + sinkList.Count);
				for (int i = 0; i < tmp.Count; i++)
				{
					Token tfTok = (Token) tmp[i];
					Token sinkTok = (Token) sinkList[i];
					Assert.IsTrue(tfTok.TermText().Equals(sinkTok.TermText()), tfTok.TermText() + " is not equal to " + sinkTok.TermText() + " at token: " + i);
				}
				//simulate two fields, each being analyzed once, for 20 documents
				
				for (int j = 0; j < modCounts.Length; j++)
				{
					int tfPos = 0;
					long start = System.DateTime.Now.Ticks / System.TimeSpan.TicksPerMillisecond;
					for (int i = 0; i < 20; i++)
					{
						next = new Token();
						result = new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString())));
						while ((next = result.Next(next)) != null)
						{
							tfPos += next.GetPositionIncrement();
						}
						next = new Token();
						result = new ModuloTokenFilter(this, new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString()))), modCounts[j]);
						while ((next = result.Next(next)) != null)
						{
							tfPos += next.GetPositionIncrement();
						}
					}
					long finish = System.DateTime.Now.Ticks / System.TimeSpan.TicksPerMillisecond;
					System.Console.Out.WriteLine("ModCount: " + modCounts[j] + " Two fields took " + (finish - start) + " ms");
					int sinkPos = 0;
					//simulate one field with one sink
					start = System.DateTime.Now.Ticks / System.TimeSpan.TicksPerMillisecond;
					for (int i = 0; i < 20; i++)
					{
						sink = new ModuloSinkTokenizer(this, tokCount[k], modCounts[j]);
						next = new Token();
						result = new TeeTokenFilter(new StandardFilter(new StandardTokenizer(new System.IO.StringReader(buffer.ToString()))), sink);
						while ((next = result.Next(next)) != null)
						{
							sinkPos += next.GetPositionIncrement();
						}
						//System.out.println("Modulo--------");
						result = sink;
						while ((next = result.Next(next)) != null)
						{
							sinkPos += next.GetPositionIncrement();
						}
					}
					finish = System.DateTime.Now.Ticks / System.TimeSpan.TicksPerMillisecond;
					System.Console.Out.WriteLine("ModCount: " + modCounts[j] + " Tee fields took " + (finish - start) + " ms");
					Assert.IsTrue(sinkPos == tfPos, sinkPos + " does not equal: " + tfPos);
				}
				System.Console.Out.WriteLine("- End Tokens: " + tokCount[k] + "-----");
			}
		}