Esempio n. 1
0
        // Tokenizes the fields of a document into Postings.
        private void  InvertDocument(Document doc)
        {
            foreach (Field field in doc.Fields())
            {
                System.String fieldName   = field.Name();
                int           fieldNumber = fieldInfos.FieldNumber(fieldName);

                if (!field.IsIndexed())
                {
                    continue; // non-indexed fields contribute no postings
                }

                // Running length/position for this field, carried in per-field arrays.
                int length   = fieldLengths[fieldNumber];
                int position = fieldPositions[fieldNumber];

                if (!field.IsTokenized())
                {
                    // Un-tokenized field: the entire value becomes a single posting.
                    AddPosition(fieldName, field.StringValue(), position++);
                    length++;
                }
                else
                {
                    // Find or make a Reader over the field's content.
                    System.IO.TextReader reader = field.ReaderValue();
                    if (reader == null)
                    {
                        if (field.StringValue() == null)
                        {
                            throw new System.ArgumentException("Field must have either String or Reader value");
                        }
                        reader = new System.IO.StringReader(field.StringValue());
                    }

                    // Tokenize the field and add each term to the posting table.
                    TokenStream stream = analyzer.TokenStream(fieldName, reader);
                    try
                    {
                        Token t;
                        while ((t = stream.Next()) != null)
                        {
                            // A position increment > 1 leaves gaps; 0 stacks tokens.
                            position += t.GetPositionIncrement() - 1;
                            AddPosition(fieldName, t.TermText(), position++);
                            if (++length > maxFieldLength)
                            {
                                break; // truncate over-long fields
                            }
                        }
                    }
                    finally
                    {
                        stream.Close();
                    }
                }

                fieldLengths[fieldNumber]   = length;   // save Field length
                fieldPositions[fieldNumber] = position; // save Field position
                fieldBoosts[fieldNumber]   *= field.GetBoost();
            }
        }
Esempio n. 2
0
 /// <summary> Builds a term vector from <c>queryString</c> by running it through
 /// <c>analyzer</c> and collecting the resulting term texts. A null analyzer,
 /// a null stream, or an IOException during tokenization leaves the vector empty.</summary>
 public QueryTermVector(System.String queryString, Analyzer analyzer)
 {
     if (analyzer != null)
     {
         TokenStream stream = analyzer.TokenStream("", new System.IO.StringReader(queryString));
         if (stream != null)
         {
             Token next = null;
             System.Collections.ArrayList terms = new System.Collections.ArrayList();
             try
             {
                 while ((next = stream.Next()) != null)
                 {
                     terms.Add(next.TermText());
                 }
                 ProcessTerms((System.String[])terms.ToArray(typeof(System.String)));
             }
             catch (System.IO.IOException)
             {
                 // Deliberate best-effort: an unreadable stream yields an empty vector.
             }
             finally
             {
                 // Bug fix: the stream was previously leaked; release its resources.
                 stream.Close();
             }
         }
     }
 }
 /// <summary> Builds a GermanStemFilter that uses an exclusiontable.</summary>
 /// <param name="in_Renamed">the TokenStream to filter</param>
 /// <param name="exclusiontable">table of terms to exclude; copied into <c>exclusionSet</c></param>
 /// <deprecated> Use {@link #GermanStemFilter(Monodoc.Lucene.Net.Analysis.TokenStream, java.util.Set)} instead.
 /// </deprecated>
 public GermanStemFilter(TokenStream in_Renamed, System.Collections.Hashtable exclusiontable) : this(in_Renamed)
 {
     // Take a single defensive copy; the original double-wrapped the table
     // (new Hashtable(new Hashtable(...))), copying the entries twice for no benefit.
     exclusionSet = new System.Collections.Hashtable(exclusiontable);
 }
 /// <summary> Builds a GermanStemFilter over the given token stream,
 /// using a fresh <c>GermanStemmer</c> and no exclusion table.</summary>
 /// <param name="in_Renamed">the TokenStream to filter</param>
 public GermanStemFilter(TokenStream in_Renamed) : base(in_Renamed)
 {
     stemmer = new GermanStemmer();
 }
Esempio n. 5
0
		/// <summary> Builds a GermanStemFilter that uses an exclusiontable.</summary>
		/// <param name="in_Renamed">the TokenStream to filter</param>
		/// <param name="exclusiontable">table of terms to exclude; copied into <c>exclusionSet</c></param>
		/// <deprecated> Use {@link #GermanStemFilter(Monodoc.Lucene.Net.Analysis.TokenStream, java.util.Set)} instead.
		/// </deprecated>
		public GermanStemFilter(TokenStream in_Renamed, System.Collections.Hashtable exclusiontable):this(in_Renamed)
		{
			// Take a single defensive copy; the original double-wrapped the table
			// (new Hashtable(new Hashtable(...))), copying the entries twice for no benefit.
			exclusionSet = new System.Collections.Hashtable(exclusiontable);
		}
Esempio n. 6
0
		/// <summary> Builds a GermanStemFilter over the given token stream,
		/// using a fresh <c>GermanStemmer</c> and no exclusion table.</summary>
		/// <param name="in_Renamed">the TokenStream to filter</param>
		public GermanStemFilter(TokenStream in_Renamed) : base(in_Renamed)
		{
			stemmer = new GermanStemmer();
		}
		/// <summary> Builds a RussianStemFilter over the given token stream,
		/// using a <c>RussianStemmer</c> configured with <c>charset</c>.</summary>
		/// <param name="in_Renamed">the TokenStream to filter</param>
		/// <param name="charset">character mapping passed to the stemmer</param>
		public RussianStemFilter(TokenStream in_Renamed, char[] charset):base(in_Renamed)
		{
			stemmer = new RussianStemmer(charset);
		}
Esempio n. 8
0
		/// <summary> Builds a RussianLowerCaseFilter over the given token stream.</summary>
		/// <param name="in_Renamed">the TokenStream to filter</param>
		/// <param name="charset">character mapping stored for use by the filter</param>
		public RussianLowerCaseFilter(TokenStream in_Renamed, char[] charset):base(in_Renamed)
		{
			this.charset = charset;
		}
Esempio n. 9
0
 /// <summary> Builds a RussianStemFilter over the given token stream,
 /// using a <c>RussianStemmer</c> configured with <c>charset</c>.</summary>
 /// <param name="in_Renamed">the TokenStream to filter</param>
 /// <param name="charset">character mapping passed to the stemmer</param>
 public RussianStemFilter(TokenStream in_Renamed, char[] charset) : base(in_Renamed)
 {
     stemmer = new RussianStemmer(charset);
 }
Esempio n. 10
0
 /// <summary> Builds a RussianLowerCaseFilter over the given token stream.</summary>
 /// <param name="in_Renamed">the TokenStream to filter</param>
 /// <param name="charset">character mapping stored for use by the filter</param>
 public RussianLowerCaseFilter(TokenStream in_Renamed, char[] charset) : base(in_Renamed)
 {
     this.charset = charset;
 }