/// <summary>Construct a new WhitespaceTokenizer using a given <see cref="Lucene.Net.Util.AttributeSource.AttributeFactory" />. </summary>
		public WhitespaceTokenizer(AttributeFactory factory, System.IO.TextReader in_Renamed):base(factory, in_Renamed)
		{
		}
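A minimal usage sketch, not taken from any of the projects above; it assumes the stock AttributeSource.AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY and the generic AddAttribute<T>() API that appears in the later examples below:

    // Hedged sketch: tokenize a short string on whitespace and print each term.
    var reader = new System.IO.StringReader("hello attribute factory");
    var tokenizer = new WhitespaceTokenizer(
        Lucene.Net.Util.AttributeSource.AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY, reader);
    var termAtt = tokenizer.AddAttribute<ITermAttribute>(); // older ports use AddAttribute(typeof(TermAttribute))
    while (tokenizer.IncrementToken())
    {
        System.Console.WriteLine(termAtt.Term);
    }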
 /// <summary> Creates a new StandardTokenizer with a given <see cref="Lucene.Net.Util.AttributeSource.AttributeFactory" /> </summary>
 public StandardTokenizer(AttributeFactory factory, System.IO.TextReader input, bool replaceInvalidAcronym)
     : base(factory)
 {
     InitBlock();
     this.scanner = new StandardTokenizerImpl(input);
     Init(input, replaceInvalidAcronym);
 }
Example #3
File: Tokenizer.cs Project: JPT123/ravendb
		/// <summary>Construct a token stream processing the given input using the given AttributeFactory. </summary>
		protected internal Tokenizer(AttributeFactory factory, System.IO.TextReader input):base(factory)
		{
			this.input = CharReader.Get(input);
		}
Example #4
 public CharTokenizer(AttributeFactory factory, System.IO.TextReader input)
     : base(factory, input)
 {
     offsetAtt = (OffsetAttribute) AddAttribute(typeof(OffsetAttribute));
     termAtt = (TermAttribute) AddAttribute(typeof(TermAttribute));
 }
Example #5
 /// <summary>
 /// A TokenStream using the supplied AttributeFactory for creating new <see cref="Attribute"/> instances.
 /// </summary>
 protected TokenStream(AttributeFactory factory)
     : base(factory)
 {
     AssertFinal();
 }
Example #6
File: Tokenizer.cs Project: JPT123/ravendb
		/// <summary>Construct a tokenizer with null input using the given AttributeFactory. </summary>
		protected internal Tokenizer(AttributeFactory factory):base(factory)
		{
		}
Example #7
		/// <summary>Construct a new LowerCaseTokenizer using a given <see cref="Lucene.Net.Util.AttributeSource.AttributeFactory" />. </summary>
		public LowerCaseTokenizer(AttributeFactory factory, System.IO.TextReader in_Renamed):base(factory, in_Renamed)
		{
		}
Example #8
		/// <summary> A TokenStream using the supplied AttributeFactory for creating new <see cref="Attribute" /> instances.</summary>
		protected internal TokenStream(AttributeFactory factory):base(onlyUseNewAPI ? factory : new TokenWrapperAttributeFactory(factory))
		{
			InitBlock();
			tokenWrapper = InitTokenWrapper(null);
			Check();
		}
		/// <summary> A TokenStream using the supplied AttributeFactory for creating new <see cref="IAttribute" /> instances.</summary>
        protected internal TokenStream(AttributeFactory factory)
            : base(factory)
		{ }
Example #10
 public KeywordTokenizer(AttributeFactory factory, System.IO.TextReader input, int bufferSize)
     : base(factory, input)
 {
     Init(bufferSize);
 }
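For reference, a hedged construction sketch (the buffer size 256 is an arbitrary illustrative value, and DEFAULT_ATTRIBUTE_FACTORY is assumed as the factory):

    // Hedged sketch: KeywordTokenizer emits the whole input as a single token.
    var keyword = new KeywordTokenizer(
        Lucene.Net.Util.AttributeSource.AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY,
        new System.IO.StringReader("do-not-split-me"),
        256);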
	    protected CharTokenizer(AttributeFactory factory, System.IO.TextReader input):base(factory, input)
		{
            offsetAtt = AddAttribute<IOffsetAttribute>();
            termAtt = AddAttribute<ITermAttribute>();
		}
Example #12
        /// <summary> 
        ///     A <see cref="TokenStream"/> using the supplied AttributeFactory for creating 
        ///     new <see cref="IAttribute"/> instances.
        /// </summary>
        #pragma warning disable 618
        protected internal TokenStream(AttributeFactory factory)
            : base(onlyUseNewAPI ? factory : new TokenWrapperAttributeFactory(factory))
		{
			InitBlock();

            // REMOVE: in 3.0
           
            tokenWrapper = InitTokenWrapper(null);
			Check();
            #pragma warning restore 618
        }
	  /// <summary>
	  /// Create a CollationAttributeFactory, using the supplied Attribute Factory 
	  /// as the factory for all other attributes. </summary>
	  /// <param name="delegate"> Attribute Factory </param>
	  /// <param name="collator"> CollationKey generator </param>
	  public CollationAttributeFactory(AttributeSource.AttributeFactory @delegate, Collator collator)
	  {
		this.@delegate = @delegate;
		this.collator = collator;
	  }
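A hedged usage sketch; how a Collator instance is obtained depends on the collation provider used by the port and is left out here:

    // Hedged sketch: wrap the default factory so generated term attributes also carry collation keys.
    Collator collator = /* obtain a Collator for the desired culture */ null;
    var collationFactory = new CollationAttributeFactory(
        AttributeSource.AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY, collator);
    // collationFactory can then be passed to any Tokenizer constructor that accepts an AttributeFactory.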
		/// <summary>Construct a new LowerCaseTokenizer using a given <see cref="Lucene.Net.Util.AttributeSource.AttributeFactory" />. </summary>
		public LowerCaseTokenizer(AttributeFactory factory, System.IO.TextReader @in)
			: base(factory, @in)
		{
		}
		/// <summary>Construct a new WhitespaceTokenizer using a given <see cref="Lucene.Net.Util.AttributeSource.AttributeFactory" />. </summary>
		public WhitespaceTokenizer(AttributeFactory factory, System.IO.TextReader @in)
			: base(factory, @in)
		{
		}
Example #16
			internal TokenWrapperAttributeFactory(AttributeFactory delegate_Renamed)
			{
				this.delegate_Renamed = delegate_Renamed;
			}
Example #17
 /// <summary> Expert: Creates a token stream for numeric values with the specified
 /// <c>precisionStep</c> using the given
 /// <see cref="Lucene.Net.Util.AttributeSource.AttributeFactory" />.
 /// The stream is not yet initialized; before use, set a value with one of the
 /// Set<em>???</em>Value() methods.
 /// </summary>
 public NumericTokenStream(AttributeFactory factory, int precisionStep)
     : base(factory)
 {
     InitBlock();
     this.precisionStep = precisionStep;
     if (precisionStep < 1)
         throw new System.ArgumentException("precisionStep must be >=1");
 }
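A hedged usage sketch (precisionStep 4 is just an example value):

    // Hedged sketch: initialize the stream with a value before indexing it.
    var numeric = new NumericTokenStream(
        Lucene.Net.Util.AttributeSource.AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY, 4);
    numeric.SetIntValue(42); // SetLongValue/SetFloatValue/SetDoubleValue are the siblings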
		/// <summary> Creates a new StandardTokenizer with a given
		/// <see cref="Lucene.Net.Util.AttributeSource.AttributeFactory" />
		/// </summary>
		public StandardTokenizer(Version matchVersion, AttributeFactory factory, System.IO.TextReader input):base(factory)
		{
			InitBlock();
			this.scanner = new StandardTokenizerImpl(input);
			Init(input, matchVersion);
		}
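A hedged construction sketch (Version.LUCENE_29 is an assumed match version; pick the one that matches your index):

    // Hedged sketch: version-aware construction with the default attribute factory.
    var standard = new StandardTokenizer(
        Version.LUCENE_29,
        Lucene.Net.Util.AttributeSource.AttributeFactory.DEFAULT_ATTRIBUTE_FACTORY,
        new System.IO.StringReader("Lucene.Net StandardTokenizer example"));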
Example #19
 /// <summary>
 /// A TokenStream using the supplied AttributeFactory for creating new <see cref="Attribute"/> instances.
 /// </summary>
 protected internal TokenStream(AttributeFactory factory)
     : base(factory)
 {
     //Debug.Assert(AssertFinal());
 }