private void Copy(AttributeSource target, AttributeSource source) { if (target != source) { source.CopyTo(target); } }
public InputWindowToken(ShingleFilter outerInstance, AttributeSource attSource) { this.outerInstance = outerInstance; this.attSource = attSource; this.termAtt = attSource.getAttribute(typeof(CharTermAttribute)); this.offsetAtt = attSource.getAttribute(typeof(OffsetAttribute)); }
public override bool Accept(AttributeSource a) { bool b = (a != null && count % modCount == 0); count++; return(b); }
public InputWindowToken(ShingleFilter outerInstance, AttributeSource attSource) { this.outerInstance = outerInstance; this.attSource = attSource; this.termAtt = attSource.GetAttribute <ICharTermAttribute>(); this.offsetAtt = attSource.GetAttribute <IOffsetAttribute>(); }
public override TermsEnum GetTermsEnum(Terms terms, AttributeSource atts) { if (MaxEdits_Renamed == 0 || PrefixLength_Renamed >= _term.Text().Length) // can only match if it's exact { return(new SingleTermsEnum(terms.Iterator(null), _term.Bytes)); } return(new FuzzyTermsEnum(terms, atts, Term, MaxEdits_Renamed, PrefixLength_Renamed, Transpositions_Renamed)); }
//JAVA TO C# CONVERTER WARNING: Method 'throws' clauses are not available in .NET: //ORIGINAL LINE: @Override public void reset() throws java.io.IOException public override void reset() { base.reset(); hasMoreTokensInClone = false; clonedToken = null; clonedTermAtt = null; clonedOffsetAtt = null; }
protected override TermsEnum GetTermsEnum(Terms terms, AttributeSource atts) { if (maxEdits == 0 || prefixLength >= term.Text().Length) // can only match if it's exact { return(new SingleTermsEnum(terms.GetIterator(null), term.Bytes)); } return(new FuzzyTermsEnum(terms, atts, Term, maxEdits, prefixLength, transpositions)); }
protected override TermsEnum GetTermsEnum(Terms terms, AttributeSource atts) { if (!termLongEnough) { // can only match if it's exact return(new SingleTermsEnum(terms.GetEnumerator(), m_term.Bytes)); } return(new SlowFuzzyTermsEnum(terms, atts, Term, minimumSimilarity, prefixLength)); }
private void PushTok(AttributeSource t) { if (buffer == null) { buffer = new LinkedList <AttributeSource>(); } buffer.AddFirst(t); }
/// <summary> /// Returns the related attributes. </summary> public virtual AttributeSource Attributes() { if (Atts == null) { Atts = new AttributeSource(); } return(Atts); }
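A minimal caller sketch for the lazy initialization above; the termsEnum variable and the IBoostAttribute choice are illustrative assumptions, not part of the original snippet.

// Hypothetical usage: the first call creates the AttributeSource; later calls return the same instance.
AttributeSource atts = termsEnum.Attributes();                 // termsEnum is an assumed TermsEnum-like object exposing Attributes()
var boostAtt = atts.AddAttribute<IBoostAttribute>();           // attach a consumer-defined attribute to the shared source
System.Diagnostics.Debug.Assert(object.ReferenceEquals(atts, termsEnum.Attributes()));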
/// <summary> /// Constructor for enumeration of all terms from specified <c>reader</c> which share a prefix of /// length <paramref name="prefixLength"/> with <paramref name="term"/> and which have a fuzzy similarity > /// <paramref name="minSimilarity"/>. /// <para/> /// After calling the constructor the enumeration is already pointing to the first /// valid term if such a term exists. /// </summary> /// <param name="terms"> Delivers terms. </param> /// <param name="atts"> <see cref="AttributeSource"/> created by the rewrite method of <see cref="MultiTermQuery"/> /// that contains information about competitive boosts during rewrite. It is also used /// to cache DFAs between segment transitions. </param> /// <param name="term"> Pattern term. </param> /// <param name="minSimilarity"> Minimum required similarity for terms from the reader. Pass an integer value /// representing edit distance. Passing a fraction is deprecated. </param> /// <param name="prefixLength"> Length of required common prefix. Default value is 0. </param> /// <param name="transpositions"> Transpositions </param> /// <exception cref="IOException"> if there is a low-level IO error </exception> public FuzzyTermsEnum(Terms terms, AttributeSource atts, Term term, float minSimilarity, int prefixLength, bool transpositions) { InitializeInstanceFields(); if (minSimilarity >= 1.0f && minSimilarity != (int)minSimilarity) { throw new ArgumentException("fractional edit distances are not allowed"); } if (minSimilarity < 0.0f) { throw new ArgumentException("minimumSimilarity cannot be less than 0"); } if (prefixLength < 0) { throw new ArgumentException("prefixLength cannot be less than 0"); } this.m_terms = terms; this.term = term; // convert the string into a utf32 int[] representation for fast comparisons string utf16 = term.Text(); this.m_termText = new int[utf16.CodePointCount(0, utf16.Length)]; for (int cp, i = 0, j = 0; i < utf16.Length; i += Character.CharCount(cp)) { m_termText[j++] = cp = utf16.CodePointAt(i); } this.m_termLength = m_termText.Length; this.dfaAtt = atts.AddAttribute <ILevenshteinAutomataAttribute>(); //The prefix could be longer than the word. //It's kind of silly though. It means we must match the entire word. this.m_realPrefixLength = prefixLength > m_termLength ? m_termLength : prefixLength; // if minSimilarity >= 1, we treat it as number of edits if (minSimilarity >= 1f) { this.m_minSimilarity = 0; // just driven by number of edits m_maxEdits = (int)minSimilarity; m_raw = true; } else { this.m_minSimilarity = minSimilarity; // calculate the maximum k edits for this similarity m_maxEdits = InitialMaxDistance(this.m_minSimilarity, m_termLength); m_raw = false; } if (transpositions && m_maxEdits > LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE) { throw new NotSupportedException("with transpositions enabled, distances > " + LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE + " are not supported "); } this.transpositions = transpositions; this.m_scaleFactor = 1.0f / (1.0f - this.m_minSimilarity); this.maxBoostAtt = atts.AddAttribute <IMaxNonCompetitiveBoostAttribute>(); bottom = maxBoostAtt.MaxNonCompetitiveBoost; bottomTerm = maxBoostAtt.CompetitiveTerm; BottomChanged(null, true); }
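A hedged construction sketch for the constructor documented above; the field name, the edit-distance value, and the MoveNext()/Term enumeration style are assumptions about the surrounding Lucene.NET API version.

// Hypothetical sketch: enumerate terms of the "body" field within 1 edit of "lucene",
// requiring a 2-character common prefix and no transposition support.
AttributeSource atts = new AttributeSource();
TermsEnum fuzzyEnum = new FuzzyTermsEnum(terms, atts, new Term("body", "lucene"),
                                         1.0f /* >= 1, so treated as an edit count */, 2, false);
while (fuzzyEnum.MoveNext())                                   // assumes the enumerator-style TermsEnum API
{
    Console.WriteLine(fuzzyEnum.Term.Utf8ToString());
}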
public override TermsEnum GetTermsEnum(Terms terms, AttributeSource atts) { if (_terms.Size() == 0) { return(TermsEnum.EMPTY); } return(new SeekingTermSetTermsEnum(terms.Iterator(null), _terms, _ords)); }
/// <summary> Re-initialize the state, using this boost value.</summary> /// <param name="docBoost">boost value to use. /// </param> internal void Reset(float docBoost) { position = 0; length = 0; numOverlap = 0; offset = 0; boost = docBoost; attributeSource = null; }
// LUCENENET NOTE: Static methods were moved into the NumericRangeQuery class protected override TermsEnum GetTermsEnum(Terms terms, AttributeSource atts) { // very strange: java.lang.Number itself is not Comparable, but all subclasses used here are if (min.HasValue && max.HasValue && (min.Value).CompareTo(max.Value) > 0) { return(TermsEnum.EMPTY); } return(new NumericRangeTermsEnum(this, terms.GetIterator(null))); }
/// <summary> Expert: Creates a token stream for numeric values with the specified /// <c>precisionStep</c> using the given <see cref="AttributeSource" />. /// The stream is not yet initialized, /// before using set a value using the various set<em>???</em>Value() methods. /// </summary> public NumericTokenStream(AttributeSource source, int precisionStep) : base(source) { InitBlock(); this.precisionStep = precisionStep; if (precisionStep < 1) { throw new System.ArgumentException("precisionStep must be >=1"); } }
protected LowerCaseKeywordTokenizer(AttributeSource source, System.IO.TextReader input) : base(source, input) { offsetAtt = AddAttribute <IOffsetAttribute>(); termAtt = AddAttribute <ITermAttribute>(); isAsciiCasingSameAsInvariant = CultureInfo.InvariantCulture.CompareInfo.Compare("abcdefghijklmnopqrstuvwxyz", "ABCDEFGHIJKLMNOPQRSTUVWXYZ", CompareOptions.IgnoreCase) == 0; invariantTextInfo = CultureInfo.InvariantCulture.TextInfo; }
protected override TermsEnum GetTermsEnum(Terms terms, AttributeSource atts) { if (_terms.Count == 0) { return(TermsEnum.EMPTY); } return(new SeekingTermSetTermsEnum(terms.GetEnumerator(), _terms, _ords)); }
public override bool Accept(AttributeSource source) { if (typeAtt == null) { typeAtt = source.AddAttribute <ITypeAttribute>(); } return(typeToMatch.Equals(typeAtt.Type)); }
public override bool Accept(AttributeSource source) { if (typeAtt == null) { typeAtt = source.AddAttribute <ITypeAttribute>(); } //check to see if this is a Category return(typeToMatch.Equals(typeAtt.Type)); }
public override bool Accept(AttributeSource source) { if (typeAtt is null) { typeAtt = source.AddAttribute <ITypeAttribute>(); } //check to see if this is a Category return(typeToMatch.Equals(typeAtt.Type, StringComparison.Ordinal)); }
public override TermsEnum GetTermsEnum(Terms terms, AttributeSource atts) { TermsEnum tenum = terms.Iterator(null); if (Prefix_Renamed.Bytes().Length == 0) { // no prefix -- match all terms for this field: return tenum; } return new PrefixTermsEnum(tenum, Prefix_Renamed.Bytes()); }
/// <summary> /// Re-initialize the state /// </summary> internal void Reset() { position = 0; length = 0; numOverlap = 0; offset = 0; maxTermFrequency = 0; uniqueTermCount = 0; boost = 1.0f; attributeSource = null; }
/// <summary> /// Tells the actor to die and triggers any effects or animations /// Damage Type 0 = Physical melee /// Damage Type 1 = Physical ranged /// </summary> /// <param name="rDamageValue">Amount of damage to take</param> /// <param name="rDamageType">Damage type taken</param> /// <param name="rAttackAngle">Angle that the damage came from relative to the actor's forward</param> /// <param name="rBone">Transform that the damage hit, if known</param> public virtual void OnDeath(float rDamageValue = 0, int rDamageType = 0, float rAttackAngle = 0f, Transform rBone = null) { IsAlive = false; if (AttributeSource != null) { AttributeSource.SetAttributeValue(HealthID, 0f); } StartCoroutine(InternalDeath(rDamageValue, rDamageType, rAttackAngle, rBone)); }
protected override TermsEnum GetTermsEnum(Terms terms, AttributeSource atts) { TermsEnum tenum = terms.GetEnumerator(); if (_prefix.Bytes.Length == 0) { // no prefix -- match all terms for this field: return(tenum); } return(new PrefixTermsEnum(tenum, _prefix.Bytes)); }
public override TermsEnum GetTermsEnum(Terms terms, AttributeSource atts) { TermsEnum tenum = terms.Iterator(null); if (Prefix_Renamed.Bytes().Length == 0) { // no prefix -- match all terms for this field: return(tenum); } return(new PrefixTermsEnum(tenum, Prefix_Renamed.Bytes())); }
/// <summary> /// Tells the actor to die and triggers any effects or animations /// Damage Type 0 = Physical melee /// Damage Type 1 = Physical ranged /// </summary> /// <param name="rMessage">Message describing the damage event that killed the actor</param> public virtual void OnKilled(IMessage rMessage) { IsAlive = false; if (AttributeSource != null && HealthID.Length > 0) { AttributeSource.SetAttributeValue(HealthID, 0f); } StartCoroutine(InternalDeath(rMessage)); }
/// <summary> /// Tells the actor to die and triggers any effects or animations /// Damage Type 0 = Physical melee /// Damage Type 1 = Physical ranged /// </summary> /// <param name="rMessage">Message describing the damage event that killed the actor</param> public virtual void OnKilled(IMessage rMessage) { com.ootii.Utilities.Debug.Log.FileWrite(transform.name + ".OnKilled()"); IsAlive = false; if (AttributeSource != null) { AttributeSource.SetAttributeValue(HealthID, 0f); } StartCoroutine(InternalDeath(rMessage)); }
public override TermsEnum GetTermsEnum(Terms terms, AttributeSource atts) { if (LowerTerm_Renamed != null && UpperTerm_Renamed != null && LowerTerm_Renamed.CompareTo(UpperTerm_Renamed) > 0) { return(TermsEnum.EMPTY); } TermsEnum tenum = terms.Iterator(null); if ((LowerTerm_Renamed == null || (IncludeLower && LowerTerm_Renamed.Length == 0)) && UpperTerm_Renamed == null) { return(tenum); } return(new TermRangeTermsEnum(tenum, LowerTerm_Renamed, UpperTerm_Renamed, IncludeLower, IncludeUpper)); }
protected override TermsEnum GetTermsEnum(Terms terms, AttributeSource atts) { if (lowerTerm != null && upperTerm != null && lowerTerm.CompareTo(upperTerm) > 0) { return(TermsEnum.EMPTY); } TermsEnum tenum = terms.GetEnumerator(); if ((lowerTerm == null || (includeLower && lowerTerm.Length == 0)) && upperTerm == null) { return(tenum); } return(new TermRangeTermsEnum(tenum, lowerTerm, upperTerm, includeLower, includeUpper)); }
public override bool Accept(AttributeSource source) { try { if (count >= lower && count < upper) { return(true); } return(false); } finally { count++; } }
public override TokenStream TokenStream(string fieldName, TextReader reader) { var attributeSource = new AttributeSource(); attributeSource.AddAttributeImpl(new SpellAttribute()); attributeSource.AddAttributeImpl(new StemAttribute()); var tokenizer = new RussianLetterTokenizer(attributeSource, reader); var lowercaseFilter = new LowerCaseFilter(tokenizer); var badWordsFilter = new BadWordsFilter(lowercaseFilter); var stopWordFilter = new StopFilter(false, badWordsFilter, StopWords); var preFilter = new StemFilter(stopWordFilter, SpellChecker, NumberOfSuggestions); var similarFilter = new SimilarFilter(preFilter); return(similarFilter); }
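A hedged consumption sketch for the analyzer chain above; the analyzer class name and field name are placeholders, and the ITermAttribute/IncrementToken loop assumes the 3.x-era attribute API used by RussianLetterTokenizer.

// Hypothetical usage: run the chain and print each token that survives the filters.
var analyzer = new MyRussianAnalyzer();                         // placeholder for the class defining TokenStream above
TokenStream stream = analyzer.TokenStream("body", new StringReader("пример текста"));
var termAtt = stream.AddAttribute<ITermAttribute>();
while (stream.IncrementToken())
{
    Console.WriteLine(termAtt.Term);
}
stream.Close();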
public override bool accept(AttributeSource source) { if (termAtt == null) { termAtt = source.addAttribute(typeof(CharTermAttribute)); } try { DateTime date = dateFormat.parse(termAtt.ToString()); //We don't care about the date, just that we can parse it as a date if (date != null) { return true; } } catch (ParseException) { } return false; }
internal /*private*/ void SetFinalState(AttributeSource.State finalState) { this.finalState = finalState; }
public override TermsEnum GetTermsEnum(Terms terms, AttributeSource atts) { if (MaxEdits_Renamed == 0 || PrefixLength_Renamed >= Term_Renamed.Text().Length) // can only match if it's exact { return new SingleTermsEnum(terms.Iterator(null), Term_Renamed.Bytes()); } return new FuzzyTermsEnum(terms, atts, Term, MaxEdits_Renamed, PrefixLength_Renamed, Transpositions_Renamed); }
/// <summary> /// <para>Get the next token from the input stream. /// </para> /// <para>If the next token has <code>positionIncrement > 1</code>, /// <code>positionIncrement - 1</code> <seealso cref="#fillerToken"/>s are /// inserted first. /// </para> /// </summary> /// <param name="target"> Where to put the new token; if null, a new instance is created. </param> /// <returns> On success, the populated token; null otherwise </returns> /// <exception cref="IOException"> if the input stream has a problem </exception> //JAVA TO C# CONVERTER WARNING: Method 'throws' clauses are not available in .NET: //ORIGINAL LINE: private InputWindowToken getNextToken(InputWindowToken target) throws java.io.IOException private InputWindowToken getNextToken(InputWindowToken target) { InputWindowToken newTarget = target; if (numFillerTokensToInsert > 0) { if (null == target) { newTarget = new InputWindowToken(this, nextInputStreamToken.cloneAttributes()); } else { nextInputStreamToken.copyTo(target.attSource); } // A filler token occupies no space newTarget.offsetAtt.setOffset(newTarget.offsetAtt.startOffset(), newTarget.offsetAtt.startOffset()); newTarget.termAtt.copyBuffer(fillerToken, 0, fillerToken.Length); newTarget.isFiller = true; --numFillerTokensToInsert; } else if (isNextInputStreamToken) { if (null == target) { newTarget = new InputWindowToken(this, nextInputStreamToken.cloneAttributes()); } else { nextInputStreamToken.copyTo(target.attSource); } isNextInputStreamToken = false; newTarget.isFiller = false; } else if (!exhausted) { if (input.incrementToken()) { if (null == target) { newTarget = new InputWindowToken(this, cloneAttributes()); } else { this.copyTo(target.attSource); } if (posIncrAtt.PositionIncrement > 1) { // Each output shingle must contain at least one input token, // so no more than (maxShingleSize - 1) filler tokens will be inserted. numFillerTokensToInsert = Math.Min(posIncrAtt.PositionIncrement - 1, maxShingleSize - 1); // Save the current token as the next input stream token if (null == nextInputStreamToken) { nextInputStreamToken = cloneAttributes(); } else { this.copyTo(nextInputStreamToken); } isNextInputStreamToken = true; // A filler token occupies no space newTarget.offsetAtt.setOffset(offsetAtt.startOffset(), offsetAtt.startOffset()); newTarget.termAtt.copyBuffer(fillerToken, 0, fillerToken.Length); newTarget.isFiller = true; --numFillerTokensToInsert; } else { newTarget.isFiller = false; } } else { exhausted = true; input.end(); endState = captureState(); numFillerTokensToInsert = Math.Min(posIncrAtt.PositionIncrement, maxShingleSize - 1); if (numFillerTokensToInsert > 0) { nextInputStreamToken = new AttributeSource(AttributeFactory); nextInputStreamToken.addAttribute(typeof(CharTermAttribute)); OffsetAttribute newOffsetAtt = nextInputStreamToken.addAttribute(typeof(OffsetAttribute)); newOffsetAtt.setOffset(offsetAtt.endOffset(), offsetAtt.endOffset()); // Recurse/loop just once: return getNextToken(target); } else { newTarget = null; } } } else { newTarget = null; } return newTarget; }
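An end-to-end sketch of the filler-token behavior the method above implements; it assumes the released Lucene.NET 4.8 names (LuceneVersion, ICharTermAttribute) rather than the auto-converted casing shown here, and the printed shingles are illustrative.

// Hypothetical sketch: a stop word creates a position gap, so ShingleFilter emits the
// filler token ("_") in its place and the bigrams stay aligned.
var tokenizer = new StandardTokenizer(LuceneVersion.LUCENE_48, new StringReader("please divide the sentence"));
var stopped = new StopFilter(LuceneVersion.LUCENE_48, tokenizer, StandardAnalyzer.STOP_WORDS_SET);
var shingles = new ShingleFilter(stopped, 2, 2);
var termAtt = shingles.AddAttribute<ICharTermAttribute>();
shingles.Reset();
while (shingles.IncrementToken())
{
    Console.WriteLine(termAtt.ToString());                      // e.g. "please", "please divide", "divide", "divide _", "_ sentence", "sentence"
}
shingles.End();
shingles.Dispose();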
internal /*private*/ bool Accept(AttributeSource source) { return filter.Accept(source); }
internal bool accept(AttributeSource source) { return filter.accept(source); }
internal SinkTokenStream(AttributeSource source, SinkFilter filter) : base(source) { this.filter = filter; }
public override bool accept(AttributeSource source) { return true; }
internal /*private*/ void AddState(AttributeSource.State state) { if (it != null) { throw new System.SystemException("The tee must be consumed before sinks are consumed."); } cachedStates.AddLast(state); }
/// <summary>Construct a new WhitespaceTokenizer using a given <see cref="AttributeSource" />. </summary> public WhitespaceTokenizer(AttributeSource source, System.IO.TextReader @in) : base(source, @in) { }
/// <summary> Creates a new StandardTokenizer with a given <see cref="AttributeSource" />.</summary> public StandardTokenizer(Version matchVersion, AttributeSource source, System.IO.TextReader input):base(source) { InitBlock(); this.scanner = new StandardTokenizerImpl(input); Init(input, matchVersion); }
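A small sketch of why a caller might supply its own AttributeSource to the constructor above; the Version.LUCENE_30 value and ITermAttribute are assumptions matching the 3.x-era API this snippet appears to come from.

// Hypothetical sketch: attributes registered on the externally owned source are the
// very instances the tokenizer populates on each IncrementToken().
var shared = new AttributeSource();
var tokenizer = new StandardTokenizer(Version.LUCENE_30, shared, new StringReader("Shared attribute state"));
var termAtt = shared.AddAttribute<ITermAttribute>();
while (tokenizer.IncrementToken())
{
    Console.WriteLine(termAtt.Term);
}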
public override TermsEnum GetTermsEnum(Terms terms, AttributeSource atts) { return new SimpleAutomatonTermsEnum(this, terms.Iterator(null)); }
/// <summary> /// Retrieves the attribute names for a given playlist-based attribute source. /// </summary> /// <param name="Source">An AttributeSource specifying the type of media to inspect</param> /// <returns>void</returns> private void getMetadataFromPlaylist(AttributeSource Source) { IWMPPlaylist playlist = null; string name = ""; try { switch (Source) { // DVDs and CDs use the same CD object. case AttributeSource.DVDToc: case AttributeSource.CDPlaylist: playlist = CD.Playlist; break; case AttributeSource.PlaylistCollection: // Retrieve a playlist from the PlaylistCollection. playlist = PLCollection.getAll().Item(0); break; case AttributeSource.CurrentPlaylist: playlist = Player.currentPlaylist; break; default: // This is strictly for debugging, it should never happen. // If it does, we need a new case here or to fix the calling function. throw new ArgumentOutOfRangeException("Source"); } int cAttributes = playlist.attributeCount; // Log the attribute name and writability. for (int i = 0; i < cAttributes; i++) { name = playlist.get_attributeName(i); ListViewItem item = new ListViewItem(Source.ToString()); item.SubItems.Add(name); item.SubItems.Add(playlist.getItemInfo(name)); // We'll assume it's read-only. // If we can actually write it, we'll flip the bool. // We do this because there is no IsReadOnlyItem property // available on the playlist object. bool bRO = true; try { // Cache the value. string temp = playlist.getItemInfo(name); // Try to write something. playlist.setItemInfo(name, "random"); // Write back the cached value. playlist.setItemInfo(name, temp); bRO = false; } catch (COMException) { // Writing the test value failed. // bRO is true by default, so nothing to do here. } item.SubItems.Add(bRO.ToString()); listView1.Items.Add(item); } } catch (NullReferenceException) { ListViewItem item2 = new ListViewItem(Source.ToString()); item2.SubItems.Add("NullReferenceException"); item2.SubItems.Add("Does your library contain a playlist?"); listView1.Items.Add(item2); } catch (ArgumentOutOfRangeException exc) { lblStatus.Text = "Invalid parameter in getMetadataFromPlaylist: " + exc.Message; } catch (COMException exc) { lblStatus.Text = "Exception in getMetadata: " + exc.Message; } catch { lblStatus.Text = "Exception in getMetadata."; throw; } }
public override TermsEnum GetTermsEnum(Terms terms, AttributeSource atts) { return new SimplePrefixTermsEnum(this, terms.Iterator(null), Prefix); }
public override TermsEnum GetTermsEnum(Terms terms, AttributeSource atts) { return Compiled.GetTermsEnum(terms); }
public override TermsEnum GetTermsEnum(Terms terms, AttributeSource atts) { return new TermRangeTermsEnumAnonymousInnerClassHelper(this, terms.Iterator(null), new BytesRef("2"), new BytesRef("7")); }
/// <summary>Construct a new LowerCaseTokenizer using a given <see cref="AttributeSource" />. </summary> public LowerCaseTokenizer(AttributeSource source, System.IO.TextReader @in) : base(source, @in) { }
internal void addState(AttributeSource.State state) { if (it != null) { throw new System.InvalidOperationException("The tee must be consumed before sinks are consumed."); } cachedStates.Add(state); }
public override Tokenizer create(AttributeSource.AttributeFactory factory, Reader reader) { return new ThaiTokenizer(factory, reader); }
/// <summary> Expert: Creates a token stream for numeric values with the specified /// <c>precisionStep</c> using the given <see cref="AttributeSource" />. /// The stream is not yet initialized, /// before using set a value using the various set<em>???</em>Value() methods. /// </summary> public NumericTokenStream(AttributeSource source, int precisionStep):base(source) { InitBlock(); this.precisionStep = precisionStep; if (precisionStep < 1) throw new System.ArgumentException("precisionStep must be >=1"); }
//JAVA TO C# CONVERTER WARNING: Method 'throws' clauses are not available in .NET: //ORIGINAL LINE: @Override public void reset() throws java.io.IOException public override void reset() { base.reset(); gramSize.reset(); inputWindow.Clear(); nextInputStreamToken = null; isNextInputStreamToken = false; numFillerTokensToInsert = 0; isOutputHere = false; noShingleOutput = true; exhausted = false; endState = null; if (outputUnigramsIfNoShingles && !outputUnigrams) { // Fix up gramSize if minValue was reset for outputUnigramsIfNoShingles gramSize.minValue = minShingleSize; } }
/// <summary> /// Displays the attribute information for a given media-based attribute source. /// </summary> /// <param name="Source">An AttributeSource specifying the schema to inspect.</param> /// <returns>void</returns> private void getMetadataFromMedia(AttributeSource Source) { IWMPPlaylist playlist = null; IWMPMedia media = null; string name = ""; try { switch (Source) { case AttributeSource.CDTrack: playlist = CD.Playlist; break; case AttributeSource.CurrentMedia: playlist = Player.currentPlaylist; break; default: // Get a playlist filled with media for the specified schema. playlist = MediaCollection.getByAttribute("MediaType", Source.ToString()); break; } if (0 != playlist.count) { // Get the first item from the playlist. media = playlist.get_Item(0); } else { throw new EmptyPlaylistException(); } int cAttributes = media.attributeCount; // Log the attribute name, value, and writability. for (int i = 0; i < cAttributes; i++) { name = media.getAttributeName(i); ListViewItem item = new ListViewItem(Source.ToString()); item.SubItems.Add(name); item.SubItems.Add(media.getItemInfo(name)); bool bRO = media.isReadOnlyItem(name); item.SubItems.Add(bRO.ToString()); listView1.Items.Add(item); } } catch (EmptyPlaylistException) { ListViewItem item3 = new ListViewItem(Source.ToString()); item3.SubItems.Add("EmptyPlaylistException"); item3.SubItems.Add("Does your library contain media for this type or source?"); listView1.Items.Add(item3); } catch (COMException exc) { lblStatus.Text = "Exception in getMetadata: " + exc.Message; } catch { lblStatus.Text = "Exception in getMetadata."; throw; } // Insert an empty line in the listview. ListViewItem item2 = new ListViewItem(""); listView1.Items.Add(item2); }
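Hypothetical call sites for the two helpers above, e.g. from the sample form's button handlers; the enum values used here are taken from the switch statements in those methods.

// Hypothetical usage: dump playlist-level and media-level attributes into listView1.
listView1.Items.Clear();
getMetadataFromPlaylist(AttributeSource.CurrentPlaylist);
getMetadataFromMedia(AttributeSource.CurrentMedia);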
/// <summary> /// Returns <c>true</c>, iff the current state of the passed-in <see cref="AttributeSource"/> shall be stored /// in the sink. /// </summary> public abstract bool accept(AttributeSource source);
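A concrete filter sketch for the contract above, wired into TeeSinkTokenFilter; it uses the released Lucene.NET PascalCase names (Accept, ICharTermAttribute, NewSinkTokenStream) rather than the auto-converted casing of the abstract declaration, and the five-character cutoff is arbitrary.

// Hypothetical sketch: only token states whose term is longer than five characters reach the sink.
internal sealed class LongTokenSinkFilter : TeeSinkTokenFilter.SinkFilter
{
    private ICharTermAttribute termAtt;

    public override bool Accept(AttributeSource source)
    {
        if (termAtt == null)
        {
            termAtt = source.AddAttribute<ICharTermAttribute>();
        }
        return termAtt.Length > 5;
    }
}

// Wiring: every token flows through the tee; accepted states are replayed later by the sink stream.
var reader = new StringReader("tee sink example keeping only longer tokens");
var tee = new TeeSinkTokenFilter(new WhitespaceTokenizer(LuceneVersion.LUCENE_48, reader));
TeeSinkTokenFilter.SinkTokenStream longTokens = tee.NewSinkTokenStream(new LongTokenSinkFilter());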
/// <summary>Construct a token stream processing the given input using the given AttributeSource. </summary> protected internal Tokenizer(AttributeSource source, System.IO.TextReader input):base(source) { this.input = CharReader.Get(input); }
/// <summary> A TokenStream that uses the same attributes as the supplied one.</summary> protected internal TokenStream(AttributeSource input) : base(input) { }
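A tiny illustration of the sharing described above; the 3.x-era WhitespaceTokenizer/LowerCaseFilter pair and ITermAttribute are assumptions used only to show that both ends of the chain see the same attribute instance.

// Hypothetical sketch: the filter is constructed from its input stream, so both share one attribute map.
var tokenizer = new WhitespaceTokenizer(new StringReader("Shared State"));
TokenStream filtered = new LowerCaseFilter(tokenizer);
var fromTokenizer = tokenizer.AddAttribute<ITermAttribute>();
var fromFilter = filtered.AddAttribute<ITermAttribute>();
// fromTokenizer and fromFilter refer to the same ITermAttribute object.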
public override TermsEnum GetTermsEnum(Terms terms, AttributeSource atts) { if (LowerTerm_Renamed != null && UpperTerm_Renamed != null && LowerTerm_Renamed.CompareTo(UpperTerm_Renamed) > 0) { return TermsEnum.EMPTY; } TermsEnum tenum = terms.Iterator(null); if ((LowerTerm_Renamed == null || (IncludeLower && LowerTerm_Renamed.Length == 0)) && UpperTerm_Renamed == null) { return tenum; } return new TermRangeTermsEnum(tenum, LowerTerm_Renamed, UpperTerm_Renamed, IncludeLower, IncludeUpper); }
public KeywordTokenizer(AttributeSource source, System.IO.TextReader input, int bufferSize):base(source, input) { Init(bufferSize); }