/// <summary>
/// Emits the entire remaining input as one token, reusing <paramref name="result"/>.
/// The first call returns the token; every later call returns null.
/// </summary>
/// <param name="result">Token instance to fill and return.</param>
/// <returns>The filled token on the first call, otherwise null.</returns>
public override Token Next(Token result)
{
    if (done)
    {
        // Single-token stream: already emitted.
        return null;
    }
    done = true;
    result.Clear();
    char[] termBuf = result.TermBuffer();
    int filled = 0;
    while (true)
    {
        int read = input.Read(termBuf, filled, termBuf.Length - filled);
        if (read <= 0)
        {
            break;
        }
        filled += read;
        if (filled == termBuf.Length)
        {
            // Buffer is full but more input may remain; grow and continue.
            termBuf = result.ResizeTermBuffer(1 + termBuf.Length);
        }
    }
    result.termLength = filled;
    return result;
}
/// <summary>
/// Emits the entire remaining input as one token, reusing
/// <paramref name="reusableToken"/>. First call returns the token; all
/// subsequent calls return null.
/// </summary>
/// <param name="reusableToken">Non-null token instance to fill and return.</param>
/// <returns>The filled token on the first call, otherwise null.</returns>
public override Token Next(/* in */ Token reusableToken)
{
    System.Diagnostics.Debug.Assert(reusableToken != null);
    if (done)
    {
        // Single-token stream: already emitted.
        return null;
    }
    done = true;
    reusableToken.Clear();
    char[] termBuf = reusableToken.TermBuffer();
    int filled = 0;
    for (int read = input.Read(termBuf, filled, termBuf.Length - filled);
         read > 0;
         read = input.Read(termBuf, filled, termBuf.Length - filled))
    {
        filled += read;
        if (filled == termBuf.Length)
        {
            // Buffer is full but more input may remain; grow and continue.
            termBuf = reusableToken.ResizeTermBuffer(1 + termBuf.Length);
        }
    }
    reusableToken.SetTermLength(filled);
    return reusableToken;
}
/// <summary>
/// Scans the input for the next maximal run of token characters
/// (as decided by <c>IsTokenChar</c>), normalizes each character via
/// <c>Normalize</c>, and returns it in <paramref name="reusableToken"/>
/// with start/end offsets set. Returns null at end of input.
/// </summary>
/// <param name="reusableToken">Non-null token instance to fill and return.</param>
/// <returns>The next token, or null when the input is exhausted.</returns>
public override Token Next(/* in */ Token reusableToken)
{
    System.Diagnostics.Debug.Assert(reusableToken != null);
    reusableToken.Clear();
    int length = 0;                 // chars accumulated for the current token
    int start = bufferIndex;        // start offset of the current token
    char[] buffer = reusableToken.TermBuffer();
    while (true)
    {
        if (bufferIndex >= dataLen)
        {
            // ioBuffer exhausted: account for consumed chars, then refill.
            offset += dataLen;
            // Fast path for the reusable string reader used during indexing;
            // otherwise read through the generic TextReader interface.
            dataLen = input is Lucene.Net.Index.ReusableStringReader
                ? ((Lucene.Net.Index.ReusableStringReader) input).Read(ioBuffer)
                : input.Read((System.Char[]) ioBuffer, 0, ioBuffer.Length);
            if (dataLen <= 0)
            {
                // End of input: emit a pending partial token, else signal EOF.
                if (length > 0)
                    break;
                else
                    return null;
            }
            bufferIndex = 0;
        }
        char c = ioBuffer[bufferIndex++];
        if (IsTokenChar(c))
        {
            // if it's a token char
            if (length == 0)
                // start of token: bufferIndex was already advanced, hence -1
                start = offset + bufferIndex - 1;
            else if (length == buffer.Length)
                buffer = reusableToken.ResizeTermBuffer(1 + length);
            buffer[length++] = Normalize(c); // buffer it, normalized
            if (length == MAX_WORD_LEN)      // buffer overflow!
                break;
        }
        else if (length > 0)
            // at non-Letter w/ chars
            break; // return 'em
    }
    reusableToken.SetTermLength(length);
    reusableToken.SetStartOffset(start);
    reusableToken.SetEndOffset(start + length);
    return reusableToken;
}
/// <summary>
/// Scans the input for the next maximal run of token characters
/// (as decided by <c>IsTokenChar</c>), normalizes each character via
/// <c>Normalize</c>, and returns it in <paramref name="token"/> with
/// start/end offsets set directly on the token's fields. Returns null
/// at end of input.
/// </summary>
/// <param name="token">Token instance to fill and return.</param>
/// <returns>The next token, or null when the input is exhausted.</returns>
public override Token Next(Token token)
{
    token.Clear();
    int length = 0;                 // chars accumulated for the current token
    int start = bufferIndex;        // start offset of the current token
    char[] buffer = token.TermBuffer();
    while (true)
    {
        if (bufferIndex >= dataLen)
        {
            // ioBuffer exhausted: account for consumed chars, then refill.
            offset += dataLen;
            // Fast path for DocumentsWriter's reusable string reader;
            // otherwise read through the generic TextReader interface.
            dataLen = input is Lucene.Net.Index.DocumentsWriter.ReusableStringReader
                ? ((Lucene.Net.Index.DocumentsWriter.ReusableStringReader) input).Read(ioBuffer)
                : input.Read((System.Char[]) ioBuffer, 0, ioBuffer.Length);
            if (dataLen <= 0)
            {
                // End of input: emit a pending partial token, else signal EOF.
                if (length > 0)
                    break;
                else
                    return null;
            }
            bufferIndex = 0;
        }
        char c = ioBuffer[bufferIndex++];
        if (IsTokenChar(c))
        {
            // if it's a token char
            if (length == 0)
                // start of token: bufferIndex was already advanced, hence -1
                start = offset + bufferIndex - 1;
            else if (length == buffer.Length)
                buffer = token.ResizeTermBuffer(1 + length);
            buffer[length++] = Normalize(c); // buffer it, normalized
            if (length == MAX_WORD_LEN)      // buffer overflow!
                break;
        }
        else if (length > 0)
            // at non-Letter w/ chars
            break; // return 'em
    }
    token.termLength = length;
    token.startOffset = start;
    token.endOffset = start + length;
    return token;
}
/// <summary>
/// Returns each buffered input character as its own one-character,
/// lower-cased token. The whole input is read into <c>ioBuffer</c> on
/// the first call; null is returned once all characters are emitted.
/// </summary>
/// <param name="token">Token instance to fill and return.</param>
/// <returns>The next single-character token, or null when exhausted.</returns>
public override Token Next(Token token)
{
    token.Clear();
    if (start == 0)
    {
        // Lazily fill ioBuffer from the input on first use.
        length = input.Read((System.Char[]) ioBuffer, 0, ioBuffer.Length);
        if (length <= 0)
        {
            return null;
        }
    }
    if (start == length)
    {
        // Every buffered character has already been emitted.
        return null;
    }
    token.SetTermBuffer(ioBuffer, start++, 1);
    // NOTE(review): Char.ToLower is culture-sensitive; confirm invariant
    // casing is not required here.
    token.termBuffer[0] = System.Char.ToLower(token.termBuffer[0]);
    return token;
}
/// <summary>
/// Emits the entire remaining input as one token, reusing
/// <paramref name="result"/>. Only the first call produces a token;
/// afterwards null is returned.
/// </summary>
/// <param name="result">Token instance to fill and return.</param>
/// <returns>The filled token on the first call, otherwise null.</returns>
public override Token Next(Token result)
{
    if (done)
    {
        // Single-token stream: nothing left to emit.
        return null;
    }
    done = true;
    result.Clear();
    char[] termBuf = result.TermBuffer();
    int filled = 0;
    for (;;)
    {
        int read = input.Read(termBuf, filled, termBuf.Length - filled);
        if (read <= 0)
        {
            break;
        }
        filled += read;
        if (filled == termBuf.Length)
        {
            // Out of room but possibly more input; grow and keep reading.
            termBuf = result.ResizeTermBuffer(1 + termBuf.Length);
        }
    }
    result.termLength = filled;
    return result;
}
/// <summary>
/// Returns each buffered input character as its own one-character,
/// lower-cased token. The input is read into <c>ioBuffer</c> in one shot
/// on the first call; null is returned once all characters are emitted.
/// </summary>
/// <param name="token">Token instance to fill and return.</param>
/// <returns>The next single-character token, or null when exhausted.</returns>
public override Token Next(Token token)
{
    token.Clear();
    // First call: fill ioBuffer; bail out immediately on an empty input.
    if (start == 0 && (length = input.Read((System.Char[]) ioBuffer, 0, ioBuffer.Length)) <= 0)
    {
        return null;
    }
    if (start == length)
    {
        // All buffered characters have been consumed.
        return null;
    }
    token.SetTermBuffer(ioBuffer, start, 1);
    start++;
    // NOTE(review): Char.ToLower is culture-sensitive; confirm invariant
    // casing is not required here.
    token.termBuffer[0] = System.Char.ToLower(token.termBuffer[0]);
    return token;
}
/// <summary>
/// Emits the entire remaining input as one token, reusing
/// <paramref name="reusableToken"/>. Only the first call produces a
/// token; afterwards null is returned.
/// </summary>
/// <param name="reusableToken">Non-null token instance to fill and return.</param>
/// <returns>The filled token on the first call, otherwise null.</returns>
public override Token Next(/* in */ Token reusableToken)
{
    System.Diagnostics.Debug.Assert(reusableToken != null);
    if (done)
    {
        // Single-token stream: nothing left to emit.
        return null;
    }
    done = true;
    reusableToken.Clear();
    char[] termBuf = reusableToken.TermBuffer();
    int filled = 0;
    for (;;)
    {
        int read = input.Read(termBuf, filled, termBuf.Length - filled);
        if (read <= 0)
        {
            break;
        }
        filled += read;
        if (filled == termBuf.Length)
        {
            // Out of room but possibly more input; grow and keep reading.
            termBuf = reusableToken.ResizeTermBuffer(1 + termBuf.Length);
        }
    }
    reusableToken.SetTermLength(filled);
    return reusableToken;
}
// AttributeImpl
/// <summary>Resets this attribute by clearing the wrapped delegate.</summary>
public override void Clear()
{
    delegate_Renamed.Clear();
}
/// <summary>
/// Scans the input for the next maximal run of token characters
/// (as decided by <c>IsTokenChar</c>), normalizes each character via
/// <c>Normalize</c>, and returns it in <paramref name="token"/> with
/// start/end offsets set directly on the token's fields. Returns null
/// at end of input.
/// </summary>
/// <param name="token">Token instance to fill and return.</param>
/// <returns>The next token, or null when the input is exhausted.</returns>
public override Token Next(Token token)
{
    token.Clear();
    int length = 0;                 // chars accumulated for the current token
    int start = bufferIndex;        // start offset of the current token
    char[] buffer = token.TermBuffer();
    while (true)
    {
        if (bufferIndex >= dataLen)
        {
            // ioBuffer exhausted: account for consumed chars, then refill.
            offset += dataLen;
            // Fast path for DocumentsWriter's reusable string reader;
            // otherwise read through the generic TextReader interface.
            dataLen = input is Lucene.Net.Index.DocumentsWriter.ReusableStringReader
                ? ((Lucene.Net.Index.DocumentsWriter.ReusableStringReader) input).Read(ioBuffer)
                : input.Read((System.Char[]) ioBuffer, 0, ioBuffer.Length);
            if (dataLen <= 0)
            {
                // End of input: emit a pending partial token, else signal EOF.
                if (length > 0)
                {
                    break;
                }
                else
                {
                    return (null);
                }
            }
            bufferIndex = 0;
        }
        char c = ioBuffer[bufferIndex++];
        if (IsTokenChar(c))
        {
            // if it's a token char
            if (length == 0)
            {
                // start of token: bufferIndex was already advanced, hence -1
                start = offset + bufferIndex - 1;
            }
            else if (length == buffer.Length)
            {
                buffer = token.ResizeTermBuffer(1 + length);
            }
            buffer[length++] = Normalize(c); // buffer it, normalized
            if (length == MAX_WORD_LEN)
            {
                // buffer overflow!
                break;
            }
        }
        else if (length > 0)
        {
            // at non-Letter w/ chars
            break; // return 'em
        }
    }
    token.termLength = length;
    token.startOffset = start;
    token.endOffset = start + length;
    return (token);
}
/// <summary>
/// Scans the input for the next maximal run of token characters
/// (as decided by <c>IsTokenChar</c>), normalizes each character via
/// <c>Normalize</c>, and returns it in <paramref name="reusableToken"/>
/// with start/end offsets set. Returns null at end of input.
/// </summary>
/// <param name="reusableToken">Non-null token instance to fill and return.</param>
/// <returns>The next token, or null when the input is exhausted.</returns>
public override Token Next(/* in */ Token reusableToken)
{
    System.Diagnostics.Debug.Assert(reusableToken != null);
    reusableToken.Clear();
    int length = 0;                 // chars accumulated for the current token
    int start = bufferIndex;        // start offset of the current token
    char[] buffer = reusableToken.TermBuffer();
    while (true)
    {
        if (bufferIndex >= dataLen)
        {
            // ioBuffer exhausted: account for consumed chars, then refill.
            offset += dataLen;
            // Fast path for the reusable string reader used during indexing;
            // otherwise read through the generic TextReader interface.
            dataLen = input is Lucene.Net.Index.ReusableStringReader
                ? ((Lucene.Net.Index.ReusableStringReader) input).Read(ioBuffer)
                : input.Read((System.Char[]) ioBuffer, 0, ioBuffer.Length);
            if (dataLen <= 0)
            {
                // End of input: emit a pending partial token, else signal EOF.
                if (length > 0)
                {
                    break;
                }
                else
                {
                    return (null);
                }
            }
            bufferIndex = 0;
        }
        char c = ioBuffer[bufferIndex++];
        if (IsTokenChar(c))
        {
            // if it's a token char
            if (length == 0)
            {
                // start of token: bufferIndex was already advanced, hence -1
                start = offset + bufferIndex - 1;
            }
            else if (length == buffer.Length)
            {
                buffer = reusableToken.ResizeTermBuffer(1 + length);
            }
            buffer[length++] = Normalize(c); // buffer it, normalized
            if (length == MAX_WORD_LEN)
            {
                // buffer overflow!
                break;
            }
        }
        else if (length > 0)
        {
            // at non-Letter w/ chars
            break; // return 'em
        }
    }
    reusableToken.SetTermLength(length);
    reusableToken.SetStartOffset(start);
    reusableToken.SetEndOffset(start + length);
    return (reusableToken);
}
/// <summary>
/// Consumes <paramref name="ts"/> to exhaustion, asserting that each
/// token's payload carries a single byte counting up from 1.
/// </summary>
/// <param name="ts">Token stream whose payloads are verified.</param>
internal virtual void VerifyPayload(TokenStream ts)
{
    Token token = new Token();
    byte expected = 1;
    while (true)
    {
        token.Clear();
        token = ts.Next(token);
        if (token == null)
        {
            break;
        }
        Assert.AreEqual(expected, token.GetPayload().ToByteArray()[0]);
        expected++;
    }
}