/// <summary>
/// Creates an empty token stream: zero tokens recorded, fresh head blocks
/// allocated for both the token chain and the string chain, and all read
/// cursors rewound via Reset().
/// </summary>
internal TokenizerStream()
{
    this.m_countTokens = 0;
    this.m_headTokens = new TokenizerShortBlock();
    this.m_headStrings = new TokenizerStringBlock();
    this.Reset();
}
/// <summary>
/// Rewinds the read cursors to the head of both block chains so the stream
/// can be re-read from the beginning. No stored data is discarded.
/// </summary>
internal void Reset()
{
    m_lastTokens = null;
    m_currentTokens = m_headTokens;
    m_currentStrings = m_headStrings;
    m_indexTokens = 0;
    m_indexStrings = 0;
}
/// <summary>
/// Returns the next token in the stream, stepping to the next linked block
/// when the current block's array has been fully consumed.
/// </summary>
internal short GetNextFullToken()
{
    bool blockExhausted = this.m_indexTokens >= this.m_currentTokens.m_block.Length;
    if (blockExhausted)
    {
        // Remember the exhausted block and advance to its successor.
        this.m_lastTokens = this.m_currentTokens;
        this.m_currentTokens = this.m_currentTokens.m_next;
        this.m_indexTokens = 0;
    }
    short token = this.m_currentTokens.m_block[this.m_indexTokens];
    this.m_indexTokens++;
    return token;
}
// Reads and returns the next token, hopping to the next linked block when the
// current block's array is exhausted. NOTE(review): m_next is dereferenced without
// a null check -- callers are presumably expected to stop after m_countTokens
// reads; confirm that invariant at the call sites.
internal short GetNextFullToken() { if (this.m_currentTokens.m_block.Length <= this.m_indexTokens) { this.m_lastTokens = this.m_currentTokens; this.m_currentTokens = this.m_currentTokens.m_next; this.m_indexTokens = 0; } return(this.m_currentTokens.m_block[this.m_indexTokens++]); }
/// <summary>
/// Appends one token at the write cursor, allocating and linking a fresh
/// block first if the current block has no room left.
/// </summary>
/// <param name="token">The token value to append.</param>
internal void AddToken(short token)
{
    bool currentBlockFull = this.m_indexTokens >= this.m_currentTokens.m_block.Length;
    if (currentBlockFull)
    {
        var freshBlock = new TokenizerShortBlock();
        this.m_currentTokens.m_next = freshBlock;
        this.m_currentTokens = freshBlock;
        this.m_indexTokens = 0;
    }
    this.m_countTokens++;
    this.m_currentTokens.m_block[this.m_indexTokens] = token;
    this.m_indexTokens++;
}
/// <summary>
/// Pops the next token off the stream, following the block chain when the
/// current block has been read to its end.
/// </summary>
internal short GetNextFullToken()
{
    if (m_indexTokens >= m_currentTokens.m_block.Length)
    {
        // Current block exhausted: keep a reference to it and move on.
        m_lastTokens = m_currentTokens;
        m_currentTokens = m_currentTokens.m_next;
        m_indexTokens = 0;
    }
    return m_currentTokens.m_block[m_indexTokens++];
}
// Rewinds the read cursors to the head of both block chains so the stream can be
// re-read from the start; stored data is retained. In _DEBUG builds also clears
// m_bLastWasCStr (presumably a flag tracking whether the last token read was a
// C-string -- TODO confirm against the tokenizer's debug checks).
internal void Reset() { m_lastTokens = null; m_currentTokens = m_headTokens; m_currentStrings = m_headStrings; m_indexTokens = 0; m_indexStrings = 0; #if _DEBUG m_bLastWasCStr = false; #endif }
// Appends one token at the write cursor. When the current block's array is full,
// a new TokenizerShortBlock is linked onto the chain and the index restarts at 0.
// m_countTokens tracks the total number of tokens ever added.
internal void AddToken(short token) { if (this.m_currentTokens.m_block.Length <= this.m_indexTokens) { this.m_currentTokens.m_next = new TokenizerShortBlock(); this.m_currentTokens = this.m_currentTokens.m_next; this.m_indexTokens = 0; } this.m_countTokens++; this.m_currentTokens.m_block[this.m_indexTokens++] = token; }
/// <summary>
/// Stores one token at the current write position, extending the linked
/// block chain when the active block is full.
/// </summary>
/// <param name="token">The token value to store.</param>
internal void AddToken(short token)
{
    if (m_currentTokens.m_block.Length <= m_indexTokens)
    {
        // Active block is full: link in a fresh block and restart the index.
        m_currentTokens.m_next = new TokenizerShortBlock();
        m_currentTokens = m_currentTokens.m_next;
        m_indexTokens = 0;
    }
    m_countTokens++;
    m_currentTokens.m_block[m_indexTokens] = token;
    m_indexTokens++;
}
/// <summary>
/// Moves both read cursors back to the head blocks and zeroes the indices,
/// allowing the accumulated stream to be consumed again from the top.
/// </summary>
internal void Reset()
{
    this.m_indexTokens = 0;
    this.m_indexStrings = 0;
    this.m_lastTokens = null;
    this.m_currentTokens = this.m_headTokens;
    this.m_currentStrings = this.m_headStrings;
}
// Returns the next token, advancing m_currentTokens to the next linked block (and
// remembering the old one in m_lastTokens) when the current block is exhausted.
// NOTE(review): no null guard on m_next -- presumably callers never read past
// m_countTokens entries; verify.
internal short GetNextFullToken() { if (m_currentTokens.m_block.Length <= m_indexTokens) { m_lastTokens = m_currentTokens; m_currentTokens = m_currentTokens.m_next; m_indexTokens = 0; } return m_currentTokens.m_block[m_indexTokens++]; }
/// <summary>
/// Restores the stream's read state to the beginning of both chains.
/// In _DEBUG builds the m_bLastWasCStr flag is also cleared (assumed to
/// track whether the previous token was a C-string -- confirm).
/// </summary>
internal void Reset()
{
    m_lastTokens = null;
    m_currentTokens = m_headTokens;
    m_currentStrings = m_headStrings;
    m_indexTokens = 0;
    m_indexStrings = 0;
#if _DEBUG
    m_bLastWasCStr = false;
#endif
}
/// <summary>
/// Writes one token into the stream, chaining a newly allocated block when
/// the current one has been filled.
/// </summary>
/// <param name="token">The token value to write.</param>
internal void AddToken( short token )
{
    bool noRoomLeft = m_indexTokens >= m_currentTokens.m_block.Length;
    if (noRoomLeft)
    {
        var nextBlock = new TokenizerShortBlock();
        m_currentTokens.m_next = nextBlock;
        m_currentTokens = nextBlock;
        m_indexTokens = 0;
    }
    m_countTokens++;
    m_currentTokens.m_block[m_indexTokens++] = token;
}
// Constructs an empty stream: token count zeroed, fresh head blocks allocated
// for the token chain and the string chain, then all cursors rewound via Reset().
internal TokenizerStream() { m_countTokens = 0; m_headTokens = new TokenizerShortBlock(); m_headStrings = new TokenizerStringBlock(); Reset(); }