// Creates an empty token stream: allocates the head block of each linked
// list (tokens and strings) and positions the read cursor at the start.
internal TokenizerStream() {
    m_countTokens = 0;
    m_headTokens = new TokenizerShortBlock();
    m_headStrings = new TokenizerStringBlock();
    Reset();
}
// Returns the next stored token and advances the read cursor, stepping into
// the successor block once the current block is exhausted.
// NOTE(review): no bounds check against m_countTokens here — presumably the
// caller guarantees a token is available; confirm before relying on it.
internal short GetNextFullToken() {
    bool blockDrained = m_indexTokens >= m_currentTokens.m_block.Length;
    if (blockDrained) {
        // Remember the block we just finished, then move to its successor.
        m_lastTokens = m_currentTokens;
        m_currentTokens = m_currentTokens.m_next;
        m_indexTokens = 0;
    }
    short token = m_currentTokens.m_block[m_indexTokens];
    m_indexTokens++;
    return token;
}
// Rewinds the read cursors of both the token stream and the string stream to
// their head blocks. Stored data is kept; only the read position changes.
internal void Reset() {
    m_lastTokens = null;
    m_currentTokens = m_headTokens;
    m_currentStrings = m_headStrings;
    m_indexTokens = 0;
    m_indexStrings = 0;
#if _DEBUG
    m_bLastWasCStr = false;
#endif
}
// Appends one token to the write end of the stream, growing the linked list
// of fixed-size blocks when the current block is full.
internal void AddToken(short token) {
    bool blockFull = m_indexTokens >= m_currentTokens.m_block.Length;
    if (blockFull) {
        // Chain a fresh block onto the list and make it the write target.
        TokenizerShortBlock fresh = new TokenizerShortBlock();
        m_currentTokens.m_next = fresh;
        m_currentTokens = fresh;
        m_indexTokens = 0;
    }
    m_countTokens++;
    m_currentTokens.m_block[m_indexTokens] = token;
    m_indexTokens++;
}