public void start()
{
    // Streams the input through fixed-size pages and parses each one.
    // FIX: the reader was leaked when Read/parse threw — Close/Dispose now
    // sit in try/finally so the resource is released on every path.
    var fs = new myReader();
    fs.Open(uri);
    try
    {
        progress_total = fs.size;
        fs.Seek(0, SeekOrigin.Begin);
        // Each page is read into `block` just past the reserved block_prefix area.
        for (int nRead = fs.Read(block, block_prefix, page_size);
             nRead > 0;
             nRead = fs.Read(block, block_prefix, page_size))
        {
#if bg_parse
            // Hand the filled page to the background parser and allocate a fresh buffer,
            // since myBlock takes ownership of the current one.
            m_block.push(new myBlock(nRead, block));
            m_nBlock++;
            block = new byte[page_size + block_prefix];
#else
            parseBlock(nRead, block);
#endif
        }
        // Flush the final record if the input did not end on an EOL token.
        if (type != myTkType.t_eol)
        {
            type = myTkType.t_eol;
            executeRule();
        }
        fs.Close();
    }
    finally
    {
        // Released even when an exception unwinds through the loop above.
        fs.Dispose();
    }
}
public myToken(char c)
{
    // Classify a single character into its token type; everything that is
    // not a double quote or a comma is treated as ordinary content.
    if (c == '"')
        type = myTkType.t_dblq;
    else if (c == ',')
        type = myTkType.t_comma;
    else
        type = myTkType.t_other;
    val = c;
}
void tokenParse23()
{
    // Emit a BMP code point (fits in one UTF-16 code unit) and fire the rule.
    type = myTkType.t_other;
    Debug.Assert(wchr < 0x10000);
    char unit = (char)wchr;
    res.add(unit);
    executeRule();
}
void tokenParse4()
{
    // Emit a supplementary-plane code point as a UTF-16 surrogate pair,
    // then fire the rule once per emitted code unit.
    type = myTkType.t_other;
    Debug.Assert(wchr >= 0x10000);
    wchr -= 0x10000;                               // rebase into the 20-bit surrogate range
    char high = (char)(0xD800 | (wchr >> 10));     // high surrogate: top 10 bits
    char low = (char)(0xDC00 | (wchr & 0x3FF));    // low surrogate: bottom 10 bits
    res.add(high);
    res.add(low);
    executeRule();  // first code unit
    executeRule();  // second code unit
}
public myToken(char c)
{
    // Map the character to its token type: double quote and comma are the
    // only special cases, everything else is plain content.
    type = c == '"' ? myTkType.t_dblq
         : c == ',' ? myTkType.t_comma
         : myTkType.t_other;
    val = c;
}
void tokenParse()
{
    // Record the current code point, classify it, then fire the rule.
    res.add((char)wchr);
    if (wchr == '"')
        type = myTkType.t_dblq;
    else if (wchr == ',')
        type = myTkType.t_comma;
    else if (wchr == '\n' || wchr == '\r')
        type = myTkType.t_eol;  // either line-end character counts as EOL
    else
        type = myTkType.t_other;
    executeRule();
}