// Advance the parser: shift the lookahead token into t and fetch a new
// lookahead from the scanner, skipping pragma tokens (kind > maxT).
// Each successfully consumed regular token increases errDist.
void Get() {
    while (true) {
        t = la;
        la = scanner.Scan();
        if (la.kind <= maxT) {
            ++errDist;
            break;
        }
        // Pragma token: restore the previous lookahead and try again.
        la = t;
    }
}
// Make sure that peeking restarts at the current scan position.
public void ResetPeek() => pt = tokens;
// Entry point of the generated parser: primes the lookahead token,
// parses one TokenDeclarations construct, then rewinds the scanner so
// the caller can continue from the same position.
void _Parse() {
    la = new Token { val = "" };
    Get();
    TokenDeclarations();
    //>>>>>[mb]>>>>>>>
    // Expect(0) was removed because of problems with CREATE TRIGGER:
    // we decided to process one construct from the grammar at a time in an
    // external class (currently SqlAccessories.ScriptAnalyze.Create.Analyzer,
    // method FillDataBase()), so the parser must not complain about
    // remaining words where it would otherwise require end-of-file.
    // Expect(0);
    //<<<<<<<<<<<<<<<<
    scanner.RollBack();
}
// Peek at the next token without consuming it; pragma tokens
// (kind > maxT) are skipped. Successive calls look further ahead.
public Token Peek() {
    if (pt.next == null) {
        // Nothing buffered past pt yet: pull fresh tokens from the
        // scanner and append them to the token chain.
        while (true) {
            pt = pt.next = NextToken();
            if (pt.FullValue == null) pt.FullValue = pt.val;
            if (pt.kind <= maxT) break; // skip pragmas
        }
    } else {
        // Replay tokens buffered by an earlier peek.
        // NOTE(review): this branch assumes the buffered chain always
        // ends with a non-pragma token; if it could end on a pragma,
        // pt.next would become null mid-walk — confirm with callers.
        while (true) {
            pt = pt.next;
            if (pt.kind <= maxT) break;
        }
    }
    return pt;
}
// Return the next token (possibly one already buffered while peeking).
public Token Scan() {
    // End of input ("\0") already reached and nothing buffered:
    // keep handing back the same EOF token.
    if (t != null && t.val == "\0" && tokens.next == null)
        return t;

    if (tokens.next != null) {
        // Serve a token buffered by an earlier Peek().
        pt = tokens = tokens.next;
        return tokens;
    }

    // Nothing buffered: fetch a fresh token straight from the scanner.
    // Note it is returned without being linked into the token chain.
    Token fresh = NextToken();
    if (fresh.FullValue == null)
        fresh.FullValue = fresh.val;
    return fresh;
}
//<<<<<<<[mb]<<<<<<<<<<<<<<
// Scan one token from the input. Handles three shapes:
//   * N'...' Unicode string literals (kind 3) and '...' string literals
//     (kind 2), including doubled-quote escapes via the goto loops;
//   * everything else through the DFA driven by the `start` table
//     (identifiers, single-char punctuation, EOF).
// Saves the scanner position first so RollBack() can undo this call.
Token NextToken() {
    //>>>>>>>[mb]>>>>>>>>>>>>>>
    // Save the parameters needed for rolling back (see RollBack()).
    this.ch_back = this.ch;
    this.pos_back = this.pos;
    this.col_back = this.col;
    this.line_back = this.line;
    this.t_back = this.t;
    //<<<<<<<[mb]<<<<<<<<<<<<<<
    // Skip whitespace: space, TAB (9), LF (10), CR (13).
    while (ch == ' ' || ch >= 9 && ch <= 10 || ch == 13 ) NextCh();
    t = new Token();
    t.pos = pos; t.col = col; t.line = line;
    // Pick the DFA start state for the current character (0 = no match).
    int state;
    if (start.ContainsKey(ch)) { state = (int) start[ch]; } else { state = 0; }
    tlen = 0;
    if (ch == 'n' || ch == 'N') {
        AddCh();
        if (ch == '\'') {
            // N'...' literal. The goto re-enters the loop when a doubled
            // quote ('') is found, treating it as an escaped quote.
            apostrophN: this.AddCh();
            while (ch != '\'' && ch != '\0' && ch != Buffer.EOF) this.AddCh();
            this.AddCh();
            if (ch == '\'') { goto apostrophN; }
            t.kind = 3;
            // val drops the leading N; FullValue re-prepends it.
            t.val = new String(tval, 1, tlen-1);
            t.FullValue = "N" + new String(tval, 1, tlen - 1);
            return t;
        }
        // Not followed by a quote: fall through to the DFA with the
        // 'n'/'N' already consumed into tval.
    } else if (ch == '\'') {
        // Plain '...' literal, same doubled-quote escape handling.
        apostroph: this.AddCh();
        while (ch != '\'' && ch != '\0' && ch != Buffer.EOF) this.AddCh();
        this.AddCh();
        if (ch == '\'' && ch != '\0') { goto apostroph; }
        t.kind = 2;
        t.val = new String(tval, 0, tlen);
        return t;
    } else AddCh();
    switch (state) {
        case -1: { t.kind = eofSym; break; } // NextCh already done
        case 0: { t.kind = noSym; break; }   // NextCh already done
        // State 1: identifier — letters, digits and '_' continue it.
        case 1:
            if (ch >= '0' && ch <= '9' || ch == '_' || char.IsLetter((char)ch)) {AddCh(); goto case 1;}
            else {t.kind = 1; break;}
        // States 2..8: single-character tokens; kind equals the state.
        case 2: {t.kind = 2; break;}
        case 3: {t.kind = 3; break;}
        case 4: {t.kind = 4; break;}
        case 5: {t.kind = 5; break;}
        case 6: {t.kind = 6; break;}
        case 7: {t.kind = 7; break;}
        case 8: {t.kind = 8; break;}
    }
    t.val = new String(tval, 0, tlen);
    return t;
}
//>>>>>>>[mb]>>>>>>>>>>>>>>
// Restore the scanner to the state saved at the start of the most
// recent NextToken() call and discard any buffered look-ahead tokens.
public void RollBack() {
    ch = ch_back;
    pos = pos_back;
    buffer.Pos = pos_back;
    col = col_back;
    line = line_back;
    t = tokens = t_back;
    if (tokens != null)
        tokens.next = null; // drop tokens buffered after the saved point
}
////Old constructor. Does not convert the data to Unicode!!!
//public Scanner(Stream s)
//{
//	buffer = new Buffer(s, true);
//	Init();
//}

// Initialize scanner state: read the first character, detect an optional
// byte-order mark (re-wrapping `buffer` in a decoding buffer when found),
// and build the DFA start table used by NextToken().
void Init() {
    pos = -1; line = 1; col = 0;
    oldEols = 0;
    NextCh();
    if (ch == 0xEF) { // check optional byte order mark for UTF-8 (EF BB BF)
        NextCh(); int ch1 = ch;
        NextCh(); int ch2 = ch;
        if (ch1 != 0xBB || ch2 != 0xBF) {
            throw new AnalyzingException(String.Format("Illegal byte order mark: EF {0,2:X} {1,2:X}", ch1, ch2));
        }
        buffer = new UTF8Buffer(buffer); col = 0;
        NextCh();
    }
    // 0xFF means the text was re-encoded to Unicode 16be.
    // NOTE(review): a 0xFF lead byte is normally the UTF-16 *little*-endian
    // BOM (FF FE); FE FF is big-endian — confirm UTF16beBuffer is the
    // right wrapper here.
    else if(ch == 0xFF) {
        buffer = new UTF16beBuffer(buffer); col = 0;
        NextCh();
    }
    // Map start characters to DFA states (keys are character codes).
    start = new Hashtable(128);
    for (int i = 95; i <= 95; ++i) start[i] = 1;   // '_'        -> identifier
    for (int i = 97; i <= 122; ++i) start[i] = 1;  // 'a'..'z'   -> identifier
    start[124] = 2;        // '|'
    start[40] = 3;         // '('
    start[41] = 4;         // ')'
    start[91] = 5;         // '['
    start[93] = 6;         // ']'
    start[123] = 7;        // '{'
    start[125] = 8;        // '}'
    start[Buffer.EOF] = -1;
    pt = tokens = new Token(); // first token is a dummy
}