////////////////////////////////////////////////////////////////
// Dump ALL properties
////////////////////////////////////////////////////////////////

/// <summary>
/// Reads the document to the end, writing every reader property of each
/// node to CError as a tab/pipe-separated record, one line per node.
/// </summary>
public void DumpAll()
{
    // Emits one "label = value" cell followed by the column separator.
    void WriteCell(string label, object value) => CError.Write(label + " = " + value + "\t|\t");

    while (Read())
    {
        WriteCell("NodeType", NodeType);
        WriteCell("NodeName", Name);
        WriteCell("NodeLocalName", LocalName);
        WriteCell("NodeNamespace", NamespaceURI);
        WriteCell("NodePrefix", Prefix);
        WriteCell("NodeHasValue", HasValue);
        WriteCell("NodeValue", Value);
        WriteCell("NodeDepth", Depth);
        WriteCell("IsEmptyElement", IsEmptyElement);
        WriteCell("IsDefault", IsDefault);
        WriteCell("XmlSpace", XmlSpace);
        WriteCell("XmlLang", XmlLang);
        WriteCell("AttributeCount", AttributeCount);
        WriteCell("HasAttributes", HasAttributes);
        WriteCell("EOF", EOF);
        WriteCell("ReadState", ReadState);
        CError.WriteLine();
    }
}
/// <summary>
/// Builds the end-of-file transition rule: it fires when the context
/// reports end of input, routes through <paramref name="throughState"/>,
/// and lands on <paramref name="newState"/> (or on the through-state when
/// no explicit new state is supplied). The <paramref name="eof"/> argument
/// only selects this overload; it is not otherwise used.
/// </summary>
private static Rule Rule(S oldState, EOF eof, S throughState, S newState = null)
{
    return new Rule(
        oldState: oldState,
        description: "End of file",
        test: c => c.endOfFile,
        throughState: throughState,
        newState: newState ?? throughState);
}
/// <inheritdoc/>
public override int GetHashCode()
{
    unchecked
    {
        // Standard 17/23 hash-combining over all equality-relevant members.
        int hash = 17;
        hash = hash * 23 + Unexpected.GetHashCode();
        hash = hash * 23 + EOF.GetHashCode();
        hash = hash * 23 + Expected.GetHashCode();
        hash = hash * 23 + ErrorPos.GetHashCode();
        // BUG FIX: '+' binds tighter than '??', so the previous
        // 'hash * 23 + Message?.GetHashCode() ?? 0' applied the fallback to
        // the WHOLE sum — a null Message collapsed the accumulated hash to 0.
        // Parenthesizing limits the null fallback to the message term.
        hash = hash * 23 + (Message?.GetHashCode() ?? 0);
        return hash;
    }
}
/// <summary>
/// Consume the next token found in the stream.
/// </summary>
/// <returns>The next token in the stream.</returns>
public Token Consume()
{
    // If we've previously peeked a token, return that one first.
    if (peeked != null)
    {
        var p = peeked;
        peeked = null;
        return (p);
    }

    // Lazily start the lexer state machine on first use.
    enumerator ??= Start();
    Token token;
    var pos = Parser.Position;
    // NOTE(review): 'i' is a fresh local on every call, so the
    // 'i++ >= 10' guard below can never be true (i is always 0 when the
    // check runs). The infinite-loop detection looks like it was meant to
    // use a counter that persists across Consume() calls — confirm and fix.
    var i = 0;
    try
    {
        // Call the current state
        if (enumerator.MoveNext())
        {
            token = enumerator.Current;
        }
        else
        {
            // State machine exhausted: synthesize an end-of-file token.
            token = new EOF(Parser.Position);
        }

        // Intended stuck-lexer detection: position unchanged after a step.
        if (Parser.Position == pos && i++ >= 10)
        {
            throw new LexerException(pos, "Infinite loop detected");
        }
        else
        {
            i = 0;
        }
    }
    catch (Exception ex) when (ex is not LexerException)
    {
        // Wrap unexpected failures; lexer errors propagate unchanged.
        throw new LexerException(pos, ex);
    }
    return (token);
}
/// <summary>
/// Visitor hook for the end-of-file node: performs the base handling and
/// then reports completion of the semantic analysis pass.
/// </summary>
public override void CaseEOF(EOF node)
{
    base.CaseEOF(node);
    Console.WriteLine("Semantic Analyzation complete.");
}
/// <summary>Clears the "more input" flag once the EOF node is visited.</summary>
public override void CaseEOF(EOF node) => last = false;
/// <summary>
/// Runs the generated DFA over the input and returns the next token using
/// longest-match: the most recent accepting state is remembered, and when
/// the DFA dies the over-read characters are pushed back and the token for
/// that state is materialized. Returns an EOF token at end of input and
/// throws a LexerException for input no token matches.
/// </summary>
protected virtual Token GetToken()
{
    int dfa_state = 0;
    int start_pos = pos;
    int start_line = line;
    int accept_state = -1;
    int accept_token = -1;
    int accept_length = -1;
    int accept_pos = -1;
    int accept_line = -1;
    int[][][] gotoTable = Lexer.gotoTable[currentState.id()];
    int[] accept = Lexer.accept[currentState.id()];
    text.Length = 0;

    // Finalizes a match: push back characters read past the accepted
    // lexeme, restore position/line to the accept point, return the token.
    Token Finish(Token token)
    {
        PushBack(accept_length);
        pos = accept_pos;
        line = accept_line;
        return token;
    }

    while (true)
    {
        int c = GetChar();
        if (c != -1)
        {
            // Track line/column; a CR followed by LF counts as one newline.
            switch (c)
            {
                case 10:
                    if (cr)
                    {
                        cr = false;
                    }
                    else
                    {
                        line++;
                        pos = 0;
                    }
                    break;
                case 13:
                    line++;
                    pos = 0;
                    cr = true;
                    break;
                default:
                    pos++;
                    cr = false;
                    break;
            }
            text.Append((char)c);

            // Follow the DFA transition for 'c': binary search over the
            // [lowChar, highChar, target] ranges of the current state's row.
            do
            {
                int oldState = (dfa_state < -1) ? (-2 - dfa_state) : dfa_state;
                dfa_state = -1;
                int[][] row = gotoTable[oldState];
                int low = 0;
                int high = row.Length - 1;
                while (low <= high)
                {
                    int middle = (low + high) / 2;
                    int[] range = row[middle];
                    if (c < range[0])
                    {
                        high = middle - 1;
                    }
                    else if (c > range[1])
                    {
                        low = middle + 1;
                    }
                    else
                    {
                        dfa_state = range[2];
                        break;
                    }
                }
            } while (dfa_state < -1);
        }
        else
        {
            dfa_state = -1;
        }

        if (dfa_state >= 0)
        {
            // DFA still alive: remember the latest accepting state seen.
            if (accept[dfa_state] != -1)
            {
                accept_state = dfa_state;
                accept_token = accept[dfa_state];
                accept_length = text.Length;
                accept_pos = pos;
                accept_line = line;
            }
        }
        else if (accept_state != -1)
        {
            // DFA died with a recorded match: build the token for the last
            // accepting state (line/column reported 1-based).
            switch (accept_token)
            {
                case 0: return Finish(New0(GetText(accept_length), start_line + 1, start_pos + 1));
                case 1: return Finish(New1(GetText(accept_length), start_line + 1, start_pos + 1));
                case 2: return Finish(New2(GetText(accept_length), start_line + 1, start_pos + 1));
                case 3: return Finish(New3(GetText(accept_length), start_line + 1, start_pos + 1));
                case 4: return Finish(New4(GetText(accept_length), start_line + 1, start_pos + 1));
                case 5: return Finish(New5(GetText(accept_length), start_line + 1, start_pos + 1));
                case 6: return Finish(New6(GetText(accept_length), start_line + 1, start_pos + 1));
                case 7: return Finish(New7(GetText(accept_length), start_line + 1, start_pos + 1));
                case 8: return Finish(New8(GetText(accept_length), start_line + 1, start_pos + 1));
                case 9: return Finish(New9(GetText(accept_length), start_line + 1, start_pos + 1));
                case 10: return Finish(New10(GetText(accept_length), start_line + 1, start_pos + 1));
                case 11: return Finish(New11(GetText(accept_length), start_line + 1, start_pos + 1));
            }
        }
        else if (text.Length > 0)
        {
            // Consumed characters but never reached an accepting state.
            throw new LexerException(
                "[" + (start_line + 1) + "," + (start_pos + 1) + "]" +
                " Unknown token: " + text);
        }
        else
        {
            // Clean end of input.
            EOF token = new EOF(start_line + 1, start_pos + 1);
            return token;
        }
    }
}
/// <summary>
/// Parses the CHARTSHEETCONTENT production from the BIFF record stream.
/// Optional parts are probed by peeking at the next record's type before
/// being consumed; the record order follows the grammar quoted below.
/// </summary>
public ChartSheetContentSequence(IStreamReader reader)
    : base(reader)
{
    // reset id counter for chart groups
    ChartFormatIdGenerator.Instance.StartNewChartsheetSubstream();
    ChartAxisIdGenerator.Instance.StartNewChartsheetSubstream();

    // CHARTSHEETCONTENT = [WriteProtect] [SheetExt] [WebPub] *HFPicture PAGESETUP PrintSize
    //   [HeaderFooter] [BACKGROUND] *Fbi *Fbi2 [ClrtClient] [PROTECTION] [Palette] [SXViewLink]
    //   [PivotChartBits] [SBaseRef] [MsoDrawingGroup] OBJECTS Units CHARTFOMATS SERIESDATA
    //   *WINDOW *CUSTOMVIEW [CodeName] [CRTMLFRT] EOF

    // Peek: true when the next record in the stream has the given type.
    bool NextIs(RecordType type) => BiffRecord.GetNextRecordType(reader) == type;

    // [WriteProtect]
    if (NextIs(RecordType.WriteProtect))
    {
        this.WriteProtect = (WriteProtect)BiffRecord.ReadRecord(reader);
    }
    // [SheetExt]
    if (NextIs(RecordType.SheetExt))
    {
        this.SheetExt = (SheetExt)BiffRecord.ReadRecord(reader);
    }
    // [WebPub]
    if (NextIs(RecordType.WebPub))
    {
        this.WebPub = (WebPub)BiffRecord.ReadRecord(reader);
    }
    // *HFPicture
    while (NextIs(RecordType.HFPicture))
    {
        this.HFPictures.Add((HFPicture)BiffRecord.ReadRecord(reader));
    }
    // PAGESETUP
    this.PageSetupSequence = new PageSetupSequence(reader);
    // PrintSize
    if (NextIs(RecordType.PrintSize))
    {
        this.PrintSize = (PrintSize)BiffRecord.ReadRecord(reader);
    }
    // [HeaderFooter]
    if (NextIs(RecordType.HeaderFooter))
    {
        this.HeaderFooter = (HeaderFooter)BiffRecord.ReadRecord(reader);
    }
    // [BACKGROUND] — introduced by a BkHim record.
    if (NextIs(RecordType.BkHim))
    {
        this.BackgroundSequence = new BackgroundSequence(reader);
    }
    // *Fbi
    this.Fbis = new List<Fbi>();
    while (NextIs(RecordType.Fbi))
    {
        this.Fbis.Add((Fbi)BiffRecord.ReadRecord(reader));
    }
    // *Fbi2
    this.Fbi2s = new List<Fbi2>();
    while (NextIs(RecordType.Fbi2))
    {
        this.Fbi2s.Add((Fbi2)BiffRecord.ReadRecord(reader));
    }
    // [ClrtClient]
    if (NextIs(RecordType.ClrtClient))
    {
        this.ClrtClient = (ClrtClient)BiffRecord.ReadRecord(reader);
    }
    // [PROTECTION]
    this.ProtectionSequence = new ProtectionSequence(reader);
    // [Palette]
    if (NextIs(RecordType.Palette))
    {
        this.Palette = (Palette)BiffRecord.ReadRecord(reader);
    }
    // [SXViewLink]
    if (NextIs(RecordType.SXViewLink))
    {
        this.SXViewLink = (SXViewLink)BiffRecord.ReadRecord(reader);
    }
    // [PivotChartBits]
    if (NextIs(RecordType.PivotChartBits))
    {
        this.PivotChartBits = (PivotChartBits)BiffRecord.ReadRecord(reader);
    }
    // [SBaseRef]
    if (NextIs(RecordType.SBaseRef))
    {
        this.SBaseRef = (SBaseRef)BiffRecord.ReadRecord(reader);
    }
    // [MsoDrawingGroup]
    if (NextIs(RecordType.MsoDrawingGroup))
    {
        this.MsoDrawingGroup = (MsoDrawingGroup)BiffRecord.ReadRecord(reader);
    }
    // OBJECTS
    this.ObjectsSequence = new ObjectsSequence(reader);
    // Units
    this.Units = (Units)BiffRecord.ReadRecord(reader);
    // CHARTFOMATS
    this.ChartFormatsSequence = new ChartFormatsSequence(reader);
    // SERIESDATA
    this.SeriesDataSequence = new SeriesDataSequence(reader);
    // *WINDOW — each window sequence starts with a Window2 record.
    this.WindowSequences = new List<WindowSequence>();
    while (NextIs(RecordType.Window2))
    {
        this.WindowSequences.Add(new WindowSequence(reader));
    }
    // *CUSTOMVIEW
    // CUSTOMVIEW seems to be totally optional, so consume custom views
    // until one of the closing records (CodeName / CrtMlFrt / EOF) shows up.
    this.CustomViewSequences = new List<CustomViewSequence>();
    while (!NextIs(RecordType.CodeName) && !NextIs(RecordType.CrtMlFrt) && !NextIs(RecordType.EOF))
    {
        this.CustomViewSequences.Add(new CustomViewSequence(reader));
    }
    // [CodeName]
    if (NextIs(RecordType.CodeName))
    {
        this.CodeName = (CodeName)BiffRecord.ReadRecord(reader);
    }
    // [CRTMLFRT]
    if (NextIs(RecordType.CrtMlFrt))
    {
        this.CrtMlfrtSequence = new CrtMlfrtSequence(reader);
    }
    // EOF
    this.EOF = (EOF)BiffRecord.ReadRecord(reader);
}
/// <summary>Default visitor behavior for the EOF node: delegates to the generic case.</summary>
public virtual void CaseEOF(EOF node) => DefaultCase(node);
/// <summary>
/// Runs the generated DFA over the input and returns the next token using
/// longest-match: the most recent accepting state is remembered, and when
/// the DFA dies the over-read characters are pushed back and the token for
/// that state is materialized. Returns an EOF token at end of input and
/// throws a LexerException for input no token matches.
/// </summary>
protected virtual Token GetToken()
{
    int dfa_state = 0;
    int start_pos = pos;
    int start_line = line;
    int accept_state = -1;
    int accept_token = -1;
    int accept_length = -1;
    int accept_pos = -1;
    int accept_line = -1;
    int[][][] gotoTable = Lexer.gotoTable[currentState.id()];
    int[] accept = Lexer.accept[currentState.id()];
    text.Length = 0;

    // Finalizes a match: push back characters read past the accepted
    // lexeme, restore position/line to the accept point, return the token.
    Token Finish(Token token)
    {
        PushBack(accept_length);
        pos = accept_pos;
        line = accept_line;
        return token;
    }

    while (true)
    {
        int c = GetChar();
        if (c != -1)
        {
            // Track line/column; a CR followed by LF counts as one newline.
            switch (c)
            {
                case 10:
                    if (cr)
                    {
                        cr = false;
                    }
                    else
                    {
                        line++;
                        pos = 0;
                    }
                    break;
                case 13:
                    line++;
                    pos = 0;
                    cr = true;
                    break;
                default:
                    pos++;
                    cr = false;
                    break;
            }
            text.Append((char)c);

            // Follow the DFA transition for 'c': binary search over the
            // [lowChar, highChar, target] ranges of the current state's row.
            do
            {
                int oldState = (dfa_state < -1) ? (-2 - dfa_state) : dfa_state;
                dfa_state = -1;
                int[][] row = gotoTable[oldState];
                int low = 0;
                int high = row.Length - 1;
                while (low <= high)
                {
                    int middle = (low + high) / 2;
                    int[] range = row[middle];
                    if (c < range[0])
                    {
                        high = middle - 1;
                    }
                    else if (c > range[1])
                    {
                        low = middle + 1;
                    }
                    else
                    {
                        dfa_state = range[2];
                        break;
                    }
                }
            } while (dfa_state < -1);
        }
        else
        {
            dfa_state = -1;
        }

        if (dfa_state >= 0)
        {
            // DFA still alive: remember the latest accepting state seen.
            if (accept[dfa_state] != -1)
            {
                accept_state = dfa_state;
                accept_token = accept[dfa_state];
                accept_length = text.Length;
                accept_pos = pos;
                accept_line = line;
            }
        }
        else if (accept_state != -1)
        {
            // DFA died with a recorded match: build the token for the last
            // accepting state (line/column reported 1-based).
            switch (accept_token)
            {
                case 0: return Finish(New0(GetText(accept_length), start_line + 1, start_pos + 1));
                case 1: return Finish(New1(GetText(accept_length), start_line + 1, start_pos + 1));
                case 2: return Finish(New2(GetText(accept_length), start_line + 1, start_pos + 1));
                case 3: return Finish(New3(GetText(accept_length), start_line + 1, start_pos + 1));
                case 4: return Finish(New4(GetText(accept_length), start_line + 1, start_pos + 1));
                case 5: return Finish(New5(GetText(accept_length), start_line + 1, start_pos + 1));
                case 6: return Finish(New6(GetText(accept_length), start_line + 1, start_pos + 1));
                case 7: return Finish(New7(GetText(accept_length), start_line + 1, start_pos + 1));
                case 8: return Finish(New8(GetText(accept_length), start_line + 1, start_pos + 1));
                case 9: return Finish(New9(GetText(accept_length), start_line + 1, start_pos + 1));
                case 10: return Finish(New10(GetText(accept_length), start_line + 1, start_pos + 1));
                case 11: return Finish(New11(GetText(accept_length), start_line + 1, start_pos + 1));
                case 12: return Finish(New12(GetText(accept_length), start_line + 1, start_pos + 1));
                case 13: return Finish(New13(GetText(accept_length), start_line + 1, start_pos + 1));
                case 14: return Finish(New14(GetText(accept_length), start_line + 1, start_pos + 1));
                case 15: return Finish(New15(GetText(accept_length), start_line + 1, start_pos + 1));
                case 16: return Finish(New16(GetText(accept_length), start_line + 1, start_pos + 1));
                case 17: return Finish(New17(GetText(accept_length), start_line + 1, start_pos + 1));
                case 18: return Finish(New18(GetText(accept_length), start_line + 1, start_pos + 1));
                case 19: return Finish(New19(GetText(accept_length), start_line + 1, start_pos + 1));
                case 20: return Finish(New20(GetText(accept_length), start_line + 1, start_pos + 1));
                case 21: return Finish(New21(GetText(accept_length), start_line + 1, start_pos + 1));
                case 22: return Finish(New22(GetText(accept_length), start_line + 1, start_pos + 1));
                case 23: return Finish(New23(GetText(accept_length), start_line + 1, start_pos + 1));
                case 24: return Finish(New24(GetText(accept_length), start_line + 1, start_pos + 1));
                case 25: return Finish(New25(GetText(accept_length), start_line + 1, start_pos + 1));
                case 26: return Finish(New26(GetText(accept_length), start_line + 1, start_pos + 1));
                case 27: return Finish(New27(GetText(accept_length), start_line + 1, start_pos + 1));
                case 28: return Finish(New28(GetText(accept_length), start_line + 1, start_pos + 1));
                case 29: return Finish(New29(GetText(accept_length), start_line + 1, start_pos + 1));
                case 30: return Finish(New30(GetText(accept_length), start_line + 1, start_pos + 1));
            }
        }
        else if (text.Length > 0)
        {
            // Consumed characters but never reached an accepting state.
            throw new LexerException(
                "[" + (start_line + 1) + "," + (start_pos + 1) + "]" +
                " Unknown token: " + text);
        }
        else
        {
            // Clean end of input.
            EOF token = new EOF(start_line + 1, start_pos + 1);
            return token;
        }
    }
}
// Intentionally empty: this visitor needs no action on the EOF node.
public override void CaseEOF(EOF node) { }
/// <summary>Visitor hook for the EOF node: records state index 10.</summary>
public override void CaseEOF(EOF node) => index = 10;
/// <summary>
/// Generated LALR driver: shifts and reduces according to the action/goto
/// tables until ACCEPT, collecting tokens unknown to the table as "ignored"
/// and attaching them to the following recognized token. Throws a
/// ParserException when the tables signal ERROR.
/// </summary>
public Start Parse()
{
    Push(0, null);
    IList ign = null;

    // One reduction step: the freshly built RHS list (NewN has already
    // popped its constituents) is pushed under the goto entry for 'target'.
    void Reduce(int target, ArrayList list)
    {
        Push(GoTo(target), list);
    }

    while (true)
    {
        // Tokens the table does not know (index -1) are skimmed off into
        // the ignored-token list attached to the next recognized token.
        while (Index(lexer.Peek()) == -1)
        {
            if (ign == null)
            {
                ign = new TypedList(NodeCast.Instance);
            }
            ign.Add(lexer.Next());
        }
        if (ign != null)
        {
            ignoredTokens.SetIn(lexer.Peek(), ign);
            ign = null;
        }

        // Remember where we are for error reporting.
        last_pos = lexer.Peek().Pos;
        last_line = lexer.Peek().Line;
        last_token = lexer.Peek();

        // The state's first row is the default action; a binary search over
        // the remaining rows finds a lookahead-specific override.
        int index = Index(lexer.Peek());
        action[0] = actionTable[State()][0][1];
        action[1] = actionTable[State()][0][2];
        int low = 1;
        int high = actionTable[State()].Length - 1;
        while (low <= high)
        {
            int middle = (low + high) / 2;
            if (index < actionTable[State()][middle][0])
            {
                high = middle - 1;
            }
            else if (index > actionTable[State()][middle][0])
            {
                low = middle + 1;
            }
            else
            {
                action[0] = actionTable[State()][middle][1];
                action[1] = actionTable[State()][middle][2];
                break;
            }
        }

        switch (action[0])
        {
            case SHIFT:
                {
                    ArrayList list = new ArrayList();
                    list.Add(lexer.Next());
                    Push(action[1], list);
                    last_shift = action[1];
                }
                break;
            case REDUCE:
                // NewN() runs during argument evaluation (popping the RHS)
                // before Reduce calls GoTo, matching the original order.
                switch (action[1])
                {
                    case 0: Reduce(0, New0()); break;
                    case 1: Reduce(1, New1()); break;
                    case 2: Reduce(1, New2()); break;
                    case 3: Reduce(2, New3()); break;
                    case 4: Reduce(2, New4()); break;
                    case 5: Reduce(2, New5()); break;
                    case 6: Reduce(3, New6()); break;
                    case 7: Reduce(4, New7()); break;
                    case 8: Reduce(5, New8()); break;
                    case 9: Reduce(6, New9()); break;
                    case 10: Reduce(6, New10()); break;
                    case 11: Reduce(7, New11()); break;
                    case 12: Reduce(8, New12()); break;
                    case 13: Reduce(8, New13()); break;
                    case 14: Reduce(9, New14()); break;
                    case 15: Reduce(9, New15()); break;
                    case 16: Reduce(10, New16()); break;
                    case 17: Reduce(10, New17()); break;
                    case 18: Reduce(10, New18()); break;
                }
                break;
            case ACCEPT:
                {
                    // Whole input parsed: wrap the program node and EOF.
                    EOF node2 = (EOF)lexer.Next();
                    PProg node1 = (PProg)((ArrayList)Pop())[0];
                    return new Start(node1, node2);
                }
            case ERROR:
                throw new ParserException(last_token,
                    "[" + last_line + "," + last_pos + "] " +
                    errorMessages[errors[action[1]]]);
        }
    }
}
/// <summary>Visitor hook for the EOF node: records state index 28.</summary>
public override void CaseEOF(EOF node) => index = 28;