/// <summary>
/// Visits an end-of-statement token; intentionally a no-op for this visitor
/// (EOS tokens carry no information this visitor needs to process).
/// </summary>
public void Visit(TokenEos tok) { }
/// <summary>
/// Parses the output of the .p procedure that exports the database info
/// and fills _dataBases / _sequences; it then updates the parser with the new info.
/// Each line of the dump is tab separated; the first character of the line is the
/// record type (H = database header, S = sequence, T = table, X = trigger,
/// I = index, F = field) — expected layouts are documented inline below.
/// </summary>
/// <param name="filePath">Path to the dump file; the method silently returns if it does not exist</param>
private static void Read(string filePath) {
    if (!File.Exists(filePath))
        return;
    _dataBases.Clear();
    _sequences.Clear();

    // the dump carries no source positions, so every parsed item shares this dummy token
    var defaultToken = new TokenEos(null, 0, 0, 0, 0);
    // records are hierarchical: S lines attach to the last H line, X/I/F lines to the last T line
    ParsedDataBase currentDb = null;
    ParsedTable currentTable = null;

    Utils.ForEachLine(filePath, null, (i, items) => {
        // guard blank lines so items[0] below cannot throw
        if (string.IsNullOrEmpty(items))
            return;
        var splitted = items.Split('\t');
        switch (items[0]) {
            case 'H':
                // base
                //#H|<Dump date ISO 8601>|<Dump time>|<Logical DB name>|<Physical DB name>|<Progress version>
                if (splitted.Length != 6)
                    return;
                currentDb = new ParsedDataBase(
                    splitted[3],
                    splitted[4],
                    splitted[5],
                    new List<ParsedTable>());
                _dataBases.Add(currentDb);
                break;

            case 'S':
                //#S|<Sequence name>|<CRC>
                if (splitted.Length != 3 || currentDb == null)
                    return;
                _sequences.Add(new CompletionItem {
                    DisplayText = splitted[1],
                    Type = CompletionType.Sequence,
                    SubString = currentDb.LogicalName
                });
                break;

            case 'T':
                // table
                //#T|<Table name>|<Table ID>|<Table CRC>|<Dump name>|<Description>
                if (splitted.Length != 6 || currentDb == null)
                    return;
                currentTable = new ParsedTable(
                    splitted[1],
                    defaultToken,
                    splitted[2],
                    splitted[3],
                    splitted[4],
                    splitted[5],
                    "",
                    false,
                    new List<ParsedField>(),
                    new List<ParsedIndex>(),
                    new List<ParsedTrigger>(),
                    "", "");
                currentDb.Tables.Add(currentTable);
                break;

            case 'X':
                // trigger
                //#X|<Parent table>|<Event>|<Proc name>|<Trigger CRC>
                if (splitted.Length != 5 || currentTable == null)
                    return;
                currentTable.Triggers.Add(new ParsedTrigger(
                    splitted[2],
                    splitted[3]));
                break;

            case 'I':
                // index
                //#I|<Parent table>|<Index name>|<Primary? 0/1>|<Unique? 0/1>|<Index CRC>|<Fields separated with %>
                if (splitted.Length != 7 || currentTable == null)
                    return;
                var flag = splitted[3].Equals("1") ? ParsedIndexFlag.Primary : ParsedIndexFlag.None;
                if (splitted[4].Equals("1"))
                    flag = flag | ParsedIndexFlag.Unique;
                currentTable.Indexes.Add(new ParsedIndex(
                    splitted[2],
                    flag,
                    splitted[6].Split('%').ToList()));
                break;

            case 'F':
                // field
                //#F|<Parent table>|<Field name>|<Type>|<Format>|<Order #>|<Mandatory? 0/1>|<Extent? 0/1>|<Part of index? 0/1>|<Part of PK? 0/1>|<Initial value>|<Description>
                if (splitted.Length != 12 || currentTable == null)
                    return;
                var flag2 = splitted[6].Equals("1") ? ParsedFieldFlag.Mandatory : ParsedFieldFlag.None;
                if (splitted[7].Equals("1"))
                    flag2 = flag2 | ParsedFieldFlag.Extent;
                if (splitted[8].Equals("1"))
                    flag2 = flag2 | ParsedFieldFlag.Index;
                if (splitted[9].Equals("1"))
                    flag2 = flag2 | ParsedFieldFlag.Primary;
                var curField = new ParsedField(
                    splitted[2],
                    splitted[3],
                    splitted[4],
                    // dump data is machine-generated: parse with the invariant culture, not the UI culture
                    int.Parse(splitted[5], System.Globalization.CultureInfo.InvariantCulture),
                    flag2,
                    splitted[10],
                    splitted[11],
                    ParsedAsLike.None);
                curField.Type = ParserHandler.ConvertStringToParsedPrimitiveType(curField.TempType, false);
                currentTable.Fields.Add(curField);
                break;
        }
    });
}
/// <summary>
/// Parses a text into a list of parsedItems.
/// Drives the whole parse: sets up the include/preproc context, runs the lexer
/// token stream through Analyze(), back-fills per-line info and records a parser
/// error if &amp;IF / &amp;ENDIF blocks are unbalanced at the end of the file.
/// </summary>
/// <param name="proLexer">Lexer whose token list is consumed by this parse</param>
/// <param name="filePathBeingParsed">Path of the file the tokens come from (used in error reports)</param>
/// <param name="defaultScope">Scope to attach parsed items to; when null a root ParsedFile scope is created</param>
/// <param name="matchKnownWords">Enables known-word matching (only honored when KnownStaticItems is available)</param>
public Parser(ProLexer proLexer, string filePathBeingParsed, ParsedScopeItem defaultScope, bool matchKnownWords) {
    // process inputs
    _filePathBeingParsed = filePathBeingParsed;
    _matchKnownWords = matchKnownWords && KnownStaticItems != null;

    // the first of this list represents the file currently being parsed
    _parsedIncludes.Add(
        new ParsedIncludeFile(
            "root",
            new TokenEos(null, 0, 0, 0, 0),
            // the preprocessed variable {0} equals to the filename...
            new Dictionary <string, List <Token> >(StringComparer.CurrentCultureIgnoreCase) {
                { "0", new List <Token> { new TokenWord(Path.GetFileName(FilePathBeingParsed), 0, 0, 0, 0) } }
            },
            _filePathBeingParsed,
            null)
    );

    // init context
    _context = new ParseContext {
        BlockStack = new Stack <BlockInfo>(),
        PreProcIfStack = new Stack <ParsedPreProcBlock>(),
        UibBlockStack = new Stack <ParsedPreProcBlock>()
    };

    // create root item (only when no scope was supplied by the caller)
    if (defaultScope == null) {
        var rootToken = new TokenEos(null, 0, 0, 0, 0);
        rootToken.OwnerNumber = 0;
        _rootScope = new ParsedFile("Root", rootToken);
        AddParsedItem(_rootScope, rootToken.OwnerNumber);
    } else {
        _rootScope = defaultScope;
    }
    _context.Scope = _rootScope;

    // Analyze: walk the token stream; a failure on one token is logged and the
    // parse continues with the next token rather than aborting the whole file
    _tokenList = proLexer.GetTokensList;
    _tokenCount = _tokenList.Count;
    ReplacePreProcVariablesAhead(1); // replaces a preproc var {&x} at token position 0
    ReplacePreProcVariablesAhead(2); // replaces a preproc var {&x} at token position 1
    while (MoveNext()) {
        try {
            Analyze();
        } catch (Exception e) {
            ErrorHandler.LogError(e, "Error while parsing the following file : " + filePathBeingParsed);
        }
    }

    // add missing values to the line dictionary; walking backwards propagates the
    // info of each known line to the unknown lines above it
    var current = new LineInfo(GetCurrentDepth(), _rootScope);
    for (int i = proLexer.MaxLine; i >= 0; i--) {
        if (_lineInfo.ContainsKey(i)) {
            current = _lineInfo[i];
        } else {
            _lineInfo.Add(i, current);
        }
    }

    // check that we match an &ENDIF for each &IF
    if (_context.PreProcIfStack.Count > 0) {
        _parserErrors.Add(new ParserError(ParserErrorType.MismatchNumberOfIfEndIf, PeekAt(0), _context.PreProcIfStack.Count, _parsedIncludes));
    }

    // dispose: release the parse context and token list so they can be collected
    _context.BlockStack.Clear();
    _context.PreProcIfStack.Clear();
    _context.UibBlockStack.Clear();
    _context = null;
    _tokenList = null;

    // if we are parsing an include file that was saved for later use, update it
    if (SavedLexerInclude.ContainsKey(filePathBeingParsed)) {
        SavedLexerInclude.Remove(filePathBeingParsed);
    }
}
/// <summary>
/// Visits an end-of-statement token. Tokens whose line falls outside the
/// [FromLine, ToLine] window are ignored; the styling call itself is
/// currently disabled.
/// </summary>
public void Visit(TokenEos tok) {
    var withinWindow = tok.Line >= FromLine && tok.Line <= ToLine;
    if (!withinWindow)
        return;
    // styling intentionally left disabled:
    //Npp.StyleText((int)TextStyle.Default, tok.StartPosition, tok.EndPosition);
}