public int AnalyseFile(String filePath)
{
    m_Comment = "";
    m_SlashStarComment = false;
    int _Ret = 0;

    if (!File.Exists(filePath))
    {
        return -1;
    }

    string ext = Path.GetExtension(filePath).ToLower();
    if (string.IsNullOrEmpty(ext) || !ext.Equals(".seq")) // only .seq files are parsed
    {
        return -1;
    }

    m_Scope = m_Model.GetRelativePath(filePath); // the sequence relative to the project dir
    m_IsClassDef = false;
    /* TODO
     * if (resource.getProjectRelativePath().segment(0).equalsIgnoreCase("SOURCE")) {
     *     m_IsClassDef = true; // Version 1: it is in //SOURCE//...
     * }
     * if (resource.getProjectRelativePath().segment(0).equalsIgnoreCase("APP")) {
     *     m_IsClassDef = true; // Version 2: it is in //APP//PLUGINS//...
     * }*/
    if (!m_IsClassDef) // each SEQ includes itself
    {
        m_Model.UpdateObjList(new Obj(m_Scope, "", m_Scope, ""));
    }

    // derive brace-based fold levels line by line
    int levelCurrent = 0;
    int levelPrev = -1;
    int lineNo = 0;
    foreach (string _line in File.ReadAllLines(filePath))
    {
        evaluate(_line, lineNo);
        if (_line.Contains("{"))
        {
            levelCurrent++;
        }
        if (_line.Contains("}"))
        {
            levelCurrent--;
        }
        // clamp to 0 so the initial levelPrev of -1 does not wrap around when cast to uint
        uint lev = (uint)Math.Max(levelPrev, 0);
        NPP.SetFoldLevel(lineNo, lev, levelCurrent > levelPrev); // TODO !!
        levelPrev = levelCurrent;
        lineNo++;
    }
    return _Ret;
}
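// Illustrative sketch (not part of the original plugin code): the loop above derives
// fold levels purely from '{' / '}' counts per line and hands them to the editor via
// NPP.SetFoldLevel. The helper below shows that computation in isolation; the names
// "FoldInfo" and "ComputeFoldLevels" are hypothetical and used only for this example.
private struct FoldInfo
{
    public uint Level;    // fold level assigned to the line
    public bool IsHeader; // true when the line opens a deeper block
}

private static FoldInfo[] ComputeFoldLevels(string[] lines)
{
    var result = new FoldInfo[lines.Length];
    int levelCurrent = 0;
    int levelPrev = 0;
    for (int i = 0; i < lines.Length; i++)
    {
        if (lines[i].Contains("{")) levelCurrent++;
        if (lines[i].Contains("}")) levelCurrent--;
        result[i].Level = (uint)Math.Max(levelPrev, 0);
        result[i].IsHeader = levelCurrent > levelPrev;
        levelPrev = levelCurrent;
    }
    return result;
}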
void RebuildObjList()
{
    // add the basic types to IntelliSense
    ObjDecl _A;
    for (int i = 0; i < BASIC_TYPES.Count; i++)
    {
        _A = new ObjDecl(BASIC_TYPES[i], ObjDecl.TClassType.tCTType, "", "", "", "", 0, 0);
        UpdateObjDecl(_A);
    }

    DateTime _start = DateTime.Now;
    //Log.getInstance().Add("collecting files ", Log.EnuSeverity.Info, "");
    Tokenizer _tokenizer = new Tokenizer();
    LinkedList<Tokenizer.Token> _Tokens = new LinkedList<Tokenizer.Token>();
    List<String> Dirs = new List<String>(); // work list of directories still to scan (absolute paths)
    int k = 0;
    try
    {
        String[] _SubProj = getSubProj();
        for (int i = 0; i < _SubProj.Length; i++)
        {
            Dirs.Add(Path.Combine(m_ProjectDir, getSeqDir(_SubProj[i])));
        }
        while (k < Dirs.Count)
        {
            FileInfo[] _files = new DirectoryInfo(Dirs[k]).GetFiles();
            for (int i = 0; i < _files.Length; i++)
            {
                if (_files[i].Extension.Equals(".seq", StringComparison.OrdinalIgnoreCase))
                {
                    _Tokens.AddLast(_tokenizer.TokenizeFile(_files[i].FullName));
                    NPP.SetFoldLevel(1, 1, true); // TODO !! placeholder fold levels
                    NPP.SetFoldLevel(2, 2, false);
                    NPP.SetFoldLevel(3, 2, false);
                    NPP.SetFoldLevel(4, 1, false);
                }
            }
            // queue every sub-directory right after the current one so it gets scanned as well
            DirectoryInfo[] _Dirs = new DirectoryInfo(Dirs[k]).GetDirectories();
            for (int i = 0; i < _Dirs.Length; i++)
            {
                Dirs.Insert(k + 1, _Dirs[i].FullName);
            }
            k++;
        }
        //Log.getInstance().Add("Tokenized all", Log.EnuSeverity.Info, "");
        Parser2 _parser2 = new Parser2(this, m_ProjectDir);
        _parser2.ParseTokens(_Tokens);
        LinkedList<Parser2.Context.Log>.Enumerator _l = _parser2.GetLogs().GetEnumerator();
        while (_l.MoveNext())
        {
            Log.getInstance().Add(_l.Current.m_Text, Log.EnuSeverity.Warn, _l.Current.m_Cmd.AsText());
        }

        // update the database with the parser result
        LinkedList<Parser2.CmdBase>.Enumerator _Cmds;
        List<String>.Enumerator _Scopes = _parser2.GetScopes().GetEnumerator();
        while (_Scopes.MoveNext())
        {
            //Log.getInstance().Add("write DB " + _Scopes.Current, Log.EnuSeverity.Info, "");
            // if(!m_IsClassDef)
            {
                // each SEQ includes itself
                this.UpdateObjList(new Obj(_Scopes.Current, "", _Scopes.Current, "", 0, 0));
            }
            _Cmds = _parser2.GetCmds(_Scopes.Current).GetEnumerator();
            while (_Cmds.MoveNext())
            {
                PublishCmdToDB(_Scopes.Current, _Cmds.Current);
            }
        }
        Log.getInstance().Add("Parsing done ", Log.EnuSeverity.Info, "");
    }
    catch (Exception ex)
    {
        Log.getInstance().Add(ex.Message, Log.EnuSeverity.Error, "");
    }
}
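// Illustrative sketch (not part of the original plugin code): RebuildObjList above walks
// the project tree with an explicit work list instead of recursion, inserting each
// sub-directory right after the directory currently being scanned. The hypothetical
// helper below ("CollectSeqFiles") shows the same traversal pattern on its own and
// returns the full paths of all *.seq files under the given root directories.
private static List<string> CollectSeqFiles(IEnumerable<string> rootDirs)
{
    var dirs = new List<string>(rootDirs); // work list of directories still to scan
    var seqFiles = new List<string>();
    int k = 0;
    while (k < dirs.Count)
    {
        var current = new DirectoryInfo(dirs[k]);
        foreach (FileInfo file in current.GetFiles())
        {
            if (file.Extension.Equals(".seq", StringComparison.OrdinalIgnoreCase))
            {
                seqFiles.Add(file.FullName);
            }
        }
        // insert sub-directories right after the current entry so they are scanned
        // before any later root directory
        foreach (DirectoryInfo sub in current.GetDirectories())
        {
            dirs.Insert(k + 1, sub.FullName);
        }
        k++;
    }
    return seqFiles;
}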