/// <summary>
/// Parses every pawn include file, extracts its function signatures with the
/// lexer, and builds a tree (file -> function -> "Add to code" leaf) for the
/// UI. Results are stored in tempIncludeFunctionContent and
/// tempIncludeFunctionList.
/// </summary>
/// <param name="includeFiles">Full paths of the include files to parse.</param>
void ParseIncludeFiles(IEnumerable<string> includeFiles)
{
    tempIncludeFunctionContent = new TreeNode();
    tempIncludeFunctionList = new List<lexer.functionData>();

    // Length of "<pwnPath>\include\" — used to strip the directory prefix so
    // the tree shows only the name relative to the include folder. Hoisted
    // out of the loop: it does not depend on the current file.
    int pathLength = (pwnPath + "\\include").Length + 1;

    foreach (string includeFile in includeFiles)
    {
        // Read the whole include file. 'using' guarantees the reader is
        // closed even if ReadToEnd throws (the original leaked the handle
        // on exception).
        string text;
        using (StreamReader file = new StreamReader(includeFile))
        {
            text = file.ReadToEnd();
        }

        // One tree node per include file, labelled with the relative name.
        TreeNode node = tempIncludeFunctionContent.Nodes.Add(includeFile.Remove(0, pathLength));

        // Extract the function signatures declared in this include.
        lexer lex = new lexer();
        List<lexer.functionData> functionList = new List<lexer.functionData>();
        lex.includeAnalysis(text, ref functionList);

        foreach (lexer.functionData function in functionList)
        {
            node.Nodes.Add(function.fullIdentiferDataTypes).Nodes.Add("Add to code (double click)");
        }

        // Keep a flat list of every function across all includes.
        tempIncludeFunctionList.AddRange(functionList);
    }
}
/// <summary>
/// Loads a source file into the editor, refreshes the lexer/autocomplete
/// state, records the path as the most recently opened file, and updates
/// the window caption.
/// </summary>
/// <param name="path">Full path of the file to open.</param>
void openFile(string path)
{
    // Read the file into the editor. File.ReadAllText disposes its stream
    // even on failure (the original StreamReader leaked on exception).
    string text = File.ReadAllText(path, Encoding.Default);
    codeEditor.Text = text;
    fileSaved = true;

    // Remember where the document lives and derive a display name.
    currentFilePath = path;
    // Path.GetFileNameWithoutExtension handles both '\' and '/' separators
    // and strips only the final extension — the original Split('.')[0]
    // truncated names such as "my.script.pwn" down to "my".
    currentFileName = Path.GetFileNameWithoutExtension(path);

    // Just loaded the new file - no changes possible yet.
    noChanges = true;

    // Run the lexer once over the freshly loaded code.
    List<lexer.defineData> userDefineList = null;
    List<lexer.functionData> userFunctionList = null;
    lexer lex = new lexer();
    lex.codeAnalysis(text, ref userDefineList, ref userFunctionList, false);

    // Rebuild the autocomplete list on a background thread.
    ScanDocumentThreaded(true);

    // Persist the path so it can be reopened on next start; the writer is
    // disposed deterministically via 'using'.
    using (StreamWriter recentlyFile = new StreamWriter(appPath + "\\PawnStuff\\last_opened.data", false))
    {
        recentlyFile.Write(path);
    }

    // Update caption:
    this.Text = "Pawnfox - " + currentFileName;
}
/// <summary>
/// Entry point: reads a source-file path from the console, tokenizes the
/// file, writes a token/error report next to the input (".out"), echoes the
/// report, and hands the token stream to the syntactic analyzer.
/// </summary>
static void Main(string[] args)
{
    List<string> Tokens = new List<string>();
    List<string> definiciones = new List<string>();

    Console.WriteLine("Please enter your text file path");
    String a = Console.ReadLine();

    string text = "";
    bool errorB = true; // false while inside an unterminated /* ... comment
    string sample = System.IO.File.ReadAllText(a);
    string[] output = a.Split('.'); // output[0] + ".out" is the report path

    // Token definitions, tried in order — the first match wins. The
    // [v][o][i][d]-style single-character classes of the original are
    // rewritten as the equivalent plain literals.
    var defs = new TokenDefinition[]
    {
        // comentarios
        new TokenDefinition(@"(/\*([^*]|[\r\n]|(\*+([^*/]|[\r\n])))*\*+/)|^(//.*)", "T_Comentario"),
        new TokenDefinition(@"/\*.*", "Inicio_Coment"),
        new TokenDefinition(@".*\*/", "Final_Coment"),
        // string
        new TokenDefinition(@"([""'])(?:\\\1|.)*?\1", "T_string"),
        new TokenDefinition(@"^[\""].*", "Incomplete_String"),
        // numeros — BUGFIX: the hex class contained a literal '*' ([\d*a-fA-F])
        new TokenDefinition(@"[-+]?\d+\.\d*([eE][-+]?\d+)?", "T_DoubleConstant"),
        new TokenDefinition(@"0[Xx][\da-fA-F]+", "T_Hexadecimal"),
        new TokenDefinition(@"[-+]?\d+", "T_IntConstant"),
        // true y false — BUGFIX: lookahead added so "truex" lexes as an
        // identifier, consistent with every other keyword below
        new TokenDefinition(@"true(?![A-Za-z\d_]+)", "T_TRUE"),
        new TokenDefinition(@"false(?![A-Za-z\d_]+)", "T_FALSE"),
        // PALABRAS RESERVADAS
        new TokenDefinition(@"void(?![_A-Za-z\d]+)", "T_void"),
        new TokenDefinition(@"int(?![A-Za-z\d_]+)", "T_int"),
        new TokenDefinition(@"double(?![A-Za-z\d_]+)", "T_double"),
        new TokenDefinition(@"bool(?![A-Za-z\d_]+)", "T_bool"),
        new TokenDefinition(@"string(?![A-Za-z\d_]+)", "T_string"),
        new TokenDefinition(@"class(?![A-Za-z\d_]+)", "T_class"),
        new TokenDefinition(@"const(?![A-Za-z\d_]+)", "T_const"),
        new TokenDefinition(@"interface(?![A-Za-z\d_]+)", "T_interface"),
        new TokenDefinition(@"null(?![A-Za-z\d_]+)", "T_null"),
        new TokenDefinition(@"this(?![A-Za-z\d_]+)", "T_this"),
        new TokenDefinition(@"for(?![A-Za-z\d_]+)", "T_for"),
        new TokenDefinition(@"while(?![A-Za-z\d_]+)", "T_while"),
        new TokenDefinition(@"foreach(?![A-Za-z\d_]+)", "T_foreach"),
        new TokenDefinition(@"if(?![A-Za-z\d_]+)", "T_if"),
        new TokenDefinition(@"else(?![A-Za-z\d_]+)", "T_else"),
        new TokenDefinition(@"return(?![A-Za-z\d_]+)", "T_return"),
        new TokenDefinition(@"break(?![A-Za-z\d_]+)", "T_break"),
        new TokenDefinition(@"New(?![A-Za-z\d_]+)", "T_New"),
        new TokenDefinition(@"NewArray(?![A-Za-z\d_]+)", "T_NewArray"),
        new TokenDefinition(@"Console(?![A-Za-z\d_]+)", "T_Console"),
        new TokenDefinition(@"WriteLine(?![A-Za-z\d_]+)", "T_WriteLine"),
        // identificadores (31 characters max)
        new TokenDefinition(@"[_A-Za-z][_A-Za-z0-9]{0,30}", "T_Identifier"),
        // Operadores y caracteres de puntuacion
        new TokenDefinition(@"\.", "."),
        new TokenDefinition(@"\+", "+"),
        new TokenDefinition(@"\-", "-"),
        new TokenDefinition(@"\*", "*"),
        new TokenDefinition(@"\/", "/"),
        new TokenDefinition(@"\:", ":"),
        new TokenDefinition(@"<=", "<="),   // BUGFIX: was mis-named ">"
        new TokenDefinition(@"\<", "<"),
        new TokenDefinition(@">=", ">="),
        new TokenDefinition(@"\>", ">"),
        new TokenDefinition(@"!=", "!="),
        new TokenDefinition(@"==", "=="),
        new TokenDefinition(@"\=", "="),
        new TokenDefinition(@"\!", "!"),
        new TokenDefinition(@"&&", "&&"),   // BUGFIX: [&&] is a class matching one '&'
        new TokenDefinition(@"\|\|", "||"),
        new TokenDefinition(@"\[\]", "[]"),
        new TokenDefinition(@"\[", "["),
        new TokenDefinition(@"\]", "]"),
        new TokenDefinition(@"\(\)", "()"),
        new TokenDefinition(@"\(", "("),
        new TokenDefinition(@"\)", ")"),
        new TokenDefinition(@"\s", "SPACE"),
        new TokenDefinition(@"\%", "%"),
        new TokenDefinition(@"\{\}", "{}"),
        new TokenDefinition(@"\}", "}"),
        new TokenDefinition(@"\{", "{"),
        new TokenDefinition(@"\,", ","),
        new TokenDefinition(@"\;", ";"),
        // error — anything not matched above
        new TokenDefinition(@".", "Error")
    };

    TextReader r = new StringReader(sample);
    lexer l = new lexer(r, defs);
    while (l.Next())
    {
        string token = l.Token.ToString();

        // Track whether we are inside an unterminated block comment.
        if (token == "Inicio_Coment")
        {
            errorB = false;
        }
        if (token == "Final_Coment" && !errorB)
        {
            errorB = true;
        }

        if (token != "Error" && l.TokenContents.Length < 31 && token != "Incomplete_String")
        {
            // A valid token: record it unless it is whitespace/comment text
            // or we are still inside an open block comment.
            if (token != "SPACE" && token != "T_Comentario" && token != "Final_Coment" && errorB)
            {
                Tokens.Add(l.TokenContents);
                definiciones.Add(token);
                text += l.TokenContents + " line " + l.LineNumber + " cols " + l.Position + " is " + l.Token + "\r\n";
            }
        }
        else if (l.TokenContents.Length < 31 && token != "Incomplete_String")
        {
            // token == "Error": single character nothing else matched.
            text += "*** Error line " + l.LineNumber + " *** Unrecognized char: " + l.TokenContents + "\r\n";
        }
        else if (token != "T_Comentario" && token != "Incomplete_String")
        {
            // Token longer than the 31-character limit.
            text += "*** Error line " + l.LineNumber + " Secuencia mayor al limite permitido" + "\r\n";
        }
        else if (token == "Incomplete_String")
        {
            text += "*** Error line " + l.LineNumber + " string sin terminar" + "\r\n";
        }
    }

    // EOF diagnostics.
    if (!errorB)
    {
        text += "*** Error line " + l.LineNumber + "*** EOF in unfinished comment: " + "\r\n";
    }
    else if (l.Token.ToString() == "Incomplete_String")
    {
        text += "*** Error line " + l.LineNumber + "*** EOF in unfinished string: " + "\r\n";
    }

    // Write the report once. The original rewrote the whole file after every
    // single token (O(n^2) I/O); the final file contents are the same.
    System.IO.File.WriteAllText(output[0] + ".out", text);

    Console.WriteLine(text);
    Sintatico S = new Sintatico(Tokens, definiciones);
    Console.ReadKey();
}
/// <summary>
/// Background-worker handler: re-scans the document text passed via
/// e.Argument and rebuilds the temporary tree roots shown in the sidebar:
/// grouped #defines, user functions (with their call-site line numbers),
/// enum declarations and PVar names.
/// </summary>
/// <param name="sender">The BackgroundWorker raising the event (unused).</param>
/// <param name="e">e.Argument holds the full document text.</param>
private void analyzeThread_DoWork(object sender, DoWorkEventArgs e)
{
    string code = e.Argument.ToString();
    string[] lines = code.Split('\n');

    // Roots for the grouped #define view.
    tempDefineContent = new TreeNode();
    TreeNode maxNode = tempDefineContent.Nodes.Add("MAX");
    TreeNode dialogNode = tempDefineContent.Nodes.Add("DIALOG");
    TreeNode colorNode = tempDefineContent.Nodes.Add("COLORS");
    TreeNode otherNode = tempDefineContent.Nodes.Add("Other");

    // User function variables:
    tempUserFunctionContent = new TreeNode();

    // Enum parsing state: tempNode is the enum currently being collected,
    // stage != 0 means "inside an enum body".
    TreeNode tempNode = null;
    tempEnumContent = new TreeNode();
    int stage = 0;

    // PVar variables:
    tempPVarContent = new TreeNode();

    // Run the lexer over the whole document.
    tempUserFunctionList = null;
    List<lexer.defineData> defineList = null;
    lexer lex = new lexer();
    lex.codeAnalysis(code, ref defineList, ref tempUserFunctionList, false);

    // Group each #define by a keyword found in its identifier.
    foreach (lexer.defineData define in defineList)
    {
        string id = define.identifer.ToLower();
        TreeNode group;
        if (id.Contains("max"))
        {
            group = maxNode;
        }
        else if (id.Contains("dialog"))
        {
            group = dialogNode;
        }
        else if (id.Contains("color"))
        {
            group = colorNode;
        }
        else
        {
            group = otherNode;
        }
        group.Nodes.Add(define.identifer + " " + define.value);
    }

    // One node per user function, with its call sites (displayed 1-based).
    foreach (lexer.functionData function in tempUserFunctionList)
    {
        tempNode = tempUserFunctionContent.Nodes.Add(function.fullIdentiferDataTypes);
        TreeNode callNode = tempNode.Nodes.Add("Calls");
        tempNode.Nodes.Add("Add to code (double click)");
        // Call stuff:
        foreach (int call in function.occurences)
        {
            callNode.Nodes.Add((call + 1).ToString());
        }
    }

    foreach (string line in lines)
    {
        // Lower the line once; the original called ToLower() twice per line.
        string lower = line.ToLower();

        // PVars: the first quoted argument of Set/GetPVar calls.
        if (lower.Contains("setpvar") || lower.Contains("getpvar"))
        {
            string[] tokens = line.Split('\"');
            if (tokens.Length >= 3)
            {
                // tokens[1] is the text between the first pair of quotes. The
                // original wrapped it in quotes and immediately stripped every
                // quote again — a no-op round-trip, removed here.
                string varName = tokens[1];
                // Skip format placeholders such as "pvar_%d".
                if (!varName.Contains('%'))
                {
                    bool exists = false;
                    foreach (TreeNode node in tempPVarContent.Nodes)
                    {
                        if (node.Text == varName)
                        {
                            exists = true;
                            break;
                        }
                    }
                    if (!exists)
                    {
                        tempPVarContent.Nodes.Add(varName);
                    }
                }
            }
        }

        // Enum collection: a line containing "enum" opens a node, a lone '}'
        // closes it, everything in between becomes a child line.
        if (line.Contains("enum") || stage != 0)
        {
            if (line.Contains("enum"))
            {
                tempNode = new TreeNode(line);
                stage = 1;
            }
            else if (line.Contains('{') && !line.Contains('}'))
            {
                // Opening brace on its own line: skip it (a 'continue' here
                // would also skip the PVar scan on future reorderings).
            }
            else if (line.Contains('}') && !line.Contains('{'))
            {
                tempEnumContent.Nodes.Add(tempNode);
                stage = 0;
            }
            else
            {
                tempNode.Nodes.Add(line);
            }
        }
    }
}