public bool doTest()
{
  ReqDisplay.title("Requirement #10c");
  ReqDisplay.message("Testing semi extraction");
  result = FileUtils.fileLines(fileSpec2);
  if (!result)
  {
    return(false);
  }
  var toker = new Toker();
  toker.doReturnComments = true;
  toker.open(fileSpec2);
  var semi = new Semi();
  semi.toker = toker;
  while (!semi.isDone())
  {
    semi.get();
    replace(semi, "\n", "\\n");
    replace(semi, "\r", "\\r");
    //replace(semi, )
    semi.show();
  }
  return(result);
}
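//----< hedged sketch of the replace helper used above >--------------
// The local replace(semi, oldStr, newStr) helper is not shown in this
// section.  The sketch below is an assumption, not the original code:
// it presumes Semi exposes a count property and a read/write string
// indexer so escape characters can be rewritten before semi.show().
static void replace(Semi semi, string oldStr, string newStr)
{
  for (int i = 0; i < semi.count; ++i)           // assumed Semi.count
    semi[i] = semi[i].Replace(oldStr, newStr);   // assumed string indexer with setter
}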
public bool testToker2(string path)
{
  Toker toker = new Toker();
  string fqf = System.IO.Path.GetFullPath(path);
  if (!toker.open(fqf))
  {
    Console.Write("\n can't open {0}\n", fqf);
    return(false);
  }
  toker.setSpecialSingleChars(new List<string> { "<", ">" });
  toker.setSpecialCharPairs(new List<string> { "<<", "==" });
  while (!toker.isDone())
  {
    Token tok = toker.getTok();
    Console.Write("\n -- line#{0, 4} : {1}", toker.lineCount(), tok);
  }
  toker.close();
  return(true);
}
public bool doTest()
{
  ReqDisplay.title("Requirement #10b");
  ReqDisplay.message("Testing token extraction");
  result = FileUtils.fileLines(fileSpec1);
  if (!result)
  {
    return(false);
  }
  var toker = new Toker();
  toker.doReturnComments = true;
  toker.open(fileSpec1);
  var tokList = new List<string>();
  while (!toker.isDone())
  {
    var tok = toker.getTok();
    if (tok == "\n") { tok = "\\n"; }
    if (tok == "\r") { tok = "\\r"; }
    tokList.Add(tok);
  }
  ReqDisplay.showList(tokList, "Tokens:");
  return(result);
}
public bool doTest()
{
  ReqDisplay.title("Requirement #6");
  ReqDisplay.message("Semi uses Toker to get tokens until a terminator is retrieved");
  var toker = new Toker();
  fileSpec = Path.GetFullPath(fileSpec);
  if (!toker.open(fileSpec))
  {
    Console.Write("\n toker can't open \"{0}\"", fileSpec);
    return(result = false);
  }
  Console.Write("\n processing file \"{0}\"", fileSpec);
  var semi = new Semi();
  semi.toker = toker;
  while (!semi.isDone())
  {
    semi.get();
    semi.show();
  }
  return(result);
}
public static bool findSequence(bool findAll, params string[] toks)
{
  var found = false;
  if (!File.Exists(file))
  {
    return(false);
  }
  var semi = new Semi();
  var toker = new Toker();
  toker.open(file);
  semi.toker = toker;
  while (!semi.isDone())
  {
    semi.get();
    if (semi.hasSequence(toks))
    {
      semi.show();
      found = true;
      if (findAll == false)
      {
        return(true);
      }
    }
  }
  return(found);
}
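//----< hedged usage sketch for findSequence >------------------------
// Hypothetical driver (not part of the original code): the static
// 'file' field referenced above is assumed to already hold a valid
// path.  findSequence(false, ...) stops at the first matching semi,
// while findSequence(true, ...) shows every semi containing the
// token sequence.  The token sequences below are example values only.
static void demoFindSequence()
{
  bool foundFirst = findSequence(false, "class", "Toker");   // first match only
  bool foundEvery = findSequence(true, "public", "bool");    // all matches
  Console.Write("\n found first: {0}, found any: {1}\n", foundFirst, foundEvery);
}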
//----< test one tokenizer case, selected by index >------------------
static private bool testTokenizer(int testIndex)
{
  Toker toker = new Toker();
  // output file that receives this test case's token stream
  StreamWriter file = new StreamWriter("../../" + testCaseResult + testIndex + ".txt");
  // path of the test-case .cs file to tokenize
  string fqf = System.IO.Path.GetFullPath("../../" + testCaseFileName + testIndex + ".cs");
  if (!toker.open(fqf))
  {
    file.Close();
    Console.Write("\n can't open {0}\n", fqf);
    return(false);
  }
  else
  {
    Console.Write("\n processing file: {0}", fqf);
  }
  // write tokens into the output file until end of file
  while (!toker.isDone())
  {
    StringBuilder tok = toker.getTok();
    file.Write("\n -- line#{0, 4} : {1}", toker.lineCount(), tok);
  }
  toker.close();
  file.Close();
  // compare the generated result with the expected (standard) result
  return(compareTwoFiles(
    "../../" + testCaseResult + testIndex + ".txt",
    "../../" + testCaseResultStandard + testIndex + ".txt"));
}
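//----< hedged sketch of the compareTwoFiles helper >-----------------
// compareTwoFiles is not shown in this section.  A minimal sketch,
// assuming a straight text comparison of the generated result against
// the stored standard result is sufficient (an assumption, not the
// original implementation):
static bool compareTwoFiles(string resultPath, string standardPath)
{
  if (!System.IO.File.Exists(resultPath) || !System.IO.File.Exists(standardPath))
    return(false);
  string result = System.IO.File.ReadAllText(resultPath);
  string standard = System.IO.File.ReadAllText(standardPath);
  return(result == standard);   // test passes only if outputs match exactly
}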
//----< method used to initiate the tokenizer tester >----------------
public bool testToken(string path)
{
  string fqf = System.IO.Path.GetFullPath(path);
  if (!toker.open(fqf))
  {
    Console.Write("\n can't open {0}\n", fqf);
    return(false);
  }
  while (!toker.isDone())
  {
    Token tok = toker.getTok();
    Console.Write("\n -- line#{0, 4} : {1}", toker.lineCount(), tok);
  }
  toker.close();
  return(true);
}
//----------------< Analyze dependency >------------------------------------
public void ConnectNode(TypeAnalysis typea, string fqf)
{
  List<string> namestore = new List<string>();
  string filename = fqf.Substring(fqf.LastIndexOf('\\') + 1);
  CsNode<string, string> node = csgraph.findNode(filename);
  Toker toker = new Toker();
  toker.doReturnComments = false;
  if (!toker.open(fqf))
  {
    Console.Write("\n can't open {0}\n", fqf);
  }
  //else
  //{
  //  Console.Write("\n processing file: {0}\n", fqf);
  //}
  while (!toker.isDone())
  {
    Token tok = toker.getTok();
    if (tok == null)
    {
      continue;
    }
    if (typea.typetable_.table.ContainsKey(tok))  // the token is a key in the type table
    {
      if (typea.typetable_.table[tok][0].namesp == "")
      {
        namestore.Add(tok);
      }
      else
      {
        List<TypeItem> list_it = typea.typetable_.table[tok];
        foreach (TypeItem it in list_it)
        {
          if (namestore.Contains(it.namesp))
          {
            // connect this file's node to the node of the file defining the type
            node.addChild(csgraph.findNode(it.file), "");
          }
        }
      }
    }
  }
  toker.close();
}
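//----< hedged usage sketch for ConnectNode >-------------------------
// Hypothetical driver (names are assumptions, not the original code):
// ConnectNode is run once per analyzed file so that every node in
// csgraph picks up edges to the files that define the types it uses.
public void connectAllNodes(TypeAnalysis typea, List<string> files)
{
  foreach (string f in files)
    ConnectNode(typea, System.IO.Path.GetFullPath(f));
}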
public bool doTest()
{
  ReqDisplay.title("Requirement #5");
  ReqDisplay.message("Toker reads one token with each call to getTok()");
  var toker = new Toker();
  fileSpec = Path.GetFullPath(fileSpec);
  if (!toker.open(fileSpec))
  {
    Console.Write("\n Toker can't open file \"{0}\"", fileSpec);
    return(result = false);
  }
  Console.Write("\n tokenizing file \"{0}\"", fileSpec);
  for (var i = 0; i < 5; ++i)
  {
    Console.Write("\n called Toker.getTok() to get \"{0}\"", toker.getTok());
  }
  return(result);
}
static bool Requirement4(string file)
{
  Console.Write("\n\n Demonstrate requirement 4: ");
  Console.Write("\n ============================\n");
  Console.Write(" The file content: \n\n");
  fs_ = new System.IO.StreamReader("../../" + file, true);
  int ch;
  while (!fs_.EndOfStream)
  {
    ch = fs_.Read();
    Console.Write("{0}", (char)ch);
  }
  fs_.Close();
  Console.Write("\n ============================\n");
  Console.Write("The output: \n");
  Toker toker = new Toker();
  string fqf = System.IO.Path.GetFullPath("../../" + file);
  if (!toker.open(fqf))
  {
    Console.Write("\n can't open {0}\n", fqf);
    return(false);
  }
  else
  {
    Console.Write("\n processing file: {0}", fqf);
  }
  while (!toker.isDone())
  {
    StringBuilder tok = toker.getTok();
    Console.Write("\n -- line#{0, 4} : {1}", toker.lineCount(), tok);
  }
  toker.close();
  return(true);
}
//----< opens member tokenizer with specified file >-----------------
public bool open(string fileName)
{
  return(toker.open(fileName));
}
//----< opens member tokenizer with specified file >-----------------
public bool open(string fileName)
{
  return(toker.open(fileName));  // Tokenizer.Toker.open()
}